seq_id stringlengths 4 11 | text stringlengths 113 2.92M | repo_name stringlengths 4 125 ⌀ | sub_path stringlengths 3 214 | file_name stringlengths 3 160 | file_ext stringclasses 18
values | file_size_in_byte int64 113 2.92M | program_lang stringclasses 1
value | lang stringclasses 93
values | doc_type stringclasses 1
value | stars int64 0 179k ⌀ | dataset stringclasses 3
values | pt stringclasses 78
values |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
27253207979 | import pandas as pd
import numpy as np
# Tutorial script: pandas Series/DataFrame arithmetic with index alignment,
# apply(), missing-value handling (fillna/dropna) and duplicate removal.
# NOTE: many expressions below (e.g. `s1 + s2`) have no effect when run as a
# plain script; they only display output in an interactive/notebook session.
# Series addition aligns on the index; non-overlapping labels yield NaN.
s1 = pd.Series([1, 2, 3], index=list('ABC'))
s2 = pd.Series([4, 5, 6], index=list('BCD'))
s1 + s2
# DataFrame - Series broadcasts the Series across rows, aligned on columns.
df1 = pd.DataFrame(np.arange(1, 13).reshape(3, 4), index=list('abc'), columns=list('ABCD'))
df1 - s1
# DataFrame * DataFrame aligns on both index and columns; the div() method
# form allows a fill_value for labels present in only one frame.
df2 = pd.DataFrame(np.arange(1, 13).reshape(4, 3), index=list('bcde'), columns=list('CDE'))
df1 * df2
df1.div(df2, fill_value=0)
# apply(): column-wise (axis=0) max, then a row-wise (axis=1) range function.
df0 = pd.DataFrame(np.random.rand(6, 4), index=pd.date_range('20160101', periods=6), columns=list('ABCD'))
df0.apply(max, axis=0)
f = lambda x: x.max() - x.min()
df0.apply(f, axis=1)
print(df1)
print(df2)
# Missing-value inspection on the aligned product.
df3 = df1.mul(df2, fill_value=0)
df3.isnull()
df3.notnull()
df3.B[df3.B.notnull()]
# Inject NaNs, then demonstrate the fillna strategies.
df4 = pd.DataFrame(np.random.rand(5, 4), index=list('abcde'), columns=list('ABCD'))
df4.loc['c', 'A'] = np.nan
df4.loc['b': 'd', 'C'] = np.nan
print(df4)
df4.fillna(0)
df4.fillna(method='ffill')
df4.fillna(method='bfill')
df4.fillna(method='backfill', axis=1)
df4.fillna(method='pad', limit=2)
# Only this call mutates df4 (inplace=True); the ones above return copies.
df4.fillna('missing', inplace=True)
print(df4)
df4.loc['c', 'A'] = np.nan
df4.loc['b': 'd', 'C'] = np.nan
print(df4)
# dropna: by row, by column with a minimum count, and only-all-NaN columns.
df4.dropna(axis=0)
df4.dropna(axis=1, thresh=3)
df4.dropna(axis=1, how='all')
# Duplicate detection/removal, full-row and by the 'c2' column only.
df5 = pd.DataFrame({'c1': ['apple'] * 3 + ['banana'] * 3 + ['apple'],
'c2': ['a', 'a', 3, 3, 'b', 'b', 'a']})
print(df5)
df5.duplicated()
df5.drop_duplicates()
df5.duplicated(['c2'])
df5.drop_duplicates(['c2']) | FunkyungJz/Some-thing-interesting-for-me | 量化投资书/量化投资以Python为工具/ch11/05.py | 05.py | py | 1,379 | python | en | code | null | github-code | 13 |
6874596416 | import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable
import pdb
import math
import numpy as np
class Conv_spa(nn.Module):
    """Spatial convolution applied independently to each angular view.

    The angular dimensions (u, v) are folded into the batch so a plain 2D
    conv + ReLU runs over the spatial (h, w) plane of every sub-aperture
    image of the light field.
    """

    def __init__(self, C_in, C_out, kernel_size, stride, padding, bias):
        super(Conv_spa, self).__init__()
        # remember the output width so forward() can reshape for any C_out
        self.C_out = C_out
        self.op = nn.Sequential(
            nn.Conv2d(C_in, C_out, kernel_size, stride = stride, padding = padding, bias = bias),
            nn.ReLU(inplace = True)
        )

    def forward(self, x):  # x: [N, u, v, C_in, h, w]
        N, u, v, c, h, w = x.shape
        x = x.reshape(N * u * v, c, h, w)  # fold angular views into the batch
        out = self.op(x)
        # bug fix: the original hard-coded 32 here, which crashed for any
        # other C_out; use the configured channel count instead
        out = out.reshape(N, u, v, self.C_out, h, w)
        return out
class Conv_ang(nn.Module):
    """Angular convolution: a 2D conv over the (u, v) angular plane.

    Every spatial position (h, w) is folded into the batch and the filter
    slides over the angular grid, which is assumed square with side
    ``angular`` (i.e. u == v == angular).
    """

    def __init__(self, C_in, C_out, kernel_size, stride, padding, angular, bias):
        super(Conv_ang, self).__init__()
        self.angular = angular
        # remember the output width so forward() can reshape for any C_out
        self.C_out = C_out
        self.op = nn.Sequential(
            nn.Conv2d(C_in, C_out, kernel_size, stride = stride, padding = padding, bias = bias),
            nn.ReLU(inplace = True)
        )

    def forward(self, x):  # x: [N, u, v, C_in, h, w]
        N, u, v, c, h, w = x.shape
        # [N*h*w, C_in, angular, angular]; assumes u == v == self.angular
        x = x.permute(0, 4, 5, 3, 1, 2).reshape(N * h * w, c, self.angular, self.angular)
        out = self.op(x)
        # bug fix: reshape with the configured C_out instead of a hard-coded 32
        out = out.reshape(N, h, w, self.C_out, u, v).permute(0, 4, 5, 3, 1, 2)
        return out
class Conv_epi_h(nn.Module):
    """EPI convolution over the (v, w) plane for each fixed (u, h).

    The (u, h) dimensions are folded into the batch so the 2D conv + ReLU
    operates on horizontal epipolar-plane images of the light field.
    """

    def __init__(self, C_in, C_out, kernel_size, stride, padding, bias):
        super(Conv_epi_h, self).__init__()
        # remember the output width so forward() can reshape for any C_out
        self.C_out = C_out
        self.op = nn.Sequential(
            nn.Conv2d(C_in, C_out, kernel_size, stride = stride, padding = padding, bias = bias),
            nn.ReLU(inplace = True)
        )

    def forward(self, x):  # x: [N, u, v, C_in, h, w]
        N, u, v, c, h, w = x.shape
        x = x.permute(0, 1, 4, 3, 2, 5).reshape(N * u * h, c, v, w)
        out = self.op(x)
        # bug fix: use self.C_out instead of the original hard-coded 32
        out = out.reshape(N, u, h, self.C_out, v, w).permute(0, 1, 4, 3, 2, 5)
        return out
class Conv_epi_v(nn.Module):
    """EPI convolution over the (u, h) plane for each fixed (v, w).

    The (v, w) dimensions are folded into the batch so the 2D conv + ReLU
    operates on vertical epipolar-plane images of the light field.
    """

    def __init__(self, C_in, C_out, kernel_size, stride, padding, bias):
        super(Conv_epi_v, self).__init__()
        # remember the output width so forward() can reshape for any C_out
        self.C_out = C_out
        self.op = nn.Sequential(
            nn.Conv2d(C_in, C_out, kernel_size, stride = stride, padding = padding, bias = bias),
            nn.ReLU(inplace = True)
        )

    def forward(self, x):  # x: [N, u, v, C_in, h, w]
        N, u, v, c, h, w = x.shape
        x = x.permute(0, 2, 5, 3, 1, 4).reshape(N * v * w, c, u, h)
        out = self.op(x)
        # bug fix: use self.C_out instead of the original hard-coded 32
        out = out.reshape(N, v, w, self.C_out, u, h).permute(0, 4, 1, 3, 5, 2)
        return out
class Autocovnlayer(nn.Module):
    """One densely-connected light-field block mixing four conv 'paths'.

    The block fuses the outputs of the previous ``dence_num`` layers,
    runs spatial/angular/EPI-h/EPI-v convolutions plus a per-ray mixing
    branch, and adds the result back residually.
    NOTE(review): the code hard-codes 32 feature channels in several
    reshapes, so it effectively assumes fn == 32 — confirm with callers.
    """
    def __init__(self,dence_num,component_num,angular,fn,bs):
        # dence_num: how many previous feature maps are concatenated as input
        # component_num: unused here beyond storage (kept for compatibility)
        # angular: side of the (square) angular grid, u == v == angular
        # fn: feature width; bs: bias flag forwarded to the convolutions
        super(Autocovnlayer, self).__init__()
        self.dence_num = dence_num
        self.component_num = component_num
        self.angular = angular
        self.kernel_size = 3
        # the four parallel "NAS" candidate operations
        self.naslayers = nn.ModuleList([
            Conv_spa(C_in = fn, C_out = 32, kernel_size = self.kernel_size, stride = 1, padding = 1, bias = bs),
            Conv_ang(C_in = fn, C_out = 32, kernel_size = self.kernel_size, stride = 1, padding = 1, angular = self.angular, bias = bs),
            Conv_epi_h(C_in = fn, C_out = 32, kernel_size = self.kernel_size, stride = 1, padding = 1, bias = bs),
            Conv_epi_v(C_in = fn, C_out = 32, kernel_size = self.kernel_size, stride = 1, padding = 1, bias = bs)
        ])
        ###################
        self.epi_boost = nn.Conv2d(in_channels = fn, out_channels=fn, kernel_size=3, stride=1, padding=1, bias = bs)
        self.Conv_all = nn.Conv2d(in_channels = fn+4, out_channels=fn, kernel_size=3, stride=1, padding=1, bias = bs)
        self.Conv_mixray = nn.Conv2d(in_channels = angular*angular, out_channels=angular*angular, kernel_size=3, stride=1, padding=1, bias = True)
        self.Conv_down = nn.Conv2d(in_channels = 32, out_channels=4, kernel_size=1, stride=1, padding=0, bias = False)
        self.Conv_mixdence = nn.Conv2d(in_channels = fn*self.dence_num, out_channels=fn, kernel_size=1, stride=1, padding=0, bias = False)
        self.Conv_mixnas = nn.Conv2d(in_channels = 32*5, out_channels=fn, kernel_size=1, stride=1, padding=0, bias = False) ## 1*1 paddding!!
        self.relu = nn.ReLU(inplace=True)
    def forward(self,x):
        # x: list of dence_num tensors, each [N, u, v, C, h, w]
        x = torch.stack(x,dim = 0) # [dence_num N C uv H W] [dence_num,N,64,uv,h,w]
        [fn, N, u,v, C, h, w] = x.shape
        # concatenate all previous features channel-wise and squeeze back to C
        x = x.permute([1,2,3,0,4,5,6]).reshape([N*u*v,fn*C,h,w]) # [N*uv, fn*c, h,w]
        x = self.relu(self.Conv_mixdence(x)) # ==> [N*uv, c', h, w]
        x_mix = x.reshape([N,u,v,C,h,w]) # [N,64,uv,h,w] !!!
        # run the four candidate conv paths on the fused features
        nas = []
        for layer in self.naslayers:
            nas_ = layer(x_mix)
            nas.append(nas_)
        # print(nas[-1].shape)
        # fifth path: boost the summed EPI responses with an extra conv
        x_epi = nas[-1] + nas[-2] # (N,uv,32,h,w)
        nas_ = self.relu(self.epi_boost(x_epi.reshape(N*u*v,C,h,w)))
        nas.append(nas_.reshape(N,u,v,C,h,w))
        # per-ray mixing: convolve across the u*v "ray" channels, then reduce
        # 32 features to 4 with a 1x1 conv (assumes C == 32 here)
        nas_1 = self.relu(self.Conv_mixray(x_mix.permute([0,3,1,2,4,5]).reshape(N*32,u*v,h,w)))
        nas_1 = nas_1.reshape(N,32,u,v,h,w).permute([0,2,3,1,4,5])
        nas_1 = self.relu(self.Conv_down(nas_1.reshape(N*u*v,32,h,w)))
        # fuse the five candidate outputs (5*32 channels) back to fn channels
        nas = torch.stack(nas,dim = 0)
        nas = nas.permute([1,2,3,0,4,5,6]).reshape([N*u*v,5*32,h,w]) ##[N*uv, fn*c, h,w]
        nas = self.relu(self.Conv_mixnas(nas))
        nas_2 = self.Conv_all(torch.cat([nas,nas_1],dim=1))
        nas_2 = nas_2.reshape(N,u,v,C,h,w)
        # residual connection around the whole block
        out = self.relu(x_mix + nas_2)
        return out
def initialize_weights(net_l, scale=1):
    """Kaiming-initialize every Conv2d inside the given network(s).

    Args:
        net_l: a module or a list of modules to initialize.
        scale: multiplier applied to the initialized weights (use a value
            below 1 for residual blocks to keep early outputs small).
    """
    if not isinstance(net_l, list):
        net_l = [net_l]
    for net in net_l:
        for m in net.modules():
            if isinstance(m, nn.Conv2d):
                # bug fix: the original called init.kaiming_normal_ but `init`
                # was never imported; use the nn.init namespace instead
                nn.init.kaiming_normal_(m.weight, a=0, mode='fan_in')
                m.weight.data *= scale  # for residual block
                if m.bias is not None:
                    m.bias.data.zero_()
class Illumination(nn.Module):
    """Estimates an illumination map from the current reconstruction.

    The initial map is the channel-wise maximum of a gamma-blended input,
    which is then refined by a dense 4D conv stack.
    NOTE(review): parameters are created with .cuda(), so this module
    requires a GPU at construction time.
    """
    def __init__(self,alpha,opt):
        # alpha is stored but not used in forward(); opt supplies sasLayerNum
        super(Illumination, self).__init__()
        self.alpha = alpha
        # MaxPool1d over the (last) channel axis -> per-pixel max of RGB
        self.maxpoll = nn.MaxPool1d(kernel_size=3,stride=1,padding=0)
        self.conv0 = nn.Conv2d(1, 32, kernel_size=3, stride=1, padding=1,bias = True)
        self.conv1 = nn.Conv2d(32, 3, kernel_size=3, stride=1, padding=1,bias = True)
        self.dense4d = make_autolayers(opt.sasLayerNum,32,opt)
        # learnable blending weight between x and the low-light input
        self.gamma = nn.Parameter(torch.rand([1],dtype=torch.float32,requires_grad=True).cuda())
        self.relu = nn.ReLU(inplace=True)
    def forward(self, x, lowlf):
        # x, lowlf: [N, u, v, c, h, w]; returns a map of the same shape
        N,u,v,c,h,w = x.shape
        I_init = x - self.gamma * (x - lowlf)
        # channel-wise max via MaxPool1d (kernel 3 == c channels)
        I_init = self.maxpoll(I_init.reshape(N*u*v,c,h*w).permute([0,2,1])) # out [Nuv,hw,1]
        I_init = I_init.permute([0,2,1]).reshape(N*u*v,1,h,w)
        I_feat = self.relu(self.conv0(I_init))
        I_feat = I_feat.reshape(N,u,v,32,h,w)
        # densely-connected refinement: each layer sees all previous features
        feat = [I_feat]
        for index,layer in enumerate(self.dense4d):
            feat_ = layer(feat)
            feat.append(feat_)
        # residual correction of the broadcast initial map
        out = I_init.expand(-1,c,-1,-1) - self.conv1(feat[-1].reshape(N*u*v,32,h,w))
        return out.reshape(N,u,v,c,h,w)
class StageBlock(nn.Module):
    """One unrolled optimization stage: denoise + illumination + gradient term.

    Produces the next reconstruction estimate and the current illumination
    map from the previous estimate and the low-light input.
    NOTE(review): delta/eta parameters are created with .cuda(), so a GPU
    is required at construction time.
    """
    def __init__(self, opt):
        super(StageBlock,self).__init__()
        self.illum = Illumination(1,opt)
        # gradient (regularizer) branch
        self.gredient_conv0 = nn.Conv2d(3, 32, kernel_size=3, stride=1, padding=1,bias = False)
        self.gredient_conv1 = nn.Conv2d(32, 3, kernel_size=3, stride=1, padding=1,bias = False)
        self.gredient_dense = make_autolayers(opt.sasLayerNum,32,opt)
        # denoising branch (input is [estimate, lowlf] concatenated -> 6 ch)
        self.denoise_conv0 = nn.Conv2d(6, 32, kernel_size=3, stride=1, padding=1,bias = True)
        self.denoise_conv1 = nn.Conv2d(32, 3, kernel_size=3, stride=1, padding=1,bias = True)
        self.denosie_dense = make_autolayers(opt.sasLayerNum,32,opt)
        self.relu = nn.ReLU(inplace=True)
        # learnable step size (delta) and gradient weight (eta)
        self.delta=nn.Parameter(torch.rand([1],dtype=torch.float32,requires_grad=True).cuda())
        self.eta=nn.Parameter(torch.rand([1],dtype=torch.float32,requires_grad=True).cuda())
    def forward(self,out_lastStage,imap_lastStage,lowlf,idx):
        # out_lastStage/lowlf: [N, u, v, c, h, w]; idx: stage index
        # (imap_lastStage is accepted but not used by this implementation)
        N,u,v,c,h,w= out_lastStage.shape
        if idx == 0:
            # first stage: denoise directly from [estimate, lowlf]
            n_in = torch.cat([out_lastStage,lowlf],dim=3)
            n_feat = self.relu(self.denoise_conv0(n_in.reshape(N*u*v,2*c,h,w)))
        else:
            # later stages: denoise the brightness-normalized residual
            n_in = lowlf - out_lastStage * torch.mean(lowlf,[1,2,3,4,5],keepdim=True) / torch.mean(out_lastStage,[1,2,3,4,5],keepdim=True)
            n_in = torch.cat([n_in,lowlf],dim=3)
            n_feat = self.relu(self.denoise_conv0(n_in.reshape(N*u*v,2*c,h,w)))
        n_feat = n_feat.reshape(N,u,v,32,h,w)
        feat2 = [n_feat]
        # NOTE(review): this loop variable shadows the `idx` parameter
        for idx, layer2 in enumerate(self.denosie_dense):
            feat_ = layer2(feat2)
            feat2.append(feat_)
        out_denoise = lowlf - self.denoise_conv1(feat2[-1].reshape(N*u*v,32,h,w)).reshape(N,u,v,c,h,w)
        imap = self.illum(out_lastStage,lowlf)
        # data-fidelity error under the estimated illumination map
        err1 = imap * (imap * out_lastStage - out_denoise)
        # learned gradient/regularization error
        err2_feat = self.relu(self.gredient_conv0(out_lastStage.reshape(N*u*v,c,h,w)))
        err2_feat = err2_feat.reshape(N,u,v,32,h,w)
        feat = [err2_feat]
        for index,layer in enumerate(self.gredient_dense):
            feat_ = layer(feat)
            feat.append(feat_)
        err2 = self.gredient_conv1(feat[-1].reshape(N*u*v,32,h,w))
        # gradient-descent style update with learnable step size delta
        out_currentStage = out_lastStage - self.delta * (err1 + self.eta * err2.reshape(N,u,v,c,h,w))
        return out_currentStage,imap
def CascadeStages(block, opt):
    """Instantiate ``opt.stageNum`` copies of ``block(opt)`` in a ModuleList."""
    return torch.nn.ModuleList([block(opt) for _ in range(opt.stageNum)])
def make_autolayers(LayerNum, fn, opt):
    """Build ``LayerNum`` densely-connected Autocovnlayer blocks.

    Layer ``i`` consumes the outputs of all previous layers (hence its
    dence_num is ``i + 1``); the stack is wrapped in an nn.Sequential.
    """
    stack = [Autocovnlayer(depth + 1, 4, opt.angResolution, fn, True)
             for depth in range(LayerNum)]
    return nn.Sequential(*stack)
class Main_unrolling(nn.Module):
    """Full unrolled reconstruction network: a cascade of StageBlocks.

    NOTE(review): forward() allocates the initial illumination map with
    .cuda(), so inference requires a GPU.
    """
    def __init__(self,opt):
        super(Main_unrolling, self).__init__()
        # Iterative stages
        self.iterativeRecon = CascadeStages(StageBlock, opt)
        # Xavier-initialize every Conv2d in the whole cascade
        for m in self.modules():
            if isinstance(m,nn.Conv2d):
                nn.init.xavier_uniform_(m.weight)
    def forward(self,lowlf):
        # lowlf: low-light light field [N, u, v, c, h, w]
        out = lowlf
        # stage 0 receives an all-ones illumination map
        imap = torch.ones(lowlf.shape).cuda()
        for idx, stage in enumerate(self.iterativeRecon):
            out,imap = stage(out,imap,lowlf,idx)
        return out
| lyuxianqiang/LFLL-DCU | MainNet_unrolling_all.py | MainNet_unrolling_all.py | py | 10,304 | python | en | code | 5 | github-code | 13 |
43173787289 | # Django
from django.conf import settings
from django.http import HttpResponse, Http404
from django.contrib.auth.decorators import login_required
# Mapnik
import mapnik
from copy import deepcopy
map_cache = None
class MapCache(object):
    """Caches a fully-loaded mapnik Map so the XML mapfile is parsed once."""
    def __init__(self,mapfile,srs):
        # start with a 1x1 map; wms() resizes it per request
        self.map = mapnik.Map(1,1)
        mapnik.load_map(self.map,mapfile)
        self.map.srs = srs
# its easy to do per user permissions:
#
# if not request.user.has_perm(app.model):
# raise Http404
#
# or globally protect map services to authenticated users
# @login_required
# def tile_serving_view(request):
def wms(request):
    """Render a WMS GetMap-style request into an image HttpResponse.

    Expects WIDTH, HEIGHT, FORMAT and BBOX in the query string; renders
    the configured mapnik mapfile (cached in the module-level map_cache)
    for the requested bounding box.
    """
    global map_cache
    w,h = int(request.GET['WIDTH']), int(request.GET['HEIGHT'])
    mime = request.GET['FORMAT']
    p = mapnik.Projection('%s' % str("+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs") )
    mapfile = settings.MAPNIK_MAPFILE
    # lazily build the shared map cache on first request
    if not map_cache:
        map_cache = MapCache(mapfile,p.params())
    # BBOX is "minx,miny,maxx,maxy"; map() yields floats lazily
    env = map(float,request.GET['BBOX'].split(','))
    # note: must be run without threading until #345 is closed
    # http://trac.mapnik.org/ticket/345
    # tile = deepcopy(map_cache.map)
    tile = map_cache.map
    tile.buffer_size = 128
    # NOTE(review): bare except masks everything, not just the absence of
    # resize() in older mapnik versions — consider `except AttributeError`
    try:
        tile.resize(w,h)
    except:
        tile.width = w
        tile.height = h
    tile.zoom_to_box(mapnik.Envelope(*env))
    draw = mapnik.Image(tile.width, tile.height)
    mapnik.render(tile,draw)
    # e.g. "image/png" -> "png" for mapnik's encoder
    image = draw.tostring(str(mime.split('/')[1]))
    response = HttpResponse()
    response['Content-length'] = len(image)
    response['Content-Type'] = mime
    response.write(image)
return response | oluka/mapping_rapidsms | apps/Map/mapnik_engine.py | mapnik_engine.py | py | 1,600 | python | en | code | 3 | github-code | 13 |
25339743644 | #!/usr/bin/python3.4
import ev3dev.ev3 as ev3
from time import sleep
from os import system
import signal
# Toggle debug output
DEBUG = False
# State machine bookkeeping for the main control loop below
state = "LINE" # TURN, LINE, STOP
progress = "INIT" # INIT, EXEC, DONE
direction = 'U' # U, D, L, R
# Define inputs (port names are the EV3 brick's sensor sockets)
btn = ev3.Button()
lightSensorLeft = ev3.ColorSensor('in1')
lightSensorRight = ev3.ColorSensor('in4')
gyroSensor = ev3.GyroSensor('in2')
# Configure inputs: gyro reports an absolute angle since power-on
gyroSensor.mode = 'GYRO-ANG'
gs_units = gyroSensor.units
gs_tolerance = 3
# Unused inputs
#ultrasonicSensor = ev3.UltrasonicSensor('in2')
#touchSensor = ev3.TouchSensor('in2')
# Check if sensors are connected (fail fast with a helpful message)
assert lightSensorLeft.connected, "Left light sensor is not connected (should be 'in1')"
assert lightSensorRight.connected, "Right light sensor is not conected (should be 'in4')"
assert gyroSensor.connected, "Gyro sensor is not connected (should be 'in2')"
print("Inputs loaded succesfully!")
# Define used outputs (drive motors on ports A and D)
motorLeft = ev3.LargeMotor('outA')
motorRight = ev3.LargeMotor('outD')
# Configure outputs: direct duty-cycle control, normal polarity
motorLeft.run_direct()
motorRight.run_direct()
motorLeft.polarity = "normal"
motorRight.polarity = "normal"
print("Outputs loaded succesfully!")
# Define what happens if interrupted
def signal_handler(sig, frame):
    """Stop both motors and exit cleanly when SIGINT (Ctrl-C) arrives."""
    print('Shutting down gracefully...')
    motorLeft.duty_cycle_sp = 0
    motorRight.duty_cycle_sp = 0
    exit(0)
# Register the handler so Ctrl-C never leaves the motors running
signal.signal(signal.SIGINT, signal_handler)
# Control constants: motor duty-cycle levels (percent) and the reflected-light
# reading below which a sensor is considered to be over the black line
SPEED_TURN = 20
SPEED_SLOW = 30
SPEED_BASE = 50
SPEED_FAST = 70
THRESHOLD_BLACK = 15
# Relative turn angle function
def control_turn(dir_start, dir_goal):
    """Return the signed turn angle (degrees) from dir_start to dir_goal.

    Headings are the labels 'U', 'D', 'L', 'R'.  The result is wrapped
    into [-180, 180), so a half turn is reported as -180.
    """
    heading = {'U': 0,
               'D': 180,
               'L': -90,
               'R': 90}
    delta = heading[dir_goal] - heading[dir_start]
    # wrap into [-180, 180); note that +180 maps to -180
    return (delta + 180) % 360 - 180
# Make sure the robot is stationary before arming
motorLeft.duty_cycle_sp = 0
motorRight.duty_cycle_sp = 0
# Busy-wait for any brick button press, then beep to signal the start
print("Ready!")
while not btn.any():
    pass
ev3.Sound.beep().wait()
# Main control loop: a simple state machine (STOP / TURN / LINE / THINK)
# driven by the two light sensors and the gyro.
while True:
    # Read sensor inputs every loop
    ls_left_val = lightSensorLeft.value()
    ls_right_val = lightSensorRight.value()
    gs_val = gyroSensor.value()
    # Read motor outputs
    mo_left_val = motorLeft.duty_cycle_sp
    mo_right_val = motorRight.duty_cycle_sp
    # Print debug info if true
    if DEBUG:
        # Clear terminal before printing
        system('clear')
        # Printing at once because ev3/ssh console is slow.
        # (The last two string lines rely on implicit string-literal
        # concatenation: there is no '+' after the mo_left_val line.)
        print( "[STATE]\n" +
               "Current state:\t\t\t" + state + '\n' +
               "Current state progress:\t\t" + progress + '\n' +
               "Current direction:\t\t" + direction + '\n' +
               '\n' +
               "[INPUT]\n" +
               "Left light sensor value:\t" + str(ls_left_val) + '\n' +
               "Right light sensor value:\t" + str(ls_right_val) + '\n' +
               "Gyro sensor value:\t\t" + str(gs_val) + gs_units + '\n' +
               '\n' +
               "[OUTPUT]\n" +
               "Left motor duty cycle:\t\t" + str(mo_left_val) + '\n'
               "Right motor duty cycle:\t\t" + str(mo_right_val)
               )
    # Any button press acts as an emergency stop
    if btn.any():
        ev3.Sound.beep().wait()
        motorLeft.duty_cycle_sp = 0
        motorRight.duty_cycle_sp = 0
        exit()
    # STOP state: hold both motors at zero
    if state == "STOP":
        motorLeft.duty_cycle_sp = 0
        motorRight.duty_cycle_sp = 0
    # TURN state: rotate in place until the gyro reports the goal angle
    if state == "TURN":
        if progress == "INIT":
            reset_val = gs_val
            goal_dir = control_turn(direction, 'L')
            # NOTE(review): goal_pol divides by abs(goal_dir); a zero-degree
            # goal (already facing the target) would raise ZeroDivisionError
            goal_pol = goal_dir/abs(goal_dir)
            progress = "EXEC"
        if progress == "EXEC":
            # relative rotation since the turn started, with tolerance band
            dir_rel = gs_val - reset_val
            if(dir_rel*goal_pol < abs(goal_dir)-gs_tolerance):
                motorLeft.duty_cycle_sp = SPEED_TURN*goal_pol
                motorRight.duty_cycle_sp = -SPEED_TURN*goal_pol
            else:
                progress = "DONE"
        if progress == "DONE":
            direction = 'D'
            state = "STOP"
            progress = "INIT"
    # LINE state: PD line following (Ki is zero, so I-term is inert)
    if state == "LINE":
        if progress == "INIT":
            Kp = 1.25/2
            Ki = 0
            Kd = 15
            #base1 = 0
            acc = 0
            ls_error = 0
            ls_error_prev = 0
            motorLeft.duty_cycle_sp = SPEED_BASE
            motorRight.duty_cycle_sp = SPEED_BASE
            # stay in INIT while both sensors still see black
            if not(ls_left_val < THRESHOLD_BLACK and ls_right_val < THRESHOLD_BLACK):
                progress = "EXEC"
        if progress == "EXEC":
            ls_error_prev = ls_error
            ls_error = ls_left_val - ls_right_val
            acc += ls_error
            derr = ls_error - ls_error_prev
            pid_corr = Kp*ls_error + Ki*acc + Kd*derr
            #if (ls_error < 5):
            #    if ((SPEED_BASE + base1) < 80):
            #        base1 = base1 + 1
            #    else:
            #        base1 = 0
            #system('clear')
            #print("pid_corr\t" + str(pid_corr))
            #print("Kp*ls_error\t" + str(Kp*ls_error))
            #print("Ki*acc\t" + str(Ki*acc))
            #print("Kd*derr\t" + str(Kd*derr))
            # both sensors on black = crossing marker: leave the line state
            if(ls_left_val < THRESHOLD_BLACK and ls_right_val < THRESHOLD_BLACK):
                progress = "DONE"
            else:
                # only steer when the corrected duty cycle stays within 100%
                if (SPEED_BASE + abs(pid_corr) <= 100):
                    motorLeft.duty_cycle_sp = SPEED_BASE+(pid_corr)
                    motorRight.duty_cycle_sp = SPEED_BASE-(pid_corr)
                #elif(ls_left_val < THRESHOLD_BLACK):
                #    motorLeft.duty_cycle_sp -= 1
                #    motorRight.duty_cycle_sp += 1
                #elif(ls_right_val < THRESHOLD_BLACK):
                #    motorLeft.duty_cycle_sp += 1
                #    motorRight.duty_cycle_sp -= 1
                #else:
                #    motorLeft.duty_cycle_sp = SPEED_BASE
                #    motorRight.duty_cycle_sp = SPEED_BASE
        if progress == "DONE":
            state = "STOP"
            progress = "INIT"
    # THINK state: pick the next heading and switch to TURN if needed
    if state == "THINK":
        direction_goal = 'R'
        if (direction != direction_goal):
            state = "TURN"
| mathiaslyngbye/ev3control | backup/kenil_pid_test.py | kenil_pid_test.py | py | 6,713 | python | en | code | 0 | github-code | 13 |
546325586 | import pandas as pd
url_csv01 = "./data/all_url_title_text.csv"
url_csv02 = "./data/all_url_bin.csv"
def del_same_url_and_save(new_sub_csv, url_bin):
    """Drop duplicate rows (by 'url') from two CSV files, in place.

    Both files are read with pandas, de-duplicated on their 'url' column
    (keeping the first occurrence) and written back to the same paths.
    Row counts before and after are printed for inspection.

    Args:
        new_sub_csv: path of the CSV with url/title/text rows.
        url_bin: path of the CSV with url/binary rows.
    """
    post = pd.read_csv(new_sub_csv)
    img_bin = pd.read_csv(url_bin)
    print(len(post))
    print(len(img_bin))
    post.drop_duplicates(['url'], keep='first', inplace=True, ignore_index=True)
    img_bin.drop_duplicates(['url'], keep='first', inplace=True, ignore_index=True)
    print(len(post))
    print(len(img_bin))
    # bug fix: write without the RangeIndex; otherwise every run prepends an
    # extra 'Unnamed: 0' column to the saved files
    post.to_csv(new_sub_csv, index=False)
    img_bin.to_csv(url_bin, index=False)
#del_same_url_and_save(url_csv01, url_csv02)
| Comprehensive-Design-Team-9/Web_Crawler | del_same_url.py | del_same_url.py | py | 636 | python | en | code | 0 | github-code | 13 |
36949721866 | #!/usr/bin/env python
import pygame
# Demo of manual window-resize handling: autoresize is disabled and the
# surface is re-fetched on every VIDEORESIZE event.
# NOTE(review): pygame.Clock and display._set_autoresize look like
# pygame-ce–specific APIs — confirm the target pygame fork.
pygame.init()
RES = (160, 120)
FPS = 30
clock = pygame.Clock()
screen = pygame.display.set_mode(RES, pygame.RESIZABLE)
pygame.display._set_autoresize(False)
# MAIN LOOP
done = False
# (i, j) is the position of the moving white dot, wrapped to the window size
i = 0
j = 0
while not done:
    for event in pygame.event.get():
        # quit on 'q' or on the window close button
        if event.type == pygame.KEYDOWN and event.key == pygame.K_q:
            done = True
        if event.type == pygame.QUIT:
            done = True
        # if event.type==pygame.WINDOWRESIZED:
        #     screen=pygame.display.get_surface()
        if event.type == pygame.VIDEORESIZE:
            screen = pygame.display.get_surface()
    # advance the dot: x every frame, y every other frame, both wrapped
    i += 1
    i = i % screen.get_width()
    j += i % 2
    j = j % screen.get_height()
    # static reference shapes plus the moving dot
    screen.fill((255, 0, 255))
    pygame.draw.circle(screen, (0, 0, 0), (100, 100), 20)
    pygame.draw.circle(screen, (0, 0, 200), (0, 0), 10)
    pygame.draw.circle(screen, (200, 0, 0), (160, 120), 30)
    pygame.draw.line(screen, (250, 250, 0), (0, 120), (160, 0))
    pygame.draw.circle(screen, (255, 255, 255), (i, j), 5)
    pygame.display.flip()
    clock.tick(FPS)
pygame.quit()
| pygame-community/pygame-ce | examples/resizing_new.py | resizing_new.py | py | 1,115 | python | en | code | 517 | github-code | 13 |
74564377938 | #!/usr/bin/env python
# -*- coding: ISO-8859-1 -*-
"""
web server.
__author__ = "Valentin Kuznetsov"
"""
from __future__ import print_function
from builtins import str as newstr, bytes, map
from future.utils import viewitems, viewvalues
# system modules
import collections
import json
import os
import pprint
import sys
import time
from copy import deepcopy
from datetime import datetime
# cherrypy modules
import cherrypy
from cherrypy import config as cherryconf
from cherrypy import expose, response, tools
from cherrypy.lib.static import serve_file
# import WMCore itself to determine path of modules
import WMCore
from Utils.CertTools import getKeyCertFromEnv
from WMCore.REST.Auth import get_user_info
# WMCore modules
from WMCore.ReqMgr.DataStructs.RequestStatus import ACTIVE_STATUS
from WMCore.ReqMgr.DataStructs.RequestStatus import REQUEST_STATE_TRANSITION, REQUEST_HUMAN_STATES
from WMCore.ReqMgr.DataStructs.RequestStatus import get_modifiable_properties, get_protected_properties
from WMCore.ReqMgr.Tools.cms import lfn_bases, lfn_unmerged_bases
from WMCore.ReqMgr.Tools.cms import releases, dashboardActivities
from WMCore.ReqMgr.Tools.cms import site_white_list, site_black_list
from WMCore.ReqMgr.Tools.cms import web_ui_names, SITE_CACHE, PNN_CACHE
from WMCore.ReqMgr.Utils.Validation import get_request_template_from_type
# ReqMgrSrv modules
from WMCore.ReqMgr.Web.tools import exposecss, exposejs, TemplatedPage
from WMCore.ReqMgr.Web.utils import gen_color
from WMCore.ReqMgr.Web.utils import json2table, json2form, genid, checkargs, tstamp, sort, reorder_list
from WMCore.Services.LogDB.LogDB import LogDB
# new reqmgr2 APIs
from WMCore.Services.ReqMgr.ReqMgr import ReqMgr
from WMCore.Services.pycurl_manager import RequestHandler
from WMCore.WMSpec.StdSpecs.StdBase import StdBase
from WMCore.Cache.GenericDataCache import MemoryCacheStruct
def getdata(url, params, headers=None):
    """Fetch JSON from a CMS web service using the host X509 key/cert.

    :param url: service endpoint URL
    :param params: query parameters passed to the request handler
    :param headers: optional HTTP headers
    :return: decoded JSON payload
    """
    ckey, cert = getKeyCertFromEnv()
    mgr = RequestHandler()
    res = mgr.getdata(url, params=params, headers=headers, ckey=ckey, cert=cert)
    return json.loads(res)
def sort_bold(docs):
    """Return the items sorted and comma-joined, each wrapped in <b> tags."""
    return ', '.join('<b>%s</b>' % item for item in sorted(docs))
def set_headers(itype, size=0):
    """
    Set response header Content-type (itype) and Content-Length (size).

    Content-Length is only set for a positive size; a far-future Expires
    header is always added so clients may cache the response.
    """
    if size > 0:
        response.headers['Content-Length'] = size
    response.headers['Content-Type'] = itype
    response.headers['Expires'] = 'Sat, 14 Oct 2027 00:59:30 GMT'
def set_no_cache_flags():
    "Set cherrypy response headers that prevent client/proxy caching"
    cherrypy.response.headers['Cache-Control'] = 'no-cache'
    cherrypy.response.headers['Pragma'] = 'no-cache'
    # already-expired date forces revalidation on every request
    cherrypy.response.headers['Expires'] = 'Sat, 01 Dec 2001 00:00:00 GMT'
def set_cache_flags():
    # docstring fixed: this function REMOVES the no-cache headers,
    # re-enabling caching for the response
    "Remove cherrypy no-cache headers so the response may be cached"
    headers = cherrypy.response.headers
    for key in ['Cache-Control', 'Pragma']:
        if key in headers:
            del headers[key]
def minify(content):
    """Collapse newlines/tabs to spaces and squeeze runs of blanks.

    Only two squeeze passes are made, so very long runs of spaces may
    survive partially collapsed (this matches the historical behavior).
    """
    text = content.replace('\n', ' ').replace('\t', ' ')
    for _ in range(2):  # exactly two passes, as before
        text = text.replace('  ', ' ')
    return text
def menus():
    """Return the ordered list of top-level web UI menu entries."""
    return ['home', 'create', 'approve', 'assign', 'batches']
def request_attr(doc, attrs=None):
    """Project selected attributes of a request document into a new dict.

    A RequestDate stored as a time-tuple list is rendered as a
    'YYYY-MM-DD HH:MM:SS GMT' string (the stored list is padded in place
    to the 9 fields mktime expects); every other attribute is copied
    verbatim.  Attributes absent from ``doc`` are skipped.
    """
    if not attrs:
        attrs = ['RequestName', 'Requestdate', 'Inputdataset',
                 'Prepid', 'Group', 'Requestor', 'RequestDate',
                 'RequestStatus']
    rdict = {}
    for key in attrs:
        if key not in doc:
            continue
        val = doc[key]
        if key == 'RequestDate' and isinstance(val, list):
            # pad to 9 time-tuple fields; DST flag unknown, so use -1
            while len(val) < 9:
                val.append(0)
            val[-1] = -1
            gmt = time.gmtime(time.mktime(tuple(val)))
            rdict[key] = time.strftime("%Y-%m-%d %H:%M:%S GMT", gmt)
        else:
            rdict[key] = val
    return rdict
def spec_list(root):
    """Return spec module names under ``root`` that define a workload factory.

    A file <Name>.py qualifies when its text contains the class name
    '<Name>WorkloadFactory'; '__init__.py' and non-Python files are skipped.
    """
    found = []
    for entry in os.listdir(root):
        if entry == '__init__.py' or not entry.endswith('.py'):
            continue
        stem = entry.split('.')[0]
        marker = "%sWorkloadFactory" % stem
        with open(os.path.join(root, entry)) as handle:
            if marker in handle.read():
                found.append(stem)
    return found
def user():
    """
    Return user name associated with this instance.

    Falls back to 'testuser' when no authenticated user is available
    (e.g. outside the cherrypy auth context).
    """
    try:
        return get_user_info()['login']
    except Exception:
        # bug fix: a bare except also swallowed SystemExit/KeyboardInterrupt
        return 'testuser'
def user_dn():
    """Return the user's X509 DN, or a placeholder outside the auth context."""
    try:
        return get_user_info()['dn']
    except Exception:
        # bug fix: a bare except also swallowed SystemExit/KeyboardInterrupt
        return '/CN/bla/foo'
def check_scripts(scripts, resource, path):
    """
    Ensure every script name is registered in the resource map.

    A script missing from ``resource`` is added only when the file
    actually exists under ``path``.  The script list is returned unchanged.
    """
    for name in scripts:
        if name in resource:
            continue
        candidate = os.path.normpath(os.path.join(path, name))
        if os.path.isfile(candidate):
            resource[name] = candidate
    return scripts
def _map_configcache_url(tConfigs, baseURL, configIDName, configID, taskName=""):
if configIDName.endswith('ConfigCacheID') and configID is not None:
url = "%s/reqmgr_config_cache/%s/configFile" % (baseURL, configID)
prefix = "%s: " % taskName if taskName else ""
task = "%s%s: %s " % (prefix, configIDName, configID)
tConfigs.setdefault(task, url)
return
def tasks_configs(docs, html=False):
    """Map task/step names of request doc(s) to their config-cache URLs.

    For TaskChain/StepChain requests the nested TaskN/StepN dictionaries
    are scanned as well.  Returns either the mapping dict or, when
    ``html`` is True, an HTML fieldset with one link per config.
    """
    if not isinstance(docs, list):
        docs = [docs]
    tConfigs = {}
    for doc in docs:
        name = doc.get('RequestName', '')
        # decide whether nested Task*/Step* dicts must be scanned too
        if "TaskChain" in doc:
            chainTypeFlag = True
            ctype = "Task"
        elif "StepChain" in doc:
            chainTypeFlag = True
            ctype = "Step"
        else:
            chainTypeFlag = False
            ctype = None
        # fall back to the central couch when the URL is absent/"none"
        # NOTE(review): `curl == None` should be `curl is None`
        curl = doc.get('ConfigCacheUrl', 'https://cmsweb.cern.ch/couchdb')
        if curl == None or curl == "none":
            curl = 'https://cmsweb.cern.ch/couchdb'
        if not name:
            continue
        for key, val in viewitems(doc):
            _map_configcache_url(tConfigs, curl, key, val)
            if chainTypeFlag and key.startswith(ctype) and isinstance(val, dict):
                for kkk in val:
                    # append task/step number and name
                    keyStr = "%s: %s" % (key, val.get("%sName" % ctype, ''))
                    _map_configcache_url(tConfigs, curl, kkk, val[kkk], keyStr)
    if html:
        out = '<fieldset><legend>Config Cache List</legend><ul>'
        for task in sorted(tConfigs):
            out += '<li><a href="%s" target="config_page">%s</a></li>' % (tConfigs[task], task)
        out += '</ul></fieldset>'
        return out
    return tConfigs
def state_transition(docs):
    """Render the RequestTransition history of request doc(s) as an HTML list."""
    if not isinstance(docs, list):
        docs = [docs]
    html = '<fieldset><legend>State Transition</legend><ul>'
    showName = len(docs) > 1  # label each request when several are shown
    for rdoc in docs:
        rname = rdoc.get('RequestName', '')
        if not rname:
            continue
        if showName:
            html += '%s<br />' % rname
        for entry in rdoc.get('RequestTransition', ''):
            stamp = datetime.utcfromtimestamp(entry["UpdateTime"]).strftime('%Y-%m-%d %H:%M:%S')
            html += '<li><b>%s</b>: %s UTC <b>DN</b>: %s</li>' % (entry["Status"], stamp, entry["DN"])
    html += '</ul></fieldset>'
    return html
def priority_transition(docs):
    """Render the PriorityTransition history of request doc(s) as an HTML list."""
    if not isinstance(docs, list):
        docs = [docs]
    html = '<fieldset><legend>Priority Transition</legend><ul>'
    showName = len(docs) > 1  # label each request when several are shown
    for rdoc in docs:
        rname = rdoc.get('RequestName', '')
        if not rname:
            continue
        if showName:
            html += '%s<br />' % rname
        for entry in rdoc.get('PriorityTransition', ''):
            stamp = datetime.utcfromtimestamp(entry["UpdateTime"]).strftime('%Y-%m-%d %H:%M:%S')
            html += '<li><b>%s</b>: %s UTC <b>DN</b>: %s</li>' % (entry["Priority"], stamp, entry["DN"])
    html += '</ul></fieldset>'
    return html
# code taken from
# http://stackoverflow.com/questions/1254454/fastest-way-to-convert-a-dicts-keys-values-from-unicode-to-str
def toString(data):
    """Recursively convert unicode/bytes content of nested containers to str.

    Mappings and iterables are rebuilt with their own type; string/bytes
    scalars pass through ``str``; any other value is returned unchanged.
    """
    if isinstance(data, (newstr, bytes)):
        return str(data)
    # bug fix: the container ABCs moved to collections.abc and were removed
    # from the collections top level in Python 3.10
    elif isinstance(data, collections.abc.Mapping):
        return dict(list(map(toString, viewitems(data))))
    elif isinstance(data, collections.abc.Iterable):
        return type(data)(list(map(toString, data)))
    else:
        return data
def getPropValueMap():
    """
    Return all possible values for some assignment arguments

    Keys are assignment-page attribute names; values are the choice lists
    offered in the web UI (sites, PNNs, releases, LFN bases, etc.).
    """
    prop_value_map = {'CMSSWVersion': releases(),
                      'SiteWhitelist': SITE_CACHE.getData(),
                      'SiteBlacklist': SITE_CACHE.getData(),
                      'SubscriptionPriority': ['Low', 'Normal', 'High'],
                      'CustodialSites': PNN_CACHE.getData(),
                      'NonCustodialSites': PNN_CACHE.getData(),
                      'MergedLFNBase': lfn_bases(),
                      'UnmergedLFNBase': lfn_unmerged_bases(),
                      'TrustPUSitelists': [True, False],
                      'TrustSitelists': [True, False],
                      'Dashboard': dashboardActivities()}
    return prop_value_map
class ReqMgrService(TemplatedPage):
"""
Request Manager web service class
"""
    def __init__(self, app, config, mount):
        """Set up template dirs, spec list, cherrypy config and service clients.

        :param app: WMCore web application object (unused directly here)
        :param config: reqmgr web configuration section
        :param mount: URL mount point (unused directly here)
        """
        self.base = config.base
        # directory of the installed WMCore package, used to locate StdSpecs
        self.rootdir = '/'.join(WMCore.__file__.split('/')[:-1])
        if config and not isinstance(config, dict):
            web_config = config.dictionary_()
        if not config:
            web_config = {'base': self.base}
        TemplatedPage.__init__(self, web_config)
        # static asset directories: env var defaults, overridable via config
        imgdir = os.environ.get('RM_IMAGESPATH', os.getcwd() + '/images')
        self.imgdir = web_config.get('imgdir', imgdir)
        cssdir = os.environ.get('RM_CSSPATH', os.getcwd() + '/css')
        self.cssdir = web_config.get('cssdir', cssdir)
        jsdir = os.environ.get('RM_JSPATH', os.getcwd() + '/js')
        self.jsdir = web_config.get('jsdir', jsdir)
        spdir = os.environ.get('RM_SPECPATH', os.getcwd() + '/specs')
        self.spdir = web_config.get('spdir', spdir)
        # read scripts area and initialize data-ops scripts
        self.sdir = os.environ.get('RM_SCRIPTS', os.getcwd() + '/scripts')
        self.sdir = web_config.get('sdir', self.sdir)
        self.sdict_thr = web_config.get('sdict_thr', 600)  # reasonable 10 min refresh interval
        self.sdict = {'ts': time.time()}  # placeholder for data-ops scripts
        self.update_scripts(force=True)
        # To be filled at run time
        self.cssmap = {}
        self.jsmap = {}
        self.imgmap = {}
        self.yuimap = {}
        # discover available StdSpecs workload types
        std_specs_dir = os.path.join(self.rootdir, 'WMSpec/StdSpecs')
        self.std_specs = spec_list(std_specs_dir)
        self.std_specs.sort()
        # Update CherryPy configuration: gzip-encode css/js responses
        mime_types = ['text/css']
        mime_types += ['application/javascript', 'text/javascript',
                       'application/x-javascript', 'text/x-javascript']
        cherryconf.update({'tools.encode.on': True,
                           'tools.gzip.on': True,
                           'tools.gzip.mime_types': mime_types,
                           })
        self._cache = {}
        # initialize access to reqmgr2 APIs
        self.reqmgr_url = config.reqmgr.reqmgr2_url
        self.reqmgr = ReqMgr(self.reqmgr_url)
        # only gets current view (this may make response time much longer;
        # if an up-to-date view is not needed, overwrite with False)
        self.reqmgr._noStale = True
        # get fields which we'll use in templates
        cdict = config.reqmgr.dictionary_()
        self.couch_url = cdict.get('couch_host', '')
        self.couch_dbname = cdict.get('couch_reqmgr_db', '')
        self.couch_wdbname = cdict.get('couch_workload_summary_db', '')
        self.acdc_url = cdict.get('acdc_host', '')
        self.acdc_dbname = cdict.get('acdc_db', '')
        self.configcache_url = cdict.get('couch_config_cache_url', self.couch_url)
        self.dbs_url = cdict.get('dbs_url', '')
        self.dqm_url = cdict.get('dqm_url', '')
        self.sw_ver = cdict.get('default_sw_version', 'CMSSW_7_6_1')
        self.sw_arch = cdict.get('default_sw_scramarch', 'slc6_amd64_gcc493')
        # LogDB holder
        centralurl = cdict.get("central_logdb_url", "")
        identifier = cdict.get("log_reporter", "reqmgr2")
        self.logdb = LogDB(centralurl, identifier)
        # local team cache which will request data from wmstats
        base, uri = self.reqmgr_url.split('://')
        base_url = '%s://%s' % (base, uri.split('/')[0])
        self.wmstatsurl = cdict.get('wmstats_url', '%s/wmstatsserver' % base_url)
        if not self.wmstatsurl:
            raise Exception('ReqMgr2 configuration file does not provide wmstats url')
        # cache team information for 2 hours to limit wmstatsserver API calls
        self.TEAM_CACHE = MemoryCacheStruct(7200, self.refreshTeams)
        # fetch assignment arguments specification from StdBase
        self.assignArgs = StdBase().getWorkloadAssignArgs()
        self.assignArgs = {key: val['default'] for key, val in viewitems(self.assignArgs)}
    def getTeams(self):
        """Return the (memory-cached) list of known processing teams."""
        return self.TEAM_CACHE.getData()
def refreshTeams(self):
"Helper function to cache team info from wmstats"
url = '%s/data/teams' % self.wmstatsurl
params = {}
headers = {'Accept': 'application/json'}
try:
data = getdata(url, params, headers)
if 'error' in data:
print("WARNING: fail to get teams from %s" % url)
print(data)
teams = data.get('result', [])
return teams
except Exception as exp:
print("WARNING: fail to get teams from %s" % url)
print(str(exp))
def update_scripts(self, force=False):
"Update scripts dict"
if force or abs(time.time() - self.sdict['ts']) > self.sdict_thr:
if os.path.isdir(self.sdir):
for item in os.listdir(self.sdir):
with open(os.path.join(self.sdir, item), 'r') as istream:
self.sdict[item.split('.')[0]] = istream.read()
self.sdict['ts'] = time.time()
    def abs_page(self, tmpl, content):
        """generate abstract page: menu + generic wrapper + main template"""
        menu = self.templatepage('menu', menus=menus(), tmpl=tmpl)
        body = self.templatepage('generic', menu=menu, content=content)
        page = self.templatepage('main', content=body, user=user())
        return page
    def page(self, content):
        """
        Provide page wrapped with top/bottom templates.
        """
        return self.templatepage('main', content=content)
    def error(self, content):
        "Generate common error page wrapped in the standard layout"
        content = self.templatepage('error', content=content)
        return self.abs_page('error', content)
    @expose
    def index(self):
        """Main page: list of active requests and the state-transition map"""
        content = self.templatepage('index', requests=ACTIVE_STATUS, rdict=REQUEST_STATE_TRANSITION)
        return self.abs_page('main', content)
@expose
def home(self, **kwds):
"""Main page"""
return self.index(**kwds)
### Request actions ###
    @expose
    @checkargs(['status', 'sort'])
    def assign(self, **kwds):
        """assign page: show requests awaiting assignment plus editable assignment args"""
        if not kwds:
            kwds = {}
        # default to requests that are ready to be assigned
        if 'status' not in kwds:
            kwds.update({'status': 'assignment-approved'})
        docs = []
        attrs = ['RequestName', 'RequestDate', 'Group', 'Requestor', 'RequestStatus']
        dataResult = self.reqmgr.getRequestByStatus(statusList=[kwds['status']])
        for data in dataResult:
            for val in viewvalues(data):
                docs.append(request_attr(val, attrs))
        sortby = kwds.get('sort', 'status')
        docs = [r for r in sort(docs, sortby)]
        # merge StdBase assignment defaults with the allowed property values
        assignDict = deepcopy(self.assignArgs)
        assignDict.update(getPropValueMap())
        assignDict['Team'] = self.getTeams()
        filter_sort = self.templatepage('filter_sort')
        content = self.templatepage('assign', sort=sortby,
                                    filter_sort_table=filter_sort,
                                    sites=SITE_CACHE.getData(),
                                    site_white_list=site_white_list(),
                                    site_black_list=site_black_list(),
                                    user=user(), user_dn=user_dn(), requests=toString(docs),
                                    misc_table=json2table(assignDict, web_ui_names(), "all_attributes"),
                                    misc_json=json2form(assignDict, indent=2, keep_first_value=True))
        return self.abs_page('assign', content)
    @expose
    @checkargs(['status', 'sort'])
    def approve(self, **kwds):
        """
        Approve page: get list of request associated with user DN.
        Fetch their status list from ReqMgr and display if requests
        were seen by data-ops.
        """
        if not kwds:
            kwds = {}
        # default to freshly-created requests
        if 'status' not in kwds:
            kwds.update({'status': 'new'})
        # request non-stale data from the backend
        kwds.update({'_nostale': True})
        docs = []
        attrs = ['RequestName', 'RequestDate', 'Group', 'Requestor', 'RequestStatus', 'Campaign']
        dataResult = self.reqmgr.getRequestByStatus(statusList=[kwds['status']])
        for data in dataResult:
            for val in viewvalues(data):
                docs.append(request_attr(val, attrs))
        sortby = kwds.get('sort', 'status')
        docs = [r for r in sort(docs, sortby)]
        filter_sort = self.templatepage('filter_sort')
        content = self.templatepage('approve', requests=toString(docs), date=tstamp(),
                                    sort=sortby, filter_sort_table=filter_sort,
                                    gen_color=gen_color)
        return self.abs_page('approve', content)
    @expose
    def create(self, **kwds):
        """create page: render the request-creation form for the chosen spec"""
        # get list of standard specs from WMCore and new ones from local area
        # loc_specs_dir = os.path.join(self.spdir, 'Specs') # local specs
        # loc_specs = spec_list(loc_specs_dir, 'Specs')
        # all_specs = list(set(self.std_specs + loc_specs))
        # all_specs.sort()
        all_specs = list(self.std_specs)
        spec = kwds.get('form', '')
        if not spec:
            spec = self.std_specs[0]
        # make spec first in all_specs list
        if spec in all_specs:
            all_specs.remove(spec)
        all_specs = [spec] + all_specs
        jsondata = get_request_template_from_type(spec)
        # create templatized page out of provided forms
        self.update_scripts()
        content = self.templatepage('create', table=json2table(jsondata, web_ui_names(), jsondata),
                                    jsondata=json2form(jsondata, indent=2, keep_first_value=True), name=spec,
                                    scripts=[s for s in self.sdict if s != 'ts'],
                                    specs=all_specs)
        return self.abs_page('create', content)
def generate_objs(self, script, jsondict):
"""Generate objects from givem JSON template"""
self.update_scripts()
code = self.sdict.get(script, '')
if code.find('def genobjs(jsondict)') == -1:
return self.error(
"Improper python snippet, your code should start with <b>def genobjs(jsondict)</b> function")
exec (code) # code snippet must starts with genobjs function
return [r for r in genobjs(jsondict)]
@expose
def config(self, name):
"Fetch config for given request name"
result = self.reqmgr.getConfig(name)
if len(result) == 1:
result = result[0]
else:
result = 'Configuration not found for: %s' % name
return result.replace('\n', '<br/>')
@expose
def fetch(self, rid):
"Fetch document for given id"
rid = rid.replace('request-', '')
doc = self.reqmgr.getRequestByNames(rid)
transitions = []
tst = time.time()
# get request tasks
tasks = self.reqmgr.getRequestTasks(rid)
if len(doc) == 1:
try:
doc = doc[0][rid]
except:
pass
name = doc.get('RequestName', 'NA')
title = 'Request %s' % name
status = doc.get('RequestStatus', '')
transitions = REQUEST_STATE_TRANSITION.get(status, [])
if status in transitions:
transitions.remove(status)
visible_attrs = get_modifiable_properties(status)
filterout_attrs = get_protected_properties()
# extend filterout list with "RequestStatus" since it is passed separately
filterout_attrs.append("RequestStatus")
for key, val in viewitems(self.assignArgs):
if not doc.get(key):
doc[key] = val
if visible_attrs == "all_attributes":
filteredDoc = doc
for prop in filterout_attrs:
if prop in filteredDoc:
del filteredDoc[prop]
else:
filteredDoc = {}
for prop in visible_attrs:
filteredDoc[prop] = doc.get(prop, "")
propValueMap = getPropValueMap()
propValueMap['Team'] = self.getTeams()
selected = {}
for prop in propValueMap:
if prop in filteredDoc:
filteredDoc[prop], selected[prop] = reorder_list(propValueMap[prop], filteredDoc[prop])
content = self.templatepage('doc', title=title, status=status, name=name, rid=rid,
tasks=json2form(tasks, indent=2, keep_first_value=False),
table=json2table(filteredDoc, web_ui_names(), visible_attrs, selected),
jsondata=json2form(doc, indent=2, keep_first_value=False),
doc=json.dumps(doc), time=time,
tasksConfigs=tasks_configs(doc, html=True),
sTransition=state_transition(doc),
pTransition=priority_transition(doc),
transitions=transitions, humanStates=REQUEST_HUMAN_STATES,
ts=tst, user=user(), userdn=user_dn())
elif len(doc) > 1:
jsondata = [pprint.pformat(d) for d in doc]
content = self.templatepage('doc', title='Series of docs: %s' % rid,
table="", jsondata=jsondata, time=time,
tasksConfigs=tasks_configs(doc, html=True),
sTransition=state_transition(doc),
pTransition=priority_transition(doc),
transitions=transitions, humanStates=REQUEST_HUMAN_STATES,
ts=tst, user=user(), userdn=user_dn())
else:
doc = 'No request found for name=%s' % rid
return self.abs_page('request', content)
    @expose
    def record2logdb(self, **kwds):
        """LogDB submission page: post a message for a request to LogDB"""
        # NOTE(review): debug print of raw kwds; presumably left over from
        # development -- confirm it is still wanted
        print(kwds)
        request = kwds['request']
        msg = kwds['message']
        self.logdb.post(request, msg)
        msg = '<h6>Confirmation</h6>Your request has been entered to LogDB.'
        return self.abs_page('generic', msg)
    @expose
    def requests(self, **kwds):
        """Page showing requests in a given status (default: acquired)"""
        if not kwds:
            kwds = {}
        if 'status' not in kwds:
            kwds.update({'status': 'acquired'})
        dataResult = self.reqmgr.getRequestByStatus(kwds['status'])
        attrs = ['RequestName', 'RequestDate', 'Group', 'Requestor', 'RequestStatus', 'Campaign']
        docs = []
        for data in dataResult:
            for doc in viewvalues(data):
                # keep only the attributes shown in the table
                docs.append(request_attr(doc, attrs))
        sortby = kwds.get('sort', 'status')
        docs = [r for r in sort(docs, sortby)]
        filter_sort = self.templatepage('filter_sort')
        content = self.templatepage('requests', requests=toString(docs), sort=sortby,
                                    status=kwds['status'], filter_sort_table=filter_sort)
        return self.abs_page('requests', content)
    @expose
    def request(self, **kwargs):
        "Get data example and expose it as json"
        dataset = kwargs.get('uinput', '')
        if not dataset:
            return {'error': 'no input dataset'}
        # NOTE(review): cmsweb host is hard-coded here rather than taken
        # from configuration -- confirm this is intended
        url = 'https://cmsweb.cern.ch/reqmgr2/data/request?outputdataset=%s' % dataset
        params = {}
        headers = {'Accept': 'application/json'}
        wdata = getdata(url, params, headers)
        wdict = dict(date=time.ctime(), team='Team-A', status='Running', ID=genid(wdata))
        winfo = self.templatepage('workflow', wdict=wdict,
                                  dataset=dataset, code=pprint.pformat(wdata))
        content = self.templatepage('search', content=winfo)
        return self.abs_page('request', content)
    @expose
    def batch(self, **kwds):
        """batch page: show a single (currently hard-coded placeholder) batch"""
        # TODO: we need a template for batch attributes
        # and read it from separate area, like DASMaps
        name = kwds.get('name', '')
        batch = {}
        if name:
            # batch = self.reqmgr.getBatchesByName(name)
            # placeholder data until the batches API is wired in
            batch = {'Name': 'Batch1', 'Description': 'Bla-bla', 'Creator': 'valya', 'Group': 'test',
                     'Workflows': ['workflow1', 'workflow2'],
                     'Attributes': {'HeavyIon': ['true', 'false']}}
        attributes = batch.get('Attributes', {})
        workflows = batch.get('Workflows', [])
        description = batch.get('Description', '')
        creator = batch.get('Creator', user_dn())
        content = self.templatepage('batch', name=name,
                                    attributes=json2table(attributes, web_ui_names()),
                                    workflows=workflows, creator=creator,
                                    description=description)
        return self.abs_page('batch', content)
    @expose
    def batches(self, **kwds):
        """Page showing batches (currently hard-coded placeholder data)"""
        if not kwds:
            kwds = {}
        if 'name' not in kwds:
            kwds.update({'name': ''})
        sortby = kwds.get('sort', 'name')
        # results = self.reqmgr.getBatchesByName(kwds['name'])
        # placeholder data until the batches API is wired in
        results = [
            {'Name': 'Batch1', 'Description': 'Bla-bla', 'Creator': 'valya', 'Group': 'test',
             'Workflows': ['workflow1', 'workflow2'],
             'Date': 'Fri Feb 13 10:36:41 EST 2015',
             'Attributes': {'HeavyIon': ['true', 'false']}},
            {'Name': 'Batch2', 'Description': 'lksdjflksjdf', 'Creator': 'valya', 'Group': 'test',
             'Workflows': ['workflow1', 'workflow2'],
             'Date': 'Fri Feb 10 10:36:41 EST 2015',
             'Attributes': {'HeavyIon': ['true', 'false']}},
        ]
        docs = [r for r in sort(results, sortby)]
        filter_sort = self.templatepage('filter_sort')
        content = self.templatepage('batches', batches=docs, sort=sortby,
                                    filter_sort_table=filter_sort)
        return self.abs_page('batches', content)
### Aux methods ###
@expose
def put_request(self, **kwds):
"PUT request callback to reqmgr server, should be used in AJAX"
reqname = kwds.get('RequestName', '')
status = kwds.get('RequestStatus', '')
if not reqname:
msg = 'Unable to update request status, empty request name'
raise cherrypy.HTTPError(406, msg)
if not status:
msg = 'Unable to update request status, empty status value'
raise cherrypy.HTTPError(406, msg)
return self.reqmgr.updateRequestStatus(reqname, status)
    @expose
    def images(self, *args):
        """
        Serve static images.
        """
        args = list(args)
        check_scripts(args, self.imgmap, self.imgdir)
        mime_types = ['*/*', 'image/gif', 'image/png',
                      'image/jpg', 'image/jpeg']
        accepts = cherrypy.request.headers.elements('Accept')
        # serve the file only when a single known image is requested and the
        # client accepts an image mime type
        for accept in accepts:
            if accept.value in mime_types and len(args) == 1 \
                    and args[0] in self.imgmap:
                image = self.imgmap[args[0]]
                # use image extension to pass correct content type
                ctype = 'image/%s' % image.split('.')[-1]
                cherrypy.response.headers['Content-type'] = ctype
                return serve_file(image, content_type=ctype)
        # NOTE(review): falls through and returns None (empty body) when no
        # acceptable match is found -- confirm this is the intended behavior
def serve(self, kwds, imap, idir, datatype='', minimize=False):
"Serve files for high level APIs (yui/css/js)"
args = []
for key, val in viewitems(kwds):
if key == 'f': # we only look-up files from given kwds dict
if isinstance(val, list):
args += val
else:
args.append(val)
scripts = check_scripts(args, imap, idir)
return self.serve_files(args, scripts, imap, datatype, minimize)
    @exposecss
    @tools.gzip()
    def css(self, **kwargs):
        """
        Serve provided CSS files. They can be passed as
        f=file1.css&f=file2.css
        """
        resource = kwargs.get('resource', 'css')
        if resource == 'css':
            # minimize=True: serve minified CSS
            return self.serve(kwargs, self.cssmap, self.cssdir, 'css', True)
    @exposejs
    @tools.gzip()
    def js(self, **kwargs):
        """
        Serve provided JS scripts. They can be passed as
        f=file1.js&f=file2.js with optional resource parameter
        to specify type of JS files, e.g. resource=yui.
        """
        resource = kwargs.get('resource', 'js')
        if resource == 'js':
            return self.serve(kwargs, self.jsmap, self.jsdir)
def serve_files(self, args, scripts, resource, datatype='', minimize=False):
"""
Return asked set of files for JS, YUI, CSS.
"""
idx = "-".join(scripts)
if idx not in self._cache:
data = ''
if datatype == 'css':
data = '@CHARSET "UTF-8";'
for script in args:
path = os.path.join(sys.path[0], resource[script])
path = os.path.normpath(path)
with open(path) as ifile:
data = "\n".join([data, ifile.read().replace('@CHARSET "UTF-8";', '')])
if datatype == 'css':
set_headers("text/css")
if minimize:
self._cache[idx] = minify(data)
else:
self._cache[idx] = data
return self._cache[idx]
| dmwm/WMCore | src/python/WMCore/ReqMgr/Web/ReqMgrService.py | ReqMgrService.py | py | 32,238 | python | en | code | 44 | github-code | 13 |
21928387763 | # Definition for a binary tree node.
# class TreeNode:
# def __init__(self, val=0, left=None, right=None):
# self.val = val
# self.left = left
# self.right = right
class Solution:
    def hasPathSum(self, root: Optional[TreeNode], targetSum: int) -> bool:
        """Return True if some root-to-leaf path sums to targetSum.

        Iterative BFS: each queue entry pairs a node with the sum of the
        path from the root down to (and including) that node.
        """
        if not root:
            return False
        queue = deque([(root, root.val)])
        while queue:
            node, path_sum = queue.popleft()
            if not node.left and not node.right:
                # leaf: a complete root-to-leaf path ends here
                if path_sum == targetSum:
                    return True
                continue
            if node.left:
                queue.append((node.left, path_sum + node.left.val))
            if node.right:
                queue.append((node.right, path_sum + node.right.val))
        return False
| dyabk/competitive-programming | LeetCode/path_sum.py | path_sum.py | py | 854 | python | en | code | 0 | github-code | 13 |
31238902889 | def homework_9(bag_size, items): # 請同學記得把檔案名稱改成自己的學號(ex.1104813.py)
    # Bottom-up 0/1 knapsack tabulation (despite the original note about
    # DFS/BFS + backtracking, this is dynamic programming).
    mount=len(items)
    visit={} # DP table: visit[(i, c)] = best value using the first i items with capacity c
    for a in range(bag_size+1):
        visit[(0,a)]=0
    for i in range(1,mount+1):
        for c in range(bag_size+1):
            if items[i-1][0]<=c: # item i fits into the remaining capacity c
                visit[(i,c)]=max(visit[i-1,c],items[i-1][1]+visit[(i-1,c-items[i-1][0])]) # best of skipping vs. taking item i
            else:
                visit[(i,c)]=visit[(i-1,c)] # item too heavy: value carries over from the previous row
    return visit[(mount,bag_size)]
if __name__ == '__main__':
    # sample knapsack: each item is a [weight, value] pair
    capacity = 3
    sample_items = [[1,25],[4,120],[4,30],[1,130],[2,20]]
    print(homework_9(capacity, sample_items))
    # expected output: 155
| daniel880423/Member_System | file/hw9/1100415/hw9_s1100415_0.py | hw9_s1100415_0.py | py | 1,035 | python | en | code | 0 | github-code | 13 |
31218023004 | import boto3
# Launch two tagged t2.micro instances via the EC2 resource API.
# NOTE(review): the second call rebinds x, so only the second batch of
# created instances remains referenced afterwards -- confirm intended.
ec2_resource = boto3.resource("ec2")
x = ec2_resource.create_instances(ImageId = 'ami-0cff7528ff583bf9a',
                   InstanceType = 't2.micro',
                   MaxCount = 1,
                   MinCount = 1,#change counts to add multiple
                   TagSpecifications = [
                   {
                       'ResourceType': 'instance',
                       'Tags': [{'Key': 'Test1','Value': 'Test1pair'}]
                   },
                   ],
)
print(x)
x = ec2_resource.create_instances(ImageId = 'ami-0cff7528ff583bf9a',
                   InstanceType = 't2.micro',
                   MaxCount = 1,
                   MinCount = 1,#change counts to add multiple
                   TagSpecifications = [
                   {
                       'ResourceType': 'instance',
                       'Tags': [{'Key': 'Test2','Value': 'Test2pair'}]
                   },
                   ],
)
ec2 = boto3.resource('ec2')
# BUG FIX: 'response' was never defined (resource.create_instances returns a
# list of ec2.Instance objects, not a run_instances response dict); iterate
# over the instances created above instead.
for created in x:
    print("Instance ID Created is :{} Instance Type Created is : {}" .format(created.id, created.instance_type))
# List currently running instances via the resource API.
# NOTE(review): ec2_client created here is unused in this section.
ec2_client = boto3.client("ec2")
instances = ec2.instances.filter(
    Filters=[{'Name': 'instance-state-name', 'Values': ['running']}])
for instance in instances:
    print(instance.id, instance.instance_type)
import boto3
# Collect all instance ids via the low-level client API.
ec2_client=boto3.client("ec2")
x=ec2_client.describe_instances()
data=x["Reservations"]
li=[]
for instances in data:
    instance=instances["Instances"]
    for ids in instance:
        instance_id=ids["InstanceId"]
        li.append(instance_id)
# termination left commented out on purpose -- uncomment to terminate all
#ec2_client.terminate_instances(InstanceIds=li)
print(instance_id) | Dwood99/Python-99 | code/Start_and_stop_ec2(In_progress).py | Start_and_stop_ec2(In_progress).py | py | 1,689 | python | en | code | 0 | github-code | 13 |
17039587334 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.constant.ParamConstants import *
class AlipayEbppIndustryGovHealthcodeQueryModel(object):
    """Request payload model for the government health-code query API.

    Exposes three optional fields through properties and supports
    round-tripping to/from a plain dict, mirroring the SDK convention.
    """

    # attribute names handled by the dict (de)serialization helpers
    _FIELDS = ('biz_info', 'biz_type', 'city_code')

    def __init__(self):
        self._biz_info = None
        self._biz_type = None
        self._city_code = None

    @property
    def biz_info(self):
        return self._biz_info

    @biz_info.setter
    def biz_info(self, value):
        self._biz_info = value

    @property
    def biz_type(self):
        return self._biz_type

    @biz_type.setter
    def biz_type(self, value):
        self._biz_type = value

    @property
    def city_code(self):
        return self._city_code

    @city_code.setter
    def city_code(self, value):
        self._city_code = value

    def to_alipay_dict(self):
        """Serialize the populated (truthy) fields into a plain dict."""
        params = dict()
        for field in self._FIELDS:
            value = getattr(self, field)
            if not value:
                continue
            # nested SDK models know how to serialize themselves
            if hasattr(value, 'to_alipay_dict'):
                value = value.to_alipay_dict()
            params[field] = value
        return params

    @staticmethod
    def from_alipay_dict(d):
        """Build a model from a dict; returns None for an empty/missing dict."""
        if not d:
            return None
        o = AlipayEbppIndustryGovHealthcodeQueryModel()
        for field in AlipayEbppIndustryGovHealthcodeQueryModel._FIELDS:
            if field in d:
                setattr(o, field, d[field])
        return o
| alipay/alipay-sdk-python-all | alipay/aop/api/domain/AlipayEbppIndustryGovHealthcodeQueryModel.py | AlipayEbppIndustryGovHealthcodeQueryModel.py | py | 1,862 | python | en | code | 241 | github-code | 13 |
43262129572 | def main():
    # Compute M^(K^N) mod MOD9, where MOD9 is prime.
    ans = 0
    # When M is a multiple of MOD9 the answer is 0; otherwise Fermat's
    # little theorem lets us reduce the huge exponent K^N mod (MOD9 - 1).
    if M % MOD9:
        kn = pow(K, N, MOD9-1)
        ans = pow(M, kn, MOD9)
    return print(ans)
if __name__ == '__main__':
    # read N, K, M from stdin; MOD9 is the prime modulus 998244353
    N, K, M = map(int, input().split())
    MOD9 = 998244353
    main()
| Shirohi-git/AtCoder | abc221-/abc228_e.py | abc228_e.py | py | 227 | python | en | code | 2 | github-code | 13 |
33924399913 | BOT_NAME = 'bbr'
SPIDER_MODULES = ['bbr.spiders']
NEWSPIDER_MODULE = 'bbr.spiders'
# write exported feed files as UTF-8
FEED_EXPORT_ENCODING = 'utf-8'
# only log errors
LOG_LEVEL = 'ERROR'
# no delay between consecutive requests
DOWNLOAD_DELAY = 0
# identify as a desktop Chrome browser
USER_AGENT="Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4324.150 Safari/537.36"
ROBOTSTXT_OBEY = True
# route scraped items through the project pipeline
ITEM_PIPELINES = {
    'bbr.pipelines.BbrPipeline': 300,
} | SimeonYS/bbr | bbr/settings.py | settings.py | py | 362 | python | en | code | 0 | github-code | 13 |
72103051539 | #!/usr/bin/env python
"""Batch parser that extracts and prints out results for given log files."""
import os, sys, glob
import argparse, itertools
# do some parallel computing
#from joblib import Parallel, delayed
###############
# Constants
###############
BYTE_PER_GB = 1024*1024*1024.0  # bytes per gigabyte (float to force float division)
KB_PER_GB = 1024*1024.0  # kilobytes per gigabyte
MS_PER_SEC = 1000.0  # milliseconds per second
ALG_PREMIZAN = 'premizan'  # Mizan's pre-processing step, timed as pure IO
SYSTEMS = ('giraph', 'gps', 'mizan', 'graphlab')  # index = the 'system' CLI argument
SYS_GIRAPH, SYS_GPS, SYS_MIZAN, SYS_GRAPHLAB = SYSTEMS
###############
# Parse args
###############
def check_system(system):
    """argparse 'type' validator: return the system index as an int.

    Raises argparse.ArgumentTypeError for non-numeric or out-of-range values.
    """
    try:
        s = int(system)
    except (TypeError, ValueError):
        # narrow except: the old bare 'except:' also swallowed its own
        # ArgumentTypeError (re-raising an identical one) and could mask
        # KeyboardInterrupt/SystemExit
        raise argparse.ArgumentTypeError('Invalid system')
    if (s < 0) or (s >= len(SYSTEMS)):
        raise argparse.ArgumentTypeError('Invalid system')
    return s
def check_cores(cores):
    """argparse 'type' validator: return the core count as a positive int.

    Raises argparse.ArgumentTypeError for non-numeric or < 1 values.
    """
    try:
        c = int(cores)
    except (TypeError, ValueError):
        # narrow except: the old bare 'except:' also swallowed its own
        # ArgumentTypeError (re-raising an identical one) and could mask
        # KeyboardInterrupt/SystemExit
        raise argparse.ArgumentTypeError('Invalid core count')
    if c < 1:
        raise argparse.ArgumentTypeError('Invalid core count')
    return c
parser = argparse.ArgumentParser(description='Outputs experimental data for specified log files.')
parser.add_argument('system', type=check_system,
                    help='system: 0 for Giraph, 1 for GPS, 2 for Mizan, 3 for GraphLab (invalid system will result in invalid time values)')
parser.add_argument('log', type=str, nargs='+',
                    help='an experiment\'s time log file, can be a regular expression (e.g. pagerank_orkut-adj.txt_16_0_20140101-123050_time.txt or page*or*_0_*time.txt)')
parser.add_argument('--master', action='store_true', default=False,
                    help='get mem/net statistics for the master rather than the worker machines')
#parser.add_argument('--cores', type=check_cores, dest='n_cores', default=4,
#                    help='number of cores to use (> 0), default=4')
# parse the command line once (previously parse_args() was invoked three
# times, re-validating the arguments on every call) and pick out the fields
_args = parser.parse_args()
system = SYSTEMS[_args.system]
logs_re = _args.log
do_master = _args.master
#n_cores = _args.n_cores
# expand each pattern and flatten into a single list of log files
logs = [f for re in logs_re for f in glob.glob(re)]
###############
# Main parsers
###############
def time_parser(log_prefix, system, alg):
    """Parses running and IO times for a single run.
    Arguments:
    log_prefix -- the prefix of one experiment run's log files (str)
    system -- the system tested (str)
    alg -- the algorithm tested (str)
    Returns:
    A tuple (running time, IO time) in seconds, or (0,0) if log files
    are missing.
    """
    log_files = glob.glob(log_prefix + '_time.txt')
    if len(log_files) != 1:
        return (0,0)
    log_file = log_files[0]
    io = run = total = 0
    if system == SYS_GIRAPH:
        # IO = setup + input superstep + shutdown; values are in milliseconds
        io = 0
        for line in open(log_file):
            if "Setup " in line:
                io = io + float(line.split()[5].split('=')[1])
            elif "Input superstep " in line:
                io = io + float(line.split()[6].split('=')[1])
            elif "Shutdown " in line:
                io = io + float(line.split()[5].split('=')[1])
            elif "Total (mil" in line:
                total = float(line.split()[5].split('=')[1])
        return ((total - io)/(MS_PER_SEC), io/(MS_PER_SEC))
    elif system == SYS_GPS:
        # timestamps: system start, compute start, latest status
        start = computestart = end = 0
        for line in open(log_file):
            if "SYSTEM_START_TIME " in line:
                start = float(line.split()[1])
            elif "START_TIME " in line:
                computestart = float(line.split()[1])
            elif "-1-LATEST_STATUS_TIMESTAMP " in line:
                end = float(line.split()[1])
        return ((end - computestart)/(MS_PER_SEC),
                (computestart - start)/(MS_PER_SEC))
    elif system == SYS_GRAPHLAB:
        # values already in seconds
        for line in open(log_file):
            if "TOTAL TIME (sec)" in line:
                total = float(line.split()[3])
            elif "Finished Running engine" in line:
                run = float(line.split()[4])
        return (run, (total - run))
    elif system == SYS_MIZAN:
        if alg == ALG_PREMIZAN:
            # premizan is pure pre-processing, counted entirely as IO
            for line in open(log_file):
                if "TOTAL TIME (sec)" in line:
                    io = float(line.split()[3])
            return (0.0, io)
        else:
            for line in open(log_file):
                if "TIME: Total Running Time without IO =" in line:
                    run = float(line.split()[7])
                elif "TIME: Total Running Time =" in line:
                    total = float(line.split()[5])
            return (run, (total - run))
def mem_parser(log_prefix, machines):
    """Parses memory usage of a single run.
    Arguments:
    log_prefix -- the prefix of one experiment run's log files (str)
    machines -- number of machines tested (int)
    Returns:
    A tuple (minimum mem, avg mem, maximum mem), where "mem" corresponds to
    the max memory used at each machine (GB), or (0,0,0) if logs are missing.
    """
    if do_master:
        log_files = glob.glob(log_prefix + '_0_mem.txt')
        if len(log_files) != 1:
            return (0,0,0)
    else:
        # worker logs only: machine 0 is the master
        log_files = [f for f in glob.glob(log_prefix + '_*_mem.txt') if "_0_mem.txt" not in f]
        if len(log_files) < machines:
            return (0,0,0)
    def parse(log):
        """Parses a single log file for mem stats.
        Returns: the max memory usage (max - min of samples) in GB.
        """
        # note that this "mems" is the memory usage (per second) of a SINGLE machine
        mems = [float(line.split()[2]) for line in open(log).readlines()]
        return (max(mems) - min(mems))/KB_PER_GB
    # list of each machine's maximum memory usage
    mems = [parse(log) for log in log_files]
    return (min(mems), sum(mems)/len(mems), max(mems))
def net_parser(log_prefix, machines):
    """Parses network usage of a single run.
    Arguments:
    log_prefix -- the prefix of one experiment run's log files (str)
    machines -- number of machines tested (int)
    Returns:
    A tuple (eth recv, eth sent), where eth recv/sent is the total network data
    received/sent across all worker machines (GB), or (0,0) if logs are missing.
    """
    if do_master:
        log_files = glob.glob(log_prefix + '_0_nbt.txt')
        if len(log_files) != 1:
            return (0,0)
    else:
        # worker logs only: machine 0 is the master
        log_files = [f for f in glob.glob(log_prefix + '_*_nbt.txt') if "_0_nbt.txt" not in f]
        if len(log_files) < machines:
            return (0,0)
    def parse(log):
        """Parses a single log file for net stats.
        Returns: (recv, sent) tuple in GB.
        """
        # bash equivalent:
        # recv=$((-$(cat "$log" | grep "eth0" | awk '{print $2}' | tr '\n' '+')0))
        # sent=$((-$(cat "$log" | grep "eth0" | awk '{print $10}' | tr '\n' '+')0))
        recv = 0
        sent = 0
        for line in open(log).readlines():
            # lines appear as initial followed by final, so this does the correct computation
            if "eth0" in line:
                recv = float(line.split()[1]) - recv
                sent = float(line.split()[9]) - sent
        return (recv/BYTE_PER_GB, sent/BYTE_PER_GB)
    # BUG FIX (Python 3 compatibility): zip() returns a non-subscriptable
    # iterator in Python 3, so the previous 'eth = zip(*eth); sum(eth[0])'
    # raised TypeError; unpack the columns directly instead.
    recv_col, sent_col = zip(*[parse(log) for log in log_files])
    return (sum(recv_col), sum(sent_col))
def check_files(log_prefix, machines):
    """Ensures all log files are present.
    Arguments:
    log_prefix -- the prefix of one experiment run's log files (str)
    machines -- number of machines tested (int)
    Returns:
    A tuple of a boolean and a string. The boolean is False if there
    is a critical missing log, and True otherwise. The string gives the
    source of the error, or a warning for missing CPU/net logs.
    """
    logname = os.path.basename(log_prefix)
    if len(glob.glob(log_prefix + '_time.txt')) == 0:
        return (False, "\n   ERROR: " + logname + "_time.txt missing!")
    # per-machine statistics files that every run must have
    stats = ['nbt', 'mem', 'cpu', 'net']
    if do_master:
        for stat in stats:
            if len(glob.glob(log_prefix + '_0_' + stat + '.txt')) == 0:
                return (False, "\n   ERROR: " + logname + "_0_" + stat + ".txt missing!")
    else:
        for stat in stats:
            # machines+1, as the master has those log files too
            if len(glob.glob(log_prefix + '_*_' + stat + '.txt')) < machines+1:
                return (False, "\n   ERROR: " + logname + "_*_" + stat + ".txt missing!")
    return (True, "")
###############
# Output data
###############
def single_iteration(log):
    """Outputs results for one run of an experiment.
    Arguments: time log file name.
    Returns: results for the run as an output friendly string.
    """
    # cut via range, in case somebody decides to put _time.txt in the path
    logname = os.path.basename(log)[:-len('_time.txt')]
    # log names follow: <alg>_<graph>_<machines>_<run>_<timestamp>
    alg, _, machines, _, _ = logname.split('_')
    # header string
    if (system == SYS_MIZAN) and (alg != ALG_PREMIZAN):
        header = logname + " (excludes premizan time)"
    elif (system == SYS_GIRAPH) and (len(glob.glob(log)) != 0):
        # Giraph: report the final job status parsed from the log
        header = logname + " (cancelled job)"
        for line in open(log):
            if "Job complete: " in line:
                header = logname + " (" + line.split()[6] + ")"
                break
    else:
        header = logname
    log_prefix = log[:-len('_time.txt')]
    is_ok, err_str = check_files(log_prefix, int(machines))
    if is_ok:
        time_run, time_io = time_parser(log_prefix, system, alg)
        mem_min, mem_avg, mem_max = mem_parser(log_prefix, int(machines))
        eth_recv, eth_sent = net_parser(log_prefix, int(machines))
        stats = (time_run+time_io, time_io, time_run, mem_min, mem_avg, mem_max, eth_recv, eth_sent)
        separator = "------------+------------+------------+--------------------------------+---------------------------"
        return header + err_str + "\n" + separator + "\n  %8.2fs |  %8.2fs |  %8.2fs | %7.3f / %7.3f / %7.3f GB | %8.3f / %8.3f GB \n" % stats + separator
    else:
        return header + err_str
# no point in doing parallel computation b/c # of logs parsed is usually not very large
#out = Parallel(n_jobs=n_cores)(delayed(single_iteration)(log) for log in logs)
# output results serially: header table, one section per log, footer table
print("")
print("==================================================================================================")
print(" Total time | Setup time | Comp. time |  Memory usage (min/avg/max)    | Total net I/O (recv/sent) ")
print("============+============+============+===============================+===========================")
print("")
for log in logs:
    print(single_iteration(log))
    print("")
# another friendly reminder of what each thing is...
print("============+============+============+================================+===========================")
print(" Total time | Setup time | Comp. time |  Memory usage (min/avg/max)    | Total net I/O (recv/sent) ")
print("===================================================================================================")
print("")
| xvz/graph-processing | benchmark/parsers/batch-parser.py | batch-parser.py | py | 10,947 | python | en | code | 23 | github-code | 13 |
14393294259 | from flask import Flask, request, jsonify
from flask_sqlalchemy import SQLAlchemy
from flask_marshmallow import Marshmallow
from sqlalchemy import create_engine, Column, Integer, String
from sqlalchemy.orm import scoped_session, sessionmaker
from sqlalchemy.ext.declarative import declarative_base
# Flask app initialization and configuration
app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///phone_book.sqlite' # create URI connection to db
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False # gets rid of warning
# SQLAlchemy session/engine wiring per the Flask pattern:
# https://flask.palletsprojects.com/en/1.1.x/patterns/sqlalchemy/
engine = create_engine('sqlite:///phone_book.sqlite', convert_unicode=True)
db_session = scoped_session(sessionmaker(autocommit=False,
                                         autoflush=False,
                                         bind=engine))
Base = declarative_base()
# allow Model.query-style lookups on declarative models
Base.query = db_session.query_property()
"""
example Model
if you want to add another one, remember to add element __tablename__
"""
class PhoneBook(Base):
    # single table holding one row per phone-book entry
    __tablename__ = 'PhoneBook'
    id = Column(Integer, primary_key=True)
    name = Column(String(32))      # contact name
    operator = Column(String(16))  # mobile network operator
    number = Column(String(11))    # phone number stored as a string
    def __init__(self, id, name, operator, number):
        self.id = id
        self.name = name
        self.operator = operator
        self.number = number
# Function used for initialization of a new database
def init_db():
    # create all tables declared on Base (no-op for existing tables)
    Base.metadata.create_all(bind=engine)
# function used for adding a few sample elements to a freshly created database
def add_few_records():
    db_session.add(PhoneBook(0, 'Karolina', 'Play', '111222333'))
    db_session.add(PhoneBook(1, 'Michał', 'Play', '555333111'))
    db_session.add(PhoneBook(2, 'Karolina', 'Orange', '000999888'))
    db_session.add(PhoneBook(3, 'Wiktoria', 'T-Mobile', '234223322'))
    db_session.commit()
"""
Function adds new element to the database. New element (rec) has to be PhoneBook, unless you created new model.
usage:
new_record = PhoneBook('4', 'Kacper', 'Orange', '444444213')
add_record(new_record)
"""
def add_record_(rec):
    # persist a pre-built model instance
    db_session.add(rec)
    db_session.commit()
def remove_record_(id):
    # delete the entry with the given primary key
    # NOTE(review): query.get returns None for a missing id, which would
    # make db_session.delete fail -- confirm callers pass valid ids
    record_of_intrest = PhoneBook.query.get(id)
    db_session.delete(record_of_intrest)
    db_session.commit()
def get_record_(id):
    # fetch one entry by primary key and serialize it to a dict
    record_of_intrest = pb_one_record_schema.dump(PhoneBook.query.get(id))
    return record_of_intrest
def update_record_(id, **kwargs):
    """Update fields of the entry with the given id from keyword arguments.

    BUG FIX (was marked TODO): the previous version mutated the dict
    produced by the marshmallow schema instead of the mapped row, so
    db_session.commit() persisted nothing. Update the ORM object itself.
    """
    record_of_intrest = PhoneBook.query.get(id)
    for key, value in kwargs.items():
        # keep the original str() coercion of incoming values
        setattr(record_of_intrest, key, str(value))
    db_session.commit()
# marshmallow serialization setup:
# https://flask-marshmallow.readthedocs.io/en/latest/
ma = Marshmallow(app)
class DecoderSchema(ma.Schema):
    class Meta:
        # fields exposed when (de)serializing PhoneBook rows
        fields = ("id", "name", "operator", "number")
# schema instances: one for single objects, one for collections
pb_one_record_schema = DecoderSchema()
pb_records_schema = DecoderSchema(many=True)
"""
the following functions requires some addition outside tool.
During tests i was using "Postman".
Be aware that you have to enter proper endpoint. For every funtion you can find it in the decorator (@app.route)
"""
# ADD ELEMENT
@app.route('/PhoneBook', methods=['POST'])
def add_record():
    """Create a new phone-book entry from the JSON request body."""
    id = request.json['id']
    name = request.json['name']
    operator = request.json['operator']
    number = request.json['number']
    new_record = PhoneBook(id, name, operator, number)
    db_session.add(new_record)
    db_session.commit()
    # respond with the serialized new record
    return pb_one_record_schema.jsonify(new_record)
# GET ALL ELEMENTS
@app.route('/PhoneBook', methods=['GET'])
def get_all_records():
    """Return all phone-book entries as a JSON list."""
    all_records = pb_records_schema.dump(PhoneBook.query.all())
    return jsonify(all_records)
# GET SPECIFIC RECORD
@app.route('/PhoneBook/<id>', methods=['GET'])
def get_record(id):
    """Return the entry with the given id as JSON."""
    record_of_intrest = pb_one_record_schema.dump(PhoneBook.query.get(id))
    return jsonify(record_of_intrest)
# UPDATE RECORD
@app.route('/PhoneBook/<id>', methods=['PUT'])
def update_record(id):
    """Replace all fields of the entry with values from the JSON body."""
    # NOTE(review): no handling for a missing id (query.get returning None)
    # -- a bad id raises AttributeError instead of returning 404; confirm
    record_of_intrest = PhoneBook.query.get(id)
    record_of_intrest.id = request.json['id']
    record_of_intrest.name = request.json['name']
    record_of_intrest.operator = request.json['operator']
    record_of_intrest.number = request.json['number']
    db_session.commit()
    return pb_one_record_schema.jsonify(record_of_intrest)
# DELETE RECORD
@app.route('/PhoneBook/<id>', methods=['DELETE'])
def delete_record(id):
    """Delete the entry with the given id and return its serialized form."""
    record_of_intrest = PhoneBook.query.get(id)
    db_session.delete(record_of_intrest)
    db_session.commit()
    # BUG FIX: a single record must be serialized with the single-object
    # schema; pb_records_schema (many=True) expects an iterable of records.
    return pb_one_record_schema.jsonify(record_of_intrest)
if __name__ == '__main__':
app.run()
| Yyrii/Flask_sqlalchemy-api | app.py | app.py | py | 4,638 | python | en | code | 0 | github-code | 13 |
40201057613 | from medienverwaltungweb.tests import *
from medienverwaltungweb.tests.functional import *
log = logging.getLogger(__name__)
class TestPersonController(TestController):
    """Functional tests for the person controller (edit/list/merge/etc.).

    setUp seeds one Person ("Bruce Schneier"), one Medium and one
    RelationType ("Author") so that every test starts from a known,
    freshly committed database state.
    """
    def setUp(self):
        TestController.setUp(self)
        # Seed person with id 1 (asserted below).
        self.bruce = model.Person()
        self.bruce.name = u"Bruce Schneier"
        meta.Session.add(self.bruce)
        self.medium = model.Medium()
        self.medium.title = u"A Medium"
        meta.Session.add(self.medium)
        self.relation = model.RelationType()
        self.relation.name = u"Author"
        meta.Session.add(self.relation)
        meta.Session.commit()
        # Sanity check: the seeded person must have received id 1.
        self.assertEqual(1, self.bruce.id)
    def test_edit(self):
        """The edit page for person 1 shows the person's name."""
        response = self.app.get(url(controller='person',
                                    action='edit',
                                    id=1))
        assert "Bruce Schneier" in response
    def test_edit_post(self):
        """Posting a new name persists it on the person record."""
        response = self.app.get(url(controller='person',
                                    action='edit_post',
                                    id='1'),
                                params={'name':'Changed'})
        # Follow the redirect issued after the edit.
        self.app.get(response.location)
        record = meta.Session.query(model.Person).get(1)
        self.assertEqual('Changed', record.name)
    def test_edit_post_alias(self):
        """Renaming with create_alias=yes keeps the old name as an alias."""
        response = self.app.get(url(controller='person',
                                    action='edit_post',
                                    id='1'),
                                params={'name':'Changed',
                                        'create_alias':'yes'})
        self.app.get(response.location)
        record = meta.Session.query(model.Person).get(1)
        self.assertEqual('Changed', record.name)
        self.assertEqual(1, len(record.aliases))
    def test_list(self):
        """The list page shows every stored person."""
        record = model.Person()
        record.name = u"Chuck Norries"
        meta.Session.add(record)
        meta.Session.commit()
        response = self.app.get(url(controller='person',
                                    action='list'))
        assert "Bruce Schneier" in response
        assert "Chuck Norries" in response
    def test_top_ten(self):
        """top_ten lists persons with their relation, e.g. Author."""
        # Link Bruce -> Medium with the "Author" relation.
        record = model.PersonToMedia()
        self.bruce.persons_to_media.append(record)
        self.medium.persons_to_media.append(record)
        self.relation.persons_to_media.append(record)
        meta.Session.add(record)
        meta.Session.commit()
        response = self.app.get(url(controller='person',
                                    action='top_ten'))
        assert "Bruce Schneier" in response
        assert "Author" in response
        assert not "Actor" in response
    def test_add_to_medium_post(self):
        """Posting a new person/role to a medium creates the person and link."""
        response = self.app.get(url(controller='person',
                                    action='add_to_medium_post',
                                    id='1'),
                                params={'name':'Foo',
                                        'role':'Author'})
        self.app.get(response.location)
        # The new person gets the next id (2).
        record = meta.Session.query(model.Person).get(2)
        self.assertNotEqual(None, record)
        self.assertEqual('Foo', record.name)
        self.assertEqual('A Medium', record.persons_to_media[0].medium.title)
    def test_merge_post(self):
        """Merging two persons re-homes/removes the loser's media links."""
        # Bruce (id 1) gets a media link ...
        record = model.PersonToMedia()
        self.bruce.persons_to_media.append(record)
        self.medium.persons_to_media.append(record)
        self.relation.persons_to_media.append(record)
        meta.Session.add(record)
        # ... and John (id 2) gets a link to the same medium/relation.
        john = model.Person()
        john.name = u"John"
        meta.Session.add(john)
        record = model.PersonToMedia()
        john.persons_to_media.append(record)
        self.medium.persons_to_media.append(record)
        self.relation.persons_to_media.append(record)
        meta.Session.add(record)
        meta.Session.commit()
        # Merge person 1 into person 2 (person 2 is primary).
        response = self.app.get(url(controller='person',
                                    action='merge_post'),
                                params={'primary_id':'2',
                                        'person_ids_str':'1,2'})
        self.app.get(response.location)
        # The duplicate link (id 1) must be gone after the merge.
        record = meta.Session.query(model.PersonToMedia).get(1)
        self.assertEqual(None, record)
    def test_remove_from_media(self):
        """remove_from_media deletes the person-to-media link."""
        record = model.PersonToMedia()
        self.bruce.persons_to_media.append(record)
        self.medium.persons_to_media.append(record)
        self.relation.persons_to_media.append(record)
        meta.Session.add(record)
        meta.Session.commit()
        response = self.app.get(url(controller='person',
                                    action='remove_from_media',
                                    id='1'),
                                params={'name':'Foo',
                                        'role':'Author'})
        self.app.get(response.location)
        record = meta.Session.query(model.PersonToMedia).get(1)
        self.assertEqual(None, record)
| dummy3k/medienverwaltung | medienverwaltungweb/medienverwaltungweb/tests/functional/test_person.py | test_person.py | py | 4,970 | python | en | code | 4 | github-code | 13 |
30163188029 | from htutil import file
import json
from pathlib import Path
import toml
import os
def template_make(raw: str, cfg: dict) -> str:
    """Replace every configured value in *raw* with a ``${key}`` placeholder.

    BUG FIX: the original iterated the dict in insertion order, so a value
    that is a substring of another value (e.g. "foo" vs "foobar") could be
    substituted first and corrupt the longer match. Values are now
    substituted longest-first.

    Args:
        raw: the raw text to turn into a template.
        cfg: mapping of placeholder name -> concrete value to replace.

    Returns:
        *raw* with each configured value replaced by ``${name}``.
    """
    for key in sorted(cfg, key=lambda k: len(str(cfg[k])), reverse=True):
        raw = raw.replace(cfg[key], '${' + key + '}')
    return raw
def main():
    """Generate ``out/<name>/template.txt`` for every directory under ``in``.

    Each input directory must contain ``raw.txt`` (the raw text) and
    ``cfg.json`` (placeholder name -> value mapping).
    """
    out_root = Path('out')
    for name in os.listdir('in'):
        src = Path('in') / name
        raw = file.read_text(src / 'raw.txt')
        cfg = file.read_json(src / 'cfg.json')
        file.write_text(out_root / name / 'template.txt',
                        template_make(raw, cfg))


if __name__ == '__main__':
    main()
| 117503445/goframe_template | script/template_make/main.py | main.py | py | 587 | python | en | code | 1 | github-code | 13 |
851444214 | #!/bin/python3.6
import subprocess, json,sys
from socket import gethostname as hostname
from os import listdir
from logqueue import queuethis
from etcdput import etcdput as put
from etcdgetpy import etcdget as get
from etcddel import etcddel as dels
from os.path import getmtime
def putzpool(leader,myhost):
    """Inventory this host's ZFS pools/raids/disks/volumes/snapshots and publish to etcd.

    Shells out to ``zpool status``, ``zfs list``, ``lsscsi`` and ``fdisk``
    to build nested dicts describing the storage layout, then writes the
    JSON under ``hosts/<myhost>/current``, ``lists/<myhost>`` and
    ``pools/<pool>`` keys and reconciles stale pool keys.
    Presumably run periodically on each storage node — TODO confirm.

    Args:
        leader: etcd leader host name (currently unused in the body).
        myhost: name of the host this inventory is attributed to.
    """
    perfmon = '0'
    #with open('/pacedata/perfmon','r') as f:
    #    perfmon = f.readline()
    #if '1' in perfmon:
    #    queuethis('putzpool.py','start','system')
    #x=subprocess.check_output(['pgrep','-c','putzpool'])
    #x=str(x).replace("b'","").replace("'","").split('\\n')
    #if(x[0]!= '1' ):
    #    print('process still running',x[0])
    #exit()
    sitechange=0
    readyhosts=get('ready','--prefix')
    # Pools with a cachefile already written under /TopStordata/.
    knownpools=[f for f in listdir('/TopStordata/') if 'pdhcp' in f and 'pree' not in f ]
    cmdline='/sbin/zpool status'
    result=subprocess.run(cmdline.split(),stdout=subprocess.PIPE).stdout
    # str(bytes) gives "b'...'": [2:] strips b', [:-3] the trailing quote/newline.
    sty=str(result)[2:][:-3].replace('\\t','').split('\\n')
    cmdline='/bin/lsscsi -is'
    result=subprocess.run(cmdline.split(),stdout=subprocess.PIPE).stdout
    # Only LIO-exported devices are of interest; freepool starts as a copy
    # and shrinks as disks are matched to pool members below.
    lsscsi=[x for x in str(result)[2:][:-3].replace('\\t','').split('\\n') if 'LIO' in x ]
    freepool=[x for x in str(result)[2:][:-3].replace('\\t','').split('\\n') if 'LIO' in x ]
    periods=get('Snapperiod','--prefix')
    raidtypes=['mirror','raidz','stripe']
    availraid=['mirror','raidz']
    raid2=['log','cache','spare']
    zpool=[]
    stripecount=0
    spaces=-2
    raidlist=[]
    disklist=[]
    lpools=[]
    ldisks=[]
    ldefdisks=[]
    linusedisks=[]
    lfreedisks=[]
    lsparedisks=[]
    lhosts=set()
    phosts=set()
    lraids=[]
    lvolumes=[]
    lsnapshots=[]
    poolsstatus=[]
    x=list(map(chr,(range(97,123))))
    # Collect the local sdX device names from fdisk output.
    cmdline=['fdisk','-l']
    cdisks=subprocess.run(cmdline,stderr=subprocess.PIPE, stdout=subprocess.PIPE)
    devs=cdisks.stdout.decode().split('Disk /dev/')
    drives = []
    for dev in devs:
        dsk = dev.split(':')[0]
        if 'sd' in dsk:
            drives.append(dsk)
    cmdline=['/sbin/zfs','list','-t','snapshot,filesystem,volume','-o','name,creation,used,quota,usedbysnapshots,refcompressratio,prot:kind,available,referenced,status:mount,snap:type,partner:receiver,partner:sender','-H']
    result=subprocess.run(cmdline,stdout=subprocess.PIPE)
    zfslistall=str(result.stdout)[2:][:-3].replace('\\t',' ').split('\\n')
    #lists=[lpools,ldisks,ldefdisks,lavaildisks,lfreedisks,lsparedisks,lraids,lvolumes,lsnapshots]
    zfslistall=str(result.stdout)[2:][:-3].replace('\\t',' ').split('\\n')
    # NOTE(review): 'hosts'/'phosts' are copied (list(...)) here while the
    # sets are still empty; later lhosts.add(...) calls do NOT update this
    # dict, so the published lists always carry empty host lists — confirm.
    lists={'pools':lpools,'disks':ldisks,'defdisks':ldefdisks,'inusedisks':linusedisks,'freedisks':lfreedisks,'sparedisks':lsparedisks,'raids':lraids,'volumes':lvolumes,'snapshots':lsnapshots, 'hosts':list(lhosts), 'phosts':list(phosts)}
    # Parse `zpool status` line by line: pool headers, raid/vdev headers
    # and member-disk lines are distinguished by their content/indent.
    for a in sty:
        #print('aaaaaa',a)
        b=a.split()
        if len(b) > 0:
            b.append(b[0])
            # Translate a raw sdX member name into its scsi-<wwn> form.
            if any(drive in str(b[0]) for drive in drives):
                for lss in lsscsi:
                    if any('/dev/'+b[0] in lss for drive in drives):
                        b[0]='scsi-'+lss.split()[6]
        #print('strb',str(b))
        if "pdhc" in str(b) and 'pool' not in str(b):
            # --- pool header line: gather zfs/zpool details for this pool ---
            raidlist=[]
            volumelist=[]
            zdict={}
            rdict={}
            ddict={}
            zfslist=[x for x in zfslistall if b[0] in x ]
            #print('zfslist',b[0],zfslist)
            cmdline=['/sbin/zfs','get','avail:type',b[0], '-H']
            result=subprocess.run(cmdline,stdout=subprocess.PIPE)
            availtype=str(result.stdout)[2:][:-3].split('\\t')[2]
            cmdline=['/sbin/zpool','list',b[0],'-H']
            result=subprocess.run(cmdline,stdout=subprocess.PIPE)
            zlist=str(result.stdout)[2:][:-3].split('\\t')
            cmdline=['/sbin/zfs','get','compressratio','-H']
            result=subprocess.run(cmdline,stdout=subprocess.PIPE)
            zlist2=str(result.stdout)[2:][:-3].split('\\t')
            if b[0] in knownpools:
                cachetime=getmtime('/TopStordata/'+b[0])
            else:
                # First sighting: register a cachefile for the pool.
                cmdline='/sbin/zpool set cachefile=/TopStordata/'+b[0]+' '+b[0]
                subprocess.run(cmdline.split(),stdout=subprocess.PIPE)
                cachetime='notset'
                #put('pools/'+b[0],myhost)
            poolsstatus.append(('pools/'+b[0],myhost))
            zdict={ 'name':b[0],'changeop':b[1], 'availtype':availtype, 'status':b[1],'host':myhost, 'used':str(zfslist[0].split()[6]),'available':str(zfslist[0].split()[11]), 'alloc': str(zlist[2]), 'size': zlist[1], 'empty': zlist[3], 'dedup': zlist[7], 'compressratio': zlist2[2],'timestamp':str(cachetime), 'raidlist': raidlist ,'volumes':volumelist}
            zpool.append(zdict)
            lpools.append(zdict)
            # Attach this pool's volumes and their snapshots.
            for vol in zfslist:
                if b[0]+'/' in vol and '@' not in vol and b[0] in vol:
                    volume=vol.split()
                    volname=volume[0].split('/')[1]
                    snaplist=[]
                    snapperiod=[]
                    snapperiod=[[x[0],x[1]] for x in periods if volname in x[0]]
                    vdict={'fullname':volume[0],'name':volname, 'pool': b[0], 'host':myhost, 'creation':' '.join(volume[1:4]+volume[5:6]),'time':volume[4], 'used':volume[6], 'quota':volume[7], 'usedbysnapshots':volume[8], 'refcompressratio':volume[9], 'prot':volume[10],'available':volume[11], 'referenced':volume[12],'statusmount':volume[13], 'snapshots':snaplist, 'snapperiod':snapperiod}
                    volumelist.append(vdict)
                    lvolumes.append(vdict['name'])
                elif '@' in vol and b[0] in vol:
                    snapshot=vol.split()
                    snapname=snapshot[0].split('@')[1]
                    partnerr=''
                    partners=''
                    if len(snapshot) >= 17:
                        partners = snapshot[16]
                    if len(snapshot) >= 16:
                        partnerr = snapshot[15]
                    sdict={'fullname':snapshot[0],'name':snapname, 'volume':volname, 'pool': b[0], 'host':myhost, 'creation':' '.join(snapshot[1:4]+volume[5:6]), 'time':snapshot[4], 'used':snapshot[6], 'quota':snapshot[7], 'usedbysnapshots':snapshot[8], 'refcompressratio':snapshot[9], 'prot':snapshot[10],'referenced':snapshot[12], 'statusmount':snapshot[13],'snaptype':snapshot[14], 'partnerR': partnerr, 'partnerS': partners}
                    snaplist.append(sdict)
                    lsnapshots.append(sdict['name'])
        elif any(raid in str(b) for raid in raidtypes):
            # --- raid/vdev header line (mirror/raidz/stripe) ---
            spaces=len(a.split(a.split()[0])[0])
            disklist=[]
            missingdisks=[0]
            if 'Availability' in zdict['availtype'] and 'stripe' in b[0]:
                b[1] = 'DEGRADED'
            rdict={ 'name':b[0], 'changeop':b[1],'status':b[1],'pool':zdict['name'],'host':myhost,'disklist':disklist, 'missingdisks':missingdisks }
            raidlist.append(rdict)
            lraids.append(rdict)
        elif any(raid in str(b) for raid in raid2):
            # --- auxiliary vdev header line (log/cache/spare) ---
            spaces=len(a.split(a.split()[0])[0])
            disklist=[]
            missingdisks=[0]
            b[1] = 'NA'
            # NOTE(review): `raid` is not bound here (generator variables do
            # not leak in Python 3) and `availraids` is spelled without the
            # trailing 's' above (`availraid`); this branch would raise
            # NameError when an Availability-type pool has log/cache/spare
            # vdevs — confirm and fix.
            if 'Availability' in zdict['availtype'] and raid not in availraids:
                b[1] = 'DEGRADED'
            rdict={ 'name':b[0], 'changeop':b[1],'status':b[1],'pool':zdict['name'],'host':myhost,'disklist':disklist, 'missingdisks':missingdisks }
            raidlist.append(rdict)
            lraids.append(rdict)
        elif 'dm-' in str(b) and 'corrupted' in str(b):
            missingdisks[0] += 1
        elif 'scsi' in str(b) or 'disk' in str(b) or '/dev/' in str(b) or (len(b) > 0 and 'sd' in b[0] and len(b[0]) < 5):
            # --- member-disk line: resolve it against the lsscsi output ---
            diskid='-1'
            host='-1'
            size='-1'
            devname='-1'
            disknotfound=1
            # A disk at the raid-header indent level (or with no raid seen
            # yet) belongs to an implicit stripe vdev; synthesise one.
            if len(a.split('scsi')[0]) < (spaces+2) or (len(raidlist) < 1 and len(zpool)> 0):
                disklist=[]
                b[1] = 'NA'
                if 'Availability' in zdict['availtype'] :
                    b[1] = 'DEGRADED'
                rdict={ 'name':'stripe-'+str(stripecount), 'pool':zdict['name'],'changeop':b[1],'status':b[1],'host':myhost,'disklist':disklist, 'missingdisks':[0] }
                raidlist.append(rdict)
                lraids.append(rdict)
                stripecount+=1
            disknotfound=1
            for lss in lsscsi:
                z=lss.split()
                if z[6] in b[0] and len(z[6]) > 3 and 'OFF' not in b[1] :
                    diskid=lsscsi.index(lss)
                    host=z[3].split('-')[1]
                    lhosts.add(host)
                    phosts.add(host)
                    size=z[7]
                    devname=z[5].replace('/dev/','')
                    # In-use disk: no longer a free-pool candidate.
                    freepool.remove(lss)
                    disknotfound=0
                    break
            if disknotfound == 1:
                diskid=0
                host='-1'
                size='-1'
                devname=b[0]
            #else:
            #    cmdline='/pace/hostlost.sh '+z[6]
            #    subprocess.run(cmdline.split(),stdout=subprocess.PIPE)
            if 'Availability' in zdict['availtype'] and 'DEGRAD' in rdict['changeop']:
                b[1] = 'ONLINE'
            changeop=b[1]
            # A disk that could not be resolved marks pool and raid as warnings.
            if host=='-1':
                raidlist[len(raidlist)-1]['changeop']='Warning'
                zpool[len(zpool)-1]['changeop']='Warning'
                changeop='Removed'
                sitechange=1
            ddict={'name':b[0],'actualdisk':b[-1], 'changeop':changeop,'pool':zdict['name'],'raid':rdict['name'],'status':b[1],'id': str(diskid), 'host':host, 'size':size,'devname':devname}
            disklist.append(ddict)
            ldisks.append(ddict)
    # Disks that matched no pool member become the synthetic "pree" pool.
    if len(freepool) > 0:
        raidlist=[]
        zdict={ 'name':'pree','changeop':'pree', 'available':'0', 'status':'pree', 'host':myhost,'used':'0', 'alloc': '0', 'empty': '0','size':'0', 'dedup': '0', 'compressratio': '0', 'raidlist': raidlist, 'volumes':[]}
        zpool.append(zdict)
        lpools.append(zdict)
        disklist=[]
        rdict={ 'name':'free', 'changeop':'free','status':'free','pool':'pree','host':myhost,'disklist':disklist, 'missingdisks':[0] }
        raidlist.append(rdict)
        lraids.append(rdict)
        for lss in freepool:
            z=lss.split()
            devname=z[5].replace('/dev/','')
            if devname not in drives:
                continue
            diskid=lsscsi.index(lss)
            host=z[3].split('-')[1]
            if host not in str(readyhosts):
                continue
            ##### commented for not adding free disks of freepool
            lhosts.add(host)
            size=z[7]
            ddict={'name':'scsi-'+z[6],'actualdisk':'scsi-'+z[6], 'changeop':'free','status':'free','raid':'free','pool':'pree','id': str(diskid), 'host':host, 'size':size,'devname':devname}
            if z[6] in str(zpool):
                continue
            disklist.append(ddict)
            ldisks.append(ddict)
    if len(lhosts)==0:
        lhosts.add('')
    if len(phosts)==0:
        phosts.add('')
    # Publish the full inventory for this host.
    put('hosts/'+myhost+'/current',json.dumps(zpool))
    # Bucket disks by their change/state marker.
    for disk in ldisks:
        if disk['changeop']=='free':
            lfreedisks.append(disk)
        elif disk['changeop'] =='AVAIL':
            lsparedisks.append(disk)
        elif disk['changeop'] != 'ONLINE':
            ldefdisks.append(disk)
    put('lists/'+myhost,json.dumps(lists))
    # Reconcile etcd pool keys: remove keys for pools no longer present,
    # add keys for newly discovered pools.
    xall=get('pools/','--prefix')
    x=[y for y in xall if myhost in str(y)]
    xnotfound=[y for y in x if y[0].replace('pools/','') not in str(poolsstatus)]
    xnew=[y for y in poolsstatus if y[0].replace('pools/','') not in str(x)]
    for y in xnotfound:
        if y[0] not in xall:
            dels(y[0].replace('pools/',''),'--prefix')
        else:
            dels(y[0])
    for y in xnew:
        put(y[0],y[1])
    if '1' in perfmon:
        queuethis('putzpool.py','stop','system')
if __name__=='__main__':
    # Command-line arguments win; otherwise discover the leader from etcd
    # and use this machine's hostname.
    if len(sys.argv) > 1:
        leader, myhost = sys.argv[1], sys.argv[2]
    else:
        leader = get('leader', '--prefix')[0][0].split('/')[1]
        myhost = hostname()
    putzpool(leader, myhost)
| YousefAllam221b/PaceDev | putzpool.py | putzpool.py | py | 10,386 | python | en | code | 0 | github-code | 13 |
11071332753 | from PyQt5.QtWidgets import QLabel, QVBoxLayout, QHBoxLayout, QFrame, QGroupBox, QSlider, QSpinBox, QTextEdit, QDockWidget, QGridLayout
from PyQt5.QtCore import Qt
from MainGUI.Layout.ImageInforShow import imageInfor
from ImageProcessingAction.LabelModel.labelModel import imageShowManager
def helpInforShow(var):
    """Build the dockable help panel: a fixed, read-only text area."""
    dock = QDockWidget('帮助信息', var)
    content = QTextEdit(var)
    content.setFrameShape(QFrame.Panel | QFrame.Sunken)      # sunken panel border
    content.setAlignment(Qt.AlignVCenter | Qt.AlignLeft)     # left-aligned content
    content.setEnabled(False)                                # read-only display
    content.setFocusPolicy(Qt.NoFocus)
    content.setLineWrapMode(QTextEdit.NoWrap)                # no line wrapping
    content.verticalScrollBar()
    content.horizontalScrollBar()
    content.setPlaceholderText('此处显示帮助文档')
    dock.setFeatures(QDockWidget.NoDockWidgetFeatures)       # not movable/closable
    dock.setWidget(content)
    return dock
def sliceManager(var):
    """Build the image-adjustment dock with contrast and luminance controls.

    Creates slider + spinbox pairs on ``var`` (contrastSlide/contrastSpin,
    luminanceSlide/luminanceSpin) so other modules can enable and wire them
    later (see setLuminanceStatus). All controls start disabled.
    """
    sliceDock = QDockWidget('图像调节', var)
    sliceGroup = QGroupBox()
    vbox = QVBoxLayout()
    sliceHBox = QHBoxLayout()
    luminanceHBox = QHBoxLayout()
    contrastLabel = QLabel('对比度: ', var)
    luminanceLabel = QLabel('亮 度: ', var)
    var.contrastSlide = QSlider(Qt.Horizontal, var)
    var.luminanceSlide = QSlider(Qt.Horizontal, var)
    var.contrastSpin = QSpinBox(var)
    var.luminanceSpin = QSpinBox(var)
    var.contrastSlide.setFocusPolicy(Qt.NoFocus)
    var.contrastSlide.setSingleStep(1)
    var.contrastSlide.setMinimum(1)
    var.contrastSlide.setTickPosition(QSlider.TicksAbove) # tick marks above the slider
    var.contrastSlide.setEnabled(False)
    # sliceSlide.setFixedSize(1, 100)
    var.luminanceSlide.setFocusPolicy(Qt.NoFocus)
    var.luminanceSlide.setSingleStep(1)
    var.luminanceSlide.setMinimum(1)
    var.luminanceSlide.setTickPosition(QSlider.TicksLeft) # tick marks left of the slider
    # Keep luminance slider and spinbox in sync (both directions, see below).
    var.luminanceSlide.valueChanged.connect(var.luminanceSpin.setValue)
    var.luminanceSlide.setEnabled(False)
    var.contrastSpin.setSingleStep(1)
    # var.sliceSpin.setWrapping(True) # whether the value wraps around
    var.contrastSpin.setValue(1)
    var.contrastSpin.setEnabled(False)
    var.luminanceSpin.setSingleStep(1)
    # var.luminanceSpin.setWrapping(True)
    var.luminanceSpin.setValue(1)
    var.luminanceSpin.setEnabled(False)
    # NOTE(review): only the luminance pair is cross-connected; the contrast
    # slider/spinbox are never linked to each other — confirm intentional.
    var.luminanceSpin.valueChanged.connect(var.luminanceSlide.setValue)
    sliceHBox.addWidget(contrastLabel)
    sliceHBox.addWidget(var.contrastSlide)
    sliceHBox.addWidget(var.contrastSpin)
    luminanceHBox.addWidget(luminanceLabel)
    luminanceHBox.addWidget(var.luminanceSlide)
    luminanceHBox.addWidget(var.luminanceSpin)
    vbox.addLayout(sliceHBox)
    vbox.addLayout(luminanceHBox)
    vbox.addStretch()
    sliceGroup.setLayout(vbox)
    sliceDock.setFeatures(QDockWidget.NoDockWidgetFeatures)
    sliceDock.setWidget(sliceGroup)
    return sliceDock
# Enable the luminance controls once an image is loaded.
def setLuminanceStatus(var):
    """Enable the luminance slider/spinbox and set their range to 1-20."""
    for widget in (var.luminanceSlide, var.luminanceSpin):
        widget.setEnabled(True)
    var.luminanceSlide.setMaximum(20)
    var.luminanceSpin.setRange(1, 20)
def imageInforShow(var):
    """Build the closable dock that displays image metadata."""
    dock = QDockWidget('图像信息', var)
    dock.setFeatures(QDockWidget.DockWidgetClosable)
    dock.setWidget(imageInfor(var))
    return dock
# Build the (initially empty) image display area.
def imageShow(var):
    """Lay the four views (transverse/coronal/sagittal/3D) out in a 2x2 grid."""
    grid = QGridLayout()
    var.imageShow = imageShowManager(var)
    grid.addWidget(var.imageShow.transverseImageShow(), 0, 0)
    grid.addWidget(var.imageShow.coronalImageShow(), 0, 1)
    grid.addWidget(var.imageShow.sagittalImageShow(), 1, 0)
    grid.addWidget(var.imageShow.tDImageShow(), 1, 1)
    return grid
# Display a freshly opened image in every view.
def openImageLayout(var, imagePixmap):
    """Push *imagePixmap* into all four image views."""
    shower = var.imageShow
    shower.setTransverseImagePixmap(imagePixmap)
    shower.setCoronalImagePixmap(imagePixmap)
    shower.setSagittalImagePixmap(imagePixmap)
    shower.setTDImagePixmap(imagePixmap)
| ChengLongDeng/MedicalImageProcessingTool | MainGUI/Layout/InformationShowManager.py | InformationShowManager.py | py | 4,170 | python | en | code | 0 | github-code | 13 |
26535652986 |
from sklearn.cluster import KMeans
import numpy as np
from sklearn import datasets
import matplotlib.pyplot as plt
from matplotlib.colors import ListedColormap
iris = datasets.load_iris()
##------- use only the first two features ---------
X = iris.data[:, :2]
y = iris.target
kmeans = KMeans(n_clusters=3, random_state=0).fit(X)
kmeans.labels_
kmeans.cluster_centers_
cmap_light = ListedColormap(['#FFAAAA', '#AAFFAA','#00AAFF'])
black = ListedColormap(['#000000'])
plt.figure(figsize=(14,14))
# True classes as coloured background points.
plt.scatter(X[:, 0], X[:, 1], c=y, cmap=cmap_light,s=250)
markers = ["x","*",'+']
cluser_labels = kmeans.labels_
# Overlay the k-means cluster assignment with one black marker per cluster.
for i, c in enumerate(np.unique(cluser_labels)):
    plt.scatter(X[:, 0][cluser_labels==c],X[:, 1][cluser_labels==c],c=cluser_labels[cluser_labels==c], marker=markers[i],cmap = black,s=250)
plt.show()
###----------------------- visualising high-dimensional data --------------------------------------
from sklearn.cluster import KMeans
import numpy as np
from sklearn import datasets
import matplotlib.pyplot as plt
from matplotlib.colors import ListedColormap
from sklearn.decomposition import PCA
from sklearn.manifold import TSNE
boston = datasets.load_boston()
X = boston.data
##------- dimensionality reduction with PCA -------------
pca = PCA(n_components=2).fit(X)
pca_2d = pca.transform(X)
##------- dimensionality reduction with t-SNE -------------
tsne_2d = TSNE(n_components=2).fit_transform(X)
##------------------ k-means model ---------
kmeans = KMeans(n_clusters=8, random_state=0).fit(X)
cluser_labels = kmeans.labels_
##----------- PCA plot -----
plt.figure(figsize=(10,10))
plt.title('PCA')
plt.scatter(pca_2d[:, 0], pca_2d[:, 1], c=cluser_labels,s=250,cmap = 'Dark2')
plt.show()
#------------ t-SNE plot
plt.figure(figsize=(10,10))
plt.title('Tsne')
plt.scatter(tsne_2d[:, 0], tsne_2d[:, 1], c=cluser_labels,s=250,cmap = 'Dark2')
plt.show()
##------------------- choosing the optimal K (elbow method) -------------------------
k_vec = []
int_vec = []
for k in range(2,15):
    kmeans = KMeans(n_clusters=k, random_state=0).fit(X)
    interia = kmeans.inertia_
    k_vec.append(k)
    int_vec.append(interia)
plt.figure(figsize=(12,10))
plt.title('Wykres sum wariancji klastów')
plt.plot(k_vec,int_vec,'bo-')
plt.xlabel('liczba klastrów')
plt.ylabel('Suma wariancji klastrów')
plt.show()
##------------- plot for 4 clusters ------------------
kmeans = KMeans(n_clusters=4, random_state=0).fit(X)
cluser_labels = kmeans.labels_
plt.figure(figsize=(10,10))
plt.title('Tsne')
plt.scatter(tsne_2d[:, 0], tsne_2d[:, 1], c=cluser_labels,s=250,cmap = 'Dark2')
plt.show()
| Ralfik555/Course_DS | jdsz2-materialy-python/ML/4_knn_kmeans/kmeans.py | kmeans.py | py | 2,696 | python | en | code | 0 | github-code | 13 |
11159677732 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# python版本3.7
import re
import urllib.request
import urllib.error
import urllib.parse
from Crypto.Cipher import AES
import base64
import codecs
import requests
import json
headers = { # 请求头部
'User-Agent': 'Mozilla/5.0 (X11; Fedora; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.36'
}
def get_all_hotSong(url):
    """Scrape a NetEase playlist/chart page and return its song names and ids.

    Args:
        url: URL of the NetEase Cloud Music playlist page.

    Returns:
        A tuple ``(names, ids)`` of two parallel lists of strings.
    """
    page = str(urllib.request.urlopen(url).read().decode('utf8'))
    # First narrow down to the hidden <ul> that lists all chart songs ...
    block_pat = r'<ul class="f-hide"><li><a href="/song\?id=\d*?">.*</a></li></ul>'
    block = re.compile(block_pat).findall(page)[0]
    # ... then pull out names and ids with two captures over the same list.
    name_pat = r'<li><a href="/song\?id=\d*?">(.*?)</a></li>'
    id_pat = r'<li><a href="/song\?id=(\d*?)">.*?</a></li>'
    names = re.compile(name_pat).findall(block)
    ids = re.compile(id_pat).findall(block)
    return names, ids
def get_hotComments(hot_song_name, hot_song_id):
    """Fetch one song's hot comments and append them to ``./song_comments``.

    The comment endpoint is
    ``http://music.163.com/weapi/v1/resource/comments/R_SO_4_<id>?csrf_token=``
    where ``<id>`` is the song id scraped from the chart page.

    Args:
        hot_song_name: song title (kept for the call interface; currently unused).
        hot_song_id: NetEase song id as a string.
    """
    url = 'http://music.163.com/weapi/v1/resource/comments/R_SO_4_' + hot_song_id + '?csrf_token='  # comment-API URL
    header = {  # request headers
        'User-Agent': 'Mozilla/5.0 (X11; Fedora; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.36'
    }
    # POST form data: 'params'/'encSecKey' are pre-computed encrypted fields.
    # They are only valid for the first comment page — every page needs its
    # own pair, so this can only fetch the first page of hot comments.
    data = {
        'params': 'zC7fzWBKxxsm6TZ3PiRjd056g9iGHtbtc8vjTpBXshKIboaPnUyAXKze+KNi9QiEz/IieyRnZfNztp7yvTFyBXOlVQP/JdYNZw2+GRQDg7grOR2ZjroqoOU2z0TNhy+qDHKSV8ZXOnxUF93w3DA51ADDQHB0IngL+v6N8KthdVZeZBe0d3EsUFS8ZJltNRUJ',
        'encSecKey': '4801507e42c326dfc6b50539395a4fe417594f7cf122cf3d061d1447372ba3aa804541a8ae3b3811c081eb0f2b71827850af59af411a10a1795f7a16a5189d163bc9f67b3d1907f5e6fac652f7ef66e5a1f12d6949be851fcf4f39a0c2379580a040dc53b306d5c807bf313cc0e8f39bf7d35de691c497cda1d436b808549acc'}
    postdata = urllib.parse.urlencode(data).encode('utf8')
    request = urllib.request.Request(url, headers=header, data=postdata)
    reponse = urllib.request.urlopen(request).read().decode('utf8')
    json_dict = json.loads(reponse)
    hot_commit = json_dict['hotComments']
    # BUG FIX: the file handle was opened and closed manually; a context
    # manager guarantees it is closed even if a write fails.
    with open('./song_comments', 'a', encoding='utf-8-sig') as fhandle:
        for item in hot_commit:
            fhandle.write(item['content'] + '\n')
url_list = []
url = "https://music.163.com/discover/toplist?id=3778678&t=1511958091648"
# NetEase Cloud Music hot-song chart page.
hot_song_name, hot_song_id = get_all_hotSong(url)
# Save the hot comments of every song on the chart, one song at a time.
for index in range(len(hot_song_name)):
    print('正在抓取第%d首歌曲热评...' % (index + 1))
    get_hotComments(hot_song_name[index], hot_song_id[index])
    print('第%d首歌曲热评抓取成功' % (index + 1))
| Jinbo-He/PythonBIgQuiz | music163.py | music163.py | py | 4,117 | python | zh | code | 0 | github-code | 13 |
10722512196 | from charms.reactive import (
hook,
when,
only_once,
is_state
)
import os.path as path
from charmhelpers.core import hookenv, host
from charmhelpers.core.templating import render
from shell import shell
# ./lib/nginxlib
import nginxlib
# ./lib/wordpresslib.py
import wordpresslib
config = hookenv.config()
# HOOKS -----------------------------------------------------------------------
@hook('config-changed')
def config_changed():
    """Restart NGINX on charm config change, once NGINX is available."""
    if is_state('nginx.available'):
        host.service_restart('nginx')
        hookenv.status_set('active', 'Ready')
# REACTORS --------------------------------------------------------------------
@when('nginx.available')
@only_once
def install_app():
    """Perform the one-time Wordpress installation.

    Runs once after NGINX becomes available: writes the vhost config,
    downloads the Wordpress archive, restarts NGINX and marks the unit
    active.
    """
    hookenv.log('Installing Wordpress', 'info')
    # Configure NGINX vhost
    nginxlib.configure_site('default', 'vhost.conf')
    # Fetch/unpack the Wordpress release archive
    wordpresslib.download_archive()
    host.service_restart('nginx')
    hookenv.status_set('active', 'Wordpress is installed!')
@when('nginx.available', 'database.available')
def setup_mysql(mysql):
    """Configure the Wordpress database once MySQL becomes available.

    Currently a stub: it only computes the wp-config template path; the
    actual templating is still TODO (the final ``pass`` from the original
    stub is preserved as this comment).
    """
    hookenv.status_set('maintenance', 'Wordpress is connecting to MySQL')
    # BUG FIX: this module does `import os.path as path`, which binds only
    # `path` — the bare name `os` is undefined, so the original
    # `os.path.join(...)` raised NameError as soon as this reactor fired.
    target = path.join(nginxlib.get_app_path(), 'wp-config.php.template')
| adam-stokes/juju-charm-wordpress-hhvm | reactive/wordpress.py | wordpress.py | py | 1,349 | python | en | code | 0 | github-code | 13 |
18848050032 | import datetime
class Employee:
    """A company employee with basic payroll data.

    Attributes:
        name: employee name (str).
        age: age in years (int).
        salary: yearly salary (int).
        employment_date: year of hire (int).
    """

    def __init__(self, name, age, salary, employment_date):
        self.name = str(name)
        self.age = int(age)
        self.salary = int(salary)
        self.employment_date = int(employment_date)

    def get_working_years(self):
        """Return the number of years since the employment year."""
        return datetime.date.today().year - self.employment_date

    def __eq__(self, other):
        # BUG FIX: without __eq__ the roster duplicate checks
        # (`employee not in employees`) compared object identity and could
        # never match a freshly constructed object.
        if not isinstance(other, Employee):
            return NotImplemented
        return (self.name, self.age, self.salary, self.employment_date) == \
               (other.name, other.age, other.salary, other.employment_date)

    def __str__(self):
        return """\nName: {}, Age: {}, Salary: {}, Working years: {}""".format(self.name, self.age, self.salary, self.get_working_years())


class Manager(Employee):
    """An employee who additionally earns a percentage-based bonus."""

    def __init__(self, name, age, salary, employment_date, bonus_percentage):
        Employee.__init__(self, name, age, salary, employment_date)
        # Bonus entered at the "Bonus (%)" prompt, e.g. 10 for 10 percent.
        self.bonus_percentage = int(bonus_percentage)

    def get_bonus(self):
        """Return the bonus amount in currency units.

        BUG FIX: the prompt collects a percentage, so it must be divided
        by 100; the original multiplied the raw percentage by the salary
        (a 10% bonus on 50000 came out as 500000).
        """
        return self.salary * self.bonus_percentage / 100

    def __str__(self):
        return """\nName: {}, Age: {}, Salary: {}, Working years: {}, Bonus: {}""".format(self.name, self.age, self.salary, Employee.get_working_years(self), self.get_bonus())
employees = []
managers = []
def add_employee():
    """Prompt for employee details and append a new Employee to the roster.

    BUG FIX: the original `employee not in employees` check compared
    identity on a freshly built object, so it never detected a duplicate,
    and the success message was printed even on the duplicate path.
    """
    name = input("Enter name: ")
    age = input("Enter age: ")
    salary = input("Enter salary: ")
    employment_date = input("Date of employment (year): ")
    employee = Employee(name, age, salary, employment_date)
    duplicate = any(
        e.name == employee.name and e.age == employee.age
        and e.salary == employee.salary
        and e.employment_date == employee.employment_date
        for e in employees
    )
    if duplicate:
        print("Employee already exists")
    else:
        employees.append(employee)
        print("Employee added succesfully")
def add_manager():
    """Prompt for manager details and append a new Manager to the roster.

    BUG FIX: the original `manager not in managers` check compared
    identity on a freshly built object, so it never detected a duplicate,
    and the success message was printed even on the duplicate path.
    """
    name = input("Name: ")
    age = input("Age: ")
    salary = input("Salary: ")
    employment_date = input("Date of employment (year): ")
    bonus_percentage = input("Bonus (%): ")
    manager = Manager(name, age, salary, employment_date, bonus_percentage)
    duplicate = any(
        m.name == manager.name and m.age == manager.age
        and m.salary == manager.salary
        and m.employment_date == manager.employment_date
        and m.bonus_percentage == manager.bonus_percentage
        for m in managers
    )
    if duplicate:
        print("Manager already exists")
    else:
        managers.append(manager)
        print("Manager added succesfully")
# Simple interactive console menu; loops until the user chooses "5. Exit".
while True:
    print("""\nWelcome to HR Pro 2019 \nChoose an action to do:\n1. Show employees\n2. Show managers\n3. Add an employee\n4. Add a manager\n5. Exit""")
    # NOTE(review): no input validation — non-numeric input raises
    # ValueError and terminates the loop with a traceback.
    x = int(input("\nWhat would you like to do? "))
    if x == 1:
        # List every employee, or prompt to add one if the roster is empty.
        if len(employees) != 0:
            print("\nEmployees:")
            for employee in employees:
                print(employee.__str__())
        else:
            print("Please add an employee")
    elif x == 2:
        # List every manager (bonus included via Manager.__str__).
        if len(managers) != 0:
            print("\nManagers:")
            for manager in managers:
                print(manager.__str__())
        else:
            print("Please add a manager")
    elif x == 3:
        add_employee()
    elif x == 4:
        add_manager()
    elif x == 5:
break | sarah-am/python | classes_task.py | classes_task.py | py | 2,347 | python | en | code | 0 | github-code | 13 |
70696625299 | import cv2
img = cv2.imread("Tut1_itachi_uchiha.jpg")
# img.shape is (height, width, channels) for a colour image.
print(img.shape)
w, h = 1000, 600
# cv2.resize takes the target size as (width, height).
imgResize = cv2.resize(img, (w, h))
print(imgResize.shape)
# Crop with NumPy slicing: img[row_start:row_end, col_start:col_end],
# i.e. the first slice is along the height, the second along the width.
imgCropped = img[300:470, 1100:1280]
# To blow the crop back up to the original image size, resize it to
# (original width, original height) = (img.shape[1], img.shape[0]).
imgCropResize = cv2.resize(imgCropped, (img.shape[1], img.shape[0]))
cv2.imshow("Itachi", img)
# cv2.imshow("Itachii", imgResize)
cv2.imshow("Itachiii", imgCropped) # cropped region: just the sharingan (eye)
cv2.imshow("Itachiiii", imgCropResize) # the same crop scaled to full size
cv2.waitKey(0) | anant-harryfan/Python_basic_to_advance | PythonTuts/Python_other_tuts/murtaza_workshop/open-cv/Tut3_Crop_and_Resize_Images.py | Tut3_Crop_and_Resize_Images.py | py | 663 | python | en | code | 0 | github-code | 13 |
16508287934 | import sys
import pandas as pd
import sqlite3
from sqlalchemy import create_engine
def load_data(messages_filepath, categories_filepath):
    """Read the messages and categories CSVs and inner-join them on ``id``.

    Args:
        messages_filepath: path to the messages CSV file.
        categories_filepath: path to the categories CSV file.

    Returns:
        The merged DataFrame.
    """
    messages = pd.read_csv(messages_filepath)
    categories = pd.read_csv(categories_filepath)
    return messages.merge(categories, how='inner', on='id')
def clean_data(df):
    """Expand the ``categories`` column into binary indicator columns.

    Splits the semicolon-separated "name-value" tokens into one integer
    column per category, clamps values to {0, 1}, removes constant
    columns, duplicates and rows with missing values.

    Args:
        df: merged messages/categories DataFrame from ``load_data``.

    Returns:
        The cleaned DataFrame (the input frame is not mutated).
    """
    # One column per "name-value" token, e.g. "related-1;request-0".
    categories = df['categories'].str.split(';', expand=True)
    # Derive column names from the first row ("related-1" -> "related").
    # (The original used .loc[1, :], which requires a row labelled 1 and
    # breaks on single-row input; positional iloc[0] is robust.)
    first_row = categories.iloc[0]
    categories.columns = [value[:-2] for value in first_row]
    for column in categories:
        # Keep only the digit after the dash and coerce to int.
        categories[column] = categories[column].apply(
            lambda x: x.split('-')[1]).astype(int)
    # Some source rows carry a 2; clamp everything to {0, 1}.
    categories = categories.clip(0, 1)
    # Columns with a single constant value carry no signal.
    constant_columns = [c for c in categories.columns
                        if len(categories[c].unique()) == 1]
    categories = categories.drop(constant_columns, axis=1)
    # BUG FIX: the original called drop(columns=..., index=1, inplace=True),
    # which also discarded the data row labelled 1 (and mutated the caller's
    # frame). Drop only the two columns.
    df = df.drop(columns=['categories', 'original'])
    df = pd.concat([df, categories], axis=1)
    # drop duplicates and rows with missing values
    df = df.drop_duplicates()
    df = df.dropna()
    return df
def save_data(df, database_filename):
    """Persist *df* into a SQLite database, replacing any existing table.

    Args:
        df: DataFrame to store.
        database_filename: SQLite database file name (also used as the
            table name, matching the original behaviour).
    """
    db_url = 'sqlite:///' + database_filename
    engine = create_engine(db_url)
    df.to_sql(database_filename, engine, index=False, if_exists='replace')
def main():
    """Command-line entry point.

    Usage: ``python process_data.py <messages_csv> <categories_csv> <db_file>``
    Loads the two CSVs, cleans the merged frame and stores it in SQLite;
    prints usage help when the argument count is wrong.
    """
    if len(sys.argv) == 4:
        messages_filepath, categories_filepath, database_filepath = sys.argv[1:]
        print('Loading data...\n    MESSAGES: {}\n    CATEGORIES: {}'
              .format(messages_filepath, categories_filepath))
        df = load_data(messages_filepath, categories_filepath)
        print('Cleaning data...')
        df = clean_data(df)
        print('Saving data...\n    DATABASE: {}'.format(database_filepath))
        save_data(df, database_filepath)
        print('Cleaned data saved to database!')
    else:
        print('Please provide the filepaths of the messages and categories '
              'datasets as the first and second argument respectively, as '
              'well as the filepath of the database to save the cleaned data '
              'to as the third argument. \n\nExample: python process_data.py '
              'disaster_messages.csv disaster_categories.csv '
              'DisasterResponse.db')


if __name__ == '__main__':
    main()
| tmbothe/disaster-response-pipeline-project | data/process_data.py | process_data.py | py | 2,933 | python | en | code | 0 | github-code | 13 |
11612162443 | import os
class BehaviourHandler:
    """Builds the JSON payloads an Alexa skill returns to the service."""

    def build_speechlet_response(self, card_title, speech_output, reprompt_text, should_end_session):
        """Assemble the ``response`` section of an Alexa skill reply.

        The card title/content are prefixed with the client display name
        taken from the CLIENT_DISPLAY_NAME environment variable.
        """
        display_name = os.environ['CLIENT_DISPLAY_NAME']
        return {
            'outputSpeech': {'type': 'PlainText', 'text': speech_output},
            'card': {
                'type': 'Simple',
                'title': display_name + " - " + card_title,
                'content': display_name + " - " + speech_output,
            },
            'reprompt': {
                'outputSpeech': {'type': 'PlainText', 'text': reprompt_text},
            },
            'shouldEndSession': should_end_session,
        }

    def build_response(self, session_attributes, speechlet_response):
        """Wrap a speechlet response in the top-level Alexa envelope."""
        return {
            'version': '1.0',
            'sessionAttributes': session_attributes,
            'response': speechlet_response,
        }
| fibonascii/cloud-automation | lambda/alexa-skill-lod-rest/behaviour_handlers.py | behaviour_handlers.py | py | 974 | python | en | code | 0 | github-code | 13 |
39279261866 | import matplotlib.pyplot as plt
import numpy as np
from pyautocad import Autocad, APoint
import math
class Node:
    """A joint of the structure: coordinates, loads, supports and results."""

    def __init__(self, x, y, fx, fy, s, disx, disy, dx, dy, mz):
        # Position in the plane.
        self.x, self.y = x, y
        # Applied horizontal/vertical force components.
        self.fx, self.fy = fx, fy
        # Support marker (presumably a support-type code — TODO confirm).
        self.s = s
        # Prescribed displacements (presumably -1.0 means "free" — TODO confirm).
        self.disx, self.disy = disx, disy
        # Computed displacement results.
        self.dx, self.dy = dx, dy
        # Moment about the Z axis.
        self.mz = mz
class Member:
    """A truss bar connecting a near node ``n`` and a far node ``f``."""

    def __init__(self, n, f, l, cx, cy, qf):
        self.n, self.f = n, f        # end nodes (Node instances)
        self.l = l                   # member length
        self.cx, self.cy = cx, cy    # direction cosines
        self.qf = qf                 # axial force (filled in after solving)
bp =2
ln=8
n= []
m= []
unconstrained=[]
constrained= []
gd = 0
lsum=0.0
p=int(input("Enter the no. of nodes you want to add: "))
for i in range(p):
n.append(Node(0.0,0.0,0.0,0.0,' ',-1.0,-1.0, 0, 0,-1.0))
print("Enter X & Y coordinates of node", (i+1), ", seperated by space")
n[i].x, n[i].y=map(float,input().split())
ln=len(n)*2
ssm = [[0 for col in range(ln)] for row in range(ln)];
Q= [0.0 for i in range(ln)]
D= [0.0 for i in range(ln)]
for i in range(p):
unconstrained.append(i)
nm=int(input("Enter the no. of Members you want to add: "))
for i in range(nm):
print("Add Members by giving nodes seperated by space")
near, far=map(int,input().split())
m.append(Member(n[near-1],n[far-1],0.0,0.0,0.0,0.0))
xd=(m[i].f.x-m[i].n.x)
yd=(m[i].f.y-m[i].n.y)
l=math.sqrt( (xd**2)+(yd**2) )
lsum+=l;
cx=xd/l
cy=yd/l
m[i].l=l
m[i].cx=cx
m[i].cy=cy
print('cx=', cx)
print('cy=', cy)
lavg=lsum/nm
p=int(input("Enter no. of Nodes with Horizontal Force-"))
if p>0:
print("Apply Horizontal force on Members")
else:
print("OK")
for i in range(p):
node=int(input("Node No.: "))
Fx=float(input("Fx= "))
n[node-1].fx=Fx
unconstrained.remove(node-1)
constrained.append(node-1)
p=int(input("Enter no. of Nodes with Vertical Force-"))
if p>0:
print("Apply Vertical force on Members")
else:
print("OK")
try:
for i in range(p):
node=int(input("Node No.: "))
Fy=float(input("Fy= "))
n[node-1].fy=Fy
for i in range(len(unconstrained)):
if(unconstrained[i]==node-1):
unconstrained.remove(node-1)
constrained.append(node-1)
except:
pass
#A=int(input("Enter Cross-Sectional Area of Members-"))
#E=int(input("Enter Modulus of Elasticity of Members-"))
p=int(input("Enter no. of Horizontal Roller Support-"))
if p>0:
print("Apply Roller Support on nodes")
else:
print("OK")
for i in range(p):
node=int(input("Node No.: "))
n[node-1].fx=0.0
n[node-1].fy=-1.0
n[node-1].disy=0.0
n[node-1].s='rh'
unconstrained.remove(node-1)
constrained.append(node-1)
p=int(input("Enter no. of Vertical Roller Support-"))
if p>0:
print("Apply Roller Support on nodes")
else:
print("OK")
for i in range(p):
node=int(input("Node No.: "))
n[node-1].fx=-1.0
n[node-1].fy=0.0
n[node-1].disx=0.0
n[node-1].s='rv'
unconstrained.remove(node-1)
constrained.append(node-1)
p=int(input("Enter no. of Nodes Pin Support-"))
if p>0:
print("Apply Pin Support on nodes")
else:
print("OK")
for i in range(p):
node=int(input("Node No.: "))
n[node-1].fx=-1.0
n[node-1].fy=-1.0
n[node-1].disx=0.0
n[node-1].disy=0.0
n[node-1].s='pin'
unconstrained.remove(node-1)
constrained.append(node-1)
## p=int(input("Enter no. of Fixed Support-"))
## if p>0:
## print("Apply on Fixed Nodes")
## else:
## print("OK")
## for i in range(p):
## node=int(input("Node No.: "))
## n[node-1].fx=-1.0
## n[node-1].fy=-1.0
## n[node-1].mz=-1.0
## n[node-1].disx=0.0
## n[node-1].disy=0.0
## n[node-1].s='f'
## unconstrained.remove(node-1)
## constrained.append(node-1)
###################################################################
acad = Autocad(create_if_not_exists=True)
acad.prompt("Please open AutoCAD and create a new file to view your structure, press Enter when done\n")
input()
print(acad.doc.Name)
sc=lavg/10
#####################################################
LayerObj = acad.ActiveDocument.Layers.Add("Node no.")
acad.ActiveDocument.ActiveLayer = LayerObj
ClrNum = 150#blue
LayerObj.color = ClrNum
for i in range(len(n)):
p1 = APoint(n[i].x, n[i].y)
text = acad.model.AddText('N%s' % (i+1),p1, .5*sc)
#####################################################
LayerObj = acad.ActiveDocument.Layers.Add("Members")
acad.ActiveDocument.ActiveLayer = LayerObj
ClrNum = 30#orange
LayerObj.color = ClrNum
#LayerObj.Lineweight = 30
for i in range(nm):
p1 = APoint(m[i].n.x, m[i].n.y)
p2 = APoint(m[i].f.x, m[i].f.y)
acad.model.AddLine(p1, p2)
x1=(m[i].n.x+m[i].f.x)/2.0
y1=(m[i].n.y+m[i].f.y)/2.0+.2
text = acad.model.AddText('M%s' % (i+1), APoint(x1,y1), .08*lavg)
#####################################################
for i in range(len(n)):
LayerObj = acad.ActiveDocument.Layers.Add("Force")
acad.ActiveDocument.ActiveLayer = LayerObj
ClrNum = 82#dark green
LayerObj.color = ClrNum
#LayerObj.Lineweight = 25
if n[i].fx!=0.0 and n[i].fx!=-1.0:
p1 = APoint(n[i].x, n[i].y)
p2 = APoint(n[i].x-n[i].fx*sc, n[i].y)
acad.model.AddLine(p1, p2)
text = acad.model.AddText('F=%0.1fk'%n[i].fx,p2, .04*lavg)
p1 = APoint(n[i].x-0.215*n[i].fx*sc, n[i].y+0.125*n[i].fx*sc)
p2 = APoint(n[i].x, n[i].y)
acad.model.AddLine(p1, p2)
p1 = APoint(n[i].x-0.215*n[i].fx*sc, n[i].y-0.125*n[i].fx*sc)
p2 = APoint(n[i].x, n[i].y)
acad.model.AddLine(p1, p2)
if n[i].fy!=0 and n[i].fy!=-1.0:
p1 = APoint(n[i].x, n[i].y)
p2 = APoint(n[i].x, n[i].y-n[i].fy*sc)
acad.model.AddLine(p1, p2)
text = acad.model.AddText('F=%0.1fk'%n[i].fy,p2, .04*lavg)
p1 = APoint(n[i].x+0.125*n[i].fy*sc, n[i].y-0.215*n[i].fy*sc)
p2 = APoint(n[i].x, n[i].y)
acad.model.AddLine(p1, p2)
p1 = APoint(n[i].x-0.125*n[i].fy*sc, n[i].y-0.215*n[i].fy*sc)
p2 = APoint(n[i].x, n[i].y)
acad.model.AddLine(p1, p2)
#####################################################
LayerObj = acad.ActiveDocument.Layers.Add("Support")
acad.ActiveDocument.ActiveLayer = LayerObj
ClrNum = 240#brown
LayerObj.color = ClrNum
#LayerObj.Lineweight = 18
if n[i].s=='pin':
p1 = APoint(n[i].x, n[i].y)
p2 = APoint(n[i].x+0.65*sc, n[i].y-0.75*sc)
acad.model.AddLine(p1, p2)
p1 = APoint(n[i].x-0.65*sc, n[i].y-0.75*sc)
p2 = APoint(n[i].x, n[i].y)
acad.model.AddLine(p1, p2)
p1 = APoint(n[i].x+0.65*sc, n[i].y-0.75*sc)
p2 = APoint(n[i].x-0.65*sc, n[i].y-0.75*sc)
acad.model.AddLine(p1, p2)
if n[i].s=='rh':
p1 = APoint(n[i].x, n[i].y-0.4*sc)
acad.model.AddCircle(p1, 0.4*sc)
p1 = APoint(n[i].x+1*sc, n[i].y-0.8*sc)
p2=APoint(n[i].x-1*sc, n[i].y-0.8*sc)
acad.model.AddLine(p1, p2)
if n[i].s=='rv':
p1 = APoint(n[i].x+0.4*sc, n[i].y)
acad.model.AddCircle(p1, 0.4*sc)
p1 = APoint(n[i].x+0.8*sc, n[i].y-1*sc)
p2=APoint(n[i].x+0.8*sc, n[i].y+1*sc)
acad.model.AddLine(p1, p2)
#########################################################################
gd=0
for i in range (len(unconstrained)):
n[unconstrained[i]].dx=gd
gd=gd+1
n[unconstrained[i]].dy=gd
gd=gd+1
for i in range (len(constrained)):
if n[constrained[i]].s=='rv':
n[constrained[i]].dy=gd
gd=gd+1
n[constrained[i]].dx=gd
gd=gd+1
else:
n[constrained[i]].dx=gd
gd=gd+1
n[constrained[i]].dy=gd
gd=gd+1
#################################################
sc=sc*0.5
LayerObj = acad.ActiveDocument.Layers.Add("Node Direction Vectors")
acad.ActiveDocument.ActiveLayer = LayerObj
ClrNum = 6#magenta
LayerObj.color = ClrNum
#LayerObj.Lineweight = 5
for i in range(len(n)):
p1 = APoint(n[i].x+0.2*sc, n[i].y)
p2 = APoint(n[i].x+2.2*sc, n[i].y)
acad.model.AddLine(p1, p2)
p3 = APoint(p2.x-0.4*sc, p2.y+0.25*sc)
p4 = APoint(p2.x-0.4*sc, p2.y-0.25*sc)
acad.model.AddLine(p2, p3)
acad.model.AddLine(p2, p4)
text = acad.model.AddText(int(n[i].dx+1), APoint(p2.x+0.2*sc,p2.y), .02*lavg)
p1 = APoint(n[i].x, n[i].y+0.2*sc)
p2 = APoint(n[i].x, n[i].y+2.2*sc)
acad.model.AddLine(p1, p2)
p3 = APoint(p2.x+0.25*sc, p2.y-0.4*sc)
p4 = APoint(p2.x-0.25*sc, p2.y-0.4*sc)
acad.model.AddLine(p2, p3)
acad.model.AddLine(p2, p4)
text = acad.model.AddText(int(n[i].dy+1), APoint(p2.x,p2.y+0.2*sc), .02*lavg)
print("PLOTTED SUCCESSFULLY:)");
#####################################################
# Assemble the global structure stiffness matrix (AE factored out, so
# entries are c*c/L terms).  Each member adds its 2x2 blocks at the
# rows/columns addressed by its near- and far-node DOF numbers.
for i in range(ln):
    for j in range(ln):
        ssm[i][j] = 0.0
for i in range(nm):
    ssm[int(m[i].n.dx)][int(m[i].n.dx)] += (m[i].cx**2)/m[i].l
    ssm[int(m[i].n.dx)][int(m[i].n.dy)] += (m[i].cx*m[i].cy)/m[i].l
    ssm[int(m[i].n.dx)][int(m[i].f.dx)] += (-1*m[i].cx**2)/m[i].l
    ssm[int(m[i].n.dx)][int(m[i].f.dy)] += (-1*m[i].cx*m[i].cy)/m[i].l
    ssm[int(m[i].n.dy)][int(m[i].n.dx)] += (m[i].cx*m[i].cy)/m[i].l
    ssm[int(m[i].n.dy)][int(m[i].n.dy)] += (m[i].cy**2)/m[i].l
    ssm[int(m[i].n.dy)][int(m[i].f.dx)] += (-1*m[i].cx*m[i].cy)/m[i].l
    ssm[int(m[i].n.dy)][int(m[i].f.dy)] += (-1*m[i].cy**2)/m[i].l
    # BUG FIX: the far-node rows below previously used plain assignment
    # ('='), so when several members share a far node the earlier
    # members' stiffness contributions were overwritten instead of
    # accumulated.  All contributions must be summed ('+=').
    ssm[int(m[i].f.dx)][int(m[i].n.dx)] += (-1*m[i].cx**2)/m[i].l
    ssm[int(m[i].f.dx)][int(m[i].n.dy)] += (-1*m[i].cx*m[i].cy)/m[i].l
    ssm[int(m[i].f.dx)][int(m[i].f.dx)] += (m[i].cx**2)/m[i].l
    ssm[int(m[i].f.dx)][int(m[i].f.dy)] += (m[i].cx*m[i].cy)/m[i].l
    ssm[int(m[i].f.dy)][int(m[i].n.dx)] += (-1*m[i].cx*m[i].cy)/m[i].l
    ssm[int(m[i].f.dy)][int(m[i].n.dy)] += (-1*m[i].cy**2)/m[i].l
    ssm[int(m[i].f.dy)][int(m[i].f.dx)] += (m[i].cx*m[i].cy)/m[i].l
    ssm[int(m[i].f.dy)][int(m[i].f.dy)] += (m[i].cy**2)/m[i].l
##print('constrained')
##for i in range(len(constrained)):
## print(constrained[i])
##print('unconstrained')
##for i in range(len(unconstrained)):
## print(unconstrained[i])
# Echo the assembled (AE-normalised) structure stiffness matrix.
print("STRUCTURE STIFFNESS MATRIX")
for i in range(ln):
    for j in range(ln):
        print('%.3f' % ssm[i][j], end ="\t")
    print()
# Scatter the nodal forces / prescribed displacements into the global
# load vector Q and displacement vector D, indexed by DOF number.
# (-1.0 marks an unknown quantity throughout this script.)
for i in range(len(n)):
    Q[n[i].dx]=n[i].fx
    Q[n[i].dy]=n[i].fy
    D[n[i].dx]=n[i].disx
    D[n[i].dy]=n[i].disy
# bp = index of the first constrained DOF: DOFs were numbered so all
# unconstrained (known-force) DOFs come first, so the first -1.0 in Q
# marks the partition boundary.
for i in range(ln):
    if(Q[i]==-1.0):
        bp=i
        break;
sQ= []
sD= []
sM= [[0 for col in range(bp)] for row in range(bp)];
sM1= [[0 for col in range(bp)] for row in range(ln-bp)];
# Top-left partition: K11 * Du = Qk, solved for the unknown displacements.
for i in range(bp):
    sQ.append(Q[i])
    for j in range(bp):
        sM[i][j]=ssm[i][j]
x= np.linalg.solve(sM, sQ)
for i in range(bp):
    D[i]=x[i]
# Copy the solved displacements back onto the nodes.
for i in range(len(n)):
    n[i].disx=D[n[i].dx]
    n[i].disy=D[n[i].dy]
# Lower-left partition K21: back-substitute to get the support reactions,
# which overwrite the -1.0 markers in Q.
for i in range(bp,ln):
    for j in range(bp):
        sM1[i-bp][j]=ssm[i][j]
for i in range(ln-bp):
    Q[bp+i]=0.0
    for j in range(bp):
        Q[bp+i]+=sM1[i][j]*D[j]
# Displacements are reported in units of 1/AE (AE was factored out).
for i in range(ln):
    print('Q',(i+1),'=%.3f'%Q[i],'k\t','D',(i+1),'=%.3f'%D[i],'/AE')
# Member axial force: q = (AE/L) * [-cx -cy cx cy] . d  (AE normalised).
for i in range(nm):
    m[i].qf=(-m[i].cx*m[i].n.disx-m[i].cy*m[i].n.disy+m[i].cx*m[i].f.disx+m[i].cy*m[i].f.disy)/m[i].l
    print('Force in M',(i+1),'=%.3f' %(m[i].qf),'k')
import pandas as pd
import matplotlib
from pylab import title, figure, xlabel, ylabel, xticks, bar, legend, axis, savefig
from fpdf import FPDF
import webbrowser as wb
from pyautocad import Autocad, APoint
df = pd.DataFrame()
df['Code No.'] = []
df['Q'] = []
df['D'] = []
columns = list(df)
data = []
for i in range(4, 10, 3):
values = [i, i+1, i+2]
zipped = zip(columns, values)
a_dictionary = dict(zipped)
data.append(a_dictionary)
df = df.append(data, True)
df1 = pd.DataFrame()
df1['Member No.'] = ["Q1", "Q2", "Q3", "Q4","Q5"]
df1['Length'] = [3, 4, 5, 3,0]
df1['Force'] = [4.505, -19.003, 0, 0,0]
columns = list(df1)
data = []
############################################################################################################################
df = pd.DataFrame()
df['Code No.'] = []
df['Q'] = []
df['D'] = []
columns = list(df)
data = []
for i in range(ln):
values = [i+1, Q[i], D[i]]
zipped = zip(columns, values)
a_dictionary = dict(zipped)
data.append(a_dictionary)
df = df.append(data, True)
df1 = pd.DataFrame()
df1['Member No.'] = []
df1['Length'] = []
df1['Force'] = []
columns1 = list(df1)
data1 = []
for i in range(nm):
values1 = [i+1, m[i].l, m[i].qf]
zipped1 = zip(columns1, values1)
a_dictionary1 = dict(zipped1)
data1.append(a_dictionary1)
df1 = df1.append(data1, True)
acad = Autocad(create_if_not_exists=True)
acad.prompt("Please open AutoCAD and create a new file to view your structure\n")
print(acad.doc.Name)
acad.prompt("Click the save button in AutoCAD popup dialog box, Remember not to change the name")
acad.doc.SendCommand('_-PLOT'' ''N''\n''MODEL''\n''\n''PublishToWeb PNG.pc3' '\n''\n''\n')
pdf = FPDF()
pdf.add_page()
pdf.set_xy(0, 0)
pdf.set_font('times', 'B', 35)
pdf.set_text_color(0,90,0)
pdf.cell(60)
pdf.cell(90, 25, "Truss Analysis Report", 0, 2, 'C')
pdf.set_text_color(0,10,0)
pdf.cell(120, 80, " ", 0, 2, 'C')
pdf.cell(-40)
pdf.set_font('arial', 'B', 12)
pdf.cell(50, 10, 'Code No.', 1, 0, 'C')
pdf.cell(40, 10, 'Q', 1, 0, 'C')
pdf.cell(40, 10, 'D', 1, 2, 'C')
pdf.cell(-90)
pdf.set_font('arial', '', 12)
for i in range(0, len(df)):
pdf.cell(50, 10, '%s' % (df['Code No.'].iloc[i]), 1, 0, 'C')
pdf.cell(40, 10, '%s' % (str(df.D.iloc[i])), 1, 0, 'C')
pdf.cell(40, 10, '%s' % (str(df.Q.iloc[i])), 1, 2, 'C')
pdf.cell(-90)
pdf.set_font('times', 'B', 18)
pdf.image('Truss6m-Model.png', x = 25, y = 20, w =0, h = 75, type = '', link = '')
pdf.image('AnalyseIT1.png', x = 20, y = 2, w = 20, h = 20, type = '', link = '')
pdf.cell(150, 10, "_______________________Member Properties_________________________", 0, 2, 'C')
pdf.set_font('arial', 'B', 12)
pdf.cell(50, 10, 'Member No.', 1, 0, 'C')
pdf.cell(40, 10, 'Length', 1, 0, 'C')
pdf.cell(40, 10, 'Force', 1, 2, 'C')
pdf.cell(-90)
pdf.set_font('arial', '', 12)
for i in range(0, len(df1)):
pdf.cell(50, 10, '%s' % (df1['Member No.'].iloc[i]), 1, 0, 'C')
pdf.cell(40, 10, '%s' % (str(df1.Length.iloc[i])), 1, 0, 'C')
pdf.cell(40, 10, '%s' % (str(df1.Force.iloc[i])), 1, 2, 'C')
pdf.cell(-90)
pdf.cell(90, 10, " ", 0, 2, 'C')
pdf.cell(0)
pdf.output('Report.pdf', 'F')
wb.open_new(r'C:\Users\BK GAUTAM\Documents\Report.pdf')
| ShivamGautam98/AnalyseIT | AnalyseIT.py | AnalyseIT.py | py | 14,540 | python | en | code | 1 | github-code | 13 |
809709057 | from django.urls import path
from books.views import BooksViewSet
# Map HTTP verbs onto BooksViewSet actions for the collection endpoint:
# POST -> create a book, GET -> list all books.
books_create_list = BooksViewSet.as_view({
    "post": "create",
    "get": "list",
    }
)
# Detail endpoint for a single book, addressed by primary key.
books_detail = BooksViewSet.as_view({
    'get': 'retrieve',
    'put': 'update',
    'patch': 'partial_update',
    'delete': 'destroy'
})
# URL table: '' serves the collection, '<int:pk>/' a single record.
urlpatterns = [
    path('', books_create_list, name='create_list_books'),
    path('<int:pk>/', books_detail, name="get_list-books")
]
| Adoniswalker/books_publisher | books/urls.py | urls.py | py | 432 | python | en | code | 1 | github-code | 13 |
40961254112 | import os
from glob import glob
from setuptools import setup
package_name = 'turtlebot3_controller'

# ament_python package manifest for the turtlebot3 controller nodes.
setup(
    name=package_name,
    version='0.0.0',
    packages=[package_name],
    data_files=[
        # Register the package with the ament resource index.
        ('share/ament_index/resource_index/packages',
            ['resource/' + package_name]),
        ('share/' + package_name, ['package.xml']),
        # Install the single launch file into the package share directory.
        (os.path.join('share', package_name), glob('launch/pw4_turtlebot3_controller.launch.py')),
        #(os.path.join('share', package_name, 'launch'), glob('launch/*.launch.py')),
        #(os.path.join('share', package_name, 'config'), glob('config/*.yaml'))
    ],
    install_requires=['setuptools'],
    zip_safe=True,
    maintainer='thomas',
    maintainer_email='thomas@todo.todo',
    description='TODO: Package description',
    license='TODO: License declaration',
    tests_require=['pytest'],
    entry_points={
        # Executables exposed to `ros2 run turtlebot3_controller <name>`.
        'console_scripts': [
            "scan_subscriber = turtlebot3_controller.scan_subscriber:main",
            "scan_subscriber_pw4 = turtlebot3_controller.scan_subscriber_pw4:main",
            'serverpw5 = turtlebot3_controller.serverpw5:main',
            'question3 = turtlebot3_controller.question3:main',
            'client = turtlebot3_controller.client:main',
        ],
    },
)
| ThomasMarcal/Projet-A4 | ROS - Robot Operating System/turtlebot3_controller/setup.py | setup.py | py | 1,272 | python | en | code | 0 | github-code | 13 |
15481907671 | import string
import random
from core.utils import Generator
class Main(Generator):
    """Worksheet generator: match each printed capital letter with its
    handwritten lowercase form."""

    name = 'Alphabet match upper case with lowercase'
    years = [4, 5]
    # Directions are in Romanian: "circle the bullet of the handwritten
    # letter that matches the printed one".
    directions = 'Incercuiti bulina literei scrise de mana care corespunde cu litera scrisa de tipar'
    template = 'generators/alphabet_match_upper_lower.html'

    def generate_data(self):
        """Build ``self.count`` exercises.  Each exercise maps 6 distinct
        capitals to 4 lowercase options that always include the match."""
        uppers = string.ascii_uppercase
        lowers = string.ascii_lowercase
        exercises = []
        for _ in range(self.count):
            exercise = {}
            # Keep drawing capitals until the exercise holds 6 distinct ones.
            while len(exercise) != 6:
                capital = random.choice(uppers)
                options = random.sample(lowers, 4)
                # Guarantee the correct answer is among the options.
                if capital.lower() not in options:
                    options[0] = capital.lower()
                random.shuffle(options)
                if capital not in exercise:
                    exercise[capital] = options
            exercises.append(exercise)
        self.data = exercises
        return exercises

    def get_context_data(self, iteration):
        """Narrow the inherited context to the items of one worksheet page."""
        context = super(Main, self).get_context_data(iteration)
        context['items'] = context['items'][iteration]
        return context
| opencbsoft/kids-worksheet-generator | application/core/generators/alphabet_match_upper_lower.py | alphabet_match_upper_lower.py | py | 1,173 | python | en | code | 1 | github-code | 13 |
25413274883 | #!/usr/bin/env python3
'''
Purpose:
Read a MGIReferences sample file, and for samples that have empty
extracted text, locate their PDF and extract the text from it, and
save that as their extracted text in the sample file.
No changes to the text except for removal of field separators,
record endings, and non-ascii characters.
Outputs: Delimited file to specified output file.
See MGIReference.Sample for output format
'''
import sys
import os
import time
import argparse
import subprocess
import unittest
import db
import Pdfpath
#import extractedTextSplitter
import MGIReference as sampleLib
from utilsLib import removeNonAscii
#-----------------------------------
sampleObjType = sampleLib.MGIReference
# for the Sample output file
RECORDEND = sampleObjType.getRecordEnd()
FIELDSEP = sampleObjType.getFieldSep()
MINTEXTLENGTH = 500 # skip refs with extracted text shorter than this
LONGTIME = 60 # num of seconds. If a pdf extraction takes longer
# than this, report it.
#-----------------------------------
def getArgs():
    """Define and parse the command line.

    Returns the argparse namespace with: ``sampleFile`` (positional),
    ``limit`` (int, 0 = no limit) and ``verbose`` (True unless -q given).
    """
    p = argparse.ArgumentParser(description='Get extracted text from PDFs.')
    p.add_argument('sampleFile', action='store',
                   help='the sample file to read and update.')
    p.add_argument('-l', '--limit', dest='limit', required=False,
                   type=int, default=0,  # 0 means ALL
                   help="only extract text for up to n references. Default is no limit")
    p.add_argument('-q', '--quiet', dest='verbose', action='store_false',
                   required=False, help="skip helpful messages to stderr")
    return p.parse_args()
#-----------------------------------
args = getArgs()
#-----------------------------------
def main():
    """Fill in missing extracted text in a sample file.

    Reads the sample file named on the command line, and for every
    sample whose 'extractedText' field is empty, extracts the text from
    the reference's archived PDF, cleans it, and stores it back on the
    sample.  The whole set is then rewritten to the same file and
    summary counts are reported.
    """
    startTime = time.time()
    sampleSet = sampleLib.SampleSet(sampleObjType).read(args.sampleFile)
    numAlready = 0    # num of samples that already have extracted text
    numAttempted = 0  # num of samples we attempted to extract text for
    numExtracted = 0  # num of samples we successfully extracted text for
    numErrors = 0     # num of samples with errors during text extraction
    for sample in sampleSet.getSamples():
        if len(sample.getField('extractedText')) > 0:
            numAlready += 1
        else:
            mgiID = sample.getField('ID')
            numAttempted += 1
            verbose("Extracting text for %s\n" % mgiID)
            pdfStart = time.time()
            text, error = getText4Ref_fromPDF(mgiID)
            elapsedTime = time.time() - pdfStart
            # Flag unusually slow extractions so they can be investigated.
            if elapsedTime > LONGTIME:
                verbose("%s extraction took %8.3f seconds\n" \
                    % (mgiID, elapsedTime) )
            if error:
                verbose("Error extracting text for %s:\n%s" % (mgiID, error))
                numErrors += 1
            else:
                text = cleanUpTextField(text)
                sample.setField('extractedText', text)
                numExtracted += 1
        # --limit bounds the number of *attempted* extractions; the
        # default of 0 never matches, i.e. no limit.
        if numAttempted == args.limit: break
    sampleSet.write(args.sampleFile)
    verbose('\n')
    verbose("wrote %d samples to '%s'\n" % (sampleSet.getNumSamples(),
                                                    args.sampleFile))
    verbose("Samples seen with text already: %d\n" % numAlready)
    verbose("Samples with new text added: %d\n" % numExtracted)
    verbose("Samples with text extraction errors: %d\n" % numErrors)
    verbose("%8.3f seconds\n\n" % (time.time()-startTime))
#-----------------------------------
def getText4Ref_fromPDF(mgiID):
    """Extract text for one reference from its archived PDF.

    Returns (text, error): the extracted text (str) and None on
    success, or '' and an error message when extraction fails.
    """
    base = '/data/littriage'
    # IDs look like 'MGI:12345'; the PDF file is named after the numeric part.
    prefix, numeric = mgiID.split(':')
    pdfDir = Pdfpath.getPdfpath(base, mgiID)
    return extractTextFromPdf(os.path.join(pdfDir, numeric + '.pdf'))
#-----------------------------------
def extractTextFromPdf(pdfPathName):
    """Run the litparser extraction script on one PDF.

    Returns (text, error): the script's stdout and None on success,
    otherwise '' and a message containing the exit code, the command
    line, and the script's stderr/stdout.
    """
    litparserDir = '/usr/local/mgi/live/mgiutils/litparser'
    cmd = [os.path.join(litparserDir, 'pdfGetFullText.sh'), pdfPathName]
    proc = subprocess.run(cmd, capture_output=True, text=True)
    if proc.returncode == 0:
        return proc.stdout, None
    error = "pdftotext error: %d\n%s\n%s\n%s\n" % \
        (proc.returncode, ' '.join(cmd), proc.stderr, proc.stdout)
    return '', error
#-----------------------------------
def cleanUpTextField(text):
    """Normalize extracted text: treat None as empty, blank out the
    sample-file delimiter characters, and drop non-ascii characters."""
    cleaned = '' if text is None else text
    return removeNonAscii(cleanDelimiters(cleaned))
#-----------------------------------
def cleanDelimiters(text):
    """Replace record-end and field-separator characters with spaces so
    the text cannot corrupt the delimited sample file."""
    cleaned = text
    for delim in (RECORDEND, FIELDSEP):
        cleaned = cleaned.replace(delim, ' ')
    return cleaned
#-----------------------------------
def verbose(text):
    """Write *text* to stdout (flushed) unless -q/--quiet was given."""
    if not args.verbose:
        return
    sys.stdout.write(text)
    sys.stdout.flush()
sys.stdout.flush()
#-----------------------------------
def doAutomatedTests():
    # Tests are deliberately disabled: the early return below skips the
    # unittest run.
    sys.stdout.write("No automated tests at this time\n")
    return
    sys.stdout.write("Running automated unit tests...\n")
    unittest.main(argv=[sys.argv[0], '-v'],)

class MyTests(unittest.TestCase):
    # NOTE(review): currently dead code -- doAutomatedTests() returns
    # before unittest.main() is reached.  'getText4Ref' is not defined
    # in this module (only getText4Ref_fromPDF, which returns a
    # (text, error) tuple), so this test would fail with a NameError if
    # re-enabled; confirm the intended helper before reviving it.
    def test_getText4Ref(self):
        t = getText4Ref('11943')		# no text
        self.assertEqual(t, '')
#-----------------------------------
if __name__ == "__main__":
main()
| nidak21/MGIreferences | sdGetExtText.py | sdGetExtText.py | py | 6,657 | python | en | code | 0 | github-code | 13 |
2089427942 | '''
Python utilities for the MDL
'''
import subprocess
#Function for serial port configuration using GPIO
def gpioconfig(port, RSmode, duplex, resistors, bias):
    '''
    MDL serial port configuration via the gpioset utility.

    port      - '/dev/ttyMAX0' .. '/dev/ttyMAX7'
    RSmode    - 'RS485' or 'RS232'
    duplex    - 'full' or 'half'
    resistors - 1 or 0
    bias      - 1 or 0

    Raises KeyError for an unknown port, RSmode or duplex value.
    '''
    mdlportnums = {
        '/dev/ttyMAX0':0,'/dev/ttyMAX1':1,
        '/dev/ttyMAX2':2,'/dev/ttyMAX3':3,
        '/dev/ttyMAX4':4,'/dev/ttyMAX5':5,
        '/dev/ttyMAX6':6,'/dev/ttyMAX7':7}
    RSmodes = {'RS485':1,'RS232':0}
    duplexval = {'full':0,'half':1}

    portnum = mdlportnums[port]
    # Ports 0-3 hang off gpiochip1, ports 4-7 off gpiochip2; each port
    # owns four consecutive GPIO lines (mode, duplex, resistors, bias).
    if portnum <= 3:
        gpiochip = 'gpiochip1'
        base = 4 * portnum
    elif portnum <= 7:
        gpiochip = 'gpiochip2'
        base = 4 * (portnum - 4)
    else:
        # Defensive guard: mdlportnums only maps 0-7 so this is
        # unreachable, but the original code merely printed 'error' and
        # then crashed on an undefined 'gpiochip'.
        raise ValueError('unsupported MDL port: {}'.format(port))
    gpiopins = [base, base + 1, base + 2, base + 3]

    RSset = '{}={}'.format(gpiopins[0], RSmodes[RSmode])
    duplexset = '{}={}'.format(gpiopins[1], duplexval[duplex])
    resistset = '{}={}'.format(gpiopins[2], resistors)
    biaset = '{}={}'.format(gpiopins[3], bias)
    gpiocmd = 'gpioset {} {} {} {} {}'.format(
        gpiochip, RSset, duplexset, resistset, biaset)
    print(gpiocmd)
    subprocess.run([gpiocmd], shell=True)
subprocess.run([gpiocmd],shell=True) | dyacon/pyMDL | pymdl/utilities/__init__.py | __init__.py | py | 1,330 | python | en | code | 0 | github-code | 13 |
10583180951 | n=input("enter a string : ")
letter = "T"
# BUG FIX: the original condition `letter in ele` counted words that
# merely CONTAIN a 'T' anywhere; the printed message promises words
# that START with 'T', so test the first character instead.
res = len([ele for ele in n.split() if ele.startswith(letter)])
print("Count of words that starts with T : " + str(res))
#other process
'''
a=input("enter the string")
def words(string):
count=0
for word in string:
if word[0]=='T':
count=count+1
print(count)
words(a)
'''
'''
n = "There is a Tortoise"
print("The string is : " + str(n))
letter = "T"
res = len([ele for ele in n.split() if letter in ele])
print("Count of words that starts with T : " + str(res))
'''
| Mrudula1807/Python-Programming- | count words begins with t.py | count words begins with t.py | py | 603 | python | en | code | 0 | github-code | 13 |
1122833123 | import numpy as np
from flask import Flask,request ,jsonify ,render_template
import pickle
import sklearn
from werkzeug.debug import console
app = Flask(__name__)
model = pickle.load(open('randomforest.h5' , 'rb'))
@app.route('/')
def home():
    # Serve the admission-prediction input form.
    return render_template('Demo2.html')
@app.route('/y_predict' ,methods = ['POST'])
def y_predict():
    # Collect every submitted form field, in form order, as one feature row.
    x_test =[[float(x) for x in request.form.values()]]
    prediction = model.predict(x_test)
    print(prediction)
    # NOTE(review): indexing [0][0] assumes the model returns a 2-D
    # array (one row, one column) -- confirm against the pickled
    # estimator's predict() output shape.
    output=prediction[0][0]
    return render_template('Demo2.html', prediction_text='Probability of admission {}'.format(output))
if __name__ == "__main__":
app.run(debug=True) | SmartPracticeschool/llSPS-INT-2868-University-Admission-Prediction | app.py | app.py | py | 669 | python | en | code | 0 | github-code | 13 |
41835046810 | # from PIL import Image
import argparse
import os
import sys
import cv2
import numpy as np
import math
import json
from PIL import Image, ImageDraw, ImageFont
import matplotlib.pyplot as plt
def draw_ocr_box_txt(image, boxes):
    """Overlay OCR text boxes on a PIL image and display the result.

    Each box (a flat sequence of polygon coordinates accepted by
    ImageDraw.polygon) is filled with a deterministic pseudo-random
    colour, blended 50/50 with the original image, and shown with
    matplotlib.

    Fixes over the previous version: removed the unused ``h, w``
    locals, the unused loop index, and dead commented-out code.
    """
    import random
    # Fixed seed so box colours are reproducible across runs.
    random.seed(0)
    overlay = image.copy()
    draw = ImageDraw.Draw(overlay)
    for box in boxes:
        color = (random.randint(0, 255), random.randint(0, 255),
                 random.randint(0, 255))
        draw.polygon(box, fill=color)
    # A 50/50 blend keeps the underlying text visible through the boxes.
    blended = Image.blend(image, overlay, 0.5)
    plt.imshow(np.array(blended))
    plt.show()
def main():
    # Hard-coded dataset layout: images under ./test, one label file per
    # image under ./labels named '<image filename>.txt'.
    path2="./test"
    path1="./labels"
    dirs=os.listdir(path2)
    for i in dirs:
        m=os.path.join(path2,i)
        img=Image.open(m)
        # img=np.array(img)
        # f=open(os.path.join(path1,i+".txt"))
        # lines=f.readlines()
        boxes=[]
        with open(os.path.join(path1,i+".txt"),encoding="utf8") as f:
            lines=f.readlines()
        for line in lines:
            line=line.split(",")
            bx=[]
            # The first 8 comma-separated fields are the quadrilateral's
            # corner coordinates (x1,y1,...,x4,y4); any trailing fields
            # (e.g. the recognised text) are ignored.
            for j in range(0,8,1):
                bx.append(int(line[j]))
            boxes.append(bx)
        draw_ocr_box_txt(img,boxes)
if __name__ == '__main__':
main()
# return np.array(img_show) | oszn/syntxt | draw/drboex.py | drboex.py | py | 1,605 | python | en | code | 0 | github-code | 13 |
22556884739 | import matplotlib.pyplot as plt
import numpy as np
import random
class Data:
    """Toy dataset factory for classification demos."""

    def __init__(self):
        self.X = None             # feature matrix
        self.Y = None             # labels (flat or one-hot)
        self.dist_batches = None  # batches of sampled curves
        self.bin_labels = None    # binary labels for the curve batches

    def random_normal(self, size, distance, one_hot=False):
        """Sample two Gaussian point clouds of ``size`` points each.

        The first cloud is centred at (distance, distance) and labelled
        class 0; the second is centred at (-distance, -distance) and
        labelled class 1.  Results land in ``self.X`` / ``self.Y``;
        labels are one-hot encoded when ``one_hot`` is True.
        """
        centre = distance * np.ones((size, 2))
        cloud_a = np.random.normal(centre, 1)
        cloud_b = np.random.normal(-centre, 1)
        self.X = np.vstack((cloud_a, cloud_b))
        if one_hot:
            labels_a = np.zeros((size, 2))
            labels_a[:, 0] = 1.0
            labels_b = np.zeros((size, 2))
            labels_b[:, 1] = 1.0
            self.Y = np.vstack((labels_a, labels_b))
        else:
            self.Y = np.hstack((np.zeros(size), np.ones(size)))

    def parabola_distribution(self, batch, simulate_points):
        """Draw ``batch`` parabolas ``a*x**2 + (a - 1)`` with a ~ U(1, 2).

        The curves (evaluated at ``simulate_points``) are stored in
        ``self.dist_batches``; ``self.bin_labels`` holds 1.0 where
        ``a - 1 > 0.5`` and 0.0 otherwise (float32, shape (batch, 1)).
        """
        coeff = np.random.uniform(1, 2, size=batch)[:, np.newaxis]
        curves = coeff * np.power(simulate_points, 2) + (coeff - 1)
        flags = ((coeff - 1) > 0.5).astype(np.float32)
        self.dist_batches = curves
        self.bin_labels = flags
class SequenceData:
    """Toy dataset of variable-length sequences for RNN demos.

    Two balanced classes:
      - class 0, label [1., 0.]: increasing linear runs, e.g. [3, 4, 5, ...]
      - class 1, label [0., 1.]: uniformly random values

    Every sequence is scaled into [0, 1] and zero-padded on the right to
    ``max_seq_len``; the true length is recorded in ``self.seqlen``.
    """

    def __init__(self, n_samples=1000, max_seq_len=20, min_seq_len=3,
                 max_value=1000):
        self.data = []
        self.labels = []
        self.seqlen = []
        for _ in range(n_samples):
            length = random.randint(min_seq_len, max_seq_len)
            self.seqlen.append(length)
            pad = [[0.] for _ in range(max_seq_len - length)]
            if random.random() < .5:
                # Linear run starting at a random offset.
                start = random.randint(0, max_value - length)
                seq = [[float(v) / max_value]
                       for v in range(start, start + length)]
                self.data.append(seq + pad)
                self.labels.append([1., 0.])
            else:
                # Uniformly random sequence.
                seq = [[float(random.randint(0, max_value)) / max_value]
                       for _ in range(length)]
                self.data.append(seq + pad)
                self.labels.append([0., 1.])
        self.batch_id = 0

    def next(self, batch_size):
        """Return the next (data, labels, seqlen) batch, wrapping back
        to the start once the dataset has been exhausted.

        The final batch before a wrap may be shorter than
        ``batch_size``.
        """
        if self.batch_id == len(self.data):
            self.batch_id = 0
        end = min(self.batch_id + batch_size, len(self.data))
        batch_data = self.data[self.batch_id:end]
        batch_labels = self.labels[self.batch_id:end]
        batch_seqlen = self.seqlen[self.batch_id:end]
        self.batch_id = end
        return batch_data, batch_labels, batch_seqlen
| xiawang/TF_Related | data_generator.py | data_generator.py | py | 4,297 | python | en | code | 0 | github-code | 13 |
38046943078 | # AUTHOR: Marcin.Wolter@cern.ch
# CREATED: 20 March 2008
#
# 23 Nov 2010: cleaning up (Noel Dawe)
from AthenaCommon.Logging import logging
from AthenaCommon.AlgSequence import AlgSequence
from AthenaCommon.SystemOfUnits import *
from AthenaCommon.Constants import *
from AthenaCommon.AppMgr import ToolSvc
import traceback
from TrigTauDiscriminant.TrigTauDiscriminantConf import TrigTauDiscriBuilder
from TriggerJobOpts.TriggerFlags import TriggerFlags
import ROOT
from tauRec.tauRecFlags import tauFlags
def singleton(cls):
    """Class decorator caching the first instance of *cls*.

    Later construction attempts log a warning and return the cached
    instance instead of building a new one.
    """
    log = logging.getLogger('%s::__init__'% cls.__name__)
    instances = {}
    def getinstance(*args, **kwargs):
        if cls not in instances:
            instances[cls] = cls(*args, **kwargs)
            return instances[cls]
        log.warning("Attempting to construct more than one %s. Returning the singleton."% cls.__name__)
        return instances[cls]
    return getinstance
#@singleton
class TrigTauDiscriGetter(TrigTauDiscriBuilder):
    """Athena configurable for the trigger tau discriminant builder:
    attaches monitoring tools plus the ID-variable calculator and the
    jet-BDT discriminant."""
    __slots__ = [ '_mytools']
    def __init__(self, name = "TrigTauDiscriminant"):
        super( TrigTauDiscriGetter , self ).__init__( name )
        #monitoring part. To switch off do in topOption TriggerFlags.enableMonitoring = []
        from TrigTauDiscriminant.TrigTauDiscriminantMonitoring import TrigTauDiscriminantValidationMonitoring, TrigTauDiscriminantOnlineMonitoring
        validation = TrigTauDiscriminantValidationMonitoring()
        online = TrigTauDiscriminantOnlineMonitoring()
        from TrigTimeMonitor.TrigTimeHistToolConfig import TrigTimeHistToolConfig
        time = TrigTimeHistToolConfig("Time")
        self.AthenaMonTools = [ time, validation, online ]
        # The variable calculator is listed before the BDT; presumably
        # the BDT consumes the calculator's output -- confirm with the
        # tool scheduling conventions before reordering.
        self.Tools = [self.VarCalculatorSet(), self.BDTtoolset()]
    def BDTtoolset(self):
        """Return the configured TauJetBDT discriminant tool."""
        from TauDiscriminant.TauDiscriminantConf import TauJetBDT
        return TauJetBDT(inTrigger = True,
                         calibFolder = 'TrigTauRec/00-11-01/',
                         jetBDT = "trigger.jet.BDT.bin",
                         jetSigBits = "trigger.sig.bits.jet.BDT.txt")
    def VarCalculatorSet(self):
        """Return the tool computing the tau ID input variables."""
        from tauRecTools.tauRecToolsConf import TauIDVarCalculator
        tauVarCalc = TauIDVarCalculator()
        tauVarCalc.inTrigger = True
        tauVarCalc.calibFolder = 'TrigTauRec/00-11-01/'
        return tauVarCalc
class TrigTauDiscriGetter2015(TrigTauDiscriBuilder):
    """Run-2 (2015+) trigger tau discriminant configurable: variable
    calculator plus either the 2016 TauJetBDT or the tauRecTools BDT
    evaluators and working-point decorator, selected at configuration
    time via TriggerFlags.run2Config."""
    __slots__ = [ '_mytools']
    def __init__(self, name = "TrigTauDiscriminant2015"):
        super( TrigTauDiscriGetter2015 , self ).__init__( name )
        #monitoring part. To switch off do in topOption TriggerFlags.enableMonitoring = []
        from TrigTauDiscriminant.TrigTauDiscriminantMonitoring import TrigTauDiscriminantValidationMonitoring, TrigTauDiscriminantOnlineMonitoring
        validation = TrigTauDiscriminantValidationMonitoring()
        online = TrigTauDiscriminantOnlineMonitoring()
        from TrigTimeMonitor.TrigTimeHistToolConfig import TrigTimeHistToolConfig
        time = TrigTimeHistToolConfig("Time")
        self.AthenaMonTools = [ time, validation, online ]
        # Unlike the run-1 class above, BDTtoolset() returns a *list*
        # here, hence the list concatenation.
        self.Tools = [self.VarCalculatorSet()] + self.BDTtoolset()
    def BDTtoolset(self):
        """Return the list of discriminant tools for the configured year."""
        if TriggerFlags.run2Config == '2016':
            from TauDiscriminant.TauDiscriminantConf import TauJetBDT
            bdt_set = TauJetBDT(
                inTrigger = True,
                calibFolder = 'TrigTauRec/00-11-01/',
                jetBDT = "bdt.2016.bin",
                jetSigBits = "trigger.sig.bits.jet.BDT.txt")
            return [bdt_set]
        else:
            from tauRecTools.tauRecToolsConf import TauJetBDTEvaluator
            # BDT evaluators 1p / mp, selected by track multiplicity.
            bdt_1p = TauJetBDTEvaluator(
                name = "TrigTauJetBDT1P",
                calibFolder='tauRecTools/00-02-00/',
                weightsFile='vars2016_pt_gamma_1p_isofix.root',
                inTrigger=True,
                minNTracks=0, maxNTracks=1)
            bdt_mp = TauJetBDTEvaluator(
                name = "TrigTauJetBDTMP",
                calibFolder='tauRecTools/00-02-00/',
                weightsFile='vars2016_pt_gamma_3p_isofix.root',
                inTrigger=True,
                minNTracks=2, maxNTracks=1000)
            # Load the xAOD tau dictionary so the enum values used in
            # CutEnumVals below resolve.
            import PyUtils.RootUtils as ru
            ROOT = ru.import_root()
            import cppyy
            cppyy.loadDictionary('xAODTau_cDict')
            from tauRecTools.tauRecToolsConf import TauWPDecorator
            # wp creators 1p / mp: flatten the BDT score and define the
            # VeryLoose/Loose/Medium/Tight working points at the listed
            # signal efficiencies.
            wp_decorator = TauWPDecorator(
                name = "TrigTauJetWPDecorator",
                calibFolder='TrigTauRec/00-11-01/',
                inTrigger = True,
                flatteningFile1Prong = "FlatJetBDT1P_trigger_v1.root",
                flatteningFile3Prong = "FlatJetBDT3P_trigger_v1.root",
                CutEnumVals=[
                    ROOT.xAOD.TauJetParameters.JetBDTSigVeryLoose,
                    ROOT.xAOD.TauJetParameters.JetBDTSigLoose,
                    ROOT.xAOD.TauJetParameters.JetBDTSigMedium,
                    ROOT.xAOD.TauJetParameters.JetBDTSigTight],
                SigEff1P = [0.995, 0.99, 0.97, 0.90],
                SigEff3P = [0.995, 0.94, 0.88, 0.78],
                ScoreName = "BDTJetScore",
                NewScoreName = "BDTJetScoreSigTrans",
                DefineWPs = True)
            return [bdt_1p, bdt_mp, wp_decorator]
    def VarCalculatorSet(self):
        """Return the tool computing the tau ID input variables."""
        from tauRecTools.tauRecToolsConf import TauIDVarCalculator
        tauVarCalc = TauIDVarCalculator()
        tauVarCalc.inTrigger = True
        tauVarCalc.calibFolder = 'TrigTauRec/00-11-01/'
        return tauVarCalc
| rushioda/PIXELVALID_athena | athena/Trigger/TrigAlgorithms/TrigTauDiscriminant/python/TrigTauDiscriGetter.py | TrigTauDiscriGetter.py | py | 5,781 | python | en | code | 1 | github-code | 13 |
35988603500 | import requests
from bs4 import BeautifulSoup
import re
import numpy as np
import pandas as pd
# Scrape the TripAdvisor Jaisalmer hotel listing page and pull out basic
# hotel attributes, then fetch the first hotel's page for its reviews.
# NOTE(review): selectors are tied to TripAdvisor's markup at scrape time
# and will silently return empty lists if the site layout changes.
url = 'https://www.tripadvisor.in/Hotels-g297667-Jaisalmer_Jaisalmer_District_Rajasthan-Hotels.html'
resp = requests.get(url)
html = resp.text
soup = BeautifulSoup(html,'lxml')
#print soup
# One entry per hotel card on the listing page.
hotel_name = [app.contents[0] for app in soup.select('a.property_title')]
review_count = [app.contents[0].split()[0] for app in soup.select('a.review_count')]
rating = [app for app in soup.select('span.ui_bubble_rating')]
rank = [rank.contents[0].split()[0] for rank in soup.select('div.popindex')]
hotel_link = [app['href'] for app in soup.select('a.property_title')]
# Collect the amenity labels shown as icons under each hotel card.
hotel_facilities = []
for ul_tag in soup.select('ul.icons_list'):
    facility = []
    for li_tag in ul_tag.find_all('li',{'class' : 'hotel_icon'}):
        for div_tag in li_tag.find('div',{'class':'label'}):
            facility.append(div_tag)
    hotel_facilities.append(facility)
#print review_count
#print hotel_name
#print rating
#print hotel_facilities
#print hotel_link
# Follow the first hotel's relative link and scrape its review snippets.
url2 = 'https://www.tripadvisor.in' + hotel_link[0]
response = requests.get(url2)
soup2 = BeautifulSoup(response.text,'lxml')
#print soup2
review_header = [app.text for app in soup2.select('span.noQuotes')]
reviews = [app.text for app in soup2.select('p.partial_entry')]
#print reviews
23607483262 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 2019/7/3 13:13
# @Author : xiezheng
# @Site :
# @File : process_txt.py
import os
txt_path = "./log.txt"
# Iterate the log file line by line.  The original code called open()
# inline and never closed the handle; `with` guarantees cleanup.
with open(txt_path, 'r') as log_file:
    for line in log_file:
        line = line.strip()
        # NOTE(review): the stripped line is currently unused — the loop
        # only normalizes whitespace.  Presumably real processing was
        # meant to follow here; confirm with the author.
| CN1Ember/feathernet_mine | quan_table/insightface_v2/insightface_data/process_txt.py | process_txt.py | py | 232 | python | en | code | 1 | github-code | 13 |
23148905325 | from __future__ import print_function # (at top of module)
from PyQt5.QtCore import Qt
from superqt import *
from PyQt5.QtWidgets import (
QCheckBox,
)
ORIENTATION = Qt.Horizontal
class FeatureSlider:
    """A labelled checkbox + range slider pair for one audio feature.

    Builds the two widgets, wires them to the supplied callbacks, appends
    them to *parent*, and records the initial full range plus an
    enabled flag.
    """

    def __init__(self, parent, name, min, max, check_box_value_changed, slider_value_changed ,is_feature_slider=False):
        # Checkbox first (acts as the slider's label), then the slider,
        # preserving the original widget order inside *parent*.
        parent.addWidget(self._build_checkbox(name, check_box_value_changed))
        parent.addWidget(self._build_slider(name, min, max, slider_value_changed))
        # Initial state: full range selected, feature enabled.
        self.slider_values = (min, max)
        self.is_enabled = True

    def _build_checkbox(self, name, on_change):
        """Create the enable/disable checkbox labelled with *name*."""
        box = QCheckBox()
        box.setChecked(True)
        box.setText(name)
        box.stateChanged.connect(lambda state: on_change(name, box.isChecked()))
        return box

    def _build_slider(self, name, lo, hi, on_change):
        """Create the labelled range slider spanning [lo, hi]."""
        rng = QLabeledRangeSlider(ORIENTATION)
        rng.setRange(lo, hi)
        rng.setSingleStep(int((hi - lo) / 100))
        rng.setValue((lo, hi))
        rng.valueChanged.connect(lambda e: on_change(name, e))
        return rng
| Knightbomb8/Spotify-Testing | src/featureSlider.py | featureSlider.py | py | 1,065 | python | en | code | 0 | github-code | 13 |
32799546821 | import enum
class AnswerForVApp(object):
    """Container for an answer sent back to the vertical application.

    Bundles a message type, a content type and a status code;
    ``display_str`` starts unset and is filled in later by callers.
    """

    def __init__(self, type_msg, type_content, status):
        # Store the three classification fields verbatim.
        self.type_msg, self.type_content, self.status = type_msg, type_content, status
        # Human-readable rendering, populated lazily.
        self.display_str = None
class MsgType(enum.Enum):
    """High-level category of a message exchanged with the vApp."""
    INFO = 0
    ANSWER = 1
    NOTIF = 2
class ContentType(enum.Enum):
    """Kind of payload carried by a message."""
    TYPE_INIT_REQUEST = 0
    TYPE_START_MONITORING = 1
    TYPE_LOCATION_NOTIF = 2
    TYPE_QOS_NOTIF = 3
class AnswerStatus(enum.Enum):
    """Outcome code attached to an answer (ERROR < OK < MODIF)."""
    ERROR = -1
    OK = 0
    MODIF = 1
21672641389 | import asyncio
import json
import sys
import zmq
import zmq.asyncio
from distributed_algorithms.wave_algorithm import Wave
__author__ = 'christopher@levire.com'
zmq.asyncio.install()
ctx = zmq.asyncio.Context()
class Worker:
    """Wave-algorithm participant that pulls work over ZeroMQ sockets.

    Connects a PULL socket for incoming work, a ROUTER socket towards the
    gatherers, and a named ROUTER socket used by the wave algorithm to
    talk to both gatherers and the controller.
    """
    def __init__(self, name):
        # Work queue: PULL from the distributor.
        self.zmocket = ctx.socket(zmq.PULL)
        self.zmocket.connect("tcp://127.0.0.1:6667")
        # Results channel towards both gatherer endpoints.
        self.gatherer_zmocket = ctx.socket(zmq.ROUTER)
        self.gatherer_zmocket.connect("tcp://127.0.0.1:6666")
        self.gatherer_zmocket.connect("tcp://127.0.0.1:6665")
        # Wave-algorithm channel; identity must be set BEFORE connecting
        # so peers can address this worker by name.
        self.wave_zmocket = ctx.socket(zmq.ROUTER)
        self.wave_zmocket.setsockopt(zmq.IDENTITY, name.encode())
        self.wave_zmocket.connect("tcp://127.0.0.1:6670") # gatherer 1
        self.wave_zmocket.connect("tcp://127.0.0.1:6671") # gatherer 2
        self.wave_zmocket.connect("tcp://127.0.0.1:6672") # controller
        self.name = name
        self.wave = Wave(self.wave_zmocket, controller=False, name=self.name)
        # Service discovery is hard-coded locally for now: no neighbors.
        self.wave.update_neighbors([])
    async def pulling_work(self):
        """Announce this worker to the controller, then run the wave loop.

        The initial sleep gives the sockets time to finish connecting
        before the first message is sent.
        """
        await asyncio.sleep(0.5)
        self.wave_zmocket.send_multipart(["controller".encode(),
                                          json.dumps({"type": "wave_init", "sender": self.name}).encode()])
        while True:
            await asyncio.sleep(0.5)
            await self.wave.run_wave()
if __name__ == "__main__":
    # CLI entry point: first argument is this worker's unique name.
    worker_name = sys.argv[1]
    worker = Worker(worker_name)
    # Drive the worker's pull loop on the default asyncio event loop.
    loop = asyncio.get_event_loop()
    loop.run_until_complete(worker.pulling_work())
    loop.close()
| christ0pher/distributed-algorithms-python | worker.py | worker.py | py | 1,618 | python | en | code | 0 | github-code | 13 |
29636260156 | # -*- coding: utf-8 -*-
import jinja2
import json
import os
import re
import webapp2
jinja_env = jinja2.Environment(
loader=jinja2.FileSystemLoader(os.path.dirname(__file__)))
def get_gradegroup_by_id(prob_id):
    """Return the grade group for a problem id.

    Ids shaped ``<contest>.<round>.<year>.<grade>...`` where the grade
    segment starts with 7-9 are junior problems ('9'); everything else
    is senior ('12').
    """
    junior = re.match('^(LV|EE|LT)\.[^\.]+\.[0-9]+\.[7-9].*', prob_id)
    return '9' if junior else '12'
def get_cat1(tsk):
    """Return the top-level category: the text before the first dot."""
    return tsk.split('.')[0]
def get_cat2(tsk):
    """Return the two-level category prefix, e.g. 'alg.expr.x' -> 'alg.expr'.

    Raises IndexError when *tsk* has no dot, matching the original.
    """
    parts = tsk.split('.')
    return '{0}.{1}'.format(parts[0], parts[1])
def escape_math(res):
    """HTML-escape one word and wrap it in a MathJax inline span when it
    looks like math (contains ^, _, < or > between non-space characters).

    Bug fix: the original escaped '<'/'>' to entities FIRST and only then
    tested for them, so the '<'/'>' math detection could never fire.
    Detection now runs on the raw text before escaping.
    """
    PRE = '<span class="math inline">\('
    POST = '\)</span>'
    is_math = False
    if re.match('[^ ]+\^[^ ]+', res):
        is_math = True
    if re.match('[^ ]+(<|>)[^ ]+', res):
        is_math = True
    if re.match('[^ ]+_[^ ]+', res):
        is_math = True
    # Escape after detection so the entities themselves are not matched.
    res = re.sub('<', '&lt;', res)
    res = re.sub('>', '&gt;', res)
    if is_math:
        res = '%s%s%s' % (PRE, res, POST)
    return res
def escape_math_seq(arg):
    """Apply escape_math to every whitespace-separated word of *arg*."""
    return ' '.join(escape_math(word) for word in arg.split())
def get_html_links(lst_problems):
    """Map each problem id to an HTML anchor into its published file.

    The contest prefix (e.g. 'LV.NO') selects the file via *file_dict*;
    the full id, lower-cased, becomes the in-page fragment.
    Raises KeyError for an unknown contest prefix.
    """
    file_dict = {
        'EE.PK':'numtheory-ee-pk',
        'EE.LO':'numtheory-ee-lo',
        'EE.LVS':'numtheory-ee-lvs',
        'EE.LVT':'numtheory-ee-lvt',
        'EE.TST':'numtheory-ee-tst',
        'LV.NO':'numtheory-lv-no',
        'LV.VO':'numtheory-lv-vo',
        'LV.AO':'numtheory-lv-ao',
        'LV.TST':'numtheory-lv-tst',
        'LV.OTHER':'numtheory-lv-other',
        'LT.LJKMO':'numtheory-lt-ljkmo',
        'LT.LKMMO':'numtheory-lt-lkmmo',
        'LT.LDK':'numtheory-lt-ldk',
        'LT.RAJ':'numtheory-lt-raj',
        'LT.VILNIUS':'numtheory-lt-vilnius',
        'LT.LMMO':'numtheory-lt-lmmo',
        'LT.VUMIF':'numtheory-lt-vumif',
        'LT.TST':'numtheory-lt-tst',
        'BBK2012.P1':'numtheory-bbk-p1',
        'BBK2012.P2':'numtheory-bbk-p2',
        'BBK2012.P3':'numtheory-bbk-p3',
        'BBK2012.P4':'numtheory-bbk-p4',
        'BBK2012.P5':'numtheory-bbk-p5',
        'BBK2012.P6':'numtheory-bbk-p6',
        'BBK2012.P7':'numtheory-bbk-p7',
        'BBK2012.P8':'numtheory-bbk-p8',
        'BBK2012.P9':'numtheory-bbk-p9'
    }
    result = list()
    for prob_id in lst_problems:
        # Reduce 'LV.NO.2015.7.1' -> 'LV.NO' to look up the file.
        prefix = re.sub(r'(LV|EE|LT|BBK2012)\.([^\.]+)\..*', r'\1.\2', prob_id)
        result.append('<a href="../../files-prob/%s/content.html#/%s">%s</a>'
                      % (file_dict[prefix], prob_id.lower(), prob_id))
    return result
class ProblembaseReportsTaskgroupsHandler(webapp2.RequestHandler):
    """Renders the skill-group overview report for the problem database.

    Counts, per skill group and per top-level section, how many tasks
    belong to junior ('9') vs senior ('12') problems and feeds the tallies
    into the reports-taskgroups Jinja template.
    """
    def get(self,my_id):
        # Load the problem corpus and the shared navigation data.
        # NOTE(review): `my_id` (the URL capture group) is unused here.
        with open('data/problems.json') as f1:
            the_problems = json.load(f1)
        with open('data/global_navigation.json') as f2:
            nav_items = json.load(f2)
        # Display order of the top-level sections in the report.
        sections = ['alg','div','mod','nota','seq','misc','comb']
        # Section -> ordered list of its skill-group ids.
        skill_groups = {
            'alg': ['alg.expr', 'alg.tra', 'alg.series', 'alg.ineq', 'alg.linear', 'alg.poly'],
            'div': ['div.prop', 'div.fta', 'div.common', 'div.valu'],
            'mod': ['mod.fix', 'mod.congr', 'mod.period', 'mod.exp', 'mod.eq'],
            'nota': ['nota.est', 'nota.divrule','nota.combine','nota.algor'],
            'seq': ['seq.spec', 'seq.arithm', 'seq.geom', 'seq.recur','seq.gaps'],
            'comb': ['comb.full', 'comb.count', 'comb.graph', 'comb.constr'],
            'misc': ['misc.try', 'misc.extr', 'misc.invar', 'misc.ind', 'misc.symm']
        }
        # Human-readable titles for both sections and skill groups.
        topic_titles = {
            'alg': 'Algebraic skills',
            'alg.expr': 'Build variable expressions',
            'alg.tra': 'Use algebraic transformations',
            'alg.series': 'Handle long sums and products',
            'alg.ineq': 'Prove and use inequalities',
            'alg.linear': 'Manipulate linear equations and systems',
            'alg.poly': 'Use the properties of integer polynomials',
            'div': 'Divisibility skills',
            'div.prop': 'Use divisibility relation and its properties',
            'div.fta': 'Use Fundamental theorem of arithmetic (prime factorization)',
            'div.common': 'Use the properties of gcd and lcm',
            'div.valu': 'Compute and use p-valuations',
            'mod': 'Modular arithmetic skills',
            'mod.fix': 'Use fixed modules: parity, last digits',
            'mod.congr': 'Map integers to congruence classes and do arithmetic',
            'mod.period': 'Use the periodicity of remainder sequences',
            'mod.exp': 'Investigate congruences of an exponential function',
            'mod.eq': 'Solve congruence equations and linear systems',
            'nota': 'Numeral notation skills',
            'nota.est': 'Estimate digits and the digit count in numbers',
            'nota.divrule': 'Apply divisibility rules',
            'nota.combine': 'Express digit manipulations with expressions',
            'nota.algor': 'Use notation-based algorithms',
            'seq': 'Integer sequence skills',
            'seq.spec': 'Recognize numbers with special properties',
            'seq.arithm': 'Use the properties of arithmetic progressions',
            'seq.geom': 'Use the properties of geometric progressions',
            'seq.recur': 'Study other recurrent sequences',
            'seq.gaps': 'Estimate sequence growth and their gaps',
            'comb': 'Combinatorics skills',
            'comb.full': 'Perform full case analysis',
            'comb.count': 'Count the number of variants',
            'comb.graph': 'Use graphs to build examples',
            'comb.constr': 'Use other structures to build examples',
            'misc': 'Miscellaneous skills',
            'misc.try': 'Experiment with some parameters and generalize',
            'misc.extr': 'Select the extreme elements',
            'misc.invar': 'Create and use invariants',
            'misc.ind': 'Prove statements with mathematical induction',
            'misc.symm': 'Use symmetry inherent in the problem'
        }
        # Counters keyed by every section AND group id, split by grade group.
        grp_lst = topic_titles.keys()
        grp_jun = {}
        grp_sen = {}
        for grp in grp_lst:
            grp_jun[grp] = 0
            grp_sen[grp] = 0
        # Tally each known task under both its group and its section.
        for problem in the_problems:
            grade_group = get_gradegroup_by_id(problem['id'])
            for tsk in problem['tasks']:
                cat1 = get_cat1(tsk)
                cat2 = get_cat2(tsk)
                if cat2 in grp_lst:
                    if grade_group == '9':
                        grp_jun[cat1] += 1
                        grp_jun[cat2] += 1
                    else:
                        grp_sen[cat1] += 1
                        grp_sen[cat2] += 1
        template_context = {
            'title': u'Uzdevumu DB: Prasmju grupu pārskats',
            'sections': sections,
            'skill_groups': skill_groups,
            'topic_titles': topic_titles,
            'grp_jun': grp_jun,
            'grp_sen': grp_sen,
            'course': 'problembase',
            'nav_items': nav_items
        }
        template = jinja_env.get_template('problembase/reports-taskgroups.html')
        output = template.render(template_context)
        self.response.out.write(output.encode('utf-8'))
# WSGI routing: every /problembase/* request goes to the taskgroups
# report handler; the capture group becomes the handler's `my_id`.
app = webapp2.WSGIApplication([
    ('/problembase/(.*)', ProblembaseReportsTaskgroupsHandler)
], debug=True)
74562955538 | """
_InsertRunStreamDone_
Oracle implementation of InsertRunStreamDone
Insert RunStreamDone record into Tier0 Data Service
"""
from WMCore.Database.DBFormatter import DBFormatter
class InsertRunStreamDone(DBFormatter):
def execute(self, binds, conn = None, transaction = False):
sql = """MERGE INTO run_stream_done
USING DUAL ON ( run = :RUN AND stream = :STREAM )
WHEN NOT MATCHED THEN
INSERT (run, stream)
VALUES (:RUN, :STREAM)
"""
self.dbi.processData(sql, binds, conn = conn,
transaction = transaction)
return
| dmwm/T0 | src/python/T0/WMBS/Oracle/T0DataSvc/InsertRunStreamDone.py | InsertRunStreamDone.py | py | 670 | python | en | code | 6 | github-code | 13 |
74907143697 | import cv2 as cv
import numpy as np
alpha = 0.3
beta = 80
imgpath = "../../resource/chapter3/1.jpg"
img1 = cv.imread(imgpath)
img2 = cv.imread(imgpath)
def updateAlpah(x):
global alpha, img1, img2
alpha = cv.getTrackbarPos("alpha", "image")
alpha = alpha * 0.01
img1 = np.uint8(np.clip((alpha * img2 + beta), 0, 255))
def updateBeta(x):
global beta, img1, img2
beta = cv.getTrackbarPos("beta", "image")
img1 = np.uint8(np.clip((alpha * img2 + beta), 0, 255))
cv.namedWindow("image")
cv.createTrackbar("alpha", "image", 0, 300, updateAlpah)
cv.createTrackbar("beta", "image", 0, 255, updateBeta)
while (1):
cv.imshow("image", img1)
if cv.waitKey(1) == ord('q'):
break
cv.destroyAllWindows()
| codezzzsleep/records2.1 | robot-and-vision/test/chapter3/demo12.py | demo12.py | py | 742 | python | en | code | 0 | github-code | 13 |
13107388184 | # -*- coding: utf-8 -*-
"""
Created on Tue Jul 26 15:10:00 2022
@author: intan
"""
from tensorflow.keras.layers import LSTM,Dense,Dropout,Embedding,Bidirectional
from tensorflow.keras import Input,Sequential
import matplotlib.pyplot as plt
class ModelDevelopment:
def simple_MD_model(self,input_shape,vocab_size,nb_class,nb_node=128, dropout_rate=0.3):
model=Sequential()
model.add(Input(shape=(input_shape)))
model.add(Embedding(vocab_size,nb_node))
model.add(Bidirectional(LSTM(nb_node,return_sequences=(True))))
model.add(Dropout(dropout_rate))
model.add(Bidirectional(LSTM(nb_node)))
model.add(Dropout(dropout_rate))
model.add(Dense(nb_class, activation='softmax'))
model.summary()
return model
class ModelEvaluation:
def Plot_Hist(self,hist,loss=0,vloss=2):
a=list(hist.history.keys())
plt.figure()
plt.plot(hist.history[a[loss]])
plt.plot(hist.history[a[vloss]])
plt.legend(['training_'+ str(a[loss]), a[vloss]])
plt.show()
| intan7/Multiclass-Article-Classification | Multiclass_Article_Classification_module.py | Multiclass_Article_Classification_module.py | py | 1,093 | python | en | code | 0 | github-code | 13 |
28808560409 | import numpy as np
import matplotlib.pyplot as plotGraph
from scipy.stats import multivariate_normal
from mpl_toolkits.mplot3d import Axes3D
np.random.seed(47)
# Given parameters
samples = 10000
features = 4
num_labels = 2
priors = [0.35, 0.65]
matrix_mean = np.ones(shape=(num_labels, features))
matrix_mean[0, :] = [-1, -1, -1, -1]
covariance_matrix = np.ones(shape=(num_labels, features, features))
covariance_matrix[0, :, :] = [[2, -0.5, 0.3, 0], [-0.5, 1, -0.5, 0], [0.3, -0.5, 1, 0], [0, 0, 0, 2]]
covariance_matrix[1, :, :] = [[1, 0.3, -0.2, 0], [0.3, 2, 0.3, 0], [-0.2, 0.3, 1, 0], [0, 0, 0, 3]]
# Generating samples
X = np.zeros(shape=(samples, features))
labels = np.random.rand(samples) >= priors[0]
for i in range(samples):
if labels[i] == 0:
X[i, :] = np.random.multivariate_normal(matrix_mean[0, :], covariance_matrix[0, :, :])
elif labels[i] == 1:
X[i, :] = np.random.multivariate_normal(matrix_mean[1, :], covariance_matrix[1, :, :])
pdf = np.log(multivariate_normal.pdf(X, mean=matrix_mean[0, :], cov=np.eye(features,features)))
pdf1 = np.log(multivariate_normal.pdf(X, mean=matrix_mean[1, :], cov=np.eye(features,features)))
disc_score = pdf1 - pdf
ds_sort = np.sort(disc_score)
tausweep = [(ds_sort[t] + ds_sort[t + 1]) / 2.0 for t in range(0, 9999)]
TP, FP, minerror = [], [], []
for (i, tau) in enumerate(tausweep):
dec = (disc_score >= tau)
TP.append((np.size(np.where((dec == 1) & (labels == 1)))) / np.size(np.where(labels == 1)))
FP.append((np.size(np.where((dec == 1) & (labels == 0)))) / np.size(np.where(labels == 0)))
minerror.append((priors[0] * FP[i]) + (priors[1] * (1 - TP[i])))
loggamma_ideal = np.log(priors[0] / priors[1])
ideal_dec = (disc_score >= loggamma_ideal)
ideal_tp = (np.size(np.where((ideal_dec == 1) & (labels == 1)))) / np.size(np.where(labels == 1))
ideal_fp = (np.size(np.where((ideal_dec == 1) & (labels == 0)))) / np.size(np.where(labels == 0))
ideal_minerror = (priors[0] * ideal_fp) + (priors[1] * (ideal_tp))
print("γ Ideal - %f Minimum Error %f" %(np.exp(loggamma_ideal), ideal_minerror))
# Plotting Graph
fig = plotGraph.figure()
ax = plotGraph.axes(projection = "3d")
Class0 = ax.scatter(X[(labels==0),3],X[(labels==0),1],X[(labels==0),2],'+',color ='blue', label="0")
Class1 = ax.scatter(X[(labels==1),3],X[labels==1,1],X[labels==1,2],'.',color = 'red', label="1")
plotGraph.xlabel('X3')
plotGraph.ylabel('X1')
ax.set_zlabel('X2')
ax.legend()
plotGraph.title('Generated Data')
plotGraph.show()
# ROC curve
plotGraph.plot(FP, TP, color = 'pink')
plotGraph.ylabel('True Positive')
plotGraph.xlabel('False Positive')
plotGraph.title('ROC Curve of minimum expected risk classifier')
plotGraph.plot(FP[np.argmin(minerror)], TP[np.argmin(minerror)],'o',color = 'red')
plotGraph.show()
print("γ Practical - %f Minimum Error %f" %(np.exp(tausweep[np.argmin(minerror)]), np.min(minerror))) | KashS28/ECE5644-Assignments | Assignment 1/q1b.py | q1b.py | py | 2,891 | python | en | code | 0 | github-code | 13 |
70073819219 | from controller import Supervisor
MAXIMUM_TIME = 3*60*1000
SPENT_TIME = 0
EPS = 0.2
def getPoints(dist, eps=None):
    """Score a distance against ten concentric thresholds.

    Returns an int in [0, 10]: one point for every threshold
    eps*1 .. eps*10 that *dist* falls strictly inside, so smaller
    distances score higher and anything >= eps*10 scores 0.

    *eps* defaults to the module-level EPS, so existing callers are
    unaffected; passing it explicitly makes the ring width tunable
    (and the function testable in isolation).
    """
    if eps is None:
        eps = EPS
    poi_points = 0
    for j in range(10):
        if dist < eps*(j+1):
            poi_points += 1
    return poi_points
# Webots referee script: tracks how close the participant robot gets to
# ten points of interest (POIs) and scores the run.
referee = Supervisor()
timestep = int(referee.getBasicTimeStep())
robot_node = referee.getFromDef('PARTICIPANT_ROBOT')
emitter = referee.getDevice('emitter')
# POI coordinates are packed as 20 space-separated floats (x y pairs)
# in the robot's customData field.
poi_list = []
poi_string_list = robot_node.getField('customData').getSFString().split()
for i in range(10):
    poi_element = [float(poi_string_list[2*i]), float(poi_string_list[2*i+1])]
    poi_list.append(poi_element)
# Best (smallest) Manhattan distance seen so far for each POI.
min_dist = [20]*10
points = 0
while referee.step(timestep) != -1 and SPENT_TIME < MAXIMUM_TIME:
    final_points = 0
    for i in range(10):
        # Manhattan distance in the XY plane between robot and POI i.
        dist = abs(poi_list[i][0] - robot_node.getPosition()[0]) + abs(poi_list[i][1] - robot_node.getPosition()[1])
        if i == 9:
            # The last POI is the final position: scored live, double weight.
            final_points = 2*getPoints(dist)
        min_dist[i] = min(min_dist[i], dist)
    SPENT_TIME += timestep
    # Build the on-screen scoreboard label.
    label = ''
    point_value = 0
    for i in range(10):
        poi_ind = str(i+1)
        if i == 9:
            poi_ind = 'F'
        poi_points = getPoints(min_dist[i])
        point_value += poi_points
        poi_label = 'POI_' + poi_ind + ': ' + str(poi_points) + '\n'
        label += poi_label
    point_value += final_points
    label += 'Final position: ' + str(final_points) + '\n'
    label += 'Total points: ' + str(point_value) + '\n'
    label += 'Time spent: ' + str(SPENT_TIME) + '\n'
    points = point_value
    referee.setLabel(1, label, 0.15, 0.55, 0.05, 16777215, 0)
    if points == 120:
        # Perfect score: award a time bonus and stop early.
        points += max(0, MAXIMUM_TIME - SPENT_TIME)/100
        break
# Store the results
with open('/tmp/results.txt', 'w') as f:
    f.write(f'points: {points}\n')
# We want to see the fall :)
referee.step(20 * timestep)
# Notify the end of the game
emitter.send('done'.encode('utf-8'))
referee.step(timestep)
2371531040 | from system.core.load import Control
import system.core.my_utils as my
class Invest_guide(Control) :
    def _auto(self) :
        """Per-request setup: open the stocks DB and parse URL params.

        parm[0] is the board id, optional parm[1] is a sub-command/date.
        NOTE(review): SQL below is built by string interpolation — safe
        only as long as `bid` comes from trusted routing.
        """
        self.DB = self.db('stocks')
        self.bid = self.parm[0]
        try : self.snd = self.parm[1]
        except IndexError : self.snd = None
        self.board = 'h_'+self.bid+'_board'
        self.target = self.DB.one(f"SELECT extra1 FROM h_board_config WHERE bid='{self.bid}'")
    def emptyPick(self) :
        """Delete all rows of this board from the given date onward,
        then redirect back to the board list.

        NOTE(review): the date is interpolated straight into the SQL;
        confirm it is validated upstream.
        """
        pickDate = self.parm[1]
        qry = f"DELETE FROM {self.board} WHERE add0 >= '{pickDate}'"
        self.DB.exe(qry)
        return self.moveto('board/list/'+self.bid)
    def insertPick(self) :
        """Copy the h_INVEST_board row for the given date into this board.

        The copied row gets a fresh auto-increment id (the 'no' column is
        dropped) and a placeholder content body; NULLs become ''.
        """
        pickDate = self.parm[1]
        self.DB.tbl, self.DB.wre = ('h_INVEST_board',f"add0='{pickDate}'")
        line = self.DB.get_line("*")
        del line['no']
        line['content'] = "<div><p>Copied by Auto</p></div>"
        for x in line :
            if line[x] == None : line[x] = ''
        qry=self.DB.qry_insert(self.board,line)
        self.DB.exe(qry)
        return self.moveto('board/list/'+self.bid)
    def oneWrite(self) :
        """Advance the simulation by one trading day.

        Finds the first stock-history date after the board's latest row,
        then runs the full pipeline: init -> today's sell/buy checks ->
        bookkeeping -> tomorrow's sell/buy strategy -> persist.
        """
        self.D['prev_date'] = self.DB.one(f"SELECT max(add0) FROM h_{self.bid}_board")
        if self.D['prev_date'] :
            self.D['today'] = self.DB.one(f"SELECT min(add0) FROM h_stockHistory_board WHERE add0 > '{self.D['prev_date']}'")
            if self.D['today'] :
                self.init_value()
                # Check whether today's sell order triggered
                self.today_sell()
                # Check whether today's buy order triggered
                self.today_buy()
                self.calculate()
                # Plan tomorrow's sell order
                self.tomorrow_sell()
                # Plan tomorrow's buy order
                self.tomorrow_buy()
                self.update_value()
            else :
                self.set_message(f"{self.D['prev_date']} 이후 업데이트된 정보가 없습니다")
        return self.moveto('board/list/'+self.bid)
# -----------------------------------------------------------------------------------------------------------------------
# From Auto Input
# -----------------------------------------------------------------------------------------------------------------------
    def update_value(self) :
        """Assemble today's board row from self.M and insert it.

        Mutates the previous day's row dict (self.M['LD']) in place,
        applies buy/sell deltas, recomputes derived ratios, formats the
        numeric fields as strings and finally INSERTs the row.
        """
        U = self.M['LD']
        # Drop the old primary key so the insert creates a new row.
        del U['no']
        U['wdate'] = my.now_timestamp()
        U['mdate'] = U['wdate']
        U['add0'] = self.M['진행일자']
        U['add14'] = self.M['당일종가']
        U['sub5'] = self.M['연속상승']
        U['sub6'] = self.M['연속하락']
        U['sub12'] = self.M['경과일수']
        U['sub18'] = self.M['기초수량']
        U['add5'] = self.M['변동수량']
        U['add9'] = int(U['add9']) + self.M['변동수량']
        U['add11'] = round(self.M['매수금액'],2)
        U['add12'] = round(self.M['매도금액'],2)
        if U['add11'] :
            U['sub14'] = float(U['sub14']) + U['add11'] # cumulative buys
            U['add6'] = float(U['add6']) + U['add11'] # current invested amount
            U['add7'] = round(U['add6']/U['add9'],4) # average unit price
            U['add20'] = self.M['추가자금']
        if U['add12'] :
            U['sub15'] = float(U['sub15']) + U['add12'] # cumulative sells
            U['add8'] = round((U['add12'] / float(U['add6']) - 1) * 100,2)
            U['add6'] = 0.00 # current invested amount
            U['add7'] = 0.00 # average unit price
            U['add18'] = self.M['현재손익']
            U['sub1'] = self.M['시즌']
            U['sub4'] = self.M['일매수금']
            U['sub18'] = my.ceil(self.M['일매수금'] / self.M['당일종가'])
            U['add20'] = self.M['추가자금']
        else :
            U['add8'] = '0.00'
            if U['add7'] and float(U['add7']) : U['add8'] = round((self.M['당일종가'] / float(U['add7']) - 1) * 100,2) # current return % if avg price != 0
        U['add19'] = self.M['가용잔액']
        U['add1'] = '0.00'
        U['add2'] = '0.00'
        U['add3'] = self.M['현재잔액'] + U['add12'] - U['add11'] # cash total
        U['add15'] = int(U['add9']) * self.M['당일종가'] # leveraged position value
        if U['add12'] :
            U['add18'] = self.M['현재손익']
        else :
            U['add18'] = round(U['add15']-float(U['add6']),2) if U['add9'] else 0 # unrealized P/L while holding
        U['add17'] = U['add3'] + U['add15'] #Total Value
        U['add4'] = round(U['add3'] / U['add17'] * 100,2)
        U['add16'] = round(U['add15'] / U['add17'] * 100,2)
        # Keep buy limit strictly below sell limit once a position exists.
        if self.M['경과일수'] !=0 and self.M['전매수가'] >= self.M['전매도가'] : self.M['전매수가'] = self.M['전매도가'] - 0.01
        U['sub2'] = self.M['전매수량']
        U['sub19'] = self.M['전매수가']
        U['sub3'] = self.M['전매도량']
        U['sub20'] = self.M['전매도가']
        if U['sub20'] <= U['sub19'] : U['sub20'] = U['sub19'] + 0.01
        U['sub29'] = self.M['진행상황']
        U['sub7'] = self.M['회복전략']
        U['sub30'] = self.M['수수료등']
        U['sub31'] = float(U['sub31']) + self.M['수수료등'] if self.M['경과일수'] != 1 else self.M['수수료등'] # cumulative fees
        U['sub28'] = round((U['add17'] / float(U['sub27']) - 1) * 100,2); # overall return %
        U['content'] = "<div><p>Written by Auto</p></div>"
        # Formatting
        U['add3'] = f"{U['add3']:.2f}"
        U['add6'] = f"{float(U['add6']):.2f}"
        U['add7'] = f"{float(U['add7']):.4f}"
        U['add8'] = f"{float(U['add8']):.2f}"
        U['add11'] = f"{U['add11']:.2f}"
        U['add15'] = f"{U['add15']:.2f}"
        U['add16'] = f"{U['add16']:.2f}"
        U['add17'] = f"{U['add17']:.2f}"
        U['add19'] = f"{U['add19']:.2f}"
        U['add20'] = f"{float(U['add20']):.2f}"
        U['sub7'] = f"{float(U['sub7']):.1f}"
        U['sub19'] = f"{U['sub19']:.2f}"
        U['sub30'] = f"{U['sub30']:.2f}"
        # In 'chance' mode, mirror the target board's limit prices.
        if self.snd == 'chance' :
            U['sub19'] = self.DB.one(f"SELECT sub19 FROM h_{self.target}_board WHERE add0 = '{self.M['진행일자']}'")
            U['sub20'] = self.DB.one(f"SELECT sub20 FROM h_{self.target}_board WHERE add0 = '{self.M['진행일자']}'")
        # Replace any remaining NULLs with empty strings before insert.
        U.update({k:'' for k,v in U.items() if v == None})
        qry=self.DB.qry_insert(self.board,U)
        self.DB.exe(qry)
    def init_value(self) :
        """Load yesterday's board row, today's quote and the trading
        strategy parameters into self.M / self.S, and reset the per-day
        buy/sell state.
        """
        self.M = {}
        self.M['진행일자'] = self.D['today']
        self.DB.tbl = f"h_{self.bid}_board"
        self.DB.wre = f"add0='{self.D['prev_date']}'"
        LD = self.M['LD'] = self.DB.get_line('*')
        self.M['현재잔액'] = float(LD['add3'])
        # Fetch today's closing price and streak counters
        self.DB.clear()
        self.DB.tbl = 'h_stockHistory_board'
        self.DB.wre = f"add0='{self.M['진행일자']}' and add1='SOXL'";
        self.M['당일종가'] = float(self.DB.get_one('add3'))
        self.M['전일종가'] = float(self.M['LD']['add14'])
        self.M['연속상승'] = self.DB.get_one('add9')
        self.M['연속하락'] = self.DB.get_one('add10')
        # Load the trading-strategy parameters
        self.M['매매전략'] = 'VICTORY'
        self.DB.tbl, self.DB.wre = ('h_stock_strategy_board',f"add0='{self.M['매매전략']}'")
        self.S = self.DB.get_line('add2,add3,add4,add5,add9,add10,add11,add17,add18,add22,add25')
        self.M['분할횟수'] = int(self.S['add2'])
        self.D['비중조절'] = 1 + float(self.S['add3'])/100 # buy-quantity weight per elapsed day
        self.M['평단가치'] = 1 + float(self.S['add4'])/100 # regular-buy price band
        self.M['큰단가치'] = 1 + float(self.S['add5'])/100 # first-day buy price band
        self.M['첫매가치'] = 1 + float(self.S['add9'])/100
        self.M['둘매가치'] = 1 + float(self.S['add10'])/100
        self.M['강매시작'] = int(self.S['add17'])
        self.M['강매가치'] = 1 + float(self.S['add18'])/100
        self.M['위매비중'] = int(self.S['add25'])
        self.M['회복기한'] = int(self.S['add11'])
        # Reset per-day buy/sell state
        self.M['매수금액']=0.0
        self.M['매도금액']=0.0
        self.M['변동수량'] = 0
        self.M['매수수량'] = 0
        self.M['매도수량'] = 0
        self.M['전매도량'] = 0
        self.M['전매도가'] = 0.0
        self.M['현재손익'] = 0.0
        self.M['수수료등'] = 0.0
        self.M['시즌'] = int(LD['sub1'])
        self.M['평균단가'] = float(LD['add7'])
        self.M['일매수금'] = int(LD['sub4'])
        self.M['경과일수'] = int(LD['sub12'])
        self.M['매매현황'] = ''
        self.M['진행상황'] = '매도대기'
        self.M['보유수량'] = int(LD['add9'])
        self.M['현매수금'] = float(LD['add6'])
        self.M['가용잔액'] = float(LD['add19'])
        self.M['추가자금'] = float(LD['add20'])
        self.M['진행상황'] = '매도대기'
        self.M['기초수량'] = int(LD['sub18'])
        self.M['회복전략'] = float(LD['sub7'])
    def calculate(self) :
        """Apply today's fills to the position bookkeeping.

        A sell (always the full position) realizes P/L, charges fees,
        bumps the season and rebalances cash; a buy updates quantity,
        average price and balances.  Both legs settle at today's close.
        """
        매도가격 = self.M['당일종가']
        매수가격 = self.M['당일종가']
        if self.M['보유수량'] : self.M['경과일수'] +=1
        if self.M['매도수량'] :
            self.M['매도금액'] = 매도가격 * self.M['매도수량']
            self.M['변동수량'] = -self.M['매도수량']
            self.M['진행상황'] = '전량매도'
            수익금액 = self.M['매도금액'] - self.M['현매수금']
            # Losing exit arms the recovery strategy for the next cycle.
            self.M['회복전략'] = 0 if 수익금액 > 0 else self.S['add22']
            self.M['현재손익'] = f"{수익금액:.2f}"
            self.M['경과일수'] = 0
            self.M['시즌'] += 1
            self.M['기초수량'] = 0
            self.commission(self.M['매도금액'],2)
            # Rebalance cash buckets after closing the position
            self.rebalance()
        if self.M['매수수량'] :
            self.M['매수금액'] = 매수가격 * self.M['매수수량']
            self.M['변동수량'] = self.M['매수수량']
            self.M['보유수량'] += self.M['매수수량']
            self.M['평균단가'] = (self.M['현매수금'] + self.M['매수금액']) / self.M['보유수량']
            self.commission(self.M['매수금액'],1)
            self.M['가용잔액'] -= self.M['매수금액']
            # Overdraw of the usable balance is covered from the reserve.
            if self.M['가용잔액'] < 0 :
                self.M['추가자금'] += self.M['가용잔액']
                self.M['가용잔액'] = 0
            self.M['진행상황'] = '일반매수'
        if not self.M['경과일수'] and self.M['매수수량'] :
            self.M['경과일수'] = 1
            self.M['진행상황'] = '첫날매수'
        if not self.M['보유수량'] : self.M['진행상황'] = '매수대기'
    def rebalance(self) :
        """Re-split total cash 2:1 into usable balance vs reserve and
        recompute the daily buy budget (balance / split count)."""
        total = self.M['매도금액'] + self.M['가용잔액'] + self.M['추가자금']
        self.M['가용잔액'] = int((total * 2)/3)
        self.M['추가자금'] = total - self.M['가용잔액']
        self.M['일매수금'] = int(self.M['가용잔액']/self.M['분할횟수'])
    def tomorrow_sell(self) :
        """Set tomorrow's sell order (quantity + limit price).

        No position: no sell order, anchor the price at today's close.
        Otherwise the limit starts at avg price * first-sell factor and is
        overridden — in increasing priority — when cash is too tight, when
        the recovery strategy is active, or once the forced-sell day is
        reached.  Always offers the entire position.
        """
        if self.M['경과일수'] == 0 :
            self.M['전매도량'] = 0
            self.M['전매도가'] = self.M['당일종가']
            return
        매수수량 = my.ceil(self.M['기초수량'] * (self.M['경과일수']*self.D['비중조절'] + 1))
        매도단가 = my.round_up(self.M['평균단가'] * self.M['첫매가치']) if self.M['평균단가'] else self.M['당일종가']
        # Tomorrow's buy would not be affordable -> take the lower target.
        if (매수수량 * self.M['전일종가']) > self.M['가용잔액'] + self.M['추가자금'] :
            매도단가 = my.round_up(self.M['평균단가']*self.M['둘매가치'])
        if self.M['회복전략'] and self.M['경과일수'] +1 <= self.M['회복기한'] : 매도단가 = my.round_up(self.M['평균단가']* (1+float(self.M['회복전략'])/100))
        if self.M['경과일수']+1 >= self.M['강매시작'] : 매도단가 = my.round_up(self.M['평균단가']*self.M['강매가치'])
        self.M['전매도량'] = self.M['보유수량']
        self.M['전매도가'] = round(매도단가,2)
    def tomorrow_buy(self) :
        """Set tomorrow's buy order (quantity + limit price).

        Day 0: size the base quantity from the daily budget and use the
        wider first-day price band.  Later days: quantity grows with the
        elapsed-day weight; if cash cannot cover it, fall back to the
        emergency multiple of the base quantity, or to 0 if even that is
        unaffordable.
        """
        if self.M['경과일수'] == 0 :
            self.M['기초수량'] = self.M['전매수량'] = my.ceil(self.M['일매수금']/self.M['당일종가'])
            self.M['전매수가'] = round(self.M['당일종가'] * self.M['큰단가치'],2)
            return
        매수단가 = round(self.M['당일종가'] * self.M['평단가치'],2)
        매수수량 = my.ceil(self.M['기초수량'] * (self.M['경과일수']*self.D['비중조절'] + 1))
        if 매수수량 * self.M['당일종가'] > self.M['가용잔액'] + self.M['추가자금'] :
            매수수량 = self.M['기초수량'] * self.M['위매비중']
            self.M['진행상황'] = '매수제한'
            if 매수수량 * self.M['당일종가'] > self.M['가용잔액'] + self.M['추가자금'] :
                매수수량 = 0
                self.M['진행상황'] = '매수금지'
        self.M['전매수량'] = 매수수량
        self.M['전매수가'] = round(매수단가,2)
        self.M['예상금액'] = f"{매수수량 * 매수단가 :,.2f}"
    def today_sell(self) :
        """If holding and today's close reached yesterday's sell limit
        (LD['sub20']), fill the planned sell quantity (LD['sub3'])."""
        if not self.M['경과일수'] : return
        if self.M['당일종가'] >= float(self.M['LD']['sub20']) :
            self.M['매도수량'] = int(self.M['LD']['sub3'])
    def today_buy(self) :
        """If today's close dipped to yesterday's buy limit (LD['sub19']),
        fill the buy: on day 0 size it fresh from the daily budget,
        otherwise use the pre-planned quantity (LD['sub2'])."""
        if not self.M['경과일수'] :
            if self.M['당일종가'] <= float(self.M['LD']['sub19']) :
                self.M['매수수량'] = self.M['기초수량'] = my.ceil(self.M['일매수금']/self.M['전일종가']) # base quantity is re-derived on the first day only
        else :
            if self.M['당일종가'] <= float(self.M['LD']['sub19']) : self.M['매수수량'] = int(self.M['LD']['sub2'])
    def commission(self,mm,opt) :
        """Charge trading fees on amount *mm* and deduct them from the
        balance and reserve.

        opt=1: buy (0.07% brokerage); opt=2: sell (brokerage + 0.0008%
        levy).  NOTE(review): any other opt leaves `fee` unbound and
        raises NameError — callers only ever pass 1 or 2.
        """
        if opt==1 : fee = int(mm*0.07)/100
        if opt==2 :
            m1 = int(mm*0.07)/100
            m2=round(mm*0.0008)/100
            fee = m1+m2
        self.M['수수료등'] = fee
        self.M['현재잔액'] -= fee
        self.M['추가자금'] -= fee
# -----------------------------------------------------------------------------------------------------------------------
# Initiate basic / invest
# -----------------------------------------------------------------------------------------------------------------------
    def initiate_invest(self) :
        """Build the very first board row for a new invest run.

        Reads start date and starting balance from the POST data, splits
        cash 2:1 (usable vs reserve), sizes the first buy from a 22-way
        split of the usable balance, charges the buy fee, and returns the
        fully formatted row as JSON for the client to confirm.
        """
        theDay = self.D['post']['theDay']
        Balance = float(self.D['post']['Balance'].replace(',',''))
        preDay = self.DB.one(f"SELECT max(add0) FROM h_stockHistory_board WHERE add0 < '{theDay}'")
        self.B = {}
        self.M = {}
        # Today's close (SOXL), streaks, and the previous close (OSOX).
        self.DB.clear()
        self.DB.tbl = 'h_stockHistory_board'
        self.DB.wre = f"add0='{theDay}' and add1='SOXL'"; SOXL = float(self.DB.get_one('add3'))
        CUP = self.DB.get_one('add9'); CDN = self.DB.get_one('add10');
        self.DB.wre = f"add0='{preDay}' and add1='SOXL'"; OSOX = float(self.DB.get_one('add3'))
        self.B['add0'] = theDay
        self.B['add1'] = '0.00'; self.B['add2'] = '0.00';
        self.B['sub5'] = CUP; self.B['sub6'] = CDN; self.B['add18'] = '0.00'; self.B['sub7'] = '0.0'
        self.M['가용잔액'] = int((Balance * 2)/3)
        self.M['추가자금'] = Balance - self.M['가용잔액']
        self.M['일매수금'] = int(self.M['가용잔액']/22)
        self.M['기초수량'] = my.ceil(self.M['일매수금']/OSOX)
        self.B['add11'] = self.M['기초수량'] * SOXL; # buy amount
        self.B['add16'] = self.B['add11'] / Balance * 100 # leverage weight %
        self.B['add4'] = 100 - self.B['add16'] # cash weight %
        fee = int(self.B['add11']*0.07)/100
        self.M['추가자금'] -= fee
        self.B['add17'] = Balance - fee
        self.B['add12'] = '0.00' ; self.B['add9'] = self.B['add5'] = self.M['기초수량']; self.B['add8'] = '0.00'
        self.B['add14'] = SOXL ; self.B['add15'] = self.B['add11']
        self.B['add7'] = f"{SOXL:,.4f}"; self.B['sub15'] = '0.00';
        self.B['add19'] = f"{self.M['가용잔액']-self.B['add11']:,.2f}"; self.B['sub11'] = '0.00'; self.B['sub25'] = f"{Balance:,.2f}"; self.B['sub27'] = f"{Balance:,.2f}"
        self.B['add20'] = self.M['추가자금']; self.B['sub26'] = '0.00'; self.B['sub28'] = '0.00'
        self.B['sub1'] = 1; self.B['sub4'] = self.M['일매수금'];
        self.B['sub2'] = my.ceil(self.M['기초수량'] * (1 * 1.25 + 1))
        self.B['sub3'] = self.M['기초수량']
        self.B['sub12'] = 1; self.B['sub18'] = self.M['기초수량'];
        self.B['sub19'] = round(SOXL * 1.022,2)-0.01;
        self.B['sub20'] = self.B['sub19']+0.01
        self.B['sub29'] = '일반매수'; self.B['sub30'] = fee; self.B['sub31'] = fee; self.B['sub32'] = '0'
        # Format the numeric fields && return values
        self.B['add3'] = f"{Balance-self.B['add11']-fee:,.2f}"
        self.B['add11'] = f"{self.B['add11']:,.2f}"
        self.B['add15'] = f"{self.B['add15']:,.2f}"
        self.B['sub19'] = f"{self.B['sub19']:,.2f}"
        self.B['add20'] = f"{self.B['add20']:,.2f}"
        self.B['add4'] = f"{self.B['add4']:.1f}"
        self.B['add16'] = f"{self.B['add16']:.1f}"
        self.B['add17'] = f"{self.B['add17']:,.2f}"
        self.B['sub14'] = self.B['add6'] = self.B['add11']
        return self.json(self.B)
# -----------------------------------------------------------------------------------------------------------------------
# Initiate chance / invest
# -----------------------------------------------------------------------------------------------------------------------
def initiate_chance(self) :
self.B = {}
theDay = self.D['post']['theDay']
preDay = self.DB.one(f"SELECT max(add0) FROM h_stockHistory_board WHERE add0 < '{theDay}'")
현재잔액 = my.sv(self.D['post']['Balance'])
현재수량 = my.sv(self.D['post']['curQty'],'i')
기초수량 = my.sv(self.D['post']['bseQty'],'i')
self.DB.clear()
self.DB.tbl, self.DB.wre = (f"h_{self.target}_board",f"add0='{theDay}'")
TD = self.DB.get_line('add6,add8,add9,add14,sub1,sub2,sub4,sub5,sub6,sub7,sub12,sub18,sub19,sub20')
self.DB.wre = f"add0='{preDay}'"
TO = self.DB.get_line('add14, sub19')
오늘종가 = my.sv(TD['add14'])
어제종가 = my.sv(TO['add14'])
어제매가 = my.sv(TO['sub19'])
타겟일수 = int(TD['sub12'])
if not 현재수량 :
가용잔액 = int(현재잔액 * 2/3)
일매수금 = int(가용잔액/22)
매수비율 = 일매수금 / int(TD['sub4'])
기초수량 = my.ceil(매수비율 * int(TD['sub18']))
# 실제적 로직 시작 ------------------------------------------------------------------------------------------
if 타겟일수 < 2 :
self.B['rsp'] = 0
self.B['msg'] = f"현재 일 수는 {타겟일수}일 이며 필요 일 수(2일 이상)가 충족되지 않았습니다."
return self.json(self.B)
elif 타겟일수 == 2 :
if 오늘종가 <= 어제매가 :
변동수량 = 기초수량
매수금액 = 오늘종가 * 기초수량
내일수량 = 0
# 여기서의 일수는 오늘 일수임, 타겟데이타의 작성 완료 후 찬스데이타를 초기화 하는 것임
for i in range(0,타겟일수+2) : 내일수량 += my.ceil(기초수량 *(i*1.25 + 1))
cp00 = self.take_chance( 0, int(TD['add9']),int(TD['sub2']),float(TD['add6']))
cp22 = self.take_chance(-2.2,int(TD['add9']),int(TD['sub2']),float(TD['add6']))
내일가격 = cp00 if (float(TD['add8']) < -2.2 or float(TD['sub7'])) else cp22
self.B['sub19'] = min(float(TD['sub19']),내일가격)
self.B['sub2'] = 내일수량
else :
self.B['rsp'] = 0
self.B['msg'] = f"종가 기준이 조건을 만족 하지 못하였습니다."
return self.json(self.B)
elif 타겟일수 >= 3 :
if 현재수량 > 기초수량 :
self.B['rsp'] = 0
self.B['msg'] = f"이미 정상 진행 중으로 초기화 작업이 완료된 상태입니다."
return self.json(self.B)
else :
오늘수량 = 0
for i in range(0,타겟일수+1) : 오늘수량 += my.ceil(기초수량 *(i*1.25 + 1))
cp00 = self.take_chance( 0, int(TD['add9']),int(TD['sub2']),float(TD['add6']))
cp22 = self.take_chance(-2.2,int(TD['add9']),int(TD['sub2']),float(TD['add6']))
오늘가격 = cp00 if (float(TD['add8']) < cp22 or float(TD['sub7'])) else cp22
오늘가격 = min(어제매가,오늘가격)
if 오늘종가 <= 오늘가격 :
변동수량 = 오늘수량
매수금액 = 오늘종가 * 변동수량
self.B['sub19'] = TD['sub19'] # 내일 매수 가격
self.B['sub2'] = my.ceil(기초수량 *(타겟일수*1.25 + 1))
# 공통 데이타 및 Formatting
self.B['rsp'] = 1
self.B['add14'] = TD['add14'] #오늘종가
self.B['sub5'] = TD['sub5'] ; self.B['sub6'] = TD['sub6'] # 연속상승, 연속하강
self.B['sub1'] = TD['sub1'] ; self.B['sub12'] = int(TD['sub12'])-1 # 현재시즌, 경과일수
self.B['sub20'] = TD['sub20'] #매도가격
self.B['sub18'] = f"{기초수량:,}"
self.B['add5'] = f"{변동수량:,}"
self.B['add11'] = f"{매수금액:,.2f}"
return self.json(self.B)
def take_chance(self,p,H,n,A) :
if H == 0 : return 0
N = H + n
k = N / (1+p/100)
return round(A/(k-n),2)
| comphys/YHDOCU | apps/stocks/control/boards/invest_guide.py | invest_guide.py | py | 22,401 | python | ko | code | 0 | github-code | 13 |
1290347407 | import heapq
import sys
# Max-heap queries: 0 pops/prints the maximum (or 0 when empty),
# any other value is pushed. Implemented as a min-heap of negated values.
input = sys.stdin.readline
heap = []
for _ in range(int(input())):
    k = int(input())
    if k == 0:
        try:
            print(-heapq.heappop(heap))
        except IndexError:
            # Narrowed from a bare ``except``: only an empty heap is expected.
            print(0)
    else:
        heapq.heappush(heap, -k)
42706902820 | import datetime
import requests
from bs4 import BeautifulSoup
# https://www.learncodewithmike.com/2020/02/python-beautifulsoup-web-scraper.html
def crawl_stock_info(stockCodes, isAddName=True):
    """Scrape the current quote for each TWS stock code from invest.cnyes.com.

    :param stockCodes: iterable of Taiwan stock codes (strings).
    :param isAddName: prepend a human-readable name after the code when True.
    :return: one formatted quote per code, joined with ',\\n'.
    """
    # Code-to-name table; hoisted out of the loop (it never changes per call).
    name_map = {'2330': '台積', '1584': '精剛', '2345': '智邦',
                '2377': '微星', '00642U': '石油', '00635U': '黃金'}
    result = []
    for code in stockCodes:
        response = requests.get(f'https://invest.cnyes.com/twstock/TWS/{code}')
        soup = BeautifulSoup(response.text, "html.parser")
        price = soup.select_one('.info-lp').getText()
        changePrice = soup.select_one('.change-net').getText()
        changepercent = soup.select_one('.change-percent').getText()
        if isAddName:
            # Fall back to the bare code for symbols missing from the table
            # (the original raised KeyError on unknown codes).
            result.append(
                f'{code} {name_map.get(code, code)}: \n {price} {changePrice} {changepercent}\n --------------------------')
        else:
            result.append(
                f'{code}: \n {price} {changePrice} {changepercent}\n --------------------------')
    return ',\n'.join(result)
def crawl_exchage_rate(currency):
    """Scrape ten-day customs exchange rates from portal.sw.nat.gov.tw.

    ``currency`` is a collection of currency codes to keep (e.g. {'CNY'}).
    Returns a display string with the matching ten-day periods, newest
    first within each currency, joined with ',\\n'.
    """
    response = requests.get('https://portal.sw.nat.gov.tw/APGQO/GC331')
    soup = BeautifulSoup(response.text, "html.parser")
    # The landing page pre-selects the latest *published* year/month/ten-day
    # period; read those selections back out of the form.
    yearSelected = soup.select_one('#yearList').find(
        attrs={"selected": "selected"}).getText()
    monthSelected = soup.select_one('#monList').find(
        attrs={"selected": "selected"}).getText()
    tenDaySelected = soup.select_one('#tenDayList').find(
        attrs={"selected": "selected"})['value']
    # print(yearSelected, monthSelected, tenDaySelected)
    req_url = 'https://portal.sw.nat.gov.tw/APGQO/GC331!query?formBean.year={}&formBean.mon={}&formBean.tenDay={}'
    response = requests.post(req_url.format(
        yearSelected, monthSelected, tenDaySelected)).json()
    cny_list = [x for x in response['data'] if x['CRRN_CD'] in currency]
    # Work out which ten-day period "today" falls into: 1 = days 1-10,
    # 2 = days 11-20, 3 = days 21-31.
    now = datetime.datetime.now()
    now_day = now.day
    ten_day = None
    if now_day < 11:
        ten_day = '1'
    elif now_day < 21:
        ten_day = '2'
    elif now_day < 32:
        ten_day = '3'
    if ten_day != tenDaySelected:
        # The current period is newer than the latest published one, so also
        # query the current period and merge both result sets.
        year = str(now.year)
        month = '{:02d}'.format(now.month)
        response2 = requests.post(req_url.format(year, month, ten_day)).json()
        cny_list2 = [x for x in response2['data'] if x['CRRN_CD'] in currency]
        cny_list.extend(cny_list2)
    # Newest first, then grouped by currency code (stable sorts).
    cny_list.sort(reverse=True, key=lambda x: x['UP_DATE'])
    cny_list.sort(key=lambda x: x['CRRN_CD'])
    result = []
    for info in cny_list:
        # Map the TEN_DAY code back to a human-readable day range.
        day = None
        if info['TEN_DAY'] == '1':
            day = '1-10'
        elif info['TEN_DAY'] == '2':
            day = '11-20'
        elif info['TEN_DAY'] == '3':
            day = '21-31'
        result.append(
            f"{info['CRRN_CD']} {info['YEAR']}/{info['MON']} {day}=> {info['IN_RATE']}")
    return ',\n'.join(result)
| YueLung/django_backend | apps/line/module/crawl.py | crawl.py | py | 2,955 | python | en | code | 0 | github-code | 13 |
37973780458 | include.block ( "AmdcMGM/AmdcMGM_jobOptions.py" )
#--------------------------------------------------------------
# AmdcMGM
#--------------------------------------------------------------
# Athena job options: configure the AmdcDumpGeoModel algorithm, which
# compares AMDC muon-chamber positions against the GeoModel description.
from AmdcMGM.AmdcMGMConf import AmdcDumpGeoModel
topSequence += AmdcDumpGeoModel( "AmdcDumpGeoModel00" )
theAmdcDumpGeoModel00 = topSequence.AmdcDumpGeoModel00
#if 1 do nothing
theAmdcDumpGeoModel00.SwitchOff = 1
# WarningLevel:
# if 1 print only warnings,
# if 2 print warnings only for precise coordinates,
# print all otherwise
theAmdcDumpGeoModel00.WarningLevel = 1
# CheckTEC if 1 perform the comparison for TEC
# EpsLoTEC Min value on the position difference to output warning for TEC
# (per-technology comparison switches and tolerances, in mm)
theAmdcDumpGeoModel00.CheckMdt = 1
theAmdcDumpGeoModel00.EpsLoMdt = 0.030
theAmdcDumpGeoModel00.CheckRpc = 1
theAmdcDumpGeoModel00.EpsLoRpc = 0.030
theAmdcDumpGeoModel00.CheckTgc = 1
theAmdcDumpGeoModel00.EpsLoTgc = 0.030
theAmdcDumpGeoModel00.CheckCsc = 1
theAmdcDumpGeoModel00.EpsLoCsc = 0.030
# CenterCscMean:
# if 1 use mean value of strip corners position instead of center for CSC
theAmdcDumpGeoModel00.CenterCscMean = 0
# ChamberSelection:
# if 1 set on Chamber selection by
# AmdcDumpGeoModel.StationNameSelected = [ "BIL" , "EIL" ]
# AmdcDumpGeoModel.StationAbsAmdcJzzSelected = [ 2 , 1 ]
# AmdcDumpGeoModel.StationAmdcJffSelected = [ 1 , 1 ]
theAmdcDumpGeoModel00.ChamberSelection = 0
# StationSelection:
# if 1 set on Station selection by
# AmdcDumpGeoModel.StationSelected = [ "BIL" , "BML", "BMS", "BOL" ]
theAmdcDumpGeoModel00.StationSelection = 0
#if 1 initialize() return FAILURE to speed up analysis
theAmdcDumpGeoModel00.EmergencyOut = 1
| rushioda/PIXELVALID_athena | athena/MuonSpectrometer/Amdcsimrec/AmdcMGM/share/AmdcMGM_jobOptions.py | AmdcMGM_jobOptions.py | py | 1,722 | python | en | code | 1 | github-code | 13 |
14903945028 | # Python version 3.7.6
import os
# Variable to hold command
shlcmd = ""
# Loop condition to check if command is not equals to exit and executes command
while (shlcmd != "exit"):
# Command from user is stored here
shlcmd = input('/myshell:')
# Result of execution of command is stored here
stdout = os.popen(shlcmd.strip())
# Variable to reads each result line from variable stdout
txt = " "
# Loop to print line by line
while txt:
txt = stdout.read()
print(txt)
stdout.close()
print("Goodbye")
| Gbolly007/AdvancedPython | Assignment1/Number1.py | Number1.py | py | 536 | python | en | code | 0 | github-code | 13 |
13014852183 | from functools import lru_cache
import httpx
import pandas as pd
@lru_cache()
def _code_id_map_em() -> dict:
"""
东方财富-股票和市场代码
http://quote.eastmoney.com/center/gridlist.html#hs_a_board
:return: 股票和市场代码
:rtype: dict
"""
url = "http://80.push2.eastmoney.com/api/qt/clist/get"
params = {
"pn": "1",
"pz": "50000",
"po": "1",
"np": "1",
"ut": "bd1d9ddb04089700cf9c27f6f7426281",
"fltt": "2",
"invt": "2",
"fid": "f3",
"fs": "m:1 t:2,m:1 t:23",
"fields": "f12",
"_": "1623833739532",
}
r = httpx.get(url, params=params)
data_json = r.json()
if not data_json["data"]["diff"]:
return dict()
temp_df = pd.DataFrame(data_json["data"]["diff"])
temp_df["market_id"] = 1
temp_df.columns = ["sh_code", "sh_id"]
code_id_dict = dict(zip(temp_df["sh_code"], temp_df["sh_id"]))
params = {
"pn": "1",
"pz": "50000",
"po": "1",
"np": "1",
"ut": "bd1d9ddb04089700cf9c27f6f7426281",
"fltt": "2",
"invt": "2",
"fid": "f3",
"fs": "m:0 t:6,m:0 t:80",
"fields": "f12",
"_": "1623833739532",
}
r = httpx.get(url, params=params)
data_json = r.json()
if not data_json["data"]["diff"]:
return dict()
temp_df_sz = pd.DataFrame(data_json["data"]["diff"])
temp_df_sz["sz_id"] = 0
code_id_dict.update(dict(zip(temp_df_sz["f12"], temp_df_sz["sz_id"])))
params = {
"pn": "1",
"pz": "50000",
"po": "1",
"np": "1",
"ut": "bd1d9ddb04089700cf9c27f6f7426281",
"fltt": "2",
"invt": "2",
"fid": "f3",
"fs": "m:0 t:81 s:2048",
"fields": "f12",
"_": "1623833739532",
}
r = httpx.get(url, params=params)
data_json = r.json()
if not data_json["data"]["diff"]:
return dict()
temp_df_sz = pd.DataFrame(data_json["data"]["diff"])
temp_df_sz["bj_id"] = 0
code_id_dict.update(dict(zip(temp_df_sz["f12"], temp_df_sz["bj_id"])))
return code_id_dict
class StockZhAHist:
    """Fetch daily/weekly/monthly A-share K-line history from 东方财富."""
    # Chinese display column names exposed via the ``columns`` property.
    _columns = [
        '日期', "股票代码", '开盘', '收盘', '最高', '最低', '成交量',
        '成交额', '振幅', '涨跌幅', '涨跌额', '换手率'
    ]
    _url = "https://quote.eastmoney.com/sh601658.html"
    _desc = "东方财富-股票行情-日行情"
    def __init__(self,
                 symbols: str | list[str] = "000001",
                 period: str = "daily",
                 start_date: str = "19700101",
                 end_date: str = "20500101",
                 adjust: str = "",
                 *args, **kwargs
                 ):
        """Store query parameters; ``adjust`` is '' / 'qfq' / 'hfq'.

        Extra keyword arguments recognised later: ``timeout`` and
        ``proxies`` (forwarded to httpx).
        """
        self.symbols = symbols
        self.period = period
        self.start_date = start_date
        self.end_date = end_date
        self.adjust = adjust
        self.extra_args = args
        self.extra_kwargs = kwargs
    def _fetch_data(self, *args, **kwargs):
        """Download raw kline rows for every symbol into one DataFrame."""
        code_id_dict = _code_id_map_em()
        adjust_dict = {"qfq": "1", "hfq": "2", "": "0"}
        period_dict = {"daily": "101", "weekly": "102", "monthly": "103"}
        url = "http://push2his.eastmoney.com/api/qt/stock/kline/get"
        big_df = pd.DataFrame()
        # NOTE(review): normalising a str into a list mutates self.symbols
        # as a side effect; isinstance() would be the idiomatic check.
        if type(self.symbols) == str:
            self.symbols = [self.symbols]
        for symbol in self.symbols:
            params = {
                "fields1": "f1,f2,f3,f4,f5,f6",
                "fields2": "f51,f52,f53,f54,f55,f56,f57,f58,f59,f60,f61,f116",
                "ut": "7eea3edcaed734bea9cbfc24409ed989",
                "klt": period_dict[self.period],
                "fqt": adjust_dict[self.adjust],
                "secid": f"{code_id_dict[symbol]}.{symbol}",
                "beg": self.start_date,
                "end": self.end_date,
                "_": "1623766962675",
            }
            timeout = self.extra_kwargs.get("timeout", None)
            proxies = self.extra_kwargs.get("proxies", None)
            r = httpx.get(url, params=params, timeout=timeout, proxies=proxies)
            data_json = r.json()
            # An empty answer for ANY symbol aborts the whole fetch.
            if not (data_json["data"] and data_json["data"]["klines"]):
                return pd.DataFrame()
            temp_df = pd.DataFrame(
                [item.split(",") for item in data_json["data"]["klines"]]
            )
            temp_df['symbol'] = symbol
            big_df = pd.concat([big_df, temp_df], ignore_index=True)
        return big_df
    def _process_data(self, *args, **kwargs):
        """Name the raw columns, reorder them, and coerce numerics."""
        fetched_data = self._fetch_data()
        fetched_data.columns = [
            'date', 'open', 'close', 'high', 'low', 'volume',
            'turnover', 'amplitude', 'price_change_rate', 'price_change', 'turnover_rate', 'symbol'
        ]
        fetched_data.index = pd.to_datetime(fetched_data["date"])
        fetched_data.reset_index(inplace=True, drop=True)
        fetched_data = fetched_data[[
            'date', 'symbol', 'open', 'close', 'high', 'low', 'volume',
            'turnover', 'amplitude', 'price_change_rate', 'price_change', 'turnover_rate'
        ]]
        # Everything after date/symbol is numeric; bad cells become NaN.
        for _column in fetched_data.columns[2:]:
            fetched_data[_column] = pd.to_numeric(fetched_data[_column], errors="coerce")
        return fetched_data
    @property
    def data(self) -> pd.DataFrame:
        """Return the processed history as a DataFrame (fetches on access)."""
        return self._process_data(self)
    @property
    def columns(self) -> list:
        """Return the Chinese display column names."""
        return self._columns
    @property
    def url(self) -> str:
        # Reference page for this data source.
        return self._url
    @property
    def desc(self) -> str:
        # Human-readable description of the data source.
        return self._desc
    def __str__(self):
        return "通过调用 .data() 方法返回数据;通过调用 .columns 属性返回数据的列名"
    def __call__(self, *args, **kwargs):
        # NOTE(review): fetches but discards the result — presumably meant
        # to return self._process_data(self); confirm before relying on it.
        self._process_data(self)
if __name__ == '__main__':
    # Manual smoke test: fetch hfq-adjusted daily history for a few symbols
    # through a local proxy and print the result plus the query parameters.
    stock_zh_a_hist = StockZhAHist
    proxies = {
        "http://": "http://127.0.0.1:7890",
        "https://": "http://127.0.0.1:7890",
    }
    stock_zh_a_hist_obj = stock_zh_a_hist(
        symbols=["430090", "000001", "000002", "600000", "600001"],
        period="daily",
        start_date="20000516",
        end_date="20220722",
        adjust="hfq",
        timeout=2.111,
        proxies=proxies
    )
    print(stock_zh_a_hist_obj.data)
    print(stock_zh_a_hist_obj.columns)
    print(stock_zh_a_hist_obj.url)
    print(stock_zh_a_hist_obj.desc)
    print(stock_zh_a_hist_obj.symbols)
    print(stock_zh_a_hist_obj.start_date)
    print(stock_zh_a_hist_obj.end_date)
    print(stock_zh_a_hist_obj.adjust)
| albertandking/aklite | src/aklite/stock/stock_hist_em.py | stock_hist_em.py | py | 6,725 | python | en | code | 1 | github-code | 13 |
34803066698 | import asyncio, random
import os, io, gettext
import time
from hangupsbot.utils import strip_quotes, text_to_segments
from hangupsbot.commands import command
import appdirs
### NOTAS ###
@command.register
def recuerda(bot, event, *args):
    """Guarda un mensaje en la libreta de notas\nUso: <bot> recuerda [nota]"""
    arg = ' '.join(args)
    # Per-user notes file under the hangupsbot data directory.
    dirs = appdirs.AppDirs('hangupsbot', 'hangupsbot')
    nota = str(os.path.join(dirs.user_data_dir)) + "/" + str(event.user_id.chat_id) + ".txt"
    if not os.path.isfile(nota):
        os.mknod(nota)
    # Bug fix: the original opened in 'r+' mode, which writes from position 0
    # and clobbers the start of the file; append so every note is preserved.
    with open(nota, 'a') as f:
        s = time.ctime()
        f.write(str((s + '\n[{}]\n' + '{}' + '\n\n').format(event.user.full_name, arg)))
    yield from event.conv.send_message(text_to_segments('Guardado'))
@command.register
def notas(bot, event, *args):
    """Muestra las notas guardadas \n Uso: <bot> notas"""
    # Per-user notes file under the hangupsbot data directory.
    dirs = appdirs.AppDirs('hangupsbot', 'hangupsbot')
    nota = str(os.path.join(dirs.user_data_dir)) + "/" + str(event.user_id.chat_id) + ".txt"
    # Create the file on first use (the original assigned a meaningless
    # placeholder when it already existed).
    if not os.path.isfile(nota):
        os.mknod(nota)
    # _() is applied per accumulated line, as before, to preserve the
    # original gettext behaviour.
    with open(nota, 'r') as f:
        text = 'Notas:\n'
        for line in f:
            text = _(text + line)
    yield from event.conv.send_message(text_to_segments(text))
@command.register(admin=True)
def deletenotas(bot, event, *args):
    """Borra la libreta de notas (Solo admins)\n Uso: <bot> deletenotas"""
    # Per-user notes file under the hangupsbot data directory.
    dirs = appdirs.AppDirs('hangupsbot', 'hangupsbot')
    nota = str(os.path.join(dirs.user_data_dir)) + "/" + str(event.user_id.chat_id) + ".txt"
    if not os.path.isfile(nota):
        os.mknod(nota)
    # 'w' truncates the file; keep the original single-space payload.
    # (Removed the unused ``arg`` local from the original.)
    with open(nota, 'w') as f:
        f.write(' ')
    yield from event.conv.send_message(text_to_segments('Borradas todas las notas'))
| Pyrus01/Hangupsfork | hangupsbot/commands/notas.py | notas.py | py | 1,866 | python | es | code | 0 | github-code | 13 |
38850158445 | import turtle as t
def makeSquare(posX=228, posY=297, angle=0, cucolor="brown"):
    """Draw a filled 100x100 square at (posX, posY).

    ``cucolor`` == "blue" fills dodger blue, anything else saddle brown.
    ``angle`` is currently unused.
    """
    fill = "dodger blue" if cucolor == "blue" else "saddle brown"
    t.color(fill)
    t.goto(posX, posY)
    t.begin_fill()
    for _ in range(4):
        t.forward(100)
        t.right(90)
    t.end_fill()
    print("hi")
def skyControler(angle=0, height=0):
    """Placeholder for the sky renderer; not implemented yet.

    NOTE: currently prints a work-in-progress marker and calls quit(),
    which terminates the whole program.
    """
    #clear()
    #in work
    print("in work")
    quit()
#berich in dem angezeigt werden soll
# Überreste beim drehen dur 4 4ecke in schwarz überschreichen
# gößrere 4ecke und dann rest abkatten <- Nicht errichbare stellen bei drehungen | LunaDEV-net/23-1_Python-course | pfd-airplane/fakeSky.py | fakeSky.py | py | 605 | python | de | code | 1 | github-code | 13 |
32449443239 | import os
from flask import Flask, render_template
from alexandria.extensions import db, migrate, bcrypt, login_manager
from alexandria.models import users, documentlinks
def create_app(config_setting='dev'):
    """An application factory, as explained here:
    http://flask.pocoo.org/docs/patterns/appfactories/

    :param config_setting: 'prod', 'test', or anything else for the
        development defaults.
    """
    app = Flask(__name__)

    # Pick the configuration object matching the requested environment.
    if config_setting == 'prod':
        from alexandria.settings import ProductionConfig
        app.config.from_object(ProductionConfig)
    elif config_setting == 'test':
        from alexandria.settings import TestConfig
        app.config.from_object(TestConfig)
    else:
        from alexandria.settings import DevelopmentConfig
        app.config.from_object(DevelopmentConfig)

    register_extensions(app)
    register_blueprints(app)
    register_errorhandlers(app)
    return app
def register_extensions(app):
    """Bind each Flask extension instance to *app*."""
    for extension in (bcrypt, db, login_manager):
        extension.init_app(app)
    # Migrate additionally needs the SQLAlchemy handle.
    migrate.init_app(app, db)
    return None
def register_blueprints(app):
    """Attach all view blueprints to *app*.

    Views are imported inside the function to avoid circular imports.
    """
    from alexandria.views import users
    app.register_blueprint(users.blueprint)

    from alexandria.views import public
    app.register_blueprint(public.blueprint)

    from alexandria.views import documents
    app.register_blueprint(documents.blueprint)
    return None
def register_errorhandlers(app):
    """Install templated handlers for the 401, 404 and 500 responses."""
    def render_error(error):
        # Default to 500 when the exception carries no HTTP status code.
        status = getattr(error, 'code', 500)
        return render_template("{0}.html".format(status)), status

    for status in (401, 404, 500):
        app.errorhandler(status)(render_error)
    return None
35452354883 | #! /usr/bin/python
__author__ = "Isa Bostan"
__email__ = "isabostan@gmail.com"
__status__ = "Assignment"
import cv2
import numpy as np
source = cv2.imread("sample.jpg")
cropping = False
cv2.namedWindow("Window")
x1, y1, x2, y2 = 0, 0, 0, 0
def mouse_cropping(event, x, y, flags, userdata):
    """OpenCV mouse callback: drag a rectangle and save the crop.

    Updates the module-level drag state (x1, y1, x2, y2, cropping); on
    button-up, draws the rectangle on ``source``, writes the cropped
    region to face.png and shows it in a "Cropped" window.
    """
    try:
        global x1, y1, x2, y2, cropping
        if event == cv2.EVENT_LBUTTONDOWN:
            # Start the drag: anchor both corners at the click point.
            x1, y1, x2, y2 = x, y, x, y
            cropping = True
            cv2.circle(source, (x1,y1), 1, (255, 0, 255), 2, cv2.LINE_AA)
        elif event == cv2.EVENT_MOUSEMOVE:
            if cropping == True:
                # Track the moving corner while the button is held.
                x2,y2 = x,y
        elif event == cv2.EVENT_LBUTTONUP:
            x2,y2 = x,y
            cropping = False
            points = [(x1,y1),(x2,y2)]
            cv2.rectangle(source,points[0],points[1],(255,0,255),thickness=3,lineType=cv2.LINE_AA)
            # NOTE(review): a drag towards the upper-left yields an empty
            # slice here (start index > end index) — confirm intended.
            if len(points)==2:
                print("cropping...")
                crop = source[points[0][1]:points[1][1],points[0][0]:points[1][0]]
                cv2.imwrite("face.png",crop)
                cv2.imshow("Cropped", crop)
    except Exception as ex:
        print("Exception:",ex)
# Main loop: redraw the instruction overlay and the live drag rectangle
# until the user presses Escape (key code 27).
try:
    cv2.setMouseCallback("Window", mouse_cropping)
    k = 0
    # loop until escape character is pressed
    while k!=27:
        # Draw the in-progress rectangle on a copy so it does not persist.
        dummy = source.copy()
        cv2.putText(source,'''Choose center, and drag, 
            Press ESC to exit and c to clear''' ,
              (10,30), cv2.FONT_HERSHEY_SIMPLEX, 
              0.7,(255,255,255), 2 )
        if not cropping:
            cv2.imshow("Window", source)
        elif cropping:
            cv2.rectangle(dummy, (x1, y1), (x2, y2), (255, 0, 255), 3)
            cv2.imshow("Window", dummy)
        k = cv2.waitKey(20) & 0xFF
    cv2.destroyAllWindows()
except Exception as ex:
    print("Exception",ex)
| rockcastle/My-OpenCV-Assignments | Assignment_2_Create_a_Face_Annotation_Tool/assingment-mouse.py | assingment-mouse.py | py | 1,849 | python | en | code | 1 | github-code | 13 |
39640300024 | import asyncio
import socket
import sys
import time
from threading import Thread
from textwrap import dedent
from discord import Client, User
from addresses import login_address
from db import add_user, clear_users, get_users, remove_user
from secrets import secrets
if '--debug' in sys.argv:
__DEBUG__ = True
elif '-d' in sys.argv:
__DEBUG__ = True
else:
__DEBUG__ = False
delay = 5 # 5 seconds
def check(address):
    """Return True if a TCP connection to ``address`` ("host:port") succeeds
    within half a second, False otherwise."""
    addr = address.split(':')[0]
    port = address.split(':')[1]
    try:
        # 0.5 second timeout; the with-block guarantees the socket closes.
        with socket.create_connection((addr, port), 0.5):
            return True
    except OSError:
        # Narrowed from a bare ``except``: connection failures raise OSError
        # subclasses; anything else (e.g. KeyboardInterrupt) should propagate.
        return False
client = Client()
client.login(secrets.email, secrets.password)
@asyncio.coroutine
@client.event
def on_alert():
    """Custom event: PM every subscribed user, then clear the subscriber list."""
    users = get_users()
    for user in users:
        # NOTE(review): rebuilds a minimal User from the stored id only —
        # presumably the other constructor fields are unused by
        # send_message; confirm against the discord.py version in use.
        user = User('', user, '', '')
        client.send_message(user, 'MapleStory is back online!')
    clear_users()
@asyncio.coroutine
@client.event
def on_message(message):
    """Handle PM commands ("cancel") and @mention commands ("subscribe", "help")."""
    if message.channel.is_private:
        if message.content == 'cancel':
            # Private "cancel" removes the sender from the alert list.
            remove_user(message.author)
            client.send_message(
                message.author,
                'You have been removed from the alert list.'
            )
    elif any([m == client.user for m in message.mentions]):
        # Only react to messages that mention this bot.
        if 'subscribe' in message.content:
            add_user(message.author)
            client.send_message(
                message.author,
                dedent(
                    '''
                    You are now on the alert list.
                    Type "cancel" to remove yourself from the list.
                    '''
                ).strip()
            )
        if 'help' in message.content:
            client.send_message(
                message.channel,
                dedent(
                    '''
                    This bot will PM you when MapleStory is back online.
                    All inquiries should be sent to Reticence via PM.
                    Usage:
                    @Maple Alert subscribe
                    @Maple Alert help
                    '''
                ).strip()
            )
def check_servers():
    """Poll the login servers forever; dispatch 'alert' when any responds."""
    while True:
        # Also gives the discord client time to connect before the first poll.
        time.sleep(delay)
        online = any([check(addr) for addr in login_address])
        if online:
            if __DEBUG__:
                print('maple online')
            client.dispatch('alert')
        else:
            if __DEBUG__:
                print('not online')
# Run the poller on a daemon thread so it never blocks interpreter exit;
# the main thread is then handed to the discord client loop.
checker = Thread(target=check_servers)
checker.daemon = True
checker.start()
client.run() | ReticentIris/Maple-Alert | bot.py | bot.py | py | 2,704 | python | en | code | 0 | github-code | 13 |
42441173856 | import time
is_error_now = False
valArray = []
maxSize = 24
def AddValue(val):
    """Record *val* with the current timestamp, capped at maxSize samples."""
    valArray.append((time.time(), val))
    # Drop the oldest sample once over capacity.
    if len(valArray) > maxSize:
        valArray.pop(0)
def Length():
    """Return the number of stored (timestamp, value) samples."""
    return len(valArray)
def Get(idx):
    """Return the (timestamp, value) tuple at position *idx*."""
    return valArray[idx]
def GetAll():
    """Return the underlying list of (timestamp, value) samples."""
    return valArray
def GetArray():
    """Return just the stored values, in insertion order.

    Replaces the original manual index loop with a comprehension.
    """
    return [val for _, val in valArray]
def GetTimes():
    """Return just the sample timestamps, in insertion order.

    Replaces the original manual index loop with a comprehension.
    """
    return [ts for ts, _ in valArray]
def DumpValues():
    """Debug helper: print the raw sample list to stdout."""
    print(valArray)
def Save(filename):
    """Write all samples to *filename* as "timestamp,value" CSV lines.

    Returns True on success; on OSError prints a message, sets the
    module-level is_error_now flag and returns False.
    """
    global is_error_now
    try:
        with open(filename, "w") as file:
            for ts, val in valArray:
                file.write(str(ts) + ',' + str(val) + '\n')
    except OSError as e:
        # Bug fix: the original assigned a *local* is_error_now, so the
        # module-level error flag was never actually set.
        is_error_now = True
        print("Error save " + filename)
        return False
    return True
def Load(filename):
    """Replace valArray with samples parsed from *filename*.

    Each line is "timestamp,value"; malformed lines without a comma are
    skipped.  Returns True on success; on OSError prints a message, sets
    the module-level is_error_now flag and returns False.
    """
    global valArray, is_error_now
    try:
        with open(filename, 'r') as file:
            valArray = []
            for line in file:
                vals = line.split(',')
                if len(vals) > 1:
                    # Bug fix: timestamps are written as floats by Save()
                    # (time.time()), but the original parsed them with
                    # int(), which raised ValueError on its own output.
                    valArray.append((float(vals[0]), float(vals[1])))
    except OSError as e:
        # Bug fix: set the *module-level* error flag (the original created
        # a local variable instead).
        is_error_now = True
        print("Error load " + filename)
        return False
    return True
# unit test: load templog.csv if present and print its values; otherwise
# generate ten samples and write the file for the next run.
if __name__=='__main__':
    if Load('templog.csv'):
        rr = GetArray()
        print(rr)
    else:
        idx = 1
        while idx <= 10:
            n = idx * 111
            AddValue(n)
            idx += 1
        Save('templog.csv')
| yorkwoo/pico_display | pico_lcd/valarray.py | valarray.py | py | 1,968 | python | en | code | 0 | github-code | 13 |
28638049383 | import os
import random
import time
import traceback
from concurrent import futures
from google.auth.exceptions import DefaultCredentialsError
import grpc
import google.oauth2.id_token
import google.auth.transport.requests
import google.auth.transport.grpc
# import google.auth.credentials.Credentials
# from google.auth.transport import grpc as google_auth_transport_grpc
import demo_pb2
import demo_pb2_grpc
from grpc_health.v1 import health_pb2
from grpc_health.v1 import health_pb2_grpc
from logger import getJSONLogger
logger = getJSONLogger('recommendationservice-server')
def check_and_refresh():
    """Re-fetch the ID token and rebuild the catalog stub when it expires.

    Rebinds the module-level ``credentials`` and ``product_catalog_stub``
    so subsequent calls use a fresh authenticated channel.
    """
    ####
    # Method 2
    global credentials
    global product_catalog_stub
    if credentials.expired:
        request1 = google.auth.transport.requests.Request()
        # Audience is the catalog service host (port stripped).
        target_audience1 = "https://{}/".format(catalog_addr.partition(":")[0])
        credentials = google.oauth2.id_token.fetch_id_token_credentials(target_audience1, request=request1)
        credentials.refresh(request1)
        id_token1 = credentials.token
        # Wrap the token in per-call credentials over TLS and rebuild the stub.
        tok1 = grpc.access_token_call_credentials(id_token1)
        ccc1 = grpc.composite_channel_credentials(grpc.ssl_channel_credentials(), tok1)
        channel1 = grpc.secure_channel(catalog_addr,ccc1)
        product_catalog_stub = demo_pb2_grpc.ProductCatalogServiceStub(channel1)
    ####
####
class RecommendationService(demo_pb2_grpc.RecommendationServiceServicer):
    """gRPC servicer returning up to five random product recommendations."""

    def ListRecommendations(self, request, context):
        """Return up to 5 product ids not already listed in the request."""
        try:
            logger.info("Entered into ListRecommendations()")
            max_responses = 5
            # Refresh the ID token if it has expired.
            check_and_refresh()
            # Fetch the catalog and exclude products already in the request.
            cat_response = product_catalog_stub.ListProducts(demo_pb2.Empty())
            product_ids = [x.id for x in cat_response.products]
            filtered_products = list(set(product_ids) - set(request.product_ids))
            num_return = min(max_responses, len(filtered_products))
            # Pick a random subset of the remaining products.
            prod_list = random.sample(filtered_products, num_return)
            logger.info("[Recv ListRecommendations] product_ids={}".format(prod_list))
            # build and return response
            response = demo_pb2.ListRecommendationsResponse()
            response.product_ids.extend(prod_list)
            logger.info("Exited into ListRecommendations()")
            return response
        except Exception as e:
            # Bug fix: the original returned a bare list ([]), which is not a
            # valid gRPC response message; return an empty response instead,
            # and log with the traceback attached.
            logger.exception("ListRecommendations error={}".format(e))
            return demo_pb2.ListRecommendationsResponse()

    def Check(self, request, context):
        """Health check: this service is always SERVING."""
        return health_pb2.HealthCheckResponse(
            status=health_pb2.HealthCheckResponse.SERVING)

    def Watch(self, request, context):
        """Streaming health watch is not implemented."""
        return health_pb2.HealthCheckResponse(
            status=health_pb2.HealthCheckResponse.UNIMPLEMENTED)
if __name__ == "__main__":
logger.info("initializing recommendationservice")
port = os.environ.get('PORT', "8080")
catalog_addr = os.environ.get('PRODUCT_CATALOG_SERVICE_ADDR', '')
if catalog_addr == "":
raise Exception('PRODUCT_CATALOG_SERVICE_ADDR environment variable not set')
logger.info("product catalog address: " + catalog_addr)
# ######
# # Method 1 !!!Changes!!!
# credentials, _ = google.auth.default()
# request = google.auth.transport.requests.Request()
# target_audience = "https://{}/".format(catalog_addr.partition(":")[0])
# id_token = google.oauth2.id_token.fetch_id_token(request, target_audience)
# # req.add_header("Authorization", f"Bearer {id_token}")
# tok = grpc.access_token_call_credentials(id_token)
# ccc = grpc.composite_channel_credentials(grpc.ssl_channel_credentials(), tok)
# channel = grpc.secure_channel(catalog_addr,ccc)
# product_catalog_stub = demo_pb2_grpc.ProductCatalogServiceStub(channel)
# #####
####
# Method 2
request = google.auth.transport.requests.Request()
target_audience = "https://{}/".format(catalog_addr.partition(":")[0])
credentials = google.oauth2.id_token.fetch_id_token_credentials(target_audience, request=request)
credentials.refresh(request)
id_token = credentials.token
tok = grpc.access_token_call_credentials(id_token)
ccc = grpc.composite_channel_credentials(grpc.ssl_channel_credentials(), tok)
channel = grpc.secure_channel(catalog_addr,ccc)
product_catalog_stub = demo_pb2_grpc.ProductCatalogServiceStub(channel)
####
# create gRPC server
server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
# add class to gRPC server
service = RecommendationService()
demo_pb2_grpc.add_RecommendationServiceServicer_to_server(service, server)
health_pb2_grpc.add_HealthServicer_to_server(service, server)
# start server
logger.info("listening on port: " + port)
server.add_insecure_port('[::]:'+port)
server.start()
# keep alive
try:
while True:
time.sleep(10000)
except KeyboardInterrupt:
server.stop(0)
| cc4i/boutique-on-cloudrun | demo/src/recommendationservice/recommendation_server.py | recommendation_server.py | py | 5,115 | python | en | code | 1 | github-code | 13 |
32212187820 | import heapq
import sys
INF=int(1e9)
T=int(sys.stdin.readline())
def dij(start):
    """Dijkstra from *start* over the global adjacency list ``graph``
    (nodes 1..n); returns the distance list of length n + 1."""
    distance = [INF] * (n + 1)
    distance[start] = 0
    pending = [(0, start)]
    while pending:
        dist, node = heapq.heappop(pending)
        if distance[node] < dist:
            continue  # stale queue entry
        for nxt, weight in graph[node]:
            cand = dist + weight
            if cand < distance[nxt]:
                distance[nxt] = cand
                heapq.heappush(pending, (cand, nxt))
    return distance
# BOJ 9370: for each test case, find the proposed destinations whose
# shortest path from start passes through BOTH g and h (in either order).
for i in range(T):
    n,m,t=map(int,sys.stdin.readline().split())
    start, g,h=map(int,sys.stdin.readline().split())
    # Undirected weighted graph as an adjacency list.
    graph=[[] for i in range(n+1)]
    for j in range(m):
        a,b,c=map(int,sys.stdin.readline().split())
        graph[a].append([b,c])
        graph[b].append([a,c])
    site_proposed=[]
    for z in range(t):
        site_proposed.append(int(sys.stdin.readline()))
    # Three single-source runs are enough to test the g-h edge condition.
    point_s=dij(start)
    point_g=dij(g)
    point_h=dij(h)
    answer=[]
    for w in site_proposed:
        # w qualifies if start->g->h->w or start->h->g->w equals the
        # direct shortest distance start->w.
        if point_s[g]+point_g[h]+point_h[w]==point_s[w] or point_s[h]+point_h[g]+point_g[w]==point_s[w]:
            answer.append(w)
    answer=sorted(answer)
    for z in answer:
        print(z,end=" ")
    print()
| BlueScreenMaker/333_Algorithm | 백업/220604~230628/BackJoon/9370.py | 9370.py | py | 1,194 | python | en | code | 0 | github-code | 13 |
39660720004 | import pandas as pd
import numpy as np
import config
import process_survey as ps
# Load survey data to memory
hh = ps.load_data(config.household_file)
person = ps.load_data(config.person_file)
# Add household records to person file
person_hh = ps.join_hh2per(person, hh)
# Create instances of summary class
perhh = ps.Person(person_hh)
# Get some basic descriptive tables
hhsize = hh.groupby("hhsize").sum()['expwt_final']
numadults = hh.groupby("numadults").sum()['expwt_final']
numchildren = hh.groupby("numchildren").sum()['expwt_final']
numworkers = hh.groupby("numworkers").sum()['expwt_final']
hh_inc_detailed_imp = hh.groupby("hh_income_detailed_imp").sum()['expwt_final'] # imputed income
vehs = hh.groupby("vehicle_count").sum()['expwt_final']
county = hh.groupby("h_county_name").sum()['expwt_final']
| psrc/travel-studies | 2014/region/summary/scripts/household.py | household.py | py | 818 | python | en | code | 5 | github-code | 13 |
31767509299 | import os
# os.environ['CUDA_LAUNCH_BLOCKING'] = '1'
import gc
import time
import torch
import numpy as np
import torch.nn as nn
import torch.backends.cudnn as cudnn
import torch.utils.data as data
from torch.optim import lr_scheduler
from torch.utils.data import ConcatDataset
from dataset import SynthText, TotalText, Ctw1500Text,\
Mlt2017Text, TD500Text, TD500HUSTText, ArtTextJson, Mlt2019Text, TotalText_New, ArtText, MLTTextJson, TotalText_mid, Ctw1500Text_mid, TD500HUSTText_mid, ALLTextJson, ArtTextJson_mid
from network.loss import TextLoss, knowledge_loss
from network.textnet import TextNet
from util.augmentation import Augmentation
from cfglib.config import config as cfg, update_config, print_config
from util.misc import AverageMeter
from util.misc import mkdirs, to_device
from cfglib.option import BaseOptions
from util.visualize import visualize_network_output
from util.summary import LogSummary
from util.shedule import FixLR
# import multiprocessing
# multiprocessing.set_start_method("spawn", force=True)
lr = None
train_step = 0
def save_model(model, epoch, lr, optimzer):
    """Checkpoint *model* under cfg.save_dir/cfg.exp_name as
    MixNet_<backbone>_<epoch>.pth (optimizer state is not saved)."""
    out_dir = os.path.join(cfg.save_dir, cfg.exp_name)
    if not os.path.exists(out_dir):
        mkdirs(out_dir)
    ckpt_path = os.path.join(out_dir, 'MixNet_{}_{}.pth'.format(model.backbone_name, epoch))
    print('Saving to {}.'.format(ckpt_path))
    # Unwrap DataParallel when training on multiple GPUs.
    weights = model.module.state_dict() if cfg.mgpu else model.state_dict()
    state_dict = {
        'lr': lr,
        'epoch': epoch,
        'model': weights
        # 'optimizer': optimzer.state_dict()
    }
    torch.save(state_dict, ckpt_path)
def load_model(model, model_path):
    """Load checkpoint weights into *model*, retrying non-strictly when
    the state dict has missing/unexpected keys."""
    print('Loading from {}'.format(model_path))
    checkpoint = torch.load(model_path)
    try:
        model.load_state_dict(checkpoint['model'])
    except RuntimeError as err:
        print("Missing key in state_dict, try to load with strict = False")
        model.load_state_dict(checkpoint['model'], strict = False)
        print(err)
def _parse_data(inputs):
    """Move a raw dataloader batch onto the device and key it by field name.

    The first nine batch entries are fixed; entry 9 (and 10) are only
    produced by the embed / mid dataset variants, gated by cfg flags.
    """
    tensors = [to_device(item) for item in inputs]
    field_names = ['img', 'train_mask', 'tr_mask', 'distance_field',
                   'direction_field', 'weight_matrix', 'gt_points',
                   'proposal_points', 'ignore_tags']
    input_dict = dict(zip(field_names, tensors))
    if cfg.embed:
        input_dict['edge_field'] = tensors[9]
    if cfg.mid:
        # When both flags are set, 'edge_field' is intentionally taken from
        # slot 10 (the mid datasets append gt_mid_points before edge_field).
        input_dict['gt_mid_points'] = tensors[9]
        input_dict['edge_field'] = tensors[10]
    return input_dict
def train(model, train_loader, criterion, scheduler, optimizer, epoch):
    """Run one training epoch of MixNet and apply the checkpoint policy.

    Args:
        model: TextNet (possibly DataParallel-wrapped), already on device.
        train_loader: yields raw batches consumed by `_parse_data`.
        criterion: loss module returning a dict with a "total_loss" entry.
        scheduler: LR scheduler; stepped by the caller, only queried here.
        optimizer: optimizer over model.parameters().
        epoch: current epoch index (also fed to the loss as eps=epoch+1).
    """
    global train_step  # iteration counter shared across epochs
    # Running averages for logging.
    losses = AverageMeter()
    batch_time = AverageMeter()
    data_time = AverageMeter()
    end = time.time()
    model.train()
    # scheduler.step()
    print('Epoch: {} : LR = {}'.format(epoch, scheduler.get_lr()))
    for i, inputs in enumerate(train_loader):
        data_time.update(time.time() - end)
        train_step += 1
        input_dict = _parse_data(inputs)
        output_dict = model(input_dict)
        loss_dict = criterion(input_dict, output_dict, eps=epoch+1)
        loss = loss_dict["total_loss"]
        # backward
        optimizer.zero_grad()
        loss.backward()
        if cfg.grad_clip > 0:
            torch.nn.utils.clip_grad_norm_(model.parameters(), cfg.grad_clip)
        optimizer.step()
        losses.update(loss.item())
        # measure elapsed time
        batch_time.update(time.time() - end)
        end = time.time()
        # Occasionally dump network outputs for visual inspection.
        if cfg.viz and (i % cfg.viz_freq == 0 and i > 0) and epoch % 8 == 0:
            visualize_network_output(output_dict, input_dict, mode='train')
        if i % cfg.display_freq == 0:
            gc.collect()
            print_inform = "({:d} / {:d}) ".format(i, len(train_loader))
            for (k, v) in loss_dict.items():
                print_inform += " {}: {:.4f} ".format(k, v.item())
            print(print_inform)
    # Checkpoint policy depends on which dataset is being trained.
    if cfg.exp_name == 'Synthtext' or cfg.exp_name == 'ALL' or cfg.exp_name == "preSynthMLT" or cfg.exp_name == "preALL":
        print("save checkpoint for pretrain weight. ")
        if epoch % cfg.save_freq == 0:
            save_model(model, epoch, scheduler.get_lr(), optimizer)
    elif cfg.exp_name == 'MLT2019' or cfg.exp_name == 'ArT' or cfg.exp_name == 'MLT2017':
        # Long runs: save half as often during the first epochs.
        if epoch < 10 and cfg.max_epoch >= 200:
            if epoch % (2*cfg.save_freq) == 0:
                save_model(model, epoch, scheduler.get_lr(), optimizer)
        else:
            if epoch % cfg.save_freq == 0:
                save_model(model, epoch, scheduler.get_lr(), optimizer)
    else:
        # Fine-tuning datasets: only keep late checkpoints.
        if epoch % cfg.save_freq == 0 and epoch > 150:
            save_model(model, epoch, scheduler.get_lr(), optimizer)
    print('Training Loss: {}'.format(losses.avg))
def knowledgetrain(model, knowledge, train_loader, criterion, know_criterion, scheduler, optimizer, epoch):
    """Run one training epoch with knowledge distillation from a teacher.

    Same as `train`, except that a frozen `knowledge` (teacher) network is
    run on each batch and a distillation loss between the student's and the
    teacher's "image_feature" outputs is added to the total loss.

    Args:
        model: student TextNet being optimized.
        knowledge: frozen teacher TextNet, called with knowledge=True.
        know_criterion: distillation loss over the feature maps.
        (remaining arguments as in `train`.)
    """
    global train_step  # iteration counter shared across epochs
    losses = AverageMeter()
    batch_time = AverageMeter()
    data_time = AverageMeter()
    end = time.time()
    model.train()
    # scheduler.step()
    print('Epoch: {} : LR = {}'.format(epoch, scheduler.get_lr()))
    for i, inputs in enumerate(train_loader):
        data_time.update(time.time() - end)
        train_step += 1
        input_dict = _parse_data(inputs)
        output_dict = model(input_dict)
        # Teacher forward pass (no optimizer covers its parameters).
        output_know = knowledge(input_dict, knowledge = True)
        loss_dict = criterion(input_dict, output_dict, eps=epoch+1)
        loss = loss_dict["total_loss"]
        # Distillation term on the intermediate feature maps.
        know_loss = know_criterion(output_dict["image_feature"], output_know["image_feature"])
        loss = loss + know_loss
        loss_dict["know_loss"] = know_loss
        # backward
        optimizer.zero_grad()
        loss.backward()
        if cfg.grad_clip > 0:
            torch.nn.utils.clip_grad_norm_(model.parameters(), cfg.grad_clip)
        optimizer.step()
        losses.update(loss.item())
        # measure elapsed time
        batch_time.update(time.time() - end)
        end = time.time()
        if cfg.viz and (i % cfg.viz_freq == 0 and i > 0) and epoch % 8 == 0:
            visualize_network_output(output_dict, input_dict, mode='train')
        if i % cfg.display_freq == 0:
            gc.collect()
            print_inform = "({:d} / {:d}) ".format(i, len(train_loader))
            for (k, v) in loss_dict.items():
                print_inform += " {}: {:.4f} ".format(k, v.item())
            print(print_inform)
    # Checkpoint policy (identical to train()).
    if cfg.exp_name == 'Synthtext' or cfg.exp_name == 'ALL' or cfg.exp_name == "preSynthMLT" or cfg.exp_name == "preALL":
        print("save checkpoint for pretrain weight. ")
        if epoch % cfg.save_freq == 0:
            save_model(model, epoch, scheduler.get_lr(), optimizer)
    elif cfg.exp_name == 'MLT2019' or cfg.exp_name == 'ArT' or cfg.exp_name == 'MLT2017':
        if epoch < 10 and cfg.max_epoch >= 200:
            if epoch % (2*cfg.save_freq) == 0:
                save_model(model, epoch, scheduler.get_lr(), optimizer)
        else:
            if epoch % cfg.save_freq == 0:
                save_model(model, epoch, scheduler.get_lr(), optimizer)
    else:
        if epoch % cfg.save_freq == 0 and epoch > 150:
            save_model(model, epoch, scheduler.get_lr(), optimizer)
    print('Training Loss: {}'.format(losses.avg))
def main():
    """Build the dataset selected by cfg.exp_name, construct the model,
    criterion, optimizer and scheduler, then run the training loop.

    Raises:
        ValueError: when cfg.exp_name does not name a known dataset.
            (Previously this case only printed a warning and then crashed
            with a confusing NameError on the undefined `trainset`.)
    """
    global lr
    # ---------------------------------------------------------------- data
    if cfg.exp_name == 'Totaltext':
        trainset = TotalText(
            data_root='data/total-text-mat',
            ignore_list=None,
            is_training=True,
            load_memory=cfg.load_memory,
            transform=Augmentation(size=cfg.input_size, mean=cfg.means, std=cfg.stds)
        )
        valset = None
    elif cfg.exp_name == 'Totaltext_mid':
        trainset = TotalText_mid(
            data_root='data/total-text-mat',
            ignore_list=None,
            is_training=True,
            load_memory=cfg.load_memory,
            transform=Augmentation(size=cfg.input_size, mean=cfg.means, std=cfg.stds)
        )
        valset = None
    elif cfg.exp_name == 'Synthtext':
        trainset = SynthText(
            data_root='../FAST/data/SynthText',
            is_training=True,
            load_memory=cfg.load_memory,
            transform=Augmentation(size=cfg.input_size, mean=cfg.means, std=cfg.stds)
        )
        valset = None
    elif cfg.exp_name == 'Ctw1500':
        trainset = Ctw1500Text(
            data_root='data/ctw1500',
            is_training=True,
            load_memory=cfg.load_memory,
            transform=Augmentation(size=cfg.input_size, mean=cfg.means, std=cfg.stds)
        )
        valset = None
    elif cfg.exp_name == 'Ctw1500_mid':
        trainset = Ctw1500Text_mid(
            data_root='data/ctw1500',
            is_training=True,
            load_memory=cfg.load_memory,
            transform=Augmentation(size=cfg.input_size, mean=cfg.means, std=cfg.stds)
        )
        valset = None
    elif cfg.exp_name == 'TD500HUST':
        trainset = TD500HUSTText(
            data_root='data/',
            is_training=True,
            load_memory=cfg.load_memory,
            transform=Augmentation(size=cfg.input_size, mean=cfg.means, std=cfg.stds)
        )
        valset = None
    elif cfg.exp_name == 'TD500HUST_mid':
        trainset = TD500HUSTText_mid(
            data_root='data/',
            is_training=True,
            load_memory=cfg.load_memory,
            transform=Augmentation(size=cfg.input_size, mean=cfg.means, std=cfg.stds)
        )
        valset = None
    elif cfg.exp_name == 'ArT':
        trainset = ArtTextJson(
            data_root='data/ArT',
            is_training=True,
            load_memory=cfg.load_memory,
            transform=Augmentation(size=cfg.input_size, mean=cfg.means, std=cfg.stds)
        )
        valset = None
    elif cfg.exp_name == 'ArT_mid':
        trainset = ArtTextJson_mid(
            data_root='data/ArT',
            is_training=True,
            load_memory=cfg.load_memory,
            transform=Augmentation(size=cfg.input_size, mean=cfg.means, std=cfg.stds)
        )
        valset = None
    elif cfg.exp_name == 'preSynthMLT':
        trainset = MLTTextJson(
            is_training=True,
            load_memory=False,
            transform=Augmentation(size=cfg.input_size, mean=cfg.means, std=cfg.stds)
        )
        valset = None
    elif cfg.exp_name == 'preALL':
        trainset = ALLTextJson(
            is_training=True,
            load_memory=False,
            transform=Augmentation(size=cfg.input_size, mean=cfg.means, std=cfg.stds)
        )
        valset = None
    elif cfg.exp_name == 'ALL':
        # Mixture of synthetic and real datasets used for joint pretraining.
        trainset_SynthMLT = MLTTextJson(
            is_training=True,
            load_memory=False,
            transform=Augmentation(size=cfg.input_size, mean=cfg.means, std=cfg.stds)
        )
        trainset_SynthText = SynthText(
            data_root='../FAST/data/SynthText',
            is_training=True,
            load_memory=cfg.load_memory,
            transform=Augmentation(size=cfg.input_size, mean=cfg.means, std=cfg.stds)
        )
        trainset_totaltext = TotalText(
            data_root='data/total-text-mat',
            ignore_list=None,
            is_training=True,
            load_memory=cfg.load_memory,
            transform=Augmentation(size=cfg.input_size, mean=cfg.means, std=cfg.stds)
        )
        # Ctw1500Text was deliberately excluded from the 'ALL' mixture.
        trainset_TD500 = TD500HUSTText(
            data_root='data/',
            is_training=True,
            load_memory=cfg.load_memory,
            transform=Augmentation(size=cfg.input_size, mean=cfg.means, std=cfg.stds)
        )
        trainset = ConcatDataset([trainset_SynthText, trainset_SynthMLT, trainset_totaltext, trainset_TD500])
        valset = None
    else:
        # Fail fast with a clear message instead of printing and then
        # crashing with a NameError on the undefined `trainset` below.
        raise ValueError("dataset name is not correct: {}".format(cfg.exp_name))
    train_loader = data.DataLoader(trainset, batch_size=cfg.batch_size,
                                   shuffle=True, num_workers=cfg.num_workers,
                                   pin_memory=True, generator=torch.Generator(device=cfg.device)
                                   )
    if cfg.exp_name == 'Synthtext' or cfg.exp_name == 'ALL' or cfg.exp_name == "preSynthMLT":
        print("save checkpoint for pretrain weight. ")
    # Model
    model = TextNet(backbone=cfg.net, is_training=True)
    model = model.to(cfg.device)
    if cfg.know:
        # Teacher network for knowledge distillation: frozen, eval-mode copy.
        know_model = TextNet(backbone=cfg.knownet, is_training=False)
        load_model(know_model, cfg.know_resume)
        know_model.eval()
        # requires_grad_(False) actually freezes the parameters; the former
        # `know_model.requires_grad = False` only set a plain attribute on
        # the module object and froze nothing.
        know_model.requires_grad_(False)
    if cfg.exp_name == 'TD500HUST' or cfg.exp_name == "Ctw1500":
        # NOTE(review): TextLoss_ctw is not imported anywhere in this file,
        # so this branch raises a NameError as written — confirm the
        # intended import (presumably from network.loss).
        criterion = TextLoss_ctw()
    else:
        criterion = TextLoss()
    if cfg.mgpu:
        model = nn.DataParallel(model)
    if cfg.cuda:
        cudnn.benchmark = True
    if cfg.resume:
        load_model(model, cfg.resume)
    lr = cfg.lr
    moment = cfg.momentum
    if cfg.optim == "Adam" or cfg.exp_name == 'Synthtext':
        optimizer = torch.optim.Adam(model.parameters(), lr=lr)
    else:
        optimizer = torch.optim.SGD(model.parameters(), lr=lr, momentum=moment)
    if cfg.exp_name == 'Synthtext':
        scheduler = FixLR(optimizer)
    else:
        scheduler = lr_scheduler.StepLR(optimizer, step_size=50, gamma=0.9)
    print('Start training MixNet.')
    if cfg.know:
        # Hoisted out of the epoch loop: the loss object depends only on T
        # and was needlessly re-created every epoch.
        know_criterion = knowledge_loss(T = 5)
    for epoch in range(cfg.start_epoch, cfg.max_epoch+1):
        scheduler.step()
        if cfg.know:
            knowledgetrain(model, know_model, train_loader, criterion, know_criterion, scheduler, optimizer, epoch)
        else:
            train(model, train_loader, criterion, scheduler, optimizer, epoch)
    print('End.')
    if torch.cuda.is_available():
        torch.cuda.empty_cache()
if __name__ == "__main__":
    # Fix RNG seeds for reproducible augmentation and initialization.
    np.random.seed(2022)
    torch.manual_seed(2022)
    # parse arguments
    option = BaseOptions()
    args = option.initialize()
    update_config(cfg, args)
    print_config(cfg)
    # main
    main()
| D641593/MixNet | train_mixNet.py | train_mixNet.py | py | 14,341 | python | en | code | 26 | github-code | 13 |
12523090780 | from json import dump, dumps
from os import path as os_path
from sys import path as sys_path
from django.conf import settings
# ------------------------------------------------------------------------------
current = os_path.dirname(os_path.realpath(__file__))
parent = os_path.dirname(current)
parent_parent = os_path.dirname(parent)
# adding the parent directory to the sys.path
sys_path.append(parent_parent)
# ------------------------------------------------------------------------------
from apis.zendesk import ZendeskAPI # noqa
# Zendesk client configured from Django settings.
zendesk_api = ZendeskAPI(
    api_key=settings.ZENDESK_API_KEY, subdomain=settings.ZENDESK_SUBDOMAIN, email=settings.ZENDESK_EMAIL
)
# Locate the saved view holding last week's Chromebook tickets
# (search_view_id falls back to '' when the view is not found).
active_views = zendesk_api.get_active_views()
search_view_title = "Chromebooks activity (last 7 days)"
search_view_id = next((view["id"] for view in active_views["views"] if view["title"] == search_view_title), "")
searched_view = zendesk_api.get_tickets_in_view(search_view_id)
tickets_in_view = searched_view["tickets"]
# Snapshot the raw tickets to disk for debugging/inspection.
with open("tickets_in_view.json", "w") as f:
    dump(tickets_in_view, f, indent=4)
ticket_fields = zendesk_api.get_ticket_fields()
fields = ticket_fields["ticket_fields"]
# static ids for each field
item_tag_field_id = next(field["id"] for field in fields if field["title"] == "Item Tag")
category_field_id = next(field["id"] for field in fields if field["title"] == "Category")
site_field_id = next(field["id"] for field in fields if field["title"] == "Site")
sites = [field for field in fields if field["title"] == "Site"][0]
categories = [field for field in fields if field["title"] == "Category"][0]
statuses = [field for field in fields if field["title"] == "Ticket status"][0]
# Lookup tables: custom-field option value / status id -> human-readable name.
sites_values_and_names = {site["value"]: site["name"] for site in sites["custom_field_options"]}
categories_values_and_names = {category["value"]: category["name"] for category in categories["custom_field_options"]}
statuses_ids_and_names = {status["id"]: status["end_user_label"] for status in statuses["custom_statuses"]}
# Build one simplified record per ticket, grouped under its site name.
site_selected_tickets = {}
for ticket in tickets_in_view:
    site = sites_values_and_names[
        [site for site in ticket["custom_fields"] if site["id"] == site_field_id][0]["value"]
    ]
    if site not in site_selected_tickets:
        site_selected_tickets[site] = []
    site_ticket = {}
    site_ticket["ticket_id"] = ticket["id"]
    site_ticket["ticket_status"] = statuses_ids_and_names[ticket["custom_status_id"]]
    site_ticket["ticket_subject"] = ticket["subject"]
    site_ticket["item_tag"] = [
        item_tag for item_tag in ticket["custom_fields"] if item_tag["id"] == item_tag_field_id
    ][0]["value"]
    # NOTE(review): one extra API round-trip per ticket for requester and
    # assignee names — fine for small views, slow for large ones.
    site_ticket["requester"] = zendesk_api.get_user(ticket["requester_id"])["user"]["name"]
    site_ticket["assignee"] = (
        zendesk_api.get_user(ticket["assignee_id"])["user"]["name"] if ticket["assignee_id"] else "NONE"
    )
    site_ticket["requested_date"] = ticket["created_at"]
    # Keep only the leaf of the '::'-separated category path.
    site_ticket["category"] = str(
        categories_values_and_names[
            [category for category in ticket["custom_fields"] if category["id"] == category_field_id][0]["value"]
        ]
    ).split("::")[-1]
    site_selected_tickets[site].append(site_ticket)
print(dumps(site_selected_tickets, indent=4))
print(sum([len(site_selected_tickets[site]) for site in site_selected_tickets]))
for site in site_selected_tickets:
    print(site_selected_tickets[site])
# Templates for the weekly report e-mail (not sent by this script).
subject = "{site} Chromebooks report {date_seven_days_ago} - {date_today}"
email_template = """
Dear {principal_name},
I hope this email finds you well.
"Attached is the {site} Chromebook repair report for {date_seven_days_ago} to {date_today}."
{data_table}
Thank you for your attention.
Regards,
IT Department
"""
| abrahamprz/zenclick | chromebooks_report/management/commands/local_test.py | local_test.py | py | 3,733 | python | en | code | 0 | github-code | 13 |
4927001451 | '''
Created on May 20, 2021
@author: mvelasco
'''
import pdb
import numpy as np
from optimalTransports import Empirical_Measure, Probability_Measure, Optimal_Transport_Finder, Weighted_Voronoi_Diagram
from optimalTransports import dist, two_d_uniform_density,two_d_uniform_sample_q
from minEntropyDistFinder import norm, Minimum_Cross_Entropy_Dist_Finder
def make_opt_transport_figures():
    """Render the weighted Voronoi diagram before and after computing the
    optimal transport from the uniform distribution on the square to the
    empirical measure supported at the centers below.
    """
    support_points = [np.array(p) for p in [(1/2, 5/12), (7/8, 5/12), (7/8, 6/7),
                                            (7/10, 11/14), (1/6, 1/2), (1/4, 1/4)]]
    empirical = Empirical_Measure(support_points)
    # The continuous measure is specified by its sampler and density.
    continuous = Probability_Measure(two_d_uniform_sample_q, two_d_uniform_density)
    transport = Optimal_Transport_Finder(empirical, continuous, dist, num_MC=100000)
    transport.WVD.plot_WVD("unweighted_voronoi.png")
    transport.do_gradient_descent(NumSteps=50, StepSize=0.5, keep_track_of_best=True, Descending_in_size=True)
    transport.WVD.plot_WVD("optimal_transport.png")
def make_min_entropy_single_figure(delta, filename):
    """Find the minimum cross-entropy distribution at Wasserstein radius
    ``delta`` and plot its weight distribution to ``filename``.

    The optimum q is encoded via q = p*exp(-1-v*phi_lambda(x)-u), where p is
    the density of the reference probability measure below.
    """
    # The distance between the uniform and the empirical is around 0.2
    centers_array = [np.array([1/2,5/12]), np.array([7/8,5/12]), np.array([7/8, 6/7]), np.array([7/10, 11/14]),np.array([1/6, 1/2]), np.array([1/4,1/4])]
    empirical_measure = Empirical_Measure(centers_array)
    # The probability measure is specified by its sampling function
    probability_measure = Probability_Measure(two_d_uniform_sample_q, two_d_uniform_density)
    # Next we construct the full minimum entropy finder
    diamK = 1.0
    distance_fn = dist
    dualnorm_fn = norm
    # Fix: a dead `num_MC = 1000` assignment used to precede this one and was
    # silently overwritten; 500 is the value that was actually in effect.
    num_MC = 500
    num_UVsteps = 10
    UV_steptype = "backtracking"
    UV_initial_stepsize = 8
    # The following object carries out the optimization for us.
    MCE = Minimum_Cross_Entropy_Dist_Finder(
        delta,
        diamK,
        empirical_measure,
        probability_measure,
        distance_fn,
        dualnorm_fn,
        num_MC,
        num_UVsteps,
        UV_steptype,
        UV_initial_stepsize
        )
    MCE.do_lambda_star_several_steps(numsteps=50, verbose=True)
    # Report the optimization state that encodes the resulting distribution.
    print("Final_uv: " + str(MCE.best_uv))
    print("Best_lambdas: "+str(MCE.current_lambdas))
    print("Gradient_norm : "+str(MCE.TDS.best_gradient_norm) )
    print("Gradient : "+str(MCE.TDS.gradient) )
    print("Cross-Entropy : "+str(MCE.TDS.best_objective_so_far) )
    MCE.plot_current_weights_dist(filename, num_points=300)
def make_min_entropy_figures():
    """Sweep a fixed grid of delta values, writing min_entropy_<k>.png for each."""
    for index, delta in enumerate((0.2, 0.15, 0.1, 0.05, 0.02, 0.0005)):
        make_min_entropy_single_figure(delta, "min_entropy_" + str(index) + ".png")
#One selects which figures to create below
#make_opt_transport_figures()
#make_min_entropy_single_figure(delta = 0.2,filename = "min_entropy_0.png")
make_min_entropy_figures()
| mauricio-velasco/min-cross-entropy | Figures.py | Figures.py | py | 3,397 | python | en | code | 0 | github-code | 13 |
37090946773 | """
Compute dengue risk from vector suitability.
Author: Jacopo Margutti (jmargutti@redcross.nl)
Date: 22-03-2021
"""
import pandas as pd
import numpy as np
import datetime
from dateutil import relativedelta
import logging
def compute_risk(df, adm_divisions, num_months_ahead=3, correction_leadtime=None):
    """Compute dengue risk per administrative division and month from
    monthly vector-suitability values.

    For each month, risk is the weighted average (weights 0.16/0.68/0.16)
    of the suitability of the three preceding months; missing months reduce
    the weight mass and increase the reported lead time.

    Args:
        df: DataFrame with columns 'adm_division', 'year', 'month',
            'suitability' (one row per division and month).
        adm_divisions: iterable of division names to process.
        num_months_ahead: number of future months to forecast.
        correction_leadtime: optional path to a CSV with per-division,
            per-month, per-lead-time bias corrections (ratio_std, diff_mean).

    Returns:
        DataFrame with columns adm_division, year, month, suitability,
        risk, lead_time.
    """
    # Work on a copy: the previous version added a 'date' column to the
    # caller's frame in place.
    df = df.copy()
    df['date'] = df['year'].astype(str) + '-' + df['month'].astype(str) + '-15'
    df['date'] = pd.to_datetime(df['date'])  # convert to datetime
    # Unique observation dates plus `num_months_ahead` future months.
    # pd.DateOffset replaces dateutil.relativedelta for the month arithmetic,
    # and explicit index construction replaces DataFrame.append (removed in
    # pandas 2.0).
    last_date = df['date'].max()
    observed = pd.DatetimeIndex(df['date'].unique()).sort_values()
    future = pd.DatetimeIndex([last_date + pd.DateOffset(months=n + 1)
                               for n in range(num_months_ahead)])
    all_dates = observed.append(future)
    dfdates = pd.DataFrame({'year': all_dates.year, 'month': all_dates.month})
    # remove first three months (no data to predict from)
    dfdates = dfdates[3:]
    # initialize dataframe for risk predictions
    index = pd.MultiIndex.from_tuples(
        [(adm, y, m) for adm in adm_divisions
         for y, m in zip(dfdates.year.values, dfdates.month.values)],
        names=['adm_division', 'year', 'month'])
    df_predictions = pd.DataFrame(index=index,
                                  columns=['suitability', 'risk', 'lead_time'],
                                  dtype='object')
    if correction_leadtime:
        df_corr = pd.read_csv(correction_leadtime)
    # loop over admin divisions and calculate risk
    for admin_division in adm_divisions:
        df_admin_div = df[df.adm_division == admin_division]
        for year, month in zip(dfdates.year.values, dfdates.month.values):
            # store suitability (if observed for this division/month)
            df_suitability = df_admin_div[(df_admin_div.month == month) & (df_admin_div.year == year)]
            if not df_suitability.empty:
                df_predictions.at[(admin_division, year, month), 'suitability'] = df_suitability.iloc[0]['suitability']
            # calculate risk from the three preceding months
            date_prediction = datetime.datetime.strptime(f'{year}-{month}-15', '%Y-%m-%d')
            dates_input = [date_prediction - datetime.timedelta(90),
                           date_prediction - datetime.timedelta(60),
                           date_prediction - datetime.timedelta(30)]
            weights_input = [0.16, 0.68, 0.16]
            risk_total, weight_total, counter = 0., 0., 0
            for date_input, weight_input in zip(dates_input, weights_input):
                df_input = df_admin_div[(df_admin_div.month == date_input.month) &
                                        (df_admin_div.year == date_input.year)]
                if not df_input.empty:
                    risk_total += weight_input * df_input.iloc[0]['suitability']
                    weight_total += weight_input
                    counter += 1
            # Guard the previously-unchecked divide-by-zero when none of the
            # three input months has data for this division.
            risk_total = risk_total / weight_total if weight_total > 0 else float('nan')
            # lead time = number of input months that had to be forecast
            if counter == 3:
                lead_time = '0-month'
            elif counter == 2:
                lead_time = '1-month'
            elif counter == 1:
                lead_time = '2-month'
            else:
                lead_time = ''
                logging.error('compute_risk: lead time unknown')
            # correct for lead-time bias (skip the '' case, which previously
            # crashed on an empty correction lookup)
            if correction_leadtime and lead_time not in ('', '0-month'):
                df_corr_ = df_corr[(df_corr['lead_time'] == lead_time) &
                                   (df_corr['month'] == month) &
                                   (df_corr['adm_division'] == admin_division)]
                ratio_std = df_corr_.ratio_std.values[0]
                diff_mean = df_corr_.diff_mean.values[0]
                risk_total = ratio_std * risk_total - diff_mean
            # store risk and lead time
            df_predictions.at[(admin_division, year, month), 'risk'] = risk_total
            df_predictions.at[(admin_division, year, month), 'lead_time'] = lead_time
    df_predictions.reset_index(inplace=True)
    return df_predictions
| rodekruis/IBF-dengue-model | mosquito_model/src/mosquito_model/compute_risk.py | compute_risk.py | py | 4,013 | python | en | code | 1 | github-code | 13 |
29008362960 | from pytube import YouTube
# Interactive helper: prompts for a YouTube URL and downloads both the
# lowest-resolution video and the audio-only stream via pytube.
url = input('Digite a url do Youtube: ')
video = YouTube(url)
# Download the video (lowest resolution keeps the file small).
# NOTE(review): output_path is hard-coded to a local desktop folder.
video.streams.get_lowest_resolution().download(
    output_path = r"C:\Users\Kurumí\Desktop\SLA LSLSLSLSLSSLLSLSLSLSLS",
    filename = video.title
)
# Download the audio-only stream as an .mp3 file.
video.streams.filter(only_audio=True).first().download(
    output_path = r"C:\Users\Kurumí\Desktop\SLA LSLSLSLSLSSLLSLSLSLSLS" ,
    filename = video.title + ".mp3"
)
| antoniohenrick/python_intensivao_ufpa | baixar_videos_youtube.py | baixar_videos_youtube.py | py | 464 | python | en | code | 0 | github-code | 13 |
21293413259 | team_name = input()
games_count = int(input())

# Tally wins/draws/losses and award 3/1/0 points respectively.
results_tally = {"W": 0, "D": 0, "L": 0}
points_per_result = {"W": 3, "D": 1, "L": 0}

if games_count == 0:
    print(f"{team_name} hasn't played any games during this season.")
else:
    for _ in range(games_count):
        outcome = input()
        if outcome in results_tally:
            outcome_is_counted = True
            results_tally[outcome] += 1
    total_score = sum(points_per_result[r] * n for r, n in results_tally.items())
    win_rate = results_tally["W"] / games_count * 100
    print(f"{team_name} has won {total_score} points during this season.")
    print("Total stats:")
    print(f"## W: {results_tally['W']}")
    print(f"## D: {results_tally['D']}")
    print(f"## L: {results_tally['L']}")
    print(f"Win rate: {win_rate:.2f}%")
| SJeliazkova/SoftUni | Programming-Basic-Python/Exams/Exam_6_7_July_2019/05. Football Tournament.py | 05. Football Tournament.py | py | 769 | python | en | code | 0 | github-code | 13 |
38661731819 | import unittest
from array import array
class ArrayTests(unittest.TestCase):
    """Exercises the stdlib ``array`` module."""

    def test_array(self) -> None:
        """An ``array`` behaves like a list but stores a single C type.

        The typecode ('h' = signed integer of at least 2 bytes) makes the
        storage more compact than a list of Python ints.
        """
        compact = array('h', [10, 20, 30])
        gathered = [element for element in compact]
        self.assertEqual(list(compact), gathered)
| damonallison/python-examples | tests/stdlib/test_array.py | test_array.py | py | 494 | python | en | code | 0 | github-code | 13 |
31857032501 | import rhinoscriptsyntax as rs
def unlockCurves():
    """Unlock every curve object in the current Rhino document.

    Returns False when the document contains no curves; otherwise unlocks
    them and reports the count (returning None, as before).
    """
    curves = rs.ObjectsByType(4)  # 4 = curve objects
    if not curves:
        print("0 Curves were found")
        return False
    intCount = rs.UnlockObjects(curves)
    # Bug fix: format() must be applied to the string *inside* print();
    # the old `print("...").format(intCount)` never interpolated the count
    # (and raises AttributeError on Python 3, where print() returns None).
    print("Unlocked {} Curves".format(intCount))
unlockCurves() | octav1an/rhino-macros | Lock_Unlock/UnlockCurves.py | UnlockCurves.py | py | 245 | python | en | code | 0 | github-code | 13 |
28596456710 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Mar 18 17:58:21 2021
@author: Dartoon
"""
import numpy as np
import astropy.io.fits as pyfits
import matplotlib.pyplot as plt
from ID_list import ID_list
import glob
from tools import read_string_list, read_info, cal_oreination
# Read the ID/RA/DEC/redshift catalogue once at import time.
# Fix: use a context manager so the file handle is closed instead of
# leaked (the module-level `f` is later rebound to other files anyway).
with open("material/ID_RA_DEC_z.txt", "r") as f:
    string = f.read()
zlines = string.split('\n')  # Split in to \n
# def read_info(ID):
# line = [zlines[i] for i in range(len(zlines)) if ID in zlines[i]]
# if line != []:
# z = float(line[0].split(' ')[-1])
# else:
# z = -99
# return z
# Halpha = 6562.8
CIV, MgII, Hb, OIII = 1549, 2798, 4861, 5007
G102range = [8000, 11500]
G141range = [10750, 17000]
def av_filter(z):
lines = np.array([CIV, MgII, Hb, OIII])
redshift_lines = (1+z) * lines
G102_bool = (redshift_lines>G102range[0]+100) * (redshift_lines<G102range[1]-100)
G141_bool = (redshift_lines>G141range[0]+100) * (redshift_lines<G141range[1]-100)
# return G102_bool, G141_bool
s1 = np.array(['CIV', 'MgII', 'H$beta$', '[OIII]'])[G102_bool]
s2 = np.array(['CIV', 'MgII', 'H$beta$', '[OIII]'])[G141_bool]
s1 = [s1[i] for i in range(len(s1))]
s2 = [s2[i] for i in range(len(s2))]
# str1 = "G102: " + repr(s1)
# str2 = " G141: " + repr(s2)
# s = str1 + str2
if s2 != []:
try:
s = "G141 & " + s2[0] + '+' + s2[1]
except:
s = "G141 & " + s2[0]
elif s2 == [] and s1 != []:
try:
s = "G102 & " + s1[0] + '+' + s1[1]
except:
s = "G102 & " + s1[0]
else:
s = "No fileter!!! & "
return s
from astropy.cosmology import FlatLambdaCDM
# cosmo = FlatLambdaCDM(H0=70, Om0=0.3, Tcmb0=2.725)
cosmo = FlatLambdaCDM(H0=70, Om0=0.3)
def deg2HMS(ra='', dec='', round=False):
    """Convert decimal degrees to sexagesimal coordinate strings.

    Args:
        ra: right ascension in decimal degrees ('' = not provided);
            converted to "H M S" (15 degrees per hour).
        dec: declination in decimal degrees ('' = not provided) -> "D M S".
        round: truncate the seconds to an integer when True.
            (NOTE: the name shadows the builtin ``round``; kept for
            backward compatibility with keyword callers.)

    Returns:
        The RA string, the DEC string, or the tuple (RA, DEC) when both
        coordinates are provided.
    """
    RA, DEC, rs, ds = '', '', '', ''
    # Compare against the '' sentinel instead of relying on truthiness:
    # a coordinate of exactly 0.0 is valid input and was previously treated
    # as "missing", returning an empty string.
    has_ra, has_dec = ra != '', dec != ''
    if has_dec:
        if str(dec)[0] == '-':
            ds, dec = '-', abs(dec)
        deg = int(dec)
        decM = abs(int((dec-deg)*60))
        if round:
            decS = int((abs((dec-deg)*60)-decM)*60)
        else:
            decS = (abs((dec-deg)*60)-decM)*60
        DEC = '{0}{1} {2} {3}'.format(ds, deg, decM, decS)
    if has_ra:
        if str(ra)[0] == '-':
            rs, ra = '-', abs(ra)
        raH = int(ra/15)
        raM = int(((ra/15)-raH)*60)
        if round:
            raS = int(((((ra/15)-raH)*60)-raM)*60)
        else:
            raS = ((((ra/15)-raH)*60)-raM)*60
        RA = '{0}{1} {2} {3}'.format(rs, raH, raM, raS)
    if has_ra and has_dec:
        return (RA, DEC)
    return RA or DEC
# --- Print one LaTeX table row per dual-AGN target ----------------------
print("ID & RA & DEC & z & Separ.& Mag.& Grism & Lines & PA\\\\")
print("&&&&($''$, kpc)&(pair)&&&(deg)\\\\")
offset_kpc_h0_list, z_list = [], []
for i, ID in enumerate(ID_list):
    show_ID = ID[:4] + ID[9] + ID[10:14]  # abbreviated ID for the table
    # Catalogue line for this target: "ID RA DEC z".
    line = [zlines[i] for i in range(len(zlines)) if ID in zlines[i]]
    line[0] = line[0].replace(' ', ' ')
    z = float(line[0].split(' ')[-1])
    RA, Dec = line[0].split(' ')[1], line[0].split(' ')[2]
    # RA, Dec, z = read_info(ID)
    # files_1 = glob.glob('../proof2close_HSC_images_5band/*/' + ID + '/fit_result/')
    # files_2 = glob.glob('../extra/*/fit_result*/' + ID + '/')
    # files = files_1 + files_2
    add = '' #The first fit is better than this 'deep_'
    # add = 'deep_'
    folder_1 = glob.glob('../proof2close_HSC_images_5band/*/'+ ID+ '/')
    if folder_1 != []: # and 'z_below1' not in folder_1[0]:
        folder = folder_1[0] + add+'fit_result/'
        file = folder + add+'fit_result_I-band.txt' #!!!
        # folder = folder_1[0] + 'fit_result/'
        # file = folder + 'fit_result_I-band.txt'
    # elif folder_1 != [] and 'z_below1' in folder_1[0]:
    # folder = '../_John_fitted/'+ID+'_HSC-I/' #For these z below 1(or z unkonwn), not fitted and use John's fit.
    # file = folder + 'fit_result.txt'
    # else:
    # folder_2 = glob.glob('../proofBHBH/model_Iband_zover1/'+ ID + '*/')
    # folder = folder_2[0]
    # file = folder + 'fit_result_I-band.txt'
    # file = glob.glob(files[-1]+'fit_result_{0}-band.txt'.format('I'))
    if file != []:
        # Parse the two fitted point sources (the AGN pair) from the fit log.
        f = open(file,"r")
        string = f.read()
        lines = string.split('\n') # Split in to \n
        trust = 2
        l1 = [i for i in range(len(lines)) if 'model_PS_result:' in lines[i]]
        AGN_dic = read_string_list(string = lines[l1[trust]].split('model_PS_result: ')[1])
        AGN_pos = np.array([[-1*AGN_dic[i]['ra_image'], AGN_dic[i]['dec_image']] for i in range(len(AGN_dic))])
        # Angular separation (arcsec) converted to projected kpc at z.
        offset = np.sum( (AGN_pos[0] - AGN_pos[1])**2)**0.5
        scale_relation = cosmo.angular_diameter_distance(z).value * 10**3 * (1/3600./180.*np.pi) #Kpc/arc
        offset_kpc = offset * scale_relation #In kpc
        Mags = [AGN_dic[0]['magnitude'], AGN_dic[1]['magnitude']]
        # Aperture orientation and its 180-degree complement, folded into [0, 360).
        APT_orie_1 = cal_oreination(ID,add=add,trust=trust)+135
        if APT_orie_1 >360:
            APT_orie_1 = APT_orie_1-360
        if APT_orie_1< 180:
            APT_orie_2 = APT_orie_1 + 180
        elif APT_orie_1> 180:
            APT_orie_2 = APT_orie_1 - 180
        # Combined magnitude of the pair (zeropoint 27, HSC convention).
        mag1, mag2 = np.min(Mags), np.max(Mags)
        zp = 27
        flux = 10**( 0.4*(zp-mag1)) + 10**( 0.4*(zp-mag2))
        tmag = -2.5*np.log10(flux) + zp
        print(i+1, show_ID, round(tmag,1), z, '& {0:.1f}'.format(cal_oreination(ID,add=add, trust = trust)), '%{0:.0f} {1:.0f}degree'.format(APT_orie_1, APT_orie_2) ) #'{0:.1f},{1:.1f}'.format(np.min(Mags), np.max(Mags)), '&',
        # av_filter(z), '& {0:.1f} \\\\'.format(cal_oreination(ID)), '%{0:.0f} {1:.0f}degree'.format(APT_orie_1, APT_orie_2) )
        # print(deg2HMS(ra=float(RA), dec = float(Dec)) )
        offset_kpc_h0_list.append(offset_kpc * 70 / 100)
        z_list.append(z)
#%% Check how image looks
# Visual sanity check for one target: overlay fitted AGN positions on the image.
ID = [
      # '022404.85+014941.9',
      # '022906.04-051428.9',
      # '092532.13-020806.1',
      # '095218.04-000459.1',
      # '105458.01+043310.6',
      # '122144.31-004144.1',
      # '124618.51-001750.2',
      # '150216.66+025719.8',
      # '162501.98+430931.6',
      # '220642.82+003016.2',
      '230402.77-003855.4'
      ][0]
folder_1 = glob.glob('../proof2close_HSC_images_5band/*/'+ ID+ '/')
if folder_1 != []: # and 'z_below1' not in folder_1[0]:
    folder = folder_1[0] + 'deep_'+'fit_result/'
    file_glob = folder + 'fit_I-band_fit*pkl' #!!!
folder_ = folder_1[0] +'fit_result/'
f = open(folder_+'fit_result_I-band.txt',"r")
string = f.read()
lines = string.split('\n') # Split in to \n
trust = 2
l1 = [i for i in range(len(lines)) if 'model_PS_result:' in lines[i]]
AGN_dic = read_string_list(string = lines[l1[trust]].split('model_PS_result: ')[1])
AGN_pos = np.array([[-1*AGN_dic[i]['ra_image'], AGN_dic[i]['dec_image']] for i in range(len(AGN_dic))])
file = glob.glob(file_glob)[0]
import pickle
fit_run = pickle.load(open(file,'rb'))
from galight.tools.astro_tools import plt_fits
plt_fits(fit_run.flux_2d_out['data'], hold=True)
c = len(fit_run.flux_2d_out['data'])/2
pixscale = fit_run.fitting_specify_class.deltaPix
# Scatter the two AGN positions (arcsec -> pixel, relative to frame center).
for i in [0,1]:
    # x, y = fit_run.final_result_ps[i]['ra_image'], fit_run.final_result_ps[i]['dec_image']
    x, y = AGN_pos[i][0], AGN_pos[i][1]
    x, y = x/pixscale, y/pixscale
    plt.scatter( c+x, c+y )
plt.show()
# Recompute the position angle of the fainter AGN relative to the brighter one.
AGN_dic = read_string_list(string = lines[l1[trust]].split('model_PS_result: ')[1])
if AGN_dic[0]['flux_within_frame'] < AGN_dic[1]['flux_within_frame']:
    AGN_dic[0], AGN_dic[1] = AGN_dic[1], AGN_dic[0]
AGN_pos = np.array([[1*AGN_dic[i]['ra_image'], AGN_dic[i]['dec_image']] for i in range(len(AGN_dic))])
dif = AGN_pos[1]-AGN_pos[0]
PA = np.arctan( dif[0]/dif[1] ) * 180 / np.pi
# Fold arctan into the correct quadrant (0..360 degrees).
if dif[1]<0 and dif[0]>0:
    PA = 180 + PA
if dif[1]<0 and dif[0]<0:
    PA = 180 + PA
if dif[1]>0 and dif[0]<0:
    PA = 360 + PA
print(PA)
print(cal_oreination(ID, trust=trust))
#!!! I checked and confirm that the table in the proposal give perfect informaiton. Using trust = 2
#%%
# #%%Check V band mag (inbetween G and R)
# IDs = [
# '022404.85+014941.9',
# '022906.04-051428.9',
# '092532.13-020806.1',
# '095218.04-000459.1',
# '105458.01+043310.6',
# '122144.31-004144.1',
# '124618.51-001750.2',
# '150216.66+025719.8',
# '162501.98+430931.6',
# '220642.82+003016.2',
# '230402.77-003855.4'
# ]
# for ID in IDs:
# fluxs = []
# for band in ['G', 'R']:
# folder_1 = glob.glob('../proof2close_HSC_images_5band/*/'+ ID+ '/')
# # if folder_1 != []: # and 'z_below1' not in folder_1[0]:
# # folder = folder_1[0] + add+'fit_result/'
# # file_glob = folder + 'fit_-band_fit*pkl' #!!!
# folder_ = folder_1[0] +'fit_result/'
# try:
# f = open(folder_+'fit_result_{0}-band.txt'.format(band),"r")
# string = f.read()
# lines = string.split('\n') # Split in to \n
# trust = 2
# l1 = [i for i in range(len(lines)) if 'model_PS_result:' in lines[i]]
# AGN_dic = read_string_list(string = lines[l1[trust]].split('model_PS_result: ')[1])
# l1 = [i for i in range(len(lines)) if 'model_Sersic_result:' in lines[i]]
# sersic_dic = read_string_list(string = lines[l1[trust]].split('model_Sersic_result: ')[1])
# flux = AGN_dic[0]['flux_within_frame']+AGN_dic[1]['flux_within_frame']+sersic_dic[0]['flux_within_frame']
# fluxs.append(flux)
# except:
# # print(ID, band, 'not exist')
# None
# # print(ID, fluxs)
# print(ID, round(-2.5*np.log10(np.average(fluxs)) + 27.0,1 ) )
| dartoon/my_code | projects/2021_dual_AGN/analyze/print_propose_table.py | print_propose_table.py | py | 9,493 | python | en | code | 0 | github-code | 13 |
71970244497 | #Code starts here
#Function to compress string
def compress(word):
word=word.lower()
mist=[]
l=0
while(l<len(word)):
m=word[l]
j=0
while(l<len(word) and word[l]==m):
j=j+1
l=l+1
mist.append(m)
mist.append(str(j))
return ''.join(mist)
#Code ends here
| bhattbhavesh91/GA_Sessions | python_guided_project/oct_batch_challenge/python/p4.py | p4.py | py | 356 | python | en | code | 32 | github-code | 13 |
21550383682 | import uuid;
import re;
import math;
import copy;
import os;
import configparser;
class lt2circuiTikz:
    """Converter from LTspice schematic (.asc) / symbol (.asy) files to
    circuiTikz LaTeX commands.

    Parsing is record-oriented: each compiled regular expression below
    matches one LTspice record type; the trailing comment next to each
    pattern shows the raw record layout it captures.
    """
    lastASCfile = None;
    # --- .asc / .asy record matchers ---------------------------------
    reIsHdr = re.compile(r'[\s]*Version 4[\s]+', flags=re.IGNORECASE);
    reIsSym = re.compile(r'[\s]*SymbolType[\s]+(.*)$', flags=re.IGNORECASE);# ASY file symbol type definition do NOT confuse with reIsComponent: an instance of a symbol
    reIsSheet = re.compile(r'^[\s]*SHEET[\s]+([-\d]+)[\s]+([-\d]+)[\s]+([-\d]+)', flags=re.IGNORECASE);
    # SHEET no wx1 wy1
    reIsWire = re.compile(r'^[\s]*WIRE[\s]+([-\d]+)[\s]+([-\d]+)[\s]+([-\d]+)[\s]+([-\d]+)', flags=re.IGNORECASE);
    # WIRE x1 y1 x2 y2
    reIsNetLabel = re.compile(r'^[\s]*FLAG[\s]+([-\d]+)[\s]+([-\d]+)[\s]+(.*)', flags=re.IGNORECASE);
    # FLAG x1 y1 label
    reIsComponent = re.compile(r'^[\s]*SYMBOL[\s]+([\S]+)[\s]+([-\d]+)[\s]+([-\d]+)[\s]+([R|M])([-\d]+)', flags=re.IGNORECASE);
    # SYMBOL path\\type x1 x2 [R|M] rotation
    reIsCAttrName = re.compile(r'^[\s]*SYMATTR[\s]+InstName[\s]+(.*)', flags=re.IGNORECASE);
    # SYMATTR InstName name
    reIsCAttrValue = re.compile(r'^[\s]*SYMATTR[\s]+Value[\s]+(.*)', flags=re.IGNORECASE);
    # SYMATTR Value value
    reIsCAttrValue2 = re.compile(r'^[\s]*SYMATTR[\s]+Value2[\s]+(.*)', flags=re.IGNORECASE);
    # SYMATTR Value2 value
    reIsCAttrGeneric = re.compile(r'^[\s]*SYMATTR[\s]+([\S]+)[\s]+(.*)', flags=re.IGNORECASE);
    # SYMATTR attr.name value
    reIsWindow = re.compile(r'^[\s]*WINDOW[\s]+([-\d]+)[\s]+([-\d]+)[\s]([-\d]+)[\s]+([\S]+)[\s]+([-\d]+)', flags=re.IGNORECASE);
    # WINDOW attr.No.Id rel.x1 rel y1 pos.str. size?=2
    reIsText = re.compile(r'^[\s]*TEXT[\s]+([-\d]+)[\s]+([-\d]+)[\s]([\S]+)[\s]+([-\d]+)[\s]+[;!](.*)', flags=re.IGNORECASE);
    # TEXT x1 y1 pos.str. size?=2 string
    reIsLine = re.compile(r'^[\s]*LINE[\s]+([\S]+)[\s]+([-\d]+)[\s]+([-\d]+)[\s]+([-\d]+)[\s]+([-\d]+)[\s]*([-\d]*)', flags=re.IGNORECASE);
    # Line type x1 y1 x2 y2 stype
    reIsRect = re.compile(r'^[\s]*RECTANGLE[\s]+([\S]+)[\s]+([-\d]+)[\s]+([-\d]+)[\s]+([-\d]+)[\s]+([-\d]+)[\s]*([-\d]*)', flags=re.IGNORECASE);
    # Rect type x1 y1 x2 y2 stype
    reIsCircle = re.compile(r'^[\s]*CIRCLE[\s]+([\S]+)[\s]+([-\d]+)[\s]+([-\d]+)[\s]+([-\d]+)[\s]+([-\d]+)[\s]*([-\d]*)', flags=re.IGNORECASE);
    # Circle/Oval type x1 y1 x2 y2 stype
    # symbols:
    reIsPin = re.compile(r'^[\s]*PIN[\s]+([-\d]+)[\s]+([-\d]+)[\s]([\S]+)[\s]+([-\d]+)', flags=re.IGNORECASE);
    # PIN x1 y1 pos.str. offset
    reIsPinName = re.compile(r'^[\s]*PINATTR[\s]+PinName[\s]+(.*)', flags=re.IGNORECASE);
    # PIN PinName pin label
    reIsPinOrder = re.compile(r'^[\s]*PINATTR[\s]+SpiceOrder[\s]+([-\d]+)', flags=re.IGNORECASE);
    # PIN PinName pin order
    reIsSAttrValue = re.compile(r'^[\s]*SYMATTR[\s]+Value[\s]+(.*)', flags=re.IGNORECASE);
    # SYMATTR Value value
    reIsSAttrPrefix = re.compile(r'^[\s]*SYMATTR[\s]+Prefix[\s]+(.*)', flags=re.IGNORECASE);
    # SYMATTR Prefix value
    reIsSAttrDescr = re.compile(r'^[\s]*SYMATTR[\s]+Description[\s]+(.*)', flags=re.IGNORECASE);
    # SYMATTR Description value
    reIsSAttrGeneric = re.compile(r'^[\s]*SYMATTR[\s]+([\S]+)[\s]+(.*)', flags=re.IGNORECASE);
    # SYMATTR kind value
    # Mapping between LTspice WINDOW attribute ids and attribute names (both directions).
    attrid2attr = {0:'InstName',3:'Value',123:'Value2',39:'SpiceLine',40:'SpiceLine2',38:'SpiceModel'};
    attr2attrid = {'InstName':0,'Value':3, 'Value2':123, 'SpiceLine':39, 'SpiceLine2':40,'SpiceModel':38};
    # Parser state: the element currently being assembled from the record stream.
    # NOTE(review): these are mutable class-level defaults; readASCFile()
    # rebinds them per instance before use.
    lastComponent = None;
    lastWire = None;
    lastLabel = None;
    lastText = None;
    lastPin =None;
    lastSymbol =None;
    lastAttributesDict = {};
    lastLine = '';
    lastSymLine = '';
    linecnt = 0;
    symlinecnt = 0;
    translinecnt = 0;
    config = None;
    def __init__(self):
        """Resolve the script/symbol directories, load lt2ti.ini (plus the
        per-symbol-directory sym.ini) and initialise scale/output options."""
        self.circDict = CircuitDict();
        self.symbolDict = SymbolDict();
        self.validInputFile = False;
        #self.lt2tscale = 1.0/32.0;
        #self.lt2tscale = (1.0/64.0);
        # Default LTspice-grid -> TikZ coordinate scale; may be overridden by
        # the [general] lt2tscale / lt2tscale_inverse ini options below.
        self.lt2tscale = (1.0/48.0);
        self.includepreamble = True;
        self.lastASCfile = None;
        # Locate the application root: __file__ is undefined in a frozen
        # (py2exe) executable, so fall back to sys.argv[0] in that case.
        try:
            approot2 = (os.path.dirname(os.path.realpath(__file__)));
            approot = os.path.dirname(os.path.abspath(__file__))
            self.scriptmode = 'script'
        except NameError: # We are the main py2exe script, not a module
            import sys
            approot = os.path.dirname(os.path.abspath(sys.argv[0]))
            self.scriptmode = 'py2exe'
        self.scriptdir = (approot);
        self.symfilebasepath = 'sym'+os.sep;
        configfileloc = self.scriptdir+os.sep+'lt2ti.ini'
        print('lt2ti: Loading config at "'+configfileloc+'"')
        # First pass only determines the symbol directory ...
        self.config = configparser.RawConfigParser()
        self.config.read(configfileloc)
        if (self.config.has_option('general', 'symdir')):
            self.symfilebasepath = self.config.get('general', 'symdir') + os.sep;
        print('lt2ti: initial sym basepath="'+self.symfilebasepath+'"')
        # ... then re-read the main ini together with the sym.ini that lives
        # inside that symbol directory.
        self.config = configparser.RawConfigParser()
        conffiles = self.config.read([self.scriptdir+os.sep+'lt2ti.ini', self.scriptdir+os.sep+ self.symfilebasepath + 'sym.ini'])
        #print('lt2ti: config sym basepath="' + self.config.get('general', 'symdir') + os.sep + '"')
        self.defaultgnd = r'circuiTikz\\gnd'
        if (self.config.has_option('general', 'lt2tscale')):
            self.lt2tscale = float(self.config.get('general', 'lt2tscale'));
        if (self.config.has_option('general', 'lt2tscale_inverse')):
            self.lt2tscale = 1.0/float(self.config.get('general', 'lt2tscale_inverse'));
        if (self.config.has_option('general', 'default_gnd')):
            self.defaultgnd = (self.config.get('general', 'default_gnd'));
        if (self.config.has_option('general', 'includepreamble')):
            includepreamble = self.config.get('general', 'includepreamble');
            self.includepreamble = ((str(includepreamble).lower() == 'true') or ((includepreamble) == '1'))
        print('lt2ti ready for conversion.')
def _symresetSymbol(self):
if (self.lastPin != None):
if (self.lastSymbol != None):
self.lastSymbol.addPin(self.lastPin);
self.lastPin = None;
if (self.lastSymbol != None):
self.symbolDict.addSymbol(self.lastSymbol);
self.lastSymbol = None;
def symresetPin(self):
if (self.lastPin != None):
if (self.lastSymbol != None):
self.lastSymbol.addPin(self.lastPin);
self.lastPin = None;
return;
def translate2ospath(self,aPath):
tPath = aPath.replace("\\",os.sep) # localize to OS path, since LTSpice under Wine/Windows always uses \
return tPath
    def readASYFile(self, relfileandpath):
        """Parse an LTspice symbol (.asy) file into a Symbol object.

        *relfileandpath* is taken relative to the script directory.
        Returns the parsed Symbol, or None if the file cannot be opened.
        """
        relfileandpath_orig = relfileandpath
        relfileandpath = self.translate2ospath(relfileandpath_orig) # localize to OS path, since LTSpice under Wine/Windows always uses \
        print('Loading Symbol file "'+relfileandpath+'" (orig="'+relfileandpath_orig+'")...')
        # read symbol file
        self.symlinecnt = 0;
        aSymbol = None;
        try :
            fhy = open(self.scriptdir+os.sep+ relfileandpath, mode='r', newline=None);
        except Exception as e:
            print('could not open ASY file "'+relfileandpath+'" (cwd="'+os.curdir+'", scriptdir="'+self.scriptdir+'", mode='+self.scriptmode+', fullpath="'+self.scriptdir+os.sep+ relfileandpath+'")');
            return None;
        # Dispatch every line to the first matching record handler.
        for line in fhy:
            self.symlinecnt = self.symlinecnt + 1;
            self.lastSymLine = line;
            m = self.reIsHdr.match(line);
            if (m != None):
                print('valid file header found:'+line);
                self.validInputFile = True;
                continue;
            m = self.reIsSym.match(line);
            if (m != None):
                print('symbol of type '+m.group(1)+': '+line);
                self._handleSType(m);
                continue;
            m = self.reIsSAttrPrefix.match(line);
            if (m != None):
                self._handleSymPrefix(m);
                continue;
            m = self.reIsSAttrDescr.match(line);
            if (m != None):
                self._handleSymDescr(m);
                continue;
            m = self.reIsSAttrValue.match(line);
            if (m != None):
                self._handleSymValue(m);
                continue;
            m = self.reIsSAttrGeneric.match(line);
            if (m != None):
                self._handleSymAttrGeneric(m);
                continue;
            m = self.reIsPin.match(line);
            if (m != None):
                self._handleSymPin(m);
                continue;
            m = self.reIsPinName.match(line);
            if (m != None):
                self._handleSymPinName(m);
                continue;
            m = self.reIsPinOrder.match(line);
            if (m != None):
                self._handlePinOrder(m);
                continue;
            print("could not match symbol line '"+line.replace('\n','')+"'");
        # Grab the symbol before _symresetSymbol() clears lastSymbol.
        aSymbol = self.lastSymbol;
        self._symresetSymbol(); # handle last item
        fhy.close();
        return aSymbol;
    def readASY2TexFile(self, relfileandpath, symbol):
        """Parse a .asy2tex translation file and merge it into a deep copy
        of *symbol*.

        Returns a new Symbol carrying the circuiTikz template, latex pin
        mapping, conversion key/values and origin information, or None if
        the file cannot be opened. A file whose first line is
        'ALIASFOR <other>.asy2tex' redirects (recursively) to that file.
        """
        relfileandpath_orig = relfileandpath
        relfileandpath = self.translate2ospath(relfileandpath_orig)
        asy2texfileandpath = self.scriptdir+os.sep+ relfileandpath
        try :
            fht = open(asy2texfileandpath, mode='r', newline=None);
        except Exception as e:
            print('Could not open requested asy2tex tile: "'+relfileandpath+'" (orig="'+relfileandpath_orig+'", cwd="'+os.curdir+'")');
            return None;
        print('Processing asy2tex file: "'+relfileandpath+'" (orig="'+relfileandpath_orig+'", cwd="'+os.curdir+'")');
        # Record matchers for the asy2tex format.
        rAliasFile = re.compile(r'ALIASFOR (.*\.asy2tex)[\s]*$', flags=re.IGNORECASE);
        rType = re.compile(r'^[\s]*Type[\s]+([\S]+)', flags=re.IGNORECASE); # compile(r'^[\s]*SYMATTR[\s]+([\S]+)[\s]+(.*)', flags=re.IGNORECASE);
        rOriginTex = re.compile(r'^[\s]*TexOrigin[\s]+([-\d\.]+)[\s]+([-\d\.]+)[\s]+([-\d]+)[\s]+([01TRUEtrueFALSEfalse]+)', flags=re.IGNORECASE);
        rOriginSym = re.compile(r'^[\s]*SymOrigin[\s]+([-\d\.]+)[\s]+([-\d\.]+)[\s]+([-\d]+)[\s]+([01TRUEtrueFALSEfalse]+)', flags=re.IGNORECASE);
        # x1 y1 rot mirror
        rPinList_be = re.compile(r'^[\s]*(BeginPinList)[\s]*$', flags=re.IGNORECASE);
        rPinListEntry = re.compile(r'^[\s]*([-\d]+)[\s]+([-\d]+)[\s]+([-\d]+)[\s]+([-\d]+)[\s]+([-\d]+)[\s]+(.*)$', flags=re.IGNORECASE);
        # ord x1 y1 rot length PinName
        rPinList_en = re.compile(r'^[\s]*(EndPinList)[\s]*$', flags=re.IGNORECASE);
        rConKV_be = re.compile(r'^[\s]*(BeginConversionKeyvals)[\s]*$', flags=re.IGNORECASE); #BeginConversionKeyvals
        rConKV_en = re.compile(r'^[\s]*(EndConversionKeyvals)[\s]*$', flags=re.IGNORECASE); #EndConversionKeyvals
        rConvKVList = re.compile(r'([^=]+)=(.*)$', flags=re.IGNORECASE);
        rTexName = re.compile(r'^[\s]*TexElementName[\s]+(.*)$', flags=re.IGNORECASE);
        rTex_be = re.compile(r'^[\s]*(BeginTex)[\s]*$', flags=re.IGNORECASE);
        rTex_en = re.compile(r'^[\s]*(EndTex)[\s]*$', flags=re.IGNORECASE);
        # Line-oriented state machine over the three list sections
        # (pin list, conversion key/values, raw tex template).
        ispinlist = False;
        istexlist = False;
        isconvkv = False;
        texlist = [];
        aTSymbol = copy.deepcopy(symbol);
        self.translinecnt = 0;
        for line in fht:
            self.translinecnt = self.translinecnt+1;
            m = rAliasFile.match(line);
            if ((m != None) and (self.translinecnt < 2)): # must be at the beginning
                # Alias indirection: close this file and parse the target
                # (path resolved relative to the current file's directory).
                fht.close();
                pathtofile = os.path.dirname(asy2texfileandpath);
                aliasfile = m.group(1);
                aliasfileandpath = pathtofile+os.sep+aliasfile;
                relaliasfileandpath = os.path.relpath(aliasfileandpath, self.scriptdir+os.sep);
                print('Found an alias entry: "'+aliasfile+'" which resolved to "'+aliasfileandpath+'" and "'+relaliasfileandpath+'" ');
                aTSymbol = self.readASY2TexFile(relaliasfileandpath, symbol);
                return aTSymbol;
            m = rPinList_be.match(line);
            if (m != None):
                ispinlist = True;
                continue;
            m = rPinList_en.match(line);
            if (m != None):
                ispinlist = False;
                continue;
            if (ispinlist):
                m = rPinListEntry.match(line);
                if (m != None):
                    # add pinlist entry
                    # Pinlist entry: ord x1 y1 rot length PinName
                    lxPin = SymPin();
                    lxPin.order = m.group(1);
                    lxPin.x1 = m.group(2);
                    lxPin.y1 = m.group(3);
                    lxPin.rot = m.group(4);
                    lxPin.length = m.group(5);
                    lxPin.name = m.group(6);
                    aTSymbol.latexPins.addPin(lxPin);
                    continue;
            m = rConKV_be.match(line);
            if (m != None):
                ispinlist = False;
                istexlist = False;
                isconvkv = True;
                continue;
            m = rConKV_en.match(line);
            if (m != None):
                ispinlist = False;
                istexlist = False;
                isconvkv = False;
                continue;
            if (isconvkv):
                m = rConvKVList.match(line);
                if (m != None):
                    aTSymbol.conversionKV[m.group(1)] = m.group(2);
                    continue;
            m = rTex_be.match(line);
            if (m != None):
                ispinlist = False;
                istexlist = True;
                isconvkv = False;
                continue;
            m = rTex_en.match(line);
            if (m != None):
                ispinlist = False;
                istexlist = False;
                isconvkv = False;
                continue;
            if (istexlist):
                # Inside BeginTex/EndTex: collect the raw template verbatim.
                texlist.append(line);
                continue;
            m = rTexName.match(line);
            if (m != None):
                aTSymbol.latexElementName = m.group(1);
                continue;
            m = rType.match(line);
            if (m != None):
                aTSymbol.latexType = m.group(1);
                continue;
            m = rOriginTex.match(line);
            if (m != None):
                aTSymbol.latexOriginX1 = float(m.group(1));
                aTSymbol.latexOriginY1 = float(m.group(2));
                aTSymbol.latexOriginRot = int(m.group(3));
                aTSymbol.latexOriginMirror = ((m.group(4) == '1') or (str.lower(m.group(4)) == 'true'));
                continue;
            m = rOriginSym.match(line);
            if (m != None):
                aTSymbol.symbolOriginX1 = float(m.group(1));
                aTSymbol.symbolOriginY1 = float(m.group(2));
                aTSymbol.symbolOriginRot = int(m.group(3));
                aTSymbol.symbolOriginMirror = ((m.group(4) == '1') or (str.lower(m.group(4)) == 'true'));
                continue;
            print("coult not match line "+str(self.translinecnt)+" in asy2tex file '"+relfileandpath+"' :"+line);
        aTSymbol.latexTemplate = texlist;
        fht.close();
        return aTSymbol;
    def _handleSType(self, m):
        """SymbolType record: commit any pending symbol and start a new one."""
        self._symresetSymbol(); # no more attributes for previous lines
        self.lastSymbol = Symbol(m.group(1))
        self.lastSymbol.lt2tscale = self.lt2tscale;
        return;
def _handleSymPrefix(self, m):
self.lastSymbol.prefix = m.group(1);
self.lastSymbol.attributes['Prefix'] = m.group(1);
return;
def _handleSymDescr(self, m):
self.lastSymbol.description = m.group(1);
self.lastSymbol.attributes['Description'] = m.group(1);
return;
def _handleSymValue(self, m):
self.lastSymbol.value = m.group(1);
self.lastSymbol.attributes['Value'] = m.group(1);
return;
def _handleSymAttrGeneric(self, m):
self.lastSymbol.attributes[m.group(1)] = m.group(2);
return;
    def _handleSymPin(self, m):
        """PIN record: commit any pending pin and start a new one at (x1, y1)
        with its label position string and offset."""
        self.symresetPin();
        self.lastPin = SymPin();
        self.lastPin.x1 = int(m.group(1));
        self.lastPin.y1 = int(m.group(2));
        self.lastPin.labelpos = (m.group(3));
        self.lastPin.labeloffset = int(m.group(4));
        return;
def _handleSymPinName(self, m):
self.lastPin.name = m.group(1);
def _handlePinOrder(self, m):
self.lastPin.order = int(m.group(1));
def _resetLast(self):
if (len(self.lastAttributesDict) > 0):
for aid, attr in self.lastAttributesDict.items():
self.lastComponent.addAttribute(attr);
#print("Adding component attrib kind="+str(attr.kind)+" val="+str(attr.value)+" to list, len="+str(len(self.lastComponent.attrlist)))
self.lastAttributesDict = {};
if (self.lastComponent != None):
self.circDict.addComponent(self.lastComponent);
self.lastComponent = None;
if (self.lastWire != None):
self.circDict.addWire(self.lastWire);
self.lastWire = None;
if (self.lastLabel != None):
self.circDict.addNetLabel(self.lastLabel);
self.lastLabel = None;
if (self.lastText != None):
self.circDict.addText(self.lastText);
self.lastText = None;
    def readASCFile(self, fileandpath):
        """Parse an LTspice schematic (.asc) file into a CircuitDict.

        Resets all parser state, then dispatches every line to the first
        matching record handler. Returns the populated CircuitDict, or
        None if the file cannot be opened.
        """
        print('Reading ASC file "'+fileandpath+'"...')
        # Clear all pending-parse state from any previous run.
        self.lastComponent = None;
        self.lastSymbol = None;
        self.lastAttributesDict = {};
        self.lastLabel = None;
        self.lastWire = None
        self.lastPin = None;
        self.circDict = CircuitDict();
        self.symbolDict = SymbolDict();
        self.lastText = None
        self.lastASCfile = fileandpath;
        self.linecnt = 0;
        try :
            # LTspice .asc files are latin-1 encoded.
            fhs = open(fileandpath, mode='r', newline=None, encoding='iso-8859-1');
        except Exception as e:
            print('could not open ASC file "'+fileandpath+'" (cwd="'+os.curdir+'")');
            return None;
        for line in fhs:
            self.linecnt = self.linecnt + 1;
            self.lastLine = line;
            m = self.reIsHdr.match(line);
            if (m != None):
                print('valid file header found:'+line);
                self.validInputFile = True;
                continue;
            m = self.reIsSheet.match(line);
            if (m != None):
                print('sheet: '+line);
                continue;
            m = self.reIsNetLabel.match(line);
            if (m != None):
                self._handleNetLabel(m);
                continue;
            m = self.reIsComponent.match(line);
            if (m != None):
                self._handleComponent(m);
                continue;
            m = self.reIsCAttrName.match(line);
            if (m != None):
                self._handleComponentName(m);
                continue;
            m = self.reIsCAttrValue.match(line);
            if (m != None):
                self._handleComponentValue(m);
                continue;
            m = self.reIsCAttrValue2.match(line);
            if (m != None):
                self._handleComponentValue2(m);
                continue;
            m = self.reIsCAttrGeneric.match(line);
            if (m != None):
                self._handleAttributeGeneric(m);
                continue;
            m = self.reIsWire.match(line);
            if (m != None):
                self._handleWire(m);
                continue;
            m = self.reIsWindow.match(line);
            if (m != None):
                self._handleWindow(m);
                continue;
            m = self.reIsText.match(line);
            if (m != None):
                self._handleText(m);
                continue;
            m = self.reIsLine.match(line);
            if (m != None):
                self._handleLine(m);
                continue;
            m = self.reIsRect.match(line);
            if (m != None):
                self._handleRect(m);
                continue;
            m = self.reIsCircle.match(line);
            if (m != None):
                self._handleCircle(m);
                continue;
            print("could not match line '"+line.replace('\n','')+"'");
        self._resetLast(); # handle last item
        fhs.close();
        return self.circDict;
    def _handleNetLabel(self, m):
        """FLAG record: either a ground symbol (label '0') or a named net
        label; for labels the 'netlabel' pseudo-symbol is loaded on demand
        and cached in the symbol dictionary."""
        self._resetLast(); # no more attributes for previous lines
        # FLAG <x1> <y1> <label>
        if (m.group(3) == '0'): # more a ground symbol than a net label
            gndcomp = self.defaultgnd;
            c = Component(gndcomp, int(m.group(1)), int(m.group(2)), 0, False, 'GND'+str(self.linecnt), '')
            self._handleComponent_worker(c);
        else:
            lbl = NetLabel(m.group(3), int(m.group(1)), int(m.group(2)));
            lbl.value = 'lbl'+str(self.linecnt)
            pathandctype = 'netlabel';
            if not self.symbolDict.hasSymbolPath(pathandctype):
                # not cached, try to load
                fullpath = self.symfilebasepath + pathandctype.replace('\\\\','\\');
                sym = self.readASYFile(fullpath+'.asy');
                sym.path = '';
                sym.ctype = pathandctype;
                sym.pathandctype = pathandctype;
                tsym = self.readASY2TexFile(fullpath+'.asy2tex',sym);
                if (tsym != None):
                    self.symbolDict.addSymbol(tsym); # add symbol and translation information
            else:
                # already existing in cache
                tsym = self.symbolDict.getSymbolByPath(pathandctype);
            lbl.symbol = tsym;
            self.lastLabel = lbl;
        return;
    def _handleComponent(self, m):
        """SYMBOL record: start a new component instance.

        Layout: SYMBOL path\\type x1 y1 [R|M]rotation ('M' means mirrored)."""
        self._resetLast(); # no more attributes for previous lines
        # SYMBOL path\\type x1 x2 [R|M] rotation
        # Component ( ctype, x1, y1, rot, mirror, name, value)
        c = Component(m.group(1), int(m.group(2)), int(m.group(3)), int(m.group(5)), (str.upper(m.group(4)) == 'M'), '', "");
        self._handleComponent_worker(c);
    def _handleComponent_worker(self, c):
        """Attach the (cached or freshly loaded) symbol+template to component
        *c* and make it the element currently being parsed."""
        if not self.symbolDict.hasSymbolPath(c.pathandctype):
            # not cached, try to load
            fullpath = self.symfilebasepath + c.pathandctype.replace('\\\\','\\');
            sym = self.readASYFile(fullpath+'.asy');
            sym.path = c.path;
            sym.ctype = c.ctype;
            sym.pathandctype = c.pathandctype;
            tsym = self.readASY2TexFile(fullpath+'.asy2tex',sym);
            if (tsym != None):
                self.symbolDict.addSymbol(tsym); # add symbol and translation information
        else:
            # already existing in cache
            tsym = self.symbolDict.getSymbolByPath(c.pathandctype);
        c.symbol = tsym;
        self.lastComponent = c;
        print('Found component: \n'+c.asString(' '));
        return;
def _handleComponentName(self, m):
# SYMATTR InstName <name>
self.lastComponent.name = m.group(1);
attrkind = 'InstName';
attrval = m.group(1);
if (not attrkind in self.lastAttributesDict):
attr = Attribute(attrkind, attrval);
else:
attr = self.lastAttributesDict[attrkind];
attr.value = attrval;
if (attrval != "*"):
attr.visible = True;
self.lastAttributesDict[attr.kind] = attr;
return;
def _handleComponentValue(self, m):
# SYMATTR Value <value>
attrkind = 'Value';
attrval = m.group(1);
if (attrval == '""'): # this is the way LTspice indicates an empty value
attrval = '';
self.lastComponent.value = attrval;
if (not attrkind in self.lastAttributesDict):
attr = Attribute(attrkind, attrval);
else:
attr = self.lastAttributesDict[attrkind];
attr.value = attrval;
if (attrval != "*"):
attr.visible = True;
self.lastAttributesDict[attr.kind] = attr;
return;
def _handleComponentValue2(self, m):
#SYMATTR Value2 <value>
self.lastComponent.value2 = m.group(1);
attrkind = 'Value2';
attrval = m.group(1);
if (not attrkind in self.lastAttributesDict):
attr = Attribute(attrkind, attrval);
else:
attr = self.lastAttributesDict[attrkind];
attr.value = attrval;
if (attrval != "*"):
attr.visible = True;
self.lastAttributesDict[attr.kind] = attr;
return;
def _handleAttributeGeneric(self, m):
#SYMATTR <attrib.name> <value>
attrkind = m.group(1);
attrval = m.group(2);
if (not attrkind in self.lastAttributesDict):
attr = Attribute(attrkind, attrval);
self.lastAttributesDict[attr.kind] = attr;
#print("Added AttributeGeneric kind="+str(attr.kind)+" val="+str(attrval))
else:
attr = self.lastAttributesDict[attrkind];
attr.value = attrval;
self.lastAttributesDict[attr.kind] = attr;
#print("Updated AttributeGeneric kind="+str(attr.kind)+" val="+str(attrval))
return;
    def _handleWire(self, m):
        """WIRE record: a straight wire segment, named after its line number."""
        self._resetLast(); # no more attributes for previous lines
        # WIRE <x1> <y1> <x2> <y2>
        self.lastWire = Wire("w"+str(self.linecnt),int(m.group(1)),int(m.group(2)), int(m.group(3)), int(m.group(4)))
        return;
def _handleWindow(self, m): # attribute display position
# WINDOW attr.No.Id rel.x1 rel y1 pos.str. size?=2
attrid = int(m.group(1));
if (attrid in self.attrid2attr):
attrkind = self.attrid2attr[attrid];
else:
print("Could not match attr.id="+m.group(1)+" to an attribute.")
return;
if (attrkind in self.lastAttributesDict):
# attribute exists: modify its values
attr = self.lastAttributesDict[attrkind];
else: # create new
attr = Attribute(attrkind, 'undefined');
attr.visible = True;
attr.idnum = m.group(1);
attr.x1rel = int(m.group(2));
attr.y1rel = int(m.group(3));
attr.align = m.group(4);
attr.size = float(m.group(5));
self.lastAttributesDict[attrkind] = attr;
return;
    def _handleText(self, m): # attribute display position
        """TEXT record: free schematic text (comment ';' or directive '!');
        the 'text' pseudo-symbol is loaded on demand and cached."""
        self._resetLast(); # no more attributes for previous lines
        # TEXT x1 y1 pos.str. size?=2 string
        txt = SchText(m.group(5), int(m.group(1)), int(m.group(2)))
        txt.align = m.group(3);
        txt.size = float(m.group(4));
        txt.value = 'lbl'+str(self.linecnt)
        pathandctype = 'text';
        if not self.symbolDict.hasSymbolPath(pathandctype):
            # not cached, try to load
            fullpath = self.symfilebasepath + pathandctype.replace('\\\\','\\');
            sym = self.readASYFile(fullpath+'.asy');
            sym.path = '';
            sym.ctype = pathandctype;
            sym.pathandctype = pathandctype;
            tsym = self.readASY2TexFile(fullpath+'.asy2tex',sym);
            if (tsym != None):
                self.symbolDict.addSymbol(tsym); # add symbol and translation information
        else:
            # already existing in cache
            tsym = self.symbolDict.getSymbolByPath(pathandctype);
        txt.symbol = tsym;
        self.lastText = txt;
        return;
    def _handleLine(self, m):
        """LINE record: a decorative schematic line (no electrical meaning);
        the 'SchLine' pseudo-symbol template is loaded on demand and cached."""
        # Optional 6th field is the dashed/dotted line style (default 0=solid).
        if ((m.group(6)) != ''):
            lstyle = int(m.group(6));
        else:
            lstyle = 0;
        schline = SchLine(int(m.group(2)), int(m.group(3)), int(m.group(4)), int(m.group(5)), lstyle)
        schline.kind = (m.group(1));
        pathandctype = 'SchLine';
        if not self.symbolDict.hasSymbolPath(pathandctype):
            # not cached, try to load
            fullpath = self.symfilebasepath + pathandctype.replace('\\\\','\\');
            #sym = self.readASYFile(fullpath+'.asy');
            sym = Symbol(pathandctype);
            sym.lt2tscale = self.lt2tscale;
            sym.path = '';
            sym.ctype = pathandctype;
            sym.pathandctype = pathandctype;
            tsym = self.readASY2TexFile(fullpath+'.asy2tex',sym);
            if (tsym != None):
                self.symbolDict.addSymbol(tsym); # add symbol and translation information
        else:
            # already existing in cache
            tsym = self.symbolDict.getSymbolByPath(pathandctype);
        schline.symbol = tsym;
        self.circDict.addSchLine(schline);
        return;
def _handleRect(self, m):
if ((m.group(6)) != ''):
lstyle = int(m.group(6));
else:
lstyle = 0;
schrect = SchRect(int(m.group(2)), int(m.group(3)), int(m.group(4)), int(m.group(5)), lstyle)
schrect.kind = (m.group(1));
pathandctype = 'SchRect';
if not self.symbolDict.hasSymbolPath(pathandctype):
# not cached, try to load
fullpath = self.symfilebasepath + pathandctype.replace('\\\\','\\');
#sym = self.readASYFile(fullpath+'.asy');
sym = Symbol(pathandctype);
sym.lt2tscale = self.lt2tscale;
sym.path = '';
sym.ctype = pathandctype;
sym.pathandctype = pathandctype;
tsym = self.readASY2TexFile(fullpath+'.asy2tex',sym);
if (tsym != None):
self.symbolDict.addSymbol(tsym); # add symbol and translation information
else:
# already existing in cache
tsym = self.symbolDict.getSymbolByPath(pathandctype);
schrect.symbol = tsym;
self.circDict.addSchLine(schrect);
return;
    def _handleCircle(self, m):
        """CIRCLE record: a decorative schematic circle/oval; the 'SchCirc'
        pseudo-symbol template is loaded on demand and cached.

        NOTE(review): circles are registered via addSchLine() -- CircuitDict
        has no dedicated circle registry, so they share the line registry."""
        # Optional 6th field is the dashed/dotted line style (default 0=solid).
        if ((m.group(6)) != ''):
            lstyle = int(m.group(6));
        else:
            lstyle = 0;
        schcirc = SchCirc(int(m.group(2)), int(m.group(3)), int(m.group(4)), int(m.group(5)), lstyle)
        schcirc.kind = (m.group(1));
        pathandctype = 'SchCirc';
        if not self.symbolDict.hasSymbolPath(pathandctype):
            # not cached, try to load
            fullpath = self.symfilebasepath + pathandctype.replace('\\\\','\\');
            #sym = self.readASYFile(fullpath+'.asy');
            sym = Symbol(pathandctype);
            sym.lt2tscale = self.lt2tscale;
            sym.path = '';
            sym.ctype = pathandctype;
            sym.pathandctype = pathandctype;
            tsym = self.readASY2TexFile(fullpath+'.asy2tex',sym);
            if (tsym != None):
                self.symbolDict.addSymbol(tsym); # add symbol and translation information
        else:
            # already existing in cache
            tsym = self.symbolDict.getSymbolByPath(pathandctype);
        schcirc.symbol = tsym;
        self.circDict.addSchLine(schcirc);
        return;
def copyFileContents(self,source,destination):
fhs = open(source, mode='r', newline='');
fhd = open(destination, mode='a', newline=''); # prevent automatic newline conversion to have more control over nl chars.
for line in fhs:
#print('copying line "'+line+'" from source to destination');
fhd.write(line);
fhd.close();
fhs.close();
def copyFile(self,source,destination):
fhs = open(source, mode='r', newline='');
fhd = open(destination, mode='w', newline=''); # prevent automatic newline conversion to have more control over nl chars.
for line in fhs:
#print('copying line "'+line+'" from source to destination');
fhd.write(line);
fhd.close();
fhs.close();
def copyFileContentsToHandle(self,source,hdestination):
fhs = open(source, mode='r', newline=None);
for line in fhs:
#print('copying line "'+line+'" from source to destination');
hdestination.write(line);
fhs.close();
    def writeCircuiTikz(self, outfile):
        """Emit the parsed circuit as circuiTikz LaTeX commands into *outfile*.

        Emission order: optional preamble and include files, wires (with
        junction dots where more than two wires meet), components, net
        labels, texts, decorative lines, rectangles, optional closing.
        LTspice's y axis grows downwards, hence the negative y scale.
        """
        print('Writing Tex commands to "'+outfile+'"...')
        xscale = 1 * self.lt2tscale;
        yscale = -1 * self.lt2tscale; # y is inverse for LTspice files
        xoffset = 0;
        yoffset = 0;
        fhd = open(outfile, mode='w', newline='\r\n'); # prevent automatic newline conversion to have more control over nl chars.
        if (self.includepreamble):
            self.copyFileContentsToHandle(self.scriptdir+os.sep+ self.symfilebasepath+'latex_preamble.tex', fhd);
        # Copy any configured auxiliary tex files next to the output file.
        if (self.config.has_option('general','latexincludes')):
            incfiles = self.config.get('general','latexincludes');
            incfiles = incfiles.split(';');
            for incfile in incfiles:
                srcfile = self.scriptdir+os.sep+ self.symfilebasepath + incfile;
                dstfile = os.path.dirname(os.path.abspath(outfile)) +os.sep + os.path.basename(incfile);
                self.copyFile(srcfile, dstfile)
                print(' copying latexincludes: "'+srcfile+'" to "'+dstfile+'" ...')
        if (self.config.has_option('general','bipoles_length')):
            bipoles_length = self.config.get('general','bipoles_length');
            fhd.write(r'\ctikzset{bipoles/length='+bipoles_length+'}\n');
        # Merge chained wire segments into polywires before output.
        self.circDict.wiresToPolyWires();
        #output wires:
        wireDict = self.circDict.getWiresByCoord();
        for pp, dictWires in wireDict.items():
            # all wires at the pp position
            #jcnt = self.circDict.getJunctionCound(pp);
            jcnt = self.circDict.getJunctionCount(pp);
            if (jcnt <= 2): # no junction
                p1junction = '';
            else: # junction
                p1junction = '*';
            for uuid, wire in dictWires.items():
                pp1 = wire.getP1Tuple();
                if (pp1 == pp): # only draw wire if pos1 is attached. Otherwise wires will get drawn multiple times.
                    pp2 = wire.getP2Tuple();
                    jcnt2 = self.circDict.getJunctionCount(pp2);
                    if (jcnt2 <= 2):
                        p2junction = '';
                    else:
                        p2junction = '*';
                    x1 = pp[0]*xscale+xoffset;
                    y1 = pp[1]*yscale+yoffset;
                    x2 = pp2[0]*xscale+xoffset;
                    y2 = pp2[1]*yscale+yoffset;
                    lenxn = 0;
                    if ((type(wire) is PolyWire)):
                        lenxn = len(wire.xn);
                    if ( ((type(wire) is Wire) and not (type(wire) is PolyWire)) or ( (type(wire) is PolyWire) and ( len(wire.xn) <= 0) )): # normal wire or polywire with no intermediate segments
                        # \draw (8,2)to[*short,*-] (8,4);%
                        fhd.write(r'\draw [/lt2ti/Net]('+str(x1)+r','+str(y1)+r')to[*short,'+p1junction+'-'+p2junction+', color=netcolor] ('+str(x2)+','+str(y2)+');% wire '+wire.name+'\n');
                    else:
                        # polywire
                        # \draw (4,2) -- (4,4) -- (6,4);
                        xn = wire.xn;
                        yn = wire.yn;
                        x1b = xn[0]*xscale+xoffset;
                        y1b = yn[0]*yscale+yoffset;
                        x2b = xn[len(xn)-1]*xscale+xoffset;
                        y2b = yn[len(yn)-1]*yscale+yoffset;
                        # normal wire segments for junctions: (this works with zero length segements, so we do not use the x1b/y1b, x2b/y2b points any longer.)
                        fhd.write(r'\draw [/lt2ti/Net]('+str(x1)+r','+str(y1)+r')to[*short,'+p1junction+'-, color=netcolor] ('+str(x1)+','+str(y1)+');% wire '+wire.name+' start\n');
                        fhd.write(r'\draw [/lt2ti/Net]('+str(x2)+r','+str(y2)+r')to[*short,-'+p2junction+', color=netcolor] ('+str(x2)+','+str(y2)+');% wire '+wire.name+' end\n');
                        #fhd.write(r'\draw [/lt2ti/Net]('+str(x1)+r','+str(y1)+r')to[*short,'+p1junction+'-] ('+str(x1b)+','+str(y1b)+');% wire '+wire.name+' start\n');
                        #fhd.write(r'\draw [/lt2ti/Net]('+str(x2b)+r','+str(y2b)+r')to[*short,-'+p2junction+'] ('+str(x2)+','+str(y2)+');% wire '+wire.name+' end\n');
                        # polyline
                        # NOTE(review): '\draw' below is in a non-raw string; it
                        # survives only because Python leaves unknown escapes
                        # ('\d') intact -- newer Pythons warn about this.
                        wstr = '\draw [/lt2ti/Net]('+str(x1)+r','+str(y1)+r')';
                        for i in range(0, (len(xn))):
                            xni = xn[i]*xscale+xoffset;
                            yni = yn[i]*yscale+yoffset;
                            wstr = wstr + ' -- '
                            wstr = wstr + ' ('+str(xni)+','+str(yni)+')' ;
                        wstr = wstr + ' -- ('+str(x2)+','+str(y2)+'); % wire '+wire.name+ ' polyline \n' ;
                        fhd.write(wstr);
        # output components
        for pp, compDict in self.circDict.coordCompDict.items():
            for c, comp in compDict.items():
                comp.circuitDict = self.circDict; # used for junction lookup etc.
                comp.config = self.config; # allow config parameters in components to be used
                texlines = comp.translateToLatex({}); # ToDo: apply xoffset, yoffset (currently not in use)
                for tl in texlines:
                    tl = re.sub(r'[\r]*[\n]$', '', tl); # remove trailing line break since we add it after the comment.
                    fhd.write(tl+' % component "'+comp.pathandctype+'" "'+comp.name+'" \n');
        # output labels
        for pp, labeldict in self.circDict.coordLabelDict.items():
            for l, label in labeldict.items():
                label.circuitDict = self.circDict;
                texlines = label.translateToLatex({}); # ToDo: apply xoffset, yoffset (currently not in use)
                for tl in texlines:
                    tl = re.sub(r'[\r]*[\n]$', '', tl); # remove trailing line break since we add it after the comment.
                    fhd.write(tl+' % label "'+label.pathandctype+'" "'+label.label+'" '+label.value+' \n');
        # output text
        for pp, txtdict in self.circDict.coordTextDict.items():
            for l, txt in txtdict.items():
                txt.circuitDict = self.circDict;
                texlines = txt.translateToLatex({}); # ToDo: apply xoffset, yoffset (currently not in use)
                for tl in texlines:
                    tl = re.sub(r'[\r]*[\n]$', '', tl); # remove trailing line break since we add it after the comment.
                    fhd.write(tl+' % text "'+txt.pathandctype+'" "'+txt.text+' '+txt.value+' " \n');
        # output lines
        for uuid, schline in self.circDict.getSchLines():
            schline.circuitDict = self.circDict;
            texlines = schline.translateToLatex({}); # ToDo: apply xoffset, yoffset (currently not in use)
            for tl in texlines:
                tl = re.sub(r'[\r]*[\n]$', '', tl); # remove trailing line break since we add it after the comment.
                fhd.write(tl+' % schLine "'+schline.pathandctype+'" '+str(schline.getP1Tuple())+'->'+str(schline.getP2Tuple())+' style='+str(schline.style)+'\n');
        # output rects
        for uuid, schrect in self.circDict.getSchRects():
            schrect.circuitDict = self.circDict;
            texlines = schrect.translateToLatex({}); # ToDo: apply xoffset, yoffset (currently not in use)
            for tl in texlines:
                tl = re.sub(r'[\r]*[\n]$', '', tl); # remove trailing line break since we add it after the comment.
                fhd.write(tl+' % schRect "'+schrect.pathandctype+'" '+str(schrect.getP1Tuple())+'->'+str(schrect.getP2Tuple())+' style='+str(schrect.style)+'\n');
        if (self.includepreamble):
            self.copyFileContentsToHandle(self.scriptdir+os.sep+ self.symfilebasepath+'latex_closing.tex', fhd);
        fhd.close();
        print("Done.");
        return;
class CircuitDict:
    """Registry of all parsed schematic elements (wires, components, net
    labels, texts, decorative lines/rectangles), indexed by uuid and by
    (x, y) coordinate so junctions and pin connections can be looked up."""
    def __init__(self):
        self.coordWireDict = {} # todo: refactor into SpatialDicts
        self.wireDict = {}
        self.coordCompDict = {}
        self.compDict = {}
        self.labelDict = {}
        self.coordLabelDict = {}
        self.textDict = {}
        self.coordTextDict = {}
        self.coordCompPinDict = {}
        # Decorative lines and rectangles live in SpatialDicts, keyed by
        # their 'uuid' attribute and indexed under both endpoints.
        self.lineDict = SpatialDict()
        self.lineDict.objidattrib = 'uuid'
        self.lineDict.objposattrib = ['getP1Tuple', 'getP2Tuple'];
        self.rectDict = SpatialDict()
        self.rectDict.objidattrib = 'uuid'
        self.rectDict.objposattrib = ['getP1Tuple', 'getP2Tuple'];
    def addSchLine(self, aLine):
        # Register a decorative schematic line (also used for circles).
        self.lineDict.addObj(aLine)
    def removeSchLine(self, aLine):
        # Unregister a decorative schematic line.
        self.lineDict.removeObj(aLine)
    def getSchLines(self):
        # Yield all registered decorative lines (delegates to the SpatialDict).
        return self.lineDict.getAllObjs()
    def addSchRect(self, aRect):
        # Register a decorative schematic rectangle.
        self.rectDict.addObj(aRect)
    def removeSchRect(self, aRect):
        # Unregister a decorative schematic rectangle.
        self.rectDict.removeObj(aRect)
    def getSchRects(self):
        # Yield all registered decorative rectangles (delegates to the SpatialDict).
        return self.rectDict.getAllObjs()
def addNetLabel(self, aLabel):
self.labelDict[aLabel.uuid] = aLabel;
pp = (aLabel.x1, aLabel.y1);
if (pp in self.coordLabelDict):
dictLbls = self.coordLabelDict[pp];
else:
dictLbls = {};
dictLbls[aLabel.uuid] = aLabel;
self.coordLabelDict[pp] = dictLbls;
def removeNetLabel(self, aLabel):
self.labelDict.pop(aLabel.uuid,None);
pp = (aLabel.x1, aLabel.y1);
dictLbls = self.coordLabelDict[pp];
dictLbls.pop(aLabel.uuid,None);
self.coordLabelDict[pp] = dictLbls;
def addText(self, aText):
self.textDict[aText.uuid] = aText;
pp = (aText.x1, aText.y1);
if (pp in self.coordTextDict):
dictTxts = self.coordLabelDict[pp];
else:
dictTxts = {};
dictTxts[aText.uuid] = aText;
self.coordTextDict[pp] = dictTxts;
def removeText(self, aText):
self.textDict.pop(aText.uuid,None);
pp = (aText.x1, aText.y1);
dictTxts = self.coordTextDict[pp];
dictTxts.pop(aText.uuid,None);
self.coordTextDict[pp] = dictTxts;
def addComponent(self, aComp):
self.compDict[aComp.uuid] = aComp;
pp = (aComp.x1, aComp.y1);
if (pp in self.coordCompDict):
cdict = self.coordCompDict[pp];
else:
cdict = {};
cdict[aComp.uuid] = aComp;
self.coordCompDict[pp] = cdict;
if aComp.symbol != None:
pindictlist = aComp.symbol.symbolPins.getAllPins();
for pname, pin in pindictlist:
self.addComponentPin(aComp, pin);
def addComponentPin(self, aComp, aPin):
pp = aComp.getAbsolutePinPos(aPin);
ppint = (int(round(pp[0])), int(round(pp[1]))); # use integer, since the AbsolutePinPos might have produced roundoff errors during rotation/shifting.
#pp = (pin.x1 + aComp.x1, pin.y1 + aComp.y1) # convert to absolute position
if (ppint in self.coordCompPinDict):
dictComps = self.coordCompPinDict[ppint];
else:
dictComps = {};
dictComps[aComp.uuid] = aComp;
self.coordCompPinDict[ppint] = dictComps;
def _removeComponent_compDic(self, cmpDict, aComp):
res = cmpDict.Pop(aComp.uuid, None);
if res != None:
return True;
return False;
def _removeComponent_coordDic(self, coordCDict, aComp):
cnt = 0;
pinPosList = aComp.getPinPosList();
for pp in pinPosList:
if (pp in coordCDict):
dictComps = coordCDict[pp];
res = dictComps.Pop(aComp.uuid,None);
if (res != None):
cnt = cnt+1;
coordCDict[pp] = dictComps;
if (cnt == len(pinPosList)):
return True;
return False;
def removeComponent(self, aComp):
success = True;
success = success and self._removeComponent_compDic(self, self.compDict, aComp);
success = success and self._removeComponent_coordDic(self, self.coordCompDict, aComp);
# todo: remove pins
def _removeWire_wireDic(self, dic, aWire):
res = dic.pop(aWire.uuid,None)
if res == None:
return False;
else:
return True;
def _removeWire_coordDic(self, dic, aWire):
success = True;
p1 = aWire.getP1Tuple();
wd = dic[p1];
success = success and self._removeWire_wireDic(wd, aWire);
dic[p1] = wd;
p2 = aWire.getP2Tuple();
wd = dic[p2];
success = success and self._removeWire_wireDic(wd, aWire);
dic[p2] = wd;
return success
def removeWire(self, aWire):
success = True;
success = success and self._removeWire_coordDic(self.coordWireDict, aWire);
success = success and self._removeWire_wireDic(self.wireDict, aWire);
return success;
def _addWire(self, wireDic, coordWireDic, aWire):
self.wireDict[aWire.uuid] = aWire;
p1 = aWire.getP1Tuple()
p2 = aWire.getP2Tuple()
if (p1 in self.coordWireDict) :
dictWires = self.coordWireDict[p1];
else :
dictWires = {};
dictWires[aWire.uuid] = aWire;
self.coordWireDict[p1] = dictWires;
if (p2 in self.coordWireDict) :
dictWires = self.coordWireDict[p2];
else :
dictWires = {};
dictWires[aWire.uuid] = aWire;
self.coordWireDict[p2] = dictWires;
return True;
def addWire(self, aWire):
return self._addWire(self.wireDict, self.coordWireDict, aWire);
def getAllWires(self):
wires = [];
wd = sorted(self.coordWireDict.items());
for pp, dictWires in wd:
wires.extend(list(sorted(dictWires.items())));
return wires;
def getWireDictAt(self, aPoint):
if aPoint in self.coordWireDict:
return self.coordWireDict[aPoint]
return {};
def getPinDictAt(self, aPoint):
if aPoint in self.coordCompPinDict:
return self.coordCompPinDict[aPoint];
return {}
def getWiresByCoord(self):
return self.coordWireDict;
def getJunctionCount(self, point):
pp = point;
cnt = 0;
if (pp in self.coordCompPinDict):
cnt = cnt+ len(self.coordCompPinDict[pp]);
if (pp in self.coordWireDict):
cnt = cnt+ len(self.coordWireDict[pp]);
return cnt;
def wiresToPolyWires(self):
changes = True
cnt = 0;
while changes :
changes = False # assume no changes
# create copies since we cannot change the dicts while iterating over them
wireDictCpy = dict(self.wireDict);
coordWireDictCpy = dict(self.coordWireDict)
for coord, dictWires in coordWireDictCpy.items():
isAtCompPin = (coord in self.coordCompPinDict);
if (isAtCompPin):
print(" Wire position "+str(coord)+' is incident with a component pin. Conversion to polywire suppressed.');
if (len(dictWires) == 2) and (coord not in self.coordCompDict) and (not isAtCompPin): # two wires joined at this point and no component node there. convert to PolyWire
changes = True # we join two wires, thus change the set and must reprocess it
wires = list(dictWires.values());
print('Joining wire1 = '+wires[0].asString()+' and wire2 = '+wires[1].asString()+' at '+str(coord)+'..');
# create a new polywire
pwire = PolyWire.JoinWires(wires[0],wires[1]);
#remove the wires from the lists
self._removeWire_coordDic(self.coordWireDict, wires[0])
self._removeWire_coordDic(self.coordWireDict, wires[1])
self._removeWire_wireDic(self.wireDict,wires[0])
self._removeWire_wireDic(self.wireDict,wires[1])
# add the new wire
self._addWire(self.wireDict, self.coordWireDict, pwire);
cnt = cnt+1;
print('.. joining resulted in polywire = '+pwire.asString()+'.');
break; # break the loop. It could be that we also need to join the newly created polywire.
#for uid, dictWires in self.wireDict:
# if len(dictWires) == 2: # two wires joined at this point: convert them into a PolyWire
# no more changes
print("Joining operations completed.")
return cnt;
class Attribute:
    """A typed key/value annotation attached to a schematic object.

    Carries the attribute kind (e.g. an LTspice attribute name), its value,
    a relative placement box, visibility, rotation and rendering options.
    """
    # Class-level defaults; __init__ always assigns per-instance values.
    value = None
    idnum = 0
    optionlist = []

    def __init__(self, kind, value):
        """Create an attribute of the given `kind` holding `value`."""
        self.uuid = uuid.uuid4()
        self.kind = kind
        self.value = value
        # relative placement box, filled in later by the parser
        self.x1rel = self.y1rel = 0
        self.x2rel = self.y2rel = 0
        self.visible = False
        self.rot = 0
        self.idnum = 0
        self.size = 2
        self.align = "Left"
        self.optionlist = []

    def addOption(self, option):
        """Append a rendering option string for this attribute."""
        self.optionlist.append(option)
class SchObject:
    """Base class for every placeable schematic object (component, net label,
    text, two-point graphic).  Stores the LTspice position/orientation, the
    derived LaTeX placement, and the shared coordinate-transform and
    template-placeholder helpers used by all subclasses."""
    # Class-level defaults; __init__ always sets per-instance values.
    x1 = 0;
    y1 = 0;
    texx1 = 0;
    texy1 = 0;
    texrot = 0;
    texmirror = False;
    rotation = 0;
    mirror = False;
    symbol = None;
    circuitDict = None;
    value = 'UndefinedSchValue';
    _rounddigits=5;
    pathandctype = '';
    def __init__(self, ctype, x1, y1):
        """Create a schematic object at LTspice coordinates (x1, y1).

        NOTE(review): `ctype` is accepted but not stored here; subclasses
        set `pathandctype` themselves — confirm this is intentional.
        """
        self.x1 = x1;
        self.y1 = y1;
        self.uuid = uuid.uuid4();
        self.texx1 = 0;
        self.texy1 = 0;
        self.texrot = 0;
        self.texmirror = False;
        self.rotation = 0;
        self.mirror = False;
        self.value = "undefinedSchValue";
        self.symbol = None;
        self.circuitDict = None;
        self.path = '';
        self.pathandctype = '';
        self.value2 = ''
        self.attrlist = [];
        self._rounddigits=5;
    def print(self):
        # Print the subclass-provided one-line description.
        print(self.asString())
    def _latexEscape(self, text):
        """
        :param text: a plain text message
        :return: the message escaped to appear correctly in LaTeX
        """
        conv = {
            '&': r'\&',
            '%': r'\%',
            '$': r'\$',
            '#': r'\#',
            '_': r'\_',
            '{': r'\{',
            '}': r'\}',
            '~': r'\textasciitilde{}',
            '^': r'\^{}',
            '\\': r'\textbackslash{}',
            '<': r'\textless',
            '>': r'\textgreater',
        }
        # Longest keys first so multi-char sequences are not split by shorter matches.
        regex = re.compile('|'.join(re.escape((key)) for key in sorted(conv.keys(), key = lambda item: - len(item))))
        return regex.sub(lambda match: conv[match.group()], text)
    def _coord2tex(self, pp):
        """Map an absolute LTspice coordinate to LaTeX/TikZ coordinates:
        scale by symbol.lt2tscale (y axis flipped) and shift by the symbol's
        LaTeX origin; rounded to _rounddigits."""
        xscale = 1.0 * self.symbol.lt2tscale;
        yscale = -1.0 * self.symbol.lt2tscale; # y is inverse for LTspice files
        xoffset = self.symbol.latexOriginX1;
        yoffset = self.symbol.latexOriginY1;
        pptex = (round(pp[0]*xscale+xoffset,self._rounddigits), round(pp[1]*yscale+yoffset, self._rounddigits))
        print('   new tex coord is '+str(pptex)+'.');
        return pptex;
    def _coord2abs(self, coord):
        """Transform a symbol-relative point into absolute schematic
        coordinates: rotate and mirror around the symbol origin (unless
        suppressed via conversion key-values), then translate by (x1, y1)."""
        # preprocess
        pp = coord;
        mirror = self.mirror;
        if (self.symbol.latexOriginMirror):
            mirror = not mirror;
        if (self.symbol.ckvUnsetOrFalse('suppressrotate')):
            # rotate
            rotdeg = (-1)*self.rotation + (-1)*self.symbol.latexOriginRot; # LTspice rotates CW positive, mathematics use CCW positive, therefore a minus sign.
            #if (mirror):
            #rotdeg = (-1)*rotdeg;
            #print ('   rotation reversed from '+str(-1*rotdeg)+'deg to '+str(rotdeg)+' due to mirroring.')
            pp = self.symbol.rotatePosOrigin(pp, rotdeg);
            if (rotdeg != 0):
                print('   new '+str(rotdeg)+'deg rotated coord is '+str(pp)+'.');
        else:
            print('   possible rotation suppressed by conversion key-value pair')
        if (self.symbol.ckvUnsetOrFalse('suppressmirror')):
            # mirror
            if mirror:
                pp = ((-1)*pp[0], pp[1]) # mirror according to symbol origin
                print('   new mirrored coord is '+str(pp)+'.');
        else:
            print('   possible mirroring suppressed by conversion key-value pair')
        # convert to absolute
        ppabs = (pp[0] + self.x1, pp[1] + self.y1); # move to absolute position according to our origin in absolute coordinates
        print('   new absolute coord is '+str(ppabs)+'.');
        return ppabs;
    def _symcKVAttrStrToAttr(self, match):
        """re.sub callback: resolve '#self.symbol.conversionKV.<key>#' from the
        symbol's conversion key-value table."""
        attr = match.group(1);
        res = self.symbol.conversionKV[attr]
        return str(res);
    def _attrStrToAttr(self, match):
        """re.sub callback: resolve '#self.<attr>#' from this object's attributes."""
        attr = match.group(1);
        res = getattr(self, attr);
        return str(res);
    def _mergeAttrStrToAttr(self, match):
        """re.sub callback fallback for '#self.mergedattrib.<name>#'.

        NOTE(review): on failure this prints a message and implicitly returns
        None, which would make re.sub raise TypeError — confirm subclass
        overrides make this path unreachable.
        """
        try:
            return self._attrStrToAttr(match)
        except Exception as e:
            print("Could not match attr.id="+match.group(1)+" to an attribute using merged lookup.")
    def _symAttrStrToAttr(self, match):
        """re.sub callback: resolve '#self.symbol.<attr>#' from the attached symbol."""
        attr = match.group(1);
        res = getattr(self.symbol, attr);
        return str(res);
    def addAttribute(self, attrib):
        """Append an Attribute object to this object's attribute list."""
        self.attrlist.append(attrib);
    def getAttributes(self, attrib):
        # NOTE(review): the `attrib` parameter is unused; the whole list is returned.
        return self.attrlist;
    def getP1Tuple(self):
        """Return the object's anchor position (x1, y1)."""
        return (self.x1, self.y1)
    def _checkRotatePM(self, match):
        """re.sub callback for '##rotate_<deg>_pmvalue##': '1' when rotation is
        an odd multiple of <deg> (or both zero), else '-1'."""
        rot = int(match.group(1))
        if (rot == 0):
            if (self.rotation == rot):
                return '1'
            else:
                return '-1'
        if (int((self.rotation/rot)%2) == 1):
            return '1';
        else:
            return '-1';
    def _checkRotateMP(self, match):
        """re.sub callback for '##rotate_<deg>_mpvalue##': inverse of _checkRotatePM."""
        rot = int(match.group(1))
        if (rot == 0):
            if (self.rotation == rot):
                return '-1'
            else:
                return '1'
        if (int((self.rotation/rot)%2) == 1):
            return '-1';
        else:
            return '1';
    def _checkRotate01(self, match):
        """re.sub callback for '##rotate_<deg>_01value##': '0' when rotation is
        an odd multiple of <deg> (or both zero), else '1'."""
        rot = int(match.group(1))
        if (rot == 0):
            if (self.rotation == rot):
                return '0'
            else:
                return '1'
        if (int((self.rotation/rot)%2) == 1):
            return '0';
        else:
            return '1';
    def _checkRotate10(self, match):
        """re.sub callback for '##rotate_<deg>_10value##': inverse of _checkRotate01."""
        rot = int(match.group(1))
        if (rot == 0):
            if (self.rotation == rot):
                return '1'
            else:
                return '0'
        if (int((self.rotation/rot)%2) == 1):
            return '1';
        else:
            return '0';
    def _checkLabelMirr(self, match):
        """re.sub callback for '##labelmirror[xy]##': returns '-1' or '1'
        depending on mirror state and rotation — presumably a sign factor for
        label placement; confirm against the symbol templates."""
        mirror = self._mirrored();
        rot = self.rotation;
        xy = match.group(1)
        res = '1'
        if (xy == 'x'):
            if ( (mirror and rot == 0) or (((rot/180)%2 == 1) and not mirror) ):
                res = '-1'
        elif (xy == 'y'):
            if ( ((rot == 90) or (rot == -270)) ):
                res = '-1'
        return res;
    def _mirrored(self):
        """Effective mirror state: instance flag XOR the symbol's LaTeX- and
        symbol-origin mirror flags."""
        mirror = self.mirror;
        if (self.symbol.latexOriginMirror):
            mirror = not mirror;
        if (self.symbol.symbolOriginMirror):
            mirror = not mirror;
        return mirror;
    def _mirrorReplace(self, line, opt):
        """Resolve all ##mirror...##, ##rotate...## and ##labelmirror...##
        placeholders in a template line according to the effective mirror
        state and rotation."""
        mirror = self._mirrored();
        if (mirror):
            line = re.sub('##mirror_invert##','invert', line);
            line = re.sub('##mirror_mirror##','mirror', line);
            line = re.sub('##mirror_xscale##','xscale=-1', line);
            line = re.sub('##mirror_xscale_value##','-1', line);
            line = re.sub('##mirror_yscale##','yscale=-1', line);
            line = re.sub('##mirror_yscale_value##','-1', line);
            line = re.sub('##mirror##','invert', line); # for circuiTikz mirror means horizontal mirror, but we want vertical mirror, wich is invert in circuiTikZ terminology
            if (int((self.rotation/90)%2) == 0): # not 90deg (or odd multiples) rotated
                line = re.sub('##mirror_rot_xscale_value##','-1', line); # rotated mirror scaling
                line = re.sub('##mirror_rot_yscale_value##','1', line); # rotated mirror scaling
            else:
                line = re.sub('##mirror_rot_xscale_value##','1', line); # normal mirror
                line = re.sub('##mirror_rot_yscale_value##','-1', line); # normal mirror
        else:
            line = re.sub('##mirror_invert##','', line);
            line = re.sub('##mirror_mirror##','', line);
            line = re.sub('##mirror_xscale##','', line);
            line = re.sub('##mirror_xscale_value##','1', line);
            line = re.sub('##mirror_yscale##','', line);
            line = re.sub('##mirror_yscale_value##','1', line);
            line = re.sub('##mirror##','', line);
            line = re.sub('##mirror_rot_yscale_value##','1', line); # no mirror
            line = re.sub('##mirror_rot_xscale_value##','1', line); # no mirror
        line = re.sub('##rotate_([-\d\.]+)_pmvalue##',self._checkRotatePM, line);
        line = re.sub('##rotate_([-\d\.]+)_mpvalue##',self._checkRotateMP, line);
        line = re.sub('##rotate_([-\d\.]+)_01value##',self._checkRotate01, line);
        line = re.sub('##rotate_([-\d\.]+)_10value##',self._checkRotate10, line);
        line = re.sub('##labelmirror([xy])##',self._checkLabelMirr, line);
        rotatemirror = 1;
        if (self.mirror):
            rotatemirror = -1;
        line = re.sub('##rotate_mirror##',str(round((-1)*self.rotation*rotatemirror+(-1)*self.symbol.latexOriginRot+(-1)*self.symbol.symbolOriginRot)), line);
        return line;
class Component(SchObject):
    """A component (symbol instance) placed on the schematic.

    Parses the LTspice symbol path/type, resolves pin coordinates through the
    attached Symbol prototype, and renders the symbol's LaTeX template.
    """
    # Class-level defaults; __init__ always sets per-instance values.
    uuid = None;
    x1 = 0;
    y1 = 0;
    texx1 = 0;
    texy1 = 0;
    texrot = 0;
    texmirror = False;
    rotation = 0;
    mirror = False;
    ctype = "undefined";
    name = "undefined";
    kind = "undefined";
    value = "undefined";
    symbol = None;
    path = '';
    pathandctype = '';
    value2 = ""
    attrlist = [];
    def __init__(self, ctype, x1, y1, rot, mirror, name, value):
        """Create a component of symbol type `ctype` at (x1, y1) with the
        given rotation in degrees, mirror flag, instance name and value."""
        super().__init__(ctype,x1,y1);
        self.ctype = ctype;
        self.x1 = x1;
        self.y1 = y1;
        self.texx1 = 0;
        self.texy1 = 0;
        self.rotation = rot;
        self.mirror = (mirror==True);
        # Fixed: the instance name was never stored, so every component
        # reported the class-level default "undefined".
        self.name = name;
        self.symbol = None;
        self.path = '';
        self.pathandctype = '';
        self.value2 = ""
        self.attrlist = [];
        self.circuitDict = None; # ref to parent circuit dict to determine junctions etc.
        self.pathandctype = ctype;
        # split "lib\\path\\type" into library path and bare component type
        re_ctype = re.compile(r'(.*\\\\)([a-zA-Z0-9_-]+)$', flags=re.IGNORECASE);
        m = re_ctype.match(self.pathandctype);
        if (m != None):
            self.path = m.group(1);
            self.path = self.path.replace('\\\\','\\');
            self.ctype = m.group(2);
        else:
            self.path = '';
            self.ctype = self.pathandctype;
        self.value = value;
        self.value2 = '';
        self.uuid = uuid.uuid4();
    def asString(self, indent=''):
        """Multi-line human-readable description including the symbol, if any."""
        res = indent+'component "'+self.pathandctype+'" ('+self.path+','+self.ctype+') named "'+self.name+'" with value "'+self.value+'" at '+str((self.x1,self.y1))+' rot'+str(self.rotation)+' mirrored='+str(self.mirror)+'.\n';
        if (self.symbol != None):
            res = res + indent+'  '+'component symbol:'+self.symbol.asString(indent+'  ');
        return res;
    def _mergeAttrStrToAttr(self, match):
        """Resolve '#self.mergedattrib.<name>#': search the instance attribute
        list first, then the symbol attribute table (exact key, lower-case
        key, then case-insensitive scan), finally the base-class lookup."""
        m = match.group(1)
        for attr in (self.attrlist):
            if (attr.kind == m) or (str.lower(attr.kind) == str.lower(m)):
                return attr.value
        av = self.symbol.attributes.get(m, "")
        if (av != ""):
            return av
        av = self.symbol.attributes.get(str.lower(m), "")
        if (av != ""):
            return av
        for attrn,av in self.symbol.attributes.items():
            if (attrn == m) or (str.lower(attrn) == str.lower(m)):
                return av
        # Fixed: super() already binds self; passing it again raised TypeError.
        # An unreachable `return "?"+match+"?"` after this return was removed.
        return super()._mergeAttrStrToAttr(match)
    def _pinIsJunction(self, match):
        """re.sub callback for '#<pin>:junction#': '*' when two or more other
        wires/pins meet at the pin's absolute position, else ''."""
        pinname = match.group(1);
        pin = self.symbol.latexPins.getPinByName(pinname);
        if (pin == None):
            pin = self.symbol.symbolPins.getPinByName(pinname);
        if (pin == None):
            print('Unable to find pin "'+pinname+'" of component '+self.pathandctype+' in either latex or asy pins. Using zero.');
            pp = (0, 0);
        else:
            pp = pin.getP1Tuple();
        # get incident wire count at point
        if (self.circuitDict == None):
            print('Unable to lookup pin Junction for pin "'+pinname+'" of component '+self.pathandctype+' in either latex or asy pins. Using no juncion.');
            return '';
        ppabs = self._coord2abs(pp);
        wdict = self.circuitDict.getWireDictAt(ppabs);
        cpindict = self.circuitDict.getPinDictAt(ppabs);
        cpindict.pop(self.uuid, None) # remove our own pin at this position
        if ((len(wdict) + len(cpindict)) >= 2): # more than two pins / wires at this point: junction
            return '*'
        else:
            return ''
    def getAbsolutePinPos(self, aPin):
        """Absolute schematic coordinates of one of this component's pins."""
        pp = aPin.getP1Tuple();
        ppabs = self._coord2abs(pp);
        return ppabs;
    def _pinToCoord(self, match):
        """re.sub callback for '#<pin>:x1#'/'#<pin>:y1#': the pin's absolute
        position converted to LaTeX coordinates."""
        xscale = 1.0 * self.symbol.lt2tscale;
        yscale = -1.0 * self.symbol.lt2tscale; # y is inverse for LTspice files
        mirror = self.mirror;
        if (self.symbol.latexOriginMirror):
            mirror = not mirror;
        pinname = match.group(1);
        pincoord = match.group(2);
        pin = self.symbol.latexPins.getPinByName(pinname);
        pin_src = 'asy2latex'
        if (pin == None):
            pin_src = 'asy'
            pin = self.symbol.symbolPins.getPinByName(pinname);
        if (pin == None):
            pin_src = 'none'
            print('Unable to find '+pin_src+' pin "'+pinname+'" of component '+self.pathandctype+' in either latex or asy pins. Using zero.');
            pp = (0, 0);
        else:
            pp = pin.getP1Tuple();
        print('  processing comp. "'+self.name+'" pin "'+pinname+'" at symbol coord '+str(pin.getP1Tuple())+':');
        # pp is relative to the component Origin
        ppabs = self._coord2abs(pp)
        pptex = self._coord2tex(ppabs);
        if (str.lower(pincoord) == 'x1'):
            res = str(pptex[0]);
        elif (str.lower(pincoord) == 'y1'):
            res = str(pptex[1]);
        else:
            print('Unknown pin coord component: "'+pincoord+'"');
            res = '??';
        return res;
    def _confAttrStrToAttr(self, match):
        """re.sub callback for '#self.config.<opt>#': value from the
        [component] section of the config, or '' when absent."""
        attr = match.group(1);
        if (self.config.has_option('component',attr)):
            res = self.config.get('component',attr)
        else:
            res = '';
        return str(res);
    def _toLatexReplace(self, line, opt):
        """Substitute all template placeholders in one line; the replacement
        order is significant (most specific patterns first)."""
        line1 = line;
        line = re.sub('#([A-Za-z0-9_+\-!]+):([xy][0-9]+)#',self._pinToCoord, line);
        line = re.sub('#([A-Za-z0-9_+\-!]+):junction#',self._pinIsJunction, line);
        line = re.sub('#self.symbol.conversionKV.([A-Za-z0-9_\-!]+)#',self._symcKVAttrStrToAttr, line);
        line = re.sub('#self.symbol.([A-Za-z0-9_\-!]+)#',self._symAttrStrToAttr, line);
        line = re.sub('#self.mergedattrib.([A-Za-z0-9_\-!]+)#',self._mergeAttrStrToAttr, line);
        line = re.sub('#self.config.([A-Za-z0-9_\-!]+)#',self._confAttrStrToAttr, line);
        line = re.sub('#self.([A-Za-z0-9_\-!]+)#',self._attrStrToAttr, line);
        line = re.sub('##options##',self.value2, line);
        line = re.sub('##rotate##',str(round((-1)*self.rotation+(-1)*self.symbol.latexOriginRot+(-1)*self.symbol.symbolOriginRot)), line);
        line = self._mirrorReplace(line, opt)
        print('Converted tex line "'+line1.replace('\n','')+'"\n'+
              '             to    "'+line.replace('\n','')+'"');
        return line;
    def translateToLatex(self, opt):
        """Compute the tex placement of this component and render its symbol's
        LaTeX template; returns the list of substituted lines."""
        if (opt == None):
            opt = {};
        p1 = (0,0);
        p1 = (p1[0] + self.symbol.symbolOriginX1, p1[1]+ self.symbol.symbolOriginY1);
        p1 = self._coord2abs(p1); # rotate and mirror origin offset
        lp= self._coord2tex(p1)
        self.texx1 = lp[0];
        self.texy1 = lp[1];
        self.texrot = self.rotation + (-1)*self.symbol.symbolOriginRot;
        self.texmirror = (self.mirror != self.symbol.symbolOriginMirror);
        print('Placing component '+self.ctype+' named "'+self.name+'" valued "'+self.value+'" at spice coord '+str(self.getP1Tuple())+', '+('M' if self.mirror else 'R')+str(self.rotation)+' -> tex '+str(self._coord2tex(self.getP1Tuple()))+'. ')
        translated = [];
        if (self.symbol != None):
            for line in self.symbol.latexTemplate:
                translated.append(self._toLatexReplace(line, opt));
        return translated;
    def setSymbolFromPrototype(self, aSymbolProto):
        """Attach a deep copy of a symbol prototype to this component."""
        # copy the symbol
        self.symbol = copy.deepcopy(aSymbolProto);
        # modify the symbol according to our properties
    def getPinList(self):
        # NOTE(review): assumes symbolPins behaves like a dict here, while the
        # other accessors use getAllPins() — confirm which interface is real.
        return list(self.symbol.symbolPins.values());
    def addPin(self, aPin):
        """Add a pin to the attached symbol."""
        self.symbol.symbolPins.addPin(aPin);
    def removePin(self,aPin):
        """Remove a pin from the attached symbol."""
        self.symbol.symbolPins.removePin(aPin);
    def getPinCount(self):
        """Number of pins on the attached symbol."""
        return len(self.symbol.symbolPins.getAllPins());
    def getPinPosList(self):
        """All (name, pin) items of the attached symbol's pins."""
        return self.symbol.symbolPins.getAllPins()
class SchText(SchObject):
    """A free-standing text annotation on the schematic."""

    def __init__(self, text, x1, y1):
        """Create a text object with content `text` at LTspice coords (x1, y1)."""
        super().__init__('schtext', x1, y1)
        self.uuid = uuid.uuid4()
        self.text = text
        self.x1 = x1
        self.y1 = y1
        self.size = 2
        self.align = "Left"
        self.attrlist = []
        self.symbol = None

    def _toLatexReplace(self, line, opt):
        """Substitute the template placeholders; rule order is significant."""
        raw = line
        rules = [
            ('#self.symbol.([A-Za-z0-9_\-!]+)#', self._symAttrStrToAttr),
            ('#self.symbol.conversionKV.([A-Za-z0-9_\-!]+)#', self._symcKVAttrStrToAttr),
            ('#self.textstr#', self._latexEscape(self.text)),
            ('#self.([A-Za-z0-9_\-!]+)#', self._attrStrToAttr),
            ('##options##', self.symbol.value2),
            ('##rotate##', str(round((-1)*self.rotation+(-1)*self.symbol.latexOriginRot+(-1)*self.symbol.symbolOriginRot))),
        ]
        for pattern, repl in rules:
            line = re.sub(pattern, repl, line)
        line = self._mirrorReplace(line, opt)
        print('Converted tex line "'+raw.replace('\n','')+'"\n'+
              '             to    "'+line.replace('\n','')+'"')
        return line

    def translateToLatex(self, opt):
        """Compute the tex placement of the text and render its symbol template."""
        opt = {} if opt is None else opt
        origin = (self.symbol.symbolOriginX1, self.symbol.symbolOriginY1)
        # rotate and mirror the origin offset, then map into tex coordinates
        self.texx1, self.texy1 = self._coord2tex(self._coord2abs(origin))
        self.texrot = self.rotation + (-1)*self.symbol.symbolOriginRot
        self.texmirror = (self.mirror != self.symbol.symbolOriginMirror)
        print('Placing text '+self.pathandctype+' textcontent "'+self._latexEscape(self.text)+'" valued "'+self.value+'" at spice coord '+str(self.getP1Tuple())+', '+('M' if self.mirror else 'R')+str(self.rotation)+' -> tex '+str(self._coord2tex(self.getP1Tuple()))+'. ')
        if self.symbol is None:
            return []
        return [self._toLatexReplace(tl, opt) for tl in self.symbol.latexTemplate]
class NetLabel(SchObject):
    """A net (node) name attached to a point of the schematic."""

    def __init__(self, label, x1, y1):
        """Create a net label with text `label` at LTspice coords (x1, y1)."""
        super().__init__('netlabel', x1, y1)
        self.uuid = uuid.uuid4()
        self.label = label
        self.x1 = x1
        self.y1 = y1
        self.attrlist = []
        self.symbol = None

    def _toLatexReplace(self, line, opt):
        """Substitute the template placeholders; rule order is significant."""
        raw = line
        rules = [
            ('#self.symbol.([A-Za-z0-9_\-!]+)#', self._symAttrStrToAttr),
            ('#self.symbol.conversionKV.([A-Za-z0-9_\-!]+)#', self._symcKVAttrStrToAttr),
            ('#self.labelstr#', self._latexEscape(self.label)),
            ('#self.([A-Za-z0-9_\-!]+)#', self._attrStrToAttr),
            ('##options##', self.symbol.value2),
            ('##rotate##', str(round((-1)*self.rotation+(-1)*self.symbol.latexOriginRot+(-1)*self.symbol.symbolOriginRot))),
        ]
        for pattern, repl in rules:
            line = re.sub(pattern, repl, line)
        line = self._mirrorReplace(line, opt)
        print('Converted tex line "'+raw.replace('\n','')+'"\n'+
              '             to    "'+line.replace('\n','')+'"')
        return line

    def translateToLatex(self, opt):
        """Compute the tex placement of the label and render its symbol template."""
        opt = {} if opt is None else opt
        origin = (self.symbol.symbolOriginX1, self.symbol.symbolOriginY1)
        # rotate and mirror the origin offset, then map into tex coordinates
        self.texx1, self.texy1 = self._coord2tex(self._coord2abs(origin))
        self.texrot = self.rotation + (-1)*self.symbol.symbolOriginRot
        self.texmirror = (self.mirror != self.symbol.symbolOriginMirror)
        print('Placing label '+self.pathandctype+' named "'+self._latexEscape(self.label)+'" valued "'+self.value+'" at spice coord '+str(self.getP1Tuple())+', '+('M' if self.mirror else 'R')+str(self.rotation)+' -> tex '+str(self._coord2tex(self.getP1Tuple()))+'. ')
        if self.symbol is None:
            return []
        return [self._toLatexReplace(tl, opt) for tl in self.symbol.latexTemplate]
class SchTwopoint(SchObject):
    """Base class for two-point schematic graphics (lines, rects, circles)."""

    def __init__(self, x1, y1, x2, y2, style):
        """Create a two-point graphic spanning (x1, y1)-(x2, y2) with the
        given LTspice line-style index."""
        super().__init__('SchTwopoint_'+str(style), x1, y1)
        self.uuid = uuid.uuid4()
        self.x1, self.y1 = x1, y1
        self.x2, self.y2 = x2, y2
        self.texx2 = 0  # texx1/texy1 come from SchObject
        self.texy2 = 0
        self.texlinestyle = 'green,thick,dashed'
        self.kind = 'Normal'
        self.style = style
        self.name = ''

    def getP1Tuple(self):
        """First endpoint/corner."""
        return (self.x1, self.y1)

    def getP2Tuple(self):
        """Second endpoint/corner."""
        return (self.x2, self.y2)

    def _toLatexReplace(self, line, opt):
        """Substitute the template placeholders; rule order is significant."""
        raw = line
        # ToDo: Linestyle
        rules = [
            ('#self.symbol.([A-Za-z0-9_\-!]+)#', self._symAttrStrToAttr),
            ('#self.symbol.conversionKV.([A-Za-z0-9_\-!]+)#', self._symcKVAttrStrToAttr),
            ('#self.([A-Za-z0-9_\-!]+)#', self._attrStrToAttr),
            ('##options##', self.symbol.value2),
            ('##rotate##', str(round((-1)*self.rotation+(-1)*self.symbol.latexOriginRot+(-1)*self.symbol.symbolOriginRot))),
        ]
        for pattern, repl in rules:
            line = re.sub(pattern, repl, line)
        line = self._mirrorReplace(line, opt)
        print('Converted tex line "'+raw.replace('\n','')+'"\n'+
              '             to    "'+line.replace('\n','')+'"')
        return line

    def translateToLatex(self, opt):
        """Compute tex endpoint coordinates and line style, then render the
        symbol's LaTeX template."""
        opt = {} if opt is None else opt
        self.texx1, self.texy1 = self._coord2tex((self.x1, self.y1))
        self.texx2, self.texy2 = self._coord2tex((self.x2, self.y2))
        # LTspice line-style index -> TikZ drawing options
        stylemap = {
            0: 'lttotidrawcolor, solid',
            1: 'lttotidrawcolor, line width=0.4pt, dashed',
            2: 'lttotidrawcolor, line width=0.7pt, dotted',
            3: 'lttotidrawcolor, line width=0.4pt, dashdotted',
            4: 'lttotidrawcolor, line width=0.4pt, dashdotdotted', # \tikzstyle{dashdotdotted}=[dash pattern=on 3pt off 2pt on \the\pgflinewidth off 2pt on \the\pgflinewidth off 2pt]
            5: 'lttotidrawcolor, line width=2pt, solid',
        }
        self.texlinestyle = stylemap.get(self.style, self.texlinestyle)
        self.texrot = self.rotation + (-1)*self.symbol.symbolOriginRot
        self.texmirror = (self.mirror != self.symbol.symbolOriginMirror)
        print('Placing SchTwoPoit '+self.pathandctype+' at spice coord ['+str(self.getP1Tuple())+';'+str(self.getP2Tuple())+'], '+('M' if self.mirror else 'R')+str(self.rotation)+' -> tex ['+str(self._coord2tex(self.getP1Tuple()))+';'+str(self._coord2tex(self.getP2Tuple()))+']. ')
        if self.symbol is None:
            return []
        return [self._toLatexReplace(tl, opt) for tl in self.symbol.latexTemplate]
class SchLine(SchTwopoint):
    """A free-standing graphical line segment."""
    uuid = None

    def __init__(self, x1, y1, x2, y2, style):
        """Create a line from (x1, y1) to (x2, y2) with the given line style."""
        super().__init__(x1, y1, x2, y2, style)
        self.pathandctype = 'SchLine'
class SchRect(SchTwopoint):
    """A free-standing graphical rectangle spanned by two corners."""
    uuid = None

    def __init__(self, x1, y1, x2, y2, style):
        """Create a rectangle with corners (x1, y1) and (x2, y2)."""
        super().__init__(x1, y1, x2, y2, style)
        self.pathandctype = 'SchRect'
class SchCirc(SchTwopoint):
    """A free-standing graphical circle/ellipse spanned by its bounding box."""
    uuid = None

    def __init__(self, x1, y1, x2, y2, style):
        """Create a circle bounded by (x1, y1) and (x2, y2)."""
        super().__init__(x1, y1, x2, y2, style)
        self.pathandctype = 'SchCirc'
class Wire:
    """A straight LTspice wire segment between two schematic points."""
    # Class-level defaults; __init__ always sets per-instance values.
    name = "UnknownWire"
    wlen = 0
    startjunction = False
    endjunction = False
    x1 = float('NaN')
    y1 = float('NaN')
    x2 = float('NaN')
    y2 = float('NaN')

    def __init__(self, name, x1, y1, x2, y2):
        """Create wire `name` running from (x1, y1) to (x2, y2)."""
        self.name = name
        self.x1 = x1
        self.y1 = y1
        self.x2 = x2
        self.y2 = y2
        self.uuid = uuid.uuid4()
        self.startjunction = False
        self.endjunction = False

    def getCoordTuple(self):
        """Return both endpoints as a flat (x1, y1, x2, y2) tuple."""
        return (self.x1, self.y1, self.x2, self.y2)

    def getP1Tuple(self):
        """Return the start point (x1, y1)."""
        return (self.x1, self.y1)

    def getP2Tuple(self):
        """Return the end point (x2, y2)."""
        return (self.x2, self.y2)

    def asString(self):
        """One-line human-readable description of the wire."""
        return 'wire: '+self.name+' = '+str(self.getP1Tuple())+' -> '+str(self.getP2Tuple())+'.'

    def asString2(self):
        """Verbose description including the junction flags.

        Fixed: the original returned a tuple of fragments (comma-separated
        expressions) instead of the concatenated string.
        """
        return ("Wire : " + self.name + ", x1=" + str(self.x1) + ", y1=" + str(self.y1)
                + " ; x2=" + str(self.x2) + ", y2=" + str(self.y2)
                + ", startj:" + str(self.startjunction) + ", endj:" + str(self.endjunction) + ".")

    def print(self):
        # Fixed: the original printed the bound method object itself
        # (missing call parentheses), not the description string.
        print(self.asString())
class PolyWire(Wire):
    # PolyWires only connect to other wires at p1=(x1,y1) and p2=(x2,y2), but have additional nodes xn, yn that determine their path between p1, p2
    #xn = [];
    #yn = [];
    def __init__(self, name, x1, y1, xn, yn, x2, y2):
        """Create a multi-segment wire from (x1, y1) through the intermediate
        nodes (xn[i], yn[i]) to (x2, y2)."""
        super().__init__(name, x1, y1, x2, y2);
        self.xn = xn
        self.yn = yn
        #Wire.__init__(name, x1, y1, x2, y2);
    @classmethod
    def JoinWires(cls, Wire1, Wire2):
        """Merge two wires sharing an endpoint into one PolyWire.

        Four orientation cases are handled, one per shared-endpoint
        combination (P1-P1, P2-P2, P2-P1, P1-P2).  In each case the
        shared junction point becomes an intermediate node, and any
        PolyWire operand contributes its node list, reversed where
        needed so the path runs continuously from the new start to the
        new end.  If the wires share no endpoint, an error is printed
        and a degenerate wire at the origin is returned.
        """
        xn_ = [];
        yn_ = [];
        x1_ = 0; y1_ = 0;
        x2_ = 0; y2_ = 0;
        name_ = Wire1.name+"_"+Wire2.name
        if (Wire1.getP1Tuple() == Wire2.getP1Tuple()):
            # joined at P1 back to back  Wire1 <-> Wire2
            print("  Wire1 <-> Wire2")
            x1_ = Wire1.x2
            y1_ = Wire1.y2
            if (type(Wire1) is PolyWire):
                an = list(Wire1.xn);  # copy so we can reverse it if necessary
                an.reverse();  # turn the wire coordinate list 'around'
                xn_.extend(an);
                an = list(Wire1.yn);  # copy so we can reverse it if necessary
                an.reverse();  # turn the wire coordinate list 'around'
                yn_.extend(an);
            # the shared junction point becomes an intermediate node
            xn_.append(Wire1.x1)
            yn_.append(Wire1.y1)
            print("  (Wire1 complete..)")
            if (type(Wire2) is PolyWire):
                an = list(Wire2.xn);  # copy so we can reverse it if necessary
                #an.reverse();  # turn the wire coordinate list 'around'
                xn_.extend(an);
                an = list(Wire2.yn);  # copy so we can reverse it if necessary
                #an.reverse();  # turn the wire coordinate list 'around'
                yn_.extend(an);
            x2_ = Wire2.x2
            y2_ = Wire2.y2
        elif (Wire1.getP2Tuple() == Wire2.getP2Tuple()):
            # joined at P2 face to face Wire1 >-< Wire2
            print("  Wire1 >-< Wire2")
            x1_ = Wire1.x1
            y1_ = Wire1.y1
            if (type(Wire1) is PolyWire):
                an = list(Wire1.xn);  # copy so we can reverse it if necessary
                #an.reverse();  # turn the wire coordinate list 'around'
                xn_.extend(an);
                an = list(Wire1.yn);  # copy so we can reverse it if necessary
                #an.reverse();  # turn the wire coordinate list 'around'
                yn_.extend(an);
            # the shared junction point becomes an intermediate node
            xn_.append(Wire1.x2)
            yn_.append(Wire1.y2)
            if (type(Wire2) is PolyWire):
                an = list(Wire2.xn);  # copy so we can reverse it if necessary
                an.reverse();  # turn the wire coordinate list 'around'
                xn_.extend(an);
                an = list(Wire2.yn);  # copy so we can reverse it if necessary
                an.reverse();  # turn the wire coordinate list 'around'
                yn_.extend(an);
            x2_ = Wire2.x1
            y2_ = Wire2.y1
        elif (Wire1.getP2Tuple() == Wire2.getP1Tuple()):
            # flow from Wire1 -> Wire2
            print("  Wire1 -> Wire2")
            x1_ = Wire1.x1
            y1_ = Wire1.y1
            if (type(Wire1) is PolyWire):
                an = list(Wire1.xn);  # copy so we can reverse it if necessary
                #an.reverse();  # turn the wire coordinate list 'around'
                xn_.extend(an);
                an = list(Wire1.yn);  # copy so we can reverse it if necessary
                #an.reverse();  # turn the wire coordinate list 'around'
                yn_.extend(an);
            # the shared junction point becomes an intermediate node
            xn_.append(Wire1.x2)
            yn_.append(Wire1.y2)
            if (type(Wire2) is PolyWire):
                an = list(Wire2.xn);  # copy so we can reverse it if necessary
                #an.reverse();  # turn the wire coordinate list 'around'
                xn_.extend(an);
                an = list(Wire2.yn);  # copy so we can reverse it if necessary
                #an.reverse();  # turn the wire coordinate list 'around'
                yn_.extend(an);
            x2_ = Wire2.x2
            y2_ = Wire2.y2
        elif (Wire1.getP1Tuple() == Wire2.getP2Tuple()):
            # flow from Wire2 -> Wire1
            print("  Wire2 -> Wire1")
            x1_ = Wire2.x1
            y1_ = Wire2.y1
            if (type(Wire2) is PolyWire):
                an = list(Wire2.xn);  # copy so we can reverse it if necessary
                #an.reverse();  # turn the wire coordinate list 'around'
                xn_.extend(an);
                an = list(Wire2.yn);  # copy so we can reverse it if necessary
                #an.reverse();  # turn the wire coordinate list 'around'
                yn_.extend(an);
            # the shared junction point becomes an intermediate node
            xn_.append(Wire2.x2)
            yn_.append(Wire2.y2)
            if (type(Wire1) is PolyWire):
                an = list(Wire1.xn);  # copy so we can reverse it if necessary
                #an.reverse();  # turn the wire coordinate list 'around'
                xn_.extend(an);
                an = list(Wire1.yn);  # copy so we can reverse it if necessary
                #an.reverse();  # turn the wire coordinate list 'around'
                yn_.extend(an);
            x2_ = Wire1.x2
            y2_ = Wire1.y2
        else:
            # unknown
            print("Error: Unknown wire configuration for PolyWire:JoinWires")
        print("  Constructing new wire object..")
        pw = cls(name_, x1_, y1_, xn_, yn_, x2_, y2_)
        return pw;
    def asString(self):
        """One-line description listing the full node path P1 -> ... -> P2."""
        line = 'polywire: '+self.name+ ' = '+str(self.getP1Tuple())+' -> '
        for i in range(0, len(self.xn)):
            pni = (self.xn[i], self.yn[i]);
            line = line + '['+ str(pni[0])+ ', '+str(pni[1])+']'+' -> ';
        line = line +str(self.getP2Tuple())+'.';
        return(line);
class SymbolDict:
    """Registry of symbol prototypes, addressable by component type and by
    full library path."""

    def __init__(self):
        self.symbols = {}        # ctype -> Symbol
        self.symbolsbypath = {}  # "path\\ctype" (or bare ctype) -> Symbol

    def hasSymbolName(self, aSymbolName):
        """True when a symbol with this component type is registered."""
        return aSymbolName in self.symbols

    def hasSymbolPath(self, aSymbolNameandPath):
        """True when a symbol with this full path is registered."""
        return aSymbolNameandPath in self.symbolsbypath

    def addSymbol(self, aSymbol):
        """Register a symbol under both its ctype and its path+ctype key."""
        self.symbols[aSymbol.ctype] = aSymbol
        prefix = aSymbol.path + '\\' if aSymbol.path != "" else ''
        self.symbolsbypath[prefix + aSymbol.ctype] = aSymbol

    def getSymbolByPath(self, aPath):
        """Symbol registered under the full path, or None."""
        return self.symbolsbypath.get(aPath)

    def getSymbolByCType(self, aCType):
        """Symbol registered under the bare component type, or None."""
        return self.symbols.get(aCType)
class SpatialDict:
    """Index of arbitrary objects addressable by position, name, id and uuid.

    The attribute names used to read position/name/id/uuid off stored objects
    are configured via ``objposattrib`` / ``objnameattrib`` / ``objidattrib`` /
    ``objuuidattrib``.  Each may name either a plain attribute or a
    zero-argument method.  ``objposattrib`` may also be a list of attribute
    names when an object occupies several positions.

    Bug fixed: ``removeObj`` previously wrote the per-position bucket back to
    a nonexistent ``self.pinsbypos`` attribute (copy/paste from PinDict),
    raising AttributeError on every removal; it now uses ``self.objsbypos``.
    """
    def __init__(self):
        self.objsbypos = dict()    # (x, y) -> {subidx: obj}
        self.objsbyname = dict()
        self.objsbyid = dict()
        self.objsbyuuid = dict()
        self.objnameattrib = None
        self.objposattrib = None
        self.objidattrib = None
        self.objuuidattrib = None

    def _getField(self, aObj, aField):
        """Read attribute ``aField`` of ``aObj``; call it if it is callable.

        Returns None when the object, the field name, or the attribute itself
        is missing.
        """
        if aObj is None or aField is None:
            return None
        objattrib = getattr(aObj, aField, None)
        if objattrib is None:
            return None
        return objattrib() if callable(objattrib) else objattrib

    def _getPosAttrib(self, aObj):
        """Return the object's position(s) as a list (one entry per configured
        position attribute)."""
        if type(self.objposattrib) is list:  # object exposes multiple positions
            return [self._getField(aObj, posattrib) for posattrib in self.objposattrib]
        return [self._getField(aObj, self.objposattrib)]

    def _getSubidx(self, aObj):
        """Pick the sub-index used inside a position bucket: name > id > uuid."""
        nn = self._getField(aObj, self.objnameattrib)
        ii = self._getField(aObj, self.objidattrib)
        uu = self._getField(aObj, self.objuuidattrib)
        subidx = None
        if uu is not None:
            subidx = uu
        if ii is not None:
            subidx = ii
        if nn is not None:
            subidx = nn  # highest priority assigned last
        return subidx

    def addObj(self, aObj):
        """Index ``aObj`` under every configured key.

        Returns False when the object has no usable position or sub-index
        (it is still indexed by name/id/uuid in that case).
        """
        pp = self._getPosAttrib(aObj)
        nn = self._getField(aObj, self.objnameattrib)
        ii = self._getField(aObj, self.objidattrib)
        uu = self._getField(aObj, self.objuuidattrib)
        subidx = self._getSubidx(aObj)
        if ii is not None:
            self.objsbyid[ii] = aObj
        if nn is not None:
            self.objsbyname[nn] = aObj
        if uu is not None:
            self.objsbyuuid[uu] = aObj
        if (pp is None) or (subidx is None):
            return False
        for pp_ in pp:
            pdict = self.objsbypos.setdefault(pp_, {})
            pdict[subidx] = aObj

    def getObjByName(self, aName):
        """Return the object indexed under this name, or None."""
        return self.objsbyname.get(aName)

    def getObjById(self, anId):
        """Return the object indexed under this id, or None."""
        return self.objsbyid.get(anId)

    def getObjsByPos(self, aPos):
        """Return the {subidx: obj} bucket at this position, or None."""
        return self.objsbypos.get(aPos)

    def getAllObjs(self):
        """Return all (id, obj) pairs that were indexed by id."""
        return list(self.objsbyid.items())

    def removeObj(self, aObj):
        """Remove ``aObj`` from all indexes."""
        pp = self._getPosAttrib(aObj)
        nn = self._getField(aObj, self.objnameattrib)
        ii = self._getField(aObj, self.objidattrib)
        uu = self._getField(aObj, self.objuuidattrib)
        subidx = self._getSubidx(aObj)
        self.objsbyname.pop(nn, None)
        self.objsbyid.pop(ii, None)
        self.objsbyuuid.pop(uu, None)
        for pp_ in pp:
            pdict = self.objsbypos.pop(pp_, None)
            if pdict is not None:
                pdict.pop(subidx, None)
                # put remaining objs at this position back
                # (was ``self.pinsbypos`` -> AttributeError)
                self.objsbypos[pp_] = pdict

    def updateObj(self, aObj):
        """Re-index an object after its key attributes changed."""
        self.removeObj(aObj)
        self.addObj(aObj)
class PinDict:
    """Pin lookup table keyed by name, by draw order, and by (x, y) position."""

    def __init__(self):
        self.pinsbypos = dict()    # (x, y) -> {name: pin}
        self.pinsbyname = dict()
        self.pinsbyorder = dict()

    def addPin(self, aPin):
        """Index a pin under its name, its order, and its start position."""
        pos = aPin.getP1Tuple()
        self.pinsbyname[aPin.name] = aPin
        self.pinsbyorder[aPin.order] = aPin
        bucket = self.pinsbypos.setdefault(pos, {})
        bucket[aPin.name] = aPin

    def getPinByName(self, aPinName):
        """Return the pin with this name, or None."""
        return self.pinsbyname.get(aPinName)

    def getPinByOrder(self, aPinOrder):
        """Return the pin with this order number, or None."""
        return self.pinsbyorder.get(aPinOrder)

    def getPinByPos(self, aPinPos):
        """Return the {name: pin} bucket at this position, or None."""
        return self.pinsbypos.get(aPinPos)

    def getAllPins(self):
        """Return all (name, pin) pairs."""
        return list(self.pinsbyname.items())

    def removePin(self, aPin):
        """Drop a pin from all three indexes."""
        self.pinsbyname.pop(aPin.name, None)
        pos = aPin.getP1Tuple()
        bucket = self.pinsbypos.pop(pos, None)
        if bucket is not None:
            bucket.pop(aPin.name, None)
            self.pinsbypos[pos] = bucket  # keep remaining pins at this position
        self.pinsbyorder.pop(aPin.order, None)

    def updatePin(self, aPin):
        """Re-index a pin; the bookkeeping is identical to addPin."""
        self.addPin(aPin)
class Symbol:
    """Description of a schematic symbol plus its circuiTikz conversion data."""

    def __init__(self, stype):
        self.symboltype = stype
        self.ctype = ''                  # component ctype this symbol represents
        self.prefix = ''
        self.description = ''
        self.value = ''
        self.value2 = ''
        self.attributes = dict()
        self.x1 = 0
        self.y1 = 0
        self.path = ''
        self.pathandctype = ''
        self.symbolPins = PinDict()
        self.conversionKV = {}           # key=value conversion options from the asy2tex file
        self.latexPins = PinDict()
        self.latexOriginX1 = 0
        self.latexOriginY1 = 0
        self.latexOriginRot = 0
        self.latexOriginMirror = False
        self.symbolOriginX1 = 0
        self.symbolOriginY1 = 0
        self.symbolOriginRot = 0
        self.symbolOriginMirror = False
        self.latexElementName = ''
        self.latexType = 'Node'
        self.latexTemplate = []          # latex lines with #PinName:x1# / ##options## placeholders
        self.lt2tscale = None

    def addPin(self, aPin):
        """Register one pin of the drawn symbol."""
        self.symbolPins.addPin(aPin)

    def rotatePos(self, aPos, rotDeg, aOrigin):
        """Rotate ``aPos`` by ``rotDeg`` degrees around ``aOrigin``.

        Uses the rotation matrix with a negated sine in the y component
        (clockwise rotation in screen coordinates).
        """
        dx = aPos[0] - aOrigin[0]
        dy = aPos[1] - aOrigin[1]
        theta = rotDeg * math.pi / 180.0
        rx = dx * math.cos(theta) + dy * math.sin(theta)
        ry = -dx * math.sin(theta) + dy * math.cos(theta)
        return (rx + aOrigin[0], ry + aOrigin[1])

    def rotatePosOrigin(self, aPos, rotDeg):
        """Rotate around the origin (0.0, 0.0)."""
        return self.rotatePos(aPos, rotDeg, (0.0, 0.0))

    def rotatePosInt(self, aPos, rotDeg, aOrigin):
        """Like rotatePos, but rounds the result to integer coordinates."""
        rx, ry = self.rotatePos(aPos, rotDeg, aOrigin)
        return (round(rx), round(ry))

    def rotatePosIntOrigin(self, aPos, rotDeg):
        """Integer rotation around (0, 0)."""
        return self.rotatePosInt(aPos, rotDeg, (0, 0))

    def ckvUnsetOrFalse(self, key):
        """True when the conversion option is absent or set to 'false'/'0'."""
        if key not in self.conversionKV:
            return True
        val = self.conversionKV[key]
        return (str.lower(val) == 'false') or (val == '0')

    def ckvUnsetOrTrue(self, key):
        """True when the conversion option is absent or set to 'true'/'1'."""
        if key not in self.conversionKV:
            return True
        val = self.conversionKV[key]
        return (str.lower(val) == 'true') or (val == '1')

    def ckvSetAndFalse(self, key):
        """True only when the option is present and set to 'false'/'0'."""
        if key not in self.conversionKV:
            return False
        val = self.conversionKV[key]
        return (str.lower(val) == 'false') or (val == '0')

    def ckvSetAndTrue(self, key):
        """True only when the option is present and set to 'true'/'1'."""
        if key not in self.conversionKV:
            return False
        val = self.conversionKV[key]
        return (str.lower(val) == 'true') or (val == '1')

    def asString(self, indent=''):
        """Human-readable dump of the symbol and all of its pins."""
        res = (indent + 'Symbol "' + self.ctype + '" at path "' + self.path +
               '" with prefix "' + self.prefix + '" ')
        res = res + 'at origin ' + str((self.x1, self.y1)) + '.\n'
        for _pname, pin in self.symbolPins.getAllPins():
            res = res + pin.asString(indent + '    ')
        return res
class SymPin:
    """A single pin of a symbol: position, label placement, order and geometry."""

    def __init__(self):
        self.name = ''
        self.x1 = 0
        self.y1 = 0
        self.labelpos = 'NONE'
        self.labeloffset = 8
        self.order = -1
        self.rot = 0
        self.length = 0

    def getP1Tuple(self):
        """Return the pin position as an (x, y) tuple."""
        return (self.x1, self.y1)

    def asString(self, indent=''):
        """One-line textual description of the pin, terminated by a newline."""
        return (indent + 'Pin "' + self.name + '" ord ' + str(self.order)
                + ' at ' + str(self.getP1Tuple()) + ' rot=' + str(self.rot)
                + ' len=' + str(self.length) + '.\n')
from argparse import ArgumentParser
def main():
    """Command-line entry point: convert an LTspice .asc file to a circuiTikz .tex file."""
    # Bug fix: the original passed the help text as the first positional
    # argument of ArgumentParser, which is *prog* (the program name shown in
    # usage), not the description. Use the ``description=`` keyword instead.
    parser = ArgumentParser(description='Converts asc files into circuiTikz tex files. Takes the asc file as an argument.')
    parser.add_argument("file")
    args = parser.parse_args()
    l2tobj = lt2circuiTikz()
    l2tobj.readASCFile(args.file)
    l2tobj.writeCircuiTikz(args.file + '.tex')
# Determine the application root directory and whether we are running as a
# standalone script (as opposed to being imported as a module or frozen).
isstandalone = False;
try:
    approot = os.path.dirname(os.path.abspath(__file__))
    if __name__ == '__main__':
        isstandalone = True;
except NameError: # We are the main py2exe script, not a module
    # __file__ is undefined in a py2exe-frozen executable; fall back to argv[0].
    import sys
    approot = os.path.dirname(os.path.abspath(sys.argv[0]))
    isstandalone = True;
if (isstandalone):
    main();
| ckuhlmann/lt2circuitikz | lt2ti.py | lt2ti.py | py | 97,776 | python | en | code | 80 | github-code | 13 |
17453285125 | from pygame import *
from random import randint
# background music
mixer.init()
mixer.music.load('the.trail.mp3')
mixer.music.play()
# fonts and labels (score / missed counters, win and lose messages)
font.init()
score_text = font.Font(None, 36)
score = 0
lost_text = font.Font(None, 36)
lost = 0
lose_text = font.Font(None, 36)
win_text = font.Font(None, 36)
# window geometry and scaled background image
win_width = 700
win_height = 500
display.set_caption("Shooter")
window = display.set_mode((win_width, win_height))
background = transform.scale(
    image.load("mountains.jpg"),
    (win_width, win_height)
)
class GameSprite(sprite.Sprite):
    """Base drawable sprite: a scaled image, a position rectangle and a speed."""

    def __init__(self, player_image, player_x, player_y, size_x, size_y, player_speed):
        super().__init__()
        raw_image = image.load(player_image)
        self.image = transform.scale(raw_image, (size_x, size_y))
        self.speed = player_speed
        self.rect = self.image.get_rect()
        self.rect.x, self.rect.y = player_x, player_y

    def reset(self):
        """Redraw the sprite on the global window at its current position."""
        window.blit(self.image, (self.rect.x, self.rect.y))
class Player(GameSprite):
    """Keyboard-controlled hero sprite that can shoot daggers upwards."""

    def update(self):
        """Move horizontally with the arrow keys, clamped to the window edges."""
        pressed = key.get_pressed()
        if pressed[K_LEFT] and self.rect.x > 5:
            self.rect.x -= self.speed
        if pressed[K_RIGHT] and self.rect.x < win_width - 80:
            self.rect.x += self.speed

    def fire(self):
        """Spawn an upward-moving dagger at the player's top center."""
        daggers.add(Dagger('dagger.png', self.rect.centerx, self.rect.top, 60, 50, -15))
# enemy sprite class
class Enemy(GameSprite):
    """Falling enemy; respawns at the top and counts a miss when it escapes."""

    def update(self):
        """Advance downwards; below the window, respawn and increment ``lost``."""
        global lost
        self.rect.y += self.speed
        if self.rect.y > win_height:
            self.rect.x = randint(80, win_width - 80)
            self.rect.y = 0
            lost += 1
# projectile sprite class
class Dagger(GameSprite):
    """Upward-moving projectile fired by the player."""

    def update(self):
        """Move up by the (negative) speed; vanish past the top edge."""
        self.rect.y += self.speed
        if self.rect.y < 0:
            self.kill()  # remove from every sprite group
# the player sprite and the groups holding projectiles and enemies
wither = Player("wither.png", 5, win_height - 100, 80, 100, 10)
daggers = sprite.Group()
monsters = sprite.Group()
# spawn five griffins at random x positions with random fall speeds
for i in range(1, 6):
    monster = Enemy("griffin.png", randint(80, win_width - 80), -40, 80, 50, randint(1, 5))
    monsters.add(monster)
# main game loop state flags
run = True
game_over = False
is_win = False
while run:
    window.blit(background, (0, 0))
    # draw the score ("Рахунок") and missed ("Пропущено") counters
    text1 = score_text.render("Рахунок: " + str(score), 1, (0, 0, 0))
    window.blit(text1, (10, 20))
    text2 = lost_text.render("Пропущено: " + str(lost), 1, (0, 0, 0))
    window.blit(text2, (10, 50))
    for e in event.get():
        if e.type == QUIT:
            run = False
        elif e.type == KEYDOWN:
            if game_over == False:
                if e.key == K_SPACE:
                    wither.fire()
    if game_over == False:
        # dagger/monster collision check: each hit monster is removed
        # (True), daggers keep flying (False)
        collides = sprite.groupcollide(monsters, daggers, True, False)
        for c in collides:
            # this loop runs once per monster shot down
            score = score + 1
            monster = Enemy('griffin.png', randint(80, win_width - 80), -40, 80, 50, randint(1, 5))
            monsters.add(monster)
        # possible loss: too many missed, or the hero collided with an enemy
        if sprite.spritecollide(wither, monsters, True) or lost >= 5:
            lost += 1
            game_over = True # lost: show the end screen, stop controlling sprites.
        if score >= 100:
            game_over = True
            is_win = True
        # sprite movement
        wither.update()
        monsters.update()
        daggers.update()
    else:
        if is_win == True:
            text4 = win_text.render("Ви перемогли", 1, (0, 0, 0))
            window.blit(text4, (200, 200))
        else:
            text3 = lose_text.render("Ви програли", 1, (0, 0, 0))
            window.blit(text3, (200, 200))
    # redraw sprites at their new positions on every loop iteration
    wither.reset()
    monsters.draw(window)
    daggers.draw(window)
    display.update()
time.delay(60) | KasopiDaNir/shooter.witcher | main.py | main.py | py | 4,437 | python | uk | code | 0 | github-code | 13 |
17049688284 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.constant.ParamConstants import *
class CarModel(object):
    """Alipay OpenAPI domain object describing a car model.

    Follows the SDK's generated-model contract: each field is held in a
    private ``_<name>`` slot exposed through a same-named read/write property,
    and the object round-trips through plain dicts via ``to_alipay_dict`` /
    ``from_alipay_dict``.  The per-field boilerplate of the generated code
    (13 near-identical property pairs plus 13 serialization branches) is
    collapsed into a single field list, preserving the public API exactly.
    """

    # Every serializable field, in the order the generated code declared them.
    _FIELD_NAMES = (
        'brand_name', 'config_name', 'engine_desc', 'family_short_name',
        'gear_box_type', 'purchase_price', 'seat', 'seat_max', 'seat_min',
        'vehicle_class_code', 'vehicle_code', 'vehicle_name', 'year_pattern',
    )

    def __init__(self):
        # Initialize every private backing slot to None.
        for _name in self._FIELD_NAMES:
            setattr(self, '_' + _name, None)

    def to_alipay_dict(self):
        """Serialize all truthy fields into a plain dict.

        Values providing their own ``to_alipay_dict`` (nested domain objects)
        are serialized recursively; falsy values are skipped, matching the
        behavior of the generated SDK code.
        """
        params = dict()
        for name in self._FIELD_NAMES:
            value = getattr(self, '_' + name)
            if not value:
                continue  # generated models omit falsy fields
            if hasattr(value, 'to_alipay_dict'):
                params[name] = value.to_alipay_dict()
            else:
                params[name] = value
        return params

    @staticmethod
    def from_alipay_dict(d):
        """Build a CarModel from a response dict; return None for empty input."""
        if not d:
            return None
        o = CarModel()
        for name in CarModel._FIELD_NAMES:
            if name in d:
                setattr(o, name, d[name])
        return o


def _car_model_property(name):
    # Build the getter/setter property backing the public field ``name``.
    private = '_' + name

    def _get(self):
        return getattr(self, private)

    def _set(self, value):
        setattr(self, private, value)

    return property(_get, _set)


# Attach one read/write property per field so the public attribute API of the
# original generated class (obj.brand_name, obj.seat, ...) is preserved.
for _field in CarModel._FIELD_NAMES:
    setattr(CarModel, _field, _car_model_property(_field))
del _field
| alipay/alipay-sdk-python-all | alipay/aop/api/domain/CarModel.py | CarModel.py | py | 7,161 | python | en | code | 241 | github-code | 13 |
22245928917 | from PyQt5 import QtWidgets, uic
from mainwindow import Ui_MainWindow
from login_screen import Ui_login_window
from errorbox import Ui_errorbox
from messagebox import Ui_messagebox
from contract_insert import Ui_contract_insert
from supplier_insert import Ui_supplier_insert
from product_insert import Ui_product_insert
from supplier_update import Ui_supplier_update
from contract_update import Ui_contract_update
from product_update import Ui_product_update
from contract_delete import Ui_contract_delete
from supplier_delete import Ui_supplier_delete
from product_delete import Ui_product_delete
from tables import Ui_ShowDB
from first_proced import Ui_first_proced
from first_proced_show import Ui_first_proced_show
from legal_insert import Ui_legal_insert
from private_insert import Ui_private_insert
from legal_delete import Ui_legal_delete
from private_delete import Ui_private_delete
import sys
from PyQt5.QtWidgets import QTableWidgetItem
import mysql.connector
from mysql.connector import Error
import datetime
def create_connection(host_name, user_name, user_password, db_name):
    """Open a MySQL connection and report the outcome on stdout.

    Returns the connection object, or None when connecting failed.
    """
    connection = None
    try:
        connection = mysql.connector.connect(
            host=host_name, user=user_name,
            passwd=user_password, database=db_name,
        )
    except Error as e:
        print(f"The error '{e}' occurred")
    else:
        print("Connection to MySQL DB successful")
    return connection
class message_box(QtWidgets.QDialog):
    """Modal dialog shown after a successfully completed operation."""

    def __init__(self):
        super().__init__()
        self.ui = Ui_messagebox()
        self.ui.setupUi(self)
class error_box(QtWidgets.QDialog):
    """Modal dialog shown when an operation failed."""

    def __init__(self):
        super().__init__()
        self.ui = Ui_errorbox()
        self.ui.setupUi(self)
class mywindow(QtWidgets.QMainWindow):
    """Main application window: wires menu actions and buttons to dialogs."""

    # Shared database connection used by every dialog in the application.
    connection = create_connection("localhost", "root", "", "delivery")

    def __init__(self):
        super().__init__()
        self.ui = Ui_MainWindow()
        self.ui.setupUi(self)
        # menu actions
        self.ui.open_login_screen.triggered.connect(self.login_button_clicked)
        self.ui.show_db.triggered.connect(self.show_db)
        self.ui.open_procedure_1.triggered.connect(self.proced_1)
        # insert buttons
        self.ui.pushButton_contract_insert.clicked.connect(self.contract_insert_clicked)
        self.ui.pushButton_supplier_insert.clicked.connect(self.supplier_insert_clicked)
        self.ui.pushButton_product_insert.clicked.connect(self.product_insert_clicked)
        self.ui.pushButton_legal_insert.clicked.connect(self.legal_insert_clicked)
        self.ui.pushButton_private_insert.clicked.connect(self.private_insert_clicked)
        # update buttons
        self.ui.pushButton_supplier_update.clicked.connect(self.supplier_update_clicked)
        self.ui.pushButton_contract_update.clicked.connect(self.contract_update_clicked)
        self.ui.pushButton_product_update.clicked.connect(self.product_update_clicked)
        # delete buttons
        self.ui.pushButton_contract_delete.clicked.connect(self.contract_delete_clicked)
        self.ui.pushButton_supplier_delete.clicked.connect(self.supplier_delete_clicked)
        self.ui.pushButton_product_delete.clicked.connect(self.product_delete_clicked)
        self.ui.pushButton_legal_delete.clicked.connect(self.legal_delete_clicked)
        self.ui.pushButton_private_delete.clicked.connect(self.private_delete_clicked)

    def _open_dialog(self, dialog_cls):
        """Instantiate a dialog class, keep a reference and run it modally."""
        self.w2 = dialog_cls()
        self.w2.exec()

    def legal_insert_clicked(self):
        self._open_dialog(legal_insert_window)

    def private_insert_clicked(self):
        self._open_dialog(private_insert_window)

    def legal_delete_clicked(self):
        self._open_dialog(legal_delete_window)

    def private_delete_clicked(self):
        self._open_dialog(private_delete_window)

    def proced_1(self):
        self._open_dialog(show_first_procedure)

    def show_db(self):
        self._open_dialog(show_window)

    def login_button_clicked(self):
        self._open_dialog(login_window)

    def contract_delete_clicked(self):
        self._open_dialog(contract_delete_window)

    def supplier_delete_clicked(self):
        self._open_dialog(supplier_delete_window)

    def product_delete_clicked(self):
        self._open_dialog(product_delete_window)

    def contract_insert_clicked(self):
        self._open_dialog(contract_insert_window)

    def supplier_insert_clicked(self):
        self._open_dialog(supplier_insert_window)

    def product_insert_clicked(self):
        self._open_dialog(product_insert_window)

    def supplier_update_clicked(self):
        self._open_dialog(supplier_update_window)

    def contract_update_clicked(self):
        self._open_dialog(contract_update_window)

    def product_update_clicked(self):
        self._open_dialog(product_update_window)
class show_window(QtWidgets.QDialog):
    """Dialog that displays the contents of every table in the database.

    The original version repeated the fetch-and-fill code five times; it is
    factored into ``_fill_table``, keeping the queries, column counts and
    target widgets identical. A debug ``print`` of the supplier rows was
    removed.
    """

    def __init__(self):
        super(show_window, self).__init__()
        self.ui = Ui_ShowDB()
        self.ui.setupUi(self)
        # one (widget, query, column count) triple per table shown
        self._fill_table(self.ui.tableWidget, "SELECT * FROM supplier", 3)
        self._fill_table(self.ui.tableWidget_2, "SELECT * FROM contract", 5)
        self._fill_table(self.ui.tableWidget_3, "SELECT * FROM product", 4)
        self._fill_table(self.ui.tableWidget_4, "SELECT * FROM legal_supplier", 3)
        self._fill_table(self.ui.tableWidget_5, "SELECT * FROM private_supplier", 5)

    def _fill_table(self, table_widget, query, column_count):
        """Run *query* and append one row per result to *table_widget*."""
        cursor = application.connection.cursor()
        cursor.execute(query)
        rows = cursor.fetchall()
        for i, row in enumerate(rows):
            table_widget.setRowCount(table_widget.rowCount() + 1)
            for j in range(column_count):
                table_widget.setItem(i, j, QTableWidgetItem(str(row[j])))
        application.connection.commit()
class show_first_procedure(QtWidgets.QDialog):
    """Dialog asking for a contract number before running the report."""

    def __init__(self):
        super().__init__()
        self.ui = Ui_first_proced()
        self.ui.setupUi(self)
        self.ui.pushButton_create_report.clicked.connect(self.btnCreate)

    def btnCreate(self):
        """Store the entered number in the module-level global and show the report."""
        global b
        b = self.ui.lineEdit.text()  # consumed by show_first_procedure_2
        self.w2 = show_first_procedure_2()
        self.w2.exec()
class show_first_procedure_2(QtWidgets.QDialog):
    """Report dialog: runs the GetListOfSuppliedProductsByNumber stored
    procedure for the contract number held in the module-level global ``b``
    and shows the resulting rows (3 columns) in a table."""

    def __init__(self):
        super(show_first_procedure_2, self).__init__()
        self.ui = Ui_first_proced_show()
        self.ui.setupUi(self)
        try:
            cursor = application.connection.cursor()
            global b
            proc_args = [int(b)]
            report_result = None
            cursor.callproc('GetListOfSuppliedProductsByNumber', proc_args)
            # mysql.connector exposes procedure result sets via stored_results()
            for result in cursor.stored_results():
                report_result = result.fetchall()
            for i, row in enumerate(report_result):
                self.ui.tableWidget.setRowCount(self.ui.tableWidget.rowCount() + 1)
                for j in range(3):
                    self.ui.tableWidget.setItem(i, j, QTableWidgetItem(str(row[j])))
            application.connection.commit()
        except Exception:
            # Narrowed from a bare ``except:`` so KeyboardInterrupt/SystemExit
            # are not swallowed; bad input or a DB error shows the error box.
            self.w2 = error_box()
            self.w2.exec()
class supplier_insert_window(QtWidgets.QDialog):
    """Dialog inserting a new row into the ``supplier`` table."""

    def __init__(self):
        super(supplier_insert_window, self).__init__()
        self.ui = Ui_supplier_insert()
        self.ui.setupUi(self)
        self.ui.pushButton_supplier_insert.clicked.connect(self.btnInsert)

    def btnInsert(self):
        """Read the form fields and insert the supplier; report the outcome."""
        try:
            id = self.ui.lineEdit_supplier_id.text()
            name = self.ui.lineEdit_supplier_name.text()
            address = self.ui.lineEdit_supplier_address.text()
            with application.connection.cursor() as cursor:
                # parameterized query protects against SQL injection
                row = (int(id), name, address)
                cursor.execute("INSERT INTO supplier VALUES (%s,%s,%s)", row)
                application.connection.commit()
        except Exception:
            # narrowed from bare ``except:``; bad input or DB error -> error box
            self.w2 = error_box()
            self.w2.exec()
        else:
            self.w2 = message_box()
            self.w2.exec()
class contract_insert_window(QtWidgets.QDialog):
    """Dialog inserting a new row into the ``contract`` table; the contract
    date is set to the current timestamp."""

    def __init__(self):
        super(contract_insert_window, self).__init__()
        self.ui = Ui_contract_insert()
        self.ui.setupUi(self)
        self.ui.pushButton_contract_insert.clicked.connect(self.btnInsert)

    def btnInsert(self):
        """Read the form fields and insert the contract; report the outcome."""
        try:
            id = self.ui.lineEdit_contract_id.text()
            supplier = self.ui.lineEdit_contract_supplier.text()
            title = self.ui.lineEdit_contract_title.text()
            note = self.ui.lineEdit_contract_note.text()
            with application.connection.cursor() as cursor:
                # contract date is always "now"
                row = (int(id), datetime.datetime.now(), int(supplier), title, note)
                cursor.execute("INSERT INTO contract VALUES (%s,%s,%s,%s,%s)", row)
                application.connection.commit()
        except Exception:
            # narrowed from bare ``except:``; bad input or DB error -> error box
            self.w2 = error_box()
            self.w2.exec()
        else:
            self.w2 = message_box()
            self.w2.exec()
class product_insert_window(QtWidgets.QDialog):
    """Dialog inserting a new row into the ``product`` table."""

    def __init__(self):
        super(product_insert_window, self).__init__()
        self.ui = Ui_product_insert()
        self.ui.setupUi(self)
        self.ui.pushButton_product_insert.clicked.connect(self.btnInsert)

    def btnInsert(self):
        """Read the form fields and insert the product; report the outcome."""
        try:
            contract = self.ui.lineEdit_product_contract.text()
            name = self.ui.lineEdit_product_name.text()
            amount = self.ui.lineEdit_product_amount.text()
            price = self.ui.lineEdit_product_price.text()
            with application.connection.cursor() as cursor:
                row = (int(contract), name, int(amount), float(price))
                cursor.execute("INSERT INTO product VALUES (%s,%s,%s,%s)", row)
                application.connection.commit()
        except Exception:
            # narrowed from bare ``except:``; bad input or DB error -> error box
            self.w2 = error_box()
            self.w2.exec()
        else:
            self.w2 = message_box()
            self.w2.exec()
class supplier_update_window(QtWidgets.QDialog):
    """Dialog updating name and address of an existing supplier by id."""

    def __init__(self):
        super(supplier_update_window, self).__init__()
        self.ui = Ui_supplier_update()
        self.ui.setupUi(self)
        self.ui.pushButton_supplier_update.clicked.connect(self.btnInsert)

    def btnInsert(self):
        """Read the form fields and update the supplier; report the outcome."""
        try:
            id = self.ui.lineEdit_supplier_id.text()
            name = self.ui.lineEdit_supplier_name.text()
            address = self.ui.lineEdit_supplier_address.text()
            with application.connection.cursor() as cursor:
                row = (name, address, int(id))
                cursor.execute("UPDATE supplier SET name = %s, address = %s WHERE id = %s", row)
                application.connection.commit()
        except Exception:
            # narrowed from bare ``except:``; bad input or DB error -> error box
            self.w2 = error_box()
            self.w2.exec()
        else:
            self.w2 = message_box()
            self.w2.exec()
class contract_update_window(QtWidgets.QDialog):
    """Dialog updating title and note of an existing contract by number."""

    def __init__(self):
        super(contract_update_window, self).__init__()
        self.ui = Ui_contract_update()
        self.ui.setupUi(self)
        self.ui.pushButton_contract_update.clicked.connect(self.btnInsert)

    def btnInsert(self):
        """Read the form fields and update the contract; report the outcome.

        The supplier field of the form is not part of the UPDATE statement
        (only title/note are editable), so its unused read was removed.
        """
        try:
            id = self.ui.lineEdit_contract_id.text()
            title = self.ui.lineEdit_contract_title.text()
            note = self.ui.lineEdit_contract_note.text()
            with application.connection.cursor() as cursor:
                row = (title, note, int(id))
                cursor.execute("UPDATE contract SET title = %s, note = %s WHERE number = %s", row)
                application.connection.commit()
        except Exception:
            # narrowed from bare ``except:``; bad input or DB error -> error box
            self.w2 = error_box()
            self.w2.exec()
        else:
            self.w2 = message_box()
            self.w2.exec()
class product_update_window(QtWidgets.QDialog):
    """Dialog updating amount and price of an existing product by name."""

    def __init__(self):
        super(product_update_window, self).__init__()
        self.ui = Ui_product_update()
        self.ui.setupUi(self)
        self.ui.pushButton_product_insert.clicked.connect(self.btnInsert)

    def btnInsert(self):
        """Read the form fields and update the product; report the outcome.

        The contract field of the form is not part of the UPDATE statement,
        so its unused read was removed.
        """
        try:
            name = self.ui.lineEdit_product_name.text()
            amount = self.ui.lineEdit_product_amount.text()
            price = self.ui.lineEdit_product_price.text()
            with application.connection.cursor() as cursor:
                row = (int(amount), float(price), name)
                cursor.execute("UPDATE product SET amount = %s, price = %s WHERE product = %s", row)
                application.connection.commit()
        except Exception:
            # narrowed from bare ``except:``; bad input or DB error -> error box
            self.w2 = error_box()
            self.w2.exec()
        else:
            self.w2 = message_box()
            self.w2.exec()
class supplier_delete_window(QtWidgets.QDialog):
    """Dialog deleting a row of the ``supplier`` table by id."""

    def __init__(self):
        super(supplier_delete_window, self).__init__()
        self.ui = Ui_supplier_delete()
        self.ui.setupUi(self)
        self.ui.pushButton_supplier_delete.clicked.connect(self.btnInsert)

    def btnInsert(self):
        """Delete the supplier whose id was entered; report the outcome."""
        try:
            id = self.ui.lineEdit_supplier.text()
            with application.connection.cursor() as cursor:
                cursor.execute("DELETE FROM supplier WHERE id = %s", (int(id),))
                application.connection.commit()
        except Exception:
            # narrowed from bare ``except:``; debug print(id) removed
            self.w2 = error_box()
            self.w2.exec()
        else:
            self.w2 = message_box()
            self.w2.exec()
class contract_delete_window(QtWidgets.QDialog):
    """Dialog deleting a row of the ``contract`` table by number."""

    def __init__(self):
        super(contract_delete_window, self).__init__()
        self.ui = Ui_contract_delete()
        self.ui.setupUi(self)
        self.ui.pushButton_contract_delete.clicked.connect(self.btnInsert)

    def btnInsert(self):
        """Delete the contract whose number was entered; report the outcome."""
        try:
            number = self.ui.lineEdit_contract.text()
            # use a context-managed cursor, consistent with the sibling dialogs
            with application.connection.cursor() as cursor:
                cursor.execute("DELETE FROM contract WHERE number = %s", (int(number),))
                application.connection.commit()
        except Exception:
            # narrowed from bare ``except:``; bad input or DB error -> error box
            self.w2 = error_box()
            self.w2.exec()
        else:
            self.w2 = message_box()
            self.w2.exec()
class product_delete_window(QtWidgets.QDialog):
    """Dialog deleting a row of the ``product`` table by product name."""

    def __init__(self):
        super(product_delete_window, self).__init__()
        self.ui = Ui_product_delete()
        self.ui.setupUi(self)
        self.ui.pushButton_product_delete.clicked.connect(self.btnInsert)

    def btnInsert(self):
        """Delete the product whose name was entered; report the outcome."""
        try:
            # the product key is a string, so no int() conversion here
            id = self.ui.lineEdit_product.text()
            with application.connection.cursor() as cursor:
                cursor.execute("DELETE FROM product WHERE product.product = %s", (id,))
                application.connection.commit()
        except Exception:
            # narrowed from bare ``except:``; DB error -> error box
            self.w2 = error_box()
            self.w2.exec()
        else:
            self.w2 = message_box()
            self.w2.exec()
class login_window(QtWidgets.QDialog):
    """Login dialog: reconnects the shared DB connection under the account
    matching the entered login/password."""

    # SECURITY NOTE: credentials are hard-coded client-side; this only selects
    # which MySQL account to connect with and is not real authentication.
    # The duplicated if/elif branches of the original are collapsed into one
    # lookup table with identical accepted login/password pairs.
    _CREDENTIALS = {
        "root": "",
        "supply_manag": "12345",
        "accountant": "supply",
        "market_manag": "supply",
    }

    def __init__(self):
        super(login_window, self).__init__()
        self.ui = Ui_login_window()
        self.ui.setupUi(self)
        self.ui.pushButton_login_commit.clicked.connect(self.btnInsert)

    def btnInsert(self):
        """Re-open the shared connection when the credentials match a known account."""
        try:
            login = self.ui.login_line.text()
            password = self.ui.password_line.text()
            if login in self._CREDENTIALS and self._CREDENTIALS[login] == password:
                application.connection = create_connection("localhost", login, password, "delivery")
                self.close()
            else:
                self.w2 = error_box()
                self.w2.exec()
        except Exception:
            # narrowed from bare ``except:``
            self.w2 = error_box()
            self.w2.exec()
class legal_insert_window(QtWidgets.QDialog):
    """Dialog inserting or updating rows of the ``legal_supplier`` table.

    The shared field reading/validation of the two handlers is factored into
    ``_read_fields``; the generic ``raise Exception`` became a ValueError and
    the bare ``except:`` clauses were narrowed.
    """

    def __init__(self):
        super(legal_insert_window, self).__init__()
        self.ui = Ui_legal_insert()
        self.ui.setupUi(self)
        self.ui.pushButton_legal_insert.clicked.connect(self.btnInsert_insert)
        self.ui.pushButton_legal_update.clicked.connect(self.btnInsert_update)

    def _read_fields(self):
        """Return (id, tax, vat) from the form.

        Raises ValueError when tax or VAT is not purely numeric; the VAT
        number is returned with the "UA" country prefix already applied.
        """
        id = self.ui.lineEdit_legal_id.text()
        tax = self.ui.lineEdit_legal_tax.text()
        vat = self.ui.lineEdit_legal_vat.text()
        if not (tax.isdigit() and vat.isdigit()):
            raise ValueError("tax and VAT numbers must be numeric")
        return id, tax, "UA" + vat

    def btnInsert_insert(self):
        """Insert a new legal supplier; report the outcome."""
        try:
            id, tax, vat = self._read_fields()
            with application.connection.cursor() as cursor:
                cursor.execute("INSERT INTO legal_supplier VALUES (%s,%s,%s)", (int(id), tax, vat))
                application.connection.commit()
        except Exception:
            self.w2 = error_box()
            self.w2.exec()
        else:
            self.w2 = message_box()
            self.w2.exec()

    def btnInsert_update(self):
        """Update an existing legal supplier by id; report the outcome."""
        try:
            id, tax, vat = self._read_fields()
            with application.connection.cursor() as cursor:
                cursor.execute("UPDATE legal_supplier SET tax_number = %s, vat_number = %s WHERE id = %s", (tax, vat, int(id)))
                application.connection.commit()
        except Exception:
            self.w2 = error_box()
            self.w2.exec()
        else:
            self.w2 = message_box()
            self.w2.exec()
class private_insert_window(QtWidgets.QDialog):
    """Dialog for creating or updating a private-person supplier record."""

    def __init__(self):
        super(private_insert_window, self).__init__()
        self.ui = Ui_private_insert()
        self.ui.setupUi(self)
        self.ui.pushButton_private_insert.clicked.connect(self.btnInsert_insert)
        self.ui.pushButton_private_update.clicked.connect(self.btnInsert_update)

    def btnInsert_insert(self):
        """INSERT a private supplier (names + registration number)."""
        try:
            supplier_id = self.ui.lineEdit_private_id.text()
            last = self.ui.lineEdit_private_last.text()
            first = self.ui.lineEdit_private_first.text()
            second = self.ui.lineEdit_private_second.text()
            number = self.ui.lineEdit_private_number.text()
            with application.connection.cursor() as cursor:
                params = (int(supplier_id), last, first, second, number)
                cursor.execute("INSERT INTO private_supplier VALUES (%s,%s,%s,%s,%s)", params)
                application.connection.commit()
        except Exception:
            # Narrowed from a bare `except:`; int() failures and DB errors
            # are reported via the error dialog.
            self.w2 = error_box()
            self.w2.exec()
        else:
            self.w2 = message_box()
            self.w2.exec()

    def btnInsert_update(self):
        """UPDATE the private supplier row identified by the entered id."""
        try:
            supplier_id = self.ui.lineEdit_private_id.text()
            last = self.ui.lineEdit_private_last.text()
            first = self.ui.lineEdit_private_first.text()
            second = self.ui.lineEdit_private_second.text()
            number = self.ui.lineEdit_private_number.text()
            with application.connection.cursor() as cursor:
                params = (last, first, second, number, int(supplier_id))
                cursor.execute("UPDATE private_supplier SET last_name = %s, first_name = %s, second_name = %s, reg_number = %s WHERE id = %s", params)
                application.connection.commit()
        except Exception:
            self.w2 = error_box()
            self.w2.exec()
        else:
            self.w2 = message_box()
            self.w2.exec()
class legal_delete_window(QtWidgets.QDialog):
    """Dialog for deleting a legal supplier by id."""

    def __init__(self):
        super(legal_delete_window, self).__init__()
        self.ui = Ui_legal_delete()
        self.ui.setupUi(self)
        self.ui.pushButton_legal_delete.clicked.connect(self.btnInsert)

    def btnInsert(self):
        """DELETE the legal supplier row matching the entered id."""
        try:
            supplier_id = self.ui.lineEdit_legal.text()
            with application.connection.cursor() as cursor:
                # Parameterized query; id must parse as an int.
                cursor.execute("DELETE FROM legal_supplier WHERE id = %s", (int(supplier_id),))
                application.connection.commit()
        except Exception:
            # Narrowed from a bare `except:`; leftover debug print() removed.
            self.w2 = error_box()
            self.w2.exec()
        else:
            self.w2 = message_box()
            self.w2.exec()
class private_delete_window(QtWidgets.QDialog):
    """Dialog for deleting a private supplier by id."""

    def __init__(self):
        super(private_delete_window, self).__init__()
        self.ui = Ui_private_delete()
        self.ui.setupUi(self)
        self.ui.pushButton_private_delete.clicked.connect(self.btnInsert)

    def btnInsert(self):
        """DELETE the private supplier row matching the entered id."""
        try:
            supplier_id = self.ui.lineEdit_private.text()
            with application.connection.cursor() as cursor:
                # Parameterized query; id must parse as an int.
                cursor.execute("DELETE FROM private_supplier WHERE id = %s", (int(supplier_id),))
                application.connection.commit()
        except Exception:
            # Narrowed from a bare `except:`; leftover debug print() removed.
            self.w2 = error_box()
            self.w2.exec()
        else:
            self.w2 = message_box()
            self.w2.exec()
# Application bootstrap: create the Qt app and show the main window.
# (Removed `global b`: a module-level `global` statement is a no-op.)
app = QtWidgets.QApplication([])
application = mywindow()
application.show()
sys.exit(app.exec()) | svyatoslavkorshunov/lab | lab8_python/lab8.py | lab8.py | py | 23,644 | python | en | code | 0 | github-code | 13 |
37364390553 | from django.db import models
from tag.models import Tag
from hemontika_api import LANGUAGE_CHOICES
from hemontika_api.utils import COUNTRY_CHOICES, REGION_CHOICES, DISTRICT_CHOICES
from django.conf import settings
# create your models here
def unique_user_path(instance, filename):
    """Build a per-musician upload path: videos/musics/_<musician id>_<filename>."""
    return f"videos/musics/_{instance.musician.id}_{filename}"
class Music(models.Model):
    """A music video uploaded by a musician, with tag and locality metadata."""
    # Uploader; deleting the user cascades to their videos.
    musician = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)
    title = models.CharField(max_length=50)
    # Stored under videos/musics/_<musician id>_<original filename>.
    video = models.FileField(upload_to=unique_user_path)
    # Set once at creation time.
    date = models.DateTimeField(auto_now_add=True)
    tags = models.ManyToManyField(Tag, blank=True)
    rating = models.FloatField(default=0.0)
    views = models.PositiveBigIntegerField(default=0)
    language = models.CharField(max_length=50, choices=LANGUAGE_CHOICES)
    # Locality fields are optional (both null and blank allowed).
    country = models.CharField(max_length=50, choices=COUNTRY_CHOICES, null=True, blank=True)
    region = models.CharField(max_length=50, choices=REGION_CHOICES, blank=True, null=True)
    district = models.CharField(max_length=50, choices=DISTRICT_CHOICES, blank=True, null=True)
    description = models.CharField(max_length=300)
    def __str__(self):
        return self.title
| Subhra264/hemontika | backend_server/src/music/models.py | models.py | py | 1,229 | python | en | code | 0 | github-code | 13 |
9440092349 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
from PIL import Image,ImageFile
from glob import glob
import os
import cairosvg
from tqdm import tqdm
from utils import create_path, filetype
from sys import argv
def img_converter(path, old_name):
    """Open image `old_name`, force RGB, and return a 256x256 resized copy.

    Returns None (after logging to skipped_files.txt) for unsupported
    file-type signatures. NOTE(review): the `path` parameter is unused here.
    """
    if filetype(old_name) in ['ASCII', 'HTML', 'MS', 'PC', 'RIFF', 'exported', 'SVG']:
        #print(old_name, filetype(old_name))
        with open('skipped_files.txt', 'a') as f:
            f.write(old_name + '\n')
        return
    im = Image.open(old_name)
    if not im.mode == 'RGB':
        # Re-save the source as an RGB JPEG *in place*, then reopen it.
        Image.open(old_name).convert('RGB').save(old_name, 'JPEG')
        im = Image.open(old_name)
    out = im.resize((256, 256))
    return out
def tran(old_path, new_path):
    """Mirror old_path/<label>/* into new_path/<label>/<n>.jpg as 256x256 JPEGs.

    Files that img_converter() skips (unsupported types) return None and are
    now ignored instead of crashing on out.save().
    """
    prefix_len = len(old_path)  # renamed: was `i`, shadowed by the counter below
    pic_names = {}
    for animal in [name[prefix_len + 1:] for name in glob(old_path + '/*')]:
        pic_names[animal] = glob(old_path + '/' + animal + '/*')
    for animal in tqdm(pic_names):
        i = 1
        create_path(new_path + animal)
        for pic in pic_names[animal]:
            out = img_converter(old_path, pic)
            if out is None:
                # Unsupported file type: already logged to skipped_files.txt.
                continue
            out.save(new_path + animal + '/' + str(i) + '.jpg', "JPEG")
            i = i + 1
# CLI entry point: python data_transformation.py <source_dir> <dest_dir>
if __name__ == "__main__":
    tran(argv[1], argv[2])
| piyushjaingoda/National-Flag-Recognition-using-Machine-Learning-Techniques | web_scraping/data_transformation.py | data_transformation.py | py | 1,281 | python | en | code | 3 | github-code | 13 |
32054691015 | import pygame
import math
import random
pygame.init()
sw = 800 # screen width
sh = 800 # screen height
bg = pygame.image.load('starbg.png') # background
playerRocket = pygame.image.load('spaceRocket.png') # player ship
star = pygame.image.load('star.png') # power-up star sprite
asteroid50 = pygame.image.load('asteroid50.png') # asteroid sprites by size
asteroid100 = pygame.image.load('asteroid100.png')
asteroid150 = pygame.image.load('asteroid150.png')
pygame.display.set_caption('Asteroids') # window title
win = pygame.display.set_mode((sw, sh)) # main drawing surface
clock = pygame.time.Clock()
# Global game state.
gameover = False
lives = 3
score = 0
rapidFire = False
rfStart = -1 # frame index when rapid-fire was picked up (-1 = inactive)
isSoundOn = True
highScore = 0
class Player:
    """The player's rocket: position, heading, and the rotated sprite."""

    def __init__(self):
        self.img = playerRocket
        self.w = self.img.get_width()
        self.h = self.img.get_height()
        # Spawn in the middle of the screen, pointing up (angle 0).
        self.x = sw//2
        self.y = sh//2
        self.angle = 0
        self._refresh()

    def _refresh(self):
        """Recompute rotated sprite, heading trig and nose position.

        Factored out of the previously duplicated bodies of __init__,
        turnLeft, turnRight and moveForward; call after any change to
        x, y or angle.
        """
        self.rotatedSurf = pygame.transform.rotate(self.img, self.angle)
        self.rotatedRect = self.rotatedSurf.get_rect()
        self.rotatedRect.center = (self.x, self.y)
        self.cosine = math.cos(math.radians(self.angle + 90))
        self.sine = math.sin(math.radians(self.angle + 90))
        self.head = (self.x + self.cosine * self.w//2, self.y - self.sine * self.h//2)

    def draw(self, win):
        win.blit(self.rotatedSurf, self.rotatedRect)

    def turnLeft(self):
        self.angle += 5
        self._refresh()

    def turnRight(self):
        self.angle -= 5
        self._refresh()

    def moveForward(self):
        # Advance 6 px along the heading; screen y grows downwards.
        self.x += self.cosine * 6
        self.y -= self.sine * 6
        self._refresh()

    def updateLocation(self):
        """Wrap around the screen edges (toroidal play field)."""
        if self.x > sw + 50:
            self.x = 0
        elif self.x < 0 - self.w:
            self.x = sw
        elif self.y < -50:
            self.y = sh
        elif self.y > sh + 50:
            self.y = 0
class Bullet:
    """Projectile spawned at the player's nose, travelling along its heading."""
    def __init__(self):
        # Inherit position and heading trig from the global player object.
        self.point = player.head
        self.x, self.y = self.point
        self.w = 4
        self.h = 4
        self.c = player.cosine
        self.s = player.sine
        self.xv = self.c * 10
        self.yv = self.s * 10
    def move(self):
        # Screen y grows downwards, hence the minus on the vertical component.
        self.x += self.xv
        self.y -= self.yv
    def draw(self, win):
        pygame.draw.rect(win, (255, 255, 255), [self.x, self.y, self.w, self.h])
    def checkOffScreen(self):
        # Returns True once off screen; implicitly returns None otherwise.
        if self.x < -50 or self.x > sw or self.y > sh or self.y < -50:
            return True
class Asteroid:
    """Asteroid of size rank 1-3 drifting in from a random screen edge."""
    def __init__(self, rank):
        self.rank = rank
        # Sprite by rank: 1 -> 50px, 2 -> 100px, anything else -> 150px.
        if self.rank == 1:
            self.image = asteroid50
        elif self.rank == 2:
            self.image = asteroid100
        else:
            self.image = asteroid150
        self.w = 50 * rank
        self.h = 50 * rank
        # Spawn just outside either a horizontal or a vertical edge.
        # (Same RNG call order as the original inline expression.)
        top_or_bottom = (random.randrange(0, sw - self.w),
                         random.choice([-1 * self.h - 5, sh + 5]))
        left_or_right = (random.choice([-1 * self.w - 5, sw + 5]),
                         random.randrange(0, sh - self.h))
        self.ranPoint = random.choice([top_or_bottom, left_or_right])
        self.x, self.y = self.ranPoint
        # Drift toward the far half of the screen.
        self.xdir = 1 if self.x < sw // 2 else -1
        self.ydir = 1 if self.y < sh // 2 else -1
        self.xv = self.xdir * random.randrange(1, 3)
        self.yv = self.ydir * random.randrange(1, 3)
    def draw(self, win):
        win.blit(self.image, (self.x, self.y))
class Star:
    """Rapid-fire power-up drifting in from a random screen edge at speed 2."""
    def __init__(self):
        self.img = star
        self.w = self.img.get_width()
        self.h = self.img.get_height()
        # Spawn just outside either a horizontal or a vertical edge.
        # (Same RNG call order as the original inline expression.)
        top_or_bottom = (random.randrange(0, sw - self.w),
                         random.choice([-1 * self.h - 5, sh + 5]))
        left_or_right = (random.choice([-1 * self.w - 5, sw + 5]),
                         random.randrange(0, sh - self.h))
        self.ranPoint = random.choice([top_or_bottom, left_or_right])
        self.x, self.y = self.ranPoint
        # Drift toward the far half of the screen.
        self.xdir = 1 if self.x < sw // 2 else -1
        self.ydir = 1 if self.y < sh // 2 else -1
        self.xv = self.xdir * 2
        self.yv = self.ydir * 2
    def draw(self, win):
        win.blit(self.img, (self.x, self.y))
def redrawGameWindow():
    '''Redraw the full frame: background, sprites, HUD and overlays.'''
    win.blit(bg, (0,0))
    font = pygame.font.SysFont('arial',30)
    livesText = font.render('Lives: ' + str(lives), 1, (255, 255, 255))
    playAgainText = font.render('Press Tab to Play Again', 1, (255,255,255))
    scoreText = font.render('Score: ' + str(score), 1, (255,255,255))
    highScoreText = font.render('High Score: ' + str(highScore), 1, (255, 255, 255))
    player.draw(win)
    for a in asteroids:
        a.draw(win)
    for b in playerBullets:
        b.draw(win)
    for s in stars:
        s.draw(win)
    # NOTE(review): aliens/alienBullets are drawn but never spawned in this
    # file -- confirm whether the feature is unfinished.
    for a in aliens:
        a.draw(win)
    for b in alienBullets:
        b.draw(win)
    if rapidFire:
        # Depleting rapid-fire timer bar (500-frame window) at top centre.
        pygame.draw.rect(win, (0, 0, 0), [sw//2 - 51, 19, 102, 22])
        pygame.draw.rect(win, (255, 255, 255), [sw//2 - 50, 20, 100 - 100*(count - rfStart)/500, 20])
    if gameover:
        win.blit(playAgainText, (sw//2-playAgainText.get_width()//2, sh//2 - playAgainText.get_height()//2))
    win.blit(scoreText, (sw- scoreText.get_width() - 25, 25))
    win.blit(livesText, (25, 25))
    win.blit(highScoreText, (sw - highScoreText.get_width() -25, 35 + scoreText.get_height()))
    pygame.display.update()
# Mutable world state: the player ship and all live entity lists.
player = Player()
playerBullets = []
asteroids = []
# Frame counter driving spawn timing and the rapid-fire window.
count = 0
stars = []
# NOTE(review): nothing in this file ever appends to aliens/alienBullets,
# though they are updated and drawn -- confirm if intentional.
aliens = []
alienBullets = []
run = True
while run: # main event/update/draw loop
    clock.tick(60) # cap at 60 frames per second
    count += 1
    # Simulation step runs only while the game is active.
    if not gameover:
        # Spawn an asteroid every 50 frames (rank weighted 3:2:1 small:mid:big)
        # and a power-up star every 1000 frames.
        if count % 50 == 0:
            ran = random.choice([1,1,1,2,2,3])
            asteroids.append(Asteroid(ran))
        if count % 1000 == 0:
            stars.append(Star())
        # NOTE(review): the loops below pop from the lists they iterate,
        # which can skip the element following a removal.
        for i, a in enumerate(aliens):
            a.x += a.xv
            a.y += a.yv
            if a.x > sw + 150 or a.x + a.w < -100 or a.y > sh + 150 or a.y + a.h < -100:
                aliens.pop(i)
            for b in playerBullets:
                if (b.x >= a.x and b.x <= a.x + a.w) or b.x + b.w >= a.x and b.x + b.w <= a.x + a.w:
                    if (b.y >= a.y and b.y <= a.y + a.h) or b.y + b.h >= a.y and b.y + b.h <= a.y + a.h:
                        aliens.pop(i)
                        score += 50
                        break
        for i, b in enumerate(alienBullets):
            b.x += b.xv
            b.y += b.yv
            if (b.x >= player.x - player.w//2 and b.x <= player.x + player.w//2) or b.x + b.w >= player.x - player.w//2 and b.x + b.w <= player.x + player.w//2:
                if (b.y >= player.y-player.h//2 and b.y <= player.y + player.h//2) or b.y + b.h >= player.y - player.h//2 and b.y + b.h <= player.y + player.h//2:
                    lives -= 1
                    alienBullets.pop(i)
                    break
        player.updateLocation()
        # Advance player bullets and drop those that left the screen.
        for b in playerBullets:
            b.move()
            if b.checkOffScreen():
                playerBullets.pop(playerBullets.index(b))
        # Asteroid movement, player collision, and bullet hits (splitting
        # big/medium asteroids into two smaller ones).
        for a in asteroids:
            a.x += a.xv
            a.y += a.yv
            if (a.x >= player.x - player.w//2 and a.x <= player.x + player.w//2) or (a.x + a.w <= player.x + player.w//2 and a.x + a.w >= player.x - player.w//2):
                if(a.y >= player.y - player.h//2 and a.y <= player.y + player.h//2) or (a.y +a.h >= player.y - player.h//2 and a.y + a.h <= player.y + player.h//2):
                    lives -= 1
                    asteroids.pop(asteroids.index(a))
                    break
            # bullet collision
            for b in playerBullets:
                if (b.x >= a.x and b.x <= a.x + a.w) or b.x + b.w >= a.x and b.x + b.w <= a.x + a.w:
                    if (b.y >= a.y and b.y <= a.y + a.h) or b.y + b.h >= a.y and b.y + b.h <= a.y + a.h:
                        if a.rank == 3:
                            score += 10
                            na1 = Asteroid(2)
                            na2 = Asteroid(2)
                            na1.x = a.x
                            na2.x = a.x
                            na1.y = a.y
                            na2.y = a.y
                            asteroids.append(na1)
                            asteroids.append(na2)
                        elif a.rank == 2:
                            score += 20
                            na1 = Asteroid(1)
                            na2 = Asteroid(1)
                            na1.x = a.x
                            na2.x = a.x
                            na1.y = a.y
                            na2.y = a.y
                            asteroids.append(na1)
                            asteroids.append(na2)
                        else:
                            score += 30
                        asteroids.pop(asteroids.index(a))
                        playerBullets.pop(playerBullets.index(b))
                        break
        # Star movement and pickup (shooting a star enables rapid fire).
        for s in stars:
            s.x += s.xv
            s.y += s.yv
            if s.x < -100 - s.w or s.x > sw + 100 or s.y > sh + 100 or s.y < -100 - s.h:
                stars.pop(stars.index(s))
                break
            for b in playerBullets:
                if (b.x >= s.x and b.x <= s.x + s.w) or b.x + b.w >= s.x and b.x + b.w <= s.x + s.w:
                    if (b.y >= s.y and b.y <= s.y + s.h) or b.y + b.h >= s.y and b.y + b.h <= s.y + s.h:
                        rapidFire = True
                        rfStart = count
                        stars.pop(stars.index(s))
                        playerBullets.pop(playerBullets.index(b))
                        break
        if lives <= 0:
            gameover = True
        # Rapid fire expires 500 frames after pickup.
        if rfStart != -1:
            if count - rfStart > 500:
                rapidFire = False
                rfStart = -1
    # Continuous input: rotation, thrust, and held-space rapid fire.
    keys = pygame.key.get_pressed()
    if keys[pygame.K_LEFT]:
        player.turnLeft()
    if keys[pygame.K_RIGHT]:
        player.turnRight()
    if keys[pygame.K_UP]:
        player.moveForward()
    if keys[pygame.K_SPACE]:
        if rapidFire:
            playerBullets.append(Bullet())
    for event in pygame.event.get(): # drain the event queue
        if event.type == pygame.QUIT: # user closed the window
            run = False
        if event.type == pygame.KEYDOWN:
            # Single shot per key press when rapid fire is off.
            if event.key == pygame.K_SPACE:
                if not gameover:
                    if not rapidFire:
                        playerBullets.append(Bullet())
            # Tab restarts after game over, keeping the high score.
            if event.key == pygame.K_TAB:
                if gameover:
                    gameover = False
                    lives = 3
                    asteroids.clear()
                    aliens.clear()
                    alienBullets.clear()
                    stars.clear()
                    if score > highScore:
                        highScore = score
                    score = 0
    redrawGameWindow()
pygame.quit() | Anastas20/Animation | Астероиды.py | Астероиды.py | py | 12,925 | python | en | code | 0 | github-code | 13 |
28349834903 | class Solution:
    def numSubarrayBoundedMax(self, nums: List[int], left: int, right: int) -> int:
        """Count subarrays whose maximum lies in [left, right].

        Monotonic-stack approach: for each index i, lefti[i]/righti[i] bound
        the span in which nums[i] is the (tie-broken) maximum; out-of-range
        elements act as hard barriers. Each valid i then contributes
        (i - lefti[i]) * (righti[i] - i) subarrays.
        """
        st=[0]
        n=len(nums)
        lefti=[-1]*n
        if nums[0]<left or nums[0]>right:
            lefti[0]=0
        for i in range(1,n):
            # Out-of-range values are barriers: they bound themselves.
            if nums[i]>right or nums[i]<left:
                st.append(i)
                lefti[i]=i
                continue
            # Pop smaller-or-equal values; ties resolved on this side.
            while st and nums[st[-1]]<=nums[i] :
                st.pop(-1)
            if st:
                lefti[i]=st[-1]
            st.append(i)
        st=[n-1]
        righti=[n]*n
        # NOTE(review): this writes lefti[-1], not righti[-1] -- looks like a
        # copy/paste slip from the forward pass; confirm against test cases.
        if nums[-1]<left or nums[-1]>right:
            lefti[-1]=n-1
        for i in range(n-2,-1,-1):
            if nums[i]>right or nums[i]<left:
                st.append(i)
                righti[i]=i
                continue
            # Strict comparison here (vs <= above) avoids double-counting ties.
            while st and nums[st[-1]]<nums[i] :
                st.pop(-1)
            if st:
                righti[i]=st[-1]
            st.append(i)
        ans=0
        # print(lefti)
        # print(righti)
        for i in range(n):
            ans+=(i-lefti[i])*(righti[i]-i)
return ans | saurabhjain17/leetcode-coding-questions | 0795-number-of-subarrays-with-bounded-maximum/0795-number-of-subarrays-with-bounded-maximum.py | 0795-number-of-subarrays-with-bounded-maximum.py | py | 1,120 | python | en | code | 1 | github-code | 13 |
26522875674 | from os import environ as env
from dotenv import find_dotenv, load_dotenv
# Datastore entity kind names.
owners_key = "owners"
trucks_key = "trucks"
loads_key = "loads"
# JWT signing algorithms accepted when verifying Auth0 tokens.
ALGORITHMS = ["RS256"]
# Load environment variables from a local .env file if one exists.
ENV_FILE = find_dotenv()
if ENV_FILE:
    load_dotenv(ENV_FILE)
# Auth0 application credentials (None when the variable is unset).
CLIENT_ID = env.get("AUTH0_CLIENT_ID")
CLIENT_SECRET = env.get("AUTH0_CLIENT_SECRET")
DOMAIN = env.get("AUTH0_DOMAIN")
SECRET_KEY = env.get("APP_SECRET_KEY") | chenste-osu/truckerapi | constants.py | constants.py | py | 387 | python | en | code | 1 | github-code | 13 |
43083744812 | #
# @lc app=leetcode.cn id=1905 lang=python3
#
# [1905] 统计子岛屿
#
# @lc code=start
class Solution:
    def countSubIslands(self, grid1: List[List[int]], grid2: List[List[int]]) -> int:
        """Count islands of grid2 that lie entirely on islands of grid1.

        Mutates grid2 in place: cleared cells mark visited/invalid islands.
        """
        rows, cols = len(grid1), len(grid1[0])

        def erase(r, c):
            # Iterative flood fill: clear the whole grid2 island at (r, c).
            pending = [(r, c)]
            grid2[r][c] = 0
            while pending:
                x, y = pending.pop()
                for nx, ny in ((x - 1, y), (x + 1, y), (x, y - 1), (x, y + 1)):
                    if 0 <= nx < rows and 0 <= ny < cols and grid2[nx][ny] == 1:
                        grid2[nx][ny] = 0
                        pending.append((nx, ny))

        # Pass 1: wipe every grid2 island that touches grid1 water.
        for r in range(rows):
            for c in range(cols):
                if grid2[r][c] == 1 and grid1[r][c] == 0:
                    erase(r, c)

        # Pass 2: every surviving island is a sub-island.
        total = 0
        for r in range(rows):
            for c in range(cols):
                if grid2[r][c] == 1:
                    total += 1
                    erase(r, c)
        return total
# @lc code=end
| Guo-xuejian/leetcode-practice | 1905.统计子岛屿.py | 1905.统计子岛屿.py | py | 1,110 | python | en | code | 1 | github-code | 13 |
19057407356 | #! /usr/bin/env python3
# https://www.searchenginejournal.com/seo-tasks-automate-with-python/351050/
# https://github.com/sethblack/python-seo-analyzer/
from seoanalyzer import analyze
# output = analyze(site, sitemap)
# Run a full SEO analysis of the target site and dump the raw result.
siteA = "https://www.google.com/"
analysisA = analyze(siteA)
print(analysisA)
| jakewilliami/scripts | python/seo.py | seo.py | py | 301 | python | en | code | 3 | github-code | 13 |
17038667354 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.constant.ParamConstants import *
from alipay.aop.api.domain.GPSLocationInfo import GPSLocationInfo
class AlipayCommerceTransportTaxiDrivermachineBindModel(object):
    """Request payload for binding a taxi driver to an in-vehicle machine.

    All fields default to None; to_alipay_dict() serializes only truthy
    fields, recursing into nested alipay models.
    """

    # Serializable field names, in declaration order; drives the generic
    # to_alipay_dict()/from_alipay_dict() below.
    _KEYS = (
        "car_no",
        "city_code",
        "driver_open_id",
        "driver_user_id",
        "gmt_signin",
        "location_info",
        "machine_sn",
        "machine_supplier_id",
        "request_id",
    )

    def __init__(self):
        self._car_no = None
        self._city_code = None
        self._driver_open_id = None
        self._driver_user_id = None
        self._gmt_signin = None
        self._location_info = None
        self._machine_sn = None
        self._machine_supplier_id = None
        self._request_id = None

    @property
    def car_no(self):
        return self._car_no

    @car_no.setter
    def car_no(self, value):
        self._car_no = value

    @property
    def city_code(self):
        return self._city_code

    @city_code.setter
    def city_code(self, value):
        self._city_code = value

    @property
    def driver_open_id(self):
        return self._driver_open_id

    @driver_open_id.setter
    def driver_open_id(self, value):
        self._driver_open_id = value

    @property
    def driver_user_id(self):
        return self._driver_user_id

    @driver_user_id.setter
    def driver_user_id(self, value):
        self._driver_user_id = value

    @property
    def gmt_signin(self):
        return self._gmt_signin

    @gmt_signin.setter
    def gmt_signin(self, value):
        self._gmt_signin = value

    @property
    def location_info(self):
        return self._location_info

    @location_info.setter
    def location_info(self, value):
        # Accept either a ready GPSLocationInfo or its dict representation.
        if isinstance(value, GPSLocationInfo):
            self._location_info = value
        else:
            self._location_info = GPSLocationInfo.from_alipay_dict(value)

    @property
    def machine_sn(self):
        return self._machine_sn

    @machine_sn.setter
    def machine_sn(self, value):
        self._machine_sn = value

    @property
    def machine_supplier_id(self):
        return self._machine_supplier_id

    @machine_supplier_id.setter
    def machine_supplier_id(self, value):
        self._machine_supplier_id = value

    @property
    def request_id(self):
        return self._request_id

    @request_id.setter
    def request_id(self, value):
        self._request_id = value

    def to_alipay_dict(self):
        """Serialize all truthy fields to a plain dict."""
        params = dict()
        for key in self._KEYS:
            value = getattr(self, key)
            if not value:
                continue
            if hasattr(value, 'to_alipay_dict'):
                value = value.to_alipay_dict()
            params[key] = value
        return params

    @staticmethod
    def from_alipay_dict(d):
        """Build a model from a dict; returns None for a falsy input."""
        if not d:
            return None
        o = AlipayCommerceTransportTaxiDrivermachineBindModel()
        for key in AlipayCommerceTransportTaxiDrivermachineBindModel._KEYS:
            if key in d:
                # Goes through the property setters, so location_info dicts
                # are converted to GPSLocationInfo, exactly as before.
                setattr(o, key, d[key])
        return o
| alipay/alipay-sdk-python-all | alipay/aop/api/domain/AlipayCommerceTransportTaxiDrivermachineBindModel.py | AlipayCommerceTransportTaxiDrivermachineBindModel.py | py | 5,377 | python | en | code | 241 | github-code | 13 |
73644147859 | from collections import deque
import sys
# BOJ-style queue simulator: first stdin line is the number of commands,
# each following line is one of push/pop/size/empty/front/back.
li = []
li = deque(li)
for _ in range(int(sys.stdin.readline())):
    n = list(sys.stdin.readline().split())
    x = n[0]
    if x == 'push':
        li.append(n[1])
    elif x == 'pop':
        # Print and remove the front element, or -1 when empty.
        if li:
            s = li.popleft()
            print(s)
        else:
            print(-1)
    elif x == 'size':
        print(len(li))
    elif x == 'empty':
        # 1 when empty, 0 otherwise.
        if li:
            print(0)
        else:
            print(1)
    elif x == 'front':
        if li:
            print(li[0])
        else:
            print(-1)
    elif x == 'back':
        if li:
            print(li[-1])
        else:
            print(-1)
| tenedict/Algorithm | Algorithm/baekjun/S4/큐.py | 큐.py | py | 668 | python | en | code | 1 | github-code | 13 |
25176253517 | import pandas as pd
from matplotlib import pyplot as plt
import seaborn as sns
# Load the full dataset.
# NOTE(review): rahan_csv is loaded and trimmed but never used afterwards.
rahan_csv = pd.read_csv('dataset/Rahan.csv', delimiter=';')
rahan_csv = rahan_csv.iloc[:,:-2]
abrasan_csv = pd.read_csv('dataset/TabrizPollution/Abrasan.csv', delimiter=';')
bashumal_csv = pd.read_csv('dataset/TabrizPollution/Bashumal.csv', delimiter=';')
rastakucha_csv = pd.read_csv('dataset/TabrizPollution/RastaKucha.csv', delimiter=';')
data = pd.concat([abrasan_csv, bashumal_csv, rastakucha_csv], ignore_index=True)
# Take a first look at the data.
print(data.head())
data.info()
data.describe()
# Drop rows with sentinel nulls (-9999.0) in any measured column.
df = data[(data['air_temperature'] != -9999.0) & (data['dewpoint'] != -9999.0) & (data['wind_direction_corr'] != -9999.0) &
          (data['wind_speed'] != -9999.0) & (data['relative_pressure'] != -9999.0) & (data['PM10'] != -9999.0) &
          (data['PM2.5'] != -9999.0)]
df.describe()
# Pairplots to inspect pairwise relationships between variables.
sns.pairplot(df)
plt.figure()
# Correlation matrix heatmap, same goal.
correlation = df.corr()
plt.figure()
ax = sns.heatmap(correlation, annot=True, linewidths=.5)
# Min-max normalize and repeat the same analysis.
normalized_df=(df-df.min())/(df.max()-df.min())
correlation = normalized_df.corr()
plt.figure()
ax = sns.heatmap(correlation, annot=True, linewidths=.5)
sns.pairplot(normalized_df)
plt.figure()
# Pairwise relations were unclear, so inspect three-variable relations
# by coloring pairplots with the pollution targets.
sns.pairplot(data=df.sample(1000), hue='PM2.5')
plt.figure()
sns.pairplot(data=df.sample(15000), hue='PM10')
plt.figure()
| 1462731/APC-Practica1-Regressio | data_description.py | data_description.py | py | 1,663 | python | ca | code | 0 | github-code | 13 |
5990853298 |
# coding: utf-8
# In[ ]:
# Notebook export: each "In[ ]" cell re-imports pandas and reloads the file.
# NOTE(review): `file` shadows a historical builtin name; harmless here.
# Listing sheets of Excel spreadsheets with pandas
# Import pandas
import pandas as pd
# Assign spreadsheet filename: file
file = 'd.xlsx'
# Load spreadsheet: xl
xl = pd.ExcelFile(file)
# Print sheet names
print(xl.sheet_names)
# In[ ]:
# Importing sheets of Excel spreadsheets with pandas
# Import pandas
import pandas as pd
# Assign spreadsheet filename: file
file = 'd.xlsx'
# Load spreadsheet: xl
xl = pd.ExcelFile(file)
# Load a sheet into a DataFrame by name: df1
df1 = xl.parse('2.Q 2015')
# Print the head of the DataFrame df1
print(df1.head())
# Load a sheet into a DataFrame by index: df2
df2 = xl.parse(0)
# Print the head of the DataFrame df2
print(df2.head())
# In[ ]:
# Customizing your Excel spreadsheet import with pandas
# Import pandas
import pandas as pd
pd.set_option("display.max_rows",999)
# Assign spreadsheet filename: file
file = 'd.xlsx'
# Load spreadsheet: xl
xl = pd.ExcelFile(file)
# Parse the first sheet and rename the columns: df1
df1 = xl.parse(0, skiprows=[0], names=['Quarterly', 'Anual'])
# Print the head of the DataFrame df1
print(df1.head())
# Parse the first column of the first sheet and rename the column: df2
# NOTE(review): `parse_cols` was deprecated in favor of `usecols` in
# modern pandas -- confirm the pinned pandas version before upgrading.
df2 = xl.parse(0, parse_cols=[0], skiprows=[0], names=['Kvartal'])
# Print the head of the DataFrame df2
print(df2.head())
| IgorKnez/Python | Importing Excel files with Pandas.py | Importing Excel files with Pandas.py | py | 1,328 | python | en | code | 0 | github-code | 13 |
73643783057 | # Ejercicio 525: Calcular la suma de 2 números. Si la suma está entre 15 y 30, retornar 20.
def calcular_suma(a, b):
    """Return a + b, collapsed to 20 when the (integer) sum lies in [15, 30]."""
    suma = a + b
    return 20 if 15 <= suma <= 30 else suma
# Demo: 13 + 30 = 43 lies outside [15, 30], so the sum is returned as-is.
operando_1 = 13
operando_2 = 30
print(calcular_suma(operando_1, operando_2))
# Demo: 13 + 15 = 28 lies inside [15, 30], so 20 is returned.
operando_1 = 13
operando_2 = 15
print(calcular_suma(operando_1, operando_2))
| Fhernd/PythonEjercicios | Parte001/ex525_suma_numeros_enteros.py | ex525_suma_numeros_enteros.py | py | 364 | python | es | code | 126 | github-code | 13 |
8115154472 | #! /usr/bin/env python
# -*- coding: utf-8 -*-
"""
change units per EM
"""
import os, sys, re
import argparse
from fontTools.ttLib import TTFont
from fontTools.misc.transform import Transform
from fontTools.pens.transformPen import TransformPen
from fontTools.pens.t2CharStringPen import T2CharStringPen
# https://www.microsoft.com/typography/otspec/gpos.htm
# LookupType Enumeration table for glyph positioning
class GposLookupType(object):
    """Symbolic names for the GPOS LookupType enumeration (OpenType spec)."""
    SINGLE = 1
    PAIR = 2
    CURSIVE_ATT = 3
    MARK2BASE_ATT = 4
    MARK2LIGA_ATT = 5
    MARK2MARK_ATT = 6
    CONTEXT_POSITIONING = 7
    CHAINED_CONTEXT_POSITIONING = 8
    EXTENSION_POSITIONING = 9
class GlyphsScaler(object):
    def __init__(self, in_font, out_font, upm=2048, dry_run=False):
        """Load `in_font` and prepare to rescale it to `upm` units per em."""
        self.in_font = in_font
        self.out_font = out_font
        self.font = TTFont(self.in_font)
        self.unitsPerEm = upm
        # Ratio target/current UPM, rounded to 3 decimal places.
        self._scale = round(1. * upm / self.font["head"].unitsPerEm, 3)
        # Assumed CID-keyed until update_CFF() inspects the font.
        self.isCID = True
        self.dry_run = dry_run
        # Value records already rescaled; shared records must be scaled once.
        self.updated_record_values = set()
    def run(self):
        """Rescale all metric-bearing tables, then save unless dry-running.

        NOTE(review): in dry-run mode the non-CFF tables are still mutated
        in memory; only outline scaling and the final save are skipped.
        """
        self.update_BASE()
        if not self.dry_run:
            self.update_CFF()
        self.update_GPOS()
        self.update_OS_2()
        self.update_VORG()
        self.update_head()
        self.update_hhea()
        self.update_hmtx()
        self.update_post()
        self.update_vhea()
        self.update_vmtx()
        if not self.dry_run:
            self.font.save(self.out_font)
def update_BASE(self):
if "BASE" not in self.font:
return
base = self.font["BASE"]
if hasattr(base.table.HorizAxis, "BaseScriptList"):
for record in base.table.HorizAxis.BaseScriptList.BaseScriptRecord:
for coord in record.BaseScript.BaseValues.BaseCoord:
coord.Coordinate = self.scale(coord.Coordinate)
if hasattr(base.table.VertAxis, "BaseScriptList"):
for record in base.table.VertAxis.BaseScriptList.BaseScriptRecord:
for coord in record.BaseScript.BaseValues.BaseCoord:
coord.Coordinate = self.scale(coord.Coordinate)
def update_CFF(self):
cff = self.font["CFF "].cff
topDict = cff.topDictIndex[0]
self.isCID = hasattr(topDict, "FDArray")
attrs = ["UnderlinePosition", "UnderlineThickness", "StrokeWidth"]
self.scale_object(topDict, attrs)
topDict.FontBBox = map(lambda v: self._scale_value(v), topDict.FontBBox)
if self.unitsPerEm != 1000:
topDict.rawDict["FontMatrix"] = [1./self.unitsPerEm, 0, 0, 1./self.unitsPerEm, 0, 0]
gs = self.font.getGlyphSet()
order = self.font.getGlyphOrder()
self.update_glyps_widths(gs, topDict)
self.update_default_and_nominal_width(topDict)
    # https://github.com/fonttools/fonttools/blob/master/Lib/fontTools/subset/__init__.py
    def update_glyps_widths(self, gs, topDict):
        """Redraw every glyph through a scaling TransformPen into a new
        T2 charstring and splice it back into the CharStrings index.

        NOTE(review): `c` and `dfltWdX` are assigned but unused here.
        """
        globalSubrs = topDict.GlobalSubrs
        charStrings = topDict.CharStrings
        for name in gs.keys():
            g = gs[name]
            # Pick the Private dict that owns this glyph's width defaults.
            if self.isCID:
                c, fdSelectIndex = topDict.CharStrings.getItemAndSelector(name)
                private = topDict.FDArray[fdSelectIndex].Private
            else:
                private = topDict.Private
            dfltWdX = private.defaultWidthX
            nmnlWdX = private.nominalWidthX
            # T2 charstrings encode width as a delta from nominalWidthX.
            width = g.width - nmnlWdX
            transformation = (self._scale, 0, 0, self._scale, 0, 0)
            t2Pen = T2CharStringPen(width, gs)
            transPen = TransformPen(t2Pen, transformation)
            g.draw(transPen)
            charString = t2Pen.getCharString(private, globalSubrs)
            glyphID = charStrings.charStrings[name]
            self.update_glyph_width(charString, nmnlWdX)
            charStrings.charStringsIndex.items[glyphID] = charString
    def update_glyph_width(self, charString, nmnlWdX):
        """Scale the explicit width operand (program[0]) of a charstring.

        Scans the leading integer operands before the first moveto/endchar:
        when the operand count exceeds what that operator takes, the extra
        leading value is an explicit width and gets scaled. Any other
        operator means no simple width prefix, so the program is left alone.
        NOTE(review): `nmnlWdX` is unused here -- confirm it can be dropped.
        """
        int_args = []
        for b in charString.program:
            if isinstance(b, int):
                int_args.append(b)
            elif isinstance(b, str):
                if b == "rmoveto":
                    if len(int_args) != 2:
                        break
                elif b == "hmoveto" or b == "vmoveto":
                    if len(int_args) != 1:
                        break
                elif b == "endchar":
                    if len(int_args) != 0:
                        break
                else:
                    return
        charString.program[0] = self._scale_value(charString.program[0])
    def update_default_and_nominal_width(self, topDict):
        """Scale defaultWidthX/nominalWidthX in every relevant Private dict."""
        def update_private(private):
            attrs = ["defaultWidthX", "nominalWidthX"]
            self.scale_object(private, attrs)
        if self.isCID:
            # CID fonts: one Private dict per FDArray entry.
            for fd in topDict.FDArray:
                update_private(fd.Private)
        else:
            update_private(topDict.Private)
def update_GPOS(self):
if "GPOS" not in self.font:
return
gpos = self.font["GPOS"]
for lookup in gpos.table.LookupList.Lookup:
self.update_lookup(lookup)
    def update_lookup(self, lookup):
        """Dispatch one lookup's subtables to the single/pair handlers.

        Extension subtables are unwrapped one level; all other lookup types
        (cursive, mark attachment, contextual) are intentionally skipped.
        """
        for subtable in lookup.SubTable:
            if subtable.LookupType == GposLookupType.SINGLE:
                self.update_lookup_single(subtable)
            elif subtable.LookupType == GposLookupType.PAIR:
                self.update_lookup_pair(subtable)
            elif subtable.LookupType == GposLookupType.EXTENSION_POSITIONING:
                extSubTable = subtable.ExtSubTable
                if extSubTable.LookupType == GposLookupType.SINGLE:
                    self.update_lookup_single(extSubTable)
                elif extSubTable.LookupType == GposLookupType.PAIR:
                    self.update_lookup_pair(extSubTable)
            else:
                pass
    def update_lookup_single(self, subtable):
        """Scale the value record(s) of a SinglePos subtable.

        Format 1 shares one Value across all covered glyphs; the
        updated_record_values set in update_record_value keeps the shared
        record from being scaled once per glyph.
        """
        coverage = subtable.Coverage
        # SinglePosFormat1 subtable: Single positioning value
        if subtable.Format == 1:
            for gname in coverage.glyphs:
                # some fonts have odd data
                if subtable.Value is None:
                    if 0:
                        print("[WARN] {} has an invalid metrics".format(gname))
                self.update_record_value(subtable.Value)
        # SinglePosFormat2 subtable: Array of positioning values
        elif subtable.Format == 2:
            for gname, val in zip(coverage.glyphs, subtable.Value):
                self.update_record_value(val)
        else:
            raise NotImplementedError()
    def update_lookup_pair(self, subtable):
        """Scale the first-glyph value records of a PairPos subtable.

        NOTE(review): Value2 (the second glyph's adjustment) is read but
        never scaled, in both formats -- confirm whether that is deliberate.
        """
        coverage = subtable.Coverage
        # PairPosFormat1 subtable: Adjustments for glyph pairs
        if subtable.Format == 1:
            for FirstGlyph, pair in zip(coverage.glyphs, subtable.PairSet):
                for record in pair.PairValueRecord:
                    SecondGlyph = record.SecondGlyph
                    Value1 = record.Value1
                    Value2 = record.Value2
                    self.update_record_value(Value1)
        # PairPosFormat2 subtable: Class pair adjustment
        elif subtable.Format == 2:
            ordered_classes1 = self._order_classes(subtable.ClassDef1.classDefs, coverage)
            ordered_classes2 = self._order_classes(subtable.ClassDef2.classDefs)
            for classValue1, gnames1 in ordered_classes1:
                class1Record = subtable.Class1Record[classValue1]
                class2Record = class1Record.Class2Record
                for classValue2, gnames2 in ordered_classes2:
                    record = class2Record[classValue2]
                    self.update_record_value(record.Value1)
        else:
            raise NotImplementedError()
def update_record_value(self, record):
# If same record is referred from several lookups, then updating must be done once.
if record in self.updated_record_values:
return
attrs = ["XPlacement", "YPlacement", "XAdvance", "YAdvance"]
self.scale_object(record, attrs)
self.updated_record_values.add(record)
    def _order_classes(self, classDefs, coverage=None):
        """Group glyph names by class value, sorted for deterministic output.

        Returns a list of (classValue, sorted_glyph_names) pairs ordered by
        each class's first glyph name. If *coverage* is given and class 0 is
        absent, class 0 is reconstructed as the covered glyphs not assigned to
        any other class (see the XXX note below -- the exact OpenType
        definition of Class 0 is uncertain here).
        """
        d = {}
        for gname, classValue in classDefs.items():
            if not classValue in d:
                d[classValue] = []
            d[classValue].append(gname)
        for classValue, gnames in d.items():
            d[classValue] = sorted(gnames)
        # XXX: precise definition of Class 0?
        # gnames = coverage - all glyphs belonging to any other classes?
        if coverage is not None and 0 not in d:
            glyphs = sorted(coverage.glyphs)
            for classValue, gnames in d.items():
                for gname in gnames:
                    if gname in glyphs:
                        glyphs.remove(gname)
            d[0] = glyphs
        # for python 2, 'lambda (classValue,gnames): gnames[0]' is also valid
        return sorted(d.items(), key=lambda classValue_gnames: classValue_gnames[1][0])
    def update_OS_2(self):
        """Scale all metric fields of the OS/2 table (widths, sub/superscript
        geometry, strikeout, typo/win ascender-descender, x/cap heights)."""
        attrs = ["xAvgCharWidth", "ySubscriptXSize", "ySubscriptYSize", "ySubscriptXOffset", "ySubscriptYOffset", "ySuperscriptXSize", "ySuperscriptYSize", "ySuperscriptXOffset", "ySuperscriptYOffset", "yStrikeoutSize", "yStrikeoutPosition", "sTypoAscender", "sTypoDescender", "sTypoLineGap", "usWinAscent", "usWinDescent", "sxHeight", "sCapHeight"]
        self.scale_table("OS/2", attrs)
    def update_VORG(self):
        """Scale the vertical-origin table (per-glyph records and the default
        vertical origin). No-op if the font has no VORG table."""
        if "VORG" not in self.font:
            return
        vorg = self.font["VORG"]
        for name in vorg.VOriginRecords.keys():
            vorg.VOriginRecords[name] = self._scale_value(vorg.VOriginRecords[name])
        vorg.defaultVertOriginY = self._scale_value(vorg.defaultVertOriginY)
    def update_head(self):
        """Scale the head table: units-per-em and the font bounding box."""
        attrs = ["unitsPerEm", "xMin", "yMin", "xMax", "yMax"]
        self.scale_table("head", attrs)
    def update_hhea(self):
        """Scale the horizontal-header metrics (ascent/descent, line gap,
        max advance, side bearings, extent)."""
        attrs = ["ascent", "descent", "lineGap", "advanceWidthMax", "minLeftSideBearing", "minRightSideBearing", "xMaxExtent"]
        self.scale_table("hhea", attrs)
def update_hmtx(self):
hmtx = self.font["hmtx"]
for gname in hmtx.metrics.keys():
adw, lsb = hmtx.metrics[gname]
hmtx.metrics[gname] = (self._scale_value(adw), self._scale_value(lsb))
    def update_post(self):
        """Scale the post table's underline position and thickness."""
        attrs = ["underlinePosition", "underlineThickness"]
        self.scale_table("post", attrs)
    def update_vhea(self):
        """Scale the vertical-header metrics; no-op if the font has no vhea."""
        if "vhea" not in self.font:
            return
        attrs = ["ascent", "descent", "lineGap", "advanceHeightMax", "minTopSideBearing", "minBottomSideBearing", "yMaxExtent"]
        self.scale_table("vhea", attrs)
def update_vmtx(self):
if "vmtx" not in self.font:
return
vmtx = self.font["vmtx"]
for gname in vmtx.metrics.keys():
adh, tsb = vmtx.metrics[gname]
vmtx.metrics[gname] = (self._scale_value(adh), self._scale_value(tsb))
    def scale_table(self, table_tag, attrs):
        """Scale the listed attributes of the named font table in place."""
        table = self.font[table_tag]
        self.scale_object(table, attrs)
def scale_object(self, obj, attrs):
for attr in attrs:
if hasattr(obj, attr):
setattr(obj, attr, self._scale_value(getattr(obj, attr)))
    def _scale_value(self, value):
        """Scale a font value by the UPM ratio and truncate to int.

        NOTE(review): int() truncates toward zero rather than rounding, which
        biases negative values -- confirm round() was not intended.
        """
        return int(value * self._scale)
def get_args():
    """Parse command-line arguments.

    Returns the argparse namespace with: in_font (positional), out_font
    (-o, defaults to overwriting in_font), upm (-u, string or None) and
    dry_run (--dry-run flag).
    """
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument("in_font", metavar="FONT", type=str,
                        help="FONT")
    parser.add_argument("-o", "--output", dest="out_font", default=None,
                        help="output font")
    parser.add_argument("-u", "--upm", dest="upm", default=None,
                        help="units per EM")
    parser.add_argument("--dry-run", dest="dry_run", action="store_true",
                        help="dry run?")
    args = parser.parse_args()
    # No explicit output path means the input font is modified in place.
    if args.out_font is None:
        args.out_font = args.in_font
    return args
def main():
    """Entry point: rescale the input font to the requested units-per-em
    (default 2048) and run the scaler."""
    args = get_args()
    upm = 2048
    if args.upm:
        upm = float(args.upm)
    scaler = GlyphsScaler(args.in_font, args.out_font, upm, args.dry_run)
    scaler.run()
if __name__ == "__main__":
main()
| derwind/misc_scripts | change_upm.py | change_upm.py | py | 12,265 | python | en | code | 0 | github-code | 13 |
71253496657 | import sys
import torch
from segmentation_models_pytorch.utils.meter import AverageValueMeter
from tqdm import tqdm as tqdm
class SWEpoch:
    """Base class for a sample-weighted train/valid epoch loop.

    Parameters
    ----------
    model : torch.nn.Module
    loss : a PyTorch loss object, or a list of (loss, weight) tuples whose
        weighted sum is used as the objective.
    metrics : iterable of metric callables exposing ``__name__``.
    stage_name : str
        Label shown on the progress bar (e.g. 'train' or 'valid').
    device : str, optional. Defaults to 'cpu'.
    verbose : bool, optional. Show a tqdm progress bar. Defaults to True.
    """

    def __init__(self, model, loss, metrics, stage_name, device='cpu', verbose=True):
        self.model = model
        self.loss = loss
        self.metrics = metrics
        self.stage_name = stage_name
        self.verbose = verbose
        self.device = device
        self._to_device()

    def _to_device(self):
        """Move the model, loss(es) and metrics to the configured device."""
        self.model.to(self.device)
        if isinstance(self.loss, list):
            # Rebuild the (loss, weight) pairs on the target device. The
            # original indexed with an undefined variable `i` (NameError) and
            # would have replaced each tuple with a bare loss object.
            self.loss = [(l.to(self.device), w) for l, w in self.loss]
        else:
            self.loss.to(self.device)
        for metric in self.metrics:
            metric.to(self.device)

    def _format_logs(self, logs):
        """Render a logs dict as 'name - value, ...' for the progress bar."""
        str_logs = ['{} - {:.4}'.format(k, v) for k, v in logs.items()]
        s = ', '.join(str_logs)
        return s

    def batch_update(self, x, y, w, epoch):
        """Process one batch; implemented by subclasses.

        Signature matches the call in run() -- the original abstract method
        omitted the sample-weight tensor ``w``.
        """
        raise NotImplementedError

    def on_epoch_start(self):
        pass

    def run(self, dataloader, epoch):
        """Iterate *dataloader* once and return averaged loss/metric logs."""
        self.on_epoch_start()
        logs = {}
        loss_meter = AverageValueMeter()
        metrics_meters = {metric.__name__: AverageValueMeter() for metric in self.metrics}
        with tqdm(dataloader, desc=self.stage_name, file=sys.stdout, disable=not (self.verbose)) as iterator:
            for x, y, w in iterator:
                x, y, w = x.to(self.device), y.to(self.device), w.to(self.device)
                loss, y_pred = self.batch_update(x, y, w, epoch)
                # update loss logs
                loss_value = loss.cpu().detach().numpy()
                loss_meter.add(loss_value)
                loss_logs = {'loss': loss_meter.mean}
                logs.update(loss_logs)
                # update metrics logs
                for metric_fn in self.metrics:
                    metric_value = metric_fn(y_pred, y).cpu().detach().numpy()
                    metrics_meters[metric_fn.__name__].add(metric_value)
                metrics_logs = {k: v.mean for k, v in metrics_meters.items()}
                logs.update(metrics_logs)
                if self.verbose:
                    s = self._format_logs(logs)
                    iterator.set_postfix_str(s)
        return logs
class SWTrainEpoch(SWEpoch):
    """Training epoch whose per-pixel weight map's influence grows with epoch."""

    def __init__(self, model, loss, metrics, optimizer, device='cpu',
                 verbose=True, weight_power=5):
        super().__init__(
            model=model,
            loss=loss,
            metrics=metrics,
            stage_name='train',
            device=device,
            verbose=verbose,
        )
        self.optimizer = optimizer
        # Per-pixel weighting requires unreduced (elementwise) losses. The
        # original assigned `.reduction` on the list itself, which raised
        # AttributeError whenever `loss` was a list of (loss, weight) tuples.
        if isinstance(self.loss, list):
            for loss_fn, _ in self.loss:
                loss_fn.reduction = 'none'
        else:
            self.loss.reduction = 'none'
        self.weight_power = weight_power

    def on_epoch_start(self):
        self.model.train()

    def batch_update(self, x, y, w, epoch):
        """Run one optimizer step; return (weighted scalar loss, predictions)."""
        self.optimizer.zero_grad()
        prediction = self.model.forward(x)
        # The weight map's influence ramps up over the first 50 epochs.
        power = ((epoch / 50) ** self.weight_power)
        weightmap = (w ** power)
        if isinstance(self.loss, list):
            losses = [loss_fn(prediction, y) * weightmap * lw
                      for loss_fn, lw in self.loss]
            loss = torch.sum(torch.stack(losses), dim=0)
        else:
            loss = self.loss(prediction, y) * weightmap
        # Normalize by the total weight so the loss scale is epoch-independent.
        loss = torch.sum(loss) / torch.sum(weightmap)
        loss.backward()
        self.optimizer.step()
        return loss, prediction
class SWValidEpoch(SWEpoch):
    """Validation epoch: same weighted loss as training, without gradients."""

    def __init__(self, model, loss, metrics, device='cpu',
                 verbose=True, weight_power=5):
        super().__init__(
            model=model,
            loss=loss,
            metrics=metrics,
            stage_name='valid',
            device=device,
            verbose=verbose,
        )
        # Per-pixel weighting requires unreduced (elementwise) losses. The
        # original assigned `.reduction` on the list itself, which raised
        # AttributeError whenever `loss` was a list of (loss, weight) tuples.
        if isinstance(self.loss, list):
            for loss_fn, _ in self.loss:
                loss_fn.reduction = 'none'
        else:
            self.loss.reduction = 'none'
        self.weight_power = weight_power

    def on_epoch_start(self):
        self.model.eval()

    def batch_update(self, x, y, w, epoch):
        """Evaluate one batch under no_grad; return (scalar loss, predictions)."""
        with torch.no_grad():
            prediction = self.model.forward(x)
            # Same epoch-dependent weight ramp as SWTrainEpoch.
            power = ((epoch / 50) ** self.weight_power)
            weightmap = (w ** power)
            if isinstance(self.loss, list):
                losses = [loss_fn(prediction, y) * weightmap * lw
                          for loss_fn, lw in self.loss]
                loss = torch.sum(torch.stack(losses), dim=0)
            else:
                loss = self.loss(prediction, y) * weightmap
            loss = torch.sum(loss) / torch.sum(weightmap)
        return loss, prediction
| CIVA-Lab/U-SE-ResNet-for-Cell-Tracking-Challenge | SW/train_codes/trainer.py | trainer.py | py | 4,861 | python | en | code | 2 | github-code | 13 |
18235090394 | from mongoengine import connect, disconnect
from mongoengine.connection import _connections
from multiprocessing import current_process
from config import Config
from db.models.results import Results
import os
import logging
log = logging.getLogger(__name__)
class Db:
Results = None
    def __init__(self, createClient=True, maxPoolSize=10):
        """Load the app Config and open the mongoengine connection.

        createClient: forwarded to mongoengine's `connect=` kwarg (whether to
        connect immediately). maxPoolSize: PyMongo connection pool cap.
        """
        config = Config()
        self.db = {}
        self.Results = Results
        self.createClient = createClient
        self.maxPoolSize = maxPoolSize
        self.initConnection(config)
        # Log pid + process name so pool behaviour can be traced per worker.
        logid = "DB [" + str(os.getpid()) + "](" + current_process().name + ") "
        log.debug(logid + "Connections " + str(_connections))
    def initConnection(self, config):
        """Open the default mongoengine connection using settings from
        config.data['database'] (authenticating against the same db)."""
        connect(
            db=config.data['database']['dbName'],
            host=config.data['database']['host'],
            port=config.data['database']['port'],
            username=config.data['database']['username'],
            password=config.data['database']['password'],
            authentication_source=config.data['database']['dbName'],
            maxPoolSize=self.maxPoolSize,
            minPoolSize=1,
            # When False, the client is created lazily on first use
            # (useful when forking worker processes).
            connect=self.createClient)
    def disconnect(self):
        # Calls the module-level mongoengine.disconnect() imported at the top
        # of the file (not this method -- no recursion).
        disconnect()
def connections(self):
return _connections | bcgov/OCWA | microservices/validateApi/db/db.py | db.py | py | 1,288 | python | en | code | 10 | github-code | 13 |
21493565407 | import os
import sys
from dataclasses import dataclass
import numpy as np
import pandas as pd
from sklearn.compose import ColumnTransformer
from sklearn.impute import SimpleImputer
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import OrdinalEncoder
from sklearn.preprocessing import StandardScaler
from src.components.data_ingestion import DataIngestion
from src.exception import CustomException
from src.logger import logging
from src.utils import save_object
@dataclass
class DataTransformationConfig:
    """Filesystem locations used by the data-transformation stage."""
    # NOTE(review): without a type annotation this is a plain class attribute,
    # not a dataclass field (it will not appear in __init__) -- confirm
    # whether an annotated field was intended.
    preprocessor_obj_file_path = os.path.join('artifacts', 'preprocessor.pkl')
class DataTransformation:
    """Builds and applies the preprocessing pipeline for the credit-card
    default dataset, persisting the fitted preprocessor as a pickle."""

    def __init__(self):
        self.data_transformation_config = DataTransformationConfig()

    def get_data_transformation_object(self):
        """Return a ColumnTransformer that median-imputes and standard-scales
        the numeric feature columns.

        Raises CustomException on any failure while building the pipeline.
        """
        try:
            logging.info("Data Transformation Initiated")
            # Every feature in this dataset is numeric, so a single numeric
            # pipeline is sufficient (no categorical encoder needed).
            num_columns = ['LIMIT_BAL', 'SEX', 'EDUCATION', 'MARRIAGE', 'AGE', 'PAY_0', 'PAY_2',
                           'PAY_3', 'PAY_4', 'PAY_5', 'PAY_6', 'BILL_AMT1', 'BILL_AMT2',
                           'BILL_AMT3', 'BILL_AMT4', 'BILL_AMT5', 'BILL_AMT6', 'PAY_AMT1',
                           'PAY_AMT2', 'PAY_AMT3', 'PAY_AMT4', 'PAY_AMT5', 'PAY_AMT6']
            logging.info("Pipeline Initiated")
            # Numerical Pipeline
            num_pipeline = Pipeline(
                steps=[
                    ('imputer', SimpleImputer(strategy='median')),
                    ('scaler', StandardScaler())
                ]
            )
            # Preprocessor
            preprocessor = ColumnTransformer(
                [
                    ('num_pipeline', num_pipeline, num_columns)
                ]
            )
            # Log before returning; the original placed this log statement
            # after `return`, making it unreachable.
            logging.info("Pipeline Completed")
            return preprocessor
        except Exception as e:
            logging.info("Error in Data Transformation")
            raise CustomException(e, sys)

    def initiate_data_transformation(self, train_path, test_path):
        """Read the train/test CSVs, fit/apply the preprocessor, persist it,
        and return (train_arr, test_arr, preprocessor_path).

        The target column and the ID column are dropped from the features;
        the target is appended as the last column of each returned array.
        """
        try:
            # Reading Train & Test Data
            train_df = pd.read_csv(train_path)
            test_df = pd.read_csv(test_path)
            logging.info("Read Train & Test Data Completed")
            logging.info(f"Train Dataframe Head :\n{train_df.head().to_string()}")
            logging.info(f"Test Dataframe Head : \n{test_df.head().to_string()}")
            logging.info("Obtaining Preprocessing Object")
            preprocessor_obj = self.get_data_transformation_object()
            target_column_name = "default.payment.next.month"
            drop_column_name = [target_column_name, "ID"]
            input_feature_train_df = train_df.drop(columns=drop_column_name, axis=1)
            target_feature_train_df = train_df[target_column_name]
            input_feature_test_df = test_df.drop(columns=drop_column_name, axis=1)
            target_feature_test_df = test_df[target_column_name]
            # Fit on train only; transform test with the same fitted
            # statistics to avoid train/test leakage.
            input_feature_train_arr = preprocessor_obj.fit_transform(input_feature_train_df)
            input_feature_test_arr = preprocessor_obj.transform(input_feature_test_df)
            logging.info("Applying Preprocessing Object On Training & Testing Datasets.")
            train_arr = np.c_[input_feature_train_arr, np.array(target_feature_train_df)]
            test_arr = np.c_[input_feature_test_arr, np.array(target_feature_test_df)]
            save_object(
                file_path=self.data_transformation_config.preprocessor_obj_file_path,
                obj=preprocessor_obj
            )
            logging.info("Preprocessor Pickle File Saved")
            logging.info(f"Train_arr_shape : {train_arr.shape}")
            logging.info(f"Test_arr_shape : {test_arr.shape}")
            return (
                train_arr,
                test_arr,
                self.data_transformation_config.preprocessor_obj_file_path
            )
        except Exception as e:
            logging.info("Exception Occured in the Initiate_data_transformation")
            raise CustomException(e, sys)
if __name__ == "__main__":
obj = DataIngestion()
train_data_path, test_data_path = obj.initiate_data_ingestion()
data_transformation = DataTransformation()
train_arr, test_arr,_ = data_transformation.initiate_data_transformation(train_data_path, test_data_path)
| devkegovind/Credit_Card_Default_Prediction | src/components/data_transformation.py | data_transformation.py | py | 4,534 | python | en | code | 0 | github-code | 13 |
18082088569 | from guietta import _, Gui, Quit, ___, III, HS, VS, HSeparator, VSeparator, QFileDialog
from guietta import Empty, Exceptions, P, PG
import os
import subprocess
import re
import cdio
import pycdio
import numpy as np
import math
from qtpy.QtGui import QFont
from qtpy.QtWidgets import QComboBox
from time import strftime
# Aim of the GUI.
# * Allow to start/monitor/stop a read process.
# * Show progress
# * Show graph
product_name="Diagnostics via Disk"
product_name_tech="diagnostics_via_disk"
logo_path='diagnostics_via_disk-logo.png'
pitch="Ramp up COVID-19 testing\nusing frugal devices: CD/DVD drives"
driveChooseComboBox = QComboBox()
# To realign in emacs:
# C-u align-regexp
# as a regexp set this without the quotes: '\(\),'
# let default answer '1' for the two question
# repeat? yes
gui = Gui(
[ 'pitch' , ___ , ___ , VSeparator , logo_path ],
[ HSeparator , ___ , ___ , III , III ],
[ 'Available drives:' , driveChooseComboBox , [ 'refreshDrivesList' ], III , III ],
[ 'CD/DVD drive' , ['trayOpen'] , ['trayClose'] , III , III ],
[ HSeparator , ___ , ___ , III , III ],
[ ["genIdFromTime"] , ___ , 'ortype' , III , III ],
[ 'Analysis run ID:' , '__runID__' , ___ , III , III ],
[ HSeparator , ___ , ___ , III , III ],
[ 'Analysis run control:' , ['analysisStart'] , ['analysisStop'] , III , III ],
[ HSeparator , ___ , ___ , III , III ],
[ 'Disk type:' , 'diskType' , ___ , III , III ],
[ 'Programmed speed:' , 'programmedSpeed' , ___ , III , III ],
[ 'Disk capacity:' , 'diskCapacity' , ___ , III , III ],
[ 'Sector size:' , 'sectorSize' , ___ , III , III ],
[ 'Analysis progress' , P('analysisProgress') , ___ , III , III ],
[ PG('plot') ],
title= product_name + " - " + pitch.replace('\n'," "),
exceptions = Exceptions.PRINT
)
labels = {
'pitch' : pitch,
'trayOpen' : 'Open tray',
'trayClose' : 'Close tray',
'genIdFromTime' : 'Generate new ID from current time',
'analysisStart' : 'Start',
'analysisStop' : 'Stop',
'programmedSpeed' : '',
'diskType' : '',
'diskCapacity' : '',
'sectorSize' : '',
'ortype': 'or type a valid file name below',
'refreshDrivesList' : 'Update list',
#'instantSpeed' : '',
}
for id, label in labels.items():
gui.widgets[id].setText(label)
font = QFont( "Arial", 20, QFont.Bold)
titleWidget = gui.widgets['pitch']
titleWidget.setFont(font)
gui.widgets['diagnostics_via_disklogo'].setMargin(10)
drive = None
drive_name = None
runningAnalysisProcess = None
logfile_write_descriptor = None
logfile_read_descriptor = None
def enableOrDisableRelevantWidgets():
    """Sync widget enabled-state with the app state: tray/start buttons are
    usable only when no analysis runs (and a drive is selected for the tray);
    stop/progress only while one runs. Progress resets to 0 when idle."""
    running = ( runningAnalysisProcess is not None )
    gui.widgets['trayOpen'].setEnabled( drive is not None and not running )
    gui.widgets['trayClose'].setEnabled( drive is not None and not running )
    gui.widgets['analysisStart'].setEnabled( not running )
    gui.widgets['analysisStop'].setEnabled( running )
    gui.widgets['analysisProgress'].setEnabled( running )
    if not running:
        gui.analysisProgress = 0
enableOrDisableRelevantWidgets()
err_no_drive='NO DRIVE DETECTED'
def hands_off_the_drive(*args):
    """Forget the current drive (after a driver error) and refresh the
    drive list so the combo box reflects reality."""
    global drive
    drive = None
    global drive_name
    drive_name = 'No drive selected'
    cb_refreshDrivesList()
def cb_refreshDrivesList(*args):
    """Repopulate the drive combo box from libcdio; show a placeholder
    entry when no CD/DVD drive is detected."""
    drives = cdio.get_devices(pycdio.DRIVER_UNKNOWN)
    driveChooseComboBox.clear()
    if len(drives):
        driveChooseComboBox.addItems(drives)
    else:
        driveChooseComboBox.addItem(err_no_drive)
cb_refreshDrivesList()
def cb_QComboBox(*args):
    """Handle a drive selection in the combo box: open the chosen device
    (or clear state on failure) and refresh widget enabled-states."""
    global drive
    global drive_name
    selectedName = driveChooseComboBox.currentText()
    # Bug fix: the original passed the format string and the value as two
    # print() arguments, so "{!r}" was printed literally.
    print("Selected drive named {!r}".format(selectedName))
    if (selectedName == err_no_drive):
        drive = None
        drive_name = selectedName
    else:
        try:
            drive = cdio.Device(selectedName)
            drive_name = selectedName
        except OSError:
            # Device vanished or is unusable: drop it and rescan.
            drive = None
            drive_name = 'No drive selected'
            cb_refreshDrivesList()
    print("Selected drive named {!s} object {!r}".format(drive_name, drive))
    enableOrDisableRelevantWidgets()
cb_QComboBox()
# TODO handle case when CD mounted...
def cb_analysisStart(gui, *args):
    """Start a background `readom` C2-error scan on the selected drive and
    open its log file for tailing by updateGuiFromProcessLog()."""
    print("analysisStart")
    global drive
    try:
        mode = drive.get_disc_mode()
    except (cdio.DriverError, OSError):
        hands_off_the_drive()
        return
    gui.diskType = mode
    global maxSectorsOnThisDiscTechnology
    # There are many cases (80 minutes CD, and variations between
    # DVD+- and DVD-SL DVD-DL). So far only covering main cases.
    if re.match(r"DVD", mode):
        # Source https://en.wikipedia.org/wiki/DVD#Capacity
        maxSectorsOnThisDiscTechnology = 4171712  # assuming DVD-DL!
    else:
        # Source https://en.wikipedia.org/wiki/CD-ROM#Capacity
        maxSectorsOnThisDiscTechnology = 333000
    com = ["readom", "-noerror", "-nocorr", "-c2scan", "meshpoints=1000",
           "dev=" + drive_name]
    logfilename = gui.runID + ".log"
    global logfile_write_descriptor
    global logfile_read_descriptor
    # Close descriptors left over from a previous run so repeated runs do
    # not leak file handles (the original reopened without closing).
    if logfile_write_descriptor is not None:
        logfile_write_descriptor.close()
    if logfile_read_descriptor is not None:
        logfile_read_descriptor.close()
        logfile_read_descriptor = None
    logfile_write_descriptor = open(logfilename, "a")
    global runningAnalysisProcess
    runningAnalysisProcess = subprocess.Popen(com,
                                              shell=False,
                                              stdout=logfile_write_descriptor,
                                              stderr=subprocess.STDOUT)
    enableOrDisableRelevantWidgets()
    logfile_read_descriptor = open(logfilename, "rb")
def cb_analysisStop(gui, *args):
    """Kill the running readom scan; the polling loop notices the exit and
    resets the UI. The Stop button is disabled while no process runs."""
    print("analysisStop")
    global runningAnalysisProcess
    runningAnalysisProcess.kill()
def cb_genIdFromTime(gui):
    """Fill the run-ID field with a timestamped identifier (also used as the
    log file base name)."""
    gui.runID = product_name_tech + "-run_" + strftime( "%Y-%m-%d_%H-%M-%S")
cb_genIdFromTime(gui);
def cb_trayOpen(gui):
    """Eject the selected drive's tray; on driver error drop the drive."""
    global drive
    try:
        drive.eject_media()
    except cdio.DriverError:
        hands_off_the_drive()
def cb_trayClose(gui):
    """Close the selected drive's tray; on driver error drop the drive."""
    global drive_name
    try:
        cdio.close_tray(drive_name)
    except cdio.DriverError:
        hands_off_the_drive()
#filename = QFileDialog.getOpenFileName(None, "Open File",
# ".",
# "Analysis run log *.bioidrun (*.bioidrun)")
def updateWidget(match_result, match_action_info):
    """Parse-rule action: copy the rule's first capture group into the
    widget named by the rule's 'target' entry."""
    target_widget = match_action_info['target']
    print("updateWidget " + target_widget)
    gui.widgets[target_widget].setText(match_result.group(1))
def ignore(match_result, match_action_info):
    """Parse-rule action: deliberately discard a recognized but
    uninteresting readom output line."""
    print ("Ignoring line: {!r}".format(match_result.string))
diskSectorCount = None
sectorNumToMillimeter = None
c2data = None
def setCapacity(match_result, match_action_info):
    """Parse-rule action for readom's Capacity line: display it, remember the
    sector count, and precompute each sector's approximate radial position.

    The radius mapping assumes the data zone spans 25-58 mm and that data
    area grows linearly with sector number (spiral layout).
    """
    gui.diskCapacity = match_result.group(1)
    global diskSectorCount
    diskSectorCount = int(match_result.group(2))
    global c2data
    print("Initializing c2data with {} elements.".format(diskSectorCount))
    # Bug fix: np.empty left garbage values for sectors that never report a
    # C2 error, and those garbage values were plotted. Start from zeros.
    c2data = np.zeros(diskSectorCount)
    global sectorNumToMillimeter
    sectorNumToMillimeter = np.empty(diskSectorCount)
    global maxSectorsOnThisDiscTechnology
    for sector in range(len(sectorNumToMillimeter)):
        sectorNumToMillimeter[sector] = \
            math.sqrt( sector / maxSectorsOnThisDiscTechnology \
                       * (58 * 58 - 25 * 25) + 25 * 25 ) # minor repetition, yet simple and fast
def updateProgress(match_result, match_action_info):
    """Parse-rule action for 'addr:' lines: convert the current sector into a
    percentage of the disc capacity and update the progress bar."""
    global diskSectorCount
    currentSector = int ( match_result.group(1) )
    percentage = int ( 100 * currentSector / diskSectorCount )
    print("{!r} / {!r} = {!r}".format( currentSector, diskSectorCount , percentage ) )
    gui.analysisProgress = percentage
def updateGraphData(match_result, match_action_info):
    """Parse-rule action for C2-error lines: record the sector's error count
    and redraw the radius-vs-errors plot."""
    global c2data
    sectorInError = int(match_result.group(1))
    # Bug fix: the rule captures (sector, first-byte-offset, total-errors);
    # the original stored group(2) -- the byte offset -- as the error count.
    errorCount = int(match_result.group(3))
    print("Setting c2data{0} {1}.".format(sectorInError, errorCount))
    c2data[sectorInError] = errorCount
    gui.plot.setDownsampling(mode='peak')
    gui.plot.plot(sectorNumToMillimeter, c2data, title="C2 errors", clear=True)
labelParseRules = {
"Read +speed: +(.+)$" : { 'func' : updateWidget, 'target' : 'programmedSpeed', },
"Write +speed: +(.+)$" : { 'func' : ignore },
"Capacity: (([0-9]+) Blocks = [0-9]+ kBytes = [0-9]+ MBytes = [0-9]+ prMB)" : { 'func' : setCapacity },
"addr: +([0-9]+)" : { 'func' : updateProgress },
"Sectorsize: +(.+)$" : { 'func' : updateWidget, 'target' : 'sectorSize', },
"Copy from SCSI .+ disk to file '/dev/null'" : { 'func' : ignore },
'C2 in sector: ([0-9]+) first at byte: ([0-9]+) .+ total: ([0-9]+) errors' : { 'func' : updateGraphData },
"^$" : { 'func' : ignore },
}
labelParseRulesCompiled = {
re.compile(regexpstring) : match_action_info for regexpstring, match_action_info in labelParseRules.items()
}
regexp_to_split_lines = re.compile('\r\n|\r|\n')
def updateGuiFromProcessLog():
    """Tail the readom log: feed any new output lines through the parse
    rules, and reset the UI when the scan process has exited."""
    global runningAnalysisProcess
    global logfile_write_descriptor
    global logfile_read_descriptor
    # Only the bytes the writer has flushed beyond our read position are new.
    write_position = logfile_write_descriptor.tell()
    read_position = logfile_read_descriptor.tell()
    bytes_available = write_position - read_position
    if bytes_available > 0:
        chunk = logfile_read_descriptor.read(bytes_available)  # was `bytes`, shadowing the builtin
        print("readom said: '{!r}'".format(chunk))
        partread = chunk.decode("ascii")
        linesread = regexp_to_split_lines.split(partread)
        for lineread in linesread:
            # First rule whose regex matches wins.
            match_result = None
            for regexp, match_action_info in labelParseRulesCompiled.items():
                match_result = regexp.match(lineread)
                if match_result is not None:
                    break
            if match_result is not None:
                print("matched {!r} with result {!r}".format(regexp.pattern, match_result))
                function_to_call = match_action_info['func']
                function_to_call(match_result, match_action_info)
            else:
                # The original printed this warning twice; once is enough.
                print("Warning unmatched: {!r}".format(lineread))
    if runningAnalysisProcess.poll() is not None:
        print("process has exited")
        runningAnalysisProcess = None
        enableOrDisableRelevantWidgets()
        return
# Main event loop: poll the GUI with a short timeout so the readom log can be
# tailed between events, and dispatch each widget event by name to the
# matching cb_<name> handler found in the module globals.
while True:
    try:
        name, event = gui.get(timeout=0.1)
    except Empty:
        #print("poll")
        # No GUI event: use the idle tick to pump the scan process's output.
        if runningAnalysisProcess is not None:
            updateGuiFromProcessLog()
        continue
    if name is None:
        print("Exiting event loop")
        break
    print("Event {!r}".format(name))
    functionName = "cb_" + name
    function = globals().get(functionName, None)
    if function is None:
        print("Unknown " + functionName)
    else:
        print("Calling " + functionName + " function {!r}".format(function) )
        function(gui)
    print("Returned from " + functionName)
| zray007/Diagnostics-via-Disk | diagnostics_via_disk.py | diagnostics_via_disk.py | py | 11,774 | python | en | code | 4 | github-code | 13 |
15872517293 | import regex as re
import numpy as np
# Matches one "CELL| Vector a/b/c" triplet from CP2K output, capturing the
# nine Cartesian components of the cell vectors (in angstrom).
ALL_CELL_RE = re.compile(
    r"""
    \s+CELL\|\sVector\sa\s\[angstrom\]:
    \s+(?P<xx>[\s-]\d+\.\d+)
    \s+(?P<xy>[\s-]\d+\.\d+)
    \s+(?P<xz>[\s-]\d+\.\d+)
    \s+\|a\|\s+=\s+\S+
    \n
    \s+CELL\|\sVector\sb\s\[angstrom\]:
    \s+(?P<yx>[\s-]\d+\.\d+)
    \s+(?P<yy>[\s-]\d+\.\d+)
    \s+(?P<yz>[\s-]\d+\.\d+)
    \s+\|b\|\s+=\s+\S+
    \n
    \s+CELL\|\sVector\sc\s\[angstrom\]:
    \s+(?P<zx>[\s-]\d+\.\d+)
    \s+(?P<zy>[\s-]\d+\.\d+)
    \s+(?P<zz>[\s-]\d+\.\d+)
    \s+\|c\|\s+=\s+\S+
    \n
    """,
    re.VERBOSE
)


def parse_all_cells(output_file):
    """Extract every cell-vector block from a CP2K output string.

    Returns an (N, 3, 3) float array of cell matrices (rows = vectors a, b, c),
    or None when no cell block is present.
    """
    component_rows = (("xx", "xy", "xz"), ("yx", "yy", "yz"), ("zx", "zy", "zz"))
    cells = [
        [[match[name] for name in row] for row in component_rows]
        for match in ALL_CELL_RE.finditer(output_file)
    ]
    return np.array(cells, dtype=float) if cells else None
| ruihao69/cp2kdata | cp2kdata/block_parser/cells.py | cells.py | py | 1,018 | python | en | code | null | github-code | 13 |
39031717302 | # Dynamic programming solution
''' Algorithm :
For cells in the first row and first column the path is forced (only one direction is possible), so the minimum path sum is simply the running sum along that row/column added to the current cell's weight.
For every remaining cell, the minimum path sum is its own weight plus the smaller of the minimum sums of the cell above and the cell to the left.
The final value in the bottom-right cell of the grid is the minimum path sum, which is the expected answer.
For below grid, calculated path table will look like
[
[1,3,1],
[1,5,1],
[4,2,1]
]
[
[1,4,5],
[2,7,6],
[6,8,7]
]
The function returns value in the right bottom corner: 7
'''
#=========================
class Solution(object):
def minPathSum(self, grid):
"""
:type grid: List[List[int]]
:rtype: int
"""
m,n = len(grid),len(grid[0])
for i in range(1,m):
grid[i][0] += grid[i-1][0]
for j in range(1,n):
grid[0][j] += grid[0][j-1]
for r in range(1,m):
for c in range(1,n):
grid[r][c] += min(grid[r - 1][c], grid[r][c - 1])
return grid[m - 1][n - 1] | sarvesh10491/Leetcode | Pattern_Based/8_Minimum_Path_Sum.py | 8_Minimum_Path_Sum.py | py | 1,226 | python | en | code | 0 | github-code | 13 |
31141524883 | import os
import glob
from clawpack.clawutil import data
try:
CLAW = os.environ['CLAW']
except:
raise Exception("*** Must first set CLAW enviornment variable")
# Scratch directory for storing topo and dtopo files:
scratch_dir = os.path.join(CLAW, 'geoclaw', 'scratch')
def make_setrun(config):
    """Passes the configuration data into the setrun function.
    Parameters
    ----------
    config : Config object
        The object that contains the default bounds, initial conditions,
        fault information, and topography information from the .cfg files.
    Returns
    -------
    setrun : (function)
        The function that prepares the necessary data to use GeoClaw to model
        the scenario's topography.
    """
    def setrun(claw_pkg='geoclaw'):
        """Initializes the necessary physics and topography parameters
        in preparation to run GeoClaw.
        Parameters
        ----------
        claw_pkg : string
            The package to use for the setrun. Expected to be 'geoclaw'.
        Returns
        -------
        rundata : object of class ClawRunData
            A data-based object that deals with the topography of the seafloor.
        """
        assert claw_pkg.lower() == 'geoclaw', "Expected claw_pkg = 'geoclaw'"
        num_dim = 2
        rundata = data.ClawRunData(claw_pkg, num_dim)
        # NOTE(review): bare except below swallows all exceptions before
        # re-raising an AttributeError; consider `except AttributeError`.
        try:
            geo_data = rundata.geo_data
        except:
            print("*** Error, this rundata has no geo_data attribute")
            raise AttributeError("Missing geo_data attribute")
        # == Physics ==
        geo_data.gravity = 9.81
        geo_data.coordinate_system = 2
        geo_data.earth_radius = 6367.5e3
        # == Forcing Options
        geo_data.coriolis_forcing = False
        # == Algorithm and Initial Conditions ==
        geo_data.sea_level = 0.0
        geo_data.dry_tolerance = 1.e-3
        geo_data.friction_forcing = True
        geo_data.manning_coefficient =.025
        geo_data.friction_depth = 1e6
        # Refinement settings
        refinement_data = rundata.refinement_data
        refinement_data.variable_dt_refinement_ratios = True
        refinement_data.wave_tolerance = 5.e-1
        refinement_data.deep_depth = 1e2
        refinement_data.max_level_deep = 3
        # index of max AMR level
        maxlevel = len(config.geoclaw["refinement_ratios"])+1
        # load all topo files from topo_dir
        topo_data = rundata.topo_data
        topo_dir = config.geoclaw['topo_dir']
        topo_files = glob.glob(topo_dir+"*.tt3")
        for file in topo_files:
            topo_data.topofiles.append([3,1,maxlevel,0.,1.e10, file])
        dtopo_data = rundata.dtopo_data
        dtopo_data.dtopofiles.append(
            [3,maxlevel,maxlevel,config.geoclaw['dtopo_path']]
        )
        dtopo_data.dt_max_dtopo = 0.2
        #------------------------------------------------------------------
        # Standard Clawpack parameters to be written to claw.data:
        # (or to amr2ez.data for AMR)
        #------------------------------------------------------------------
        clawdata = rundata.clawdata  # initialized when rundata instantiated
        # Number of space dimensions:
        clawdata.num_dim = num_dim
        # Lower and upper edge of computational domain:
        clawdata.lower[0] = config.geoclaw_bounds['lon_min']  # west longitude
        clawdata.upper[0] = config.geoclaw_bounds['lon_max']  # east longitude
        clawdata.lower[1] = config.geoclaw_bounds['lat_min']  # south latitude
        clawdata.upper[1] = config.geoclaw_bounds['lat_max']  # north latitude
        # Number of grid cells: Coarsest grid
        clawdata.num_cells[0] = config.geoclaw['xcoarse_grid']
        clawdata.num_cells[1] = config.geoclaw['ycoarse_grid']
        # Number of equations in the system:
        clawdata.num_eqn = 3
        # Number of auxiliary variables in aux array (initialized in setaux)
        clawdata.num_aux = 3
        # Index of aux array corresponding to capacity function, if there is
        # one:
        clawdata.capa_index = 2
        # Initial time:
        clawdata.t0 = 0.0
        clawdata.output_style = 1
        # Output nout frames at equally spaced times up to tfinal:
        clawdata.num_output_times = 1
        clawdata.tfinal = config.geoclaw['run_time']
        clawdata.output_t0 = True  # output at initial (or restart) time
        clawdata.output_format = 'ascii'  # 'ascii' or 'netcdf'
        clawdata.output_q_components = 'all'  # need all
        clawdata.output_aux_components = 'none'  # eta=h+B is in q
        clawdata.output_aux_onlyonce = False  # output aux arrays each frame
        clawdata.verbosity = config.geoclaw['verbosity']
        # --------------
        # Time stepping:
        # --------------
        # if dt_variable==1: variable time steps used based on cfl_desired,
        # if dt_variable==0: fixed time steps dt = dt_initial always used.
        clawdata.dt_variable = True
        # Initial time step for variable dt.
        # If dt_variable==0 then dt=dt_initial for all steps:
        clawdata.dt_initial = 0.2
        # Max time step to be allowed if variable dt used:
        clawdata.dt_max = 1e+99
        # Desired Courant number if variable dt used, and max to allow without
        # retaking step with a smaller dt:
        clawdata.cfl_desired = 0.75
        clawdata.cfl_max = 1.0
        # Maximum number of time steps to allow between output times:
        clawdata.steps_max = 5000
        # ------------------
        # Method to be used:
        # ------------------
        # Order of accuracy: 1 => Godunov, 2 => Lax-Wendroff plus limiters
        clawdata.order = 2
        # Use dimensional splitting? (not yet available for AMR)
        clawdata.dimensional_split = 'unsplit'
        # For unsplit method, transverse_waves can be
        # 0 or 'none' ==> donor cell (only normal solver used)
        # 1 or 'increment' ==> corner transport of waves
        # 2 or 'all' ==> corner transport of 2nd order corrections too
        clawdata.transverse_waves = 2
        # Number of waves in the Riemann solution:
        clawdata.num_waves = 3
        # List of limiters to use for each wave family:
        # Required: len(limiter) == num_waves
        # Some options:
        # 0 or 'none' ==> no limiter (Lax-Wendroff)
        # 1 or 'minmod' ==> minmod
        # 2 or 'superbee' ==> superbee
        # 3 or 'mc' ==> MC limiter
        # 4 or 'vanleer' ==> van Leer
        clawdata.limiter = ['mc', 'mc', 'mc']
        clawdata.use_fwaves = True  # True ==> use f-wave version of algorithms
        # Source terms splitting:
        # src_split == 0 or 'none'
        # ==> no source term (src routine never called)
        # src_split == 1 or 'godunov'
        # ==> Godunov (1st order) splitting used,
        # src_split == 2 or 'strang'
        # ==> Strang (2nd order) splitting used, not recommended.
        clawdata.source_split = 'godunov'
        # --------------------
        # Boundary conditions:
        # --------------------
        # Number of ghost cells (usually 2)
        clawdata.num_ghost = 2
        # Choice of BCs at xlower and xupper:
        # 0 => user specified (must modify bcN.f to use this option)
        # 1 => extrapolation (non-reflecting outflow)
        # 2 => periodic (must specify this at both boundaries)
        # 3 => solid wall for systems where q(2) is normal velocity
        clawdata.bc_lower[0] = 'extrap'
        clawdata.bc_upper[0] = 'extrap'
        clawdata.bc_lower[1] = 'extrap'
        clawdata.bc_upper[1] = 'extrap'
        # ---------------
        # AMR parameters:
        # ---------------
        amrdata = rundata.amrdata
        # max number of refinement levels:
        amrdata.amr_levels_max = maxlevel
        # List of refinement ratios at each level (length at least mxnest-1)
        amrdata.refinement_ratios_x = config.geoclaw['refinement_ratios']
        amrdata.refinement_ratios_y = config.geoclaw['refinement_ratios']
        amrdata.refinement_ratios_t = config.geoclaw['refinement_ratios']
        # Specify type of each aux variable in amrdata.auxtype.
        # This must be a list of length maux, each element of which is one of:
        # 'center', 'capacity', 'xleft', or 'yleft' (see documentation).
        amrdata.aux_type = ['center','capacity','yleft']
        # Flag using refinement routine flag2refine instead of richardson error
        amrdata.flag_richardson = False  # use Richardson?
        amrdata.flag2refine = False
        amrdata.flag2refine_tol = 0.5
        # steps to take on each level L between regriddings of level L+1:
        amrdata.regrid_interval = 3
        # width of buffer zone around flagged points:
        # (typically the same as regrid_interval so waves don't escape):
        amrdata.regrid_buffer_width = 2
        # clustering alg. cutoff for (# flagged pts) / (total # cells refined)
        # (closer to 1.0 => more small grids may be needed to cover flagged
        # cells)
        amrdata.clustering_cutoff = 0.700000
        # print info about each regridding up to this level:
        amrdata.verbosity_regrid = 0
        # ----- For developers -----
        # Toggle debugging print statements:
        amrdata.dprint = False  # print domain flags
        amrdata.eprint = False  # print err est flags
        amrdata.edebug = False  # even more err est flags
        amrdata.gprint = False  # grid bisection/clustering
        amrdata.nprint = False  # proper nesting output
        amrdata.pprint = False  # proj. of tagged points
        amrdata.rprint = False  # print regridding summary
        amrdata.sprint = False  # space/memory output
        amrdata.tprint = True  # time step reporting each level
        amrdata.uprint = False  # update/upbnd reporting
        # More AMR parameters can be set -- see the defaults in pyclaw/data.py
        # ---------------
        # Regions:
        # ---------------
        rundata.regiondata.regions = []
        # to specify regions of refinement append lines of the form
        # [minlevel,maxlevel,t1,t2,x1,x2,y1,y2]
        # Every configured region is forced to the finest AMR level.
        for key,region in config.regions.items():
            rundata.regiondata.regions.append([maxlevel,maxlevel]+region)
        # ---------------
        # FGMax:
        # ---------------
        # == fgmax.data values ==
        fgmax_files = rundata.fgmax_data.fgmax_files
        # for fixed grids append to this list names of any fgmax input files
        fgmax_files.append(config.fgmax['fgmax_grid_path'])
        rundata.fgmax_data.num_fgmax_val = 1
        #------------------------------------------------------------------
        # Adjoint specific data:
        #------------------------------------------------------------------
        # Also need to set flagging method and appropriate tolerances above
        adjointdata = rundata.adjointdata
        adjointdata.use_adjoint = True
        # location of adjoint solution, must first be created:
        # make symlink for adjoint
        adjointdata.adjoint_outdir = config.geoclaw['adjoint_outdir']
        # time period of interest:
        adjointdata.t1 = rundata.clawdata.t0
        adjointdata.t2 = rundata.clawdata.tfinal
        if adjointdata.use_adjoint:
            # need an additional aux variable for inner product:
            rundata.amrdata.aux_type.append('center')
            rundata.clawdata.num_aux = len(rundata.amrdata.aux_type)
            adjointdata.innerprod_index = len(rundata.amrdata.aux_type)
        return rundata
    return setrun
def write_setrun(config_path=None):
    """Generate a ``setrun.py`` driver script for a specific scenario.

    The emitted script imports tsunamibayes' ``make_setrun``, loads the
    scenario defaults from ``defaults.cfg`` (plus an optional extra config
    file), and exposes a ``setrun`` callable that GeoClaw can invoke.

    Parameters
    ----------
    config_path : str, optional
        Path to an additional configuration file with scenario-specific
        settings. Defaults to None (only ``defaults.cfg`` is read).
    """
    script_lines = [
        "from tsunamibayes.setrun import make_setrun",
        "from tsunamibayes.utils import Config",
        "",
        "config = Config()",
        "config.read('defaults.cfg')",
    ]
    if config_path:
        # Layer the scenario-specific configuration on top of the defaults.
        script_lines.append("config.read('{}')".format(config_path))
    script_lines.extend([
        "setrun = make_setrun(config)",
        "",
        "if __name__ == '__main__':",
        "    rundata = setrun()",
        "    rundata.write()",
    ])
    with open('setrun.py', 'w') as outfile:
        outfile.write("\n".join(script_lines))
| jpw37/tsunamibayes | tsunamibayes/setrun.py | setrun.py | py | 12,657 | python | en | code | 9 | github-code | 13 |
26785040293 | import numpy as np
class LinearRegression:
    """Ordinary least-squares linear regression fitted by batch gradient descent.

    Minimises the mean squared error ``mean((Xw + b - y)**2)`` by taking
    ``n_iters`` full-batch gradient steps of size ``learning_rate``.
    """

    def __init__(self, learning_rate=0.01, n_iters=100):
        # Step size for each gradient-descent update.
        self.lr = learning_rate
        # Number of full passes over the training data.
        self.n_iters = n_iters
        # Learned parameters; set by fit(). weights has shape (n_features,).
        self.weights = None
        self.bias = None

    def fit(self, X, y):
        """Fit the model to training data.

        Parameters
        ----------
        X : ndarray of shape (m, n)
            Feature matrix with m samples and n features.
        y : ndarray of shape (m,)
            Target values.
        """
        m, n = X.shape
        self.weights = np.zeros(n)
        self.bias = 0.0
        for _ in range(self.n_iters):
            y_pred = np.dot(X, self.weights) + self.bias
            error = y_pred - y
            # Gradient of the MSE w.r.t. the weights is a per-feature vector.
            # (Bug fix: the original wrapped this in np.sum, collapsing the
            # gradient to a single scalar shared by every feature.)
            dw = (2 / m) * np.dot(X.T, error)
            db = (2 / m) * np.sum(error)
            self.weights -= self.lr * dw
            # Bug fix: the original stepped the bias by `self.bias * db`
            # instead of `self.lr * db`, so the bias never trained from 0.
            self.bias -= self.lr * db

    def predict(self, X):
        """Return predicted targets for feature matrix X of shape (m, n)."""
        return np.dot(X, self.weights) + self.bias
19526236700 | import pygame, pygame.font, pygame.event, pygame.draw, string
from pygame.locals import *
import numbers
# Maximum number of characters accepted by the password input box.
MAX_PASSWORD_LENGTH = 8

# Sentinel codes returned by get_key() for editing keys. They live in a
# private 0xC00x range, far above any printable character code, and must all
# be distinct since ask() dispatches on them.
IB_RETURN = 0xC000
IB_BACKSPACE = 0xC001
IB_ESCAPE = 0xC002
# Bug fix: these three were written as 0XC03/0XC04/0XC05 — a dropped zero
# that broke the 0xC00x numbering scheme (the values happened to stay unique,
# so only symbolic comparisons kept the code working).
IB_DELETE = 0xC003
IB_LARROW = 0xC004
IB_RARROW = 0xC005
def get_key():
    '''Block until a key is pressed and return it.

    Editing keys (return, backspace, escape, delete, left/right arrow) are
    translated to their IB_* sentinel codes. Any other non-printable key
    (code < 32 or >= 127) is returned as its raw key code; printable keys
    are returned as their unicode character.
    '''
    translation = {
        K_RETURN: IB_RETURN,
        K_BACKSPACE: IB_BACKSPACE,
        K_ESCAPE: IB_ESCAPE,
        K_DELETE: IB_DELETE,
        K_LEFT: IB_LARROW,
        K_RIGHT: IB_RARROW,
    }
    while 1:
        event = pygame.event.poll()
        if event.type != KEYDOWN:
            continue
        if event.key in translation:
            return translation[event.key]
        if event.key < 32 or 127 <= event.key:
            return event.key
        return event.unicode
def display_box(screen, message):
    '''Draw *message* inside a bordered box centred on *screen*.'''
    font = pygame.font.Font(pygame.font.get_default_font(), 15)
    cx = screen.get_width() / 2
    cy = screen.get_height() / 2
    # Filled red inner box, then a white 1px border slightly larger.
    pygame.draw.rect(screen, (255, 0, 0), (cx - 100, cy - 10, 200, 20), 0)
    pygame.draw.rect(screen, (255, 255, 255), (cx - 102, cy - 12, 204, 24), 1)
    if message:
        rendered = font.render(message, 1, (255, 255, 255))
        screen.blit(rendered, (cx - 100, cy - 10))
    pygame.display.flip()
def ask(screen, question):
    '''ask(screen, question) -> answer

    Run a small modal line editor on *screen* and return the entered text.
    Supports cursor movement (left/right arrows), backspace, delete, and
    escape (which cancels and returns the empty string). The cursor position
    is shown as an underscore in the rendered text.
    '''
    pygame.font.init()
    chars = []  # characters entered so far
    index = 0   # cursor position within chars (0..len(chars))
    while 1:
        # Render the current text with '_' marking the cursor position.
        display = chars[:index] + ['_'] + chars[index:]
        display_box(screen, question + ": " + "".join(display))
        in_key = get_key()
        if in_key == IB_BACKSPACE:
            # Remove the character before the cursor. (Bug fix: the original
            # always dropped the *last* character, ignoring the cursor, and
            # let its parallel display list drift out of sync.)
            if index > 0:
                del chars[index - 1]
                index -= 1
        elif in_key == IB_DELETE:
            # Remove the character under the cursor. (Bug fix: the original
            # deleted the character *before* the cursor, like backspace.)
            if index < len(chars):
                del chars[index]
        elif in_key == IB_LARROW:
            # (Bug fix: the original "swap" assigned both display slots the
            # same value, clobbering a character on every left-arrow press.)
            if index > 0:
                index -= 1
        elif in_key == IB_RARROW:
            if index < len(chars):
                index += 1
        elif in_key == IB_RETURN:
            break
        elif in_key == IB_ESCAPE:
            chars = []
            break
        elif isinstance(in_key, int):
            pass  # other non-printable key codes are ignored
        elif len(chars) < MAX_PASSWORD_LENGTH:
            chars.insert(index, in_key)
            index += 1
    pygame.quit()
    return "".join(chars)
def main():
    """Open a small window and prompt for a password, echoing the result."""
    surface = pygame.display.set_mode((240, 100))
    answer = ask(surface, "PASSWORD")
    print('This is what you entered: [{}]'.format(answer))


if __name__ == '__main__':
    main()
############
#!/usr/bin/python
#import pygame
#pygame.init()
#screen_size=(800,60)
#disp=pygame.display.set_mode(screen_size, pygame.DOUBLEBUF)
#msg=u""
#clock=pygame.time.Clock()
#default_font=pygame.font.get_default_font()
#font=pygame.font.SysFont(default_font,16)
#disp.fill((240,240,240,255))
#pygame.display.flip()
#while(not pygame.event.pump()):
# for event in pygame.event.get():
# print (event)
# if event.type == pygame.QUIT:
# pygame.quit()
# break
# if event.type == pygame.KEYDOWN:
# msg+=event.unicode
# disp.fill((240,240,240,255))
# disp.blit(font.render(msg,True,(30,30,30,255)),(0,0))
# pygame.display.flip()
# clock.tick(24) | teddysback/vnc_c2Py3 | vnc_wrap/fun/input_box.py | input_box.py | py | 4,604 | python | en | code | 0 | github-code | 13 |
29859952397 | import datetime
import os
from collections import defaultdict
from pathlib import Path
from intelmq.lib.bot import OutputBot
class FileOutputBot(OutputBot):
    """Write events to a file.

    Each received event is serialised via ``OutputBot.export_event`` and
    appended as one line to ``file``. When ``format_filename`` is enabled,
    ``file`` is treated as a ``str.format`` template rendered per event, so
    fields such as ``time.observation`` can be embedded in the path.
    """

    _file = None  # open file handle; created lazily by init()/open_file()
    encoding_errors_mode = 'strict'  # passed to open() as the errors= mode
    file: str = "/opt/intelmq/var/lib/bots/file-output/events.txt"  # TODO: should be pathlib.Path
    format_filename: bool = False  # if True, `file` is a per-event format template
    hierarchical_output: bool = False
    keep_raw_field: bool = False
    message_jsondict_as_string: bool = False
    message_with_type: bool = False
    single_key: bool = False

    _is_multithreadable = False  # all writes go through one shared file handle

    def init(self):
        """Reset the handle and open the output file unless it is per-event."""
        # needs to be done here, because in process() FileNotFoundError handling we call init(),
        # otherwise the file would not be opened again
        self._file = None
        self.logger.debug("Opening %r file.", self.file)
        self.errors = self.encoding_errors_mode
        # With format_filename the real path is only known per event, so the
        # file is opened lazily in process() instead.
        if not self.format_filename:
            self.open_file(self.file)
            self.logger.info("File %r is open.", self.file)

    def open_file(self, filename: str = None):
        """Close any current handle and (re)open *filename* for appending.

        Missing parent directories are created on demand; if creation fails,
        the bot is stopped.
        """
        if self._file is not None:
            self._file.close()
        try:
            self._file = open(filename, mode='a', encoding='utf-8', errors=self.errors)
        except FileNotFoundError:  # directory does not exist
            path = Path(os.path.dirname(filename))
            try:
                path.mkdir(mode=0o755, parents=True, exist_ok=True)
            except OSError:
                self.logger.exception('Directory %r could not be created.', path)
                self.stop()
            else:
                self._file = open(filename, mode='a', encoding='utf-8', errors=self.errors)

    def process(self):
        """Append one serialised event to the (possibly per-event) output file."""
        event = self.receive_message()

        if self.format_filename:
            # defaultdict so format templates referencing absent fields do not
            # raise KeyError.
            ev = defaultdict(None)
            ev.update(event)
            # remove once #671 is done
            # Timestamps are parsed to datetime so format specs like
            # {event[time.observation]:%Y-%m-%d} work; fall back to the
            # fractional-seconds variant when plain seconds fail to parse.
            if 'time.observation' in ev:
                try:
                    ev['time.observation'] = datetime.datetime.strptime(ev['time.observation'],
                                                                        '%Y-%m-%dT%H:%M:%S+00:00')
                except ValueError:
                    ev['time.observation'] = datetime.datetime.strptime(ev['time.observation'],
                                                                        '%Y-%m-%dT%H:%M:%S.%f+00:00')
            if 'time.source' in ev:
                try:
                    ev['time.source'] = datetime.datetime.strptime(ev['time.source'],
                                                                   '%Y-%m-%dT%H:%M:%S+00:00')
                except ValueError:
                    ev['time.source'] = datetime.datetime.strptime(ev['time.source'],
                                                                   '%Y-%m-%dT%H:%M:%S.%f+00:00')
            filename = self.file.format(event=ev)
            # Only reopen when the rendered path actually changed.
            if not self._file or filename != self._file.name:
                self.open_file(filename)

        event_data = self.export_event(event, return_type=str)

        try:
            self._file.write(event_data)
            self._file.write("\n")
            self._file.flush()
        except FileNotFoundError:
            # Handle is missing/stale: re-run init() to reopen. The message is
            # not acknowledged here, so it will be delivered again.
            self.init()
        else:
            self.acknowledge_message()

    def shutdown(self):
        """Close the output file handle on bot shutdown."""
        if self._file:
            self._file.close()

    @staticmethod
    def check(parameters):
        """Validate configuration: ensure the output directory exists or create it.

        Returns a list of [level, message] pairs, or None when nothing is
        worth reporting.
        """
        if 'file' not in parameters:
            return [["error", "Parameter 'file' not given."]]
        dirname = os.path.dirname(parameters['file'])
        # Skip the existence check for templated paths ('{ev' marks a
        # per-event format placeholder in the directory part).
        if not os.path.exists(dirname) and '{ev' not in dirname:
            path = Path(dirname)
            try:
                path.mkdir(mode=0o755, parents=True, exist_ok=True)
            except OSError:
                return [["error", "Directory (%r) of parameter 'file' does not exist and could not be created." % dirname]]
            else:
                return [["info", "Directory (%r) of parameter 'file' did not exist, but has now been created." % dirname]]
else:
return [["info", "Directory (%r) of parameter 'file' did not exist, but has now been created." % dirname]]
BOT = FileOutputBot
| certtools/intelmq | intelmq/bots/outputs/file/output.py | output.py | py | 4,058 | python | en | code | 856 | github-code | 13 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.