text stringlengths 38 1.54M |
|---|
from dataset import DataEntry, DataSet, Vocab, Action
from nn.utils.io_utils import deserialize_from_file
#--------------------------------------
# ADDED
#--------------------------------------
#import dynet as dy
#import random
#import math
#import sys
def write_to_file(output_file, dataset, max_num):
    """Dump the first max_num examples of a dataset to two parallel text files.

    Writes "query_<output_file>" with one space-joined query per line and
    "tree_<output_file>" with one stringified parse tree per line (embedded
    newlines escaped as the two characters '\\n' so each tree stays on one line).

    Args:
        output_file: suffix for the two output file names.
        dataset: object exposing get_examples(index) -> example with
            .query (list of tokens) and .parse_tree attributes.
        max_num: number of examples to write, starting at index 0.
    """
    # 'with' guarantees both files are closed even if an example raises.
    with open("query_" + output_file, 'w') as query_writer, \
         open("tree_" + output_file, 'w') as code_writer:
        print("Writing to file...")
        for index in range(max_num):
            example = dataset.get_examples(index)  # fetch once, not twice
            query_writer.write(" ".join(example.query) + "\n")
            code_parse_tree = str(example.parse_tree)
            if "\n" in code_parse_tree:
                # Flag multi-line trees; they are flattened below.
                print("index: " + str(index))
            code_writer.write(code_parse_tree.replace("\n", "\\n") + "\n")
def main():
    """Read the serialized Django dataset and dump each split to text files.

    Loads (train, dev, test) via deserialize_from_file and writes each split
    with write_to_file. Expected sizes: 16000/1000/1801 for Django
    (533/66/66 for Hearthstone).
    """
    splits = deserialize_from_file("data/django.cleaned.dataset.freq5.par_info.refact.space_only.bin")
    # uncomment below for Hearthstone data set
    # splits = deserialize_from_file("hs.freq3.pre_suf.unary_closure.bin")
    for split_name, split_data in zip(("train", "dev", "test"), splits):
        print("----- " + split_name.upper() + " -----")
        num_examples = len(split_data.examples)
        print(num_examples)
        write_to_file(split_name + "_.txt", split_data, num_examples)
    # print(train_data.get_examples(0).query)
    # print(train_data.get_examples(0).parse_tree)
if __name__ == '__main__':
main() |
# for statements 1 - 9
"""Laços Aninhados"""
for i in range(1, 10):
    print("Tabuada do " + str(i))
    for j in range(0, 11):
        # Original concatenated with an empty string (printed "36" for j=3, i=2);
        # a separator between the factor and the product was clearly intended.
        print(str(j) + " " + str(j * i))
import sys
import numpy as npy
import matplotlib.pyplot as plt
import matplotlib.image as img
# Input image, path given on the command line.
m = img.imread(sys.argv[1])
# NOTE(review): shape[0] is the row count (height) — the names w/h look
# swapped, but all indexing below uses the same convention consistently.
w, h = m.shape[:2]
new = npy.zeros([w, h, 3], dtype=int)   # output (reconstructed) image
mask = npy.zeros([w, h, 3], dtype=int)  # 255 marks pixels to be inpainted
arr = npy.zeros([8, 3], dtype=int)      # per-neighbour gradient scratch buffer
wt = npy.zeros([8, 3], dtype=float)     # per-neighbour weights
def gradient_r(x1, y1, xc, yc):
    """Absolute red-channel difference between pixels (x1,y1) and (xc,yc) of m."""
    a = m[x1][y1][0]
    b = m[xc][yc][0]
    # max-min keeps the subtraction non-negative for unsigned pixel types.
    return max(a, b) - min(a, b)
def gradient_g(x1, y1, xc, yc):
    """Absolute green-channel difference between pixels (x1,y1) and (xc,yc) of m."""
    a = m[x1][y1][1]
    b = m[xc][yc][1]
    # max-min keeps the subtraction non-negative for unsigned pixel types.
    return max(a, b) - min(a, b)
def gradient_b(x1, y1, xc, yc):
    """Absolute blue-channel difference between pixels (x1,y1) and (xc,yc) of m."""
    a = m[x1][y1][2]
    b = m[xc][yc][2]
    # max-min keeps the subtraction non-negative for unsigned pixel types.
    return max(a, b) - min(a, b)
def F(x, a):
    """Piecewise-quadratic weighting kernel.

    Returns 1-(x/a)^2 for |x| <= a/2, (x/a - 1)^2 for a/2 < |x| <= a,
    and 0 otherwise (including the degenerate a == 0 case).
    """
    if a == 0:
        return 0
    magnitude = abs(x)
    if magnitude <= a / 2:
        return 1 - (x / a) ** 2
    if magnitude <= a:
        # Note: deliberately x/a (signed), matching the original formula.
        return (x / a - 1) ** 2
    return 0
# Build the damage mask: pixels inside a thin ring centred at (100, 100)
# are marked for reconstruction (mask=255, new=1); everything else keeps
# the original pixel value.
for i in range(w):
    for j in range(h):
        if (i-100)**2 + (j-100)**2 >= 220 and (i-100)**2 + (j-100)**2 <= 300:
            new[i][j] = 1
            mask[i][j] = 255
        else:
            new[i][j] = m[i][j]
            mask[i][j] = 1
# Preview the mask and the damaged image.
plt.imshow((mask).astype(npy.uint8))
plt.show()
#img.imsave("mask2.png",(mask).astype(npy.uint8))
plt.imshow((new).astype(npy.uint8))
plt.show()
# First inpainting pass: for every masked pixel, estimate its colour from
# neighbour gradients weighted by the kernel F.
for i in range(w):
    for j in range(h):
        if mask[i][j][0] == 255 and mask[i][j][1] == 255 and mask[i][j][2] == 255:
            # Collect per-channel gradients between neighbours and (i, j).
            # NOTE(review): range(i-1, i+1) only visits i-1 and i, and the
            # "ii != i and jj != j" test excludes axis-aligned neighbours, so
            # only (i-1, j-1) ever qualifies; arr slots 1..7 retain stale
            # values from the previous pixel. Looks like range(i-1, i+2) /
            # "ii != i or jj != j" was intended — confirm before changing.
            k = 0
            for ii in range(i - 1, i + 1):
                for jj in range(j - 1, j + 1):
                    if ii != i and jj != j:
                        arr[k][0] = gradient_r(ii, jj, i, j)
                        arr[k][1] = gradient_g(ii, jj, i, j)
                        arr[k][2] = gradient_b(ii, jj, i, j)
                        k = k + 1
            # Mean gradient per channel over the 8 buffer slots.
            r = 0
            g = 0
            b = 0
            for k in range(8):
                r = r + arr[k][0]
                g = g + arr[k][1]
                b = b + arr[k][2]
            r /= 8
            g /= 8
            b /= 8
            #print({r, g, b});
            # Kernel weight of each neighbour relative to the mean gradient.
            for ii in range(8):
                wt[ii][0] = F(arr[ii][0], r)
                wt[ii][1] = F(arr[ii][1], g)
                wt[ii][2] = F(arr[ii][2], b)
            fr = 0
            fg = 0
            fb = 0
            weightr = 0
            weightg = 0
            weightb = 0
            for _ in range(8):
                weightr += wt[_][0]
                weightg += wt[_][1]
                weightb += wt[_][2]
            # Blend the centre pixel with the weighted neighbours.
            # NOTE(review): all three channels read m[i][j][0] — fg/fb
            # presumably should use m[i][j][1] and m[i][j][2]; confirm.
            fr = (1 - weightr) * m[i][j][0]
            fg = (1 - weightg) * m[i][j][0]
            fb = (1 - weightb) * m[i][j][0]
            k = 0
            for ii in range(i - 1, i + 1):
                for jj in range(j - 1, j + 1):
                    if ii != i and jj != j:
                        fr += wt[k][0] * m[ii][jj][0]
                        fg += wt[k][1] * m[ii][jj][1]
                        fb += wt[k][2] * m[ii][jj][2]
                        k = k + 1
            new[i][j][0] = int(fr)
            new[i][j][1] = int(fg)
            new[i][j][2] = int(fb)
# Smoothing passes: repeatedly replace every masked pixel with the average
# of its 8 neighbours so the reconstructed ring blends into the image.
for _ in range(20):
    for i in range(w):
        for j in range(h):
            if mask[i][j][0] == 255 and mask[i][j][1] == 255 and mask[i][j][2] == 255:
                # 8-neighbour average. The original summed new[i+1][j-1]
                # twice and omitted new[i+1][j+1]; fixed here so all eight
                # distinct neighbours contribute once.
                new[i][j] = (new[i - 1][j - 1] + new[i - 1][j] + new[i - 1][j + 1]
                             + new[i][j - 1] + new[i][j + 1]
                             + new[i + 1][j - 1] + new[i + 1][j] + new[i + 1][j + 1]) / 8
            # Unmasked pixels are left untouched (original had a no-op else).
plt.imshow((new).astype(npy.uint8))
plt.show()
|
import re;
from mHTTP.mExceptions import cHTTPException, cTCPIPException, cSSLException;
from oConsole import oConsole;
from mColors import *;
# Matches a <link ... rel="icon"> (or rel="shortcut icon") element;
# group 1 captures the href value.
grFavIconLinkElement = re.compile(
  r'<link'
  r'(?:\s+\w+="[^"]+")*'
  r'\s+rel="(?:shortcut )?icon"'  # fixed: "shortcut " must be optional, or plain rel="icon" never matches
  r'(?:\s+\w+="[^"]+")*'
  r'\s+href="([^"]+)"'
  r'\s*\/?>',
  re.I
);
gbDebug = False;  # when True, per-client request failures are printed too
def foGetFavIconURLForHTTPClientsAndURL(aoHTTPClients, oURL):
  """Return the favicon URL for oURL, or None if it cannot be determined.

  Tries each HTTP client in turn: fetches the page, looks for a
  <link rel="icon"> element, falls back to /favicon.ico, then verifies
  the candidate actually returns HTTP 200 before returning it.
  """
  if len(aoHTTPClients) == 0:
    return None;
  for oHTTPClient in aoHTTPClients:
    # Load the page and see if there is a <link ... rel="icon" ... href="..."> element in it.
    oProxyServerURL = oHTTPClient.foGetProxyServerURL();
    oConsole.fStatus(*(
      ["Requesting ", INFO, str(oURL), NORMAL] +
      ([" through ", INFO, str(oProxyServerURL), NORMAL] if oProxyServerURL else []) +
      [" to look for favicon..."]
    ));
    try:
      oResponse = oHTTPClient.fozGetResponseForURL(oURL);
    except (cHTTPException, cTCPIPException, cSSLException) as oException:
      if gbDebug:
        oConsole.fPrint(*(
          [WARNING, "Requesting ", WARNING_INFO, str(oURL), WARNING] +
          ([" through ", WARNING_INFO, str(oProxyServerURL), WARNING] if oProxyServerURL else []) +
          [" failed: ", WARNING_INFO, str(oException), WARNING, "!"]
        ));
      continue; # This HTTP client cannot retrieve the page; try the next client.
    assert oResponse is not None, \
        "HTTP Response should not be None!"; # This can only happen if the client is stopping and we control the client and should not have stopped it.
    # Only scan HTML bodies of successful responses for a favicon <link>.
    oFavIconLinkElementMatch = (
      grFavIconLinkElement.search(oResponse.szData or "") if oResponse.uStatusCode == 200 and oResponse.szMediaType == "text/html" \
      else None
    );
    oFavIconURL = oURL.foFromRelativeString("/favicon.ico"); # The default value unless the webpage specifies a different URL
    if oFavIconLinkElementMatch is not None:
      try:
        # str() can raise on non-ascii href values (py2 semantics).
        sFavIconURL = str(oFavIconLinkElementMatch.group(1));
      except:
        oConsole.fPrint(
          WARNING, "- ", WARNING_INFO, str(oURL), WARNING,
          " refers to a non-ascii favicon URL: ", WARNING_INFO, repr(oFavIconLinkElementMatch.group(1)), WARNING, "!"
        );
      else:
        oFavIconURL = oURL.foFromRelativeString(sFavIconURL);
    # See if there is a "/favicon.ico" on the server.
    oConsole.fStatus(*(
      ["Requesting ", INFO, str(oFavIconURL), NORMAL] +
      ([" through ", INFO, str(oProxyServerURL), NORMAL] if oProxyServerURL else []) +
      [" to check favicon..."]
    ));
    try:
      oResponse = oHTTPClient.fozGetResponseForURL(oFavIconURL);
    except (cHTTPException, cTCPIPException, cSSLException) as oException:
      # NOTE(review): a failure to fetch the favicon aborts entirely instead
      # of trying the remaining clients — confirm that is intended.
      oConsole.fPrint(*(
        [WARNING, "- Cannot retrieve ", WARNING_INFO, str(oURL), WARNING] +
        ([" through ", INFO, str(oProxyServerURL), NORMAL, " "] if oProxyServerURL else []) +
        [": ", WARNING_INFO, str(oException), WARNING, "!"]
      ));
      return None;
    assert oResponse is not None, \
        "HTTP Response should not be None!"; # This can only happen if the client is stopping and we control the client and should not have stopped it.
    if oResponse.uStatusCode != 200:
      oConsole.fPrint(
        WARNING, "- Cannot retrieve ", WARNING_INFO, str(oURL), WARNING, ": the server responded with ",
        WARNING_INFO, "HTTP ", str(oResponse.uStatusCode), " ", oResponse.sReasonPhrase, WARNING, "!"
      );
      return None;
    else:
      if gbDebug:
        oConsole.fPrint(
          "* FavIcon URL for ", INFO, str(oURL), NORMAL, ": ", INFO, str(oFavIconURL), NORMAL, "."
        );
      return oFavIconURL;
  oConsole.fPrint(WARNING, "- Cannot retrieve ", WARNING_INFO, str(oURL), WARNING, " through any HTTP client.");
  return None;
import paramiko
import os

# Log all paramiko activity for post-mortem debugging.
paramiko.util.log_to_file('logfile.log')

host = "101.102.103.104"
port = 22
password = "pass"  # NOTE(review): hard-coded credentials — move to env vars or a config file.
username = "user"

transport = paramiko.Transport((host, port))
try:
    transport.connect(username=username, password=password)
    sftp = paramiko.SFTPClient.from_transport(transport)
    try:
        # NOTE(review): SFTP servers do not expand '~'; the remote path should
        # be absolute or relative to the login directory — confirm.
        filepath = '~/remote/file'
        # Expand '~' locally so the file lands in the user's home directory
        # instead of a literal './~/' directory.
        localpath = os.path.expanduser('~/local/file')
        sftp.get(filepath, localpath)
    finally:
        sftp.close()
finally:
    # Always release the transport, even if connect/download failed.
    transport.close()
|
def mapper(filename):
    """Map phase of the word count: emit a (word, 1) pair per word in the file.

    Each line is lowercased and '.'/',' are treated as spaces before splitting
    on whitespace.
    """
    pairs = []
    with open(filename, 'r') as handle:
        for raw_line in handle:
            cleaned = raw_line.replace(".", " ").replace(",", " ").lower()
            pairs.extend((token, 1) for token in cleaned.split())
    return pairs
def shuffle_sort(map_result):
    """Shuffle/sort phase: group mapper (word, count) pairs by word.

    Returns a dict mapping each word to the list of its counts, with keys
    inserted in sorted order.
    """
    grouped = {}
    for word, count in sorted(map_result):
        grouped.setdefault(word, []).append(count)
    return grouped
def reducer(sort_and_shuffle_result):
    """Reduce phase: collapse each word's count list into a (word, total) pair."""
    return [(word, sum(counts)) for word, counts in sort_and_shuffle_result.items()]
if __name__ == "__main__":
print(reducer(shuffle_sort(mapper("Sample_file_2.txt"))))
|
import atexit
import sys
import io
class Solution:
    """Kruskal's minimum spanning tree over a union-find structure.

    The union-find array stores a negative set size at each root and the
    parent index elsewhere; find() applies path compression, merge() unions
    by size.
    """

    def find(self, a, parent):
        """Return the root of a's set, compressing the path on the way up."""
        if parent[a] < 0:
            return a
        root = self.find(parent[a], parent)
        parent[a] = root
        return root

    def merge(self, a, b, parent):
        """Union the sets containing a and b; no-op if already joined."""
        root_a = self.find(a, parent)
        root_b = self.find(b, parent)
        if root_a == root_b:
            return
        parent[root_a] += parent[root_b]  # accumulate (negative) size
        parent[root_b] = root_a

    # Function to find sum of weights of edges of the Minimum Spanning Tree.
    def spanningTree(self, V, adj):
        """Return the total weight of an MST of the graph.

        adj[u] is a list of [v, w] pairs meaning an edge u-v of weight w.
        """
        parent = [-1] * V
        # Edge list as [weight, neighbour, source], sorted by weight first.
        edges = sorted([pair[1], pair[0], u] for u in range(V) for pair in adj[u])
        total = 0
        for weight, b, a in edges:
            if self.find(a, parent) == self.find(b, parent):
                continue  # would create a cycle
            total += weight
            self.merge(a, b, parent)
        return total
# Judge-style driver: first line is the number of test cases; each case
# gives "V E" then E undirected weighted edges "u v w".
t=int(input())
for i in range(t):
    V,E=map(int,input().strip().split())
    adj=[[]for k in range(V)]
    for j in range(E):
        u,v,w=map(int,input().split())
        # Store each undirected edge in both adjacency lists as [vertex, weight].
        adj[u].append([v,w])
        adj[v].append([u,w])
    obj=Solution()
    print(obj.spanningTree(V,adj))
from turtle import Turtle
import random
food_shape=("circle","turtle","arrow")
class Food(Turtle):
    """A small blue turtle-shaped food pellet for a snake-style game.

    On creation it configures its look and immediately moves to a random
    position; call refresh() whenever it is eaten.
    """

    def __init__(self) -> None:
        super().__init__()
        self.shape("turtle")
        self.penup()
        self.shapesize(stretch_len=0.5, stretch_wid=0.5)
        self.color("blue")
        self.speed(0)
        self.refresh()

    def refresh(self):
        """Teleport the food to a random spot within the play field."""
        new_x = random.randint(-250, 250)
        new_y = random.randint(-250, 250)
        self.goto(new_x, new_y)
# Scatter plotting demo
import matplotlib.pyplot as plt

xs = [1, 2, 3, 4, 5, 6, 7, 8]
ys = [5, 3, 4, 2, 5, 4, 2, 1]

# plt.scatter(x coords, y coords, legend label, marker colour)
plt.scatter(xs, ys, label="random values", color="red")
plt.title("Scatter Graph")  # heading of the graph
plt.legend()                # show the label box
plt.xlabel("x axis")
plt.ylabel("y axis")
plt.show()
import matplotlib.image as mpimg
import matplotlib.pyplot as plt
import numpy as np
import cv2
import glob
import time
from sklearn.svm import LinearSVC
from sklearn.preprocessing import StandardScaler
from skimage.feature import hog
from sklearn.model_selection import train_test_split, GridSearchCV
from sklearn import svm
from utils import *
import pickle
# Divide up into cars and notcars
images = glob.glob('train_data/**/*.png', recursive=True)
cars = []
notcars = []
for image in images:
    if 'non-vehicles' in image:
        notcars.append(image)
    else:
        cars.append(image)
print("number of cars", len(cars))
print("number of notcars", len(notcars))

### TODO: Tweak these parameters and see how the results change.
color_space = 'YCrCb'  # Can be RGB, HSV, LUV, HLS, YUV, YCrCb
orient = 9             # HOG orientation bins
cell_per_block = 2
pix_per_cell = 8
hog_channel = "ALL"    # Can be 0, 1, 2, or "ALL"
spatial_size = (32, 32)
hist_bins = 32
spatial_feat = 1
hist_feat = 1
hog_feat = 1

t = time.time()
car_features = extract_features(cars, color_space=color_space,
                                spatial_size=spatial_size, hist_bins=hist_bins,
                                orient=orient, pix_per_cell=pix_per_cell,
                                cell_per_block=cell_per_block,
                                hog_channel=hog_channel, spatial_feat=spatial_feat,
                                hist_feat=hist_feat, hog_feat=hog_feat)
notcar_features = extract_features(notcars, color_space=color_space,
                                   spatial_size=spatial_size, hist_bins=hist_bins,
                                   orient=orient, pix_per_cell=pix_per_cell,
                                   cell_per_block=cell_per_block,
                                   hog_channel=hog_channel, spatial_feat=spatial_feat,
                                   hist_feat=hist_feat, hog_feat=hog_feat)
t2 = time.time()
print(round(t2 - t, 2), 'Seconds to extract HOG features...')

# Create an array stack of feature vectors
X = np.vstack((car_features, notcar_features)).astype(np.float64)
# Define the labels vector: 1 = car, 0 = not car
y = np.hstack((np.ones(len(car_features)), np.zeros(len(notcar_features))))
print("shape of X", X.shape)
print("shape of y", y.shape)

# Split up data into randomized training and test sets
rand_state = np.random.randint(0, 100)
X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.1, random_state=rand_state)
print("shape of train data", X_train.shape)
print("shape of test data", X_test.shape)  # fixed: was printing X_train.shape twice

# Fit a per-column scaler on the training data only (no test-set leakage)
X_scaler = StandardScaler().fit(X_train)
# Apply the scaler to X
X_train = X_scaler.transform(X_train)
X_test = X_scaler.transform(X_test)
print('Using:', orient, 'orientations', pix_per_cell,
      'pixels per cell and', cell_per_block, 'cells per block')

# Use a linear SVC
#svc = LinearSVC(C=10)
# Check the training time for the SVC
svc = LinearSVC()
t = time.time()
svc.fit(X_train, y_train)
#parameters = {'C':[1, 10]}
#svc = GridSearchCV(svc, parameters)
#svc.fit(X_train, y_train)
t2 = time.time()
print(round(t2 - t, 2), 'Seconds to train SVC...')
# Check the score of the SVC
print('Test Accuracy of SVC = ', round(svc.score(X_test, y_test), 4))
# Check the prediction time for a single sample
t = time.time()
n_predict = 10
print('My SVC predicts: ', svc.predict(X_test[0:n_predict]))
print('For these', n_predict, 'labels: ', y_test[0:n_predict])
t2 = time.time()
print(round(t2 - t, 5), 'Seconds to predict', n_predict, 'labels with SVC')
print(svc.decision_function(X_test[0:n_predict]))

# Save svc model and parameters needed to reproduce the feature extraction
save_data = {
    "svc": svc,
    "scaler": X_scaler,
    "orient": orient,
    "pix_per_cell": pix_per_cell,
    "cell_per_block": cell_per_block,
    "spatial_size": spatial_size,
    "hist_bins": hist_bins,
}
pickle.dump(save_data, open("svc_pickle.p", "wb"))
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# This script is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# It is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with it. If not, see <http://www.gnu.org/licenses/>.
"""
Cherche le code FANTOIR et les highways d'OSM
correspondants à chaque relation associatedStreet.
Ce code apelle des script du projet associatedStreet:
https://github.com/vdct/associatedStreet/
Ce Code est basé sur associatedStreet/addrfantoir.py
"""
import sys
import os.path
import subprocess
import urllib2
import shutil
import urllib
import collections
from zipfile import ZipFile
import cadastre
from osm import OsmParser, OsmWriter
from mytools import write_stream_to_file
from mytools import to_ascii
ASSOCIATEDSTREET_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "associatedStreet")
FANTOIR_URL = "http://collectivites-locales.gouv.fr/files/files/gestion_locale_dgfip/national/FANTOIR.zip"
FANTOIR_ZIP = os.path.join(os.path.dirname(os.path.realpath(__file__)), "FANTOIR.zip")
# Abort with git-submodule instructions if the associatedStreet project has
# not been checked out next to this script.
if not os.path.exists(os.path.join(ASSOCIATEDSTREET_DIR,"addr_fantoir_building.py")):
    sys.stderr.write(u"ERREUR: le projet associatedStreet n'as pas été trouvé.\n".encode("utf-8"))
    sys.stderr.write(u"        Veuillez executer les commandes suivantes et relancer:\n".encode("utf-8"))
    sys.stderr.write(u"            git submodule init\n".encode("utf-8"))
    sys.stderr.write(u"            git submodule update\n".encode("utf-8"))
    sys.exit(-1)
# Make the submodule importable as a Python package.
associatedStreet_init = os.path.join(ASSOCIATEDSTREET_DIR,"__init__.py")
if not os.path.exists(associatedStreet_init):
    open(associatedStreet_init, "a").close()
# Provide a default DB connection config from the shipped template if missing.
associatedStreet_pg_connexion = os.path.join(ASSOCIATEDSTREET_DIR,"pg_connexion.py")
if not os.path.exists(associatedStreet_pg_connexion):
    shutil.copyfile(associatedStreet_pg_connexion + ".txt", associatedStreet_pg_connexion)
import associatedStreet.addr_fantoir_building as addr_fantoir_building
# Preload every lookup table used by normalize() and the dictionaries below.
addr_fantoir_building.dicts = addr_fantoir_building.Dicts()
addr_fantoir_building.dicts.load_lettre_a_lettre()
addr_fantoir_building.dicts.load_abrev_type_voie()
addr_fantoir_building.dicts.load_abrev_titres()
addr_fantoir_building.dicts.load_chiffres()
addr_fantoir_building.dicts.load_chiffres_romains()
addr_fantoir_building.dicts.load_mot_a_blanc()
addr_fantoir_building.dicts.load_osm_insee()
def normalize(nom):
    """Normalize a street name like the Fantoir tooling, then strip the
    redundant "GR " prefix that "Grande Rue"/"Grand Rue" names produce."""
    result = addr_fantoir_building.normalize(to_ascii(nom))
    if result.startswith(("GR GRANDE RUE", "GR GRAND RUE")):
        result = result[3:]
    return result
def get_fantoir_code_departement(code_departement):
    """Convert a 3-char INSEE departement code to its Fantoir form.

    Metropolitan codes ("0xx") drop the leading zero and gain a trailing '0';
    other codes (overseas, "97x") are returned unchanged (first 3 chars).
    """
    if code_departement.startswith('0'):
        return code_departement[1:3] + '0'
    return code_departement[:3]
def get_dict_fantoir(code_departement, code_commune):
    """Return a dict mapping a normalized Fantoir name (nature + libelle of
    the street) to a (string, boolean) tuple: the FANTOIR code, and whether
    the entry is a highway-type record.

    Tries the associatedStreet PostgreSQL database first; on any failure it
    falls back to downloading and parsing the national FANTOIR.zip file.
    """
    code_insee = cadastre.code_insee(code_departement, code_commune)
    dict_fantoir = {}
    try:
        db_cursor = addr_fantoir_building.get_pgc().cursor()
        sql_query = ''' SELECT code_insee||id_voie||cle_rivoli,
                            nature_voie||' '||libelle_voie,
                            type_voie, ld_bati
                        FROM fantoir_voie
                        WHERE code_insee = \'''' + code_insee + '''\'
                        AND caractere_annul NOT IN ('O','Q');'''
        db_cursor.execute(sql_query)
        for result in db_cursor:
            code_fantoir = result[0]
            nom_fantoir = ' '.join(result[1].replace('-',' ').split())
            #lieu_dit_non_bati = (result[2] == '3') and (result[3] == '0')
            # type_voie 1/4/5 are road-like records.
            highway = result[2] in ['1', '4', '5']
            dict_fantoir[normalize(nom_fantoir)] = (code_fantoir, highway)
        assert(len(dict_fantoir) > 0)
        return dict_fantoir
    except:
        # The SQL connection must have failed; load the FANTOIR zip file
        # manually instead. NOTE(review): bare except also hides real bugs.
        filename = FANTOIR_ZIP
        # A sibling ".ok" file marks a completed download.
        ok_filename = filename + ".ok"
        if not (os.path.exists(filename) and os.path.exists(ok_filename)):
            sys.stdout.write("Téléchargement du fichier Fantoir " + FANTOIR_URL + "\n")
            if os.path.exists(filename): os.remove(filename)
            if os.path.exists(ok_filename): os.remove(ok_filename)
            write_stream_to_file(urllib2.urlopen(FANTOIR_URL), filename)
            open(ok_filename, "a").close()
        else:
            sys.stdout.write("Lecture du fichier FANTOIR.zip")
            sys.stdout.flush()
        num_commune = code_insee[2:5]
        # Records of interest start with departement code + commune number.
        debut = get_fantoir_code_departement(code_departement) + num_commune
        for line in ZipFile(filename, "r").open("FANTOIR.txt"):
            if line.startswith(debut):
                if line[108:109] != ' ':
                    # This is a street ("voie") record.
                    if line[73] == ' ':
                        # The street has not been cancelled.
                        assert(code_insee == line[0:2] + line[3:6])
                        id_voie = line[6:10]
                        cle_rivoli = line[10]
                        nature_voie = line[11:15].strip()
                        libele_voie = line[15:41].strip()
                        code_fantoir = code_insee + id_voie + cle_rivoli
                        nom_fantoir = nature_voie + " " + libele_voie
                        #lieu_dit_non_bati = line[108:110] == '30'
                        highway = line[108:109] in ['1', '4', '5']
                        dict_fantoir[normalize(nom_fantoir)] = \
                            (code_fantoir, highway)
        return dict_fantoir
def open_osm_overpass(requete, cache_filename, metropole=False):
    """Run an Overpass query and return the parsed OSM result.

    Results are cached in cache_filename (a sibling ".ok" file marks a
    complete download). On failure with the French server, retries once
    against the German server; otherwise the exception propagates.
    """
    ok_filename = cache_filename + ".ok"
    try:
        if not (os.path.exists(cache_filename) and os.path.exists(ok_filename)):
            # Incomplete cache: discard any partial files and re-download.
            if os.path.exists(cache_filename): os.remove(cache_filename)
            if os.path.exists(ok_filename): os.remove(ok_filename)
            if metropole:
                # oapi-fr.openstreetmap.fr only has metropolitan France, not overseas
                overvass_server = "http://oapi-fr.openstreetmap.fr/oapi/interpreter?"
            else:
                overvass_server = "http://overpass-api.de/api/interpreter?"
            url = overvass_server + urllib.urlencode({'data':requete})
            sys.stdout.write((urllib.unquote(url) + "\n").encode("utf-8"))
            sys.stdout.flush()
            write_stream_to_file(urllib2.urlopen(url), cache_filename)
            open(ok_filename, "a").close()
        return OsmParser().parse(cache_filename)
    except Exception as ex:
        # Drop the ".ok" marker so the next attempt re-downloads.
        if os.path.isfile(ok_filename):
            os.unlink(ok_filename)
        if metropole:
            # Retry with the other overpass server (metropole=False)
            print "ERREUR:", ex
            print "Tentative depuis le serveur allemand:"
            return open_osm_overpass(requete, cache_filename, False)
        else:
            raise ex
def open_osm_multipolygon_s_ways_commune(code_departement, code_commune, type_multipolygon, filtre="", nodes=False):
    """Fetch (via Overpass, cached) the member ways of every multipolygon
    relation of the given type within the commune; include their nodes if
    nodes=True."""
    cache_filename = code_commune + "-multipolygon_" + type_multipolygon + "s.osm"
    code_insee = cadastre.code_insee(code_departement, code_commune)
    # 3600000000 + relation id is the Overpass area id convention.
    area = 3600000000 + addr_fantoir_building.dicts.osm_insee[code_insee]
    requete_overpass = 'rel(area:%d)[type=multipolygon]["%s"]%s;way(r);' % (area, type_multipolygon, filtre)
    if nodes: requete_overpass += "(._;>;);"
    requete_overpass += "out meta;"
    sys.stdout.write((u"Récupération des multipolygon " + type_multipolygon + " de la commune\n").encode("utf-8"))
    # Departement codes starting with "0" are metropolitan France.
    return open_osm_overpass(requete_overpass, cache_filename, metropole=code_departement.startswith("0"))
def open_osm_ways_commune(code_departement, code_commune, type_way, filtre="", nodes=False):
    """Fetch (via Overpass, cached) every way carrying the tag type_way within
    the commune; include their nodes if nodes=True."""
    cache_filename = code_commune + "-" + type_way + "s.osm"
    code_insee = cadastre.code_insee(code_departement, code_commune)
    # 3600000000 + relation id is the Overpass area id convention.
    area = 3600000000 + addr_fantoir_building.dicts.osm_insee[code_insee]
    requete_overpass = 'way(area:%d)["%s"]%s;' % (area, type_way, filtre)
    if nodes: requete_overpass += "(._;>;);"
    requete_overpass += "out meta;"
    sys.stdout.write((u"Récupération des " + type_way + " de la commune\n").encode("utf-8"))
    # Departement codes starting with "0" are metropolitan France.
    return open_osm_overpass(requete_overpass, cache_filename, metropole=code_departement.startswith("0"))
def get_dict_osm_ways(osm):
    """For the given osm file, return a dict mapping the normalized name of
    each way to a dict with:
        - a 'name' field holding the original name
        - an 'ids' field holding the list of ids of the ways sharing that
          normalized name.
    """
    dict_ways_osm = {}
    for way in osm.ways.itervalues():
        # Ways were fetched with a ["name"] filter, so the tag is present.
        name = way.tags['name']
        name_norm = normalize(name)
        if name and name_norm:
            if name_norm not in dict_ways_osm:
                dict_ways_osm[name_norm] = {'name':name,'ids':[]}
            dict_ways_osm[name_norm]['ids'].append(way.id())
    return dict_ways_osm
def humanise_nom_fantoir(name, dict_premier_mot, dict_tout_les_mots):
    """Turn an all-caps abbreviated Fantoir name into a human-readable one.

    dict_premier_mot expands the abbreviated street-type prefix (first word);
    dict_tout_les_mots substitutes accented/expanded forms for any word.
    French particles and Saint/Sainte joins are then fixed up.
    """
    original_name = name
    name = name.title()
    mots = name.split()
    premier_mot_norm = to_ascii(mots[0]).upper()
    if premier_mot_norm in dict_premier_mot:
        if len(mots) > 1 and mots[1] == dict_premier_mot[premier_mot_norm]:
            # The street type is repeated inside the name (happens sometimes);
            # drop the duplicate.
            mots = mots[1:]
        else:
            # Expand the abbreviated prefix.
            mots = dict_premier_mot[premier_mot_norm].split() + mots[1:]
    # Word-by-word accented/expanded replacement.
    for i,mot in enumerate(mots):
        mot_norm = to_ascii(mot).upper()
        if mot_norm in dict_tout_les_mots:
            mots[i] = dict_tout_les_mots[mot_norm]
    name = ' '.join(mots)
    # Lowercase French particles and restore elisions/hyphens.
    name = name.replace(" Du "," du ")
    name = name.replace(" De La "," de la ")
    name = name.replace(" De "," de ")
    name = name.replace(" Des "," des ")
    name = name.replace(" Et "," et ")
    name = name.replace(" L "," l'")
    name = name.replace(" L'"," l'")
    name = name.replace(" D "," d'")
    name = name.replace(" D'"," d'")
    name = name.replace(" Saint "," Saint-")
    name = name.replace(" Sainte "," Sainte-")
    name = name.replace("Grande Rue Grande Rue", "Grande Rue")
    name = name.replace("Grande Rue Grand Rue", "Grand'Rue")
    #if name != original_name:
    #    print "   - ", original_name, "=>", name
    return name
def get_dict_abrev_type_voie():
    """Return a dict that expands a Fantoir street-type abbreviation into its
    unabbreviated form (e.g. "CHE" -> "Chemin").
    """
    dict_abrev_type_voie = {}
    for nom, abrev in addr_fantoir_building.dicts.abrev_type_voie.iteritems():
        nom = nom.title()
        abrev = to_ascii(abrev).upper()
        if not abrev in dict_abrev_type_voie:
            dict_abrev_type_voie[abrev] = nom
        else:
            # Several names share this abbreviation: keep the shortest one.
            if len(nom) < len(dict_abrev_type_voie[abrev]):
                dict_abrev_type_voie[abrev] = nom
    # Manual overrides for known-bad entries:
    dict_abrev_type_voie["CHEM"] = "Chemin" # instead of CHEMINEMENT
    dict_abrev_type_voie["CHE"] = "Chemin" # instead of CHEM
    dict_abrev_type_voie["ILE"] = u"Île" # encoding problem in the associatedStreet project
    dict_abrev_type_voie["ECA"] = u"Écart" # encoding problem in the associatedStreet project
    return dict_abrev_type_voie
def get_dict_accents_mots(osm_noms):
    """Return a dict mapping an ASCII-uppercase word to its accented form.

    The accented spellings are harvested from the osm file
    CODE_COMMUNE-noms.osm, which contains words extracted from the
    cadastre PDF exports; a few fixed Fantoir abbreviations are added too.
    """
    dict_accents_mots = {}
    if osm_noms:
        sys.stdout.write((u"Recherche l'orthographe accentuée depuis les exports PDF du cadastre.\n").encode("utf-8"))
        liste_mots_a_effacer_du_dict = ["DE", "LA", "ET"]
        # Parse all the names extracted from the cadastre to build a
        # replacement dictionary.
        for node in osm_noms.nodes.itervalues():
            if ('name' in node.tags): #and not ('place' in node.tags): # avoid place=* nodes which are written all-caps without accents
                for mot in node.tags['name'].replace("_"," ").replace("-"," ").replace("'"," ").split():
                    if len(mot) > 1:
                        mot_norm = to_ascii(mot).upper()
                        if mot_norm != mot.upper(): # the word contains accents
                            mot = mot.capitalize()
                            if mot_norm not in dict_accents_mots:
                                dict_accents_mots[mot_norm] = mot
                            elif dict_accents_mots[mot_norm] != mot:
                                alternative = dict_accents_mots[mot_norm]
                                # Two spellings for the same word: keep the one
                                # with special characters (accents, cedilla).
                                mot_est_complexe = to_ascii(mot) != mot
                                alternative_est_complexe = to_ascii(alternative) != alternative
                                if mot_est_complexe and not alternative_est_complexe:
                                    dict_accents_mots[mot_norm] = mot
                                elif alternative_est_complexe and not mot_est_complexe:
                                    # keep the alternative already in the dictionary
                                    pass
                                elif alternative_est_complexe and mot_est_complexe:
                                    # cannot decide, too risky: drop the word entirely
                                    liste_mots_a_effacer_du_dict.append(mot_norm)
                                    sys.stdout.write(("ATTENTION: ne peut pas choisir entre l'orthographe " + mot + " ou " + alternative + "\n").encode("utf-8"))
                                else:
                                    # just a capitalisation difference, ignore
                                    pass
        for mot in liste_mots_a_effacer_du_dict:
            if mot in dict_accents_mots:
                del(dict_accents_mots[mot])
    # Fixed entries, applied whether or not osm_noms was given.
    # NOTE(review): indentation reconstructed — confirm this update/return
    # really sit outside the "if osm_noms:" branch.
    dict_accents_mots.update({
        "EGLISE": u"Église",
        "ECOLE": u"École",
        "ECOLES": u"Écoles",
        "ALLEE": u"Allée",
        "ALLEES": u"Allées",
        "GENERAL" : u"Général",
        # Typical Fantoir abbreviations:
        "PDT": u"Président",
        "CDT": "Commandant",
        "REGT" : u"Régiment",
        "DOC" : "Docteur",
        "ST" : "Saint",
        "STE" : "Sainte",
    })
    return dict_accents_mots
def cherche_fantoir_et_osm_highways(code_departement, code_commune, osm, osm_noms = None):
    """Modify the associatedStreet relations of the given osm file, based on
    the normalized version of their name:
    - set the ref:FR:FANTOIR tag to the matching Fantoir code
    - look up, in the OSM data of the commune, the highways with the same
      normalized name and add them with role "street" to the relation
    - rename the relation to the OSM highway name if one was found, or
      otherwise humanise the original name using the accents found in the
      osm_noms file passed as parameter.
    Place nodes get the same Fantoir/humanising treatment at the end.
    """
    sys.stdout.write((u"Rapprochement avec les codes FANTOIR, et les highway OSM\n").encode("utf-8"))
    highways_osm = open_osm_ways_commune(code_departement, code_commune, "highway", '["name"]', nodes=False)
    dict_ways_osm = get_dict_osm_ways(highways_osm)
    dict_fantoir = get_dict_fantoir(code_departement, code_commune)
    dict_abrev_type_voie = get_dict_abrev_type_voie()
    dict_accents_mots = get_dict_accents_mots(osm_noms)
    log = open(code_commune + "-associatedStreet.log", "w")
    nb_associatedStreet = 0
    nb_voies_fantoir = 0
    nb_voies_osm = 0
    # Count occurrences of each normalized name to detect collisions.
    conflits_normalization = collections.Counter([
        normalize(r.tags['name']) for r in osm.relations.itervalues()
        if r.tags.get('type') == 'associatedStreet'])
    for relation in osm.relations.itervalues():
        if relation.tags['type'] == 'associatedStreet':
            nb_associatedStreet += 1
            name = relation.tags['name']
            name_norm = normalize(name)
            if name and name_norm:
                log.write((name + u" => normalizé[" + name_norm + "]").encode("utf-8"))
                if conflits_normalization[name_norm] > 1:
                    # Seen in Dijon (021 B0231): two different streets "Rue la
                    # Fontaine" and "Rue de Fontaine" share the same normalized
                    # name, so no Fantoir/OSM matching is attempted.
                    relation.tags['name'] = humanise_nom_fantoir(name, dict_abrev_type_voie, dict_accents_mots)
                    log.write(("   CONFLIT DE NORMALIZATION, => " + relation.tags['name'] + "\n").encode("utf-8"))
                else:
                    if name_norm in dict_fantoir:
                        relation.tags['ref:FR:FANTOIR'] = dict_fantoir[name_norm][0]
                        nb_voies_fantoir += 1
                        log.write(("   ref:FR:FANTOIR[" + dict_fantoir[name_norm][0] + "]").encode("utf-8"))
                    else:
                        log.write(("   ref:FR:FANTOIR[???]").encode("utf-8"))
                    if name_norm in dict_ways_osm:
                        nb_voies_osm += 1
                        # Attach all matching highways with role "street" and
                        # prefer the OSM spelling for the relation name.
                        for id_way in dict_ways_osm[name_norm]['ids']:
                            relation.add_member_type_ref_role('way', id_way, 'street')
                        relation.tags['name'] = dict_ways_osm[name_norm]['name']
                        log.write(("   osm highway[" + relation.tags['name'] + "]\n").encode("utf-8"))
                    else:
                        relation.tags['name'] = humanise_nom_fantoir(name, dict_abrev_type_voie, dict_accents_mots)
                        log.write(("   osm highway[???] => " + relation.tags['name'] + "\n").encode("utf-8"))
    log.close()
    sys.stdout.write(("Nombre de rues: "+str(nb_associatedStreet)+ "\n").encode("utf-8"))
    if nb_associatedStreet > 0:
        sys.stdout.write(("   avec code FANTOIR       : "+str(nb_voies_fantoir)+" ("+str(int(nb_voies_fantoir*100/nb_associatedStreet))+"%)\n").encode("utf-8"))
        sys.stdout.write(("   avec rapprochement OSM  : "+str(nb_voies_osm)+" ("+str(int(nb_voies_osm*100/nb_associatedStreet))+"%)\n").encode("utf-8"))
    # Also humanise the names of place nodes (lieux-dits):
    for node in osm.nodes.itervalues():
        if node.tags.has_key("place"):
            name = node.tags["name"]
            name_norm = normalize(name)
            highway = False
            if (name_norm in dict_fantoir):
                node.tags['ref:FR:FANTOIR'] = dict_fantoir[name_norm][0]
                highway = dict_fantoir[name_norm][1]
            node.tags["name"] = humanise_nom_fantoir(name,
                dict_abrev_type_voie if highway else {},
                dict_accents_mots)
            if highway:
                # Fantoir says this "place" is actually a road: retag it.
                del(node.tags["place"])
                node.tags["highway"] = "road"
                node.tags["fixme"] = u"à vérifier: nom de rue créé automatiquement à partir des adresses du coin"
def print_help():
    """Print the usage banner for this script (bytes, as stdout is py2)."""
    programme = sys.argv[0]
    # Removed unused local 'spaces' (was computed but never used).
    sys.stdout.write((u"Récupération des code fantoir et des highway OSM des associatedStreet\n").encode("utf-8"))
    sys.stdout.write((u"USAGE:" + "\n").encode("utf-8"))
    sys.stdout.write((u"%s  CODE_DEPARTEMENT CODE_COMUNE input.osm output.osm" % programme + "\n").encode("utf-8"))
def command_line_error(message, help=False):
    """Print an "ERREUR:"-prefixed message, optionally followed by the usage.

    NOTE(review): this does not sys.exit(); callers continue running after
    the error is printed — confirm that is intended.
    """
    sys.stdout.write(("ERREUR: " + message + "\n").encode("utf-8"))
    if help: print_help()
def main(argv):
    """Entry point: parse CLI args, match Fantoir codes and OSM highways for
    every associatedStreet relation of input.osm, write output.osm."""
    if len(argv) != 5 or argv[1] in ["-h","-help","--help"]:
        print_help()
        sys.exit()
    code_departement = argv[1]
    code_commune = argv[2]
    input_filename = argv[3]
    output_filename = argv[4]
    # Basic argument validation (note: command_line_error does not exit).
    if len(code_departement) != 3:
        command_line_error("le code departement doit avoir 3 chiffres")
    if len(code_commune) != 5:
        command_line_error("le code commune doit avoir 5 lettres ou chiffres")
    osm = OsmParser().parse(input_filename)
    # Optionally load accented spellings extracted from the cadastre PDFs.
    osm_noms = None
    osm_noms_filename = code_commune + "-noms.osm"
    if os.path.exists(osm_noms_filename):
        print "Charges les noms depuis le fichier " + osm_noms_filename
        osm_noms = OsmParser().parse(osm_noms_filename)
    cherche_fantoir_et_osm_highways(code_departement, code_commune, osm, osm_noms)
    OsmWriter(osm).write_to_file(output_filename)
if __name__ == '__main__':
    # Pass the full argv so main() can report the program name in usage.
    main(sys.argv)
|
from generallibrary.iterables import extend_list_in_dict, split_list
from generallibrary.functions import SigInfo, wrapper_transfer, Recycle
from generallibrary.diagram import TreeDiagram
from generallibrary.objinfo.children import _ObjInfoChildren
from generallibrary.objinfo.type import _ObjInfoType
from generallibrary.objinfo.origin import _ObjInfoOrigin
from generallibrary.objinfo.properties import _ObjInfoProperties
from generallibrary.objinfo.parents import _ObjInfoParents
class ObjInfo(_ObjInfoChildren, _ObjInfoType, _ObjInfoOrigin, _ObjInfoProperties, _ObjInfoParents, TreeDiagram):
    """ Get whether obj is a module, function, class, method, property or variable.
        Automatically generates parents post creation for attributes that are not modules.
        Children are generated manually with `generate_attributes`.

        Todo: Recycle ObjInfo.
        Issue is that it becomes a NetworkDiagram (Sort of already is) """
    # _recycle_keys = {"id": lambda obj: ObjInfo._identifier(obj=obj)}

    # Default filters used when generating children: keys are unbound predicate
    # methods, values the result required for a child to be kept.
    children_states = {
        _ObjInfoProperties.public: True,
        _ObjInfoOrigin.from_builtin: False,
        # _ObjInfoOrigin.from_instance: None,
        # _ObjInfoOrigin.from_base: None,
        # _ObjInfoOrigin.from_class: None,
        _ObjInfoType.is_module: False,
        # _ObjInfoType.is_function: None,
        # _ObjInfoType.is_class: None,
        # _ObjInfoType.is_property: None,
        # _ObjInfoType.is_instance: None,
        # _ObjInfoType.is_method: None,
    }
    # TreeDiagram persistence hooks deliberately disabled for ObjInfo nodes.
    save_node = load_node = copy_node = NotImplemented

    def __init__(self, obj, parent=None, name=None):
        """ :param obj: Any object to inspect.
            :param parent: Optional parent node (consumed by TreeDiagram machinery).
            :param name: Explicit attribute name; defaults to the origin's __name__. """
        self.obj = obj
        self.cls = self.obj if self.is_class() else type(self.obj)
        self.origin = self.get_origin(self.obj)
        if name is None:
            name = getattr(self.origin, "__name__", None)
            if self.is_module():
                name = name.split(".")[-1]  # keep only the leaf module name
        self.name = name

    # Unique default marker so identifier() can tell "no argument" apart from None.
    sentinel = object()

    def identifier(self, obj=sentinel):
        """ Returns an identifier for any object. """
        if obj is self.sentinel:
            obj = self.obj
        return self._identifier(obj=obj)

    @staticmethod
    def _identifier(obj):
        # Identity-based: two ObjInfos wrapping the same object share an identifier.
        return id(obj)

    def __repr__(self):
        """ Return a nice representation string with capitalized type and name. """
        return f"{self.type(nice_output=True)}: {self.name}"

    # Placeholder attribute; bound to the class itself right after the class body.
    ObjInfo = ...
# Make the class reachable through its own attribute (ObjInfo.ObjInfo is ObjInfo).
setattr(ObjInfo, "ObjInfo", ObjInfo)
class _Hook:
def __init__(self, func, after):
self.func = func
self.after = after
def hook(callable_, *funcs, after=False):
    """ Hook into a callable. Stores funcs in callable's instance, class or even module.

        :param callable_: Target callable to wrap.
        :param funcs: Hook functions; arguments are relayed to them through SigInfo.
        :param after: Run funcs after the target instead of before it.
        :return: List of _Hook objects registered under the callable's name. """
    objInfo = ObjInfo(callable_)
    owner = objInfo.get_parent().obj  # instance, class or module holding the callable
    if not hasattr(owner, "hooks"):
        owner.hooks = {}
    # Only the very first registration wraps and replaces the callable.
    new = objInfo.name not in owner.hooks
    extend_list_in_dict(owner.hooks, objInfo.name, *[_Hook(func=func, after=after) for func in funcs])

    def _wrapper(*args, **kwargs):
        # Hooks are re-read from owner.hooks on every call, so registrations
        # made after wrapping still take effect.
        after, before = split_list(lambda x: x.after, *owner.hooks[objInfo.name])
        sigInfo = SigInfo(callable_, *args, **kwargs)  # Call through SigInfo to easily relay any arguments
        for hook_obj in before:
            sigInfo.call(child_callable=hook_obj.func)
        result = callable_(*args, **kwargs)
        for hook_obj in after:
            sigInfo.call(child_callable=hook_obj.func)
        return result

    if new:
        # Copy metadata from the original, then swap it out on its owner.
        wrapper_transfer(base=callable_, target=_wrapper)
        setattr(objInfo.get_parent().obj, objInfo.name, _wrapper)
    return owner.hooks[objInfo.name]
def cache_clear(obj):
    """Recursively clear every `cache_clear`-capable attribute reachable from obj."""
    has_cache = lambda info: hasattr(info.obj, "cache_clear")
    cached_nodes = ObjInfo(obj).get_children(depth=-1, include_self=True, gen=True, filt=has_cache, traverse_excluded=True)
    for node in cached_nodes:
        node.obj.cache_clear()
|
import re
import pdb
import nltk
import pickle
import random
import numpy as np
import unicodedata
from tqdm import tqdm
from collections import defaultdict
from nltk.corpus import wordnet as wn
def synset_from_sense_key(sense_key):
    """Resolve a WordNet sense key (e.g. 'dog%1:05:00::') to an nltk Synset.

    :param sense_key: str - 'lemma%ss_type:lex_filenum:lex_id:head:head_id'
    :return: nltk.corpus.reader.wordnet.Synset
    :raises WordNetError: if a valid lemma/ss_type/lex_id cannot be extracted
    """
    # fix: WordNetError was raised below but never imported anywhere in this
    # module, so every invalid sense key crashed with NameError instead.
    from nltk.corpus.reader.wordnet import WordNetError
    ADJ, ADJ_SAT, ADV, NOUN, VERB = 'a', 's', 'r', 'n', 'v'
    sense_key_regex = re.compile(r"(.*)\%(.*):(.*):(.*):(.*):(.*)")
    synset_types = {1: NOUN, 2: VERB, 3: ADJ, 4: ADV, 5: ADJ_SAT}
    lemma, ss_type, _, lex_id, _, _ = sense_key_regex.match(sense_key).groups()
    # check that information extracted from sense_key is valid
    error = None
    if not lemma:
        error = "lemma"
    elif int(ss_type) not in synset_types:
        error = "ss_type"
    elif int(lex_id) < 0 or int(lex_id) > 99:
        error = "lex_id"
    if error:
        raise WordNetError(
            "valid {} could not be extracted from the sense key".format(error))
    # NOTE(review): lex_id is not guaranteed to equal the synset's sense number;
    # wn.lemma_from_key(sense_key).synset() is the canonical lookup — confirm
    # before relying on this for rarer senses.
    synset_id = '.'.join([lemma, synset_types[int(ss_type)], lex_id])
    return wn.synset(synset_id)
def _is_punctuation(char):
"""Checks whether `chars` is a punctuation character."""
cp = ord(char)
# We treat all non-letter/number ASCII as punctuation.
# Characters such as "^", "$", and "`" are not in the Unicode
# Punctuation class but we treat them as punctuation anyways, for
# consistency.
if (cp >= 33 and cp <= 47) or (cp >= 58 and cp <= 64) or (cp >= 91 and cp <= 96) or (cp >= 123 and cp <= 126):
return True
cat = unicodedata.category(char)
if cat.startswith("P"):
return True
return False |
import os
import sys
sys.path.append('.')
sys.path.append('/home/huangzeyu/tmp/yolov3')
import torch
from detectron2.data import samplers
from utils.datasets import *
from utils.utils import *
from detectron2.utils.comm import get_world_size
# YOLOv3 default hyper-parameters; previously duplicated verbatim inside both
# loader builders below.
_YOLO_HYP = {
    'giou': 3.54,
    'cls': 37.4,
    'cls_pw': 1.0,
    'obj': 64.3,
    'obj_pw': 1.0,
    'iou_t': 0.225,
    'lr0': 0.01,
    'lrf': -4.0,
    'momentum': 0.937,
    'weight_decay': 0.000484,
    'fl_gamma': 0.0,
    'hsv_h': 0.0138,
    'hsv_s': 0.678,
    'hsv_v': 0.36,
    'degrees': 0.0,
    'translate': 0.0,
    'scale': 0.0,
    'shear': 0.0
}


def _images_per_worker(cfg):
    """Validate cfg.SOLVER.IMS_PER_BATCH against the distributed world size and
    return the per-worker batch size (logic previously duplicated in both builders)."""
    num_workers = get_world_size()
    images_per_batch = cfg.SOLVER.IMS_PER_BATCH
    assert (
        images_per_batch % num_workers == 0
    ), "SOLVER.IMS_PER_BATCH ({}) must be divisible by the number of workers ({}).".format(
        images_per_batch, num_workers
    )
    assert (
        images_per_batch >= num_workers
    ), "SOLVER.IMS_PER_BATCH ({}) must be larger than the number of workers ({}).".format(
        images_per_batch, num_workers
    )
    return images_per_batch // num_workers


def _num_loader_workers(images_per_worker):
    """DataLoader worker-process count: capped by CPU count and 8, and 0 when
    the per-worker batch is a single image."""
    return min([os.cpu_count(), images_per_worker if images_per_worker > 1 else 0, 8])


def build_yolo_detection_train_loader(cfg, mapper=None):
    """Build the training DataLoader over the yolov3 COCO trainvalno5k split.

    :param cfg: detectron2-style config; only SOLVER.IMS_PER_BATCH is read
    :param mapper: unused; kept for API compatibility with detectron2 builders
    :return: torch.utils.data.DataLoader yielding augmented 416px batches
    """
    images_per_worker = _images_per_worker(cfg)
    # Dataset
    dataset = LoadImagesAndLabels(
        "/home/huangzeyu/tmp/yolov3/data/coco/trainvalno5k.txt",
        416,
        images_per_worker,
        augment=True,
        hyp=dict(_YOLO_HYP),  # copy so the dataset can mutate its own hyp dict
        rect=False,
        cache_images=False,
        single_cls=False)
    sampler = samplers.TrainingSampler(len(dataset))
    # NOTE(review): the batch size here is the *unclamped* per-worker count,
    # exactly as in the original; the min() below only affects worker count.
    batch_sampler = torch.utils.data.sampler.BatchSampler(
        sampler, images_per_worker, drop_last=True
    )
    # Dataloader
    images_per_worker = min(images_per_worker, len(dataset))
    nw = _num_loader_workers(images_per_worker)  # number of workers
    dataloader = torch.utils.data.DataLoader(dataset,
                                             batch_sampler=batch_sampler,
                                             num_workers=nw,
                                             collate_fn=dataset.collate_fn)
    return dataloader


def build_yolo_detection_test_loader(cfg, mapper=None):
    """Build the inference DataLoader over the yolov3 COCO 5k split.

    :param cfg: detectron2-style config; only SOLVER.IMS_PER_BATCH is read
    :param mapper: unused; kept for API compatibility with detectron2 builders
    :return: torch.utils.data.DataLoader yielding one rectangular 416px image per batch
    """
    images_per_worker = _images_per_worker(cfg)
    # Dataset
    dataset = LoadImagesAndLabels(
        "/home/huangzeyu/tmp/yolov3/data/coco/5k.txt",
        416,
        images_per_worker,
        hyp=dict(_YOLO_HYP),
        rect=True,
        cache_images=False,
        single_cls=False)
    # Dataloader
    images_per_worker = min(images_per_worker, len(dataset))
    nw = _num_loader_workers(images_per_worker)  # number of workers
    sampler = samplers.InferenceSampler(len(dataset))
    # Always use 1 image per worker during inference since this is the
    # standard when reporting inference time in papers.
    batch_sampler = torch.utils.data.sampler.BatchSampler(
        sampler, 1, drop_last=False)
    test_loader = torch.utils.data.DataLoader(dataset,
                                              batch_sampler=batch_sampler,
                                              num_workers=nw,
                                              collate_fn=dataset.collate_fn)
    return test_loader
"""
Clinical Trials Policy class
Raluca Cobzaru (c) 2018
Adapted from code by Donghun Lee (c) 2018
"""
from collections import namedtuple
import numpy as np
from scipy.stats import binom
import scipy
import math
import pandas as pd
import copy
from ClinicalTrialsModel import ClinicalTrialsModel
import time
def trunc_poisson_fn(count, mean):
    """
    returns list of truncated Poisson distribution with given mean and values count
    :param count: int - maximal value considered by the distribution
    :param mean: float - mean of Poisson distribution
    :return list(float) - vector of count+1 truncated Poisson pmfs summing to 1
    """
    trunc_probs = []
    total = 0.0  # running mass of the first `count` pmf values (was shadowing builtin `sum`)
    for r in range(count):
        # Poisson pmf: mean^r * e^-mean / r!
        pmf = mean ** r * math.exp(-mean) / math.factorial(r)
        trunc_probs.append(pmf)  # append, not insert: values are built in order
        total += pmf
    # final entry absorbs the remaining tail mass so the vector sums to 1
    trunc_probs.append(1 - total)
    return trunc_probs
class ClinicalTrialsPolicy():
    """
    Base class for decision policy
    """

    def __init__(self, model, policy_names):
        """
        initializes the policy
        :param model: the ClinicalTrialsModel that the policy is being implemented on
        :param policy_names: list(str) - list of policies
        """
        self.model = model
        self.policy_names = policy_names
        self.Policy = namedtuple('Policy', policy_names)

    def build_policy(self, info):
        """
        builds the policies depending on the parameters provided
        :param info: dict - contains all policy information
        :return: namedtuple - a policy object
        """
        return self.Policy(*[info[k] for k in self.policy_names])

    def model_A_policy(self, state, info_tuple):
        """
        implements deterministic lookahead policy based on Model A
        :param state: namedtuple - the state of the model at a given time
        :param info_tuple: tuple - (success index, stop flag) for Model A
        :return: a decision made based on the policy
        """
        success_A = info_tuple[0]
        stop_A = info_tuple[1]
        # lookahead runs on a fresh copy so it never mutates the real model state
        sim_model = ClinicalTrialsModel(self.model.state_variables, self.model.decision_variables, self.model.initial_state, True)
        sim_model.state = copy.deepcopy(state)
        if stop_A == False:
            value_dict = {}  # memo cache shared across the recursive lookahead
            sol_dict, value_dict = model_A_value_fn(sim_model, 0, success_A, value_dict)
            new_decision = sol_dict['optimal_enroll']
        else:
            new_decision = 0  # trial already stopped: enroll nobody
        return new_decision

    def model_B_policy(self, state, info_tuple):
        """
        implements lookahead policy based on Model B
        :param state: namedtuple - the state of the model at a given time
        :param info_tuple: tuple - (success index, stop flag) for Model B
        :return: a decision made based on the policy
        """
        success_B = info_tuple[0]
        stop_B = info_tuple[1]
        sim_model = ClinicalTrialsModel(self.model.state_variables, self.model.decision_variables, self.model.initial_state, True)
        sim_model.state = copy.deepcopy(state)
        if stop_B == False:
            value_dict = {}  # memo cache shared across the recursive lookahead
            sol_dict, value_dict = model_B_value_fn(sim_model, 0, success_B, value_dict)
            new_decision = sol_dict['optimal_enroll']
        else:
            new_decision = 0
        return new_decision

    def model_C_extension_policy(self, state, info_tuple):
        """
        implements lookahead policy based on the extension of Model C
        :param state: namedtuple - the state of the model at a given time
        :param info_tuple: tuple - (success index, stop flag) for the Model C extension
        :return: a decision made based on the policy
        """
        success_C_extension = info_tuple[0]
        stop_C_extension = info_tuple[1]
        sim_model = ClinicalTrialsModel(self.model.state_variables, self.model.decision_variables, self.model.initial_state, True)
        sim_model.state = copy.deepcopy(state)
        if stop_C_extension == False:
            value_dict = {}
            sol_dict, value_dict = model_C_extension_value_fn(sim_model, 0, success_C_extension, value_dict)
            new_decision = sol_dict['optimal_enroll']
        else:
            new_decision = 0
        return new_decision

    def model_C_policy(self, state, info_tuple, time):
        """
        implements hybrid policy for Model C using backward ADP
        :param state: namedtuple - the state of the model at a given time
        :param info_tuple: tuple - (success index, stop flag) for Model C
        :param time: int - start time
        :return: a decision made based on the policy
        """
        success_C = info_tuple[0]
        stop_C = info_tuple[1]
        sim_model = ClinicalTrialsModel(self.model.state_variables, self.model.decision_variables, self.model.initial_state, True)
        sim_model.state = copy.deepcopy(state)
        # fit value-function approximations (one per time step) by Monte Carlo simulation
        parameters = parameters_fn(sim_model)
        if stop_C == True: new_decision = 0
        else:
            vals = []
            decs = []
            # enumerate candidate enrollment counts and score each with the fitted value function
            for x_enroll in range(self.model.initial_state['enroll_min'], self.model.initial_state['enroll_max']+self.model.initial_state['enroll_step'], self.model.initial_state['enroll_step']):
                pseudo_state = [state.potential_pop + x_enroll, state.success, state.failure, state.l_response]
                # fewer than 8 fitted parameters means the linear fit was used for this t
                if len(parameters[time]) < 8:
                    value = func_simple(pseudo_state, parameters[time][0], parameters[time][1], parameters[time][2], parameters[time][3])
                else:
                    value = func(pseudo_state, parameters[time][0], parameters[time][1], parameters[time][2], parameters[time][3], parameters[time][4], parameters[time][5], parameters[time][6], parameters[time][7])
                cost = -(self.model.initial_state['program_cost'] + self.model.initial_state['patient_cost'] * x_enroll)
                vals.append(value + cost)
                decs.append(x_enroll)
            # pick the enrollment count maximizing fitted value net of cost
            val_max = max(vals)
            new_decision = decs[vals.index(val_max)]
        return new_decision

    def run_policy(self, policy_info, policy, t):
        """
        runs the model with a selected policy
        :param policy_info: dict - dictionary of policies and their associated parameters
        :param policy: str - the name of the chosen policy
        :param t: int - start time
        :return: float - calculated contribution
        """
        time_run = time.time()
        # run on a deep copy so several policies can be compared from the same base model
        model_copy = copy.deepcopy(self.model)
        while t <= model_copy.initial_state['trial_size'] and policy_info[policy][1] == False:
            time_t = time.time()
            # build decision policy
            p = self.build_policy(policy_info)
            # implements sampled distribution for p_true
            p_true_samples = np.random.beta(model_copy.state.success, model_copy.state.failure, model_copy.initial_state['K'])
            p_belief = model_copy.state.success / (model_copy.state.success + model_copy.state.failure)
            # drug_success = 1 if successful, 0 if failure, -1 if continue trial (for all policies)
            if p_belief > model_copy.initial_state['theta_stop_high']:
                decision = {'prog_continue': 0, 'drug_success': 1}
                policy_info[policy][1] = True
            elif p_belief < model_copy.initial_state['theta_stop_low']:
                decision = {'prog_continue': 0, 'drug_success': 0}
                policy_info[policy][1] = True
            else:
                decision = {'prog_continue': 1, 'drug_success': -1}
            # makes enrollment decision based on chosen policy
            if policy == "model_A":
                decision['enroll'] = self.model_A_policy(model_copy.state, p.model_A)
            elif policy == "model_B":
                decision['enroll'] = self.model_B_policy(model_copy.state, p.model_B)
            elif policy == "model_C_extension":
                decision['enroll'] = self.model_C_extension_policy(model_copy.state, p.model_C_extension)
            elif policy == "model_C":
                decision['enroll'] = self.model_C_policy(model_copy.state, p.model_C, t)
            x = model_copy.build_decision(decision)
            print("Base Model t={}, obj={:,}, state.potential_pop={}, state.success={}, state.failure={}, x={}, elapsed time={:.2f} sec".format(t, model_copy.objective,
                  model_copy.state.potential_pop,
                  model_copy.state.success,
                  model_copy.state.failure, x, time.time()-time_t))
            # steps the model forward one iteration
            model_copy.step(x)
            # updates policy info
            policy_info[policy][0] = decision['drug_success']
            # increments time
            t += 1
        print("Base Model: Stopping time t={}, obj(revenue)={:,}, rhobar={:.2f}, Elapsed time={:.2f} sec".format(t, model_copy.objective, model_copy.state.success/(model_copy.state.success+model_copy.state.failure), time.time()-time_run))
        policy_value = model_copy.objective
        return policy_value
def model_A_value_fn(model, iteration, success_index, value_dict):
    """
    solves the deterministic shortest path problem for Model A (over given horizon);
    returns the value of the current state and its optimal number of new potential patients to enroll
    :param model: ClinicalTrialsModel - model which contains all state variables (physical and belief states)
    :param iteration: int - tracks the horizon in the deteministic shortest path problem
    :param success_index: int - 1 if drug is declared successful, 0 if failure, -1 if continue trial
    :param value_dict: dict - memo cache: (iteration, potential_pop) -> [value, hit count]
    :return: (dict with 'value' and 'optimal_enroll', updated value_dict)
    """
    # computes value and optimal enrollments corresponding to current state
    if success_index == -1:
        if iteration < model.initial_state['H']:
            bellman_vals = []
            bellman_decisions = []
            for x_enroll in range(model.initial_state['enroll_min'], model.initial_state['enroll_max']+model.initial_state['enroll_step'], model.initial_state['enroll_step']):
                bellman_potential_pop = model.state.potential_pop + x_enroll
                #print("Starting bellman - ite: {}, R: {}, x_enroll: {}, R_t+1: {}".format(iteration, model.state.potential_pop,x_enroll,bellman_potential_pop))
                # immediate (negative) cost of running the program and enrolling x_enroll patients
                bellman_cost = -(model.initial_state['program_cost'] + model.initial_state['patient_cost'] * x_enroll)
                bellman_state = copy.deepcopy(model.initial_state)
                bellman_state['potential_pop'] = bellman_potential_pop
                bellman_M = ClinicalTrialsModel(model.state_variables, model.decision_variables, bellman_state, True)
                value_key = (iteration+1, bellman_state['potential_pop'])
                count = -1
                # the drug success probability stays fixed
                bellman_p_belief = bellman_M.state.success / (bellman_M.state.success + bellman_M.state.failure)
                if bellman_p_belief > bellman_M.initial_state['theta_stop_high']:
                    # NOTE(review): this rebinds the caller-level success_index inside the
                    # loop, so later x_enroll iterations skip recursion; the belief is the
                    # same for every x_enroll here, but confirm this is intentional.
                    success_index = 1
                    step_value = model.initial_state['success_rev']
                elif bellman_p_belief < bellman_M.initial_state['theta_stop_low']:
                    success_index = 0
                    step_value = 0
                else:
                    if value_key in value_dict:
                        # memo hit: reuse the cached downstream value
                        step_value = value_dict[value_key][0]
                        count = value_dict[value_key][1]
                        #print("key: {} value: {:.2f} count: {} lendict:{}".format(value_key,step_value,count,len(value_dict)))
                    else:
                        sol_dict, value_dict = model_A_value_fn(bellman_M, iteration+1, success_index, value_dict)
                        step_value = sol_dict['value']
                    value_dict.update({value_key: [step_value, count+1]})
                bellman_cost += step_value
                bellman_decisions.append(x_enroll)
                bellman_vals.append(bellman_cost)
                #print("Ending - ite: {}, R: {}, x_enroll: {}, R_t+1: {}, Cost: {}".format(iteration, model.state.potential_pop,x_enroll,bellman_potential_pop,bellman_cost))
            # Bellman maximization over the candidate enrollment counts
            value = max(bellman_vals)
            optimal_enroll = bellman_decisions[bellman_vals.index(value)]
            #print("********Ending State- ite: {}, R: {}, arg_max: {}, opt_value {} ".format(iteration, model.state.potential_pop,optimal_enroll,value))
            return {"value": value,
                    "optimal_enroll": optimal_enroll}, value_dict
        # stops iterating at horizon t' = t + H
        else:
            return {"value": 0, "optimal_enroll": 0}, value_dict
    # stops experiment at node if drug is declared success or failure
    else: return {"value": model.initial_state['success_rev'] * success_index,
                  "optimal_enroll": 0}, value_dict
def model_B_value_fn(model, iteration, success_index, value_dict):
    """
    solves the stochastic lookahead problem for Model B (over given horizon);
    returns the value of the current state and its optimal number of new potential patients to enroll
    :param model: ClinicalTrialsModel - model which contains all state variables (physical and belief states)
    :param iteration: int - tracks the horizon in the stochastic lookahead problem
    :param success_index: int - 1 if drug is declared successful, 0 if failure, -1 if continue trial
    :param value_dict: dict - memo cache: (iteration, pop, success, failure) -> [value, hit count]
    :return: (dict with 'value' and 'optimal_enroll', updated value_dict)
    """
    # computes value and optimal enrollments corresponding to current state
    if success_index == -1:
        if iteration < model.initial_state['H']:
            bellman_vals = []
            bellman_decisions = []
            for x_enroll in range(model.initial_state['enroll_min'], model.initial_state['enroll_max']+model.initial_state['enroll_step'], model.initial_state['enroll_step']):
                # "simulated" exogenous info that helps us get from (t, t') to (t, t'+1)
                bellman_potential_pop = model.state.potential_pop + x_enroll
                bellman_enrollments = math.floor(model.state.l_response * bellman_potential_pop)
                bellman_cost = -(model.initial_state['program_cost'] + model.initial_state['patient_cost'] * x_enroll)
                # loops over success values in increments of step_succ
                step_succ = int(bellman_enrollments / 3) + 1
                for set_succ in range(0, bellman_enrollments, step_succ):
                    bellman_state = copy.deepcopy(model.initial_state)
                    bellman_state['potential_pop'] = bellman_potential_pop
                    bellman_state['success'] = model.state.success + set_succ
                    bellman_state['failure'] = model.state.failure + (bellman_enrollments - set_succ)
                    bellman_M = ClinicalTrialsModel(model.state_variables, model.decision_variables, bellman_state, True)
                    value_key = (iteration+1, bellman_state['potential_pop'], bellman_state['success'], bellman_state['failure'])
                    count = -1
                    # implements sampled distribution for bellman_p_true
                    bellman_p_samples = np.random.beta(bellman_M.state.success, bellman_M.state.failure, bellman_M.initial_state['K'])
                    bellman_p_belief = bellman_M.state.success / (bellman_M.state.success + bellman_M.state.failure)
                    if bellman_p_belief > bellman_M.initial_state['theta_stop_high']:
                        success_index = 1
                        step_value = model.initial_state['success_rev']
                        #print("LA State: {}, ({}, {}), {} - Stopping time {}".format(bellman_state['potential_pop'],bellman_state['success'],bellman_state['failure'],model.state.l_response,iteration))
                    elif bellman_p_belief < bellman_M.initial_state['theta_stop_low']:
                        success_index = 0
                        step_value = 0
                    else:
                        if value_key in value_dict:
                            # memo hit: reuse the cached downstream value
                            step_value = value_dict[value_key][0]
                            count = value_dict[value_key][1]
                            #print("key: {} value: {:.2f} count: {} lendict:{}".format(value_key,step_value,count,len(value_dict)))
                        else:
                            sol_dict, value_dict = model_B_value_fn(bellman_M, iteration+1, success_index, value_dict)
                            step_value = sol_dict['value']
                        value_dict.update({value_key: [step_value, count+1]})
                    # expectation over the K sampled success probabilities
                    for k in range(0, bellman_M.initial_state['K']):
                        bellman_cost += binom.pmf(set_succ, bellman_enrollments, bellman_p_samples[k]) * 1/bellman_M.initial_state['K'] * step_value
                bellman_decisions.append(x_enroll)
                bellman_vals.append(bellman_cost)
            # Bellman maximization over the candidate enrollment counts
            value = max(bellman_vals)
            optimal_enroll = bellman_decisions[bellman_vals.index(value)]
            return {"value": value, "optimal_enroll": optimal_enroll}, value_dict
        # stops iterating at horizon t' = t + H
        else: return {"value": 0, "optimal_enroll": 0}, value_dict
    # stops experiment at node if drug is declared success or failure
    else: return {"value": model.initial_state['success_rev'] * success_index, "optimal_enroll": 0}, value_dict
def model_C_extension_value_fn(model, iteration, success_index, value_dict):
    """
    solves the stochastic lookahead version for Model C (over given horizon);
    returns the value of the current state and its optimal number of new potential patients to enroll
    :param model: ClinicalTrialsModel - model which contains all state variables (physical and belief states)
    :param iteration: int - tracks the horizon in the stochastic lookahead problem
    :param success_index: int - 1 if drug is declared successful, 0 if failure, -1 if continue trial
    :param value_dict: dict - memo cache keyed by (iteration, pop, success, failure, l_response)
    :return: (dict with 'value' and 'optimal_enroll', updated value_dict)
    """
    if success_index == -1:
        if iteration < model.initial_state['H']:
            bellman_vals = []
            bellman_decisions = []
            for x_enroll in range(model.initial_state['enroll_min'], model.initial_state['enroll_max']+model.initial_state['enroll_step'], model.initial_state['enroll_step']):
                bellman_potential_pop = model.state.potential_pop + x_enroll
                bellman_cost = -(model.initial_state['program_cost'] + model.initial_state['patient_cost'] * x_enroll)
                bellman_r_bar = math.floor(model.state.l_response * bellman_potential_pop)
                trunc_probs = trunc_poisson_fn(x_enroll, bellman_r_bar)
                # loops over enrollment and success counts in increments of step_pop and step_succ respectively
                # fix: int(x_enroll / 10) is 0 whenever x_enroll < 10 and range()
                # raises ValueError on a zero step; clamp to at least 1
                # (identical stepping to before for x_enroll >= 10)
                step_pop = max(1, int(x_enroll / 10))
                for set_pop in range(0, x_enroll, step_pop):
                    step_succ = int(set_pop / 3) + 1
                    for set_succ in range(0, set_pop, step_succ):
                        bellman_state = copy.deepcopy(model.initial_state)
                        bellman_state['potential_pop'] = bellman_potential_pop
                        bellman_state['success'] = model.state.success + set_succ
                        bellman_state['failure'] = model.state.failure + (set_pop - set_succ)
                        # exponential smoothing of the enrollment-response belief
                        bellman_state['l_response'] = (1-model.initial_state['alpha']) * model.state.l_response + model.initial_state['alpha'] * set_pop/bellman_potential_pop
                        bellman_M = ClinicalTrialsModel(model.state_variables, model.decision_variables, bellman_state, True)
                        value_key = (iteration+1, bellman_state['potential_pop'], bellman_state['success'], bellman_state['failure'], bellman_state['l_response'])
                        count = -1
                        # implements sampled distribution for bellman_p_true
                        bellman_p_samples = np.random.beta(bellman_M.state.success, bellman_M.state.failure, bellman_M.initial_state['K'])
                        bellman_p_belief = bellman_M.state.success / (bellman_M.state.success + bellman_M.state.failure)
                        if bellman_p_belief > bellman_M.initial_state['theta_stop_high']:
                            success_index = 1
                            step_value = model.initial_state['success_rev']
                        elif bellman_p_belief < bellman_M.initial_state['theta_stop_low']:
                            success_index = 0
                            step_value = 0
                        else:
                            if value_key in value_dict:
                                # memo hit: reuse the cached downstream value
                                step_value = value_dict[value_key][0]
                                count = value_dict[value_key][1]
                            else:
                                sol_dict, value_dict = model_C_extension_value_fn(bellman_M, iteration+1, success_index, value_dict)
                                step_value = sol_dict['value']
                            value_dict.update({value_key: [step_value, count+1]})
                        # expectation over sampled success probabilities and the truncated-Poisson enrollment count
                        for k in range(0, bellman_M.initial_state['K']):
                            bellman_cost += binom.pmf(set_succ, set_pop, bellman_p_samples[k]) * trunc_probs[set_pop] * 1/bellman_M.initial_state['K'] * step_value
                bellman_decisions.append(x_enroll)
                bellman_vals.append(bellman_cost)
            value = max(bellman_vals)
            optimal_enroll = bellman_decisions[bellman_vals.index(value)]
            return {"value": value,
                    "optimal_enroll": optimal_enroll}, value_dict
        # stops iterating at horizon t' = t + H
        else: return {"value": 0,
                      "optimal_enroll": 0}, value_dict
    # stops experiment at node if drug is declared success or failure
    else: return {"value": model.initial_state['success_rev'] * success_index,
                  "optimal_enroll": 0}, value_dict
#
def func_simple(pseudo_state, a, b, c, d):
    """
    linear fit function for the Bellman value at given pseudo-state (for small number of data points)
    :param pseudo_state: list(float) - list of the four state variables for a given state
    :param a, b, c, d: float - coefficients of the linear fit function (docstring
        previously listed a nonexistent fifth parameter `e`)
    :return: float - fitted Bellman value
    """
    # return the weighted sum directly (previous version shadowed builtin `sum`)
    return a*pseudo_state[0] + b*pseudo_state[1] + c*pseudo_state[2] + d*pseudo_state[3]
def func(pseudo_state, a1, a2, b1, b2, c1, c2, d1, d2):
    """
    quadratic fit function for the Bellman value at given pseudo-state
    :param pseudo_state: list(float) - list of the four state variables for a given state
    :param a1, a2, ... d2: float - parameters of the quadratic fit function
    :return: float - fitted Bellman value
    """
    # accumulate one quadratic term per state variable (renamed from `sum`,
    # which shadowed the builtin)
    total = a1*pseudo_state[0]**2 + a2*pseudo_state[0]
    total += b1*pseudo_state[1]**2 + b2*pseudo_state[1]
    total += c1*pseudo_state[2]**2 + c2*pseudo_state[2]
    total += d1*pseudo_state[3]**2 + d2*pseudo_state[3]
    return total
def parameters_fn(model):
    """
    simulates enrollment paths; then fits the values at each state using a linear or a quadratic fit function
    returns parameters for the linear/quadratic fit functions at each t
    :param model: ClinicalTrialsModel - model that we simulate on (contains all state variables)
    :return parameters: list(list(float)) - parameters for the fit functions at each t
    """
    samples = [[] for n in range(model.initial_state['sampling_size'])]
    values = [[] for n in range(model.initial_state['sampling_size'])]
    # forward-simulate sampling_size trial paths under a random enrollment policy
    for n in range(model.initial_state['sampling_size']):
        sample_t = 0
        stop = False
        sample_M = ClinicalTrialsModel(model.state_variables, model.decision_variables, model.initial_state, True)
        while sample_t <= model.initial_state['trial_size'] and stop == False:
            p_belief = sample_M.state.success / (sample_M.state.success + sample_M.state.failure)
            p_true_samples = np.random.beta(sample_M.state.success, sample_M.state.failure, sample_M.initial_state['K'])
            # drug_success = 1 if successful, 0 if failure, -1 if continue trial
            if p_belief > model.initial_state['theta_stop_high']:
                decision = {'prog_continue': 0, 'drug_success': 1}
                stop = True
            elif p_belief < model.initial_state['theta_stop_low']:
                decision = {'prog_continue': 0, 'drug_success': 0}
                stop = True
            else:
                decision = {'prog_continue': 1, 'drug_success': -1}
            # random exploratory enrollment while the trial continues, none once stopped
            decision['enroll'] = np.random.choice(range(model.initial_state['enroll_min'], model.initial_state['enroll_max']+model.initial_state['enroll_step'], model.initial_state['enroll_step'])) if stop == False else 0
            x = sample_M.build_decision(decision)
            # post-decision state used as the regression feature vector
            pseudo_state = [sample_M.state.potential_pop + decision['enroll'], sample_M.state.success, sample_M.state.failure, sample_M.state.l_response]
            sample_M.step(x)
            sample_t += 1
            samples[n].append(pseudo_state)
            values[n].append(sample_M.objective)
    parameters = []
    # fit one value-function approximation per time step
    for t_fct in range(model.initial_state['trial_size']+1):
        samples_list = []
        values_list = []
        # collect (state, value) pairs from every path that survived to time t_fct
        for n in range(model.initial_state['sampling_size']):
            if (len(samples[n]) >= (t_fct + 1)):
                samples_list.append(samples[n][t_fct])
                values_list.append(values[n][t_fct])
        samples_array = np.array(samples_list)
        values_array = np.array(values_list)
        if t_fct == 0:
            # at t=0 all paths start from the same state: deduplicate rows before fitting
            all_matrix = np.c_[samples_array, values_array]
            new_array = [tuple(row) for row in all_matrix]
            uniques = np.unique(new_array, axis=0)
            samples_array = uniques[:, 0:4]
            values_array = uniques[:, 4]
            parameters.append(np.matrix.tolist(scipy.optimize.curve_fit(func_simple, np.matrix.transpose(samples_array), values_array)[0]))
        else:
            # quadratic fit has 8 parameters, so it needs at least 8 data points;
            # otherwise fall back to the 4-parameter linear fit
            if len(values_array) >= 8:
                parameters.append(np.matrix.tolist(scipy.optimize.curve_fit(func, np.matrix.transpose(samples_array), values_array)[0]))
            else:
                parameters.append(np.matrix.tolist(scipy.optimize.curve_fit(func_simple, np.matrix.transpose(samples_array), values_array)[0]))
    return parameters
if __name__ == "__main__":
    # this is an example of creating a model, using a chosen policy, and running until the drug is declared a success/failure or
    # we reach the maximum number of trials
    policy_names = ['model_A', 'model_B', 'model_C', 'model_C_extension']
    state_variables = ['potential_pop', 'success', 'failure', 'l_response']
    # extracts data from given data set; defines initial state
    file = 'Trials Parameters.xlsx'
    raw_data = pd.ExcelFile(file)
    data = raw_data.parse('Exogenous Data')
    # initial-state values are read row by row from the 'Exogenous Data' sheet
    initial_state = {'potential_pop': float(data.iat[0, 0]),
                     'success': data.iat[1, 0],
                     'failure': float(data.iat[2, 0]),
                     'l_response': float(data.iat[3, 0]),
                     'theta_stop_low': data.iat[4, 0],    # belief threshold to declare failure
                     'theta_stop_high': data.iat[5, 0],   # belief threshold to declare success
                     'alpha': data.iat[6, 0],             # smoothing factor for l_response
                     'K': int(data.iat[7, 0]),            # number of sampled success probabilities
                     'N': int(data.iat[8, 0]),
                     'trial_size': int(data.iat[9, 0]),
                     'patient_cost': data.iat[10, 0],
                     'program_cost': data.iat[11, 0],
                     'success_rev': data.iat[12, 0],
                     'sampling_size': int(data.iat[13, 0]),
                     'enroll_min': int(data.iat[14, 0]),
                     'enroll_max': int(data.iat[15, 0]),
                     'enroll_step': int(data.iat[16, 0]),
                     'H': int(data.iat[17, 0]),           # lookahead horizon
                     'true_l_response': data.iat[18, 0],
                     'true_succ_rate': data.iat[19, 0]}
    decision_variables = ['enroll', 'prog_continue', 'drug_success']
    M = ClinicalTrialsModel(state_variables, decision_variables, initial_state, False)
    P = ClinicalTrialsPolicy(M, policy_names)
    t = 0
    stop = False
    # per-policy [drug_success index, stop flag] bookkeeping
    policy_info = {'model_A': [-1, stop],
                   'model_B': [-1, stop],
                   'model_C': [-1, stop],
                   'model_C_extension': [-1, stop]}
    # an example of running the Model B policy
    P.run_policy(policy_info, "model_B", t)
    pass
import socket
# Simple UDP flooder: repeatedly sends MESSAGE to UDP_IP:UDP_PORT.
UDP_IP = "0.0.0.0"
UDP_PORT = 9000
MESSAGE = "Hello, World!"

# print "UDP target IP:", UDP_IP
# print "UDP target port:", UDP_PORT
# print "message:", MESSAGE

sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) # UDP

# fix: the encoding name was "utf=8", which makes every send raise
# LookupError: unknown encoding. Encode once, outside the loop.
payload = MESSAGE.encode("utf-8")
# NOTE(review): this loop sends as fast as possible with no pacing; add a
# sleep if a flood is not intended.
while True:
    sock.sendto(payload, (UDP_IP, UDP_PORT))
|
"""v0.1
Revision ID: 787738fb2362
Revises: f6778600730b
Create Date: 2019-06-22 17:57:41.370758
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '787738fb2362'        # this migration's id
down_revision = 'f6778600730b'   # migration this one follows
branch_labels = None
depends_on = None
def upgrade():
    """Apply schema changes for this revision.

    Adds explicit unique constraints on the ``id`` column of all four
    tables and drops the profile columns of ``users`` that are no longer
    used.  The 'incedents'/'comfirm*' spellings match the existing schema.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_unique_constraint(None, 'files', ['id'])
    op.create_unique_constraint(None, 'incedents', ['id'])
    op.create_unique_constraint(None, 'incedents_points', ['id'])
    op.create_unique_constraint(None, 'users', ['id'])
    op.drop_column('users', 'name')
    op.drop_column('users', 'comfirmed')
    op.drop_column('users', 'comfirm_code')
    op.drop_column('users', 'email')
    # ### end Alembic commands ###
def downgrade():
    """Revert this revision: restore the dropped ``users`` columns and
    remove the unique constraints added by :func:`upgrade`.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('users', sa.Column('email', sa.VARCHAR(), autoincrement=False, nullable=True))
    op.add_column('users', sa.Column('comfirm_code', sa.BOOLEAN(), autoincrement=False, nullable=True))
    op.add_column('users', sa.Column('comfirmed', sa.BOOLEAN(), autoincrement=False, nullable=True))
    op.add_column('users', sa.Column('name', sa.VARCHAR(), autoincrement=False, nullable=True))
    op.drop_constraint(None, 'users', type_='unique')
    op.drop_constraint(None, 'incedents_points', type_='unique')
    op.drop_constraint(None, 'incedents', type_='unique')
    op.drop_constraint(None, 'files', type_='unique')
    # ### end Alembic commands ###
|
class Solution:
    """Enumerate every subset (the power set) of a list via DFS.

    Each prefix-extension path is appended once; the membership check
    only matters when the input contains duplicate values.
    """

    def __init__(self):
        # Collects every distinct subset discovered during the walk.
        self.result = []

    def dfs(self, nums, index, path):
        """Record ``path`` and recurse on each element from ``index`` onward."""
        if path not in self.result:
            self.result.append(path)
        for position, value in enumerate(nums[index:], start=index):
            self.dfs(nums, position + 1, path + [value])

    def solution(self, nums):
        """Return the list of all subsets of ``nums`` in DFS order."""
        self.dfs(nums, 0, [])
        return self.result
class BookSolution:
    """Power-set enumeration without the duplicate check (book version).

    BUG FIX: ``dfs`` referenced a free variable ``nums`` that was never
    defined at module scope, so any call to ``solution`` raised NameError.
    The input list is now stored on the instance and read from there; the
    method signatures are unchanged.
    """

    def __init__(self):
        self.result = []
        self.nums = []  # set by solution(); read by dfs()

    def dfs(self, index, path):
        """Record ``path``, then extend it with each element from ``index`` on."""
        self.result.append(path)
        for i in range(index, len(self.nums)):
            self.dfs(i + 1, path + [self.nums[i]])

    def solution(self, nums):
        """Return all subsets of ``nums`` in DFS order."""
        self.nums = nums
        self.dfs(0, [])
        return self.result
# Demo entry point: print the power set of [1, 2, 3].
if __name__ == "__main__":
    print(Solution().solution([1, 2, 3]))
|
# -*- coding: utf-8 -*-
"""
Created on Fri Oct 19 00:31:03 2018
@author: hongx
"""
import numpy as np
import pandas as pd
import seaborn as sns
import matplotlib.pyplot as plt
# Load the Billboard lyrics-sentiment table and derive a per-song year.
music = pd.read_csv("../data/lyrics_sentiment_no_lyrics.csv")
#music.head()
d11 = music["date"]
music["right_date"] = pd.to_datetime(d11)
music["year"] = music["right_date"].map(lambda x: x.year)
# Small frame of (year, weeks-on-board) pairs used only for plotting.
dfnew1 = pd.DataFrame()
dfnew1["date"] = music["year"]
dfnew1["count"] = music['total weeks']
sns.set(style="whitegrid")
year = dfnew1["date"]
week = dfnew1["count"]
# NOTE(review): boxenplot's "scale" keyword was deprecated/renamed in newer
# seaborn releases — confirm against the seaborn version this runs on.
g = sns.boxenplot(x=year, y=week,
                  color="b",
                  scale="linear", data=dfnew1)
g.set(xlabel="Years", ylabel="Weeks")
g.set(title="Comparison of the Weeks that Songs stayed on Board in Different Years")
plt.show()
|
__author__ = 'nahla.errakik'
import pandas as pd
def get_ind_file(filetype):
    """
    Load and format the Ken French 30 Industry Portfolios files.

    filetype: one of "returns", "nfirms" or "size".
    Returns a DataFrame indexed by monthly Period with whitespace-stripped
    column names; returns are rescaled from percentages to fractions.
    Raises ValueError for an unknown filetype.
    """
    known_types = ["returns", "nfirms", "size"]
    if filetype not in known_types:
        raise ValueError(f"filetype must be one of:{','.join(known_types)}")
    # BUG FIX: the original compared strings with "is" (object identity),
    # which is only accidentally true for interned literals and emits a
    # SyntaxWarning on modern CPython; use "==" (equality) instead.
    if filetype == "returns":
        name = "vw_rets"
        divisor = 100  # returns are stored as percentages
    elif filetype == "nfirms":
        name = "nfirms"
        divisor = 1
    else:
        # "size" is the only remaining known type (validated above), so the
        # original's dead name=None fallback branch is gone.
        name = "size"
        divisor = 1
    ind = pd.read_csv(f"data/ind30_m_{name}.csv", header=0, index_col=0) / divisor
    ind.index = pd.to_datetime(ind.index, format="%Y%m").to_period('M')
    ind.columns = ind.columns.str.strip()
    return ind
# Thin convenience wrappers around get_ind_file for each known filetype.
def get_ind_returns():
    """
    Load and format the Ken French 30 Industry Portfolios Value Weighted Monthly Returns
    """
    return get_ind_file("returns")


def get_ind_nfirms():
    """
    Load and format the Ken French 30 Industry Portfolios Average number of Firms
    """
    return get_ind_file("nfirms")


def get_ind_size():
    """
    Load and format the Ken French 30 Industry Portfolios Average size (market cap)
    """
    return get_ind_file("size")
def get_total_market_index_returns():
    """
    Load the 30 industry portfolio data and derive the returns of a
    cap-weighted total market index.

    Returns a Series of monthly total-market returns.
    """
    ind_nfirms = get_ind_nfirms()
    ind_size = get_ind_size()
    ind_return = get_ind_returns()
    # Market cap per industry = number of firms x average firm size.
    ind_mktcap = ind_nfirms * ind_size
    total_mktcap = ind_mktcap.sum(axis=1)
    # Per-industry weight each month; each row sums to 1.
    ind_capweight = ind_mktcap.divide(total_mktcap, axis="rows")
    total_market_return = (ind_capweight * ind_return).sum(axis="columns")
    return total_market_return
|
"""
This is the test script
"""
# flake8: noqa W191
import sys
import pandas as pd
sys.path.append("ibmcloudsql")
import ibmcloudsql # noqa
import test_credentials # noqa
# Import the rate-limit exception from the package, falling back to a
# relative import when this script runs as part of the package itself.
try:
    from exceptions import RateLimitedException
except Exception:
    from .exceptions import RateLimitedException

pd.set_option("display.max_colwidth", None)
pd.set_option("display.max_columns", 20)

# Normalize the COS result location to end with a slash.
if test_credentials.result_location[-1] != "/":
    test_credentials.result_location += "/"

# First construction without target_cos_url exercises the legacy signature;
# the second (kept) client passes the target explicitly.
sqlClient = ibmcloudsql.SQLQuery(
    test_credentials.apikey,
    test_credentials.instance_crn,
    client_info="ibmcloudsql test",
)  # maintain backward compatible
sqlClient = ibmcloudsql.SQLQuery(
    test_credentials.apikey,
    test_credentials.instance_crn,
    target_cos_url=test_credentials.result_location,
    client_info="ibmcloudsql test",
)
sqlClient.logon()
sqlClient.sql_ui_link()
# --- Output-format tests: same query into Parquet / ORC / CSV / JSON.
# NOTE(review): the "restults" typo in the log messages is pre-existing
# runtime output and preserved as-is.
print("Running test with individual method invocation and Parquet target:")
jobId = sqlClient.submit_sql(
    "SELECT * FROM cos://us-geo/sql/employees.parquet STORED AS PARQUET LIMIT 10 INTO {} STORED AS PARQUET".format(
        test_credentials.result_location
    )
)
sqlClient.wait_for_job(jobId)
result_df = sqlClient.get_result(jobId)
print(
    "jobId {} restults are stored in {}. Result set is:".format(
        jobId, sqlClient.get_job(jobId)["resultset_location"]
    )
)
print(result_df.head(200))
print("Running test with individual method invocation and ORC target:")
jobId = sqlClient.submit_sql(
    "SELECT * FROM cos://us-geo/sql/employees.parquet STORED AS PARQUET LIMIT 10 INTO {} STORED AS ORC".format(
        test_credentials.result_location
    )
)
sqlClient.wait_for_job(jobId)
# ORC results cannot be fetched into a DataFrame; a ValueError is expected.
try:
    result_df = sqlClient.get_result(jobId)
except ValueError as e:
    print(e)
print("Running test with individual method invocation and CSV target:")
jobId = sqlClient.submit_sql(
    "SELECT * FROM cos://us-geo/sql/employees.parquet STORED AS PARQUET LIMIT 10 INTO {} STORED AS CSV".format(
        test_credentials.result_location
    )
)
sqlClient.wait_for_job(jobId)
result_df = sqlClient.get_result(jobId)
print(
    "jobId {} restults are stored in {}. Result set is:".format(
        jobId, sqlClient.get_job(jobId)["resultset_location"]
    )
)
print(result_df.head(200))
print("Running test with individual method invocation and JSON target:")
# Nested-structure query whose result only round-trips through JSON.
jobId = sqlClient.submit_sql(
    "WITH orders as (SELECT customerid, named_struct('count', count(orderid), 'orderids', collect_list(orderid)) orders \
    FROM cos://us-geo/sql/orders.parquet STORED AS PARQUET \
    GROUP BY customerid) \
    SELECT c.customerid, \
    named_struct('name', companyname, 'contact', contactname, 'tile', contacttitle, 'phone', PHONE) company, \
    named_struct('street', address, 'city', city, 'zip', postalcode, 'country', country) address, \
    orders \
    FROM cos://us-geo/sql/customers.parquet STORED AS PARQUET c, \
    orders o \
    WHERE o.customerid=c.customerid \
    INTO {} STORED AS JSON".format(
        test_credentials.result_location
    )
)
sqlClient.wait_for_job(jobId)
result_df = sqlClient.get_result(jobId)
print(
    "jobId {} restults are stored in {}. Result set is:".format(
        jobId, sqlClient.get_job(jobId)["resultset_location"]
    )
)
print(result_df.head(10))
# --- Rate limiting: first without retries (RateLimitedException expected
# once instance_rate_limit concurrent jobs exist), then with max_tries=5
# so submissions retry transparently instead of raising.
print("Running test rate limiting without retry. Expecting RateLimitedException:")
sql = "WITH prefiltered_hospitals AS ( \
    SELECT c.name county, c.shape_WKT county_location, h.name hospital, MAX(h.location) hospital_location \
    FROM cos://us-geo/sql/counties.parquet STORED AS PARQUET c, \
    cos://us-geo/sql/hospitals.parquet STORED AS PARQUET h \
    WHERE c.state_name='Washington' AND h.x between c.xmin and c.xmax AND h.y between c.ymin and c.ymax \
    GROUP BY c.name, c.shape_WKT, h.name) \
    SELECT county, hospital FROM prefiltered_hospitals \
    WHERE ST_Intersects(ST_WKTToSQL(hospital_location), ST_WKTToSQL(county_location)) \
    INTO {} STORED AS CSV".format(
    test_credentials.result_location
)
jobidArray = []
try:
    for x in range(test_credentials.instance_rate_limit + 1):
        jobidArray.append(sqlClient.submit_sql(sql))
except RateLimitedException as e:
    print(e)
# Drain whatever jobs did get submitted before moving on.
for jobId in jobidArray:
    sqlClient.wait_for_job(jobId)
print("Running test rate limiting with retry:")
sqlClientRetry = ibmcloudsql.SQLQuery(
    test_credentials.apikey,
    test_credentials.instance_crn,
    target_cos_url=test_credentials.result_location,
    client_info="ibmcloudsql test",
    max_tries=5,
)
sqlClientRetry.logon()
jobidArray = []
for x in range(test_credentials.instance_rate_limit + 1):
    jobidArray.append(sqlClientRetry.submit_sql(sql))
for jobId in jobidArray:
    sqlClientRetry.wait_for_job(jobId)
# --- Partitioned CSV output, exact-target object naming, result deletion.
print("Running test with partitioned CSV target:")
jobId = sqlClient.submit_sql(
    "SELECT * FROM cos://us-geo/sql/employees.parquet STORED AS PARQUET INTO {} STORED AS CSV PARTITIONED BY (city)".format(
        test_credentials.result_location
    )
)
sqlClient.wait_for_job(jobId)
result_objects_df = sqlClient.list_results(jobId)
print(result_objects_df.head(200))
result_df = sqlClient.get_result(jobId)
print(
    "jobId {} results are stored in {}. Result set is:".format(
        jobId, sqlClient.get_job(jobId)["resultset_location"]
    )
)
print(result_df.head(200))
# Renaming a partitioned result set to a single exact object must fail.
print("Expecting failure when trying to rename partitioned results to exact target:")
try:
    sqlClient.rename_exact_result(jobId)
except ValueError as e:
    print(e)
print("Running test with exact target object name creation:")
cos_url = "{}myresult.parquet".format(test_credentials.result_location)
sqlClient.delete_objects(cos_url)
jobId = sqlClient.submit_sql(
    "SELECT * FROM cos://us-geo/sql/employees.parquet STORED AS PARQUET LIMIT 10 INTO {} JOBPREFIX NONE STORED AS PARQUET".format(
        cos_url
    )
)
sqlClient.wait_for_job(jobId)
result_df = sqlClient.get_result(jobId)
sqlClient.rename_exact_result(jobId)
result_objects_df = sqlClient.list_results(jobId)
print(result_objects_df.head(200))
# A second rename of the same job must also fail.
print("Expecting failure when trying to rename already renamed exact target:")
try:
    sqlClient.rename_exact_result(jobId)
except ValueError as e:
    print(e)
print("Running test with delete_result:")
res = sqlClient.delete_result(jobId)
print(res)
# --- Pagination: submit with a pagesize, fetch individual pages, and
# exercise the invalid-page / invalid-pagesize error paths.
print("Running test with paginated parquet target:")
jobId = sqlClient.submit_sql(
    "SELECT * FROM cos://us-geo/sql/employees.parquet STORED AS PARQUET LIMIT 10 INTO {} STORED AS PARQUET".format(
        test_credentials.result_location
    ),
    2,
)
sqlClient.wait_for_job(jobId)
result_df = sqlClient.get_result(jobId, 1)
print("jobId {} result page 1 is:".format(jobId))
print(result_df.head(200))
result_df = sqlClient.get_result(jobId, 5)
print("jobId {} result page 5 is:".format(jobId))
print(result_df.head(200))
print("Trying to retrieve non existing page number:")
try:
    result_df = sqlClient.get_result(jobId, 6)
except ValueError as e:
    print(e)
print("Trying to retrieve invalid page number:")
try:
    result_df = sqlClient.get_result(jobId, 0)
except ValueError as e:
    print(e)
print("Trying to use wrong data type for page number:")
try:
    result_df = sqlClient.get_result(jobId, "abc")
except ValueError as e:
    print(e)
print("Trying to run SQL with invalid pagesize:")
try:
    jobId = sqlClient.submit_sql(
        "SELECT * FROM cos://us-geo/sql/employees.parquet STORED AS PARQUET LIMIT 10 INTO {} STORED AS PARQUET".format(
            test_credentials.result_location
        ),
        0,
    )
except ValueError as e:
    print(e)
# PARTITIONED BY and pagination are mutually exclusive -> SyntaxError.
print("Trying to run SQL with PARTITIONED clause plus pagesize:")
try:
    jobId = sqlClient.submit_sql(
        "SELECT * FROM cos://us-geo/sql/employees.parquet STORED AS PARQUET LIMIT 10 INTO {} STORED AS PARQUET PARTITIONED BY (city)".format(
            test_credentials.result_location
        ),
        2,
    )
except SyntaxError as e:
    print(e)
print("Running test with paginated JSON target:")
jobId = sqlClient.submit_sql(
    "WITH orders as (SELECT customerid, named_struct('count', count(orderid), 'orderids', collect_list(orderid)) orders \
    FROM cos://us-geo/sql/orders.parquet STORED AS PARQUET \
    GROUP BY customerid) \
    SELECT c.customerid, \
    named_struct('name', companyname, 'contact', contactname, 'tile', contacttitle, 'phone', PHONE) company, \
    named_struct('street', address, 'city', city, 'zip', postalcode, 'country', country) address, \
    orders \
    FROM cos://us-geo/sql/customers.parquet STORED AS PARQUET c, \
    orders o \
    WHERE o.customerid=c.customerid \
    INTO {} STORED AS JSON".format(
        test_credentials.result_location
    ),
    pagesize=10,
)
sqlClient.wait_for_job(jobId)
result_df_list = sqlClient.list_results(jobId)
print("jobId {} result pages:".format(jobId))
print(result_df_list.head(200))
result_df = sqlClient.get_result(jobId, pagenumber=2)
print("jobId {} result page 2 is:".format(jobId))
print(result_df.head(10))
# --- Compound invocation (run_sql), error paths, listings and job history.
print("Running test with compound method invocation:")
result_df = sqlClient.run_sql(
    "WITH orders_shipped AS \
    (SELECT OrderID, EmployeeID, (CASE WHEN shippedDate < requiredDate \
    THEN 'On Time' \
    ELSE 'Late' \
    END) AS Shipped \
    FROM cos://us-geo/sql/orders.parquet STORED AS PARQUET) \
    SELECT e.FirstName, e.LastName, COUNT(o.OrderID) As NumOrders, Shipped \
    FROM orders_shipped o, \
    cos://us-geo/sql/employees.parquet STORED AS PARQUET e \
    WHERE e.EmployeeID = o.EmployeeID \
    GROUP BY e.FirstName, e.LastName, Shipped \
    ORDER BY e.LastName, e.FirstName, NumOrders DESC \
    INTO {} STORED AS CSV".format(
        test_credentials.result_location
    )
)
print("Result set is:")
print(result_df.head(200))
# "SELECT xyzFROM" (missing space) triggers a parse-time error.
print("Running test with SQL grammar error:")
try:
    print(
        sqlClient.run_sql(
            "SELECT xyzFROM cos://us-geo/sql/employees.parquet STORED AS PARQUET LIMIT 10 INTO {} STORED AS CSV".format(
                test_credentials.result_location
            )
        )
    )
except Exception as e:
    print(e)
# "xyz" is not a column -> error only at execution time.
print("Running test with SQL runtime error:")
try:
    print(
        sqlClient.run_sql(
            "SELECT xyz FROM cos://us-geo/sql/employees.parquet STORED AS PARQUET LIMIT 10 INTO {} STORED AS CSV".format(
                test_credentials.result_location
            )
        )
    )
except Exception as e:
    print(e)
print("SQL UI Link:")
sqlClient.sql_ui_link()
print("Job list:")
pd.set_option("display.max_colwidth", 10)
print(sqlClient.get_jobs().head(200))
pd.set_option("display.max_colwidth", None)
print("COS Summary:")
print(sqlClient.get_cos_summary(test_credentials.result_location))
print("COS Object Listing:")
objects_df = sqlClient.list_cos_objects(test_credentials.result_location)
print(objects_df.head(100))
print("Test with target URL as separate parameter")
sqlClient = ibmcloudsql.SQLQuery(
    test_credentials.apikey,
    test_credentials.instance_crn,
    target_cos_url=test_credentials.result_location,
    client_info="ibmcloudsql test",
)
sqlClient.logon()
# No INTO clause here: the client's target_cos_url is used implicitly.
jobId = sqlClient.submit_sql(
    "SELECT * FROM cos://us-geo/sql/employees.parquet STORED AS PARQUET LIMIT 10"
)
sqlClient.wait_for_job(jobId)
result_objects_df = sqlClient.list_results(jobId)
print(result_objects_df.head(200))
result_df = sqlClient.get_result(jobId)
print(result_df.head(200))
print("Test job history export to COS")
jobhist_location = test_credentials.result_location + "my_job_history/"
sqlClient.export_job_history(
    jobhist_location,
    export_file_prefix="job_export=",
    export_file_suffix="/data.parquet",
)
sqlClient = ibmcloudsql.SQLQuery(
    test_credentials.apikey,
    test_credentials.instance_crn,
    target_cos_url=test_credentials.result_location,
    client_info="ibmcloudsql test",
)
sqlClient.logon()
print("Running query on exported history")
jobhist_df = sqlClient.run_sql(
    "SELECT * FROM {} STORED AS PARQUET LIMIT 10 INTO {} STORED AS CSV".format(
        jobhist_location, test_credentials.result_location
    )
)
print(jobhist_df[["job_id", "status"]])
print("Exporting tags")
sqlClient.export_tags_for_cos_objects(
    "cos://us-geo/cdl/iotmessages/", "cos://us-geo/results/foo/tags.parquet"
)
# --- Optional EU-instance test (needs EU credentials; kept commented out).
# print("Running EU test with individual method invocation and Parquet target:")
# try:
# sqlClient_eu = ibmcloudsql.SQLQuery(test_credentials.apikey, test_credentials.eu_instance_crn, target_cos_url=test_credentials.result_location, client_info='ibmcloudsql test')
# sqlClient_eu.logon()
# jobId = sqlClient_eu.submit_sql("SELECT * FROM cos://us-geo/sql/employees.parquet STORED AS PARQUET LIMIT 10 INTO {} STORED AS PARQUET".format(test_credentials.eu_result_location))
# sqlClient_eu.wait_for_job(jobId)
# result_df = sqlClient_eu.get_result(jobId)
# print("jobId {} restults are stored in {}. Result set is:".format(jobId, sqlClient_eu.get_job(jobId)['resultset_location']))
# print(result_df.head(200))
# print("EU SQL UI Link:")
# sqlClient_eu.sql_ui_link()
# except AttributeError as _:
# print(".. no configuration available")
# pass
print("Force rate limiting:")
try:
    for n in range(6):
        sqlClient.submit_sql(
            "SELECT * FROM cos://us-geo/sql/employees.parquet STORED AS PARQUET LIMIT 10 INTO {} STORED AS PARQUET".format(
                test_credentials.result_location
            )
        )
except RateLimitedException:
    print("Got rate limited as expected")
# add this as the previous test launches so many asynchronous runs
sqlClient = ibmcloudsql.SQLQuery(
    test_credentials.apikey,
    test_credentials.instance_crn,
    target_cos_url=test_credentials.result_location,
    client_info="ibmcloudsql test",
    max_tries=100,
)
sqlClient.logon()
# execute_sql returns status only, no DataFrame.
print("Running test with execute_sql() no data returned:")
result = sqlClient.execute_sql(
    "WITH orders_shipped AS \
    (SELECT OrderID, EmployeeID, (CASE WHEN shippedDate < requiredDate \
    THEN 'On Time' \
    ELSE 'Late' \
    END) AS Shipped \
    FROM cos://us-geo/sql/orders.parquet STORED AS PARQUET) \
    SELECT e.FirstName, e.LastName, COUNT(o.OrderID) As NumOrders, Shipped \
    FROM orders_shipped o, \
    cos://us-geo/sql/employees.parquet STORED AS PARQUET e \
    WHERE e.EmployeeID = o.EmployeeID \
    GROUP BY e.FirstName, e.LastName, Shipped \
    ORDER BY e.LastName, e.FirstName, NumOrders DESC \
    INTO {} STORED AS CSV".format(
        test_credentials.result_location
    )
)
print("Result is:")
print(result)
# ========= CATALOG TABLES
print("Show all catalog tables")
print(sqlClient.target_url)
print(sqlClient.show_tables())
table_name = "test_table_partition"
print("Create a partitioned catalog table")
print(sqlClient.create_partitioned_table(table_name,cos_url="cos://us-geo/sql/customers_partitioned_csv",partition_list="country" ))
print(sqlClient.show_tables())
print("Describe a catalog table")
print(sqlClient.describe_table(table_name))
print("Add partition")
print(sqlClient.add_partition(table_name,"country='Lummerland'" ))
print("Drop a catalog table")
print(sqlClient.drop_table(table_name))
print(sqlClient.show_tables())
table_name = "test_table"
print("Create a catalog table")
print(sqlClient.create_table(table_name, blocking=True, force_recreate=True))
print(sqlClient.show_tables())
print("Describe a catalog table")
print(sqlClient.describe_table(table_name))
print("Drop a catalog table")
print(sqlClient.drop_table(table_name))
print(sqlClient.show_tables())
# ============= SQLBuilder
print("==================================")
print("Check query string generation")
sqlmagic = ibmcloudsql.SQLBuilder()
targeturl = "cos://us-geo/thinkstdemo-donotdelete-pr-iwmvg18vv9ki4d/"
# Build a two-CTE join query fluently on the client's internal builder.
(
    sqlClient.with_(
        "humidity_location_table",
        (
            sqlmagic.select_("location")
            .from_view_(
                "select count(*) as count, location from dht where humidity > 70.0 group by location"
            )
            .where_("count > 1000 and count < 2000")
        ).reset_(),
    )
    .with_(
        "pm_location_table",
        (
            sqlmagic.select_("location")
            .from_view_("select count(*) as count, location from sds group by location")
            .where_("count > 1000 and count < 2000")
        ).reset_(),
    )
    .select_("humidity_location_table.location")
    .from_table_("humidity_location_table")
    .join_table_(
        "pm_location_table",
        typ="inner",
        condition="humidity_location_table.location=pm_location_table.location",
    )
    .store_at_(targeturl)
)
# NOTE(review): the exact internal whitespace of this literal matters for
# the equality assert below — confirm it matches the builder's formatter.
expected_sql = """WITH humidity_location_table AS
(SELECT LOCATION
FROM
(SELECT count(*) AS COUNT,
LOCATION
FROM dht
WHERE humidity > 70.0
GROUP BY LOCATION)
WHERE COUNT > 1000
AND COUNT < 2000 ),
pm_location_table AS
(SELECT LOCATION
FROM
(SELECT count(*) AS COUNT,
LOCATION
FROM sds
GROUP BY LOCATION)
WHERE COUNT > 1000
AND COUNT < 2000 )
SELECT humidity_location_table.location
FROM humidity_location_table
INNER JOIN pm_location_table ON humidity_location_table.location=pm_location_table.location INTO cos://us-geo/thinkstdemo-donotdelete-pr-iwmvg18vv9ki4d/ stored AS csv"""
assert sqlClient.get_sql() == expected_sql
print("==================================")
print("Check generating JOIN statement")
# An invalid join type ("inner a") must raise ValueError, nothing else.
try:
    (
        sqlClient.join_table_(
            "pm_location_table",
            typ="inner a",
            condition="humidity_location_table.location=pm_location_table.location",
        )
    )
except ValueError:
    print("Got ValueError as expected")
except Exception:
    assert 0
print("==================================")
print("Check get_schema_data")
# Asking for a csv schema of parquet data must raise ValueError.
try:
    df = sqlClient.get_schema_data(
        "cos://us-geo/sql/oklabdata/parquet/sds011/2017/09/", type="csv"
    )
except ValueError:
    print("Got ValueError as expected")
except Exception:
    assert 0
|
from rest_framework import serializers
from laptops.models import Laptop, CPU
class LaptopSerializer(serializers.ModelSerializer):
    """Serializer for Laptop instances.

    Related objects (cpu, manufacturer and the to-many gpu relation) are
    rendered via their ``__str__`` representation using
    ``StringRelatedField``, which makes those fields read-only.
    """

    cpu = serializers.StringRelatedField()
    manufacturer = serializers.StringRelatedField()
    gpu = serializers.StringRelatedField(many=True)

    class Meta:
        model = Laptop
        fields = (
            'id',
            'manufacturer',
            'model_name',
            'year',
            'cpu',
            'weight',
            'ram',
            'battery',
            'gpu',
            'benchmark'
        )
|
# -*- coding: utf-8 -*-
"""
Created on Wed Mar 20 11:44:47 2019
@author: a-kojima
"""
import numpy as np
import matplotlib.pyplot as pl
import soundfile as sf
from scipy import signal as sg
from scipy.fftpack import fft
import copy
class PlotBeamPattern:
    """Visualize learned gammatone filter-bank weights.

    FIXES in this revision:
      * ``np.int`` (an alias of the builtin ``int`` that was removed in
        NumPy 1.24) is replaced by plain ``int`` everywhere.
      * ``plot_brainogram`` now reads its ``speech_path`` argument instead
        of the module-level ``SAMPLE_SPEECH`` constant it silently used.
      * Stray trailing comma after ``pl.figure()`` removed.
    """

    def __init__(self, gammatone_path, sampling_frequency=16000):
        # Path to a .npy weight file; expected shape is
        # (filter_order, n_channels, n_filters), or a 4-D variant whose
        # last axis is squeezed on load.
        self.gammatone_path = gammatone_path
        self.sampling_frequency = sampling_frequency

    def plot_beampattern(self, speech_path, selected_filter_indexes, fftl=2048, frame_length=128, frame_shift=32):
        """Plot filter coefficients and the 2-channel delay-and-sum
        beampattern for each filter index in ``selected_filter_indexes``.

        speech_path: 2-channel audio file read with soundfile.
        """
        gammatone_weight = np.load(self.gammatone_path)
        if np.ndim(gammatone_weight) == 4:
            gammatone_weight = np.transpose(gammatone_weight[:, :, :, 0], (2, 1, 0))
        filter_order, number_of_channels, number_of_filter = np.shape(gammatone_weight)
        PHASESHIFTSIZE = 20   # samples shifted per look-direction step
        LOOKDIRECTION = 36    # number of steering directions
        pl.figure()
        index_plot = 1
        for selected_filter_index in selected_filter_indexes:
            filter_response = gammatone_weight[:, :, selected_filter_index]
            all_data, _ = sf.read(speech_path, dtype='float32')
            time_axis = (np.linspace(0, len(all_data) / self.sampling_frequency, filter_order)) * 1000
            number_of_frames = int((len(all_data) - frame_length) / frame_shift)
            st = 0
            ed = frame_length
            beam_pattern = np.zeros((LOOKDIRECTION, int(fftl / 2) + 1))
            for ii in range(0, number_of_frames):
                data = all_data[st:ed, :]
                # Filter each channel of the current frame.
                ch1_filter = sg.lfilter(filter_response[:, 0], 1, data[:, 0])
                ch2_filter = sg.lfilter(filter_response[:, 1], 1, data[:, 1])
                for j in range(0, LOOKDIRECTION):
                    # Integer sample delay applied to channel 2 to steer
                    # the look direction; shifted-in samples are zeroed.
                    roll_range = PHASESHIFTSIZE * int(j - int(LOOKDIRECTION / 2))
                    ch2_filter_shift = copy.deepcopy(np.roll(ch2_filter, roll_range))
                    if roll_range >= 0:
                        ch2_filter_shift[0:roll_range] = 0
                    else:
                        ch2_filter_shift[roll_range:-1] = 0
                    filter_sum = ch1_filter + ch2_filter_shift
                    # Accumulate the power spectrum per direction.
                    beam_pattern[j, :] = beam_pattern[j, :] + np.abs(fft(filter_sum, n=fftl)[0:int(fftl / 2) + 1]) ** 2
                st = st + frame_shift
                ed = ed + frame_shift
            vmin = np.min(beam_pattern)
            vmax = np.max(beam_pattern)
            # Left column: filter coefficients; right column: beampattern.
            pl.subplot(len(selected_filter_indexes), 2, index_plot)
            pl.plot(time_axis, filter_response)
            if index_plot == 1:
                pl.title('Filter Coeficient')
            if index_plot == len(selected_filter_indexes) - 1:
                pl.xlabel('Time[ms]')
            pl.subplot(len(selected_filter_indexes), 2, index_plot + 1)
            pl.imshow(beam_pattern, aspect='auto', origin='lower', cmap='hot', extent=[0, 8, 0, 180], vmin=vmin, vmax=vmax, interpolation='nearest')
            if index_plot + 1 == 2:
                pl.title('Beampattern')
            if index_plot == len(selected_filter_indexes):
                pl.xlabel('Frequency[kHz]')
            pl.ylabel('DOA [deg]')
            pl.ylim([0, 180])
            pl.xlim([0, 8])
            index_plot = index_plot + 2
            beam_pattern = beam_pattern * 0

    def plot_brainogram(self, speech_path, frame_length=1024, frame_shift=256):
        """Plot per-channel "brainogram": frame energy of every filter
        (rows sorted by peak frequency) over time."""
        gammatone_weight = np.load(self.gammatone_path)
        if np.ndim(gammatone_weight) == 4:
            gammatone_weight = np.transpose(gammatone_weight[:, :, :, 0], (2, 1, 0))
        # Sort filters by their dominant frequency for a readable display.
        index = self._get_sort_index(gammatone_weight)
        gammatone_weight = gammatone_weight[:, :, index]
        filter_order, number_of_channels, number_of_filter = np.shape(gammatone_weight)
        # FIX: use the speech_path argument (the original read the global
        # SAMPLE_SPEECH constant and ignored its parameter).
        all_data, _ = sf.read(speech_path, dtype='float32')
        number_of_frames = int((len(all_data) - frame_length) / frame_shift)
        brainogram = np.zeros((number_of_filter, number_of_frames, number_of_channels))
        pl.figure()
        for i in range(0, number_of_channels):
            for j in range(0, number_of_filter):
                filter_response = gammatone_weight[:, :, j]
                all_data, _ = sf.read(speech_path, dtype='float32')
                number_of_frames = int((len(all_data) - frame_length) / frame_shift)
                st = 0
                ed = frame_length
                for ii in range(0, number_of_frames):
                    data = all_data[st:ed, :]
                    filter_data = sg.lfilter(filter_response[:, i], 1, data[:, i])
                    brainogram[j, ii, i] = np.sum(filter_data ** 2)
                    st = st + frame_shift
                    ed = ed + frame_shift
            pl.subplot(number_of_channels, 1, i + 1)
            pl.imshow(10 * np.log10(brainogram[:, :, i] ** 2), aspect='auto', origin='lower', cmap='hot')

    def _get_sort_index(self, model_weight):
        """Return filter indices ordered by the peak bin of each filter's
        1024-point impulse-response spectrum (channel 0)."""
        _, _, n_f = np.shape(model_weight)
        imp = np.zeros(1024)
        imp[0] = 1
        pos = np.array([])
        for ii in range(0, n_f):
            test = np.abs(fft(sg.lfilter(model_weight[:, 0, ii], 1, imp)))[0:513]
            ind = np.argmax(test)
            pos = np.append(pos, ind)
        return np.argsort(pos)
# Demo: render the beampattern and brainogram figures for a sample file.
if __name__ == '__main__':
    SAMPLING_FREQUENCY = 16000
    SAMPLE_SPEECH = r'./test0128.wav'
    GAMMATONE_WEIGHT = r'./train_gammatone_weight.npy'
    plot = PlotBeamPattern(GAMMATONE_WEIGHT,SAMPLING_FREQUENCY)
    plot.plot_beampattern(SAMPLE_SPEECH, [0, 5, 10, 20])
    plot.plot_brainogram(SAMPLE_SPEECH)
|
# Augmented assignment operators demo.
x=3
x+=2 # same as x = x + 2
print(x)
x-=1
print(x)
print("{}{}".format('Estoy Practicando ',7))
x*=5
print(x)
import argparse
import json
from imdb import IMDb
from tqdm import tqdm
def fetch_imdb_info(vocab_path, output_path):
    """Fetch IMDb metadata for every movie listed in a vocab file.

    vocab_path: JSON file mapping movie ids to entries with an 'imdb' key.
    output_path: destination JSON file; on a per-movie fetch error a
    partial snapshot is dumped to ``output_path + '.tmp'``.
    """
    print('Reading vocab...')
    with open(vocab_path) as file:
        vocab = json.load(file)
    ia = IMDb()
    imdb_info = {}
    # Renamed loop variable from ``id`` (shadowed the builtin) to movie_id.
    for movie_id, entry in tqdm(vocab.items(), desc='Fetching IMDb info...'):
        imdb_id = entry['imdb']
        try:
            movie = ia.get_movie(imdb_id)
            imdb_info[movie_id] = {'imdb': imdb_id, 'title': movie['title'], 'year': movie['year'],
                                   'img': movie['full-size cover url'], 'rating': movie['rating']}
        # BUG FIX: the original bare ``except:`` also swallowed
        # KeyboardInterrupt/SystemExit; catch Exception only.
        except Exception:
            print(f'Error. Fetched info for {len(imdb_info)} movies. Last id: {movie_id}.')
            with open(output_path + '.tmp', 'w') as file:
                json.dump(imdb_info, file)
    print(f'Finished fetching info for {len(imdb_info)} movies.')
    with open(output_path, 'w') as file:
        json.dump(imdb_info, file)
if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        # FIX: typo "featch" -> "fetch" in the user-visible help text.
        description='Script to fetch IMDB data for a dataset.')
    parser.add_argument('--vocab_path', type=str, default='data/processed/movielens/movielens_vocab.json')
    parser.add_argument('--output_path', type=str, default='data/processed/movielens/imdb_data.json')
    args = parser.parse_args()
    fetch_imdb_info(args.vocab_path, args.output_path)
|
def sum_element_1(n):
    """Return the positive series term 1 / (4n + 1)."""
    denominator = 4 * n + 1
    return 1 / denominator
def sum_element_2(n):
    """Return the negative series term 1 / (4n + 3)."""
    denominator = 4 * n + 3
    return 1 / denominator
def sum_element(n):
    """Return the n-th paired term 1/(4n+1) - 1/(4n+3) of the pi/4 series."""
    return 1 / (4 * n + 1) - 1 / (4 * n + 3)
def approximation_of_pi(limit):
    """Approximate pi by summing the paired Leibniz terms n = 0..limit
    (inclusive) and scaling the partial sum of pi/4 by four."""
    total = 0
    for k in range(limit + 1):
        total += 1 / (4 * k + 1) - 1 / (4 * k + 3)
    return total * 4
# Print increasingly accurate estimates with 32 decimal places.
# NOTE(review): the last iteration sums 10**9 + 1 terms and takes minutes.
for i in [1, 3, 6, 9]:
    print(format(approximation_of_pi(10**i), ".32f"))
from gtts import gTTS
# import os
import playsound
# Synthesize the text with Google TTS (network call) and play the mp3.
text = "LOL this is real funny"
output = gTTS(text=text, lang='en', slow=False)
output.save('output.mp3')
# os.system("afplay output.mp3")
# wait for the sound to finish playing?
# block=True makes playsound return only after playback completes.
blocking = True
playsound.playsound("output.mp3", block=blocking)
import cv2
import matplotlib.pyplot as plt
import numpy as np
import imutils
import easyocr
## Read in Image, Greyscale/Blur
img = cv2.imread("C:/Users/asus/Desktop/Python Project/License Plate Recog/IMG_9.jpg")
gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
# plt.imshow(cv2.cvtColor(gray, cv2.COLOR_BGR2RGB)) # to test this step
## Apply filter & find edges
bfilter = cv2.bilateralFilter(gray, 11, 17, 17) #noise reduction/smoothing
edged = cv2.Canny(bfilter, 150, 200, True) #edge detection
img2=cv2.cvtColor(edged, cv2.COLOR_BGR2RGB)
plt.figure(1)
plt.imshow(img2) # to test step
## Find Contours & apply mask
keypoints = cv2.findContours(edged.copy(), cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
contours = imutils.grab_contours(keypoints)
contours = sorted(contours, key=cv2.contourArea, reverse=True) #[:10]
# x,y,w,h = cv2.boundingRect(edged.copy())
# img3 = cv2.rectangle(img,(x,y),(x+w,y+h),(0,255,0),2)
# plt.imshow(img3)
# rect = cv2.minAreaRect(img)
# box = cv2.boxPoints(rect)
# box = np.int0(box)
# img3 = cv2.drawContours(img,[box],0,(0,0,255),2)
# plt.imshow(img3)
# Take the first quadrilateral among the largest contours as the plate.
# NOTE(review): if no 4-point contour exists, location stays None and
# cv2.drawContours below fails — consider guarding this.
location = None
for contour in contours:
    approx = cv2.approxPolyDP(contour, 10, True)
    if len(approx) == 4:
        location = approx
        break
#print(location)
# Mask everything except the plate region.
mask = np.zeros(gray.shape, np.uint8)
new_image = cv2.drawContours(mask, [location], 0,255, -1)
new_image = cv2.bitwise_and(img, img, mask = mask)
plt.figure(2)
plt.imshow(cv2.cvtColor(new_image, cv2.COLOR_BGR2RGB)) #for test
# Crop the grayscale image to the masked region (1 px margin).
(x, y) = np.where(mask==255)
(x1, y1) = (np.min(x), np.min(y))
(x2, y2) = (np.max(x), np.max(y))
cropped_image = gray[x1-1:x2+1, y1-1:y2+1]
plt.figure(3)
plt.imshow(cv2.cvtColor(cropped_image, cv2.COLOR_BGR2RGB))
# Use EasyOCR to read text
reader = easyocr.Reader(['en'], gpu=True)
result = reader.readtext(cropped_image)
res=result[0]
# Render result: overlay the recognized text and the plate rectangle.
text = res[1]
font = cv2.FONT_HERSHEY_PLAIN
rez = cv2.putText(img, text=text, org=(approx[0][0][0], approx[1][0][1]+60), fontFace=font, fontScale=2, color=(0,255,0), thickness=2) #, linetype = cv2.LINE_AA)
rez = cv2.rectangle(img, tuple(approx[0][0]), tuple(approx[2][0]), (0,255,0), 3)
plt.figure(4)
plt.imshow(cv2.cvtColor(rez, cv2.COLOR_BGR2RGB))
# -*- coding: utf-8 -*-
"""Agent_Timing.ipynb
"""
# Mount Google Drive and make the EtaZero project importable in Colab.
from google.colab import drive
drive.mount('/content/gdrive', force_remount=True)
import sys
sys.path.append('/content/gdrive/My Drive/EtaZero')
# Commented out IPython magic to ensure Python compatibility.
# %matplotlib inline
import matplotlib.pyplot as plt
import os
import torch
from agent_timer import AgentTimer
from agents.eta_zero import EtaZero
from agents.uct_agent import UCTAgent
# Timer persists its results under the given section on Drive.
timer = AgentTimer("/content/gdrive/My Drive/EtaZero", section="Attempt5")
timer.get_info()
agents = [
    # EtaZero(load_net(1), samples_per_move=200),
    UCTAgent(10_000)
]
for agent in agents:
    # Time each agent until 30 games are recorded for it in this section.
    timer.time(
        agent,
        num_games=max(30 - timer.get_info().get(agent.time_id, 0), 0)
    )
# 03_jsonTest02.py
# 첨부 파일 some.json을 이용하여 각 정보를 출력해보세요.
import json
filename = 'some.json'  # module-level default (the function uses its own local copy)
def get_Json_Data():
    """Read 'some.json', parse it and print the member/web fields.

    BUG FIX: the original opened the file and never closed it (the handle
    was rebound to the result of read()); a ``with`` block now guarantees
    the file is closed.  The local ``id`` was renamed to avoid shadowing
    the builtin.  Printed (Korean) messages are unchanged.
    """
    print('함수 호출됨')
    filename = 'some.json'
    with open(filename, 'rt', encoding='utf-8') as myfile:
        print(type(myfile))   # file object type, as the original printed
        content = myfile.read()
    print(type(content))      # str, matching the original's second print
    # json.loads(str): parse a JSON document from a string.
    jsonData = json.loads(content)
    print(type(jsonData))
    print('-' * 30)
    print(jsonData)
    member = jsonData['member']
    web = jsonData['web']
    print(member)
    print(web)
    print('-' * 30)
    name = member['name']
    address = member['address']
    phone = member['phone']
    cafename = web['cafename']
    web_id = web['id']  # renamed from ``id`` (shadowed the builtin)
    print('멤버 이름 : %s / 주소 : %s / 번호 : %s' % (name, address, phone))
    print('카페 이름 : %s / id : %s' % (cafename, web_id))
# __name__ : built-in variable holding the current module's name.
# It is set to '__main__' when this file is executed directly,
# and to the module name when it is imported by another program.
if __name__ == '__main__':
    print('나 스스로 실행 되었습니다.')
    get_Json_Data()  # call the function
else:
    print('다른 프로그램이 호출 했습니다.')
# PIL modulunu goruntunun pikselini
# cikarmak ve uzerinde degisiklik yapmak icin kullanacagiz
from PIL import Image
import speech_recognition as sr
# Transcribe the bundled voice recording with Google speech recognition;
# the resulting text in ``result`` is the payload hidden by encode() below.
sr.__version__
r = sr.Recognizer()
audiodata = sr.AudioFile("data/voicerecord.wav")
with audiodata as source:
    audio = r.record(source)
result = r.recognize_google(audio, language="en-US")
# Convert the message into 8-bit binary strings, one per character
# (each character's code point, zero-padded to 8 bits).
def genData(data):
    """Return a list with the '08b'-formatted code point of every character."""
    return [format(ord(character), '08b') for character in data]
# Pixels are rewritten in groups of three so their least-significant bits
# carry the message, then yielded back.
def modPix(pix, data):
    """Yield modified 3-pixel groups that hide *data* in channel LSBs.

    pix  -- flat iterable of RGB(A) pixel tuples in raster order
    data -- message string; each character consumes 3 pixels (9 values:
            8 data bits + 1 continue/stop bit)
    """
    datalist = genData(data)
    lendata = len(datalist)
    imdata = iter(pix)
    for i in range(lendata):
        # Pull three pixels at a time: 9 channel values per character.
        pix = [value for value in imdata.__next__()[:3] +
               imdata.__next__()[:3] +
               imdata.__next__()[:3]]
        # Make each of the first 8 values even for a '0' bit and odd for
        # a '1' bit (value 0 is bumped up instead of down).
        for j in range(0, 8):
            if (datalist[i][j] == '0' and pix[j]% 2 != 0):
                pix[j] -= 1
            elif (datalist[i][j] == '1' and pix[j] % 2 == 0):
                if(pix[j] != 0):
                    pix[j] -= 1
                else:
                    pix[j] += 1
                # pix[j] -= 1
        # The 9th value of each group tells the decoder whether to keep
        # reading: even (0) means continue, odd (1) means message ended.
        if (i == lendata - 1):
            if (pix[-1] % 2 == 0):
                if(pix[-1] != 0):
                    pix[-1] -= 1
                else:
                    pix[-1] += 1
        else:
            if (pix[-1] % 2 != 0):
                pix[-1] -= 1
        pix = tuple(pix)
        yield pix[0:3]
        yield pix[3:6]
        yield pix[6:9]
def encode_enc(newimg, data):
    """Write the stego pixels produced by modPix() back into *newimg*.

    Pixels are placed left-to-right in raster order, wrapping to the next
    row at the image's right edge.
    """
    width = newimg.size[0]
    col, row = 0, 0
    for pixel in modPix(newimg.getdata(), data):
        # Place the updated pixel, then advance the raster position.
        newimg.putpixel((col, row), pixel)
        if col == width - 1:
            col, row = 0, row + 1
        else:
            col += 1
# Encode the data into an image (steganography).
def encode():
    """Interactively hide the transcribed speech text inside a copy of an image.

    Prompts for the source and destination image file names; the payload
    is the module-level ``result`` string from the speech recognizer.
    Raises ValueError when the payload is empty.
    """
    img = input("Resim ismini giriniz (uzantısı ile birlikte) : ")
    image = Image.open(img, 'r')
    print("Sistemde bulunan ses kaydından alınan veri gizleniyor...")
    data = result
    if (len(data) == 0):
        raise ValueError('Boş veri girildi')
    newimg = image.copy()
    encode_enc(newimg, data)
    new_img_name = input("Yeni resimin ismini giriniz (uzantısı ile birlikte) : ")
    # Derive the PIL format name from the file extension (e.g. "PNG").
    newimg.save(new_img_name, str(new_img_name.split(".")[1].upper()))
# Decode (extract) the data hidden inside an image (steganalysis).
def decode():
    """Interactively read the hidden message out of a stego image.

    Consumes three pixels (nine channel values) per character: the first
    eight parities form the character's bits and the ninth signals
    end-of-message (odd parity = stop).
    """
    img = input("Resim ismini giriniz (uzantısı ile birlikte) : ")
    image = Image.open(img, 'r')
    data = ''
    imgdata = iter(image.getdata())
    while (True):
        pixels = [value for value in imgdata.__next__()[:3] +
                  imgdata.__next__()[:3] +
                  imgdata.__next__()[:3]]
        # Rebuild the character's bit string from channel parities.
        binstr = ''
        for i in pixels[:8]:
            if (i % 2 == 0):
                binstr += '0'
            else:
                binstr += '1'
        data += chr(int(binstr, 2))
        # An odd ninth value marks the end of the message.
        if (pixels[-1] % 2 != 0):
            return data
# Main function
def main():
    """Prompt for encode (1) or decode (2) and dispatch accordingly."""
    a = int(input("-- Steganografi İşlemi --\n"
                  "1. Encode\n2. Decode\n"))
    if (a == 1):
        encode()
    elif (a == 2):
        print("Kodlanan metin : " + decode())
    else:
        raise Exception("Doğru giriş yapınız")
# Driver code
if __name__ == '__main__' :
    # Run interactively when executed as a script.
    main()
|
from BinarySearchTreeNode import Node
class BTS: # Binary Search Tree
    """Binary search tree keyed on node ``Value`` (duplicates go right).

    Relies on an external ``Node`` class exposing ``Value``, ``Left``
    and ``Right`` attributes.
    """
    def __init__(self):
        # Root node of the tree; None while the tree is empty.
        self.__Head = None
    def Insert(self, Value):
        """Insert *Value*, creating the root if the tree is empty."""
        if self.__Head is None:
            self.__Head = Node(Value)
        else:
            self.__insert(Value, self.__Head)
    def __insert(self, Value, root):
        # Recursive helper: descend left for smaller keys, right otherwise.
        if Value < root.Value:
            if root.Left is None:
                root.Left = Node(Value)
            else:
                self.__insert(Value, root.Left)
        else:
            if root.Right is None:
                root.Right = Node(Value)
            else:
                self.__insert(Value, root.Right)
    def Delete(self, Value):
        """Remove one occurrence of *Value* from the tree, if present."""
        if self.__Head:
            # Bug fix: re-link the root -- the original discarded the
            # return value, so deleting the root node had no effect.
            self.__Head = self.__Delete(self.__Head, Value)
    def __Delete(self, Root, Value):
        # Standard recursive BST delete; returns the (possibly new)
        # subtree root so the parent can re-link it.
        if Root is None:
            return None
        elif Root.Value > Value:
            Root.Left = self.__Delete(Root.Left, Value)
        elif Root.Value < Value:
            Root.Right = self.__Delete(Root.Right, Value)
        else:
            # Node found: splice out, or replace with the in-order
            # successor (minimum of the right subtree).
            if Root.Right is None:
                return Root.Left
            elif Root.Left is None:
                return Root.Right
            else:
                Temp = Root.Right
                Mini_Value = Temp.Value
                while Temp.Left:
                    Temp = Temp.Left
                    Mini_Value = Temp.Value
                Root.Value = Mini_Value
                Root.Right = self.__Delete(Root.Right, Root.Value)
        return Root
    def Preorder(self):
        """Print values in root-left-right order, one per line."""
        if self.__Head:
            self.__Preorder(self.__Head)
    def __Preorder(self, Root):
        # Bug fix: guard against None children -- the original recursed
        # unconditionally and crashed (AttributeError) at the leaves.
        if Root:
            print(Root.Value)
            self.__Preorder(Root.Left)
            self.__Preorder(Root.Right)
    def Inorder(self):
        """Print values in ascending (left-root-right) order, one per line."""
        if self.__Head:
            self.__Inorder(self.__Head)
    def __Inorder(self, Root):
        if Root:
            self.__Inorder(Root.Left)
            print(Root.Value)
            self.__Inorder(Root.Right)
    def Postorder(self):
        """Print values in left-right-root order, one per line."""
        if self.__Head:
            self.__Postorder(self.__Head)
    def __Postorder(self, Root):
        # Bug fix: recurse with the *postorder* helper (the original
        # mistakenly called the preorder helper) and guard against None.
        if Root:
            self.__Postorder(Root.Left)
            self.__Postorder(Root.Right)
            print(Root.Value)
    def Search(self, Value):
        """Return *Value* if present in the tree, else None (implicitly)."""
        root = self.__Head
        while root:
            if Value < root.Value:
                root = root.Left
            elif Value > root.Value:
                root = root.Right
            else:
                return Value
|
import bottle
from bottle import request, response
import sqlalchemy as sa
from bauble import app, API_ROOT
from bauble.middleware import basic_auth, filter_param
from bauble.model import SourceDetail
column_names = [col.name for col in sa.inspect(SourceDetail).columns]
def resolve_source(next):
    """Decorator: load the SourceDetail named by the route's source_id.

    Attaches the row as ``request.source`` and aborts with 404 when no
    row matches.  NOTE(review): ``next`` shadows the builtin and the
    wrapper lacks functools.wraps -- consider renaming/wrapping.
    """
    def _wrapped(*args, **kwargs):
        request.source = request.session.query(SourceDetail).get(request.args['source_id'])
        if not request.source:
            bottle.abort(404, "Source not found")
        return next(*args, **kwargs)
    return _wrapped
@app.get(API_ROOT + "/source")
@basic_auth
@filter_param(SourceDetail, column_names)
def index_source():
    """List all sources as JSON, honouring any filter the middleware set."""
    # TODO: we're not doing any sanitization or validation...see preggy or validate.py
    sources = request.filter if request.filter else request.session.query(SourceDetail)
    return [source.json() for source in sources]
@app.get(API_ROOT + "/source/<source_id:int>")
@basic_auth
@resolve_source
def get_source(source_id):
    """Return one source as JSON; resolve_source aborts 404 when missing."""
    return request.source.json()
@app.route(API_ROOT + "/source/<source_id:int>", method='PATCH')
@basic_auth
@resolve_source
def patch_source(source_id):
    """Apply a partial update to an existing source from the JSON body.

    Only keys that correspond to mapped columns are applied; unknown
    keys are silently ignored.  Returns the updated source as JSON.
    """
    if not request.json:
        bottle.abort(400, 'The request doesn\'t contain a request body')
    # create a copy of the request data with only the columns
    data = {col: request.json[col] for col in request.json.keys() if col in column_names}
    for key, value in data.items():
        # Use the unpacked value directly (was a redundant data[key] lookup).
        setattr(request.source, key, value)
    request.session.commit()
    return request.source.json()
@app.post(API_ROOT + "/source")
@basic_auth
def post_source():
    """Create a new source from the JSON body; responds 201 with the row.

    Only keys matching mapped columns are honoured.
    """
    # TODO create a subset of the columns that we consider mutable
    # (removed the dead ``mutable = []`` placeholder local)
    # create a copy of the request data with only the columns
    data = {col: request.json[col] for col in request.json.keys() if col in column_names}
    source = SourceDetail(**data)
    request.session.add(source)
    request.session.commit()
    response.status = 201
    return source.json()
@app.delete(API_ROOT + "/source/<source_id:int>")
@basic_auth
@resolve_source
def delete_source(source_id):
    """Delete the resolved source; responds 204 with no body."""
    request.session.delete(request.source)
    request.session.commit()
    response.status = 204
|
import typing as tp
from lib import input_utils
def two_sum(
    stream: tp.Iterable[int],
    target: int,
) -> tp.Optional[tp.Tuple[int, int]]:
    """Return a pair of values from *stream* summing to *target*, or None.

    Single pass: remember every value seen so far and check whether the
    complement of the current value has already appeared.  The later
    element of the pair is returned first.
    """
    seen = set()
    for current in stream:
        complement = target - current
        if complement in seen:
            return current, complement
        seen.add(current)
    return None
def three_sum(
    stream: tp.Iterable[int],
    target: int,
) -> tp.Optional[tp.Tuple[int, int, int]]:
    """Return a triple of values from *stream* summing to *target*, or None.

    Materialises the stream into a set, then reduces to two_sum for each
    candidate third element.

    NOTE(review): the candidate element stays inside the set passed to
    two_sum, so a value can appear twice in the result (e.g. {2, 3} with
    target 8 yields (3, 2, 3)).  Fine for this puzzle input; verify
    before general reuse.
    """
    cache = set(stream)
    for e in cache:
        expected = target - e
        ts = two_sum(cache, expected)
        if ts is None:
            continue
        first, second = ts
        return first, second, e
    return None
if __name__ == '__main__':
    # NOTE(review): both parts hard-code an absolute path to the puzzle
    # input -- consider making it a CLI argument.
    def part_1():
        """Day 1 part 1: find the pair summing to 2020, print its product."""
        data = input_utils.read_ints(
            '/home/i2/projects/aoc/2020/day_1/puzzle.input'
        )
        nums = two_sum(data, 2020)
        if nums is None:
            raise RuntimeError('aaargh')
        a, b = nums
        print(a, b, a * b)
    def part_2():
        """Day 1 part 2: find the triple summing to 2020, print its product."""
        data = input_utils.read_ints(
            '/home/i2/projects/aoc/2020/day_1/puzzle.input'
        )
        nums = three_sum(data, 2020)
        if nums is None:
            raise RuntimeError('aaargh')
        a, b, c = nums
        print(a, b, c, a * b * c)
    part_1()
    part_2()
|
from django.shortcuts import render
from django.http import HttpResponse
from AppTwo.models import Topic, Webpage, Access
# Create your views here.
def index(request):
    """Render AppTwo/index.html with all Access records ordered by date."""
    webpages_list = Access.objects.order_by('date')
    # Template context: the records are exposed under the 'access' key.
    date_dict = {'access': webpages_list}
    return render(request, 'AppTwo/index.html', context=date_dict)
|
from __future__ import annotations
import requests
from typing import Optional, Union
from bs4 import BeautifulSoup
from collections import Counter
from helpers import NA
from bs4.element import Tag
import json
from time import sleep
from requests.exceptions import ConnectionError
from urllib3.exceptions import MaxRetryError, NewConnectionError
from socket import gaierror
import re
from nltk.corpus import stopwords
from pandas import DataFrame
import pandas as pd
"""
Classes that allow us to scrape Monster.com listings
Inspired by Jesse Steinweg-Woods' project on web scraping from Indeed.com
His blog post: https://jessesw.com/Data-Science-Skills/
"""
class MonsterTextParser:
    """Counts keyword occurrences across Monster job-listing descriptions."""
    def __init__(self, keywords: tuple):
        # Keywords to tally; matching is done case-insensitively.
        self.keywords = keywords
    def count_words(self, results: Union[MonsterListing, MonsterSearch], as_percentage: bool = False,
                    delete_matching: str = "[^a-zA-Z]") -> DataFrame:
        """Return a DataFrame of keyword frequencies over one or many listings.

        :param results: a single MonsterListing or an iterable MonsterSearch
        :param as_percentage: scale counts to percent of listings examined
        :param delete_matching: regex of characters stripped from descriptions
        :return: DataFrame with 'Keyword' and 'Frequency' columns, sorted desc.
        """
        freqs = Counter()
        # isinstance (was ``type() ==``) so subclasses are handled too.
        if isinstance(results, MonsterListing):
            freqs.update(self.words_from_description(results, delete_matching=delete_matching))
        else:
            # Plain loop (was a side-effecting list comprehension).
            for listing in results:
                freqs.update(self.words_from_description(listing, delete_matching=delete_matching))
        out_dict = dict([(x, freqs[x.lower()]) for x in self.keywords])
        df = pd.DataFrame.from_dict(out_dict, orient='index', columns=['Frequency']).reset_index()
        df = df.rename(columns={'index': 'Keyword'})
        if as_percentage:
            df['Frequency'] = df['Frequency'] * 100 / (1 if isinstance(results, MonsterListing) else len(results))
        return df.sort_values(by='Frequency', ascending=False).reset_index(drop=True)
    def words_from_description(self, listing: MonsterListing, delete_matching: str = "[^a-zA-Z]") -> list:
        """
        Extract words from description in listing. Some code lifted from Jesse Steinweg-Woods' blog post.
        :param listing: MonsterListing to get words from
        :param delete_matching: delete any character not matching
        :return: de-duplicated list of lowercase words minus stopwords
        """
        # Bug fix: fetch when the description is missing *or* empty.  The
        # original checked ``is None``, which never fired because
        # MonsterListing defaults description to ''.
        if not listing.description:
            listing.fetch_description()
        description = listing.description
        lines = (line.strip() for line in description.splitlines())
        # break multi-headlines into a line each
        chunks = (phrase.strip() for line in lines for phrase in line.split(" "))
        # Get rid of all blank lines and ends of line
        text = ''.join(chunk + ' ' for chunk in chunks if chunk).encode('utf-8')
        # Now clean out all of the unicode junk
        text = text.decode('unicode_escape')
        # Get rid of any terms that aren't words
        text = re.sub(delete_matching, " ", text)
        text = text.lower().split()  # Go to lower case and split them apart
        stop_words = set(stopwords.words("english"))
        words = list()
        keywords_lower = [x.lower() for x in self.keywords]
        for w in text:
            # Strip dots only from non-keywords (keeps e.g. "node.js" intact).
            if w not in keywords_lower:
                w = w.replace('.', '')
            if w not in stop_words and len(w) > 0:
                words.append(w)
        # Last, just get the set of these.
        return list(set(words))
class MonsterLocation:
    """A city/state search location, optionally with equivalent alternates."""
    @staticmethod
    def format_location(city: str, state: str) -> tuple:
        """
        :param city: Search city
        :param state: Search state
        :return: (city, state) formatted properly
        """
        state = state.upper()
        city = ' '.join([x.lower().capitalize() for x in city.split(' ')])  # capitalize all words
        return city, state
    @classmethod
    def from_string(cls, loc_string: str, alternates: tuple = ()) -> MonsterLocation:
        """
        :param loc_string: e.g. "New York, NY". A location string.
        :param alternates: e.g. ("Brooklyn, NY", "Jersey City, NJ", "Manhattan, NY")
        :return: A MonsterLocation from the location string
        """
        city, state = [x.strip() for x in loc_string.split(',')]
        return cls(*cls.format_location(city, state), alternates=alternates)
    def __init__(self, city: str, state: str, alternates: tuple = ()):
        """
        :param city: Search city
        :param state: Search state
        :param alternates: e.g. for "New York, NY": ("Brooklyn, NY", "Jersey City, NJ", "Manhattan, NY")
        """
        self.city, self.state = self.format_location(city, state)
        # Parse alternate location strings into MonsterLocation objects.
        if len(alternates) > 0:
            self.alternates = [MonsterLocation.from_string(x) for x in alternates]
        else:
            self.alternates = alternates
    @staticmethod
    def search_var_from_arguments(city: str, state: str) -> str:
        # URL fragment for Monster's search endpoint ("__2C" is an escaped comma).
        return f'{"-".join(city.split(" "))}__2C-{state}'
    # applies search_var_from_arguments to object's city and state
    def search_var(self) -> str:
        return self.search_var_from_arguments(self.city, self.state)
    def __eq__(self, other) -> bool:
        """Compare two locations; each side's alternates are also consulted.

        Bug fix: the original used an ``elif`` chain, so when *self* had
        alternates that did not match, *other*'s alternates were never
        checked and equality could be asymmetric.
        """
        if isinstance(other, MonsterLocation):
            if self.city == other.city and self.state == other.state:
                return True
            for alt in self.alternates:
                if alt.city == other.city and alt.state == other.state:
                    return True
            for alt in other.alternates:
                if alt.city == self.city and alt.state == self.state:
                    return True
        return False
    def __str__(self) -> str:
        return f'{self.city}, {self.state}'
class MonsterSearch:
    """
    Search results for a particular query on monster.
    The search results are stored in a dictionary. The key is the job ID, and the value is a MonsterListing
    """
    def __init__(self, location: MonsterLocation, query: str, extra_titles: tuple = None, results: dict = None,
                 job_ids: list = None):
        """
        Parameters
        ----------
        :param location: Location to search in
        :param query: The term we will be searching for
        :param extra_titles: Other valid title substrings
        :param results: dictionary of search results indexed by unique job id
        :param job_ids: list of job ids in order they were fetched
        """
        self.location = location
        self.base_url = f'https://www.monster.com/jobs/search/?q={"-".join(query.lower().split(" "))}' \
                        f'&where={self.location.search_var()}'
        self.query = query
        self.extra_titles = extra_titles
        self.results = results
        self.job_ids = job_ids  # job ids of results; helps us maintain an order...
        self.job_id_index = 0  # for iteration
    def is_valid_listing(self, listing: MonsterListing) -> bool:
        """
        Checks to see if a listing matches our desired search.
        :param listing: search result from Monster
        :return: True if the listing matches our query and is in the right location, False otherwise
        """
        if self.location == listing.location:
            if self.query.lower() in listing.job_title.lower():
                return True
            # Bug fix: extra_titles defaults to None; iterating it
            # unconditionally raised TypeError for non-matching titles.
            for title in (self.extra_titles or ()):
                if title.lower() in listing.job_title.lower():
                    return True
        return False
    def fetch_listings(self, limit: int = 10, refetch: bool = False):
        """Download up to *limit* result pages and populate results/job_ids."""
        if self.results is not None and len(self.results) != 0 and not refetch:
            print("You've already fetched the results for this query. Set refetch to True to fetch them again.")
        else:
            self.results = dict()
            self.job_ids = list()
            search_url = f'{self.base_url}&stpage=1&page={limit}'  # tack on page ranges to base URL
            try:
                response = requests.get(search_url)
                soup = BeautifulSoup(response.text, 'html.parser')
                all_listings = soup.find_all('section', attrs={'data-jobid': True})
                for item in all_listings:
                    listing = MonsterListing.from_search_results(item)
                    if listing is not None and self.is_valid_listing(listing) and listing.job_id not in self.results:
                        self.results[listing.job_id] = listing
                        self.job_ids.append(listing.job_id)
            except (gaierror, ConnectionError, MaxRetryError, NewConnectionError) as e:
                print(e)
    def fetch_descriptions(self, suppress_output=False):
        """Fetch the full description for every stored listing (1s between requests)."""
        desc_count = 0
        if self.results is not None:
            for job_id in self.job_ids:
                listing = self.results[job_id]
                desc_count += 1
                # Bug fix: the presence check no longer depends on
                # suppress_output -- the original refetched descriptions
                # that were already present whenever output was suppressed.
                if len(listing.description) > 0:
                    if not suppress_output:
                        print(f'Description #{desc_count} is already present.')
                else:
                    listing.fetch_description()
                    if not suppress_output:
                        if len(listing.description) > 0:
                            print(f'Description #{desc_count} successfully fetched.')
                        else:
                            print(f'Listing #{desc_count} appears to be dead...')
                    sleep(1)  # so we don't overwhelm the server (idea courtesy of Jesse Steinweg-Woods)
        else:
            print("You need to fetch the listings first: use fetch_listings(...)")
    def json_dict(self) -> dict:
        """Return a JSON-serialisable dict of this search and its results."""
        out_dict = dict()
        out_dict['location'] = {'main': self.location.__str__(),
                                'alternates': [x.__str__() for x in self.location.alternates]}
        out_dict['query'] = self.query
        out_dict['extra_titles'] = self.extra_titles
        out_dict['base_url'] = self.base_url
        out_dict['results'] = dict()
        out_dict['job_ids'] = list()
        for job_id in self.job_ids:
            out_dict['results'][job_id] = self.results[job_id].json_dict()
            out_dict['job_ids'].append(job_id)
        return out_dict
    def json_serialize(self) -> str:
        return json.dumps(self.json_dict())
    @classmethod
    def json_deserialize(cls, in_dict=None, in_str=None) -> MonsterSearch:
        """Rebuild a MonsterSearch from json_dict() output (or its JSON string)."""
        if in_str is not None:
            in_dict = json.loads(in_str)
        loc_dict = in_dict['location']
        location = MonsterLocation.from_string(loc_dict['main'], alternates=loc_dict['alternates'])
        query = in_dict['query']
        extra_titles = in_dict['extra_titles']
        results = in_dict['results']
        job_ids = in_dict['job_ids']
        deser_results = dict([(job_id, MonsterListing.json_deserialize(in_dict=results[job_id])) for job_id in results])
        return cls(location, query, extra_titles=extra_titles, results=deser_results, job_ids=job_ids)
    # for iterating through results
    def __iter__(self):
        self.job_id_index = 0
        return self
    def __next__(self):
        if self.job_id_index < len(self.job_ids):
            job_id_index = self.job_id_index
            self.job_id_index += 1
            return self.results[self.job_ids[job_id_index]]
        else:
            raise StopIteration
    # length of MonsterSearch = length of results
    def __len__(self):
        return len(self.job_ids)
    def __str__(self):
        out_str = \
            f'Search Query: {self.query}\n' \
            f'Location: {self.location}\n'
        out_str += f'Number of Listings: {0 if self.results is None else len(self.results)}'
        return out_str
class MonsterListing:
    """A single Monster.com job listing."""
    def __init__(self, job_id: str, job_url: str, location: MonsterLocation,
                 company: str, job_title: str, description: str = ''):
        """
        Set up MonsterListing class
        :param job_id: unique job ID that allows one to find the listing
        :param job_url: URL to the STATIC page where the listing is located; NOT search result page
        :param location: Geographic location of job
        :param company: Company name
        :param job_title: Job title used on website
        :param description: Long description of job duties, expectations, requirements, etc.
        """
        self.job_id = job_id
        self.job_url = job_url
        self.location = location
        self.company = company
        self.job_title = job_title
        self.description = description
    @classmethod
    def from_search_results(cls, item: Tag) -> Optional[MonsterListing]:
        """
        Parses listing properties from source. Useful for when we're looping through search results.
        :param item: Search results from BeautifulSoup
        :return: MonsterListing with all fields filled except description, which requires another HTTP request.
        """
        if item is not None:
            job_id = item['data-jobid']
            loc_string = item \
                .find('div', attrs={'class': 'location'}) \
                .find('span', attrs={'class': 'name'}).text.strip()
            if ',' in loc_string:
                city, state = [a.strip() for a in loc_string.split(',')]
                state = state[:2]  # two-letter state format
                location = MonsterLocation(city, state)
            else:
                location = MonsterLocation(NA, NA)  # NA value; will not match any valid locations
            job_title = item.find('h2', attrs={'class': 'title'}).find('a', href=True).string.strip()
            company = item \
                .find('div', attrs={'class': 'company'}) \
                .find('span', attrs={'class': 'name'}).text.strip()
            job_url = item.find('h2', attrs={'class': 'title'}).find('a', href=True)['href']
            # add MonsterListing to results
            return cls(job_id, job_url, location, company, job_title)
    @classmethod
    def from_id(cls, job_id) -> Optional[MonsterListing]:
        """Look a listing up by its job id via a fresh search request."""
        temp_url = f'https://www.monster.com/jobs/search/?jobid={job_id}'
        # get HTML
        response = requests.get(temp_url)
        soup = BeautifulSoup(response.text, 'html.parser')
        # get first item
        item = soup.find('section', attrs={'data-jobid': True})
        return cls.from_search_results(item)
    def __str__(self) -> str:
        out_str = \
            f'Job Title: {self.job_title}\n' \
            f'Company: {self.company}\n' \
            f'Location: {self.location}\n' \
            f'ID: {self.job_id}'
        return out_str
    def __eq__(self, other) -> bool:
        # isinstance (was ``type() ==``) so subclasses still compare by job id.
        if isinstance(other, MonsterListing):
            return self.job_id == other.job_id
        return False
    def json_dict(self) -> dict:
        """Return a JSON-serialisable dict of this listing."""
        out_dict = {'job_id': self.job_id, 'job_url': self.job_url, 'location': self.location.__str__(),
                    'company': self.company, 'job_title': self.job_title, 'description': self.description}
        return out_dict
    def json_serialize(self) -> str:
        return json.dumps(self.json_dict())
    @classmethod
    def json_deserialize(cls, in_dict=None, in_str=None) -> MonsterListing:
        """Rebuild a MonsterListing from json_dict() output (or its JSON string)."""
        if in_str is not None:
            in_dict = json.loads(in_str)
        # convert to MonsterListing
        job_id = in_dict['job_id']
        job_url = in_dict['job_url']
        location = MonsterLocation.from_string(in_dict['location'])
        description = in_dict['description']
        company = in_dict['company']
        job_title = in_dict['job_title']
        return cls(job_id, job_url, location, company, job_title, description)
    def fetch_description(self):
        """Download the listing page and store its text in self.description."""
        response = requests.get(self.job_url)
        soup = BeautifulSoup(response.text, 'html.parser')
        job_body = soup.find('div', attrs={'id': 'JobDescription'})
        if job_body is not None:
            self.description = job_body.get_text(separator=' ')  # add whitespace between HTML tags
        else:
            self.description = ''
    def get_excerpt(self, word_limit=1000, end_string='...'):
        """Return the first *word_limit* characters of the description plus *end_string*.

        Bug fix: fetch when the description is empty, not only None --
        the default is '', so the original's check never fired.
        """
        if not self.description:
            self.fetch_description()
        return self.description[:word_limit] + end_string
|
#Written by Chang Wang
#this code used KNN algorithm
#it picked 9 column from the input file and dealed with these features
#main aims to discuss the difference between using same algorithm but different features with other group members
#the output is such as:
#the best k = 13
#precision score = 0.7142857142857143
#recall score = 0.8333333333333334
#F1_score = 0.7692307692307692
#accuracy score for test dataset: 0.6666666666666666
#Finished time: 24/11/2019
#reference URL:
#https://blog.csdn.net/z583636762/article/details/78988415
#https://www.cnblogs.com/bymo/p/8618191.html
#https://blog.csdn.net/u011630575/article/details/79195450
#https://blog.csdn.net/jasonleesjtu/article/details/92091143
#https://www.jianshu.com/p/284581d9b189
import os
import re
import numpy as np
import pandas as pd
from sklearn import neighbors
from sklearn.metrics import accuracy_score
from sklearn import metrics
from matplotlib import pyplot as plt
def initial_dataframe(dir_name, index, features):
    """Return (sorted file names of *dir_name*, NaN DataFrame index x features)."""
    file_names = sorted(os.listdir(dir_name))
    frame = pd.DataFrame(np.nan, index=index, columns=features)
    return file_names, frame
# Participant ids (u00, u01, ...) are derived from the activity directory's
# file names; every other sensing directory is assumed to hold the same users.
index = os.listdir(os.getcwd() + '/StudentLife_Dataset/inputs/sensing/activity')
index.sort()
#print(index)
for i in range(len(index)):
    index[i] = re.findall(r'u\d+', index[i])[0]
#print(index)
# dealing with feature which need to count the number of inference
# for activity and audio
def featrue_count(dir, lst, df):
    """Fill *df* with per-user proportions of each inference class.

    For each CSV in *lst* (one per user), counts the values of the second
    column and writes each class's share of the total (3 d.p.) into that
    user's row of *df*.

    NOTE(review): assumes the inference labels are exactly 0..k-1 so that
    ``count[j]`` hits every key -- a missing class would raise KeyError.
    Confirm against the dataset.
    """
    for i in range(len(lst)):
        csv = os.path.join(dir, lst[i])
        data = pd.read_csv(csv, index_col=False)
        count = data[data.columns[1]].value_counts() #normalize=True
        user = re.findall(r'u\d+', lst[i])
        feature = []
        amount = 0
        for j in range(len(count.keys())):
            feature.append(count[j])
            amount = amount + count[j]
        # Convert raw counts to proportions.
        for k in range(len(feature)):
            feature[k] = round(feature[k] / amount,3)
        df.loc[user[0]] = feature
        # print(feature)
    return df
# dataframe for activity and audio
# activity feature
activity = os.getcwd() + '/StudentLife_Dataset/inputs/sensing/activity'
#print(activity)
activity_features = ['Stationary', 'Walking', 'Running', 'Unknown']
act_lst, act_df = initial_dataframe(activity, index, activity_features)
act_df = featrue_count(activity, act_lst, act_df)
# Collapse the three non-stationary classes into a single 'Moving' share.
act_df['Moving'] = act_df['Walking'] + act_df['Running'] + act_df['Unknown']
act_df = act_df[['Stationary', 'Moving']]
print(act_df)
# audio feature
audio = os.getcwd() + '/StudentLife_Dataset/inputs/sensing/audio'
audio_features = ['Silence', 'Voice', 'Noise']
aud_lst, aud_df = initial_dataframe(audio, index, audio_features)
aud_df = featrue_count(audio, aud_lst, aud_df)
# print(aud_df.var())
def feature_conversation(dir, lst, df):
    """Fill *df* with each user's total conversation duration.

    For every CSV in *lst* (one per user), sums (end - start) over all
    rows and stores the total in that user's row of *df*.
    """
    for file_name in lst:
        csv = os.path.join(dir, file_name)
        user = re.findall(r'u\d+', file_name)
        data = pd.read_csv(csv, index_col=False)
        # Vectorised sum replaces the original per-row loop, which also
        # shadowed the outer loop variable ``i``.
        # NOTE: the second column name really does begin with a space.
        duration = (data[' end_timestamp'] - data['start_timestamp']).sum()
        df.loc[user[0]] = duration
    return df
#conversation feature
# Total conversation time per user, summed from the conversation CSVs.
conv = os.getcwd() + '/StudentLife_Dataset/inputs/sensing/conversation'
conv_features = ['conversation']
conv_lst, conv_df = initial_dataframe(conv, index, conv_features)
conv_df = feature_conversation(conv, conv_lst, conv_df)
def feature_dis_freg(dir, lst, df):
    """Fill *df* with a per-user count of significant GPS position changes.

    A change is counted when latitude or longitude moves by more than
    4e-5 degrees, or the altitude changes at all, relative to the
    previous sample.
    """
    for file_name in lst:
        csv = os.path.join(dir, file_name)
        user = re.findall(r'u\d+', file_name)
        data = pd.read_csv(csv, index_col=False)
        latitude = data['latitude']
        longitude = data['longitude']
        altitude = data['altitude']
        change_count = 0
        prev_lat = latitude[0]
        prev_lon = longitude[0]
        prev_alt = altitude[0]
        # Row index renamed (the original reused the outer loop variable
        # ``i``); the comparison logic is unchanged.
        for row in range(len(data)):
            if abs(prev_lat - latitude[row]) > 0.00004 or abs(prev_lon - longitude[row]) > 0.00004 or prev_alt != altitude[row]:
                change_count += 1
            prev_lat = latitude[row]
            prev_lon = longitude[row]
            prev_alt = altitude[row]
        df.loc[user[0]] = change_count
    return df
# gps feature
# Count of significant position changes per user.
gps = os.getcwd() + '/StudentLife_Dataset/inputs/sensing/gps'
gps_features = ['MovedTime']
gps_lst, gps_df = initial_dataframe(gps, index, gps_features)
gps_df = feature_dis_freg(gps, gps_lst, gps_df)
# wifi location
def feature_loc(dir, lst, df):
    """Populate *df* with per-user indoor/outdoor wifi-location scores.

    Rows whose 'location' field contains "in" count as indoor, those
    containing "near" as outdoor; each raw count is divided by 10.
    """
    for file_name in lst:
        csv = os.path.join(dir, file_name)
        user = re.findall(r'u\d+', file_name)
        data = pd.read_csv(csv, index_col=False)
        data = data[['location']]
        location_col = data['location']
        indoor = location_col.str.contains("in").value_counts()[True] / 10
        outdoor = location_col.str.contains("near").value_counts()[True] / 10
        df.loc[user[0]] = [indoor, outdoor]
    return df
location = os.getcwd() + '/StudentLife_Dataset/inputs/sensing/wifi_location'
loc_features = ['Indoor', 'Outdoor']
loc_lst, loc_df = initial_dataframe(location, index, loc_features)
loc_df = feature_loc(location, loc_lst, loc_df)
#print(loc_df)
#merge for a new dataframe
# Join all sensing features on the user id index.
input_df_0 = pd.merge(act_df, aud_df, left_index=True, right_index=True)
input_df_0 = pd.merge(input_df_0, conv_df, left_index=True, right_index=True)
input_df_0 = pd.merge(input_df_0, gps_df, left_index=True, right_index=True)
input_df_0 = pd.merge(input_df_0, loc_df, left_index=True, right_index=True)
#print(train_X)
#output flourishing
Flourishing = pd.read_csv('FlourishingScale.csv')
Flourishing_pre = Flourishing.head(46)
# NOTE(review): fillna(inplace=True) on a head() slice can trigger
# pandas' SettingWithCopyWarning; confirm the original frame is unused.
Flourishing_pre.fillna(0, inplace = True)
#deal with the data
array_Flourishing = Flourishing_pre.values
uid = []
sum_list = []
#only need to calculate the sum
# Per-user mean of the answered (non-zero) flourishing items.
flourishing = pd.DataFrame(np.nan, index=uid, columns=['average'])
for index_1 in range(len(array_Flourishing)):
    uid.append(array_Flourishing[index_1][0])
    account = 0
    number = 0
    for index_2 in range(2,len(array_Flourishing[index_1])):
        if array_Flourishing[index_1][index_2] > 0:
            #get the average for each uid, avoid NAN value's influence
            account += int(array_Flourishing[index_1][index_2])
            number += 1
    sum_list.append(float((account) / number))
    flourishing.loc[uid[index_1]] = float(sum_list[index_1])
#print(input_df)
input_df = pd.merge(input_df_0, flourishing, left_index=True, right_index=True)
#print(input_df)
input_1 = input_df.values
#min and max
def transformation(data):
    """Min-max scale every column of *data* except the last, in place.

    *data* is a 2-D sequence of numbers; the last column (the label) is
    left untouched.  Returns the same object for convenience.
    """
    for col in range(0, len(data[0]) - 1):
        # Track the column extremes (sentinel start values preserved
        # from the original implementation).
        col_max = 0
        col_min = 99999999
        for row in range(len(data)):
            value = float(data[row][col])
            if value > col_max:
                col_max = value
            if value < col_min:
                col_min = value
        for row in range(len(data)):
            data[row][col] = (float(data[row][col]) - col_min) / (col_max - col_min)
    return data
#except the last row, regard them as X
def X_train(data):
    """Return the feature matrix: every column of *data* except the last, as floats."""
    n_features = len(data[0]) - 1
    return [[float(row[col]) for col in range(n_features)] for row in data]
#the last row is Y
#using the average to obtain the overall
def Y_train(data):
    """Binarise the flourishing label (last column) around its mean.

    Each label is scaled by 8 and truncated to int; values at or above
    the mean become 1.0, the rest 0.0.
    """
    labels = [int(8 * row[len(data[0]) - 1]) for row in data]
    # "or 1" keeps the empty-input behaviour (returns []) without dividing by zero.
    mean_label = sum(labels) / (len(labels) or 1)
    return [float(1) if y >= mean_label else float(0) for y in labels]
input_1 = transformation(input_1)
# Fixed split: first 37 users train, remaining users test (no shuffling).
training_data = input_1[:37]
test_data = input_1[37:]
x_train_training = X_train(training_data)
x_train_test = X_train(test_data)
y_train = Y_train(input_1)
y_train_training = y_train[:37]
y_train_test = y_train[37:]
#find the largest auc score and got k
def Optimal_number(x_1, y_1, x_2, y_2):
    """Scan k = 1..30 KNN classifiers and return (best k by AUC, list of AUCs).

    Each classifier is fitted on (x_1, y_1) and scored with ROC AUC on
    (x_2, y_2).
    """
    best_auc = 0
    best_k = 0
    auc_scores = []
    for k in range(1, 31):
        model = neighbors.KNeighborsClassifier(k)
        model.fit(x_1, y_1)
        auc = metrics.roc_auc_score(y_2, model.predict_proba(x_2)[:, 1])
        if auc > best_auc:
            best_auc = auc
            best_k = k
        auc_scores.append(auc)
    return best_k, auc_scores
print('------------flourishing------------')
#training_optimal_number, training_AUC_list = Optimal_number(x_train_training, y_train_training, x_train_training,y_train_training)
# Pick k by AUC on the held-out split.  NOTE(review): tuning k on the test
# set leaks information; a validation split would be cleaner.
test_optimal_number, test_AUC_list = Optimal_number(x_train_training, y_train_training, x_train_test, y_train_test)
#print(training_optimal_number)
print('the best k = ',test_optimal_number)
#plt.plot(training_AUC_list)
#plt.show()
plt.plot(test_AUC_list)
plt.show()
#get precision score, recall score and F1 score
def Part(n, x_1, y_1, x_2, y_2):
    """Fit a k=n KNN on (x_1, y_1) and return (precision, recall, F1) on (x_2, y_2)."""
    model = neighbors.KNeighborsClassifier(n)
    model.fit(x_1, y_1)
    # Predict once and reuse (the original re-ran predict for each metric).
    y_pred = model.predict(x_2)
    recall_opt = metrics.recall_score(y_2, y_pred)
    prec_opt = metrics.precision_score(y_2, y_pred)
    F1_score = metrics.f1_score(y_2, y_pred)
    return prec_opt, recall_opt, F1_score
# Report precision/recall/F1 at a hand-picked k of 5.
prec_opt, recall_opt, F1_score = Part(5, x_train_training, y_train_training, x_train_test, y_train_test)
print('precision score =',prec_opt)
print('recall score =',recall_opt)
print('F1_score =',F1_score)
def accuracy_score_2(n, x_train_training, y_train_training, x_train, y_train):
    """Fit a k=n KNN on the training split and return accuracy on (x_train, y_train)."""
    knn = neighbors.KNeighborsClassifier(n)
    knn.fit(x_train_training, y_train_training)
    return accuracy_score(y_train, knn.predict(x_train))
#for a in range(1,30):
    #score_1111 = accuracy_score_2(a, x_train_training, y_train_training, x_train_test, y_train_test)
    #prec_opt_111, recall_opt_111, F1_score_111 = Part(a, x_train_training, y_train_training, x_train_test, y_train_test)
    #print('11111111111111',score_1111)
    #print('2222222222222222', recall_opt_111)
#training_score = accuracy_score_2(2, x_train_training, y_train_training, x_train_training, y_train_training)
test_score = accuracy_score_2(test_optimal_number, x_train_training, y_train_training, x_train_test, y_train_test)
#print("accuracy score for training dataset:", training_score)
print("accuracy score for test dataset:", test_score)
#similar with previous
# Positive-affect PANAS: drop the negative-affect columns and average the
# answered (non-zero) items per user.
panas = pd.read_csv('panas.csv')
panas_pre = panas.head(46)
panas_pre.fillna(0, inplace = True)
#print(panas_pre.to_string())
drop_columns_1 = ['Distressed','Upset','Guilty','Scared','Hostile','Irritable','Nervous','Jittery','Afraid']
posi_panas = panas_pre.drop(drop_columns_1, axis = 1)
#print(posi_panas.to_string())
array_pos_panas = posi_panas.values
uid_1 = []
sum_list_1 = []
panas_1 = pd.DataFrame(np.nan, index=uid, columns=['average'])
for index_1 in range(len(array_pos_panas)):
    # NOTE(review): uids are read from array_Flourishing, not
    # array_pos_panas -- looks like a copy-paste slip; correct only if
    # both CSVs list the same 46 users in the same order.  Confirm.
    uid_1.append(array_Flourishing[index_1][0])
    account = 0
    number = 0
    for index_2 in range(2,len(array_pos_panas[index_1])):
        if array_pos_panas[index_1][index_2] > 0:
            account += int(array_pos_panas[index_1][index_2])
            number += 1
    sum_list_1.append(float((account) / number))
    panas_1.loc[uid_1[index_1]] = float(sum_list_1[index_1])
#print(input_df)
input_1_df = pd.merge(input_df_0, panas_1, left_index=True, right_index=True)
#print(input_1_df.to_string())
input_2 = input_1_df.values
def Y_train_1(data):
    """Binarise the positive-PANAS label (last column, scaled by 9) at threshold 25.3."""
    labels = [int(9 * row[len(data[0]) - 1]) for row in data]
    return [float(1) if y >= 25.3 else float(0) for y in labels]
input_2 = transformation(input_2)
# Fixed split: first 37 rows train, the rest test (no shuffling).
training_data_1 = input_2[:37]
test_data_1 = input_2[37:]
x_train_training_1 = X_train(training_data_1)
x_train_test_1 = X_train(test_data_1)
# Labels are computed over the WHOLE set, then split with the same 37-row cut.
y_train_1 = Y_train_1(input_2)
y_train_training_1 = y_train_1[:37]
y_train_test_1 = y_train_1[37:]
# Search for the k (number of neighbours) that maximises AUC on the test split.
test_optimal_number_1, test_AUC_list_1 = Optimal_number(x_train_training_1, y_train_training_1, x_train_test_1, y_train_test_1)
print('------------panas------------')
print('the best k = ',test_optimal_number_1)
plt.plot(test_AUC_list_1)
plt.show()
# NOTE(review): Part() is called with a hard-coded k=4 rather than the optimum
# found above -- confirm this is intentional.
prec_opt_1, recall_opt_1, F1_score_1 = Part(4, x_train_training_1, y_train_training_1, x_train_test_1, y_train_test_1)
print('precision score =',prec_opt_1)
print('recall score =',recall_opt_1)
print('F1_score =',F1_score_1)
test_score_1 = accuracy_score_2(test_optimal_number_1, x_train_training_1, y_train_training_1, x_train_test_1, y_train_test_1)
#print("accuracy score for training dataset:", training_score)
print("accuracy score for test dataset:", test_score_1)
# Second pass: drop the nine positive-affect items to keep the negative ones.
drop_columns_2 = ['Interested','Strong','Enthusiastic','Proud','Alert','Inspired','Determined','Attentive','Active']
neg_panas = panas_pre.drop(drop_columns_2, axis = 1)
array_neg_panas = neg_panas.values
uid_2 = []
sum_list_2 = []
# One 'average' value per user id ('uid' is defined earlier in the file).
panas_2 = pd.DataFrame(np.nan, index=uid, columns=['average'])
for index_1 in range(len(array_neg_panas)):
    # NOTE(review): assumes array_Flourishing rows align with array_neg_panas.
    uid_2.append(array_Flourishing[index_1][0])
    account = 0  # running sum of answered items
    number = 0   # count of answered (> 0) items
    # Item answers start at column 2 (the first two columns are skipped).
    for index_2 in range(2,len(array_neg_panas[index_1])):
        if array_neg_panas[index_1][index_2] > 0:
            account += int(array_neg_panas[index_1][index_2])
            number += 1
    # Mean over answered items only (raises ZeroDivisionError if none answered).
    sum_list_2.append(float((account) / number))
    panas_2.loc[uid_2[index_1]] = float(sum_list_2[index_1])
#print(input_df)
# Join the negative-PANAS average onto the feature frame built earlier.
input_2_df = pd.merge(input_df_0, panas_2, left_index=True, right_index=True)
#print(input_1_df.to_string())
input_3 = input_2_df.values
def Y_train_2(data):
    """Build binary labels from the last column of *data*.

    Same scheme as Y_train_1 but with the negative-affect cutoff: the final
    value of each row is scaled by 9, truncated to an int, and labelled 1.0
    when at or above 12.4, otherwise 0.0.
    """
    last = len(data[0]) - 1
    threshold = 12.4
    scaled = [int(9 * row[last]) for row in data]
    return [1.0 if score >= threshold else 0.0 for score in scaled]
input_3 = transformation(input_3)
# Fixed split: first 37 rows train, the rest test (no shuffling).
training_data_2 = input_3[:37]
test_data_2 = input_3[37:]
x_train_training_2 = X_train(training_data_2)
x_train_test_2 = X_train(test_data_2)
# Labels over the whole set, split with the same 37-row cut.
y_train_2 = Y_train_2(input_3)
y_train_training_2 = y_train_2[:37]
y_train_test_2 = y_train_2[37:]
# Search for the k that maximises AUC on the test split.
test_optimal_number_2, test_AUC_list_2 = Optimal_number(x_train_training_2, y_train_training_2, x_train_test_2, y_train_test_2)
print('------------panas------------')
print('the best k = ',test_optimal_number_2)
plt.plot(test_AUC_list_2)
plt.show()
# NOTE(review): Part() uses a hard-coded k=2 rather than the optimum above.
prec_opt_2, recall_opt_2, F1_score_2 = Part(2, x_train_training_2, y_train_training_2, x_train_test_2, y_train_test_2)
print('precision score =',prec_opt_2)
print('recall score =',recall_opt_2)
print('F1_score =',F1_score_2)
test_score_2 = accuracy_score_2(test_optimal_number_2, x_train_training_2, y_train_training_2, x_train_test_2, y_train_test_2)
#print("accuracy score for training dataset:", training_score)
print("accuracy score for test dataset:", test_score_2)
|
from PIL import Image
from PIL import ImageFilter
from PIL.ImageFilter import *
import os
def CreatePath(name):
    """Create the '<name>Filtros' output folder under ImgFiltros (no-op if it exists)."""
    base = r"C:\Users\emili\Desktop\ClusterASD\Images\ImgFiltros/"
    os.makedirs(base + name + "Filtros/", exist_ok=True)
def ImageFilter(path):
    """Rotate every image under *path* by 90 degrees, apply a mode filter,
    and save the results to '<path>/Filtros/'.

    Fix: the output folder is now created before saving (the original
    crashed with FileNotFoundError when '<path>/Filtros/' did not exist),
    and the image handle is closed even if processing raises.

    NOTE(review): this function shadows the PIL.ImageFilter module imported
    above; renaming it would break existing callers, so it is only flagged.
    """
    # path of the folder containing the raw images
    inPath = path
    # path of the folder that will contain the modified images
    outPath = rf"{path}/Filtros/"
    os.makedirs(outPath, exist_ok=True)
    for imagePath in os.listdir(inPath):
        inputPath = os.path.join(inPath, imagePath)
        # fullOutPath contains the path of the output image to generate
        fullOutPath = os.path.join(outPath, imagePath)
        # Context manager releases the file handle even on error.
        with Image.open(inputPath) as simg:
            simg.rotate(90).filter(ModeFilter(size=9)).save(fullOutPath)
        print(fullOutPath)
|
from flask import render_template, url_for, flash, redirect, Blueprint
from app import db, bcrypt
from app.models import Teacher, Classes
from app.teacher.forms import ChangeTeacherForm
from flask_login import current_user, login_required
# Blueprint grouping all teacher-facing views; registered by the app factory.
teacher = Blueprint('teacher', __name__)
@teacher.route('/teacher_profile')
@login_required
def teachers_profile():
    """Show the logged-in teacher's profile together with their class list."""
    teacher_data = Teacher.query.filter_by(email=current_user.email).first()
    lessons = Classes.query.filter_by(teacher_id=current_user.id).all()
    classes = [lesson.subject + " " + lesson.weekday + " " + lesson.hour
               for lesson in lessons]
    return render_template('teacher_profile.html', teacher_data=teacher_data, classes=classes)
@teacher.route('/change_teacher_form', methods=['GET', 'POST'])
@login_required
def change_teacher_form():
    """Let the logged-in teacher edit their personal details."""
    current_teacher = Teacher.query.filter_by(email=current_user.email).first()
    form = ChangeTeacherForm()
    # Guard clause: on GET (or invalid POST) just show the form again.
    if not form.validate_on_submit():
        return render_template('change_teacher_form.html', form=form, teacher=current_teacher)
    current_teacher.first_name = form.first_name.data
    current_teacher.last_name = form.last_name.data
    current_teacher.major = form.major.data
    current_teacher.university = form.university.data
    db.session.commit()
    flash("Your information was updated", "success")
    return redirect(url_for('teacher.teachers_profile'))
|
#!/usr/bin/env python
# Swing a servo (e.g. a latch) whenever an RFID tag is scanned.
import RPi.GPIO as GPIO
from mfrc522 import SimpleMFRC522
import time
servoPIN = 17
GPIO.setmode(GPIO.BCM)
GPIO.setup(servoPIN, GPIO.OUT)
myServo = GPIO.PWM(servoPIN, 50) # GPIO 17 for PWM with $
myServo.start(2.5) # Initialization
reader = SimpleMFRC522()
try:
    # Block on reader.read() forever; each scanned tag moves the servo for
    # two seconds and then back.
    while True:
        id, text = reader.read()  # NOTE(review): 'id' shadows the builtin
        print(id)
        print(text)
        myServo.ChangeDutyCycle(7.5)  # move servo to one end position
        time.sleep(2)
        myServo.ChangeDutyCycle(0.5)  # move servo back toward the other end
except KeyboardInterrupt :
    # Ctrl-C: release the GPIO pins and stop the PWM output.
    GPIO.cleanup()
    myServo.stop()
|
from database import db, IDPKMixin, DescriptionMixin, SystemMixin
class Site(db.Model, IDPKMixin, DescriptionMixin, SystemMixin):
    """Site model"""
    __tablename__ = 'site'
    # Human-readable site name.
    name = db.Column(db.Text)
    # Short identifier; enforced unique at the database level.
    code = db.Column(db.Text, unique=True)
|
from twython import Twython
import requests
import zmq
# SECURITY(review): real API credentials are hard-coded below. Having been
# committed, they must be considered public -- revoke them and load
# replacements from the environment or a secrets store instead.
APP_KEY = '6LTEgHCBchKPIQdXb3IH6kJSI'
APP_SECRET = 'waHGTlmTVKQmsm485tf5WPWpUShQkTecvdvwKOBB7DA8nQlnSB'
# Step 1 of the OAuth PIN flow: temporary request tokens + authorisation URL.
twitter = Twython(APP_KEY, APP_SECRET)
auth = twitter.get_authentication_tokens()
OAUTH_TOKEN = auth['oauth_token']
OAUTH_TOKEN_SECRET = auth['oauth_token_secret']
twitter = Twython(APP_KEY, APP_SECRET, OAUTH_TOKEN, OAUTH_TOKEN_SECRET)
print(auth['auth_url'])
# Step 2: the user opens the URL above and types back the PIN it displays.
pin = input("Give your pin: ").strip()
print(pin)
final_tokens = twitter.get_authorized_tokens(pin)
f_oauth_token = final_tokens['oauth_token']
f_oauth_token_secret = final_tokens['oauth_token_secret']
print(f_oauth_token)
print(f_oauth_token_secret)
# Step 3: rebuild the client with the final access tokens and use it.
twitter = Twython(APP_KEY, APP_SECRET, f_oauth_token, f_oauth_token_secret)
#print(r.text)
#print(r.json()['oauth_verifier'])
#print(OAUTH_TOKEN)
#print(OAUTH_TOKEN_SECRET)
twitter.verify_credentials()
# Print the text of the tweets in the authenticated user's home timeline.
user_timeline = twitter.get_home_timeline()
for tweet in user_timeline:
    print(tweet['text'])
#auth = twitter.get_authentication_tokens()
#
#context = zmq.Context()
#socket = context.socket(zmq.REQ)
#port = "5555"
#socket.connect ("tcp://localhost:%s" % port)
#
#for i in range (1,10):
#    socket.send ("saying hello from python")
#    message = socket.recv()
#    print("Received reply from server:", message)
|
# -*- coding: utf-8 -*-
import math
import httplib
import urllib
import urllib2
import json
import hashlib
import hmac
import time
import copy
import string
import random
import socket
import sys
from _CLASS import *
from _ExmoAPI import *
from _key import *
import _file
import g # global
import ted
import f_main
import f_print
import f_string
import f_trade
import f_tactic
import f_currency
def main():
    """Run the EXMO trading bot: connect to the API, initialise the configured
    currency pair, then loop forever deciding each pass whether to wait or trade.

    All failures are reported through f_print.print_exception and swallowed,
    so the loop keeps running after errors.  This file is Python 2 (note the
    print statement below).
    """
    try:
        g.exmoAPI_instance = ExmoAPI(MY_API_KEY, MY_API_SECRET) # EXMO-API-Connection
    except:
        f_print.print_exception(sys._getframe().f_code.co_name + ' ExmoAPI()')
    try:
        # make pair-number from first and second currency (ted.py)
        pair_nr = f_currency.get_pairs_nr(FIRST_CURRENCY, SECOND_CURRENCY)
        f_main.ini(pair_nr)
        f_print.print_pair()
        f_print.print_tactic(pair_nr)
        # active_trade_pair = pair_nr
    except:
        f_print.print_exception(sys._getframe().f_code.co_name + 'ini() + print()')
    # Python 2 print statement; trailing comma suppresses the newline.
    print '> Bot: I am ready . . .',
    while (True):
        # NOTE(review): this pair/ini step is OUTSIDE any try, so an exception
        # here escapes main() -- the module-level double main() call relies on
        # that for a one-shot restart.
        pair_nr = f_currency.get_pairs_nr(FIRST_CURRENCY, SECOND_CURRENCY)
        f_main.ini(pair_nr)
        # my_func.__name__
        try:
            # active_trade_pair depending on the chosen tactics and depending on the free currency
            active_trade_pair = f_currency.get_active_trade_pair(pair_nr)
            if (active_trade_pair < 0):
                # Negative result: nothing tradable right now, idle this pass.
                f_main.do_wait()
            else:
                try:
                    f_main.do_trade(active_trade_pair)
                except:
                    f_print.print_exception(sys._getframe().f_code.co_name + ' f_main.do_trade')
        except:
            f_print.print_exception(sys._getframe().f_code.co_name + ' while-loop')
        # raise sys.exit()
        # sys.exit("Exit")
# main() only returns if an exception escapes its loop setup; the second call
# then restarts the bot once more before the process finally exits.
main()
main()
|
# Model components to benchmark, in execution order.  Commented-out entries
# are deliberately excluded from the benchmark run.
# NOTE(review): the names match ActivitySim's model steps -- confirm against
# the configs this benchmark targets.
COMPONENT_NAMES = [
    # "compute_accessibility",
    "school_location",
    "workplace_location",
    "auto_ownership_simulate",
    "free_parking",
    "cdap_simulate",
    "mandatory_tour_frequency",
    "mandatory_tour_scheduling",
    "joint_tour_frequency",
    "joint_tour_composition",
    "joint_tour_participation",
    "joint_tour_destination",
    "joint_tour_scheduling",
    "non_mandatory_tour_frequency",
    "non_mandatory_tour_destination",
    "non_mandatory_tour_scheduling",
    "tour_mode_choice_simulate",
    "atwork_subtour_frequency",
    "atwork_subtour_destination",
    "atwork_subtour_scheduling",
    "atwork_subtour_mode_choice",
    "stop_frequency",
    "trip_purpose",
    "trip_destination",
    "trip_purpose_and_destination",
    "trip_scheduling",
    "trip_mode_choice",
    # "write_data_dictionary",
    # "track_skim_usage",
    "write_trip_matrices",
    # "write_tables",
]
# Settings overrides applied for benchmark runs.
BENCHMARK_SETTINGS = {
    "households_sample_size": 48_769,  # match hh sample size in example data
}
# Do not cache skims to disk for benchmark runs.
SKIM_CACHE = False
# Injectables loaded up-front so their cost is excluded from timings.
PRELOAD_INJECTABLES = ("skim_dict",)
# Benchmark harness knobs: one repetition, one invocation per measurement.
REPEAT = 1
NUMBER = 1
TIMEOUT = 36000.0  # ten hours
|
from xstatic.main import XStatic
# names below must be package names
mod_names = [
    'asciinema_player',
    'bootbox',
    'bootstrap',
    'font_awesome',
    'jquery',
    'jquery_ui',
    'jquery_file_upload',
    'pygments',
]
# Import xstatic.pkg once; each listed package becomes an attribute of it.
pkg = __import__('xstatic.pkg', fromlist=mod_names)
# Map each xstatic package's canonical name to the directory it serves from.
serve_files = {}
for name in mod_names:
    xstatic_pkg = XStatic(getattr(pkg, name), root_url='/static',
                          provider='local', protocol='http')
    serve_files[xstatic_pkg.name] = xstatic_pkg.base_dir
|
from django.shortcuts import render,get_object_or_404,redirect, Http404, HttpResponseRedirect,HttpResponse
from django.core.paginator import Paginator
from django.views import generic, View
from qa.models import Question,Answer,CustomUser,Session
from django.views import generic
from qa.forms import AnswerForm,AskForm,SignupForm,LoginForm
from django.utils import timezone
import datetime
def testView(request,*args,**kwargs):
    """Health-check view: respond 'OK', mapping any failure to a 404.

    Fix: the original used a bare 'except:', which also swallowed
    SystemExit and KeyboardInterrupt; 'except Exception' lets those
    process-control exceptions propagate.

    Raises:
        Http404: if building the response fails.
    """
    try:
        return HttpResponse('OK')
    except Exception:
        raise Http404
class IndexView(generic.ListView):
    """Paginated list of the newest questions."""
    model = Question
    template_name = 'qa/index.html'
    context_object_name = 'new_question_list'
    paginate_by = 10

    def get_queryset(self):
        """Return the questions ordered via the manager's new() queryset."""
        return Question.objects.new()
class PopularView(generic.ListView):
    """Paginated list of questions ranked by popularity."""
    model = Question
    template_name = 'qa/popular_questions.html'
    context_object_name = 'popular_question_list'
    paginate_by = 10

    def get_queryset(self):
        """Return the questions ordered via the manager's popular() queryset."""
        popular_questions = Question.objects.popular()
        return popular_questions
class DetailQuestionView(generic.View):
    """Question page: show the question and an answer form (GET), accept a
    new answer (POST)."""

    def get(self, request, pk):
        """Render the question with an empty, pre-bound answer form."""
        question = Question.objects.get(pk=pk)
        answer_form = AnswerForm(initial={'question': question.id})
        context = {'form': answer_form, 'question': question}
        return render(request, 'qa/question_detail.html', context=context)

    def post(self, request, pk):
        """Save a valid answer and redirect back to the question; otherwise
        re-render the page with the form errors."""
        question = Question.objects.get(pk=pk)
        answer_form = AnswerForm(request.POST)
        if not answer_form.is_valid():
            context = {'form': answer_form, 'question': question}
            return render(request, 'qa/question_detail.html', context=context)
        answer_form._user = request.user
        answer_form.save()
        return redirect(question)
class QuestionCreateView(generic.View):
    """Ask-a-question page: render the form (GET), create the question (POST)."""

    def get(self, request):
        """Render an empty ask form."""
        return render(request, 'qa/question_create.html', context={'form': AskForm()})

    def post(self, request):
        """Create the question and redirect to it; re-render on invalid input."""
        ask_form = AskForm(request.POST)
        if ask_form.is_valid():
            ask_form._user = request.user
            return redirect(ask_form.save())
        return render(request, 'qa/question_create.html', context={'form': ask_form})
class SignupView(generic.View):
    """Registration page: render the signup form (GET); create the account,
    log the new user in, and set the session cookie (POST)."""

    def get(self, request):
        """Render an empty signup form."""
        form = SignupForm()
        return render(request, 'qa/signup.html', context={'form': form})

    def post(self, request):
        """Create the account and start a session.

        Fix: the original returned None (an HTTP 500 in Django) when the form
        validated but save() returned falsy; now the form is re-rendered in
        every failure path.
        """
        bound_form = SignupForm(request.POST)
        # Short-circuit keeps save() from running on an invalid form,
        # matching the original nesting.
        if bound_form.is_valid() and bound_form.save():
            url = request.POST.get('continue', '/')
            response = HttpResponseRedirect(url)
            sessid = CustomUser.objects.do_login(bound_form.cleaned_data['username'],
                                                 bound_form.cleaned_data['password'])
            response.set_cookie('sessid', sessid,
                                domain='127.0.0.1', httponly=True,
                                expires=timezone.datetime.now() + timezone.timedelta(days=5)
                                )
            return response
        return render(request, 'qa/signup.html', context={'form': bound_form})
class LoginView(generic.View):
    """Login page: render the form (GET); authenticate and set the session
    cookie (POST)."""

    def get(self, request):
        """Render an empty login form."""
        form = LoginForm()
        return render(request, 'qa/login.html', context={'form': form})

    def post(self, request):
        """Authenticate the posted credentials.

        Fix: the failure branch did render(request, login.html, ...) --
        attribute access on the posted login string, an AttributeError at
        runtime -- instead of the template path string.
        """
        login = request.POST.get('login')
        password = request.POST.get('password')
        url = request.POST.get('continue', '/')
        sessid = CustomUser.objects.do_login(login, password)
        if sessid:
            response = HttpResponseRedirect(url)
            response.set_cookie('sessid', sessid,
                                domain='127.0.0.1', httponly=True,
                                expires=timezone.datetime.now() + timezone.timedelta(days=5)
                                )
            return response
        else:
            error = u'Неверный логин / пароль'
            return render(request, 'qa/login.html', {'error': error})
#-*- coding: utf-8 -*-
"""
Package init: declares the public submodules of this package.
Created on 2019/5/14
@Author: xhj
"""
# NOTE(review): os and sys appear unused in this module.
import os
import sys
# Submodules exported by 'from <package> import *'.
__all__ = ['utils', 'data_prepare', 'camera_calibration']
__author__ = 'rayatnia'
import smtplib
from email.mime.text import MIMEText
from threading import Thread
from django.core.mail import send_mail
from django.conf import settings
def async(f):
    """Decorator: run *f* in a background thread whenever the wrapped
    callable is invoked.

    The spawned thread is fire-and-forget -- it is never joined and the
    wrapper discards f's return value.

    NOTE(review): 'async' became a reserved keyword in Python 3.7, so this
    module only imports on Python <= 3.6; renaming the decorator would break
    existing importers and is flagged rather than changed here.
    """
    def wrapper(*args, **kwargs):
        thr = Thread(target=f, args=args, kwargs=kwargs)
        thr.start()
    return wrapper
def send_mails(subject, body, fr=settings.EMAIL_HOST_USER, to=None):
    """Send an HTML email through Django's send_mail.

    Args:
        subject: message subject line.
        body: HTML body (sent as html_message; the plain-text part is empty).
        fr: sender address; defaults to settings.EMAIL_HOST_USER.
        to: iterable of recipient addresses (default: no recipients).

    Fixes: the 'fr' parameter was accepted but ignored in favour of a
    hard-coded sender address, and 'to' used a mutable default list.
    """
    recipients = list(to) if to is not None else []
    send_mail(
        subject,
        "",
        fr,
        recipients,
        fail_silently=False,
        html_message=body)
|
"""
The surface into which tetrominoes fall.
https://tetris.wiki/Playfield
"""
import pygame
from src.config import config as src_config
from src.tetromino import Block
class Playfield:
    """The surface into which tetrominoes fall"""
    # Playfield section of the global config: drawing area, cell size,
    # background color, vanish-zone rectangle.
    config = src_config["playfield"]

    def __init__(self, display):
        """Initialize an instance of Playfield

        *display* is the main pygame surface; the playfield renders into the
        subsurface of it given by config["area"].
        """
        self.display = display
        self.surface = self.display.subsurface(self.config["area"])
        self.surface.fill(self.config["bgd_color"])
        # Blocks that have already been locked in place on the field.
        self.locked_blocks = LockedBlocked(self)

    def get_x(self, col):
        """Get the x coordinate of the given column in the playfield"""
        return col * self.config["cell_size"][0]

    def get_y(self, row):
        """Get the y coordinate of the given row in the playfield"""
        return row * self.config["cell_size"][1]

    def valid_space(self, piece):
        """Check if the given piece is in the valid space of the playfield

        Returns True if the given piece is contained within the playfield and
        its not colliding with locked blocks, otherwise returns False.
        """
        return self.contains(piece) and (not self.locked_collide(piece))

    def contains(self, piece):
        """Check if the given piece is contained within the playfield

        Returns True if the given piece is contained within the playfield,
        otherwise returns False.
        """
        # The legal area is the visible field plus the vanish zone above it,
        # so pieces may sit partially above the visible surface.
        vanish_zone = pygame.Rect(self.config["vanish_zone"])
        rect = self.surface.get_rect().union(vanish_zone)
        for block in piece.sprites():
            if not rect.contains(block.rect):
                return False
        return True

    def locked_collide(self, piece):
        """Check if the given piece is colliding with locked blocks

        Returns True if the given piece is colliding with locked blocks,
        otherwise returns False.
        """
        # dokilla/dokillb False: report overlaps without removing sprites
        # from either group.
        return bool(pygame.sprite.groupcollide(piece, self.locked_blocks,
                    dokilla=False, dokillb=False))

    def lock_piece(self, piece):
        """Lock the given piece onto the playfield"""
        # Add all block in the given piece to self.locked_blocks
        self.locked_blocks.add(piece)
        # Remove all block from the given piece, so the player can no longer
        # control the piece.
        piece.empty()
        # Clear complete line and return the amount of line cleared
        return self.locked_blocks.line_clear()

    def clear_callback(self, surf, rect):
        """Callback function for pygame.sprite.AbstractGroup.clear"""
        surf.fill(self.config["bgd_color"], rect)
class LockedBlocked(pygame.sprite.RenderUpdates):
    """A group of blocks that's been locked onto the playfield"""

    def __init__(self, playfield, *sprites):
        """Initialize an instance of LockedBlocked

        Every sprite in the group must be an instance of Block.
        """
        super().__init__(*sprites)
        self.playfield = playfield
        # One sub-group per row, 22 rows in total; index 0 is the bottom row.
        self.sprite_groups = tuple(pygame.sprite.Group() for i in range(0, 22))

    def add_internal(self, sprite):
        """Do not use this method directly

        It is used by the group to add a sprite internally.
        """
        # Non-Block sprites are silently ignored.
        if isinstance(sprite, Block):
            super().add_internal(sprite)
            # NOTE(review): the index maps sprite.row into [0, 21], which
            # assumes rows range from -2 (vanish zone) to 19 -- confirm
            # against Block's coordinate system.
            self.sprite_groups[21 - (sprite.row + 2)].add(sprite)

    def remove_internal(self, sprite):
        """Do not use this method directly

        It is used by the group to remove a sprite internally.
        """
        if isinstance(sprite, Block):
            super().remove_internal(sprite)
            # Keep the per-row bookkeeping in sync with the main group.
            self.sprite_groups[21 - (sprite.row + 2)].remove(sprite)

    def draw(self):
        """Draw the lock blocks onto the display

        Returns a list of Rectangular areas on the display that have
        been changed.
        """
        dirty = super().draw(self.playfield.surface)
        # Dirty rects are in playfield-surface coordinates; translate them to
        # display coordinates for the caller.
        offset = self.playfield.surface.get_offset()
        return [rect.move(offset) for rect in dirty]

    def clear(self):
        """Draw the background of the playfield over the sprites"""
        super().clear(self.playfield.surface, self.playfield.clear_callback)

    def line_clear(self):
        """Clear complete row of blocks, and moves blocks above it downward

        Returns the total amount of rows been cleared.
        """
        line_cleared = 0
        # Walk rows bottom-up (index 0 is the bottom row).
        for i, group in enumerate(self.sprite_groups):
            if len(group) == 0:
                # Empty row: rows above it are assumed empty too (gravity
                # invariant), so stop scanning early.
                break
            elif len(group) == 10:
                # Full row (10 columns): removing the sprites from self also
                # empties this row group via remove_internal.
                self.remove(group)
                line_cleared += 1
            elif line_cleared:
                # Partial row above cleared rows: shift its blocks down by the
                # number of rows cleared below and re-file them in the lower
                # row group.
                for block in group.sprites():
                    block.move(0, line_cleared)
                self.sprite_groups[i - line_cleared].add(group)
                group.empty()
        return line_cleared
|
from common_colors import *
import os, sys
import pycoingecko
import requests # to get image from the web
import shutil # to save it locally
# Argument is coingecko image output path
# Download the logo of each of the top-100 tokens (by USD market cap) into
# <output path>/<token id>.png.
coingecko_client = pycoingecko.CoinGeckoAPI()
top_tokens = coingecko_client.get_coins_markets(vs_currency='USD', per_page=100)
tokens = [(t['id'], t['image']) for t in top_tokens]
path = sys.argv[1]
for (token_id, image_url) in tokens:
    filename = "%s/%s.png" % (path, token_id)
    # Fix: close the streamed connection deterministically (the original never
    # closed the response) and stop shadowing the builtin 'id'.
    with requests.get(image_url, stream=True) as r:
        # Check if the image was retrieved successfully
        if r.status_code == 200:
            # Set decode_content value to True, otherwise the downloaded image file's size will be zero.
            r.raw.decode_content = True
            # Open a local file with wb ( write binary ) permission.
            with open(filename, 'wb') as f:
                shutil.copyfileobj(r.raw, f)
            print('Image sucessfully Downloaded: ', filename)
        else:
            print('Image Couldn\'t be retreived')
|
import string, random
import networkx as nx
import matplotlib.pyplot as plt
from scipy.sparse import random as sparse_random
from layout_grouped_graph import partition_layout
# Random string generator
def rand_string(size=6, chars=string.ascii_uppercase):
    """Return a random string of *size* characters drawn (with replacement)
    from *chars*."""
    picked = [random.choice(chars) for _ in range(size)]
    return "".join(picked)
# Set up a nodes and networks randomly
nodes = [rand_string() for _ in range(30)]
networks = [rand_string() for _ in range(5)]
# Repeat the 5 network names 6 times so each of the 30 nodes can be assigned one.
networks_list = networks*6
random.shuffle(networks_list)
# Define what nodes belong to what network and what their color should be
node_network_map = dict(zip(nodes, networks_list))
colors = ['green', 'royalblue', 'red', 'orange', 'cyan']
color_map = dict(zip(networks, colors))
graph = nx.Graph()
graph.add_nodes_from(nodes)
# Group node names by the color of their network, for per-color drawing below.
nodes_by_color = {val: [node for node in graph if color_map[node_network_map[node]] == val]
                  for val in colors}
# Take random sparse matrix as adjacency matrix
mat = sparse_random(30, 30, density=0.3).todense()
for row, row_val in enumerate(nodes):
    for col, col_val in enumerate(nodes):
        if col > row and mat[row, col] != 0.0: # Stick to upper half triangle, mat is not symmetric
            graph.add_edge(row_val, col_val, weight=mat[row, col])
# Choose a layout to visualize graph
# pos = nx.spring_layout(graph)
# import ipdb; ipdb.set_trace()
pos = partition_layout(graph, node_network_map, ratio=0.15)
edges = graph.edges()
# Get the edge weights and normalize them
weights = [abs(graph[u][v]['weight']) for u, v in edges]
weights_n = [5*float(i)/max(weights) for i in weights] # Change 5 to control thickness
# First draw the nodes
plt.figure()
for color, node_names in nodes_by_color.items():
    nx.draw_networkx_nodes(graph, pos=pos, nodelist=node_names, node_color=color)
# Then draw edges with thickness defined by weights_n
nx.draw_networkx_edges(graph, pos=pos, width=weights_n, alpha=0.5)
nx.draw_networkx_labels(graph, pos=pos)
plt.show()
# from _main import draw
# draw(graph, node_positions=pos)
# plt.show()
|
# coding: utf-8
"""
Digitick REST API
The Digitick REST API is a set of methods giving access to catalog, user and cart management.
OpenAPI spec version: v1.0
Contact: contact@digitick.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class EventsResponseInner(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'id': 'int',
'name': 'str',
'presentation1': 'str',
'producer1': 'str',
'producer2': 'str',
'sponsor': 'str',
'category_id': 'int',
'sub_category_id': 'int',
'picture_url110_png': 'str',
'picture_url80_gif': 'str',
'picture_url45_gif': 'str',
'picture_copyright': 'str',
'venue': 'str',
'address': 'str',
'zip_code': 'str',
'city': 'str',
'country_id': 'int',
'number_of_dates': 'int',
'first_date': 'str',
'last_date': 'str',
'number_of_prices': 'int',
'min_price_cents': 'int',
'max_price_cents': 'int',
'vat_percentage': 'str',
'is_exclu': 'bool'
}
attribute_map = {
'id': 'id',
'name': 'name',
'presentation1': 'presentation1',
'producer1': 'producer1',
'producer2': 'producer2',
'sponsor': 'sponsor',
'category_id': 'categoryId',
'sub_category_id': 'subCategoryId',
'picture_url110_png': 'pictureUrl110Png',
'picture_url80_gif': 'pictureUrl80Gif',
'picture_url45_gif': 'pictureUrl45Gif',
'picture_copyright': 'pictureCopyright',
'venue': 'venue',
'address': 'address',
'zip_code': 'zipCode',
'city': 'city',
'country_id': 'countryId',
'number_of_dates': 'numberOfDates',
'first_date': 'firstDate',
'last_date': 'lastDate',
'number_of_prices': 'numberOfPrices',
'min_price_cents': 'minPriceCents',
'max_price_cents': 'maxPriceCents',
'vat_percentage': 'vatPercentage',
'is_exclu': 'isExclu'
}
def __init__(self, id=None, name=None, presentation1=None, producer1=None, producer2=None, sponsor=None, category_id=None, sub_category_id=None, picture_url110_png=None, picture_url80_gif=None, picture_url45_gif=None, picture_copyright=None, venue=None, address=None, zip_code=None, city=None, country_id=None, number_of_dates=None, first_date=None, last_date=None, number_of_prices=None, min_price_cents=None, max_price_cents=None, vat_percentage=None, is_exclu=None):
"""
EventsResponseInner - a model defined in Swagger
"""
self._id = None
self._name = None
self._presentation1 = None
self._producer1 = None
self._producer2 = None
self._sponsor = None
self._category_id = None
self._sub_category_id = None
self._picture_url110_png = None
self._picture_url80_gif = None
self._picture_url45_gif = None
self._picture_copyright = None
self._venue = None
self._address = None
self._zip_code = None
self._city = None
self._country_id = None
self._number_of_dates = None
self._first_date = None
self._last_date = None
self._number_of_prices = None
self._min_price_cents = None
self._max_price_cents = None
self._vat_percentage = None
self._is_exclu = None
if id is not None:
self.id = id
if name is not None:
self.name = name
if presentation1 is not None:
self.presentation1 = presentation1
if producer1 is not None:
self.producer1 = producer1
if producer2 is not None:
self.producer2 = producer2
if sponsor is not None:
self.sponsor = sponsor
if category_id is not None:
self.category_id = category_id
if sub_category_id is not None:
self.sub_category_id = sub_category_id
if picture_url110_png is not None:
self.picture_url110_png = picture_url110_png
if picture_url80_gif is not None:
self.picture_url80_gif = picture_url80_gif
if picture_url45_gif is not None:
self.picture_url45_gif = picture_url45_gif
if picture_copyright is not None:
self.picture_copyright = picture_copyright
if venue is not None:
self.venue = venue
if address is not None:
self.address = address
if zip_code is not None:
self.zip_code = zip_code
if city is not None:
self.city = city
if country_id is not None:
self.country_id = country_id
if number_of_dates is not None:
self.number_of_dates = number_of_dates
if first_date is not None:
self.first_date = first_date
if last_date is not None:
self.last_date = last_date
if number_of_prices is not None:
self.number_of_prices = number_of_prices
if min_price_cents is not None:
self.min_price_cents = min_price_cents
if max_price_cents is not None:
self.max_price_cents = max_price_cents
if vat_percentage is not None:
self.vat_percentage = vat_percentage
if is_exclu is not None:
self.is_exclu = is_exclu
@property
def id(self):
"""
Gets the id of this EventsResponseInner.
:return: The id of this EventsResponseInner.
:rtype: int
"""
return self._id
@id.setter
def id(self, id):
"""
Sets the id of this EventsResponseInner.
:param id: The id of this EventsResponseInner.
:type: int
"""
self._id = id
@property
def name(self):
"""
Gets the name of this EventsResponseInner.
:return: The name of this EventsResponseInner.
:rtype: str
"""
return self._name
@name.setter
def name(self, name):
"""
Sets the name of this EventsResponseInner.
:param name: The name of this EventsResponseInner.
:type: str
"""
self._name = name
@property
def presentation1(self):
"""
Gets the presentation1 of this EventsResponseInner.
:return: The presentation1 of this EventsResponseInner.
:rtype: str
"""
return self._presentation1
@presentation1.setter
def presentation1(self, presentation1):
"""
Sets the presentation1 of this EventsResponseInner.
:param presentation1: The presentation1 of this EventsResponseInner.
:type: str
"""
self._presentation1 = presentation1
@property
def producer1(self):
"""
Gets the producer1 of this EventsResponseInner.
:return: The producer1 of this EventsResponseInner.
:rtype: str
"""
return self._producer1
@producer1.setter
def producer1(self, producer1):
"""
Sets the producer1 of this EventsResponseInner.
:param producer1: The producer1 of this EventsResponseInner.
:type: str
"""
self._producer1 = producer1
@property
def producer2(self):
"""
Gets the producer2 of this EventsResponseInner.
:return: The producer2 of this EventsResponseInner.
:rtype: str
"""
return self._producer2
@producer2.setter
def producer2(self, producer2):
"""
Sets the producer2 of this EventsResponseInner.
:param producer2: The producer2 of this EventsResponseInner.
:type: str
"""
self._producer2 = producer2
@property
def sponsor(self):
"""
Gets the sponsor of this EventsResponseInner.
:return: The sponsor of this EventsResponseInner.
:rtype: str
"""
return self._sponsor
@sponsor.setter
def sponsor(self, sponsor):
"""
Sets the sponsor of this EventsResponseInner.
:param sponsor: The sponsor of this EventsResponseInner.
:type: str
"""
self._sponsor = sponsor
@property
def category_id(self):
"""
Gets the category_id of this EventsResponseInner.
:return: The category_id of this EventsResponseInner.
:rtype: int
"""
return self._category_id
@category_id.setter
def category_id(self, category_id):
"""
Sets the category_id of this EventsResponseInner.
:param category_id: The category_id of this EventsResponseInner.
:type: int
"""
self._category_id = category_id
@property
def sub_category_id(self):
"""
Gets the sub_category_id of this EventsResponseInner.
:return: The sub_category_id of this EventsResponseInner.
:rtype: int
"""
return self._sub_category_id
@sub_category_id.setter
def sub_category_id(self, sub_category_id):
"""
Sets the sub_category_id of this EventsResponseInner.
:param sub_category_id: The sub_category_id of this EventsResponseInner.
:type: int
"""
self._sub_category_id = sub_category_id
@property
def picture_url110_png(self):
"""
Gets the picture_url110_png of this EventsResponseInner.
:return: The picture_url110_png of this EventsResponseInner.
:rtype: str
"""
return self._picture_url110_png
@picture_url110_png.setter
def picture_url110_png(self, picture_url110_png):
"""
Sets the picture_url110_png of this EventsResponseInner.
:param picture_url110_png: The picture_url110_png of this EventsResponseInner.
:type: str
"""
self._picture_url110_png = picture_url110_png
@property
def picture_url80_gif(self):
"""
Gets the picture_url80_gif of this EventsResponseInner.
:return: The picture_url80_gif of this EventsResponseInner.
:rtype: str
"""
return self._picture_url80_gif
@picture_url80_gif.setter
def picture_url80_gif(self, picture_url80_gif):
"""
Sets the picture_url80_gif of this EventsResponseInner.
:param picture_url80_gif: The picture_url80_gif of this EventsResponseInner.
:type: str
"""
self._picture_url80_gif = picture_url80_gif
@property
def picture_url45_gif(self):
"""
Gets the picture_url45_gif of this EventsResponseInner.
:return: The picture_url45_gif of this EventsResponseInner.
:rtype: str
"""
return self._picture_url45_gif
@picture_url45_gif.setter
def picture_url45_gif(self, picture_url45_gif):
"""
Sets the picture_url45_gif of this EventsResponseInner.
:param picture_url45_gif: The picture_url45_gif of this EventsResponseInner.
:type: str
"""
self._picture_url45_gif = picture_url45_gif
@property
def picture_copyright(self):
"""
Gets the picture_copyright of this EventsResponseInner.
:return: The picture_copyright of this EventsResponseInner.
:rtype: str
"""
return self._picture_copyright
@picture_copyright.setter
def picture_copyright(self, picture_copyright):
"""
Sets the picture_copyright of this EventsResponseInner.
:param picture_copyright: The picture_copyright of this EventsResponseInner.
:type: str
"""
self._picture_copyright = picture_copyright
@property
def venue(self):
"""
Gets the venue of this EventsResponseInner.
:return: The venue of this EventsResponseInner.
:rtype: str
"""
return self._venue
@venue.setter
def venue(self, venue):
"""
Sets the venue of this EventsResponseInner.
:param venue: The venue of this EventsResponseInner.
:type: str
"""
self._venue = venue
@property
def address(self):
"""
Gets the address of this EventsResponseInner.
:return: The address of this EventsResponseInner.
:rtype: str
"""
return self._address
@address.setter
def address(self, address):
"""
Sets the address of this EventsResponseInner.
:param address: The address of this EventsResponseInner.
:type: str
"""
self._address = address
@property
def zip_code(self):
"""
Gets the zip_code of this EventsResponseInner.
:return: The zip_code of this EventsResponseInner.
:rtype: str
"""
return self._zip_code
@zip_code.setter
def zip_code(self, zip_code):
"""
Sets the zip_code of this EventsResponseInner.
:param zip_code: The zip_code of this EventsResponseInner.
:type: str
"""
self._zip_code = zip_code
@property
def city(self):
"""
Gets the city of this EventsResponseInner.
:return: The city of this EventsResponseInner.
:rtype: str
"""
return self._city
@city.setter
def city(self, city):
"""
Sets the city of this EventsResponseInner.
:param city: The city of this EventsResponseInner.
:type: str
"""
self._city = city
@property
def country_id(self):
"""
Gets the country_id of this EventsResponseInner.
:return: The country_id of this EventsResponseInner.
:rtype: int
"""
return self._country_id
@country_id.setter
def country_id(self, country_id):
"""
Sets the country_id of this EventsResponseInner.
:param country_id: The country_id of this EventsResponseInner.
:type: int
"""
self._country_id = country_id
@property
def number_of_dates(self):
"""
Gets the number_of_dates of this EventsResponseInner.
:return: The number_of_dates of this EventsResponseInner.
:rtype: int
"""
return self._number_of_dates
@number_of_dates.setter
def number_of_dates(self, number_of_dates):
"""
Sets the number_of_dates of this EventsResponseInner.
:param number_of_dates: The number_of_dates of this EventsResponseInner.
:type: int
"""
self._number_of_dates = number_of_dates
@property
def first_date(self):
"""
Gets the first_date of this EventsResponseInner.
:return: The first_date of this EventsResponseInner.
:rtype: str
"""
return self._first_date
@first_date.setter
def first_date(self, first_date):
"""
Sets the first_date of this EventsResponseInner.
:param first_date: The first_date of this EventsResponseInner.
:type: str
"""
self._first_date = first_date
@property
def last_date(self):
    """
    Gets the last_date of this EventsResponseInner.

    :return: The last_date of this EventsResponseInner.
    :rtype: str
    """
    return self._last_date

@last_date.setter
def last_date(self, last_date):
    """
    Sets the last_date of this EventsResponseInner.

    :param last_date: The last_date of this EventsResponseInner.
    :type: str
    """
    self._last_date = last_date
@property
def number_of_prices(self):
    """
    Gets the number_of_prices of this EventsResponseInner.

    :return: The number_of_prices of this EventsResponseInner.
    :rtype: int
    """
    return self._number_of_prices

@number_of_prices.setter
def number_of_prices(self, number_of_prices):
    """
    Sets the number_of_prices of this EventsResponseInner.

    :param number_of_prices: The number_of_prices of this EventsResponseInner.
    :type: int
    """
    self._number_of_prices = number_of_prices
@property
def min_price_cents(self):
    """
    Gets the min_price_cents of this EventsResponseInner.

    :return: The min_price_cents of this EventsResponseInner.
    :rtype: int
    """
    return self._min_price_cents

@min_price_cents.setter
def min_price_cents(self, min_price_cents):
    """
    Sets the min_price_cents of this EventsResponseInner.

    :param min_price_cents: The min_price_cents of this EventsResponseInner.
    :type: int
    """
    self._min_price_cents = min_price_cents
@property
def max_price_cents(self):
    """
    Gets the max_price_cents of this EventsResponseInner.

    :return: The max_price_cents of this EventsResponseInner.
    :rtype: int
    """
    return self._max_price_cents

@max_price_cents.setter
def max_price_cents(self, max_price_cents):
    """
    Sets the max_price_cents of this EventsResponseInner.

    :param max_price_cents: The max_price_cents of this EventsResponseInner.
    :type: int
    """
    self._max_price_cents = max_price_cents
@property
def vat_percentage(self):
    """
    Gets the vat_percentage of this EventsResponseInner.

    :return: The vat_percentage of this EventsResponseInner.
    :rtype: str
    """
    return self._vat_percentage

@vat_percentage.setter
def vat_percentage(self, vat_percentage):
    """
    Sets the vat_percentage of this EventsResponseInner.

    :param vat_percentage: The vat_percentage of this EventsResponseInner.
    :type: str
    """
    self._vat_percentage = vat_percentage
@property
def is_exclu(self):
    """
    Gets the is_exclu of this EventsResponseInner.

    :return: The is_exclu of this EventsResponseInner.
    :rtype: bool
    """
    return self._is_exclu

@is_exclu.setter
def is_exclu(self, is_exclu):
    """
    Sets the is_exclu of this EventsResponseInner.

    :param is_exclu: The is_exclu of this EventsResponseInner.
    :type: bool
    """
    self._is_exclu = is_exclu
def to_dict(self):
    """Return the model's properties as a plain dict.

    Nested models (anything exposing ``to_dict``) are converted
    recursively, including inside list elements and dict values.
    """
    def _as_plain(item):
        # Convert a nested model to a dict; pass plain values through.
        return item.to_dict() if hasattr(item, "to_dict") else item

    result = {}
    for attr, _ in iteritems(self.swagger_types):
        value = getattr(self, attr)
        if isinstance(value, list):
            result[attr] = [_as_plain(element) for element in value]
        elif hasattr(value, "to_dict"):
            result[attr] = value.to_dict()
        elif isinstance(value, dict):
            result[attr] = {key: _as_plain(val)
                            for key, val in value.items()}
        else:
            result[attr] = value
    return result
def to_str(self):
    """
    Returns the string representation of the model
    """
    # pformat gives a readable, pretty-printed dump of all properties.
    return pformat(self.to_dict())
def __repr__(self):
    """
    For `print` and `pprint`
    """
    # Delegate to to_str() so repr matches the pretty-printed dict form.
    return self.to_str()
def __eq__(self, other):
    """Two EventsResponseInner instances are equal iff every attribute matches."""
    if isinstance(other, EventsResponseInner):
        return self.__dict__ == other.__dict__
    return False
def __ne__(self, other):
    """
    Returns true if both objects are not equal
    """
    # Defined explicitly because Python 2 does not derive __ne__ from __eq__.
    return not self == other
|
import os
import sys
from pathlib import Path

# Expect exactly one CLI argument: the root data directory.
if len(sys.argv) != 2:
    print('You must pass the data directory as the first argument. E.g. "py createDataFolder.py C:/DATA"')
    # Bug fix: exit with a non-zero status so callers can detect the failure.
    # Bare exit() is meant for interactive sessions and returns status 0.
    sys.exit(1)

data_root = Path(sys.argv[1])  # renamed from 'dir', which shadows the builtin

# Create the CCAP folder layout; parents/exist_ok make the script idempotent.
(data_root / "CCAP" / "T0" / "Change9606").mkdir(parents=True, exist_ok=True)
(data_root / "CCAP" / "T1").mkdir(parents=True, exist_ok=True)
|
import regex
import lib.logger as logging
from lib.functions import wait_until
from lib.game import ui
from lib.game.battle_bot import ManualBattleBot
from lib.game.missions.missions import Missions
logger = logging.get_logger(__name__)
class WorldBossInvasion(Missions):
    """Class for working with World Boss Invasion missions."""

    class MissionFilter:
        """Class for working with mission types of World Boss Invasion."""

        DEFAULT_ERROR = 3  # Number of errors in the form of inserted, deleted or substituted characters in regex

        def __init__(self, pattern, opposite_pattern, mission_filter, opposite_filter):
            """Class initialization.

            :param str pattern: regular expression pattern for mission's condition.
            :param str opposite_pattern: regular expression pattern for opposite mission's condition.
            :param ui.UIElement mission_filter: UI for main mission filter.
            :param ui.UIElement opposite_filter: UI for opposite mission filter.
            """
            # Fuzzy patterns: allow up to DEFAULT_ERROR OCR errors when matching.
            self.pattern = f"({pattern}){{e<={self.DEFAULT_ERROR}}}"
            self._regexp = regex.compile(self.pattern)
            self.opposite_pattern = f"({opposite_pattern}){{e<={self.DEFAULT_ERROR}}}"
            self._opposite_regexp = regex.compile(self.opposite_pattern)
            self.filter = mission_filter
            self.opposite_filter = opposite_filter

        def get_filter(self, text):
            # Returns the UI filter matching the mission text, or None when
            # neither the main nor the opposite pattern matches.
            if self._regexp.match(text):
                return self.filter
            if self._opposite_regexp.match(text):
                return self.opposite_filter

    class SuperHeroes(MissionFilter):

        def __init__(self):
            super().__init__(pattern="Clear the stage with more than N Super Heroes",
                             opposite_pattern="Clear the stage with less than N Super Heroes",
                             mission_filter=ui.INVASION_CHARACTER_FILTER_HERO,
                             opposite_filter=ui.INVASION_CHARACTER_FILTER_VILLAIN)

    class SuperVillain(MissionFilter):

        def __init__(self):
            super().__init__(pattern="Clear the stage with more than N Super Villain",
                             opposite_pattern="Clear the stage with less than N Super Villain",
                             mission_filter=ui.INVASION_CHARACTER_FILTER_VILLAIN,
                             opposite_filter=ui.INVASION_CHARACTER_FILTER_HERO)

    class BlastCharacters(MissionFilter):

        def __init__(self):
            super().__init__(pattern="Clear the stage with more than N Blast type Characters",
                             opposite_pattern="Clear the stage with less than N Blast type Characters",
                             mission_filter=ui.INVASION_CHARACTER_FILTER_BLAST,
                             opposite_filter=ui.INVASION_CHARACTER_FILTER_ALL)

    class CombatCharacters(MissionFilter):

        def __init__(self):
            super().__init__(pattern="Clear the stage with more than N Combat type Characters",
                             opposite_pattern="Clear the stage with less than N Combat type Characters",
                             mission_filter=ui.INVASION_CHARACTER_FILTER_COMBAT,
                             opposite_filter=ui.INVASION_CHARACTER_FILTER_ALL)

    class SpeedCharacters(MissionFilter):

        def __init__(self):
            super().__init__(pattern="Clear the stage with more than N Speed type Characters",
                             opposite_pattern="Clear the stage with less than N Speed type Characters",
                             mission_filter=ui.INVASION_CHARACTER_FILTER_SPEED,
                             opposite_filter=ui.INVASION_CHARACTER_FILTER_ALL)

    class UniversalCharacters(MissionFilter):

        def __init__(self):
            super().__init__(pattern="Clear the stage with more than N Universal type Characters",
                             opposite_pattern="Clear the stage with less than N Universal type Characters",
                             mission_filter=ui.INVASION_CHARACTER_FILTER_UNIVERSAL,
                             opposite_filter=ui.INVASION_CHARACTER_FILTER_ALL)

    class MaleCharacters(MissionFilter):

        def __init__(self):
            super().__init__(pattern="Clear the stage with more than N Male Characters",
                             opposite_pattern="Clear the stage with less than N Male Characters",
                             mission_filter=ui.INVASION_CHARACTER_FILTER_MALE,
                             opposite_filter=ui.INVASION_CHARACTER_FILTER_FEMALE)

    class FemaleCharacters(MissionFilter):

        def __init__(self):
            super().__init__(pattern="Clear the stage with more than N Female Characters",
                             opposite_pattern="Clear the stage with less than N Female Characters",
                             mission_filter=ui.INVASION_CHARACTER_FILTER_FEMALE,
                             opposite_filter=ui.INVASION_CHARACTER_FILTER_MALE)

    def __init__(self, game):
        """Class initialization.

        :param lib.game.game.Game game: instance of the game.
        """
        super().__init__(game, mode_name='WORLD BOSS INVASION')
        # _chests/_max_chests are lazily populated from screen text on first use.
        self._chests = None
        self._max_chests = None
        self._boss_mission = None
        self.mission_filters = [self.SuperHeroes(), self.SuperVillain(), self.MaleCharacters(), self.FemaleCharacters(),
                                self.CombatCharacters(), self.SpeedCharacters(), self.BlastCharacters(),
                                self.UniversalCharacters()]

    @property
    def battle_over_conditions(self):
        # Callables polled by the battle bot to decide when the fight is over.
        def damage():
            if self.emulator.is_ui_element_on_screen(ui.INVASION_END_BATTLE_DAMAGE):
                logger.info("Won battle, chest was acquired.")
                self._chests += 1
                return True
            return False

        def failed():
            return self.emulator.is_ui_element_on_screen(ui.INVASION_FAILED)

        return [damage, failed]

    def do_missions(self, times=None, ignore_coop_mission=False):
        """Does missions."""
        self.start_missions(times=times, ignore_coop_mission=ignore_coop_mission)
        self.end_missions()

    def start_missions(self, times=None, ignore_coop_mission=False):
        """Starts World Boss Invasion."""
        if self.open_world_boss_invasion():
            # Collect already-earned chests first, so chest slots are free.
            if self.chests > 0:
                if not self.acquire_chests():
                    return
            if times:
                self._max_chests = times
            if self.chests < self.max_chests and self._find_boss_for_fight():
                while self.chests < self.max_chests:
                    # NOTE(review): 'times' may be None here and the message text
                    # is missing its closing parenthesis.
                    logger.debug(f"{times} stages left to complete ({self.chests} out of {self.max_chests}.")
                    if not self.press_start_button(ignore_coop_mission=ignore_coop_mission):
                        return
                    self._wait_for_players_and_start_fight()
        logger.info("No more stages.")

    def end_missions(self):
        """Ends missions."""
        if not self.game.is_main_menu():
            if self.emulator.is_image_on_screen(ui.HOME):
                self.emulator.click_button(ui.HOME)
                self.close_after_mission_notifications()
                self.game.close_ads()
            else:
                logger.error("Can't return to main menu, HOME button is missing.")

    def open_world_boss_invasion(self):
        """Opens World Boss Invasion missions.

        :return: is WBI missions open or not.
        :rtype: bool
        """
        self.game.go_to_coop()
        if wait_until(self.emulator.is_ui_element_on_screen, ui_element=ui.INVASION_LABEL):
            self.emulator.click_button(ui.INVASION_LABEL)
            if wait_until(self.emulator.is_ui_element_on_screen, ui_element=ui.INVASION_MENU_LABEL):
                return wait_until(self.emulator.is_ui_element_on_screen, ui_element=ui.INVASION_MANAGE_CHESTS)
        return False

    def _get_chests_count(self):
        """Stores current chests and max chests amount."""
        chests_text = self.emulator.get_screen_text(ui_element=ui.INVASION_STAGES)
        current_chest, max_chest = self.game.get_current_and_max_values_from_text(chests_text)
        # Clamp to 5 to guard against OCR misreads; 5 is the in-game maximum.
        self._chests = 5 if current_chest > 5 else current_chest
        self._max_chests = 5 if max_chest > 5 else max_chest
        logger.info(f"{self._chests} chests out of {self._max_chests} (from '{chests_text}' text).")

    @property
    def chests(self):
        """Get current amount of chests.

        :rtype: int
        """
        if self._chests is None:
            self._get_chests_count()
        return self._chests

    @chests.setter
    def chests(self, value):
        """Update available chests value.

        :param int value: value to set.
        """
        self._chests = value

    @property
    def max_chests(self):
        """Get max amount of chests.

        :rtype: int
        """
        if self._max_chests is None:
            self._get_chests_count()
        return self._max_chests

    def acquire_chests(self):
        """Acquires all available chests."""
        logger.debug("Starting to acquire all available chests.")
        self.emulator.click_button(ui.INVASION_MANAGE_CHESTS)
        if wait_until(self.emulator.is_ui_element_on_screen, ui_element=ui.INVASION_CHESTS_MENU_LABEL):
            for chest_index in range(1, self.max_chests + 1):
                self._acquire_chest(chest_index)
        logger.debug("Going back to mission's lobby.")
        self.emulator.click_button(ui.MENU_BACK)
        if wait_until(self.emulator.is_ui_element_on_screen, ui_element=ui.INVASION_MANAGE_CHESTS):
            self._get_chests_count()
            return True
        # Implicitly returns None (falsy) when lobby can't be reached.
        logger.error("Can't get back to mission's lobby.")

    def _acquire_chest(self, chest_index):
        """Acquires chest by chest index.

        :param int chest_index: chest index (from 1 to max chests + 1)

        :return: was chest acquired or not.
        :rtype: bool
        """
        logger.debug(f"Trying to acquire chest #{chest_index}")
        chest_ui = ui.get_by_name(f'INVASION_CHEST_AVAILABLE_{chest_index}')
        if wait_until(self.emulator.is_ui_element_on_screen, timeout=1, ui_element=chest_ui):
            logger.debug(f"Chest {chest_index} is available. Trying to open.")
            self.emulator.click_button(chest_ui)
            if wait_until(self.emulator.is_ui_element_on_screen, ui_element=ui.INVASION_SKIP_CHEST):
                # Keep skipping the item reveal animation until it disappears...
                while self.emulator.is_ui_element_on_screen(ui_element=ui.INVASION_SKIP_CHEST):
                    logger.debug("Skipping chests items.")
                    self.emulator.click_button(ui.INVASION_SKIP_CHEST, min_duration=0.5,
                                               max_duration=0.8)
                # ...then keep clicking until we are back at the chests menu.
                while not self.emulator.is_ui_element_on_screen(ui_element=ui.INVASION_CHESTS_MENU_LABEL):
                    self.emulator.click_button(ui.INVASION_SKIP_CHEST, min_duration=0.5,
                                               max_duration=0.8)
                logger.debug("Chest acquired, going back to chest's menu.")
                return True
        logger.debug(f"Chest #{chest_index} isn't available.")
        return False

    def _find_boss_for_fight(self):
        """Finds available boss fight and enter it.

        :return: was fight found and entered or not.
        :rtype: bool
        """
        weekly_boss_name = self.emulator.get_screen_text(ui_element=ui.INVASION_NAME)
        logger.debug(f"Weekly boss name: {weekly_boss_name}")
        # Probe every boss slot of both event types; a non-empty timer text
        # means that boss is currently available.
        for bosses in ['INVASION_TWILIGHT_BATTLE_', 'INVASION_BLACK_ORDER_BATTLE_']:
            for boss_index in range(1, 8):
                boss_ui = ui.get_by_name(f'{bosses}{boss_index}')
                boss_time = self.emulator.get_screen_text(ui_element=boss_ui)
                if boss_time:
                    logger.debug(f"Found boss with UI: {boss_ui} with time {boss_time}, entering.")
                    self.emulator.click_button(boss_ui)
                    if wait_until(self.emulator.is_ui_element_on_screen, ui_element=ui.INVASION_BOSS_FIGHT_ENTER):
                        self._boss_mission = self.emulator.get_screen_text(ui.INVASION_BOSS_MISSION)
                        logger.debug(f"Current boss mission: {self._boss_mission}")
                        self.emulator.click_button(ui.INVASION_BOSS_FIGHT_ENTER)
                        return True
                    logger.error(f"Something went wrong with found boss {boss_ui}")
                    if wait_until(self.emulator.is_ui_element_on_screen, ui_element=ui.INVASION_BOSS_FIGHT_CLOSE):
                        logger.warning(f"Closing {boss_ui}")
                        self.emulator.click_button(ui.INVASION_BOSS_FIGHT_CLOSE)
        logger.error("Failed to found boss.")
        return False

    def _check_notifications_before_fight(self):
        """Checks fight notifications about any obstacles to start a fight.

        :return: can we start a fight or not.
        :rtype: bool
        """
        waiting_for_other_players = self.emulator.is_ui_element_on_screen(
            ui_element=ui.WAITING_FOR_OTHER_PLAYERS)
        if not waiting_for_other_players:
            # Setting _chests to _max_chests terminates the mission loop early.
            if self.emulator.is_ui_element_on_screen(ui_element=ui.NOT_ENOUGH_ENERGY):
                self.emulator.click_button(ui.NOT_ENOUGH_ENERGY)
                self._chests = self._max_chests
            if self.emulator.is_ui_element_on_screen(ui_element=ui.INVASION_NOT_ENOUGH_CHARACTERS):
                self.emulator.click_button(ui.INVASION_NOT_ENOUGH_CHARACTERS)
                self._chests = self._max_chests
            return False
        return True

    def press_start_button(self, start_button_ui=ui.INVASION_BOSS_FIGHT_START, ignore_coop_mission=False):
        """Presses start button of the mission.

        :return: was button clicked successfully or not.
        :rtype: bool
        """
        logger.debug(f"Pressing START button with UI Element: {start_button_ui}.")
        if wait_until(self.emulator.is_ui_element_on_screen, ui_element=start_button_ui):
            self._deploy_characters(ignore_coop_mission=ignore_coop_mission)
            self.emulator.click_button(start_button_ui)
            if wait_until(self._check_notifications_before_fight, timeout=10):
                return True
            if wait_until(self.emulator.is_ui_element_on_screen, ui_element=ui.INVASION_NO_CHEST_SLOTS):
                logger.warning("No slots for chests. Exiting.")
                self.emulator.click_button(ui.INVASION_NO_CHEST_SLOTS)
                return False
            if wait_until(self.emulator.is_ui_element_on_screen, timeout=2,
                          ui_element=ui.DISCONNECT_NEW_OPPONENT):
                logger.debug("Found disconnect notification. Trying to start again.")
                self.emulator.click_button(ui.DISCONNECT_NEW_OPPONENT)
                return True
        logger.error(f"Unable to press {start_button_ui} button.")
        return False

    def _deploy_characters(self, ignore_coop_mission=False):
        """Deploys 3 characters to battle."""
        no_main = self.emulator.is_image_on_screen(ui_element=ui.INVASION_NO_CHARACTER_MAIN)
        no_left = self.emulator.is_image_on_screen(ui_element=ui.INVASION_NO_CHARACTER_LEFT)
        no_right = self.emulator.is_image_on_screen(ui_element=ui.INVASION_NO_CHARACTER_RIGHT)
        # Only re-filter when at least one slot is empty and filtering isn't suppressed.
        if not ignore_coop_mission and (no_main or no_left or no_right):
            self._select_character_filter_by_mission()
        if no_main:
            self.emulator.click_button(ui.INVASION_CHARACTER_1)
        if no_left:
            self.emulator.click_button(ui.INVASION_CHARACTER_2)
        if no_right:
            self.emulator.click_button(ui.INVASION_CHARACTER_3)

    def _select_character_filter_by_mission(self):
        """Selects character filter by current mission."""
        for mission_filter in self.mission_filters:
            characters_filter = mission_filter.get_filter(text=self._boss_mission)
            if characters_filter:
                logger.debug(f"Found filter {characters_filter} by {mission_filter.__class__.__name__}")
                self.emulator.click_button(ui.INVASION_CHARACTER_FILTER, min_duration=1, max_duration=1)
                self.emulator.click_button(characters_filter, min_duration=1, max_duration=1)

    def _wait_for_players_and_start_fight(self):
        """Waits for players before start of the fight."""
        if wait_until(self.emulator.is_ui_element_on_screen, ui_element=ui.WAITING_FOR_OTHER_PLAYERS):
            logger.debug("Waiting for other players before battle.")
            # condition=False: wait until the "waiting" element DISAPPEARS.
            if wait_until(self.emulator.is_ui_element_on_screen, timeout=60, condition=False, period=0.5,
                          ui_element=ui.WAITING_FOR_OTHER_PLAYERS):
                if wait_until(self.emulator.is_ui_element_on_screen, timeout=2,
                              ui_element=ui.DISCONNECT_NEW_OPPONENT):
                    logger.debug("Found disconnect notification. Trying to start again.")
                    self.emulator.click_button(ui.DISCONNECT_NEW_OPPONENT)
                    return self._wait_for_players_and_start_fight()
                logger.debug("Battle is loading. Starting manual bot.")
                return self._manual_bot_start()
            logger.warning("Waiting other players very long, trying to reset.")
            self.emulator.click_button(ui.WAITING_FOR_OTHER_PLAYERS)

    def _manual_bot_start(self):
        """Starts manual bot for the fight."""
        ManualBattleBot(self.game, self.battle_over_conditions, self.disconnect_conditions).fight()
        if wait_until(self.emulator.is_image_on_screen, timeout=2, ui_element=ui.INVASION_HOME_BUTTON):
            if self._chests < self._max_chests:
                self.press_repeat_button(repeat_button_ui=ui.INVASION_REPEAT_BUTTON,
                                         start_button_ui=ui.INVASION_BOSS_FIGHT_START)
            else:
                self.press_home_button(home_button=ui.INVASION_HOME_BUTTON)
            return
        # In case we got back from fight by disconnect or something else
        logger.debug("Any chest after boss fight wasn't acquired.")
        if wait_until(self.emulator.is_ui_element_on_screen, timeout=20,
                      ui_element=ui.INVASION_BOSS_FIGHT_START):
            if self.press_start_button():
                self._wait_for_players_and_start_fight()
|
# Copyright 2012-2013 OpenStack, LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""Identity v3 Service action implementations"""
import logging
import sys
from cliff import command
from cliff import lister
from cliff import show
from openstackclient.common import utils
class CreateService(show.ShowOne):
    """Create service command"""

    api = 'identity'
    log = logging.getLogger(__name__ + '.CreateService')

    def get_parser(self, prog_name):
        # Positional service type, optional name, and a mutually exclusive
        # --enable/--disable pair (both write to 'enabled'; default True).
        parser = super(CreateService, self).get_parser(prog_name)
        parser.add_argument(
            'type',
            metavar='<service-type>',
            help='New service type (compute, image, identity, volume, etc)')
        parser.add_argument(
            '--name',
            metavar='<service-name>',
            help='New service name')
        enable_group = parser.add_mutually_exclusive_group()
        enable_group.add_argument(
            '--enable',
            dest='enabled',
            action='store_true',
            default=True,
            help='Enable user')
        enable_group.add_argument(
            '--disable',
            dest='enabled',
            action='store_false',
            help='Disable user')
        return parser

    def take_action(self, parsed_args):
        self.log.debug('take_action(%s)' % parsed_args)
        identity_client = self.app.client_manager.identity
        service = identity_client.services.create(
            parsed_args.name,
            parsed_args.type,
            parsed_args.enabled)
        # Return (column names, values) sorted by field name, as ShowOne expects.
        return zip(*sorted(service._info.iteritems()))
class DeleteService(command.Command):
    """Delete service command"""

    api = 'identity'
    log = logging.getLogger(__name__ + '.DeleteService')

    def get_parser(self, prog_name):
        parser = super(DeleteService, self).get_parser(prog_name)
        parser.add_argument(
            'service',
            metavar='<service>',
            help='Name or ID of service to delete')
        return parser

    def take_action(self, parsed_args):
        self.log.debug('take_action(%s)' % parsed_args)
        services_api = self.app.client_manager.identity.services
        # Resolve the user-supplied name/ID to a concrete service first.
        target = utils.find_resource(services_api, parsed_args.service)
        services_api.delete(target.id)
class ListService(lister.Lister):
    """List service command"""

    api = 'identity'
    log = logging.getLogger(__name__ + '.ListService')

    def take_action(self, parsed_args):
        self.log.debug('take_action(%s)' % parsed_args)
        columns = ('ID', 'Name', 'Type', 'Enabled')
        services = self.app.client_manager.identity.services.list()
        # Lazily project each service onto the chosen columns for the Lister.
        rows = (utils.get_item_properties(svc, columns, formatters={})
                for svc in services)
        return (columns, rows)
class SetService(show.ShowOne):
    """Set service command"""

    api = 'identity'
    log = logging.getLogger(__name__ + '.SetService')

    def get_parser(self, prog_name):
        parser = super(SetService, self).get_parser(prog_name)
        parser.add_argument(
            'service',
            metavar='<service>',
            help='Service name or ID to update')
        parser.add_argument(
            '--type',
            metavar='<service-type>',
            help='New service type (compute, image, identity, volume, etc)')
        parser.add_argument(
            '--name',
            metavar='<service-name>',
            help='New service name')
        enable_group = parser.add_mutually_exclusive_group()
        enable_group.add_argument(
            '--enable',
            dest='enabled',
            action='store_true',
            default=True,
            help='Enable user')
        enable_group.add_argument(
            '--disable',
            dest='enabled',
            action='store_false',
            help='Disable user')
        return parser

    def take_action(self, parsed_args):
        self.log.debug('take_action(%s)' % parsed_args)
        identity_client = self.app.client_manager.identity
        service = utils.find_resource(identity_client.services,
                                      parsed_args.service)
        # Nothing to change without --name or --type: warn and bail out.
        if not parsed_args.name and not parsed_args.type:
            sys.stdout.write("Service not updated, no arguments present")
            return
        identity_client.services.update(
            service,
            parsed_args.name,
            parsed_args.type,
            parsed_args.enabled)
        return
class ShowService(show.ShowOne):
    """Show service command"""

    api = 'identity'
    log = logging.getLogger(__name__ + '.ShowService')

    def get_parser(self, prog_name):
        parser = super(ShowService, self).get_parser(prog_name)
        parser.add_argument(
            'service',
            metavar='<service>',
            help='Type, name or ID of service to display')
        return parser

    def take_action(self, parsed_args):
        self.log.debug('take_action(%s)' % parsed_args)
        services_api = self.app.client_manager.identity.services
        service = utils.find_resource(services_api, parsed_args.service)
        # (column names, values) pairs sorted by field name for ShowOne.
        fields = sorted(service._info.iteritems())
        return zip(*fields)
|
# 숫자를 1 ~ 31 까지 담는다
# 반복문을 사용해서 숫자 31이 될떄까지
# 컴퓨터 임의갯수숫자를 부르고 사용자가 임의갯수숫자를 입력한다.
# 사용자가 임의갯수숫자를 입력하면
# 컴퓨터는 나머지의 숫자를 순서대로 임의갯수대로 나타낸다.
# 여기서 숫자가 31이 나오지 않으면 다시 사용자가 임의갯수숫자 입력으로 돌아감
# 사용자가 컴퓨터 다음 숫자를 임의갯수대로 입력한다
# 컴퓨터는 사용자가 입력한 숫자의 다음 숫자를 임의갯수대로 입력한다
# 6행줄로 다시 돌아간다.
# 컴퓨터가 30까지 입력하면 컴퓨터 승리! 사용자가 30을 입력하면 사용자 승리! |
from rest_framework import generics, permissions
from core.models import Pet
from core.serializers import PetSerializer,PetCreateSerializer
class PetList(generics.ListAPIView):
    """GET endpoint listing all pets; open to unauthenticated clients."""
    queryset = Pet.objects.all()
    serializer_class = PetSerializer
    permission_classes = ()
class PetDestroy(generics.DestroyAPIView):
    """DELETE endpoint removing a pet; requires authentication."""
    queryset = Pet.objects.all()
    # NOTE(review): uses PetCreateSerializer unlike the other read/update
    # views — verify this asymmetry is intentional.
    serializer_class = PetCreateSerializer
    permission_classes = (
        permissions.IsAuthenticated,
    )
class PetUpdate(generics.UpdateAPIView):
    """PUT/PATCH endpoint updating a pet; requires authentication."""
    queryset = Pet.objects.all()
    serializer_class = PetSerializer
    permission_classes = (
        permissions.IsAuthenticated,
    )
class PetCreate(generics.CreateAPIView):
    """POST endpoint creating a pet; open to unauthenticated clients."""
    queryset = Pet.objects.all()
    serializer_class = PetCreateSerializer
    permission_classes = ()
class PetGet(generics.RetrieveAPIView):
    """GET endpoint retrieving a single pet; open to unauthenticated clients."""
    queryset = Pet.objects.all()
    serializer_class = PetSerializer
    permission_classes = ()
#!/usr/bin/env python3
# install pip3 install requests
from tickets import Ticket
import os
import time
# This is the main code to the code challenge
# Contains:
class CodingChallenge():
    """Console menu for the mobile ticket viewer.

    Relies on a module-level ``tickets`` (a ``Ticket`` instance created in
    the ``__main__`` guard) to fetch ticket data from the API.
    """

    def display_menu(self):
        """Clear the screen and print the main menu options."""
        # clears out the screen everytime method is called
        os.system('clear')
        print("")
        print(30 * " ", "Welcome to Mobile Ticket viewer", 30 * " ")
        print("Please select an option from below:\n")
        print("1. List all the tickets ")
        print("2. List a single ticket ")
        print("Quit (Enter q or Q to Quit ) \n")

    def menu(self):
        """Run the interactive menu loop until the user quits."""
        os.system('clear')
        while True:
            self.display_menu()
            # Bug fix: the prompt used to offer "3" although no third menu
            # action exists; only 1, 2 and q/Q are valid inputs.
            choice = input("Enter your choice(1, 2 or q/Q): ")
            if choice == "1":
                os.system('clear')
                # calls get_all_tickets from tickets.py file to receive all tickets
                tickets.get_all_tickets()
            elif choice == "2":
                os.system('clear')
                # calls get_ticket from tickets.py file to get a single ticket
                tickets.get_ticket()
            elif choice in ("q", "Q"):
                print("\nThanks for visiting. :)\n")
                # Quits the main console-based menu
                break
            else:
                print(
                    """\nInvalid selection!!!! please enter number 1, 2 or enter q or Q to Quit""")
                time.sleep(3)
                os.system('clear')
if __name__ == '__main__':
    # Module-level Ticket instance; CodingChallenge.menu() reads this global.
    tickets = Ticket()
    a = CodingChallenge()
    a.menu()
|
# Adapted for numpy/ma/cdms2 by convertcdms.py
#
# Test Outline (Go) module
#
############################################################################
# #
# Module: testoutline module #
# #
# Copyright: 2000, Regents of the University of California #
# This software may not be distributed to others without #
# permission of the author. #
# #
# Authors: PCMDI Software Team #
# Lawrence Livermore NationalLaboratory: #
# support@pcmdi.llnl.gov #
# #
# Description: Used to test VCS's outline graphics method. #
# #
# Version: 4.0 #
# #
############################################################################
#
#
#
############################################################################
# #
# Import: VCS and cdms modules. #
# #
############################################################################
def test():
    """Exercise VCS's legacy 'outline' graphics method end to end.

    Python 2 code: plots sample data, mutates outline/line/template
    attributes and checks each render via support.check_plot. Exits early
    unless -F/--full or -E/--extended is passed on the command line.
    """
    import vcs_legacy,cdms2 as cdms,time,os,sys,support          # import vcs_legacy and cu
    bg = support.bg
    f = cdms.open(os.path.join(cdms.__path__[0],'..','..','..','..','sample_data','clt.nc'))  # open clt file
    s = f('clt')                           # get slab clt
    x = vcs_legacy.init()                  # construct vcs_legacy canvas
    a = x.createoutline('test')            # create 'test' outline
    if not vcs_legacy.isgraphicsmethod(a):  # test object 'a' for graphics method
        raise Exception, "Error did not retrieve the gm"
    else:
        if not vcs_legacy.isoutline(a):    # check for outline
            raise Exception, "Error gm is not right type"

    x.plot(s,'default','outline','test',bg=bg)  # plot slab the old way
    support.check_plot(x)
    if bg == 0:
        x.geometry(450,337,100,0)          # change the geometry and location
        x.flush()
        support.check_plot(x)

    s = f('u')                             # get u slab
    x.clear()                              # clear the VCS Canvas
    x.plot(s,'default','outline','test',bg=bg)  # plot the surface data
    support.check_plot(x)
    a.script('test','w')                   # save 'test' outline as a Python script

    a.xticlabels('lon30','lon30')          # change the x-axis
    support.check_plot(x)
    a.xticlabels('','')                    # remove the x-axis
    support.check_plot(x)
    a.xticlabels('*')                      # put the x-axis
    support.check_plot(x)
    a.datawc(-45.0, 45.0, -90.0, 90.0)     # change the region
    support.check_plot(x)
    a.datawc(1e20,1e20,1e20,1e20)          # put the region back
    support.check_plot(x)

    # Stop here unless an extended/full run was requested on the CLI.
    cont = False
    for anarg in sys.argv:
        if anarg in ['--extended', '--full','-E','-F']:
            cont = True
            break
    if cont is False:
        print '\n************* PARTIAL TEST *****************'
        print 'FOR COMPLETE TEST OF THIS MODULE USE '
        print '   -F (--full) or -E (--extended) option'
        print '************* PARTIAL TEST *****************\n'
        sys.exit()

    a.outline = ([0])                      # set the outline value
    support.check_plot(x)
    a.line = 0                             # same as 'solid', change the line style
    support.check_plot(x)
    a.line = 1                             # same as 'dash', change the line style
    support.check_plot(x)
    a.line = 2                             # same as 'dot', change the line style
    support.check_plot(x)
    a.line = 3                             # same as 'dash-dot', change the line style
    support.check_plot(x)
    a.line = 4                             # same as 'long-dash', change the line style
    support.check_plot(x)
    a.linecolor = (77)                     # change the line color
    support.check_plot(x)
    a.linecolor = 16                       # change the line color
    support.check_plot(x)
    a.linecolor = 44                       # same as a.linecolor=(44)
    support.check_plot(x)
    a.linecolor = None                     # use the default line color, black
    support.check_plot(x)
    a.line = None                          # use default line style, solid black line
    support.check_plot(x)
    a.outline = ([1])                      # set the outline value
    support.check_plot(x)
    a.outline = ([0,1])                    # set the outline value
    support.check_plot(x)
    a.outline = ([0])                      # set the outline value
    support.check_plot(x)

    x.clear()                              # clear the VCS Canvas
    x.outline(s,a,'default',bg=bg)         # plot outline using 'default' template
    support.check_plot(x)

    objs = x.listelements('template')      # get the list of templates
    t = x.createtemplate('test')           # create template 'test' from 'default' template
    if not vcs_legacy.istemplate(t):       # test whether 't' is a template or not
        raise Exception,"Error template not created"
    else:
        a2 = x.listelements('template')    # get the list of templates
        if objs == a2:
            raise Exception,"Error template not created or added to list"

    x.clear()                              # clear the VCS Canvas
    x.plot(t,a,s,bg=bg)                    # plot outline template 't', outline 'a', and array 's'
    support.check_plot(x)
    x.clear()                              # clear the VCS Canvas
    x.outline(a,s,t,bg=bg)                 # plot using outline 'a', array 's', and template 't'
    support.check_plot(x)

    #########################################################################
    # Create line object 'l' from the default line                          #
    #########################################################################
    objs = x.listelements('line')          # show the list of line secondary objects
    l = x.createline('test')
    if not vcs_legacy.issecondaryobject(l):  # check to see if it is a secondary object
        raise Exception,"Error did not get line"
    else:
        if not vcs_legacy.isline(l):       # check to see if it is a line
            raise Exception, "Error object created is not line"

    #########################################################################
    # Use the create line object 'l' from above and modify the line object  #
    #########################################################################
    a.line = l                             # use the line object
    support.check_plot(x)
    l.color = 44                           # change the line color
    support.check_plot(x)
    l.type = 'dash'                        # change the line type
    support.check_plot(x)

    a = x.listelements('outline')          # show list of gm
    r = x.createoutline('test2','quick')   # create xyvsy 'test2'
    a2 = x.listelements('outline')         # show list of gm
    if a2 == a:
        # NOTE(review): Python 2 string exceptions below; removed in py2.6+.
        raise "error gm not created or not added to list"
    x.removeobject(r)                      # remove xyvsy 'test2'
    a3 = x.listelements('outline')         # show list of gm
    if a3 != a:
        raise "error gm not removed"

    #################################################################################
    # to see how x.update and x.mode work, see testoutline.py                       #
    #################################################################################
    #x.update()
    #x.mode=1
    #x.mode=0
    print '*************************************************************************************'
    print '******                                                                         ******'
    print '******   O U T F I L L   T E S T   C O M P L E T E D   S U C E S S F U L L Y   ******'
    print '******                                                                         ******'
    print '*************************************************************************************'
# Run the outline test when executed as a script.
if __name__=="__main__":
    test()
|
# O(v + e) time | O(v) space, where v = vertices and e = edges.
class Node:
    """Node in a named tree/graph supporting iterative depth-first search."""

    def __init__(self, name):
        self.children = []
        self.name = name
        # Retained for backward compatibility; no longer used by
        # depthFirstSearch (see bug-fix note there).
        self.visited = set()

    def addChild(self, name):
        """Append a new child node with the given name; return self for chaining."""
        self.children.append(Node(name))
        return self

    def depthFirstSearch(self, array):
        """Append node names to *array* in pre-order DFS and return it.

        Bug fix: the visited set is now local to each call instead of an
        instance attribute, so repeated calls on the same node work
        (previously a second call returned an empty traversal) and memory
        is released when the call finishes.
        """
        visited = set()
        stack = [self]
        while stack:
            node = stack.pop()
            if node not in visited:
                visited.add(node)
                array.append(node.name)
                # Push children in reverse so the leftmost child is visited first.
                for idx in reversed(range(len(node.children))):
                    stack.append(node.children[idx])
        return array
"""
Recursive
class Node:
def __init__(self, name):
self.children = []
self.name = name
self.visited = set()
def addChild(self, name):
self.children.append(Node(name))
return self
def depthFirstSearch(self, array):
stack = []
stack.append(self)
while len(stack) > 0:
item = stack.pop()
if item not in self.visited:
self.visited.add(item)
array.append(item.name)
for idx in reversed(range(len(item.children))):
stack.append(item.children[idx])
return array
"""
|
from .device import SimulatedLakeshore372
from ..lewis_versions import LEWIS_LATEST

# Pin this emulator to the most recent supported LeWIS framework version.
framework_version = LEWIS_LATEST

# Public API of this emulator package.
__all__ = ['SimulatedLakeshore372']
|
"""Annotators for numbering things."""
import random
import re
from binascii import hexlify
from collections import defaultdict
from sparv import Annotation, Output, Wildcard, annotator
START_DEFAULT = 1
@annotator("Number {annotation} by position", wildcards=[Wildcard("annotation", Wildcard.ANNOTATION)])
def number_by_position(out: Output = Output("{annotation}:misc.number_position"),
chunk: Annotation = Annotation("{annotation}"),
prefix: str = "",
zfill: bool = False,
start: int = START_DEFAULT):
"""Number chunks by their position."""
spans = list(chunk.read_spans())
def _order(index, _value):
return spans[index]
_read_chunks_and_write_new_ordering(out, chunk, _order, prefix, zfill, start)
@annotator("Number {annotation} randomly", wildcards=[Wildcard("annotation", Wildcard.ANNOTATION)])
def number_random(out: Output = Output("{annotation}:misc.number_random"),
chunk: Annotation = Annotation("{annotation}"),
prefix: str = "",
zfill: bool = False,
start: int = START_DEFAULT):
"""Number chunks randomly.
Uses index as random seed.
"""
def _order(index, _value):
random.seed(int(hexlify(str(index).encode()), 16))
return random.random()
_read_chunks_and_write_new_ordering(out, chunk, _order, prefix, zfill, start)
@annotator("Number {annotation}, with the order determined by {attribute}", wildcards=[
Wildcard("annotation", Wildcard.ANNOTATION),
Wildcard("attribute", Wildcard.ATTRIBUTE)
])
def number_by_attribute(out: Output = Output("{annotation}:misc.number_by_{attribute}"),
chunk: Annotation = Annotation("{annotation}:{attribute}"),
prefix: str = "",
zfill: bool = False,
start: int = START_DEFAULT):
"""Number chunks, with the order determined by an attribute."""
def _order(_index, value):
return _natural_sorting(value)
_read_chunks_and_write_new_ordering(out, chunk, _order, prefix, zfill, start)
@annotator("Renumber already numbered {annotation}:{attribute}, in new random order", wildcards=[
Wildcard("annotation", Wildcard.ANNOTATION),
Wildcard("attribute", Wildcard.ATTRIBUTE)
])
def renumber_by_shuffle(out: Output = Output("{annotation}:misc.renumber_by_shuffle_{attribute}"),
chunk: Annotation = Annotation("{annotation}:{attribute}"),
prefix: str = "",
zfill: bool = False,
start: int = START_DEFAULT):
"""Renumber already numbered chunks, in new random order.
Retains the connection between parallelly numbered chunks by using the values as random seed.
"""
def _order(_index, value):
random.seed(int(hexlify(value.encode()), 16))
return random.random(), _natural_sorting(value)
_read_chunks_and_write_new_ordering(out, chunk, _order, prefix, zfill, start)
@annotator("Number {annotation} by ({parent_annotation}:{parent_attribute} order, {annotation} order)", wildcards=[
Wildcard("annotation", Wildcard.ANNOTATION),
Wildcard("parent_annotation", Wildcard.ANNOTATION),
Wildcard("parent_attribute", Wildcard.ATTRIBUTE)
])
def number_by_parent(out: Output = Output("{annotation}:misc.number_by_parent_{parent_annotation}__{parent_attribute}"),
chunk: Annotation = Annotation("{annotation}"),
parent_order: Annotation = Annotation("{parent_annotation}:{parent_attribute}"),
prefix: str = "",
zfill: bool = False,
start: int = START_DEFAULT):
"""Number chunks by (parent_order, chunk order)."""
parent_children, _orphans = parent_order.get_children(chunk)
child_order = {child_index: (parent_nr, child_index)
for parent_index, parent_nr in enumerate(parent_order.read())
for child_index in parent_children[parent_index]}
def _order(index, _value):
return child_order.get(index)
_read_chunks_and_write_new_ordering(out, chunk, _order, prefix, zfill, start)
@annotator("Number {annotation} by relative position within {parent}", wildcards=[
Wildcard("annotation", Wildcard.ANNOTATION),
Wildcard("parent", Wildcard.ANNOTATION)
])
def number_relative(out: Output = Output("{annotation}:misc.number_rel_{parent}"),
parent: Annotation = Annotation("{parent}"),
child: Annotation = Annotation("{annotation}"),
prefix: str = "",
zfill: bool = False,
start: int = START_DEFAULT):
"""Number chunks by their relative position within a parent."""
parent_children, _orphans = parent.get_children(child)
out.write(("{prefix}{nr:0{length}d}".format(prefix=prefix,
length=len(str(len(parent) - 1 + start))
if zfill else 0,
nr=cnr)
for parent in parent_children
for cnr, _index in enumerate(parent, start)))
def _read_chunks_and_write_new_ordering(out: Output, chunk: Annotation, order, prefix="", zfill=False,
                                        start=START_DEFAULT):
    """Common function called by other numbering functions.

    Groups chunk indices by the sort key returned by `order`, numbers the
    groups in sorted-key order starting at `start`, and writes the formatted
    number (optionally zero-filled and prefixed) for every chunk.
    """
    keyed_indices = defaultdict(list)
    for position, value in enumerate(chunk.read()):
        keyed_indices[order(position, value)].append(position)

    out_annotation = chunk.create_empty_attribute()
    # Width of the largest number, used only when zero-filling is requested.
    width = len(str(len(keyed_indices) - 1 + start)) if zfill else 0
    for number, sort_key in enumerate(sorted(keyed_indices), start):
        formatted = "{prefix}{nr:0{length}d}".format(prefix=prefix, length=width, nr=number)
        for position in keyed_indices[sort_key]:
            out_annotation[position] = formatted
    out.write(out_annotation)
def _natural_sorting(astr):
"""Convert a string into a naturally sortable tuple."""
return tuple(int(s) if s.isdigit() else s for s in re.split(r"(\d+)", astr))
|
from unittest import TestCase
from mock import patch, Mock
from tables.rows.builders import TvShowSearchRowBuilder
class MockTvShow:
    """Minimal stand-in for a TV-show record used by the row-builder tests."""

    def __init__(self, name, rotten_tomatoes_score, start_year, end_year):
        # Copy every constructor argument onto the instance verbatim.
        for attr, value in (("name", name),
                            ("rotten_tomatoes_score", rotten_tomatoes_score),
                            ("start_year", start_year),
                            ("end_year", end_year)):
            setattr(self, attr, value)
class TestName(TestCase):
    """Tests for TvShowSearchRowBuilder.name()."""

    # Builder under test; shared across tests (no per-test state needed).
    row_builder = TvShowSearchRowBuilder()

    @patch("tables.rows.builders.convert_to_ascii")
    @patch("tables.rows.builders.colored")
    @patch("tables.rows.builders.wrap")
    def test_name(self, mock_wrap, mock_colored, mock_ascii_conversion):
        # name() should ASCII-convert, wrap to 30 columns, bold each wrapped
        # part and join them with newlines.
        name_parts = ["jae", "baebae"]
        mock_ascii_conversion.return_value = "converted to ascii"
        mock_colored.return_value = "colored"
        mock_wrap.return_value = name_parts
        # Two wrapped parts -> two bolded lines joined by "\n".
        expected = "colored\ncolored"
        self.assertEqual(expected, self.row_builder.name(name="name"))
        mock_ascii_conversion.assert_called_once_with(text="name")
        mock_wrap.assert_called_once_with(text="converted to ascii", width=30)
        mock_colored.assert_any_call("jae", attrs=["bold"])
        mock_colored.assert_any_call("baebae", attrs=["bold"])
class TestFormatYears(TestCase):
    """Tests for TvShowSearchRowBuilder.format_years()."""

    row_builder = TvShowSearchRowBuilder()
    start_year = "start year"

    def test_when_end_year_is_none(self):
        # An ongoing show renders as "<start>-" with nothing after the dash.
        expected = "start year-"
        self.assertEqual(expected, self.row_builder.format_years(start_year=self.start_year, end_year=None))

    def test_when_end_year_is_defined(self):
        expected = "start year-end year"
        end_year = "end year"
        self.assertEqual(expected, self.row_builder.format_years(start_year=self.start_year, end_year=end_year))
class TestBuild(TestCase):
    """Tests for TvShowSearchRowBuilder.build()."""

    row_builder = TvShowSearchRowBuilder()

    def test_build(self):
        # build() should return [name, formatted rating, formatted years],
        # delegating each column to the corresponding helper.
        name = "name"
        rotten_tomatoes_score = "rotten tomatoes score"
        start_year = "start year"
        end_year = "end year"
        tv_show = MockTvShow(name=name, rotten_tomatoes_score=rotten_tomatoes_score,
                             start_year=start_year, end_year=end_year)
        rating_formatter = "rating formatter"
        format_years = "format years"
        # Stub out the helpers so only build()'s wiring is under test.
        self.row_builder.name = Mock("name")
        self.row_builder.name.return_value = name
        self.row_builder.rating_formatter.format = Mock("format")
        self.row_builder.rating_formatter.format.return_value = rating_formatter
        self.row_builder.format_years = Mock("format years")
        self.row_builder.format_years.return_value = format_years
        expected = [name, rating_formatter, format_years]
        self.assertEqual(expected, self.row_builder.build(tv_show=tv_show))
        self.row_builder.name.assert_called_once_with(name=name)
        self.row_builder.rating_formatter.format.assert_called_once_with(rating=rotten_tomatoes_score)
        self.row_builder.format_years.assert_called_once_with(start_year=start_year, end_year=end_year)
|
# coding=utf-8
from unittest import TestCase
from monitorrent.plugins.trackers.rutracker import RutrackerTracker, RutrackerLoginFailedException
from monitorrent.tests import use_vcr
from monitorrent.tests.rutracker_helper import RutrackerHelper
class RutrackerTrackerTest(TestCase):
    """Tests for RutrackerTracker; HTTP traffic is replayed via @use_vcr cassettes."""

    def setUp(self):
        self.tracker = RutrackerTracker()
        self.helper = RutrackerHelper()
        # Both with and without the "www." prefix must be accepted.
        self.urls_to_check = [
            "http://rutracker.org/forum/viewtopic.php?t=5062041",
            "http://www.rutracker.org/forum/viewtopic.php?t=5062041"
        ]

    def test_can_parse_url(self):
        for url in self.urls_to_check:
            self.assertTrue(self.tracker.can_parse_url(url))
        # Wrong TLD / wrong domain must be rejected.
        bad_urls = [
            "http://rutracker.com/forum/viewtopic.php?t=5062041",
            "http://beltracker.org/forum/viewtopic.php?t=5062041"
        ]
        for url in bad_urls:
            self.assertFalse(self.tracker.can_parse_url(url))

    @use_vcr
    def test_parse_url(self):
        parsed_url = self.tracker.parse_url("http://rutracker.org/forum/viewtopic.php?t=5062041")
        self.assertEqual(
            parsed_url['original_name'], u'Бeзyмный Мaкс: Дoрoга яpоcти в 3Д / Mаd Mаx: Furу Rоad 3D '
                                         u'(Джoрдж Миллер / Geоrge Millеr) [2015, Боевик, Фантастика, '
                                         u'Приключения, BDrip-AVC] Half OverUnder / Вертикальная анаморфная стереопара')

    @use_vcr
    def test_parse_url_1(self):
        parsed_url = self.tracker.parse_url("http://rutracker.org/forum/viewtopic.php?t=5018611")
        self.assertEqual(parsed_url['original_name'],
                         u'Ганнибал / Hannibal / Сезон: 3 / Серии: 1-11 из 13 '
                         u'(Гильермо Наварро, Майкл Раймер, Дэвид Слэйд) [2015, детектив, криминал, драма, HDTVRip] '
                         u'MVO (Sony Sci Fi) + Original + Subs (Rus, Eng)')

    # TODO the tests requiring login will fail due to captcha restrictions
    @use_vcr
    def test_login_failed(self):
        with self.assertRaises(RutrackerLoginFailedException) as e:
            self.tracker.login(self.helper.fake_login, self.helper.fake_password)
        self.assertEqual(e.exception.code, 1)
        self.assertEqual(e.exception.message, 'Invalid login or password')

    @use_vcr
    def test_login(self):
        self.tracker.login(self.helper.real_login, self.helper.real_password)
        # Successful login stores the session cookie value and the user id.
        self.assertEqual(self.tracker.bb_data, self.helper.real_bb_data)
        self.assertEqual(self.tracker.uid, self.helper.real_uid)

    @use_vcr
    def test_verify(self):
        self.tracker.login(self.helper.real_login, self.helper.real_password)
        self.assertTrue(self.tracker.verify())

    def test_get_cookies(self):
        # Without credentials there are no cookies to return.
        self.assertFalse(self.tracker.get_cookies())
        self.tracker = RutrackerTracker(self.helper.real_uid, self.helper.real_bb_data)
        self.assertEqual(self.tracker.get_cookies()['bb_data'], self.helper.real_bb_data)

    @use_vcr
    def test_get_hash(self):
        self.tracker = RutrackerTracker(self.helper.real_uid, self.helper.real_bb_data)
        for url in self.urls_to_check:
            self.assertEqual(self.tracker.get_hash(url), 'B81DE799C2B6D2A70EA60283FC386DC950BA5551')

    def test_get_id(self):
        for url in self.urls_to_check:
            self.assertEqual(self.tracker.get_id(url), "5062041")

    def test_get_download_url(self):
        for url in self.urls_to_check:
            self.assertEqual(self.tracker.get_download_url(url), "http://dl.rutracker.org/forum/dl.php?t=5062041")
|
import csv

# Print every row of file2.csv.
# The csv module requires the file to be opened with newline='' (not
# newline='\r\n') so it can handle line endings itself; otherwise embedded
# newlines inside quoted fields are mis-parsed.
with open('file2.csv', 'r', newline='') as f:
    file_reader = csv.reader(f)
    for row in file_reader:
        print(row)
# import socket
# import struct
# def send( text, s):
# msgbody = bytes(text.encode('utf-8'))
# msglen = len(msgbody)
# header = struct.pack('>H', msglen)
# message = header + msgbody
# s.sendall(message)
# def recieve(sock):
# data = b''
# while len(data) < 2:
# data = sock.recv(1024)
# body_length = struct.unpack('>H', data[:2])[0]
# print(body_length)
# data = data[2:]
# while len(data) < body_length:
# data += sock.recv(1024)
# return data.decode('utf-8')
# s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
# s.connect(('10.118.2.111', 65536))
# message = input('Message to send: ')
# send(message, s)
# print(recieve(s))
#---------------------------------------
# import socket
# import os
# s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
# s.bind(('127.0.0.1', 12345)) #if the clients/server are on different network you shall bind to ('', port)
# s.listen(10)
# c, addr = s.accept()
# print('{} connected.'.format(addr))
# console.log('test')
# l = "test"
# c.send_all(l)
# print("Done sending...")
#------------------------------------------
import socket

HOST = ''      # Standard loopback interface address (localhost)
PORT = 65432   # Port to listen on (non-privileged ports are > 1023)

# Simple single-connection server: replies 'test' to every received chunk.
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
    s.bind((HOST, PORT))
    s.listen()
    conn, addr = s.accept()
    message = 'test'
    with conn:
        print('Connected by', addr)
        while True:
            data = conn.recv(1024)
            # Peer closed the connection: stop *before* replying, and do not
            # touch conn after the `with conn:` block (the original called
            # conn.send() on the already-closed socket, raising OSError).
            if not data:
                break
            conn.sendall(message.encode('utf-8'))
from time import sleep
from bs4 import BeautifulSoup
from urllib.request import urlopen, Request
# from django.db import models
from .models import Article
def find_article():
    """Scrape medium.com/blockchain story listings and save them as Articles.

    Fetches the listing page, extracts title/description/slug/author for each
    story card, then fetches each story page and creates Article rows.
    Returns the string 'done!!!!' when finished.
    """
    url = 'https://medium.com/blockchain'
    # A browser User-Agent is required or Medium rejects the request.
    r = Request(url, headers={'User-Agent': 'Mozilla/5.0'})
    html = urlopen(r).read()
    soup = BeautifulSoup(html, 'html.parser')
    # Story cards come in two column widths; match both CSS class variants.
    stories = soup.find_all('div', {'class': [
        'col u-xs-size12of12 js-trackPostPresentation u-paddingLeft12 u-marginBottom15 u-paddingRight12 u-size6of12',
        'col u-xs-size12of12 js-trackPostPresentation u-paddingLeft12 u-marginBottom15 u-paddingRight12 u-size4of12']})
    for story in stories:
        # '-' is the placeholder when a field is missing from the card.
        title = story.find('h3').text if story.find('h3') else '-'
        description = story.find('div', {
            'class': 'u-fontSize18 u-letterSpacingTight u-lineHeightTight u-marginTop7 u-textColorNormal u-baseColor--textNormal'}).text if story.find(
            'div', {
                'class': 'u-fontSize18 u-letterSpacingTight u-lineHeightTight u-marginTop7 u-textColorNormal u-baseColor--textNormal'}) else '-'
        slug = story.find('a')['href'].split('/')[-1]
        author = story.find('a', {
            'class': 'ds-link ds-link--styleSubtle link link--darken link--accent u-accentColor--textNormal u-accentColor--textDarken'})[
            'href'].split('@')[-1]
        story_url = story.find('a')['href']
        story_page = Request(story_url, headers={'User-Agent': 'Mozilla/5.0'})
        story_html = urlopen(story_page).read()
        story_soup = BeautifulSoup(story_html, 'html.parser')
        sections = story_soup.find_all('section')
        for section in sections:
            # NOTE(review): find('p') returns a bs4 Tag, not text, and `tags`
            # duplicates `body`; also an Article is created per <section>, so
            # one story can produce several rows — confirm this is intended.
            body = section.find('p')
            tags = section.find('p')
            # my_user = MyUser(username=author)
            # my_user.save()
            # my_user_profile = Profile(user=my_user.id)
            # my_user_profile.save()
            Article.objects.create(
                slug=slug,
                title=title,
                description=description,
                body=body,
                # author=author,
                # author=models.ForeignKey(my_user_profile, default=1, on_delete=models.SET_DEFAULT),
                # tags=tags
            )
            # Article.tags.add(tags)
        # Throttle between stories to avoid hammering the site.
        sleep(5)
    return 'done!!!!'
import cv2
import numpy as np
from matplotlib import pyplot as plt

# Plot the per-channel (BGR) intensity histogram of the image, then show the
# original image alongside the histogram figure.
img = cv2.imread("brasao.jpg")

for channel, plot_color in enumerate(('b', 'g', 'r')):
    hist = cv2.calcHist([img], [channel], None, [256], [0, 256])
    plt.plot(hist, color=plot_color)
    plt.xlim([0, 256])

cv2.imshow("Imagem original", img)
plt.show()
cv2.waitKey(0)
cv2.destroyAllWindows()
import dash
import dash_html_components as html
from os import sys, path
# Make the project root importable when this file is run directly.
sys.path.append(path.dirname(path.dirname(path.abspath(__file__))))

# Dash application; `server` exposes the underlying Flask app (e.g. for
# gunicorn deployment).
app = dash.Dash(__name__)
server = app.server

# Layout
app.layout = html.Div([
    # Title - Row
    html.Div(
        [
            # Left-hand title (templated by cookiecutter at project creation).
            html.H1(
                '{{cookiecutter.app_name}}',
                style={
                    'font-family': 'Helvetica',
                    "margin-top": "25",
                    "margin-bottom": "25"
                },
                className='eight columns',
            ),
            # Right-aligned header, updated elsewhere via its component id.
            html.H1(
                id='month-year-label',
                style={
                    'font-family': 'Helvetica',
                    "margin-top": "25",
                    "margin-bottom": "25",
                    "text-align": "right"
                },
                className='four columns',
            ),
        ],
        className='row'
    )
])

if __name__ == '__main__':
    # Debug server for local development only.
    app.run_server(debug=True)
|
from multiprocessing import Pipe, Process
from utils.importhelper import load
from os import listdir, chdir, getcwd
from subprocess import call
# Global configuration shared by the build helpers below; expected to be
# populated by the caller (see processConfig).
config = {}
'''
These classes below basically simulate structs as in C.
'''
class package:
    '''
    Stores a package for use in the updater script.
    '''

    def __init__(self, name):
        # Identity and build-state flags.
        self.name, self.config = name, None
        self.hasBuilt, self.hasError = False, None
        # Dependency links: packages this one needs / packages needing it.
        self.deps, self.reqby = [], []
class Order:
    '''
    An update or installation of a package.
    '''

    def __init__(self):
        # Full package list and the subset still awaiting a build.
        self.pkgs, self.notBuilt = [], []
def processConfig(config):
    '''
    Does additional processing to the config.

    Splits the comma-separated 'storage' entry into a list of stripped
    names and returns the (mutated) config dict.
    '''
    # Use a list comprehension instead of a bare map(): on Python 3 a map
    # object is a one-shot iterator, so later re-iteration would see nothing.
    config['storage'] = [item.strip() for item in config['storage'].split(',')]
    return config
def getPkgFromOrder(order, pkg):
    '''
    This function takes a pkg by name and gets the package struct from the order
    '''
    # Return the first package whose name matches, or None when absent.
    return next((candidate for candidate in order.pkgs if candidate.name == pkg), None)
def buildOrder(order):
    '''
    Build the dependency graph for every package directory in config['basedir'].

    Each PKGBUILD is scanned for depends=/makedepends= lines; dependencies
    that are themselves part of the order are linked both ways
    (pkg.deps / dep.reqby).  Finally order.notBuilt is initialised.
    '''
    for p in listdir(config['basedir']):
        # Load the PKGBUILD (close the handle when done).
        with open(config['basedir'] + '/' + p + '/PKGBUILD') as f:
            pkgbuild = f.readlines()
        # Collect names listed on depends=/makedepends= lines.
        dependancies = []
        for line in pkgbuild:
            line = line.strip()  # was line.strip(line), which strips every char
            key = line.split('=')[0]
            if key == 'depends' or key == 'makedepends':  # 'makedepsnds' was a typo
                # Strip bash quoting/parentheses around each name.
                # NOTE(review): the original used split("()'\"") here, which
                # splits on that literal string; strip() matches the intent.
                deps = (tok.strip("()'\"") for tok in line.split('=')[1].split(' '))
                for dep_name in deps:
                    if dep_name and dep_name not in dependancies:
                        dependancies.append(dep_name)  # was .apend (AttributeError)
        # Get the pkg from the order and add the dependancies to it
        pkg = getPkgFromOrder(order, p)
        if pkg is None:
            continue  # directory is not part of this order
        for name in dependancies:
            dep = getPkgFromOrder(order, name)
            if dep:
                pkg.deps.append(dep)
                dep.reqby.append(pkg)
    # Copy (not alias) pkgs: popping from notBuilt must not mutate order.pkgs.
    order.notBuilt = list(order.pkgs)
    return order
def findBottom(order, visited=None):
    '''
    Find a package with no unbuilt dependencies (a "bottom") and return it.

    Walks the dependency graph depth-first starting from the first not-yet
    built package.  A circular dependency (a package already on the visited
    path) is resolved by treating that package as a bottom.  Returns None
    when the current branch is already built.
    '''
    # A mutable default argument would be shared across calls; use None.
    if visited is None:
        visited = []
    # Get a starting package if needed
    if len(visited) == 0:
        visited.append(order.notBuilt[0])
    else:  # Else we check if we are a valid package for being looked at
        if visited[-1].hasBuilt:
            return None
    # Now we move down and see if we find a bottom
    for dep in visited[-1].deps:
        # A circular dependancy occurs when dep is already in our visited
        # list; it is handled by calling it a bottom.
        if dep in visited:
            return dep
        # was `visited + dep` (list + package raises TypeError)
        found = findBottom(order, visited + [dep])
        if found:
            return found
    # No bottom has been found below us, so we are the bottom.
    return visited.pop()
def makepkg(order, pkg):
    '''
    Build a package by running `makepkg -s --noconfirm` in its directory.

    Marks the package as built (recording an error on non-zero exit),
    removes it from the order's notBuilt list and returns makepkg's
    exit status.
    '''
    # TODO: Add a lot of features to the build process like a PKGBUILD mangle chain and error handling
    oldcwd = getcwd()
    # pkg is a package object, not a string — use its name for the path.
    chdir(config['basedir'] + '/' + pkg.name)
    try:
        ret = call(['makepkg', '-s', '--noconfirm'])
    finally:
        # The original saved oldcwd but never restored it.
        chdir(oldcwd)
    # Makepkg returns 0 for ok and 1 for error
    if ret == 1:
        pkg.hasError = True
    pkg.hasBuilt = True
    # Remove the package from the notBuilt list.  The original tracked an
    # index `n` that was never incremented, so it always popped entry 0.
    try:
        order.notBuilt.remove(pkg)
    except ValueError:
        pass  # already removed
    return ret
def buildPkgs(pkgs):
    '''
    This function takes a list of package names and builds them in the proper order.
    '''
    # Copy over all the packages from storage to the local dir.
    # NOTE(review): `storage` is not defined in this module — presumably a
    # module-level helper provided elsewhere; confirm before relying on it.
    for name in pkgs:
        storage.get(name, config['basedir'])
    # Build the order struct
    order = buildOrder(Order())
    # Build bottoms until none are left.  findBottom/makepkg both need the
    # order argument, which the original makepkg call omitted.
    bottom = findBottom(order)
    while bottom:
        makepkg(order, bottom)
        bottom = findBottom(order)
|
from shoe import *
from dealer import *
import random
class Table(object):
    # Python 2 blackjack table: owns the shoe, the dealer and the players,
    # and drives betting rounds and Monte-Carlo strategy simulations.
    def __init__(self,players,nDecks,bankroll,minBet,maxBet,bjmultiplier,dealtRatio):
        # dealtRatio: fraction of the shoe dealt before reshuffle;
        # bjmultiplier: blackjack payout multiplier.
        self.dealtRatio=dealtRatio
        self.bjmultiplier=bjmultiplier
        self.shoe=Shoe(nDecks)
        self.players=players
        self.bankroll=bankroll
        self.minBet=minBet
        self.maxBet=maxBet
        # Dealer gets id -1 and a reference back to this table.
        self.dealer=Dealer(-1,bankroll,self)
    def round(self):
        # Play one round: collect bets, then let the dealer run decisions.
        self.tableState='takingBets'
        for p in self.players:
            p.betDecision(self.minBet,self.maxBet,self)
        self.tableState='allowingDecisions'
        self.dealer.step(self.players)
    def disp(self):
        # Print the state of every player and the dealer.
        for p in self.players:
            p.disp()
        # self.deck.printAll()
        self.dealer.disp()
    def setUp(self,p1,p2,d1):
        # Build a fresh player/dealer pair holding the fixed starting cards
        # p1, p2 (player) and d1 (dealer up-card) for one simulated hand.
        p=Player(1,1000)
        d=Dealer(-1,1000,self)
        p.hands=[Hand(p)]
        d.hands=[Hand(p)]
        d.hand=d.hands[0]
        c1=Card(p1)
        c2=Card(p2)
        c3=Card(d1)
        p.hands[0].add(c1)
        p.hands[0].add(c2)
        d.hands[0].add(c3)
        return p,d
    def mcSim(self,p1,p2,d1,targetCount=None):
        # Monte-Carlo simulation of a fixed situation (p1,p2 vs d1),
        # optionally forcing the shoe to a target running count first.
        # Interactive: prompts after each batch of handsPerStep hands.
        quiet=True
        handsPerStep=1000
        hands=0
        value=self.initValue()
        while True:
            for i in range(handsPerStep):
                p,d=self.setUp(p1,p2,d1)
                self.shoe.shuffle()
                if targetCount!=None:
                    # Pull low cards (2-6) to raise the count, high cards
                    # (10-A) to lower it, until the target count is reached.
                    while self.shoe.r7count<targetCount:
                        card=None
                        while card==None:
                            x=random.randint(2,6)
                            card=self.shoe.pop(x)
                    while self.shoe.r7count>targetCount:
                        card=None
                        while card==None:
                            x=random.randint(10,14)
                            x=x%13
                            card=self.shoe.pop(x)
                d.dealCard(d.hands[0])
                decision=d.handleDecisions(p,quiet)
                if not quiet:
                    print decision
                d.handleDecisions(d,quiet)
                if not quiet:
                    p.disp()
                    d.disp()
                winningHands,tiedHands=d.evalAll([p])
                if not quiet:
                    print winningHands,tiedHands
                # Score: +1.5 for blackjack win, +1 win, 0 push, -1 loss.
                if winningHands!=[]:
                    if p.hands[0].isBlackjack:
                        value[decision]+=1.5
                    else:
                        value[decision]+=1
                elif tiedHands!=[]:
                    value[decision]+=0
                else:
                    value[decision]-=1
                hands+=1
            # Report expected value per decision so far.
            for d in value:
                print d+':\t'+str(value[d]*1.0/hands)
            print p1,p2,d1
            print 'simulated %d hands at count of %d'%(hands,targetCount)
            print 'continue? (c to change problem)\t[y]',
            x=raw_input()
            if x=='c':
                # Re-read the scenario from stdin and restart the tallies.
                print 'p1:\t',
                p1=self.assignInput(p1)
                print 'p2:\t',
                p2=self.assignInput(p2)
                print 'd1:\t',
                d1=self.assignInput(d1)
                print 'targetCount: [%d]\t'%(targetCount),
                targetCount=self.assignInput(targetCount)
                print 'new problem: [%d,%d,%d](%d)'%(p1,p2,d1,targetCount)
                hands=0
                value=self.initValue()
            elif x=='n':
                break
    def initValue(self):
        # Fresh per-decision score accumulator.
        value={}
        for d in ['stand','hit']:
            value[d]=0
        return value
    def assignInput(self,original):
        # Read an int from stdin; keep `original` on empty/invalid input.
        x=raw_input()
        try:
            tmp=int(x)
        except:
            tmp=original
        return tmp
    def status(self):
        # Print every player's status plus the shoe's running count.
        for p in self.players:
            p.status()
        print 'count: %d'%self.shoe.r7count
|
import cv2
import skimage
import numpy as np
from scipy import ndimage
from functools import reduce
import matplotlib.pyplot as plt
import skimage.morphology as morph
from skimage.color import rgb2gray
from scipy.ndimage.morphology import binary_opening
from skimage.segmentation import felzenszwalb, find_boundaries
def remove_too_small_shape(mask, shape_prod=100):
    """Remove labels from a mask if those are too small.

    Get cell for each label and if the product of the shape is less
    than the shape_prod value remove it.  The mask is modified in place
    and also returned.

    Args:
        mask (numpy.ndarray): Mask numpy.ndarray (2D) of 0 and 1.
        shape_prod (int, optional): Shape product threshold. Defaults to 100.

    Returns:
        numpy.ndarray: 2D processed mask, the input without the too small labels.
    """
    labeled, _count = ndimage.label(mask)
    # Hoisted out of the loop: the original called find_objects once per label.
    for obj_indices in ndimage.find_objects(labeled):
        cell = mask[obj_indices]
        # np.prod replaces np.product, which is deprecated and removed in NumPy 2.0.
        if np.prod(cell.shape) < shape_prod:
            mask[obj_indices] = 0
    return mask
def remove_too_small_pix(mask, pixels=10):
    """Remove labels from a mask if those are too small.

    Get cell for each label and if the number of 1s is less
    than the pixels value remove it.  The mask is modified in place
    and also returned.

    Args:
        mask (numpy.ndarray): Mask numpy.ndarray (2D) of 0 and 1.
        pix (int, optional): Pixels number threshold. Defaults to 10.

    Returns:
        numpy.ndarray: 2D processed mask, the input without the too small labels.
    """
    labeled, n_labels = ndimage.label(mask)
    # Labels are numbered 1..n_labels; 0 is background.
    for label_id in range(1, n_labels + 1):
        label_area = labeled == label_id
        if np.sum(label_area) < pixels:
            mask[label_area] = 0
    return mask
def find_labels_to_process(mask, shape_prod=400, pix=None):
    """Find labels who are supposed to contain more than a single car.

    Get cell for each label and if the product of the shape (respectively
    number of pixels/values greater than 0) is more than the shape_prod (respectively pix)
    value consider it as to process. If both shape_prod and pix have a value, pix is used.

    Args:
        mask (numpy.ndarray): Mask numpy.ndarray (2D) of 0 and 1.
        shape_prod (int, optional): Shape product threshold. Defaults to 400.
        pix(int, optional): Number of pixels as threshold. Defaults to None.

    Returns:
        List: List of labels too big to contain a single car.
    """
    toproc = []
    labeled, _ = ndimage.label(mask)
    for label_ind, label_coords in enumerate(ndimage.find_objects(labeled)):
        cell = mask[label_coords]
        # pix takes precedence over shape_prod when both are given.
        if pix is not None:
            if np.sum(cell > 0) > pix:
                toproc.append(label_ind)
        elif shape_prod is not None:
            # np.prod replaces np.product (deprecated, removed in NumPy 2.0).
            if np.prod(cell.shape) > shape_prod:
                toproc.append(label_ind)
        else:
            print("shape_prod or pix value is needed as threshold.")
            break
    return toproc
def find_cars_orientation(img, mask, plot=False):
    """Find cars orientation in a colored cell/image by finding the min.
    rectangle area around it.

    Remove the background of the image using the mask. Then search for the min.
    area rectangle that can be drawn around it. As cars are usually parked next to each
    other and not one behind another, this can be used to find the cars orientation.
    Note: `img` is modified in place (its background is zeroed).

    Args:
        img (numpy.ndarray): Colored image cell to process (3 channels), supposed to contain
            more than a car.
        mask (numpy.ndarray): Mask numpy.ndarray (2D) of 0 and 1.
        plot (bool, optional): Show intermediate windows via cv2.imshow. Defaults to False.

    Returns:
        (float, numpy.ndarray, numpy.ndarray): Angle of the rectangle drawn around
            the cars (i.e. supposed orientation of the cars), the image with the
            rectangle filled in red, and the rectangle's corner points.
    """
    # Load image as HSV and select saturation
    hh, ww, cc = img.shape
    # Remove background for the colored image
    for i in range(cc):
        img[:, :, i][mask == 0] = 0
    # Convert to gray
    gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
    # Threshold the grayscale image
    ret, thresh = cv2.threshold(gray, 0, 255, 0)
    # Find outer contour
    cntrs = cv2.findContours(thresh, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
    # findContours returns 2 values on OpenCV 2/4 and 3 on OpenCV 3.
    cntrs = cntrs[0] if len(cntrs) == 2 else cntrs[1]
    # Get rotated rectangle from outer contour
    rotrect = cv2.minAreaRect(cntrs[0])
    box = cv2.boxPoints(rotrect)
    # NOTE(review): np.int0 is removed in NumPy 2.0 (use np.intp) — confirm
    # the pinned NumPy version before upgrading.
    box = np.int0(box)
    # Draw rotated rectangle on copy of img as result
    result = img.copy()
    # cv2.drawContours(result,[box],0,(0,0,255),2)
    cv2.fillPoly(result, [box], (0, 0, 255))
    # Get angle from rotated rectangle
    angle = rotrect[-1]
    # from https://www.pyimagesearch.com/2017/02/20/text-skew-correction-opencv-python/
    # the `cv2.minAreaRect` function returns values in the
    # range [-90, 0); as the rectangle rotates clockwise the
    # returned angle trends to 0 -- in this special case we
    # need to add 90 degrees to the angle
    if angle < -45:
        angle = -(90 + angle)
    # Otherwise, just take the inverse of the angle to make
    # it positive
    else:
        angle = -angle
    if plot:
        cv2.imshow("THRESH", thresh)
        cv2.imshow("RESULT", result)
        cv2.waitKey(0)
        cv2.destroyAllWindows()
    return angle, result, box
def generate_car_mask(angle):
    """Generate a car mask from the input angle.

    This mask will be used to go through a mask containing more than a single car
    and find the areas overlapping.  A 15x7-ish rectangle is drawn on a 100x100
    canvas, rotated by angle+90, then cropped to its bounding box.

    Args:
        angle (float): Orientation angle of the car.

    Returns:
        numpy.ndarray: 2D minimum mask to contain the oriented car mask.
    """
    # TODO: Need to take into account two cars parked in behing another.
    car_rect = np.zeros((100, 100))
    rr, cc = skimage.draw.rectangle((40, 40), end=(54, 46))
    car_rect[rr, cc] = 1
    # Rotation interpolates, so re-binarise afterwards.
    car_rect_rot = skimage.transform.rotate(car_rect, angle=angle + 90)
    car_rect_rot[car_rect_rot > 0] = 1
    # find min box around it
    # TODO: Need to find a better way
    min_x = 999
    min_y = 999
    max_x = 0
    max_y = 0
    # NOTE(review): both loops use shape[0]; fine for this square 100x100
    # canvas but would break on a non-square one.
    for i in range(car_rect_rot.shape[0]):
        for j in range(car_rect_rot.shape[0]):
            if car_rect_rot[i, j] == 1:
                if i < min_x:
                    min_x = i
                if i > max_x:
                    max_x = i
                if j < min_y:
                    min_y = j
                if j > max_y:
                    max_y = j
    car_selected = car_rect_rot[min_x:max_x, min_y:max_y]
    return car_selected
def open_remove(img, iters=3):
    """Aplly binary open with iters iterations on a mask. Remove small labels
    with less than 30 (those cannot contain a car). Then remove small labels with less than 160
    pixels which could contain a single car and store them in a list (returned).

    Args:
        img (numpy.ndarray): Image, without background, to process.
        iters (int, optional): Number of iterations for binary_opening. Defaults to 3.

    Returns:
        (numpy.ndarray, list(numpy.ndarray)): Return a tuple of two objects: mask to keep
            processing and removed masks/labels which are supposed to contain a single car each.
    """
    igbb = binary_opening(
        img, iterations=iters, structure=morph.square(2)
    )  # previously square 3
    mask = igbb.copy()
    mask[mask > 0] = 1
    # Anything under 30 px cannot be a car at all — drop it outright.
    mask = remove_too_small_pix(mask, pixels=30)
    l, c = ndimage.label(mask)
    l_masks = []
    for i in range(c):
        # 30..159 px: plausibly a single car — extract it as its own mask
        # and remove it from the working mask.
        if np.sum(l == i + 1) < 160:
            m = np.zeros_like(mask)
            m[l == i + 1] = 1
            l_masks.append(m)
            mask[l == i + 1] = 0
    return mask, l_masks
def over_cars(
    imgb, mask, car_selected, rdraw=None, over_perc=0.8, neighbors=0, plot=False
):
    """Go over imgb and mask with car_selected and try to find regions with high overlap percentage.
    Those, with a shape close to a car are supposed cars.

    Args:
        imgb (numpy.ndarray): Gray scale (2D) image with background removed.
        mask (numpy.ndarray): Mask (2D) where cars are supposed to be.
        car_selected (numpy.ndarray): Mask (2D) of orientated car used to find cars in mask.
        rdraw (numpy.ndarray, optional): Rectangular (2D) mask around all supposed cars in mask.
            Used to control area of overlap. Defaults to None.
        over_perc (float, optional): Overlap percentage with car_selected and mask. Defaults to .8.
        neighbors (int, optional): Parameter to control to look around after a match to find a better
            one. Defaults to 0.  NOTE(review): currently unused — the
            neighbourhood-search feature was removed (see comment below).
        plot (bool, optional): Whether or not to plot the different steps. Defaults to False.

    Returns:
        (list(numpy.ndarray)): List of the overlapped masks where are supposed cars.
    """
    igbb = imgb.copy()
    mtpb = mask.copy()
    # Number of pixels in the car template; overlap thresholds are relative to it.
    pix_car = np.sum(car_selected)
    j = 0
    l_m = []
    # Go over the original mask and try to find overlaps with car_selected
    while j <= (igbb.shape[1] - car_selected.shape[1]):
        i = 0
        while i <= (igbb.shape[0] - car_selected.shape[0]):
            check_mask = True
            if rdraw is not None:
                # Check that the car_selected is in rdraw before checking overlap with mask
                roverlap = (
                    rdraw[
                        i : (car_selected.shape[0] + i), j : (car_selected.shape[1] + j)
                    ]
                    + car_selected
                )
                if np.sum(roverlap > 1) < 0.8 * pix_car:
                    check_mask = False
            if check_mask:
                overlap = mtpb.copy()
                # Values > 1 in the sum mark pixels covered by both template and mask.
                overlap = (
                    mtpb[
                        i : (car_selected.shape[0] + i), j : (car_selected.shape[1] + j)
                    ]
                    + car_selected
                )
                # Check if detected
                if np.sum(overlap > 1) > over_perc * pix_car:
                    # Try to find a better match in the neighbourhood by moving to the right and to the bottom
                    # Removed the feature
                    max_find = (np.sum(overlap), i, j, overlap)
                    im, jm, om = max_find[1], max_find[2], max_find[3]
                    # Carve the detected car out of the working mask ...
                    mtpb[
                        im : (car_selected.shape[0] + im),
                        jm : (car_selected.shape[1] + jm),
                    ][om > 1] = 0
                    # ... and record it as its own single-car mask.
                    m = np.zeros_like(mtpb)
                    m[
                        im : (car_selected.shape[0] + im),
                        jm : (car_selected.shape[1] + jm),
                    ][om > 1] = 1
                    l_m.append(m)
                    if plot:
                        plt.imshow(mtpb)
                        plt.show()
                    # Try to create openings in mask
                    mtpb = binary_opening(mtpb, iterations=2)
                    # Remove artefacts from the openings
                    mtpb = remove_too_small_pix(mtpb, 50)
                    if plot:
                        plt.imshow(mtpb)
                        plt.show()
                    # Aplly the result on igbb
                    igbb[mtpb == 0] = 0
            i += 1
        j += 1
    return l_m
def remove_f_boundaries(imgb, mask):
    """Apply felzenszwalb on imgb and remove the found boundaries. Pretty conservative.
    Both inputs are modified in place and also returned.

    Args:
        imgb (numpy.ndarray): Image (2D) grayscale without background (filtered by mask).
        mask (numpy.ndarray): Mask of imgb.

    Returns:
        (numpy.ndarray, numpy.ndarray): imgb and mask with found boundaries removed.
    """
    # Graph-based segmentation; boundaries between segments are zeroed out
    # in both the image and its mask.
    segments = felzenszwalb(imgb, scale=200, sigma=0.5, min_size=100)
    bd = find_boundaries(segments)
    imgb[bd > 0] = 0
    mask[bd > 0] = 0
    return imgb, mask
def find_small_segment(cbox):
    """From a list of points drawing a rectangle, find the points drawing the smallest
    and highest side of it.

    Args:
        cbox (list(np.array)): List of arrays, each array being a points in space.

    Returns:
        (np.array, np.array): Tuple of the two points found.
    """
    def _dist(p, q):
        return np.sqrt((q[0] - p[0]) ** 2 + (q[1] - p[1]) ** 2)

    # Build the rectangle's sides; index -1 wraps around, closing the polygon.
    sides = [[cbox[i], cbox[i - 1]] for i in range(len(cbox))]
    lengths = np.array([_dist(p, q) for p, q in sides])
    order = lengths.argsort()
    # The two shortest sides are the candidates.
    short_a, short_b = sides[order[0]], sides[order[1]]
    # "Higher" means smaller y when working with matrix coordinates.
    if all(short_a[0][1] < pt[1] for pt in short_b) or all(short_a[1][1] < pt[1] for pt in short_b):
        return short_a
    return short_b
def find_angle(pts, cbox):
    """Compute the orientation angle for the car overlap mask.

    *pts* holds the endpoints of the smallest/highest side of the rectangle
    *cbox*.  The angle of that side against the horizontal is measured, then
    corrected so the generated car mask is oriented consistently.

    Args:
        pts ((np.array, np.array)): Endpoints of the smallest, highest segment.
        cbox (list(np.array)): Points drawing a rectangle.

    Returns:
        (float, bool): Corrected angle, and whether one of the segment's
        endpoints is the leftmost corner of the rectangle.
    """
    # Endpoint with the smaller y is "high" (matrix coordinates); on equal y
    # both names deliberately point at pts[1], matching historical behaviour.
    highp = pts[1]
    if pts[0][1] < pts[1][1]:
        highp = pts[0]
    lowp = pts[1]
    if pts[0][1] > pts[1][1]:
        lowp = pts[0]
    # Angle between the segment and a flat (horizontal) unit vector.
    horizontal = np.array([highp[0] + 1, highp[1]]) - highp
    slanted = lowp - highp
    cos_comp = np.dot(horizontal, slanted)
    sin_comp = np.linalg.norm(np.cross(horizontal, slanted))
    angle = np.rad2deg(np.arctan2(sin_comp, cos_comp))
    # Is one of the two points the leftmost among all corners of cbox?
    left = any(all(p[0] <= corner[0] for corner in cbox) for p in pts)
    # Correction applied before generating the car mask from the angle.
    angle = 180 - angle if angle < 90 else 360 - angle
    return angle, left
def boundaries_to_remove(l_masks, plot=False):
    """Merge per-car masks into one labelled image and mark their boundaries.

    Args:
        l_masks (list(numpy.ndarray)): Individual binary masks.
        plot (bool): Show intermediate images for debugging.

    Returns:
        numpy.ndarray or None: Binary mask of the boundary pixels separating
        the masks, or None (with a message printed) when *l_masks* is empty.
    """
    if not l_masks:
        print("l_masks must contain at least a single mask to process.")
        return None
    labelled = np.zeros_like(l_masks[0])
    for idx, single in enumerate(l_masks):
        labelled = labelled + single * (idx + 1)
    boundaries = skimage.segmentation.find_boundaries(labelled)
    if plot:
        plt.imshow(boundaries)
        plt.show()
    remove = np.zeros_like(l_masks[0])
    remove[boundaries > 0] = 1
    if plot:
        plt.imshow(remove)
        plt.show()
    return remove
def process_label(image, image_gray, mask, labels, label_id, plot=False):
    """Isolate connected component *label_id*, clean it up and split it into
    per-car masks, returning their union with inter-car boundaries removed.

    Args:
        image (numpy.ndarray): Original colour image.
        image_gray (numpy.ndarray): Grayscale version of *image*.
        mask (numpy.ndarray): Detection mask over the full image.
        labels (numpy.ndarray): Labelled components (input to ndimage.find_objects).
        label_id (int): Index of the component to process.
        plot (bool): Show intermediate images for debugging.

    Returns:
        numpy.ndarray: Combined per-car mask for the component's bounding box.
    """
    img = image.copy()
    imgg = image_gray.copy()
    pmask = mask.copy()
    l_masks = []
    # Crop image and mask to process label_id
    # (ii / obj_indices are the row / column slices of the bounding box).
    ii, obj_indices = ndimage.find_objects(labels)[label_id]
    ic = img[ii, obj_indices, :]
    ig = imgg[ii, obj_indices]
    mtp = pmask[ii, obj_indices]
    # Binarise the cropped mask.
    mtp = np.where(mtp > 0, 1, 0)
    igb = ig.copy()
    igb[mtp == 0] = 0
    if plot:
        plt.imshow(igb)
        plt.show()
    # Remove small part which are kept in cell, mostly from other labels
    mtp = remove_too_small_pix(mtp, 50)
    igb[mtp == 0] = 0
    if plot:
        plt.imshow(igb)
        plt.show()
    # 240 px threshold: roughly "less than two cars" (one car ~150 px,
    # per the comment further down) — keep the blob as a single mask.
    if np.sum(mtp) < 240:
        l_masks.append(mtp)
        mtpb = np.zeros_like(mtp)
    else:
        # Try to separate small part of the mask
        mtpb, l_m = open_remove(igb, iters=5)
        l_masks = l_masks + l_m
        if plot:
            plt.imshow(mtpb)
            plt.show()
        igb[mtpb == 0] = 0
        if plot:
            plt.imshow(igb)
            plt.show()
        # Check if there is enough packed pixels to contain close to two cars
        # Threshold set as one car is around 150 pix, then two with a small margin
        # should be higher than 240
        if np.sum(mtpb) < 240:
            # add remaining mask
            l_masks.append(mtpb)
        else:
            # Find the orientation of the cars
            angle, rbox, cbox = find_cars_orientation(ic, mtpb, False)
            # Generate the mask box onto which the cars should be
            rbox = np.sum(rbox, axis=-1)
            rbox[rbox > 0] = 1
            if plot:
                plt.imshow(rbox)
                plt.show()
            # Find the smallest lines with the higher point in array
            pts = find_small_segment(cbox)
            angle, left = find_angle(pts, cbox)
            car_selected = generate_car_mask(angle)
            if plot:
                plt.imshow(car_selected)
                plt.show()
            if left:
                l_m1 = over_cars(igb, mtpb, car_selected, rbox)
                l_masks = l_masks + l_m1
                if plot:
                    for l in l_m1:
                        plt.imshow(l)
                        plt.show()
            else:
                # Segment on the right: mirror everything, match, mirror back.
                igbf = np.flip(igb, axis=1)
                mtpbf = np.flip(mtpb, axis=1)
                carf = np.flip(car_selected, axis=1)
                rboxf = np.flip(rbox, axis=1)
                l_m1 = over_cars(igbf, mtpbf, carf, rboxf)
                # need to flip masks back
                l_m1 = [np.flip(x, axis=1) for x in l_m1]
                l_masks = l_masks + l_m1
                if plot:
                    for l in l_m1:
                        plt.imshow(l)
                        plt.show()
    if plot:
        for l in l_masks:
            plt.imshow(l)
            plt.show()
    # Clear the boundary pixels between individual car masks in the union.
    remove_mask = boundaries_to_remove(l_masks, plot)
    result = reduce(lambda a, b: a + b, l_masks)
    if plot:
        plt.imshow(result)
        plt.show()
    result[remove_mask > 0] = 0
    if plot:
        plt.imshow(result)
        plt.show()
    return result
|
class Element:
    """Base class for chemical elements.

    Subclasses define ``t_plav`` (melting point) and ``t_isp`` (boiling
    point), both in degrees Celsius.
    """

    def agg_state(self, t, v):
        """Return the state of aggregation for temperature *t* in unit *v*.

        *v* is ``'celsius'``, ``'fahrenheit'`` or ``'kelvin'``; *t* is first
        converted to Celsius.  NOTE(review): the original converted in the
        wrong direction (Celsius->Fahrenheit / Celsius->Kelvin) even though
        it printed the result as Celsius, and it invoked the converters via
        the unrelated ``Iron`` class; both fixed here.
        """
        if v == "fahrenheit":
            t = self.convert_fr(t)
        elif v == "kelvin":
            t = self.convert_cl(t)
        print('Температура в цельсиях: ' + str(t))
        if t < self.t_plav:
            return 'Затвердение'
        elif t >= self.t_plav and t < self.t_isp:
            return "Плавление"
        else:
            return "Испарение"

    def convert_fr(self, tem):
        """Convert *tem* from Fahrenheit to Celsius."""
        return (tem - 32) * 5 / 9

    def convert_cl(self, tem):
        """Convert *tem* from Kelvin to Celsius."""
        return tem - 273
class Iron(Element):
    # Melting / boiling points in degrees Celsius.
    t_plav = 1538
    t_isp = 2862
class Chlorine(Element):
    # Melting / boiling points in degrees Celsius.
    t_plav = -100
    t_isp = -34
class Oxygen(Element):
    # Melting / boiling points in degrees Celsius.
    t_plav = -218
    t_isp = -182
# Demo: classify one temperature per element, each given in a different unit.
print(Iron().agg_state(1537, "celsius"))
print(Chlorine().agg_state(-310, 'kelvin'))
print(Oxygen().agg_state(1, 'fahrenheit'))
|
#!/usr/bin/env python3
'''
@author: Josh Snider
'''
import filters
import pdb
import tropes
import unittest
class TestTropes(unittest.TestCase):
    """Integration tests for tropes.Tropes shout-out extraction.

    NOTE(review): removed a leftover ``pdb.set_trace()`` from
    ``test_multitrope_shoutouts`` — it froze any unattended test run — and
    replaced bare ``assert`` calls with unittest assertions for better
    failure messages.
    """

    def test_sep_shoutouts(self):
        # Page with a dedicated shout-out section.
        with tropes.Tropes(False) as datab:
            shoutouts = datab.get_shoutouts(
                'http://tvtropes.org/pmwiki/pmwiki.php/TabletopGame/Warhammer40000')
            self.assertEqual(len(shoutouts), 50)
            self.assertIn(
                'http://tvtropes.org/pmwiki/pmwiki.php/Film/ApocalypseNow',
                shoutouts)
            self.assertIn(
                'http://tvtropes.org/pmwiki/pmwiki.php/Literature/TheLordOfTheRings',
                shoutouts)

    def test_multitrope_shoutouts(self):
        # Page whose shout-outs appear under several tropes: none extracted.
        with tropes.Tropes(False) as datab:
            shoutouts = datab.get_shoutouts(
                'http://tvtropes.org/pmwiki/pmwiki.php/Anime/DragonBallZ')
            self.assertEqual(len(shoutouts), 0)

    def test_single_shoutouts(self):
        # Page with a small, single list of shout-outs.
        with tropes.Tropes(False) as datab:
            shoutouts = datab.get_shoutouts(
                'http://tvtropes.org/pmwiki/pmwiki.php/Series/MontyPythonsFlyingCircus')
            self.assertEqual(len(shoutouts), 3)
            self.assertIn(
                'http://tvtropes.org/pmwiki/pmwiki.php/Series/ThePrisoner',
                shoutouts)
            self.assertIn(
                'http://tvtropes.org/pmwiki/pmwiki.php/Series/TheSaint',
                shoutouts)
# Run the suite when this file is executed directly.
if __name__ == '__main__':
    unittest.main()
|
from .player_profile import login, register
def intro_graphic():
    """Print the ASCII-art banner shown above the log-in menu."""
    print(
        """
888 d8b 888 888
888 Y8P 888 888
888 888 888
888888888 .d8888b888888 8888b. .d8888b888888 .d88b. .d88b.
888 888d88P" 888 "88bd88P" 888 d88""88bd8P Y8b
888 888888 888 .d888888888 888 888 88888888888
Y88b. 888Y88b. Y88b. 888 888Y88b. Y88b. Y88..88PY8b.
"Y888888 "Y8888P "Y888"Y888888 "Y8888P "Y888 "Y88P" "Y8888
Welcome to the informal log-in screen:
""")
def main_menu():
    """Show the start menu and return the player from login() or register().

    Keeps prompting until a valid option is chosen.  NOTE(review): the
    original called ``int(input())`` unguarded, so any non-numeric entry
    crashed with ValueError; that is now handled as an invalid option.
    """
    print("""Greetings! Dare you enter the dreaded warzone?
1.Log in
2.Register""")
    while True:
        try:
            player_choice = int(input())
        except ValueError:
            print("Please pick one of the available options")
            continue
        if player_choice == 1:
            return login()
        elif player_choice == 2:
            return register()
        else:
            print("Please pick one of the available options")
|
import pandas as pd
import numpy as np
import tensorflow as tf
from tensorflow import keras
import matplotlib.pyplot as plt
train = pd.read_csv('training_data.csv')
test = pd.read_csv('testing_data.csv')
validation = pd.read_csv('validation_data.csv')


def _encode_split(frame):
    """Encode one dataset split into model inputs and targets.

    Each word becomes a list of its character ordinals; the target is the
    word's syllable count.  Blank words (' ') are skipped entirely.

    NOTE(review): the original three copy-pasted loops appended the target
    even when the blank word's input was skipped, silently misaligning
    inputs and labels whenever a blank word occurred; fixed here, and the
    duplication is factored into this helper.

    Args:
        frame (pandas.DataFrame): Split with Word and Number_of_Syllables columns.

    Returns:
        (numpy.ndarray, numpy.ndarray): Encoded inputs and targets.
    """
    inputs, outputs = [], []
    for row in frame.itertuples():
        if row.Word == ' ':  # a blank word was getting in for some reason
            continue
        inputs.append([ord(ch) for ch in row.Word])
        outputs.append(row.Number_of_Syllables)
    return np.array(inputs), np.array(outputs)


train_in, train_out = _encode_split(train)
test_in, test_out = _encode_split(test)
validation_in, validation_out = _encode_split(validation)
# Embedding input_dim — assumes every character ordinal in the data is < 143;
# TODO confirm against the datasets.
max_word = 143
model = keras.Sequential()
model.add(keras.layers.Embedding(max_word, 100))
# Average per-character embeddings into one fixed-size vector per word.
model.add(keras.layers.GlobalAveragePooling1D())
model.add(keras.layers.Dense(100, activation=tf.nn.relu))
model.add(keras.layers.Dense(50, activation=tf.nn.relu))
# 8-way softmax over syllable counts (sparse integer labels below).
model.add(keras.layers.Dense(8, activation=tf.nn.softmax))
model.summary()
model.compile(optimizer=tf.compat.v1.train.GradientDescentOptimizer(learning_rate=0.2, use_locking=False),
              loss='sparse_categorical_crossentropy', metrics=['accuracy'])
history = model.fit(train_in, train_out, epochs=1000, batch_size=500, validation_data=(test_in, test_out), verbose=2)
results = model.evaluate(validation_in, validation_out)
print('Accuracy is', results[1])
# Training curves: accuracy then loss, for the training and test splits.
# summarize history for accuracy
plt.plot(history.history['accuracy'])
plt.plot(history.history['val_accuracy'])
plt.title('model accuracy')
plt.ylabel('accuracy')
plt.xlabel('epoch')
plt.legend(['train', 'test'], loc='upper left')
plt.show()
# summarize history for loss
plt.plot(history.history['loss'])
plt.plot(history.history['val_loss'])
plt.title('model loss')
plt.ylabel('loss')
plt.xlabel('epoch')
plt.legend(['train', 'test'], loc='upper left')
plt.show()
|
n=raw_input()
import math
r=float(n.split()[0])
n=int(n.split()[1])
a=float(2*r*math.sin(3.14/n))
p=float(n*a)
print round(p,1)
|
from abaqusConstants import *
from .GeometricRestriction import GeometricRestriction
from ..Region.Region import Region
class TopologyCyclicSymmetry(GeometricRestriction):
    """The TopologyCyclicSymmetry object defines a topology cyclic symmetry geometric
    restriction.
    The TopologyCyclicSymmetry object is derived from the GeometricRestriction object.
    Notes
    -----
    This object can be accessed by:
    .. code-block:: python
    import optimization
    mdb.models[name].optimizationTasks[name].geometricRestrictions[name]
    """
    def __init__(
        self,
        name: str,
        region: Region,
        translation: float,
        axis: SymbolicConstant = AXIS_1,
        csys: int = None,
        ignoreFrozenArea: Boolean = OFF,
    ):
        """This method creates a TopologyCyclicSymmetry object.
        Notes
        -----
        This function can be accessed by:
        .. code-block:: python
        mdb.models[name].optimizationTasks[name].TopologyCyclicSymmetry
        Parameters
        ----------
        name
        A String specifying the geometric restriction repository key.
        region
        A Region object specifying the region to which the geometric restriction is applied.
        When used with a TopologyTask, there is no default value. When used with a ShapeTask,
        the default value is MODEL.
        translation
        A Float specifying the translation distance.
        axis
        A SymbolicConstant specifying the translation direction defined along an axis positioned
        at the *csys* origin. Possible values are AXIS_1, AXIS_2, and AXIS_3. The default value
        is AXIS_1.
        csys
        None or a DatumCsys object specifying the local coordinate system. If *csys*=None, the
        global coordinate system is used. When this member is queried, it returns an Int. The
        default value is None.
        ignoreFrozenArea
        A Boolean specifying whether to ignore frozen areas. The default value is OFF.
        Returns
        -------
        A TopologyCyclicSymmetry object.
        """
        super().__init__()
        # Interface stub: the actual behaviour is implemented by the Abaqus
        # kernel, not by this Python shim.
        pass
    def setValues(
        self,
        axis: SymbolicConstant = AXIS_1,
        csys: int = None,
        ignoreFrozenArea: Boolean = OFF,
    ):
        """This method modifies the TopologyCyclicSymmetry object.
        Parameters
        ----------
        axis
        A SymbolicConstant specifying the translation direction defined along an axis positioned
        at the *csys* origin. Possible values are AXIS_1, AXIS_2, and AXIS_3. The default value
        is AXIS_1.
        csys
        None or a DatumCsys object specifying the local coordinate system. If *csys*=None, the
        global coordinate system is used. When this member is queried, it returns an Int. The
        default value is None.
        ignoreFrozenArea
        A Boolean specifying whether to ignore frozen areas. The default value is OFF.
        """
        # Interface stub: see __init__.
        pass
|
from PyQt5 import QtCore, QtGui, QtWidgets
class noticeProfile(object):
    # Qt Designer-style UI class for a read-only "Notice" detail dialog.
    def setup(self, Notice,data):
        """Build the widget tree on the *Notice* dialog and show *data*.

        *data* is a mapping read in clickevent(); it must provide the keys
        'id', 'date', 'title' and 'notice' — TODO confirm against callers.
        """
        self.data = data
        Notice.setObjectName("Notice")
        Notice.resize(580, 429)
        self.frame = QtWidgets.QFrame(Notice)
        self.frame.setGeometry(QtCore.QRect(10, 10, 561, 381))
        self.frame.setFrameShape(QtWidgets.QFrame.StyledPanel)
        self.frame.setFrameShadow(QtWidgets.QFrame.Raised)
        self.frame.setObjectName("frame")
        self.idLabel = QtWidgets.QLabel(self.frame)
        self.idLabel.setGeometry(QtCore.QRect(10, 10, 71, 41))
        self.idLabel.setStyleSheet("font-size:14pt;\n"
"font-weight: bold;")
        self.idLabel.setObjectName("idLabel")
        self.titlelabel = QtWidgets.QLabel(self.frame)
        self.titlelabel.setGeometry(QtCore.QRect(10, 60, 91, 41))
        self.titlelabel.setObjectName("titlelabel")
        self.noticeLabel = QtWidgets.QLabel(self.frame)
        self.noticeLabel.setGeometry(QtCore.QRect(10, 110, 91, 41))
        self.noticeLabel.setObjectName("noticeLabel")
        self.id = QtWidgets.QLabel(self.frame)
        self.id.setGeometry(QtCore.QRect(90, 10, 91, 41))
        self.id.setObjectName("id")
        self.title = QtWidgets.QLabel(self.frame)
        self.title.setGeometry(QtCore.QRect(90, 60, 461, 41))
        self.title.setObjectName("title")
        self.notice = QtWidgets.QTextBrowser(self.frame)
        self.notice.setGeometry(QtCore.QRect(90, 120, 461, 251))
        self.notice.setObjectName("notice")
        self.dateLabel = QtWidgets.QLabel(self.frame)
        self.dateLabel.setGeometry(QtCore.QRect(230, 10, 178, 41))
        self.dateLabel.setStyleSheet("font-size:14pt;\n"
"font-weight: bold;")
        self.dateLabel.setObjectName("dateLabel")
        self.date = QtWidgets.QLabel(self.frame)
        self.date.setGeometry(QtCore.QRect(420, 10, 151, 41))
        self.date.setObjectName("date")
        self.closeButton = QtWidgets.QPushButton(Notice)
        self.closeButton.setGeometry(QtCore.QRect(480, 400, 89, 25))
        self.closeButton.setObjectName("closeButton")
        self.retranslateUi(Notice)
        QtCore.QMetaObject.connectSlotsByName(Notice)
    def retranslateUi(self, Notice):
        """Assign the (translatable) static texts, then fill in the data."""
        _translate = QtCore.QCoreApplication.translate
        Notice.setWindowTitle(_translate("Notice", "Notice"))
        self.idLabel.setText(_translate("Notice", "<html><head/><body><p>Id : </p></body></html>"))
        self.titlelabel.setText(_translate("Notice", "<html><head/><body><p><span style=\" font-size:12pt; font-weight:600;\">Title :</span></p></body></html>"))
        self.noticeLabel.setText(_translate("Notice", "<html><head/><body><p><span style=\" font-size:12pt; font-weight:600;\">Notice :</span></p></body></html>"))
        self.id.setText(_translate("Notice", "<html><head/><body><p>ID</p></body></html>"))
        self.title.setText(_translate("Notice", "<html><head/><body><p>Title</p></body></html>"))
        self.dateLabel.setText(_translate("Notice", "<html><head/><body><p>Date Of Publish :</p></body></html>"))
        self.date.setText(_translate("Notice", "<html><head/><body><p>29/09/2002</p></body></html>"))
        self.closeButton.setText(_translate("Notice", "Close"))
        self.clickevent(Notice)
    def clickevent(self,parent):
        """Replace the placeholder texts with the notice data and wire Close."""
        print(self.data)
        self.id.setText(str(self.data["id"]))
        # Date stored with a time component; keep only YYYY-MM-DD.
        self.date.setText(str(self.data["date"])[:10])
        self.title.setText(self.data["title"])
        self.notice.setText(self.data["notice"])
        self.closeButton.clicked.connect(lambda : parent.close())
|
# Run Selenium tests in parallel with Python for Selenium Python tutorial
import pytest
from selenium import webdriver
import os
from webdriver_manager.firefox import GeckoDriverManager
from webdriver_manager.chrome import ChromeDriverManager
import boto3
from selenium.webdriver import DesiredCapabilities
from selenium.webdriver import Remote
# os.environ['HUB_HOST'] = 'localhost'
# os.environ['BROWSER'] = 'chrome'
def pytest_addoption(parser):
    """Register the CLI options used to pick the browser and the Selenium hub."""
    option_specs = (
        ("--testBrowser", "chrome", "supply the browser where test needs to run"),
        ("--hubHost", "localhost", "supply hub host"),
    )
    for flag, default_value, help_text in option_specs:
        parser.addoption(flag, action="store", default=default_value, help=help_text)
### The commented lines below are used to add markers for selecting which tests to run; the value is supplied from the command line ###
# parser.addoption(
# "--testCustomMarker",
# action="store",
# metavar="NAME",
# default="chrome",
# help="only run tests matching the environment NAME.",
# )
# def pytest_configure(config):
# # register an additional marker
# config.addinivalue_line(
# "markers", "env(name): mark test to run only on named environment"
# )
#
# def pytest_runtest_setup(item):
# envnames = [mark.args[0] for mark in item.iter_markers(name="env")]
# if envnames:
# if item.config.getoption("--testCustomMarker") not in envnames:
# pytest.skip("test requires env in {!r}".format(envnames))\
@pytest.fixture(scope="session")
def driver_browser(request):
    """Session-scoped Remote WebDriver pointed at the Selenium hub.

    Reads BROWSER and HUB_HOST from the environment.  NOTE(review): the
    original indexed os.environ directly and raised KeyError when either
    variable was unset; both now fall back to the same defaults the CLI
    options in pytest_addoption declare ('chrome' / 'localhost').
    """
    browser = os.environ.get('BROWSER', 'chrome')
    print("Browser thru cmd =", browser)
    hub_url = "http://" + os.environ.get('HUB_HOST', 'localhost') + ":4444/wd/hub"
    if browser == "firefox":
        web_driver = webdriver.Remote(hub_url, DesiredCapabilities.FIREFOX)
    else:
        web_driver = webdriver.Remote(hub_url, DesiredCapabilities.CHROME)
    return web_driver
@pytest.fixture(scope="class")
def driver_init(request,driver_browser):
    # Attach the shared session driver to the test class, pre-navigated to
    # Google; quits the driver when the class finishes.
    driver_browser.implicitly_wait(10)
    driver_browser.get("http://www.google.com")
    request.cls.driver = driver_browser
    yield
    driver_browser.quit()
@pytest.fixture(scope="class")
def driver_init_chrome(request):
    # Class-scoped Chrome driver against the hub (ignores the BROWSER env var).
    driver = webdriver.Remote("http://"+os.environ['HUB_HOST']+":4444/wd/hub", DesiredCapabilities.CHROME)
    driver.get("http://www.google.com")
    request.cls.driver = driver
    yield
    driver.quit()
@pytest.fixture(scope="class")
def driver_init_firefox(request):
    # Class-scoped Firefox driver against the hub (ignores the BROWSER env var).
    driver = webdriver.Remote("http://"+os.environ['HUB_HOST']+":4444/wd/hub", DesiredCapabilities.FIREFOX)
    driver.get("http://www.google.com")
    request.cls.driver = driver
    yield
    driver.quit()
|
# Express the input as 3*a + 5*b: print the minimal a + b, or -1 if impossible.
value = int(input())
threes = 0
value += 3
while value > 0:
    value -= 3
    if value % 5 == 0:
        # Remainder divisible by 5: threes 3s plus value//5 fives.
        print(threes + value // 5)
        break
    threes += 1
if value < 0:
    print(-1)
|
import joblib
import numpy as np
from ndarraydjango.fields import NDArrayField
from sklearn.linear_model import LogisticRegression
from sklearn.tree import DecisionTreeClassifier
from sklearn.metrics import confusion_matrix, accuracy_score
from django.db.models import *
import pickle
from census.models import Census
from machine_learning_model.constants import *
from sklearn.neighbors import KNeighborsClassifier
from sklearn.naive_bayes import GaussianNB
from sklearn.ensemble import RandomForestClassifier
from sklearn.svm import SVC
import pandas as pd
from sklearn.preprocessing import LabelEncoder, OneHotEncoder, StandardScaler
from sklearn.compose import ColumnTransformer
from sklearn.model_selection import train_test_split
from census.models import Census
class MachineLearningModel(Model):
    """Django model that trains, persists and scores a scikit-learn classifier.

    Only the accuracy (``precisao``), library name and timestamp are stored
    in the database; the fitted classifier is pickled to
    ``'<biblioteca_usada>.pkl'`` and the encoders to sidecar files.
    """

    # Transient, in-memory training state (not persisted as model fields).
    classe = None
    previsores = None
    previsores_treinamento = None
    previsores_teste = None
    classe_teste = None
    classe_treinamento = None
    classificador = None
    previsao = None
    matriz = None
    # Persisted fields.
    precisao = DecimalField(max_digits=4, decimal_places=2, null=True, blank=True)
    biblioteca_usada = CharField(max_length=20)
    datetime = DateTimeField(auto_now_add=True)
    classe_modelo = None

    def open_classificador(self, classe_modelo, biblioteca_usada):
        """Load the pickled classifier; on any failure, train a fresh one."""
        self.classe_modelo = classe_modelo
        self.biblioteca_usada = biblioteca_usada
        try:
            with open(self.biblioteca_usada + '.pkl', 'rb') as f:
                self.classificador = pickle.load(f)
        except Exception as e:
            print(e)
            query_data = self.classe_modelo.objects.all()
            self.get_data(query_data)
            self.treinar_modelo()
            self.definir_precisao()

    def save_classificador(self):
        """Pickle the fitted classifier to '<biblioteca_usada>.pkl'."""
        with open(self.biblioteca_usada + '.pkl', 'wb') as f:
            pickle.dump(self.classificador, f)

    def treinar_modelo(self):
        """Split the data, build the selected estimator, fit it and save it."""
        self.dividir_base_previsores_classe()
        # NOTE(review): the original compared the library name with 'is'
        # (object identity), which only works by string-interning luck, and
        # the KNN branch mistakenly built an SVM; both fixed here.
        if self.biblioteca_usada == REGRESSAO_LOGISTICA:
            self.treinar_regressao_logistica()
        elif self.biblioteca_usada == ARVORES_DE_DECISAO:
            self.treinar_arvores_decisao()
        elif self.biblioteca_usada == KNN:
            self.treinar_knn()
        elif self.biblioteca_usada == NAIVE_BAYES:
            self.treinar_naive_bayes()
        elif self.biblioteca_usada == RANDOM_FOREST:
            self.treinar_random_forest()
        elif self.biblioteca_usada == SVM:
            self.treinar_svm()
        self.classificador.fit(self.previsores_treinamento, self.classe_treinamento)
        self.save_classificador()

    def treinar_regressao_logistica(self):
        """Build a logistic-regression estimator."""
        self.classificador = LogisticRegression(max_iter=10)

    def treinar_arvores_decisao(self):
        """Build a decision-tree estimator."""
        self.classificador = DecisionTreeClassifier(criterion='entropy', random_state=0)

    def treinar_knn(self):
        """Build a k-nearest-neighbours estimator."""
        self.classificador = KNeighborsClassifier(n_neighbors=5, metric='minkowski', p=2)

    def treinar_naive_bayes(self):
        """Build a Gaussian naive-Bayes estimator."""
        self.classificador = GaussianNB()

    def treinar_random_forest(self):
        """Build a random-forest estimator."""
        self.classificador = RandomForestClassifier(n_estimators=40, criterion='entropy', random_state=0)

    def treinar_svm(self):
        """Build a linear-kernel SVM estimator."""
        self.classificador = SVC(kernel='linear', random_state=1)

    def get_data(self, query_data):
        """Convert a queryset into the previsores/classe arrays."""
        database = pd.DataFrame(list(query_data.values()))
        if self.classe_modelo is Census:
            self.get_census_data(database)

    def get_census_data(self, database):
        """Encode Census rows: label-encode, one-hot-encode, then standardise."""
        classe = database.iloc[:, 15].values
        labelencoder_classe = LabelEncoder()
        self.classe = labelencoder_classe.fit_transform(classe)
        previsores = database.iloc[:, 1:15].values
        labelencoder_previsores = LabelEncoder()
        previsores[:, 1] = labelencoder_previsores.fit_transform(previsores[:, 1])
        previsores[:, 3] = labelencoder_previsores.fit_transform(previsores[:, 3])
        previsores[:, 5] = labelencoder_previsores.fit_transform(previsores[:, 5])
        previsores[:, 6] = labelencoder_previsores.fit_transform(previsores[:, 6])
        previsores[:, 7] = labelencoder_previsores.fit_transform(previsores[:, 7])
        previsores[:, 8] = labelencoder_previsores.fit_transform(previsores[:, 8])
        previsores[:, 9] = labelencoder_previsores.fit_transform(previsores[:, 9])
        previsores[:, 13] = labelencoder_previsores.fit_transform(previsores[:, 13])
        onehotencoder = ColumnTransformer(transformers=[("OneHot", OneHotEncoder(), [1, 3, 5, 6, 7, 8, 9, 13])],
                                          remainder='passthrough')
        myscaler_name = "scaler_" + str(self.biblioteca_usada) + "_.bin"
        onehotencoder_name = "onehot_" + str(self.biblioteca_usada) + "_.joblib"
        if len(previsores) > 1:
            # Training path: fit the encoders and persist them for reuse.
            onehot = onehotencoder.fit(previsores)
            previsores = onehot.transform(previsores).toarray()
            scaler = StandardScaler()
            self.previsores = scaler.fit_transform(previsores)
            joblib.dump(onehot, onehotencoder_name)
            joblib.dump(scaler, myscaler_name)
        else:
            # Scoring path: reuse the persisted encoders.  NOTE(review): the
            # original called fit_transform here, re-fitting the scaler on a
            # single incoming sample and discarding the saved scaling.
            onehot = joblib.load(onehotencoder_name)
            scaler = joblib.load(myscaler_name)
            previsores = onehot.transform(previsores).toarray()
            self.previsores = scaler.transform(previsores)
        print("termino codificacao")

    def dividir_base_previsores_classe(self):
        """Perform an 85/15 train/test split with a fixed seed."""
        self.previsores_treinamento, self.previsores_teste, self.classe_treinamento, self.classe_teste = \
            train_test_split(
                self.previsores,
                self.classe,
                test_size=0.15,
                random_state=0)
        print("termino divisao")

    def definir_precisao(self):
        """Predict on the held-out split; record accuracy and confusion matrix."""
        if self.previsores_teste is None:
            self.previsores_teste = self.previsores
        self.previsao = self.classificador.predict(self.previsores_teste)
        if self.classe_teste is not None:
            self.precisao = accuracy_score(self.classe_teste, self.previsao)
            self.matriz = confusion_matrix(self.classe_teste, self.previsao)
|
class Base(object):
    """Mixin giving declarative models copy and bulk-update helpers."""

    def clone(self):
        """Return a new instance with every mapped column value copied over."""
        duplicate = self.__class__()
        for column in self.__table__.columns:
            setattr(duplicate, column.name, getattr(self, column.name))
        return duplicate

    def update_from_dict(self, d):
        """Set each key/value pair of *d* as an attribute on this instance."""
        for attr_name, attr_value in d.items():
            setattr(self, attr_name, attr_value)
from sqlalchemy.ext.declarative import declarative_base
# Rebind Base to a declarative base class that mixes in the helpers above.
Base = declarative_base(cls=Base)
|
# -*- coding: utf-8 -*-
"""Добавляет к выбору цепи выбранного оборудования. Отфильтровывает из выбора лишние категории"""
__title__ = 'Выбрать цепи\nоборудования'
__author__ = 'SG'
import re
import clr
clr.AddReference('System.Core')
from System.Collections.Generic import *
from Autodesk.Revit.DB import ElementId, PartUtils, ViewOrientation3D, XYZ, FilteredElementCollector, BuiltInCategory, Transaction, TransactionGroup, BuiltInParameter
import sys
from Autodesk.Revit.UI.Selection import ObjectType, ISelectionFilter
# Active Revit document / UI document (pyRevit injects __revit__).
doc = __revit__.ActiveUIDocument.Document
uidoc = __revit__.ActiveUIDocument
# Millimetres per foot: Revit stores lengths in feet internally.
k = 304.8
def natural_sorted(list, key=lambda s: s):
    """
    Sort the list into natural alphanumeric order: digit runs inside the
    key string compare as integers, so 'item2' sorts before 'item10'.
    """
    def alphanum_key(item):
        pieces = re.split('([0-9]+)', key(item))
        return [int(piece) if piece.isdigit() else piece for piece in pieces]
    return sorted(list, key=alphanum_key)
# Collect the current selection and keep only electrical-equipment elements.
sel = [doc.GetElement(elid) for elid in uidoc.Selection.GetElementIds()]
# sel = filter(lambda x: x.LookupParameter('Категория').AsValueString() == 'Электрооборудование', sel)
sel = [el for el in sel if el.LookupParameter('Категория').AsValueString() == 'Электрооборудование']
# if sel and __forceddebugmode__:
# Plain click: print a linked report table of the circuits feeding the
# selected equipment.  Shift+click (second branch): replace the selection
# with the circuits themselves.
if sel and not __shiftclick__:
    from pyrevit import script
    output = script.get_output()
    system_ids_to_select = []
    for el in sel:
        if el.MEPModel.ElectricalSystems:
            for system in el.MEPModel.ElectricalSystems:
                if system.Id not in system_ids_to_select:
                    system_ids_to_select.append(system.Id)
        elif 'ейк' in el.LookupParameter('Тип').AsValueString():
            # Type name contains 'ейк': element stores its circuit id in the
            # 'Цепь' parameter — resolve it directly.
            target = el.LookupParameter('Цепь').AsString()
            system_ids_to_select.append(ElementId(int(target)))
    data = []
    system_ids_to_select = list(set(system_ids_to_select))
    # print(system_ids_to_select)
    # 1/0
    # Stable natural sorts, least-significant key first: end position, then
    # start position, then circuit room.
    system_ids_to_select = natural_sorted(system_ids_to_select,
        key=lambda id: (doc.GetElement(id).LookupParameter('Позиция конца').AsString() if doc.GetElement(id).LookupParameter('Позиция конца') else ''))
    system_ids_to_select = natural_sorted(system_ids_to_select,
        key=lambda id: doc.GetElement(id).LookupParameter('Позиция начала').AsString() if doc.GetElement(id).LookupParameter('Позиция начала') else '')
    system_ids_to_select = natural_sorted(system_ids_to_select,
        key=lambda id: doc.GetElement(id).LookupParameter('Помещение цепи').AsString() if doc.GetElement(id).LookupParameter('Позиция начала') else '')
    # One table row per circuit: links, positions, panel, path mode, lengths.
    for id in system_ids_to_select:
        el = doc.GetElement(id)
        data.append([
            output.linkify(id),
            el.LookupParameter('Помещение цепи').AsString() if el.LookupParameter('Помещение цепи') else el.LookupParameter('Помещение').AsString() if el.LookupParameter('Помещение') else '',
            # el.LookupParameter('Позиция начала').AsString() if el.LookupParameter('Позиция начала') else '',
            output.linkify(
                list(el.Elements)[0].Id,
                el.LookupParameter('Позиция начала').AsString() if el.LookupParameter('Позиция начала') else ''
            ),
            el.LookupParameter('Имя нагрузки').AsString() if el.LookupParameter('Имя нагрузки') else '',
            # el.LookupParameter('Позиция конца').AsString() if el.LookupParameter('Позиция конца') else '',
            output.linkify(
                # list(el.BaseEquipment)[0].Id,
                el.BaseEquipment.Id,
                el.LookupParameter('Позиция конца').AsString() if el.LookupParameter('Позиция конца') else ''
            ),
            el.LookupParameter('Панель').AsString(),
            el.LookupParameter('Тип, марка').AsString(),
            'Все устройства' if str(el.CircuitPathMode) == 'AllDevices' else\
            'Наиболее удалённое устройство' if str(el.CircuitPathMode) == 'FarthestDevice' else\
            'Пользоват.' if str(el.CircuitPathMode) == 'Custom' else el.CircuitPathMode,
            '{:.0f}'.format(el.PathOffset * k) if el.PathOffset else '',
            '{:.0f}'.format(el.LookupParameter('Длина').AsDouble() * k),
            '{:n}'.format(el.LookupParameter('Количество').AsDouble()),
            el.LookupParameter('Строгая длина, м').AsString() or '',
            # and why are there extra circuits in the list, e.g. the server # 2021.05.12: what?
        ])
    # data = natural_sorted(data, key=lambda x: x[4])
    # data = natural_sorted(data, key=lambda x: x[2])
    # print('{} Пом. {}: {} {} – {} {} {} Отступ={}'.format(
    #     output.linkify(id),
    #     el.LookupParameter('Помещение цепи').AsString() if el.LookupParameter('Помещение цепи') else el.LookupParameter('Помещение').AsString() if el.LookupParameter('Помещение') else '',
    #     el.LookupParameter('Позиция начала').AsString() if el.LookupParameter('Позиция начала') else '',
    #     el.LookupParameter('Имя нагрузки').AsString() if el.LookupParameter('Имя нагрузки') else '',
    #     el.LookupParameter('Позиция конца').AsString() if el.LookupParameter('Позиция конца') else '',
    #     el.LookupParameter('Панель').AsString(),
    #     el.LookupParameter('Тип, марка').AsString(),
    #     el.PathOffset * k,
    # ))
    if len(system_ids_to_select) > 1:
        print('Выбрать все {}'.format(output.linkify(system_ids_to_select)))
    # data = [
    #     ['row1', 'data', 'data', 80 ],
    #     [output.linkify(ElementId(952627)), 'data', 'data', 45 ],
    # ]
    # Prepend 1-based row numbers.
    data = [[i + 1] + arr for i, arr in zip(range(len(data)), data)]
    output.print_table(
        table_data=data,
        columns=['№', 'Цепь', 'Помещение цепи', 'Позиция начала', 'Имя нагрузки', 'Позиция конца', 'Панель', 'Тип, марка', 'Режим траектории', 'Отступ', 'Длина revit', 'Количество, м/шт.', 'Строгая длина',],
        formats=['', '', '', ],
        # last_line_style='color:red;'
    )
# if sel and __shiftclick__:
#     if el.MEPModel.ElectricalSystems:
#         panel_ids = []
#         for system in el.MEPModel.ElectricalSystems:
#             if system.BaseEquipment and system.BaseEquipment.Id != el.Id:
#                 panel_ids.append(system.BaseEquipment.Id)
#         uidoc.Selection.SetElementIds(List[ElementId](panel_ids))
elif sel:
    # Shift+click: keep the non-'ейк' equipment selected and add its circuits.
    sel_save = [el.Id for el in sel if 'ейк' not in el.LookupParameter('Тип').AsValueString()]
    system_ids_to_select = []
    for el in sel:
        # print(el.Id)
        if el.MEPModel.ElectricalSystems:
            for system in el.MEPModel.ElectricalSystems:
                if system.Id not in system_ids_to_select:
                    system_ids_to_select.append(system.Id)
        elif 'ейк' in el.LookupParameter('Тип').AsValueString():
            target = el.LookupParameter('Цепь').AsString()
            system_ids_to_select.append(ElementId(int(target)))
    uidoc.Selection.SetElementIds(List[ElementId](system_ids_to_select + sel_save))
|
import data
import webbrowser
#from T_Gui import update_count
URL = "https://api.nasa.gov/planetary/apod"
def run():
    """Fetch NASA's Astronomy Picture of the Day and open the HD image URL
    in a new browser tab.  Prints "Error" on any non-200 response."""
    image_url = ""
    response = data.requests.get(URL, data.params)
    if response.status_code == 200:
        # Remember how many API calls remain for this key.
        data.count_req = response.headers['X-RateLimit-Remaining']
        image_url = data.json.loads(response.text)["hdurl"]
    else:
        print("Error")
    if image_url:
        webbrowser.open(image_url, new=2)
|
import os
import unittest
from urllib.parse import parse_qs
import requests
import requests_mock
from gratisdns import AAAARecord, ARecord, GratisDNS, MXRecord, TXTRecord
def mocked_response(fname):
    """Return the contents of a canned-response file stored beside this module.

    NOTE(review): the original left the file handle open until garbage
    collection; a context manager now closes it promptly.
    """
    path = os.path.sep.join((os.path.dirname(os.path.abspath(__file__)), fname))
    with open(path, 'r') as fp:
        return fp.read()
class FormMatcher(object):
    """requests_mock matcher that captures the fields of an urlencoded POST.

    After a call, ``form_data`` maps each form field to its first value.
    """

    def __init__(self):
        self.query = {}

    def __call__(self, request):
        assert(request.headers['Content-Type'] == 'application/x-www-form-urlencoded')
        self.form_data = {field: values[0] for field, values in parse_qs(request.text).items()}
        reply = requests.Response()
        reply.status_code = 200
        return reply
@requests_mock.Mocker()
class TestGratisDns(unittest.TestCase):
def setUp(self):
with requests_mock.Mocker() as mock_request:
mock_request.post(GratisDNS.BACKEND_URL, status_code=requests.codes.found)
self.gratisdns = GratisDNS('user', 'password')
def test_get_primary_domains(self, mock_request):
mock_request.get(GratisDNS.BACKEND_URL, text=mocked_response('primary_domains.html'))
self.assertEqual(self.gratisdns.get_primary_domains(), ['mytest.dk', 'mytest2.dk'])
def test_get_seconday_domains(self, mock_request):
mock_request.get(GratisDNS.BACKEND_URL, text=mocked_response('secondary_domains.html'))
self.assertEqual(self.gratisdns.get_secondary_domains(), [])
def test_get_primary_domain_details(self, mock_request):
mock_request.get(GratisDNS.BACKEND_URL, text=mocked_response('primary_domain_details.html'))
records = self.gratisdns.get_primary_domain_details('mytest.dk')
self.assertEqual(len(records), 4)
for record in records['A']:
self.assertIsInstance(record, ARecord)
self.assertListEqual(records['A'], [
ARecord('mytest.dk', '*.mytest.dk', '1.2.3.4', id='42'),
ARecord('mytest.dk', 'mytest.dk', '1.2.3.4', id='17'),
ARecord(None, 'localhost.mytest.dk', '127.0.0.1')
])
for record in records['AAAA']:
self.assertIsInstance(record, AAAARecord)
self.assertEqual(records['AAAA'], [
AAAARecord('mytest.dk', 'mytest.dk', '2001:db8:85a3:8d3:1319:8a2e:370:7348', id='1337')
])
for record in records['MX']:
self.assertIsInstance(record, MXRecord)
self.assertEqual(records['MX'], [
MXRecord('mytest.dk', 'mytest.dk', 'mytest.dk', '10', id='666')
])
for record in records['TXT']:
self.assertIsInstance(record, TXTRecord)
self.assertEqual(records['TXT'], [
TXTRecord('mytest.dk', 'mytest.dk', 'lumskebuks', id='1992')
])
def test_update_a_record(self, mock_request):
matcher = FormMatcher()
mock_request.get(GratisDNS.BACKEND_URL, text=mocked_response('primary_domain_details.html'))
mock_request.post(GratisDNS.BACKEND_URL, additional_matcher=matcher)
record = self.gratisdns.get_primary_domain_details('mytest.dk')['A'][0]
record.ip = '13.13.13.13'
self.gratisdns.update_record(record)
self.assertDictEqual(matcher.form_data,
{'user_domain': 'mytest.dk',
'name': '*.mytest.dk',
'ip': '13.13.13.13',
'id': '42',
'ttl': '43200',
'action': 'dns_primary_record_update_a'})
def test_update_aaaa_record(self, mock_request):
    """Updating an AAAA record POSTs exactly the expected form fields."""
    matcher = FormMatcher()
    mock_request.get(GratisDNS.BACKEND_URL, text=mocked_response('primary_domain_details.html'))
    mock_request.post(GratisDNS.BACKEND_URL, additional_matcher=matcher)
    record = self.gratisdns.get_primary_domain_details('mytest.dk')['AAAA'][0]
    record.ip = '1234:5678:90ab:cdef:1234:5678:90ab:cdef'
    self.gratisdns.update_record(record)
    self.assertDictEqual(matcher.form_data,
                         {'user_domain': 'mytest.dk',
                          'name': 'mytest.dk',
                          'ip': '1234:5678:90ab:cdef:1234:5678:90ab:cdef',
                          'id': '1337',
                          'ttl': '43200',
                          'action': 'dns_primary_record_update_aaaa'})
def test_update_mx_record(self, mock_request):
    """Updating an MX record POSTs exchanger and preference fields."""
    matcher = FormMatcher()
    mock_request.get(GratisDNS.BACKEND_URL, text=mocked_response('primary_domain_details.html'))
    mock_request.post(GratisDNS.BACKEND_URL, additional_matcher=matcher)
    record = self.gratisdns.get_primary_domain_details('mytest.dk')['MX'][0]
    record.exchanger = 'testpost.dk'
    self.gratisdns.update_record(record)
    self.assertDictEqual(matcher.form_data,
                         {'user_domain': 'mytest.dk',
                          'name': 'mytest.dk',
                          'exchanger': 'testpost.dk',
                          'id': '666',
                          'preference': '10',
                          'ttl': '43200',
                          'action': 'dns_primary_record_update_mx'})
def test_update_txt_record(self, mock_request):
    """Updating a TXT record POSTs the new txtdata field."""
    matcher = FormMatcher()
    mock_request.get(GratisDNS.BACKEND_URL, text=mocked_response('primary_domain_details.html'))
    mock_request.post(GratisDNS.BACKEND_URL, additional_matcher=matcher)
    record = self.gratisdns.get_primary_domain_details('mytest.dk')['TXT'][0]
    record.txtdata = 'fjollerik'
    self.gratisdns.update_record(record)
    self.assertDictEqual(matcher.form_data,
                         {'user_domain': 'mytest.dk',
                          'name': 'mytest.dk',
                          'txtdata': 'fjollerik',
                          'id': '1992',
                          'ttl': '43200',
                          'action': 'dns_primary_record_update_txt'})
|
import cdaotg
import webtest
def test_get():
    """GET /pata merges query parameters `a` and `b` into one string.

    Fixes two defects in the original test:
    * the URLs used a literal backslash ('\\pata'); URL paths use forward
      slashes, so the route could never match;
    * webtest response bodies are bytes and have no .contains() method --
      membership must be tested with the `in` operator.
    """
    app = webtest.TestApp(cdaotg.app)
    # test when same length
    response1 = app.get('/pata?a=123&b=456')
    assert response1.status_int == 200
    assert response1.content_type == 'text/html'
    assert b'123456' in response1.body
    # test when a longer than b
    response2 = app.get('/pata?a=1234&b=56')
    assert response2.status_int == 200
    assert response2.content_type == 'text/html'
    assert b'152634' in response2.body
    # test when a shorter than b (was reusing response1)
    response3 = app.get('/pata?a=12&b=3456')
    assert response3.status_int == 200
    assert response3.content_type == 'text/html'
    assert b'132456' in response3.body
import torch
import torch.nn.functional as F
from hessian_eigenthings.power_iter import Operator, deflated_power_iteration
from hessian_eigenthings.lanczos import lanczos
from sklearn.cross_decomposition import CCA
from time import time
import sys
#%% This operator could be used as a local distance metric on the GAN image manifold.
# This version, copied from hessian_eigenthings, uses backward automatic differentiation.
class GANHVPOperator(Operator):
    """Hessian-vector-product operator on a GAN image manifold.

    The scalar d_sim = criterion(G(code), G(code + p)) is differentiated
    once w.r.t. the perturbation p with create_graph=True; `apply` then
    runs a second backward pass to obtain H @ vec.  Usable as a local
    distance metric (metric tensor) on the manifold.
    """
    def __init__(
        self,
        model,
        code,
        criterion,
        use_gpu=True,
        preprocess=lambda img: F.interpolate(img, (256, 256), mode='bilinear', align_corners=True),
        activation=False,
    ):
        """
        model: generator exposing .visualize(code) -> image batch.
        code: latent code at which the Hessian is evaluated.
        criterion: image-pair distance, or a single-image objective when
            activation=True.
        preprocess: resize applied to images before the criterion.
        """
        # BUGFIX: `device` was only assigned when use_gpu was True, so the
        # CPU path raised NameError; match the sibling operator classes.
        device = "cuda" if use_gpu else "cpu"
        self.device = device
        if hasattr(model, "parameters"):
            for param in model.parameters():
                param.requires_grad_(False)  # freeze weights; only the perturbation is differentiated
        if hasattr(criterion, "parameters"):
            for param in criterion.parameters():
                param.requires_grad_(False)
        self.model = model
        self.preprocess = preprocess
        self.criterion = criterion
        self.code = code.clone().requires_grad_(False).float().to(device)  # torch.float32
        self.size = self.code.numel()
        # Small random perturbation carrying the autograd graph.
        # NOTE(review): .to(device) after requires_grad_(True) yields a
        # non-leaf tensor when the device changes; autograd.grad still works
        # but .grad is not populated -- confirm intended.
        # self.perturb_vec = torch.zeros((1, 4096), dtype=torch.float32).requires_grad_(True).to(device)
        self.perturb_vec = 0.0001 * torch.randn((1, self.size), dtype=torch.float32).requires_grad_(True).to(
            device)  # dimension debugged Sep 10
        self.activation = activation
        if activation:  # then criterion is a single entry objective function
            self.img_ref = self.model.visualize(self.code + self.perturb_vec)
            activ = self.criterion(self.preprocess(self.img_ref))
            gradient = torch.autograd.grad(activ, self.perturb_vec, create_graph=True, retain_graph=True)[0]
        else:
            self.img_ref = self.model.visualize(self.code, )  # forward the feature vector through the GAN
            img_pertb = self.model.visualize(self.code + self.perturb_vec)
            d_sim = self.criterion(self.preprocess(self.img_ref), self.preprocess(img_pertb))
            # similarity metric between 2 images.
            gradient = torch.autograd.grad(d_sim, self.perturb_vec, create_graph=True, retain_graph=True)[0]
            # 1st order gradient
        self.gradient = gradient.view(-1)

    def select_code(self, code):
        """Re-anchor the operator at a new latent code (metric branch)."""
        self.code = code.clone().requires_grad_(False).float().to(self.device)  # torch.float32
        self.size = self.code.numel()
        self.perturb_vec = torch.zeros((1, self.size), dtype=torch.float32).requires_grad_(True).to(self.device)
        self.img_ref = self.model.visualize(self.code, )  # forward the feature vector through the GAN
        img_pertb = self.model.visualize(self.code + self.perturb_vec)
        d_sim = self.criterion(self.preprocess(self.img_ref), self.preprocess(img_pertb))
        gradient = torch.autograd.grad(d_sim, self.perturb_vec, create_graph=True, retain_graph=True)[0]
        self.gradient = gradient.view(-1)
        self.size = self.perturb_vec.numel()

    def apply(self, vec):
        """
        Returns H*vec where H is the hessian of the loss w.r.t.
        the vectorized model parameters
        """
        self.zero_grad()
        # take the second gradient
        grad_grad = torch.autograd.grad(
            self.gradient, self.perturb_vec, grad_outputs=vec, only_inputs=True, retain_graph=True
        )
        hessian_vec_prod = grad_grad[0].view(-1)
        return hessian_vec_prod

    def vHv_form(self, vec):
        """
        Returns Bilinear form vec.T*H*vec where H is the hessian of the loss.
        If vec is eigen vector of H this will return the eigen value.
        """
        self.zero_grad()
        # take the second gradient
        grad_grad = torch.autograd.grad(
            self.gradient, self.perturb_vec, grad_outputs=vec, only_inputs=True, retain_graph=True
        )
        hessian_vec_prod = grad_grad[0].view(-1)
        vhv = (hessian_vec_prod * vec).sum()
        return vhv

    def zero_grad(self):
        """
        Zeros out the gradient info for each parameter in the model
        """
        for p in [self.perturb_vec]:
            if p.grad is not None:
                p.grad.data.zero_()
class GANForwardHVPOperator(Operator):
    """Forward finite-difference HVP operator for objective(G(code)).

    `apply` estimates H @ v with a central difference of first-order
    gradients at code ± eps*v, avoiding a double-backward pass.
    (This part amalgamates the structure of Lucent and hessian_eigenthings.)
    """
    def __init__(
        self,
        model,
        code,
        objective,
        preprocess=lambda img: F.interpolate(img, (224, 224), mode='bilinear', align_corners=True),
        use_gpu=True,
        EPS=1E-2,
    ):
        """
        model: generator exposing .visualize(code) -> image batch.
        code: reference latent code.
        objective: scalar objective evaluated on the preprocessed image.
        EPS: relative step size (scaled by ||code||).
        """
        device = "cuda" if use_gpu else "cpu"
        self.device = device
        if hasattr(model, "parameters"):
            for param in model.parameters():
                param.requires_grad_(False)  # freeze weights; only codes are differentiated
        if hasattr(objective, "parameters"):
            for param in objective.parameters():
                param.requires_grad_(False)
        self.model = model
        self.objective = objective
        self.preprocess = preprocess
        self.code = code.clone().requires_grad_(False).float().to(device)  # torch.float32
        self.img_ref = self.model.visualize(self.code)
        resz_img = self.preprocess(self.img_ref)
        activ = self.objective(resz_img)  # evaluate once at the reference code
        self.size = self.code.numel()
        self.EPS = EPS
        self.perturb_norm = self.code.norm() * self.EPS  # absolute step size

    def select_code(self, code):
        """Change the reference code without rebuilding the operator.

        BUGFIX: the previous implementation referenced self.perturb_vec,
        an attribute this class never defines, so every call raised
        AttributeError.  Re-anchor exactly as __init__ does instead.
        """
        self.code = code.clone().requires_grad_(False).float().to(self.device)  # torch.float32
        self.perturb_norm = self.code.norm() * self.EPS
        self.img_ref = self.model.visualize(self.code)
        resz_img = self.preprocess(self.img_ref)
        activ = self.objective(resz_img)
        self.size = self.code.numel()

    def apply(self, vec, EPS=None):
        """
        Returns H*vec where H is the hessian of the loss w.r.t.
        the vectorized model parameters
        """
        vecnorm = vec.norm()
        if vecnorm < 1E-8:
            # Degenerate direction: H @ 0 = 0.
            return torch.zeros_like(vec).cuda()
        EPS = self.EPS if EPS is None else EPS
        self.perturb_norm = self.code.norm() * EPS
        eps = self.perturb_norm / vecnorm  # absolute step per unit of vec
        # take the second gradient by comparing 2 first order gradients.
        perturb_vecs = self.code.detach() + eps * torch.tensor([1, -1.0], device=self.device).view(-1, 1) * vec.detach()
        perturb_vecs.requires_grad_(True)
        img = self.model.visualize(perturb_vecs)
        resz_img = self.preprocess(img)
        activs = self.objective(resz_img)  # , scaler=True
        ftgrad_both = torch.autograd.grad(activs.sum(), perturb_vecs, retain_graph=False, create_graph=False, only_inputs=True)[0]
        # Central difference of the two gradients.
        hessian_vec_prod = (ftgrad_both[0, :] - ftgrad_both[1, :]) / (2 * eps)
        return hessian_vec_prod

    def vHv_form(self, vec):
        """
        Returns Bilinear form vec.T*H*vec where H is the hessian of the loss.
        If vec is eigen vector of H this will return the eigen value.
        """
        hessian_vec_prod = self.apply(vec)
        vhv = (hessian_vec_prod * vec).sum()
        return vhv

    def zero_grad(self):
        """No-op: forward-difference mode keeps no gradient state."""
        pass
class GANForwardHVPOperator_multiscale(Operator):
    """Forward finite-difference HVP operator using several step scales.

    Central differences are taken at steps ±s*eps for every scale s in
    `scalevect` and combined into one weighted estimate, damping the
    noise of a single-step finite difference.
    (This part amalgamates the structure of Lucent and hessian_eigenthings.)
    """
    def __init__(
        self,
        model,
        code,
        objective,
        preprocess=lambda img: F.interpolate(img, (224, 224), mode='bilinear', align_corners=True),
        use_gpu=True,
        scalevect=(0.5, 1.0, 2.0),
        EPS=1E-2,
    ):
        """
        model: generator exposing .visualize(code) -> image batch.
        code: reference latent code.
        objective: scalar objective evaluated on the preprocessed image.
        scalevect: positive step scales; negatives are added automatically.
        EPS: relative step size (scaled by ||code||).
        """
        device = "cuda" if use_gpu else "cpu"
        self.device = device
        if hasattr(model, "parameters"):
            for param in model.parameters():
                param.requires_grad_(False)  # freeze weights; only codes are differentiated
        if hasattr(objective, "parameters"):
            for param in objective.parameters():
                param.requires_grad_(False)
        self.model = model
        self.objective = objective
        self.preprocess = preprocess
        self.code = code.clone().requires_grad_(False).float().to(device)  # torch.float32
        self.img_ref = self.model.visualize(self.code)
        resz_img = self.preprocess(self.img_ref)
        activ = self.objective(resz_img)  # evaluate once at the reference code
        self.size = self.code.numel()
        self.EPS = EPS
        self.perturb_norm = self.code.norm() * self.EPS
        # Column of step scales [s1..sN, -s1..-sN]; positive scales first.
        self.ticks = torch.tensor(list(scalevect),
                                  device=self.device).reshape(-1, 1)
        self.ticks = torch.concat([self.ticks, -self.ticks], dim=0)  # NOTE(review): torch.concat needs torch>=1.10 -- confirm minimum version
        self.ticks_divisor = torch.tensor(sum(scalevect), device=self.device)  # weight normalization
        self.ticks_N = len(scalevect)
    # def select_code(self, code):
    #     """Change the reference code"""
    #     self.code = code.clone().requires_grad_(False).float().to(self.device) # torch.float32
    #     self.perturb_norm = self.code.norm() * self.EPS
    #     self.img_ref = self.model.visualize(self.code + self.perturb_vec)
    #     resz_img = self.preprocess(self.img_ref)
    #     activ = self.objective(resz_img)
    #     gradient = torch.autograd.grad(activ, self.perturb_vec, create_graph=False, retain_graph=False)[0]
    #     self.gradient = gradient.view(-1)

    def apply(self, vec, EPS=None):
        """
        Returns H*vec where H is the hessian of the loss w.r.t.
        the vectorized model parameters
        """
        vecnorm = vec.norm()
        if vecnorm < 1E-8:
            # Degenerate direction: H @ 0 = 0.
            return torch.zeros_like(vec).cuda()
        EPS = self.EPS if EPS is None else EPS
        self.perturb_norm = self.code.norm() * EPS
        eps = self.perturb_norm / vecnorm  # absolute step per unit of vec
        # Gradients at code + s*eps*vec for every signed scale s (rows).
        perturb_vecs = self.code.detach() + eps * self.ticks * vec.detach()
        perturb_vecs.requires_grad_(True)
        img = self.model.visualize(perturb_vecs)
        resz_img = self.preprocess(img)
        activs = self.objective(resz_img)  # , scaler=True
        ftgrad_both = torch.autograd.grad(activs.sum(), perturb_vecs, retain_graph=False, create_graph=False, only_inputs=True)[0]
        # Weighted central difference: positive-scale rows minus negative-scale rows.
        hessian_vec_prod = (ftgrad_both[:self.ticks_N, :].sum(dim=0)
                            - ftgrad_both[-self.ticks_N:, :].sum(dim=0)) \
                           / (2 * self.ticks_divisor * eps)
        return hessian_vec_prod

    def apply_batch(self, vecs, EPS=None):
        """
        Returns H*vec for a batch of directions at once.
        vecs: (B, size) tensor (a (size,) vector or (size, B) matrix is
        reshaped/transposed to that layout first).
        """
        if vecs.ndim == 1:
            vecs = vecs.unsqueeze(0)
        if vecs.size(0) == self.size:
            vecs = vecs.T
        assert vecs.size(1) == self.size
        vecnorm = vecs.norm(dim=1)
        if vecnorm.mean() < 1E-8:
            return torch.zeros_like(vecs).cuda()
        EPS = self.EPS if EPS is None else EPS
        self.perturb_norm = self.code.norm() * EPS
        eps = self.perturb_norm / vecnorm  # per-direction step sizes
        # (T, B, C) perturbations: signed scale T x batch B x code dim C.
        perturb_vecs = eps.unsqueeze(0).unsqueeze(2) * \
                       torch.einsum("Ti,iBC->TBC",
                                    self.ticks, vecs.detach().unsqueeze(0))
        perturb_vecs.requires_grad_(True)
        img = self.model.visualize(self.code.detach() + perturb_vecs.reshape(-1, self.size))
        resz_img = self.preprocess(img)
        activs = self.objective(resz_img)  # , scaler=True
        ftgrad_both = torch.autograd.grad(activs.sum(), perturb_vecs, retain_graph=False, create_graph=False, only_inputs=True)[0]
        hessian_vec_prod = (ftgrad_both[:self.ticks_N, :, :].sum(dim=0)
                            - ftgrad_both[-self.ticks_N:, :, :].sum(dim=0)) \
                           / (2 * self.ticks_divisor * eps.unsqueeze(1))
        return hessian_vec_prod

    def vHv_form(self, vec):
        """
        Returns Bilinear form vec.T*H*vec where H is the hessian of the loss.
        If vec is eigen vector of H this will return the eigen value.
        """
        hessian_vec_prod = self.apply(vec)
        vhv = (hessian_vec_prod * vec).sum()
        return vhv

    def zero_grad(self):
        """
        Zeros out the gradient info for each parameter in the model
        (no-op: forward-difference mode keeps no gradient state).
        """
        pass
class NNForwardHVPOperator(Operator):
    """Forward finite-difference HVP operator for a plain objective.

    Same scheme as GANForwardHVPOperator, but the objective is applied to
    the input directly (no generator / preprocessing in between).
    (This part amalgamates the structure of Lucent and hessian_eigenthings.)
    """
    def __init__(
        self,
        objective,
        input,
        use_gpu=True,
        EPS=1E-2,
    ):
        """
        objective: scalar objective evaluated directly on the input.
        input: reference point.  (NOTE: shadows the builtin; kept for
            backward compatibility with keyword callers.)
        EPS: absolute step size (not scaled by ||input|| here).
        """
        device = "cuda" if use_gpu else "cpu"
        self.device = device
        if hasattr(objective, "parameters"):
            for param in objective.parameters():
                param.requires_grad_(False)  # freeze network weights
        self.objective = objective
        self.code = input.detach().clone().float().to(device)  # torch.float32
        activ = self.objective(self.code)  # evaluate once at the reference input
        self.size = self.code.numel()
        self.EPS = EPS
        self.perturb_norm = self.EPS  # * torch.randn(self.size).norm() *
    # def select_code(self, code):
    #     """Change the reference code"""
    #     self.code = code.clone().detach().float().to(self.device) # torch.float32
    #     self.perturb_norm = self.code.norm() * self.EPS
    #     activ = self.objective(self.code)
    #     gradient = torch.autograd.grad(activ, self.perturb_vec, create_graph=False, retain_graph=False)[0]
    #     self.gradient = gradient.view(-1)

    def apply(self, vec, EPS=None):
        """
        Returns H*vec where H is the hessian of the loss w.r.t.
        the vectorized model parameters
        """
        vecnorm = vec.norm()
        if vecnorm < 1E-8:
            # Degenerate direction: H @ 0 = 0.
            return torch.zeros_like(vec).cuda()
        if EPS is None:
            EPS = self.EPS
        self.perturb_norm = EPS  # * self.code.norm()
        eps = self.perturb_norm / vecnorm  # absolute step per unit of vec
        # take the second gradient by comparing 2 first order gradients.
        perturb_vecs = self.code.detach() + eps * torch.tensor([1, -1.0], device=self.device).view(-1, 1) * vec.detach()
        perturb_vecs.requires_grad_(True)
        activs = self.objective(perturb_vecs)  # , scaler=True
        ftgrad_both = torch.autograd.grad(activs.sum(), perturb_vecs,
                                          retain_graph=False, create_graph=False, only_inputs=True)[0]
        # Central difference of the two gradients.
        hessian_vec_prod = (ftgrad_both[0, :] - ftgrad_both[1, :]) / (2 * eps)
        return hessian_vec_prod

    def vHv_form(self, vec):
        """
        Returns Bilinear form vec.T*H*vec where H is the hessian of the loss.
        If vec is eigen vector of H this will return the eigen value.
        """
        hessian_vec_prod = self.apply(vec)
        vhv = (hessian_vec_prod * vec).sum()
        return vhv

    def zero_grad(self):
        """
        Zeros out the gradient info for each parameter in the model
        (no-op: forward-difference mode keeps no gradient state).
        """
        pass
class NNForwardHVPOperator_multiscale(Operator):
    """Forward finite-difference HVP operator with multiple step scales.

    Same scheme as NNForwardHVPOperator, but central differences are taken
    at steps ±s*eps for every scale s in `scalevect` and combined into one
    weighted estimate to damp finite-difference noise.
    (This part amalgamates the structure of Lucent and hessian_eigenthings.)
    """
    def __init__(
        self,
        objective,
        input,
        use_gpu=True,
        EPS=1E-2,
        scalevect=(0.5, 1.0, 2.0)
    ):
        """
        objective: scalar objective evaluated directly on the input.
        input: reference point.  (NOTE: shadows the builtin; kept for
            backward compatibility with keyword callers.)
        EPS: absolute base step size.
        scalevect: positive step scales; negatives are added automatically.
        """
        device = "cuda" if use_gpu else "cpu"
        self.device = device
        if hasattr(objective, "parameters"):
            for param in objective.parameters():
                param.requires_grad_(False)  # freeze network weights
        self.objective = objective
        self.code = input.detach().clone().float().to(device)  # torch.float32
        activ = self.objective(self.code)  # evaluate once at the reference input
        self.size = self.code.numel()
        self.EPS = EPS
        self.perturb_norm = self.EPS  # * torch.randn(self.size).norm() *
        # Column of step scales [s1..sN, -s1..-sN]; positive scales first.
        self.ticks = torch.tensor(list(scalevect),
                                  device=self.device).reshape(-1, 1)
        self.ticks = torch.concat([self.ticks, -self.ticks], dim=0)
        self.ticks_divisor = torch.tensor(sum(scalevect), device=self.device)  # weight normalization
        self.ticks_N = len(scalevect)

    def apply(self, vec, EPS=None):
        """
        Returns H*vec where H is the hessian of the loss w.r.t.
        the vectorized model parameters
        """
        vecnorm = vec.norm()
        if vecnorm < 1E-8:
            # Degenerate direction: H @ 0 = 0.
            return torch.zeros_like(vec).cuda()
        if EPS is None:
            EPS = self.EPS
        # BUGFIX: EPS was accepted but never applied (this assignment was
        # commented out), so an explicit EPS argument was silently ignored.
        # Behavior with the default EPS=None is unchanged; matches
        # NNForwardHVPOperator.apply.
        self.perturb_norm = EPS  # * self.code.norm()
        eps = self.perturb_norm / vecnorm  # absolute step per unit of vec
        # Gradients at code + s*eps*vec for every signed scale s (rows).
        perturb_vecs = self.code.detach() + eps * self.ticks * vec.detach()
        perturb_vecs.requires_grad_(True)
        activs = self.objective(perturb_vecs)  # , scaler=True
        ftgrad_both = torch.autograd.grad(activs.sum(), perturb_vecs,
                                          retain_graph=False, create_graph=False, only_inputs=True)[0]
        # Weighted central difference: positive-scale rows minus negative-scale rows.
        hessian_vec_prod = (ftgrad_both[:self.ticks_N, :].sum(dim=0)
                            - ftgrad_both[-self.ticks_N:, :].sum(dim=0)) \
                           / (2 * self.ticks_divisor * eps)
        return hessian_vec_prod

    def vHv_form(self, vec):
        """
        Returns Bilinear form vec.T*H*vec where H is the hessian of the loss.
        If vec is eigen vector of H this will return the eigen value.
        """
        hessian_vec_prod = self.apply(vec)
        vhv = (hessian_vec_prod * vec).sum()
        return vhv

    def zero_grad(self):
        """No-op: forward-difference mode keeps no gradient state."""
        pass
#%%
class GANForwardMetricHVPOperator(Operator):
    """This part amalgamates the structure of Lucent and hessian_eigenthings.

    It adapts GANForwardHVPOperator for a binary metric function: the
    scalar being differentiated is the distance from a cached reference
    image to the perturbed image.
    """
    def __init__(
        self,
        model,
        code,
        criterion,
        preprocess=lambda img: F.interpolate(img, (256, 256), mode='bilinear', align_corners=True),
        use_gpu=True,
        EPS=1E-2,
    ):
        """
        model: generator exposing .visualize(code) -> image batch.
        code: reference latent code.
        criterion: image-pair distance metric.
        EPS: relative step size (scaled by ||code||).
        """
        device = "cuda" if use_gpu else "cpu"
        self.device = device
        if hasattr(model, "parameters"):
            for param in model.parameters():
                param.requires_grad_(False)  # freeze generator weights
        if hasattr(criterion, "parameters"):
            for param in criterion.parameters():
                param.requires_grad_(False)
        self.model = model
        self.criterion = criterion  # metric function use to determine the image distance
        self.preprocess = preprocess
        self.code = code.clone().requires_grad_(False).float().to(device)  # reference code
        self.img_ref = self.model.visualize(self.code)
        self.img_ref = self.preprocess(self.img_ref)  # cached, already-preprocessed reference image
        activ = self.criterion(self.img_ref, self.img_ref)  # warm-up evaluation (distance to itself)
        self.size = self.code.numel()
        self.EPS = EPS
        self.perturb_norm = self.code.norm() * self.EPS  # norm

    def select_code(self, code):
        """Re-anchor the operator at a new reference code."""
        self.code = code.clone().requires_grad_(False).float().to(self.device)  # torch.float32
        self.perturb_norm = self.code.norm() * self.EPS
        self.img_ref = self.model.visualize(self.code)
        self.img_ref = self.preprocess(self.img_ref)
        # dsim = self.criterion(self.img_ref, self.img_ref)
        # gradient = torch.autograd.grad(dsim, self.perturb_vec, create_graph=False, retain_graph=False)[0]
        # self.gradient = gradient.view(-1)

    def apply(self, vec, EPS=None):
        """
        Returns H*vec where H is the hessian of the loss w.r.t.
        the vectorized model parameters.
        Here we implement the forward approximation of HVP.
        Hv|_x \approx (g(x + eps*v) - g(x - eps*v)) / (2*eps)
        """
        vecnorm = vec.norm()
        if vecnorm < 1E-8:
            # Degenerate direction: H @ 0 = 0.
            return torch.zeros_like(vec).cuda()
        EPS = self.EPS if EPS is None else EPS
        self.perturb_norm = self.code.norm() * EPS
        eps = self.perturb_norm / vecnorm  # absolute step per unit of vec
        # take the second gradient by comparing 2 first order gradient.
        perturb_vecs = self.code.detach() + eps * torch.tensor([1, -1.0]).view(-1, 1).to(self.device) * vec.to(self.device).detach()
        perturb_vecs.requires_grad_(True)
        img = self.model.visualize(perturb_vecs)
        resz_img = self.preprocess(img)
        dsim = self.criterion(self.img_ref, resz_img)
        # size 2, 1, 1, 1. Distance from reference to 2 perturbed images. Do mean before grad
        # NOTE(review): grad of mean() halves both rows relative to the
        # sum() used by the sibling classes, so this returns H@vec/2 by
        # that convention -- confirm which normalization is intended.
        ftgrad_both = torch.autograd.grad(dsim.mean(), perturb_vecs, retain_graph=False, create_graph=False,
                                          only_inputs=True)[0]
        hessian_vec_prod = (ftgrad_both[0, :] - ftgrad_both[1, :]) / (2 * eps)
        return hessian_vec_prod

    def vHv_form(self, vec):
        """
        Returns Bilinear form vec.T*H*vec where H is the hessian of the loss.
        If vec is eigen vector of H this will return the eigen value.
        """
        hessian_vec_prod = self.apply(vec)
        vhv = (hessian_vec_prod * vec).sum()
        return vhv

    def zero_grad(self):
        """
        Zeros out the gradient info for each parameter in the model
        (no-op: forward-difference mode keeps no gradient state).
        """
        pass
def compute_hessian_eigenthings(
    model,
    code,
    loss,
    num_eigenthings=40,
    mode="power_iter",
    use_gpu=True,
    **kwargs
):
    """
    Computes the top `num_eigenthings` eigenvalues and eigenvecs
    for the hessian of the given model by using subsampled power iteration
    with deflation and the hessian-vector product
    (This function just changes the Operator from the original HVPOperator
    to GANHVPOperator; the docstring parameters have been corrected to
    match this signature.)

    Parameters
    ---------------
    model : Module
        generator network exposing .visualize(code)
    code : torch.Tensor
        latent code at which the Hessian of the loss is evaluated
    loss : torch.nn.modules.Loss | torch.nn.functional criterion
        loss function to differentiate through
    num_eigenthings : int
        number of eigenvalues/eigenvecs to compute. computed in order of
        decreasing eigenvalue magnitude.
    mode : str ['power_iter', 'lanczos']
        which backend to use to compute the top eigenvalues.
    use_gpu:
        if true, attempt to use cuda for all lin alg computations
    **kwargs:
        contains additional parameters passed onto lanczos or power_iter.

    Raises ValueError for an unknown mode.
    """
    hvp_operator = GANHVPOperator(
        model,
        code,
        loss,
        use_gpu=use_gpu,
    )
    eigenvals, eigenvecs = None, None
    if mode == "power_iter":
        eigenvals, eigenvecs = deflated_power_iteration(
            hvp_operator, num_eigenthings, use_gpu=use_gpu, **kwargs
        )
    elif mode == "lanczos":
        eigenvals, eigenvecs = lanczos(
            hvp_operator, num_eigenthings, use_gpu=use_gpu, **kwargs
        )
    else:
        raise ValueError("Unsupported mode %s (must be power_iter or lanczos)" % mode)
    return eigenvals, eigenvecs
#%%
from IPython.display import clear_output
from hessian_eigenthings.utils import progress_bar
def get_full_hessian(loss, param):
    """Densely build the Hessian of `loss` w.r.t. `param`, one row at a time.

    Returns an (n, n) numpy array with n = param.numel().
    (From https://discuss.pytorch.org/t/compute-the-hessian-matrix-of-a-network/15270/3,
    modified from the hessian_eigenthings repo; API follows hessian.hessian.)
    """
    n_params = param.numel()
    hessian = torch.zeros(n_params, n_params)
    # First-order gradient with the graph retained so each entry can be
    # differentiated again below.
    grad1 = torch.autograd.grad(loss, param, create_graph=True, retain_graph=True, only_inputs=True)[0].view(-1)
    for col in range(n_params):
        clear_output(wait=True)
        progress_bar(
            col, n_params, "full hessian columns: %d of %d" % (col, n_params)
        )
        grad2 = torch.autograd.grad(grad1[col], param, create_graph=False, retain_graph=True, only_inputs=True)
        hessian[col] = grad2[0].view(-1)
    return hessian.cpu().data.numpy()
#%% Wrap up function
def cca_correlation(X, Y, n_comp=50):
    """Canonical-correlation coefficients between two representations.

    :param X: N-by-p matrix (N observations).
    :param Y: N-by-q matrix.
    :param n_comp: an integer, how many canonical components to fit and compare.
    :return: cca_corr, length-n_comp vector: the correlation between paired
        canonical components X_c[:, i] and Y_c[:, i], where X_c, Y_c are the
        linearly mapped versions of X, Y with shape N-by-n_comp.
    """
    # BUGFIX: `np` is only imported inside the __main__ block of this module,
    # so calling this function from elsewhere raised NameError; import it
    # locally to make the function self-contained.
    import numpy as np
    cca = CCA(n_components=n_comp)
    X_c, Y_c = cca.fit_transform(X, Y)
    ccmat = np.corrcoef(X_c, Y_c, rowvar=False)
    cca_corr = np.diag(ccmat[n_comp:, :n_comp])  # slice out the cross-correlation block
    return cca_corr
#%% Test the module
if __name__=="__main__":
    # Ad-hoc experiment script: benchmark Lanczos vs power iteration on the
    # GAN Hessian spectrum, and sanity-check eigenvalues/eigenvectors.
    sys.path.append(r"E:\Github_Projects\PerceptualSimilarity")
    import models  # from PerceptualSimilarity folder
    # model_vgg = models.PerceptualLoss(model='net-lin', net='vgg', use_gpu=1, gpu_ids=[0])
    model_squ = models.PerceptualLoss(model='net-lin', net='squeeze', use_gpu=1, gpu_ids=[0])
    from GAN_utils import upconvGAN
    G = upconvGAN("fc6")
    G.requires_grad_(False).cuda()  # this notation is incorrect in older pytorch
    model_squ.requires_grad_(False).cuda()
    #%% Smoke test: one HVP application.
    feat = torch.randn((4096), dtype=torch.float32).requires_grad_(False).cuda()
    GHVP = GANHVPOperator(G, feat, model_squ)
    GHVP.apply(torch.randn((4096)).requires_grad_(False).cuda())
    #%% 300 vectors
    # NOTE(review): the cells below use model_vgg, but only model_squ is
    # constructed above (the vgg line is commented out); as written these
    # cells raise NameError unless run interactively with model_vgg defined.
    t0 = time()
    feat = torch.randn((1, 4096), dtype=torch.float32).requires_grad_(False).cuda()
    eigenvals, eigenvecs = compute_hessian_eigenthings(G, feat, model_vgg,
        num_eigenthings=300, mode="lanczos", use_gpu=True,)
    print(time() - t0,"\n") # 81.02 s
    t0 = time()
    feat = torch.randn((1, 4096), dtype=torch.float32).requires_grad_(False).cuda()
    eigenvals3, eigenvecs3 = compute_hessian_eigenthings(G, feat, model_vgg,
        num_eigenthings=300, mode="lanczos", use_gpu=True, max_steps=50,)
    print(time() - t0, "\n") # 82.15 s
    t0 = time()
    eigenvals2, eigenvecs2 = compute_hessian_eigenthings(G, feat, model_vgg,
        num_eigenthings=300, mode="power_iter", use_gpu=True,)
    print(time() - t0) # 936.246 / 1002.95
    #%% 100 vectors
    t0 = time()
    feat = torch.randn((1, 4096), dtype=torch.float32).requires_grad_(False).cuda()
    eigenvals, eigenvecs = compute_hessian_eigenthings(G, feat, model_vgg,
        num_eigenthings=100, mode="lanczos", use_gpu=True,)
    print(time() - t0) # 79.466
    t0 = time()
    eigenvals2, eigenvecs2 = compute_hessian_eigenthings(G, feat, model_vgg,
        num_eigenthings=100, mode="power_iter", use_gpu=True,)
    print(time() - t0) # 227.1 s
    #%% 40 vectors
    t0 = time()
    feat = torch.randn((1, 4096), dtype=torch.float32).requires_grad_(False).cuda()
    eigenvals, eigenvecs = compute_hessian_eigenthings(G, feat, model_vgg,
        num_eigenthings=40, mode="lanczos", use_gpu=True,)
    print(time() - t0) # 13.09 sec
    t0 = time()
    eigenvals2, eigenvecs2 = compute_hessian_eigenthings(G, feat, model_vgg,
        num_eigenthings=40, mode="power_iter", use_gpu=True,)
    print(time() - t0) # 70.09 sec
    #%% Compare eigenvector bases (power_iter returns them in reversed order).
    from os.path import join
    import numpy as np
    import matplotlib.pylab as plt
    innerprod = eigenvecs @ eigenvecs2[::-1,:].T
    np.diag(innerprod)
    #%%
    innerprod = eigenvecs @ eigenvecs3[::-1,:].T
    np.diag(innerprod)
    plt.figure()
    plt.subplot(2,1,1)
    plt.plot(eigenvals[::-1], alpha=0.5, lw=2, label="lanczos")
    # plt.plot(eigenvals2, alpha=0.5, lw=2, label="power_iter")
    plt.plot(eigenvals3[::-1], alpha=0.5, lw=2, label="lanczos")
    plt.ylabel("eigenvalue")
    plt.legend()
    plt.subplot(2,1,2)
    plt.plot(np.abs(np.diag(eigenvecs[::-1] @ eigenvecs3[::-1].T)))
    # plt.plot(np.abs(np.diag(eigenvecs[::-1] @ eigenvecs2.T)))
    plt.ylabel("Inner prod of eigenvector")
    plt.title("Compare Lanczos and Power iter method in computing eigen vectors")
    plt.show()
    #%% Same comparison with the squeeze-net metric (which IS defined above).
    t0 = time()
    feat = torch.randn((1, 4096), dtype=torch.float32).requires_grad_(False).cuda()
    eigenvals, eigenvecs = compute_hessian_eigenthings(G, feat, model_squ,
        num_eigenthings=100, mode="lanczos", use_gpu=True,)
    print(time() - t0) # 79.466
    t0 = time()
    eigenvals3, eigenvecs3 = compute_hessian_eigenthings(G, feat, model_squ,
        num_eigenthings=100, mode="lanczos", use_gpu=True, max_steps=50,)
    print(time() - t0)
    #%%
    t0 = time()
    eigenvals2, eigenvecs2 = compute_hessian_eigenthings(G, feat, model_squ,
        num_eigenthings=100, mode="power_iter", use_gpu=True, )
    print(time() - t0)
    #%% CCA between the eigenvector bases of the two runs.
    t0 = time()
    cca = CCA(n_components=100)
    evec1_c, evec3_c = cca.fit_transform(eigenvecs.T, eigenvecs3.T)
    print(time() - t0)
    ccmat = np.corrcoef(evec1_c.T, evec3_c.T, )
    np.diag(ccmat[50:,:50])
    #%%
    t0 = time()
    n_comp = 100
    cca_corr = cca_correlation(eigenvecs.T, eigenvecs3.T, n_comp=n_comp)
    cca_corr_baseline = cca_correlation(eigenvecs.T, np.random.randn(*eigenvecs.T.shape), n_comp=n_comp)
    print("%d components CCA corr %.2f, (baseline %.2f) (%.2fsec)" % (n_comp, cca_corr.mean(), cca_corr_baseline.mean(), time() - t0))
    # 50 components CCA corr 1.00, (baseline 0.20) (15.43sec)
    # 100 components CCA corr 1.00, (baseline 0.13) (22.50sec)
    # %%
    t0 = time()
    n_comp = 50
    cca_corr = cca_correlation(eigenvecs.T, eigenvecs2.T, n_comp=n_comp)
    cca_corr_baseline = cca_correlation(eigenvecs2.T, np.random.randn(*eigenvecs.T.shape), n_comp=n_comp)
    print("%d components CCA corr %.2f, (baseline %.2f) (%.2fsec)" % (
        n_comp, cca_corr.mean(), cca_corr_baseline.mean(), time() - t0))
    # 100 components CCA corr 0.99, (baseline 0.13) (18.22sec)
    # 50 components CCA corr 0.98, (baseline 0.20) (10.19sec)
    #%% Test the eigenvalues are close to that found by vHv bilinear form.
    t0 = time()
    feat = torch.randn((1, 4096), dtype=torch.float32).requires_grad_(False).cuda()
    eigenvals, eigenvecs = compute_hessian_eigenthings(G, feat, model_squ,
        num_eigenthings=100, mode="lanczos", use_gpu=True, )
    print(time() - t0) # 18.45
    #%%
    GHVP = GANHVPOperator(G, feat, model_squ, use_gpu=True)
    # GHVP.vHv_form(torch.tensor(eigenvecs[1, :]).cuda())
    # eigenvals
    t0 = time()
    vHv_vals = []
    eigenvecs_tsr = torch.tensor(eigenvecs).cuda()
    for i in range(eigenvecs_tsr.shape[0]):
        vHv_vals.append(GHVP.vHv_form(eigenvecs_tsr[i, :]).item())
    print(time()-t0)
    #%%
    savedir = r"E:\OneDrive - Washington University in St. Louis\Artiphysiology\HessianDecomp"
    plt.figure()
    plt.plot(eigenvals[::-1], alpha=0.5, lw=2, label="lanczos")
    # plt.plot(eigenvals2, alpha=0.5, lw=2, label="power_iter")
    plt.plot(vHv_vals[::-1], alpha=0.5, lw=2, label="vHv")
    plt.ylabel("eigenvalue")
    plt.legend()
    plt.title("Comparing Eigenvalue computed by Lanczos and vHv")
    plt.savefig(join(savedir, "Lanczos_vHv_cmp.png"))
    plt.show()
    #%% Analyze of isotropy of GAN space using Hessian spectrum
    # NOTE(review): PC1_vect is not defined anywhere in this script; this
    # cell raises NameError unless it was defined interactively.
    feat = 64 * torch.tensor(PC1_vect).float() #torch.randn(4096).float().cuda()
    eval_col = []
    evect_col = []
    t0 = time()
    for vnorm in [0, 1, 2, 3, 4, 5]:
        evals, evecs = compute_hessian_eigenthings(G, vnorm * feat, model_squ,
            num_eigenthings=800, mode="lanczos", use_gpu=True, )
        eval_col.append(evals)
        evect_col.append(evecs)
        print(
            "Norm %d \nEigen value: max %.3E min %.3E std %.3E" % (vnorm * 64, evals.max(), evals.min(), evals.std()))
    print(time() - t0)
    np.savez("H_norm_relation.npz", eval_col=eval_col, evect_col=evect_col, feat=feat)
    #%%
    plt.figure()
    for evals, vnorm in zip(eval_col, [0, 1, 2, 3, 4, 5]):
        plt.plot(evals[-50:] * 1, label="norm%d"%(vnorm*64)) # / evals[-1]
    plt.legend()
    plt.xlabel("eigen id")
    plt.ylabel("eigenvals")
    plt.savefig(join(savedir, "code_norm_spectra_curv.png"))
    plt.show()
import os
import glob
import yaml
import time
import struct
import argparse
import multiprocessing
import crcmod.predefined
from queue import Empty
from usbcan import CANFrame, run
# M3FC flight-computer CAN message identifiers.
# A frame's SID is composed as (message id << 5) | board id, so the low
# five bits of every SID carry the board id (see msg_id below).
m3fc_id = 1
msg_id = lambda x: x << 5
# Messages broadcast by M3FC reporting its current configuration.
m3fc_msg_cfg_profile = m3fc_id | msg_id(54)
m3fc_msg_cfg_pyros = m3fc_id | msg_id(55)
m3fc_msg_cfg_accel_cal_x = m3fc_id | msg_id(56)
m3fc_msg_cfg_accel_cal_y = m3fc_id | msg_id(57)
m3fc_msg_cfg_accel_cal_z = m3fc_id | msg_id(58)
m3fc_msg_cfg_radio_freq = m3fc_id | msg_id(59)
m3fc_msg_cfg_crc = m3fc_id | msg_id(60)
# Messages sent to M3FC to stage new configuration values.
m3fc_msg_set_cfg_profile = m3fc_id | msg_id(1)
m3fc_msg_set_cfg_pyros = m3fc_id | msg_id(2)
m3fc_msg_set_cfg_accel_x = m3fc_id | msg_id(10)
m3fc_msg_set_cfg_accel_y = m3fc_id | msg_id(11)
m3fc_msg_set_cfg_accel_z = m3fc_id | msg_id(12)
m3fc_msg_set_cfg_radio_freq = m3fc_id | msg_id(13)
m3fc_msg_set_cfg_crc = m3fc_id | msg_id(14)
# Command asking M3FC to persist the staged configuration.
m3fc_msg_save_cfg = m3fc_id | msg_id(4)
class M3FCConfigPyros:
    """Pyro-channel configuration for M3FC.

    Each of the eight channels is one packed byte: the usage lives in the
    high nibble (use_mask), the current limit in bits 2-3 (current_mask)
    and the igniter type in bits 0-1 (type_mask).
    """
    use_map = {0x00: "Unused",
               0x10: "Drogue", 0x20: "Main", 0x30: "Dart Separation"}
    use_invmap = {"unused": 0, "drogue": 0x10, "main": 0x20, "dartsep": 0x30}
    use_mask = 0xf0
    type_map = {0: "None", 1: "EMatch", 2: "Talon", 3: "Metron"}
    type_invmap = {"none": 0, "ematch": 1, "talon": 2, "metron": 3}
    type_mask = 0x03
    current_map = {0: "None", 0x04: "1A", 0x08: "3A"}
    current_invmap = {"none": 0, "1A": 0x04, "3A": 0x08}
    current_mask = 0x0c

    def __init__(self, p1, p2, p3, p4, p5, p6, p7, p8):
        """Store the eight packed channel bytes as pyro1..pyro8."""
        self.pyro1, self.pyro2, self.pyro3, self.pyro4 = p1, p2, p3, p4
        self.pyro5, self.pyro6, self.pyro7, self.pyro8 = p5, p6, p7, p8

    def _channel_bytes(self):
        """All eight channel bytes in order."""
        return [self.pyro1, self.pyro2, self.pyro3, self.pyro4,
                self.pyro5, self.pyro6, self.pyro7, self.pyro8]

    @classmethod
    def from_can(cls, packet):
        """Build from a received m3fc_msg_cfg_pyros CAN frame."""
        assert packet.sid == m3fc_msg_cfg_pyros
        return cls(*packet.data)

    def to_can(self):
        """Encode as a set_cfg_pyros CAN frame."""
        return CANFrame(sid=m3fc_msg_set_cfg_pyros, rtr=False, dlc=8,
                        data=self._channel_bytes())

    @classmethod
    def from_dict(cls, d):
        """Build from a config dict with keys pyro1..pyro8; each value is
        either the string "none" or a {usage, type, current} mapping."""
        def encode(entry):
            if entry == "none":
                return 0
            assert set(entry.keys()) == {"type", "usage", "current"}
            return (cls.use_invmap.get(entry['usage'], 0) |
                    cls.type_invmap.get(entry['type'], 0) |
                    cls.current_invmap.get(entry['current'], 0))
        keys = ["pyro1", "pyro2", "pyro3", "pyro4",
                "pyro5", "pyro6", "pyro7", "pyro8"]
        assert set(d.keys()) == set(keys)
        return cls(*(encode(d[k]) for k in keys))

    def __str__(self):
        def describe(byte):
            usage = self.use_map.get(byte & 0xf0, "?")
            type_ = self.type_map.get(byte & 0x03, "?")
            current = self.current_map.get(byte & 0x0c, "?")
            return "{}/{}/{}".format(usage, type_, current)
        lines = ["M3FC Config Pyros:"]
        for idx, byte in enumerate(self._channel_bytes(), start=1):
            lines.append("Pyro {}: ".format(idx) + describe(byte))
        lines.append("")
        return "\n".join(lines)
class M3FCConfigProfile:
    """Flight profile settings: board position, accelerometer orientation
    and the mission timing/altitude parameters (stored in wire encoding)."""
    m3fc_position_map = {1: "Dart", 2: "Core"}
    m3fc_position_invmap = {"dart": 1, "core": 2}
    accel_axis_map = {1: "X", 2: "-X", 3: "Y", 4: "-Y", 5: "Z", 6: "-Z"}
    accel_axis_invmap = {"x": 1, "-x": 2, "y": 3, "-y": 4, "z": 5, "-z": 6}

    def __init__(self, m3fc_position, accel_axis, ignition_accel,
                 burnout_timeout, apogee_timeout, main_altitude,
                 main_timeout, land_timeout):
        self.m3fc_position = m3fc_position
        self.accel_axis = accel_axis
        self.ignition_accel = ignition_accel
        self.burnout_timeout = burnout_timeout
        self.apogee_timeout = apogee_timeout
        self.main_altitude = main_altitude
        self.main_timeout = main_timeout
        self.land_timeout = land_timeout

    @classmethod
    def from_can(cls, packet):
        """Decode a received cfg_profile frame (eight raw bytes)."""
        assert packet.sid == m3fc_msg_cfg_profile
        return cls(*packet.data)

    def to_can(self):
        """Encode as a set-config CAN frame."""
        fields = [self.m3fc_position, self.accel_axis,
                  self.ignition_accel, self.burnout_timeout,
                  self.apogee_timeout, self.main_altitude,
                  self.main_timeout, self.land_timeout]
        return CANFrame(sid=m3fc_msg_set_cfg_profile, rtr=False, dlc=8,
                        data=fields)

    @classmethod
    def from_dict(cls, d):
        """Build from a config dict, converting human units to the wire
        encoding: burnout timeout in 0.1 s units, main altitude in 10 m
        units, land timeout in 10 s units."""
        required = {"m3fc_position", "accel_axis",
                    "ignition_accel", "burnout_timeout",
                    "apogee_timeout", "main_altitude",
                    "main_timeout", "land_timeout"}
        assert set(d.keys()) == required
        position = cls.m3fc_position_invmap.get(d['m3fc_position'].lower(), 0)
        axis = cls.accel_axis_invmap.get(d['accel_axis'].lower(), 0)
        return cls(position,
                   axis,
                   int(d['ignition_accel']),
                   int(d['burnout_timeout'] * 10),
                   int(d['apogee_timeout']),
                   int(d['main_altitude'] // 10),
                   int(d['main_timeout']),
                   int(d['land_timeout'] // 10))

    def __str__(self):
        lines = [
            "M3FC Config Profile:",
            "M3FC Position: {}".format(
                self.m3fc_position_map.get(self.m3fc_position, "Unknown")),
            "Accelerometer Up Axis: {}".format(
                self.accel_axis_map.get(self.accel_axis, "Unknown")),
            "Ignition Detection Threshold: {}m/s/s".format(
                self.ignition_accel),
            "Burnout Detection Timeout: {:.1f}s after launch".format(
                self.burnout_timeout/10.0),
            "Apogee Detection Timeout: {}s after launch".format(
                self.apogee_timeout),
            "Main Chute Release Altitude: {}m above launch".format(
                self.main_altitude*10),
            "Main Chute Release Timeout: {}s after apogee".format(
                self.main_timeout),
            "Landing Detection Timeout: {}s after launch".format(
                self.land_timeout*10),
            "",
        ]
        return "\n".join(lines)
class M3FCConfigAccelCalX:
    """X-axis accelerometer calibration: scale factor and zero offset."""
    def __init__(self, scale, offset):
        self.scale = scale
        self.offset = offset

    @classmethod
    def from_can(cls, packet):
        """Decode two little-endian floats from a cfg_accel_cal_x frame."""
        assert packet.sid == m3fc_msg_cfg_accel_cal_x
        scale, offset = struct.unpack("<ff", packet.data_bytes())
        return cls(scale, offset)

    def to_can(self):
        """Encode as a set-config frame (two little-endian floats)."""
        payload = struct.pack("<ff", self.scale, self.offset)
        return CANFrame(sid=m3fc_msg_set_cfg_accel_x, rtr=False, dlc=8,
                        data=payload)

    @classmethod
    def from_dict(cls, d):
        """Build from a {"scale": ..., "offset": ...} dict."""
        assert set(d.keys()) == {"scale", "offset"}
        return cls(d['scale'], d['offset'])

    def __str__(self):
        return "Accel Cal X: Scale={:.6f} Offset={:.3f}".format(
            self.scale, self.offset)
class M3FCConfigAccelCalY:
    """Y-axis accelerometer calibration: scale factor and zero offset."""
    def __init__(self, scale, offset):
        self.scale = scale
        self.offset = offset

    @classmethod
    def from_can(cls, packet):
        """Decode two little-endian floats from a cfg_accel_cal_y frame."""
        assert packet.sid == m3fc_msg_cfg_accel_cal_y
        scale, offset = struct.unpack("<ff", packet.data_bytes())
        return cls(scale, offset)

    def to_can(self):
        """Encode as a set-config frame (two little-endian floats)."""
        payload = struct.pack("<ff", self.scale, self.offset)
        return CANFrame(sid=m3fc_msg_set_cfg_accel_y, rtr=False, dlc=8,
                        data=payload)

    @classmethod
    def from_dict(cls, d):
        """Build from a {"scale": ..., "offset": ...} dict."""
        assert set(d.keys()) == {"scale", "offset"}
        return cls(d['scale'], d['offset'])

    def __str__(self):
        return "Accel Cal Y: Scale={:.6f} Offset={:.3f}".format(
            self.scale, self.offset)
class M3FCConfigAccelCalZ:
    """Z-axis accelerometer calibration: scale factor and zero offset."""
    def __init__(self, scale, offset):
        self.scale = scale
        self.offset = offset

    @classmethod
    def from_can(cls, packet):
        """Decode two little-endian floats from a cfg_accel_cal_z frame."""
        assert packet.sid == m3fc_msg_cfg_accel_cal_z
        scale, offset = struct.unpack("<ff", packet.data_bytes())
        return cls(scale, offset)

    def to_can(self):
        """Encode as a set-config frame (two little-endian floats)."""
        payload = struct.pack("<ff", self.scale, self.offset)
        return CANFrame(sid=m3fc_msg_set_cfg_accel_z, rtr=False, dlc=8,
                        data=payload)

    @classmethod
    def from_dict(cls, d):
        """Build from a {"scale": ..., "offset": ...} dict."""
        assert set(d.keys()) == {"scale", "offset"}
        return cls(d['scale'], d['offset'])

    def __str__(self):
        return "Accel Cal Z: Scale={:.6f} Offset={:.3f}".format(
            self.scale, self.offset)
class M3FCConfigRadioFreq:
    """Radio frequency configuration (one 32-bit unsigned value)."""
    def __init__(self, freq):
        self.freq = freq

    @classmethod
    def from_can(cls, packet):
        """Decode the little-endian uint32 from a cfg_radio_freq frame."""
        assert packet.sid == m3fc_msg_cfg_radio_freq
        (freq,) = struct.unpack("<I", packet.data_bytes())
        return cls(freq)

    def to_can(self):
        """Encode as a 4-byte set-config CAN frame."""
        return CANFrame(sid=m3fc_msg_set_cfg_radio_freq, rtr=False, dlc=4,
                        data=struct.pack("<I", self.freq))

    @classmethod
    def from_dict(cls, d):
        # Unlike the other sections, the config file stores the frequency
        # directly as a number rather than a dict.
        return cls(d)

    def __str__(self):
        return "Radio Freq: {}".format(self.freq)
class M3FCConfigCRC:
    """CRC32 checksum over the whole configuration (32-bit unsigned)."""
    def __init__(self, crc):
        self.crc = crc

    @classmethod
    def from_can(cls, packet):
        """Decode the little-endian uint32 from a cfg_crc frame."""
        assert packet.sid == m3fc_msg_cfg_crc
        (crc,) = struct.unpack("<I", packet.data_bytes())
        return cls(crc)

    def to_can(self):
        """Encode as a 4-byte set-config CAN frame."""
        return CANFrame(sid=m3fc_msg_set_cfg_crc, rtr=False, dlc=4,
                        data=struct.pack("<I", self.crc))

    def __str__(self):
        return "CRC: {}".format(hex(self.crc))
class M3FCConfig:
    """Container for a complete M3FC configuration.

    Holds one object per config section plus the CRC over all of them.
    Sections start as None when reading from CAN and are filled in frame
    by frame via update_from_can.
    """
    def __init__(self, profile, pyros, accel_cal_x, accel_cal_y, accel_cal_z,
                 radio_freq, crc):
        self.profile = profile
        self.pyros = pyros
        self.accel_cal_x = accel_cal_x
        self.accel_cal_y = accel_cal_y
        self.accel_cal_z = accel_cal_z
        self.radio_freq = radio_freq
        self.crc = crc
    @classmethod
    def from_dict(cls, d):
        """Build a config from a parsed YAML dict; the CRC is not part of
        the input and is computed from the other six sections."""
        assert set(d.keys()) == {"profile", "pyros", "accel_cal_x",
                                 "accel_cal_y", "accel_cal_z", "radio_freq"}
        cfg = cls(
            M3FCConfigProfile.from_dict(d['profile']),
            M3FCConfigPyros.from_dict(d['pyros']),
            M3FCConfigAccelCalX.from_dict(d['accel_cal_x']),
            M3FCConfigAccelCalY.from_dict(d['accel_cal_y']),
            M3FCConfigAccelCalZ.from_dict(d['accel_cal_z']),
            M3FCConfigRadioFreq.from_dict(d['radio_freq']),
            None)
        cfg.crc = cfg.compute_crc()
        return cfg
    def compute_crc(self):
        """Compute the CRC32 over the packed config.

        Layout is 16 bytes (profile + pyros), 6 floats (accel cal) and one
        uint32 (radio freq), packed little-endian — 44 bytes = 11 words.
        """
        profile = self.profile
        pyros = self.pyros
        accel_cal_x = self.accel_cal_x
        accel_cal_y = self.accel_cal_y
        accel_cal_z = self.accel_cal_z
        radio_freq = self.radio_freq
        raw = struct.pack(
            "<16B6fI",
            profile.m3fc_position, profile.accel_axis, profile.ignition_accel,
            profile.burnout_timeout, profile.apogee_timeout,
            profile.main_altitude, profile.main_timeout, profile.land_timeout,
            pyros.pyro1, pyros.pyro2, pyros.pyro3, pyros.pyro4,
            pyros.pyro5, pyros.pyro6, pyros.pyro7, pyros.pyro8,
            accel_cal_x.scale, accel_cal_x.offset, accel_cal_y.scale,
            accel_cal_y.offset, accel_cal_z.scale, accel_cal_z.offset,
            radio_freq.freq)
        # convert to 32 bit words and then reverse the byte ordering
        # (presumably to match how the flight computer's CRC hardware
        # consumes the buffer word-by-word -- TODO confirm vs firmware)
        u32 = struct.unpack(">11I", raw)
        raw = struct.pack("<11I", *u32)
        crc32_func = crcmod.predefined.mkCrcFun('crc-32-mpeg')
        return M3FCConfigCRC(crc32_func(raw))
    def parts(self):
        # Fixed transmission/display order; the CRC always comes last.
        return [self.profile, self.pyros, self.accel_cal_x, self.accel_cal_y,
                self.accel_cal_z, self.radio_freq, self.crc]
    def loaded(self):
        """True once every section (including the CRC) has been filled in."""
        return all(x is not None for x in self.parts())
    def to_can(self):
        """Encode every section as its set-config CAN frame."""
        return [x.to_can() for x in self.parts()]
    def update_from_can(self, frame):
        """Fill in whichever config section the received frame carries;
        frames with other SIDs are silently ignored."""
        if frame.sid == m3fc_msg_cfg_profile:
            self.profile = M3FCConfigProfile.from_can(frame)
        elif frame.sid == m3fc_msg_cfg_pyros:
            self.pyros = M3FCConfigPyros.from_can(frame)
        elif frame.sid == m3fc_msg_cfg_accel_cal_x:
            self.accel_cal_x = M3FCConfigAccelCalX.from_can(frame)
        elif frame.sid == m3fc_msg_cfg_accel_cal_y:
            self.accel_cal_y = M3FCConfigAccelCalY.from_can(frame)
        elif frame.sid == m3fc_msg_cfg_accel_cal_z:
            self.accel_cal_z = M3FCConfigAccelCalZ.from_can(frame)
        elif frame.sid == m3fc_msg_cfg_radio_freq:
            self.radio_freq = M3FCConfigRadioFreq.from_can(frame)
        elif frame.sid == m3fc_msg_cfg_crc:
            self.crc = M3FCConfigCRC.from_can(frame)
    def __str__(self):
        return "\n".join(str(x) for x in self.parts())
def ppp_pad(buf):
    """PPP-style byte stuffing: prepend the 0x7E flag byte and escape any
    0x7E/0x7D payload bytes as 0x7D followed by the byte XOR 0x20."""
    stuffed = bytearray([0x7E])
    for b in buf:
        if b in (0x7E, 0x7D):
            stuffed.append(0x7D)
            stuffed.append(b ^ 0x20)  # 0x7E -> 0x5E, 0x7D -> 0x5D
        else:
            stuffed.append(b)
    return bytes(stuffed)
def read_config_from_can(rxq):
    """Drain frames from the receive queue into a fresh config until every
    section has arrived, then print the assembled configuration."""
    cfg = M3FCConfig(*([None] * 7))
    while True:
        if cfg.loaded():
            break
        frame = rxq.get()
        cfg.update_from_can(frame)
    print(cfg)
def config_from_file(path):
    """Load an M3FC configuration from a YAML file.

    :param path: path to the YAML config file
    :return: M3FCConfig built from the file, with its CRC computed
    """
    # NOTE(review): yaml.load can construct arbitrary objects from tagged
    # YAML; switch to yaml.safe_load if config files are not fully trusted.
    # Bug fix: the file handle was previously opened and never closed.
    with open(path) as f:
        yaml_cfg = yaml.load(f)
    return M3FCConfig.from_dict(yaml_cfg)
def main():
    """CLI entry point: talk to M3FC over CAN (via the m3debug serial
    bridge), optionally upload a config file and/or save it to flash,
    then read back and print the live configuration.

    With --crc the config file is only parsed and printed (no hardware).
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("--port", help="path to serial port on m3debug",
                        default="/dev/serial/by-id/*m3debug*-if02")
    parser.add_argument("--file", help="path to config yaml file")
    parser.add_argument("--flash", help="save new config to flash",
                        action="store_true")
    parser.add_argument("--crc", help="just compute crc on a file",
                        action="store_true")
    parser.add_argument("--slow", help="work slowly over rf links",
                        action="store_true")
    args = parser.parse_args()
    if args.crc:
        # Offline mode: parse the file and print it (CRC included).
        if not args.file:
            print("must specify --file with --crc")
            return
        cfg = config_from_file(args.file)
        print(cfg)
        return
    unglob = glob.glob(args.port)
    if len(unglob) == 0:
        raise RuntimeError("No serial ports matching glob found")
    port = unglob[0]
    port = os.path.realpath(port)
    txq = multiprocessing.Queue()
    rxq = multiprocessing.Queue()
    runner = multiprocessing.Process(target=run, args=(port, txq, rxq))
    runner.start()
    if args.file:
        cfg = config_from_file(args.file)
        print("Loaded config:")
        print(cfg)
        accept = input("Set new config? (y/N): ")
        if accept.lower() == "y":
            print("Setting new config")
            if args.slow:
                # Over lossy RF links, send the whole config five times
                # with a one-second gap between frames.
                for i in range(5):
                    print("Attempt {}".format(i))
                    for idx, frame in enumerate(cfg.to_can()):
                        # Bug fix: was print("{}, ", ...) which printed the
                        # literal "{}, " instead of the frame index.
                        print("{}, ".format(idx), end='', flush=True)
                        txq.put(frame)
                        time.sleep(1)
            else:
                for frame in cfg.to_can():
                    txq.put(frame)
        else:
            print("Not setting new config")
    if args.flash:
        print("Saving current config to flash")
        txq.put(CANFrame(sid=m3fc_msg_save_cfg, rtr=False, dlc=0, data=[]))
    # clear current q
    while not rxq.empty():
        try:
            rxq.get_nowait()
        except Empty:
            break
    print("Reading current config...\n")
    read_config_from_can(rxq)
    runner.terminate()
if __name__ == "__main__":
    main()
|
# Project Euler #2: sum of the even-valued Fibonacci terms.
# Extend the sequence until the last term exceeds four million,
# then add up the even members.
a = [1, 2, 3, 5]
while a[-1] < 4000001:
    a.append(a[-2] + a[-1])
s = sum(term for term in a if term % 2 == 0)
print(s)
from evaluator import Evaluator
from games.chess import Chess
import chess
class MaterialPositionChessEvaluator(Evaluator):
    def evaluate(self, game):
        """
        Material + piece-square-table evaluation, positive favours White.

        Taken from https://medium.com/@andreasstckl/writing-a-chess-program-in-one-day-30daff4610ec
        https://www.chessprogramming.org/Simplified_Evaluation_Function
        :param game: game whose ``state`` is a python-chess board
        :return: evaluation score in centipawns
        """
        # Piece-square tables are written from White's point of view with
        # square A1 first; Black lookups mirror the square vertically.
        pawntable = [
            0, 0, 0, 0, 0, 0, 0, 0,
            5, 10, 10,-20,-20, 10, 10, 5,
            5, -5,-10, 0, 0,-10, -5, 5,
            0, 0, 0, 20, 20, 0, 0, 0,
            5, 5, 10, 25, 25, 10, 5, 5,
            10, 10, 20, 30, 30, 20, 10, 10,
            50, 50, 50, 50, 50, 50, 50, 50,
            0, 0, 0, 0, 0, 0, 0, 0]
        knightstable = [
            -50,-40,-30,-30,-30,-30,-40,-50,
            -40,-20, 0, 5, 5, 0,-20,-40,
            -30, 5, 10, 15, 15, 10, 5,-30,
            -30, 0, 15, 20, 20, 15, 0,-30,
            -30, 5, 15, 20, 20, 15, 5,-30,
            -30, 0, 10, 15, 15, 10, 0,-30,
            -40,-20, 0, 0, 0, 0,-20,-40,
            -50,-40,-30,-30,-30,-30,-40,-50]
        bishopstable = [
            -20,-10,-10,-10,-10,-10,-10,-20,
            -10, 5, 0, 0, 0, 0, 5,-10,
            -10, 10, 10, 10, 10, 10, 10,-10,
            -10, 0, 10, 10, 10, 10, 0,-10,
            -10, 5, 5, 10, 10, 5, 5,-10,
            -10, 0, 5, 10, 10, 5, 0,-10,
            -10, 0, 0, 0, 0, 0, 0,-10,
            -20,-10,-10,-10,-10,-10,-10,-20]
        rookstable = [
            0, 0, 0, 5, 5, 0, 0, 0,
            -5, 0, 0, 0, 0, 0, 0, -5,
            -5, 0, 0, 0, 0, 0, 0, -5,
            -5, 0, 0, 0, 0, 0, 0, -5,
            -5, 0, 0, 0, 0, 0, 0, -5,
            -5, 0, 0, 0, 0, 0, 0, -5,
            5, 10, 10, 10, 10, 10, 10, 5,
            0, 0, 0, 0, 0, 0, 0, 0]
        queenstable = [
            -20,-10,-10, -5, -5,-10,-10,-20,
            -10, 0, 0, 0, 0, 0, 0,-10,
            -10, 5, 5, 5, 5, 5, 0,-10,
            0, 0, 5, 5, 5, 5, 0, -5,
            -5, 0, 5, 5, 5, 5, 0, -5,
            -10, 0, 5, 5, 5, 5, 0,-10,
            -10, 0, 0, 0, 0, 0, 0,-10,
            -20,-10,-10, -5, -5,-10,-10,-20]
        kingstable = [
            20, 30, 10, 0, 0, 10, 30, 20,
            20, 20, 0, 0, 0, 0, 20, 20,
            -10,-20,-20,-20,-20,-20,-20,-10,
            -20,-30,-30,-40,-40,-30,-30,-20,
            -30,-40,-40,-50,-50,-40,-40,-30,
            -30,-40,-40,-50,-50,-40,-40,-30,
            -30,-40,-40,-50,-50,-40,-40,-30,
            -30,-40,-40,-50,-50,-40,-40,-30]

        board = game.state

        def piece_count(piece_type, colour):
            # Number of pieces of this type/colour currently on the board.
            return len(board.pieces(piece_type, colour))

        def table_score(table, piece_type):
            # White's piece-square sum minus Black's (Black mirrored).
            white = sum(table[sq]
                        for sq in board.pieces(piece_type, chess.WHITE))
            black = sum(table[chess.square_mirror(sq)]
                        for sq in board.pieces(piece_type, chess.BLACK))
            return white - black

        # Material balance with the standard simplified piece values.
        material = (
            100 * (piece_count(chess.PAWN, chess.WHITE)
                   - piece_count(chess.PAWN, chess.BLACK))
            + 320 * (piece_count(chess.KNIGHT, chess.WHITE)
                     - piece_count(chess.KNIGHT, chess.BLACK))
            + 330 * (piece_count(chess.BISHOP, chess.WHITE)
                     - piece_count(chess.BISHOP, chess.BLACK))
            + 500 * (piece_count(chess.ROOK, chess.WHITE)
                     - piece_count(chess.ROOK, chess.BLACK))
            + 900 * (piece_count(chess.QUEEN, chess.WHITE)
                     - piece_count(chess.QUEEN, chess.BLACK)))

        # Renamed from `eval` (shadowed the builtin); the per-piece sums
        # were six copy-pasted blocks, now factored into table_score.
        score = material
        score += table_score(pawntable, chess.PAWN)
        score += table_score(knightstable, chess.KNIGHT)
        score += table_score(bishopstable, chess.BISHOP)
        score += table_score(rookstable, chess.ROOK)
        score += table_score(queenstable, chess.QUEEN)
        score += table_score(kingstable, chess.KING)
        return score
import pymysql
import config as cfg
import logging
import sys
import pandas as pd
# Configure the root logger: everything at DEBUG and above is written to
# web_scraper.log with file/function/line context in each record.
logger = logging.getLogger()
logger.setLevel(logging.DEBUG)
# Create formatter
formatter = logging.Formatter('%(asctime)s-FILE:%(filename)s-FUNC:%(funcName)s-LINE:%(lineno)d-%(message)s')
# Create a file handler and add it to logger.
file_handler = logging.FileHandler('web_scraper.log')
file_handler.setLevel(logging.DEBUG)
file_handler.setFormatter(formatter)
logger.addHandler(file_handler)
#
# Optional console output (disabled): uncomment to also log INFO+ to stdout.
# stream_handler = logging.StreamHandler(sys.stdout)
# stream_handler.setLevel(logging.INFO)
# stream_handler.setFormatter(formatter)
# logger.addHandler(stream_handler)
class Database:
    def __init__(self):
        """
        Initialisation function for Database class.

        Connects to the MySQL server, creates the configured database if
        needed, reconnects bound to that database, and ensures all tables
        exist.
        """
        self.db_name = cfg.DATABASE_NAME
        logging.info(f'DB name is {self.db_name}')
        # Server-level connection first (no database selected yet).
        self.con, self.cur = self.connect_to_db()
        # self.con, self.cur updated after DB confirmed/created
        self.con, self.cur = self.create_db()
        self.create_tables_db()
@staticmethod
def connect_to_db():
"""
Creates initial connection and cursor objects.
:return: con and cursor
"""
# Create initial connection object.
try:
con = pymysql.connect(host='localhost', user=cfg.USER_DB,
password=cfg.PASSWORD_DB_SERVER, cursorclass=pymysql.cursors.DictCursor)
except TypeError:
logging.critical(f'Could not create connection object for database. Please check parameters used.')
sys.exit(1)
# Create initial cursor
cur = con.cursor()
return con, cur
def create_db(self):
"""
Checks if DB exists. If not, creates it.
:return: con, cursor
"""
query = f"CREATE DATABASE IF NOT EXISTS {self.db_name}"
self.cur.execute(query)
# Update con with confirmed/new DB info
try:
con = pymysql.connect(host='localhost', user=cfg.USER_DB, password=cfg.PASSWORD_DB_SERVER,
database=self.db_name, cursorclass=pymysql.cursors.DictCursor)
except TypeError:
logging.critical(f'Could not create cursor object for database. Please check parameters used.')
sys.exit(1)
# Updated cursor
cur = con.cursor()
return con, cur
def create_tables_db(self):
"""
Assembles tables as required if don't exist in self.db_name
:return: None
"""
try:
self.cur.execute("""CREATE TABLE IF NOT EXISTS frequent (
id int PRIMARY KEY NOT NULL AUTO_INCREMENT,
meta_score varchar(255),
user_score varchar(255),
wiki_url varchar(1000)
);""")
logging.info(f'studios table functioning.')
except pymysql.err.ProgrammingError:
logging.critical(f'Could not execute the SQL to create the frequent table. Please check query.')
sys.exit(1)
try:
self.cur.execute("""
CREATE TABLE IF NOT EXISTS studios (
id int PRIMARY KEY NOT NULL AUTO_INCREMENT,
name varchar(255)
);""")
logging.info(f'studios table functioning.')
except pymysql.err.ProgrammingError:
logging.critical(f'Could not execute the SQL to create the studios table. Please check query.')
sys.exit(1)
try:
self.cur.execute("""
CREATE TABLE IF NOT EXISTS directors (
id int PRIMARY KEY NOT NULL AUTO_INCREMENT,
name varchar(255)
);""")
logging.info(f'directors table functioning.')
except pymysql.err.ProgrammingError:
logging.critical(f'Could not execute the SQL to create the directors table. Please check query.')
sys.exit(1)
try:
self.cur.execute("""
CREATE TABLE IF NOT EXISTS genres (
id int PRIMARY KEY NOT NULL AUTO_INCREMENT,
name varchar(255)
);""")
logging.info(f'genres table functioning.')
except pymysql.err.ProgrammingError:
logging.critical(f'Could not execute the SQL to create the genres table. Please check query.')
sys.exit(1)
try:
self.cur.execute("""
CREATE TABLE IF NOT EXISTS creators (
id int PRIMARY KEY NOT NULL AUTO_INCREMENT,
name varchar(255)
);""")
logging.info(f'creators table functioning.')
except pymysql.err.ProgrammingError:
logging.critical(f'Could not execute the SQL to create the creators table. Please check query.')
sys.exit(1)
try:
self.cur.execute("""
CREATE TABLE IF NOT EXISTS platforms (
id int PRIMARY KEY NOT NULL AUTO_INCREMENT,
name varchar(255)
);""")
logging.info(f'platforms table functioning.')
except pymysql.err.ProgrammingError:
logging.critical(f'Could not execute the SQL to create the platforms table. Please check query.')
sys.exit(1)
try:
self.cur.execute("""
CREATE TABLE IF NOT EXISTS movies (
id int PRIMARY KEY NOT NULL AUTO_INCREMENT,
title varchar(255) NOT NULL,
unique_identifier varchar(500) NOT NULL,
release_year varchar(255),
rating varchar(255),
runtime varchar(255),
summary varchar(10000),
studio_id int,
director_id int,
frequent_id int,
FOREIGN KEY(studio_id) REFERENCES studios(id),
FOREIGN KEY(director_id) REFERENCES directors(id),
FOREIGN KEY(frequent_id) REFERENCES frequent(id)
);""")
logging.info(f'movies table functioning.')
except pymysql.err.ProgrammingError:
logging.critical(f'Could not execute the SQL to create the movies table. Please check query.')
sys.exit(1)
try:
self.cur.execute("""
CREATE TABLE IF NOT EXISTS movies_genres (
id int PRIMARY KEY NOT NULL AUTO_INCREMENT,
movie_id int,
genre_id int,
FOREIGN KEY(movie_id) REFERENCES movies(id),
FOREIGN KEY(genre_id) REFERENCES genres(id)
);""")
logging.info(f'movies_genres table functioning.')
except pymysql.err.ProgrammingError:
logging.critical(f'Could not execute the SQL to create the movies_genres table. Please check query.')
sys.exit(1)
try:
self.cur.execute("""
CREATE TABLE IF NOT EXISTS tv_shows (
id int PRIMARY KEY NOT NULL AUTO_INCREMENT,
title varchar(255) NOT NULL,
unique_identifier varchar(255) NOT NULL,
release_date varchar(255),
summary varchar(10000),
studio_id int,
creator_id int,
frequent_id int,
FOREIGN KEY(studio_id) REFERENCES studios(id),
FOREIGN KEY(creator_id) REFERENCES creators(id),
FOREIGN KEY(frequent_id) REFERENCES frequent(id)
);""")
logging.info(f'tv_shows table functioning.')
except pymysql.err.ProgrammingError:
logging.critical(f'Could not execute the SQL to create the tv_shows table. Please check query.')
sys.exit(1)
try:
self.cur.execute("""
CREATE TABLE IF NOT EXISTS tv_shows_genres (
id int PRIMARY KEY NOT NULL AUTO_INCREMENT,
tv_show_id int,
genre_id int,
FOREIGN KEY(tv_show_id) REFERENCES tv_shows(id),
FOREIGN KEY(genre_id) REFERENCES genres(id)
);""")
logging.info(f'tv_shows_genres table functioning.')
except pymysql.err.ProgrammingError:
logging.critical(f'Could not execute the SQL to create the tv_shows_genres table. Please check query.')
sys.exit(1)
try:
self.cur.execute("""
CREATE TABLE IF NOT EXISTS games (
id int PRIMARY KEY NOT NULL AUTO_INCREMENT,
title varchar(255) NOT NULL,
unique_identifier varchar(255) NOT NULL,
release_date varchar(255),
rating varchar(255),
summary varchar(10000),
studio_id int,
frequent_id int,
FOREIGN KEY(studio_id) REFERENCES studios(id),
FOREIGN KEY(frequent_id) REFERENCES frequent(id)
);""")
logging.info(f'games table functioning.')
except pymysql.err.ProgrammingError:
logging.critical(f'Could not execute the SQL to create the games table. Please check query.')
sys.exit(1)
try:
self.cur.execute("""
CREATE TABLE IF NOT EXISTS games_genres (
id int PRIMARY KEY NOT NULL AUTO_INCREMENT,
game_id int,
genre_id int,
FOREIGN KEY(game_id) REFERENCES games(id),
FOREIGN KEY(genre_id) REFERENCES genres(id)
);""")
logging.info(f'games_genres table functioning.')
except pymysql.err.ProgrammingError:
logging.critical(f'Could not execute the SQL to create the games_genres table. Please check query.')
sys.exit(1)
try:
self.cur.execute("""
CREATE TABLE IF NOT EXISTS games_platforms (
id int PRIMARY KEY NOT NULL AUTO_INCREMENT,
game_id int,
platform_id int,
FOREIGN KEY(game_id) REFERENCES games(id),
FOREIGN KEY(platform_id) REFERENCES platforms(id)
);""")
logging.info(f'games_platforms table functioning.')
except pymysql.err.ProgrammingError:
logging.critical(f'Could not execute the SQL to create the games_platforms table. Please check query.')
sys.exit(1)
def add_to_database_by_type(self, container, container_type):
"""
Checks the type of the Dataframe and calls the correct method to add to the
database (movies, tv shows, games).
:param container: Dataframe
:param container_type: Type of dataframe
:return: None
"""
if container_type == 'movies':
self.populate_tables_movies(container)
logging.info(f'Finished populating Movies to database.')
elif container_type == 'tv':
self.populate_tables_tv_shows(container)
logging.info(f'Finished populating TV shows to database.')
elif container_type == 'games':
self.populate_tables_games(container)
logging.info(f'Finished populating Games to database.')
else:
logging.error(f'Failed to add to database. Please check the item type. '
f'It must be either movies, tv or games.')
    def populate_tables_movies(self, container):
        """
        Takes in a pd Dataframe. Each row series contains all information for a movie item.
        Inserts the data from the df to the database.

        Rows are keyed by the DataFrame index (unique_identifier). Movies
        already present only get their frequently-changing fields (scores,
        wiki_url) refreshed; new movies are inserted together with any
        missing studio/director/genre lookup rows.

        NOTE(review): several queries interpolate scraped values with
        f-strings; a value containing a double quote would break the SQL
        (injection risk). Prefer parameterized queries -- TODO confirm/fix.
        :param self:
        :param container: pd DataFrame.
        :return: None
        """
        counter = 0
        for index, row_df in container.iterrows():
            unique_identifier = index
            # Check if movie already in table
            self.cur.execute(f"""SELECT id as id, unique_identifier as unique_identifier
                            FROM movies WHERE unique_identifier="{unique_identifier}";""")
            movie_existence_query = self.cur.fetchone()
            # If movie already in db, need to check if frequently updated items have changed
            if movie_existence_query:
                movie_id = movie_existence_query['id']
                # Get rating and wiki_url to check
                self.cur.execute(f"""SELECT frequent_id, meta_score, user_score,
                                    wiki_url
                                    FROM movies LEFT JOIN frequent
                                    ON movies.frequent_id = frequent.id
                                    WHERE movies.id={movie_id};""")
                frequent_query_movies = self.cur.fetchone()
                # Id in frequent table
                frequent_id = frequent_query_movies['frequent_id']
                # If items in frequent table haven't changed, no need to update
                if frequent_query_movies['meta_score'] == row_df['Metascore'] and \
                        frequent_query_movies['user_score'] == row_df['User score'] and\
                        frequent_query_movies['wiki_url'] == row_df['wiki_url']:
                    continue
                else:
                    # Update items in frequent table
                    sql_to_execute_frequent = fr"""UPDATE frequent
                                                SET meta_score = %s, user_score = %s,
                                                wiki_url = %s
                                                WHERE id = %s;"""
                    self.cur.execute(sql_to_execute_frequent, (row_df['Metascore'], row_df['User score'],
                                                               row_df['wiki_url'], frequent_id))
                    self.con.commit()
                    logging.info(f' frequent table updated for {unique_identifier} in {self.db_name}')
            else:
                # If no record for movie in movies table, need to add
                # Check if studio in studios table
                self.cur.execute(f"""SELECT id as id FROM studios WHERE name="{row_df['Studio']}";""")
                studio_existence_query = self.cur.fetchone()
                if studio_existence_query is None:
                    # If studio not in studio table, insert it and then select the id to use for movies FK.
                    self.cur.execute(f"""INSERT INTO studios (name) VALUES ("{row_df['Studio']}");""")
                    self.cur.execute(f"""SELECT id AS id FROM studios WHERE name="{row_df['Studio']}";""")
                    studio_existence_query = self.cur.fetchone()
                    studio_id = studio_existence_query['id']
                else:
                    studio_id = studio_existence_query['id']
                # Check if directors in directors table
                self.cur.execute(f"""SELECT id AS id FROM directors WHERE name="{row_df['Director']}";""")
                director_existence_query = self.cur.fetchone()
                if director_existence_query is None:
                    self.cur.execute(f"""INSERT INTO directors (name) VALUES ("{row_df['Director']}");""")
                    self.cur.execute(f"""SELECT id AS id FROM directors WHERE name="{row_df['Director']}";""")
                    director_existence_query = self.cur.fetchone()
                    director_id = director_existence_query['id']
                else:
                    director_id = director_existence_query['id']
                # Insert into frequent table first, as movies table references frequent
                sql_to_execute_frequent = fr"""INSERT INTO frequent (meta_score, user_score, wiki_url)
                                            VALUES (%s, %s, %s);"""
                self.cur.execute(sql_to_execute_frequent, (row_df['Metascore'],
                                                           row_df['User score'], row_df['wiki_url']))
                # Get id from frequent table to use for movies table
                # NOTE(review): max(id) is race-prone with concurrent
                # writers; cursor.lastrowid would be safer -- TODO confirm.
                self.cur.execute(f"""SELECT max(id) as id FROM frequent;""")
                frequent_id_query = self.cur.fetchone()
                frequent_id = frequent_id_query['id']
                # sql for movies table
                sql_to_execute_movies = fr"""INSERT INTO movies (title, unique_identifier,
                                            release_year, rating, runtime, summary, studio_id,
                                            director_id, frequent_id)
                                            VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s);"""
                # Insert record into movies table
                self.cur.execute(sql_to_execute_movies, (row_df['Title'], unique_identifier, row_df['Release Year'], row_df['Rating'],
                                                         row_df['Runtime'], row_df['Summary'], studio_id, director_id, frequent_id))
                self.cur.execute(f"""SELECT id FROM movies WHERE unique_identifier="{unique_identifier}";""")
                movie_id_query = self.cur.fetchone()
                movie_id = movie_id_query['id']
                # Insert into movies_genres table
                for genre in row_df['Genres']:
                    self.cur.execute(f"""SELECT id FROM genres WHERE name="{genre}";""")
                    genre_id_query = self.cur.fetchone()
                    if genre_id_query is None:
                        self.cur.execute(f"""INSERT INTO genres (name) VALUES ("{genre}");""")
                        self.cur.execute(f"""SELECT id FROM genres WHERE name="{genre}";""")
                        genre_id_query = self.cur.fetchone()
                        genre_id = genre_id_query['id']
                    else:
                        genre_id = genre_id_query['id']
                    # Insert this movie, genre combination into movies_genres table
                    self.cur.execute(f"""INSERT INTO movies_genres (movie_id, genre_id)
                                        VALUES ({movie_id}, {genre_id});""")
                counter += 1
                # Batched commits are disabled: every new movie commits
                # immediately, so the SIZE_OF_COMMIT debug message below
                # is stale.
                # if counter % cfg.SIZE_OF_COMMIT == 0 or counter == len(container) - 1:
                self.con.commit()
                logging.info(f'{unique_identifier} successfully added to {self.db_name}')
                logging.debug(f'Commit of {cfg.SIZE_OF_COMMIT} entries')
    def populate_tables_tv_shows(self, container):
        """
        Takes in a pd Dataframe. Each row series contains all information for a tv_show item.
        Inserts the data from the df to the database.

        Mirrors populate_tables_movies: existing shows (matched on
        unique_identifier) only get their "frequent" row refreshed; new
        shows are inserted along with any missing studio/creator/genre rows.

        NOTE(review): same f-string SQL quoting/injection concern as
        populate_tables_movies -- TODO confirm/fix.
        :param self:
        :param container: pd DataFrame.
        :return: None
        """
        counter = 0
        for index, row_df in container.iterrows():
            unique_identifier = index
            # Check if tv_show already in table
            self.cur.execute(f"""SELECT id as id, unique_identifier as unique_identifier
                            FROM tv_shows WHERE unique_identifier="{unique_identifier}";""")
            tv_show_existence_query = self.cur.fetchone()
            if tv_show_existence_query:
                tv_show_id = tv_show_existence_query['id']
                # Get rating and wiki_url to check
                self.cur.execute(f"""SELECT frequent_id, meta_score, user_score,
                                    wiki_url
                                    FROM tv_shows LEFT JOIN frequent
                                    ON tv_shows.frequent_id = frequent.id
                                    WHERE tv_shows.id={tv_show_id};""")
                frequent_query_tv_shows = self.cur.fetchone()
                # Id in frequent table
                frequent_id = frequent_query_tv_shows['frequent_id']
                # If items in frequent table haven't changed, no need to update
                if frequent_query_tv_shows['meta_score'] == row_df['Metascore'] and \
                        frequent_query_tv_shows['user_score'] == row_df['User score'] and \
                        frequent_query_tv_shows['wiki_url'] == row_df['wiki_url']:
                    continue
                else:
                    # Update items in frequent table
                    sql_to_execute_frequent = fr"""UPDATE frequent
                                                SET meta_score = %s, user_score = %s,
                                                wiki_url = %s
                                                WHERE id = %s;"""
                    self.cur.execute(sql_to_execute_frequent, (row_df['Metascore'], row_df['User score'],
                                                               row_df['wiki_url'], frequent_id))
                    self.con.commit()
                    logging.info(f' frequent table updated for {unique_identifier} in {self.db_name}')
            else:
                # If no record for tv_show in tv_shows table, need to add
                # Check if studio in studios table
                self.cur.execute(f"""SELECT id as id FROM studios WHERE name="{row_df['Studio']}";""")
                studio_existence_query = self.cur.fetchone()
                if studio_existence_query is None:
                    # If studio not in studio table, insert it and then select the id to use for tv_shows FK.
                    self.cur.execute(f"""INSERT INTO studios (name) VALUES ("{row_df['Studio']}");""")
                    self.cur.execute(f"""SELECT id AS id FROM studios WHERE name="{row_df['Studio']}";""")
                    studio_existence_query = self.cur.fetchone()
                    studio_id = studio_existence_query['id']
                else:
                    studio_id = studio_existence_query['id']
                # Check if creator in creators table
                self.cur.execute(f"""SELECT id AS id FROM creators WHERE name="{row_df['Creator']}";""")
                creator_existence_query = self.cur.fetchone()
                if creator_existence_query is None:
                    self.cur.execute(f"""INSERT INTO creators (name) VALUES ("{row_df['Creator']}");""")
                    self.cur.execute(f"""SELECT id AS id FROM creators WHERE name="{row_df['Creator']}";""")
                    creator_existence_query = self.cur.fetchone()
                    creator_id = creator_existence_query['id']
                else:
                    creator_id = creator_existence_query['id']
                # Insert into frequent table first, as tv_shows table references frequent
                sql_to_execute_frequent = fr"""INSERT INTO frequent (meta_score, user_score, wiki_url)
                                            VALUES (%s, %s, %s);"""
                self.cur.execute(sql_to_execute_frequent, (row_df['Metascore'],
                                                           row_df['User score'], row_df['wiki_url']))
                # Get id from frequent table to use for tv_shows table
                # NOTE(review): max(id) is race-prone with concurrent
                # writers; cursor.lastrowid would be safer -- TODO confirm.
                self.cur.execute(f"""SELECT max(id) as id FROM frequent;""")
                frequent_id_query = self.cur.fetchone()
                frequent_id = frequent_id_query['id']
                # sql for tv_shows table
                sql_to_execute = fr"""INSERT INTO tv_shows (title, unique_identifier, release_date,
                                    summary, studio_id, creator_id, frequent_id)
                                    VALUES (%s, %s, %s, %s, %s, %s, %s);"""
                # Insert record into tv_shows table
                self.cur.execute(sql_to_execute, (row_df['Title'], unique_identifier,
                                                  row_df['Release Year'], row_df['Summary'],
                                                  studio_id, creator_id, frequent_id))
                self.cur.execute(f"""SELECT id FROM tv_shows WHERE unique_identifier="{unique_identifier}";""")
                tv_show_query = self.cur.fetchone()
                tv_show_id = tv_show_query['id']
                # Insert into tv_shows_genres table
                for genre in row_df['Genres']:
                    self.cur.execute(f"""SELECT id FROM genres WHERE name="{genre}";""")
                    genre_id_query = self.cur.fetchone()
                    if genre_id_query is None:
                        self.cur.execute(f"""INSERT INTO genres (name) VALUES ("{genre}");""")
                        self.cur.execute(f"""SELECT id FROM genres WHERE name="{genre}";""")
                        genre_id_query = self.cur.fetchone()
                        genre_id = genre_id_query['id']
                    else:
                        genre_id = genre_id_query['id']
                    # Insert this tv_show, genre combination into tv_shows_genres table
                    self.cur.execute(f"""INSERT INTO tv_shows_genres (tv_show_id, genre_id) VALUES
                                    ({tv_show_id}, {genre_id});""")
                counter += 1
                # Batched commits are disabled: every new show commits
                # immediately, so the SIZE_OF_COMMIT debug message below
                # is stale.
                # if counter % cfg.SIZE_OF_COMMIT == 0 or counter == len(container) - 1:
                self.con.commit()
                logging.info(f'{unique_identifier} successfully added to {self.db_name}')
                logging.debug(f'Commit of {cfg.SIZE_OF_COMMIT} entries')
def populate_tables_games(self, container):
"""
Takes in a pd Dataframe. Each row series contains all information for a game item.
Inserts the data from the df to the database.
:param self:
:param container: pd DataFrame.
:return: None
"""
counter = 0
for index, row_df in container.iterrows():
unique_identifier = index
# Check game already in table
self.cur.execute(f"""SELECT id as id, unique_identifier as unique_identifier
FROM games WHERE unique_identifier="{unique_identifier}";""")
game_existence_query = self.cur.fetchone()
# If game already in db, need to check if frequently updated items have changed
if game_existence_query:
game_id = game_existence_query['id']
# Get rating and wiki_url to check
self.cur.execute(f"""SELECT frequent_id, meta_score, user_score,
wiki_url
FROM games LEFT JOIN frequent
ON games.frequent_id = frequent.id
WHERE games.id={game_id};""")
frequent_query_games = self.cur.fetchone()
# Id in frequent table
frequent_id = frequent_query_games['frequent_id']
# If items in frequent table haven't changed, no need to update
if frequent_query_games['meta_score'] == row_df['Metascore'] and \
frequent_query_games['user_score'] == row_df['User score'] and \
frequent_query_games['wiki_url'] == row_df['wiki_url']:
continue
else:
# Update items in frequent table
sql_to_execute_frequent = fr"""UPDATE frequent
SET meta_score = %s, user_score = %s,
wiki_url = %s
WHERE id = %s;"""
self.cur.execute(sql_to_execute_frequent, (row_df['Metascore'], row_df['User score'],
row_df['wiki_url'], frequent_id))
self.con.commit()
logging.info(f' frequent table updated for {unique_identifier} in {self.db_name}')
else:
# If no record for movie in games table, need to add
# Check if studio in studios table
self.cur.execute(f"""SELECT id as id FROM studios WHERE name="{row_df['Studio']}";""")
studio_existence_query = self.cur.fetchone()
if studio_existence_query is None:
# If studio not in studio table, insert it and then select the id to use for games FK.
self.cur.execute(f"""INSERT INTO studios (name) VALUES ("{row_df['Studio']}");""")
self.cur.execute(f"""SELECT id AS id FROM studios WHERE name="{row_df['Studio']}";""")
studio_existence_query = self.cur.fetchone()
studio_id = studio_existence_query['id']
else:
studio_id = studio_existence_query['id']
# Insert into frequent table first, as games table references frequent
sql_to_execute_frequent = fr"""INSERT INTO frequent (meta_score, user_score, wiki_url)
VALUES (%s, %s, %s);"""
self.cur.execute(sql_to_execute_frequent, (row_df['Metascore'],
row_df['User score'], row_df['wiki_url']))
# Get id from frequent table to use for games table
self.cur.execute(f"""SELECT max(id) as id FROM frequent;""")
frequent_id_query = self.cur.fetchone()
frequent_id = frequent_id_query['id']
# sql for games table
sql_to_execute_games = fr"""INSERT INTO games (title, unique_identifier,
release_date, rating,
summary, studio_id, frequent_id)
VALUES (%s, %s, %s, %s, %s, %s, %s);"""
self.cur.execute(sql_to_execute_games, (row_df['Title'], unique_identifier, row_df['Release Year'],
row_df['Rating'], row_df['Summary'],
studio_id, frequent_id))
self.cur.execute(f"""SELECT id FROM games WHERE unique_identifier="{unique_identifier}";""")
game_id_query = self.cur.fetchone()
game_id = game_id_query['id']
# Insert into games_genres table
for genre in row_df['Genres']:
self.cur.execute(f"""SELECT id FROM genres WHERE name="{genre}";""")
genre_id_query = self.cur.fetchone()
if genre_id_query is None:
self.cur.execute(f"""INSERT INTO genres (name) VALUES ("{genre}");""")
self.cur.execute(f"""SELECT id FROM genres WHERE name="{genre}";""")
genre_id_query = self.cur.fetchone()
genre_id = genre_id_query['id']
else:
genre_id = genre_id_query['id']
# Insert this game, genre combination into games_genres table
self.cur.execute(f"""INSERT INTO games_genres (game_id, genre_id)
VALUES ({game_id}, {genre_id});""")
# Insert into games_platforms table
for platform in row_df['Platform']:
self.cur.execute(f"""SELECT id FROM platforms WHERE name="{platform}";""")
platform_id_query = self.cur.fetchone()
if platform_id_query is None:
self.cur.execute(f"""INSERT INTO platforms (name) VALUES ("{platform}");""")
self.cur.execute(f"""SELECT id FROM platforms WHERE name="{platform}";""")
platform_id_query = self.cur.fetchone()
platform_id = platform_id_query['id']
else:
platform_id = platform_id_query['id']
# Insert this game, platform combination into games_platforms table
self.cur.execute(f"""INSERT INTO games_platforms (game_id, platform_id) VALUES
({game_id}, {platform_id});""")
counter += 1
# if counter % cfg.SIZE_OF_COMMIT == 0 or counter == len(container) - 1:
self.con.commit()
logging.info(f'{unique_identifier} successfully added to {self.db_name}')
logging.debug(f'Commit of {cfg.SIZE_OF_COMMIT} entries')
def main():
    """Build the metacritic database and load the games tester CSV into it."""
    # The movies/tv_shows frames are read but not yet loaded; kept so the
    # files are validated on every run.
    df_movies = pd.read_csv('tester_movies2.csv')
    df_tv_shows = pd.read_csv('tester_tv_shows2.csv')
    df_games = pd.read_csv('tester_games2.csv')
    db1 = Database()
    # Fail fast if configuration points at the wrong schema. A plain
    # `assert` would be silently stripped under `python -O`.
    if db1.db_name != 'metacritic':
        raise AssertionError(f"expected db_name 'metacritic', got {db1.db_name!r}")
    db1.connect_to_db()
    db1.create_db()
    db1.create_tables_db()
    db1.add_to_database_by_type(df_games, 'games')
    print(db1.db_name)


if __name__ == '__main__':
    main()
|
from flask_sqlalchemy import SQLAlchemy
import base64
import hashlib
import os
import settings
# Shared SQLAlchemy handle; bound to the Flask app elsewhere.
db = SQLAlchemy()

# Secret "pepper" mixed into every password hash, loaded once at import.
# NOTE(review): only the first 16 bytes of the key file are read —
# confirm the key file is meant to be truncated like this.
with open(settings.PRIVATE_KEY_FILE, 'rb') as f:
    private_key = f.read(16)
class Pony(db.Model):
    """A pony candidate that users can vote for (see User.vote_pony_id)."""
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(255), unique=True)
    url = db.Column(db.String(255))
    description = db.Column(db.Text)

    def __init__(self, name, url, description):
        # Value-carrying constructor; persistence happens via the session.
        self.name = name
        self.url = url
        self.description = description
def password_hash(password, salt):
    """Return the hex SHA-224 of salt + UTF-8 password + module pepper.

    NOTE(review): SHA-224 is a fast hash; a dedicated password KDF
    (bcrypt/scrypt/argon2) would be stronger — flagged, not changed.
    """
    digest = hashlib.sha224()
    digest.update(salt)
    digest.update(password.encode('utf8'))
    digest.update(private_key)
    return digest.hexdigest()
class User(db.Model):
    """Registered account holding a salted password hash and one pony vote."""
    id = db.Column(db.Integer, primary_key=True)
    username = db.Column(db.String(255), unique=True)
    salt = db.Column(db.String(255))
    password = db.Column(db.String(56))
    vote_pony_id = db.Column(db.Integer, db.ForeignKey('pony.id'), nullable=True)

    def __init__(self, username, password):
        """Create a user with a fresh random salt; stores only the hash."""
        self.username = username
        raw_salt = os.urandom(16)
        # Salt is persisted base64-encoded; the raw bytes feed the hash.
        self.salt = base64.encodebytes(raw_salt).decode('utf8').strip()
        self.password = password_hash(password, raw_salt)
        self.vote_pony_id = None

    def check_password(self, password):
        """Return True when *password* hashes to the stored digest."""
        raw_salt = base64.decodebytes(self.salt.encode('utf8'))
        return password_hash(password, raw_salt) == self.password
|
from decouple import config
import pika, json
import os
import django
# Point Django at the project settings and bootstrap it so the ORM and
# serializers below can be imported outside of manage.py.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "prescriptions2.settings")
django.setup()
from entries.serializer import PrescriptionSerializer, PikUpSerializer
from entries.models import OurPrescriptions

# Connect to RabbitMQ (URL supplied via python-decouple config) and make
# sure the queue we consume from exists.
params = pika.URLParameters(config('pika_params'))
connection = pika.BlockingConnection(params)
channel = connection.channel()
channel.queue_declare(queue='second')
def callback(ch, method, properties, body):
    """
    Handle one RabbitMQ message from the 'second' queue.

    Two message types are recognised via ``properties.content_type``:

    * ``'prescription_fill'`` — deserialize the JSON payload and store a
      new prescription.
    * ``'pickup'`` — the payload is a prescription id; copy it through
      PikUpSerializer and delete the original row.

    :param ch: channel the message arrived on (unused).
    :param method: delivery metadata (unused).
    :param properties: pika properties; content_type selects the action.
    :param body: JSON-encoded message payload.
    """
    info = json.loads(body)
    if properties.content_type == 'prescription_fill':
        serializer = PrescriptionSerializer(data=info)
        if serializer.is_valid():
            serializer.save()
            print(serializer.data)
        else:
            print(serializer.errors)
    if properties.content_type == 'pickup':
        print('hi')
        print(f'info: {info}')
        # Renamed from `object`, which shadowed the builtin.
        prescription = OurPrescriptions.objects.filter(id=info).first()
        if prescription:
            serialized = PrescriptionSerializer(prescription)
            serializer = PikUpSerializer(data=serialized.data)
            if serializer.is_valid():
                serializer.save()
                prescription.delete()
                print('everything went through')
            else:
                print(serializer.errors)
# Start the blocking consume loop. auto_ack=True acknowledges messages
# before callback() runs, so a crash inside callback loses the message.
channel.basic_consume(queue='second', on_message_callback=callback, auto_ack=True)
print('activated')
channel.start_consuming()
# start_consuming() blocks until the consumer is cancelled, so close()
# is only reached on shutdown.
channel.close()
from pulumi import export as pulumi_export
from pulumi import Output
import app.__main__ as app_code
import infra.__main__ as infra_code
# Re-export the infra module and feed its kubeconfig (an Output) into the
# application layer once the cluster is available.
infra = infra_code
kubeconfig = infra_code.cluster_kubeconfig
app_infra = kubeconfig.apply(
    lambda val: app_code.create_app(kubeconfig_val=val)
)
# apply() on a secret marks the result secret as well; unwrap it so the
# endpoint URL is readable in `pulumi stack output`.
pulumi_export("endpoint_url", Output.unsecret(app_infra))
import random
import traceback
from telebot import types, TeleBot
import time
import threading
import config
# Bot instance and MongoDB collection handles.
dnd = TeleBot(config.dndbot_token)
db2 = config.mongo_client.dnd
users2 = db2.users
users = db2.users  # alias of users2; both names are used below
nowid = db2.nowid
spells = db2.spells
open_objects = db2.open_objects
# Seed the singleton documents on first run.
if open_objects.find_one({}) == None:
    open_objects.insert_one({'units':{}, 'spells':{}, 'weapons':{}})
#if 'barbarian' not in spells.find_one({}):
#    spells.update_one({}, {'$set': {'barbarian': {}, 'bard': {}, 'fighter': {}, 'wizard': {}, 'druid': {},
#                                    'cleric': {}, 'warlock': {}, 'monk': {}, 'paladin': {}, 'rogue': {}, 'ranger': {},
#                                    'sorcerer': {}}})
if nowid.find_one({}) == None:
    nowid.insert_one({'id': 1})
# Template document for a new bot user: per-user object collections plus
# the "current_*" fields that track which multi-step edit dialog the
# user has open (None = no dialog in progress).
base = {
    'units': {},
    'alpha_access': True,
    'current_stat': None,
    'current_unit': None,
    'current_spell': None,
    'current_spellstat': None,
    'spells': {},
    'current_team':None,
    'current_game':None,
    'current_weapon':None,
    'current_weaponstat':None,
    'current_effect':None,
    'current_effectstat':None,
    'cgame':None,
    'weapons':{},
    'open_objects_access':False,
    'current_openobj':None,
    'effects':{},
    'saved_games':{},
    'current_condition':None,
    'current_obj':None,
    'current_obj_to_effect': None
}
# Supported character classes and races.
classes = ['bard', 'barbarian', 'fighter', 'wizard', 'druid', 'cleric', 'warlock', 'monk', 'paladin',
           'rogue', 'ranger', 'sorcerer']
races = ['elf', 'human', 'tiefling', 'half-elf', 'halfling', 'half-orc', 'dwarf', 'gnome']
# Active games keyed by chat id (in-memory only — lost on restart).
games = {}
# rangee: [cast range, target type]
# duration: 0 means instantaneous
# damage: [3, 6] = 3d6
# class Spell(lvl = 0, casttime = 1, rangee = {'distance':30, 'target_type': 'target'}, duration = 1,
#             savethrow = 'dexterity', damage = [3, 6], heal = [0, 0], actions = ['damage']):
#     def __init__(self):
#         self.lvl = lvl
#         self.casttime = casttime  # actions
#         self.range = rangee  # feet
#         self.duration = duration  # minutes
#         self.savethrow = savethrow
#         self.damage = damage
#         self.heal = heal
#         self.actions = actions
#
@dnd.message_handler(commands=['open_objects'])
def openobj(m):
    """Show the public-objects menu (units / spells / weapons). PM only."""
    if m.from_user.id != m.chat.id:
        dnd.send_message(m.chat.id, 'Можно использовать только в личке!')
        return
    menu = types.InlineKeyboardMarkup()
    for caption, payload in (('Юниты', 'openobj menu units'),
                             ('Заклинания', 'openobj menu spells'),
                             ('Оружия', 'openobj menu weapons')):
        menu.add(types.InlineKeyboardButton(text=caption, callback_data=payload))
    dnd.send_message(m.chat.id, 'Выберите меню для просмотра.', reply_markup=menu)
@dnd.message_handler(commands=['start'])
def startttpt(m):
    """Reply to /start with a usage hint, but only in a private chat."""
    if m.from_user.id != m.chat.id:
        return
    dnd.send_message(m.chat.id, 'Нажмите на "/", чтобы увидеть список всплывающих команд.')
@dnd.message_handler(commands=['del_object'])
def del_obj(m):
    """
    /del_object <id> — delete one of the user's units/spells/weapons/effects.

    The id is searched in every category (same order as before: units,
    spells, weapons, effects; a later category wins on duplicate ids).
    On a miss, or on a malformed command, a usage hint is sent.
    """
    user = createuser2(m)
    usage = ('Такого объекта у вас не существует! Для удаления оружия/скилла/юнита отправьте мне '
             'эту команду в следующем формате:\n/del_object id\nГде id - айди объекта.')
    try:
        # `obj_id` instead of `id`, which shadowed the builtin.
        obj_id = m.text.split(' ')[1]
        found = None
        category = None
        for cat in ('units', 'spells', 'weapons', 'effects'):
            for ids in user[cat]:
                if ids == obj_id:
                    found = user[cat][ids]
                    category = cat
        if found is not None:
            users.update_one({'id': user['id']},
                             {'$unset': {category + '.' + str(found['id']): 1}})
            dnd.send_message(m.chat.id, 'Успешно удалён объект "' + found['name'] + '"!')
        else:
            dnd.send_message(m.chat.id, usage)
    except:
        # Kept broad on purpose: a malformed command (missing argument)
        # must never crash the bot — it gets the usage hint instead.
        dnd.send_message(m.chat.id, usage)
@dnd.message_handler(commands=['give_access'])
def give_access(m):
    # Owner-only (hard-coded Telegram id): grant alpha access either via
    # '/give_access <user_id>' or by replying to the target's message.
    if m.from_user.id == 441399484:
        try:
            x = m.text.split(' ')
            if len(x) > 1:
                if users2.find_one({'id': int(x[1])}) != None:
                    users2.update_one({'id': int(x[1])}, {'$set': {'alpha_access': True}})
                    dnd.send_message(m.chat.id, 'Доступ открыт.')
            elif m.reply_to_message != None:
                id = m.reply_to_message.from_user.id
                if users2.find_one({'id': id}) != None:
                    users2.update_one({'id': id}, {'$set': {'alpha_access': True}})
                    dnd.send_message(m.chat.id, 'Доступ открыт.')
        except:
            # Bad input (non-numeric id etc.) is ignored silently.
            pass
@dnd.message_handler(commands=['addspell'])
def addspell(m):
    """Create a blank spell for the user (alpha testers only, max 50)."""
    user = createuser2(m)
    if not user['alpha_access']:
        dnd.send_message(m.chat.id, 'У вас нет альфа-доступа! Пишите @Loshadkin.')
        return
    if len(user['spells']) > 50:
        dnd.send_message(m.chat.id, 'Максимальное число заклинаний - 50!')
        return
    new_spell = createspell()
    users2.update_one({'id': user['id']},
                      {'$set': {'spells.' + str(new_spell['id']): new_spell}})
    dnd.send_message(m.chat.id, 'Вы успешно создали заклинание! Теперь настройте его (/set_spell).')
@dnd.message_handler(commands=['create_unit'])
def create_unit_command(m):
    """
    /create_unit — create a blank unit (alpha testers only, max 50).

    Renamed from ``createunit``: the handler shadowed the module-level
    ``createunit(user)`` factory it calls, so the call only resolved
    correctly because the factory is (re)bound later in the module.
    """
    user = createuser2(m)
    if not user['alpha_access']:
        dnd.send_message(m.chat.id, 'У вас нет альфа-доступа! Пишите @Loshadkin.')
        return
    if len(user['units']) > 50:
        dnd.send_message(m.chat.id, 'Максимальное число юнитов - 50!')
        return
    unit = createunit(user)
    users2.update_one({'id': user['id']}, {'$set': {'units.' + str(unit['id']): unit}})
    dnd.send_message(m.chat.id, 'Вы успешно создали юнита! Теперь настройте его (/set_stats).')
@dnd.message_handler(commands=['create_effect'])
def create_effect_command(m):
    """
    /create_effect — create a blank effect (alpha testers only, max 50).

    Renamed from ``createeffect``: the handler shadowed the module-level
    ``createeffect()`` factory it calls (the original call only worked
    because the factory is rebound later in the module).
    """
    user = createuser2(m)
    if not user['alpha_access']:
        dnd.send_message(m.chat.id, 'У вас нет альфа-доступа! Пишите @Loshadkin.')
        return
    if len(user['effects']) > 50:
        dnd.send_message(m.chat.id, 'Максимальное число эффектов - 50!')
        return
    effect = createeffect()
    users2.update_one({'id': user['id']}, {'$set': {'effects.' + str(effect['id']): effect}})
    dnd.send_message(m.chat.id, 'Вы успешно создали эффект! Теперь настройте его (/set_effect).')
@dnd.message_handler(commands=['set_effect'])
def set_effectt(m):
    """Offer a keyboard of the user's effects to pick one for editing. PM only."""
    if m.chat.id != m.from_user.id:
        dnd.send_message(m.chat.id, 'Можно использовать только в личке!')
        return
    user = createuser2(m)
    if not user['alpha_access']:
        dnd.send_message(m.chat.id, 'У вас нет альфа-доступа! Пишите @Loshadkin.')
        return
    buttons = [
        types.InlineKeyboardButton(
            text=user['effects'][ids]['name'],
            callback_data=str(user['effects'][ids]['id']) + ' effect_manage')
        for ids in user['effects']
    ]
    dnd.send_message(m.chat.id, 'Выберите эффект, который хотите отредактировать.',
                     reply_markup=kb_sort(buttons))
@dnd.message_handler(commands=['create_weapon'])
def create_weapon_command(m):
    """
    /create_weapon — create a blank weapon (alpha testers only, max 50).

    Fixes: the 50-item limit previously counted ``user['units']`` instead
    of ``user['weapons']``. Also renamed from ``createweapon``, which
    shadowed the module-level ``createweapon()`` factory it calls.
    """
    user = createuser2(m)
    if not user['alpha_access']:
        dnd.send_message(m.chat.id, 'У вас нет альфа-доступа! Пишите @Loshadkin.')
        return
    if len(user['weapons']) > 50:
        dnd.send_message(m.chat.id, 'Максимальное число орудий - 50!')
        return
    weapon = createweapon()
    users2.update_one({'id': user['id']}, {'$set': {'weapons.' + str(weapon['id']): weapon}})
    dnd.send_message(m.chat.id, 'Вы успешно создали оружие! Теперь настройте его (/set_weapon_stats).')
@dnd.message_handler(commands=['set_stats'])
def set_stats(m):
    """Offer a keyboard of the user's units to pick one for editing. PM only."""
    if m.chat.id != m.from_user.id:
        dnd.send_message(m.chat.id, 'Можно использовать только в личке!')
        return
    user = createuser2(m)
    if not user['alpha_access']:
        dnd.send_message(m.chat.id, 'У вас нет альфа-доступа! Пишите @Loshadkin.')
        return
    buttons = [
        types.InlineKeyboardButton(
            text=user['units'][ids]['name'],
            callback_data=str(user['units'][ids]['id']) + ' edit')
        for ids in user['units']
    ]
    dnd.send_message(m.chat.id, 'Выберите юнита, которого хотите отредактировать.',
                     reply_markup=kb_sort(buttons))
@dnd.message_handler(commands=['set_weapon_stats'])
def set_statsw(m):
    """Offer a keyboard of the user's weapons to pick one for editing. PM only."""
    if m.chat.id != m.from_user.id:
        dnd.send_message(m.chat.id, 'Можно использовать только в личке!')
        return
    user = createuser2(m)
    if not user['alpha_access']:
        dnd.send_message(m.chat.id, 'У вас нет альфа-доступа! Пишите @Loshadkin.')
        return
    buttons = [
        types.InlineKeyboardButton(
            text=user['weapons'][ids]['name'],
            callback_data=str(user['weapons'][ids]['id']) + ' weapon_manage')
        for ids in user['weapons']
    ]
    dnd.send_message(m.chat.id, 'Выберите оружие, которое хотите отредактировать.',
                     reply_markup=kb_sort(buttons))
@dnd.message_handler(commands=['set_spell'])
def set_spell_cmd(m):
    """
    /set_spell — pick one of the user's spells to edit. PM only.

    Renamed from ``set_stats``: the original duplicated the name of the
    /set_stats handler above, silently rebinding that module-level name.
    Decorator registration is unaffected by the rename.
    """
    if m.chat.id != m.from_user.id:
        dnd.send_message(m.chat.id, 'Можно использовать только в личке!')
        return
    user = createuser2(m)
    if not user['alpha_access']:
        dnd.send_message(m.chat.id, 'У вас нет альфа-доступа! Пишите @Loshadkin.')
        return
    kbs = []
    kb = types.InlineKeyboardMarkup()
    for ids in user['spells']:
        spell = user['spells'][ids]
        kbs.append(types.InlineKeyboardButton(text=spell['name'], callback_data=str(spell['id']) + ' spell_manage'))
    kb = kb_sort(kbs)
    dnd.send_message(m.chat.id, 'Выберите спелл, который хотите отредактировать.', reply_markup=kb)
@dnd.message_handler(content_types=['photo'])
def msgsp(m):
    # Photo messages: when the user is mid-edit of a unit's 'photo' stat
    # (in a private chat), store the photo file_id as the unit portrait
    # and clear the edit state.
    user = createuser2(m)
    if user['current_stat'] != None and user['current_unit'] != None and m.from_user.id == m.chat.id:
        unit = user['units'][str(user['current_unit'])]
        if user['current_stat'] == 'photo':
            # NOTE(review): m.photo[0] is the smallest size Telegram sends;
            # confirm that is intended (m.photo[-1] would be full size).
            users2.update_one({'id': user['id']}, {
                '$set': {'units.' + str(user['current_unit']) + '.' + user['current_stat']: m.photo[0].file_id}})
            user = createuser2(m)
            unit = user['units'][str(user['current_unit'])]
            users2.update_one({'id': user['id']}, {'$set': {'current_stat': None, 'current_unit': None}})
            dnd.send_message(m.chat.id, 'Новое фото установлено!')
            sendunitedit(m.chat.id, unit)
@dnd.message_handler(commands=['create_map'])
def mapp(m):
    # /create_map [teams]: create a game in this chat with the given
    # number of teams (default and minimum 2) and post add-to-team
    # buttons for the Master. No-op if a game already exists here.
    if m.chat.id not in games:
        x = m.text.split(' ')
        teams = 2
        if len(x)>1:
            try:
                teams = int(x[1])
            except:
                # Non-numeric argument — keep the default of 2 teams.
                pass
        if teams < 2:
            teams = 2
        games.update(creategame(m))
        kb=types.InlineKeyboardMarkup()
        i=1
        while i <= teams:
            kb.add(types.InlineKeyboardButton(text = 'Добавить в команду '+str(i), callback_data = 'addt '+str(i)+' '+str(m.chat.id)))
            i+=1
        dnd.send_message(m.chat.id, 'Игра была создана! Теперь Мастер должен добавить в неё персонажей.', reply_markup=kb)
@dnd.message_handler(commands=['start_map'])
def startmap(m):
    # /start_map: begin combat for this chat's game — roll initiative,
    # announce the turn order, normalise every unit's combat state,
    # refresh weapon/spell copies and hand out the first turn.
    if m.chat.id in games:
        user = createuser2(m)
        game = games[m.chat.id]
        # Only the Master may start, and only once.
        if m.from_user.id != game['master']['id']:
            dnd.send_message(m.chat.id, 'Только Мастер может запустить игру!')
            return
        if game['started'] == True:
            dnd.send_message(m.chat.id, 'Игра уже идёт!')
            return
        game['started'] = True
        text = ''
        # Roll 1d20 initiative for every unit and announce the rolls.
        for ids in game['units']:
            unit = game['units'][ids]
            min = 1
            max = 20
            x = random.randint(min,max)
            unit.update({'initiative': x})
            text += 'Инициатива '+unit['name']+': ('+str(min)+'d'+str(max)+') = '+str(x)+'\n'
        dnd.send_message(m.chat.id, text)
        text = 'Очерёдность ходов:\n'
        time.sleep(4)
        # Build turn order by descending initiative (selection-sort
        # style: repeatedly take the highest remaining roll).
        turned = []
        while len(turned) < len(game['units']):
            i = -10
            nowu = None
            for ids in game['units']:
                unit = game['units'][ids]
                if unit['id'] not in turned:
                    if unit['initiative'] >= i:
                        nowu = unit
                        i = unit['initiative']
            if nowu != None:
                turned.append(nowu['id'])
                nowu.update({'turn':len(turned)})
                text += str(len(turned))+'й: '+nowu['name']+'\n'
        dnd.send_message(m.chat.id, text)
        # Reset per-combat state on every unit.
        for ids in game['units']:
            game['units'][ids].update({'alive':True,
                                       'current_act':None,
                                       'speech_wait':False,
                                       'speeched':False,
                                       'freeatk':1,
                                       'done_turn':False,
                                       'stunned':0})
        # Refresh each unit's equipped weapon from the owner's saved
        # weapons; a missing 'downloaded' key marks a stale copy.
        for ids in game['units']:
            n = False
            weapon = game['units'][ids]['current_weapon']
            try:
                weapon['downloaded']
            except:
                n = True
            if n:
                if weapon != None:
                    try:
                        game['units'][ids]['current_weapon'] = user['weapons'][str(weapon['id'])]
                    except:
                        pass
        # Same refresh for spells: collect (spell-key, unit-key, fresh
        # spell) triples, then write them back in a second pass.
        sp = []
        un1 = []
        dp = []
        for ids in game['units']:
            n = False
            unit = game['units'][ids]
            for idss in unit['spells']:
                spell = unit['spells'][idss]
                try:
                    spell['downloaded']
                except:
                    n = True
                if n:
                    try:
                        sp.append(idss)
                        un1.append(ids)
                        try:
                            dp.append(user['spells'][str(spell['id'])])
                        except:
                            dp.append(user['spells'][spell['id']])
                    except:
                        # Report unexpected failures to the owner.
                        dnd.send_message(441399484, traceback.format_exc())
        i = 0
        for ids in dp:
            game['units'][un1[i]]['spells'][sp[i]] = ids
            i += 1
        # Units with no weapon fight with bare fists.
        for ids in game['units']:
            if game['units'][ids]['current_weapon'] == None:
                game['units'][ids]['current_weapon'] = {
                    'id':randomid(),
                    'name':'Кулак',
                    'maxdmg':2,
                    'mindmg':1,
                    'dmg_buff':0,
                    'accuracy_buff':0
                }
        # Give every team one shared 4-digit position code.
        teams = {}
        for ids in game['units']:
            unit = game['units'][ids]
            if unit['team'] not in teams:
                pc = poscodegen(key = 'position_code', d = teams)
                teams.update({unit['team']:{'team':unit['team'], 'position_code':pc}})
                print(pc)
                unit.update({'position_code':pc})
            else:
                unit.update({'position_code': teams[unit['team']]['position_code']})
        next_turn(game)
def poscodegen(key = None, d = None, game = None):
    """
    Generate a fresh 4-digit position code not used by any existing object.

    :param key: dict key under which existing codes are stored in *d*.
    :param d: mapping of objects; existing codes are read as d[ids][key].
             Takes precedence over *game* when both are given.
    :param game: alternatively, a game dict whose units carry
                 'position_code'.
    :return: a unique 4-character digit string, or None when neither
             *d* nor *game* is supplied.
    """
    if d is None and game is None:
        return None
    # Collect the codes that are already taken.
    if d is not None:
        taken = [d[ids][key] for ids in d]
    else:
        taken = [game['units'][ids]['position_code'] for ids in game['units']]
    digits = '1234567890'
    # Re-roll until the 4-digit code is unique.
    while True:
        code = ''.join(random.choice(digits) for _ in range(4))
        if code not in taken:
            return code
def uuu():
    # Return a template dict for a blank unit document.
    # NOTE(review): 'owner': id stores the *builtin* id function — there is
    # no local `id` variable in scope here. Confirm the intended value
    # (likely the owner's user id, which is not available in this scope).
    # NOTE(review): 'strenght' is a typo kept intentionally — the same
    # spelling is used throughout the module.
    return {
        'id': 0,
        'name': 0,
        'class': 0,
        'race': 0,
        'hp': 0,
        'maxhp':0,
        'strenght': 0,
        'dexterity': 0,
        'constitution': 0,
        'intelligence': 0,
        'wisdom': 0,
        'charisma': 0,
        'armor_class': random.randint(8, 16),
        'initiative': 10,
        'speed': 30,
        'photo': None,
        'death_saves(success)': 0,
        'death_saves(fail)': 0,
        'spells': {},
        'inventory': [],
        'current_weapon': None,
        'owner': id,
        'player': None,
        'max_spells':{},
        'alive':True,
        'freeatk':1
    }
@dnd.message_handler(commands=['delete'])
def dell(m):
    # /delete: remove this chat's game. Allowed for the game Master or a
    # chat administrator/creator. Cancels the turn timer and flags the
    # game dict as killed for any code still holding a reference to it.
    if m.chat.id in games:
        game = games[m.chat.id]
        user = dnd.get_chat_member(m.chat.id, m.from_user.id)
        admins = ['administrator', 'creator']
        if m.from_user.id == game['master']['id'] or user.status in admins:
            try:
                game['ctimer'].cancel()
            except:
                # No timer / already cancelled — report to owner for debugging.
                dnd.send_message(441399484, traceback.format_exc())
            del games[game['id']]
            game['kill'] = True
            dnd.send_message(m.chat.id, 'Игра удалена.')
@dnd.message_handler()
def msgs(m):
    # Catch-all text handler. Routes free text to whichever multi-step
    # dialog the user currently has open — unit/spell/weapon/effect
    # editing, open-object sharing, effect attachment, adding units to a
    # game — plus in-game speech. Each branch clears its 'current_*'
    # state fields when finished. The outer try reports any unexpected
    # failure to the chat instead of crashing the bot.
    try:
        user = createuser2(m)
        # In-game speech: forward the text for any unit awaiting a speech.
        if user['cgame'] != None:
            try:
                game = games[user['cgame']]
                for ids in game['units']:
                    if game['units'][ids]['speech_wait']:
                        say_speech(game['units'][ids], game, m.text)
                        # NOTE(review): `unit` here is whatever the loop
                        # variable last referenced in older revisions — it
                        # is not bound in this scope; relies on the broad
                        # except below. Confirm intended behaviour.
                        kb = mainmenu(game, unit)
                        dnd.send_message(unit['player'], 'Выберите действие персонажа '+unit['name']+'.', reply_markup=kb)
            except:
                pass
        # --- Unit stat editing dialog -------------------------------
        if user['current_stat'] != None and user['current_unit'] != None and m.from_user.id == m.chat.id:
            try:
                unit = user['units'][str(user['current_unit'])]
            except:
                return
            numbervalues = ['hp', 'maxhp', 'strenght', 'dexterity', 'constitution', 'intelligence',
                            'wisdom', 'charisma', 'armor_class', 'speed', 'current_weapon']
            blist = ['inventory', 'spells', 'photo', 'addcast', 'max_spells']
            if user['current_stat'] not in blist:
                # Simple scalar stats: validate int where required, then
                # write the value and clear the edit state.
                test = False
                if user['current_stat'] in numbervalues:
                    test = True
                val = m.text
                if test:
                    try:
                        val = int(m.text)
                    except:
                        dnd.send_message(m.chat.id, 'Нужно значение типа int!')
                        return
                if user['current_stat'] != 'player' and user['current_stat'] != 'current_weapon':
                    users2.update_one({'id': user['id']},
                                      {'$set': {'units.' + str(user['current_unit']) + '.' + user['current_stat']: val}})
                elif user['current_stat'] == 'player':
                    try:
                        users2.update_one({'id': user['id']},
                                          {'$set': {'units.' + str(user['current_unit']) + '.' + user['current_stat']: int(val)}})
                    except:
                        dnd.send_message(m.chat.id, 'Неверный формат! Пришлите мне айди игрока!')
                        return
                elif user['current_stat'] == 'current_weapon':
                    try:
                        weapon = user['weapons'][m.text]
                        users2.update_one({'id': user['id']},
                                          {'$set': {'units.' + str(user['current_unit']) + '.' + user['current_stat']: weapon}})
                    except:
                        dnd.send_message(m.chat.id, 'Неверный формат! Пришлите мне айди оружия!')
                        return
                # Re-read the user before clearing state so the old
                # current_stat/current_unit values stay readable below.
                user = createuser2(m)
                unit = user['units'][str(user['current_unit'])]
                users2.update_one({'id': user['id']}, {'$set': {'current_stat': None, 'current_unit': None}})
                dnd.send_message(m.chat.id, unit['name'] + ': успешно изменена характеристика "' + user[
                    'current_stat'] + '" на "' + str(val) + '"!')
                sendunitedit(m.chat.id, unit)
            else:
                # Complex stats (inventory list, spell casts, spell slots).
                if user['current_stat'] == 'inventory':
                    # Comma-separated item list.
                    inv = []
                    t = m.text.split(', ')
                    for ids in t:
                        inv.append(ids)
                    tt = ''
                    for ids in inv:
                        tt += ids + ', '
                    tt = tt[:len(tt) - 2]
                    users2.update_one({'id': user['id']},
                                      {'$set': {'units.' + str(user['current_unit']) + '.' + user['current_stat']: inv}})
                    user = createuser2(m)
                    unit = user['units'][str(user['current_unit'])]
                    users2.update_one({'id': user['id']}, {'$set': {'current_stat': None, 'current_unit': None}})
                    dnd.send_message(m.chat.id, unit['name'] + ': инвентарь юнита успешно изменён на ' + tt + '!')
                    sendunitedit(m.chat.id, unit)
                elif user['current_stat'] == 'addcast':
                    # Attach one of the user's spells (by id) to the unit.
                    try:
                        spell = user['spells'][m.text]
                    except:
                        dnd.send_message(m.chat.id, 'Такого спелла не существует!')
                        users2.update_one({'id': user['id']}, {'$set': {'current_stat': None, 'current_unit': None}})
                        return
                    users.update_one({'id':user['id']},{'$set':{'units.'+str(user['current_unit'])+'.spells.'+str(spell['id']):spell}})
                    dnd.send_message(m.chat.id, 'Заклинание "'+spell['name']+'" успешно добавлено к юниту!')
                    users2.update_one({'id': user['id']}, {'$set': {'current_stat': None, 'current_unit': None}})
                    sendunitedit(m.chat.id, unit)
                elif user['current_stat'] == 'max_spells':
                    # Spell slots per level, format: "lvl:count lvl:count";
                    # "inf" is accepted as an unlimited count.
                    try:
                        er = 'Ошибки:\n'
                        succ = ''
                        d = {}
                        t = m.text.split(' ')
                        for ids in t:
                            lvl = ids.split(':')[0]
                            val = ids.split(':')[1]
                            try:
                                d.update({lvl:int(val)})
                                succ += lvl+' лвл: '+str(val)+' заклинаний\n'
                            except:
                                if val == "inf":
                                    d.update({lvl:val})
                                    succ += lvl+' лвл: '+str(val)+' заклинаний\n'
                                else:
                                    er += 'Неверный параметр значения заклинания уровня '+lvl+'!\n'
                        for ids in d:
                            users.update_one({'id':user['id']},{'$set':{'units.'+str(unit['id'])+'.max_spells.'+ids:d[ids]}})
                        users2.update_one({'id': user['id']}, {'$set': {'current_stat': None, 'current_unit': None}})
                        dnd.send_message(m.chat.id, 'Успешно установленные значения:\n'+succ+'\n'+er)
                        sendunitedit(m.chat.id, unit)
                    except:
                        dnd.send_message(m.chat.id, 'Неверный формат!')
        # --- Spell stat editing dialog ------------------------------
        elif user['current_spell'] != None and user['current_spellstat'] != None and m.from_user.id == m.chat.id:
            val = m.text
            attributes = ['strenght', 'dexterity', 'wisdom', 'charisma', 'constitution', 'intelligence']
            try:
                spell = user['spells'][str(user['current_spell'])]
            except:
                dnd.send_message(441399484, traceback.format_exc())
                return
            r = False
            numbervalues = ['damage', 'heal']
            targettypes = ['all_enemy', 'all_ally', 'enemy', 'ally', 'self', 'all', 'ally!self', 'all_ally!self', 'all!self', 'any', 'any!self']
            damagetypes = ['target', 'aoe']
            test1 = False
            if user['current_spellstat'] == 'savethrow':
                # Free-form text like "аттрибут: X сложность: N" — both
                # parts are optional and parsed independently.
                st = spell['savethrow']
                text = m.text.lower()
                if 'аттрибут' in m.text.lower():
                    val = text.split('аттрибут')[1]
                    val = val.split(':')[1]
                    if val[0] == ' ':
                        val = val[1:]
                    print(val)
                    val = val.split(' ')[0]
                    if val not in attributes:
                        tt = ''
                        for ids in attributes:
                            tt += '`'+ids+'` '
                        dnd.send_message(m.chat.id, 'Необходимо выставить одно из следующих значений:\n'+tt, parse_mode = 'markdown')
                        return
                    else:
                        users2.update_one({'id': user['id']},
                                          {'$set': {'spells.' + str(user['current_spell']) + '.savethrow.attribute': val}})
                        dnd.send_message(m.chat.id, 'Успешно изменено значение "аттрибут" на '+str(val)+'!')
                        r = True
                if 'сложность' in m.text.lower():
                    val = text.split('сложность')[1]
                    val = val.split(':')[1]
                    if val[0] == ' ':
                        val = val[1:]
                    print(val)
                    val = val.split(' ')[0]
                    try:
                        val = int(val)
                    except:
                        dnd.send_message(m.chat.id, 'Необходимо значение типа int!')
                        return
                    users2.update_one({'id': user['id']},
                                      {'$set': {'spells.' + str(user['current_spell']) + '.savethrow.value': val}})
                    dnd.send_message(m.chat.id, 'Успешно изменено значение "сложность" на '+str(val)+'!')
                    r = True
                if r:
                    user = createuser2(m)
                    spell = user['spells'][str(user['current_spell'])]
                    users2.update_one({'id': user['id']}, {'$set': {'current_spellstat': None, 'current_spell': None}})
                    sendspelledit(m.chat.id, spell)
                    return
            elif user['current_spellstat'] == 'target_type':
                test1 = True
            if test1:
                val = m.text.lower()
                if val not in targettypes:
                    tt = ''
                    for ids in targettypes:
                        tt += '`'+ids+'` '
                    dnd.send_message(m.chat.id, 'Необходимо выставить одно из следующих значений:\n'+tt, parse_mode='markdown')
                    return
            if user['current_spellstat'] == 'damage_type':
                val = m.text.lower()
                if val not in damagetypes:
                    tt = ''
                    for ids in damagetypes:
                        tt += '`'+ids+'` '
                    dnd.send_message(m.chat.id, 'Необходимо выставить одно из следующих значений:\n'+tt, parse_mode='markdown')
                    return
            test2 = False
            if user['current_spellstat'] in numbervalues:
                test2 = True
            if test2:
                # damage/heal accept either a plain int or dice "XdY".
                try:
                    val = int(m.text)
                except:
                    if 'd' in m.text:
                        try:
                            a = m.text.split('d')
                            int(a[0])
                            int(a[1])
                        except:
                            dnd.send_message(m.chat.id, 'Нужно значение типа int, если оно статичное, или формат значения "XdY", '+
                                             'где X - количество кидаемых кубов, а Y - максимальное значение каждого куба. Пример: 1d6 - '+
                                             'будет выпадать случайное значение от 1 до 6.')
                            return
                    else:
                        dnd.send_message(m.chat.id, 'Нужно значение типа int, если оно статичное, или формат значения "XdY", '+
                                         'где X - количество кидаемых кубов, а Y - максимальное значение каждого куба. Пример: 1d6 - '+
                                         'будет выпадать случайное значение от 1 до 6.')
                        return
            users2.update_one({'id': user['id']},
                              {'$set': {'spells.' + str(user['current_spell']) + '.' + user['current_spellstat']: val}})
            user = createuser2(m)
            spell = user['spells'][str(user['current_spell'])]
            users2.update_one({'id': user['id']}, {'$set': {'current_spellstat': None, 'current_spell': None}})
            dnd.send_message(m.chat.id, spell['name'] + ': успешно изменена характеристика "' + user[
                'current_spellstat'] + '" на "' + str(val) + '"!')
            sendspelledit(m.chat.id, spell)
        # --- Weapon stat editing dialog -----------------------------
        elif user['current_weapon'] != None and user['current_weaponstat'] != None and m.from_user.id == m.chat.id:
            val = m.text
            try:
                weapon = user['weapons'][str(user['current_weapon'])]
            except:
                return
            numbervalues = ['maxdmg', 'mindmg', 'dmg_buff', 'accuracy_buff']
            test = False
            test2 = False
            if user['current_weaponstat'] == 'range':
                test2 = True
            if user['current_weaponstat'] in numbervalues:
                test = True
            val = m.text
            if test:
                try:
                    val = int(m.text)
                except:
                    dnd.send_message(m.chat.id, 'Нужно значение типа int!')
                    return
            if test2:
                # 'range' accepts only melee/ranged.
                if m.text.lower() not in ['melee', 'ranged']:
                    dnd.send_message(m.chat.id, 'Нужно одно из этих значений: `melee`, `ranged`.', parse_mode = 'markdown')
                    return
                else:
                    val = m.text.lower()
            users2.update_one({'id': user['id']},
                              {'$set': {'weapons.' + str(user['current_weapon']) + '.' + user['current_weaponstat']: val}})
            user = createuser2(m)
            weapon = user['weapons'][str(user['current_weapon'])]
            users2.update_one({'id': user['id']}, {'$set': {'current_weaponstat': None, 'current_weapon': None}})
            dnd.send_message(m.chat.id, weapon['name'] + ': успешно изменена характеристика "' + user[
                'current_weaponstat'] + '" на "' + str(val) + '"!')
            sendweaponedit(m.chat.id, weapon)
        # --- Publishing an object to the shared pool ----------------
        elif user['current_openobj'] != None:
            co = user['current_openobj']
            try:
                obj = user[co][m.text]
            except:
                dnd.send_message(m.chat.id, 'Такого объекта ('+co+') у вас не существует! Отменяю добавление.')
                users2.update_one({'id': user['id']}, {'$set': {'current_openobj': None}})
                return
            if len(open_objects.find_one({})[co]) >= 75:
                dnd.send_message(m.chat.id, 'Лимит объектов общего доступа - 75! Пишите @Loshadkin.')
                return
            open_objects.update_one({},{'$set':{co+'.'+str(obj['id']):obj}})
            dnd.send_message(m.chat.id, 'Объект ('+co+') ('+obj['name']+') успешно добавлен в общий доступ!')
            users2.update_one({'id': user['id']}, {'$set': {'current_openobj': None}})
        # --- Effect condition editing -------------------------------
        elif user['current_obj'] != None and user['current_condition'] != None:
            allow = []
            nxt = False
            err = ''
            txt = ''
            if user['current_condition'] == 'target_stats' or user['current_condition'] == 'unit_stats':
                # Space-separated "param:value" pairs, validated against
                # the blank unit template's keys.
                unit = uuu()
                x = m.text.split(' ')
                sl = {}
                for ids in x:
                    try:
                        param = ids.split(':')[0]
                        if param in unit:
                            value = ids.split(':')[1]
                            sl.update({param:value})
                            txt += 'Успешно изменен параметр "'+param+'" на "'+value+'"!\n'
                        else:
                            err += 'Ошибка в добавлении '+ids+': параметра "'+param+'" у юнита не существует!\n'
                    except:
                        err += 'Ошибка в добавлении '+ids+'!\n'
                for ids in sl:
                    users.update_one({'id':user['id']},{'$set':{'effects.'+user['current_obj']+'.condition.'+user['current_condition']+'.'+ids:sl[ids]}})
                if txt == '':
                    txt = 'Не удалось выставить ни одного параметра!'
                if err != '':
                    txt += '\n\nОшибки:\n'+err
                users.update_one({'id':user['id']},{'$set':{'current_condition':None, 'current_obj':None}})
                dnd.send_message(m.chat.id, txt)
            elif user['current_condition'] == 'chance':
                users.update_one({'id':user['id']},{'$set':{'effects.'+user['current_obj']+'.condition.'+user['current_condition']:m.text}})
                users.update_one({'id':user['id']},{'$set':{'current_condition':None, 'current_obj':None}})
                dnd.send_message(m.chat.id, 'Успешно изменено значение параметра "chance" на "'+m.text+'"!')
        # --- Effect stat editing ------------------------------------
        elif user['current_effect'] != None and user['current_effectstat'] != None:
            try:
                x = m.text
                if user['current_effectstat'] == 'duration':
                    try:
                        x = int(m.text)
                    except:
                        dnd.send_message(m.chat.id, 'Требуется значение типа int!')
                        return
                if user['current_effectstat'] == 'effect':
                    allow = ['stun', 'weakness', 'kill', 'mark', 'bonus_accuracy', 'bonus_strenght', 'bonus_dexterity', 'bonus_wisdom',
                             'bonus_charisma', 'bonus_constitution', 'bonus_intelligence', 'bonus_armor', 'bonus_maxhp', 'bonus_hp', 'crit']
                    if m.text not in allow:
                        dt = ''
                        for ids in allow:
                            dt += '`'+ids.replace('_', '_')+'` '
                        dnd.send_message(m.chat.id, 'Для выставления "'+user['current_effectstat']+'" требуется одно из следующих значений:\n'+
                                         dt, parse_mode='markdown')
                        return
                if user['current_effectstat'] == 'target':
                    allow = ['target', 'unit']
                    if m.text not in allow:
                        dt = ''
                        for ids in allow:
                            dt += '`'+ids.replace('_', '_')+'` '
                        dnd.send_message(m.chat.id, 'Для выставления "'+user['current_effectstat']+'" требуется одно из следующих значений:\n'+
                                         dt, parse_mode='markdown')
                        return
                users.update_one({'id':user['id']},{'$set':{'effects.'+str(user['current_effect'])+'.'+str(user['current_effectstat']):x}})
                users.update_one({'id':user['id']},{'$set':{'current_effect':None, 'current_effectstat':None}})
                dnd.send_message(m.chat.id, 'Успешно изменён параметр эффекта "'+user['current_effectstat']+'" на "'+m.text+'"!')
            except:
                users.update_one({'id':user['id']},{'$set':{'current_effect':None, 'current_effectstat':None}})
                dnd.send_message(441399484, traceback.format_exc())
        # --- Attaching an effect to a spell/weapon ------------------
        elif user['current_obj_to_effect'] != None:
            try:
                effect = user['effects'][m.text]
            except:
                dnd.send_message(m.chat.id, 'Такого эффекта не существует! Отменяю добавление.')
                users.update_one({'id':user['id']},{'$set':{'current_obj_to_effect':None}})
                return
            # The target may be a spell or a weapon — try both.
            try:
                obj = user['spells'][str(user['current_obj_to_effect'])]
                w = 'spells'
            except:
                try:
                    obj = user['weapons'][str(user['current_obj_to_effect'])]
                    w = 'weapons'
                except:
                    dnd.send_message(441399484, str(user['current_obj_to_effect']))
                    dnd.send_message(m.chat.id, 'Ошибка! Отменяю добавление эффекта.')
                    users.update_one({'id':user['id']},{'$set':{'current_obj_to_effect':None}})
                    return
            users.update_one({'id':user['id']},{'$set':{w+'.'+str(obj['id'])+'.effects.'+str(effect['id']):effect}})
            dnd.send_message(m.chat.id, 'Эффект успешно добавлен!')
            users.update_one({'id':user['id']},{'$set':{'current_obj_to_effect':None}})
        # --- Adding a unit (by id) to a team in a pending game ------
        elif user['current_game'] != None and user['current_team'] != None:
            try:
                unit = int(m.text)
            except:
                return
            cunit = None
            for ids in user['units']:
                if user['units'][ids]['id'] == unit:
                    cunit = user['units'][ids]
            if cunit == None:
                dnd.send_message(m.chat.id, 'Такого юнита у вас не существует!')
                return
            try:
                games[user['current_game']]['units'].update({cunit['id']:cunit})
                u = games[user['current_game']]['units'][cunit['id']]
                u.update({'team':user['current_team']})
                dnd.send_message(m.chat.id, 'Юнит '+cunit['name']+' успешно добавлен в команду '+user['current_team']+'!')
                users.update_one({'id':user['id']},{'$set':{'current_game':None, 'current_team':None}})
            except:
                dnd.send_message(441399484, traceback.format_exc())
                dnd.send_message(m.chat.id, 'Игры не существует! Отменяю добавление юнита.')
                users.update_one({'id':user['id']},{'$set':{'current_game':None, 'current_team':None}})
    except:
        print(traceback.format_exc())
        dnd.send_message(m.chat.id, 'error!')
@dnd.callback_query_handler(func=lambda call: True)
def inline(call):
    """Central router for every inline-keyboard callback of the bot.

    Dispatches on substrings of ``call.data`` (order of the elif chain
    matters, since several prefixes overlap).  Branches cover: unit/spell/
    weapon/effect editing, the public object library ('openobj'), in-game
    actions ('gameact' / 'selectact'), and team management ('addt').
    Any unhandled exception is forwarded to the admin chat (441399484)
    instead of crashing the polling thread.
    """
    try:
        user = createuser2(call)
        # --- 'edit <unit_id>': open the stat-editing keyboard for a unit ---
        if 'edit' in call.data:
            try:
                unit = user['units'][call.data.split(' ')[0]]
            except:
                dnd.answer_callback_query(call.id, 'Такого юнита не существует!', show_alert=True)
                return
            kb = create_edit_kb(unit)
            dnd.send_message(call.message.chat.id, 'Нажмите на характеристику для её изменения.', reply_markup=kb)
        # --- 'openobj <act> ...': public object library (shared units/spells/weapons) ---
        elif 'openobj' in call.data:
            objs = open_objects.find_one({})
            act = call.data.split(' ')[1]
            if act == 'menu':
                # Sub-menu for one object category.
                what = call.data.split(' ')[2]
                if what == 'weapons':
                    t = 'оружие'
                elif what == 'spells':
                    t = 'заклинание'
                elif what == 'units':
                    t = 'юнита'
                kb = types.InlineKeyboardMarkup()
                kb.add(types.InlineKeyboardButton(text = 'Добавить '+t, callback_data = 'openobj add '+what))
                kb.add(types.InlineKeyboardButton(text = 'Загрузить '+t, callback_data = 'openobj load '+what))
                kb.add(types.InlineKeyboardButton(text = 'Главное меню', callback_data = 'openobj mainmenu'))
                medit('Выберите действие.', call.message.chat.id, call.message.message_id, reply_markup = kb)
            elif act == 'load':
                # List all public objects of the category as download buttons.
                what = call.data.split(' ')[2]
                kb = types.InlineKeyboardMarkup()
                kbs = []
                kb = types.InlineKeyboardMarkup()
                for ids in objs[what]:
                    obj = objs[what][ids]
                    kbs.append(types.InlineKeyboardButton(text = obj['name'], callback_data = 'openobj download '+what+' '+str(obj['id'])))
                kb = kb_sort(kbs)
                kb.add(types.InlineKeyboardButton(text = 'Главное меню', callback_data = 'openobj mainmenu'))
                medit('Выберите обьект для загрузки.', call.message.chat.id, call.message.message_id, reply_markup = kb)
            elif act == 'add':
                # Ask the user for the id of an own object to publish.
                what = call.data.split(' ')[2]
                if what == 'weapons':
                    t = 'оружия'
                elif what == 'spells':
                    t = 'заклинания'
                elif what == 'units':
                    t = 'юнита'
                users.update_one({'id':call.from_user.id},{'$set':{'current_openobj':what}})
                dnd.send_message(call.message.chat.id, 'Теперь пришлите мне id '+t+', которого хотите добавить в публичный доступ.')
            elif act == 'download':
                # Copy a public object into the user's own collection with a
                # fresh id, marking nested spells/weapons as downloaded.
                what = call.data.split(' ')[2]
                obj = objs[what][call.data.split(' ')[3]]
                if what == 'units':
                    newobj = createunit(userid = call.from_user.id)
                elif what == 'weapons':
                    newobj = createweapon()
                elif what == 'spells':
                    newobj = createspell()
                for ids in obj:
                    # Keep the freshly generated 'id'/'owner'; copy everything else.
                    if ids != 'id' and ids != 'owner':
                        newobj[ids] = obj[ids]
                try:
                    for ids in newobj['spells']:
                        newobj['spells'][ids].update({'downloaded':True})
                except:
                    pass
                try:
                    newobj['current_weapon'].update({'downloaded':True})
                except:
                    pass
                # Hard cap of 50 objects per category per user.
                if len(user[what]) >= 50:
                    medit('Максимальное число объектов одного типа - 50!', call.message.chat.id, call.message.message_id)
                    return
                users.update_one({'id': user['id']}, {'$set': {what+'.' + str(newobj['id']): newobj}})
                medit('Объект "'+newobj['name']+'" успешно добавлен к вам в коллекцию!', call.message.chat.id, call.message.message_id)
            elif act == 'mainmenu':
                kb = types.InlineKeyboardMarkup()
                kb.add(types.InlineKeyboardButton(text = 'Юниты', callback_data = 'openobj menu units'))
                kb.add(types.InlineKeyboardButton(text = 'Заклинания', callback_data = 'openobj menu spells'))
                kb.add(types.InlineKeyboardButton(text = 'Оружия', callback_data = 'openobj menu weapons'))
                medit('Выберите меню для просмотра.', call.message.chat.id, call.message.message_id, reply_markup = kb)
        # --- 'addcast'/'delcast <unit_id>': add or remove a spell on a unit ---
        elif 'addcast' in call.data or 'delcast' in call.data:
            unit = user['units'][call.data.split(' ')[1]]
            what = call.data.split(' ')[0]
            txt = ' Список текущих заклинаний:\n'
            for ids in unit['spells']:
                spel = unit['spells'][ids]
                txt += spel['name']+': '+str(spel['lvl'])+' лвл\n'
            if what == 'addcast':
                # Remember the pending edit; the spell id arrives as a text message.
                users2.update_one({'id': user['id']}, {'$set': {'current_unit': unit['id'], 'current_stat': what}})
                dnd.send_message(call.message.chat.id, 'Теперь пришлите мне ID заклинания, которое хотите добавить.'+txt)
            elif what == 'delcast':
                kb = types.InlineKeyboardMarkup()
                for ids in user['units'][str(unit['id'])]['spells']:
                    spell = user['units'][str(unit['id'])]['spells'][ids]
                    kb.add(types.InlineKeyboardButton(text = spell['name'], callback_data = 'delete_spell '+str(unit['id'])+' '+str(spell['id'])))
                medit('Нажмите на спелл для его удаления.', call.message.chat.id, call.message.message_id, reply_markup = kb)
        # --- 'addeffect'/'deleffect <obj_id>': manage effects of a spell OR weapon ---
        elif 'addeffect' in call.data or 'deleffect' in call.data:
            # The object id may belong to either collection; try spells first.
            try:
                obj = user['spells'][call.data.split(' ')[1]]
                w = 'spells'
            except:
                try:
                    obj = user['weapons'][call.data.split(' ')[1]]
                    w = 'weapons'
                except:
                    dnd.answer_callback_query(call.id, 'Обьекта не существует!')
                    return
            what = call.data.split(' ')[0]
            txt = ' Список текущих эффектов:\n'
            for ids in obj['effects']:
                ef = obj['effects'][ids]
                txt += ef['name']+'\n'
            if what == 'addeffect':
                users2.update_one({'id': user['id']}, {'$set': {'current_obj_to_effect': obj['id']}})
                dnd.send_message(call.message.chat.id, 'Теперь пришлите мне ID эффекта, который хотите добавить.'+txt)
            elif what == 'deleffect':
                kb = types.InlineKeyboardMarkup()
                for ids in user[w][str(obj['id'])]['effects']:
                    ef = user[w][str(obj['id'])]['effects'][ids]
                    kb.add(types.InlineKeyboardButton(text = ef['name'], callback_data = 'delete_effect '+str(ef['id'])+' '+str(obj['id'])))
                medit('Нажмите на эффект для его удаления.', call.message.chat.id, call.message.message_id, reply_markup = kb)
        # --- 'delete_spell <unit_id> <spell_id>' ---
        elif 'delete_spell' in call.data:
            unit = user['units'][call.data.split(' ')[1]]
            if call.data.split(' ')[2] in unit['spells']:
                users.update_one({'id':user['id']},{'$unset':{'units.'+str(unit['id'])+'.spells.'+call.data.split(' ')[2]: 1}})
                medit('Спелл удалён!', call.message.chat.id, call.message.message_id)
                sendunitedit(call.message.chat.id, unit)
        # --- 'delete_effect <effect_id> <obj_id>' ---
        elif 'delete_effect' in call.data:
            try:
                obj = user['spells'][call.data.split(' ')[2]]
                w = 'spells'
            except:
                try:
                    obj = user['weapons'][call.data.split(' ')[2]]
                    w = 'weapons'
                except:
                    dnd.answer_callback_query(call.id, 'Обьекта не существует!')
                    return
            if call.data.split(' ')[1] in obj['effects']:
                users.update_one({'id':user['id']},{'$unset':{w+'.'+str(obj['id'])+'.effects.'+call.data.split(' ')[1]: 1}})
                medit('Эффект удалён!', call.message.chat.id, call.message.message_id)
        # --- 'change <stat> <unit_id>': unit stat editing.  The extra 'not in'
        # guards keep this branch from swallowing spell/weapon callbacks that
        # also contain the substring 'change'. ---
        elif 'change' in call.data and 'spell_change' not in call.data and 'weapon_ch' not in call.data and 'spell_manage' not in call.data:
            blist = ['inventory', 'spells', 'photo', 'max_spells']
            numbervalues = ['hp', 'maxhp', 'strenght', 'dexterity', 'constitution', 'intelligence',
                            'wisdom', 'charisma', 'armor_class', 'speed', 'name', 'player', 'current_weapon']
            what = call.data.split(' ')[1]
            try:
                unit = user['units'][call.data.split(' ')[2]]
            except:
                dnd.answer_callback_query(call.id, 'Такого юнита не существует!', show_alert=True)
                return
            users2.update_one({'id': user['id']}, {'$set': {'current_unit': unit['id'], 'current_stat': what}})
            if what not in blist:
                tt = ''
                if what in numbervalues:
                    if what == 'current_weapon':
                        tt += ' Требуется айди оружия!'
                    dnd.send_message(call.message.chat.id,
                                     'Теперь пришлите мне новое значение характеристики "' + what + '".'+tt)
                else:
                    # race/class: list the allowed values from the global tables.
                    if what == 'race':
                        r = 'расы'
                        alls = ''
                        for ids in races:
                            alls += '`' + ids + '` '
                    elif what == 'class':
                        r = 'классы'
                        alls = ''
                        for ids in classes:
                            alls += '`' + ids + '` '
                    dnd.send_message(call.message.chat.id,
                                     'Теперь пришлите мне новое значение характеристики "' + what + '".\n' +
                                     'Существующие ' + r + ': ' + alls, parse_mode='markdown')
            else:
                if what == 'inventory':
                    inv = '`'
                    for ids in unit['inventory']:
                        inv += ids + ', '
                    inv = inv[:len(inv) - 2]
                    inv += '`'
                    if inv == '`':
                        inv = 'Пусто!'
                    dnd.send_message(call.message.chat.id,
                                     'Теперь пришлите мне новый инвентарь, перечисляя предметы через запятую. Текущий ' +
                                     'инвентарь: ' + inv, parse_mode='markdown')
                elif what == 'photo':
                    if unit['photo'] != None:
                        dnd.send_photo(call.message.chat.id, unit['photo'],
                                       caption='Текущая фотография юнита. Для изменения отправьте новое фото.')
                    else:
                        dnd.send_message(call.message.chat.id,
                                         'Фотография отсутствует. Для изменения отправьте новое фото.')
                elif what == 'spells':
                    kb = types.InlineKeyboardMarkup()
                    kb.add(types.InlineKeyboardButton(text = 'Добавить заклинание', callback_data = 'addcast '+str(unit['id'])))
                    kb.add(types.InlineKeyboardButton(text = 'Удалить заклинание', callback_data = 'delcast '+str(unit['id'])))
                    medit('Нажмите кнопку для изменения параметров.', call.message.chat.id, call.message.message_id, reply_markup = kb)
                elif what == 'max_spells':
                    # Per-level spell-use limits, entered as "lvl:count" pairs.
                    dnd.send_message(call.message.chat.id, 'Теперь пришлите мне значения максимума используемых за раунд заклинаний '+
                                     'в следующем формате:\n1:6 2:4 3:1\nГде первое число - уровень заклинания, а второе - '+
                                     'максимальное использование заклинаний этого уровня за битву. Чтобы снять ограничение, поставьте '+
                                     '"inf" после уровня. Перед и после двоеточия не должно быть пробелов!')
                    txt = ''
                    for ids in unit['max_spells']:
                        txt += ids+' уровень: '+str(unit['max_spells'][ids])+' заклинаний\n'
                    if txt == '':
                        txt = 'Пусто!'
                    dnd.send_message(call.message.chat.id, 'Текущие значения:\n'+txt)
        ################################################
        # --- 'spell_change <stat> <spell_id>': spell stat editing ---
        elif 'spell_change' in call.data:
            what = call.data.split(' ')[1]
            try:
                spell = user['spells'][call.data.split(' ')[2]]
            except:
                dnd.answer_callback_query(call.id, 'Такого спелла не существует!', show_alert=True)
                return
            text = ''
            if what == 'savethrow':
                text += ' Текущее значение:\n'
                text += '┞ Аттрибут: '+str(spell['savethrow']['attribute'])+'\n'
                text += '┕ Сложность: '+str(spell['savethrow']['value'])+'\n'
                text += 'Чтобы выставить новые значения (одно или несколько, через пробел), вводите их в следующем формате:\n'
                text += '`Аттрибут: значение`\n'
                text += '`Сложность: значение`\nПример: `аттрибут: dexterity сложность: 5`'
            if what == 'custom_text':
                text += ' Текущее значение: "'+str(spell['custom_text'])+'".\nВозможные переменные:\n{target_name} - имя цели (только '+\
                        'если тип заклинания - направленное на цель);\n'+\
                        '{spell_name} - название заклинания;\n{unit_name} - имя юнита.'
                # Escape underscores so markdown parse mode does not eat them.
                text = text.replace('_', '\_')
            if what == 'effects':
                kb = types.InlineKeyboardMarkup()
                kb.add(types.InlineKeyboardButton(text = 'Добавить эффект', callback_data = 'addeffect '+str(spell['id'])))
                kb.add(types.InlineKeyboardButton(text = 'Удалить эффект', callback_data = 'deleffect '+str(spell['id'])))
                medit('Нажмите кнопку для изменения параметров.', call.message.chat.id, call.message.message_id, reply_markup = kb)
                return
            users2.update_one({'id': user['id']}, {'$set': {'current_spell': spell['id'], 'current_spellstat': what}})
            if what == 'classes' or what == 'description':
                text += '\nТекущее значение: "' + str(spell[what]) + '"'
            dnd.send_message(call.message.chat.id,
                             'Теперь пришлите мне новое значение характеристики заклинания "' + what.replace('_', '\_') + '".' + text, parse_mode = 'markdown')
        # --- 'weapon_ch <stat> <weapon_id>': weapon stat editing ---
        elif 'weapon_ch' in call.data:
            what = call.data.split(' ')[1]
            try:
                weapon = user['weapons'][call.data.split(' ')[2]]
            except:
                dnd.send_message(441399484, traceback.format_exc())
                dnd.answer_callback_query(call.id, 'Такого оружия не существует!', show_alert=True)
                return
            users2.update_one({'id': user['id']}, {'$set': {'current_weapon': weapon['id'], 'current_weaponstat': what}})
            text = ''
            if what == 'custom_attack_text':
                text = ' Текущее значение: "'+str(weapon['custom_attack_text'])+'".\nВозможные переменные:\n{target_name} - имя цели;\n'+\
                       '{weapon_name} - название оружия;\n{unit_name} - имя юнита.'
            if what == 'effects':
                kb = types.InlineKeyboardMarkup()
                kb.add(types.InlineKeyboardButton(text = 'Добавить эффект', callback_data = 'addeffect '+str(weapon['id'])))
                kb.add(types.InlineKeyboardButton(text = 'Удалить эффект', callback_data = 'deleffect '+str(weapon['id'])))
                medit('Нажмите кнопку для изменения параметров.', call.message.chat.id, call.message.message_id, reply_markup = kb)
                # Effects are edited via their own buttons; clear the pending edit.
                users2.update_one({'id': user['id']}, {'$set': {'current_weapon': None, 'current_weaponstat': None}})
                return
            dnd.send_message(call.message.chat.id,
                             'Теперь пришлите мне новое значение характеристики оружия "' + what + '".' + text)
        # --- 'effect_ch <stat> <effect_id>': effect stat editing ---
        elif 'effect_ch' in call.data:
            what = call.data.split(' ')[1]
            try:
                effect = user['effects'][call.data.split(' ')[2]]
            except:
                dnd.send_message(441399484, traceback.format_exc())
                dnd.answer_callback_query(call.id, 'Такого эффекта не существует!', show_alert=True)
                return
            users2.update_one({'id': user['id']}, {'$set': {'current_effect': effect['id'], 'current_effectstat': what}})
            text = ''
            if what == 'custom_text':
                text = ' Текущее значение: "'+str(effect['custom_text'])+'".\nВозможные переменные:\n{target_name} - имя цели;\n'+\
                       '{effect_name} - название эффекта;\n{unit_name} - имя юнита.'
            elif what == 'condition':
                # Conditions get their own keyboard instead of a text prompt.
                cond = effect['condition']
                kb = create_condition_kb(cond, effect)
                medit('Выберите условия применения эффекта "'+effect['name']+'"', call.message.chat.id, call.message.message_id, reply_markup = kb)
                return
            users.update_one({'id':user['id']},{'$set':{'current_effect':str(effect['id']), 'current_effectstat':what}})
            dnd.send_message(call.message.chat.id,
                             'Теперь пришлите мне новое значение характеристики эффекта "' + what + '".' + text)
        # --- 'cond_ch <field> <effect_id>': edit one effect-condition field ---
        elif 'cond_ch' in call.data:
            what = call.data.split(' ')[1]
            try:
                effect = user['effects'][call.data.split(' ')[2]]
            except:
                dnd.answer_callback_query(call.id, 'Такого условия не существует!')
                return
            users.update_one({'id':user['id']},{'$set':{'current_obj':call.data.split(' ')[2]}})
            users.update_one({'id':user['id']},{'$set':{'current_condition':what}})
            text = ''
            if what == 'chance':
                text += ' Выставьте значение типа double, от 0 до 100, обозначающее шанс применения эффекта. Примеры: 0.01; 25; 99; 50.88.'
            dnd.send_message(call.message.chat.id, 'Теперь пришлите мне новое значение пункта "'+what+'".'+text)
        # --- '<spell_id> spell_manage': open the spell editing keyboard ---
        elif 'spell_manage' in call.data:
            try:
                spell = user['spells'][call.data.split(' ')[0]]
            except:
                dnd.answer_callback_query(call.id, 'Такого спелла не существует!', show_alert=True)
                return
            kb = create_spell_kb(spell)
            dnd.send_message(call.message.chat.id, 'Нажмите на характеристику для её изменения.', reply_markup=kb)
        # --- '<effect_id> effect_manage': open the effect editing keyboard ---
        elif 'effect_manage' in call.data:
            try:
                effect = user['effects'][call.data.split(' ')[0]]
            except:
                dnd.answer_callback_query(call.id, 'Такого эффекта не существует!', show_alert=True)
                return
            kb = create_effect_kb(effect)
            dnd.send_message(call.message.chat.id, 'Нажмите на характеристику для её изменения.', reply_markup=kb)
        # --- '<weapon_id> weapon_manage': open the weapon editing keyboard ---
        elif 'weapon_manage' in call.data:
            try:
                weapon = user['weapons'][call.data.split(' ')[0]]
            except:
                dnd.send_message(441399484, traceback.format_exc())
                dnd.answer_callback_query(call.id, 'Такого оружия не существует!', show_alert=True)
                return
            kb = create_weapon_kb(weapon)
            dnd.send_message(call.message.chat.id, 'Нажмите на характеристику для её изменения.', reply_markup=kb)
        # --- 'show id <obj_id>': echo an object's id in monospace ---
        elif 'show' in call.data:
            if call.data.split(' ')[1] == 'id':
                dnd.send_message(call.message.chat.id, 'id объекта: `'+call.data.split(' ')[2]+'`', parse_mode='markdown')
        # --- 'addt <team> <game_id>': master adds a unit to a team ---
        elif 'addt' in call.data:
            team = call.data.split(' ')[1]
            try:
                game = games[int(call.data.split(' ')[2])]
            except:
                dnd.answer_callback_query(call.id, 'Игры не существует!', show_alert = True)
                return
            if call.from_user.id == game['master']['id']:
                users.update_one({'id':user['id']},{'$set':{'current_team':team, 'current_game':game['id']}})
                dnd.send_message(call.message.chat.id, 'Теперь пришлите ID юнита, которого нужно добавить в команду '+team+'.')
            else:
                dnd.answer_callback_query(call.id, 'Только Мастер может добавлять в игру персонажей!', show_alert = True)
        # --- 'game mainmenu <unit_id> <game_id>': back to the in-game action menu ---
        elif 'game mainmenu' in call.data:
            try:
                game = games[int(call.data.split(' ')[3])]
            except:
                dnd.answer_callback_query(call.id, 'Игры не существует!')
                return
            # Unit keys may be stored as int or str; try both.
            try:
                unit = game['units'][int(call.data.split(' ')[2])]
            except:
                try:
                    unit = game['units'][call.data.split(' ')[2]]
                except:
                    dnd.answer_callback_query(call.id, 'Юнита не существует!', show_alert = True)
                    return
            kb = mainmenu(game, unit)
            medit('Выберите действие персонажа '+unit['name']+'.', call.message.chat.id, call.message.message_id, reply_markup=kb)
        # --- 'gameact <act> <unit_id> <game_id> [...]': in-game action selection ---
        elif 'gameact' in call.data:
            try:
                game = games[int(call.data.split(' ')[3])]
            except:
                dnd.answer_callback_query(call.id, 'Игры не существует!')
                return
            nothit = []
            try:
                unit = game['units'][int(call.data.split(' ')[2])]
            except:
                try:
                    unit = game['units'][call.data.split(' ')[2]]
                except:
                    dnd.answer_callback_query(call.id, 'Юнита не существует!', show_alert = True)
                    return
            # Reject clicks on keyboards that belong to a previous turn.
            if game['current_unit'] != str(unit['id']):
                dnd.answer_callback_query(call.id, 'Сейчас не ваш ход!')
                medit('Старое сообщение!', call.message.chat.id, call.message.message_id)
                return
            act = call.data.split(' ')[1]
            tt = ''
            if act == 'select_attack':
                # Offer enemy targets; melee weapons can only reach units on
                # the same position code.
                kb = types.InlineKeyboardMarkup()
                for ids in game['units']:
                    unit2 = game['units'][ids]
                    if unit['team'] != unit2['team']:
                        if unit2['alive'] == True:
                            if unit2['position_code'] == unit['position_code'] or unit['current_weapon']['range'] == 'ranged':
                                kb.add(types.InlineKeyboardButton(text = unit2['name'], callback_data = 'selectact attack '+str(unit['id'])+' '+str(game['id'])+' '+str(unit2['id'])))
                            else:
                                nothit.append(unit2['name'])
                kb.add(types.InlineKeyboardButton(text = 'В главное меню', callback_data = 'game mainmenu '+str(unit['id'])+' '+str(game['id'])))
                if len(nothit) > 0:
                    tt += ' Цели, до которых ваше оружие не может достать:\n'
                    for ids in nothit:
                        tt += ids+', '
                    tt = tt[:len(tt)-2]
                    tt += '.'
                medit('Выберите цель.'+tt, call.message.chat.id, call.message.message_id, reply_markup=kb)
            elif act == 'select_move':
                # Offer movement targets (anyone not already adjacent) plus a
                # "step away from everyone" option encoded as moving to self.
                melee = []
                kb = types.InlineKeyboardMarkup()
                for ids in game['units']:
                    unit2 = game['units'][ids]
                    if unit2['alive'] == True:
                        if unit2['position_code'] != unit['position_code']:
                            kb.add(types.InlineKeyboardButton(text = unit2['name'], callback_data = 'selectact move '+str(unit['id'])+' '+str(game['id'])+' '+str(unit2['id'])))
                        else:
                            melee.append(unit2)
                kb.add(types.InlineKeyboardButton(text = 'Отойти ото всех', callback_data = 'selectact move '+str(unit['id'])+' '+str(game['id'])+' '+str(unit['id'])))
                kb.add(types.InlineKeyboardButton(text = 'В главное меню', callback_data = 'game mainmenu '+str(unit['id'])+' '+str(game['id'])))
                if len(melee) > 0:
                    tt += ' Бойцы рядом с вами:\n'
                    for ids in melee:
                        if ids['id'] != unit['id']:
                            tt += ids['name']+', '
                    tt = tt[:len(tt)-2]
                    tt += '.'
                medit('Выберите бойца.'+tt, call.message.chat.id, call.message.message_id, reply_markup=kb)
            elif act == 'select_speech':
                # One in-character speech per turn; the text arrives as a message.
                if unit['speeched'] == False:
                    unit['speech_wait'] = True
                    users.update_one({'id':call.from_user.id},{'$set':{'cgame':game['id']}})
                    medit('Напишите речь юнита следующим сообщением.', call.message.chat.id, call.message.message_id)
                else:
                    dnd.send_message(call.message.chat.id, 'Юнит уже говорил на этом ходу!')
                    kb = mainmenu(game, unit)
                    dnd.send_message(unit['player'], 'Выберите действие.', reply_markup=kb)
            elif act == 'select_spell':
                # List spells that still have uses left for their level; levels
                # missing from max_spells are treated as unlimited.
                kb = types.InlineKeyboardMarkup()
                text = 'Выберите заклинание. Осталось использований:\n'
                for ids in unit['max_spells']:
                    i = unit['max_spells'][ids]
                    text += ids+' уровень: '+str(i)+' заклинаний\n'
                for ids in unit['spells']:
                    spell = unit['spells'][ids]
                    try:
                        sl = unit['max_spells'][str(spell['lvl'])]
                    except:
                        try:
                            sl = unit['max_spells'][int(spell['lvl'])]
                        except:
                            sl = 'inf'
                    try:
                        if sl > 0:
                            kb.add(types.InlineKeyboardButton(text = '('+str(spell['lvl'])+')'+spell['name'],
                                                              callback_data = 'gameact use_spell '+str(unit['id'])+' '+str(game['id'])+' '+str(spell['id'])))
                    except:
                        # sl is 'inf' (non-comparable): no limit, always offer it.
                        kb.add(types.InlineKeyboardButton(text = '('+str(spell['lvl'])+')'+spell['name'],
                                                          callback_data = 'gameact use_spell '+str(unit['id'])+' '+str(game['id'])+' '+str(spell['id'])))
                kb.add(types.InlineKeyboardButton(text = 'В главное меню', callback_data = 'game mainmenu '+str(unit['id'])+' '+str(game['id'])))
                medit(text, call.message.chat.id, call.message.message_id, reply_markup = kb)
            elif act == 'use_spell':
                try:
                    spell = unit['spells'][str(call.data.split(' ')[4])]
                except:
                    dnd.answer_callback_query(call.id, 'Спелла не существует!')
                    return
                # Target types that require picking a single unit; everything
                # else (self/all_*) resolves without a selection step.
                ttypes = ['enemy', 'ally', 'ally!self', 'any', 'any!self']
                if spell['target_type'] in ttypes:
                    kb = types.InlineKeyboardMarkup()
                    text = 'Выберите цель для заклинания "'+spell['name']+'":'
                    if spell['target_type'] == 'enemy':
                        for ids in game['units']:
                            enemy = game['units'][ids]
                            if enemy['team'] != unit['team']:
                                kb.add(types.InlineKeyboardButton(text = enemy['name'],
                                                                  callback_data = 'gameact select_spelltarget '+str(unit['id'])+' '+str(game['id'])+' '+str(spell['id'])+' '+str(enemy['id'])))
                    elif spell['target_type'] == 'ally':
                        for ids in game['units']:
                            enemy = game['units'][ids]
                            if enemy['team'] == unit['team']:
                                kb.add(types.InlineKeyboardButton(text = enemy['name'],
                                                                  callback_data = 'gameact select_spelltarget '+str(unit['id'])+' '+str(game['id'])+' '+str(spell['id'])+' '+str(enemy['id'])))
                    elif spell['target_type'] == 'ally!self':
                        for ids in game['units']:
                            enemy = game['units'][ids]
                            if enemy['team'] == unit['team'] and enemy['id'] != unit['id']:
                                kb.add(types.InlineKeyboardButton(text = enemy['name'],
                                                                  callback_data = 'gameact select_spelltarget '+str(unit['id'])+' '+str(game['id'])+' '+str(spell['id'])+' '+str(enemy['id'])))
                    elif spell['target_type'] == 'any':
                        for ids in game['units']:
                            enemy = game['units'][ids]
                            kb.add(types.InlineKeyboardButton(text = enemy['name'],
                                                              callback_data = 'gameact select_spelltarget '+str(unit['id'])+' '+str(game['id'])+' '+str(spell['id'])+' '+str(enemy['id'])))
                    elif spell['target_type'] == 'any!self':
                        for ids in game['units']:
                            enemy = game['units'][ids]
                            if enemy['id'] != unit['id']:
                                kb.add(types.InlineKeyboardButton(text = enemy['name'],
                                                                  callback_data = 'gameact select_spelltarget '+str(unit['id'])+' '+str(game['id'])+' '+str(spell['id'])+' '+str(enemy['id'])))
                    kb.add(types.InlineKeyboardButton(text = 'В главное меню', callback_data = 'game mainmenu '+str(unit['id'])+' '+str(game['id'])))
                    medit('Выберите цель для заклинания "'+spell['name']+'".', call.message.chat.id, call.message.message_id, reply_markup=kb)
                else:
                    # No target selection needed: record the action and end the turn.
                    unit['current_act'] = createact(unit, spell['target_type'], game, 'spell', spell['id'])
                    medit('Выбрано: заклинание "'+spell['name']+'".', call.message.chat.id, call.message.message_id)
                    game['current_unit'] = None
                    unit['done_turn'] = True
                    endturn(game, unit)
            elif act == 'select_spelltarget':
                try:
                    spell = unit['spells'][call.data.split(' ')[4]]
                except:
                    dnd.answer_callback_query(call.id, 'Спелла не существует!')
                    return
                try:
                    target = game['units'][int(call.data.split(' ')[5])]
                except:
                    target = game['units'][call.data.split(' ')[5]]
                unit['current_act'] = createact(unit, target, game, 'spell', spell['id'])
                medit('Выбрано: заклинание "'+spell['name']+'", цель - '+target['name'], call.message.chat.id, call.message.message_id)
                game['current_unit'] = None
                unit['done_turn'] = True
                endturn(game, unit)
        # --- 'selectact <act> <unit_id> <game_id> <target_id>': confirm a
        # previously offered attack or move target and resolve the turn ---
        elif 'selectact' in call.data:
            try:
                game = games[int(call.data.split(' ')[3])]
            except:
                dnd.answer_callback_query(call.id, 'Игры не существует!')
                return
            unit = game['units'][int(call.data.split(' ')[2])]
            if game['current_unit'] != str(unit['id']):
                dnd.answer_callback_query(call.id, 'Сейчас не ваш ход!')
                medit('Старое сообщение!', call.message.chat.id, call.message.message_id)
                return
            act = call.data.split(' ')[1]
            if act == 'attack':
                target = game['units'][int(call.data.split(' ')[4])]
                unit['current_act'] = createact(unit, target, game, 'attack')
                medit('Выбрано: атака '+target['name']+'.', call.message.chat.id, call.message.message_id)
                unit['done_turn'] = True
                endturn(game, unit)
            elif act == 'move':
                target = game['units'][int(call.data.split(' ')[4])]
                unit['current_act'] = createact(unit, target, game, 'move')
                # Moving "to yourself" encodes stepping away from everyone.
                if unit['id'] != target['id']:
                    medit('Выбрано: передвижение к '+target['name']+'.', call.message.chat.id, call.message.message_id)
                else:
                    medit('Выбрано: отход.', call.message.chat.id, call.message.message_id)
                game['current_unit'] = None
                unit['done_turn'] = True
                endturn(game, unit)
    except:
        # Last-resort guard: report the traceback to the admin chat.
        dnd.send_message(441399484, traceback.format_exc())
def endturn(game, unit):
    """Resolve *unit*'s chosen action and advance the game to the next turn.

    Called both when the player confirms an action and when the turn timer
    fires (in the latter case ``done_turn`` is still False).  Handles three
    action kinds: 'attack', 'move' and 'spell'.  Damage/heal values may be
    plain ints or dice strings of the form 'XdY' — TODO confirm the dice
    format against the object editors elsewhere in the file.
    """
    if unit['done_turn'] == False:
        # Turn timer expired before the player acted.
        medit('Время вышло!', game['current_msg'].chat.id, game['current_msg'].message_id)
        game['current_unit'] = None
    if game['ctimer'] != None:
        # Cancel the pending timeout timer; it may already have fired.
        try:
            game['ctimer'].cancel()
        except:
            dnd.send_message(441399484, traceback.format_exc())
    if unit['current_act'] == None:
        # No action chosen at all: skip the turn.
        dnd.send_message(game['id'], unit['name']+' решил почиллить вместо действий! Передаю ход.')
        game['now_unit'] += 1
        next_turn(game)
        return
    if unit['current_act']['act'] == 'attack':
        hit(unit, game)
        dnd.send_message(game['id'], 'Следующий ход!')
        game['now_unit'] += 1
        time.sleep(1)
        next_turn(game)
    elif unit['current_act']['act'] == 'move':
        target = unit['current_act']['target']
        text = ''
        freeatk = []
        # Enemies standing on the same spot get an attack of opportunity
        # when the unit leaves melee (if they still have free attacks).
        for ids in game['units']:
            unit2 = game['units'][ids]
            if unit2['team'] != unit['team'] and unit2['position_code'] == unit['position_code'] and unit2['freeatk'] > 0 and unit2['alive']:
                freeatk.append(unit2)
        if target['id'] == unit['id']:
            # Moving "to self" means stepping away to a fresh position code.
            unit['position_code'] = poscodegen(game = game)
            text += '👣|'+unit['name']+' отходит подальше ото всех!\n'
        else:
            unit['position_code'] = target['position_code']
            text += '👣|'+unit['name']+' подходит к '+target['name']+' вплотную!\n'
        dnd.send_message(game['id'], text)
        time.sleep(2)
        fatext = ''
        if len(freeatk) > 0:
            fatext += 'Свободные атаки по '+unit['name']+' за выход из ближнего боя:'
            dnd.send_message(game['id'], fatext)
            time.sleep(1)
            for ids in freeatk:
                hit(ids, game, target = unit)
                ids['freeatk'] -= 1
                time.sleep(1)
        dnd.send_message(game['id'], 'Следующий ход!')
        time.sleep(1)
        game['now_unit'] += 1
        next_turn(game)
    elif unit['current_act']['act'] == 'spell':
        # Spell ids may be stored under int or str keys; try both.
        try:
            spell = unit['spells'][int(unit['current_act']['spell'])]
        except:
            spell = unit['spells'][str(unit['current_act']['spell'])]
        # Spend one use of this spell level if a limit is set.
        try:
            unit['max_spells'][spell['lvl']] -= 1
        except:
            pass
        target = unit['current_act']['target']
        # Target-type codes that resolve to a set of units rather than one.
        notarget = ['all_ally', 'all_enemy', 'self', 'all_ally!self', 'all!self', 'all']
        targets = []
        dt = ''
        add_d = False
        if target in notarget:
            if target == 'all_ally':
                for ids in game['units']:
                    enemy = game['units'][ids]
                    if enemy['team'] == unit['team'] and enemy['alive']:
                        targets.append(enemy)
            elif target == 'all_enemy':
                for ids in game['units']:
                    enemy = game['units'][ids]
                    if enemy['team'] != unit['team'] and enemy['alive']:
                        targets.append(enemy)
            elif target == 'self':
                targets.append(unit)
            elif target == 'all_ally!self':
                for ids in game['units']:
                    enemy = game['units'][ids]
                    if enemy['team'] == unit['team'] and enemy['id'] != unit['id'] and enemy['alive']:
                        targets.append(enemy)
            elif target == 'all!self':
                for ids in game['units']:
                    enemy = game['units'][ids]
                    if enemy['id'] != unit['id'] and enemy['alive']:
                        targets.append(enemy)
            elif target == 'all':
                for ids in game['units']:
                    enemy = game['units'][ids]
                    if enemy['alive']:
                        targets.append(enemy)
        else:
            targets.append(game['units'][target['id']])
            if spell['damage_type'] == 'aoe':
                # AoE spells splash everyone sharing the target's position.
                for ids in game['units']:
                    if game['units'][ids]['position_code'] == target['position_code'] and game['units'][ids]['id'] != target['id'] and target['alive']:
                        targets.append(game['units'][ids])
                        add_d = True
        # 'letter' picks the singular/plural Russian verb ending below.
        letter = 'е'
        if add_d:
            dt += ' и все стоящие рядом существа'
            letter = 'ю'
        text = ''
        text += unit['name']+' использует заклинание "'+spell['name']+'"!'
        if spell['custom_text'] != None:
            text = ''
            text = spell['custom_text']
            text = text.replace('{unit_name}', unit['name']).replace('{spell_name}', spell['name'])
            if target not in notarget:
                text = text.replace('{target_name}', target['name'])
        heal = 0
        damage = 0
        if target == 'all_ally':
            text += ' Все союзники получают следующее:\n'
        elif target == 'all_enemy':
            text += ' Все враги получают следующее:\n'
        elif target == 'self':
            text += ' Он получает следующее:\n'
        elif target == 'all_enemy!self':
            text += ' Все союзники, кроме него самого, получают следующее:\n'
        elif target == 'all!self':
            text += ' Все персонажи, кроме него, получают следующее:\n'
        elif target == 'all':
            text += ' Все персонажи получают следующее:\n'
        else:
            text += ' '+target['name']+dt+' получа'+letter+'т следующее:\n'
        # Heal: an int means a fixed amount, a string like 'XdY' means dice.
        try:
            spell['heal'] += 0
            tp = 'static'
        except:
            tp = 'random'
        if tp == 'static':
            if spell['heal'] > 0:
                heal += spell['heal']
                text += '♥|Отхил на '+str(spell['heal'])+' хп!\n'
        else:
            heal = 0
            i = 0
            need = int(spell['heal'].split('d')[0])
            while i < need:
                heal += random.randint(1, int(spell['heal'].split('d')[1]))
                i+=1
            text += '♥|Отхил на ('+spell['heal']+') = '+str(heal)+' хп!\n'
        # Damage: same int-or-dice convention as heal.
        try:
            spell['damage'] += 0
            tp = 'static'
        except:
            tp = 'random'
        if tp == 'static':
            if spell['damage'] > 0:
                damage += spell['damage']
                text += '💔|Урон: '+str(spell['damage'])+' единиц(ы)!\n'
        else:
            damage = 0
            i = 0
            need = int(spell['damage'].split('d')[0])
            while i < need:
                damage += random.randint(1, int(spell['damage'].split('d')[1]))
                i+=1
            text += '💔|Урон: ('+spell['damage']+') = '+str(damage)+' единиц!\n'
        dnd.send_message(game['id'], text)
        time.sleep(2)
        text = ''
        # Apply heal/damage to every resolved target; damage allows a saving
        # throw against the spell's savethrow attribute/difficulty.
        for ids in targets:
            dt = ''
            if heal > 0:
                ids['hp'] += heal
                if ids['hp'] > ids['maxhp']:
                    ids['hp'] = ids['maxhp']
            if damage > 0:
                attr = spell['savethrow']['attribute']
                border = spell['savethrow']['value']
                # D&D-style ability modifier: (score - 10) // 2 of the CASTER's
                # attribute — NOTE(review): looks like it should be the target's;
                # confirm intended rules before changing.
                bonus = int((unit[attr]-10)/2)
                result = random.randint(1,20)
                if result + bonus >= border:
                    r = 'успех! Получает только половину ('+str(int(damage/2))+') урона.'
                    ids['hp'] -= int(damage/2)
                    if deathtest(ids):
                        ids['alive'] = False
                        dt += '\n☠|'+ids['name']+' погибает!'
                    em = '👍'
                else:
                    r = 'неудача!'
                    em = '👎'
                    ids['hp'] -= damage
                    if deathtest(ids):
                        ids['alive'] = False
                        dt += '\n☠|'+ids['name']+' погибает!'
                text += ids['name']+' делает спасбросок аттрибута "'+attr+'" по сложности '+ \
                        str(border)+': (1d20 + '+str(bonus)+') = '+str(result + bonus)+'.\n'+em+'|'+r+dt+'\n\n'
        if text != '':
            dnd.send_message(game['id'], text)
            time.sleep(2)
        dnd.send_message(game['id'], 'Следующий ход!')
        game['now_unit'] += 1
        time.sleep(1)
        next_turn(game)
def deathtest(unit):
    """Return True if *unit*'s hit points have dropped to zero or below.

    ``unit`` is a unit dict with an 'hp' entry.
    """
    # Return the comparison directly instead of an if/else True/False pair.
    return unit['hp'] <= 0
def _stats_allow(stats, obj):
    """Return True if *obj* satisfies every stat condition in *stats*.

    ``stats`` maps a stat name to a required value.  Values may be plain
    numbers/strings (exact match) or strings prefixed with '>', '<' or '='
    for comparisons.  Stats the object does not have are skipped.
    """
    for stat in stats:
        raw = stats[stat]
        # Normalise numeric strings to ints so '=5' and 5 compare alike.
        try:
            value = int(raw)
        except (TypeError, ValueError):
            value = raw
        sign = None
        # Only strings can carry a comparison prefix (fixes the TypeError the
        # old code raised when indexing an int value with value[0]).
        if isinstance(value, str) and value and value[0] in ('>', '<', '='):
            sign = value[0]
            try:
                value = int(value[1:])
            except ValueError:
                value = 0
        try:
            actual = obj[stat]
        except KeyError:
            continue
        if sign == '>':
            if actual <= value:
                return False
        elif sign == '<':
            if actual >= value:
                return False
        else:
            # '=' prefix or no prefix: exact match required.
            if actual != value:
                return False
    return True

def hit(unit, game, target = None):
    """Resolve one weapon attack by *unit* inside *game*.

    When *target* is None the target comes from the unit's pending action.
    Announces the attack, rolls 1d20 + accuracy against the target's armor
    class, applies triggered weapon effects ('crit', 'stun'), then rolls and
    applies damage.  Sends progress messages to the game chat throughout.

    Fixes over the previous version:
    - ``elif effect == 'stun'`` compared a dict to a string, so stun effects
      never fired; now compares ``effect['effect']``.
    - The unit-stats condition loop mistakenly read ``target_stats``.
    - The chance denominator was stored in ``x`` and then clobbered by the
      stats loops, crashing ``random.randint(1, x)`` whenever any stat
      condition existed; it now lives in its own variable.
    - The local ``hit`` no longer shadows this function; debug prints and an
      unused local were removed.
    """
    if target == None:
        target = unit['current_act']['target']
    text = ''
    em = '💥'
    bonus = unit['current_weapon']['accuracy_buff'] #int((unit['dexterity']-10)/2)
    text += em+'|'+unit['name']+' атакует '+target['name']+', используя '+unit['current_weapon']['name']+'!'
    t2 = ' Кидает на попадание (1d20 + '+str(bonus)+')'
    # A weapon may override the announcement with a template string.
    try:
        if unit['current_weapon']['custom_attack_text'] != None:
            text = unit['current_weapon']['custom_attack_text']
            text = text.replace('{unit_name}', unit['name']).replace('{target_name}', target['name']).replace('{weapon_name}', unit['current_weapon']['name'])
    except:
        pass
    text += t2
    dnd.send_message(game['id'], text)
    time.sleep(2)
    text = ''
    bonus = unit['current_weapon']['accuracy_buff']
    roll = random.randint(1, 20) + bonus
    text += str(roll)+'! Армор-класс соперника: '+str(target['armor_class'])+'🛡. '
    if target['armor_class'] >= roll:
        text += '💨Промах!'
        h = False
    else:
        text += '💥Попадание!'
        h = True
    dnd.send_message(game['id'], text)
    time.sleep(2)
    if h:
        bonusdmg = 0
        bonusstun = 0
        eftext = ''
        w = unit['current_weapon']
        for ids in w['effects']:
            effect = w['effects'][ids]
            # Scale the trigger chance to an integer die so fractional
            # percentages (e.g. 0.01) work: denom = 100 * 10^(decimal digits).
            damount = str(effect['condition']['chance'])
            try:
                d = damount.split('.')[1]
            except IndexError:
                try:
                    d = damount.split(',')[1]
                except IndexError:
                    d = ''
            denom = int('100'+('0'*len(d)))
            chance = (denom*float(effect['condition']['chance']))/100
            # Both the target's and the attacker's stat conditions must hold.
            allow = (_stats_allow(effect['condition']['target_stats'], target) and
                     _stats_allow(effect['condition']['unit_stats'], unit))
            if random.randint(1, denom) <= chance and allow:
                try:
                    # 'power' is either a fixed int or a dice string 'XdY'.
                    if effect['effect'] == 'crit':
                        try:
                            int(effect['power'])
                            tp = 'static'
                        except:
                            tp = 'random'
                        if tp == 'static':
                            if effect['target'] == 'target':
                                bonusdmg += int(effect['power'])
                                eftext += '💔|Крит: цель дополнительно получает '+str(bonusdmg)+' урона!\n'
                            else:
                                # Self-targeted crit costs the attacker hp.
                                unit['hp'] -= int(effect['power'])
                                eftext += '💔|'+unit['name']+' теряет '+str(effect['power'])+' хп!\n'
                        else:
                            i = 0
                            need = int(effect['power'].split('d')[0])
                            while i < need:
                                bonusdmg += random.randint(1, int(effect['power'].split('d')[1]))
                                i+=1
                            if effect['target'] == 'target':
                                eftext += '💔|Крит: цель дополнительно получает ('+effect['power']+') = '+str(bonusdmg)+' урона!\n'
                            else:
                                unit['hp'] -= bonusdmg
                                eftext += '💔|'+unit['name']+' теряет '+str(bonusdmg)+' хп!\n'
                                bonusdmg = 0
                    elif effect['effect'] == 'stun':
                        try:
                            int(effect['power'])
                            tp = 'static'
                        except:
                            tp = 'random'
                        if tp == 'static':
                            if effect['target'] == 'target':
                                # +1 because the counter is decremented once
                                # before the stunned unit's own next turn.
                                bonusstun += int(effect['power'])+1
                                eftext += '🌀|Оглушение: цель пропустит следующие '+str(bonusstun-1)+' ходов!\n'
                            else:
                                unit['stunned'] += int(effect['power'])+1
                                eftext += '🌀|Оглушение: '+unit['name']+' пропустит следующие '+str(effect['power'])+' ходов!\n'
                        else:
                            bonusstun = 0
                            i = 0
                            need = int(effect['power'].split('d')[0])
                            while i < need:
                                bonusstun += random.randint(1, int(effect['power'].split('d')[1]))
                                i+=1
                            bonusstun += 1
                            if effect['target'] == 'target':
                                eftext += '🌀|Оглушение: цель пропустит следующие ('+effect['power']+') = '+str(bonusstun-1)+' ходов!\n'
                            else:
                                unit['stunned'] += bonusstun
                                eftext += '🌀|'+unit['name']+' пропустит следующие '+str(bonusstun-1)+' ходов!\n'
                                bonusstun = 0
                except:
                    # Malformed 'power': report to the admin and cancel the effect.
                    dnd.send_message(441399484, traceback.format_exc())
                    eftext = 'Криво выставлена переменная "power" эффекта '+effect['name']+' персонажа '+unit['name']+'! Отменяю эффект.'
        if eftext != '':
            dnd.send_message(game['id'], 'Эффекты:\n'+eftext)
            time.sleep(2)
        weapon = unit['current_weapon']
        totaldmg = random.randint(1, weapon['maxdmg']) + weapon['dmg_buff'] + bonusdmg
        buff = weapon['dmg_buff'] + bonusdmg
        text = '💔|Нанесённый урон: ('+str(1)+'d'+str(weapon['maxdmg'])+' + '+str(buff)+') = '+str(totaldmg)+'!'
        dnd.send_message(game['id'], text)
        time.sleep(2)
        text = ''
        target['stunned'] += bonusstun
        target['hp'] -= totaldmg
        if target['hp'] <= 0:
            target['alive'] = False
            text += '☠|'+target['name']+' погибает!'
        else:
            text += 'У '+target['name']+' остаётся '+str(target['hp'])+'♥!'
        dnd.send_message(game['id'], text)
        time.sleep(2)
# Catalogue of effect type identifiers the engine recognises.
# NOTE: the previous version was missing a comma after 'crit', so Python's
# implicit string concatenation produced 'critbonus_charisma' and BOTH
# 'crit' and 'bonus_charisma' were absent from the list.
# ('strenght' is the codebase-wide spelling of the stat; kept for consistency.)
effects = ['stun', 'weakness', 'kill', 'mark', 'bonus_accuracy', 'bonus_strenght', 'bonus_dexterity', 'bonus_wisdom', 'crit',
           'bonus_charisma', 'bonus_constitution', 'bonus_intelligence', 'bonus_armor', 'bonus_maxhp', 'bonus_hp', 'respawn']
def createact(unit, target, game, act, spell = None):
    """Build the action descriptor recorded for a unit's chosen turn action.

    Returns a dict for 'attack'/'move' ({'act', 'target'}) or 'spell'
    ({'act', 'target', 'spell'}); any other act yields None.
    `unit` and `game` are accepted for call-site symmetry but unused.
    """
    descriptor = {'act': act, 'target': target}
    if act in ('attack', 'move'):
        return descriptor
    if act == 'spell':
        descriptor['spell'] = spell
        return descriptor
    return None
def next_turn(game):
    # Advance the game to the next living unit's turn; ends the game when the
    # kill flag is set, everyone is dead, or at most one team remains alive.
    if game['kill']:
        # Game was flagged for termination elsewhere: drop it from the registry.
        del games[game['id']]
        return
    reset_vars(game)
    if len(game['units']) <= 1:
        dnd.send_message(game['id'], 'Ничья! Все мертвы!')
        del games[game['id']]
        return
    # Count living units per team to detect a winner.
    teams = {}
    for ids in game['units']:
        unit = game['units'][ids]
        if unit['alive']:
            if unit['team'] not in teams:
                teams.update({unit['team']:1})
            else:
                teams[unit['team']]+=1
    end = False
    aliveteams = 0  # NOTE(review): unused variable, kept unchanged
    if len(teams) <= 1:
        end = True
    if end:
        if len(teams) == 0:
            text = 'Все проиграли!'
        else:
            # Exactly one team left; its name is the single remaining dict key.
            for ids in teams:
                team = ids
            text = 'Команда '+team+' победила! Выжившие бойцы:\n'
            for ids in game['units']:
                unit = game['units'][ids]
                if unit['alive']:
                    text += unit['name']+': '+str(unit['hp'])+'/'+str(unit['maxhp'])+'♥️!\n'
        dnd.send_message(game['id'], 'Игра окончена! Результаты:\n'+text)
        del games[game['id']]
        return
    # Find the unit whose 'turn' index matches the game's turn counter,
    # bumping the counter (and wrapping it past the roster size) until a
    # living unit matches. NOTE(review): loops forever if no living unit
    # ever matches — presumably guaranteed by the checks above; confirm.
    cunit = None
    while cunit == None:
        for ids in game['units']:
            unit = game['units'][ids]
            if unit['turn'] == game['now_unit'] and unit['alive']:
                cunit = unit
        if cunit == None:
            if game['now_unit'] > len(game['units']):
                game['now_unit'] = 1
            else:
                game['now_unit'] += 1
    game['current_unit'] = str(cunit['id'])
    dnd.send_message(game['id'], 'Ход юнита '+cunit['name']+'!')
    give_turn(game, cunit)
def reset_vars(game):
    """Clear per-turn state on every unit and decay stun counters by one,
    never letting a counter drop below zero."""
    for unit in game['units'].values():
        unit['current_act'] = None
        unit['speech_wait'] = False
        unit['speeched'] = False
        unit['done_turn'] = False
        unit['stunned'] = max(0, unit['stunned'] - 1)
def give_turn(game, unit):
    # Offer the acting player their action menu, or skip the turn when the
    # unit is stunned, its controlling user is unknown, or the DM fails.
    unit['freeatk'] = 1  # reset the free-attack allowance for this turn
    user = users.find_one({'id':unit['player']})
    if user == None:
        # No registered user controls this unit — skip to the next turn.
        dnd.send_message(game['id'], 'Не знаю юзера, управляющего персонажем '+unit['name']+'! Передаю ход.')
        time.sleep(1)
        game['now_unit'] += 1
        next_turn(game)
        return
    if unit['stunned'] > 0:
        # Stunned units lose their turn outright.
        dnd.send_message(game['id'], '🌀|'+unit['name']+' оглушен! Передаю ход.')
        time.sleep(1)
        game['now_unit'] += 1
        next_turn(game)
        return
    kb = mainmenu(game, unit)
    try:
        # Time budget (seconds) for the player to pick an action; the timer
        # fires endturn when it expires.
        timee = 70
        msg = dnd.send_message(unit['player'], 'Выберите действие персонажа '+unit['name']+'! У вас '+str(timee)+' секунд.', reply_markup=kb)
        game['current_msg'] = msg
        t = threading.Timer(timee, endturn, args = [game, unit])
        t.start()
        game['ctimer'] = t
    except:
        # Sending the private message fails when the player never opened a
        # private chat with the bot — report and pass the turn.
        dnd.send_message(441399484, traceback.format_exc())
        dnd.send_message(game['id'], 'Управляющий персонажем '+unit['name']+' не написал мне в личку! Передаю ход.')
        time.sleep(1)
        game['now_unit'] += 1
        next_turn(game)
        return
def say_speech(unit, game, text):
    """Broadcast a unit's free-speech line to the game chat (with its portrait
    when one is set), mark the speech as delivered, and re-show the menu."""
    line = unit['name'] + ': ' + text
    if unit['photo'] is None:
        dnd.send_message(game['id'], line)
    else:
        dnd.send_photo(game['id'], unit['photo'], caption = line)
    unit['speech_wait'] = False
    unit['speeched'] = True
    kb = mainmenu(game, unit)
    dnd.send_message(unit['player'], 'Выберите действие.', reply_markup=kb)
def mainmenu(game, unit):
    """Build the per-turn inline keyboard listing the unit's available actions.

    Every callback ends with '<unit id> <chat id>' so the handler can route it.
    """
    suffix = ' ' + str(unit['id']) + ' ' + str(game['id'])
    kb = types.InlineKeyboardMarkup()
    kb.add(
        types.InlineKeyboardButton(text = 'Атака', callback_data = 'gameact select_attack' + suffix),
        types.InlineKeyboardButton(text = 'Заклинание', callback_data = 'gameact select_spell' + suffix)
    )
    kb.add(
        types.InlineKeyboardButton(text = 'Реакция', callback_data = 'gameact select_reaction' + suffix),
        types.InlineKeyboardButton(text = 'Движение', callback_data = 'gameact select_move' + suffix)
    )
    kb.add(types.InlineKeyboardButton(text = 'Свободная речь', callback_data = 'gameact select_speech' + suffix))
    return kb
def sendunitedit(id, unit):
    """Send the unit-editing inline keyboard to chat `id`."""
    dnd.send_message(id, 'Нажмите на характеристику для её изменения.',
                     reply_markup=create_edit_kb(unit))
def sendspelledit(id, spell):
    """Send the spell-editing inline keyboard to chat `id`."""
    dnd.send_message(id, 'Нажмите на характеристику для её изменения.',
                     reply_markup=create_spell_kb(spell))
def sendweaponedit(id, weapon):
    """Send the weapon-editing inline keyboard to chat `id`."""
    dnd.send_message(id, 'Нажмите на характеристику для её изменения.',
                     reply_markup=create_weapon_kb(weapon))
def create_spell_kb(spell):
    # Inline keyboard for editing a spell: one row per editable attribute.
    # Each callback encodes 'spell_change <field> <spell id>' (the ID row is
    # read-only and uses 'show id').
    kb = types.InlineKeyboardMarkup()
    kb.add(addkb(kb, 'Название: ' + spell['name'], 'spell_change name ' + str(spell['id'])))
    kb.add(addkb(kb, 'ID: ' + str(spell['id']), 'show id ' + str(spell['id'])))
    kb.add(addkb(kb, 'Классы: ' + str(spell['classes']), 'spell_change classes ' + str(spell['id'])))
    kb.add(addkb(kb, 'Описание: ' + str(spell['description']), 'spell_change description ' + str(spell['id'])))
    kb.add(addkb(kb, 'Уровень: ' + str(spell['lvl']), 'spell_change lvl ' + str(spell['id'])))
    kb.add(addkb(kb, 'Тип целей: ' + str(spell['target_type']), 'spell_change target_type ' + str(spell['id'])))
    kb.add(addkb(kb, 'Урон: ' + str(spell['damage']), 'spell_change damage ' + str(spell['id'])))
    kb.add(addkb(kb, 'Лечение: ' + str(spell['heal']), 'spell_change heal ' + str(spell['id'])))
    # Composite fields show only a count; the editor opens a sub-menu.
    kb.add(addkb(kb, 'Спасбросок: ' + str(len(spell['savethrow']))+' свойства', 'spell_change savethrow ' + str(spell['id'])))
    kb.add(addkb(kb, 'Тип урона: ' + str(spell['damage_type']), 'spell_change damage_type ' + str(spell['id'])))
    kb.add(addkb(kb, 'Эффекты: ' + str(len(spell['effects']))+' эффектов', 'spell_change effects ' + str(spell['id'])))
    kb.add(addkb(kb, 'Кастомный текст применения: ' + str(spell['custom_text']), 'spell_change custom_text ' + str(spell['id'])))
    return kb
def create_weapon_kb(weapon):
    """Inline keyboard for editing a weapon: one row per attribute, callbacks
    encoded as 'weapon_ch <field> <weapon id>' ('show id' for the ID row)."""
    wid = str(weapon['id'])
    kb = types.InlineKeyboardMarkup()
    kb.add(addkb(kb, 'Название: ' + weapon['name'], 'weapon_ch name ' + wid))
    kb.add(addkb(kb, 'ID: ' + wid, 'show id ' + wid))
    # Regular scalar fields share one label/field pattern.
    for label, field in (('Максимальный урон: ', 'maxdmg'),
                         ('Бонус урона: ', 'dmg_buff'),
                         ('Бонус точности: ', 'accuracy_buff'),
                         ('Тип: ', 'range')):
        kb.add(addkb(kb, label + str(weapon[field]), 'weapon_ch ' + field + ' ' + wid))
    kb.add(addkb(kb, 'Эффекты: ' + str(len(weapon['effects'])) + ' эффектов', 'weapon_ch effects ' + wid))
    kb.add(addkb(kb, 'Кастомный текст атаки', 'weapon_ch custom_attack_text ' + wid))
    return kb
def create_effect_kb(effect):
    # Inline keyboard for editing an effect; each callback encodes
    # 'effect_ch <field> <effect id>' (the ID row is read-only).
    kb = types.InlineKeyboardMarkup()
    kb.add(addkb(kb, 'Название: ' + effect['name'], 'effect_ch name ' + str(effect['id'])))
    kb.add(addkb(kb, 'ID: ' + str(effect['id']), 'show id ' + str(effect['id'])))
    kb.add(addkb(kb, 'Условие', 'effect_ch condition ' + str(effect['id'])))
    kb.add(addkb(kb, 'Эффект: '+effect['effect'], 'effect_ch effect ' + str(effect['id'])))
    kb.add(addkb(kb, 'Мощность: '+str(effect['power']), 'effect_ch power ' + str(effect['id'])))
    kb.add(addkb(kb, 'Кто получает эффект: '+str(effect['target']), 'effect_ch target ' + str(effect['id'])))
    kb.add(addkb(kb, 'Длительность (в ходах): '+str(effect['duration']), 'effect_ch duration ' + str(effect['id'])))
    kb.add(addkb(kb, 'Текст наложения: '+str(effect['custom_text']), 'effect_ch custom_text ' + str(effect['id'])))
    return kb
def create_condition_kb(cond, obj):
    """Inline keyboard for editing an effect's trigger condition; callbacks
    encode 'cond_ch <field> <owning object id>'."""
    oid = str(obj['id'])
    kb = types.InlineKeyboardMarkup()
    kb.add(addkb(kb, 'Требуемые статы цели', 'cond_ch target_stats ' + oid))
    kb.add(addkb(kb, 'Требуемые статы юнита', 'cond_ch unit_stats ' + oid))
    kb.add(addkb(kb, 'Шанс применения: ' + str(cond['chance']) + '%', 'cond_ch chance ' + oid))
    return kb
def create_edit_kb(unit):
    # Inline keyboard for editing a unit: related stats paired two per row,
    # callbacks encoded as 'change <field> <unit id>'.
    # NOTE(review): 'strenght' is this codebase's actual field spelling —
    # do not "fix" it here, the stored documents use the same key.
    pl = 'Unknown ID'
    user = users.find_one({'id':unit['player']})
    if user != None:
        pl = user['name']
    wname = None
    if unit['current_weapon'] != None:
        wname = unit['current_weapon']['name']
    kb = types.InlineKeyboardMarkup()
    kb.add(addkb(kb, 'Имя: ' + unit['name'], 'change name ' + str(unit['id'])), addkb(kb, 'ID: ' + str(unit['id']), 'show id ' + str(unit['id'])))
    kb.add(addkb(kb, 'Класс: ' + unit['class'], 'change class ' + str(unit['id'])), addkb(kb, 'Раса: ' + unit['race'], 'change race ' + str(unit['id'])))
    kb.add(addkb(kb, 'Хп: ' + str(unit['hp']), 'change hp ' + str(unit['id'])), addkb(kb, 'Макс.хп: ' + str(unit['maxhp']), 'change maxhp ' + str(unit['id'])))
    kb.add(addkb(kb, 'Сила: ' + str(unit['strenght']), 'change strenght ' + str(unit['id'])), addkb(kb, 'Ловкость: ' + str(unit['dexterity']), 'change dexterity ' + str(unit['id'])))
    kb.add(addkb(kb, 'Телосложение: ' + str(unit['constitution']), 'change constitution ' + str(unit['id'])), addkb(kb, 'Интеллект: ' + str(unit['intelligence']), 'change intelligence ' + str(unit['id'])))
    kb.add(addkb(kb, 'Мудрость: ' + str(unit['wisdom']), 'change wisdom ' + str(unit['id'])), addkb(kb, 'Харизма: ' + str(unit['charisma']), 'change charisma ' + str(unit['id'])))
    kb.add(addkb(kb, 'Класс брони: ' + str(unit['armor_class']), 'change armor_class ' + str(unit['id'])))
    kb.add(addkb(kb, 'Инвентарь: ' + str(len(unit['inventory'])) + ' предметов', 'change inventory ' + str(unit['id'])), addkb(kb, 'Заклинания', 'change spells ' + str(unit['id'])))
    kb.add(addkb(kb, 'Максимум спеллов за раунд', 'change max_spells ' + str(unit['id'])))
    kb.add(addkb(kb, 'Оружие: ' + str(wname), 'change current_weapon ' + str(unit['id'])))
    kb.add(addkb(kb, 'Игрок: ' + pl, 'change player ' + str(unit['id'])), addkb(kb, 'Фото', 'change photo ' + str(unit['id'])))
    return kb
def createspell():
    """Create a random spell document with a fresh id.

    Fix: the original dict literal listed the 'effects' key twice (the later
    duplicate silently overwrote the earlier one); the duplicate is removed.
    Returns the spell template consumed by the editor keyboards.
    """
    targets = ['all_enemy', 'all_ally', 'enemy', 'ally', 'self', 'all', 'ally!self', 'all_ally!self',
               'all!self', 'any', 'any!self']
    id = randomid()
    return {
        'id': id,
        'name': str(id),
        # NOTE(review): 'socerer' looks like a typo for 'sorcerer', but other
        # code may match this exact string — kept as-is.
        'classes': 'socerer',
        'description': 'Описание спелла',
        'lvl': 0,
        'target_type': random.choice(targets),
        'damage': random.randint(0, 10),
        'heal': random.randint(0, 10),
        'custom_text': None,
        'effects': {},
        'savethrow': {'attribute': 'strenght',
                      'value': 10},
        'debuffs': {},
        'damage_type': random.choice(['target', 'aoe'])
    }
def createweapon():
    """Roll a brand-new random weapon document with a fresh id."""
    new_id = randomid()
    picked_name = random.choice(['Топорик', 'Кинжал', 'Палка', 'Кулак Смерти', 'Нунчаки', 'Посох'])
    weapon = {
        'id': new_id,
        'name': picked_name,
        'maxdmg': random.randint(4, 10),
        'dmg_buff': random.randint(0, 3),
        'accuracy_buff': random.randint(0, 3),
        'range': random.choice(['melee', 'ranged']),
        'custom_attack_text': None,
        'effects': {}
    }
    return weapon
def addkb(kb, text, calldata):
    """Create an inline button for `text` with callback `calldata`.

    `kb` is accepted for call-site symmetry but is not used.
    """
    button = types.InlineKeyboardButton(text=text, callback_data=calldata)
    return button
def createunit(user = None, userid = None):
    """Roll a brand-new random unit owned by `user` (or by a raw `userid`).

    NOTE: 'strenght' is the field spelling used throughout this codebase.
    """
    stat_hi = 20
    stat_lo = 6
    hp_roll = random.randint(8, 20)
    owner_id = user['id'] if user is not None else userid
    return {
        'id': randomid(),
        'name': randomname(),
        'class': randomclass(),
        'race': randomrace(),
        'hp': hp_roll,
        'maxhp': hp_roll,
        'strenght': random.randint(stat_lo, stat_hi),
        'dexterity': random.randint(stat_lo, stat_hi),
        'constitution': random.randint(stat_lo, stat_hi),
        'intelligence': random.randint(stat_lo, stat_hi),
        'wisdom': random.randint(stat_lo, stat_hi),
        'charisma': random.randint(stat_lo, stat_hi),
        'armor_class': random.randint(8, 16),
        'initiative': 10,
        'speed': 30,
        'photo': None,
        'death_saves(success)': 0,
        'death_saves(fail)': 0,
        'spells': {},
        'inventory': [],
        'current_weapon': None,
        'owner': owner_id,
        'player': None,
        'max_spells': {}
    }
def createeffect():
    """Create a random effect document with a fresh id.

    Fix: the effect-name list was missing a comma after 'crit', so Python
    concatenated it with 'bonus_charisma' into one bogus
    'critbonus_charisma' entry that the engine could never handle.
    """
    id = randomid()
    name = str(id)
    effects = ['stun', 'weakness', 'kill', 'mark', 'bonus_accuracy', 'bonus_strenght', 'bonus_dexterity', 'bonus_wisdom', 'crit',
               'bonus_charisma', 'bonus_constitution', 'bonus_intelligence', 'bonus_armor', 'bonus_maxhp', 'bonus_hp', 'respawn']
    return {
        'id': id,
        'name': name,
        'condition': createcondition(),
        'effect': random.choice(effects),
        'power': str(random.randint(0, 5)),
        'target': random.choice(['target', 'unit']),
        'duration': random.randint(0, 3),
        'custom_text': None
    }
def createcondition():
    """Build the default (always-on) trigger condition: no stat requirements
    on either side and a 100% application chance (stored as a string)."""
    condition = {}
    condition['target_stats'] = {}
    condition['unit_stats'] = {}
    condition['chance'] = '100'
    return condition
def kb_sort(kbs):
    # Pack a flat list of inline buttons into a keyboard, two buttons per row.
    kb = types.InlineKeyboardMarkup()
    i = 0
    nextt = False  # True once a full pair is buffered and ready to flush
    toadd = []     # buttons buffered for the current row
    while i < len(kbs):
        if nextt == True:
            # Flush the completed pair as one row, then start the next row
            # with the current button.
            kb.add(*toadd)
            toadd = []
            toadd.append(kbs[i])
            nextt = False
        else:
            toadd.append(kbs[i])
            if i % 2 == 1:
                nextt = True
        i += 1
    # Flush the trailing row (may hold one or two buttons; kb.add() with no
    # arguments is a no-op row for an empty input list).
    kb.add(*toadd)
    return kb
def randomname():
    """Pick a random display name for a freshly generated unit."""
    names = [
        'Лурин Нвуд', 'Лонг Лао', 'Корза Ксогоголь', 'Алстон Опплбай', 'Холг', 'Лаэл Бит', 'Иглай Тай',
        'Унео Ано', 'Джор Нарарис', 'Кара Чернин', 'Хама Ана', 'Мейлиль Думеин', 'Шаумар Илтазяра',
        'Ромеро Писакар',
        'Шандри Грэйкасл', 'Зэй Тилататна', 'Силусс Ори', 'Чиаркот Литоари', 'Дикай Талаф', 'Чка Хладоклят',
        'Вренн', 'Пупа', 'Лупа', 'Харламов',
    ]
    return names[random.randrange(len(names))]
def randomclass():
    """Pick a random entry from the global `classes` list."""
    return classes[random.randrange(len(classes))]
def randomrace():
    """Pick a random entry from the global `races` list."""
    return races[random.randrange(len(races))]
def randomid():
    """Allocate a fresh object id from the shared counter collection.

    Reads the current counter, atomically increments it in the DB, and
    returns the value the counter was incremented to.
    """
    current = nowid.find_one({})['id']
    nowid.update_one({}, {'$inc': {'id': 1}})
    return current + 1
def createuser2(m):
    """Fetch the users2 record for the message sender, creating it on first
    contact from the `base` template."""
    query = {'id': m.from_user.id}
    user = users2.find_one(query)
    if user is None:
        users2.insert_one(createu(m))
        user = users2.find_one(query)
    return user
def createu(m):
    """Build a fresh users2 document: sender identity plus every default
    field from the global `base` template."""
    doc = {'id': m.from_user.id,
           'name': m.from_user.first_name}
    for key in base:
        doc[key] = base[key]
    return doc
def dmedit(message_text, chat_id, message_id, reply_markup=None, parse_mode=None):
    """Thin wrapper over dnd.edit_message_text with a friendlier argument order."""
    return dnd.edit_message_text(
        chat_id=chat_id,
        message_id=message_id,
        text=message_text,
        reply_markup=reply_markup,
        parse_mode=parse_mode,
    )
def creategame(m):
    """Build the initial {chat_id: game_state} mapping for a new game started
    by the sender of message `m` (who becomes the game master)."""
    state = {
        'id': m.chat.id,
        'master': createplayer(m.from_user),
        'turn': 1,
        'units': {},
        'started': False,
        'now_unit': 1,
        'ctimer': None,
        'kill': False,
        'current_unit': None,
        'current_msg': None,
    }
    return {m.chat.id: state}
def createplayer(user):
    """Reduce a Telegram user object to the id/name pair stored in game state."""
    return {'id': user.id, 'name': user.first_name}
def input_create_unit(userid, slovar):
    """Build a unit from user-supplied field/value pairs, validating each field.

    Bug fix: in the original, `elem` was only ever assigned in the numeric
    branch, so dict- and list-typed fields always raised NameError (silently
    swallowed by the except) and could never be applied. `elem` is now bound
    from `slovar` before any type check. The unused `values` local was removed.

    :param userid: owner id for the new unit
    :param slovar: mapping of unit field name -> proposed value
    :return: [unit, text] where `text` accumulates per-field error messages
    """
    text = ''
    unit = createunit(userid = userid)
    for ids in slovar:
        try:
            allow = False
            elem = slovar[ids]  # fix: bind the value before the type checks
            if ids in numbervalues:
                elem = int(elem)
                allow = True
            elif ids in dicts:
                if type(elem) == dict:
                    allow = True
            elif ids in lists:
                if type(elem) == list:
                    allow = True
            # Never let callers overwrite identity fields.
            if ids == 'id' or ids == 'owner':
                allow = False
            if allow:
                unit[ids] = elem
            else:
                text += 'Неверный формат элемента '+str(elem)+'!\n'
        except Exception:
            # int() conversion failures and other per-field errors are
            # reported to the admin and reflected back to the user.
            dnd.send_message(441399484, traceback.format_exc())
            text += 'Ошибка при добавлении элемента "'+ids+'"!\n'
    return [unit, text]
def medit(message_text, chat_id, message_id, reply_markup=None, parse_mode=None):
    """Thin wrapper over dnd.edit_message_text (duplicate of dmedit, kept for
    existing call sites)."""
    return dnd.edit_message_text(
        chat_id=chat_id,
        message_id=message_id,
        text=message_text,
        reply_markup=reply_markup,
        parse_mode=parse_mode,
    )
def upd_all(prm, value):
    # One-off maintenance/migration helper: set field `prm` to `value` on
    # every spell, weapon and equipped unit weapon — both in per-user
    # documents and in the shared `open_objects` pool.
    for ids in users.find({}):
        for idss in ids['spells']:
            print(idss)  # console progress trace
            users.update_one({'id':ids['id']},{'$set':{'spells.'+str(idss)+'.'+prm:value}})
        for idss in ids['weapons']:
            users.update_one({'id':ids['id']},{'$set':{'weapons.'+str(idss)+'.'+prm:value}})
        for idss in ids['units']:
            for idsss in ids['units'][idss]['spells']:
                users.update_one({'id':ids['id']},{'$set':{'units.'+idss+'.spells.'+idsss+'.'+prm:value}})
            if ids['units'][idss]['current_weapon'] != None:
                users.update_one({'id':ids['id']},{'$set':{'units.'+idss+'.current_weapon.'+prm:value}})
    # Same pass over the single shared (non-user-owned) objects document.
    x = open_objects.find_one({})
    for idss in x['spells']:
        open_objects.update_one({},{'$set':{'spells.'+str(idss)+'.'+prm:value}})
    for idss in x['weapons']:
        open_objects.update_one({},{'$set':{'weapons.'+str(idss)+'.'+prm:value}})
    for idss in x['units']:
        for idsss in x['units'][idss]['spells']:
            open_objects.update_one({},{'$set':{'units.'+idss+'.spells.'+idsss+'.'+prm:value}})
        if x['units'][idss]['current_weapon'] != None:
            open_objects.update_one({},{'$set':{'units.'+idss+'.current_weapon.'+prm:value}})
#upd_all('effects', {})
# Startup migration: backfill any `base` template keys missing from existing
# users2 documents before the bot starts serving updates.
for ids in users2.find({}):
    for idss in base:
        if idss not in ids:
            users2.update_one({'id': ids['id']}, {'$set': {idss: base[idss]}})
# Start long-polling; none_stop keeps the bot running through polling errors.
dnd.polling(none_stop = True)
|
# Given N exams, a maximum score K and a required average M, find the
# smallest score on the last exam that lifts the average to at least M.
# Fixes: the running total was named `sum`, shadowing the builtin; the
# manual flag variable is replaced by the for/else idiom.
N, K, M = map(int, input().split())
A = list(map(int, input().split()))

# Total of the first N-1 exam scores (the N-th exam is still unknown).
total = sum(A[:N - 1])

for i in range(K + 1):
    if (total + i) / N >= M:
        print(i)
        break
else:
    # Even a perfect last exam cannot reach the required average.
    print("-1")
import re
from typing import List
from abc import ABC, abstractmethod
from source_hunter.utils.log_utils import logger
from collections import OrderedDict
class BaseParser(ABC):
    """Abstract interface for language-specific source parsers.

    Concrete subclasses (e.g. PythonParser) implement every operation as a
    @staticmethod, so the abstract declarations are marked @staticmethod as
    well instead of silently treating `code_str` as an implicit `self`.
    """

    @staticmethod
    @abstractmethod
    def parse_children_modules(code_str: str):
        """
        :param code_str: str, code string
        :return: list of str, those statement that is responsible for import child module
        """
        raise NotImplementedError("This method is not implemented")

    @staticmethod
    @abstractmethod
    def get_caller(code_str: str, class_or_func: str):
        """
        check whether there is a func or class of parent_fnode calling the target child_class_or_func of child_fnode
        :param code_str: str, code string of parent fnode
        :param class_or_func: str, target class or function name
        :return: list of str, function or class names that calling the child_class_or_func
        """
        raise NotImplementedError("This method is not implemented")

    @staticmethod
    @abstractmethod
    def get_deps(code_str: str, class_or_func: str):
        """
        get dependent modules of the class or function specified by parameter
        :param code_str: str, code string to inspect
        :param class_or_func: str, class or function name
        :return: list of str
        """
        raise NotImplementedError("This method is not implemented")
class PythonParser(BaseParser):
    """Parser for Python source files.

    Uses regexes to extract imports and a 4-space-indentation-based tree
    (nested OrderedDicts keyed by source line) to locate callers and
    dependencies of a given class or function.
    """

    @staticmethod
    def parse_children_modules(code_str: str):
        """Return the dotted module names imported by `code_str`, covering
        both 'from pkg import a, b' (incl. parenthesized/aliased forms)
        and plain 'import a, b' statements."""
        modules = []
        # parse import in 'from example import ...' form
        import_pattern = re.compile(r'from ([\w.]+) import (\([\w, \n]+\)|[\w_, ]+\n)')
        matches = re.findall(import_pattern, code_str)
        for match in matches:
            pathes = match[0].strip().split('.')
            for mod in match[1].strip().replace('\n', '').split(','):
                if ' as ' in mod:
                    # drop the alias, keep the real module name
                    mod = mod.split(' as ')[0]
                single_mod = pathes + [mod.strip()]
                modules.append('.'.join(single_mod))
        # parse import in 'import ...' form
        import_pattern = re.compile(r'[^ \w_]import ([\w_, ]+)')
        matches = re.findall(import_pattern, code_str)
        for match in matches:
            for single_mod in match.split(','):
                if ' as ' in single_mod:
                    single_mod = single_mod.split(' as ')[0]
                modules.append(single_mod.strip())
        return modules

    @staticmethod
    def parse_structure(code_str: str):
        """Parse `code_str` into a nested OrderedDict keyed by source line;
        a line's children are the lines indented one level deeper below it."""
        code_lines = code_str.split('\n')
        return PythonParser._parse_structure_helper(code_lines, 0)

    @staticmethod
    def _parse_structure_helper(code_lines: List[str], indent_level: int):
        # Indices of lines sitting exactly at this indent level; everything
        # between two such lines becomes the earlier line's children.
        same_indent_lines = []
        for i, code_line in enumerate(code_lines):
            if len(code_line) == 0:
                continue
            try:
                cur_indent_level = PythonParser.count_indent_level(code_line)
            except ValueError:
                # Line whose indent is not a multiple of 4 spaces — skip it.
                continue
            if cur_indent_level == indent_level:
                same_indent_lines.append(i)
        code_structure = OrderedDict()
        for i, line_num in enumerate(same_indent_lines):
            if i < len(same_indent_lines) - 1:
                if line_num + 1 < same_indent_lines[i + 1]:
                    # Recurse into the lines between this entry and the next
                    # same-level entry.
                    code_structure[code_lines[line_num]] = PythonParser._parse_structure_helper(
                        code_lines[line_num + 1: same_indent_lines[i + 1]], indent_level + 1)
                else:
                    code_structure[code_lines[line_num]] = {}
            else:
                if line_num + 1 < len(code_lines):
                    # Last same-level entry: its children run to the end.
                    code_structure[code_lines[line_num]] = PythonParser._parse_structure_helper(
                        code_lines[line_num + 1:], indent_level + 1)
                else:
                    code_structure[code_lines[line_num]] = {}
        return code_structure

    @staticmethod
    def count_indent_level(line: str):
        """Return the 4-space indent depth of `line`; raise ValueError when
        the leading spaces are not a multiple of 4."""
        i = 0
        while i < len(line) and line[i] == ' ':
            i += 1
        if not (i / 4).is_integer():
            raise ValueError('indented space is not multiple of 4')
        return i // 4

    @staticmethod
    def get_caller(code_str: str, class_or_func: str):
        """Return the names of functions/classes in `code_str` that reference
        `class_or_func` anywhere in their bodies."""
        code_structure = PythonParser.parse_structure(code_str)
        result = []
        PythonParser._get_caller_helper(code_structure, class_or_func, result)
        logger.verbose_info('searching {} found {}'.format(class_or_func, result))
        return result

    @staticmethod
    def _get_caller_helper(code_structure, class_or_func, result_container):
        # Recursive walk of the structure tree; per-level hits are grouped by
        # the kind of statement that mentioned the target. Enclosing def/class
        # names of any hit are appended to `result_container`.
        result = {'func_call': [], 'class_call': [], 'variable_call': [], 'other_call': []}
        for statement, sub_structure in code_structure.items():
            result_of_sub = PythonParser._get_caller_helper(sub_structure, class_or_func, result_container)
            found = any(result_of_sub['func_call'] + result_of_sub['class_call'] + result_of_sub['variable_call'] +
                        result_of_sub['other_call'])
            if found:
                if 'def' in statement:
                    func_name = re.findall(r'def ([\w_]+)\([\w \t,_]*\):[\t ]*', statement)
                    result['func_call'].extend(func_name)
                    result_container.extend(func_name)
                elif 'class' in statement:
                    class_name = re.findall(r'class ([\w_]+).*:', statement)
                    result['class_call'].extend(class_name)
                    result_container.extend(class_name)
                else:
                    # Not a def/class header: bubble the hit upward.
                    result['other_call'].extend(result_of_sub['other_call'])
            if class_or_func in statement:
                if '=' in statement and '==' not in statement and '<=' not in statement and '>=' not in statement:
                    # Assignment mentioning the target: record the LHS names.
                    statement_parts = statement.split('=')
                    result['variable_call'].extend(statement_parts[0].split(','))
                else:
                    result['other_call'].append(statement)
        return result

    @staticmethod
    def get_deps(code_str: str, class_or_func: str):
        """Return the imported modules that `class_or_func` (as defined in
        `code_str`) actually uses in its body."""
        modules = PythonParser.parse_children_modules(code_str)
        code_structure = PythonParser.parse_structure(code_str)
        deps_modules = []
        for statement, sub_structure in code_structure.items():
            # Match the top-level statement that defines the target name.
            if any([name for name in class_or_func.split('.') if name and name in statement]):
                for module in modules:
                    usages = PythonParser._get_deps_helper(sub_structure, module)
                    deps_modules.extend(usages)
        return list(set(deps_modules))

    @staticmethod
    def _get_deps_helper(code_structure, module):
        result = []
        mod = module.split('.')[-1]
        # module usage in 'module(...)' form
        direct_use_pattern = re.compile(r'{}\('.format(mod))
        # module usage in 'module.sub(...)' form
        use_sub_pattern = re.compile(r'{}([.\w_]*)\('.format(mod))
        # module usage in 'class A(module)' form
        super_class_pattern = re.compile(r'class [\w_]+\({}\)'.format(mod))
        for statement, sub_structure in code_structure.items():
            direct_usages = re.findall(direct_use_pattern, statement)
            if direct_usages:
                result.append(module)
            sub_mods = re.findall(use_sub_pattern, statement)
            for sub_mod in sub_mods:
                result.append(module + sub_mod)
            super_class_usages = re.findall(super_class_pattern, statement)
            if super_class_usages:
                result.append(module)
            result.extend(PythonParser._get_deps_helper(sub_structure, module))
        return result
class ParserSelector:
    """Maps a language name or file suffix to its parser class."""

    lang_parser_dict = {
        "python": PythonParser,
        "python3": PythonParser
    }
    suffix_parser_dict = {
        ".py": PythonParser,
        "py": PythonParser
    }

    @classmethod
    def get_parser(cls, lang_or_suffix):
        """Return the parser class registered for `lang_or_suffix`, trying
        language names first, then file suffixes; None when unknown."""
        parser = cls.lang_parser_dict.get(lang_or_suffix)
        if parser is not None:
            return parser
        return cls.suffix_parser_dict.get(lang_or_suffix)
|
import FWCore.ParameterSet.Config as cms
from RecoParticleFlow.PFClusterProducer.particleFlowClusterECALUncorrected_cfi import *
# Clone of the standard uncorrected ECAL clusterizer that consumes the
# OOT (presumably "out of time" — TODO confirm) rechit collection instead.
particleFlowClusterOOTECALUncorrected = particleFlowClusterECALUncorrected.clone(
    recHitsSource = "particleFlowRecHitOOTECAL"
)
|
from vk_api.keyboard import VkKeyboard, VkKeyboardColor
class Keyboard():
def create_keyboard(response):
themes = ['Социальная сфера','Политика','Экономика','Наркотики','Феминизм','Международные отношения',\
'Спорт', 'СМИ', 'Мигранты', 'Религия', 'Этика']
themes_low = ['социальная сфера', 'политика', 'экономика', 'наркотики', 'феминизм', 'международные отношения', \
'спорт', 'сми', 'мигранты', 'религия', 'этика']
keyboard = VkKeyboard(one_time=False)
if response == 'поехали' or response == 'заново' or response == 'начать' or response == 'start':
keyboard.add_button('Выбрать тему', color=VkKeyboardColor.POSITIVE)
keyboard.add_line() # Переход на вторую строку
keyboard.add_button('Случайная тема', color=VkKeyboardColor.NEGATIVE)
elif response == 'выбрать тему':
keyboard.add_button('Русский', color=VkKeyboardColor.POSITIVE)
keyboard.add_button('Английский', color=VkKeyboardColor.PRIMARY)
keyboard.add_line()
keyboard.add_button('Случайная тема', color=VkKeyboardColor.NEGATIVE)
elif response == 'английский' or response == 'русский':
keyboard.add_button('Легкая', color=VkKeyboardColor.POSITIVE)
keyboard.add_button('Сложная', color=VkKeyboardColor.PRIMARY)
keyboard.add_line()
keyboard.add_button('Случайная тема', color=VkKeyboardColor.NEGATIVE)
elif response == 'легкая' or response == 'сложная':
keyboard.add_button(themes[0], color=VkKeyboardColor.POSITIVE)
keyboard.add_line()
keyboard.add_button(themes[1], color=VkKeyboardColor.POSITIVE)
keyboard.add_button(themes[2], color=VkKeyboardColor.POSITIVE)
keyboard.add_line()
keyboard.add_button(themes[3], color=VkKeyboardColor.POSITIVE)
keyboard.add_button(themes[4], color=VkKeyboardColor.POSITIVE)
keyboard.add_line()
keyboard.add_button(themes[5], color=VkKeyboardColor.POSITIVE)
keyboard.add_line()
keyboard.add_button(themes[6], color=VkKeyboardColor.POSITIVE)
keyboard.add_button(themes[7], color=VkKeyboardColor.POSITIVE)
keyboard.add_button(themes[8], color=VkKeyboardColor.POSITIVE)
keyboard.add_line()
keyboard.add_button(themes[9], color=VkKeyboardColor.POSITIVE)
keyboard.add_button(themes[10], color=VkKeyboardColor.POSITIVE)
keyboard.add_line()
keyboard.add_button('Случайная тема', color=VkKeyboardColor.NEGATIVE)
elif response in themes_low: # было социалочка
keyboard.add_button('ЭП', color=VkKeyboardColor.POSITIVE)
keyboard.add_line()
keyboard.add_button('ЭПСЧ', color=VkKeyboardColor.POSITIVE)
keyboard.add_line()
keyboard.add_button('ЭП как', color=VkKeyboardColor.POSITIVE)
keyboard.add_line()
keyboard.add_button('Случайная тема', color=VkKeyboardColor.NEGATIVE)
elif response == 'случайная тема' or response == 'эп' or response == 'эпсч' or response == 'эп как':
keyboard.add_button('Заново', color=VkKeyboardColor.POSITIVE)
keyboard = keyboard.get_keyboard()
return keyboard
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.