index int64 0 1,000k | blob_id stringlengths 40 40 | code stringlengths 7 10.4M |
|---|---|---|
12,300 | 656dd9c359030c5265d3ef6459d5aaadf1068c76 | import requests as requests
import random
import credentials
url = credentials.telegramURL
start_command = '/start'
end_command = '/stop'
reset_amazonURL_command = '/resetAmazonURL'
test_command = '/runTest'
add_new_refresher = '/addNewRefresher'
pause_refresher = '/pause'
resume_refresher = '/resume'
valid_commands = ['/start', '/stop', '/resetAmazonURL:'] # list of valid commands that the bot recognises
# create func that get chat id
def get_chat_id(update):
    """Return the chat id carried by a Telegram update's message."""
    return update["message"]["chat"]["id"]
# create function that get message text
def get_message_text(update):
    """Return the text of a Telegram update's message."""
    return update["message"]["text"]
# get user name of the bot
def get_user_name():
    """Return the first name of the sender of the most recent update."""
    latest = last_update(url)
    return latest["message"]["from"]["first_name"]
# create function that get last_update
def last_update(req):
    """Fetch all pending updates from the bot API base URL *req* and
    return the most recent one (the API lists updates oldest-first)."""
    payload = requests.get(req + "getUpdates").json()
    results = payload["result"]
    return results[len(results) - 1]
# create function that let bot send message to user
def send_message(message_text):
    """POST *message_text* to the chat that sent the most recent update."""
    target_chat = get_chat_id(last_update(url))
    return requests.post(
        url + "sendMessage",
        data={"chat_id": target_chat, "text": message_text},
    )
def is_valid_text(update):
    """Return True if the update's text is one of the recognised commands.

    NOTE(review): valid_commands lists '/resetAmazonURL:' (with a colon)
    while reset_amazonURL_command has none — confirm which is intended.
    """
    return get_message_text(update) in valid_commands
def get_last_message():
    """Return the text of the most recent update's message."""
    latest = last_update(url)
    return get_message_text(latest)
def get_last_message_id():
    """Return the update_id of the most recent update."""
    latest = last_update(url)
    return latest["update_id"]
'''
def main():
while True:
while get_message_text(last_update(url)) == start_command:
update_id = last_update(url)["update_id"]
update = last_update(url)
# end the script when the user types /stop
if get_message_text(update) == "/stop":
break
if update_id == update["update_id"]:
send_message("Testing Telegram Bot")
update_id += 1
#main()
'''
|
12,301 | 9feb32ec6aed3cb3d284581ee9f7b535b628cb73 | import networkx as nx
class Regions:
    """Region-based analysis over a dominator graph.

    Builds a region multigraph from `dominator` (expected to expose
    `N`, `edges`, and `pred_list` — TODO confirm against its class),
    collapses loops into composite regions, records the resulting
    control tree, and computes gen/kill sets for reaching-definitions
    style data-flow. Output helpers emit HTML-ish table fragments.
    """

    def __init__(self, dominator, code_blocks):
        # gen/kill: per-block sets of instruction indices (filled by
        # parse_instructions / the kill pass below).
        self.gen = [set() for _ in range(dominator.N)]
        self.kill = [set() for _ in range(dominator.N)]
        self.code_blocks = code_blocks
        self.instructions = []
        self.parse_instructions()
        # classification[0]: leaf regions, [1]/[2]: composite regions made
        # by replace_loop (multi-node vs. self-loop) — see replace_loop.
        self.classification = [['Re'], [], []]
        self.dominator = dominator
        # multi_graph[i]: successor region indices of region i (-1 = 'Re').
        self.multi_graph = [set() for _ in range(dominator.N)]
        # self.reverse_graph = [set() for _ in range(dominator.N)]
        # nodes/keys: BFS renaming between dominator nodes and region ids
        # (-2 marks "unassigned").
        self.nodes = [-2] * dominator.N
        self.keys = [-2] * dominator.N
        self.N = dominator.N
        self.control_tree = []
        self.rename()

    def find_regions(self):
        """Generator: yield the region graph after each loop collapse.

        Terminates when no cycles remain, finishing the control tree with
        a single top-level region.
        """
        yield self.graph_object()
        while True:
            # rebuild the edge list, skipping unassigned (-2) targets
            multi_graph = []
            for i in range(self.N):
                for node in self.multi_graph[i]:
                    if node != -2:
                        multi_graph.append((i, node))
            nx_multi_graph = nx.MultiDiGraph(multi_graph)
            # smallest cycle first; ties broken by highest head node
            loops = sorted(nx.simple_cycles(nx_multi_graph), key=lambda x: len(x))
            loops = sorted(loops, key=lambda x: x[0], reverse=True)
            if not loops:
                # acyclic: fold everything under one final region
                for i in range(self.N):
                    if self.multi_graph[i]:
                        self.control_tree.append((f'R{self.N - 1}', f'R{i}'))
                    elif i == self.N - 1:
                        self.control_tree.append((f'R{self.N - 1}', 'Re'))
                self.classification[1].append(f'R{self.N - 1}')
                # print(sorted(self.control_tree))
                return
            self.replace_loop(loops[0])
            yield self.graph_object()

    def graph_object(self):
        """Return the current region graph as an nx.MultiDiGraph with
        'R<i>' labels (-1 rendered as the exit region 'Re')."""
        multi_graph = []
        for i in range(self.N):
            for node in self.multi_graph[i]:
                if node == -1:
                    multi_graph.append((f'R{i}', 'Re'))
                else:
                    multi_graph.append((f'R{i}', f'R{node}'))
        return nx.MultiDiGraph(multi_graph)

    def rename(self):
        """BFS-number dominator nodes into region ids and seed the
        multigraph and control tree from the dominator edges."""
        n = 0
        self.nodes[n] = 0
        self.keys[0] = n
        n += 1
        i = 0
        # breadth-first numbering, excluding the exit node (dominator.N - 1)
        while i < n:
            for child in self.dominator.edges[self.nodes[i]]:
                if child != self.dominator.N - 1 and self.keys[child] == -2:
                    self.nodes[n] = child
                    self.keys[child] = n
                    n += 1
            i += 1
        # map the exit node to the sentinel region -1 ('Re')
        self.nodes[n] = -1
        self.keys[-1] = -1
        for i in range(self.dominator.N):
            if self.keys[i] != -2:
                for node in self.dominator.edges[i]:
                    self.multi_graph[self.keys[i]].add(self.keys[node])
        for i in range(self.dominator.N):
            if i == self.dominator.N - 1:
                self.control_tree.append(('Re', 'Exit'))
            elif self.nodes[i] != -2:
                self.classification[0].append(f'R{i}')
                self.control_tree.append((f'R{i}', self.nodes[i]))

    def replace_loop(self, loop):
        """Collapse the cycle *loop* (a list of region ids, head first)
        into a new composite region with id self.N."""
        self.multi_graph.append(set())
        if len(loop) != 1:
            self.classification[1].append(f'R{self.N - 1}')
            self.multi_graph[self.N - 1].add(self.N - 1)
        else:
            self.classification[2].append(f'R{self.N - 1}')
        reverse_graph = self.reverse()
        # grow the loop with predecessors of its non-head members
        i = 1
        n = len(loop)
        while i < n:
            for node in reverse_graph[loop[i]]:
                if node not in loop:
                    loop.append(node)
                    n += 1
            i += 1
        for node in loop:
            self.control_tree.append((f'R{self.N - 1}', f'R{node}'))
        # edges leaving the loop become edges of the composite region
        for n in loop:
            for node in self.multi_graph[n]:
                if node not in loop:
                    self.multi_graph[self.N - 1].add(node)
        # redirect edges into the loop head to the composite region
        for node in reverse_graph[loop[0]]:
            self.multi_graph[node].add(self.N - 1)
            self.multi_graph[node] -= {loop[0]}
        # the collapsed members keep no outgoing edges of their own
        for node in loop:
            self.multi_graph[node] = set()
        self.N += 1

    def reverse(self):
        """Return the predecessor adjacency of the current multigraph."""
        graph = [set() for _ in range(len(self.multi_graph))]
        for i in range(len(self.multi_graph)):
            for node in self.multi_graph[i]:
                graph[node].add(i)
        return graph

    def gen_kill(self):
        """Return (table, columns, comments) rendering the per-block
        gen/kill sets as HTML fragments (1-based 'd<i>' labels)."""
        columns = ['block'] + [i for i in range(self.dominator.N)]
        table = [['gen<sub>block</sub>'] + [{f'd<sub>{i + 1}</sub>' for i in block} for block in self.gen],
                 ['kill<sub>block</sub>'] + [{f'd<sub>{i + 1}</sub>' for i in block} for block in self.kill]]
        return table, columns, [
            '<comment>The instruction index corresponds to the line number in the original code.</comment>']

    def parse_instructions(self):
        """Scan code_blocks for definitions and fill gen/kill.

        Each `word` is indexable; a definition is recognised when
        word[0] == 0 and word[3] is truthy, with word[1] as the defined
        name (format set by the producer of code_blocks — TODO confirm).
        i counts lines (instruction index), j counts blocks.
        """
        i = 0
        j = 0
        for block in self.code_blocks:
            for line in block:
                self.instructions.append([])
                for word in line:
                    if word[0] == 0 and word[3]:
                        # and [j, word[1]] not in self.instructions: # for unique instructions
                        self.instructions[i] = [j, word[1]]
                        self.gen[j].add(i)
                i += 1
            j += 1
        # two definitions of the same name in different blocks kill each other
        n = len(self.instructions)
        for i in range(n):
            if self.instructions[i]:
                for j in range(i + 1, n):
                    if self.instructions[j]:
                        if self.instructions[i][0] != self.instructions[j][0] and self.instructions[i][1] == \
                                self.instructions[j][1]:
                            self.kill[self.instructions[i][0]].add(j)
                            self.kill[self.instructions[j][0]].add(i)

    def transfer_function(self):
        """Render per-region transfer functions from the control tree.

        Returns (table, column headers, comments); the gen/kill columns
        are emitted as empty placeholders here.
        """
        spoilers = []
        table = []
        # skip past the 'Re' (exit) entries at the front of the tree
        i = 0
        while self.control_tree[i][0] != 'Re':
            i += 1
        i += 1
        while i < len(self.control_tree):
            row = [self.control_tree[i][0]]
            tf = ['<div class="code">']
            j = i
            # one row per composite region: all tree edges sharing a parent
            while j < len(self.control_tree) and self.control_tree[i][0] == self.control_tree[j][0]:
                tf += [f'f<sub>{self.control_tree[i][0]}, In[{self.control_tree[j][1]}]</sub> = ']
                lst = []
                for pred in self.preds(self.control_tree[j][1]):
                    lst += [' ∧ ', f'f<sub>{self.control_tree[i][0]}, Out[', [pred], ']</sub>']
                if not lst:
                    tf += ['I']
                elif self.control_tree[i][0] in self.classification[1]:
                    tf += lst[1:]
                elif self.control_tree[i][0] in self.classification[2]:
                    # loop region: Kleene-star the meet of predecessors
                    tf += ['('] + lst[1:] + [')*']
                tf += ['<br>']
                h = self.find(self.control_tree[j][1])
                while h < len(self.control_tree) and self.control_tree[h][0] == self.control_tree[j][1]:
                    tf += [f'f<sub>{self.control_tree[i][0]}, Out[{self.control_tree[h][1]}]</sub> = ']
                    tf += [f'f<sub>{self.control_tree[j][1]}, Out[{self.control_tree[h][1]}]</sub> ° ']
                    tf += [f'f<sub>{self.control_tree[i][0]}, In[{self.control_tree[j][1]}]</sub> ']
                    h += 1
                tf += ['<br>']
                j += 1
            row.append(tf + ['</div>'])
            gen = '<div class="code">'
            row.append([gen + '</div>'])
            kill = '<div class="code">'
            row.append([kill + '</div>'])
            table.append(row)
            i = j
        return table, ['region', 'Transfer Function', 'gen', 'kill'], []

    def preds(self, name):
        """Resolve *name* ('Exit'/'Entry'/'R<i>'/int) to dominator
        predecessors, following the control tree for composite names."""
        if name == 'Exit':
            return self.dominator.pred_list[-1]
        if name == 'Entry':
            return self.dominator.pred_list[0]
        if type(name) is int:
            return self.dominator.pred_list[name]
        for edge in self.control_tree:
            if edge[0] == name:
                return self.preds(edge[1])
        return []

    def find(self, name):
        """Return the index of the first control-tree edge whose parent
        is *name* (0 when absent)."""
        i = 0
        for edge in self.control_tree:
            if edge[0] == name:
                return i
            i += 1
        return 0
|
12,302 | 0af3cc8733b87fa3e5f1320b07cbf22b80a9fb05 | import moeda
# Read an amount from the user and print a price summary via the
# project-local `moeda` module (20 / 12 are presumably percentage
# increase / decrease — TODO confirm against moeda.resumo's signature).
n = float(input('Digite o valor R$'))
moeda.resumo(n, 20, 12)
|
12,303 | 8ff29191b39a6b38a9f1c125f356c3727ee00f88 | from django.db import models
import string
import random
def random_chassis(size=17, chars=string.ascii_uppercase + string.digits):
    """Return a random chassis code: *size* characters drawn from *chars*
    (default: uppercase letters and digits, 17 chars like a real VIN)."""
    picks = (random.choice(chars) for _ in range(size))
    return ''.join(picks)
class Car(models.Model):
    # Registered car record. Field names are Portuguese:
    # marca=make, modelo=model, placa=licence plate, ano=year (optional),
    # cor=colour, chassi=chassis code (random_chassis() above produces
    # codes of this shape — TODO confirm it is actually used for this field).
    marca = models.CharField(max_length=75, null=False)
    modelo = models.CharField(max_length=75, null=False)
    placa = models.CharField(max_length=25, null=False)
    ano = models.CharField(max_length=4, null=True)
    cor = models.CharField(max_length=50, null=False)
    chassi = models.CharField(max_length=50, null=False)

    def __str__(self):
        # human-readable label: "<make> - <model>"
        return self.marca + ' - ' + self.modelo
|
def sample(s):
    """Demo routine: print three transformations of *s*.

    1. *s* with runs of identical adjacent characters collapsed to one.
    2. The underscore-split parts of *s* as a list.
    3. *s* converted to CamelCase (each underscore part capitalised).
    Returns None; output goes to stdout.
    """
    collapsed = ""
    for cur, nxt in zip(s, s[1:]):
        if cur != nxt:
            collapsed += cur
    # the final character always survives the collapse
    print(collapsed + s[-1])
    parts = s.split("_")
    print(parts)
    # capitalise each part in place (result unused afterwards, as before)
    for idx, part in enumerate(parts):
        parts[idx] = part[0].upper() + part[1:]
    camel = "".join(p[0].upper() + p[1:] for p in s.split("_"))
    print(camel)
# print("".join(temp))
# input = "hello_world_example"
# output = "HelloWorldExample
# input = 'aabbbaacccuussssss'
# output = 'abacus'
# Demo invocation: prints the collapsed, split, and CamelCase forms.
sample("hello_world_example")
12,305 | 408104d44e464175d25fab6898f465b692683686 | import time
from homework3.task2 import calc_with_mp
def test_calc_with_mp():
    """calc_with_mp(25) must finish within 10 seconds (wall clock)."""
    start_time = time.time()
    calc_with_mp(25)
    elapsed = time.time() - start_time
    print(elapsed)
    # Idiom fix: assert the comparison directly; the original
    # `assert (expr) is True` was a redundant identity test on a bool.
    assert elapsed <= 10
|
12,306 | 0a4214257a3e4ed04e17b452d5605a0d25973f78 | import layout;
import menu;
import messages;
def menu(name,options):
    # Draw the full menu screen for *options* and prompt the user.
    # NOTE(review): this function shadows the module imported as `menu`
    # at the top of the file — confirm which one callers expect.
    clear();
    drawHeader(name);
    showOptions(options);
    drawFooter(name);
    ask(name,options);
def clear():
    # "Clear" the console by printing CLEAR_SIZE blank lines (Python 2).
    for i in range(0,layout.CLEAR_SIZE):
        print "";
def drawHeader(name):
    # Print *name* centred between LENGTH repetitions of PATTERN per side.
    header = "";
    for i in range(0,layout.LENGTH):
        header += layout.PATTERN;
    header += " " + name + " ";
    for i in range(0,layout.LENGTH):
        header += layout.PATTERN;
    print header;
def drawFooter(name):
    # Footer bar matching the header width: 2*LENGTH patterns plus the
    # name and its two surrounding spaces.
    footer = "";
    for i in range(0,2*layout.LENGTH+2+len(name)):
        footer += layout.PATTERN;
    print footer;
def showOptions(options):
    # List the options 1..n by their "label"; 0 is always the exit entry.
    for i in range(0, len(options)):
        print(str(i+1)+") "+options[i]["label"]);
    print("0) " + messages.EXIT);
def ask(name,options):
    # Prompt for a 1-based choice and invoke the chosen option's "action".
    # NOTE(review): Python 2 input() evaluates the typed text as an
    # expression — unsafe on untrusted input; int(raw_input(...)) is safer.
    opcao = input(messages.ASK) - 1;
    if opcao >= 0 and opcao < len(options):
        options[opcao]["action"]();
    elif opcao == -1:
        # 0 was chosen: leave the menu
        return;
    else:
        # invalid choice: report, wait, and redisplay the menu
        print messages.INVALID;
        pause();
        menu(name,options);
def pause():
    # Block until the user presses Enter (Python 2 raw_input).
    raw_input(messages.PAUSE);
|
12,307 | 7d66095a4f4ccfc6b195e14cbfe6e72eab3b2788 | from opcodes import INST_OP_CODE
from instruction import Instruction
class UnaryInst(Instruction):
    """A single-operand instruction (ADD/SUB/NOT/INVERT prefixes)."""

    def __init__(self, name=None, op_code=None, value=None):
        super(UnaryInst, self).__init__(name, op_code)
        # the single operand; the `value` property's backing field is only
        # set once its setter is used
        self.operands.append(value)

    @property
    def op_code(self):
        return self._op_code

    @property
    def value(self):
        return self._value

    @op_code.setter
    def op_code(self, op):
        self._op_code = op

    @value.setter
    def value(self, lv):
        self._value = lv

    def __eval__(self):
        """Evaluate the unary operation on the first operand.

        Returns None when no opcode is set, no operand is present, or the
        opcode is not a recognised unary operation.
        NOTE(review): reads `self.opcode` (presumably provided by
        Instruction) rather than the `op_code` property here — confirm.
        """
        if self.opcode is None or len(self.operands) < 1:
            return None
        value = self.operands[0]
        if self.opcode == INST_OP_CODE.ADD:
            return value
        if self.opcode == INST_OP_CODE.SUB:
            return - value
        if self.opcode == INST_OP_CODE.NOT:
            return not value
        if self.opcode == INST_OP_CODE.INVERT:
            return ~ value

    def __repr__(self):
        """Base repr plus "[name = opcode value]".

        Bug fixes: the original called `self.super(...)` (no such
        attribute — the builtin is `super(UnaryInst, self)`) and was
        missing a `+` before str(self.opcode), a SyntaxError.
        """
        _str = super(UnaryInst, self).__repr__()
        _str += "[" + str(self.name) + " = " + str(self.opcode) + " " + str(self.value) + "]"
        return _str
|
12,308 | ba5557bb2f7c2578b5b058c7dde35ff40e4f4ea7 | import unittest
import loss_functions as lf
import numpy as np
class TestLossFunctions(unittest.TestCase):
    """Numeric spot-checks for the loss_functions module against
    hand-computed expected values, within a 1e-4 tolerance."""

    def test_logistic_loss(self):
        # logistic loss over 4 predictions vs. binary labels, n=4
        result = lf.logistic_loss(np.array([.9, 0.02, .8, .73]), np.array([1, 0, 1, 1]), 4)
        expected_result = 0.165854
        difference = result - expected_result
        self.assertTrue(np.linalg.norm(difference) < 1e-4)

    def test_logistic_loss_derivative(self):
        # element-wise gradient of the logistic loss
        result = lf.logistic_loss_derivative(np.array([.9, 0.02, .8, .73]), np.array([1, 0, 1, 1]))
        expected_result = np.array([-1.111111, 1.020408, -1.25, -1.369863])
        difference = result - expected_result
        self.assertTrue(np.linalg.norm(difference) < 1e-4)

    def test_likelihood_loss(self):
        result = lf.max_likelihood_loss(np.array([.9, 0.02, .8, .73]), np.array([1, 0, 1, 1]), 4)
        expected_result = 0.160803
        difference = result - expected_result
        self.assertTrue(np.linalg.norm(difference) < 1e-4)

    def test_likelihood_loss_derivative(self):
        # 2-D (per-class) variant: gradient is nonzero only where label==1
        result = lf.max_likelihood_loss_derivative(np.array([[.9, 0.1, .2], [.1, .9, .8]]),
                                                   np.array([[1, 0, 0], [0, 1, 1]]))
        expected_result = np.array([[-1.111111, 0, 0], [0, -1.111111, -1.25]])
        difference = result - expected_result
        self.assertTrue(np.linalg.norm(difference) < 1e-4)

    def test_text2func(self):
        # unknown loss names must raise NameError
        with self.assertRaises(NameError):
            lf.text2func('notALossFunction')
if __name__ == "__main__":
unittest.main()
|
class Library():
    """A tiny lending library.

    lend_data maps each book title to the name of its current borrower,
    or None when the book is on the shelf.
    """

    def __init__(self, list_of_books, library_name):
        self.lend_data = {}
        self.list_of_books = list_of_books
        self.library_name = library_name
        # every catalogued book starts out available
        for book in self.list_of_books:
            self.lend_data[book] = None

    def display_book(self):
        """Print every book with its index."""
        for index, book in enumerate(self.list_of_books):
            print(f"{index} {book}")

    def lend_book(self, book_name, author):
        """Lend *book_name* to *author* if it exists and is available."""
        if book_name in self.list_of_books:
            if self.lend_data[book_name] is None:
                self.lend_data[book_name] = author
            else:
                # Message fix: the book IS in the library — it is just
                # currently lent out; the old text said it was not.
                print(f"Sorry, this book is already taken by {self.lend_data[book_name]}")
        else:
            print("Please Enter A valid book name!")

    def add_book(self, book_name):
        """Add a new (available) book to the catalogue."""
        self.list_of_books.append(book_name)
        self.lend_data[book_name] = None

    def return_book(self, book_name, author):
        """Accept *book_name* back from its borrower.

        Bug fix: the original popped the book out of lend_data entirely,
        so a returned book could never be lent again (lend_book would
        raise KeyError). Mark it available instead.
        """
        if book_name in self.list_of_books:
            if self.lend_data[book_name] is not None:
                self.lend_data[book_name] = None
            else:
                print("sorry this book is not lended by any one!")
        else:
            print("you have entered a invalid book name!")

    def delete_book(self, book_name):
        """Remove *book_name* from the catalogue and the lending record."""
        self.list_of_books.remove(book_name)
        self.lend_data.pop(book_name)
def main():
    """Interactive console front-end for Library.

    Loops reading single-letter commands: D=display, L=lend, A=add,
    R=return, Del=delete (requires the secret key), E=exit.
    """
    list_books = ['Cookbook','Motu Patlu','Chacha_chaudhary','Rich Dad and Poor Dad']
    Library_name = 'Harry'
    # required to delete books (compared against user input below)
    secret_key = 123456
    Harry = Library(list_books, Library_name)
    print(f"Welcome to Library of {Harry.library_name} \n\n Display Books Using 'D'\n Lend Book Using 'L'\n Add Book Using 'A'\n Return Book Using 'R'\n Delete Book using 'Del'\n Exit Using 'E'\n\n" )
    Exit = False
    while(Exit is not True):
        _input1 = input("Option:")
        print('\n')
        if _input1 == 'D' or _input1 == 'd':
            Harry.display_book()
        elif _input1 == 'L' or _input1 == 'l':
            _Author = input("Enter your name :")
            _Book_name = input("Enter Book Name :")
            print("-----Book Lend-----")
            Harry.lend_book(_Book_name, _Author)
        elif _input1 == 'A' or _input1 == 'a':
            _Book_name = input("Enter Book Name :")
            print("-----Adding Book-----")
            Harry.add_book(_Book_name)
        elif _input1 == 'R' or _input1 == 'r':
            _Author = input("Enter your name :")
            _Book_name = input("Enter Book Name :")
            Harry.return_book(_Book_name, _Author)
        elif _input1 == 'Del' or _input1 == 'del':
            _Book_name = input("Enter Book Name you Want to Delete :")
            # non-numeric input will raise ValueError here (unhandled)
            _key = int(input("Enter the Secrate Key :"))
            if _key == secret_key:
                Harry.delete_book(_Book_name)
                print("-----Book Deleted-----")
            else:
                print("Sorry your Secrate Key is not Right")
        elif _input1 == 'E' or _input1 == 'e':
            Exit = True
if __name__ == "__main__":
main()
|
12,310 | 6948f72dae3dc1e5e25a9d69d06529cfa709818a | import socket
import time
import threading
import os
import sys
from random import randint
# Bind a UDP socket on the operator-supplied address (Python 2 script:
# raw_input / print statements throughout).
host = raw_input("enter IP address: ")
port = 5000
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sock.bind((host, port))
def StopAndWait(file_name, addr):
    # Send *file_name* to *addr* in 512-byte chunks over the module-level
    # UDP socket using a stop-and-wait scheme: each chunk is preceded by
    # its sequence number and resent on timeout until an ACK datagram
    # arrives. One chunk in ten is randomly "dropped" to exercise the
    # retransmission path.
    with open(file_name, 'rb') as f:
        # bytesToSend = f.read(512)
        i=0
        # flag = 1
        bytesToSend=None
        while bytesToSend != "":
            # if flag == 0:
            bytesToSend = f.read(512)
            i+=1
            randpacket = randint(0, 9)  # 1-in-10 chance to simulate a drop
            sock.settimeout(10)
            ack = False
            if randpacket == 4:
                print "dropped!"
                # NOTE(review): socket.timeout is an exception CLASS and is
                # always truthy, so `not socket.timeout` is always False and
                # this loop never spins — confirm the intended wait here.
                while not socket.timeout:
                    continue
            else:
                sock.sendto(str(i), addr)
                sock.sendto(bytesToSend, addr)
            # wait for the ACK; on timeout resend the sequence no. + chunk
            while not ack:
                try:
                    print "TRY"
                    data, addr = sock.recvfrom(512)
                    ack = True
                except socket.timeout :
                    print "timeout, resend packet..."
                    sock.sendto(str(i), addr)
                    sock.sendto(bytesToSend, addr)
            print data
            # flag = 0
def Main():
    # Wait for an initial greeting datagram, then serve file requests
    # until a client sends "quit". For each requested file that exists,
    # announce "EXISTS<size>" and stream it via StopAndWait once the
    # client answers 'OK'.
    data, addr = sock.recvfrom(512)
    print data + "Server Started.\n Client connected <" + str(addr) + ">"
    quitting = False
    while not quitting:
        filename, addr = sock.recvfrom(512)
        if str(filename) == "quit":
            quitting = True
            continue
        print filename +" "+ str(addr)
        if(os.path.isfile(filename)):
            print "Exists "
            sock.sendto(("EXISTS" + str(os.path.getsize(filename))), addr)
            userResponse, addr = sock.recvfrom(512)
            if(userResponse == 'OK'):
                print (userResponse + " received")
                StopAndWait(filename,addr)
    sock.close()
Main() |
12,311 | 2c4f5564d985ba5c7bdb13c0a9f13c96040ad0f1 | ## contect manager using self define class
class My_Open_File():
    """Hand-rolled context manager: opens *filename* in *mode* on entry
    and guarantees the file is closed on exit."""

    def __init__(self, filename, mode):
        self.filename = filename
        self.mode = mode

    def __enter__(self):
        # open lazily on entry; the `with` body receives the file object
        self.file = open(self.filename, self.mode)
        return self.file

    def __exit__(self, exc_type, exc_val, traceback):
        # always close; returning None lets any exception propagate
        self.file.close()
# Demo: write through the class-based context manager; the handle is
# closed once the with-block exits, so f.closed prints True.
with My_Open_File('../Data.txt','w') as f:
    f.write('Hello Ann, this is my first time trying context manager')
print(f.closed)
## context manager using decorator
from contextlib import contextmanager
@contextmanager
def my_open_file(file, mode):
    """Generator-based context manager yielding an open file handle that
    is closed on exit.

    Bug fix: the original called open() inside the try block, so when
    open() itself raised (e.g. missing file), the finally clause hit an
    UnboundLocalError on `f`. Open before entering the try instead.
    """
    f = open(file, mode)
    try:
        yield f  # the `with` body runs here (acts as __enter__)
    finally:
        f.close()  # acts as __exit__, runs even on exceptions
# Demo: same write/closed check through the decorator-based manager.
with my_open_file("../Data.txt",'w') as f:
    f.write('Hello Ann, this is my second time trying context manager')
print(f.closed)
import os
@contextmanager
def change_dir(destination):
    """Temporarily chdir into *destination*; the previous working
    directory is restored on exit, even on exceptions."""
    try:
        previous = os.getcwd()
        os.chdir(destination)
        yield
    finally:
        os.chdir(previous)
# Demo: list the parent directory while temporarily chdir'ed into it.
with change_dir('..'):
    print(os.listdir())
12,312 | f4ae13c8b31bb17a6b1f712aac542d06f2465d7c | import argparse
import torch
import torch.nn as nn
import re
import numpy as np
import os
import pickle
from data_loader import get_loader
from data_loader import get_images
from build_vocab import Vocabulary
from model import EncoderCNN, DecoderRNN
from torch.autograd import Variable
from torch.nn.utils.rnn import pack_padded_sequence
from torchvision import transforms
def to_var(x, volatile=False):
    # Wrap tensor `x` in an autograd Variable, moving it to the GPU first
    # when CUDA is available.
    # NOTE(review): the `volatile` flag was removed from torch.autograd in
    # PyTorch 0.4+; on modern torch this keyword may warn or fail —
    # confirm the torch version this project targets.
    if torch.cuda.is_available():
        x = x.cuda()
    return Variable(x, volatile=volatile)
def main(args):
    """Train an EncoderCNN/DecoderRNN captioning model on Frogger
    rationalization text.

    args: argparse.Namespace built at the bottom of this file (paths,
    logging cadence, and model hyper-parameters).
    NOTE(review): the training loop contains a `print`/`exit(0)` pair
    that aborts on the first batch — looks like leftover debugging.
    """
    # Create model directory
    if not os.path.exists(args.model_path):
        os.makedirs(args.model_path)
    # Image preprocessing
    # For normalization, see https://github.com/pytorch/vision#models
    transform = transforms.Compose([
        transforms.RandomCrop(args.crop_size),
        transforms.RandomHorizontalFlip(),
        transforms.ToTensor(),
        transforms.Normalize((0.485, 0.456, 0.406),
                             (0.229, 0.224, 0.225))])
    # Load vocabulary wrapper.
    with open(args.vocab_path, 'rb') as f:
        vocab = pickle.load(f)
    # read rationalization data
    rationalizations = []
    max_length = 0
    lengths = []
    # crowd-worker ids whose annotations should be excluded
    # NOTE(review): this list is never referenced below — confirm whether
    # filtering was meant to happen while reading Rationalizations.txt.
    bad_worker_ids = ['A2CNSIECB9UP05','A23782O23HSPLA','A2F9ZBSR6AXXND','A3GI86L18Z71XY','AIXTI8PKSX1D2','A2QWHXMFQI18GQ','A3SB7QYI84HYJT',
                      'A2Q2A7AB6MMFLI','A2P1KI42CJVNIA','A1IJXPKZTJV809','A2WZ0RZMKQ2WGJ','A3EKETMVGU2PM9','A1OCEC1TBE3CWA','AE1RYK54MH11G','A2ADEPVGNNXNPA',
                      'A15QGLWS8CNJFU','A18O3DEA5Z4MJD','AAAL4RENVAPML','A3TZBZ92CQKQLG','ABO9F0JD9NN54','A8F6JFG0WSELT','ARN9ET3E608LJ','A2TCYNRAZWK8CC',
                      'A32BK0E1IPDUAF','ANNV3E6CIVCW4']
    with open('./Log/Rationalizations.txt') as f:
        for line in f:
            # normalise: lowercase, keep only letters/spaces/apostrophes
            line = line.lower()
            line = re.sub('[^a-z\ \']+', " ", line)
            words = line.split()
            length = len(words)
            lengths.append(length)
            if length>max_length:
                max_length = length
            # map tokens to vocabulary indices in place
            for index,word in enumerate(words):
                words[index] = vocab.word2idx[word]
            rationalizations.append(words)
    # max_length = max(rationalizations,key=len
    rationalizations=[np.array(xi) for xi in rationalizations]
    # for index,r in enumerate(rationalizations):
    # # print(max_length)
    # r = np.lib.pad(r,(0,max_length - len(r)),'constant')
    # rationalizations[index] = r
    # rationalizations = np.vstack(rationalizations)
    # print(rationalizations)
    # print(rationalizations.shape)
    # print(torch.from_numpy(rationalizations))
    # rationalizations = torch.from_numpy(rationalizations)
    # print(np.asarray(rationalizations).reshape(rationalizations.shape,rationalizations.shape))
    # Build data loader
    data_loader = get_loader(args.image_dir, args.caption_path, vocab,
                             transform, args.batch_size,
                             shuffle=True, num_workers=args.num_workers)
    # Build the models
    encoder = EncoderCNN(args.embed_size)
    decoder = DecoderRNN(args.embed_size, args.hidden_size,
                         len(vocab), args.num_layers)
    if torch.cuda.is_available():
        encoder.cuda()
        decoder.cuda()
    # Loss and Optimizer (only the encoder's linear/bn layers are trained)
    criterion = nn.CrossEntropyLoss()
    params = list(decoder.parameters()) + list(encoder.linear.parameters()) + list(encoder.bn.parameters())
    optimizer = torch.optim.Adam(params, lr=args.learning_rate)
    frogger_data_loader = get_images('./data/FroggerDataset/',args.batch_size,transform)
    # exit(0)
    # Train the Models
    # data = iter(frogger_data_loader)
    # imgs = data.next()[0]
    # print(imgs)
    # print(frogger_data_loader[0])
    # exit(0)
    # for i,(images) in enumerate(frogger_data_loader):
    # print(images)
    total_step = len(frogger_data_loader)
    for epoch in range(args.num_epochs):
        for i,x in enumerate(frogger_data_loader):
            # print(x)
            # print(x[0])
            # exit(0)
            # print(x[0])
            # exit(0)
            images = to_var(x[0], volatile=True)
            # NOTE(review): this print + exit(0) aborts training on the
            # first batch — confirm it should be removed before a real run.
            print(images[0][1])
            exit(0)
            # pad the batch's rationalizations to a common length,
            # sorted longest-first as pack_padded_sequence requires
            captions = []
            max_length = max(lengths[i:i+2])
            rats = rationalizations[i:i+2]
            rats.sort(key = lambda s: len(s))
            rats.reverse()
            # print(rats)
            # exit(0)
            for index,r in enumerate(rats):
                # print(max_length)
                r = np.lib.pad(r,(0,max_length - len(r)),'constant')
                captions.append(r)
            # rationalizations = np.vstack(rationalizations)
            # captions.sort(key = lambda s: len(s))
            captions = to_var(torch.from_numpy(np.asarray(captions)))
            # lengths.append(len(rationalizations[i]))
            new_lengths = []
            # new_lengths.append(lengths[i])
            new_lengths = lengths[i:i+2]
            new_lengths.sort()
            new_lengths.reverse()
            captions = captions
            # print(captions)
            # print(new_lengths)
            targets = pack_padded_sequence(captions, new_lengths, batch_first=True)[0]
            # Forward, Backward and Optimize
            decoder.zero_grad()
            encoder.zero_grad()
            # print(images)
            features = encoder(images)
            # print(features)
            # print(rats)
            # print(len(lengths))
            outputs = decoder(features, captions, new_lengths)
            loss = criterion(outputs, targets)
            loss.backward()
            optimizer.step()
            # Print log info
            if i % args.log_step == 0:
                print('Epoch [%d/%d], Step [%d/%d], Loss: %.4f, Perplexity: %5.4f'
                      %(epoch, args.num_epochs, i, total_step,
                        loss.data[0], np.exp(loss.data[0])))
            # Save the models
            if (i+1) % args.save_step == 0:
                torch.save(decoder.state_dict(),
                           os.path.join(args.model_path,
                                        'decoder-%d-%d.pkl' %(epoch+1, i+1)))
                torch.save(encoder.state_dict(),
                           os.path.join(args.model_path,
                                        'encoder-%d-%d.pkl' %(epoch+1, i+1)))
    # Earlier COCO-style training loop, kept for reference:
    # exit(0)
    # total_step = len(data_loader)
    # for epoch in range(args.num_epochs):
    # for i, (images, captions, lengths) in enumerate(data_loader):
    # # print(captions)
    # # print(images)
    # # print(lengths)
    # # print(captions)
    # # # print(images)
    # # exit(0)
    # # Set mini-batch dataset
    # images = to_var(images, volatile=True)
    # print(captions)
    # captions = to_var(captions)
    # print(captions)
    # print(lengths)
    # targets = pack_padded_sequence(captions, lengths, batch_first=True)[0]
    # # Forward, Backward and Optimize
    # decoder.zero_grad()
    # encoder.zero_grad()
    # print(images)
    # features = encoder(images)
    # print(features)
    # exit(0)
    # outputs = decoder(features, captions, lengths)
    # loss = criterion(outputs, targets)
    # loss.backward()
    # optimizer.step()
    # # Print log info
    # if i % args.log_step == 0:
    # print('Epoch [%d/%d], Step [%d/%d], Loss: %.4f, Perplexity: %5.4f'
    # %(epoch, args.num_epochs, i, total_step,
    # loss.data[0], np.exp(loss.data[0])))
    # # Save the models
    # if (i+1) % args.save_step == 0:
    # torch.save(decoder.state_dict(),
    # os.path.join(args.model_path,
    # 'decoder-%d-%d.pkl' %(epoch+1, i+1)))
    # torch.save(encoder.state_dict(),
    # os.path.join(args.model_path,
    # 'encoder-%d-%d.pkl' %(epoch+1, i+1)))
if __name__ == '__main__':
    # CLI: data/model paths, logging/checkpoint cadence, and model
    # hyper-parameters; parsed namespace is handed straight to main().
    parser = argparse.ArgumentParser()
    parser.add_argument('--model_path', type=str, default='./models/' ,
                        help='path for saving trained models')
    parser.add_argument('--crop_size', type=int, default=224 ,
                        help='size for randomly cropping images')
    parser.add_argument('--vocab_path', type=str, default='./data/vocab_frogger.pkl',
                        help='path for vocabulary wrapper')
    parser.add_argument('--image_dir', type=str, default='./data/resized2014' ,
                        help='directory for resized images')
    parser.add_argument('--caption_path', type=str,
                        default='./data/annotations/captions_train2014.json',
                        help='path for train annotation json file')
    parser.add_argument('--log_step', type=int , default=10,
                        help='step size for prining log info')
    parser.add_argument('--save_step', type=int , default=20,
                        help='step size for saving trained models')
    # Model parameters
    parser.add_argument('--embed_size', type=int , default=256 ,
                        help='dimension of word embedding vectors')
    parser.add_argument('--hidden_size', type=int , default=512 ,
                        help='dimension of lstm hidden states')
    parser.add_argument('--num_layers', type=int , default=1 ,
                        help='number of layers in lstm')
    parser.add_argument('--num_epochs', type=int, default=10)
    parser.add_argument('--batch_size', type=int, default=2)
    parser.add_argument('--num_workers', type=int, default=2)
    parser.add_argument('--learning_rate', type=float, default=0.001)
    args = parser.parse_args()
    print(args)
    main(args)
12,313 | c87b23c46f69323da337d1ee50fc016f834551f7 | # -*- coding: utf-8 -*-
# @Author: iori
# @Date: 2016-11-17 13:56:19
# @Last Modified by: lei gao
# @Last Modified time: 2017-11-01 14:12:27
from __future__ import division
import heapq
import numpy as np
import copy
from collections import defaultdict
import random
import math
import logging
logger = logging.getLogger("APP.WORLD")
class Env(object):
    """Grid-world environment: a car moves on a 10x10 grid collecting
    decaying "job" rewards while avoiding fixed barrier cells."""

    def __init__(self, env_builder, temperature=1.0):
        super(Env, self).__init__()
        self.builder = env_builder
        # Bug fix: reset()/get_state()/step() all read self.config, but
        # this assignment had been commented out, so every episode start
        # raised AttributeError. Restored from the builder.
        self.config = env_builder.env_config
        self.distances = env_builder.distances

    def reset(self):
        """Start a new episode and return (state, reward=0, terminal=False).

        The car starts at the origin, barriers occupy a fixed 3x3 lattice,
        and job_num jobs with values in [12, init_value] are scattered on
        free cells.
        """
        self.ordering = 0
        self.car = [0, 0]
        # NOTE(review): the grid is hard-coded to 10x10 here although the
        # rest of the class uses config.screen_width/height — confirm the
        # config always matches 10x10.
        self.grid = np.zeros([10, 10])
        self.barrier = np.zeros([10, 10])
        for loc in [[2, 2], [2, 5], [2, 8], [5, 2], [5, 5], [5, 8], [8, 2], [8, 5], [8, 8]]:
            self.barrier[loc[0], loc[1]] = 1
        for _ in range(self.config.job_num):
            coord = np.random.randint(self.config.screen_width, size=2)
            # re-draw until the cell is empty, not the car, and not a barrier
            while self.grid[coord[0], coord[1]] != 0 or list(coord) == self.car or self.barrier[coord[0], coord[1]] == 1:
                coord = np.random.randint(self.config.screen_width, size=2)
            self.grid[coord[0], coord[1]] = np.random.randint(12, self.config.init_value + 1)
        return self.get_state(), 0, False

    def get_state(self):
        """Return the observation: (width, height, 3) stack of
        [car one-hot, job values normalised by init_value, barriers]."""
        car_state = np.zeros([self.config.screen_width, self.config.screen_height])
        car_state[self.car[0], self.car[1]] = 1
        job_state = self.grid / self.config.init_value
        barrier_state = self.barrier
        return np.stack([car_state, job_state, barrier_state], axis=-1)

    def take_action(self, policy):
        """Move the car one step: 0=+row, 1=-row, 2=+col, 3=-col,
        clamped to the grid (any other value is a no-op)."""
        if policy == 0 and self.car[0] < self.config.screen_height - 1:
            self.car[0] += 1
        elif policy == 1 and self.car[0] > 0:
            self.car[0] -= 1
        elif policy == 2 and self.car[1] < self.config.screen_width - 1:
            self.car[1] += 1
        elif policy == 3 and self.car[1] > 0:
            self.car[1] -= 1

    def step(self, policy):
        """Advance one tick with action *policy*.

        Returns (state, reward / ticks, terminal). Collecting a job yields
        its current value; standing on a barrier costs 100; all remaining
        jobs decay by 1 and expired/collected ones are replaced.
        """
        self.ordering += 1
        # get actions
        self.take_action(policy)
        reward, count = 0, 0
        # collect the job under the car, if any
        if self.grid[self.car[0], self.car[1]]:
            reward += self.grid[self.car[0], self.car[1]]
            self.grid[self.car[0], self.car[1]] = 0.0
            count += 1
        if self.barrier[self.car[0], self.car[1]] == 1:
            reward -= 100
        # decay every remaining job; count the ones that just expired
        for i in range(self.config.screen_width):
            for j in range(self.config.screen_height):
                if self.grid[i, j]:
                    self.grid[i, j] -= 1
                    if self.grid[i, j] == 0:
                        count += 1
        # respawn one new job per collected/expired job
        for _ in range(count):
            coord = np.random.randint(self.config.screen_width, size=2)
            while self.grid[coord[0], coord[1]] != 0 or list(coord) == self.car or self.barrier[coord[0], coord[1]] == 1:
                coord = np.random.randint(self.config.screen_width, size=2)
            self.grid[coord[0], coord[1]] = np.random.randint(12, self.config.init_value + 1)
        # episodes last config.ticks steps
        if self.ordering <= self.config.ticks:
            terminal = False
        else:
            terminal = True
        return self.get_state(), reward / self.config.ticks, terminal
|
# Read a space-separated list of names and greet each one, capitalised.
names = input("Podaj imiona osób, które chcesz powitać oddzielone spacją!")
names = names.split()
for e in names:
    e = e.capitalize()
    print("Hello", e+'!')
|
12,315 | 053c61d1745d1e2683f4370f2140f2c2be7b71e6 | import json
import subprocess
from concurrent.futures import ThreadPoolExecutor
import bcrypt
import tornado.escape
import tornado.httpserver
import tornado.ioloop
import tornado.options
import tornado.web
from tornado.web import RequestHandler
from db import torndb
from core.player import Player
class BaseHandler(RequestHandler):
    """Shared plumbing for all handlers: exposes the application's DB
    connection and thread pool as properties."""

    @property
    def db(self) -> torndb.Connection:
        return self.application.db

    @property
    def executor(self) -> ThreadPoolExecutor:
        return self.application.executor

    def data_received(self, chunk):
        # streamed request bodies are ignored
        pass

    def on_finish(self):
        # self.session.flush()
        pass
class WebHandler(BaseHandler):
    """Serve the poker page, planting a CSRF cookie on first visit."""

    # @tornado.web.authenticated
    def get(self):
        if not self.get_cookie("_csrf"):
            self.set_cookie("_csrf", self.xsrf_token)
        self.render('poker.html')
class UpdateHandler(BaseHandler):
    """Pull the latest code with `git pull` and echo the command output."""

    def get(self):
        # NOTE(review): subprocess.run blocks the IOLoop, and exposing a
        # deploy action over GET is dangerous — confirm this is admin-only.
        proc = subprocess.run(["git", "pull"], stdout=subprocess.PIPE)
        # Bug fix: RequestHandler.head() is the HTTP HEAD verb handler,
        # not a header setter; use set_header() to declare the type.
        self.set_header('content-type', 'text/plain; charset=UTF-8')
        self.write(proc.stdout)
class RegHandler(BaseHandler):
    """Create a new account and log the user in via a secure uid cookie."""

    def post(self):
        email = self.get_argument('email', self.get_argument('username'))
        # NOTE(review): the quoted "%s" placeholder pattern is repeated
        # from LoginHandler; confirm torndb substitutes it safely here.
        account = self.db.get('SELECT * FROM account WHERE email="%s"', email)
        if account:
            raise tornado.web.HTTPError(400, "username already taken")
        username = self.get_argument('username')
        password = self.get_argument('password')
        # store a salted bcrypt hash, never the plaintext password
        password = bcrypt.hashpw(password.encode('utf8'), bcrypt.gensalt())
        uid = self.db.insert('INSERT INTO account (email, username, password) VALUES ("%s", "%s", "%s")',
                             email, username, password)
        # Bug fix: on this path `account` is None (registration just
        # succeeded), so account.get('id') crashed; use the id returned
        # by the INSERT instead.
        self.set_secure_cookie("uid", str(uid))
        self.write('ok')
class LoginHandler(BaseHandler):
    """Validate credentials and start a session via the uid cookie."""

    def post(self):
        username = self.get_argument('email')
        password = self.get_argument("password")
        account = self.db.get('SELECT * FROM account WHERE email="%s"', self.get_argument('email'))
        if not account:
            # Robustness fix: an unknown email previously crashed with an
            # AttributeError on None; reject it explicitly instead.
            raise tornado.web.HTTPError(400, "unknown account")
        # hashing the candidate password with the stored hash as salt
        # reproduces the stored hash iff the password matches
        password = bcrypt.hashpw(password.encode('utf8'), account.get('password'))
        # Bug fix: head() is the HTTP HEAD verb handler, not a header
        # setter; use set_header().
        self.set_header('content-type', 'application/json')
        if password == account.get('password'):
            self.set_secure_cookie("uid", str(account.get('id')))
            self.redirect(self.get_argument("next", "/"))
class LoginoutHandler(BaseHandler):
    """Log the user out: drop the uid cookie and the server-side session."""

    def post(self):
        uid = self.get_secure_cookie("uid")
        self.clear_cookie("uid")
        # NOTE(review): BaseHandler defines no `session` attribute (its
        # flush call in on_finish is commented out), so this line will
        # raise AttributeError — confirm where `session` should come from.
        self.session.remove(int(uid))
        self.redirect(self.get_argument("next", "/"))
|
12,316 | 3a16874c4090d06c73aafd03cfb2f0b160db2e3f | import random
chars = 'abcdefghijklnopqrsuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890'
num = int(input('enter number of passwords'))
lenght = int(input('enter lenght of passwords'))
for i in range(num):
password = ''
for x in range(lenght):
password += random.choice(chars)
print(password)
|
12,317 | 82082c89883f6ddf69a7b7d8e35b5b4e13cbbf92 | from os import read
from .views import about_us, acidity, beans, blend, culture, customizing, grind, home, machines, products
from django.urls import path
# Route table: one flat path per page view.
# NOTE(review): `from os import read` at the top of this file is unused.
urlpatterns = [
    path('',home, name="home_page"),
    path('products/', products,name='products'),
    path('machines/', machines,name='machines'),
    path('about_us/', about_us,name='about_us'),
    path('customizing/',customizing,name='customizing'),
    path('culture/',culture,name='culture'),
    path('grind/', grind,name='grind'),
    path('acidity/', acidity,name='acidity'),
    path('beans/', beans,name='beans'),
    path('blend/', blend,name='blend'),
]
12,318 | 2fcf5ba869ee1fad4f18c14bbf214cf16012b2c1 | # vim: filetype=python ts=2 sw=2 sts=2 et :
from sym import Sym
# Smoke-test Sym: symbol counts and spread on the sample string "aaaabbc".
s = Sym(all="aaaabbc")
assert s.seen["a"] == 4
assert 1.378 <= s.spread() <= 1.38
|
12,319 | 676de88b908a0360c13813e17ce8234a5274e977 | # -*- coding: utf-8 -*-
#------------------------------------------------------------------
# LEIA E PREENCHA O CABEÇALHO
# NÃO ALTERE OS NOMES DAS FUNÇÕES
# NÃO APAGUE OS DOCSTRINGS
#------------------------------------------------------------------
'''
Nome: Gulherme Navarro
NUSP: 8943160
Ao preencher esse cabeçalho com o meu nome e o meu número USP,
declaro que todas as partes originais desse exercício programa (EP)
foram desenvolvidas e implementadas por mim e que portanto não
constituem desonestidade acadêmica ou plágio.
Declaro também que sou responsável por todas as cópias desse
programa e que não distribui ou facilitei a sua distribuição.
Estou ciente que os casos de plágio e desonestidade acadêmica
serão tratados segundo os critérios divulgados na página da
disciplina.
Entendo que EPs sem assinatura devem receber nota zero e, ainda
assim, poderão ser punidos por desonestidade acadêmica.
'''
# ============================================================
# DEFINIÇÃO DE CONSTANTES QUE VOCÊ PODE UTILIZAR
# DEFINA OUTRAS SE DESEJAR
PISO_VAZIO = ' '  # empty floor cell used to pad short map rows
VAZIA = '-'  # marker for an empty position
NOVA_LINHA = '|'  # cell separator drawn by imprima_mapa()
DIGITOS = "0123456789"  # digit characters (for parsing numeric input)
# ============================================================
def main():
    '''(None) -> None

    Demonstra a chamada de ponha_espacos() e imprima_mapa() sobre um
    mapa de exemplo.

    Exemplo:
    >>> main()
    >>>
    '''
    exemplo = [
        ['#', '#', '#', '#', '#', '#', '#'],
        ['#', '.', '@', ' ', '#', ' ', '#'],
        ['#', '$', '*', ' ', '$', ' ', '#'],
        ['#', ' ', ' ', ' ', '$', ' ', '#'],
        ['#', ' ', '.', '.', ' ', ' ', '#'],
        ['#', ' ', ' ', '*', ' ', ' ', '#'],
        ['#', '#', '#', '#', '#', '#', '#'],
    ]
    imprima_mapa(ponha_espacos(exemplo))
#-----------------------------------------------------------------------
def ponha_espacos(mapa):
    '''(list) -> (list)

    Pad every row of `mapa` in place with PISO_VAZIO until all rows have
    the length of the longest row. Returns the same (mutated) list.
    '''
    # Idiom: the hand-rolled length scan + char-by-char `+=` loop is
    # equivalent to max() over the row lengths plus a single extend().
    ncol = max(len(linha) for linha in mapa)
    for linha in mapa:
        linha.extend(PISO_VAZIO * (ncol - len(linha)))
    return mapa
#-----------------------------------------------------------------------
def maxi(ncols):
    '''list -> int

    Return the largest value of a non-empty list.
    '''
    # Idiom: the manual while-loop scan reimplemented the builtin.
    return max(ncols)
#-----------------------------------------------------------------------
def imprima_mapa(mapa):
    '''(list) -> None

    Imprime `mapa` emoldurado numa grade ASCII com os índices das linhas
    e das colunas. As linhas são primeiro completadas com ponha_espacos()
    para que todas tenham o mesmo número de células.

    Formato: cabeçalho de índices de coluna ('%7d' para o primeiro,
    '%4d' para os demais), divisórias '+---' e células '| c' separadas
    por NOVA_LINHA, com o índice da linha em '%3d' à esquerda.
    '''
    mapa = ponha_espacos(mapa)
    ncol = len(mapa[0])
    # Column-index header.
    cabecalho = '%7d' % 0
    for col in range(1, ncol):
        cabecalho += '%4d' % col
    print(cabecalho + PISO_VAZIO)
    # Horizontal divider repeated between every row.
    divisoria = '%4s' % PISO_VAZIO + '+---' * ncol + '+'
    print(divisoria)
    for lin, linha in enumerate(mapa):
        celulas = ''.join('%2s %s' % (NOVA_LINHA, c) for c in linha)
        print('%3d' % lin + celulas + '%2s' % NOVA_LINHA)
        print(divisoria)
#-----------------------------------------------------------------------
# Run the demo only when executed as a script, not on import.
if __name__ == "__main__":
    main()
|
12,320 | 4cd008a1cf96025c0a128f439c71942564622c06 |
import matplotlib; matplotlib.use("agg")
import theano
from theano import tensor as T
import lasagne
from lasagne.layers import *
from lasagne.objectives import *
from lasagne.nonlinearities import *
from lasagne.updates import *
from lasagne.utils import *
from lasagne.init import *
import numpy as np
#import cPickle as pickle
#import gzip
import matplotlib
#matplotlib.use('agg')
import matplotlib.pyplot as plt
import os
import sys
from time import time
if __name__ == "__main__":
sys.path.insert(0,'..')
from common import *
else:
from ..common import *
import time
import logging
from sklearn.manifold import TSNE
def get_net(net_cfg, args):
    """Compile the theano train/eval/inference functions for an autoencoder.

    `net_cfg` is a builder (e.g. autoencoder_basic_32) returning
    (output_layer, bottleneck_layer); `args` is the hyperparameter dict.
    Returns a dict with compiled functions, the shared learning rate and
    the output layer.
    """
    l_out, hid_layer = net_cfg(args)
    X = T.tensor4('X')
    Y = T.ivector('Y')
    net_out = get_output(l_out, X)
    hid_out = get_output(hid_layer, X)
    clsf_loss = get_classifier_loss(hid_layer,X,Y,args)
    # Reconstruction objective: mean pixel-wise squared error.
    rec_loss = squared_error(net_out, X).mean()
    if args['with_classif_loss']:
        # Semi-supervised: weighted reconstruction plus classification loss.
        loss = args['lrec'] * rec_loss + clsf_loss
        inputs = [X,Y]
    else:
        loss = rec_loss
        inputs=[X]
    # NOTE(review): only l_out's params are collected here -- the softmax head
    # created inside get_classifier_loss() is never trained; confirm intended.
    params = get_all_params(l_out, trainable=True)
    lr = theano.shared(floatX(args["learning_rate"]))
    updates = nesterov_momentum(loss, params, learning_rate=lr, momentum=0.9)
    train_fn = theano.function(inputs, loss, updates=updates)
    loss_fn = theano.function(inputs, loss)
    out_fn = theano.function([X], net_out)
    hid_fn = theano.function([X],hid_out)
    return {
        "train_fn": train_fn,
        "loss_fn": loss_fn,
        "out_fn": out_fn,
        "lr": lr,
        "l_out": l_out,
        "h_fn": hid_fn,
    }
def get_classifier_loss(hid_layer,x,y, args):
    """Cross-entropy loss of a softmax head on the bottleneck features.

    Only examples whose label passes ``y > -3`` contribute; kept labels
    are shifted by +2 so the smallest kept label maps to class index 0.
    """
    clsf = DenseLayer(hid_layer, num_units=args['num_classes'], nonlinearity=softmax)
    # presumably labels <= -3 mean "unlabelled" -- TODO confirm the encoding
    label_inds = y > -3
    #get x's with labels
    x_lbl = x[label_inds.nonzero()]
    y_lbl = y[label_inds.nonzero()]
    y_lbl = y_lbl + 2
    clsf_out = get_output(clsf, x_lbl)
    clsf_loss = categorical_crossentropy(clsf_out, y_lbl).mean()
    return clsf_loss
def autoencoder_basic_32(args):
    """Build a conv autoencoder: 5 stride-2 convs -> dense code -> mirrored deconvs.

    Returns (output_layer, bottleneck_dense_layer). NOTE: this module uses a
    Python 2 print statement below.
    """
    conv_kwargs = {'nonlinearity': rectify, 'W': HeNormal()}
    net = InputLayer(args['shape'])
    # Denoising corruption on the input.
    net = GaussianNoiseLayer(net, args["sigma"])
    for i in range(5):
        net = Conv2DLayer(net, num_filters=args["nfilters"], filter_size=2,stride=2, **conv_kwargs)
        #net = MaxPool2DLayer(net, pool_size=2)
    # [i] entries let ReshapeLayer reuse the matching input dimension for
    # any axis whose size is unknown (the batch axis).
    last_conv_shape = tuple([k if k is not None else [i] for i,k in enumerate(get_output_shape(net,args['shape']))] )
    hid_layer = DenseLayer(net, num_units=args['code_size'], **conv_kwargs)
    net = DenseLayer(hid_layer, num_units=np.prod(last_conv_shape[1:]))
    net = ReshapeLayer(net, shape=last_conv_shape)
    # Walk the encoder backwards, mirroring each conv (and pool) with its inverse.
    for layer in get_all_layers(net)[::-1]:
        if isinstance(layer, MaxPool2DLayer):
            net = InverseLayer(net, layer)
        if isinstance(layer, Conv2DLayer):
            conv_dict = {key:getattr(layer, key) for key in ["stride", "pad", "num_filters", "filter_size"]}
            conv_dict['crop'] = conv_dict['pad']
            del conv_dict['pad']
            if not isinstance(layer.input_layer,Conv2DLayer):
                # First encoder conv -> last deconv must reproduce the input
                # channel count, with a linear output.
                conv_dict['num_filters'] = args["shape"][1]
                conv_dict['nonlinearity'] = linear
            net = Deconv2DLayer(net, **conv_dict)
    # `logger` is created at module level further down -- assumes this is only
    # called after that setup runs; TODO confirm.
    for layer in get_all_layers(net):
        logger.info(str(layer) + str(layer.output_shape))
        print count_params(layer)
    return net, hid_layer
# def plot_learn_curve(tr_losses, val_losses, save_dir='.'):
# plt.clf()
# plt.plot(tr_losses)
# plt.plot(val_losses)
# plt.savefig(save_dir + '/learn_curve.png')
# plt.clf()
# def plot_clusters(i,x,y, save_dir='.'):
# x = np.squeeze(x)
# hid_L = net_cfg['h_fn'](x)
# ts = TSNE().fit_transform(hid_L)
# plt.clf()
# plt.scatter(ts[:,0], ts[:,1], c=y)
# plt.savefig(save_dir + '/cluster_%i.png'%(i))
# plt.clf()
# def plot_recs(i,x,net_cfg, save_dir='.'):
# ind = np.random.randint(0,x.shape[0], size=(1,))
# x=np.squeeze(x)
# #print x.shape
# im = x[ind]
# #print im.shape
# rec = net_cfg['out_fn'](im)
# ch=1
# plt.figure(figsize=(30,30))
# plt.clf()
# for (p_im, p_rec) in zip(im[0],rec[0]):
# p1 = plt.subplot(im.shape[1],2, ch )
# p2 = plt.subplot(im.shape[1],2, ch + 1)
# p1.imshow(p_im)
# p2.imshow(p_rec)
# ch = ch+2
# #pass
# plt.savefig(save_dir +'/recs_%i' %(i))
# def plot_filters(network, save_dir='.'):
# plt.figure(figsize=(30,30))
# plt.clf()
# lay_ind = 0
# num_channels_to_plot = 16
# convlayers = [layer for layer in get_all_layers(network) if isinstance(layer, Conv2DLayer)]
# num_layers = len(convlayers)
# spind = 1
# for layer in convlayers:
# filters = layer.get_params()[0].eval()
# #pick a random filter
# filt = filters[np.random.randint(0,filters.shape[0])]
# for ch_ind in range(num_channels_to_plot):
# p1 = plt.subplot(num_layers,num_channels_to_plot, spind )
# p1.imshow(filt[ch_ind], cmap="gray")
# spind = spind + 1
# #pass
# plt.savefig(save_dir +'/filters.png')
# def plot_feature_maps(i, x, network, save_dir='.'):
# plt.figure(figsize=(30,30))
# plt.clf()
# ind = np.random.randint(0,x.shape[0])
# x=np.squeeze(x)
# im = x[ind]
# convlayers = [layer for layer in get_all_layers(network) if not isinstance(layer,DenseLayer)]
# num_layers = len(convlayers)
# spind = 1
# num_fmaps_to_plot = 16
# for ch in range(num_fmaps_to_plot):
# p1 = plt.subplot(num_layers + 1,num_fmaps_to_plot, spind )
# p1.imshow(im[ch])
# spind = spind + 1
# for layer in convlayers:
# # shape is batch_size, num_filters, x,y
# fmaps = get_output(layer,x ).eval()
# for fmap_ind in range(num_fmaps_to_plot):
# p1 = plt.subplot(num_layers + 1,num_fmaps_to_plot, spind )
# p1.imshow(fmaps[ind][fmap_ind])
# spind = spind + 1
# #pass
# plt.savefig(save_dir +'/fmaps.png')
# ---- module-level training script (Python 2: note the print statements) ----
num_epochs = 5000
batch_size = 128
run_dir = create_run_dir()
# Reuse an existing logger when re-running interactively; otherwise build one
# that writes to both stderr and <run_dir>/training.log.
try:
    print logger
except:
    logger = logging.getLogger('log_train')
    logger.setLevel(logging.INFO)
    fh = logging.FileHandler('%s/training.log'%(run_dir))
    fh.setLevel(logging.INFO)
    ch = logging.StreamHandler()
    ch.setLevel(logging.INFO)
    logger.addHandler(ch)
    logger.addHandler(fh)
# Hyperparameters for this run; with_classif_loss=False means pure reconstruction.
args = { "learning_rate": 0.01, "sigma":0.1, "shape": (None,16,128,128),
    'code_size': 16384 , 'nfilters': 128, 'lrec': 1, 'num_classes': 3, "with_classif_loss": False }
net_cfg = get_net(autoencoder_basic_32, args)
tr_losses = []
val_losses = []
for epoch in range(num_epochs):
    # Fresh iterators every epoch (train: Jan 1 data, val: Oct 28 data).
    tr_iterator = data_iterator(batch_size=batch_size, step_size=128, days=1, month1='01', day1='01')
    val_iterator = data_iterator(batch_size=batch_size, step_size=128, days=1, month1='10', day1='28')
    start = time.time()
    tr_loss = 0
    for iteration, (x, y) in enumerate(tr_iterator):
        #print iteration
        x = np.squeeze(x)
        loss = net_cfg['train_fn'](x)
        tr_loss += loss
    train_end = time.time()
    tr_avgloss = tr_loss / (iteration + 1)
    logger.info("train time : %5.2f seconds" % (train_end - start))
    logger.info(" epoch %i of %i train loss is %f" % (epoch, num_epochs, tr_avgloss))
    tr_losses.append(tr_avgloss)
    val_loss = 0
    for iteration, (xval, yval) in enumerate(val_iterator):
        xval = np.squeeze(xval)
        loss = net_cfg['loss_fn'](xval)
        val_loss += loss
    val_avgloss = val_loss / (iteration + 1)
    logger.info("val time : %5.2f seconds" % (time.time() - train_end))
    logger.info(" epoch %i of %i val loss is %f" % (epoch, num_epochs, val_avgloss))
    val_losses.append(val_avgloss)
    # NOTE(review): the plot_* helpers are commented out above -- these calls
    # will raise NameError unless `common` provides them; confirm.
    plot_learn_curve(tr_losses, val_losses, save_dir=run_dir)
    if epoch % 5 == 0:
        plot_filters(net_cfg['l_out'], save_dir=run_dir)
        # Plot diagnostics for the first training batch only.
        for iteration, (x,y) in enumerate(data_iterator(batch_size=batch_size, step_size=128, days=1, month1='01', day1='01')):
            plot_recs(iteration,x,net_cfg=net_cfg, save_dir=run_dir)
            plot_clusters(iteration,x,y,net_cfg=net_cfg, save_dir=run_dir)
            plot_feature_maps(iteration,x,net_cfg['l_out'], save_dir=run_dir)
            break;
|
12,321 | 3325944bf20e491d82b61afc155c77f7bcb283fb | """
https://leetcode.com/problems/rotate-image/
rotate through diagonal and then columns wise swapping
"""
class Solution:
    def rotate(self, matrix: List[List[int]]) -> None:
        """
        Rotate the n x n matrix 90 degrees clockwise, in place.

        Do not return anything, modify matrix in-place instead.
        Strategy: transpose across the main diagonal, then mirror the
        columns left<->right.
        """
        n = len(matrix)
        # Transpose: swap each element above the diagonal with its mirror.
        for dig in range(n):
            for col in range(dig + 1, n):
                matrix[dig][col], matrix[col][dig] = matrix[col][dig], matrix[dig][col]
        # (BUG FIX: a leftover debug print(matrix) was removed here.)
        # Mirror columns to complete the clockwise rotation.
        left = 0
        right = n - 1
        while left < right:
            for row in range(n):
                matrix[row][left], matrix[row][right] = matrix[row][right], matrix[row][left]
            left += 1
            right -= 1
|
12,322 | fb31bc2b81ce6a79de789dd2957e17006cf88b4c | import cv2
import numpy as np
def findCentre(frame, initial_pos):
    """Locate the centre of large green blobs in a BGR frame.

    Draws the enclosing circle of every green contour with area >= 600 on
    `frame` and shows it in a window. Returns the centre (x, y) of the
    last such contour, or `initial_pos` when none is found.
    """
    centre = initial_pos
    hsv = cv2.cvtColor(frame, cv2.COLOR_BGR2HSV)
    # HSV band for the green marker being tracked.
    low_green = np.array([35, 52, 50])
    high_green = np.array([70, 255, 230])
    # (Removed: an unused red HSV range and dead commented-out morphology.)
    mask = cv2.inRange(hsv, low_green, high_green)
    mask = cv2.medianBlur(mask, 3)  # suppress salt-and-pepper noise
    contours, hierarchy = cv2.findContours(mask.copy(), cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
    for cnt in contours:
        if cv2.contourArea(cnt) >= 600:  # ignore small specks
            (x, y), radius = cv2.minEnclosingCircle(cnt)
            centre = (int(x), int(y))
            radius = int(radius)
            cv2.circle(frame, centre, radius, (0, 0, 255), 2)
            cv2.circle(frame, centre, 2, (0, 0, 255), 2)
    cv2.imshow("frame", frame)
    return centre
|
12,323 | ec8fa49baa8380ac08d4d122130f8d6906495127 |
# Read the total page count and a goal page, then print the smaller number
# of page turns needed starting from the front or the back.
# BUG FIX: input() returns str; the original crashed on `goal // 2`.
pages = int(input())
goal = int(input())
start = goal // 2
end = (pages - goal) // 2
if pages % 2 == 0:
    end += goal % 2
print(min(start, end))
|
12,324 | 1c6b11a4d7366b8f70243f766d3e2d028c3ddec2 | from __future__ import annotations
import logging
import os
from helpers.data_source_fixture import DataSourceFixture
logger = logging.getLogger(__name__)
class SparkDataSourceFixture(DataSourceFixture):
    """Test fixture wiring a Soda data source to Databricks Spark via env vars."""

    def __init__(self, test_data_source: str):
        super().__init__(test_data_source)

    def _build_configuration_dict(self, schema_name: str | None = None) -> dict:
        # Connection settings come straight from the environment.
        spark_settings = {
            "type": "spark",
            "host": os.getenv("DATABRICKS_HOST"),
            "method": os.getenv("SPARK_METHOD"),
            "http_path": os.getenv("DATABRICKS_HTTP_PATH"),
            "token": os.getenv("DATABRICKS_TOKEN"),
            "database": os.getenv("DATABRICKS_DATABASE"),
        }
        return {"data_source spark": spark_settings}

    def _create_schema_if_not_exists_sql(self) -> str:
        return f"CREATE SCHEMA IF NOT EXISTS {self.schema_name}"

    def _use_schema_sql(self) -> str | None:
        # No separate USE step for Spark.
        return None

    def _drop_schema_if_exists_sql(self):
        return f"DROP SCHEMA IF EXISTS {self.schema_name} CASCADE"
|
12,325 | 0c59954bc2b685b7a31e562cb51d889be4741273 | # Servidor TCP
import socket
from threading import Thread
def conexao(con, cli):
    """Drain and print messages from a connected client socket until it
    disconnects, then close the connection."""
    # recv() returns b'' once the peer closes -- that is the loop sentinel.
    for msg in iter(lambda: con.recv(1024), b''):
        print(msg)
    print('Finalizando conexao do cliente', cli)
    con.close()
# Server IP address ('' binds every local interface)
HOST = ''
# Port the server listens on
PORT = 5002
# IPv4 + TCP stream socket.
tcp = socket.socket(
    socket.AF_INET, socket.SOCK_STREAM
)
orig = (HOST, PORT)
tcp.bind(orig)
tcp.listen(1)
# Accept clients forever; each connection is drained in its own thread.
while True:
    con, cliente = tcp.accept()
    print('Conectado por ', cliente)
    t = Thread(target=conexao, args=(con, cliente,))
    t.start()
u"""
Na criação do Socket, o socket.socket() pode receber até 3 parâmetros: o primeiro é a família de protocolos, o segundo é o tipo de transmissão, podendo ser TCP ou UDP; e o último parâmetro é o protocolo de transmissão (IPv4 ou IPv6). <br>
O método `tcp.bind(orig)` é utilizada apenas pelo servidor, uma vez que associa um determinado endereço IP e porta TCP para o processo servidor. <br>
Em `tcp.listen(1)` indica ao SO para colocar o socket em modo de espera para aguardar conexões de clientes, o valor `1` passado ao método define o número de conexões não aceitas que o sistema permitirá antes de recusar novas conexões. <br>
No laço `While`, o `tcp.accept()` aguarda ou bloqueia uma nova conexão; quando um cliente se conecta é retornado um novo socket. <br>
Em `Thread(target=conexao, args=(con, cliente,))`, está definindo uma nova Thread que recebe como argumento o método a ser executado e uma tupla, onde é definido a conexão e o cliente. <br>
O argumento passado no target é um método que lê os dados passados pelo cliente e encerra a conexão.
"""
|
12,326 | 249cd889504af560de3f5cc4ec8654ea4635fec5 | # Generated by Django 3.1.8 on 2021-09-15 16:30
from django.db import migrations, models
class Migration(migrations.Migration):
    # Schema tweaks for the Service model on top of the initial geocontext
    # migration. NOTE(review): "sercive" below is a typo baked into the
    # migration's help_text -- fix it in the model and a new migration, not
    # here, to keep migration state consistent.

    dependencies = [
        ('geocontext', '0001_initial'),
    ]

    operations = [
        migrations.AddField(
            model_name='service',
            name='layer_geometry_field',
            field=models.CharField(blank=True, help_text='Geometry field of the sercive if needed.', max_length=1000, null=True),
        ),
        migrations.AlterField(
            model_name='service',
            name='cache_duration',
            field=models.IntegerField(blank=True, default=604800, help_text='Service refresh time in seconds - determines Cache persistence. Default is one week.', null=True),
        ),
        migrations.AlterField(
            model_name='service',
            name='layer_name',
            field=models.CharField(help_text='Required name of the actual layer/feature to retrieve (Property name). Geocontext v1 used "Result regex"', max_length=200),
        ),
        migrations.AlterField(
            model_name='service',
            name='layer_typename',
            field=models.CharField(blank=True, help_text='Layer type name to get from the service (WMS/WFS).', max_length=200, null=True),
        ),
        migrations.AlterField(
            model_name='service',
            name='tolerance',
            field=models.FloatField(blank=True, default=10, help_text='Tolerance around query point in meters. Used for bounding box queries. Also determines cache hit range for all values', null=True),
        ),
    ]
|
12,327 | f6fb95cc472e7b71e8ea81e6608767f24872b157 | """
Affine, ReLU, SoftmaxWithLoss 클래스들을 이용한 신경망 구현
"""
import numpy as np
from ch05.ex05_relu import Relu
from ch05.ex07_affine import Affine
from ch05.ex08_softmax_loss import SoftmaxWithLoss
np.random.seed(106)  # fixed seed so every printed value is reproducible

# Input data: ndarray of shape (1, 2)
X = np.random.rand(2).reshape((1, 2))
print('X =', X)
# True label (one-hot answer):
Y_true = np.array([1, 0, 0])
print('Y =', Y_true)

# Weight/bias matrices for the first hidden layer.
# The first hidden layer has 3 neurons.
# W1 shape: (2, 3), b1 shape: (3,)
W1 = np.random.randn(2, 3)
b1 = np.random.rand(3)
print('W1 =', W1)
print('b1 =', b1)
affine1 = Affine(W1, b1)
relu = Relu()

# Output layer: 3 neurons.
# W shape: (3, 3), b shape: (3,)
W2 = np.random.randn(3, 3)
b2 = np.random.rand(3)
print('W2 =', W2)
print('b2 =', b2)
affine2 = Affine(W2, b2)
last_layer = SoftmaxWithLoss()

# Chain the layers: forward propagation.
Y = affine1.forward(X)
print('Y shape:', Y.shape)
Y = relu.forward(Y)
print('Y shape:', Y.shape)
Y = affine2.forward(Y)
print('Y shape:', Y.shape)
loss = last_layer.forward(Y, Y_true)
print('loss =', loss)  # cross-entropy = 1.488
print('y_pred =', last_layer.y_pred)  # [0.22573711 0.2607098 0.51355308]

# Back propagation to compute the gradients.
learning_rate = 0.1
dout = last_layer.backward(1)
print('dout 1 =', dout)
dout = affine2.backward(dout)
print('dout 2 =', dout)
print('dW2 =', affine2.dW)
print('db2 =', affine2.db)
dout = relu.backward(dout)
print('dout 3 =', dout)
dout = affine1.backward(dout)
print('dout 4 =', dout)
print('dW1 =', affine1.dW)
print('db1 =', affine1.db)

# One gradient-descent step on the weights/biases (updates in place --
# affine layers hold references to the same arrays).
# print(id(W1), id(affine1.W))
W1 -= learning_rate * affine1.dW
b1 -= learning_rate * affine1.db
W2 -= learning_rate * affine2.dW
b2 -= learning_rate * affine2.db

# Forward again with the updated parameters: the loss should drop.
Y = affine1.forward(X)
Y = relu.forward(Y)
Y = affine2.forward(Y)
Y = last_layer.forward(Y, Y_true)
print('loss =', Y)  # 1.217
print('y_pred =', last_layer.y_pred)  # [0.29602246 0.25014373 0.45383381]

# Mini-batch setup (forward -> backward -> update -> forward left as exercise).
X = np.random.rand(3, 2)
Y_true = np.identity(3)  # [[1 0 0], [0 1 0], [0 0 1]]
|
12,328 | 0bf6fc6c0ddc64a657f5c408465a49735fdf7f02 | import streamlit as st
from streamlit_webrtc import VideoProcessorBase, webrtc_streamer, WebRtcMode, ClientSettings
import av
import cv2
import numpy as np
import pandas as pd
import mediapipe as mp
import tensorflow as tf
from sklearn.pipeline import make_pipeline
from sklearn.ensemble import RandomForestClassifier
from sklearn.preprocessing import StandardScaler
from tf_bodypix.api import download_model, load_model, BodyPixModelPaths
from PIL import Image
from utils import visualize_boxes_and_labels_on_image_array
import gc
# streamlit-webrtc client configuration: Google's public STUN server for NAT
# traversal, and a video-only (no audio) media stream.
WEBRTC_CLIENT_SETTINGS = ClientSettings(
    rtc_configuration={"iceServers": [{"urls": ["stun:stun.l.google.com:19302"]}]},
    media_stream_constraints={"video": True, "audio": False},
)
@st.cache(allow_output_mutation=True)
def train(saved_landmarks, landmarks):
    """Fit a StandardScaler + RandomForest pipeline on the captured landmark rows.

    The first column of each row is the class label; the rest are features.
    """
    frame = pd.DataFrame(saved_landmarks, columns=landmarks[:len(saved_landmarks[0])])
    features = frame.drop('class', axis=1)
    labels = frame['class']
    del frame
    model = make_pipeline(StandardScaler(), RandomForestClassifier())
    model.fit(features, labels)
    del features, labels
    return model
def main_section():
    """Render the landing page: title, hero image and feature descriptions."""
    st.title('Simple Computer Vision Application')
    st.image('main_background.jpg')
    st.markdown('This application has 3 features which utilize different machine learning models. The first app is a body '
                'language detector where the *mediapipe* package is used to detect face, pose and hands landmarks. User can choose '
                'one of these and capture corresponding landmarks to train a model. More details are explained in a **Body Language Decoder** '
                'section.')
    st.markdown('In the **Body Segmentation** section we are using the *tf_bodypix* package for segmentation. User can upload an image '
                'which is then used as a background.')
    st.markdown('The last feature is a **Face Mask Detector** where the object detection model trained with a *Tensorflow Object Detection API* '
                'is used. More details about this feature can be found here [Github](https://github.com/twrzeszcz/face-mask-detection-streamlit).')
    st.markdown('First and second feature were implemented according to the tutorials on [YouTube1](https://www.youtube.com/watch?v=We1uB79Ci-w&t=2690s) '
                'and [YouTube2](https://www.youtube.com/watch?v=0tB6jG55mig&t=317s).')
def body_language_decoder():
train_or_predict = st.sidebar.selectbox('Select type', ['Stream and Save', 'Stream, Train and Predict'])
@st.cache(allow_output_mutation=True)
def get_data():
saved_landmarks = []
return saved_landmarks
saved_landmarks = get_data()
@st.cache
def gen_feature_names():
landmarks = ['class']
for val in range(543):
landmarks.extend(['x' + str(val), 'y' + str(val), 'z' + str(val), 'v' + str(val)])
return landmarks
landmarks = gen_feature_names()
if train_or_predict == 'Stream and Save':
st.markdown('There are 2 types of streaming that you can choose here. It is either just the live webcam stream with displayed landmarks or a '
'live stream when the selected landmarks are saved. You can choose from 4 different types of landmarks to save: *Pose and Face*, '
'*Left Hand*, *Right Hand*, *Left and Right Hand*. To save landmarks you have to also specify the **class name** so the name of the '
'eg. expression, gesture etc. To stop the live stream and saving just press the **Stop** button. To get the new class from the same type '
'of landmarks you have to update the **class name** and start stream again. There is currently no option to have landmarks from different '
'types in the same file. To use a different type you can press **Clear saved landmarks**.')
stream_type = st.selectbox('Select streaming type', ['Stream only', 'Stream and save'])
model_type = st.selectbox('Select type of the model', ['Pose and Face', 'Left Hand', 'Right Hand', 'Left and Right Hand'])
class_name = st.text_input('Enter class name')
class BodyDecoder(VideoProcessorBase):
def __init__(self) -> None:
self.class_name = None
self.save = None
self.model_type = None
@st.cache
def load_model_utils(self):
mp_drawing = mp.solutions.drawing_utils
mp_holistic = mp.solutions.holistic
holistic = mp_holistic.Holistic(min_detection_confidence=0.5, min_tracking_confidence=0.5)
return mp_drawing, mp_holistic, holistic
def live_stream(self, image):
mp_drawing, mp_holistic, holistic = self.load_model_utils()
image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
image.flags.writeable = False
results = holistic.process(image)
image.flags.writeable = True
image = cv2.cvtColor(image, cv2.COLOR_RGB2BGR)
mp_drawing.draw_landmarks(image, results.face_landmarks, mp_holistic.FACE_CONNECTIONS,
mp_drawing.DrawingSpec(color=(80, 110, 10), thickness=1, circle_radius=1),
mp_drawing.DrawingSpec(color=(80, 256, 121), thickness=1, circle_radius=1)
)
mp_drawing.draw_landmarks(image, results.right_hand_landmarks, mp_holistic.HAND_CONNECTIONS,
mp_drawing.DrawingSpec(color=(80, 22, 10), thickness=2, circle_radius=4),
mp_drawing.DrawingSpec(color=(80, 44, 121), thickness=2, circle_radius=2)
)
mp_drawing.draw_landmarks(image, results.left_hand_landmarks, mp_holistic.HAND_CONNECTIONS,
mp_drawing.DrawingSpec(color=(121, 22, 76), thickness=2, circle_radius=4),
mp_drawing.DrawingSpec(color=(121, 44, 250), thickness=2, circle_radius=2)
)
mp_drawing.draw_landmarks(image, results.pose_landmarks, mp_holistic.POSE_CONNECTIONS,
mp_drawing.DrawingSpec(color=(245,117,66), thickness=2, circle_radius=4),
mp_drawing.DrawingSpec(color=(245,66,230), thickness=2, circle_radius=2)
)
return image
def live_stream_save(self, image):
mp_drawing, mp_holistic, holistic = self.load_model_utils()
image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
image.flags.writeable = False
results = holistic.process(image)
image.flags.writeable = True
image = cv2.cvtColor(image, cv2.COLOR_RGB2BGR)
mp_drawing.draw_landmarks(image, results.face_landmarks, mp_holistic.FACE_CONNECTIONS,
mp_drawing.DrawingSpec(color=(80, 110, 10), thickness=1, circle_radius=1),
mp_drawing.DrawingSpec(color=(80, 256, 121), thickness=1, circle_radius=1)
)
mp_drawing.draw_landmarks(image, results.right_hand_landmarks, mp_holistic.HAND_CONNECTIONS,
mp_drawing.DrawingSpec(color=(80, 22, 10), thickness=2, circle_radius=4),
mp_drawing.DrawingSpec(color=(80, 44, 121), thickness=2, circle_radius=2)
)
mp_drawing.draw_landmarks(image, results.left_hand_landmarks, mp_holistic.HAND_CONNECTIONS,
mp_drawing.DrawingSpec(color=(121, 22, 76), thickness=2, circle_radius=4),
mp_drawing.DrawingSpec(color=(121, 44, 250), thickness=2, circle_radius=2)
)
mp_drawing.draw_landmarks(image, results.pose_landmarks, mp_holistic.POSE_CONNECTIONS,
mp_drawing.DrawingSpec(color=(245,117,66), thickness=2, circle_radius=4),
mp_drawing.DrawingSpec(color=(245,66,230), thickness=2, circle_radius=2)
)
try:
if self.model_type == 'Pose and Face':
pose_row = list(np.array([[landmark.x, landmark.y, landmark.z, landmark.visibility] for landmark in results.pose_landmarks.landmark]).flatten())
face_row = list(np.array([[landmark.x, landmark.y, landmark.z, landmark.visibility] for landmark in results.face_landmarks.landmark]).flatten())
row = [self.class_name] + pose_row + face_row
elif self.model_type == 'Left Hand':
left_hand_row = list(np.array([[landmark.x, landmark.y, landmark.z, landmark.visibility] for landmark in results.left_hand_landmarks.landmark]).flatten())
row = [self.class_name] + left_hand_row
elif self.model_type == 'Right Hand':
right_hand_row = list(np.array([[landmark.x, landmark.y, landmark.z, landmark.visibility] for landmark in results.right_hand_landmarks.landmark]).flatten())
row = [self.class_name] + right_hand_row
elif self.model_type == 'Left and Right Hand':
left_hand_row = list(np.array([[landmark.x, landmark.y, landmark.z, landmark.visibility] for landmark in results.left_hand_landmarks.landmark]).flatten())
right_hand_row = list(np.array([[landmark.x, landmark.y, landmark.z, landmark.visibility] for landmark in results.right_hand_landmarks.landmark]).flatten())
row = [self.class_name] + left_hand_row + right_hand_row
saved_landmarks.append(row)
except:
pass
return image
def recv(self, frame: av.VideoFrame) -> av.VideoFrame:
image = frame.to_ndarray(format="bgr24")
if self.save == 'Stream only':
image = self.live_stream(image)
else:
image = self.live_stream_save(image)
return av.VideoFrame.from_ndarray(image, format="bgr24")
webrtc_ctx = webrtc_streamer(
key="body_decoder",
mode=WebRtcMode.SENDRECV,
client_settings=WEBRTC_CLIENT_SETTINGS,
video_processor_factory=BodyDecoder,
async_processing=True
)
if webrtc_ctx.video_processor:
webrtc_ctx.video_processor.class_name = class_name
webrtc_ctx.video_processor.save = stream_type
webrtc_ctx.video_processor.model_type = model_type
if train_or_predict == 'Stream, Train and Predict':
st.markdown('In this section a simple machine learning model is trained on the saved landmarks. You only have to '
'select the type of landmarks that were saved before.')
model_type = st.selectbox('Select type of the model', ['Pose and Face', 'Left Hand', 'Right Hand', 'Left and Right Hand'])
model = train(saved_landmarks, landmarks)
st.success('Successfully trained')
class BodyPredictor(VideoProcessorBase):
def __init__(self) -> None:
self.model_type = None
@st.cache
def load_model_utils(self):
mp_drawing = mp.solutions.drawing_utils
mp_holistic = mp.solutions.holistic
holistic = mp_holistic.Holistic(min_detection_confidence=0.5, min_tracking_confidence=0.5)
return mp_drawing, mp_holistic, holistic
def live_stream(self, image):
mp_drawing, mp_holistic, holistic = self.load_model_utils()
image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
image.flags.writeable = False
results = holistic.process(image)
image.flags.writeable = True
image = cv2.cvtColor(image, cv2.COLOR_RGB2BGR)
mp_drawing.draw_landmarks(image, results.face_landmarks, mp_holistic.FACE_CONNECTIONS,
mp_drawing.DrawingSpec(color=(80, 110, 10), thickness=1, circle_radius=1),
mp_drawing.DrawingSpec(color=(80, 256, 121), thickness=1, circle_radius=1)
)
mp_drawing.draw_landmarks(image, results.right_hand_landmarks, mp_holistic.HAND_CONNECTIONS,
mp_drawing.DrawingSpec(color=(80, 22, 10), thickness=2, circle_radius=4),
mp_drawing.DrawingSpec(color=(80, 44, 121), thickness=2, circle_radius=2)
)
mp_drawing.draw_landmarks(image, results.left_hand_landmarks, mp_holistic.HAND_CONNECTIONS,
mp_drawing.DrawingSpec(color=(121, 22, 76), thickness=2, circle_radius=4),
mp_drawing.DrawingSpec(color=(121, 44, 250), thickness=2, circle_radius=2)
)
mp_drawing.draw_landmarks(image, results.pose_landmarks, mp_holistic.POSE_CONNECTIONS,
mp_drawing.DrawingSpec(color=(245,117,66), thickness=2, circle_radius=4),
mp_drawing.DrawingSpec(color=(245,66,230), thickness=2, circle_radius=2)
)
try:
if self.model_type == 'Pose and Face':
pose_row = list(np.array([[landmark.x, landmark.y, landmark.z, landmark.visibility] for landmark in results.pose_landmarks.landmark]).flatten())
face_row = list(np.array([[landmark.x, landmark.y, landmark.z, landmark.visibility] for landmark in results.face_landmarks.landmark]).flatten())
X = pd.DataFrame([pose_row + face_row])
body_language_class = model.predict(X)[0]
body_language_prob = model.predict_proba(X)[0]
del X, pose_row, face_row
elif self.model_type == 'Left Hand':
left_hand_row = list(np.array([[landmark.x, landmark.y, landmark.z, landmark.visibility] for landmark in results.left_hand_landmarks.landmark]).flatten())
X = pd.DataFrame([left_hand_row])
body_language_class = model.predict(X)[0]
body_language_prob = model.predict_proba(X)[0]
del X, left_hand_row
elif self.model_type == 'Right Hand':
right_hand_row = list(np.array([[landmark.x, landmark.y, landmark.z, landmark.visibility] for landmark in results.right_hand_landmarks.landmark]).flatten())
X = pd.DataFrame([right_hand_row])
body_language_class = model.predict(X)[0]
body_language_prob = model.predict_proba(X)[0]
del X, right_hand_row
elif self.model_type == 'Left and Right Hand':
left_hand_row = list(np.array([[landmark.x, landmark.y, landmark.z, landmark.visibility] for landmark in results.left_hand_landmarks.landmark]).flatten())
right_hand_row = list(np.array([[landmark.x, landmark.y, landmark.z, landmark.visibility] for landmark in results.right_hand_landmarks.landmark]).flatten())
X = pd.DataFrame([left_hand_row + right_hand_row])
body_language_class = model.predict(X)[0]
body_language_prob = model.predict_proba(X)[0]
del X, left_hand_row, right_hand_row
img_shape = list(image.shape[:-1])
img_shape.reverse()
coords = tuple(np.multiply(np.array((
results.pose_landmarks.landmark[mp_holistic.PoseLandmark.LEFT_EAR].x,
results.pose_landmarks.landmark[mp_holistic.PoseLandmark.LEFT_EAR].y)), img_shape).astype(int))
cv2.rectangle(image, (coords[0], coords[1] + 5),
(coords[0] + len(body_language_class) * 20, coords[1] - 30),
(245, 117, 16), -1)
cv2.putText(image, body_language_class, coords,
cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 2, cv2.LINE_AA)
cv2.rectangle(image, (0, 0), (250, 60), (245, 117, 16), -1)
cv2.putText(image, 'CLASS', (95, 12), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 0), 1, cv2.LINE_AA)
cv2.putText(image, body_language_class, (90, 40),
cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 2, cv2.LINE_AA)
cv2.putText(image, 'PROB', (15, 12), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 0), 1, cv2.LINE_AA)
cv2.putText(image, str(np.max(body_language_prob)), (10, 40),
cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 2, cv2.LINE_AA)
except:
pass
return image
def recv(self, frame: av.VideoFrame) -> av.VideoFrame:
    """WebRTC callback: decode the frame, run the predictor overlay, re-encode."""
    annotated = self.live_stream(frame.to_ndarray(format="bgr24"))
    return av.VideoFrame.from_ndarray(annotated, format="bgr24")
# Start the WebRTC stream; BodyPredictor.recv is invoked once per video frame.
webrtc_ctx = webrtc_streamer(
    key="body_predictor",
    mode=WebRtcMode.SENDRECV,
    client_settings=WEBRTC_CLIENT_SETTINGS,
    video_processor_factory=BodyPredictor,
    async_processing=True
)
# Push the sidebar model selection into the live processor instance.
if webrtc_ctx.video_processor:
    webrtc_ctx.video_processor.model_type = model_type
if st.button('Clear saved landmarks'):
    saved_landmarks.clear()
st.write('Total number of saved landmarks: ' + str(len(saved_landmarks)))
def body_segmentation():
    """Streamlit section: segment the person out of the webcam feed with BodyPix
    and composite them over an uploaded background image."""
    # Background image upload (decoded to an RGB numpy array via PIL).
    img = st.file_uploader('Choose a image file', type=['jpg', 'png'])
    if img is not None:
        img = np.array(Image.open(img))
        st.image(img)
        st.success('Successfully uploaded')
    confidence_threshold = st.slider('Confidence threshold', 0.0, 1.0, 0.5, 0.05)
    class BodySegmentation(VideoProcessorBase):
        """Per-frame processor: person pixels from the camera, everything else
        from the uploaded background image."""
        def __init__(self) -> None:
            self.confidence_threshold = 0.5
        @st.cache(allow_output_mutation=True)
        def load_bodypix_model(self):
            # Cached across Streamlit reruns so the model is downloaded only once.
            bodypix_model = load_model(download_model(BodyPixModelPaths.MOBILENET_FLOAT_50_STRIDE_16))
            return bodypix_model
        def live_stream(self, image):
            model = self.load_bodypix_model()
            result = model.predict_single(image)
            # Binary person mask at the current confidence threshold.
            mask = result.get_mask(threshold=self.confidence_threshold).numpy().astype(np.uint8)
            masked_image = cv2.bitwise_and(image, image, mask=mask)
            # cv2.resize wants (width, height); numpy shape stores (height, width).
            img_shape = list(image.shape[:-1])
            img_shape.reverse()
            image_shape = tuple(img_shape)
            # Complement of the person mask selects the background pixels.
            inverse_mask = np.abs(result.get_mask(threshold=self.confidence_threshold).numpy() - 1).astype(np.uint8)
            # NOTE(review): uses the uploaded `img` from the enclosing scope —
            # presumably fails if no image has been uploaded yet; confirm intended.
            masked_background = cv2.bitwise_and(cv2.resize(img, image_shape), cv2.resize(img, image_shape), mask=inverse_mask)
            final = cv2.add(masked_image, cv2.cvtColor(masked_background, cv2.COLOR_BGR2RGB))
            return final
        def recv(self, frame: av.VideoFrame) -> av.VideoFrame:
            image = frame.to_ndarray(format="bgr24")
            image = self.live_stream(image)
            return av.VideoFrame.from_ndarray(image, format="bgr24")
    # Start the WebRTC stream using the processor class defined above.
    webrtc_ctx = webrtc_streamer(
        key="body_segmentation",
        mode=WebRtcMode.SENDRECV,
        client_settings=WEBRTC_CLIENT_SETTINGS,
        video_processor_factory=BodySegmentation,
        async_processing=True
    )
    # Propagate the slider value into the live processor instance.
    if webrtc_ctx.video_processor:
        webrtc_ctx.video_processor.confidence_threshold = confidence_threshold
def face_mask_detection():
    """Streamlit section: run a TF object-detection model that flags face masks
    (worn / not worn / worn incorrectly) on the webcam stream."""
    @st.cache
    def load_model():
        # Cached so the SavedModel is loaded once per session.
        detect_fn = tf.saved_model.load('my_model_mobnet/saved_model')
        return detect_fn
    detect_fn = load_model()
    class MaskDetector(VideoProcessorBase):
        """Per-frame processor that draws detection boxes and labels in place."""
        def __init__(self) -> None:
            self.confidence_threshold = 0.5
            # Label map matching the exported model's class ids.
            self.category_index = {1: {'id': 1, "name": 'with_mask'}, 2: {'id': 2, 'name': 'without_mask'},
                                   3: {'id': 3, 'name': 'mask_weared_incorrect'}}
            self.num_boxes = 1
        def gen_pred(self, image):
            input_tensor = tf.convert_to_tensor(np.expand_dims(image, axis=0))
            detections = detect_fn(input_tensor)
            # Strip the batch dimension and convert output tensors to numpy.
            num_detections = int(detections.pop('num_detections'))
            detections = {key: value[0, :num_detections].numpy()
                          for key, value in detections.items()}
            detections['num_detections'] = num_detections
            detections['detection_classes'] = detections['detection_classes'].astype(np.int64)
            # Draws boxes/labels directly onto `image`.
            visualize_boxes_and_labels_on_image_array(
                image,
                detections['detection_boxes'],
                detections['detection_classes'],
                detections['detection_scores'],
                self.category_index,
                use_normalized_coordinates=True,
                max_boxes_to_draw=self.num_boxes,
                min_score_thresh=self.confidence_threshold,
                agnostic_mode=False)
            return image
        def recv(self, frame: av.VideoFrame) -> av.VideoFrame:
            image = frame.to_ndarray(format="bgr24")
            image = self.gen_pred(image)
            return av.VideoFrame.from_ndarray(image, format="bgr24")
    webrtc_ctx = webrtc_streamer(
        key="mask-detection",
        mode=WebRtcMode.SENDRECV,
        client_settings=WEBRTC_CLIENT_SETTINGS,
        video_processor_factory=MaskDetector,
        async_processing=True,
    )
    # Controls rendered below the stream; values are pushed into the processor.
    confidence_threshold = st.slider('Confidence threshold', 0.0, 1.0, 0.5, 0.05)
    num_boxes = st.slider('Number of boxes', 1, 20, 1)
    if webrtc_ctx.video_processor:
        webrtc_ctx.video_processor.confidence_threshold = confidence_threshold
        webrtc_ctx.video_processor.num_boxes = num_boxes
# Sidebar navigation: route to the selected demo section.
activities = ['Main', 'Body Language Decoder', 'Body Segmentation', 'Face Mask Detector']
section_type = st.sidebar.selectbox('Select Option', activities)
section_handlers = {
    'Main': main_section,
    'Body Language Decoder': body_language_decoder,
    'Body Segmentation': body_segmentation,
    'Face Mask Detector': face_mask_detection,
}
handler = section_handlers.get(section_type)
if handler is not None:
    handler()
    # The heavy ML sections reclaim model/frame memory after each rerun.
    if section_type != 'Main':
        gc.collect()
|
12,329 | 674d97dfa8890f66a53213c9cea47f2193db9cd9 | #!/usr/bin/env python
from vmwc import VMWareClient
def main():
    """Delete every virtual switch on the ESXi host (destructive; interlocked below)."""
    host = '192.168.1.1'
    username = '<username>'
    password = '<password>'
    # Safety interlock: the early return below must be removed deliberately
    # before the destructive loop can execute.
    print 'WARNING - you must acknowledge that by executing the code below will result in deletion of all switches. (remove the "return" statement and re run this script to proceed)'
    return
    with VMWareClient(host, username, password) as client:
        for vs in client.get_virtual_switches():
            vs.delete()
if __name__ == '__main__':
    main()
|
12,330 | caa0b907c9b983cf50c203457ca28783e62a1f26 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from IPython.core.display import display, HTML
# Inject notebook-wide CSS (custom Cooper Hewitt fonts, cell layout, table,
# blockquote and output styling) into the Jupyter front-end.
display(HTML("""<style>
@font-face {
font-family: 'Cooper Hewit' ;
src: url(utils/CooperHewitt-Medium.otf);
}
@font-face {
font-family: 'Cooper Hewit Bold' ;
src: url(utils/CooperHewitt-Bold.otf);
}
@font-face {
font-family: 'Cooper Hewit Light' ;
src: url(utils/CooperHewitt-Light.otf);
}
.container {
width:96% !important;
font-family: 'Cooper Hewit','Source Sans Pro', 'Open Sans', 'Helvetica', Sans;
}
.text_cell_render h1 {
text-align: center;
font-family: 'Cooper Hewit Light';
font-size: 52px;
}
strong {
font-weight: bold;
}
h2, h3 {
font-family: 'Cooper Hewit Bold' ;
}
.text_cell_render p,
.text_cell_render h2,
.text_cell_render h3,
.text_cell_render h4,
.text_cell_render ul,
.text_cell_render ol,
.text_cell_render pre,
.text_cell_render table {
max-width: 860px;
margin: 0 auto;
line-height: 30px;
}
.text_cell_render p,
.text_cell_render ul,
.text_cell_render ol,
.text_cell_render table {
font-family: 'Cooper Hewit' ;
font-size: 20px;
padding-bottom : 26px;
}
.text_cell_render h4 {
font-size: 20px;
text-align: center;
}
.text_cell.rendered .input_prompt {
display : none !important;
}
.text_cell_render table {
width: 860px;
margin: 26px auto;
text-align: center;
}
.text_cell_render td,
.text_cell_render th {
padding: 8px;
text-align: center;
}
.text_cell_render table thead {
background-color: #333;
color: white;
font-family: 'Cooper Hewit Light';
text-align: center;
}
.CodeMirror {
padding: 8px 20px;
}
.CodeMirror pre {
font-size: 20px;
line-height: 28px;
}
div.output_text pre {
color: #333;
font-size: 18px;
line-height: 26px;
}
.output_png img{
margin: 0 auto;
margin-top: 12px;
display: block;
min-width: 600px;
}
.rendered_html blockquote cite:before {
content: '— ';
}
.rendered_html blockquote p:before {
content: "“";
font-size: 160px;
color: rgba(218, 218, 218, 0.68);
position: relative;
margin-left: -72px;
top: 32px;
left: 37px;
font-family: Cooper Hewitt Bold;
z-index: 0;
}
.rendered_html blockquote {
clear: both;
border: none;
}
.rendered_html blockquote p:after {
visibility: hidden;
display: block;
content: "";
clear: both;
height: 0;
}
.rendered_html blockquote cite {
display: block;
padding-left: 30%;
padding-right: 10%;
text-align: right;
margin-top: 12px;
}
li > * > li {
margin-left: 24px;
line-height: 30px;
}
.rendered_html li {
padding-bottom: 12px;
padding-left: 12px;
margin-left: 20px;
}
li > ul,
li > ol {
margin-top: 12px !important;
padding-bottom: 0px !important;
}
.rendered_html strong {
font-family: 'Cooper Hewit Bold';
color: #007eff;
}
</style>"""))
|
12,331 | 7466c91b749666f885ca0cd7e4160a00747e056f |
from appium.webdriver.common.mobileby import MobileBy
from djcelery.admin_utils import action
from selenium.webdriver.common.by import By
from test_app.page.base_page import BasePage
class Search(BasePage):
    """Page object wrapping the app's search and favourite actions."""

    # TODO: support multiple platforms, app versions and alternative locators.
    _name_locator = (MobileBy.ID, "name")

    def search(self, key: str):
        """Type *key* into the main search box and fire the IME search action."""
        self.find(By.XPATH, "//*[contains(@class,'androidx.appcompat.widget.LinearLayoutCompat')]").click()
        self.find(MobileBy.ID, "ocet_edit").send_keys(key)
        # Press the soft-keyboard "search" button.
        self._driver.execute_script("mobile:performEditorAction", {"action": "search"})
        # Cross-platform locator refactor example: element = (MobileBy.ID, "name")
        # self.find(self._name_locator).click()
        return self

    def market_search(self, key):
        """Search the market tab for *key* and open the first named result."""
        self.find(MobileBy.ID, "action_search").click()
        self.find(MobileBy.ID, "search_input_text").send_keys(key)
        self.find(self._name_locator).click()
        return self

    def market_search_back(self):
        """Dismiss the market search box."""
        self.find(MobileBy.ID, "action_close").click()

    def get_address(self):
        """Return the address text shown for the current result."""
        address_locator = (By.XPATH, "//*[contains(@resource-id,'tv_box_address')]")
        return self.find(address_locator).text

    def add_select(self):
        """Tap the favourite ("收藏") button."""
        self.find_by_text("收藏").click()
        return self

    def un_select(self):
        """Tap the already-favourited ("已收藏") button to remove the favourite."""
        self.find_by_text("已收藏").click()
        return self

    def get_msg(self):
        """Return the label text shown after (un)favouriting."""
        return self.find(By.ID, "tv_collection_text").text
12,332 | 548e577eccf0898da0dd128f6b27a6083e8ebbb0 | import threading
import Queue
import time
# Work queue of unit addresses and queue of reply payloads shared by workers.
addr_q = Queue.Queue()
reply_q = Queue.Queue()
# Per-unit poll counters; seeded at 9 so the first request refreshes at once.
i1 = 9
i2 = 9
# Cached per-unit refresh timestamps.
data1_1 = None #192.168.1.3
data1_2 = None #192.168.1.4
def read_data(q,reply_q):
    """Worker: pop one unit address from *q*, refresh that unit's cached
    timestamp on every 10th poll, and push a status dict onto *reply_q*."""
    global data1_1,data1_2,i1,i2
    #print "Running read_data"
    s = q.get()
    name = s[0]
    try:
        # Bump the per-unit request counter.
        if name == "192.168.1.3":
            i1 = i1 + 1
        if name == "192.168.1.4":
            i2 = i2 + 1
        # Every 10th request, refresh the cached timestamp for that unit.
        if i1 > 9 and name == "192.168.1.3":
            data1_1 = time.time()
            i1 = 0
        if i2 > 9 and name == "192.168.1.4":
            data1_2 = time.time()
            i2 = 0
        data1 = None
        if name == "192.168.1.3":
            data1 = data1_1
            print "Here1"
        elif name == "192.168.1.4":
            data1 = data1_2
            print "Here2"
        data = {
            "data1":data1,
            "time":time.time(),
            "unit":name
        }
        reply_q.put(data);
        q.task_done()
    except:
        raise
def main():
    """Poll both units once per second, using two fresh worker threads per cycle."""
    try:
        while True:
            t0 = time.time()
            # One worker per unit address queued below.
            thread1 = threading.Thread(target=read_data,args=(addr_q,reply_q,))
            thread2 = threading.Thread(target=read_data,args=(addr_q,reply_q,))
            thread1.start()
            thread2.start()
            addr_q.put(["192.168.1.3"])
            addr_q.put(["192.168.1.4"])
            # Wait for both workers to call task_done().
            addr_q.join()
            print reply_q.get(block=True)
            print reply_q.get(block=True)
            print "Done."
            time.sleep(1)
    except:
        raise
if __name__ == "__main__":
    try:
        main()
    except KeyboardInterrupt:
        raise
12,333 | 9462ef377e7ad3724cf5426c5227696911dc48b6 | # _*_ coding: utf-8 _*_
"""获取一个数组中的前 m 个元素,共有 n 个元素,即 m <= n"""
"""
基本思路:可以先排序,然后取前 m 个元素, 时间复杂度 nlogN
""" |
12,334 | 99289212a29997e056d1ab3d329ef61add818d8e |
class Stationery:
    """Base drawing tool; subclasses override draw() with their own message."""

    title = 'None'

    def draw(self):
        print('Запуск отрисовки')


class Pen(Stationery):
    """Pen tool."""

    def draw(self):
        print('Рисуем ручкой')


class Pencil(Stationery):
    """Pencil tool."""

    def draw(self):
        print('Рисуем карандашом')


class Handle(Stationery):
    """Marker tool."""

    def draw(self):
        print('Рисуем маркером')
# Demonstrate polymorphic draw() across the tool hierarchy.
my_tool = Stationery()
my_handle = Handle()
my_pencil = Pencil()
my_pen = Pen()
for drawer in (my_tool, my_handle, my_pencil, my_pen):
    drawer.draw()
12,335 | 007cf01723b9dfff1c9456ca2809386eeb7dfa14 | from django.urls import path
from django.views import generic
from drug import views, viewsets
from django.urls import path, include
# URL namespace for the drug app.
app_name = 'drug'
urlpatterns = [
    # path('', generic.TemplateView.as_view(template_name='drug/index.html'), name='index'),
    # JSON API: /<table>/<col>/json/ dispatches to the generic APIView.
    path('<str:table>/<str:col>/json/', views.APIView.as_view(), name='api'),
    #path('<str:name>/name/', viewsets.DrugNameViewSet.as_view()),
]
12,336 | 45fea2c37a8d48dc480d57807eea72c60a43aaa2 | """Module provider for Name.com"""
from __future__ import absolute_import
import logging
from argparse import ArgumentParser
from typing import List
from requests import HTTPError, Session
from requests.auth import HTTPBasicAuth
from lexicon.exceptions import AuthenticationError
from lexicon.interfaces import Provider as BaseProvider
LOGGER = logging.getLogger(__name__)

# Error payload Name.com returns when an identical record already exists;
# create_record treats this response as a benign no-op.
DUPLICATE_ERROR = {
    "message": "Invalid Argument",
    "details": "Parameter Value Error - Duplicate Record",
}
class NamecomLoader(
    object
):  # pylint: disable=useless-object-inheritance,too-few-public-methods
    """Iterable that walks a paginated Name.com listing endpoint.

    *get* is a callable performing the GET request; iteration yields every
    item found under *data_key*, following ``next_page`` markers until the
    server stops returning one.
    """

    def __init__(self, get, url, data_key, next_page=1):
        self.get = get
        self.url = url
        self.data_key = data_key
        self.next_page = next_page

    def __iter__(self):
        while self.next_page:
            payload = self.get(self.url, {"page": self.next_page})
            for item in payload[self.data_key]:
                yield item
            self.next_page = payload.get("next_page")
class NamecomProvider(BaseProvider):
    """Provider implementation for Name.com (API v4).

    Records are managed through https://api.name.com/v4 using HTTP basic
    auth (username + API token).
    """

    @staticmethod
    def get_nameservers() -> List[str]:
        return ["name.com"]

    @staticmethod
    def configure_parser(parser: ArgumentParser) -> None:
        parser.add_argument("--auth-username", help="specify a username")
        parser.add_argument("--auth-token", help="specify an API token")

    def __init__(self, config):
        # Fix: call super() via this class rather than the module-level
        # `Provider` alias — that alias is only bound after the class body
        # executes and breaks if it is ever rebound or shadowed.
        super(NamecomProvider, self).__init__(config)
        self.api_endpoint = "https://api.name.com/v4"
        self.session = Session()

    def authenticate(self):
        """Attach basic auth to the session and verify the configured domain exists.

        Raises AuthenticationError when the account does not own the domain.
        """
        self.session.auth = HTTPBasicAuth(
            username=self._get_provider_option("auth_username"),
            password=self._get_provider_option("auth_token"),
        )
        # checking domain existence
        domain_name = self.domain
        for domain in NamecomLoader(self._get, "/domains", "domains"):
            if domain["domainName"] == domain_name:
                self.domain_id = domain_name
                return
        raise AuthenticationError("{} domain does not exist".format(domain_name))

    def cleanup(self) -> None:
        pass

    def create_record(self, rtype, name, content):
        """Create a record; a duplicate-record error is treated as success (True)."""
        data = {
            "type": rtype,
            "host": self._relative_name(name),
            "answer": content,
            "ttl": self._get_lexicon_option("ttl"),
        }
        if rtype in ("MX", "SRV"):
            # despite the documentation says a priority is
            # required for MX and SRV, it's actually optional
            priority = self._get_lexicon_option("priority")
            if priority:
                data["priority"] = priority
        url = "/domains/{}/records".format(self.domain)
        try:
            record_id = self._post(url, data)["id"]
        except HTTPError as error:
            response = error.response
            if response.status_code == 400 and response.json() == DUPLICATE_ERROR:
                LOGGER.warning("create_record: duplicate record has been skipped")
                return True
            raise
        LOGGER.debug("create_record: record %s has been created", record_id)
        return record_id

    def list_records(self, rtype=None, name=None, content=None):
        """Return all records for the domain, optionally filtered by type/name/content."""
        url = "/domains/{}/records".format(self.domain)
        records = []
        for raw in NamecomLoader(self._get, url, "records"):
            record = {
                "id": raw["id"],
                "type": raw["type"],
                "name": raw["fqdn"][:-1],  # drop the trailing dot of the FQDN
                "ttl": raw["ttl"],
                "content": raw["answer"],
            }
            records.append(record)
        LOGGER.debug("list_records: retrieved %s records", len(records))
        if rtype:
            records = [record for record in records if record["type"] == rtype]
        if name:
            name = self._full_name(name)
            records = [record for record in records if record["name"] == name]
        if content:
            records = [record for record in records if record["content"] == content]
        LOGGER.debug("list_records: filtered %s records", len(records))
        return records

    def update_record(self, identifier, rtype=None, name=None, content=None):
        """Update a record by id, or by rtype+name (with content to disambiguate)."""
        if not identifier:
            if not (rtype and name):
                raise ValueError("Record identifier or rtype+name must be specified")
            records = self.list_records(rtype, name)
            if not records:
                raise Exception("There is no record to update")
            if len(records) > 1:
                # Try narrowing by content before giving up on ambiguity.
                filtered_records = [
                    record for record in records if record["content"] == content
                ]
                if filtered_records:
                    records = filtered_records
                if len(records) > 1:
                    raise Exception(
                        "There are multiple records to update: {}".format(
                            ", ".join(record["id"] for record in records)
                        )
                    )
            record_id = records[0]["id"]
        else:
            record_id = identifier
        data = {"ttl": self._get_lexicon_option("ttl")}
        # even though the documentation says a type and an answer
        # are required, they are not required actually
        if rtype:
            data["type"] = rtype
        if name:
            data["host"] = self._relative_name(name)
        if content:
            data["answer"] = content
        url = "/domains/{}/records/{}".format(self.domain, record_id)
        record_id = self._put(url, data)["id"]
        # Consistency fix: use the module logger like every other method
        # instead of the root logger.
        LOGGER.debug("update_record: record %s has been updated", record_id)
        return record_id

    def delete_record(self, identifier=None, rtype=None, name=None, content=None):
        """Delete record(s) by id or by rtype/name/content filter.

        Returns False when the filter matches nothing, True otherwise.
        """
        if not identifier:
            if not (rtype and name):
                raise ValueError("Record identifier or rtype+name must be specified")
            records = self.list_records(rtype, name, content)
            if not records:
                LOGGER.warning("delete_record: there is no record to delete")
                return False
            record_ids = [record["id"] for record in records]
        else:
            record_ids = [
                identifier,
            ]
        for record_id in record_ids:
            url = "/domains/{}/records/{}".format(self.domain, record_id)
            self._delete(url)
            LOGGER.debug("delete_record: record %s has been deleted", record_id)
        return True

    def _get_raw_record(self, record_id):
        """Fetch a single raw record payload by id."""
        url = "/domains/{}/records/{}".format(self.domain, record_id)
        return self._get(url)

    def _request(self, action="GET", url="/", data=None, query_params=None):
        """Perform an authenticated JSON request against the API endpoint."""
        response = self.session.request(
            method=action, url=self.api_endpoint + url, json=data, params=query_params
        )
        response.raise_for_status()
        return response.json()
Provider = NamecomProvider
|
12,337 | 2585b4761fd6c8d4d75cacedaeec6dbebbb2f474 | # Сортировка выбором. Сложность О(n^2)
def find_smallest(arr):
    """Return the index of the smallest element of *arr*.

    Ties resolve to the first occurrence, matching the original strict-<
    scan. Raises ValueError on an empty sequence.
    """
    # min() over the index range with the sequence's own __getitem__ as key
    # scans once at C speed and keeps the first index on ties.
    return min(range(len(arr)), key=arr.__getitem__)
def selection_sort(arr):
    """Return a new list with *arr*'s elements in ascending order (selection sort, O(n^2)).

    Fix: the previous version popped elements directly from the caller's
    list, leaving it empty as a side effect; we now sort a copy so the
    input survives intact.
    """
    remaining = list(arr)  # work on a copy so the caller's list is preserved
    ordered = []
    while remaining:
        smallest = min(range(len(remaining)), key=remaining.__getitem__)
        ordered.append(remaining.pop(smallest))
    return ordered
# Demo: print the list, then its elements in sorted order.
L = [6,7,839,2,0,9,3,87,65,62,3,14,43,27,8]
print(*L)
print(*selection_sort(L))
12,338 | 63a97f17a8dcdad2024c3e893d434c089ebf1b19 | from __future__ import (absolute_import, division, print_function)
from .util import extract_vars
def get_accum_precip(wrfin, timeidx=0):
    """Return total accumulated precipitation (RAINC + RAINNC) at *timeidx*."""
    ncvars = extract_vars(wrfin, timeidx, varnames=("RAINC", "RAINNC"))
    return ncvars["RAINC"] + ncvars["RAINNC"]
def get_precip_diff(wrfin1, wrfin2, timeidx=0):
    """Return the difference (file1 - file2) in total accumulated precipitation."""
    totals = []
    for wrfin in (wrfin1, wrfin2):
        ncvars = extract_vars(wrfin, timeidx, varnames=("RAINC", "RAINNC"))
        totals.append(ncvars["RAINC"] + ncvars["RAINNC"])
    return totals[0] - totals[1]

# TODO: Handle bucket flipping
12,339 | e0f0bc3e7638d05cf02fff57a4529b41d21fa0ac | import builtins as _mod_builtins
# Auto-generated introspection stub for the compiled Cython extension
# sklearn.utils._logistic_sigmoid (produced by an IDE stub generator);
# the real implementation lives in the .so referenced by __file__.
__builtins__ = {}
__doc__ = None
__file__ = '/home/chris/anaconda3/lib/python3.6/site-packages/sklearn/utils/_logistic_sigmoid.cpython-36m-x86_64-linux-gnu.so'
__name__ = 'sklearn.utils._logistic_sigmoid'
__package__ = 'sklearn.utils'
__test__ = _mod_builtins.dict()

def _log_logistic_sigmoid():
    # Placeholder signature; the C-implemented function takes its real
    # arguments in the compiled module.
    pass
12,340 | 8d9838cc4b2e4a4240b6ec1091576989deb4d969 | """ URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url
from django.conf.urls import include
from django.contrib import admin
#from django.views.generic import TemplateView
from views import hello, current_datetime, hours_ahead
from books import views
import ch13.views as ch13v
from django.conf.urls import handler404
#handler404 = current_datetime # a test, ok
urlpatterns = [
    url(r'^$', hello),
    url(r'^time/$', current_datetime),
    url(r'^time/plus/(\d{1,2})/$', hours_ahead),
    # url(r'^search-form/$', views.search_form), # no need now
    url(r'^search/$', views.search),
    url(r'^contact/$', views.contact0),
    url(r'^contact1/$', views.contact1),
    url(r'^contact/thanks/$', views.contact_thanks),
    # Chapter 13 examples: non-HTML responses (PNG, streamed writes, CSV, PDF).
    url(r'^ch13getpng$', ch13v.ch13getpng),
    url(r'^ch13wt$', ch13v.ch13_write_twice),
    url(r'^ch13csv$', ch13v.unruly_passengers_csv),
    url(r'^ch13pdf$', ch13v.hello_pdf),
    url(r'^ch13pdf2$', ch13v.hello_pdf2),
    url(r'^admin/', admin.site.urls),
    # url(r'^about/', about_views.contact),
]
|
12,341 | d450b8dba116ccb9bbe4f3106ee3b68e3408222c | import os
import pytest
from concard.app import run
def create_card(**kwargs) -> str:
    """Create a card whose fields are *kwargs* and return the new card's uid."""
    # dict(kwargs) replaces the manual key-by-key copy loop.
    response = run('test', {'action': 'create', 'card': dict(kwargs)})
    return response['card_uid']
def read_repo(filters=None) -> list:
    """Read all cards from the test repo, optionally narrowed by *filters*."""
    request = {'action': 'read'}
    if filters:
        request['filters'] = filters
    return run('test', request)
@pytest.fixture
def setup_teardown():
    """After the test, remove every file left behind under files/test/."""
    yield
    directory = 'files/test/'
    for leftover in os.listdir(directory):
        os.remove(directory + leftover)
def test_create(setup_teardown):
    """Creating a card returns a confirmation message and a uid."""
    resp = run('test', {'action': 'create',
                        'card': {'title': 'test title', 'text': 'test text'}})
    assert resp['message'] == 'Card created'
    assert 'card_uid' in resp
def test_create_and_retrieve_one_card(setup_teardown):
    """A created card can be read back with the same fields and uid."""
    created = run('test', {'action': 'create',
                           'card': {'title': 'test title', 'text': 'test text'}})
    read_resp = run('test', {'action': 'read'})
    assert 'cards' in read_resp
    card = read_resp['cards'][0]
    assert card['title'] == 'test title'
    assert card['text'] == 'test text'
    assert card['uid'] == created['card_uid']
def test_retrieve_multi_card(setup_teardown):
    """Reading returns every card that was created."""
    payload = {'action': 'create',
               'card': {'title': 'test title', 'text': 'test text'}}
    first_uid = run('test', payload)['card_uid']
    payload['card']['title'] = '2nd test'
    payload['card']['text'] = '2nd test'
    second_uid = run('test', payload)['card_uid']
    cards = run('test', {'action': 'read'})['cards']
    assert len(cards) == 2
    returned_uids = [card['uid'] for card in cards]
    assert first_uid in returned_uids
    assert second_uid in returned_uids
def test_retrieve_by_uid(setup_teardown):
    """A uid filter narrows the read down to exactly one card."""
    target = run('test', {'action': 'create',
                          'card': {'title': 'test title', 'text': 'test text'}})['card_uid']
    run('test', {'action': 'create', 'card': {'title': 'not this one'}})
    matches = run('test', {'action': 'read',
                           'filters': {'uid__eq': str(target)}})
    assert len(matches['cards']) == 1
def test_edit_card(setup_teardown):
    """Updating a card returns the new field values."""
    target = run('test', {'action': 'create',
                          'card': {'title': 'test title', 'text': 'test text'}})['card_uid']
    print(target)
    resp = run('test', {'action': 'update',
                        'card': {'uid': str(target),
                                 'title': 'updated title',
                                 'text': 'updated text'}})
    assert resp['message'] == 'Card updated'
    assert resp['new_card']['title'] == 'updated title'
    assert resp['new_card']['text'] == 'updated text'
def test_update_card_doesnt_delete_existing_params(setup_teardown):
    """A partial update leaves the untouched fields intact."""
    uid = create_card(title='test title', text='test text')
    run('test', {'action': 'update',
                 'card': {'uid': str(uid), 'text': 'updated text'}})
    card = read_repo()['cards'][0]
    assert card['title'] == 'test title'
    assert card['text'] == 'updated text'
def test_delete(setup_teardown):
    """Deleting a card removes it from the repo and echoes its uid."""
    target = run('test', {'action': 'create',
                          'card': {'title': 'test title', 'text': 'test text'}})['card_uid']
    print(target)
    resp = run('test', {'action': 'delete', 'card': {'uid': str(target)}})
    assert resp['message'] == 'Card deleted'
    assert resp['uid'] == str(target)
    assert len(run('test', {'action': "read"})['cards']) == 0
def test_delete_of_no_card(setup_teardown):
    """Deleting an unknown uid reports a helpful not-found message."""
    uid = '821c9390-845f-4d95-91af-10b654bc6ab9'
    resp = run('test', {'action': 'delete', 'card': {'uid': uid}})
    assert resp['message'] == f'No card with uid "{uid}" was found in the repo'
def test_create_with_parent(setup_teardown):
    """A card created with a parent stores that parent's uid."""
    parent_uid = create_card(title='test title', text='test text')
    child_uid = create_card(parent=str(parent_uid))
    child = read_repo({'uid__eq': str(child_uid)})['cards'][0]
    assert child['parent'] == str(parent_uid)
def test_cant_delete_card_with_children(setup_teardown):
    """A card that has children refuses deletion."""
    parent_uid = create_card()
    create_card(parent=str(parent_uid))
    resp = run('test', {'action': 'delete', 'card': {'uid': str(parent_uid)}})
    print(resp)
    assert resp['message'] == 'This card has existing children cards, cannot delete'
def test_card_has_length_attr(setup_teardown):
    """Short text does not trip the 500-character flag."""
    create_card(text='hello world')
    resp = read_repo()
    print(resp)
    assert not resp['cards'][0]['text_exceeds_500']
def test_card_returns_exceed_500_check(setup_teardown):
    """Text longer than 500 characters sets the exceeds flag."""
    long_string = ('Lorem ipsum dolor sit amet, consectetur adipiscing elit. '
                   'Integer iaculis interdum diam vitae dapibus. Praesent '
                   'et dapibus eros, rutrum feugiat velit. Proin placerat '
                   'orci dignissim, eleifend dui quis, aliquet tellus. '
                   'Vestibulum ante ipsum primis in faucibus orci luctus et '
                   'ultrices posuere cubilia Curae; Cras vel tincidunt '
                   'velit. Fusce nulla erat, malesuada eu ultrices pulvinar,'
                   ' fringilla viverra nisi. Donec non rutrum velit, sed '
                   'rutrum mi. Praesent consequat, tellus eget sagittis '
                   'ornare, augue justo molestie mi, vel accumsan risus '
                   'turpis id est. Donec congue hendrerit urna, nec aliquet '
                   'quam hendrerit at. Integer eget dui nec arcu venenatis '
                   'viverra nec nec justo. Praesent.')
    create_card(text=long_string)
    resp = read_repo()
    print(resp)
    assert resp['cards'][0]['text_exceeds_500']
|
12,342 | 7e9c9807e56a0d0c06c7c99bab4bdbb2b4e058a2 | import time
from app.db.base import Session
from app.model.history import History
from .config_service import ConfigService
class LimitService:
    """Sliding-window rate limiter backed by the History table."""

    # Singleton-style handle: set to the most recently constructed instance.
    _instance = None

    def __init__(self, config_service: ConfigService, db_session: Session):
        LimitService._instance = self
        self.config_service = config_service
        self.db = db_session

    def check_limit(self, config: str, access_id: str) -> (bool, int):
        """Return (allowed, retry_after_seconds) for *access_id* under *config*.

        Each entry in config_map[config] is indexed as (window_seconds,
        max_requests); the first exhausted window denies the request.
        """
        req_time = time.time()
        for cfg in self.config_service.config_map[config]:
            start_time = req_time - cfg[0]
            c = self.db.query(History) \
                .filter(History.access_id == access_id) \
                .filter(History.access_at >= start_time)
            if c.count() >= cfg[1]:
                item = c.first()
                # NOTE(review): the query has no ORDER BY, so first() is not
                # guaranteed to be the oldest row in the window — the retry
                # estimate may be off; confirm intended.
                # Converting float to int will be always floored, so add 1
                access_in = int(cfg[0] - (req_time - item.access_at)) + 1
                return False, access_in
        return True, 0

    def add_usage(self, config: str, access_id: str):
        """Record one access of *config* by *access_id* at the current time."""
        req_time = time.time()
        new_record = History(
            access_id=access_id,
            resource_name=config,
            access_at=req_time
        )
        self.db.add(new_record)
        self.db.commit()

    @staticmethod
    def get_instance():
        # Returns the most recently constructed service, or None.
        return LimitService._instance
|
# For each of t test cases: if n is a power of two, print the index (exponent)
# of its single set bit; otherwise print -1.
t = int(input())
for _ in range(t):
    n = int(input())
    if n > 0 and n & (n - 1) == 0:
        # Bug fix: the old code printed bin(n).index('1'), which is always 2
        # for a power of two (the '1' sits right after the '0b' prefix);
        # the position of the set bit is what's wanted.
        print(n.bit_length() - 1)
    else:
        print(-1)
12,344 | 9f00c532fae4226c751084a093ab054160629250 | #!/usr/bin/env python
import sys
from datetime import datetime, timedelta
from xml.etree import ElementTree as ET
import csv
# Day-name labels (currently unused by the functions below).
weekdays = ('Mon', 'Tues', 'Wed', 'Thurs', 'Fri', 'Sat', 'Sun')
# locations = {}
# with open('lost-locations.csv') as locfile:
#     reader = csv.DictReader(locfile)
#     for line in reader:
#         locations[line['name']] = (line['lat'], line['lon'])
# point_forecast_url = list(root.iter('moreWeatherInformation'))[0].text
def parse_noaa_time_string(noaa_time_str):
    """Parse a NOAA 'YYYY-MM-DDTHH:MM:SS[-hh:mm|Z]' stamp into a naive datetime.

    A trailing numeric UTC offset is discarded, not applied; a 'Z' suffix
    triggers a hard-coded shift of -8 hours (existing GMT hack, kept as-is).
    """
    date_str, time_str = noaa_time_str.split('T')  # ValueError unless exactly one 'T'
    tz_shift = None
    if '-' in time_str:
        time_str, _unused_offset = time_str.split('-')  # offset dropped, not applied
    elif time_str[-1] == 'Z':
        print('HACK subtracting eight hours from GMT')
        tz_shift = timedelta(hours=-8)
        time_str = time_str[:-1]
    year, month, day = [int(piece) for piece in date_str.split('-')]
    hour, minute, second = [int(piece) for piece in time_str.split(':')]
    stamp = datetime(year, month, day, hour, minute, second)
    if tz_shift is not None:
        stamp = stamp + tz_shift
    return stamp
def get_time_layouts(root):
    """Map each <time-layout> layout-key to its parsed start/end datetime lists."""
    layouts = {}
    for layout_el in root.find('data').findall('time-layout'):
        key = layout_el.find('layout-key').text
        parsed = {'start': [], 'end': []}
        for which in ('start', 'end'):
            for stamp_el in layout_el.iter(which + '-valid-time'):
                parsed[which].append(parse_noaa_time_string(stamp_el.text))
        layouts[key] = parsed
    return layouts
def combine_days(action, pdata, debug=False):
    """
    Perform <action> for all the values within each day, where <action> is either sum or mean.

    Returns {day_of_month: combined_value}.  'mean' values are time-weighted
    by interval length; intervals straddling midnight are split (and, for
    'sum', apportioned by hours) across the two days.
    """
    assert action == 'sum' or action == 'mean'
    starts, ends, values, weight_sum = [], [], [], []
    def get_time_delta_in_hours(start, end):
        """ NOTE assumes no overflows or wraps or nothing """
        dhour = end.hour - start.hour
        dmin = end.minute - start.minute
        dsec = end.second - start.second
        dtime = timedelta(hours=dhour, minutes=dmin, seconds=dsec) # NOTE rounds to nearest second
        # print start, end, dtime
        return float(dtime.seconds) / (60*60)
    def add_new_day(dstart, dend, dval):
        # Open a new per-day accumulator entry beginning at <dstart>.
        weight = '-'
        starts.append(dstart)
        ends.append(dend)
        if action == 'sum':
            values.append(dval)
        elif action == 'mean':
            weight = float(get_time_delta_in_hours(dstart, dend))
            values.append(weight*dval)
            weight_sum.append(weight)
        else:
            raise Exception('invalid action'+action)
        if debug:
            print ' new day', dstart, dend, weight, dval
    def increment_day(dstart, dend, dval):
        # Fold <dval> into the currently open day and extend its end time.
        ends[-1] = dend
        weight = '-'
        if action == 'sum':
            values[-1] += dval
        elif action == 'mean':
            weight = float(get_time_delta_in_hours(dstart, dend))
            values[-1] += weight * dval
            weight_sum[-1] += weight
        else:
            raise Exception('invalid action'+action)
        if debug:
            print ' increment', starts[-1], dend, weight, dval, '  ', values[-1]
    def incorporate_value(istart, iend, ival):
        # if debug:
        #     print '    incorporate', istart, iend, ival
        if len(values) == 0 or ends[-1].day != istart.day:
            add_new_day(istart, iend, ival)
        else:
            increment_day(istart, iend, ival)
    for ival in range(len(pdata['values'])):
        start = pdata['time-layout']['start'][ival]
        if len(pdata['time-layout']['end']) > 0: # some of them only have start times
            end = pdata['time-layout']['end'][ival]
        elif len(pdata['time-layout']['start']) > ival+1: # so use the next start time minus a ms if we can
            end = pdata['time-layout']['start'][ival+1] - timedelta(milliseconds=-1)
        else:
            end = pdata['time-layout']['start'][ival] + timedelta(hours=6) # otherwise just, hell, add six hours
        if debug:
            print ' day %3d-%-3d  hour %3d-%-3d   %s' % (start.day, end.day, start.hour, end.hour, pdata['values'][ival])
        # skip null values (probably from cloud cover)
        if pdata['values'][ival] == None:
            if debug:
                print '    skipping null value'
            continue
        val = float(pdata['values'][ival])
        if start.day == end.day:
            incorporate_value(start, end, val)
        else:
            if debug:
                print '   start (%s) and end (%s) days differ' % (start, end)
            assert start.day + 1 == end.day # for now only handle the case where they differ by one day
            midnight = datetime(year=end.year, month=end.month, day=end.day, hour=0, minute=0, second=0)
            if action == 'sum':
                # Apportion the value between the two days by hours on each side.
                hours_before = get_time_delta_in_hours(start, midnight) #24 - start.hour
                hours_after = get_time_delta_in_hours(midnight, end) #end.hour
                val_before = val * float(hours_before) / (hours_before + hours_after)
                val_after = val * float(hours_after) / (hours_before + hours_after)
                if debug:
                    print '    apportioning between',
                    print 'first %f * %f / (%f + %f) = %f' % (val, hours_before, hours_before, hours_after, val_before),
                    print 'and second %f * %f / (%f + %f) = %f' % (val, hours_after, hours_before, hours_after, val_after)
            else:
                val_before, val_after = val, val
            incorporate_value(start, midnight + timedelta(milliseconds=-1), val_before) #start + timedelta(hours=24-start.hour, milliseconds=-1), val_before)
            incorporate_value(midnight, end + timedelta(milliseconds=-1), val_after) # end - timedelta(hours=end.hour), end, val_after)
    dailyvals = {}
    for ival in range(len(values)):
        dailyvals[int(starts[ival].day)] = values[ival]
        if action == 'mean':
            # if debug:
            #     print 'total', get_time_delta_in_hours(starts[ival], ends[ival])
            dailyvals[int(starts[ival].day)] /= weight_sum[ival] #get_time_delta_in_hours(starts[ival], ends[ival])
    if debug:
        print '  final:'
        for key in sorted(dailyvals.keys()):
            print '   ', key, dailyvals[key]
    return dailyvals
def parse_data(root, time_layouts, debug=False):
    """Extract every time-series parameter under <data><parameters> into a dict.

    root         -- parsed DWML-style ElementTree root
    time_layouts -- mapping of time-layout key -> layout dict (with a 'start' list)
    debug        -- when True, print progress per parameter

    Returns {name: {}} for single-point parameters and
    {name: {'time-layout': layout, 'values': [str, ...]}} for time series.
    Raises Exception when a parameter has != 1 <name> or a duplicate name.
    """
    pars = root.find('data').find('parameters')
    data = {}
    for vardata in pars:
        # first figure out the name
        all_names = list(vardata.iter('name'))
        if len(all_names) != 1:
            # BUG FIX: joining Element objects raised TypeError; join their text instead.
            raise Exception('ERROR too many names for %s: %s' % (vardata.tag, ', '.join(str(n.text) for n in all_names)))
        name = all_names[0].text
        if name in data:
            # BUG FIX: the original referenced an undefined variable `key` here.
            raise Exception('ERROR %s already in data' % name)
        # then get the data
        data[name] = {}
        if vardata.get('time-layout') is None:  # single-point data
            if debug:
                print(' no layout %s' % name)
            continue
        # time series data
        data[name]['time-layout'] = time_layouts[vardata.get('time-layout')]
        data[name]['values'] = [val.text for val in vardata.findall('value')]
        if debug:
            print('added %s (%s)' % (name, vardata.get('time-layout')))
        # Length mismatch is tolerated; only report it when debugging.
        if len(data[name]['time-layout']['start']) != len(data[name]['values']):
            if debug:
                print(' time layout different length for %s' % name)
    return data
def find_min_temp(pdata, prev_day, next_day):
    """ find min temp for the night of <prev_day> to <next_day> """
    layout = pdata['time-layout']
    for ival, raw in enumerate(pdata['values']):
        # A matching overnight period starts on prev_day and ends on next_day.
        if layout['start'][ival].day == prev_day and layout['end'][ival].day == next_day:
            return int(raw)
    # No matching period: callers treat None as "missing".
    # raise Exception('ERROR didn\'t find min temp for night of %d-%d in %s' % (prev_day, next_day, pdata['time-layout']))
    return None
def find_max_temp(pdata, day):
    """ find the max temp for the daytime period falling entirely on <day> """
    # BUG FIX (docs): the original docstring was copy-pasted from find_min_temp
    # and described the wrong parameters/behavior.
    for ival in range(len(pdata['values'])):
        start = pdata['time-layout']['start'][ival]
        end = pdata['time-layout']['end'][ival]
        if start.day == day and end.day == day:
            return int(pdata['values'][ival])
    # No matching period: callers treat None as "missing".
    # raise Exception('ERROR didn\'t find max temp for %d in %s' % (day, pdata['time-layout']))
    return None
def prettify_values(data, ndays=5, debug=False):
    """Turn parsed NDFD parameter data into per-day display values.

    Returns (txtvals, rowlist): txtvals is a dict of parallel per-day lists
    (days/tmax/tmin/liquid/snow/wind/cloud/precip) and rowlist holds one HTML
    fragment per day.  Relies on module-level combine_days, find_min_temp,
    find_max_temp and weekdays.
    """
    mintemps = data['Daily Minimum Temperature']
    maxtemps = data['Daily Maximum Temperature']
    # Collapse the multi-hour series to one value per calendar day.
    liquid = combine_days('sum', data['Liquid Precipitation Amount'])
    snow = combine_days('sum', data['Snow Amount'])
    wind_speed = combine_days('mean', data['Wind Speed'])
    cloud = combine_days('mean', data['Cloud Cover Amount'])
    percent_precip = combine_days('mean', data['12 Hourly Probability of Precipitation'])
    txtvals = {'days':[], 'tmax':[], 'tmin':[], 'liquid':[], 'snow':[], 'wind':[], 'cloud':[], 'precip':[]}
    if debug:
        print '%-5s %4s %5s%5s %5s %5s' % ('', 'hi lo', 'precip (snow)', '%', 'wind', 'cloud')
    rowlist = []
    for iday in range(ndays):
        day = datetime.now() + timedelta(days=iday)
        tmax = find_max_temp(maxtemps, day.day)
        tmin = find_min_temp(mintemps, day.day, day.day+1)
        row = ''
        if tmax is not None:
            row += ' %d' % tmax
        if tmin is not None:
            row += ' %d<br>' % tmin
        if day.day in percent_precip:
            row += ' %.0f<font size=1>%%</font>' % percent_precip[day.day]
        # liquid
        row += '<font color=blue><b>'
        if day.day in liquid:
            if liquid[day.day] > 0.0:
                row += (' %.2f' % liquid[day.day]).replace('0.', '.')
            else:
                row += ' 0'
        else:
            row += ' - '
        row += '</b></font>'
        # snow
        # NOTE(review): this guard tests `day.day in liquid` but then indexes
        # `snow[day.day]` -- looks like it should test `in snow`; confirm.
        row += '<font color=grey><b>'
        if day.day in liquid:
            if snow[day.day] > 0.0:
                row += (' (%.2f)' % snow[day.day]).replace('0.', '.')
            else:
                row += ' '
        else:
            row += ' - '
        row += '</b></font>'
        row += '<br>'
        # wind speed
        if day.day in wind_speed:
            row += ' %.0f' % wind_speed[day.day]
            row += '<font size=1>mph</font>'
        else:
            row += ' - '
        # cloud cover
        if day.day in cloud:
            row += ' %.0f' % cloud[day.day]
            row += '<font size=1>% cover</font>'
        else:
            row += ' - '
        rowlist.append(row)
        # Plain-text values mirror the HTML row for console/debug output.
        tv = txtvals
        tv['tmax'].append('-' if tmax is None else tmax)
        tv['tmin'].append('-' if tmin is None else tmin)
        tv['liquid'].append(('%5.1f' % liquid[day.day]) if day.day in liquid else '-')
        tv['snow'].append('')
        if day.day in snow and snow[day.day] > 0.0:
            tv['snow'][-1] = '%5.1f' % snow[day.day]
        tv['wind'].append(('%5.0f' % wind_speed[day.day]) if day.day in wind_speed else '-')
        tv['cloud'].append(('%5.0f' % cloud[day.day]) if day.day in cloud else '-')
        tv['precip'].append(('%5.0f' % percent_precip[day.day]) if day.day in percent_precip else '-')
        tv['days'].append(weekdays[day.weekday()])
        if debug:
            print '%-6s %4s %-3s %5s %5s %5s %5s %5s' % (weekdays[day.weekday()], tv['tmax'][-1], tv['tmin'][-1], tv['liquid'][-1], tv['snow'][-1], tv['precip'][-1], tv['wind'][-1], tv['cloud'][-1])
    return tv, rowlist
def verbosocast(tree):
    """Build an HTML forecast table from a parsed NDFD XML tree and write tmp.html.

    NOTE(review): depends on the third-party `HTML` module and on helpers
    (get_time_layouts, parse_data, prettify_values) defined elsewhere in this
    file -- confirm they are importable in the deployment environment.
    """
    root = tree.getroot()
    time_layouts = get_time_layouts(root)
    data = parse_data(root, time_layouts)
    # Forecast point coordinates, shown in the table's first column.
    point = root.find('data').find('location').find('point')
    lat, lon = point.get('latitude'), point.get('longitude')
    tv, rowlist = prettify_values(data, debug=True)
    import HTML
    rowlist.insert(0, ' %s <br> %s ' % (lat, lon))
    table_vals = [rowlist,]
    htmlcode = HTML.table(table_vals, header_row=['',] + tv['days'], col_width=['15%' for _ in range(len(table_vals[0]))])
    with open('tmp.html', 'w') as outfile:
        outfile.write(htmlcode)
|
12,345 | 9bf2bcc7832b0262037699da34fff6d9cfe6bbc0 | from django.shortcuts import render
from django.http import HttpResponseRedirect

from .load_data import load_job_data
from .forms import LoadData
from .queries import data
DATA_TO_BE_FETCHED = [("dev ops", 10),("contador", 10),("administracion", 10), ("diseno", 10)]
def home(request):
    """Render the static landing page."""
    template_name = "workdata/home.html"
    return render(request, template_name)
def load_data_form(request):
    """Handle the job-data loading form.

    POST: validate the form and load job data for the submitted query/limit;
    if that fails, fall back to loading the default DATA_TO_BE_FETCHED set,
    then redirect to the success page.
    GET (or invalid POST): render the form.
    """
    # if this is a POST request we need to process the form data
    if request.method == 'POST':
        # create a form instance and populate it with data from the request:
        form = LoadData(request.POST)
        # check whether it's valid:
        if form.is_valid():
            # BUG FIX: cleaned_data is an attribute, not a callable.
            cleaned_data = form.cleaned_data
            # BUG FIX: bind query/limit before the try block so the redirect
            # below can always reference them.
            query = cleaned_data.get("query")
            limit = cleaned_data.get("limit")
            try:
                load_job_data(query, limit)
            except Exception:
                # Fall back to the default queries.  BUG FIX: the loop variable
                # no longer shadows the imported `data` helper.
                for fallback_query, fallback_limit in DATA_TO_BE_FETCHED:
                    load_job_data(fallback_query, fallback_limit)
            # BUG FIX: HttpResponseRedirect was never imported (NameError).
            return HttpResponseRedirect('/success/', {"query": query, "limit": limit})
    # if a GET (or any other method) we'll create a blank form
    else:
        form = LoadData()
    return render(request, 'workdata/load_data_form.html', {'form': form})
def load_data_success(request):
    """Render the confirmation page shown after a successful data load."""
    template_name = 'workdata/load_data_success.html'
    return render(request, template_name)
def index(request):
    """Render the aggregated query results page."""
    results = data()
    return render(request, 'workdata/index.html', {"resp": results})
|
12,346 | 592cad3acb8bf03c44511dfd5cf6a4c36357e358 | from django.contrib import admin
from priton.models import Person, Phrase, Comics, Essense
class PersonAdmin(admin.ModelAdmin):
    """Admin list view for Person: show full and short names."""
    list_display = ('full_name', 'short_name',)
    #list_editable = ('sort', )
class PhraseAdmin(admin.ModelAdmin):
    """Admin list view for Phrase: show the phrase text and its author."""
    list_display = ('phrase', 'author',)
class EssenseInline(admin.TabularInline):
    """Tabular inline editor for the Essense<->Comics M2M through table."""
    model = Essense.comics.through
class ComicsAdmin(admin.ModelAdmin):
    """Admin view for Comics with inline editing of linked Essense entries."""
    list_display = ('title', 'comics_descr',)
    inlines = (EssenseInline,)
    # exclude = ('participants', )
    #list_editable = ('sort', )
class EssenseAdmin(admin.ModelAdmin):
    """Admin view for Essense; the comics M2M is edited via ComicsAdmin's inline."""
    list_display = ('name',)
    exclude = ('comics',)
# Hook the curated ModelAdmins into the default admin site.
admin.site.register(Person, PersonAdmin)
admin.site.register(Phrase, PhraseAdmin)
admin.site.register(Comics, ComicsAdmin)
#admin.site.register(Essense, EssenseAdmin)
12,347 | 13911b8ed07372539c36b7b4578e2bd7cccd5b60 | from _collections import deque
import heapq
def solution(jobs):
    """Disk-controller scheduling: return the mean turnaround time (floored).

    jobs -- list of [request_time, duration] pairs.  Jobs are served
    shortest-duration-first among those already requested; when the queue is
    idle, the clock jumps to the next request.
    """
    pending = deque(sorted(jobs))          # jobs not yet requested, by request time
    ready = []                             # min-heap of (duration, request_time)
    job_count = len(jobs)
    now = waited = finished = 0
    while finished < job_count:
        if ready:
            duration, requested = heapq.heappop(ready)
            now += duration
            waited += now - requested
        else:
            # Idle: jump to the next request and run it immediately.
            requested, duration = pending.popleft()
            waited += duration
            now = requested + duration
        finished += 1
        # Everything requested by `now` becomes eligible, keyed by duration.
        while pending and pending[0][0] <= now:
            req, dur = pending.popleft()
            heapq.heappush(ready, (dur, req))
    return waited // job_count
12,348 | c5d8f21e8b781975d57fc093a56945b06ce99173 | import matplotlib.pyplot as plot
import math
# Captured envelope sweep: one fixed-width amplitude sample per line.
# BUG FIX: use a context manager so the file handle is closed (the original
# left it open for the life of the process).
with open("/home/pi/Documents/rpi_xc111/test_envelope_data/7_5_2019_11_28_34_UTC.txt") as f:
    filelines = f.readlines()
testlines = []
data = []
if __name__ == '__main__':
    # Trim capture header/footer lines.
    testlines = filelines[10:630]
    # actual start: 99 mm
    # actual length: 400 mm
    # actual end: 499 mm
    # data length: 827
    for i, line in enumerate(testlines):
        amplitude = float(line[0:6])        # first 6 chars hold the amplitude
        dist_mm = 99 + (i * 400/827)        # sample index -> distance in mm
        dist_in = dist_mm * 0.039370        # mm -> inches
        data.append((dist_in, amplitude))
    for sample in data:
        print(str(sample))
    xpoints = [d[0] for d in data]
    ypoints = [d[1] for d in data]
    plot.plot(xpoints, ypoints)
    plot.show()
12,349 | 4f493076b3d8a27e8a78711cb423a33cc9055799 | from django.shortcuts import render, redirect
from django.http import HttpResponse
from django.contrib import messages
from . import dbsearch
def deleteLicense(request):
    """Delete the license identified by the ``Lno`` query parameter, then go home.

    Both outcomes of the delete produced the same redirect in the original,
    so the result is intentionally ignored.
    """
    if request.method == "GET":
        dbsearch.deleteLicense(request.GET.get('Lno'))
    return redirect('/')
def deleteClient(request):
    """Delete the client identified by ``Tno``/``Lno`` query parameters, then go home.

    Both outcomes of the delete produced the same redirect in the original,
    so the result is intentionally ignored.
    """
    if request.method == "GET":
        dbsearch.deleteClient(request.GET.get('Tno'), request.GET.get('Lno'))
    return redirect('/')
12,350 | b5781ade95f1def3f7788c26925b14d803f16b60 | import datetime
import pytz
# Ask for an IANA timezone name and print the current time there.
tz_name = input('Enter TZ :')
zoned_now = datetime.datetime.now(pytz.timezone(tz_name))
stamp = zoned_now.strftime("%Y-%m-%d %H:%M:%S")
print(stamp)
12,351 | 9af9adf77fdb7e1a3751f8ee57dafe861801f864 | """
This is the deployments module and supports all the ReST actions for the
ci collection
"""
from pprint import pformat
from flask import abort, make_response
from config import app, db
from models import CI, CISchema
def read_all():
    """
    This function responds to a request for /api/ci
    with the complete lists of CIs
    :return: json string of list of CIs
    """
    # Fetch every CI ordered by primary key, then serialize the batch.
    all_cis = db.session.query(CI).order_by(CI.id).all()
    app.logger.debug(pformat(all_cis))
    return CISchema(many=True).dump(all_cis)
def read_one(id):
    """
    This function responds to a request for /ci/{id}
    with one matching ci from CIs
    :param id: id of ci to find
    :return: ci matching id (404 if absent)
    """
    ci = db.session.query(CI).filter(CI.id == id).one_or_none()
    # Guard clause: abort() raises, so the happy path stays unindented.
    if ci is None:
        abort(404, "CI with id {id} not found".format(id=id))
    return CISchema().dump(ci)
def read_keyValues():
    """
    This function responds to a request for /keyValues/ci
    with the complete lists of CIs as {key, value} pairs
    :return: list of {"key": id, "value": value} dicts
    """
    all_cis = db.session.query(CI).order_by(CI.id).all()
    app.logger.debug(pformat(all_cis))
    dumped = CISchema(many=True).dump(all_cis)
    # Reshape each serialized CI into the key/value form the UI expects.
    keyValues = [{"key": d.get("id"), "value": d.get("value")} for d in dumped]
    print(keyValues)
    return keyValues
def create(ciDetails):
    """
    This function creates a new ci in the ci list
    based on the passed in ci data
    :param ciDetails: ci to create in ci structure (mutated: its "id" key is removed)
    :return: (serialized ci, 201) on success, 406 abort if a ci with the same value exists
    """
    # Remove id as it's created automatically (note: mutates the caller's dict)
    if "id" in ciDetails:
        del ciDetails["id"]
    # Does the ci exist already?  Uniqueness is keyed on the "value" column.
    existing_ci = (
        db.session.query(CI).filter(CI.value == ciDetails["value"]).one_or_none()
    )
    if existing_ci is None:
        schema = CISchema()
        new_ci = schema.load(ciDetails, session=db.session)
        db.session.add(new_ci)
        db.session.commit()
        # Serialize and return the newly created CI
        # in the response
        data = schema.dump(new_ci)
        return data, 201
    # Otherwise, it already exists, that's an error
    else:
        abort(406, "CI already exists")
def update(id, ciDetails):
    """
    This function updates an existing ci in the ci list
    :param id: id of the ci to update in the ci list
    :param ciDetails: ci fields to update; its "id" must match the path id
    :return: (updated ci, 200); 400 abort on id mismatch, 404 if not found
    """
    app.logger.debug(pformat(ciDetails))
    # Reject requests whose body id disagrees with the URL id.
    if ciDetails["id"] != id:
        abort(400, "Key mismatch in path and body")
    # Does the ci exist in ci list?
    existing_ci = db.session.query(CI).filter(CI.id == id).one_or_none()
    # Does ci exist?
    if existing_ci is not None:
        schema = CISchema()
        update_ci = schema.load(ciDetails, session=db.session)
        update_ci.id = ciDetails["id"]
        db.session.merge(update_ci)
        db.session.commit()
        # return the updated ci in the response
        data = schema.dump(update_ci)
        return data, 200
    # otherwise, nope, the ci doesn't exist, so that's an error
    else:
        abort(404, "CI not found")
def delete(id):
    """
    This function deletes a CI from the CI list
    :param id: id of the CI to delete
    :return: 200 on successful delete, 404 if not found
    """
    existing_ci = db.session.query(CI).filter(CI.id == id).one_or_none()
    # Guard clause: abort() raises, so the delete path stays unindented.
    if existing_ci is None:
        abort(404, f"CI {id} not found")
    db.session.delete(existing_ci)
    db.session.commit()
    return make_response(f"CI {id} successfully deleted", 200)
12,352 | d99dd9a962fe5a82930d5aabecf0d16b0999f73c | from selenium import webdriver
from selenium.webdriver.chrome.options import Options
class FirstSelenium:
    """Minimal Selenium demo: launch Chrome and open python.org.

    NOTE(review): all statements live directly in the class body, so the
    browser is launched as a side effect of defining/importing this class --
    confirm this is intended rather than an __init__ or main() method.
    """
    options = Options()
    options.add_argument('--ignore-certificate-errors')
    options.add_argument('--test-type')
    # Hard-coded driver path; chrome_options is a deprecated kwarg in newer Selenium.
    driver = webdriver.Chrome(executable_path="/home/richard-u18/PycharmProjects/SeleniumPython/webdrivers/chromedriver",chrome_options=options)
    driver.get('https://python.org')
    # time.sleep(5) # Let the user actually see something!
    # search_box = driver.find_element_by_name('q')
    # search_box.send_keys('ChromeDriver')
    # search_box.submit()
    # time.sleep(5) # Let the user actually see something!
    # driver.quit()
12,353 | 1e33a50887fd1c663b46507a5899cd1ed4d1e8d5 | from simulation import Simulation
def simulator(parameters):
    # Placeholder simulation callback; receives one parameter combination per call.
    pass
    #print(parameters)
# Sweep every combination of SNR and τ through the (currently stubbed) simulator.
simulation = Simulation(parameters={
    'SNR': [0, 10, 20, 30],
    'τ': [0.6, 1]
}, function=simulator)
simulation.run()
12,354 | ba0eb73d5ab9685b11f09eeb55358e74e348177d | # finger Exercise 3 (2.4)
# Write a program that asks the user to input 10 integers, and then prints the largest odd number that was entered.
# If no odd number was entered, it should print a message to that effect.
def finger(numbers):
    """Return a message naming the largest odd number in *numbers*.

    Arguments:
        numbers {list} -- list of integers

    Returns:
        str -- the largest odd number, or a notice when no odd number was entered.
    """
    greater = None  # largest odd number seen so far; None until one is found
    for i in numbers:
        # BUG FIX: the original used 0 as the sentinel, so inputs whose odd
        # numbers are all negative were wrongly reported as having none.
        if i % 2 != 0 and (greater is None or i > greater):
            greater = i
    if greater is None:  # no odd number was entered
        return 'None of the numbers entered are odd.'
    return 'The largest odd number is: ' + str(greater)
# Interactive driver: read N integers from the user and report the largest odd one.
numbers = int(input('How many number do you wish to enter: '))
lista = []
for i in range(numbers):
    entry = int(input(f'Enter a integer({i+1}): '))
    lista.append(entry)
# print(lista)
print(finger(lista))
12,355 | e9533c264643630ea32de6ec0be2252bb0401120 | from django.apps import AppConfig
class KellycalcConfig(AppConfig):
    """Django application configuration for the kellycalc app."""
    name = 'kellycalc'
12,356 | da92343a0a9b652999bb2fd11e6c514901fce00c | import json
import requests
import time
import os
import sys
import random
def getprox():
    """Return a requests-style proxies dict ({'http': ..., 'https': ...})
    pointing at one randomly chosen webshare.io proxy endpoint."""
    # Static pool of host:port endpoints; one is picked per call to spread load.
    proxies = ['u2.p.webshare.io:10000',
               'u3.p.webshare.io:10001',
               'u2.p.webshare.io:10002',
               'u2.p.webshare.io:10003',
               'u2.p.webshare.io:10004',
               'e1.p.webshare.io:10005',
               'u1.p.webshare.io:10006',
               'u1.p.webshare.io:10007',
               'u2.p.webshare.io:10008',
               'e1.p.webshare.io:10009',
               'u3.p.webshare.io:10010',
               'u1.p.webshare.io:10011',
               'u2.p.webshare.io:10012',
               'u1.p.webshare.io:10013',
               'u1.p.webshare.io:10014',
               'e1.p.webshare.io:10015',
               'e1.p.webshare.io:10016',
               'u2.p.webshare.io:10017',
               'u3.p.webshare.io:10018',
               'u3.p.webshare.io:10019',
               'e1.p.webshare.io:10020',
               'u2.p.webshare.io:10021',
               'u2.p.webshare.io:10022',
               'u2.p.webshare.io:10023',
               'u1.p.webshare.io:10024',
               'u2.p.webshare.io:10025',
               'e1.p.webshare.io:10026',
               'u1.p.webshare.io:10027',
               'u1.p.webshare.io:10028',
               'u2.p.webshare.io:10029',
               'u1.p.webshare.io:10030',
               'e1.p.webshare.io:10031',
               'u2.p.webshare.io:10032',
               'e1.p.webshare.io:10033',
               'u2.p.webshare.io:10034',
               'u1.p.webshare.io:10035',
               'u3.p.webshare.io:10036',
               'e3.p.webshare.io:10037',
               'u1.p.webshare.io:10038',
               'u1.p.webshare.io:10039',
               'u2.p.webshare.io:10040',
               'u1.p.webshare.io:10041',
               'e3.p.webshare.io:10042',
               'u2.p.webshare.io:10043',
               'u2.p.webshare.io:10044',
               'u1.p.webshare.io:10045',
               'e1.p.webshare.io:10046',
               'u1.p.webshare.io:10047',
               'u2.p.webshare.io:10048',
               'u2.p.webshare.io:10049',
               'u1.p.webshare.io:10050',
               'u1.p.webshare.io:10051',
               'u1.p.webshare.io:10052',
               'u2.p.webshare.io:10053',
               'u2.p.webshare.io:10054',
               'u1.p.webshare.io:10055',
               'e1.p.webshare.io:10056',
               'u1.p.webshare.io:10057',
               'u3.p.webshare.io:10058',
               'u1.p.webshare.io:10059',
               'e1.p.webshare.io:10060',
               'u3.p.webshare.io:10061',
               'u1.p.webshare.io:10062',
               'u2.p.webshare.io:10063',
               'e1.p.webshare.io:10064',
               'u1.p.webshare.io:10065',
               'u1.p.webshare.io:10066',
               'u2.p.webshare.io:10067',
               'u1.p.webshare.io:10068',
               'u1.p.webshare.io:10069',
               'u1.p.webshare.io:10070',
               'u2.p.webshare.io:10071',
               'u1.p.webshare.io:10072',
               'u1.p.webshare.io:10073',
               'u2.p.webshare.io:10074',
               'u1.p.webshare.io:10075',
               'u1.p.webshare.io:10076',
               'e1.p.webshare.io:10077',
               'u1.p.webshare.io:10078',
               'e3.p.webshare.io:10079',
               'e1.p.webshare.io:10080',
               'u1.p.webshare.io:10081',
               'u2.p.webshare.io:10082',
               'u2.p.webshare.io:10083',
               'u1.p.webshare.io:10084',
               'u2.p.webshare.io:10085',
               'u3.p.webshare.io:10086',
               'u1.p.webshare.io:10087',
               'u1.p.webshare.io:10088',
               'u2.p.webshare.io:10089',
               'e3.p.webshare.io:10090',
               'u1.p.webshare.io:10091',
               'u2.p.webshare.io:10092',
               'u1.p.webshare.io:10093',
               'u1.p.webshare.io:10094',
               'u2.p.webshare.io:10095',
               'u1.p.webshare.io:10096',
               'u1.p.webshare.io:10097',
               'e1.p.webshare.io:10098',
               'u1.p.webshare.io:10099']
    # The same http:// URL is used for both schemes.
    proxy = random.choice(proxies)
    proxy = f'http://{proxy}'
    proxies = {
        "http": proxy,
        "https": proxy,
    }
    return(proxies)
# Crawl speedrun.com run pages per game and append every video link found
# to yt-speedrun-links-new.txt; failures are logged to failedruns.txt /
# failedgames.txt.
os.chdir(sys.path[0])
with open('yt-speedrun-games.txt', 'r') as f:
    games = f.read()
games = games.split('\n')
for game in games:
    if len(game) < 3:
        # BUG FIX: the original used `pass`, which did NOT skip blank/short
        # lines; `continue` actually skips them.
        continue
    offset = 0
    print(game)
    url = f"https://www.speedrun.com/api/v1/runs?game={game}&max=200"
    continue_game = True
    while continue_game:
        print(url)
        # BUG FIX: reset `data` each page so a failed request can never
        # silently reprocess the previous page's payload.
        data = None
        try:
            data = json.loads(requests.get(url, proxies=getprox()).text)
        except Exception:
            with open('failedruns.txt', 'a') as f:
                f.write(url + "\n")
        offset += 200
        if offset >= 9800:
            # API pagination cap reached; give up on this game.
            with open('failedgames.txt', 'a') as f:
                f.write(game + "\n")
            continue_game = False
            continue
        if data is None or not isinstance(data.get('data'), list):
            # Fetch failed or payload malformed: log and stop this game.
            with open('failedgames.txt', 'a') as f:
                f.write(game + "\n")
            continue_game = False
            continue
        if len(data['data']) < 1:
            continue_game = False
        url = f"https://www.speedrun.com/api/v1/runs?game={game}&max=200&offset={offset}"
        for run in data['data']:
            try:
                for video in run['videos']['links']:
                    video = video['uri']
                    print(video)
                    with open('yt-speedrun-links-new.txt', 'a') as f:
                        f.write(video + "\n")
            except (KeyError, TypeError):
                # Run has no videos/links structure; skip it.
                pass
12,357 | 7fd168c4478fa00857626b2062c3a7c604227a54 | from django.contrib import admin
from webapp.models import riderride
from webapp.models import ridermaster
from webapp.models import limitwattsrider
from webapp.models import Document
# Register your models here.
# Expose the ride-tracking models in the default admin with stock options.
admin.site.register(ridermaster)
admin.site.register(riderride)
admin.site.register(Document)
admin.site.register(limitwattsrider)
12,358 | 98709fda19a73f15ea9eb4b1e3a80bbe7309039c | #!/usr/bin/env python
# -*- coding: UTF-8 -*-
'''
Creado 26/04/2016
@author: Cinthya Ramos. 09-11237
@author: Patricia Valencia. 10-10916
'''
import sys
from lexer import tokens, find_column, lexer_error, lexer_tokenList, analyzeNeo
if __name__ == '__main__':
    # Validate CLI usage: a single Neo source file is required.
    if len(sys.argv) < 2:
        print ("Error: Parametros de entrada incorrectos.")
        print ("Debe hacerlo de la siguiente manera: ")
        print ('\n''\t'+ "./LexNeo archivo.neo" +'\n')
        exit()
    # BUG FIX: read the Neo source with a context manager so the file handle
    # is closed (the original leaked it).
    with open(sys.argv[1], 'r') as codeFile:
        code = codeFile.read()
    # Run the lexical analysis over the whole source.
    analyzeNeo(code)
12,359 | d15d99872c85c0d87db13725113b87f61fd4f3c1 | print("Дан одномерный массив. Найти среднее арифметическое его элементов. Вывести на экран только те элементы массива, которые больше найденного среднего арифметического.")
arr = [1, -1, 2, 0, 3, 5, 11]
print('Массив: ', arr)
# Arithmetic mean of the array elements.
mid = sum(arr) / len(arr)
print('Среднее:', mid)
# Report every element strictly greater than the mean.
for value in arr:
    if value > mid:
        print('Цифра ', value, ' больше среднего')
def query(start, end, groupby, conditions=None, filter_keys=None, aggregations=None, rollup=None, arrayjoin=None, limit=None, orderby=None, having=None, referrer=None, is_grouprelease=False, selected_columns=None):
    """Run a snuba-style raw query and nest the result rows by the groupby columns.

    BUG FIX: the original def line was fused with dump metadata, leaving the
    file unparseable; the signature is restored here.
    Relies on module-level raw_query, timer and nest_groups helpers.
    """
    # Default to a plain count() aggregate and empty filters/columns.
    aggregations = (aggregations or [['count()', '', 'aggregate']])
    filter_keys = (filter_keys or {
    })
    selected_columns = (selected_columns or [])
    body = raw_query(start, end, groupby=groupby, conditions=conditions, filter_keys=filter_keys, selected_columns=selected_columns, aggregations=aggregations, rollup=rollup, arrayjoin=arrayjoin, limit=limit, orderby=orderby, having=having, referrer=referrer, is_grouprelease=is_grouprelease)
    aggregate_cols = [a[2] for a in aggregations]
    # Sanity-check that the backend returned exactly the columns we asked for.
    expected_cols = set(((groupby + aggregate_cols) + selected_columns))
    got_cols = set((c['name'] for c in body['meta']))
    assert (expected_cols == got_cols)
    with timer('process_result'):
        return nest_groups(body['data'], groupby, aggregate_cols)
12,361 | b83e0edf938eba545f8679363bb37cc51fa4d6c8 | (ur'^password_reset/$', 'django.contrib.auth.views.password_reset'),
(ur'^password_reset/done/$', 'django.contrib.auth.views.password_reset_done'),
(ur'^reset/(?P<uidb36>[0-9A-Za-z]+)-(?P<token>.+)/$', 'django.contrib.auth.views.password_reset_confirm'),
(ur'^reset/done/$', 'django.contrib.auth.views.password_reset_complete'),
|
12,362 | 06306afe1d8703454af5b1103e7855ea6c6e8a2d | from django.contrib import admin
from main.models import Ship, Container, Dock, Employee, DockHistory
# Expose the shipping-dock models in the default admin with stock options.
admin.site.register(Ship)
admin.site.register(Container)
admin.site.register(Dock)
admin.site.register(Employee)
admin.site.register(DockHistory)
12,363 | 5a5fef206c1aa15be1669684c738ca15ccb55cef | from keras.models import Model
from keras.datasets import mnist
from keras.callbacks import ModelCheckpoint
from keras.utils.np_utils import to_categorical
from keras.layers import Dense, Dropout, Flatten, Conv2D, MaxPooling2D, Input, add, concatenate
# load the mnist data
(x_train, y_train), (x_val, y_val) = mnist.load_data()
# reshape the data in three channels
x_train = x_train.reshape(x_train.shape[0], 28, 28, 1)
x_val = x_val.reshape(x_val.shape[0], 28, 28, 1)
# pixel values are originally ranged from 0-255, rescale it to 0-1
x_train = x_train.astype('float32')/255.0
x_val = x_val.astype('float32')/255.0
# one-hot output vector
y_train = to_categorical(y_train, 10)
y_val = to_categorical(y_val, 10)
# Shared input feeds two parallel convolutional branches.
inputs = Input(shape=(28, 28, 1), dtype='float32')
# a two layer deep cnn network
# 64 and 128 filters with filter size 3*3
# max-pool size 2*2 - it will downscale both the input dimensions into halve
conv_a1 = Conv2D(64, kernel_size=(3, 3), activation='relu', padding='same')(inputs)
max_pool_a1 = MaxPooling2D(pool_size=(2, 2))(conv_a1)
conv_a2 = Conv2D(128, kernel_size=(3, 3), activation='relu', padding='same')(max_pool_a1)
max_pool_a2 = MaxPooling2D(pool_size=(2, 2))(conv_a2)
out_a = Flatten()(max_pool_a2)
# another two layer deep cnn network
# 64 and 128 filters with filter size 4*4
# max-pool size 2*2 - it will downscale both the input dimensions into halve
conv_b1 = Conv2D(64, kernel_size=(4, 4), activation='relu', padding='same')(inputs)
max_pool_b1 = MaxPooling2D(pool_size=(2, 2))(conv_b1)
conv_b2 = Conv2D(128, kernel_size=(4, 4), activation='relu', padding='same')(max_pool_b1)
max_pool_b2 = MaxPooling2D(pool_size=(2, 2))(conv_b2)
out_b = Flatten()(max_pool_b2)
# the two outputs are merged in fully connected layer
out = concatenate([out_a, out_b])
out = Dropout(0.5)(out)
out = Dense(128, activation='relu')(out)
out = Dropout(0.5)(out)
# 10-way softmax over the digit classes.
out = Dense(10, activation='softmax')(out)
model = Model(inputs, out)
model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
filepath = "mnist_cnn.hdf5"
# save weights whenever validation accuracy is improved
checkpoint = ModelCheckpoint(filepath, monitor='val_acc', verbose=1, save_best_only=True, mode='max')
callback = [checkpoint]
# fit the model
model.fit(x_train, y_train, batch_size=64, epochs=30, verbose=1, validation_data=(x_val, y_val), callbacks=callback)
12,364 | 1b7d15da67ae50b2c2f705f28027e91689c460c6 | import json
import os
from paver.easy import pushd
import numpy as np
import pickle
import csv
from sklearn import metrics
import argparse
import multiprocessing
import time
import matplotlib
matplotlib.use('Agg') #in the case of perform on server
import matplotlib.pyplot as plt
#--------------------------------------multi process function--------------------------------------#
def multi_plot_object(summary,idx,count):
    # Worker run in a child process: render the three figures for sample <idx>
    # and bump the shared completion counter.
    print summary.fig_title[idx], " plotting..."
    summary.plot_states(idx)
    plt.savefig('sample_states_%d.png' % idx)
    summary.plot_state_boundaries(idx)
    plt.savefig('state_boundary_%d.png' % idx)
    summary.plot_letters(idx)
    plt.savefig('sample_letters_%d.png' % idx)
    plt.clf()
    # NOTE(review): this read-modify-write is not guarded by count.get_lock();
    # concurrent workers could lose increments -- confirm intended.
    count.value = count.value + 1
    print summary.fig_title[idx], 'plot finish-->count:',count.value
#--------------------------------------main function--------------------------------------#
def main():
    """Drive the full evaluation: confusion matrices, PER/WER, ARI, word list,
    then plot every utterance's figures in parallel child processes."""
    #result_file make#
    parser = argparse.ArgumentParser()
    parser.add_argument('directory')
    #opts = parser.parse_args()
    figs_dir = 'summary_figs'
    os.path.exists(figs_dir) or os.mkdir(figs_dir)
    summary = Summary()
    #evaluation_result save#
    with pushd(figs_dir):
        #gen confused matrix
        summary.letter_confused_matrix()
        summary.state_confused_matrix()
        #gen PER and WER
        summary.culPER()
        summary.culWER()
        #gen adjusted rand index
        summary.a_rand_index(summary.sample_letters,summary.input_data,'l')
        summary.a_rand_index(summary.sample_states,summary.input_data2,'s')
        #gen word list
        with open('WordList.txt',"w") as f:
            for num, key in enumerate(summary.word_list):
                f.write("iter%d:: " % num)
                for num2, key2 in enumerate(key):
                    f.write("%d:" % num2 + str(key2) + " ")
                f.write("\n")
        #multi plot sample states and letters#
        print "--------------------------------------plot process start--------------------------------------"
        count = multiprocessing.Value('i', 0)
        for idx in range(summary.data_size):
            pr = multiprocessing.Process(target=multi_plot_object, args=(summary,idx,count))
            pr.start()
            time.sleep(0.1) #charm: without this sleep the wait loop below never exits
        while (1):
            # NOTE(review): 55 is a magic completion threshold -- presumably
            # tied to the dataset size (DATA_N); confirm against parameter.json.
            if count.value > 55:
                time.sleep(1)
                print "--------------------------------------plot process completed!!--------------------------------------"
                break
#=====================summary(main process?) class=====================#
class Summary(object):
#--------------------------------------init paras--------------------------------------#
    def __init__(self, dirpath = '.'):
        """Load every artifact a sampling run left in the current directory.

        Reads parameter.json / fig_title.json / sample_word_list.txt plus the
        per-utterance ground-truth labels and per-iteration samples.
        `dirpath` is currently unused.  NOTE(review): sample_word_list.txt is
        read with pickle despite the .txt suffix -- confirm the producer side.
        """
        with open('parameter.json') as f:
            params = self.params = json.load(f)
        with open('fig_title.json') as f2:
            fig_title = self.fig_title = json.load(f2)
        with open('sample_word_list.txt') as f3:
            self.word_list = pickle.load(f3)
        self.data_size = params['DATA_N']
        # Ground-truth letter (.lab) and state (.lab2) label sequences.
        self.input_data = [np.loadtxt("../LABEL/"+ i + ".lab") for i in fig_title]
        self.input_data2 = [np.loadtxt("../LABEL/"+ i + ".lab2") for i in fig_title]
        # Per-iteration sampled labels, one file per utterance.
        self.sample_states = [np.loadtxt('sample_states_%d.txt' % i)for i in range(params['DATA_N'])]
        self.sample_letters = [np.loadtxt('sample_letters_%d.txt' % i)for i in range(params['DATA_N'])]
        self.state_ranges = []
        for i in range(params['DATA_N']):
            with open('state_ranges_%d.txt' % i) as f:
                self.state_ranges.append(pickle.load(f))
        llist = np.loadtxt("loglikelihood.txt").tolist()
        # (max log-likelihood value, iteration index where it occurred)
        self.maxlikelihood = (max(llist), llist.index(max(llist)))
        #manipulation part
        self.l_label_dic={}
        self.s_label_dic={}
        #manipulation part end
#--------------------------------------write result_graph--------------------------------------#
#base_graph function#
    def _plot_discreate_sequence(self, true_data, title, sample_data, label = u'', plotopts = {}):
        """Draw the ground-truth label strip (top row) above the per-iteration
        sampled-label matrix.

        NOTE(review): `plotopts = {}` is a shared mutable default -- harmless
        while callers never mutate it, but worth confirming.
        """
        ax = plt.subplot2grid((10, 1), (1, 0))
        plt.sca(ax)
        ax.matshow([true_data], aspect = 'auto')
        plt.ylabel('Truth Label')
        #label matrix
        ax = plt.subplot2grid((10, 1), (2, 0), rowspan = 8)
        plt.suptitle(title)
        plt.sca(ax)
        ax.matshow(sample_data, aspect = 'auto', **plotopts)
        #write per 10 iterations(max_likelihood) label -- disabled block below
        """
        for i in range(label.shape[0]):
            for j in range(label.shape[1]):
                if i%10==0 or i==99 or i==self.maxlikelihood[1]:
                    if i==self.maxlikelihood[1]:
                        ax.text(j, i+1.5, int(label[i][j]), ha='center', va='bottom', color = 'red', fontsize=8)
                    else:
                        ax.text(j, i+1.5, int(label[i][j]), ha='center', va='bottom', color = 'black', fontsize=8)
                ax.text(j, i+1.5, int(label[i][j]), ha='center', va='bottom', color = 'black', fontsize=8)
        """
        #write x&y label
        plt.xlabel('Frame')
        plt.ylabel('Iteration')
        plt.xticks(())
#plot letter_result graph#
    def plot_letters(self, idx):
        """Plot sampled letter labels for utterance <idx> against its ground truth."""
        self._plot_discreate_sequence(
            self.input_data[idx],
            self.fig_title[idx],
            self.sample_letters[idx],
            label=self.sample_letters[idx]
        )
#plot state_result graph#
    def plot_states(self, idx):
        """Plot sampled state labels for utterance <idx> against its ground truth."""
        self._plot_discreate_sequence(
            self.input_data2[idx],
            self.fig_title[idx],
            self.sample_states[idx],
            label=self.sample_states[idx]
        )
#plot boundary graph#
    def _plot_label_boundary(self, true_data, title, sample_data, label = u''):
        """Render a binary matrix marking each segment boundary (1.0 at each
        segment's stop frame, last boundary excluded) per iteration."""
        boundaries = [[stop for state, (start, stop) in r] for r in sample_data]
        # Total frame count, taken from the final stop of the first iteration.
        size = boundaries[0][-1]
        data = np.zeros((len(sample_data), size))
        for i, b in enumerate(boundaries):
            for x in b[:-1]:
                data[i, x] = 1.0
        self._plot_discreate_sequence(true_data, title, data, label, plotopts = {'cmap': 'Greys'})
    def plot_state_boundaries(self, idx):
        """Plot sampled state-segment boundaries for utterance <idx>."""
        self._plot_label_boundary(
            self.input_data2[idx],
            self.fig_title[idx],
            self.state_ranges[idx],
            label=self.sample_states[idx]
        )
#--------------------------------------compute adjusted rand index--------------------------------------#
def a_rand_index(self,sample_data,true_data,char):
RIs=[]
for idx in range(len(sample_data[0])):
true=[]
sample=[]
for key,key2 in zip(sample_data,true_data):
sample.extend(key[idx])
true.extend(key2)
ris=metrics.adjusted_rand_score(true, sample)
RIs.append(ris)
np.savetxt("aRIs_"+char+".txt",RIs)
true=[]
sample=[]
for key,key2 in zip(sample_data,true_data):
sample.extend(key[self.maxlikelihood[1]])
true.extend(key2)
ri=metrics.adjusted_rand_score(true, sample)
str="maxLk_adjusted_rand_index_"+char+".txt"
f = open(str,'w')
writer = csv.writer(f)
writer.writerow(["adjusted_rand_score",ri])
#<<<<<<<<<<<<<<<<<<<<<manipulation functions...>>>>>>>>>>>>>>>>>>>>>#
#--------------------------------------letter&state confused matrix function--------------------------------------#
    def letter_confused_matrix(self):
        """Write confused_matrix_l.csv (rows: true phones a/i/u/e/o, columns:
        sampled letter labels at the max-likelihood iteration) and fill
        self.l_label_dic with each phone's majority sampled label."""
        a=[]
        i=[]
        u=[]
        e=[]
        o=[]
        # Bucket each frame's sampled label by its ground-truth phone (0..4).
        for key,key2 in zip(self.sample_letters,self.input_data):
            for key3,key4 in zip(key[self.maxlikelihood[1]],key2):
                if key4 == 0:
                    a.append(key3)
                elif key4 == 1:
                    i.append(key3)
                elif key4 == 2:
                    u.append(key3)
                elif key4 == 3:
                    e.append(key3)
                elif key4 == 4:
                    o.append(key3)
        l_max=max(a+i+u+e+o)
        a_count=[]
        i_count=[]
        u_count=[]
        e_count=[]
        o_count=[]
        # Histogram each phone's sampled labels over 0..l_max.
        for num in range(int(l_max)+1):
            a_count.append(a.count(num))
            i_count.append(i.count(num))
            u_count.append(u.count(num))
            e_count.append(e.count(num))
            o_count.append(o.count(num))
        # NOTE(review): file handle is never closed -- confirm acceptable.
        f = open('confused_matrix_l.csv','w')
        writer = csv.writer(f)
        writer.writerow(["phone|letter_label"]+range(int(l_max)+1))
        writer.writerow(["a"]+a_count)
        writer.writerow(["i"]+i_count)
        writer.writerow(["u"]+u_count)
        writer.writerow(["e"]+e_count)
        writer.writerow(["o"]+o_count)
        writer.writerow([])
        writer.writerow(["a_label:"+str(a_count.index(max(a_count))),"i_label:"+str(i_count.index(max(i_count))),"u_label:"+str(u_count.index(max(u_count))),"e_label:"+str(e_count.index(max(e_count))),"o_label:"+str(o_count.index(max(o_count)))])
        # Majority sampled label -> phone name mapping.
        self.l_label_dic[a_count.index(max(a_count))]="a"
        self.l_label_dic[i_count.index(max(i_count))]="i"
        self.l_label_dic[u_count.index(max(u_count))]="u"
        self.l_label_dic[e_count.index(max(e_count))]="e"
        self.l_label_dic[o_count.index(max(o_count))]="o"
    def state_confused_matrix(self):
        """Build a word-vs-state confusion matrix from the max-likelihood sample.

        Buckets sampled state labels by true word class (0..4 -> aioi,aue,ao,ie,uo),
        writes counts to confused_matrix_s.csv, and records the most frequent
        sampled label for each word in self.s_label_dic (word -> label).
        """
        # Sampled state labels collected per true word class.
        aioi=[]
        aue=[]
        ao=[]
        ie=[]
        uo=[]
        # self.input_data2 carries the true word class per frame.
        for key,key2 in zip(self.sample_states,self.input_data2):
            for key3,key4 in zip(key[self.maxlikelihood[1]],key2):
                if key4 == 0:
                    aioi.append(key3)
                elif key4 == 1:
                    aue.append(key3)
                elif key4 == 2:
                    ao.append(key3)
                elif key4 == 3:
                    ie.append(key3)
                elif key4 == 4:
                    uo.append(key3)
        # Tally label occurrences for every label value 0..l_max.
        l_max=max(aioi+aue+ao+ie+uo)
        aioi_count=[]
        aue_count=[]
        ao_count=[]
        ie_count=[]
        uo_count=[]
        for num in range(int(l_max)+1):
            aioi_count.append(aioi.count(num))
            aue_count.append(aue.count(num))
            ao_count.append(ao.count(num))
            ie_count.append(ie.count(num))
            uo_count.append(uo.count(num))
        # NOTE(review): list + range(...) concatenation is Python-2-only; `f`
        # is never closed explicitly here.
        f = open('confused_matrix_s.csv','w')
        writer = csv.writer(f)
        writer.writerow(["word|state_label"]+range(int(l_max)+1))
        writer.writerow(["aioi"]+aioi_count)
        writer.writerow(["aue"]+aue_count)
        writer.writerow(["ao"]+ao_count)
        writer.writerow(["ie"]+ie_count)
        writer.writerow(["uo"]+uo_count)
        writer.writerow([])
        writer.writerow(["aioi_label:"+str(aioi_count.index(max(aioi_count))),"aue_label:"+str(aue_count.index(max(aue_count))),"ao_label:"+str(ao_count.index(max(ao_count))),"ie_label:"+str(ie_count.index(max(ie_count))),"uo_label:"+str(uo_count.index(max(uo_count)))])
        # Note the direction: here the map is word -> label (the letter matrix
        # above stores label -> letter).
        self.s_label_dic["aioi"]=aioi_count.index(max(aioi_count))
        self.s_label_dic["aue"]=aue_count.index(max(aue_count))
        self.s_label_dic["ao"]=ao_count.index(max(ao_count))
        self.s_label_dic["ie"]=ie_count.index(max(ie_count))
        self.s_label_dic["uo"]=uo_count.index(max(uo_count))
#--------------------------------------culculate PER and WER function--------------------------------------#
def _levenshtein_distance(self, a, b):
m = [ [0] * (len(b) + 1) for i in range(len(a) + 1) ]
for i in xrange(len(a) + 1):
m[i][0] = i
for j in xrange(len(b) + 1):
m[0][j] = j
for i in xrange(1, len(a) + 1):
for j in xrange(1, len(b) + 1):
if a[i - 1] == b[j - 1]:
x = 0
else:
x = 1
m[i][j] = min(m[i - 1][j] + 1, m[i][ j - 1] + 1, m[i - 1][j - 1] + x)
return m[-1][-1]
    def culPER(self):
        """Compute the Phone Error Rate (PER) and write it to PERandWER.txt.

        Decodes the max-likelihood letter-label sequence into strings via
        self.l_label_dic, derives the reference strings from self.fig_title,
        and averages per-utterance normalized edit distance.
        """
        str_letter = []
        print "--------------------------------------culPER function--------------------------------------"
        print "P_DIC: ",self.l_label_dic
        for key in self.sample_letters:
            moji=[]
            for count, key2 in enumerate(key[self.maxlikelihood[1]]):
                try:
                    # Collapse runs: only emit a letter when the next label differs.
                    if key2 != key[self.maxlikelihood[1]][count+1]:
                        moji.append(self.l_label_dic[key2])
                except IndexError:
                    # Last element of the sequence: always emit it.
                    try:
                        moji.append(self.l_label_dic[key2])
                    except KeyError:
                        moji.append("*")
                except KeyError:
                    # Label never mapped to a letter by the confusion matrix.
                    moji.append("*")
            str_letter.append("".join(map(str, moji)))
        str_true = []
        for key in self.fig_title:
            # Figure titles like "a_ue2" -> reference string "aue".
            key=key.replace("2", "")
            key=key.replace("_", "")
            str_true.append(key)
        # only for the aioi_ie case: force the trailing "ie" on the sample
        where = np.where(np.array(str_true)=="aioiie")
        for key in where[0].tolist():
            str_letter[key] = str_letter[key][:-1]+"ie"
        # only for the aioi_ie case - end
        print "TRUE: ",str_true
        print "SAMP: ",str_letter
        print "--------------------------------------culPER function end--------------------------------------"
        score=[]
        for p,p2 in zip(str_true,str_letter):
            # Edit distance normalized by reference length.
            score.append(float(self._levenshtein_distance(p,p2))/len(p))
        np.savetxt("PERandWER.txt", ["PER,"+str(np.average(score))], fmt="%s")
    def culWER(self):
        """Compute the Word Error Rate (WER) and append it to PERandWER.txt.

        Builds sampled word-label strings from self.state_ranges, derives the
        reference word-label strings from self.fig_title via self.s_label_dic,
        and averages per-utterance normalized edit distance.
        """
        str_word = []
        print "--------------------------------------culWER function--------------------------------------"
        print "W_DIC: ",self.s_label_dic
        for key in self.state_ranges:
            moji = []
            for key2 in key[self.maxlikelihood[1]]:
                # key2[0] is the state label for this range.
                moji.append(key2[0])
            str_word.append("".join(map(str, moji)))
        str_true = []
        for key in self.fig_title:
            # Titles like "aioi_ie2" -> word list ["aioi", "ie"] -> label string.
            key = key.replace("2", "")
            wl = key.split("_")
            twl = []
            for key2 in wl:
                twl.append(self.s_label_dic[key2])
            str_true.append("".join(map(str, twl)))
        print "TRUE: ",str_true
        print "SAMP: ",str_word
        print "--------------------------------------culWER function end--------------------------------------"
        score=[]
        for w,w2 in zip(str_true,str_word):
            # Edit distance normalized by reference length.
            score.append(float(self._levenshtein_distance(w,w2))/len(w))
        # Append so the PER line written by culPER() is preserved.
        with open('PERandWER.txt', 'a') as f_handle:
            np.savetxt(f_handle, ["WER,"+str(np.average(score))], fmt="%s")
#<<<<<<<<<<<<<<<<<<<<<manipulation functions end!!!>>>>>>>>>>>>>>>>>>>>>#
#--------------------------------------direct execution function--------------------------------------#
# Script entry point; main() is defined earlier in this file.
if __name__ == '__main__':
    main()
|
12,365 | 336d069cf2d2bc05f0e50805caf5ddb1b5087f33 | #!/usr/bin/python
#-*- coding=utf-8 -*-
"""
Usage:
start_api_server.py [--p=<argument>]
--p=PORT web server port [default: 1235]
"""
__author__ = ['"wuyadong" <wuyadong@tigerknows.com>']
import logging.config
import sys
import docopt
from server.service import WebService
logging.config.fileConfig(sys.path[0] + "/logging.conf")
if __name__ == "__main__":
    # Parse CLI options per the module docstring; --p defaults to 1235.
    arguments = docopt.docopt(__doc__, version="api server 1.0")
    port = int(arguments['--p'])
    # Blocks serving the web service on the requested port.
    web_service = WebService()
    web_service.start(port)
12,366 | 2dae9231a816f292aa82a0c321f79c43eb642bb1 | #!/usr/bin/env python3
#
# This script normalizes a YAML file.
#
# More information at:
# https://github.com/julianmendez/tabulas
#
import json
import sys
import yaml
def main(argv):
    """Normalize a YAML file in place (or into a separate output file).

    argv follows sys.argv conventions: argv[1] is the input file and the
    optional argv[2] is the output file (defaults to the input file, i.e.
    in-place normalization). With any other argument count, prints usage.
    """
    # Renamed from `help` to avoid shadowing the builtin of the same name.
    usage = "usage: python3 " + argv[0] + " (YAML input/output file)\n" + \
        "       python3 " + argv[0] + " (YAML input file) (YAML output file)\n" + \
        "\n" + \
        "This normalizes a YAML file.\n"
    if len(argv) == 2 or len(argv) == 3:
        input_file_name = argv[1]
        # A single file argument means the file is rewritten in place.
        if len(argv) == 3:
            output_file_name = argv[2]
        else:
            output_file_name = input_file_name
        with open(input_file_name, 'r') as input_file:
            try:
                data = yaml.safe_load(input_file)
                with open(output_file_name, 'w') as output_file:
                    # Keep key order (sort_keys=False) and emit an explicit
                    # '---' document start marker.
                    yaml.safe_dump(data, output_file, default_flow_style=False,
                                   sort_keys=False, explicit_start=True)
            except yaml.YAMLError as exception:
                print(exception)
    else:
        print(usage)
# Script entry point: pass the raw command line straight to main().
if __name__ == "__main__":
    main(sys.argv)
|
12,367 | e5668b4539b8fe32ffd29126e048ca439e5680df | import os, datetime, codecs, random, string, json, re, time, sqlite3, shutil
from flask import Flask, render_template, request, url_for, abort, redirect, send_from_directory, g, Response, escape
from werkzeug import secure_filename
from functools import wraps
import email.parser, smtplib
from validate_email import validate_email
####################### config
UPLOAD_FOLDER = os.path.abspath(os.path.join(os.path.split(__file__)[0], "uploads"))
ALLOWED_EXTENSIONS = set(['click'])
####################### app config
app = Flask(__name__)
app.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER
app.config['PROPAGATE_EXCEPTIONS'] = True
app.config['DATABASE'] = os.path.join(os.path.split(__file__)[0], "requests.db")
fp = open("claim_secret") # this needs to exist. Put many long random strings in it, one per worker
app.config["CLAIM_SECRETS"] = [x.strip() for x in fp.readlines()]
fp.close()
def get_db():
    """Return (connection, cursor) for the request-scoped SQLite database.

    The connection is cached on flask.g so each request reuses one handle;
    the schema is (re)initialized via init_db() on first use.
    """
    db = getattr(g, '_database', None)
    if db is None:
        db = g._database = sqlite3.connect(app.config['DATABASE'])
        crs = db.cursor()
        init_db(db, crs)
    else:
        crs = db.cursor()
    return db, crs
def init_db(db, crs):
    """Create tables if missing and apply ad-hoc column migrations.

    Each ALTER TABLE is attempted unconditionally; the "duplicate column"
    error is swallowed so re-running the migrations is idempotent.

    NOTE(review): e.message is Python-2-only (this file uses py2 print
    statements elsewhere); on Python 3 this would need str(e).
    """
    with app.app_context():
        crs.execute(("create table if not exists requests ("
            "id integer primary key, ip varchar, click_filename varchar, time TIMESTAMP DEFAULT CURRENT_TIMESTAMP"
            ")"))
        crs.execute("create table if not exists devices (id integer primary key, printable_name varchar unique)")
        crs.execute("create table if not exists request2device (deviceid integer, requestid integer)")
        # Migration: requests.email
        try:
            crs.execute("alter table requests add column email varchar")
        except sqlite3.OperationalError as e:
            if "duplicate column name: email" in e.message:
                pass
            else:
                raise e
        # Migration: devices.code (slugified name)
        try:
            crs.execute("alter table devices add column code varchar")
        except sqlite3.OperationalError as e:
            if "duplicate column name: code" in e.message:
                pass
            else:
                raise e
        # Migration: devices.last_seen (used for the 15-minute liveness window)
        try:
            crs.execute("alter table devices add column last_seen timestamp")
        except sqlite3.OperationalError as e:
            if "duplicate column name: last_seen" in e.message:
                pass
            else:
                raise e
        # Migration: request2device.screenshots counter
        try:
            crs.execute("alter table request2device add column screenshots integer default 0")
        except sqlite3.OperationalError as e:
            if "duplicate column name: screenshots" in e.message:
                pass
            else:
                raise e
        # Migration: requests.uid (job folder name)
        try:
            crs.execute("alter table requests add column uid varchar")
        except sqlite3.OperationalError as e:
            if "duplicate column name: uid" in e.message:
                pass
            else:
                raise e
####################### utility functions
def allowed_file(filename):
    """Accept only well-formed click package filenames (name_version_arch.click)."""
    if '.' not in filename:
        return False
    extension = filename.rsplit('.', 1)[1]
    if extension not in ALLOWED_EXTENSIONS:
        return False
    # The full-name pattern additionally enforces the name_version_arch shape.
    return re.match(r"^[A-Za-z0-9.-]+_[a-zA-Z0-9.]+_[a-z0-9]+\.click$", filename)
def randomstring(N):
    """Return N cryptographically random characters drawn from A-Z and 0-9."""
    rng = random.SystemRandom()
    alphabet = string.ascii_uppercase + string.digits
    return ''.join(rng.choice(alphabet) for _ in range(N))
def slugify(s):
    """Replace every character outside [A-Za-z0-9] with an underscore."""
    unsafe = re.compile(r"[^A-Za-z0-9]")
    return unsafe.sub("_", s)
def get_known_devices():
    """Return devices that checked in within the last 15 minutes.

    Each entry is {"printable": human-readable name, "code": URL-safe slug}.
    """
    db, crs = get_db()
    crs.execute("select printable_name, code from devices where last_seen > datetime('now', '-15 minutes')")
    return [{"printable": row[0], "code":row[1]} for row in crs.fetchall()]
def save_device(device):
    """Upsert a device row by printable name and stamp its last_seen time.

    The slugified name is stored as the device's code in both branches.
    """
    db, crs = get_db()
    crs.execute("select printable_name from devices where printable_name = ?", (device,))
    row = crs.fetchone()
    if row and row[0]:
        crs.execute("update devices set code = ?, last_seen = datetime('now') where printable_name = ?",
                    (slugify(device), device))
    else:
        crs.execute("insert into devices (printable_name, code, last_seen) values (?,?,datetime('now'))",
                    (device, slugify(device)))
    db.commit()
def check_auth(username, password):
    """This function is called to check if a username /
    password combination is valid.
    """
    # Admin-only login; any one of the configured claim secrets is accepted
    # as the password.
    return username == 'admin' and password in app.config["CLAIM_SECRETS"]
def authenticate():
    """Sends a 401 response that enables basic auth"""
    return Response(
        'Could not verify your access level for that URL.\n'
        'You have to login with proper credentials', 401,
        {'WWW-Authenticate': 'Basic realm="Login Required"'})
def requires_auth(f):
    """Decorator enforcing HTTP Basic auth (admin + claim secret) on a view."""
    @wraps(f)
    def decorated(*args, **kwargs):
        auth = request.authorization
        if not auth or not check_auth(auth.username, auth.password):
            # No/invalid credentials: challenge with a 401.
            return authenticate()
        return f(*args, **kwargs)
    return decorated
####################### routes
@app.route("/")
def frontpage():
    """Render the upload form with live device list and aggregate stats."""
    # Pausing is signalled by a PAUSED marker file in the upload folder.
    is_paused = os.path.exists(os.path.join(app.config["UPLOAD_FOLDER"], "PAUSED"))
    db, crs = get_db()
    crs.execute("select sum(screenshots) from request2device")
    res = crs.fetchone()
    if res and res[0]:
        screenshot_count = res[0]
    else:
        screenshot_count = 0
    crs.execute("select count(distinct email) from requests")
    res = crs.fetchone()
    if res and res[0]:
        developer_count = res[0]
    else:
        developer_count = 0
    return render_template("upload.html", devices=get_known_devices(), is_paused=is_paused,
                           screenshot_count=screenshot_count, developer_count=developer_count)
@app.route("/about")
def about():
    """Render the static About page."""
    return render_template("about.html")
@app.route("/faq")
def faq():
    """Render the static FAQ page."""
    return render_template("faq.html")
@app.route("/contact")
def contact():
    """Render the static Contact page."""
    return render_template("contact.html")
@app.route("/admin")
@requires_auth
def admin():
    """Render the admin queue view: one entry per job folder with metadata."""
    is_paused = os.path.exists(os.path.join(app.config["UPLOAD_FOLDER"], "PAUSED"))
    queue = []
    subfols = os.listdir(app.config["UPLOAD_FOLDER"])
    for fol in subfols:
        ffol = os.path.join(app.config["UPLOAD_FOLDER"], fol)
        ometa = os.path.join(ffol, "metadata.json")
        if os.path.exists(ometa):
            fp = codecs.open(ometa, encoding="utf8")
            metadata = fp.read()
            fp.close()
            metadata = json.loads(metadata)
            # A job can be cleaned up once every requested device finished.
            cleanupable = True
            if metadata.get("devices", []):
                cleanupable = all([x.get("status") == "finished" for x in metadata["devices"]])
            click = os.path.join(ffol, metadata["filename"])
            # NOTE(review): this st_ctime computation is dead code — dt is
            # overwritten by metadata["time"] just below.
            if not os.path.exists(click):
                dt = os.stat(ometa).st_ctime
            else:
                dt = os.stat(click).st_ctime
            dt = metadata["time"]
            # Shorten the filename for display.
            metadata["filename"] = re.sub("_([0-9]+\.[0-9])", r" \1", metadata["filename"]).replace("com.ubuntu.developer.", "c.u.d.")
            queue.append({"uid": fol, "metadata": metadata, "cleanupable": cleanupable,
                          "dt": dt,
                          "dta": time.strftime("%H.%M %Y/%m/%d", time.gmtime(dt))})
    # Newest first. NOTE(review): sort(cmp=...) and cmp() are Python-2-only.
    queue.sort(cmp=lambda a,b:cmp(b["dt"], a["dt"]))
    return render_template("admin.html", queue=queue, is_paused=is_paused, completed_count=len([x for x in queue if x["cleanupable"]]))
@app.route("/setstatus", methods=["POST"])
@requires_auth
def setstatus():
    """Admin action: force a job's status for one device to pending/failed."""
    uid = request.form.get("uid")
    device = request.form.get("device")
    status = request.form.get("status")
    if not uid or not device or not status:
        return "Bad call (%s)" % request.form, 400
    if status not in ["pending", "failed"]:
        return "Can't set status to that", 400
    # Job IDs look like YYYYMMDDHHMMSS-XXXXXXXXXX; rejecting anything else
    # also prevents path traversal via uid.
    if not re.match("^[0-9]{14}-[A-Z0-9]{10}$", uid):
        return "Invalid job ID", 400
    ometa = os.path.join(app.config["UPLOAD_FOLDER"], uid, "metadata.json")
    if not os.path.exists(ometa):
        return "No such job", 400
    fp = codecs.open(ometa, encoding="utf8")
    metadata = fp.read()
    fp.close()
    metadata = json.loads(metadata)
    device_status = metadata.get("devices", [])
    for ds in device_status:
        if ds["printable"] == device:
            ds["status"] = status
    metadata["devices"] = device_status
    # Rewrite the metadata file with the updated status.
    fp = codecs.open(ometa, mode="w", encoding="utf8")
    json.dump(metadata, fp, indent=2)
    fp.close()
    return redirect(url_for("admin"))
@app.route("/togglepause", methods=["POST"])
@requires_auth
def togglepause():
    """Flip the global pause flag (a PAUSED marker file in the upload folder)."""
    pauseflag = os.path.join(app.config["UPLOAD_FOLDER"], "PAUSED")
    if os.path.exists(pauseflag):
        os.unlink(pauseflag)
    else:
        fp = open(pauseflag, "w")
        fp.write(" ")
        fp.close()
    return redirect(url_for("admin"))
@app.route("/devicecount")
def devicecount():
    """Return a JSON count of devices seen within the last 15 minutes."""
    return json.dumps({"devices": len(get_known_devices())})
@app.route("/upload", methods=["POST"])
def upload():
    """Accept a click package upload and enqueue it for the selected devices.

    Validates email and filename, applies a 30-requests-per-hour rate limit
    per IP/email, writes the click + metadata.json into a fresh job folder,
    and records the request in the database.
    """
    is_paused = os.path.exists(os.path.join(app.config["UPLOAD_FOLDER"], "PAUSED"))
    if is_paused:
        return render_template("user_error.html", message="Uploads are not available at the moment")
    if not validate_email(request.form.get("email")):
        return render_template("user_error.html", message="That doesn't seem to be a valid email address.")
    file = request.files["click"]
    if file and allowed_file(file.filename):
        filename = secure_filename(file.filename)
        metadata = {
            "email": request.form['email'],
            "filename": filename,
            "devices": [],
            "time": time.time(),
            "failures": 0,
            "runid": request.form.get("runid", "")
        }
        # Only currently-live devices can be targeted; "device___all" selects all.
        for device in get_known_devices():
            if request.form.get("device_%s" % device["code"]) == "on" or request.form.get("device___all") == "on":
                metadata["devices"].append({
                    "printable": device["printable"],
                    "status": "pending"
                })
        if not metadata["devices"]:
            return render_template("user_error.html", message="You have to specify at least one device.")
        # Rate limit (skipped under test): max 30 requests/hour per IP or email.
        if not app.config['TESTING']:
            db, crs = get_db()
            crs.execute("select count(*) from requests where time > datetime('now','-1 hour') and (ip = ? or email = ?)",
                        (request.remote_addr, metadata["email"]))
            res = crs.fetchone()
            if res and res[0] > 30:
                return render_template("user_error.html", message="Overuse error: you have overrun the rate limit. Please wait an hour.")
        # Job folder name doubles as the job ID: YYYYMMDDHHMMSS-<10 random chars>.
        ndir = "%s-%s" % (datetime.datetime.now().strftime("%Y%m%d%H%M%S"), randomstring(10))
        ndirpath = os.path.join(app.config['UPLOAD_FOLDER'], ndir)
        os.mkdir(ndirpath) # should not fail!
        ofile = os.path.join(ndirpath, filename)
        ometa = os.path.join(ndirpath, "metadata.json")
        file.save(ofile)
        fp = codecs.open(ometa, mode="w", encoding="utf8")
        json.dump(metadata, fp)
        fp.close()
        db, crs = get_db()
        crs.execute("insert into requests (ip, click_filename, email, uid) values (?,?,?,?)",
                    (request.remote_addr, file.filename, metadata["email"], ndir))
        requestid = crs.lastrowid
        # Link the request to each requested device, creating device rows on demand.
        for d in metadata["devices"]:
            crs.execute("select id from devices where printable_name = ?", (d["printable"],))
            res = crs.fetchone()
            if res:
                deviceid = res[0]
            else:
                crs.execute("insert into devices (printable_name) values (?)", (d["printable"],))
                deviceid = crs.lastrowid
            crs.execute("insert into request2device (requestid, deviceid) values (?,?)", (requestid, deviceid))
        db.commit()
        return redirect(url_for('status', uid=ndir))
    else:
        return render_template("user_error.html", message="That doesn't seem to be a legitimate click package name."), 400
@app.route("/status/<uid>")
def status(uid):
    """Show progress for job `uid`; complete once every device finished/failed."""
    # secure_filename prevents path traversal via the uid URL segment.
    safe_uid = secure_filename(uid)
    folder = os.path.join(app.config["UPLOAD_FOLDER"], safe_uid)
    ometa = os.path.join(folder, "metadata.json")
    if not os.path.exists(ometa):
        return "No such pending test", 404
    fp = codecs.open(ometa, encoding="utf8")
    metadata = fp.read()
    fp.close()
    metadata = json.loads(metadata)
    completed = True
    for d in metadata.get("devices", []):
        if d["status"] not in ["finished", "failed"]:
            completed = False
    return render_template("status.html", metadata=metadata, completed=completed)
@app.route("/claim")
def claim():
    """Worker API: claim the next pending job for the given device.

    Requires a valid claim_secret. Also acts as the device heartbeat
    (save_device updates last_seen). Returns {"job": None} when paused or
    nothing is pending.
    """
    device = request.args.get('device')
    if not device:
        return json.dumps({"error": "No device specified"}), 400, {'Content-Type': 'application/json'}
    if request.args.get("claim_secret", "").strip() not in app.config["CLAIM_SECRETS"]:
        return json.dumps({"error": "Bad claim secret"}), 400, {'Content-Type': 'application/json'}
    is_paused = os.path.exists(os.path.join(app.config["UPLOAD_FOLDER"], "PAUSED"))
    if is_paused:
        return json.dumps({"job": None}), 200, {'Content-Type': 'application/json'}
    save_device(device)
    device_code = [x["code"] for x in get_known_devices() if x["printable"] == device][0]
    # find the next unclaimed item which wants this device
    # this is a bit racy, but shouldn't be a problem in practice
    for fol in sorted(os.listdir(app.config["UPLOAD_FOLDER"])):
        ometa = os.path.join(app.config["UPLOAD_FOLDER"], fol, "metadata.json")
        if os.path.exists(ometa):
            fp = codecs.open(ometa, encoding="utf8")
            metadata = json.load(fp)
            fp.close()
            # Backfill fields missing from metadata written by older versions.
            if "failures" not in metadata: metadata["failures"] = 0
            if "runid" not in metadata: metadata["runid"] = ""
            device_status = metadata.get("devices", [])
            for ds in device_status:
                if ds["printable"] == device:
                    if ds["status"] == "pending":
                        # Mark as claimed and hand the worker its callback URLs.
                        ds["status"] = "claimed"
                        metadata["devices"] = device_status
                        fp = codecs.open(ometa, mode="w", encoding="utf8")
                        json.dump(metadata, fp, indent=2)
                        fp.close()
                        return json.dumps({
                            "job": fol,
                            "click": url_for("click", uid=fol),
                            "finished": url_for("finished", uid=fol, device_code=device_code),
                            "failed": url_for("failed", uid=fol, device_code=device_code),
                            "metadata": metadata,
                            "unclaim": url_for("unclaim", uid=fol, device_code=device_code)
                        }), 200, {'Content-Type': 'application/json'}
    return json.dumps({"job": None}), 200, {'Content-Type': 'application/json'}
@app.route("/unclaim/<uid>/<device_code>")
def unclaim(uid, device_code):
    """Worker API: return a claimed job to pending and bump its failure count."""
    device_printable = [x["printable"] for x in get_known_devices() if x["code"] == device_code]
    if not device_printable:
        return json.dumps({"error": "Bad device code"}), 400, {'Content-Type': 'application/json'}
    device = device_printable[0]
    if not uid:
        return json.dumps({"error": "No job specified"}), 400, {'Content-Type': 'application/json'}
    # Same strict job-ID shape as elsewhere; also guards against path traversal.
    if not re.match("^[0-9]{14}-[A-Z0-9]{10}$", uid):
        return json.dumps({"error": "Invalid job ID"}), 400, {'Content-Type': 'application/json'}
    if request.args.get("claim_secret", "").strip() not in app.config["CLAIM_SECRETS"]:
        return json.dumps({"error": "Bad claim secret"}), 400, {'Content-Type': 'application/json'}
    ometa = os.path.join(app.config["UPLOAD_FOLDER"], uid, "metadata.json")
    if not os.path.exists(ometa):
        return json.dumps({"error": "No such job"}), 400, {'Content-Type': 'application/json'}
    fp = codecs.open(ometa, encoding="utf8")
    metadata = json.load(fp)
    fp.close()
    # Each unclaim counts as one failure for this job.
    failures = metadata.get("failures", 0)
    metadata["failures"] = failures + 1
    device_status = metadata.get("devices", [])
    for ds in device_status:
        if ds["printable"] == device:
            if ds["status"] == "claimed":
                ds["status"] = "pending"
                metadata["devices"] = device_status
                fp = codecs.open(ometa, mode="w", encoding="utf8")
                json.dump(metadata, fp, indent=2)
                fp.close()
                return json.dumps({"unclaimed": True}), 200, {'Content-Type': 'application/json'}
    return json.dumps({"unclaimed": False, "error": "Not your job to unclaim"}), 200, {'Content-Type': 'application/json'}
@app.route("/click/<uid>")
def click(uid):
    """Serve job `uid`'s click package as a download for the worker."""
    # secure_filename prevents path traversal via the uid URL segment.
    safe_uid = secure_filename(uid)
    folder = os.path.join(app.config["UPLOAD_FOLDER"], safe_uid)
    ometa = os.path.join(folder, "metadata.json")
    if not os.path.exists(ometa):
        return "No such pending test", 404
    fp = codecs.open(ometa, encoding="utf8")
    metadata = fp.read()
    fp.close()
    metadata = json.loads(metadata)
    if not os.path.exists(os.path.join(folder, metadata["filename"])):
        return "No such click", 404
    return send_from_directory(folder, metadata["filename"], as_attachment=True)
def completed(uid, device_code, resolution):
    """Shared implementation for /finished and /failed.

    Moves the device's job status from "claimed" to `resolution`, and
    accumulates the reported screenshot_count into request2device.
    """
    if request.args.get("claim_secret", "").strip() not in app.config["CLAIM_SECRETS"]:
        return json.dumps({"error": "Bad claim secret"}), 400, {'Content-Type': 'application/json'}
    device_printable = [x["printable"] for x in get_known_devices() if x["code"] == device_code]
    if not device_printable:
        return json.dumps({"error": "Bad device code"}), 400, {'Content-Type': 'application/json'}
    device = device_printable[0]
    # secure_filename prevents path traversal via the uid URL segment.
    safe_uid = secure_filename(uid)
    folder = os.path.join(app.config["UPLOAD_FOLDER"], safe_uid)
    ometa = os.path.join(folder, "metadata.json")
    if not os.path.exists(ometa):
        return json.dumps({"error": "No such pending test"}), 400, {'Content-Type': 'application/json'}
    fp = codecs.open(ometa, encoding="utf8")
    metadata = json.load(fp)
    fp.close()
    device_status = metadata.get("devices", [])
    for ds in device_status:
        if ds["printable"] == device:
            if ds["status"] == "claimed":
                ds["status"] = resolution
                metadata["devices"] = device_status
                fp = codecs.open(ometa, mode="w", encoding="utf8")
                json.dump(metadata, fp, indent=2)
                fp.close()
                # Screenshot count is optional; anything non-numeric is ignored.
                screenshot_count = request.args.get("screenshot_count", 0)
                try:
                    screenshot_count = int(screenshot_count)
                except:
                    screenshot_count = None
                if screenshot_count:
                    db, crs = get_db()
                    crs.execute("select id from devices where printable_name = ?", (device,))
                    row = crs.fetchone()
                    print "GOT DEVICE", row
                    if row and row[0]:
                        sql_device_id = row[0]
                        crs.execute("select id from requests where uid = ?", (uid,))
                        row = crs.fetchone()
                        print "GOT REQ", row
                        if row and row[0]:
                            sql_request_id = row[0]
                            crs.execute(
                                "update request2device set screenshots = screenshots + ? where requestid = ? and deviceid = ?",
                                (screenshot_count, sql_request_id, sql_device_id))
                            print "UPDATED", crs.rowcount
                            db.commit()
                return json.dumps({"status": resolution}), 200, {'Content-Type': 'application/json'}
            else:
                return json.dumps({"error": "Job not in state 'claimed' (in state '%s')" % ds["status"]}), 400, {'Content-Type': 'application/json'}
    return json.dumps({"error": "No such job"}), 400, {'Content-Type': 'application/json'}
@app.route("/finished/<uid>/<device_code>")
def finished(uid, device_code):
    """Worker API: mark job `uid` as finished on this device (see completed())."""
    return completed(uid, device_code, "finished")
@app.route("/failed/<uid>/<device_code>")
def failed(uid, device_code):
    """Worker API: mark job `uid` as failed on this device (see completed())."""
    return completed(uid, device_code, "failed")
@app.route("/sendmail", methods=["POST"])
def sendmail():
    """Worker API: relay a fully-formed RFC-2822 message via Gmail SMTP.

    Requires a valid claim_secret. The posted `message` must parse as an
    email with both From and To headers; credentials are read from
    creds.json next to the app.
    """
    if request.args.get("claim_secret", "").strip() not in app.config["CLAIM_SECRETS"]:
        return json.dumps({"error": "Bad claim secret"}), 400, {'Content-Type': 'application/json'}
    msg = request.form.get("message")
    if not msg:
        return json.dumps({"error": "No message"}), 400, {'Content-Type': 'application/json'}
    p = email.parser.Parser()
    try:
        msg = p.parsestr(msg)
    except Exception:
        # Bug fix: a stray bare `raise` here previously made this 400
        # response unreachable and turned parse failures into 500s.
        return json.dumps({"error": "Bad message"}), 400, {'Content-Type': 'application/json'}
    if not msg.get("From") or not msg.get("To"):
        return json.dumps({"error": "No addresses"}), 400, {'Content-Type': 'application/json'}
    fp = codecs.open("creds.json", encoding="utf8") # has username, name, password keys
    creds = json.load(fp)
    fp.close()
    try:
        session = smtplib.SMTP('smtp.gmail.com', 587)
        session.ehlo()
        session.starttls()
        session.login(creds["username"], creds["password"])
        # Envelope sender is always our account; the message's From is kept as-is.
        session.sendmail(creds["username"], msg["To"], msg.as_string())
    except Exception:
        # Narrowed from a bare except; best-effort delivery, report a 500.
        return json.dumps({"error": "email not sent"}), 500, {'Content-Type': 'application/json'}
    return json.dumps({"success": "ok"}), 200, {'Content-Type': 'application/json'}
@app.route("/cleanup")
def cleanup():
    """Delete job folders whose every device finished; report kept/removed counts."""
    remcount = 0
    keepcount = 0
    subfols = os.listdir(app.config["UPLOAD_FOLDER"])
    for fol in subfols:
        ffol = os.path.join(app.config["UPLOAD_FOLDER"], fol)
        ometa = os.path.join(ffol, "metadata.json")
        if os.path.exists(ometa):
            fp = codecs.open(ometa, encoding="utf8")
            metadata = fp.read()
            fp.close()
            metadata = json.loads(metadata)
            # Removable only when no device is in a non-"finished" state.
            rem = True
            for d in metadata.get("devices", []):
                if d.get("status") != "finished":
                    rem = False
                    break
            if rem:
                shutil.rmtree(ffol, ignore_errors=True)
                remcount += 1
            else:
                keepcount += 1
    return "Cleaned up: %s, left untouched: %s" % (remcount, keepcount)
# Development-server entry point (debug mode; not for production).
if __name__ == "__main__":
    app.run(port=12346, debug=True)
|
12,368 | cb5956d5b34e2e07f1d6307a084cfe68999a62e0 | from datetime import timedelta
def add(moment):
    """Return `moment` shifted one billion (10**9) seconds into the future."""
    one_billion_seconds = timedelta(seconds=10 ** 9)
    return moment + one_billion_seconds
|
12,369 | 88b349a074dec1215c79d02bbaef790249c68775 | #!/usr/bin/env python
from __future__ import print_function
import rospy
import cv2
import sys
from sensor_msgs.msg import Image
from cv_bridge import CvBridge, CvBridgeError
if __name__ == '__main__':
    node_name = 'image_listener'
    topic_name = '/cam'
    # Optional CLI override of the image topic to listen on.
    if len(sys.argv) > 1:
        topic_name = sys.argv[1]
    bridge = CvBridge()

    def callback(data):
        """Convert the incoming ROS Image message to OpenCV BGR and display it."""
        try:
            img = bridge.imgmsg_to_cv2(data, "bgr8")
        except CvBridgeError as e:
            print(e)
            # Bug fix: without this return, imshow below used an undefined
            # `img` after a conversion failure.
            return
        cv2.imshow("Image window", img)
        cv2.waitKey(3)

    # Bug fix: the node must be initialized before any Subscriber is
    # registered; rospy refuses topic registration on an uninitialized node.
    rospy.init_node(node_name, anonymous=True)
    image_sub = rospy.Subscriber(topic_name, Image, callback)
    print('listen on %s' % (topic_name,))
    try:
        rospy.spin()
    except KeyboardInterrupt:
        print("Shutting down")
    cv2.destroyAllWindows()
|
12,370 | f81967bcc108b20d5cd9d7b8cb55661dac060ed3 | #!/usr/bin/python3
""" Mopdule for Place tests """
from tests.test_models.test_base_model import test_basemodel
from models.place import Place
import unittest
import inspect
import time
from datetime import datetime
from unittest import mock
import models
class test_Place(test_basemodel):
    """Unit tests for the Place model's class attributes.

    Each test checks the attribute exists and, under DB storage, that it
    defaults to None (column defaults are applied by the database).
    """
    def __init__(self, *args, **kwargs):
        """Set the model name and class under test for the base test machinery."""
        super().__init__(*args, **kwargs)
        self.name = "Place"
        self.value = Place
    def test_city_id(self):
        """Place exposes city_id; None by default under DB storage."""
        place = Place()
        self.assertTrue(hasattr(place, "city_id"))
        if models.storage_type == "db":
            self.assertEqual(place.city_id, None)
    def test_user_id(self):
        """Place exposes user_id; None by default under DB storage."""
        place = Place()
        self.assertTrue(hasattr(place, "user_id"))
        if models.storage_type == "db":
            self.assertEqual(place.user_id, None)
        else:
            pass
    def test_name(self):
        """Place exposes name; None by default under DB storage."""
        place = Place()
        self.assertTrue(hasattr(place, "name"))
        if models.storage_type == "db":
            self.assertEqual(place.name, None)
        else:
            pass
    def test_description(self):
        """Place exposes description; None by default under DB storage."""
        place = Place()
        self.assertTrue(hasattr(place, "description"))
        if models.storage_type == "db":
            self.assertEqual(place.description, None)
        else:
            pass
    def test_number_bathrooms(self):
        """Place exposes number_bathrooms; None by default under DB storage."""
        place = Place()
        self.assertTrue(hasattr(place, "number_bathrooms"))
        if models.storage_type == "db":
            self.assertEqual(place.number_bathrooms, None)
        else:
            pass
    def test_number_rooms(self):
        """Place exposes number_rooms; None by default under DB storage."""
        place = Place()
        self.assertTrue(hasattr(place, "number_rooms"))
        if models.storage_type == "db":
            self.assertEqual(place.number_rooms, None)
    def test_max_guest(self):
        """Place exposes max_guest; None by default under DB storage."""
        place = Place()
        self.assertTrue(hasattr(place, "max_guest"))
        if models.storage_type == "db":
            self.assertEqual(place.max_guest, None)
        else:
            pass
    def test_price_by_night(self):
        """Place exposes price_by_night; None by default under DB storage."""
        place = Place()
        self.assertTrue(hasattr(place, "price_by_night"))
        if models.storage_type == "db":
            self.assertEqual(place.price_by_night, None)
        else:
            pass
    def test_latitude(self):
        """Place exposes latitude; None by default under DB storage."""
        place = Place()
        self.assertTrue(hasattr(place, "latitude"))
        if models.storage_type == "db":
            self.assertEqual(place.latitude, None)
        else:
            pass
    def test_longitude(self):
        """Place exposes longitude; None by default under DB storage."""
        place = Place()
        self.assertTrue(hasattr(place, "longitude"))
        if models.storage_type == "db":
            self.assertEqual(place.longitude, None)
        else:
            pass
    def test_amenity_ids(self):
        """amenity_ids is a list on a fresh instance."""
        new = self.value()
        self.assertEqual(type(new.amenity_ids), list)
|
12,371 | 23cc52166b7698e035b1087ccce048fc85c0d85b | import tkinter as tk
import tkinter.ttk as ttk
from tkinter import StringVar
from tkinter import messagebox
import sqlite3
import os.path
listOfMovies = []
window = tk.Tk()
class Movie:
    """A movie record: name, category, description and base price (pre-GST).

    All fields are name-mangled privates accessed via Java-style accessors;
    getPriceWithGST() derives the display price at 7% GST.
    """
    def __init__(self, name, category, description, price):
        self.__name = name
        self.__category = category
        self.__description = description
        self.__price = price
    def getName(self):
        return self.__name
    def setName(self, name):
        self.__name = name
    def getCategory(self):
        return self.__category
    def setCategory(self, category):
        self.__category = category
    def getDescription(self):
        return self.__description
    def setDescription(self, description):
        self.__description = description
    def getPrice(self):
        return self.__price
    def setPrice(self, price):
        self.__price = price
    def getPriceWithGST(self):
        # Base price plus 7% GST, rounded to cents.
        return(round(self.__price*1.07, 2))
def initDatabase(Movie):
    """Create spMovieApp.db, seed it from movieList.txt, return the Movie objects.

    NOTE(review): the parameter shadows the module-level Movie class; the
    sole caller passes the class itself, so behavior is unchanged, but the
    shadowing is confusing and worth renaming.
    """
    conn = sqlite3.connect('spMovieApp.db')
    sql = "CREATE TABLE movielist(name text primary key, category text, description text, price real)"
    conn.execute(sql)
    # movieList.txt rows are pipe-delimited: name|category|description|price
    file = open('movieList.txt', 'r')
    lines = file.readlines()
    movieLists = []
    for eachLine in lines:
        eachLine = eachLine.replace("\n", "")
        cols = eachLine.split("|")
        name = cols[0]
        category = cols[1]
        description = cols[2]
        price = float(cols[3])
        movieList = Movie(name, category, description, price)
        movieLists.append(movieList)
        sql = "INSERT INTO movielist(name,category,description,price) Values(?,?,?,?)"
        conn.execute(sql,(name, category, description, price))
    conn.commit()
    window.geometry("300x300")
    messagebox.showinfo("Success", "Database initialized!")
    file.close()
    conn.close()
    return movieLists
def insert():
    """Button handler: insert a movie row from the four entry fields.

    NOTE(review): a duplicate name raises sqlite3.IntegrityError (name is
    the primary key) and is not caught here; price is stored as typed.
    """
    name = txtNameFilter.get()
    category = txtCategory.get()
    description = txtDescription.get()
    price = txtPrice.get()
    window.geometry("350x350")
    if name == "" or category == "" or description == "" or price == "":
        messagebox.showerror("Error", "Please key in all details!")
    else:
        conn = sqlite3.connect('spMovieApp.db')
        sql = "INSERT INTO movielist(name,category,description,price) Values(?,?,?,?)"
        conn.execute(sql, (name, category, description, price))
        messagebox.showinfo("Success", "Insert Successful!")
        conn.commit()
        conn.close()
def delete():
    """Button handler: delete the movie whose name is in the Name field.

    NOTE(review): the name is uppercased before matching, unlike insert()
    which stores it as typed — confirm whether names are meant to be
    stored uppercase.
    """
    global listOfMovies
    name=txtNameFilter.get().upper()
    if name != "":
        conn = sqlite3.connect('spMovieApp.db')
        sql = "DELETE FROM movielist WHERE name=?"
        conn.execute(sql,(name,))
        conn.commit()
        conn.close()
        messagebox.showinfo("Success", "Delete Successful!")
    else:
        messagebox.showerror("Error", "Delete not successful!")
# Seed the database only on first run (marker: the db file's existence).
if not os.path.exists('spMovieApp.db'):
    listOfMovies=initDatabase(Movie)
# Main GUI
window.title("SP Movie Admin")
window.geometry("325x325")
window.resizable(0, 0)
window.configure(background='lavender')
# Heading
labelAppName = ttk.Label(window, text="SP Movie Admin", padding=2)
labelAppName.config(font=("Helvetica", 20))
labelAppName.grid(row=0, column=0, columnspan=3, pady=10)
# Name row (also used as the delete filter)
labelName = ttk.Label(window, text="Name", padding=2)
labelName.grid(row=1, column=0, sticky=tk.W)
txtNameFilter = StringVar()
textName = ttk.Entry(window, textvariable=txtNameFilter)
textName.grid(row=1, column=1, pady=2)
# Category row
labelCategory = ttk.Label(window, text="Category", padding=2)
labelCategory.grid(row=2, column=0, sticky=tk.W)
txtCategory = StringVar()
textCategory = ttk.Entry(window, textvariable=txtCategory)
textCategory.grid(row=2, column=1, pady=2)
# Description row
labelDescription = ttk.Label(window, text="Description", padding=2)
labelDescription.grid(row=3, column=0, sticky=tk.W)
txtDescription = StringVar()
textDescription = ttk.Entry(window, textvariable=txtDescription)
textDescription.grid(row=3, column=1, pady=2)
# Price row
labelPrice = ttk.Label(window, text="Price", padding=2)
labelPrice.grid(row=4, column=0, sticky=tk.W)
txtPrice = StringVar()
textPrice = ttk.Entry(window, textvariable=txtPrice)
textPrice.grid(row=4, column=1, pady=2)
# Action buttons
button1 = ttk.Button(window, text="Insert", command=insert)
button1.grid(row=5, column=1, sticky=tk.W, pady=10)
button2 = ttk.Button(window, text="Delete", command=delete)
button2.grid(row=5, column=1, sticky=tk.E, pady=10)
window.mainloop() # main loop to wait for events
|
12,372 | 53cc7cfd99e4835b13f9523cb50bd79a382b3123 | # coding=utf-8
from django.shortcuts import render, render_to_response
from django.http import HttpResponseRedirect, HttpResponse
from web.models import User, information, Out
from django import forms
from django.template import RequestContext
from data import *
import datetime
from dateutil import tz
import pytz, time
from download import *
from django.http import StreamingHttpResponse
class UserForm(forms.Form):
    """Login form: username ("账号") and masked password ("密码")."""
    username = forms.CharField(label="账号 ", max_length=200)
    password = forms.CharField(label="密码 ", widget=forms.PasswordInput())
def login_mytask(request):
    """Log a user in (POST) or show their tasks for the selected week (GET).

    On a valid POST login the current week's tasks are rendered; on GET the
    session user's tasks for the week offset by ?ago=N are shown, with an
    optional ?download=true Excel export. Falls through to the login page.
    """
    week_c = int(time.strftime("%w"))  # weekday number, Sunday == 0
    tasks = []
    today = datetime.date.today()
    if request.method == 'POST':  # login section
        uf = UserForm(request.POST)
        if uf.is_valid():
            username = uf.cleaned_data['username']
            password = uf.cleaned_data['password']
            # NOTE(review): plain-text password comparison against the DB.
            user = User.objects.filter(username=username, password=password)
            if user:
                request.session['user_id'] = user[0].id
                weeks = 0  # current week right after login
                s_Ddate = (today - datetime.timedelta(days=(week_c - 1 + 7 * weeks))).strftime('%Y-%m-%d')
                e_Ddate = (today - datetime.timedelta(days=(week_c - 7 + 7 * weeks))).strftime('%Y-%m-%d')
                Ddate = s_Ddate + " -- " + e_Ddate
                # Collect the tasks of each of the 7 days of the selected week.
                for i in range(1, 8):
                    _date = (today - datetime.timedelta(days=(week_c - i + 7 * weeks))).strftime('%Y%m%d')
                    tasks.extend(task.objects.filter(user=user[0].chinese_name, IDD__contains=_date))
                # Sort by date, then (stable sort) group by status.
                task_info = sorted(tasks, key=lambda a: a.IDD, reverse=True)
                task_info = sorted(task_info, key=lambda a: a.status, reverse=True)
                data_aa = {"username": user[0].chinese_name, 'group': user[0].groupname, "task_info": task_info,
                           "ago_week": weeks + 1, "week": weeks, "next_week": weeks - 1, "date": Ddate}
                return render(request, 'my_tasks.html', data_aa)
                # task_info=sorted(task.objects.filter(user=user[0].chinese_name),key=lambda a:a.IDD,reverse=True)
                # task_info=sorted(task_info,key=lambda a:a.status,reverse=True)
                # return render_to_response("my_tasks.html",{"task_info":task_info,"username":user[0].chinese_name,"group":user[0].groupname},context_instance=RequestContext(request))
            else:
                return HttpResponseRedirect('/')
    else:  # personal task section
        if request.session:
            ID = request.session.get('user_id')
            user = User.objects.filter(id=ID)
            if len(user) == 1:
                # task_info=get_tasks(user[0].chinese_name)
                try:
                    weeks = int(request.GET['ago'])
                except:
                    weeks = 0
                s_Ddate = (today - datetime.timedelta(days=(week_c - 1 + 7 * weeks))).strftime('%Y-%m-%d')
                e_Ddate = (today - datetime.timedelta(days=(week_c - 7 + 7 * weeks))).strftime('%Y-%m-%d')
                Ddate = s_Ddate + "--" + e_Ddate
                for i in range(1, 8):
                    _date = (today - datetime.timedelta(days=(week_c - i + 7 * weeks))).strftime('%Y%m%d')
                    tasks.extend(task.objects.filter(user=user[0].chinese_name, IDD__contains=_date))
                task_info = sorted(tasks, key=lambda a: a.IDD, reverse=True)
                task_info = sorted(task_info, key=lambda a: a.status, reverse=True)
                try:
                    # Optional Excel export of the listed week.
                    if request.GET['download'] == 'true':
                        task_tables = [['IDD', '日期'], ['info', '描述'], ['Type', '类型'], ['status', '进度'],
                                       ['shenpi', '审批'], ['pingjia', '评价']]
                        file_name = "我的任务" + Ddate + '.xlsx'
                        return createdownloadfile(task_tables, task_info, file_name)
                except Exception, e:
                    print Exception, e
                    pass
                # return render_to_response('my_tasks.html',{"username":user[0].chinese_name,'group':user[0].groupname,"task_info":task_info,"ago_week":weeks+1,"week":weeks,"next_week":weeks-1,"date":Ddate},context_instance=RequestContext(request))
                data_aa = {"username": user[0].chinese_name, 'group': user[0].groupname, "task_info": task_info,
                           "ago_week": weeks + 1, "week": weeks, "next_week": weeks - 1, "date": Ddate}
                return render(request, 'my_tasks.html', data_aa)
    uf = UserForm()
    return render(request, 'login.html', {'uf': uf})
def shenpi(request):
    """Admin approval page: list finished tasks still awaiting approval.

    An optional ?user= query parameter narrows the list to one user.
    Non-admin users are redirected to '/'.
    """
    if request.session:
        ID = request.session.get('user_id')
        user = User.objects.filter(id=ID)[0]
        if user.groupname == 'admin':
            try:
                Uname = request.GET['user']
                tasks = task.objects.filter(status="已完成", shenpi="待审批", user=Uname)
            except:
                # No ?user= given: show pending approvals for everyone.
                tasks = task.objects.filter(status="已完成", shenpi="待审批")
            task_info = []
            for task1 in tasks:
                task_info.append(
                    {'id': task1.id, 'IDD': task1.IDD, 'info': task1.info, 'type': task1.Type, 'status': task1.status,
                     'shenpi': task1.shenpi, 'user': task1.user})
            return render(request, "manger.html", {"task_info": task_info, "username": user.chinese_name})
        else:
            return HttpResponseRedirect('/')


def logout(request):
    """Drop the session's user_id (if any) and return to the login page."""
    try:
        del request.session['user_id']
    except:
        pass
    return HttpResponseRedirect('/')
    # uf=UserForm()
    # return render_to_response('login.html',{'uf':uf},context_instance=RequestContext(request))
def bypass(request):
    """Admin-only POST: mark a task as approved ("已通过") with a rating."""
    if request.session:
        ID = request.session.get('user_id')
        user = User.objects.filter(id=ID)
        if len(user) == 1 and user[0].groupname == 'admin':
            IDD = request.POST['id']
            task1 = task.objects.filter(id=IDD)[0]
            task1.shenpi = "已通过"
            task1.pingjia = request.POST['pingjia']
            task1.save()
            return HttpResponseRedirect('/shenpi')
    return HttpResponseRedirect('/')


def task_per(request):
    """List unfinished tasks together with all non-admin users."""
    if request.session:
        try:
            ID = request.session.get('user_id')
            username = User.objects.filter(id=ID)[0].chinese_name
            users = []
            for user in User.objects.all():
                if user.groupname == "admin": continue
                users.append(user)
            tasks = task.objects.filter(status="未完成")
            # if len(tasks)>10:tasks=tasks[0:10]
            return render(request, 'task_list.html', {"users": users, "tasks": tasks, "username": username})
        except Exception,e:
            print Exception,e
            pass
    return HttpResponseRedirect('/')
def show_per_all(request):
    """Show one user's tasks for a week selected via ?ago=N (0 = this week)."""
    if request.session:
        try:
            Uname = request.GET['user']
            user = User.objects.filter(chinese_name=Uname)[0]
            ID = request.session.get('user_id')
            cuser = User.objects.filter(id=ID)[0]
            try:
                weeks = int(request.GET['ago'])
            except:
                weeks = 0
            week_c = int(time.strftime("%w"))  # weekday number, Sunday == 0
            tasks = []
            today = datetime.date.today()
            s_Ddate = (today - datetime.timedelta(days=(week_c - 1 + 7 * weeks))).strftime('%Y-%m-%d')
            e_Ddate = (today - datetime.timedelta(days=(week_c - 7 + 7 * weeks))).strftime('%Y-%m-%d')
            Ddate = s_Ddate + " -- " + e_Ddate
            # Gather the 7 days of the selected week, newest first, grouped by status.
            for i in range(1, 8):
                _date = (today - datetime.timedelta(days=(week_c - i + 7 * weeks))).strftime('%Y%m%d')
                tasks.extend(task.objects.filter(user=user.chinese_name, IDD__contains=_date))
            task_info = sorted(tasks, key=lambda a: a.IDD, reverse=True)
            task_info = sorted(task_info, key=lambda a: a.status, reverse=True)
            data_aa = {"user": user, "task_info": task_info, "cuser": cuser, "ago_week": weeks + 1,
                       "next_week": weeks - 1, "date": Ddate, "type": 1}
            return render(request, 'task_per_all.html', data_aa)
        except Exception, e:
            print Exception, e
            pass
    return HttpResponseRedirect('/')


def show_per_all_month(request):
    """Show one user's tasks for a month selected via ?ago=N (0 = this month)."""
    if request.session:
        try:
            Uname = request.GET['user']
            user = User.objects.filter(chinese_name=Uname)[0]
            ID = request.session.get('user_id')
            cuser = User.objects.filter(id=ID)[0]
            try:
                months = int(request.GET['ago'])
            except:
                months = 0
            tasks = []
            today = datetime.date.today()
            year = today.year
            month = today.month - months
            # Normalise the year/month pair after subtracting the offset.
            while month < 1:
                year -= 1
                month += 12
            Ddate = datetime.datetime(year, month, 1).strftime('%Y年%m月')
            DDdate = datetime.datetime(year, month, 1).strftime('%Y%m')
            tasks.extend(task.objects.filter(user=user.chinese_name, IDD__contains=DDdate))
            task_info = sorted(tasks, key=lambda a: a.IDD, reverse=True)
            task_info = sorted(task_info, key=lambda a: a.status, reverse=True)
            return render(request, 'task_per_all.html',
                          {"user": user, "task_info": task_info, "cuser": cuser, "ago_week": months + 1,
                           "next_week": months - 1, "date": Ddate, "type": 2})
        except Exception, e:
            print Exception, e
            pass
    return HttpResponseRedirect('/')
def xiafa_task(request):
if request.session:
ID = request.session.get("user_id")
user = User.objects.filter(id=ID)[0]
if user.groupname == "admin":
print request.method
if request.method == "GET":
users = User.objects.exclude(groupname="admin")
return render(request, 'xiafa_task.html', {"users": users, "cuser": user})
elif request.method == "POST":
now = datetime.datetime.utcnow().replace(tzinfo=pytz.utc).astimezone(
pytz.timezone('Asia/Shanghai')).strftime('%Y%m%d%H%M%S')
Uname = request.POST['user']
info = request.POST['info']
if request.POST['type'] == 0:
Type = "每日任务"
else:
Type = "单次任务"
task.objects.create(user=Uname, IDD=now, info=info, Type=Type, status="未完成", shenpi="待审批", pingjia="-",
createUserGroup="admin")
else:
pass
else:
return HttpResponseRedirect('/')
return HttpResponseRedirect('/')
# return render_to_response('login.html',{'uf':uf},context_instance=RequestContext(request))
def person(request):
    """Show (GET) or update (POST: chinese_name / photo_url) the profile."""
    try:
        ID = request.session.get("user_id")
        user = User.objects.filter(id=ID)[0]
        if request.method == "GET":
            return render(request, 'person_info.html', {"user": user})
        elif request.method == "POST":
            user.chinese_name = request.POST["chinese_name"]
            user.photo_url = request.POST['photo_url']
            user.save()
            return render(request, 'person_info.html', {"user": user, "TYPE": "1"})
        else:
            return HttpResponseRedirect('/')
    except:
        return HttpResponseRedirect('/')


def weekly_tasks(request):
    """Admin weekly report for the week at ?ago=N, with optional Excel export."""
    if request.session:
        ID = request.session.get("user_id")
        if ID is None: return HttpResponseRedirect('/')
        user = User.objects.filter(id=ID)[0]
        if user.groupname == "admin":
            if request.method == "GET":
                weeks = int(request.GET['ago'])
                week_c = int(time.strftime("%w"))  # weekday number, Sunday == 0
                tasks = []
                today = datetime.date.today()
                s_Ddate = (today - datetime.timedelta(days=(week_c - 1 + 7 * weeks))).strftime('%Y-%m-%d')
                e_Ddate = (today - datetime.timedelta(days=(week_c - 7 + 7 * weeks))).strftime('%Y-%m-%d')
                Ddate = s_Ddate + " -- " + e_Ddate
                for i in range(1, 8):
                    _date = (today - datetime.timedelta(days=(week_c - i + 7 * weeks))).strftime('%Y%m%d')
                    tasks.extend(task.objects.filter(IDD__contains=_date))
                try:
                    # ?download=true returns the week as an .xlsx attachment.
                    if request.GET['download'] == 'true':
                        task_tables = [['IDD', '日期'], ['user', '执行人'], ['info', '描述'], ['Type', '类型'], ['status', '进度'],
                                       ['shenpi', '审批'], ['pingjia', '评价']]
                        file_name = "周报" + Ddate + '.xlsx'
                        return createdownloadfile(task_tables, tasks, file_name)
                except Exception, e:
                    print Exception, e
                return render(request, 'weekly_tasks.html',
                              {"user": user, "tasks": tasks, "ago_week": weeks + 1, 'week': weeks,
                               "next_week": weeks - 1, "date": Ddate})
        else:
            return HttpResponseRedirect('/')
    return HttpResponseRedirect('/')


def download(request):
    """Stream /tmp/task.xls back to the client as an Excel attachment."""
    createdownloadfile()
    file_name = '/tmp/task.xls'
    def file_iterator(file_name, chunk_size=512):
        # Yield the file in small chunks so large files are not held in memory.
        with open(file_name) as f:
            while True:
                c = f.read(chunk_size)
                if c:
                    yield c
                else:
                    break
    response = StreamingHttpResponse(file_iterator(file_name), content_type='application/vnd.ms-excel')
    response['Content-Disposition'] = 'attachment; filename="我的任务.xls"'
    return response
|
12,373 | fabb519fc00f1396ca95c5361a54d2523d43186b | """Put all your globals here.
Things like layout and FPS are pretty universal, so
for convenience, just load up a global
"""
# http://www.aleax.it/Python/5ep.html
class Borg(object):
    """Martelli's Borg pattern: every instance aliases one shared dict,
    so attribute reads and writes are visible across all instances."""
    _shared_state = {}

    def __init__(self):
        self.__dict__ = self._shared_state


#
# Any two Global() instances see exactly the same data:
#   a = Global(); b = Global()  =>  a.layout == b.layout
#
class Global(Borg):
    """Application-wide settings (layout, servers, stations, fps, ...).

    NOTE: constructing a new Global() re-runs __init__ and therefore
    resets the shared state back to its defaults.
    """

    def __init__(self):
        Borg.__init__(self)
        for name in ("layout", "servers", "stations", "fps"):
            setattr(self, name, None)
        self.verbose = False
        for name in ("codes", "fountains"):
            setattr(self, name, None)


STATE = Global()
|
12,374 | 89f5c3b8ee6755ba2f74f6d84761ba03e60b158f | import pylab
sum_digits_raised=[]
linear=[]
for i in range(500000):
sum_digits_raised.append(sum(int(d)**5 for d in str(i)))
linear.append(i)
pylab.plot(sum_digits_raised, label='Sum of digits to the fifth power')
pylab.plot(linear, label='linear')
pylab.legend(loc = 'upper left')
pylab.figure()
pylab.show()
|
12,375 | ced54cbf85ff4e2ff88b55fd426df898ca64502a | # -*- coding: utf-8 -*-
"""
Created on Mon Jun 3 21:11:27 2019
@author: HP
"""
# Print the integers 0 through 9; the loop stops once the counter reaches 10.
n = 0
while n < 10:
    print(n)
    n += 1
12,376 | 1d842cf79389246451d2313f3793263ac479774e | word="ametikool"
used_word=["_,""_,""_,""_,""_,""_,""_,""_,""_,"]
used_letters= []
alphabet = "abcdefghijklmnopqrstuvwxyz"
letters = list(word.lower())
while True:
print (used_wordg)
print ("kasutatud tähed"+ str(used_letters))
letter=input("sisestage üks täht :")
used_letters.append(letter) |
12,377 | 1ef6235d088a133f58dd36ac8b69641f69caf6de | #!/usr/bin/env python3
import logging
import asyncio
import json
import sys
import uvloop
asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
import aiohttp
from aiohttp import web
import litecord
# Console logging: everything at DEBUG with a compact level/name prefix.
logging.basicConfig(level=logging.DEBUG, \
    format='[%(levelname)7s] [%(name)s] %(message)s')
# Loggers that are too chatty at DEBUG; shush_loggers() raises them to INFO.
loggers_to_info = ['websockets.protocol']
log = logging.getLogger('litecord')
# Mirror the litecord logger to a file, with timestamps.
handler = logging.FileHandler('litecord.log')
handler.setLevel(logging.DEBUG)
formatter = logging.Formatter('%(asctime)s - [%(levelname)s] [%(name)s] %(message)s')
handler.setFormatter(formatter)
log.addHandler(handler)
app = web.Application()
async def index(request):
    """Health-check / landing handler for GET /."""
    return web.Response(text='beep boop this is litecord!')
def shush_loggers():
    """Raise the level of the chatty loggers in ``loggers_to_info`` to INFO."""
    for name in loggers_to_info:
        logging.getLogger(name).setLevel(logging.INFO)
def main():
    """Entry point: load the JSON config, wire the aiohttp app and run
    the litecord servers until interrupted. Returns the process exit code.
    """
    try:
        config_path = sys.argv[1]
    except IndexError:
        config_path = 'litecord_config.json'

    # BUG FIX: the sample-config fallback file was opened but never used —
    # json.load() re-opened config_path (leaking the handle) and crashed
    # with FileNotFoundError when only the sample existed. Load from the
    # file we actually managed to open, and close it deterministically.
    try:
        cfgfile = open(config_path, 'r')
    except FileNotFoundError:
        cfgfile = open('litecord_config.sample.json', 'r')
    with cfgfile:
        flags = json.load(cfgfile)

    shush_loggers()
    loop = asyncio.get_event_loop()
    app.router.add_get('/', index)
    litecord.init_server(app, flags, loop)
    try:
        loop.run_until_complete(litecord.start_all(app))
        server = app.litecord_server
        server.compliance()
        log.debug('Running servers')
        server.http_server = loop.run_until_complete(server.http_server)
        server.ws_server = loop.run_until_complete(server.ws_server)
        log.debug('Running server sentry')
        loop.create_task(litecord.server_sentry(server))
        log.debug('Running loop')
        loop.run_forever()
    except KeyboardInterrupt:
        log.info('Exiting from a CTRL-C...')
    except:
        log.exception('Oh no! We received an error. Exiting.')
    finally:
        app.litecord_server.shutdown()
    return 0
# Propagate main()'s return value as the process exit status.
if __name__ == "__main__":
    sys.exit(main())
|
12,378 | 36dfd10265bdb89736e99df069448d82ec2aaf8b | from datetime import datetime
import re
import os
# DATETIME
def convWebTimeStrToDatetime(timeStr):
    """Parse a website-style timestamp like '05 Mar 2021, 14:30'."""
    web_format = "%d %b %Y, %H:%M"
    return datetime.strptime(timeStr, web_format)
def convInputStrToDatetime(timeStr):
    """Parse an ISO-like input timestamp, e.g. '2021-03-05 14:30:09'."""
    input_format = "%Y-%m-%d %H:%M:%S"
    return datetime.strptime(timeStr, input_format)
def convTimeToStr(dateT):
    """Format a datetime as a filesystem-safe 'YYYY-mm-dd_HH-MM-SS' string."""
    return dateT.strftime("%Y-%m-%d_%H-%M-%S")
def convTimeToMonthYearStr(dateT):
    """Format a datetime as its 'YYYY-mm' month bucket."""
    return dateT.strftime("%Y-%m")
def getCurrDateStr():
    """Return today's date as 'YYYY-mm-dd'."""
    return datetime.now().strftime("%Y-%m-%d")
# STRING
def cleanseStr(x):
    """Normalise a string for use in names: whitespace runs become a single
    underscore, then any character outside [A-Za-z0-9_+] is dropped.
    (The '+' inside the character class means plus signs survive.)
    """
    underscored = re.sub(r'(\s+)', '_', x)
    return re.sub(r'[^\w+]', '', underscored)
# OS
def safeCreateDir(relPath):
    """Create the directory *relPath* unless it already exists (non-recursive)."""
    already_there = os.path.isdir(relPath)
    if not already_there:
        os.mkdir(relPath)
12,379 | 48d1ef5143bff5bc85f32b0ef6fc80c1a9a11904 | har=int(input())
x,y=0,1
while har>0:
print(y,end=' ')
x,y=y,x+y
har=har-1 |
12,380 | 1fb05c6be9434a065e8ae5bdc6e9d1347f9a0f05 | from django.shortcuts import render,redirect,get_object_or_404
from .forms import UserForm,ProfileForm,PrescriptionForm,AppointmentForm,PatientForm,DoctorForm,ProfileForm1,AccountForm
from .models import Profile,Patient,Doctor,Appointment,Prescription,Reception,HR,Accounts
from django.contrib.auth.models import auth,User
from django.contrib.auth.decorators import login_required
from django import forms
# Create your views here.
def home(request):
    """Dispatch a logged-in user to their role-specific home page;
    anonymous users see the public landing page."""
    if request.user.is_authenticated:
        user1=User.objects.get(username=request.user.username)
        if Reception.objects.filter(user=user1).exists():
            return redirect('rhome')
        elif HR.objects.filter(user=user1).exists():
            return redirect('hhome')
        elif Patient.objects.filter(user=Profile.objects.get(user=user1)).exists():
            return redirect('phome')
        elif Doctor.objects.filter(user=Profile.objects.get(user=user1)).exists():
            return redirect('dhome')
    return render(request,'base/home.html')

@login_required(login_url='/login/')
def rhome(request):
    """Reception home page."""
    return render(request,'reception/home.html')

@login_required(login_url='/login/')
def phome(request):
    """Patient home page."""
    return render(request,'patient/home.html')

@login_required(login_url='/login/')
def dhome(request):
    """Doctor home page."""
    return render(request,'doctor/home.html')

@login_required(login_url='/login/')
def hhome(request):
    """HR home page."""
    return render(request,'hr/home.html')

def contact(request):
    """Static contact page."""
    return render(request,'base/contact.html')
def register(request):
    """Self-registration: create a User + Profile and, based on the
    profile's Registeras field, a Doctor or Patient record.

    Logged-in users are sent through logout first. On invalid POST the
    bound forms (with errors) are re-rendered.
    """
    if request.user.is_authenticated:
        return redirect('logout')
    if request.method == "POST":
        u_form = UserForm(request.POST)
        p_form = ProfileForm(request.POST)
        if u_form.is_valid() and p_form.is_valid():
            user = u_form.save()
            p_form = p_form.save(commit=False)
            p_form.user = user
            p_form.save()
            if p_form.Registeras=="Doctor":
                d=Doctor(user=p_form)
                d.save()
            if p_form.Registeras=="Patient":
                p=Patient(user=p_form)
                p.save()
            return redirect('login')
    else:
        # BUG FIX: GET previously built forms bound to the (empty) POST
        # data, so the page showed validation errors before the user had
        # typed anything. Use unbound forms for the initial render.
        u_form = UserForm()
        p_form = ProfileForm()
    return render(request, 'base/register.html', {'u_form': u_form, 'p_form': p_form})
def login(request):
    """Authenticate a username/password POST and redirect to the role home.

    An already-authenticated visitor is logged out and bounced back here.
    NOTE(review): a valid user with no matching role record falls through
    and returns None — confirm that every user has exactly one role.
    """
    if request.user.is_authenticated:
        auth.logout(request)
        return redirect('login')
    if request.method=='POST':
        username=request.POST['uname']
        password=request.POST['psw']
        user=auth.authenticate(username=username,password=password)
        if user is not None:
            auth.login(request,user)
            user1=User.objects.get(username=username)
            if Reception.objects.filter(user=user1).exists():
                return redirect('rhome')
            elif HR.objects.filter(user=user1).exists():
                return redirect('hhome')
            elif Patient.objects.filter(user=Profile.objects.get(user=user1)).exists():
                return redirect('phome')
            elif Doctor.objects.filter(user=Profile.objects.get(user=user1)).exists():
                return redirect('dhome')
        else:
            return redirect('login')
    else:
        return render(request,'base/login.html')

def logout(request):
    """End the session and return to the public home page."""
    auth.logout(request)
    return redirect('home')
def appointment(request):
    """List the appointments of the logged-in patient or doctor."""
    if request.user.is_authenticated:
        name=request.user.username
        profile=User.objects.get(username=name)
        patient=Profile.objects.get(user=profile)
        if Patient.objects.filter(user=patient).exists():
            id=Patient.objects.get(user=patient).id
            app=Appointment.objects.filter(Patient_id=id)
            return render(request,'patient/appointment.html',{'app':app})
        elif Doctor.objects.filter(user=patient).exists():
            id=Doctor.objects.get(user=patient).id
            app=Appointment.objects.filter(Doctor_id=id)
            return render(request,'doctor/appointment.html',{'app':app})
    else:
        return redirect('login')

def prescription(request):
    """List prescriptions: a patient sees their history, a doctor sees
    the prescriptions they wrote."""
    if request.user.is_authenticated:
        name=request.user.username
        profile=User.objects.get(username=name)
        patient=Profile.objects.get(user=profile)
        if Patient.objects.filter(user=patient).exists():
            id=Patient.objects.get(user=patient).id
            pre=Prescription.objects.filter(Patname_id=id)
            return render(request,'patient/mh.html',{'pre':pre})
        elif Doctor.objects.filter(user=patient).exists():
            name=Doctor.objects.get(user=patient)
            pre=Prescription.objects.filter(Docname=name)
            return render(request,'doctor/prescription.html',{'pre':pre})
    else:
        return redirect('login')
@login_required(login_url='/login/')
def pre_new(request):
    """Doctor-only: create a new prescription via PrescriptionForm."""
    if request.user.is_authenticated:
        profile=Profile.objects.get(user=request.user)
        if Doctor.objects.filter(user=profile).exists():
            if request.method == "POST":
                form = PrescriptionForm(request.POST)
                if form.is_valid():
                    post = form.save(commit=False)
                    post.Docname = request.user
                    post.save()
                    return redirect('pre')
            else:
                form = PrescriptionForm()
            return render(request, 'doctor/form_edit.html', {'form': form})
    else:
        return redirect('login')

@login_required(login_url='/login/')
def dashboard(request):
    """Reception dashboard: all appointments and all patients."""
    app=Appointment.objects.all()
    pat=Patient.objects.all()
    context={
        'app':app,
        'pat':pat,
    }
    return render(request,'reception/dashboard.html',context)

def createapp(request):
    """Reception-only: create an appointment via AppointmentForm."""
    if request.user.is_authenticated:
        if Reception.objects.filter(user=request.user).exists():
            if request.method == "POST":
                form = AppointmentForm(request.POST)
                if form.is_valid():
                    post = form.save(commit=False)
                    post.save()
                    return redirect('dash')
            else:
                form = AppointmentForm()
            return render(request, 'reception/createapp.html', {'form': form})
        else:
            return redirect('logout')
    else:
        return redirect('login')
@login_required(login_url='/login/')
def createpat(request):
    """Reception-only: register a new patient (User + Profile + Patient)."""
    if request.user.is_authenticated:
        if Reception.objects.filter(user=request.user).exists():
            if request.method == "POST":
                u_form=UserForm(request.POST)
                p_form=ProfileForm1(request.POST)
                form = PatientForm(request.POST)
                if form.is_valid() and u_form.is_valid() and p_form.is_valid():
                    # Create the auth user, then its profile, then the
                    # patient record pointing at that profile.
                    p=u_form.save(commit=False)
                    username=p.username
                    p.save()
                    profile=p_form.save(commit=False)
                    profile.user=User.objects.get(username=username)
                    profile.save()
                    post = form.save(commit=False)
                    post.user=Profile.objects.get(user=p)
                    post.save()
                    return redirect('dash')
            u_form=UserForm()
            p_form=ProfileForm1()
            form = PatientForm()
            context={
                'form':form,
                'u_form':u_form,
                'p_form':p_form
            }
            return render(request, 'reception/createpat.html',context)
    else:
        return redirect('login')

@login_required(login_url='/login/')
def ddashboard(request):
    """HR dashboard: list all doctors."""
    doc=Doctor.objects.all()
    context={
        'doc':doc,
    }
    return render(request,'hr/dashboard.html',context)

@login_required(login_url='/login/')
def createdoc(request):
    """HR-only: register a new doctor (User + Profile + Doctor)."""
    if request.user.is_authenticated:
        if HR.objects.filter(user=request.user).exists():
            if request.method == "POST":
                u_form=UserForm(request.POST)
                p_form=ProfileForm1(request.POST)
                form = DoctorForm(request.POST)
                if form.is_valid() and u_form.is_valid() and p_form.is_valid():
                    p=u_form.save(commit=False)
                    username=p.username
                    p.save()
                    profile=p_form.save(commit=False)
                    profile.user=User.objects.get(username=username)
                    profile.save()
                    post = form.save(commit=False)
                    post.user=Profile.objects.get(user=p)
                    post.save()
                    return redirect('ddash')
            u_form=UserForm()
            p_form=ProfileForm1()
            form = DoctorForm()
            context={
                'form':form,
                'u_form':u_form,
                'p_form':p_form
            }
            return render(request, 'hr/createdoc.html',context)
    else:
        return redirect('login')
def profile_update(request,pk):
    """Update a Patient (reception, plus an Accounts entry) or a Doctor (HR).

    *pk* is the Patient/Doctor primary key depending on the caller's role.
    """
    if request.user.is_authenticated:
        if Reception.objects.filter(user=request.user).exists():
            pprofile=get_object_or_404(Patient,pk=pk)
            if request.method=="POST":
                p_form=PatientForm(request.POST,instance=pprofile)
                a_form=AccountForm(data=request.POST,files=request.FILES)
                if p_form.is_valid() and a_form.is_valid():
                    p_form.save()
                    # Attach the new accounts row to the patient being edited.
                    a=a_form.save(commit=False)
                    a.user=pprofile
                    print(a)
                    a.save()
                    return redirect('dash')
                else:
                    print(a_form.errors)
            else:
                p_form=PatientForm(instance=pprofile)
                a_form=AccountForm()
            context={
                'p_form':p_form,
                'a_form':a_form
            }
            return render(request,'reception/profile_update.html',context)
        elif HR.objects.filter(user=request.user).exists():
            pprofile=get_object_or_404(Doctor,pk=pk)
            if request.method=="POST":
                p_form=DoctorForm(request.POST,instance=pprofile)
                if p_form.is_valid():
                    p_form.save()
                    return redirect('ddash')
            else:
                p_form=DoctorForm(instance=pprofile)
            context={
                'p_form':p_form
            }
            return render(request,'hr/profile_update.html',context)

def profile_delete(request,pk):
    """Delete a Patient (reception) or Doctor (HR) by deleting the
    underlying auth User, which cascades to profile and role rows."""
    if request.user.is_authenticated:
        if Reception.objects.filter(user=request.user).exists():
            pprofile=get_object_or_404(Patient,pk=pk)
            profile=get_object_or_404(Profile,pk=pprofile.user.pk)
            user=get_object_or_404(User,pk=profile.user.pk)
            if request.method=="POST":
                user.delete()
                return redirect('dash')
            else:
                return render(request,'reception/profile_delete.html')
        elif HR.objects.filter(user=request.user).exists():
            pprofile=get_object_or_404(Doctor,pk=pk)
            profile=get_object_or_404(Profile,pk=pprofile.user.pk)
            user=get_object_or_404(User,pk=profile.user.pk)
            if request.method=="POST":
                user.delete()
                return redirect('ddash')
            else:
                return render(request,'hr/profile_delete.html')
@login_required(login_url='/login/')
def payments(request):
    """Show the logged-in patient their Accounts (payment) entries."""
    user=request.user
    profile=get_object_or_404(Profile,user=user)
    user1=get_object_or_404(Patient,user=profile)
    patient=Accounts.objects.filter(user=user1)
    context={
        'pat':patient
    }
    return render(request,'patient/payments.html',context)

@login_required(login_url='/login/')
def accounting(request):
    """HR-only accounting overview of all Accounts and Patients.

    NOTE(review): a non-HR user falls through and returns None (HTTP 500);
    consider redirecting instead.
    """
    if HR.objects.filter(user=request.user).exists():
        context={
            'account':Accounts.objects.all(),
            'pat':Patient.objects.all()
        }
        return render(request,'hr/account.html',context)
12,381 | 2c00ce4c0cbfad5600ee73b779841211b27ef1f5 | # Import libraries.
import gdal
import os.path
import cv2
import os
import numpy as np
from numpy import inf
import rasterio
import matplotlib.pyplot as plt
path = r"C:/Users/Tim/Desktop/BOKU/GIS/GISproject/landsat/"
bands = ["band1", "band2", "band3", "band4", "band5", "band6", "band7"]
# File from which projection is copied.
def copy_projection(input_georeferencing, file_to_reproject):
    '''
    Copies projection of one image to another of the same dimensions.
    Where:
    input_georeferencing: the path to the image that has the projection information.
    file_to_reproject: the path to the file that you want to reproject.
    '''
    dataset = gdal.Open(input_georeferencing)
    if dataset is None:
        print('Unable to open', input_georeferencing, 'for reading')
    else:
        projection = dataset.GetProjection()
        geotransform = dataset.GetGeoTransform()
        print(projection, geotransform)
        if projection is None and geotransform is None:
            print('No projection or geotransform found on file' + input_georeferencing)
        else:
            # Open the target in update mode so the georeferencing sticks.
            dataset2 = gdal.Open( file_to_reproject, gdal.GA_Update )
            if dataset2 is None:
                print('Unable to open', file_to_reproject, 'for writing')
            # Copy whichever of the two pieces of georeferencing exists.
            if geotransform is not None:
                dataset2.SetGeoTransform( geotransform )
            if projection is not None:
                dataset2.SetProjection( projection )
# Reapply the projection of every original Landsat-8 band file to the
# processed copy of the same name under landsat_8test/.
for band in bands:
    for file in os.listdir(path + "landsat8/"):
        if file.endswith(band + ".tif"):
            try:
                copy_projection(path + "landsat8/" + file, path + "landsat_8test/" + file)
            except:
                # NOTE(review): bare except hides every failure — consider
                # logging the file name and the error.
                pass
|
12,382 | 9f6420520d51fa5d05e4dc1a3adca5a0dd32dd9b | from django.shortcuts import render
from django.http import HttpResponse, Http404
# Create your views here.
def index(request):
    """Render the ninja picker landing page."""
    return render(request, 'ninja/index.html')
# def show(request, color):
# # context = {
# # 'id' : color_id
# # }
# return render(request, 'ninja/show.html')
# def show(request, color_id):
# context = {
# 'id': color_id,
# 'question': "Why is a boxing ring square?",
# }
# return render(request, 'quiz/show.html', context)
def show(request, color):
    """Serve the ninja image matching *color*; any unknown color gets the
    megan_fox fallback image."""
    responses = {
        'blue': "<img src = '/static/ninja/blue.jpg'>",
        'red': "<img src = '/static/ninja/red.jpg'>",
        'orange': "<img src = '/static/ninja/orange.jpg'>",
        'purple': "<img src = '/static/ninja/purple.jpg'>",
    }
    fallback = "<img src = '/static/ninja/megan_fox.jpg'>"
    return HttpResponse(responses.get(color, fallback))
12,383 | 345cec0b20a9874073fea8b1a94594a8269c87e7 | #8/6/14
#read in pairs of data(Jilian date. temperature), check it for validity
#keep reading in until user says stop
total = 0.0
counter = 0
tot_temp = [0] * 366
count_slips = [0] * 366
user_date = input('Type in a Julian date or STOP ')
while(user_date.upper() != 'STOP'):
#reads in and checks down the date
user_date = int(user_date)
if (user_date < 1 or user_date > 365):
print('Bad date on input, retype')
#reads and checks in the temperature
else:
temp = float(input('Now type in the temperature: '))
if (temp < -70 or temp > 150):
print('temperature out of range, retype')
else:
print('Day',user_date,'was',temp,'degrees farenheit')
counter += 1
total += temp
tot_temp[user_date] += temp
count_slips[user_date] += 1
user_date = input('Type in a Julian date or STOP ')
if counter > 0 :
avg = total/counter
print('The average daily temperature was',avg,'degrees farenheit.')
average_hottest = -999
average_coldest = 999
for num in range(1,366):
if count_slips[num] > 0:
average_temp[num] = tot_temp[num]/count_slips[num]
print('The average for day',num,'was',average_temp[num],'for',count_slips[num],'slips of paper')
if average_temp[num] > average_hottest:
average_hottest = average_temp[num]
if average_temp[num] < average_coldest:
average_coldest = average_temp[num]
print('The hottest average was',average_hottest)
print('The coldest average was',average_coldest)
|
12,384 | 2c4c5f761e371c00b731a98c9c33f39dfa9a7f0e |
# Registry mapping a distribution-method name to the factory that builds
# its action.
target_distribution_methods = {}


def register(target_distribution_method_name, target_distribution_method):
    """Make *target_distribution_method* available under the given name."""
    target_distribution_methods[target_distribution_method_name] = target_distribution_method


def create_action(task, source, filename, target):
    """Build an action by dispatching on target['distribution_method']."""
    factory = target_distribution_methods[target['distribution_method']]
    return factory(task, source, filename, target)
12,385 | 64cd0434c8f447b709db7a1c19ce3dfaf4c7f50e | import json
from pathlib import Path
import ipywidgets as ipw
import requests_cache
import traitlets as tl
from aiida import orm
from aiidalab_eln import get_eln_connector
from IPython.display import clear_output, display
ELN_CONFIG = Path.home() / ".aiidalab" / "aiidalab-eln-config.json"
ELN_CONFIG.parent.mkdir(
parents=True, exist_ok=True
) # making sure that the folder exists.
def connect_to_eln(eln_instance=None, **kwargs):
    """Connect to an ELN instance described in the stored configuration.

    Returns a ``(connector, error_message)`` pair: exactly one of the two
    is ``None``. Extra *kwargs* are forwarded to the connector class.
    """
    # assuming that the connection can only be established to the ELNs
    # with the stored configuration.
    try:
        with open(ELN_CONFIG) as file:
            config = json.load(file)
    except (FileNotFoundError, json.JSONDecodeError, KeyError):
        return (
            None,
            f"Can't open '{ELN_CONFIG}' (ELN configuration file). Instance: {eln_instance}",
        )
    # If no ELN instance was specified, trying the default one.
    if not eln_instance:
        eln_instance = config.pop("default", None)
    if eln_instance:  # The ELN instance could be identified.
        if eln_instance in config:
            eln_config = config[eln_instance]
            eln_type = eln_config.pop("eln_type", None)
        else:  # The selected instance is not present in the config.
            return None, f"Didn't find configuration for the '{eln_instance}' instance."
        # If the ELN type cannot be identified - aborting.
        if not eln_type:
            return None, f"Can't identify the type of {eln_instance} ELN."
        # Everything is alright, can populate the ELN connector
        # with the required info.
        try:
            eln = get_eln_connector(eln_type)(
                eln_instance=eln_instance, **eln_config, **kwargs
            )
        except NotImplementedError as err:
            return None, str(err)
        eln.connect()
        return eln, None
    return (
        None,
        "No ELN instance was provided, the default ELN instance is not configured either. Set a default ELN or select an ELN instance.",
    )
class ElnImportWidget(ipw.VBox):
    """Widget that imports a node from the configured ELN on construction."""
    # Node produced by the import; linked from the connector's own trait.
    node = tl.Instance(orm.Node, allow_none=True)

    def __init__(self, path_to_root="../", **kwargs):
        # Used to output additional settings.
        self._output = ipw.Output()
        # Communicate to the user if something isn't right.
        error_message = ipw.HTML()
        super().__init__(children=[error_message], **kwargs)
        eln, msg = connect_to_eln(**kwargs)
        if eln is None:
            # No usable ELN configuration: show a link to the setup notebook.
            url = f"{path_to_root}aiidalab-widgets-base/notebooks/eln_configure.ipynb"
            error_message.value = f"""Warning! The access to ELN is not configured. Please follow <a href="{url}" target="_blank">the link</a> to configure it.</br> More details: {msg}"""
            return
        tl.dlink((eln, "node"), (self, "node"))
        with requests_cache.disabled():
            # Since the cache is enabled in AiiDAlab, we disable it here to get correct results.
            eln.import_data()
class ElnExportWidget(ipw.VBox):
    """Widget that sends the current node (and its provenance info) to the
    configured ELN, with an optional destination override."""
    # Node to export; linked into the connector when one is available.
    node = tl.Instance(orm.Node, allow_none=True)

    def __init__(self, path_to_root="../", **kwargs):
        self.path_to_root = path_to_root
        # Send to ELN button.
        send_button = ipw.Button(description="Send to ELN")
        send_button.on_click(self.send_to_eln)
        # Use non-default destination.
        self.modify_settings = ipw.Checkbox(
            description="Update destination.", indent=False
        )
        self.modify_settings.observe(self.handle_output, "value")
        # Used to output additional settings.
        self._output = ipw.Output()
        # Communicate to the user if something isn't right.
        self.message = ipw.HTML()
        children = [
            ipw.HBox([send_button, self.modify_settings]),
            self._output,
            self.message,
        ]
        self.eln, msg = connect_to_eln()
        if self.eln:
            tl.dlink((self, "node"), (self.eln, "node"))
        else:
            # Without a connection the controls are useless: disable them.
            self.modify_settings.disabled = True
            send_button.disabled = True
            self.message.value = f"""Warning! The access to ELN is not configured. Please follow <a href="{self.path_to_root}/aiidalab-widgets-base/notebooks/eln_configure.ipynb" target="_blank">the link</a> to configure it.</br> </br> More details: {msg}"""
        super().__init__(children=children, **kwargs)

    @tl.observe("node")
    def _observe_node(self, _=None):
        """When the node changes, recover its ELN metadata (directly or from
        an ancestor node) and prime the connector's sample config."""
        if self.node is None or self.eln is None:
            return
        if "eln" in self.node.extras:
            info = self.node.extras["eln"]
        else:
            # Fall back to the nearest ancestor that carries "eln" extras.
            try:
                q = orm.QueryBuilder().append(
                    orm.Node,
                    filters={"extras": {"has_key": "eln"}},
                    tag="source_node",
                    project="extras.eln",
                )
                q.append(
                    orm.Node,
                    filters={"uuid": self.node.uuid},
                    with_ancestors="source_node",
                )
                info = q.all(flat=True)[0]
            except IndexError:
                info = {}
        self.eln.set_sample_config(**info)

    def send_to_eln(self, _=None):
        """Button handler: push the node's data to the connected ELN."""
        if self.eln and self.eln.is_connected:
            self.message.value = f"\u29D7 Sending data to {self.eln.eln_instance}..."
            with requests_cache.disabled():
                # Since the cache is enabled in AiiDAlab, we disable it here to get correct results.
                self.eln.export_data()
            self.message.value = (
                f"\u2705 The data were successfully sent to {self.eln.eln_instance}."
            )
        else:
            self.message.value = f"""\u274C Something isn't right! We were not able to send the data to the "<strong>{self.eln.eln_instance}</strong>" ELN instance. Please follow <a href="{self.path_to_root}/aiidalab-widgets-base/notebooks/eln_configure.ipynb" target="_blank">the link</a> to update the ELN's configuration."""

    def handle_output(self, _=None):
        """Show or hide the destination editor as the checkbox toggles."""
        with self._output:
            clear_output()
            if self.modify_settings.value:
                display(
                    ipw.HTML(
                        f"""Currently used ELN is: "<strong>{self.eln.eln_instance}</strong>". To change it, please follow <a href="{self.path_to_root}/aiidalab-widgets-base/notebooks/eln_configure.ipynb" target="_blank">the link</a>."""
                    )
                )
                display(self.eln.sample_config_editor())
class ElnConfigureWidget(ipw.VBox):
    """Widget to create, edit, persist and test ELN connection configurations.

    Configurations are stored as JSON in the file pointed to by ``ELN_CONFIG``;
    a special "default" key names the ELN used when none is chosen explicitly.
    """

    def __init__(self, **kwargs):
        self._output = ipw.Output()
        self.eln = None

        # Selector of existing ELN configurations.
        # BUGFIX: the original initialized this with options=("Set up new ELN", {}),
        # i.e. TWO separate options (a bare string and a dict) whose label also
        # disagreed with the "Setup new ELN" entry written by
        # update_list_of_elns(). Use the same (label, value) pair form.
        self.eln_instance = ipw.Dropdown(
            description="ELN:",
            options=[("Setup new ELN", {})],
            style={"description_width": "initial"},
        )
        self.update_list_of_elns()
        self.eln_instance.observe(self.display_eln_config, names=["value", "options"])

        self.eln_types = ipw.Dropdown(
            description="ELN type:",
            options=["cheminfo", "openbis"],
            value="cheminfo",
            style={"description_width": "initial"},
        )
        self.eln_types.observe(self.display_eln_config, names=["value", "options"])

        # Buttons.
        # Make current ELN the default.
        default_button = ipw.Button(description="Set as default", button_style="info")
        default_button.on_click(self.set_current_eln_as_default)

        # Save current ELN configuration.
        save_config = ipw.Button(
            description="Save configuration", button_style="success"
        )
        save_config.on_click(self.save_eln_configuration)

        # Erase current ELN from the configuration.
        erase_config = ipw.Button(
            description="Erase configuration", button_style="danger"
        )
        erase_config.on_click(self.erase_current_eln_from_configuration)

        # Check if connection to the current ELN can be established.
        check_connection = ipw.Button(
            description="Check connection", button_style="warning"
        )
        check_connection.on_click(self.check_connection)

        self.my_output = ipw.HTML()

        self.display_eln_config()

        super().__init__(
            children=[
                self.eln_instance,
                self.eln_types,
                self._output,
                ipw.HBox([default_button, save_config, erase_config, check_connection]),
                self.my_output,
            ],
            **kwargs,
        )

    def write_to_config(self, config):
        """Serialize ``config`` to the ELN configuration file."""
        with open(ELN_CONFIG, "w") as file:
            json.dump(config, file, indent=4)

    def get_config(self):
        """Read the ELN configuration file; return {} if missing or malformed."""
        try:
            with open(ELN_CONFIG) as file:
                return json.load(file)
        except (FileNotFoundError, json.JSONDecodeError, KeyError):
            return {}

    def update_list_of_elns(self):
        """Refresh the dropdown options from the configuration file."""
        config = self.get_config()
        default_eln = config.pop("default", None)
        if (
            default_eln not in config
        ):  # Erase the default ELN if it is not present in the config
            self.write_to_config(config)
            default_eln = None

        self.eln_instance.options = [("Setup new ELN", {})] + [
            (k, v) for k, v in config.items()
        ]
        if default_eln:
            self.eln_instance.label = default_eln

    def set_current_eln_as_default(self, _=None):
        """Mark the currently selected ELN as the default one."""
        self.update_eln_configuration("default", self.eln_instance.label)

    def update_eln_configuration(self, eln_instance, eln_config):
        """Insert or replace one entry in the configuration file."""
        config = self.get_config()
        config[eln_instance] = eln_config
        self.write_to_config(config)

    def erase_current_eln_from_configuration(self, _=None):
        """Remove the selected ELN from the configuration file."""
        config = self.get_config()
        config.pop(self.eln_instance.label, None)
        self.write_to_config(config)
        self.update_list_of_elns()

    def check_connection(self, _=None):
        """Try to connect to the selected ELN and report the outcome."""
        if self.eln:
            err_message = self.eln.connect()
            if self.eln.is_connected:
                self.my_output.value = "\u2705 Connected."
                return
        self.my_output.value = f"\u274C Not connected. {err_message}"

    def display_eln_config(self, value=None):
        """Display ELN configuration specific to the selected type of ELN."""
        try:
            eln_class = get_eln_connector(self.eln_types.value)
        except NotImplementedError as err:
            with self._output:
                clear_output()
                display(ipw.HTML("❌" + str(err)))
            return

        self.eln = eln_class(
            eln_instance=self.eln_instance.label if self.eln_instance.value else "",
            **self.eln_instance.value,
        )

        # An existing configuration fixes the ELN type; only new entries may choose.
        if self.eln_instance.value:
            self.eln_types.value = self.eln.eln_type
            self.eln_types.disabled = True
        else:
            self.eln_types.disabled = False

        with self._output:
            clear_output()
            display(self.eln)

    def save_eln_configuration(self, _=None):
        """Persist the connector's current settings under its instance name."""
        config = self.eln.get_config()
        eln_instance = config.pop("eln_instance")
        if eln_instance:
            self.update_eln_configuration(eln_instance, config)
            self.update_list_of_elns()
|
12,386 | 6b8249e774581593f74cafb9922ac42247bd8be7 | # Copyright 2018 PIQuIL - All Rights Reserved
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from collections import Counter, OrderedDict
from operator import itemgetter
import torch
__all__ = [
"Sampler",
"TractableSampler",
"DataSampler"
]
class Sampler:
    """Abstract base class defining the sampler interface.

    Concrete subclasses override the methods below; the bodies here are
    intentionally empty placeholders (they return ``None``).
    """

    def probability_ratio(self, a, b):
        r"""Compute the elementwise ratio of the probabilities of ``a`` and ``b``:

        .. math:: \frac{p(a)}{p(b)}

        :param a: Batch of samples whose probabilities form the numerator.
        :type a: torch.Tensor
        :param b: Batch of samples whose probabilities form the denominator;
            must have the same shape as ``a``.
        :type b: torch.Tensor
        :return: The elementwise probability ratios of the inputs.
        :rtype: torch.Tensor
        """

    def log_probability_ratio(self, a, b):
        r"""Compute the natural logarithm of the probability ratio of ``a`` and ``b``:

        .. math:: \log\left(\frac{p(a)}{p(b)}\right) = \log(p(a)) - \log(p(b))

        :param a: Batch of samples whose probabilities form the numerator.
        :type a: torch.Tensor
        :param b: Batch of samples whose probabilities form the denominator;
            must have the same shape as ``a``.
        :type b: torch.Tensor
        :return: The elementwise logarithms of the probability ratios.
        :rtype: torch.Tensor
        """

    def sample(self, num_samples, **kwargs):
        r"""Generate samples from the sampler.

        :param num_samples: The number of samples to generate.
        :type num_samples: int
        :param \**kwargs: Sampler-specific keyword arguments.
        :returns: Samples drawn from the sampler.
        :rtype: torch.Tensor
        """
class TractableSampler(Sampler):
    """Base class for samplers with tractable probability densities.

    Subclasses provide :meth:`probability` and :meth:`log_probability`;
    the ratio methods are derived from them.
    """

    def probability(self, samples):
        r"""Compute the probabilities of the given samples.

        :param samples: A batch of samples.
        :type samples: torch.Tensor
        :return: The probabilities of the samples.
        :rtype: torch.Tensor
        """

    def log_probability(self, samples):
        r"""Compute the natural logarithm of the probabilities of the samples.

        :param samples: A batch of samples.
        :type samples: torch.Tensor
        :return: The log-probabilities of the samples.
        :rtype: torch.Tensor
        """

    def probability_ratio(self, a, b):
        # p(a) / p(b), derived from the explicit densities.
        return self.probability(a) / self.probability(b)

    def log_probability_ratio(self, a, b):
        # log p(a) - log p(b)
        return self.log_probability(a) - self.log_probability(b)
class DataSampler(TractableSampler):
    """Concrete TractableSampler which draws samples from an empirical dataset.

    :param data: The dataset to sample from; shape (num_rows, sample_size).
    :type data: torch.Tensor
    """

    def __init__(self, data):
        data = torch.tensor(data)
        self.device = data.device
        self.dtype = data.dtype
        self.sample_size = data.size()[-1]

        # Count how often each distinct configuration occurs.
        # NOTE(review): Tensor.numpy() requires a CPU tensor — confirm callers
        # never pass CUDA data.
        freq = Counter(tuple(row.numpy()) for row in data)

        total = float(sum(freq.values()))
        # Empirical probabilities, ordered from least to most frequent.
        freq = sorted(freq.items(), key=itemgetter(1))
        self.probs = OrderedDict((k, v / total) for k, v in freq)

        # Cumulative distribution, in the same order as ``self.probs``.
        # BUGFIX(perf): the original recomputed the prefix sum from scratch for
        # every entry (O(n^2)); a single running sum is equivalent and O(n).
        self.cdf = OrderedDict()
        cumulative_prob = 0.0
        for k, p in self.probs.items():
            cumulative_prob += p
            self.cdf[k] = cumulative_prob

    def sample(self, num_samples, dtype=torch.float):
        """Draw ``num_samples`` configurations by inverse-CDF sampling.

        :param num_samples: Number of samples to draw.
        :param dtype: dtype of the internal uniform draws.
        :return: Tensor of shape (num_samples, sample_size).
        """
        unif = torch.rand(num_samples, device=self.device, dtype=dtype)
        samples = torch.zeros(num_samples, self.sample_size,
                              device=self.device, dtype=self.dtype)
        for i in range(num_samples):
            # The first CDF entry exceeding the uniform draw selects the sample.
            for k, p in self.cdf.items():
                if unif[i] < p:
                    samples[i] = torch.tensor(k, device=self.device,
                                              dtype=self.dtype)
                    break
        return samples

    def probability(self, samples):
        """Return the empirical probability of each row of ``samples``.

        Configurations absent from the dataset get probability zero.
        """
        sample_probs = torch.zeros(samples.size()[0],
                                   device=samples.device,
                                   dtype=samples.dtype)
        for i, sample in enumerate(samples):
            key = tuple(sample.numpy())
            sample_probs[i] = self.probs.get(key, 0.0)
        return sample_probs

    def log_probability(self, samples):
        """Return the natural log of the empirical probabilities."""
        return self.probability(samples).log()
|
12,387 | 61dec3be7345236553c19d46eac221f3f66502d8 | # 16th program - copyfile
import sys
# Copy the file named by argv[1] to argv[2], then echo the destination's
# contents back to the user.
if len(sys.argv) == 3:
    src_path = sys.argv[1]
    dst_path = sys.argv[2]
    # BUGFIX: use context managers so the handles are closed even when an
    # exception occurs (the original left all three files open on error).
    with open(src_path, "r") as src, open(dst_path, "w") as dst:
        dst.write(src.read())
    print("File copied successfully\nDestination data: ")
    with open(dst_path, "r") as dst:
        print(dst.read())
elif len(sys.argv) > 3:
    print("Extra arguments entered")
else:
    print("Insufficient data provided")
|
12,388 | 93e441996fae26eae28a9005f09a05cb54e44bf9 | # This script generates the texture containing the triangle patches for the
# triangle wave VFX.
# Needs Pillow to run: pip install pillow
# The script is not perfect, there's artifacts between the triangle patches.
import numpy as np
from PIL import Image
# Adjust these values to change resolution and number of triangles.
size_x = 1000
size_y = 1400
num_tris_x = 10
num_tris_y = 14
data = np.zeros((size_x, size_y, 3), dtype=np.uint8)
def sdTri(p_x, p_y):
    """Signed distance from the point (p_x, p_y) to an equilateral triangle.

    Negative inside the triangle, positive outside.
    """
    sqrt3 = np.sqrt(3.0)
    # Fold into one half-plane and shift relative to the triangle's edge.
    q_x = np.abs(p_x) - 1.0
    q_y = p_y + 1.0 / sqrt3
    # Reflect points lying beyond the slanted edge back across it.
    if q_x + sqrt3 * q_y > 0.0:
        q_x, q_y = (q_x - sqrt3 * q_y) / 2.0, (-sqrt3 * q_x - q_y) / 2.0
    q_x -= np.clip(q_x, -2.0, 0.0)
    return -np.linalg.norm([q_x, q_y]) * np.sign(q_y)
# Size of one triangle's bounding box in pixels.
bb_x = size_x / num_tris_x
bb_y = size_y / num_tris_y
# Per-triangle random brightness offset in [-0.1, 0.1).
offset = 0.2*np.random.rand(num_tris_x, num_tris_y) - 0.1
for x in range(size_x):
    # Triangle-grid column of this pixel.
    t_x = int(x / size_x * num_tris_x)
    p_x = 2.0 * (x - t_x * size_x / num_tris_x) / bb_x - 1.0 # rel coords
    for y in range(size_y):
        # Triangle-grid row of this pixel.
        t_y = int(y / size_y * num_tris_y)
        p_y = np.sqrt(3.0) * (y - t_y * size_y / num_tris_y) / bb_y + (2.0 / np.sqrt(3.0) - np.sqrt(3.0))# rel coords
        # Signed distance to the triangle patch; negative means inside.
        d_t = sdTri(p_x, p_y)
        if d_t < 0.0:
            # NOTE(review): d_t is negative here, so d_t * 255 wraps around
            # when stored into the uint8 green channel — presumably the
            # intended gradient effect, but confirm.
            data[x, y][1] = d_t * 255
            # Red channel: distance of the cell from the grid center,
            # normalized to [0, 255].
            dtx = np.linalg.norm([t_x - 0.5*num_tris_x, t_y - 0.5*num_tris_y])
            data[x, y][0] = dtx * 255 / np.linalg.norm([0.5*num_tris_x, 0.5*num_tris_y])
            # add a random offset
            data[x, y][0] = np.clip(data[x, y][0] + offset[t_x, t_y] * 255, 0, 255)
# Due to symmetry, we can simply flip + shift
data += np.roll(np.fliplr(data), int(0.5*size_x/num_tris_x), axis=0)
image = Image.fromarray(data)
image.save("triangles.png") |
def cointoss(num_tosses=5000):
    """Simulate fair coin tosses and print the tally.

    :param num_tosses: number of tosses to simulate; defaults to 5000,
        matching the original hard-coded count.
    :return: tuple ``(heads, tails)`` with ``heads + tails == num_tosses``.
    """
    import random
    heads = 0
    tails = 0
    for _ in range(num_tosses):
        if random.randint(1, 2) == 1:
            heads += 1
        else:
            tails += 1
    # BUGFIX: print() with a single string argument works on both Python 2
    # and 3; the original used the Python-2-only statement form.
    print("there were {} heads and {} tails.".format(heads, tails))
    return heads, tails

cointoss()
12,390 | c53610992b8f0d16e6d2a8c64410cf72461bb0bd | import numpy as np
from multiprocessing import Process, Value, Pool, Lock, Queue
import time
class Knn2:
    """k-nearest-neighbours classifier over a labelled numpy dataset.

    :param data: 2-D numpy array; columns [:-1] are features, the last
        column is the integer class label.
    :param k: number of neighbours consulted per prediction.
    :param kwargs: extra options, stored for compatibility but unused here.
    """

    def __init__(self, data, k, **kwargs):
        self.kwargs = kwargs
        self.data = data
        self.k = k

    def classify(self, tupla):
        """Return the majority label among the k training rows closest to ``tupla``."""
        # Euclidean distance from the query to every training row (vectorized).
        # (The original carried commented-out debug prints here; removed.)
        dists = np.sqrt(np.sum((self.data[:, :-1] - tupla) ** 2, axis=1))
        nearest = self.data[np.argsort(dists)[:self.k]]
        # Majority vote over the labels of the k nearest rows.
        return np.bincount(nearest[:, -1].astype(int)).argmax()

    def distance(self, a, b):
        """Metric currently in use (Euclidean)."""
        return self.euclidean_distance(a, b)

    def euclidean_distance(self, a, b):
        """Euclidean distance between vectors ``a`` and ``b``."""
        return np.sqrt(np.sum((a - b) ** 2))
12,391 | 1dada32c35e4f3ef9d2755031ef6ba8e3e033093 | # Created by MechAviv
# Quest ID :: 34933
# Not coded yet
# NOTE(review): ``sm`` is supplied by the game server's script engine; the
# exact semantics of these calls are not visible here, so descriptions are
# assumptions to be confirmed against the script-manager API.
# Dialogue setup: presumably selects NPC 3001510 as speaker and styles the
# chat box so the player appears as the speaker — TODO confirm.
sm.setSpeakerID(3001510)
sm.setSpeakerType(3)
sm.flipDialogue()
sm.setBoxChat()
sm.boxChatPlayerAsSpeaker()
sm.setBoxOverrideSpeaker()
sm.flipBoxChat()
sm.flipBoxChatPlayerAsSpeaker()
sm.setColor(1)
# Show the closing line, then mark quest 34933 as completed.
sm.sendNext("#face4#All clear. Perfect! Let's get out of here!")
sm.completeQuest(34933)
# Update Quest Record EX | Quest ID: [15710] | Data: lasttime=19/03/09/15/15
# Unhandled Stat Changed [130064] Packet: 00 00 10 FC 01 00 00 00 00 00 1C 00 00 00 2C 03 00 00 2C 03 00 00 2F 02 00 00 2F 02 00 00 28 00 01 01 16 00 00 00 51 08 00 00 00 00 00 00 FF 00 00 00 00
# Experience reward observed in the packet capture.
sm.giveExp(8342)
# Update Quest Record EX | Quest ID: [34995] | Data: 00=h1;10=h0;01=h0;11=h0;02=h0;12=h0;13=h0;04=h0;23=h0;14=h0;05=h0;24=h0;15=h0;06=h0;16=h0;07=h0;17=h0;09=h0
# Update Quest Record EX | Quest ID: [34995] | Data: 00=h1;10=h0;01=h0;11=h0;02=h0;12=h0;13=h1;04=h0;23=h0;14=h0;05=h0;24=h0;15=h0;06=h0;16=h0;07=h0;17=h0;09=h0
# [SET_PARTNER] [01 A6 CC 2D 00 5D BD C4 04 00 ]
# [START_NAVIGATION] [F9 0A F6 17 00 00 00 00 00 00 ]
|
12,392 | b24c98ade315f6846bce4a08e63e010b7aecf132 | from NeuralNetwork import NeuralNetwork
from MySingletons import MyDevice
import numpy as np
import torch
class AutoEncoder(NeuralNetwork):
    """Auto-encoder specialization of NeuralNetwork.

    All hidden layers and the output use sigmoid activations and the loss is
    MSE. The middle entry of ``layers`` is treated as the latent space.
    """

    # Biases used exclusively by greedy layer-wise pretraining.
    _greedy_layer_bias = None
    _greedy_layer_output_bias = None

    @property
    def latent_space(self):
        """Activations of the bottleneck layer from the latest forward pass."""
        return self.layer_value[self.latent_space_position]

    @property
    def latent_space_size(self):
        """Number of nodes in the bottleneck layer."""
        return self.layers[self.latent_space_position]

    @property
    def latent_space_position(self):
        """Index of the middle (bottleneck) layer; assumes a symmetric topology."""
        return int((len(self.layers) - 1) / 2)

    def __init__(self, layers=None):
        """Build the network from a list of layer sizes.

        BUGFIX: the original used the mutable default argument ``layers=[]``;
        a ``None`` sentinel preserves the same behavior safely.
        """
        NeuralNetwork.__init__(self, [] if layers is None else layers)
        for i in range(self.number_hidden_layers):
            self.activation_function[i] = self.ACTIVATION_FUNCTION_SIGMOID
        self.output_activation_function = self.ACTIVATION_FUNCTION_SIGMOID
        self.loss_function = self.LOSS_FUNCTION_MSE

    def train(self, x: torch.tensor, is_tied_weight: bool = False, noise_ratio: float = 0.0, weight_number: int = None, y: torch.tensor = None):
        """Train the auto-encoder on ``x``.

        :param is_tied_weight: if True, mirror encoder weights onto the decoder.
        :param noise_ratio: fraction of inputs zeroed by masking noise.
        :param weight_number: forwarded to NeuralNetwork.train as ``weight_no``.
        :param y: optional explicit reconstruction target; defaults to ``x``.
        """
        if is_tied_weight:
            # Mirror the first half of the encoder onto the decoder.
            for i in range(int(self.number_hidden_layers/2)):
                if i == 0:
                    self.output_weight = self.weight[i].T
                else:
                    self.weight[-i] = self.weight[i].T
        if y is None:
            y = x
        NeuralNetwork.train(self, x=self.masking_noise(x=x, noise_ratio=noise_ratio), y=y, weight_no=weight_number)

    def test(self, x: torch.tensor, is_beta_updatable: bool = False, y: torch.tensor = None):
        """Evaluate reconstruction of ``x`` (target defaults to ``x`` itself)."""
        if y is None:
            y = x
        return NeuralNetwork.test(self, x=x, y=y, is_beta_updatable=is_beta_updatable)

    def grow_node(self, layer_number):
        """Add a node to ``layer_number`` and grow the matching greedy bias."""
        NeuralNetwork.grow_node(self, layer_number)
        self.grow_greedy_layer_bias(layer_number)

    def prune_node(self, layer_number, node_number):
        """Remove a node and the matching greedy bias entry."""
        NeuralNetwork.prune_node(self, layer_number, node_number)
        self.prune_greedy_layer_bias(layer_number, node_number)

    def grow_greedy_layer_bias(self, layer_number):
        """Extend the greedy-pretraining bias of ``layer_number`` by one node."""
        b = layer_number
        # BUGFIX: compare ints with == (the original used ``is``, which only
        # works through CPython's small-integer caching).
        if b == self.number_hidden_layers:
            [n_out, n_in] = self._greedy_layer_output_bias.shape
            self._greedy_layer_output_bias = torch.cat((self._greedy_layer_output_bias, self.xavier_weight_initialization(1, 1)), axis=1)
        else:
            [n_out, n_in] = self._greedy_layer_bias[b].shape
            n_in = n_in + 1
            # NOTE(review): np.append on a torch tensor yields a numpy array;
            # confirm downstream code tolerates the type change.
            self._greedy_layer_bias[b] = np.append(self._greedy_layer_bias[b], self.xavier_weight_initialization(n_out, n_in, shape=(n_out, 1)))

    def grow_layer(self, option, number_of_nodes):
        raise TypeError('Not implemented')

    def prune_greedy_layer_bias(self, layer_number, node_number):
        """Drop one node's entry from the greedy-pretraining bias."""
        def remove_nth_element(greedy_bias_tensor, n):
            # Rebuild the (1, k) bias tensor without column n.
            bias_tensor = torch.cat([greedy_bias_tensor[0][:n], greedy_bias_tensor[0][n + 1:]])
            return bias_tensor.view(1, bias_tensor.shape[0])

        b = layer_number  # readability
        n = node_number  # readability
        # BUGFIX: int comparison with == instead of ``is`` (see grow_greedy_layer_bias).
        if b == self.number_hidden_layers:
            self._greedy_layer_output_bias = remove_nth_element(self._greedy_layer_output_bias, n)
        else:
            self._greedy_layer_bias[b] = remove_nth_element(self._greedy_layer_bias[b], n)

    def greedy_layer_wise_pretrain(self, x: torch.tensor, number_epochs: int = 1, is_tied_weight: bool = False,
                                   noise_ratio: float = 0.0):
        """Pretrain each layer as a shallow auto-encoder, one layer at a time.

        For every layer a throwaway 3-layer helper network is built, trained
        for ``number_epochs`` epochs on that layer's activations, and its
        weights are copied back into this network.
        """
        for i in range(len(self.layers) - 1):
            # Build the single-hidden-layer helper network for layer i.
            if i > self.number_hidden_layers:
                nn = NeuralNetwork([self.layers[i], self.layers[-1], self.layers[i]], init_weights=False)
            else:
                nn = NeuralNetwork([self.layers[i], self.layers[i + 1], self.layers[i]], init_weights=False)
            nn.activation_function[0] = nn.ACTIVATION_FUNCTION_SIGMOID
            nn.output_activation_function = nn.ACTIVATION_FUNCTION_SIGMOID
            nn.loss_function = nn.LOSS_FUNCTION_MSE
            nn.momentum_rate = 0

            if i >= self.number_hidden_layers:
                # Output layer: seed the helper from the output weights.
                nn.weight[0] = self.output_weight.clone()
                nn.bias[0] = self.output_bias.clone()
                nn.output_weight = self.output_weight.T.clone()
                if self._greedy_layer_output_bias is None:
                    nodes_after = nn.layers[-1]
                    self._greedy_layer_output_bias = self.xavier_weight_initialization(1, nodes_after)
                nn.output_bias = self._greedy_layer_output_bias.clone()
            else:
                nn.weight[0] = self.weight[i].clone()
                nn.bias[0] = self.bias[i].clone()
                nn.output_weight = self.weight[i].T.clone()
                try:
                    nn.output_bias = self._greedy_layer_bias[i].detach()
                except (TypeError, IndexError):
                    # Bias for this layer does not exist yet: create it lazily.
                    nodes_after = nn.layers[-1]
                    if self._greedy_layer_bias is None:
                        self._greedy_layer_bias = []
                    self._greedy_layer_bias.append(self.xavier_weight_initialization(1, nodes_after))
                    nn.output_bias = self._greedy_layer_bias[i].clone()

            for j in range(0, number_epochs):
                # Train on this layer's activations of the (possibly noisy) input.
                training_x = self.forward_pass(x=x).layer_value[i].detach()
                nn.train(x=self.masking_noise(x=training_x, noise_ratio=noise_ratio), y=training_x)

            # Copy the pretrained weights back into the full network.
            if i >= self.number_hidden_layers:
                self.output_weight = nn.weight[0].clone()
                self.output_bias = nn.bias[0].clone()
            else:
                self.weight[i] = nn.weight[0].clone()
                self.bias[i] = nn.bias[0].clone()

    def update_weights_kullback_leibler(self, Xs, Xt, gamma=0.0001):
        """KL-regularized weight update; source/target serve as their own labels."""
        loss = NeuralNetwork.update_weights_kullback_leibler(self, Xs, Xs, Xt, Xt, gamma)
        return loss

    def compute_evaluation_window(self, x):
        raise TypeError('Not implemented')

    def compute_bias(self, y):
        """Mean squared difference between the running mean output Ey and ``y``."""
        return torch.mean((self.Ey.T - y) ** 2)

    @property
    def network_variance(self):
        """Mean output variance, E[y^2] - E[y]^2."""
        return torch.mean(self.Ey2 - self.Ey ** 2)
class DenoisingAutoEncoder(AutoEncoder):
    """Auto-encoder trained on masking-noise-corrupted inputs."""

    def __init__(self, layers=None):
        # BUGFIX: avoid the shared mutable default argument (was ``layers=[]``).
        layers = [] if layers is None else layers
        AutoEncoder.__init__(self, layers)
        # FIXME: The lines below are just to build the greedy_layer_bias.
        # Find a more intuitive way to perform it.
        random_x = np.random.rand(layers[0])
        random_x = torch.tensor(np.atleast_2d(random_x), dtype=torch.float, device=MyDevice().get())
        self.greedy_layer_wise_pretrain(x=random_x, number_epochs=0)

    def train(self, x: torch.tensor, noise_ratio: float = 0.0, is_tied_weight: bool = False, weight_number: int = None, y: torch.tensor = None):
        """Train on ``x`` after masking-noise corruption (see AutoEncoder.train)."""
        AutoEncoder.train(self, x=x, noise_ratio=noise_ratio, is_tied_weight=is_tied_weight, weight_number=weight_number, y=y)

    def greedy_layer_wise_pretrain(self, x: torch.tensor, number_epochs: int = 1, is_tied_weight: bool = False, noise_ratio: float = 0.0, y: torch.tensor = None):
        """Layer-wise pretraining; ``y`` is accepted for signature compatibility but unused."""
        AutoEncoder.greedy_layer_wise_pretrain(self, x=x, number_epochs=number_epochs, is_tied_weight=is_tied_weight, noise_ratio=noise_ratio)
12,393 | 67599a3e1fd0bf9668cd0a3fef44140fcf293458 | import pygame
from pygame.locals import *
def draw(display_surf, image_surf):
    """Render the maze: wall tiles from ``image_surf``, start tiles from ``p1st_surf``."""
    for idx in range(M * N):
        # Convert the flat maze index into grid coordinates.
        by, bx = divmod(idx, M)
        cell = maze3[bx + (by * M)]
        if cell == 1:
            # Wall tile; remember its pixel position for collision checks.
            display_surf.blit(image_surf, (bx * block_width, by * block_width))
            blocks.append([bx * block_width, by * block_width])
        if cell == 3:
            # Start/exit tile.
            display_surf.blit(p1st_surf, (bx * block_width, by * block_width))
def checkplayer(px, py):
if [px,py] == p2_start:
print 'winner!'
# --- Global game configuration -------------------------------------------
blocks = []  # pixel positions of wall tiles, filled by draw() for collision checks
# NOTE(review): the "14 rows"/"18 columns" comments look stale — 810 px at
# block_width 10 matches the 81x81 grid declared below; confirm and update.
window_height = 810 # 14 rows
window_width = 810 # 18 columns
M = 81 # columns
N = 81 # rows
block_width = 10  # tile edge length in pixels
vel = 10  # player movement step in pixels
# x = 0
# y = 10
x = 780
y = 790
p1_start = [x,y]  # player 1 start position (pixels)
p2x = 800
p2y = 790
p2_start = [p2x, p2y]  # player 2 start; reaching it triggers the win message
player_width = 10
maze3 = [ 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
3,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,
1,0,1,0,1,1,1,0,1,1,1,0,1,0,1,1,1,1,1,1,1,0,1,0,1,1,1,1,1,0,1,0,1,1,1,0,1,1,1,0,1,1,1,1,1,1,1,0,1,0,1,0,1,1,1,1,1,1,1,1,1,1,1,0,1,1,1,0,1,0,1,0,1,1,1,1,1,1,1,0,1,
1,0,1,0,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,1,0,1,0,0,0,1,0,1,0,1,0,0,0,0,0,1,0,0,0,0,0,1,0,1,0,1,0,1,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,1,0,1,0,1,0,0,0,1,0,1,0,1,
1,0,1,0,1,1,1,1,1,0,1,1,1,1,1,1,1,1,1,0,1,1,1,1,1,0,1,0,1,1,1,0,1,0,1,1,1,1,1,1,1,0,1,1,1,1,1,0,1,1,1,0,1,0,1,1,1,0,1,0,1,1,1,1,1,0,1,1,1,0,1,0,1,0,1,0,1,0,1,0,1,
1,0,1,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,1,0,1,0,1,0,0,0,1,0,1,0,0,0,1,0,1,0,0,0,0,0,1,0,1,0,1,0,0,0,1,
1,0,1,0,1,0,1,1,1,1,1,1,1,0,1,1,1,1,1,0,1,1,1,1,1,0,1,1,1,1,1,1,1,0,1,1,1,1,1,0,1,0,1,0,1,0,1,1,1,0,1,0,1,0,1,0,1,1,1,0,1,1,1,0,1,0,1,0,1,1,1,1,1,0,1,0,1,0,1,1,1,
1,0,1,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,1,0,1,0,0,0,1,0,1,0,0,0,1,0,0,0,1,0,0,0,0,0,1,0,1,0,1,0,0,0,0,0,1,0,1,0,0,0,1,
1,0,1,1,1,0,1,1,1,0,1,0,1,1,1,1,1,0,1,1,1,1,1,0,1,1,1,0,1,1,1,0,1,0,1,1,1,1,1,1,1,0,1,0,1,1,1,0,1,0,1,1,1,1,1,0,1,0,1,1,1,1,1,0,1,0,1,0,1,0,1,1,1,1,1,0,1,1,1,0,1,
1,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,1,0,1,0,1,0,0,0,0,0,1,0,1,0,0,0,1,0,0,0,1,0,0,0,1,0,1,0,0,0,0,0,0,0,1,0,1,0,1,0,1,0,0,0,0,0,1,0,0,0,1,
1,1,1,0,1,1,1,1,1,0,1,1,1,1,1,1,1,1,1,0,1,0,1,0,1,0,1,1,1,1,1,0,1,0,1,0,1,1,1,0,1,0,1,1,1,0,1,1,1,1,1,0,1,0,1,1,1,1,1,1,1,1,1,1,1,0,1,1,1,0,1,0,1,1,1,1,1,1,1,1,1,
1,0,1,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,1,0,1,0,0,0,0,0,1,0,1,0,1,0,0,0,1,0,1,0,0,0,1,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,1,
1,0,1,0,1,0,1,1,1,1,1,0,1,1,1,1,1,1,1,1,1,0,1,1,1,1,1,1,1,0,1,0,1,0,1,0,1,0,1,1,1,0,1,0,1,0,1,1,1,1,1,0,1,1,1,0,1,0,1,1,1,1,1,0,1,1,1,1,1,0,1,0,1,0,1,1,1,1,1,0,1,
1,0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,1,0,1,0,1,0,1,0,0,0,1,0,1,0,1,0,1,0,0,0,0,0,1,0,1,0,0,0,1,0,0,0,1,0,0,0,0,0,1,0,1,0,0,0,1,0,0,0,1,0,1,
1,0,1,1,1,0,1,1,1,0,1,1,1,1,1,1,1,1,1,0,1,1,1,0,1,0,1,1,1,1,1,0,1,0,1,0,1,1,1,0,1,0,1,1,1,0,1,0,1,1,1,1,1,0,1,1,1,1,1,0,1,1,1,1,1,1,1,0,1,0,1,1,1,1,1,0,1,0,1,0,1,
1,0,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0,1,0,1,0,0,0,0,0,1,0,0,0,1,0,1,0,0,0,0,0,1,0,1,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,1,0,1,0,0,0,1,
1,1,1,0,1,1,1,0,1,1,1,0,1,1,1,1,1,1,1,1,1,0,1,1,1,0,1,0,1,0,1,1,1,1,1,0,1,1,1,0,1,1,1,1,1,1,1,0,1,0,1,0,1,1,1,1,1,0,1,0,1,0,1,1,1,1,1,0,1,1,1,1,1,1,1,0,1,1,1,0,1,
1,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,1,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,1,0,1,0,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,1,0,1,0,1,
1,0,1,1,1,1,1,1,1,0,1,1,1,0,1,1,1,1,1,0,1,1,1,1,1,1,1,0,1,0,1,1,1,1,1,1,1,0,1,0,1,1,1,1,1,0,1,1,1,0,1,0,1,0,1,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,1,0,1,0,1,0,1,0,1,
1,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,1,0,0,0,1,0,1,0,1,
1,0,1,0,1,1,1,0,1,1,1,1,1,1,1,1,1,0,1,1,1,1,1,0,1,0,1,0,1,1,1,0,1,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,1,1,1,0,1,0,1,1,1,1,1,1,1,0,1,0,1,1,1,0,1,0,1,1,1,1,1,0,1,0,1,
1,0,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,1,0,0,0,1,0,1,0,1,0,0,0,0,0,1,0,1,
1,0,1,1,1,0,1,0,1,1,1,1,1,1,1,0,1,0,1,0,1,1,1,1,1,1,1,0,1,0,1,1,1,1,1,0,1,1,1,1,1,1,1,0,1,0,1,1,1,0,1,1,1,1,1,1,1,0,1,1,1,0,1,1,1,0,1,1,1,0,1,0,1,0,1,0,1,1,1,0,1,
1,0,1,0,0,0,1,0,1,0,0,0,0,0,1,0,1,0,1,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,1,0,1,0,0,0,1,0,0,0,1,0,1,0,0,0,1,
1,1,1,0,1,0,1,0,1,0,1,1,1,0,1,0,1,0,1,0,1,1,1,0,1,1,1,0,1,1,1,1,1,1,1,1,1,0,1,0,1,0,1,1,1,1,1,1,1,1,1,0,1,1,1,0,1,1,1,0,1,1,1,0,1,1,1,0,1,1,1,0,1,1,1,0,1,0,1,1,1,
1,0,0,0,1,0,1,0,1,0,1,0,1,0,0,0,1,0,1,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,1,0,1,0,0,0,0,0,1,0,0,0,1,0,1,0,0,0,0,0,1,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,1,0,1,0,1,
1,0,1,1,1,1,1,0,1,0,1,0,1,1,1,1,1,0,1,1,1,0,1,0,1,1,1,1,1,0,1,1,1,0,1,1,1,1,1,1,1,0,1,0,1,1,1,0,1,0,1,1,1,0,1,1,1,1,1,1,1,0,1,1,1,1,1,1,1,1,1,1,1,0,1,0,1,0,1,0,1,
1,0,1,0,0,0,1,0,0,0,1,0,1,0,0,0,1,0,1,0,0,0,1,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,1,0,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,1,
1,0,1,0,1,0,1,1,1,1,1,0,1,0,1,0,1,0,1,0,1,1,1,1,1,1,1,0,1,1,1,1,1,0,1,1,1,0,1,1,1,1,1,1,1,0,1,1,1,1,1,0,1,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,0,1,1,1,1,1,1,1,1,1,0,1,
1,0,0,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,1,0,1,0,0,0,1,0,0,0,0,0,1,0,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,0,1,0,1,0,1,0,0,0,0,0,0,0,1,0,1,
1,0,1,1,1,0,1,1,1,0,1,0,1,1,1,1,1,1,1,1,1,0,1,0,1,1,1,1,1,0,1,0,1,1,1,1,1,0,1,0,1,0,1,0,1,1,1,0,1,0,1,0,1,0,1,0,1,0,1,0,1,1,1,1,1,0,1,0,1,1,1,0,1,1,1,1,1,0,1,0,1,
1,0,1,0,0,0,1,0,0,0,1,0,1,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,1,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,1,0,1,0,1,0,1,0,0,0,1,0,0,0,1,0,0,0,1,0,1,0,0,0,1,0,0,0,0,0,1,0,1,0,1,
1,1,1,0,1,1,1,0,1,1,1,0,1,0,1,0,1,1,1,0,1,1,1,1,1,1,1,0,1,0,1,1,1,1,1,0,1,0,1,1,1,1,1,1,1,0,1,0,1,0,1,0,1,0,1,1,1,1,1,1,1,0,1,0,1,1,1,1,1,0,1,1,1,1,1,0,1,0,1,0,1,
1,0,0,0,1,0,1,0,1,0,0,0,1,0,1,0,0,0,1,0,1,0,0,0,0,0,1,0,1,0,0,0,0,0,1,0,1,0,1,0,0,0,0,0,1,0,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,1,
1,0,1,1,1,0,1,0,1,0,1,1,1,1,1,1,1,0,1,1,1,0,1,0,1,0,1,0,1,1,1,1,1,0,1,0,1,0,1,0,1,1,1,0,1,1,1,1,1,0,1,1,1,1,1,0,1,1,1,1,1,1,1,0,1,1,1,1,1,1,1,0,1,0,1,1,1,1,1,1,1,
1,0,1,0,0,0,1,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,1,0,0,0,0,0,1,0,0,0,1,0,1,0,1,0,0,0,1,0,0,0,0,0,1,0,1,0,0,0,1,0,1,0,1,0,0,0,0,0,1,0,0,0,0,0,1,0,1,0,1,0,0,0,0,0,1,
1,0,1,1,1,0,1,0,1,1,1,1,1,1,1,0,1,0,1,1,1,1,1,0,1,1,1,1,1,0,1,0,1,1,1,0,1,0,1,1,1,0,1,1,1,1,1,0,1,1,1,0,1,0,1,0,1,0,1,0,1,1,1,1,1,0,1,1,1,0,1,0,1,0,1,0,1,1,1,1,1,
1,0,0,0,1,0,0,0,0,0,1,0,0,0,1,0,1,0,0,0,1,0,0,0,1,0,0,0,0,0,1,0,1,0,1,0,1,0,0,0,1,0,0,0,1,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,1,0,1,0,1,0,1,0,0,0,0,0,1,
1,0,1,0,1,1,1,1,1,0,1,0,1,0,1,0,1,1,1,0,1,0,1,1,1,0,1,1,1,1,1,0,1,0,1,0,1,1,1,1,1,1,1,0,1,0,1,1,1,1,1,1,1,1,1,1,1,0,1,1,1,0,1,1,1,1,1,1,1,0,1,0,1,0,1,1,1,1,1,0,1,
1,0,1,0,1,0,0,0,1,0,1,0,1,0,0,0,1,0,0,0,1,0,1,0,0,0,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,1,0,1,0,0,0,0,0,1,0,1,
1,0,1,0,1,0,1,0,1,1,1,0,1,1,1,1,1,1,1,1,1,0,1,0,1,1,1,0,1,0,1,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,0,1,0,1,0,1,1,1,0,1,1,1,1,1,0,1,1,1,1,1,0,1,1,1,1,1,0,1,0,1,
1,0,1,0,1,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,1,0,1,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,1,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,1,
1,0,1,0,1,0,1,1,1,1,1,1,1,1,1,1,1,0,1,1,1,1,1,0,1,0,1,1,1,0,1,1,1,1,1,1,1,0,1,0,1,0,1,0,1,1,1,0,1,1,1,0,1,1,1,1,1,1,1,0,1,0,1,1,1,1,1,0,1,1,1,1,1,1,1,0,1,1,1,0,1,
1,0,1,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0,1,0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,0,0,1,0,1,0,0,0,1,0,1,0,0,0,1,0,0,0,1,0,0,0,0,0,1,0,1,0,0,0,1,0,1,0,0,0,1,0,0,0,1,0,1,0,1,
1,0,1,0,1,1,1,1,1,1,1,1,1,0,1,0,1,1,1,0,1,0,1,1,1,1,1,1,1,1,1,0,1,0,1,1,1,0,1,0,1,1,1,1,1,0,1,1,1,0,1,1,1,1,1,0,1,1,1,1,1,0,1,0,1,0,1,0,1,0,1,0,1,0,1,1,1,0,1,0,1,
1,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,1,0,1,0,1,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,1,0,1,0,1,0,1,0,1,0,0,0,0,0,1,0,1,
1,0,1,1,1,1,1,1,1,0,1,1,1,1,1,1,1,0,1,1,1,1,1,1,1,1,1,1,1,0,1,0,1,1,1,0,1,1,1,0,1,0,1,0,1,1,1,0,1,1,1,1,1,1,1,1,1,0,1,0,1,1,1,1,1,0,1,1,1,0,1,0,1,1,1,1,1,1,1,0,1,
1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,1,0,1,0,0,0,1,0,1,0,0,0,1,0,0,0,1,
1,1,1,1,1,1,1,1,1,1,1,0,1,1,1,1,1,0,1,0,1,1,1,0,1,0,1,1,1,1,1,0,1,0,1,1,1,1,1,1,1,1,1,1,1,0,1,0,1,1,1,0,1,0,1,1,1,0,1,1,1,1,1,0,1,0,1,0,1,1,1,0,1,0,1,0,1,0,1,1,1,
1,0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,1,0,1,0,1,0,0,0,1,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,1,0,1,0,0,0,1,0,1,0,0,0,1,0,1,0,0,0,1,
1,0,1,0,1,0,1,0,1,0,1,1,1,0,1,0,1,1,1,0,1,0,1,1,1,0,1,0,1,1,1,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,1,1,1,1,1,0,1,1,1,1,1,0,1,1,1,1,1,0,1,1,1,0,1,
1,0,1,0,0,0,1,0,1,0,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,1,0,1,0,0,0,0,0,1,0,0,0,1,0,1,0,0,0,1,
1,0,1,1,1,1,1,0,1,0,1,1,1,1,1,0,1,0,1,1,1,1,1,1,1,1,1,0,1,0,1,1,1,0,1,1,1,1,1,1,1,1,1,0,1,0,1,1,1,1,1,1,1,1,1,1,1,0,1,0,1,0,1,0,1,0,1,0,1,1,1,0,1,0,1,0,1,0,1,1,1,
1,0,1,0,0,0,0,0,1,0,0,0,0,0,1,0,1,0,0,0,0,0,1,0,0,0,1,0,1,0,0,0,1,0,1,0,0,0,1,0,0,0,1,0,1,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0,1,0,0,0,1,0,1,0,0,0,1,0,1,0,1,0,1,0,1,
1,0,1,1,1,1,1,0,1,1,1,1,1,1,1,0,1,1,1,1,1,0,1,1,1,0,1,0,1,1,1,0,1,0,1,1,1,0,1,0,1,0,1,1,1,0,1,0,1,1,1,1,1,1,1,0,1,1,1,1,1,0,1,1,1,1,1,0,1,0,1,1,1,0,1,0,1,0,1,0,1,
1,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,1,0,1,0,0,0,0,0,1,0,1,0,0,0,1,0,1,0,1,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,1,0,1,0,1,0,1,0,1,0,1,0,1,
1,0,1,0,1,0,1,1,1,1,1,0,1,1,1,1,1,1,1,1,1,1,1,0,1,0,1,1,1,1,1,0,1,0,1,1,1,1,1,0,1,1,1,0,1,0,1,0,1,0,1,1,1,0,1,1,1,1,1,0,1,1,1,0,1,0,1,0,1,0,1,0,1,0,1,0,1,0,1,0,1,
1,0,0,0,1,0,1,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,1,0,1,0,0,0,0,0,1,0,1,0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,1,0,1,0,0,0,0,0,1,0,1,0,1,0,1,0,1,0,1,0,0,0,1,
1,0,1,1,1,0,1,0,1,0,1,1,1,0,1,1,1,1,1,1,1,0,1,0,1,0,1,0,1,1,1,0,1,1,1,0,1,0,1,1,1,0,1,0,1,1,1,1,1,1,1,0,1,1,1,0,1,0,1,0,1,0,1,1,1,1,1,1,1,0,1,0,1,0,1,0,1,1,1,0,1,
1,0,1,0,0,0,1,0,0,0,0,0,1,0,1,0,0,0,0,0,1,0,0,0,1,0,1,0,0,0,1,0,1,0,0,0,1,0,1,0,0,0,1,0,1,0,0,0,0,0,1,0,1,0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,1,0,1,0,0,0,1,0,1,0,0,0,1,
1,0,1,0,1,0,1,1,1,1,1,1,1,0,1,0,1,1,1,0,1,1,1,1,1,0,1,1,1,0,1,1,1,0,1,1,1,0,1,0,1,1,1,0,1,0,1,1,1,0,1,0,1,0,1,1,1,1,1,1,1,0,1,1,1,0,1,0,1,0,1,0,1,1,1,0,1,0,1,1,1,
1,0,1,0,1,0,1,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,1,0,1,0,0,0,0,0,1,0,1,0,1,0,1,0,0,0,1,0,1,0,1,0,0,0,1,0,0,0,0,0,0,0,1,0,1,0,0,0,1,0,0,0,1,0,1,0,0,0,1,0,1,0,1,
1,0,1,0,1,0,1,0,1,1,1,1,1,1,1,1,1,0,1,1,1,0,1,1,1,0,1,0,1,1,1,1,1,1,1,0,1,0,1,0,1,1,1,1,1,0,1,0,1,1,1,1,1,1,1,1,1,1,1,0,1,0,1,0,1,1,1,1,1,1,1,0,1,1,1,0,1,0,1,0,1,
1,0,1,0,1,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,1,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,1,0,1,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,1,0,1,
1,1,1,0,1,0,1,1,1,1,1,1,1,1,1,0,1,0,1,0,1,1,1,0,1,1,1,1,1,1,1,1,1,0,1,0,1,0,1,1,1,1,1,0,1,1,1,0,1,1,1,1,1,0,1,1,1,0,1,0,1,0,1,1,1,0,1,0,1,1,1,1,1,1,1,1,1,0,1,0,1,
1,0,0,0,1,0,0,0,0,0,1,0,0,0,1,0,1,0,1,0,0,0,1,0,1,0,0,0,0,0,0,0,1,0,1,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,1,0,1,0,1,0,0,0,0,0,0,0,1,0,1,0,1,
1,0,1,1,1,1,1,1,1,0,1,1,1,0,1,0,1,1,1,1,1,0,1,0,1,1,1,1,1,1,1,0,1,0,1,0,1,1,1,1,1,0,1,1,1,0,1,1,1,1,1,0,1,1,1,0,1,1,1,1,1,1,1,0,1,0,1,0,1,0,1,1,1,1,1,0,1,0,1,0,1,
1,0,1,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,1,0,1,0,1,0,1,0,1,0,1,0,0,0,1,0,1,0,1,
1,0,1,0,1,1,1,1,1,1,1,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,0,1,1,1,0,1,1,1,0,1,1,1,0,1,1,1,1,1,1,1,1,1,0,1,1,1,0,1,0,1,0,1,0,1,0,1,1,1,0,1,0,1,0,1,1,1,0,1,0,1,
1,0,1,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0,1,0,1,0,0,0,0,0,1,0,1,0,0,0,0,0,1,0,1,
1,0,1,1,1,0,1,0,1,0,1,1,1,0,1,1,1,1,1,1,1,1,1,1,1,0,1,1,1,0,1,0,1,1,1,1,1,0,1,1,1,0,1,0,1,1,1,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,0,1,1,1,1,1,1,1,0,1,1,1,1,1,1,1,0,1,
1,0,1,0,1,0,1,0,1,0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,0,0,1,0,1,0,0,0,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,1,0,1,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,1,
1,0,1,0,1,0,1,1,1,1,1,0,1,1,1,0,1,0,1,0,1,0,1,0,1,1,1,1,1,1,1,0,1,0,1,0,1,1,1,0,1,0,1,1,1,0,1,1,1,1,1,1,1,0,1,1,1,0,1,0,1,1,1,0,1,0,1,0,1,0,1,0,1,1,1,1,1,1,1,0,1,
1,0,1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,0,0,1,0,1,0,1,0,0,0,1,0,1,0,0,0,1,0,1,0,0,0,0,0,0,0,1,0,1,
1,0,1,1,1,1,1,1,1,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,0,1,0,1,0,1,1,1,1,1,1,1,1,1,0,1,0,1,1,1,0,1,0,1,1,1,1,1,0,1,0,1,1,1,0,1,0,1,1,1,1,1,0,1,1,1,1,1,1,1,0,1,0,1,
1,0,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,1,0,1,0,1,0,0,0,0,0,0,0,1,0,1,0,1,0,0,0,1,0,1,0,0,0,1,0,0,0,1,0,0,0,1,0,1,0,0,0,1,0,0,0,0,0,1,0,1,0,1,0,1,
1,0,1,0,1,0,1,1,1,0,1,0,1,1,1,1,1,0,1,1,1,0,1,0,1,1,1,1,1,0,1,0,1,0,1,1,1,1,1,0,1,1,1,0,1,1,1,0,1,0,1,0,1,0,1,0,1,1,1,0,1,1,1,0,1,0,1,0,1,1,1,1,1,0,1,0,1,0,1,0,1,
1,0,1,0,1,0,0,0,1,0,1,0,1,0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,1,0,1,0,1,0,0,0,1,0,1,0,0,0,1,0,0,0,1,0,1,0,0,0,1,0,1,0,0,0,0,0,1,0,0,0,1,0,1,0,0,0,0,0,0,0,1,0,0,0,1,0,1,
1,0,1,0,1,1,1,0,1,1,1,0,1,0,1,0,1,1,1,0,1,1,1,1,1,0,1,0,1,0,1,1,1,1,1,0,1,0,1,1,1,0,1,1,1,0,1,0,1,1,1,1,1,0,1,1,1,1,1,1,1,0,1,1,1,0,1,1,1,1,1,1,1,1,1,0,1,1,1,0,1,
1,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,3,
1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
]
# Level 1 layout, flattened row-major: 14 rows of 18 cells each.
# 1 = wall block, 0 = walkable floor.
# NOTE(review): row 2 starts with 0 and row 13 ends with 0 -- presumably the
# entry/exit gaps in the outer wall; confirm against the renderer.
maze = [ 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,
1,0,1,1,1,1,1,1,0,1,0,0,0,1,1,1,0,1,
1,0,1,0,0,0,0,0,0,1,0,0,1,0,0,0,0,1,
1,0,1,0,1,1,1,1,0,1,0,0,1,0,0,1,0,1,
1,0,0,0,0,0,0,0,0,1,0,0,1,1,1,1,0,1,
1,0,1,1,1,0,1,0,1,1,0,0,0,0,0,1,0,1,
1,0,1,0,1,0,0,1,0,0,0,0,1,0,0,1,0,1,
1,0,1,0,0,1,0,1,0,1,0,0,1,0,0,1,0,1,
1,0,0,0,1,0,1,0,0,0,0,0,1,0,0,1,0,1,
1,0,1,0,1,0,1,1,0,0,0,0,1,0,1,1,0,1,
1,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,
1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,]
# Level 2 layout, same encoding as `maze`: 14 rows x 18 cells, row-major;
# 1 = wall block, 0 = walkable floor.
maze2 = [ 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,
1,0,0,0,1,0,1,1,0,1,1,1,1,1,0,1,1,1,
1,1,1,1,1,0,1,0,0,0,0,0,1,0,0,0,0,1,
1,0,0,0,0,0,1,1,1,1,1,0,1,1,1,1,0,1,
1,0,1,0,1,1,1,1,0,1,0,0,1,0,0,0,0,1,
1,0,1,0,0,0,0,0,0,0,0,0,1,0,1,1,1,1,
1,0,1,1,1,0,1,0,1,1,0,0,0,0,0,1,0,1,
1,0,1,0,1,0,0,0,1,0,0,1,1,0,1,1,0,1,
1,0,1,0,1,1,1,0,1,1,0,1,1,0,1,0,0,1,
1,0,1,0,0,0,0,0,1,0,0,0,1,0,0,1,0,1,
1,0,1,1,1,1,1,0,1,0,0,1,1,1,1,1,0,1,
1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,
1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,]
# One-time pygame setup: window, sprites, and the initial scene draw.
# NOTE(review): window_width, window_height, x, y, p2x, p2y, blocks and
# draw() are defined earlier in this file (outside this section).
pygame.init()
display_surf = pygame.display.set_mode((window_width,window_height), pygame.HWSURFACE)
pygame.display.set_caption('Pygame pythonspot.com example')
running = True  # main-loop flag; cleared on the QUIT event below
# image_surf = pygame.image.load("player44.png")
image_surf = pygame.image.load("player_10.png")  # player 1 sprite
p2_surf = pygame.image.load("lyla_10.png")  # player 2 sprite
block_surf = pygame.image.load("block10.png")  # wall tile
p1st_surf = pygame.image.load("start10.png")  # start markers (not used in the loop below)
p2st_surf = pygame.image.load("start10.png")
# Initial frame: black background, both players, then the maze blocks.
display_surf.fill((0,0,0))
display_surf.blit(image_surf,(x,y))
display_surf.blit(p2_surf,(p2x,p2y))
draw(display_surf, block_surf)
# print (blocks)
pygame.display.flip()
# Main game loop: poll events, move both players (screen bounds + wall
# collision), then redraw the scene each frame.
while running:
    pygame.time.delay(50)  # ~50 ms per frame
    for event in pygame.event.get():  # drain pending keyboard/mouse/window events
        if event.type == pygame.QUIT:  # window close button clicked
            running = False  # ends the game loop
    keys = pygame.key.get_pressed()
    # Player 1 (arrow keys): a move is allowed only while it stays on screen
    # and the destination cell is not a wall block.
    if keys[pygame.K_LEFT] and x > vel and [x-vel,y] not in blocks:
        x -= vel
        checkplayer(x,y)  # NOTE(review): only the LEFT move calls checkplayer -- confirm whether the other directions should too
    # BUG FIX: the right move previously had no screen-edge bound, so player 1
    # could walk off the right side; mirror the check player 2 uses for K_d.
    if keys[pygame.K_RIGHT] and x < window_width - vel - player_width and [x+vel,y] not in blocks:
        x += vel
    if keys[pygame.K_UP] and y > vel and [x,y-vel] not in blocks:
        y -= vel
    if keys[pygame.K_DOWN] and y < window_height - vel - player_width and [x,y+vel] not in blocks:
        y += vel
    # Player 2 (WASD), same movement rules.
    if keys[pygame.K_a] and p2x > vel and [p2x-vel,p2y] not in blocks:
        p2x -= vel
    if keys[pygame.K_d] and p2x < window_width - vel - player_width and [p2x+vel,p2y] not in blocks:
        p2x += vel
    if keys[pygame.K_w] and p2y > vel and [p2x,p2y-vel] not in blocks:
        p2y -= vel
    if keys[pygame.K_s] and p2y < window_height - vel - player_width and [p2x,p2y+vel] not in blocks:
        p2y += vel
    # pygame.draw.rect(win, (255,0,0), (x, y, width, height))
    # Redraw: clear, maze blocks, then both player sprites.
    display_surf.fill((0,0,0))
    draw(display_surf, block_surf)
    display_surf.blit(image_surf,(x,y))
    display_surf.blit(p2_surf,(p2x,p2y))
    pygame.display.update()
pygame.quit() |
12,394 | 67e1be6d5efb2b08bfbfd0d7684c5d8f53ddceae | from django.test import TestCase
from brambling.views.utils import FinanceTable
from brambling.tests.factories import (TransactionFactory, EventFactory,
PersonFactory, OrderFactory)
class FinanceTableTestCase(TestCase):
    """Unit tests for FinanceTable headers, rows and cell helper methods."""

    def setUp(self):
        self.order = OrderFactory(code='TK421')
        self.event = EventFactory()
        self.transactions = [TransactionFactory(order=self.order)]
        self.table = FinanceTable(self.event, self.transactions)

    def test_headers(self):
        """The table exposes exactly eight column headers."""
        header_row = self.table.headers()
        self.assertEqual(8, len(header_row))

    def test_row_count(self):
        """One transaction yields exactly one data row."""
        rows = list(self.table.get_rows())
        self.assertEqual(1, len(rows))

    def test_inclusion_of_header_row(self):
        """include_headers=True prepends a header row to the data rows."""
        rows = list(self.table.get_rows(include_headers=True))
        self.assertEqual(2, len(rows))

    def test_transaction_created_by_blank(self):
        """A transaction without a creator renders an empty name."""
        anonymous = TransactionFactory()
        self.assertEqual('', self.table.created_by_name(anonymous))

    def test_transaction_created_by_name(self):
        """The creator's first and last name are joined for display."""
        author = PersonFactory(first_name='Leia', last_name='Organa')
        txn = TransactionFactory(created_by=author)
        self.assertEqual('Leia Organa', self.table.created_by_name(txn))

    def test_transaction_with_order_code(self):
        """order_code returns the related order's code."""
        self.assertEqual('TK421', self.table.order_code(self.transactions[0]))

    def test_transaction_without_order(self):
        """order_code is empty when the transaction has no order."""
        orphan = TransactionFactory(order=None)
        self.assertEqual('', self.table.order_code(orphan))

    def test_transaction_as_cell_row(self):
        """Rows are 8 cells wide; cell 4 carries the order code."""
        first_row = next(iter(self.table.get_rows()))
        self.assertEqual(8, len(first_row))
        order_cell = first_row[4]
        self.assertEqual('order', order_cell.field)
        self.assertEqual('TK421', order_cell.value)
|
12,395 | 6947a18d4cad28ee4ef7310eef9503cbbce74e0c | import numpy as np
import tensorflow as tf
import random, math
import matlab
import matlab.engine as me
from dataSetup import generateData, generateWeights, generateWeights_topk
from recovAnalysis import recovery, structDiff
from trainerUtil import tensorInit, train, train_topk, init_weights
import argparse
# Command-line options for the threshold-sweep experiment.
argparser = argparse.ArgumentParser(description="Experiments n")
argparser.add_argument('-w', '--winit', help='Weight Initialization', default='tensor')
argparser.add_argument('-i', '--iht', help='IHT Algorithm Type', default='topk')
argparser.add_argument('-l', '--log', help='Log File Name', default='log_thresh_gt.txt')
# BUG FIX: argparse yields strings for CLI-supplied values; without type=int,
# "-n 5" made range(num_trials) fail with a TypeError.  The default stays 5.
argparser.add_argument('-n', '--numT', help='Number of Trials', type=int, default=5)
# NOTE(review): bool() of any non-empty string (including "False") is True, so
# "-gt False" still enables ground-truth init downstream.  Consider
# action='store_true'; left as-is to keep the CLI backward compatible.
argparser.add_argument('-gt', '--gtinit', help='Type of Ground Truth Weight Init', default=False)
args = argparser.parse_args()
# Training hyper-parameters.
num_epoch = 25
num_epoch_pretrain = 5
epsilon = 1e-4  # convergence tolerance passed to the trainers
recovery_delta = 1e-2  # tolerance used by structDiff for support recovery
batch_size = 20
lr = 1e-3  # learning rate
test_n = 1000  # number of test samples
# NOTE(review): this rebinds the imported `random` module's name to a bool.
# Harmless here (only np.random is used below), but rename if the stdlib
# module is ever needed.
if args.winit == 'random':
    random = True
else:
    random = False
iht = args.iht
# The MATLAB engine is only needed for the tensor-initialization path.
if not random:
    eng = me.start_matlab()
## Baseline Values
d = 20  # passed to generateData/generateWeights; presumably input dimension -- confirm in dataSetup
k = 5  # presumably number of hidden units -- confirm in dataSetup
thresh_gt = 0.15
sparse_gt = 0.75
n = 6000  # number of training samples
noise_sd = 0  # stddev of the Gaussian label noise added below
thresh_train = 0.15
sparse_train = 0.75
num_trials = args.numT
## Vary thresh_gt
exp_thresh_gt = [0.0, 0.05, 0.10, 0.15, 0.20]
logFile = open(args.log,'w')
for thresh_gt in exp_thresh_gt:
recovery_this = []
recovery_o_this = []
truep = []
truep_o = []
truen = []
truen_o = []
false_posit = []
for trial in range(num_trials):
print "Experiment Starting for thresh_gt = ",thresh_gt, " trial: ", trial
if iht == 'topk':
w_gt, v_gt, m = generateWeights_topk(d, k, sparse_gt, bool(args.gtinit))
else:
w_gt, v_gt, m = generateWeights(d, k, thresh_gt, bool(args.gtinit))
train_x, train_y, test_x, test_y = generateData(w_gt, v_gt, n, test_n, d)
train_y_noisy = train_y + np.random.normal(0, noise_sd, n)
if random:
tensorWeights = []
else:
tensorWeights = tensorInit(train_x, train_y_noisy, w_gt, m ,k, eng)
if iht == 'topk':
w_res, train_loss, test_loss = train_topk(train_x, train_y_noisy, test_x,
test_y,
tensorWeights,v_gt, sparse_train,
num_epoch, batch_size, lr, epsilon,
num_epoch_pretrain, random)
w_res_o, train_loss, test_loss = train_topk(train_x, train_y_noisy, test_x,
test_y,
tensorWeights,v_gt, 1.0 , num_epoch,
batch_size, lr, epsilon,
num_epoch_pretrain, random)
else:
w_res, train_loss, test_loss = train(train_x, train_y_noisy, test_x, test_y,
tensorWeights,v_gt, thresh_train,
num_epoch, batch_size, lr, epsilon,
num_epoch_pretrain, random)
w_res_o, train_loss, test_loss = train(train_x, train_y_noisy, test_x,
test_y,
tensorWeights,v_gt, 0.0 , num_epoch,
batch_size, lr, epsilon,
num_epoch_pretrain, random)
recoveryVal = recovery(w_gt, v_gt, w_res, v_gt)
recoveryVal_o = recovery(w_gt, v_gt, w_res_o, v_gt)
recoveryStructure = structDiff(w_gt, w_res, recovery_delta)
recoveryStructure_o = structDiff(w_gt, w_res_o, recovery_delta)
recovery_this.append(recoveryVal)
recovery_o_this.append(recoveryVal_o)
truen.append(recoveryStructure[2])
truen_o.append(recoveryStructure_o[2])
truep.append(recoveryStructure[3])
truep_o.append(recoveryStructure_o[3])
false_post.append(recoveryStructure_o[0])
avg_recov = np.mean(recovery_this)
std_recov = np.std(recovery_this)
avg_recov_o = np.mean(recovery_o_this)
std_recov_o = np.std(recovery_o_this)
avg_truen = np.mean(truen)
avg_truep = np.mean(truep)
avg_truen_o = np.mean(truen_o)
avg_truep_o = np.mean(truep_o)
std_truen = np.std(truen)
std_truep = np.std(truep)
std_truen_o = np.std(truen_o)
std_truep_o = np.std(truep_o)
logFile.write(str(thresh_gt)+' '+str(avg_recov)+' '+str(std_recov) + ' '+str(avg_recov_o)+' '+str(std_recov_o)+' ' )
logFile.write(str(avg_truen) + ' '+ str(std_truen)+' ')
logFile.write(str(avg_truen_o) + ' '+ str(std_truen_o)+' ')
logFile.write(str(avg_truep) + ' '+ str(std_truep)+' ')
logFile.write(str(avg_truep_o) + ' '+ str(std_truep_o) +' ')
logFile.write(str(np.mean(false_posit)) + ' '+ str(np.std(false_posit)) )
logFile.write('\n')
logFile.close()
# Shut down the MATLAB engine if one was started for tensor initialization.
if not random:
    eng.quit()
|
12,396 | 5e5297eef360c376700dc4b65d3297405b9a460d | """
Copyright 2017 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import logging
from serial import Serial, SerialException
log = logging.getLogger(__name__)
log.setLevel(logging.DEBUG)
class SerialConnection:
    """Thin wrapper around pyserial's Serial that logs (and swallows) I/O errors."""

    def __init__(self, port=None, baudrate=9600, timeout=1):
        # Serial opens the port immediately when a port name is given.
        self.ser = Serial(port, baudrate, timeout=timeout)

    def open(self):
        """
        Open serial port connection (no-op if it is already open)
        """
        if self.ser.is_open:
            return
        self.ser.open()

    def readline(self):
        """
        Read line from serial port
        :return: One line from serial stream, or None on a serial error
        """
        try:
            line = self.ser.readline()
        except SerialException as err:
            log.debug('Serial connection read error: {}'.format(err))
            return None
        return line

    def write(self, data):
        """
        Write data to serial port; serial errors are logged, not raised
        :param data: Data to send
        """
        try:
            self.ser.write(data)
        except SerialException as err:
            log.debug('Serial connection write error: {}'.format(err))

    def close(self):
        """
        Close serial port connection
        """
        self.ser.close()
|
12,397 | 474bf5c5f65af221d4d2ec80a024595582f187ba | '''
Author: Suryakiran Menachery George
Date: March-19-2018 ver1
Date: March-22-2018 final
Purpose: EE 544 Mini Project, Harmonics & Inter-modulation Products Calculator
'''
import sys
# Prompt for the three input tones and the desired (victim) frequency.
# The \033[...m sequences are ANSI colour codes for the terminal prompts.
print('\033[1;34mEnter the first frequency in MHz\033[1;m')
val1 = sys.stdin.readline()
print('\033[1;34mEnter the second frequency in MHz\033[1;m')
val2 = sys.stdin.readline()
print('\033[1;34mEnter the third frequency in MHz\033[1;m')
val3 = sys.stdin.readline()
print('\033[1;34mEnter the desired frequency(for hit calculation) MHz\033[1;m')
val4 = sys.stdin.readline()
print("\033[1;31mHarmonics & IM Components for each Non-Linearity\033[1;m")
# NOTE(review): int() raises on fractional input such as "12.5" -- confirm
# whole-MHz input is intended.
f1 = int(val1)
f2 = int(val2)
f3 = int(val3)
des_freq = int(val4)
# Accumulators: one inter-modulation frequency list per order, plus counters
# for hits at the desired frequency.
IM_freq_list = []
IM_freq_list_2nd = []
IM_freq_list_3rd = []
IM_freq_list_4th = []
IM_freq_list_5th = []
IM_freq_list_6th = []
IM_freq_list_7th = []
Total_hits = 0  # hits at des_freq from inter-modulation products
Har_hits = 0  # hits at des_freq from pure harmonics
print("\033[1;34m-------------------------------------------------------------------------------\033[1;m")
'''---------------------------------------------------------------'''
''' 2nd Order Harmonics & Inter-modulation Components'''
hit_count = 0
# 2nd harmonics: 2*f for each tone; count direct hits on the desired freq.
Har_freq_list_2nd = [2 * f1, 2 * f2, 2 * f3]
print("\033[1;33mThe 2nd Order Harmonic Freqs:\033[1;m", Har_freq_list_2nd)
Har_hits += Har_freq_list_2nd.count(des_freq)
# 2nd-order IM products: coefficient triples (m, n, k) of total order 2.
print("\033[1;33mThe 2nd Order Inter-modulation Components:\033[1;m")
from itertools import permutations
list_1 = [(1, 1, 0), (1, 0, 1), (0, 1, 1)]
list_2 = [(-1, -1, 0), (-1, 0, -1), (0, -1, -1)]
list_3 = list(permutations([-1, 1, 0]))
IM2_order_list = list_1 + list_2 + list_3
for m, n, k in IM2_order_list:
    Sum_Val = m * f1 + n * f2 + k * f3
    if abs(Sum_Val) == des_freq:
        hit_count += 1
        print("(m,n,k):", m, n, k)
    IM_freq_list_2nd.append(Sum_Val)
print("\033[1;34m\nNo of hits :\033[1;m", hit_count)
Total_hits += hit_count
# Fold negatives onto positive frequencies, de-duplicate, sort for display.
IM_freq_list_2nd = [abs(freq) for freq in IM_freq_list_2nd]
IM_freq_set = set(IM_freq_list_2nd)
IM_freq_list_2nd = sorted(IM_freq_set)
print('\n', IM_freq_list_2nd)
print("\033[1;34m-------------------------------------------------------------------------------\033[1;m")
'''---------------------------------------------------------------'''
''' 3rd Order Harmonics & Inter-modulation Components'''
# 3rd harmonics: 3*f for each tone; count direct hits on the desired freq.
Har_freq_list_3rd = [3 * f1, 3 * f2, 3 * f3]
print("\033[1;33mThe 3rd Order Harmonic Freqs:\033[1;m", Har_freq_list_3rd)
Har_hits += Har_freq_list_3rd.count(des_freq)
# 3rd-order IM products: coefficient triples (m, n, k) of total order 3.
print("\033[1;33mThe 3rd Order Inter-modulation Components:\033[1;m")
hit_count = 0
list_1 = [(1, 1, 1),(-1,-1,-1)]
list_2 = [(-1, 1, 1), (1, -1, 1), (1, 1, -1)] + [(-1, -1, 1), (1, -1, -1), (-1, 1, -1)]
list_3 = list(permutations([-2, 1, 0])) + list(permutations([2, -1, 0]))
list_4 = list(permutations([2, 1, 0])) + list(permutations([-2, -1, 0]))
IM2_order_list = list_1 + list_2 + list_3 + list_4
for m, n, k in IM2_order_list:
    Sum_Val = m * f1 + n * f2 + k * f3
    if abs(Sum_Val) == des_freq:
        hit_count += 1
        print("(m,n,k):", m, n, k)
    IM_freq_list_3rd.append(Sum_Val)
print("\033[1;34m\nNo of hits :\033[1;m", hit_count)
Total_hits += hit_count
# Fold negatives onto positive frequencies, de-duplicate, sort for display.
IM_freq_list_3rd = [abs(freq) for freq in IM_freq_list_3rd]
IM_freq_set = set(IM_freq_list_3rd)
IM_freq_list_3rd = sorted(IM_freq_set)
print('\n', IM_freq_list_3rd)
print("\033[1;34m-------------------------------------------------------------------------------\033[1;m")
'''---------------------------------------------------------------'''
''' 4th Order Harmonics & Inter-modulation Components'''
# 4th harmonics: 4*f for each tone; count direct hits on the desired freq.
Har_freq_list_4th = [4 * f1, 4 * f2, 4 * f3]
print("\033[1;33mThe 4th Order Harmonic Freqs:\033[1;m", Har_freq_list_4th)
Har_hits += Har_freq_list_4th.count(des_freq)
# 4th-order IM products: coefficient triples (m, n, k) of total order 4.
print("\033[1;33mThe 4th Order Inter-modulation Components:\033[1;m")
hit_count = 0
list_1 = list(permutations([3, 1, 0])) + list(permutations([-3, -1, 0]))
list_2 = list(permutations([-3, 1, 0])) + list(permutations([3, -1, 0]))
list_3 = [(2, 2, 0), (2, 0, 2), (0, 2, 2)] + [(-2, -2, 0), (-2, 0, -2), (0, -2, -2)]
list_4 = list(permutations([-2, 2, 0]))
list_5 = [(2, 1, 1), (1, 2, 1), (1, 1, 2)] + [(-2, -1, -1), (-1, -2, -1), (-1, -1, -2)]
list_6 = [(-2, 1, 1), (1, -2, 1), (1, 1, -2)] + [(2, -1, -1), (-1, 2, -1), (-1, -1, 2)]
list_7 = list(permutations([2, -1, 1])) + list(permutations([-2, -1, 1]))
IM2_order_list = list_1 + list_2 + list_3 + list_4 + list_5 + list_6 + list_7
for m, n, k in IM2_order_list:
    Sum_Val = m * f1 + n * f2 + k * f3
    if abs(Sum_Val) == des_freq:
        hit_count += 1
        print("(m,n,k):", m, n, k)
    IM_freq_list_4th.append(Sum_Val)
print("\033[1;34m\nNo of hits :\033[1;m", hit_count)
Total_hits += hit_count
# Fold negatives onto positive frequencies, de-duplicate, sort for display.
IM_freq_list_4th = [abs(freq) for freq in IM_freq_list_4th]
IM_freq_set = set(IM_freq_list_4th)
IM_freq_list_4th = sorted(IM_freq_set)
print('\n', IM_freq_list_4th)
print("\033[1;34m-------------------------------------------------------------------------------\033[1;m")
'''---------------------------------------------------------------'''
''' 5th Order Harmonics & Inter-modulation Components'''
# 5th harmonics: 5*f for each tone; count direct hits on the desired freq.
Har_freq_list_5th = [5 * f1, 5 * f2, 5 * f3]
print("\033[1;33mThe 5th Order Harmonic Freqs:\033[1;m", Har_freq_list_5th)
Har_hits += Har_freq_list_5th.count(des_freq)
# 5th-order IM products: coefficient triples (m, n, k) of total order 5.
print("\033[1;33mThe 5th Order Inter-modulation Components:\033[1;m")
hit_count = 0
list_1 = list(permutations([4, 1, 0])) + list(permutations([-4, -1, 0]))
list_2 = list(permutations([4, -1, 0])) + list(permutations([-4, 1, 0]))
list_3 = list(permutations([3, 2, 0])) + list(permutations([-3, -2, 0]))
list_4 = list(permutations([-3, 2, 0])) + list(permutations([3, -2, 0]))
list_5 = [(3, 1, 1),(1, 3, 1), (1, 1, 3)] + [(-3, -1, -1),(-1, -3, -1), (-1, -1, -3)]
list_6 = [(-3, 1, 1), (1, -3, 1), (1, 1, -3)] + [(3, -1, -1), (-1, 3, -1), (-1, -1, 3)]
list_7 = list(permutations([3, -1, 1])) + list(permutations([-3, -1, 1]))
list_8 = [(2, 2, 1), (2, 1, 2), (1, 2, 2)] + [(-2, -2, -1), (-2, -1, -2), (-1, -2, -2)]
list_9 = list(permutations([-2, 2, 1])) + list(permutations([-2, 2, -1]))
list_10 = [(-2, -2, 1), (-2, 1, -2), (1, -2, -2)] + [(2, 2, -1), (2, -1, 2), (-1, 2, 2)]
IM2_order_list = (list_1 + list_2 + list_3 + list_4 + list_5 +
                  list_6 + list_7 + list_8 + list_9 + list_10)
for m, n, k in IM2_order_list:
    Sum_Val = m * f1 + n * f2 + k * f3
    if abs(Sum_Val) == des_freq:
        hit_count += 1
        print("(m,n,k):", m, n, k)
    IM_freq_list_5th.append(Sum_Val)
print("\033[1;34m\nNo of hits :\033[1;m", hit_count)
Total_hits += hit_count
# Fold negatives onto positive frequencies, de-duplicate, sort for display.
IM_freq_list_5th = [abs(freq) for freq in IM_freq_list_5th]
IM_freq_set = set(IM_freq_list_5th)
IM_freq_list_5th = sorted(IM_freq_set)
print('\n', IM_freq_list_5th)
print("\033[1;34m-------------------------------------------------------------------------------\033[1;m")
'''---------------------------------------------------------------'''
''' 6th Order Harmonics & Inter-modulation Components'''
# 6th harmonics: 6*f for each tone; count direct hits on the desired freq.
Har_freq_list_6th = [6 * f1, 6 * f2, 6 * f3]
print("\033[1;33mThe 6th Order Harmonic Freqs:\033[1;m", Har_freq_list_6th)
Har_hits += Har_freq_list_6th.count(des_freq)
# 6th-order IM products: coefficient triples (m, n, k) of total order 6.
print("\033[1;33mThe 6th Order Inter-modulation Components:\033[1;m")
hit_count = 0
list_1 = list(permutations([5, 1, 0])) + list(permutations([-5, -1, 0]))
list_2 = list(permutations([-5, 1, 0])) + list(permutations([5, -1, 0]))
list_3 = list(permutations([4, 2, 0])) + list(permutations([-4, -2, 0]))
list_4 = list(permutations([-4, 2, 0])) + list(permutations([4, -2, 0]))
list_5 = [(3, 3, 0), (3, 0, 3), (0, 3, 3)] + [(-3, -3, 0), (-3, 0, -3), (0, -3, -3)]
list_6 = list(permutations([-3, 3, 0]))
list_7 = list(permutations([3, 2, 1])) + list(permutations([-3, -2, -1]))
list_8 = list(permutations([3, -2, 1])) + list(permutations([-3, 2, -1]))
list_9 = list(permutations([-3, 2, 1])) + list(permutations([3, -2, -1]))
list_10 = list(permutations([3, 2, -1])) + list(permutations([-3, -2, 1]))
list_11 = [(4, 1, 1), (1, 4, 1), (1, 1, 4)] + [(-4, -1, -1), (-1, -4, -1), (-1, -1, -4)]
list_12 = list(permutations([4, -1, 1])) + list(permutations([-4, -1, 1]))
list_13 = [(-4, 1, 1), (1, -4, 1), (1, 1, -4)] + [(4, -1, -1), (-1, 4, -1), (-1, -1, 4)]
IM2_order_list = (list_1 + list_2 + list_3 + list_4 + list_5 + list_6 + list_7 +
                  list_8 + list_9 + list_10 + list_11 + list_12 + list_13)
for m, n, k in IM2_order_list:
    Sum_Val = m * f1 + n * f2 + k * f3
    if abs(Sum_Val) == des_freq:
        hit_count += 1
        print("(m,n,k):", m, n, k)
    IM_freq_list_6th.append(Sum_Val)
print("\033[1;34m\nNo of hits :\033[1;m", hit_count)
Total_hits += hit_count
# Fold negatives onto positive frequencies, de-duplicate, sort for display.
IM_freq_list_6th = [abs(freq) for freq in IM_freq_list_6th]
IM_freq_set = set(IM_freq_list_6th)
IM_freq_list_6th = sorted(IM_freq_set)
print('\n', IM_freq_list_6th)
print("\033[1;34m-------------------------------------------------------------------------------\033[1;m")
'''---------------------------------------------------------------'''
''' 7th Order Harmonics & Inter-modulation Components'''
# 7th harmonics: 7*f for each tone; count direct hits on the desired freq.
Har_freq_list_7th = [7 * f1, 7 * f2, 7 * f3]
print("\033[1;33mThe 7th Order Harmonic Freqs:\033[1;m", Har_freq_list_7th)
Har_hits += Har_freq_list_7th.count(des_freq)
# 7th-order IM products: coefficient triples (m, n, k) of total order 7.
print("\033[1;33mThe 7th Order Inter-modulation Components:\033[1;m")
hit_count = 0
list_1 = list(permutations([6, 1, 0])) + list(permutations([-6, -1, 0]))
list_2 = list(permutations([-6, 1, 0])) + list(permutations([6, -1, 0]))
list_3 = list(permutations([5, 2, 0])) + list(permutations([-5, -2, 0]))
list_4 = list(permutations([-5, 2, 0])) + list(permutations([5, -2, 0]))
list_5 = list(permutations([4, 3, 0])) + list(permutations([-4, -3, 0]))
list_6 = list(permutations([-4, 3, 0])) + list(permutations([4, -3, 0]))
list_7 = [(3, 3, 1), (3, 1, 3), (1, 3, 3)] + [(-3, -3, -1), (-3, -1, -3), (-1, -3, -3)]
list_8 = list(permutations([3, -3, 1])) + list(permutations([3, -3, -1]))
list_9 = [(3, 3, -1), (3, -1, 3), (-1, 3, 3)] + [(-3, -3, 1), (-3, 1, -3), (1, -3, -3)]
list_10 = list(permutations([4, 2, 1])) + list(permutations([-4, -2, -1]))
list_11 = list(permutations([-4, 2, 1])) + list(permutations([4, -2, -1]))
list_12 = list(permutations([4, -2, 1])) + list(permutations([-4, 2, -1]))
list_13 = list(permutations([4, 2, -1])) + list(permutations([-4, -2, 1]))
list_14 = [(3, 2, 2), (2, 3, 2), (2, 2, 3)] + [(-3, -2, -2), (-2, -3, -2), (-2, -2, -3)]
list_15 = [(-3, 2, 2), (2, -3, 2), (2, 2, -3)] + [(3, -2, -2), (-2, 3, -2), (-2, -2, 3)]
list_16 = list(permutations([3, -2, 2])) + list(permutations([-3, -2, 2]))
list_17 = [(5, 1, 1), (1, 5, 1), (1, 1, 5)] + [(-5, -1, -1), (-1, -5, -1), (-1, -1, -5)]
list_18 = list(permutations([5, -1, 1])) + list(permutations([-5, -1, 1]))
list_19 = [(-5, 1, 1), (1, -5, 1), (1, 1, -5)] + [(5, -1, -1), (-1, 5, -1), (-1, -1, 5)]
IM2_order_list = (list_1 + list_2 + list_3 + list_4 + list_5 + list_6 + list_7 +
                  list_8 + list_9 + list_10 + list_11 + list_12 + list_13 + list_14 +
                  list_15 + list_16 + list_17 + list_18 + list_19)
for m, n, k in IM2_order_list:
    Sum_Val = m * f1 + n * f2 + k * f3
    if abs(Sum_Val) == des_freq:
        hit_count += 1
        print("(m,n,k):", m, n, k)
    IM_freq_list_7th.append(Sum_Val)
print("\033[1;34m\nNo of hits :\033[1;m", hit_count)
Total_hits += hit_count
# Fold negatives onto positive frequencies, de-duplicate, sort for display.
IM_freq_list_7th = [abs(freq) for freq in IM_freq_list_7th]
IM_freq_set = set(IM_freq_list_7th)
IM_freq_list_7th = sorted(IM_freq_set)
print('\n', IM_freq_list_7th)
print("\033[1;34m-------------------------------------------------------------------------------\033[1;m")
'''---------------------------------------------------------------'''
''' ------------ All Upto 7th NL Combined-------------------------'''
print("\033[1;31m\n--------- SUMMARY -----------\033[1;m")
# Merge the per-order harmonic lists, de-duplicate and sort for display.
Har_freq_list = (Har_freq_list_2nd + Har_freq_list_3rd + Har_freq_list_4th
                 + Har_freq_list_5th + Har_freq_list_6th + Har_freq_list_7th)
print("\033[1;31m\nAll Harmonic Freqs up-to 7th Non Linearity\033[1;m")
Har_freq_set = set(Har_freq_list)
Har_freq_list_all = sorted(Har_freq_set)
print(Har_freq_list_all)
# Same treatment for the inter-modulation products of every order.
IM_freq_list = (IM_freq_list_2nd + IM_freq_list_3rd + IM_freq_list_4th
                + IM_freq_list_5th + IM_freq_list_6th + IM_freq_list_7th)
IM_freq_set = set(IM_freq_list)
IM_freq_list_all = sorted(IM_freq_set)
print("\033[1;31m\nAll IM Freq Components up-to 7th Non Linearity\033[1;m")
print(IM_freq_list_all)
print("\033[1;31m\nTotal Hits at desired f due to Inter-Modulation:\033[1;m",Total_hits)
print("\033[1;31m\nTotal Hits at desired f due to Harmonics:\033[1;m",Har_hits) |
12,398 | 7f19ad893e642cb8b4ca429928bf5e5b5f7c1170 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed May 30 15:11:25 2018
@author: bitzer
"""
import helpers
import numpy as np
import matplotlib.pyplot as plt
import os
import scipy.stats
#%% define model
# Two candidate target locations at +/- helpers.cond on the x-axis, with
# isotropic Gaussian dot noise of scale helpers.dotstd.
targets = helpers.cond * np.r_[1, -1]
cov = helpers.dotstd * np.eye(2)
def evidence(dots):
    """Log-likelihood ratio per dot for the right vs the left target."""
    logp_right = scipy.stats.multivariate_normal.logpdf(
        dots, mean=np.r_[targets[0], 0], cov=cov)
    logp_left = scipy.stats.multivariate_normal.logpdf(
        dots, mean=np.r_[targets[1], 0], cov=cov)
    return logp_right - logp_left
#%%
# Plot how the per-dot evidence depends on the dot's x- and y-coordinate.
# (Axis labels and legend entries are in German, matching the original.)
fig, ax = plt.subplots(figsize=(4, 3));
# Sweep x from -200 to 200 px with random y: evidence varies with x.
dots = np.c_[np.arange(-200, 200), np.random.randn(400)]
ax.plot(dots[:, 0], evidence(dots), label='x-Koordinate (y beliebig)', lw=2,
        color='#93107d')
# Sweep y with x fixed at 0: shows evidence is flat in y.
dots = np.c_[np.zeros(400), np.arange(-200, 200)]
ax.plot(dots[:, 1], evidence(dots), label='y-Koordinate (x=0)', lw=2,
        color='#717778')
ax.set_xlabel('Position des Punktes (px)')
ax.set_ylabel('Evidenz für rechts')
ax.legend()
fig.tight_layout()
fig.savefig(os.path.join(helpers.figdir, 'dot-position_vs_evidence.png')) |
12,399 | c1d8f20ce7619ff9427f7e448da4cde2e4236d86 | from django.conf.urls import url
from django.contrib import admin
from .import views
# Namespace for reversing these routes, e.g. 'accounts:login'.
app_name='accounts'
urlpatterns=[
    url(r'^signup/$', views.signup_view, name="signup"),
    url(r'^details/$', views.details_view, name="details"),
    # Root of the app serves the login page.
    url(r'^$',views.login_view,name="login"),
    # Signup confirmation keyed by an account key embedded in the URL.
    url(r'^signup/(?P<account_key>[\w |\w-]+)/$', views.account_details),
    # NOTE(review): unlike the other routes, logout has no trailing slash --
    # confirm this is intentional (APPEND_SLASH redirects POSTs poorly).
    url(r'^logout$',views.logout_view,name="logout"),
    url(r'^delete/$', views.delete_account, name='delete_account'),
]
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.