index int64 0 1,000k | blob_id stringlengths 40 40 | code stringlengths 7 10.4M |
|---|---|---|
21,300 | 97d251db331d120ae6710c3a37b5f37b41061c2c | hi = open("readme.txt","r")
# Read the whole file and echo it; the finally clause guarantees the
# handle opened on the previous line is closed even if read() or print() fails.
try:
    a = hi.read()
    print(a)
finally:
    hi.close()
|
21,301 | 34e928416eab32197be63df4f4f6d4478ab9a689 | """
Default logging rules.
"""
import logging
import logging.config
# Base dictConfig schema shared by every logger created via configure().
DEFAULT_LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'filters': {},
    'formatters': {
        'default': {
            'format': '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
        }
    },
    'handlers': {
        'console': {
            'level': 'INFO',
            'class': 'logging.StreamHandler',
            'formatter': 'default'
        },
    }
}


def configure(logger, fhandler=False):
    """Configure and return a logger by name.

    :param logger: name of the logger to create/configure.
    :param fhandler: when truthy, also attach a DEBUG-level FileHandler
        writing to "<logger>.log".
    :return: the configured ``logging.Logger`` instance.
    """
    if fhandler:
        # Register the file handler under its own name; the original rebound
        # the boolean parameter `fhandler` to the config dict (shadowing).
        DEFAULT_LOGGING['handlers']['file'] = {
            'level': 'DEBUG',
            'class': 'logging.FileHandler',
            'formatter': 'default',
            'filename': '.'.join([logger, 'log']),
        }
    # dictConfig expects a list of handler *names* for a logger, not the
    # handler config mapping itself (iterating the dict only worked by luck).
    DEFAULT_LOGGING['loggers'] = {
        logger: {
            'level': 'DEBUG',
            'handlers': list(DEFAULT_LOGGING['handlers']),
            'propagate': False,
        }
    }
    logging.config.dictConfig(DEFAULT_LOGGING)
    return logging.getLogger(logger)
|
21,302 | ad3914eb8c4640af7a51e067bd72c4f1185cfd60 | import logging
from model.inputs.input_fn_baseline import gen_pipeline_train_baseline, gen_pipeline_eval_baseline, \
gen_pipeline_test_baseline
from model import model_fn, train_baseline
from utils import utils_params, utils_misc
import argparse
def set_up_train(path_model_id='', config_names=None, bindings=None):
    """
    Setup complete environment including datasets, models and start training.

    :param path_model_id: use if continue from existing model
    :param config_names: name of config file(s) for gin config
    :param bindings: to overwrite single parameters defined in the gin config file(s)
    :return: return the last validation metric
    """
    # Avoid mutable default arguments (shared across calls); fall back to the
    # documented defaults here instead.
    if config_names is None:
        config_names = ['config.gin']
    if bindings is None:
        bindings = []
    # inject config
    utils_params.inject_gin(config_names, path_model_id=path_model_id,
                            bindings=bindings)  # e.g. bindings=['train_and_eval.n_epochs = 3','train_and_eval.save_period = 1']
    # generate folder structures
    run_paths = utils_params.gen_run_folder(path_model_id=path_model_id)
    # set loggers
    utils_misc.set_loggers(run_paths['path_logs_train'], logging.INFO)
    # Define input pipeline depending on the type of training
    logging.info('Setup input pipeline...')
    train_ds, train_ds_info = gen_pipeline_train_baseline()
    eval_ds, eval_ds_info = gen_pipeline_eval_baseline()
    test_ds, test_info = gen_pipeline_test_baseline()
    # Define model
    logging.info("Setup model...")
    model = model_fn.gen_model(n_classes=train_ds_info.features['label'].num_classes)
    # Train and eval
    logging.info('Start training...')
    results = train_baseline.train_and_eval_baseline(model, train_ds, train_ds_info, eval_ds, test_ds, run_paths)
    return results
if __name__ == '__main__':
    # Parse the optional result path used to resume an existing run.
    parser = argparse.ArgumentParser(description='Baseline training.')
    parser.add_argument('--path', type=str, default='', required=False, help='Result path')
    args = parser.parse_args()
    path_model_id = args.path if args.path else ''
    # gin configuration files and parameter overrides for this run
    config_names = ['configBaseline.gin']
    bindings = []
    # launch training
    set_up_train(path_model_id=path_model_id, config_names=config_names, bindings=bindings)
|
21,303 | a3ac0ca0d03142e83450d98bd195601e7d2188f1 | import numpy as np
import matplotlib.pyplot as plt
# Load the simulation output; columns are x, theta, t, v.
data = np.loadtxt('results2.dat')
# Each time step occupies nx consecutive rows of the results file.
nx = 100
for i in range(0, 8):
    # Slice out the rows belonging to time step i (the whole-file column
    # reads the original did before this loop were dead code: every variable
    # was overwritten here before first use).
    x = data[i * nx:(i + 1) * nx, 0]
    theta = data[i * nx:(i + 1) * nx, 1]
    t = data[i * nx:(i + 1) * nx, 2]
    v = data[i * nx:(i + 1) * nx, 3]
    p = str(i * 0.2)
    plt.plot(x, v, label="timestep is : " + p)
plt.legend(loc='best')
# NOTE: the original re-plotted the last (x, v) segment a second time here,
# adding a duplicate unlabeled line; that redundant call was removed.
plt.show()
|
21,304 | ce2d4e0e828c8319c9d726975fe95d99228ffb53 | __all__= ['kafka_producer']
|
21,305 | 199c7cb3c78fc747462c4e2e7a540d219f92d1b1 |
# coding: utf-8
# [題目](https://app.codesignal.com/arcade/intro/level-1/s5PbmwxfECC52PWyQ)
#
# Given the string, check if it is a palindrome.
#
# # Example
#
# >For inputString = "aabaa", the output should be
# checkPalindrome(inputString) = true;
# For inputString = "abac", the output should be
# checkPalindrome(inputString) = false;
# For inputString = "a", the output should be
# checkPalindrome(inputString) = true.
# Input/Output
#
# * [execution time limit] 4 seconds (py3)
#
# * [input] string inputString
# * A non-empty string consisting of lowercase characters.
# * Guaranteed constraints:
# * 1 ≤ inputString.length ≤ 105.
#
# * [output] boolean
# *true if inputString is a palindrome, false otherwise.
#
# # [Python3] Syntax Tips
#
# #Prints help message to the console
# #Returns a string
# >def helloWorld(name):
# print("This prints to the console when you Run Tests")
# return "Hello, " + name
#
# In[2]:
def checkPalindrome(inputString):
    """Return True if inputString reads the same forwards and backwards."""
    return inputString == inputString[::-1]

# String reversal options:
#   1. slicing with [::-1] (used above)
#   2. the built-in reverse() only applies to lists
# A palindrome is text that reads the same in both directions.
|
21,306 | ab6b29314a53ff9ae478e2ed83ba5ab56c44be62 | from collections import deque
d=deque()
n=int(input())
for i in range(n):
ls=list(input().split(" "))
if len(ls)==1:
action=ls[0]
else:
action=ls[0]
value=ls[1]
if action=="append":
d.append(int(value))
elif action=="appendleft":
d.appendleft(int(value))
elif action=='pop':
d.pop()
else:
d.popleft()
[print(i,end=" ") for i in d] |
21,307 | b95da1d90ea2e1f57aecf7e28c7c28ea49f71d9c | #!/usr/bin/env python
"""Exposes classes and functions to maintain a representation of,
and to interact with, the Boolector SMT solver.
"""
"""See the LICENSE file, located in the root directory of
the source distribution and
at http://verifun.eecs.berkeley.edu/gametime/about/LICENSE,
for details on the GameTime license and authors.
"""
import subprocess
from tempfile import NamedTemporaryFile
from gametime.defaults import config
from gametime.gametimeError import GameTimeError
from gametime.smt.model import Model
from gametime.smt.solvers.solver import Solver
class SatSolver(object):
    """This class represents the SAT solver used by Boolector."""

    # Lingeling SAT solver (default).
    LINGELING = 0
    # MiniSat SAT solver.
    MINISAT = 1
    # PicoSAT SAT solver.
    PICOSAT = 2

    @staticmethod
    def getSatSolver(satSolverName):
        """
        Returns the SatSolver representation of the SAT solver
        whose name is provided.

        @param satSolverName Name of a SAT solver.
        @retval SatSolver representation of the SAT solver
        whose name is provided.
        """
        normalized = satSolverName.lower()
        # An empty name selects the default backend (Lingeling).
        lookup = {
            "": SatSolver.LINGELING,
            "lingeling": SatSolver.LINGELING,
            "minisat": SatSolver.MINISAT,
            "picosat": SatSolver.PICOSAT,
        }
        if normalized not in lookup:
            errMsg = ("Unknown backend SAT solver for Boolector: %s" %
                      normalized)
            raise GameTimeError(errMsg)
        return lookup[normalized]

    @staticmethod
    def getName(satSolver):
        """
        Returns the name of the SAT solver whose SatSolver representation
        is provided.

        @param satSolver SatSolver representation of a SAT solver.
        @retval Name of the SAT solver whose SatSolver representation
        is provided.
        """
        names = {
            SatSolver.LINGELING: "lingeling",
            SatSolver.MINISAT: "minisat",
            SatSolver.PICOSAT: "picosat",
        }
        if satSolver not in names:
            errMsg = ("Unknown backend SAT solver for Boolector: %s" %
                      satSolver)
            raise GameTimeError(errMsg)
        return names[satSolver]
class BoolectorSolver(Solver):
    """This class maintains a representation of the Boolector SMT solver."""

    def __init__(self, satSolver=SatSolver.LINGELING):
        """
        Constructor for the BoolectorSolver class.

        @param satSolver SatSolver representation of the SAT solver that
        the Boolector executable uses.
        """
        super(BoolectorSolver, self).__init__("boolector")
        # SatSolver representation of the SAT solver that
        # the Boolector executable uses.
        self.satSolver = satSolver

    def _generateBoolectorCommand(self, location):
        """
        Generates the system call to run Boolector on a file containing
        an SMT query whose location is provided.

        @param location Location of a file containing an SMT query.
        @retval Appropriate system call as a list that contains the program
        to be run and the proper arguments.
        """
        command = []
        command.append(config.SOLVER_BOOLECTOR)
        command.append("--model")
        command.append("--smt2")
        command.append("-" + SatSolver.getName(self.satSolver))
        command.append(location)
        return command

    def checkSat(self, query):
        """
        Checks and updates the satisfiability of the SMT query
        represented by the Query object provided. If the SMT query
        is satisfiable, the Query object is updated with a satisfying
        model; if the query is unsatisfiable, the Query object is
        updated with an unsatisfiable core.

        @param query Query object that represents an SMT query.
        """
        # Write the SMT query to a temporary file; keep it alive (inside the
        # `with`) until Boolector has read it.
        smtQueryFileHandler = NamedTemporaryFile()
        with smtQueryFileHandler:
            smtQueryFileHandler.write(query.queryStr)
            smtQueryFileHandler.seek(0)
            command = self._generateBoolectorCommand(smtQueryFileHandler.name)
            # BUGFIX: `command` is an argument list, so the process must be
            # spawned without a shell. With shell=True and a list, POSIX runs
            # only the first element and silently drops every flag and the
            # query-file path.
            proc = subprocess.Popen(command,
                                    stdout=subprocess.PIPE,
                                    stderr=subprocess.PIPE)
            output = proc.communicate()[0]
            outputLines = output.strip().split("\n")
            # Boolector prints "sat" followed by the model, or "unsat".
            isSat = outputLines.index("sat") if "sat" in outputLines else None
            isUnsat = "unsat" in outputLines
            if isSat is not None:
                query.labelSat(Model("\n".join(outputLines[isSat+1:])))
            elif isUnsat:
                query.labelUnsat([])
            else:
                query.labelUnknown()

    def __str__(self):
        """
        Returns a string representation of this BoolectorSolver object.

        @retval String representation of this BoolectorSolver object.
        """
        return self.name + "-" + SatSolver.getName(self.satSolver)
|
21,308 | ea9ed172c971073f60739238ddb033f8053b89b1 | a = int(input())
if ((a > pow(10, 9)) or (a < 0)) :
print("Ошибка")
exit(0)
b = a
i = 1
a = 1
while (i < (b + 1)) :
a = a * i
i = i + 1
print(a)
|
21,309 | e11b075796c0cbf46b0afc5dd3ce21d39b280935 | import ast, json
from icecream import ic
from datetime import datetime
from django.core import serializers
from django.shortcuts import render
from django.forms import model_to_dict
from MW.settings import EMAIL_HOST_USER
from django.core.mail import send_mail
from usuarios.models import Usuarios, Linea, LineaStaff
from django.http import HttpResponse, JsonResponse
from django.contrib.auth.hashers import PBKDF2PasswordHasher
from django.views.decorators.http import require_http_methods
from django.views.decorators.csrf import csrf_exempt, ensure_csrf_cookie
from .models import Cronograma, Tarjetas, Maquina, Actividades, sel_com, com
# Create your views here.
hasher = PBKDF2PasswordHasher()
#INDICE
def index(request):
    """Render the index page; answer 401 when the session is not authenticated."""
    authenticated = ('Usuario' in request.session
                     and 'Pass' in request.session
                     and 'priv' in request.session)
    return render(request, 'index.html', status=200 if authenticated else 401)
#OBTIENE LA INFORMACION PARA SER MOSTRADA EN EL PANEL DEL TPM
@require_http_methods(['GET'])
def _get_panel_inf(request, linea=None):
    """Return today's scheduled machines and today's cards for one line.

    The production line comes from the session when available, otherwise
    from the ``linea`` URL argument.
    """
    if request.method == 'GET':
        try:
            dia = datetime.now().weekday()
            cronAct = Cronograma.objects.filter(dia__exact=dia)
            serializedCronAct = serializers.serialize('json', list(cronAct))
            # Resolve the line once instead of duplicating each query per source.
            line_name = request.session['Linea'] if 'Linea' in request.session else linea
            estados = Tarjetas.objects.filter(
                localizacion_id__linea__exact=f"{line_name}",
                fecha__range=(f"{datetime.date(datetime.now())}", f"{datetime.now()}"))
            serializedEstados = serializers.serialize('json', list(estados))
            print(serializedEstados)
            try:
                # Best-effort alert at 09:00 when no cards exist for today;
                # an e-mail failure must never fail the request.
                if datetime.now().hour == 9 and len(Tarjetas.objects.filter(
                        localizacion_id__linea__exact=f"{line_name}",
                        fecha__range=(datetime.date(datetime.now()), datetime.now()))) == 0:
                    send_mail('TPM', 'NO SE HAN REALIZADO TARJETAS', EMAIL_HOST_USER,
                              ['undertale.9055@gmail.com'], False)
            except Exception as e:
                print(e)
            return JsonResponse({'maqdia': serializedCronAct, "tarjetas": serializedEstados}, status=200)
        except Cronograma.DoesNotExist:
            # BUGFIX: this clause was listed *after* `except Exception` and was
            # therefore unreachable; specific handlers must precede generic ones.
            print("No hay maquinas programadas para hoy")
            return JsonResponse({'mensaje': "No existen maquinas programadas para hoy"}, status=200)
        except Exception as e:
            print(e)
            return HttpResponse(status=500)
    return HttpResponse(status=405)
#POSTEA LA INFORMACION SOLICITADA
@require_http_methods(['POST'])
@ensure_csrf_cookie
#@csrf_exempt
def _post_tpm_inf(request):
    # Create a TPM card from the POSTed form data; for non-conforming cards
    # (tipo != 1) additionally send an HTML e-mail notification.
    if request.method == 'POST':
        try:
            # 'data' arrives as a Python-literal string; parse it into a dict.
            data = request.POST.get('data')
            print(data)
            data = ast.literal_eval(data)
            print(data)
            maquina = Maquina.objects.get(nombre__exact=data['categoria'])
            linea = Linea.objects.get(linea__exact=f"{data['linea']}")
            user = Usuarios.objects.get(username__exact=f"{request.session['Usuario']}")
            tarjeta = Tarjetas.objects.create(Id=None, maquina=maquina, descripcion=data['descripcion'], usuario = user, area='ensamble', categoria=data['categoria'], localizacion=linea, tipo=data['tipo'], fecha=datetime.now())
            # NOTE(review): presumably tipo == 1 marks a conforming card, so
            # only other types trigger the alert e-mail — confirm with the model.
            if data['tipo'] != 1:
                # Inline HTML template for the notification e-mail.
                html_message = f"\
<div style='width: 50%; margin: auto; background-color: yellow; border-radius: 10px; padding: 15px; color: black;'>\
<h1 style='text-align:center; font-family: Arial;'>TARJETA NO CONFORME</h1>\
<div style='width: 100%; background-color: white;'>\
<div style='padding: 10px;'>\
<div style='display: flex; align-items:center; justify-content: space-between; margin-bottom: 25px;'>\
<span style='width: 25%; font-size: 20px; font-weight: bold;'>NOMBRE: </span>\
<input style='width: 75%; outline: none; cursor: default; font-family: Arial; border-radius: 5px; padding: 10px; border: none; border-bottom: 1px solid black;' value={request.session['Usuario']} readonly>\
</div>\
<div style='display: flex; align-items: center; justify-content: space-between; margin-bottom: 25px;'>\
<span style='width: 25%; font-size: 20px; font-weight: bold;'>AREA: </span>\
<input style='width: 75%; outline: none; cursor: default; font-family: Arial; border-radius: 5px; padding: 10px; border: none; border-bottom: 1px solid black;' value='Ensamble' readonly>\
</div>\
<div style='display: flex; align-items:center; justify-content: space-between; margin-bottom: 25px;'>\
<span style='width: 25%; font-size: 20px; font-weight: bold;'>CATEGORIA: </span>\
<input style='width: 75%; outline: none; cursor: default; font-family: Arial; border-radius: 5px; padding: 10px; border: none; border-bottom: 1px solid black;' value={data['categoria']} readonly>\
</div>\
<div style='display: flex; align-items:center; justify-content: space-between; margin-bottom: 25px;'>\
<span style='width: 25%; font-size: 20px; font-weight: bold;'>LOCALIZACIÓN: </span>\
<input style='width: 75%; outline: none; cursor: default; font-family: Arial; border-radius: 5px; padding: 10px; border: none; border-bottom: 1px solid black;' value={data['linea']} readonly>\
</div>\
<div style='margin-bottom: 25px;'>\
<span style='font-size: 20px; font-weight: bold;'>DESCRIPCIÓN DEL PROBLEMA: </span>\
<textarea style='outline: none; cursor: default; font-family: Arial; border-radius: 5px; padding: 10px; border: 1px solid black; resize: none; width: 100%;' rows='15' columns='25' readonly>{data['descripcion']}</textarea>\
</div>\
</div>\
</div>\
</div>\
"
                send_mail('TPM', 'NO CONFORME', EMAIL_HOST_USER, ['undertale.9055@gmail.com'], False, html_message=html_message)
            return HttpResponse(status=201)
        except Exception as e:
            print(e)
            return HttpResponse(status=500)
    return HttpResponse(status=405)
@require_http_methods(['POST'])
@ensure_csrf_cookie
#@csrf_exempt
def _get_act_machine(request):
    """Return the activities registered for the machine id given in POST."""
    if request.method == 'POST':
        try:
            machine_id = request.POST.get('id')
            activity_qs = Actividades.objects.filter(maquinas_id__exact=machine_id)
            payload = serializers.serialize('json', list(activity_qs))
            print(payload)
            return JsonResponse({'actividades': payload}, status=200)
        except Exception as e:
            print(e)
            return HttpResponse(status=500)
    return HttpResponse(status=405)
#CRONOGRAMA
@require_http_methods(['GET', 'POST'])
@ensure_csrf_cookie
#@csrf_exempt
def cronograma(request, linea=None):
    """GET: list all machines plus the weekly schedule.
    POST: add a (machine, day) entry to the schedule, rejecting duplicates.
    """
    if request.method == 'GET':
        try:
            print(linea)
            Maquinas = Maquina.objects.all()
            cronograma = Cronograma.objects.all()
            serializedMaquinas = serializers.serialize('json', list(Maquinas))
            serializedCronograma = serializers.serialize('json', list(cronograma))
            print(serializedMaquinas)
            print(serializedCronograma)
            # The session line takes precedence over the URL argument.
            if 'Linea' in request.session:
                return JsonResponse({'maquinas': serializedMaquinas, "cronograma": serializedCronograma, "linea": request.session['Linea'], "usuario": request.session['Usuario']}, status=200)
            else:
                return JsonResponse({'maquinas': serializedMaquinas, "cronograma": serializedCronograma, "linea": linea, "usuario": request.session['Usuario']}, status=200)
        except Exception as e:
            print(e)
            return HttpResponse(status=500)
    elif request.method == 'POST':
        try:
            maquina = request.POST.get('data')
            maquina = ast.literal_eval(maquina)
            maquinaAct = Maquina.objects.get(pk=f"{maquina['maquina']}")
            print(maquina)
            # Create only when no identical (day, machine) row already exists.
            if len(Cronograma.objects.filter(dia__exact=maquina['dia'], maquina_id__exact=maquinaAct.Id)[:1]) == 0:
                Cronograma.objects.create(Id=None, maquina=maquinaAct, dia=maquina['dia'])
                return HttpResponse(status=201)
            else:
                print("REGISTRO EXISTENTE")
                return HttpResponse(status=400)
            # BUGFIX: removed the unreachable `print(cronMaquina)` that
            # followed the returns above.
        except Exception as e:
            print(e)
            return HttpResponse(status=500)
    return HttpResponse(status=405)
@require_http_methods(['POST'])
@ensure_csrf_cookie
#@csrf_exempt
def cronograma_delete(request):
    """Remove one (day, machine) entry from the schedule."""
    if request.method == 'POST':
        try:
            payload = ast.literal_eval(request.POST.get('data'))
            entry = Cronograma.objects.get(dia__exact=payload['dia'], maquina_id__exact=payload['maquina']['id'])
            entry.delete()
            return HttpResponse(status=203)
        except Exception as e:
            print(e)
            return HttpResponse(status=500)
    return HttpResponse(status=405)
#USUARIOS
@require_http_methods(['GET', 'POST'])
@ensure_csrf_cookie
#@csrf_exempt
def usuarios(request):
    # GET: list every user as parallel column arrays (index i across the
    # lists describes user i). POST: create a superuser from the posted
    # Python-literal dict.
    if request.method == 'GET':
        try:
            serializedUsuarios = {}
            usuarios = Usuarios.objects.all()
            print(len(usuarios))
            # Columnar serialization of the user table.
            serializedUsuarios = {
                "id" : [i.id for i in usuarios],
                "username": [i.username for i in usuarios],
                "email": [i.email for i in usuarios],
                "linea": [i.linea for i in usuarios],
                "clave": [i.clave for i in usuarios]
            }
            print(serializedUsuarios)
            return JsonResponse({'usuarios': serializedUsuarios}, status=200)
        except Exception as e:
            print(e)
            return HttpResponse(status=500)
    elif request.method == 'POST':
        try:
            data = request.POST.get('data')
            print(data)
            data = ast.literal_eval(data)
            user = Usuarios.objects.create_superuser(username=data['user'], email=data['email'], password=data['password'], linea=data['linea'], clave = data['clave'], user_type=data['tipoUsuario'])
            serialized = {
                "id": user.id,
                "username": user.username,
                "linea": user.linea,
                "email": user.email
            }
            return JsonResponse({'usuario': serialized}, status=201)
        except Exception as e:
            print(e)
            # NOTE(review): every creation failure is reported as a duplicate
            # with HTTP 200 — presumably an IntegrityError is expected here;
            # confirm whether other errors should answer 500 instead.
            return JsonResponse({'Error': "Entrada Duplicada: El usuario, correo y/o linea ya han sido registrado :("}, status= 200)
    return HttpResponse(status=405)
#ELIMINA USUARIOS
@require_http_methods(['POST'])
@ensure_csrf_cookie
#@csrf_exempt
def _del_user(request):
    """Delete the user whose primary key is POSTed; admin-only."""
    if request.method == 'POST' and request.session['priv'] == 'admin':
        try:
            target = Usuarios.objects.get(pk=request.POST.get('id'))
            target.delete()
            return HttpResponse(status=203)
        except Exception as e:
            print(e)
            return HttpResponse(status=500)
    return HttpResponse(status=405)
#MODIFICA USUARIOS EXISTENTES
@require_http_methods(['POST'])
@ensure_csrf_cookie
#@csrf_exempt
def _modify_user(request):
    # Update username, password (re-hashed with the module-level PBKDF2
    # hasher), line and e-mail of an existing user; admin-only.
    if request.method == 'POST' and request.session['priv'] == 'admin':
        try:
            data = request.POST.get('data')
            data =ast.literal_eval(data)
            print(data)
            user = Usuarios.objects.get(pk=data['id'])
            user.username = data['user']
            # Re-hash the plaintext password before storing it.
            password = hasher.encode(data['password'], hasher.salt())
            user.password = password
            user.linea = data['linea']
            user.email = data['email']
            user.save()
            print(user)
            return HttpResponse(status=202)
        except Exception as e:
            print(e)
    # NOTE(review): failures fall through to 405 here, while sibling views
    # answer 500 on exceptions — confirm whether that is intended.
    return HttpResponse(status = 405)
@require_http_methods(['POST'])
@csrf_exempt
def _add_production_line(request):
    """Create a production line and attach it to the given user.

    BUGFIX: the POSTed 'data' field is a string and must be parsed with
    ast.literal_eval before subscripting — indexing the raw string with
    data['usuario'] raised TypeError, so every request answered 500.
    """
    if request.method == 'POST':
        try:
            data = request.POST.get('data')
            data = ast.literal_eval(data)
            user = Usuarios.objects.get(username__exact=f"{data['usuario']}")
            linea = Linea.objects.create(Id=None, linea=f"{data['linea']}", usuario=user)
            return HttpResponse(status=201)
        except Exception as e:
            print(e)
            return HttpResponse(status=500)
    return HttpResponse(status=405)
@require_http_methods(['POST', 'GET'])
@ensure_csrf_cookie
#@csrf_exempt
def _select_com(request):
    # Admin-only view over the single serial-port record (pk=1):
    # GET returns the configured COM port, POST updates it.
    if request.session['priv'] == 'admin':
        if request.method == 'GET':
            try:
                com = sel_com.objects.get(pk=1)
                return JsonResponse({'puerto': com.com}, status=201)
            except sel_com.DoesNotExist:
                # First access: seed the record with a default port.
                print('No hay puertos creados')
                sel_com.objects.create(Id= None, com="COM1")
                # NOTE(review): no return here, so this path falls through to
                # the 401 below even for an admin — confirm whether the newly
                # created port should be returned instead.
        else:
            try:
                data = request.POST.get('com')
                com = sel_com.objects.get(pk=1)
                com.com = data
                com.save()
                return HttpResponse(status=201)
            except Exception as e:
                print(e)
                return HttpResponse(status=500)
    return HttpResponse(status=401)
#HISTORIAL
@require_http_methods(['POST'])
@ensure_csrf_cookie
#@csrf_exempt
def _machine_history(request):
    """Return every card for a (line, machine) pair plus the card authors."""
    if request.method == 'POST':
        try:
            data = ast.literal_eval(request.POST.get('data'))
            hisTarj = Tarjetas.objects.filter(localizacion_id__linea__exact=f"{data['linea']}", maquina_id__nombre__exact=f"{data['maquina']}")
            # Author usernames in the same order as the serialized cards.
            userArr = [card.usuario.username for card in hisTarj]
            serializedTarj = serializers.serialize('json', list(hisTarj))
            return JsonResponse({'hist': serializedTarj, 'users': userArr}, status=200)
        except Tarjetas.DoesNotExist:
            # BUGFIX: this clause was placed after `except Exception` and was
            # therefore unreachable; specific handlers must come first.
            print("No existen tarjetas todavia")
            return JsonResponse({'Mensaje': "Todavia no existen tarjetas de esta maquina"}, status=200)
        except Exception as e:
            print(e)
            return HttpResponse(status=500)
    return HttpResponse(status=405)
#TARJETA ESPECIFICA
@require_http_methods(['POST'])
@ensure_csrf_cookie
#@csrf_exempt
def _get_machine_card(request):
    """Return one card, identified by id, as a JSON-serialized dict.

    BUGFIXES:
    * the POSTed 'data' string is now parsed with ast.literal_eval before
      subscripting (indexing the raw string with data['id'] raised TypeError);
    * the lookup kwarg was `Id_exact` (single underscore), which Django
      rejects — it must be `Id__exact` (mirrors _get_machine_card_by_id);
    * `except Tarjetas.DoesNotExist` was listed after the generic
      `except Exception` and was therefore unreachable.
    """
    if request.method == 'POST':
        try:
            data = ast.literal_eval(request.POST.get('data'))
            card = Tarjetas.objects.get(Id__exact=f"{data['id']}")
            card = model_to_dict(card)
            serializedCard = json.dumps(card)
            return JsonResponse({'card': serializedCard}, status=200)
        except Tarjetas.DoesNotExist:
            print("No existe la tarjetas de la maquina")
            return HttpResponse(status = 204)
        except Exception as e:
            print(e)
            return HttpResponse(status=500)
    return HttpResponse(status =405)
@require_http_methods(['POST'])
@ensure_csrf_cookie
#@csrf_exempt
def _get_machine_card_by_id(request):
    """Return a single card matching the posted id and production line."""
    if request.method == 'POST':
        try:
            payload = ast.literal_eval(request.POST.get('data'))
            card = Tarjetas.objects.get(Id__exact=f"{payload['id']}", localizacion_id__linea__exact=f"{payload['linea']}")
            return JsonResponse({'card': model_to_dict(card), "usuario": request.session['Usuario']}, status=200)
        except Tarjetas.DoesNotExist:
            print("No existe la Tarjeta")
            return JsonResponse({'mensaje': "No existen registros"},status = 200)
        except Exception as e:
            print(e)
            return HttpResponse(status=500)
    return HttpResponse(status=405)
@require_http_methods(['POST'])
@ensure_csrf_cookie
#@csrf_exempt
def _modify_card(request):
    """Update the improvement-proposal fields of an existing card."""
    if request.method == 'POST':
        try:
            raw = request.POST.get('data')
            print(raw)
            fields = ast.literal_eval(raw)
            card = Tarjetas.objects.get(pk=fields['id'])
            card.propuesta = fields['propuesta']
            card.implementada = fields['implementada']
            card.save()
            return HttpResponse(status=202)
        except Exception as e:
            print(e)
            return HttpResponse(status=500)
    return HttpResponse(status=405)
@require_http_methods(['GET'])
def __get_line_info(request, linea = None):
    # Return basic information about a production line, seeding the
    # per-turn staffing rows (turns A/B/C) on first access.
    def __insertTurnData():
        # Create one LineaStaff row per turn when none exist for this line.
        turns = ('A', 'B', 'C')
        lineToEdit = Linea.objects.get(linea__exact=f"{linea}")
        if len(LineaStaff.objects.filter(linea_id__linea__exact=f"{linea}")) == 0:
            ic("NO EXISTEN REGISTROS")
            ic("Creando Registros")
            for i in range(3):
                LineaStaff.objects.create(Id = None, turno = turns[i], staff = 0, linea = lineToEdit)
    if request.method == "GET":
        try:
            # Only act on a non-empty string line name.
            if type(linea) == str and linea != "":
                __insertTurnData()
                infoLine = Linea.objects.get(linea__exact=f"{linea}")
                userLine = Usuarios.objects.get(linea__exact=f"{linea}")
                return JsonResponse({'Linea': infoLine.linea, 'Personal': infoLine.personal, "username": userLine.username})
            # NOTE(review): an empty/non-str `linea` returns nothing from the
            # try block and falls through to 405 — confirm intended.
        except TypeError as te:
            ic("Tipo de dato erroneo o no contiene informacion")
            return HttpResponse(status = 400)
        except Exception as e:
            print(e)
            return HttpResponse(status = 500)
    return HttpResponse(status = 405)
@require_http_methods(['POST'])
@ensure_csrf_cookie
def _change_line_data(request):
    # Update the staff count for one (line, turn) pair, then recompute the
    # line's total staff across all turns.
    if request.method == "POST":
        staff = 0
        try:
            data = request.POST.get('info')
            data = ast.literal_eval(data)
            ic(data)
            if data:
                lineToEdit = Linea.objects.get(linea__exact=f"{data['lineToEdit']}")
                lineStaff = LineaStaff.objects.get(linea_id__linea__exact=f"{data['lineToEdit']}", turno__exact=f"{data['turn']}")
                if lineStaff:
                    lineStaff.staff = int(data['newWorkers'])
                    lineStaff.save()
                    # Recompute the aggregate head count for the whole line.
                    for i in LineaStaff.objects.filter(linea_id__linea__exact=f"{data['lineToEdit']}"):
                        staff += i.staff
                    lineToEdit.personal = staff
                    lineToEdit.save()
                    return HttpResponse(status = 200)
            raise Exception("La informacion se encuentra vacia")
            # NOTE(review): the return below is unreachable because of the
            # raise above (the generic except turns it into a 500) — confirm
            # whether a 400 was intended for empty payloads.
            return HttpResponse(status = 400)
        except Exception as e:
            print(e)
            return HttpResponse(status = 500)
    return HttpResponse(status = 405)
@require_http_methods(['GET'])
def _get_workers(request, line = None, turn = None):
if request.method == "GET":
try:
if line and turn != None:
data = LineaStaff.objects.get(linea_id__linea__exact=f"{line}", turno__exact=f"{turn}")
ic(data)
return JsonResponse({'workers': int(data.staff)}, status = 200)
except Exception as e:
print(e)
return HttpResponse(status = 500)
return HttpResponse(status = 405) |
21,310 | e8ff3bf06b09e51210bf5425d26ff8d4d379e3a4 | sandwich_orders = ['bacon','banana','xigua']
orders = []
finished_sandwiches = []
print("Today's sandwich have " + str(sandwich_orders))
print("Please input the number which sandwich did you want")
print("If you want to exit please input 'quit'")
switch = True
while switch != False :
    order = raw_input("\nPlease input your order: ")
    if order == "quit":
        switch = False
    elif order in sandwich_orders :
        print("We will made: " + order + " sandwich for you")
        # BUGFIX: list.remove() returns None; the original rebound `order` to
        # that None before appending, so finished_sandwiches filled with None
        # instead of sandwich names.
        sandwich_orders.remove(order)
        finished_sandwiches.append(order)
    elif order == "stock":
        print(sandwich_orders)
    else :
        print("Sorry we have not " + order + " sandwich!")
|
21,311 | f1106f235c49a5c08917e422ae7a41be46782248 | print ("calculadora basica ")
# Simple four-operation calculator menu (all user-facing text unchanged).
print("ingrese el numero de operacion que desea realizar ")
print("opc 1 .-sumar ")
print("opc 2 .-restar ")
print("opc 3 .-dividir ")
print("opc 4 .-multiplicar ")
a = int(input("operacion ---> "))
b = int(input("ingrese el primer valor"))
c = int(input("ingrese el segundo valor"))
# Dispatch on the chosen operation, printing the result inline.
if a == 1:
    print("el resultado de la suma es --> ", b + c)
elif a == 2 :
    print("el resulñtado de la resta es --> ", b - c)
elif a == 4 :
    print("el resultado de la multiplicacion es --> ", b * c)
elif a == 3 :
    print("el resultado de la division es ---> ", b / c)
else:
    print("el numeor de operacion ingresada no exixte")
|
21,312 | 142468d2e81df416c35e487ba0b1fef1152c2c46 | from flask import Flask, render_template
app = Flask(__name__)

@app.route('/')  # serve the password page at the site root
def password():
    """Render the password entry template."""
    return render_template('pw.html')

if __name__ == '__main__':
    # Listen on all interfaces; debug mode is on, so not for production use.
    app.run(host='0.0.0.0', port = 3000, threaded = True, debug = True)
|
21,313 | 3ffe3741622d81ed452fa396a785132807c6a916 | """
Paradrop command line utility.
Two guiding principles were used in the design of the Paradrop command
line utility.
* The command tree should be relatively flat for easy discoverability.
Commands are grouped into a few cohesive modules (chute, cloud, node,
and store). All of the commands in the cloud module make use of the
cloud controller API, for example. There are no further subgroups under
these top-level groups, which means that `pdtools cloud --help` lists
all relevant commands.
* Commands should be self-evident in their effect. All command names
begin with a verb. Commands that begin with "list-" can be expected
to produce a table of output; commands that begin with "create-" can
be expected to result in the creation of a new resource; and so on.
Although "describe-" is a bit verbose, it conveys a sense that this
command returns everything one needs to know about an object in a way that
"get" does not. It is possible to enable tab completion when pdtools
is installed as a system package.
To enable tab completion, add the following line to your .bashrc file:
eval "$(_PDTOOLS_COMPLETE=source pdtools)"
"""
import os
# Import readline for its side-effect: it wraps stdin and handles special
# characters such as the arrow keys (^[[D and ^[[C), backspace in SSH from
# Windows Powershell (^H), etc.
import readline
import click
# Deprecated command groups to be removed prior to 1.0 release
from . import device
from . import groups
from . import routers
# Command groups
from . import chute
from . import cloud
from . import discover
from . import node
from . import store
from . import wizard
from . import errors
# Cloud controller endpoint; overridable via the PDSERVER_URL env variable.
PDSERVER_URL = os.environ.get("PDSERVER_URL", "https://paradrop.org")

# Shared click settings applied to the whole command tree.
CONTEXT_SETTINGS = dict(
    # Options can be parsed from PDTOOLS_* environment variables.
    auto_envvar_prefix = 'PDTOOLS',

    # Respond to both -h and --help for all commands.
    help_option_names = ['-h', '--help'],

    # Context object handed to every command.
    obj = {
        'pdserver_url': PDSERVER_URL
    }
)
@click.group(context_settings=CONTEXT_SETTINGS)
def root():
    """
    Paradrop command line utility.
    """
    # Top-level click group; subcommand groups are attached below.
    pass
@root.command('help')
@click.pass_context
def help(ctx):
    """
    Show this message and exit
    """
    # Echo the parent group's help so `pdtools help` mirrors `pdtools -h`.
    click.echo(ctx.parent.get_help())
# Attach each module's command group to the top-level CLI.
# Deprecated command groups to be removed prior to 1.0 release
root.add_command(device.device)
root.add_command(routers.routers)
groups.register_commands(root)

# Command groups
root.add_command(chute.root)
root.add_command(cloud.root)
root.add_command(discover.root)
root.add_command(node.root)
root.add_command(store.root)
root.add_command(wizard.root)
def handle_controller_connection_error(error):
    # Explain a failed cloud-controller connection. If the failing target
    # matches the configured PDSERVER_URL, point the user at configuration;
    # otherwise suggest a network problem.
    click.echo("Connecting to the controller at {} failed.".format(
        error.target))
    if error.target in os.environ.get('PDSERVER_URL', ''):
        click.echo('Please ensure that you have configured the PDSERVER_URL ' +
                   'environment variable correctly.')
        click.echo('It is currently set to "{}".'.format(
            os.environ.get('PDSERVER_URL', None)))
    else:
        click.echo('Please check your network connection.')
def handle_node_connection_error(error):
    # Explain a failed node connection. If the failing target came from the
    # PDTOOLS_NODE_TARGET variable, point the user at configuration.
    click.echo("Connecting to the node at {} failed.".format(
        error.target))
    if error.target == os.environ.get('PDTOOLS_NODE_TARGET', ''):
        click.echo('Please ensure that you have configured the ' +
                   'PDTOOLS_NODE_TARGET environment variable correctly.')
        click.echo('It is currently set to "{}".'.format(
            os.environ.get('PDTOOLS_NODE_TARGET', None)))
    else:
        click.echo('Please ensure you have the correct address ' +
                   'for the node and that your network connection is up.')
def main():
    """
    Entry point for the pdtools Python package.
    """
    # Translate the known connection failures into friendly diagnostics
    # instead of letting a raw traceback reach the user.
    try:
        root()
    except errors.ControllerConnectionError as error:
        handle_controller_connection_error(error)
    except errors.NodeConnectionError as error:
        handle_node_connection_error(error)

if __name__ == "__main__":
    main()
|
21,314 | 9b11d4dcb10385c26affef706418da02f4bacac0 | """
Conftest for Sanic-Redis
"""
from typing import Any, Dict
import re
import pytest
from sanic import Sanic
from sanic_redis import SanicRedis
CACHE: Dict[str, Any] = {}
slugify = re.compile(r"[^a-zA-Z0-9_\-]")
@pytest.fixture(scope="function")
def app(request):
    """
    Basic Fixture to test Sanic
    """
    # Slugify the test node name so each test gets a unique, Sanic-legal
    # application name.
    my_app = Sanic(slugify.sub("-", request.node.name))
    redis = SanicRedis()
    redis.init_app(my_app, config_name="my_test_redis",
                   redis_url="redis://127.0.0.1")
    yield my_app
|
21,315 | 35fa755616dca7437d8b7e41294bd91a7d2339ec | def run_timing():
count = 0
km = 0.0
stop = False
while not stop:
try:
str = input('Enter 10 km run time: ')
if not str:
break
n = float(str)
km = km + n
count = count + 1
except ValueError as e:
print("That's not a valid number")
if count == 0:
print('No run!')
else:
print(f'Average of {km / count}, over {count} runs')
run_timing() |
21,316 | 71fe9284e98b927387bd578170001e3f1728ca42 | from sklearn.manifold import TSNE
import os
import matplotlib.pyplot as plt
from sklearn.cluster import KMeans
import numpy as np
from new_get_model_data import get_model_data
def run_tsne(channel_embeddings, output_dir):
    """Project embeddings to 2-D with t-SNE, cluster them with KMeans,
    and save a scatter plot colored by cluster to output_dir.

    :param channel_embeddings: 2-D array-like, one embedding per row.
    :param output_dir: existing directory to write channel_cluster.png into.
    """
    tsne = TSNE(verbose=3)
    X = tsne.fit_transform(channel_embeddings)

    kmeans = KMeans()
    kmeans.fit(X)
    y_kmeans = kmeans.predict(X)

    print("y_kmeans:")
    for i, cluster in enumerate(y_kmeans):
        print("index:", i)
        print("kmeans index: ", cluster)
        print("x-axis value: ", X[i, 0])
        print("y-axis value: ", X[i, 1])

    plt.scatter(X[:, 0], X[:, 1], c=y_kmeans, cmap='plasma')
    plt.title('t-SNE Clusters of Channel Embeddings')
    plt.xlabel('Feature 1')
    plt.ylabel('Feature 2')
    # BUGFIX: save before show() -- show() can clear the current figure,
    # which left channel_cluster.png blank. Also join the path portably.
    plt.savefig(os.path.join(output_dir, 'channel_cluster.png'))
    plt.show()
def main():
    """Load the model's channel embeddings and plot their t-SNE clusters."""
    model_data = get_model_data('../data/model')
    channel_embeddings = np.transpose(model_data['M_V'])
    output_dir = './plots'
    # Make sure the plot directory exists before run_tsne writes into it.
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)
    run_tsne(channel_embeddings, output_dir)


if __name__ == '__main__':
    main()
|
21,317 | 30788f461f4e9fc9304256f053c9b04c7fb1cc0c | #!/usr/bin/env python3
# Read one whitespace-separated line and echo its tokens in reverse order.
tokens = input().split()
print(' '.join(reversed(tokens)))
21,318 | 8d09fc623d5fb12ccea6e2e74992821ec51143c0 | import math, os, tempfile,copy
import time, random
import ray
import numpy as np
from simtk.openmm.app import *
from simtk.openmm import *
from simtk.unit import *
from sys import stdout
# Pin GPU numbering to PCI bus order and expose three boards to this process.
os.environ["CUDA_DEVICE_ORDER"]="PCI_BUS_ID"
os.environ["CUDA_VISIBLE_DEVICES"]="2,3,4"
# Start the local Ray runtime sized to match the GPUs made visible above.
ray.init(num_cpus=8, num_gpus=3)
# ray.init(num_cpus=3, memory=5_000_000_000, object_store_memory=5_000_000_000)
# ray.init(num_cpus=8, num_gpus=8, memory=20_000_000_000, object_store_memory=20_000_000_000) # for cluster, 20Gb memory
## ray start --head --num-gpus=2 --num-cpus=6 --memory 20000000000
# ray.init(address='auto', redis_password='5241590000000000')
# os.environ["CUDA_DEVICE_ORDER"]="PCI_BUS_ID"
# os.environ["CUDA_VISIBLE_DEVICES"]="0,2"
### currently cant inherit from a ray actor class...turn this on if you just want to use Replica
# @ray.remote(num_cpus=1, num_gpus=0.33)
# class Replica:
# def __init__(self, rep_num, replica_params, simulation_params):
# self._rep_num = rep_num
# self._replica_params = replica_params
# self._simulation_params = simulation_params
# self._initial = True
# def _get_integrator(self):
# if 'langevin' in self._simulation_params['integrator_type']:
# return LangevinIntegrator(self._replica_params['temp']*kelvin,
# self._simulation_params['damping']/picosecond,
# self._simulation_params['timestep']*picoseconds)
# else:
# print('need to implement integrators other than langevin')
# def _build(self):
# self.integrator = self._get_integrator()
# platform = Platform.getPlatformByName(self._simulation_params['platform'])
# if self._simulation_params['platform'] == 'CUDA':
# print('CUDA')
# properties = {'Precision':self._simulation_params['Precision'], 'UseCpuPme':self._simulation_params['UseCpuPme'], 'CudaDeviceIndex':self._replica_params['device']}
# self.context = Context(self.system, self.integrator, platform, properties)
# else:
# self.context = Context(self.system, self.integrator)
# self.context.setPositions(self.modeller.positions)
# def make_simulation(self, pdb, forcefield):
# self._pdb = pdb
# self._forcefield = forcefield
# self.modeller = Modeller(self._pdb.topology, self._pdb.positions)
# self.modeller.addSolvent(self._forcefield, padding=self._simulation_params['padding']) #, model=self._simulation_params['water'], padding=self._simulation_params['padding'])
# if self._simulation_params['water'] != 'tip3p' and self._simulation_params['water'] != 'implicit':
# self.modeller.addExtraParticles(self._forcefield)
# self.system = self._forcefield.createSystem(self.modeller.topology, nonbondedMethod=PME, nonbondedCutoff=0.9*nanometer, vdwCutoff=1.0*nanometer, constraints=HBonds)
# self.topology = self.modeller.topology
# self._write_dir = os.path.join(self._simulation_params['write_path'],'{}'.format(self._rep_num))
# if not os.path.exists(self._write_dir):
# os.mkdir(self._write_dir)
# self._build()
# self._subclassbuild()
# def update(self):
# '''
# For subclass-specific parameter updating (e.g. REST2 scaling after a switch)
# '''
# pass
# def step(self, steps, minimize, reset_vel):
# print('inner step')
# if minimize:
# tolerance = 10*kilojoule/mole
# maxIterations = 0
# LocalEnergyMinimizer.minimize(self.context, tolerance, maxIterations)
# if reset_vel:
# print('setting / resetting velocities')
# temp = self.integrator.getTemperature()
# self.context.setVelocitiesToTemperature(temp*kelvin)
# print('running for {} steps...'.format(steps))
# self.integrator.step(steps)
# print('rep {} done!'.format(self._rep_num))
# self.state = self.context.getState(getPositions=True, getVelocities=True)
# return self.state.getTime()
# def get_energies(self):
# '''
# Gets the energies of a replica under all thermodynamic states
# '''
# energies = []
# for i in range(self._simulation_params['num_replicas']):
# self.update(i)
# energies.append(self.context.getState(PotentialEnergy=True).getPotentialEnergy())
# return energies
# def _subclassbuild(self):
# '''
# Again, placeholder to be used by subclasses
# '''
# pass
# def sync(self):
# '''
# does nothing but block ray processes to sync simulations
# '''
# pass
# @ray.remote(num_cpus=1, num_gpus=0.2)
@ray.remote(num_cpus=1, num_gpus=1)
class REST2Replica():
    """One REST2 replica, run as a Ray actor on its own GPU.

    The replica owns an OpenMM Context and emulates different effective
    temperatures by rescaling solute interactions: solute-solute epsilons,
    exceptions and proper torsions by `scale`, solute charges by
    sqrt(scale). All replicas integrate at the same physical temperature.
    """

    def __init__(self, rep_num, replica_params, simulation_params):
        self._rep_num = rep_num
        self._step = 0                            # MD steps taken so far
        self._replica_params = replica_params     # temp, t_scales, device
        self._simulation_params = simulation_params
        self._initial = True                      # first scaling call caches unscaled params
        self._R = 0.00831446261815324*kilojoules/moles/kelvin  # gas constant, kJ/mol/K
        self._state = None                        # thermodynamic state index currently held
        self._reset_vel = False                   # redraw velocities after a state switch
        self._min = False                         # re-minimize after a state switch
        self._current_chk = None
        self._prev_chk = None
        self._make_files = True                   # reporter files are created lazily

    def update(self, new_state, calc_energy=False):
        """Apply the parameter scaling of thermodynamic state `new_state`.

        When the state genuinely changes (not a transient sweep for energy
        evaluation), flag a re-minimization and a velocity redraw before the
        next MD segment.
        """
        if self._initial:
            init = True
            self._initial = False
        else:
            init = False
        if new_state != self._state and not calc_energy:
            self._reset_vel = True
            self._min = True
            self._state = new_state
        scale = self._scale_to_params[new_state]
        self._parameter_scaling(self.solute_atoms, self.solvent_atoms, scale, initial=init)

    def _write_reporters(self):
        """Write checkpoint/DCD/state output when the respective frequency divides the step count."""
        if self._make_files:
            self._make_files = False
            self.out_name = '{}_sim_{}_restart_{}'.format(self._simulation_params['io_name'],self._rep_num,self._simulation_params['restart'])
            if self._simulation_params['dcd_freq'] != 0:
                dcd_name = self.out_name + '.dcd'
                self._dcd_file = DCDFile(open(os.path.join(self._write_dir,dcd_name), 'wb'), self.topology, self._simulation_params['timestep']*picoseconds)
            if self._simulation_params['state_freq'] != 0:
                state_name = self.out_name + '.csv'
                self._state_file = open(os.path.join(self._write_dir,state_name), 'w')
                self._state_file.write('time-(ps), step, potential_energy_(KJ/mol), kinetic_energy_(KJ/mol)\n')
        # ROBUSTNESS: each branch now also checks freq != 0 -- the original
        # computed `self._step % freq`, which raises ZeroDivisionError when a
        # frequency is used to disable that output.
        chk_freq = self._simulation_params['chk_freq']
        if chk_freq != 0 and self._step % chk_freq == 0:
            self._prev_chk = self._current_chk
            self._current_chk = self.out_name + '_step_{}.chk'.format(self._step)
            with open(os.path.join(self._write_dir,self._current_chk),'wb') as chk_file:
                chk_file.write(self.context.createCheckpoint())
            # Keep only the newest checkpoint on disk.
            if self._prev_chk != None:
                os.system('rm {}'.format(os.path.join(self._write_dir,self._prev_chk)))
        dcd_freq = self._simulation_params['dcd_freq']
        if dcd_freq != 0 and self._step % dcd_freq == 0:
            state = self.context.getState(getPositions=True, getEnergy=True, enforcePeriodicBox=True)
            self._dcd_file.writeModel(state.getPositions(), periodicBoxVectors=state.getPeriodicBoxVectors())
        state_freq = self._simulation_params['state_freq']
        if state_freq != 0 and self._step % state_freq == 0:
            if state_freq != dcd_freq:
                # BUGFIX: was self.context.getStart(...), which does not exist
                # on an OpenMM Context; when the frequencies match, the state
                # fetched in the DCD branch above is reused instead.
                state = self.context.getState(getEnergy=True)
            data = [state.getTime()._value, self._step, state.getPotentialEnergy()._value, state.getKineticEnergy()._value]
            info = ','.join(str(d) for d in data)
            info += '\n'
            self._state_file.write(info)
            self._state_file.flush()

    def _get_integrator(self):
        """Construct the integrator named in the simulation parameters."""
        if 'langevin' in self._simulation_params['integrator_type']:
            return LangevinIntegrator(self._replica_params['temp']*kelvin,
                                      self._simulation_params['damping']/picosecond,
                                      self._simulation_params['timestep']*picoseconds)
        else:
            print('need to implement integrators other than langevin')

    def _build(self):
        """Create the OpenMM Context and dump the starting structure as a PDB."""
        self.integrator = self._get_integrator()
        platform = Platform.getPlatformByName(self._simulation_params['platform'])
        if self._simulation_params['platform'] == 'CUDA':
            print('making on device: ',self._replica_params['device'])
            # DeviceIndex is '0' because Ray already restricts this actor to one GPU.
            properties = {'Precision':self._simulation_params['Precision'], 'UseCpuPme':self._simulation_params['UseCpuPme'], 'DeviceIndex':'0'}
            try:
                self.context = Context(self.system, self.integrator, platform, properties)
            except Exception as err:  # narrowed from a bare except
                print('CANNOT MAKE CONTEXT')
                print(err)
                exit()
        else:
            self.context = Context(self.system, self.integrator)
        self.context.setPositions(self.modeller.positions)
        positions = self.context.getState(getPositions=True).getPositions()
        init_rep_pdb = '{}_sim_{}_restart_{}.pdb'.format(self._simulation_params['io_name'],self._rep_num,self._simulation_params['restart'])
        init_rep_pdb_path = os.path.join(self._write_dir,init_rep_pdb)
        PDBFile.writeFile(self.topology, positions, open(init_rep_pdb_path,'w'))
        return

    def _subclassbuild(self):
        """Cache solute/solvent atom indices and apply this replica's initial scaling."""
        self.solute_atoms, self.solvent_atoms = self._solvent_solute()
        self._scale_to_params = {}
        for i, scale in enumerate(self._replica_params['t_scales']):
            self._scale_to_params[i] = scale
        self.update(self._rep_num)
        return

    def make_simulation(self, pdb, forcefield):
        """Solvate the input structure, build the System, and create the Context."""
        self._pdb = pdb
        self._forcefield = forcefield
        self.modeller = Modeller(self._pdb.topology, self._pdb.positions)
        self.modeller.addSolvent(self._forcefield, padding=self._simulation_params['padding'], ionicStrength=self._simulation_params['ion_conc'])
        if self._simulation_params['water'] != 'tip3p' and self._simulation_params['water'] != 'implicit':
            # 4/5-site water models need their virtual sites added explicitly.
            self.modeller.addExtraParticles(self._forcefield)
        self.system = self._forcefield.createSystem(self.modeller.topology, nonbondedMethod=PME, nonbondedCutoff=0.9*nanometer, vdwCutoff=1.0*nanometer, constraints=HBonds)
        self.topology = self.modeller.topology
        self._write_dir = os.path.join(self._simulation_params['write_path'],'{}'.format(self._rep_num))
        if not os.path.exists(self._write_dir):
            os.mkdir(self._write_dir)
        self._build()
        self._subclassbuild()

    def step(self, steps, minimize, reset_vel, write):
        """Advance `steps` MD steps, minimizing / redrawing velocities first if
        requested by the caller or pending from a state switch."""
        if minimize or self._min:
            tolerance = 5*kilojoule/mole
            maxIterations = 0  # 0 = iterate until the tolerance is met
            LocalEnergyMinimizer.minimize(self.context, tolerance, maxIterations)
            self._min = False
        if reset_vel or self._reset_vel:
            temp = self._simulation_params['temp']
            self.context.setVelocitiesToTemperature(temp*kelvin)
            self._reset_vel = False
        self.integrator.step(steps)
        self._step += steps
        if write:
            self._write_reporters()

    def get_energies(self):
        '''
        Gets the reduced energies (U/kT) of this replica under all
        thermodynamic states; the caller is expected to restore the correct
        scaling afterwards via update().
        '''
        energies = []
        for i in range(self._simulation_params['num_replicas']):
            # calc_energy=True: transient rescaling only; do not flag min/vel reset.
            self.update(i, calc_energy=True)
            energies.append(self.context.getState(getEnergy=True).getPotentialEnergy()/(self._R*self._replica_params['temp']*kelvin))
        return energies

    def _check_nans(self):
        """Raise RuntimeError if the current coordinates or velocities contain NaNs."""
        # BUGFIX: the original referenced undefined locals `coordinates` and
        # `velocities` (NameError); fetch them from the context instead.
        state = self.context.getState(getPositions=True, getVelocities=True)
        coordinates = state.getPositions(asNumpy=True)._value
        velocities = state.getVelocities(asNumpy=True)._value
        if np.isnan(coordinates).any():
            raise RuntimeError(f'NaN in coordinates of {self._rep_num}')
        if np.isnan(velocities).any():
            raise RuntimeError(f'NaN in velocities of {self._rep_num}')

    def sync(self):
        '''
        does nothing but block ray processes to sync simulations
        '''
        pass

    def _solvent_solute(self):
        """Split atom indices into solute and solvent/ion lists by residue name."""
        solute_atoms = []
        solvent_atoms = []
        for i, res in enumerate(self.topology.residues()):
            if res.name.upper() not in ['HOH','WAT','SOL','H2O','CL','NA','MG','K','RB','LI','I','F','BR','CA']: ### generalize to ligand?
                for atom in res.atoms():
                    solute_atoms.append(atom.index)
            else:
                for atom in res.atoms():
                    solvent_atoms.append(atom.index)
        return solute_atoms, solvent_atoms

    def _parameter_scaling(self, solute_atoms, solvent_atoms, scale_factor, initial=False, nonbonded=True, torsions_p=True, torsions_np=False, bonds=False, angles=False):
        '''
        take all the solute-solute and solvent-solute interactions and scale them via the scale_factor.
        The scale factor (effective temp) for solu-solu is Bj/B0, while solu-solv its sqrt(Bj/B0)
        '''
        if initial:
            # Cache the unscaled parameters once so later calls can rescale
            # from the originals instead of compounding scale factors.
            self.params_nonbond = {}
            self.params_nonbondexcept = {}
            self.params_torsion = {}
        if nonbonded:
            nonbondedforces = [f for f in [self.system.getForce(i) for i in range(self.system.getNumForces())] if type(f) == NonbondedForce][0]
            for ind in solute_atoms:
                q, sigma, eps = nonbondedforces.getParticleParameters(ind)
                if initial:
                    self.params_nonbond[ind] = [q, eps]
                else:
                    q, eps = self.params_nonbond[ind][0], self.params_nonbond[ind][1]
                # sqrt on charges: solute-solvent pairs then scale linearly overall.
                nonbondedforces.setParticleParameters(ind, math.sqrt(scale_factor)*q, sigma, scale_factor*eps)
            for ind in range(nonbondedforces.getNumExceptions()):
                i, j, q, sigma, eps = nonbondedforces.getExceptionParameters(ind)
                if i in solute_atoms and j in solute_atoms:
                    if initial:
                        self.params_nonbondexcept[ind] = [q, eps]
                    else:
                        q, eps = self.params_nonbondexcept[ind][0], self.params_nonbondexcept[ind][1]
                    nonbondedforces.setExceptionParameters(ind, i, j, scale_factor*q, sigma, scale_factor*eps)
            nonbondedforces.updateParametersInContext(self.context)
        if torsions_p:
            # Improper force constants for the openMM ff14SB force field; these
            # are skipped unless torsions_np is set.
            impropers = [43.932, 4.184,4.6024]
            torsionforces = [f for f in [self.system.getForce(i) for i in range(self.system.getNumForces())] if type(f) == PeriodicTorsionForce][0]
            for ind in range(torsionforces.getNumTorsions()):
                i,j,k,l,period,angle,const = torsionforces.getTorsionParameters(ind)
                if i in solute_atoms and j in solute_atoms and k in solute_atoms and l in solute_atoms:
                    if const._value not in impropers or torsions_np:
                        if initial:
                            self.params_torsion[ind] = [const]
                        else:
                            const = self.params_torsion[ind][0]
                        torsionforces.setTorsionParameters(ind,i,j,k,l,period,angle,scale_factor*const)
            torsionforces.updateParametersInContext(self.context)
        if bonds:
            # BUGFIX: was the undefined name `system` (NameError if enabled).
            bondforces = [f for f in [self.system.getForce(i) for i in range(self.system.getNumForces())] if type(f) == HarmonicBondForce][0]
            print('Not implemented, does nothing')
        if angles:
            # BUGFIX: was the undefined name `system` (NameError if enabled).
            angleforces = [f for f in [self.system.getForce(i) for i in range(self.system.getNumForces())] if type(f) == HarmonicAngleForce][0]
            print('Not implemented, does nothing')
        return [nonbondedforces, torsionforces]
####################################################################
###### Base class for running multicopy simulations
###### controls general launching and coordination
###### following the openMMtools version it doesn't implement the exchanges itself, left to subclasses
class MultiReplicaSampler:
    """
    Base class for running multicopy simulations.

    Controls launching and coordination of the per-replica Ray actors;
    following the openMMtools design it does not implement exchanges
    itself -- that is left to subclasses.
    """
    def __init__(self, **kwargs):
        self._pdb = None
        self._write_path = None
        self._io_name = None
        self._num_replicas = None
        ## to use a different number of work threads or GPUs than replicas
        self._num_workers = None
        self._simulation_params = None
        self._minimize_replica = {}
        self.replicas = {}             # replica index -> Ray actor handle
        self._replica_to_state = []    # replica index -> thermodynamic state index
        self._replica_type = None      # Ray actor class instantiated per replica
        self._initial = True
        self._steps = 0

    @property
    def write_path(self):
        return self._write_path

    @write_path.setter
    def write_path(self, path):
        # Create the output directory on assignment if it does not exist yet.
        if not os.path.exists(path):
            os.mkdir(path)
        self._write_path = path

    @property
    def io_name(self):
        return self._io_name

    @io_name.setter
    def io_name(self, name):
        self._io_name = name

    @property
    def replica_type(self):
        return self._replica_type

    @replica_type.setter
    def replica_type(self, rep_type):
        self._replica_type = rep_type

    def pdb(self, pdb):
        # NOTE: a plain method, not a property -- callers use sampler.pdb(path).
        self._pdb = PDBFile(pdb)

    @property
    def num_replicas(self):
        return self._num_replicas

    @num_replicas.setter
    def num_replicas(self, num):
        self._num_replicas = int(num)
        self._minimize_replica = {i:True for i in range(int(num))}

    def _add_replica_params(self, replica_params):
        self._replica_params = replica_params

    def _add_simulation_params(self, simulation_params):
        # Pull replica count and required runtime settings out of the dict;
        # 'platform' and 'num_workers' are mandatory.
        self._simulation_params = simulation_params
        self.num_replicas = self._simulation_params['num_replicas']
        self._replica_to_state = np.array([i for i in range(self.num_replicas)])
        assert 'platform' in simulation_params
        self._platform = simulation_params['platform']
        assert 'num_workers' in simulation_params
        self._num_workers = simulation_params['num_workers']
        if 'write_path' in simulation_params:
            self.write_path = simulation_params['write_path']
        if 'io_name' in simulation_params:
            self.io_name = simulation_params['io_name']

    def _write_replica_states(self):
        # Placeholder; subclasses record replica/state assignments.
        pass

    def launch_workers(self):
        # Instantiate one remote actor per replica, then block until all respond.
        self.replicas = {i:self.replica_type.remote(i, self._replica_params[i], self._simulation_params) for i in range(self.num_replicas)}
        synced = [ray.get(self.replicas[i].sync.remote()) for i in range(self.num_replicas)]

    def make_simulations(self, forcefield):
        for i in range(self.num_replicas):
            self.replicas[i].make_simulation.remote(self._pdb, forcefield)
        # sync() is a no-op used purely to block until every actor finishes building.
        synced = [ray.get(self.replicas[i].sync.remote()) for i in range(self.num_replicas)]

    def _calc_energies(self):
        # Matrix of reduced energies: rows = replicas, columns = thermodynamic states.
        energies = ray.get([self.replicas[i].get_energies.remote() for i in range(self.num_replicas)])
        energies = np.vstack([energies[i] for i in range(len(energies))])
        return energies

    def step(self, steps=1, minimize=False, reset_vel=False, write=True):
        # Fire-and-forget: the remote step calls are not waited on here.
        for i in range(self.num_replicas):
            print(f'running {steps} steps')
            self.replicas[i].step.remote(steps, minimize, reset_vel, write)
        self._steps += steps
        # synced = [ray.get(self.replicas[i].sync.remote()) for i in range(self.num_replicas)]
class ReplicaExchangeSampler(MultiReplicaSampler):
    """
    Replica-exchange driver: alternates MD segments with neighbor-swap
    attempts based on the matrix of reduced energies.
    """
    def __init__(self, **kwargs):
        super(ReplicaExchangeSampler, self).__init__(**kwargs)
        self._exchanges_attempted = {}
        self._exchanges_accepted = {}
        # index in the replica, value is the state its in
        self._neighbors = True   # only attempt swaps between adjacent states
        self._priority = True    # alternates even/odd state pairs each round
        self._num_swaps = 0
        self._num_swaps_prop = 0

    def run(self, num_runs, step_size, minimize=False, reset_vel=False):
        """Run num_runs rounds of (energy evaluation, exchange attempt, MD segment)."""
        while num_runs > 0:
            num_runs -= 1
            print('runs left: ', num_runs)
            state_energy_matrix = self._calc_energies()
            self._attempt_exchange(state_energy_matrix)
            self.step(step_size, minimize, reset_vel)
            self._write_replica_states()

    def _attempt_exchange(self, state_energy_matrix):
        # Metropolis criterion on reduced energies for each adjacent state
        # pair; `_priority` alternates between even- and odd-indexed pairs
        # each call so both sets of neighbors get first pick over time.
        if self._neighbors:
            swaps = {}
            nonpriority_swaps = []
            for s1 in range(self.num_replicas-1):
                self._num_swaps_prop += 1
                s2 = s1+1
                # Find which replica currently occupies each of the two states.
                i = int(np.where(self._replica_to_state==s1)[0])
                j = int(np.where(self._replica_to_state==s2)[0])
                state_i_pe_i = state_energy_matrix[i,s1]
                state_i_pe_j = state_energy_matrix[i,s2]
                state_j_pe_j = state_energy_matrix[j,s2]
                state_j_pe_i = state_energy_matrix[j,s1]
                delta_pe = (state_i_pe_j+state_j_pe_i-state_j_pe_j-state_i_pe_i)
                if delta_pe <= 0 or math.exp(-delta_pe) > np.random.rand():
                    if self._priority and s1%2 == 0:
                        self._num_swaps += 1
                        swaps[i] = s2
                        swaps[j] = s1
                    elif not self._priority and s1%2 ==1:
                        self._num_swaps += 1
                        swaps[i] = s2
                        swaps[j] = s1
                    else:
                        # Accepted but non-priority: applied only if neither
                        # replica already takes part in a priority swap.
                        nonpriority_swaps.append((i,s1,j,s2))
            for swap in nonpriority_swaps:
                if (swap[0] not in swaps and swap[2] not in swaps):
                    self._num_swaps += 1
                    swaps[swap[0]] = swap[3]
                    swaps[swap[2]] = swap[1]
            for s in swaps:
                self._replica_to_state[s] = swaps[s]
            # Push the (possibly unchanged) state assignment to every actor.
            [self.replicas[i].update.remote(self._replica_to_state[i]) for i in range(self.num_replicas)]
        self._priority = not self._priority
class REST2Sampler(ReplicaExchangeSampler):
    """
    REST2 (replica exchange with solute tempering) sampler: replicas all run
    at the physical temperature while solute interactions are scaled to
    emulate a geometric ladder of effective temperatures.
    """
    def __init__(self, **kwargs):
        super(REST2Sampler,self).__init__(**kwargs)

    def add_simulation_params(self, simulation_params):
        # Derive per-replica scaling factors from a geometric temperature ladder.
        self._add_simulation_params(simulation_params)
        temp = self._simulation_params['temp']
        min_t = self._simulation_params['min_temp']
        max_t = self._simulation_params['max_temp']
        reps = self.num_replicas
        temps = self._set_temp_spacing(min_t,max_t,reps)
        print('temperatures: ', temps)
        # scale = T_base / T_effective; 1.0 at the base temperature, <1 above it.
        self.scaling_factors = [temp/i for i in temps]
        print('scaling factors: ', self.scaling_factors)
        replica_params = {i:{'temp':temp,'t_scales':self.scaling_factors} for i, _ in enumerate(self.scaling_factors)}
        # self.rep_to_theotemp_and_scale = {i:{'theoretical_temp':temp, 'scale':scale} for temp, scale in zip(temps, scaling_factors)}
        if self._simulation_params['platform'] == 'CUDA':
            # Round-robin the replicas over the available GPUs.
            for i in range(reps):
                indexer = i%len(self._simulation_params['gpus'])
                replica_params[i]['device'] = self._simulation_params['gpus'][indexer]
        self._add_replica_params(replica_params)

    def _write_replica_states(self):
        # Append one CSV row per round: current scaling factor of each replica.
        if self._initial:
            self.rep_file = open(os.path.join(self.write_path,self.io_name+'_rep_states_restart_{}.csv'.format(self._simulation_params['restart'])),'w')
            header = 'step,'
            header += ','.join(str(i) for i in range(self.num_replicas))
            header += '\n'
            self.rep_file.write(header)
            self._initial = False
        data_line = str(self._steps)+',' + ','.join(str(self.scaling_factors[self._replica_to_state[i]]) for i in range(self.num_replicas)) + '\n'
        self.rep_file.write(data_line)
        self.rep_file.flush()

    @staticmethod
    def _set_temp_spacing(min_t, max_t, n):
        """Return n temperatures geometrically spaced from min_t to max_t inclusive."""
        factor = (max_t/min_t)**(1/(n-1))
        temps = []
        for i in range(1,n+1):
            temps.append(min_t*factor**(i-1))
        return temps
class BiasExchangeReplicaSampler(MultiReplicaSampler):
    """
    Bias-exchange metadynamics sampler (work in progress).

    NOTE(review): several names used below (testsystems, multiprocessing,
    self._workers, self._simulations, self._rep_to_pos, self.rep_to_gpu)
    are never imported or defined in this file, so these methods would fail
    at runtime as written -- confirm the intended imports before use.
    """
    def __init__(self, **kwargs):
        super(BiasExchangeReplicaSampler, self).__init__()
        self._replica_biases = {}   # rep -> list of BiasVariable
        self._metad_sims = {}       # rep -> Metadynamics wrapper
        self._bias_params = {}      # rep -> [amplitude, height, apply freq, save freq, dir]

    def add_biases(self, rep_to_bias):
        for rep in rep_to_bias:
            self._replica_biases[rep] = rep_to_bias[rep]

    def add_bias_params(self, bias_params):
        for rep in bias_params:
            self._bias_params[rep] = bias_params[rep]

    def _calc_energies(self, rep_num_all_positions):
        # Evaluate one replica's potential under every replica's positions.
        rep_num, positions = rep_num_all_positions[0], rep_num_all_positions[1]
        device, temp, damping, ts = self._simulation_params[rep_num]
        testsystem = testsystems.AlanineDipeptideExplicit()
        integrator = LangevinIntegrator(temp*kelvin, damping/picosecond, ts*picoseconds)
        platform = Platform.getPlatformByName(self._platform)
        if self._platform == 'CUDA':
            device = self.rep_to_gpu[str(device)]
            #### usecpupme MUST BE OFF TO RUN MULTIPLE GPU
            properties = {'Precision':'single' , 'UseCpuPme':'false', 'CudaDeviceIndex':device}
            context = Context(testsystem.system, integrator, platform)
            # simulation = Simulation(testsystem.topology, testsystem.system, integrator, platform, properties)
        else:
            context = Context(testsystem.system, integrator, platform)
            # simulation = Simulation(testsystem.topology, testsystem.system, integrator)
        energies = {}
        #### THIS IS THE FULL PE, I ONLY WANT TO BIAS PE ... NEED TO ISOLATE THAT COMPONENT OF THE ENERGY
        for rep, pos in positions:
            context.setPositions(pos)
            state = context.getState(getEnergy=True)
            PE = state.getPotentialEnergy()
            energies[int(rep)] = PE
            # print('for potential of replica: ', rep_num, ' PE of: {} is {}'.format(rep, PE))
        return rep_num, energies

    def calc_energies(self):
        # Fan the cross-energy evaluation out over a process pool.
        positions = [[rep_num, pos] for rep_num, pos in self._rep_to_pos.items()]
        p = multiprocessing.Pool(self._workers)
        energies = p.map(self._calc_energies, [[str(i), positions] for i in range(self.num_replicas)])
        self.energies = energies
        print('all energies', energies)
        p.close()
        p.join()

    def to_metadynamics_simulations(self):
        # Wrap each plain simulation in an OpenMM Metadynamics driver.
        for rep,sim in self._simulations.items():
            self._metad_sims[rep] = Metadynamics(sim.system, self._replica_biases[rep],
                self._simulation_params[rep][1]*kelvin, self._bias_params[rep][0],
                self._bias_params[rep][1]*kilojoules_per_mole, self._bias_params[rep][2],
                saveFrequency=self._bias_params[rep][3], biasDir=self._bias_params[rep][4])

    def _meta_step(self, sim_and_steps):
        sim_and_steps[0].step(sim_and_steps[1], sim_and_steps[2])

    def meta_step(self, steps=1):
        p = multiprocessing.Pool(self._workers)
        p.map(self._meta_step, [[i, steps] for i in range(self.num_replicas)])
        p.close()
def main():
    """Driver: run a REST2 replica-exchange job on alanine dipeptide, or
    (when enabled) a bias-exchange metadynamics job -- see the notes below."""
    rest2 = True
    metadynamics = False
    ### REST2 testing
    if rest2:
        forcefield = ForceField('amber14-all.xml', 'amber14/tip3p.xml')
        # All run-time knobs for the replicas and the sampler in one dict.
        simulation_params = {'num_replicas':3, 'num_workers':3,
                             'platform':'CUDA', 'gpus':['0'],
                             'min_temp':300.0, 'max_temp':450.0, 'temp':300.0,
                             'damping':1, 'timestep':0.002,
                             'Precision':'mixed', 'UseCpuPme':'false',
                             'integrator_type':'langevin', 'water':'tip3p',
                             'padding':1.0*nanometer, 'write_path':'./ala_rest_4',
                             'io_name':'ala_rest_4', 'ion_conc':0.005*molar,
                             'chk_freq':50000, 'dcd_freq':1000, 'state_freq':1000,
                             'restart':0}
        rest_replica_sampler = REST2Sampler()
        rest_replica_sampler.replica_type = REST2Replica
        rest_replica_sampler.pdb('./ala_dipep.pdb') #'./AAQAA_3_test.pdb', './IDPs/trpcage/random_trpcage.pdb',
        rest_replica_sampler.add_simulation_params(simulation_params)
        rest_replica_sampler.launch_workers()
        rest_replica_sampler.make_simulations(forcefield)
        s1 = time.perf_counter()
        # # rep_reporters = ['dcd', 'state']
        # # rest_replica_sampler.reporters = rep_reporters
        print('equil steps')
        # Equilibration: minimize + fresh velocities, no trajectory output.
        rest_replica_sampler.step(50000, minimize=True, reset_vel=True, write=False)
        print('runs')
        # Production: 1M exchange rounds of 1000 MD steps each.
        rest_replica_sampler.run(1000000, 1000)
        # rest_replica_sampler.step(1000)
        # rest_replica_sampler.step(1000)
        # rest_replica_sampler.step(1000)
        # rest_replica_sampler.step(1000)
        # rest_replica_sampler.step(1000)
        # rest_replica_sampler.step(1000)
        # rest_replica_sampler.step(1000)
        # rest_replica_sampler.step(1000)
        s2 = time.perf_counter()
        print(f"total processing time took {s2 - s1:0.4f} seconds")
        ## rest_replica_sampler.calc_energies()
        exit()
    if metadynamics:
        # NOTE(review): when rest2 is True, the exit() above prevents ever
        # reaching this branch. It also calls sampler methods not defined in
        # this file (add_replica_params, minimize, make_simulations with no
        # forcefield, a reporters attribute) -- confirm before enabling.
        ### Bias Potential 1
        dih_0 = CustomTorsionForce("theta")
        dih_0.addTorsion(4,5,8,14)
        dih_1 = CustomTorsionForce("theta")
        dih_1.addTorsion(6,8,14,16)
        # cv_force = CustomCVForce('dih_0')
        # cv_force = CustomCVForce('dih_1')
        # cv_force.addCollectiveVariable('dih_1',dih_1)
        # cv_force.addCollectiveVariable('dih_0',dih_0)
        bv_0 = BiasVariable(dih_0, -np.pi, np.pi, np.pi/10, True)
        bv_1 = BiasVariable(dih_1, -np.pi, np.pi, np.pi/10, True)
        bvs = {'0':[bv_0], '1':[bv_1]}
        ### order of params: amplitude, gauss height, # steps to per apply, # steps per save, output dir
        bias_params = {'0': [2.0, 0.1, 500, 500, './biases_0'],
                       '1': [2.0, 0.1, 500, 500, './biases_1']}
        ### order of params: device_index, temp, damping, ts
        replica_params = {'0':[0, 310.5, 5, 0.002],
                          '1':[1, 310.5, 5, 0.002]}
        replica_sampler = BiasExchangeReplicaSampler()
        replica_sampler.write_path = './test'
        replica_sampler.num_replicas = 2
        replica_sampler.num_workers = 2
        replica_sampler.platform = 'CPU'
        # replica_sampler.platform = 'CUDA'
        replica_sampler.add_replica_params(replica_params)
        replica_sampler.add_biases(bvs)
        replica_sampler.add_bias_params(bias_params)
        replica_sampler.make_simulations()
        ### minimize the simulations, set velocities to temp and run 20ps equilibration
        ### min doesnt work with GPU version for diala sim
        replica_sampler.minimize()
        rep_reporters = ['dcd', 'state']
        replica_sampler.reporters = rep_reporters
        replica_sampler.step(2500)
        replica_sampler.calc_energies()
        exit()
        replica_sampler.to_metadynamics_simulations()
        replica_sampler.meta_step(1000000)


if __name__ == '__main__':
    main()
21,319 | 4b5b1e63667a8adc1de19f16a53020cc148730fa | import pandas as pd
import numpy as np
import talib as tb
from indicators.indicator_utils import *
def Envelopes(values, n, deviation):
    """
    Return the n-period envelope bands around a simple moving average.

    Parameters
    ----------
    values : sequence of float
        Price series.
    n : int
        Rolling window length; the first n-1 outputs are NaN.
    deviation : float
        Fractional band width, e.g. 0.02 for +/-2% envelopes.

    Returns
    -------
    tuple of pandas.Series
        (mean, upper, lower) where upper = mean*(1+deviation) and
        lower = mean*(1-deviation).
    """
    # Docstring fixed: it previously described a plain SMA, hiding the bands.
    mean = pd.Series(values).rolling(n).mean()
    return mean, mean + mean * deviation, mean - mean * deviation
def create_HA(rates):
    """
    Build a Heikin-Ashi style candle frame from an OHLC DataFrame.

    Parameters
    ----------
    rates : pandas.DataFrame
        Must contain 'Open', 'High', 'Low', 'Close' columns.

    Returns
    -------
    (df_HA, keys) : (pandas.DataFrame, list of str)
        The transformed frame and the OHLC column names.
    """
    df_HA = rates.copy()
    df_HA['Close'] = (rates['Open'] + rates['High'] + rates['Low'] + rates['Close']) / 4
    # HA open: midpoint of the *previous* raw candle. Vectorized with shift()
    # instead of the original per-row chained-assignment loop, which is both
    # O(n) Python and unreliable under pandas copy-on-write.
    ha_open = (rates['Open'].shift(1) + rates['Close'].shift(1)) / 2
    # First row has no predecessor: use its own open/close midpoint.
    ha_open.iloc[0] = (rates['Open'].iloc[0] + rates['Close'].iloc[0]) / 2
    df_HA['Open'] = ha_open
    # NOTE(review): High/Low use the *raw* Open/Close columns as in the
    # original; classic Heikin-Ashi would use the HA open/close instead.
    df_HA['High'] = rates[['Open', 'Close', 'High']].max(axis=1)
    df_HA['Low'] = rates[['Open', 'Close', 'Low']].min(axis=1)
    keys = ['Open', 'High', 'Low', 'Close']
    return df_HA, keys
def create_percentage(values):
    """Percentage change between consecutive values; the first entry is NaN."""
    series = pd.Series(values)
    return series.pct_change().mul(100)
def create_RSI(values,period=14):
    """Relative Strength Index over `period` bars (TA-Lib)."""
    return tb.RSI(values, timeperiod=period)
def create_MACD(values, n_slow = 26, n_fast = 10, signalperiod = 9):
    """MACD line and its signal line (TA-Lib); the histogram is discarded."""
    macd, macdsignal, macdhist = tb.MACD(values, fastperiod=n_fast, slowperiod=n_slow, signalperiod=signalperiod)
    return macd, macdsignal
def create_bollinger_bands(values, r = 20, dev = 2):
    """Bollinger bands: (upper, middle, lower) over r bars at +/-dev std (TA-Lib)."""
    upperband, middleband, lowerband = tb.BBANDS(values, timeperiod=r, nbdevup=dev, nbdevdn=dev)
    return upperband, middleband,lowerband
def create_moving_averages(values, range1 = 21, range2 = 55, range3 = 128):
    """Three exponential moving averages of `values` at the given spans (TA-Lib)."""
    return tb.EMA(values,range1),tb.EMA(values,range2),tb.EMA(values,range3)
|
21,320 | b4c30f64248ef4cc57a7ff7cce7f88f9814c4d70 | import maya.cmds as mc
import maya.OpenMaya as OpenMaya
import mathUtils
import math
class MissingPluginError(Exception): pass
def getMatrix(transform,local=False,time=None):
    '''
    Return an MMatrix built from a transform's matrix attribute.

    @param transform: Transform object to get world matrix from
    @type transform: str
    @param local: Get local space matrix instead of the world space matrix
    @type local: bool
    @param time: The frame to get the transforms world matrix for. If left at default, will use the current frame.
    @type time: int or float
    '''
    # Check transform
    if not mc.objExists(transform):
        raise Exception('Object "'+transform+'" does not exist!!')

    # Define Matrix attribute
    matAttr = 'worldMatrix[0]'
    if local: matAttr = 'matrix'

    # Query the 16 matrix values, optionally at a specific frame.
    # BUGFIX: the time query previously passed t=frame, an undefined name
    # (NameError whenever a time was supplied); it now uses the parameter.
    # Also dropped a dead `mat = OpenMaya.MMatrix()` that was overwritten here.
    if time != None:
        mat = mc.getAttr(transform+'.'+matAttr,t=time)
    else:
        mat = mc.getAttr(transform+'.'+matAttr)

    # Build Matrix from the flat 16-value list (row-major: rows 0-2 axes, row 3 translation)
    matrix = buildMatrix(translate=(mat[12],mat[13],mat[14]),xAxis=(mat[0],mat[1],mat[2]),yAxis=(mat[4],mat[5],mat[6]),zAxis=(mat[8],mat[9],mat[10]))

    # Return result
    return matrix
def buildMatrix(translate=(0,0,0),xAxis=(1,0,0),yAxis=(0,1,0),zAxis=(0,0,1)):
    '''
    Build a transformation matrix based on the input vectors
    @param translate: Translate values for the matrix
    @type translate: tuple/list
    @param xAxis: xAxis of the matrix
    @type xAxis: tuple/list
    @param yAxis: yAxis of the matrix
    @type yAxis: tuple/list
    @param zAxis: zAxis of the matrix
    @type zAxis: tuple/list
    @return: The constructed transformation matrix
    @rtype: OpenMaya.MMatrix
    '''
    # Start from the identity matrix; the old OpenMaya API does not allow
    # direct item assignment on MMatrix, so individual elements are written
    # through MScriptUtil.  (Removed an unused "values = []" local.)
    matrix = OpenMaya.MMatrix()
    OpenMaya.MScriptUtil.setDoubleArray(matrix[0], 0, xAxis[0])
    OpenMaya.MScriptUtil.setDoubleArray(matrix[0], 1, xAxis[1])
    OpenMaya.MScriptUtil.setDoubleArray(matrix[0], 2, xAxis[2])
    OpenMaya.MScriptUtil.setDoubleArray(matrix[1], 0, yAxis[0])
    OpenMaya.MScriptUtil.setDoubleArray(matrix[1], 1, yAxis[1])
    OpenMaya.MScriptUtil.setDoubleArray(matrix[1], 2, yAxis[2])
    OpenMaya.MScriptUtil.setDoubleArray(matrix[2], 0, zAxis[0])
    OpenMaya.MScriptUtil.setDoubleArray(matrix[2], 1, zAxis[1])
    OpenMaya.MScriptUtil.setDoubleArray(matrix[2], 2, zAxis[2])
    OpenMaya.MScriptUtil.setDoubleArray(matrix[3], 0, translate[0])
    OpenMaya.MScriptUtil.setDoubleArray(matrix[3], 1, translate[1])
    OpenMaya.MScriptUtil.setDoubleArray(matrix[3], 2, translate[2])
    return matrix
def vectorMatrixMultiply(vector,matrix,transformAsPoint=False,invertMatrix=False):
    '''
    Transform a vector (or point) by a given transformation matrix.
    @param vector: Vector or point to be transformed
    @type vector: tuple/list
    @param matrix: MMatrix object to provide the transformation
    @type matrix: OpenMaya.MMatrix
    @param transformAsPoint: Transform the vector as a point (the matrix translation is applied)
    @type transformAsPoint: bool
    @param invertMatrix: Use the matrix inverse to transform the vector
    @type invertMatrix: bool
    @return: The transformed vector as a 3 element list
    '''
    # MPoint (w=1.0) picks up the translation row; MVector is affected by
    # rotation/scale only.
    if transformAsPoint: vector = OpenMaya.MPoint(vector[0],vector[1],vector[2],1.0)
    else: vector = OpenMaya.MVector(vector[0],vector[1],vector[2])
    # Check input is of type MMatrix
    if type(matrix) != OpenMaya.MMatrix:
        raise Exception('Matrix input variable is not of expected type! Expecting MMatrix, received '+str(type(matrix))+'!!')
    # Transform vector; the multiply is skipped entirely for an identity matrix.
    if matrix != OpenMaya.MMatrix.identity:
        if invertMatrix: matrix = matrix.inverse()
        vector *= matrix
    # Return new vector
    return [vector.x,vector.y,vector.z]
def getTranslation(matrix):
    '''
    Return the translation component of a matrix.
    @param matrix: Matrix to extract translation from
    @type matrix: maya.OpenMaya.MMatrix
    @return: (x, y, z) translation tuple
    '''
    # Row 3 of the row-major transform matrix holds the translation; the old
    # OpenMaya API needs MScriptUtil to read doubles out of the row pointer.
    x = OpenMaya.MScriptUtil.getDoubleArrayItem(matrix[3],0)
    y = OpenMaya.MScriptUtil.getDoubleArrayItem(matrix[3],1)
    z = OpenMaya.MScriptUtil.getDoubleArrayItem(matrix[3],2)
    return (x,y,z)
def getRotation(matrix,rotationOrder='xyz'):
    '''
    Return the rotation component of a matrix as euler (XYZ) values in degrees.
    @param matrix: Matrix to extract rotation from
    @type matrix: maya.OpenMaya.MMatrix
    @param rotationOrder: Rotation order of the matrix (name like 'xyz' or MEulerRotation index)
    @type rotationOrder: str or int
    '''
    # Degrees-per-radian conversion factor
    radian = 180.0/math.pi
    # Resolve rotation order name to its MEulerRotation integer index
    if type(rotationOrder) == str:
        rotationOrder = rotationOrder.lower()
        rotateOrder = {'xyz':0,'yzx':1,'zxy':2,'xzy':3,'yxz':4,'zyx':5}
        # FIX: dict.has_key() exists only in Python 2; "in" works in both 2 and 3.
        if rotationOrder not in rotateOrder:
            raise Exception('Invalid rotation order supplied!')
        rotationOrder = rotateOrder[rotationOrder]
    else:
        rotationOrder = int(rotationOrder)
    # Get transformation matrix
    transformMatrix = OpenMaya.MTransformationMatrix(matrix)
    # Get Euler rotation from matrix
    eulerRot = transformMatrix.eulerRotation()
    # Reorder rotation
    eulerRot.reorderIt(rotationOrder)
    # Return XYZ rotation values (converted to degrees)
    return (eulerRot.x*radian,eulerRot.y*radian,eulerRot.z*radian)
def buildRotation(aimVector,upVector=(0,1,0),aimAxis='x',upAxis='y'):
    '''
    Build rotation matrix from the specified inputs
    @param aimVector: Aim vector for construction of rotation matrix (worldSpace)
    @type aimVector: tuple or list
    @param upVector: Up vector for construction of rotation matrix (worldSpace)
    @type upVector: tuple or list
    @param aimAxis: Axis aligned to the aim vector; prefix with '-' to negate
    @type aimAxis: str
    @param upAxis: Axis aligned to the up vector; prefix with '-' to negate
    @type upAxis: str
    @return: The constructed rotation matrix (OpenMaya.MMatrix)
    '''
    # Check negative axis ('-x' -> axis 'x', flag negation)
    negAim = False
    negUp = False
    if aimAxis[0] == '-':
        aimAxis = aimAxis[1]
        negAim = True
    if upAxis[0] == '-':
        upAxis = upAxis[1]
        negUp = True
    # Check valid axis
    axisList = ['x','y','z']
    if not axisList.count(aimAxis): raise Exception('Aim axis is not valid!')
    if not axisList.count(upAxis): raise Exception('Up axis is not valid!')
    if aimAxis == upAxis: raise Exception('Aim and Up axis must be unique!')
    # Determine cross axis (the axis left over once aim and up are removed)
    axisList.remove(aimAxis)
    axisList.remove(upAxis)
    crossAxis = axisList[0]
    # Normalize aimVector
    aimVector = mathUtils.normalizeVector(aimVector)
    if negAim: aimVector = (-aimVector[0],-aimVector[1],-aimVector[2])
    # Normalize upVector
    upVector = mathUtils.normalizeVector(upVector)
    if negUp: upVector = (-upVector[0],-upVector[1],-upVector[2])
    # Get cross product vector; the operand order below keeps the resulting
    # basis right-handed for the given aim/up axis combination.
    crossVector = (0,0,0)
    if (aimAxis == 'x' and upAxis == 'z') or (aimAxis == 'z' and upAxis == 'y'):
        crossVector = mathUtils.crossProduct(upVector,aimVector)
    else:
        crossVector = mathUtils.crossProduct(aimVector,upVector)
    # Recalculate upVector (orthogonalize against the aim vector)
    if (aimAxis == 'x' and upAxis == 'z') or (aimAxis == 'z' and upAxis == 'y'):
        upVector = mathUtils.crossProduct(aimVector,crossVector)
    else:
        upVector = mathUtils.crossProduct(crossVector,aimVector)
    # Build axis dictionary mapping each axis letter to its basis vector
    axisDict={aimAxis: aimVector,upAxis: upVector,crossAxis: crossVector}
    # Build rotation matrix
    mat = buildMatrix(xAxis=axisDict['x'],yAxis=axisDict['y'],zAxis=axisDict['z'])
    # Return rotation matrix
    return mat
def inverseTransform(source,destination,translate=True,rotate=True,scale=True):
    '''
    Apply the inverse of a specified transform to another target transform.
    Connects (live) the source inverseMatrix through a decomposeMatrix node
    into the destination's TRS channels.
    @param source: The source transform that will supply the transformation
    @type source: str
    @param destination: The destination transform that will receive the inverse transformation
    @type destination: str
    @param translate: Apply inverse translate to destination transform
    @type translate: bool
    @param rotate: Apply inverse rotation to destination transform
    @type rotate: bool
    @param scale: Apply inverse scale to destination transform
    @type scale: bool
    @return: The name of the created decomposeMatrix node (left in the scene)
    '''
    # ==========
    # - Checks -
    # ==========
    if not mc.objExists(source): raise Exception('Transform "'+source+'" does not exist!!')
    if not mc.objExists(destination): raise Exception('Transform "'+destination+'" does not exist!!')
    # Load decomposeMatrix plugin (required for the node type below)
    if not mc.pluginInfo('decomposeMatrix',q=True,l=True):
        try: mc.loadPlugin('decomposeMatrix')
        except: raise MissingPluginError('Unable to load "decomposeMatrix" plugin!!')
    # =================================
    # - Apply Inverse Transformations -
    # =================================
    # Create and name decomposeMatrix node
    dcm = mc.createNode('decomposeMatrix',n=source+'_decomposeMatrix')
    # Make connections (force=True overwrites any existing incoming connections)
    mc.connectAttr(source+'.inverseMatrix',dcm+'.inputMatrix',f=True)
    if translate: mc.connectAttr(dcm+'.outputTranslate',destination+'.translate',f=True)
    if rotate: mc.connectAttr(dcm+'.outputRotate',destination+'.rotate',f=True)
    if scale: mc.connectAttr(dcm+'.outputScale',destination+'.scale',f=True)
    # =================
    # - Return Result -
    # =================
    return dcm
def fromList(valueList):
    '''
    Create an MMatrix from a flat list of 16 values (row major).
    @param valueList: List of matrix values
    @type valueList: list
    @return: The constructed matrix
    @rtype: OpenMaya.MMatrix
    '''
    # Check Value List
    if len(valueList) != 16:
        raise Exception('Invalid value list! Expecting 16 element, found '+str(len(valueList)))
    # Create transformation matrix from the input values.
    # (Removed a large commented-out element-by-element MScriptUtil block;
    # createMatrixFromList fills the whole matrix in one call.)
    matrix = OpenMaya.MMatrix()
    OpenMaya.MScriptUtil.createMatrixFromList(valueList,matrix)
    # Return Result
    return matrix
def asList(matrix):
    '''
    Return the specified matrix as a flat 16 element list (row major).
    @param matrix: Matrix to return list for
    @type matrix: maya.OpenMaya.MMatrix
    '''
    # MMatrix supports call syntax matrix(row, column) for element access.
    return [ matrix(0,0),matrix(0,1),matrix(0,2),matrix(0,3),
            matrix(1,0),matrix(1,1),matrix(1,2),matrix(1,3),
            matrix(2,0),matrix(2,1),matrix(2,2),matrix(2,3),
            matrix(3,0),matrix(3,1),matrix(3,2),matrix(3,3), ]
def printMatrix(matrix):
    '''
    Print the specified matrix values to the script editor, one row per line,
    each element formatted to 3 decimal places.
    @param matrix: Matrix to print
    @type matrix: maya.OpenMaya.MMatrix
    '''
    # NOTE: Python 2 print statements -- the parenthesised pieces are string
    # expressions concatenated with '+', not print() calls.
    print ('%.3f' % matrix(0,0))+', '+('%.3f' % matrix(0,1))+', '+('%.3f' % matrix(0,2))+', '+('%.3f' % matrix(0,3))
    print ('%.3f' % matrix(1,0))+', '+('%.3f' % matrix(1,1))+', '+('%.3f' % matrix(1,2))+', '+('%.3f' % matrix(1,3))
    print ('%.3f' % matrix(2,0))+', '+('%.3f' % matrix(2,1))+', '+('%.3f' % matrix(2,2))+', '+('%.3f' % matrix(2,3))
    print ('%.3f' % matrix(3,0))+', '+('%.3f' % matrix(3,1))+', '+('%.3f' % matrix(3,2))+', '+('%.3f' % matrix(3,3))
|
21,321 | 744cb510a389853805365ee3cf317b102d9d9770 | from django.contrib import messages
from django.contrib.auth.decorators import login_required
from django.http import Http404, HttpResponse, JsonResponse, HttpResponseRedirect
from django.shortcuts import redirect, render, get_object_or_404
from ManageReview.forms import ReviewForm
from accounts.CustomerProfilemodel import CustomerProfile
from accounts.models import StoreAdminProfile
from .forms import ProductForm
from .models import Product
class ProductsCustomer():
    """Customer-facing favourite-list views.

    NOTE: these are plain view functions grouped in a class namespace (no
    ``self``); they take the request as their first argument.
    """

    def add_to_favorite(request, id):
        """Add product `id` to the current customer's favourites, then
        redirect back to the referring page."""
        profile = get_object_or_404(CustomerProfile, user=request.user)
        product = get_object_or_404(Product, id=id)
        profile.favorite.add(product)
        return HttpResponseRedirect(request.META.get('HTTP_REFERER'))

    def remove_to_favorite(request, id):
        """Remove product `id` from the current customer's favourites, then
        redirect back to the referring page."""
        profile = get_object_or_404(CustomerProfile, user=request.user)
        product = Product.objects.get(id=id)
        profile.favorite.remove(product)
        return HttpResponseRedirect(request.META.get('HTTP_REFERER'))
class ProductAdmin():
    """Store-admin product views.

    NOTE: these are plain view functions grouped in a class namespace (no
    ``self``); they are referenced as ``ProductAdmin.<name>`` from the URLconf.
    """

    def product_view(request, pk):
        """Render the detail page for one product with an empty review form."""
        product = get_object_or_404(Product, pk=pk)
        form = ReviewForm()
        return render(request, 'product_home.html', {'product': product, 'form': form})

    @login_required(login_url='accounts:login')
    def product_create(request):
        """Create a product from POSTed data and attach it to the admin's store.

        On any error, warns the user and redirects back to the referrer.
        """
        try:
            StorAdmin = get_object_or_404(StoreAdminProfile, user=request.user)
            if request.method == 'POST':
                form = ProductForm(request.POST or None, request.FILES or None)
                if form.is_valid():
                    var = Product.objects.create(
                        name=request.POST['name'],
                        price=request.POST['price'],
                        type=request.POST['type'],
                        discount=request.POST['discount'],
                        manufacturer=request.POST['manufacturer'],
                        number_Of_Copy=request.POST['number_Of_Copy'],
                        par_Code=request.POST['par_Code'],
                        product_sections=request.POST['sections'],
                        image=request.FILES['image']
                    )
                    StorAdmin.store.products.add(var)
                    messages.success(request, 'Add product Done.')
                    return redirect('accounts:StoreAdminProfileView')
                # NOTE(review): an invalid POST falls through and returns None;
                # consider re-rendering the form with its errors.
            else:
                # GET: build a fresh form restricted to this store's sections.
                Stor_sections = StorAdmin.store.sections.all()
                sec_list = [sec.name for sec in Stor_sections]
                form = ProductForm()
                form.sec(sec_list=sec_list)
                return render(request, 'product_add_form.html', {'form': form})
        except Exception:
            # FIX: was a bare "except:" which also swallowed SystemExit and
            # KeyboardInterrupt; Exception keeps the original best-effort intent.
            messages.warning(request, "Make Sure That All Data Is Entered And Correct...!")
            return HttpResponseRedirect(request.META.get('HTTP_REFERER'))

    def update_product(request, pk):
        """Edit a product belonging to the requesting admin's store."""
        StorAdmin = get_object_or_404(StoreAdminProfile, user=request.user)
        product = get_object_or_404(Product, pk=pk)
        # .get() raises if the product is not in this admin's store.
        if StorAdmin.store.products.get(pk=pk):
            if request.method == 'POST':
                form = ProductForm(request.POST or None, request.FILES or None, instance=product)
                if form.is_valid():
                    edit = form.save(commit=False)
                    edit.save()
                    return redirect('accounts:StoreAdminProfileView')
            else:
                form = ProductForm(instance=product)
                Stor_sections = StorAdmin.store.sections.all()
                sec_list = [sec.name for sec in Stor_sections]
                form.sec(sec_list=sec_list)
                return render(request, 'product_add_form.html', {'form': form})
        else:
            # FIX: Http404 must be *raised*, not returned -- returning the
            # exception class is not a valid HttpResponse.
            raise Http404('Product not found in your store.')

    def delete_product(request, pk):
        """Delete a product from the requesting admin's store (POST only)."""
        StorAdmin = get_object_or_404(StoreAdminProfile, user=request.user)
        product = get_object_or_404(Product, pk=pk)
        if StorAdmin.store.products.get(pk=pk):
            if request.method == 'POST':
                name = product.name
                obj = get_object_or_404(Product, pk=pk)
                obj.delete()
                messages.success(request, 'Product {0} Delete Done'.format(name))
                return HttpResponseRedirect(request.META.get('HTTP_REFERER'))
            # NOTE(review): non-POST requests fall through and return None --
            # confirm whether a confirmation page or redirect was intended.
|
21,322 | 888346dc5834feb403f8283f349bc06ef19f77bc | """Graphical Gui Test"""
import sys
import os
from sdl2 import *
import sdl2.ext
# Create a resource, so we have easy access to the example images.
RESOURCES = sdl2.ext.Resources(__file__, "resources")
uifactory = 0
spriterenderer = 0
spriteBG = 0
buttonNext = 0
buttonPrevious = 0
buttonShutdown = 0
running = True
spriteArray = ["320-LP_480x320.jpg", "320-lmag_480x320.jpg", "320-hackable_480x320.jpg"]
numImg = 0
# A callback for the Button.click event.
def onclickNext(button, event):
    """Button.click callback: advance to the next image (wrapping after the
    last one), flash the pressed-state button artwork, then redraw."""
    global numImg, spriterenderer, buttonNext, buttonPrevious
    numImg=numImg+1
    if numImg >2:
        numImg = 0
    print("Button Next was clicked!")
    # Temporarily show the pressed-state sprite for visual feedback.
    buttonNextClicked = uifactory.from_image(sdl2.ext.BUTTON,RESOURCES.get_path("buttonNextClicked.png"))
    buttonNextClicked.position = 370, 76
    spriterenderer.render((buttonNextClicked, buttonPrevious))
    # NOTE(review): "timer" is assumed to resolve via "from sdl2 import *" -- confirm.
    timer.SDL_Delay(100)
    # Redraw with the released button and the newly selected image.
    image = uifactory.from_image(sdl2.ext.BUTTON, RESOURCES.get_path(spriteArray[numImg]))
    image.position = 20,40
    spriterenderer.render((buttonNext, buttonPrevious, image))
# A callback for the Button.click event.
def onclickPrevious(button, event):
    """Button.click callback: step back to the previous image (wrapping before
    the first one), flash the pressed-state button artwork, then redraw."""
    global numImg, spriterenderer, buttonNext, buttonPrevious
    numImg=numImg-1
    if numImg < 0:
        numImg = 2
    print("Button Previous was clicked!")
    # Temporarily show the pressed-state sprite for visual feedback.
    buttonPreviousClicked = uifactory.from_image(sdl2.ext.BUTTON,RESOURCES.get_path("buttonPreviousClicked.png"))
    buttonPreviousClicked.position = 370, 112
    spriterenderer.render((buttonNext, buttonPreviousClicked))
    # NOTE(review): "timer" is assumed to resolve via "from sdl2 import *" -- confirm.
    timer.SDL_Delay(100)
    # Redraw with the released button and the newly selected image.
    image = uifactory.from_image(sdl2.ext.BUTTON, RESOURCES.get_path(spriteArray[numImg]))
    image.position = 20,40
    spriterenderer.render((buttonNext, buttonPrevious, image))
# A callback for the Button.click event.
def onclickShutdown (button, event):
    """Button.click callback: flash the pressed shutdown artwork, then restore.

    The actual halt command is commented out below, so this is currently a
    visual-only stub.
    """
    global numImg, spriterenderer, buttonNext, buttonPrevious, buttonShutdown
    print("Button Shutdown was clicked!")
    buttonShutdownClicked = uifactory.from_image(sdl2.ext.BUTTON,RESOURCES.get_path("buttonShutDownClicked.png"))
    buttonShutdownClicked.position = 370, 148
    spriterenderer.render((buttonNext, buttonPrevious, buttonShutdownClicked))
    # NOTE(review): "timer" is assumed to resolve via "from sdl2 import *" -- confirm.
    timer.SDL_Delay(300)
    spriterenderer.render((buttonNext, buttonPrevious, buttonShutdown))
    #os.system('sudo halt')
def run():
    """Create the window, sprites and UI elements, then run the event loop.

    Returns 0 on normal exit (window closed or ESC pressed).
    """
    global window, factory, running, spriteArray, numImg, uifactory, spriterenderer , spriteBG, buttonNext, buttonPrevious, buttonShutdown
    sdl2.ext.init()
    #sdl2.mouse.SDL_ShowCursor(0) #hide cursor
    window = sdl2.ext.Window("Gui Test 0.1", size=(480, 320))
    window.show()
    # Hardware-accelerated sprites need a rendering context; software ones don't.
    if "-hardware" in sys.argv:
        print("Using hardware acceleration")
        renderer = sdl2.ext.Renderer(window)
        factory = sdl2.ext.SpriteFactory(sdl2.ext.TEXTURE, renderer=renderer)
    else:
        print("Using software rendering")
        factory = sdl2.ext.SpriteFactory(sdl2.ext.SOFTWARE)
    # UIFactory wraps the sprite factory so UI elements work with either backend.
    uifactory = sdl2.ext.UIFactory(factory)
    # Background, navigation buttons, shutdown button and the initial image.
    spriteBG = uifactory.from_image(sdl2.ext.BUTTON, RESOURCES.get_path("backgroundPySDL2.png"))
    spriteBG.position = 0, 0
    buttonNext = uifactory.from_image(sdl2.ext.BUTTON, RESOURCES.get_path("buttonNext.png"))
    buttonNext.position = 370, 76
    buttonPrevious = uifactory.from_image(sdl2.ext.BUTTON, RESOURCES.get_path("buttonPrevious.png"))
    buttonPrevious.position = 370, 112
    buttonShutdown = uifactory.from_image(sdl2.ext.BUTTON, RESOURCES.get_path("buttonShutDown.png"))
    buttonShutdown.position = 370, 148
    image = uifactory.from_image(sdl2.ext.BUTTON, RESOURCES.get_path(spriteArray[numImg]))
    image.position = 20,40
    # Wire click handlers (click = mouse button press + release on the element).
    buttonNext.click += onclickNext
    buttonPrevious.click += onclickPrevious
    buttonShutdown.click += onclickShutdown
    # All GUI elements are sprites, so a SpriteRenderSystem can draw them.
    spriterenderer = factory.create_sprite_render_system(window)
    # UIProcessor routes input events to the relevant UI elements.
    uiprocessor = sdl2.ext.UIProcessor()
    spriterenderer.render((spriteBG, buttonNext, buttonPrevious, buttonShutdown, image))
    while running:
        window.refresh()
        events = sdl2.ext.get_events()
        for event in events:
            if event.type == sdl2.SDL_QUIT:
                running = False
                break
            # BUGFIX: only read the "key" union member for keyboard events.
            # SDL_Event is a C union, so on non-keyboard events event.key
            # aliases unrelated data and the old unguarded comparison could
            # match garbage and quit spuriously.
            if event.type == sdl2.SDL_KEYDOWN and event.key.keysym.sym == sdl2.SDLK_ESCAPE:
                running = False
                break
            uiprocessor.dispatch([buttonNext, buttonPrevious, buttonShutdown], event)
    sdl2.ext.quit()
    return 0
# Script entry point: propagate run()'s exit status to the shell.
if __name__ == "__main__":
    sys.exit(run())
|
21,323 | 4e5a5176e04c0ecd062210ebb0e341f44a2e1f94 | import datetime
from spindrift.micro import Micro
PORT = 12345
PATH = '/test/coerce'
ID = 123
DATE = datetime.date(2018, 12, 13)
def to_date(ts):
    """Parse a 'YYYY-MM-DD' string into a datetime.date."""
    parsed = datetime.datetime.strptime(ts, '%Y-%m-%d')
    return parsed.date()
def coerce(request, id, when):
    """Route handler: verify the int-coerced path id and echo the coerced
    date back in ISO format."""
    assert id == ID
    iso = when.isoformat()
    return iso
def on_coerce(rc, result):
    """Response callback: expects success (rc == 0) and the ISO test date."""
    assert rc == 0
    expected = DATE.isoformat()
    assert result == expected
def test_ping():
    """End-to-end check of spindrift micro argument coercion.

    Spins up a server whose route coerces the path id to int and the request
    content 'when' to a date via to_date, then calls it through a micro
    connection, servicing the network loop until the response arrives.
    """
    # Micro definition: one GET route on the server plus a client connection.
    s = [
        'SERVER coerce {}'.format(PORT),
        r' ROUTE {}/(?P<id>\d+)$'.format(PATH),
        ' ARG int',
        ' GET test.test_micro_coerce.coerce',
        ' CONTENT when type=test.test_micro_coerce.to_date',
        'CONNECTION coerce http://localhost:{}'.format(PORT),
        ' RESOURCE get %s/{id} is_json=False' % PATH,
        ' REQUIRED when',
    ]
    micro = Micro().load(s).setup()
    c = micro.connection.coerce.resource.get(
        on_coerce,
        ID,
        DATE.isoformat(),
    )
    # Pump the event loop until the client connection closes.
    while c.is_open:
        micro.network.service()
    # t_http_data > 0 means an HTTP payload actually arrived.
    assert c.t_http_data > 0
    micro.close()
|
21,324 | 8cc3a489dcac3f0bc3041358f2942fc8051e229d | def combinationSum(candidates, target):
arr = []
candidates = sorted(candidates)
for i in range(len(candidates)):
if candidates[i] == target:
arr.append([candidates[i]])
elif candidates[i] > target:
break
else:
result = combinationSum(candidates[i:], target-candidates[i])
for r in result:
r.append(candidates[i])
arr.append(r)
return arr
# Demo: prints [[3, 2, 2], [7]] (combinations are built back-to-front).
a = combinationSum([2, 3, 6, 7, 8], 7)
print(a)
|
21,325 | ad69f6a8158a8241c9c72c9f8a2729ffc7b33fe4 | from fabric.api import *
from os import path
from sys import argv
from tasks import ExecuteCommands, CopyFilesPatternCommand, SendAFile
if __name__ == '__main__':
    # Positional CLI arguments: <domain> <local_path> <remote_path> <user> <key_file>.
    # NOTE(review): no argument-count validation -- missing args raise IndexError.
    domain = argv[1]
    local_path = argv[2]
    remote_path = argv[3]
    user = argv[4]
    key_file = argv[5]
    # Upload the WAR file to the remote host.
    war_dev = SendAFile("NAT WAR", domain, user, key_file, local_path, remote_path)
    war_dev.execute()
    # Stop tomcat, archive the previous deployment, install the new WAR, restart.
    stop_tomcat = ExecuteCommands("Tomcat", domain, user, key_file, "sudo service tomcat7 stop", "sudo rm -rf /var/lib/tomcat7/last-webapps/*",
                                  "sudo mv /var/lib/tomcat7/webapps/new-ado* /var/lib/tomcat7/last-webapps/",
                                  "sudo cp " + remote_path + "/new-ado.war /var/lib/tomcat7/webapps/", "sudo service tomcat7 start")
    stop_tomcat.execute()
|
21,326 | 5d0de859dcf61353f188ed28fdea862907bfef19 | from setuptools import setup
from os import path
def get_long_description():
    """Read README.md located next to this file and return its contents."""
    readme = path.join(path.dirname(path.abspath(__file__)), "README.md")
    with open(readme, encoding="utf8") as fp:
        return fp.read()
def get_requirements(fn='requirements.txt', nogit=True):
    """Parse a pip requirements file into a list of requirement specifiers.

    Blank lines and comment lines are dropped; anything after the first
    whitespace on a line (e.g. an inline comment) is stripped.  With
    nogit=True, VCS requirements ('git+...') are excluded.  Returns [] when
    the file does not exist.
    """
    if not path.exists(fn):
        return []
    with open(fn, 'r') as f:
        lines = f.read().splitlines()
    requirements = []
    for line in lines:
        # BUGFIX: strip first so whitespace-only lines don't raise IndexError
        # on split()[0], and indented comment lines aren't kept as '#'.
        line = line.strip()
        if not line or line.startswith('#'):
            continue
        requirements.append(line.split()[0])
    if nogit:
        requirements = [r for r in requirements if not r.startswith('git+')]
    return requirements
requirements = get_requirements()
setup(
name="ou-jupyter-book-tools",
packages=['tags2myst'],
version='0.0.1',
author="Tony Hirst",
author_email="tony.hirst@gmail.com",
description="Tools for working with Jupyter books.",
long_description=get_long_description(),
long_description_content_type="text/markdown",
install_requires=requirements,
entry_points='''
[console_scripts]
tags2myst=tags2myst.cli:cli
'''
)
|
21,327 | 8cb48c0d7e4db15dad87f2429d7d5a85af6b3b04 | #!/usr/bin/python
#
# (C) 2018 Dan Boneh, Riad S. Wahby <rsw@cs.stanford.edu>
import math
import sys
def show_one_result(name, fails, nreps):
    """Print a single colored PASS/FAIL line for a (sub)test result.

    fails is None for an unconditional pass; otherwise it is the failure
    count out of nreps repetitions (0 also prints PASS).
    """
    cross = u" \u2717 "
    check = u" \u2713 "
    if fails is not None and fails > 0:
        line = u"\033[91m%sFAIL\033[0m: %s (%d/%d failed)\n" % (cross, name, fails, nreps)
    else:
        line = u"\033[92m%sPASS\033[0m: %s\n" % (check, name)
    sys.stdout.write(line)
    sys.stdout.flush()
def show_test(name, just=32):
    """Print a colored, left-justified test heading (no trailing newline)."""
    heading = "\033[38;5;33m%s\033[0m: " % name
    sys.stdout.write(heading.ljust(just))
    sys.stdout.flush()
def show_warning(warn):
    """Print a red WARNING line followed by the given message."""
    message = '\033[91mWARNING\033[0m: %s\n' % warn
    sys.stdout.write(message)
    sys.stdout.flush()
def show_progress(failed):
    """Emit one colored progress dot: red when failed, green otherwise."""
    color = '\033[91m' if failed else '\033[92m'
    sys.stdout.write(color + '.\033[0m')
    sys.stdout.flush()
def show_timing(tname, tvals, just=32):
    """Print mean, sample std-dev, max and min of tvals (seconds) in ms."""
    count = len(tvals)
    mean = sum(tvals) / count
    variance = sum(pow(t - mean, 2) for t in tvals) / max(1, count - 1)
    samp_dev = math.sqrt(variance)
    label = (u"\033[92m \u25f7 %s\033[0m: " % tname).ljust(just)
    sys.stdout.write(label)
    sys.stdout.write(u"%2.2f ms, \u03c3=%2.2f ms, max=%2.2f ms, min=%2.2f ms\n" % (mean * 1000, samp_dev * 1000, max(tvals) * 1000, min(tvals) * 1000))
def show_timing_triple(tname, gpvvals):
    """Print timing summaries for the (generation, signing, verification)
    sample triple `gpvvals`."""
    gvals, pvals, vvals = gpvvals
    show_test("Timings for %s" % tname, 0)
    sys.stdout.write('\n')
    for label, vals in (("Token generation", gvals), ("Signing", pvals), ("Verifying", vvals)):
        show_timing(label, vals, 36)
    sys.stdout.write('\n')
    sys.stdout.flush()
def run_test(f, nreps):
    """Run test function f nreps times and report per-subtest pass/fail counts.

    f.__doc__ must be a comma-separated list: the test name followed by one
    name per boolean check yielded by f().  Returns a tuple
    (number_of_failing_subtests, number_of_subtests).
    """
    # Print a progress dot roughly every nreps/64 iterations.
    ndisp = max(1, nreps >> 6)
    fail_names = f.__doc__.split(",")
    (test_name, fail_names) = (fail_names[0], fail_names[1:])
    show_test(test_name)
    fails = [0] * len(fail_names)
    failed = False
    for idx in range(0, nreps):
        checks = f()
        cidx = 0
        for (cidx, c) in enumerate(checks):
            if not c:
                failed = True
                fails[cidx] += 1
        # Sanity check: f() must yield exactly one check per declared name.
        assert cidx + 1 == len(fails)
        if idx % ndisp == ndisp - 1:
            # "failed" tracks failures since the last printed dot.
            show_progress(failed)
            failed = False
    sys.stdout.write("\n")
    if all( x == 0 for x in fails ):
        show_one_result("all %s subtests passed (%d)" % (test_name, len(fails)), None, None)
    else:
        for (nf, nn) in zip(fails, fail_names):
            show_one_result("%s_%s" % (test_name, nn), nf, nreps)
    return (sum( 1 for x in fails if x > 0 ), len(fails))
def run_all_tests(nreps, modname, *tests):
    """Run every test in *tests* nreps times and print an overall summary."""
    show_test("%s tests" % modname, 0)
    sys.stdout.write("\n")
    totals = [run_test(t, nreps) for t in tests]
    fails = sum(f for (f, _) in totals)
    subtests = sum(s for (_, s) in totals)
    show_test("Summary", 0)
    sys.stdout.write("\n")
    if fails:
        show_one_result("some subtests did not pass", fails, subtests)
    else:
        show_one_result("all %d subtests passed" % subtests, None, None)
    sys.stdout.write("\n")
# some random primes for testing (saves time vs generating on the fly)
primes_1024 = [ 56274201999185253089085184360489614464587875600149878584244396627532609037349612090400754567365855244937901931566175395708309079701360682759565279697244503974698875900562080663185314141329470374192565937999938483985541046278291016570306508184902225765396481128862669276234521449975232892841665242434137701527
, 160104197413736169533482759075458845682436887326335627265951716533753696521217542066928770121419596131312569877508547873037068057643306417272906101678445492650136649349831406458729482165924107830600707254298690550622589262441989507791406389691677212415870455495626407554897654029460469218783676336130440662717
, 52828911461112478441270628238860061511086340665919846590064620799570780148469340944817277481178659426571021663858644754300762202642019137175572336082456511752263174197160867137875968843838492616465556555003697225833924447576372394029110849757285642577712619586019143079206838681507774558984122215664121534097
, 30817232815047642690236588208685313400154297235718280962353461088444039444767896432041584053877144776813350642179762766786200442462726417721398882012241658852450801783557114398190455668187615608420407603137227573537518972006447022723327819681064903298527596961298903632728016406327572510976474173587383508441
, 50234396392114601906369517490271405706779453387174004387887062135354397816073227664596117010731087706650982551180551742319768285697770907971884622981786899412377283615833797740867528687626853264257923194614045996817479013499341140009125067651594245366245571102783530284519583414762906878214562523881269085501
, 166026642875106375051034241518019827798440232677503751158252527796596266221483628854205778090754395709550370401757546587718077868745599449976792599466486738662702419673857207822750830069904057032427140245821423345247375275338399989102158265070361664184890653627020491663664701444313643989318739917517437739597
, 120429362240799794734097056121781111889416344846617491446072479493520317897247381775672807532312222866461352013649767497341684666187080854322745664256348123768976812604407009553267600258149639792624030786554001031971876793707887247956012683117418500433216543529498241794347902218768939144492131816414949790259
, 111264220151166897989689719414310608509475706129310699955279411834403734180887710205473894077261789057437173161031291574227762408936952460764742602612178582974691641277043186165163571150528422777864796412585291171794132268692186881308777059073551575344943350511375411975719556347168232986530955605075295395337
, 99497545659105903710372261880208221140666317320992291847386569571982434788164598641016617307908621100911117217831675986177793230188626246852856478132652458129822999179806820189312391402489045092076084315988854658329929233688717839661764092300845275320364611782131314533387913425474034296637397377282061034179
, 35919557373817058082032299551948299441296544220659794348467889288910634536267059101853019831126857410815227627750661273866727497548261337508572797549774507296989199436139515090326615912551861402679696715157117084973624372549166565796845592843813491655279107597701276852654795628230159107195052415523909677043
, 77207464062561793441862919975165882232393310680694163164453026921299299176207348298180615922267593986045582691397012342285298072335394635283845297930356017429566214196325916661638620412222370592511751327358978532506292784251220432104858938892410296648581766602218067583077494942412209140853644721561241182617
, 20976502932218302879958781051144060332373134257432147161821578606633264347804545941924456565833047942802104632109179477068242991635914390983830918250054251032136774042849843585576701818168222059868334397817932524939950233894741832949244189818495652070981520002109196165488795079890260194760111215232654093213
, 64792968451745325196061747473710129984883596869894123714595765294707977204287966360455774750884867159416441219255556687182510974031342036015442773280448409632048117178171233343789717735428741934709554474378820724648016469938198950712248225804518723124281768472922202356681229452605304858007616298005349571523
, 146345619690110583716099072965451485680708140237410107816966158735120453142417798978390256999756514198972414950772966204670030109172227305876754018949768747012321003925986003404397948538364890140981122767689566656075928535963939736825266942979310599873470528792919336316267862397569308203938790815911491583817
, 10698280100175128261355784512002372257803990757307528706975691477763419472588738973707249180554046968151427562156098787430798191626464327694376410155646169916276988330640670988699299287735298651425125982065104959337595363675167163164429381660089648013622014505387208980462338992036927276635250678637991900059
, 144727526348696960777687188957623344586715633236231280926210579744117307276173206229818866025865948526274565132711325190982518978405490461840295029011450679038012329034465851473063664377365267450191200591372341286072154829271807914168564708600945013830189972450612656059143580154493679883768964063169455150451
]
primes_2048 = [ 25847034201546780436474342804132278483696270887601946305801448178962948965302643193578313051153708440165586169446573111899545638276565225547370613926376202239986186394490817640433772010604479374729636877928340132492900126597741285585650417070732892578335460551215808711573626798728360522677447511895214001659196380148436581780985161666118027818353700314750304649102693217892523951160234688961921833921702015271727119994786036544348110728236000356810227691086994146439877828093383576478999943334939473373693932031913683006851237810106317060060670987149099666470644760334604393839712127685796326237126412086058517030651
, 16685694124116278119373951214115941534612189784654380837587853678710895800990248828598342503403539634780917184477426245693065888489763794871485405488028663760341682804666885555994273589405500760381869075719153999265208572379444102825725841709132945102716055341328506203013850453402399053957744562273061385819793837259385753736376678606576687959931908923712451329564505988408329613421459701883273638713598663301570848574671765575928619812340182346721265286950878078799706570215472403257353425479962496988595498280185125818449933136603190472852628969582278763374624868131590297556310302226188860754096190467807090040877
, 14435992843798391187135161818224243192437496549297003937503687162256087387658357332239013638177828388393296280174030574325182448030091558695212026230455574487126069320558229870034061565619621976653307666888412638034825630437564225397242431656561321584306654147689751275220761780421307459533854130263824229281621813714768129412655080414004541312303915634447187145820931279057684094503308571786051887579117705962966360644935130466960460008604005384237648774068571118696185055316634955685332389901129326246258081088199625692290271359195269026755421365230353530576952119906799097959359461177579370547311513459151723280309
, 18404707629122352924109127109314440313634545687157214353243891214755044740041921405008260565120219441482945462173737403425925642037560530985000533659799801216836842601441135207520665496053735858465971575613776158696648708229189937217681733567121747948035703833819725139693110676735881528922108150520638721743371521744752393830109996305682602665400043766045412805121692573326731522634021825305086771004483680194330317928227248177525822570109293880446950596763207130955071177582866116042337555976099654007985233308539137815986106042851717459644495744128668241306225455499441210832901024346912674577462918426353433486599
, 5280103075115904558508765027567682103999799385075455729255313607740458670654451837488656661174870040131125195302568330903150028901612311610861488437468541273699109061240613794257202167700224092506149782472810790069581159146555963757196008297274492405757021568945618872731291099830758072469400915268030365390441456640716106771499041296888399540384763822586997581833623839432859912972998901579800671303641738960486256160616767548343713500120554476891956567755013069645048733681623498719449738180003654383395349213770892680591175041711754900441630190345732428323095998987986352061632297615433328449749193392849707828769
, 22667104471494592288099945177752692694153971164710449416170916790713538459827167064217679971049921583125499186663152505913982536488228996181467965051766512759089976041483192994651459059238971155334217890518602198953607609141984523884974100067770827231818319378963775510379785999908902993219206649314791136060662744080989551770784503823001731700025141492153124022857989221728021434250112197366254827028502753749880291215209747013295418076184447599255402976615333143567703017984736301780852720468357267573591524310702340568397901845986378996145081540148880393248385356826379946298212061289362695190770872627441860286223
, 31369525209554606243781872919147324495102438235204029024209863960990364788173016485609984157542616821530687077860054182657226524268508850870308695525705464179120260058399486119923345168413129341961927332012856973037798959880697235803480704035779279173240855808158142157971123503154296708811317754274213461651382233302344648050509226964917724185500045556091237800513090875435392081653086625727098741948718259026787073395040413243577779586443920174270515835168155527724979839452996835774962902591815840849991142745534394197953906778975966142467432235494552657288854307685522009043975433801213110572831125772428981005613
, 9830889462965132138705959462261553388126280626805187143388562744640198373566296292273357048632589230670080127192008901370831775879317326960347961482833817171115437262853880339669403198417241594166879587414133075107451372037847631227713408882880398509149327046749725833772140201359658190669764465796820928318881602091170613212182342256385965652082031372762599856552019784468687370788837279870703159968394203891693401628970626854333045166966465593433831533032912244431989114902745938091527792128853799787836770246456315935650842345253375010353964349164641991917776480058108762281089941261153346857167120797701797173217
, 16425001985705049801406567854047086073127739400886126153156457090314190058792977582132326854862479031884437919286067612592750885780324499346040778117136571506414806055640673328850574898234716103123831451036219427744982197364896110018082074373560968723240016617417761480092490817533728673675894824760064888029030969786857427299891767724464100751425180717972298635050804633282834206479831362178462016634908982950618850270683421048435990333914799277540793592187097212465985935361520816006226206142522253660049411914334843188999273366975679945638123242981045376686086415351857926240087756000529444249797948281140860750797
, 12174000277072663132694111281367365428645877576618290061201151401411879625094186161225139001483127756850347306901565160901724456236850818840308128553980292264151202304387603539937849158327594379952903498414842500096909122346854920785464176200030890325516276678069322996265407928741941020271815742604393440830668186520437195654768441331465412571264473078339881537597636077932794077180222506207121343238963393278270853526704883650546969581986807346741158197537368810933762588613969529258234138213444363793559806753896940263386806544083417836806265688209717892609305307822002489081816938971540715010204635638959872069383
, 3237103031500867098321186786517528168574799789915832107232898654735840604943570992286132082345937243291531205211192012779075563641326889836973070545211822990617652558426452697857789899999279683993090012117464672117886770602440168319863143917822916206683351635354555063916015816370917232268413925159859287908508182052893407531427829957936920793714202088216692972386200887736250286344513457226319963902073528399561281693746379915787996867612570396498063107400269305088714514781363835891303118394643451756680125341238941200325994765342167708278057001805841012849015739791345634583206018308274378034775423507486682291441
, 23798395843759865970572157405719299409679832080031916162437000667138444865321976549957060252027933852435488114051971679262317660941838502637952049563627504489830541114255355055173575438534085749093838573154617770199255452691401501447448985499390415175837609475399831264415203639498953614365358450310849286047470753782408733509604190681375662715761198453929075911524651421748647975291134925207394558178441805235091431738981785099338589230380955599231105404180359080673242469902417933923785998557390169755852795155240452383094318092140045538401186275331485291236035719677943588391038583107742219935264275607723334588021
, 4180671455942636543094139596341252547581638085999390538168781596666986312916316791586350549270383786652688738133561023106628512993555465905685635185509792172540769848856186647779433181360435282821345145176892498810050194148836627533777703015698219288642887929879987091108360342564013772571067705932172721125976953679624100542018155417064037394044882992756154197852850976377939745237288587249405604225705042846401919591830021713507244051120288238344392109854312502233677349574017528999827121159888999209698851606749628795460677993125630927672818963553720538572517969079138105020363675169913748497505438535096129094543
, 13498311400665465113692361968715863060943526907836574816844702975139163986008331077392886710016607552225961987734904867853270635328516523541804454220536047246050744924864205915177960153489189762088771785771792330724995531512702565171713426397860490262184371822873702155319146672894031125790600394666890016397666349827660063420547801489067037013417986070508442887833624986374625894727010690822525602414452704243663855059608392832571057586003290565306256647718766131862247517539906186862423783643549573629253881416830342642043364533411602933262834266547002676886644504668871898870606289473798362201631868550391485261817
, 23327344558654898417807202127235596756043853075449217195800374285863570109994985422746315115613399388482819306231463673042914944001635008012441817938140619119975695355879792660806335386219100733604935187312996604320514233616080226114662990106230206895848550953635979359397598700137432346931335375082648318008317432443769746291452408552230469862815593503779549612181045113917334290858886861384958798931545668122178627783564003535532440937362056754015627420708871620538761982919958787428669649550623931592001608149255309112723581989853951586189622828078638304544957659516956904162247400293872782370874168557361801709499
, 7797475962205081870379191226238756363914735330638510142979001385759278289428172500433219492402719413813213353794634519914085596871088029020930597102155050063349311475963159312523950132864740760938472382180770869970444905454393697268441434535356975507854658375869242166191654801621568857412166088953956766961530000830007641956183444570307155650450051979464775690617245426905450694525265441731687586290221525296981187118531950369082343812021032829166615628759440051417737697463983449619956441795607846427445827011773838819989877116539203848895055939800852004184549768172936393304644932490210448159285118834217596434563
]
|
# Map each student's numeric score to a grade band.
student_scores = {
    "a": 81,
    "b": 78,
    "c": 99,
    "d": 74,
    "e": 62,
}
student_grades = {}
# Iterate over (name, score) pairs directly instead of re-indexing the
# dict on every pass.
for student, score in student_scores.items():
    if score > 90:
        student_grades[student] = "Outstanding"
    elif score > 80:
        student_grades[student] = "Exceeds Expectations"
    elif score > 70:
        student_grades[student] = "Acceptable"
    else:
        student_grades[student] = "Fail"
print(student_grades)
|
21,329 | f30db0aae167f9bc73cf5fdce12758ca01a2b865 | # -*- coding: utf-8 -*-
"""
Created on Sun Oct 21 16:33:16 2018
@author: Sruthi Pasumarthy & Rahul Kodarapu
"""
import csv
import os
import sys
fileName = sys.argv[1]
if os.path.exists(fileName):
csvFileLoc = os.path.basename(fileName)
learningRate = float(sys.argv[2])
threshold = float(sys.argv[3])
'''threshold=float(input('Enter the Threshold :'))
learningRate=float(input('Enter the Learning Rate : '))
fileLocation= input('Please enter file location : ')
csvFileLoc = os.path.isdir(fileLocation)'''
#threshold = 0.0001
#learningRate= 0.0001
#csvFileLoc = 'C:\\Users\\rahut\\Desktop\\p1linreg\\random.csv'
csvFile = open(csvFileLoc,'r')
reader = csv.reader(csvFile, delimiter = ',')
numOfRows = len(list(reader))
#print("Num of Rows: ",numOfRows)
csvFile.seek(0) #Goes back to the beginning of the file
listOfLists = []
firstRow = next(reader,None) #This is because the reader is omitting the first row, so we are forece feeding it. It
# None tells it that the first row is not a header
listOfLists.append(firstRow)
#print(len(listOfLists))
#rint(listOfLists)
csvFile.seek(0)
numOfCols = len(next(reader))
#print ("Num of Columns: ",numOfCols)
for row in reader:
listOfLists.append(row)
#print(len(listOfLists))
weights = []
j = 0
while j < numOfCols:
initialWeight = 0
weights.append(initialWeight)
j = j+1
#print('Weights: ',weights)
#weights=[-0.0940224666666667, -0.5375774493333338, -0.2591702260000002]
weightsList = []
weightsList.append(weights)
#print(weightsList)
i = 0
xVectors = []
yValues = []
counter = 0
for item in listOfLists:
x = []
while i < numOfCols:
#print(item[i])
if(i == numOfCols-1):
yValues.append(item[i])
else:
x.append(item[i])
#print(x)
i = i+1
xVectors.append(x)
counter = counter + 1
i=0 #reinitialising the value of i
#print(xVectors)
#print(yValues)
#print(counter)
def calculateLinearFunction(w):
    """Return the prediction w0*1 + w1*x1 + ... + wn*xn for every sample.

    Reads the module-level xVectors (feature rows) and yValues (used only
    for the sample count, matching the original loop bound).
    """
    predictions = []
    for k in range(len(yValues)):
        features = xVectors[k]
        # bias term first (x0 is implicitly 1), then one term per feature
        value = float(w[0]) * float(1)
        for pos in range(len(features)):
            value = float(value) + float(w[pos + 1]) * float(features[pos])
        predictions.append(value)
    return predictions
def calculateGradientsAndSSE(lf, w):
    """Compute the sum of squared errors and batch gradients.

    Args:
        lf: predicted values, one per sample (from calculateLinearFunction).
        w:  current weight vector; only its length is used, to size the
            gradient vector.

    Returns:
        (SSE, gradients) where gradients[0] is sum(error) (bias term) and
        gradients[j+1] is sum(x_j * error) over all samples.

    Reads the module-level xVectors and yValues.

    Fixes vs. the original: drops the `elementTimesErrorList` accumulator
    that was computed but never used in the result, and the unconditional
    `xVectors[0]` access that crashed on an empty dataset; replaces four
    manual while-loop counters with range() loops.
    """
    gradients = [0.0] * len(w)
    SSE = 0.0
    for row in range(len(yValues)):
        error = float(yValues[row]) - float(lf[row])
        SSE += error * error
        gradients[0] += error              # d/dw0 term: x0 == 1
        features = xVectors[row]
        for col in range(len(features)):
            gradients[col + 1] += float(features[col]) * error
    return SSE, gradients
linearFunctions = []
linearFunctions = calculateLinearFunction(weights)
SSEout=0
gradientsListOut=[]
SSEout,gradientsListOut=calculateGradientsAndSSE(linearFunctions, weights)
iterationWiseResult=[]
resultDisplay= []
iteration= 0
resultDisplay.append(iteration)
resultDisplay.append(weights)
resultDisplay.append(SSEout)
#print(resultDisplay)
iterationWiseResult.append(resultDisplay)
#print(iterationWiseResult)
def calculateNewWeights(w, g):
    """One gradient step: w_i + learningRate * g_i for every weight.

    Appends the new vector to the module-level weightsList history and
    returns it.
    """
    updated = [w[i] + learningRate * g[i] for i in range(len(w))]
    # keep the full history of weight vectors for reporting
    weightsList.append(updated)
    return updated
newWeightsOut=[]
newWeightsOut= calculateNewWeights(weights,gradientsListOut)
#print(weightsList)
def updateIterationWeightsSSE(sse, newweightsout):
    """Iterate gradient descent until SSE drops to threshold, then print.

    Each pass recomputes predictions, SSE and gradients for the current
    weights, records [iteration, weights, SSE] in iterationWiseResult,
    and steps the weights.  Finally prints the whole history.
    """
    iteration = 1
    current_sse = sse
    weights_now = newweightsout
    while current_sse > threshold:
        predictions = calculateLinearFunction(weights_now)
        current_sse, gradients = calculateGradientsAndSSE(predictions, weights_now)
        # record [iteration, weights, SSE] for the final report
        iterationWiseResult.append([iteration, weights_now, current_sse])
        weights_now = calculateNewWeights(weights_now, gradients)
        iteration = iteration + 1
    for record in iterationWiseResult:
        print(record)
updateIterationWeightsSSE(SSEout,newWeightsOut)
|
21,330 | 34233f3468edfe256978234d9cf604f703882cd0 | #! /usr/bin/env python
# .. coding: utf-8
# $Id: test_error_reporting.py 7723 2013-09-28 09:17:07Z milde $
# Author: Günter Milde <milde@users.sourceforge.net>
# Copyright: This module has been placed in the public domain.
"""
Test `EnvironmentError` reporting.
In some locales, the `errstr` argument of IOError and OSError contains
non-ASCII chars.
In Python 2, converting an exception instance to `str` or `unicode`
might fail, with non-ASCII chars in arguments and the default encoding
and errors ('ascii', 'strict').
Therefore, Docutils must not use string interpolation with exception
instances like, e.g., ::
try:
something
except IOError, error:
print 'Found %s' % error
unless the minimal required Python version has this problem fixed.
"""
import unittest
import sys, os
import codecs
try: # from standard library module `io`
from io import StringIO, BytesIO
except ImportError: # new in Python 2.6
from StringIO import StringIO
BytesIO = StringIO
import DocutilsTestSupport # must be imported before docutils
from docutils import core, parsers, frontend, utils
from docutils.utils.error_reporting import SafeString, ErrorString, ErrorOutput
from docutils._compat import b, bytes
oldlocale = None
if sys.version_info < (3,0): # problems solved in py3k
try:
import locale # module missing in Jython
oldlocale = locale.getlocale()
# Why does getlocale return the defaultlocale in Python 3.2 ????
# oldlocale = (None, None) # test suite runs without locale
except ImportError:
print ('cannot test error reporting with problematic locales,\n'
'`import locale` failed.')
# locales confirmed to use non-ASCII chars in the IOError message
# for a missing file (https://bugs.gentoo.org/show_bug.cgi?id=349101)
# TODO: add more confirmed problematic locales
problematic_locales = ['cs_CZ', 'cs_CZ.UTF8',
'el_GR', 'el_GR.UTF-8',
# 'fr_FR.UTF-8', # only OSError
'ja_JP.UTF-8',
'ru_RU', 'ru_RU.KOI8-R',
'ru_RU.UTF-8',
'', # default locale: might be non-problematic
]
if oldlocale is not None:
# find a supported problematic locale:
for testlocale in problematic_locales:
try:
locale.setlocale(locale.LC_ALL, testlocale)
except locale.Error:
testlocale = None
else:
break
locale.setlocale(locale.LC_ALL, oldlocale) # reset
else:
testlocale = None
class SafeStringTests(unittest.TestCase):
    """Conversion tests for SafeString wrappers around problematic data.

    The error message in EnvironmentError instances comes from the OS
    and in some locales (e.g. ru_RU) contains high-bit chars; the
    locale-dependent cases live in SafeStringTests_locale.
    """
    # test data:
    bs = b('\xfc') # unicode(bs) fails, str(bs) in Python 3 return repr()
    us = u'\xfc'   # bytes(us) fails; str(us) fails in Python 2
    be = Exception(bs) # unicode(be) fails
    ue = Exception(us) # bytes(ue) fails, str(ue) fails in Python 2;
                       # unicode(ue) fails in Python < 2.6 (issue2517_)
    # .. _issue2517: http://bugs.python.org/issue2517
    # wrapped test data:
    wbs = SafeString(bs)
    wus = SafeString(us)
    wbe = SafeString(be)
    wue = SafeString(ue)
    def test_7bit(self):
        # wrapping (not required with 7-bit chars) must not change the
        # result of conversions:
        bs7 = b('foo')
        us7 = u'foo'
        be7 = Exception(bs7)
        ue7 = Exception(us7)
        self.assertEqual(str(42), str(SafeString(42)))
        self.assertEqual(str(bs7), str(SafeString(bs7)))
        self.assertEqual(str(us7), str(SafeString(us7)))
        self.assertEqual(str(be7), str(SafeString(be7)))
        self.assertEqual(str(ue7), str(SafeString(ue7)))
        self.assertEqual(unicode(7), unicode(SafeString(7)))
        self.assertEqual(unicode(bs7), unicode(SafeString(bs7)))
        self.assertEqual(unicode(us7), unicode(SafeString(us7)))
        self.assertEqual(unicode(be7), unicode(SafeString(be7)))
        self.assertEqual(unicode(ue7), unicode(SafeString(ue7)))
    def test_ustr(self):
        """Test conversion to a unicode-string."""
        # unicode(self.bs) fails
        self.assertEqual(unicode, type(unicode(self.wbs)))
        self.assertEqual(unicode(self.us), unicode(self.wus))
        # unicode(self.be) fails
        self.assertEqual(unicode, type(unicode(self.wbe)))
        # unicode(ue) fails in Python < 2.6 (issue2517_)
        self.assertEqual(unicode, type(unicode(self.wue)))
        self.assertEqual(self.us, unicode(self.wue))
    def test_str(self):
        """Test conversion to a string (bytes in Python 2, unicode in Python 3)."""
        self.assertEqual(str(self.bs), str(self.wbs))
        # BUG FIX: the original compared str(self.be) to itself, a no-op
        # assertion; the wrapped value self.wbe is what must match.
        self.assertEqual(str(self.be), str(self.wbe))
        # str(us) fails in Python 2
        self.assertEqual(str, type(str(self.wus)))
        # str(ue) fails in Python 2
        self.assertEqual(str, type(str(self.wue)))
class ErrorStringTests(unittest.TestCase):
    """ErrorString must render '<ExcName>: <arg>' under both str and unicode,
    even when the argument is a non-ASCII byte or unicode string."""
    bs = b('\xfc') # unicode(bs) fails, str(bs) in Python 3 return repr()
    us = u'\xfc' # bytes(us) fails; str(us) fails in Python 2
    def test_str(self):
        # plain ASCII argument: exact rendering
        self.assertEqual('Exception: spam',
                         str(ErrorString(Exception('spam'))))
        # byte-string argument: falls back to whatever str() yields for it
        self.assertEqual('IndexError: '+str(self.bs),
                         str(ErrorString(IndexError(self.bs))))
        # unicode argument: SafeString supplies the str-safe form
        self.assertEqual('ImportError: %s' % SafeString(self.us),
                         str(ErrorString(ImportError(self.us))))
    def test_unicode(self):
        self.assertEqual(u'Exception: spam',
                         unicode(ErrorString(Exception(u'spam'))))
        self.assertEqual(u'IndexError: '+self.us,
                         unicode(ErrorString(IndexError(self.us))))
        self.assertEqual(u'ImportError: %s' % SafeString(self.bs),
                         unicode(ErrorString(ImportError(self.bs))))
# ErrorOutput tests
# -----------------
# Stub: Buffer with 'strict' auto-conversion of input to byte string:
class BBuf(BytesIO, object): # super class object required by Python <= 2.5
    """Byte buffer stub: raises UnicodeEncodeError on non-ASCII unicode input."""
    def write(self, data):
        if isinstance(data, unicode):
            # the encode result is deliberately discarded -- the call exists
            # only to raise on non-ASCII text, emulating a strict byte stream
            data.encode('ascii', 'strict')
        super(BBuf, self).write(data)
# Stub: Buffer expecting unicode string:
class UBuf(StringIO, object): # super class object required by Python <= 2.5
    """Text buffer stub: rejects bytes, like Python 3 stdout/stderr."""
    def write(self, data):
        # emulate Python 3 handling of stdout, stderr
        if isinstance(data, bytes):
            raise TypeError('must be unicode, not bytes')
        super(UBuf, self).write(data)
class ErrorOutputTests(unittest.TestCase):
    """ErrorOutput must adapt str/bytes/Exception input to the stream type."""
    def test_defaults(self):
        # with no stream argument, ErrorOutput writes to stderr
        e = ErrorOutput()
        self.assertEqual(e.stream, sys.stderr)
    def test_bbuf(self):
        buf = BBuf() # buffer storing byte string
        e = ErrorOutput(buf, encoding='ascii')
        # write byte-string as-is
        e.write(b('b\xfc'))
        self.assertEqual(buf.getvalue(), b('b\xfc'))
        # encode unicode data with backslashescape fallback replacement:
        e.write(u' u\xfc')
        self.assertEqual(buf.getvalue(), b('b\xfc u\\xfc'))
        # handle Exceptions with Unicode string args
        # unicode(Exception(u'e\xfc')) # fails in Python < 2.6
        e.write(AttributeError(u' e\xfc'))
        self.assertEqual(buf.getvalue(), b('b\xfc u\\xfc e\\xfc'))
        # encode with `encoding` attribute
        e.encoding = 'utf8'
        e.write(u' u\xfc')
        self.assertEqual(buf.getvalue(), b('b\xfc u\\xfc e\\xfc u\xc3\xbc'))
    def test_ubuf(self):
        buf = UBuf() # buffer only accepting unicode string
        # decode of binary strings
        e = ErrorOutput(buf, encoding='ascii')
        e.write(b('b\xfc'))
        self.assertEqual(buf.getvalue(), u'b\ufffd') # use REPLACEMENT CHARACTER
        # write Unicode string and Exceptions with Unicode args
        e.write(u' u\xfc')
        self.assertEqual(buf.getvalue(), u'b\ufffd u\xfc')
        e.write(AttributeError(u' e\xfc'))
        self.assertEqual(buf.getvalue(), u'b\ufffd u\xfc e\xfc')
        # decode with `encoding` attribute
        e.encoding = 'latin1'
        e.write(b(' b\xfc'))
        self.assertEqual(buf.getvalue(), u'b\ufffd u\xfc e\xfc b\xfc')
class SafeStringTests_locale(unittest.TestCase):
    """
    Test docutils.SafeString with 'problematic' locales.
    The error message in `EnvironmentError` instances comes from the OS
    and in some locales (e.g. ru_RU), contains high bit chars.
    """
    # NOTE: all of the probing below runs at class-body time (import time),
    # under the problematic test locale, and is reset afterwards.
    if testlocale:
        locale.setlocale(locale.LC_ALL, testlocale)
    # test data:
    bs = b('\xfc')
    us = u'\xfc'
    # provoke real IOError/OSError instances whose messages come from the OS
    try:
        open(b('\xfc'))
    except IOError, e: # in Python 3 the name for the exception instance
        bioe = e # is local to the except clause
    try:
        open(u'\xfc')
    except IOError, e:
        uioe = e
    except UnicodeEncodeError:
        # filesystem encoding cannot represent u'\xfc': retry with a
        # replacement-encoded byte path so we still get an IOError
        try:
            open(u'\xfc'.encode(sys.getfilesystemencoding(), 'replace'))
        except IOError, e:
            uioe = e
    try:
        os.chdir(b('\xfc'))
    except OSError, e:
        bose = e
    try:
        os.chdir(u'\xfc')
    except OSError, e:
        uose = e
    except UnicodeEncodeError:
        try:
            os.chdir(u'\xfc'.encode(sys.getfilesystemencoding(), 'replace'))
        except OSError, e:
            uose = e
    # wrapped test data:
    wbioe = SafeString(bioe)
    wuioe = SafeString(uioe)
    wbose = SafeString(bose)
    wuose = SafeString(uose)
    # reset locale
    if testlocale:
        locale.setlocale(locale.LC_ALL, oldlocale)
    def test_ustr(self):
        """Test conversion to a unicode-string."""
        # unicode(bioe) fails with e.g. 'ru_RU.utf8' locale
        self.assertEqual(unicode, type(unicode(self.wbioe)))
        self.assertEqual(unicode, type(unicode(self.wuioe)))
        self.assertEqual(unicode, type(unicode(self.wbose)))
        self.assertEqual(unicode, type(unicode(self.wuose)))
    def test_str(self):
        """Test conversion to a string (bytes in Python 2, unicode in Python 3)."""
        self.assertEqual(str(self.bioe), str(self.wbioe))
        self.assertEqual(str(self.uioe), str(self.wuioe))
        self.assertEqual(str(self.bose), str(self.wbose))
        self.assertEqual(str(self.uose), str(self.wuose))
class ErrorReportingTests(unittest.TestCase):
    """
    Test cases where error reporting can go wrong.
    Do not test the exact output (as this varies with the locale), just
    ensure that the correct exception is thrown.
    """
    # These tests fail with a 'problematic locale',
    # Docutils revision < 7035, and Python 2:
    parser = parsers.rst.Parser()
    """Parser shared by all ParserTestCases."""
    option_parser = frontend.OptionParser(components=(parsers.rst.Parser,))
    settings = option_parser.get_default_values()
    # halt_level 1 makes any reported system message raise SystemMessage,
    # which is what the assertRaises checks below rely on
    settings.report_level = 1
    settings.halt_level = 1
    settings.warning_stream = ''
    document = utils.new_document('test data', settings)
    def setUp(self):
        # run each test under the problematic locale (if one was found)
        if testlocale:
            locale.setlocale(locale.LC_ALL, testlocale)
    def tearDown(self):
        if testlocale:
            locale.setlocale(locale.LC_ALL, oldlocale)
    def test_include(self):
        # a missing include file must surface as SystemMessage, not crash
        source = ('.. include:: bogus.txt')
        self.assertRaises(utils.SystemMessage,
                          self.parser.parse, source, self.document)
    def test_raw_file(self):
        source = ('.. raw:: html\n'
                  '   :file: bogus.html\n')
        self.assertRaises(utils.SystemMessage,
                          self.parser.parse, source, self.document)
    def test_csv_table(self):
        source = ('.. csv-table:: external file\n'
                  '   :file: bogus.csv\n')
        self.assertRaises(utils.SystemMessage,
                          self.parser.parse, source, self.document)
if __name__ == '__main__':
unittest.main()
|
21,331 | 67e3d4f48d04c18b0ac42260c82cd5b9b6f19883 | #!/usr/bin/env python3.9
# 0, 3, 6
# count[0] = [2, {1, 4}]
# count[3] = [1, {None, 2}]
# count[6] = [1, {None, 3}]
#
# global
PUZZLE_INPUT = [20, 0, 1, 11, 6, 3]
def set_new_count(num, turn, count):
    """Record *num* as spoken for the first time on *turn*."""
    history = {"last_to_last": None, "last": turn}
    count[num] = [1, history]
def update_count(num, turn, count):
    """Bump *num*'s spoken tally and shift its last-two-turns history."""
    tally, history = count[num]
    count[num][0] = tally + 1
    history["last_to_last"] = history["last"]
    history["last"] = turn
def get_new_number(num, count):
    """Return the gap between the last two turns on which *num* was spoken."""
    history = count[num][1]
    return history["last"] - history["last_to_last"]
def part_1():
    """Play the memory game and return the 30,000,000th number spoken.

    Also prints the 2020th number spoken along the way; the return value
    is the answer for the larger turn count.
    """
    turn = 0
    # spoken number -> [times spoken, {"last_to_last"/"last": turn history}]
    count = {}
    last_num = None  # number spoken on the previous turn
    while True:
        turn += 1
        # at this point last_num still holds the number spoken on turn-1,
        # so these checks report the (turn-1)-th spoken number
        if turn == 2021:
            print(f"2020th Number: {last_num}")
        if turn == 30000001:
            return last_num
        if turn <= len(PUZZLE_INPUT):
            # still reading out the starting numbers
            last_num = PUZZLE_INPUT[turn - 1]
        else:
            # first time spoken -> say 0; otherwise say the gap between
            # its last two occurrences
            last_num = 0 if count[last_num][0] == 1 else get_new_number(last_num, count)
        if last_num not in count.keys():
            set_new_count(last_num, turn, count)
        else:
            update_count(last_num, turn, count)
if __name__ == "__main__":
print("30000000th Turn - last number: {}".format(part_1()))
|
21,332 | bff5da773d5feb83d0816e05c5f3fd32d9fca054 | """
蓝牙设备追踪服务
device_tracker:
- platform: ble_tracker
"""
import logging
import time
from homeassistant.components.http import HomeAssistantView
from homeassistant.helpers.typing import HomeAssistantType
_LOGGER = logging.getLogger(__name__)
VERSION = '1.1'
DOMAIN = 'ble_tracker'  # integration domain; also used as the HTTP view name/path
BT_PREFIX = "BLE_"  # prefix applied to reported MACs to namespace tracker entities
# Report one scanned device to the device tracker
async def see_device(
    hass: HomeAssistantType, async_see, result
) -> None:
    """Forward one BLE scan ``result`` dict to the tracker via ``async_see``."""
    mac = result['mac']
    name = result['name']
    now = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time()))
    attributes = {
        "device_name": name,
        "services": result['services'],
        "rssi": result['rssi'],
        "type": result['type'],
        "scan_time": result['time'],
        "update_time": now,
    }
    await async_see(
        mac=f"{BT_PREFIX}{mac}",
        host_name=name,
        attributes=attributes,
        source_type='ble_tracker',
    )
# Set up the scanner platform
async def async_setup_scanner(
    hass: HomeAssistantType, config: dict, async_see, discovery_info=None
):
    """Expose an authenticated HTTP endpoint (``/ble_tracker-api``) that an
    external BLE scanner POSTs scan results to.

    Each POSTed JSON body is forwarded to ``see_device`` and echoed back.
    Always returns True (platform set up successfully).
    """
    _LOGGER.info('''
-------------------------------------------------------------------
蓝牙设备追踪【作者QQ:635147515】
版本:''' + VERSION + '''
介绍:这是一个在树莓派上使用的,与蓝牙扫描服务搭配使用的插件
项目地址:https://github.com/shaonianzhentan/ha_cloud_music/tree/master/custom_components/ble_tracker
-------------------------------------------------------------------''')
    # Create the API gateway view
    class HAGateView(HomeAssistantView):
        url = '/' + DOMAIN + '-api'
        name = DOMAIN
        requires_auth = True
        async def post(self, request):
            """Receive one scan result, hand it to see_device, echo it back."""
            response = await request.json()
            await see_device(hass, async_see, response)
            return self.json(response)
    # Register the HTTP view with Home Assistant
    hass.http.register_view(HAGateView)
    return True
|
21,333 | f4200b8a4c3d40008f23541759ee5b4f4f45bca3 | from sklearn.base import BaseEstimator, TransformerMixin
from sklearn.exceptions import NotFittedError
from mlxtend.feature_selection import ExhaustiveFeatureSelector
import pandas as pd
class DFExhaustiveFeatureSelector(BaseEstimator, TransformerMixin):
    """DataFrame-friendly wrapper around mlxtend's ExhaustiveFeatureSelector.

    Fits the selector on the requested columns, stores the per-subset
    metrics in ``stat_df``, flags the best-scoring subset with a
    ``support`` column, and ``transform`` keeps only the winning features.
    """

    def __init__(self, columns=None, **kwargs):
        # columns: subset of the DataFrame to search over (None = all columns)
        self.columns = columns
        self.selector = ExhaustiveFeatureSelector(**kwargs)
        self.transform_cols = None
        self.stat_df = None

    def fit(self, X, y):
        self.columns = X.columns if self.columns is None else self.columns
        self.transform_cols = [x for x in X.columns if x in self.columns]
        self.selector.fit(X[self.transform_cols], y)
        self.stat_df = pd.DataFrame.from_dict(self.selector.get_metric_dict()).T
        # Flag the subset with the best average CV score.
        self.stat_df.at[self.stat_df['avg_score'].astype(float).idxmax(), 'support'] = True
        # Assign instead of fillna(inplace=True) on a column selection:
        # chained in-place fillna is deprecated in pandas 2.x and may not
        # write back to the frame.
        self.stat_df['support'] = self.stat_df['support'].fillna(False)
        return self

    def transform(self, X):
        if self.transform_cols is None:
            raise NotFittedError(f"This {self.__class__.__name__} instance is not fitted yet. Call 'fit' with appropriate arguments before using this estimator.")
        features = list(self.stat_df[self.stat_df['support']]['feature_names'].values[0])
        new_X = X[features].copy()
        return new_X

    def fit_transform(self, X, y):
        return self.fit(X, y).transform(X)
21,334 | b6358c02d0700f7a9c588c36c0e8a7ff4fe2a676 | from arche.tools import api
def generate_quality_estimation(
    job,
    crawlera_user,
    no_of_validation_warnings,
    no_of_duplicated_items,
    checked_dup_items_count,
    no_of_price_warns,
    no_of_checked_price_items,
    tested,
    **kwargs
):
    """Blend the individual rule scores into an overall quality estimate.

    The weighting depends on which optional checks actually ran (duplicate
    check, price comparison).  Extra rules passed via ``kwargs`` (objects
    with ``err_items_count`` and ``items_count`` attributes) each apply a
    multiplicative penalty of 5% (error rate < 10%) or 10% (otherwise).

    Returns:
        Tuple ``(quality_estimation, field_accuracy)`` as ints, 0-100 scale.

    Note:
        The original implementation carried three additional ``elif``
        branches whose conditions repeated ones already matched earlier and
        so could never run; they were removed without changing behaviour.
    """
    no_of_scraped_items = api.get_items_count(job)
    no_of_errors = api.get_errors_count(job)
    job_state = api.get_job_state(job)
    job_close_reason = api.get_job_close_reason(job)
    response_status_count = api.get_response_status_count(job)
    adherence_to_schema_percent = float(
        get_adherence_to_schema_percent(no_of_validation_warnings, no_of_scraped_items)
    )
    duplicated_items_percent = float(
        get_duplicated_items_percent(no_of_duplicated_items, no_of_scraped_items)
    )
    crawlera_incapsula_percent = float(get_crawlera_incapsula_percent(crawlera_user))
    no_of_errors_percent = float(get_errors_count_percent(no_of_errors))
    price_was_price_now_comparison_percent = float(
        get_price_was_price_now_comparison_percent(
            no_of_price_warns, no_of_scraped_items
        )
    )
    outcome_percent = float(get_outcome_percent(job_state, job_close_reason))
    response_status_count_percent = float(
        get_response_status_count_percent(response_status_count)
    )
    tested_percent = float(get_tested_percent(tested))
    # Weighted blend; branches ordered from "no optional checks ran" to
    # "both duplicate and price checks ran".
    if checked_dup_items_count == 0 and no_of_checked_price_items == 0:
        quality_estimation = (
            adherence_to_schema_percent * 60 / 100
            + crawlera_incapsula_percent * 8 / 100
            + no_of_errors_percent * 5 / 100
            + outcome_percent * 5 / 100
            + response_status_count_percent * 7 / 100
            + tested_percent * 15 / 100
        )
    elif checked_dup_items_count == 0:
        quality_estimation = (
            adherence_to_schema_percent * 55 / 100
            + crawlera_incapsula_percent * 8 / 100
            + no_of_errors_percent * 5 / 100
            + price_was_price_now_comparison_percent * 5 / 100
            + outcome_percent * 5 / 100
            + response_status_count_percent * 7 / 100
            + tested_percent * 15 / 100
        )
    elif no_of_checked_price_items == 0:
        quality_estimation = (
            adherence_to_schema_percent * 50 / 100
            + duplicated_items_percent * 10 / 100
            + crawlera_incapsula_percent * 8 / 100
            + no_of_errors_percent * 5 / 100
            + outcome_percent * 5 / 100
            + response_status_count_percent * 7 / 100
            + tested_percent * 15 / 100
        )
    else:
        quality_estimation = (
            adherence_to_schema_percent * 40 / 100
            + duplicated_items_percent * 15 / 100
            + crawlera_incapsula_percent * 8 / 100
            + no_of_errors_percent * 5 / 100
            + price_was_price_now_comparison_percent * 5 / 100
            + outcome_percent * 5 / 100
            + response_status_count_percent * 7 / 100
            + tested_percent * 15 / 100
        )
    # Field accuracy is the schema-adherence score (the original multiplied
    # it by 100/100, which is the identity).
    field_accuracy = adherence_to_schema_percent
    for rule_result in kwargs.values():
        # NOTE(review): raises ZeroDivisionError when items_count == 0 —
        # confirm upstream guarantees non-empty rule results.
        if rule_result.err_items_count / rule_result.items_count < 0.1:
            quality_estimation = quality_estimation * 0.95
        else:
            quality_estimation = quality_estimation * 0.90
    return int(quality_estimation), int(field_accuracy)
def check_percentage(rule, scraped_items):
    """Return *rule* expressed as a percentage of *scraped_items*."""
    ratio = float(rule) / scraped_items
    return ratio * 100
def get_adherence_to_schema_percent(rule_result, no_of_scraped_items):
    """Map the validation-warning rate to a 100/75/50/25/0 score."""
    if rule_result == 0:
        return 100
    pct = float(rule_result) / no_of_scraped_items * 100
    if 0 < pct <= 2:
        return 75
    if 2 < pct <= 4:
        return 50
    if 4 < pct <= 8:
        return 25
    return 0
def get_duplicated_items_percent(rule_result, no_of_scraped_items):
    """Map the duplicated-items rate to a 100/75/50/25/0 score."""
    if rule_result == 0:
        return 100
    pct = float(rule_result) / no_of_scraped_items * 100
    if 0 < pct <= 5:
        return 75
    if 5 < pct <= 10:
        return 50
    if 10 < pct <= 20:
        return 25
    return 0
def get_duplicated_skus_percent(rule_result, no_of_scraped_items):
    """Map the duplicated-SKUs rate to a 100/75/50/25/0 score.

    The thresholds are identical to the duplicated-items rule, so this
    delegates to ``get_duplicated_items_percent`` instead of carrying the
    verbatim copy of its body that used to live here.
    """
    return get_duplicated_items_percent(rule_result, no_of_scraped_items)
def get_crawlera_incapsula_percent(crawlera_user):
    """Having crawlera/incapsula enabled makes the spider less stable, so a
    configured crawlera user scores 0; none scores 100."""
    return 0 if crawlera_user else 100
def get_errors_count_percent(rule_result):
    """Score the error count: 0 -> 100, 1-5 -> 50, 6-10 -> 20, else 0."""
    errors = int(rule_result)
    if errors == 0:
        return 100
    if 0 < errors <= 5:
        return 50
    if 5 < errors <= 10:
        return 20
    return 0
def get_price_was_price_now_comparison_percent(rule_result, no_of_scraped_items):
    """Score price-comparison warnings: none -> 100, rate <= 5% -> 50, else 0."""
    if rule_result == 0:
        return 100
    pct = float(rule_result) / no_of_scraped_items * 100
    if 0 < pct <= 5:
        return 50
    return 0
def get_outcome_percent(job_state, job_close_reason):
    """100 only when the job both ran and closed as 'finished', else 0."""
    finished = (
        job_state.lower() == "finished"
        and str(job_close_reason).lower() == "finished"
    )
    return 100 if finished else 0
def get_response_status_count_percent(rule_result):
    """Score the bad-response ratio.

    ``rule_result`` is indexable as [total, bad_a, bad_b, bad_c]; the three
    bad counts are summed and expressed as a percentage of the total.
    """
    if rule_result[1] == rule_result[2] == rule_result[3] == 0:
        return 100
    bad = rule_result[1] + rule_result[2] + rule_result[3]
    bad_pct = float(bad) / rule_result[0] * 100
    if 0 < bad_pct <= 1:
        return 100
    if 1 < bad_pct <= 5:
        return 50
    if 5 < bad_pct <= 10:
        return 20
    return 0
def get_tested_percent(rule_result):
    """Map a truthy tested flag to 100, a falsy one to 0."""
    return 100 if rule_result else 0
|
21,335 | c02cc1b2460933e28922cbab67092de6b3bc61b8 | import logging
import os
from pyscreenshot.imcodec import codec
from pyscreenshot.loader import FailedBackendError
from pyscreenshot.procutil import proc, run_in_childprocess
from pyscreenshot.tempdir import TemporaryDirectory
log = logging.getLogger(__name__)
# Child-process strategy used by the childprocess_* helpers below:
# 0 = multiprocessing (fork)
# 1 = popen (spawn)
POPEN = 1
def childprocess_backend_version(_backend_version, backend):
    """Query *backend*'s version from a child process.

    With POPEN set a spawned CLI helper is used; otherwise the
    ``_backend_version`` callable runs in a forked child.
    """
    if POPEN:
        return childprocess_backend_version_popen(backend)
    return run_in_childprocess(_backend_version, None, backend)
def childprocess_backend_version_popen(backend):
    """Run the print_backend_version CLI helper; return its stdout.

    Raises FailedBackendError (after logging) on a non-zero exit code.
    """
    result = proc("pyscreenshot.cli.print_backend_version", [backend])
    if result.return_code != 0:
        log.error(result)
        raise FailedBackendError(result)
    return result.stdout
def childprocess_grab(_grab_simple, backend, bbox):
    """Grab a screenshot in a child process.

    With POPEN set a spawned CLI helper is used; otherwise the
    ``_grab_simple`` callable runs in a forked child with the image codec.
    """
    if POPEN:
        return childprocess_grab_popen(backend, bbox)
    return run_in_childprocess(_grab_simple, codec, backend, bbox)
def childprocess_grab_popen(backend, bbox):
    """Grab a screenshot via the spawned CLI helper and return the decoded image.

    The helper writes a PNG into a temporary directory; the file is read
    back and decoded with the image codec.  Raises FailedBackendError when
    the helper exits non-zero.
    """
    if not backend:
        backend = ""
    if not bbox:
        bbox = (0, 0, 0, 0)
    x1, y1, x2, y2 = map(str, bbox)
    with TemporaryDirectory(prefix="pyscreenshot") as tmpdirname:
        filename = os.path.join(tmpdirname, "screenshot.png")
        p = proc(
            "pyscreenshot.cli.grab_to_file",
            [filename, x1, y1, x2, y2, "--backend", backend],
        )
        if p.return_code != 0:
            # log.debug(p)
            raise FailedBackendError(p)
        # Close the handle explicitly (the original leaked it via
        # open(...).read()); the tempdir is removed when the block exits.
        with open(filename, "rb") as f:
            data = f.read()
    data = codec[1](data)
    return data
|
21,336 | 6e780c98e24231f6ad6a333cbd8ffc0bd5dc915c | # -*- coding: utf-8 -*-
import time
from django.db import models
from django.urls.base import reverse
from mptt.models import MPTTModel, TreeForeignKey
from taggit.managers import TaggableManager
class Report(models.Model):
    """
    A test report.

    One uploaded test-suite run: result counts, the environment it ran on,
    optional CI/PR links and the raw XML document it was parsed from.
    """
    datetime = models.DateTimeField(verbose_name="Date/Time")
    tests = models.IntegerField()
    errors = models.IntegerField()
    failures = models.IntegerField()
    skipped = models.IntegerField(blank=True, null=True)
    modules = models.IntegerField()
    timetaken = models.FloatField(blank=True, null=True)
    installed = models.CharField(
        max_length=255, blank=True, null=True, db_index=True)
    node = models.CharField(max_length=64)
    system = models.CharField(max_length=16, db_index=True)
    architecture = models.CharField(max_length=16, db_index=True)
    version = models.CharField(max_length=16, db_index=True)
    prurl = models.URLField(
        verbose_name="Pull request URL", blank=True, null=True, db_index=True)
    ciurl = models.URLField(
        verbose_name="Continuous Integration URL", blank=True, null=True)
    # NOTE: a second, byte-identical `architecture` declaration used to sit
    # here; it merely shadowed the one above and was removed.
    xml = models.TextField(verbose_name='XML Document')
    tags = TaggableManager()

    def __str__(self):
        return "Report %d" % (self.pk)

    class Meta:
        ordering = ['-datetime']

    def get_absolute_url(self):
        return reverse('report_html', kwargs={'pk': self.pk})

    @property
    def executed_tests(self):
        """Number of tests actually run (total minus skipped)."""
        if self.skipped:
            return self.tests - self.skipped
        return self.tests

    @property
    def ciurl_type(self):
        """Short CI provider label ('Tra'/'Apv'), or None if unknown."""
        if not self.ciurl:
            # ciurl is nullable; `'travis' in None` would raise TypeError.
            return None
        if 'travis' in self.ciurl:
            return 'Tra'
        elif 'appveyor' in self.ciurl:
            return 'Apv'
        return None

    @property
    def prurl_number(self):
        """Pull-request number parsed from the PR URL, or None."""
        try:
            return int(self.prurl.split('/')[-1])
        except Exception:
            return None

    @property
    def timestamp(self):
        """Report date/time as a Unix timestamp (local time)."""
        return int(time.mktime(self.datetime.timetuple()))

    @property
    def sum(self):
        """Total number of problems (failures + errors)."""
        return self.failures + self.errors

    @property
    def status(self):
        """Bootstrap context class: danger (errors), warning (failures), success."""
        if self.sum:
            if self.errors:
                return "danger"
            return "warning"
        return "success"

    @property
    def status_icon(self):
        """Glyphicon CSS class matching ``status``."""
        # NOTE(review): the original had separate errors/failures branches
        # that both returned the same icon; they were merged.  If a distinct
        # failures-only icon was intended, reintroduce the branch here.
        if self.sum:
            return "glyphicon glyphicon-remove"
        return "glyphicon glyphicon-ok"

    @property
    def next_id(self):
        """ID of the chronologically next report, or False when none exists."""
        try:
            obj = self.get_next_by_datetime()
        except Report.DoesNotExist:
            # get_next_by_datetime raises at the newest report instead of
            # returning a falsy value; map that to False as callers expect.
            return False
        if obj:
            return obj.id
        return False

    @property
    def previous_id(self):
        """ID of the chronologically previous report, or False when none exists."""
        try:
            obj = self.get_previous_by_datetime()
        except Report.DoesNotExist:
            return False
        if obj:
            return obj.id
        return False

    @property
    def is_git(self):
        """True when the installed version string points at a git revision."""
        if not self.installed:
            return False
        # new style dev version (see obspy/obspy#955)
        # e.g. 0.9.2.dev0+2003.g1b283f1b40.dirty.qulogic.pep440
        # n.b.: since obspy/obspy#1338 we have ".post0" instead of ".dev0"
        if '.dev0+' in self.installed or '.post0+' in self.installed:
            local_version = self.installed.split("+")[1].split(".")
            if len(local_version) > 1 and local_version[1].startswith("g"):
                # "g" + 10-hex-digit abbreviated commit hash
                if len(local_version[1]) != 11:
                    return False
                return True
            else:
                return False
        elif '0.0.0+archive' in self.installed:
            return False
        # old style dev version
        else:
            if self.installed is None:
                return False
            elif self.installed.endswith('-dirty'):
                return False
            elif '-g' in self.installed:
                # GIT
                return True
            elif '.dev-r' in self.installed:
                # SVN
                return False
            elif self.installed.startswith('0.5.'):
                return False
            elif self.installed.startswith('0.6.'):
                return False
            elif self.installed.startswith('0.7.'):
                return False
            elif self.installed.count('.') == 2:
                return True
            return False

    @property
    def git_commit_hash(self):
        """Abbreviated git hash for new-style versions, the raw version
        string for old-style git versions, or None when not a git build."""
        if self.is_git:
            if '.dev0+' in self.installed or '.post0+' in self.installed:
                local_version = self.installed.split("+")[1].split(".")
                if len(local_version) > 1 and local_version[1].startswith("g"):
                    return local_version[1][1:]
            return self.installed
        return None
class SelectedNode(models.Model):
    """
    A pre-selected node.

    Lookup table of node names, keyed by the name itself.  Presumably used
    to pre-select report nodes in views — confirm against callers.
    """
    name = models.CharField(max_length=64, primary_key=True)
    def __str__(self):
        return "SelectedNode %s" % (self.name)
    class Meta:
        ordering = ['name']
class MenuItem(MPTTModel):
    """A hierarchical (MPTT) navigation menu entry with optional icon/URL."""
    # parent: tree link; top-level items have parent=None
    parent = TreeForeignKey(
        'self', null=True, blank=True, related_name='children',
        on_delete=models.CASCADE)
    name = models.CharField(max_length=50, help_text='Use "-" for dividers')
    icon = models.CharField(
        max_length=100, blank=True, null=True,
        help_text="see http://getbootstrap.com/components/#glyphicons-glyphs")
    url = models.CharField(max_length=200, blank=True, null=True)
    def __str__(self):
        return "%s" % (self.name)
|
21,337 | bad37182fcbcfd65c8f50074f564789e39e595dc | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
'''
Authors: Juliani Schlickmann Damasceno
Mateus Seenem Tavares
Description: This file contain functions developed to use in UDP connections.
'''
from socket import *
import random
import json
import time
class Package():
    """A TCP-like segment: header fields plus payload, stored as a dict.

    ``package`` holds the header (ports, sequence/confirmation numbers,
    header length, flags, receiver window) and the payload in ``data``.
    """
    def __init__(self):
        # TCP-style segment header attributes
        self.package = {}
        self.package['origin_port'] = None
        self.package['destination_port'] = None
        self.package['sequence_number'] = None
        self.package['confirmation_number'] = None  # acts as the ACK number
        self.package['length_header'] = None
        self.package['flags'] = { 'ACK': None, 'SYN': None, 'FIN': None }
        self.package['data'] = ""
        self.package['rwnd'] = 65535  # receiver window

    def change_dictionary_value(self, dictionary, key_to_find, new_value):
        """Set ``dictionary[key_to_find] = new_value`` only when the key
        already exists (never adds new keys).

        The original scanned every key in a loop; a direct membership test
        is equivalent and O(1).
        """
        if key_to_find in dictionary:
            dictionary[key_to_find] = new_value

    def update_values(self, aKeys):
        """Apply a ``{field: value}`` mapping to this segment.

        Flag names (SYN/ACK/FIN) are routed into the nested ``flags`` dict;
        everything else updates the top-level header/payload fields.
        Unknown keys are silently ignored.
        """
        for key, val in aKeys.items():
            if key in ('SYN', 'ACK', 'FIN'):
                self.change_dictionary_value(self.package['flags'], key, val)
            else:
                self.change_dictionary_value(self.package, key, val)
class API_TCP_UDP():
    '''
    A TCP-like reliable transport implemented over a UDP socket:
    three-way handshake, JSON-encoded segments with sequence/ACK numbers,
    a send window with (incomplete) slow start, RTT-based retransmission
    and duplicate-ACK fast retransmit.
    '''
    def __init__(self):
        # General attributes
        self.socket = socket(AF_INET, SOCK_DGRAM)
        # self.socket.settimeout(30)
        self.window = []  # segments sent/received, in order
        self.toRTT = {} #time-out of RTT: sequence_number -> send timestamp
        self.sampleRTT = 0
        self.timeout = 1
        self.MSS = 1204  # maximum payload size per segment
        self.buffer = 4096  # recvfrom buffer size
        self.slow_start = True
        self.cwnd = 1  # congestion window, in segments per burst
        self.last_seq = 0
        self.last_ack = None
        self.duploAck = False  # a duplicate ACK has been seen
        self.triploAck = False  # a triple duplicate ACK has been seen
    '''
    Function for the server to listen client's commands
    '''
    def server_listening(self, server_address, server_port):
        """Bind to (server_address, server_port) and serve forever.

        Three cases per received segment: a fresh handshake (no seq/ack
        numbers yet), a FIN (echo it, close and exit), or a data segment
        (ACK it, track it in the window, retransmit on a sequence gap).
        """
        self.socket.bind((server_address, server_port))
        print ("\n****** The server is ready! ******\n")
        object_package = Package()
        while True:
            '''
            Here starts the handshake
            '''
            package_string, (client_address, client_port) = self.socket.recvfrom(self.buffer) #first touch between server and client
            object_package.package = json.loads(package_string)
            #as the context changed, swaping origin and destination, and inserting the port used by client
            object_package.update_values({'origin_port': object_package.package['destination_port'], 'destination_port': client_port})
            if object_package.package['confirmation_number'] is None and object_package.package['sequence_number'] is None:
                # Handshake: answer the SYN with ACK, then wait for the third touch.
                object_package.update_values({'ACK': 1, 'rwnd': 65535})
                package_string = json.dumps(object_package.package, sort_keys=True, indent=4)
                object_package.update_values({'length_header': len(package_string) - len(object_package.package['data'])})
                package_string = json.dumps(object_package.package, sort_keys=True, indent=4)
                print ("\nSending a package!\n\n")
                self.socket.sendto(package_string, (client_address, client_port))
                package_string, (client_address, client_port) = self.socket.recvfrom(self.buffer) #third touch between server and client
                object_package.package = json.loads(package_string)
            elif object_package.package['flags']['FIN'] is not None:
                # Teardown: echo the FIN back and stop the server process.
                package_string = json.dumps(object_package.package, sort_keys=True, indent=4)
                print ("\nSending a package!\n\n")
                self.socket.sendto(package_string , (client_address, client_port))
                self.socket.close()
                print ("\nConnection finished successfully!\n")
                exit(0)
            else:
                # Data segment: ACK = seq + payload length; retransmit on a gap.
                object_package.update_values({'confirmation_number': (object_package.package['sequence_number'] + len(object_package.package['data']))})
                self.last_seq = self.getting_sequence_number()
                self._window(object_package.package)
                self.verifyTripleAck(object_package, (client_address, client_port))
                self.last_ack = self.getting_last_ack()
                if not self.verify_next_package_sequence(self.last_ack):
                    self.toRTT[self.last_ack] = time.time()
                    self.socket.sendto(package_string , (client_address, client_port))
            print(json.dumps(object_package.package, sort_keys=True, indent=4))
            package_string = json.dumps(object_package.package, sort_keys=True, indent=4)
            print ("\nSending a package!\n\n")
            self.toRTT[object_package.package['sequence_number']] = time.time()
            self.socket.sendto(package_string , (client_address, client_port))
            print ('MINHA JANELA')#remove later
            for a in self.window: #remove later
                print (json.dumps(a, sort_keys=True, indent=4)) #remove later (oddly it did not format like on the client; json formatting needed)
    def connection(self, server_address, server_port):
        """Client side of the three-way handshake.

        Sends SYN, waits for the server's answer and confirms it.  Returns
        ``(socket, (server_address, server_port))`` on success; returns
        None when the SYN flag was not set (server not ready).
        """
        if str(server_address) == 'localhost':
            server_address = '127.0.0.1'
        object_package = Package()
        self.socket.settimeout(1)
        #beginning connection
        object_package.update_values({'destination_port': server_port,'SYN': 1, 'sequence_number': 0})
        package_string = json.dumps(object_package.package, sort_keys=True, indent=4)
        print ("\nSending a package!\n\n")
        RTT = time.time()
        self.socket.sendto(package_string , (server_address, server_port))
        if object_package.package['flags']['SYN'] == 1:
            package_string, address = self.socket.recvfrom(self.buffer) #second touch between server and client
            # NOTE(review): start - now is negative; an RTT sample is
            # normally now - start — confirm before relying on sampleRTT.
            self.sampleRTT = RTT - time.time()
            object_package.package = json.loads(package_string)
            #as the context changed, swaping origin and destination
            object_package.update_values({'origin_port': object_package.package['destination_port'],
                                        'destination_port': object_package.package['origin_port'],
                                        'SYN': 0})
            package_string = json.dumps(object_package.package, sort_keys=True, indent=4)
            print ("\nSending a package!\n\n")
            self.socket.sendto(package_string , (server_address, server_port))
            return (self.socket, (server_address, server_port))
        else:
            print("The server is not prepared to start a connection")
    def close_connection(self, connected):
        """Send FIN, wait for the mirrored FIN, then close the socket."""
        self.socket, (address, port) = connected
        object_package = Package()
        object_package.update_values({'FIN': 1})
        package_string = json.dumps(object_package.package, sort_keys=True, indent=4)
        print ("\nSending a package!\n\n")
        self.socket.sendto(package_string, (address, port))
        package_string, (address, port) = self.socket.recvfrom(self.buffer) #second touch between server and client
        object_package.package = json.loads(package_string)
        if object_package.package['flags']['FIN'] == 1:
            print ("\nConnection finished successfully!\n")
        else:
            print ("\nSomething is wrong. The connection was not closed on the server.\n")
        self.socket.close()
    def send_data(self, aData, connected):
        """Segment *aData*, send it in cwnd-sized bursts and collect ACKs.

        Alternates between sending a burst of ``cwnd`` segments and reading
        ``cwnd`` ACKs; the loop ends when a receive times out with no
        pending retransmissions.  ``cwnd`` doubles per round (slow start).
        """
        self.socket, (address, port) = connected
        object_package = Package()
        segment = 0
        number_segment = -1
        quebre = False
        self.last_ack = None
        self.duploAck = False
        self.triploAck = False
        self.break_in_segments(aData, port)
        #verify if window is empty
        if self.window is None:
            print ('\nThe window is empty. \n')
        else:
            while self.slow_start: # TODO: slow start still needs a proper implementation.
                if quebre: break
                self.verifyRTT((address, port))
                if segment < len(self.window):
                    # NOTE(review): the inner loop can advance `segment`
                    # past len(self.window) when cwnd overshoots the
                    # remaining segments — IndexError risk; confirm.
                    for i in range(self.cwnd):
                        object_package.package = self.window[segment]
                        self.toRTT[object_package.package['sequence_number']] = time.time()
                        object_package.update_values({'length_header':
                                                        len(json.dumps(object_package.package, sort_keys=True, indent=4)) -
                                                        len(object_package.package['data'])})
                        self.window[segment] = json.dumps(object_package.package, sort_keys=True, indent=4)
                        print ("\nSending a package!\n\n")
                        self.socket.sendto(self.window[segment] , (address, port))
                        segment = segment + 1
                else:
                    for i in range(self.cwnd):
                        try:
                            package_string, (address, port) = self.socket.recvfrom(self.buffer)
                        except:
                            # Receive timed out: stop when nothing is pending.
                            if not self.toRTT:
                                quebre = True
                            #check RTT timeout
                            continue
                        object_package.package = json.loads(package_string)
                        self.removeRTT(object_package.package['sequence_number'])
                        self.verifyTripleAck(object_package, (address, port))
                print ('MINHA JANELA')#remove later
                for a in self.window: #remove later
                    print (a) #remove later
                self.cwnd = self.cwnd * 2
    def create_package(self, aData, port):
        """Build one data segment for *aData* and append it to the window."""
        object_package = Package()
        self.last_seq = self.getting_sequence_number()
        object_package.update_values({'ACK': 0, 'sequence_number': self.last_seq, 'data': aData, 'destination_port': port })
        self._window(object_package.package)
    def _window(self, package):
        """Append *package* (a plain dict) to the send/receive window."""
        self.window.append(package)
    def break_in_segments(self, aData, port):
        """Split each item of *aData* into MSS-sized segments in the window.

        NOTE(review): ``temp.replace(variavel, "")`` removes *every*
        occurrence of the leading slice, not just the first one — repeated
        payload content would be dropped; confirm inputs never repeat.
        """
        for item in aData:
            temp = item
            while len(temp) > self.MSS:
                variavel = temp[0:self.MSS]
                temp = temp.replace(variavel, "")
                self.create_package(variavel, port) #create segment
            # always true for a str; an empty remainder still yields a final segment
            if temp is not None:
                self.create_package(temp, port)
    def verifyTripleAck(self, object_package, address):
        """Track duplicate ACKs; on the third duplicate, retransmit the
        segment the peer keeps acknowledging (fast retransmit)."""
        if self.last_ack is None:
            self.last_ack = object_package.package['confirmation_number']
        elif self.last_ack == object_package.package['confirmation_number']:
            if self.duploAck:
                self.triploAck = True
                retrived_package = self.search_package(object_package.package['confirmation_number'])
                ''' Re-send the ackwnoledge package? if cwnd is low? '''
                self.socket.sendto(json.dumps(retrived_package, sort_keys=True, indent=4), address)
            else:
                self.duploAck = True
        else:
            # A fresh ACK resets the duplicate-ACK tracking.
            self.last_ack = object_package.package['confirmation_number']
            self.duploAck = False
            self.triploAck = False
    def search_package(self, num_seq):
        """Find the window entry with sequence_number == num_seq.

        Entries may be dicts or JSON strings; returns the dict, or 0 when
        the sequence number is not in the window.
        """
        for i in self.window:
            try:
                j = (json.loads(i))
            except:
                j = i
            if int(num_seq) == j['sequence_number']:
                return j
        print ('Package not found within the window. sequence_number: ' + str(num_seq))
        return 0
    def verifyRTT(self, address):
        """Retransmit every pending segment whose timer has expired and
        restart its timer."""
        if len(self.toRTT) > 0:
            for a in self.toRTT:
                if time.time() - self.toRTT[int(a)] > self.timeout + self.sampleRTT:
                    self.toRTT[a] = time.time()
                    self.socket.sendto(json.dumps(self.search_package(a), sort_keys=True, indent=4), address)
    def removeRTT(self,sequence_number):
        """Drop retransmission timers for every segment up to and including
        *sequence_number* (a cumulative ACK clears all earlier timers)."""
        # copy first so the original dict is not mutated while iterating;
        # note: `dict` shadows the builtin here.
        dict = self.toRTT.copy()
        print (self.toRTT)
        for i in self.toRTT:
            if sequence_number >= i:
                dict.pop(i)
        self.toRTT = dict
    def getting_sequence_number(self):
        """Next sequence number: last segment's seq + its payload length,
        or 0 for an empty window."""
        seq = 0
        if len(self.window) > 0:
            package = self.window[-1]
            seq = package['sequence_number'] + len(package['data'])
        return seq
    def getting_last_ack(self):
        """Confirmation number of the second-to-last window entry (or the
        last one when the window holds a single segment)."""
        ack = self.window[-1]['confirmation_number']
        if len(self.window) > 1:
            package = self.window[-2]
            ack = package['confirmation_number']
        return ack
    def verify_next_package_sequence(self, last_ack):
        """Return True when the newest segment's sequence number matches the
        expected *last_ack* (i.e. no gap); False signals a retransmit."""
        if len(self.window) > 1:
            package = self.window[-1]
            if package['sequence_number'] != last_ack:
                print('Something is wrong... Last ACK is ', last_ack, 'and Sequence Number received was ', package['sequence_number'])
                return False
            else:
                print('OK....')
                return True
        else:
            print('The first ACK received ', last_ack)
            return True
|
21,338 | 101588c154375b6c1febf188dad87cdb42e7ca64 | # => programa para aprovar ou negar votos
def voteTester(birth_year=None):
    """Print and return the voting status for someone born in *birth_year*.

    Brazilian rules: under 16 cannot vote; ages 16-17 and over 65 vote
    optionally; everyone else must vote.

    Args:
        birth_year: year of birth.  Defaults to the module-level
            ``nascimento`` (read from user input), so the original
            ``voteTester()`` call style still works.

    Returns:
        The message that was printed (useful for testing/reuse; the
        original returned None, so callers are unaffected).
    """
    from datetime import date
    if birth_year is None:
        birth_year = nascimento
    # age computed from the current year
    idade = date.today().year - birth_year
    if idade < 16:
        msg = f'Com {idade} anos, voce NÃO vota!'
    elif 16 <= idade < 18 or idade > 65:
        msg = f'Com {idade}, seu voto é OPCIONAL!'
    else:
        msg = f'Com {idade} anos, seu voto é OBRIGATÓRIO!'
    print(msg)
    return msg
# Script entry: ask for the birth year, then report the voting status.
nascimento = int(input('Ano de seu nascimento:\n> '))
voteTester()
|
21,339 | c11c9fe2419014bf634fc6b29678b5f205e34662 | from Feed.BaseFeed import BaseFeed
from Feed.BaseNews import BaseNews
from typing import List, Optional, Any, Union, Tuple
from requests_html import AsyncHTMLSession, HTMLResponse, HTMLSession
import asyncio
import json
from tqdm import tqdm
from hanziconv import HanziConv
class YahooHK(BaseFeed):
    """Feed scraper for Yahoo Hong Kong news (hk.news.yahoo.com).

    Fetches the front-page article list and individual article bodies,
    converting traditional Chinese to simplified via HanziConv.
    """
    def __init__(self):
        super().__init__()
        self.news_publisher = 6  # numeric publisher id used by the backend
        self.display_name = "Yahoo HK"
        self.__init_written_list__()
    async def fetch(self, link: str) -> Optional[Tuple]:
        """Download one article.

        Returns (simplified article text, simplified parser repr, cover
        image URL or None); on any failure prints the error and returns
        (None, None, None).
        """
        try:
            session = AsyncHTMLSession()
            r = await session.get(link)
            body = r.html.find(".caas-body", first=True)
            cover = body.find(".caas-img", first=True)
            cover = cover.attrs['data-src'] if cover else None
            html = body.text
            self.parser.parse(html)
            return HanziConv.toSimplified(self.parser.convert()), HanziConv.toSimplified(str(self.parser)), cover
        except Exception as e:
            print(e)
            return None, None, None
    async def fetch_list(self) -> List[Tuple[str, str, Optional[str]]]:
        """Scrape the front page; return (title, link, None) tuples, ads skipped.

        NOTE(review): on error the handler evaluates ``e`` as a no-op and
        the method implicitly returns None, not an empty list, despite the
        annotation — callers must tolerate None, or this should return [].
        """
        try:
            session = AsyncHTMLSession()
            r: HTMLResponse = await session.get("https://hk.news.yahoo.com/")
            news_list = []
            list_elem = r.html.find(".js-stream-content")
            for ele in list_elem:
                ad = ele.find(".Feedback", first=True)
                # Only get content if it is not ad
                if not ad:
                    title = ele.find("h3", first=True).text
                    link = ele.find("a", first=True).absolute_links.pop()
                    news_list.append(
                        (HanziConv.toSimplified(title), link, None))
            return news_list
        except Exception as e:
            # print(e)
            e  # no-op; see the NOTE in the docstring
async def main():
    """Entry point: build the YahooHK feed, fetch it and upload the results."""
    try:
        bbc = YahooHK()  # NOTE(review): variable is named 'bbc' but holds YahooHK
        await bbc.fetch_feed()
        await bbc.upload()
    except Exception as e:
        print(e)
if __name__ == '__main__':
    asyncio.run(main())
|
21,340 | 84ee499c2e993929c8aa7aadab895955e3103e34 | from sklearn.metrics import precision_recall_fscore_support
def metrics_cal(predicts, tags, detail=False, sklearn_mode='macro'):
    """Compute (accuracy, precision, recall, F1) for binary predictions.

    With a falsy ``sklearn_mode`` the confusion matrix is computed by hand
    (binary averaging, optionally printed when ``detail`` is set);
    otherwise sklearn's implementation is used with that averaging mode.
    """
    n_correct = sum(p == t for p, t in zip(predicts, tags))
    if sklearn_mode:
        precision, recall, f1, _ = precision_recall_fscore_support(
            tags, predicts, average=sklearn_mode)
    else:
        # tag = 1, pred = 0 -> false negative; tag = 0, pred = 1 -> false positive
        fn = sum(1 for p, t in zip(predicts, tags) if p - t == -1)
        fp = sum(1 for p, t in zip(predicts, tags) if p - t == 1)
        tp = sum(predicts) - fp
        tn = (len(predicts) - sum(predicts)) - fn
        if detail:
            print("TP: {} FP: {} TN: {} FN: {}".format(tp, fp, tn, fn))
        precision = tp / (tp + fp)
        recall = tp / (tp + fn)
        f1 = (2 * precision * recall) / (precision + recall)
    return n_correct / len(tags), precision, recall, f1
if __name__ == "__main__":
    # Manual sanity check: 70 positive / 30 negative labels with
    # predictions giving TP=40, FP=20, TN=10, FN=30.
    iphone_label = [1 for _ in range(70)] + [0 for _ in range(30)]
    iphone_pred = [1 for _ in range(40)] + [0 for _ in range(30)] + \
        [1 for _ in range(20)] + [0 for _ in range(10)]
    # Expect 0.667, 0.5714, 0.6153
    print(precision_recall_fscore_support(iphone_label, iphone_pred, average='binary'))
    # Expect 0.4583, 0.4523, 0.4505
    print(precision_recall_fscore_support(iphone_label, iphone_pred, average='macro'))
    # TP = 40, FP = 20, TN=10, FN=30
    # NOTE(review): detail=True only takes effect in the manual (falsy
    # sklearn_mode) branch; with the default 'macro' mode nothing is printed.
    acc, precision, recall, F1 = metrics_cal(iphone_pred, iphone_label, detail=True)
    print('Metric Testing' + ": acc: {} precision: {} recall: {} F1: {}".format(
        round(acc, 2), round(precision, 2),
        round(recall, 2), round(F1, 2)))
21,341 | 64adcc72a267e94ad9c6b763d83c5e0ac5242695 | """
Loader functions for the brainstorming tasks
Separated out to make it clean
:Author: David Engel
:Email: entrymissing@gmail.com
"""
def importBrainstormWordsFile(filename):
    """Read a brainstormed-words file and return the words, lowercased.

    Instruction lines (starting with 'Please type one') are skipped; a
    line holding several comma- or whitespace-separated words is split
    into individual words.  File order is preserved.
    """
    words = []
    with open(filename, 'r') as fp:
        for line in fp.read().splitlines():
            # skip the instruction line left in by some participants
            if line.startswith('Please type one'):
                continue
            words.extend(token.strip().lower()
                         for token in line.replace(',', ' ').split())
    return words
def importBrainstormBrickFile(filename):
    """Read a brainstormed brick-uses file; return the uses, lowercased.

    The instruction line (starting 'Enter one user') and blank lines are
    dropped.  File order is preserved.
    """
    with open(filename, 'r') as fp:
        return [line.strip().lower()
                for line in fp.readlines()
                if not line.startswith('Enter one user') and line.strip()]
def importBrainstormEquationsFile(filename):
    """Read brainstormed equations; return them normalised, in file order.

    Blank lines are dropped.  Each remaining line is stripped of all
    whitespace, 'x'/'X' become '*', and anything after the first '=' is
    discarded.
    """
    equations = []
    with open(filename, 'r') as fp:
        for raw in fp.readlines():
            if not raw.strip():
                continue
            cleaned = (raw.strip().replace(' ', '')
                       .replace('x', '*').replace('X', '*')
                       .split('=')[0])
            equations.append(cleaned)
    return equations
def loadBrainstormingCorrectAnswersFile(filename):
    """
    Load a file holding the correct answers for the brainstorming tasks.

    The file is a human readable format: a line starting with '>' opens a
    new category and every following non-comment line is a synonym for it.
    Lines starting with '#' are comments; blank lines are skipped.
    Returns a dict mapping each category to the list of acceptable answers;
    everything is stripped and lower-cased, and each category counts as a
    synonym for itself.

    >>> synonymTable = loadBrainstormingCorrectAnswersFile('Test Study/Scoring/Brainstorming Brick - Correct Answers.txt')
    >>> 'build a patio' in synonymTable['patio']
    True
    >>> all([(curKey in synonymTable[curKey]) for curKey in synonymTable])
    True
    """
    with open(filename, 'r') as answerFile:
        rawLines = answerFile.readlines()
    synonymTable = {}
    category = ''
    for rawLine in rawLines:
        entry = rawLine.strip().lower()
        # blank lines and '#' comments carry no data
        if not entry or entry.startswith('#'):
            continue
        if entry.startswith('>'):
            # '>' opens a new category; the category is its own first synonym
            category = entry[1:].strip()
            synonymTable[category] = [category]
        else:
            synonymTable[category].append(entry)
    return synonymTable
if __name__ == '__main__':
    # Smoke test: parse one session's word file and show the result.
    words = importBrainstormWordsFile('../Test Study/Data/XVal Session 21 - Group 1/XVal Session 21 - Group 1 - Brainstorm - Words.txt')
    # Parenthesised print of a single argument behaves identically on
    # Python 2 and 3; the bare `print words` statement was Python-2 only
    # and is a syntax error on Python 3.
    print(words)
21,342 | bd83c8b6f053735ca74737ea17ccfcd59d00dd68 | import numpy as np
import logging
log = logging.getLogger(__name__)
from functools import cached_property
import starwinds_magnetogram.coordinate_transforms
class ZdiGeometry:
    """Spherical facet grid on which magnetogram fields are evaluated."""

    def __init__(self,
                 polar_corners=None,
                 azimuthal_corners=None):
        """
        Build the grid of facet corner angles.

        Arguments may be arrays of corner angles, integer counts, or None.
        Integer counts are expanded to linspaces; a missing azimuthal count
        defaults to twice the polar count (65 polar corners by default).
        """
        if type(polar_corners) is int and type(azimuthal_corners) is int:
            # Both given as counts.
            polar_corners = np.linspace(0, np.pi, polar_corners)
            azimuthal_corners = np.linspace(0, 2 * np.pi, azimuthal_corners)
        elif type(polar_corners) is int and azimuthal_corners is None:
            # Only the polar count given; use twice as many azimuthal corners.
            polar_corners = np.linspace(0, np.pi, polar_corners)
            azimuthal_corners = np.linspace(0, 2 * np.pi, 2 * len(polar_corners))
        elif polar_corners is None and azimuthal_corners is None:
            # Default resolution.
            polar_corners = np.linspace(0, np.pi, 64 + 1)
            azimuthal_corners = np.linspace(0, 2 * np.pi, 2 * len(polar_corners))

        # Only go around the sphere once.
        assert np.abs(np.max(polar_corners) - np.min(polar_corners)) <= np.pi
        assert np.abs(np.max(azimuthal_corners) - np.min(azimuthal_corners)) <= 2 * np.pi

        self.polar_corners, self.azimuthal_corners = np.meshgrid(polar_corners, azimuthal_corners)

    @property
    def shape(self):
        """Shape of the corner grid: (azimuthal count, polar count)."""
        return self.polar_corners.shape

    def corners(self):
        """Facet corner polar coordinates (polar, azimuth)."""
        return self.polar_corners, self.azimuthal_corners

    def centers(self):
        """Facet center polar coordinates (polar, azimuth)."""
        def average_of_corners(grid):
            # Mean of the four corner values surrounding each facet.
            return 0.25 * (grid[:-1, :-1] + grid[:-1, 1:] + grid[1:, :-1] + grid[1:, 1:])

        polar_centers = average_of_corners(self.polar_corners)
        azimuthal_centers = average_of_corners(self.azimuthal_corners)
        assert azimuthal_centers.shape == polar_centers.shape
        return polar_centers, azimuthal_centers

    def unit_normals(self):
        """Facet unit normal vectors in cartesian coordinates as an ndarray."""
        return np.stack(self.centers_cartesian(), axis=-1)

    def areas(self):
        """Facet areas on the unit sphere, as differences of spherical caps.

        https://en.wikipedia.org/wiki/Spherical_cap
        TODO this is inconsistent with the faceted approach used elsewhere in this class.
        """
        height_delta = np.cos(self.polar_corners[:-1, :-1]) - np.cos(self.polar_corners[:-1, 1:])
        azimuth_delta = self.azimuthal_corners[1:, 1:] - self.azimuthal_corners[:-1, 1:]
        return height_delta * azimuth_delta

    def projected_visible_area_fraction(self, projection_direction):
        """
        Each facet's projected visible area divided by its total area.

        Obscured facets (facing away from the projection direction) get zero.

        :param projection_direction: direction of the projection
        :return: array of per-facet area fractions
        """
        direction_norm = np.sum(projection_direction ** 2) ** .5
        # Dot product of the unit projection direction with the unit surface normals.
        fractions = np.sum((projection_direction / direction_norm) * self.unit_normals(), axis=-1)
        # A negative dot product means the facet faces away and is invisible.
        return np.where(fractions > 0, fractions, 0)

    def corners_cartesian(self):
        """Facet corner cartesian coordinates (x, y, z) on the unit sphere."""
        return starwinds_magnetogram.coordinate_transforms.rectangular_coordinates_from_spherical(
            np.ones(self.polar_corners.shape),
            self.polar_corners,
            self.azimuthal_corners)

    def centers_cartesian(self):
        """Facet center cartesian coordinates (x, y, z) on the unit sphere."""
        polar_centers, azimuthal_centers = self.centers()
        return starwinds_magnetogram.coordinate_transforms.rectangular_coordinates_from_spherical(
            np.ones(polar_centers.shape),
            polar_centers,
            azimuthal_centers)
def numerical_description(zdi_geometry, zdi_magnetogram, dest=None):
    """
    Describe field by numerically evaluating it at a set of points, then taking sums and
    averages.

    Statistics are written into ``dest`` (a new dict when None) under keys of the
    form ``magnetogram.<component>.abs.{max,mean,rms,std}`` and are also logged.

    TODO test that this still works.
    :param zdi_geometry: geometry providing facet ``centers()`` and ``areas()``
    :param zdi_magnetogram: magnetogram object; ``get_all()`` appears to map a
        spherical direction to a dict of {term name -> callable(polar, azimuth)}
        -- TODO confirm against the magnetogram class
    :param dest: optional dict to fill with the statistics
    :return: the dict of statistics
    """
    if dest is None:
        dest = dict()

    def describe(name, values):
        """Calculate area-weighted average (mean) and find maximum value."""
        log.info("Describing %s component." % name)
        # Grid location (polar, azimuth) of the largest absolute value.
        abs_max_indices = np.unravel_index(np.argmax(np.abs(values), axis=None), values.shape)
        abs_max_polar = zdi_geometry.centers()[0][abs_max_indices]
        abs_max_azimuth = zdi_geometry.centers()[1][abs_max_indices]
        abs_max = np.abs(values[abs_max_indices])
        # Area-weighted statistics; 4*pi is the total area of the unit sphere.
        abs_mean = np.sum(np.abs(values) * zdi_geometry.areas()) / (4 * np.pi)
        abs_rms = (np.sum(values**2 * zdi_geometry.areas()) / (4 * np.pi))**.5
        abs_std = (np.sum((np.abs(values) - abs_mean)**2 * zdi_geometry.areas()) / (4 * np.pi))**.5

        # This is a statistical identity: rms^2 == mean^2 + std^2.
        assert np.isclose(abs_rms**2, abs_mean**2 + abs_std**2), "RMS does not match mean and std."
        dest[f"magnetogram.{name}.abs.max"] = abs_max
        log.info(f"{name} |B|_max = %4.4g Gauss" % abs_max)
        log.info(f"{name} |B|_max at az=%2.2f deg, pl=%3.2f deg" % (np.rad2deg(abs_max_azimuth),
                                                                    np.rad2deg(abs_max_polar)))
        log.info(f"{name} |B|_mean = %4.4g Gauss" % abs_mean)
        log.info(f"{name} |B|_var = %4.4g Gauss" % abs_std)
        dest[f"magnetogram.{name}.abs.mean"] = abs_mean
        dest[f"magnetogram.{name}.abs.rms"] = abs_rms
        dest[f"magnetogram.{name}.abs.std"] = abs_std

        return abs_mean

    _dict = zdi_magnetogram.get_all()

    # Accumulate squared field strength over the spherical components.
    accumulated_strength_squared = np.zeros_like(zdi_geometry.centers()[0])
    for sph_dir, field in _dict.items():
        # Sum this component over its constituent terms, describing each term.
        accumulated_component = np.zeros_like(zdi_geometry.centers()[0])
        for key_1, method_1 in field.items():
            values_1 = method_1(*zdi_geometry.centers())
            describe(sph_dir + "-" + key_1, values_1)
            accumulated_component += values_1
        describe(sph_dir, accumulated_component)
        accumulated_strength_squared += accumulated_component ** 2
    describe("field strength", accumulated_strength_squared**.5)
    return dest
|
21,343 | 4d9a6c8b8cbe7ea4cb33047e321dd8e454e5c154 | import numpy as np
import requests
import datetime
import pandas as pd
from io import StringIO
import csv
import sys
import os
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
import zip_conversion
def most_infected(n: int, day: datetime.date, return_zip: bool) -> list:
    """Return the n most-infected US counties on a given day.

    Pulls the JHU CSSE daily report for ``day``, keeps US rows (dropping
    'Unassigned' counties and Puerto Rico) and sorts by confirmed cases.

    Args:
        n (int): top n counties will be returned
        day (datetime.date): date to observe counties
        return_zip (bool): if True, return the counties' zip codes instead
            of (county, state) tuples

    Returns:
        list: (county, state) tuples, or zip-code strings when return_zip
    """
    day = day.strftime('%m-%d-%Y')
    url = f'https://raw.githubusercontent.com/CSSEGISandData/COVID-19/master/csse_covid_19_data/csse_covid_19_daily_reports/{day}.csv'
    df = pd.read_csv(StringIO(requests.get(url).text))
    # Only US rows, most confirmed cases first.
    df = df.loc[df['Country_Region'] == 'US'].sort_values(by='Confirmed', ascending=False)
    # Remove unattributed rows and Puerto Rico.
    df = df.loc[(df['Admin2'] != 'Unassigned') & (df['Province_State'] != 'Puerto Rico')]
    locations = list(df.head(n)[['Admin2', 'Province_State']].values)

    # os.path.join keeps the lookup-table paths portable; the previous
    # raw backslash literals only resolved on Windows.
    docs_dir = os.path.join('Data Collection', 'Apparatus', 'Docs')
    df = pd.read_csv(os.path.join(docs_dir, 'zip_code_database.csv'))[['county', 'state', 'zip']]
    df['county'] = df['county'].str.replace(' County', '').str.replace(' City', '')

    if return_zip:
        state_abbreviation = pd.read_csv(os.path.join(docs_dir, 'states_and_counties.csv'))
        # Bug fix: Series.str.title() returns a new Series; the result was
        # previously discarded, leaving the state names unnormalized.
        state_abbreviation['State Name'] = state_abbreviation['State Name'].str.title()
        result = []
        for county, state in locations:
            if isinstance(county, str):
                county = county.replace(' County', '').replace(' City', '')
            state = zip_conversion.state_to_abbreviation(state, state_abbreviation)
            result.append([str(code).zfill(5) for code in zip_conversion.county_to_zip(county, state, df)])
        # Flatten, skipping counties that yielded no zip codes.
        return [code for codes in result if codes for code in codes]
    else:
        result = []
        for county, state in locations:
            if isinstance(county, str):
                county = county.replace(' County', '').replace(' City', '')
            result.append((county, state))
        return result
def top_percent(n: float, day: datetime.date) -> int:
    """Number of US counties that together account for fraction ``n`` of cases.

    Args:
        n (float): fraction of total confirmed cases
        day (datetime.date): day to check

    Returns:
        int: index of the first county (sorted by confirmed cases,
            descending) at which the running total reaches the target
            fraction
    """
    day = day.strftime('%m-%d-%Y')
    url = f'https://raw.githubusercontent.com/CSSEGISandData/COVID-19/master/csse_covid_19_data/csse_covid_19_daily_reports/{day}.csv'
    report = pd.read_csv(StringIO(requests.get(url).text))
    # US rows only, biggest outbreaks first.
    report = report.loc[report['Country_Region'] == 'US'].sort_values(by='Confirmed', ascending=False)
    confirmed = list(report['Confirmed'])
    target = sum(confirmed) * n
    # First position where the running sum reaches the target.
    return list(np.cumsum(confirmed) >= target).index(True)
if __name__ == "__main__":
date = datetime.date(2020, 4, 1)
zip_code = False
a = top_percent(0.77, date)
# b = most_infected(a, date, zip_code)
print(a)
# print(a)
exit(0)
with open('counties.csv', 'w', newline='') as f:
writer = csv.writer(f)
# writer.writerows([[i] for i in b])
if (zip_code):
for code in b:
f.write(code + '\n')
else:
for location in b:
writer.writerow(location)
|
21,344 | d459e2f9282d1836390a445759feec18a73e1b79 | import re
import requests
import http.cookiejar
import time
import json
class Login(object):
    """Cookie-based login helper for zhihu.com.

    Keeps a requests.Session whose cookies are persisted to a local
    "cookie" file so later runs can skip the login form entirely.
    """

    def __init__(self):
        # Default request headers; a browser User-Agent is required.
        self.headers = {
            'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 '
                '(KHTML, like Gecko) Chrome/57.0.2987.98 Safari/537.36',
            "Host": "www.zhihu.com",
            "Referer": "https://www.zhihu.com/",
        }
        self.session = requests.Session()
        # A session ties requests from the same user together and handles
        # cookies automatically until the session ends.
        # LWPCookieJar stores cookies in Set-Cookie3 files, whereas
        # MozillaCookieJar would use the '.txt' format instead.

    def main(self):
        """Entry point: reuse a saved cookie when valid, otherwise log in."""
        session = self.session
        session.cookies = http.cookiejar.LWPCookieJar("cookie")
        # If a local cookie file exists we need not post credentials again.
        try:
            session.cookies.load(ignore_discard=True)
        except IOError:
            print('Cookie未加载!')
        if self.isLogin():
            print('您已经登录')
        else:
            account = input('输入账号:')
            secret = input('输入密码:')
            self.login(account, secret)

    def get_xsrf(self):
        """
        Fetch the _xsrf anti-forgery token from the zhihu front page.
        """
        session = self.session
        headers = self.headers
        response = session.get('https://www.zhihu.com', headers=headers)
        html = response.text
        get_xsrf_pattern = re.compile(r'<input type="hidden" name="_xsrf" value="(.*?)"')
        _xsrf = re.findall(get_xsrf_pattern, html)[0]
        return _xsrf

    def get_captcha(self):
        """
        Download the login captcha to a local file for the user to inspect.

        Returns the captcha text typed in by the user.
        """
        session = self.session
        headers = self.headers
        # Millisecond timestamp required by the captcha endpoint.
        t = str(int(time.time() * 1000))
        captcha_url = 'http://www.zhihu.com/captcha.gif?r=' + t + "&type=login"
        response = session.get(captcha_url, headers=headers)
        with open('cptcha.gif', 'wb') as f:
            f.write(response.content)
        # The image could be shown with Pillow; here the user opens it manually.
        captcha = input('本次登录需要输入验证码: ')
        return captcha

    def login(self, username, password):
        """
        Log in to zhihu with the given credentials.

        An 11-digit username is treated as a phone number, anything else as
        an email address. Retries with a captcha when the first attempt is
        rejected, then saves the session cookie locally.
        """
        # 11 consecutive digits indicate a phone-number login.
        session = self.session
        headers = self.headers
        if re.match(r'\d{11}$', username):
            url = 'http://www.zhihu.com/login/phone_num'
            data = {'_xsrf': self.get_xsrf(),
                    'password': password,
                    'remember_me': 'true',
                    'phone_num': username
                    }
        else:
            url = 'https://www.zhihu.com/login/email'
            data = {'_xsrf': self.get_xsrf(),
                    'password': password,
                    'remember_me': 'true',
                    'email': username
                    }
        # First try without a captcha.
        result = session.post(url, data=data, headers=headers)
        # In the JSON response r == 1 means failure; 'msg' holds the reason.
        # (json.loads parses a string; json.load would read from a file.)
        if (json.loads(result.text))["r"] == 1:
            # A captcha is required; retry the post with it filled in.
            data['captcha'] = self.get_captcha()
            result = session.post(url, data=data, headers=headers)
        print((json.loads(result.text))['msg'])
        # Persist the cookie locally for future runs.
        session.cookies.save(ignore_discard=True, ignore_expires=True)

    def isLogin(self):
        """Return True when the saved session is still authenticated."""
        # Requesting the profile settings page reveals the login state.
        session = self.session
        headers = self.headers
        url = "https://www.zhihu.com/settings/profile"
        # Disallow redirects: an expired login redirects to the front page,
        # which would otherwise also answer 200.
        login_code = session.get(url, headers=headers, allow_redirects=False).status_code
        if login_code == 200:
            return True
        else:
            return False
if __name__ == '__main__':
    # Run the interactive login flow when executed as a script.
    Login().main()
21,345 | 29abe8a1642722737172020ad0378df6c414804c | '''
[1, 3, 5, 4, 2]라는 리스트를 [5, 4, 3, 2, 1]로 만들어보자.
'''
# Turn [1, 3, 5, 4, 2] into [5, 4, 3, 2, 1] by sorting in descending order.
a = [1, 3, 5, 4, 2]
a.sort(reverse=True)
print(a)  # [5, 4, 3, 2, 1]
|
21,346 | 68926d39c08c234e87cc24ed4e30995b90bf084a | from math import factorial
'''
Problem 34
145 is a curious number, as 1! + 4! + 5! = 1 + 24 + 120 = 145.
Find the sum of all numbers which are equal to the sum of the factorial of their digits.
Note: as 1! = 1 and 2! = 2 are not sums they are not included.
'''
factorials = {i: factorial(i) for i in range(0, 10)}
def generateNumbers():
    """Yield every candidate number from 13 through 2540160 inclusive.

    2540160 = 7 * 9! is an upper bound for any sum of digit factorials.
    """
    yield from range(13, 2540161)
def getFactorialDigitSum(n):
    """Return the sum of the factorials of the digits of ``n`` (a digit string)."""
    # Relies on the module-level ``factorials`` lookup table.
    return sum(factorials[int(digit)] for digit in n)
def main():
    """Print the sum of all numbers equal to the sum of their digit factorials."""
    curious = [number for number in generateNumbers()
               if getFactorialDigitSum(str(number)) == number]
    print(sum(curious))
if __name__ == '__main__':
    # Solve Project Euler problem 34 when run as a script.
    main()
|
21,347 | 9d4e0ef5ffdf6ce9eac2a1d63748513f8c0418cf | from __future__ import print_function
try:
import numpy as np
from torch.autograd import Variable
from torch.autograd import grad as torch_grad
import torch.nn as nn
import torch.nn.functional as F
import torch
from itertools import chain as ichain
from gaussgan.utils import tlog, softmax, initialize_weights, calc_gradient_penalty
except ImportError as e:
print(e)
raise ImportError
import math
class GELU(nn.Module):
    """Gaussian Error Linear Unit (tanh approximation).

    Paper section 3.4, last paragraph: BERT uses GELU in place of ReLU.
    """

    def forward(self, x):
        # 0.5 * x * (1 + tanh(sqrt(2/pi) * (x + 0.044715 * x^3)))
        inner = math.sqrt(2 / math.pi) * (x + 0.044715 * torch.pow(x, 3))
        return 0.5 * x * (torch.tanh(inner) + 1)
class Reshape(nn.Module):
    """
    Layer that reshapes its input inside a sequential model.

    The batch dimension is preserved; the remaining dimensions are viewed
    as ``shape``.
    """
    def __init__(self, shape=None):
        """
        :param shape: target shape excluding the batch dimension
            (defaults to flattening everything away, i.e. ``[]``).
        """
        super(Reshape, self).__init__()
        # None-sentinel instead of a mutable default argument: a shared
        # default list would be aliased across every Reshape instance.
        self.shape = [] if shape is None else shape

    def forward(self, x):
        # Keep the batch dimension, view the rest as self.shape.
        return x.view(x.size(0), *self.shape)

    def extra_repr(self):
        # (Optional) extra information shown when printing the module.
        return 'shape={}'.format(
            self.shape
        )
class Generator(nn.Module):
    """
    Fully connected generator network.

    Input is a vector from representation space of dimension ``latent_dim``;
    output is a vector from data space of dimension ``x_dim``. Hidden layer
    widths are the latent/data sizes scaled by ``dscale``.
    """

    def __init__(self, latent_dim, x_dim, dscale=1, verbose=False):
        super(Generator, self).__init__()
        self.name = 'generator'
        self.latent_dim = latent_dim
        self.x_dim = x_dim
        self.verbose = verbose
        self.dscale = dscale
        self.scaled_x_lat = int(dscale * self.latent_dim)
        self.scaled_x_dim = int(dscale * self.x_dim)

        # Three Linear + BatchNorm + LeakyReLU stages, then a linear output.
        self.model = nn.Sequential(
            nn.Linear(self.latent_dim, self.scaled_x_lat),
            nn.BatchNorm1d(self.scaled_x_lat),
            nn.LeakyReLU(0.2, inplace=True),

            nn.Linear(self.scaled_x_lat, self.scaled_x_lat),
            nn.BatchNorm1d(self.scaled_x_lat),
            nn.LeakyReLU(0.2, inplace=True),

            nn.Linear(self.scaled_x_lat, self.scaled_x_dim),
            nn.BatchNorm1d(self.scaled_x_dim),
            nn.LeakyReLU(0.2, inplace=True),

            nn.Linear(self.scaled_x_dim, x_dim),
        )

        initialize_weights(self)

        if self.verbose:
            print("Setting up {}...\n".format(self.name))
            print(self.model)

    def forward(self, z):
        """Map a batch of latent vectors to data-space samples."""
        return self.model(z)
class Discriminator(nn.Module):
    """
    Fully connected discriminator/critic network.

    Input is a data-space vector of dimension ``dim``; output is a single
    score. With ``wass_metric`` the raw score is returned (Wasserstein
    critic); otherwise a final Sigmoid squashes it to a probability.
    """

    def __init__(self, dim, dscale=1, wass_metric=False, verbose=False):
        super(Discriminator, self).__init__()
        self.name = 'discriminator'
        self.wass = wass_metric
        self.dim = dim
        self.verbose = verbose
        self.dscale = dscale
        self.scaled_x_dim = int(dscale * self.dim)

        # Funnel of Linear + LeakyReLU layers down to a single output unit.
        self.model = nn.Sequential(
            nn.Linear(self.dim, self.scaled_x_dim),
            nn.LeakyReLU(0.2, inplace=True),
            nn.Linear(self.scaled_x_dim, 512),
            nn.LeakyReLU(0.2, inplace=True),
            nn.Linear(512, 256),
            nn.LeakyReLU(0.2, inplace=True),
            nn.Linear(256, 128),
            nn.LeakyReLU(0.2, inplace=True),
            nn.Linear(128, 1),
        )

        # If NOT using the Wasserstein metric, finish with a Sigmoid.
        if not self.wass:
            self.model = nn.Sequential(self.model, nn.Sigmoid())

        initialize_weights(self)

        if self.verbose:
            print("Setting up {}...\n".format(self.name))
            print(self.model)

    def forward(self, img):
        """Score a batch of samples."""
        return self.model(img)
|
21,348 | 7a0d1225a41233753506668e3c4e965aee8a0c62 | import pyqrcode
import io
import base64
import datetime
import json
import os, tempfile, zipfile
from django.conf import settings
from django.shortcuts import get_list_or_404, get_object_or_404, render, redirect
from django.db.models import Q
from django.views.generic.edit import CreateView, UpdateView, DeleteView
from django.urls import reverse_lazy, reverse
from django.core.exceptions import PermissionDenied, ObjectDoesNotExist
from django.views.generic import ListView
from django.contrib.auth.decorators import login_required
from django.views.generic.detail import SingleObjectMixin
from django.contrib import messages
from django.utils.safestring import mark_safe
from django.utils.encoding import force_text
from django.utils.http import urlsafe_base64_decode, urlsafe_base64_encode
from django.contrib.auth.tokens import PasswordResetTokenGenerator
from django.contrib.sites.shortcuts import get_current_site
from django.utils.encoding import force_bytes
from django.utils import six
from django.http import HttpResponse
from wsgiref.util import FileWrapper
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from easy_pdf.views import PDFTemplateView
from easy_pdf.rendering import html_to_pdf, make_response, render_to_pdf_response
from django.utils.decorators import method_decorator
from django import template
from django.contrib.auth.models import User
from django.contrib.auth import login, authenticate
from django.contrib.auth.forms import UserCreationForm
from django.views.decorators.csrf import csrf_exempt
from django.core.mail import send_mail, get_connection
from django.db.models import Value as V
from django.db.models.functions import Concat
from django.http import JsonResponse, HttpResponseRedirect
from django.template import loader
from django.utils import timezone
from reserver.utils import render_add_cal_button
from reserver.utils import check_for_and_fix_users_without_userdata
from reserver.utils import create_cruise_notifications, create_cruise_administration_notification
from reserver.utils import create_cruise_deadline_and_departure_notifications, delete_cruise_notifications
from reserver.utils import delete_cruise_departure_notifications, delete_cruise_deadline_and_departure_notifications
from reserver.utils import create_season_notifications, delete_season_notifications
from reserver.emails import account_activation_token, send_activation_email, send_user_approval_email
from reserver.emails import send_email, send_template_only_email
from reserver import jobs
from reserver.models import *
from reserver.forms import *
from dal import autocomplete
class OwnerAutoCompleteView(autocomplete.Select2QuerySetView):
    """Autocomplete for picking cruise owners from the requester's organization."""

    def get_result_label(self, item):
        # Display the user's full name instead of the username.
        return item.get_full_name()

    def get_queryset(self):
        # Candidates: active, email-confirmed users in the same organization,
        # excluding the requesting user themselves.
        org = self.request.user.userdata.organization
        qs = User.objects.filter(userdata__organization=org).exclude(userdata=self.request.user.userdata).exclude(userdata__email_confirmed=False).exclude(userdata__user__is_active=False)
        if self.q:
            # Match the typed text against "first last" as a single string.
            qs = qs.annotate(full_name=Concat('first_name', V(' '), 'last_name')).filter(full_name__icontains=self.q)
        return qs
def cruise_pdf_view(request, pk):
    """Render a cruise summary as a downloadable PDF.

    Raises PermissionDenied unless the requesting user may view the cruise.
    """
    cruise = get_object_or_404(Cruise, pk=pk)
    if not cruise.is_viewable_by(request.user):
        raise PermissionDenied
    context = {
        'pagesize': 'A4',
        'title': 'Cruise summary for ' + str(cruise),
        'cruise': cruise,
        # Used by the template to build absolute URLs inside the PDF.
        'http_host': request.META['HTTP_HOST']
    }
    return render_to_pdf_response(
        request,
        'reserver/pdfs/cruise_pdf.html',
        context,
        download_filename='cruise.pdf'
    )
class CruiseDeleteView(DeleteView):
    """Confirmation view for cancelling (deleting) a cruise."""
    model = Cruise
    template_name = 'reserver/cruises/cruise_delete_form.html'

    def dispatch(self, request, *args, **kwargs):
        """Deny access before any handler runs unless the user may cancel."""
        # Renamed the local from `object`, which shadowed the builtin.
        cruise = get_object_or_404(self.model, pk=self.kwargs.get('pk'))
        if not cruise.is_cancellable_by(request.user):
            raise PermissionDenied
        return super().dispatch(request, *args, **kwargs)

    def get_success_url(self):
        # Record the deletion in the action log before redirecting.
        action = Action(user=self.request.user, timestamp=timezone.now(), target=str(self.object))
        action.action = "deleted cruise"
        action.save()
        return reverse_lazy('user-page')
class CruiseList(ListView):
    """Plain list of all cruises."""
    model = Cruise
    template_name = 'reserver/cruises/cruise_list.html'
class CruiseCreateView(CreateView):
    """Create-cruise form with inline formsets for days, participants,
    documents, equipment and invoices.

    The POST endpoint handles two actions: saving a draft ("save_cruise")
    and submitting the cruise for approval ("submit_cruise").
    """
    template_name = 'reserver/cruises/cruise_create_form.html'
    model = Cruise
    form_class = CruiseForm

    def get_success_url(self, is_submitting, cruise):
        """Log the creation and pick the redirect target.

        A failed submit sends the user back to the edit form so missing
        information can be fixed; otherwise redirect to the user page.
        """
        action = Action(user=self.request.user, timestamp=timezone.now(), target=str(self.object))
        action.action = "created cruise"
        action.save()
        if is_submitting and not cruise.is_submitted:
            return reverse_lazy('cruise-update', kwargs={'pk': cruise.pk})
        return reverse_lazy('user-page')

    def get_form_kwargs(self):
        # Hand the request through to the form (used for per-user behaviour).
        kwargs = super(CruiseCreateView, self).get_form_kwargs()
        kwargs.update({'request': self.request})
        return kwargs

    def get(self, request, *args, **kwargs):
        """Handles creation of new blank form/formset objects."""
        self.object = None
        form_class = self.get_form_class()
        form_class.user = request.user
        form = self.get_form(form_class)
        cruiseday_form = CruiseDayFormSet()
        participant_form = ParticipantFormSet()
        document_form = DocumentFormSet()
        equipment_form = EquipmentFormSet()
        invoice_form = InvoiceFormSet()
        # Warn accounts that cannot submit yet (unconfirmed email or
        # not-yet-approved account).
        if not self.request.user.userdata.email_confirmed and self.request.user.userdata.role == "":
            messages.add_message(self.request, messages.WARNING, mark_safe("You have not yet confirmed your email address. Your account will not be eligible for approval or submitting cruises before this is done. If you typed the wrong email address while signing up, correct it in your profile and we'll send you a new one. You may have to add no-reply@rvgunnerus.no to your contact list if our messages go to spam."+"<br><br><a class='btn btn-primary' href='"+reverse('resend-activation-mail')+"'>Resend activation email</a>"))
        elif self.request.user.userdata.email_confirmed and self.request.user.userdata.role == "":
            messages.add_message(self.request, messages.WARNING, "Your user account has not been approved by an administrator yet. You may save cruise drafts and edit them, but you may not submit cruises for approval before your account is approved.")
        return self.render_to_response(
            self.get_context_data(
                form=form,
                cruiseday_form=cruiseday_form,
                participant_form=participant_form,
                document_form=document_form,
                equipment_form=equipment_form,
                invoice_form=invoice_form,
                is_NTNU=request.user.userdata.organization.is_NTNU,
                billing_type="auto",
                is_invalid=False
            )
        )

    def post(self, request, *args, **kwargs):
        """Handles receiving submitted form and formset data and checking their validity."""
        self.object = None
        form_class = self.get_form_class()
        form_class.user = request.user
        form = self.get_form(form_class)
        cruiseday_form = CruiseDayFormSet(self.request.POST)
        participant_form = ParticipantFormSet(self.request.POST)
        document_form = DocumentFormSet(self.request.POST, self.request.FILES)
        equipment_form = EquipmentFormSet(self.request.POST)
        invoice_form = InvoiceFormSet(self.request.POST)
        # Same account warnings as in get().
        if not self.request.user.userdata.email_confirmed and self.request.user.userdata.role == "":
            messages.add_message(self.request, messages.WARNING, mark_safe("You have not yet confirmed your email address. Your account will not be eligible for approval or submitting cruises before this is done. If you typed the wrong email address while signing up, correct it in your profile and we'll send you a new one. You may have to add no-reply@rvgunnerus.no to your contact list if our messages go to spam."+"<br><br><a class='btn btn-primary' href='"+reverse('resend-activation-mail')+"'>Resend activation email</a>"))
        elif self.request.user.userdata.email_confirmed and self.request.user.userdata.role == "":
            messages.add_message(self.request, messages.WARNING, "Your user account has not been approved by an administrator yet. You may save cruise drafts and edit them, but you may not submit cruises for approval before your account is approved.")
        # check if all our forms are valid, handle outcome
        if (form.is_valid() and cruiseday_form.is_valid() and participant_form.is_valid() and document_form.is_valid() and equipment_form.is_valid() and invoice_form.is_valid()):
            return self.form_valid(form, cruiseday_form, participant_form, document_form, equipment_form, invoice_form)
        else:
            return self.form_invalid(form, cruiseday_form, participant_form, document_form, equipment_form, invoice_form)

    def form_valid(self, form, cruiseday_form, participant_form, document_form, equipment_form, invoice_form):
        """Called when all our forms are valid. Creates a Cruise with Participants and CruiseDays."""
        is_submitting = False
        # NOTE(review): the local name `Cruise` shadows the Cruise model class
        # within this method.
        Cruise = form.save(commit=False)
        Cruise.leader = self.request.user
        try:
            Cruise.organization = Cruise.leader.userdata.organization
        except:
            # NOTE(review): bare except silently ignores any failure to
            # resolve the leader's organization -- TODO narrow the exception.
            pass
        form.cleaned_data["leader"] = self.request.user
        if hasattr(self, "request"):
            # check whether we're saving or submitting the form
            if self.request.POST.get("save_cruise"):
                Cruise.is_submitted = False
                messages.add_message(self.request, messages.SUCCESS, mark_safe('Cruise successfully saved. You may edit and submit it on the "<a href="/user/cruises/unsubmitted/">Unsubmitted Cruises</a>" page.'))
            elif self.request.POST.get("submit_cruise"):
                is_submitting = True
                # NOTE(review): these formsets are re-bound from POST and their
                # cleaned_data is read without re-running is_valid() -- looks
                # like it relies on the validation done in post(); confirm.
                cruiseday_form = CruiseDayFormSet(self.request.POST)
                participant_form = ParticipantFormSet(self.request.POST)
                cruise_days = cruiseday_form.cleaned_data
                cruise_participants = participant_form.cleaned_data
                cruise_invoice = invoice_form.cleaned_data
                if (Cruise.is_submittable(user=self.request.user, cleaned_data=form.cleaned_data, cruise_invoice=cruise_invoice, cruise_days=cruise_days, cruise_participants=cruise_participants)):
                    Cruise.is_submitted = True
                    Cruise.submit_date = timezone.now()
                    messages.add_message(self.request, messages.SUCCESS, mark_safe('Cruise successfully submitted. You may track its approval status on the "<a href="/user/cruises/submitted/">Submitted Cruises</a>" page.'))
                else:
                    Cruise.is_submitted = False
                    messages.add_message(self.request, messages.ERROR, mark_safe('Cruise could not be submitted:' + str(Cruise.get_missing_information_string(cleaned_data=form.cleaned_data, cruise_invoice=cruise_invoice, cruise_days=cruise_days, cruise_participants=cruise_participants)) + '<br>If you decide to do this later, you can get back to this cruise to review and add any missing or invalid information on the "<a href="/user/cruises/unsubmitted/">Unsubmitted Cruises</a>" page.'))
            else:
                Cruise.is_submitted = False
                messages.add_message(self.request, messages.ERROR, mark_safe('Cruise could not be submitted: We were unable to determine the action you wished to take on submit. Please try to submit again below.'))
        Cruise.save()
        # Persist the cruise and attach every inline formset to it.
        self.object = form.save()
        cruiseday_form.instance = self.object
        cruiseday_form.save()
        participant_form.instance = self.object
        participant_form.save()
        document_form.instance = self.object
        document_form.save()
        equipment_form.instance = self.object
        equipment_form.save()
        invoice_form.instance = self.object
        invoice_form.save()
        return HttpResponseRedirect(self.get_success_url(is_submitting, Cruise))

    def form_invalid(self, form, cruiseday_form, participant_form, document_form, equipment_form, invoice_form):
        """Throw form back at user."""
        # NOTE(review): leftover debug prints of the invalid formsets; they
        # write to stdout on every invalid submission.
        print(cruiseday_form)
        print(document_form)
        print(equipment_form)
        print(invoice_form)
        return self.render_to_response(
            self.get_context_data(
                form=form,
                cruiseday_form=cruiseday_form,
                participant_form=participant_form,
                document_form=document_form,
                equipment_form=equipment_form,
                invoice_form=invoice_form,
                is_NTNU=self.request.user.userdata.organization.is_NTNU,
                billing_type="auto",
                is_invalid=True,
            )
        )
class CruiseEditView(UpdateView):
template_name = 'reserver/cruises/cruise_edit_form.html'
model = Cruise
form_class = CruiseForm
def get_success_url(self, is_submitting, cruise):
action = Action(user=self.request.user, timestamp=timezone.now(), target=str(self.object))
action.action = "edited cruise"
action.save()
if is_submitting and not cruise.is_submitted:
return reverse_lazy('cruise-update', kwargs={'pk': cruise.pk})
return reverse_lazy('user-page')
def get_form_kwargs(self):
kwargs = super(CruiseEditView, self).get_form_kwargs()
kwargs.update({'request': self.request})
return kwargs
def get(self, request, *args, **kwargs):
"""Handles creation of new blank form/formset objects."""
self.object = get_object_or_404(Cruise, pk=self.kwargs.get('pk'))
if not self.object.is_editable_by(request.user):
raise PermissionDenied
form_class = self.get_form_class()
form_class.user = request.user
form = self.get_form(form_class)
cruiseday_form = CruiseDayFormSet(instance=self.object)
participant_form = ParticipantFormSet(instance=self.object)
document_form = DocumentFormSet(instance=self.object)
equipment_form = EquipmentFormSet(instance=self.object)
invoice_form = InvoiceFormSet(instance=self.object)
return self.render_to_response(
self.get_context_data(
form=form,
cruiseday_form=cruiseday_form,
participant_form=participant_form,
document_form=document_form,
equipment_form=equipment_form,
invoice_form=invoice_form,
billing_type=self.object.billing_type,
is_NTNU=self.object.leader.userdata.organization.is_NTNU,
is_submitted=self.object.is_submitted
)
)
def post(self, request, *args, **kwargs):
"""Handles receiving submitted form and formset data and checking their validity."""
self.object = get_object_or_404(Cruise, pk=self.kwargs.get('pk'))
if not self.object.is_editable_by(request.user):
raise PermissionDenied
form_class = self.get_form_class()
form_class.user = request.user
form = self.get_form(form_class)
cruiseday_form = CruiseDayFormSet(self.request.POST, instance=self.object)
participant_form = ParticipantFormSet(self.request.POST, instance=self.object)
document_form = DocumentFormSet(data=request.POST, files=request.FILES, instance=self.object)
equipment_form = EquipmentFormSet(self.request.POST, instance=self.object)
invoice_form = InvoiceFormSet(self.request.POST, instance=self.object)
# check if all our forms are valid, handle outcome
if (form.is_valid() and cruiseday_form.is_valid() and participant_form.is_valid() and document_form.is_valid() and equipment_form.is_valid() and invoice_form.is_valid()):
return self.form_valid(form, cruiseday_form, participant_form, document_form, equipment_form, invoice_form)
else:
return self.form_invalid(form, cruiseday_form, participant_form, document_form, equipment_form, invoice_form)
def form_valid(self, form, cruiseday_form, participant_form, document_form, equipment_form, invoice_form):
    """Called when all our forms are valid. Saves the cruise and its related
    formsets, updates submission/approval state and messages the user.

    Returns a redirect to the success URL for the (possibly re-)submitted
    cruise.
    """
    is_submitting = False
    old_cruise = get_object_or_404(Cruise, pk=self.kwargs.get('pk'))
    # Snapshot the cruise days before saving so changes can be detected below.
    old_cruise_days_string = str(old_cruise.get_cruise_days())
    new_cruise = form.save(commit=False)
    new_cruise.information_approved = False
    self.object = form.save()
    # Attach each formset to the freshly saved cruise and persist it
    # (the original repeated this instance/save pair five times).
    for formset in (cruiseday_form, participant_form, document_form, equipment_form, invoice_form):
        formset.instance = self.object
        formset.save()
    new_cruise.leader = self.request.user
    try:
        new_cruise.organization = new_cruise.leader.userdata.organization
    except Exception:
        # The leader may not have userdata/organization yet; leave it unset.
        pass
    form.cleaned_data["leader"] = self.request.user
    new_cruise.outdate_missing_information()
    if new_cruise.is_submitted and old_cruise_days_string != str(new_cruise.get_cruise_days()):
        # Cruise days changed on a submitted cruise: date approval must be redone.
        new_cruise.is_approved = False
        # Bug fix: the original interpolated str(Cruise) (the model class)
        # into the message instead of the cruise instance.
        messages.add_message(self.request, messages.SUCCESS, mark_safe('Cruise ' + str(new_cruise) + ' updated. Your cruise days were modified, so your cruise is now pending approval. You may track its approval status on the "<a href="/user/cruises/submitted/">Submitted Cruises</a>" page.'))
        delete_cruise_deadline_and_departure_notifications(new_cruise)
        set_date_dict_outdated()
    elif new_cruise.is_submitted and old_cruise.information_approved:
        messages.add_message(self.request, messages.SUCCESS, mark_safe('Cruise ' + str(new_cruise) + ' updated. Your cruise information was modified, so your cruise\'s information is now pending approval. You may track its approval status on the "<a href="/user/cruises/upcoming/">Upcoming Cruises</a>" page.'))
        delete_cruise_departure_notifications(new_cruise)
    # check whether we're saving or submitting the form
    elif hasattr(self, "request") and self.request.POST.get("save_cruise"):
        messages.add_message(self.request, messages.SUCCESS, mark_safe('Cruise successfully saved. You may edit and submit it on the "<a href="/user/cruises/unsubmitted/">Unsubmitted Cruises</a>" page.'))
    elif hasattr(self, "request") and self.request.POST.get("submit_cruise"):
        is_submitting = True
        cruise_days = cruiseday_form.cleaned_data
        cruise_participants = participant_form.cleaned_data
        cruise_invoice = invoice_form.cleaned_data
        if (new_cruise.is_submittable(user=self.request.user, cleaned_data=form.cleaned_data, cruise_invoice=cruise_invoice, cruise_days=cruise_days, cruise_participants=cruise_participants)):
            new_cruise.is_submitted = True
            new_cruise.submit_date = timezone.now()
            messages.add_message(self.request, messages.SUCCESS, mark_safe('Cruise successfully submitted. You may track its approval status on the "<a href="/user/cruises/submitted/">Submitted Cruises</a>" page.'))
        else:
            new_cruise.is_submitted = False
            messages.add_message(self.request, messages.ERROR, mark_safe('Cruise could not be submitted:' + str(new_cruise.get_missing_information_string(cleaned_data=form.cleaned_data, cruise_invoice=cruise_invoice, cruise_days=cruise_days, cruise_participants=cruise_participants)) + '<br>If you decide to do this later, you can get back to this cruise to review and add any missing or invalid information on the "<a href="/user/cruises/unsubmitted/">Unsubmitted Cruises</a>" page.'))
    else:
        messages.add_message(self.request, messages.ERROR, mark_safe('We were unable to determine the action you wished to take when saving the form. If you wished to send it in for approval and not just save it, please submit it below.'))
    new_cruise.save()
    if old_cruise.information_approved:
        # Let the admins know an already-approved cruise was changed.
        admin_user_emails = [admin_user.email for admin_user in list(User.objects.filter(userdata__role='admin'))]
        send_template_only_email(admin_user_emails, EmailTemplate.objects.get(title='Approved cruise updated'), cruise=old_cruise)
    return HttpResponseRedirect(self.get_success_url(is_submitting, new_cruise))
def form_invalid(self, form, cruiseday_form, participant_form, document_form, equipment_form, invoice_form):
    """Throw form back at user.

    Re-renders the edit page with the bound (invalid) forms so that field
    errors are displayed next to the inputs.
    """
    return self.render_to_response(
        self.get_context_data(
            form=form,
            cruiseday_form=cruiseday_form,
            participant_form=participant_form,
            document_form=document_form,
            equipment_form=equipment_form,
            invoice_form=invoice_form,
            billing_type=self.object.billing_type,
            is_NTNU=self.object.leader.userdata.organization.is_NTNU
        )
    )
class CruiseView(CruiseEditView):
    """Read-only view of a cruise: same layout as the edit form, but every
    field on the form and on all formsets is rendered disabled."""

    template_name = 'reserver/cruises/cruise_view_form.html'

    @staticmethod
    def _disable_fields(form):
        """Mark every widget on the given (sub)form as read-only and disabled."""
        for key in form.fields.keys():
            form.fields[key].widget.attrs['readonly'] = True
            form.fields[key].widget.attrs['disabled'] = True

    def get(self, request, *args, **kwargs):
        """Render the cruise with all inputs disabled; 403 if not viewable."""
        self.object = get_object_or_404(Cruise, pk=self.kwargs.get('pk'))
        if not self.object.is_viewable_by(request.user):
            raise PermissionDenied
        form_class = self.get_form_class()
        form = self.get_form(form_class)
        cruiseday_form = CruiseDayFormSet(instance=self.object)
        participant_form = ParticipantFormSet(instance=self.object)
        document_form = DocumentFormSet(instance=self.object)
        equipment_form = EquipmentFormSet(instance=self.object)
        invoice_form = InvoiceFormSet(instance=self.object)
        # The original repeated the same field-disabling loop once for the
        # form and once per formset; do it uniformly instead.
        self._disable_fields(form)
        for formset in (cruiseday_form, participant_form, document_form, equipment_form, invoice_form):
            for subform in formset:
                self._disable_fields(subform)
        return self.render_to_response(
            self.get_context_data(
                form=form,
                cruiseday_form=cruiseday_form,
                participant_form=participant_form,
                document_form=document_form,
                equipment_form=equipment_form,
                invoice_form=invoice_form,
                billing_type=self.object.billing_type,
                is_NTNU=self.object.leader.userdata.organization.is_NTNU
            )
        )
def submit_cruise(request, pk):
    """Submit a cruise for admin approval on behalf of its leader or an owner.

    Resets previous approvals, logs an Action, emails all admins, and
    redirects back to the referring page. 403 for anyone else.
    """
    cruise = get_object_or_404(Cruise, pk=pk)
    # Only the cruise leader or one of its owners may submit it.
    if request.user == cruise.leader or request.user in cruise.owner.all():
        if not cruise.is_submittable(user=request.user):
            messages.add_message(request, messages.ERROR, mark_safe('Cruise could not be submitted: ' + str(cruise.get_missing_information_string()) + '<br>You may review and add any missing or invalid information on the "<a href="/user/cruises/unsubmitted/">Unsubmitted Cruises</a>" page.'))
        else:
            # Submitting resets any previous approvals.
            cruise.is_submitted = True
            cruise.information_approved = False
            cruise.is_approved = False
            cruise.submit_date = timezone.now()
            cruise.save()
            # Record the submission in the action log.
            action = Action(user=request.user, target=str(cruise))
            action.action = "submitted cruise"
            action.timestamp = timezone.now()
            action.save()
            """Sends notification email to admins about a new cruise being submitted."""
            admin_user_emails = [admin_user.email for admin_user in list(User.objects.filter(userdata__role='admin'))]
            send_template_only_email(admin_user_emails, EmailTemplate.objects.get(title='New cruise'), cruise=cruise)
            messages.add_message(request, messages.SUCCESS, mark_safe('Cruise successfully submitted. You may track its approval status on the "<a href="/user/cruises/submitted/">Submitted Cruises</a>" page.'))
    else:
        raise PermissionDenied
    # Send the user back to wherever they came from.
    return redirect(request.META['HTTP_REFERER'])
def unsubmit_cruise(request, pk):
    """Withdraw (cancel) a submitted cruise and notify the admins.

    Clears submission and approval flags, logs an Action, emails all admins,
    deletes pending notifications, then redirects back to the referrer.
    """
    cruise = get_object_or_404(Cruise, pk=pk)
    if cruise.is_cancellable_by(request.user):
        # Cancelling clears submission state and any approvals.
        cruise.is_submitted = False
        cruise.information_approved = False
        cruise.is_approved = False
        cruise.save()
        action = Action(user=request.user, target=str(cruise))
        action.action = "unsubmitted cruise"
        action.timestamp = timezone.now()
        action.save()
        set_date_dict_outdated()
        messages.add_message(request, messages.WARNING, mark_safe('Cruise ' + str(cruise) + ' cancelled.'))
        admin_user_emails = [admin_user.email for admin_user in list(User.objects.filter(userdata__role='admin'))]
        send_template_only_email(admin_user_emails, EmailTemplate.objects.get(title='Cruise cancelled'), cruise=cruise)
        delete_cruise_deadline_and_departure_notifications(cruise)
    else:
        raise PermissionDenied
    return redirect(request.META['HTTP_REFERER'])
# admin-only
@csrf_exempt
def reject_cruise(request, pk):
    """Admin action: reject a cruise, notify its users, clean up notifications.

    Expects an optional JSON body {"message": ...}; returns an (empty) JSON
    response. 403 for non-superusers.
    """
    cruise = get_object_or_404(Cruise, pk=pk)
    if request.user.is_superuser:
        #message
        # Optional admin message included in the rejection notice.
        try:
            json_data = json.loads(request.body.decode("utf-8"))
            message = json_data["message"]
        except Exception:
            message = ""
        #end message
        # Bug fix: the original tested information_approved AFTER clearing it,
        # so the deadline-only branch below could never run. Snapshot first.
        was_information_approved = cruise.information_approved
        cruise.is_approved = False
        cruise.information_approved = False
        cruise.is_submitted = False
        cruise.save()
        action = Action(user=request.user, target=str(cruise))
        action.action = "rejected cruise"
        action.timestamp = timezone.now()
        action.save()
        messages.add_message(request, messages.WARNING, mark_safe('Cruise ' + str(cruise) + ' rejected.'))
        create_cruise_administration_notification(cruise, 'Cruise rejected', message=message)
        if was_information_approved:
            delete_cruise_deadline_notifications(cruise)
        else:
            delete_cruise_deadline_and_departure_notifications(cruise)
    else:
        raise PermissionDenied
    return JsonResponse(json.dumps([], ensure_ascii=True), safe=False)
@csrf_exempt
def approve_cruise(request, pk):
    """Admin action: approve a cruise's dates and schedule notifications.

    Expects an optional JSON body {"message": ...}; returns an (empty) JSON
    response. 403 for non-superusers.
    """
    cruise = get_object_or_404(Cruise, pk=pk)
    if request.user.is_superuser:
        #message
        # Optional admin message included in the notification; defaults to "".
        try:
            json_data = json.loads(request.body.decode("utf-8"))
            message = json_data["message"]
        except:
            message = ""
        #end message
        cruise.is_approved = True
        cruise.save()
        action = Action(user=request.user, target=str(cruise))
        action.action = "approved cruise days"
        action.timestamp = timezone.now()
        action.save()
        messages.add_message(request, messages.SUCCESS, mark_safe('Cruise ' + str(cruise) + ' approved.'))
        create_cruise_administration_notification(cruise, 'Cruise dates approved', message=message)
        set_date_dict_outdated()
        # Departure notifications only make sense once the information is approved too.
        if cruise.information_approved:
            create_cruise_deadline_and_departure_notifications(cruise)
        else:
            create_cruise_notifications(cruise, 'Cruise deadlines')
    else:
        raise PermissionDenied
    return JsonResponse(json.dumps([], ensure_ascii=True), safe=False)
@csrf_exempt
def unapprove_cruise(request, pk):
    """Admin action: withdraw approval of a cruise's dates.

    Expects an optional JSON body {"message": ...}; returns an (empty) JSON
    response. 403 for non-superusers.
    """
    cruise = get_object_or_404(Cruise, pk=pk)
    if request.user.is_superuser:
        #message
        # Optional admin message included in the notification; defaults to "".
        try:
            json_data = json.loads(request.body.decode("utf-8"))
            message = json_data["message"]
        except Exception:
            message = ""
        #end message
        # Bug fix: the original read information_approved AFTER zeroing it, so
        # the deadline-only branch below was unreachable. Snapshot it first.
        was_information_approved = cruise.information_approved
        cruise.is_approved = False
        cruise.information_approved = False
        cruise.save()
        action = Action(user=request.user, target=str(cruise))
        action.action = "unapproved cruise days"
        action.timestamp = timezone.now()
        action.save()
        set_date_dict_outdated()
        messages.add_message(request, messages.WARNING, mark_safe('Cruise ' + str(cruise) + ' unapproved.'))
        create_cruise_administration_notification(cruise, 'Cruise unapproved', message=message)
        if was_information_approved:
            delete_cruise_deadline_notifications(cruise)
        else:
            delete_cruise_deadline_and_departure_notifications(cruise)
    else:
        raise PermissionDenied
    return JsonResponse(json.dumps([], ensure_ascii=True), safe=False)
@csrf_exempt
def approve_cruise_information(request, pk):
    """Admin action: approve a cruise's information (as opposed to its dates).

    Expects an optional JSON body {"message": ...}; returns an (empty) JSON
    response. 403 for non-superusers.
    """
    cruise = get_object_or_404(Cruise, pk=pk)
    if request.user.is_superuser:
        #message
        # Optional admin message for the notification; defaults to "".
        try:
            json_data = json.loads(request.body.decode("utf-8"))
            message = json_data["message"]
        except:
            message = ""
        #end message
        cruise.information_approved = True
        cruise.save()
        action = Action(user=request.user, target=str(cruise))
        action.action = "approved cruise information"
        action.timestamp = timezone.now()
        action.save()
        messages.add_message(request, messages.SUCCESS, mark_safe('Cruise information for ' + str(cruise) + ' approved.'))
        # Departure notifications are only scheduled once the dates are approved too.
        if cruise.is_approved:
            create_cruise_notifications(cruise, 'Cruise departure')
        create_cruise_administration_notification(cruise, 'Cruise information approved', message=message)
    else:
        raise PermissionDenied
    return JsonResponse(json.dumps([], ensure_ascii=True), safe=False)
@csrf_exempt
def unapprove_cruise_information(request, pk):
    """Admin action: withdraw approval of a cruise's information.

    Expects an optional JSON body {"message": ...}; returns an (empty) JSON
    response. 403 for non-superusers.
    """
    cruise = get_object_or_404(Cruise, pk=pk)
    if request.user.is_superuser:
        #message
        # Optional admin message for the notification; defaults to "".
        try:
            json_data = json.loads(request.body.decode("utf-8"))
            message = json_data["message"]
        except:
            message = ""
        #end message
        cruise.information_approved = False
        cruise.save()
        action = Action(user=request.user, target=str(cruise))
        action.action = "unapproved cruise information"
        action.timestamp = timezone.now()
        action.save()
        messages.add_message(request, messages.WARNING, mark_safe('Cruise information for ' + str(cruise) + ' unapproved.'))
        delete_cruise_departure_notifications(cruise)
        create_cruise_administration_notification(cruise, 'Cruise information unapproved', message=message)
    else:
        raise PermissionDenied
    return JsonResponse(json.dumps([], ensure_ascii=True), safe=False)
@csrf_exempt
def send_cruise_message(request, pk):
    """Admin action: send a free-form message notification to a cruise.

    Expects an optional JSON body {"message": ...}; returns an (empty) JSON
    response. 403 for non-superusers.
    """
    cruise = get_object_or_404(Cruise, pk=pk)
    if request.user.is_superuser:
        #message
        # Message body comes from the JSON request payload; defaults to "".
        try:
            json_data = json.loads(request.body.decode("utf-8"))
            message = json_data["message"]
        except:
            message = ""
        #end message
        action = Action(user=request.user, target=str(cruise))
        action.action = "sent message to cruise"
        action.timestamp = timezone.now()
        action.save()
        create_cruise_administration_notification(cruise, 'Cruise message', message=message)
        messages.add_message(request, messages.SUCCESS, mark_safe('Message sent to ' + str(cruise) + '.'))
    else:
        raise PermissionDenied
    return JsonResponse(json.dumps([], ensure_ascii=True), safe=False)
|
21,349 | 515977bccdb716b88e6915beab55ab2f21892e4b | # Uma matriz não vazia A que consiste em N números inteiros é fornecida. A matriz contém um número ímpar de elementos e cada elemento da matriz pode ser emparelhado com outro elemento que tem o mesmo valor, exceto por um elemento que não é emparelhado.
#
# Por exemplo, na matriz A, de modo que:
#
# A [0] = 9 A [1] = 3 A [2] = 9
# A [3] = 3 A [4] = 9 A [5] = 7
# A [6] = 9
# os elementos nos índices 0 e 2 têm valor 9,
# os elementos nos índices 1 e 3 têm valor 3,
# os elementos nos índices 4 e 6 têm valor 9,
# o elemento no índice 5 tem valor 7 e não está emparelhado.
# Escreva uma função:
#
# solução def (A)
#
# que, dada uma matriz A que consiste em N números inteiros que atendem às condições acima, retorna o valor do elemento não emparelhado.
#
# Por exemplo, dada a matriz A de modo que:
#
# A [0] = 9 A [1] = 3 A [2] = 9
# A [3] = 3 A [4] = 9 A [5] = 7
# A [6] = 9
# a função deve retornar 7, conforme explicado no exemplo acima.
#
# Escreva um algoritmo eficiente para as seguintes suposições:
#
# N é um número inteiro ímpar dentro do intervalo [1 .. 1.000.000];
# cada elemento da matriz A é um número inteiro dentro do intervalo [ 1 .. 1.000.000.000 ];
#
lista = [1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9]


def retorna_numero(valores=None):
    """Return the single unpaired value in *valores* (defaults to ``lista``).

    Every value occurs exactly twice except one; pairs cancel out under XOR,
    so folding the sequence yields the lone value in O(N) time and O(1) space.
    The original definition was left syntactically incomplete.
    """
    if valores is None:
        valores = lista
    resultado = 0
    for numero in valores:
        resultado ^= numero
    return resultado
21,350 | 40f5f099711e854b00b1d312bbd96a6ebc1b922e | import unittest
import guessing_game
class TestGame(unittest.TestCase):
    """Unit tests for guessing_game.run_guess(answer, guess)."""

    def test_input(self):
        # A correct guess yields a truthy result.
        result = guessing_game.run_guess(5, 5)
        self.assertTrue(result)

    def test_input_wrong_guess(self):
        # A wrong (but in-range) guess yields a falsy result.
        result = guessing_game.run_guess(5, 0)
        self.assertFalse(result)

    def test_input_wrong_number(self):
        # A guess outside the valid range yields a falsy result.
        result = guessing_game.run_guess(5, 11)
        self.assertFalse(result)

    def test_input_wrong_number2(self):
        # A non-integer guess yields a falsy result.
        result = guessing_game.run_guess(5, '11')
        self.assertFalse(result)


if __name__ == '__main__':
    unittest.main()
|
21,351 | a7ca1b992e54c18bc51b09e4665c0011e4998825 | from six import add_metaclass
from spinn_utilities.abstract_base import AbstractBase
from spinn_utilities.abstract_base import abstractmethod
@add_metaclass(AbstractBase)
class AbstractHasProfileData(object):
    """ Indicates an object that can record a profile
    """

    @abstractmethod
    def get_profile_data(self, transceiver, placement):
        """ Get the profile data recorded during simulation

        :param transceiver: the transceiver used to read data from the machine
        :param placement: the placement of the vertex whose profile is wanted
        :rtype:\
            :py:class:`spinn_front_end_common.interface.profiling.profile_data.ProfileData`
        """
|
21,352 | c94ea67c97b20d1900fe63f2ebc757b31832ad6f | class Player:
"""
class representing the Player
"""
pass |
def mostraMenu():
    """Print the main menu banner and its numbered options."""
    linhas = (
        '-' * 30,
        f'{"MENU PRINCIPAL":^30}',
        '-' * 30,
        '\033[0;32m1 - \033[mVer pessoas cadastradas',
        '\033[0;32m2 - \033[mCadastrar novas pessoas',
        '\033[0;32m3 - \033[mSair do sistema',
        '-' * 30,
    )
    for linha in linhas:
        print(linha)
def selecionaMenu():
    """Keep prompting until the user types a valid option; return '1', '2' or '3'."""
    while True:
        op = input('\033[0;32mSua opção: \033[m').strip()
        if op in ('1', '2', '3'):
            return op
        print('\033[0;32mErro! Digite uma opção válida!\033[m')
def cadastro(prompt_nome='Nome: ', prompt_idade='Idade: '):
    """Register one person (name and age) in the cadastro.txt file.

    The prompts are parameters so the existing call site
    ``cadastro('Nome: ', 'Idade: ')`` works -- the original signature took no
    arguments and raised TypeError there, and its body called an undefined
    ``processar_arquivo`` helper.
    """
    nome = input(prompt_nome).strip()
    idade = input(prompt_idade).strip()
    try:
        # 'a' creates cadastro.txt on first use and appends afterwards.
        with open('cadastro.txt', 'a') as f:
            f.write('{};{}\n'.format(nome, idade))
    except IOError:
        print('Arquivo não encontrado!')
# Programa principal
# Main menu loop: repeat until the user picks option 3 (quit).
while True:
    mostraMenu()
    op = selecionaMenu()
    if op == '3':
        break
    elif op == '2':
        # NOTE(review): cadastro() is defined without parameters but is called
        # with two prompt strings here -- as written this raises TypeError;
        # confirm the intended signature.
        cadastro('Nome: ', 'Idade: ')
    elif op == '1':
        # NOTE(review): imprime() is not defined in this file -- presumably
        # defined/imported elsewhere; verify.
        imprime()
21,354 | c1410046e7fb9114887791dbc9e1baad6d8f6390 | import random
random.seed()
#numer indexu panstwa musi byc taki sam jak numer indexu stolicy
panstwa = ['Niemcy', 'Białorus', 'Litwa', 'Estonia', 'USA', 'Chiny', 'Australia', 'Hiszpania', 'Portugalia', 'Austria',
"Bangladesz", "Belgia", "Bośnia i Hercegowina", "Bułgaria", "Chorwacja", "Czechy", "Dania", "Egipt",
"Finlandia", "Francja", "Grecja", "Holandia", "Irlandia", "Japonia", "Kanada", "Korea Południowa", "Kuba",
"Kuwejt", "Liban", "Łotwa", "Macedonia", "Meksyk", "Monako", "Norwegia", "Panama", "Peru", "Rumunia",
"Senegal", "Serbia", "Singapur", "Słowacja", "Syria", "Szwajcaria", "Szwecja", "Tajlandia", "Tunezja",
"Turcja", "Ukraina", "Watykan", "Węgry", "Wielka Brytania", "Włochy"]
stolice = ['Berlin', 'Minsk', 'Wilno', 'Tallinn', 'Waszyngton', 'Pekin', 'Canberra', 'Madryt', 'Lizbona', 'Wieden',
"Dhaka", "Bruksela", "Sarajewo", "Sofia", "Zagrzeb", "Praga", "Kopenhaga", "Kair", "Helsinki", "Paryz",
"Ateny", "Amsterdam", "Dublin", "Tokio", "Ottawa", "Seul", "Hawana", "Kuwejt", "Bejrut", "Ryga", "Skopje",
"Meksyk", "Monako", "Oslo", "Panama", "Lima", "Bukareszt", "Dakar", "Belgrad", "Singapur", "Bratyslawa",
"Damaszek", "Berno", "Sztokholm", "Bangkok", "Tunis", "Ankara", "Kijow", "Watykan", "Budapeszt", "Londyn",
"Rzym"]
iloscPunktow = 0
liczbaPytan = 10
def odmianaPunkt(iloscPunktow):
    """Return the Polish declension of the word 'punkt' for a score.

    Mapping (identical to the original, with its duplicated range checks
    collapsed): absolute value 1 -> "punkt"; absolute value 2..4 ->
    "punkty"; 0 and any absolute value above 4 -> "punktów".
    """
    wartosc = abs(iloscPunktow)
    if wartosc == 1:
        return "punkt"
    if 2 <= wartosc <= 4:
        return "punkty"
    return "punktów"
# Main quiz loop: one iteration per question.
for numerPytania in range(1, liczbaPytan + 1):  # from question 1 up to liczbaPytan
    print("Pytanie", numerPytania, "/", liczbaPytan)
    index = random.randint(0, len(panstwa) - 1)  # random index into the countries list
    iloscPierwszychLiter = 1  # how many leading letters of the capital are revealed
    pierwszeLitery = stolice[index][:iloscPierwszychLiter]
    for iloscPowtorzen in range(1, 4):  # at most 3 wrong answers per question
        pierwszeLitery = stolice[index][:iloscPierwszychLiter]
        guess = input("Podaj stolice państwa " + panstwa[
            index] + ":" + "\n" + pierwszeLitery)  # finish typing the capital of the shown country
        if guess == stolice[index][iloscPierwszychLiter:]:  # the capital was guessed
            iloscPunktow = iloscPunktow + 4 - iloscPowtorzen  # max 3 points, on the first try
            print("Brawo masz", iloscPunktow,
                  odmianaPunkt(iloscPunktow))  # declines the word 'punkt' to match the score
            break
        else:  # the capital was not guessed: lose a point, reveal one more letter
            iloscPunktow -= 1
            iloscPierwszychLiter += 1
            if iloscPowtorzen < 3:
                print("jeszcze raz masz", iloscPunktow, odmianaPunkt(iloscPunktow))
            if iloscPowtorzen == 3:
                print("prawidlowa odpowiedz to", stolice[index], "masz", iloscPunktow, odmianaPunkt(iloscPunktow))
|
21,355 | 1449d259a4ebcc8dda24587b2943c8d496f45210 | #Solicitar números al usuario hasta que ingrese el cero. Por cada uno, mostrar la suma de sus dígitos.
# Al finalizar, mostrar la sumatoria de todos los números ingresados y la suma de sus dígitos.
# Reutilizar la misma función realizada en el ejercicio 2. este no es.
def sumaDigitos(numero):
    """Return the sum of the decimal digits of a non-negative integer."""
    total = 0
    while numero != 0:
        # Peel off the last digit and shrink the number in one step.
        numero, digito = divmod(numero, 10)
        total += digito
    return total
# Read numbers until the 0 sentinel; report each one's digit sum as we go,
# then the grand total and the digit sum of that total.
sumatoria=0
num=int(input("Número a procesar,ingrese 0 para terminar: "))
while num!=0:
    print("Suma:",sumaDigitos(num))
    sumatoria+=num
    num=int(input("Número a procesar,ingrese 0 para terminar: "))
print("Sumatoria de los numeros ingresados:", sumatoria)
print("Suma de los dígitos de la sumatoria:", sumaDigitos(sumatoria))
print('El proceso ha facilitado')
|
21,356 | 85f37248ed85488d41fa0e9b6dcd98e8064c9954 | # sastrim
# routines for trimming and processing scattering curves
import numpy as np
import copy as cp
def mask_data(data_to_mask, mask):
    """Keep only the (q, i) rows whose mask entry is non-zero.

    data_to_mask is a 2-column array (q, intensity); returns the filtered
    two-column array.
    """
    kept_q = np.extract(mask, data_to_mask[:, 0])
    kept_i = np.extract(mask, data_to_mask[:, 1])
    return np.column_stack((kept_q, kept_i))
def generate_mask(data_to_mask, mask_ranges):
    """Build a float mask (1.0 = keep, 0.0 = drop) from q-ranges to exclude.

    A point is masked out (0.0) when its q value (column 0 of data_to_mask)
    falls inside any [low, high] interval of mask_ranges, inclusive at both
    ends. Vectorized rewrite of the original per-element loop; also drops
    the needless deepcopy of the q column.
    """
    q = data_to_mask[:, 0]
    mask = np.ones_like(q)  # same float dtype as the q-copy the original returned
    for low, high in mask_ranges:
        mask[(q >= low) & (q <= high)] = 0.0
    return mask
|
21,357 | 6f91680445fc8e8389449176673c42dbd922f951 | # y = wx + b
from logging import WARNING
import tensorflow as tf
# TensorFlow 1.x linear regression y = Wx + b, trained by gradient descent.
tf.set_random_seed(77)
# x_train = [1, 2, 3] # w=1, b=0
# y_train = [1, 2, 3]
x_train = tf.compat.v1.placeholder(tf.float32, shape=[None])
y_train = tf.compat.v1.placeholder(tf.float32, shape=[None])
# W = tf.Variable(1, dtype=tf.float32) # initial value set randomly
# b = tf.Variable(1, dtype=tf.float32)
W = tf.Variable(tf.compat.v1.random_normal([1]), dtype=tf.float32)
b = tf.Variable(tf.compat.v1.random_normal([1]), dtype=tf.float32)
hypothesis = x_train * W + b # build the model
# f(x) = wx + b
loss = tf.reduce_mean(tf.square(hypothesis - y_train)) # mse
optimizer = tf.train.GradientDescentOptimizer(learning_rate=0.01)
train = optimizer.minimize(loss)
sess = tf.Session()
sess.run(tf.global_variables_initializer())
# Fit on the points x=[1,2,3], y=[3,5,7]; log every 20 steps.
for step in range(2001):
    # sess.run(train)
    _, loss_val, W_val, b_val = sess.run([train, loss, W, b], feed_dict={x_train:[1, 2, 3], y_train:[3, 5, 7]})
    if step % 20 == 0:
        # print(step, sess.run(loss), sess.run(W), sess.run(b))
        print(step, loss_val, W_val, b_val) #, W_val, b_val)
21,358 | d149e8673cb56279d4acc8a9b332e5319c7d1d71 | from django import forms
from .models import Profile
from django.contrib.auth.models import User
from django.contrib.auth.forms import UserCreationForm
class RegistrationForm(UserCreationForm):
    """Sign-up form: the stock UserCreationForm plus a required email field."""
    email = forms.EmailField(required=True)

    class Meta:
        model = User
        fields = [
            'username',
            'first_name',
            'last_name',
            'email',
            'password1',
            'password2'
        ]

    def __init__(self, *args, **kwargs):
        super(RegistrationForm, self).__init__(*args, **kwargs)
        # Drop Django's default verbose help texts on these fields.
        for fieldname in ['username', 'password1', 'password2']:
            self.fields[fieldname].help_text = None
class LoginForm(forms.ModelForm):
    """Login form rendering a username field and a masked password field.

    NOTE(review): building a login form as a ModelForm on User runs model
    (including unique) validation, which is unusual for authentication --
    presumably this form is used for rendering only; confirm.
    """
    class Meta:
        model = User
        fields = ['username', 'password']
        widgets = {
            'password': forms.PasswordInput(),
        }

    def __init__(self, *args, **kwargs):
        super(LoginForm, self).__init__(*args, **kwargs)
        # Hide the default username help text.
        self.fields['username'].help_text = None
class UserUpdateForm(forms.ModelForm):
    """Form for editing the basic account fields of an existing User."""
    class Meta:
        model = User
        fields = [
            'username',
            'first_name',
            'last_name',
            'email'
        ]

    def __init__(self, *args, **kwargs):
        super(UserUpdateForm, self).__init__(*args, **kwargs)
        # Hide the default username help text.
        self.fields['username'].help_text = None
class ProfileUpdateForm(forms.ModelForm):
    """Form for editing the extra Profile data (location and avatar)."""
    # Label is blanked so only the help text shows next to the upload widget.
    image = forms.ImageField(help_text='Please upload a profile picture', label = '')

    class Meta:
        model = Profile
        fields = [
            'city',
            'country',
            'image'
        ]
21,359 | 913291f3c4b409a442135725352a830c80c64d0e | # Use the direct and indirect perl scripts provided in the GLOE-pipe pipline
# to correct orrientation or aligned reads. Different orrientation is due
# to GLOE-seq protocol
# Re-orient aligned reads with the "direct" mode perl script; one BED of
# cleavage sites per (sample, region).
rule direct_mode:
    input:
        'output/{sample}/process_alignment/bed/sorted.trim.{region}.bed'
    output:
        sites=temp('output/{sample}/reorient_alignments/direct/trim.{region}.bed')
    params:
        index_dir='output/{sample}/direct'
    shell:'''
    module load perl
    mkdir -p {params.index_dir}
    perl scripts/direct_mode.pl {input} > {output.sites}
    '''

# Same as above but using the "indirect" mode perl script.
rule indirect_mode:
    input:
        'output/{sample}/process_alignment/bed/sorted.trim.{region}.bed'
    output:
        sites=temp('output/{sample}/reorient_alignments/indirect/trim.{region}.bed')
    params:
        index_dir='output/{sample}/indirect'
    shell:"""
    module load perl
    mkdir -p {params.index_dir}
    perl scripts/indirect_mode.pl {input} > {output.sites}
    """

# Drop records with a negative start coordinate (column 2).
rule get_second_column:
    input:
        'output/{sample}/reorient_alignments/{mode}/trim.{region}.bed'
    output:
        temp('output/{sample}/reorient_alignments/{mode}/sorted.col2.trim.{region}.bed')
    shell:"""
    awk '($2 >= 0)' {input} > {output}
    """

# Rewrite to 6-column BED, zeroing the score field (column 5).
rule perl_mode_big_awk:
    input:
        'output/{sample}/reorient_alignments/{mode}/sorted.col2.trim.{region}.bed'
    output:
        'output/{sample}/reorient_alignments/{mode}/bigawk.sorted.trim.{region}.bed'
    shell:"""
    awk '{{print $1 "\t" $2 "\t" $3 "\t" $4 "\t" 0 "\t" $6}}' {input} > {output}
    """

# Split records by strand symbol in the BED line.
rule seperate_forward_strand:
    input:
        'output/{sample}/reorient_alignments/{mode}/bigawk.sorted.trim.{region}.bed'
    output:
        'output/{sample}/reorient_alignments/{mode}/fwd/seperated.{region}.bed'
    shell:'''
    grep "+" {input} > {output}
    '''

rule seperate_reverse_strand:
    input:
        'output/{sample}/reorient_alignments/{mode}/bigawk.sorted.trim.{region}.bed'
    output:
        'output/{sample}/reorient_alignments/{mode}/rev/seperated.{region}.bed'
    shell:'''
    grep "-" {input} > {output}
    '''
# Report which of the planned dishes were never cooked on any day.
print('Новые блюда')
dishes = set()      # dishes the user plans to cook
day_dishes = set()  # every dish actually cooked, accumulated over all days
for i in range(int(input('Количество блюд: '))):
    dishes.add(input('Блюдо: '))
for i in range(int(input('Количество дней: '))):
    # Unused 'name_of_recipe' variable from the original removed.
    for j in range(int(input('Количество блюд: '))):
        day_dishes.add(input('Название блюда: '))
not_appearing_dishes = dishes - day_dishes
if not_appearing_dishes:
    for dish in not_appearing_dishes:
        print('Блюдо, которое ниразу не готовилось:', dish)
else:
    print('Все блюда были приготовлены')
21,361 | 4ec3edd5c01995ab9864c6330f81d4b499ec8e9d | # -*- coding:utf-8 -*-
"""
Created on Jan 6, 2012
@author: Oberron
"""
from distutils.core import setup
# Distutils package metadata for the pyICSParser iCalendar library.
setup(
    name = 'pyICSParser',
    version = '0.6.1y4',
    author = 'Oberron',
    author_email = 'one.annum@gmail.com',
    packages=['pyICSParser'],
    scripts=['rsc/utest/u_icalendar.py','rsc/utest/test_vect.py','rsc/utest/u_dateutil_rrul.py'],
    url = 'http://ical2list.appspot.com',
    license = 'LICENSE.txt',
    keywords = 'iCalendar ical ics parser validator generator events enumerator rfc5545 rfc2445 vcal',
    description='iCalendar parser, validator, generator, events enumerator',
    long_description='README.txt',
    # Bug fix: the original used the Windows-style path ".\src"; distutils
    # expects forward-slash paths, which also work on Windows.
    package_dir={'pyICSParser': 'src'},
    classifiers=[
        "Development Status :: 3 - Alpha",
        "Topic :: Utilities",
        "License :: OSI Approved :: BSD License",
    ],
)
|
21,362 | 89c88fd48b840b212e27e5be7d96737b8c6d4e03 | from pydantic.dataclasses import dataclass
@dataclass
class Foo:
    """Pydantic-validated dataclass with a single required int field."""
    foo: int
@dataclass(config={})
class Bar:
    """Pydantic dataclass built with an explicit (empty) config dict."""
    bar: str
|
21,363 | 4b39a5de102ee4b824b74ffe226df218b7340426 | import logging
from django.shortcuts import render
from .models import Page, Category, Pagelinks, Categorylinks
from .cache import cache
from django.core.paginator import EmptyPage, PageNotAnInteger, Paginator
import time
logger = logging.getLogger("info")
error_logger = logging.getLogger("django_error")
def handle_page_query(request):
    """Run a raw SQL query from the request against the Page table and render
    one page (50 rows) of results.

    Results are cached per (query, page-number) pair; on a database error the
    error message is shown instead of results.
    """
    start_time = time.time()
    query_text = request.POST['page']
    logger.info("Running query: {}".format(query_text))
    error_message = ""
    desc = ('page_id', 'page_title', 'page_is_new', 'page_links_updated', 'page_len')
    page = request.GET.get('page', 1)
    # Bug fix: entries are stored under query+page, but the original checked
    # the cache under the bare query text, so a hit could look up a
    # (query, page) entry that was never stored.
    cache_key = query_text + str(page)
    # Bug fix: initialise up front -- the original left 'results' unbound on
    # the exception path, raising UnboundLocalError at render time.
    results = []
    pages = []
    if cache.has_key(cache_key):
        logger.info('Data is fetched from cache')
        results, pages = cache.get(cache_key)
    else:
        try:
            logger.info('Data is fetched from database')
            page_objs = Page.objects.raw(query_text)
            paginator = Paginator(page_objs, 50)  # Show 50 contacts per page
            pages = paginator.get_page(page)
            results = [page_obj.content() for page_obj in pages]
            cache.put(cache_key, (results, pages))
            logger.info('Data stored in cache')
        except Exception as e:
            # Surface the database error in the template instead of crashing.
            error_logger.error(str(e))
            error_message = str(e)
            desc = []
    end_time = time.time()
    return render(request, "querymanager/results.html", {
        'results': results,
        "page": pages,
        'error_message': error_message,
        'query': query_text,
        'desc': desc,
        'time_taken': end_time - start_time,
        'name': 'page'
    })
def handle_category_query(request):
    """Run a raw SQL query against the Category table and render one page
    (50 rows) of results, with per-(query, page) caching."""
    start_time = time.time()
    query_text = request.POST['category']
    logger.info("Running query: {}".format(query_text))
    desc = ('cat_id', 'cat_title')
    error_message = ''
    page = request.GET.get('page', 1)
    # Bug fix: cache membership was checked under the bare query text while
    # entries are stored under query+page; use one consistent key.
    cache_key = query_text + str(page)
    # Bug fix: 'pages' was left unbound on the exception path in the original.
    results = []
    pages = []
    if cache.has_key(cache_key):
        results, pages = cache.get(cache_key)
    else:
        try:
            cat_objs = Category.objects.raw(query_text)
            paginator = Paginator(cat_objs, 50)  # Show 50 contacts per page
            pages = paginator.get_page(page)
            results = [page_obj.content() for page_obj in pages]
            cache.put(cache_key, (results, pages))
        except Exception as e:
            # Surface the database error in the template instead of crashing.
            error_logger.error(str(e))
            error_message = str(e)
            desc = []
    end_time = time.time()
    return render(request, "querymanager/results.html", {
        'results': results,
        "page": pages,
        'error_message': error_message,
        'query': query_text,
        'desc': desc,
        'time_taken': end_time - start_time,
        'name': 'category'
    })
def handle_pagelinks_query(request):
    """Run a raw SQL query against the Pagelinks table and render one page
    (50 rows) of results, with per-(query, page) caching."""
    start_time = time.time()
    query_text = request.POST['pl']
    logger.info("Running query: {}".format(query_text))
    desc = ('pl_from', 'pl_namespace', 'pl_title', 'pl_from_namespace')
    error_message = ''
    page = request.GET.get('page', 1)
    # Bug fix: cache membership was checked under the bare query text while
    # entries are stored under query+page; use one consistent key.
    cache_key = query_text + str(page)
    # Bug fix: 'pages' was left unbound on the exception path in the original.
    results = []
    pages = []
    if cache.has_key(cache_key):
        results, pages = cache.get(cache_key)
    else:
        try:
            pl_objs = Pagelinks.objects.raw(query_text)
            paginator = Paginator(pl_objs, 50)  # Show 50 contacts per page
            pages = paginator.get_page(page)
            results = [page_obj.content() for page_obj in pages]
            cache.put(cache_key, (results, pages))
        except Exception as e:
            # Surface the database error in the template instead of crashing.
            error_logger.error(str(e))
            error_message = str(e)
            desc = []
    end_time = time.time()
    return render(request, "querymanager/results.html", {
        'results': results,
        "page": pages,
        'error_message': error_message,
        'query': query_text,
        'desc': desc,
        'time_taken': end_time - start_time,
        'name': 'pl'
    })
def handle_categorylinks_query(request):
    """Run a raw SQL query against the Categorylinks table and render one page
    (50 rows) of results, with per-(query, page) caching."""
    start_time = time.time()
    query_text = request.POST['cl']
    logger.info("Running query: {}".format(query_text))
    # Bug fix: the original header tuple carried stray "self." prefixes
    # copied from the model, which rendered wrong column names.
    desc = ('cl_from', 'cl_to', 'cl_collation', 'cl_sortkey', 'cl_timestamp')
    error_message = ''
    page = request.GET.get('page', 1)
    # Bug fix: cache membership was checked under the bare query text while
    # entries are stored under query+page; use one consistent key.
    cache_key = query_text + str(page)
    # Bug fix: 'pages' was left unbound on the exception path in the original.
    results = []
    pages = []
    if cache.has_key(cache_key):
        results, pages = cache.get(cache_key)
    else:
        try:
            cl_objs = Categorylinks.objects.raw(query_text)
            paginator = Paginator(cl_objs, 50)  # Show 50 contacts per page
            pages = paginator.get_page(page)
            results = [page_obj.content() for page_obj in pages]
            cache.put(cache_key, (results, pages))
        except Exception as e:
            # Surface the database error in the template instead of crashing.
            error_logger.error(str(e))
            error_message = str(e)
            desc = []
    end_time = time.time()
    return render(request, "querymanager/results.html", {
        'results': results,
        "page": pages,
        'error_message': error_message,
        'query': query_text,
        'desc': desc,
        'time_taken': end_time - start_time,
        'name': 'cl'
    })
# which support tree-related question in leetcode
class Tree:
def __init__(self, x):
self.val = x
self.left = None
self.right = None
# print tree using level_order_traversal
def level_order_traversal(root):
    """Print and return the tree's values grouped level by level.

    Each inner list holds the node values of one depth level, top to
    bottom; an empty (None) tree yields an empty outer list.

    Fixes: the Python 2 statement ``print ans`` is a SyntaxError on
    Python 3 — replaced with the cross-version ``print(ans)`` — and the
    computed list is now also returned so callers can use it.
    """
    ans = []
    level = [root]
    while root and level:
        ans.append([i.val for i in level])
        level = [leaf for node in level for leaf in (node.left, node.right) if leaf]
    print(ans)
    return ans
if __name__ == '__main__':
    # Build a tiny sample tree and print it level by level.
    root = Tree(1)
    root.left = Tree(2)
    root.right = Tree(3)
    root.left.left = Tree(4)
    level_order_traversal(root)
|
21,365 | 9fa2e249465b5d988e422ae538a5197c25edb864 | from dcase2020.datasets.machine_sound_dataset import all_devtrain_machines, MachineSoundDataset
from dcase2020.config import DATA_ROOT
from dcase2020.datasets.preprocessing import baseline_preprocessing
import numpy as np
import matplotlib.pyplot as plt
import os
# Mel-spectrogram settings shared by every dataset instance below.
preprocessing_params = {
    "sr": 16000,
    "n_fft": 1024,
    "hop_length": 512,
    "power": 2.0,
    "n_mels": 128
}

# Flatten the dev-train machines into one (machine type, machine id) list;
# its length equals the sum of ids per type used by the original counter.
machine_pairs = [(m_type, m_id)
                 for m_type, m_ids in all_devtrain_machines.items()
                 for m_id in m_ids]
n_datasets = len(machine_pairs)

# One column of normalisation statistics per dataset.
ds_means = np.zeros((preprocessing_params['n_mels'], n_datasets))
ds_stds = np.zeros((preprocessing_params['n_mels'], n_datasets))
ds_lengths = []
ds_names = []
datasets = []

for col, (m_type, m_id) in enumerate(machine_pairs):
    ds = MachineSoundDataset(
        DATA_ROOT, m_type, m_id,
        preprocessing_fn=baseline_preprocessing,
        preprocessing_name="baseline_preprocessing",
        preprocessing_params=preprocessing_params,
        use_input_as_target=False,
        use_machine_id_as_target=False,
        use_machine_type_as_target=False,
        maximum_snippets="auto",
        frames_per_snippet=8,
        # return_meta=True,
        norm_std_mean=True
        # mean=mean, std=std
    )
    ds_means[:, col] = ds.mean
    ds_stds[:, col] = ds.std
    ds_names.append(m_type + "_" + m_id)
    ds_lengths.append(len(ds.files))
    datasets.append(ds)

## heatmap
# plt.imshow(ds_means)
# plt.colorbar()
# plt.show()

# Bar chart: number of audio files in each dataset.
positions = range(len(ds_lengths))
plt.bar(positions, ds_lengths)
plt.xticks(ticks=positions, labels=ds_names, rotation=90)
plt.show()

# Line plot: per-dataset mean over the mel-frequency bins (figure is saved
# before the x-label is added, exactly as in the original flow).
plt.figure(figsize=(15, 10))
plt.plot(ds_means)
plt.legend(ds_names)
plt.savefig(os.path.expanduser("~/Repos/DCASE_2020/visualisations/dataset_means.png"))
plt.xlabel("Frequency bins")
plt.show() |
21,366 | a5e36cbac511acffaa5acc77e2c15c4e45ce935a | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from io import BytesIO
from pyiceberg.avro.codecs.codec import Codec
try:
    from zstandard import ZstdCompressor, ZstdDecompressor

    class ZStandardCodec(Codec):
        """Zstandard codec backed by the optional ``zstandard`` package."""

        @staticmethod
        def compress(data: bytes) -> tuple[bytes, int]:
            """Compress *data*, returning (compressed bytes, compressed length)."""
            compressed_data = ZstdCompressor().compress(data)
            return compressed_data, len(compressed_data)

        @staticmethod
        def decompress(data: bytes) -> bytes:
            """Decompress a zstd stream whose decompressed size may be unknown."""
            uncompressed = bytearray()
            dctx = ZstdDecompressor()
            # Stream in fixed-size chunks because the frame header may not
            # declare the decompressed size up front.
            with dctx.stream_reader(BytesIO(data)) as reader:
                while True:
                    chunk = reader.read(16384)
                    if not chunk:
                        break
                    uncompressed.extend(chunk)
            # Bug fix: return bytes as the annotation promises; the original
            # leaked its internal (mutable, unhashable) bytearray to callers.
            return bytes(uncompressed)

except ImportError:

    class ZStandardCodec(Codec):  # type: ignore
        """Stub that fails loudly when the zstandard extra is not installed."""

        @staticmethod
        def compress(data: bytes) -> tuple[bytes, int]:
            raise ImportError("Zstandard support not installed, please install using `pip install pyiceberg[zstandard]`")

        @staticmethod
        def decompress(data: bytes) -> bytes:
            raise ImportError("Zstandard support not installed, please install using `pip install pyiceberg[zstandard]`")
|
21,367 | 059bc5e274c22c2ce6c05a26882c0c14b6bce872 | alien_0 = {'color': 'green', 'points': 5}
# Look up individual values by key.
print(alien_0['color'])
print(alien_0['points'])
# Walk key/value pairs together.
for trait, amount in alien_0.items():
    print("the alien's " + trait + " is " + str(amount))
# Overwrite an existing value in place.
alien_0['color'] = 'yellow'
print(alien_0)
# Grow a dictionary one key at a time.
alien_1 = {}
alien_1['color'] = 'blue'
alien_1['points'] = 25
print(alien_1)
# Drop a key again.
alien_1.pop('points')
print(alien_1)
# Iterating a dict (with or without .keys()) yields its keys.
for name in alien_0.keys():
    print(name)
for name in alien_0:
    print(name)
for amount in alien_0.values():
    print(amount)
# sorted() gives the keys in alphabetical order.
for name in sorted(alien_0):
    print(name)
# Membership test on keys.
print('points' not in alien_0)
# Dictionaries compose into lists...
aliens = [alien_0, alien_1]
for one_alien in aliens:
    print(one_alien)
# ...and nest inside other dictionaries.
alienDictionary = {'bob': {'color': 'red', 'points': 5}, 'tim': {'color': 'orange', 'points': 20}}
print(alienDictionary) |
21,368 | 1a2194d4961e4628bb683eb31e2cc1af0fc9ac8b | # Generated by Django 2.2 on 2021-05-25 10:44
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated schema migration: relaxes two fields on the Post model.

    dependencies = [
        ('communitymanager', '0003_auto_20210523_0112'),
    ]

    operations = [
        # Event date becomes optional (blank in forms, NULL in the database).
        migrations.AlterField(
            model_name='post',
            name='date_evenement',
            field=models.DateTimeField(blank=True, null=True, verbose_name="Date de l'evenement"),
        ),
        # Slug becomes max_length=100 and nullable.
        migrations.AlterField(
            model_name='post',
            name='slug',
            field=models.SlugField(max_length=100, null=True),
        ),
    ]
|
21,369 | c009d36af27aa3b0bbc53bafff5ab7111f79c9c3 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from collections import defaultdict
from datetime import datetime, date, time
import pytz
from odoo import api, fields, models, _
from odoo.exceptions import UserError
class HrPayslipEmployees(models.TransientModel):
    # Wizard: select employees and generate one attendance sheet per
    # employee for the attendance run the wizard was launched from.
    _name = 'attendance.employees.payslip'
    def _get_employees(self):
        # Default selection for the wizard.  NOTE(review):
        # _get_available_contracts_domain is not defined in this class —
        # presumably provided elsewhere (mixin/patch); verify.
        # YTI check dates too
        return self.env['hr.employee'].search(self._get_available_contracts_domain())
    # NOTE(review): the display label 'Employeess' is a typo ('Employees').
    employee_ids = fields.Many2many('hr.employee', 'hr_employee_group_rell', 'payslip_idd', 'employee_idd', 'Employeess',
                                    default=lambda self: self._get_employees(), required=True)
    def compute_sheet(self):
        # Create an attendance.sheet per selected employee, taking the
        # attendance policy from the employee's single open contract, then
        # move the run to the 'attendance_sheet' state.
        active_id = self.env.context.get('active_id')
        attendance_sheet_run = self.env['hr.attendance.run'].browse(active_id)
        for value in self.employee_ids:
            contract = self.env['hr.contract'].search([('employee_id','=',value.id),('state','=','open')],limit=1)
            policy=contract.att_policy_id.id
            x=self.env['attendance.sheet'].create({
                'employee_id':value.id,
                'date_from':attendance_sheet_run.date_start,
                'date_to':attendance_sheet_run.date_end,
                'att_policy_id':policy,
                'batattempid':attendance_sheet_run.id
            })
            x.onchange_employee()
            x.get_attendances()
        attendance_sheet_run.state="attendance_sheet"
class Batchatt(models.Model):
    # Extends attendance.sheet with a back-reference to the attendance run
    # (batch) that generated it; written by compute_sheet() above.
    _inherit="attendance.sheet"
    batattempid=fields.Many2one('hr.attendance.run')
|
21,370 | 5044a9e4ab2ea604c0fc44b5565961a64e6d952e | import datetime as dt
from marshmallow import Schema
from marshmallow import fields
from marshmallow import post_load
from api.db import Model
from api.db import fields
from api.schemas.organization import Organization
from guid import GUID
class User(Model):
    """Schema/model for an application user.

    NOTE(review): ``fields`` here is ``api.db.fields`` — the second import
    shadows ``marshmallow.fields`` — presumably a compatible wrapper; verify.
    """
    name = fields.Str(required=True)
    email = fields.Email(default='example@example.com')
    # Bug fix: pass the callable itself, not its result.  The original
    # called utcnow() once at import time, so every object loaded for the
    # lifetime of the process was stamped with the process start time
    # instead of the actual load time.
    joined = fields.DateTime(missing=dt.datetime.utcnow)
    org = fields.Nested(Organization)
|
21,371 | ac8688fde40d305bd1a9fe3eb0d0c4e22860f42f | import tensorflow as tf
from keras.callbacks import TensorBoard
from src.neural_network_datasets import MNISTDataset
import matplotlib.pyplot as plt
from keras import losses, optimizers, metrics
from keras.models import Sequential
from keras.layers import Dense, Dropout, Flatten
from keras.layers import Conv2D, MaxPooling2D
from keras import backend as K
import numpy as np
from numpy.random import seed
seed(1)
from tensorflow import set_random_seed
set_random_seed(2)
# Flattened 28x28 MNIST images and 10 digit classes.
input_shape = (784,)
num_classes = 10
n_epochs = 10
batch_size = 50
# Center
# Centred-digit variant: train/test/validation splits plus one-hot labels.
centered_data_dir = "R:/Research Common/EBL/Researcher/Mohsen/Class/Fall 2018/Neural Network/project02/data/centered_data/"
centered_mnist_dataset = MNISTDataset(dataset_dir= centered_data_dir)
centered_train_data, centered_train_labels = centered_mnist_dataset.load_train_dataset()
centered_test_data, centered_test_labels = centered_mnist_dataset.load_test_dataset()
centered_validation_data, centered_validation_labels = centered_mnist_dataset.load_validation_dataset()
one_hotted_centered_train_labels = centered_mnist_dataset.one_hot_encode_dataset(centered_train_labels)
one_hotted_centered_test_labels = centered_mnist_dataset.one_hot_encode_dataset(centered_test_labels)
one_hotted_centered_validation_labels = centered_mnist_dataset.one_hot_encode_dataset(centered_validation_labels)
# UN-Center
# Uncentered variant; np.squeeze drops a singleton axis the loader adds
# for this dataset (the centered loader apparently does not).
uncentered_data_dir = "R:/Research Common/EBL/Researcher/Mohsen/Class/Fall 2018/Neural Network/project02/data/uncentered_data/"
uncentered_mnist_dataset = MNISTDataset(dataset_dir= uncentered_data_dir)
uncentered_train_data, uncentered_train_labels = uncentered_mnist_dataset.load_train_dataset()
uncentered_test_data, uncentered_test_labels = uncentered_mnist_dataset.load_test_dataset()
uncentered_validation_data, uncentered_validation_labels = uncentered_mnist_dataset.load_validation_dataset()
uncentered_train_data = np.squeeze(uncentered_train_data)
uncentered_test_data = np.squeeze(uncentered_test_data)
uncentered_validation_data = np.squeeze(uncentered_validation_data)
one_hotted_uncentered_train_labels = uncentered_mnist_dataset.one_hot_encode_dataset(uncentered_train_labels)
one_hotted_uncentered_test_labels = uncentered_mnist_dataset.one_hot_encode_dataset(uncentered_test_labels)
one_hotted_uncentered_validation_labels = uncentered_mnist_dataset.one_hot_encode_dataset(uncentered_validation_labels)
# combine Center-UnCenter
# Stack centered data first, uncentered second, for each split and for the
# matching one-hot label matrices.
centered_uncentered_train_data = np.vstack((centered_train_data, uncentered_train_data))
centered_uncentered_test_data = np.vstack((centered_test_data, uncentered_test_data))
centered_uncentered_validation_data = np.vstack((centered_validation_data, uncentered_validation_data))
one_hotted_centered_uncentered_train_labels = np.vstack((one_hotted_centered_train_labels, one_hotted_uncentered_train_labels))
one_hotted_centered_uncentered_test_labels = np.vstack((one_hotted_centered_test_labels, one_hotted_uncentered_test_labels))
one_hotted_centered_uncentered_validation_labels = np.vstack((one_hotted_centered_validation_labels, one_hotted_uncentered_validation_labels))
# One random permutation per split, applied to the data AND the labels so
# sample/label pairs stay aligned after shuffling.
shuffle_index_train = np.arange(centered_uncentered_train_data.shape[0])
shuffle_index_test = np.arange(centered_uncentered_test_data.shape[0])
shuffle_index_validation = np.arange(centered_uncentered_validation_data.shape[0])
np.random.shuffle(shuffle_index_train)
np.random.shuffle(shuffle_index_test)
np.random.shuffle(shuffle_index_validation)
centered_uncentered_train_data = centered_uncentered_train_data[shuffle_index_train]
centered_uncentered_test_data = centered_uncentered_test_data[shuffle_index_test]
centered_uncentered_validation_data = centered_uncentered_validation_data[shuffle_index_validation]
one_hotted_centered_uncentered_train_labels = one_hotted_centered_uncentered_train_labels[shuffle_index_train]
one_hotted_centered_uncentered_test_labels = one_hotted_centered_uncentered_test_labels[shuffle_index_test]
one_hotted_centered_uncentered_validation_labels = one_hotted_centered_uncentered_validation_labels[shuffle_index_validation]
# Position and Size Invariance
# NOTE(review): this split is loaded but never fed to a model in this file —
# confirm whether it is used elsewhere or is dead setup code.
psi_data_dir = "R:/Research Common/EBL/Researcher/Mohsen/Class/Fall 2018/Neural Network/project02/data/position_and_size_invariance/"
psi_mnist_dataset = MNISTDataset(dataset_dir= psi_data_dir)
psi_train_data, psi_train_labels = psi_mnist_dataset.load_train_dataset()
psi_test_data, psi_test_labels = psi_mnist_dataset.load_test_dataset()
# psi_validation_data, psi_validation_labels = psi_mnist_dataset.load_validation_dataset()
psi_train_data = np.squeeze(psi_train_data)
psi_test_data = np.squeeze(psi_test_data)
# psi_validation_data = np.squeeze(psi_validation_data)
one_hotted_psi_train_labels = psi_mnist_dataset.one_hot_encode_dataset(psi_train_labels)
one_hotted_psi_test_labels = psi_mnist_dataset.one_hot_encode_dataset(psi_test_labels)
# one_hotted_psi_validation_labels = psi_mnist_dataset.one_hot_encode_dataset(psi_validation_labels)
############################################
# main model
# A single 784-100-10 dense network reused for all three experiments below.
# NOTE(review): the SAME model instance is fitted three times without
# re-initialising its weights, so each later experiment continues training
# from the previous one's weights — confirm this is intended.
model = Sequential()
model.add(Dense(100, activation='relu', input_shape=input_shape))
model.add(Dense(10, activation='softmax'))
model.compile(loss=losses.categorical_crossentropy,
              optimizer=optimizers.Adadelta(),
              metrics=[metrics.categorical_accuracy, metrics.mean_squared_error])
# Center model
# Train and evaluate entirely on the centered split.
tensorboard = TensorBoard(log_dir="logs_three_layer_centered")
model.fit(centered_train_data, one_hotted_centered_train_labels,
          batch_size=batch_size,
          epochs=n_epochs,
          verbose=1,
          validation_data=(centered_validation_data, one_hotted_centered_validation_labels),
          callbacks=[tensorboard])
score = model.evaluate(centered_test_data, one_hotted_centered_test_labels, verbose=0)
print('loss:', score[0], 'categorical_accuracy:', score[1])
# tensorboard --logdir=foo:C:\Users\Mohsen.SharifiRenani\Desktop\pycharmprojects\neuralnetwork3\logs_three_layer_centered
# Un Centered Model
# NOTE(review): this section is labelled "Un Centered" but fits on
# centered_train_data while validating/testing on the uncentered split —
# possibly deliberate (probing translation invariance), possibly a
# copy-paste slip; confirm before trusting these numbers.
tensorboard = TensorBoard(log_dir="logs_three_layer_uncentered")
model.fit(centered_train_data, one_hotted_centered_train_labels,
          batch_size=batch_size,
          epochs=n_epochs,
          verbose=1,
          validation_data=(uncentered_validation_data, one_hotted_uncentered_validation_labels),
          callbacks=[tensorboard])
score = model.evaluate(uncentered_test_data, one_hotted_uncentered_test_labels, verbose=0)
print('loss:', score[0], 'categorical_accuracy:', score[1])
# tensorboard --logdir=foo:C:\Users\Mohsen.SharifiRenani\Desktop\pycharmprojects\neuralnetwork3\logs_three_layer_uncentered
# Centered_Un Centered Model
# Train and evaluate on the shuffled union of both variants.
tensorboard = TensorBoard(log_dir="logs_three_layer_center_uncentered")
model.fit(centered_uncentered_train_data, one_hotted_centered_uncentered_train_labels,
          batch_size=batch_size,
          epochs=n_epochs,
          verbose=1,
          validation_data=(centered_uncentered_validation_data, one_hotted_centered_uncentered_validation_labels),
          callbacks=[tensorboard])
score = model.evaluate(centered_uncentered_test_data, one_hotted_centered_uncentered_test_labels, verbose=0)
print('loss:', score[0], 'categorical_accuracy:', score[1])
# tensorboard --logdir=foo:C:\Users\Mohsen.SharifiRenani\Desktop\pycharmprojects\neuralnetwork3\logs_three_layer_center_uncentered
|
21,372 | ff847a7ba93e935ee3edcfd633b877f07d0346ed | #!/usr/bin/python
# encoding: utf-8
import re
def getline(file):
    """Yield cleaned logical lines from a SPICE netlist file.

    The first physical line (the title line) is skipped.  A line starting
    with '+' is a continuation and is folded into the pending line; a
    pending line ending in a two-character double-backslash marker is
    likewise joined with the next line.  Because joining needs one line of
    look-ahead, each completed logical line is yielded one iteration late.
    '.end' / '.alter*' yield and then terminate the generator; other
    dot-commands (except .sub*, .ends*, .global*) are replaced by an empty
    yield so downstream consumers stay in step.
    """
    with open(file, 'r') as f:
        lastline = ''
        for nextline in f.readlines()[1:]:
            nextline = cleanline(nextline)
            if nextline == '':
                continue
            if nextline[0] == '+':
                # '+' continuation: append to the pending logical line.
                lastline += ' ' + nextline[1:]
                lastline = cleanline(lastline)
            elif lastline[-2:] == '\\\\':
                # Pending line ends with '\\': splice the next line on.
                lastline = lastline[:-2] + nextline
                lastline = cleanline(lastline)
            else:
                # nextline starts a new logical line, so the previous one
                # (tmp) is complete and can be emitted.
                tmp, lastline = lastline, nextline
                if tmp == '' or tmp[:4] == '.sub' or tmp[:5] == '.ends' or tmp[:7] == '.global' or tmp[0] != '.':
                    yield tmp
                elif tmp == '.end' or tmp[:6] == '.alter' :
                    yield tmp
                    return
                else:
                    # Any other dot-command is suppressed.
                    yield ''
# remove the comments and un-necessary space or tabs
# remove the comments and un-necessary space or tabs
def cleanline(line, raw=False):
    """Normalise one netlist line.

    Tabs become spaces, a literal trailing "^M" marker and comments
    (whole-line '*'/'$' lines, trailing " $..." tails) are stripped, '='
    is padded with spaces, and runs of blanks collapse to one.  The result
    is lower-cased unless *raw* is true.
    """
    cleaned = line.replace('\t', ' ').strip()
    # Drop a literal two-character "^M" at end of line (CR rendered as text).
    cleaned = re.sub(r'\^M$', '', cleaned)
    # Whole-line comments start with '*' or '$'.
    cleaned = re.sub(r'^[\*\$].*$', '', cleaned)
    # Inline comments: everything from " $" to end of line.
    cleaned = re.sub(r'\ \$.*$', '', cleaned)
    cleaned = cleaned.replace('=', ' = ')
    cleaned = re.sub(r'\ +', ' ', cleaned).strip()
    if raw:
        return cleaned
    return cleaned.lower()
def gettokenlist(line):
    """Return the tokens before the first '=' (minus the parameter name
    immediately preceding it), or every token when the line has no '='."""
    tokens = line.split(' ')
    if '=' not in tokens:
        return list(tokens)
    cut = tokens.index('=')
    kept = tokens[:cut]
    # Drop the parameter name that sits directly before the '='.
    kept.pop()
    return kept
|
21,373 | 9d99d894ca83c997254a63f2e8b871e796d4ec66 | import numpy as np
import scipy.sparse as sp
from sklearn.base import BaseEstimator
from sklearn.base import ClassifierMixin
from sklearn.base import RegressorMixin
from sklearn import cross_validation
from sklearn.utils import check_array, as_float_array
from pyfm_fast import FM_fast, CSRDataset
LEARNING_RATE_TYPES = {"optimal": 0, "invscaling": 1, "constant": 2}
TASKS = {"regression": 0, "classification" : 1}
class BaseFM(BaseEstimator):
    """Factorization machine fitted by minimizing a regularized empirical loss with adaptive SGD.
    Parameters
    ----------
    num_factors : int
        The dimensionality of the factorized 2-way interactions
    num_iter : int
        Number of iterations
    k0 : bool
        Use bias. Defaults to true.
    k1 : bool
        Use 1-way interactions (learn feature weights).
        Defaults to true.
    init_stdev : double, optional
        Standard deviation for initialization of 2-way factors.
        Defaults to 0.01.
    validation_size : double, optional
        Proportion of the training set to use for validation.
        Defaults to 0.01.
    learning_rate_schedule : string, optional
        The learning rate:
        constant: eta = eta0
        optimal: eta = 1.0/(t+t0) [default]
        invscaling: eta = eta0 / pow(t, power_t)
    eta0 : double
        Defaults to 0.01
    power_t : double
        The exponent for inverse scaling learning rate [default 0.5].
    t0 : double
        Constant in the denominator for optimal learning rate schedule.
        Defaults to 0.001.
    task : string
        regression: Labels are real values.
        classification: Labels are either positive or negative.
    verbose : bool
        Whether or not to print current iteration, training error
    shuffle_training: bool
        Whether or not to shuffle training dataset before learning
    seed : int
        The seed of the pseudo random number generator
    """
    def __init__(self,
                 num_factors=10,
                 num_iter=1,
                 k0=True,
                 k1=True,
                 init_stdev=0.1,
                 validation_size=0.01,
                 learning_rate_schedule="optimal",
                 eta0=0.01,
                 power_t=0.5,
                 t0=0.001,
                 task='classification',
                 verbose=True,
                 shuffle_training=True,
                 seed = 28):
        self.num_factors = num_factors
        self.num_iter = num_iter
        # Running sums used by the cython core for the 2-way term.
        self.sum = np.zeros(self.num_factors)
        self.sum_sqr = np.zeros(self.num_factors)
        self.k0 = k0
        self.k1 = k1
        self.init_stdev = init_stdev
        self.validation_size = validation_size
        self.task = task
        self.shuffle_training = shuffle_training
        self.seed = seed
        # Learning rate Parameters
        self.learning_rate_schedule = learning_rate_schedule
        self.eta0 = eta0
        self.power_t = power_t
        self.t0 = t0
        self.verbose = verbose
    def _validate_params(self):
        """Validate input params. """
        if not isinstance(self.shuffle_training, bool):
            raise ValueError("shuffle must be either True or False")
        if self.num_iter <= 0:
            raise ValueError("num_iter must be > zero")
        if self.learning_rate_schedule in ("constant", "invscaling"):
            if self.eta0 <= 0.0:
                raise ValueError("eta0 must be > 0")
        self.num_factors = int(self.num_factors)
        self.num_iter = int(self.num_iter)
        self.t0 = float(self.t0)
        self.power_t = float(self.power_t)
        try:
            self.eta0 = float(self.eta0)
        except TypeError:
            raise ValueError('eta0 expected float but got %r' % self.eta0)
    def _get_learning_rate_type(self, learning_rate):
        """Map learning rate string to int for cython"""
        try:
            return LEARNING_RATE_TYPES[learning_rate]
        except KeyError:
            raise ValueError("learning rate %s "
                             "is not supported. " % learning_rate)
    def _get_task(self, task):
        """Map task string to int for cython"""
        try:
            return TASKS[task]
        except KeyError:
            raise ValueError("task %s "
                             "is not supported. " % task)
    def _bool_to_int(self, bool_arg):
        """Map bool to int for cython"""
        if bool_arg == True:
            return 1
        else:
            return 0
    def _prepare_y(self, y):
        """Maps labels to [-1, 1] space"""
        y_i = np.ones(y.shape, dtype=np.float64, order="C")
        y_i[y != 1] = -1.0
        return y_i
    def fit(self, X, y):
        """Fit factorization machine using Stochastic Gradient Descent with Adaptive Regularization.
        Parameters
        ----------
        X : {array-like, sparse matrix}, shape = [n_samples, n_features]
            Training data
        y : numpy array of shape [n_samples]
            Target values
        Returns
        -------
        self : returns an instance of self.
        """
        # X = as_float_array(X)
        # X, y = check_array(X, dtype=np.float64)
        if not isinstance(X, sp.csr_matrix):
            X = sp.csr_matrix(X)
        self._validate_params()
        self.t_ = 1.0
        self.max_target_ = y.max()
        self.min_target_ = y.min()
        # convert member variables to ints for use in cython
        k0 = self._bool_to_int(self.k0)
        k1 = self._bool_to_int(self.k1)
        shuffle_training = self._bool_to_int(self.shuffle_training)
        verbose = self._bool_to_int(self.verbose)
        learning_rate_schedule = self._get_learning_rate_type(self.learning_rate_schedule)
        task = self._get_task(self.task)
        # use sklearn to create a validation dataset for lambda updates
        if self.verbose:
            print("Creating validation dataset of %.2f of training for adaptive regularization"
                  % self.validation_size)
        X_train, validation, train_labels, validation_labels = cross_validation.train_test_split(
            X, y, test_size=self.validation_size, random_state=self.seed)
        self.n_features_ = X_train.shape[1]
        # Convert datasets to sklearn sequential datasets for fast traversal
        X_train_dataset = _make_dataset(X_train, train_labels)
        validation_dataset = _make_dataset(validation, validation_labels)
        # Set up params
        self.w0 = 0.0
        self.w = np.zeros(self.n_features_, dtype=np.float64)
        rng = np.random.RandomState(self.seed)
        self.v = rng.normal(scale=self.init_stdev,
                            size=(self.num_factors, self.n_features_)).astype(np.float64)
        self.fm_fast = FM_fast(self.w,
                               self.v,
                               self.num_factors,
                               self.n_features_,
                               self.num_iter,
                               k0,
                               k1,
                               self.w0,
                               self.t_,
                               self.t0,
                               self.power_t,
                               self.min_target_,
                               self.max_target_,
                               self.eta0,
                               learning_rate_schedule,
                               shuffle_training,
                               task,
                               self.seed,
                               verbose)
        self.fm_fast.fit(X_train_dataset, validation_dataset)
        return self
    def predict(self, X):
        """Predict using the factorization machine
        Parameters
        ----------
        X : array-like, shape = [n_samples, n_features]
        Returns
        -------
        float if X is one instance
        array, shape = [n_samples] if X is sparse matrix
        Predicted target values per element in X.
        """
        # Bug fix: the original called ``check_arrays`` (never imported here
        # and removed from sklearn.utils long ago), raising a NameError on
        # every call.  ``check_array`` with accept_sparse is the supported
        # equivalent and is already imported at module level.
        X = check_array(X, accept_sparse='csr', dtype=np.float64)
        if not sp.issparse(X):
            X = sp.csr_matrix(X)
        sparse_X = _make_dataset(X, np.ones(X.shape[0]))
        return self.fm_fast._predict(sparse_X)
class FMClassifier(BaseFM, ClassifierMixin):
    """Factorization machine fitted by minimizing a regularized empirical loss with adaptive SGD.
    Parameters
    ----------
    num_factors : int
        The dimensionality of the factorized 2-way interactions
    num_iter : int
        Number of iterations
    k0 : bool
        Use bias. Defaults to true.
    k1 : bool
        Use 1-way interactions (learn feature weights).
        Defaults to true.
    init_stdev : double, optional
        Standard deviation for initialization of 2-way factors.
        Defaults to 0.01.
    validation_size : double, optional
        Proportion of the training set to use for validation.
        Defaults to 0.01.
    learning_rate_schedule : string, optional
        The learning rate:
        constant: eta = eta0
        optimal: eta = 1.0/(t+t0) [default]
        invscaling: eta = eta0 / pow(t, power_t)
    eta0 : double
        Defaults to 0.01
    power_t : double
        The exponent for inverse scaling learning rate [default 0.5].
    t0 : double
        Constant in the denominator for optimal learning rate schedule.
        Defaults to 0.001.
    verbose : bool
        Whether or not to print current iteration, training error
    shuffle_training: bool
        Whether or not to shuffle training dataset before learning
    seed : int
        The seed of the pseudo random number generator
    """
    def __init__(self,
                 num_factors=10,
                 num_iter=1,
                 k0=True,
                 k1=True,
                 init_stdev=0.1,
                 validation_size=0.01,
                 learning_rate_schedule="optimal",
                 eta0=0.01,
                 power_t=0.5,
                 t0=0.001,
                 task='classification',
                 verbose=True,
                 shuffle_training=True,
                 seed = 28):
        super(FMClassifier, self).__init__(num_factors=num_factors,
                                           num_iter=num_iter,
                                           k0=k0, k1=k1,
                                           init_stdev=init_stdev,
                                           validation_size=validation_size,
                                           learning_rate_schedule=learning_rate_schedule,
                                           eta0=eta0,
                                           power_t=power_t,
                                           t0=t0,
                                           task='classification',
                                           verbose=verbose,
                                           shuffle_training=shuffle_training,
                                           seed=seed)
    def fit(self, X, y):
        """Fit the binary classifier; labels are mapped to {-1, +1} internally."""
        self.classes_, y = np.unique(y, return_inverse=True)
        if len(self.classes_) != 2:
            raise ValueError('FMClassifier only supports binary classification')
        # Map the 0/1 class indices onto {-1, +1} as the cython core expects.
        y = (y * 2) - 1
        super(FMClassifier, self).fit(X, y)
        return self
    def predict(self, X):
        """Return the predicted class label for each sample in X."""
        scores = super(FMClassifier, self).predict(X)
        # Bug fix: ``np.int`` was deprecated in NumPy 1.20 and removed in
        # 1.24; the builtin ``int`` is the documented replacement.
        pred = self.classes_[(scores >= 0.5).astype(int)]
        return pred
    def predict_proba(self, X):
        """Predict probabilities using the factorization machine
        Parameters
        ----------
        X : sparse matrix, shape = [n_samples, n_features]
        or
        X : single instance [1, n_features]
        Returns
        -------
        float if X is one instance
        array, shape = [n_samples] if X is sparse matrix
        Predicted target values per element in X.
        """
        proba = np.ones((X.shape[0], 2), dtype=np.float64)
        proba[:, 1] = super(FMClassifier, self).predict(X)
        proba[:, 0] -= proba[:, 1]
        return proba
class FMRegressor(BaseFM, RegressorMixin):
    """Factorization machine for regression, trained with adaptive SGD.

    Thin convenience wrapper around ``BaseFM`` that pins ``task`` to
    ``'regression'``.

    Parameters
    ----------
    num_factors : int
        Dimensionality of the factorized 2-way interactions.
    num_iter : int
        Number of training iterations.
    k0, k1 : bool
        Whether to fit the global bias / the 1-way feature weights.
    init_stdev : double
        Standard deviation used to initialise the 2-way factors.
    validation_size : double
        Fraction of the training set held out for adaptive regularization.
    learning_rate_schedule : string
        One of ``constant``, ``optimal`` (default) or ``invscaling``.
    eta0, power_t, t0 : double
        Learning-rate constants (see ``BaseFM``).
    verbose : bool
        Print progress during training.
    shuffle_training : bool
        Shuffle the training set before learning.
    seed : int
        Seed of the pseudo random number generator.
    """

    def __init__(self,
                 num_factors=10,
                 num_iter=1,
                 k0=True,
                 k1=True,
                 init_stdev=0.1,
                 validation_size=0.01,
                 learning_rate_schedule="optimal",
                 eta0=0.01,
                 power_t=0.5,
                 t0=0.001,
                 verbose=True,
                 shuffle_training=True,
                 seed=28):
        # Forward everything to BaseFM, forcing the regression task.
        base_kwargs = dict(
            num_factors=num_factors,
            num_iter=num_iter,
            k0=k0,
            k1=k1,
            init_stdev=init_stdev,
            validation_size=validation_size,
            learning_rate_schedule=learning_rate_schedule,
            eta0=eta0,
            power_t=power_t,
            t0=t0,
            task='regression',
            verbose=verbose,
            shuffle_training=shuffle_training,
            seed=seed)
        super(FMRegressor, self).__init__(**base_kwargs)
def _make_dataset(X, y_i):
"""Create ``Dataset`` abstraction for sparse and dense inputs."""
assert isinstance(X, sp.csr_matrix)
assert X.dtype == np.float64
# ignore sample weight for the moment
sample_weight = np.ones(X.shape[0], dtype=np.float64, order='C')
dataset = CSRDataset(X.data, X.indptr, X.indices, y_i, sample_weight)
return dataset
|
21,374 | a575dd7a384332c96de909a08fb7348302cb2ada | # Generated by Django 3.1.6 on 2021-03-09 17:58
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('dashboard', '0004_auto_20210306_1743'),
('dashboard', '0022_auto_20210308_2144'),
]
operations = [
]
|
21,375 | ddf3f5fd86178e2f3014ca949ea444e0bc2076dc | from . import action
from . import attachment
from . import block
from . import response
# Names re-exported when callers do ``from <this package> import *`` —
# the four submodules imported above.
__all__ = [
    "action",
    "attachment",
    "block",
    "response",
]
|
21,376 | 950b8d8c51358e9ea135d88de196caff008ca63d | from django.contrib import admin
from .models import Major,Student
class MajorAdmin(admin.ModelAdmin):
    # Admin configuration for the Major model.
    # Columns shown on the change-list page.
    list_display=['pk','major','num_of_women','num_of_men','isDelete','school']
    # Sidebar filter and search box both key on the major name.
    list_filter=['major']
    search_fields=['major']
    list_per_page=7
    # Edit-form layout: two named groups of fields.
    fieldsets=[
        ("base",{"fields":['major','isDelete',]}),
        ("num",{"fields":['num_of_women','num_of_men']})
    ]
admin.site.register(Major,MajorAdmin)
class StudentsAdmin(admin.ModelAdmin):
    # Admin configuration for the Student model.
    def gender(self):
        # Custom list_display column rendering the boolean gender flag as a
        # label.  NOTE(review): referenced bare in list_display below, so
        # Django calls it with the Student row as its argument — ``self``
        # here is the model instance, not the admin; verify.
        if self.gender:
            return"男"
        else:
            return"女"
    list_display=['sid','student_name','major',gender,'school','isDelete']
    list_filter=['student_name']
    search_fields=['student_name']
    list_per_page=6
admin.site.register(Student,StudentsAdmin)
|
21,377 | 050c93c94e4e2195c98ce7c601a22585a18dff78 | import os
import keras
import numpy as np
from sklearn import preprocessing
from sklearn.model_selection import train_test_split
from lang_config import *
def count_chars(text):
    """Count occurrences of each alphabet character in *text*.

    Returns one counter per character of the configured ``alphabet``;
    characters missing from ``char_index`` are ignored.
    """
    counts = [0] * len(alphabet)
    for ch in text:
        pos = char_index.get(ch, -1)
        if pos >= 0:
            counts[pos] += 1
    return counts
def get_sample(file_content, start_index, sample_size):
    """Cut a word-aligned sample of at most *sample_size* characters from
    *file_content*, starting at or after *start_index*.

    The sample begins at the first full word at or after *start_index*
    and is trimmed back so it never ends mid-word.
    """
    pos = start_index
    # Skip the tail of a word we may have landed inside.
    while not file_content[pos].isspace():
        pos += 1
    # Advance to the first character of the next word.
    while file_content[pos].isspace():
        pos += 1
    stop = min(len(file_content) - 1, pos + sample_size)
    # Pull back so the last word is not cut in half.
    while not file_content[stop].isspace():
        stop -= 1
    return file_content[pos:stop]
def build_input_vector(sample_text):
    """Turn a text sample into the NN input vector.

    Currently this is the vector of case-folded letter counts.
    """
    lowered = sample_text.lower()
    return count_chars(lowered)
# Dimensionality of the NN input vector, derived once from an empty sample.
vector_size = len(build_input_vector(""))
def create_sample_vectors(cleaned_data_directory, out_vectors_path):
    """
    Generates input vectors for the NN.

    Walks every ``*.txt`` file in *cleaned_data_directory* (the first two
    characters of each file name encode the language), cuts its text into
    word-aligned samples of ``text_sample_size`` characters, converts each
    sample into a letter-count vector with the language index appended,
    shuffles all vectors and saves them compressed to *out_vectors_path*.
    """
    vectors = []
    for filename in os.listdir(cleaned_data_directory):
        if not filename.endswith(".txt"):
            continue
        path = os.path.join(cleaned_data_directory, filename)
        print("Processing", path)
        # File names start with the two-letter language code.
        lang = filename[:2]
        lang_number = language_codes.index(lang)
        print(f"\tLanguage: {lang} ({lang_number})")
        print("\tReading...", end=' ')
        # Bug fix: the original opened the file without ever closing it,
        # leaking one file handle per processed file; use a context manager.
        with open(path, mode='r', encoding='utf8') as f:
            file_content = f.read()
        content_length = len(file_content)
        print("done.")
        print("\tExtracting vectors...", end=' ')
        sample_start_index = 0
        count = 0
        while sample_start_index + text_sample_size < content_length:
            sample = get_sample(file_content, sample_start_index, text_sample_size)
            input_vector = build_input_vector(sample)
            vector = input_vector + [lang_number]
            vectors.append(vector)
            sample_start_index += text_sample_size
            count += 1
        print("done.")
        print(f"\tExtracted {count} vectors.")
        # Free the (potentially large) corpus before the next file.
        del file_content
    print(f"Total {len(vectors)} vectors.")
    np_vectors = np.array(vectors, dtype=np.uint16)
    np.random.shuffle(np_vectors)
    print(f"Converted to NumPy array, shape: {np_vectors.shape}.")
    np.savez_compressed(out_vectors_path, data=np_vectors)
    print(f"Saved to {out_vectors_path}.")
def size_kb(path):
    """Return the size of *path* in kilobytes as a two-decimal string."""
    return '{:.2f}'.format(os.path.getsize(path) / 1000.0)
def gen_train_test(vectors_path, out_train_test_path):
    """
    Generates train/test data with preprocessing.

    Loads the sample vectors from *vectors_path*, splits off features and
    labels, one-hot encodes the labels, performs an 80/20 train/test split
    and standard-scales the features before saving everything compressed
    to *out_train_test_path*.
    """
    data = np.load(vectors_path)['data']
    x = data[:, 0:vector_size]
    y = data[:, vector_size]
    del data
    # Convert y to binary class matrix (for categorical_crossentropy)
    y = keras.utils.to_categorical(y, num_classes=len(language_codes))
    # Static seed to have comparable results for different runs
    seed = 42
    x_train, x_test, y_train, y_test = train_test_split(x, y, test_size=0.20, random_state=seed)
    del x, y
    # x preprocessing.
    # Bug fix (data leakage): fit the scaler on the training split only and
    # then apply it to both splits.  The original fitted on the full data
    # before splitting, leaking test-set statistics into training.
    standard_scaler = preprocessing.StandardScaler().fit(x_train)
    x_train = standard_scaler.transform(x_train)
    x_test = standard_scaler.transform(x_test)
    np.savez_compressed(out_train_test_path, X_train=x_train, y_train=y_train, X_test=x_test, y_test=y_test)
    print(f"Saved train/test data to {out_train_test_path}, size: {size_kb(out_train_test_path)} KB.")
    del x_train, y_train, x_test, y_test
|
21,378 | d387cc36636248f63a49a287a7902817cd415c0b | import configparser
import json
import requests
from twilio.rest import Client
# pip3 install -r requirements.txt
class TwilioHelper(object):
    """Thin wrapper around the Twilio REST client.

    Credentials and the Abstract phone-validation API settings are read
    from the local ``twilio.config`` file at construction time.
    """
    def __init__(self):
        #read config from twilio.config file
        self.config = configparser.RawConfigParser()
        self.config.read('twilio.config')
        self.tw_sid = self.config.get('twilio_sms','sid')
        self.tw_token = self.config.get('twilio_sms','auth')
        self.tw_from_ = self.config.get('twilio_sms','from')
        self.phone_validate_api_key = self.config.get('abstract_api','key')
        self.phone_validate_api_url = self.config.get('abstract_api','api_url')
        self._client = Client(self.tw_sid, self.tw_token)
    def sendsms(self, send_to, send_text):
        """Send SMS.

        Validates *send_to* first; on success sends *send_text* via Twilio,
        polls the message status until it leaves the 'sent' state and
        returns the final status string.  For an invalid number, returns a
        human-readable error message instead of sending.
        """
        self.to = send_to
        self.valid = self.validatephone(send_to)
        if self.valid:
            self.body = send_text
            self.message = self._client.messages.create(
                to=self.to,
                from_=self.tw_from_,
                body=self.body
            )
            self.sid = self.message.sid
            self.message_status = self._client.messages(self.sid).fetch().status
            # NOTE(review): this is a while/else — the else runs once the
            # loop condition fails, fetching the status one final time and
            # returning it.  The loop busy-polls with no delay or timeout.
            while (self.message_status == 'sent'):
                self.message_status = self._client.messages(self.sid).fetch().status
            else:
                self.message_status = self._client.messages(self.sid).fetch().status
                return self.message_status
        else:
            # Invalid number: build an error message instead of sending.
            self.message_status = 'Destination Phone Number '
            self.message_status += send_to
            self.message_status += ' is invalid. Please Try again'
            return self.message_status
    def validatephone(self, send_to):
        """Validate Phone Number
        Using Abstract Phone Validation API
        https://app.abstractapi.com/api/phone-validation

        Returns the boolean ``valid`` flag from the API's JSON response.
        """
        # The API expects the number without a leading '+'.
        self.send_to = send_to.lstrip('+')
        self.params = {
            'api_key': self.phone_validate_api_key,
            'phone' : self.send_to
        }
        response = requests.get(self.phone_validate_api_url,params=self.params)
        decoded_content = json.loads(response.content.decode('UTF-8'))
        is_phonenumber_valid = decoded_content['valid']
        return is_phonenumber_valid
|
21,379 | 4914bb01bb6d7f6e6f939e5c216eed73c9263751 | from django.urls import include, path
from . import views
# URL routes for the expenses/entries app.
urlpatterns = [
    # path('', views.EntryListView.as_view(), name='entry_changelist'),
    # path('add/', views.EntryCreateView.as_view(), name='entry_add'),
    path('add2/',views.add2, name='add2'),
    path('new_entry/', views.new_entry, name = 'new_entry'),
    # path('<int:pk>/', views.EntryUpdateView.as_view(), name='entry_change'),
    # AJAX endpoints used to populate dependent dropdowns on the entry form.
    path('ajax/load-categories/', views.load_categories, name='ajax_load_categories'),
    path('ajax/load-types/', views.load_types, name='ajax_load_types'),
    path('ajax/load-payments/', views.load_payments, name='ajax_load_payments'),
    # Read-only listing / reporting views.
    path('',views.full_list, name='view_all'),
    path('view/<int:year>/<int:month>', views.view_month, name ='view_month'),
    path('savings/', views.savings_graph, name='savings'),
    path('<int:entry_id>/del_entry', views.del_entry, name='del_entry'),
    path('transactions',views.show_transactions, name='transactions')
]
|
21,380 | e375cf92ca5bf4312f44894aa129ef345dcd85fa | # -*- coding: utf-8 -*-
"""
初めてのpythonレッスン7
数値と文字列の型変換
文字列 → 数値 int, float
数値 → 文字列 str
Created on Tue Jun 7 14:59:04 2016
@author: B5_2
"""
print (5+int("5"))
print (5+float("5"))
age = 20
print("i am " + str(age) + " years old!") |
21,381 | eb7a66e33823679af5ec0934c64e4707223e53d9 | class DobleNodo:
def __init__(self, value, siguiente = None, anterior = None):
self.data = value
self.next = siguiente
self.prev = anterior
class DoubleLinkedList:
def __init__(self):
self.__head = DobleNodo(None, None, None)
self.__tail = DobleNodo(None, None, None)
self.__head.next = self.__tail
self.__tail.prev = self.__head
self.__size = 0
print(f'head: {self.__head}')
print(f'tail: {self.__tail}')
def get_size(self):
return self.__size
def is_empty(self):
return self.__head.data == None and self.__tail.data == None
#agrega un nodo al final
def append(self, value):
self.__size+=1
new_node = DobleNodo(value)
if self.is_empty():
self.__head = new_node
self.__tail = self.__head
else:
curr_node = self.__head
while curr_node.next != None:
curr_node = curr_node.next
new = DobleNodo(value = value, anterior = curr_node)
curr_node.next = new
self.__tail = new
def transversal(self):
curr_node = self.__head
while curr_node != None:
print(f"{curr_node.data} -->", end = "")
curr_node = curr_node.next
print(" ")
def reverse_transversal(self):
curr_node = self.__tail
while curr_node != None:
print(f"{curr_node.data} -->", end = "")
curr_node = curr_node.prev
print(" ")
def remove_from_head(self, value): #value es el valor que queremos eliminar #remove, elimina la primer coincidencia del dato solicitado
if self.__head.data == value: #si la cabeza, es decir, el primer dato, es igual al valor buscado...
self.__head = self.__head.siguiente #apuntamos head al siguiente dato
self.__head.next = None
else:
curr_node = self.__head #creamos la variable anterior (al dato que buscamos)
while curr_node.data != value and curr_node.next != None: #que no es el dato que buscamos ni el último
curr_node = curr_node.next #SI se cumplen ambas condiciones va al siguiente valor para encontrar el dato buscado
if curr_node.data == value: #si curr_node es igual al valor que queremos
curr_node.prev.next = curr_node
curr_node.next.prev = curr_node.prev
else:
self.__size += 1
print('El dato no existe en la lista')
self.__size -=1
def remove_from_tail(self, value): #value es el valor que queremos eliminar #remove, elimina la primer coincidencia del dato solicitado
if self.__tail.data == value: #si la cola, es decir, el último dato, es igual al valor buscado...
self.__tail = self.__tail.prev #apuntamos tail al datoanterior
self.__tail.next = None
else:
curr_node = self.__tail
while curr_node.data != value and curr_node.prev != None: #que no es el dato que buscamos ni el último
curr_node = curr_node.prev #SI se cumplen ambas condiciones va al valor anterior para encontrar el dato buscado
if curr_node.data == value: #si curr_node es igual al valor que queremos
curr_node.prev.next = curr_node.next
curr_node.next.prev = curr_node.prev
else:
self.__size += 1
print('El dato no existe en la lista')
self.__size -=1
def find_from_tail(self, value):
curr_node = self.__head #Ponemos apuntador a la cabeza
find = None
try:
while curr_node.data != value: #Mientras el apuntador sea diferente al valor que queremos
curr_node = curr_node.next #Apuntamos al siguiente nodo
if curr_node.data == value: #Si el apuntador encontró el nodo buscado
find = curr_node.data #encontramos el dato
except: #Lanzamos excepción
print("El valor no se encuentra en la lista")
return find
def find_from_head(self, value):
curr_node = self.__head #Ponemos apuntador a la cabeza
find = None
try:
while curr_node.data != value: #Mientras el apuntador sea diferente al valor que queremos
curr_node = curr_node.next #Apuntamos al siguiente nodo
if curr_node.data == value: #Si el apuntador encontró el nodo buscado
find = curr_node.data #encontramos el dato
except:
print("El valor no se encuentra en la lista")
return find
# Encuentra posición desde tail
def find_fromtail(self , value):
curr_node = self.__tail
posicion = 0
try:
while value != curr_node.data:
curr_node = curr_node.prev
posicion +=1
if value == curr_node.data:
curr_node = curr_node
print(f'Desde tail... La posición del dato {curr_node.data} es: ')
except:
print('No se pudo encontrar el dato')
return
return posicion
#Encuentra posición desde head
def find_fromhead(self , value):
curr_node = self.__head
posicion = 0
try:
while value != curr_node.data:
curr_node = curr_node.next
posicion +=1
if value == curr_node.data:
curr_node = curr_node
print(f'Desde head... La posición del dato {curr_node.data} es: ')
except:
print(f'No se pudo encontrar el dato en la lista')
return
return posicion
|
21,382 | 25641eb1ee3a002589c4cd45c9ce0da3a67d25bb | import ast
import matplotlib.pyplot as plt
import numpy
def plot_threshold(data):
    """
    Plots a graph of match counts per id, with min/max/average annotated.
    :param data: Data is a list of dictionaries with keys id and count.
    :return: None
    """
    ids = [entry['id'] for entry in data]
    count = [entry['count'] for entry in data]
    line, = plt.plot(ids, count, label='Threshold')
    plt.ticklabel_format(useOffset=False)
    plt.legend(handles=[line], loc=2)
    plt.xlabel('Number of matches')
    plt.ylabel('Count of matches')
    # Summary statistics shown in the top-right corner of the axes.
    stats = ('Minimum: ' + str(numpy.min(count))
             + '\n' + 'Maximum: ' + str(int(numpy.max(count)))
             + '\n' + 'Average: ' + str(int(numpy.average(count))))
    plt.annotate(stats, xy=(1, 1), xycoords='axes fraction', fontsize=16,
                 xytext=(-5, -5), textcoords='offset points', ha='right', va='top')
    # Logarithmic scale
    plt.yscale('log')
    plt.show()
# Read file and assign
fname = './threshold_volkskrant_output/result.dat'
with open(fname) as f:
    content = f.readlines()
# Raw data
threshold_raw = content
# List of objects: each line holds a Python tuple literal "(id, count)".
threshold_list = []
for obj in threshold_raw:
    # Parse objects to tuples (literal_eval is safe for literal data)
    threshold_list.append(ast.literal_eval(obj))
idx = 0
for obj in threshold_list:
    # Parse tuples to dictionaries
    new_dict = {'id': int(obj[0]), 'count': obj[1]}
    threshold_list[idx] = new_dict
    idx += 1
# Sort by id
sorted_thresholds = sorted(threshold_list, key=lambda k: k['id'])
plot_threshold(sorted_thresholds)
|
21,383 | d93ad1ca1922ab64123620fccaf8ab66a64354e5 | # -- coding: utf-8 --
import ast
import numpy as np
# Landmark name -> fixed array index: 6 intervertebral discs, then 5 vertebrae.
name_dic = {'T12-L1': 0, 'L1-L2': 1, 'L2-L3': 2, 'L3-L4': 3,
            'L4-L5': 4, 'L5-S1': 5, 'L1': 6, 'L2': 7, 'L3': 8, 'L4': 9, 'L5': 10}
genre_dic = {'v1': 0, 'v2': 1, 'v3': 2, 'v4': 3, 'v5': 4}  # class labels used for the 2-/5-way classification
def load_all(path):
    """Load one annotation file and return (x, y, genre_disc, genre_ver).

    Each line looks like ``x,y,{...}`` where the trailing dict holds the
    landmark name ('identification') plus its class labels.  genre_disc[i]
    is the multi-hot label row for disc i (name_dic indices 0-5) and
    genre_ver[i] the label row for vertebra i (name_dic indices 6-10).
    """
    genre_disc = np.zeros((6, 5))  # label rows for the 6 disc landmarks
    # NOTE(review): genre_ver has width 2, but genre_dic can yield indices up
    # to 4 — a vertebra labelled v3..v5 would raise IndexError; presumably
    # vertebrae only ever use v1/v2 in this dataset — confirm.
    genre_ver = np.zeros((5, 2))
    x = np.zeros(11, dtype='int')
    y = np.zeros(11, dtype='int')
    f = open(path, mode='r')
    for line in f.readlines():
        pos1 = line.find(',')
        pos2 = line.find(',', pos1+1)
        other = line[pos2+1:]
        other_dic = ast.literal_eval(other)
        iden = name_dic[other_dic['identification']]
        if iden < 6:  # disc landmark: may carry several labels, e.g. 'v2,v5'
            genre_str = other_dic['disc']
            genre_list = genre_str.split(',')
            for i in genre_list:
                j = genre_dic[i]
                genre_disc[iden, j] = 1
        else:  # vertebra landmark: single label
            j = genre_dic[other_dic['vertebra']]
            genre_ver[iden-6, j] = 1
        x[iden] = int(line[0:pos1])
        y[iden] = int(line[pos1+1:pos2])
    f.close()
    return x, y, genre_disc, genre_ver
def load_location(path):
    """Load only the (x, y) landmark coordinates from an annotation file.

    Each line has the form ``x,y,{...dict...}`` where the dict's
    'identification' field names the landmark; the coordinates are stored at
    that landmark's fixed index (see ``name_dic``).

    Fix vs. the original: the file handle was never closed; it is now managed
    with ``with`` so it closes even on parse errors.
    """
    x = np.zeros(11, dtype='int')
    y = np.zeros(11, dtype='int')
    with open(path, mode='r') as f:
        for line in f.readlines():
            pos1 = line.find(',')
            pos2 = line.find(',', pos1 + 1)
            other_dic = ast.literal_eval(line[pos2 + 1:])
            iden = name_dic[other_dic['identification']]
            x[iden] = int(line[0:pos1])
            y[iden] = int(line[pos1 + 1:pos2])
    return x, y
|
21,384 | dccdb36c886014b1418fb28a50a3437107f9669f | import discord
from discord.ext import commands
from .utils.chat_formatting import escape_mass_mentions, italics, pagify
from random import randint
from random import choice
from enum import Enum
from urllib.parse import quote_plus
import datetime
import time
import aiohttp
import asyncio
@commands.command(pass_context=True)
async def rps(self, ctx, your_choice : RPSParser):
    """Play rock paper scissors"""
    # NOTE(review): RPS and RPSParser are defined elsewhere in this cog file.
    author = ctx.message.author
    player_choice = your_choice.choice
    # Bot picks uniformly among the three moves.
    red_choice = choice((RPS.rock, RPS.paper, RPS.scissors))
    # Outcome table keyed by (player, bot): True means the player wins.
    cond = {
            (RPS.rock,     RPS.paper)    : False,
            (RPS.rock,     RPS.scissors) : True,
            (RPS.paper,    RPS.rock)     : True,
            (RPS.paper,    RPS.scissors) : False,
            (RPS.scissors, RPS.rock)     : False,
            (RPS.scissors, RPS.paper)    : True
           }
    if red_choice == player_choice:
        outcome = None # Tie
    else:
        outcome = cond[(player_choice, red_choice)]
    if outcome is True:
        await self.bot.say("{} You win {}!"
                           "".format(red_choice.value, author.mention))
    elif outcome is False:
        await self.bot.say("{} You lose {}!"
                           "".format(red_choice.value, author.mention))
    else:
        await self.bot.say("{} We're square {}!"
                           "".format(red_choice.value, author.mention))
@commands.command(pass_context=True)
async def flip(self, ctx, user : discord.Member=None):
    """Flips a coin... or a user.

    Defaults to coin.
    """
    if user != None:
        msg = ""
        if user.id == self.bot.user.id:
            # Refuse to flip the bot itself: flip the requester instead.
            user = ctx.message.author
            msg = "Nice try. You think this is funny? How about *this* instead:\n\n"
        # Map lowercase letters to visually upside-down glyphs...
        char = "abcdefghijklmnopqrstuvwxyz"
        tran = "ɐqɔpǝɟƃɥᴉɾʞlɯuodbɹsʇnʌʍxʎz"
        table = str.maketrans(char, tran)
        name = user.display_name.translate(table)
        # ...then uppercase letters; reversing below completes the "flip".
        char = char.upper()
        tran = "∀qƆpƎℲפHIſʞ˥WNOԀQᴚS┴∩ΛMX⅄Z"
        table = str.maketrans(char, tran)
        name = name.translate(table)
        await self.bot.say(msg + "(╯°□°)╯︵ " + name[::-1])
    else:
        await self.bot.say("*flips a coin and... " + choice(["HEADS!*", "TAILS!*"]))
@commands.command(pass_context=True)
async def roll(self, ctx, number : int = 100):
    """Rolls random number (between 1 and user choice)

    Defaults to 100.
    """
    author = ctx.message.author
    if number > 1:
        n = randint(1, number)
        await self.bot.say("{} :game_die: {} :game_die:".format(author.mention, n))
    else:
        # randint(1, number) needs an upper bound above 1.
        await self.bot.say("{} Maybe higher than 1? ;P".format(author.mention))
|
21,385 | 0f47008c43eaa3b4a719c312ca25329491bbea9f | from os import system
MOVES = ["ROCK", "PAPER", "SCISSORS"]  # the valid moves, indexed 0..2
def get_player_move():
    """Return the player's move (stub: currently always the first move)."""
    # TODO(review): real player input handling is not implemented yet.
    return MOVES[0]
def get_computer_move():
    """Return the computer's move (stub: currently always the first move)."""
    # TODO(review): random selection is not implemented yet.
    return MOVES[0]
def get_winner(player_move, computer_move):
    """Return the round's result string (stub: only detects draws)."""
    if player_move != computer_move:
        # TODO(review): win/lose resolution is not implemented yet.
        return "Moves unrecognised"
    return "It's a draw"
if __name__ == '__main__':
    # Game loop: clear the screen, play one round, repeat until the player
    # answers "N" at the continue prompt.
    while True:
        system("cls")  # NOTE(review): Windows-only; POSIX shells use 'clear'
        print("Welcome to the rock paper scissors game")
        player = get_player_move()
        computer = get_computer_move()
        print("The player picked", player)
        print("The computer picked", computer)
        print(get_winner(player, computer))
        if input("Press (Y) to continue or (N) to quit:\n") == "N":
            break
|
21,386 | e1a48a9d83fc9c3edc30ff2fa4143947332b9d8a | # -*- coding: utf-8 -*-
# @Time : 2021/2/10 16:52
# @Author : Lim Yoona
# @Site :
# @File : 02_double_ended_queue.py
# @Software: PyCharm
"""
双端队列(deque,double-ended queue)
"""
"""
具有队列和栈的性质的数据结构
双端队列的任意一端入队和出队
"""
class Deque:
    """A double-ended queue backed by a plain Python list.

    The "first" end is index 0 of the backing list; the "pail" (tail) end is
    the end of the list.  ``items`` is intentionally public, matching the
    original interface.
    """

    def __init__(self):
        self.items = []

    def is_empty(self):
        """True when the deque holds no elements."""
        return len(self.items) == 0

    def add_first(self, item):
        """Insert *item* at the front of the deque."""
        self.items[0:0] = [item]

    def add_pail(self, item):
        """Append *item* at the back of the deque."""
        self.items.append(item)

    def remove_first(self):
        """Pop and return the front item."""
        front = self.items.pop(0)
        return front

    def remove_pail(self):
        """Pop and return the back item."""
        back = self.items.pop()
        return back

    def size(self):
        """Number of elements currently stored."""
        return len(self.items)
if __name__ == '__main__':
    # Demo: front-insert 2 then 1... final backing list is [2, 1, 6, 8].
    deque = Deque()
    deque.add_first(1)
    deque.add_first(2)
    deque.add_pail(6)
    deque.add_pail(8)
    print(deque.size())           # 4
    print(deque.items)            # [2, 1, 6, 8]
    print(deque.remove_first())   # 2
    print(deque.remove_pail())    # 8
|
21,387 | 335c48f6cae149f504c1057e628db881fd619ea0 | from datetime import date
from FlaskApp import db
class Employee(db.Model):
    """SQLAlchemy model for an employee record."""
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(100), nullable=False)
    address = db.Column(db.String(200), nullable=False)
    date_of_birth = db.Column(db.Date, nullable=True)
    email = db.Column(db.String(120), nullable=False, unique=True)
    phone = db.Column(db.String(15), nullable=False, unique=True)
    # Path/filename of the stored profile image, if any.
    images = db.Column(db.String(200), nullable=True)
    gender = db.Column(db.Integer, nullable=True)

    def __repr__(self):
        # Bug fix: the original referenced self.title / self.date_employee,
        # which do not exist on this model and raised AttributeError.
        return f"Employee('{self.name}', '{self.email}')"
21,388 | 860813baf94cdae7de3673babc2717bd983097a2 | from django.contrib import admin
from .django_talk.models import ChatRoom
class ChatRoomAdmin(admin.ModelAdmin):
    """Default admin configuration for ChatRoom (no customisation)."""
    pass
# Register the model so it appears in the Django admin site.
admin.site.register(ChatRoom, ChatRoomAdmin)
|
21,389 | ae4ec3dfc94b114fffb4ab2726ff0e7a2a22c26a | import pandas as pd
import numpy as np
import torch
from torch import nn
import datetime
# You should not modify this part.
def config():
    """Parse command-line arguments: input data paths and the output file."""
    import argparse
    parser = argparse.ArgumentParser()
    # (flag, default, help) for every supported option.
    options = [
        ("--consumption", "./sample_data/consumption.csv", "input the consumption data path"),
        ("--generation", "./sample_data/generation.csv", "input the generation data path"),
        ("--bidresult", "./sample_data/bidresult.csv", "input the bids result path"),
        ("--output", "output.csv", "output the bids path"),
    ]
    for flag, default, help_text in options:
        parser.add_argument(flag, default=default, help=help_text)
    return parser.parse_args()
def output(path, data):
    """Write bid rows to *path* as CSV with the standard bid column header."""
    columns = ["time", "action", "target_price", "target_volume"]
    pd.DataFrame(data, columns=columns).to_csv(path, index=False)
def getData(path1, path2):
    """Read consumption/generation CSVs into (-1, 1, 168)-shaped float tensors.

    Returns ``(generation_tensor, consumption_tensor, last_date)`` where
    *last_date* is parsed from the final row's ``time`` column of the
    consumption file.  Assumes the row count is a multiple of 168
    (one week of hourly readings) — TODO confirm against the data files.
    """
    dateFormatter = "%Y-%m-%d %H:%M:%S"
    df_consumption = pd.read_csv(path1, encoding='utf-8')
    df_generation = pd.read_csv(path2, encoding='utf-8')
    gen, con = [], []
    last_date = None
    for i in range(len(df_consumption)):
        gen.append(df_generation["generation"][i])
        con.append(df_consumption["consumption"][i])
        last_date = datetime.datetime.strptime(
            df_consumption["time"][i], dateFormatter)
    # Pack as float32 and reshape into 168-hour windows for the GRU.
    gen = torch.from_numpy(np.array(gen, dtype='float32').reshape(-1, 1, 168))
    con = torch.from_numpy(np.array(con, dtype='float32').reshape(-1, 1, 168))
    return gen, con, last_date
class GRUNet(nn.Module):
    """Bidirectional 3-layer GRU mapping a 168-hour window to 24 hourly values.

    NOTE: the submodule names ``rnn`` and ``out`` are part of the saved
    checkpoint's state_dict keys (consumptionV1.pt / generationV1.pt) —
    do not rename them.
    """
    def __init__(self, input_size):
        super(GRUNet, self).__init__()
        self.rnn = nn.GRU(
            input_size=input_size,
            hidden_size=64,
            num_layers=3,
            batch_first=True,
            bidirectional=True
        )
        # 128 = 64 hidden units x 2 directions.
        self.out = nn.Sequential(
            nn.Linear(128, 24)
        )
    def forward(self, x):
        # x presumably has shape (batch, seq, input_size) — TODO confirm.
        x, _ = self.rnn(x)
        s, b, h = x.shape
        # Flatten so the Linear layer maps every step, then restore the shape.
        x = x.view(s*b, h)
        x = self.out(x)
        x = x.view(s, b, -1)
        return x
def test_model(gen, con):
    """Run the saved consumption and generation GRU models on the inputs.

    Loads weights from ./consumptionV1.pt and ./generationV1.pt and returns
    the squeezed predictions as (predicted_consumption, predicted_generation).
    """
    model = GRUNet(168)
    model.load_state_dict(torch.load('./consumptionV1.pt'))
    model.eval()
    pd_con = model(con)
    # Same architecture, separate checkpoint, for the generation forecaster.
    model = GRUNet(168)
    model.load_state_dict(torch.load('./generationV1.pt'))
    model.eval()
    pd_gen = model(gen)
    print(pd_con.squeeze())
    print(pd_gen.squeeze())
    return pd_con.squeeze(), pd_gen.squeeze()
def rule(predict_consumption, predict_generation, last_date):
    """Turn hourly consumption/generation forecasts into buy/sell bid rows.

    For each hour after *last_date*: bid "buy" at 2.3 when a deficit is
    predicted, "sell" at 1.5 when a surplus is predicted, and no action when
    they balance exactly.  Each row is [timestamp_str, action, price, volume].

    Bug fix: the sell branch appended the raw datetime object while the buy
    branch appended its string form; both now emit the string timestamp so
    the CSV output is consistent.
    """
    ans = []
    for i in range(0, len(predict_consumption)):
        last_date = last_date + datetime.timedelta(hours=1)
        net = predict_consumption[i] - predict_generation[i]
        if net > 0:
            ans.append([str(last_date), "buy", 2.3, 1])
        elif net < 0:
            ans.append([str(last_date), "sell", 1.5, 1])
    return ans
if __name__ == "__main__":
    args = config()
    # try:
    # NOTE(review): `device` is selected but never passed to the models below.
    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
    gen, con, last_date = getData(config().consumption, config().generation)
    pd_con, pd_gen = test_model(gen, con)
    data = rule(pd_con, pd_gen, last_date)
    # except:
    #     print('error')
    #     data = []
    output(args.output, data)
|
21,390 | 703d0781aeb12faee70ae2ce4ca7c3bf1368cfbc | # Generated by Django 3.2.1 on 2021-05-05 04:03
from django.db import migrations
class Migration(migrations.Migration):
    """Rename the misspelled Airticle field 'descirption' to 'description'."""
    dependencies = [
        ('news', '0004_rename_article_airticle'),
    ]
    operations = [
        migrations.RenameField(
            model_name='airticle',
            old_name='descirption',
            new_name='description',
        ),
    ]
|
21,391 | 4d600f7b664c53e6b77c574e543ef56d7b50a64d | ids = []
n, k = 0, 0
dry_run = True
if dry_run:
n, k = 4, 2
ids = [[4, 4, 6, 7, 8], [4, 8, 3, 0, 4], [2, 0, 10], [6, 1, 2, 3, 0, 5, 8]]
else:
n, k = map(int, input().split())
for i in range(n):
ids.append([int(id) for id in input().split()])
print(ids)
|
21,392 | 563c4634594bdcffd0d211f9e0d5ac558b0a7734 | import time
import datetime
import logging
import ast
import sys
import data_handling_functions as dhf
import sourceTree
import function_paths
import unit_test_maker as utm
from pprint import pprint
# Timestamp used to give every run its own uniquely named log file.
timex = datetime.datetime.fromtimestamp(int(time.time())).strftime('%d.%m.%Y.%H.%M.%S')
# creates a log file for each run, stores file,line,time,message
logging.basicConfig(filename='../logs/pyGen'+timex+'.log', format="%(levelname)s;%(filename)s;\
%(lineno)d;%(asctime)s.%(msecs)03d;%(message)s", datefmt="%H:%M:%S", level=logging.DEBUG)
# alias for sending data to the log file; messages must be string formatted on sending
log_info = logging.info
log_info("run started at {0}".format(timex))
"""
PyGen Test Data.
Author : David Bryan.
Email : C00188175@itcarlow.ie
Supervisor : Christophe Meudec.
This package is an investigation into the automatic generation of Unit Tests from source code.
The evaluation techniques of Symbolic Execution and Constraint logic programming are used.
This module is the main program.
It requires setting up a configuration settings section in a gui to handle the following
1. ECLiPSe program directory - folder that Eclipse is installed, main folder (not /bin)
2. ECLiPSe pl files directory - for placing temporary Prolog files during the program run.
3. Test folder location - folder where sample Python files are placed, makes choosing easier
"""
def main():
    """Drive the full pipeline: pick a source file, parse it to an AST,
    analyse each function's paths, solve the path constraints via Prolog,
    and emit unit-test files for the satisfiable paths.
    """
    # check if 'conf' argument has been included in call
    if len(sys.argv) > 1:
        if sys.argv[1] == 'conf':
            dhf.create_config()
    # load configuration settings
    config = dhf.get_config()
    source_directory = config['SOURCE_DIR']
    eclipse_file_dir = config['ECLIPSE_FILES_DIR']
    source_file = ""
    # get user to select source file.
    try:
        source_file = dhf.get_source_file(source_directory)
    except TypeError:
        # NOTE(review): source_file stays "" here yet execution continues;
        # read_source_file("") below will likely fail — consider returning.
        print("This process requires the selection of a valid python source code file .py")
    # set folder for saving Prolog files
    output_dir = '/'.join([eclipse_file_dir, 'solver/'])
    print("file chosen is ", source_file)
    # get source code from target source file
    content = dhf.read_source_file(source_file)
    # convert the file contents to an Abstract Syntax Tree
    try:
        ast_tree_source = ast.parse(content)
    except IndentationError as err:
        print("There was a problem with the indentation in that source file")
        print(err.msg, 'on line', err.lineno)
        return 0
    # this creates an ast_object for traversing the tree
    ast_object = sourceTree.SourceTree(ast_tree_source)
    # this runs the node extraction process
    ast_object.process_source_code()
    # this creates the complete node list for the source code 'nodevals'
    ast_object.construct_node_data_list()
    print('Extracting Functions and classes')
    log_info('Extracting Functions and classes')
    # filter nodevals to extract the indices of Function and class Definition Nodes
    list_of_functions = [node_number[0] for node_number in ast_object.nodevals if node_number[5] == 'FunctionDef']
    list_of_classes = [node_number[0] for node_number in ast_object.nodevals if node_number[5] == 'ClassDef']
    log_info('classes = {0}'.format(str(list_of_classes)))
    # Start FunctionDef analysis
    print('Iterate through Functions')
    # list to store all functions
    function_objects_list = []
    # iterate through the list of indices
    for function in list_of_functions:
        # create an object to store this function, this points the function_object at the id
        # which is the memory address of the function node
        function_object = function_paths.FuncDef(ast_object.nodevals[function][6], output_dir)
        # extract the functions name
        function_name = function_object.head.name
        print('analysing function ', function_name)
        # extract parameters from the function
        function_object.get_variables_from_args()
        # extract returns and decorator_list (these functions are empty currently)
        function_object.get_decorator_list()
        function_object.get_return_list()
        # extract function body to create paths
        function_object.process_function_body()
        print("At this point the function has been analysed to identify the possible paths")
        print("there are 3 important data structures ready for the Symbolic analysis")
        print("1. function_object.conditions_dict")
        pprint(function_object.conditions_dict)
        print("2. function_object.list_of_statements_in_function")
        pprint(function_object.list_of_statements_in_function)
        print("3. function_object.paths")
        pprint(function_object.paths)
        # At this point the function is broken into paths and the paths need to be built and then
        # Symbolically executed to evaluate the return expression
        function_object.build_paths()
        function_object.symbolically_execute_paths()
        print("path_dict")
        for key, value in function_object.path_dict.items():
            print(key, value)
        # this adds the current function object reference to the list of function object references
        function_objects_list.append(function_object)
        # this creates and runs a Prolog file for each path and collects results
        function_object.test_path_constraints()
        # remove non satisfiable paths
        function_object.filter_returned_paths()
        # use return expression and input data set to add tested values to return_dict
        function_object.evaluate_expected_results()
    # send the result structure for extraction to Unit Test Files
    utm.make_unit_tests(source_file, function_objects_list)
if __name__ == '__main__':
    main()
|
21,393 | e2db5c87582f6688492fe99c1874a918f5364358 | # -*- coding: utf-8 -*-
#
# Copyright (C) 2008 John Paulett (john -at- paulett.org)
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
"""Helper functions for pickling and unpickling. Most functions assist in
determining the type of an object.
"""
import time
import types
import inspect
from jsonstruct import tags
from jsonstruct.compat import set
from jsonstruct.compat import unicode, long
# Built-in collection types handled uniformly by the pickling helpers.
COLLECTIONS = (list, set, tuple)
COLLECTIONS_SET = set(COLLECTIONS)
# Basic value types; None is handled separately in is_primitive().
PRIMITIVES = set((str, unicode, bool, float, int, long))
def is_type(obj):
    """Returns True is obj is a reference to a type.

    >>> is_type(1)
    False

    >>> is_type(object)
    True

    >>> class Klass: pass
    >>> is_type(Klass)
    True
    """
    # Bug fix: Python 2 old-style classes have type ``types.ClassType``, but
    # that attribute is gone in Python 3, so look it up defensively instead
    # of raising AttributeError on every call.
    class_type = getattr(types, 'ClassType', type)
    return type(obj) is type or type(obj) is class_type
def is_object(obj):
    """Returns True is obj is a reference to an object instance.

    >>> is_object(1)
    True

    >>> is_object(object())
    True

    >>> is_object(lambda x: 1)
    False
    """
    if not isinstance(obj, object):
        return False
    # Exclude type references and plain functions.
    kind = type(obj)
    return kind is not type and kind is not types.FunctionType
def is_primitive(obj):
    """Helper method to see if the object is a basic data type. Strings,
    integers, longs, floats, booleans, and None are considered primitive
    and will return True when passed into *is_primitive()*

    >>> is_primitive(3)
    True
    >>> is_primitive([4,4])
    False
    """
    # None is primitive but its type is not listed in PRIMITIVES.
    return obj is None or type(obj) in PRIMITIVES
def is_dictionary(obj):
    """Helper method for testing if the object is exactly a dict.

    >>> is_dictionary({'key':'value'})
    True
    """
    # Deliberately an exact type check: dict subclasses are handled
    # separately by is_dictionary_subclass().
    obj_type = type(obj)
    return obj_type is dict
def is_collection(obj):
    """Helper method to see if the object is a Python collection (list,
    set, or tuple).

    >>> is_collection([4])
    True
    """
    # Exact type membership; subclasses go through is_collection_subclass().
    obj_type = type(obj)
    return obj_type in COLLECTIONS_SET
def is_list(obj):
    """Helper method to see if the object is exactly a Python list.

    >>> is_list([4])
    True
    """
    return list is type(obj)

def is_set(obj):
    """Helper method to see if the object is exactly a Python set.

    >>> is_set(set())
    True
    """
    return set is type(obj)

def is_tuple(obj):
    """Helper method to see if the object is exactly a Python tuple.

    >>> is_tuple((1,))
    True
    """
    return tuple is type(obj)
def is_dictionary_subclass(obj):
    """Returns True if *obj* is a subclass of the dict type. *obj* must be
    a subclass and not the actual builtin dict.

    >>> class Temp(dict): pass
    >>> is_dictionary_subclass(Temp())
    True
    """
    if not hasattr(obj, '__class__'):
        return False
    return issubclass(obj.__class__, dict) and not is_dictionary(obj)

def is_collection_subclass(obj):
    """Returns True if *obj* is a subclass of list, set or tuple.

    *obj* must be a subclass and not the actual builtin, such
    as list, set, tuple, etc..

    >>> class Temp(list): pass
    >>> is_collection_subclass(Temp())
    True
    """
    #TODO add UserDict
    subclass_like = issubclass(obj.__class__, COLLECTIONS) or is_list_like(obj)
    return subclass_like and not is_collection(obj)
def is_noncomplex(obj):
    """Returns True if *obj* is a special (weird) class, that is more complex
    than primitive data types, but is not a full object. Including:

        * :class:`~time.struct_time`
    """
    return type(obj) is time.struct_time
def is_function(obj):
    """Returns true if passed a function

    >>> is_function(lambda x: 1)
    True

    >>> is_function(locals)
    True

    >>> def method(): pass
    >>> is_function(method)
    True

    >>> is_function(1)
    False
    """
    if type(obj) is types.FunctionType:
        return True
    if not is_object(obj):
        return False
    if not hasattr(obj, '__class__'):
        return False
    module = obj.__class__.__module__
    name = obj.__class__.__name__
    # Bug fix: builtins live in module '__builtin__' on Python 2 but
    # 'builtins' on Python 3; accept both so built-in callables (e.g.
    # ``locals``) are recognised on 3.x as the doctests require.
    return (module in ('__builtin__', 'builtins') and
            name in ('function',
                     'builtin_function_or_method',
                     'instancemethod',
                     'method-wrapper'))
def is_module(obj):
    """Returns True if passed a module

    >>> import os
    >>> is_module(os)
    True
    """
    return types.ModuleType is type(obj)
def is_picklable(name, value):
    """Return True if an object cannot be pickled

    >>> import os
    >>> is_picklable('os', os)
    True

    >>> def foo(): pass
    >>> is_picklable('foo', foo)
    False
    """
    # Reserved attribute names and functions are never picklable.
    return name not in tags.RESERVED and not is_function(value)
def is_installed(module):
    """Tests to see if ``module`` is available on the sys.path

    >>> is_installed('sys')
    True

    >>> is_installed('hopefullythisisnotarealmodule')
    False
    """
    try:
        __import__(module)
    except ImportError:
        # Cleanup: the exception object bound by the original was never used.
        return False
    return True
def is_list_like(obj):
    """True when *obj* quacks like a list (indexable and append-able)."""
    return all(hasattr(obj, attr) for attr in ('__getitem__', 'append'))

def is_container(obj):
    """True for dicts and the built-in collection types."""
    return is_dictionary(obj) or is_collection(obj)
def get_public_variables(t):
    """Returns public variables of a type t."""
    # Non-routine members whose names are not dunder-prefixed.
    members = inspect.getmembers(t, lambda m: not inspect.isroutine(m))
    return [name for name, _ in members if not name.startswith("__")]
|
21,394 | 2a6382ee6d4ec6dc2bf66831c48eddb64ae79929 | """table data
Revision ID: 60b4rc26f1a7
Revises: 59a3f9bc7288
Create Date: 2019-10-06 14:56:07.564667
"""
from sqlalchemy import orm
from alembic import op
from db.models import Employee, Skill, Department, EmployeesSkills, CadreMovement, EmployeeDepartments
revision = '60b4rc26f1a7'
down_revision = '59a3f9bc7288'
branch_labels = None
depends_on = None
def upgrade():
    """Seed the database with demo skills, departments, employees, and their
    relationships (skills, cadre movements, department memberships).
    """
    bind = op.get_bind()
    session = orm.Session(bind=bind)
    # Skills creation:
    fly_skill = Skill(name='Fly', description='I believe I can Fly. I believe I can touch the sky')
    light_shield_skill = Skill(name='Light Shield', description='Light protect. Perfect for everything')
    heavy_shield_skill = Skill(name='Heavy Shield', description='Try to hurt me')
    elbanador_skill = Skill(name='Elbanador ability', description='Godlike')
    flexibility_skill = Skill(name='Flexibility', description='Like a rubber')
    decrease_skill = Skill(name='Decrease', description='As less as it possible')
    session.add_all([fly_skill, light_shield_skill, heavy_shield_skill, elbanador_skill, flexibility_skill, decrease_skill])
    # flush() assigns primary keys so the ids can be referenced below.
    session.flush()
    # Departmets creation:
    guards_department = Department(name='Guards', description='People, who protect us from UFO')
    legions_department = Department(name='Legions', description='Just Legions')
    session.add_all([guards_department, legions_department])
    session.flush()
    # Employees creation:
    tony_stark = Employee(first_name='Tony', last_name='Stark', phone='+52 111 111 11 11', description='Iron Man', status='vacation')
    steve_rogers = Employee(first_name='Steve', last_name='Rogers', phone='+52 222 222 22 22', description='Captain America', status='active')
    peter_parker = Employee(first_name='Peter', last_name='Parker', phone='+52 333 333 33 33', description='Spider Man', status='hangover')
    scott_lang = Employee(first_name='Scott', last_name='Lang', phone='+52 444 444 44 44', description='Ant Man', status='hangover')
    session.add_all([tony_stark, steve_rogers, peter_parker, scott_lang])
    session.flush()
    # Employees Skills (many-to-many link rows)
    session.add_all([
        EmployeesSkills(employee_id=tony_stark.id, skill_id=fly_skill.id),
        EmployeesSkills(employee_id=tony_stark.id, skill_id=light_shield_skill.id),
        EmployeesSkills(employee_id=tony_stark.id, skill_id=elbanador_skill.id),
        EmployeesSkills(employee_id=steve_rogers.id, skill_id=heavy_shield_skill.id),
        EmployeesSkills(employee_id=peter_parker.id, skill_id=flexibility_skill.id),
        EmployeesSkills(employee_id=scott_lang.id, skill_id=decrease_skill.id),
    ])
    # Employee CadreMovements (department transfer history)
    session.add_all([
        CadreMovement(employee=tony_stark.id, old_department=guards_department.id, new_department=legions_department.id, reason='simple'),
        CadreMovement(employee=tony_stark.id, old_department=legions_department.id, new_department=guards_department.id, reason='simple'),
        CadreMovement(employee=steve_rogers.id, old_department=guards_department.id, new_department=legions_department.id, reason='simple'),
        CadreMovement(employee=peter_parker.id, old_department=legions_department.id, new_department=guards_department.id, reason='complicated'),
    ])
    # Employee departments (current memberships)
    session.add_all([
        EmployeeDepartments(employee_id=tony_stark.id, department_id=guards_department.id),
        EmployeeDepartments(employee_id=tony_stark.id, department_id=legions_department.id),
    ])
    session.commit()
def downgrade():
    # Seed data is intentionally not removed on downgrade.
    pass
21,395 | c0375677947e0059d4535c8f5ebf886a06ba4610 | import sys
import os.path
import lxml.html
from optparse import OptionParser
from clanlists.check_server import check_server_availability
from modules.filetools import ensure_dir, settings
SETTINGS_FILE_NAME = 'clanlist_settings.ini'
mypath = os.path.abspath(os.path.split(sys.argv[0])[0]) + '/clanlists'
def make_clanlist(server, start=None):
    """Scrape sequential clan pages for *server* and append tab-separated
    records (index, tag, name) to <SERVER>_clans.lst.

    Parameters
    ----------
    server : str
        Server identifier; upper-cased to build settings keys and the
        output file name.
    start : int, optional
        First clan index to fetch.  When omitted, resumes one past the
        last index already recorded in the list file, falling back to the
        configured index shift when the file is missing or unparsable.

    The scrape stops at the first page that fails to load, unless that
    index is listed in the server's known-404 gaps, in which case it is
    skipped and scraping continues.
    """
    check_server_availability(server, 'clanlist')

    # Read the settings file once up front; the original re-parsed the
    # ini file for every key on every loop iteration.
    cfg = settings(mypath + '/' + SETTINGS_FILE_NAME)
    key = server.upper()

    if not start:
        try:
            with open(mypath + '/' + key + '_clans.lst', mode='r', encoding='utf_8') as clanlist:
                lines = clanlist.readlines()
            start = int(lines[-1].split('\t')[0]) + 1
        except (OSError, IndexError, ValueError):
            # No usable list file yet: begin at the configured offset.
            start = 1 + cfg[key + '_clan_index_shift']

    url = cfg[key + '_clan_page_link']
    i = start
    c = True
    while c:
        # BUG FIX: known_404 must be reset every iteration.  Previously it
        # was only assigned after a successful parse, so a parse failure on
        # the very first iteration raised NameError, and a stale True from
        # a prior known-404 index could keep the loop running forever.
        known_404 = None
        try:
            c = lxml.html.parse(url + str(i) + '/')
        except Exception:
            # Page unavailable: terminate unless this index is a known gap.
            known_404s = cfg[key + '_known_404']
            if isinstance(known_404s, int):
                if i == known_404s:
                    known_404 = True
            elif isinstance(known_404s, (list, tuple)):
                if i in known_404s:
                    known_404 = True
            c = False
        if c and not known_404:
            t = c.find(".//title").text
            # Skip disbanded clans (marker text configured per server).
            if cfg[key + '_disbanded'] not in t:
                tag_name = t[:-cfg[key + '_clan_page_trim']]
                tag_end = tag_name.find(']')
                clan_tag = tag_name[1:tag_end]
                clan_name = tag_name[tag_end+2:]
                print('\rProcessing clan: # {0} [{1}]'.format(i, clan_tag).ljust(67), end='')
                with open(mypath + '/' + key + '_clans.lst', mode='a', encoding='utf_8') as clanlist:
                    clanlist.write('{0}\t{1}\t{2}\n'.format(i, clan_tag, clan_name))
        if known_404:
            # A known gap: keep going past the failed index.
            c = True
        i += 1
    print('\nDONE.')
if __name__ == "__main__":
    # Script entry point: command-line interface not implemented yet.
    pass
|
21,396 | c682c86cf07b388ed2e3a275b34fe3a679ab0c02 | from overrides import overrides
from allennlp.common.util import JsonDict
from allennlp.data import Instance
from allennlp.predictors.predictor import Predictor
@Predictor.register('seq2seq_dynamic')
class Seq2SeqDynamicPredictor(Predictor):
    """
    Predictor for sequence to sequence models with dynamic vocabulary, including
    :class:`~allennlp.models.encoder_decoder.simple_seq2seq_dynamic`
    """

    def predict(self, source: str, allowed_tokens: str) -> JsonDict:
        """Predict for a raw source string restricted to the given tokens."""
        payload = {"source": source, "allowed_tokens": allowed_tokens}
        return self.predict_json(payload)

    @overrides
    def _json_to_instance(self, json_dict: JsonDict) -> Instance:
        """
        Expects JSON that looks like ``{"source": "...", "allowed_tokens": "..."}``.
        """
        return self._dataset_reader.text_to_instance(
            json_dict["source"], json_dict["allowed_tokens"]
        )
|
21,397 | f17d296270b462ba006f827a5bf52db9a602a412 | import atexit
import sys
from enum import Enum
from random import randint
from game import Game
transcript_file_name = "transcript"
class Mode(Enum):
    """Strategy used by play_game() to choose each move."""
    RANDOM = 1  # pick moves uniformly at random
    LEARN = 2   # reserved for a learning agent (not implemented yet)
def play_game(mode=Mode.RANDOM):
    """
    Automates game playing by deciding the next move according to mode.

    Runs a full game, redrawing the board after every move, and records
    the finished game's transcript via record_game().

    Parameters:
        mode (Mode): strategy for choosing moves; only Mode.RANDOM is
            currently supported — any other mode aborts via sys.exit().

    Returns:
        int: the final game score, or None when interrupted with Ctrl-C.
    """
    game = Game()
    moves = []
    margins = {'left': 4, 'top': 4, 'bottom': 4}

    # Make sure the terminal cursor is restored even on abnormal exit.
    atexit.register(game.showCursor)

    try:
        game.hideCursor()
        while True:
            game.clearScreen()
            print(game.__str__(margins=margins))
            if game.board.won() or not game.board.canMove():
                break
            if mode == Mode.RANDOM:
                m = get_random_move()
                moves.append(str(m))
            else:
                # BUG FIX: removed an unreachable get_random_move() call
                # that followed this sys.exit().
                sys.exit("unsupported learning mode")
            game.incScore(game.board.move(m))
    except KeyboardInterrupt:
        # Persist the best score before bailing out on Ctrl-C.
        game.saveBestScore()
        return

    game.saveBestScore()
    print('You won!' if game.board.won() else 'Game Over')
    record_game(game, moves)
    return game.score
def get_random_move():
    """
    Pick a random keystroke for the automated player.

    Board moves are encoded on board.Board as
    UP, DOWN, LEFT, RIGHT, PAUSE = 1, 2, 3, 4, 5;
    PAUSE (5) is deliberately excluded from the draw.
    """
    return randint(1, 4)
def record_game(game, moves):
    """
    Appends the game transcript to the end of the file defined by transcript_file_name.

    A transcript is string of all moves played in the game followed by the game score,
    with each item separated by a " ".

    Parameters:
        game: finished game object; only its ``score`` attribute is read.
        moves (list[str]): moves played, in order.
    """
    transcript = " ".join(moves) + " " + str(game.score) + "\n"
    # Context manager guarantees the file is closed even if write() fails
    # (the original open/close pair leaked the handle on error).
    with open(transcript_file_name, 'a') as transcript_file:
        transcript_file.write(transcript)
|
21,398 | bb300a79bdb7e0e52b9eab1099236766aaf7051d | #!/usr/bin/env python3
from pytradfri import Gateway
from pytradfri.api.libcoap_api import APIFactory
import uuid
import argparse
# Command-line interface: the gateway address plus its printed security code.
parser = argparse.ArgumentParser()
parser.add_argument(
    'host', metavar='IP', type=str,
    help='IP Address of your Tradfri gateway')
parser.add_argument(
    'key',
    help='Security code found on your Tradfri gateway')
args = parser.parse_args()

# A fresh random identity is registered against the gateway on every run.
identity = uuid.uuid4().hex
api_factory = APIFactory(host=args.host, psk_id=identity)

try:
    psk = api_factory.generate_psk(args.key)
except AttributeError:
    print("Failed")
else:
    print('Identity: ', identity)
    print('Generated PSK: ', psk)
21,399 | c7cfcfad807d069b859616a9678b33bc4aca3f1d | #VoidFinder Function to do just about everything
import numpy as np
from sklearn import neighbors
from astropy.table import Table
import time
from hole_combine import combine_holes
from voidfinder_functions import mesh_galaxies, in_mask,not_in_mask, in_survey, save_maximals, mesh_galaxies_dict
from table_functions import add_row, subtract_row, to_vector, to_array, table_dtype_cast, table_divide
from volume_cut import volume_cut
from avsepcalc import av_sep_calc
from mag_cutoff_function import mag_cut, field_gal_cut
maskra = 360       # Angular mask width: number of 1-degree RA bins
maskdec = 180      # Angular mask height: number of 1-degree Dec bins
dec_offset = -90   # Shift so Dec in [-90, 90] indexes mask columns [0, 180)
dl = 5 # Cell side length [Mpc/h]
dr = 1. # Distance to shift the hole centers
frac = 0.1 # Overlap fraction for calculating maximal spheres

# Constants
c = 3e5            # Speed of light [km/s]
DtoR = np.pi/180.  # Degrees -> radians conversion factor
RtoD = 180./np.pi  # Radians -> degrees conversion factor
def filter_galaxies(infile, maskfile, min_dist, max_dist, survey_name, mag_cut_flag, rm_isolated_flag):
    """Pre-process the galaxy catalog for void finding.

    Optionally removes faint galaxies, converts sky coordinates to
    Cartesian, builds the boolean angular survey mask, and (optionally)
    separates isolated "field" galaxies from "wall" galaxies using the
    third-nearest-neighbor distance.  Writes the field and wall galaxy
    tables to '<survey_name>field_gal_file.txt' / 'wall_gal_file.txt'.

    Parameters
    ----------
    infile : astropy.table.Table
        Galaxy catalog with at least 'ra', 'dec' [deg] and 'Rgal' columns.
    maskfile : astropy.table.Table
        Table of (ra, dec) integer-degree cells covered by the survey.
    min_dist, max_dist : float
        Radial survey limits.  NOTE(review): not used in this function's
        body — apparently kept for interface symmetry with find_voids.
    survey_name : str
        Prefix for the output galaxy-list file names.
    mag_cut_flag : bool
        If True, drop galaxies fainter than magnitude -20 via mag_cut().
    rm_isolated_flag : bool
        If True, split the catalog into field and wall galaxies.

    Returns
    -------
    coord_min_table : astropy.table.Table
        Minimum x, y, z of the galaxy distribution (grid origin).
    mask : numpy.ndarray of bool
        (maskra, maskdec) angular mask, True where the survey has coverage,
        indexed by [ra, dec - dec_offset].
    ngrid[0] : int
        Number of grid cells along the x-axis only.  NOTE(review): the grid
        may be anisotropic — confirm callers expect a single scalar here.
    """
    ################################################################################
    #
    # PRE-PROCESS DATA
    #
    ################################################################################
    print('Pre-processing data', flush=True)

    # Remove faint galaxies
    if mag_cut_flag:
        infile = mag_cut(infile,-20)

    # Convert galaxy coordinates to Cartesian
    xin = infile['Rgal']*np.cos(infile['ra']*DtoR)*np.cos(infile['dec']*DtoR)
    yin = infile['Rgal']*np.sin(infile['ra']*DtoR)*np.cos(infile['dec']*DtoR)
    zin = infile['Rgal']*np.sin(infile['dec']*DtoR)
    coord_in_table = Table([xin, yin, zin], names=('x','y','z'))

    # Cartesian coordinate minima
    coord_min_x = [min(coord_in_table['x'])]
    coord_min_y = [min(coord_in_table['y'])]
    coord_min_z = [min(coord_in_table['z'])]
    coord_min_table = Table([coord_min_x, coord_min_y, coord_min_z], names=('x','y','z'))

    # Cartesian coordinate maxima
    coord_max_x = [max(coord_in_table['x'])]
    coord_max_y = [max(coord_in_table['y'])]
    coord_max_z = [max(coord_in_table['z'])]
    coord_max_table = Table([coord_max_x, coord_max_y, coord_max_z], names=('x','y','z'))

    # Number of galaxies
    N_gal = len(infile)

    print('x:', coord_min_table['x'][0], coord_max_table['x'][0], flush=True)
    print('y:', coord_min_table['y'][0], coord_max_table['y'][0], flush=True)
    print('z:', coord_min_table['z'][0], coord_max_table['z'][0], flush=True)
    print('There are', N_gal, 'galaxies in this simulation.', flush=True)

    # Convert coord_in, coord_min, coord_max tables to numpy arrays
    coord_in = to_array(coord_in_table)
    coord_min = to_vector(coord_min_table)
    coord_max = to_vector(coord_max_table)

    print('Reading mask',flush=True)

    #maskfile = Table.read(mask_filename, format='ascii.commented_header')

    # Boolean angular mask: True where the (ra, dec) degree cell is covered.
    mask = np.zeros((maskra, maskdec), dtype=bool)
    mask[maskfile['ra'].astype(int), maskfile['dec'].astype(int) - dec_offset] = True
    vol = len(maskfile)  # NOTE(review): computed but never used below

    print('Read mask',flush=True)
    ################################################################################
    #
    # PUT THE GALAXIES ON A CHAIN MESH
    #
    ################################################################################
    #dl = box/ngrid # length of each side of the box
    #print('Number of grid cells is', ngrid, dl, box)
    #print('Making the grid')
    #print('coord_min shape:', coord_min.shape)
    #print('coord_max shape:', coord_max.shape)

    # Array of size of survey in x, y, z directions [Mpc/h]
    #box = np.array([coord_max_x[0] - coord_min_x[0], coord_max_y[0] - coord_min_y[0], coord_max_z[0] - coord_min_z[0]])
    box = coord_max - coord_min
    #print('box shape:', box.shape)

    # Array of number of cells in each direction
    ngrid = box/dl
    ngrid = np.ceil(ngrid).astype(int)
    #print('ngrid shape:', ngrid.shape)

    print('Number of grid cells is', ngrid, 'with side lengths of', dl, 'Mpc/h', flush=True)
    '''
    # Bin the galaxies onto a 3D grid
    #mesh_indices, ngal, chainlist, linklist = mesh_galaxies(coord_in_table, coord_min_table, dl, ngrid)
    #ngal, chainlist, linklist = mesh_galaxies(coord_in_table, coord_min_table, dl, tuple(ngrid))
    #print('Made the grid')

    print('Checking the grid')
    grid_good = True

    for i in range(ngrid[0]):
        for j in range(ngrid[1]):
            for k in range(ngrid[2]):
                count = 0
                igal = chainlist[i,j,k]
                while igal != -1:
                    count += 1
                    igal = linklist[igal]
                if count != ngal[i,j,k]:
                    print(i,j,k, count, ngal[i,j,k])
                    grid_good = False

    if grid_good:
        print('Grid construction was successful.')
    '''
    ################################################################################
    #
    # SEPARATION
    #
    ################################################################################
    if rm_isolated_flag:
        sep_start = time.time()

        print('Finding sep',flush=True)

        # l is the wall/field separation threshold; its exact definition
        # lives in av_sep_calc — see that module.
        l, avsep, sd, dists3 = av_sep_calc(coord_in_table)

        print('Average separation of n3rd gal is', avsep, flush=True)
        print('The standard deviation is', sd,flush=True)

        # l = 5.81637 # s0 = 7.8, gamma = 1.2, void edge = -0.8
        # l = 7.36181 # s0 = 3.5, gamma = 1.4
        # or force l to have a fixed number by setting l = ****

        print('Going to build wall with search value', l, flush=True)

        sep_end = time.time()

        print('Time to find sep =',sep_end-sep_start, flush=True)

        fw_start = time.time()

        # Galaxies whose 3rd-nearest neighbor is farther than l are "field".
        f_coord_table, w_coord_table = field_gal_cut(coord_in_table, dists3, l)
    else:
        # No isolation cut: every galaxy is treated as a wall galaxy.
        w_coord_table = coord_in_table
        f_coord_table = Table(names=coord_in_table.colnames)

    f_coord_table.write(survey_name + 'field_gal_file.txt', format='ascii.commented_header', overwrite=True)
    w_coord_table.write(survey_name + 'wall_gal_file.txt', format='ascii.commented_header', overwrite=True)

    if rm_isolated_flag:
        fw_end = time.time()
        print('Time to sort field and wall gals =', fw_end-fw_start, flush=True)

    nf = len(f_coord_table)
    nwall = len(w_coord_table)
    print('Number of field gals:', nf, 'Number of wall gals:', nwall, flush=True)

    return coord_min_table, mask, ngrid[0]
def find_voids(ngrid, min_dist, max_dist, coord_min_table, mask, out1_filename, out2_filename, survey_name):
w_coord_table = Table.read(survey_name + 'wall_gal_file.txt', format='ascii.commented_header')
w_coord = to_array(w_coord_table)
coord_min = to_vector(coord_min_table)
#coord_min = coord_min[0] # 0-index is to convert from shape (1,3) to shape (3,)
################################################################################
#
# SET UP CELL GRID DISTRIBUTION
#
################################################################################
'''
print('Setting up grid of wall galaxies')
#wall_mesh_indices, ngal_wall, chainlist_wall, linklist_wall = mesh_galaxies(w_coord_table, coord_min_table, dl, ngrid)
ngal_wall = mesh_galaxies(w_coord_table, coord_min_table, dl, tuple(ngrid))
print('Wall galaxy grid set up')
'''
# Build a dictionary of all the cell IDs that have at least one galaxy in them
#cell_ID_dict = mesh_galaxies_dict(w_coord_table, coord_min_table, dl)
cell_ID_dict = mesh_galaxies_dict(w_coord, coord_min, dl)
print('Galaxy grid indices computed')
################################################################################
#
# BUILD NEAREST-NEIGHBOR TREE
#
################################################################################
kdtree_start_time = time.time()
galaxy_tree = neighbors.KDTree(w_coord)
print('KDTree creation time:', time.time() - kdtree_start_time)
################################################################################
#
# GROW HOLES
#
################################################################################
hole_times = []
tot_hole_start = time.time()
print('Growing holes', flush=True)
# Center of the current cell
#hole_center_table = Table(np.zeros(6), names=('x', 'y', 'z', 'r', 'ra', 'dec'))
# Initialize list of hole details
myvoids_x = []
myvoids_y = []
myvoids_z = []
myvoids_r = []
# Number of holes found
n_holes = 0
# Find where all the empty cells are
#empty_indices = np.where(ngal_wall == 0)
# Go through each empty cell in the grid
empty_cell = 0
# Number of empty cells
n_empty_cells = ngrid[0]*ngrid[1]*ngrid[2] - len(cell_ID_dict)
#######
# DEBUGGING VARIABLES
#######
cycle_time = time.time()
#for empty_cell in range(len(empty_indices[0])):
for i in range(ngrid[0]):
for j in range(ngrid[1]):
for k in range(ngrid[2]):
check_bin_ID = (i,j,k)
# Check if there are any galaxies in this grid in this cell
if check_bin_ID not in cell_ID_dict:
hole_start = time.time()
'''
# Retrieve empty cell indices
i = empty_indices[0][empty_cell]
j = empty_indices[1][empty_cell]
k = empty_indices[2][empty_cell]
if empty_cell%10000 == 0:
print('Looking in empty cell', empty_cell, 'of', len(empty_indices[0]))
'''
empty_cell += 1
if empty_cell%10000 == 0:
print('Looking in empty cell', empty_cell, 'of', n_empty_cells, flush=True)#, '---', i,j,k, '---', time.time() - cycle_time, flush=True)
cycle_time = time.time()
#print('coord_min shape:', coord_min.shape)
# Calculate center coordinates of cell
#hole_center_table['x'] = (i + 0.5)*dl + coord_min_table['x']
#hole_center_table['y'] = (j + 0.5)*dl + coord_min_table['y']
#hole_center_table['z'] = (k + 0.5)*dl + coord_min_table['z']
hole_center = (np.array([[i, j, k]]) + 0.5)*dl + coord_min # Purposefully making hole_center have shape (1,3) for KDtree queries
#hole_center = to_vector(hole_center_table)
# Check to make sure that the hole center is still within the survey
if not_in_mask(hole_center, mask, min_dist, max_dist):
continue
#print('______________________________________________')
# Find closest galaxy to cell center
#modv1, k1g = galaxy_tree.query(hole_center.T, k=1)
modv1, k1g = galaxy_tree.query(hole_center, k=1)
modv1 = modv1[0][0]
k1g = k1g[0][0]
# Unit vector pointing from cell center to the closest galaxy
#v1_unit = (w_coord[k1g] - hole_center.T)/modv1
v1_unit = (w_coord[k1g] - hole_center)/modv1
#print('v1_unit shape:', v1_unit.shape)
#print('Hole radius', modv1, 'after finding 1st galaxy')
############################################################
# We are going to shift the center of the hole by dr along
# the direction of the vector pointing from the nearest
# galaxy to the center of the empty cell. From there, we
# will search within a radius of length the distance between
# the center of the hole and the first galaxy from the
# center of the hole to find the next nearest neighbors.
# From there, we will minimize top/bottom to find which one
# is the next nearest galaxy that bounds the hole.
############################################################
galaxy_search = True
hole_center_2 = hole_center
in_mask_2 = True
while galaxy_search:
# Shift hole center away from first galaxy
hole_center_2 = hole_center_2 - dr*v1_unit
# Distance between hole center and nearest galaxy
modv1 += dr
# Search for nearest neighbors within modv1 of the hole center
#i_nearest, dist_nearest = galaxy_tree.query_radius(hole_center_2, r=modv1, return_distance=True, sort_results=True)
i_nearest = galaxy_tree.query_radius(hole_center_2, r=modv1)
i_nearest = i_nearest[0]
#dist_nearest = dist_nearest[0]
# Remove nearest galaxy from list
boolean_nearest = i_nearest != k1g
i_nearest = i_nearest[boolean_nearest]
#dist_nearest = dist_nearest[boolean_nearest]
if len(i_nearest) > 0:
# Found at least one other nearest neighbor!
# Calculate vector pointing from next nearest galaxies to the nearest galaxy
BA = w_coord[k1g] - w_coord[i_nearest] # shape (N,3)
#print('BA shape:', BA.shape)
bot = 2*np.dot(BA, v1_unit.T) # shape (N,1)
#print('bot shape:', bot.shape)
top = np.sum(BA**2, axis=1) # shape (N,)
#print('top shape:', top.shape)
x2 = top/bot.T[0] # shape (N,)
#print('x2 shape:', x2.shape)
# Locate positive values of x2
valid_idx = np.where(x2 > 0)[0] # shape (n,)
#print('valid_idx shape:', valid_idx.shape)
if len(valid_idx) > 0:
# Find index of 2nd nearest galaxy
k2g_x2 = valid_idx[x2[valid_idx].argmin()]
k2g = i_nearest[k2g_x2]
minx2 = x2[k2g_x2] # Eliminated transpose on x2
galaxy_search = False
#elif not in_mask(hole_center_2.T, mask, [min_dist, max_dist]):
elif not_in_mask(hole_center_2, mask, min_dist, max_dist):
# Hole is no longer within survey limits
galaxy_search = False
in_mask_2 = False
# Check to make sure that the hole center is still within the survey
#if not in_mask(hole_center_2.T, mask, [min_dist, max_dist]):
#if not_in_mask(hole_center_2, mask, min_dist, max_dist):
if not in_mask_2:
#print('hole not in survey')
continue
#print('Found 2nd galaxy')
'''
if k2g == i_nearest[0]:
print('2nd galaxy was the next nearest neighbor.')
else:
print('2nd galaxy was NOT the next nearest neighbor.')
'''
# Calculate new hole center
hole_radius = 0.5*np.sum(BA[k2g_x2]**2)/np.dot(BA[k2g_x2], v1_unit.T) # shape (1,)
hole_center = w_coord[k1g] - hole_radius*v1_unit # shape (1,3)
#print('hole_center shape:', hole_center.shape)
#print('hole_radius shape:', hole_radius.shape)
#print('Hole radius', hole_radius, 'after finding 2nd galaxy')
# Check to make sure that the hole center is still within the survey
#if not in_mask(hole_center, mask, [min_dist, max_dist]):
if not_in_mask(hole_center, mask, min_dist, max_dist):
#print('hole not in survey')
continue
'''
########################################################################
# TEST BLOCK
# Make sure that there are no galaxies contained within hole
i_nearest, dist_nearest = galaxy_tree.query_radius(hole_center, r=hole_radius, return_distance=True, sort_results=True)
i_nearest = i_nearest[0]
dist_nearest = dist_nearest[0]
# Remove two nearest galaxies from list
boolean_nearest = np.logical_and(i_nearest != k1g, i_nearest != k2g)
dist_nearest = dist_nearest[boolean_nearest]
i_nearest = i_nearest[boolean_nearest]
if len(i_nearest) > 0:
print('2nd galaxy - There are galaxies inside the hole!', len(i_nearest))
########################################################################
'''
########################################################################
# Now find the third nearest galaxy.
########################################################################
# Same methodology as for finding the second galaxy
# Find the midpoint between the two nearest galaxies
midpoint = 0.5*(w_coord[k1g] + w_coord[k2g]) # shape (3,)
#print('midpoint shape:', midpoint.shape)
# Define the unit vector along which to move the hole center
modv2 = np.linalg.norm(hole_center - midpoint)
v2_unit = (hole_center - midpoint)/modv2 # shape (1,3)
#print('v2_unit shape', v2_unit.shape)
# Calculate vector pointing from the hole center to the nearest galaxy
Acenter = w_coord[k1g] - hole_center # shape (1,3)
# Calculate vector pointing from the hole center to the second-nearest galaxy
Bcenter = w_coord[k2g] - hole_center # shape (1,3)
# Initialize moving hole center
hole_center_3 = hole_center # shape (1,3)
galaxy_search = True
in_mask_3 = True
while galaxy_search:
# Shift hole center along unit vector
hole_center_3 = hole_center_3 + dr*v2_unit
# New hole "radius"
search_radius = np.linalg.norm(w_coord[k1g] - hole_center_3)
# Search for nearest neighbors within modv1 of the hole center
#i_nearest, dist_nearest = galaxy_tree.query_radius(hole_center, r=np.linalg.norm(Acenter), return_distance=True, sort_results=True)
i_nearest = galaxy_tree.query_radius(hole_center_3, r=search_radius)
i_nearest = i_nearest[0]
#dist_nearest = dist_nearest[0]
# Remove two nearest galaxies from list
boolean_nearest = np.logical_and(i_nearest != k1g, i_nearest != k2g)
i_nearest = i_nearest[boolean_nearest]
#dist_nearest = dist_nearest[boolean_nearest]
if len(i_nearest) > 0:
# Found at least one other nearest neighbor!
# Calculate vector pointing from hole center to next nearest galaxies
Ccenter = w_coord[i_nearest] - hole_center # shape (N,3)
bot = 2*np.dot((Ccenter - Acenter), v2_unit.T) # shape (N,1)
top = np.sum(Ccenter**2, axis=1) - np.sum(Bcenter**2) # shape (N,)
x3 = top/bot.T[0] # shape (N,)
# Locate positive values of x3
valid_idx = np.where(x3 > 0)[0] # shape (N,)
if len(valid_idx) > 0:
# Find index of 3rd nearest galaxy
k3g_x3 = valid_idx[x3[valid_idx].argmin()]
k3g = i_nearest[k3g_x3]
minx3 = x3[k3g_x3]
galaxy_search = False
#elif not in_mask(hole_center_3, mask, [min_dist, max_dist]):
elif not_in_mask(hole_center_3, mask, min_dist, max_dist):
# Hole is no longer within survey limits
galaxy_search = False
in_mask_3 = False
# Check to make sure that the hole center is still within the survey
#if not in_mask(hole_center_3, mask, [min_dist, max_dist]):
#if not_in_mask(hole_center_3, mask, min_dist, max_dist):
if not in_mask_3:
#print('hole not in survey')
continue
#print('Found 3rd galaxy')
'''
if k3g == i_nearest[0]:
print('3rd galaxy was the next nearest neighbor.')
else:
print('3rd galaxy was NOT the next nearest neighbor.')
'''
# Calculate new hole center
hole_center = hole_center + minx3*v2_unit # shape (1,3)
hole_radius = np.linalg.norm(hole_center - w_coord[k1g]) # shape ()
#print('hole_center shape:', hole_center.shape)
#print('hole_radius shape:', hole_radius.shape)
#print('Hole radius', hole_radius, 'after finding 3rd galaxy')
# Check to make sure that the hole center is still within the survey
#if not in_mask(hole_center, mask, [min_dist, max_dist]):
if not_in_mask(hole_center, mask, min_dist, max_dist):
#print('hole not in survey')
continue
'''
########################################################################
# TEST BLOCK
# Make sure that there are no galaxies contained within hole
i_nearest, dist_nearest = galaxy_tree.query_radius(hole_center, r=hole_radius, return_distance=True, sort_results=True)
i_nearest = i_nearest[0]
dist_nearest = dist_nearest[0]
# Remove two nearest galaxies from list
boolean_nearest = np.logical_and.reduce((i_nearest != k1g, i_nearest != k2g, i_nearest != k3g))
dist_nearest = dist_nearest[boolean_nearest]
i_nearest = i_nearest[boolean_nearest]
if len(i_nearest) > 0:
print('3rd galaxy - There are galaxies inside the hole!', len(i_nearest))
########################################################################
'''
########################################################################
# Now find the 4th nearest neighbor
#
# Process is very similar as before, except we do not know if we have to
# move above or below the plane. Therefore, we will find the next closest
# if we move above the plane, and the next closest if we move below the
# plane.
########################################################################
# The vector along which to move the hole center is defined by the cross
# product of the vectors pointing between the three nearest galaxies.
AB = w_coord[k1g] - w_coord[k2g] # shape (3,)
BC = w_coord[k3g] - w_coord[k2g] # shape (3,)
v3 = np.cross(AB,BC) # shape (3,)
#print('AB shape:', AB.shape)
#print('BC shape:', BC.shape)
#print('v3 shape:', v3.shape)
modv3 = np.linalg.norm(v3)
v3_unit = v3/modv3 # shape (3,)
# Calculate vector pointing from the hole center to the nearest galaxy
Acenter = w_coord[k1g] - hole_center # shape (1,3)
# Calculate vector pointing from the hole center to the second-nearest galaxy
Bcenter = w_coord[k2g] - hole_center # shape (1,3)
# First move in the direction of the unit vector defined above
galaxy_search = True
hole_center_41 = hole_center
in_mask_41 = True
while galaxy_search:
# Shift hole center along unit vector
hole_center_41 = hole_center_41 + dr*v3_unit
#print('Shifted center to', hole_center_41)
# New hole "radius"
search_radius = np.linalg.norm(w_coord[k1g] - hole_center_41)
# Search for nearest neighbors within R of the hole center
#i_nearest, dist_nearest = galaxy_tree.query_radius(hole_center_41, r=np.linalg.norm(Acenter), return_distance=True, sort_results=True)
i_nearest = galaxy_tree.query_radius(hole_center_41, r=search_radius)
i_nearest = i_nearest[0]
#dist_nearest = dist_nearest[0]
# Remove two nearest galaxies from list
boolean_nearest = np.logical_and.reduce((i_nearest != k1g, i_nearest != k2g, i_nearest != k3g))
i_nearest = i_nearest[boolean_nearest]
#dist_nearest = dist_nearest[boolean_nearest]
#print('Number of nearby galaxies', len(i_nearest))
#if i_nearest.shape[0] > 0:
if len(i_nearest) > 0:
# Found at least one other nearest neighbor!
# Calculate vector pointing from hole center to next nearest galaxies
Dcenter = w_coord[i_nearest] - hole_center # shape (N,3)
#print('Dcenter shape:', Dcenter.shape)
bot = 2*np.dot((Dcenter - Acenter), v3_unit.T) # shape (N,)
#print('bot shape:', bot.shape)
top = np.sum(Dcenter**2, axis=1) - np.sum(Bcenter**2) # shape (N,)
#print('top shape:', top.shape)
x41 = top/bot.T[0] # shape (N,)
#print('x41 shape:', x41.shape)
# Locate positive values of x41
valid_idx = np.where(x41 > 0)[0] # shape (n,)
#print('valid_idx shape:', valid_idx.shape)
#if valid_idx.shape[0] == 1:
# k4g1 = i_nearest[valid_idx[0]]
# minx41 = x41[valid_idx[0]]
# galaxy_search = False
#
#elif valid_idx.shape[0] > 1:
if len(valid_idx) > 0:
# Find index of 4th nearest galaxy
k4g1_x41 = valid_idx[x41[valid_idx].argmin()]
k4g1 = i_nearest[k4g1_x41]
minx41 = x41[k4g1_x41]
galaxy_search = False
#elif not in_mask(hole_center_41, mask, [min_dist, max_dist]):
elif not_in_mask(hole_center_41, mask, min_dist, max_dist):
# Hole is no longer within survey limits
galaxy_search = False
in_mask_41 = False
#print('Found first potential 4th galaxy')
'''
if k4g1 == i_nearest[0]:
print('First 4th galaxy was the next nearest neighbor.')
else:
print('First 4th galaxy was NOT the next nearest neighbor.')
'''
# Calculate potential new hole center
#if in_mask(hole_center_41, mask, [min_dist, max_dist]):
#if not not_in_mask(hole_center_41, mask, min_dist, max_dist):
if in_mask_41:
hole_center_41 = hole_center + minx41*v3_unit # shape (1,3)
#print('______________________')
#print(hole_center_41, 'hc41')
#print('hole_radius_41', np.linalg.norm(hole_center_41 - w_coord[k1g]))
# Repeat same search, but shift the hole center in the other direction this time
v3_unit = -v3_unit
# First move in the direction of the unit vector defined above
galaxy_search = True
# Initialize minx42 (in case it does not get created later)
minx42 = np.infty
hole_center_42 = hole_center
minx42 = np.infty
in_mask_42 = True
while galaxy_search:
# Shift hole center along unit vector
hole_center_42 = hole_center_42 + dr*v3_unit
# New hole "radius"
search_radius = np.linalg.norm(w_coord[k1g] - hole_center_42)
# Search for nearest neighbors within R of the hole center
#i_nearest, dist_nearest = galaxy_tree.query_radius(hole_center_42, r=np.linalg.norm(Acenter), return_distance=True, sort_results=True)
i_nearest = galaxy_tree.query_radius(hole_center_42, r=search_radius)
i_nearest = i_nearest[0]
#dist_nearest = dist_nearest[0]
# Remove three nearest galaxies from list
boolean_nearest = np.logical_and.reduce((i_nearest != k1g, i_nearest != k2g, i_nearest != k3g))
i_nearest = i_nearest[boolean_nearest]
#dist_nearest = dist_nearest[boolean_nearest]
if len(i_nearest) > 0:
# Found at least one other nearest neighbor!
# Calculate vector pointing from hole center to next nearest galaxies
Dcenter = w_coord[i_nearest] - hole_center # shape (N,3)
bot = 2*np.dot((Dcenter - Acenter), v3_unit.T) # shape (N,)
top = np.sum(Dcenter**2, axis=1) - np.sum(Bcenter**2) # shape (N,)
x42 = top/bot.T[0] # shape (N,)
# Locate positive values of x42
valid_idx = np.where(x42 > 0)[0] # shape (n,)
if len(valid_idx) > 0:
# Find index of 3rd nearest galaxy
k4g2_x42 = valid_idx[x42[valid_idx].argmin()]
k4g2 = i_nearest[k4g2_x42]
minx42 = x42[k4g2_x42]
galaxy_search = False
#elif not in_mask(hole_center_42, mask, [min_dist, max_dist]):
elif not_in_mask(hole_center_42, mask, min_dist, max_dist):
# Hole is no longer within survey limits
galaxy_search = False
in_mask_42 = False
#print('Found second potential 4th galaxy')
'''
if k4g2 == i_nearest[0]:
print('Second 4th galaxy was the next nearest neighbor.')
else:
print('Second 4th galaxy was NOT the next nearest neighbor.')
'''
# Calculate potential new hole center
#if in_mask(hole_center_42, mask, [min_dist, max_dist]):
#if not not_in_mask(hole_center_42, mask, min_dist, max_dist):
if in_mask_42:
hole_center_42 = hole_center + minx42*v3_unit # shape (1,3)
#print(hole_center_42, 'hc42')
#print('hole_radius_42', np.linalg.norm(hole_center_42 - w_coord[k1g]))
#print('minx41:', minx41, ' minx42:', minx42)
'''
if not in_mask(hole_center_41, mask, [min_dist, max_dist]):
print('hole_center_41 is NOT in the mask')
if not in_mask(hole_center_42, mask, [min_dist, max_dist]):
print('hole_center_42 is NOT in the mask')
'''
# Determine which is the 4th nearest galaxy
#if in_mask(hole_center_41, mask, [min_dist, max_dist]) and minx41 <= minx42:
not_in_mask_41 = not_in_mask(hole_center_41, mask, min_dist, max_dist)
if not not_in_mask_41 and minx41 <= minx42:
# The first 4th galaxy found is the next closest
hole_center = hole_center_41
k4g = k4g1
#elif in_mask(hole_center_42, mask, [min_dist, max_dist]):
elif not not_in_mask(hole_center_42, mask, min_dist, max_dist):
# The second 4th galaxy found is the next closest
hole_center = hole_center_42
k4g = k4g2
#elif in_mask(hole_center_41, mask, [min_dist, max_dist]):
elif not not_in_mask_41:
# The first 4th galaxy found is the next closest
hole_center = hole_center_41
k4g = k4g1
else:
# Neither hole center is within the mask - not a valid hole
continue
# Radius of the hole
hole_radius = np.linalg.norm(hole_center - w_coord[k1g])
'''
########################################################################
# TEST BLOCK
# Make sure that there are no galaxies contained within hole
i_nearest, dist_nearest = galaxy_tree.query_radius(hole_center.T, r=hole_radius, return_distance=True, sort_results=True)
i_nearest = i_nearest[0]
dist_nearest = dist_nearest[0]
# Remove two nearest galaxies from list
boolean_nearest = np.logical_and.reduce((i_nearest != k1g, i_nearest != k2g, i_nearest != k3g, i_nearest != k4g))
dist_nearest = dist_nearest[boolean_nearest]
i_nearest = i_nearest[boolean_nearest]
if len(i_nearest) > 0:
print('________________________________________________')
print('There are galaxies inside the hole!', len(i_nearest))
print('Final radius:', hole_radius)
########################################################################
'''
'''
if hole_radius > 23:
if len(i_nearest) == 0:
print('_______________________________________________')
print(hole_center_41, 'hc41')
print('hole_radius_41', np.linalg.norm(hole_center_41 - w_coord[k1g]))
print(hole_center_42, 'hc42')
print('hole_radius_42', np.linalg.norm(hole_center_42 - w_coord[k1g]))
print('Final hole radius:', hole_radius)
'''
# Save hole: append the accepted center coordinates and radius to the
# growing result lists (hole_center is a 1xN row vector, hence [0,i]),
# and record the wall-clock time spent finding this hole.
myvoids_x.append(hole_center[0,0])
myvoids_y.append(hole_center[0,1])
myvoids_z.append(hole_center[0,2])
myvoids_r.append(hole_radius)
hole_times.append(time.time() - hole_start)
#print(hole_times[n_holes], i,j,k)
n_holes += 1
'''
if n_holes%100 == 0:
print("number of holes=",n_holes)
print("number of holes=",n_holes)
'''
# Summary statistics reported once the hole search is complete.
print('Found a total of', n_holes, 'potential voids.', flush=True)
print('Time to find all holes =', time.time() - tot_hole_start, flush=True)
print('AVG time to find each hole =', np.mean(hole_times), flush=True)
################################################################################
#
# SORT HOLES BY SIZE
#
################################################################################
sort_start = time.time()
print('Sorting holes by size', flush=True)
# Collect the per-hole result lists into one astropy Table.
potential_voids_table = Table([myvoids_x, myvoids_y, myvoids_z, myvoids_r], names=('x','y','z','radius'))
# Need to sort the potential voids into size order: ascending sort on
# radius followed by reverse() leaves the table in DESCENDING radius
# order (largest hole first), which the downstream combining step relies on.
potential_voids_table.sort('radius')
potential_voids_table.reverse()
'''
potential_voids_file = open('potential_voids_list.txt', 'wb')
pickle.dump(potential_voids_table, potential_voids_file)
potential_voids_file.close()
in_file = open('potential_voids_list.txt', 'rb')
potential_voids_table = pickle.load(in_file)
in_file.close()
'''
sort_end = time.time()
print('Holes are sorted.',flush=True)
print('Time to sort holes =', sort_end-sort_start,flush=True)
################################################################################
#
# CHECK IF 90% OF VOID VOLUME IS WITHIN SURVEY LIMITS
#
################################################################################
print('Removing holes with at least 10% of their volume outside the mask',flush=True)
# volume_cut filters the table against the angular mask and the radial
# limits [min_dist, max_dist]; per the banner/print above it drops holes
# with >=10% of their volume outside the survey (helper defined elsewhere).
potential_voids_table = volume_cut(potential_voids_table, mask, [min_dist, max_dist])
# Persist the surviving hole list for inspection / restart.
potential_voids_table.write(survey_name + 'potential_voids_list.txt', format='ascii.commented_header', overwrite=True)
################################################################################
#
# FILTER AND SORT HOLES INTO UNIQUE VOIDS
#
################################################################################
combine_start = time.time()
print('Combining holes into unique voids',flush=True)
# combine_holes groups overlapping holes into voids (frac controls the
# overlap threshold — see helper's definition), returning the maximal
# sphere of each void and the full table of member holes.
maximal_spheres_table, myvoids_table = combine_holes(potential_voids_table, frac)
print('Number of unique voids is', len(maximal_spheres_table),flush=True)
# Save list of all void holes
myvoids_table.write(out2_filename, format='ascii.commented_header', overwrite=True)
combine_end = time.time()
print('Time to combine holes into voids =', combine_end-combine_start,flush=True)
'''
################################################################################
#
# COMPUTE VOLUME OF EACH VOID
#
################################################################################
print('Compute void volumes')
# Initialize void volume array
void_vol = np.zeros(void_count)
nran = 10000
for i in range(void_count):
nsph = 0
rad = 4*myvoids_table['radius'][v_index[i]]
for j in range(nran):
rand_pos = add_row(np.random.rand(3)*rad, myvoids_table['x','y','z'][v_index[i]]) - 0.5*rad
for k in range(len(myvoids_table)):
if myvoids_table['flag'][k]:
# Calculate difference between particle and sphere
sep = sum(to_vector(subtract_row(rand_pos, myvoids_table['x','y','z'][k])))
if sep < myvoids_table['radius'][k]**2:
# This particle lies in at least one sphere
nsph += 1
break
void_vol[i] = (rad**3)*nsph/nran
################################################################################
#
# IDENTIFY VOID GALAXIES
#
################################################################################
print('Assign field galaxies to voids')
# Count the number of galaxies in each void
nfield = np.zeros(void_count)
# Add void field to f_coord
f_coord['vID'] = -99
for i in range(nf): # Go through each void galaxy
for j in range(len(myvoids_table)): # Go through each void
if np.linalg.norm(to_vector(subtract_row(f_coord[i], myvoids_table['x','y','z'][j]))) < myvoids_table['radius'][j]:
# Galaxy lives in the void
nfield[myvoids_table['flag'][j]] += 1
# Set void ID in f_coord to match void ID
f_coord['vID'][i] = myvoids_table['flag'][j]
break
f_coord.write(voidgals_filename, format='ascii.commented_header')
'''
################################################################################
#
# MAXIMAL HOLE FOR EACH VOID
#
################################################################################
# Write the maximal-sphere table (one row per unique void) to
# out1_filename via the save_maximals helper (defined elsewhere).
save_maximals(maximal_spheres_table, out1_filename)
'''
################################################################################
#
# VOID REGION SIZES
#
################################################################################
# Initialize
void_regions = Table()
void_regions['radius'] = myvoids_table['radius'][v_index]
void_regions['effective_radius'] = (void_vol*0.75/np.pi)**(1./3.)
void_regions['volume'] = void_vol
void_regions['x'] = myvoids_table['x'][v_index]
void_regions['y'] = myvoids_table['y'][v_index]
void_regions['z'] = myvoids_table['z'][v_index]
void_regions['deltap'] = (nfield - N_gal*void_vol/vol)/(N_gal*void_vol/vol)
void_regions['n_gal'] = nfield
void_regions['vol_maxHole'] = (4./3.)*np.pi*myvoids_table['radius'][v_index]**3/void_vol
void_regions.write(out3_filename, format='ascii.commented_header')
'''
# NOTE(review): removed web-page residue ("Subsets and Splits" dataset-viewer
# boilerplate) that was accidentally appended after the closing triple-quote
# above; it was not valid Python and carried no program content.