text stringlengths 38 1.54M |
|---|
from . import ffi as ffi
from typing import Any
# Type stubs only — the runtime implementations live in the `ffi` module imported above.
def address_of_symbol(name: Any): ...  # presumably returns the resolved address for `name` — TODO confirm
def add_symbol(name: Any, address: Any) -> None: ...  # registers `name` at `address`
def load_library_permanently(filename: Any) -> None: ...  # loads a shared library into the process
|
'''
Created on Jul 3, 2015
@author: kieran
'''
from spectral_tools import Sav_Template,apply_window
from functions import log_n
from echo_tools import *
import pylab as p
from general_tools import progress_bar,pload,pdump
from time import time
import pickle as pkl
T = 2009.  # epoch of observation (year)
d0 = 11000.  # distance to Cas A (11000 units = 3.4 kpc per original note; presumably light-years — TODO confirm)
mag_base = 100.**(1./5)  # base of the magnitude system (5 mag = factor 100 in flux)
WINDOW_FNAME = 'template_windows.dat'  # cache file for the computed window functions
def scale(x):
    """Divide *x* by the absolute value of its mean, so the entries average to 1."""
    vals = np.array(x)
    norm = np.abs(np.sum(vals) / len(vals))
    return vals / norm
# Light-echo dust sheets (fitted parameters supplied inline).
# Positional args: sigma, rho_0, z0, alpha, distance
le_2116 = dust_sheet(0.031, 627.9561, 437.11, 9., d0, T, psf=0.97, offset=-0.42, delta=-39.97, profile=gaussian)
le_2521 = dust_sheet(0.093, 317.9613, -9.89, 54., d0, T, psf=1.13, offset=-0.01, delta=-0.91, profile=gaussian)
le_3923 = dust_sheet(0.614, 1203.8904, 2045.38, 7., d0, T, psf=1.65, offset=-0.35, delta=-35.36, profile=gaussian)
les = [le_2116, le_2521, le_3923]
colours = ['r', 'b', 'y']  # one plot colour per light echo, same order as `les`
sn = Sav_Template('IIb_1specperSN')  # mean Type IIb supernova spectral template
sn.get_lightcurve()
ts_days, _, _ = sn.extract_data()  # finds the range of phases required
# to add ts_days=np.array(x,y,500)
sn.continuum = 'cubic'  # adds a cubic continuum
sn.get_lightcurve()  # rebuild the light curve with the continuum applied
mags = sn.lightcurve['V'](ts_days)  # V-band magnitudes sampled at the template phases
mags = normalise_mags(mags)
# Load cached window functions if present; otherwise compute one window per
# dust sheet and phase (slow), then cache the result to WINDOW_FNAME.
try:
    windows = pload(WINDOW_FNAME)
    print 'window functions read from file'
except IOError:
    print '\n\n'
    print 'calculating window functions'
    windows = [[] for le in les]
    slit = Slit(0., 1., subslits=8)
    ts_years = ts_days/365.  # todo change echo_tools to work in days
    # NOTE(review): parenthesisation differs between min(...)/365. and
    # max(.../365.) — numerically equivalent for an array argument, but
    # worth normalising for clarity.
    source = Source(sn.lightcurve['V'], min(sn.lc_phases)/365., max(sn.lc_phases/365.), T-300)
    for i in range(len(les)):
        print '\n'
        print 'Dust sheet %d of %d' % (i+1, len(les))
        for j in range(len(ts_years)):
            progress_bar(j, len(ts_years))
            windows[i].append(les[i].window(ts_years[j], T, slit, source=source))
        windows[i] = normalise(np.array(windows[i]))
    pdump(windows, WINDOW_FNAME)
print '\n'
print 'producing spectra'
phase, wlength, flux = sn.extract_data()
lambdas = np.linspace(4200, 8500, 500)  # wavelength grid, Angstroms
# Weight the template spectra with each dust sheet's window function.
spectra = []
for i in range(len(les)):
    spectra.append(apply_window(sn, windows[i], ts_days, lambdas))
unmodified = apply_window(sn, np.ones_like(windows[0]), ts_days, lambdas)  # unmodified spectrum is weighted with flat window fn
print '\n'
print 'convolving with the light curves'
# Echo light curve = template magnitudes dimmed by the log-magnitude window weight.
light_curves = []
for i in range(len(les)):
    light_curves.append(mags-log_n(windows[i], mag_base))
# --- Figure 1: window functions (top) and convolved light curves (bottom) ---
f, ax = p.subplots(2, 1, sharex=True)
ax[0].plot(ts_days, np.ones_like(ts_days), 'k')  # flat reference window
for i in range(len(windows)):
    ax[0].plot(ts_days, normalise(windows[i]), colours[i])
ax[0].set_ylim(0, 1.2)
ax[0].set_ylabel('Window Function')
ax[1].plot(ts_days, normalise_mags(mags), 'k')
for i in range(len(light_curves)):
    ax[1].plot(ts_days, light_curves[i], colours[i])
ax[1].set_ylim(6, -1)  # inverted axis so brighter magnitudes plot upward
ax[1].set_ylabel('V Magnitude')
ax[1].set_xlim(min(ts_days), 250)
ax[1].set_xlabel('Time, days')
ax[0].legend(['sn1993j', 'le2116', 'le2521', 'le3923'])

# --- Figure 2: unmodified vs window-weighted spectra ---
p.figure()
p.plot(lambdas, scale(unmodified), 'k')
for i in range(len(les)):
    p.plot(lambdas, scale(spectra[i]), colours[i])
# BUG FIX: np.array(a, b, c, d) is an invalid call (extra positional arguments
# are not array data); stack the wavelength grid and the three scaled spectra
# as rows of one array instead.
wfspectra = np.array([lambdas, scale(spectra[0]), scale(spectra[1]), scale(spectra[2])])
# BUG FIX: pickle is imported as `pkl` (not `pl`), and the old code rebound
# the figure handle `f` to the file object and never closed it.
with open('Templates_lewf.pkl', 'wb') as f_out:
    pkl.dump(wfspectra, f_out)
p.legend(['unmodified', 'le2116', 'le2521', 'le3923'], loc='lower right')
p.xlabel('Wavelength (A)')
p.ylabel('Flux')
p.title('Type IIb mean spectrum')
p.savefig('%s.png' % sn.name)
p.show()
|
#!/usr/bin/python3
#-- coding: utf-8 --
import time
import subprocess
import os
import telebot
import urllib #modulo tratamento urls
import emoji
from emoji import emojize
API_TOKEN = '<TOKEN DO SEU BOT>'  # bot token issued by BotFather
bot = telebot.TeleBot(API_TOKEN, threaded=False)  # telebot instance bound to the token
localtime = time.localtime(time.time())  # captured once at import; NOTE(review): appears unused below
@bot.message_handler(commands=['cable'])
def send_cable(message):
    """First step of /cable: ask the user for the MAC address to query."""
    msg = bot.reply_to(message, """ Digite o *mac* que deseja consultar: \n Conforme o exemplo: *AA:BB:DD:CC:11:22*
""", parse_mode="markdown")  # responds to the /cable command
    cid = message.chat.id  # chat id of the conversation
    bot.register_next_step_handler(msg, send_cable_step)  # wait for the user's reply, then continue
def send_cable_step(message):
    """Second step of /cable: run the lookup script with the MAC the user typed.

    The chat text is untrusted input; it is passed to the script as a single
    argv element (no shell) to prevent command injection.
    """
    try:
        cid = message.chat.id  # chat id of the conversation
        cable = message.text  # MAC address typed by the user
        msg = bot.reply_to(message, "Só um momento por favor...irei realizar sua *consulta!* para o cable: " + str(cable), parse_mode="markdown")
        # SECURITY FIX: os.popen('cable.sh ' + text) let the user inject shell
        # commands through the chat message; run the script without a shell.
        dados = subprocess.run(['cable.sh', str(cable)],
                               capture_output=True, text=True).stdout
        print(dados)
        bot.send_message(cid, "Seguem dados solicitados: \n" + str(dados))
        bot.send_message(cid, "Tenha um ótimo dia de trabalho!")
    except Exception as e:
        bot.reply_to(message, 'Oops, algo deu errado')
        print(e)
@bot.message_handler(commands=['node'])
def send_node(message):
    """First step of /node: ask the user for the node name to query."""
    msg = bot.reply_to(message, """Digite o *node* que deseja consultar: \n Conforme o exemplo: *SFLCM*
""", parse_mode="markdown")  # responds to the /node command
    cid = message.chat.id  # chat id of the conversation
    bot.register_next_step_handler(msg, send_node_step)  # wait for the user's reply, then continue
def send_node_step(message):
    """Second step of /node: run the node lookup script with the typed name.

    The chat text is untrusted input; it is passed as a single argv element
    (no shell) to prevent command injection.
    """
    try:
        cid = message.chat.id  # chat id of the conversation
        node = message.text  # node name typed by the user
        msg = bot.reply_to(message, "Só um momento por favor...irei realizar sua *consulta!* para o node: " + str(node), parse_mode="markdown")
        # SECURITY FIX: replaced os.popen string concatenation (shell injection).
        dados = subprocess.run(['buscanode.sh', str(node)],
                               capture_output=True, text=True).stdout
        print(dados)
        bot.send_message(cid, "Seguem dados solicitados: \n" + str(dados), parse_mode="markdown")
        bot.send_message(cid, "Tenha um ótimo dia de trabalho!")
    except Exception as e:
        bot.reply_to(message, 'Oops, algo deu errado')
        print(e)
@bot.message_handler(commands=['insere'])
def send_insere(message):
    """First step of /insere: ask for the cable MAC to add to monitoring."""
    msg = bot.reply_to(message, """Digite o *cable MAC* que deseja inserir no monitoramento: \n Conforme o exemplo: *AA:BB:DD:CC:EE:00:11*
""", parse_mode="markdown")  # responds to the /insere command
    cid = message.chat.id  # chat id of the conversation
    bot.register_next_step_handler(msg, send_insere_step)  # wait for the user's reply, then continue
def send_insere_step(message):
    """Second step of /insere: register the typed MAC with the monitoring script.

    The chat text is untrusted input; it is passed as a single argv element
    (no shell) to prevent command injection.
    """
    try:
        cid = message.chat.id  # chat id of the conversation
        insere = message.text  # MAC typed by the user
        msg = bot.reply_to(message, "Só um momento por favor...irei realizar sua *consulta!* para o node: " + str(insere), parse_mode="markdown")
        # SECURITY FIX: replaced os.popen string concatenation (shell injection).
        dados = subprocess.run(['insere_mon.sh', str(insere)],
                               capture_output=True, text=True).stdout
        print(dados)
        bot.send_message(cid, "Seguem dados solicitados: \n" + str(dados), parse_mode="markdown")
        bot.send_message(cid, "Tenha um ótimo dia de trabalho!")
    except Exception as e:
        bot.reply_to(message, 'Oops, algo deu errado')
        print(e)
@bot.message_handler(commands=['monitor'])
def send_monitor(message):
    """First step of /monitor: ask for the cable MAC whose monitoring to fetch."""
    msg = bot.reply_to(message, """Digite o *cable* que deseja resgatar o monitoramento: \n Conforme o exemplo: *AA:BB:DD:CC:EE:00:11*
""", parse_mode="markdown")  # responds to the /monitor command
    cid = message.chat.id  # chat id of the conversation
    bot.register_next_step_handler(msg, send_monitor_step)  # wait for the user's reply, then continue
def send_monitor_step(message):
    """Second step of /monitor: fetch monitoring data for the typed MAC.

    The chat text is untrusted input; it is passed as a single argv element
    (no shell) to prevent command injection.
    """
    try:
        cid = message.chat.id  # chat id of the conversation
        node_monitor = message.text  # MAC typed by the user
        msg = bot.reply_to(message, "Só um momento por favor...irei realizar sua *consulta!* para o node: " + str(node_monitor), parse_mode="markdown")
        # SECURITY FIX: replaced os.popen string concatenation (shell injection).
        dados = subprocess.run(['busca_mon.sh', str(node_monitor)],
                               capture_output=True, text=True).stdout
        print(dados)
        bot.send_message(cid, "Seguem dados solicitados: \n" + str(dados), parse_mode="markdown")
        bot.send_message(cid, "Tenha um ótimo dia de trabalho!")
    except Exception as e:
        bot.reply_to(message, 'Oops, algo deu errado')
        print(e)
@bot.message_handler(commands=['ajuda', 'start', 'help'])
def send_help(message):
    """Reply to /ajuda, /start and /help with usage instructions.

    BUG FIX: this function was named `send_node`, shadowing the /node handler
    defined above. Both handlers still fired (telebot captures the function at
    decoration time), but the module-level name silently pointed here.
    """
    msg = bot.reply_to(message, """Olá, sou o *LIA* seu consultor digital!\n
Digite /cable para interação de consulta do *cable* aguarde resposta e informe o *MAC* ex: *AA:BB:DD:CC:FF:11*\n
Digite /node para interação de consulta do *node* aguarde resposta e informe o *nome do node* que deseja consultar ex: *SFLCM*\n
Para informações sobre *versão* e *desenvolvedor* o comando /sobre te trará informações sobre mim
""", parse_mode="markdown")  # responds to the help commands
    cid = message.chat.id  # chat id of the conversation
    bot.send_message(cid, emojize(":robot:", use_aliases=True))
@bot.message_handler(commands=['sobre'])
def about(message):
    """Reply to /sobre with version/author info, a picture and credits."""
    msg = bot.reply_to(message, """Olá, sou o *Lenha Inteligência Artificial* rsrsrsr
para os mais intimos pode me chamar de *LIA* seu ajudante virtual...
Estou engatinhando ainda minha *versão estável* é rev *0.1.38*
Fui criado pelo Analista *Jamil Walber* e esse projeto foi desenvolvido do zero, isso mesmo não é uma copia foi tudo feito no braço!
caso precise entrar em contato com o desenvolvedor seu email é [jamil.walber@claro.com]
Me use com cautela pois todas informações estão sendo logadas hehehe
""", parse_mode="markdown")  # responds to the /sobre command
    cid = message.chat.id  # chat id of the conversation
    # NOTE(review): hard-coded absolute path, and the file handle is never closed.
    img = open("/dados/teste_mod/py/img/img1.png", 'rb')
    bot.send_photo(cid, img)
    bot.send_message(cid, """Minhas técnicas de consulta são feitas com *Shell Script* e *Python*!
Favor reportar *falhas* nas *consultas* ou erros meu pai é o *Jamil* então ja sabe para quem reclamar rsrsrs\n
Time de desenvolvimento *Datacenter Claro PR*
*Todos os direitos são reservados* ®
""", parse_mode="markdown")
# Persist pending next-step handlers to disk so conversations survive restarts.
bot.enable_save_next_step_handlers(delay=2)
bot.load_next_step_handlers()
# Start long-polling; none_stop keeps polling through transient errors.
bot.polling(none_stop=True)
|
from django.core import serializers
from django.core.paginator import Paginator
from django.shortcuts import render, redirect, get_object_or_404
from django.template import loader
from django.utils import timezone
from django.http import JsonResponse
from django.db.models import Count
from .forms import GrupoForm, MusicoForm, AlbumForm
from .models import Grupo, Musico, Album
# Create your views here.
def index(request):
    """Landing page."""
    context = {}
    return render(request, 'práctica_05/index.html', context)

def test_template(request):
    """Scratch view used to try out templates."""
    context = {}  # template variables go here
    return render(request, 'test.html', context)

def grupo_lista(request):
    """Group list page; rows are loaded via AJAX from grupo_datos."""
    return render(request, 'práctica_05/grupos.html')

def musico_lista(request):
    """Musician list page; rows are loaded via AJAX from musico_datos."""
    return render(request, 'práctica_05/musicos.html')

def album_lista(request):
    """Album list page; rows are loaded via AJAX from album_datos."""
    return render(request, 'práctica_05/albumes.html')
def grupo_datos(request):
    """Render one page of Grupo rows to HTML and return it wrapped in JSON
    with pagination metadata (consumed by the grupo_lista page via AJAX)."""
    queryset = Grupo.objects.filter(creacion__lte=timezone.now()).order_by('creacion')
    paginator = Paginator(queryset, 3)  # 3 rows per page
    pagina = paginator.get_page(request.GET.get('pagina'))
    grupos_html = loader.render_to_string(
        'práctica_05/grupos_tabla.html',
        {'grupos': pagina}
    )
    return JsonResponse({
        'grupos_html': grupos_html,
        'has_next': pagina.has_next(),
        'has_previous': pagina.has_previous(),
        'page': pagina.number,
        'num_pages': paginator.num_pages,
    })
def musico_datos(request):
    """Return one rendered page of Musico rows as JSON (AJAX pagination)."""
    musicos = Musico.objects.filter(creacion__lte=timezone.now()).order_by('creacion')
    paginator = Paginator(musicos, 3)  # 3 rows per page
    pagina = request.GET.get('pagina')
    musicos = paginator.get_page(pagina)
    musicos_html = loader.render_to_string(
        'práctica_05/musicos_tabla.html',
        {'musicos': musicos}
    )
    datos = {
        'musicos_html': musicos_html,
        'has_next': musicos.has_next(),
        'has_previous': musicos.has_previous(),
        'page': musicos.number,
        'num_pages': paginator.num_pages,
    }
    return JsonResponse(datos)
def album_datos(request):
    """Return one rendered page of Album rows as JSON (AJAX pagination)."""
    albumes = Album.objects.filter(creacion__lte=timezone.now()).order_by('creacion')
    paginator = Paginator(albumes, 3)  # 3 rows per page
    pagina = request.GET.get('pagina')
    albumes = paginator.get_page(pagina)
    albumes_html = loader.render_to_string(
        'práctica_05/albumes_tabla.html',
        {'albumes': albumes}
    )
    datos = {
        'albumes_html': albumes_html,
        'has_next': albumes.has_next(),
        'has_previous': albumes.has_previous(),
        'page': albumes.number,
        'num_pages': paginator.num_pages,
    }
    return JsonResponse(datos)
def grupo_nuevo(request):
    """Create a Grupo from the posted form; show an empty form on GET.

    On invalid POST the bound form (with errors) falls through to the render.
    """
    if request.method == "POST":
        form = GrupoForm(request.POST)
        if form.is_valid():
            grupo = form.save(commit=False)
            grupo.save()
            return redirect('grupo_lista')
    else:
        form = GrupoForm()
    return render(request, 'práctica_05/nuevo.html', {'form': form})

def musico_nuevo(request):
    """Create a Musico from the posted form; show an empty form on GET."""
    if request.method == "POST":
        form = MusicoForm(request.POST)
        if form.is_valid():
            musico = form.save(commit=False)
            musico.save()
            return redirect('musico_lista')
    else:
        form = MusicoForm()
    return render(request, 'práctica_05/nuevo.html', {'form': form})

def album_nuevo(request):
    """Create an Album from the posted form; show an empty form on GET."""
    if request.method == "POST":
        form = AlbumForm(request.POST)
        if form.is_valid():
            album = form.save(commit=False)
            album.save()
            return redirect('album_lista')
    else:
        form = AlbumForm()
    return render(request, 'práctica_05/nuevo.html', {'form': form})
def grupo_editar(request, pk):
    """Edit an existing Grupo; its `creacion` timestamp is reset on save."""
    grupo = get_object_or_404(Grupo, pk=pk)
    if request.method == "POST":
        form = GrupoForm(request.POST, instance=grupo)
        if form.is_valid():
            grupo = form.save(commit=False)
            grupo.creacion = timezone.now()
            grupo.save()
            return redirect('grupo_lista')
    else:
        form = GrupoForm(instance=grupo)
    return render(request, 'práctica_05/editar.html', {'form': form})

def grupo_borrar(request, pk):
    """Delete a Grupo and return to the list.

    NOTE(review): deletes on any HTTP method, including GET — consider
    restricting to POST to avoid accidental/CSRF-triggered deletes.
    """
    grupo = get_object_or_404(Grupo, pk=pk)
    grupo.delete()
    return redirect('grupo_lista')
def musico_editar(request, pk):
    """Edit an existing Musico; its `creacion` timestamp is reset on save."""
    musico = get_object_or_404(Musico, pk=pk)
    if request.method == "POST":
        form = MusicoForm(request.POST, instance=musico)
        if form.is_valid():
            musico = form.save(commit=False)
            musico.creacion = timezone.now()
            musico.save()
            return redirect('musico_lista')
    else:
        form = MusicoForm(instance=musico)
    return render(request, 'práctica_05/editar.html', {'form': form})

def musico_borrar(request, pk):
    """Delete a Musico and return to the list.

    NOTE(review): deletes on any HTTP method, including GET — consider POST-only.
    """
    musico = get_object_or_404(Musico, pk=pk)
    musico.delete()
    return redirect('musico_lista')
def album_editar(request, pk):
    """Edit an existing Album; its `creacion` timestamp is reset on save."""
    album = get_object_or_404(Album, pk=pk)
    if request.method == "POST":
        form = AlbumForm(request.POST, instance=album)
        if form.is_valid():
            album = form.save(commit=False)
            album.creacion = timezone.now()
            album.save()
            return redirect('album_lista')
    else:
        form = AlbumForm(instance=album)
    return render(request, 'práctica_05/editar.html', {'form': form})

def album_borrar(request, pk):
    """Delete an Album and return to the list.

    NOTE(review): deletes on any HTTP method, including GET — consider POST-only.
    """
    album = get_object_or_404(Album, pk=pk)
    album.delete()
    return redirect('album_lista')
def estadisticas(request):
    """Stats page: group name/origin pairs plus aggregate counts of musicians
    per instrument and groups per genre."""
    sitios_grupos = Grupo.objects.values_list('nombre', 'origen', named=True)
    # {instrument: number of musicians playing it}
    instrumentos = {
        v['instrumento']: v['instrumento__count']
        for v in
        Musico.objects.values('instrumento').annotate(Count('instrumento')).order_by('instrumento')
    }
    # {genre: number of groups in that genre}
    generos = {
        v['genero']: v['genero__count']
        for v in
        Grupo.objects.values('genero').annotate(Count('genero')).order_by('genero')
    }
    return render(request, 'práctica_05/estadisticas.html', {'sitios_grupos': sitios_grupos, 'instrumentos': instrumentos, 'generos': generos})
from abc import ABC, abstractmethod
class Invader(ABC):
    """Base class for enemies that walk a path of cells across a canvas.

    BUG FIX: the class imported ABC/abstractmethod but did not inherit ABC,
    so @abstractmethod on render() had no effect (abstractmethod only works
    with an ABCMeta-based class). Subclasses must now implement render();
    the provided implementation can still be reused via super().render().
    """

    # colour lookup per invader kind (not used by this base class yet)
    INVADER_TYPES = {'goblin': 'green', 'troll': 'grey', 'orc': 'green', 'ogre': 'tan', 'dragon': 'red'}

    def __init__(self, canvas, path):
        self._canv = canvas
        self._path = path
        self._health = 100
        self._size = 4  # radius of circle to draw (for now)
        self._speed = 2
        self._reachedGoal = False
        self._offCanvas = False
        # _dest_cell is the next cell's center that we are moving toward.
        # Start at the 0th cell, which may be off the screen. Use it
        # to get your x, y value. Then, find the 1st cell and use that to
        # set the x and y directions.
        self._dest_cell_idx = 0
        self._dest_cell = self._path.get_cell(0)
        self._x, self._y = self._dest_cell.get_center()
        self._compute_new_dir()
        # identifier for the circle we draw to represent the invader
        self._id = None

    def update(self):
        """Advance one tick: finish at the goal, draw while alive, else remove."""
        if (self._x, self._y) == self._path._pathGoal.get_center():
            self._reachedGoal = True
            self.remove()
        elif self._health >= 0:
            self.render()
        else:
            self.remove()

    def _compute_new_dir(self):
        '''Get (and remember) the next cell in that path, and then
        compute the xdir and ydir to get us from our current position
        to the center of that next cell.'''
        self._dest_cell_idx += 1
        self._dest_cell = self._path.get_cell(self._dest_cell_idx)
        d = self._dest_cell.get_center_x() - self._x
        if d > 0:
            self._xdir = self._speed
        elif d == 0:
            self._xdir = 0
        else:
            self._xdir = -self._speed
        d = self._dest_cell.get_center_y() - self._y
        if d > 0:
            self._ydir = self._speed
        elif d == 0:
            self._ydir = 0
        else:
            self._ydir = -self._speed

    def move(self):
        """Step one speed-unit toward the destination cell, turning on arrival."""
        if (self._x, self._y) == self._dest_cell.get_center():
            self._compute_new_dir()
        self._x += self._xdir
        self._y += self._ydir

    @abstractmethod
    def render(self):
        """Draw the invader; default draws a black circle after moving."""
        self._canv.delete(self._id)
        self.move()
        self._id = self._canv.create_oval(self._x - self._size, self._y - self._size,
                                          self._x + self._size, self._y + self._size,
                                          fill="black")

    def remove(self):
        """Erase the drawn shape and mark the invader as off the canvas."""
        self._canv.delete(self._id)
        self._offCanvas = True
|
#!/usr/bin/python
# -*- coding:UTF-8 -*-
'''
// H_FILE_PART1
#ifndef SRC_FILETEXTGENERATOR_HFILEGENERATOR_H_
#define SRC_FILETEXTGENERATOR_HFILEGENERATOR_H_
#include <string>
#include "h_file_text_generator.h"
using namespace std;
class HFileTextGenerator
{
// H_FILE_PART2
private:
int _size;
string _fileText;
public:
HFileTextGenerator();
HFileTextGenerator(const HFileTextGenerator& rhs);
HFileTextGenerator& operator=(const HFileTextGenerator& rhs);
~HFileTextGenerator();
void setSize(int size);
int getSize() const;
void setFileText(const string& fileText);
string& getFileText();
const string& getFileText() const;
// H_FILE_PART3
};
#endif
'''
from src.data.class_description import ClassDescription
NEW_LINE = '\n'

class HFileTextGenerator:
    """Generates the text of a C++ header (.h) file from a ClassDescription."""

    def __init__(self, class_description=None):
        self.class_description = class_description

    def generate_h_file_text(self):
        """Assemble the full header text from its three parts."""
        h_file_text = self.generate_h_file_part1()
        h_file_text += self.generate_h_file_part2()
        h_file_text += self.generate_h_file_part3()
        return h_file_text

    def generate_h_file_part1(self):
        """Build the include-guard opening and the #include lines.

        BUG FIX: the guard previously opened with '#ifdef' instead of
        '#ifndef', which would skip the entire header on first inclusion
        (the module docstring's template shows the correct '#ifndef').
        """
        project_name = self.class_description.get_project_path()
        class_name = self.class_description.get_class_name()
        include_files = self.class_description.get_include_files()
        guard = project_name.upper() + '_' + class_name.upper() + '_H_'
        h_file_part1 = ''
        h_file_part1 += '#ifndef ' + guard + NEW_LINE
        h_file_part1 += '#define ' + guard + NEW_LINE
        h_file_part1 += NEW_LINE
        for inc_file in include_files:
            h_file_part1 += '#include <' + inc_file + '>' + NEW_LINE
        h_file_part1 += NEW_LINE + NEW_LINE
        return h_file_part1

    def generate_h_file_part2(self):
        """Class body generation is not implemented yet; returns ''."""
        h_file_part2 = ''
        return h_file_part2

    def generate_h_file_part3(self):
        """Close the class declaration and the include guard."""
        h_file_part3 = ''
        h_file_part3 += '};'
        h_file_part3 += NEW_LINE + NEW_LINE
        h_file_part3 += '#endif'
        h_file_part3 += NEW_LINE
        return h_file_part3
if __name__ == '__main__':
    # Smoke test: build a sample ClassDescription and print the header opening.
    project_path_temp = 'src'
    class_name_temp = 'file'
    include_files_temp = ['vector', 'string', 'time.h', 'file.h']
    declare_vars_temp = []
    attributes_temp = []
    class_description_temp = ClassDescription(project_path_temp,
                                              class_name_temp,
                                              include_files_temp,
                                              declare_vars_temp,
                                              attributes_temp)
    h_file_text_generator = HFileTextGenerator(class_description_temp)
    print(h_file_text_generator.generate_h_file_part1())
|
"""
Created on Mon July 10, 2017
@author: Ruchika Chhabra
"""
from ConfigParser import ConfigParser
class ConfigParse():
    '''
    DESCRIPTION:
    ------------
    This class reads config.ini file and sets the required user inputs
    in the class attributes.
    ATTRIBUTES:
    ----------
    1. ES_Host (string) : Elasticsearch host IP
    2. ES_Port (string) : Elasticsearch port which is 5601 by default
    3. Index_Name (string) : Elasticsearch index name
    4. Type_Name (string) : Elasticsearch index type
    5. Mapping_File (string) : Mapping file to used for creating the ES index.
    6. del_flag (int) : Can take values 1 and 0 to delete the already existing
    elasticsearch index or not respectively.
    7. Index_ID : Index ID from which documents should be inserted
    to ES.
    8. URL_File : List of URLs to be scraped.
    '''
    # NOTE(review): 5601 is Kibana's default port; Elasticsearch defaults to
    # 9200 — confirm the docstring/config value.

    def __init__(self):
        '''
        DESCRIPTION:
        ------------
        This method declares the class attributes.
        '''
        # All attributes stay None until configReader() populates them.
        self.ES_Host = None
        self.ES_Port = None
        self.Index_Name = None
        self.Type_Name = None
        self.Mapping_File = None
        self.Index_ID = None
        self.del_flag = None
        self.URL_File = None

    def configReader(self):
        '''
        DESCRIPTION:
        -----------
        * This method parses the config file and read the variables defined by
        the user in the config.ini file.
        * The values of the variables are then set in the corresponding class
        attributes.
        '''
        parser = ConfigParser(allow_no_value=True)
        # Read config.ini (resolved relative to the current working directory)
        parser.read('config.ini')
        # Define ES Instance config variables.
        self.ES_Host = parser.get('ES Config Variables', 'es_host')
        self.ES_Port = int(parser.get('ES Config Variables', 'es_port'))
        self.Index_Name = parser.get('ES Config Variables', 'index_name')
        self.Type_Name = parser.get('ES Config Variables', 'type_name')
        self.Mapping_File = parser.get('ES Config Variables', 'mapping_file')
        self.Index_ID = int(parser.get('ES Config Variables', 'index_id'))
        self.del_flag = int(parser.get('ES Config Variables', 'delete_index'))
        # Read input variables for the code
        self.URL_File = parser.get('Input Variables', 'url_list')

    def updateConfigParam(self, param, value):
        '''
        DESCRIPTION:
        ------------
        This method updates the value of param in the config.ini file.
        PARAMETERS:
        ---------
        param (string): Config Parameter to be updated.
        value (string) Value of Config Parameter to be set.
        '''
        # Re-reads the file, mutates one key, and rewrites the whole file.
        config = ConfigParser(allow_no_value=True)
        config.read('config.ini')
        config.set('ES Config Variables', param, value)
        with open('config.ini', 'wb') as configfile:  # 'wb' — Python 2 style
            config.write(configfile)
|
#!/user/bin/env python
# -*- coding: utf-8 -*-
# Print the first `max` terms of the Fibonacci sequence (Python 2 script).
def fib(max):  # NOTE: parameter shadows the builtin `max`
    n, a, b = 0, 0, 1
    while n < max:
        print b  # Python 2 print statement
        a, b = b, a+b
        n = n+1

fib(100)
import numpy as np
from collections import deque
from rdpg_constants import *
class ReplayMemory:
    """Fixed-capacity ring buffer of whole-episode histories for RDPG.

    Shape constants MAX_CAPACITY, LENGTH and BATCH_SIZE come from
    rdpg_constants; each slot stores one episode of LENGTH steps.
    """

    def __init__(self, state_dim, action_dim):
        self.index = 0  # next write slot; wraps at MAX_CAPACITY
        self.histories = np.zeros((MAX_CAPACITY, LENGTH, state_dim + action_dim))  # concatenated (state, action)
        self.states = np.zeros((MAX_CAPACITY, LENGTH, state_dim))
        self.observations = np.zeros((MAX_CAPACITY, LENGTH, state_dim))
        self.actions = np.zeros((MAX_CAPACITY, LENGTH, action_dim))
        self.rewards = np.zeros((MAX_CAPACITY, LENGTH, 1))
        self.indices = None  # batch slot indices chosen by set_indices()

    def append(self, history):
        '''
        Puts one episode into the memory buffer, overwriting the oldest
        slot once the buffer is full.
        args:
        :history: a History object holding the episode's states,
        observations, actions and rewards
        '''
        states = history.get_states()
        observations = history.get_observations()
        actions = history.get_actions()
        self.histories[self.index] = np.concatenate((states, actions), axis=1)
        self.states[self.index] = states
        self.observations[self.index] = observations
        self.actions[self.index] = actions
        self.rewards[self.index] = history.get_rewards()
        self.index = (self.index + 1) % MAX_CAPACITY  # ring-buffer advance

    def set_indices(self):
        # Draw BATCH_SIZE slots with replacement. NOTE(review): this may pick
        # slots that have never been written (still all-zero) — confirm intended.
        self.indices = np.random.choice([i for i in range(MAX_CAPACITY)], BATCH_SIZE)

    """
    set indices must be called before the following functions
    """
    def sample_states(self):
        return self.states[self.indices]

    def sample_histories(self):
        return self.histories[self.indices]

    def sample_actions(self):
        return self.actions[self.indices]

    def sample_observations(self):
        return self.observations[self.indices]

    def sample_rewards(self):
        return self.rewards[self.indices]
class History:
    """Fixed-length (LENGTH steps) rollout buffer for one episode."""

    def __init__(self, state_dim, action_dim):
        self.index = 0  # next write position
        self.states = np.zeros((LENGTH, state_dim))
        self.observations = np.zeros((LENGTH, state_dim))  # observation is just s prime
        self.actions = np.zeros((LENGTH, action_dim))
        self.rewards = np.zeros((LENGTH, 1))
        self.histories = np.zeros((LENGTH, state_dim + action_dim))  # history is just

    def append(self, obs, obs_prime, action, reward):
        """Record one (s, s', a, r) transition at the current position."""
        i = self.index
        self.states[i] = obs
        self.observations[i] = obs_prime
        self.actions[i] = action
        self.rewards[i] = reward
        self.histories[i] = np.concatenate((obs, action))
        self.index = i + 1

    def get(self):
        """Return the most recent (history, state) as 1-row slices."""
        latest = slice(self.index - 1, self.index)
        # change this back to observations if unsuccessful
        return self.histories[latest], self.states[latest]

    def get_states(self):
        return self.states

    def get_observations(self):
        return self.observations

    def get_actions(self):
        return self.actions

    def get_rewards(self):
        return self.rewards
# Generated by Django 2.2.6 on 2019-12-07 14:46
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated: make ResolutionLink.resolution nullable (FK to work.Resolution)."""

    dependencies = [
        ('work', '0059_resolutionlink'),
    ]
    operations = [
        migrations.AlterField(
            model_name='resolutionlink',
            name='resolution',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='work.Resolution'),
        ),
    ]
|
from rest_framework.parsers \
import MultiPartParser, \
FormParser, \
FileUploadParser
from rest_framework.response import Response
from . import models, serializers
from rest_framework import generics, status
from django.utils.translation import ugettext_lazy as _
from apiNomad.setup import service_init_database
class Genre(generics.ListAPIView):
    """
    get:
    Return a list of all the existing genres.
    """
    serializer_class = serializers.GenreBasicSerializer

    def get_queryset(self):
        # Every genre, unfiltered.
        return models.Genre.objects.all()
class VideoGenreId(generics.UpdateAPIView):
    """
    patch:
    Will delete genre of a video
    """
    def patch(self, request, *args, **kwargs):
        """Remove the given genre from the given video's genre set.

        Expects 'genre' and 'video' ids in the request body; returns the
        removed genre's label on success, 403 otherwise.
        """
        # NOTE(review): "video.uodate_video" looks like a typo for
        # "video.update_video" — confirm against the permission definitions
        # before changing, since the registered codename may carry the typo.
        if self.request.user.has_perm("video.uodate_video"):
            if 'genre' in request.data.keys() and \
                    'video' in request.data.keys():
                video_id = request.data['video']
                genre_id = request.data['genre']
                video = models.Video.objects.filter(
                    id=video_id
                )[:1].get()
                genre = models.Genre.objects.filter(
                    id=genre_id
                )[:1].get()
                if video and genre:
                    video.genres.remove(genre)
                return Response(genre.label, status=status.HTTP_200_OK)
        content = {
            'detail': _("You are not authorized to update a given video."),
        }
        return Response(content, status=status.HTTP_403_FORBIDDEN)
class Video(generics.ListCreateAPIView):
    """
    get:
    Return a list of all the existing genres.
    post:
    Create a new events.
    """
    parser_classes = (MultiPartParser, FormParser, FileUploadParser)
    serializer_class = serializers.VideoBasicSerializer

    def get_queryset(self):
        # service_init_database()
        # With ?param=...: list only the requesting user's videos, hiding
        # deleted ones. Otherwise list all videos, optionally narrowed to
        # only-deleted (?is_deleted) or only-active (?is_actived).
        if 'param' in self.request.query_params.keys():
            queryset = models.Video.objects.filter(
                owner=self.request.user
            )
            list_exclude = list()
            for video in queryset:
                if video.is_delete:
                    list_exclude.append(video)
        else:
            queryset = models.Video.objects.all()
            list_exclude = list()
            if 'is_deleted' in self.request.query_params.keys():
                for video in queryset:
                    if not video.is_delete:
                        list_exclude.append(video)
            elif 'is_actived' in self.request.query_params.keys():
                for video in queryset:
                    if not video.is_active:
                        list_exclude.append(video)
        queryset = queryset.\
            exclude(pk__in=[video.pk for video in list_exclude])
        return queryset

    def post(self, request, *args, **kwargs):
        """Create a video when the user holds video.add_video; 400 otherwise."""
        if self.request.user.has_perm("video.add_video"):
            # request.data['owner'] = self.request.user.id
            return self.create(request, *args, **kwargs)
        # NOTE(review): a permission failure arguably warrants 403, not 400.
        return Response(
            _("video invalide. \n Revoyer les critères "
              "d'acceptation de projet pour "
              "vous assurer que vous êtes en confirmité. "
              "Si le probléme persiste, "
              "merci de contacter l'administration"),
            status=status.HTTP_400_BAD_REQUEST
        )
class VideoId(generics.RetrieveUpdateDestroyAPIView):
    """
    get:
    Return the detail of a specific video.
    patch:
    Update a specific video.
    delete:
    Delete a specific video.
    """
    serializer_class = serializers.VideoBasicSerializer

    def get_queryset(self):
        return models.Video.objects.filter()

    def patch(self, request, *args, **kwargs):
        """Partially update a video after stripping read-only fields."""
        # Fields that must not be updated through this endpoint.
        if 'file' in request.data.keys():
            del request.data['file']
        if 'owner' in request.data.keys():
            del request.data['owner']
        # BUG FIX: the original tested for 'genre' but deleted 'genres',
        # raising KeyError whenever 'genre' was present without 'genres'.
        # Test and delete the same key.
        if 'genres' in request.data.keys():
            del request.data['genres']
        if self.request.user.has_perm('video.change_video'):
            return self.partial_update(request, *args, **kwargs)
        content = {
            'detail': _("You are not authorized to update a given video."),
        }
        return Response(content, status=status.HTTP_403_FORBIDDEN)

    def delete(self, request, *args, **kwargs):
        return self.destroy(request, *args, **kwargs)
|
from pandas import read_csv
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.colors import ListedColormap
from sklearn import preprocessing
from sklearn.preprocessing import scale
from sklearn.model_selection import train_test_split
from sklearn import metrics
from sklearn.model_selection import GridSearchCV
from sklearn.tree import DecisionTreeClassifier
from sklearn.ensemble import GradientBoostingClassifier
from sklearn.ensemble import RandomForestClassifier
import warnings
warnings.filterwarnings("ignore", category=DeprecationWarning)
# Per-column flag: True = continuous (numeric) attribute, False = categorical.
# NOTE(review): columns a1..a16 with a16 as the class label — presumably the
# UCI credit-screening dataset naming; confirm against the CSV actually used.
data_cols_cont_descr = {
    "a1": False,
    "a2": True,
    "a3": True,
    "a4": False,
    "a5": False,
    "a6": False,
    "a7": False,
    "a8": True,
    "a9": False,
    "a10": False,
    "a11": True,
    "a12": False,
    "a13": False,
    "a14": True,
    "a15": True,
    "a16": False,
}
# prepare data
def prepare_test_data():
    """Return a tiny hard-coded 2-D sample set with binary labels (3/4)."""
    features = np.array([[-3, 7], [1, 5], [1, 2], [-2, 0], [2, 3], [-4, 0],
                         [-1, 1], [1, 1], [-2, 2], [2, 7], [-4, 1], [-2, 7]])
    labels = np.array([3, 3, 3, 3, 4, 3, 3, 4, 3, 4, 4, 4])
    return features, labels
def prepare_data(fn):
    """Load the dataset from semicolon-separated CSV `fn`; fall back to the
    toy data when fn is None.

    Returns (x, y): the feature DataFrame with continuous columns coerced to
    numeric and mean-imputed, categorical columns label-encoded, and the
    label-encoded target column a16.
    """
    if fn is None:
        return prepare_test_data()
    le = preprocessing.LabelEncoder()
    df = read_csv(fn, ';')
    y = le.fit_transform(df["a16"])  # target column
    x = df.drop('a16', 1)
    for c in x:
        dc = x[c]
        if data_cols_cont_descr[c]:
            # continuous: coerce to numeric and impute missing with the mean
            dc = pd.to_numeric(dc, errors='coerce')
            # print(dc)
            dfc = dc.to_frame(c)
            m = dfc.mean()
            dfc = dfc.fillna(m)
            x[c] = dfc[c]
        else:
            # categorical: integer label-encode (encoder refit per column)
            x[c] = le.fit_transform(dc)
    print("Data control")
    print(x.info())
    return x, y
def find_2_max_regresors(model, X_train, X_test, y_train, y_test):
    """Exhaustively search ordered pairs of distinct feature columns and
    return the pair (col1, col2) whose 2-feature fit scores the highest
    test-set accuracy."""
    best_score = -1
    best_first = None
    best_second = None
    for first in X_train.columns:
        for second in X_train.columns:
            if first == second:
                continue
            model.fit(X_train[[first, second]], y_train)
            predictions = model.predict(X_test[[first, second]])
            accuracy = metrics.accuracy_score(y_test, predictions)
            if accuracy > best_score:
                best_first = first
                best_second = second
                best_score = accuracy
    return best_first, best_second
def draw_classification_area(model, c1, c2, X_train, X_test, y_train, y_test, title):
    """Fit `model` on the two scaled features c1/c2 and save a decision-surface
    plot (mesh-grid predictions plus training points) to fig3_<title>.png."""
    x_train_draw = scale(X_train[[c1, c2]].values)
    x_test_draw = scale(X_test[[c1, c2]].values)
    model.fit(x_train_draw, y_train)
    # Bounding box of the training points, padded by one unit on each side.
    x_min, x_max = x_train_draw[:, 0].min() - 1, x_train_draw[:, 0].max() + 1
    y_min, y_max = x_train_draw[:, 1].min() - 1, x_train_draw[:, 1].max() + 1
    h = 0.02  # mesh step
    xx, yy = np.meshgrid(np.arange(x_min, x_max, h),
                         np.arange(y_min, y_max, h))
    pred = model.predict(np.c_[xx.ravel(), yy.ravel()])
    pred = pred.reshape(xx.shape)
    cmap_light = ListedColormap(['#FFAAAA', '#AAFFAA', '#AAAAFF'])  # background regions
    cmap_bold = ListedColormap(['#FF0000', '#00FF00', '#0000FF'])  # training points
    plt.figure()
    plt.pcolormesh(xx, yy, pred, cmap=cmap_light)
    plt.scatter(x_train_draw[:, 0], x_train_draw[:, 1],
                c=y_train, cmap=cmap_bold)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())
    plt.title("%s %s:%s score: %.0f percents" % (title, c1, c2, model.score(x_test_draw, y_test) * 100))
    plt.savefig('fig3_{}.png'.format(title))
def decision_tree_model(x, Y):
    """Grid-search a DecisionTreeClassifier, report train/test accuracy, and
    plot the decision surface over the best-scoring 2-feature pair."""
    print('Decision tree model')
    X_train, X_test, y_train, y_test = train_test_split(x, Y, test_size=0.20, random_state=0)
    tuned_parameters = {'criterion': ['gini', 'entropy'],
                        'max_depth': [4, 5, 6, 7, 8, 9, 10, 11, 12, 15, 20, 30, 40, 50, 70, 90, 120, 150]}
    model_dt = GridSearchCV(DecisionTreeClassifier(), tuned_parameters, cv=5)
    model_dt.fit(X_train, y_train)
    print('best model parameters')
    print(model_dt.best_params_)
    y_train_pred = model_dt.predict(X_train)
    y_test_pred = model_dt.predict(X_test)
    acc_train = metrics.accuracy_score(y_train, y_train_pred)
    acc_test = metrics.accuracy_score(y_test, y_test_pred)
    print()
    print("DecisionTree classifier")
    print('Accuracy on train set: {:.2f}'.format(acc_train))
    print('Accuracy on test set: {:.2f}'.format(acc_test))
    # Rebuild an unfitted tree with the best params for the 2-feature search/plot.
    model_dt = DecisionTreeClassifier(**model_dt.best_params_)
    c1, c2 = find_2_max_regresors(model_dt, X_train, X_test, y_train, y_test)
    draw_classification_area(model_dt, c1, c2, X_train, X_test, y_train, y_test, 'decision_tree')
def boosting_model(x, Y):
    """Grid-search a GradientBoostingClassifier, report train/test accuracy,
    and plot the decision regions over the two strongest features."""
    print()
    print('Boosting model')
    X_train, X_test, y_train, y_test = train_test_split(x, Y, test_size=0.20, random_state=0)
    tuned_parameters = {
        # NOTE(review): "deviance" was renamed "log_loss" and criterion "mae"
        # was removed in scikit-learn >= 1.1/1.2 -- confirm the pinned version
        # before running on a modern environment.
        "loss": ["deviance"],
        "learning_rate": [0.01, 0.025, 0.05, 0.075, 0.1, 0.15, 0.2],
        # "min_samples_split": np.linspace(0.1, 0.5, 12),
        # "min_samples_leaf": np.linspace(0.1, 0.5, 12),
        "max_depth": [3, 5, 8],
        "max_features": ["log2", "sqrt"],
        "criterion": ["friedman_mse", "mae"],
        # "subsample": [0.5, 0.618, 0.8, 0.85, 0.9, 0.95, 1.0],
        "n_estimators": [10]
    }
    model_gb = GridSearchCV(GradientBoostingClassifier(), tuned_parameters, cv=5)
    model_gb.fit(X_train, y_train)
    print('best model parameters')
    print(model_gb.best_params_)
    y_train_pred = model_gb.predict(X_train)
    y_test_pred = model_gb.predict(X_test)
    acc_train = metrics.accuracy_score(y_train, y_train_pred)
    acc_test = metrics.accuracy_score(y_test, y_test_pred)
    print()
    print("Boosting classifier")
    print('Accuracy on train set: {:.2f}'.format(acc_train))
    print('Accuracy on test set: {:.2f}'.format(acc_test))
    # Re-instantiate an unfitted model with the winning hyper-parameters
    # for the 2-feature decision-area plot.
    model_gb = GradientBoostingClassifier(**model_gb.best_params_)
    c1, c2 = find_2_max_regresors(model_gb, X_train, X_test, y_train, y_test)
    draw_classification_area(model_gb, c1, c2, X_train, X_test, y_train, y_test, 'boosting')
def random_forest_model(x, Y):
    """Grid-search a RandomForestClassifier, report train/test accuracy,
    and plot the decision regions over the two strongest features."""
    print()
    print('Random forest model')
    X_train, X_test, y_train, y_test = train_test_split(x, Y, test_size=0.20, random_state=0)
    tuned_parameters = {
        'n_estimators': [200, 500],
        # NOTE(review): max_features 'auto' was removed for classifiers in
        # scikit-learn 1.3 -- confirm the pinned version.
        'max_features': ['auto', 'sqrt', 'log2'],
        'max_depth': [4, 5, 6, 7, 8],
        'criterion': ['gini', 'entropy']
    }
    model_rf = GridSearchCV(RandomForestClassifier(), tuned_parameters, cv=5)
    model_rf.fit(X_train, y_train)
    print('best model parameters')
    print(model_rf.best_params_)
    y_train_pred = model_rf.predict(X_train)
    y_test_pred = model_rf.predict(X_test)
    acc_train = metrics.accuracy_score(y_train, y_train_pred)
    acc_test = metrics.accuracy_score(y_test, y_test_pred)
    print()
    print("Random Forest classifier")
    print('Accuracy on train set: {:.2f}'.format(acc_train))
    print('Accuracy on test set: {:.2f}'.format(acc_test))
    # Re-instantiate an unfitted forest with the winning hyper-parameters
    # for the 2-feature decision-area plot.
    model_rf = RandomForestClassifier(**model_rf.best_params_)
    c1, c2 = find_2_max_regresors(model_rf, X_train, X_test, y_train, y_test)
    draw_classification_area(model_rf, c1, c2, X_train, X_test, y_train, y_test, 'random_forest')
def main():
    """Load the credit-screening dataset and evaluate all three model families."""
    features, labels = prepare_data("crx.data.csv")
    # Each builder grid-searches its family, prints metrics, and saves a plot.
    for build_and_report in (decision_tree_model, boosting_model, random_forest_model):
        build_and_report(features, labels)


if __name__ == "__main__":
    main()
|
# -*- coding: utf-8 -*-
"""
Created on Thu Jan 21 12:53:10 2021
Programme to select all kinematic and kinetic data from a trial, read it in from
for single cycle on each side, normalise to gait cycle, plot kinematic and kinetic
graph
@author: snbar
"""
import c3dreader
import ezc3d
import matplotlib.pyplot as plt
import matplotlib.ticker as ticker
import numpy as np
import operator
from mpl_toolkits.mplot3d import axes3d
from matplotlib.animation import FuncAnimation
class marker_data(object):
    """Holds marker names plus the per-axis attribute labels derived from them."""

    def __init__(self):
        # Parallel lists: c3dname[i] is a marker label, DimensionName[i] its
        # three component names (e.g. ['LASI_1', 'LASI_2', 'LASI_3']).
        self.c3dname, self.DimensionName = [], []
class marker_c3d(object):
    """Per-trial container: one empty list attribute per (marker, axis) pair.

    Attribute names are taken from the module-level ``required_data``
    (e.g. ``LASI_1`` .. ``LASI_3``) for every marker in ``mk_list``.
    """

    def __init__(self):
        # for s in ['L','R']:
        for marker_index in range(len(mk_list)):
            for axis_name in required_data.DimensionName[marker_index][:3]:
                setattr(self, axis_name, [])
class file_def(object):
    """Binds the module-level ``attr_list`` names to ``val_list`` values as attributes."""

    def __init__(self):
        # attr_list[i] is assigned val_list[i], in order.
        for index in range(len(attr_list)):
            setattr(self, attr_list[index], val_list[index])
# Names used by file_def; currently only the marker container attribute.
attr_list=['mkr_attr']
# Standard lower-body Plug-in-Gait marker set (left then right side).
mk_list=['LASI','LPSI','LTHI','LKNE','LTIB','LANK','LHEE','LTOE',
         'RASI','RPSI','RTHI','RKNE','RTIB','RANK','RHEE','RTOE']
required_data=marker_data()
# Build, for each marker, its three per-axis labels, e.g. LASI -> LASI_1..3.
for n in range(len(mk_list)):
    required_data.c3dname.append(mk_list[n])
    l=[]
    for i in range(3):
        l.append(mk_list[n]+'_'+str(i+1))
    required_data.DimensionName.append(l)
t=marker_c3d()
filename = 'C:\\Users\\snbar\\projects\\Data\\Pats20\\Misc\\APatient\\Helen_NF_N\\Helen_NF_N07.c3d'
#filename='C:\\Users\\snbar\\projects\\nj80102.c3d'
# Populate t's per-axis lists from the c3d trial file.
[required_data]=c3dreader.read_data(required_data,t,filename)
val_list=[t]
p=file_def()
########### now make 3D graph
#get_ipython().run_line_magic('matplotlib', 'qt')
#####
fig = plt.figure(figsize=(9,9)) # figure 1 3D plot of frame i
ax = fig.add_subplot(111, projection='3d')
# Hide tick labels on all three axes.
ax.xaxis.set_major_locator(ticker.NullLocator())
ax.yaxis.set_major_locator(ticker.NullLocator())
ax.zaxis.set_major_locator(ticker.NullLocator())
#ax.set_aspect("equal")
i=50  # frame index to draw
# Pelvis: closed polygon through the four pelvic markers.
ax.plot3D([t.LASI_1[i], t.RASI_1[i], t.RPSI_1[i], t.LPSI_1[i], t.LASI_1[i]],
          [t.LASI_2[i], t.RASI_2[i], t.RPSI_2[i], t.LPSI_2[i],t.LASI_2[i]],
          [t.LASI_3[i], t.RASI_3[i], t.RPSI_3[i], t.LPSI_3[i],t.LASI_3[i]], color='black')
# left leg
ax.plot3D([t.LASI_1[i], t.LTHI_1[i], t.LKNE_1[i], t.LTIB_1[i], t.LANK_1[i]],\
          [t.LASI_2[i], t.LTHI_2[i], t.LKNE_2[i], t.LTIB_2[i], t.LANK_2[i]],\
          [t.LASI_3[i], t.LTHI_3[i], t.LKNE_3[i], t.LTIB_3[i], t.LANK_3[i]], color='red')
# right leg
ax.plot3D([t.RASI_1[i], t.RTHI_1[i], t.RKNE_1[i], t.RTIB_1[i], t.RANK_1[i]],\
          [t.RASI_2[i], t.RTHI_2[i], t.RKNE_2[i], t.RTIB_2[i], t.RANK_2[i]],\
          [t.RASI_3[i], t.RTHI_3[i], t.RKNE_3[i], t.RTIB_3[i], t.RANK_3[i]], color='green')
# left foot
ax.plot3D([t.LANK_1[i], t.LHEE_1[i], t.LTOE_1[i], t.LANK_1[i]],\
          [t.LANK_2[i], t.LHEE_2[i], t.LTOE_2[i], t.LANK_2[i]],\
          [t.LANK_3[i], t.LHEE_3[i], t.LTOE_3[i], t.LANK_3[i]], color='red')
# right foot
ax.plot3D([t.RANK_1[i], t.RHEE_1[i], t.RTOE_1[i], t.RANK_1[i]],\
          [t.RANK_2[i], t.RHEE_2[i], t.RTOE_2[i], t.RANK_2[i]],\
          [t.RANK_3[i], t.RHEE_3[i], t.RTOE_3[i], t.RANK_3[i]], color='green')
# Fixed viewing volume in lab coordinates (mm).
ax.set_ylim([-1000, 1000])
ax.set_xlim([0, 2000])
ax.set_zlim([0, 2000])
ax.grid(False)
ax.set_xlabel('x')
ax.set_ylabel('y')
ax.set_zlabel('z')
# Second figure: animated trace of the LASI marker trajectory.
fig = plt.figure(figsize=(9,9)) # figure 1 3D plot of frame i
ax = fig.add_subplot(111, projection='3d')
#fig, ax = plt.subplots()
# Accumulators appended to by update() each animation frame.
xdata, ydata, zdata = [], [], []
ln, = ax.plot3D([], [],[], 'ro')
def init():
    """FuncAnimation init: fix the viewing volume once and return the artists."""
    # A stable canvas is required for blitting to work correctly.
    for set_lim, bounds in ((ax.set_ylim, [-1000, 1000]),
                            (ax.set_xlim, [0, 2000]),
                            (ax.set_zlim, [0, 2000])):
        set_lim(bounds)
    return ln,
def update(frame):
    """FuncAnimation callback: append frame `frame`'s LASI position and redraw.

    BUG FIX: Line2D.set_data() accepts only x and y; the original passed
    three sequences, which raises TypeError on the first frame.  A 3-D
    line is updated through set_data_3d().
    """
    xdata.append(t.LASI_1[frame])
    ydata.append(t.LASI_2[frame])
    zdata.append(t.LASI_3[frame])
    ln.set_data_3d(xdata, ydata, zdata)
    return ln,
# Animate the first 10 frames; blit=True redraws only the returned artists.
ani = FuncAnimation(fig, update, frames=range(10),
                    init_func=init, blit=True)
plt.show()
# Leftover equal-aspect helper, kept for reference.
#max_range = np.array([X.max()-X.min(), Y.max()-Y.min(), Z.max()-Z.min()]).max() / 2.0
#mid_x = (X.max()+X.min()) * 0.5
#mid_y = (Y.max()+Y.min()) * 0.5
#mid_z = (Z.max()+Z.min()) * 0.5
#ax.set_xlim(mid_x - max_range, mid_x + max_range)
#ax.set_ylim(mid_y - max_range, mid_y + max_range)
#ax.set_zlim(mid_z - max_range, mid_z + max_range)
#plt.show()
from sqlalchemy import (
Column, String, create_engine, BigInteger, Integer, DateTime)
from sqlalchemy.orm import sessionmaker
from sqlalchemy.ext.declarative import as_declarative, declared_attr
from datetime import datetime
import functools
from blog import app
# Read-write and read-only MySQL engines, configured from the Flask app.
# Sessions are created in autocommit mode, so each statement commits itself.
rw_engine = create_engine(app.config['MYSQL_RW'])
rw_session = sessionmaker(bind=rw_engine, autocommit=True)
ro_engine = create_engine(app.config['MYSQL_RO'])
ro_session = sessionmaker(bind=ro_engine, autocommit=True)
@as_declarative()
class Base:
    """Declarative base shared by all models.

    Provides a lower-cased-class-name table name plus the standard audit
    columns (id, remark, soft-delete flag, timestamps).
    """
    @declared_attr
    def __tablename__(cls):
        # Table name defaults to the lower-cased class name.
        return cls.__name__.lower()

    id = Column(BigInteger, primary_key=True)
    remark = Column(String(255), default='')
    is_drop = Column(Integer, default=0)  # soft-delete flag
    # BUG FIX: pass the callable, not datetime.now() -- the original called
    # now() once at import time, so every row received the same
    # process-start timestamp.  onupdate keeps update_at current on UPDATE.
    create_at = Column(DateTime, default=datetime.now)
    update_at = Column(DateTime, default=datetime.now, onupdate=datetime.now)
def use_orm(func=None, name='rw'):
    """Decorator that injects a SQLAlchemy session as the ``session`` kwarg.

    Usable bare (``@use_orm``) or parameterised (``@use_orm(name='ro')``).
    ``name`` selects the read-write or read-only sessionmaker.

    BUG FIX: the original never closed the session, leaking a pooled
    connection on every call; the session is now closed in a finally block.
    """
    if func is None:
        # Called with arguments: return a decorator bound to `name`.
        return functools.partial(use_orm, name=name)

    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        if name not in ('rw', 'ro'):
            raise Exception(f'MySQL connection name not found: {name}')
        session = rw_session() if name == 'rw' else ro_session()
        kwargs.update(session=session)
        try:
            return func(*args, **kwargs)
        finally:
            # Return the connection to the pool even if func raises.
            session.close()
    return wrapper
|
import pybullet_envs
import gym
import torch
import numpy as np
from agent import Agent
from pybullet_wrappers import RealerWalkerWrapper
import argparse
import torch
# Fail fast on NaN/inf gradients during development (slows training).
torch.autograd.set_detect_anomaly(True)
if __name__ == '__main__':
    parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    # Algorithms
    parser.add_argument('--env', type=str, default='HalfCheetahBulletEnv-v0') # pybullet environment
    # parser.add_argument('--env', type=str, default='Pong-v0') # pybullet environment
    parser.add_argument('--rl', type=str, default='SAC') # model free agent algorithm
    parser.add_argument('--planner', type=str, default='MCTS-UCT') # model based algorithm
    parser.add_argument('--model-arch', type=str, default='mdrnn') # type of self-model
    parser.add_argument('--atari', action='store_true', default=False)
    # Training Parameters
    parser.add_argument('--steps', type=int, default=1e6) # training steps
    parser.add_argument('--batch-size', type=int, default=512) # SM batch size
    parser.add_argument('--seq-len', type=int, default=10) # SM sequence modeling window size
    parser.add_argument('--replay-size', type=int, default=100000) # SM replay memory size
    # NOTE(review): '--width' is type=str unlike its siblings; every use
    # below wraps it in int(), so it works, but type=int would be cleaner.
    parser.add_argument('--width', type=str, default=8) # width of the search tree at every level
    parser.add_argument('--depth', type=int, default=5) # depth of the search tree
    parser.add_argument('--nodes', type=int, default=2048) # depth of the search tree
    # Repeatability
    parser.add_argument('--seed', type=int, default=None) # Initial seed
    parser.add_argument('--load-all', type=str, default=None) # path to general model
    parser.add_argument('--load-model', type=str, default=None) # path to self-model
    parser.add_argument('--load-agent', type=str, default=None) # path to agent model
    args = parser.parse_args()
    # Echo an equivalent command line so the run can be reproduced.
    cmd = 'python main.py --env '+str(args.env)+' --agent '+str(args.rl)+' --planner '+str(args.planner)+' --width '+str(args.width)+\
        ' --depth '+str(args.depth)+' --steps '+str(args.steps)+' --batch-size '+str(args.batch_size)+\
        ' --replay-size '+str(args.replay_size)+' --model-arch '+str(args.model_arch)
    if args.seed is not None: cmd += ' --seed '+str(args.seed)
    print(cmd)
    # Environment construction: a 'jump' prefix on a Bullet env name selects
    # the jumping-reward variant of the wrapped walker.
    if args.env[:4].lower() == 'jump' and 'Bullet' in args.env:
        print('Jumping task chosen')
        env = RealerWalkerWrapper(gym.make(args.env[4:]), rew='jump')
    elif 'Bullet' in args.env:
        print('Bullet env chosen')
        env = RealerWalkerWrapper(gym.make(args.env))
    elif args.atari:
        from atari_wrapper import make_atari, wrap_deepmind, wrap_pytorch
        print('Atari env chosen')
        env = make_atari(args.env)
        env = wrap_deepmind(env)
        env = wrap_pytorch(env)
    else:
        env = gym.make(args.env)
        try:
            # Unwrap TimeLimit etc. to reach the raw environment.
            env = env.env
        except:
            print('Env not wrapped')
        env._max_episode_steps = 100
    if args.seed is not None:
        # Seed every RNG source for repeatability.
        torch.manual_seed(args.seed)
        np.random.seed(args.seed)
        if torch.cuda.is_available():
            torch.cuda.manual_seed(args.seed)
        env.seed(args.seed)
    # An 'ensemble-' prefix on the architecture name wraps the chosen model
    # class in an ensemble further below.
    ensemble = False
    if args.model_arch[:len('ensemble-')] == 'ensemble-':
        ensemble = True
        args.model_arch = args.model_arch[len('ensemble-'):]
    # Map the architecture name to a dynamics-model class (the 'DyanmicsModel'
    # spelling is a long-standing typo kept consistently throughout).
    if args.model_arch == 'precogen':
        from models.preco_gen_dynamics_model import PreCoGenDynamicsModel as DyanmicsModel
    elif args.model_arch == 'rnn':
        from models.rnn_dynamics_model import RNNDynamicsModel as DyanmicsModel
    elif args.model_arch == 'rnn-vae':
        from models.rnn_vae import RNNVAE as DyanmicsModel
    elif args.model_arch == '1dcnn-vae':
        from models.cnn1d_vae import CNNVAE as DyanmicsModel
    elif args.model_arch == 'mdrnn':
        from models.mdrnn_dynamics_model import MDRNNDynamicsModel as DyanmicsModel
    elif args.model_arch == 'mdn-seq':
        from models.mdn_seq_dynamics_model import MDNSeqDynamicsModel as DyanmicsModel
    elif args.model_arch == 'latent-seq':
        from models.latent_seq_dynamics_model import LatentSeqDynamicsModel as DyanmicsModel
    elif args.model_arch == 'vrnn':
        from models.vrnn_dynamics_model import VRNNDynamicsModel as DyanmicsModel
    elif args.model_arch == 'bseq':
        from models.bayesian_dynamics_model import BayesianSequenceDynamicsModel as DyanmicsModel
    elif args.model_arch == 'biased-bseq':
        from models.biased_bayesian_dynamics_model import BayesianSequenceDynamicsModel as DyanmicsModel
    elif args.model_arch == 'seq-cnn':
        from models.cnn1D_dynamics_model import SeqCNNDynamicsModel as DyanmicsModel
    elif args.model_arch == 'mlp':
        from models.mlp_dynamics_model import SeqMLPDynamicsModel as DyanmicsModel
    elif args.model_arch == '1dcnn':
        from models.cnn1D_dynamics_model import SeqCNNDynamicsModel as DyanmicsModel
    elif args.model_arch == '2dcnn':
        from models.cnn2D_dynamics_model import SeqCNNDynamicsModel as DyanmicsModel
    elif args.model_arch.lower() == 'none': # TODO Test to ensures this doesn't break anything
        DyanmicsModel = None
        dynamics_model = None
    else:
        print('No Valid Dynamics Model chosen exiting...')
        exit(1)
    if DyanmicsModel is not None:
        if ensemble:
            from models.ensemble import Ensemble as Ensemble
            # from models.ensemble_parallel import ParallelEnsemble as Ensemble
            dynamics_model = Ensemble(DyanmicsModel, env)
            # Scale the batch so each ensemble member sees a full batch.
            args.batch_size = int(args.batch_size)*dynamics_model.ensemble_size
        else:
            dynamics_model = DyanmicsModel(env, seq_len=int(args.seq_len))
    # Model-free learner selection; unknown names fall back to a no-op agent.
    if args.rl.upper() == 'TD3':
        from model_free.TD3 import TD3
        rl_learner = TD3(env)
    elif args.rl.upper() == 'SAC':
        from model_free.SAC import SAC
        rl_learner = SAC(env)
    elif args.rl.upper() == 'DQN':
        from model_free.DQN import DQN
        rl_learner = DQN(env)
    elif args.rl.upper() == 'DDQN':
        from model_free.DDQN import DDQN
        rl_learner = DDQN(env)
    elif args.rl.lower() == 'none' or args.rl.lower() == 'null':
        from model_free.Null import NullAgent
        rl_learner = NullAgent(env)
    else:
        from model_free.Null import NullAgent
        rl_learner = NullAgent(env)
if args.planner == 'MCTS':
from model_based.mcts import MCTS
planner = MCTS(int(args.depth), dynamics_model, rl_learner, int(args.width))
if args.planner == 'MCTS-UCT':
from model_based.mcts_uct import MCTS
planner = MCTS(int(args.depth), dynamics_model, rl_learner, int(args.width), nodes=int(args.nodes))
elif args.planner == 'CEM':
from model_based.cem import CEM
planner = CEM(int(args.depth), dynamics_model, rl_learner, int(args.width))
elif args.planner.lower() == 'null' or args.planner.lower() == 'none':
planner = None
else:
from model_based.mcts import MCTS
planner = MCTS(int(args.depth), dynamics_model, rl_learner, int(args.width))
    # A planner cannot operate without a dynamics model to roll out.
    if dynamics_model is None and planner is not None:
        print('Error: Cannot have a null model with a planner')
        exit(1)
    agent = Agent(dynamics_model, rl_learner, planner,
                  batch_size=int(args.batch_size), replay_size=int(args.replay_size), seq_len=int(args.seq_len))
    # --load-all is shorthand for loading both the self-model and the agent
    # from the same path prefix.
    if args.load_all is not None:
        args.load_model = args.load_all
        args.load_agent = args.load_all
    if args.load_model is not None:
        print('Loading Model...')
        dynamics_model.load(args.load_model+'_self_model.pt')
    if args.load_agent is not None:
        print('Loading Agent...')
        agent.rl_learner.load(args.load_agent)
    # Train for the requested number of environment steps.
    agent.learn(env, int(args.steps))
|
import datetime
import json
import uuid
from django.core.paginator import Paginator
from django.http import HttpResponse, JsonResponse
from django.shortcuts import render
# Create your views here.
from django.views.decorators.csrf import csrf_exempt
from article.models import Carousel
def query_carousel(request):
    """Return one page of Carousel rows as JSON for the grid widget.

    GET params: ``page`` (1-based page number) and ``rows`` (page size).
    The response shape matches jqGrid conventions: total/records/page/rows.
    """
    page_num = request.GET.get('page')
    row_num = request.GET.get('rows')
    # print(page_num,type(page_num), row_num,type(row_num))
    banner = Carousel.objects.all().order_by('id')
    # Build the paginator once and reuse it (the original constructed a
    # second identical Paginator just to fetch the page's object list).
    all_page = Paginator(banner, row_num)
    rows = list(all_page.page(page_num).object_list)
    page_data = {"total": all_page.num_pages,
                 "records": all_page.count,
                 "page": page_num,
                 "rows": rows
                 }

    def myDefault(u):
        # json.dumps fallback: serialise Carousel instances to plain dicts.
        # Anything else falls through and serialises as null, matching the
        # original behaviour.
        if isinstance(u, Carousel):
            return {'id': u.id,
                    'title': u.title,
                    'status': u.status,
                    'url': str(u.url),
                    'createDate': u.create_date.strftime('%Y-%m-%d'),
                    'description': u.description,
                    }

    data = json.dumps(page_data, default=myDefault)
    return HttpResponse(data)
def index(request):
    """Render the carousel management page."""
    template_name = 'carousel/main.html'
    return render(request, template_name)
@csrf_exempt
def edit_Banner(request):
    """Create, update, or delete a Carousel row based on the 'oper' POST field.

    'edit' updates an existing row, 'add' creates one and returns its new
    UUID as JSON, 'del' removes a row; everything else is a no-op response.
    """
    oper = request.POST.get('oper')
    pk = request.POST.get('id')
    title = request.POST.get('title')
    status = request.POST.get('status')
    description = request.POST.get('description')
    if oper == 'edit':
        banner = Carousel.objects.get(pk=pk)
        banner.title = title
        banner.description = description
        banner.status = status
        # banner.url = url
        banner.save()
    elif oper == 'add':
        now = datetime.datetime.now().strftime('%Y-%m-%d')
        u_id = str(uuid.uuid4())
        d = {'bannerId': u_id}
        bannerId = json.dumps(d)
        print(type(bannerId), bannerId)
        Carousel.objects.create(id=u_id, title=title, status=status, description=description, create_date=now)
        # The client needs the generated id to attach the uploaded image.
        return JsonResponse(d)
    elif oper == 'del':
        Carousel.objects.get(pk=pk).delete()
    return HttpResponse()
def demo(request):
    """Render the carousel demo page."""
    template_name = 'carousel/demo.html'
    return render(request, template_name)
@csrf_exempt
def upload_file(request):
    """Create a Carousel row from an uploaded image and its form metadata."""
    # The upload form posts its fields with an "upload_*1" naming scheme.
    image = request.FILES.get('upload_pic1')
    caption = request.POST.get('upload_title1')
    state = request.POST.get('upload_status1')
    heading = request.POST.get('upload_name1')
    banner_id = str(uuid.uuid4())
    created = datetime.datetime.now().strftime('%Y-%m-%d')
    print(image, caption, state, heading)
    Carousel.objects.create(id=banner_id, url=image, title=heading,
                            description=caption, status=state, create_date=created)
    return HttpResponse()
|
#!/usr/bin/python3
#Run make to update
import os
import sys
import argparse
import shutil
sys.path.insert(0, "/home/pi/Documents/ScanningSystem/atlundiumberry/")
import stepper_helpers as sh
#directory in which this file exists
#dir_path = os.path.dirname(os.path.realpath(__file__))+"/"
dir_path = "/home/pi/Documents/ScanningSystem/atlundiumberry/"
#print("Directory of program:", dir_path)
# Command-line interface for the scanner.
# BUG FIX: the help text was being passed positionally as `prog`, which
# replaced the program name in usage/help output; it belongs in `description`.
parser = argparse.ArgumentParser(description="Specify how many steps that should be taken in as: 'steps_x steps_y'")
parser.add_argument("-step", dest='steps', type=int, help="steps_x steps_y", nargs=2)
parser.add_argument("-new_origin", dest='new_xy', action="store_true", help="Set current position as origin.")
parser.add_argument("-xy", dest='xy', nargs=2, help="Provide the new coordinates as: 'x y' (mm)")
parser.add_argument("-swipe_file", dest='swipe', nargs=7, help="Generate a file with coordinates to perform an (x0-x1, y0-y1) with step lengths (dx, dy) swipe scan. As: 'file_name x0 x1 dx y0 y1 dy'. Neglect file ending.")
parser.add_argument("-file_xy", dest='file_xy', nargs=1, help="Provide the file name from which the position data is to be read from. If file is set then every time the program is executed the collimator will move to the positions indicated in the file. A temporary file temp.'file name'.scan is deleted if the scan is completed.")
parser.add_argument("-no_file", dest='no_file', action="store_true", help="Deactivate read from file.")
parser.add_argument("-set_freq", dest='freq', nargs=1, help="Set freq as: 'new_freq' (Hz)")
parser.add_argument("-set_defaults", dest='defs', action="store_true", help="Reset default settings.")
parser.add_argument("-set_power_com", dest='tdk', action="store_true", help="Set up the power supply communication (tdk-lambda Gen 50-30). OBS: this activates automatic power ON/OFF over the operation.")
parser.add_argument("-no_power_com", dest='no_tdk', action="store_true", help="Inactivate automatic power supply communication and power ON/OFF during operation.")
parser.add_argument("-STOP", dest='stop', action="store_true", help="Emergency stop the scanning!")
parser.add_argument("-clear_coords", dest='coords', action="store_true", help="Clear the coordinate (coords.log) file.")
parser.add_argument("-clear_log", dest='clear_log', action="store_true", help="Clear the stepper log-file.")
parser.add_argument("-clear_logs", dest='clear_logs', action="store_true", help="Clear ALL log-files.")
parser.add_argument("-ON", dest='on', action="store_true", help="Deactivate emergency stop.")
parser.add_argument("-ResetToOrigin", dest='resetO', action="store_true", help="Reset coordinates to origin, i.e. where sensors at (x0, y0) activate.")
parser.add_argument("-ResetToIndex", dest='c_index', nargs=1, help="Reset the coordinate file to the index corresponding to the desired read file @rio4-1. If power communication and reading from file is not set they will be activated. OBS, it resets to the lastly written (not finished) file.")
parser.add_argument("-v", dest='view', action="store_true", help="View current settings.")
args = parser.parse_args()
#print(args)
steps_x = 0
steps_y = 0
config_file = '.scan.yaml'
# Scanner wraps the persistent settings file plus position bookkeeping.
Scan = sh.Scanner(config_file, dir_path)
orig_stdout = sys.stdout
f_log = open("stepper.log", 'w')
if args.on:
    print("Deactivating emergency stop.")
    Scan.ChangeSetting("stop", 0)
# Refuse to do anything while the emergency-stop flag is set.
if Scan.ReadSetting("stop"):
    print("EMERGENCY STOP ACTIVATED!")
    sys.exit(2)
#sys.stdout = f_log
# No arguments: either advance to the next coordinate from the scan file,
# or show help if file-driven mode is not active.
if len(sys.argv)==1:
    #print("is_file=", Scan.ReadSetting("is_file"))
    if Scan.ReadSetting("is_file"):
        x, y = Scan.ReadCoordsFile()
        if x == None and y == None:
            Scan.Finished()
        steps_x, steps_y = Scan.PosEval(x, y)
        if steps_x == 0 and steps_y == 0:
            Scan.PerformedMove()
    else:
        parser.print_help()
        Scan.ChangeSetting("is_file", 0)
        sys.exit(1)
# --- Flag handling: each option below updates settings and/or side files. ---
if args.tdk:
    print("Setting up power supply communication ")
    #os.system("echo 'Setting up tdk-lambda'")
    os.system(Scan.dir_path+"power_set setup &")
    Scan.ChangeSetting("is_power_com", 1)
if args.no_tdk:
    print("Inactivating power supply communication ")
    Scan.ChangeSetting("is_power_com", 0)
if args.view:
    print("\nCurrent settings are:")
    os.system("cat " + Scan.dir_path+Scan.config_file)
    print("")
if args.clear_log:
    print("Clearing stepper log file: \"stepper.log\"")
    os.system("cp /dev/null stepper.log")
if args.clear_logs:
    print("Clearing log files: \"stepper.log\", \"coords.log\" and \"power.log\"")
    os.system("cp /dev/null stepper.log")
    os.system("cp /dev/null coords.log")
    os.system("cp /dev/null power.log")
if args.coords:
    print("Clearing coordinate log file: \"coords.log\"")
    os.system("cp /dev/null coords.log")
if args.stop:
    # Latch the emergency-stop flag and abort immediately.
    print("\n Emergency stop with current settings:")
    os.system("cat " + Scan.dir_path+Scan.config_file)
    Scan.ChangeSetting("stop", 1)
    print("")
    sys.exit(2)
if args.resetO:
    # Drive towards (-1, -1): motion continues until end-stop sensors trip.
    steps_x, steps_y = Scan.PosEval(-1, -1)
    print("Resetting to position (0, 0)", "Stepping [x, y]: [", steps_x,", ",steps_y,"]")
if args.c_index:
    print("Resetting the coordinate file to index: ", args.c_index)
    Scan.ResetCoordFile(args.c_index)
    print("Ensuring read from file and power communication is activated ...")
    Scan.ChangeSetting("is_file", 1)
    os.system(Scan.dir_path+"power_set setup")
    Scan.ChangeSetting("is_power_com", 1)
if args.freq:
    print("Setting frequency to: ", args.freq)
    Scan.ChangeSetting("freq", args.freq)
if args.defs:
    print("Setting defaults")
    Scan.ChangeSetting("freq", 50)
    Scan.ChangeSetting("is_file", 0)
    Scan.ChangeSetting("is_power_com", 0)
if args.swipe:
    print("Generating swipe file:", args.swipe[0]+".scan")
    Scan.GenerateSwipeFile(args.swipe)
if args.new_xy:
    # Declare the current physical position to be the origin.
    Scan.ChangeSetting("pos", [0, 0]);
    print("The new origin has been successfully added to"+config_file)
    sys.exit()
if args.file_xy:
    # Switch to file-driven scanning: reset logs and keep a working copy
    # of the scan file that is consumed position by position.
    print("Setting file to read from:", args.file_xy[0])
    Scan.ChangeSetting("is_file", 1)
    Scan.ChangeSetting("read_file", args.file_xy[0])
    os.system("> coords.log")
    os.system("> power.log")
    os.system("> stepper.log")
    shutil.copyfile(Scan.dir_path+args.file_xy[0]+".scan", Scan.dir_path+"temp."+args.file_xy[0]+".scan")
if args.no_file:
    print("Deactivating read file")
    Scan.ChangeSetting("is_file", 0)
if args.steps:
    # Raw step counts given directly on the command line.
    steps_x = args.steps[0]
    steps_y = args.steps[1]
    Scan.SetNewPosition(float(steps_x), float(steps_y))
if args.xy:
    # Millimetre coordinates: convert to step counts relative to current pos.
    steps_x, steps_y = Scan.PosEval(float(args.xy[0]), float(args.xy[1]))
    #print("Stepping [x, y]: [", steps_x,", ",steps_y,"]")
if steps_x == 0 and steps_y == 0:
    print("Exiting since no steps set")
    sys.exit(0)
if not Scan.ReadSetting("is_power_com"):
    print("Power communication have not been set. Doing that now...")
    os.system(Scan.dir_path+"power_set setup")
    Scan.ChangeSetting("is_power_com", 1)
print("Stepping [x, y]: [", steps_x,", ",steps_y,"]")
import time
# Use the Gertbot drivers
import gertbot as gb
# Initial settings:
BOARD = 3 # which board we talk to
# View the linear units from the where the wagons are as close as possible to the motors: Y is bottom motor (i.e. running the KK50) and X is top motor (i.e. running the KK60)
STEPPER_Y = 0 # channel for first stepper motor
STEPPER_X = 2 # channel for second stepper motor
MODE = gb.MODE_STEPG_OFF # stepper control, gray code
# mode 0=odd
# 1=brushed
# 2=DCC
# 8=step gray off
# 9=step pulse off
# 24=step gray powered
# 25=step pulse powered
FREQ = Scan.ReadSetting("freq") # frequency
#print("Set frequency is:", FREQ)
# Main program
# Open serial port to talk to Gertbot
#print("Opening serial port ...")
gb.open_uart(0)
# Setup the channels for stepper motors
#print("Setting up channels for stepper motors ...")
gb.set_mode(BOARD,STEPPER_Y,MODE)
gb.freq_stepper(BOARD,STEPPER_Y,FREQ)
gb.set_mode(BOARD,STEPPER_X,MODE)
gb.freq_stepper(BOARD,STEPPER_X,FREQ)
# END-STOP NEEDS TO BE IMPLEMENTED CAREFULLY. (note motor polarisation and j3-pin!)
# ENDSTOP_OFF = 0
# ENDSTOP_LOW = 1
# ENDSTOP_HIGH = 2
# Set active-low endstop as:
#def set_endstop (board,channel,stop_A,stop_B)
gb.set_endstop(BOARD, STEPPER_Y, gb.ENDSTOP_LOW, gb.ENDSTOP_LOW)
gb.set_endstop(BOARD, STEPPER_X, gb.ENDSTOP_LOW, gb.ENDSTOP_LOW)
sleepy = Scan.GetSleep(steps_x, steps_y)
s_out = "echo \"Invoking move. Duration: "+str(sleepy)+" s ...\""
os.system(s_out)
if Scan.ReadSetting("is_power_com"):
print("Activating power")
Scan.SetPower("OUT 1")
time.sleep(2) #this is to ensure power is on
# DO THE ACTUAL MOVE
print("Invoking move ...")
gb.move_stepper(BOARD,STEPPER_Y,steps_y)
gb.move_stepper(BOARD,STEPPER_X,steps_x)
#Checking status after move for both motors and aborts if anything is wrong.
#This is somewhat unclear still
#motor_status = gb.get_motor_status(BOARD, STEPPER_Y)
#print("Motor status Y: ", motor_status)
#if steps_y != 0 and any(motor_status) != 0:
# print("There was a motor error/end stop reached for motor Y. - The run is aborted.")
#
#motor_status = gb.get_motor_status(BOARD, STEPPER_X)
#print("Motor status X: ", motor_status)
#if steps_x != 0 and any(motor_status) != 0:
# print("There was a motor error/end stop reached for motor X. - The run is aborted.")
#status = gb.get_io_setup(BOARD)
#print("Status: ", status)
print("Now sleeping "+str(sleepy)+" s ...")
time.sleep(sleepy)
missed_y = gb.get_motor_missed(BOARD, STEPPER_Y)
missed_x = gb.get_motor_missed(BOARD, STEPPER_X)
m_x = Scan.GetMissed(steps_x, missed_x)
m_y = Scan.GetMissed(steps_y, missed_y)
#print("Missed X,Y: ", missed_x, missed_y)
print("Missed X,Y: ", m_x, m_y)
Scan.SetRealPosition(steps_x-m_x, steps_y-m_y)
if args.resetO:
#Due to some error, we might not be at real (x0, y0) -> looping here till missed > 0
while m_x == 0 or m_y == 0:
steps_x, steps_y = Scan.PosEval(-5, -5)
if m_x == 0:
gb.move_stepper(BOARD,STEPPER_X,steps_x)
if m_y == 0:
gb.move_stepper(BOARD,STEPPER_Y,steps_y)
sleepy = Scan.GetSleep(steps_x, steps_y)
time.sleep(sleepy)
if m_x == 0:
missed_x = gb.get_motor_missed(BOARD, STEPPER_X)
m_x = Scan.GetMissed(steps_x, missed_x)
if m_y == 0:
missed_y = gb.get_motor_missed(BOARD, STEPPER_Y)
m_y = Scan.GetMissed(steps_y, missed_y)
print("RESETTING to (0,0)")
Scan.ChangeSetting("pos", [0, 0])
#print("Reading error status ...")
status = gb.read_error_status(BOARD)
print("Status received ...")
if status != 0:
print("Gertbot reports error(s):")
print(gb.error_string(status))
else:
print("all good!")
# Added this to avoid motor going into pwr state after end-stop activation.
gb.set_mode(BOARD,STEPPER_X,MODE)
gb.set_mode(BOARD,STEPPER_Y,MODE)
if Scan.ReadSetting("is_power_com"):
print("Deactivating power")
Scan.SetPower("OUT 0")
if Scan.ReadSetting("is_file"):
if m_x > 0 or m_y > 0:
Scan.ForkProcCmd("ssh mbsdaq@rio4-1 \"touch /nfs/mbsusr/mbsdaq/mbsrun/Scanner/mbs/vme_0/.abort_scan\"")
else:
Scan.PerformedMove()
f_log.close()
sys.stdout = orig_stdout
#Seems necessary for the ssh session to end?
sys.exit(0)
# on exit stop everything
#gb.emergency_stop()
|
from urllib import request
import os
import re
def download_file(url, dest_dir):
    """Download `url` into `dest_dir`, keeping the URL's basename as filename.

    Streams the body in 4 KiB chunks so large files are never fully
    buffered in memory.  The response is closed via a context manager
    (the original leaked the connection object).
    """
    dst_fname = url.split('/')[-1]
    dst_fname = os.path.join(dest_dir, dst_fname)
    with request.urlopen(url) as resp, open(dst_fname, 'wb') as fobj:
        while True:
            data = resp.read(4096)
            if not data:
                break
            fobj.write(data)
def get_patt(fname, patt):
    """Return the first regex match found on each line of file `fname`.

    The file is read as bytes and decoded as UTF-8 line by line; lines
    that fail to decode are skipped rather than aborting the scan.
    Returns a list of matched strings (at most one per line).
    """
    patt_list = []
    cpatt = re.compile(patt)
    with open(fname, 'rb') as fobj:
        while True:
            try:
                line = fobj.readline().decode('utf8')
            except UnicodeDecodeError:
                # Skip undecodable lines only; the original bare `except`
                # also swallowed unrelated errors such as KeyboardInterrupt.
                continue
            if not line:
                break
            m = cpatt.search(line)
            if m:
                patt_list.append(m.group())
    return patt_list
if __name__ == '__main__':
    # BUG FIX: os._exists() is an internal helper that tests names in the
    # os module, not filesystem paths, so the check always returned False
    # and makedirs() crashed whenever /tmp/netease already existed.
    if not os.path.exists('/tmp/netease'):
        os.makedirs('/tmp/netease')
    download_file('http://sports.163.com/index.html', '/tmp/netease')
    # Raw string avoids the invalid-escape-sequence warning for \s.
    url_patt = r'http://[^\s;)(:]+\.(png|jpeg|jpg)'
    url_list = get_patt('/tmp/netease/index.html', url_patt)
    for img_url in url_list:
        download_file(img_url, '/tmp/netease')
|
"""
#冒泡排序
def bubble_sort(alist):
n=len(alist)
for j in range(0,n-1):
count=0
for i in range(0,n-1-j):
if alist[i]>alist[i+1]:
alist[i],alist[i+1]=alist[i+1],alist[i]
count+=1
if count==0:
return
if __name__=="__main__":
alist=[54,26,93,17,31,44,55,20]
bubble_sort(alist)
print(alist)
"""
"""
#选择排序
def select_sort(alist):
n=len(alist)
for j in range(0,n-1):
min_index=j
for i in range(j+1,n):
if alist[min_index]>alist[i]:
min_index=i
alist[min_index],alist[j]=alist[j],alist[min_index]
if __name__=="__main__":
alist=[54,26,93,17,31,44,55,20]
select_sort(alist)
print(alist)
"""
"""
#插入排序
def insert_sort(alist):
n=len(alist)
for j in range(1,n):
i=j
while(i>0):
if alist[i]<alist[i-1]:
alist[i],alist[i-1]=alist[i-1],alist[i]
i-=1
else:
break
if __name__ == "__main__":
alist=[54,26,93,17,77,31,44,55,20]
insert_sort(alist)
print(alist)
"""
"""
#希尔排序
def shell_sort(alist):
n=len(alist)
gap=n//2
while gap>0:
for j in range(gap,n):
i=j
while(i>0):
if alist[i]<alist[i-gap]:
alist[i],alist[i-gap]=alist[i-gap],alist[i]
i-=gap
else:
break
gap//=2
if __name__ == "__main__":
alist=[54,26,93,17,77,31,44,55,20]
shell_sort(alist)
print(alist)
"""
"""
#快速排序
def quick_sort(alist,first,last):
if first>=last:
return
target_value=alist[first]
low=first
high=last
while low<high:
while low<high and alist[high]>=target_value:
high-=1
alist[low]=alist[high]
while low<high and alist[low]<target_value:
low+=1
alist[high]=alist[low]
alist[low]=target_value
quick_sort(alist,first,low-1)
quick_sort(alist,low+1,last)
if __name__ == "__main__":
alist=[54,26,93,17,77,31,44,55,20]
print(alist)
quick_sort(alist,0,len(alist)-1)
print(alist)
"""
# Binary search
# Recursive implementation
def binary_search1(alist, item):
    """Return True if `item` occurs in the sorted list `alist`, else False.

    BUG FIX: the recursive branches were swapped -- when the middle element
    was *smaller* than the target the original recursed into the left half
    (and vice versa), so most lookups wrongly returned False.
    """
    n = len(alist)
    if n > 0:
        mid = n//2
        if alist[mid] == item:
            return True
        elif item < alist[mid]:
            # Target is below the midpoint: search the left half.
            return binary_search1(alist[:mid], item)
        else:
            # Target is above the midpoint: search the right half.
            return binary_search1(alist[mid+1:], item)
    return False
# Iterative (non-recursive) implementation
def binary_search2(alist, item):
    """Return True if `item` occurs in the sorted list `alist`, else False.

    BUG FIX: the boundary updates were swapped -- when the middle element
    was smaller than the target the original shrank the *upper* bound,
    discarding the half that contained the target.
    """
    n = len(alist)
    first = 0
    last = n-1
    while first <= last:
        mid = (last+first)//2
        if alist[mid] == item:
            return True
        elif alist[mid] < item:
            # Target is above the midpoint: move the lower bound up.
            first = mid+1
        else:
            # Target is below the midpoint: move the upper bound down.
            last = mid-1
    return False
if __name__ == "__main__":
    alist = [54, 26, 93, 17, 77, 31, 44, 55, 20]
    # BUG FIX: binary search requires sorted input; the original searched
    # the unsorted list, so the results were meaningless.
    alist.sort()
    result1 = binary_search1(alist, 55)
    result2 = binary_search1(alist, 66)
    print(result1)
    print(result2)
import os, sys
import glob
import pandas as pd
import numpy as np
import scipy.special
import operator
from collections import Counter, defaultdict
from time import time
import datetime
import matplotlib.pyplot as plt
import matplotlib
from Upload_BodyGuardz import *
# Global plot typography applied to every figure produced by this report.
font = {'family' : 'monospace',
        'weight' : 'medium',
        'size' : 24}
matplotlib.rc('font', **font)
# Date-stamped output directory; created on the first run of the day.
today = str(datetime.date.today())
newpath = r'/home/rafa/github/For Businesses/Infinite-Agency/Results-From-{}/'.format(today)
if not os.path.exists(newpath):
    os.makedirs(newpath)
def make_histogram_time_of_day(new_frame):
    """Plot a 24-bin histogram of conversions by hour of day.

    Expects `new_frame` to have an 'hour' column; the figure is saved
    into the date-stamped results directory and shown interactively.
    """
    fonts = 24
    f, (ax0) = plt.subplots(1, 1, figsize = (10, 10))
    # Dashed grid drawn beneath the bars (zorder 0 vs 3).
    ax0.grid(zorder = 0, linestyle = 'dashed', color = '#acaaa8')
    ax0.hist(new_frame['hour'], bins = 24, color = '#a7c059', zorder = 3, histtype='bar', ec='#383632')
    ax0.set_title('Conversions by Time of Day \nTotal Conversions = {}'.format(new_frame.shape[0]))
    ax0.set_xlabel('Time of Day', fontsize = fonts)
    ax0.set_ylabel('Conversions', fontsize = fonts)
    plt.tight_layout()
    plt.savefig(newpath + 'ConversionsTimeOfDay-{}.png'.format(today))
    plt.show()
# NOTE(review): module-level figure/axes reused by make_impressions_hist via
# pyplot's implicit current-figure state -- confirm before refactoring.
fig, ax = plt.subplots(figsize = (10, 10))
plt.grid(zorder = 0, linestyle = 'dashed', color = '#acaaa8')
def make_ad_dict(optimal):
    """Tally how often each ad appears across all conversion paths.

    optimal: mapping/DataFrame exposing 'paths_by_adname', whose entries
        are iterables of ad names (one path per conversion).

    Returns (xvalues, yvalues, disp_avg, names):
        xvalues  — range(1, n_ads + 1), bar positions for plotting
        yvalues  — appearance counts, sorted descending
        disp_avg — mean appearance count across distinct ads
        names    — ad names in the same descending-count order

    Raises ZeroDivisionError if there are no paths (same as the original).
    """
    # One Counter pass replaces the original's two loops (collect distinct
    # names, then tally); it also drops the `sums` list that was built but
    # never used.
    counts = Counter()
    for path in optimal['paths_by_adname']:
        counts.update(path)
    # Sort ascending by count then reverse — kept exactly as the original
    # so tie ordering is unchanged (ties end up in reversed first-seen order).
    ads = sorted(counts.items(), key=operator.itemgetter(1))[::-1]
    xvalues = range(1, len(ads) + 1)
    yvalues = [count for _, count in ads]
    names = [str(name) for name, _ in ads]
    disp_avg = sum(yvalues) / float(len(ads))
    return xvalues, yvalues, disp_avg, names
def make_impressions_hist(big_frame):
    """Bar-chart per-ad impression counts with a dashed average line.

    Draws onto the current matplotlib figure, saves it under newpath,
    then shows it.
    """
    positions, counts, avg_count, labels = make_ad_dict(big_frame)
    plt.bar(positions, counts, ec='#383632', color='#a7c059', zorder=3)
    plt.axhline(y=avg_count,
                zorder=3,
                color='#383632',
                linestyle='dashed',
                label='Avg Impressions({})'.format(round(avg_count, 0)))
    plt.ylabel('Number of Impressions')
    plt.xticks(positions, labels, rotation=90)
    plt.legend()
    plt.savefig(newpath + 'AdImpressions-Conversions_TAGID-{}.png'.format(today))
    plt.show()
if __name__ == '__main__':
    # Reuse today's cached CSV when present; otherwise rebuild it with
    # make_big_frame() (imported via the Upload_BodyGuardz star import).
    big_frame_path = r'/home/rafa/github/For Businesses/Infinite-Agency/Results-From-{}/big_frame-{}.csv'.format(today, today)
    if not os.path.exists(big_frame_path):
        big_frame = make_big_frame()
    else:
        big_frame = pd.read_csv(big_frame_path, low_memory = False)
    make_histogram_time_of_day(big_frame)
    make_impressions_hist(big_frame)
import numpy as np
from numpy.testing import assert_array_equal
from pystruct.inference import inference_lp
def test_chain():
    """LP, AD3, AD3-BB and JT inference on a chain should all be exact.

    Fixes vs. the original: `xrange` (Python 2 only) replaced with
    `range`, and the bare `except:` clauses — which silently swallowed
    assert_array_equal failures, letting the test pass on disagreement —
    narrowed to ImportError so only a missing optional back-end is
    skipped.
    """
    rnd = np.random.RandomState(0)
    for _ in range(10):  # loop index was unused
        forward = np.c_[np.arange(9), np.arange(1, 10)]
        backward = np.c_[np.arange(1, 10), np.arange(9)]
        unary_potentials = rnd.normal(size=(10, 3))
        pairwise_potentials = rnd.normal(size=(3, 3))
        # reversing the edges must be the same as transposing the
        # pairwise potentials
        y_forward = inference_lp(unary_potentials, pairwise_potentials,
                                 forward)
        y_backward = inference_lp(unary_potentials, pairwise_potentials.T,
                                  backward)
        assert_array_equal(y_forward, y_backward)
        for chain in [forward, backward]:
            y_lp = inference_lp(unary_potentials, pairwise_potentials, chain)
            try:
                from pystruct.inference import inference_dai
            except ImportError:
                pass  # optional libDAI back-end not installed
            else:
                y_dai = inference_dai(unary_potentials, pairwise_potentials,
                                      chain, alg='jt')
                assert_array_equal(y_dai, y_lp)
            try:
                from pystruct.inference import inference_ad3
            except ImportError:
                pass  # optional AD3 back-end not installed
            else:
                y_ad3 = inference_ad3(unary_potentials, pairwise_potentials,
                                      chain)
                y_ad3bb = inference_ad3(unary_potentials, pairwise_potentials,
                                        chain, branch_and_bound=True)
                assert_array_equal(y_ad3, y_lp)
                assert_array_equal(y_ad3bb, y_lp)
|
# -*- coding: utf-8 -*-
"""
Created on Mon Oct 3 08:30:22 2016
@author: chris
"""
import random
# Reverse guessing game: the computer guesses the player's number and
# narrows the range [lownumber, highnumber] from the player's feedback.
guesses=0
lownumber =0
highnumber=100
number=random.randint(lownumber,highnumber)
print("Is the number ", number, "?")
response=input("Respond 'Yes','Higher', or 'Lower':")
while response != "Yes":
    # Give up after 6 wrong guesses and ask the player for the answer.
    if guesses > 5:
        input("That is incorrect..... Type the answer here:")
        print("Dang it!")
        break
    if response == "Higher":
        # Answer is above the last guess: raise the lower bound.
        lownumber = number
        number = random.randint(lownumber+1,highnumber)
        print("Is it ", number, "?")
        response = input("Respond 'Yes', 'Higher', or 'Lower':")
        guesses=guesses+1
    elif response == "Lower":
        # Answer is below the last guess: lower the upper bound.
        highnumber = number
        number = random.randint(lownumber,highnumber-1)
        print("Is it ", number, "?")
        response = input("Respond 'Yes', 'Higher', or 'Lower':")
        guesses=guesses+1
    # NOTE(review): any other response matches neither branch and is never
    # re-prompted, and guesses stays the same — the loop then spins forever.
if response == "Yes":
    print("Yeah boi!!!!")
input("Press enter to exit")
|
from typing import List
import collections
class Solution:
    def maxDistance(self, colors: List[int]) -> int:
        """Greatest |i - j| between two houses with different colors.

        An optimal pair always uses house 0 or the last house as one
        endpoint, so scan inward from each end past same-colored houses.
        O(n) time, O(1) space.
        """
        last = len(colors) - 1
        # Furthest-right house whose color differs from house 0.
        hi = last
        while colors[hi] == colors[0]:
            hi -= 1
        # Furthest-left house whose color differs from the last house.
        lo = 0
        while colors[lo] == colors[-1]:
            lo += 1
        return max(hi, last - lo)
def main():
    """Ad-hoc checks mirroring the LeetCode examples (prints 3, 4, 1)."""
    solver = Solution()
    for colors in ([1, 1, 1, 6, 1, 1, 1], [1, 8, 3, 8, 3], [0, 1]):
        print(solver.maxDistance(colors))


if __name__ == '__main__':
    main()
from Parser import *
from ListCreator import *
import os
import cProfile
# Ad-hoc smoke tests for the PlayList class (prints pass/fail instead of
# asserting).
test = PlayList('https://www.youtube.com/playlist?list=PLMC1lL-g1-zajmJpSneZcXCB-dVpMwbcB','Test Playlist')
# Stringified constructor call, presumably kept for timing via cProfile.
s = "test = PlayList('https://www.youtube.com/playlist?list=PLMC1lL-g1-zajmJpSneZcXCB-dVpMwbcB','Test Playlist')"
print("Tests For %s" % test.fileName)
#urlList
# Expected parse result for the two-video test playlist.
testList = ['https://www.youtube.com/watch?v=SeyweGHLxSg','https://www.youtube.com/watch?v=yappQQtfk8Q']
if testList == test.urlList:
    print("test for urlList: %s" % str(True))
else:
    print("test for urlList: %s" % str(False))
#empty file
# writeE() should truncate the CSV file to zero bytes.
test.writeE()
if os.stat(test.fileName).st_size == 0:
    print('test for empty file: %s' % True)
else:
    print('test for empty file %s' % False)
#fileName
print("Test for fileName:")
if test.fileName == 'csv files/Test Playlist.csv':
    print("%s" % True)
else:
    print("%s" % False)
#FileWrite
test.write(test.urlList)
#oldList()
print("%s" % (test.oldList[0]))
print("%s" % (test.urlList))
test.write(test.urlList)
print(test.oldList)
print(test.urlList)
# Scratch comparisons of three ways to filter the stored URLs.
# NOTE(review): `a = [s for s in ...]` rebinds the module-level `s` above.
a = [s for s in test.oldList if "=" in s]
b = list(filter(lambda x: 'watch' in x, test.oldList))
# NOTE(review): this any(...) result is discarded — dead statement.
any('=' == x for x in test.oldList)
print(a)
print(b)
#Timing of Functions
|
import sqlite3
import sys
# Text-adventure engine backed by a SQLite 'object' table: rooms and the
# player are rows; the u/d/n/s/e/w columns are used below as exits.
conn = sqlite3.connect('mollys_mansion.db')
c = conn.cursor()
# Print the opening text stored on the 'offscreen' object.
name = ("offscreen",)
c.execute("select desc from object where name=?", name)
p=''.join(c.fetchone())
print(p)
running=1
currLocation=" "
# The player's starting room id is the 'holder' of the player object.
player=("player",)
c.execute("SELECT holder from object where name=?",player)
holder = c.fetchone()
#print(holder[0])
initialLoc=list(holder)
currLocation=list(holder)
#History location for none-type error catching
prevLocation = currLocation
#print(currLocation)
# Announce the starting room by name.
c.execute("SELECT name from object where id=?",initialLoc)
initLocName= ''.join(c.fetchone())
print(initLocName)
# Sentinel kept for the game loop's None checks.
none = None
while running:
    # Read and normalise the player's command.  The original stored this
    # in a variable named `input`, shadowing the builtin — the second
    # loop iteration then crashed with "'str' object is not callable".
    cmd = input("Enter ").upper()
    if cmd == "LOOK":
        cmd = cmd.lower()
    if cmd == 'U' or cmd == 'D' or cmd == 'N' or cmd == 'S' or cmd == 'E' or cmd == 'W':
        # cmd is validated to one direction letter above and those letters
        # are column names, so interpolating it here is safe.
        c.execute("SELECT("+cmd+") from object WHERE id=?", currLocation)
    elif cmd == "look":
        c.execute("SELECT desc FROM object where id=?", currLocation)
        currDesc = c.fetchone()
        if currDesc[0] is None:
            print("Nothing out of the ordinary")
        else:
            printableDesc = ''.join(currDesc)
            print(printableDesc)
    elif cmd == "EXIT":
        # The original compared against "Exit"/"exit" AFTER upper-casing,
        # so quitting was unreachable; compare the upper-cased form.
        print("exiting.....")
        running = 0
        break
    else:
        print("I didn't understand that come again")
    # ----------------------------------------------------------------------
    # None-type checking for the location variables.
    holder = c.fetchone()
    if holder is None:
        # Nothing pending on the cursor (e.g. after "look"): just re-prompt.
        continue
    currLocation = list(holder)
    if currLocation[0] is None:
        # No exit in that direction: stay where we were.
        print("Cant go that way dummy")
        currLocation = prevLocation
    else:
        prevLocation = currLocation
    # Display the player's current location.
    c.execute('SELECT name FROM object WHERE id=?', currLocation)
    print('\n\n', "You are in the", ''.join(c.fetchone()), '\n\n')
|
def even_or_odd(n):
    """Print "even" or "odd" for n; always returns None (print-only helper)."""
    parity = "even" if n % 2 == 0 else "odd"
    print(parity)

# Example: w = even_or_odd(31) prints "odd"; w itself is None because the
# function never returns a value.
|
from flask import Flask
app = Flask(__name__)
@app.route('/')
def hello():
    """Root route: static greeting."""
    return "Hello World!"
@app.route('/<userinput>')
def hellodojo(userinput):
    """Echo the URL path segment, title-cased."""
    return userinput.title()
@app.route('/say/<userinput>')
def helloinput(userinput):
    """Greet the title-cased path segment."""
    return "Hi " + userinput.title() + "!"
@app.route('/repeat/<num>/<userinput>')
def hellorepeat(num, userinput):
    """Repeat the text int(num) times with a trailing '*'.

    NOTE(review): the trailing "*" looks accidental — confirm intent.
    """
    repeated = userinput * int(num)
    return repeated + "*"
if __name__=="__main__":
    # Dev server with interactive debugger/reloader enabled.
    app.run(debug=True)
|
import numpy as np # linear algebra
import pandas as pd # data processing, CSV file I/O (e.g. pd.read_csv)
ds = pd.read_csv("./csv-data/site-content.csv", index_col="site_host")
# Split columns into numeric (int/float) and object (string) groups.
int_cols = [col for col in ds.columns if ds[col].dtype in ["int64", "float64"]]
obj_cols = [col for col in ds.columns if ds[col].dtype == "object"]
# Numeric columns: replace NaN with 0.
new_int_ds = ds[int_cols].copy().fillna(0)
# For each object column, add a boolean "<col>_is_none" missing-value flag.
append_ds = pd.DataFrame()
for col in obj_cols:
    append_ds[col + "_is_none"] = ds[col].isnull()
new_all_ds = pd.concat([new_int_ds, append_ds], axis="columns")
# Object columns: replace NaN with "" and append them as well.
new_obj_ds = ds[obj_cols].copy().fillna("")
new_all_ds = pd.concat([new_all_ds, new_obj_ds], axis="columns")
#int_cols.extend(obj_cols)
# (leftover experiment: collect the columns of other dtypes)
#left_ds = ds.copy().drop(columns=int_cols)
from sklearn.preprocessing import LabelEncoder
# Label-encode every object column so each row becomes purely numeric.
lable_enc = LabelEncoder()
ds_1 = new_all_ds.copy()
for col in obj_cols:
    ds_1[col] = lable_enc.fit_transform(ds_1[col])
data_array = ds_1.to_numpy().tolist()
# All host names (the row index).
indexs = ds_1.index
def get_similar_map(fun_call):
    """Build the full pairwise similarity matrix of every site vs every site.

    fun_call: similarity function taking two equal-length feature vectors.
    Returns a DataFrame whose rows and columns are both the site hosts
    (reads the module-level data_array / indexs / ds_1).
    """
    result_df = pd.DataFrame()
    for host, reference_row in zip(indexs, data_array):
        result_df[host] = [fun_call(row, reference_row) for row in data_array]
    result_df.index = ds_1.index
    return result_df
def pearsonSimilar(inA,inB):
if len(inA)<3:
return 1.0
return 0.5+0.5*np.corrcoef(inA,inB,rowvar=0)[0][1]
def euclidSimilar(inA, inB):
    """Similarity in (0, 1]: 1 / (1 + Euclidean distance)."""
    distance = np.linalg.norm(np.array(inA) - np.array(inB))
    return 1.0 / (1.0 + distance)
def cosSimilar(inA, inB):
    """Cosine similarity rescaled from [-1, 1] to [0, 1].

    Rewritten without numpy.matrix (np.mat), which is deprecated in
    favor of regular ndarrays; the dot-product math is unchanged.
    Raises ZeroDivisionError-like nan/inf behavior for zero vectors,
    same as the original.
    """
    a = np.asarray(inA, dtype=float).ravel()
    b = np.asarray(inB, dtype=float).ravel()
    num = float(np.dot(a, b))
    denom = np.linalg.norm(a) * np.linalg.norm(b)
    return 0.5 + 0.5 * (num / denom)
# Export one full similarity matrix per metric.
result_df = get_similar_map(pearsonSimilar)
result_df.to_csv("./result/site_pearson_result.csv")
result_df = get_similar_map(euclidSimilar)
result_df.to_csv("./result/site_euclid_result.csv")
result_df = get_similar_map(cosSimilar)
result_df.to_csv("./result/site_cos_result.csv")
import matplotlib.pyplot as plt
import seaborn as sns
# Heatmap of the LAST computed matrix (cosine similarity).
graf = plt.figure(figsize=(40,40))
# Add title
plt.title("similiar of site")
# Heatmap showing average arrival delay for each airline by month
# NOTE(review): comment above is copied from a tutorial; this heatmap
# actually shows site-vs-site cosine similarity.
sns.heatmap(data=result_df, annot=True)
# Add label for horizontal axis
plt.xlabel("site_host")
graf.savefig("test.png")
|
from django.test import TestCase
from django.forms.models import model_to_dict
from .factories import DemandFactory
from ..serializers import DemandSerializer
from nose.tools import eq_, ok_
import pytest
pytestmark = pytest.mark.django_db
class TestCreateDemandSerializer(TestCase):
    """DemandSerializer validation: rejects empty payloads, accepts factory data."""

    def setUp(self):
        # Build an unsaved Demand and flatten it to a plain dict payload.
        self.demand_data = model_to_dict(DemandFactory.build())

    def test_serializer_with_empty_data(self):
        empty_serializer = DemandSerializer(data={})
        eq_(empty_serializer.is_valid(), False)

    def test_serializer_with_valid_data(self):
        valid_serializer = DemandSerializer(data=self.demand_data)
        ok_(valid_serializer.is_valid())
|
from flask import (
Blueprint, abort, flash, g, redirect, render_template, request, url_for
)
from datetime import datetime
from kittycount.db import get_db
bp = Blueprint('visits', __name__)
@bp.route('/')
def index():
    """List all recorded visits, newest first."""
    database = get_db()
    rows = database.execute(
        """
        SELECT visits, datetime(time, 'localtime') as time
        FROM visits
        ORDER BY time DESC
        """
    ).fetchall()
    return render_template('visits/visits.html', visits=rows)
@bp.route('/webhook', methods=['POST'])
def webhook():
    """Record a visit count posted as JSON ({"count": n}), then redirect home."""
    database = get_db()
    if request.method == 'POST':
        print(request.json)
        count = request.json['count']
        database.execute(
            'INSERT INTO visits (visits)'
            ' VALUES (?)',
            (count,)
        )
        database.commit()
    return redirect(url_for('visits.index'))
@bp.context_processor
def utility_processor():
    """Expose str2dt to templates for parsing SQLite timestamp strings."""
    def str2dt(s):
        return datetime.strptime(s, "%Y-%m-%d %H:%M:%S")
    return {'str2dt': str2dt}
import numpy
import sys
# Usage: script.py <i> <j> — writes i*j standard-normal samples to
# output_<i>_<j>.txt.
print(sys.argv)
i = int(sys.argv[1])
j = int(sys.argv[2])
a = numpy.random.normal(size=i * j)
# Two equivalent ways of building the same file name, printed for demo.
print("output_%d_%d.txt" % (i, j))
print("output_{}_{}.txt".format(i, j))
info = {
    "firstvalue": i,
    "secondvalue": j,
}
print(info)
#print("output_%(firstvalue)d_%(secondvalue)d_%(firstvalue).5f.txt" % info)
numpy.savetxt("output_%d_%d.txt" % (i, j), a)
|
from flask import Flask, render_template,request,send_from_directory
from flask_sqlalchemy import SQLAlchemy
from flask_security import Security,SQLAlchemyUserDatastore,UserMixin, RoleMixin, login_required
from flask_security.utils import hash_password
import json
import os
# Absolute path of the SQLite database in the working directory.
# The original built this as os.getcwd() + "\main.db" — a Windows-style
# backslash that, on the Linux paths used elsewhere in this file, produced
# a literal file named '...\main.db'.  os.path.join is correct on both.
file_path = os.path.join(os.path.abspath(os.getcwd()), "main.db")
with open('config.json', 'r') as c:
    params = json.load(c)["params"]
app = Flask(__name__, instance_path='/home/abrar/Desktop/Abrar/myBlog/engineering-blog-repository-master/special_files')
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///' + file_path
app.config["SECRET_KEY"] = "###@@@***786786"
app.config["SECURITY_PASSWORD_SALT"] = "###@@@***abrar"
# app.config['SECURITY_LOGIN_USER_TEMPLATE'] = '/security/login_user.html'
app.app_context().push()
db = SQLAlchemy(app)
# ! all_post Table starts---------------------------------------------------------------------------------------
# TODO how to access elements using the database structure below ------>>>
# ? -----example is taken of Arduinoproject_posts table---- alias of Arduinoproject_posts is App
# ? App_obj_name.content_parts[index_num].columns -----1 (.part1 or .img1)
# ? App_obj_name.comment[index_num].column ----3
# ? App_obj_name.comment[index_num].replies[index_num].column ----3
# ? PERFECT >>>> {+++**+++} {+++**++++}
# ?
# ? 00
# ? ----___ ___----
#? -----
# !----------------------Admin pannel setup-------------------------------------------------
# Association table linking users to their roles (many-to-many).
roles_users = db.Table(
    'roles_users',
    db.Column('user_id', db.Integer, db.ForeignKey('user.id')),
    db.Column('role_id', db.Integer, db.ForeignKey('role.id')),
)


class User(UserMixin, db.Model):
    """Flask-Security user account."""
    id = db.Column(db.Integer, primary_key=True)
    email = db.Column(db.String(100), unique=True)
    password = db.Column(db.String(255))
    active = db.Column(db.Boolean)
    confirmed_at = db.Column(db.DateTime)
    roles = db.relationship(
        'Role',
        secondary=roles_users,
        backref=db.backref('users', lazy='dynamic'),
    )


class Role(db.Model, RoleMixin):
    """Named role (e.g. admin) assignable to users."""
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(100))
    description = db.Column(db.String(200))


# Wire the models into Flask-Security.
user_datastore = SQLAlchemyUserDatastore(db, User, Role)
security = Security(app, user_datastore)
# !--------------------------------------------------------------------------------------------------------------------------
class Arduinoproject_posts(db.Model):
    """An Arduino-project blog post plus its related one-to-many content."""
    id = db.Column(db.Integer, primary_key=True, nullable=False)
    date = db.Column(db.String(50), nullable=False)
    thumbnail = db.Column(db.String(100), nullable=False)
    cover_img = db.Column(db.String(100), nullable=False)
    url = db.Column(db.String(100), nullable=False)
    meta_keywords = db.Column(db.String(100), nullable=False)
    meta_title = db.Column(db.String(100), nullable=False)
    img_description = db.Column(db.String(100), nullable=False)
    keyword = db.Column(db.String(100), nullable=False)
    type = db.Column(db.String(100), nullable=True)
    heading = db.Column(db.String(100), nullable=False)
    description = db.Column(db.String(200), nullable=False)
    article = db.Column(db.Text)
    # One-to-many collections hanging off this post.
    quick_answers = db.relationship('Quick_answers_arduino', backref='post_name')
    index = db.relationship('Index_arduino', backref='post_name')
    faq = db.relationship('Faq_arduino', backref='post_name')
    comment = db.relationship('Comments_arduino', backref='post_name')  # comment functionality not wired up yet
    # NOTE(review): self-referencing FK — none of the sibling post models
    # carry one; confirm it is intentional.
    arduino_id = db.Column(db.Integer, db.ForeignKey('arduinoproject_posts.id'))


class Index_arduino(db.Model):
    """Table-of-contents entry for an Arduino post."""
    id = db.Column(db.Integer, primary_key=True, nullable=False)
    topic = db.Column(db.String(100), nullable=True)
    arduinopost_id = db.Column(db.Integer, db.ForeignKey('arduinoproject_posts.id'))


class Quick_answers_arduino(db.Model):
    """Short Q/A pair shown on an Arduino post."""
    id = db.Column(db.Integer, primary_key=True, nullable=False)
    ques = db.Column(db.String(100), nullable=True)
    ans = db.Column(db.Text, nullable=True)
    arduinopost_id = db.Column(db.Integer, db.ForeignKey('arduinoproject_posts.id'))


class Faq_arduino(db.Model):
    """FAQ entry for an Arduino post."""
    id = db.Column(db.Integer, primary_key=True, nullable=False)
    faq_q = db.Column(db.String(100), nullable=True)
    faq_ans = db.Column(db.Text, nullable=True)
    arduinopost_id = db.Column(db.Integer, db.ForeignKey('arduinoproject_posts.id'))


class Comments_arduino(db.Model):
    """User comment on an Arduino post."""
    id = db.Column(db.Integer, primary_key=True, nullable=False)
    user_name = db.Column(db.String(100), nullable=False)
    comment = db.Column(db.String(200), nullable=False)
    replies = db.relationship('Comment_replies_arduino', backref='comment_name')
    arduinopost_nameid = db.Column(db.Integer, db.ForeignKey('arduinoproject_posts.id'))


class Comment_replies_arduino(db.Model):
    """Reply to a comment on an Arduino post."""
    id = db.Column(db.Integer, primary_key=True, nullable=False)
    user_name = db.Column(db.String(200), nullable=False)
    reply = db.Column(db.String(200), nullable=False)
    Comment_nameid = db.Column(db.Integer, db.ForeignKey('comments_arduino.id'))
# *-----------------------------------------------------------------------------------------------------------
class Basicproject_posts(db.Model):
    """A basic-electronics blog post plus its related one-to-many content."""
    id = db.Column(db.Integer, primary_key=True, nullable=False)
    date = db.Column(db.String(50), nullable=False)
    thumbnail = db.Column(db.String(100), nullable=False)
    cover_img = db.Column(db.String(100), nullable=False)
    url = db.Column(db.String(100), nullable=False)
    meta_keywords = db.Column(db.String(100), nullable=False)
    meta_title = db.Column(db.String(100), nullable=False)
    img_description = db.Column(db.String(100), nullable=False)
    keyword = db.Column(db.String(100), nullable=False)
    type = db.Column(db.String(100), nullable=True)
    heading = db.Column(db.String(100), nullable=False)
    description = db.Column(db.String(200), nullable=False)
    article = db.Column(db.Text)
    quick_answers = db.relationship('Quick_answers_basic', backref='post_name')
    index = db.relationship('Index_basic', backref='post_name')
    faq = db.relationship('Faq_basic', backref='post_name')
    comment = db.relationship('Comments_basic', backref='post_name')


class Para_basic(db.Model):
    """Free-text paragraph attached to a basic post."""
    id = db.Column(db.Integer, primary_key=True, nullable=False)
    content = db.Column(db.Text)
    basic_id = db.Column(db.Integer, db.ForeignKey('basicproject_posts.id'))


class Quick_answers_basic(db.Model):
    """Short Q/A pair shown on a basic post."""
    id = db.Column(db.Integer, primary_key=True, nullable=False)
    ques = db.Column(db.String(100), nullable=True)
    ans = db.Column(db.Text, nullable=True)
    basicpost_id = db.Column(db.Integer, db.ForeignKey('basicproject_posts.id'))


class Faq_basic(db.Model):
    """FAQ entry for a basic post."""
    id = db.Column(db.Integer, primary_key=True, nullable=False)
    faq_q = db.Column(db.String(100), nullable=True)
    faq_ans = db.Column(db.Text, nullable=True)
    basicpost_id = db.Column(db.Integer, db.ForeignKey('basicproject_posts.id'))


class Comments_basic(db.Model):
    """User comment on a basic post."""
    id = db.Column(db.Integer, primary_key=True, nullable=False)
    user_name = db.Column(db.String(100), nullable=False)
    comment = db.Column(db.String(200), nullable=False)
    basicpost_nameid = db.Column(db.Integer, db.ForeignKey('basicproject_posts.id'))
    replies = db.relationship('Comment_replies_basic', backref='comment_name')


class Comment_replies_basic(db.Model):
    """Reply to a comment on a basic post."""
    id = db.Column(db.Integer, primary_key=True, nullable=False)
    user_name = db.Column(db.String(200), nullable=False)
    reply = db.Column(db.String(200), nullable=False)
    Comment_nameid = db.Column(db.Integer, db.ForeignKey('comments_basic.id'))


class Index_basic(db.Model):
    """Table-of-contents entry for a basic post."""
    id = db.Column(db.Integer, primary_key=True, nullable=False)
    topic = db.Column(db.String(100), nullable=True)
    basicpost_id = db.Column(db.Integer, db.ForeignKey('basicproject_posts.id'))
# *-----------------------------------------------------------------------------------------------------------
class Iotproject_posts(db.Model):
    """An IoT-project blog post plus its related one-to-many content."""
    id = db.Column(db.Integer, primary_key=True, nullable=False)
    date = db.Column(db.String(50), nullable=False)
    thumbnail = db.Column(db.String(100), nullable=False)
    cover_img = db.Column(db.String(100), nullable=False)
    url = db.Column(db.String(100), nullable=False)
    meta_keywords = db.Column(db.String(100), nullable=False)
    meta_title = db.Column(db.String(100), nullable=False)
    img_description = db.Column(db.String(100), nullable=False)
    keyword = db.Column(db.String(100), nullable=False)
    type = db.Column(db.String(100), nullable=True)
    heading = db.Column(db.String(100), nullable=False)
    description = db.Column(db.String(200), nullable=False)
    article = db.Column(db.Text)
    quick_answers = db.relationship('Quick_answers_iot', backref='post_name')
    index = db.relationship('Index_iot', backref='post_name')
    faq = db.relationship('Faq_iot', backref='post_name')
    comment = db.relationship('Comments_iot', backref='post_name')


class Para_iot(db.Model):
    """Free-text paragraph attached to an IoT post."""
    id = db.Column(db.Integer, primary_key=True, nullable=False)
    content = db.Column(db.Text)
    iot_id = db.Column(db.Integer, db.ForeignKey('iotproject_posts.id'))


class Quick_answers_iot(db.Model):
    """Short Q/A pair shown on an IoT post."""
    id = db.Column(db.Integer, primary_key=True, nullable=False)
    ques = db.Column(db.String(100), nullable=True)
    ans = db.Column(db.Text, nullable=True)
    iotpost_id = db.Column(db.Integer, db.ForeignKey('iotproject_posts.id'))


class Faq_iot(db.Model):
    """FAQ entry for an IoT post."""
    id = db.Column(db.Integer, primary_key=True, nullable=False)
    faq_q = db.Column(db.String(100), nullable=True)
    faq_ans = db.Column(db.Text, nullable=True)
    iotpost_id = db.Column(db.Integer, db.ForeignKey('iotproject_posts.id'))


class Comments_iot(db.Model):
    """User comment on an IoT post."""
    id = db.Column(db.Integer, primary_key=True, nullable=False)
    user_name = db.Column(db.String(100), nullable=False)
    comment = db.Column(db.String(200), nullable=False)
    iotpost_nameid = db.Column(db.Integer, db.ForeignKey('iotproject_posts.id'))
    replies = db.relationship('Comment_replies_iot', backref='comment_name')


class Comment_replies_iot(db.Model):
    """Reply to a comment on an IoT post."""
    id = db.Column(db.Integer, primary_key=True, nullable=False)
    user_name = db.Column(db.String(200), nullable=False)
    reply = db.Column(db.String(200), nullable=False)
    Comment_nameid = db.Column(db.Integer, db.ForeignKey('comments_iot.id'))


class Index_iot(db.Model):
    """Table-of-contents entry for an IoT post."""
    id = db.Column(db.Integer, primary_key=True, nullable=False)
    topic = db.Column(db.String(100), nullable=True)
    iotpost_id = db.Column(db.Integer, db.ForeignKey('iotproject_posts.id'))
# *-----------------------------------------------------------------------------------------------------------
class Other_posts(db.Model):
    """A miscellaneous blog post plus its related one-to-many content."""
    id = db.Column(db.Integer, primary_key=True, nullable=False)
    date = db.Column(db.String(50), nullable=False)
    thumbnail = db.Column(db.String(100), nullable=False)
    cover_img = db.Column(db.String(100), nullable=False)
    url = db.Column(db.String(100), nullable=False)
    meta_keywords = db.Column(db.String(100), nullable=False)
    meta_title = db.Column(db.String(100), nullable=False)
    img_description = db.Column(db.String(100), nullable=False)
    keyword = db.Column(db.String(100), nullable=False)
    type = db.Column(db.String(100), nullable=True)
    heading = db.Column(db.String(100), nullable=False)
    description = db.Column(db.String(200), nullable=False)
    article = db.Column(db.Text)
    quick_answers = db.relationship('Quick_answers_other', backref='post_name')
    index = db.relationship('Index_other', backref='post_name')
    faq = db.relationship('Faq_other', backref='post_name')
    comment = db.relationship('Comments_other', backref='post_name')


class Quick_answers_other(db.Model):
    """Short Q/A pair shown on an 'other' post."""
    id = db.Column(db.Integer, primary_key=True, nullable=False)
    ques = db.Column(db.String(100), nullable=True)
    ans = db.Column(db.Text, nullable=True)
    otherpost_id = db.Column(db.Integer, db.ForeignKey('other_posts.id'))


class Faq_other(db.Model):
    """FAQ entry for an 'other' post."""
    id = db.Column(db.Integer, primary_key=True, nullable=False)
    faq_q = db.Column(db.String(100), nullable=True)
    faq_ans = db.Column(db.Text, nullable=True)
    otherpost_id = db.Column(db.Integer, db.ForeignKey('other_posts.id'))


class Comments_other(db.Model):
    """User comment on an 'other' post."""
    id = db.Column(db.Integer, primary_key=True, nullable=False)
    user_name = db.Column(db.String(100), nullable=False)
    comment = db.Column(db.String(200), nullable=False)
    other_nameid = db.Column(db.Integer, db.ForeignKey('other_posts.id'))
    replies = db.relationship('Comment_replies_other', backref='comment_name')


class Comment_replies_other(db.Model):
    """Reply to a comment on an 'other' post."""
    id = db.Column(db.Integer, primary_key=True, nullable=False)
    user_name = db.Column(db.String(200), nullable=False)
    reply = db.Column(db.String(200), nullable=False)
    Comment_nameid = db.Column(db.Integer, db.ForeignKey('comments_other.id'))


class Index_other(db.Model):
    """Table-of-contents entry for an 'other' post."""
    id = db.Column(db.Integer, primary_key=True, nullable=False)
    topic = db.Column(db.String(100), nullable=True)
    otherpost_id = db.Column(db.Integer, db.ForeignKey('other_posts.id'))
#!----------------Draft database---------------------------------------------! #
class Draft(db.Model):
    """Unpublished post draft mirroring the published post schema."""
    id = db.Column(db.Integer, primary_key=True, nullable=False)
    date = db.Column(db.String(50), nullable=False)
    thumbnail = db.Column(db.String(100), nullable=False)
    cover_img = db.Column(db.String(100), nullable=False)
    url = db.Column(db.String(100), nullable=False)
    meta_keywords = db.Column(db.String(100), nullable=False)
    meta_title = db.Column(db.String(100), nullable=False)
    img_description = db.Column(db.String(100), nullable=False)
    keyword = db.Column(db.String(100), nullable=False)
    type = db.Column(db.String(100), nullable=True)
    heading = db.Column(db.String(100), nullable=False)
    description = db.Column(db.String(200), nullable=False)
    article = db.Column(db.Text)
    quick_answers = db.relationship('Quick_answers_draft', backref='post_name')
    index = db.relationship('Index_draft', backref='post_name')
    faq = db.relationship('Faq_draft', backref='post_name')


class Quick_answers_draft(db.Model):
    """Short Q/A pair attached to a draft."""
    id = db.Column(db.Integer, primary_key=True, nullable=False)
    ques = db.Column(db.String(100), nullable=True)
    ans = db.Column(db.Text, nullable=True)
    draft_id = db.Column(db.Integer, db.ForeignKey('draft.id'))


class Faq_draft(db.Model):
    """FAQ entry attached to a draft."""
    id = db.Column(db.Integer, primary_key=True, nullable=False)
    faq_q = db.Column(db.String(100), nullable=True)
    faq_ans = db.Column(db.Text, nullable=True)
    draft_id = db.Column(db.Integer, db.ForeignKey('draft.id'))


class Index_draft(db.Model):
    """Table-of-contents entry attached to a draft."""
    id = db.Column(db.Integer, primary_key=True, nullable=False)
    topic = db.Column(db.String(100), nullable=True)
    draft_id = db.Column(db.Integer, db.ForeignKey('draft.id'))
# !-------------------------------------------------------------------------------------------------------------------------
#!--all_post tables END-----|||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||
# ! other_details tables start-----------------------------------------------------------------------------------
class Subscribers(db.Model):
    """Newsletter subscriber email addresses (unique)."""
    id = db.Column(db.Integer, primary_key=True, nullable=False)
    email = db.Column(db.String(111), nullable=False, unique=True)


class Messages(db.Model):
    """Contact-form messages."""
    id = db.Column(db.Integer, primary_key=True, nullable=False)
    email = db.Column(db.String(111), nullable=False, unique=False)
    # (sic) misspelled column name kept as-is for schema compatibility.
    messsage = db.Column(db.Text, nullable=False)


class About_me(db.Model):
    """'About me' profile text."""
    id = db.Column(db.Integer, primary_key=True, nullable=False)
    profile = db.Column(db.Text, nullable=True)
#! end---------------------------------------------------------------------------------------------------------
class Variables(db.Model):
    """Scratch row holding in-progress editor state for a post.

    List-like fields (quick_questions, index, faq_*) are stored as JSON
    text — see createVariables in this module, which seeds them as "[]".
    """
    id = db.Column(db.Integer, primary_key=True, nullable=False)
    url = db.Column(db.Text, nullable=True)
    title = db.Column(db.Text, nullable=True)
    cover_image = db.Column(db.Text, nullable=True)
    cover_image_description = db.Column(db.Text, nullable=True)
    meta_keywords = db.Column(db.Text, nullable=True)
    type_ = db.Column(db.Text, nullable=True)
    thumbnail = db.Column(db.Text, nullable=True)
    keyword = db.Column(db.Text, nullable=True)
    heading = db.Column(db.Text, nullable=True)
    description = db.Column(db.Text, nullable=True)
    quick_questions = db.Column(db.Text, nullable=True)
    quick_answers = db.Column(db.Text, nullable=True)
    index = db.Column(db.Text, nullable=True)
    faq_q = db.Column(db.Text, nullable=True)
    faq_ans = db.Column(db.Text, nullable=True)
    article = db.Column(db.Text, nullable=True)
def putPassword(password, email):
    """Create a user with the given credentials and persist it.

    NOTE(review): the password is passed through as-is; hash_password is
    imported at the top of this file but not applied here — confirm that
    callers pre-hash (or that the datastore hashes internally).
    """
    user_datastore.create_user(email=email, password=password)
    db.session.commit()
def createVariables():
    """Insert one blank Variables row (list-like fields as empty JSON arrays)."""
    blank = Variables(
        url="",
        title="",
        cover_image="",
        cover_image_description="",
        meta_keywords="",
        type_="",
        thumbnail="",
        keyword="",
        heading="",
        description="",
        quick_questions="[]",
        quick_answers="[]",
        index="[]",
        faq_q="[]",
        faq_ans="[]",
        article="",
    )
    db.session.add(blank)
    db.session.commit()
|
from _utils.pathfinder import get_repo_path
import pickle
import os
class TrainedModelLoader:
    """Load the pickled xgboost / lr / svm models for one experiment."""

    def __init__(self, experiment):
        # experiment: object with a .name locating the model directory.
        self.experiment = experiment
        self.xgboost = self._load('xgboost')
        self.lr = self._load('lr')
        self.svm = self._load('svm')

    def _load(self, model_name):
        """Unpickle one model file; svm pickles need latin1 decoding
        (presumably Python-2-era artifacts — confirm)."""
        path = os.path.join(get_repo_path(), '_experiments', self.experiment.name, 'models', f'{model_name}.pkl')
        # The original used pickle.load(open(path, 'rb')), leaking the
        # file handle; the with-block closes it deterministically.
        with open(path, 'rb') as f:
            if 'svm' in model_name:
                return pickle.load(f, encoding='latin1')
            return pickle.load(f)
|
#!/bin/python3
# https://www.hackerrank.com/challenges/py-if-else/problem
import math
import os
import random
import re
import sys
def py_if_else(n):
    """HackerRank 'Py If-Else': print 'Weird' or 'Not Weird' for n.

    Weird when n is odd, or even in [6, 20]; Not Weird when even in
    [2, 5] or even above 20 (HackerRank guarantees 1 <= n <= 100).
    The original read the global N instead of its parameter, so it only
    worked when called from __main__.
    """
    if n % 2 > 0:
        print('Weird')
    elif 2 <= n <= 5:
        print('Not Weird')
    elif 6 <= n <= 20:
        print('Weird')
    elif n > 20:  # was `N >= 20`; 20 is already handled above
        print('Not Weird')
if __name__ == '__main__':
    # Read one integer from stdin and classify it.
    N = int(input())
    py_if_else(N)
|
# Read a string of characters and print the smallest one (e.g. the
# lowest digit when the user types digits).
A = input("Numbers:")  # input() already returns str; str() wrapper was redundant
# min() scans the string directly — the original copied each character
# into a list and sorted it (O(n log n)) only to call min() anyway.
# Still raises ValueError on empty input, same as before.
print(min(A))
class MyHashTable:
    """Open-addressing hash table with linear probing, keyed on len(key) % size.

    Instead of rehashing when nearly full, the slot/data lists grow at the
    end, so probing is a plain rightward scan with no wraparound.
    Known limitation (unchanged from the original design): deleting a key
    leaves a None hole that can break probe chains for keys stored past it.
    """

    def __init__(self, size):
        self.size = size
        self.capacity = self.size          # free slots remaining
        self.slots = [None] * self.size    # keys
        self.data = [None] * self.size     # values, parallel to slots

    def __setitem__(self, key, value):
        self.put(key, value)

    def __getitem__(self, key):
        # The original wrapped this in `except KeyError(key):` — an
        # exception *instance* in an except clause is a TypeError at
        # runtime.  Let find_key_hash's KeyError propagate naturally.
        return self.data[self.find_key_hash(key)]

    def __delitem__(self, key):
        # Same broken instance-catch fixed here; a missing key now raises
        # KeyError like a normal mapping.
        h = self.find_key_hash(key)
        self.slots[h] = None
        self.data[h] = None
        self.capacity += 1

    def __repr__(self):
        return str(self.slots) +'\n'+str(self.data)

    def hashfunction(self, key):
        """Hash by key length — cheap but very collision-heavy (kept as-is)."""
        return len(key) % self.size

    def put(self, key, value):
        """Insert or update key; grow the table when few free slots remain."""
        # Grow BEFORE inserting when almost full so the probe below can
        # always find an empty slot to the right.
        if self.capacity < 3:
            self.slots += [None]
            self.data += [None]
            self.capacity += 1
        h = self.hashfunction(key)
        # Scan right from the home slot for the key or the first hole.
        # The original could run past the end of the list (IndexError) and,
        # on updating an existing key, re-wrote the slot after its `break`
        # and wrongly decremented capacity.
        while h < len(self.slots) and self.slots[h] is not None:
            if self.slots[h] == key:
                self.data[h] = value  # update in place; capacity unchanged
                return
            h += 1
        if h == len(self.slots):
            # Probe ran off the end (no wraparound in this design): grow.
            self.slots.append(None)
            self.data.append(None)
            self.capacity += 1
        self.slots[h] = key
        self.data[h] = value
        self.capacity -= 1

    def find_key_hash(self, key):
        """Return the slot index holding key; raise KeyError when absent.

        Bounds check added: the original probe could walk past the end of
        slots and raise IndexError instead of KeyError.
        """
        h = self.hashfunction(key)
        while h < len(self.slots) and self.slots[h] is not None:
            if self.slots[h] == key:
                return h
            h += 1
        raise KeyError(key)
# Demo data: character name -> score.
m = {'Batman': 280,
     'Spider man': 260,
     'Thor': 159,
     'Robin': 291,
     'Jocker': 266,
     'Hulk': 225,
     'Eagle': 283,
     'Iron Man': 152,
     'Catwoman': 215,
     'Aquaman': 142,
     'Wolverine': 248,
     'Ninja Turtles': 118,
     'Guardians of the Galaxy': 395,
     'Wonder Woman': 101,
     'Hellboy': 104}
# Start with 20 slots; put() grows the table as it fills.
table = MyHashTable(20)
for key, value in m.items():
    table[key] = value
print(table['Hulk'])
print(table)
|
import webapp2
import jinja2
from main import template_dir
class Handler(webapp2.RequestHandler):
    """Base request handler with shared Jinja2 rendering helpers."""

    # One environment shared by every handler subclass; autoescape guards
    # template variables against HTML injection.
    jinja_env = jinja2.Environment(
        loader=jinja2.FileSystemLoader(template_dir),
        autoescape=True,
    )

    def write(self, *a, **kw):
        """Write raw output to the HTTP response."""
        self.response.write(*a, **kw)

    def render_str(self, template, **params):
        """Render the named template with params and return the string."""
        return Handler.jinja_env.get_template(template).render(params)

    def render(self, template, **kw):
        """Render the named template straight into the response."""
        self.write(self.render_str(template, **kw))
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import wx
class PlayerInteraction(object):
    """Collects the coordinates a player clicks in a modal presentation dialog."""

    def install(self, controller, pres):
        """Attach to a presentation dialog and listen for its button clicks."""
        self.controller = controller
        self.presentation = pres
        pres.Bind(wx.EVT_BUTTON, self.on_button)

    def on_button(self, evt):
        """Record the clicked coordinate (if any) and end the modal dialog."""
        btn = evt.GetEventObject()
        if btn.Name != 'dialog':
            # Button names end in the coordinate after the last underscore —
            # presumably a letter+number pair like 'A7' (see the sort key).
            coords = btn.Name.split('_')[-1]
            self.presentation.coords_clicked.append(coords)

            def coord_key(c):
                # 'A7' -> 'A07' so single-digit numbers sort before doubles.
                return '%s%02d' % (c[0], int(c[1:]))

            self.presentation.coords_clicked = sorted(
                self.presentation.coords_clicked, key=coord_key)
        # We're modal, so use EndModal, not Close.  Ending a modal also
        # happens to fire a command/button event on most platforms with the
        # dialog itself as the target; the Name check above screens that out.
        self.presentation.EndModal(0)
|
import random
import time
import allure
from allure import description, epic, feature, severity, story
from allure_commons.types import Severity
from service.macro.define import MacroDefine
@epic("TMS-流程引擎")
@feature("流程配置")
@story("宏定义")
@severity(Severity.NORMAL)
@description("新建宏定义")
def test_create(session):
    """Create a macro definition whose id and name are a unique timestamp."""
    with allure.step("1. 新建宏定义"):
        unique = str(int(time.time()))
        MacroDefine(session).create(xid=unique, name=unique)
|
# -*- coding: utf-8 -*-
"""
Created on Thu Mar 22 12:42:33 2018
@author: Ledicia Díaz
"""
import numpy as np
import matplotlib.pyplot as plt
# Euler integration of a driven, damped harmonic oscillator:
#   x' = y,  y' = -b*y - w0**2*x + F*cos(w*t)
delta_t = 0.01  # integration step
t = 0           # time
w0 = 1          # natural frequency
b = 0           # damping coefficient
w = 1           # driving frequency
F = 0           # driving amplitude
x0 = 1          # initial position
y0 = 1          # initial velocity

# Bug fix: the plot call sat outside the loop, so only the final (x, y)
# point was drawn; collect the whole phase-space trajectory instead.
xs = []
ys = []
for i in range(1000):
    x = x0 + delta_t * y0
    y = y0 + delta_t * (-b * y0 - w0 ** 2 * x0 + F * np.cos(w * t))
    t = delta_t + t
    x0 = x
    y0 = y
    xs.append(x)
    ys.append(y)
plt.plot(xs, ys, "+")
# Generated by Django 2.2.2 on 2019-06-27 05:55
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated by Django 2.2.2; generated migrations should
    # normally not be hand-edited. Adds two image-holding models.

    dependencies = [
        ('images', '0003_auto_20190627_1436'),
    ]

    operations = [
        # Four fixed franchise image slots, each uploading to its own dir.
        migrations.CreateModel(
            name='Franchise',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('franchise_1', models.ImageField(upload_to='franchise_1')),
                ('franchise_2', models.ImageField(upload_to='franchise_2')),
                ('franchise_3', models.ImageField(upload_to='franchise_3')),
                ('franchise_4', models.ImageField(upload_to='franchise_4')),
            ],
        ),
        # Single notice image for the franchise page.
        migrations.CreateModel(
            name='FranchiseNotice',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('franchise_notice', models.ImageField(upload_to='franchise_notice')),
            ],
        ),
    ]
|
# CMPT 145: Assignment 5 Question 1
# test script
import a5q1 as a5q1
import node as N
#### UNIT TEST CASES
test_item = 'to_string()'


def check(data_in, expected, reason):
    """Run a5q1.to_string on *data_in* and report a mismatch with *reason*.

    Bug fix: the original repeated this block four times, never used the
    ``reason`` variables, and its failure message ended with a dangling
    '-- ' where the reason was clearly meant to be appended.
    """
    result = a5q1.to_string(data_in)
    if result != expected:
        print('Test failed: {}: got "{}" expected "{}" -- {}'.format(
            test_item, result, expected, reason))


check(None, 'EMPTY', 'Empty node chain')
check(N.node(1), '[ 1 | / ]', 'node chain with one node')
check(N.node(1, N.node('two')),
      '[ 1 | *-]-->[ two | / ]',
      'node chain with two nodes')
check(N.node(1, N.node('two', N.node(3))),
      '[ 1 | *-]-->[ two | *-]-->[ 3 | / ]',
      'node chain with three nodes')
print('*** testing complete ***')
|
import wx
import pymouse
import time
LIST_COLORS = ['#F7F7F7', '#FFFFFF', '#FCF8E3']
#73879C
def hexToColour(value):
    """Convert a '#RRGGBB'-style hex string (length divisible by 3 after
    stripping the '#') to a wx.Colour."""
    digits = value.lstrip('#')
    step = len(digits) // 3
    r, g, b = (int(digits[i:i + step], 16)
               for i in range(0, len(digits), step))
    return wx.Colour(r, g, b)
class CategoryListPage(wx.Panel):
    """Panel showing the categories of a collection in a sortable ListCtrl.

    NOTE(review): uses Python-2-only ``cmp`` and Classic-wxPython APIs
    (InsertStringItem/SetStringItem, event.m_col), so this module targets
    Python 2 / wxPython Classic.
    """
    def __init__(self, panelParent, mainFrame):
        wx.Panel.__init__(self, panelParent)
        self.mainFrame = mainFrame
        self.elementBaseThread = mainFrame.elementBaseThread
        self.popWin = None
        # Current sort direction; flipped by the column-click handlers.
        self.order = True
        self.category_lc = wx.ListCtrl(self, wx.ID_ANY, wx.DefaultPosition, wx.Size(700,600), wx.LC_REPORT | wx.LC_HRULES | wx.LC_VRULES)
        self.category_lc.Bind( wx.EVT_LIST_ITEM_SELECTED, self.onCategorySelected)
        self.category_lc.Bind( wx.EVT_LIST_ITEM_RIGHT_CLICK, self.onCategoryRightClick)
        self.category_lc.Bind( wx.EVT_LIST_COL_CLICK, self.onColumClick)
        self.category_lc.SetFont(self.mainFrame.font)
        self.category_lc.InsertColumn(0, 'Name')
        self.category_lc.InsertColumn(1, 'Number')
        self.category_lc.InsertColumn(2, 'Description')
        self.category_lc.InsertColumn(3, 'Element count')
        self.category_lc.SetColumnsOrder([1,0,2,3])
        self.mainSizer = wx.BoxSizer(wx.VERTICAL)
        self.mainSizer.Add(self.category_lc,1,wx.EXPAND)
        self.Bind(wx.EVT_SIZE, self.OnResize, self)
        self.SetSizerAndFit(self.mainSizer)
        self.mainSizer.Fit(self)
        self.setListWidth()
    def setListWidth(self):
        # Distribute the panel width over the four columns (15/30/40/15 %).
        size = self.GetSize()[0]
        self.category_lc.SetColumnWidth(0, size*0.15)
        self.category_lc.SetColumnWidth(1, size*0.30)
        self.category_lc.SetColumnWidth(2, size*0.40)
        self.category_lc.SetColumnWidth(3, size*0.15)
    def addToList(self, position, element_number):
        # Insert category *element_number* (key into self.categoryDic)
        # as a new row at *position*.
        self.category_lc.InsertStringItem(position, str(self.categoryDic[element_number]["Category"]['name']))
        self.category_lc.SetStringItem(position, 1, str(self.categoryDic[element_number]["Category"]['id']))
        self.category_lc.SetStringItem(position, 2, str(self.categoryDic[element_number]["Category"]['description']))
        self.category_lc.SetStringItem(position, 3, str(self.categoryDic[element_number]["Category"]['elements_count']))
        # Three-way striped row background.
        self.category_lc.SetItemBackgroundColour(position, hexToColour(LIST_COLORS[position%3]))
        # Remember which dictionary entry the row shows (survives sorting).
        self.category_lc.SetItemData(position, element_number)
    def UpdateList(self):
        # Rebuild the whole list from self.categoryDic in key order.
        self.category_lc.DeleteAllItems()
        for row in range(len(self.categoryDic)):
            self.addToList(row,row)
    def OnBaseLoaded(self, event):
        # Called when the element base finished loading a collection.
        self.collection_id = event.collection_id
        self.categoryDic = self.elementBaseThread.elementBase.categories[self.collection_id]
        self.UpdateList()
    def OnResize(self, event):
        event.Skip()
        self.setListWidth()
    def onCategorySelected(self, event):
        # NOTE(review): the selected category is looked up but not used yet.
        category_num = self.category_lc.GetItemData(self.category_lc.GetFirstSelected())
    def onCategoryRightClick(self, event):
        # NOTE(review): the selected category is looked up but not used yet.
        category_num = self.category_lc.GetItemData(self.category_lc.GetFirstSelected())
    def sortColume(self, colume_num, element_path, is_string):
        # Re-insert every row sorted on column *colume_num* via insertion
        # sort against the live list control. *is_string* selects string
        # (cmp) vs integer comparison; direction alternates with each call
        # through self.order.
        self.category_lc.DeleteAllItems()
        self.order = not self.order
        for category_num in range(len(self.categoryDic)):
            if category_num:
                found=False
                for position in range(self.category_lc.GetItemCount()):
                    if is_string:
                        # cmp() is Python 2 only.
                        str_diff = cmp(self.categoryDic[category_num]['Category'][element_path],self.category_lc.GetItemText(position,col=colume_num))
                        if (self.order and str_diff >= 0 ) or (not self.order and str_diff < 0):
                            self.addToList(position,category_num)
                            found = True
                            break
                    else:
                        int_diff = int(self.categoryDic[category_num]['Category'][element_path]) - int(self.category_lc.GetItemText(position,col=colume_num))
                        if (self.order and int_diff >= 0) or (not self.order and int_diff < 0):
                            self.addToList(position,category_num)
                            found = True
                            break
                if found == False:
                    # Belongs after every current row: append at the end.
                    self.addToList(self.category_lc.GetItemCount(),category_num)
            else:
                #first element
                self.addToList(0,category_num)
    def onColumClick(self,event):
        # Dispatch a header click to the matching sort.
        column_clicked = event.m_col
        if column_clicked == 0: #name
            self.sortColume(column_clicked,'name',1)
        elif column_clicked == 1: #number
            # Ids follow dictionary order, so just reverse or restore it.
            self.category_lc.DeleteAllItems()
            for category_num in range(len(self.categoryDic)):
                if self.order:
                    self.addToList(0,category_num)
                else:
                    self.addToList(self.category_lc.GetItemCount(),category_num)
            self.order = not self.order
        elif column_clicked == 3: #Element count
            self.sortColume(column_clicked,'elements_count',0)
|
import io
import sys
import json
from datetime import date
from bot.data import Gender, Request, Response, parse_request
from bot.logger import logger
from bot.mood_analyzer import analyze
from bot.pattern_recognizer import answer_for_pattern
from bot.text_processor.generator import generate_answer
def handle_request(request: Request) -> Response:
    """
    Handle a single request and build the matching response.

    Args:
        request: The request to handle.

    Returns:
        The response generated for the specified request.
    """
    logger.debug('Handling request: {}'.format(request))

    # Sentiment scores in [-1, 1] (negative .. positive sentiment).
    mood_bot, affection_bot = analyze(request)
    logger.debug('Mood {}, Affection {}'.format(mood_bot, affection_bot))

    match = answer_for_pattern(request)
    if match:
        pattern, answer = match
    else:
        # No pattern found, fall back to generative model
        pattern, answer = None, generate_answer(request)

    response = Response(text=answer,
                        pattern=pattern,
                        mood=mood_bot,
                        affection=affection_bot)
    logger.debug(response)
    return response
def run_demo():
    """
    Starts a command-line based demo request loop for debugging.
    """
    logger.info('Starting request loop')
    last_pattern = None
    # Static bot profile reused for every demo request.
    profile = dict(
        mood=0.0,
        affection=0.0,
        bot_gender=Gender.FEMALE,
        bot_name='Lana',
        bot_birthdate=date(1995, 10, 5),
        bot_favorite_color='grün',
        father_name='Georg',
        father_age=49,
        mother_name='Agathe',
        mother_age=47,
    )
    while True:
        try:
            request = Request(text=input('User input: '),
                              previous_pattern=last_pattern,
                              **profile)
            response = handle_request(request)
            print('Response: ', response.text)
            last_pattern = response.pattern
        except KeyboardInterrupt:
            # Interrupt requested by user
            logger.info('Keyboard interrupt detected, aborting request loop')
            return
        except Exception as ex:
            logger.error('{}: {}'.format(type(ex).__name__, str(ex)))
            continue
def run_loop():
    """
    Starts a request loop that reads newline-delimited JSON requests
    from stdin and writes JSON responses to stdout, one per line.

    A parsing or handling error aborts only the current request: the
    string 'error\n' is written to stdout and the loop waits for the
    next line. The loop ends on EOF (closed stdin pipe) or a keyboard
    interrupt (Ctrl + C).
    """
    logger.info('Starting request loop')

    # UTF-8 text views over the raw standard streams; responses are
    # flushed after every line.
    reader = io.TextIOWrapper(
        sys.stdin.buffer, encoding='utf-8', newline='\n')
    writer = io.TextIOWrapper(
        sys.stdout.buffer, encoding='utf-8', newline='\n', line_buffering=True)

    while True:
        try:
            logger.debug('Waiting for request input')
            line = reader.readline()
            if line == '':
                # Empty string equals EOF for io.TextIOWrapper
                logger.info('EOF detected, aborting request loop')
                return
            logger.debug('Received request, parsing')
            reply = handle_request(parse_request(line))
            writer.write(json.dumps(reply._asdict()) + '\n')
        except KeyboardInterrupt:
            # Interrupt requested by developer
            logger.info('Keyboard interrupt detected, aborting request loop')
            return
        except Exception as ex:
            logger.error('{}: {}'.format(type(ex).__name__, str(ex)))
            # Pass error to Go and await next request
            print('error')
            continue
|
""" OCAPI Data Endpoints
"""
from .code_versions import CodeVersions
from .custom_objects import CustomObjects
from .customer_lists import CustomerLists
from .customer_objects_search import CustomObjectsSearch
from .global_jobs import GlobalJobs
from .job_execution_search import JobExecutionSearch
from .jobs import Jobs
from .libraries import Libraries
|
#Simple Calculator
#By Yasmine Lopes
##################
##Ask which operation the user is going to do
##Ask for the first number
##Ask for the second number
##Calculate the operation
##Print the result
#Return to the calculator
def calculate(operation, num1, num2):
    """Apply one of '+', '-', '*', '/' to the two numbers.

    Returns the numeric result, or None when the operation symbol is
    unknown or the division is by zero (printed instead of crashing,
    which the original did on num2 == 0).
    """
    if operation == '+':
        return num1 + num2
    if operation == '-':
        return num1 - num2
    if operation == '*':
        return num1 * num2
    if operation == '/':
        if num2 == 0:
            print('Cannot divide by zero!')
            return None
        return num1 / num2
    print('Invalid Operation!!')
    return None


if __name__ == '__main__':
    # Loop forever, as before; Ctrl+C exits.
    while True:
        operation = input('Which of these operations you want to? \n(+, -, *, /) \nType here one of these symbols: ')
        try:
            num1 = int(input('Ok, now type the first number: '))
            num2 = int(input('Sure! Type the second number now: '))
        except ValueError:
            # Bug fix: a non-numeric entry used to crash the program.
            print('Please type whole numbers only.')
            continue
        total = calculate(operation, num1, num2)
        if total is not None:
            print('Here is the total: ', total)
from django.contrib import admin
from .models import *
# Register the card-game models with the default admin site.
for model in (Game, Card, CardSet):
    admin.site.register(model)
|
import datetime
from eppy.doc import EppUpdateCommand
from registrobrepp.ipnetwork.addipnetwork import AddIpNetwork
from registrobrepp.ipnetwork.aggripnetwork import AggrIpNetwork
from registrobrepp.ipnetwork.chgipnetwork import ChgIpNetwork
from registrobrepp.ipnetwork.remipnetwork import RemIpNetwork
class BrEppUpdateIpNetworkCommand(EppUpdateCommand):
    """EPP <update> command for an IP network object (Registro.br flavour)."""

    def __init__(self, roid: str, creationdate: datetime = None, add: AddIpNetwork = None, rem: RemIpNetwork = None,
                 chg: ChgIpNetwork = None, aggr: AggrIpNetwork = None):
        # The protocol requires at least one change element.
        if not add and not rem and not chg:
            raise ValueError('At least one <ipnetwork:add>, <ipnetwork:rem>, or <ipnetwork:chg> element MUST be provided')

        # Serialize the optional creation date with a fixed ".0Z" suffix.
        cd = creationdate.strftime('%Y-%m-%dT%H:%M:%S.0Z') if creationdate else None

        # Key order is kept (roid .. creation_date) so the serialized
        # XML element order is unchanged.
        update_fields = {
            'roid': roid,
            'add': add,
            'rem': rem,
            'chg': chg,
            'aggr': aggr,
            'creation_date': cd,
        }
        dct = {'epp': {'command': {'update': {'ipnetwork:update': update_fields}}}}
        extra_nsmap = {'ipnetwork': 'urn:ietf:params:xml:ns:ipnetwork-1.0'}
        super(BrEppUpdateIpNetworkCommand, self).__init__(dct=self.annotate(dct), extra_nsmap=extra_nsmap)
|
# Package metadata exposed as conventional module-level dunders.
__author__ = 'Thierry Schellenbach'
__copyright__ = 'Copyright 2010, Thierry Schellenbach'
__credits__ = ['Thierry Schellenbach']
__license__ = 'BSD'
__version__ = '1.1'
__maintainer__ = 'Thierry Schellenbach'
__email__ = 'thierryschellenbach@gmail.com'
__status__ = 'Production'
|
"""
net_surgery.py
VGG16 Transfer Learning After 3-to-4-Channel Input Conversion
Written by Phil Ferriere
Licensed under the MIT License (see LICENSE for details)
Based on:
- https://github.com/minhnhat93/tf_object_detection_multi_channels/blob/master/edit_checkpoint.py
Written by SNhat M. Nguyen
Unknown code license
"""
from tensorflow.python import pywrap_tensorflow
import numpy as np
import tensorflow as tf
num_input_channels = 4 # AStream uses 4-channel inputs
init_method = 'gaussian' # ['gaussian'|'spread_average'|'zeros']
input_path = 'models/vgg_16_3chan/vgg_16_3chan.ckpt' # copy of checkpoint in http://download.tensorflow.org/models/vgg_16_2016_08_28.tar.gz
output_path = 'models/vgg_16_4chan/vgg_16_4chan.ckpt'

print('Loading checkpoint...')
reader = pywrap_tensorflow.NewCheckpointReader(input_path)
print('...done loading checkpoint.')
var_to_shape_map = reader.get_variable_to_shape_map()

# Re-create every checkpoint variable unchanged except the first conv
# layer, which must grow from 3 to num_input_channels input channels.
var_to_edit_name = 'vgg_16/conv1/conv1_1/weights'
for key in sorted(var_to_shape_map):
    if key != var_to_edit_name:
        tf.Variable(reader.get_tensor(key), name=key, dtype=tf.float32)
    else:
        var_to_edit = reader.get_tensor(var_to_edit_name)
        print('Tensor {} of shape {} located.'.format(var_to_edit_name, var_to_edit.shape))

sess = tf.Session()
if init_method != 'gaussian':
    # Bug fix: the original only printed an error here and then fell
    # through to the gaussian path anyway; fail fast instead.
    raise NotImplementedError('Unimplemented initialization method: {}'.format(init_method))

# Extra input channels initialised with small gaussian noise, appended
# on axis 2 of the kernel (the input-channel axis of the HWIO layout
# implied by the concatenate below) — TODO confirm layout.
new_channels_shape = list(var_to_edit.shape)
new_channels_shape[2] = num_input_channels - 3
gaussian_var = tf.random_normal(shape=new_channels_shape, stddev=0.001).eval(session=sess)
new_var = np.concatenate([var_to_edit, gaussian_var], axis=2)
new_var = tf.Variable(new_var, name=var_to_edit_name, dtype=tf.float32)

sess.run(tf.global_variables_initializer())
saver = tf.train.Saver()
saver.save(sess, output_path)
|
#!/usr/bin/env python
import requests
from bs4 import BeautifulSoup
import sys
from twython import Twython
import numpy as np
# Twitter credentials (placeholders).
apiKey = '...'
apiSecret = '...'
accessToken = '...'
accessTokenSecret = '...'

# BeautifulSoup scraping algorithm
url = 'https://coinmarketcap.com'
soup = BeautifulSoup(requests.get(url).text, 'lxml')

L = []
#H =["Rank","Name","M Cap","$/1", "HURR", "DURR", "24 hr"]
rows_read = 0
for tr in soup.select('#currencies tr'):
    if not tr.select('td'):
        # Header rows carry no <td> cells.
        continue
    for i, td in enumerate(tr.select('td')[:7]):
        txt = td.text.replace('\n',' ').replace('*', '').replace('%','').replace('.com','').replace('chain','').replace('coin','').strip()
        L.append(txt)
    # dictates how many lines will be read
    rows_read += 1
    if rows_read > 99:
        break

# Reshape to one row per currency, keep only name and 24h-change columns.
A = np.reshape(L, (100, 7))
Perm = [1, 3, 6, 2, 4, 5, 0]
A = A[:, Perm]
A = np.delete(A, (1, 3, 4, 5, 6), 1)

# Ten worst performers by 24h percent change.
A = sorted(A, key=lambda x: (float(x[1])))
A = A[:10]

# Bug fix: the original wrote the table to a file, reopened it without
# ever closing the read handle, and read it back; build the tweet text
# directly (the file is still written for the record, via a context
# manager).
lines = ["#" + " ".join(line) + "%" + "\n" for line in A]
with open("output10losers.txt", "w") as txt_file:
    txt_file.writelines(lines)
finaltweet = "".join(lines)
tweetStr = "Top 10 #Crypto Losers 24hrs:" + "\n" + finaltweet

# twitter API commands
api = Twython(apiKey, apiSecret, accessToken, accessTokenSecret)
api.update_status(status=tweetStr)
print("Tweeted: " + tweetStr)
|
#####
from model.Data import UsFo
import re
class UserHelper:
    """Page-object helper for the addressbook contact ('user') pages.

    All methods drive the application's Selenium WebDriver (self.app.wd).
    NOTE(review): ``switch_to_alert()`` is a deprecated WebDriver API;
    modern Selenium uses ``switch_to.alert`` — confirm the pinned version.
    """

    def __init__(self, app):
        self.app = app

    def Open_home_page(self):
        """Navigate to the addressbook home page unless already there."""
        wd = self.app.wd
        if not (wd.current_url.endswith("/") and len(wd.find_elements_by_name("searchform")) > 0):
            wd.get("http://localhost/addressbook/")

    def Add_user(self, user):
        """Create a new user via the 'add new' form and return home."""
        wd = self.app.wd
        wd.find_element_by_link_text("add new").click()
        self.fill_user_form(user)
        wd.find_element_by_xpath("//div[@id='content']/form/input[21]").click()
        self.Return_home_page()
        self.user_cache = None

    def fill_user_form(self, user):
        """Fill every known form field; None attributes are skipped."""
        wd = self.app.wd
        self.change_field_value("firstname", user.firstname)
        self.change_field_value("lastname", user.lastname)
        self.change_field_value("address", user.address)
        self.change_field_value("home", user.homephone)
        self.change_field_value("mobile", user.mobilephone)
        self.change_field_value("work", user.workphone)
        self.change_field_value("phone2", user.secondaryphone)
        self.change_field_value("email", user.email)
        self.change_field_value("email2", user.email2)
        self.change_field_value("email3", user.email3)

    def change_field_value(self, field_name, text):
        """Replace the content of one input field (no-op when text is None)."""
        wd = self.app.wd
        if text is not None:
            wd.find_element_by_name(field_name).click()
            wd.find_element_by_name(field_name).clear()
            wd.find_element_by_name(field_name).send_keys(text)

    def select_first(self):
        self.select_user_by_index()

    def select_user_by_index(self, index=0):
        """Tick the checkbox of the user row at *index*.

        Bug fix: *index* had no default, so select_first() always raised
        TypeError; defaulting to 0 keeps existing callers working.
        """
        wd = self.app.wd
        wd.find_elements_by_name("selected[]")[index].click()

    def select_user_by_id(self, id):
        """Tick the checkbox of the user whose database id is *id*."""
        wd = self.app.wd
        wd.find_element_by_css_selector("input[value='%s']" % id).click()

    def Edit_user(self, new_user_data):
        """Edit the first listed user.

        Bug fix: the original called Edit_user_by_index(0) without the
        required new_user_data and always raised TypeError, so adding the
        parameter cannot break any working caller.
        """
        self.Edit_user_by_index(0, new_user_data)

    def Edit_user_by_index(self, index, new_user_data):
        """Open the edit form of the user at *index* and submit new data."""
        wd = self.app.wd
        self.select_user_by_index(index)
        wd.find_elements_by_xpath("//a[contains(@href,'edit.php?id=')]")[index].click()
        self.fill_user_form(new_user_data)
        # Submit group creation
        wd.find_element_by_name("update").click()
        self.user_cache = None

    def Edit_user_by_id(self, id, new_user_data):
        """Open the edit form of the user with database id *id* and submit."""
        wd = self.app.wd
        self.select_user_by_id(id)
        wd.find_element_by_xpath("//a[contains(@href, %s) and contains(@href, 'edit.php?id=')]" % id).click()
        self.fill_user_form(new_user_data)
        # Submit group creation
        wd.find_element_by_name("update").click()
        self.user_cache = None

    def delete_first_user(self):
        self.delete_user_by_index(0)

    def delete_user_by_index(self, index):
        """Delete the user at *index*, accepting the confirmation alert."""
        wd = self.app.wd
        self.select_user_by_index(index)
        wd.find_element_by_css_selector("input[value=Delete]").click()
        wd.switch_to_alert().accept()
        self.Open_home_page()
        self.user_cache = None

    def delete_user_by_id(self, id):
        """Delete the user with database id *id*, accepting the alert."""
        wd = self.app.wd
        self.select_user_by_id(id)
        wd.find_element_by_css_selector("input[value=Delete]").click()
        wd.switch_to_alert().accept()
        self.Open_home_page()
        self.user_cache = None

    def Return_home_page(self):
        wd = self.app.wd
        wd.find_element_by_link_text("home page").click()

    def counts(self):
        """Return the number of user rows currently listed."""
        wd = self.app.wd
        self.Open_home_page()
        return len(wd.find_elements_by_name("selected[]"))

    # Lazily-built cache of UsFo records; invalidated on every mutation.
    user_cache = None

    def get_user_list(self):
        """Return (and cache) UsFo records scraped from the home page table."""
        if self.user_cache is None:
            wd = self.app.wd
            self.Open_home_page()
            self.user_cache = []
            for row in wd.find_elements_by_name("entry"):
                cells = row.find_elements_by_tag_name("td")
                firstname = cells[2].text
                lastname = cells[1].text
                id = cells[0].find_element_by_tag_name("input").get_attribute("value")
                address = cells[3].text
                all_emails = cells[4].text
                all_phones = cells[5].text
                self.user_cache.append(UsFo(firstname=firstname, lastname=lastname, id=id, address=address,
                                            all_emails_from_home_page=all_emails,
                                            all_phones_from_home_page=all_phones))
        return list(self.user_cache)

    def open_user_to_edit_by_index(self, index):
        """Click the edit icon (column 7) of the row at *index*."""
        wd = self.app.wd
        self.app.Open_home_page()
        row = wd.find_elements_by_name("entry")[index]
        cell = row.find_elements_by_tag_name("td")[7]
        cell.find_element_by_tag_name("a").click()

    def open_user_view_by_index(self, index):
        """Click the view icon (column 6) of the row at *index*."""
        wd = self.app.wd
        self.app.Open_home_page()
        row = wd.find_elements_by_name("entry")[index]
        cell = row.find_elements_by_tag_name("td")[6]
        cell.find_element_by_tag_name("a").click()

    def get_user_info_from_edit_page(self, index):
        """Scrape all fields of the user at *index* from the edit form."""
        wd = self.app.wd
        self.open_user_to_edit_by_index(index)
        firstname = wd.find_element_by_name("firstname").get_attribute("value")
        lastname = wd.find_element_by_name("lastname").get_attribute("value")
        id = wd.find_element_by_name("id").get_attribute("value")
        homephone = wd.find_element_by_name("home").get_attribute("value")
        workphone = wd.find_element_by_name("work").get_attribute("value")
        mobilephone = wd.find_element_by_name("mobile").get_attribute("value")
        secondaryphone = wd.find_element_by_name("phone2").get_attribute("value")
        address = wd.find_element_by_name("address").get_attribute("value")
        email = wd.find_element_by_name("email").get_attribute("value")
        email2 = wd.find_element_by_name("email2").get_attribute("value")
        email3 = wd.find_element_by_name("email3").get_attribute("value")
        return UsFo(firstname=firstname, lastname=lastname, id=id, address=address,
                    homephone=homephone, mobilephone=mobilephone, workphone=workphone,
                    secondaryphone=secondaryphone, email=email, email2=email2, email3=email3)

    def get_user_from_view_page(self, index):
        """Scrape the phone numbers of the user at *index* from the view page."""
        wd = self.app.wd
        self.open_user_view_by_index(index)
        text = wd.find_element_by_id("content").text
        homephone = re.search("H: (.*)", text).group(1)
        workphone = re.search("W: (.*)", text).group(1)
        mobilephone = re.search("M: (.*)", text).group(1)
        secondaryphone = re.search("P: (.*)", text).group(1)
        return UsFo(homephone=homephone, mobilephone=mobilephone,
                    workphone=workphone, secondaryphone=secondaryphone)

    def add_to_group(self, group_id, user_id):
        """Add the user to a group via the 'to_group' dropdown."""
        wd = self.app.wd
        self.select_user_by_id(user_id)
        self.select_group_in_dropdown(group_id)
        wd.find_element_by_name("add").click()
        self.app.Open_home_page()

    def filter_for_group(self, id):
        """Filter the listing to only show members of group *id*."""
        wd = self.app.wd
        wd.find_element_by_name("group").click()
        wd.find_element_by_xpath("//select[@name='group']//option[@value='%s']" % id).click()

    def remove_from_group(self, group_id, user_id):
        """Remove the user from the group (listing filtered to that group)."""
        wd = self.app.wd
        self.filter_for_group(group_id)
        self.select_user_by_id(user_id)
        wd.find_element_by_name("remove").click()

    def select_group_in_dropdown(self, id):
        wd = self.app.wd
        wd.find_element_by_xpath("//select[@name='to_group']//option[@value='%s']" % id).click()
from classes.aux_code_thread import Thread
from utils import logger
from utils import RunShellFunc
import os
def create_logger(log_file):
    """Initialise the shared logger so it writes to *log_file*."""
    print(f"Creating log file")
    logger.setup_logger(log_file)
def aux_analysis(Thread: Thread, aux_analysis_dir):
    """Run every shell script of the given analysis thread, logging to its file.

    NOTE(review): the parameter shadows the ``Thread`` class name; callers
    pass an instance. Renaming would break keyword callers, so it is kept.
    """
    create_logger(os.path.join(aux_analysis_dir, Thread.log_file))
    print(f"Running {Thread.thread_name}")
    for script_name in Thread.scripts:
        script_path = os.path.join(aux_analysis_dir, script_name)
        RunShellFunc.run_shell_command(f"bash {script_path}")
#!/usr/bin/env python
# -*- coding: utf-8 -*-
r"""pprint_dir_magic -- DESCRIPTION
"""
from IPython.core.magic import Magics, magics_class, line_magic
from IPython.core.magic import register_line_magic
from IPython.core.magic_arguments import (argument, magic_arguments,
parse_argstring)
## for implements
#
import re
from inspect import getmembers, isclass, ismodule
from pprint import pprint
try:
    from columnize import columnize
except ImportError as _err:
    # Bug fix: the handler referenced the undefined name ``err`` and
    # raised NameError whenever columnize was missing.
    print(_err)
    print('try: pip install columnize')
try:
    from colorama import Fore, Style, Back, init, deinit, reinit
except ImportError as _err:
    # Same fix as above: print the bound exception, not ``err``.
    print(_err)
    print('try: pip install colorama')
##
@magics_class
class ColumnizeDirMagic(Magics):
    """ColumnizeDirMagic

    IPython line magic ``%D``: a columnized, colorized ``dir()``.

    Responsibility: list the members of an evaluated expression in
    columns, color-coded by kind (green=class, red=callable, cyan=module,
    yellow=public attribute, white=other/private), with an optional
    ``-g`` regex filter on member names.
    """
    @magic_arguments()
    @argument(
        '-g', '--grep', dest='pattern', default=None, type=str,
        help='grep method')
    @argument('evaluation', help='string of evaluation')
    @line_magic
    def D(self, parameter_s=''):
        """SUMMARY
        D(parameter_s='')
        @Arguments:
        - `parameter_s`: magic argument string: [-g PATTERN] EXPRESSION
        @Return: None (prints the columnized member listing)
        @Error: whatever self.shell.ev raises for an invalid expression
        """
        init() # for colorama
        args = parse_argstring(self.D, parameter_s)
        obj = self.shell.ev(args.evaluation)
        if args.pattern: # "-g" grep option
            regex = re.compile(args.pattern)
            members = [(name, elem) for name, elem in getmembers(obj)
                       if regex.search(name) is not None]
        else:
            members = getmembers(obj)
        results = []
        for name, elem in members:
            # Color legend: class=green, callable=red, module=cyan,
            # public attribute=yellow, everything else=white.
            if isclass(elem):
                member = ColumnizeDirMagic._fore_green_reset(name)
            elif self._iscallable(elem):
                member = ColumnizeDirMagic._fore_red_reset(name)
            elif ismodule(elem):
                member = ColumnizeDirMagic._fore_cyan_reset(name)
            elif not name.startswith('_', ):
                member = ColumnizeDirMagic._fore_yellow_reset(name)
            else:
                member = ColumnizeDirMagic._fore_white_reset(name)
            results.append(member)
        print(columnize(results, displaywidth=110))
    # fore
    # Helpers that prepend a colorama foreground/style escape code.
    @classmethod
    def _bright(cls, string):
        return Style.BRIGHT + '{}'.format(string)
    @classmethod
    def _fore_white(cls, string):
        return Fore.WHITE + '{}'.format(string)
    @classmethod
    def _fore_black(cls, string):
        return Fore.BLACK + '{}'.format(string)
    @classmethod
    def _fore_blue(cls, string):
        return Fore.BLUE + '{}'.format(string)
    @classmethod
    def _fore_cyan(cls, string):
        return Fore.CYAN + '{}'.format(string)
    @classmethod
    def _fore_red(cls, string):
        return Fore.RED + '{}'.format(string)
    @classmethod
    def _fore_magenta(cls, string):
        return Fore.MAGENTA + '{}'.format(string)
    @classmethod
    def _fore_green(cls, string):
        return Fore.GREEN + '{}'.format(string)
    @classmethod
    def _fore_yellow(cls, string):
        return Fore.YELLOW + '{}'.format(string)
    # back
    # Helpers that prepend a colorama background escape code.
    @classmethod
    def _back_white(cls, string):
        return Back.WHITE + '{}'.format(string)
    @classmethod
    def _back_black(cls, string):
        return Back.BLACK + '{}'.format(string)
    @classmethod
    def _back_blue(cls, string):
        return Back.BLUE + '{}'.format(string)
    @classmethod
    def _back_cyan(cls, string):
        return Back.CYAN + '{}'.format(string)
    @classmethod
    def _back_red(cls, string):
        return Back.RED + '{}'.format(string)
    @classmethod
    def _back_magenta(cls, string):
        return Back.MAGENTA + '{}'.format(string)
    @classmethod
    def _back_green(cls, string):
        return Back.GREEN + '{}'.format(string)
    @classmethod
    def _back_yellow(cls, string):
        return Back.YELLOW + '{}'.format(string)
    # reset
    # Appends RESET_ALL so the color does not bleed into following text.
    @classmethod
    def _reset_all(cls, string):
        return '{}'.format(string) + Style.RESET_ALL
    # fore reset
    # Convenience combinations: colorize then reset.
    @classmethod
    def _fore_white_reset(cls, string):
        return cls._reset_all(cls._fore_white('{}'.format(string)))
    @classmethod
    def _fore_black_reset(cls, string):
        return cls._reset_all(cls._fore_black('{}'.format(string)))
    @classmethod
    def _fore_blue_reset(cls, string):
        return cls._reset_all(cls._fore_blue('{}'.format(string)))
    @classmethod
    def _fore_cyan_reset(cls, string):
        return cls._reset_all(cls._fore_cyan('{}'.format(string)))
    @classmethod
    def _fore_red_reset(cls, string):
        return cls._reset_all(cls._fore_red('{}'.format(string)))
    @classmethod
    def _fore_magenta_reset(cls, string):
        return cls._reset_all(cls._fore_magenta('{}'.format(string)))
    @classmethod
    def _fore_green_reset(cls, string):
        return cls._reset_all(cls._fore_green('{}'.format(string)))
    @classmethod
    def _fore_yellow_reset(cls, string):
        return cls._reset_all(cls._fore_yellow('{}'.format(string)))
    # back reset
    @classmethod
    def _back_white_reset(cls, string):
        return cls._reset_all(cls._back_white('{}'.format(string)))
    @classmethod
    def _back_black_reset(cls, string):
        return cls._reset_all(cls._back_black('{}'.format(string)))
    @classmethod
    def _back_blue_reset(cls, string):
        return cls._reset_all(cls._back_blue('{}'.format(string)))
    @classmethod
    def _back_cyan_reset(cls, string):
        return cls._reset_all(cls._back_cyan('{}'.format(string)))
    @classmethod
    def _back_red_reset(cls, string):
        return cls._reset_all(cls._back_red('{}'.format(string)))
    @classmethod
    def _back_magenta_reset(cls, string):
        return cls._reset_all(cls._back_magenta('{}'.format(string)))
    @classmethod
    def _back_green_reset(cls, string):
        return cls._reset_all(cls._back_green('{}'.format(string)))
    @classmethod
    def _back_yellow_reset(cls, string):
        return cls._reset_all(cls._back_yellow('{}'.format(string)))
    @classmethod
    def _iscallable(cls, obj):
        """SUMMARY
        _iscallable(obj)
        @Arguments:
        - `obj`: any object
        @Return: True when *obj* exposes __call__ (duck-typed callability)
        @Error: none
        """
        return hasattr(obj, '__call__')
def load_ipython_extension(ipython):
    """Register the magics, guarding against repeated %load_ext calls."""
    global __loaded
    if __loaded:
        return
    ipython.register_magics(ColumnizeDirMagic)
    __loaded = True

# Module-level once-only guard for the extension loader above.
__loaded = False
# For Emacs
# Local Variables:
# coding: utf-8
# End:
# pprint_dir_magic.py ends here
|
#! /usr/bin/env python
"""Command-line launcher for the CherryPy daemon."""
import cherrypy.daemon


def main():
    """Run the daemon's blocking entry point."""
    cherrypy.daemon.run()


if __name__ == '__main__':
    main()
|
#!/usr/bin/env python3
# Created by: Christina Ngwa
# Created on: October 2019
# This program uses a nested if statement
def is_leap_year(year):
    """Return True for Gregorian leap years.

    A year is a leap year when it is divisible by 4, except century
    years, which must also be divisible by 400.
    """
    if year % 400 == 0:
        return True
    if year % 100 == 0:
        return False
    return year % 4 == 0


def main():
    # this function checks whether a year is a leap year
    # output
    print("Is the year a leap year? Find out.")
    print("")

    # input
    year = int(input("Enter a year: "))
    print("")

    # process
    # Bug fix: the original nested-if printed "not a leap year" for years
    # divisible by 4 but not by 100 (e.g. 2004), which ARE leap years.
    if is_leap_year(year):
        print("It is a leap year.")
    else:
        print("It is not a leap year.")
if __name__ == "__main__":
main()
|
# -*- coding: utf-8 -*-
# Copyright (C) 2012 Rosen Diankov <rosen.diankov@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os, json
IPYTHON_DEBUG = False
DEBUG = True#False
TEMPLATE_DEBUG = False

# Bug fix: json.load(open(...)) leaked the file handle; use a context
# manager so the secrets file is closed deterministically.
with open('/var/openrave.org_secrets.json') as _secrets_file:
    SECRETS = json.load(_secrets_file)

# Outbound mail goes through Gmail SMTP with credentials from SECRETS.
EMAIL_HOST_USER, EMAIL_HOST_PASSWORD = SECRETS.get('email')
EMAIL_HOST='smtp.gmail.com'
EMAIL_PORT=587
EMAIL_SUBJECT_PREFIX='[openrave.org] '
EMAIL_USE_TLS=True
SEND_BROKEN_LINK_EMAILS=True
SERVER_EMAIL='openrave.testing@gmail.com'
DEFAULT_FROM_EMAIL = 'openrave.testing@gmail.com'
# Production database: PostgreSQL on openrave.org; the password comes
# from the secrets file loaded above.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql_psycopg2',
        'NAME': 'openrave_website',
        'USER': 'openrave',
        'PASSWORD': SECRETS['dbpass'],
        'HOST': 'openrave.org',
        'PORT': '5432',
        'TIME_ZONE': 'UTC',
    }
}
# Cookies only over HTTPS and hidden from JavaScript.
SESSION_COOKIE_SECURE = True
SESSION_COOKIE_HTTPONLY = True
SECURE_SSL_REDIRECT = False
#SECURE_FRAME_DENY = True # do not set since doxygen search will not work, use X_FRAME_OPTIONS instead
SECURE_HSTS_SECONDS = 600
# SSL termination happens at the proxy; trust its forwarded-protocol header.
SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTOCOL", "SSL")
CACHES = {
    'default' : { 'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
                  'LOCATION': '127.0.0.1:11211',
                  # 'TIMEOUT': 60,
                  # 'OPTIONS': { 'MAX_ENTRIES': 1000 }
                  }
}
CACHE_MIDDLEWARE_SECONDS = 60 * 5 # 5 minutes
CACHE_MIDDLEWARE_KEY_PREFIX = 'openravedocs'
CACHE_MIDDLEWARE_GZIP = True
CACHE_MIDDLEWARE_ANONYMOUS_ONLY = True
MIDDLEWARE_CLASSES = [
    'djangosecure.middleware.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.middleware.locale.LocaleMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.contrib.flatpages.middleware.FlatpageFallbackMiddleware',
    'django.contrib.redirects.middleware.RedirectFallbackMiddleware',
]
# Cache "sandwich": UpdateCacheMiddleware must run first (outermost on
# response) and FetchFromCacheMiddleware last (innermost on request).
MIDDLEWARE_CLASSES.insert(0, 'django.middleware.cache.UpdateCacheMiddleware')
MIDDLEWARE_CLASSES.append('django.middleware.cache.FetchFromCacheMiddleware')
# Request errors are mailed to admins and logged; site logs rotate weekly.
LOGGING = {
    "version": 1,
    "disable_existing_loggers": True,
    "formatters": {
        "simple": {"format": "[%(name)s] %(levelname)s: %(message)s"},
        "full": {"format": "%(asctime)s [%(name)s] %(levelname)s: %(message)s"}
    },
    "handlers": {
        "mail_admins": {
            "level": "ERROR",
            "class": "django.utils.log.AdminEmailHandler",
        },
        "console": {
            "level": "DEBUG",
            "class": "logging.StreamHandler",
            "formatter": "simple",
        },
        "logfile":{
            "formatter": "full",
            "level": "DEBUG",
            "class": "logging.handlers.TimedRotatingFileHandler",
            "filename": "/var/log/openrave_website/website.log",
            "when": "D",
            "interval": 7,
            "backupCount": 5,
        },
    },
    "loggers": {
        "django.request": {
            "handlers": ["mail_admins","logfile"],
            "level": "ERROR",
            "propagate": False,
        },
        "openrave_website": {
            "handlers": ["console","logfile"],
            "level": "DEBUG",
        }
    }
}
# necessary?
#HAYSTACK_SEARCH_ENGINE = 'xapian'
#HAYSTACK_XAPIAN_PATH = os.path.join(OPENRAVE_DOCUMENT_ROOT_PATH,'openravedocs.index')
#PUSH_SSL_CALLBACK = True
OPENRAVE_DOCUMENT_ROOT_PATH = '/var/openrave/docdata/'
MEDIA_ROOT = '/var/openrave/media/'
MEDIA_URL = '/m/'
STATICFILES_DIRS = ('/var/www/static/',)
|
import os
import importlib.util

"""
This script allows creating a directory structure that corresponds to the
parameterized inputs present in the file test/integration/test_display_callback.py
Run this from the root of the ansible-runner directory
It will write these files to a folder named "callback-testing-playbooks"
"""


def _load_module_from_path(module_name, file_path):
    """Load a module from an explicit file path.

    Replacement for imp.load_source: the imp module is deprecated since
    Python 3.4 and removed in 3.12.
    """
    spec = importlib.util.spec_from_file_location(module_name, file_path)
    module = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(module)
    return module


callback_tests = _load_module_from_path('test.integration.test_display_callback',
                                        'test/integration/test_display_callback.py')
BASE_DIR = 'callback-testing-playbooks'

# All test functions defined in the loaded module.
names = [test_name for test_name in dir(callback_tests) if test_name.startswith('test_')]
for name in names:
    print('')
    print('Processing test {}'.format(name))
    bare_name = name[len('test_callback_plugin_'):]
    if not os.path.exists('{}/{}'.format(BASE_DIR, bare_name)):
        os.makedirs('{}/{}'.format(BASE_DIR, bare_name))
    the_test = getattr(callback_tests, name)
    # Find the pytest parametrize marker carrying the playbook file contents.
    for test_marker in the_test.pytestmark:
        if test_marker.name == 'parametrize':
            inputs = test_marker.args[1]
            break
    else:
        raise Exception('Test {} not parameterized in expected way.'.format(the_test))
    for test_input in inputs:  # renamed: 'input' shadowed the builtin
        for k, v in test_input.items():
            filename = '{}/{}/{}'.format(BASE_DIR, bare_name, k)
            print('    Writing file {}'.format(filename))
            # Never overwrite a file that already exists.
            if not os.path.exists(filename):
                with open(filename, 'w') as f:
                    f.write(v)
|
"""Math functions for calculator allowing for multiple inputs"""
def add(array):
return reduce(lambda x,y: x+y, array)
def subtract(array):
return reduce(lambda x,y: x-y, array)
def multiply(array):
return reduce(lambda x,y: x*y, array)
def divide(array):
return reduce(lambda x,y: x/y, array)
def power(array):
return reduce(lambda x,y: x**y, array)
def mod(array):
return reduce(lambda x,y: x % y, array)
|
# Report the three dictionary keys holding the largest values.
my_dict = {'a': 645, 'b': 3987, 'c': 93, 'd': 111, 'e': 646, 'f': 20}
print("Словарик: ", my_dict)
# Rank the keys by their associated value, biggest first.
ranked = sorted(my_dict.items(), key=lambda item: item[1], reverse=True)
max_keys = [key for key, _ in ranked]
print('Наибольшое значение в ключе: ', max_keys[0])
print('2-е наибольшое значение в ключе: ', max_keys[1])
print('3-е наибольшое значение в ключе: ', max_keys[2])
"""
Calculates cohen kappa for the coding in this study
"""
from sklearn.metrics import cohen_kappa_score
import pandas as pd
def labels_to_numbers(labels1, labels2):
    """
    Turn labels, eg. [tfoj, tfoz]
    into numbers, e.g [0, 1]

    Returns the two encoded lists plus the list of all assigned numbers.
    """
    # Assign one integer id per distinct label across both lists.
    mapping = {label: idx for idx, label in enumerate(set(labels1 + labels2))}
    encoded1 = [mapping[label] for label in labels1]
    encoded2 = [mapping[label] for label in labels2]
    return encoded1, encoded2, list(mapping.values())
def main():
    """do calc: Cohen's kappa per coding domain from the disagreement CSV."""
    print('main')
    # Three columns: the two coders' labels plus the coding domain.
    frame = pd.read_csv('coding_work\\different_codes.csv', header=None, names=['first', 'second', 'domain'])
    print(frame)
    for topic in list(frame.domain.drop_duplicates()):
        rows = frame[frame.domain == topic]
        coder_a, coder_b, label_ids = labels_to_numbers(list(rows['first']), list(rows['second']))
        print(label_ids)
        print(coder_a)
        print(coder_b)
        kappa = cohen_kappa_score(coder_a, coder_b)
        print(topic, kappa)
# Script entry: runs the analysis at import time.
print('called')
main()
|
import uuid
import requests
from flask import Flask, render_template, session, request, redirect, url_for, jsonify
from flask_session import Session # https://pythonhosted.org/Flask-Session
from services.user import UserService
from services.outlook import OutlookService
from services.auth import AuthService
from loaders.loader import load
from config import REDIRECT_PATH, CONFIRMATION_PAGE_URL, LOG_LEVEL, PORT
from schemas.user import UserSchema
from utils import remove_none_from_dict, clean_phone_number, handle_error
from datetime import datetime, timedelta
import pytz
import coloredlogs
dependencies = load()
app = dependencies["app"]
logger = app.logger
coloredlogs.install(level=LOG_LEVEL, logger=logger)
@app.route("/login", methods=["POST"])
@handle_error
def login():
auth_service = dependencies["auth_service"]
session = dependencies["session"]
# Technically we could use empty list [] as scopes to do just sign in,
# here we choose to also collect end user consent upfront
auth_uri = auth_service.build_auth_code_flow()
user = UserSchema(**{
"first": request.form["first_name"],
"last": request.form["last_name"],
"interval": int(request.form["interval"]),
"phone_number": clean_phone_number(request.form["phoneOne"]),
"lastJob": datetime.utcnow() - timedelta(hours=24 * 365),
"subscribed": [key for key in ["covid_updates", "job_opportunities", "school", "events"] if key in request.form]
})
session["user_info"] = remove_none_from_dict(user.dict())
return redirect(auth_uri)
@app.route(REDIRECT_PATH) # Its absolute URL must match your app's redirect_uri set in AAD
@handle_error
def authorized():
    """OAuth redirect handler: finish sign-up using the token returned by AAD."""
    auth_service = dependencies["auth_service"]
    user_service = dependencies["user_service"]
    outlook_service = dependencies["outlook_service"]
    twilio_service = dependencies["twilio_service"]
    session = dependencies["session"]
    # Exchange the auth code in the query string for an access token.
    token = auth_service.get_access_token(request.args)
    user = outlook_service.get_user_info(token)
    # Merge the Outlook profile with the form data saved during /login;
    # the session values win on conflicting keys.
    user = user.dict()
    user.update(session["user_info"])
    user = UserSchema(**user)
    # Persist the serialized token cache so background syncs can reuse it.
    cache = auth_service.load_cache()
    user.token = auth_service.dumps_cache(cache)
    user.nextJob = datetime.utcnow() + timedelta(hours=user.interval)
    res = user_service.upsertUser(user)  # NOTE(review): return value unused
    twilio_service.send_welcome(user)
    session.clear()
    return redirect(CONFIRMATION_PAGE_URL)
@app.route("/sync")
@handle_error
def sync():
auth_service = dependencies["auth_service"]
user_service = dependencies["user_service"]
job_service = dependencies["job_service"]
users = user_service.getUsersToProcess()
logger.debug("Got users")
result = []
for user in users:
token = auth_service.get_access_token_from_serialized(user.token)
logger.debug("Processing emails")
num_emails = job_service.process_user_emails(user, token)
logger.debug("Got emails")
cache = auth_service.load_cache()
user_update = UserSchema(**{
"token": auth_service.dumps_cache(cache)
})
user_service.updateUser(user_update, user.email)
result.append({user.email: num_emails})
return jsonify(result)
@app.route("/syncnow/<string:email>")
@handle_error
def sync_now(email: str):
auth_service = dependencies["auth_service"]
user_service = dependencies["user_service"]
job_service = dependencies["job_service"]
user = user_service.getUser(email)
token = auth_service.get_access_token_from_serialized(user.token)
num_emails = job_service.process_user_emails(user, token, ignore_lastJob=True, process_descending=True)
cache = auth_service.load_cache()
user_update = UserSchema(**{
"token": auth_service.dumps_cache(cache)
})
user_service.updateUser(user_update, user.email)
return jsonify({user.email: num_emails})
@app.route("/donate", methods=["POST"])
@handle_error
def donate():
email = request.form['email']
nonprofit = request.form['nonprofit']
routing_num = request.form.get('routing_num', '')
account_num = request.form.get('account_num', '')
amount = int(request.form.get('amount', 0))
message = request.form.get('message', '')
checkbook_service = dependencies['checkbook_service']
redirect_url = checkbook_service.donate(email, nonprofit, routing_num, account_num, amount, message)
return redirect(redirect_url)
if __name__ == "__main__":
app.run(host="0.0.0.0", port=PORT)
|
from rest_framework import generics
from django.contrib.auth.models import User
from rest_framework import permissions
from rest_framework.response import Response
from django.db.models import Count, Avg
from buses.models import Driver
from buses.api.serializers.driver_serializer import DriverSerializer
class DriverList(generics.ListCreateAPIView):
    """List all Driver records, or create a new one."""
    queryset = Driver.objects.all()
    serializer_class = DriverSerializer
    # permission_classes = [permissions.IsAuthenticatedOrReadOnly]
    # def perform_create(self, serializer):
    #     serializer.save(owner=self.request.user)
class DriverDetail(generics.RetrieveUpdateDestroyAPIView):
    """Retrieve, update or delete a single Driver record."""
    queryset = Driver.objects.all()
    serializer_class = DriverSerializer
    # permission_classes = [permissions.IsAuthenticatedOrReadOnly, IsOwnerOrReadOnly]
|
"""
Problem 22
Using names.txt (right click and 'Save Link/Target As...'), a 46K text file containing over five-thousand first names, begin by sorting it into alphabetical order. Then working out the alphabetical value for each name, multiply this value by its alphabetical position in the list to obtain a name score.
For example, when the list is sorted into alphabetical order, COLIN, which is worth 3 + 15 + 12 + 9 + 14 = 53, is the 938th name in the list. So, COLIN would obtain a score of 938 × 53 = 49714.
What is the total of all the name scores in the file?
"""
with open("names.txt", 'r') as myFile: #read the txt file as one giant string
data = myFile.read()
data = data.replace("\"", "") #removes the " from the string
data = data.split(",") #split into a list by ,
data.sort() #sort acb
[x.upper() for x in data] #dont know fore sure if everything is uppercase, so everything is ade into upper case
def nameValue(name):
    """Return the sum of the alphabetical values (A=1 .. Z=26) of *name*."""
    # Lookup table from upper-case letter to its 1-based alphabet position.
    alphatoNum = {'A': 1, 'B': 2, 'C': 3, 'D': 4, 'E': 5, 'F': 6, 'G': 7, 'H': 8, 'I': 9,
                  'J': 10, 'K': 11, 'L': 12, 'M': 13, 'N': 14, 'O': 15, 'P': 16, 'Q': 17, 'R': 18,
                  'S': 19, 'T': 20, 'U': 21, 'V': 22, 'W': 23, 'X': 24, 'Y': 25, 'Z': 26}
    return sum(alphatoNum[letter] for letter in name)
# Bug fix: the problem asks for alphabetical value * 1-based position in the
# sorted list; the original summed the raw name values and ignored position.
score = 0
for position, name in enumerate(data, start=1):
    score += position * nameValue(name)
print(score)
#!/usr/bin/env python
def predict(RD, RIA, RIS, RRV, RWR, num_courses):
    """Recommend up to *num_courses* course aliases for a rating profile.

    RD, RIA, RIS, RRV, RWR (numeric): the five per-category ratings; the rank
        ordering of these five values forms the class label an SVM predicts.
    num_courses (int): maximum number of course aliases to return.
    returns (list): 'Course Alias' values whose label matches the prediction.
    """
    # Local imports kept function-scoped as in the original; the unused
    # GridSearchCV / StratifiedShuffleSplit / train_test_split imports were removed.
    from sklearn.svm import SVC
    import pandas as pd
    import numpy as np
    model_df = pd.read_csv('../src/Data/allcourses.csv')
    model_df = model_df.iloc[:, 1:]
    model_df.drop(labels=['Rec Major', 'Rec Non Major'], axis=1, inplace=True)
    # Calculate ranks along columns
    ranks_df = model_df.iloc[:, 2:].rank(axis=1)
    rank_columns = ['RD', 'RIA', 'RIS', 'RRV', 'RWR']
    ranks_df.columns = rank_columns
    ranks_df = pd.concat([ranks_df, model_df], axis=1)
    # Encode the rank ordering of the five columns as a single integer label.
    ranks_df['Label'] = ranks_df[ranks_df.columns[:5]].apply(lambda x: ''.join(x.dropna().astype(int).astype(str)), axis=1)
    ranks_df['Label'] = ranks_df['Label'].astype(int)
    # Get rid of columns with ranks as this is now captured in Label
    ranks_df.drop(labels=rank_columns, axis=1, inplace=True)
    # Obtain the number of unique courses under each label
    counts = ranks_df['Label'].value_counts()
    # Only keep those labels with more than one sample per label
    labels_tokeep = counts[counts > 1]
    # Remove samples with undesired labels from dataframe
    final_df = ranks_df[ranks_df['Label'].isin(labels_tokeep.index)]
    # split data into features and labels for model
    x = final_df.iloc[:, 2:-1]
    y = final_df['Label']
    svm_clf = SVC()
    svm_clf.fit(x, y)
    ################################################
    # Making prediction
    ################################################
    random_vec = np.asarray([RD, RIA, RIS, RRV, RWR])
    # Bug fix: was reshape(1, -11); -1 is the single "inferred" dimension.
    random_vec = random_vec.reshape(1, -1)
    class_pred = svm_clf.predict(random_vec)[0]
    pred_df = final_df.loc[final_df['Label'] == class_pred]
    return pred_df['Course Alias'].tolist()[:num_courses]
|
from typing import List
class Solution1:
    def largestRectangleArea(self, heights: List[int]) -> int:
        """Return the largest rectangle area in a histogram.

        For each bar, locate the nearest strictly lower bar on either side by
        chasing previously computed boundaries, then maximize height * width.
        """
        if not heights:
            return 0
        n = len(heights)
        left_bound = [-1] * n   # nearest index to the left with a lower bar
        right_bound = [n] * n   # nearest index to the right with a lower bar
        for i in range(1, n):
            j = i - 1
            while j >= 0 and heights[j] >= heights[i]:
                j = left_bound[j]
            left_bound[i] = j
        for i in range(n - 2, -1, -1):
            j = i + 1
            while j < n and heights[j] >= heights[i]:
                j = right_bound[j]
            right_bound[i] = j
        best = 0
        for i in range(n):
            area = heights[i] * (right_bound[i] - left_bound[i] - 1)
            if area > best:
                best = area
        return best
if __name__ == '__main__':
    # Expected output: 10 (bars of height 5 and 6 give area 2 * 5).
    l = [2, 1, 5, 6, 2, 3]
    s = Solution1()
    print(s.largestRectangleArea(l), end='')
|
# -*- coding: utf-8 -*-
"""
Created on Wed Mar 4 19:26:07 2020
@author: Utilisateur
"""
from copy import deepcopy
import autograd.numpy as np
from autograd.numpy.linalg import pinv
from autograd.numpy import newaxis as n_axis
from autograd.numpy import transpose as t
##########################################################################################################
#################################### DGMM Utils ##########################################################
##########################################################################################################
def repeat_tile(x, reps, tiles):
    ''' Repeat then tile a quantity to mimic the former code logic
    reps (int): The number of times to repeat the first axis
    tiles (int): The number of times to tile the second axis
    -----------------------------------------------------------
    returns (ndarray): The repeated then tiled nd_array
    '''
    repeated = np.repeat(x, reps, axis=0)
    return np.tile(repeated, (tiles, 1, 1))
def compute_path_params(eta, H, psi):
    ''' Compute the gaussian parameters for each path
    H (list of nb_layers elements of shape (K_l x r_{l-1}, r_l)): Lambda
    parameters for each layer
    psi (list of nb_layers elements of shape (K_l x r_{l-1}, r_{l-1})): Psi
    parameters for each layer
    eta (list of nb_layers elements of shape (K_l x r_{l-1}, 1)): mu
    parameters for each layer
    ------------------------------------------------------------------------------------------------
    returns (tuple of len 2): The updated parameters mu_s and sigma for all s in Omega
    '''
    #=====================================================================
    # Retrieving model parameters
    #=====================================================================
    L = len(H)
    k = [len(h) for h in H]
    k_aug = k + [1] # Integrating the number of components of the last layer i.e 1
    r1 = H[0].shape[1]
    r2_L = [h.shape[2] for h in H] # r[2:L]
    r = [r1] + r2_L # r augmented
    #=====================================================================
    # Initiating the parameters for all layers
    #=====================================================================
    mu_s = [0 for i in range(L + 1)]
    sigma_s = [0 for i in range(L + 1)]
    # Initialization with the parameters of the last layer:
    # standard Gaussian (zero mean, identity covariance).
    mu_s[-1] = np.zeros((1, r[-1], 1)) # Inverser k et r plus tard
    sigma_s[-1] = np.eye(r[-1])[n_axis]
    #==================================================================================
    # Compute Gaussian parameters from top to bottom for each path
    #==================================================================================
    for l in reversed(range(0, L)):
        # Repeat layer-l parameters once per path continuing below layer l.
        H_repeat = np.repeat(H[l], np.prod(k_aug[l + 1: ]), axis = 0)
        eta_repeat = np.repeat(eta[l], np.prod(k_aug[l + 1: ]), axis = 0)
        psi_repeat = np.repeat(psi[l], np.prod(k_aug[l + 1: ]), axis = 0)
        # mu^(l) = eta^(l) + Lambda^(l) mu^(l+1)
        mu_s[l] = eta_repeat + H_repeat @ np.tile(mu_s[l + 1], (k[l], 1, 1))
        # sigma^(l) = Lambda^(l) sigma^(l+1) Lambda^(l)^T + Psi^(l)
        sigma_s[l] = H_repeat @ np.tile(sigma_s[l + 1], (k[l], 1, 1)) @ t(H_repeat, (0, 2, 1)) \
            + psi_repeat
    return mu_s, sigma_s
def compute_chsi(H, psi, mu_s, sigma_s):
    ''' Compute chsi as defined in equation (8) of the DGMM paper
    H (list of nb_layers elements of shape (K_l x r_l-1, r_l)): Lambda
    parameters for each layer
    psi (list of nb_layers elements of shape (K_l x r_l-1, r_l-1)): Psi
    parameters for each layer
    mu_s (list of nd-arrays): The means of the Gaussians starting at each layer
    sigma_s (list of nd-arrays): The covariance matrices of the Gaussians
    starting at each layer
    ------------------------------------------------------------------------------------------------
    returns (list of ndarray): The chsi parameters for all paths starting at each layer
    '''
    L = len(H)
    k = [len(h) for h in H]
    #=====================================================================
    # Initiating the parameters for all layers
    #=====================================================================
    # Initialization with the parameters of the last layer:
    # chsi = (sigma^-1 + Lambda^T Psi^-1 Lambda)^-1, computed with pinv for stability.
    chsi = [0 for i in range(L)]
    chsi[-1] = pinv(pinv(sigma_s[-1]) + t(H[-1], (0, 2, 1)) @ pinv(psi[-1]) @ H[-1])
    #==================================================================================
    # Compute chsi from top to bottom
    #==================================================================================
    for l in range(L - 1):
        Ht_psi_H = t(H[l], (0, 2, 1)) @ pinv(psi[l]) @ H[l]
        # One copy of the quadratic form per path continuing below layer l.
        Ht_psi_H = np.repeat(Ht_psi_H, np.prod(k[l + 1:]), axis = 0)
        sigma_next_l = np.tile(sigma_s[l + 1], (k[l], 1, 1))
        chsi[l] = pinv(pinv(sigma_next_l) + Ht_psi_H)
    return chsi
def compute_rho(eta, H, psi, mu_s, sigma_s, z_c, chsi):
    ''' Compute rho as defined in equation (8) of the DGMM paper
    eta (list of nb_layers elements of shape (K_l x r_{l-1}, 1)): mu
    parameters for each layer
    H (list of nb_layers elements of shape (K_l x r_{l-1}, r_l)): Lambda
    parameters for each layer
    psi (list of nb_layers elements of shape (K_l x r_{l-1}, r_{l-1})): Psi
    parameters for each layer
    mu_s (list of nd-arrays): The means of the Gaussians starting at each layer
    sigma_s (list of nd-arrays): The covariance matrices of the Gaussians
    starting at each layer
    z_c (list of nd-arrays) z^{(l)} - eta^{(l)} for each layer.
    chsi (list of nd-arrays): The chsi parameters for each layer
    -----------------------------------------------------------------------
    returns (list of ndarrays): The rho parameters (covariance matrices)
    for all paths starting at each layer
    '''
    L = len(H)
    rho = [0 for i in range(L)]
    k = [len(h) for h in H]
    k_aug = k + [1]  # account for the single component of the last layer
    for l in range(0, L):
        sigma_next_l = np.tile(sigma_s[l + 1], (k[l], 1, 1))
        mu_next_l = np.tile(mu_s[l + 1], (k[l], 1, 1))
        # Lambda^T Psi^-1, repeated once per path continuing below layer l.
        HxPsi_inv = t(H[l], (0, 2, 1)) @ pinv(psi[l])
        HxPsi_inv = np.repeat(HxPsi_inv, np.prod(k_aug[l + 1: ]), axis = 0)
        # rho = chsi (Lambda^T Psi^-1 (z - eta) + sigma^(l+1)^-1 mu^(l+1))
        rho[l] = chsi[l][n_axis] @ (HxPsi_inv[n_axis] @ z_c[l][..., n_axis] \
            + (pinv(sigma_next_l) @ mu_next_l)[n_axis])
    return rho
##########################################################################################################
#################################### DGMM Utils ##########################################################
##########################################################################################################
import matplotlib
import matplotlib.pyplot as plt
def plot_2d(zl, classes):
    ''' Plot a representation of 2D latent variables
    zl (numobs, M^{(l)} x r_l ndarray): The latent variable of layer l
    classes (numobs x n_clusters ndarray): The predicted or ground truth labels
    ---------------------------------------------------------------------------
    returns (None): The plot of the latent variables colorized by class
    '''
    n_clusters = len(np.unique(classes))
    colors = ['red', 'green', 'blue', 'silver', 'purple', 'black',\
        'gold', 'orange'] # For a 2 classes classification
    # One distinct colour per cluster; fail early rather than recycle colours.
    if n_clusters >= len(colors):
        raise ValueError('Too many classes for plotting,\
        please add some colors names above this line')
    fig = plt.figure(figsize=(16, 9))
    ax = plt.axes()
    # Colour each observation by its (predicted or true) class label.
    ax.scatter(zl[:, 0], zl[:, 1] , c = classes,\
        cmap=matplotlib.colors.ListedColormap(colors[:n_clusters]))
    plt.title("2D Latent space representation of the data")
    ax.set_xlabel('Latent dimension 1', fontweight ='bold')
    ax.set_ylabel('Latent dimension 2', fontweight ='bold')
    plt.show()
def plot_3d(zl, classes):
    ''' Plot a representation of 3D latent variables
    zl (numobs, M^{(l)} x r_l ndarray): The latent variable of layer l
    classes (numobs x n_clusters ndarray): The predicted or ground truth labels
    ---------------------------------------------------------------------------
    returns (None): The plot of the latent variables colorized by class
    '''
    n_clusters = len(np.unique(classes))
    colors = ['red', 'green', 'blue', 'silver', 'purple', 'black',\
        'gold', 'orange'] # For a 2 classes classification
    # One distinct colour per cluster; fail early rather than recycle colours.
    if n_clusters >= len(colors):
        raise ValueError('Too many classes for plotting,\
        please add some colors names above this line')
    fig = plt.figure(figsize = (16, 9))
    ax = plt.axes(projection ="3d")
    # Add x, y gridlines.
    # Fix: the keyword was `b=True`; matplotlib renamed it to `visible`
    # (deprecated in 3.4, removed in 3.6), so `b=` now raises TypeError.
    ax.grid(visible = True, color ='grey',
        linestyle ='-.', linewidth = 0.3,
        alpha = 0.2)
    # Creating plot: colour each observation by its class label.
    sctt = ax.scatter3D(zl[:,0], zl[:,1], zl[:,2],
        alpha = 0.8,
        c = classes,
        cmap = matplotlib.colors.ListedColormap(colors[:n_clusters]))
    plt.title("3D Latent space representation of the data")
    ax.set_xlabel('Latent dimension 1', fontweight ='bold')
    ax.set_ylabel('Latent dimension 2', fontweight ='bold')
    ax.set_zlabel('Latent dimension 3', fontweight ='bold')
    # show plot
    plt.show()
##########################################################################################################
################################# General purposes #######################################################
##########################################################################################################
def isnumeric(var):
    ''' Check if a variable is numeric
    var (int, str, float etc.): The variable whom type has to be tested
    ---------------------------------------------------------------------------
    returns (Bool): Whether the variable is of numeric type (True) or not (False)
    '''
    # Delegate the check to int(): accepts ints, floats and integer-like
    # strings. Catch only conversion errors instead of the previous bare
    # except, so unrelated exceptions (e.g. KeyboardInterrupt) propagate.
    try:
        int(var)
        return True
    except (TypeError, ValueError):
        return False
def asnumeric(lst):
    ''' Tries to convert all the elements of a list into numeric elements
    lst (list): The list of elements to convert
    ---------------------------------------------------------------------------
    returns (list): The converted list
    raises ValueError: when any element cannot be converted to int
    '''
    # Catch only conversion errors (previously a bare except) and chain the
    # original failure so the offending value stays visible in the traceback.
    try:
        return [int(el) for el in lst]
    except (TypeError, ValueError) as err:
        raise ValueError('r and k values must be numeric') from err
def check_inputs(k, r):
    ''' Check if the values of (k,r) specified to launch the algorithm are suited
    if not raise an error
    k (dict): The original number of component on each layer
    r (dict): The original dimensions of the network layers
    ---------------------------------------------------------------------------
    returns (None): Does not return anything if the values are well suited
    '''
    # Check k and r are dict
    if not(isinstance(k, dict)):
        raise TypeError('k must be a dict')
    if not(isinstance(r, dict)):
        raise TypeError('r must be a dict')
    # Check keys == ['c', 'd', 't'] (continuous head, discrete head, tail)
    if set(k.keys()) != set(['c', 'd', 't']):
        raise ValueError('The keys of k have to be [\'c\', \'d\', \'t\']')
    if set(r.keys()) != set(['c', 'd', 't']):
        raise ValueError('The keys of r have to be [\'c\', \'d\', \'t\']')
    # Check k and r have the same length
    for h in ['c', 'd', 't']:
        if len(k[h]) != len(r[h]):
            raise ValueError('r and k must have the same lengths for each head and tail')
    # Check valid k and r values model
    # ! Implement isnumeric
    # Coerce every entry to int in place (asnumeric raises if non-numeric).
    for h, kh in k.items():
        k[h] = asnumeric(kh)
        r[h] = asnumeric(r[h])
    # Check k['c'] is 1
    if k['c'][0] != 1:
        raise ValueError('The first continuous head layer are the data hence k[\'c\'] = 1')
    # Check identifiable model: layer dimensions must strictly decrease
    # along each full head-to-tail path.
    for h in ['c', 'd']:
        r_1Lh = r[h] + r['t']
        are_dims_decreasing = np.all([r_1Lh[l] - r_1Lh[l + 1] > 0 \
            for l in range(len(r_1Lh) - 1)])
        if not(are_dims_decreasing):
            raise ValueError('Dims must be decreasing from heads to tail !')
###############################################################################
############################ Syntaxic Sugar ###################################
###############################################################################
def dispatch_dgmm_init(init):
''' Dispatch the initial values of eta, Lambda and Psi between the associated
variables
init(dict): The dict of the initial values
--------------------------------------------------------------------------
returns (tuple of size 6): the eta, Lambda and Psi estimators of all
network layers
'''
eta_c = deepcopy(init['c']['eta'])
eta_d = deepcopy(init['d']['eta'])
H_c = deepcopy(init['c']['H'])
H_d = deepcopy(init['d']['H'])
psi_c = deepcopy(init['c']['psi'])
psi_d = deepcopy(init['d']['psi'])
return eta_c, eta_d, H_c, H_d, psi_c, psi_d
def dispatch_gllvm_init(init):
    ''' Dispatch the initial values of lambda_bin, lambda_ord and lambda_categ
    between the associated variables
    init(dict): The dict of the initial values
    --------------------------------------------------------------------------
    returns (tuple of size 3): the lambda_bin, lambda_ord and lambda_categ estimators
    of the first discrete head layer
    '''
    # Deep-copy so later in-place updates cannot corrupt the initial values.
    return tuple(deepcopy(init[key])
                 for key in ('lambda_bin', 'lambda_ord', 'lambda_categ'))
def dispatch_paths_init(init):
    ''' Dispatch the initial values of w_s_*, the paths probabilities starting
    from head * between the associated variables
    init(dict): The dict of the initial values
    --------------------------------------------------------------------------
    returns (tuple of size 2): The paths probabilities starting from each head
    '''
    # Deep-copy so later in-place updates cannot corrupt the initial values.
    return deepcopy(init['c']['w_s']), deepcopy(init['d']['w_s'])
def compute_S_1L(L_1L, k_1L, k):
    ''' Compute the number of paths starting from each head and tail of the
    network.
    L_1L (dict): The number of layers where the lists include the heads and the tail layers
    k_1L (list of int): The number of component on each layer including the common layers
    k (dict): The original number of component on each layer
    --------------------------------------------------------------------------
    returns (dict): The number of paths starting from each head and tail
    '''
    # Paths from layer `layer` down = product of component counts below it.
    def tail_products(components, n_layers):
        return [np.prod(components[layer:]) for layer in range(n_layers)]

    # Paths of both (heads+tail) and tail; the 'c' head counts one extra
    # layer because its first layer is the data itself.
    return {'c': tail_products(k_1L['c'], L_1L['c'] + 1),
            'd': tail_products(k_1L['d'], L_1L['d']),
            't': tail_products(k['t'], L_1L['t'])}
def nb_comps_and_layers(k):
    ''' Compute the number of components and layers starting from each head and
    tail of the network.
    k (dict): The original number of component on each layer
    --------------------------------------------------------------------------
    returns (tuple of size 5): The number of components and layers in the network
    '''
    tail = k['t']
    # Components per layer when walking from each head through the tail.
    k_1L = {'c': k['c'] + tail, 'd': k['d'] + tail, 't': tail}
    # Number of hidden layers of both (heads + tail) and tail; the 'c' head
    # loses one layer because its first layer is the data itself.
    L_1L = {'c': len(k['c']) + len(tail) - 1,
            'd': len(k['d']) + len(tail),
            't': len(tail)}
    L = {'c': len(k['c']) - 1, 'd': len(k['d']), 't': len(tail)}
    # Index of the first tail layer starting from head h
    bar_L = {'c': len(k['c']), 'd': len(k['d'])}
    S_1L = compute_S_1L(L_1L, k_1L, k)
    return k_1L, L_1L, L, bar_L, S_1L
|
def funcion1():
    """Level 1 of the call chain: adds 5 to funcion2()'s result."""
    resultado = 5 + funcion2()  # L2
    print("ingreso por el método 1")
    return resultado
def funcion2():
    """Level 2 of the call chain: adds 3 to funcion3()'s result."""
    resultado = 3 + funcion3()  # L3
    print("Ingreso por el método 2")
    return resultado
def funcion3():
    """Deepest call of the chain: prints its marker and returns 7."""
    print("Ingreso por el método 3")
    return 7
# Kick off the chain funcion1 -> funcion2 -> funcion3; x ends up 5 + 3 + 7 = 15.
x = funcion1() #L1
print (x)
# Recursion
def factorial(n):
    """Classic recursive factorial: n! = n * (n-1)!, with 0! = 1."""
    return 1 if n == 0 else n * factorial(n - 1)
factorial(5)  # result (120) is discarded; the call only demonstrates recursion
# Stacks using plain lists
stack = [] # create an empty stack
stack.append('a') # push
stack.append('e')
stack.append('i')
print(stack)
stack.pop() # pop
print(stack)
stack.pop()
print(stack)
# Pila (stack) class ->
# init: initialize a new, empty stack
# apilar: push an element onto the stack
# desapilar: remove the top of the stack and return it (always the last element added)
# imprimirPila: walk the stack and print its contents
# estaVacia: return True or False depending on whether the stack is empty
class Pila:
    """Stack backed by a Python list (top of the stack = end of the list)."""

    def __init__(self):
        # Underlying storage for the stacked elements.
        self.arreglo = []

    def apilar(self, x):
        """Push *x* onto the top of the stack."""
        self.arreglo.append(x)

    def desapilar(self):
        """Remove the top element and return it.

        Bug fix: the class contract states the popped element must be
        returned; the original popped it and returned None.
        """
        return self.arreglo.pop()

    def imprimirPila(self):
        """Print every element, comma separated, followed by a newline."""
        for i in self.arreglo:
            print(i, end=",")
        print()

    def estaVacia(self):
        """Return True when the stack holds no elements."""
        # return len(self.arreglo) == 0 opcion1
        return self.arreglo == []  # opcion2
# Quick manual exercise of the Pila class.
p=Pila()
print(p.estaVacia())
p.imprimirPila()
p.apilar(4)
p.imprimirPila()
p.apilar(5)
p.imprimirPila()
p.desapilar()
p.imprimirPila()
print(p.estaVacia())
# Stacks built on top of the linked-list classes below
# We reuse the code from the linked-lists lesson
# Node
class nodoSimple:
    """Singly-linked-list node: holds one datum plus a link to the next node."""

    def __init__(self, d=None):
        self.dato = d
        self.liga = None  # pointer/link to the next node

    # setters and getters
    def asignarDato(self, d):
        self.dato = d

    def asignarLiga(self, x):
        self.liga = x

    def retornarDato(self):
        return self.dato

    def retornarLiga(self):
        return self.liga
#Clase LSL
class LSL:
    """Singly linked list (Lista Simplemente Ligada) of nodoSimple nodes."""

    def __init__(self):  # constructor
        self.primero = None  # first node
        self.ultimo = None   # last node

    def insertar(self, d, y=None):
        """Insert datum *d* after node *y*, or at the front when y is None."""
        x = nodoSimple(d)
        self.conectar(x, y)

    def conectar(self, x, y):
        """Link node *x* into the list right after node *y* (front if y is None)."""
        if y == None:
            if self.primero == None:
                self.ultimo = x
            else:
                x.asignarLiga(self.primero)
            self.primero = x
            return
        x.asignarLiga(y.retornarLiga())
        y.asignarLiga(x)
        if y == self.ultimo:
            self.ultimo = x

    def primerNodo(self):
        return self.primero

    def finDeRecorrido(self, p):
        return p == None

    def esVacia(self):
        return self.primero == None

    def recorrerLista(self):
        """Print every datum in order, comma separated."""
        p = self.primerNodo()
        while not self.finDeRecorrido(p):
            print(p.retornarDato(), end=", ")
            p = p.retornarLiga()

    def borrar(self, x, y=None):
        """Remove node *x*; *y* must be its predecessor (None when x is first).

        Bug fix: the original replaced *y* with y.retornarDato() before calling
        desconectar, which then failed calling asignarLiga on a plain datum.
        desconectar expects the predecessor NODE, so *y* is passed unchanged.
        """
        if x == None:
            print("Dato no está en la lista")
            return
        if y == None and x != self.primero:
            print("Falta el anterior del dato a borrar")
            return
        self.desconectar(x, y)

    def desconectar(self, x, y):
        """Unlink node *x* whose predecessor is *y* (None when x is first)."""
        if y == None:
            self.primero = x.retornarLiga()
            if self.esVacia():
                self.ultimo = None
        else:
            y.asignarLiga(x.retornarLiga())
        if x == self.ultimo:
            self.ultimo = y
#init: Inicializar una pila nueva y vacia
#apilar: Agrega un elemento a la pila
#desapilar: Elimina el tope de la pila y lo devuelve (El elemento que se devuelve siempre es el último que se agrego)
#imprimirPila: Recorre la pila y muestra los datos
#estaVacia: Devuelve True o False según la pila este vacía o este llena
class PilaLSL(LSL):
    """Stack implemented on top of the singly linked list LSL."""

    def __init__(self):
        LSL.__init__(self)

    def apilar(self, d):
        """Push: insert at the front of the underlying list."""
        self.insertar(d)

    def imprimirPila(self):
        """Print the stack contents, top first."""
        self.recorrerLista()

    def desapilar(self):
        """Pop the front node and return its datum."""
        tope = self.primerNodo()
        dato = tope.retornarDato()
        self.borrar(tope)
        return dato
# Manual exercise of PilaLSL: push a, e, i; print; pop; print again.
a = PilaLSL()
a.apilar('a')
a.apilar('e')
a.apilar('i')
a.recorrerLista()
a.desapilar()
a.recorrerLista()
# Exercise: create a program showing a menu where the user can choose between:
# 1. Push (apilar)
# 2. Pop (desapilar)
# 3. Print the stack contents
# 0. Exit
|
import cv2
import json
import math
import numpy as np
from pycocotools.coco import COCO
from pycocotools.cocoeval import COCOeval
import torch
from opendr.perception.pose_estimation.lightweight_open_pose.algorithm.datasets.coco import CocoValDataset
from opendr.perception.pose_estimation.lightweight_open_pose.algorithm.modules.keypoints import \
extract_keypoints, group_keypoints
def run_coco_eval(gt_file_path, dt_file_path, verbose=False):
    """Score keypoint detections against COCO ground truth.

    gt_file_path: path to the COCO ground-truth annotation file.
    dt_file_path: path to the detection results file.
    verbose: when True, print a short progress line before evaluating.
    returns the COCOeval object after evaluate/accumulate/summarize.
    """
    annotation_type = 'keypoints'
    if verbose:
        print('Running test for {} results.'.format(annotation_type))
    ground_truth = COCO(gt_file_path)
    detections = ground_truth.loadRes(dt_file_path)
    evaluation = COCOeval(ground_truth, detections, annotation_type)
    evaluation.evaluate()
    evaluation.accumulate()
    evaluation.summarize()
    return evaluation
def normalize(img, img_mean, img_scale):
    """Convert *img* to float32, subtract *img_mean*, then scale by *img_scale*."""
    as_float = np.array(img, dtype=np.float32)
    return (as_float - img_mean) * img_scale
def pad_width(img, stride, pad_value, min_dims):
    """Pad *img* so its dims reach min_dims rounded up to a multiple of *stride*.

    img (H x W x C ndarray): image to pad.
    stride (int): the padded dimensions are rounded up to multiples of this.
    pad_value: border fill value passed to cv2.copyMakeBorder.
    min_dims (list of 2 ints): minimal [height, width]; mutated in place.
    returns (tuple): (padded image, [top, left, bottom, right] pad sizes).
    """
    h, w, _ = img.shape
    # Clamp h so an image taller than min_dims[0] yields zero (not negative)
    # vertical padding; width instead grows min_dims[1] to cover the image.
    h = min(min_dims[0], h)
    min_dims[0] = math.ceil(min_dims[0] / float(stride)) * stride
    min_dims[1] = max(min_dims[1], w)
    min_dims[1] = math.ceil(min_dims[1] / float(stride)) * stride
    pad = []
    pad.append(int(math.floor((min_dims[0] - h) / 2.0)))  # top
    pad.append(int(math.floor((min_dims[1] - w) / 2.0)))  # left
    pad.append(int(min_dims[0] - h - pad[0]))  # bottom
    pad.append(int(min_dims[1] - w - pad[1]))  # right
    padded_img = cv2.copyMakeBorder(img, pad[0], pad[2], pad[1], pad[3],
        cv2.BORDER_CONSTANT, value=pad_value)
    return padded_img, pad
def convert_to_coco_format(pose_entries, all_keypoints):
    """Convert internal pose entries into COCO keypoint lists plus person scores.

    pose_entries: per-person sequences; entries [:-2] index into all_keypoints,
        [-2] is the person score, [-1] appears to be the keypoint count
        (used below to weight the score) — verify against the caller.
    all_keypoints: array whose rows hold at least (x, y, score) per keypoint.
    returns (tuple): (coco_keypoints, scores), one entry per person.
    """
    coco_keypoints = []
    scores = []
    for n in range(len(pose_entries)):
        if len(pose_entries[n]) == 0:
            continue
        keypoints = [0] * 17 * 3  # COCO format: 17 keypoints x (x, y, visibility)
        # Internal keypoint order -> COCO slot index; position 1 is the neck,
        # which COCO does not have.
        to_coco_map = [0, -1, 6, 8, 10, 5, 7, 9, 12, 14, 16, 11, 13, 15, 2, 1, 4, 3]
        person_score = pose_entries[n][-2]
        position_id = -1
        for keypoint_id in pose_entries[n][:-2]:
            position_id += 1
            if position_id == 1: # no 'neck' in COCO
                continue
            cx, cy, score, visibility = 0, 0, 0, 0 # keypoint not found
            if keypoint_id != -1:
                cx, cy, score = all_keypoints[int(keypoint_id), 0:3]
                # +0.5: presumably a center-of-pixel correction — TODO confirm
                cx = cx + 0.5
                cy = cy + 0.5
                visibility = 1
            keypoints[to_coco_map[position_id] * 3 + 0] = cx
            keypoints[to_coco_map[position_id] * 3 + 1] = cy
            keypoints[to_coco_map[position_id] * 3 + 2] = visibility
        coco_keypoints.append(keypoints)
        scores.append(person_score * max(0, (pose_entries[n][-1] - 1))) # -1 for 'neck'
    return coco_keypoints, scores
def infer(net, img, scales, base_height, stride, pad_value=(0, 0, 0), img_mean=(128, 128, 128), img_scale=1 / 256):
    """Run the pose network over *img* at several scales and average the outputs.

    net : network returning a list of stage outputs; the last two entries are
        taken as (heatmaps, pafs) of the final refinement stage.
    img : HWC uint8/float image.
    scales : multipliers applied to base_height to form each inference height.
    base_height : target input height for scale 1.
    stride : network output stride; padding keeps dims divisible by it.
    Returns (avg_heatmaps, avg_pafs) at the original image resolution,
    with 19 heatmap channels and 38 PAF channels.
    """
    normed_img = normalize(img, img_mean, img_scale)
    height, width, _ = normed_img.shape
    # Ratio that maps the image height to scale * base_height.
    scales_ratios = [scale * base_height / float(height) for scale in scales]
    # 19 = 18 keypoint channels + background; 38 = 19 limb PAFs x (x, y).
    avg_heatmaps = np.zeros((height, width, 19), dtype=np.float32)
    avg_pafs = np.zeros((height, width, 38), dtype=np.float32)
    for ratio in scales_ratios:
        scaled_img = cv2.resize(normed_img, (0, 0), fx=ratio, fy=ratio, interpolation=cv2.INTER_CUBIC)
        min_dims = [base_height, max(scaled_img.shape[1], base_height)]
        padded_img, pad = pad_width(scaled_img, stride, pad_value, min_dims)
        # HWC numpy -> NCHW float tensor on the GPU.
        tensor_img = torch.from_numpy(padded_img).permute(2, 0, 1).unsqueeze(0).float().cuda()
        stages_output = net(tensor_img)
        # Heatmaps of the last refinement stage.
        stage2_heatmaps = stages_output[-2]
        heatmaps = np.transpose(stage2_heatmaps.squeeze().cpu().data.numpy(), (1, 2, 0))
        # Upsample by the stride, strip the padding, resize to the input size.
        heatmaps = cv2.resize(heatmaps, (0, 0), fx=stride, fy=stride, interpolation=cv2.INTER_CUBIC)
        # NOTE(review): the extra ':' after pad[3] makes this a (harmless)
        # empty-step slice -- presumably a typo for a plain stop index.
        heatmaps = heatmaps[pad[0]:heatmaps.shape[0] - pad[2], pad[1]:heatmaps.shape[1] - pad[3]:, :]
        heatmaps = cv2.resize(heatmaps, (width, height), interpolation=cv2.INTER_CUBIC)
        # Accumulate the per-scale average.
        avg_heatmaps = avg_heatmaps + heatmaps / len(scales_ratios)
        # Part-affinity fields of the last stage, treated the same way.
        stage2_pafs = stages_output[-1]
        pafs = np.transpose(stage2_pafs.squeeze().cpu().data.numpy(), (1, 2, 0))
        pafs = cv2.resize(pafs, (0, 0), fx=stride, fy=stride, interpolation=cv2.INTER_CUBIC)
        pafs = pafs[pad[0]:pafs.shape[0] - pad[2], pad[1]:pafs.shape[1] - pad[3], :]
        pafs = cv2.resize(pafs, (width, height), interpolation=cv2.INTER_CUBIC)
        avg_pafs = avg_pafs + pafs / len(scales_ratios)
    return avg_heatmaps, avg_pafs
def evaluate(labels, output_name, images_folder, net, multiscale=False, visualize=False):
    """Evaluate a pose network on a COCO validation set and print COCO metrics.

    labels : path to the COCO ground-truth annotation json.
    output_name : path where the detections json is written.
    images_folder : directory containing the validation images.
    net : pose network (moved to GPU and switched to eval mode here).
    multiscale : average over 4 scales instead of a single one.
    visualize : draw predicted keypoints per image; Esc aborts early
        (before results are written).
    """
    net = net.cuda().eval()
    base_height = 368
    scales = [1]
    if multiscale:
        scales = [0.5, 1.0, 1.5, 2.0]
    stride = 8
    dataset = CocoValDataset(labels, images_folder)
    coco_result = []
    for sample in dataset:
        file_name = sample['file_name']
        img = sample['img']
        avg_heatmaps, avg_pafs = infer(net, img, scales, base_height, stride)
        # Peak extraction per keypoint type (channel 19 is background).
        total_keypoints_num = 0
        all_keypoints_by_type = []
        for kpt_idx in range(18):  # 19th for bg
            total_keypoints_num += extract_keypoints(avg_heatmaps[:, :, kpt_idx], all_keypoints_by_type,
                                                     total_keypoints_num)
        # Group detected keypoints into per-person poses using the PAFs.
        pose_entries, all_keypoints = group_keypoints(all_keypoints_by_type, avg_pafs)
        coco_keypoints, scores = convert_to_coco_format(pose_entries, all_keypoints)
        # COCO image id is the numeric part of the file name.
        image_id = int(file_name[0:file_name.rfind('.')])
        for idx in range(len(coco_keypoints)):
            coco_result.append({
                'image_id': image_id,
                'category_id': 1,  # person
                'keypoints': coco_keypoints[idx],
                'score': scores[idx]
            })
        if visualize:
            for keypoints in coco_keypoints:
                for idx in range(len(keypoints) // 3):
                    cv2.circle(img, (int(keypoints[idx * 3]), int(keypoints[idx * 3 + 1])),
                               3, (255, 0, 255), -1)
            print(coco_keypoints)
            cv2.imshow('keypoints', img)
            key = cv2.waitKey()
            if key == 27:  # esc
                return
    with open(output_name, 'w') as f:
        json.dump(coco_result, f, indent=4)
    run_coco_eval(labels, output_name)
|
import sys
import platform
import os
from PyQt5.QtCore import *
from PyQt5.QtGui import *
from PyQt5.QtWidgets import *
from gui.mainWindow import helpform, newimagedlg
import gui.mainWindow.qrc_resources
from gui.mainWindow.exercise import resizedlg
__version__ = "1.0.1"
class MainWindow(QMainWindow):
    """Main window of the Image Changer demo application.

    Hosts the central image display, a log dock, menus, toolbars, a zoom
    spin box and persisted geometry/state.  Several file/edit slots are
    still unimplemented stubs.
    """

    def __init__(self, parent=None):
        super(MainWindow, self).__init__(parent)
        # --- document state ---
        self.dirty = False              # True when there are unsaved changes
        self.filename = None            # current image path, if any
        self.mirroredvertically = False
        self.mirroredhorizontally = False
        self.printer = None
        # --- central image display ---
        # NOTE(review): "Lable" is a historical misspelling kept so any
        # external references to this attribute continue to work.
        self.image = QImage()
        self.imageLable = QLabel()
        self.imageLable.setMinimumSize(500, 500)
        self.imageLable.setAlignment(Qt.AlignCenter)
        self.imageLable.setContextMenuPolicy(Qt.ActionsContextMenu)
        self.setCentralWidget(self.imageLable)
        # --- log dock ---
        dockWidget = QDockWidget("Log", self)
        # Object name is persisted by saveState(); kept as-is (typo included).
        dockWidget.setObjectName("LogDockWidger")
        dockWidget.setAllowedAreas(Qt.LeftDockWidgetArea | Qt.RightDockWidgetArea)
        self.logListWidget = QListWidget()
        dockWidget.setWidget(self.logListWidget)
        self.addDockWidget(Qt.LeftDockWidgetArea, dockWidget)
        # --- status bar with a permanent size indicator ---
        self.sizeLable = QLabel()
        self.sizeLable.setFrameStyle(QFrame.StyledPanel | QFrame.Sunken)
        status = self.statusBar()
        status.setSizeGripEnabled(False)  # fixed: this call was duplicated
        status.addPermanentWidget(self.sizeLable)
        status.showMessage("Ready", 5000)
        # --- actions ---
        fileNewAction = self.createAction("&New...", "filenew", QKeySequence.New, "Create an image file",
                                          listener=self.fileNew)
        fileOpenAction = self.createAction("&Open...", "fileopen", QKeySequence.Open, "Open an existing image file",
                                           listener=self.fileOpen)
        fileSaveAction = self.createAction("&Save...", "filesave", QKeySequence.Save, "Save the image",
                                           listener=self.fileSave)
        # fixed: shortcut was QKeySequence.Save, clashing with fileSaveAction
        fileSaveAsAction = self.createAction("Save &As......", "filesaveas", QKeySequence.SaveAs,
                                             "Save the image using a new name", listener=self.fileSaveAs)
        # fixed: shortcut was QKeySequence.Save
        fileQuitAction = self.createAction("&Quit", "filequit", "Ctrl+Q", "Close the application",
                                           listener=self.close)
        editResizeAction = self.createAction("&Resize...", "editresize", "Ctrl+R", "Resize image",
                                             listener=self.editResize)
        editInvertAction = self.createAction("&Invert", "editinvert", "Ctrl+I", "Invert the image's colors", True,
                                             listener=self.editInvert, signal="toggled")
        editSwapRedAndBlueAction = self.createAction("Sw&ap Red and Blue", "editswap", "Ctrl+A",
                                                     "Swap the image's red and blue color components", True,
                                                     listener=self.editSwapRedAndBlue, signal="toggled")
        # NOTE(review): there is no editZoom slot; this listener looks like a
        # copy-paste slip -- confirm the intended behavior before rewiring.
        editZoomAction = self.createAction("&Zoom...", "editzoom", "Alt+Z", "Zoom the image",
                                           listener=self.editSwapRedAndBlue)
        editUnMirrorAction = self.createAction("&Unmirror", "editunmirror", "Ctrl+U", "Unmirror the image", True,
                                               listener=self.editUnMirror, signal="toggled")
        editMirrorHorizontalAction = self.createAction("Mirror &Horizontally", "editmirrorhoriz", "Ctrl+H",
                                                       "Horizontally mirror the image", True,
                                                       listener=self.editMirrorHorizontal, signal="toggled")
        # fixed: was wired to editMirrorHorizontal.  NOTE(review): "Ctrl+V"
        # shadows the conventional paste shortcut -- confirm.
        editMirrorVerticalAction = self.createAction("Mirror &Vertically", "editmirrorvert", "Ctrl+V",
                                                     "Vertically mirror the image", True,
                                                     listener=self.editMirrorVertical, signal="toggled")
        helpAboutAction = self.createAction("&About Image Changer", listener=self.helpAbout)
        helpHelpAction = self.createAction("&Help", shorcut=QKeySequence.HelpContents, listener=self.helpHelp)
        # Mirror modes are mutually exclusive.
        mirrorGroup = QActionGroup(self)
        mirrorGroup.addAction(editUnMirrorAction)
        mirrorGroup.addAction(editMirrorHorizontalAction)
        mirrorGroup.addAction(editMirrorVerticalAction)
        # --- menus ---
        self.fileMenu = self.menuBar().addMenu("&File")
        self.fileMenuActions = (
            fileNewAction,
            fileOpenAction,
            fileSaveAction,
            fileSaveAsAction,
            None,  # separator
            fileQuitAction)
        self.fileMenu.aboutToShow.connect(self.updateFileMenu)
        editMenu = self.menuBar().addMenu("&Edit")
        self.addActions(editMenu, (editInvertAction, editSwapRedAndBlueAction, editZoomAction))
        mirrorMenu = editMenu.addMenu(QIcon(":/editmirror.png"), "&Mirror")
        self.addActions(mirrorMenu, (editUnMirrorAction, editMirrorHorizontalAction, editMirrorVerticalAction))
        helpMenu = self.menuBar().addMenu("&Help")
        self.addActions(helpMenu, (helpAboutAction, helpHelpAction))
        # --- toolbars ---
        fileToolBar = self.addToolBar("File")
        fileToolBar.setObjectName("FileToolBar")
        self.addActions(fileToolBar, (fileNewAction, fileOpenAction, fileSaveAction))
        editToolBar = self.addToolBar("Edit")
        editToolBar.setObjectName("EditToolBar")
        self.addActions(editToolBar, (
            editResizeAction, editInvertAction, editSwapRedAndBlueAction, editUnMirrorAction,
            editMirrorHorizontalAction,
            editMirrorVerticalAction))
        # --- zoom control ---
        self.zoomSpinBox = QSpinBox()
        self.zoomSpinBox.setRange(1, 400)
        self.zoomSpinBox.setSuffix(" %")
        self.zoomSpinBox.setValue(100)
        self.zoomSpinBox.setToolTip("Zoom the Image")
        self.zoomSpinBox.setStatusTip(self.zoomSpinBox.toolTip())
        self.zoomSpinBox.setFocusPolicy(Qt.NoFocus)
        self.zoomSpinBox.valueChanged[int].connect(self.showImage)
        editToolBar.addWidget(self.zoomSpinBox)
        # --- persisted settings ---
        settings = QSettings()
        self.recentFiles = settings.value("RecentFiles") or []
        self.restoreGeometry(settings.value("MainWindow/Geometry", QByteArray()))
        self.restoreState(settings.value("MainWindow/State", QByteArray()))
        self.setWindowTitle("Image Changer")
        self.updateFileMenu()
        # Checkable actions and the state they reset to when a new image
        # is created.
        self.resetableActions = ((editInvertAction, False),
                                 (editSwapRedAndBlueAction, False),
                                 (editUnMirrorAction, True))
        # Defer initial file loading until the event loop is running.
        QTimer.singleShot(0, self.loadInitialFile)

    def createAction(self, text, icon=None, shorcut=None, tip=None, checkable=False, listener=None, signal="triggered"):
        """Build a QAction with optional icon, shortcut, tip and connection.

        The parameter name "shorcut" (sic) is kept: callers pass it by
        keyword.  *signal* names the QAction signal to connect *listener* to.
        """
        action = QAction(text, self)
        if icon is not None:
            action.setIcon(QIcon(":/{}.png".format(icon)))
        if shorcut is not None:
            action.setShortcut(shorcut)
        if tip is not None:
            action.setToolTip(tip)
            action.setStatusTip(tip)
        if checkable:
            action.setCheckable(True)
        if listener is not None:
            getattr(action, signal).connect(listener)
        return action

    def addActions(self, target, actions):
        """Add *actions* to *target* (menu/toolbar); None inserts a separator."""
        for action in actions:
            if action is None:
                target.addSeparator()
            else:
                target.addAction(action)

    def fileNew(self):
        """Create a new image via NewImageDlg after offering to save changes."""
        if not self.okToContinue():
            return
        dialog = newimagedlg.NewImageDlg(self)
        if dialog.exec_():
            # self.addRecentFile(self.filename)
            self.image = QImage()
            # Reset checkable edit actions to their defaults for a new image.
            for action, check in self.resetableActions:
                action.setChecked(check)
            self.image = dialog.image()
            self.filename = None
            self.dirty = True
            self.showImage()
            self.sizeLable.setText("{} x {}".format(
                self.image.width(), self.image.height()))
            self.updateStatus("Created new image")

    def fileOpen(self):
        """Stub: open an existing image (not yet implemented)."""
        pass

    def fileSave(self):
        """Stub: save the current image (not yet implemented).

        NOTE(review): okToContinue() returns this method's result, so the
        implicit None currently reads as "save failed".
        """
        pass

    def fileSaveAs(self):
        """Stub: save the current image under a new name (not yet implemented)."""
        pass

    def editResize(self):
        """Resize the image via ResizeDlg; no-op when no image is loaded."""
        print("Resize Image")
        if self.image.isNull():
            return
        form = resizedlg.ResizeDlg(self.image.width(), self.image.height())
        if form.exec_():
            width, height = form.result()
            if (
                width == self.image.width() and
                height == self.image.height()
            ):
                self.statusBar().showMessage("Resized to the same size",
                                             5000)
            else:
                self.image = self.image.scaled(width, height)
                self.showImage()
                self.dirty = True
                size = "{} x {}".format(
                    self.image.width(), self.image.height())
                self.sizeLable.setText(size)
                self.updateStatus("Resized to {}".format(size))

    def editInvert(self):
        """Stub: invert the image's colors (not yet implemented)."""
        pass

    def editSwapRedAndBlue(self):
        """Stub: swap the red and blue color components (not yet implemented)."""
        pass

    def editUnMirror(self):
        """Stub: remove mirroring (not yet implemented)."""
        pass

    def editMirrorHorizontal(self):
        """Stub: horizontally mirror the image (not yet implemented)."""
        pass

    def editMirrorVertical(self):
        """Stub: vertically mirror the image (not yet implemented)."""
        pass

    def helpAbout(self):
        """Show the About box with version and platform information."""
        QMessageBox.about(
            self,
            "About Image Changer",
            """<b>Image Changer</b> v {0}
            <p>Copyright &copy; 2008-10 Qtrac Ltd.
            All rights reserved.
            <p>This application can be used to perform
            simple image manipulations.
            <p>Python {1} - Qt {2} - PyQt {3} on {4}""".format(
                __version__, platform.python_version(),
                QT_VERSION_STR, PYQT_VERSION_STR,
                platform.system()))

    def helpHelp(self):
        """Open the HTML help window."""
        form = helpform.HelpForm("index.html", self)
        form.show()

    def loadInitialFile(self):
        """Stub: load the most recent file on startup (not yet implemented)."""
        pass

    def updateFileMenu(self):
        """Stub: rebuild the File menu with recent files (not yet implemented)."""
        pass

    def showImage(self, percent=None):
        """Display the current image scaled by the zoom spin box.

        fixed: connected to QSpinBox.valueChanged[int], which passes the new
        value -- the old zero-argument signature raised TypeError on every
        zoom change.  *percent* is accepted (and currently ignored; the spin
        box is read directly) for backward compatibility.
        """
        if self.image.isNull():
            return
        factor = self.zoomSpinBox.value() / 100.0
        width = self.image.width() * factor
        height = self.image.height() * factor
        image = self.image.scaled(width, height, Qt.KeepAspectRatio)
        self.imageLable.setPixmap(QPixmap.fromImage(image))

    def okToContinue(self):
        """Ask to save unsaved changes; return False if the user cancels."""
        if self.dirty:
            reply = QMessageBox.question(
                self,
                "Image Changer - Unsaved Changes",
                "Save unsaved changes?",
                QMessageBox.Yes | QMessageBox.No | QMessageBox.Cancel)
            if reply == QMessageBox.Cancel:
                return False
            elif reply == QMessageBox.Yes:
                return self.fileSave()
        return True

    def updateStatus(self, message):
        """Show *message* in the status bar/log and refresh the window title."""
        self.statusBar().showMessage(message, 5000)
        self.logListWidget.addItem(message)
        if self.filename:
            self.setWindowTitle("Image Changer - {}[*]".format(
                os.path.basename(self.filename)))
        elif not self.image.isNull():
            self.setWindowTitle("Image Changer - Unnamed[*]")
        else:
            self.setWindowTitle("Image Changer[*]")
        self.setWindowModified(self.dirty)
def main():
    """Create the QApplication, show the main window and run the event loop."""
    app = QApplication(sys.argv)
    # Organization/application names determine where QSettings stores state.
    app.setOrganizationName("Qtrac Ltd.")
    app.setOrganizationDomain("qtrac.eu")
    app.setApplicationName("Image Changer")
    app.setWindowIcon(QIcon(":/icon.png"))
    form = MainWindow()
    form.show()
    app.exec_()


main()  # NOTE(review): also runs on import; consider an `if __name__ == "__main__":` guard
|
import sys
import os
import math
import re
import csv
import collections
from subprocess import check_output
from subprocess import call
from subprocess import Popen
from multiprocessing import Pool
from collections import OrderedDict
from datetime import datetime

import numpy as np
import pandas as pd
script_path = os.path.dirname(os.path.realpath(__file__))+'/'
sys.path.append(script_path)
def count_aligned_reads_bedgraph(bam):
    '''Count primary aligned reads in a bam; return {bam: 1/million_reads}.

    The value is the reads-per-million scale factor later passed to
    genomeCoverageBed's -scale option.'''
    # -F 0x904 excludes unmapped, secondary and supplementary alignments.
    command = 'samtools view -F 0x904 -c {0}'.format(bam)
    aligned_reads = check_output(command.split(), shell=False)
    aligned_reads = float(aligned_reads)/1000000
    print '\n'+bam
    print "{0} million reads".format("%.2f" % aligned_reads)
    return {bam:1/aligned_reads}
def run_genomeCoverageBed(command):
    '''Run one genomeCoverageBed shell command, appending its stdout and
    stderr to error.log in the current directory; return the exit code.'''
    with open('error.log', 'a') as log_file:
        child = Popen(command, shell=True, stdout=log_file, stderr=log_file)
        return child.wait()
def generate_scaled_bedgraphs2(directory, untagged, organism='crypto', start_only=False, stranded=False, threads=1, expand=False, bam_list=None):
if 'crypto' in organism.lower():
genome = script_path+'GENOMES/crypto_for_bedgraph.genome'
elif 'cerev' in organism.lower():
genome = script_path+'GENOMES/S288C/S288C_for_bedgraph.genome'
elif 'pombe' in organism.lower():
genome = script_path+'GENOMES/POMBE/Sp_for_bg.genome'
elif 'albicans' in organism.lower() or 'candida' in organism.lower():
genome = script_path+'GENOMES/C_albicans_for_bg.genome'
else:
genome = organism
untagged_other_dir = False
if bam_list is None:
bam_list = [x for x in os.listdir(directory) if x.endswith("sorted.bam") or x.endswith("sortedByCoord.out.bam")]
untagged_bams = [x for x in bam_list if untagged in x]
if len(untagged_bams) == 1:
untagged = untagged_bams[0]
elif len(untagged_bams) > 1:
print "Too many matches for untagged"
return None
else:
untagged = untagged
untagged_other_dir = True
bam_list.append(untagged)
else:
untagged_bams = [x for x in bam_list if untagged in x]
if len(untagged_bams) == 0:
try:
untagged = [x for x in os.listdir(directory) if untagged in x]
except IndexError:
untagged = untagged
untagged_other_dir = True
p = Pool(threads)
totals = {}
entries = p.map(count_aligned_reads_bedgraph, bam_list)
for x in entries:
totals.update(x)
commands = {}
if expand:
flag = '-d'
else:
flag = '-bga'
for n, bam in enumerate(bam_list):
command = 'genomeCoverageBed -ibam {0} -g {1} {2} -scale {3} '.format(bam, genome, flag, "%.2f" % totals[bam])
commands[bam] = command
if start_only is True:
commands = [x+'-5 ' for x in comands]
if stranded is True:
stranded_cmds = {}
for bam, command in commands.iteritems():
stranded_cmds[bam] = []
stranded_cmds[bam].append(command+'-strand + ')
stranded_cmds[bam].append(command+'-strand - ')
commands = stranded_cmds
final_cmds = []
for bam, command in commands.iteritems():
if type(command) == list:
if untagged_other_dir and bam == untagged:
new_name = base_dir + untagged.split('/')[-1].split('.bam')[0]
final_cmds.append(command[0]+'> {0}_plus.bedgraph'.format(new_name))
final_cmds.append(command[1]+'> {0}_minus.bedgraph'.format(new_name))
else:
final_cmds.append(command[0]+'> {0}_plus.bedgraph'.format(bam.split('.bam')[0]))
final_cmds.append(command[1]+'> {0}_minus.bedgraph'.format(bam.split('.bam')[0]))
else:
if untagged_other_dir and bam == untagged:
new_name = base_dir + untagged.split('/')[-1].split('.bam')[0]
final_cmds.append(command+'> {0}.bedgraph'.format(new_name))
else:
final_cmds.append(command+'> {0}.bedgraph'.format(bam.split('.bam')[0]))
p = Pool(threads)
codes = p.map(run_genomeCoverageBed, final_cmds)
return codes
## Deprecated
def generate_scaled_bedgraphs(directory, organism='crypto', start_only=False, stranded=False, file_provided=False):
    '''Deprecated: generate scaled bedgraphs for each sorted bam in a directory.

    NOTE(review): unlike generate_scaled_bedgraphs2, the raw million-read
    count (not its reciprocal) is passed to -scale, and an unrecognized
    organism leaves `genome` undefined (NameError) -- both presumably
    reasons this function was deprecated; confirm before reuse.'''
    # Resolve the chromosome-sizes (.genome) file for the organism.
    if 'crypto' in organism.lower():
        genome = '/home/jordan/GENOMES/crypto_for_bedgraph.genome'
    elif 'cerev' in organism.lower():
        genome = '/home/jordan/GENOMES/S288C/S288C_for_bedgraph.genome'
    elif 'pombe' in organism.lower():
        genome = '/home/jordan/GENOMES/POMBE/Sp_for_bg.genome'
    elif 'albicans' in organism.lower() or 'candida' in organism.lower():
        genome = '/home/jordan/GENOMES/C_albicans_for_bg.genome'
    # Collect bam files, or treat `directory` as a single bam path.
    bam_list = []
    if not file_provided:
        for file in os.listdir(directory):
            if file.lower().endswith("sorted.bam"):
                bam_list.append(directory+file)
    else:
        bam_list.append(directory)
    # Count primary aligned reads per bam (in millions).
    total_aligned = []
    for bam in bam_list:
        print bam
        command = 'samtools view -F 0x904 -c {0}'.format(bam)
        aligned_reads = check_output(command.split(), shell=False)
        total_aligned.append(aligned_reads)
        print "Total aligned reads in "+bam
        print aligned_reads
    total_aligned = [float(x)/1000000 for x in total_aligned]
    # Run genomeCoverageBed per bam (and per strand when stranded).
    for n in range(len(bam_list)):
        out = bam_list[n].split('/')[-1].split('.')[0]
        if stranded is True:
            if start_only is False:
                command1 = 'genomeCoverageBed -ibam {0} -g {1} -bga -strand + -scale {2}'.format(bam_list[n], genome, str(total_aligned[n]))
            elif start_only is True:
                # -5 counts only read 5' ends
                command1 = 'genomeCoverageBed -ibam {0} -g {1} -bg -strand + -5 -scale {2}'.format(bam_list[n], genome, str(total_aligned[n]))
            print command1
            bg1 = check_output(command1.split(), shell=False)
            with open('{0}_plus.bedgraph'.format(out),'w') as fout:
                fout.write(bg1)
            if start_only is False:
                command2 = 'genomeCoverageBed -ibam {0} -g {1} -bga -strand - -scale {2}'.format(bam_list[n], genome, str(total_aligned[n]))
            elif start_only is True:
                command2 = 'genomeCoverageBed -ibam {0} -g {1} -bg -strand - -5 -scale {2}'.format(bam_list[n], genome, str(total_aligned[n]))
            bg2 = check_output(command2.split(), shell=False)
            with open('{0}_minus.bedgraph'.format(out),'w') as fout:
                fout.write(bg2)
        else:
            if start_only is False:
                command = 'genomeCoverageBed -ibam {0} -g {1} -bga -scale {2}'.format(bam_list[n], genome, str(total_aligned[n]))
            else:
                command = 'genomeCoverageBed -ibam {0} -g {1} -bg -5 -scale {2}'.format(bam_list[n], genome, str(total_aligned[n]))
            print command
            bg = check_output(command.split(), shell=False)
            with open('{0}.bedgraph'.format(out),'w') as fout:
                fout.write(bg)
def list_bedgraphs(directory):
    '''Pair up stranded bedgraphs found in *directory*.

    Returns zip of (plus, minus) full paths, matched by sorted order.
    *directory* must end with a path separator (paths are concatenated).'''
    names = os.listdir(directory)
    plus_list = sorted(directory + f for f in names
                       if f.lower().endswith("plus.bedgraph"))
    minus_list = sorted(directory + f for f in names
                        if f.lower().endswith("minus.bedgraph"))
    return zip(plus_list, minus_list)
############################################################
## Read bedgraph and sort by transcript into dictionary ##
############################################################
def build_bedgraph_dict(transcript_dict, bedgraph_file):
    '''Function for sorting bedgraph files by gene.

    Parameters
    ----------
    bedgraph_file : str
        Bedgraph file
    transcript_dict : dict
        Transcript dict generated by build_transcript_dict (in SeqTools module)

    Output
    -------
    sorted bedgraph : file
        Bedgraph file sorted by gene (ends in _by_gene.bedgraph).

    NOTE(review): needs module-level imports of re, collections and
    datetime; also returns None -- the sorted OrderedDict built at the
    end is discarded.'''
    print datetime.now()
    bedgraph_dict = {}
    # Index transcripts by chromosome so each bedgraph line is only tested
    # against transcripts on its own chromosome.
    transcript_by_chr = {}
    for transcript, coords in transcript_dict.iteritems():
        chromosome = coords[3]
        bedgraph_dict[transcript] = [[],[]]
        if chromosome in transcript_by_chr:
            transcript_by_chr[chromosome].append(transcript)
        else:
            transcript_by_chr[chromosome] = []
            transcript_by_chr[chromosome].append(transcript)
    with open(bedgraph_file, "r") as bedgraph:
        for line in bedgraph:
            columns = re.split(r'\t', line)
            bed_chr = columns[0].strip()
            # Translate roman-numeral chromosome names (S. pombe style).
            rom_lat = {'I':'chr1','II':'chr2','III':'chr3','MT':'MT'}
            if bed_chr in rom_lat:
                bed_chr = rom_lat[bed_chr]
            bed_position = int(columns[1])
            bed_peak = float(columns[3])
            if bed_chr in transcript_by_chr:
                transcript_list = transcript_by_chr[bed_chr]
                for transcript in transcript_list:
                    #Dictionary for bedgraph. Values will be [list of genomic positions][reads starting at that position]
                    if bed_chr == transcript_dict[transcript][3].strip() and bed_position > transcript_dict[transcript][0] and bed_position < transcript_dict[transcript][1]:
                        bedgraph_dict[transcript][0].append(bed_position)
                        bedgraph_dict[transcript][1].append(bed_peak)
    # Append three lines per transcript: name, positions, values.
    with open("{0}_by_gene.bedgraph".format(bedgraph_file.split("/")[-1].split(".")[0]), "a") as fout:
        for transcript, values in bedgraph_dict.iteritems():
            fout.write(transcript+"\n")
            coord_list = map(str, bedgraph_dict[transcript][0])
            coord_line = "\t".join(coord_list)
            fout.write(coord_line+"\n")
            count_list = map(str, bedgraph_dict[transcript][1])
            count_line = "\t".join(count_list)
            fout.write(count_line+"\n")
    bedgraph_dict = collections.OrderedDict(sorted(bedgraph_dict.items()))
    print datetime.now()
def read_sorted_bedgraph(bedgraph_dict_output, transcript_dict, organism=None):
    '''Function for loading sorted bedgraph files.

    Parameters
    ----------
    bedgraph_dict_output : str
        File output by build_bedgraph_dict (ends in _by_gene.bedgraph)
    transcript_dict : dict
        Transcript dict generated by build_transcript_dict (in SeqTools module)
    organism : str, default ``None``
        change to 'pombe' if working with S. pombe

    Returns
    -------
    bg_dict : dict
        Dictionary where keys are transcript name and values are pandas series of bedgraph data'''
    bg_dict = {}
    count = 0
    dtype = [('coord', int), ('height', float)]  # NOTE(review): unused
    # The input cycles through 3-line records written by build_bedgraph_dict:
    # transcript name / tab-separated coordinates / tab-separated heights.
    with open(bedgraph_dict_output,'r') as f:
        n = 0
        for line in f:
            n += 1
            if len(line) > 1:
                #Read the transcript line
                if n%3 == 1:
                    tx = line.strip()
                    count += 1
                    #print count
                    # Append the default isoform suffix when missing.
                    if tx[-2] != 'T' and organism != 'pombe':
                        tx = tx+'T0'
                #Read the coordinate line
                elif n%3 == 2:
                    coords = map(int, line.strip().split('\t'))
                #Read the values line
                elif n%3 == 0:
                    heights = map(float, line.strip().split('\t'))
                    if tx not in transcript_dict:
                        pass
                    else:
                        # Positions absent from the record carry zero signal.
                        all_coords = set(range(min(coords),max(coords)))
                        missing = all_coords.difference(coords)
                        coords = coords + list(missing)
                        #Fill in missing coordinates with zeros
                        zero_fill = [0]*len(missing)
                        heights = heights + zero_fill
                        #Create a pandas series with all coordinates and sort so zeros are inserted appropriately
                        entry = pd.Series(heights, index=coords)
                        entry.sort_index(inplace=True)
                        # Restrict to the transcript's annotated span.
                        selected_range = range(transcript_dict[tx][0],transcript_dict[tx][1])
                        entry = entry[entry.index.isin(selected_range)]
                        bg_dict[tx] = entry
    return bg_dict
def decollapse_bedgraph(bedgraph):
    '''Expand block-format bedgraph lines into one line per base.

    Writes <name>_full.bedgraph next to the input: each multi-base block
    becomes a run of single-base intervals carrying the block's value;
    single-base lines are copied verbatim.'''
    expanded_name = bedgraph.split('.bedgraph')[0]+'_full.bedgraph'
    with open(bedgraph) as src:
        with open(expanded_name, 'w') as dest:
            for line in src:
                fields = line.split('\t')
                chrom = fields[0]
                start = int(fields[1])
                end = int(fields[2])
                value = fields[3].strip()
                if end - start == 1:
                    # Already single-base: copy unchanged.
                    dest.write(line)
                else:
                    for pos in range(start, end):
                        dest.write('\t'.join([chrom, str(pos), str(pos+1), value+'\n']))
def collapse_bedgraph(bedgraph):
    '''Collapse a per-base bedgraph back into blocks of equal value.

    Rewrites *bedgraph* in place via a .tmp file.

    NOTE(review): the final run of values is never flushed after the loop,
    so the last block of each file appears to be dropped; the value-change
    branch also writes the current line's `end` rather than the previous
    line's, and prev_end is not advanced while a run continues -- confirm
    these are intended before relying on exact block boundaries.'''
    new_bedgraph = bedgraph+'.tmp'
    counter1 = 0
    counter2 = 0
    with open(bedgraph) as f:
        with open(new_bedgraph, 'w') as fout:
            for line in f:
                data = line.split('\t')
                chrom = data[0]
                start = data[1]
                end = data[2]
                value = data[3].strip()
                if counter1 == 0:
                    # First line initializes the running block.
                    prev_chrom = chrom
                    prev_value = value
                    block_start = start
                    prev_end = end
                else:
                    if prev_chrom != chrom:
                        # Chromosome changed: emit the open block.
                        new_line = [prev_chrom, block_start, prev_end, prev_value+'\n']
                        new_line = '\t'.join(new_line)
                        fout.write(new_line)
                        counter2 += 1
                        prev_chrom = chrom
                        prev_value = value
                        prev_end = end
                        block_start = start
                    elif prev_chrom == chrom and prev_value == value:
                        # Same value: the current block keeps growing.
                        pass
                    elif prev_chrom == chrom and prev_value != value:
                        # Value changed: emit the block, start a new one.
                        new_line = [chrom, block_start, end, prev_value+'\n']
                        new_line = '\t'.join(new_line)
                        fout.write(new_line)
                        counter2 += 1
                        prev_value = value
                        block_start = start
                        prev_end = end
                counter1 += 1
    #print counter1
    #print counter2
    # Replace the original file with the collapsed version.
    os.remove(bedgraph)
    os.rename(bedgraph+'.tmp', bedgraph)
def bedgraph_reader(bedgraph, chromosomes=None):
    '''Load a 4-column bedgraph into a DataFrame indexed by "chrom:start".

    chromosomes : optional list restricting rows to those chromosomes.'''
    frame = pd.read_csv(bedgraph, sep='\t', header=None,
                        names=['chromosome', 'start', 'end', 'RPM'])
    if chromosomes is not None:
        frame = frame[frame['chromosome'].isin(chromosomes)]
    frame.index = frame['chromosome'].str.cat(frame['start'].astype(str), sep=':')
    return frame
def write_bedgraph(dataframe, name):
    '''Write a bedgraph DataFrame to *name* as tab-separated text,
    without header or index.'''
    dataframe.to_csv(name, sep='\t', index=False, header=False)
def bedgraph_reader2(bedgraph, chromosomes=None, write=False):
    '''Load an expanded per-base (3-column) bedgraph and synthesize the
    end column, producing the standard 4-column layout indexed by
    "chrom:start".

    write : when True, rewrite the file in 4-column form via write_bedgraph.'''
    frame = pd.read_csv(bedgraph, sep='\t', header=None,
                        names=['chromosome', 'start', 'RPM'])
    frame.loc[:, 'end'] = frame['start'] + 1  # each row covers one base
    frame = frame[['chromosome', 'start', 'end', 'RPM']]
    if chromosomes is not None:
        frame = frame[frame['chromosome'].isin(chromosomes)]
    frame.index = frame['chromosome'].str.cat(frame['start'].astype(str), sep=':')
    if write:
        write_bedgraph(frame, bedgraph)
    return frame
def combine_stranded_bedgraph(directory, file_provided=False):
    '''Merge matching plus/minus bedgraph pairs into one file with the
    minus strand negated.

    directory : directory to scan for *plus.bedgraph files, or -- with
        file_provided=True -- the path of a bam whose plus/minus
        bedgraphs live alongside it.
    Output name is the shared prefix ending in "_" (no extension,
    preserving the historical behavior).'''
    bg_pairs = []
    if not file_provided:
        for file in os.listdir(directory):
            if file.endswith('plus.bedgraph'):
                # BUG FIX: listdir names were used relative to the cwd;
                # join them with the directory.
                bg_pairs.append((directory+file,
                                 directory+file.split('plus.bedgraph')[0]+'minus.bedgraph'))
    else:
        name1 = directory.split('.bam')[0]+'plus.bedgraph'
        name2 = directory.split('.bam')[0]+'minus.bedgraph'
        bg_pairs.append((name1, name2))
    for pair in bg_pairs:
        name = pair[0].split('.bedgraph')[0].split('plus')[0]
        if not name.endswith('_'):
            name = name+'_'
        plus = bedgraph_reader(pair[0])
        minus = bedgraph_reader(pair[1])
        # BUG FIX: columns are named, not positional -- `minus[3]` and
        # `sort_values([0,1])` raised KeyError.  Negate the minus strand
        # and sort by genomic position.
        minus['RPM'] = minus['RPM'].multiply(-1)
        # DataFrame.append was removed in pandas 2.0; use concat.
        combined = pd.concat([plus, minus])
        combined = combined.sort_values(['chromosome', 'start'])
        write_bedgraph(combined, name)
##
def normalize_bedgraph(tagged, untagged, smooth=False, last=False):
    '''Divide a tagged sample's per-base RPM by the untagged control's.

    tagged, untagged : paths to expanded per-base bedgraphs; both are
        rewritten in 4-column form (the untagged one only when last=True).
    smooth : when False, the output is block-collapsed afterwards.
    last : forwarded as the untagged reader's write flag.

    Writes <tagged>_norm.bedgraph.'''
    tagged_RPM = bedgraph_reader2(tagged, write=True)
    untagged_RPM = bedgraph_reader2(untagged, write=last)
    # Align positions via the shared "chrom:start" index.
    total = tagged_RPM.merge(untagged_RPM, right_index=True, left_index=True, how='left')
    total.loc[:,'norm RPM'] = total['RPM_x']/total['RPM_y']
    normalized = total[['chromosome_x','start_x','end_x','norm RPM']]
    # BUG FIX: np.NaN was removed in NumPy 2.0 -- use np.nan.  Drops
    # divisions by zero (inf) and positions missing from the control.
    normalized = normalized.replace([np.inf, -np.inf], np.nan).dropna(how='any')
    normalized.to_csv(tagged.split('.bedgraph')[0]+'_norm.bedgraph', sep='\t', index=False, header=False)
    if smooth is False:
        collapse_bedgraph(tagged.split('.bedgraph')[0]+'_norm.bedgraph')
def smooth_bedgraphs(bedgraph_list, window):
    '''Apply a centered rolling mean of *window* bp to each bedgraph.

    For each input, writes <name>_<window>bp_smooth.bedgraph and
    block-collapses it.'''
    for bedgraph in bedgraph_list:
        bg_df = bedgraph_reader(bedgraph)
        smoothed_chroms = []
        for chrom in set(bg_df['chromosome']):
            chrom_df = bg_df[bg_df['chromosome'] == chrom]
            chrom_df = chrom_df.sort_values(['start'])
            new_intensities = chrom_df['RPM'].rolling(window=window, center=True).mean()
            chrom_df.loc[:,'RPM'] = new_intensities
            # rolling() leaves NaN at chromosome edges; drop those rows.
            smoothed_chroms.append(chrom_df.dropna(how='any'))
        # BUG FIX: DataFrame.append was removed in pandas 2.0; concatenate
        # instead (guarding the empty case, which concat rejects).
        if smoothed_chroms:
            new_bg = pd.concat(smoothed_chroms)
        else:
            new_bg = pd.DataFrame(columns=bg_df.columns)
        new_bg = new_bg.sort_values(['chromosome','start'])
        new_bg.loc[:,'start'] = new_bg['start'].apply(int)
        new_bg.loc[:,'end'] = new_bg['end'].apply(int)
        out_name = bedgraph.split('.bedgraph')[0]+'_{0}bp_smooth.bedgraph'.format(str(window))
        new_bg.to_csv(out_name, sep='\t', index=False, header=False)
        collapse_bedgraph(out_name)
def background_subtraction(bedgraph):
    '''Subtract a 5 kb rolling-mean background from a per-base bedgraph.

    Writes <name>_sub.bedgraph with negative values clamped to zero,
    then block-collapses it.'''
    bg_df = bedgraph_reader2(bedgraph)
    new_bg = pd.DataFrame(columns=bg_df.columns)
    print "Calculating background..."
    for chrom in set(bg_df['chromosome']):
        chrom_df = bg_df[bg_df['chromosome'] == chrom]
        chrom_df = chrom_df.sort_values(['start'])
        # 5 kb centered rolling mean as the background estimate.
        new_intensities = chrom_df['RPM'].rolling(window=5000, center=True).mean()
        chrom_df.loc[:,'RPM'] = new_intensities
        # Pad the half-window edges with the nearest computed value
        # (column 2 is 'end' after reader2's reordering -- TODO confirm the
        # intended column is the RPM one).
        chrom_df.iloc[:2500,2] = chrom_df.iloc[2500,2]
        chrom_df.iloc[-2499:,2] = chrom_df.iloc[-2500,2]
        new_bg = new_bg.append(chrom_df.dropna(how='any'))
    print "Adjusting values..."
    # Subtract the background position-wise.
    bg_df.loc[:,'RPM'] = bg_df['RPM'] - new_bg['RPM']
    bg_df.loc[:,'end'] = bg_df['start'] + 1
    final_bg = bg_df[['chromosome','start','end','RPM']]
    # Clamp over-subtracted (negative) positions to zero.
    neg_index = final_bg[final_bg['RPM'] < 0].index
    final_bg.loc[neg_index,['RPM']] = 0
    final_bg.to_csv(bedgraph.split('.bedgraph')[0]+'_sub.bedgraph', sep='\t', header=False, index=False)
    print "Collapsing bedgraph..."
    collapse_bedgraph(bedgraph.split('.bedgraph')[0]+'_sub.bedgraph')
import yaml
import shutil
import os
def main():
    """Copy images into scored/<score>/ directories according to scores.yml.

    scores.yml maps each score to a list of image file names located in
    img/.  Returns the parsed mapping.
    """
    with open("scores.yml") as f:
        # BaseLoader parses everything as plain strings (no object construction).
        res = yaml.load(f, Loader=yaml.BaseLoader)
    for score, files in res.items():
        dirpath = f"scored/{score}/"
        print(dirpath)
        # exist_ok avoids the check-then-create race of the old isdir test.
        os.makedirs(dirpath, exist_ok=True)
        for filename in files:
            # BUG FIX: the loop variable was unused and a literal placeholder
            # path was copied repeatedly; copy each listed image instead.
            shutil.copy(f"img/{filename}", f"{dirpath}{filename}")
    return res


if __name__ == "__main__":
    main()
|
__version__ = "$Id$"
import windowinterface, WMEVENTS
from usercmd import *
from wndusercmd import *
# @win32doc|TopLevelDialog
# There is a one-to-one correspondence between a TopLevelDialog
# instance and a document, and between a TopLevelDialog
# instance and an MDIFrameWnd. The document level commands
# are enabled. This class has access to the document and
# can display its various views and its source
class TopLevelDialog:
    '''Top-level document dialog mixin.

    NOTE(review): this class is a mixin -- it relies on attributes and
    methods (self.window, self.commandlist, self.viewcommandlist,
    self.player, self.source, self.getsettingsdict, callbacks, ...)
    supplied by the classes it is combined with; confirm against the
    subclasses before refactoring.'''
    def __init__(self):
        pass

    def set_commandlist(self):
        # Extend the view commands with player transport controls.
        self.viewcommandlist = self.viewcommandlist + [
            PAUSE(callback = (self.pause_callback, ())),
            STOP(callback = (self.stop_callback, ())),
            ]
        if __debug__:
            self.commandlist.append(SCHEDDUMP(callback = (self.__dump, ())))

    # Debug builds only: dump the player's scheduler state.
    if __debug__:
        def __dump(self):
            self.player.scheduler.dump()

    def show(self):
        # Create the document window once; repeated calls are no-ops.
        if self.window is not None:
            return
        dict = self.getsettingsdict()  # NOTE: shadows the builtin 'dict'
        adornments = {
            'pulldown': dict
            }
        self.window = windowinterface.newdocument(self,
            adornments = adornments,commandlist = self.commandlist)
        import Player
        self.setplayerstate(Player.STOPPED)

    def hide(self):
        # Close and forget the document window, if any.
        if self.window is None:
            return
        self.window.close()
        self.window = None

    def setbuttonstate(self, command, showing):
        # Reflect a command's toggle state in the toolbar.
        self.window.set_toggle(command, showing)

    def setplayerstate(self, state):
        self.window.setplayerstate(state)
        self.update_undocommandlist()

    def setsettingsdict(self, dict):
        if self.window:
            self.window.settoolbarpulldowns(dict)

    def showsource(self, source = None, optional=0, readonly = 0):
        # source=None closes the source view; otherwise create or refresh
        # the read-only source text window.
        if source is None:
            if self.source is not None:
                self.source.close()
                self.source = None
        else:
            if self.source is not None:
                self.source.settext(source)
            else:
                self.source = self.window.textwindow(source, readonly=1)
                self.source.set_mother(self)

    # doesn't seem to work
    # kk: you must pass a context string as a second arg
    def setcommands(self, commandlist):
        self.window.set_commandlist(commandlist,'document')

    def do_edit(self, tmp):
        '''Open *tmp* in an external editor and report whether it changed.

        Calls self.edit_finished_callback() with no argument when nothing
        changed (or the editor could not start), or with the file name
        when the file was modified.'''
        import os
        # use only notepad for now
        editor='Notepad'
        stat1 = os.stat(tmp)
        import win32api,win32con
        try:
            win32api.WinExec('%s %s' % (editor, tmp),win32con.SW_SHOW)
        except:
            # no editor found
            # NOTE(review): bare except also hides unrelated errors.
            self.edit_finished_callback()
        stat2 = os.stat(tmp)
        from stat import ST_INO, ST_DEV, ST_MTIME, ST_SIZE
        # Compare inode/device/mtime/size to detect modification.
        if stat1[ST_INO] == stat2[ST_INO] and \
           stat1[ST_DEV] == stat2[ST_DEV] and \
           stat1[ST_MTIME] == stat2[ST_MTIME] and \
           stat1[ST_SIZE] == stat2[ST_SIZE]:
            # nothing changed
            self.edit_finished_callback()
            return
        self.edit_finished_callback(tmp)
|
# Copyright (c) 2020 DDN. All rights reserved.
# Use of this source code is governed by a MIT-style
# license that can be found in the LICENSE file.
"""RPC facility for use in ChromaService services.
The outward facing parts of this module are the `ServiceRpc` class and the
RpcWaiter.initialize/shutdown methods.
Concurrent RPC invocations from a single process are handled by a global
instance of RpcWaiter, which requires explicit initialization and shutdown.
This is taken care of if your code is running within the `chroma_service`
management command.
"""
import logging
import socket
import threading
import uuid
import django
import errno
import os
import time
import jsonschema
import kombu
import kombu.pools
from kombu.common import maybe_declare
from kombu.mixins import ConsumerMixin
from kombu.messaging import Queue, Producer
from kombu.entity import TRANSIENT_DELIVERY_MODE
from chroma_core.services.log import log_register
from chroma_core.services import _amqp_connection, _amqp_exchange, dbutils
# JSON schema every incoming RPC request message must satisfy.
REQUEST_SCHEMA = {
    "type": "object",
    "properties": {
        "request_id": {"type": "string"},
        "method": {"type": "string"},
        "args": {"type": "array"},
        "kwargs": {"type": "object"},
        "response_routing_key": {"type": "string"},
    },
    "required": ["request_id", "method", "args", "kwargs", "response_routing_key"],
}
# JSON schema every RPC response message must satisfy; "result" is
# intentionally left unconstrained.
RESPONSE_SCHEMA = {
    "type": "object",
    "properties": {
        "exception": {"anyOf": [{"type": "string"}, {"type": "null"}]},
        "result": {},
        "request_id": {"type": "string"},
    },
    "required": ["exception", "result", "request_id"],
}
# How long a caller waits for an RPC response before giving up
# (presumably seconds -- confirm against the waiter implementation).
RESPONSE_TIMEOUT = 300
"""
Max number of lightweight RPCs that can be in flight concurrently. This must
be well within the rabbitmq server's connection limit
"""
LIGHTWEIGHT_CONNECTIONS_LIMIT = 10
RESPONSE_CONN_LIMIT = 10
# Connection pools; populated by the module's initialization code.
tx_connections = None
rx_connections = None
lw_connections = None
log = log_register("rpc")
class RpcError(Exception):
    """Raised on the caller's side when the remote method raised.

    description : human-readable message describing the remote failure.
    exception_type : class name of the remote exception (stored as
        remote_exception_type).
    traceback : optional remote traceback string (``traceback`` kwarg).
    """

    def __init__(self, description, exception_type, **kwargs):
        # fixed: was super(Exception, self), which skips Exception in the
        # MRO and only worked because Exception inherits BaseException's
        # __init__ unchanged.
        super(RpcError, self).__init__(description)
        self.description = description
        self.remote_exception_type = exception_type
        self.traceback = kwargs.get("traceback")

    def __str__(self):
        return "RpcError: %s" % self.description
class RpcTimeout(Exception):
    """Raised by RPC callers when no response arrives within the timeout."""
    pass
class RunOneRpc(threading.Thread):
    """Handle a single incoming RPC in a new thread, and send the
    response (result or exception) from the execution thread."""

    def __init__(self, rpc, body, response_conn_pool):
        """
        :param rpc: ServiceRpcInterface instance used for the local dispatch
        :param body: Validated request body (see REQUEST_SCHEMA)
        :param response_conn_pool: kombu connection pool for publishing the response
        """
        super(RunOneRpc, self).__init__()
        self.rpc = rpc
        self.body = body
        self._response_conn_pool = response_conn_pool

    def run(self):
        try:
            result = {
                "result": self.rpc._local_call(self.body["method"], *self.body["args"], **self.body["kwargs"]),
                "request_id": self.body["request_id"],
                "exception": None,
            }
        except Exception as e:
            import sys
            import traceback

            # Capture the traceback of the exception we just caught.
            backtrace = "\n".join(traceback.format_exception(*sys.exc_info()))

            # Utility to generate human readable errors
            def translate_error(err):
                from socket import error as socket_error

                if type(err) == socket_error:
                    return "Cannot reach server"

                return str(err)

            result = {
                "request_id": self.body["request_id"],
                "result": None,
                "exception": translate_error(e),
                "exception_type": type(e).__name__,
                "traceback": backtrace,
            }
            log.error("RunOneRpc: exception calling %s: %s" % (self.body["method"], backtrace))
        finally:
            # Worker threads must not leak their per-thread DB connection.
            django.db.connection.close()

        with self._response_conn_pool[_amqp_connection()].acquire(block=True) as connection:

            def errback(exc, _):
                log.info("RabbitMQ rpc got a temporary error. May retry. Error: %r", exc, exc_info=1)

            retry_policy = {"max_retries": 10, "errback": errback}
            connection.ensure_connection(**retry_policy)
            with Producer(connection) as producer:
                maybe_declare(_amqp_exchange(), producer.channel, True, **retry_policy)
                # Fix: the original passed a misspelled `immedate=True` kwarg,
                # which kombu treated as an opaque message property rather than
                # the AMQP `immediate` flag (a flag RabbitMQ >= 3.0 no longer
                # supports anyway), so it has been dropped.
                producer.publish(
                    result,
                    serializer="json",
                    routing_key=self.body["response_routing_key"],
                    delivery_mode=TRANSIENT_DELIVERY_MODE,
                    retry=True,
                    retry_policy=retry_policy,
                    mandatory=True,
                )
class RpcServer(ConsumerMixin):
    """Consume RPC requests for one service and dispatch each valid request
    to a RunOneRpc worker thread."""

    def __init__(self, rpc, connection, service_name, serialize=False):
        """
        :param rpc: A ServiceRpcInterface instance
        :param connection: kombu Connection used by ConsumerMixin to consume messages
        :param service_name: Service name; requests are consumed from "<service_name>.requests"
        :param serialize: If True, then process RPCs one after another in a single thread
        rather than running a thread for each RPC.
        """
        super(RpcServer, self).__init__()
        # NOTE(review): `serialize` is stored but never consulted in this
        # class; dispatch below is always threaded — confirm intended behavior.
        self.serialize = serialize
        self.rpc = rpc
        self.connection = connection
        self.queue_name = service_name
        self.request_routing_key = "%s.requests" % self.queue_name
        # Pool used by RunOneRpc worker threads to publish responses.
        self._response_conn_pool = kombu.pools.Connections(limit=RESPONSE_CONN_LIMIT)

    def get_consumers(self, Consumer, channel):
        # ConsumerMixin hook: a single consumer bound to the request queue.
        return [
            Consumer(
                queues=[
                    Queue(
                        self.request_routing_key, _amqp_exchange(), routing_key=self.request_routing_key, durable=False
                    )
                ],
                callbacks=[self.process_task],
            )
        ]

    def process_task(self, body, message):
        # Ack immediately so a malformed message is not redelivered forever.
        message.ack()
        try:
            jsonschema.validate(body, REQUEST_SCHEMA)
        except jsonschema.ValidationError as e:
            # Don't even try to send an exception response, because validation failure
            # breaks our faith in request_id and response_routing_key
            log.error("Invalid RPC body: %s" % e)
        else:
            RunOneRpc(self.rpc, body, self._response_conn_pool).start()

    def stop(self):
        # ConsumerMixin polls should_stop and exits its consume loop.
        self.should_stop = True
class ResponseWaitState(object):
    """Per-request bookkeeping for one outstanding RPC.

    The response handler fills in `result` first and then sets the
    `complete` event; when the deadline passes instead, `timeout` is
    flipped to True before `complete` is set.
    """

    def __init__(self, rpc_timeout):
        # Absolute deadline, computed once at creation time.
        self.timeout_at = time.time() + rpc_timeout
        self.result = None
        self.timeout = False
        self.complete = threading.Event()
class RpcClientResponseHandler(threading.Thread):
    """Handle responses for a particular named RPC service.

    Consumes this process's response queue and completes the
    ResponseWaitState of whichever request each response belongs to.
    """

    def __init__(self, response_routing_key):
        super(RpcClientResponseHandler, self).__init__()
        self._stopping = False
        # request_id -> ResponseWaitState for every in-flight RPC.
        self._response_states = {}
        self._response_routing_key = response_routing_key
        self._started = threading.Event()

    def wait_for_start(self):
        """During initialization, caller needs to be able to block
        on the handler thread starting up, to avoid attempting to issue
        RPCs before the response handler is available
        """
        self._started.wait()

    def start_wait(self, request_id, rpc_timeout):
        # Register interest in the response *before* the request is
        # published, so the response cannot race past us.
        log.debug("start_wait %s" % request_id)
        self._response_states[request_id] = ResponseWaitState(rpc_timeout)

    def complete_wait(self, request_id):
        """Block until the response for `request_id` arrives or is aged out.

        :return: the response body
        :raises RpcTimeout: if the wait timed out before a response arrived
        """
        log.debug("complete_wait %s" % request_id)
        state = self._response_states[request_id]
        state.complete.wait()
        log.debug("complete_wait %s triggered" % request_id)
        del self._response_states[request_id]

        if state.timeout:
            raise RpcTimeout()
        else:
            return state.result

    def _age_response_states(self):
        # FIXME: keep a sorted list by insertion time to avoid
        # the need to check all the timeouts
        # NOTE(review): iterates the dict while complete_wait (run on caller
        # threads) deletes entries from it — confirm this cannot race.
        t = time.time()
        for request_id, state in self._response_states.items():
            if not state.complete.is_set() and t > state.timeout_at:
                log.debug("Aged out RPC %s" % request_id)
                state.timeout = True
                state.complete.set()

    def timeout_all(self):
        # Fail every outstanding RPC, e.g. at shutdown.
        for request_id, state in self._response_states.items():
            state.timeout = True
            state.complete.set()

    def run(self):
        log.debug("ResponseThread.run")

        def callback(body, message):
            # log.debug(body)
            try:
                jsonschema.validate(body, RESPONSE_SCHEMA)
            except jsonschema.ValidationError as e:
                log.error("Malformed response: %s" % e)
            else:
                try:
                    state = self._response_states[body["request_id"]]
                except KeyError:
                    # Response for an RPC we no longer (or never) tracked.
                    log.debug("Unknown request ID %s" % body["request_id"])
                else:
                    state.result = body
                    state.complete.set()
            finally:
                message.ack()

        with rx_connections[_amqp_connection()].acquire(block=True) as connection:
            # Prepare the response queue
            with connection.Consumer(
                queues=[
                    kombu.messaging.Queue(
                        self._response_routing_key,
                        _amqp_exchange(),
                        routing_key=self._response_routing_key,
                        auto_delete=True,
                        durable=False,
                    )
                ],
                callbacks=[callback],
            ):
                self._started.set()
                while not self._stopping:
                    try:
                        connection.drain_events(timeout=1)
                    except socket.timeout:
                        pass
                    except IOError as e:
                        # See HYD-2551
                        if e.errno != errno.EINTR:
                            # if not [Errno 4] Interrupted system call
                            raise
                    # Expire overdue waits once per drain cycle (~1s).
                    self._age_response_states()
        log.debug("%s stopped" % self.__class__.__name__)

    def stop(self):
        log.debug("%s stopping" % self.__class__.__name__)
        self._stopping = True
class RpcClient(object):
    """
    One instance of this is created for each named RPC service
    that this process calls into.
    """

    def __init__(self, service_name, lightweight=False):
        """
        :param service_name: Name of the remote service; requests are published
            to "<service_name>.requests".
        :param lightweight: If False, start a dedicated response-handler thread
            (threaded mode); if True, each call() builds its own short-lived
            response queue and consumer instead.
        """
        self._service_name = service_name
        self._request_routing_key = "%s.requests" % self._service_name
        self._lightweight = lightweight
        if not self._lightweight:
            # One response queue per (service, hostname, pid), shared by all calls.
            self._response_routing_key = "%s.responses_%s_%s" % (self._service_name, os.uname()[1], os.getpid())
            self.response_thread = RpcClientResponseHandler(self._response_routing_key)
            self.response_thread.start()
            self.response_thread.wait_for_start()

    def stop(self):
        # Lightweight clients have no background thread to stop.
        if not self._lightweight:
            self.response_thread.stop()

    def join(self):
        if not self._lightweight:
            self.response_thread.join()

    def timeout_all(self):
        # Fail all outstanding calls (used at shutdown).
        if not self._lightweight:
            self.response_thread.timeout_all()

    def _send(self, connection, request):
        """
        Publish `request` to the service's requests queue.

        :param request: JSON serializable dict
        """
        # RPCs must not be issued from inside a DB transaction.
        dbutils.exit_if_in_transaction(log)
        log.debug("send %s" % request["request_id"])
        request["response_routing_key"] = self._response_routing_key

        def errback(exc, _):
            log.info("RabbitMQ rpc got a temporary error. May retry. Error: %r", exc, exc_info=1)

        retry_policy = {"max_retries": 10, "errback": errback}
        with Producer(connection) as producer:
            maybe_declare(_amqp_exchange(), producer.channel, True, **retry_policy)
            producer.publish(
                request,
                serializer="json",
                routing_key=self._request_routing_key,
                delivery_mode=TRANSIENT_DELIVERY_MODE,
                retry=True,
                retry_policy=retry_policy,
            )

    def call(self, request, rpc_timeout=RESPONSE_TIMEOUT):
        """Issue an RPC and block until its response arrives.

        :raises RpcTimeout: if no response arrives within rpc_timeout seconds.
        """
        request_id = request["request_id"]
        if not self._lightweight:
            # Threaded mode: register the wait first so the response cannot
            # race past the handler thread.
            self.response_thread.start_wait(request_id, rpc_timeout)
            with tx_connections[_amqp_connection()].acquire(block=True) as connection:
                self._send(connection, request)
            return self.response_thread.complete_wait(request_id)
        else:
            # Lightweight mode: a unique, auto-delete response queue per call.
            self._response_routing_key = "%s.responses_%s_%s_%s" % (
                self._service_name,
                os.uname()[1],
                os.getpid(),
                request_id,
            )
            self._complete = False

            def callback(body, message):
                # log.debug(body)
                try:
                    jsonschema.validate(body, RESPONSE_SCHEMA)
                except jsonschema.ValidationError as e:
                    log.debug("Malformed response: %s" % e)
                else:
                    self._result = body
                    self._complete = True
                finally:
                    message.ack()

            with lw_connections[_amqp_connection()].acquire(block=True) as connection:
                with connection.Consumer(
                    queues=[
                        kombu.messaging.Queue(
                            self._response_routing_key,
                            _amqp_exchange(),
                            routing_key=self._response_routing_key,
                            auto_delete=True,
                            durable=False,
                        )
                    ],
                    callbacks=[callback],
                ):
                    self._send(connection, request)
                    timeout_at = time.time() + rpc_timeout
                    while not self._complete:
                        try:
                            connection.drain_events(timeout=1)
                        except socket.timeout:
                            pass
                        except IOError as e:
                            # See HYD-2551
                            if e.errno != errno.EINTR:
                                # if not [Errno 4] Interrupted system call
                                raise
                        if time.time() > timeout_at:
                            raise RpcTimeout()
            return self._result
class RpcClientFactory(object):
    """
    Provide sending and receiving AMQP RPC messages on behalf of
    all concurrent operations within a process.

    This class creates and destroys RpcClient instances
    for each RPC service that this process makes calls to.

    This class operates either in a 'lightweight' mode or
    in a multi-threaded mode depending on whether `initialize_threads`
    is called.

    Lightweight mode does not spawn any additional threads to handle
    RPC responses, but has the overhead of creating separate AMQP connections
    for each concurrent RPC, and creating a separate response queue for each
    call. This is for use in WSGI handlers performing comparatively rare
    operations (things that happen when a user clicks a button).

    Threaded mode spawns a response handler thread for each named RPC
    service that the calling process interacts with. This reduces the number
    of queues and connections to one per service rather than one per call. This
    is for use when issuing large numbers of concurrent RPCs, such as when
    performing a 1-per-server set of calls between backend processes.
    """

    _instances = {}  # queue_name -> shared RpcClient (threaded mode only)
    _factory_lock = None  # guards _instances; created in initialize_threads
    _available = True  # set False by shutdown_threads
    _lightweight = True  # flipped to False by initialize_threads
    _lightweight_initialized = False  # lw_connections pool created on first use

    @classmethod
    def initialize_threads(cls):
        """Set up for multi-threaded operation. Calling this turns off
        'lightweight' mode, and causes the rpc module to use multiple
        threads when issuing RPCs. If this is not called, then no
        extra threads are started when issuing RPCs
        """
        if not cls._lightweight:
            raise RuntimeError("Called %s.initialize_threads more than once!" % cls.__name__)
        log.debug("%s enabling multi-threading" % cls.__name__)
        # Cannot instantiate lock at module import scope because
        # it needs to happen after potential gevent monkey patching
        cls._factory_lock = threading.Lock()
        cls._lightweight = False
        # Populate the module-level pools used by threaded-mode clients.
        global tx_connections
        global rx_connections
        tx_connections = kombu.pools.Connections(limit=10)
        rx_connections = kombu.pools.Connections(limit=20)

    @classmethod
    def shutdown_threads(cls):
        """Join any threads created. Only necessary if `initialize` was called
        """
        assert not cls._lightweight
        with cls._factory_lock:
            # Stop all handlers, join them, then fail any outstanding waits.
            for instance in cls._instances.values():
                instance.stop()
            for instance in cls._instances.values():
                instance.join()
            for instance in cls._instances.values():
                instance.timeout_all()
            cls._available = False

    @classmethod
    def get_client(cls, queue_name):
        """Return an RpcClient for `queue_name`: a fresh instance per call in
        lightweight mode, a shared per-service instance in threaded mode."""
        if cls._lightweight:
            if not cls._lightweight_initialized:
                # connections.limit = LIGHTWEIGHT_CONNECTIONS_LIMIT
                global lw_connections
                lw_connections = kombu.pools.Connections(limit=LIGHTWEIGHT_CONNECTIONS_LIMIT)
                cls._lightweight_initialized = True
            return RpcClient(queue_name, lightweight=True)
        else:
            with cls._factory_lock:
                if not cls._available:
                    raise RuntimeError("Attempted to acquire %s instance after shutdown" % cls.__name__)
                try:
                    instance = cls._instances[queue_name]
                except KeyError:
                    log.debug("Instantiating RpcWaiter for %s" % queue_name)
                    instance = RpcClient(queue_name)
                    cls._instances[queue_name] = instance
                return instance
class ServiceRpcInterface(object):
    """Create a class inheriting from this to expose some methods of another
    class for RPC. In your subclass, define the `methods` class attribute with a list
    of RPC-callable attributes.

    If you have a class `foo` and you want to expose some methods to the world:

    ::

        class Foo(object):
            def functionality(self):
                pass

        class FooRpc(ServiceRpcInterface):
            methods = ['functionality']

        server = FooRpc(Foo())
        server.run()

    To invoke this method from another process:

    ::

        FooRpc().functionality()
    """

    def __init__(self, wrapped=None):
        """
        :param wrapped: The object whose methods are exposed (server side);
            omit on the client side, where only remote invocation is needed.
        """
        self.worker = None
        self.wrapped = wrapped

        if wrapped:
            # Raise an exception if any of the declared methods don't exist
            # on the wrapped object
            for method in self.methods:
                getattr(wrapped, method)

    def __getattr__(self, name):
        # Any declared RPC method resolves to a remote call; everything else
        # behaves like a normal missing attribute.
        if name in self.methods:
            return lambda *args, **kwargs: self._call(name, *args, **kwargs)
        else:
            raise AttributeError(name)

    def _call(self, fn_name, *args, **kwargs):
        """Issue `fn_name` as an RPC and return its result.

        :raises RpcError: if the remote handler reported an exception.
        :raises RpcTimeout: if no response arrived in time.
        """
        # If the caller specified rpc_timeout then fetch it from the kwargs and remove.
        rpc_timeout = kwargs.pop("rpc_timeout", RESPONSE_TIMEOUT)

        request_id = uuid.uuid4().__str__()
        request = {"method": fn_name, "args": args, "kwargs": kwargs, "request_id": request_id}

        log.debug("Starting rpc: %s, id: %s " % (fn_name, request_id))
        log.debug("_call: %s %s %s %s" % (request_id, fn_name, args, kwargs))

        rpc_client = RpcClientFactory.get_client(self.__class__.__name__)

        result = rpc_client.call(request, rpc_timeout)

        if result["exception"]:
            # Use .get for exception_type too: RESPONSE_SCHEMA does not
            # require it, and a KeyError here would mask the real error.
            log.error(
                "ServiceRpcInterface._call: exception %s: %s \ttraceback: %s"
                % (result["exception"], result.get("exception_type"), result.get("traceback"))
            )
            raise RpcError(result["exception"], result.get("exception_type"), traceback=result.get("traceback"))
        else:
            # NB: 'result' can be very large, and almost cripple the various logs where
            # rpcs are run: http.log, job_scheduler.log, etc.
            # If you want to see response result data from rpcs at the INFO level, consider writing
            # log messages into the JobSchedulerClient calls. Leaving this in for DEBUG.
            # Fix: compare log levels with != rather than `is`, which tested
            # object identity and only worked because CPython caches small ints.
            if log.getEffectiveLevel() != logging.DEBUG:
                # Truncate message
                result100 = str(result)[:100]
                if str(result) != result100:
                    result100 += "..."
                result_str = result100
            else:
                result_str = result

            log.debug("Completed rpc: %s, id: %s, result: %s" % (fn_name, request_id, result_str))

        return result["result"]

    def _local_call(self, fn_name, *args, **kwargs):
        """Invoke `fn_name` on the wrapped object (server side)."""
        log.debug("_local_call: %s %s %s" % (fn_name, args, kwargs))
        assert fn_name in self.methods
        fn = getattr(self.wrapped, fn_name)
        return fn(*args, **kwargs)

    def run(self):
        """Run the RPC server loop for this interface (blocks)."""
        with _amqp_connection() as connection:
            self.worker = RpcServer(self, connection, self.__class__.__name__)
            self.worker.run()

    def stop(self):
        # self.worker could be None if thread stopped before run() gets to the point of setting it
        if self.worker is not None:
            self.worker.stop()
|
# this file interact with the remote/local object detector and
# and collect the detection results by feeding them with one synthesized image
import requests
import time
from PIL import Image, ImageDraw,ImageColor,ExifTags
from io import BytesIO
def localize_objects(path):
    """Localize objects in the local image.

    Posts the image to a remote MAX Object Detector service and converts each
    detection into pixel coordinates.

    Args:
        path: The path to the local file.

    Returns:
        A list of (label_name, probability_percent, ((x0, x1), (y0, y1)))
        tuples, one per detection above the 0.7 threshold.
    """
    # Fix: the original opened the file twice and never closed either handle;
    # both opens are now wrapped in context managers.
    with Image.open(path) as image:
        x, y = image.size
    params = (
        ('threshold', '0.7'),
    )
    with open(path, 'rb') as image_file:
        files = {'image': image_file}
        response = requests.post('http://max-object-detector.max.us-south.containers.appdomain.cloud/model/predict', params = params, files=files)
    analysis = response.json()
    res = []
    for label in analysis['predictions']:
        # detection_box appears to be (ymin, xmin, ymax, xmax) normalised to
        # [0, 1] — presumably; TODO confirm against the detector's API docs.
        box = label['detection_box']
        top = y * box[0]
        left = x * box[1]
        height = (y * box[2] - y * box[0])
        width = (x * box[3] - x * box[1])
        # Zero positions/sizes are nudged to 10px so callers always receive a
        # drawable region.
        if (top == 0.0):
            top = 10.0
        if (left == 0.0):
            left = 10.0
        if (height == 0.0):
            height = 10.0
        if (width == 0.0):
            width = 10.0
        box = ((left, left + width), (top, top + height))
        n = label['label'].replace(" ", "_")
        res.append((n, label['probability'] * 100, box))
    return res
|
import bpy
import os

# Clear the console (Windows `cls`, POSIX `clear`) before printing the report.
os.system('cls' if os.name == 'nt' else 'clear')

# For every image datablock in the .blend file, report which materials (and
# which mesh objects carrying those materials) reference it through an
# Image Texture node; images with no users are reported separately.
for texture in bpy.data.images:
    flag = False  # becomes True once any material is found using this image
    for mat in bpy.data.materials:
        if mat.node_tree is not None:
            for node in mat.node_tree.nodes:
                if node.type == 'TEX_IMAGE':
                    if node.image == texture:
                        flag = True
                        path = bpy.data.images[node.image.name].filepath
                        # Mesh objects whose material slots include this material.
                        obs = [o.name for o in bpy.data.objects if type(o.data) is bpy.types.Mesh and mat.name in o.data.materials]
                        print('\nThe Material', mat.name,'in the object',obs, 'is using the texture', node.image.name, '\nfrom', path)
    if flag == False:
        print('\nno users found for',texture.name)
def compare(s, a):
    """Return 1 if `s` starts with some word in `a` and the remainder of `s`
    can itself be segmented into words from `a` (see iseither), else 0.

    Fix: the original had `else: return 0` inside the loop, so it returned
    after testing only the first candidate word; every word is now tried
    before giving up.
    """
    for word in a:
        if s.find(word) == 0 and iseither(s, len(word)):
            return 1
    return 0
def iseither(s, i):
    """Return 1 if the suffix s[i:] satisfies the problem condition
    (it can be decomposed into words from the global list A), else 0."""
    global A
    if i == len(s):
        # An empty suffix is trivially decomposable.
        return 1
    return 1 if compare(s[i:], A) != 0 else 0
# dp table reserved for memoisation (currently unused by compare/iseither).
dp = [0]*101
S = input()                           # target string to segment
N = int(input())                      # number of dictionary words
A = [input() for _ in range(N)]       # the dictionary words
# Try longer words first so the greedy prefix match prefers them.
A.sort(key=lambda x : -len(x))
print(compare(S,A))
from django.core.management import base


class Command(base.NoArgsCommand):
    """Management command that recomputes all March Madness scores."""
    # NOTE(review): NoArgsCommand was removed in Django 1.10; migrate to
    # BaseCommand/handle() when the project's Django version requires it.
    help = "Recalculates March Madness scores."

    def handle_noargs(self, verbosity=0, **options):
        # Imported here, as is conventional for management commands, so the
        # models module loads only when the command actually runs.
        from marchmadness.models import recalculate_all_scores
        recalculate_all_scores()

# vi: set sw=4 ts=4 sts=4 tw=79 ai et nocindent:
|
import numpy as np
class Uni:
    """1D uniform grid: constant cross-section A = pi*R**2 along the axis.

    `args` must provide 'l' (length), 'R' (radius) and 'N' (node count).
    Calling the instance returns [z, dz, A0]: node positions, spacings,
    and the per-node cross-sectional areas.
    """

    def __init__(self, args):
        self.l = args['l']
        self.R = args['R']
        self.N = args['N']
        self.A = np.pi * self.R ** 2.

    def __call__(self):
        # Evenly spaced nodes over [0, l].
        z = np.linspace(0., self.l, self.N)
        # Spacing between consecutive nodes (length N-1).
        dz = np.diff(z)
        # Uniform area profile.
        A0 = np.full(self.N, self.A)
        return [z, dz, A0]
class Cons:
    """1D grid with a smooth (cosine) constriction at the midpoint.

    The diameter tapers from D (radius 'R') at both ends down to d
    (radius 'r') at the centre. Calling the instance returns [z, dz, A0].
    """

    def __init__(self, args):
        self.l = args['l']
        self.R = args['R']
        self.r = args['r']
        self.N = args['N']
        self.A = np.pi * self.R ** 2.
        self.a = np.pi * self.r ** 2.

    def __call__(self):
        z = np.linspace(0., self.l, self.N)
        dz = np.diff(z)
        # Normalised axial coordinate in [-1, 1] for the cosine profile.
        xi = np.linspace(-1., 1., self.N)
        # End and throat diameters recovered from the areas.
        D = np.sqrt((4. / np.pi) * self.A)
        d = np.sqrt((4. / np.pi) * self.a)
        # Cosine bump: full depth (d - D) at xi = 0, zero at xi = +/-1.
        D0 = D + (d - D) * (1.0 + np.cos(xi * np.pi)) / 2.
        A0 = (np.pi / 4.) * D0 ** 2.
        return [z, dz, A0]
class Cone:
    """1D conical grid: diameter varies linearly from D (radius 'R') at
    z = 0 to d (radius 'r') at z = l. Calling returns [z, dz, A0].
    """

    def __init__(self, args):
        self.l = args['l']
        self.R = args['R']
        self.r = args['r']
        self.N = args['N']
        self.A = np.pi * self.R ** 2.
        self.a = np.pi * self.r ** 2.

    def __call__(self):
        z = np.linspace(0., self.l, self.N)
        dz = np.diff(z)
        D = np.sqrt((4. / np.pi) * self.A)
        d = np.sqrt((4. / np.pi) * self.a)
        # Linear taper of the diameter along the axis.
        D0 = D + (d - D) / self.l * z
        A0 = (np.pi / 4.) * D0 ** 2.
        return [z, dz, A0]
|
#!/usr/bin/env python
"""Fetcher of curriculum for UIUC."""
import json
import requests
import _common
# TODO: complete the license and version info
__author__ = 'Pengyu CHEN'
__copyright__ = '2014 Deal College Inc.'
__credits__ = ['Pengyu CHEN']
__license__ = ''
__version__ = ''
__maintainer__ = 'Pengyu CHEN'
__email__ = 'pengyu@libstarrify.so'
__status__ = 'development'
# Mapping of human-readable semester slugs to the term codes the
# my.illinois.edu portal expects in its 'termCode' POST field.
strings = {
    'semester_curriculum': {
        'fall-2013': '120138',
        'spring-2014': '120141',
        'summer-2014': '120145',
        'fall-2014': '120148',
    },
}
def fetch_curriculum(username, password, semester, per_request_timeout):
    """Fetches curriculum data using given login info.

    Args:
        username: Username of 'https://my.illinois.edu/uPortal/Login'.
        password: Password of 'https://my.illinois.edu/uPortal/Login'.
        semester: Name of the semester, e.g. 'summer-2014'.
        per_request_timeout: Per request timeout in seconds.

    Returns:
        A dictionary with these fields:
            'status': 'success'/'error'/...
            'message': Message describing the fetch.
            'raw-data': Raw HTML for the fetched data. Contains one of:
                - 'You are not registered for any courses for this term.' When
                  there is no course for the term.
                - A <table> node with attribute 'title="Course List"'

    Raises:
        _common.FetchError: If the fetch cannot complete.
    """
    # Fix: removed the unreachable `raise Exception(...)` / `pass` trailer —
    # the try block always returns and the except block always raises.
    try:
        # Logging in to uPortal
        session = requests.Session()
        login_url = 'https://my.illinois.edu/uPortal/Login'
        login_data = {
            'action': 'login',
            'userName': username,
            'password': password,
            'Login': 'Sign In',
        }
        session.headers.update({
            'Referer': 'https://my.illinois.edu/uPortal/render.userLayoutRootNode.uP'
        })
        request = session.post(
            login_url,
            data=login_data,
            timeout=per_request_timeout,
            allow_redirects=True)
        # Login success/failure is detected from markers in the response HTML.
        succ_msg = '<div id="portalWelcomeLogin">'
        fail_msg = (
            'The user name/password combination entered is not recognized. '
            'Please try again!')
        if fail_msg in request.text:
            raise _common.FetchError(_common.strings['error-incorrect-login'])
        elif succ_msg not in request.text:
            raise _common.FetchError(_common.strings['error-authenticating'])
        # Fetching academic data: first activate the Academics tab, then POST
        # the term selection to the portlet.
        academics_url = (
            'https://my.illinois.edu/uPortal/render.userLayoutRootNode.uP?uP_root=root&uP_sparam=activeTabTag&activeTabTag=Academics'
        )
        session.get(academics_url, timeout=per_request_timeout)
        academics_url = (
            'https://my.illinois.edu/uPortal/render.userLayoutRootNode.target.u6l1n8.uP?pltc_target=870733.u6l1n8&pltc_type=ACTION'
        )
        academics_data = {
            'termCode': semester,
            'action': 'load',
        }
        request = session.post(
            academics_url,
            data=academics_data,
            timeout=per_request_timeout)
        raw_data = request.text
        return {
            'status': _common.strings['status-success'],
            'message': _common.strings['message-success'],
            'raw-data': raw_data
        }
    except (requests.exceptions.ConnectionError,
            requests.exceptions.HTTPError, requests.exceptions.Timeout,
            requests.exceptions.TooManyRedirects):
        raise _common.FetchError(_common.strings['error-communicating'])
|
#!/usr/bin/env python
# Python 2 test harness: probes a proxy on 127.0.0.1:3128 with assorted
# Transfer-Encoding / TE header values and chunk-size sequences to exercise
# its chunked-request parsing (request-smuggling style tests).
import sys
import socket

# Alphabetic filler bytes used as the chunk payload.
pattern = ''.join(chr(_) for _ in range(0,256) if chr(_).isalpha())

# Each entry is a sequence of hex chunk sizes to send; the empty tuple
# means "send no body at all".
for sizes in (
    [str(_) for _ in range(1,10)],
    [str(1) for _ in range(0,0xff)],
    ['ffff',],
    ['20000',],
    (),
):
    for te_key in ('Transfer-Encoding', 'TE'):
        # Values mix valid and invalid chunked/trailer/q-value tokens.
        for te_value in ('chunked', 'chunked, trailer','chunked,trailer','trailers, deflate;q=0.5', 'something;q=1.5, trailer, chunked,else;q=0.5', 'chunked, something;q=1.5'):
            s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            s.connect(('127.0.0.1', 3128))
            # NOTE(review): "HTTTP/1.1" in the request line looks deliberate
            # (malformed-request probing) — confirm.
            request = \
                'POST /empty/ HTTTP/1.1\r\n' \
                'Host: 127.0.0.1\r\n' \
                '%s' \
                'Connection: close\r\n\r\n' % ('%s: %s\r\n' % (te_key,te_value) if te_value else '')
            s.send(request)
            debug = ['\n']
            debug.append('=' * 80)
            debug.append('sending chunk sizes (%s: %s) :' % (te_key,te_value))
            debug.append('%s' % ' '.join(sizes))
            debug.append('\n')
            # Only send a chunked body when the headers actually advertise
            # chunking without the bogus q-value variants.
            if 'chunk' in request and not ';q=1.5' in request:
                sys.stdout.write('%-8s chunk size %-6d %-65s' % ('single' if len(sizes) == 1 else 'multiple', sum(int(_,16) for _ in sizes),'-' if not te_key else '%s: %s' % (te_key,te_value)))
                for size in sizes:
                    sys.stdout.flush()
                    length = int(size,16)
                    # Integer division (Python 2): enough pattern repeats to cover `length`.
                    repeat = length/len(pattern)+1
                    chunk = (pattern * repeat)[:length]
                    sent = '%s\r\n%s\r\n' % (size,chunk)
                    s.send(sent)
                    request += sent.replace('\t', '\\t').replace('\r', '\\r').replace('\n', '\\n\n')
                # Terminating zero-length chunk.
                request += '0\\r\\n\\r\\n\n'
                s.send('0\r\n\r\n')
            else:
                sys.stdout.write('no chunk %-65s' % ('-' if not te_key else '%s: %s' % (te_key,te_value)))
            debug.append('[[%s]]' % request.replace('\t', '\\t').replace('\r', '\\r').replace('\n', '\\n\n'))
            debug.append('\n')
            try:
                data = s.recv(0x20000)
            except KeyboardInterrupt:
                print '\n'.join(debug)
                sys.exit(1)
            s.close()
            # Classify the proxy's reply; anything unexpected dumps the debug
            # trace and aborts the run.
            if '200' in data:
                sys.stdout.write('page received\n')
                sys.stdout.flush()
            elif '501 Method Not Implemented' in data:
                sys.stdout.write('not implemented\n')
                sys.stdout.flush()
            else:
                debug.append('[[%s]]' % data.replace('\t', '\\t').replace('\r', '\\r').replace('\n', '\\n\n'))
                print '\n'.join(debug)
                sys.stdout.flush()
                sys.exit(1)
|
#PortiCode
def toplamAl(liste):
    """Return the sum of the numbers in `liste`.

    :param liste: A list or tuple of numbers.
    :raises ValueError: If the argument is neither a list nor a tuple.
    """
    # isinstance replaces the exact type() checks (also accepts subclasses),
    # and the built-in sum() replaces the manual accumulation loop.
    if isinstance(liste, (list, tuple)):
        return sum(liste)
    raise ValueError("Girilen parametre liste veya tuple bekleniyordu")
"""
a = toplamAl([1,2,3,4,5,6])
b = toplamAl((1,2,3,4,5,6))
c = toplamAl("Porti")
print(c)# Hata alcak!
print(b)
print(a)
"""
try :
a = toplamAl(5)
print(a)
except ValueError:
print("Buraya girdi") |
from django.shortcuts import render, get_object_or_404
from django.views.generic.base import TemplateView
from django.views.generic import ListView
from django.contrib.auth.mixins import LoginRequiredMixin
from django.contrib.auth.models import User
from django.contrib.auth import logout, login, authenticate
from django.http import HttpResponse, HttpResponseRedirect
from bang_app.models import Customer, Product, ProductType, CustomerOrder
from bang_app import models
class ProductDetailView(TemplateView):
    '''
    Purpose-
        This class provides the template for a customer to
        select a product and view it's details: name, price, description, quantity
    Methods-
        get_product_detail
    Author:
        @whitneycormack
    '''
    template_name = 'product_detail.html'
    model = models.Product

    # NOTE(review): defined inside the class but takes `request` as its first
    # argument rather than `self` — confirm how URLs/templates invoke this.
    def get_product_detail(request, id):
        # Queryset (possibly empty) for the requested product id; the template
        # iterates it, and product_detail[0] is indexed below.
        product_detail = Product.objects.filter(id=id)
        try:
            # Count the line items in the user's active cart for the badge total.
            cart = CustomerOrder.objects.get(customer=request.user.customer, active_order=1)
            line_items = cart.line_items.all()
            total = 0
            for i in line_items:
                total += 1
        except CustomerOrder.DoesNotExist:
            # No active cart yet for this customer.
            total = 0
        except AttributeError:
            # Anonymous user: request.user has no .customer attribute.
            total = 0
        # create a list that can be looped over in the template to create the
        # appropriate quantity options when buying a product
        product_quantity = range(product_detail[0].quantity)
        product_quantity = [x+1 for x in product_quantity]
        return render(
            request,
            'product_detail.html',
            {
                'product_detail': product_detail,
                'total': total,
                'product_quantity': product_quantity
            }
        )
|
# -*- coding: utf-8 -*-
"""
Created on Mon Jul 10 19:11:01 2017

@author: Luis
"""
import pygame,sys
from pygame.locals import *
from random import randint  # generate random numbers (currently unused)

# Window dimensions and the ball's starting position.
ancho = 800
alto = 600
posX = 0
posY = 520
class Bola(pygame.sprite.Sprite):
    """Player-controlled ball sprite."""

    def __init__(self):
        # Initialise the sprite machinery before loading the artwork.
        pygame.sprite.Sprite.__init__(self)
        imagen = pygame.image.load("Imagenes/bola.png")
        self.bola = pygame.transform.scale(imagen, (35, 35))
        self.rect = self.bola.get_rect()
        self.velocidad = 40
        self.change_x = 0
        self.change_y = 0
        self.vida = True

    def actualizar(self, moneda):
        """Report a collision between the ball and the given coin sprite."""
        if pygame.sprite.collide_rect(self, moneda):
            print("Colision")

    def dibujar(self, superficie, x, y):
        """Blit the ball onto `superficie` at its current rect position."""
        superficie.blit(self.bola, self.rect)

    def cambiovelocidad(self, x, y):
        """Accumulate (x, y) into the ball's current velocity."""
        self.change_x += x
        self.change_y += y

    def movimiento(self):
        """Clamp the ball inside the screen bounds while it is alive."""
        if self.vida:
            if self.rect.left <= 0:
                self.rect.left = 10
            elif self.rect.right >= 800:
                self.rect.right = 800
            elif self.rect.top >= 560:
                self.rect.top = 560
            elif self.rect.bottom <= 30:
                self.rect.bottom = 30

    def update(self):
        """Advance the ball by its current velocity (sprite group hook)."""
        self.rect.x += self.change_x
        self.rect.y += self.change_y
class Moneda(pygame.sprite.Sprite):
    """Coin sprite, initially placed near the bottom-centre of the screen."""

    def __init__(self):
        pygame.sprite.Sprite.__init__(self)
        imagen = pygame.image.load("Imagenes/moneda.png")
        self.moneda = pygame.transform.scale(imagen, (35, 35))
        self.rect = self.moneda.get_rect()
        self.rect.centerx = ancho/2
        self.rect.centery = alto-60

    def dibujar(self, superficie):
        """Blit the coin onto the target surface."""
        superficie.blit(self.moneda, self.rect)
pygame.init()
ventana = pygame.display.set_mode((ancho,alto))
pygame.display.set_caption("Juego")
imagenFondo = pygame.image.load("Imagenes/fondo1.jpg")
imagenFondo = pygame.transform.scale(imagenFondo, (800,600))
jugador = Bola()
monedas = Moneda()
listade_todoslos_sprites = pygame.sprite.Group()
listade_todoslos_sprites.add(jugador)
# Standalone coin image blitted as decoration (separate from the Moneda sprite).
moneda = pygame.image.load("Imagenes/moneda.png")
moneda = pygame.transform.scale(moneda, (35,35))
ventana.blit(moneda,(225,500))
Blanco = (255,255,255)
#velocidad = 5  # speed of 5 pixels
enJuego = True
reloj = pygame.time.Clock()
puntuacion = 0
# Main loop: handle input, update sprites, redraw the frame.
while True:
    jugador.movimiento()
    for evento in pygame.event.get():  # returns the list of pending events
        if evento.type == QUIT:  # closing the window quits the game
            pygame.quit()
            sys.exit()
        if enJuego == True:
            if evento.type == pygame.KEYDOWN:  # a key was pressed
                if evento.key == pygame.K_LEFT:
                    jugador.cambiovelocidad(-3,0)
                elif evento.key == pygame.K_RIGHT:
                    jugador.cambiovelocidad(3,0)
                elif evento.key == pygame.K_UP:
                    jugador.cambiovelocidad(0,-3)
                elif evento.key == pygame.K_DOWN:
                    jugador.cambiovelocidad(0,3)
            # Releasing a key cancels the velocity added on key-down.
            elif evento.type == pygame.KEYUP:
                if evento.key == pygame.K_LEFT:
                    jugador.cambiovelocidad(3,0)
                elif evento.key == pygame.K_RIGHT:
                    jugador.cambiovelocidad(-3,0)
                elif evento.key == pygame.K_UP:
                    jugador.cambiovelocidad(0,3)
                elif evento.key == pygame.K_DOWN:
                    jugador.cambiovelocidad(0,-3)
    # Move the player sprite according to its current velocity.
    listade_todoslos_sprites.update()
    # Cap the frame rate at 60 FPS.
    reloj.tick(60)
    ventana.blit(imagenFondo, (0,0))
    monedas.dibujar(ventana)
    jugador.actualizar(monedas)
    jugador.dibujar(ventana, 520,86)
    ventana.blit(moneda,(450,500))
    ventana.blit(moneda,(500,500))
    ventana.blit(moneda,(550,500))
    pygame.display.update()
'''
1 - Faça um programa que peça dois números e imprima o maior deles
'''
def maior_numero(num1, num2):
    """Return the message naming the larger of the two numbers.

    Fix: the second branch printed "O maio número" (typo); both branches now
    read "O maior número". Extracted into a function so it is testable.
    """
    if num1 > num2:
        return f'O maior número é o {num1}'
    if num2 > num1:
        return f'O maior número é o {num2}'
    return "Os números são iguais"


if __name__ == "__main__":
    print("Informe o primeiro numero")
    num1 = int(input())
    print("Informe o segundo numero")
    num2 = int(input())
    print(maior_numero(num1, num2))
def to_12_hour(time_24):
    """Convert an HHMM 24-hour integer (e.g. 1435) to a 12-hour string.

    :param time_24: Time as an integer in HHMM form, 0..2359.
    :return: e.g. "2:35pm"; midnight (0) renders as "12:xxam".
    """
    hours = time_24 // 100
    minutes = time_24 % 100
    am_pm = "am"
    if hours == 12:
        # The noon hour is "pm".
        am_pm = "pm"
    if hours % 12 == 0:
        # 0 (midnight) and 12 (noon) both display as 12.
        hours = 12
    elif hours % 12 != hours:
        # 13..23 wrap to 1..11 and are "pm".
        hours = hours % 12
        am_pm = "pm"
    # Zero-pad minutes to two digits.
    min_str = "0" + str(minutes) if minutes < 10 else str(minutes)
    return str(hours) + ":" + min_str + am_pm


if __name__ == "__main__":
    time_24 = int(input("Enter the time: "))
    # Fix: the original output said "currnet"; also extracted the conversion
    # into a function and guarded the input() so importing has no side effects.
    print("the current time is :" + to_12_hour(time_24))
__author__ = 'kattaguy'


def pythagorean_triple_product(total=1000):
    """Return a*b*c for the Pythagorean triple with a + b + c == total.

    Fixes vs. the original brute force:
    - c is derived as total - a - b instead of a third nested loop
      (O(n^2) instead of O(n^3) over range(500)^3);
    - `and` replaces the bitwise `&` between the two comparisons;
    - enforcing a < b reports the triple once instead of twice.

    :param total: Required perimeter of the triple (generalised from 1000).
    :return: The product a*b*c, or None if no such triple exists.
    """
    for a in range(1, total):
        for b in range(a + 1, total - a):
            c = total - a - b
            if a * a + b * b == c * c:
                return a * b * c
    return None


if __name__ == "__main__":
    print(pythagorean_triple_product())
|
# author:lzt
# date: 2019/12/4 15:44
# file_name: dict_test
# Creating dictionaries
import random

# Literal syntax: keys and values may be of any mixed (hashable) types.
dict1 = {1: 2, 2: 3, 3: 4, 5: "005", None: "6", "7": None}
print(dict1)
# Build a dict with the dict() class: keyword arguments!!!
dict2 = dict(张三=1, 李四=2, 王五=5)
print(dict2)
# Build a dict from another dict (** unpacks the key/value pairs)
dict3_1 = {"1": 1, "2": 2}
dict3 = dict(**dict3_1)
print(dict3)
# A function that collects keyword arguments into a dict
def test_dict_parameter(**kwargs):
    print(type(kwargs))
    print(kwargs)
# Passing keyword arguments
# test_dict_parameter({"1": 1, "2": 2})
test_dict_parameter(abc=1, bcd=2)
# Passing a dict into **kwargs: ** unpacks it automatically
dict_fun = {"1": 1, "2": 2}
test_dict_parameter(**dict_fun)
# Dictionary operations:
# add / delete / modify / query
# Adding (assigning an existing key overwrites its value):
dict4 = {}
dict4["001"] = 123
dict4["002"] = 456
dict4["001"] = 1000
print(dict4)
# update merges another dict's entries in
dict5 = {"004": 125}
dict4.update(**dict5)
print(dict4)
# print(dict4+dict5)
# Deleting
# if "000" in dict4:
#     del dict4["000"]
# dict4.pop("000")
# Modifying:
dict4["008"] = 888
dict4["008"] = 999
# Querying
# [key]: raises KeyError when the key is missing
# get(key): returns None when missing — recommended!!!
# print(dict4[input("输入查询的key")])
# print(dict4.get(input("输入查询的key")))
# Iterating a dictionary
# Over keys
for i in dict4.keys():
    print(i)
# Over values
for i in dict4.values():
    print(i)
# Over key/value pairs
for k, v in dict4.items():
    print(k, v)
# print(dict4)
# Given a list of objects — how to speed up lookups?
import day23.set_test as st
stus = [
    st.Student(random.choice(["001", "002", "003"]), random.randint(19, 29), random.randint(1000, 9999)),
    st.Student(random.choice(["001", "002", "003"]), random.randint(19, 29), random.randint(1000, 9999)),
    st.Student(random.choice(["001", "002", "003"]), random.randint(19, 29), random.randint(1000, 9999)),
    st.Student(random.choice(["001", "002", "003"]), random.randint(19, 29), random.randint(1000, 9999)),
    st.Student(random.choice(["001", "002", "003"]), random.randint(19, 29), random.randint(1000, 9999))
]
for i in stus:
    print(i)
print("***********************")
# Build an id -> object dict from the object list
stus_dict = {i.pid: i for i in stus}
# Filter objects from the list into the dict while building it
stus_dict2 = {i.pid: i for i in stus if str(i.pid).startswith("3")}
print(stus_dict2.get(stus[0].pid))
# Inverting keys and values
# Weather forecast:
# {city1: weather1, city2: weather2, city3: weather3, city4: weather1}
# {weather1: [city1, city4], weather2: [city list]}
dict5 = {"郑州": "晴天", "上海": "晴天", "北京": "雾霾", "杭州": "小雨", "开封": "小雪"}
# dict6 = {v: [i for i in dict5.keys() if dict5.get(i) == v] for k, v in dict5.items()}
# Seed each weather key with one city; the loop below appends the rest.
dict6 = {v: [k] for k, v in dict5.items()}
for k, v in dict5.items():
    # Walk every (city: weather) pair
    # v matches a key of dict6
    city_list = dict6.get(v)
    if k not in city_list:
        city_list.append(k)
print(dict6)
|
# Generated by Django 3.0.2 on 2020-04-13 04:42
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Create the post<->attribute relation tables (shape colour, shape, sew,
    pattern, cloth-style colour, cloth style).

    BUG FIX: every ForeignKey passed null='True' (the *string*, which Django
    merely treats as truthy) instead of the boolean True the option expects.

    NOTE(review): each relation table names its FK 'cloth_style' even when it
    points at Shape/Sew/Pattern; renaming it here would change the generated
    DB column, so the misleading name is kept as-is.
    """

    dependencies = [
        ('apis', '0008_auto_20200401_0847'),
    ]

    operations = [
        migrations.CreateModel(
            name='RelPostShapeColour',
            fields=[
                ('id', models.BigAutoField(primary_key=True, serialize=False)),
                ('cloth_style', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='apis.ShapeColour')),
                ('post', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='apis.Post')),
            ],
            options={
                'verbose_name': 'relpostshapecolour',
                'verbose_name_plural': 'relpostshapecolours',
                'db_table': 'relpostshapecolour',
            },
        ),
        migrations.CreateModel(
            name='RelPostShape',
            fields=[
                ('id', models.BigAutoField(primary_key=True, serialize=False)),
                ('cloth_style', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='apis.Shape')),
                ('post', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='apis.Post')),
            ],
            options={
                'verbose_name': 'relpostshape',
                'verbose_name_plural': 'relpostshapes',
                'db_table': 'relpostshape',
            },
        ),
        migrations.CreateModel(
            name='RelPostSew',
            fields=[
                ('id', models.BigAutoField(primary_key=True, serialize=False)),
                ('cloth_style', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='apis.Sew')),
                ('post', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='apis.Post')),
            ],
            options={
                'verbose_name': 'relpostsew',
                'verbose_name_plural': 'relpostsews',
                'db_table': 'relpostsew',
            },
        ),
        migrations.CreateModel(
            name='RelPostPattern',
            fields=[
                ('id', models.BigAutoField(primary_key=True, serialize=False)),
                ('cloth_style', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='apis.Pattern')),
                ('post', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='apis.Post')),
            ],
            options={
                'verbose_name': 'relpostpattern',
                'verbose_name_plural': 'relpostpatterns',
                'db_table': 'relpostpattern',
            },
        ),
        migrations.CreateModel(
            name='RelPostClothStyleColour',
            fields=[
                ('id', models.BigAutoField(primary_key=True, serialize=False)),
                ('cloth_style', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='apis.ClothStyleColour')),
                ('post', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='apis.Post')),
            ],
            options={
                'verbose_name': 'relpostclothstylecolour',
                'verbose_name_plural': 'relpostclothstylecolours',
                'db_table': 'relpostclothstylecolour',
            },
        ),
        migrations.CreateModel(
            name='RelPostClothStyle',
            fields=[
                ('id', models.BigAutoField(primary_key=True, serialize=False)),
                ('cloth_style', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='apis.ClothStyle')),
                ('post', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='apis.Post')),
            ],
            options={
                'verbose_name': 'relpostclothstyle',
                'verbose_name_plural': 'relpostclothstyles',
                'db_table': 'relpostclothstyle',
            },
        ),
    ]
|
'''Trains a GRU model on an appliance energy-disaggregation task.

The aggregate household signal is the input and one appliance's signal
(heat pump) is the regression target.

(The original docstring, copied from the Keras IMDB example, described a
sentiment-classification task that this script does not perform.)

# Notes

- RNNs are tricky. Choice of batch size is important,
choice of loss and optimizer is critical, etc.
Some configurations won't converge.
'''
from __future__ import print_function

from keras.preprocessing import sequence
from keras.models import Sequential
from keras.layers import Dense, Embedding, Flatten
from keras.layers import GRU
import numpy as np

max_features = 2000
batch_size = 115

print('Loading data...')
# `with` closes the files automatically; the original also called f.close()
# redundantly inside the block.
with open('../dataset/appliances/aggregate_signal10.csv', 'rb') as f:
    aggregate_signal = np.loadtxt(f, delimiter='\t')
with open('../dataset/appliances/heat_pump_10.csv', 'rb') as f:
    appliance_signal = np.loadtxt(f, delimiter='\t')

# Dataset layout constants (currently unused below; kept for reference).
n_dim = 10
n_sample = 1440
attribute = 5  # power
time_step = aggregate_signal.shape[0]//batch_size

print('**** shape of train and test signal ****')
print('Build model...')
model = Sequential()
model.add(GRU(128, dropout=0.2, recurrent_dropout=0.2, input_shape=(10, 1)))
model.add(Dense(10, activation='sigmoid'))
model.summary()

# try using different optimizers and different optimizer configs
model.compile(loss='mean_squared_error',
              optimizer='adam',
              metrics=['accuracy'])

print('Train...')
# Add a trailing feature axis so each sample is (timesteps=10, features=1).
x = np.expand_dims(aggregate_signal, axis=2)
# NOTE(review): validating and evaluating on the training data only measures
# fit, not generalisation -- confirm whether a held-out split exists.
model.fit(x, appliance_signal,
          batch_size=batch_size,
          epochs=5,
          validation_data=(x, appliance_signal))
score, acc = model.evaluate(x, appliance_signal,
                            batch_size=batch_size)
print('Test score:', score)
print('Test accuracy:', acc)
# -*- coding: utf-8 -*-
# Demo of three ways to import from the standard math module.

# 1) Plain import: access names through the module object.
import math
print(math.floor(34.9))
print(math.tan(3))
# 2) from-import: bring specific names into the local namespace.
from math import log, ceil
print(log(2))
print(ceil(34.3))
# 3) Aliased import: bind the module under a shorter name.
import math as m
print(m.floor(34.9))
from __future__ import annotations
from typing import TypedDict
# Public API of this typings module.
__all__ = (
    "Permission",
    "Role",
)

# A permission is encoded as a pair of ints
# (presumably allow/deny bitmasks -- confirm against the API docs).
Permission = tuple[int, int]
class _RoleOptional(TypedDict, total=False):
    """Optional keys of a Role payload (total=False: each key may be absent)."""

    colour: str  # presumably a hex/CSS colour string -- confirm against the API
    hoist: bool  # presumably whether the role is displayed separately -- confirm
    rank: int  # presumably the role's sort position -- confirm
class Role(_RoleOptional):
    """Role payload: these keys are required; optional keys are inherited
    from :class:`_RoleOptional`."""

    name: str
    permissions: Permission
|
#Mostrar por la pantalla la suma de los multiplos de 3 y 5 entre 10
#entre 100 y entre 1000.
print('--------RETO 1-------');
sumMul = 0;
print('Multiplos de 3 y 5 del 1 al 10');
for x in range(1,10):
if x % 3 == 0 or x % 5 == 0:
sumMul = sumMul + x;
print x; #Los imprime dando saltos de linea.
print('Suma de los multiplos de 3 y 5 del 1 al 10');
print sumMul;
sumMul = 0; #Reiniciamos el valor del sumatorio de multiplos.
for x in range(1,100):
if x % 3 == 0 or x % 5 == 0:
sumMul = sumMul + x;
print x;
print('Suma de los multiplos de 3 y 5 del 1 al 10');
print sumMul;
sumMul = 0;
for x in range(1,1000):
if x % 3 == 0 or x % 5 == 0:
sumMul = sumMul + x;
print x;
print('Suma de los multiplos de 3 y 5 del 1 al 10');
print sumMul; |
# Minimal hard-coded credential check for demo purposes.
# NOTE(review): credentials in source code are insecure -- acceptable only in a toy script.
username = 'max'
password = 'JQP'

user = input('username : ')
user_pass = input('password : ')

if user == username and user_pass == password:
    print("Welcome admin")
else:
    # BUG FIX: the original message read "password is not collage" -- typo for "correct".
    print("password is not correct \n Get out Now!!!")
|
import infoEt
import local
class ControlInfo:
    """
    Controller for file-tag information, kept in two parallel lists of
    infoEt.InfoEtiquetas entries: one for Dropbox files (__listaD) and one
    for local files (__listaL).

    BUG FIX: the original left debug prints scattered through leeYSepara,
    nEtiquetaD and crearCadenaD (including an obscenity); all were removed.
    The duplicated serialisation logic of crearCadenaL/crearCadenaD now
    lives in the private __serializar helper.
    """

    def __init__(self):
        """Start with empty Dropbox and local tag lists."""
        self.__listaD = []  # entries describing Dropbox files
        self.__listaL = []  # entries describing local files

    def separar(self, cad):
        """
        Split a newline-separated tag string and append one InfoEtiquetas
        entry per line to the Dropbox list.

        Parameters:
        cad -- string to split.
        """
        for linea in cad.split("\n"):
            self.__listaD.append(infoEt.InfoEtiquetas(linea))

    def leeYSepara(self, cad):
        """
        Load tag entries for both collections: the local ones from the
        "/notas.txt" file, the Dropbox ones from the given string.

        Parameters:
        cad -- Dropbox tag string.
        """
        lector = local.Local()
        for linea in lector.leerFicheroL("/notas.txt"):
            self.__listaL.append(infoEt.InfoEtiquetas(linea))
        for linea in cad.split("\n"):
            self.__listaD.append(infoEt.InfoEtiquetas(linea))

    def buscarL(self, cad):
        """
        Return the local entries whose tags contain the given tag.

        Parameters:
        cad -- tag to look for.
        Returns:
        list of matching entries.
        """
        return [x for x in self.__listaL if x.buscar(cad)]

    def buscarD(self, cad):
        """
        Return the Dropbox entries whose tags contain the given tag.

        Parameters:
        cad -- tag to look for.
        Returns:
        list of matching entries.
        """
        return [x for x in self.__listaD if x.buscar(cad)]

    def __serializar(self, lista):
        """
        Serialise a tag list into the on-disk format: one
        "padre+hijo|tag1|tag2...\\n" line per entry, skipping entries that
        carry no information at all ("+\\n" or "\\n").
        """
        lineas = []
        for x in lista:
            linea = x.getPadre() + "+" + x.getHijo()
            for etiqueta in x.getLista():
                linea += "|" + etiqueta
            linea += "\n"
            if linea != "+\n" and linea != "\n":
                lineas.append(linea)
        return "".join(lineas)

    def crearCadenaL(self):
        """
        Build the string that will be stored for the local collection.

        Returns:
        cad -- serialised tag list.
        """
        return self.__serializar(self.__listaL)

    def nEtiquetaL(self, cad, et):
        """
        Add a new tag to the local file at the given path.

        Parameters:
        cad -- file path ("padre/hijo").
        et -- tag to add.
        """
        for x in self.__listaL:
            if x.getPadre() + "/" + x.getHijo() == cad:
                x.nuevaE(et)

    def nuevoL(self, nom, fich):
        """
        Register a new local file in the tag list.

        Parameters:
        nom -- folder containing the file.
        fich -- file name.
        """
        entrada = infoEt.InfoEtiquetas("")
        entrada.setN(nom, fich)
        self.__listaL.append(entrada)

    def nEtiquetaD(self, cad, et):
        """
        Add a new tag to the Dropbox file at the given path.

        Parameters:
        cad -- file path ("padre/hijo").
        et -- tag to add.
        """
        for x in self.__listaD:
            if x.getPadre() + "/" + x.getHijo() == cad:
                x.nuevaE(et)

    def nuevoD(self, nom, fich):
        """
        Register a new Dropbox file in the tag list.

        Parameters:
        nom -- folder containing the file.
        fich -- file name.
        """
        entrada = infoEt.InfoEtiquetas("")
        entrada.setN(nom, fich)
        self.__listaD.append(entrada)

    def crearCadenaD(self):
        """
        Build the string that will be stored for the Dropbox collection.

        Returns:
        cad -- serialised tag list.
        """
        return self.__serializar(self.__listaD)
|
import functools
import re
def read_passports() -> list[dict]:
    """Read input.txt and parse blank-line-separated passport records.

    Each record is a run of whitespace-separated "key:value" pairs, possibly
    spanning several lines; each becomes one dict.

    Fixes over the original: no redundant file.close() inside the `with`
    block, raw regex string, and splitting on r'\\s+' so runs of whitespace
    cannot produce empty fields.
    """
    with open('input.txt') as file:
        # Records are separated by a blank line.
        blocks = file.read().split('\n\n')
    passports = []
    for block in blocks:
        fields = re.split(r'\s+', block.strip())
        passports.append(dict(pair.split(':') for pair in fields))
    return passports
def is_valid_simple(passport) -> bool:
    """A passport is valid when all 8 fields are present, or when exactly
    the optional 'cid' field is missing."""
    if len(passport) == 8:
        return True
    return len(passport) == 7 and 'cid' not in passport
def is_within_range(field: str, min: int, max: int) -> bool:
    """True when int(field) lies in the inclusive range [min, max].

    NOTE: the parameter names shadow the builtins `min`/`max`; they are kept
    because they are part of the function's visible interface.
    """
    value = int(field)
    return value >= min and value <= max
def is_valid_height(height: str) -> bool:
    """Validate the 'hgt' field: 150-193 cm or 59-76 in.

    Uses raw regex strings (the original '\\d' literals relied on Python not
    yet rejecting invalid escape sequences).
    """
    if re.fullmatch(r'\d{3}cm', height):
        return 150 <= int(height[:3]) <= 193
    if re.fullmatch(r'\d{2}in', height):
        return 59 <= int(height[:2]) <= 76
    return False
def is_valid_complex(passport) -> bool:
    """Full field validation (part two rules): year ranges, height, hair
    colour, eye colour and passport id, on top of the presence check."""
    return (is_valid_simple(passport)
            and is_within_range(passport['byr'], 1920, 2002)
            and is_within_range(passport['iyr'], 2010, 2020)
            and is_within_range(passport['eyr'], 2020, 2030)
            and is_valid_height(passport['hgt'])
            and re.fullmatch(r'#[0-9a-f]{6}', passport['hcl']) is not None
            # BUG FIX: the original tested membership against str([...]), a
            # substring check that also accepted fragments such as "', '" or
            # partial colour names ("am", "b'").
            and passport['ecl'] in ('amb', 'blu', 'brn', 'gry', 'grn', 'hzl', 'oth')
            and re.fullmatch(r'\d{9}', passport['pid']) is not None)
if __name__ == '__main__':
    passports = read_passports()
    # Count valid passports directly; the original built reversed list copies
    # ([::-1]) for no reason before filtering.
    print('Part one', sum(1 for p in passports if is_valid_simple(p)))
    print('Part two', sum(1 for p in passports if is_valid_complex(p)))
|
from django.urls import path
from django.views.decorators.cache import cache_page
from .views import *
# URL routes for the forum app.
urlpatterns = [
    path('', ForumHome.as_view(), name='home'),
    # Function-based view; has no name, so it cannot be reversed by name.
    # NOTE(review): "archieve" is a typo for "archive", kept for URL compatibility.
    path('archieve/<int:year>/', archieve),
    path('about/', AboutForum.as_view(), name='about'),
    path('addbike/', AddBike.as_view(), name='add_bike'),
    path('contact/', ContactFormView.as_view(), name='contact'),
    # Post and category pages are cached for 60 seconds.
    path('post/<slug:post_slug>/', cache_page(60)(ShowPost.as_view()), name='post'),
    path('category/<slug:cat_slug>/', cache_page(60)(MotoCategory.as_view()), name='category'),
]
|
from __future__ import absolute_import
import threading
import time
from grid import CozGrid
from particle import Particle, Robot
from setting import *
from particle_filter import *
from utils import *
# map file
Map_filename = "map_test.json"
""" Autograder rubric
Total score = 100, two stage:
1. Build tracking:
if the filter can build tracking and output correct robot transformation
in error tolarance anytime in 100 steps, give 50 points
2. Maintain tracking:
let the filter run 100 steps, give score
score = correct pose percentage / 100 * 50
= #correct pose / #total pose * 50
"""
# number of steps to allow build tracking
Steps_build_tracking = 100
# number of steps must have stable tracking
Steps_stable_tracking = 100
# translational error allow
Err_trans = 1.0
# orientation erro allow in degree
Err_rot = 10
""" Robot motion: Robot move as a circle, and the circle params are defined as:
Robot_speed: robot moving speed (grid per move)
Robot_init_pose: initial robot transformation (X, Y, yaw in deg)
Dh_circular: Angle (in degree) to turn per run in circle motion mode
Here we give 5 example circles you can test, just uncomment the one you like
But note that in real grading we will use *another* 5 different circles,
so don't try to hack anything!
"""
# example circle 1
Robot_init_pose = (6, 3, 0)
Dh_circular = 10
Robot_speed = 0.5
"""
# example circle 2
Robot_init_pose = (5, 1, 0)
Dh_circular = 6
Robot_speed = 0.5
# example circle 3
Robot_init_pose = (5, 4, 0)
Dh_circular = 20
Robot_speed = 0.3
# example circle 4
Robot_init_pose = (3, 2, 0)
Dh_circular = 20
Robot_speed = 0.3
# example circle 5
Robot_init_pose = (9, 9, 180)
Dh_circular = 15
Robot_speed = 0.5
"""
def move_robot_circular(robot, dh, speed, grid):
    """Advance the robot one step along its circular path and return the
    resulting odometry (dx, dy, dh); raise ValueError on collision."""
    prev_x, prev_y = robot.x, robot.y
    prev_heading = robot.h
    motion = (speed, 0, dh)
    if robot.check_collsion(motion, grid):
        raise ValueError('Robot collision')
    robot.move(motion)
    # Express the realised displacement in the robot's previous frame.
    dx, dy = rotate_point(robot.x - prev_x, robot.y - prev_y, -prev_heading)
    return (dx, dy, dh)
class ParticleFilter:
    """Drives one simulation step at a time: moves the robot, runs the
    motion and measurement updates, and reports the current best estimate."""

    def __init__(self, particles, robbie, grid):
        self.particles = particles
        self.robbie = robbie
        self.grid = grid

    def update(self):
        """Advance one step and return (x, y, heading, confident)."""
        # Move the robot along its circle and read back noisy odometry.
        odom = add_odometry_noise(
            move_robot_circular(self.robbie, Dh_circular, Robot_speed, self.grid),
            heading_sigma=ODOM_HEAD_SIGMA, trans_sigma=ODOM_TRANS_SIGMA)

        # Prediction: propagate particles by the odometry.
        self.particles = motion_update(self.particles, odom)

        # Read the visible markers and corrupt each with measurement noise.
        noisy_markers = [
            add_marker_measurement_noise(m, trans_sigma=MARKER_TRANS_SIGMA,
                                         rot_sigma=MARKER_ROT_SIGMA)
            for m in self.robbie.read_markers(self.grid)
        ]

        # Correction: reweight/resample particles against the markers.
        self.particles = measurement_update(self.particles, noisy_markers, self.grid)

        # Best current pose estimate for display/grading.
        m_x, m_y, m_h, m_confident = compute_mean_pose(self.particles)
        return (m_x, m_y, m_h, m_confident)
if __name__ == "__main__":
    grid = CozGrid(Map_filename)

    # initial distribution assigns each particle an equal probability
    particles = Particle.create_random(PARTICLE_COUNT, grid)
    robbie = Robot(Robot_init_pose[0], Robot_init_pose[1], Robot_init_pose[2])
    particlefilter = ParticleFilter(particles, robbie, grid)

    score = 0

    # 1. steps to build tracking: record the earliest step at which the
    # estimated pose is inside both error thresholds of the true pose.
    steps_built_track = 9999  # sentinel meaning "never tracked"
    for i in range(0, Steps_build_tracking):
        est_pose = particlefilter.update()
        if grid_distance(est_pose[0], est_pose[1], robbie.x, robbie.y) < Err_trans \
            and math.fabs(diff_heading_deg(est_pose[2], robbie.h)) < Err_rot \
            and i+1 < steps_built_track:
            steps_built_track = i+1
    #print("steps_built_track =", steps_built_track)

    # Phase-1 scoring: full 50 points when tracking is built within 50 steps,
    # linearly fewer up to step 100, zero afterwards.
    if steps_built_track < 50:
        score = 50
    elif steps_built_track < 100:
        score = 100-steps_built_track
    else:
        score = 0

    print("\nPhrase 1")  # (sic: presumably "Phase 1")
    print("Number of steps to build track :", steps_built_track, "/", Steps_build_tracking)

    # Accumulated and maximum errors over the stable-tracking phase.
    acc_err_trans, acc_err_rot = 0.0, 0.0
    max_err_trans, max_err_rot = 0.0, 0.0
    step_track = 0  # steps whose error stayed inside both thresholds

    # 2. test tracking: each in-threshold step earns an equal share of 50 points.
    score_per_track = 50.0 / Steps_stable_tracking
    for i in range(0, Steps_stable_tracking):
        est_pose = particlefilter.update()

        err_trans = grid_distance(est_pose[0], est_pose[1], robbie.x, robbie.y)
        acc_err_trans += err_trans
        if max_err_trans < err_trans:
            max_err_trans = err_trans

        err_rot = math.fabs(diff_heading_deg(est_pose[2], robbie.h))
        acc_err_rot += err_rot
        if max_err_rot < err_rot:
            max_err_rot = err_rot

        if grid_distance(est_pose[0], est_pose[1], robbie.x, robbie.y) < Err_trans \
            and math.fabs(diff_heading_deg(est_pose[2], robbie.h)) < Err_rot:
            step_track += 1
            score += score_per_track

    print("\nPhrase 2")  # (sic: presumably "Phase 2")
    print("Number of steps error in threshold :", step_track, "/", Steps_stable_tracking)
    print("Average translational error :", acc_err_trans / Steps_stable_tracking)
    print("Average rotational error :", acc_err_rot / Steps_stable_tracking, "deg")
    print("Max translational error :", max_err_trans)
    print("Max rotational error :", max_err_rot, "deg")
    print("\nscore =", score)
|
import multiprocessing as mp
from functools import partial
import numpy as np
from recresid import recresid
def ssr_triang(n, h, X, y, k, intercept_only, use_mp=False):
    """
    Calculates the upper triangular matrix of squared residuals.

    Dispatches to the multiprocessing implementation when ``use_mp`` is
    true, otherwise to the sequential one; both take the same arguments.
    """
    if use_mp:
        return ssr_triang_par(n, h, X, y, k, intercept_only)
    return ssr_triang_seq(n, h, X, y, k, intercept_only)
def SSRi(i, n, h, X, y, k, intercept_only):
    """
    Compute the i'th row of the SSR triangular matrix, i.e. the cumulative
    sums of squared recursive residuals for the segment starting at i,
    padded with k leading NaNs.
    """
    if intercept_only:
        # Closed form for the intercept-only model: each recursive residual
        # is the deviation from the running mean, rescaled by sqrt(1 + 1/t).
        counts = np.arange(1, n - i + 1)
        running_mean = np.cumsum(y[i:]) / counts
        resid = (y[i:] - running_mean)[1:] * np.sqrt(1.0 + 1.0 / counts[:-1])
    else:
        resid = recresid(X[i:], y[i:])
    return np.concatenate((np.repeat(np.nan, k), np.cumsum(resid ** 2)))
def ssr_triang_seq(n, h, X, y, k, intercept_only):
    """
    Sequential version: one SSRi row per segment start index 0..(n-h).
    """
    rows = []
    for start in range(n - h + 1):
        rows.append(SSRi(start, n, h, X, y, k, intercept_only))
    return np.array(rows, dtype=object)
def ssr_triang_par(n, h, X, y, k, intercept_only):
    """
    Parallel version: maps SSRi over all segment start indexes with a
    process pool.

    BUG FIX: the original created the Pool without ever calling
    close()/join(), leaking worker processes; the context manager now
    terminates them when the map completes.
    """
    my_SSRi = partial(SSRi, n=n, h=h, X=X, y=y, k=k, intercept_only=intercept_only)
    indexes = np.arange(n - h + 1).astype(int)
    with mp.Pool(mp.cpu_count()) as pool:
        rval = pool.map(my_SSRi, indexes)
    return np.array(rval, dtype=object)
|
from django.conf.urls import url
from . import views
# URL routes (legacy regex-based `url()` style, Django < 2.0 idiom).
urlpatterns = [
    # Site root.
    url(r'^$', views.index, name='index'),
    # Session management.
    url(r'^login/$', views.login_view, name='login_view'),
    url(r'^logout/$', views.logout_view, name='logout_view'),
]
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.