index
int64 | repo_name
string | branch_name
string | path
string | content
string | import_graph
string |
|---|---|---|---|---|---|
29,534,892
|
hayesspencer/python
|
refs/heads/master
|
/Dojo_Assignments/python_stack/my_environments/book_authors_proj/books_authors_app/views.py
|
from django.shortcuts import render, redirect
from .models import *
from django.contrib import messages
def index(request):
context = {
"all_books" : Book.objects.all()
}
return render(request, "index.html", context)
# Create your views here.
|
{"/Dojo_Assignments/python_stack/my_environments/pythonReview/pythonReviewApp/views.py": ["/Dojo_Assignments/python_stack/my_environments/pythonReview/pythonReviewApp/models.py"], "/Dojo_Assignments/python_stack/my_environments/courses/coursesApp/views.py": ["/Dojo_Assignments/python_stack/my_environments/courses/coursesApp/models.py"], "/Dojo_Assignments/python_stack/my_environments/marCRUD/marCRUDApp/views.py": ["/Dojo_Assignments/python_stack/my_environments/marCRUD/marCRUDApp/models.py"]}
|
29,534,893
|
hayesspencer/python
|
refs/heads/master
|
/Dojo_Assignments/python_stack/my_environments/dojos_ninjas_proj/dojo_ninjas_app/views.py
|
from django.shortcuts import render, redirect
from .models import Dojo, Ninja
def index(request):
context = {
"dojos" : Dojo.objects.all()
}
return render(request, 'index.html', context)
def create_dojo(request):
Dojo.objects.create(
name=request.POST['name'],
city=request.POST['city'],
state=request.POST['state'],
)
return redirect('/')
def create_ninja(request):
Ninja.objects.create(
first_name=request.POST['first_name'],
last_name=request.POST['last_name'],
dojo_id=request.POST['dojo'],
)
return redirect('/')
# def delete(request):
# objects = Db_test.objects.all()
# if request.method == "POST":
# # Fetch list of items to delete, by ID
# items_to_delete = request.POST.getlist('delete_items')
# # Delete those items all in one go
# Db_test.objects.filter(pk__in=items_to_delete).delete()
# return render(request, "models_test/delete.html", {"values": objects})
# Create your views here.
|
{"/Dojo_Assignments/python_stack/my_environments/pythonReview/pythonReviewApp/views.py": ["/Dojo_Assignments/python_stack/my_environments/pythonReview/pythonReviewApp/models.py"], "/Dojo_Assignments/python_stack/my_environments/courses/coursesApp/views.py": ["/Dojo_Assignments/python_stack/my_environments/courses/coursesApp/models.py"], "/Dojo_Assignments/python_stack/my_environments/marCRUD/marCRUDApp/views.py": ["/Dojo_Assignments/python_stack/my_environments/marCRUD/marCRUDApp/models.py"]}
|
29,534,894
|
hayesspencer/python
|
refs/heads/master
|
/Dojo_Assignments/python_stack/my_environments/pythonReview/pythonReviewApp/urls.py
|
from django.urls import path
from . import views
urlpatterns = [
path('', views.index),
path("users/create",views.create_user),
path("main_page",views.main_page),
path("users/login",views.login),
path("logout",views.logout),
path('giraffes/create', views.create_giraffe),
]
|
{"/Dojo_Assignments/python_stack/my_environments/pythonReview/pythonReviewApp/views.py": ["/Dojo_Assignments/python_stack/my_environments/pythonReview/pythonReviewApp/models.py"], "/Dojo_Assignments/python_stack/my_environments/courses/coursesApp/views.py": ["/Dojo_Assignments/python_stack/my_environments/courses/coursesApp/models.py"], "/Dojo_Assignments/python_stack/my_environments/marCRUD/marCRUDApp/views.py": ["/Dojo_Assignments/python_stack/my_environments/marCRUD/marCRUDApp/models.py"]}
|
29,534,895
|
hayesspencer/python
|
refs/heads/master
|
/Dojo_Assignments/python_stack/my_environments/survey_revisited/appSurvey/views.py
|
from django.shortcuts import render, redirect
LANGS = (
'Python',
'MERN',
'Java',
)
LOCATIONS = (
'San Jose',
'Bellevue',
'Oakland',
)
# Create your views here.
def index(request):
context = {
'locations': LOCATIONS,
'languages': LANGS
}
return render(request, 'form.html', context)
def survey(request):
if request.method == 'GET':
return redirect('/')
request.session['result'] = {
'name': request.POST['name'],
'location': request.POST['location'],
'language': request.POST['language'],
'comment': request.POST['comment']
}
return redirect('/result')
def result(request):
context = {
'result': request.session['result']
}
return render(request, 'result.html', context)
# Create your views here.
|
{"/Dojo_Assignments/python_stack/my_environments/pythonReview/pythonReviewApp/views.py": ["/Dojo_Assignments/python_stack/my_environments/pythonReview/pythonReviewApp/models.py"], "/Dojo_Assignments/python_stack/my_environments/courses/coursesApp/views.py": ["/Dojo_Assignments/python_stack/my_environments/courses/coursesApp/models.py"], "/Dojo_Assignments/python_stack/my_environments/marCRUD/marCRUDApp/views.py": ["/Dojo_Assignments/python_stack/my_environments/marCRUD/marCRUDApp/models.py"]}
|
29,534,896
|
hayesspencer/python
|
refs/heads/master
|
/Dojo_Assignments/python_stack/my_environments/dojo_Survey/appThree/views.py
|
from django.shortcuts import render, redirect
def index(request):
return render(request, 'index.html')
def submission(request):
if request.method == 'POST':
context = {
'name': request.POST['name'],
'lang': request.POST['location'],
'loc': request.POST['language'],
'gen': request.POST['gender'],
'cmmt': request.POST['comment']
}
return render(request, 'results.html', context)
return render(request, 'results.html')
# Create your views here.
|
{"/Dojo_Assignments/python_stack/my_environments/pythonReview/pythonReviewApp/views.py": ["/Dojo_Assignments/python_stack/my_environments/pythonReview/pythonReviewApp/models.py"], "/Dojo_Assignments/python_stack/my_environments/courses/coursesApp/views.py": ["/Dojo_Assignments/python_stack/my_environments/courses/coursesApp/models.py"], "/Dojo_Assignments/python_stack/my_environments/marCRUD/marCRUDApp/views.py": ["/Dojo_Assignments/python_stack/my_environments/marCRUD/marCRUDApp/models.py"]}
|
29,534,897
|
hayesspencer/python
|
refs/heads/master
|
/Dojo_Assignments/python_stack/my_environments/semi_RestfulTvShows/shows/views.py
|
from django.shortcuts import render , redirect
from django.contrib import messages
from .models import Show
# Create your views here.
def index(request):
context = {
'shows': Show.objects.all()
}
return render(request, 'shows.html', context)
def new(request):
return render(request, 'new.html')
def create(request):
# CREATE THE SHOW
errors = Show.objects.validate(request.POST)
if errors:
for (key, value) in errors.items():
messages.error(request, value)
return redirect('/shows/new')
Show.objects.create(
title = request.POST['title'],
network = request.POST['network'],
release_date = request.POST['release_date'],
description = request.POST['description']
)
return redirect('/shows')
def edit(request, show_id):
one_show = Show.objects.get(id=show_id)
context = {
'show': one_show
}
return render(request, 'edit.html', context)
def update(request, show_id):
# update show!
to_update = Show.objects.get(id=show_id)
# updates each field
to_update.title = request.POST['title']
to_update.release_date = request.POST['release_date']
to_update.network = request.POST['network']
to_update.description = request.POST['description']
to_update.save()
return redirect('/shows/')
def show(request, show_id):
# query for one show with show_id
one_show = Show.objects.get(id=show_id)
context = {
'show': one_show
}
return render(request, 'show.html', context)
def delete(request, show_id):
# NOTE: Delete one show!
to_delete = Show.objects.get(id=show_id)
to_delete.delete()
return redirect('/shows')
# Create your views here.
|
{"/Dojo_Assignments/python_stack/my_environments/pythonReview/pythonReviewApp/views.py": ["/Dojo_Assignments/python_stack/my_environments/pythonReview/pythonReviewApp/models.py"], "/Dojo_Assignments/python_stack/my_environments/courses/coursesApp/views.py": ["/Dojo_Assignments/python_stack/my_environments/courses/coursesApp/models.py"], "/Dojo_Assignments/python_stack/my_environments/marCRUD/marCRUDApp/views.py": ["/Dojo_Assignments/python_stack/my_environments/marCRUD/marCRUDApp/models.py"]}
|
29,534,898
|
hayesspencer/python
|
refs/heads/master
|
/Dojo_Assignments/python_stack/my_environments/marCRUD/marCRUDApp/models.py
|
from django.db import models
# Create your models here.
class ChickenManager(models.Manager):
def create_validator(self, reqPOST):
errors = {}
chickens_with_name = Chicken.objects.filter(name=reqPOST['chicken_name'])
if len(chickens_with_name) >= 1:
errors['unique'] = "Name already taken"
if len(reqPOST['chicken_name']) < 3:
errors['name'] = "Name is too short, use at least 3 characters"
if len(reqPOST['color']) < 3:
errors['color'] = "Color is too short, use at least 3 characters"
return errors
def edit_validator(self, reqPOST, chicken_id):
errors = {}
chickens_with_name = Chicken.objects.filter(name=reqPOST['chicken_name'])
if len(chickens_with_name) >= 1:
if chicken_id != chickens_with_name[0].id:
errors['unique'] = "Name already taken"
if len(reqPOST['chicken_name']) < 3:
errors['name'] = "Name is too short, use at least 3 characters"
if len(reqPOST['color']) < 3:
errors['color'] = "Color is too short, use at least 3 characters"
return errors
class Chicken(models.Model):
name = models.TextField()
color = models.TextField()
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
objects = ChickenManager()
|
{"/Dojo_Assignments/python_stack/my_environments/pythonReview/pythonReviewApp/views.py": ["/Dojo_Assignments/python_stack/my_environments/pythonReview/pythonReviewApp/models.py"], "/Dojo_Assignments/python_stack/my_environments/courses/coursesApp/views.py": ["/Dojo_Assignments/python_stack/my_environments/courses/coursesApp/models.py"], "/Dojo_Assignments/python_stack/my_environments/marCRUD/marCRUDApp/views.py": ["/Dojo_Assignments/python_stack/my_environments/marCRUD/marCRUDApp/models.py"]}
|
29,534,899
|
hayesspencer/python
|
refs/heads/master
|
/Dojo_Assignments/python_stack/my_environments/pythonReview/pythonReviewApp/apps.py
|
from django.apps import AppConfig
class PythonreviewappConfig(AppConfig):
name = 'pythonReviewApp'
|
{"/Dojo_Assignments/python_stack/my_environments/pythonReview/pythonReviewApp/views.py": ["/Dojo_Assignments/python_stack/my_environments/pythonReview/pythonReviewApp/models.py"], "/Dojo_Assignments/python_stack/my_environments/courses/coursesApp/views.py": ["/Dojo_Assignments/python_stack/my_environments/courses/coursesApp/models.py"], "/Dojo_Assignments/python_stack/my_environments/marCRUD/marCRUDApp/views.py": ["/Dojo_Assignments/python_stack/my_environments/marCRUD/marCRUDApp/models.py"]}
|
29,534,900
|
hayesspencer/python
|
refs/heads/master
|
/Dojo_Assignments/python_stack/my_environments/marCRUD/marCRUDApp/views.py
|
from django.shortcuts import render, redirect
from .models import *
from django.contrib import messages
def index(request):
context = {
"all_chickens" : Chicken.objects.all()
}
return render(request, "index.html", context)
def create_chicken(request):
if request.method == "POST":
errors = Chicken.objects.create_validator(request.POST)
if len(errors) > 0:
for key, value in errors.items():
messages.error(request, value)
return redirect('/')
else:
chicken = Chicken.objects.create(name=request.POST['chicken_name'], color= request.POST['color'])
return redirect('/')
def show_chicken(request, chicken_id):
context = {
'one_chicken': Chicken.objects.get(id=chicken_id)
}
return render(request, "one_chicken.html", context)
def delete_chicken(request, chicken_id):
if request.method == "POST":
chicken_to_delete = Chicken.objects.get(id=chicken_id)
chicken_to_delete.delete()
return redirect('/')
def edit_chicken(request, chicken_id):
context = {
'one_chicken': Chicken.objects.get(id=chicken_id)
}
return render(request, "edit_chicken.html", context)
def update_chicken(request, chicken_id):
if request.method == "POST":
errors = Chicken.objects.edit_validator(request.POST, chicken_id)
if len(errors) > 0:
for key, value in errors.items():
messages.error(request, value)
return redirect(f'/chickens/{chicken_id}/edit')
else:
chicken = Chicken.objects.get(id=chicken_id)
chicken.name = request.POST['chicken_name']
chicken.color = request.POST['color']
chicken.save()
return redirect(f'/chickens/{chicken_id}')
return redirect('/')
# Create your views here.
|
{"/Dojo_Assignments/python_stack/my_environments/pythonReview/pythonReviewApp/views.py": ["/Dojo_Assignments/python_stack/my_environments/pythonReview/pythonReviewApp/models.py"], "/Dojo_Assignments/python_stack/my_environments/courses/coursesApp/views.py": ["/Dojo_Assignments/python_stack/my_environments/courses/coursesApp/models.py"], "/Dojo_Assignments/python_stack/my_environments/marCRUD/marCRUDApp/views.py": ["/Dojo_Assignments/python_stack/my_environments/marCRUD/marCRUDApp/models.py"]}
|
29,545,632
|
EmiDalessio/TATETI
|
refs/heads/master
|
/main_V07062021_2.py
|
# *************************************************
# * PROGRAMACION DE LAS MATRICES*
# *************************************************
# PARA DEBUGUERA REEPLAZA POR NADA "#DEBUGMODE |"
import datetime as dt # Para importar el modulo que maneja fecjas
import string #importo funciones para tener el abecedario en una lista.
from datetime import datetime
import random
class Tateti:
def ficha (self,VL_numero_de_jugador):
if VL_numero_de_jugador == "1":
ficha = "X"
elif VL_numero_de_jugador == "2":
ficha = "0"
else:
ficha = "="
return ficha
def random (self,litaOpciones):
import random
self.lista_robot_ran = []
#print ("ListaOpciones =",litaOpciones)
for i in litaOpciones:
if i == 'X':
continue
elif i == '0':
continue
self.lista_robot_ran.append(i)
n = random.randint(0,int(len(self.lista_robot_ran)-1))
self.Jugadar_Robot_Ran=self.lista_robot_ran[n]
print ("Jugada Robot: ", self.Jugadar_Robot_Ran)
return self.Jugadar_Robot_Ran
def random_inteligencia_B2 (self,litaOpciones):
import random
#print ("ListaOpciones =",litaOpciones)
self.lista_robot_ran = []
if 'B2' in litaOpciones:
self.Jugadar_Robot_Ran = 'B2'
print ("Jugada Robot:", self.Jugadar_Robot_Ran)
return self.Jugadar_Robot_Ran
for i in litaOpciones:
if i == 'X':
continue
elif i == '0':
continue
elif i == '=':
continue
elif '2' in i :
continue
elif 'B' in i :
continue
self.lista_robot_ran.append(i)
if len(self.lista_robot_ran)<=0:
for i in litaOpciones:
if i == 'X':
continue
elif i == '0':
continue
elif i == '=':
continue
self.lista_robot_ran.append(i)
n = random.randint(0,int(len(self.lista_robot_ran)-1))
self.Jugadar_Robot_Ran=self.lista_robot_ran[n]
print ("Jugada Robot: ", self.Jugadar_Robot_Ran)
return self.Jugadar_Robot_Ran
def Jugar_solo_ATAQUE_J2_DEFENSA_J1 (self,ListaGanadoraf,ListaJugadorf,listaOpciones):
Resultado = []
if len(ListaJugadorf) == 0:
Resultado = "RANDOM"
#print (Resultado)
return Resultado
#print ("ListaJugadorf 54", ListaJugadorf)
#print ("len(ListaJugadorf [0] 55)", len(ListaJugadorf [0]))
#print ("ListaJugadorf [0] 56", ListaJugadorf [0])
#print (len(ListaJugadorf))
if len(ListaJugadorf[0]) <= 1:
Resultado = "RANDOM"
# print (Resultado)
return Resultado
quit ()
# for i in ListaJugadorf:
# print ("ListaJugadorf_Random", ListaJugadorf)
# a = 0
# a = len(i)+a
# print (a)
# if a == 1:
# #print ("RANDOM")
# Resultado = "RANDOM"
# print ("Resultado1", Resultado)
# return Resultado
# quit ()
for elemLG in ListaGanadoraf:#['A1', 'A2', 'A3']
# print ("Pase por aca")
dif = []
#Hola=[]
#ListaJugaro2=tuple(ListaJugaro) ('A1','B1','C2')
for elemLJ in ListaJugadorf: #A1 ('A1','B1','C2')
if elemLJ in elemLG:
# print ("elemLJ_A 83" , elemLJ)
# print ("elemLG_A 84" , elemLG)
dif.append(elemLJ)
# print ("Dif_antes de Len 86", dif)
if len(dif)>=2:
for a in elemLG:
dif2=[]
if a in dif:
# print("a", a)
continue
# print ("DIF_after_1 94", dif2)
dif2.append(a)
# print("DIF2_LISTA 96", dif2)
# print("DIF2_1ER_ELEMENTO 97",dif2[0]) #hasta acá Perfecto
if dif2[0] not in listaOpciones:
Resultado = "RANDOM"
#print ("Sale del If dif2 q no está en la lista 101", Resultado)
continue
Resultado = dif2[0]
print("Jugada Robot: ", Resultado)
return Resultado
# Resultado = "RANDOM"
# print ("Resultado 109, "Resultado)
# return Resultado
# print ("DIF_2",dif2)
# print ("Robot_Log:", dif2[0])
# quit ()
Resultado = "RANDOM"
#print ("Resultado 116", Resultado)# #print("RANDOM")
return Resultado
def sumar( self, a, b):
self.c = a + b
return self.c,a,b
def reuperar_archivo_partida_guardada(self, archivo_x):
from io import open
import csv
try:
with open(archivo_x, newline='') as f:
reader = csv.reader(f)
data = list(reader)
except Exception:
print ("ERROR: Partida Inexistente")
self.archivo_guardado = input("Con qué nombre grabó su partida anterior?")
self.reuperar_archivo_partida_guardada(self.archivo_guardado)
#print(data)
self.Lista_user1 = list(data[0])
self.Lista_user2 = list(data[1])
self.lista_de_opciones_matriz = list(data[2])
turno_vl = open (archivo_x, "r")
Turno= turno_vl.readlines()[4]
self.turno2 = int(Turno) #data[4]
turno_vl = open (archivo_x, "r")
VL= turno_vl.readlines()[5]
self.VL= int(VL) #int(data[5])
return
def time_stamp_file_generator (self, name):
import datetime as dt
from io import open
time2= datetime.now()
time_stamp = str(time2.year) + str(time2.month) + str(time2.day) + str(time2.hour) + str(time2.minute) + str(time2.second)
nombre_archivo = str(name) + str(time_stamp) + ".txt"
archivo_texto = open (nombre_archivo, "w")
#print ("Tu Partida se guardó con el sigueinte Nombre", nombre_archivo)
return nombre_archivo
def guardar_partida (self):
input_user3 = input("Desea Guardar la partida? (Y/N) ").upper()
if input_user3 == "Y":
name= input("Con qué nombre desea grabar el archivo? ")
nombre_archivo= self.time_stamp_file_generator (name)
archivo_texto = open(nombre_archivo, "w+")
LJ1 = self.Lista_user1
LJ2 = self.Lista_user2
MFE = self.lista_de_opciones_matriz
LG = self.winall_auto_step2
Turno = int(self.turno) # no está recuperando correctamente el valor
VL=self.VL # no está recuperando correctamente el valor
archivo_texto.truncate()
#archivo_texto.write("Lista_Jugador_1=(")
for item in LJ1:
archivo_texto.write("%s," % str(item))
archivo_texto = open(nombre_archivo, "r+")
archivo_texto.seek(len(archivo_texto.read())-1)
archivo_texto.truncate()
#archivo_texto.write("\nLista_Jugador_2=(")
archivo_texto.write("\n")
for item in LJ2:
archivo_texto.write("%s," % str(item))
archivo_texto = open(nombre_archivo, "r+")
archivo_texto.seek(len(archivo_texto.read()))
archivo_texto.truncate()
#archivo_texto.write("\nMatriz_Front_End=(")
archivo_texto.write("\n")
for item in MFE:
archivo_texto.write("%s," % str(item))
archivo_texto = open(nombre_archivo, "r+")
archivo_texto.seek(len(archivo_texto.read())+1) # No entiendo porque tengo que sumarle 2 si sería el fianl del file!!!
archivo_texto.truncate()
#archivo_texto.write("\nLista_Ganadora=(")
archivo_texto.write("\n")
for item in LG:
archivo_texto.write("%s," % str(item))
archivo_texto = open(nombre_archivo, "r+")
archivo_texto.seek(len(archivo_texto.read())+2) # No entiendo porque tengo que sumarle 2 si sería el fianl del file!!!
archivo_texto.truncate()
archivo_texto.write("\n%s" % str(Turno))
archivo_texto.write("\n%s" % str(VL))
archivo_texto.close()
print ("Tu Partida se guardó con el sigueinte Nombre", nombre_archivo)
#print("Partida Guardada")
return
else:
print ("CHAU")
return
def __init__(self, origen="consola"):
self.Lista_user1 = []
self.Lista_user2 = []
self.Partido_Cerrado = 0
self.VL = 0
self.lista_de_opciones_matriz = []
self.winall_auto_step2 = []
self.origen = origen
self.turno = 1
self.partida_guardada = 'N'
self.JV = 1
self.partida_single_player = 2
self.ListaJugadorf =[]
self.ListaGanadoraf =[]
self.Li =[]
self.litaOpciones=[]
self.Jugadar_Robot_Ran = []
def input_usuario(self, msg, test_value=""):
if self.origen == "consola":
return input(msg)
elif self.origen == "test":
return test_value
def definir_matriz(self, tamanio=3):
if self.partida_guardada == 'Y':
return
try:
self.VL = int(self.input_usuario("\n\nDe qué tamaño requerimos la matriz: " ))
except Exception:
self.VL = tamanio
if self.VL < 3:
self.VL = tamanio
return
def input_jugador(self, numero_jugador, jugada=None):
if numero_jugador == "1":
ficha = "X"
elif numero_jugador == "2":
ficha = "0"
else:
ficha = "="
if self.origen == "test":
input_user = jugada
if numero_jugador == 'R':
input_userR = self.Jugar_solo_ATAQUE_J2_DEFENSA_J1 (self.winall_auto_step2,self.Lista_user2,self.lista_de_opciones_matriz)
#print ("ListaUser_ATAQUE 356", input_userR)
#print ("ListaUser1 357", self.Lista_user1)
#print ("self.lista_de_opciones_matriz 358", self.lista_de_opciones_matriz)
#print ("ListaGanadora 359", self.winall_auto_step2)
#print ("Input_User_1 284", input_userR)
if input_userR == "RANDOM":
input_userR = self.Jugar_solo_ATAQUE_J2_DEFENSA_J1 (self.winall_auto_step2,self.Lista_user1,self.lista_de_opciones_matriz)
# print("input_user_DEFENSA 363", input_userR)
if input_userR == "RANDOM":
input_userR = self.random_inteligencia_B2 (self.lista_de_opciones_matriz)
#print("PC Juega RANDOM 367", input_userR)
self.Lista_user2.append(input_userR)
self.lista_de_opciones_matriz = [ficha if i == input_userR else i for i in self.lista_de_opciones_matriz]
self.print_matriz(self.lista_de_opciones_matriz)
return
self.Lista_user2.append(input_userR)
self.lista_de_opciones_matriz = [ficha if i == input_userR else i for i in self.lista_de_opciones_matriz]
self.print_matriz(self.lista_de_opciones_matriz)
#return input_userR
# print("Estoy por aca 300")
termino_el_partido = self.validacion_ganadores(self.Lista_user2)
if termino_el_partido == 1:
print("GANO EL JUGADOR R [0]")
#print (self.turno)
#print (self.Lista_user2)
#self.lista_de_opciones_matriz = [ficha if i == input_userR else i for i in self.lista_de_opciones_matriz]
#self.print_matriz(self.lista_de_opciones_matriz)
return
else:
# print ("Input_User_1_dos 316", input_userR)
#print ("Ficha", ficha)
#ficha = "="
#print ("Ficha", ficha)
# print ("Lista_user2 320", self.Lista_user2)
# print ("lista_de_opciones_matriz 321", self.lista_de_opciones_matriz)
print("PC Juega: ATAQUE 322", input_userR)
self.Lista_user2.append(input_userR)
#print (self.Lista_user2)
#print (self.Lista_user2 [0])
#print (self.Lista_user2 [1])
self.lista_de_opciones_matriz = ["=" if i == input_userR else i for i in self.lista_de_opciones_matriz]
#print ("lista_de_opciones_matriz", self.lista_de_opciones_matriz)
#print("Matriz", self.lista_de_opciones_matriz)
#print (self.lista_de_opciones_matriz)
self.print_matriz(self.lista_de_opciones_matriz)
return
input_user = self.input_usuario(" (s)Salvar (q) Salir \n Elija un opción Jugador %s [%s]: " % (numero_jugador, ficha)).upper()
if self.origen == "test":
input_user = jugada
else:
if input_user =='S':
self.guardar_partida()
quit()
pass
while input_user not in self.lista_de_opciones_matriz or input_user in ("X", "0", "R"):
input_user = self.input_usuario(" (s)Salvar (q) Salir \n Elija un opción Válida Jugador %s [%s]: " % (numero_jugador, ficha)).upper()
if self.origen == "test":
input_user = jugada
else:
if input_user =='S':
self.guardar_partida()
quit()
if input_user in self.lista_de_opciones_matriz:
if numero_jugador == "1":
self.Lista_user1.append(input_user)
termino_el_partido = self.validacion_ganadores(self.Lista_user1)
if termino_el_partido == 1:
print("GANO EL JUGADOR 1 [X]")
self.lista_de_opciones_matriz = [ficha if i == input_user else i for i in self.lista_de_opciones_matriz]
self.print_matriz(self.lista_de_opciones_matriz)
#quit() #
return
elif numero_jugador == "2":
self.Lista_user2.append(input_user)
termino_el_partido = self.validacion_ganadores(self.Lista_user2)
if termino_el_partido == 1:
print("GANO EL JUGADOR 2 [0]")
#print (self.turno)
#print (self.Lista_user2)
self.lista_de_opciones_matriz = [ficha if i == input_user else i for i in self.lista_de_opciones_matriz]
self.print_matriz(self.lista_de_opciones_matriz)
return
self.lista_de_opciones_matriz = [ficha if i == input_user else i for i in self.lista_de_opciones_matriz]
# print(self.lista_de_opciones_matriz)
self.print_matriz(self.lista_de_opciones_matriz)
# DEBUGMODE print ("Lista_user1: ", Lista_user1)
# DEBUGMODE print ("Lista_user2:", Lista_user2)
else:
print("Crap")
def inicializar_tablero(self):
self.definir_matriz()
list_count = []
for i in range (self.VL):
list_count.append(i)
self.VL1 = self.VL
# Genera la Lista ganadora, que es una lista de listas.
WinAll_Auto_Step1 = [] #Genera la lista de items ganadores sin agruparlos, pero en orden del 1 al n -> Ejemplo A1,A2,A3
#Letra_ABC(0)
for a in list_count:
for i in range(self.VL):
#print ("I", i)
#print ("self.VL", self.VL)
letra = self.Letra_ABC(a)
WinAll_Auto_Step1.append(letra + str(i+1)) #saco la A y le meto la función. WinAll_Auto_Step1.append("A"+str(i+1))
# for i in range(self.VL):
# WinAll_Auto_Step1.append(Letra_ABC(1)+str(i+1))
# for i in range(self.VL):
# WinAll_Auto_Step1.append(Letra_ABC(2)+str(i+1))
for a in list_count:
for i in range(self.VL):
#print ("I", i)
#print ("self.VL", self.VL)
WinAll_Auto_Step1.append(self.Letra_ABC(i)+str(a+1))
# for i in range(self.VL):
# WinAll_Auto_Step1.append(Letra_ABC(i)+str())
for i in range(self.VL):
WinAll_Auto_Step1.append(self.Letra_ABC(i)+str(i+1)) # Diagonal
for i in range(self.VL):
WinAll_Auto_Step1.append(self.Letra_ABC(2-i)+str(i+1)) # Diagonal
# for i in range(self.VL):
# WinAll_Auto_Step1.append(Letra_ABC(0+i)+str(1))
# for i in range(self.VL):
# WinAll_Auto_Step1.append(Letra_ABC(0+i)+str(2))
# for i in range(self.VL):
# WinAll_Auto_Step1.append(Letra_ABC(0+i)+str(3))
self.winall_auto_step2=[] #Agrupa los resultados en elemento de 3 componentes -> ['A1','A2','A3'],['B1','B2','B3']
#print (len(WinAll_Auto_Step1))
# Go -Este paso es el que genera los Segmentos de elementos agrupados de a 3 o 4 o lo que defina self.VL. Que en resumen serán los segmentos que si algun jugador agruapa lo hace ganar
for i in range (0,len(WinAll_Auto_Step1),self.VL):
#print(i)
self.winall_auto_step2.append(list(WinAll_Auto_Step1[i:i+self.VL]))
# ******************************************************************************************
# Generar la lista de opciones a elegir para generar la matriz
# ******************************************************************************************
if self.partida_guardada == 'Y':
self.lista_de_opciones_matriz = self.lista_de_opciones_matriz
else:
self.lista_de_opciones_matriz = []
for a in list_count:
for i in range(self.VL):
#print ("I", i)
#print ("self.VL", self.VL)
self.lista_de_opciones_matriz.append(self.Letra_ABC(a)+str(i+1))
#print("Lista_de_Opciones_matri", self.lista_de_opciones_matriz)
#*********** PRINTS PARA CONTROLAR LOS RESULTADOS ******************************
#print(WinAll_Auto_Step1)
#print("Lista de elementos previos a la ganadora", WinAll_Auto_Step1)
#DEBUGMODE print("Lista de Segmentos Ganadora", self.winall_auto_step2)
#DEBUGMODE print("Cantidad de combinaciones pata ganar", len(self.winall_auto_step2))
# Con las siguiente 4 Linea se genera la matriz a presentar en el front End
self.print_matriz(self.lista_de_opciones_matriz)
# print(" ")
# print ("Lista Jugador 1" , Lista_user1)
# print ("Lista Jugador 2" , Lista_user2)
# print (" ")
# print(len(winall))
# print_matriz()
# print (" ")
def empezar(self):
if self.origen == "test":
self.cantida_de_jugadores = 2
else:
self.cantida_de_jugadores = input("HOLA!! BIENVENIDO AL JUEGO!! \n\nDesea empezar una partide de ctos jugadores (1 o 2)?: ",).upper()
# if self.cantida_de_jugadores == 1:
# print ("Nueva logica para el Jugador 2")
if self.origen == "test":
self.partida_guardada = 'N'
else:
self.partida_guardada = input("\n\nTiene alguna partida guardada? (Y/N): ",).upper()
while self.partida_guardada not in ('Y','N'):
self.partida_guardada = input("Ingrese un valor valido (Y/N): ",).upper()
continue
if self.partida_guardada == 'Y':
self.archivo_guardado = input("Con qué nombre grabó su partida anterior?")
self.reuperar_archivo_partida_guardada(self.archivo_guardado)
if self.partida_guardada == 'N':
self.turno = 1
else:
self.turno = self.turno2
self.inicializar_tablero()
if self.origen == "consola":
pass
elif self.origen == "test":
return self.partida_guardada == 'N'
self.termino_el_juego = 1 # Si teRmino del juego es == 0 se terminó el juego
while self.termino_el_juego == 1:
self.turno = self.turno + 1
# DEBUGMODE print ("Turno:" ,turno)
# self.winall_auto_step2=Actualizar_lista ()
# print ("SISMO",self.winall_auto_step2)
# Contar_X(self.winall_auto_step2)
# Entrada Jugardor 1
if self.turno > (self.VL * self.VL)+1:
print("TABLAS")
print ("TURNO", self.turno)
break
#print ("cantida_de_jugadores", self.cantida_de_jugadores)
if self.turno%2==0:
JV = "1"
else:
if self.cantida_de_jugadores == 1:
JV = 'R'
else:
JV = '2'
#print ("JV", JV)
self.input_jugador(JV)
self.termino_el_partido = self.validacion_ganadores(self.Lista_user1)
# print (termino_el_partido)
# print ("Partido_Cerrado After F", Partido_Cerrado)
if self.termino_el_partido == 1:
JV=1
fichas_f=self.ficha(str(JV))
print("GANO EL JUGADOR %s [%s]"% (JV,fichas_f))
break
else:
pass
# Entrada Jugardor 2
self.turno = self.turno + 1
# DEBUGMODE print ("Turno",turno)
# Actualizar_lista () # No entiendo porqué si actulizo la lista, cuando la toma para valider en el while lo hace con la antigua.
# print ("EMi",self.winall_auto_step2) # Acá es donde se envidencia que la liata self.winall_auto_step2 no es la que debería ser luego de correr la función.
if self.turno > (self.VL * self.VL)+1:
print("TABLAS")
print ("TURNO", self.turno)
break
#print ("antida_de_jugadores", self.cantida_de_jugadores)
if self.turno%2==0:
JV = "1"
else:
if self.cantida_de_jugadores == '1':
JV = 'R'
else:
JV = '2'
#print ("JV", JV)
self.input_jugador(JV)
self.termino_el_partido = self.validacion_ganadores(self.Lista_user2)
# print (termino_el_partido)
# print ("Partido_Cerrado After F", Partido_Cerrado)
if self.termino_el_partido == 1:
fichas_f=self.ficha (str(JV))
print("GANO EL JUGADOR %s [%s]"% (JV,fichas_f))
break
print("FIN DEL JUEGO")
@staticmethod
def Letra_ABC(posicion):
abecedario = list(string.ascii_uppercase)
letra = abecedario[posicion]
return letra
def print_matriz(self, lista):
print ("******"*self.VL)
Matriz_front_end= []
for i in range (0,len(lista),self.VL):
Matriz_front_end.append(list(lista[i:i+self.VL]))
for i in Matriz_front_end:
print (i)
print ("******"*self.VL)
print (self.turno)
return Matriz_front_end
def validacion_ganadores(self, listaPlayer): #Esta función valida si la lista de alguno de los ganadore es GANADORA
Partido_Cerrado = 0
for a in self.winall_auto_step2:
comparando = []
#DEBUGMODE print ("Fun_self.winall_auto_step2", self.winall_auto_step2)
#DEBUGMODE print ("FunC(a)", a)
if len(comparando) <= self.VL:
for i in listaPlayer:
if i in a:
#DEBUGMODE print ("ListaJugador a comparar",listaPlayer)# Lista_user1: ['B2', 'A2', 'B1']
#DEBUGMODE print ("Lista que compara", a) #a=['A2', 'B2', 'C2']
comparando.append(i)
if len(comparando) >= self.VL:
#DEBUGMODE print ("HAY GANADOR querido")
#DEBUGMODE print ("Partido_Cerrado Func", Partido_Cerrado)
return 1
else:
if len(comparando) >= self.VL:
break
else:
break
#print ("out of loop")
if __name__=='__main__':
JUGAR = Tateti()
JUGAR.empezar()
|
{"/test_V07062021_2.py": ["/main_V07062021.py"]}
|
29,545,633
|
EmiDalessio/TATETI
|
refs/heads/master
|
/test_V07062021_2.py
|
#
import unittest
from main_V07062021 import Tateti
class TestTateti:
    """Pytest suite for the Tateti (tic-tac-toe) engine.

    Each game test replays a fixed move sequence — players 1 and 2
    strictly alternating, player 1 first — then inspects captured stdout
    for the winner banner the engine prints.
    """

    WIN_1 = "GANO EL JUGADOR 1 [X]"
    WIN_2 = "GANO EL JUGADOR 2 [0]"

    def _play(self, moves):
        # Run one game in test mode, alternating players over *moves*.
        game = Tateti("test")
        game.inicializar_tablero()
        for idx, cell in enumerate(moves):
            game.input_jugador("1" if idx % 2 == 0 else "2", cell)

    def _check(self, capsys, p1_wins, p2_wins):
        # Assert presence/absence of each winner banner in the output.
        out = capsys.readouterr().out
        assert (self.WIN_1 in out) is p1_wins
        assert (self.WIN_2 in out) is p2_wins

    def test_matriz(self):  # 1
        assert Tateti.Letra_ABC(0) == "A"

    def test_jugador_1_gana_columna_1(self, capsys):  # 2
        self._play(["A1", "A2", "B1", "B2", "C1"])
        self._check(capsys, p1_wins=True, p2_wins=False)

    def test_bug_2021(self, capsys):  # 3
        self._play(["A1", "A2", "B1", "A3", "B3", "B2", "C1"])
        self._check(capsys, p1_wins=True, p2_wins=False)

    def test_jugador_2_gana_columna_1(self, capsys):  # 4
        self._play(["A2", "A1", "A3", "B1", "B3", "C1"])
        self._check(capsys, p1_wins=False, p2_wins=True)

    def test_jugador_1_gana_columna_2(self, capsys):  # 5
        self._play(["A2", "A1", "B2", "B1", "C2"])
        self._check(capsys, p1_wins=True, p2_wins=False)

    def test_jugador_1_gana_columna_3(self, capsys):  # 6
        self._play(["A3", "A1", "B3", "A2", "C3"])
        self._check(capsys, p1_wins=True, p2_wins=False)

    def test_jugador_1_gana_Fila_1(self, capsys):  # 7
        self._play(["A1", "B1", "A2", "B2", "A3"])
        self._check(capsys, p1_wins=True, p2_wins=False)

    def test_jugador_1_gana_Fila_2(self, capsys):  # 8
        self._play(["B1", "C1", "B2", "C2", "B3"])
        self._check(capsys, p1_wins=True, p2_wins=False)

    def test_jugador_1_gana_Fila_3(self, capsys):  # 9
        self._play(["C1", "A1", "C2", "A2", "C3"])
        self._check(capsys, p1_wins=True, p2_wins=False)

    def test_jugador_1_gana_Diagonal_1(self, capsys):  # 10
        self._play(["A1", "A2", "B2", "C1", "C3"])
        self._check(capsys, p1_wins=True, p2_wins=False)

    def test_jugador_1_gana_Diagonal_2(self, capsys):  # 11
        self._play(["C1", "A2", "B2", "B1", "A3"])
        self._check(capsys, p1_wins=True, p2_wins=False)

    def test_jugador_2_gana_Columna_2(self, capsys):  # 12
        self._play(["A1", "A2", "B1", "B2", "A3", "C2"])
        self._check(capsys, p1_wins=False, p2_wins=True)

    def test_jugador_2_gana_Columna_3(self, capsys):  # 13
        self._play(["A1", "A3", "B1", "B3", "C2", "C3"])
        self._check(capsys, p1_wins=False, p2_wins=True)

    def test_jugador_2_gana_fila_1(self, capsys):  # 14
        self._play(["B1", "A1", "B2", "A2", "C3", "A3"])
        self._check(capsys, p1_wins=False, p2_wins=True)

    def test_jugador_2_gana_fila_2(self, capsys):  # 15
        self._play(["A1", "B1", "C1", "B2", "C2", "B3"])
        self._check(capsys, p1_wins=False, p2_wins=True)

    def test_jugador_2_gana_fila_3(self, capsys):  # 16
        self._play(["A1", "C1", "B1", "C2", "B2", "C3"])
        self._check(capsys, p1_wins=False, p2_wins=True)

    def test_jugador_2_gana_diagonal_1(self, capsys):  # 17
        self._play(["A2", "A1", "B3", "B2", "C2", "C3"])
        self._check(capsys, p1_wins=False, p2_wins=True)

    def test_jugador_2_gana_diagonal_2(self, capsys):  # 18
        self._play(["A1", "C1", "B1", "B2", "B3", "A3"])
        self._check(capsys, p1_wins=False, p2_wins=True)

    def test_TABLAS(self, capsys):  # 19
        # Final move (C3) intentionally left unplayed, as in the original;
        # only the absence of both winner banners is asserted.
        self._play(["A1", "B2", "B1", "C1", "A3", "A2", "C2", "B3"])
        self._check(capsys, p1_wins=False, p2_wins=False)
#
|
{"/test_V07062021_2.py": ["/main_V07062021.py"]}
|
29,545,634
|
EmiDalessio/TATETI
|
refs/heads/master
|
/captura_File.py
|
from io import open
import csv
# Load a saved game from the fixed snapshot file and echo its contents.
# Fixes: the file was opened three times and every handle leaked; the two
# readlines() passes are collapsed into one; dead commented-out code removed.
SNAPSHOT = 'asdf2021610225153.txt'

with open(SNAPSHOT, newline='') as f:
    data = list(csv.reader(f))
print(data)

# First three CSV rows: each player's moves and the rendered board cells.
Lista_Jugador_1 = list(data[0])
Lista_Jugador_2 = list(data[1])
Matriz_Front_End = list(data[2])

# Raw lines 5 and 6 of the snapshot hold the turn counter and board size.
with open(SNAPSHOT, "r") as turno_vl:
    lineas = turno_vl.readlines()
Turno = lineas[4]
VL = lineas[5]

print("Lista_Jugador_1=", Lista_Jugador_1)
print("Lista_Jugador_2=", Lista_Jugador_2)
print("Matriz_Front_End=", Matriz_Front_End)
print("Turno", Turno)
print("VL", VL)
|
{"/test_V07062021_2.py": ["/main_V07062021.py"]}
|
29,545,635
|
EmiDalessio/TATETI
|
refs/heads/master
|
/test_divisones.py
|
# Print which player moves on each turn (J1 on odd turns, J2 on even) along
# with the parity value n % 2 for turns 1..8.
# Fixes: eight copy-pasted modulo computations replaced by one loop; the
# unused placeholders a, b and c were removed. Output is unchanged.
for turno in range(1, 9):
    etiqueta = "J1" if turno % 2 == 1 else "J2"
    print(etiqueta, turno % 2)
|
{"/test_V07062021_2.py": ["/main_V07062021.py"]}
|
29,545,636
|
EmiDalessio/TATETI
|
refs/heads/master
|
/Test_script.py
|
#import datetime as dt
from datetime import datetime
from io import open
# Build a second-resolution timestamp (unpadded fields, matching the
# filename scheme used by the save/load helpers) and create an empty .txt
# file named after it. Fix: the write handle was previously left open.
time2 = datetime.now()
time_stamp = (
    str(time2.year) + str(time2.month) + str(time2.day)
    + str(time2.hour) + str(time2.minute) + str(time2.second)
)
print(time_stamp)
archivo_texto = open(time_stamp + ".txt", "w")
archivo_texto.close()
def time_stamp_file_generator(name):
    """Create ``<name><timestamp>.txt`` in the cwd and return the open
    write handle (the caller owns closing it).

    Args:
        name: prefix for the generated filename.

    Returns:
        The open text-mode file object.

    Fixes: the local ``import datetime as dt`` / ``from io import open``
    were unused shadows (the code relied on module scope); an explicit
    local import now keeps the function self-contained.
    """
    from datetime import datetime
    now = datetime.now()
    # Unpadded fields, e.g. 2021-06-10 22:51:53 -> "2021610225153".
    time_stamp = (
        str(now.year) + str(now.month) + str(now.day)
        + str(now.hour) + str(now.minute) + str(now.second)
    )
    archivo_texto = open(str(name) + time_stamp + ".txt", "w")
    print(archivo_texto)
    return archivo_texto
def guardar_partida ():
    """Ask the user (Y/N prompt) whether to save the game; on 'Y' prompt
    for a base filename and create the timestamped save file via
    ``time_stamp_file_generator``.
    """
    input_user = input("Desea Guardar la partida? (Y/N) ").upper()
    if input_user == "Y":
        name= input("Con qué nombre desea grabar el archivo? ")
        time_stamp_file_generator (name)
        print("ok_file generado")
    print("arafue")  # debug trace, printed regardless of the answer
    return
guardar_partida()
|
{"/test_V07062021_2.py": ["/main_V07062021.py"]}
|
29,545,637
|
EmiDalessio/TATETI
|
refs/heads/master
|
/Logs.py
|
from io import open
from datetime import datetime
#Listas a capturar
def time_stamp_file_generator(name):
    """Create an empty ``<name><timestamp>.txt`` and return its filename.

    Args:
        name: prefix for the generated filename.

    Returns:
        str: the generated filename.

    Fixes: the file handle was previously opened and never closed
    (leak); the redundant shadowing imports inside the function were
    replaced by one explicit local import.
    """
    from datetime import datetime
    now = datetime.now()
    # Unpadded fields, e.g. "2021610225153" for 2021-06-10 22:51:53.
    time_stamp = (
        str(now.year) + str(now.month) + str(now.day)
        + str(now.hour) + str(now.minute) + str(now.second)
    )
    nombre_archivo = str(name) + time_stamp + ".txt"
    with open(nombre_archivo, "w"):
        pass  # just touch the file; the caller writes the content
    print(nombre_archivo)
    return nombre_archivo
# --- Demo data to persist -------------------------------------------------
Turno = 9
Lista_Jugador_1 = ('A1', 'A3', 'B2', 'C1', 'E2')
Lista_Jugador_2 = ('A2', 'B1', 'B3', 'B2')
Matriz_Front_End = ('A1', 'A2', 'A3', 'B1', 'B2', 'B3', 'C1', 'C2')
Lista_Ganadora = ('Ganar', 'Ganar', 'Ganar', 'pila')

nombre_archivo = time_stamp_file_generator('EAD')
print(nombre_archivo)


def _linea(nombre, valores):
    # Render one "name=('v1','v2',...)" line without a trailing comma.
    return nombre + "=(" + ",".join("'%s'" % str(v) for v in valores) + ")"


# Single write pass. Replaces the original reopen/seek/truncate juggling,
# which leaked file handles and relied on fragile character-offset
# arithmetic (the "why do I need +2?" comments) to strip trailing commas.
with open(nombre_archivo, "w") as archivo_texto:
    archivo_texto.write(_linea("Lista_Jugador_1", Lista_Jugador_1))
    archivo_texto.write("\n" + _linea("Lista_Jugador_2", Lista_Jugador_2))
    archivo_texto.write("\n" + _linea("Matriz_Front_End", Matriz_Front_End))
    archivo_texto.write("\n" + _linea("Lista_Ganadora", Lista_Ganadora))
print("Partida Guardada")
|
{"/test_V07062021_2.py": ["/main_V07062021.py"]}
|
29,649,820
|
chaddymac/BackendResume
|
refs/heads/master
|
/Visitorcountread.py
|
import json
import boto3
# AWS region the Visitorcount table lives in.
region_name = "us-east-2"


def lambda_handler(event, context):
    """Increment and return the visitor counter stored in DynamoDB.

    Reads item ``id="item"`` from the ``Visitorcount`` table, bumps its
    ``count`` attribute by one, writes it back, and returns an
    API-Gateway-style response carrying the new count.

    NOTE(review): assumes the item already exists — a missing item raises
    ``KeyError``; see the sibling app.py handler for the seeded variant.

    Cleanup: dead commented-out code removed; docs added.
    """
    dynamodb = boto3.client('dynamodb', region_name=region_name)
    response = dynamodb.get_item(
        TableName='Visitorcount',
        Key={'id': {'S': "item"}})
    # DynamoDB numbers come back as strings; parse, then bump.
    count = int(response["Item"]["count"]['N']) + 1
    dynamodb.put_item(
        TableName='Visitorcount',
        Item={
            'id': {
                'S': 'item',
            },
            'count': {
                'N': str(count),
            },
        },
    )
    print(response)
    return {
        'statusCode': 200,
        'body': json.dumps({"visitor": str(count)}),
        'headers': {
            'Access-Control-Allow-Origin': '*',
            'Content-Type': 'application/json',
            'Access-Control-Allow-Headers': 'Content-Type'
        },
    }


if __name__ == '__main__':
    lambda_handler('a', 'b')
|
{"/test.py": ["/Visitorcountread.py"]}
|
29,649,821
|
chaddymac/BackendResume
|
refs/heads/master
|
/visitor_count/app.py
|
import json
import boto3
def lambda_handler(event, context):
    """Increment and return the visitor counter stored in DynamoDB,
    seeding the item with count 0 on first sight.

    Bug fix: when the item did not yet exist, ``count`` was never
    assigned, so the closing ``json.dumps({"visitor": str(count)})``
    raised ``NameError``. The counter is now initialised to 0 in that
    branch, matching the '0' written to the table.
    """
    print("Change")
    dynamodb = boto3.client('dynamodb')
    response = dynamodb.get_item(
        TableName='htmlresume-Visitorcount-1SODZD7NQZA44',
        Key={'id': {'S': "item"}})
    print(response)
    if "Item" not in response:
        # First visit ever: seed the item with a zero count.
        count = 0
        dynamodb.put_item(TableName='htmlresume-Visitorcount-1SODZD7NQZA44',
                          Item={'id': {'S': 'item', }, 'count': {'N': '0', }})
    else:
        # DynamoDB numbers come back as strings; parse, then bump.
        count = int(response["Item"]["count"]['N']) + 1
        dynamodb.put_item(
            TableName='htmlresume-Visitorcount-1SODZD7NQZA44',
            Item={
                'id': {
                    'S': 'item',
                },
                'count': {
                    'N': str(count),
                },
            },
        )
    print(response)
    return {
        'statusCode': 200,
        'body': json.dumps({"visitor": str(count)}),
        'headers': {
            'Access-Control-Allow-Origin': '*',
            'Content-Type': 'application/json',
            'Access-Control-Allow-Headers': 'Content-Type'
        },
    }


if __name__ == '__main__':
    lambda_handler('a', 'b')
|
{"/test.py": ["/Visitorcountread.py"]}
|
29,688,719
|
VyacheslavVG/parser
|
refs/heads/master
|
/bot/selectbd.py
|
import sqlite3
def sbor_key_words():
    """Return the deduplicated, lower-cased keywords from all subscriptions.

    Reads column 1 of every row in the ``subscriptions`` table of the
    bot's SQLite database.

    Returns:
        list[str]: unique keywords (order unspecified, as before).

    Fixes: the connection is now always closed instead of leaking, and
    the pointless commit before a read-only SELECT was dropped.
    """
    # Deployment-specific absolute path, kept as-is.
    conn = sqlite3.connect(r'D:\parserandbot/bot/bd.db')
    try:
        massive = conn.execute("SELECT * FROM subscriptions ").fetchall()
    finally:
        conn.close()
    return list({item[1].lower() for item in massive})
|
{"/tki.py": ["/my_parser.py"]}
|
29,688,720
|
VyacheslavVG/parser
|
refs/heads/master
|
/bot/bot.py
|
from aiogram import types , executor , Dispatcher ,Bot
from config import TOKEN
import logging
from aiogram.types import InlineKeyboardButton , InlineKeyboardMarkup
from aiogram.dispatcher import FSMContext
from aiogram.dispatcher.filters.state import State, StatesGroup
from aiogram.contrib.fsm_storage.memory import MemoryStorage
import sqlite3
# Digest times the user may pick, plus the 'Готово' (done) confirmation item.
available_time =['09:00','12:00','15:00','18:00','21:00','00:00','Готово']
class EnterTheData(StatesGroup):
    """FSM states for the subscription dialogue: keyword first, then time."""
    enter_the_keyword = State()
    enter_the_time = State()
# Bot bootstrap: logging, the bot client, and an in-memory FSM dispatcher.
logging.basicConfig(level=logging.INFO)
bot = Bot(token=TOKEN)
dp = Dispatcher(bot, storage=MemoryStorage())
# Draft subscription being assembled during the dialogue; t1..t6 map to the
# six delivery times. NOTE(review): module-global, so concurrent users would
# overwrite each other's draft — consider keeping this in per-user FSM data.
data = {'id':None , 'key_word': None,'t1':False,'t2':False,'t3':False,'t4':False,'t5':False,'t6':False}
@dp.message_handler(commands='start')
async def start(message: types.Message) :
    # /start: greet the user and show a single inline "Далее" (next) button.
    markup = InlineKeyboardMarkup()
    btn_1 = InlineKeyboardButton('Далее', callback_data='btn_1')
    markup.add(btn_1)
    await bot.send_message(message.chat.id, 'Этот бот создан для помощи фрилансерам в поисках заказов.Нажмите кнопку далее чтобы использовать бота.', reply_markup=markup)
@dp.callback_query_handler(text='btn_1')
async def next(message: types.Message):
    """'Далее' pressed: record the user's id in the draft subscription and
    start the FSM dialogue by asking for the search keyword.

    Fixes: typo in the user-visible prompt ('слво' -> 'слово'); dead
    commented-out subscriber-bookkeeping code removed.
    """
    id = message.from_user.id
    print(id)
    await bot.send_message(message.from_user.id, 'Введите ключевое слово по которому будет осуществляться поиск заказов.')
    await EnterTheData.enter_the_keyword.set()
    data['id'] = id
@dp.message_handler(state=EnterTheData.enter_the_keyword)
async def key_word(message: types.Message, state: FSMContext):
    # Keyword received: stash it (FSM data + draft dict) and show the
    # reply keyboard with the available delivery times.
    answer = message.text
    print(answer)
    keyboard = types.ReplyKeyboardMarkup(resize_keyboard=True)
    for name in available_time:
        keyboard.add(name)
    await state.update_data(answer1=answer)
    await message.answer('Выберите время в которое присылать рассылку.',reply_markup=keyboard)
    await EnterTheData.enter_the_time.set()
    data['key_word'] = answer
# Maps a chosen delivery time to its flag key in the draft subscription.
_TIME_SLOTS = {
    '09:00': 't1',
    '12:00': 't2',
    '15:00': 't3',
    '18:00': 't4',
    '21:00': 't5',
    '00:00': 't6',
}


@dp.message_handler(state=EnterTheData.enter_the_time)
async def time(message: types.Message, state: FSMContext):
    """Time-of-day step: flag each picked slot in the draft; on 'Готово'
    persist the subscription row and finish the FSM.

    Fixes: the six-branch if/elif chain became a dict lookup; the SQLite
    connection is now closed instead of leaking; dead commented-out code
    removed.
    """
    time = message.text
    print(time)
    await state.update_data(time1=time)
    slot = _TIME_SLOTS.get(time)
    if slot is not None:
        data[slot] = True
    print(data)
    if message.text == 'Готово':
        await message.answer('вы успешно подписаны')
        conn = sqlite3.connect('bd.db')
        try:
            conn.execute(
                "INSERT INTO subscriptions(user_id, key_word, t0900 , t1200, t1500, t1800 , t2100, t0000) VALUES(:id, :key_word, :t1 , :t2 , :t3, :t4, :t5, :t6)",
                data)
            conn.commit()
        finally:
            conn.close()
        print(data)
        await state.finish()
# Startup sanity dump: print current subscriptions, then start long polling.
conn = sqlite3.connect("bd.db")
c = conn.cursor()
c.execute("SELECT * FROM subscriptions ")
massive = c.fetchall()
conn.close()  # fix: the connection was previously left open
print(massive)
executor.start_polling(dp)
|
{"/tki.py": ["/my_parser.py"]}
|
29,764,608
|
pollitosabroson/fly
|
refs/heads/main
|
/api/core/schemas/schema.py
|
class BaseSchema:
    """Base class pairing a ``schema`` validator with the ``data`` it checks.

    Subclasses define the class attribute ``schema`` (a ``schema.Schema``
    instance) and pass the raw payload to ``__init__``.

    Bug fix: ``__init__`` previously assigned the payload to
    ``self.schema`` — clobbering the validator — and never set
    ``self.data``, so ``validate()``/``is_valid()`` raised
    ``AttributeError``. The payload now lands in ``self.data``.
    """

    schema = None  # validator instance; overridden by subclasses

    def __init__(self, data):
        self.data = data

    def validate(self):
        """Validate schema and return formated data

        Returns:
            TYPE: Formated data
        """
        return self.schema.validate(self.data)

    def is_valid(self):
        """Check if schema is valid

        Returns:
            bool: True if schema is valid. False otherwise
        """
        return self.schema.is_valid(self.data)
|
{"/api/contents/entities/__init__.py": ["/api/contents/entities/entity.py"], "/api/core/csv/__init__.py": ["/api/core/csv/parse_csv.py"], "/api/core/schemas/__init__.py": ["/api/core/schemas/schema.py"], "/api/channels/repositories/__init__.py": ["/api/channels/repositories/repository.py"], "/api/channels/tests/tests_score_single_channel.py": ["/api/channels/tests/factory.py"], "/api/channels/tests/tests_score_multiple_channels.py": ["/api/channels/tests/factory.py"], "/api/envs/repository.py": ["/api/envs/models.py"], "/api/channels/serializers/__init__.py": ["/api/channels/serializers/channels.py"], "/api/channels/managers/__init__.py": ["/api/channels/managers/manager.py"], "/api/core/models/__init__.py": ["/api/core/models/image_models.py", "/api/core/models/title_model.py"], "/api/contents/serializers/__init__.py": ["/api/contents/serializers/content.py"], "/api/envs/serializers/__init__.py": ["/api/envs/serializers/envs_list.py"]}
|
29,764,609
|
pollitosabroson/fly
|
refs/heads/main
|
/api/contents/entities/__init__.py
|
from .entity import ContenSourceEntity
__all__ = ['ContenSourceEntity', ]
|
{"/api/contents/entities/__init__.py": ["/api/contents/entities/entity.py"], "/api/core/csv/__init__.py": ["/api/core/csv/parse_csv.py"], "/api/core/schemas/__init__.py": ["/api/core/schemas/schema.py"], "/api/channels/repositories/__init__.py": ["/api/channels/repositories/repository.py"], "/api/channels/tests/tests_score_single_channel.py": ["/api/channels/tests/factory.py"], "/api/channels/tests/tests_score_multiple_channels.py": ["/api/channels/tests/factory.py"], "/api/envs/repository.py": ["/api/envs/models.py"], "/api/channels/serializers/__init__.py": ["/api/channels/serializers/channels.py"], "/api/channels/managers/__init__.py": ["/api/channels/managers/manager.py"], "/api/core/models/__init__.py": ["/api/core/models/image_models.py", "/api/core/models/title_model.py"], "/api/contents/serializers/__init__.py": ["/api/contents/serializers/content.py"], "/api/envs/serializers/__init__.py": ["/api/envs/serializers/envs_list.py"]}
|
29,764,610
|
pollitosabroson/fly
|
refs/heads/main
|
/api/core/models/image_models.py
|
# -*- coding: utf-8 -*-
import os
from django.db import models
def content_file_name(instance, filename):
    """Upload-path builder for ``ImageModel.image``.

    Args:
        instance: model instance the file is attached to.
        filename: client-supplied path of the uploaded file.

    Returns:
        str: ``<verbose_name>/<instance id>/<basename of filename>``.

    Bug fix: the sanitised ``filename`` was computed but never used — the
    returned path ended in a literal placeholder instead of the file's
    actual name. The pointless single-argument ``os.path.join`` wrapper
    was dropped as well.
    """
    filename = os.path.basename(filename)
    return f'{instance._meta.verbose_name}/{instance.id}/{filename}'  # NOQA
class ImageModel(models.Model):
    """Abstract base adding an optional ``image`` field whose upload path
    is derived per-instance by :func:`content_file_name`.
    """
    image = models.ImageField(
        upload_to=content_file_name,
        blank=True, null=True
    )

    class Meta:
        """Marks the model abstract (no table of its own)."""
        abstract = True
|
{"/api/contents/entities/__init__.py": ["/api/contents/entities/entity.py"], "/api/core/csv/__init__.py": ["/api/core/csv/parse_csv.py"], "/api/core/schemas/__init__.py": ["/api/core/schemas/schema.py"], "/api/channels/repositories/__init__.py": ["/api/channels/repositories/repository.py"], "/api/channels/tests/tests_score_single_channel.py": ["/api/channels/tests/factory.py"], "/api/channels/tests/tests_score_multiple_channels.py": ["/api/channels/tests/factory.py"], "/api/envs/repository.py": ["/api/envs/models.py"], "/api/channels/serializers/__init__.py": ["/api/channels/serializers/channels.py"], "/api/channels/managers/__init__.py": ["/api/channels/managers/manager.py"], "/api/core/models/__init__.py": ["/api/core/models/image_models.py", "/api/core/models/title_model.py"], "/api/contents/serializers/__init__.py": ["/api/contents/serializers/content.py"], "/api/envs/serializers/__init__.py": ["/api/envs/serializers/envs_list.py"]}
|
29,764,611
|
pollitosabroson/fly
|
refs/heads/main
|
/api/envs/models.py
|
import logging
from core.models import TimeStampedModel
from django.db import models
logger = logging.getLogger(__name__)
class Attributes(TimeStampedModel):
    """Attribute value belonging to one :class:`TypeEnvs` category."""
    name = models.CharField(max_length=255)
    # Owning category; reverse accessor on TypeEnvs is ``type_envs``.
    envs_type = models.ForeignKey(
        'TypeEnvs',
        models.DO_NOTHING,
        blank=False, null=False,
        related_name="type_envs"
    )

    class Meta:
        # NOTE(review): identical verbose names to TypeEnvs — possibly a
        # copy-paste ("attribute"/"attributes" intended?); confirm before
        # changing, as admin labels depend on it.
        verbose_name = "env"
        verbose_name_plural = "envs"

    def __str__(self):
        return self.name

    def to_json(self):
        """Convert the attribute (plus its category) to a JSON-ready dict.

        Return:
            Dict: id, timestamps and name of this attribute, and the
            id/name of its ``envs_type``.
        """
        return {
            "id": self.id,
            "created_date": str(self.created_date),
            "last_modified": str(self.last_modified),
            "name": self.name,
            "envs_type": {
                "id": self.envs_type.id,
                "name": self.envs_type.name,
            }
        }

    @classmethod
    def get_attributes_by_typeenvs(cls, env_type):
        """Return a queryset of the attributes in the given category."""
        values = cls.objects.filter(
            envs_type=env_type
        )
        return values
class TypeEnvs(TimeStampedModel):
    """Category grouping :class:`Attributes` values."""
    # Well-known category names referenced by callers when filtering.
    LANGUAGE = "Language"
    CONTENT = "Type Content"
    name = models.CharField(max_length=255)

    class Meta:
        verbose_name = "env"
        verbose_name_plural = "envs"

    def __str__(self):
        return self.name

    def to_json(self):
        """Convert the category and its attributes to a JSON-ready dict.

        Return:
            Dict: id, timestamps and name, plus id/name of every attribute
            linked via the ``type_envs`` reverse relation.
        """
        return {
            "id": self.id,
            "created_date": str(self.created_date),
            "last_modified": str(self.last_modified),
            "name": self.name,
            "attributes": [
                {
                    "id": x.id,
                    "name": x.name,
                }
                for x in self.type_envs.all()
            ]
        }

    @classmethod
    def get_env(cls, env_name):
        """Return the category named ``env_name``, or None when absent."""
        try:
            return cls.objects.get(name=env_name)
        except cls.DoesNotExist:
            return None
|
{"/api/contents/entities/__init__.py": ["/api/contents/entities/entity.py"], "/api/core/csv/__init__.py": ["/api/core/csv/parse_csv.py"], "/api/core/schemas/__init__.py": ["/api/core/schemas/schema.py"], "/api/channels/repositories/__init__.py": ["/api/channels/repositories/repository.py"], "/api/channels/tests/tests_score_single_channel.py": ["/api/channels/tests/factory.py"], "/api/channels/tests/tests_score_multiple_channels.py": ["/api/channels/tests/factory.py"], "/api/envs/repository.py": ["/api/envs/models.py"], "/api/channels/serializers/__init__.py": ["/api/channels/serializers/channels.py"], "/api/channels/managers/__init__.py": ["/api/channels/managers/manager.py"], "/api/core/models/__init__.py": ["/api/core/models/image_models.py", "/api/core/models/title_model.py"], "/api/contents/serializers/__init__.py": ["/api/contents/serializers/content.py"], "/api/envs/serializers/__init__.py": ["/api/envs/serializers/envs_list.py"]}
|
29,764,612
|
pollitosabroson/fly
|
refs/heads/main
|
/api/core/utils.py
|
import json
def parse_response_content(response):
    """Decode an HTTP response body, parsing JSON when possible.

    Args:
        response: object exposing a ``content`` bytes attribute.

    Returns:
        The parsed JSON value, or the decoded string when the body is
        not valid JSON.

    Improvement: the body is now decoded once instead of twice (the
    original decoded again in the except branch).
    """
    text = response.content.decode()
    try:
        return json.loads(text)
    except json.JSONDecodeError:
        return text
|
{"/api/contents/entities/__init__.py": ["/api/contents/entities/entity.py"], "/api/core/csv/__init__.py": ["/api/core/csv/parse_csv.py"], "/api/core/schemas/__init__.py": ["/api/core/schemas/schema.py"], "/api/channels/repositories/__init__.py": ["/api/channels/repositories/repository.py"], "/api/channels/tests/tests_score_single_channel.py": ["/api/channels/tests/factory.py"], "/api/channels/tests/tests_score_multiple_channels.py": ["/api/channels/tests/factory.py"], "/api/envs/repository.py": ["/api/envs/models.py"], "/api/channels/serializers/__init__.py": ["/api/channels/serializers/channels.py"], "/api/channels/managers/__init__.py": ["/api/channels/managers/manager.py"], "/api/core/models/__init__.py": ["/api/core/models/image_models.py", "/api/core/models/title_model.py"], "/api/contents/serializers/__init__.py": ["/api/contents/serializers/content.py"], "/api/envs/serializers/__init__.py": ["/api/envs/serializers/envs_list.py"]}
|
29,764,613
|
pollitosabroson/fly
|
refs/heads/main
|
/api/channels/tests/factory.py
|
import logging
import random
from channels.models import Channel
from contents.entities import ContenSourceEntity
from contents.models import Content
from django.core.files.base import ContentFile
from envs.models import Attributes, TypeEnvs
logger = logging.getLogger(__name__)
class Contentfactory:
    """Class in charge of creating content objects."""
    DEAFUALT_CONTENT_NAME = 'Test_content_factory'  # (sic) kept for compat
    DEFAULT_SCORE = 8.5

    @classmethod
    def _get_content(cls, channel, **kwargs):
        """
        Return instance content
        """
        # ``value`` only differentiates the generated file name.
        value = kwargs.get('value', 1)
        attributes = Attributes.objects.filter(
            envs_type__name=TypeEnvs.CONTENT
        )
        # A random "Type Content" attribute becomes the content kind.
        kind = random.choice(attributes)
        src = cls.get_sourcre()
        content = Content(
            channel=channel,
            kind=kind,
            src=src.to_dict()
        )
        content.save()
        file_name = f'{cls.DEAFUALT_CONTENT_NAME}-{value}.{kind.name.lower()}' # NOQA
        content.save()  # NOTE(review): second save looks redundant — confirm
        content.content.save(
            file_name,
            ContentFile(
                "This is the content".encode()
            )
        )
        return content

    @staticmethod
    def get_sourcre(score=DEFAULT_SCORE):
        """Create source content.

        Arg:
            score(float): Total score for content
        Return:
            ContenSourceEntity: entity wrapping the source metadata.
        """
        src = ContenSourceEntity(
            score=score,
            directors=['directors 1', 'directors 3'],
            author=['author 1', 'author 2'],
            genres=['genres 1', 'genres 2'],
            writers=['writers 1', 'writers 2'],
        )
        return src

    @classmethod
    def get_content(cls, channel, **kwargs):
        """Get single content.

        Args:
            channel(Instance): channel the content is attached to
        Return:
            Instance: Instance of content
        """
        content = cls._get_content(
            channel=channel,
            **kwargs
        )
        return content
class ChannelFactory:
    """Class in charge of creating channel objects for tests."""

    DEFAULT_TEST_CHANNEL_NAME = 'channel_test'
    DEFUALT_CONTENT = 5  # (sic) name kept for backward compatibility

    @staticmethod
    def _get_channel(channel_name=DEFAULT_TEST_CHANNEL_NAME, **kwargs):
        """Return a channel titled ``<channel_name>-<language>`` with a
        randomly chosen Language attribute (get_or_create, so reusable).
        """
        languages = Attributes.objects.filter(
            envs_type__name=TypeEnvs.LANGUAGE
        )
        language = random.choice(languages)
        channel_name = f'{channel_name}-{language.name}'
        channel, _ = Channel.objects.get_or_create(
            title=channel_name,
            language=language
        )
        channel.save()
        return channel

    @classmethod
    def get_channel(cls, **kwargs):
        """get single channel

        Return:
            Instance: Channel instance
        """
        return cls._get_channel(
            **kwargs
        )

    @classmethod
    def create_channel_wiht_content(cls, **kwargs):
        """Create (or reuse) a channel and attach ``total_contents`` items.

        Bug fixes: a supplied ``channel`` kwarg previously left the local
        ``channel`` unbound (NameError at the loop); the custom name was
        passed as ``name=`` although ``_get_channel`` expects
        ``channel_name=``, so it was silently swallowed by ``**kwargs``
        and ignored.

        Return:
            Instance: Channel instance
        """
        channel = kwargs.get('channel')
        if channel is None:
            channel = cls._get_channel(
                channel_name=kwargs.get(
                    'channel_name', cls.DEFAULT_TEST_CHANNEL_NAME
                )
            )
        for value in range(0, kwargs.get('total_contents', cls.DEFUALT_CONTENT)):  # NOQA
            Contentfactory.get_content(
                channel=channel,
                value=value
            )
        return channel
|
{"/api/contents/entities/__init__.py": ["/api/contents/entities/entity.py"], "/api/core/csv/__init__.py": ["/api/core/csv/parse_csv.py"], "/api/core/schemas/__init__.py": ["/api/core/schemas/schema.py"], "/api/channels/repositories/__init__.py": ["/api/channels/repositories/repository.py"], "/api/channels/tests/tests_score_single_channel.py": ["/api/channels/tests/factory.py"], "/api/channels/tests/tests_score_multiple_channels.py": ["/api/channels/tests/factory.py"], "/api/envs/repository.py": ["/api/envs/models.py"], "/api/channels/serializers/__init__.py": ["/api/channels/serializers/channels.py"], "/api/channels/managers/__init__.py": ["/api/channels/managers/manager.py"], "/api/core/models/__init__.py": ["/api/core/models/image_models.py", "/api/core/models/title_model.py"], "/api/contents/serializers/__init__.py": ["/api/contents/serializers/content.py"], "/api/envs/serializers/__init__.py": ["/api/envs/serializers/envs_list.py"]}
|
29,764,614
|
pollitosabroson/fly
|
refs/heads/main
|
/api/channels/managers/manager.py
|
import logging
from django.db import models
from django.db.models import Count, F, Sum
from django.db.models.fields.json import KeyTextTransform
from django.db.models.functions import Cast, Coalesce
logger = logging.getLogger(__name__)
class ChannelQueryset(models.QuerySet):
    """Channel Queryset."""
    def calculate_score(self):
        """Annotate each channel with an average content score.

        The average is (own content scores + child-channel content
        scores) / (own content count + child content count); each score
        is read from the "score" key of the JSON ``src`` field.

        Return:
            Queryset: Queryset
        """
        return self.annotate(
            # Sum of this channel's own content scores (0 when none).
            score_content=Coalesce(
                Sum(
                    Cast(
                        KeyTextTransform("score", "content__src"),
                        models.FloatField()
                    )
                ), 0
            ),
            total_content=Count('content'),
            # Sum of scores of content hanging off child channels
            # (via the ``tree_channels`` relation).
            score_channels=Coalesce(
                Sum(
                    Cast(
                        KeyTextTransform(
                            "score", "tree_channels__content__src"
                        ),
                        models.FloatField()
                    )
                ), 0
            ),
            total_channels=Count('tree_channels__content'),
        ).annotate(
            sum_content=models.ExpressionWrapper(
                F('score_content') + F('score_channels'), # NOQA
                output_field=models.FloatField()
            ),
            # NOTE(review): sum_tota can be 0 for an empty channel, making
            # the division below a potential error — confirm upstream guards.
            sum_tota=models.ExpressionWrapper(
                F('total_content') + F('total_channels'),
                output_field=models.FloatField()
            ),
        ).annotate(
            average=models.ExpressionWrapper(
                F('sum_content') / F('sum_tota'), # NOQA
                output_field=models.FloatField()
            )
        )
class ChannelManager(models.Manager):
    """Channel Manager wiring the model to :class:`ChannelQueryset`."""
    def get_queryset(self):
        """Get Queryset.

        Return:
            Queryset: ChannelQueryset bound to this manager's model/db.
        """
        return ChannelQueryset(
            self.model, using=self._db
        )
|
{"/api/contents/entities/__init__.py": ["/api/contents/entities/entity.py"], "/api/core/csv/__init__.py": ["/api/core/csv/parse_csv.py"], "/api/core/schemas/__init__.py": ["/api/core/schemas/schema.py"], "/api/channels/repositories/__init__.py": ["/api/channels/repositories/repository.py"], "/api/channels/tests/tests_score_single_channel.py": ["/api/channels/tests/factory.py"], "/api/channels/tests/tests_score_multiple_channels.py": ["/api/channels/tests/factory.py"], "/api/envs/repository.py": ["/api/envs/models.py"], "/api/channels/serializers/__init__.py": ["/api/channels/serializers/channels.py"], "/api/channels/managers/__init__.py": ["/api/channels/managers/manager.py"], "/api/core/models/__init__.py": ["/api/core/models/image_models.py", "/api/core/models/title_model.py"], "/api/contents/serializers/__init__.py": ["/api/contents/serializers/content.py"], "/api/envs/serializers/__init__.py": ["/api/envs/serializers/envs_list.py"]}
|
29,764,615
|
pollitosabroson/fly
|
refs/heads/main
|
/api/contents/entities/entity.py
|
from core.schemas import BaseSchema
from schema import And, Schema
class ContenSourceEntity(BaseSchema):
    """Class for source Entity describing a content item's metadata."""
    # Validation contract for ``data``.
    # NOTE(review): 'stars' is required here, and the score bound
    # (0 < n < 5) conflicts with the 8.5 default used by the content test
    # factory — confirm the intended scale before relying on validate().
    schema = Schema(
        {
            'directors': list,
            'author': list,
            'genres': list,
            'writers': list,
            'stars': list,
            'score': And(float, lambda n: 0 < n < 5)
        }
    )
    data = {}  # overwritten per-instance in __init__

    def __init__(
        self, score, directors=None, author=None,
        genres=None, writers=None, stars=None
    ):
        # Does not call super().__init__: builds ``data`` directly so the
        # class-level ``schema`` validator is preserved. Missing list
        # arguments default to fresh empty lists.
        self.data = {
            "score": score,
            "directors": directors or [],
            "author": author or [],
            "genres": genres or [],
            "writers": writers or [],
            "stars": stars or [],
        }

    def to_dict(self):
        """Convert values to dict.

        Return:
            dict: return all values in dict
        """
        return self.data
|
{"/api/contents/entities/__init__.py": ["/api/contents/entities/entity.py"], "/api/core/csv/__init__.py": ["/api/core/csv/parse_csv.py"], "/api/core/schemas/__init__.py": ["/api/core/schemas/schema.py"], "/api/channels/repositories/__init__.py": ["/api/channels/repositories/repository.py"], "/api/channels/tests/tests_score_single_channel.py": ["/api/channels/tests/factory.py"], "/api/channels/tests/tests_score_multiple_channels.py": ["/api/channels/tests/factory.py"], "/api/envs/repository.py": ["/api/envs/models.py"], "/api/channels/serializers/__init__.py": ["/api/channels/serializers/channels.py"], "/api/channels/managers/__init__.py": ["/api/channels/managers/manager.py"], "/api/core/models/__init__.py": ["/api/core/models/image_models.py", "/api/core/models/title_model.py"], "/api/contents/serializers/__init__.py": ["/api/contents/serializers/content.py"], "/api/envs/serializers/__init__.py": ["/api/envs/serializers/envs_list.py"]}
|
29,764,616
|
pollitosabroson/fly
|
refs/heads/main
|
/api/core/csv/__init__.py
|
from .parse_csv import ParseCsv
__all__ = ['ParseCsv', ]
|
{"/api/contents/entities/__init__.py": ["/api/contents/entities/entity.py"], "/api/core/csv/__init__.py": ["/api/core/csv/parse_csv.py"], "/api/core/schemas/__init__.py": ["/api/core/schemas/schema.py"], "/api/channels/repositories/__init__.py": ["/api/channels/repositories/repository.py"], "/api/channels/tests/tests_score_single_channel.py": ["/api/channels/tests/factory.py"], "/api/channels/tests/tests_score_multiple_channels.py": ["/api/channels/tests/factory.py"], "/api/envs/repository.py": ["/api/envs/models.py"], "/api/channels/serializers/__init__.py": ["/api/channels/serializers/channels.py"], "/api/channels/managers/__init__.py": ["/api/channels/managers/manager.py"], "/api/core/models/__init__.py": ["/api/core/models/image_models.py", "/api/core/models/title_model.py"], "/api/contents/serializers/__init__.py": ["/api/contents/serializers/content.py"], "/api/envs/serializers/__init__.py": ["/api/envs/serializers/envs_list.py"]}
|
29,764,617
|
pollitosabroson/fly
|
refs/heads/main
|
/api/core/schemas/__init__.py
|
from .schema import BaseSchema
__all__ = ['BaseSchema', ]
|
{"/api/contents/entities/__init__.py": ["/api/contents/entities/entity.py"], "/api/core/csv/__init__.py": ["/api/core/csv/parse_csv.py"], "/api/core/schemas/__init__.py": ["/api/core/schemas/schema.py"], "/api/channels/repositories/__init__.py": ["/api/channels/repositories/repository.py"], "/api/channels/tests/tests_score_single_channel.py": ["/api/channels/tests/factory.py"], "/api/channels/tests/tests_score_multiple_channels.py": ["/api/channels/tests/factory.py"], "/api/envs/repository.py": ["/api/envs/models.py"], "/api/channels/serializers/__init__.py": ["/api/channels/serializers/channels.py"], "/api/channels/managers/__init__.py": ["/api/channels/managers/manager.py"], "/api/core/models/__init__.py": ["/api/core/models/image_models.py", "/api/core/models/title_model.py"], "/api/contents/serializers/__init__.py": ["/api/contents/serializers/content.py"], "/api/envs/serializers/__init__.py": ["/api/envs/serializers/envs_list.py"]}
|
29,764,618
|
pollitosabroson/fly
|
refs/heads/main
|
/api/channels/repositories/__init__.py
|
from .repository import ChannelRepository
__all__ = ['ChannelRepository', ]
|
{"/api/contents/entities/__init__.py": ["/api/contents/entities/entity.py"], "/api/core/csv/__init__.py": ["/api/core/csv/parse_csv.py"], "/api/core/schemas/__init__.py": ["/api/core/schemas/schema.py"], "/api/channels/repositories/__init__.py": ["/api/channels/repositories/repository.py"], "/api/channels/tests/tests_score_single_channel.py": ["/api/channels/tests/factory.py"], "/api/channels/tests/tests_score_multiple_channels.py": ["/api/channels/tests/factory.py"], "/api/envs/repository.py": ["/api/envs/models.py"], "/api/channels/serializers/__init__.py": ["/api/channels/serializers/channels.py"], "/api/channels/managers/__init__.py": ["/api/channels/managers/manager.py"], "/api/core/models/__init__.py": ["/api/core/models/image_models.py", "/api/core/models/title_model.py"], "/api/contents/serializers/__init__.py": ["/api/contents/serializers/content.py"], "/api/envs/serializers/__init__.py": ["/api/envs/serializers/envs_list.py"]}
|
29,764,619
|
pollitosabroson/fly
|
refs/heads/main
|
/api/contents/migrations/0003_auto_20210520_0934.py
|
# Generated by Django 3.1.11 on 2021-05-20 09:34
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated: re-points Content.kind at envs.Attributes, limiting
    # choices to attributes whose env type is named 'content'.

    dependencies = [
        ('envs', '0001_initial'),
        ('contents', '0002_auto_20210519_1554'),
    ]

    operations = [
        migrations.AlterField(
            model_name='content',
            name='kind',
            field=models.ForeignKey(limit_choices_to={'envs_type__name': 'content'}, on_delete=django.db.models.deletion.DO_NOTHING, related_name='kinds', to='envs.attributes'),
        ),
    ]
|
{"/api/contents/entities/__init__.py": ["/api/contents/entities/entity.py"], "/api/core/csv/__init__.py": ["/api/core/csv/parse_csv.py"], "/api/core/schemas/__init__.py": ["/api/core/schemas/schema.py"], "/api/channels/repositories/__init__.py": ["/api/channels/repositories/repository.py"], "/api/channels/tests/tests_score_single_channel.py": ["/api/channels/tests/factory.py"], "/api/channels/tests/tests_score_multiple_channels.py": ["/api/channels/tests/factory.py"], "/api/envs/repository.py": ["/api/envs/models.py"], "/api/channels/serializers/__init__.py": ["/api/channels/serializers/channels.py"], "/api/channels/managers/__init__.py": ["/api/channels/managers/manager.py"], "/api/core/models/__init__.py": ["/api/core/models/image_models.py", "/api/core/models/title_model.py"], "/api/contents/serializers/__init__.py": ["/api/contents/serializers/content.py"], "/api/envs/serializers/__init__.py": ["/api/envs/serializers/envs_list.py"]}
|
29,764,620
|
pollitosabroson/fly
|
refs/heads/main
|
/api/core/models/title_model.py
|
from django.db import models
from django.utils.translation import gettext_lazy as _
class TitleModel(models.Model):
    """
    Abstract model that provides a required ``title`` char field.

    NOTE(review): the original docstring also mentioned auto-populated
    'name' and 'order' fields that this model does not define — confirm
    whether they were dropped intentionally.
    """
    # Required title, up to 500 chars; verbose name shown as 'name'.
    title = models.CharField(
        max_length=500,
        blank=False, null=False,
        verbose_name=_('name')
    )

    class Meta:
        """Marks the model as abstract: no table is created for it."""
        abstract = True
|
{"/api/contents/entities/__init__.py": ["/api/contents/entities/entity.py"], "/api/core/csv/__init__.py": ["/api/core/csv/parse_csv.py"], "/api/core/schemas/__init__.py": ["/api/core/schemas/schema.py"], "/api/channels/repositories/__init__.py": ["/api/channels/repositories/repository.py"], "/api/channels/tests/tests_score_single_channel.py": ["/api/channels/tests/factory.py"], "/api/channels/tests/tests_score_multiple_channels.py": ["/api/channels/tests/factory.py"], "/api/envs/repository.py": ["/api/envs/models.py"], "/api/channels/serializers/__init__.py": ["/api/channels/serializers/channels.py"], "/api/channels/managers/__init__.py": ["/api/channels/managers/manager.py"], "/api/core/models/__init__.py": ["/api/core/models/image_models.py", "/api/core/models/title_model.py"], "/api/contents/serializers/__init__.py": ["/api/contents/serializers/content.py"], "/api/envs/serializers/__init__.py": ["/api/envs/serializers/envs_list.py"]}
|
29,764,621
|
pollitosabroson/fly
|
refs/heads/main
|
/api/channels/tests/tests_score_single_channel.py
|
import logging
import pytest
from .factory import ChannelFactory, Contentfactory
logger = logging.getLogger(__name__)
@pytest.mark.django_db
@pytest.mark.urls('immfly.urls')
class TestSingleChannelScore:
    """Checks the ``score`` property of a single channel."""

    def test_calculte_single_channel(self, client):
        """Channel score must equal the mean of its contents' scores."""
        total_content = 5
        # Every factory content carries DEFAULT_SCORE, so the mean
        # collapses back to DEFAULT_SCORE itself.
        score = round(
            total_content * Contentfactory.DEFAULT_SCORE / total_content,
            2
        )
        # NOTE: 'wiht' typo mirrors the factory's actual method name.
        channel = ChannelFactory.create_channel_wiht_content(
            total_contents=total_content
        )
        assert score == channel.score
|
{"/api/contents/entities/__init__.py": ["/api/contents/entities/entity.py"], "/api/core/csv/__init__.py": ["/api/core/csv/parse_csv.py"], "/api/core/schemas/__init__.py": ["/api/core/schemas/schema.py"], "/api/channels/repositories/__init__.py": ["/api/channels/repositories/repository.py"], "/api/channels/tests/tests_score_single_channel.py": ["/api/channels/tests/factory.py"], "/api/channels/tests/tests_score_multiple_channels.py": ["/api/channels/tests/factory.py"], "/api/envs/repository.py": ["/api/envs/models.py"], "/api/channels/serializers/__init__.py": ["/api/channels/serializers/channels.py"], "/api/channels/managers/__init__.py": ["/api/channels/managers/manager.py"], "/api/core/models/__init__.py": ["/api/core/models/image_models.py", "/api/core/models/title_model.py"], "/api/contents/serializers/__init__.py": ["/api/contents/serializers/content.py"], "/api/envs/serializers/__init__.py": ["/api/envs/serializers/envs_list.py"]}
|
29,764,622
|
pollitosabroson/fly
|
refs/heads/main
|
/api/contents/migrations/0002_auto_20210519_1554.py
|
# Generated by Django 3.1.11 on 2021-05-19 15:54
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated: re-points Content.channel at channels.Channel with
    # DO_NOTHING on delete and reverse accessor 'content'.

    dependencies = [
        ('channels', '0001_initial'),
        ('contents', '0001_initial'),
    ]

    operations = [
        migrations.AlterField(
            model_name='content',
            name='channel',
            field=models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, related_name='content', to='channels.channel'),
        ),
    ]
|
{"/api/contents/entities/__init__.py": ["/api/contents/entities/entity.py"], "/api/core/csv/__init__.py": ["/api/core/csv/parse_csv.py"], "/api/core/schemas/__init__.py": ["/api/core/schemas/schema.py"], "/api/channels/repositories/__init__.py": ["/api/channels/repositories/repository.py"], "/api/channels/tests/tests_score_single_channel.py": ["/api/channels/tests/factory.py"], "/api/channels/tests/tests_score_multiple_channels.py": ["/api/channels/tests/factory.py"], "/api/envs/repository.py": ["/api/envs/models.py"], "/api/channels/serializers/__init__.py": ["/api/channels/serializers/channels.py"], "/api/channels/managers/__init__.py": ["/api/channels/managers/manager.py"], "/api/core/models/__init__.py": ["/api/core/models/image_models.py", "/api/core/models/title_model.py"], "/api/contents/serializers/__init__.py": ["/api/contents/serializers/content.py"], "/api/envs/serializers/__init__.py": ["/api/envs/serializers/envs_list.py"]}
|
29,764,623
|
pollitosabroson/fly
|
refs/heads/main
|
/api/channels/management/commands/import_channels.py
|
import logging
import random
import string
from channels.models import Channel
from contents.entities import ContenSourceEntity
from contents.models import Content
from django.core.files.base import ContentFile
from django.core.management.base import BaseCommand
from django.template.defaultfilters import slugify
from envs.models import Attributes, TypeEnvs
from tqdm import tqdm
logger = logging.getLogger(__name__)
class Command(BaseCommand):
    """Management command that seeds the DB with random channels/contents."""

    help = (
        "Command for create channels."
        "example to execution: python manage.py import_channels"
    )

    # Pools of fake names used to randomize the generated data.
    CHANNEL_NAME = [
        'Show Stellar', 'Showorzo', 'Show Scoot', 'Tv Reboot', 'Tvjet',
        'Show Port', 'Tv Truth', 'Tv Variable', 'Show Module', 'Tvpad',
        'Showlia', 'Show Logic', 'Show Spirit', 'Tv Abstract', 'Tv Sign',
        'Tvorzo', 'Show Omni', 'Show Sprite', 'Show Verse', 'Tv Scope',
        'Showzilla', 'Show Cubed', 'Show Level', 'Show Insight'
    ]
    SHOW_NAME = [
        'Zombie Adventures', 'Stray Adventures', 'Voodoo Legend',
        'Enigma Stories', 'Heaven Grave', 'Freak Oracle', 'Anonymous Kind',
        'Angelbones', 'Angelgift', 'Moonring', 'Smoke Adventures', 'Paperkin',
        'Naughty Drama', 'Warrior Fantasy', 'Rainy Legend', 'Monster Promises',
        'Alternate Doom', 'Distant Doodles', 'Winterbot', 'Metalbow'
    ]
    FILE_NAME = [
        'Storm Fantasy', 'Anti-Social Story', 'Yesterdays Fantasies',
        'Thunder Fantasy', 'Deadly Everyday', 'Shady Galaxy', 'Space Morning',
        'Spiderstar', 'Dreamshock',
    ]
    AUTHORS = [
        'Julius Price', 'Jimmy Adkins', 'Wm Graham', 'Andy Kelley',
        'Jacob Andrews', 'Shannon Bailey', 'Beulah Rose',
        'Minnie Long', 'Brittany Blair', 'Ruth Banks', 'Melody Moody',
        'Lamar Gardner',
    ]
    DIRECTORS = [
        'Alexander Bush', 'Eileen Fox', 'Mindy Rios', 'Carrie Owen',
        'Billy Ellis', 'Hazel Owens', 'Ricky Powell', 'Steven Shaw',
        'Vanessa Cummings', 'Marilyn Watts', 'Elisa Sanchez', 'Tonya Morris',
        'Emma Ramos',
    ]
    GENRES = [
        'Action', 'Comedy', 'Drama', 'Fantasy', 'Horror', 'Mystery', 'Romance',
        'Thriller', 'Western',
    ]
    WRITERS = [
        'Faye Lawrence', 'Morris Young', 'Violet Ray', 'Angelina Copeland',
        'Mabel Ross', 'Jeff Greene', 'Harry Armstrong', 'Johnathan Brown',
        'Elisa Wilkins', 'Ed Brady',
    ]

    def handle(self, *args, **options):
        """Create each named channel and attach 1-9 random contents to it."""
        # Attributes flagged as languages / content kinds by the envs app.
        languages = Attributes.objects.filter(
            envs_type__name=TypeEnvs.LANGUAGE
        )
        attributes = Attributes.objects.filter(
            envs_type__name=TypeEnvs.CONTENT
        )
        for channel_name in self.CHANNEL_NAME:
            channel, _ = Channel.objects.get_or_create(
                title=channel_name,
                language=random.choice(languages)
            )
            for _ in tqdm(range(random.randrange(1, 10))):
                # Random metadata payload; score drawn from [0, 10]
                # (NOTE(review): entity schema validates 0 < score < 5 —
                # confirm which range is intended).
                src = ContenSourceEntity(
                    score=round(random.uniform(0, 10), 2),
                    directors=self.get_random_values(self.DIRECTORS),
                    author=self.get_random_values(self.AUTHORS),
                    genres=self.get_random_values(self.GENRES),
                    writers=self.get_random_values(
                        self.WRITERS, random_range=3
                    ),
                )
                kind = random.choice(attributes)
                content = Content(
                    channel=channel,
                    kind=kind,
                    src=src.to_dict()
                )
                # File extension taken from the attribute name (the 'kind').
                file_name = f'{slugify(random.choice(self.FILE_NAME))}.{kind.name.lower()}' # NOQA
                content.save()
                # Attach a dummy file whose body is the ascii letters
                # joined by commas.
                content.content.save(
                    file_name,
                    ContentFile(
                        f"{','.join(x for x in string.ascii_letters)}".encode() # NOQA
                    )
                )

    @staticmethod
    def get_random_values(values, random_range=5):
        """Create list with random values.

        Args:
            values(List): List with content
            random_range(int): Number to calculate the random
        Return:
            List: 1 to random_range-1 random picks (repeats possible)
        """
        return [
            random.choice(values)
            for _ in range(random.randrange(1, random_range))
        ]
|
{"/api/contents/entities/__init__.py": ["/api/contents/entities/entity.py"], "/api/core/csv/__init__.py": ["/api/core/csv/parse_csv.py"], "/api/core/schemas/__init__.py": ["/api/core/schemas/schema.py"], "/api/channels/repositories/__init__.py": ["/api/channels/repositories/repository.py"], "/api/channels/tests/tests_score_single_channel.py": ["/api/channels/tests/factory.py"], "/api/channels/tests/tests_score_multiple_channels.py": ["/api/channels/tests/factory.py"], "/api/envs/repository.py": ["/api/envs/models.py"], "/api/channels/serializers/__init__.py": ["/api/channels/serializers/channels.py"], "/api/channels/managers/__init__.py": ["/api/channels/managers/manager.py"], "/api/core/models/__init__.py": ["/api/core/models/image_models.py", "/api/core/models/title_model.py"], "/api/contents/serializers/__init__.py": ["/api/contents/serializers/content.py"], "/api/envs/serializers/__init__.py": ["/api/envs/serializers/envs_list.py"]}
|
29,764,624
|
pollitosabroson/fly
|
refs/heads/main
|
/api/channels/tests/tests_channels.py
|
import logging
import pytest
from core.utils import parse_response_content
from django.urls import reverse
from rest_framework import status
logger = logging.getLogger(__name__)
@pytest.mark.django_db
@pytest.mark.urls('immfly.urls')
class TestListChannels:
    """Specific tests for the channel list endpoint."""

    url = reverse('channels:list_channel')

    @staticmethod
    def get_success_fixtures():
        """Parameter sets for which the endpoint must answer success."""
        return [
            {
            }
        ]

    def make_get_request(self, client, params=None, **kwargs):
        """
        Make the request to the endpoint and return the content and status.

        Fix: ``params`` was previously accepted but silently ignored;
        it is now forwarded as the request's query data.

        Args:
            client: Django test client.
            params(dict | None): query-string parameters.
        Return:
            tuple: (parsed response content, HTTP status code)
        """
        headers = {
            'content_type': 'application/json'
        }
        response = client.get(
            self.url,
            data=params,
            **headers
        )
        content = parse_response_content(response)
        status = response.status_code
        return content, status

    def test_success(self, client):
        """The endpoint returns HTTP 200 for every success fixture."""
        for fixtures in self.get_success_fixtures():
            response_content, response_status = self.make_get_request(
                client,
                params=fixtures
            )
            assert status.HTTP_200_OK == response_status
|
{"/api/contents/entities/__init__.py": ["/api/contents/entities/entity.py"], "/api/core/csv/__init__.py": ["/api/core/csv/parse_csv.py"], "/api/core/schemas/__init__.py": ["/api/core/schemas/schema.py"], "/api/channels/repositories/__init__.py": ["/api/channels/repositories/repository.py"], "/api/channels/tests/tests_score_single_channel.py": ["/api/channels/tests/factory.py"], "/api/channels/tests/tests_score_multiple_channels.py": ["/api/channels/tests/factory.py"], "/api/envs/repository.py": ["/api/envs/models.py"], "/api/channels/serializers/__init__.py": ["/api/channels/serializers/channels.py"], "/api/channels/managers/__init__.py": ["/api/channels/managers/manager.py"], "/api/core/models/__init__.py": ["/api/core/models/image_models.py", "/api/core/models/title_model.py"], "/api/contents/serializers/__init__.py": ["/api/contents/serializers/content.py"], "/api/envs/serializers/__init__.py": ["/api/envs/serializers/envs_list.py"]}
|
29,764,625
|
pollitosabroson/fly
|
refs/heads/main
|
/api/channels/tests/tests_score_multiple_channels.py
|
import logging
import pytest
from channels.repositories import ChannelRepository
from .factory import ChannelFactory, Contentfactory
logger = logging.getLogger(__name__)
@pytest.mark.django_db
@pytest.mark.urls('immfly.urls')
class TestSingleChannelScore:
    """Checks channel scores through the repository's average queryset."""

    def test_calculte_single_channel(self, client):
        """The created channel must appear in the ranking with the right score."""
        total_content = 5
        # Every factory content carries DEFAULT_SCORE, so the mean is
        # DEFAULT_SCORE itself.
        score = round(
            total_content * Contentfactory.DEFAULT_SCORE / total_content,
            2
        )
        test_channel = ChannelFactory.create_channel_wiht_content(
            total_contents=total_content
        )
        channels = ChannelRepository.query_all_average().order_by('-average')
        found = False
        for channel in channels:
            if test_channel.public_id == channel.public_id:
                found = True
                assert score == channel.score
        # Fix: previously the test passed vacuously when the test channel
        # never appeared in the queryset; now its presence is asserted.
        assert found
|
{"/api/contents/entities/__init__.py": ["/api/contents/entities/entity.py"], "/api/core/csv/__init__.py": ["/api/core/csv/parse_csv.py"], "/api/core/schemas/__init__.py": ["/api/core/schemas/schema.py"], "/api/channels/repositories/__init__.py": ["/api/channels/repositories/repository.py"], "/api/channels/tests/tests_score_single_channel.py": ["/api/channels/tests/factory.py"], "/api/channels/tests/tests_score_multiple_channels.py": ["/api/channels/tests/factory.py"], "/api/envs/repository.py": ["/api/envs/models.py"], "/api/channels/serializers/__init__.py": ["/api/channels/serializers/channels.py"], "/api/channels/managers/__init__.py": ["/api/channels/managers/manager.py"], "/api/core/models/__init__.py": ["/api/core/models/image_models.py", "/api/core/models/title_model.py"], "/api/contents/serializers/__init__.py": ["/api/contents/serializers/content.py"], "/api/envs/serializers/__init__.py": ["/api/envs/serializers/envs_list.py"]}
|
29,764,626
|
pollitosabroson/fly
|
refs/heads/main
|
/api/channels/views.py
|
import logging
from channels.models import Channel
from channels.serializers import BaseChannelSerializers, ChannelSerializer
from contents.serializers import ContentSerializers
from django.shortcuts import get_object_or_404
from rest_framework.viewsets import ModelViewSet
logger = logging.getLogger(__name__)
class ChannelView(ModelViewSet):
    """List/retrieve channels, looked up by ``public_id``."""
    serializer_class = ChannelSerializer
    lookup_field = 'public_id'
    queryset = Channel.objects.all()
class SubChannelView(ModelViewSet):
    """List the sub-channels of the channel given by ``public_id``."""
    serializer_class = BaseChannelSerializers
    lookup_field = 'public_id'
    queryset = Channel.objects.all()

    def get_queryset(self):
        """Return channels whose parent has the URL's ``public_id``.

        Filters on the reverse self-FK accessor ``tree_channels``.
        """
        assert self.queryset is not None, (
            "'%s' should either include a `queryset` attribute, "
            "or override the `get_queryset()` method."
            % self.__class__.__name__
        )
        filter_kwargs = {
            'tree_channels__public_id': self.kwargs[self.lookup_field]
        }
        queryset = self.queryset
        queryset = queryset.filter(**filter_kwargs)
        return queryset
class ChannelContentView(ModelViewSet):
    """List the contents of the channel given by ``public_id``."""
    serializer_class = ContentSerializers
    lookup_field = 'public_id'
    queryset = Channel.objects.all()

    def get_queryset(self):
        """Resolve the channel by ``public_id`` (404 if absent) and
        return its related contents queryset.
        """
        lookup_url_kwarg = self.lookup_url_kwarg or self.lookup_field
        filter_kwargs = {self.lookup_field: self.kwargs[lookup_url_kwarg]}
        assert self.queryset is not None, (
            "'%s' should either include a `queryset` attribute, "
            "or override the `get_queryset()` method."
            % self.__class__.__name__
        )
        queryset = self.queryset
        # get_object_or_404 yields a single Channel; the returned queryset
        # is that channel's reverse 'content' relation.
        queryset = get_object_or_404(queryset, **filter_kwargs)
        queryset = queryset.content.all()
        return queryset
|
{"/api/contents/entities/__init__.py": ["/api/contents/entities/entity.py"], "/api/core/csv/__init__.py": ["/api/core/csv/parse_csv.py"], "/api/core/schemas/__init__.py": ["/api/core/schemas/schema.py"], "/api/channels/repositories/__init__.py": ["/api/channels/repositories/repository.py"], "/api/channels/tests/tests_score_single_channel.py": ["/api/channels/tests/factory.py"], "/api/channels/tests/tests_score_multiple_channels.py": ["/api/channels/tests/factory.py"], "/api/envs/repository.py": ["/api/envs/models.py"], "/api/channels/serializers/__init__.py": ["/api/channels/serializers/channels.py"], "/api/channels/managers/__init__.py": ["/api/channels/managers/manager.py"], "/api/core/models/__init__.py": ["/api/core/models/image_models.py", "/api/core/models/title_model.py"], "/api/contents/serializers/__init__.py": ["/api/contents/serializers/content.py"], "/api/envs/serializers/__init__.py": ["/api/envs/serializers/envs_list.py"]}
|
29,764,627
|
pollitosabroson/fly
|
refs/heads/main
|
/api/envs/repository.py
|
from .models import Attributes, TypeEnvs
class TypeEnvsRepository:
    """Read-side helpers for the ``TypeEnvs`` model."""

    MODEL = TypeEnvs

    @classmethod
    def get_languages(cls):
        """Return the env entry that represents languages."""
        return cls.MODEL.get_env(cls.MODEL.LANGUAGE)


class AttributesRepositoy:
    """Read-side helpers for the ``Attributes`` model.

    NOTE(review): class-name typo ('Repositoy') kept — callers import it.
    """

    MODEL = Attributes

    @classmethod
    def get_attributes_by_languages(cls):
        """Return the attributes that belong to the language env type."""
        language_env = TypeEnvsRepository.get_languages()
        return cls.MODEL.get_attributes_by_typeenvs(language_env)
|
{"/api/contents/entities/__init__.py": ["/api/contents/entities/entity.py"], "/api/core/csv/__init__.py": ["/api/core/csv/parse_csv.py"], "/api/core/schemas/__init__.py": ["/api/core/schemas/schema.py"], "/api/channels/repositories/__init__.py": ["/api/channels/repositories/repository.py"], "/api/channels/tests/tests_score_single_channel.py": ["/api/channels/tests/factory.py"], "/api/channels/tests/tests_score_multiple_channels.py": ["/api/channels/tests/factory.py"], "/api/envs/repository.py": ["/api/envs/models.py"], "/api/channels/serializers/__init__.py": ["/api/channels/serializers/channels.py"], "/api/channels/managers/__init__.py": ["/api/channels/managers/manager.py"], "/api/core/models/__init__.py": ["/api/core/models/image_models.py", "/api/core/models/title_model.py"], "/api/contents/serializers/__init__.py": ["/api/contents/serializers/content.py"], "/api/envs/serializers/__init__.py": ["/api/envs/serializers/envs_list.py"]}
|
29,764,628
|
pollitosabroson/fly
|
refs/heads/main
|
/api/channels/models.py
|
import logging
from channels.managers import ChannelManager
from core.models import ImageModel, PublicIdModel, TimeStampedModel, TitleModel
from django.db import models
from envs.models import Attributes
logger = logging.getLogger(__name__)
class Channel(
    TimeStampedModel, ImageModel, PublicIdModel, TitleModel
):
    """Channel of contents, optionally nested under a parent channel."""

    # NOTE(review): limit_choices_to uses env type 'languages' here while
    # the contents migration uses 'content' for kinds — confirm the env
    # type names are consistent with the seeded data.
    language = models.ForeignKey(
        Attributes,
        models.DO_NOTHING,
        related_name='languages',
        limit_choices_to={'envs_type__name': 'languages'}
    )
    # Self-reference to the parent channel; children are reachable through
    # the reverse accessor 'tree_channels'.
    tree_channel = models.ForeignKey(
        "self",
        models.DO_NOTHING,
        related_name='tree_channels',
        blank=True, null=True
    )

    objects = ChannelManager()

    @property
    def score(self):
        """Return score from channel.

        Return:
            Float: the annotated average, rounded to two decimals.
        """
        value = self._calculate_sinlge_score(
            channel_id=self.id
        )
        return round(value.average, 2)

    @classmethod
    def _calculate_sinlge_score(cls, channel_id):
        """Calculate score from one channel.

        Relies on the manager's ChannelQueryset.calculate_score()
        annotations (exposes an ``average`` attribute on each row).

        Args:
            channel_id(int): Channel id
        Return:
            Queryset: last annotated row for the channel.
        """
        return cls.objects.filter(
            id=channel_id
        ).calculate_score().last()
|
{"/api/contents/entities/__init__.py": ["/api/contents/entities/entity.py"], "/api/core/csv/__init__.py": ["/api/core/csv/parse_csv.py"], "/api/core/schemas/__init__.py": ["/api/core/schemas/schema.py"], "/api/channels/repositories/__init__.py": ["/api/channels/repositories/repository.py"], "/api/channels/tests/tests_score_single_channel.py": ["/api/channels/tests/factory.py"], "/api/channels/tests/tests_score_multiple_channels.py": ["/api/channels/tests/factory.py"], "/api/envs/repository.py": ["/api/envs/models.py"], "/api/channels/serializers/__init__.py": ["/api/channels/serializers/channels.py"], "/api/channels/managers/__init__.py": ["/api/channels/managers/manager.py"], "/api/core/models/__init__.py": ["/api/core/models/image_models.py", "/api/core/models/title_model.py"], "/api/contents/serializers/__init__.py": ["/api/contents/serializers/content.py"], "/api/envs/serializers/__init__.py": ["/api/envs/serializers/envs_list.py"]}
|
29,764,629
|
pollitosabroson/fly
|
refs/heads/main
|
/api/channels/serializers/channels.py
|
import logging
from channels.models import Channel
from contents.serializers import ContentSerializers
from rest_framework import serializers
logger = logging.getLogger(__name__)
class BaseChannelSerializers(serializers.ModelSerializer):
    """Serializes a channel: exposes ``public_id`` as ``id`` and inlines
    its contents and language name."""

    # Public-facing id is the channel's public_id, not the DB pk.
    id = serializers.CharField(
        source='public_id'
    )
    contents = serializers.SerializerMethodField()
    language = serializers.CharField(
        source='language.name'
    )

    class Meta:
        model = Channel
        fields = ['title', 'id', 'score', 'image', 'contents', 'language', ]

    def get_contents(self, obj):
        """Serialize the channel's related contents.

        Args:
            obj(instance): Channel instance
        Return:
            list: serialized contents of the channel
        """
        return ContentSerializers(
            obj.content.all(),
            many=True
        ).data
class ChannelSerializer(BaseChannelSerializers):
    """Channel serializer that also nests the channel's sub-channels."""

    sub_channels = serializers.SerializerMethodField()

    class Meta:
        model = Channel
        fields = BaseChannelSerializers.Meta.fields + ['sub_channels', ]

    def get_sub_channels(self, obj):
        """Get all subchannels.

        Args:
            obj(instance): Channel instance
        Return:
            list: serialized children via the 'tree_channels' reverse FK
        """
        return BaseChannelSerializers(
            obj.tree_channels.all(), many=True
        ).data
|
{"/api/contents/entities/__init__.py": ["/api/contents/entities/entity.py"], "/api/core/csv/__init__.py": ["/api/core/csv/parse_csv.py"], "/api/core/schemas/__init__.py": ["/api/core/schemas/schema.py"], "/api/channels/repositories/__init__.py": ["/api/channels/repositories/repository.py"], "/api/channels/tests/tests_score_single_channel.py": ["/api/channels/tests/factory.py"], "/api/channels/tests/tests_score_multiple_channels.py": ["/api/channels/tests/factory.py"], "/api/envs/repository.py": ["/api/envs/models.py"], "/api/channels/serializers/__init__.py": ["/api/channels/serializers/channels.py"], "/api/channels/managers/__init__.py": ["/api/channels/managers/manager.py"], "/api/core/models/__init__.py": ["/api/core/models/image_models.py", "/api/core/models/title_model.py"], "/api/contents/serializers/__init__.py": ["/api/contents/serializers/content.py"], "/api/envs/serializers/__init__.py": ["/api/envs/serializers/envs_list.py"]}
|
29,764,630
|
pollitosabroson/fly
|
refs/heads/main
|
/api/channels/urls.py
|
from django.urls import path, re_path
from . import views
app_name = 'channels'
urlpatterns = [
    # GET /                      -> list all channels
    path(
        '',
        views.ChannelView.as_view(
            {
                'get': 'list'
            }
        ),
        name='list_channel'
    ),
    # GET /<public_id>/subchannels -> list a channel's children
    re_path(
        r'^(?P<public_id>[\w\-]+)/subchannels$',
        views.SubChannelView.as_view(
            {
                'get': 'list'
            }
        ),
        name='list_subchanels'
    ),
    # GET /<public_id>/contents    -> list a channel's contents
    re_path(
        r'^(?P<public_id>[\w\-]+)/contents$',
        views.ChannelContentView.as_view(
            {
                'get': 'list'
            }
        ),
        name='list_content'
    ),
]
|
{"/api/contents/entities/__init__.py": ["/api/contents/entities/entity.py"], "/api/core/csv/__init__.py": ["/api/core/csv/parse_csv.py"], "/api/core/schemas/__init__.py": ["/api/core/schemas/schema.py"], "/api/channels/repositories/__init__.py": ["/api/channels/repositories/repository.py"], "/api/channels/tests/tests_score_single_channel.py": ["/api/channels/tests/factory.py"], "/api/channels/tests/tests_score_multiple_channels.py": ["/api/channels/tests/factory.py"], "/api/envs/repository.py": ["/api/envs/models.py"], "/api/channels/serializers/__init__.py": ["/api/channels/serializers/channels.py"], "/api/channels/managers/__init__.py": ["/api/channels/managers/manager.py"], "/api/core/models/__init__.py": ["/api/core/models/image_models.py", "/api/core/models/title_model.py"], "/api/contents/serializers/__init__.py": ["/api/contents/serializers/content.py"], "/api/envs/serializers/__init__.py": ["/api/envs/serializers/envs_list.py"]}
|
29,764,631
|
pollitosabroson/fly
|
refs/heads/main
|
/api/envs/migrations/0001_initial.py
|
# Generated by Django 3.1.11 on 2021-05-17 10:15
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated initial migration: creates TypeEnvs and Attributes
    # (Attributes points at TypeEnvs via the 'envs_type' FK).

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='TypeEnvs',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created_date', models.DateTimeField(auto_now_add=True, null=True, verbose_name='created date')),
                ('last_modified', models.DateTimeField(auto_now=True, null=True, verbose_name='last modified')),
                ('name', models.CharField(max_length=255)),
            ],
            options={
                'verbose_name': 'env',
                'verbose_name_plural': 'envs',
            },
        ),
        migrations.CreateModel(
            name='Attributes',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created_date', models.DateTimeField(auto_now_add=True, null=True, verbose_name='created date')),
                ('last_modified', models.DateTimeField(auto_now=True, null=True, verbose_name='last modified')),
                ('name', models.CharField(max_length=255)),
                ('envs_type', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, related_name='type_envs', to='envs.typeenvs')),
            ],
            options={
                'verbose_name': 'env',
                'verbose_name_plural': 'envs',
            },
        ),
    ]
|
{"/api/contents/entities/__init__.py": ["/api/contents/entities/entity.py"], "/api/core/csv/__init__.py": ["/api/core/csv/parse_csv.py"], "/api/core/schemas/__init__.py": ["/api/core/schemas/schema.py"], "/api/channels/repositories/__init__.py": ["/api/channels/repositories/repository.py"], "/api/channels/tests/tests_score_single_channel.py": ["/api/channels/tests/factory.py"], "/api/channels/tests/tests_score_multiple_channels.py": ["/api/channels/tests/factory.py"], "/api/envs/repository.py": ["/api/envs/models.py"], "/api/channels/serializers/__init__.py": ["/api/channels/serializers/channels.py"], "/api/channels/managers/__init__.py": ["/api/channels/managers/manager.py"], "/api/core/models/__init__.py": ["/api/core/models/image_models.py", "/api/core/models/title_model.py"], "/api/contents/serializers/__init__.py": ["/api/contents/serializers/content.py"], "/api/envs/serializers/__init__.py": ["/api/envs/serializers/envs_list.py"]}
|
29,764,632
|
pollitosabroson/fly
|
refs/heads/main
|
/api/envs/management/commands/first_command.py
|
from django.core import management
from django.core.management.base import BaseCommand
from django.utils.translation import gettext_lazy as _
class Command(BaseCommand):
    """Bootstrap command that seeds envs and channels in one shot."""

    help = _(
        'Command to create the ElasticSearch index, create the envs and import'
        ' the country. Example: python manage.py first_command'
    )

    def handle(self, *args, **options):
        """Run the seeding sub-commands in order."""
        # Envs must be created before channels, which reference them.
        for command_name in ('import_envs', 'import_channels'):
            management.call_command(command_name)
|
{"/api/contents/entities/__init__.py": ["/api/contents/entities/entity.py"], "/api/core/csv/__init__.py": ["/api/core/csv/parse_csv.py"], "/api/core/schemas/__init__.py": ["/api/core/schemas/schema.py"], "/api/channels/repositories/__init__.py": ["/api/channels/repositories/repository.py"], "/api/channels/tests/tests_score_single_channel.py": ["/api/channels/tests/factory.py"], "/api/channels/tests/tests_score_multiple_channels.py": ["/api/channels/tests/factory.py"], "/api/envs/repository.py": ["/api/envs/models.py"], "/api/channels/serializers/__init__.py": ["/api/channels/serializers/channels.py"], "/api/channels/managers/__init__.py": ["/api/channels/managers/manager.py"], "/api/core/models/__init__.py": ["/api/core/models/image_models.py", "/api/core/models/title_model.py"], "/api/contents/serializers/__init__.py": ["/api/contents/serializers/content.py"], "/api/envs/serializers/__init__.py": ["/api/envs/serializers/envs_list.py"]}
|
29,764,633
|
pollitosabroson/fly
|
refs/heads/main
|
/api/envs/urls.py
|
from django.urls import path
from . import views
# URL routes for the envs API: a single listing endpoint at the app root.
urlpatterns = [
    path(
        '',
        views.EnvsView.as_view({'get': 'list'}),
        name='create_list_envs',
    ),
]
|
{"/api/contents/entities/__init__.py": ["/api/contents/entities/entity.py"], "/api/core/csv/__init__.py": ["/api/core/csv/parse_csv.py"], "/api/core/schemas/__init__.py": ["/api/core/schemas/schema.py"], "/api/channels/repositories/__init__.py": ["/api/channels/repositories/repository.py"], "/api/channels/tests/tests_score_single_channel.py": ["/api/channels/tests/factory.py"], "/api/channels/tests/tests_score_multiple_channels.py": ["/api/channels/tests/factory.py"], "/api/envs/repository.py": ["/api/envs/models.py"], "/api/channels/serializers/__init__.py": ["/api/channels/serializers/channels.py"], "/api/channels/managers/__init__.py": ["/api/channels/managers/manager.py"], "/api/core/models/__init__.py": ["/api/core/models/image_models.py", "/api/core/models/title_model.py"], "/api/contents/serializers/__init__.py": ["/api/contents/serializers/content.py"], "/api/envs/serializers/__init__.py": ["/api/envs/serializers/envs_list.py"]}
|
29,764,634
|
pollitosabroson/fly
|
refs/heads/main
|
/api/channels/serializers/__init__.py
|
from .channels import BaseChannelSerializers, ChannelSerializer

# Public API of the channels.serializers package.
__all__ = ['ChannelSerializer', 'BaseChannelSerializers']
|
{"/api/contents/entities/__init__.py": ["/api/contents/entities/entity.py"], "/api/core/csv/__init__.py": ["/api/core/csv/parse_csv.py"], "/api/core/schemas/__init__.py": ["/api/core/schemas/schema.py"], "/api/channels/repositories/__init__.py": ["/api/channels/repositories/repository.py"], "/api/channels/tests/tests_score_single_channel.py": ["/api/channels/tests/factory.py"], "/api/channels/tests/tests_score_multiple_channels.py": ["/api/channels/tests/factory.py"], "/api/envs/repository.py": ["/api/envs/models.py"], "/api/channels/serializers/__init__.py": ["/api/channels/serializers/channels.py"], "/api/channels/managers/__init__.py": ["/api/channels/managers/manager.py"], "/api/core/models/__init__.py": ["/api/core/models/image_models.py", "/api/core/models/title_model.py"], "/api/contents/serializers/__init__.py": ["/api/contents/serializers/content.py"], "/api/envs/serializers/__init__.py": ["/api/envs/serializers/envs_list.py"]}
|
29,764,635
|
pollitosabroson/fly
|
refs/heads/main
|
/api/envs/management/commands/import_envs.py
|
import logging
from django.core.management.base import BaseCommand
from envs.models import Attributes, TypeEnvs
from tqdm import tqdm
logger = logging.getLogger(__name__)
class Command(BaseCommand):
    """Seed the database with the default env types and their attributes."""

    help = (
        "Command to update all references."
        "example to execution: python manage.py import_envs"
    )

    def handle(self, *args, **options):
        """Create every TypeEnvs row and its Attributes (idempotent)."""
        # Seed data keyed by env type: supported language codes and the
        # accepted content file extensions.
        seed_data = {
            TypeEnvs.LANGUAGE: ['ES', 'EU', 'PT', 'IT'],
            TypeEnvs.CONTENT: [
                'gif', 'jpg', 'tif', 'png', 'bmp', 'svg',
                'doc', 'pdf', 'txt', 'rtf', 'html', 'epub',
                'avi', 'mpeg', 'mov', 'mkv', 'asf', 'QT', 'webM', 'flv', 'RM',
                'DVD', 'xls', 'csv', 'ppt',
            ]
        }
        for env_name, attribute_names in tqdm(
            seed_data.items(), total=len(seed_data)
        ):
            # get_or_create keeps reruns of the command safe.
            env_type, _created = TypeEnvs.objects.get_or_create(name=env_name)
            for attribute_name in attribute_names:
                Attributes.objects.get_or_create(
                    name=attribute_name,
                    envs_type=env_type
                )
|
{"/api/contents/entities/__init__.py": ["/api/contents/entities/entity.py"], "/api/core/csv/__init__.py": ["/api/core/csv/parse_csv.py"], "/api/core/schemas/__init__.py": ["/api/core/schemas/schema.py"], "/api/channels/repositories/__init__.py": ["/api/channels/repositories/repository.py"], "/api/channels/tests/tests_score_single_channel.py": ["/api/channels/tests/factory.py"], "/api/channels/tests/tests_score_multiple_channels.py": ["/api/channels/tests/factory.py"], "/api/envs/repository.py": ["/api/envs/models.py"], "/api/channels/serializers/__init__.py": ["/api/channels/serializers/channels.py"], "/api/channels/managers/__init__.py": ["/api/channels/managers/manager.py"], "/api/core/models/__init__.py": ["/api/core/models/image_models.py", "/api/core/models/title_model.py"], "/api/contents/serializers/__init__.py": ["/api/contents/serializers/content.py"], "/api/envs/serializers/__init__.py": ["/api/envs/serializers/envs_list.py"]}
|
29,764,636
|
pollitosabroson/fly
|
refs/heads/main
|
/api/channels/managers/__init__.py
|
from .manager import ChannelManager

# Public API of the channels.managers package.
__all__ = ['ChannelManager']
|
{"/api/contents/entities/__init__.py": ["/api/contents/entities/entity.py"], "/api/core/csv/__init__.py": ["/api/core/csv/parse_csv.py"], "/api/core/schemas/__init__.py": ["/api/core/schemas/schema.py"], "/api/channels/repositories/__init__.py": ["/api/channels/repositories/repository.py"], "/api/channels/tests/tests_score_single_channel.py": ["/api/channels/tests/factory.py"], "/api/channels/tests/tests_score_multiple_channels.py": ["/api/channels/tests/factory.py"], "/api/envs/repository.py": ["/api/envs/models.py"], "/api/channels/serializers/__init__.py": ["/api/channels/serializers/channels.py"], "/api/channels/managers/__init__.py": ["/api/channels/managers/manager.py"], "/api/core/models/__init__.py": ["/api/core/models/image_models.py", "/api/core/models/title_model.py"], "/api/contents/serializers/__init__.py": ["/api/contents/serializers/content.py"], "/api/envs/serializers/__init__.py": ["/api/envs/serializers/envs_list.py"]}
|
29,764,637
|
pollitosabroson/fly
|
refs/heads/main
|
/api/channels/repositories/repository.py
|
from channels.models import Channel
class ChannelRepository:
    """Data-access helper for Channel queries."""

    MODEL = Channel

    @classmethod
    def query_all_average(cls):
        """Return all channels annotated with their average score.

        Returns:
            QuerySet: every channel, passed through ``calculate_score()``
            (a custom queryset method defined on the Channel manager).
        """
        all_channels = cls.MODEL.objects.all()
        return all_channels.calculate_score()
|
{"/api/contents/entities/__init__.py": ["/api/contents/entities/entity.py"], "/api/core/csv/__init__.py": ["/api/core/csv/parse_csv.py"], "/api/core/schemas/__init__.py": ["/api/core/schemas/schema.py"], "/api/channels/repositories/__init__.py": ["/api/channels/repositories/repository.py"], "/api/channels/tests/tests_score_single_channel.py": ["/api/channels/tests/factory.py"], "/api/channels/tests/tests_score_multiple_channels.py": ["/api/channels/tests/factory.py"], "/api/envs/repository.py": ["/api/envs/models.py"], "/api/channels/serializers/__init__.py": ["/api/channels/serializers/channels.py"], "/api/channels/managers/__init__.py": ["/api/channels/managers/manager.py"], "/api/core/models/__init__.py": ["/api/core/models/image_models.py", "/api/core/models/title_model.py"], "/api/contents/serializers/__init__.py": ["/api/contents/serializers/content.py"], "/api/envs/serializers/__init__.py": ["/api/envs/serializers/envs_list.py"]}
|
29,764,638
|
pollitosabroson/fly
|
refs/heads/main
|
/api/core/models/__init__.py
|
from .image_models import ImageModel
from .public_id import PublicIdModel
from .time_stamped import TimeStampedModel
from .title_model import TitleModel

# Abstract base models re-exported as the core.models public API.
__all__ = ['ImageModel', 'PublicIdModel', 'TimeStampedModel', 'TitleModel']
|
{"/api/contents/entities/__init__.py": ["/api/contents/entities/entity.py"], "/api/core/csv/__init__.py": ["/api/core/csv/parse_csv.py"], "/api/core/schemas/__init__.py": ["/api/core/schemas/schema.py"], "/api/channels/repositories/__init__.py": ["/api/channels/repositories/repository.py"], "/api/channels/tests/tests_score_single_channel.py": ["/api/channels/tests/factory.py"], "/api/channels/tests/tests_score_multiple_channels.py": ["/api/channels/tests/factory.py"], "/api/envs/repository.py": ["/api/envs/models.py"], "/api/channels/serializers/__init__.py": ["/api/channels/serializers/channels.py"], "/api/channels/managers/__init__.py": ["/api/channels/managers/manager.py"], "/api/core/models/__init__.py": ["/api/core/models/image_models.py", "/api/core/models/title_model.py"], "/api/contents/serializers/__init__.py": ["/api/contents/serializers/content.py"], "/api/envs/serializers/__init__.py": ["/api/envs/serializers/envs_list.py"]}
|
29,764,639
|
pollitosabroson/fly
|
refs/heads/main
|
/api/contents/serializers/__init__.py
|
from .content import ContentSerializers

# Public API of the contents.serializers package.
__all__ = ['ContentSerializers']
|
{"/api/contents/entities/__init__.py": ["/api/contents/entities/entity.py"], "/api/core/csv/__init__.py": ["/api/core/csv/parse_csv.py"], "/api/core/schemas/__init__.py": ["/api/core/schemas/schema.py"], "/api/channels/repositories/__init__.py": ["/api/channels/repositories/repository.py"], "/api/channels/tests/tests_score_single_channel.py": ["/api/channels/tests/factory.py"], "/api/channels/tests/tests_score_multiple_channels.py": ["/api/channels/tests/factory.py"], "/api/envs/repository.py": ["/api/envs/models.py"], "/api/channels/serializers/__init__.py": ["/api/channels/serializers/channels.py"], "/api/channels/managers/__init__.py": ["/api/channels/managers/manager.py"], "/api/core/models/__init__.py": ["/api/core/models/image_models.py", "/api/core/models/title_model.py"], "/api/contents/serializers/__init__.py": ["/api/contents/serializers/content.py"], "/api/envs/serializers/__init__.py": ["/api/envs/serializers/envs_list.py"]}
|
29,764,640
|
pollitosabroson/fly
|
refs/heads/main
|
/api/envs/serializers/__init__.py
|
from .envs_list import EnvsSerializer

# Public API of the envs.serializers package.
__all__ = ['EnvsSerializer']
|
{"/api/contents/entities/__init__.py": ["/api/contents/entities/entity.py"], "/api/core/csv/__init__.py": ["/api/core/csv/parse_csv.py"], "/api/core/schemas/__init__.py": ["/api/core/schemas/schema.py"], "/api/channels/repositories/__init__.py": ["/api/channels/repositories/repository.py"], "/api/channels/tests/tests_score_single_channel.py": ["/api/channels/tests/factory.py"], "/api/channels/tests/tests_score_multiple_channels.py": ["/api/channels/tests/factory.py"], "/api/envs/repository.py": ["/api/envs/models.py"], "/api/channels/serializers/__init__.py": ["/api/channels/serializers/channels.py"], "/api/channels/managers/__init__.py": ["/api/channels/managers/manager.py"], "/api/core/models/__init__.py": ["/api/core/models/image_models.py", "/api/core/models/title_model.py"], "/api/contents/serializers/__init__.py": ["/api/contents/serializers/content.py"], "/api/envs/serializers/__init__.py": ["/api/envs/serializers/envs_list.py"]}
|
29,764,641
|
pollitosabroson/fly
|
refs/heads/main
|
/api/channels/management/commands/create_csv.py
|
import logging
from datetime import datetime
from channels.repositories import ChannelRepository
from core.csv import ParseCsv
from django.core.management.base import BaseCommand
from tqdm import tqdm
logger = logging.getLogger(__name__)
def csv_name(str_name):
    """Append the ``.csv`` extension to a base file name.

    Args:
        str_name: Base name without extension.

    Returns:
        String: File name with the extension .csv
    """
    return '{}.csv'.format(str_name)
class Command(BaseCommand):
    """Management command that exports channel average ratings to CSV."""

    # FIX: original help text misspelled "averages" as "avergares".
    help = (
        "Command for create csv with averages."
        "example to execution: python manage.py create_csv"
    )
    HEADERS = ['channel title', 'average rating', ]

    def add_arguments(self, parser):
        """Register the optional ``--csv_name`` argument.

        The value (including the string default) is passed through
        :func:`csv_name`, which appends the ``.csv`` extension.
        """
        super(Command, self).add_arguments(parser)
        parser.add_argument(
            "-cn",
            "--csv_name",
            required=False,
            help=("csv name"),
            type=csv_name,
            default=f'average_channels_{datetime.now().strftime("%d_%b_%Y")}'
        )

    def handle(self, *args, **options):
        """Collect annotated channels and write them to the CSV file."""
        list_channels = []
        # Channels annotated with their average score, best-rated first.
        channels = ChannelRepository.query_all_average().order_by('-average')
        list_channels.append(
            self.HEADERS
        )
        for channel in tqdm(channels):
            # NOTE(review): rows use ``channel.score`` while ordering uses
            # ``average`` — presumably both come from calculate_score(); verify.
            list_channels.append(
                [
                    channel.title, channel.score
                ]
            )
        file_name = options.get('csv_name')
        ParseCsv.create_csv(
            file_name=file_name,
            row_list=list_channels
        )
        self.stdout.write(
            self.style.SUCCESS(
                f'The file {file_name} was created with a total of {channels.count()} channels' # NOQA
            )
        )
|
{"/api/contents/entities/__init__.py": ["/api/contents/entities/entity.py"], "/api/core/csv/__init__.py": ["/api/core/csv/parse_csv.py"], "/api/core/schemas/__init__.py": ["/api/core/schemas/schema.py"], "/api/channels/repositories/__init__.py": ["/api/channels/repositories/repository.py"], "/api/channels/tests/tests_score_single_channel.py": ["/api/channels/tests/factory.py"], "/api/channels/tests/tests_score_multiple_channels.py": ["/api/channels/tests/factory.py"], "/api/envs/repository.py": ["/api/envs/models.py"], "/api/channels/serializers/__init__.py": ["/api/channels/serializers/channels.py"], "/api/channels/managers/__init__.py": ["/api/channels/managers/manager.py"], "/api/core/models/__init__.py": ["/api/core/models/image_models.py", "/api/core/models/title_model.py"], "/api/contents/serializers/__init__.py": ["/api/contents/serializers/content.py"], "/api/envs/serializers/__init__.py": ["/api/envs/serializers/envs_list.py"]}
|
29,764,642
|
pollitosabroson/fly
|
refs/heads/main
|
/api/envs/views.py
|
import logging
from envs.models import TypeEnvs
from envs.serializers import EnvsSerializer
from rest_framework.viewsets import ModelViewSet
logger = logging.getLogger(__name__)
class EnvsView(ModelViewSet):
    """ViewSet exposing TypeEnvs records (list endpoint).

    NOTE(review): the original docstring said "List and create hotels" —
    likely copy-pasted from another project; this view serves env types.
    """
    serializer_class = EnvsSerializer
    # Objects are addressed by their public_id rather than the numeric pk.
    lookup_field = 'public_id'
    queryset = TypeEnvs.objects.all()
|
{"/api/contents/entities/__init__.py": ["/api/contents/entities/entity.py"], "/api/core/csv/__init__.py": ["/api/core/csv/parse_csv.py"], "/api/core/schemas/__init__.py": ["/api/core/schemas/schema.py"], "/api/channels/repositories/__init__.py": ["/api/channels/repositories/repository.py"], "/api/channels/tests/tests_score_single_channel.py": ["/api/channels/tests/factory.py"], "/api/channels/tests/tests_score_multiple_channels.py": ["/api/channels/tests/factory.py"], "/api/envs/repository.py": ["/api/envs/models.py"], "/api/channels/serializers/__init__.py": ["/api/channels/serializers/channels.py"], "/api/channels/managers/__init__.py": ["/api/channels/managers/manager.py"], "/api/core/models/__init__.py": ["/api/core/models/image_models.py", "/api/core/models/title_model.py"], "/api/contents/serializers/__init__.py": ["/api/contents/serializers/content.py"], "/api/envs/serializers/__init__.py": ["/api/envs/serializers/envs_list.py"]}
|
29,764,643
|
pollitosabroson/fly
|
refs/heads/main
|
/api/channels/migrations/0001_initial.py
|
# Generated by Django 3.1.11 on 2021-05-17 13:21
import core.models.image_models
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial migration: create the ``Channel`` table."""
    initial = True
    dependencies = [
        ('envs', '0001_initial'),
    ]
    operations = [
        migrations.CreateModel(
            name='Channel',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('image', models.ImageField(blank=True, null=True, upload_to=core.models.image_models.content_file_name)),
                ('public_id', models.CharField(db_index=True, editable=False, max_length=15, unique=True, verbose_name='public_id')),
                ('created_date', models.DateTimeField(auto_now_add=True, null=True, verbose_name='created date')),
                ('last_modified', models.DateTimeField(auto_now=True, null=True, verbose_name='last modified')),
                ('title', models.CharField(max_length=500, verbose_name='name')),
                # NOTE(review): lookup 'envs_type_name' lacks the '__' separator
                # used elsewhere ('envs_type__name'); migration 0002 alters this field.
                ('language', models.ForeignKey(limit_choices_to={'envs_type_name': 'languages'}, on_delete=django.db.models.deletion.DO_NOTHING, related_name='languages', to='envs.attributes')),
                # Self-referential FK — presumably a parent/tree structure; confirm.
                ('tree_channel', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='tree_channels', to='channels.channel')),
            ],
            options={
                'abstract': False,
            },
        ),
    ]
|
{"/api/contents/entities/__init__.py": ["/api/contents/entities/entity.py"], "/api/core/csv/__init__.py": ["/api/core/csv/parse_csv.py"], "/api/core/schemas/__init__.py": ["/api/core/schemas/schema.py"], "/api/channels/repositories/__init__.py": ["/api/channels/repositories/repository.py"], "/api/channels/tests/tests_score_single_channel.py": ["/api/channels/tests/factory.py"], "/api/channels/tests/tests_score_multiple_channels.py": ["/api/channels/tests/factory.py"], "/api/envs/repository.py": ["/api/envs/models.py"], "/api/channels/serializers/__init__.py": ["/api/channels/serializers/channels.py"], "/api/channels/managers/__init__.py": ["/api/channels/managers/manager.py"], "/api/core/models/__init__.py": ["/api/core/models/image_models.py", "/api/core/models/title_model.py"], "/api/contents/serializers/__init__.py": ["/api/contents/serializers/content.py"], "/api/envs/serializers/__init__.py": ["/api/envs/serializers/envs_list.py"]}
|
29,764,644
|
pollitosabroson/fly
|
refs/heads/main
|
/api/channels/migrations/0002_auto_20210520_0920.py
|
# Generated by Django 3.1.11 on 2021-05-20 09:20
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
    """Alter Channel.language's ``limit_choices_to`` lookup."""
    dependencies = [
        ('envs', '0001_initial'),
        ('channels', '0001_initial'),
    ]
    operations = [
        # NOTE(review): changes the lookup from 'envs_type_name' to
        # 'language__type_name' — neither obviously matches the
        # 'envs_type__name' path used by the Content model; verify intent.
        migrations.AlterField(
            model_name='channel',
            name='language',
            field=models.ForeignKey(limit_choices_to={'language__type_name': 'languages'}, on_delete=django.db.models.deletion.DO_NOTHING, related_name='languages', to='envs.attributes'),
        ),
    ]
|
{"/api/contents/entities/__init__.py": ["/api/contents/entities/entity.py"], "/api/core/csv/__init__.py": ["/api/core/csv/parse_csv.py"], "/api/core/schemas/__init__.py": ["/api/core/schemas/schema.py"], "/api/channels/repositories/__init__.py": ["/api/channels/repositories/repository.py"], "/api/channels/tests/tests_score_single_channel.py": ["/api/channels/tests/factory.py"], "/api/channels/tests/tests_score_multiple_channels.py": ["/api/channels/tests/factory.py"], "/api/envs/repository.py": ["/api/envs/models.py"], "/api/channels/serializers/__init__.py": ["/api/channels/serializers/channels.py"], "/api/channels/managers/__init__.py": ["/api/channels/managers/manager.py"], "/api/core/models/__init__.py": ["/api/core/models/image_models.py", "/api/core/models/title_model.py"], "/api/contents/serializers/__init__.py": ["/api/contents/serializers/content.py"], "/api/envs/serializers/__init__.py": ["/api/envs/serializers/envs_list.py"]}
|
29,764,645
|
pollitosabroson/fly
|
refs/heads/main
|
/api/contents/models.py
|
from channels.models import Channel
from core.models import PublicIdModel, TimeStampedModel, TitleModel
from core.models.image_models import content_file_name
from django.db import models
from envs.models import Attributes
class Content(
    TimeStampedModel, PublicIdModel, TitleModel
):
    """A piece of content (file + metadata) belonging to a channel."""
    # Owning channel; DO_NOTHING means channel deletion does not cascade here.
    channel = models.ForeignKey(
        Channel,
        models.DO_NOTHING,
        related_name='content',
        blank=False, null=False
    )
    # Content kind, restricted to Attributes whose env type is named 'content'
    # (the file-extension attributes seeded by the import_envs command).
    kind = models.ForeignKey(
        Attributes,
        models.DO_NOTHING,
        related_name='kinds',
        limit_choices_to={'envs_type__name': 'content'}
    )
    # Uploaded file; storage path is computed by core's content_file_name.
    content = models.FileField(
        upload_to=content_file_name,
    )
    # JSON payload; schema not visible here — presumably source metadata (verify).
    src = models.JSONField()
|
{"/api/contents/entities/__init__.py": ["/api/contents/entities/entity.py"], "/api/core/csv/__init__.py": ["/api/core/csv/parse_csv.py"], "/api/core/schemas/__init__.py": ["/api/core/schemas/schema.py"], "/api/channels/repositories/__init__.py": ["/api/channels/repositories/repository.py"], "/api/channels/tests/tests_score_single_channel.py": ["/api/channels/tests/factory.py"], "/api/channels/tests/tests_score_multiple_channels.py": ["/api/channels/tests/factory.py"], "/api/envs/repository.py": ["/api/envs/models.py"], "/api/channels/serializers/__init__.py": ["/api/channels/serializers/channels.py"], "/api/channels/managers/__init__.py": ["/api/channels/managers/manager.py"], "/api/core/models/__init__.py": ["/api/core/models/image_models.py", "/api/core/models/title_model.py"], "/api/contents/serializers/__init__.py": ["/api/contents/serializers/content.py"], "/api/envs/serializers/__init__.py": ["/api/envs/serializers/envs_list.py"]}
|
29,764,646
|
pollitosabroson/fly
|
refs/heads/main
|
/api/contents/serializers/content.py
|
import logging
from contents.models import Content
from rest_framework import serializers
logger = logging.getLogger(__name__)
class ContentSerializers(serializers.ModelSerializer):
    """Serialize Content rows, exposing public_id as ``id`` and the
    kind's name as ``kind``."""

    id = serializers.CharField(source='public_id')
    kind = serializers.CharField(source='kind.name')

    class Meta:
        model = Content
        fields = ['id', 'kind', 'content', 'src']
|
{"/api/contents/entities/__init__.py": ["/api/contents/entities/entity.py"], "/api/core/csv/__init__.py": ["/api/core/csv/parse_csv.py"], "/api/core/schemas/__init__.py": ["/api/core/schemas/schema.py"], "/api/channels/repositories/__init__.py": ["/api/channels/repositories/repository.py"], "/api/channels/tests/tests_score_single_channel.py": ["/api/channels/tests/factory.py"], "/api/channels/tests/tests_score_multiple_channels.py": ["/api/channels/tests/factory.py"], "/api/envs/repository.py": ["/api/envs/models.py"], "/api/channels/serializers/__init__.py": ["/api/channels/serializers/channels.py"], "/api/channels/managers/__init__.py": ["/api/channels/managers/manager.py"], "/api/core/models/__init__.py": ["/api/core/models/image_models.py", "/api/core/models/title_model.py"], "/api/contents/serializers/__init__.py": ["/api/contents/serializers/content.py"], "/api/envs/serializers/__init__.py": ["/api/envs/serializers/envs_list.py"]}
|
29,764,647
|
pollitosabroson/fly
|
refs/heads/main
|
/api/conftest.py
|
import logging
import pytest
from django.conf import settings
logger = logging.getLogger(__name__)
@pytest.fixture(scope='session')
def django_db_setup(django_db_blocker):
    """Establish a connection to the database being used
    in the environment to be tested.

    Overrides pytest-django's ``django_db_setup`` fixture so no test
    database is created; tests run against the configured default DB.

    NOTE(review): the assignment below is a self-assignment and thus a
    no-op beyond replacing the fixture — confirm whether a different
    alias (e.g. a dedicated test database) was intended.
    """
    settings.DATABASES['default'] = settings.DATABASES['default']
|
{"/api/contents/entities/__init__.py": ["/api/contents/entities/entity.py"], "/api/core/csv/__init__.py": ["/api/core/csv/parse_csv.py"], "/api/core/schemas/__init__.py": ["/api/core/schemas/schema.py"], "/api/channels/repositories/__init__.py": ["/api/channels/repositories/repository.py"], "/api/channels/tests/tests_score_single_channel.py": ["/api/channels/tests/factory.py"], "/api/channels/tests/tests_score_multiple_channels.py": ["/api/channels/tests/factory.py"], "/api/envs/repository.py": ["/api/envs/models.py"], "/api/channels/serializers/__init__.py": ["/api/channels/serializers/channels.py"], "/api/channels/managers/__init__.py": ["/api/channels/managers/manager.py"], "/api/core/models/__init__.py": ["/api/core/models/image_models.py", "/api/core/models/title_model.py"], "/api/contents/serializers/__init__.py": ["/api/contents/serializers/content.py"], "/api/envs/serializers/__init__.py": ["/api/envs/serializers/envs_list.py"]}
|
29,764,648
|
pollitosabroson/fly
|
refs/heads/main
|
/api/core/csv/parse_csv.py
|
import csv
class ParseCsv:
    """Parse CSV."""

    @staticmethod
    def create_csv(file_name, row_list):
        """Create file CSV.

        Args:
            file_name(str): Name file
            row_list(list): List with the content of the CSV
                (each row is a list of values)
        """
        # FIX: the csv module requires newline='' so the writer controls line
        # endings itself (the original omission produces blank rows on
        # Windows). 'w+' also granted unnecessary read access; 'w' suffices.
        with open(file_name, 'w', newline='') as file:
            writer = csv.writer(
                file, quoting=csv.QUOTE_NONNUMERIC,
                delimiter=';'
            )
            writer.writerows(file and row_list)
|
{"/api/contents/entities/__init__.py": ["/api/contents/entities/entity.py"], "/api/core/csv/__init__.py": ["/api/core/csv/parse_csv.py"], "/api/core/schemas/__init__.py": ["/api/core/schemas/schema.py"], "/api/channels/repositories/__init__.py": ["/api/channels/repositories/repository.py"], "/api/channels/tests/tests_score_single_channel.py": ["/api/channels/tests/factory.py"], "/api/channels/tests/tests_score_multiple_channels.py": ["/api/channels/tests/factory.py"], "/api/envs/repository.py": ["/api/envs/models.py"], "/api/channels/serializers/__init__.py": ["/api/channels/serializers/channels.py"], "/api/channels/managers/__init__.py": ["/api/channels/managers/manager.py"], "/api/core/models/__init__.py": ["/api/core/models/image_models.py", "/api/core/models/title_model.py"], "/api/contents/serializers/__init__.py": ["/api/contents/serializers/content.py"], "/api/envs/serializers/__init__.py": ["/api/envs/serializers/envs_list.py"]}
|
29,764,649
|
pollitosabroson/fly
|
refs/heads/main
|
/api/envs/serializers/envs_list.py
|
import logging
from envs.models import TypeEnvs
from rest_framework import serializers
logger = logging.getLogger(__name__)
class EnvsSerializer(serializers.ModelSerializer):
    """Serializer for TypeEnvs listings.

    NOTE(review): original docstring said "List hotel Serializer" — likely
    copy-pasted from another project; this serializes env types.
    """
    class Meta:
        model = TypeEnvs
        fields = ('name', 'id')
    def to_representation(self, instance):
        # Output is delegated entirely to the model's own to_json(), so
        # Meta.fields is effectively ignored for representation.
        return instance.to_json()
|
{"/api/contents/entities/__init__.py": ["/api/contents/entities/entity.py"], "/api/core/csv/__init__.py": ["/api/core/csv/parse_csv.py"], "/api/core/schemas/__init__.py": ["/api/core/schemas/schema.py"], "/api/channels/repositories/__init__.py": ["/api/channels/repositories/repository.py"], "/api/channels/tests/tests_score_single_channel.py": ["/api/channels/tests/factory.py"], "/api/channels/tests/tests_score_multiple_channels.py": ["/api/channels/tests/factory.py"], "/api/envs/repository.py": ["/api/envs/models.py"], "/api/channels/serializers/__init__.py": ["/api/channels/serializers/channels.py"], "/api/channels/managers/__init__.py": ["/api/channels/managers/manager.py"], "/api/core/models/__init__.py": ["/api/core/models/image_models.py", "/api/core/models/title_model.py"], "/api/contents/serializers/__init__.py": ["/api/contents/serializers/content.py"], "/api/envs/serializers/__init__.py": ["/api/envs/serializers/envs_list.py"]}
|
29,806,625
|
johnmwenda/taskmanagement
|
refs/heads/develop
|
/app/tasksystem/tasks/signals.py
|
import django.dispatch
# define partial_update_done signal
# Fired after a task progress update is saved; receivers get ``progress_instance``.
# NOTE: ``providing_args`` is deprecated since Django 3.0 and removed in
# Django 4.0 — drop the argument when upgrading.
task_progress_update = django.dispatch.Signal(providing_args=["progress_instance"])
|
{"/tasksystem/accounts/models.py": ["/tasksystem/departments/models.py"], "/tasksystem/tasks/models.py": ["/tasksystem/departments/models.py"], "/app/tasksystem/tasks/api/views.py": ["/tasksystem/tasks/models.py"], "/app/tasksystem/accounts/tests/test_models.py": ["/tasksystem/departments/models.py", "/tasksystem/accounts/models.py"], "/app/tasksystem/tasks/receivers.py": ["/app/tasksystem/tasks/signals.py"], "/app/tasksystem/tasks/api/serializers.py": ["/tasksystem/tasks/models.py", "/tasksystem/departments/models.py", "/tasksystem/accounts/models.py"], "/app/tasksystem/accounts/models.py": ["/tasksystem/departments/models.py"], "/app/tasksystem/tasks/models.py": ["/tasksystem/accounts/models.py", "/tasksystem/departments/models.py", "/app/tasksystem/tasks/validators.py"], "/app/tasksystem/departments/api/serializers.py": ["/tasksystem/departments/models.py"], "/app/tasksystem/accounts/api/serializers.py": ["/tasksystem/tasks/models.py"], "/app/tasksystem/accounts/api/views.py": ["/app/tasksystem/accounts/api/serializers.py"]}
|
29,806,626
|
johnmwenda/taskmanagement
|
refs/heads/develop
|
/app/tasksystem/tasks/api/views.py
|
from rest_framework import viewsets
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from tasksystem.tasks.models import Task
from tasksystem.tasks.api.serializers import (
TaskViewSerializer,
TaskCreatingSerializer,
)
class TaskViewSet(viewsets.ModelViewSet):
    '''
    API Endpoint for task objects
    '''
    serializer_class = TaskViewSerializer
    permission_classes = (IsAuthenticated, )
    def get_queryset(self):
        '''
        Determine queryset by checking type of user logged in (Supervisor or Junior)
        '''
        user = self.request.user
        # Falls back to an empty list when the user is neither a manager
        # nor a member, yielding an empty result set.
        queryset = []
        if self.request.user.is_manager:
            queryset = Task.supervisor_objects.tasks(user)
        elif user.is_member:
            queryset = Task.junior_objects.tasks(user)
        return queryset
    def get_serializer_class(self):
        # Write operations use the creation serializer; reads use the view one.
        if self.action in ['create', 'update', 'partial_update']:
            return TaskCreatingSerializer
        return TaskViewSerializer
    def partial_update(self, request, *args, **kwargs):
        """Apply a partial update (mirrors DRF's UpdateModelMixin logic)."""
        instance = self.get_object()
        serializer = self.get_serializer(
            instance, data=request.data, partial=True)
        serializer.is_valid(raise_exception=True)
        serializer.save()
        if getattr(instance, '_prefetched_objects_cache', None):
            # If 'prefetch_related' has been applied to a queryset, we need to
            # forcibly invalidate the prefetch cache on the instance.
            instance._prefetched_objects_cache = {}
        return Response(serializer.data)
    def perform_create(self, serializer):
        # Stamp the authenticated user as the task's reporter on creation.
        serializer.save(reporter=self.request.user)
|
{"/tasksystem/accounts/models.py": ["/tasksystem/departments/models.py"], "/tasksystem/tasks/models.py": ["/tasksystem/departments/models.py"], "/app/tasksystem/tasks/api/views.py": ["/tasksystem/tasks/models.py"], "/app/tasksystem/accounts/tests/test_models.py": ["/tasksystem/departments/models.py", "/tasksystem/accounts/models.py"], "/app/tasksystem/tasks/receivers.py": ["/app/tasksystem/tasks/signals.py"], "/app/tasksystem/tasks/api/serializers.py": ["/tasksystem/tasks/models.py", "/tasksystem/departments/models.py", "/tasksystem/accounts/models.py"], "/app/tasksystem/accounts/models.py": ["/tasksystem/departments/models.py"], "/app/tasksystem/tasks/models.py": ["/tasksystem/accounts/models.py", "/tasksystem/departments/models.py", "/app/tasksystem/tasks/validators.py"], "/app/tasksystem/departments/api/serializers.py": ["/tasksystem/departments/models.py"], "/app/tasksystem/accounts/api/serializers.py": ["/tasksystem/tasks/models.py"], "/app/tasksystem/accounts/api/views.py": ["/app/tasksystem/accounts/api/serializers.py"]}
|
29,806,627
|
johnmwenda/taskmanagement
|
refs/heads/develop
|
/app/tasksystem/accounts/tests/test_models.py
|
from django.test import TestCase
from tasksystem.departments.models import Department
from django.contrib.auth.models import User
# Create your tests here.
from tasksystem.accounts.models import User
class UserModelTest(TestCase):
    """Model-level tests for the custom User model."""

    def setUp(self):
        # One user created with an explicit password, one without.
        self.user = User.objects.create(
            email='test@gmail.com', password='password',
            first_name='test', last_name='last')
        self.user_no_initial_password = User.objects.create(
            email='test1@gmail.com',
            first_name='test1', last_name='last1')

    def test_if_no_password_is_passed_a_default_one_is_created(self):
        self.assertNotEqual(self.user_no_initial_password.password, '')

    def test_string_representation_functions(self):
        self.assertEqual(str(self.user), 'test@gmail.com')
        self.assertEqual(self.user.get_short_name(), 'test')
        self.assertEqual(self.user.get_full_name(), 'test last')
|
{"/tasksystem/accounts/models.py": ["/tasksystem/departments/models.py"], "/tasksystem/tasks/models.py": ["/tasksystem/departments/models.py"], "/app/tasksystem/tasks/api/views.py": ["/tasksystem/tasks/models.py"], "/app/tasksystem/accounts/tests/test_models.py": ["/tasksystem/departments/models.py", "/tasksystem/accounts/models.py"], "/app/tasksystem/tasks/receivers.py": ["/app/tasksystem/tasks/signals.py"], "/app/tasksystem/tasks/api/serializers.py": ["/tasksystem/tasks/models.py", "/tasksystem/departments/models.py", "/tasksystem/accounts/models.py"], "/app/tasksystem/accounts/models.py": ["/tasksystem/departments/models.py"], "/app/tasksystem/tasks/models.py": ["/tasksystem/accounts/models.py", "/tasksystem/departments/models.py", "/app/tasksystem/tasks/validators.py"], "/app/tasksystem/departments/api/serializers.py": ["/tasksystem/departments/models.py"], "/app/tasksystem/accounts/api/serializers.py": ["/tasksystem/tasks/models.py"], "/app/tasksystem/accounts/api/views.py": ["/app/tasksystem/accounts/api/serializers.py"]}
|
29,806,628
|
johnmwenda/taskmanagement
|
refs/heads/develop
|
/app/tasksystem/tasks/emails.py
|
from django.conf import settings
from django.core.mail import EmailMessage
from django.template.loader import render_to_string
print(settings.EMAIL_HOST_USER, settings.EMAIL_HOST_PASSWORD)
def send_task_progress_email(notifications, progress):
    """Send a task-progress email to every recipient in *notifications*.

    Args:
        notifications: iterable of dicts with "name", "email" and optionally
            "extra" keys, one per recipient (built in tasks.receivers).
        progress: dict with "task", "due_date", "progress_percentage",
            "progress_comment" and "created_by" keys.

    Returns:
        int: number of messages successfully sent.
    """
    sent = 0
    for notify in notifications:
        context = {
            "name": notify["name"],
            "task": progress["task"],
            "due_date": progress["due_date"],
            "progress_percentage": progress["progress_percentage"],
            "progress_comment": progress["progress_comment"],
            "created_by": progress["created_by"],
            # BUG FIX: "extra" is a per-recipient note set on the notification
            # dicts (e.g. for department subscribers); it was read from
            # `progress`, where it never exists, so it was always None.
            "extra": notify.get('extra', None)
        }
        msg_html = render_to_string('email/progress_update.html', context)
        msg = EmailMessage(
            subject='Task Progress Update',
            body=msg_html,
            from_email=settings.EMAIL_HOST_USER,
            to=[notify["email"]])
        msg.content_subtype = "html"  # Main content is now text/html
        # BUG FIX: previously `return msg.send()` exited after the first
        # recipient; every notification must be delivered.
        sent += msg.send()
    return sent
|
{"/tasksystem/accounts/models.py": ["/tasksystem/departments/models.py"], "/tasksystem/tasks/models.py": ["/tasksystem/departments/models.py"], "/app/tasksystem/tasks/api/views.py": ["/tasksystem/tasks/models.py"], "/app/tasksystem/accounts/tests/test_models.py": ["/tasksystem/departments/models.py", "/tasksystem/accounts/models.py"], "/app/tasksystem/tasks/receivers.py": ["/app/tasksystem/tasks/signals.py"], "/app/tasksystem/tasks/api/serializers.py": ["/tasksystem/tasks/models.py", "/tasksystem/departments/models.py", "/tasksystem/accounts/models.py"], "/app/tasksystem/accounts/models.py": ["/tasksystem/departments/models.py"], "/app/tasksystem/tasks/models.py": ["/tasksystem/accounts/models.py", "/tasksystem/departments/models.py", "/app/tasksystem/tasks/validators.py"], "/app/tasksystem/departments/api/serializers.py": ["/tasksystem/departments/models.py"], "/app/tasksystem/accounts/api/serializers.py": ["/tasksystem/tasks/models.py"], "/app/tasksystem/accounts/api/views.py": ["/app/tasksystem/accounts/api/serializers.py"]}
|
29,806,629
|
johnmwenda/taskmanagement
|
refs/heads/develop
|
/app/tasksystem/urls.py
|
"""tasksystem URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.contrib import admin
from django.conf.urls.static import static
from django.conf import settings
from rest_framework import routers
from tasksystem.tasks.api import (
views as tasks_api_views
)
from tasksystem.accounts.api import (
views as accounts_api_views
)
# /api/v2 - django rest framework
# DefaultRouter auto-generates list/detail routes for registered viewsets.
router = routers.DefaultRouter()
# tasks app
# NOTE(review): `base_name` was renamed to `basename` in DRF 3.9 and removed
# later — confirm the pinned DRF version before upgrading.
router.register(r'tasks', tasks_api_views.TaskViewSet, base_name='tasks')
urlpatterns = [
    url(r'^admin/', admin.site.urls),
    # Session/token auth endpoints from the accounts app.
    url(r'^api/v2/login/', accounts_api_views.UserSignInView.as_view(), name='login'),
    url(r'^api/v2/logout/', accounts_api_views.UserSignoutView.as_view(), name='logout'),
    # NOTE(review): unlike login/logout this pattern has no trailing slash, so
    # it also matches prefixes like /api/v2/signupfoo — confirm intended.
    url(r'^api/v2/signup', accounts_api_views.UserSignUpView.as_view(), name='signup'),
    # Router-generated task routes live under the same /api/v2/ prefix.
    url(r'^api/v2/', include(router.urls)),
    # Browsable-API login/logout for DRF.
    url(r'^api-auth/', include('rest_framework.urls')),
]
if settings.DEBUG:
    # import debug_toolbar
    # from silk import urls
    # django-silk profiling UI, exposed only in DEBUG builds.
    urlpatterns += [
        url(r'^silk/', include('silk.urls', namespace='silk'))
    ]
|
{"/tasksystem/accounts/models.py": ["/tasksystem/departments/models.py"], "/tasksystem/tasks/models.py": ["/tasksystem/departments/models.py"], "/app/tasksystem/tasks/api/views.py": ["/tasksystem/tasks/models.py"], "/app/tasksystem/accounts/tests/test_models.py": ["/tasksystem/departments/models.py", "/tasksystem/accounts/models.py"], "/app/tasksystem/tasks/receivers.py": ["/app/tasksystem/tasks/signals.py"], "/app/tasksystem/tasks/api/serializers.py": ["/tasksystem/tasks/models.py", "/tasksystem/departments/models.py", "/tasksystem/accounts/models.py"], "/app/tasksystem/accounts/models.py": ["/tasksystem/departments/models.py"], "/app/tasksystem/tasks/models.py": ["/tasksystem/accounts/models.py", "/tasksystem/departments/models.py", "/app/tasksystem/tasks/validators.py"], "/app/tasksystem/departments/api/serializers.py": ["/tasksystem/departments/models.py"], "/app/tasksystem/accounts/api/serializers.py": ["/tasksystem/tasks/models.py"], "/app/tasksystem/accounts/api/views.py": ["/app/tasksystem/accounts/api/serializers.py"]}
|
29,806,630
|
johnmwenda/taskmanagement
|
refs/heads/develop
|
/app/tasksystem/tasks/validators.py
|
from django.utils import timezone
from django.core.exceptions import ValidationError
def validate_date_not_in_past(value):
    """Field validator: reject datetimes that lie before the current time.

    Raises:
        ValidationError: when *value* is earlier than ``timezone.now()``.
    """
    current_time = timezone.now()
    if value >= current_time:
        return
    raise ValidationError('due date cannot be in the past')
|
{"/tasksystem/accounts/models.py": ["/tasksystem/departments/models.py"], "/tasksystem/tasks/models.py": ["/tasksystem/departments/models.py"], "/app/tasksystem/tasks/api/views.py": ["/tasksystem/tasks/models.py"], "/app/tasksystem/accounts/tests/test_models.py": ["/tasksystem/departments/models.py", "/tasksystem/accounts/models.py"], "/app/tasksystem/tasks/receivers.py": ["/app/tasksystem/tasks/signals.py"], "/app/tasksystem/tasks/api/serializers.py": ["/tasksystem/tasks/models.py", "/tasksystem/departments/models.py", "/tasksystem/accounts/models.py"], "/app/tasksystem/accounts/models.py": ["/tasksystem/departments/models.py"], "/app/tasksystem/tasks/models.py": ["/tasksystem/accounts/models.py", "/tasksystem/departments/models.py", "/app/tasksystem/tasks/validators.py"], "/app/tasksystem/departments/api/serializers.py": ["/tasksystem/departments/models.py"], "/app/tasksystem/accounts/api/serializers.py": ["/tasksystem/tasks/models.py"], "/app/tasksystem/accounts/api/views.py": ["/app/tasksystem/accounts/api/serializers.py"]}
|
29,806,631
|
johnmwenda/taskmanagement
|
refs/heads/develop
|
/app/tasksystem/tasks/receivers.py
|
from django.dispatch import receiver
from .signals import task_progress_update
from tasksystem.tasks.jobs import delay_task_progress_notifications
@receiver(task_progress_update)
def task_progress_notifications(sender, **kwargs):
    '''
    Notify all interested parties (user/department subscribers, assignees and
    the reporter) of a progress update on a task.

    ``sender`` is the Task instance; ``kwargs['instance']`` is the newly
    created TaskProgress row. Actual delivery is delegated to an async job.
    '''
    task = sender
    progress_instance = kwargs['instance']
    # Flatten the progress row into a plain dict so the async job can
    # serialize it.
    progress = dict()
    progress["task"] = progress_instance.task.name
    progress["due_date"] = progress_instance.task.due_date
    progress["progress_percentage"] = progress_instance.progress_percentage
    progress["progress_comment"] = progress_instance.progress_comment
    progress["created_by"] = progress_instance.created_by.get_full_name() + ' ' + progress_instance.created_by.email
    notifications = []
    user_subscribers = task.user_subscribers.all()
    department_subscribers = task.department_subscribers.all()
    reporter = task.reporter
    assignees = task.assignees.all()
    created_by = progress_instance.created_by
    for user in user_subscribers:
        notifications.append({
            "name": user.get_full_name(),
            "email": user.email
        })
    for department in department_subscribers:
        notifications.append({
            "name": f'{department.name} members',
            "email": department.email,
            "extra": "You are receiving this email because your department has been subscribed to receive progress updates on this task"
        })
    for assignee in assignees:
        if assignee == created_by:
            # The assignee authored this update themselves; skip their mail.
            continue
        # BUG FIX: previously appended `user` — the leftover loop variable
        # from the subscriber loop — instead of the current assignee.
        notifications.append({
            "name": assignee.get_full_name(),
            "email": assignee.email
        })
    if reporter != created_by:
        # Notify the reporter unless they authored the update themselves.
        notifications.append({
            "name": reporter.get_full_name(),
            "email": reporter.email
        })
    delay_task_progress_notifications.delay(notifications, progress)
|
{"/tasksystem/accounts/models.py": ["/tasksystem/departments/models.py"], "/tasksystem/tasks/models.py": ["/tasksystem/departments/models.py"], "/app/tasksystem/tasks/api/views.py": ["/tasksystem/tasks/models.py"], "/app/tasksystem/accounts/tests/test_models.py": ["/tasksystem/departments/models.py", "/tasksystem/accounts/models.py"], "/app/tasksystem/tasks/receivers.py": ["/app/tasksystem/tasks/signals.py"], "/app/tasksystem/tasks/api/serializers.py": ["/tasksystem/tasks/models.py", "/tasksystem/departments/models.py", "/tasksystem/accounts/models.py"], "/app/tasksystem/accounts/models.py": ["/tasksystem/departments/models.py"], "/app/tasksystem/tasks/models.py": ["/tasksystem/accounts/models.py", "/tasksystem/departments/models.py", "/app/tasksystem/tasks/validators.py"], "/app/tasksystem/departments/api/serializers.py": ["/tasksystem/departments/models.py"], "/app/tasksystem/accounts/api/serializers.py": ["/tasksystem/tasks/models.py"], "/app/tasksystem/accounts/api/views.py": ["/app/tasksystem/accounts/api/serializers.py"]}
|
29,806,632
|
johnmwenda/taskmanagement
|
refs/heads/develop
|
/app/tasksystem/tasks/api/serializers.py
|
from django.utils import timezone
from django.db import transaction
from rest_framework import serializers
from tasksystem.tasks.models import Task, TaskAttachment, TaskSubscription, TaskProgress
from tasksystem.departments.models import Category, Department
from tasksystem.accounts.models import User
from tasksystem.accounts.api.serializers import BasicUserSerializer
from tasksystem.tasks.signals import task_progress_update
class TaskAttachmentSerializer(serializers.ModelSerializer):
    """Plain serializer exposing every TaskAttachment field."""
    class Meta:
        model = TaskAttachment
        fields = '__all__'
class TaskSubscriptionSerializer(serializers.ModelSerializer):
    """Serializer for TaskSubscription rows nested under a task."""
    # NOTE(review): HyperlinkedIdentityField renders the URL of the serialized
    # object itself via 'tasks-detail' — it does not link to the creating
    # user. Looks like it should be a HyperlinkedRelatedField; confirm with
    # the API consumers before changing.
    created_by = serializers.HyperlinkedIdentityField(view_name='tasks-detail')
    class Meta:
        model = TaskSubscription
        exclude = ('created_date', 'deleted_at', 'modified_date')
        read_only_fields = ('id',)
class TaskProgressSerializer(serializers.ModelSerializer):
    """Create/update serializer for TaskProgress rows nested in a task.

    ``id`` is writable so the parent serializer (TaskCreatingSerializer)
    can address existing progress rows when submitting a task update.
    """
    id = serializers.IntegerField(
        allow_null=True, required=False)
    created_by = BasicUserSerializer(read_only=True)

    class Meta:
        model = TaskProgress
        fields = ('id', 'progress_comment', 'progress_percentage', 'created_by',)

    @transaction.atomic
    def create(self, validated_data):
        # `task` and `progress_instance` are injected via serializer.save()
        # by TaskCreatingSerializer.save_taskprogress_set(); the latter is
        # irrelevant for creation and must be discarded.
        task = validated_data.pop('task', None)
        validated_data.pop('progress_instance', None)
        instance = TaskProgress.objects.create(task=task, **validated_data)
        # Custom signal so receivers can fan out progress notifications.
        task_progress_update.send(
            sender=task, instance=instance)
        return instance

    def update(self, instance, validated_data):
        # BUG FIX: the 'progress_instance' kwarg only needs to be discarded,
        # but the previous code reassigned `instance` from it — crashing with
        # AttributeError on None whenever the kwarg was not passed to save().
        validated_data.pop('progress_instance', None)
        validated_data.pop('created_by', None)  # its already set, dont update it
        for attr, value in validated_data.items():
            setattr(instance, attr, value)
        instance.save()
        return instance
class TaskCreatingSerializer(serializers.ModelSerializer):
    """Write serializer for creating/updating tasks.

    Accepts primary keys for related objects, a nested list of progress
    updates (``taskprogress_set``), and a virtual ``is_complete`` flag that
    is translated into ``complete_time`` during validation.
    """
    category = serializers.PrimaryKeyRelatedField(
        queryset=Category.objects.all())
    department = serializers.PrimaryKeyRelatedField(
        queryset=Department.objects.all())
    assignees = serializers.PrimaryKeyRelatedField(
        many=True, queryset=User.objects.all(), allow_null=True, required=False)
    user_subscribers = serializers.PrimaryKeyRelatedField(
        many=True, queryset=User.objects.all(), allow_null=True, required=False)
    department_subscribers = serializers.PrimaryKeyRelatedField(
        many=True, queryset=Department.objects.all(), allow_null=True, required=False)
    # Nested progress rows; created/updated via save_taskprogress_set().
    taskprogress_set = TaskProgressSerializer(many=True)
    is_complete = serializers.BooleanField(required=False)

    class Meta:
        model = Task
        fields = ('name', 'description', 'due_date', 'status', 'access_level', 'priority',
                  'category', 'department', 'user_subscribers', 'department_subscribers',
                  'assignees', 'taskprogress_set', 'is_complete', 'complete_time')

    def validate_user_subscribers(self, value):
        # validate reporter cannot subscribe to their own task
        user = self.context.get('request').user
        if user in value:
            raise serializers.ValidationError(
                [f'User with ID {user.pk} is the reporter, they dont need to be subscribed to this task'])
        return value

    def validate_assignees(self, value):
        # validate junior cannot assign tasks to their supervisor
        user = self.context.get('request').user
        if(user.is_member):
            for assignee in value:
                if assignee.is_manager:
                    raise serializers.ValidationError(
                        [f'A junior cannot assign a task to a supervisor. ID {assignee.pk} - {assignee.email} is a supervisor'])
        return value

    def validate(self, attrs):
        # if is_complete field is passed, pop, and set complete_time
        # (the flag never reaches the model — only complete_time is stored)
        is_complete = attrs.pop('is_complete', None)
        # NOTE(review): `is_complete and is_complete is True` is redundant —
        # the second test implies the first.
        if is_complete and is_complete is True:
            attrs['complete_time'] = timezone.now()
        return attrs

    def save_taskprogress_set(self, task, taskprogress_set):
        '''
        Save taskprogress nested serializer: each entry is created when its
        id is absent/unknown, or updated when the id matches an existing row.
        '''
        for progress_data in taskprogress_set:
            # remove passed id from data
            id = progress_data.pop('id', None)
            # use id to fetch TaskProgressInstance, in case its an update on TaskProgress
            try:
                progress_instance = TaskProgress.objects.get(
                    pk=id)
            except TaskProgress.DoesNotExist:
                progress_instance = None
            # instantiate serializer using TaskProgressInstance or None
            serializer = TaskProgressSerializer(
                data=progress_data, instance=progress_instance)
            serializer.is_valid(raise_exception=True)
            # saving serializer will either create or update TaskProgress depending on whether we found
            # an existing instance
            serializer.save(progress_instance=progress_instance, task=task, created_by=self.context.get('request').user)

    @transaction.atomic
    def update(self, instance, validated_data):
        # pop and save nested taskprogress serializer
        taskprogress_set = validated_data.pop("taskprogress_set", [])
        if taskprogress_set:
            self.save_taskprogress_set(instance, taskprogress_set)
        return super().update(instance, validated_data)

    @transaction.atomic
    def create(self, validated_data):
        user = self.context.get('request').user
        # Pop M2M/nested payloads: they cannot be passed to Task.objects.create.
        department_subscribers = validated_data.pop(
            'department_subscribers', [])
        user_subscribers = validated_data.pop('user_subscribers', [])
        assignees = validated_data.pop('assignees', [])
        taskprogress_set = validated_data.pop('taskprogress_set', [])
        # create task
        task = Task.objects.create(**validated_data)
        # create user_subscribers
        user_sub_list = []
        for user_sub in user_subscribers:
            subscription = TaskSubscription(
                task=task,
                user=user_sub,
                created_by=user,
                subscriber_type='USER'
            )
            user_sub_list.append(subscription)
        TaskSubscription.objects.bulk_create(user_sub_list)
        # create department_subscribers
        department_sub_list = []
        for dept_sub in department_subscribers:
            subscription = TaskSubscription(
                task=task,
                department=dept_sub,
                created_by=user,
                subscriber_type='DEPARTMENT'
            )
            department_sub_list.append(subscription)
        TaskSubscription.objects.bulk_create(department_sub_list)
        # create task assignees
        # NOTE(review): direct M2M assignment only works on Django < 2.0;
        # newer versions require task.assignees.set(...) — confirm version.
        task.assignees = [assignee.pk for assignee in assignees]
        # create initial task progress for this task
        self.save_taskprogress_set(task, taskprogress_set)
        return task
class TaskViewSerializer(serializers.ModelSerializer):
    """Read serializer for tasks: related objects are expanded (users,
    subscriptions, attachments, progress rows) and coded fields are rendered
    as their human-readable display values."""
    category = serializers.SlugRelatedField(
        queryset=Category.objects.all(),
        many=False, slug_field='name', read_only=False)
    department = serializers.SlugRelatedField(
        queryset=Department.objects.all(),
        many=False, slug_field='name', read_only=False)
    reporter = BasicUserSerializer(read_only=True)
    assignees = BasicUserSerializer(read_only=True, many=True)
    subscribers = TaskSubscriptionSerializer(many=True)
    attachments = TaskAttachmentSerializer(many=True)
    # Display values of the choice fields (see get_* methods below).
    status = serializers.SerializerMethodField()
    access_level = serializers.SerializerMethodField()
    priority = serializers.SerializerMethodField()
    # Derived from Task.is_complete (property on the model).
    is_complete = serializers.BooleanField(read_only=True)
    taskprogress_set = TaskProgressSerializer(many=True)

    class Meta:
        model = Task
        fields = ('id', 'name', 'reporter', 'assignees', 'subscribers', 'attachments',
                  'category', 'department', 'description', 'due_date', 'complete_time',
                  'status', 'access_level', 'priority', 'created_date',
                  'modified_date', 'is_complete', 'taskprogress_set')
        read_only_fields = ('id', 'created_date',
                            'modified_date', 'complete_time')

    # def create(self, validated_data):
    #     print(validated_data)
    #     return

    def get_status(self, obj):
        # Human-readable label for the single-letter status code.
        return obj.get_status_display()

    def get_access_level(self, obj):
        return obj.get_access_level_display()

    def get_priority(self, obj):
        return obj.get_priority_display()
|
{"/tasksystem/accounts/models.py": ["/tasksystem/departments/models.py"], "/tasksystem/tasks/models.py": ["/tasksystem/departments/models.py"], "/app/tasksystem/tasks/api/views.py": ["/tasksystem/tasks/models.py"], "/app/tasksystem/accounts/tests/test_models.py": ["/tasksystem/departments/models.py", "/tasksystem/accounts/models.py"], "/app/tasksystem/tasks/receivers.py": ["/app/tasksystem/tasks/signals.py"], "/app/tasksystem/tasks/api/serializers.py": ["/tasksystem/tasks/models.py", "/tasksystem/departments/models.py", "/tasksystem/accounts/models.py"], "/app/tasksystem/accounts/models.py": ["/tasksystem/departments/models.py"], "/app/tasksystem/tasks/models.py": ["/tasksystem/accounts/models.py", "/tasksystem/departments/models.py", "/app/tasksystem/tasks/validators.py"], "/app/tasksystem/departments/api/serializers.py": ["/tasksystem/departments/models.py"], "/app/tasksystem/accounts/api/serializers.py": ["/tasksystem/tasks/models.py"], "/app/tasksystem/accounts/api/views.py": ["/app/tasksystem/accounts/api/serializers.py"]}
|
29,806,633
|
johnmwenda/taskmanagement
|
refs/heads/develop
|
/app/tasksystem/departments/models.py
|
from django.db import models
# Create your models here.
class Department(models.Model):
    """An organisational unit; users and tasks belong to a department."""
    name = models.CharField(max_length=100)
    slug = models.SlugField(unique=True)
    # Group address used for department-wide task notifications.
    email = models.EmailField(max_length=255, null=False, blank=False, unique=True)
    def __str__(self):
        return self.name
class Category(models.Model):
    """A task category, optionally scoped to a department."""
    name = models.CharField(max_length=100)
    # NOTE(review): no on_delete — legal only on Django < 2.0; the project
    # docs reference Django 1.11. Confirm before upgrading.
    department = models.ForeignKey(Department, blank=True, null=True)
    def __str__(self):
        return f'{self.name} - {self.department}'
|
{"/tasksystem/accounts/models.py": ["/tasksystem/departments/models.py"], "/tasksystem/tasks/models.py": ["/tasksystem/departments/models.py"], "/app/tasksystem/tasks/api/views.py": ["/tasksystem/tasks/models.py"], "/app/tasksystem/accounts/tests/test_models.py": ["/tasksystem/departments/models.py", "/tasksystem/accounts/models.py"], "/app/tasksystem/tasks/receivers.py": ["/app/tasksystem/tasks/signals.py"], "/app/tasksystem/tasks/api/serializers.py": ["/tasksystem/tasks/models.py", "/tasksystem/departments/models.py", "/tasksystem/accounts/models.py"], "/app/tasksystem/accounts/models.py": ["/tasksystem/departments/models.py"], "/app/tasksystem/tasks/models.py": ["/tasksystem/accounts/models.py", "/tasksystem/departments/models.py", "/app/tasksystem/tasks/validators.py"], "/app/tasksystem/departments/api/serializers.py": ["/tasksystem/departments/models.py"], "/app/tasksystem/accounts/api/serializers.py": ["/tasksystem/tasks/models.py"], "/app/tasksystem/accounts/api/views.py": ["/app/tasksystem/accounts/api/serializers.py"]}
|
29,806,634
|
johnmwenda/taskmanagement
|
refs/heads/develop
|
/app/tasksystem/tasks/__init__.py
|
# Select the app config explicitly; presumably TasksConfig.ready() imports
# the signal receivers in tasks/receivers.py — TODO confirm.
default_app_config = 'tasksystem.tasks.apps.TasksConfig'
|
{"/tasksystem/accounts/models.py": ["/tasksystem/departments/models.py"], "/tasksystem/tasks/models.py": ["/tasksystem/departments/models.py"], "/app/tasksystem/tasks/api/views.py": ["/tasksystem/tasks/models.py"], "/app/tasksystem/accounts/tests/test_models.py": ["/tasksystem/departments/models.py", "/tasksystem/accounts/models.py"], "/app/tasksystem/tasks/receivers.py": ["/app/tasksystem/tasks/signals.py"], "/app/tasksystem/tasks/api/serializers.py": ["/tasksystem/tasks/models.py", "/tasksystem/departments/models.py", "/tasksystem/accounts/models.py"], "/app/tasksystem/accounts/models.py": ["/tasksystem/departments/models.py"], "/app/tasksystem/tasks/models.py": ["/tasksystem/accounts/models.py", "/tasksystem/departments/models.py", "/app/tasksystem/tasks/validators.py"], "/app/tasksystem/departments/api/serializers.py": ["/tasksystem/departments/models.py"], "/app/tasksystem/accounts/api/serializers.py": ["/tasksystem/tasks/models.py"], "/app/tasksystem/accounts/api/views.py": ["/app/tasksystem/accounts/api/serializers.py"]}
|
29,806,635
|
johnmwenda/taskmanagement
|
refs/heads/develop
|
/app/tasksystem/accounts/models.py
|
import uuid
from django.db import models
from django.contrib.auth.models import AbstractBaseUser, PermissionsMixin
from django.db.models.signals import post_save
from django.dispatch import receiver
from rest_framework.authtoken.models import Token
from tasksystem.accounts.managers import UserManager
from tasksystem.departments.models import Department
# Create your models here.
class User(AbstractBaseUser, PermissionsMixin):
    """Custom auth user keyed by email, optionally tied to a Department."""
    date_joined = models.DateTimeField(auto_now_add=True)
    email = models.EmailField(max_length=255, null=False, blank=False, unique=True)
    first_name = models.CharField(max_length=255, null=False, blank=False)
    last_name = models.CharField(max_length=255, null=False, blank=False)
    is_active = models.BooleanField(default=True)
    is_staff = models.BooleanField(default=False)
    # Role flags: managers supervise; members are juniors (used by the task
    # serializers and the Supervisor/Junior managers in the tasks app).
    is_manager = models.BooleanField(default=False)
    is_member = models.BooleanField(default=False)
    department = models.ForeignKey(Department, on_delete=models.SET_NULL, null=True, blank=True)
    # Self-referential, non-symmetrical follow graph.
    follows = models.ManyToManyField(
        'self',
        related_name='followed_by',
        symmetrical=False,
        blank=True,
    )
    objects = UserManager()
    USERNAME_FIELD = 'email'
    class Meta:
        app_label = 'accounts'
    def __str__(self):
        return self.email
    def get_short_name(self):
        return self.first_name
    def get_full_name(self):
        return self.first_name + ' ' + self.last_name
    def save(self, *args, **kwargs):
        # if no password is provided generate a password, to avoid passwordless access
        # NOTE(security/review): the generated value is stored UNHASHED — no
        # set_password()/set_unusable_password() — so it is not a valid Django
        # password hash. Confirm how the login views treat this before fixing.
        if not self.password:
            self.password = str(uuid.uuid4()).replace('-', '')
        super(User, self).save(*args, **kwargs)
@receiver(post_save, sender=User)
def create_auth_token(sender, instance=None, created=False, **kwargs):
    # Issue a DRF auth token for every newly created user (only on creation,
    # not on subsequent saves).
    if created:
        Token.objects.create(user=instance)
|
{"/tasksystem/accounts/models.py": ["/tasksystem/departments/models.py"], "/tasksystem/tasks/models.py": ["/tasksystem/departments/models.py"], "/app/tasksystem/tasks/api/views.py": ["/tasksystem/tasks/models.py"], "/app/tasksystem/accounts/tests/test_models.py": ["/tasksystem/departments/models.py", "/tasksystem/accounts/models.py"], "/app/tasksystem/tasks/receivers.py": ["/app/tasksystem/tasks/signals.py"], "/app/tasksystem/tasks/api/serializers.py": ["/tasksystem/tasks/models.py", "/tasksystem/departments/models.py", "/tasksystem/accounts/models.py"], "/app/tasksystem/accounts/models.py": ["/tasksystem/departments/models.py"], "/app/tasksystem/tasks/models.py": ["/tasksystem/accounts/models.py", "/tasksystem/departments/models.py", "/app/tasksystem/tasks/validators.py"], "/app/tasksystem/departments/api/serializers.py": ["/tasksystem/departments/models.py"], "/app/tasksystem/accounts/api/serializers.py": ["/tasksystem/tasks/models.py"], "/app/tasksystem/accounts/api/views.py": ["/app/tasksystem/accounts/api/serializers.py"]}
|
29,806,636
|
johnmwenda/taskmanagement
|
refs/heads/develop
|
/app/tasksystem/tasks/models.py
|
from django.db import models
from django.db.models import Q
from django.contrib.auth import get_user_model
from django.core.validators import (
MaxValueValidator,
MinValueValidator
)
from django.utils import timezone
from tasksystem.utils.models import SoftDeleteModel, SoftDeleteManager
from tasksystem.accounts.models import User
from tasksystem.departments.models import Category, Department
from .validators import validate_date_not_in_past
def get_default_category(self=None):
    """Return the 'DEFAULT' Category (or None) for use as a field default.

    BUG FIX: the original required a bogus ``self`` argument — Django calls
    field defaults with no arguments, so every save of a Task without an
    explicit category raised TypeError — and returned a whole queryset
    instead of a single value. ``self`` is kept as an optional parameter for
    backward compatibility with any one-argument callers.
    """
    return Category.objects.filter(name='DEFAULT').first()
class SupervisorManager(SoftDeleteManager):
    '''
    custom queries related to supervisor role tasks
    initial queryset will be all the tasks that this user has authorization
    this includes:
    1. all my department tasks, whether private or public because i am the boss
    2. all public tasks from other department
    3. all private interdepartmental tasks involving my department whether public or private
    '''
    def __init__(self, *args, **kwargs):
        # No extra state; kept for symmetry with JuniorManager.
        super(SupervisorManager, self).__init__(*args, **kwargs)
    def get_queryset(self):
        # this queryset inherits SoftDeleteManager methods eg delete, hard_delete etc
        return super(SupervisorManager, self).get_queryset()
    def tasks(self, user):
        """Return the distinct set of tasks visible to supervisor *user*."""
        # Own department (any access level) OR other departments' public
        # tasks OR other departments' private tasks that have an assignee
        # from this supervisor's department.
        qs = Task.objects.filter(
            Q(department=user.department_id) |
            (~Q(department=user.department_id) & Q(access_level='pub')) |
            (
                ~Q(department=user.department_id) &
                Q(assignees__department=user.department_id) &
                Q(access_level='prv')
            )
        ).select_related('department','category', 'reporter').prefetch_related(
            'taskprogress_set__created_by', 'subscribers', 'assignees', 'attachments')
        # distinct(): the assignees join can duplicate task rows.
        return qs.distinct()
class JuniorManager(SoftDeleteManager):
    '''
    custom queries related to junior role tasks
    initial queryset will be all the tasks that this user has authorization
    this includes:
    1. all my tasks whether private or public (reporter)
    3. all assigned tasks (private or public)
    4. all subscribed tasks
    5. all other public tasks
    '''
    def __init__(self, *args, **kwargs):
        # No extra state; kept for symmetry with SupervisorManager.
        super(JuniorManager, self).__init__(*args, **kwargs)
    def get_queryset(self):
        # this queryset inherits SoftDeleteManager methods eg delete, hard_delete etc
        return super(JuniorManager, self).get_queryset()
    def tasks(self, user):
        """Return the distinct set of tasks visible to junior *user*."""
        qs = Task.objects.filter(
            Q(assignees=user) |
            Q(reporter=user) |
            Q(user_subscribers=user) |
            Q(access_level='pub')
        ).select_related('department','category','reporter').prefetch_related(
            'assignees', 'user_subscribers', 'taskprogress_set__created_by', 'subscribers', 'attachments')
        # distinct(): the M2M joins above can duplicate task rows.
        return qs.distinct()
class Task(SoftDeleteModel):
    '''
    Most important model for this application. Allows users to create and list their tasks
    '''
    # Role-scoped managers (see SupervisorManager/JuniorManager above); the
    # *_with_deleted variants include soft-deleted rows.
    junior_objects = JuniorManager()
    junior_objects_with_deleted = JuniorManager(with_deleted=True)
    supervisor_objects = SupervisorManager()
    supervisor_objects_with_deleted = SupervisorManager(with_deleted=True)
    name = models.CharField(max_length=200)
    description = models.TextField(
        max_length=1000, help_text='Enter a brief description of the task', null=True, blank=True)
    due_date = models.DateTimeField(help_text='Enter due date', validators=[
        validate_date_not_in_past])
    # Set when the task is marked complete (see is_complete below).
    complete_time = models.DateTimeField(blank=True, null=True)
    # NOTE(review): FKs without on_delete — legal only on Django < 2.0.
    reporter = models.ForeignKey(
        get_user_model(), verbose_name='Reporter', editable=True)
    assignees = models.ManyToManyField(
        get_user_model(),
        related_name='assigned_tasks',
        verbose_name='Assignees',
        blank=True)
    TASK_STATUS = (
        ('n', 'Not Started'),
        ('p', 'On Progress'),
        ('c', 'Complete'),
    )
    status = models.CharField(
        max_length=1,
        choices=TASK_STATUS,
        blank=True,
        default='n',
        help_text='Status of Task')
    ACCESS_LEVEL = (
        ('pub', 'public'),
        ('prv', 'private'),
    )
    access_level = models.CharField(
        max_length=3, choices=ACCESS_LEVEL, default='pub', help_text='Access level')
    PRIORITY = (
        ('l', 'Low'),
        ('m', 'Medium'),
        ('h', 'High'),
    )
    priority = models.CharField(max_length=1, choices=PRIORITY, default='m')
    # NOTE(review): get_default_category as written takes a `self` arg and
    # returns a queryset — Django calls defaults with no args; confirm.
    category = models.ForeignKey(Category, default=get_default_category)
    department = models.ForeignKey(Department)
    # Both subscriber M2Ms go through the TaskSubscription join table.
    user_subscribers = models.ManyToManyField(
        get_user_model(),
        through='TaskSubscription',
        through_fields=('task', 'user'),
        related_name='subscribed_tasks',
        verbose_name='Subsribe Users',
        blank=True)
    department_subscribers = models.ManyToManyField(
        Department,
        through='TaskSubscription',
        through_fields=('task', 'department'),
        related_name='subscribed_tasks',
        verbose_name='Subsribe Department',
        blank=True)
    class Meta:
        ordering = ['-created_date']
    def __str__(self):
        return self.name
    @property
    def is_complete(self):
        # Complete only when complete_time is set AND already in the past —
        # a future complete_time reads as not-yet-complete.
        return bool(self.complete_time and self.complete_time < timezone.now())
class TaskProgress(SoftDeleteModel):
    """A single progress update (comment + percentage) posted on a task."""
    task = models.ForeignKey(Task, verbose_name='Task Progress', null=False)
    progress_comment = models.TextField(
        max_length=1000, help_text='Add a progress message', null=False)
    created_by = models.ForeignKey(User, null=False)
    # Clamped to 0..100 by validators (enforced on full_clean/forms only).
    progress_percentage = models.IntegerField(verbose_name='percentage done',
        help_text='What percentage of the task is done?',
        default=0,
        validators=[MinValueValidator(0), MaxValueValidator(100)])
class TaskAttachment(SoftDeleteModel):
    '''
    A task might have one or more supporting documents/images
    '''
    name = models.CharField(max_length=100)
    task = models.ForeignKey(
        Task, related_name='attachments', verbose_name='Task')
    # Uploads are partitioned by date under MEDIA_ROOT/tasks/.
    file_name = models.FileField(
        upload_to='tasks/%Y/%m/%d/', max_length=255, null=True, blank=True)
    class Meta:
        ordering = ('-created_date',)
    def delete(self, *args, **kwargs):
        # Remove the stored file from disk before (soft-)deleting the row.
        self.file_name.delete()
        super().delete(*args, **kwargs)
class TaskSubscription(SoftDeleteModel):
    '''
    A user can subscribe to a task, or be added as part of a task user group
    by another user. Exactly one of ``user``/``department`` is populated,
    according to ``subscriber_type``.
    '''
    task = models.ForeignKey(Task, related_name='subscribers')
    user = models.ForeignKey(get_user_model(), null=True, blank=True)
    department = models.ForeignKey(Department, null=True, blank=True)
    created_by = models.ForeignKey(get_user_model(), related_name='created_by')
    # NOTE: the TASK_STATUS identifier is a misnomer (it enumerates subscriber
    # types) but is kept in case it is referenced elsewhere.
    TASK_STATUS = (
        # BUG FIX: the human-readable label was misspelled 'DEPARMENT'.
        ('DEPARTMENT', 'DEPARTMENT'),
        ('USER', 'USER'),
    )
    subscriber_type = models.CharField(max_length=20, choices=TASK_STATUS)

    def __str__(self):
        return f'task subscribed by {self.user} or {self.department}'
|
{"/tasksystem/accounts/models.py": ["/tasksystem/departments/models.py"], "/tasksystem/tasks/models.py": ["/tasksystem/departments/models.py"], "/app/tasksystem/tasks/api/views.py": ["/tasksystem/tasks/models.py"], "/app/tasksystem/accounts/tests/test_models.py": ["/tasksystem/departments/models.py", "/tasksystem/accounts/models.py"], "/app/tasksystem/tasks/receivers.py": ["/app/tasksystem/tasks/signals.py"], "/app/tasksystem/tasks/api/serializers.py": ["/tasksystem/tasks/models.py", "/tasksystem/departments/models.py", "/tasksystem/accounts/models.py"], "/app/tasksystem/accounts/models.py": ["/tasksystem/departments/models.py"], "/app/tasksystem/tasks/models.py": ["/tasksystem/accounts/models.py", "/tasksystem/departments/models.py", "/app/tasksystem/tasks/validators.py"], "/app/tasksystem/departments/api/serializers.py": ["/tasksystem/departments/models.py"], "/app/tasksystem/accounts/api/serializers.py": ["/tasksystem/tasks/models.py"], "/app/tasksystem/accounts/api/views.py": ["/app/tasksystem/accounts/api/serializers.py"]}
|
29,806,637
|
johnmwenda/taskmanagement
|
refs/heads/develop
|
/app/tasksystem/environment.py
|
# Deployment flavour for this checkout; switch by editing this constant.
ENVIRONMENT = 'local'
# ENVIRONMENT = 'development'
# ENVIRONMENT = 'production'

# Map each environment name to its Django settings module. Unknown values
# fall back to the local settings, matching the original default.
_SETTINGS_BY_ENV = {
    'local': 'tasksystem.settings.local',
    'development': 'tasksystem.settings.development',
    'production': 'tasksystem.settings.production',
}
SETTINGS_MODULE = _SETTINGS_BY_ENV.get(ENVIRONMENT, 'tasksystem.settings.local')
|
{"/tasksystem/accounts/models.py": ["/tasksystem/departments/models.py"], "/tasksystem/tasks/models.py": ["/tasksystem/departments/models.py"], "/app/tasksystem/tasks/api/views.py": ["/tasksystem/tasks/models.py"], "/app/tasksystem/accounts/tests/test_models.py": ["/tasksystem/departments/models.py", "/tasksystem/accounts/models.py"], "/app/tasksystem/tasks/receivers.py": ["/app/tasksystem/tasks/signals.py"], "/app/tasksystem/tasks/api/serializers.py": ["/tasksystem/tasks/models.py", "/tasksystem/departments/models.py", "/tasksystem/accounts/models.py"], "/app/tasksystem/accounts/models.py": ["/tasksystem/departments/models.py"], "/app/tasksystem/tasks/models.py": ["/tasksystem/accounts/models.py", "/tasksystem/departments/models.py", "/app/tasksystem/tasks/validators.py"], "/app/tasksystem/departments/api/serializers.py": ["/tasksystem/departments/models.py"], "/app/tasksystem/accounts/api/serializers.py": ["/tasksystem/tasks/models.py"], "/app/tasksystem/accounts/api/views.py": ["/app/tasksystem/accounts/api/serializers.py"]}
|
29,806,638
|
johnmwenda/taskmanagement
|
refs/heads/develop
|
/app/tasksystem/departments/api/serializers.py
|
from rest_framework import serializers
from tasksystem.departments.models import Category, Department
class CategorySerializer(serializers.ModelSerializer):
    """Full read/write serializer for Category."""
    class Meta:
        model = Category
        fields = '__all__'
        # BUG FIX: ('id') is the string 'id', not a tuple — DRF requires a
        # list or tuple for read_only_fields.
        read_only_fields = ('id',)
class CategorySlimSerializer(serializers.ModelSerializer):
    ''' Serializer used for read only drop downs '''
    class Meta:
        model = Category
        # BUG FIX: ('department') is the string 'department', not a tuple —
        # DRF requires a list or tuple for exclude.
        exclude = ('department',)
        # NOTE(review): 'department' is excluded above, so listing it here is
        # a no-op; kept for fidelity with the original.
        read_only_fields = ('id', 'department', 'name')
class DepartmentSerializer(serializers.ModelSerializer):
    """Full read/write serializer for Department."""

    class Meta:
        model = Department
        fields = '__all__'
        # Bug fix: ('id') is just the string 'id'; DRF requires a
        # list/tuple, so a one-element tuple needs the trailing comma.
        read_only_fields = ('id',)
class DepartmentSlimSerializer(serializers.ModelSerializer):
    """Minimal read-only representation of a Department (id and name only)."""

    class Meta:
        model = Department
        read_only_fields = ('id', 'name')
        fields = ('id', 'name')
|
{"/tasksystem/accounts/models.py": ["/tasksystem/departments/models.py"], "/tasksystem/tasks/models.py": ["/tasksystem/departments/models.py"], "/app/tasksystem/tasks/api/views.py": ["/tasksystem/tasks/models.py"], "/app/tasksystem/accounts/tests/test_models.py": ["/tasksystem/departments/models.py", "/tasksystem/accounts/models.py"], "/app/tasksystem/tasks/receivers.py": ["/app/tasksystem/tasks/signals.py"], "/app/tasksystem/tasks/api/serializers.py": ["/tasksystem/tasks/models.py", "/tasksystem/departments/models.py", "/tasksystem/accounts/models.py"], "/app/tasksystem/accounts/models.py": ["/tasksystem/departments/models.py"], "/app/tasksystem/tasks/models.py": ["/tasksystem/accounts/models.py", "/tasksystem/departments/models.py", "/app/tasksystem/tasks/validators.py"], "/app/tasksystem/departments/api/serializers.py": ["/tasksystem/departments/models.py"], "/app/tasksystem/accounts/api/serializers.py": ["/tasksystem/tasks/models.py"], "/app/tasksystem/accounts/api/views.py": ["/app/tasksystem/accounts/api/serializers.py"]}
|
29,806,639
|
johnmwenda/taskmanagement
|
refs/heads/develop
|
/app/tasksystem/wsgi.py
|
"""
WSGI config for tasksystem project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/howto/deployment/wsgi/
"""
import os
from tasksystem.environment import SETTINGS_MODULE
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", SETTINGS_MODULE)
application = get_wsgi_application()
|
{"/tasksystem/accounts/models.py": ["/tasksystem/departments/models.py"], "/tasksystem/tasks/models.py": ["/tasksystem/departments/models.py"], "/app/tasksystem/tasks/api/views.py": ["/tasksystem/tasks/models.py"], "/app/tasksystem/accounts/tests/test_models.py": ["/tasksystem/departments/models.py", "/tasksystem/accounts/models.py"], "/app/tasksystem/tasks/receivers.py": ["/app/tasksystem/tasks/signals.py"], "/app/tasksystem/tasks/api/serializers.py": ["/tasksystem/tasks/models.py", "/tasksystem/departments/models.py", "/tasksystem/accounts/models.py"], "/app/tasksystem/accounts/models.py": ["/tasksystem/departments/models.py"], "/app/tasksystem/tasks/models.py": ["/tasksystem/accounts/models.py", "/tasksystem/departments/models.py", "/app/tasksystem/tasks/validators.py"], "/app/tasksystem/departments/api/serializers.py": ["/tasksystem/departments/models.py"], "/app/tasksystem/accounts/api/serializers.py": ["/tasksystem/tasks/models.py"], "/app/tasksystem/accounts/api/views.py": ["/app/tasksystem/accounts/api/serializers.py"]}
|
29,806,640
|
johnmwenda/taskmanagement
|
refs/heads/develop
|
/app/tasksystem/tasks/apps.py
|
from django.apps import AppConfig
class TasksConfig(AppConfig):
    """App configuration for the tasks app."""

    name = 'tasksystem.tasks'

    def ready(self):
        # Imported for its side effects: registers the signal receivers
        # defined in tasksystem.tasks.receivers when Django starts.
        import tasksystem.tasks.receivers
|
{"/tasksystem/accounts/models.py": ["/tasksystem/departments/models.py"], "/tasksystem/tasks/models.py": ["/tasksystem/departments/models.py"], "/app/tasksystem/tasks/api/views.py": ["/tasksystem/tasks/models.py"], "/app/tasksystem/accounts/tests/test_models.py": ["/tasksystem/departments/models.py", "/tasksystem/accounts/models.py"], "/app/tasksystem/tasks/receivers.py": ["/app/tasksystem/tasks/signals.py"], "/app/tasksystem/tasks/api/serializers.py": ["/tasksystem/tasks/models.py", "/tasksystem/departments/models.py", "/tasksystem/accounts/models.py"], "/app/tasksystem/accounts/models.py": ["/tasksystem/departments/models.py"], "/app/tasksystem/tasks/models.py": ["/tasksystem/accounts/models.py", "/tasksystem/departments/models.py", "/app/tasksystem/tasks/validators.py"], "/app/tasksystem/departments/api/serializers.py": ["/tasksystem/departments/models.py"], "/app/tasksystem/accounts/api/serializers.py": ["/tasksystem/tasks/models.py"], "/app/tasksystem/accounts/api/views.py": ["/app/tasksystem/accounts/api/serializers.py"]}
|
29,806,641
|
johnmwenda/taskmanagement
|
refs/heads/develop
|
/app/tasksystem/accounts/api/serializers.py
|
from rest_framework import serializers
from rest_framework.validators import UniqueValidator
from django.core import exceptions
from django.contrib.auth import get_user_model
from django.contrib.auth import authenticate
from django.contrib.auth import password_validation
from tasksystem.departments.api.serializers import DepartmentSlimSerializer
User = get_user_model()
from tasksystem.tasks.models import Department
class BasicUserSerializer(serializers.ModelSerializer):
    """User serializer to be used in model relations."""

    # NOTE(review): the identity URL points at the 'tasks-detail' route, not a
    # user-detail route — confirm this is intentional.
    url = serializers.HyperlinkedIdentityField(view_name='tasks-detail')
    email = serializers.EmailField(read_only=True)
    first_name = serializers.CharField(
        max_length=30, allow_null=False, allow_blank=False, required=True)
    last_name = serializers.CharField(
        max_length=30, allow_null=False, allow_blank=False, required=True)

    class Meta:
        fields = ('id', 'url', 'email', 'first_name', 'last_name')
        model = User
class UserSignInSerializer(serializers.Serializer):
    """Validate an email/password pair and attach the authenticated user.

    On success, ``validated_data['user']`` holds the authenticated user.
    Raises ValidationError for an unregistered email, bad credentials, or
    a missing email/password.
    """

    email = serializers.EmailField()
    password = serializers.CharField()

    def validate(self, attrs):
        email = attrs.get('email')
        password = attrs.get('password')
        if email:
            # Distinguish "not registered" from "wrong password" up front.
            try:
                user = User.objects.get(email=email)
            except User.DoesNotExist:
                raise serializers.ValidationError("User not registered. Please sign up first")
        if email and password:
            user = authenticate(
                request=self.context.get('request'),
                email=email,
                password=password
            )
            if not user:
                raise serializers.ValidationError("Invalid email/password combination")
        else:
            # Bug fix: this serializer's fields are "email"/"password", but
            # the original message told the client to include "username".
            raise serializers.ValidationError('Make sure you include "email" and "password".')
        attrs['user'] = user
        return attrs
class UserSerializer(serializers.ModelSerializer):
    """Detailed user representation including role flags and department."""

    email = serializers.EmailField(
        validators=[UniqueValidator(queryset=User.objects.all(),
                                    message='A user with this email address exists')])
    first_name = serializers.CharField(
        max_length=30, allow_null=False, allow_blank=False, required=True)
    last_name = serializers.CharField(
        max_length=30, allow_null=False, allow_blank=False, required=True)
    # Nested, read-only department info (id/name only).
    department = DepartmentSlimSerializer(read_only=True)

    class Meta:
        model = User
        fields = ('pk', 'email', 'first_name', 'last_name', 'is_active', 'is_member',
                  'is_manager', 'is_staff', 'is_superuser', 'department')
class UserSignupSerializer(serializers.Serializer):
    """Registration payload: validates credentials and creates the user.

    NOTE(review): `password` is declared required=True here, but the signup
    view tests expect registration to succeed without a password (a password
    is supposedly auto-generated) — confirm which behavior is intended.
    """

    email = serializers.EmailField(
        validators=[UniqueValidator(queryset=User.objects.all(),
                                    message='A user with this email address exists')])
    first_name = serializers.CharField(
        max_length=30, allow_null=False, allow_blank=True, required=True)
    last_name = serializers.CharField(
        max_length=30, allow_null=False, allow_blank=True, required=True)
    password = serializers.CharField(required=True)
    department = serializers.PrimaryKeyRelatedField(queryset=Department.objects.all(),
                                                    allow_null=True, required=False)

    class Meta:
        # NOTE(review): plain Serializer ignores Meta.fields — presumably
        # leftover from a ModelSerializer; confirm before removing.
        fields = ('is_member', 'is_manager', 'is_staff')

    def validate(self, attrs):
        # Enforce the project's password strength/validity rules; collect
        # all validator messages so the client sees every problem at once.
        errors = {}
        try:
            password_validation.validate_password(password=attrs.get('password'), user=User)
        except exceptions.ValidationError as e:
            errors['password'] = list(e.messages)
        if errors:
            raise serializers.ValidationError(errors)
        return attrs

    def create(self, validated_data):
        # Delegate hashing and persistence to the custom user manager;
        # remaining validated fields (names, department) pass through.
        user = User.objects.create_user(
            email=validated_data.pop('email'),
            password=validated_data.pop('password'),
            **validated_data)
        return user
|
{"/tasksystem/accounts/models.py": ["/tasksystem/departments/models.py"], "/tasksystem/tasks/models.py": ["/tasksystem/departments/models.py"], "/app/tasksystem/tasks/api/views.py": ["/tasksystem/tasks/models.py"], "/app/tasksystem/accounts/tests/test_models.py": ["/tasksystem/departments/models.py", "/tasksystem/accounts/models.py"], "/app/tasksystem/tasks/receivers.py": ["/app/tasksystem/tasks/signals.py"], "/app/tasksystem/tasks/api/serializers.py": ["/tasksystem/tasks/models.py", "/tasksystem/departments/models.py", "/tasksystem/accounts/models.py"], "/app/tasksystem/accounts/models.py": ["/tasksystem/departments/models.py"], "/app/tasksystem/tasks/models.py": ["/tasksystem/accounts/models.py", "/tasksystem/departments/models.py", "/app/tasksystem/tasks/validators.py"], "/app/tasksystem/departments/api/serializers.py": ["/tasksystem/departments/models.py"], "/app/tasksystem/accounts/api/serializers.py": ["/tasksystem/tasks/models.py"], "/app/tasksystem/accounts/api/views.py": ["/app/tasksystem/accounts/api/serializers.py"]}
|
29,806,642
|
johnmwenda/taskmanagement
|
refs/heads/develop
|
/app/tasksystem/settings/development.py
|
from .base import *
# Development overrides on top of the base settings.
DEBUG = True
# ALLOWED_HOSTS = ['0.0.0.0','127.0.0.1']
# Hosts treated as "internal" (enables debug-only tooling for these IPs).
INTERNAL_IPS = [
    '127.0.0.1',
]
# Dev-only apps: silk (request profiling) and django-extensions.
INSTALLED_APPS.extend(['silk', 'django_extensions'])
# Silk's middleware must run early to wrap the whole request cycle.
MIDDLEWARE.insert(0, 'silk.middleware.SilkyMiddleware')
|
{"/tasksystem/accounts/models.py": ["/tasksystem/departments/models.py"], "/tasksystem/tasks/models.py": ["/tasksystem/departments/models.py"], "/app/tasksystem/tasks/api/views.py": ["/tasksystem/tasks/models.py"], "/app/tasksystem/accounts/tests/test_models.py": ["/tasksystem/departments/models.py", "/tasksystem/accounts/models.py"], "/app/tasksystem/tasks/receivers.py": ["/app/tasksystem/tasks/signals.py"], "/app/tasksystem/tasks/api/serializers.py": ["/tasksystem/tasks/models.py", "/tasksystem/departments/models.py", "/tasksystem/accounts/models.py"], "/app/tasksystem/accounts/models.py": ["/tasksystem/departments/models.py"], "/app/tasksystem/tasks/models.py": ["/tasksystem/accounts/models.py", "/tasksystem/departments/models.py", "/app/tasksystem/tasks/validators.py"], "/app/tasksystem/departments/api/serializers.py": ["/tasksystem/departments/models.py"], "/app/tasksystem/accounts/api/serializers.py": ["/tasksystem/tasks/models.py"], "/app/tasksystem/accounts/api/views.py": ["/app/tasksystem/accounts/api/serializers.py"]}
|
29,806,643
|
johnmwenda/taskmanagement
|
refs/heads/develop
|
/app/tasksystem/tasks/jobs.py
|
# Create your tasks here
from __future__ import absolute_import, unicode_literals
from celery import shared_task
from tasksystem.tasks.emails import send_task_progress_email
@shared_task
def delay_task_progress_notifications(notifications, progress_instance):
    """Celery task: email progress-update notifications to subscribers.

    Args:
        notifications: the notification records for the subscribers to email.
        progress_instance: the progress update that triggered the emails.
    """
    print("sending progress update email to subscribers")
    send_task_progress_email(notifications, progress_instance)
    print("completed sending progress update emails")
|
{"/tasksystem/accounts/models.py": ["/tasksystem/departments/models.py"], "/tasksystem/tasks/models.py": ["/tasksystem/departments/models.py"], "/app/tasksystem/tasks/api/views.py": ["/tasksystem/tasks/models.py"], "/app/tasksystem/accounts/tests/test_models.py": ["/tasksystem/departments/models.py", "/tasksystem/accounts/models.py"], "/app/tasksystem/tasks/receivers.py": ["/app/tasksystem/tasks/signals.py"], "/app/tasksystem/tasks/api/serializers.py": ["/tasksystem/tasks/models.py", "/tasksystem/departments/models.py", "/tasksystem/accounts/models.py"], "/app/tasksystem/accounts/models.py": ["/tasksystem/departments/models.py"], "/app/tasksystem/tasks/models.py": ["/tasksystem/accounts/models.py", "/tasksystem/departments/models.py", "/app/tasksystem/tasks/validators.py"], "/app/tasksystem/departments/api/serializers.py": ["/tasksystem/departments/models.py"], "/app/tasksystem/accounts/api/serializers.py": ["/tasksystem/tasks/models.py"], "/app/tasksystem/accounts/api/views.py": ["/app/tasksystem/accounts/api/serializers.py"]}
|
29,806,644
|
johnmwenda/taskmanagement
|
refs/heads/develop
|
/app/tasksystem/accounts/tests/test_views.py
|
from django.test import TestCase
from django.shortcuts import reverse
from django.contrib.auth import get_user_model
User=get_user_model()
class UserSignInViewTest(TestCase):
    """Integration tests for the login and logout endpoints."""

    @classmethod
    def setUpTestData(cls):
        # Fix: the first argument of a classmethod is the class object; the
        # original named it `self`, which worked but was misleading.
        cls.user = User.objects.create_user(email='test@gmail.com', password='password')

    def test_user_sign_in_successful(self):
        data = {
            'email': 'test@gmail.com',
            'password': 'password'
        }
        resp = self.client.post(reverse('login'), data=data, format='json')
        self.assertEqual(resp.status_code, 200)
        self.assertIn('token', str(resp.data))

    def test_user_sign_out_successful(self):
        resp = self.client.get(reverse('logout'))
        self.assertEqual(resp.status_code, 204)
class UserSignupViewTest(TestCase):
    """Integration tests for the signup endpoint."""

    fixtures = ['tasksystem/fixtures/department.json', ]

    @classmethod
    def setUpTestData(cls):
        # Fix: the first argument of a classmethod is the class object; the
        # original named it `self`, which worked but was misleading.
        cls.data = {
            'email': 'test@gmail.com',
            'password': 'password',
            'first_name': 'test',
            'last_name': 'last',
            'department_id': '1'
        }

    def test_user_sign_up_successful(self):
        resp = self.client.post(reverse('signup'), data=self.data, format='json')
        self.assertEqual(resp.status_code, 201)

    def test_user_sign_up_no_password(self):
        '''
        If no password is pass, the custom behavior is to generate a password for the user
        '''
        no_pass_data = self.data.copy()
        no_pass_data.pop('password')
        resp = self.client.post(reverse('signup'), data=no_pass_data, format='json')
        self.assertEqual(resp.status_code, 201)

    def test_validation_error_weak_invalid_password(self):
        '''
        If password does not pass validators tests, ensure the errors are shown
        '''
        pass
|
{"/tasksystem/accounts/models.py": ["/tasksystem/departments/models.py"], "/tasksystem/tasks/models.py": ["/tasksystem/departments/models.py"], "/app/tasksystem/tasks/api/views.py": ["/tasksystem/tasks/models.py"], "/app/tasksystem/accounts/tests/test_models.py": ["/tasksystem/departments/models.py", "/tasksystem/accounts/models.py"], "/app/tasksystem/tasks/receivers.py": ["/app/tasksystem/tasks/signals.py"], "/app/tasksystem/tasks/api/serializers.py": ["/tasksystem/tasks/models.py", "/tasksystem/departments/models.py", "/tasksystem/accounts/models.py"], "/app/tasksystem/accounts/models.py": ["/tasksystem/departments/models.py"], "/app/tasksystem/tasks/models.py": ["/tasksystem/accounts/models.py", "/tasksystem/departments/models.py", "/app/tasksystem/tasks/validators.py"], "/app/tasksystem/departments/api/serializers.py": ["/tasksystem/departments/models.py"], "/app/tasksystem/accounts/api/serializers.py": ["/tasksystem/tasks/models.py"], "/app/tasksystem/accounts/api/views.py": ["/app/tasksystem/accounts/api/serializers.py"]}
|
29,806,645
|
johnmwenda/taskmanagement
|
refs/heads/develop
|
/app/tasksystem/accounts/api/views.py
|
from django.shortcuts import render
from django.contrib.auth import login as django_login, logout as django_logout
from rest_framework.views import APIView
from rest_framework.permissions import AllowAny
from rest_framework.response import Response
from rest_framework import status
from rest_framework.authentication import TokenAuthentication
from rest_framework.authtoken.models import Token
from .serializers import UserSignInSerializer, UserSerializer, UserSignupSerializer
# Create your views here.
class UserSignInView(APIView):
    """POST email/password; returns an auth token plus the serialized user."""

    serializer_class = UserSignInSerializer
    permission_classes = (AllowAny, )

    def post(self, request):
        serializer = self.serializer_class(data=request.data, context={'request': request})
        serializer.is_valid(raise_exception=True)
        # The serializer attaches the authenticated user on success.
        user = serializer.validated_data['user']
        # Establish a session in addition to token auth.
        django_login(request, user)
        # Reuse the existing token if one was already issued.
        token, _ = Token.objects.get_or_create(user=user)
        return Response({'token': token.key,
                         'user': UserSerializer(user, context={'request': request}).data},
                        status=status.HTTP_200_OK)
class UserSignUpView(APIView):
    """POST registration data; creates the user and returns 201 on success."""

    serializer_class = UserSignupSerializer
    permission_classes = (AllowAny, )

    def post(self, request):
        serializer = self.serializer_class(data=request.data, context={'request': request})
        # raise_exception=True means invalid input returns a 400 before
        # perform_create runs.
        if serializer.is_valid(raise_exception=True):
            self.perform_create(serializer)
            return Response(
                data={"message": "User successfully registered."},
                status=status.HTTP_201_CREATED
            )

    def perform_create(self, serializer):
        # Separate hook so subclasses can customize how the user is saved.
        serializer.save()
class UserSignoutView(APIView):
    """GET: end the current session; responds 204 with no body."""

    authentication_classes = (TokenAuthentication, )

    def get(self, request):
        # Clears the Django session; the auth token itself is not revoked.
        django_logout(request)
        return Response(status=status.HTTP_204_NO_CONTENT)
|
{"/tasksystem/accounts/models.py": ["/tasksystem/departments/models.py"], "/tasksystem/tasks/models.py": ["/tasksystem/departments/models.py"], "/app/tasksystem/tasks/api/views.py": ["/tasksystem/tasks/models.py"], "/app/tasksystem/accounts/tests/test_models.py": ["/tasksystem/departments/models.py", "/tasksystem/accounts/models.py"], "/app/tasksystem/tasks/receivers.py": ["/app/tasksystem/tasks/signals.py"], "/app/tasksystem/tasks/api/serializers.py": ["/tasksystem/tasks/models.py", "/tasksystem/departments/models.py", "/tasksystem/accounts/models.py"], "/app/tasksystem/accounts/models.py": ["/tasksystem/departments/models.py"], "/app/tasksystem/tasks/models.py": ["/tasksystem/accounts/models.py", "/tasksystem/departments/models.py", "/app/tasksystem/tasks/validators.py"], "/app/tasksystem/departments/api/serializers.py": ["/tasksystem/departments/models.py"], "/app/tasksystem/accounts/api/serializers.py": ["/tasksystem/tasks/models.py"], "/app/tasksystem/accounts/api/views.py": ["/app/tasksystem/accounts/api/serializers.py"]}
|
29,856,194
|
JakeSo/Application-Searching
|
refs/heads/main
|
/local.py
|
import random
import nqueens
def simulatedAnnealing(initBoard, decayRate, T_Threshold):
    """Minimize attacking queen pairs on a board via simulated annealing.

    Args:
        initBoard: an nqueens.Board start state.
        decayRate: multiplicative temperature decay per iteration.
        T_Threshold: stop once the temperature falls to/below this value.

    Returns:
        The final h-value (number of attacking queen pairs; 0 is a solution).
    """
    from math import exp

    current = initBoard
    T = 100
    print("Initial board:")
    h = nqueens.numAttackingQueens(current)
    print("h-value: " + str(h))
    initBoard.printBoard()
    print()
    while True:
        T = f(T, decayRate)
        if T <= T_Threshold:
            break
        sucStates = nqueens.getSuccessorStates(current)
        b = random.randint(0, len(sucStates) - 1)
        candidate_h = nqueens.numAttackingQueens(sucStates[b])
        delta = candidate_h - h
        # Bug fix: the original only ever accepted strictly better successors,
        # which is hill climbing, not simulated annealing. Proper SA also
        # accepts worse moves with probability exp(-delta / T) so the search
        # can escape local minima while T is high.
        if delta < 0 or random.random() < exp(-delta / T):
            current = sucStates[b]
            h = candidate_h
        if h == 0:
            break
    print("Final board:")
    print("h-value: " + str(h))
    current.printBoard()
    return h
def f(T, decayRate):
    """Exponential cooling schedule: scale the temperature by the decay rate."""
    cooled = T * decayRate
    return cooled
def runTests():
    """Benchmark simulated annealing on 4/8/16-queens, 10 runs per size,
    printing per-run boards and the average final h-value."""
    banner = '#' * 20
    print(banner)
    print("Simulated Annealing:")
    print("Decay rate: 0.5; Threshold: 1e-08")
    print(banner)
    for size in [4, 8, 16]:
        divider = '-' * 20
        print(divider)
        print("Board size: " + str(size))
        print(divider)
        board = nqueens.Board(size)
        total_h = 0
        for run in range(10):
            print("Run #" + str(run + 1))
            board.rand()
            total_h += simulatedAnnealing(board, 0.5, 1e-08)
            print()
        print("Average h-value: " + str(total_h / 10) + "\n")
|
{"/main.py": ["/gridops.py", "/uninformed.py", "/informed.py", "/local.py"], "/uninformed.py": ["/gridops.py"], "/informed.py": ["/gridops.py"]}
|
29,856,195
|
JakeSo/Application-Searching
|
refs/heads/main
|
/main.py
|
import gridops
import informed
import local
import nqueens
import uninformed
# Load the grid world and pick a start/goal pair for the searches.
grid = gridops.readGrid('grid.txt')
start, goal = gridops.genStartGoal(grid)
# Uninformed search (BFS); `uninformed.bfspath` is presumably populated as a
# module-level side effect of this call — verify against uninformed.py.
uninformed.uninformedsearch(start, goal, grid)
path = [x.location for x in uninformed.bfspath]  # NOTE(review): unused below — confirm intent
# Informed searches over the same start/goal.
informed.A_star(start, goal, grid)
informed.greedy(start, goal, grid)
# Local search benchmarks (n-queens simulated annealing).
local.runTests()
|
{"/main.py": ["/gridops.py", "/uninformed.py", "/informed.py", "/local.py"], "/uninformed.py": ["/gridops.py"], "/informed.py": ["/gridops.py"]}
|
29,869,714
|
lsliluu/android_package_tools
|
refs/heads/master
|
/Utils/PackageUtils.py
|
def execute_doc_shell():
    """Stub: not yet implemented."""
    pass


def execute_shell():
    """Stub: not yet implemented."""
    pass


def send_email():
    """Stub: not yet implemented."""
    pass
|
{"/Utils/PackageUtils.py": ["/Utils/Email.py"], "/app.py": ["/Utils/PackageUtils.py", "/Utils/Email.py", "/Utils/SystemUtils.py"]}
|
29,869,715
|
lsliluu/android_package_tools
|
refs/heads/master
|
/Utils/SystemUtils.py
|
def get_system_operating():
    """Stub: not yet implemented."""
    pass
def auto_email_apk():
    """Send a test notification email through the 139.com SMTP relay.

    NOTE(review): credentials and recipients are hard-coded; move them to
    configuration before shipping.
    """
    print("abc")
    import smtplib
    from email.mime.text import MIMEText
    from email.header import Header

    # Third-party SMTP relay settings.
    mail_host = "smtp.139.com"  # SMTP server
    mail_user = "18959137976@139.com"  # account name
    mail_pass = "ccdb794a0a6899e9b700"  # password / auth token
    sender = 'robot_hzf@139.com'
    receivers = ["hezf@newland.com.cn", "dev.hezf@139.com"]  # recipient addresses
    message = MIMEText('这是一封自动发送的测试软件,请勿回复。', 'plain', 'utf-8')
    message['From'] = Header("菜鸟教程", 'utf-8')
    message['To'] = Header("测试", 'utf-8')
    subject = 'Python SMTP 邮件测试'
    message['Subject'] = Header(subject, 'utf-8')
    try:
        smtpObj = smtplib.SMTP()
        smtpObj.connect(mail_host, 25)  # 25 = standard SMTP port
        smtpObj.login(mail_user, mail_pass)
        smtpObj.sendmail(sender, receivers, message.as_string())
        smtpObj.quit()  # close the connection once the mail is accepted
        # Bug fix: the original used Python-2 style `print` followed by the
        # string on the next line — in Python 3 that evaluates the `print`
        # function object and then a bare string, printing nothing.
        print("邮件发送成功")
    except smtplib.SMTPException:
        print("Error: 无法发送邮件")
|
{"/Utils/PackageUtils.py": ["/Utils/Email.py"], "/app.py": ["/Utils/PackageUtils.py", "/Utils/Email.py", "/Utils/SystemUtils.py"]}
|
29,869,716
|
lsliluu/android_package_tools
|
refs/heads/master
|
/app.py
|
from flask import Flask
from Utils.SystemUtils import auto_email_apk
app = Flask(__name__)
@app.route('/say_hi')
def hello_world():
    """Health-check endpoint: responds with a static greeting."""
    return 'hello'
@app.route('/package/feeler')
def package_feeler():
    """Stub: kick off a build of the 'feeler' package.

    NOTE(review): the body returns None, which makes Flask raise a 500 for
    this route — confirm the endpoint is intentionally unfinished.
    """
    # Start compilation by running the build script
    # Record the build status
    # Return the result
    pass
@app.route('/email/send')
def send_email():
    """Trigger the test APK email and report completion to the caller."""
    auto_email_apk()
    # Fix: removed the unreachable `pass` that followed the return.
    return "end"
if __name__ == '__main__':
app.run()
|
{"/Utils/PackageUtils.py": ["/Utils/Email.py"], "/app.py": ["/Utils/PackageUtils.py", "/Utils/Email.py", "/Utils/SystemUtils.py"]}
|
29,881,468
|
mozjay0619/HMF
|
refs/heads/master
|
/HMF/__init__.py
|
__version__ = "0.0.b45"
from .core import BaseHMF
from .parallel import WriterProcessManager
from .hmf import HMF
from .hmf import open_file
from .hmf import is_hmf_directory
from .utils import write_memmap
from .utils import read_memmap
__all__ = [
"BaseHMF",
"WriterProcessManager",
"HMF",
"open_file",
"is_hmf_directory"
]
|
{"/HMF/hmf.py": ["/HMF/core.py", "/HMF/utils.py", "/HMF/parallel.py", "/HMF/__init__.py"], "/HMF/tests/test_hmf.py": ["/HMF/__init__.py"], "/HMF/__init__.py": ["/HMF/core.py", "/HMF/parallel.py", "/HMF/hmf.py", "/HMF/utils.py"], "/HMF/parallel.py": ["/HMF/utils.py", "/HMF/__init__.py"], "/HMF/core.py": ["/HMF/utils.py"], "/setup.py": ["/HMF/__init__.py"]}
|
29,881,469
|
mozjay0619/HMF
|
refs/heads/master
|
/HMF/utils.py
|
import numpy as np
import pickle
import functools
import time
import os
import select
def save_obj(obj, filepath):
    """Pickle `obj` to `filepath` using the highest available protocol."""
    with open(filepath, 'wb') as sink:
        pickle.dump(obj, sink, protocol=pickle.HIGHEST_PROTOCOL)
def load_obj(filepath):
    """Unpickle and return the object stored at `filepath`.

    Opens the file non-blocking and waits (via select, 10s timeout) for the
    descriptor to become readable before unpickling.
    """
    # with open(filepath, 'rb') as f:
    #     return pickle.load(f)
    f = os.open(filepath, os.O_RDONLY|os.O_NONBLOCK)
    readable = os.fdopen(f, "rb", 0)
    # NOTE(review): if the 10s timeout expires, the ready-list is empty and
    # `[0][0]` raises IndexError; and if the comparison were False the
    # function would implicitly return None with the fd left open — confirm
    # both behaviors are acceptable to callers.
    if select.select([readable], [], [], 10)[0][0] == readable:
        return pickle.load(readable)
def stride_util(array, window_size, skip_size, dtype):
    """Return a 2-D sliding-window *view* of `array` (no copy).

    Row i starts `skip_size` elements after row i-1 and spans `window_size`
    elements. The number of rows is ceil(len(array)-1) cast to `dtype`
    (matching the original; it does not account for skip_size — callers use
    window_size=2, skip_size=1).
    """
    n_windows = np.ceil(len(array) - 1).astype(dtype)
    elem_stride = array.strides[0]
    return np.lib.stride_tricks.as_strided(
        array,
        shape=[n_windows, window_size],
        strides=[elem_stride * skip_size, elem_stride])
def border_idx_util(array):
    """Return the boundary indices of runs of equal consecutive values,
    including 0 at the front and len(array) at the back.

    E.g. [0, 0, 1, 1, 2] -> [0, 2, 4, 5].
    """
    change_points = np.flatnonzero(np.diff(array)) + 1
    change_points = change_points.astype(int)
    return np.concatenate(([0], change_points, [len(array)]))
def write_memmap(filepath, dtype, shape, array):
    """Persist `array` to `filepath` as a numpy memmap of the given
    dtype/shape; the buffer is flushed and released before returning."""
    sink = np.memmap(filepath, dtype=dtype, mode="w+", shape=shape)
    sink[:] = array[:]
    sink.flush()
    del sink
def read_memmap(filepath, dtype, shape, idx=None):
    """Read data back from a memmap file created by write_memmap.

    idx=None reads the whole array; a nested list is treated as a
    multi-dimensional index (converted to a tuple); anything else is used
    as a plain index. Returns an in-memory copy; the memmap is released.
    """
    source = np.memmap(filepath, dtype=dtype, mode="r", shape=shape)
    if idx is None:
        result = source[:]
    elif is_nested_list(idx):
        result = source[tuple(idx)]
    else:
        result = source[idx]
    del source
    return result
def is_nested_list(l):
    """Return True iff `l` is a list containing at least one list element."""
    return isinstance(l, list) and any(isinstance(item, list) for item in l)
def printProgressBar (iteration, total, prefix = '', suffix = '', decimals = 1, length = 100, fill = '█', printEnd = "\r"):
    """
    Call in a loop to create terminal progress bar
    @params:
        iteration   - Required  : current iteration (Int)
        total       - Required  : total iterations (Int)
        prefix      - Optional  : prefix string (Str)
        suffix      - Optional  : suffix string (Str)
        decimals    - Optional  : positive number of decimals in percent complete (Int)
        length      - Optional  : character length of bar (Str)
        fill        - Optional  : bar fill character (Str)
        printEnd    - Optional  : end character (e.g. "\r", "\r\n") (Str)
    """
    percent = ("{0:." + str(decimals) + "f}").format(100 * (iteration / float(total)))
    filledLength = int(length * iteration // total)
    bar = fill * filledLength + '-' * (length - filledLength)
    # "\r" rewinds to the line start so successive calls redraw in place.
    print(f'\r{prefix} |{bar}| {percent}% {suffix}', end = printEnd)
    # Print New Line on Complete
    # NOTE(review): this emits "\r" + printEnd, not a newline — with the
    # default printEnd="\r" no newline is ever printed; confirm intended.
    if iteration == total:
        print(f'\r', end = printEnd)
|
{"/HMF/hmf.py": ["/HMF/core.py", "/HMF/utils.py", "/HMF/parallel.py", "/HMF/__init__.py"], "/HMF/tests/test_hmf.py": ["/HMF/__init__.py"], "/HMF/__init__.py": ["/HMF/core.py", "/HMF/parallel.py", "/HMF/hmf.py", "/HMF/utils.py"], "/HMF/parallel.py": ["/HMF/utils.py", "/HMF/__init__.py"], "/HMF/core.py": ["/HMF/utils.py"], "/setup.py": ["/HMF/__init__.py"]}
|
29,881,470
|
mozjay0619/HMF
|
refs/heads/master
|
/HMF/hmf.py
|
from .core import BaseHMF
from .utils import stride_util
from .utils import border_idx_util
from .utils import save_obj
from .utils import load_obj
from .parallel import WriterProcessManager
import numpy as np
import pandas as pd
import os
import shutil
import psutil
from multiprocessing import sharedctypes
from collections import defaultdict
import copy
from . import constants
import warnings
def format_Warning(message, category, filename, lineno, line=''):
    """warnings.formatwarning replacement: 'file:line: Category: message\\n'."""
    return f"{filename}:{lineno}: {category.__name__}: {message}\n"
class WRITE_TASK_FAILED(UserWarning):
    """Warning category raised when a background write task fails."""
    pass
class READ_TASK_FAILED(UserWarning):
    """Warning category raised when a background read task fails."""
    pass
def open_file(root_path, mode='w+', verbose=False):
    """Open (or create) an HMF directory and return an HMF handle.

    Available modes:
    w: write only. Truncate if exists.
    w+: read and write. Truncate if exists.
    r: read only.
    r+: read and write. Do not truncate if exists.

    Raises:
        ValueError: for any mode other than the four listed above.
    """
    if mode in ('w', 'w+'):
        # Truncate semantics: remove any existing tree and start fresh.
        # Bug fix: the original only matched 'w+', so the documented 'w'
        # mode (and any unknown mode) fell through with `memmap_map`
        # unbound and raised NameError below.
        if os.path.exists(root_path):
            shutil.rmtree(root_path)
        os.mkdir(root_path)
        memmap_map = None
    elif mode in ('r+', 'r'):
        # Load the existing on-disk map; is_hmf_directory returns
        # (False, None) when the directory is not a valid HMF store.
        is_hmf_directory_flag, memmap_map = is_hmf_directory(root_path)
    else:
        raise ValueError("unsupported mode: {!r}".format(mode))
    hmf = HMF(root_path, memmap_map, verbose)
    return hmf
def is_hmf_directory(root_path):
    """Check that `root_path` (assumed to exist) holds a valid HMF store.

    1. check memmap_map exists
    2. check files are all present
    Needs much improvements...

    Returns (True, memmap_map) when the map file loads, (False, None)
    otherwise.
    NOTE(review): `fail_safe_check_obj` / `fail_safe_load_obj` are not
    defined or imported in the visible portion of this module — confirm
    they exist elsewhere in the file.
    """
    # file_list = os.listdir(root_path)
    # if MEMMAP_MAP_FILENAME not in file_list:
    #     print('memmap_map not present')
    #     return(False, None)
    if not fail_safe_check_obj(root_path, constants.MEMMAP_MAP_FILENAME):
        print('memmap_map not present')
        return(False, None)
    memmap_map = fail_safe_load_obj(os.path.join(root_path, constants.MEMMAP_MAP_FILENAME))
    # array_file_list = get_all_array_dirpaths(memmap_map)
    # if(not set(array_file_list) < set(file_list)):
    #     return(False, None)
    return(True, memmap_map)
def get_all_array_dirpaths(m):
    """Return the dirpaths of every array node reachable from the root of
    the memmap-map `m`, discovered by depth-first traversal."""
    seen_dirpaths = []
    array_dirpaths = []
    _depth_first_search(m, 'HMF_rootNodeKey', seen_dirpaths, array_dirpaths)
    return array_dirpaths


def _depth_first_search(m, k, visited_dirpaths, array_dirpaths):
    """Recursive DFS over the memmap map.

    Visits are recorded by dirpath (not node name) in `visited_dirpaths`;
    dirpaths of array nodes are collected into `array_dirpaths`. Array
    nodes are leaves — they carry no 'nodes' key, so the search stops
    there.

    Parameters
    ----------
    m : dict
        The mapping in which `k` can be looked up (the node map at the
        current level of the tree).
    k : str
        Node key at this level; 'HMF_rootNodeKey' denotes the root.
    """
    if k == 'HMF_rootNodeKey':
        visited_dirpaths.append(m['dirpath'])
        children = m['nodes']
    else:
        node = m[k]
        visited_dirpaths.append(node['dirpath'])
        if node['node_type'] == 'array':
            array_dirpaths.append(node['dirpath'])
            return
        children = node['nodes']
    for child_key in children:
        if children[child_key]['dirpath'] not in visited_dirpaths:
            _depth_first_search(children, child_key, visited_dirpaths, array_dirpaths)
class HMF(BaseHMF):
def __init__(self, root_dirpath, memmap_map, verbose=True):
    """Initialize the HMF store handle.

    Args:
        root_dirpath: directory backing this HMF store.
        memmap_map: previously persisted map dict, or None for a new store.
        verbose: passed through to BaseHMF.
    """
    super(HMF, self).__init__(root_dirpath, memmap_map, verbose)
    self.root_dirpath = root_dirpath
    # 0.0.b31 update: arrays are grouped per registered dataframe name.
    self.arrays = defaultdict(list)
    # self.arrays = list()
    self.str_arrays = list()
    # Pending (dirpath, key, value) node attributes.
    self.node_attrs = list()
    # 0.0.b31 update: support multiple registered pandas dataframes.
    self.pdfs = dict()
    self.num_pdfs = 0
    self.pdf_names = list()
    self.current_dataframe_name = None
def from_pandas(self, pdf, groupby=None, orderby=None, ascending=True, group_name=None):
    """Register a pandas dataframe, optionally grouped and/or sorted.

    The dataframe is stored under `group_name` (or an auto-generated
    "dataframe_N" name) and becomes the current dataframe. When `groupby`
    is given, rows are sorted by group (and `orderby` within the group)
    and per-group boundary indices are recorded in memmap_map.

    NOTE(review): the `ascending` parameter is accepted but never used —
    confirm whether descending sorts were intended to be supported.
    NOTE(review): need to numerify groupby col (original author note).
    """
    # 0.0.b31 update
    dataframe_name = group_name
    if dataframe_name is None:
        dataframe_name = "{}_{}".format(constants.DATAFRAME_NAME, self.num_pdfs)
    if not dataframe_name in self.pdf_names:
        self.pdf_names.append(dataframe_name)
        self.num_pdfs += 1
    self.pdfs[dataframe_name] = pdf
    self.current_dataframe_name = dataframe_name
    if groupby and orderby:
        # Encode the group column as integer category codes, then sort by
        # (group, orderby) so each group occupies a contiguous row range.
        self.pdfs[dataframe_name][constants.GROUPBY_ENCODER] = self.pdfs[dataframe_name][groupby].astype('category')
        self.pdfs[dataframe_name][constants.GROUPBY_ENCODER] = self.pdfs[dataframe_name][constants.GROUPBY_ENCODER].cat.codes
        self.pdfs[dataframe_name] = self.pdfs[dataframe_name].sort_values(by=[groupby, orderby]).reset_index(drop=True)
        group_array = self.pdfs[dataframe_name][constants.GROUPBY_ENCODER].values
        # Group names sorted to match the category-code order.
        tmp = pd.DataFrame(self.pdfs[dataframe_name][groupby].unique(), columns=[groupby])
        tmp = tmp.sort_values(by=groupby).reset_index(drop=True)
        group_names = tmp[groupby].tolist()
        self.memmap_map['grouped'][dataframe_name] = True
    elif orderby:
        # Ordered but ungrouped: everything forms one dummy group.
        self.pdfs[dataframe_name] = self.pdfs[dataframe_name].sort_values(by=[orderby]).reset_index(drop=True)
        group_array = np.zeros(len(self.pdfs[dataframe_name]))
        group_names = [constants.HMF_GROUPBY_DUMMY_NAME]
        self.memmap_map['grouped'][dataframe_name] = False
    elif groupby:
        # Grouped but with no within-group ordering.
        self.pdfs[dataframe_name][constants.GROUPBY_ENCODER] = self.pdfs[dataframe_name][groupby].astype('category')
        self.pdfs[dataframe_name][constants.GROUPBY_ENCODER] = self.pdfs[dataframe_name][constants.GROUPBY_ENCODER].cat.codes
        self.pdfs[dataframe_name] = self.pdfs[dataframe_name].sort_values(by=[groupby]).reset_index(drop=True)
        group_array = self.pdfs[dataframe_name][constants.GROUPBY_ENCODER].values
        tmp = pd.DataFrame(self.pdfs[dataframe_name][groupby].unique(), columns=[groupby])
        tmp = tmp.sort_values(by=groupby).reset_index(drop=True)
        group_names = tmp[groupby].tolist()
        self.memmap_map['grouped'][dataframe_name] = True
    else:
        # Neither grouped nor ordered: one dummy group, original row order.
        group_array = np.zeros(len(self.pdfs[dataframe_name]))
        group_names = [constants.HMF_GROUPBY_DUMMY_NAME]
        self.memmap_map['grouped'][dataframe_name] = False
    # Group boundaries -> (start, end) index pairs per group.
    border_idx = border_idx_util(group_array)
    group_idx = stride_util(border_idx, 2, 1, np.int32)
    self.memmap_map['group_sizes'][dataframe_name] = np.diff(border_idx)
    self.memmap_map['group_names'][dataframe_name] = group_names
    self.memmap_map['group_items'][dataframe_name] = list(zip(group_names, group_idx))
    # Track whether more than the single default dataframe is registered.
    primary_default_key = "{}_{}".format(constants.DATAFRAME_NAME, 0)
    if self.num_pdfs>1 or self.current_dataframe_name!=primary_default_key:
        self.memmap_map['multi_pdfs'] = True
    else:
        self.memmap_map['multi_pdfs'] = False
def register_array(self, array_filename, columns, encoder=None, decoder=None):
    """Queue columns of the current dataframe for saving under `array_filename`.

    The data is only staged on `self.arrays`; the actual write is assumed
    to happen later and to succeed.

    Args:
        array_filename: name the array will be stored under.
        columns: column name(s) of the current dataframe to extract.
        encoder: optional callable applied to the column data instead of
            taking `.values` directly.
        decoder: accepted but not used here — presumably applied on read;
            confirm against the reading side.
    """
    if(encoder):
        data_array = encoder(self.pdfs[self.current_dataframe_name][columns])
    else:
        data_array = self.pdfs[self.current_dataframe_name][columns].values
    self.arrays[self.current_dataframe_name].append((array_filename, data_array))
def has_groups(self):
    """Whether rows are grouped: a single bool for the default dataframe,
    or a dict keyed by dataframe name when several are registered."""
    if self.memmap_map['multi_pdfs']:
        return self.memmap_map['grouped']
    default_key = "{}_{}".format(constants.DATAFRAME_NAME, 0)
    return self.memmap_map['grouped'][default_key]
def get_group_sizes(self):
    """Row counts per group: for the single default dataframe, the sizes
    array; otherwise a dict keyed by dataframe name."""
    if self.memmap_map['multi_pdfs']:
        return self.memmap_map['group_sizes']
    default_key = "{}_{}".format(constants.DATAFRAME_NAME, 0)
    return self.memmap_map['group_sizes'][default_key]
def get_group_names(self):
    """Group names: for the single default dataframe, the name list;
    otherwise a dict keyed by dataframe name."""
    if self.memmap_map['multi_pdfs']:
        return self.memmap_map['group_names']
    default_key = "{}_{}".format(constants.DATAFRAME_NAME, 0)
    return self.memmap_map['group_names'][default_key]
def get_group_items(self):
if not self.memmap_map['multi_pdfs']:
primary_default_key = "{}_{}".format(constants.DATAFRAME_NAME, 0)
return {k: np.diff(v)[0] for k, v in self.memmap_map['group_items'][primary_default_key]}
else:
return {k:{k_: np.diff(v_)[0] for k_, v_ in v} for k, v in self.memmap_map['group_items'].items()}
def get_sorted_group_items(self):
if not self.memmap_map['multi_pdfs']:
primary_default_key = "{}_{}".format(constants.DATAFRAME_NAME, 0)
return sorted(zip(self.memmap_map['group_names'][primary_default_key], self.memmap_map['group_sizes'][primary_default_key]),
key=lambda x: x[1],
reverse=True)
else:
return {k:sorted(zip(self.memmap_map['group_names'][k], self.memmap_map['group_sizes'][k]),
key=lambda x: x[1],
reverse=True) for k in self.memmap_map['group_names'].keys()}
def get_sorted_group_names(self):
sorted_group_items = self.get_sorted_group_items()
if not self.memmap_map['multi_pdfs']:
return [elem[0] for elem in sorted_group_items]
else:
return {k: [elem[0] for elem in sorted_group_items[k]] for k in sorted_group_items.keys()}
def get_sorted_group_sizes(self):
sorted_group_items = self.get_sorted_group_items()
if not self.memmap_map['multi_pdfs']:
return [elem[1] for elem in sorted_group_items]
else:
return {k: [elem[1] for elem in sorted_group_items[k]] for k in sorted_group_items.keys()}
def register_node_attr(self, attr_dirpath, key, value):
self.node_attrs.append((attr_dirpath, key, value))
def register_dataframe(self, dataframe_filename, columns):
if not isinstance(columns, list):
columns = [columns]
self.register_array(dataframe_filename, columns)
self.memmap_map['dataframe_colnames'][self.current_dataframe_name][dataframe_filename] = columns
def get_dataframe(self, dataframe_filepath, idx=None):
array = self.get_array(dataframe_filepath, idx)
if self.memmap_map['multi_pdfs']:
dataframe_name = dataframe_filepath.split('/')[1]
else:
primary_default_key = "{}_{}".format(constants.DATAFRAME_NAME, 0)
dataframe_name = primary_default_key
dataframe_filename = dataframe_filepath.split('/')[-1]
columns = self.memmap_map['dataframe_colnames'][dataframe_name][dataframe_filename]
dataframe = pd.DataFrame(array, columns=columns)
return dataframe
    def set_dataframe(self, dataframe_filepath, pdf, columns):
        """Write *columns* of the pandas frame *pdf* to *dataframe_filepath*
        and record the column names so ``get_dataframe`` can rebuild the frame.

        Handles paths of the shape '/name' (2 components) and '/group/name'
        (3 components); deeper nesting is explicitly not supported yet (see
        inline note below).
        """
        # print(self.memmap_map['dataframe_colnames'])
        filepath_components = dataframe_filepath.split('/')
        if len(filepath_components) > 2:
            # second component names the enclosing group
            group_name = filepath_components[1]
            # if longer, we need to take care of that. Not now.
        dataframe_filename = filepath_components[-1]
        if self.memmap_map['multi_pdfs']:
            # multi-dataframe layout: column names are nested per group
            if len(filepath_components)==3:
                self.memmap_map['dataframe_colnames'][group_name][dataframe_filename] = columns
            elif len(filepath_components)==2:
                self.memmap_map['dataframe_colnames'][dataframe_filename] = columns
        else:
            # single-dataframe layout: flat filename -> columns mapping
            self.memmap_map['dataframe_colnames'][dataframe_filename] = columns
        # the data itself is stored as a plain array; the colnames metadata
        # recorded above is what turns it back into a DataFrame on read
        self.set_array(dataframe_filepath, pdf[columns].values)
def _write_registered_node_attrs(self):
"""
The logic used in this method largely mirrors those found in parallel.py.
Main difference:
1. no need to parallelize this
2. we can rely on baseHMF for this since we have access to the HMF object
"""
tasks = list(itertools.product(
range(len(self.node_attrs)),
range(len(self.groups))))
for task in tasks:
attr_dirpath_standalone = self.node_attrs[task[0]][0]
key_standalone = self.node_attrs[task[0]][1]
value_standalone = self.node_attrs[task[0]][2]
group_name = self.groups[task[1]][0]
print(group_name, attr_dirpath_standalone, key_standalone, value_standalone)
    def close(self, zip_file=False, num_subprocs=None, show_progress=True):
        """Flush all registered arrays to disk and persist the memmap map.

        If any arrays were registered, a WriterProcessManager fans the writes
        out over *num_subprocs* subprocesses (default: physical cores - 1).
        Failed write/read tasks are surfaced as warnings rather than raised.
        Finally the (pre-save copy of the) memmap map is written redundantly
        via fail_safe_save_obj, and the in-memory dataframes/arrays released.

        NOTE(review): *zip_file* is accepted but never used in this body.
        """
        # Record remaining information on memmap_map
        # is multi pdf recorded
        # primary_default_key = "{}_{}".format(constants.DATAFRAME_NAME, 0)
        # if self.num_pdfs>1 or self.current_dataframe_name!=primary_default_key:
        # self.memmap_map['multi_pdfs'] = True
        # else:
        # self.memmap_map['multi_pdfs'] = False
        self.group_items = self.memmap_map['group_items']
        self.group_names = self.memmap_map['group_names']
        # shallow copy taken BEFORE the parallel save mutates the map
        memmap_map_copy = copy.copy(self.memmap_map)
        if(len(self.arrays) > 0):
            if(num_subprocs is None):
                # leave one physical core free for the parent process
                num_subprocs = psutil.cpu_count(logical=False) - 1
            if(self.verbose):
                print('Saving registered arrays using multiprocessing [ {} ] subprocs\n'.format(num_subprocs))
            WPM = WriterProcessManager(self, num_subprocs=num_subprocs, verbose=self.verbose, show_progress=show_progress)
            WPM.start()
            self.failed_tasks = WPM.failed_tasks
            if len(self.failed_tasks) > 0:
                # surface (not raise) the per-task failure reasons
                if len(WPM.failure_reasons):
                    warnings.warn(str(WPM.failure_reasons), WRITE_TASK_FAILED)
                if len(WPM.shared_read_error_dict):
                    warnings.warn(str(WPM.shared_read_error_dict), READ_TASK_FAILED)
        memmap_map_dirpath = os.path.join(self.root_dirpath, constants.MEMMAP_MAP_FILENAME)
        fail_safe_save_obj(memmap_map_copy, memmap_map_dirpath)
        self.del_pdf()
        self.del_arrays()
def del_pdf(self):
try:
del self.pdfs
except Exception as e:
if not (type(e)==AttributeError):
raise Exception('failed to delete pdf')
def del_arrays(self):
try:
del self.arrays
except Exception as e:
if not (type(e)==AttributeError):
raise Exception('failed to delete arrays')
# PR 0.0.b16
def fail_safe_save_obj(obj, dirpath):
    """Persist *obj* redundantly to NUM_FILE_COPY sibling paths
    (``dirpath + '0'``, ``'1'``, ...) so a later fail_safe_load_obj can fall
    back to an intact copy if one write was corrupted.

    Individual failed writes are skipped (best-effort). The original used a
    bare ``except:``, which also swallowed KeyboardInterrupt/SystemExit;
    narrowed to ``except Exception``.
    """
    for i in range(constants.NUM_FILE_COPY):
        copy_dirpath = dirpath + str(i)
        try:
            save_obj(obj, copy_dirpath)
        except Exception:
            # best-effort: a failed copy is simply skipped; the remaining
            # copies still provide redundancy
            continue
def fail_safe_load_obj(dirpath):
    """Load the first readable of the NUM_FILE_COPY redundant copies written
    by fail_safe_save_obj.

    Raises IOError when every copy is unreadable. The original used a bare
    ``except:``, which also swallowed KeyboardInterrupt/SystemExit; narrowed
    to ``except Exception``.
    """
    for i in range(constants.NUM_FILE_COPY):
        copy_dirpath = dirpath + str(i)
        try:
            return load_obj(copy_dirpath)
        except Exception:
            # this copy is missing or corrupt; try the next one
            continue
    raise IOError("Damn it, failed to read file again")
def fail_safe_check_obj(root_path, filename):
    """Return True if any redundant copy (``filename + '0'``..) of a
    fail_safe-saved object exists in *root_path*, else False.

    Bug fixed: the original fell off the end and implicitly returned None
    when no copy was found; now returns False explicitly (backward
    compatible -- both are falsy). Membership tests now go against a set.
    """
    file_set = set(os.listdir(root_path))
    for i in range(constants.NUM_FILE_COPY):
        if filename + str(i) in file_set:
            return True
    return False
|
{"/HMF/hmf.py": ["/HMF/core.py", "/HMF/utils.py", "/HMF/parallel.py", "/HMF/__init__.py"], "/HMF/tests/test_hmf.py": ["/HMF/__init__.py"], "/HMF/__init__.py": ["/HMF/core.py", "/HMF/parallel.py", "/HMF/hmf.py", "/HMF/utils.py"], "/HMF/parallel.py": ["/HMF/utils.py", "/HMF/__init__.py"], "/HMF/core.py": ["/HMF/utils.py"], "/setup.py": ["/HMF/__init__.py"]}
|
29,881,471
|
mozjay0619/HMF
|
refs/heads/master
|
/HMF/parallel.py
|
from .utils import stride_util
from .utils import border_idx_util
from .utils import write_memmap
from .utils import read_memmap
from .utils import printProgressBar
from multiprocessing import Process, Manager, sharedctypes
import itertools
import numpy as np
from collections import defaultdict
import time
from . import constants
# Maximum number of times a failed write task is retried before it is moved
# to failed_tasks and given up on (see WriterProcessManager.start).
MAX_WRITE_ATTEMPT = 3
# Back-off (seconds) slept after each failed read-back attempt; the list
# length implicitly defines the read retry budget.
READ_WAIT_INTERVALS = [0.1, 1.0, 3.0]
MAX_READ_ATTEMPT = len(READ_WAIT_INTERVALS)
class WriterProcess(Process):
    """Subprocess that writes one (dataframe, array, group) slice to a memmap
    file and reports success/failure through manager-shared dicts.

    The HMF object is read through the module-global SHARED_HMF_OBJ (set by
    WriterProcessManager) so the arrays are inherited by fork rather than
    pickled per task.
    """
    def __init__(self, shared_write_result_dict, shared_write_error_dict, task):
        """Resolve the task triple (key, array_idx, group_idx) into the slice
        of data to write and the destination filepath; both are computed here,
        in the parent, before the process is started."""
        super(WriterProcess, self).__init__()
        self.shared_write_result_dict = shared_write_result_dict
        self.shared_write_error_dict = shared_write_error_dict
        self.task_key = task
        key, array_idx, group_idx = task
        array_filename = SHARED_HMF_OBJ.arrays[key][array_idx][0]
        group_name = SHARED_HMF_OBJ.group_items[key][group_idx][0]
        start_idx, end_idx = SHARED_HMF_OBJ.group_items[key][group_idx][1]
        # slice of the shared whole array belonging to this group
        self.array = SHARED_HMF_OBJ.arrays[key][array_idx][1][start_idx:end_idx]
        # updated 0.0.b31
        array_filepath = "{}/".format(SHARED_HMF_OBJ.root_dirpath)
        # if there is only one key and its value is constants.DATAFRAME_NAME:
        # array_filepath should not contain the key
        keys = list(SHARED_HMF_OBJ.group_items.keys())
        primary_default_key = "{}_{}".format(constants.DATAFRAME_NAME, 0)
        if(len(keys)==1 and keys[0]==primary_default_key):
            pass
        else:
            array_filepath += "{}__".format(str(key))
        # if the key's group is only one and its value is constants.HMF_GROUPBY_DUMMY_NAME:
        # array_filepath should not contain the group name
        if(len(SHARED_HMF_OBJ.group_items[key])==1 and SHARED_HMF_OBJ.group_names[key][0]==constants.HMF_GROUPBY_DUMMY_NAME):
            array_filepath += array_filename
        else:
            array_filename = SHARED_HMF_OBJ._assemble_dirpath(group_name, array_filename)
            array_filepath += array_filename
        self.array_filepath = array_filepath
        # Note:
        # 1. This is the actual file path
        # 2. "ROOTPATH/PATH__NAME"
    def run(self):
        """
        We need a way to surface the failure reasons.
        In case of actual failed task.

        Writes the slice and records 'success'/'failure' (plus the stringified
        exception on failure) in the shared dicts keyed by the task triple.
        """
        # self.write_array(self.array_filepath, self.array)
        try:
            # start_time = time.time()
            self.write_array(self.array_filepath, self.array)
            # print(time.time() - start_time)
            self.shared_write_result_dict[self.task_key] = 'success'
        except Exception as e:
            self.shared_write_error_dict[self.task_key] = str(e)
            self.shared_write_result_dict[self.task_key] = 'failure'
    def write_array(self, array_filepath, array):
        # persist the slice as a memmap file with its dtype/shape metadata
        dtype = str(array.dtype)
        shape = array.shape
        write_memmap(array_filepath, dtype, shape, array)
class WriterProcessManager():
    """Drives the parallel save: fans write tasks out to WriterProcess
    subprocesses, verifies each write by reading it back, and retries with
    bounded write (MAX_WRITE_ATTEMPT) and read (MAX_READ_ATTEMPT) budgets.

    Task lifecycle: tasks -> pending_tasks -> successful_tasks, or back to
    tasks on write failure / exhausted reads, or to failed_tasks once the
    write budget is spent.
    """
    def __init__(self, hmf_obj, num_subprocs=4, verbose=True, show_progress=True):
        """Build the full task list (one task per registered array per group,
        per dataframe key) and the shared bookkeeping dicts.

        NOTE(review): hmf_obj is also published to the module-global
        SHARED_HMF_OBJ so forked WriterProcess children inherit it.
        """
        # start_time = time.time()
        global SHARED_HMF_OBJ
        SHARED_HMF_OBJ = hmf_obj
        self.hmf_obj = hmf_obj
        # update 0.0.b31
        self.tasks = list()
        data_keys = list(hmf_obj.arrays.keys())
        for data_key in data_keys:
            arrays = hmf_obj.arrays[data_key]
            group_items = hmf_obj.group_items[data_key]
            tasks = list(itertools.product(
                range(len(arrays)),
                range(len(group_items))))
            tasks = [(data_key, *task) for task in tasks]
            self.tasks += tasks
        self.pending_tasks = []
        self.failed_tasks = []
        self.successful_tasks = []
        self.write_attempt_dict = defaultdict(int)
        self.read_attempt_dict = defaultdict(int)
        self.successful_write_tasks = []
        self.failed_write_tasks = []
        self.successful_read_tasks = []
        self.failed_read_tasks = []
        manager = Manager()
        # cross-process result/error channels, keyed by task triple
        self.shared_write_result_dict = manager.dict()
        self.shared_write_error_dict = manager.dict()
        self.shared_read_error_dict = dict()
        self.subprocs = []
        self.num_subprocs = num_subprocs
        self.verbose = verbose
        self.max_len = len(self.tasks)
        self.show_progress = show_progress
    def read_task(self, task):
        """Verify a written task by reading it back through the HMF object;
        returns 'success' or 'failure' (recording the exception text)."""
        # updated 0.0.b31
        key, array_idx, group_idx = task
        array_filename = self.hmf_obj.arrays[key][array_idx][0]
        shared_whole_array = self.hmf_obj.arrays[key][array_idx][1]
        group_name = self.hmf_obj.group_items[key][group_idx][0]
        start_idx, end_idx = self.hmf_obj.group_items[key][group_idx][1]
        array_filepath = ''
        # if there is only one key and its value is constants.DATAFRAME_NAME:
        # array_filepath should not contain the key
        keys = list(self.hmf_obj.group_items.keys())
        primary_default_key = "{}_{}".format(constants.DATAFRAME_NAME, 0)
        if(len(keys)==1 and keys[0]==primary_default_key):
            pass
        else:
            array_filepath = '/'.join((array_filepath, str(key)))
        # if the key's group is only one and its value is constants.HMF_GROUPBY_DUMMY_NAME:
        # array_filepath should not contain the group name
        if(len(self.hmf_obj.group_items[key])==1 and self.hmf_obj.group_names[key][0]==constants.HMF_GROUPBY_DUMMY_NAME):
            array_filepath = '/'.join((array_filepath, str(array_filename)))
        else:
            # array_filename = self.hmf_obj._assemble_dirpath(group_name, array_filename)
            array_filepath = '/'.join((array_filepath, str(group_name), str(array_filename)))
        # Note:
        # 1. This is the fake file path
        # 2. But it is constructed to mirror the actual filesystem tree structure with / separator
        # 3. "/PATH/NAME"
        try:
            self.hmf_obj.get_array(array_filepath)
            return('success')
        except Exception as e:
            self.shared_read_error_dict[task] = str(e)
            return('failure')
    def write_task(self, task):
        """Update memmap_map here.
        This logic only supports single level group...

        Launches a daemon WriterProcess for *task* and returns the handle.
        """
        subproc = WriterProcess(self.shared_write_result_dict, self.shared_write_error_dict,
                                task)
        subproc.daemon = True
        subproc.start()
        return subproc
    def update_memmap_map(self, task):
        """Register the just-written slice in the HMF memmap map, using the
        same logical ('/key/group/name'-style) path as read_task builds."""
        key, array_idx, group_idx = task
        array_filename = self.hmf_obj.arrays[key][array_idx][0]
        shared_whole_array = self.hmf_obj.arrays[key][array_idx][1]
        group_name = self.hmf_obj.group_items[key][group_idx][0]
        start_idx, end_idx = self.hmf_obj.group_items[key][group_idx][1]
        # array_filename = self.hmf_obj._assemble_dirpath(group_name, array_filename)
        array = np.ctypeslib.as_array(shared_whole_array)[start_idx:end_idx]
        # updated 0.0.b31
        array_filepath = ''
        # if there is only one key and its value is constants.DATAFRAME_NAME:
        # array_filepath should not contain the key
        keys = list(self.hmf_obj.group_items.keys())
        primary_default_key = "{}_{}".format(constants.DATAFRAME_NAME, 0)
        if(len(keys)==1 and keys[0]==primary_default_key):
            pass
        else:
            array_filepath = '/'.join((array_filepath, str(key)))
        # if the key's group is only one and its value is constants.HMF_GROUPBY_DUMMY_NAME:
        # array_filepath should not contain the group name
        if(len(self.hmf_obj.group_items[key])==1 and self.hmf_obj.group_names[key][0]==constants.HMF_GROUPBY_DUMMY_NAME):
            array_filepath = '/'.join((array_filepath, str(array_filename)))
        else:
            # array_filename = self.hmf_obj._assemble_dirpath(group_name, array_filename)
            array_filepath = '/'.join((array_filepath, str(group_name), str(array_filename)))
        # Note:
        # 1. This is the fake file path
        # 2. But it is constructed to mirror the actual filesystem tree structure with / separator
        # 3. "/PATH/NAME"
        self.hmf_obj.update_memmap_map_array(array_filepath, array)
    def start(self):
        """Main scheduling loop: dispatch writes, collect write results,
        verify by read-back, and shuffle tasks between the pending /
        successful / failed queues until everything is resolved."""
        if(self.show_progress):
            printProgressBar(0, self.max_len, prefix = 'Progress:', suffix = '', length = 50)
        # pending_tasks: written and either not read or failed to read < MAX_READ_ATTEMPT times
        # tasks: not yet written or failed to read MAX_READ_ATTEMPT times
        while(len(self.pending_tasks) > 0 or len(self.tasks) > 0):
            # if we can run more subprocs and if there are tasks remaining
            # run a task in subproc
            if(len(self.subprocs) < self.num_subprocs and len(self.tasks) > 0):
                # this task will not be put back for writing again
                # until read is failed MAX_READ_ATTEMPT times
                task = self.tasks.pop()
                if self.verbose:
                    print('#######################################\n')
                    print('Working on {}'.format(task))
                # it is pending until read is successful or failed MAX_READ_ATTEMPT times
                # if read is successful, move this task to successful_tasks
                # if read is failed MAX_READ_ATTEMPT times, move it back to tasks
                # --> will count up the write attempt by 1
                # if write count reaches MAX_WRITE_ATTEMPT, move this task to failed_tasks
                # so we can have failures without crashing the program
                self.pending_tasks.append(task)
                subproc = self.write_task(task)
                self.write_attempt_dict[task] += 1
                self.subprocs.append(subproc)
            if self.verbose:
                print('\nCHECKING WRITE STATUS')
                print('from shared_write_result_dict: {}\n'.format(self.shared_write_result_dict))
            if(len(self.shared_write_result_dict.keys()) > 0):
                for tried_write_task in self.shared_write_result_dict.keys():
                    if self.shared_write_result_dict[tried_write_task] == 'success':
                        if self.verbose:
                            print('Write successful: {}'.format(tried_write_task))
                        self.successful_write_tasks.append(tried_write_task)
                        if self.verbose:
                            print('Dropping {} from shared dict for success'.format(tried_write_task))
                        self.shared_write_result_dict.pop(tried_write_task, None)
                        if self.verbose:
                            print('Updating memmap map')
                        self.update_memmap_map(tried_write_task)
                    elif self.shared_write_result_dict[tried_write_task] == 'failure':
                        if self.verbose:
                            print('Write failed: {}'.format(tried_write_task))
                        # we need to push these back to tasks
                        # since we need to start the process again,
                        # no need for any wait time
                        # (there is nothing to wait for! it failed!)
                        self.failed_write_tasks.append(tried_write_task)
                        if self.verbose:
                            print('Dropping {} from shared dict for failure'.format(tried_write_task))
                        self.shared_write_result_dict.pop(tried_write_task, None)
            if self.verbose:
                print('\nCHECKING READ STATUS')
                print('from successful_write_tasks: {}\n'.format(self.successful_write_tasks))
            # we record the failed read tasks here
            # the purpose is to temporarily hold these
            # and escape while loop over successful write tasks
            # add them back in later
            # these are tasks with successful writes
            # but fails at the read
            # what we want for these:
            # need to try reading some more
            self.failed_read_tasks = []
            while(len(self.successful_write_tasks) > 0):
                successful_write_task = self.successful_write_tasks.pop()
                read_result = self.read_task(successful_write_task)
                self.read_attempt_dict[successful_write_task] += 1
                if read_result == 'success':
                    if self.verbose:
                        print('Read successful: {}'.format(successful_write_task))
                    self.successful_read_tasks.append(successful_write_task)
                elif read_result == 'failure':
                    if self.verbose:
                        print('Read failed: {}'.format(successful_write_task))
                    # since the read was not successful for this written task,
                    # put the written task back into the tasks list:
                    self.failed_read_tasks.append(successful_write_task)
                    read_attempt_cnt = self.read_attempt_dict[successful_write_task] - 1
                    time.sleep(READ_WAIT_INTERVALS[read_attempt_cnt])
            # throw the failed read tasks back in
            # for these, we don't want to try to write again
            # we want to read again
            self.successful_write_tasks += self.failed_read_tasks
            self.successful_write_tasks = list(set(self.successful_write_tasks))
            if self.verbose:
                print('\n-------')
                print('successful_write_tasks: ', self.successful_write_tasks)
                print('successful_read_tasks: ', self.successful_read_tasks)
                print('write_attempt_dict: ', self.write_attempt_dict)
                print('read_attempt_dict: ', self.read_attempt_dict)
                # print('shared_write_result_dict', )
            # update pending, successful, failed tasks
            if self.verbose:
                print('\nUPDATE PENDINGS')
                print('pending tasks: {}\n'.format(self.pending_tasks))
            if(len(self.pending_tasks) > 0):
                for _ in range(len(self.pending_tasks)):
                    pending_task = self.pending_tasks.pop()
                    if self.verbose:
                        print('Looking at pending task {}'.format(pending_task))
                    if pending_task in self.failed_write_tasks:
                        if self.verbose:
                            print('    pending --> tasks: {}'.format(pending_task))
                            print('    * write failed\n')
                        # don't append since that will try to do this again right away
                        self.tasks.insert(0, pending_task)
                        self.failed_write_tasks.remove(pending_task)
                    elif pending_task in self.successful_read_tasks:
                        if self.verbose:
                            print('    pending --> successful tasks: {}\n'.format(pending_task))
                        self.successful_tasks.append(pending_task)
                    # elif(self.write_attempt_dict[pending_task] == MAX_WRITE_ATTEMPT
                    #     and self.read_attempt_dict[pending_task] == MAX_READ_ATTEMPT):
                    elif self.write_attempt_dict[pending_task] == MAX_WRITE_ATTEMPT:
                        if self.verbose:
                            print('    pending --> failed tasks: {}'.format(pending_task))
                            print('    * reached MAX_WRITE_ATTEMPT\n')
                        self.failed_tasks.append(pending_task)
                        try:
                            self.successful_write_tasks.remove(pending_task)
                        except:
                            # none of the write was successful
                            pass
                    elif self.read_attempt_dict[pending_task] == MAX_READ_ATTEMPT:
                        if self.verbose:
                            print('    pending --> tasks: {}'.format(pending_task))
                            print('    * reached MAX_READ_ATTEMPT\n')
                        self.tasks.insert(0, pending_task)
                        self.read_attempt_dict[pending_task] = 0
                        self.successful_write_tasks.remove(pending_task)
                    else:
                        if self.verbose:
                            print('    nothing to update\n')
                        self.pending_tasks.insert(0, pending_task)
            if(self.show_progress):
                cur_len = int(len(self.successful_tasks) + len(self.failed_tasks))
                printProgressBar(cur_len, self.max_len, prefix = 'Progress:', suffix = '', length = 50)
            # check status of subprocs after all the above is finished
            # to give subprocs more time to write
            self.subprocs = [elem for elem in self.subprocs if elem.is_alive()]
            if self.verbose:
                print('SUBPROC STATUS')
                print('number of outstanding tasks: {} ( {} )'.format(len(self.tasks), self.tasks))
                print('number of running procs: {}'.format(len(self.subprocs))); print()
            # if all procs are busy, there is no gain from trying to
            # assign work to them at the moment
            if(len(self.subprocs) == self.num_subprocs):
                time.sleep(0.01)
            # if number of tasks is 0, there is no gain from trying to
            # assign work to them at the moment
            # also, if subproc is still running, sleep to give it time to finish
            if(len(self.subprocs) > 0 and len(self.tasks) == 0):
                time.sleep(0.01)
        self.failure_reasons = dict(self.shared_write_error_dict)
        if(self.show_progress):
            printProgressBar(self.max_len, self.max_len, prefix = 'Progress:', suffix = 'Completed!', length = 50)
            print()
|
{"/HMF/hmf.py": ["/HMF/core.py", "/HMF/utils.py", "/HMF/parallel.py", "/HMF/__init__.py"], "/HMF/tests/test_hmf.py": ["/HMF/__init__.py"], "/HMF/__init__.py": ["/HMF/core.py", "/HMF/parallel.py", "/HMF/hmf.py", "/HMF/utils.py"], "/HMF/parallel.py": ["/HMF/utils.py", "/HMF/__init__.py"], "/HMF/core.py": ["/HMF/utils.py"], "/setup.py": ["/HMF/__init__.py"]}
|
29,881,472
|
mozjay0619/HMF
|
refs/heads/master
|
/HMF/tests/test_hmf.py
|
import pytest
import multiprocessing
multiprocessing.set_start_method('fork')
import numpy as np
import pandas as pd
import HMF
def test_writing_arrays():
    """Arrays written through set_array are readable from the same handle."""
    paths = ['/group1/array1', '/group1/array2', '/group2/array2',
             '/group2/subgroup1/array2']
    f = HMF.open_file('test_file', mode='w+', verbose=False)
    for path in paths:
        f.set_array(path, np.arange(3))
    f.close()
    for path in paths:
        assert np.array_equal(np.arange(3), f.get_array(path))
def test_writing_and_reading_arrays():
    """Arrays written in 'w+' mode survive a close and a fresh 'r+' open."""
    paths = ['/group1/array1', '/group1/array2', '/group2/array2',
             '/group2/subgroup1/array2']
    f = HMF.open_file('test_file', mode='w+', verbose=False)
    for path in paths:
        f.set_array(path, np.arange(3))
    f.close()
    f = HMF.open_file('test_file', mode='r+', verbose=False)
    for path in paths:
        assert np.array_equal(np.arange(3), f.get_array(path))
# def test_writing_arrays_from_pandas():
# data = np.arange(10*3).reshape((10, 3))
# groups = ['a'] * 3 + ['b'] * 3 + ['c'] * 4
# pdf = pd.DataFrame(data=data, columns=['a', 'b', 'c'])
# pdf['group'] = groups
# f = HMF.open_file('test_file', mode='w+', verbose=False)
# f.from_pandas(pdf, groupby='group', orderby='c')
# f.register_array('arrayA', ['b', 'c'])
# f.register_array('arrayB', ['a', 'b'])
# f.close()
# assert np.array_equal(f.get_array('/a/arrayA'), pdf[pdf['group']=='a'][['b', 'c']].values)
# assert np.array_equal(f.get_array('/b/arrayB'), pdf[pdf['group']=='b'][['a', 'b']].values)
# def test_writing_and_reading_arrays_from_pandas():
# data = np.arange(10*3).reshape((10, 3))
# groups = ['a'] * 3 + ['b'] * 3 + ['c'] * 4
# pdf = pd.DataFrame(data=data, columns=['a', 'b', 'c'])
# pdf['group'] = groups
# f = HMF.open_file('test_file', mode='w+', verbose=False)
# f.from_pandas(pdf, groupby='group', orderby='c')
# f.register_array('arrayA', ['b', 'c'])
# f.register_array('arrayB', ['a', 'b'])
# f.close()
# f = HMF.open_file('test_file', mode='r+', verbose=False)
# assert np.array_equal(f.get_array('/a/arrayA'), pdf[pdf['group']=='a'][['b', 'c']].values)
# assert np.array_equal(f.get_array('/b/arrayB'), pdf[pdf['group']=='b'][['a', 'b']].values)
# def test_writing_dataframes_from_pandas():
# data = np.arange(10*3).reshape((10, 3))
# groups = ['a'] * 3 + ['b'] * 3 + ['c'] * 4
# pdf = pd.DataFrame(data=data, columns=['a', 'b', 'c'])
# pdf['group'] = groups
# f = HMF.open_file('test_file', mode='w+', verbose=False)
# f.from_pandas(pdf, groupby='group', orderby='c')
# f.register_dataframe('dataframeA', ['b', 'c'])
# f.register_dataframe('dataframeB', ['a', 'b'])
# f.close()
# assert np.array_equal(f.get_array('/a/dataframeA'), pdf[pdf['group']=='a'][['b', 'c']].values)
# assert np.array_equal(f.get_array('/b/dataframeB'), pdf[pdf['group']=='b'][['a', 'b']].values)
# def test_writing_and_reading_dataframes_from_pandas():
# data = np.arange(10*3).reshape((10, 3))
# groups = ['a'] * 3 + ['b'] * 3 + ['c'] * 4
# pdf = pd.DataFrame(data=data, columns=['a', 'b', 'c'])
# pdf['group'] = groups
# f = HMF.open_file('test_file', mode='w+', verbose=False)
# f.from_pandas(pdf, groupby='group', orderby='c')
# f.register_dataframe('dataframeA', ['b', 'c'])
# f.register_dataframe('dataframeB', ['a', 'b'])
# f.close()
# f = HMF.open_file('test_file', mode='r+', verbose=False)
# assert np.array_equal(f.get_array('/a/dataframeA'), pdf[pdf['group']=='a'][['b', 'c']].values)
# assert np.array_equal(f.get_array('/b/dataframeB'), pdf[pdf['group']=='b'][['a', 'b']].values)
# def test_multi_pdf_writing():
# data = np.arange(10*3).reshape((10, 3))
# groups = ['a'] * 3 + ['b'] * 3 + ['c'] * 4
# pdf = pd.DataFrame(data=data, columns=['a', 'b', 'c'])
# pdf['group'] = groups
# f = HMF.open_file('test_file', mode='w+', verbose=False)
# f.from_pandas(pdf, groupby='group', orderby='c')
# f.register_dataframe('dataframeA', ['b', 'c'])
# f.register_dataframe('dataframeB', ['a', 'b'])
# f.from_pandas(pdf, groupby='group', orderby='c')
# f.register_dataframe('dataframeA', ['b', 'c'])
# f.register_dataframe('dataframeB', ['a', 'b'])
# f.close()
# np.array_equal(f.get_dataframe('/__specialHMF__dataFrameNumber_0/a/dataframeA'),
# pdf[pdf['group']=='a'][['b', 'c']].values)
# np.array_equal(f.get_dataframe('/__specialHMF__dataFrameNumber_0/b/dataframeB'),
# pdf[pdf['group']=='b'][['a', 'b']].values)
# def test_multi_pdf_writing_reading():
# data = np.arange(10*3).reshape((10, 3))
# groups = ['a'] * 3 + ['b'] * 3 + ['c'] * 4
# pdf = pd.DataFrame(data=data, columns=['a', 'b', 'c'])
# pdf['group'] = groups
# f = HMF.open_file('test_file', mode='w+', verbose=False)
# f.from_pandas(pdf, groupby='group', orderby='c')
# f.register_dataframe('dataframeA', ['b', 'c'])
# f.register_dataframe('dataframeB', ['a', 'b'])
# f.from_pandas(pdf, groupby='group', orderby='c')
# f.register_dataframe('dataframeA', ['b', 'c'])
# f.register_dataframe('dataframeB', ['a', 'b'])
# f.close()
# f = HMF.open_file('test_file', mode='r+', verbose=False)
# np.array_equal(f.get_dataframe('/__specialHMF__dataFrameNumber_0/a/dataframeA'),
# pdf[pdf['group']=='a'][['b', 'c']].values)
# np.array_equal(f.get_dataframe('/__specialHMF__dataFrameNumber_0/b/dataframeB'),
# pdf[pdf['group']=='b'][['a', 'b']].values)
# def test_multi_pdf_writing_using_group_names():
# data = np.arange(10*3).reshape((10, 3))
# groups = ['a'] * 3 + ['b'] * 3 + ['c'] * 4
# pdf = pd.DataFrame(data=data, columns=['a', 'b', 'c'])
# pdf['group'] = groups
# f = HMF.open_file('test_file', mode='w+', verbose=False)
# f.from_pandas(pdf, groupby='group', orderby='c', group_name='groupA')
# f.register_dataframe('dataframeA', ['b', 'c'])
# f.register_dataframe('dataframeB', ['a', 'b'])
# f.from_pandas(pdf, groupby='group', orderby='c', group_name='groupB')
# f.register_dataframe('dataframeA', ['b', 'c'])
# f.register_dataframe('dataframeB', ['a', 'b'])
# f.close()
# np.array_equal(f.get_dataframe('/groupA/a/dataframeA'),
# pdf[pdf['group']=='a'][['b', 'c']].values)
# np.array_equal(f.get_dataframe('/groupB/b/dataframeB'),
# pdf[pdf['group']=='b'][['a', 'b']].values)
# def test_multi_pdf_writing_and_reading_using_group_names():
# data = np.arange(10*3).reshape((10, 3))
# groups = ['a'] * 3 + ['b'] * 3 + ['c'] * 4
# pdf = pd.DataFrame(data=data, columns=['a', 'b', 'c'])
# pdf['group'] = groups
# f = HMF.open_file('test_file', mode='w+', verbose=False)
# f.from_pandas(pdf, groupby='group', orderby='c', group_name='groupA')
# f.register_dataframe('dataframeA', ['b', 'c'])
# f.register_dataframe('dataframeB', ['a', 'b'])
# f.from_pandas(pdf, groupby='group', orderby='c', group_name='groupB')
# f.register_dataframe('dataframeA', ['b', 'c'])
# f.register_dataframe('dataframeB', ['a', 'b'])
# f.close()
# f = HMF.open_file('test_file', mode='r+', verbose=False)
# np.array_equal(f.get_dataframe('/groupA/a/dataframeA'),
# pdf[pdf['group']=='a'][['b', 'c']].values)
# np.array_equal(f.get_dataframe('/groupB/b/dataframeB'),
# pdf[pdf['group']=='b'][['a', 'b']].values)
|
{"/HMF/hmf.py": ["/HMF/core.py", "/HMF/utils.py", "/HMF/parallel.py", "/HMF/__init__.py"], "/HMF/tests/test_hmf.py": ["/HMF/__init__.py"], "/HMF/__init__.py": ["/HMF/core.py", "/HMF/parallel.py", "/HMF/hmf.py", "/HMF/utils.py"], "/HMF/parallel.py": ["/HMF/utils.py", "/HMF/__init__.py"], "/HMF/core.py": ["/HMF/utils.py"], "/setup.py": ["/HMF/__init__.py"]}
|
29,881,473
|
mozjay0619/HMF
|
refs/heads/master
|
/HMF/core.py
|
from .utils import write_memmap
from .utils import read_memmap
from .utils import load_obj
import os
from collections import defaultdict
class BaseHMF():
def __init__(self, root_dirpath, memmap_map, verbose):
"""If root_dirpath exists, load the existing memmap so we can get arrays.
(root)
attributes
dirpath
node_type = root
nodes
group_name1
attributes
dirpath
node_type = group
nodes
array_name1
attributes
dirpath
node_type = array
dtype
shape
group_name2
group_name3 ...
Question: why not combine the node name and the dict following it into a single dict?
- this will in fact make searching thru nodes at nodes key much harder since we can't
query it using key name, but entire dictionaries will come up --> forcing us to
query using a dictionary.
"""
if(memmap_map):
if(verbose):
print('Loading from existing HMF')
self.memmap_map = memmap_map
else:
if(verbose):
print('Creating new HMF')
self.memmap_map = dict()
self.memmap_map['attributes'] = dict()
self.memmap_map['nodes'] = dict()
self.memmap_map['dirpath'] = root_dirpath
self.memmap_map['node_type'] = 'root'
self.memmap_map['dataframe_colnames'] = defaultdict(dict)
self.memmap_map['grouped'] = False
self.memmap_map['grouped'] = dict()
self.memmap_map['group_sizes'] = dict()
self.memmap_map['group_names'] = dict()
self.memmap_map['group_items'] = dict()
self.memmap_map['multi_pdfs'] = False
self.verbose = verbose
def set_group(self, group_dirpath):
"""
Todo: validate group_dirpath
"""
self.update_memmap_map_group(group_dirpath)
def update_memmap_map_group(self, group_dirpath):
memmap_map_pos = self.memmap_map['nodes']
# dict with node_names of group names / array names
if(group_dirpath=='/'):
return
# iterate through each level of group (horizontal traversal)
for idx, node_name in enumerate(group_dirpath.split('/')):
# skip / because root is not part of dict ( (root) )
if idx==0:
if(node_name!=''):
raise ValueError('Path much start with the root directory "/"')
continue
# if node_name is one of memmap_map_pos keys
if(node_name in memmap_map_pos):
if self.verbose:
print('{} found in memmap'.format(node_name))
if(not self._is_group_node(memmap_map_pos, node_name)):
if self.verbose:
print('{} is not a group node so skipping'.format(node_name))
continue
# take the current level dirpath
group_dirpath_pos = memmap_map_pos[node_name]['dirpath']
# move to the groups dict position of the current node_name
memmap_map_pos = memmap_map_pos[node_name]['nodes']
else:
if self.verbose:
print('{} not found in memmap'.format(node_name))
# if node_name is not part of the key, create the node_name's map position
memmap_map_pos[node_name] = dict()
memmap_map_pos[node_name]['nodes'] = dict() # create node names dict
memmap_map_pos[node_name]['attributes'] = dict()
memmap_map_pos[node_name]['node_type'] = 'group'
# if it's a node_name right after / root position
# will always visit - setting the group_dirpath_pos always
if idx==1:
# start the dirpath afresh
memmap_map_pos[node_name]['dirpath'] = node_name
group_dirpath_pos = memmap_map_pos[node_name]['dirpath']
else:
# if it's beyond the level right after root /
# the dirpath is combination of all previous node_name dirpaths
memmap_map_pos[node_name]['dirpath'] = self._assemble_dirpath(group_dirpath_pos, node_name)
# update the map position before moving to the next level of node_name (if there is any)
memmap_map_pos = memmap_map_pos[node_name]['nodes']
def retrieve_memmap_map_pos_group(self, group_dirpath):
"""
group_pos: the dictionary position where nodes, dirpath, node_type can be queried
i.e. right below the node names dict level
check if it is indeed a group
"""
if(group_dirpath=='/'):
return self.memmap_map
memmap_map_pos = self.memmap_map['nodes']
for idx, node_name in enumerate(group_dirpath.split('/')):
if idx==0:
continue
if node_name in memmap_map_pos:
memmap_map_group_pos = memmap_map_pos[node_name]
memmap_map_pos = memmap_map_group_pos['nodes']
else:
raise
return memmap_map_group_pos
def set_array(self, array_filepath, array):
    """Register *array* under *array_filepath* in the memmap map, then write it.

    Order matters: write_array looks up the dtype/shape entry that
    update_memmap_map_array creates.
    """
    # TODO(review): validation is still pending, per the original notes below.
    # validate array
    # validate dirpath
    self.update_memmap_map_array(array_filepath, array)
    self.write_array(array_filepath, array)
def update_memmap_map_array(self, array_filepath, array):
    """Insert an array node (dtype/shape/dirpath) into the memmap map.

    Any intermediate group nodes implied by the path are created first.
    """
    parts = array_filepath.split('/')
    parent_dirpath = '/'.join(parts[:-1])
    array_name = parts[-1]
    if len(parts) > 2:
        # nested path: make sure the parent group chain exists first
        self.update_memmap_map_group(parent_dirpath)
        parent_pos = self.retrieve_memmap_map_pos_group(parent_dirpath)
    else:
        # array sits directly under the root node
        parent_pos = self.memmap_map
    # TODO: check if array_name is already in here
    if parent_pos['node_type'] != 'root':
        node_dirpath = self._assemble_dirpath(parent_pos['dirpath'], array_name)
    else:
        node_dirpath = array_name
    parent_pos['nodes'][array_name] = {
        'node_type': 'array',
        'attributes': dict(),
        'nodes': dict(),
        'dirpath': node_dirpath,
        'dtype': str(array.dtype),
        'shape': array.shape,
    }
def retrieve_memmap_map_pos_array(self, array_filepath):
    """Return the map entry for the array at *array_filepath*.

    NOTE(review): this also calls update_memmap_map_group on the parent
    path, so a lookup can create missing group nodes as a side effect.
    """
    parts = array_filepath.split('/')
    parent_dirpath = '/'.join(parts[:-1])
    array_name = parts[-1]
    if len(parts) > 2:
        self.update_memmap_map_group(parent_dirpath)
        parent_pos = self.retrieve_memmap_map_pos_group(parent_dirpath)
    else:
        parent_pos = self.memmap_map
    return parent_pos['nodes'][array_name]
def write_array(self, array_filepath, array):
    """Persist *array* to disk at the location recorded in the memmap map."""
    entry = self.retrieve_memmap_map_pos_array(array_filepath)
    # on-disk location = root dirpath + the node's relative dirpath
    target = os.path.join(self.memmap_map['dirpath'], entry['dirpath'])
    write_memmap(target, entry['dtype'], entry['shape'], array)
def get_array(self, array_filepath, idx=None):
    """Load the array stored at *array_filepath*; *idx* optionally selects a sub-view."""
    entry = self.retrieve_memmap_map_pos_array(array_filepath)
    # on-disk location = root dirpath + the node's relative dirpath
    target = os.path.join(self.memmap_map['dirpath'], entry['dirpath'])
    return read_memmap(target, entry['dtype'], entry['shape'], idx)
def set_node_attr(self, attr_dirpath, key, value):
    """Set attribute *key* = *value* on the group at *attr_dirpath*.

    Missing groups along the path are created first.
    """
    self.update_memmap_map_group(attr_dirpath)
    group_entry = self.retrieve_memmap_map_pos_group(attr_dirpath)
    group_entry['attributes'][key] = value
def get_node_attr(self, attr_dirpath, key):
    """Return attribute *key* stored on the group at *attr_dirpath*."""
    group_entry = self.retrieve_memmap_map_pos_group(attr_dirpath)
    return group_entry['attributes'][key]
def _assemble_dirpath(self, *args):
args = [str(elem) for elem in args]
return '__'.join(args)
def _is_group_node(self, memmap_map_pos, node_name):
return memmap_map_pos[node_name]['node_type'] == 'group'
def _is_array_node(self, memmap_map_pos, node_name):
return memmap_map_pos[node_name]['node_type'] == 'array'
|
{"/HMF/hmf.py": ["/HMF/core.py", "/HMF/utils.py", "/HMF/parallel.py", "/HMF/__init__.py"], "/HMF/tests/test_hmf.py": ["/HMF/__init__.py"], "/HMF/__init__.py": ["/HMF/core.py", "/HMF/parallel.py", "/HMF/hmf.py", "/HMF/utils.py"], "/HMF/parallel.py": ["/HMF/utils.py", "/HMF/__init__.py"], "/HMF/core.py": ["/HMF/utils.py"], "/setup.py": ["/HMF/__init__.py"]}
|
29,881,474
|
mozjay0619/HMF
|
refs/heads/master
|
/setup.py
|
from setuptools import setup, find_packages
import HMF
# Single-source the version from the package itself.
VERSION = HMF.__version__

# Read the long description with an explicit encoding so the build does not
# depend on the platform's default locale.
with open("README.rst", "r", encoding="utf-8") as fh:
    long_description = fh.read()

setup(
    name="hierarchical-memmap-format",
    version=VERSION,
    author="Jay Kim",
    description="Hierarchical numpy memmap datasets for Python",
    long_description=long_description,
    long_description_content_type="text/x-rst",
    url="https://github.com/mozjay0619/HMF",
    license="BSD 3-clause",  # fixed: was "DSB 3-clause", an obvious transposition typo
    packages=find_packages(),
    install_requires=["numpy>=1.18.2", "pandas>=0.25.3", "psutil>=5.7.0"],
)
|
{"/HMF/hmf.py": ["/HMF/core.py", "/HMF/utils.py", "/HMF/parallel.py", "/HMF/__init__.py"], "/HMF/tests/test_hmf.py": ["/HMF/__init__.py"], "/HMF/__init__.py": ["/HMF/core.py", "/HMF/parallel.py", "/HMF/hmf.py", "/HMF/utils.py"], "/HMF/parallel.py": ["/HMF/utils.py", "/HMF/__init__.py"], "/HMF/core.py": ["/HMF/utils.py"], "/setup.py": ["/HMF/__init__.py"]}
|
29,881,475
|
mozjay0619/HMF
|
refs/heads/master
|
/HMF/constants.py
|
# constants for hmf
# Reserved filename under which the serialized memmap map (hierarchy index) is stored.
MEMMAP_MAP_FILENAME = "__specialHMF__memmapMap"
# Reserved name for the groupby-key -> numeric-code encoder artifact.
GROUPBY_ENCODER = "__specialHMF__groupByNumericEncoder"
# Number of file copies kept; presumably redundancy for resilience — TODO confirm.
NUM_FILE_COPY = 3
# Reserved prefix used to number stored dataframes.
DATAFRAME_NAME = "__specialHMF__dataFrameNumber"
# Placeholder group name, presumably used when no groupby key is supplied — TODO confirm.
HMF_GROUPBY_DUMMY_NAME = "__specialHMF__groupbyDummyName"
|
{"/HMF/hmf.py": ["/HMF/core.py", "/HMF/utils.py", "/HMF/parallel.py", "/HMF/__init__.py"], "/HMF/tests/test_hmf.py": ["/HMF/__init__.py"], "/HMF/__init__.py": ["/HMF/core.py", "/HMF/parallel.py", "/HMF/hmf.py", "/HMF/utils.py"], "/HMF/parallel.py": ["/HMF/utils.py", "/HMF/__init__.py"], "/HMF/core.py": ["/HMF/utils.py"], "/setup.py": ["/HMF/__init__.py"]}
|
29,894,711
|
tifangel/tubes1_MLL_CNN
|
refs/heads/master
|
/Detector.py
|
import numpy as np
class Detector:
    """Applies an element-wise activation function to each row of an input matrix."""

    def __init__(self):
        # last input matrix passed to activate()
        self.input = []
        # name of the last activation applied ('' until activate() is called)
        self.activation_type = ''

    def activate(self, input_matrix, activation_type='sigmoid'):
        """Apply *activation_type* to each row and return a list of lists.

        Unknown activation names fall back to the identity (rows copied as-is).
        """
        self.input = input_matrix
        # fixed: previously the attribute declared in __init__ was never updated
        self.activation_type = activation_type
        result = []
        for row in self.input:
            if activation_type == 'sigmoid':
                result.append(list(self.sigmoid(row)))
            elif activation_type == 'relu':
                result.append(list(self.relu(row)))
            elif activation_type == 'softmax':
                result.append(list(self.softmax(row)))
            else:
                result.append(list(row))
        return result

    def sigmoid(self, X):
        """Logistic function 1 / (1 + e^-x), element-wise."""
        return 1 / (1 + np.exp(-X))

    def relu(self, X):
        """max(0, x), element-wise."""
        return np.maximum(0, X)

    def softmax(self, X):
        """Normalized exponentials of X.

        Shifts by max(X) for numerical stability (same mathematical result,
        no overflow for large inputs) and reuses the exponentials instead of
        computing np.exp(X) twice as the original did.
        """
        expo = np.exp(X - np.max(X))
        return expo / np.sum(expo)
|
{"/dense.py": ["/Layer.py"], "/Pooling.py": ["/Layer.py"], "/main.py": ["/Layer.py", "/Convolution.py", "/Pooling.py", "/Detector.py", "/dense.py"], "/Convolution.py": ["/Layer.py"], "/Detector.py": ["/Layer.py"]}
|
29,894,712
|
tifangel/tubes1_MLL_CNN
|
refs/heads/master
|
/Pooling.py
|
import numpy as np
from numpy.lib.stride_tricks import as_strided
class Pooling:
    """2D pooling (max or average) built on a zero-copy strided window view."""

    def __init__(self):
        self.matrix = []
        self.kernel_size = (0, 0)
        self.stride = 0
        self.padding = 0
        self.pool_mode = 'max'

    def apply(self, input_matrix, kernel_size=(2, 2), stride=2, padding=0, pool_mode='max'):
        """Pool *input_matrix* and return the reduced 2D array.

        Raises ValueError for a pool_mode other than 'max' or 'avg'.
        """
        self.kernel_size = kernel_size
        self.stride = stride
        self.padding = padding
        self.pool_mode = pool_mode
        # constant (zero) padding on every border
        self.matrix = np.pad(input_matrix, self.padding, mode='constant')
        rows, cols = self.matrix.shape
        kh, kw = self.kernel_size
        out_shape = ((rows - kh) // self.stride + 1,
                     (cols - kw) // self.stride + 1)
        # one (kh, kw) patch per output cell, without copying any data
        row_stride, col_stride = self.matrix.strides
        windows = as_strided(
            self.matrix,
            shape=out_shape + self.kernel_size,
            strides=(self.stride * row_stride, self.stride * col_stride,
                     row_stride, col_stride),
        )
        patches = windows.reshape(-1, kh, kw)
        if self.pool_mode == 'max':
            return patches.max(axis=(1, 2)).reshape(out_shape)
        if self.pool_mode == 'avg':
            return patches.mean(axis=(1, 2)).reshape(out_shape)
        raise ValueError(pool_mode, 'is not valid!')
|
{"/dense.py": ["/Layer.py"], "/Pooling.py": ["/Layer.py"], "/main.py": ["/Layer.py", "/Convolution.py", "/Pooling.py", "/Detector.py", "/dense.py"], "/Convolution.py": ["/Layer.py"], "/Detector.py": ["/Layer.py"]}
|
29,899,397
|
usvimal/Pi-thon
|
refs/heads/rewrite-v2.0
|
/cogs/settings.py
|
import config
import discord
from discord.ext import commands
from utils.lyricsretriever import LyricsRetriever
class Settings(commands.Cog):
    """Guild/user settings commands: per-guild command prefix and user profile data."""

    def __init__(self, bot):
        self.bot = bot
        self.config = config
        self.lyrics_retriever = LyricsRetriever(bot)

    @commands.guild_only()
    @commands.group()
    async def prefix(self, ctx):
        """ Show current prefix for this guild"""
        # Runs only when no subcommand matched: an unrecognized subcommand
        # name gets an error embed, no subcommand shows the cached prefix.
        if ctx.invoked_subcommand is None:
            if ctx.subcommand_passed:
                em = discord.Embed(title='Oof! That was not a valid command 🤨 ',
                                   description='Type ;help [command] for more info on a command.',
                                   colour=0x3c1835)
                await ctx.send(embed=em, delete_after=60)
            else:
                prefix = self.bot.all_prefixes[ctx.guild.id]
                await ctx.send(f"Current prefix for this server is `{prefix}`.")

    @commands.guild_only()
    @commands.has_permissions(manage_guild=True)
    @prefix.command(aliases=["change"])
    async def set(self, ctx, prefix):
        """Set guild prefix"""
        guild_id = ctx.guild.id
        if len(prefix) > 5:
            await ctx.send('Please keep your prefix to below 5 characters')
        else:
            # persist to postgres first, then refresh the in-memory cache the
            # bot's prefix resolver reads
            async with self.bot.dbpool.acquire() as conn:
                await conn.execute(
                    'UPDATE guildprop SET "prefix"=$1 WHERE "guild_id"=$2;',
                    prefix, guild_id)
            self.bot.all_prefixes[ctx.guild.id] = prefix
            await ctx.send(f"New prefix for this server is `{prefix}`.")

    @commands.guild_only()
    @commands.has_permissions(manage_guild=True)
    @prefix.command(aliases=["default"])
    async def reset(self, ctx):
        """Reset guild prefix"""
        guild_id = ctx.guild.id
        default_prefix = config.default_prefix
        # same persist-then-cache order as set()
        async with self.bot.dbpool.acquire() as conn:
            await conn.execute(
                'UPDATE guildprop SET "prefix"=$1 WHERE "guild_id"=$2;',
                default_prefix, guild_id)
        self.bot.all_prefixes[ctx.guild.id] = default_prefix
        await ctx.send(f"Default prefix set to `{default_prefix}`.")

    @commands.group()
    async def profile(self, ctx):
        """View status of your settings saved in the bot"""
        current_source = self.lyrics_retriever.get_main_source(ctx.author.id)
        em = discord.Embed(title=f"{ctx.author}'s profile")
        em.set_thumbnail(url=ctx.author.avatar_url)
        em.add_field(name='Lyrics source', value=current_source)
        await ctx.send(embed=em)

    @profile.command(aliases=['del', 'remove'])
    async def delete(self, ctx):
        """Deletes your information from the server"""
        async with self.bot.dbpool.acquire() as db:
            await db.execute("DELETE FROM userprop WHERE user_id=$1", ctx.author.id)
def setup(bot):
    """Extension entry point: attach the Settings cog to *bot*."""
    cog = Settings(bot)
    bot.add_cog(cog)
|
{"/cogs/settings.py": ["/config/__init__.py", "/utils/lyricsretriever.py"], "/main.py": ["/config/__init__.py", "/utils/db.py", "/utils/discord_handler.py"], "/utils/prettydiscordprinter/concrete_printers.py": ["/utils/text_formatter.py", "/utils/prettydiscordprinter/abstract_classes.py", "/utils/prettydiscordprinter/concrete_formatters.py"], "/cogs/personal_todo.py": ["/utils/text_formatter.py"], "/cogs/helper.py": ["/utils/prettydiscordprinter/concrete_printers.py"], "/cogs/fun.py": ["/config/__init__.py", "/utils/checks.py"], "/utils/discord_handler.py": ["/utils/prettydiscordprinter/__init__.py"], "/cogs/events.py": ["/config/__init__.py"], "/cogs/to-do.py": ["/utils/text_formatter.py"], "/utils/prettydiscordprinter/__init__.py": ["/utils/prettydiscordprinter/concrete_formatters.py", "/utils/prettydiscordprinter/concrete_printers.py"], "/utils/checks.py": ["/config/__init__.py"], "/utils/prettydiscordprinter/abstract_classes.py": ["/utils/text_formatter.py"], "/cogs/lyrics.py": ["/utils/text_formatter.py", "/utils/lyricsretriever.py"]}
|
29,899,398
|
usvimal/Pi-thon
|
refs/heads/rewrite-v2.0
|
/main.py
|
import aiohttp
import asyncio
import asyncpg
import config
import discord
import importlib
import jishaku
import json
import logging
import platform
import random
import sys
import traceback
from discord.ext import commands
from utils.db import Database
from utils.discord_handler import DiscordWriter
class MainBot(commands.Bot):
    """Main bot class: hydrates caches from postgres, loads cogs, mirrors
    stdout/stderr to a discord channel, and rotates the bot's presence."""

    # Channel IDs where log lines and startup/update notices are sent.
    LOGGING_CHANNEL_ID = 573856996256776202
    UPDATES_CHANNEL_ID = 574240405722234881
    # import name -> PyPI distribution name; checked for updates at startup.
    bot_requirements = {
        'jishaku': 'jishaku',
        'lyricsgenius': 'lyricsgenius',
        'discord': 'discord.py',
        'owotrans': 'owotranslator',
        'psutil': 'psutil',
        'urllib3': 'urllib3',
        'asyncpg': 'asyncpg',
        'bs4': 'beautifulsoup4'
    }

    def __init__(self, *args, **kwargs):
        # NOTE(review): *args is accepted but not forwarded to super().__init__
        super().__init__(command_prefix=self._get_prefix, **kwargs)
        # resolved in on_ready() once the channel cache exists
        self._logging_channel = None
        self._updates_channel = None
        # in-memory caches hydrated from postgres in batch_fetch_from_db()
        self.all_prefixes = {}
        self.lyrics_source = {}
        self.nword1_counter = {}
        self.nword2_counter = {}

    async def on_ready(self):
        # Startup sequence: channels -> handlers -> DB -> caches -> cogs ->
        # package check -> presence loop (which never returns).
        self._logging_channel = self.get_channel(self.LOGGING_CHANNEL_ID)
        self._updates_channel = self.get_channel(self.UPDATES_CHANNEL_ID)
        self._add_handlers()
        self._display_startup_message()
        await self.init_postgres_connection()
        self.database = Database(main_loop=self.loop, bot=self)
        await self.batch_fetch_from_db()
        await self._load_cogs()
        await self.check_packages(self.bot_requirements)
        await self._update_bot_games_frequently() # this must be the last

    def _add_handlers(self):
        """ Change stdout and stderr to also print out to discord. Outputs and errors will still be printed to console. """
        sys.tracebacklimit = 0 # Reduce traceback stack size to 0
        sys.stdout = DiscordWriter(sys.stdout, self._logging_channel)
        sys.stderr = DiscordWriter(sys.stderr, self._logging_channel)

    def _display_startup_message(self):
        # Console banner with login identity and connection counts.
        log_in_msg = "Logged in as {} (ID: {}) | Connected to {} servers | Connected to {} users"
        version_msg = "Discord.py Version: {} | Python Version: {}"
        print('=' * 100)
        print("*Hackerman voice* I'm in")
        print(log_in_msg.format(self.user.name, self.user.id, len(self.guilds), len(set(self.get_all_members()))))
        print(version_msg.format(discord.__version__, platform.python_version()))
        print('=' * 100)

    async def _load_cogs(self):
        # Also prints out cogs status on 'pi-thon updates' discord channel.
        loaded_cogs = list()
        failed_cogs = dict()
        for cog in config.cogs:
            try:
                self.load_extension(cog)
                loaded_cogs.append(cog)
            except Exception as e:
                # a failed cog is reported but does not abort startup
                print(f'Couldn\'t load cog {cog} due to ' + str(e))
                print(traceback.format_exc())
                failed_cogs[cog] = str(e)
        await self._show_discord_startup_message(loaded_cogs, failed_cogs)

    async def _show_discord_startup_message(self, loaded_cogs, failed_cogs):
        # Posts an embed summarizing loaded/failed cogs to the updates channel.
        if config.show_startup_message == "False":
            return
        loaded_cogs_string = "\n".join(loaded_cogs) if len(loaded_cogs) != 0 else "None"
        if len(failed_cogs) != 0:
            failed_cogs_string = "\n".join(f"{name} : {error_name}" for name, error_name in failed_cogs.items())
        else:
            failed_cogs_string = "None"
        em = discord.Embed(title='S T A T U S', description='Pi-thon is up!', colour=0x3c1835)
        em.add_field(name='Loaded cogs', value=loaded_cogs_string, inline=False)
        em.add_field(name='Failed cogs', value=failed_cogs_string, inline=False)
        em.set_author(name='Pi-thon', icon_url=self.user.avatar_url)
        await self._updates_channel.send(embed=em)

    async def _update_bot_games_frequently(self):
        # Infinite loop: pick a random presence template, fill in counts,
        # sleep config.gamestimer seconds, repeat.
        while True:
            random_game = random.choice(config.games)
            guild_count = len(self.guilds)
            member_count = len(set(self.get_all_members()))
            activity_type = random_game[0]
            activity_name = random_game[1].format(guilds=guild_count, members=member_count)
            new_activity = discord.Activity(type=activity_type, name=activity_name)
            await self.change_presence(activity=new_activity)
            await asyncio.sleep(config.gamestimer)

    async def init_postgres_connection(self):
        # Connection pool shared by cogs via self.dbpool.
        self.dbpool = await asyncpg.create_pool(dsn=config.DATABASE_URL)

    async def batch_fetch_from_db(self):
        # Hydrate every in-memory cache using a single acquired connection.
        async with self.dbpool.acquire() as conn:
            await self.fetch_prefixes_from_db(conn)
            await self.fetch_lyrics_source_from_db(conn)
            await self.fetch_nword_from_db(conn)

    async def fetch_prefixes_from_db(self, connection):
        # guild_id -> prefix cache used by _get_prefix()
        prefixes = await connection.fetch("SELECT guild_id, prefix FROM guildprop;")
        for row in prefixes:
            self.all_prefixes[row["guild_id"]] = row["prefix"]

    def _get_prefix(self, bot, message):
        # command_prefix callable: per-guild prefix with mention fallback.
        if not message.guild:
            return config.default_prefix # Use default prefix in DMs
        try:
            return commands.when_mentioned_or(self.all_prefixes[message.guild.id])(self, message)
        except KeyError:
            # guild not cached yet -> configured default
            return commands.when_mentioned_or(config.default_prefix)(self, message)

    async def fetch_lyrics_source_from_db(self, connection):
        # user_id -> preferred lyrics source cache
        lyrics_source = await connection.fetch("SELECT user_id, lyrics_source FROM userprop;")
        for row in lyrics_source:
            self.lyrics_source[row["user_id"]] = row["lyrics_source"]

    async def fetch_nword_from_db(self, connection):
        # per-user counters kept in two parallel caches
        nword1 = await connection.fetch("SELECT user_id, nword1 FROM nwordtable;")
        nword2 = await connection.fetch("SELECT user_id, nword2 FROM nwordtable;")
        for row in nword1:
            self.nword1_counter[row["user_id"]] = row["nword1"]
        for row in nword2:
            self.nword2_counter[row["user_id"]] = row["nword2"]

    async def is_owner(self, user: discord.User):
        if user.id == config.MinID or user.id == config.creatorID: # Implement your own conditions here
            return True
        return False

    async def check_packages(self, package_dict):
        # Compare installed versions against PyPI and report available updates.
        latest_version = {}
        for package_name, online_name in package_dict.items():
            mod = importlib.import_module(package_name)
            # NOTE(review): a new ClientSession is opened per package; a single
            # reused session would be cheaper — confirm before changing.
            session = aiohttp.ClientSession()
            async with session.get("https://www.pypi.org/pypi/" + online_name + "/json") as resp:
                data = await resp.read()
            await session.close()
            output = json.loads(data)
            version_no = output['info']['version']
            online_version = version_no[0:6]
            try:
                current_version = mod.__version__
                if current_version != online_version:
                    latest_version[package_name] = version_no
            except Exception as e:
                print('Error retrieving info on', package_name, 'Reason:', e,
                      '\nPlease fix the dictionary items or remove them.')
        if latest_version:
            await self._updates_channel.send('The following modules have updates:' + str(latest_version))
if __name__ == "__main__":
bot = MainBot(pm_help=None, description='A personal project for fun')
token = config.DISCORD_BOT_SECRET
bot.run(token)
|
{"/cogs/settings.py": ["/config/__init__.py", "/utils/lyricsretriever.py"], "/main.py": ["/config/__init__.py", "/utils/db.py", "/utils/discord_handler.py"], "/utils/prettydiscordprinter/concrete_printers.py": ["/utils/text_formatter.py", "/utils/prettydiscordprinter/abstract_classes.py", "/utils/prettydiscordprinter/concrete_formatters.py"], "/cogs/personal_todo.py": ["/utils/text_formatter.py"], "/cogs/helper.py": ["/utils/prettydiscordprinter/concrete_printers.py"], "/cogs/fun.py": ["/config/__init__.py", "/utils/checks.py"], "/utils/discord_handler.py": ["/utils/prettydiscordprinter/__init__.py"], "/cogs/events.py": ["/config/__init__.py"], "/cogs/to-do.py": ["/utils/text_formatter.py"], "/utils/prettydiscordprinter/__init__.py": ["/utils/prettydiscordprinter/concrete_formatters.py", "/utils/prettydiscordprinter/concrete_printers.py"], "/utils/checks.py": ["/config/__init__.py"], "/utils/prettydiscordprinter/abstract_classes.py": ["/utils/text_formatter.py"], "/cogs/lyrics.py": ["/utils/text_formatter.py", "/utils/lyricsretriever.py"]}
|
29,899,399
|
usvimal/Pi-thon
|
refs/heads/rewrite-v2.0
|
/cogs/testing.py
|
import discord
from discord.ext import commands
class Testing(commands.Cog):
    """ Testing cog for trying out experimental code. """

    def __init__(self, bot):
        self._bot = bot

    @commands.is_owner()
    @commands.command(hidden=True)
    async def poke_testing(self, ctx):
        """ Utility function to check testing cog is active. """
        names = " | ".join(command.name for command in self.get_commands())
        await ctx.send(f"`Testing cog active. Test commands = {names}`")

    @commands.is_owner()
    @commands.command(hidden=True)
    async def raise_error(self, ctx, *, args):
        """ Used to raise specific errors for the purpose of testing the error handler. """
        class DummyResponse:
            def __init__(self):
                self.status = None
                self.reason = None
        # letter -> exception class; same mapping as the previous if/elif chain
        error_map = {
            "a": discord.Forbidden,
            "b": discord.NotFound,
            "c": discord.errors.NotFound,
            "d": discord.errors.Forbidden,
        }
        exc_cls = error_map.get(args)
        if exc_cls is not None:
            raise exc_cls(DummyResponse(), "")

    @commands.is_owner()
    @commands.command(hidden=True)
    async def test_emojis(self, ctx):
        """ Check if certain emojis are compatible with discord. """
        for emoji in ("\U0001F3B2", "\U0000274E"):
            await ctx.send(emoji)
def setup(bot):
    """Extension entry point: attach the Testing cog to *bot*."""
    cog = Testing(bot)
    bot.add_cog(cog)
|
{"/cogs/settings.py": ["/config/__init__.py", "/utils/lyricsretriever.py"], "/main.py": ["/config/__init__.py", "/utils/db.py", "/utils/discord_handler.py"], "/utils/prettydiscordprinter/concrete_printers.py": ["/utils/text_formatter.py", "/utils/prettydiscordprinter/abstract_classes.py", "/utils/prettydiscordprinter/concrete_formatters.py"], "/cogs/personal_todo.py": ["/utils/text_formatter.py"], "/cogs/helper.py": ["/utils/prettydiscordprinter/concrete_printers.py"], "/cogs/fun.py": ["/config/__init__.py", "/utils/checks.py"], "/utils/discord_handler.py": ["/utils/prettydiscordprinter/__init__.py"], "/cogs/events.py": ["/config/__init__.py"], "/cogs/to-do.py": ["/utils/text_formatter.py"], "/utils/prettydiscordprinter/__init__.py": ["/utils/prettydiscordprinter/concrete_formatters.py", "/utils/prettydiscordprinter/concrete_printers.py"], "/utils/checks.py": ["/config/__init__.py"], "/utils/prettydiscordprinter/abstract_classes.py": ["/utils/text_formatter.py"], "/cogs/lyrics.py": ["/utils/text_formatter.py", "/utils/lyricsretriever.py"]}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.