index int64 | repo_name string | branch_name string | path string | content string | import_graph string |
|---|---|---|---|---|---|
61,982 | atorrese/SGAGRO | refs/heads/main | /catalog/mark/forms.py | from django import forms
from catalog.models import Mark
class MarkForm(forms.ModelForm):
    """ModelForm for Mark: validates and renders the mark's Name field."""
    # Bug fix: the widget is attached directly to the declared field. The
    # original put it in Meta.widgets, but Meta.widgets only applies to
    # auto-generated fields — for an explicitly declared field like this one
    # it is silently ignored, so the 'form-control' CSS class never reached
    # the rendered input.
    Name = forms.CharField(
        min_length=2,
        widget=forms.TextInput(attrs={'class': 'form-control'}),
    )

    class Meta:
        model = Mark
        fields = ['Name']

    def clean(self):
        """Run the default cross-field validation and return cleaned_data."""
        return super(MarkForm, self).clean()
| {"/sale/urls.py": ["/sale/client/views.py", "/sale/invoice/views.py", "/sale/order/views.py"], "/security/urls.py": ["/security/views.py", "/security/business/views.py"], "/purchase/models.py": ["/SGAGRO/funciones2.py", "/catalog/models.py", "/security/models.py"], "/purchase/order/forms.py": ["/purchase/models.py", "/SGAGRO/funciones2.py"], "/sale/client/forms.py": ["/sale/models.py"], "/catalog/mark/forms.py": ["/catalog/models.py"], "/sale/models.py": ["/catalog/models.py", "/security/models.py", "/SGAGRO/funciones2.py"], "/purchase/order/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/purchase/order/forms.py", "/purchase/models.py"], "/catalog/category/views.py": ["/SGAGRO/funciones.py", "/catalog/models.py", "/catalog/category/forms.py"], "/security/business/views.py": ["/SGAGRO/funciones.py", "/security/models.py", "/security/business/forms.py"], "/catalog/urls.py": ["/catalog/category/views.py", "/catalog/product/views.py"], "/purchase/urls.py": ["/purchase/order/views.py"], "/catalog/product/forms.py": ["/catalog/models.py"], "/SGAGRO/funciones.py": ["/security/models.py", "/security/business/forms.py", "/sale/models.py", "/purchase/models.py"], "/purchase/admin.py": ["/purchase/models.py"], "/security/admin.py": ["/security/models.py"], "/sale/invoice/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/sale/invoice/forms.py", "/sale/models.py"], "/sale/admin.py": ["/sale/models.py"], "/catalog/models.py": ["/security/models.py"], "/purchase/provider/forms.py": ["/purchase/models.py"], "/security/business/forms.py": ["/security/models.py"], "/sale/invoice/forms.py": ["/sale/models.py", "/SGAGRO/funciones2.py"], "/sale/seller/forms.py": ["/sale/models.py"], "/sale/order/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/sale/models.py"], "/security/views.py": ["/SGAGRO/funciones.py", "/security/models.py", "/sale/models.py", "/catalog/models.py", 
"/security/forms.py", "/purchase/models.py"], "/catalog/product/views.py": ["/SGAGRO/funciones.py", "/catalog/models.py", "/catalog/product/forms.py"], "/catalog/category/forms.py": ["/catalog/models.py"], "/sale/client/views.py": ["/SGAGRO/funciones.py", "/sale/client/forms.py", "/sale/models.py"], "/catalog/admin.py": ["/catalog/models.py"]} |
61,983 | atorrese/SGAGRO | refs/heads/main | /sale/models.py | from decimal import Decimal
from django.utils.timezone import now
from django.db import models
from django.db.models.aggregates import Sum
# Create your models here.
from catalog.models import Product
from security.models import ModelBase
from SGAGRO.funciones2 import METHOD_PAYEMENT,STATUS_PAY
class Client(ModelBase):
    """Customer record; referenced by Invoice via its ClientId foreign key."""
    # Field names are capitalized (project-wide convention); verbose_name
    # values are the Spanish UI labels.
    Names = models.CharField(verbose_name='Nombres',max_length=80)
    SurNames = models.CharField(verbose_name='Apellidos',max_length=80)
    IdentificationCard = models.CharField(verbose_name='Cédula',max_length=10)
    City = models.CharField(verbose_name='Ciudad',max_length=80)
    Address = models.CharField(verbose_name='Dirección',max_length=120,blank=True, null=True)
    Phone = models.CharField(verbose_name='Telefono',max_length=88)
    Email = models.EmailField(verbose_name= 'Correo Electronico',max_length=200)

    def __str__(self):
        """Display as 'Names SurNames'."""
        return '{} {}'.format(self.Names,self.SurNames)

    def get_Names_SurNames(self):
        """Full-name helper; produces the same text as __str__."""
        return self.Names +' '+ self.SurNames

    class Meta:
        verbose_name= 'Cliente'
        verbose_name_plural = 'Clientes'
        # Newest first; created_at is presumably declared on ModelBase — not visible here.
        ordering= ('-created_at',)
class Seller(ModelBase):
    """Salesperson record; referenced by Invoice via its SellerId foreign key."""
    Names = models.CharField(verbose_name='Nombres',max_length=80)
    SurNames = models.CharField(verbose_name='Apellidos',max_length=80)
    IdentificationCard = models.CharField(verbose_name='Cédula',max_length=10)
    Birthdate = models.DateField(verbose_name='Fecha de Nacimiento',null=True,blank=True)
    City = models.CharField(verbose_name='Ciudad',max_length=80)
    Address = models.CharField(verbose_name='Dirección',max_length=120)
    Phone = models.CharField(verbose_name='Telefono',max_length=88)
    Email = models.EmailField(verbose_name= 'Correo Electronico',max_length=200)

    def __str__(self):
        """Display as 'Names SurNames'."""
        return '{} {}'.format(self.Names,self.SurNames)

    def get_Names_SurNames(self):
        """Full-name helper; produces the same text as __str__."""
        return self.Names +' '+ self.SurNames

    class Meta:
        verbose_name= 'Vendedor'
        verbose_name_plural = 'Vendedores'
        # Newest first; created_at is presumably declared on ModelBase — not visible here.
        ordering= ('-created_at',)
class Invoice(ModelBase):
    """Sales invoice header; line items live in DetailInvoice (see get_Details)."""
    # PROTECT keeps clients/sellers from being deleted while invoices reference them.
    ClientId = models.ForeignKey(Client,verbose_name='Cliente',on_delete=models.PROTECT)
    SellerId = models.ForeignKey(Seller,verbose_name='Vendedor',on_delete=models.PROTECT)
    DateInvoice =models.DateField(default=now)
    WeekInvoice =models.PositiveIntegerField(verbose_name='Semana Factura',blank=True ,null=True)
    # Choice values come from SGAGRO.funciones2.STATUS_PAY (imported above).
    StatusInvoice = models.IntegerField(choices=STATUS_PAY,blank=True ,null=True)
    SubTotal = models.DecimalField(blank=True ,null=True, max_digits=19,decimal_places=2,default=0)
    TotalPay = models.DecimalField(blank=True ,null=True, max_digits=19,decimal_places=2,default=0)
    Discount = models.DecimalField(blank=True ,null=True,max_digits=19, decimal_places=2, default=0)
    # Presumably the discount percentage (0-100) — not enforced here; confirm against callers.
    Num_Porcent_Des= models.IntegerField(blank=True ,null=True,)

    def __str__(self):
        """Display as 'Fecha: <date> Total:<amount>'."""
        return 'Fecha: {} Total:{}'.format(self.DateInvoice,self.TotalPay)

    class Meta:
        verbose_name ='Factura'
        verbose_name_plural = 'Facturas'

    def get_Details(self):
        """Return the DetailInvoice queryset belonging to this invoice."""
        return DetailInvoice.objects.filter(InvoiceId=self)
class DetailInvoice(ModelBase):
    """One invoice line: a product with its quantity and computed amounts."""
    # PROTECT on the product, CASCADE from the invoice: deleting an invoice
    # removes its lines, but a product in use cannot be deleted.
    ProductId = models.ForeignKey(Product,verbose_name='Producto',on_delete=models.PROTECT)
    InvoiceId = models.ForeignKey(Invoice,verbose_name='Factura',on_delete=models.CASCADE)
    Quantity = models.IntegerField(default=1)
    Price = models.DecimalField(max_digits=19,decimal_places=2)
    Cost = models.DecimalField(max_digits=19,decimal_places=2)
    Utility = models.DecimalField(max_digits=19,decimal_places=2)
    Total = models.DecimalField(max_digits=19,decimal_places=2)
    #Discount = models.DecimalField(blank=True, null=True, max_digits=19, decimal_places=2, default=0)

    def __str__(self):
        # NOTE(review): shows only the Utility value, which reads oddly in
        # admin lists — confirm this is intentional.
        return '{}'.format(self.Utility)

    class Meta:
        verbose_name ='Detalle Factura'
verbose_name_plural = 'Detalles de Factura' | {"/sale/urls.py": ["/sale/client/views.py", "/sale/invoice/views.py", "/sale/order/views.py"], "/security/urls.py": ["/security/views.py", "/security/business/views.py"], "/purchase/models.py": ["/SGAGRO/funciones2.py", "/catalog/models.py", "/security/models.py"], "/purchase/order/forms.py": ["/purchase/models.py", "/SGAGRO/funciones2.py"], "/sale/client/forms.py": ["/sale/models.py"], "/catalog/mark/forms.py": ["/catalog/models.py"], "/sale/models.py": ["/catalog/models.py", "/security/models.py", "/SGAGRO/funciones2.py"], "/purchase/order/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/purchase/order/forms.py", "/purchase/models.py"], "/catalog/category/views.py": ["/SGAGRO/funciones.py", "/catalog/models.py", "/catalog/category/forms.py"], "/security/business/views.py": ["/SGAGRO/funciones.py", "/security/models.py", "/security/business/forms.py"], "/catalog/urls.py": ["/catalog/category/views.py", "/catalog/product/views.py"], "/purchase/urls.py": ["/purchase/order/views.py"], "/catalog/product/forms.py": ["/catalog/models.py"], "/SGAGRO/funciones.py": ["/security/models.py", "/security/business/forms.py", "/sale/models.py", "/purchase/models.py"], "/purchase/admin.py": ["/purchase/models.py"], "/security/admin.py": ["/security/models.py"], "/sale/invoice/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/sale/invoice/forms.py", "/sale/models.py"], "/sale/admin.py": ["/sale/models.py"], "/catalog/models.py": ["/security/models.py"], "/purchase/provider/forms.py": ["/purchase/models.py"], "/security/business/forms.py": ["/security/models.py"], "/sale/invoice/forms.py": ["/sale/models.py", "/SGAGRO/funciones2.py"], "/sale/seller/forms.py": ["/sale/models.py"], "/sale/order/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/sale/models.py"], "/security/views.py": ["/SGAGRO/funciones.py", "/security/models.py", 
"/sale/models.py", "/catalog/models.py", "/security/forms.py", "/purchase/models.py"], "/catalog/product/views.py": ["/SGAGRO/funciones.py", "/catalog/models.py", "/catalog/product/forms.py"], "/catalog/category/forms.py": ["/catalog/models.py"], "/sale/client/views.py": ["/SGAGRO/funciones.py", "/sale/client/forms.py", "/sale/models.py"], "/catalog/admin.py": ["/catalog/models.py"]} |
61,984 | atorrese/SGAGRO | refs/heads/main | /purchase/order/views.py | """ Invoice Views """
import json
# Django
from decimal import Decimal
from django.utils.timezone import datetime
from django.db.models import Q
from django.http import JsonResponse
from django.urls import reverse_lazy
from django.contrib.auth.mixins import LoginRequiredMixin
from django.views.generic import ListView, CreateView, UpdateView, DeleteView, DetailView
# App
from SGAGRO.funciones import Add_Data
from SGAGRO.funciones2 import STATUS_PAY,METHOD_PAYEMENT
from catalog.models import Product
from utils.mixins import OldDataMixin
from purchase.order.forms import OrderForm
from purchase.models import Order, Provider, DetailOrder
#from utils.conexion import Info
class Index(LoginRequiredMixin, ListView, OldDataMixin):
    """Paginated list of purchase orders, searchable by provider name or RUC."""
    template_name = 'purchase/orders/index.html'
    model = Order
    paginate_by = 15
    context_object_name = 'orders'
    # Default "old data" values; OldDataMixin presumably uses these to
    # repopulate the search box between requests — defined outside this file.
    attributes = {'search': ''}
    def get_queryset(self):
        # Free-text search against the related provider's business name or RUC
        # (field is spelled 'BussinessName' in the model).
        search = self.get_old_data('search')
        return Order.objects.filter(
            Q(ProviderId__BussinessName__icontains=search)|
            Q(ProviderId__Ruc__icontains=search)
        ).order_by('-created_at')
    def get_context_data(self, *, object_list=None, **kwargs):
        context = super(Index, self).get_context_data(**kwargs)
        # Injects shared template data (helper from SGAGRO.funciones).
        Add_Data(context)
        return self.get_all_olds_datas(context=context, attributes=self.attributes)
class Show(LoginRequiredMixin, DetailView):
    """Show one purchase Order; answers AJAX requests with a JSON payload."""
    template_name = 'purchase/orders/show.html'
    model = Order
    context_object_name = 'Order'

    def get(self, request, *args, **kwargs):
        # Render the normal detail response first (this also resolves the object).
        response = super(Show, self).get(request, *args, **kwargs)
        try:
            # NOTE(review): is_ajax() is deprecated since Django 3.1; consider
            # checking the X-Requested-With header directly.
            if self.request.is_ajax():
                # Fetch once — the original called get_object() per field,
                # issuing a DB query each time.
                order_obj = self.get_object()
                detail = [{
                    'quantity': item.Quantity,
                    'price': item.Price,
                    'total': item.Total,
                    'product': {
                        'name': item.ProductId.Name,
                        'category': item.ProductId.CategoryId.Name,
                        'mark': item.ProductId.MarkId.Name,
                    },
                } for item in order_obj.get_Details()]
                order = {
                    'provider': order_obj.ProviderId.BussinessName,
                    'date': order_obj.DateOrder.strftime("%d-%m-%Y"),
                    'total': order_obj.TotalPay,
                    'detail': detail,
                }
                return JsonResponse({'resp': 'ok', 'order': order})
        except Exception as e:
            # Best-effort: fall back to the HTML response on serialization errors.
            print(e)
        # Bug fix: return the rendered HTTP response. The original returned
        # self.get_template_names() — a list of strings is not a valid view
        # return value.
        return response
class Create(LoginRequiredMixin, CreateView, OldDataMixin):
    """Create a purchase Order with its detail rows; increments product stock.

    The POST payload carries a 'details' field: a JSON-encoded list of
    {producto, cantidad, precio, total} dicts built by the front end.
    """
    model = Order
    template_name = 'purchase/orders/create.html'
    form_class = OrderForm
    success_url = reverse_lazy('purchase:order.index')

    def get_attributes(self):
        # Bug fix: the original stored datetime.now() in a class-level
        # `attributes` dict, freezing the date at import time — a long-running
        # server kept pre-filling a stale DateOrder. Computing it per request
        # (matching the Update view's get_attributes pattern) fixes that.
        return {
            'DateOrder': datetime.now().strftime('%Y-%m-%d'),
        }

    def form_valid(self, form):
        new_order = form.save(commit=False)
        # ISO week number of the order date, used for weekly grouping.
        new_order.WeekOrder = new_order.DateOrder.isocalendar()[1]
        new_order.save()
        details = json.loads(self.request.POST['details'])
        for d in details:
            product = Product.objects.get(pk=d['producto'])
            DetailOrder(
                ProductId=product,
                OrderId=new_order,
                Quantity=int(d['cantidad']),
                Price=Decimal(d['precio']),
                Total=Decimal(d['total']),
            ).save()
            # A purchase adds to stock.
            product.Stock += int(d['cantidad'])
            product.save()
        return super(Create, self).form_valid(form)

    def get_context_data(self, **kwargs):
        context = super(Create, self).get_context_data(**kwargs)
        Add_Data(context)
        context['old_provider'] = self.post_old_data('ProviderId')
        context['providers'] = Provider.objects.filter(status=True)
        context['products'] = Product.objects.filter(status=True)
        return self.post_all_olds_datas(context=context, attributes=self.get_attributes())
class Update(LoginRequiredMixin, UpdateView, OldDataMixin):
    """Edit a purchase Order: rebuilds its detail rows and re-applies stock.

    The old detail rows are deleted (subtracting their quantities from stock)
    and then recreated from the posted 'details' JSON payload (adding the new
    quantities back), so stock always reflects the current detail.
    """
    model = Order
    template_name = 'purchase/orders/edit.html'
    form_class = OrderForm
    success_url = reverse_lazy('purchase:order.index')

    def get_attributes(self):
        # Pre-fill the date input with the order's current date.
        return {
            'DateOrder': self.get_object().DateOrder.strftime('%Y-%m-%d'),
        }

    def form_valid(self, form):
        order = form.save(commit=False)
        order.WeekOrder = order.DateOrder.isocalendar()[1]
        order.save()
        # Undo the old detail: return each quantity to the product's stock,
        # then drop the row.
        for old_detail in DetailOrder.objects.filter(OrderId=order):
            old_detail.ProductId.Stock -= old_detail.Quantity
            old_detail.ProductId.save()
            old_detail.delete()
        # Recreate the detail from the posted JSON and apply the new stock.
        for d in json.loads(self.request.POST['details']):
            product = Product.objects.get(pk=d['producto'])
            DetailOrder(
                ProductId=product,
                OrderId=order,
                Quantity=int(d['cantidad']),
                Price=Decimal(d['precio']),
                Total=Decimal(d['total']),
            ).save()
            product.Stock += int(d['cantidad'])
            product.save()
        return super().form_valid(form)

    def get_context_data(self, **kwargs):
        context = super(Update, self).get_context_data(**kwargs)
        Add_Data(context)
        # Fetch once — the original called get_object() three times here,
        # issuing a DB query per call.
        order = self.get_object()
        context['old_provider'] = self.post_old_data('ProviderId', order.ProviderId.pk)
        context['providers'] = Provider.objects.filter(status=True)
        context['products'] = Product.objects.filter(status=True)
        context['DetailsOrder'] = DetailOrder.objects.filter(OrderId=order.pk)
        context['TotalPay'] = order.TotalPay
        return self.post_all_olds_datas(context=context, attributes=self.get_attributes())
class Delete(LoginRequiredMixin, DeleteView):
    """Delete a purchase Order together with its detail rows (AJAX, DELETE only)."""
    model = Order
    http_method_names = ['delete']

    def delete(self, request, *args, **kwargs):
        instance = self.get_object()
        # Remove the children first, then the order itself.
        instance.delete_detail()
        instance.delete()
        return JsonResponse({
            'status': True,
            'message': '¡El registro ha sido eliminado correctamente!'
        })
| {"/sale/urls.py": ["/sale/client/views.py", "/sale/invoice/views.py", "/sale/order/views.py"], "/security/urls.py": ["/security/views.py", "/security/business/views.py"], "/purchase/models.py": ["/SGAGRO/funciones2.py", "/catalog/models.py", "/security/models.py"], "/purchase/order/forms.py": ["/purchase/models.py", "/SGAGRO/funciones2.py"], "/sale/client/forms.py": ["/sale/models.py"], "/catalog/mark/forms.py": ["/catalog/models.py"], "/sale/models.py": ["/catalog/models.py", "/security/models.py", "/SGAGRO/funciones2.py"], "/purchase/order/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/purchase/order/forms.py", "/purchase/models.py"], "/catalog/category/views.py": ["/SGAGRO/funciones.py", "/catalog/models.py", "/catalog/category/forms.py"], "/security/business/views.py": ["/SGAGRO/funciones.py", "/security/models.py", "/security/business/forms.py"], "/catalog/urls.py": ["/catalog/category/views.py", "/catalog/product/views.py"], "/purchase/urls.py": ["/purchase/order/views.py"], "/catalog/product/forms.py": ["/catalog/models.py"], "/SGAGRO/funciones.py": ["/security/models.py", "/security/business/forms.py", "/sale/models.py", "/purchase/models.py"], "/purchase/admin.py": ["/purchase/models.py"], "/security/admin.py": ["/security/models.py"], "/sale/invoice/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/sale/invoice/forms.py", "/sale/models.py"], "/sale/admin.py": ["/sale/models.py"], "/catalog/models.py": ["/security/models.py"], "/purchase/provider/forms.py": ["/purchase/models.py"], "/security/business/forms.py": ["/security/models.py"], "/sale/invoice/forms.py": ["/sale/models.py", "/SGAGRO/funciones2.py"], "/sale/seller/forms.py": ["/sale/models.py"], "/sale/order/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/sale/models.py"], "/security/views.py": ["/SGAGRO/funciones.py", "/security/models.py", "/sale/models.py", "/catalog/models.py", 
"/security/forms.py", "/purchase/models.py"], "/catalog/product/views.py": ["/SGAGRO/funciones.py", "/catalog/models.py", "/catalog/product/forms.py"], "/catalog/category/forms.py": ["/catalog/models.py"], "/sale/client/views.py": ["/SGAGRO/funciones.py", "/sale/client/forms.py", "/sale/models.py"], "/catalog/admin.py": ["/catalog/models.py"]} |
61,985 | atorrese/SGAGRO | refs/heads/main | /catalog/category/views.py | '''Marks Views'''
#Django
from django.http import JsonResponse
from django.urls import reverse_lazy
from django.contrib.auth.mixins import LoginRequiredMixin
from django.views.generic import ListView, CreateView, UpdateView, DeleteView
#Sgv
from SGAGRO.funciones import Add_Data
from catalog.models import Category
from catalog.category.forms import CategoryForm
from utils.mixins import OldDataMixin
class Index(LoginRequiredMixin, ListView, OldDataMixin):
    """List Categories; also serves an AJAX autocomplete payload."""
    template_name = 'catalog/categories/index.html'
    model = Category
    paginate_by = 2  # NOTE(review): unusually small page size — confirm intended
    context_object_name = 'categories'
    # Default old-data values used to repopulate the search box.
    attributes = {'search': ''}

    def get_queryset(self):
        """Active categories whose name contains the search text, newest first."""
        search = self.get_old_data('search')
        return Category.objects.filter(Name__icontains=search, status=True).order_by('-created_at')

    def get_context_data(self, *, object_list=None, **kwargs):
        context = super(Index, self).get_context_data(**kwargs)
        Add_Data(context)
        return self.get_all_olds_datas(context=context, attributes=self.attributes)

    def get(self, request, *args, **kwargs):
        response = super(Index, self).get(request, *args, **kwargs)
        if self.request.is_ajax():
            categories = self.get_queryset()
            # Bug fix: an empty result used to serialize as {} (a dict, from
            # the `data={}` initializer) while a non-empty one was a list —
            # always return a list so the client sees a consistent type.
            data = [{'id': category.pk, 'value': category.Name} for category in categories]
            return JsonResponse({'data': data})
        return response
class Create(LoginRequiredMixin, CreateView, OldDataMixin):
    """Create a Category; answers AJAX posts with a JSON status payload."""
    model = Category
    template_name = 'catalog/categories/create.html'
    form_class = CategoryForm
    success_url = reverse_lazy('catalog:category.index')
    attributes = {'Name':''}

    def form_valid(self, form):
        form.save()
        if not self.request.is_ajax():
            return super().form_valid(form)
        return JsonResponse({
            'status': True,
            'message': '¡El registro ha sido creado correctamente!'
        })

    def form_invalid(self, form):
        if not self.request.is_ajax():
            return super().form_invalid(form)
        return JsonResponse({
            'status': False,
            'message': '¡El Formulario Tiene errores!',
            'form_errors': form.errors.as_json(),
        })

    def get_context_data(self, **kwargs):
        ctx = super(Create, self).get_context_data(**kwargs)
        Add_Data(ctx)
        return self.get_all_olds_datas(context=ctx, attributes=self.attributes)
class Update(LoginRequiredMixin, UpdateView, OldDataMixin):
    """Edit an existing Category."""
    model = Category
    template_name = 'catalog/categories/edit.html'
    form_class = CategoryForm
    success_url = reverse_lazy('catalog:category.index')

    def get_attributes(self):
        # Pre-populate the old-data helper with the current name.
        return {'Name': self.get_object().Name}

    def form_valid(self, form):
        form.save()
        return super().form_valid(form)

    def get_context_data(self, **kwargs):
        ctx = super(Update, self).get_context_data(**kwargs)
        Add_Data(ctx)
        return self.post_all_olds_datas(context=ctx, attributes=self.get_attributes())
class Delete(LoginRequiredMixin, DeleteView):
    """Delete a Category unless products still reference it (AJAX, DELETE only)."""
    model = Category
    http_method_names = ['delete']

    def delete(self, request, *args, **kwargs):
        # Fetch once — the original called get_object() up to three times,
        # issuing a DB query per call.
        category = self.get_object()
        if category.product_set.exists():
            # Refuse deletion while any product references this category.
            data = {
                'status': False,
                'message': '¡No se Elimino el Regitro. Porque esta Asociado a un Producto o varios!'
            }
        else:
            category.delete()
            data = {
                'status': True,
                'message': '¡El registro ha sido eliminado correctamente!'
            }
        return JsonResponse(data)
| {"/sale/urls.py": ["/sale/client/views.py", "/sale/invoice/views.py", "/sale/order/views.py"], "/security/urls.py": ["/security/views.py", "/security/business/views.py"], "/purchase/models.py": ["/SGAGRO/funciones2.py", "/catalog/models.py", "/security/models.py"], "/purchase/order/forms.py": ["/purchase/models.py", "/SGAGRO/funciones2.py"], "/sale/client/forms.py": ["/sale/models.py"], "/catalog/mark/forms.py": ["/catalog/models.py"], "/sale/models.py": ["/catalog/models.py", "/security/models.py", "/SGAGRO/funciones2.py"], "/purchase/order/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/purchase/order/forms.py", "/purchase/models.py"], "/catalog/category/views.py": ["/SGAGRO/funciones.py", "/catalog/models.py", "/catalog/category/forms.py"], "/security/business/views.py": ["/SGAGRO/funciones.py", "/security/models.py", "/security/business/forms.py"], "/catalog/urls.py": ["/catalog/category/views.py", "/catalog/product/views.py"], "/purchase/urls.py": ["/purchase/order/views.py"], "/catalog/product/forms.py": ["/catalog/models.py"], "/SGAGRO/funciones.py": ["/security/models.py", "/security/business/forms.py", "/sale/models.py", "/purchase/models.py"], "/purchase/admin.py": ["/purchase/models.py"], "/security/admin.py": ["/security/models.py"], "/sale/invoice/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/sale/invoice/forms.py", "/sale/models.py"], "/sale/admin.py": ["/sale/models.py"], "/catalog/models.py": ["/security/models.py"], "/purchase/provider/forms.py": ["/purchase/models.py"], "/security/business/forms.py": ["/security/models.py"], "/sale/invoice/forms.py": ["/sale/models.py", "/SGAGRO/funciones2.py"], "/sale/seller/forms.py": ["/sale/models.py"], "/sale/order/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/sale/models.py"], "/security/views.py": ["/SGAGRO/funciones.py", "/security/models.py", "/sale/models.py", "/catalog/models.py", 
"/security/forms.py", "/purchase/models.py"], "/catalog/product/views.py": ["/SGAGRO/funciones.py", "/catalog/models.py", "/catalog/product/forms.py"], "/catalog/category/forms.py": ["/catalog/models.py"], "/sale/client/views.py": ["/SGAGRO/funciones.py", "/sale/client/forms.py", "/sale/models.py"], "/catalog/admin.py": ["/catalog/models.py"]} |
61,986 | atorrese/SGAGRO | refs/heads/main | /security/business/views.py | from django.http import JsonResponse
from django.urls import reverse_lazy
from django.contrib.auth.mixins import LoginRequiredMixin
from django.views.generic import ListView, CreateView, UpdateView, DeleteView
#Sgv
from SGAGRO.funciones import Add_Data
from security.models import Business
from security.business.forms import BusinessForm
from utils.mixins import OldDataMixin
class Create(LoginRequiredMixin, CreateView, OldDataMixin):
    """Create a Business; answers AJAX posts with a JSON status payload."""
    model = Business
    form_class = BusinessForm
    success_url = reverse_lazy('security:home')
    # Bug fix: the 'description' key was misspelled 'desciption', so the
    # old-data helper could never repopulate that field after a failed submit
    # (the Business model field is 'description' — see the Update view and the
    # initial migration).
    attributes = {'name': '', 'alias': '', 'description': '', 'icon': ''}

    def form_valid(self, form):
        form.save()
        if self.request.is_ajax():
            data = {
                'status': True,
                'message': '¡El registro ha sido creado correctamente!'
            }
            return JsonResponse(data)
        return super().form_valid(form)

    def form_invalid(self, form):
        if self.request.is_ajax():
            data = {
                'status': False,
                'message': '¡El Formulario Tiene errores!',
                'form_errors': form.errors.as_json(),
            }
            return JsonResponse(data)
        return super().form_invalid(form)

    def get_context_data(self, **kwargs):
        context = super(Create, self).get_context_data(**kwargs)
        Add_Data(context)
        return self.get_all_olds_datas(context=context, attributes=self.attributes)
class Update(LoginRequiredMixin, UpdateView, OldDataMixin):
    """Edit the Business settings (name, alias, description, icon)."""
    model = Business
    form_class = BusinessForm
    success_url = reverse_lazy('security:home')
    template_name = 'auth/setting.html'
    context_object_name = 'Business'

    def get_attributes(self):
        # Fetch once and build the old-data map from the current record.
        business = self.get_object()
        return {
            'name': business.name,
            'description': business.description,
            'icon': business.icon,
            'alias': business.alias,
        }

    def form_valid(self, form):
        # Bug fix: removed the debug block that printed `form.icon` —
        # ModelForm exposes no such attribute, so any request with an uploaded
        # file raised AttributeError. A single form.save() persists the
        # instance, uploaded icon included; the preceding save(commit=False)
        # was redundant.
        form.save()
        return super().form_valid(form)

    def form_invalid(self, form):
        # Bug fix: the original called form.save(commit=False) here, which
        # raises ValueError on a form that failed validation.
        return super().form_invalid(form)

    def get_context_data(self, **kwargs):
        context = super(Update, self).get_context_data(**kwargs)
        Add_Data(context)
        return self.post_all_olds_datas(context=context, attributes=self.get_attributes())
| {"/sale/urls.py": ["/sale/client/views.py", "/sale/invoice/views.py", "/sale/order/views.py"], "/security/urls.py": ["/security/views.py", "/security/business/views.py"], "/purchase/models.py": ["/SGAGRO/funciones2.py", "/catalog/models.py", "/security/models.py"], "/purchase/order/forms.py": ["/purchase/models.py", "/SGAGRO/funciones2.py"], "/sale/client/forms.py": ["/sale/models.py"], "/catalog/mark/forms.py": ["/catalog/models.py"], "/sale/models.py": ["/catalog/models.py", "/security/models.py", "/SGAGRO/funciones2.py"], "/purchase/order/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/purchase/order/forms.py", "/purchase/models.py"], "/catalog/category/views.py": ["/SGAGRO/funciones.py", "/catalog/models.py", "/catalog/category/forms.py"], "/security/business/views.py": ["/SGAGRO/funciones.py", "/security/models.py", "/security/business/forms.py"], "/catalog/urls.py": ["/catalog/category/views.py", "/catalog/product/views.py"], "/purchase/urls.py": ["/purchase/order/views.py"], "/catalog/product/forms.py": ["/catalog/models.py"], "/SGAGRO/funciones.py": ["/security/models.py", "/security/business/forms.py", "/sale/models.py", "/purchase/models.py"], "/purchase/admin.py": ["/purchase/models.py"], "/security/admin.py": ["/security/models.py"], "/sale/invoice/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/sale/invoice/forms.py", "/sale/models.py"], "/sale/admin.py": ["/sale/models.py"], "/catalog/models.py": ["/security/models.py"], "/purchase/provider/forms.py": ["/purchase/models.py"], "/security/business/forms.py": ["/security/models.py"], "/sale/invoice/forms.py": ["/sale/models.py", "/SGAGRO/funciones2.py"], "/sale/seller/forms.py": ["/sale/models.py"], "/sale/order/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/sale/models.py"], "/security/views.py": ["/SGAGRO/funciones.py", "/security/models.py", "/sale/models.py", "/catalog/models.py", 
"/security/forms.py", "/purchase/models.py"], "/catalog/product/views.py": ["/SGAGRO/funciones.py", "/catalog/models.py", "/catalog/product/forms.py"], "/catalog/category/forms.py": ["/catalog/models.py"], "/sale/client/views.py": ["/SGAGRO/funciones.py", "/sale/client/forms.py", "/sale/models.py"], "/catalog/admin.py": ["/catalog/models.py"]} |
61,987 | atorrese/SGAGRO | refs/heads/main | /catalog/urls.py | from django.urls import path
import catalog.mark.views as Mark
import catalog.category.views as Category
import catalog.product.views as Product
urlpatterns = [
    # Mark routes (list / create / update / delete; <pk> is the mark id)
    path(route='mark/', view= Mark.Index.as_view(),name='mark.index'),
    path(route='mark/create', view= Mark.Create.as_view(),name='mark.store'),
    path(route='mark/edit/<pk>', view= Mark.Update.as_view(),name='mark.update'),
    path(route='mark/delete/<pk>', view= Mark.Delete.as_view(),name='mark.delete'),
    # Category routes (same CRUD shape as marks)
    path(route='category/', view=Category.Index.as_view(), name='category.index'),
    path(route='category/create', view=Category.Create.as_view(), name='category.store'),
    path(route='category/edit/<pk>', view=Category.Update.as_view(), name='category.update'),
    path(route='category/delete/<pk>', view=Category.Delete.as_view(), name='category.delete'),
    # Product routes (CRUD plus a detail 'show' view)
    path(route='product/', view=Product.Index.as_view(), name='product.index'),
    path(route='product/create', view=Product.Create.as_view(), name='product.store'),
    path(route='product/show/<pk>', view=Product.Show.as_view(), name='product.show'),
    path(route='product/edit/<pk>', view=Product.Update.as_view(), name='product.update'),
    path(route='product/delete/<pk>', view=Product.Delete.as_view(), name='product.delete'),
] | {"/sale/urls.py": ["/sale/client/views.py", "/sale/invoice/views.py", "/sale/order/views.py"], "/security/urls.py": ["/security/views.py", "/security/business/views.py"], "/purchase/models.py": ["/SGAGRO/funciones2.py", "/catalog/models.py", "/security/models.py"], "/purchase/order/forms.py": ["/purchase/models.py", "/SGAGRO/funciones2.py"], "/sale/client/forms.py": ["/sale/models.py"], "/catalog/mark/forms.py": ["/catalog/models.py"], "/sale/models.py": ["/catalog/models.py", "/security/models.py", "/SGAGRO/funciones2.py"], "/purchase/order/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/purchase/order/forms.py", "/purchase/models.py"], "/catalog/category/views.py": ["/SGAGRO/funciones.py", "/catalog/models.py", "/catalog/category/forms.py"], "/security/business/views.py": ["/SGAGRO/funciones.py", "/security/models.py", "/security/business/forms.py"], "/catalog/urls.py": ["/catalog/category/views.py", "/catalog/product/views.py"], "/purchase/urls.py": ["/purchase/order/views.py"], "/catalog/product/forms.py": ["/catalog/models.py"], "/SGAGRO/funciones.py": ["/security/models.py", "/security/business/forms.py", "/sale/models.py", "/purchase/models.py"], "/purchase/admin.py": ["/purchase/models.py"], "/security/admin.py": ["/security/models.py"], "/sale/invoice/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/sale/invoice/forms.py", "/sale/models.py"], "/sale/admin.py": ["/sale/models.py"], "/catalog/models.py": ["/security/models.py"], "/purchase/provider/forms.py": ["/purchase/models.py"], "/security/business/forms.py": ["/security/models.py"], "/sale/invoice/forms.py": ["/sale/models.py", "/SGAGRO/funciones2.py"], "/sale/seller/forms.py": ["/sale/models.py"], "/sale/order/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/sale/models.py"], "/security/views.py": ["/SGAGRO/funciones.py", "/security/models.py", "/sale/models.py", "/catalog/models.py", 
"/security/forms.py", "/purchase/models.py"], "/catalog/product/views.py": ["/SGAGRO/funciones.py", "/catalog/models.py", "/catalog/product/forms.py"], "/catalog/category/forms.py": ["/catalog/models.py"], "/sale/client/views.py": ["/SGAGRO/funciones.py", "/sale/client/forms.py", "/sale/models.py"], "/catalog/admin.py": ["/catalog/models.py"]} |
61,988 | atorrese/SGAGRO | refs/heads/main | /security/migrations/0001_initial.py | # Generated by Django 3.1.3 on 2020-11-13 00:22
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial schema for the *security* app (auto-generated by Django 3.1.3).

    Creates Business, Module and GroupModule.  NOTE(review): generated
    migration — do not hand-edit field definitions; add a new migration
    for any schema change.
    """

    initial = True

    # Depends on django.contrib.auth so GroupModule.groups can M2M auth.Group.
    dependencies = [
        ('auth', '0012_alter_user_first_name_max_length'),
    ]

    operations = [
        migrations.CreateModel(
            name='Business',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=100)),
                ('alias', models.CharField(max_length=20)),
                ('description', models.CharField(blank=True, max_length=200)),
                # Company logo; a custom "required" message is shown when missing.
                ('icon', models.ImageField(error_messages={'required': 'Cargar Un Imagen Para El icono de la Empresa'}, upload_to='media/Business/icon/')),
            ],
            options={
                'verbose_name': 'Empresa',
                'verbose_name_plural': 'Empresas',
            },
        ),
        migrations.CreateModel(
            name='Module',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('url', models.CharField(max_length=100)),
                ('name', models.CharField(max_length=100)),
                ('icon', models.CharField(max_length=100)),
                ('description', models.CharField(max_length=100)),
                ('available', models.BooleanField(default=True)),
                # Display position used by the 'ordering' option below.
                ('order', models.IntegerField(default=0)),
            ],
            options={
                'verbose_name': 'Módulo',
                'verbose_name_plural': 'Módulos',
                'ordering': ('order',),
            },
        ),
        migrations.CreateModel(
            name='GroupModule',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=100)),
                # NOTE: field is spelled 'descripcion' (Spanish) — admin code
                # referencing it must use this exact name.
                ('descripcion', models.CharField(blank=True, max_length=200)),
                ('icon', models.CharField(blank=True, max_length=100, null=True)),
                ('priority', models.IntegerField(blank=True, null=True)),
                ('groups', models.ManyToManyField(to='auth.Group')),
                ('modules', models.ManyToManyField(to='security.Module')),
            ],
            options={
                'verbose_name': 'Grupo de Módulos',
                'verbose_name_plural': 'Grupos de Módulos',
                'ordering': ('priority',),
            },
        ),
    ]
| {"/sale/urls.py": ["/sale/client/views.py", "/sale/invoice/views.py", "/sale/order/views.py"], "/security/urls.py": ["/security/views.py", "/security/business/views.py"], "/purchase/models.py": ["/SGAGRO/funciones2.py", "/catalog/models.py", "/security/models.py"], "/purchase/order/forms.py": ["/purchase/models.py", "/SGAGRO/funciones2.py"], "/sale/client/forms.py": ["/sale/models.py"], "/catalog/mark/forms.py": ["/catalog/models.py"], "/sale/models.py": ["/catalog/models.py", "/security/models.py", "/SGAGRO/funciones2.py"], "/purchase/order/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/purchase/order/forms.py", "/purchase/models.py"], "/catalog/category/views.py": ["/SGAGRO/funciones.py", "/catalog/models.py", "/catalog/category/forms.py"], "/security/business/views.py": ["/SGAGRO/funciones.py", "/security/models.py", "/security/business/forms.py"], "/catalog/urls.py": ["/catalog/category/views.py", "/catalog/product/views.py"], "/purchase/urls.py": ["/purchase/order/views.py"], "/catalog/product/forms.py": ["/catalog/models.py"], "/SGAGRO/funciones.py": ["/security/models.py", "/security/business/forms.py", "/sale/models.py", "/purchase/models.py"], "/purchase/admin.py": ["/purchase/models.py"], "/security/admin.py": ["/security/models.py"], "/sale/invoice/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/sale/invoice/forms.py", "/sale/models.py"], "/sale/admin.py": ["/sale/models.py"], "/catalog/models.py": ["/security/models.py"], "/purchase/provider/forms.py": ["/purchase/models.py"], "/security/business/forms.py": ["/security/models.py"], "/sale/invoice/forms.py": ["/sale/models.py", "/SGAGRO/funciones2.py"], "/sale/seller/forms.py": ["/sale/models.py"], "/sale/order/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/sale/models.py"], "/security/views.py": ["/SGAGRO/funciones.py", "/security/models.py", "/sale/models.py", "/catalog/models.py", 
"/security/forms.py", "/purchase/models.py"], "/catalog/product/views.py": ["/SGAGRO/funciones.py", "/catalog/models.py", "/catalog/product/forms.py"], "/catalog/category/forms.py": ["/catalog/models.py"], "/sale/client/views.py": ["/SGAGRO/funciones.py", "/sale/client/forms.py", "/sale/models.py"], "/catalog/admin.py": ["/catalog/models.py"]} |
61,989 | atorrese/SGAGRO | refs/heads/main | /purchase/urls.py | from django.urls import path
import purchase.provider.views as Provider
#import sale.seller.views as Seller
import purchase.order.views as Order
# URL routes for the purchase app: provider CRUD and purchase-order CRUD.
# Route names ('provider.index', 'order.show', ...) are referenced from
# templates via {% url %}; keep them stable.
urlpatterns = [
    # Provider routes
    path(route='provider/', view= Provider.Index.as_view(),name='provider.index'),
    path(route='provider/create', view= Provider.Create.as_view(),name='provider.store'),
    path(route='provider/edit/<pk>', view= Provider.Update.as_view(),name='provider.update'),
    path(route='provider/delete/<pk>', view= Provider.Delete.as_view(),name='provider.delete'),
    # Order routes
    path(route='order/', view=Order.Index.as_view(), name='order.index'),
    path(route='order/create', view=Order.Create.as_view(), name='order.store'),
    path(route='order/edit/<pk>', view=Order.Update.as_view(), name='order.update'),
    path(route='order/show/<pk>', view=Order.Show.as_view(), name='order.show'),
    path(route='order/delete/<pk>', view=Order.Delete.as_view(), name='order.delete'),
] | {"/sale/urls.py": ["/sale/client/views.py", "/sale/invoice/views.py", "/sale/order/views.py"], "/security/urls.py": ["/security/views.py", "/security/business/views.py"], "/purchase/models.py": ["/SGAGRO/funciones2.py", "/catalog/models.py", "/security/models.py"], "/purchase/order/forms.py": ["/purchase/models.py", "/SGAGRO/funciones2.py"], "/sale/client/forms.py": ["/sale/models.py"], "/catalog/mark/forms.py": ["/catalog/models.py"], "/sale/models.py": ["/catalog/models.py", "/security/models.py", "/SGAGRO/funciones2.py"], "/purchase/order/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/purchase/order/forms.py", "/purchase/models.py"], "/catalog/category/views.py": ["/SGAGRO/funciones.py", "/catalog/models.py", "/catalog/category/forms.py"], "/security/business/views.py": ["/SGAGRO/funciones.py", "/security/models.py", "/security/business/forms.py"], "/catalog/urls.py": ["/catalog/category/views.py", "/catalog/product/views.py"], "/purchase/urls.py": ["/purchase/order/views.py"], "/catalog/product/forms.py": ["/catalog/models.py"], "/SGAGRO/funciones.py": ["/security/models.py", "/security/business/forms.py", "/sale/models.py", "/purchase/models.py"], "/purchase/admin.py": ["/purchase/models.py"], "/security/admin.py": ["/security/models.py"], "/sale/invoice/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/sale/invoice/forms.py", "/sale/models.py"], "/sale/admin.py": ["/sale/models.py"], "/catalog/models.py": ["/security/models.py"], "/purchase/provider/forms.py": ["/purchase/models.py"], "/security/business/forms.py": ["/security/models.py"], "/sale/invoice/forms.py": ["/sale/models.py", "/SGAGRO/funciones2.py"], "/sale/seller/forms.py": ["/sale/models.py"], "/sale/order/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/sale/models.py"], "/security/views.py": ["/SGAGRO/funciones.py", "/security/models.py", "/sale/models.py", "/catalog/models.py", 
"/security/forms.py", "/purchase/models.py"], "/catalog/product/views.py": ["/SGAGRO/funciones.py", "/catalog/models.py", "/catalog/product/forms.py"], "/catalog/category/forms.py": ["/catalog/models.py"], "/sale/client/views.py": ["/SGAGRO/funciones.py", "/sale/client/forms.py", "/sale/models.py"], "/catalog/admin.py": ["/catalog/models.py"]} |
61,990 | atorrese/SGAGRO | refs/heads/main | /catalog/migrations/0001_initial.py | # Generated by Django 3.1.3 on 2020-11-13 00:22
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial schema for the *catalog* app (auto-generated by Django 3.1.3).

    Creates Category, Mark and Product.  NOTE(review): generated migration —
    do not hand-edit; create a follow-up migration for schema changes.
    """

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Category',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
                # Soft-delete / active flag used by the forms' querysets.
                ('status', models.BooleanField(default=True)),
                ('Name', models.CharField(max_length=100, unique=True, verbose_name='Categoria')),
            ],
            options={
                'verbose_name': 'Categoria',
                'verbose_name_plural': 'Categorias',
                'ordering': ('Name',),
            },
        ),
        migrations.CreateModel(
            name='Mark',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
                ('status', models.BooleanField(default=True)),
                ('Name', models.CharField(max_length=100, unique=True, verbose_name='Marca')),
            ],
            options={
                'verbose_name': 'Marca',
                'verbose_name_plural': 'Marcas',
                'ordering': ('Name',),
            },
        ),
        migrations.CreateModel(
            name='Product',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
                ('status', models.BooleanField(default=True)),
                ('Name', models.CharField(max_length=100, unique=True, verbose_name='Producto')),
                # NOTE(review): a unique, length-capped description is unusual
                # for a TextField — confirm this constraint is intentional.
                ('Description', models.TextField(max_length=100, unique=True, verbose_name='Descripcion')),
                ('Cost', models.DecimalField(decimal_places=2, max_digits=19, verbose_name='Costo')),
                ('Price', models.DecimalField(decimal_places=2, max_digits=19, verbose_name='Precio')),
                ('Stock', models.IntegerField()),
                # NOTE: field name typo "Availabel" exists in the model too.
                ('Availabel', models.BooleanField(default=True)),
                ('CategoryId', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='catalog.category')),
                ('MarkId', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='catalog.mark')),
            ],
            options={
                'verbose_name': 'Producto',
                'verbose_name_plural': 'Productos',
                'ordering': ('Name',),
            },
        ),
    ]
| {"/sale/urls.py": ["/sale/client/views.py", "/sale/invoice/views.py", "/sale/order/views.py"], "/security/urls.py": ["/security/views.py", "/security/business/views.py"], "/purchase/models.py": ["/SGAGRO/funciones2.py", "/catalog/models.py", "/security/models.py"], "/purchase/order/forms.py": ["/purchase/models.py", "/SGAGRO/funciones2.py"], "/sale/client/forms.py": ["/sale/models.py"], "/catalog/mark/forms.py": ["/catalog/models.py"], "/sale/models.py": ["/catalog/models.py", "/security/models.py", "/SGAGRO/funciones2.py"], "/purchase/order/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/purchase/order/forms.py", "/purchase/models.py"], "/catalog/category/views.py": ["/SGAGRO/funciones.py", "/catalog/models.py", "/catalog/category/forms.py"], "/security/business/views.py": ["/SGAGRO/funciones.py", "/security/models.py", "/security/business/forms.py"], "/catalog/urls.py": ["/catalog/category/views.py", "/catalog/product/views.py"], "/purchase/urls.py": ["/purchase/order/views.py"], "/catalog/product/forms.py": ["/catalog/models.py"], "/SGAGRO/funciones.py": ["/security/models.py", "/security/business/forms.py", "/sale/models.py", "/purchase/models.py"], "/purchase/admin.py": ["/purchase/models.py"], "/security/admin.py": ["/security/models.py"], "/sale/invoice/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/sale/invoice/forms.py", "/sale/models.py"], "/sale/admin.py": ["/sale/models.py"], "/catalog/models.py": ["/security/models.py"], "/purchase/provider/forms.py": ["/purchase/models.py"], "/security/business/forms.py": ["/security/models.py"], "/sale/invoice/forms.py": ["/sale/models.py", "/SGAGRO/funciones2.py"], "/sale/seller/forms.py": ["/sale/models.py"], "/sale/order/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/sale/models.py"], "/security/views.py": ["/SGAGRO/funciones.py", "/security/models.py", "/sale/models.py", "/catalog/models.py", 
"/security/forms.py", "/purchase/models.py"], "/catalog/product/views.py": ["/SGAGRO/funciones.py", "/catalog/models.py", "/catalog/product/forms.py"], "/catalog/category/forms.py": ["/catalog/models.py"], "/sale/client/views.py": ["/SGAGRO/funciones.py", "/sale/client/forms.py", "/sale/models.py"], "/catalog/admin.py": ["/catalog/models.py"]} |
61,991 | atorrese/SGAGRO | refs/heads/main | /catalog/product/forms.py | from django import forms
#App
from catalog.models import Product, Category, Mark
class ProductForm(forms.ModelForm):
    """ModelForm for catalog Products.

    Restricts the Category/Mark choices to active rows (status=True) and
    enforces a minimum name length.  The Description field is left to be
    generated from the model (its TextField already renders as a Textarea
    and keeps the model's max_length/unique validation).
    """
    CategoryId = forms.ModelChoiceField(required=False,queryset= Category.objects.filter(status=True))
    MarkId = forms.ModelChoiceField(required=False,queryset= Mark.objects.filter(status=True))
    Name = forms.CharField(min_length=4 )
    # BUG fix: the original `Description = forms.Textarea()` assigned a
    # *widget* instance, not a form Field, so Django's form metaclass ignored
    # it entirely — removed as dead/misleading code.
    Cost= forms.DecimalField()
    Price = forms.DecimalField()
    Stock = forms.IntegerField()
    # BUG fix: a bare BooleanField is required=True, which made the whole
    # form invalid whenever the checkbox was left unchecked; required=False
    # lets "not available" products be saved.  (The "Availabel" typo is kept
    # deliberately — it matches the model column name.)
    Availabel = forms.BooleanField(required=False)

    def clean(self):
        """Return the validated data unchanged (hook kept for future rules)."""
        cleaned_data = super(ProductForm,self).clean()
        return cleaned_data

    class Meta:
        model = Product
        fields = ['Name','Description','Cost','Price','Stock','Availabel','CategoryId','MarkId',]
["/SGAGRO/funciones.py", "/security/models.py", "/sale/models.py", "/catalog/models.py", "/security/forms.py", "/purchase/models.py"], "/catalog/product/views.py": ["/SGAGRO/funciones.py", "/catalog/models.py", "/catalog/product/forms.py"], "/catalog/category/forms.py": ["/catalog/models.py"], "/sale/client/views.py": ["/SGAGRO/funciones.py", "/sale/client/forms.py", "/sale/models.py"], "/catalog/admin.py": ["/catalog/models.py"]} |
61,992 | atorrese/SGAGRO | refs/heads/main | /SGAGRO/funciones.py |
from datetime import datetime
from django.db.models import Sum
#from tablib.formats import available
from security.models import GroupModule,Business
from security.business.forms import BusinessForm
from sale.models import DetailInvoice,Invoice
from purchase.models import Order
def Add_Data(context):
    """Inject context values shared by every view's template.

    Currently only adds today's date under the ``'now'`` key.  The
    GroupModule/Business lookups that used to live here were already
    disabled; the commented-out dead code has been removed.

    Args:
        context: template context dict, mutated in place.
    """
    context['now'] = datetime.now().date()
| {"/sale/urls.py": ["/sale/client/views.py", "/sale/invoice/views.py", "/sale/order/views.py"], "/security/urls.py": ["/security/views.py", "/security/business/views.py"], "/purchase/models.py": ["/SGAGRO/funciones2.py", "/catalog/models.py", "/security/models.py"], "/purchase/order/forms.py": ["/purchase/models.py", "/SGAGRO/funciones2.py"], "/sale/client/forms.py": ["/sale/models.py"], "/catalog/mark/forms.py": ["/catalog/models.py"], "/sale/models.py": ["/catalog/models.py", "/security/models.py", "/SGAGRO/funciones2.py"], "/purchase/order/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/purchase/order/forms.py", "/purchase/models.py"], "/catalog/category/views.py": ["/SGAGRO/funciones.py", "/catalog/models.py", "/catalog/category/forms.py"], "/security/business/views.py": ["/SGAGRO/funciones.py", "/security/models.py", "/security/business/forms.py"], "/catalog/urls.py": ["/catalog/category/views.py", "/catalog/product/views.py"], "/purchase/urls.py": ["/purchase/order/views.py"], "/catalog/product/forms.py": ["/catalog/models.py"], "/SGAGRO/funciones.py": ["/security/models.py", "/security/business/forms.py", "/sale/models.py", "/purchase/models.py"], "/purchase/admin.py": ["/purchase/models.py"], "/security/admin.py": ["/security/models.py"], "/sale/invoice/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/sale/invoice/forms.py", "/sale/models.py"], "/sale/admin.py": ["/sale/models.py"], "/catalog/models.py": ["/security/models.py"], "/purchase/provider/forms.py": ["/purchase/models.py"], "/security/business/forms.py": ["/security/models.py"], "/sale/invoice/forms.py": ["/sale/models.py", "/SGAGRO/funciones2.py"], "/sale/seller/forms.py": ["/sale/models.py"], "/sale/order/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/sale/models.py"], "/security/views.py": ["/SGAGRO/funciones.py", "/security/models.py", "/sale/models.py", "/catalog/models.py", 
"/security/forms.py", "/purchase/models.py"], "/catalog/product/views.py": ["/SGAGRO/funciones.py", "/catalog/models.py", "/catalog/product/forms.py"], "/catalog/category/forms.py": ["/catalog/models.py"], "/sale/client/views.py": ["/SGAGRO/funciones.py", "/sale/client/forms.py", "/sale/models.py"], "/catalog/admin.py": ["/catalog/models.py"]} |
61,993 | atorrese/SGAGRO | refs/heads/main | /purchase/admin.py | from django.contrib import admin
from purchase.models import Provider
from import_export import resources
from import_export.admin import ImportExportModelAdmin
# Provider
class ProviderResource(resources.ModelResource):
    """django-import-export resource mapping for Provider rows."""
    class Meta:
        model = Provider
class ProviderAdmin(ImportExportModelAdmin, admin.ModelAdmin):
    """Admin with import/export support for Provider.

    NOTE(review): 'BussinessName' spelling is assumed to match the model
    field name — confirm against purchase/models.py.
    """
    search_fields = ['BussinessName','Ruc','Phone','Email']
    list_display = ('BussinessName','Ruc','Phone','Email',)
    resource_class = ProviderResource

# Register Provider with its customised admin.
admin.site.register(Provider,ProviderAdmin)
| {"/sale/urls.py": ["/sale/client/views.py", "/sale/invoice/views.py", "/sale/order/views.py"], "/security/urls.py": ["/security/views.py", "/security/business/views.py"], "/purchase/models.py": ["/SGAGRO/funciones2.py", "/catalog/models.py", "/security/models.py"], "/purchase/order/forms.py": ["/purchase/models.py", "/SGAGRO/funciones2.py"], "/sale/client/forms.py": ["/sale/models.py"], "/catalog/mark/forms.py": ["/catalog/models.py"], "/sale/models.py": ["/catalog/models.py", "/security/models.py", "/SGAGRO/funciones2.py"], "/purchase/order/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/purchase/order/forms.py", "/purchase/models.py"], "/catalog/category/views.py": ["/SGAGRO/funciones.py", "/catalog/models.py", "/catalog/category/forms.py"], "/security/business/views.py": ["/SGAGRO/funciones.py", "/security/models.py", "/security/business/forms.py"], "/catalog/urls.py": ["/catalog/category/views.py", "/catalog/product/views.py"], "/purchase/urls.py": ["/purchase/order/views.py"], "/catalog/product/forms.py": ["/catalog/models.py"], "/SGAGRO/funciones.py": ["/security/models.py", "/security/business/forms.py", "/sale/models.py", "/purchase/models.py"], "/purchase/admin.py": ["/purchase/models.py"], "/security/admin.py": ["/security/models.py"], "/sale/invoice/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/sale/invoice/forms.py", "/sale/models.py"], "/sale/admin.py": ["/sale/models.py"], "/catalog/models.py": ["/security/models.py"], "/purchase/provider/forms.py": ["/purchase/models.py"], "/security/business/forms.py": ["/security/models.py"], "/sale/invoice/forms.py": ["/sale/models.py", "/SGAGRO/funciones2.py"], "/sale/seller/forms.py": ["/sale/models.py"], "/sale/order/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/sale/models.py"], "/security/views.py": ["/SGAGRO/funciones.py", "/security/models.py", "/sale/models.py", "/catalog/models.py", 
"/security/forms.py", "/purchase/models.py"], "/catalog/product/views.py": ["/SGAGRO/funciones.py", "/catalog/models.py", "/catalog/product/forms.py"], "/catalog/category/forms.py": ["/catalog/models.py"], "/sale/client/views.py": ["/SGAGRO/funciones.py", "/sale/client/forms.py", "/sale/models.py"], "/catalog/admin.py": ["/catalog/models.py"]} |
61,994 | atorrese/SGAGRO | refs/heads/main | /security/admin.py | from django.contrib import admin
from django.contrib.auth.models import Group
from security.models import GroupModule,Module,Business
from import_export import resources
from import_export.admin import ImportExportModelAdmin
# Business
class BusinessResource(resources.ModelResource):
    """django-import-export resource mapping for Business rows."""
    class Meta:
        model= Business
class BusinessAdmin(ImportExportModelAdmin,admin.ModelAdmin):
    """Admin with import/export support for Business.

    NOTE(review): list_display entry 'Icon' does not match the model field
    'icon' (security 0001_initial) — unless the model defines an ``Icon``
    attribute/method this fails Django's admin checks (E108); confirm.
    """
    search_fields = ['name','description','alias']
    list_display = ('name','description','alias','Icon')
    resource_class = BusinessResource
# Module
class ModuleResource(resources.ModelResource):
    """django-import-export resource mapping for Module rows."""
    class Meta:
        model= Module
class ModuleAdmin(ImportExportModelAdmin,admin.ModelAdmin):
    """Admin with import/export support for Module (navigation entries)."""
    search_fields = ['name','description']
    list_display = ('name','description','url','icon','order','available')
    resource_class = ModuleResource
# GroupModule
class GroupModuleResource(resources.ModelResource):
    """django-import-export resource mapping for GroupModule rows."""
    class Meta:
        model= GroupModule
class GroupModuleAdmin(ImportExportModelAdmin,admin.ModelAdmin):
    """Admin for GroupModule with readable group/module name columns.

    BUG fix: search_fields referenced 'description', but the model field is
    spelled 'descripcion' (security 0001_initial) — searching would raise a
    FieldError at runtime.
    """
    search_fields = ['name', 'descripcion']
    list_display = ('name', 'descripcion','icon','get_groups','get_modules', 'priority')
    resource_class = GroupModuleResource

    def get_groups(self, obj):
        """Newline-joined names of the auth groups linked to *obj*."""
        return "\n".join([g.name for g in obj.groups.all()])
    get_groups.short_description = 'Groups'

    def get_modules(self, obj):
        """Newline-joined names of the modules linked to *obj*."""
        return "\n".join([m.name for m in obj.modules.all()])
    get_modules.short_description = 'Modules'
# Group
'''class GroupResource(resources.ModelResource):
class Meta:
model = Group
class GroupAdmin(ImportExportModelAdmin, admin.ModelAdmin):
search_fields = ['name']
list_display = ('name',)
resource_class = GroupResource
admin.site.register(Group,GroupAdmin)'''
# Register the security models; the alternative Group admin above is
# intentionally left disabled (commented out).
admin.site.register(Business,BusinessAdmin)
admin.site.register(Module,ModuleAdmin)
admin.site.register(GroupModule,GroupModuleAdmin)
| {"/sale/urls.py": ["/sale/client/views.py", "/sale/invoice/views.py", "/sale/order/views.py"], "/security/urls.py": ["/security/views.py", "/security/business/views.py"], "/purchase/models.py": ["/SGAGRO/funciones2.py", "/catalog/models.py", "/security/models.py"], "/purchase/order/forms.py": ["/purchase/models.py", "/SGAGRO/funciones2.py"], "/sale/client/forms.py": ["/sale/models.py"], "/catalog/mark/forms.py": ["/catalog/models.py"], "/sale/models.py": ["/catalog/models.py", "/security/models.py", "/SGAGRO/funciones2.py"], "/purchase/order/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/purchase/order/forms.py", "/purchase/models.py"], "/catalog/category/views.py": ["/SGAGRO/funciones.py", "/catalog/models.py", "/catalog/category/forms.py"], "/security/business/views.py": ["/SGAGRO/funciones.py", "/security/models.py", "/security/business/forms.py"], "/catalog/urls.py": ["/catalog/category/views.py", "/catalog/product/views.py"], "/purchase/urls.py": ["/purchase/order/views.py"], "/catalog/product/forms.py": ["/catalog/models.py"], "/SGAGRO/funciones.py": ["/security/models.py", "/security/business/forms.py", "/sale/models.py", "/purchase/models.py"], "/purchase/admin.py": ["/purchase/models.py"], "/security/admin.py": ["/security/models.py"], "/sale/invoice/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/sale/invoice/forms.py", "/sale/models.py"], "/sale/admin.py": ["/sale/models.py"], "/catalog/models.py": ["/security/models.py"], "/purchase/provider/forms.py": ["/purchase/models.py"], "/security/business/forms.py": ["/security/models.py"], "/sale/invoice/forms.py": ["/sale/models.py", "/SGAGRO/funciones2.py"], "/sale/seller/forms.py": ["/sale/models.py"], "/sale/order/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/sale/models.py"], "/security/views.py": ["/SGAGRO/funciones.py", "/security/models.py", "/sale/models.py", "/catalog/models.py", 
"/security/forms.py", "/purchase/models.py"], "/catalog/product/views.py": ["/SGAGRO/funciones.py", "/catalog/models.py", "/catalog/product/forms.py"], "/catalog/category/forms.py": ["/catalog/models.py"], "/sale/client/views.py": ["/SGAGRO/funciones.py", "/sale/client/forms.py", "/sale/models.py"], "/catalog/admin.py": ["/catalog/models.py"]} |
61,995 | atorrese/SGAGRO | refs/heads/main | /sale/invoice/views.py | """ Invoice Views """
import json
# Django
from decimal import Decimal
from django.utils.timezone import datetime
from django.db.models import Q
from django.http import JsonResponse
from django.urls import reverse_lazy
from django.contrib.auth.mixins import LoginRequiredMixin
from django.views.generic import ListView, CreateView, UpdateView, DeleteView, DetailView
# App
from SGAGRO.funciones import Add_Data
from SGAGRO.funciones2 import STATUS_PAY,METHOD_PAYEMENT
from catalog.models import Product
from utils.mixins import OldDataMixin
from sale.invoice.forms import InvoiceForm
from sale.models import Invoice, Client, Seller, DetailInvoice
#from utils.conexion import Info
class Index(LoginRequiredMixin, ListView, OldDataMixin):
    """Paginated invoice list filtered by the remembered search term."""
    template_name = 'sale/invoices/index.html'
    model = Invoice
    paginate_by = 15
    context_object_name = 'invoices'
    attributes = {'search': ''}

    def get_queryset(self):
        """Invoices whose client or seller name matches the search term,
        restricted to status 2/3, newest first."""
        term = self.get_old_data('search')
        matches_person = (
            Q(ClientId__Names__icontains=term)
            | Q(ClientId__SurNames__icontains=term)
            | Q(SellerId__Names__icontains=term)
            | Q(SellerId__SurNames__icontains=term)
        )
        ordered = Invoice.objects.filter(matches_person).order_by('-created_at')
        return ordered.filter(StatusInvoice__in=[2, 3])

    def get_context_data(self, *, object_list=None, **kwargs):
        context = super(Index, self).get_context_data(**kwargs)
        Add_Data(context)
        return self.get_all_olds_datas(context=context, attributes=self.attributes)
class Show(LoginRequiredMixin, DetailView):
    """Render an invoice detail page; AJAX requests get the data as JSON."""
    template_name = 'sale/invoices/show.html'
    model = Invoice
    context_object_name = 'Invoice'

    def get(self, request, *args, **kwargs):
        # Render the normal HTML response first so it is available as a
        # fallback for both non-AJAX requests and serialization failures.
        response = super(Show, self).get(request, *args, **kwargs)
        try:
            if self.request.is_ajax():
                invoice = self.get_object()  # hoisted: was fetched 5 times
                detail = [{
                    'quantity': i.Quantity,
                    'price': i.Price,
                    'total': i.Total,
                    'product': {
                        'name': i.ProductId.Name,
                        'mark': i.ProductId.MarkId.Name,
                        'category': i.ProductId.CategoryId.Name,
                    },
                } for i in invoice.get_Details()]
                payload = {
                    'client': invoice.ClientId.get_Names_SurNames(),
                    'seller': invoice.SellerId.get_Names_SurNames(),
                    'date': invoice.DateInvoice.strftime("%d-%m-%Y"),
                    'total': invoice.TotalPay,
                    'detail': detail,
                }
                return JsonResponse({'resp':'ok','invoice':payload})
        except Exception as e:
            # BUG fix: the original returned self.get_template_names() here —
            # a list of strings, not an HttpResponse, which breaks the view.
            # Log and fall through to the rendered HTML response instead.
            print(e)
        return response

    def get_context_data(self, **kwargs):
        context = super(Show, self).get_context_data(**kwargs)
        return context
class Create(LoginRequiredMixin, CreateView, OldDataMixin):
    """Create an Invoice plus its DetailInvoice rows and deduct stock.

    The line items arrive in POST['details'] as a JSON array of
    {producto, cantidad, precio, total} dicts.
    """
    model = Invoice
    template_name = 'sale/invoices/create.html'
    form_class = InvoiceForm
    success_url = reverse_lazy('sale:invoice.index')
    attributes = {
        'DateInvoice':datetime.now().strftime('%Y-%m-%d'),
    }

    def form_valid(self, form):
        new_invoice = form.save(commit=False)
        # ISO week number of the invoice date, used for weekly reporting.
        new_invoice.WeekInvoice= new_invoice.DateInvoice.isocalendar()[1]
        new_invoice.StatusInvoice=3
        new_invoice.Num_Porcent_Des= 0
        new_invoice.save()
        # Create one DetailInvoice per posted line and deduct its stock.
        # NOTE(review): unlike Update.form_valid, Discount is not set here —
        # relies on a model default; confirm.  Also no transaction wraps the
        # loop, so a mid-loop failure leaves stock partially deducted.
        details = json.loads(self.request.POST['details'])
        for d in details:
            product = Product.objects.get(pk=d['producto'])
            detalle = DetailInvoice(
                ProductId=product,
                InvoiceId=new_invoice,
                Quantity=int(d['cantidad']),
                Price=Decimal(d['precio']),
                Cost=product.Cost,
                Utility=Decimal(d['precio'])-Decimal(product.Cost),
                Total=Decimal(d['total'])
            )
            detalle.save()
            product.Stock -= int(d['cantidad'])
            product.save()
        # Debug print statements removed (leftover development output).
        return super(Create, self).form_valid(form)

    def form_invalid(self, form):
        # Errors are re-rendered by the form; debug print removed.
        return super(Create, self).form_invalid(form)

    def get_context_data(self, **kwargs):
        context = super(Create, self).get_context_data(**kwargs)
        Add_Data(context)
        context['old_client'] = self.post_old_data('ClientId')
        context['clients'] = Client.objects.filter(status=True)
        context['old_seller'] = self.post_old_data('SellerId')
        context['sellers'] = Seller.objects.filter(status=True)
        context['products'] = Product.objects.filter(status=True)
        return self.post_all_olds_datas(context=context, attributes=self.attributes)
class Update(LoginRequiredMixin, UpdateView, OldDataMixin):
    """Update an Invoice: replace its detail rows and re-sync product stock."""
    model = Invoice
    template_name = 'sale/invoices/edit.html'
    form_class = InvoiceForm
    success_url = reverse_lazy('sale:invoice.index')

    def get_attributes(self):
        # Pre-fill the date picker with the invoice's current date.
        return {
            'DateInvoice': self.get_object().DateInvoice.strftime('%Y-%m-%d')
        }

    def form_valid(self, form):
        invoice = form.save(commit=False)
        invoice.WeekInvoice = invoice.DateInvoice.isocalendar()[1]
        invoice.save()
        # Delete the old detail rows, returning their quantities to stock.
        DetailsInvoiceOld= DetailInvoice.objects.filter(InvoiceId=invoice)
        for detailInvoice in DetailsInvoiceOld:
            detailInvoice.ProductId.Stock += detailInvoice.Quantity
            detailInvoice.ProductId.save()
            detailInvoice.delete()
        # Recreate the detail rows posted as JSON, deducting stock again.
        # NOTE(review): no transaction wraps this delete/recreate sequence —
        # a mid-way failure leaves stock inconsistent; consider atomic().
        details = json.loads(self.request.POST['details'])
        for d in details:
            product = Product.objects.get(pk=d['producto'])
            detalle = DetailInvoice(
                ProductId=product,
                InvoiceId=invoice,
                Quantity=int(d['cantidad']),
                Price=Decimal(d['precio']),
                Cost=product.Cost,
                Utility=Decimal(d['precio'])-Decimal(product.Cost),
                Total=Decimal(d['total']),
                Discount = 0.0
            )
            detalle.save()
            detalle.ProductId.Stock -= detalle.Quantity
            detalle.ProductId.save()
        return super(Update, self).form_valid(form)

    def form_invalid(self, form):
        # Errors are re-rendered by the form; debug print removed.
        return super(Update, self).form_invalid(form)

    def get_context_data(self, **kwargs):
        context = super(Update, self).get_context_data(**kwargs)
        Add_Data(context)
        context['old_client'] = self.post_old_data('ClientId', self.get_object().ClientId.pk)
        context['clients'] = Client.objects.filter(status=True)
        context['old_seller'] = self.post_old_data('SellerId', self.get_object().SellerId.pk)
        context['sellers'] = Seller.objects.filter(status=True)
        context['DetailsInvoice']= DetailInvoice.objects.filter(InvoiceId=self.get_object().pk)
        context['products'] = Product.objects.filter(status=True)
        # BUG fix: was `self.object.get_object().TotalPay` — the Invoice
        # instance has no get_object(), so this raised AttributeError.
        context['TotalPay'] = self.get_object().TotalPay
        return self.post_all_olds_datas(context=context, attributes=self.get_attributes())
class Delete(LoginRequiredMixin, DeleteView):
    """Delete an Invoice via AJAX (DELETE only).

    NOTE(review): currently broken / work-in-progress — ``JsonResponse(data)``
    is handed a model instance, not a dict, so this raises TypeError, and the
    invoice is never actually deleted.  The string literal below sketches the
    intended pay-aware behaviour; confirm and finish it before relying on
    this endpoint.
    """
    model = Invoice
    http_method_names = ['delete']
    def delete(self, request, *args, **kwargs):
        # BUG (flagged, not fixed): `data` is the Invoice instance, which
        # JsonResponse cannot serialize; the spec'd dict responses are in the
        # dead string below.
        data = invoice =self.get_object()
        return JsonResponse(data)
        # Unreachable string literal documenting the intended logic:
        """ if invoice.PaymentMethod == 1:
    if not invoice.have_pays():
    data = {'status': True, 'message': '¡El Registro se eliminado correctamente!'}
    #invoice.delete_Details()
    else:
    data = {'status': False, 'message': '¡El registro no se puede eliminar ya que tiene pagos asignados!'}
    else:
    data = {'status': True, 'message': '¡El Registro se eliminado correctamente!'}
    #invoice.delete_Details()
    """
| {"/sale/urls.py": ["/sale/client/views.py", "/sale/invoice/views.py", "/sale/order/views.py"], "/security/urls.py": ["/security/views.py", "/security/business/views.py"], "/purchase/models.py": ["/SGAGRO/funciones2.py", "/catalog/models.py", "/security/models.py"], "/purchase/order/forms.py": ["/purchase/models.py", "/SGAGRO/funciones2.py"], "/sale/client/forms.py": ["/sale/models.py"], "/catalog/mark/forms.py": ["/catalog/models.py"], "/sale/models.py": ["/catalog/models.py", "/security/models.py", "/SGAGRO/funciones2.py"], "/purchase/order/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/purchase/order/forms.py", "/purchase/models.py"], "/catalog/category/views.py": ["/SGAGRO/funciones.py", "/catalog/models.py", "/catalog/category/forms.py"], "/security/business/views.py": ["/SGAGRO/funciones.py", "/security/models.py", "/security/business/forms.py"], "/catalog/urls.py": ["/catalog/category/views.py", "/catalog/product/views.py"], "/purchase/urls.py": ["/purchase/order/views.py"], "/catalog/product/forms.py": ["/catalog/models.py"], "/SGAGRO/funciones.py": ["/security/models.py", "/security/business/forms.py", "/sale/models.py", "/purchase/models.py"], "/purchase/admin.py": ["/purchase/models.py"], "/security/admin.py": ["/security/models.py"], "/sale/invoice/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/sale/invoice/forms.py", "/sale/models.py"], "/sale/admin.py": ["/sale/models.py"], "/catalog/models.py": ["/security/models.py"], "/purchase/provider/forms.py": ["/purchase/models.py"], "/security/business/forms.py": ["/security/models.py"], "/sale/invoice/forms.py": ["/sale/models.py", "/SGAGRO/funciones2.py"], "/sale/seller/forms.py": ["/sale/models.py"], "/sale/order/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/sale/models.py"], "/security/views.py": ["/SGAGRO/funciones.py", "/security/models.py", "/sale/models.py", "/catalog/models.py", 
"/security/forms.py", "/purchase/models.py"], "/catalog/product/views.py": ["/SGAGRO/funciones.py", "/catalog/models.py", "/catalog/product/forms.py"], "/catalog/category/forms.py": ["/catalog/models.py"], "/sale/client/views.py": ["/SGAGRO/funciones.py", "/sale/client/forms.py", "/sale/models.py"], "/catalog/admin.py": ["/catalog/models.py"]} |
61,996 | atorrese/SGAGRO | refs/heads/main | /sale/admin.py | from django.contrib import admin
from import_export import resources
from import_export.admin import ImportExportModelAdmin
# Register your models here.
from sale.models import Client, Seller
# Client
class ClientResource(resources.ModelResource):
    """django-import-export resource describing how Client rows are imported/exported."""
    class Meta:
        model = Client
class ClientAdmin(ImportExportModelAdmin, admin.ModelAdmin):
    """Admin page for Client with import/export support via ClientResource."""
    search_fields = ['Names','SurNames','City','Address','Email']
    list_display = ('Names','SurNames','City','Address','Phone','Email',)
    resource_class = ClientResource
# Seller
class SellerResource(resources.ModelResource):
    """django-import-export resource describing how Seller rows are imported/exported."""
    class Meta:
        model = Seller
class SellerAdmin(ImportExportModelAdmin, admin.ModelAdmin):
    """Admin page for Seller with import/export support via SellerResource."""
    search_fields = ['Names','SurNames','IdentificationCard','Birthdate','City','Address','Email']
    list_display = ('Names','SurNames','IdentificationCard','Birthdate','City','Address','Phone','Email',)
    resource_class = SellerResource
# Supervisor
# Attach the admin configurations defined above to the default admin site.
admin.site.register(Client,ClientAdmin)
admin.site.register(Seller,SellerAdmin)
| {"/sale/urls.py": ["/sale/client/views.py", "/sale/invoice/views.py", "/sale/order/views.py"], "/security/urls.py": ["/security/views.py", "/security/business/views.py"], "/purchase/models.py": ["/SGAGRO/funciones2.py", "/catalog/models.py", "/security/models.py"], "/purchase/order/forms.py": ["/purchase/models.py", "/SGAGRO/funciones2.py"], "/sale/client/forms.py": ["/sale/models.py"], "/catalog/mark/forms.py": ["/catalog/models.py"], "/sale/models.py": ["/catalog/models.py", "/security/models.py", "/SGAGRO/funciones2.py"], "/purchase/order/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/purchase/order/forms.py", "/purchase/models.py"], "/catalog/category/views.py": ["/SGAGRO/funciones.py", "/catalog/models.py", "/catalog/category/forms.py"], "/security/business/views.py": ["/SGAGRO/funciones.py", "/security/models.py", "/security/business/forms.py"], "/catalog/urls.py": ["/catalog/category/views.py", "/catalog/product/views.py"], "/purchase/urls.py": ["/purchase/order/views.py"], "/catalog/product/forms.py": ["/catalog/models.py"], "/SGAGRO/funciones.py": ["/security/models.py", "/security/business/forms.py", "/sale/models.py", "/purchase/models.py"], "/purchase/admin.py": ["/purchase/models.py"], "/security/admin.py": ["/security/models.py"], "/sale/invoice/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/sale/invoice/forms.py", "/sale/models.py"], "/sale/admin.py": ["/sale/models.py"], "/catalog/models.py": ["/security/models.py"], "/purchase/provider/forms.py": ["/purchase/models.py"], "/security/business/forms.py": ["/security/models.py"], "/sale/invoice/forms.py": ["/sale/models.py", "/SGAGRO/funciones2.py"], "/sale/seller/forms.py": ["/sale/models.py"], "/sale/order/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/sale/models.py"], "/security/views.py": ["/SGAGRO/funciones.py", "/security/models.py", "/sale/models.py", "/catalog/models.py", 
"/security/forms.py", "/purchase/models.py"], "/catalog/product/views.py": ["/SGAGRO/funciones.py", "/catalog/models.py", "/catalog/product/forms.py"], "/catalog/category/forms.py": ["/catalog/models.py"], "/sale/client/views.py": ["/SGAGRO/funciones.py", "/sale/client/forms.py", "/sale/models.py"], "/catalog/admin.py": ["/catalog/models.py"]} |
61,997 | atorrese/SGAGRO | refs/heads/main | /security/forms.py | from django import forms
from django.contrib.auth.models import User
class RegisterForm(forms.ModelForm):
    """User sign-up form backed by django.contrib.auth's User model.

    All widgets carry the Bootstrap 'form-control' CSS class.
    """
    first_name = forms.CharField(label='Nombres',widget=forms.TextInput(attrs={'class':'form-control'}))
    last_name = forms.CharField(label='Apellidos',widget=forms.TextInput(attrs={'class':'form-control'}))
    username = forms.CharField( label='Nombre Usuario',widget=forms.TextInput(attrs={'class':'form-control'}))
    email = forms.EmailField(label='Correo Electronico',widget=forms.TextInput(attrs={'class':'form-control'}))
    # NOTE(review): plain CharField -- ModelForm.save() alone would store the
    # password unhashed; verify the view calls user.set_password() before save.
    password = forms.CharField(label='Contraseña',widget=forms.PasswordInput(attrs={'class':'form-control'}))
    def clean(self):
        # No extra cross-field validation yet; returns the parent's cleaned data.
        cleaned_data = super(RegisterForm, self).clean()
        return cleaned_data
    class Meta:
        model= User
fields = ('first_name','last_name','username','email','password',) | {"/sale/urls.py": ["/sale/client/views.py", "/sale/invoice/views.py", "/sale/order/views.py"], "/security/urls.py": ["/security/views.py", "/security/business/views.py"], "/purchase/models.py": ["/SGAGRO/funciones2.py", "/catalog/models.py", "/security/models.py"], "/purchase/order/forms.py": ["/purchase/models.py", "/SGAGRO/funciones2.py"], "/sale/client/forms.py": ["/sale/models.py"], "/catalog/mark/forms.py": ["/catalog/models.py"], "/sale/models.py": ["/catalog/models.py", "/security/models.py", "/SGAGRO/funciones2.py"], "/purchase/order/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/purchase/order/forms.py", "/purchase/models.py"], "/catalog/category/views.py": ["/SGAGRO/funciones.py", "/catalog/models.py", "/catalog/category/forms.py"], "/security/business/views.py": ["/SGAGRO/funciones.py", "/security/models.py", "/security/business/forms.py"], "/catalog/urls.py": ["/catalog/category/views.py", "/catalog/product/views.py"], "/purchase/urls.py": ["/purchase/order/views.py"], "/catalog/product/forms.py": ["/catalog/models.py"], "/SGAGRO/funciones.py": ["/security/models.py", "/security/business/forms.py", "/sale/models.py", "/purchase/models.py"], "/purchase/admin.py": ["/purchase/models.py"], "/security/admin.py": ["/security/models.py"], "/sale/invoice/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/sale/invoice/forms.py", "/sale/models.py"], "/sale/admin.py": ["/sale/models.py"], "/catalog/models.py": ["/security/models.py"], "/purchase/provider/forms.py": ["/purchase/models.py"], "/security/business/forms.py": ["/security/models.py"], "/sale/invoice/forms.py": ["/sale/models.py", "/SGAGRO/funciones2.py"], "/sale/seller/forms.py": ["/sale/models.py"], "/sale/order/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/sale/models.py"], "/security/views.py": ["/SGAGRO/funciones.py", 
"/security/models.py", "/sale/models.py", "/catalog/models.py", "/security/forms.py", "/purchase/models.py"], "/catalog/product/views.py": ["/SGAGRO/funciones.py", "/catalog/models.py", "/catalog/product/forms.py"], "/catalog/category/forms.py": ["/catalog/models.py"], "/sale/client/views.py": ["/SGAGRO/funciones.py", "/sale/client/forms.py", "/sale/models.py"], "/catalog/admin.py": ["/catalog/models.py"]} |
61,998 | atorrese/SGAGRO | refs/heads/main | /catalog/models.py | from django.db import models
from security.models import ModelBase
class Mark(ModelBase):
    """Product brand catalog entry, identified by a unique name.

    Inherits audit fields (created_at/updated_at/status) from ModelBase.
    """

    Name = models.CharField(verbose_name='Marca', max_length=100, unique=True)

    def __str__(self):
        """Render the brand by its name (used in admin and form choices)."""
        return '{}'.format(self.Name)

    class Meta:
        verbose_name = 'Marca'
        verbose_name_plural = 'Marcas'
        ordering = ('Name',)
class Category(ModelBase):
    """Product category catalog entry, identified by a unique name.

    Inherits audit fields (created_at/updated_at/status) from ModelBase.
    """

    Name = models.CharField(verbose_name='Categoria', max_length=100, unique=True)

    def __str__(self):
        """Render the category by its name (used in admin and form choices)."""
        return '{}'.format(self.Name)

    class Meta:
        verbose_name = 'Categoria'
        verbose_name_plural = 'Categorias'
        ordering = ('Name',)
class Product(ModelBase):
    """Sellable product; belongs to one Category and one Mark (brand)."""
    # PROTECT: block deleting a category/brand that still has products.
    CategoryId = models.ForeignKey(Category,on_delete=models.PROTECT)
    MarkId = models.ForeignKey(Mark, on_delete=models.PROTECT)
    Name = models.CharField(verbose_name='Producto', max_length=100, unique=True)
    # NOTE(review): max_length/unique on a TextField are unusual -- Django does
    # not enforce max_length at the database level for TextField; confirm intent.
    Description= models.TextField(verbose_name='Descripcion', max_length=100, unique=True)
    Cost = models.DecimalField(verbose_name='Costo',max_digits= 19, decimal_places=2)
    Price = models.DecimalField(verbose_name='Precio',max_digits= 19, decimal_places=2)
    Stock = models.IntegerField()  # units currently on hand
    # NOTE(review): "Availabel" looks like a typo for "Available"; renaming it
    # requires a migration plus updating every caller, so it is left as-is.
    Availabel = models.BooleanField(default=True)
    def __str__(self):
        return '{}'.format(self.Name)
    class Meta:
        verbose_name='Producto'
        verbose_name_plural='Productos'
        ordering= ('Name',)
| {"/sale/urls.py": ["/sale/client/views.py", "/sale/invoice/views.py", "/sale/order/views.py"], "/security/urls.py": ["/security/views.py", "/security/business/views.py"], "/purchase/models.py": ["/SGAGRO/funciones2.py", "/catalog/models.py", "/security/models.py"], "/purchase/order/forms.py": ["/purchase/models.py", "/SGAGRO/funciones2.py"], "/sale/client/forms.py": ["/sale/models.py"], "/catalog/mark/forms.py": ["/catalog/models.py"], "/sale/models.py": ["/catalog/models.py", "/security/models.py", "/SGAGRO/funciones2.py"], "/purchase/order/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/purchase/order/forms.py", "/purchase/models.py"], "/catalog/category/views.py": ["/SGAGRO/funciones.py", "/catalog/models.py", "/catalog/category/forms.py"], "/security/business/views.py": ["/SGAGRO/funciones.py", "/security/models.py", "/security/business/forms.py"], "/catalog/urls.py": ["/catalog/category/views.py", "/catalog/product/views.py"], "/purchase/urls.py": ["/purchase/order/views.py"], "/catalog/product/forms.py": ["/catalog/models.py"], "/SGAGRO/funciones.py": ["/security/models.py", "/security/business/forms.py", "/sale/models.py", "/purchase/models.py"], "/purchase/admin.py": ["/purchase/models.py"], "/security/admin.py": ["/security/models.py"], "/sale/invoice/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/sale/invoice/forms.py", "/sale/models.py"], "/sale/admin.py": ["/sale/models.py"], "/catalog/models.py": ["/security/models.py"], "/purchase/provider/forms.py": ["/purchase/models.py"], "/security/business/forms.py": ["/security/models.py"], "/sale/invoice/forms.py": ["/sale/models.py", "/SGAGRO/funciones2.py"], "/sale/seller/forms.py": ["/sale/models.py"], "/sale/order/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/sale/models.py"], "/security/views.py": ["/SGAGRO/funciones.py", "/security/models.py", "/sale/models.py", "/catalog/models.py", 
"/security/forms.py", "/purchase/models.py"], "/catalog/product/views.py": ["/SGAGRO/funciones.py", "/catalog/models.py", "/catalog/product/forms.py"], "/catalog/category/forms.py": ["/catalog/models.py"], "/sale/client/views.py": ["/SGAGRO/funciones.py", "/sale/client/forms.py", "/sale/models.py"], "/catalog/admin.py": ["/catalog/models.py"]} |
61,999 | atorrese/SGAGRO | refs/heads/main | /purchase/provider/forms.py | """ Client Forms """
# Django
from django import forms
# App
from purchase.models import Provider
class ProviderForm(forms.ModelForm):
    """Form and validation for Provider (business name, RUC, phone, e-mail)."""

    BussinessName = forms.CharField(min_length=1)
    Ruc = forms.CharField(max_length=13)
    Phone = forms.CharField(min_length=10)
    Email = forms.EmailField(min_length=10)

    def clean(self):
        """No cross-field rules yet; defer entirely to ModelForm validation."""
        return super().clean()

    class Meta:
        model = Provider
        fields = ['BussinessName', 'Ruc', 'Phone', 'Email']
| {"/sale/urls.py": ["/sale/client/views.py", "/sale/invoice/views.py", "/sale/order/views.py"], "/security/urls.py": ["/security/views.py", "/security/business/views.py"], "/purchase/models.py": ["/SGAGRO/funciones2.py", "/catalog/models.py", "/security/models.py"], "/purchase/order/forms.py": ["/purchase/models.py", "/SGAGRO/funciones2.py"], "/sale/client/forms.py": ["/sale/models.py"], "/catalog/mark/forms.py": ["/catalog/models.py"], "/sale/models.py": ["/catalog/models.py", "/security/models.py", "/SGAGRO/funciones2.py"], "/purchase/order/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/purchase/order/forms.py", "/purchase/models.py"], "/catalog/category/views.py": ["/SGAGRO/funciones.py", "/catalog/models.py", "/catalog/category/forms.py"], "/security/business/views.py": ["/SGAGRO/funciones.py", "/security/models.py", "/security/business/forms.py"], "/catalog/urls.py": ["/catalog/category/views.py", "/catalog/product/views.py"], "/purchase/urls.py": ["/purchase/order/views.py"], "/catalog/product/forms.py": ["/catalog/models.py"], "/SGAGRO/funciones.py": ["/security/models.py", "/security/business/forms.py", "/sale/models.py", "/purchase/models.py"], "/purchase/admin.py": ["/purchase/models.py"], "/security/admin.py": ["/security/models.py"], "/sale/invoice/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/sale/invoice/forms.py", "/sale/models.py"], "/sale/admin.py": ["/sale/models.py"], "/catalog/models.py": ["/security/models.py"], "/purchase/provider/forms.py": ["/purchase/models.py"], "/security/business/forms.py": ["/security/models.py"], "/sale/invoice/forms.py": ["/sale/models.py", "/SGAGRO/funciones2.py"], "/sale/seller/forms.py": ["/sale/models.py"], "/sale/order/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/sale/models.py"], "/security/views.py": ["/SGAGRO/funciones.py", "/security/models.py", "/sale/models.py", "/catalog/models.py", 
"/security/forms.py", "/purchase/models.py"], "/catalog/product/views.py": ["/SGAGRO/funciones.py", "/catalog/models.py", "/catalog/product/forms.py"], "/catalog/category/forms.py": ["/catalog/models.py"], "/sale/client/views.py": ["/SGAGRO/funciones.py", "/sale/client/forms.py", "/sale/models.py"], "/catalog/admin.py": ["/catalog/models.py"]} |
62,000 | atorrese/SGAGRO | refs/heads/main | /purchase/migrations/0001_initial.py | # Generated by Django 3.1.3 on 2020-11-13 00:22
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
# Auto-generated initial migration for the `purchase` app (Django 3.1.3).
# Applied migrations must not be edited; annotations below are review notes only.
class Migration(migrations.Migration):
    """Initial schema for `purchase`: Provider, Order and DetailOrder."""
    initial = True
    dependencies = [
        ('catalog', '0001_initial'),  # DetailOrder has a FK into catalog.Product
    ]
    operations = [
        migrations.CreateModel(
            name='Provider',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
                ('status', models.BooleanField(default=True)),
                ('BussinessName', models.CharField(max_length=80, verbose_name='Razón Social')),
                # NOTE(review): verbose_name 'Razón Social' here looks copy-pasted
                # from BussinessName; fix in the model plus a new migration, not here.
                ('Ruc', models.CharField(max_length=13, verbose_name='Razón Social')),
                ('Phone', models.CharField(max_length=80, verbose_name='Telefono')),
                ('Email', models.EmailField(max_length=80, verbose_name='Correo Electronico')),
            ],
            options={
                'verbose_name': 'Proveedor',
                'verbose_name_plural': 'Proveedores',
                'ordering': ('-created_at',),
            },
        ),
        migrations.CreateModel(
            name='Order',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
                ('status', models.BooleanField(default=True)),
                ('DateOrder', models.DateField(default=django.utils.timezone.now)),
                ('WeekOrder', models.PositiveIntegerField(blank=True, null=True, verbose_name='Semana Orden')),
                # NOTE(review): "DelieverOrder" appears to be a typo for "DeliverOrder".
                ('DelieverOrder', models.DateField(blank=True, null=True)),
                ('Delivery', models.BooleanField(default=False)),
                ('TotalPay', models.DecimalField(decimal_places=2, max_digits=19)),
                ('ProviderId', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='purchase.provider', verbose_name='Proveedor')),
            ],
            options={
                'verbose_name': 'Pedido de Compra',
                'verbose_name_plural': 'Pedidos de Compras',
            },
        ),
        migrations.CreateModel(
            name='DetailOrder',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
                ('status', models.BooleanField(default=True)),
                ('Quantity', models.IntegerField(default=1)),
                ('Price', models.DecimalField(decimal_places=2, max_digits=19)),
                ('Discount', models.DecimalField(decimal_places=2, default=0.0, max_digits=19)),
                ('Total', models.DecimalField(decimal_places=2, max_digits=19)),
                ('OrderId', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='purchase.order', verbose_name='Orden de Compra')),
                ('ProductId', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='catalog.product', verbose_name='Producto')),
            ],
            options={
                'verbose_name': 'Detalle Pedido de Compra',
                'verbose_name_plural': 'Detalles de Pedidos de Compra',
            },
        ),
    ]
| {"/sale/urls.py": ["/sale/client/views.py", "/sale/invoice/views.py", "/sale/order/views.py"], "/security/urls.py": ["/security/views.py", "/security/business/views.py"], "/purchase/models.py": ["/SGAGRO/funciones2.py", "/catalog/models.py", "/security/models.py"], "/purchase/order/forms.py": ["/purchase/models.py", "/SGAGRO/funciones2.py"], "/sale/client/forms.py": ["/sale/models.py"], "/catalog/mark/forms.py": ["/catalog/models.py"], "/sale/models.py": ["/catalog/models.py", "/security/models.py", "/SGAGRO/funciones2.py"], "/purchase/order/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/purchase/order/forms.py", "/purchase/models.py"], "/catalog/category/views.py": ["/SGAGRO/funciones.py", "/catalog/models.py", "/catalog/category/forms.py"], "/security/business/views.py": ["/SGAGRO/funciones.py", "/security/models.py", "/security/business/forms.py"], "/catalog/urls.py": ["/catalog/category/views.py", "/catalog/product/views.py"], "/purchase/urls.py": ["/purchase/order/views.py"], "/catalog/product/forms.py": ["/catalog/models.py"], "/SGAGRO/funciones.py": ["/security/models.py", "/security/business/forms.py", "/sale/models.py", "/purchase/models.py"], "/purchase/admin.py": ["/purchase/models.py"], "/security/admin.py": ["/security/models.py"], "/sale/invoice/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/sale/invoice/forms.py", "/sale/models.py"], "/sale/admin.py": ["/sale/models.py"], "/catalog/models.py": ["/security/models.py"], "/purchase/provider/forms.py": ["/purchase/models.py"], "/security/business/forms.py": ["/security/models.py"], "/sale/invoice/forms.py": ["/sale/models.py", "/SGAGRO/funciones2.py"], "/sale/seller/forms.py": ["/sale/models.py"], "/sale/order/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/sale/models.py"], "/security/views.py": ["/SGAGRO/funciones.py", "/security/models.py", "/sale/models.py", "/catalog/models.py", 
"/security/forms.py", "/purchase/models.py"], "/catalog/product/views.py": ["/SGAGRO/funciones.py", "/catalog/models.py", "/catalog/product/forms.py"], "/catalog/category/forms.py": ["/catalog/models.py"], "/sale/client/views.py": ["/SGAGRO/funciones.py", "/sale/client/forms.py", "/sale/models.py"], "/catalog/admin.py": ["/catalog/models.py"]} |
62,001 | atorrese/SGAGRO | refs/heads/main | /security/business/forms.py | from django import forms
#App
from security.models import Business
class BusinessForm(forms.ModelForm):
    """Form for the Business (company) record."""
    name = forms.CharField(min_length=4)
    alias = forms.CharField(min_length=1)
    description = forms.CharField()
    # BUG(review): missing call parentheses -- this assigns the FileField
    # *class*, so Django never registers `icon` as a declared form field and
    # falls back to the model's own field definition. Probably meant
    # forms.FileField(); fixing it would change validation behavior, so it is
    # only flagged here.
    icon = forms.FileField
    def clean(self):
        # No extra cross-field validation; returns the parent's cleaned data.
        cleaned_data = super(BusinessForm,self).clean()
        return cleaned_data
    class Meta:
        model = Business
fields = ['name','alias','description','icon'] | {"/sale/urls.py": ["/sale/client/views.py", "/sale/invoice/views.py", "/sale/order/views.py"], "/security/urls.py": ["/security/views.py", "/security/business/views.py"], "/purchase/models.py": ["/SGAGRO/funciones2.py", "/catalog/models.py", "/security/models.py"], "/purchase/order/forms.py": ["/purchase/models.py", "/SGAGRO/funciones2.py"], "/sale/client/forms.py": ["/sale/models.py"], "/catalog/mark/forms.py": ["/catalog/models.py"], "/sale/models.py": ["/catalog/models.py", "/security/models.py", "/SGAGRO/funciones2.py"], "/purchase/order/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/purchase/order/forms.py", "/purchase/models.py"], "/catalog/category/views.py": ["/SGAGRO/funciones.py", "/catalog/models.py", "/catalog/category/forms.py"], "/security/business/views.py": ["/SGAGRO/funciones.py", "/security/models.py", "/security/business/forms.py"], "/catalog/urls.py": ["/catalog/category/views.py", "/catalog/product/views.py"], "/purchase/urls.py": ["/purchase/order/views.py"], "/catalog/product/forms.py": ["/catalog/models.py"], "/SGAGRO/funciones.py": ["/security/models.py", "/security/business/forms.py", "/sale/models.py", "/purchase/models.py"], "/purchase/admin.py": ["/purchase/models.py"], "/security/admin.py": ["/security/models.py"], "/sale/invoice/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/sale/invoice/forms.py", "/sale/models.py"], "/sale/admin.py": ["/sale/models.py"], "/catalog/models.py": ["/security/models.py"], "/purchase/provider/forms.py": ["/purchase/models.py"], "/security/business/forms.py": ["/security/models.py"], "/sale/invoice/forms.py": ["/sale/models.py", "/SGAGRO/funciones2.py"], "/sale/seller/forms.py": ["/sale/models.py"], "/sale/order/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/sale/models.py"], "/security/views.py": ["/SGAGRO/funciones.py", "/security/models.py", 
"/sale/models.py", "/catalog/models.py", "/security/forms.py", "/purchase/models.py"], "/catalog/product/views.py": ["/SGAGRO/funciones.py", "/catalog/models.py", "/catalog/product/forms.py"], "/catalog/category/forms.py": ["/catalog/models.py"], "/sale/client/views.py": ["/SGAGRO/funciones.py", "/sale/client/forms.py", "/sale/models.py"], "/catalog/admin.py": ["/catalog/models.py"]} |
62,002 | atorrese/SGAGRO | refs/heads/main | /sale/invoice/forms.py | """ Client Forms """
# Django
from django import forms
# App
from sale.models import Client, Seller, Invoice
from SGAGRO.funciones2 import METHOD_PAYEMENT,STATUS_PAY
class InvoiceForm(forms.ModelForm):
    """Form and validation for Invoice: client, seller, date and total.

    (Previous docstring said "Client" -- copy-paste error; this form edits
    Invoice. Disabled payment-method/discount fields were removed as dead code.)
    """

    # Restrict the FK choices to active (status=True) clients/sellers.
    ClientId = forms.ModelChoiceField(queryset=Client.objects.filter(status=True))
    SellerId = forms.ModelChoiceField(queryset=Seller.objects.filter(status=True))
    DateInvoice = forms.DateTimeField()
    TotalPay = forms.DecimalField()

    def clean(self):
        """No cross-field rules yet; defer entirely to ModelForm validation."""
        return super().clean()

    class Meta:
        model = Invoice
        fields = ['ClientId', 'SellerId', 'DateInvoice', 'TotalPay']
| {"/sale/urls.py": ["/sale/client/views.py", "/sale/invoice/views.py", "/sale/order/views.py"], "/security/urls.py": ["/security/views.py", "/security/business/views.py"], "/purchase/models.py": ["/SGAGRO/funciones2.py", "/catalog/models.py", "/security/models.py"], "/purchase/order/forms.py": ["/purchase/models.py", "/SGAGRO/funciones2.py"], "/sale/client/forms.py": ["/sale/models.py"], "/catalog/mark/forms.py": ["/catalog/models.py"], "/sale/models.py": ["/catalog/models.py", "/security/models.py", "/SGAGRO/funciones2.py"], "/purchase/order/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/purchase/order/forms.py", "/purchase/models.py"], "/catalog/category/views.py": ["/SGAGRO/funciones.py", "/catalog/models.py", "/catalog/category/forms.py"], "/security/business/views.py": ["/SGAGRO/funciones.py", "/security/models.py", "/security/business/forms.py"], "/catalog/urls.py": ["/catalog/category/views.py", "/catalog/product/views.py"], "/purchase/urls.py": ["/purchase/order/views.py"], "/catalog/product/forms.py": ["/catalog/models.py"], "/SGAGRO/funciones.py": ["/security/models.py", "/security/business/forms.py", "/sale/models.py", "/purchase/models.py"], "/purchase/admin.py": ["/purchase/models.py"], "/security/admin.py": ["/security/models.py"], "/sale/invoice/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/sale/invoice/forms.py", "/sale/models.py"], "/sale/admin.py": ["/sale/models.py"], "/catalog/models.py": ["/security/models.py"], "/purchase/provider/forms.py": ["/purchase/models.py"], "/security/business/forms.py": ["/security/models.py"], "/sale/invoice/forms.py": ["/sale/models.py", "/SGAGRO/funciones2.py"], "/sale/seller/forms.py": ["/sale/models.py"], "/sale/order/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/sale/models.py"], "/security/views.py": ["/SGAGRO/funciones.py", "/security/models.py", "/sale/models.py", "/catalog/models.py", 
"/security/forms.py", "/purchase/models.py"], "/catalog/product/views.py": ["/SGAGRO/funciones.py", "/catalog/models.py", "/catalog/product/forms.py"], "/catalog/category/forms.py": ["/catalog/models.py"], "/sale/client/views.py": ["/SGAGRO/funciones.py", "/sale/client/forms.py", "/sale/models.py"], "/catalog/admin.py": ["/catalog/models.py"]} |
62,003 | atorrese/SGAGRO | refs/heads/main | /sale/seller/forms.py | """ Seller Forms """
# Django
from django import forms
# App
from sale.models import Seller
class SellerForm(forms.ModelForm):
    """Form and validation for Seller records."""

    Names = forms.CharField(min_length=2)
    SurNames = forms.CharField(min_length=2)
    IdentificationCard = forms.CharField(min_length=10, max_length=13)
    Birthdate = forms.DateField()
    City = forms.CharField(min_length=2)
    Address = forms.CharField(min_length=2)
    Phone = forms.CharField(min_length=10)
    Email = forms.EmailField(required=False, min_length=10)

    def clean(self):
        """No cross-field rules; defer entirely to ModelForm validation."""
        return super().clean()

    class Meta:
        model = Seller
        fields = [
            'Names', 'SurNames', 'IdentificationCard', 'Birthdate',
            'City', 'Address', 'Phone', 'Email',
        ]
| {"/sale/urls.py": ["/sale/client/views.py", "/sale/invoice/views.py", "/sale/order/views.py"], "/security/urls.py": ["/security/views.py", "/security/business/views.py"], "/purchase/models.py": ["/SGAGRO/funciones2.py", "/catalog/models.py", "/security/models.py"], "/purchase/order/forms.py": ["/purchase/models.py", "/SGAGRO/funciones2.py"], "/sale/client/forms.py": ["/sale/models.py"], "/catalog/mark/forms.py": ["/catalog/models.py"], "/sale/models.py": ["/catalog/models.py", "/security/models.py", "/SGAGRO/funciones2.py"], "/purchase/order/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/purchase/order/forms.py", "/purchase/models.py"], "/catalog/category/views.py": ["/SGAGRO/funciones.py", "/catalog/models.py", "/catalog/category/forms.py"], "/security/business/views.py": ["/SGAGRO/funciones.py", "/security/models.py", "/security/business/forms.py"], "/catalog/urls.py": ["/catalog/category/views.py", "/catalog/product/views.py"], "/purchase/urls.py": ["/purchase/order/views.py"], "/catalog/product/forms.py": ["/catalog/models.py"], "/SGAGRO/funciones.py": ["/security/models.py", "/security/business/forms.py", "/sale/models.py", "/purchase/models.py"], "/purchase/admin.py": ["/purchase/models.py"], "/security/admin.py": ["/security/models.py"], "/sale/invoice/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/sale/invoice/forms.py", "/sale/models.py"], "/sale/admin.py": ["/sale/models.py"], "/catalog/models.py": ["/security/models.py"], "/purchase/provider/forms.py": ["/purchase/models.py"], "/security/business/forms.py": ["/security/models.py"], "/sale/invoice/forms.py": ["/sale/models.py", "/SGAGRO/funciones2.py"], "/sale/seller/forms.py": ["/sale/models.py"], "/sale/order/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/sale/models.py"], "/security/views.py": ["/SGAGRO/funciones.py", "/security/models.py", "/sale/models.py", "/catalog/models.py", 
"/security/forms.py", "/purchase/models.py"], "/catalog/product/views.py": ["/SGAGRO/funciones.py", "/catalog/models.py", "/catalog/product/forms.py"], "/catalog/category/forms.py": ["/catalog/models.py"], "/sale/client/views.py": ["/SGAGRO/funciones.py", "/sale/client/forms.py", "/sale/models.py"], "/catalog/admin.py": ["/catalog/models.py"]} |
62,004 | atorrese/SGAGRO | refs/heads/main | /sale/order/views.py | import json
# Django
from decimal import Decimal
from django.utils.timezone import datetime
from django.db.models import Q
from django.http import JsonResponse
from django.urls import reverse_lazy
from django.contrib.auth.mixins import LoginRequiredMixin
from django.views.generic import ListView, UpdateView, DeleteView ,DetailView
from SGAGRO.funciones import Add_Data
from SGAGRO.funciones2 import STATUS_PAY,METHOD_PAYEMENT
from catalog.models import Product
from utils.mixins import OldDataMixin
from sale.models import Invoice, Client, Seller, DetailInvoice
class Index(LoginRequiredMixin, ListView, OldDataMixin):
    """Paginated invoice list filtered by a client/seller name search.

    Only invoices whose StatusInvoice is 1 or 2 (open states) are listed.
    (Removed a leftover debug print of the search term from get_queryset.)
    """
    # NOTE(review): OldDataMixin sits *after* ListView; mixins conventionally
    # precede the generic view. Left unchanged to preserve the current MRO.
    template_name = 'sale/orders/index.html'
    model = Invoice
    paginate_by = 15
    context_object_name = 'invoices'
    attributes = {'search': ''}  # old-form-data defaults consumed by OldDataMixin

    def get_queryset(self):
        """Match the search term against client or seller names/surnames."""
        search = self.get_old_data('search')
        return Invoice.objects.filter(
            Q(ClientId__Names__icontains=search) | Q(ClientId__SurNames__icontains=search) |
            Q(SellerId__Names__icontains=search) | Q(SellerId__SurNames__icontains=search)
        ).order_by('-created_at').filter(StatusInvoice__in=[1, 2])

    def get_context_data(self, *, object_list=None, **kwargs):
        """Add shared template data and echo back the last search input."""
        context = super().get_context_data(**kwargs)
        Add_Data(context)
        return self.get_all_olds_datas(context=context, attributes=self.attributes)
class Show(LoginRequiredMixin, DetailView):
    """Invoice detail view; AJAX requests get a JSON payload of the invoice.

    NOTE(review): a second `Show` class defined later in this module replaces
    this one at import time, so this definition is effectively dead code.
    """
    template_name = 'sale/orders/show.html'
    model = Invoice
    context_object_name = 'Invoice'
    def get(self, request, *args, **kwargs):
        # The parent response is kept (rebound over `request`) so self.object
        # is populated before the AJAX branch below reads it.
        request = super(Show, self).get(request, *args, **kwargs)
        try:
            # NOTE(review): is_ajax() is deprecated as of Django 3.1; confirm
            # migration path to checking the X-Requested-With header directly.
            if self.request.is_ajax():
                detail =[{
                    'quantity':i.Quantity,
                    'price': i.Price,
                    'total': i.Total,
                    'product':{
                        'name':i.ProductId.Name,
                        'mark':i.ProductId.MarkId.Name,
                        'category':i.ProductId.CategoryId.Name,
                    },
                } for i in self.get_object().get_Details()]
                invoice = {
                    'client': self.get_object().ClientId.get_Names_SurNames(),
                    'seller': self.get_object().SellerId.get_Names_SurNames(),
                    'date': self.get_object().DateInvoice.strftime("%d-%m-%Y"),
                    'total': self.get_object().TotalPay,
                    'detail': detail,
                }
                return JsonResponse({'resp':'ok','invoice':invoice})
        except Exception as e:
            # Errors are only printed; the fall-through below then runs.
            print(e)
        # NOTE(review): returns a template-name *list*, not an HttpResponse --
        # looks like a bug on the non-AJAX/error path; confirm before relying on it.
        return self.get_template_names()
    def get_context_data(self, **kwargs):
        context = super(Show, self).get_context_data(**kwargs)
        return context
class Show(LoginRequiredMixin, DetailView):
    """Invoice detail view; AJAX requests get a JSON payload of the invoice.

    NOTE(review): this redefinition shadows the earlier `Show` class in this
    module (same body, different template). Likely a copy-paste duplicate --
    confirm which template was intended and delete the other class.
    """
    template_name = 'sale/invoices/show.html'
    model = Invoice
    context_object_name = 'Invoice'
    def get(self, request, *args, **kwargs):
        # The parent response is kept (rebound over `request`) so self.object
        # is populated before the AJAX branch below reads it.
        request = super(Show, self).get(request, *args, **kwargs)
        try:
            # NOTE(review): is_ajax() is deprecated as of Django 3.1; confirm
            # migration path to checking the X-Requested-With header directly.
            if self.request.is_ajax():
                detail =[{
                    'quantity':i.Quantity,
                    'price': i.Price,
                    'total': i.Total,
                    'product':{
                        'name':i.ProductId.Name,
                        'mark':i.ProductId.MarkId.Name,
                        'category':i.ProductId.CategoryId.Name,
                    },
                } for i in self.get_object().get_Details()]
                invoice = {
                    'client': self.get_object().ClientId.get_Names_SurNames(),
                    'seller': self.get_object().SellerId.get_Names_SurNames(),
                    'date': self.get_object().DateInvoice.strftime("%d-%m-%Y"),
                    'total': self.get_object().TotalPay,
                    'detail': detail,
                }
                return JsonResponse({'resp':'ok','invoice':invoice})
        except Exception as e:
            # Errors are only printed; the fall-through below then runs.
            print(e)
        # NOTE(review): returns a template-name *list*, not an HttpResponse --
        # looks like a bug on the non-AJAX/error path; confirm before relying on it.
        return self.get_template_names()
    def get_context_data(self, **kwargs):
        context = super(Show, self).get_context_data(**kwargs)
        return context
def change(request):
    """Update an Invoice's StatusInvoice from POST data (AJAX endpoint).

    NOTE(review): no authentication/permission check, no input validation, and
    debug prints left in -- pk and StatusInvoice come straight from the client.
    Harden (and remove the prints) before production use.
    """
    print(request.POST['pk'])
    print(request.POST['StatusInvoice'])
    invoice = Invoice.objects.get(id=request.POST['pk'])
    invoice.StatusInvoice =request.POST['StatusInvoice']
    invoice.save()
return JsonResponse({}) | {"/sale/urls.py": ["/sale/client/views.py", "/sale/invoice/views.py", "/sale/order/views.py"], "/security/urls.py": ["/security/views.py", "/security/business/views.py"], "/purchase/models.py": ["/SGAGRO/funciones2.py", "/catalog/models.py", "/security/models.py"], "/purchase/order/forms.py": ["/purchase/models.py", "/SGAGRO/funciones2.py"], "/sale/client/forms.py": ["/sale/models.py"], "/catalog/mark/forms.py": ["/catalog/models.py"], "/sale/models.py": ["/catalog/models.py", "/security/models.py", "/SGAGRO/funciones2.py"], "/purchase/order/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/purchase/order/forms.py", "/purchase/models.py"], "/catalog/category/views.py": ["/SGAGRO/funciones.py", "/catalog/models.py", "/catalog/category/forms.py"], "/security/business/views.py": ["/SGAGRO/funciones.py", "/security/models.py", "/security/business/forms.py"], "/catalog/urls.py": ["/catalog/category/views.py", "/catalog/product/views.py"], "/purchase/urls.py": ["/purchase/order/views.py"], "/catalog/product/forms.py": ["/catalog/models.py"], "/SGAGRO/funciones.py": ["/security/models.py", "/security/business/forms.py", "/sale/models.py", "/purchase/models.py"], "/purchase/admin.py": ["/purchase/models.py"], "/security/admin.py": ["/security/models.py"], "/sale/invoice/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/sale/invoice/forms.py", "/sale/models.py"], "/sale/admin.py": ["/sale/models.py"], "/catalog/models.py": ["/security/models.py"], "/purchase/provider/forms.py": ["/purchase/models.py"], "/security/business/forms.py": ["/security/models.py"], "/sale/invoice/forms.py": ["/sale/models.py", "/SGAGRO/funciones2.py"], "/sale/seller/forms.py": ["/sale/models.py"], "/sale/order/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/sale/models.py"], "/security/views.py": ["/SGAGRO/funciones.py", "/security/models.py", "/sale/models.py", 
"/catalog/models.py", "/security/forms.py", "/purchase/models.py"], "/catalog/product/views.py": ["/SGAGRO/funciones.py", "/catalog/models.py", "/catalog/product/forms.py"], "/catalog/category/forms.py": ["/catalog/models.py"], "/sale/client/views.py": ["/SGAGRO/funciones.py", "/sale/client/forms.py", "/sale/models.py"], "/catalog/admin.py": ["/catalog/models.py"]} |
62,005 | atorrese/SGAGRO | refs/heads/main | /security/views.py | import json
from django.shortcuts import render
from datetime import datetime
from django.db.models import Sum
from django.contrib.auth.models import User,Group
from django.http import JsonResponse
from django.shortcuts import render
from django.contrib.auth.decorators import login_required
from django.contrib.auth import views as auth_views
from django.contrib.auth.mixins import LoginRequiredMixin
from django.urls import reverse_lazy
from django.views.generic import ListView, TemplateView, FormView, View
from SGAGRO.funciones import Add_Data
#
from security.models import Business
from sale.models import Invoice,DetailInvoice,Client,Seller
from catalog.models import Product
from security.forms import RegisterForm
from purchase.models import Order
from django.views.defaults import page_not_found
from django.views.decorators.csrf import csrf_exempt
from django.utils.decorators import method_decorator
def mi_error_404(request):
    """Render the project's custom 404 page via Django's default handler."""
    return page_not_found(request, template_name='404.html')
class LoginView(auth_views.LoginView):
    """Login page based on Django's stock auth LoginView."""
    # Extra template context; the Business lookup is disabled, so this is
    # currently empty (templates presumably obtain Business elsewhere).
    context={}
    #context['Business']= Business.objects.first()
    extra_context = context
    template_name = 'auth/login.html'
    # Users who are already logged in are redirected to the success URL.
    redirect_authenticated_user = True
class LogoutView(LoginRequiredMixin,auth_views.LogoutView):
    """Stock Django logout view, restricted to authenticated users."""
    pass
class HomeView(LoginRequiredMixin,TemplateView):
    """Dashboard showing today's sales / purchase KPIs.

    BUG FIX: the original computed every figure at class-definition time
    (i.e. once, at module import), so the dashboard displayed the numbers
    from server start-up forever.  All queries now run per request in
    get_context_data().
    """
    template_name = 'auth/home.html'

    def get_context_data(self, **kwargs):
        context = super(HomeView, self).get_context_data(**kwargs)
        Add_Data(context)
        date_now = datetime.now()
        context['year'] = date_now.year
        # ISO week number: the week runs Monday through Sunday.
        context['week'] = date_now.isocalendar()[1]
        # Invoices in status 3 are considered completed sales.
        context['TotalSales'] = Invoice.objects.filter(
            StatusInvoice=3,
            DateInvoice=date_now
        ).count()
        revenue = Invoice.objects.filter(
            StatusInvoice=3,
            DateInvoice=date_now
        ).aggregate(Sum('TotalPay'))['TotalPay__sum']
        context['Revenue'] = round(revenue, 2) if revenue else 0.00
        # NOTE(review): expenses are aggregated over the whole year while
        # the other figures use the exact date -- kept as in the original.
        expenses = Order.objects.filter(
            DateOrder__year=date_now.year
        ).aggregate(Sum('TotalPay'))['TotalPay__sum']
        context['Expenses'] = round(expenses, 2) if expenses else 0.00
        context['TotalOrders'] = Order.objects.filter(
            DateOrder=date_now
        ).count()
        totalu = DetailInvoice.objects.filter(
            InvoiceId__DateInvoice=date_now,
        ).aggregate(Sum('Utility'))['Utility__sum']
        context['TotalUtility'] = round(totalu, 2) if totalu else 0.00
        return context
class ProfileView(LoginRequiredMixin,TemplateView):
    """Shows the authenticated user's profile page."""
    # NOTE(review): this context is built once at import time, so anything
    # Add_Data pulls from the DB is frozen at server start -- confirm intended.
    context={}
    Add_Data(context)
    date_now=datetime.now()  # computed at import time; not used below
    extra_context = context
    template_name = 'auth/profile.html'
class RegisterView(FormView):
    """Public user-registration view.

    On a valid submission it creates the auth user, adds it to the
    'Empleados' group and redirects to the login page.  An AJAX GET with
    a 'usu' parameter performs a username-availability lookup.
    """
    template_name = 'auth/register.html'
    form_class = RegisterForm
    success_url = reverse_lazy('security:login')
    context={}
    #context['Business']= Business.objects.first()
    extra_context = context

    def form_valid(self, form):
        f = super(RegisterView, self).form_valid(form)
        user = User.objects.create_user(
            form.cleaned_data['username'],
            form.cleaned_data['email'],
            form.cleaned_data['password'],
        )
        # Newly registered accounts always belong to the employee group.
        group = Group.objects.get(name='Empleados')
        group.user_set.add(user)
        user.first_name = form.cleaned_data['first_name']
        user.last_name = form.cleaned_data['last_name']
        user.save()
        return f

    def form_invalid(self, form):
        return super(RegisterView, self).form_invalid(form)

    def get(self, request, *args, **kwargs):
        # BUG FIX: the original called super().get(self, request, ...),
        # passing `self` twice and shifting every argument by one position.
        response = super(RegisterView, self).get(request, *args, **kwargs)
        if self.request.is_ajax():
            # Username-availability lookup used by the registration form.
            if 'usu' in self.request.GET:
                matches = User.objects.filter(username__search=self.request.GET['usu'])
                # Renamed from `json` to stop shadowing the json module.
                data = [{'id': m.id, 'username': m.username} for m in matches]
                return JsonResponse({'resp': 'ok', 'data': data})
        return response
def Filterdashboard(request):
    """AJAX endpoint: return the dashboard figures for one specific day."""
    selected = datetime.strptime(str(request.GET['date']), '%d-%m-%Y')
    print(selected)
    data = {'date': selected}
    print(selected)
    data['TotalSales'] = Invoice.objects.filter(
        DateInvoice=selected,
    ).count()
    # Revenue only counts completed (status 3) invoices.
    day_revenue = Invoice.objects.filter(
        StatusInvoice=3,
        DateInvoice=selected
    ).aggregate(Sum('TotalPay'))['TotalPay__sum']
    data['Revenue'] = round(day_revenue, 2) if day_revenue else 0.00
    day_expenses = Order.objects.filter(
        DateOrder=selected,
    ).aggregate(Sum('TotalPay'))['TotalPay__sum']
    data['Expenses'] = round(day_expenses, 2) if day_expenses else 0.00
    data['TotalOrders'] = Order.objects.filter(
        DateOrder=selected,
    ).count()
    day_utility = DetailInvoice.objects.filter(
        InvoiceId__DateInvoice=selected
    ).aggregate(Sum('Utility'))['Utility__sum']
    data['TotalUtility'] = round(day_utility, 2) if day_utility else 0.00
    return JsonResponse(data)
# @csrf_exempt
# @api_view(['POST'])
# def Webhook(request):
# print(request)
# json_data = json.loads(request.body)
# print(json_data)
# return JsonResponse({})
@method_decorator(csrf_exempt, name='dispatch')
class Webhook(View):
    """Dialogflow fulfillment webhook for the sales chatbot.

    CSRF is exempted because Dialogflow POSTs JSON without a Django
    CSRF token.  The response is the (mutated) request payload with the
    reply text inserted under 'fulfillmentMessages'.
    """

    def post(self, request, *args, **kwargs):
        # Raw Dialogflow request payload.
        json_data = json.loads(request.body)
        for dialog in json_data:
            print(dialog)
        texto=''
        # Invoice quote for the requested items ("pedido" intent).
        if json_data['queryResult']['intent']['displayName'] == 'pedido':
            factura='Pedidos{'
            total =0.0
            valor =0.0
            # Each entry holds the product name ('insumo') and a quantity.
            item=json_data['queryResult']['parameters']['pedidoInsumo']
            for pro in item:
                product = Product.objects.get(Name= pro['insumo'])
                valor= (product.Price*int(pro['number']))
                total+=float(valor)
                factura +='({}, {}, ${}, ${})'.format(pro['insumo'],pro['number'],product.Price,valor)
                # The last line item closes the quote block.
                if pro == item[-1]:
                    factura += ".\n} \n"
                else:
                    factura += "; \n"
            factura +='Total_Pagar (${})'.format(total)
            texto +=factura
            texto +='\n¿Desea realizar la compra?Digite si o no'
            #print(json_data)
        # Product search: list the whole catalogue ("catalogo" intent).
        if json_data['queryResult']['intent']['displayName'] == 'catalogo':
            products = Product.objects.all()
            p=''
            for pro in products:
                p += "{} STOCK {} PVP $ {}".format(pro.Name,pro.Stock,pro.Price)
                if pro == products.last():
                    p += ". \n"
                else:
                    p += ", \n"
            texto =json_data['queryResult']['fulfillmentText']+ "\n "+p
        # Order confirmation: persist the client, invoice and details.
        if json_data['queryResult']['intent']['displayName'] == 'confirmarPedido':
            # NOTE(review): the outputContexts indices (1 = client data,
            # 0 = ordered items) depend on the Dialogflow agent's setup.
            params =json_data['queryResult']['outputContexts'][1]['parameters']
            client= Client.objects.filter(IdentificationCard=params['cedula']['dni-person'])
            # Create the client on first contact, otherwise reuse it.
            if not client.exists():
                client = Client(
                    Names= params['nombres-apellidos']['nombres.original'],
                    SurNames= params['nombres-apellidos']['apellidos.original'],
                    IdentificationCard=params['cedula']['dni-person'],
                    City= params['geo-city'],
                    Address= params['street-address.original'],
                    Phone = params['phone-number'],
                    Email= params['email.original']
                )
                client.save()
            else:
                client= Client.objects.get(IdentificationCard=params['cedula']['dni-person'])
            # NOTE(review): seller is hard-coded by identification card.
            seller= Seller.objects.get(IdentificationCard='0940113315')
            factura= Invoice(ClientId= client, SellerId=seller)
            factura.save()
            items=json_data['queryResult']['outputContexts'][0]['parameters']['pedidoInsumo']
            print(items)
            print('---------------------------------¬\n')
            for i in items:
                print(i)
            total = 0.0
            # One DetailInvoice per ordered product; stock is decremented.
            for item in items:
                product = Product.objects.get(Name= item['insumo'])
                detail=DetailInvoice(
                    ProductId = product,
                    InvoiceId = factura,
                    Quantity = int(item['number']),
                    Price = product.Price,
                    Cost =product.Cost,
                    Utility= product.Price - product.Cost,
                    Total = (product.Price*int(item['number']))
                )
                detail.save()
                product.Stock -= detail.Quantity
                product.save()
                total += float(detail.Total)
            factura.TotalPay=total
            factura.save()
            texto='Su Pedido se Encuentra Reservado.Puede acercarse al AgroServicio con el siguiente ticket {} y con el valor a pagar e ${} '.format('1122',factura.TotalPay)
        # The request payload is mutated and echoed back as the response.
        json_data['fulfillmentMessages'] =[
            {
                "text": {
                    "text": [
                        texto
                    ]
                }
            }
        ]
        return JsonResponse(json_data)
"/sale/models.py", "/catalog/models.py", "/security/forms.py", "/purchase/models.py"], "/catalog/product/views.py": ["/SGAGRO/funciones.py", "/catalog/models.py", "/catalog/product/forms.py"], "/catalog/category/forms.py": ["/catalog/models.py"], "/sale/client/views.py": ["/SGAGRO/funciones.py", "/sale/client/forms.py", "/sale/models.py"], "/catalog/admin.py": ["/catalog/models.py"]} |
62,006 | atorrese/SGAGRO | refs/heads/main | /catalog/product/views.py | '''Marks Views'''
#Django
from django.http import JsonResponse
from django.template.loader import render_to_string
from django.urls import reverse_lazy
from django.contrib.auth.mixins import LoginRequiredMixin
from django.views.generic import ListView, CreateView, UpdateView, DeleteView, DetailView
#Sgv
from SGAGRO.funciones import Add_Data
from catalog.models import Product, Category, Mark
from catalog.product.forms import ProductForm
from utils.mixins import OldDataMixin
class Index(LoginRequiredMixin, ListView, OldDataMixin):
    """Paginated product list, filterable by name."""
    template_name = 'catalog/products/index.html'
    model = Product
    paginate_by = 15
    context_object_name = 'products'
    attributes = {'search': ''}

    def get_queryset(self):
        # Active products matching the remembered search term, newest first.
        term = self.get_old_data('search')
        active = Product.objects.filter(Name__icontains=term, status=True)
        return active.order_by('-created_at')

    def get_context_data(self, *, object_list=None, **kwargs):
        ctx = super(Index, self).get_context_data(**kwargs)
        Add_Data(ctx)
        return self.get_all_olds_datas(context=ctx, attributes=self.attributes)
class Show(LoginRequiredMixin, DetailView):
    """Product detail view.

    AJAX requests receive a rendered item snippet plus the product data
    as JSON; regular requests get the normal detail template.
    """
    template_name = 'catalog/products/show.html'
    model = Product
    context_object_name = 'product'

    def get(self, request, *args, **kwargs):
        # BUG FIX: the original overwrote `request` with the response and,
        # on non-AJAX requests, returned get_template_names() -- a list of
        # strings, not an HttpResponse.
        response = super(Show, self).get(request, *args, **kwargs)
        if self.request.is_ajax():
            obj = self.get_object()
            product = {
                'CategoryId': obj.CategoryId,
                'MarkId': obj.MarkId,
                'Description': obj.Description,
                'Name': obj.Name,
                'Cost': obj.Cost,
                'Price': obj.Price,
                'Stock': obj.Stock,
                'pk': obj.pk,
            }
            # The caller names the template fragment to render via GET.
            item = render_to_string(self.request.GET['item_html'], context=product)
            product_resp = {
                'Mark': obj.MarkId.Name if obj.MarkId else 'Sin Marca',
                # NOTE(review): the category fallback also says 'Sin Marca';
                # looks like a copy/paste slip, but the user-visible text is
                # kept unchanged here.
                'Category': obj.CategoryId.Name if obj.CategoryId else 'Sin Marca',
                'Stock': obj.Stock,
            }
            return JsonResponse({'resp': 'ok', 'item': item, 'product': product_resp, 'tipo': self.request.GET['tipo']})
        return response

    def get_context_data(self, **kwargs):
        context = super(Show, self).get_context_data(**kwargs)
        return context
class Create(LoginRequiredMixin,CreateView,OldDataMixin):
    """Creates a product."""
    model = Product
    template_name = 'catalog/products/create.html'
    form_class = ProductForm
    success_url = reverse_lazy('catalog:product.index')
    # Defaults used to repopulate the form after a failed submission.
    attributes = {
        'Name': '',
        'Description': '',
        'Cost': '',
        'Price': '',
        'Stock': '',
        'Availabel': True,
    }

    def form_valid(self, form):
        created = form.save(commit=False)
        created.save()
        print(created)
        return super(Create, self).form_valid(form)

    def form_invalid(self, form):
        print(form.errors)
        return super().form_invalid(form)

    def get_context_data(self, **kwargs):
        ctx = super(Create, self).get_context_data(**kwargs)
        Add_Data(ctx)
        # Repopulate the category/mark selectors with the posted choices.
        ctx['old_category'] = self.post_old_data('CategoryId')
        ctx['categories'] = Category.objects.filter(status=True)
        ctx['old_mark'] = self.post_old_data('MarkId')
        ctx['marks'] = Mark.objects.filter(status=True)
        ctx['products'] = Product.objects.filter(status=True)
        return self.get_all_olds_datas(context=ctx, attributes=self.attributes)
class Update(LoginRequiredMixin, UpdateView, OldDataMixin):
    """Updates a product."""
    model = Product
    template_name = 'catalog/products/edit.html'
    form_class = ProductForm
    success_url = reverse_lazy('catalog:product.index')

    def get_attributes(self):
        # Current field values used to prefill the edit form.
        obj = self.get_object()
        return {
            'Name': obj.Name,
            'Description': obj.Description,
            'Cost': obj.Cost,
            'Price': obj.Price,
            'Stock': obj.Stock,
            'Availabel': obj.Availabel,
            'CategoryId': obj.CategoryId,
            'MarkId': obj.MarkId
        }

    def form_valid(self, form):
        updated = form.save(commit=False)
        updated.save()
        return super(Update, self).form_valid(form)

    def get_context_data(self, **kwargs):
        context = super(Update, self).get_context_data(**kwargs)
        Add_Data(context)
        obj = self.get_object()
        # BUG FIX: the original accessed MarkId.pk / CategoryId.pk
        # unconditionally, raising AttributeError for products without a
        # mark or category (Show treats both relations as nullable).
        context['old_mark'] = self.post_old_data('MarkId', obj.MarkId.pk if obj.MarkId else None)
        context['marks'] = Mark.objects.filter(status=True)
        context['products'] = Product.objects.filter(status=True)
        context['old_category'] = self.post_old_data('CategoryId', obj.CategoryId.pk if obj.CategoryId else None)
        context['categories'] = Category.objects.filter(status=True)
        return self.post_all_olds_datas(context=context, attributes=self.get_attributes())
class Delete(LoginRequiredMixin, DeleteView):
    """Deletes a product via AJAX; refuses when invoices/orders reference it."""
    model = Product
    http_method_names = ['delete']

    def delete(self, request, *args, **kwargs):
        target = self.get_object()
        in_use = target.detailinvoice_set.exists() or target.detailorder_set.exists()
        if in_use:
            payload = {
                'status': False,
                'message': '¡No se Elimino el Regitro. Porque esta Asociado a una Factura u Orden Compra!'
            }
        else:
            target.delete()
            payload = {
                'status': True,
                'message': '¡El registro ha sido eliminado correctamente!'
            }
        return JsonResponse(payload)
| {"/sale/urls.py": ["/sale/client/views.py", "/sale/invoice/views.py", "/sale/order/views.py"], "/security/urls.py": ["/security/views.py", "/security/business/views.py"], "/purchase/models.py": ["/SGAGRO/funciones2.py", "/catalog/models.py", "/security/models.py"], "/purchase/order/forms.py": ["/purchase/models.py", "/SGAGRO/funciones2.py"], "/sale/client/forms.py": ["/sale/models.py"], "/catalog/mark/forms.py": ["/catalog/models.py"], "/sale/models.py": ["/catalog/models.py", "/security/models.py", "/SGAGRO/funciones2.py"], "/purchase/order/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/purchase/order/forms.py", "/purchase/models.py"], "/catalog/category/views.py": ["/SGAGRO/funciones.py", "/catalog/models.py", "/catalog/category/forms.py"], "/security/business/views.py": ["/SGAGRO/funciones.py", "/security/models.py", "/security/business/forms.py"], "/catalog/urls.py": ["/catalog/category/views.py", "/catalog/product/views.py"], "/purchase/urls.py": ["/purchase/order/views.py"], "/catalog/product/forms.py": ["/catalog/models.py"], "/SGAGRO/funciones.py": ["/security/models.py", "/security/business/forms.py", "/sale/models.py", "/purchase/models.py"], "/purchase/admin.py": ["/purchase/models.py"], "/security/admin.py": ["/security/models.py"], "/sale/invoice/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/sale/invoice/forms.py", "/sale/models.py"], "/sale/admin.py": ["/sale/models.py"], "/catalog/models.py": ["/security/models.py"], "/purchase/provider/forms.py": ["/purchase/models.py"], "/security/business/forms.py": ["/security/models.py"], "/sale/invoice/forms.py": ["/sale/models.py", "/SGAGRO/funciones2.py"], "/sale/seller/forms.py": ["/sale/models.py"], "/sale/order/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/sale/models.py"], "/security/views.py": ["/SGAGRO/funciones.py", "/security/models.py", "/sale/models.py", "/catalog/models.py", 
"/security/forms.py", "/purchase/models.py"], "/catalog/product/views.py": ["/SGAGRO/funciones.py", "/catalog/models.py", "/catalog/product/forms.py"], "/catalog/category/forms.py": ["/catalog/models.py"], "/sale/client/views.py": ["/SGAGRO/funciones.py", "/sale/client/forms.py", "/sale/models.py"], "/catalog/admin.py": ["/catalog/models.py"]} |
62,007 | atorrese/SGAGRO | refs/heads/main | /catalog/category/forms.py | from django import forms
#App
from catalog.models import Category
class CategoryForm(forms.ModelForm):
    """ModelForm for Category; the name must be at least 4 characters."""
    Name = forms.CharField(min_length=4)

    def clean(self):
        # No extra cross-field validation beyond the defaults.
        return super(CategoryForm, self).clean()

    class Meta:
        model = Category
        fields = ['Name']
        widgets = {
            'Name': forms.TextInput(attrs={'class': 'form-control'}),
        }
"/security/forms.py", "/purchase/models.py"], "/catalog/product/views.py": ["/SGAGRO/funciones.py", "/catalog/models.py", "/catalog/product/forms.py"], "/catalog/category/forms.py": ["/catalog/models.py"], "/sale/client/views.py": ["/SGAGRO/funciones.py", "/sale/client/forms.py", "/sale/models.py"], "/catalog/admin.py": ["/catalog/models.py"]} |
62,008 | atorrese/SGAGRO | refs/heads/main | /sale/client/views.py | """ Client Views """
# Django
from django.http import JsonResponse
from django.urls import reverse_lazy
from django.contrib.auth.mixins import LoginRequiredMixin
from django.views.generic import ListView, CreateView, UpdateView, DeleteView, DetailView
# App
from SGAGRO.funciones import Add_Data
from utils.mixins import OldDataMixin
from sale.client.forms import ClientForm
from sale.models import Client
#from utils.conexion import Info
class Index(LoginRequiredMixin, ListView, OldDataMixin):
    """Paginated client list with name search.

    AJAX requests get an autocomplete-style JSON payload instead of HTML.
    """
    template_name = 'sale/clients/index.html'
    model = Client
    paginate_by = 2
    context_object_name = 'clients'
    attributes = {'search': ''}

    def get_queryset(self):
        term = self.get_old_data('search')
        return Client.objects.filter(Names__icontains=term).order_by('-created_at')

    def get_context_data(self, *, object_list=None, **kwargs):
        ctx = super(Index, self).get_context_data(**kwargs)
        Add_Data(ctx)
        return self.get_all_olds_datas(context=ctx, attributes=self.attributes)

    def get(self, request, *args, **kwargs):
        response = super(Index, self).get(request, *args, **kwargs)
        if not self.request.is_ajax():
            return response
        matches = self.get_queryset()
        if matches:
            data = [{'id': c.pk, 'value': c.get_Names_SurNames()} for c in matches]
        else:
            # NOTE(review): an empty result serializes as {} not [] -- kept.
            data = {}
        return JsonResponse({'data': data})
# class Show(LoginRequiredMixin, DetailView):
# """Muestra el detalle del dispositivo"""
# template_name = 'Clients/Clients/show.html'
# model = Client
# context_object_name = 'Client'
# info = Info()
#
# def get_context_data(self, **kwargs):
# context = super(Show, self).get_context_data(**kwargs)
# try:
# ip_address = IpAddress.objects.get(Client=self.get_object())
# context['informations'] = self.info.get_status(target=ip_address.address)
# context['interfaces'] = self.info.get_interface(target=ip_address.address)
# context['addresses '] = self.info.get_ip_address(target=ip_address.address)
# except(Exception,):
# context['informations'] = []
# context['interfaces'] = []
# context['addresses'] = []
#
# return context
class Create(LoginRequiredMixin, CreateView, OldDataMixin):
    """Creates a client; answers AJAX submissions with a JSON status."""
    model = Client
    template_name = 'sale/clients/create.html'
    form_class = ClientForm
    success_url = reverse_lazy('sale:client.index')
    # Defaults used to repopulate the form after a failed submission.
    attributes = {
        'Names': '',
        'SurNames': '',
        'IdentificationCard': '',
        'City': '',
        'Address': '',
        'Phone': '',
        'Email': ''
    }

    def form_valid(self, form):
        form.save()
        if not self.request.is_ajax():
            return super().form_valid(form)
        return JsonResponse({
            'status': True,
            'message': '¡El registro ha sido creado correctamente!'
        })

    def form_invalid(self, form):
        if not self.request.is_ajax():
            return super().form_invalid(form)
        return JsonResponse({
            'status': False,
            'message': '¡El Formulario Tiene errores!',
            'form_errors': form.errors.as_json(),
        })

    def get_context_data(self, **kwargs):
        ctx = super(Create, self).get_context_data(**kwargs)
        Add_Data(ctx)
        return self.post_all_olds_datas(context=ctx, attributes=self.attributes)
class Update(LoginRequiredMixin, UpdateView, OldDataMixin):
    """Edits an existing client."""
    model = Client
    template_name = 'sale/clients/edit.html'
    form_class = ClientForm
    success_url = reverse_lazy('sale:client.index')

    def get_attributes(self):
        # Current field values used to prefill the edit form.
        current = self.get_object()
        return {
            'Names': current.Names,
            'SurNames': current.SurNames,
            'IdentificationCard': current.IdentificationCard,
            'City': current.City,
            'Address': current.Address,
            'Phone': current.Phone,
            'Email': current.Email
        }

    def form_valid(self, form):
        form.save()
        return super().form_valid(form)

    def get_context_data(self, **kwargs):
        ctx = super(Update, self).get_context_data(**kwargs)
        Add_Data(ctx)
        return self.post_all_olds_datas(context=ctx, attributes=self.get_attributes())
class Delete(LoginRequiredMixin, DeleteView):
    """Deletes a client via AJAX unless invoices reference it."""
    model = Client
    http_method_names = ['delete']

    def delete(self, request, *args, **kwargs):
        target = self.get_object()
        if target.invoice_set.exists():
            payload = {
                'status': False,
                'message': '¡No se Elimino el Regitro. Porque esta Asociado a una Factura o varias Facturas!'
            }
        else:
            target.delete()
            payload = {
                'status': True,
                'message': '¡El registro ha sido eliminado correctamente!'
            }
        return JsonResponse(payload)
| {"/sale/urls.py": ["/sale/client/views.py", "/sale/invoice/views.py", "/sale/order/views.py"], "/security/urls.py": ["/security/views.py", "/security/business/views.py"], "/purchase/models.py": ["/SGAGRO/funciones2.py", "/catalog/models.py", "/security/models.py"], "/purchase/order/forms.py": ["/purchase/models.py", "/SGAGRO/funciones2.py"], "/sale/client/forms.py": ["/sale/models.py"], "/catalog/mark/forms.py": ["/catalog/models.py"], "/sale/models.py": ["/catalog/models.py", "/security/models.py", "/SGAGRO/funciones2.py"], "/purchase/order/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/purchase/order/forms.py", "/purchase/models.py"], "/catalog/category/views.py": ["/SGAGRO/funciones.py", "/catalog/models.py", "/catalog/category/forms.py"], "/security/business/views.py": ["/SGAGRO/funciones.py", "/security/models.py", "/security/business/forms.py"], "/catalog/urls.py": ["/catalog/category/views.py", "/catalog/product/views.py"], "/purchase/urls.py": ["/purchase/order/views.py"], "/catalog/product/forms.py": ["/catalog/models.py"], "/SGAGRO/funciones.py": ["/security/models.py", "/security/business/forms.py", "/sale/models.py", "/purchase/models.py"], "/purchase/admin.py": ["/purchase/models.py"], "/security/admin.py": ["/security/models.py"], "/sale/invoice/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/sale/invoice/forms.py", "/sale/models.py"], "/sale/admin.py": ["/sale/models.py"], "/catalog/models.py": ["/security/models.py"], "/purchase/provider/forms.py": ["/purchase/models.py"], "/security/business/forms.py": ["/security/models.py"], "/sale/invoice/forms.py": ["/sale/models.py", "/SGAGRO/funciones2.py"], "/sale/seller/forms.py": ["/sale/models.py"], "/sale/order/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/sale/models.py"], "/security/views.py": ["/SGAGRO/funciones.py", "/security/models.py", "/sale/models.py", "/catalog/models.py", 
"/security/forms.py", "/purchase/models.py"], "/catalog/product/views.py": ["/SGAGRO/funciones.py", "/catalog/models.py", "/catalog/product/forms.py"], "/catalog/category/forms.py": ["/catalog/models.py"], "/sale/client/views.py": ["/SGAGRO/funciones.py", "/sale/client/forms.py", "/sale/models.py"], "/catalog/admin.py": ["/catalog/models.py"]} |
62,009 | atorrese/SGAGRO | refs/heads/main | /catalog/admin.py | from django.contrib import admin
# Register your models here.
from catalog.models import Mark, Category,Product
from import_export import resources
from import_export.admin import ImportExportModelAdmin
# Category
class CategoryResource(resources.ModelResource):
    """django-import-export resource for importing/exporting Category rows."""
    class Meta:
        model =Category
class CategoryAdmin(ImportExportModelAdmin,admin.ModelAdmin):
    """Category admin with import/export; searchable and listed by Name."""
    search_fields = ['Name']
    list_display = ('Name',)
    resource_class = CategoryResource
# Mark
class MarkResource(resources.ModelResource):
    """django-import-export resource for importing/exporting Mark rows."""
    class Meta:
        model =Mark
class MarkAdmin(ImportExportModelAdmin,admin.ModelAdmin):
    """Mark admin with import/export; searchable and listed by Name."""
    search_fields = ['Name']
    list_display = ('Name',)
    resource_class = MarkResource
# Product
class ProductResource(resources.ModelResource):
    """django-import-export resource for importing/exporting Product rows."""
    class Meta:
        model = Product
class ProductAdmin(ImportExportModelAdmin, admin.ModelAdmin):
    """Product admin with import/export support.

    BUG FIX: CategoryId is a ForeignKey (catalog/product/views.py accesses
    CategoryId.Name / CategoryId.pk directly), so the original
    obj.CategoryId.all() raised AttributeError on the change list, and the
    raw FK names in search_fields are not valid search lookups -- the
    search now traverses to the related Name fields.
    """
    search_fields = ['Name', 'CategoryId__Name', 'MarkId__Name']
    list_display = ('Name', 'get_category', 'MarkId', 'Cost', 'Price', 'Stock', 'Availabel',)
    resource_class = ProductResource

    def get_category(self, obj):
        # Show the related category's name; blank when no category is set.
        return obj.CategoryId.Name if obj.CategoryId else ''
# Register the catalog models with their import/export-enabled admins.
admin.site.register(Mark,MarkAdmin)
admin.site.register(Category,CategoryAdmin)
admin.site.register(Product,ProductAdmin)
| {"/sale/urls.py": ["/sale/client/views.py", "/sale/invoice/views.py", "/sale/order/views.py"], "/security/urls.py": ["/security/views.py", "/security/business/views.py"], "/purchase/models.py": ["/SGAGRO/funciones2.py", "/catalog/models.py", "/security/models.py"], "/purchase/order/forms.py": ["/purchase/models.py", "/SGAGRO/funciones2.py"], "/sale/client/forms.py": ["/sale/models.py"], "/catalog/mark/forms.py": ["/catalog/models.py"], "/sale/models.py": ["/catalog/models.py", "/security/models.py", "/SGAGRO/funciones2.py"], "/purchase/order/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/purchase/order/forms.py", "/purchase/models.py"], "/catalog/category/views.py": ["/SGAGRO/funciones.py", "/catalog/models.py", "/catalog/category/forms.py"], "/security/business/views.py": ["/SGAGRO/funciones.py", "/security/models.py", "/security/business/forms.py"], "/catalog/urls.py": ["/catalog/category/views.py", "/catalog/product/views.py"], "/purchase/urls.py": ["/purchase/order/views.py"], "/catalog/product/forms.py": ["/catalog/models.py"], "/SGAGRO/funciones.py": ["/security/models.py", "/security/business/forms.py", "/sale/models.py", "/purchase/models.py"], "/purchase/admin.py": ["/purchase/models.py"], "/security/admin.py": ["/security/models.py"], "/sale/invoice/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/sale/invoice/forms.py", "/sale/models.py"], "/sale/admin.py": ["/sale/models.py"], "/catalog/models.py": ["/security/models.py"], "/purchase/provider/forms.py": ["/purchase/models.py"], "/security/business/forms.py": ["/security/models.py"], "/sale/invoice/forms.py": ["/sale/models.py", "/SGAGRO/funciones2.py"], "/sale/seller/forms.py": ["/sale/models.py"], "/sale/order/views.py": ["/SGAGRO/funciones.py", "/SGAGRO/funciones2.py", "/catalog/models.py", "/sale/models.py"], "/security/views.py": ["/SGAGRO/funciones.py", "/security/models.py", "/sale/models.py", "/catalog/models.py", 
"/security/forms.py", "/purchase/models.py"], "/catalog/product/views.py": ["/SGAGRO/funciones.py", "/catalog/models.py", "/catalog/product/forms.py"], "/catalog/category/forms.py": ["/catalog/models.py"], "/sale/client/views.py": ["/SGAGRO/funciones.py", "/sale/client/forms.py", "/sale/models.py"], "/catalog/admin.py": ["/catalog/models.py"]} |
62,010 | networkgangster/JustGet10 | refs/heads/master | /possibles.py | """
Project name: JustGet10
Copyright,
ALEV Samuel (226430@supinfo.com)
STOCKMAN Jim (227078@supinfo.com)
(C) 2016 - 2017
This script was tested with Python 3.5.2 and PyGame 1.9.2b1
"""
# Checks whether the cell at (i, j) has an orthogonal neighbour holding
# the same value.
def possessAdjacent(n: int, board: list, i: int, j: int):
    # Debug guard: out-of-range coordinates yield the sentinel 'Error'
    # (note: a truthy string -- kept for backward compatibility).
    if not (0 <= i < n and 0 <= j < n):
        return 'Error'
    value = board[i][j]
    for di, dj in ((1, 0), (0, 1), (0, -1), (-1, 0)):
        ni, nj = i + di, j + dj
        if 0 <= ni < n and 0 <= nj < n and board[ni][nj] == value:
            return True
    return False
# Returns True while the board still contains at least one mergeable
# pair, i.e. a legal move remains.
def playableCase(n: int, board: list):
    return any(
        possessAdjacent(n, board, row, col)
        for row in range(n)
        for col in range(n)
    )
# Returns the largest value on the board (never less than 1).
def maxNumber(n: int, board: list):
    cells = [board[row][col] for row in range(n) for col in range(n)]
    return max(cells + [1])
62,011 | networkgangster/JustGet10 | refs/heads/master | /merge.py | """
Project name: JustGet10
Copyright,
ALEV Samuel (226430@supinfo.com)
STOCKMAN Jim (227078@supinfo.com)
(C) 2016 - 2017
This script was tested with Python 3.5.2 and PyGame 1.9.2b1
"""
import bases
def propagation(n: int, board: list, coord: tuple, liste: list):
    """Flood-fill: collect into `liste` every cell connected to `coord` with the same value.

    `liste` is mutated in place and is expected to already contain `coord`.
    Neighbours are explored depth-first in the order up, down, left, right —
    exactly the order of the original four copy-pasted branches — so the
    resulting list order is unchanged. Out-of-range starting coords are a no-op.
    """
    i, j = coord
    if not (0 <= i < n and 0 <= j < n):
        return
    value = board[i][j]
    # One loop over the four orthogonal directions replaces the four
    # near-identical branches of the original.
    for di, dj in ((-1, 0), (1, 0), (0, -1), (0, 1)):
        ni, nj = i + di, j + dj
        if 0 <= ni < n and 0 <= nj < n and board[ni][nj] == value:
            if (ni, nj) not in liste:
                liste.append((ni, nj))
                propagation(n, board, (ni, nj), liste)
def modification(n: int, board: list, liste: list):
    """Merge a connected group: increment the clicked cell, empty the rest.

    `liste[0]` is the clicked cell; every other listed cell is set to 0 so
    gravity can refill it. `n` is unused but kept for signature consistency
    with the other board helpers.
    """
    first, *rest = liste
    board[first[0]][first[1]] += 1
    for row, col in rest:
        board[row][col] = 0
def gravity(n: int, board: list, proba: tuple):
for i in range(n):
for j in range(n):
if board[i][j] == 0 and i >= 1:
if i == 1:
board[i][j] = board[i - 1][j]
board[0][j] = 0
elif i == 2:
board[2][j] = board[i - 1][j]
board[1][j] = board[i - 2][j]
board[0][j] = 0
elif i == 3:
board[i][j] = board[i - 1][j]
board[2][j] = board[i - 2][j]
board[1][j] = board[i - 3][j]
board[0][j] = 0
elif i == 4:
board[i][j] = board[i - 1][j]
board[3][j] = board[i - 2][j]
board[2][j] = board[i - 3][j]
board[1][j] = board[i - 4][j]
board[0][j] = 0
elif i == 5:
board[i][j] = board[i - 1][j]
board[4][j] = board[i - 2][j]
board[3][j] = board[i - 3][j]
board[2][j] = board[i - 4][j]
board[1][j] = board[i - 5][j]
board[0][j] = 0
for p in range(n):
for m in range(n):
if board[p][m] == 0:
board[p][m] = bases.element(proba)
| {"/merge.py": ["/bases.py"], "/justGetTenGUI.py": ["/bases.py", "/possibles.py", "/merge.py"]} |
62,012 | networkgangster/JustGet10 | refs/heads/master | /bases.py | """
Project name: JustGet10
Copyright,
ALEV Samuel (226430@supinfo.com)
STOCKMAN Jim (227078@supinfo.com)
(C) 2016 - 2017
This script was tested with Python 3.5.2 and PyGame 1.9.2b1
"""
import random
# Produces cell values according to the given probability thresholds
def element(tuple: tuple):
    """Draw a random starting value (1-4) using cumulative thresholds.

    `tuple` holds three cumulative probabilities (t0, t1, t2): a uniform
    draw below t0 gives 4, below t1 gives 3, below t2 gives 2, else 1.
    (The parameter name shadows the builtin but is kept for compatibility.)

    Fix: the original used strict double inequalities (t0 < nb < t1), so a
    draw landing exactly on a threshold fell through every interval and
    wrongly returned 1; half-open intervals close those gaps.
    """
    nb = random.random()
    if nb < tuple[0]:
        return 4
    elif nb < tuple[1]:
        return 3
    elif nb < tuple[2]:
        return 2
    else:
        return 1
# Returns a board filled with random elements
def newBoard(n: int, tuple: tuple):
    """Build and return an n x n board of random elements.

    Each cell is drawn independently via element() using the probability
    thresholds in `tuple` (parameter name kept for backward compatibility).
    """
    return [[element(tuple) for _col in range(n)] for _row in range(n)]
# Print the board to the console
def display(board: list, n: int):
    """Print the board, one row per line, with a blank line between rows.

    The `n` parameter is effectively unused (the original shadowed it with
    its loop variable); it is kept only for call-site compatibility.
    """
    for row in board:
        for number in row:
            print(number, end=' ')
        print('\n')
62,013 | networkgangster/JustGet10 | refs/heads/master | /environment.py | import pygame, bases, possibles, merge, justGetTenGUI
import numpy as np
import matplotlib.pyplot as plt
import pickle
from matplotlib import style
import time
import pyautogui
# Edge length of the environment's board
SIZE = 4
# How many training episodes to run
HM_EPISODES = 25000
# Rewards
GAMEOVER_PENALTY = 20
MOVE_REWARD = 1
HIGHER_REWARD = 5
TEN_REWARD = 50
# Q learning parameters
LEARNING_RATE = 0.1
DISCOUNT = 0.95
epsilon = 0.9
EPS_DECAY = 0.9998 # epsilon decay rate; may end up unused
SHOW_EVERY = 3000 # only show every 3000th episode so training runs faster
# resume training from an already-existing Q table
start_q_table = None # or filename
# all possible actions: action id -> (row, col) cell on the 4x4 board
action_space = {1: (0,0),
                2: (0,1),
                3: (0,2),
                4: (0,3),
                5: (1, 0),
                6: (1, 1),
                7: (1, 2),
                8: (1, 3),
                9: (2, 0),
                10: (2, 1),
                11: (2, 2),
                12: (2, 3),
                13: (3, 0),
                14: (3, 1),
                15: (3, 2),
                16: (3, 3)}
# all possible states
# random action: append a random cell to the given list
def action(x, y, field):
    """Append a uniformly random [x, y] cell (each in 0..3) to `field`.

    NOTE(review): the incoming x and y arguments are immediately
    overwritten and therefore have no effect — presumably leftovers;
    confirm before removing them from the signature.
    """
    rand_x = np.random.randint(0, 4)
    rand_y = np.random.randint(0, 4)
    field.append([rand_x, rand_y])
# NOTE(review): everything below is an unfinished Q-learning sketch. Lines
# such as "for not gameover:" and "if no moves left:" are pseudocode, not
# valid Python, so this module cannot currently be imported or executed.
if start_q_table is None:
    ''' q_table = np.random.uniform(low = 1, high = 5, size = (4 * 4 * 16))
    q_table = np.zeros([board, action_space]) '''
else:
    # Resume training from a previously pickled Q table.
    with open(start_q_table, "rb") as f:
        q_table = pickle.load(f)
episode_rewards = []
for episode in range(HM_EPISODES):
    # initialisations of the board etc.
    if episode % SHOW_EVERY == 0:
        print(f"on # {episode}, epsilon: {epsilon}")
        print(f"{SHOW_EVERY} ep mean {np.mean(episode_rewards[-SHOW_EVERY])}")
        show = True
    else:
        show = False
    episode_reward = 0
    for not gameover:
        obs = (board)
        # Epsilon-greedy action selection.
        if np.random.random() > epsilon:
            action = np.argmax(q_table[obs])
        else:
            action = np.random.randint(1,17)
        if no moves left:
            reward = -GAMEOVER_PENALTY
        elif successful move:
            reward = MOVE_REWARD
        elif higher number:
            reward = HIGHER_REWARD
        elif got ten:
            reward = TEN_REWARD
        new_obs = (board)
        # Standard Q-learning update rule.
        max_future_q = np.max(q_table[new_obs])
        current_q = q_table[obs][action]
        if reward == TEN_REWARD:
            new_q = TEN_REWARD
        elif reward == -GAMEOVER_PENALTY:
            new_q = -GAMEOVER_PENALTY
        else:
            new_q = (1 - LEARNING_RATE) * current_q + LEARNING_RATE * (reward + DISCOUNT * max_future_q)
        q_table[obs][action] = new_q
        episode_reward += reward
        if reward == TEN_REWARD or reward == -GAMEOVER_PENALTY:
            break bzw gameover
    episode_rewards.append(episode_reward)
    epsilon *= EPS_DECAY
# Plot the moving average of episode rewards, then persist the Q table.
moving_avg = np.convolve(episode_rewards, np.ones((SHOW_EVERY,)) / SHOW_EVERY, mode = "valid")
plt.plot([i for i in range(len(moving_avg))], moving_avg)
plt.ylabel(f"reward {SHOW_EVERY}")
plt.xlabel("episode #")
plt.show()
with open (f"qtable-{int(time.time())}.pickle", "wb") as f:
    pickle.dump(q_table, f)
| {"/merge.py": ["/bases.py"], "/justGetTenGUI.py": ["/bases.py", "/possibles.py", "/merge.py"]} |
62,014 | networkgangster/JustGet10 | refs/heads/master | /justGetTenGUI.py | '''
Project name: JustGet10
Copyright,
ALEV Samuel (226430@supinfo.com)
STOCKMAN Jim (227078@supinfo.com)
(C) 2016 - 2017
This script was tested with Python 3.5.2 and PyGame 1.9.2b1
'''
import pygame, bases, possibles, merge
pygame.init()
pygame.font.init()
# Font used for all text rendering
roboto = pygame.font.Font("fonts/Roboto.ttf", 30)
# Colour constant
black = (0, 0, 0)
# Window width and height
display_width = 800
display_height = 600
# Dictionary of images, buttons and sounds used by the UI
dic = {
'logo': {
'img': pygame.image.load('images/logo.png'),
'x': (display_width * 0.5) - 251 / 2,
'y': (display_height * 0.2) - 103 / 2
},
'play': {
'img': pygame.image.load('images/boutons/jouer.png'),
'img_pressed': pygame.image.load('images/boutons/jouer_pressed.png'),
'x': (display_width * 0.5) - 290 / 2,
'y': (display_height * 0.55) - 72 / 2
},
'quitter': {
'img': pygame.image.load('images/boutons/quitter.png'),
'img_pressed': pygame.image.load('images/boutons/quitter_pressed.png'),
'x': (display_width * 0.5) - 290 / 2,
'y': (display_height * 0.7) - 72 / 2
},
'petite': {
'img': pygame.image.load('images/boutons/petite.png'),
'img_pressed': pygame.image.load('images/boutons/petite_pressed.png'),
'x': (display_width * 0.5) - 290 / 2,
'y': (display_height * 0.55) - 72 / 2
},
'moyenne': {
'img': pygame.image.load('images/boutons/moyenne.png'),
'img_pressed': pygame.image.load('images/boutons/moyenne_pressed.png'),
'x': (display_width * 0.5) - 290 / 2,
'y': (display_height * 0.7) - 72 / 2
},
'grande': {
'img': pygame.image.load('images/boutons/grande.png'),
'img_pressed': pygame.image.load('images/boutons/grande_pressed.png'),
'x': (display_width * 0.5) - 290 / 2,
'y': (display_height * 0.85) - 72 / 2,
},
'retour': {
'img': pygame.image.load('images/boutons/retour.png'),
'img_pressed': pygame.image.load('images/boutons/retour_pressed.png'),
'x': (display_width * 0.5) - 128 / 2,
'y': (display_height * 0.43) - 45 / 2,
},
'limPerCase': {
'enable': pygame.image.load('images/boutons/enable.png'),
'disable': pygame.image.load('images/boutons/disable.png'),
'x': (display_width * 0.7) - 290 / 2 + 140,
'y': (display_height * 0.55) - 72 / 2
},
'limPerGame': {
'enable': pygame.image.load('images/boutons/enable.png'),
'disable': pygame.image.load('images/boutons/disable.png'),
'x': (display_width * 0.7) - 290 / 2 + 140,
'y': (display_height * 0.7) - 72 / 2
},
'start': {
'img': pygame.image.load('images/boutons/commencer.png'),
'img_pressed': pygame.image.load('images/boutons/commencer_pressed.png'),
'x': (display_width * 0.5) - 290 / 2,
'y': (display_height * 0.85) - 72 / 2
},
'restart': {
'img': pygame.image.load('images/boutons/restart.png'),
'img_pressed': pygame.image.load('images/boutons/restart_pressed.png'),
'x': (display_width * 0.85) - 145 / 2,
'y': (display_height * 0.5) - 37 / 2
},
'quitter2': {
'img': pygame.image.load('images/boutons/quitterPetit.png'),
'img_pressed': pygame.image.load('images/boutons/quitterPetit_pressed.png'),
'x': (display_width * 0.85) - 145 / 2,
'y': (display_height * 0.6) - 37 / 2
},
'back': {
'img': pygame.image.load('images/back.png'),
'x': 590,
'y': 32
},
'back2': {
'img': pygame.image.load('images/back.png'),
'x': 590,
'y': 72
},
'lose': {
'sound': pygame.mixer.Sound("sounds/lose.ogg")
},
'doritos': {
'img': pygame.image.load('images/dorito.png'),
'x': 1,
'y': 1,
},
'airhorn': {
'sound': pygame.mixer.Sound("sounds/airhorn.ogg")
},
}
# Game-start state variables
n = 5                      # board edge length (set by the difficulty menu: 6/5/4)
proba = (0.05, 0.30, 0.6)  # cumulative probabilities for new cell values (see bases.element)
board = []
limCase = False            # per-move time limit enabled?
limGame = False            # whole-game time limit enabled?
saved = False
# Set the window size, give it a title and an icon
gameDisplay = pygame.display.set_mode((display_width, display_height), 0, 32)
pygame.display.set_caption("Just Get 10")
pygame.display.set_icon(dic['logo']['img'])
# Game clock
clock = pygame.time.Clock()
FPS = 60
# Pre-rendered option labels (French UI strings kept verbatim)
texte1 = roboto.render('Temps limité par coup', True, (0, 0, 0))
texte2 = roboto.render('Temps limité pour terminer', True, (0, 0, 0))
mlg2 = True
def blit(angle):
    """Draw ten copies of the rotated dorito sprite in a horizontal strip.

    Each copy is offset by the sprite's base x times slot * 100 pixels;
    all copies share the sprite's current y position.
    """
    sprite = dic['doritos']
    rotated = pygame.transform.rotate(sprite['img'], angle)
    for slot in range(10):
        gameDisplay.blit(rotated, (sprite['x'] * slot * 100, sprite['y']))
# noinspection PyTypeChecker
def menu():
    """Main menu: shows the logo plus Play and Quit buttons and dispatches clicks."""
    # Background
    gameDisplay.fill((255, 251, 234))
    # Draw the main-menu widgets
    gameDisplay.blit(dic['logo']['img'], (dic['logo']['x'], dic['logo']['y']))
    gameDisplay.blit(dic['play']['img'], (dic['play']['x'], dic['play']['y']))
    gameDisplay.blit(dic['quitter']['img'], (dic['quitter']['x'], dic['quitter']['y']))
    inMenu1 = True
    while inMenu1:
        # Fetch the cursor's x/y position on every pass through the loop
        mouse = pygame.mouse.get_pos()
        # Handle pending events
        for event in pygame.event.get():
            # Window close button / Escape key quit the program
            if event.type == pygame.QUIT or event.type == pygame.KEYDOWN and event.key == pygame.K_ESCAPE:
                pygame.quit()
                quit()
            # Hit-test the "Play" button
            if dic['play']['x'] + 290 > mouse[0] > dic['play']['x'] and dic['play']['y'] + 72 > mouse[1] > dic['play']['y']:
                if event.type == pygame.MOUSEBUTTONUP:
                    gameDisplay.blit(dic['play']['img'], (dic['play']['x'], dic['play']['y']))
                    inMenu1 = False
                if event.type == pygame.MOUSEBUTTONDOWN:
                    gameDisplay.blit(dic['play']['img_pressed'], (dic['play']['x'], dic['play']['y']))
            else:
                gameDisplay.blit(dic['play']['img'], (dic['play']['x'], dic['play']['y']))
            # Hit-test the "Quit" button
            if dic['quitter']['x'] + 290 > mouse[0] > dic['quitter']['x'] and dic['quitter']['y'] + 72 > mouse[1] > dic['quitter']['y']:
                if event.type == pygame.MOUSEBUTTONUP:
                    gameDisplay.blit(dic['quitter']['img'], (dic['quitter']['x'], dic['quitter']['y']))
                    pygame.quit()
                    quit()
                if event.type == pygame.MOUSEBUTTONDOWN:
                    gameDisplay.blit(dic['quitter']['img_pressed'], (dic['quitter']['x'], dic['quitter']['y']))
            else:
                gameDisplay.blit(dic['quitter']['img'], (dic['quitter']['x'], dic['quitter']['y']))
        pygame.display.update()
        clock.tick(FPS)
    # Fall through to the difficulty menu once the loop ends
    menu2()
# noinspection PyTypeChecker
def menu2():
    """Difficulty menu: picks the board size n (petite=6, moyenne=5, grande=4)."""
    # Background
    gameDisplay.fill((255, 251, 234))
    # Draw the menu widgets
    gameDisplay.blit(dic['logo']['img'], (dic['logo']['x'], dic['logo']['y']))
    gameDisplay.blit(dic['petite']['img'], (dic['petite']['x'], dic['petite']['y']))
    gameDisplay.blit(dic['moyenne']['img'], (dic['moyenne']['x'], dic['moyenne']['y']))
    gameDisplay.blit(dic['grande']['img'], (dic['grande']['x'], dic['grande']['y']))
    gameDisplay.blit(dic['retour']['img'], (dic['retour']['x'], dic['retour']['y']))
    inMenu2 = True
    while inMenu2:
        global n
        # Fetch the cursor's x/y position on every pass through the loop
        mouse = pygame.mouse.get_pos()
        # Handle pending events
        for event in pygame.event.get():
            # Window close button / Escape key quit the program
            if event.type == pygame.QUIT or event.type == pygame.KEYDOWN and event.key == pygame.K_ESCAPE:
                pygame.quit()
                quit()
            # Hit-test the "Back" button
            if dic['retour']['x'] + 128 > mouse[0] > dic['retour']['x'] and dic['retour']['y'] + 45 > mouse[1] > dic['retour']['y']:
                if event.type == pygame.MOUSEBUTTONUP:
                    gameDisplay.blit(dic['retour']['img'], (dic['retour']['x'], dic['retour']['y']))
                    # Back to the main menu
                    inMenu2 = False
                    menu()
                if event.type == pygame.MOUSEBUTTONDOWN:
                    gameDisplay.blit(dic['retour']['img_pressed'], (dic['retour']['x'], dic['retour']['y']))
            else:
                gameDisplay.blit(dic['retour']['img'], (dic['retour']['x'], dic['retour']['y']))
            # Hit-test the "Petite" button (sets n = 6)
            if dic['petite']['x'] + 290 > mouse[0] > dic['petite']['x'] and dic['petite']['y'] + 72 > mouse[1] > dic['petite']['y']:
                if event.type == pygame.MOUSEBUTTONUP:
                    gameDisplay.blit(dic['petite']['img'], (dic['petite']['x'], dic['petite']['y']))
                    n = 6
                    inMenu2 = False
                if event.type == pygame.MOUSEBUTTONDOWN:
                    gameDisplay.blit(dic['petite']['img_pressed'], (dic['petite']['x'], dic['petite']['y']))
            else:
                gameDisplay.blit(dic['petite']['img'], (dic['petite']['x'], dic['petite']['y']))
            # Hit-test the "Moyenne" button (sets n = 5)
            if dic['moyenne']['x'] + 290 > mouse[0] > dic['moyenne']['x'] and dic['moyenne']['y'] + 72 > mouse[1] > dic['moyenne']['y']:
                if event.type == pygame.MOUSEBUTTONUP:
                    gameDisplay.blit(dic['moyenne']['img'], (dic['moyenne']['x'], dic['moyenne']['y']))
                    n = 5
                    inMenu2 = False
                if event.type == pygame.MOUSEBUTTONDOWN:
                    gameDisplay.blit(dic['moyenne']['img_pressed'], (dic['moyenne']['x'], dic['moyenne']['y']))
            else:
                gameDisplay.blit(dic['moyenne']['img'], (dic['moyenne']['x'], dic['moyenne']['y']))
            # Hit-test the "Grande" button (sets n = 4)
            if dic['grande']['x'] + 290 > mouse[0] > dic['grande']['x'] and dic['grande']['y'] + 72 > mouse[1] > dic['grande']['y']:
                if event.type == pygame.MOUSEBUTTONUP:
                    gameDisplay.blit(dic['grande']['img'], (dic['grande']['x'], dic['grande']['y']))
                    n = 4
                    inMenu2 = False
                if event.type == pygame.MOUSEBUTTONDOWN:
                    gameDisplay.blit(dic['grande']['img_pressed'], (dic['grande']['x'], dic['grande']['y']))
            else:
                gameDisplay.blit(dic['grande']['img'], (dic['grande']['x'], dic['grande']['y']))
        pygame.display.update()
        clock.tick(FPS)
    # Fall through to the options menu once the loop ends
    menu3()
# noinspection PyTypeChecker
def menu3():
    """Options menu: toggles the per-move / per-game time limits, then starts the game."""
    gameDisplay.fill((255, 251, 234))
    # Draw the menu widgets
    gameDisplay.blit(dic['logo']['img'], (dic['logo']['x'], dic['logo']['y']))
    gameDisplay.blit(dic['limPerCase']['disable'], (dic['limPerCase']['x'], dic['limPerCase']['y']))
    gameDisplay.blit(dic['limPerGame']['disable'], (dic['limPerGame']['x'], dic['limPerGame']['y']))
    gameDisplay.blit(texte1, (dic['limPerCase']['x'] - 375, dic['limPerCase']['y'] + 10))
    gameDisplay.blit(texte2, (dic['limPerGame']['x'] - 375, dic['limPerGame']['y'] + 10))
    gameDisplay.blit(dic['retour']['img'], (dic['retour']['x'], dic['retour']['y']))
    gameDisplay.blit(dic['start']['img'], (dic['start']['x'], dic['start']['y']))
    global limCase
    global limGame
    inMenu3 = True
    while inMenu3:
        # Fetch the cursor's x/y position on every pass through the loop
        mouse = pygame.mouse.get_pos()
        # Handle pending events
        for event in pygame.event.get():
            # Window close button / Escape key quit the program
            if event.type == pygame.QUIT or event.type == pygame.KEYDOWN and event.key == pygame.K_ESCAPE:
                pygame.quit()
                quit()
            # Hit-test the "Back" button
            if dic['retour']['x'] + 128 > mouse[0] > dic['retour']['x'] and dic['retour']['y'] + 45 > mouse[1] > dic['retour']['y']:
                if event.type == pygame.MOUSEBUTTONUP:
                    gameDisplay.blit(dic['retour']['img'], (dic['retour']['x'], dic['retour']['y']))
                    # Back to the difficulty menu
                    inMenu3 = False
                    menu2()
                if event.type == pygame.MOUSEBUTTONDOWN:
                    gameDisplay.blit(dic['retour']['img_pressed'], (dic['retour']['x'], dic['retour']['y']))
            else:
                gameDisplay.blit(dic['retour']['img'], (dic['retour']['x'], dic['retour']['y']))
            # Hit-test the "Start" button
            if dic['start']['x'] + 290 > mouse[0] > dic['start']['x'] and dic['start']['y'] + 72 > mouse[1] > dic['start']['y']:
                if event.type == pygame.MOUSEBUTTONUP:
                    gameDisplay.blit(dic['start']['img'], (dic['start']['x'], dic['start']['y']))
                    # Stop the loop so the game can begin
                    inMenu3 = False
                if event.type == pygame.MOUSEBUTTONDOWN:
                    gameDisplay.blit(dic['start']['img_pressed'], (dic['start']['x'], dic['start']['y']))
            else:
                gameDisplay.blit(dic['start']['img'], (dic['start']['x'], dic['start']['y']))
            # Hit-test the "limPerCase" toggle (per-move time limit)
            if dic['limPerCase']['x'] + 290 > mouse[0] > dic['limPerCase']['x'] and dic['limPerCase']['y'] + 72 > mouse[1] > dic['limPerCase']['y']:
                if event.type == pygame.MOUSEBUTTONDOWN:
                    if limCase:
                        limCase = False
                    else:
                        limCase = True
            if limCase:
                gameDisplay.blit(dic['limPerCase']['enable'], (dic['limPerCase']['x'], dic['limPerCase']['y']))
            else:
                gameDisplay.blit(dic['limPerCase']['disable'], (dic['limPerCase']['x'], dic['limPerCase']['y']))
            # Hit-test the "limPerGame" toggle (whole-game time limit)
            if dic['limPerGame']['x'] + 290 > mouse[0] > dic['limPerGame']['x'] and dic['limPerGame']['y'] + 72 > mouse[1] > dic['limPerGame']['y']:
                if event.type == pygame.MOUSEBUTTONDOWN:
                    if limGame:
                        limGame = False
                    else:
                        limGame = True
            if limGame:
                gameDisplay.blit(dic['limPerGame']['enable'], (dic['limPerGame']['x'], dic['limPerGame']['y']))
            else:
                gameDisplay.blit(dic['limPerGame']['disable'], (dic['limPerGame']['x'], dic['limPerGame']['y']))
        pygame.display.update()
        clock.tick(FPS)
    # Play...
    game(n, board)
palette = [
(127, 0, 255), #0 Purple
(0, 255, 255), #1 Cyan
(0, 128, 255), #2 Sky
(0, 0, 255), #3 Blue
(178, 255, 102),#4 Lime
(0, 255, 0), #5 Green
(255, 255, 0), #6 Yellow
(255, 128, 0), #7 Orange
(255, 0, 0), #8 Red
(255, 0, 255), #9 Pink
]
def cellColor(board: list, surface: pygame.Surface, coord: tuple, selected: bool):
    """Fill one board cell with its palette colour (white when selected).

    The cell size shrinks as the board grows: 128 px at n == 4, minus
    32 px per extra row/column; the grid is drawn with a 60 px margin.
    """
    margin_x, margin_y = 60, 60
    cell = 128 - 32 * (n - 4)
    value = board[coord[0]][coord[1]] % 10  # palette has 10 entries
    color = (255, 255, 255) if selected else palette[value]
    left = coord[1] * cell + margin_x
    top = coord[0] * cell + margin_y
    pygame.draw.rect(surface, color, (left, top, cell, cell), 0)
def cellValue(board: list, surface: pygame.Surface, coord: tuple, selected: bool):
    """Render a cell's number centred on that cell.

    `selected` is accepted for symmetry with cellColor but does not
    change the rendering.
    """
    cell = 128 - 32 * (n - 4)
    center_offset = 60 + cell // 2  # margin plus half a cell
    text = roboto.render(str(board[coord[0]][coord[1]]), True, (0, 0, 0))
    rect = text.get_rect()
    rect.center = (coord[1] * cell + center_offset, coord[0] * cell + center_offset)
    surface.blit(text, rect)
def displayBoard(board: list, n: int, surface: pygame.Surface):
    """Draw every cell of the board (colour + number), none highlighted."""
    for row_idx in range(len(board)):
        for col_idx in range(len(board[0])):
            cellColor(board, surface, (row_idx, col_idx), False)
            cellValue(board, surface, (row_idx, col_idx), False)
occurence = 0  # gates the one-shot animation below so it only plays once
def maxScore(n, board: list):
    """Repaint the background and show the current best cell value.

    When the best value is 10 (and the animation has not run yet) it also
    plays the airhorn sound and runs a scrolling-dorito animation, gated
    by the module-level `occurence` counter.
    """
    global occurence
    gameDisplay.fill((255, 251, 234))
    maxNumber = possibles.maxNumber(n, board)
    scoreSurface = roboto.render(str(maxNumber), True, (0, 0, 0))
    gameDisplay.blit(scoreSurface, (680, 200))
    textSurface = roboto.render('Current Score', True, (0, 0, 0))
    gameDisplay.blit(textSurface, (600, 150))
    while maxNumber == 10 and occurence == 0:
        dic['airhorn']['sound'].set_volume(0.2)
        dic['airhorn']['sound'].play()
        angle = 0
        while mlg2:
            gameDisplay.fill((255, 251, 234))
            # Update button
            displayBoard(board, n, gameDisplay)
            gameDisplay.blit(textSurface, (600, 150))
            gameDisplay.blit(scoreSurface, (680, 200))
            angle += 10
            blit(angle)
            if dic['doritos']['y'] > 500 and occurence < 1:
                angle = 0
                dic['doritos']['y'] = 1
                occurence += 1
            blit(angle)
            if occurence >= 1 and dic['doritos']['y'] > 600:
                break
            dic['doritos']['y'] += 20
            pygame.display.update()
            clock.tick(20)
        break
def game(n, board):
    """Main gameplay loop: clicks, merges, timers and the game-over screen.

    A first click on a cell highlights its connected group; a second click
    on the same cell merges the group (merge.modification + merge.gravity).
    Optional per-move and per-game countdowns end the round when they
    expire. When no moves are left the Restart/Quit screen is shown.
    """
    gameDisplay.fill((255, 251, 234))
    global saved
    if not saved:
        board = bases.newBoard(n, proba)
    doubleclick = 0
    click = []
    maxScore(n, board)
    displayBoard(board, n, gameDisplay)
    InGame2 = possibles.playableCase(n, board)
    counterGame, textGame = 300, str(300)
    counterCase, textCase = 10, str(10)
    pygame.time.set_timer(pygame.USEREVENT, 1000)
    inGame = True
    while inGame:
        while InGame2:
            InGame2 = possibles.playableCase(n, board)
            mouse = pygame.mouse.get_pos()
            # Handle pending events
            for event in pygame.event.get():
                # Window close button / Escape key quit the program
                if event.type == pygame.QUIT or event.type == pygame.KEYDOWN and event.key == pygame.K_ESCAPE:
                    pygame.quit()
                    quit()
                # Whole-game countdown (USEREVENT fires once per second)
                if limGame:
                    if event.type == pygame.USEREVENT:
                        counterGame -= 1
                        if counterGame >= 0:
                            textGame = str(counterGame)
                        else:
                            gameDisplay.blit(dic['back']['img'], (dic['back']['x'], dic['back']['y']))
                            InGame2 = False
                    else:
                        gameDisplay.blit(dic['back']['img'], (dic['back']['x'], dic['back']['y']))
                    gameDisplay.blit(roboto.render(textGame + ' sec', True, (0, 0, 0)), (600, 32))
                # Per-move countdown
                if limCase:
                    if event.type == pygame.USEREVENT:
                        counterCase -= 1
                        if counterCase >= 0:
                            textCase = str(counterCase)
                        else:
                            gameDisplay.blit(dic['back2']['img'], (dic['back2']['x'], dic['back2']['y']))
                            InGame2 = False
                    else:
                        gameDisplay.blit(dic['back2']['img'], (dic['back2']['x'], dic['back2']['y']))
                    gameDisplay.blit(roboto.render(textCase + ' sec', True, (0, 0, 0)), (600, 72))
                # Hit-test every cell against the mouse position
                for colonne in range(len(board)):
                    for ligne in range(len(board)):
                        if (ligne*(128-32*(n-4)))+60 + (128-32*(n-4)) > mouse[1] > (ligne*(128-32*(n-4)))+60 and (colonne*(128-32*(n-4)))+60 + (128-32*(n-4)) > mouse[0] > (colonne*(128-32*(n-4)))+60:
                            if event.type == pygame.MOUSEBUTTONDOWN:
                                click.append((ligne, colonne))
                                if possibles.possessAdjacent(n, board, ligne, colonne):
                                    current = (ligne, colonne)
                                    listeAdja = [current]
                                    merge.propagation(n, board, current, listeAdja)
                                    # Highlight the connected group
                                    for elem in range(len(listeAdja)):
                                        cellColor(board, gameDisplay, (listeAdja[elem][0], listeAdja[elem][1]), True)
                                        cellValue(board, gameDisplay, (listeAdja[elem][0], listeAdja[elem][1]), True)
                                    doubleclick += 1
                                    try:
                                        # Second click on the same cell: merge the group
                                        if click[0][0] == click[1][0] and click[0][1] == click[1][1] and doubleclick == 2:
                                            merge.modification(n, board, listeAdja)
                                            merge.gravity(n, board, proba)
                                            counterCase, textCase = 10, str(10)
                                            maxScore(n, board)
                                        else:
                                            # Different cell: drop the highlight
                                            for elem in range(len(listeAdja)):
                                                cellColor(board, gameDisplay, (listeAdja[elem][0], listeAdja[elem][1]), False)
                                                cellValue(board, gameDisplay, (listeAdja[elem][0], listeAdja[elem][1]), False)
                                        doubleclick = 0
                                        click = []
                                        displayBoard(board, n, gameDisplay)
                                        if limGame:
                                            gameDisplay.blit(roboto.render(textGame + ' sec', True, (0, 0, 0)), (600, 32))
                                        if limCase:
                                            gameDisplay.blit(roboto.render(textCase + ' sec', True, (0, 0, 0)), (600, 72))
                                    except IndexError: pass
            pygame.display.update()
            clock.tick(FPS)
        # Lost? Offer Restart or Quit
        gameDisplay.blit(dic['restart']['img'], (dic['restart']['x'], dic['restart']['y']))
        gameDisplay.blit(dic['quitter2']['img'], (dic['quitter2']['x'], dic['quitter2']['y']))
        dic['lose']['sound'].set_volume(0.2)
        dic['lose']['sound'].play()
        mouse = pygame.mouse.get_pos()
        # Handle pending events
        for event in pygame.event.get():
            # Window close button quits the program
            if event.type == pygame.QUIT:
                pygame.quit()
                quit()
            # Hit-test the "Restart" button
            if dic['restart']['x'] + 148 > mouse[0] > dic['restart']['x'] and dic['restart']['y'] + 37 > mouse[1] > dic['restart']['y']:
                if event.type == pygame.MOUSEBUTTONUP:
                    gameDisplay.blit(dic['restart']['img'], (dic['restart']['x'], dic['restart']['y']))
                    dic['lose']['sound'].stop()
                    game(n, bases.newBoard(n, proba))  # note: restarts recursively
                if event.type == pygame.MOUSEBUTTONDOWN:
                    gameDisplay.blit(dic['restart']['img_pressed'], (dic['restart']['x'], dic['restart']['y']))
            else:
                gameDisplay.blit(dic['restart']['img'], (dic['restart']['x'], dic['restart']['y']))
            # Hit-test the "Quit" button (back to the main menu)
            if dic['quitter2']['x'] + 148 > mouse[0] > dic['quitter2']['x'] and dic['quitter2']['y'] + 37 > mouse[1] > dic['quitter2']['y']:
                if event.type == pygame.MOUSEBUTTONUP:
                    gameDisplay.blit(dic['quitter2']['img'], (dic['quitter2']['x'], dic['quitter2']['y']))
                    dic['lose']['sound'].stop()
                    saved = False
                    menu()
                if event.type == pygame.MOUSEBUTTONDOWN:
                    gameDisplay.blit(dic['quitter2']['img_pressed'], (dic['quitter2']['x'], dic['quitter2']['y']))
            else:
                gameDisplay.blit(dic['quitter2']['img'], (dic['quitter2']['x'], dic['quitter2']['y']))
        pygame.display.update()
        clock.tick(FPS)
# Start the function that shows the main menu
menu()
# Shut the process down
pygame.quit()
quit()
| {"/merge.py": ["/bases.py"], "/justGetTenGUI.py": ["/bases.py", "/possibles.py", "/merge.py"]} |
62,015 | jvallee/Battleship | refs/heads/master | /Battleship/Game.py | from Models.Player import *
from Models.Fleet import *
from package1.Order import *
from package1.Ship import *
class Game(object):
    """Coordinates a battleship match: boards, players, fleets and the turn loop.

    NOTE(review): players/fleets/gameover/fleetsandcoordinates are class-level
    mutable attributes, so they are shared by every Game instance. Fine for a
    single-game script, but they should move into an __init__ if multiple
    games are ever created.
    """
    players = []                 # all Player objects in the match
    fleets = {}                  # player name -> Fleet
    gameover = False
    fleetsandcoordinates = {}    # player name -> {(x, y) -> Ship}

    def initGame(self):
        """Set up boards, players and fleets; tell each player who its opponents are."""
        self.initBoard()
        self.makePlayers()
        self.makeFleets()
        p: Player
        playerset = set(self.players)
        for p in self.players:
            p.postPlayers(playerset)
        self.placePlayersFleets()

    def initBoard(self):
        """Create two empty 20x20 boards ('0' marks an empty/unknown cell)."""
        # Build with '0' directly instead of filling with ints and overwriting.
        self.computerboard = [['0' for x in range(20)] for y in range(20)]
        self.playerboard = [['0' for x in range(20)] for y in range(20)]

    def printBoard(self):
        """Print the computer-tracking board followed by the user's own board."""
        print('\n\n\n\n\n\n\n\n')
        for row in self.computerboard:
            print(row)
        print('\n\n\n Your\n')
        for row in self.playerboard:
            print(row)

    def makePlayers(self):
        """Create one AI player and one interactive player."""
        self.players.append(Player("Player1"))
        self.players.append(userPlayer("Player2"))

    def makeFleets(self):
        """Ask every player for its fleet and index the fleets by player name."""
        p: Player
        for p in self.players:
            p.getFleet()
            self.fleets[p.name] = p.f  # need to be able to handle name collisions

    def placePlayersFleets(self):
        """Place every fleet's ships onto the per-player coordinate maps."""
        print("Placing fleets")
        for fname in self.fleets:
            self.fleetsandcoordinates[fname] = {}
            self.placefleet(self.fleets[fname])

    def placefleet(self, fleet: "Fleet"):
        """Place each ship of one fleet and initialise its damage track."""
        s: Ship
        for s in fleet.GetShips():
            self.placeship(s, fleet.name)
            s.initializeDamage()
        print("Ships placed \n\n\n\n\n\n\n")

    def placeship(self, ship: "Ship", name: str):
        """Register every cell a ship occupies; raise on overlap or off-board cells."""
        x = ship.position[0]
        y = ship.position[1]
        playerships = self.fleetsandcoordinates[name]
        for i in range(0, ship.length):
            # Walk the hull cell by cell along the ship's orientation.
            if ship.shipOrientation == Orientation.Horizantal:
                c = (x + i, y)
            elif ship.shipOrientation == Orientation.Vertical:
                c = (x, y + i)
            else:
                print("Why are we here?")
            if c in playerships:
                raise Exception("Ship already here")
            if c[0] < 0 or c[0] > 19 or c[1] < 0 or c[1] > 19:
                raise Exception("Ship is outside of battlefield")
            else:
                playerships[c] = ship
                print(type(ship), " placed at coordinate ", c[0], c[1])
                # Only the human player's own ships are echoed on their board.
                if not ship.fleetName == 'Player1':
                    self.playerboard[c[1]][c[0]] = 'S'

    def startGame(self):
        """Run the turn loop until one player survives, then announce the winner."""
        while not self.gameover:
            p: Player
            print('\n\n\n\n')
            self.printBoard()
            for p in self.players:
                if p.isSunk == True:
                    continue  # sunk players no longer get a turn
                print("\n\n")
                o = p.getNextMove()
                o.orderfrom = p.name
                self.ExecuteOrder(o)
                self.isGameOver()
                if self.gameover:
                    break
        for p in self.players:
            if not p.isSunk == True:
                print(p.name, " has won")

    def ExecuteOrder(self, o):
        """Dispatch an order to the matching executor (attack / broadcast / recon)."""
        if o.ordertype == OrderType.Attack:
            self.ExecuteAttack(o)
        elif o.ordertype == OrderType.Broadcast:
            print("Data is ", o.message)
        elif o.ordertype == OrderType.Recon:
            self.ExecuteRecon(o)  # fix: was a bare ExecuteRecon(o) -> NameError
        else:
            raise Exception("Should not be here, ordertype")  # fix: 'Excepetion' typo
        print("Executing Order")

    def ExecuteAttack(self, o: "Order"):
        """Resolve an attack: mark ship damage on a hit and update the tracking board."""
        print("attack")
        if o.coordinates in self.fleetsandcoordinates[o.attacking]:
            ship: "Ship"
            ship = self.fleetsandcoordinates[o.attacking][o.coordinates]
            # Work out which hull cell was struck from the offset along the ship.
            if o.coordinates[0] == ship.position[0]:
                offset = o.coordinates[1] - ship.position[1]
            else:
                offset = o.coordinates[0] - ship.position[0]
            ship.damage[offset] = True
            o.orderresult = OrderResult.Shiphit
            if o.attacking == 'Player1':
                self.computerboard[o.coordinates[1]][o.coordinates[0]] = '*'
            print(o.orderfrom, " hit ", o.attacking, " at ", o.coordinates)
        else:
            print(o.orderfrom, " missed ", o.attacking, " at ", o.coordinates)
            o.orderresult = OrderResult.Shipmiss
            if o.attacking == 'Player1':
                self.computerboard[o.coordinates[1]][o.coordinates[0]] = 'X'

    def ExecuteRecon(self, o):
        """Recon is not supported in this iteration of battleship."""
        print("Executing Recon")

    def isGameOver(self):
        """Mark freshly-sunk players and flag game over when at most one remains."""
        p: Player
        nonSunkPlayers = 0
        for p in self.players:
            if not p.isSunk:
                if p.f.isSunk():
                    p.isSunk = True
                else:
                    nonSunkPlayers += 1
        if nonSunkPlayers == 1:
            self.gameover = True
        elif nonSunkPlayers < 1:
            raise Exception("Should not be here")
| {"/Battleship/package1/Ships.py": ["/Battleship/package1/Ship.py"]} |
62,016 | jvallee/Battleship | refs/heads/master | /Battleship/package1/Ship.py | from enum import Enum
class Orientation(Enum):
    """Axis along which a ship is laid out on the board."""
    Vertical = 1
    Horizantal = 2  # (sic) original spelling kept — referenced throughout the codebase
class Ship(object):
    """Base class for all ships: tracks position, orientation and per-cell damage.

    Subclasses define `length` and `name`. `position` is the (x, y) of the
    ship's head; the hull extends `length` cells along `shipOrientation`.
    """
    def __init__(self, x = 0, y = 0, fleetName = ""):
        self.position = (x, y)
        self.shipOrientation = Orientation.Vertical  # default; callers may override
        self.fleetName = fleetName
    def initializeDamage(self):
        """Reset the damage track: one False (undamaged) flag per hull cell."""
        self.damage = [False] * self.length
    def isSunk(self):
        """Return True when every hull cell has been hit (idiomatic all())."""
        return all(self.damage)
| {"/Battleship/package1/Ships.py": ["/Battleship/package1/Ship.py"]} |
62,017 | jvallee/Battleship | refs/heads/master | /Battleship/package1/Ships.py | from .Ship import *
class AircraftCarrier(Ship):
    """Aircraft carrier: 5 cells long, the longest ship."""
    length = 5
    name = 'AircraftCarrier'
class Battleship(Ship):
    """Battleship: 4 cells long."""
    length = 4
    name = 'Battleship'
class Destroyer(Ship):
    """Destroyer: 4 cells long."""
    length = 4
    name = 'Destroyer'
class PTBoat(Ship):
    """PT boat: 2 cells long, the smallest ship."""
    length = 2
    name = 'PTBoat'
class Submarine(Ship):
    """Submarine: 3 cells long."""
    length = 3
    name = 'Submarine'
| {"/Battleship/package1/Ships.py": ["/Battleship/package1/Ship.py"]} |
62,018 | jvallee/Battleship | refs/heads/master | /Battleship/Models/Fleet.py | from package1 import *
from package1.Ship import *
class Fleet(object):
    """The five-ship fleet belonging to one player.

    Ships are created at hard-coded default positions; interactive players
    overwrite position/orientation before placement (see userPlayer).
    """
    def __init__(self, name):
        # Default placements; carrier, battleship and destroyer start horizontal,
        # the PT boat and submarine keep the Ship default (vertical).
        self.aircraftcarrier = Ships.AircraftCarrier(15, 0, name)
        self.aircraftcarrier.shipOrientation = Orientation.Horizantal
        self.battleship = Ships.Battleship(0, 10, name)
        self.battleship.shipOrientation = Orientation.Horizantal
        self.destroyer = Ships.Destroyer(3, 4, name)
        self.destroyer.shipOrientation = Orientation.Horizantal
        self.ptboat = Ships.PTBoat(17, 16, name)
        self.submarine = Ships.Submarine(16, 15, name)
        self.name = name
    def GetShips(self):
        """Return the five ships as a fresh list (append chain replaced by a literal)."""
        return [
            self.aircraftcarrier,
            self.battleship,
            self.destroyer,
            self.ptboat,
            self.submarine,
        ]
    def isSunk(self):
        """Return True when every ship in the fleet is sunk (idiomatic all())."""
        return all(ship.isSunk() for ship in self.GetShips())
| {"/Battleship/package1/Ships.py": ["/Battleship/package1/Ship.py"]} |
62,019 | jvallee/Battleship | refs/heads/master | /Battleship/package1/__init__.py | __all__ = ["Order", "Ships", "Coordinate"]
| {"/Battleship/package1/Ships.py": ["/Battleship/package1/Ship.py"]} |
62,020 | jvallee/Battleship | refs/heads/master | /Battleship/Models/Player.py | from package1 import *
from package1.Order import *
from package1.Ship import *
from Models.Fleet import *
import random
class Player(object):
    """A computer-controlled player that sweeps the board left-to-right, top-to-bottom."""
    f: Fleet            # this player's fleet (set by getFleet)
    players: set        # names of opposing players (set by postPlayers)
    isSunk: bool        # True once the whole fleet is destroyed
    lastcoor = (-1, 0)  # last cell fired at; (-1, 0) so the first shot is (0, 0)

    def __init__(self, name, URI = None):
        self.name = name
        self.URI = URI
        self.isSunk = False

    def getNextMove(self):
        """Return an attack Order on the next cell of the sweep pattern."""
        order = Order.Order()
        next_x = self.lastcoor[0] + 1
        next_y = self.lastcoor[1]
        if next_x >= 20:  # wrap to the start of the next row
            next_x = 0
            next_y += 1
        self.lastcoor = (next_x, next_y)
        order.coordinates = self.lastcoor
        order.attacking = self.players[0]
        return order

    def postPlayers(self, players: set):
        """Record the names of all other players as potential targets."""
        self.players = [other.name for other in players if not other.name == self.name]

    def getFleet(self):
        """Create, remember and return this player's fleet."""
        self.f = Fleet(self.name)
        self.f.name = self.name
        return self.f
class userPlayer(Player):
    """Human player: every decision comes from console prompts."""
    def __init__(self, name, URI = None):
        # NOTE(review): the `name` parameter is ignored -- the name is
        # prompted for instead, and URI is never stored.
        self.name = input("What is your name?\n")
        self.isSunk = False
    def getShipPlacemnet(self, ship):
        """Prompt for one ship's coordinates and orientation until the
        input parses, then apply them to `ship`."""
        while True:
            try:
                print("\n Placing ", ship.name, " of length ", ship.length)
                x = int(input(" enter X Coordinate:\n "))
                y = int(input(" enter Y Coordinate:\n "))
                orientation = input(" enter orientation (h or v):\n ")
                if orientation not in ['v','h']:
                    raise Exception(" orientation has to be 'v' for vertical or 'h' for horizantal")
                break
            except Exception as inst:
                print("Issue here with try again")
                print(inst)
        ship.position = (x, y)
        if orientation == 'v':
            ship.shipOrientation = Orientation.Vertical
        elif orientation == 'h':
            ship.shipOrientation = Orientation.Horizantal
    def getFleet(self):
        """Build the fleet, asking the user to place every ship."""
        self.f = Fleet(self.name)
        self.f.name = self.name
        print("Getting ready to place your Fleet")
        for ship in self.f.GetShips():
            self.getShipPlacemnet(ship)
        return self.f
    def getNextMove(self):
        """Prompt for attack coordinates and wrap them in an Order."""
        # add try catch
        order = Order.Order()
        while True:
            try:
                x = int(input("please enter the X coordinate of where you would like to attack: \n"))
                y = int(input("please enter the Y coordinate of where you would like to attack: \n"))
                #x = random.randint(0,19)
                #y = random.randint(0,19)
                break
            except:
                print("try again")
        # NOTE(review): only the lower bound is checked, and the order is
        # still issued with the out-of-board coordinates -- confirm intent.
        if x < 0 or y < 0:
            print("Shooting outside the game board, sorry that's your turn")
        order.coordinates = (x,y)
        order.attacking = "Player1"
        return order
| {"/Battleship/package1/Ships.py": ["/Battleship/package1/Ship.py"]} |
62,021 | jvallee/Battleship | refs/heads/master | /Battleship/Battleship.py | #from Models.Ship import *
#from Models.PTBoat import *
from Models.Fleet import *
from Models.Player import *
from Game import *
# Entry point: build the game, let players place fleets, then run the loop.
game = Game()
game.initGame()
game.startGame()
print("made it") | {"/Battleship/package1/Ships.py": ["/Battleship/package1/Ship.py"]} |
62,022 | jvallee/Battleship | refs/heads/master | /Battleship/package1/Order.py | from enum import Enum
class OrderType(Enum):
    """Kinds of orders a player can issue on their turn."""
    Attack = 1
    Recon = 2
    Broadcast = 3
class OrderResult(Enum):
    """Outcome of an executed order; notexecuted is the initial value."""
    notexecuted = 0
    Shiphit = 1
    Shipmiss = 2
    Shipfound = 3
    shipnotfound = 4
    Shipsunk = 5
class Order(object):
    """A single player action plus the result filled in after execution.

    These class-level defaults are overwritten per instance by the game
    loop.  (Fix: `orderfrom` was declared twice; the duplicate is gone.)
    """
    coordinates = (-1,-1)              # target cell; (-1, -1) means "unset"
    orderfrom = ""                     # issuing player's name
    ordertype = OrderType.Attack
    attacking = ""                     # name of the player being targeted
    message = ""                       # free-text payload for Broadcast orders
    orderresult = OrderResult.notexecuted
| {"/Battleship/package1/Ships.py": ["/Battleship/package1/Ship.py"]} |
62,024 | KaighnKevlin/feedme | refs/heads/master | /feedme/feeder/urls.py | from django.conf.urls import url
from . import views
# URL routes for the feeder app.
urlpatterns = [
    url(r'^$', views.index, name='index'),
    # NOTE(review): the next two patterns are unanchored (no ^...$), so
    # they match any path containing the substring -- confirm intent.
    url(r'results', views.result, name='result'),
    url(r'profile', views.profile, name='profile'),
] | {"/feedme/feeder/views.py": ["/feedme/feeder/models.py"]} |
62,025 | KaighnKevlin/feedme | refs/heads/master | /feedme/feeder/models.py | from django.db import models
# Create your models here.
class Restaurant(models.Model):
    """A restaurant row imported from the Yelp JSON dumps."""
    name = models.CharField(max_length=100)
    address = models.CharField(max_length=50)
    city = models.CharField(max_length=15)
    zip_code = models.IntegerField()
    rating = models.IntegerField()
    # '*'-separated category list; views.index/result skip the first
    # character before splitting on '*'.
    categories = models.CharField(max_length=100)
class Feedme_Users(models.Model):
    """An application user account (not referenced by the current views;
    see the commented-out code in views.profile)."""
    username = models.CharField(max_length= 100)
    email = models.CharField(max_length=20)
| {"/feedme/feeder/views.py": ["/feedme/feeder/models.py"]} |
62,026 | KaighnKevlin/feedme | refs/heads/master | /feedme/feeder/views.py | from django.shortcuts import render
from django.http import HttpResponse
from .models import Restaurant
import random
def index(request):
    """Render the landing page with every known city and category."""
    # Distinct cities across all restaurants.
    cities = {row['city'] for row in Restaurant.objects.values('city')}
    # Each categories field is '*'-separated with a leading junk char.
    categories = sorted({
        cat
        for row in Restaurant.objects.values('categories')
        for cat in row['categories'][1:].split('*')
    })
    context = {'cities': cities, 'categories': categories }
    return render(request, 'index.html', context)
def result(request):
    """Pick a random Durham restaurant and render its detail page.

    NOTE(review): the city is hard-coded even though index offers a city
    picker -- confirm whether that is intentional.
    """
    other = Restaurant.objects.filter(city="Durham")
    count = len(other)
    # Fix: randrange(count) yields [0, count); the previous
    # randint(0, count) included count and could raise IndexError.
    restaurant = other[random.randrange(count)]
    c = restaurant.categories[1:].split('*')
    context = {'restaurant': restaurant, 'count': count, 'cats': c}
    return render(request, 'result.html', context)
def profile(request):
    """Render the (currently empty) profile page."""
    #user = User.objects.all()[2]
    #context = {'user': user}
    context = {}
return render(request, 'profile.html', context) | {"/feedme/feeder/views.py": ["/feedme/feeder/models.py"]} |
62,027 | KaighnKevlin/feedme | refs/heads/master | /feedme/feeder/import_data.py | import json
from django.core.files import File
# One-off loader: reads each city's Yelp JSON dump and inserts one
# Restaurant row per entry.  Python 2 syntax (print statement); meant to
# be run via execfile() from a Django shell so Restaurant is in scope.
# NOTE(review): the four per-city blocks below are copy-pasted -- a loop
# over the file names would remove the duplication.
print "starting data import"
#execfile('feeder/import_data.py')
with open('feeder/yelp_data_durham.json') as data_file:
    data = json.load(data_file)
for rst in data:
    r = Restaurant(name=rst["name"], address=rst["address"], city=rst["city"], zip_code=rst["zip"], rating=rst["rating"], categories=rst["categories"])
    r.save()
with open('feeder/yelp_data_ch.json') as data_file:
    data = json.load(data_file)
for rst in data:
    r = Restaurant(name=rst["name"], address=rst["address"], city=rst["city"], zip_code=rst["zip"], rating=rst["rating"], categories=rst["categories"])
    r.save()
with open('feeder/yelp_data_dc.json') as data_file:
    data = json.load(data_file)
for rst in data:
    r = Restaurant(name=rst["name"], address=rst["address"], city=rst["city"], zip_code=rst["zip"], rating=rst["rating"], categories=rst["categories"])
    r.save()
with open('feeder/yelp_data_nyc.json') as data_file:
    data = json.load(data_file)
for rst in data:
    r = Restaurant(name=rst["name"], address=rst["address"], city=rst["city"], zip_code=rst["zip"], rating=rst["rating"], categories=rst["categories"])
r.save() | {"/feedme/feeder/views.py": ["/feedme/feeder/models.py"]} |
62,028 | KaighnKevlin/feedme | refs/heads/master | /feedme/myprojectenv/lib/python2.7/codecs.py | /home/asim/.pythonbrew/pythons/Python-2.7.5/lib/python2.7/codecs.py | {"/feedme/feeder/views.py": ["/feedme/feeder/models.py"]} |
62,029 | CannonLock/CAIR | refs/heads/master | /Car.py | import numpy as np
from math import *
def genMoveDict():
    """Precompute, per integer speed 0-6, every board offset reachable at
    that speed, ordered by heading angle.

    Scans the 13x13 neighbourhood around the car and buckets each offset
    [dy, dx] by its rounded Euclidean length; within a bucket, offsets
    are sorted by their atan2 angle normalised into [0, 2*pi).

    :return: list of 7 lists; element d holds the [dy, dx] offsets whose
             rounded length is d, in increasing-angle order
    """
    buckets = [[] for _ in range(7)]
    for x in range(13):
        for y in range(13):
            adjPos = np.array([6, 6]) - np.array([x, y])
            dist = round(hypot(adjPos[1], adjPos[0]))
            if dist < 7:
                angle = round(atan2(adjPos[0], adjPos[1]), 2)
                if angle < 0:
                    angle += round(2 * pi, 2)
                buckets[dist].append((angle, adjPos.tolist()))
    # sorted() replaces the original hand-rolled (tie-unstable) merge
    # sort; for equal rounded angles, insertion order is kept.
    return [[move for _, move in sorted(bucket, key=lambda item: item[0])]
            for bucket in buckets]
# Holds all information that pertains to each individual car
class Car:
moveDict = genMoveDict()
def __init__(self, position = [0,0]):
self.position = position
self.a = 0
self.v = 0
def updatePosition(self):
self.position = map(sum, [self.velocity*x for x in self.direction], self.position)
def updateEdges(self, edges):
self.edges = edges
def right(self):
if self.position[0] < self.position[1]:
if sum(self.position) == 2:
self.direction = map(sum, self.direction, [1,-1])
if self.direction[1] >= 0:
self.direction = map(sum, self.direction, [1,1])
else:
if sum(self.position) == -2:
self.direction = map(sum, self.direction, [-1, 1])
else:
self.direction = map(sum, self.direction, [-1, -1])
def left(self):
if self.position[0] < self.position[1]:
if sum(self.position) == -2:
self.direction = map(sum, self.direction, [1,-1])
if self.direction[0] < 1:
self.direction = map(sum, self.direction, [-1,-1])
else:
if sum(self.position) == 2:
self.direction = map(sum, self.direction, [-1, 1])
else:
self.direction = map(sum, self.direction, [1, 1])
def velocityUp(self):
if self.velocity < 5:
self.velocity += 1
def velocityDown(self):
if self.velocity >= 0:
self.velocity -= 1
def getPosition(self):
return self.position
def genPossMoves(self):
"""
Generates the array of all valid next moves for the input car
:param car: The car that is going to move
:return: An array of possible next moves
"""
# all v = 1 moves valid for stopped car
if self.v == 0:
return moveDict[1]
# when v > 0
positionRatio = self.a / 2 * pi
possMoves = []
# adjacent distances
for i in range(-1, 2):
if self.v + i < 0 or self.v + i > 6:
continue
elif self.v + i == 0:
possMoves.append([0, 0])
continue
currAlignment = round(positionRatio * len(moveDict[self.v + i]))
# adjacent turns
for j in range(-1, 2):
if currAlignment + j > len(moveDict[self.v + i]):
j = 0
elif currAlignment + j < 0:
j = len(moveDict[self.v + i]) - 1
possMoves.append(moveDict[self.v + i][currAlignment + j])
return possMoves
| {"/RaceTrack.py": ["/Car.py"], "/AITest.py": ["/AI.py", "/RaceTrack.py"], "/UI.py": ["/RaceTrack.py", "/AI.py"], "/test.py": ["/AI.py"], "/AI.py": ["/PriorityQueue.py"]} |
62,030 | CannonLock/CAIR | refs/heads/master | /RaceTrack.py | import numpy as np
from pygame import *
import pygame
from Car import Car
import sys
import time
goalColor = Color(168, 50, 50)
startColor = Color(26, 163, 8)
wallColor = Color(0, 0, 0)
nullColor = Color(255, 255, 255)
carColor = Color(14, 19, 161)
class RaceTrack:
    """
    This class defines the racetrack for the car to drive on
    """
    def __init__(self, size=60, scale=10):
        """Build a size x size grid and open a window at `scale` px/cell."""
        # Create clock
        self.size = size
        self.scale = scale
        # Set up the backend track
        self.goal = (2, 2)
        self.start = (size - 3, size - 3)
        # 0 = free cell, 1 = wall (see addTrackWall).
        self.track = np.zeros((size, size))
        # Set up the user side track
        pygame.init()
        self.clock = pygame.time.Clock()
        # Initialize Screen
        self.screen = self.blankScreen()
    def blankScreen(self):
        """Open (or reset) the window and draw the start/goal squares."""
        scale = self.scale
        size = self.size
        # Open a window on the screen
        screen_width = size * scale
        screen_height = size * scale
        screen = pygame.display.set_mode([screen_width, screen_height])
        screen.fill(nullColor)
        # Draw the start and the end
        screenGoal = Rect(scale, scale, scale * 3, scale * 3)
        screenStart = Rect(size * scale - (4 * scale), size * scale - (4 * scale),
                           scale * 3, scale * 3)
        draw.rect(screen, startColor, screenStart)
        draw.rect(screen, goalColor, screenGoal)
        display.flip();
        return screen
    def updateScreen(self, rect=None):
        """Flush `rect` (or everything) to the display and tick the clock."""
        display.update(rect)
        # Drain the event queue so the window stays responsive.
        pygame.event.get()
        self.clock.tick()
    def visualRectangle(self, trackCoor, size):
        """Creates a rectangle that is to scale with the visual"""
        screenPos = [(x * self.scale) - (x * self.scale) % self.scale for x in trackCoor]
        return Rect(screenPos[0], screenPos[1], self.scale * size, self.scale * size)
    def addTrackWall(self, location):
        """Mark a grid cell as wall; False if it already was one."""
        if self.track[location[0]][location[1]] == 1:
            return False
        self.track[location[0]][location[1]] = 1
        return True
    def addWall(self, position):
        """
        Adds a wall to the race track
        :param position: mouse position in screen pixels
        :return:
        """
        # Find the position in terms of the track
        trackPos = [(x // self.scale) for x in position]
        # If you do not already have a wall placed place one
        if self.addTrackWall(trackPos):
            # Add the scaled wall to the screen
            rect = self.visualRectangle(trackPos, 1)
            draw.rect(self.screen, wallColor, rect)
            self.updateScreen(rect)
    def addWalls(self):
        """
        Collects all user entered walls well their mouse is held down and adds them to the track
        :param self: The RaceTrack
        """
        # Listen only for the mouse-release event while painting walls.
        pygame.event.set_blocked(None)
        pygame.event.set_allowed(MOUSEBUTTONUP)
        running = True
        while running:
            # If the user stops holding down the mouse
            if len(pygame.event.get()):
                break
            self.addWall(mouse.get_pos())
        pygame.event.set_allowed(None)
    def clearTrack(self):
        """Drop every wall and repaint a blank board."""
        self.track = np.zeros((self.size, self.size))
        self.screen = self.blankScreen()
    def addPath(self, ai):
        """
        Adds a path to the visualization using the specified ai algorithm
        :param ai: The ai algorithm used to add the path
        """
        def numSplit(number, parts):
            """
            Splits a number into an array of size parts of roughly equal values
            Used to figure how many frames should be used for each move
            i.e. numSplit(10, 2) = [5, 5]
            :param number: The number to split
            :param parts: The # of ~parts to split the number into
            :return: The array of ~parts
            """
            div = number // parts
            return_array = [div] * parts
            rem = number % parts
            for i in range(rem):
                return_array[i] += 1
            return return_array
        # Get the path from the passed in ai
        path = ai(self)
        # Begin printing the path to the screen
        time.sleep(2)
        # Iterate through each move given by the AI
        for move in path:
            # 60 animation frames per move, spread over its cells.
            positionTime = numSplit(60, len(move))
            # Iterate through each position of the given move
            for i in range(len(move)):
                rect = self.visualRectangle(move[i], 1)
                draw.rect(self.screen, carColor, rect)
                self.updateScreen(rect)
                # Sleep for amount of move execution time
                for j in range(positionTime[i]):
                    time.sleep(.00166666666666667)
| {"/RaceTrack.py": ["/Car.py"], "/AITest.py": ["/AI.py", "/RaceTrack.py"], "/UI.py": ["/RaceTrack.py", "/AI.py"], "/test.py": ["/AI.py"], "/AI.py": ["/PriorityQueue.py"]} |
62,031 | CannonLock/CAIR | refs/heads/master | /PriorityQueue.py | import heapq as hq
import random
class PriorityQueue:
    """Min-heap keyed on f(n) with a dict for O(1) duplicate-state lookup.

    The heap may hold stale entries for states replaced by a cheaper
    duplicate; those are tagged with an 'r' key and skipped on pop().
    """

    def __init__(self):
        self.heap = []
        self.queue = {}
        self.max_len = 0
        # Pre-shuffled ids break f-value ties without comparing dicts.
        self.randomIdList = random.sample(range(10000), 10000)
        self.maxId = 10000

    def __str__(self):
        return str(self.queue)

    def getEntryNumber(self):
        """Pop a unique tie-breaker id, refilling the pool when empty."""
        if not self.randomIdList:
            self.randomIdList = random.sample(
                range(self.maxId, self.maxId + 10000), 10000)
            self.maxId += 10000
        return self.randomIdList.pop()

    def isEmpty(self):
        """True when no live state remains."""
        return not self.queue

    def enqueue(self, car_dict):
        """Insert a state dict, replacing any costlier duplicate.

        Items are dictionaries with keys:
        'state' = ((position tuple), velocity, angle), 'h' = heuristic,
        'parent' = previous state, 'g' = cost so far, 'f' = g(n) + h(n).
        """
        key = car_dict["state"]
        duplicate = self.queue.get(key)
        if duplicate is None:
            self.queue[key] = car_dict
            hq.heappush(self.heap, (car_dict['f'], self.getEntryNumber(), car_dict))
        elif duplicate["g"] > car_dict["g"]:
            # Tag the stale heap entry for lazy deletion, then insert the
            # cheaper replacement.
            del self.queue[key]
            duplicate['r'] = 1
            self.queue[key] = car_dict
            hq.heappush(self.heap, (car_dict['f'], self.getEntryNumber(), car_dict))
        # track the maximum queue length
        if len(self.queue) > self.max_len:
            self.max_len = len(self.queue)

    def pop(self):
        """Remove and return the live state with the smallest f value."""
        while True:
            _, _, candidate = hq.heappop(self.heap)
            if candidate['state'] in self.queue:
                del self.queue[candidate['state']]
                # Stale ('r'-tagged) entries are silently discarded.
                if 'r' not in candidate:
                    return candidate
| {"/RaceTrack.py": ["/Car.py"], "/AITest.py": ["/AI.py", "/RaceTrack.py"], "/UI.py": ["/RaceTrack.py", "/AI.py"], "/test.py": ["/AI.py"], "/AI.py": ["/PriorityQueue.py"]} |
62,032 | CannonLock/CAIR | refs/heads/master | /AITest.py | import unittest
from random import *
import AI as ai
from RaceTrack import *
from math import *
class TestAI(unittest.TestCase):
    """Ad-hoc checks for the priority queue and successor generation."""
    def test_queue_norm(self):
        """Duplicate states must collapse: 100 distinct states remain."""
        queue = ai.PriorityQueue()
        queue.enqueue({'h': 0, 'g': 0, 'f': 1, 'state': ((1, 0), 0, 0), 'parent': None})
        for i in range(5):
            for j in range(100):
                queue.enqueue({'h': 0, 'g' : 1, 'f' : uniform(10,40), 'state' : ((j,0),0,0), 'parent' : None})
        queue.enqueue({'h': 0, 'g': 0, 'f': 1, 'state': ((1, 0), 0, 0), 'parent': None})
        self.assertEqual(len(queue.queue), 100)
    def test_succ_states(self):
        """Smoke test: print successors over a sweep of headings."""
        size = 50
        # NOTE(review): RaceTrack.__init__ takes (size, scale); calling it
        # with (start, goal, size) looks out of date -- confirm.
        track = RaceTrack((size - 3, size - 3), (2, 2), size)
        # NOTE(review): ai.genMoveReferenceArray requires a `type` arg.
        referenceMoveArray = ai.genMoveReferenceArray()
        for i in range(1, 13):
            state = {'h': 0, 'g': 0, 'f': 1, 'state': ((1, 1), 2, (2*pi)/i - .01), 'parent': None}
            succStates = ai.findSuccessorStates(track, state, referenceMoveArray)
            for state in succStates:
                print(state)
        self.assertEqual(True, True)
if __name__ == '__main__':
unittest.main() | {"/RaceTrack.py": ["/Car.py"], "/AITest.py": ["/AI.py", "/RaceTrack.py"], "/UI.py": ["/RaceTrack.py", "/AI.py"], "/test.py": ["/AI.py"], "/AI.py": ["/PriorityQueue.py"]} |
62,033 | CannonLock/CAIR | refs/heads/master | /UI.py | from RaceTrack import *
import AI
import os
os.environ['PYGAME_HIDE_SUPPORT_PROMPT'] = "hide"
from pygame import *
import pygame
def main(size, scale):
    """Run the interactive loop for a size x size track at scale px/cell."""
    # Initialize a new track
    track = RaceTrack(size, scale)
    # Begin the user input loop
    running = True
    while running:
        # Check and process event queue
        for event in pygame.event.get():
            # On click add walls
            if event.type == pygame.MOUSEBUTTONDOWN:
                track.addWalls()
            if event.type == pygame.KEYDOWN:
                # On enter run the algorithm on the current track
                if event.__dict__['unicode'] == '\r':
                    track.addPath(AI.AStar);
                # On delete clear the board for a new run
                if event.__dict__['unicode'] == '\b':
                    track.clearTrack();
            # only do something if the event is of type QUIT
            if event.type == pygame.QUIT:
                # change the value to False, to exit the main loop
                running = False
if __name__ == '__main__':
    # Print usage instructions, then prompt for board parameters.
    print(
        "\n\n"
        "Welcome to CAIR!\n"
        "To interact with the UI upon parameter entry use the following commands:\n"
        "Click and Hold: Draw walls that the car must navigate around\n"
        "Enter: Run the pathfinding algorithm and trace the cars path\n"
        "Delete: Remove the current path and draw a new one\n\n"
        "You can choose to use custom size and scale parameters or press enter for the default.\n"
        "I have found a size of 60 to be a sweet spot where you can have many obstacles and \n"
        "sub minute runtimes on a slow laptop processor."
    )
    while True:
        # Initialize the default
        userInts = (60, 10)
        # Ask for user input
        userVariables = input(
            "\nPress Enter for default, or input size and scale in form '60 10':"
        ).split(" ")
        # Check for default and print if chosen
        if userVariables == ['']:
            print("60 10")
            break
        try:
            userInts = list(map(lambda x : int(x), userVariables))
        except:
            print("All values must be castable to integers")
            continue
        # Accept exactly two integers; otherwise re-prompt.
        if len(userInts) == 2:
            break
        elif len(userInts) != 2:
            print("Invalid # of parameters")
main(*userInts) | {"/RaceTrack.py": ["/Car.py"], "/AITest.py": ["/AI.py", "/RaceTrack.py"], "/UI.py": ["/RaceTrack.py", "/AI.py"], "/test.py": ["/AI.py"], "/AI.py": ["/PriorityQueue.py"]} |
62,034 | CannonLock/CAIR | refs/heads/master | /test.py | import numpy as np
import time
from math import *
import AI as ai
# pos to pos movement array and its visualization
def moveArray(start, end):
    """Return the cells stepped through moving start -> end (numpy pts)."""

    def splitEvenly(total, parts):
        # e.g. splitEvenly(10, 3) -> [4, 3, 3]
        quotient, remainder = divmod(total, parts)
        return [quotient + (1 if k < remainder else 0) for k in range(parts)]

    move = end - start
    pos = start.copy()
    # Step most often along the dominant axis, interleaving single steps
    # on the other axis.
    major, minor = (0, 1) if abs(move[0]) > abs(move[1]) else (1, 0)
    runs = splitEvenly(abs(move[major]), abs(move[minor]) + 1)
    path = []
    for idx, run in enumerate(runs):
        # A run of steps along the dominant axis...
        for _ in range(run):
            pos[major] += move[major] / abs(move[major])
            path.append((pos[0], pos[1]))
        # ...then, unless this was the last run, one step on the other.
        if idx > (len(runs) - 2):
            break
        pos[minor] += move[minor] / abs(move[minor])
        path.append((pos[0], pos[1]))
    return path
def moveVis(start, end):
    """Print an 11x11 grid tracing the path produced by moveArray."""
    grid = np.zeros((11, 11))
    grid[start[0]][start[1]] = 1    # origin marker
    grid[end[0]][end[1]] = 9        # destination marker (overwritten if
                                    # the path ends on it)
    for step in moveArray(start, end):
        grid[step[0]][step[1]] = 5  # traversed cell
    print(grid)
# non-relative move array and its visualization
def genMoveArr():
    """Build per-speed move offset rings sorted by angle.

    Debug twin of Car.genMoveDict / AI.genMoveReferenceArray; note the
    print() left in the final loop dumps each ring while building.
    """
    def mergeSortDict(arr):
        # In-place merge sort of single-key dicts ordered by their key
        # (the rounded angle).  NOTE(review): ties take the right-hand
        # element first, so the sort is not stable.
        if len(arr) > 1:
            mid = len(arr) // 2
            L = arr[:mid]
            R = arr[mid:]
            mergeSortDict(L)
            mergeSortDict(R)
            i = j = k = 0
            # Copy data to temp arrays L[] and R[]
            while i < len(L) and j < len(R):
                if list(L[i].keys())[0] < list(R[j].keys())[0]:
                    arr[k] = L[i]
                    i += 1
                else:
                    arr[k] = R[j]
                    j += 1
                k += 1
            # Checking if any element was left
            while i < len(L):
                arr[k] = L[i]
                i += 1
                k += 1
            while j < len(R):
                arr[k] = R[j]
                j += 1
                k += 1
    # One bucket per integer speed 0-6.
    moveArr = [[] for i in range(7)]
    for x in range(13):
        for y in range(13):
            adjPos = np.array([6, 6]) - np.array([x, y])
            if (round(hypot(adjPos[1], adjPos[0]), 0) < 7):
                d = round(hypot(adjPos[1], adjPos[0]))
                a = round(atan2(adjPos[0], adjPos[1]), 2)
                if a < 0:
                    # Normalise the heading into [0, 2*pi).
                    a = a + round(2 * pi, 2)
                moveArr[d].append({a: adjPos.tolist()})
    for d in range(7):
        mergeSortDict(moveArr[d])
        # Debug output of each sorted ring.
        print(moveArr[d])
        moveArr[d] = [list(innerDict.values())[0] for innerDict in moveArr[d]]
    return moveArr
def moveCircleVis(type, radius):
    """Print distance and angle grids for the cells on a circle.

    :param type: how a cell's distance is matched to the radius:
                 0 -> round, 1 -> ceil, 2 -> floor (anything else: no-op
                 scan, the zeroed grids are still printed)
    :param radius: circle radius in cells
    """
    diameter = radius*2 + 1
    ad = np.zeros((diameter,diameter))   # distance grid
    aa = np.zeros((diameter,diameter))   # angle grid
    ad[radius][radius] = aa[radius][radius] = 1
    circleDict = {'angle' : []}
    c = np.array([radius,radius])
    # One scan with the selected rounding rule replaces the three
    # copy-pasted branches.  Fix: the ceil/floor branches scanned a
    # hard-coded 13x13 window, which is wrong for any radius != 6.
    rounders = {0: lambda v: round(v, 0), 1: ceil, 2: floor}
    if type in rounders:
        onRing = rounders[type]
        for x in range(diameter):
            for y in range(diameter):
                adjPos = c - np.array([x, y])
                if onRing(hypot(adjPos[1], adjPos[0])) == radius:
                    d = round(hypot(adjPos[0], adjPos[1]), 2)
                    a = round(atan2(adjPos[0], adjPos[1]), 2)
                    if a < 0:
                        a = a + 2 * pi
                    circleDict[a] = (adjPos[0], adjPos[1])
                    ad[y][x] = d
                    aa[y][x] = a
    np.set_printoptions(linewidth=100, precision=2)
    print(ad, "\n", aa)
def genPossMoves(state):
    """
    Generates the array of all valid next moves for the input car state
    :param state: The state of a car that is going to move {'v':?, 'a':?, 'p':?}
    :return: An array of possible next moves

    NOTE(review): depends on the module-level `moveDict` global assigned
    under __main__ -- fails with NameError if called before that runs.
    """
    # all v = 1 moves valid for stopped car
    if state['v'] == 0:
        return moveDict[1]
    # when v > 0
    # NOTE(review): precedence makes this (a / 2) * pi rather than
    # a / (2 * pi); compare AI.genPossMoves, which divides by 2*pi.
    positionRatio = state['a'] / 2 * pi
    possMoves = []
    # adjacent distances
    for i in range(-1,2):
        if state['v'] + i < 0 or state['v'] + i > 6:
            continue
        elif state['v'] + i == 0:
            possMoves.append([0,0])
            continue
        currAlignment = round(positionRatio * len(moveDict[state['v'] + i]))
        # adjacent turns
        for j in range(-1,2):
            # NOTE(review): `>` should likely be `>=`; an index equal to
            # len() slips through and would raise IndexError.
            if currAlignment + j > len(moveDict[state['v'] + i]):
                j = 0
            elif currAlignment + j < 0:
                j = len(moveDict[state['v'] + i]) - 1
            possMoves.append(moveDict[state['v'] + i][currAlignment + j])
    return possMoves
if __name__ == '__main__':
    # Ad-hoc smoke run: build the move table, dump AI's version for
    # comparison, then visualise rings for radii 0-7.
    moveDict = genMoveArr()
    print(ai.genMoveReferenceArray(0))
    for i in range(8):
        moveCircleVis(0, i)
| {"/RaceTrack.py": ["/Car.py"], "/AITest.py": ["/AI.py", "/RaceTrack.py"], "/UI.py": ["/RaceTrack.py", "/AI.py"], "/test.py": ["/AI.py"], "/AI.py": ["/PriorityQueue.py"]} |
62,035 | CannonLock/CAIR | refs/heads/master | /AI.py | from PriorityQueue import *
import numpy as np
from math import *
"""
These functions are to make like easier
"""
def tupleAdd(t0, t1):
    """Element-wise sum of two tuples.

    :param t0: first tuple
    :param t1: second tuple; must be at least as long as t0
    :return: tuple of pairwise sums with len(t0) elements
    """
    # zip truncates to the shorter input, matching the original's
    # iteration over len(t0) for the equal-length tuples used here.
    return tuple(a + b for a, b in zip(t0, t1))
def tupleSubtract(t0, t1):
    """Element-wise difference t0 - t1 of two tuples.

    :param t0: first tuple
    :param t1: second tuple; must be at least as long as t0
    :return: tuple of pairwise differences with len(t0) elements
    """
    # zip replaces the manual index loop of the original.
    return tuple(a - b for a, b in zip(t0, t1))
def genMoveArray(start, end):
    """
    Map out every cell the car passes through moving start -> end.
    :param start: start position tuple
    :param end: end position tuple
    :return: list of positions traversed, beginning with start itself
    """

    def _split(num, parts):
        """Split num into `parts` near-equal integers summing to num."""
        quotient, remainder = divmod(num, parts)
        return [quotient + (1 if k < remainder else 0) for k in range(parts)]

    # Inlined element-wise subtraction (was a tupleSubtract call).
    move = tuple(e - s for e, s in zip(end, start))
    pos = list(start)
    # Step most often along the dominant axis, interleaving single steps
    # on the other axis.
    major, minor = (0, 1) if abs(move[0]) > abs(move[1]) else (1, 0)
    runs = _split(abs(move[major]), abs(move[minor]) + 1)
    path = [start]
    for idx, run in enumerate(runs):
        # A run of unit steps along the dominant axis...
        for _ in range(run):
            pos[major] += move[major] / abs(move[major])
            path.append((pos[0], pos[1]))
        # ...then, unless this was the final run, one step on the other.
        if idx > (len(runs) - 2):
            break
        pos[minor] += move[minor] / abs(move[minor])
        path.append((pos[0], pos[1]))
    return path
def genPossMoves(state, referenceArray):
    """
    Generates the array of all valid next moves for the input car state
    :param state: {'h': heuristic, 'g': moves to this position, 'f': g(n) + h(n),
                   'state': ((position tuple), velocity, angle), 'parent': state}
    :param referenceArray: per-speed move offsets, sorted by angle
    :return: An array of possible next moves
    """
    position, velocity, angle = state['state']
    # all velocity = 1 moves valid for stopped car
    if velocity == 0:
        return referenceArray[1]
    # Fraction of a full turn, used to index into each speed ring.
    positionRatio = angle / (2 * pi)
    possibleMoves = []
    # Consider keeping speed, slowing by 1 and speeding up by 1.
    for i in (0, -1, 1):
        newVelocity = velocity + i
        if newVelocity < 0 or newVelocity > 6:
            continue
        if newVelocity == 0:
            possibleMoves.append((0, 0))
            continue
        ring = referenceArray[newVelocity]
        # Most closely representative offset at this speed and heading.
        currentAlignment = round(positionRatio * len(ring))
        # Same heading plus one step either way, wrapping around the
        # ring.  (Fix: the original wrap check could still index one past
        # the end -- `j = 0` left `currentAlignment + j == len(ring)` --
        # and "wrapped" to a duplicate entry rather than index 0.)
        for j in (0, -1, 1):
            possibleMoves.append(ring[(currentAlignment + j) % len(ring)])
    return possibleMoves
def genMoveReferenceArray(type):
    """
    Generates the array that contains all move offsets for each velocity 0-6.
    :param type: integer-conversion rule applied to the distance when
                 testing ring membership (0 -> round, 1 -> ceil, 2 -> floor)
    :return: list of 7 lists; element d holds the (dy, dx) offsets whose
             rounded distance is d, sorted by heading angle in [0, 2*pi)
    """
    # Fix: the floor branch called floor(x, 0) -- a TypeError, since
    # math.floor takes one argument.  The three copy-pasted branches are
    # collapsed into a single scan parameterised by the rounding rule.
    toInt = {0: round, 1: ceil, 2: floor}[type]
    annotated = [[] for _ in range(7)]
    for x in range(13):
        for y in range(13):
            adjPos = np.array([6, 6]) - np.array([x, y])
            dist = hypot(adjPos[1], adjPos[0])
            d = round(dist)
            # The d < 7 guard keeps floor-mode distances in [6.5, 7),
            # whose rounded bucket would be 7, from overflowing the list.
            if toInt(dist) < 7 and d < 7:
                a = round(atan2(adjPos[0], adjPos[1]), 2)
                if a < 0:
                    # Normalise the heading into [0, 2*pi).
                    a = a + 2 * pi
                annotated[d].append((a, tuple(adjPos)))
    # sorted() replaces the hand-rolled merge sort; ties on the rounded
    # angle keep insertion order (stable sort).
    return [[offset for _, offset in sorted(ring, key=lambda item: item[0])]
            for ring in annotated]
def calcF(g, h):
    """
    Calculate the f value (A* priority) of this state
    :param g: cost of the path from the start to this state
    :param h: heuristic estimate of the remaining cost to the goal
    :return: The f value, f(n) = g(n) + h(n)
    """
    return g + h
def calcG(parentG):
    """
    Calculate the g value for this state (one move past the parent)
    :param parentG: the parent state's g value
    :return: The new state's g value
    """
    return parentG + 1
def calcH(state, goal):
    """
    Heuristic: straight-line distance to the goal divided by the best
    possible per-move distance (6.5), i.e. an optimistic move count.
    :param state: ((position tuple), velocity, angle)
    :param goal: the goal's coordinates
    :return: estimated minimum number of moves remaining
    """
    here = state[0]
    # Inlined element-wise subtraction (was a tupleSubtract call).
    dy, dx = goal[0] - here[0], goal[1] - here[1]
    return hypot(dy, dx) / 6.5
def hitsWall(currentPosition, parentPosition, track):
    """
    Does the straight move parent -> current leave the board or cross a wall?
    :param currentPosition: the move's destination
    :param parentPosition: the move's origin
    :param track: the track (size, wall grid)
    :return: True if any traversed cell is out of bounds or walled
    """
    # `or` short-circuits, so the wall grid is only indexed in bounds;
    # any() keeps the original's early exit on the first hit.
    return any(
        not (0 <= pos[0] < track.size and 0 <= pos[1] < track.size)
        or track.track[int(pos[0])][int(pos[1])] == 1
        for pos in genMoveArray(parentPosition, currentPosition)
    )
def findSuccessorStates(track, state, moveArr):
    """
    Finds all states that can follow the input
    :param track: the track (walls, goal)
    :param state: {'h': heuristic, 'g': moves to this position, 'f': g(n) + h(n),
                   'state': ((position tuple), velocity, angle), 'parent': state}
    :param moveArr: per-speed move reference array
    :return: All states that can succeed this one
    """
    succStates = []
    parentPosition = state['state'][0]
    for move in genPossMoves(state, moveArr):
        currentPosition = tupleAdd(parentPosition, move)
        # Discard moves that leave the board or cross a wall.
        if hitsWall(currentPosition, parentPosition, track):
            continue
        v = round(hypot(*move))
        a = round(atan2(move[0], move[1]), 2)
        if a < 0:
            a = a + 2 * pi   # normalise heading into [0, 2*pi)
        stateTuple = (tuple(currentPosition), v, a)
        g = 0 if state['parent'] is None else calcG(state['parent']['g'])
        h = calcH(stateTuple, track.goal)
        succStates.append({
            'state': stateTuple,
            'h': h,
            'g': g,
            'f': calcF(g, h),
            'parent': state,
        })
    return succStates
def getSolution(goalState):
    """
    Traces the path of the given state from the goal back to the start
    :param goalState: The state that ends the optimal path
    :return: list (start -> goal) of per-move cell sequences
    """
    moveList = []
    node = goalState
    # Walk parent links back to the root (the start state has no parent).
    while node['parent'] is not None:
        moveList.append(genMoveArray(node['parent']['state'][0], node['state'][0]))
        node = node['parent']
    print("Solution is ", len(moveList), " steps long!")
    moveList.reverse()
    return moveList
def AStar(track):
    """
    Uses A* search over (position, velocity, angle) states of the car
    :param track: the track data
    :return: An array of moves for the car to make to reach the goal
    :raises Exception: when every reachable state has been exhausted
    """
    # Pre-Generate the move reference array
    referenceArray = genMoveReferenceArray(0)
    frontier = PriorityQueue()
    closed = {}
    goal = track.goal
    startState = (track.start, 0, 0)
    startH = calcH(startState, track.goal)
    frontier.enqueue({
        'state': startState,
        'parent': None,
        'f': startH,   # g is 0 at the start, so f == h
        'g': 0,
        'h': startH
    })
    while not frontier.isEmpty():
        current = frontier.pop()
        closed[current['state']] = current
        if current['state'][0] == goal:
            return getSolution(current)
        for successor in findSuccessorStates(track, current, referenceArray):
            key = successor['state']
            if key in closed:
                # Re-open only when a strictly cheaper path was found.
                if closed[key]['g'] > successor['g']:
                    del closed[key]
                    frontier.enqueue(successor)
            else:
                frontier.enqueue(successor)
    raise Exception("Error: No Path Found")
| {"/RaceTrack.py": ["/Car.py"], "/AITest.py": ["/AI.py", "/RaceTrack.py"], "/UI.py": ["/RaceTrack.py", "/AI.py"], "/test.py": ["/AI.py"], "/AI.py": ["/PriorityQueue.py"]} |
62,066 | chadn4u/BetaFishClassification | refs/heads/master | /classifier.py | from prepare import load_data
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt
from sklearn.model_selection import train_test_split
# Load the pickled dataset (image arrays + integer labels) built by prepare.py.
(feature,labels) = load_data()
# Hold out 10% of the data. NOTE(review): x_test/y_test are never used below —
# the script trains and saves without evaluating; confirm this is intended.
x_train,x_test, y_train,y_test = train_test_split(feature,labels,test_size = 0.1)
# Class names; index order must match the labels produced by prepare.py.
categories = ['Black Samurai','Blue Rim','Crown Tail','Cupang Sawah','Halfmoon']
# Plain CNN: four conv + max-pool stages, then a dense head with a 5-way softmax.
input_layer = tf.keras.layers.Input([224,224,3])
conv1 = tf.keras.layers.Conv2D(filters=32,kernel_size=(5,5),padding='same',activation='relu')(input_layer)
pool1 = tf.keras.layers.MaxPooling2D(pool_size=(2,2))(conv1)
conv2 = tf.keras.layers.Conv2D(filters=64,kernel_size=(3,3),padding='same',activation='relu')(pool1)
pool2 = tf.keras.layers.MaxPooling2D(pool_size=(2,2),strides=(2,2))(conv2)
conv3 = tf.keras.layers.Conv2D(filters=96,kernel_size=(3,3),padding='same',activation='relu')(pool2)
pool3 = tf.keras.layers.MaxPooling2D(pool_size=(2,2),strides=(2,2))(conv3)
conv4 = tf.keras.layers.Conv2D(filters=96,kernel_size=(3,3),padding='same',activation='relu')(pool3)
pool4 = tf.keras.layers.MaxPooling2D(pool_size=(2,2),strides=(2,2))(conv4)
flt1 = tf.keras.layers.Flatten()(pool4)
dn1 = tf.keras.layers.Dense(512,activation='relu')(flt1)
out = tf.keras.layers.Dense(5,activation='softmax')(dn1)
model = tf.keras.Model(input_layer,out)
# sparse_categorical_crossentropy matches integer (non-one-hot) labels.
model.compile(optimizer = 'adam',loss = 'sparse_categorical_crossentropy',metrics = ['accuracy'])
x_train = np.array(x_train)
y_train = np.array(y_train)
model.fit(x_train,y_train,batch_size = 16,epochs = 10)
# Persist the trained model; path is machine-specific.
model.save('d:/Python/beta/BettaFishClassification/model/betafish.h5')
| {"/classifier.py": ["/prepare.py"]} |
62,067 | chadn4u/BetaFishClassification | refs/heads/master | /prediction.py | #from prepare import load_data
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt
from sklearn.model_selection import train_test_split
import requests
from io import BytesIO
from PIL import Image
def preprocess(img, input_size):
    """Convert *img* to RGB, resize it to *input_size*, and scale pixels into [0, 1].

    :param img: a PIL-style image (anything exposing convert()/resize())
    :param input_size: (width, height) target size
    :return: float numpy array of the resized image divided by 255
    """
    resized = img.convert('RGB').resize(input_size, resample=0)
    return np.array(resized) / 255
def reshape(imgs_arr):
    """Stack a list of image arrays into a single batch array along a new axis 0."""
    return np.stack(imgs_arr, axis=0)
# Parameters
input_size = (224,224)
#define input shape
channel = (3,)
input_shape = input_size + channel
#(feature,labels) = load_data()
#x_train,x_test, y_train,y_test = train_test_split(feature,labels,test_size = 0.1)
# Class names; index order must match the labels the model was trained with.
categories = ['Black Samurai','Blue Rim','Crown Tail','Cupang Sawah','Halfmoon']
# compile=False: inference only, no optimizer/loss state needed.
model = tf.keras.models.load_model('d:/Python/beta/BettaFishClassification/keras_model.h5',compile=False)
#model.evaluate(np.array(x_test),np.array(y_test),verbose = 1)
#prediction = model.predict(x_test)
# read image
im = Image.open('D:/Python/beta/BettaFishClassification/images.jpg')
X = preprocess(im,input_size)
X = reshape([X])
y = model.predict(X)
# Top softmax probability, expressed as a percentage string.
accuracy = str(np.max(y) * 100)
#if float(accuracy) > 90:
print( categories[np.argmax(y)], accuracy )
#else:
#    print( 'unknown '+categories[np.argmax(y)], accuracy )
#print( categories[np.argmax(y)], np.max(y) ) | {"/classifier.py": ["/prepare.py"]} |
62,068 | chadn4u/BetaFishClassification | refs/heads/master | /prepare.py | import os
import numpy as np
import matplotlib.pyplot as plt
import cv2
import pickle
# Root folder containing one sub-directory of images per category (machine-specific).
myPath = 'd:/Python/beta/BettaFishClassification/betafish/'
#categories = ['daisy', 'dandelion', 'rose', 'sunflower', 'tulip']
# Sub-directory names; a category's list index doubles as its integer class label.
categories = ['Black Samurai','Blue Rim','CrownTail','Cupang Sawah','Halfmoon']
# Accumulates [image, label] pairs; filled by make_data() below.
data = []
def make_data():
    """Scan the category folders, load every readable image, and pickle the dataset.

    Each image is converted BGR->RGB, resized to 224x224, cast to float32, and
    appended to the module-level ``data`` list as ``[image, label]``. The full
    list is then written to ``data.pickle`` in the working directory.
    """
    for category in categories:
        path = os.path.join(myPath, category)
        label = categories.index(category)
        for img_name in os.listdir(path):
            image_path = os.path.join(path, img_name)
            image = cv2.imread(image_path)
            try:
                image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
                image = cv2.resize(image, (224, 224))
                image = np.array(image, dtype=np.float32)
                data.append([image, label])
            except Exception as e:
                # Deliberate best-effort: skip unreadable/corrupt files
                # (cv2.imread returns None for them, making cvtColor raise).
                pass
    print(len(data))
    # BUG FIX: the original did `pik.close` (missing parentheses), so the
    # pickle file handle was never actually closed; `with` guarantees it.
    with open('data.pickle', 'wb') as pik:
        pickle.dump(data, pik, protocol=pickle.HIGHEST_PROTOCOL)
# Build (or rebuild) data.pickle at import time.
# NOTE(review): this runs on every `import prepare` — consider guarding it
# with `if __name__ == '__main__':`.
make_data()
def load_data():
    """Load the pickled dataset, shuffle it, and return normalized features and labels.

    :return: [feature, labels] where feature is a float32 array scaled into
             [0, 1] and labels is an integer numpy array, index-aligned.
    """
    # `with` ensures the handle is closed even if unpickling fails.
    with open('data.pickle', 'rb') as pick:
        data = pickle.load(pick)
    # In-place shuffle so train/test splits downstream are randomized.
    np.random.shuffle(data)
    feature = []
    labels = []
    for img, label in data:
        feature.append(img)
        labels.append(label)
    feature = np.array(feature, dtype=np.float32)
    # BUG FIX: the original assigned np.array(labels) to an unused variable
    # `label`, so callers received a plain Python list instead of an array.
    labels = np.array(labels)
    # Scale raw 0-255 pixel values into [0, 1].
    feature = feature / 255.0
    return [feature, labels]
62,074 | katrina-m/RecModels_Pytorch | refs/heads/master | /dao/tgat_data_loader_dgl.py | import numpy as np
from torch.utils.data import Dataset
from torch.utils.data.dataloader import DataLoader
import random
from utility.dao_helper import Graph
import pandas as pd
class GraphData(object):
    """Plain container bundling parallel temporal-edge arrays.

    All five arrays are index-aligned: element i describes one edge
    (source node, destination node, timestamp, edge type, label).
    """

    def __init__(self, src_idx_list, dst_idx_list, ts_list, e_type_list, label_list):
        self.src_idx_list = src_idx_list
        self.dst_idx_list = dst_idx_list
        self.ts_list = ts_list
        self.e_type_list = e_type_list
        self.label_list = label_list
        # Draws random (src, dst) pairs over these nodes for negative sampling.
        self.rand_sampler = RandEdgeSampler(src_idx_list, dst_idx_list)
class RandEdgeSampler(object):
    """Uniform negative-edge sampler over the distinct source/destination nodes."""

    def __init__(self, src_list, dst_list):
        # De-duplicate once up front so draws are uniform over node ids.
        self.src_list = np.unique(src_list)
        self.dst_list = np.unique(dst_list)

    def sample(self, size):
        """Return `size` random source ids and `size` random destination ids."""
        picked_src = np.random.randint(0, len(self.src_list), size)
        picked_dst = np.random.randint(0, len(self.dst_list), size)
        return self.src_list[picked_src], self.dst_list[picked_dst]
class FeatureGen():
    """Builds train/validation DataLoaders from a temporal edge table for TGAT.

    Edges are split chronologically (70% train / 15% validation / 15% test by
    timestamp quantiles). Additionally, ~10% of the nodes appearing after the
    training cutoff are masked out of training so inductive (new-node)
    performance can be evaluated on edges touching unseen nodes.
    """

    def __init__(self, uniform=True, device="cpu"):
        self.uniform = uniform
        self.device = device
        # Populated by split_data(); consumers read these to size embeddings.
        self.num_nodes = None
        self.num_relations = None

    def prepare_loader(self, g_df, batch_size, valid_batch_size):
        """Return (train, validation, new-node validation) DataLoaders.

        :param g_df: DataFrame with columns srcId, dstId, eType, label, timestamp
        :param batch_size: batch size for the training loader
        :param valid_batch_size: batch size for both validation loaders
        """
        train_graph_data, val_graph_data, test_graph_data, new_node_val_graph_data, \
            new_node_test_graph_data, train_graph, full_graph = self.split_data(g_df)
        train_dataset = TGATDataset(train_graph_data, train_graph, mode="train", device=self.device)
        val_dataset = TGATDataset(val_graph_data, full_graph, mode="valid", device=self.device)
        nn_val_dataset = TGATDataset(new_node_val_graph_data, full_graph, mode="valid_new_node", device=self.device)
        train_dataloader = DataLoader(train_dataset, batch_size=batch_size, collate_fn=train_dataset.collate_fn)
        val_dataloader = DataLoader(val_dataset, batch_size=valid_batch_size, collate_fn=val_dataset.collate_fn)
        nn_val_dataloader = DataLoader(nn_val_dataset, batch_size=valid_batch_size, collate_fn=nn_val_dataset.collate_fn)
        return train_dataloader, val_dataloader, nn_val_dataloader

    def split_data(self, g_df):
        """Chronologically split g_df into GraphData bundles and sampling graphs.

        :param g_df: DataFrame with columns srcId, dstId, eType, label, timestamp
        :return: (train, val, test, new-node val, new-node test) GraphData plus
                 the train-only Graph and the full Graph.
        """
        val_time, test_time = list(np.quantile(g_df.timestamp, [0.70, 0.85]))
        src_idx_list = g_df.srcId.values
        dst_idx_list = g_df.dstId.values
        e_type_list = g_df.eType.values
        label_list = g_df.label.values
        ts_list = g_df.timestamp.values
        total_node_set = set(np.unique(np.hstack([g_df.srcId.values, g_df.dstId.values])))
        self.num_relations = len(set(e_type_list))
        max_idx = max(src_idx_list.max(), dst_idx_list.max())
        self.num_nodes = max_idx + 1
        # Randomly select ~10% of the nodes seen after the training cutoff.
        # BUG FIX: random.sample() rejects sets on Python >= 3.11 (deprecated
        # since 3.9) — pass a sorted list, which also makes the draw
        # reproducible under a fixed seed. The size is clamped so small
        # graphs cannot raise ValueError.
        late_nodes = sorted(set(src_idx_list[ts_list > val_time]).union(set(dst_idx_list[ts_list > val_time])))
        sample_size = min(int(0.1 * self.num_nodes), len(late_nodes))
        mask_node_set = set(random.sample(late_nodes, sample_size))
        mask_src_flag = g_df.srcId.map(lambda x: x in mask_node_set).values
        mask_dst_flag = g_df.dstId.map(lambda x: x in mask_node_set).values
        # An edge is trainable only if NEITHER endpoint is a masked node.
        none_new_node_flag = (1 - mask_src_flag) * (1 - mask_dst_flag)
        train_flag = (ts_list <= val_time) * (none_new_node_flag > 0)
        train_src_list = src_idx_list[train_flag]
        train_dst_list = dst_idx_list[train_flag]
        train_ts_list = ts_list[train_flag]
        train_e_type_list = e_type_list[train_flag]
        train_label_list = label_list[train_flag]
        train_graph_data = GraphData(train_src_list, train_dst_list, train_ts_list, train_e_type_list, train_label_list)
        # Define the new-node set for testing inductiveness of the model.
        train_node_set = set(train_src_list).union(train_dst_list)
        assert (len(train_node_set - mask_node_set) == len(train_node_set))
        new_node_set = total_node_set - train_node_set
        # Select validation and test edges by time window.
        val_flag = (ts_list <= test_time) * (ts_list > val_time)
        test_flag = ts_list > test_time
        is_new_node_edge = np.array([(a in new_node_set or b in new_node_set) for a, b in zip(src_idx_list, dst_idx_list)])
        new_node_val_flag = val_flag * is_new_node_edge
        new_node_test_flag = test_flag * is_new_node_edge
        # Validation and test with all edges.
        val_src_list = src_idx_list[val_flag]
        val_dst_list = dst_idx_list[val_flag]
        val_ts_list = ts_list[val_flag]
        val_e_type_list = e_type_list[val_flag]
        val_label_list = label_list[val_flag]
        val_graph_data = GraphData(val_src_list, val_dst_list, val_ts_list, val_e_type_list, val_label_list)
        test_src_list = src_idx_list[test_flag]
        test_dst_list = dst_idx_list[test_flag]
        test_ts_list = ts_list[test_flag]
        test_e_type_list = e_type_list[test_flag]
        test_label_list = label_list[test_flag]
        test_graph_data = GraphData(test_src_list, test_dst_list, test_ts_list, test_e_type_list, test_label_list)
        # Validation and test restricted to edges with at least one new node.
        new_node_val_src_list = src_idx_list[new_node_val_flag]
        new_node_val_dst_list = dst_idx_list[new_node_val_flag]
        new_node_val_ts_list = ts_list[new_node_val_flag]
        new_node_val_e_type_list = e_type_list[new_node_val_flag]
        new_node_val_label_list = label_list[new_node_val_flag]
        new_node_val_graph_data = GraphData(new_node_val_src_list, new_node_val_dst_list, new_node_val_ts_list, new_node_val_e_type_list, new_node_val_label_list)
        new_node_test_src_list = src_idx_list[new_node_test_flag]
        new_node_test_dst_list = dst_idx_list[new_node_test_flag]
        new_node_test_ts_list = ts_list[new_node_test_flag]
        new_node_test_e_type_list = e_type_list[new_node_test_flag]
        new_node_test_label_list = label_list[new_node_test_flag]
        new_node_test_graph_data = GraphData(new_node_test_src_list, new_node_test_dst_list, new_node_test_ts_list, new_node_test_e_type_list, new_node_test_label_list)
        # Neighborhood-sampling graph over training edges only.
        train_kg = pd.DataFrame({"h": train_graph_data.src_idx_list, "t": train_graph_data.dst_idx_list, "r": train_graph_data.e_type_list, "timestamp": train_graph_data.ts_list})
        train_graph = Graph(train_kg, fan_outs=[15, 15], device=self.device)
        # Full graph with all edges, for validation and test.
        full_kg = pd.DataFrame({"h": src_idx_list, "t": dst_idx_list, "r": e_type_list, "timestamp": ts_list})
        full_graph = Graph(full_kg, fan_outs=[15, 15], device=self.device)
        return train_graph_data, val_graph_data, test_graph_data, new_node_val_graph_data, \
            new_node_test_graph_data, train_graph, full_graph
class TGATDataset(Dataset):
    """Torch Dataset over temporal edges; collate_fn adds negative samples."""

    def __init__(self, graph_data, graph, mode="train", device="cpu"):
        super().__init__()
        self.mode = mode
        self.device = device
        # Index-aligned edge arrays copied off the GraphData bundle.
        self.src_idx_list = graph_data.src_idx_list
        self.dst_idx_list = graph_data.dst_idx_list
        self.ts_list = graph_data.ts_list
        self.label_list = graph_data.label_list
        self.rand_sampler = graph_data.rand_sampler
        self.ngh_finder = graph

    def __len__(self):
        return len(self.src_idx_list)

    def __getitem__(self, index):
        """Return one (src, dst, timestamp, label) edge record."""
        return (self.src_idx_list[index],
                self.dst_idx_list[index],
                self.ts_list[index],
                self.label_list[index])

    def collate_fn(self, batch):
        """Batch edge records into arrays and draw one fake edge per real one."""
        src_arr, dst_arr, ts_arr, label_arr = zip(*batch)
        fake_src, fake_dst = self.rand_sampler.sample(len(src_arr))
        return np.array(src_arr), np.array(dst_arr), np.array(ts_arr), \
            np.array(fake_src), np.array(fake_dst)
62,075 | katrina-m/RecModels_Pytorch | refs/heads/master | /model/SASRec.py | import torch
import numpy as np
from model.BaseModel import BaseModel
from utility.components import PointWiseFeedForward
class SASRec(BaseModel):
    """Self-Attentive Sequential Recommendation model (SASRec).

    Stacks `num_blocks` of (layer-norm -> causal multi-head self-attention ->
    residual -> layer-norm -> point-wise feed-forward) over an item sequence.
    NOTE(review): `self.hidden_units`, `self.maxlen`, `self.dropout_rate`,
    `self.num_heads` and `self.device` are presumably set by BaseModel from
    `args` — confirm in BaseModel.
    """

    def __init__(self, num_user, num_item, args):
        super(SASRec, self).__init__(args)
        self.num_user = num_user
        self.num_item = num_item
        self.args = args
        # Item 0 is reserved as padding.
        self.item_emb = torch.nn.Embedding(self.num_item + 1, self.hidden_units, padding_idx=0)
        self.pos_emb = torch.nn.Embedding(self.maxlen, self.hidden_units) # TO IMPROVE
        self.dropout = torch.nn.Dropout(p=self.dropout_rate)
        self.attention_layernorms = torch.nn.ModuleList() # to be Q for self-attention
        self.attention_layers = torch.nn.ModuleList()
        self.forward_layernorms = torch.nn.ModuleList()
        self.forward_layers = torch.nn.ModuleList()
        self.last_layernorm = torch.nn.LayerNorm(self.hidden_units, eps=1e-8)
        for _ in range(args.num_blocks):
            new_attn_layernorm = torch.nn.LayerNorm(self.hidden_units, eps=1e-8)
            self.attention_layernorms.append(new_attn_layernorm)
            new_attn_layer = torch.nn.MultiheadAttention(self.hidden_units,
                                                         self.num_heads,
                                                         self.dropout_rate)
            self.attention_layers.append(new_attn_layer)
            new_fwd_layernorm = torch.nn.LayerNorm(self.hidden_units, eps=1e-8)
            self.forward_layernorms.append(new_fwd_layernorm)
            new_fwd_layer = PointWiseFeedForward(self.hidden_units, self.dropout_rate)
            self.forward_layers.append(new_fwd_layer)
        # self.pos_sigmoid = torch.nn.Sigmoid()
        # self.neg_sigmoid = torch.nn.Sigmoid()
        self.criterion = torch.nn.BCEWithLogitsLoss()

    def log2feats(self, log_seqs):
        """Encode a batch of padded item-id sequences into per-position features.

        :param log_seqs: LongTensor [batch, maxlen]; 0 marks padding.
        :return: FloatTensor [batch, maxlen, hidden_units]
        """
        seqs = self.item_emb(log_seqs)
        # Scale embeddings by sqrt(d), as in the Transformer.
        seqs *= self.item_emb.embedding_dim ** 0.5
        positions = np.tile(np.array(range(log_seqs.shape[1])), [log_seqs.shape[0], 1])
        seqs += self.pos_emb(torch.LongTensor(positions).to(self.device))
        seqs = self.dropout(seqs)
        # Zero out embeddings at padded positions.
        timeline_mask = log_seqs == 0
        seqs *= ~timeline_mask.unsqueeze(-1) # broadcast in last dim
        tl = seqs.shape[1] # time dim len for enforce causality
        # Upper-triangular True mask forbids attending to future positions.
        attention_mask = ~torch.tril(torch.ones((tl, tl), dtype=torch.bool, device=self.device))
        for i in range(len(self.attention_layers)):
            # torch MultiheadAttention expects (seq, batch, dim).
            seqs = torch.transpose(seqs, 0, 1)
            Q = self.attention_layernorms[i](seqs)
            mha_outputs, _ = self.attention_layers[i](Q, seqs, seqs,
                                                      attn_mask=attention_mask)
            # key_padding_mask=timeline_mask
            # need_weights=False) this arg do not work?
            seqs = Q + mha_outputs
            seqs = torch.transpose(seqs, 0, 1)
            seqs = self.forward_layernorms[i](seqs)
            seqs = self.forward_layers[i](seqs)
            # Re-apply padding mask after each block.
            seqs *= ~timeline_mask.unsqueeze(-1)
        log_feats = self.last_layernorm(seqs) # (U, T, C) -> (U, -1, C)
        #log_feats = log_feats[:, -1, :].unsqueeze(1)
        return log_feats

    def forward(self, user_ids, log_seqs, pos_seqs, neg_seqs): # for training
        """Score positive and negative next-items at every sequence position.

        :return: (pos_logits, neg_logits), each [batch, maxlen]
        """
        log_feats = self.log2feats(log_seqs) # user_ids hasn't been used yet
        pos_embs = self.item_emb(pos_seqs)
        neg_embs = self.item_emb(neg_seqs)
        # Dot product between the position feature and the candidate embedding.
        pos_logits = (log_feats * pos_embs).sum(dim=-1)
        neg_logits = (log_feats * neg_embs).sum(dim=-1)
        return pos_logits, neg_logits # pos_pred, neg_pred

    def predict(self, user_ids, log_seqs, item_indices): # for inference
        """Score candidate items against the final state of each sequence.

        :return: FloatTensor [batch, num_candidates]
        """
        log_feats = self.log2feats(log_seqs) # user_ids hasn't been used yet
        final_feat = log_feats[:, -1, :].unsqueeze(1) # only use last QKV classifier, a waste
        item_embs = self.item_emb(item_indices)#.squeeze(1) # (I, C)
        logits = final_feat.matmul(item_embs.transpose(1, 2))
        return logits.squeeze(1) # preds # (U, I)

    def calc_loss(self, optimizer, batch_data):
        """Run one optimization step of BCE loss over pos/neg logits.

        :param batch_data: tuple (users, sequences, positives, negatives)
        :return: the (scalar tensor) loss after the step
        """
        (u, seq, pos, neg) = batch_data
        pos_logits, neg_logits = self.forward(u, seq, pos, neg)
        pos_labels, neg_labels = torch.ones(pos_logits.shape, device=self.device), torch.zeros(
            neg_logits.shape, device=self.device)
        optimizer.zero_grad()
        # Only non-padding positions contribute to the loss.
        indices = pos != 0
        loss = self.criterion(pos_logits[indices], pos_labels[indices])
        loss += self.criterion(neg_logits[indices], neg_labels[indices])
        # L2 regularization on the item embedding table only.
        for param in self.item_emb.parameters():
            loss += self.args.l2_emb * torch.norm(param)
        loss.backward()
        optimizer.step()
        return loss

    def reset_parameters(self):
        """Xavier-initialize every parameter that supports it (others are skipped)."""
        for name, param in self.named_parameters():
            try:
                torch.nn.init.xavier_uniform_(param.data)
            except:
                pass # just ignore those failed init layers
| {"/dao/tgat_data_loader_dgl.py": ["/utility/dao_helper.py"], "/model/SASRec.py": ["/model/BaseModel.py", "/utility/components.py"], "/train/train_TGAT.py": ["/train/parse_args.py", "/dao/load_test_data.py", "/dao/tgat_data_loader_dgl.py", "/model/TGAT.py"], "/model/BaseModel.py": ["/utility/metrics.py"], "/model/SASGFRec.py": ["/model/BaseModel.py", "/utility/components.py"], "/train/train_SASRec.py": ["/train/parse_args.py", "/utility/metrics.py", "/utility/dao_helper.py", "/model/SASRec.py", "/dao/SASRec_dataloader.py", "/dao/load_test_data.py"], "/dao/SASRec_dataloader.py": ["/utility/dao_helper.py"], "/dao/SeqGFRec_dataloader.py": ["/utility/dao_helper.py"]} |
62,076 | katrina-m/RecModels_Pytorch | refs/heads/master | /train/train_TGAT.py | from train.parse_args import parse_tgat_args
from dao.load_test_data import load_data
from dao.tgat_data_loader_dgl import FeatureGen
from model.TGAT import TGAT
import torch
import os
import logging
import random
import numpy as np
from utility.log_helper import *
#os.environ['CUDA_VISIBLE_DEVICES'] = '1'
os.environ['CUDA_LAUNCH_BLOCKING'] = "1"
def train(args):
    """End-to-end TGAT training entry point: logging setup, data prep, model fit.

    :param args: parsed CLI namespace from parse_tgat_args()
    """
    #random.seed(args.seed)
    #np.random.seed(args.seed)
    #torch.manual_seed(args.seed)
    # One numbered log file per run under args.save_dir.
    log_save_id = create_log_id(args.save_dir)
    logging_config(folder=args.save_dir, name='log{:d}'.format(log_save_id), no_console=False)
    logging.info(args)
    #args.device = "cpu"
    # NOTE(review): only 5% of the edges are sampled — looks like a smoke-test
    # setting; confirm before a real training run.
    g_df = load_data("ml-1m").sample(frac=0.05)
    featureGen = FeatureGen(uniform=args.uniform, device=args.device)
    train_dataloader, val_dataloader, nn_val_dataloader = featureGen.prepare_loader(g_df, args.batch_size, args.valid_batch_size)
    model = TGAT(featureGen.num_nodes, featureGen.num_relations, args)
    optimizer = torch.optim.Adam(model.parameters(), lr=args.lr)
    model.fit(train_dataloader, val_dataloader, nn_val_dataloader, optimizer)
# Script entry point: parse CLI arguments and launch training.
if __name__ == '__main__':
    args = parse_tgat_args()
    train(args)
62,077 | katrina-m/RecModels_Pytorch | refs/heads/master | /model/TGAT.py | import torch
import numpy as np
from sklearn.metrics import roc_auc_score, f1_score, average_precision_score
import logging
from time import time
import dgl
class MergeLayer(torch.nn.Module):
    """Two-layer MLP applied to the concatenation of two input vectors."""

    def __init__(self, dim1, dim2, dim3, dim4):
        super().__init__()
        self.fc1 = torch.nn.Linear(dim1 + dim2, dim3)
        self.fc2 = torch.nn.Linear(dim3, dim4)
        self.act = torch.nn.ReLU()
        # Xavier-initialize both projection matrices.
        for layer in (self.fc1, self.fc2):
            torch.nn.init.xavier_normal_(layer.weight)

    def forward(self, x1, x2):
        """Concatenate x1 and x2 on the feature dim and run the two-layer MLP."""
        merged = torch.cat([x1, x2], dim=1)
        return self.fc2(self.act(self.fc1(merged)))
class ScaledDotProductAttention(torch.nn.Module):
    """
    Scaled Dot-Product Attention
    """

    def __init__(self, temperature, attn_dropout=0.1):
        super().__init__()
        self.temperature = temperature
        self.dropout = torch.nn.Dropout(attn_dropout)
        self.softmax = torch.nn.Softmax(dim=2)

    def forward(self, q, k, v, mask=None):
        """Return (attention-weighted values, attention weights).

        q/k/v: [n*b, L, d]; mask (optional): boolean, True = masked out.
        """
        scores = torch.bmm(q, k.transpose(1, 2)) / self.temperature
        if mask is not None:
            scores = scores.masked_fill(mask, -1e10)
        weights = self.dropout(self.softmax(scores))  # [n*b, l_q, l_k]
        return torch.bmm(weights, v), weights
class MultiHeadAttention(torch.nn.Module):
    """
    Multi-Head Attention module with residual connection and layer norm.
    """

    def __init__(self, n_head, d_model, d_k, d_v, dropout=0.1):
        super().__init__()
        self.n_head = n_head
        self.d_k = d_k
        self.d_v = d_v
        # Per-head linear projections for queries, keys and values.
        self.w_qs = torch.nn.Linear(d_model, n_head * d_k, bias=False)
        self.w_ks = torch.nn.Linear(d_model, n_head * d_k, bias=False)
        self.w_vs = torch.nn.Linear(d_model, n_head * d_v, bias=False)
        torch.nn.init.normal_(self.w_qs.weight, mean=0, std=np.sqrt(2.0 / (d_model + d_k)))
        torch.nn.init.normal_(self.w_ks.weight, mean=0, std=np.sqrt(2.0 / (d_model + d_k)))
        torch.nn.init.normal_(self.w_vs.weight, mean=0, std=np.sqrt(2.0 / (d_model + d_v)))
        self.attention = ScaledDotProductAttention(temperature=np.power(d_k, 0.5), attn_dropout=dropout)
        self.layer_norm = torch.nn.LayerNorm(d_model)
        self.fc = torch.nn.Linear(n_head * d_v, d_model)
        torch.nn.init.xavier_normal_(self.fc.weight)
        self.dropout = torch.nn.Dropout(dropout)

    def forward(self, q, k, v, mask=None):
        """Standard multi-head attention.

        :param q, k, v: FloatTensors [B, L, d_model]
        :param mask: optional boolean [B, l_q, l_k]; True = masked position
        :return: (output [B, l_q, d_model], attn [n_head*B, l_q, l_k])
        """
        d_k, d_v, n_head = self.d_k, self.d_v, self.n_head
        sz_b, len_q, _ = q.size()
        sz_b, len_k, _ = k.size()
        sz_b, len_v, _ = v.size()
        residual = q
        q = self.w_qs(q).view(sz_b, len_q, n_head, d_k)
        k = self.w_ks(k).view(sz_b, len_k, n_head, d_k)
        v = self.w_vs(v).view(sz_b, len_v, n_head, d_v)
        q = q.permute(2, 0, 1, 3).contiguous().view(-1, len_q, d_k)  # (n*b) x lq x dk
        k = k.permute(2, 0, 1, 3).contiguous().view(-1, len_k, d_k)  # (n*b) x lk x dk
        v = v.permute(2, 0, 1, 3).contiguous().view(-1, len_v, d_v)  # (n*b) x lv x dv
        # BUG FIX: the original called mask.repeat(...) unconditionally, which
        # crashed on the default mask=None (the inner attention handles None).
        if mask is not None:
            mask = mask.repeat(n_head, 1, 1)  # (n*b) x lq x lk
        output, attn = self.attention(q, k, v, mask=mask)
        output = output.view(n_head, sz_b, len_q, d_v)
        output = output.permute(1, 2, 0, 3).contiguous().view(sz_b, len_q, -1)  # b x lq x (n*dv)
        output = self.dropout(self.fc(output))
        output = self.layer_norm(output + residual)
        return output, attn
class MapBasedMultiHeadAttention(torch.nn.Module):
    ''' Multi-Head Attention module with MLP ("map") based attention weights. '''

    def __init__(self, n_head, d_model, d_k, d_v, dropout=0.1):
        super().__init__()
        self.n_head = n_head
        self.d_k = d_k
        self.d_v = d_v
        self.wq_node_transform = torch.nn.Linear(d_model, n_head * d_k, bias=False)
        self.wk_node_transform = torch.nn.Linear(d_model, n_head * d_k, bias=False)
        # BUG FIX: the value projection was sized n_head * d_k, which made the
        # later .view(sz_b, len_v, n_head, d_v) fail whenever d_k != d_v.
        self.wv_node_transform = torch.nn.Linear(d_model, n_head * d_v, bias=False)
        self.layer_norm = torch.nn.LayerNorm(d_model)
        self.fc = torch.nn.Linear(n_head * d_v, d_model)
        self.act = torch.nn.LeakyReLU(negative_slope=0.2)
        # Scores each (query, key) pair from the concatenated pair vector.
        self.weight_map = torch.nn.Linear(2 * d_k, 1, bias=False)
        torch.nn.init.xavier_normal_(self.fc.weight)
        # (the original assigned self.dropout twice; once is enough)
        self.dropout = torch.nn.Dropout(dropout)
        self.softmax = torch.nn.Softmax(dim=2)

    def forward(self, q, k, v, mask=None):
        """Map-based multi-head attention with residual + layer norm.

        :param q, k, v: FloatTensors [B, L, d_model]
        :param mask: optional boolean [B, l_q, l_k]; True = masked position
        :return: (output [B, l_q, d_model], attn [n_head*B, l_q, l_k])
        """
        d_k, d_v, n_head = self.d_k, self.d_v, self.n_head
        sz_b, len_q, _ = q.size()
        sz_b, len_k, _ = k.size()
        sz_b, len_v, _ = v.size()
        residual = q
        q = self.wq_node_transform(q).view(sz_b, len_q, n_head, d_k)
        k = self.wk_node_transform(k).view(sz_b, len_k, n_head, d_k)
        v = self.wv_node_transform(v).view(sz_b, len_v, n_head, d_v)
        q = q.permute(2, 0, 1, 3).contiguous().view(-1, len_q, d_k)  # (n*b) x lq x dk
        q = torch.unsqueeze(q, dim=2)  # [(n*b), lq, 1, dk]
        q = q.expand(q.shape[0], q.shape[1], len_k, q.shape[3])  # [(n*b), lq, lk, dk]
        k = k.permute(2, 0, 1, 3).contiguous().view(-1, len_k, d_k)  # (n*b) x lk x dk
        k = torch.unsqueeze(k, dim=1)  # [(n*b), 1, lk, dk]
        k = k.expand(k.shape[0], len_q, k.shape[2], k.shape[3])  # [(n*b), lq, lk, dk]
        v = v.permute(2, 0, 1, 3).contiguous().view(-1, len_v, d_v)  # (n*b) x lv x dv
        # BUG FIX: mask.repeat was called unconditionally even though the
        # None case is explicitly handled below — crash on default mask=None.
        if mask is not None:
            mask = mask.repeat(n_head, 1, 1)  # (n*b) x lq x lk
        # Map based attention: score each (q, k) pair with a learned MLP.
        q_k = torch.cat([q, k], dim=3)  # [(n*b), lq, lk, dk * 2]
        attn = self.weight_map(q_k).squeeze(dim=3)  # [(n*b), lq, lk]
        if mask is not None:
            attn = attn.masked_fill(mask, -1e10)
        attn = self.softmax(attn)  # [n * b, l_q, l_k]
        attn = self.dropout(attn)  # [n * b, l_q, l_k]
        # [n * b, l_q, l_k] * [n * b, l_v, d_v] >> [n * b, l_q, d_v]
        output = torch.bmm(attn, v)
        output = output.view(n_head, sz_b, len_q, d_v)
        output = output.permute(1, 2, 0, 3).contiguous().view(sz_b, len_q, -1)  # b x lq x (n*dv)
        output = self.dropout(self.act(self.fc(output)))
        output = self.layer_norm(output + residual)
        return output, attn
def expand_last_dim(x, num):
    """Append a trailing dimension of size `num`, broadcasting the existing
    values into it (returns an expanded view; no data copy)."""
    return x.unsqueeze(-1).expand(*x.size(), num)
class TimeEncode(torch.nn.Module):
    """Functional time encoding: t -> cos(t * w + phi) with learnable w, phi."""

    def __init__(self, expand_dim, factor=5):
        super(TimeEncode, self).__init__()
        self.factor = factor
        # Frequencies span 10^0 .. 10^-9 on a log scale, one per output dim.
        init_freq = torch.from_numpy(1 / 10 ** np.linspace(0, 9, expand_dim)).float()
        self.basis_freq = torch.nn.Parameter(init_freq)
        self.phase = torch.nn.Parameter(torch.zeros(expand_dim).float())

    def forward(self, ts):
        """ts: [N, L] timestamps -> harmonic encoding [N, L, expand_dim]."""
        scaled = ts.unsqueeze(-1) * self.basis_freq.view(1, 1, -1)  # [N, L, time_dim]
        return torch.cos(scaled + self.phase.view(1, 1, -1))
class PosEncode(torch.nn.Module):
    """Positional encoding: embeds each timestamp's rank within its sequence."""

    def __init__(self, expand_dim, seq_len):
        super().__init__()
        self.pos_embeddings = torch.nn.Embedding(num_embeddings=seq_len, embedding_dim=expand_dim)

    def forward(self, ts):
        """ts: [N, L] -> embedding of the argsort indices, [N, L, expand_dim]."""
        rank = ts.argsort()
        return self.pos_embeddings(rank)
class EmptyEncode(torch.nn.Module):
    """All-zero time encoding — effectively disables temporal information."""

    def __init__(self, expand_dim):
        super().__init__()
        self.expand_dim = expand_dim

    def forward(self, ts):
        """ts: [N, L] -> zero tensor of shape [N, L, expand_dim]."""
        zeros = torch.zeros_like(ts).float().unsqueeze(dim=-1)
        return zeros.expand(zeros.shape[0], zeros.shape[1], self.expand_dim)
class LSTMPool(torch.nn.Module):
    """Aggregates a temporal neighbor sequence with an LSTM, then merges with src."""

    def __init__(self, feat_dim, edge_dim, time_dim):
        super(LSTMPool, self).__init__()
        self.feat_dim = feat_dim
        self.time_dim = time_dim
        self.edge_dim = edge_dim
        # LSTM input = neighbor feature ++ edge feature ++ time encoding.
        self.att_dim = feat_dim + edge_dim + time_dim
        self.act = torch.nn.ReLU()
        self.lstm = torch.nn.LSTM(input_size=self.att_dim,
                                  hidden_size=self.feat_dim,
                                  num_layers=1,
                                  batch_first=True)
        self.merger = MergeLayer(feat_dim, feat_dim, feat_dim, feat_dim)

    def forward(self, src, src_t, seq, seq_t, seq_e, mask):
        """seq/seq_t/seq_e: [B, N, *]; src_t and mask are ignored by this pooler."""
        lstm_in = torch.cat([seq, seq_e, seq_t], dim=2)
        _, (hidden, _) = self.lstm(lstm_in)
        summary = hidden[-1, :, :]  # final hidden state of the last layer, [B, feat_dim]
        return self.merger.forward(summary, src), None
class MeanPool(torch.nn.Module):
    """Aggregates neighbors by averaging their (feature ++ edge-feature) vectors."""

    def __init__(self, feat_dim, edge_dim):
        super(MeanPool, self).__init__()
        self.edge_dim = edge_dim
        self.feat_dim = feat_dim
        self.act = torch.nn.ReLU()
        self.merger = MergeLayer(edge_dim + feat_dim, feat_dim, feat_dim, feat_dim)

    def forward(self, src, src_t, seq, seq_t, seq_e, mask):
        """seq: [B, N, D]; time encodings and mask are ignored by this pooler."""
        neighbor_mean = torch.cat([seq, seq_e], dim=2).mean(dim=1)  # [B, De + D]
        return self.merger(neighbor_mean, src), None
class AttnModel(torch.nn.Module):
    """Attention based temporal layers
    """
    def __init__(self, feat_dim, edge_dim, time_dim,
                 attn_mode='prod', n_head=2, drop_out=0.1):
        """
        args:
            feat_dim: dim for the node features
            edge_dim: dim for the temporal edge features
            time_dim: dim for the time encoding
            attn_mode: choose from 'prod' and 'map'
            n_head: number of heads in attention
            drop_out: probability of dropping a neural.
        """
        super(AttnModel, self).__init__()
        self.feat_dim = feat_dim
        self.time_dim = time_dim
        self.edge_in_dim = (feat_dim + edge_dim + time_dim)
        self.model_dim = self.edge_in_dim
        #self.edge_fc = torch.nn.Linear(self.edge_in_dim, self.feat_dim, bias=False)
        self.merger = MergeLayer(self.model_dim, feat_dim, feat_dim, feat_dim)
        #self.act = torch.nn.ReLU()
        assert(self.model_dim % n_head == 0)
        self.logger = logging.getLogger(__name__)
        self.attn_mode = attn_mode
        if attn_mode == 'prod':
            self.multi_head_target = MultiHeadAttention(n_head,
                                                        d_model=self.model_dim,
                                                        d_k=self.model_dim // n_head,
                                                        d_v=self.model_dim // n_head,
                                                        dropout=drop_out)
            self.logger.info('Using scaled prod attention')
        elif attn_mode == 'map':
            self.multi_head_target = MapBasedMultiHeadAttention(n_head,
                                                                d_model=self.model_dim,
                                                                d_k=self.model_dim // n_head,
                                                                d_v=self.model_dim // n_head,
                                                                dropout=drop_out)
            self.logger.info('Using map based attention')
        else:
            raise ValueError('attn_mode can only be prod or map')

    def forward(self, src, src_t, seq, seq_t, seq_e, mask):
        """"Attention based temporal attention forward pass
        args:
          src: float Tensor of shape [B, D]
          src_t: float Tensor of shape [B, Dt], Dt == D
          seq: float Tensor of shape [B, N, D]
          seq_t: float Tensor of shape [B, N, Dt]
          seq_e: float Tensor of shape [B, N, De], De == D
          mask: boolean Tensor of shape [B, N], where the true value indicate a null value in the sequence.
        returns:
          output, weight
          output: float Tensor of shape [B, D]
          weight: float Tensor of shape [B, N]
        """
        src_ext = torch.unsqueeze(src, dim=1)  # src [B, 1, D]
        # The query has no edge feature of its own — use a zero placeholder.
        src_e_ph = torch.zeros_like(src_ext)
        q = torch.cat([src_ext, src_e_ph, src_t], dim=2)  # [B, 1, D + De + Dt]
        k = torch.cat([seq, seq_e, seq_t], dim=2)  # [B, N, D + De + Dt]
        mask = torch.unsqueeze(mask, dim=2)  # mask [B, N, 1]
        mask = mask.permute([0, 2, 1])  # mask [B, 1, N]
        # target-attention
        output, attn = self.multi_head_target(q=q, k=k, v=k, mask=mask)  # output: [B, 1, model_dim], attn: [n*B, 1, N]
        # BUG FIX: .squeeze() with no dim also collapsed the batch dimension
        # whenever B == 1, breaking the merger below; only squeeze the
        # singleton query-length dim.
        output = output.squeeze(dim=1)
        attn = attn.squeeze(dim=1)
        output = self.merger(output, src)
        return output, attn
class TGAT(torch.nn.Module):
def __init__(self, num_node, num_relation, args):
super(TGAT, self).__init__()
self.__dict__.update(vars(args))
self.num_relations = num_relation
self.num_nodes = num_node
self.num_layers = self.num_layers
self.logger = logging.getLogger(__name__)
#self.n_feat_th = torch.nn.Parameter(torch.from_numpy(n_feat.astype(np.float32)))
#self.e_feat_th = torch.nn.Parameter(torch.from_numpy(e_feat.astype(np.float32)))
self.edge_raw_embed = torch.nn.Embedding(num_relation, self.node_dim, padding_idx=0)
# from_pretrained(self.e_feat_th, padding_idx=0, freeze=True)
self.node_raw_embed = torch.nn.Embedding(num_node, self.node_dim, padding_idx=0)
# from_pretrained(self.n_feat_th, padding_idx=0, freeze=True)
self.feat_dim = self.node_dim
self.n_feat_dim = self.feat_dim
self.e_feat_dim = self.feat_dim
self.model_dim = self.feat_dim
self.W_R = torch.nn.Parameter(torch.Tensor(self.num_relations, self.n_feat_dim, self.e_feat_dim))
self.merge_layer = MergeLayer(self.feat_dim, self.feat_dim, self.feat_dim, self.feat_dim)
if self.agg_method == 'attn':
self.logger.info('Aggregation uses attention model')
self.attn_model_list = torch.nn.ModuleList([AttnModel(self.feat_dim,
self.feat_dim,
self.feat_dim,
attn_mode=self.attn_mode,
n_head=self.num_heads,
drop_out=self.drop_out) for _ in range(self.num_layers)])
elif self.agg_method == 'lstm':
self.logger.info('Aggregation uses LSTM model')
self.attn_model_list = torch.nn.ModuleList([LSTMPool(self.feat_dim,
self.feat_dim,
self.feat_dim) for _ in range(self.num_layers)])
elif self.agg_method == 'mean':
self.logger.info('Aggregation uses constant mean model')
self.attn_model_list = torch.nn.ModuleList([MeanPool(self.feat_dim,
self.feat_dim) for _ in range(self.num_layers)])
else:
raise ValueError('invalid agg_method value, use attn or lstm')
if self.use_time == 'time':
self.logger.info('Using time encoding')
self.time_encoder = TimeEncode(expand_dim=self.node_dim)
elif self.use_time == 'pos':
assert(self.num_neighbors is not None)
self.logger.info('Using positional encoding')
self.time_encoder = PosEncode(expand_dim=self.node_dim, seq_len=self.num_neighbors)
elif self.use_time == 'empty':
self.logger.info('Using empty encoding')
self.time_encoder = EmptyEncode(expand_dim=self.node_dim)
else:
raise ValueError('invalid time option!')
self.affinity_score = MergeLayer(self.feat_dim, self.feat_dim, self.feat_dim, 1)
self.criterion = torch.nn.BCELoss()
#torch.nn.Bilinear(self.feat_dim, self.feat_dim, 1, bias=True)
def forward(self, src_idx_l, target_idx_l, cut_time_l, num_neighbors=20):
src_embed = self.tem_conv(src_idx_l, cut_time_l, self.num_layers, num_neighbors)
target_embed = self.tem_conv(target_idx_l, cut_time_l, self.num_layers, num_neighbors)
# Merge layer
score = self.affinity_score(src_embed, target_embed).squeeze(dim=-1)
return score
def contrast(self, src_idx_l, pos_idx_l, neg_idx_l, cut_time_l, num_neighbors=20):
    """Contrastive link scoring: sigmoid probabilities for positive and
    negative destination nodes against the same source embeddings.

    :param src_idx_l: source node ids.
    :param pos_idx_l: true destination node ids.
    :param neg_idx_l: sampled fake destination node ids.
    :param cut_time_l: query timestamps.
    :param num_neighbors: temporal neighbors sampled per node.
    :return: (pos_prob, neg_prob) tensors in [0, 1].
    """
    def embed(nodes):
        # All three node lists share the same cut times and depth.
        return self.tem_conv_v1(nodes, cut_time_l, self.num_layers, num_neighbors)

    anchor = embed(src_idx_l)
    pos_logit = self.affinity_score(anchor, embed(pos_idx_l)).squeeze(dim=-1)
    neg_logit = self.affinity_score(anchor, embed(neg_idx_l)).squeeze(dim=-1)
    return torch.sigmoid(pos_logit), torch.sigmoid(neg_logit)
def att_score(self, edges):
    """Per-edge attention for a DGL edge batch (the "Equation (4)" score).

    Expects edges.data to carry 'src_node_feat', 'mask' and 'type'.
    Returns the raw attention score and the masked, attention-weighted
    source features for downstream aggregation.
    """
    # Equation (4)
    src_node_feat = edges.data["src_node_feat"]
    # Destination-node embeddings looked up by their original graph ids.
    dst_node_feat = self.node_raw_embed(edges.dst[dgl.NID])
    mask = edges.data["mask"]
    # NOTE(review): the visible tail of __init__ defines self.W_R (capital R),
    # not self.W_r — confirm the attribute name; as written this looks like it
    # would raise AttributeError unless W_r is defined earlier in __init__.
    r_mul_t = torch.matmul(src_node_feat, self.W_r) # (n_edge, relation_dim)
    r_mul_h = torch.matmul(dst_node_feat, self.W_r) # (n_edge, relation_dim)
    r_embed = self.edge_raw_embed(edges.data['type']) # (1, relation_dim)
    # Bilinear score t^T tanh(h + r), batched over edges via bmm.
    att = torch.bmm(r_mul_t.unsqueeze(1), torch.tanh(r_mul_h + r_embed).unsqueeze(2)).squeeze(-1) # (n_edge, 1)
    # Zero out masked edges while weighting source features by attention.
    att_feat = mask*att*src_node_feat
    return {'att': att, 'att_feat':att_feat}
def tem_conv(self, src_idx_l, cut_time_l, curr_layers, num_neighbors=20):
    """Recursive temporal graph convolution (TGAT-style).

    Computes layer-`curr_layers` embeddings for the query nodes by
    aggregating their sampled temporal neighbors' layer-(curr_layers-1)
    embeddings with the configured attention/pooling model.

    :param src_idx_l: sequence of node ids.
    :param cut_time_l: numpy array of query timestamps — it is sliced with
        `[:, np.newaxis]` below, so a plain Python list will not work.
    :param curr_layers: remaining recursion depth; 0 returns raw embeddings.
    :param num_neighbors: temporal neighbors sampled per node.
    :return: (len(src_idx_l), feat_dim) tensor of node embeddings.
    """
    assert(curr_layers >= 0)
    batch_size = len(src_idx_l)
    src_node_batch_th = torch.LongTensor(src_idx_l).to(self.device)
    cut_time_l_th = torch.FloatTensor(cut_time_l).to(self.device)
    cut_time_l_th = torch.unsqueeze(cut_time_l_th, dim=1)
    # query node always has the start time -> time span == 0
    src_node_t_embed = self.time_encoder(torch.zeros_like(cut_time_l_th).to(self.device))
    src_node_feat = self.node_raw_embed(src_node_batch_th)
    if curr_layers == 0:
        # Base case: static node embeddings.
        return src_node_feat
    else:
        # Query nodes' own features from the previous layer.
        src_node_conv_feat = self.tem_conv(src_idx_l,
                                           cut_time_l,
                                           curr_layers=curr_layers - 1,
                                           num_neighbors=num_neighbors)
        # Sample temporal neighbors interacting before each query time.
        src_ngh_node_batch, src_ngh_eType_batch, src_ngh_t_batch = self.ngh_finder.get_temporal_neighbor(
            src_idx_l,
            cut_time_l,
            num_neighbors=num_neighbors)
        src_ngh_node_batch_th = torch.from_numpy(src_ngh_node_batch).long().to(self.device)
        src_ngh_eType_batch = torch.from_numpy(src_ngh_eType_batch).long().to(self.device)
        # Time deltas relative to each query time (neighbors lie in the past).
        src_ngh_t_batch_delta = cut_time_l[:, np.newaxis] - src_ngh_t_batch
        src_ngh_t_batch_th = torch.from_numpy(src_ngh_t_batch_delta).float().to(self.device)
        # get previous layer's node features
        src_ngh_node_batch_flat = src_ngh_node_batch.flatten() # reshape(batch_size, -1)
        src_ngh_t_batch_flat = src_ngh_t_batch.flatten() # reshape(batch_size, -1)
        src_ngh_node_conv_feat = self.tem_conv(src_ngh_node_batch_flat,
                                               src_ngh_t_batch_flat,
                                               curr_layers=curr_layers - 1,
                                               num_neighbors=num_neighbors)
        src_ngh_feat = src_ngh_node_conv_feat.view(batch_size, num_neighbors, -1)
        # get edge time features and node features
        src_ngh_t_embed = self.time_encoder(src_ngh_t_batch_th)
        src_ngn_edge_feat = self.edge_raw_embed(src_ngh_eType_batch)
        # attention aggregation; node id 0 doubles as padding, so mask it out
        mask = src_ngh_node_batch_th == 0
        attn_m = self.attn_model_list[curr_layers - 1]
        local, weight = attn_m(src_node_conv_feat,
                               src_node_t_embed,
                               src_ngh_feat,
                               src_ngh_t_embed,
                               src_ngn_edge_feat,
                               mask)
        return local
def fit(self, train_loader, val_loader, nn_val_loader, optimizer):
    """Train with contrastive (positive vs. sampled negative edge) supervision
    and validate each epoch on the full graph.

    :param train_loader: yields (src, dst, ts, src_fake, dst_fake) batches;
        its dataset carries the training-graph neighbor finder.
    :param val_loader: validation batches; its dataset carries the
        full-graph neighbor finder.
    :param nn_val_loader: validation batches restricted to new-node edges.
    :param optimizer: torch optimizer over this model's parameters.
    """
    self.to(self.device)
    self.train()
    for epoch in range(self.num_epochs):
        # tem_conv samples through self.ngh_finder, and validation below
        # swaps in the full-graph finder — so reset to the training-graph
        # finder every epoch.  (The previous version assigned self.g, which
        # nothing reads, and never restored the train finder after the first
        # validation, leaking future edges into training.)
        self.ngh_finder = train_loader.dataset.ngh_finder
        acc, ap, auc, m_loss = [], [], [], []
        n_batch = int(len(train_loader.dataset) / self.batch_size)
        total_loss = 0
        time2 = time()
        for step, batch in enumerate(train_loader):
            src_l_cut, dst_l_cut, ts_l_cut, src_l_fake, dst_l_fake = batch
            size = len(src_l_cut)
            with torch.no_grad():
                pos_label = torch.ones(size, dtype=torch.float, device=self.device)
                neg_label = torch.zeros(size, dtype=torch.float, device=self.device)
            optimizer.zero_grad()
            pos_prob, neg_prob = self.contrast(src_l_cut, dst_l_cut, dst_l_fake, ts_l_cut, self.num_neighbors)
            loss = self.criterion(pos_prob, pos_label)
            loss += self.criterion(neg_prob, neg_label)
            loss.backward()
            optimizer.step()
            # Collect training metrics without tracking gradients.
            with torch.no_grad():
                self.eval()
                pred_score = np.concatenate([pos_prob.cpu().detach().numpy(), neg_prob.cpu().detach().numpy()])
                pred_label = pred_score > 0.5
                true_label = np.concatenate([np.ones(size), np.zeros(size)])
                acc.append((pred_label == true_label).mean())
                ap.append(average_precision_score(true_label, pred_score))
                m_loss.append(loss.item())
                total_loss += loss.item()
                auc.append(roc_auc_score(true_label, pred_score))
                # Restore train mode immediately: the previous version left the
                # model in eval mode (dropout off) for the rest of the epoch.
                self.train()
            if self.verbose and step % self.print_every == 0 and step != 0:
                logging.info(
                    'Training: Epoch {:04d} Iter {:04d} / {:04d} | Time {:.1f}s | Iter Loss {:.4f} | Iter Mean '
                    'Loss {:.4f}'.format(
                        epoch, step, n_batch, time() - time2, loss.item(), total_loss / step))
                time2 = time()
        # Validation phase uses all edges observed so far (full graph).
        self.eval()
        self.ngh_finder = val_loader.dataset.ngh_finder
        val_acc, val_ap, val_f1, val_auc = self.evaluate(val_loader)
        nn_val_acc, nn_val_ap, nn_val_f1, nn_val_auc = self.evaluate(nn_val_loader)
        self.train()
        logging.info('epoch: {}:'.format(epoch))
        logging.info('Epoch mean loss: {}'.format(np.mean(m_loss)))
        logging.info('train acc: {}, val acc: {}, new node val acc: {}'.format(np.mean(acc), val_acc, nn_val_acc))
        logging.info('train auc: {}, val auc: {}, new node val auc: {}'.format(np.mean(auc), val_auc, nn_val_auc))
        logging.info('train ap: {}, val ap: {}, new node val ap: {}'.format(np.mean(ap), val_ap, nn_val_ap))
def evaluate(self, val_loader):
    """Link-prediction evaluation; returns mean (accuracy, AP, F1, AUC).

    Uses the actual length of each batch when building the label vector:
    the previous version used `val_loader.batch_size` for every batch,
    which breaks (or silently mislabels) the final, smaller batch.
    """
    val_acc, val_ap, val_f1, val_auc = [], [], [], []
    with torch.no_grad():
        for batch in val_loader:
            src_l_cut, dst_l_cut, ts_l_cut, src_l_fake, dst_l_fake = batch
            size = len(src_l_cut)  # the last batch may be short
            pos_prob, neg_prob = self.contrast(src_l_cut, dst_l_cut, dst_l_fake, ts_l_cut, self.num_neighbors)
            pred_score = np.concatenate([pos_prob.cpu().numpy(), neg_prob.cpu().numpy()])
            pred_label = pred_score > 0.5
            true_label = np.concatenate([np.ones(size), np.zeros(size)])
            val_acc.append((pred_label == true_label).mean())
            val_ap.append(average_precision_score(true_label, pred_score))
            val_f1.append(f1_score(true_label, pred_label))
            val_auc.append(roc_auc_score(true_label, pred_score))
    return np.mean(val_acc), np.mean(val_ap), np.mean(val_f1), np.mean(val_auc)
| {"/dao/tgat_data_loader_dgl.py": ["/utility/dao_helper.py"], "/model/SASRec.py": ["/model/BaseModel.py", "/utility/components.py"], "/train/train_TGAT.py": ["/train/parse_args.py", "/dao/load_test_data.py", "/dao/tgat_data_loader_dgl.py", "/model/TGAT.py"], "/model/BaseModel.py": ["/utility/metrics.py"], "/model/SASGFRec.py": ["/model/BaseModel.py", "/utility/components.py"], "/train/train_SASRec.py": ["/train/parse_args.py", "/utility/metrics.py", "/utility/dao_helper.py", "/model/SASRec.py", "/dao/SASRec_dataloader.py", "/dao/load_test_data.py"], "/dao/SASRec_dataloader.py": ["/utility/dao_helper.py"], "/dao/SeqGFRec_dataloader.py": ["/utility/dao_helper.py"]} |
62,078 | katrina-m/RecModels_Pytorch | refs/heads/master | /model/BaseModel.py | import torch
from time import time
import logging
from utility.model_helper import *
from utility.metrics import *
import abc
from tqdm import tqdm
import numpy as np
class BaseModel(torch.nn.Module):
    """Training/evaluation scaffolding shared by the recommendation models.

    Subclasses implement `update_loss`, `reset_parameters` and `predict`;
    this class supplies the epoch loop with early stopping, top-K ranking
    evaluation (`evaluate`) and CTR-style F1/AUC evaluation (`evaluate_ctr`).
    """

    def __init__(self, args):
        super(BaseModel, self).__init__()
        # Expose every parsed CLI argument (batch_size, device, lr, ...) as
        # an instance attribute.
        self.__dict__.update(vars(args))

    @abc.abstractmethod
    def update_loss(self, optimizer, batch_data):
        """Compute the loss for `batch_data`, step `optimizer`, return the loss tensor."""
        pass

    @abc.abstractmethod
    def reset_parameters(self):
        """Re-initialise all learnable parameters."""
        pass

    @abc.abstractmethod
    def predict(self, data):
        """Return prediction scores for `data`."""
        pass

    def fit(self, loader_train, loader_val, optimizer):
        """Train with periodic ranking evaluation; early stopping on recall."""
        self.reset_parameters()
        earlyStopper = EarlyStopping(self.stopping_steps, self.verbose)
        self.train().to(device=self.device)
        logging.info(self)
        epoch_start_idx = 0
        best_epoch = -1
        n_batch = int(len(loader_train.dataset) / self.batch_size)
        for epoch in range(epoch_start_idx, self.num_epochs + 1):
            time1 = time()
            total_loss = 0
            time2 = time()
            for step, batch_data in enumerate(loader_train):
                # The previous version called self.calc_loss, which this class
                # never defines; update_loss is the declared abstract hook.
                loss = self.update_loss(optimizer, batch_data)
                total_loss += loss.item()
                if self.verbose and step % self.print_every == 0 and step != 0:
                    logging.info(
                        'Training: Epoch {:04d} Iter {:04d} / {:04d} | Time {:.1f}s | Iter Loss {:.4f} | Iter Mean '
                        'Loss {:.4f}'.format(
                            epoch, step, n_batch, time() - time2, loss.item(), total_loss / step))
                    time2 = time()
            logging.info(
                'Training: Epoch {:04d} Total Iter {:04d} | Total Time {:.1f}s | Iter Mean Loss {:.4f}'.format(
                    epoch, n_batch, time() - time1, total_loss / n_batch))
            if epoch % self.evaluate_every == 0:
                time1 = time()
                self.eval()
                ndcg, recall = self.evaluate(loader_val)
                val_f1, val_auc = self.evaluate_ctr(loader_val)
                logging.info(
                    'Evaluation: Epoch {:04d} | Total Time {:.1f}s | Recall {:.4f} NDCG {:.4f}'.format(
                        epoch, time() - time1, recall, ndcg))
                # Previously F1/AUC were computed here but never reported.
                logging.info('Evaluation: Epoch {:04d} | F1 {:.4f} AUC {:.4f}'.format(epoch, val_f1, val_auc))
                earlyStopper(recall, self, self.save_dir, epoch, best_epoch)
                if earlyStopper.early_stop:
                    break
                self.train()
                adjust_learning_rate(optimizer, epoch, self.lr)

    def evaluate(self, loader):
        """Leave-one-out ranking evaluation: mean NDCG@K and hit rate@K.

        `predict` is expected to score the ground-truth item in column 0
        against sampled candidates in the remaining columns.
        """
        num_test_user = len(loader.dataset)
        NDCG = 0
        HT = 0
        with torch.no_grad():
            with tqdm(total=int(num_test_user / self.valid_batch_size + 1), desc='Evaluating Iteration') as pbar:
                for batch_input in loader:
                    # Negate so argsort ranks the highest score first.
                    predictions = -self.predict(*batch_input)
                    # Double argsort -> rank of each item; column 0 holds the
                    # rank of the ground-truth item.
                    rank_indices = torch.argsort(predictions).argsort()
                    rank_indices = rank_indices.cpu().numpy()[:, 0]
                    NDCG += np.sum((rank_indices < self.K) * (1 / np.log2(rank_indices + 2)))
                    HT += np.sum(rank_indices < self.K)
                    pbar.update(1)
        return NDCG / num_test_user, HT / num_test_user

    # ----------------------- CTR-style training / evaluation -----------------------
    def fit_ctr(self, loader_train, loader_val, optimizer):
        """Train with a pointwise CTR objective; early stopping on validation F1."""
        self.reset_parameters()
        earlyStopper = EarlyStopping(self.patience, self.verbose)
        self.train().to(device=self.device)
        logging.info(self)
        best_epoch = -1
        epoch_start_idx = 0
        n_batch = int(len(loader_train.dataset) / self.batch_size)
        for epoch in range(epoch_start_idx, self.num_epochs + 1):
            time1 = time()
            total_loss = 0
            time2 = time()
            for step, batch_data in enumerate(loader_train):
                optimizer.zero_grad()
                batch_feature, batch_labels = batch_data
                logits = self.predict(batch_feature)
                loss = self.criterion(logits, batch_labels)
                loss.backward()
                optimizer.step()
                total_loss += loss.item()
                if self.verbose and step % self.print_every == 0 and step != 0:
                    logging.info(
                        'Training: Epoch {:04d} Iter {:04d} / {:04d} | Time {:.1f}s | Iter Loss {:.4f} | Iter Mean '
                        'Loss {:.4f}'.format(
                            epoch, step, n_batch, time() - time2, loss.item(), total_loss / step))
                    time2 = time()
            logging.info(
                'Training: Epoch {:04d} Total Iter {:04d} | Total Time {:.1f}s | Iter Mean Loss {:.4f}'.format(
                    epoch, n_batch, time() - time1, total_loss / n_batch))
            if epoch % self.evaluate_every == 0:
                time1 = time()
                self.eval()
                # evaluate_ctr returns (F1, AUC); the old code unpacked them as
                # (accuracy, f1) — shadowing sklearn's f1_score in the process —
                # and logged misleading labels.
                val_f1, val_auc = self.evaluate_ctr(loader_val)
                logging.info(
                    'Evaluation: Epoch {:04d} | Total Time {:.1f}s | F1 {:.4f} AUC {:.4f}'.format(
                        epoch, time() - time1, val_f1, val_auc))
                earlyStopper(val_f1, self)
                if earlyStopper.early_stop:
                    # NOTE(review): EarlyStopping is invoked with different
                    # signatures here and in fit(); confirm save_checkpoint's
                    # expected arguments against utility.model_helper.
                    earlyStopper.save_checkpoint(val_f1, self, self.save_dir, epoch, best_epoch)
                    best_epoch = epoch
                    # Stop once early stopping triggers; the previous version
                    # kept training after saving the checkpoint.
                    break
                self.train()

    def evaluate_ctr(self, loader_val):
        """Mean F1 and ROC-AUC over `loader_val`.

        `predict` returns logits whose column 0 is the positive item and
        column 1 the sampled negative.
        """
        f1_scores = []
        roc_auc_scores = []
        with torch.no_grad():
            with tqdm(total=int(len(loader_val.dataset) / self.valid_batch_size) + 1, desc='Evaluating Iteration') as pbar:
                for batch_input in loader_val:
                    logits = self.predict(*batch_input)
                    pos_preds = (torch.sigmoid(logits[:, 0]) > 0.5).cpu().numpy().flatten()
                    neg_preds = (torch.sigmoid(logits[:, 1]) > 0.5).cpu().numpy().flatten()
                    size = len(batch_input[0])
                    labels = np.concatenate([np.ones(size), np.zeros(size)])
                    preds = np.concatenate([pos_preds, neg_preds])
                    # sklearn metrics take (y_true, y_pred); the previous
                    # version passed predictions first, which inverts ROC-AUC.
                    f1_scores.append(f1_score(labels, preds))
                    roc_auc_scores.append(roc_auc_score(labels, preds))
                    pbar.update(1)
        mean_f1 = np.mean(f1_scores)
        mean_auc = np.mean(roc_auc_scores)
        logging.info('F1:{}, AUC:{}'.format(mean_f1, mean_auc))
        return mean_f1, mean_auc
62,079 | katrina-m/RecModels_Pytorch | refs/heads/master | /dao/tgat_data_loader.py | import numpy as np
from torch.utils.data import Dataset
from torch.utils.data.dataloader import DataLoader
import random
class GraphData(object):
    """Bundle of parallel edge arrays for one split of a temporal graph.

    Stores source ids, destination ids, timestamps, edge types and labels,
    plus a negative-edge sampler built from this split's own endpoints.
    """

    def __init__(self, src_idx_list, dst_idx_list, ts_list, e_type_list, label_list):
        fields = ("src_idx_list", "dst_idx_list", "ts_list", "e_type_list", "label_list")
        values = (src_idx_list, dst_idx_list, ts_list, e_type_list, label_list)
        for name, value in zip(fields, values):
            setattr(self, name, value)
        # Negative sampler drawing uniformly from this split's endpoints.
        self.rand_sampler = RandEdgeSampler(src_idx_list, dst_idx_list)
class RandEdgeSampler(object):
    """Uniform negative-edge sampler over the distinct endpoints of a split."""

    def __init__(self, src_list, dst_list):
        # De-duplicate once up front; sample() then just indexes these arrays.
        self.src_list = np.unique(src_list)
        self.dst_list = np.unique(dst_list)

    def sample(self, size):
        """Draw `size` random source ids and `size` random destination ids
        (uniformly, with replacement)."""
        def draw(pool):
            return pool[np.random.randint(0, len(pool), size)]
        return draw(self.src_list), draw(self.dst_list)
class NeighborFinder:
    """Temporal neighbor lookup over a static adjacency structure.

    Flattens a per-node adjacency list into contiguous arrays (CSR-style,
    via `off_set_list`) so that, for any (node, cut_time) query, the
    neighbors interacting strictly before `cut_time` can be located with a
    binary search on the timestamps.
    """

    def __init__(self, adj_list, uniform=False):
        """
        :param adj_list: List[List[(node_idx, e_type, ts)]] — adjacency per node id.
        :param uniform: if True, sample neighbors uniformly at random instead
            of taking the earliest interactions.
        """
        node_idx_l, node_ts_l, edge_type_l, off_set_l = self.init_off_set(adj_list)
        self.node_idx_list = node_idx_l
        self.node_ts_list = node_ts_l
        self.edge_type_list = edge_type_l
        self.off_set_list = off_set_l
        self.uniform = uniform

    def init_off_set(self, adj_list):
        """Flatten `adj_list` into parallel arrays plus a CSR offset array.

        Each node's neighbors are sorted by TIMESTAMP — find_before's binary
        search depends on that ordering.  (The previous version sorted by
        x[1], the edge type, which silently broke the time search.)

        :return: (node ids, timestamps, edge types, offsets) such that
            node_idx_list[off_set_list[i]:off_set_list[i + 1]] are node i's
            neighbors.
        """
        n_idx_list = []
        n_ts_list = []
        e_type_list = []
        off_set_list = [0]
        for neighbors in adj_list:
            # Tuples are (node_idx, e_type, ts): sort by the timestamp x[2].
            neighbors = sorted(neighbors, key=lambda x: x[2])
            n_idx_list.extend([x[0] for x in neighbors])
            e_type_list.extend([x[1] for x in neighbors])
            n_ts_list.extend([x[2] for x in neighbors])
            off_set_list.append(len(n_idx_list))
        n_idx_list = np.array(n_idx_list)
        n_ts_list = np.array(n_ts_list)
        e_type_list = np.array(e_type_list)
        off_set_list = np.array(off_set_list)
        assert(len(n_idx_list) == len(n_ts_list))
        assert(off_set_list[-1] == len(n_ts_list))
        return n_idx_list, n_ts_list, e_type_list, off_set_list

    def find_before(self, src_idx, cut_time):
        """Return (neighbor ids, edge types, timestamps) for every interaction
        of `src_idx` strictly before `cut_time`.

        If any incident edge carries ts == 0 the whole neighborhood is
        treated as stationary and returned with ts set to `cut_time`.
        """
        lo, hi = self.off_set_list[src_idx], self.off_set_list[src_idx + 1]
        neighbors_idx = self.node_idx_list[lo:hi]
        neighbors_ts = self.node_ts_list[lo:hi]
        neighbors_e_type = self.edge_type_list[lo:hi]
        if (neighbors_ts == 0).any():
            # Stationary edges: valid at any query time.
            return neighbors_idx, neighbors_e_type, np.ones_like(neighbors_ts) * cut_time
        # Timestamps are sorted ascending, so elements [:cut] are strictly
        # before cut_time.  np.searchsorted replaces a hand-rolled binary
        # search that dropped the most recent valid neighbor (off-by-one)
        # and, in its empty branch, returned the tuple in the wrong order
        # (idx, ts, e_type) relative to every caller's unpacking.
        cut = np.searchsorted(neighbors_ts, cut_time, side='left')
        return neighbors_idx[:cut], neighbors_e_type[:cut], neighbors_ts[:cut]

    def get_temporal_neighbor(self, src_idx_list, cut_time_list, num_neighbors=20):
        """Batched temporal neighbor sampling.

        Rows are left-padded with zeros when fewer than `num_neighbors`
        neighbors exist (node id 0 doubles as the padding id).

        :return: (node ids int32, edge types int32, timestamps float32), each
            of shape (len(src_idx_list), num_neighbors).
        """
        assert(len(src_idx_list) == len(cut_time_list))
        out_ngh_node_batch = np.zeros((len(src_idx_list), num_neighbors)).astype(np.int32)
        out_ngh_t_batch = np.zeros((len(src_idx_list), num_neighbors)).astype(np.float32)
        out_ngh_eType_batch = np.zeros((len(src_idx_list), num_neighbors)).astype(np.int32)
        for i, (src_idx, cut_time) in enumerate(zip(src_idx_list, cut_time_list)):
            ngh_idx, ngh_eType, ngh_ts = self.find_before(src_idx, cut_time)
            if len(ngh_idx) == 0:
                continue  # no history: leave the zero padding in place
            if self.uniform:
                # Uniform sampling with replacement, then re-sorted by time.
                sampled_idx = np.random.randint(0, len(ngh_idx), num_neighbors)
                out_ngh_node_batch[i, :] = ngh_idx[sampled_idx]
                out_ngh_t_batch[i, :] = ngh_ts[sampled_idx]
                out_ngh_eType_batch[i, :] = ngh_eType[sampled_idx]
                pos = out_ngh_t_batch[i, :].argsort()
                out_ngh_node_batch[i, :] = out_ngh_node_batch[i, :][pos]
                out_ngh_t_batch[i, :] = out_ngh_t_batch[i, :][pos]
                out_ngh_eType_batch[i, :] = out_ngh_eType_batch[i, :][pos]
            else:
                # Take the first `num_neighbors` (earliest) interactions,
                # right-aligned against the padding.
                ngh_ts = ngh_ts[:num_neighbors]
                ngh_idx = ngh_idx[:num_neighbors]
                ngh_eType = ngh_eType[:num_neighbors]
                out_ngh_node_batch[i, num_neighbors - len(ngh_idx):] = ngh_idx
                out_ngh_t_batch[i, num_neighbors - len(ngh_ts):] = ngh_ts
                out_ngh_eType_batch[i, num_neighbors - len(ngh_eType):] = ngh_eType
        return out_ngh_node_batch, out_ngh_eType_batch, out_ngh_t_batch
class FeatureGen():
    """Splits a temporal-interaction DataFrame into train/val/test graph data
    and builds the corresponding DataLoaders and temporal neighbor finders."""

    def __init__(self, uniform=True, device="cpu"):
        """
        :param uniform: passed through to NeighborFinder — sample neighbors
            uniformly instead of taking the earliest interactions.
        :param device: torch device string handed to the datasets.
        """
        self.uniform = uniform
        self.device = device
        # Filled in by split_data().
        self.num_nodes = None
        self.num_relations = None

    def prepare_loader(self, g_df, batch_size, valid_batch_size):
        """Return (train, val, new-node-val) DataLoaders built from `g_df`.

        :param g_df: DataFrame with columns srcId, dstId, eType, label, timestamp.
        """
        train_graph_data, val_graph_data, test_graph_data, new_node_val_graph_data, \
        new_node_test_graph_data, train_ngh_finder, full_ngh_finder = self.split_data(g_df)
        train_dataset = TGATDataset(train_graph_data, train_ngh_finder, mode="train", device=self.device)
        val_dataset = TGATDataset(val_graph_data, full_ngh_finder, mode="valid", device=self.device)
        nn_val_dataset = TGATDataset(new_node_val_graph_data, full_ngh_finder, mode="valid_new_node", device=self.device)
        train_dataloader = DataLoader(train_dataset, batch_size=batch_size, collate_fn=train_dataset.collate_fn)
        val_dataloader = DataLoader(val_dataset, batch_size=valid_batch_size, collate_fn=val_dataset.collate_fn)
        nn_val_dataloader = DataLoader(nn_val_dataset, batch_size=valid_batch_size, collate_fn=nn_val_dataset.collate_fn)
        return train_dataloader, val_dataloader, nn_val_dataloader

    def split_data(self, g_df):
        """Chronological 70/15/15 split with a held-out "new node" set.

        10% of the nodes appearing after the validation cut are masked out of
        training entirely, so the new-node splits can measure inductiveness.

        :return: (train, val, test, new-node-val, new-node-test) GraphData
            plus the training-graph and full-graph NeighborFinders.
        """
        val_time, test_time = list(np.quantile(g_df.timestamp, [0.70, 0.85]))
        src_idx_list = g_df.srcId.values
        dst_idx_list = g_df.dstId.values
        e_type_list = g_df.eType.values
        label_list = g_df.label.values
        ts_list = g_df.timestamp.values
        total_node_set = set(np.unique(np.hstack([g_df.srcId.values, g_df.dstId.values])))
        self.num_relations = len(set(e_type_list))
        max_idx = max(src_idx_list.max(), dst_idx_list.max())
        self.num_nodes = max_idx + 1
        # Randomly hold out 10% of the nodes seen after val_time as "new".
        # random.sample needs a sequence — sampling from a set is a TypeError
        # on Python >= 3.11 — and sorted() makes the draw reproducible under
        # a fixed seed.
        late_nodes = sorted(set(src_idx_list[ts_list > val_time]).union(set(dst_idx_list[ts_list > val_time])))
        mask_node_set = set(random.sample(late_nodes, int(0.1 * self.num_nodes)))
        mask_src_flag = g_df.srcId.map(lambda x: x in mask_node_set).values
        mask_dst_flag = g_df.dstId.map(lambda x: x in mask_node_set).values
        # Keep only edges with BOTH endpoints outside the masked node set.
        none_new_node_flag = (1 - mask_src_flag) * (1 - mask_dst_flag)
        train_flag = (ts_list <= val_time) * (none_new_node_flag > 0)

        def build_split(flag):
            # Slice every edge array by the boolean flag and wrap it.
            return GraphData(src_idx_list[flag], dst_idx_list[flag], ts_list[flag],
                             e_type_list[flag], label_list[flag])

        train_graph_data = build_split(train_flag)
        # New nodes = nodes never seen in training; used to test inductiveness.
        train_node_set = set(train_graph_data.src_idx_list).union(train_graph_data.dst_idx_list)
        assert (len(train_node_set - mask_node_set) == len(train_node_set))
        new_node_set = total_node_set - train_node_set
        # Validation and test windows by timestamp.
        val_flag = (ts_list <= test_time) * (ts_list > val_time)
        test_flag = ts_list > test_time
        is_new_node_edge = np.array([(a in new_node_set or b in new_node_set)
                                     for a, b in zip(src_idx_list, dst_idx_list)])
        new_node_val_flag = val_flag * is_new_node_edge
        new_node_test_flag = test_flag * is_new_node_edge
        val_graph_data = build_split(val_flag)
        test_graph_data = build_split(test_flag)
        # Splits restricted to edges touching at least one new node.
        new_node_val_graph_data = build_split(new_node_val_flag)
        new_node_test_graph_data = build_split(new_node_test_flag)

        def build_finder(edge_iter):
            # Undirected temporal adjacency: each edge enters both endpoints' lists.
            adj = [[] for _ in range(max_idx + 1)]
            for src, dst, e_type, ts in edge_iter:
                adj[src].append((dst, e_type, ts))
                adj[dst].append((src, e_type, ts))
            return NeighborFinder(adj, uniform=self.uniform)

        train_ngh_finder = build_finder(zip(train_graph_data.src_idx_list, train_graph_data.dst_idx_list,
                                            train_graph_data.e_type_list, train_graph_data.ts_list))
        # Full graph with all edges, for validation/test neighbor sampling.
        full_ngh_finder = build_finder(zip(src_idx_list, dst_idx_list, e_type_list, ts_list))
        return train_graph_data, val_graph_data, test_graph_data, new_node_val_graph_data, \
            new_node_test_graph_data, train_ngh_finder, full_ngh_finder
class TGATDataset(Dataset):
    """Edge-level dataset over one GraphData split.

    Each item is a (src, dst, timestamp, label) tuple; collate_fn stacks a
    batch and additionally draws one fake (negative) edge per positive one
    through the split's random sampler.
    """

    def __init__(self, graph_data, ngh_finder, mode="train", device="cpu"):
        super().__init__()
        self.mode = mode
        self.device = device
        self.src_idx_list = graph_data.src_idx_list
        self.dst_idx_list = graph_data.dst_idx_list
        self.ts_list = graph_data.ts_list
        self.label_list = graph_data.label_list
        self.rand_sampler = graph_data.rand_sampler
        self.ngh_finder = ngh_finder

    def __len__(self):
        return len(self.src_idx_list)

    def __getitem__(self, index):
        """Return the positive edge at `index` as (src, dst, ts, label)."""
        return (self.src_idx_list[index],
                self.dst_idx_list[index],
                self.ts_list[index],
                self.label_list[index])

    def collate_fn(self, batch):
        """Stack edge tuples into arrays and append sampled fake endpoints.

        Labels are dropped here — the contrastive objective only needs the
        real and fake edges.
        """
        src_list, dst_list, ts_list, _ = zip(*batch)
        fake_src, fake_dst = self.rand_sampler.sample(len(src_list))
        return (np.array(src_list), np.array(dst_list), np.array(ts_list),
                np.array(fake_src), np.array(fake_dst))
62,080 | katrina-m/RecModels_Pytorch | refs/heads/master | /utility/metrics.py | import torch
import numpy as np
from sklearn.metrics import roc_auc_score, f1_score
def precision_at_k_batch(hits, k):
    """Precision@k for a batch of binary hit rows.

    :param hits: 2-D binary array, one row per user, columns ranked best-first.
    :param k: cutoff rank.
    :return: 1-D array of per-user precision values.
    """
    top_k = hits[:, :k]
    return top_k.mean(axis=1)
def ndcg_at_k_batch(hits, k):
    """NDCG@k for a batch of binary hit rows (columns ranked best-first).

    Rows with no positives get NDCG 0: their ideal DCG is forced to inf so
    the ratio vanishes instead of dividing by zero.

    :param hits: 2-D binary (float) array, one row per user.
    :param k: cutoff rank.
    :return: 1-D array of per-user NDCG values.
    """
    discounts = np.log2(np.arange(2, k + 2))

    def gain(rows):
        # DCG with exponential gain: (2^rel - 1) / log2(rank + 1).
        return np.sum((2 ** rows - 1) / discounts, axis=1)

    dcg = gain(hits[:, :k])
    ideal_top_k = np.flip(np.sort(hits), axis=1)[:, :k]
    idcg = gain(ideal_top_k)
    idcg[idcg == 0] = np.inf
    return dcg / idcg
def recall_at_k_batch(hits, k):
    """Recall@k for a batch of binary hit rows (columns ranked best-first).

    Rows with no positives at all previously produced 0/0 -> NaN (plus a
    RuntimeWarning), which then poisoned any downstream np.mean; such rows
    now score 0.

    :param hits: 2-D binary array, one row per user.
    :param k: cutoff rank.
    :return: 1-D float array of per-user recall values.
    """
    retrieved = hits[:, :k].sum(axis=1).astype(np.float64)
    relevant = hits.sum(axis=1)
    # Safe division: rows with zero relevant items map to 0 instead of NaN.
    return np.divide(retrieved, relevant,
                     out=np.zeros_like(retrieved),
                     where=relevant != 0)
def calc_metrics_at_k(cf_scores, train_user_dict, test_user_dict, user_ids, item_ids, K):
    """Per-user precision@K, recall@K and NDCG@K for a CF score matrix.

    :param cf_scores: (n_eval_users, n_eval_items) torch tensor; each user's
        training items are zeroed IN PLACE so seen items are never recommended.
    :param train_user_dict: user id -> list of training item indices.
    :param test_user_dict: user id -> list of held-out positive item indices.
    :param user_ids: evaluated user ids, aligned with cf_scores rows.
    :param item_ids: evaluated item ids (only its length is used).
    :param K: cutoff rank.
    :return: (precision, recall, ndcg) 1-D arrays, one entry per user.
    """
    test_pos_item_binary = np.zeros([len(user_ids), len(item_ids)], dtype=np.float32)
    for idx, u in enumerate(user_ids):
        cf_scores[idx][train_user_dict[u]] = 0  # mask already-seen items
        test_pos_item_binary[idx][test_user_dict[u]] = 1
    # Sort on GPU only when one is actually available; the previous version
    # wrapped .cuda() in a bare try/except, which also swallowed unrelated
    # errors (e.g. out-of-memory) raised during the sort itself.
    scores = cf_scores.cuda() if torch.cuda.is_available() else cf_scores
    _, rank_indices = torch.sort(scores, descending=True)
    rank_indices = rank_indices.cpu().numpy()
    # Gather each user's binary relevance vector in ranked order —
    # vectorised replacement for the per-user Python loop.
    binary_hit = np.take_along_axis(test_pos_item_binary, rank_indices, axis=1)
    precision = precision_at_k_batch(binary_hit, K)
    recall = recall_at_k_batch(binary_hit, K)
    ndcg = ndcg_at_k_batch(binary_hit, K)
    return precision, recall, ndcg
def calc_metrics_at_k_ctr(preds, grounds):
    """Return (ROC-AUC, F1) of binary predictions against ground-truth labels.

    :param preds: predicted binary labels.
    :param grounds: true binary labels.
    """
    return roc_auc_score(grounds, preds), f1_score(grounds, preds)
62,081 | katrina-m/RecModels_Pytorch | refs/heads/master | /model/SASGFRec.py | import torch
import logging
from time import time
from model.BaseModel import BaseModel
from utility.model_helper import EarlyStopping, adjust_learning_rate
from utility.components import PointWiseFeedForward, MultiHeadAttention
from utility.components import TimeEncode, PosEncode, EmptyEncode, TemporalAggregator, MergeLayer
# reference: https://github.com/pmixer/SASRec.pytorch.git
class SASGFRec(BaseModel):
def __init__(self, num_user, num_node, num_relation, args):
    """Self-attentive sequential recommender with a temporal-graph branch.

    :param num_user: number of users (stored; not used directly here).
    :param num_node: number of graph nodes; the embedding table has
        num_node + 1 rows, row 0 being the padding id.
    :param num_relation: number of edge/relation types.
    :param args: namespace whose attributes (hidden_units, maxlen, use_time,
        dropout_rate, num_blocks, num_heads, fan_outs, ...) BaseModel copies
        onto the instance.
    """
    super(SASGFRec, self).__init__(args)
    self.num_user = num_user
    self.num_node = num_node
    self.num_relation = num_relation
    self.args = args
    # TODO: loss += args.l2_emb for regularizing embedding vectors during training
    # https://stackoverflow.com/questions/42704283/adding-l1-l2-regularization-in-pytorch
    self.node_emb = torch.nn.Embedding(self.num_node + 1, self.hidden_units, padding_idx=0)
    # NOTE(review): TimeEncode is built with expand_dim= while PosEncode /
    # EmptyEncode get time_dim= / seq_len= — confirm the keyword names
    # against utility.components (the TGAT model elsewhere in this repo
    # passes expand_dim to all three encoders).
    if self.use_time == 'time':
        self.time_encoder = TimeEncode(expand_dim=self.hidden_units)
    elif self.use_time == 'pos':
        self.time_encoder = PosEncode(time_dim=self.hidden_units, seq_len=self.maxlen)
    elif self.use_time == 'empty':
        self.time_encoder = EmptyEncode(time_dim=self.hidden_units)
    else:
        raise ValueError('invalid time option!')
    #self.pos_emb = torch.nn.Embedding(self.maxlen, self.hidden_units) # TO IMPROVE
    self.dropout = torch.nn.Dropout(p=self.dropout_rate)
    self.attention_layernorms = torch.nn.ModuleList() # to be Q for self-attention
    self.attention_layers = torch.nn.ModuleList()
    self.forward_layernorms = torch.nn.ModuleList()
    self.forward_layers = torch.nn.ModuleList()
    self.last_layernorm = torch.nn.LayerNorm(self.hidden_units, eps=1e-8)
    hidden_units = self.hidden_units # node_dim + time_dim
    # One (layernorm, attention, layernorm, feed-forward) stack per block.
    for _ in range(self.num_blocks):
        new_attn_layernorm = torch.nn.LayerNorm(hidden_units, eps=1e-8)
        self.attention_layernorms.append(new_attn_layernorm)
        # new_attn_layer = torch.nn.MultiheadAttention(hidden_units,
        #                                              self.num_heads,
        #                                              self.dropout_rate)
        # NOTE(review): the first positional argument here is num_blocks; if
        # MultiHeadAttention's first parameter is the head count (as in the
        # commented-out torch version above), this should probably be
        # self.num_heads — verify against utility.components.
        new_attn_layer = MultiHeadAttention(self.num_blocks, hidden_units,\
            hidden_units, hidden_units, dropout=self.dropout_rate)
        self.attention_layers.append(new_attn_layer)
        new_fwd_layernorm = torch.nn.LayerNorm(hidden_units, eps=1e-8)
        self.forward_layernorms.append(new_fwd_layernorm)
        new_fwd_layer = PointWiseFeedForward(hidden_units, self.dropout_rate)
        self.forward_layers.append(new_fwd_layer)
    self.criterion = torch.nn.BCEWithLogitsLoss()
    self.new_attn_layer_graph = torch.nn.MultiheadAttention(self.hidden_units,
                                                            self.num_heads,
                                                            self.dropout_rate)
    # Graph branch: temporal neighborhood aggregator sharing the node table.
    self.temporal_aggregator = TemporalAggregator(self.fan_outs, self.hidden_units, self.num_node, \
        self.num_relation, num_layers=len(self.fan_outs), drop_out=self.dropout_rate,\
        num_heads=self.num_heads, use_time=self.use_time)
    self.temporal_aggregator.node_embed = self.node_emb
    # Used for kg pre-train
    self.kg_aggregator = TemporalAggregator(self.fan_outs, self.hidden_units, self.num_node, \
        self.num_relation, num_layers=len(self.fan_outs), drop_out=self.dropout_rate, \
        num_heads=self.num_heads, use_time="empty")
    # Share node and edge embedding tables between the two aggregators.
    self.kg_aggregator.node_embed = self.node_emb
    self.kg_aggregator.edge_embed = self.temporal_aggregator.edge_embed
    self.affinity_score = MergeLayer(self.hidden_units, self.hidden_units, self.hidden_units, 1)
def temporal_graph_embedding(self, src_idx, cut_time_list, blocks):
    """Encode each sequence position by aggregating its temporal neighborhood.

    Iterates over the sampled neighbor blocks from the outermost hop inward,
    attending from each destination node over its time-encoded neighbors and
    carrying the result into the next (inner) hop.

    :param src_idx: (batch_size, maxlen) node ids of the sequence positions.
    :param cut_time_list: cut-off timestamps used for the hop-0 time encoding.
    :param blocks: per-hop tuples (neighbor ids, neighbor node types, neighbor
        timestamps); consumed in reverse order (outermost hop first).
    :return: (batch_size, maxlen, hidden_units) aggregated embeddings.
    """
    batch_size, maxlen = src_idx.shape
    for i, (src_ngh_idx, src_ngh_node_type, src_ngh_ts) in enumerate(reversed(blocks)):
        # NOTE(review): src_ngh_node_type is currently unused.
        src_ngh_idx_reshape = src_ngh_idx.view(-1, self.num_neighbors)
        if len(blocks) == 1:
            # Single hop: the destinations are the sequence nodes themselves.
            dst_node_embed = self.node_emb(src_idx).view(-1, self.hidden_units).unsqueeze(
                1)  # (batch_size * maxlen, 1, node_dim)
        else:
            # NOTE(review): indexes blocks[i + 1] — presumably the next-inner
            # hop's seed nodes; confirm against the sampler's block layout.
            dst_node_embed = self.node_emb(blocks[i + 1]).view(-1, self.hidden_units).unsqueeze(1)
        if i == 0:
            # Hop 0: destination "time" is the cut time itself (zero offset).
            dst_node_t_embed = self.time_encoder(torch.zeros_like(cut_time_list)).view(-1, self.hidden_units).unsqueeze(1)
        src_node_embed = self.node_emb(src_ngh_idx_reshape)  # (batch_size * maxlen, num_neighbors, node_dim)
        src_node_t_embed = self.time_encoder(src_ngh_ts.view(-1, self.num_neighbors))
        src_node_feat = src_node_embed + src_node_t_embed
        # Zero out padded neighbor slots (node id 0 is padding).
        mask = ~(src_ngh_idx_reshape == 0).unsqueeze(-1)
        src_node_feat *= mask
        dst_node_feat = dst_node_embed + dst_node_t_embed
        # used for next iteration.
        src_node_feat = torch.transpose(src_node_feat, 0, 1)
        dst_node_feat = torch.transpose(dst_node_feat, 0, 1)
        # Destination acts as the attention query over its neighbors (K/V).
        src_node_embed, _ = self.attention_layers[i](dst_node_feat, src_node_feat, src_node_feat)
        src_node_embed = dst_node_feat + src_node_embed  # residual connection
        src_node_embed = src_node_embed.transpose(0, 1)
        src_node_embed = self.forward_layernorms[i](src_node_embed)
        src_node_embed = self.forward_layers[i](src_node_embed)
        # Destination time encoding carried into the next (inner) hop.
        dst_node_t_embed = self.time_encoder(src_ngh_ts.view(-1, self.num_neighbors))
    return src_node_embed.squeeze(1).reshape(batch_size, maxlen, self.hidden_units)
def log2feats(self, log_seqs, seq_ts, blocks):
    """Turn an item-id sequence into per-position hidden features.

    The last ``graph_maxlen`` positions are overwritten with temporal-graph
    aggregated embeddings, then the sequence is run through the stack of
    causal self-attention blocks.

    :param log_seqs: (batch, maxlen) item ids; 0 marks padding.
    :param seq_ts: (batch, maxlen) interaction timestamps.
    :param blocks: sampled graph blocks consumed by the temporal aggregator.
    :return: (batch, maxlen, hidden_units) layer-normalized features.
    """
    seqs = self.node_emb(log_seqs)
    # Overwrite the most recent positions with graph-aggregated embeddings.
    temporal_embedding = self.temporal_aggregator(blocks).view(-1, self.graph_maxlen, self.hidden_units)
    seqs[:, -self.graph_maxlen:, :] = temporal_embedding
    #seqs *= self.node_emb.embedding_dim ** 0.5
    seqs += self.time_encoder(seq_ts)
    seqs = self.dropout(seqs)
    timeline_mask = (log_seqs == 0).unsqueeze(-1)
    #seqs *= ~timeline_mask.unsqueeze(-1) # broadcast in last dim
    tl = seqs.shape[1] # time dim len for enforce causality
    # Upper-triangular mask: each position may only attend to itself/the past.
    attention_mask = ~torch.tril(torch.ones((tl, tl), dtype=torch.bool, device=self.device))
    for i in range(len(self.attention_layers)):
        #seqs = torch.transpose(seqs, 0, 1)
        Q = self.attention_layernorms[i](seqs)  # pre-norm queries
        mha_outputs, _ = self.attention_layers[i](Q, seqs, seqs,
                                                  attn_mask=attention_mask, mask=timeline_mask)
        # key_padding_mask=timeline_mask
        # need_weights=False) this arg do not work?
        seqs = Q + mha_outputs  # residual connection
        #seqs = torch.transpose(seqs, 0, 1)
        seqs = self.forward_layernorms[i](seqs)
        seqs = self.forward_layers[i](seqs)
        #seqs *= ~timeline_mask.unsqueeze(-1)
    log_feats = self.last_layernorm(seqs) # (U, T, C) -> (U, -1, C)
    return log_feats
def forward(self, user_ids, log_seqs, seq_ts, pos_seqs, neg_seqs, block):  # for training
    """Score positive and negative targets against the sequence features.

    :return: (pos_logits, neg_logits), each of shape (batch, maxlen) — the
        dot product of every step's feature with its target embedding.
    """
    seq_repr = self.log2feats(log_seqs, seq_ts, block)  # user_ids hasn't been used yet
    positive = self.node_emb(pos_seqs)
    negative = self.node_emb(neg_seqs)
    pos_logits = (seq_repr * positive).sum(dim=-1)
    neg_logits = (seq_repr * negative).sum(dim=-1)
    return pos_logits, neg_logits  # pos_pred, neg_pred
def predict(self, user_ids, log_seqs, seq_ts, item_indices, block):  # for inference
    """Rank candidate items for each sequence using only the last position.

    :return: (batch, num_candidates) scores for ``item_indices``.
    """
    seq_repr = self.log2feats(log_seqs, seq_ts, block)  # user_ids hasn't been used yet
    # Only the representation of the final step drives inference.
    last_step = seq_repr[:, -1, :].unsqueeze(1)
    candidates = self.node_emb(item_indices)  # (I, C)
    scores = last_step.matmul(candidates.transpose(1, 2))
    return scores.squeeze(1)  # preds # (U, I)
def calc_loss(self, optimizer, batch_data):
    """Run one optimization step of the BCE recommendation loss.

    Padding positions (item id 0) are excluded; an L2 penalty on the node
    embedding table is added. Returns the (already back-propagated) loss.
    """
    (u, seq, seq_ts, pos, neg, block) = batch_data
    pos_logits, neg_logits = self.forward(*batch_data)
    pos_labels = torch.ones(pos_logits.shape, device=self.device)
    neg_labels = torch.zeros(neg_logits.shape, device=self.device)
    optimizer.zero_grad()
    valid = pos != 0  # ignore padded steps
    loss = self.criterion(pos_logits[valid], pos_labels[valid])
    loss = loss + self.criterion(neg_logits[valid], neg_labels[valid])
    # Embedding L2 regularization.
    for param in self.node_emb.parameters():
        loss = loss + self.args.l2_emb * torch.norm(param)
    loss.backward()
    optimizer.step()
    return loss
def calc_kg_loss(self, optimizer, batch_data):
    """Run one optimization step of the KG triple-classification loss.

    True (head, tail) pairs are labeled 1, corrupted heads 0; both are scored
    by the affinity head over KG-aggregated embeddings.
    """
    src_blocks, dst_blocks, src_fake_blocks = batch_data
    head_embed = self.kg_aggregator(src_blocks)
    tail_embed = self.kg_aggregator(dst_blocks)
    corrupt_embed = self.kg_aggregator(src_fake_blocks)
    pos_score = self.affinity_score(head_embed, tail_embed).squeeze(dim=-1)
    neg_score = self.affinity_score(head_embed, corrupt_embed).squeeze(dim=-1)
    n_triples = len(src_blocks[0][0])
    with torch.no_grad():
        pos_label = torch.ones(n_triples, dtype=torch.float, device=self.device)
        neg_label = torch.zeros(n_triples, dtype=torch.float, device=self.device)
    optimizer.zero_grad()
    loss = self.criterion(pos_score, pos_label)
    loss = loss + self.criterion(neg_score, neg_label)
    loss.backward()
    optimizer.step()
    return loss
def reset_parameters(self):
    """Re-initialize every >=2-D parameter with Xavier-uniform.

    1-D parameters (biases, LayerNorm weights) keep their default
    initialization because ``xavier_uniform_`` rejects them.
    """
    for name, param in self.named_parameters():
        try:
            torch.nn.init.xavier_uniform_(param.data)
        except ValueError:
            # xavier_uniform_ requires at least 2 dimensions; skip biases etc.
            # (was a bare `except:` that also swallowed KeyboardInterrupt).
            pass
def fit(self, loader_train, loader_val, loader_kg, optimizer):
    """Train the model with early stopping on validation recall.

    :param loader_train: DataLoader of CF training batches fed to calc_loss.
    :param loader_val: DataLoader consumed by ``self.evaluate``.
    :param loader_kg: DataLoader of KG batches (the pre-training loop below
        is currently commented out; only used for batch counting).
    :param optimizer: optimizer stepped inside ``calc_loss``.
    """
    self.reset_parameters()
    earlyStopper = EarlyStopping(self.stopping_steps, self.verbose)
    self.train().to(device=self.device)
    logging.info(self)
    # Train CF
    best_epoch = -1
    n_kg_batch = int(len(loader_kg.dataset) / self.kg_batch_size)
    n_batch = int(len(loader_train.dataset) / self.batch_size)
    epoch_start_idx = 0
    for epoch in range(epoch_start_idx, self.num_epochs + 1):
        # if epoch % 5 == 0:
        # time1 = time()
        # total_loss = 0
        # time2 = time()
        # for step, batch in enumerate(loader_kg):
        # loss = self.calc_kg_loss(optimizer, batch)
        # total_loss += loss.item()
        # if self.verbose and step % self.print_every == 0 and step != 0:
        # logging.info(
        # 'KG Training: Epoch {:04d} Iter {:04d} / {:04d} | Time {:.1f}s | Iter Loss {:.4f} | Iter Mean '
        # 'Loss {:.4f}'.format(
        # epoch, step, n_kg_batch, time() - time2, loss.item(), total_loss / step))
        # time2 = time()
        # logging.info(
        # 'Training: Epoch {:04d} Total Iter {:04d} | Total Time {:.1f}s | Iter Mean Loss {:.4f}'.format(epoch,
        # n_kg_batch,
        # time() - time1,
        # total_loss / n_kg_batch))
        time1 = time()
        total_loss = 0
        time2 = time()
        for step, batch_data in enumerate(loader_train):
            loss = self.calc_loss(optimizer, batch_data)
            total_loss += loss.item()
            # step != 0 guards the total_loss / step division below.
            if self.verbose and step % self.print_every == 0 and step != 0:
                logging.info(
                    'Training: Epoch {:04d} Iter {:04d} / {:04d} | Time {:.1f}s | Iter Loss {:.4f} | Iter Mean '
                    'Loss {:.4f}'.format(
                        epoch, step, n_batch, time() - time2, loss.item(), total_loss / step))
                time2 = time()
        logging.info(
            'Training: Epoch {:04d} Total Iter {:04d} | Total Time {:.1f}s | Iter Mean Loss {:.4f}'.format(epoch,
                                                                                                           n_batch,
                                                                                                           time() - time1,
                                                                                                           total_loss / n_batch))
        if epoch % self.evaluate_every == 0:
            time1 = time()
            self.eval()
            ndcg, recall = self.evaluate(loader_val)
            # NOTE(review): "{'':.4f}" is adjacent-literal concatenation that
            # yields "{:.4f}" — works, but fragile to edit.
            logging.info(
                'Evaluation: Epoch {:04d} | Total Time {:.1f}s | Recall {:.4f} NDCG {'':.4f}'.format(
                    epoch, time() - time1, recall, ndcg))
            earlyStopper(recall, self, self.save_dir, epoch, best_epoch)
            if earlyStopper.early_stop:
                break
            self.train()  # back to training mode after evaluation
        adjust_learning_rate(optimizer, epoch, self.lr)
| {"/dao/tgat_data_loader_dgl.py": ["/utility/dao_helper.py"], "/model/SASRec.py": ["/model/BaseModel.py", "/utility/components.py"], "/train/train_TGAT.py": ["/train/parse_args.py", "/dao/load_test_data.py", "/dao/tgat_data_loader_dgl.py", "/model/TGAT.py"], "/model/BaseModel.py": ["/utility/metrics.py"], "/model/SASGFRec.py": ["/model/BaseModel.py", "/utility/components.py"], "/train/train_SASRec.py": ["/train/parse_args.py", "/utility/metrics.py", "/utility/dao_helper.py", "/model/SASRec.py", "/dao/SASRec_dataloader.py", "/dao/load_test_data.py"], "/dao/SASRec_dataloader.py": ["/utility/dao_helper.py"], "/dao/SeqGFRec_dataloader.py": ["/utility/dao_helper.py"]} |
62,082 | katrina-m/RecModels_Pytorch | refs/heads/master | /train/parse_args.py | import argparse
import torch
def common_args(parser):
    """Register the CLI options shared by every training entry point.

    :param parser: an ``argparse.ArgumentParser`` to extend in place.
    :return: the same parser, to allow chaining.
    """
    def str2bool(value):
        # BUG FIX: argparse's type=bool treats any non-empty string
        # (including "False") as True; parse common boolean spellings instead.
        if isinstance(value, bool):
            return value
        return value.lower() in ("true", "1", "yes", "y")

    parser.add_argument('--seed', type=int, default=123, help='Random seed.')
    parser.add_argument('--corpus_name', nargs='?', default='ml-1m', help='Choose a dataset from {ml_1m}')
    parser.add_argument('--use_pretrain', type=int, default=0,
                        help='0: No pretrain, 1: Pretrain with the learned embeddings, 2: Pretrain with stored model.')
    parser.add_argument('--pretrain_model_path', nargs='?', default='../trained_model/model.pth',
                        help='Path of stored model.')
    parser.add_argument('--lr', type=float, default=0.001, help='Learning rate.')
    parser.add_argument('--num_epochs', type=int, default=1000, help='Number of epoch.')
    parser.add_argument('--stopping_steps', type=int, default=20, help='Number of epoch for early stopping')
    parser.add_argument('--print_every', type=int, default=10, help='Iter interval of printing CF loss.')
    parser.add_argument('--evaluate_every', type=int, default=1, help='Epoch interval of evaluating CF.')
    parser.add_argument('--K', type=int, default=10, help='Calculate metric@K when evaluating.')
    # NOTE(review): default is a torch.device object but CLI input stays a str;
    # downstream code must accept both — confirm.
    parser.add_argument('--device', default=torch.device("cuda" if torch.cuda.is_available() else "cpu"), type=str)
    parser.add_argument('--verbose', type=str2bool, default=True, help='Verbose.')
    return parser
def parse_tgat_args(args_dict=None):
    """Parse the CLI arguments for TGAT link-prediction experiments.

    :param args_dict: optional mapping of overrides applied after parsing —
        added for consistency with ``parse_SASGFRec_args`` /
        ``parse_SASRec_args`` (backward compatible: defaults to None).
    :return: the parsed namespace, with ``save_dir`` derived from data_name.
    """
    parser = argparse.ArgumentParser('Interface for TGAT experiments on link predictions')
    parser.add_argument('--data_name', type=str, help='data sources to use, try wikipedia or reddit', default='ml-1m')
    parser.add_argument('--batch_size', type=int, default=1024, help='batch_size')
    parser.add_argument('--valid_batch_size', type=int, default=1024, help='valid_batch_size')
    parser.add_argument('--prefix', type=str, default='', help='prefix to name the checkpoints')
    parser.add_argument('--num_degree', type=int, default=20, help='number of neighbors to sample')
    parser.add_argument('--num_heads', type=int, default=1, help='number of heads used in attention layer')
    parser.add_argument('--num_epochs', type=int, default=50, help='number of epochs')
    parser.add_argument('--num_layers', type=int, default=2, help='number of network layers')
    parser.add_argument('--num_neighbors', type=int, default=20, help='number of neighbors')
    parser.add_argument('--lr', type=float, default=0.0001, help='learning rate')
    parser.add_argument('--drop_out', type=float, default=0.1, help='dropout probability')
    parser.add_argument('--gpu', type=int, default=0, help='idx for the gpu to use')
    parser.add_argument('--node_dim', type=int, default=100, help='Dimentions of the node embedding')
    parser.add_argument('--time_dim', type=int, default=100, help='Dimentions of the time embedding')
    parser.add_argument('--agg_method', type=str, choices=['attn', 'lstm', 'mean'], help='local aggregation method', default='attn')
    parser.add_argument('--attn_mode', type=str, choices=['prod', 'map'], default='prod', help='use dot product attention or mapping based')
    parser.add_argument('--use_time', type=str, choices=['time', 'pos', 'empty'], help='how to use time information', default='time')
    parser.add_argument('--uniform', action='store_true', help='take uniform sampling from temporal neighbors')
    parser.add_argument('--device', default=torch.device("cuda" if torch.cuda.is_available() else "cpu"), type=str)
    parser.add_argument('--verbose', default=1, type=int)
    parser.add_argument('--print_every', default=50, type=int)
    args = parser.parse_args()
    # Programmatic overrides win over CLI values (same convention as siblings).
    if args_dict is not None:
        for key, value in args_dict.items():
            setattr(args, key, value)
    save_dir = '../trained_model/TGAT/{}/'.format(
        args.data_name)
    args.save_dir = save_dir
    return args
def parse_SASGFRec_args(args_dict=None):
    """Parse the CLI arguments for the SASGFRec model.

    :param args_dict: optional mapping of overrides applied after parsing.
    :return: the parsed namespace with a derived ``save_dir``.
    """
    parser = argparse.ArgumentParser(description="Run SASGFRec.")
    parser.add_argument('--batch_size', default=64, type=int, help='Batch size')
    parser.add_argument('--kg_batch_size', default=512, type=int, help='Batch size')
    parser.add_argument('--valid_batch_size', default=64, type=int, help='Valid batch size')
    parser.add_argument('--maxlen', default=50, type=int, help='Max sequence lengths')
    parser.add_argument('--graph_maxlen', default=20, type=int, help='Max sequence lengths for graph seeds')
    parser.add_argument('--hidden_units', default=50, type=int)
    parser.add_argument('--num_blocks', default=2, type=int)
    parser.add_argument('--num_heads', default=1, type=int)
    parser.add_argument('--dropout_rate', default=0.5, type=float, help="Dropout rate.")
    parser.add_argument('--l2_emb', default=0.0, type=float)
    # BUG FIX: was type=list, which splits the raw string into characters
    # (e.g. "15" -> ['1', '5']); accept space-separated ints instead.
    parser.add_argument('--fan_outs', type=int, nargs='+', default=[15, 15], help='Fan outs')
    parser.add_argument('--num_neighbors', type=int, default=20, help='number of neighbors')
    parser.add_argument('--use_time', type=str, default="pos", choices=['time', 'pos', 'empty'], help='number of neighbors')
    parser = common_args(parser)
    args = parser.parse_args()
    # Programmatic overrides win over CLI values.
    if args_dict is not None:
        for key, value in args_dict.items():
            setattr(args, key, value)
    save_dir = '../trained_model/SASGFRec/{}/hiddendim{}_blocks{}_heads{}_lr{}/'.format(args.corpus_name, args.hidden_units, \
                                                                                       args.num_blocks, args.num_heads, args.lr)
    args.save_dir = save_dir
    return args
def parse_SASRec_args(args_dict=None):
    """Parse the CLI arguments for the SASRec baseline.

    :param args_dict: optional mapping of overrides applied after parsing.
    :return: the parsed namespace with a derived ``save_dir``.
    """
    arg_parser = argparse.ArgumentParser(description="Run SASRec.")
    arg_parser.add_argument('--batch_size', default=128, type=int, help='Batch size')
    arg_parser.add_argument('--valid_batch_size', default=300, type=int, help='Valid batch size')
    arg_parser.add_argument('--maxlen', default=50, type=int, help='Max sequence lengths')
    arg_parser.add_argument('--hidden_units', default=50, type=int)
    arg_parser.add_argument('--num_blocks', default=2, type=int)
    arg_parser.add_argument('--num_heads', default=1, type=int)
    arg_parser.add_argument('--dropout_rate', default=0.5, type=float, help="Dropout rate.")
    arg_parser.add_argument('--l2_emb', default=0.0, type=float)
    arg_parser = common_args(arg_parser)
    parsed = arg_parser.parse_args()
    # Programmatic overrides (if any) win over CLI values.
    if args_dict is not None:
        for override_key, override_value in args_dict.items():
            setattr(parsed, override_key, override_value)
    parsed.save_dir = '../trained_model/SASRec/{}/hiddendim{}_blocks{}_heads{}_lr{}/'.format(parsed.corpus_name, parsed.hidden_units, \
                                                                                             parsed.num_blocks, parsed.num_heads, parsed.lr)
    return parsed
| {"/dao/tgat_data_loader_dgl.py": ["/utility/dao_helper.py"], "/model/SASRec.py": ["/model/BaseModel.py", "/utility/components.py"], "/train/train_TGAT.py": ["/train/parse_args.py", "/dao/load_test_data.py", "/dao/tgat_data_loader_dgl.py", "/model/TGAT.py"], "/model/BaseModel.py": ["/utility/metrics.py"], "/model/SASGFRec.py": ["/model/BaseModel.py", "/utility/components.py"], "/train/train_SASRec.py": ["/train/parse_args.py", "/utility/metrics.py", "/utility/dao_helper.py", "/model/SASRec.py", "/dao/SASRec_dataloader.py", "/dao/load_test_data.py"], "/dao/SASRec_dataloader.py": ["/utility/dao_helper.py"], "/dao/SeqGFRec_dataloader.py": ["/utility/dao_helper.py"]} |
62,083 | katrina-m/RecModels_Pytorch | refs/heads/master | /utility/dao_helper.py | import dgl
import numpy as np
import torch
class RandEdgeSampler(object):
    """Uniform sampler over the distinct source/destination node ids."""

    def __init__(self, src_list, dst_list):
        # De-duplicate once so sampling is uniform over nodes, not edges.
        self.src_list = np.unique(src_list)
        self.dst_list = np.unique(dst_list)

    def sample(self, size):
        """Return ``size`` random source ids and ``size`` random destination ids."""
        picked_src = np.random.randint(0, len(self.src_list), size)
        picked_dst = np.random.randint(0, len(self.dst_list), size)
        return self.src_list[picked_src], self.dst_list[picked_dst]
class NeighborFinder:
    """Flat-array adjacency index over a temporal knowledge graph.

    For every node, its time-sorted neighbor ids, node types, edge types and
    timestamps are stored in flat arrays addressed through an offset table,
    so "neighbors of u before time t" is a binary search instead of a scan.
    """

    def __init__(self, kg_data, uniform=False, bidirectional=True):
        """
        Params
        ------
        kg_data: frame-like object exposing ``h``, ``t``, ``r`` attributes and
            a ``timestamp`` column (e.g. a pandas DataFrame of triples).
        uniform: if True, sample neighbors uniformly at random; otherwise
            take the most recent ones.
        bidirectional: if True, index every edge in both directions.
        """
        self.bidirectional = bidirectional
        adj_list = self.init_data(kg_data)
        node_idx_l, node_ts_l, s_type_l, d_type_l, edge_type_l, off_set_l = self.init_off_set(adj_list)
        self.node_idx_list = node_idx_l
        self.node_ts_list = node_ts_l
        self.edge_type_list = edge_type_l
        self.src_type_list = s_type_l
        self.dst_type_list = d_type_l
        self.off_set_list = off_set_l
        self.uniform = uniform

    def init_data(self, kg_data):
        """Build per-node adjacency lists from the raw triples.

        :return: adj_list where adj_list[u] is a list of
            (neighbor, src_type, dst_type, edge_type, timestamp) tuples.
        """
        src_idx_list = kg_data.h
        dst_idx_list = kg_data.t
        e_type_list = kg_data.r
        # NOTE(review): head/tail node types are read from the relation column
        # ``r`` — verify this is intentional against the upstream schema.
        h_type_list = kg_data.r
        t_type_list = kg_data.r
        ts_list = kg_data.timestamp.values
        max_idx = max(max(src_idx_list), max(dst_idx_list))
        # The graph is bi-directional
        if self.bidirectional is True:
            adj_list = [[] for _ in range(max_idx + 1)]
            for src, dst, hType, tType, eType, ts in zip(src_idx_list, dst_idx_list, h_type_list, t_type_list, e_type_list, ts_list):
                adj_list[src].append((dst, hType, tType, eType, ts))
                # Reverse edge swaps the head/tail node types.
                adj_list[dst].append((src, tType, hType, eType, ts))
        else:
            adj_list = [[] for _ in range(max_idx + 1)]
            for src, dst, hType, tType, eType, ts in zip(src_idx_list, dst_idx_list, h_type_list, t_type_list, e_type_list, ts_list):
                adj_list[src].append((dst, hType, tType, eType, ts))
        return adj_list

    def init_off_set(self, adj_list):
        """Flatten the adjacency lists into time-sorted arrays plus offsets.

        :return: (node ids, timestamps, src types, dst types, edge types,
            offsets) such that slice [off[i]:off[i+1]] of each array holds the
            neighbors of node i, sorted by timestamp.
        """
        n_idx_list = []
        n_ts_list = []
        s_type_list = []
        d_type_list = []
        e_type_list = []
        off_set_list = [0]
        for i in range(len(adj_list)):
            curr = adj_list[i]
            # Sort each node's neighbors chronologically (tuple slot 4 = ts).
            curr = sorted(curr, key=lambda x: x[4])
            n_idx_list.extend([x[0] for x in curr])
            s_type_list.extend([x[1] for x in curr])
            d_type_list.extend([x[2] for x in curr])
            e_type_list.extend([x[3] for x in curr])
            n_ts_list.extend([x[4] for x in curr])
            off_set_list.append(len(n_idx_list))
        n_idx_list = np.array(n_idx_list)
        s_type_list = np.array(s_type_list)
        d_type_list = np.array(d_type_list)
        n_ts_list = np.array(n_ts_list)
        e_type_list = np.array(e_type_list)
        off_set_list = np.array(off_set_list)
        assert(len(n_idx_list) == len(n_ts_list))
        assert(off_set_list[-1] == len(n_ts_list))
        return n_idx_list, n_ts_list, s_type_list, d_type_list, e_type_list, off_set_list

    def find_before(self, src_idx, cut_time=None, sort_by_time=True):
        """Return the neighbors of ``src_idx`` with edge time strictly before
        ``cut_time`` (all of them when ``sort_by_time`` is False).

        :return: (ids, src types, dst types, edge types, timestamps) — views
            into the flat index arrays.
        """
        off_set_list = self.off_set_list
        lo, hi = off_set_list[src_idx], off_set_list[src_idx + 1]
        neighbors_ts = self.node_ts_list[lo:hi]
        neighbors_idx = self.node_idx_list[lo:hi]
        neighbors_e_type = self.edge_type_list[lo:hi]
        neighbors_src_type = self.src_type_list[lo:hi]
        neighbors_dst_type = self.dst_type_list[lo:hi]
        if sort_by_time is False:
            return neighbors_idx, neighbors_src_type, neighbors_dst_type, neighbors_e_type, neighbors_ts
        # If no neighbor is found, return the empty slices.
        if len(neighbors_idx) == 0 or len(neighbors_ts) == 0:
            return neighbors_idx, neighbors_src_type, neighbors_dst_type, neighbors_e_type, neighbors_ts
        # BUG FIX: the previous hand-rolled binary search returned [:right]
        # even when neighbors_ts[right] < cut_time, dropping the latest
        # qualifying neighbor. searchsorted gives the exact count of
        # timestamps strictly below cut_time (the array is time-sorted).
        count = int(np.searchsorted(neighbors_ts, cut_time, side='left'))
        return (neighbors_idx[:count], neighbors_src_type[:count],
                neighbors_dst_type[:count], neighbors_e_type[:count],
                neighbors_ts[:count])

    def get_temporal_neighbor(self, src_idx_list, cut_time_list, num_neighbors=20, sort_by_time=True):
        """Batched neighbor lookup before each node's cut time.

        :return: five matrices of shape (len(src_idx_list), num_neighbors):
            node ids, src types, dst types, edge types (int32) and timestamps
            (float32), left-padded with zeros when a node has fewer neighbors
            than requested.
        """
        #assert(len(src_idx_list) == len(cut_time_list))
        out_ngh_node_batch = np.zeros((len(src_idx_list), num_neighbors)).astype(np.int32)
        out_ngh_t_batch = np.zeros((len(src_idx_list), num_neighbors)).astype(np.float32)
        out_ngh_eType_batch = np.zeros((len(src_idx_list), num_neighbors)).astype(np.int32)
        out_ngh_sType_batch = np.zeros((len(src_idx_list), num_neighbors)).astype(np.int32)
        out_ngh_dType_batch = np.zeros((len(src_idx_list), num_neighbors)).astype(np.int32)
        for i, (src_idx, cut_time) in enumerate(zip(src_idx_list, cut_time_list)):
            ngh_idx, ngh_sType, ngh_dType, ngh_eType, ngh_ts = self.find_before(src_idx, cut_time, sort_by_time)
            # BUG FIX: find_before returns views into the shared index arrays;
            # copy before patching zero timestamps so the index is not
            # silently corrupted across calls.
            ngh_ts = ngh_ts.copy()
            ngh_ts[ngh_ts == 0] = cut_time
            if len(ngh_idx) > 0:
                if self.uniform:
                    # Uniform sampling with replacement, then re-sorted by time.
                    sampled_idx = np.random.randint(0, len(ngh_idx), num_neighbors)
                    out_ngh_node_batch[i, :] = ngh_idx[sampled_idx]
                    out_ngh_t_batch[i, :] = ngh_ts[sampled_idx]
                    out_ngh_sType_batch[i, :] = ngh_sType[sampled_idx]
                    out_ngh_dType_batch[i, :] = ngh_dType[sampled_idx]
                    out_ngh_eType_batch[i, :] = ngh_eType[sampled_idx]
                    # resort based on time
                    pos = out_ngh_t_batch[i, :].argsort()
                    out_ngh_node_batch[i, :] = out_ngh_node_batch[i, :][pos]
                    # BUG FIX: these two previously rebound the whole batch
                    # matrix to a single reordered row (missing `[i, :] =`).
                    out_ngh_sType_batch[i, :] = out_ngh_sType_batch[i, :][pos]
                    out_ngh_dType_batch[i, :] = out_ngh_dType_batch[i, :][pos]
                    out_ngh_t_batch[i, :] = out_ngh_t_batch[i, :][pos]
                    out_ngh_eType_batch[i, :] = out_ngh_eType_batch[i, :][pos]
                else:
                    ngh_ts = ngh_ts[:num_neighbors]
                    ngh_idx = ngh_idx[:num_neighbors]
                    ngh_eType = ngh_eType[:num_neighbors]
                    ngh_sType = ngh_sType[:num_neighbors]
                    ngh_dType = ngh_dType[:num_neighbors]
                    assert(len(ngh_idx) <= num_neighbors)
                    assert(len(ngh_ts) <= num_neighbors)
                    assert(len(ngh_eType) <= num_neighbors)
                    assert(len(ngh_sType) <= num_neighbors)
                    assert(len(ngh_dType) <= num_neighbors)
                    # Left-pad: real neighbors occupy the rightmost slots.
                    out_ngh_node_batch[i, num_neighbors - len(ngh_idx):] = ngh_idx
                    out_ngh_sType_batch[i, num_neighbors - len(ngh_sType):] = ngh_sType
                    out_ngh_dType_batch[i, num_neighbors - len(ngh_dType):] = ngh_dType
                    out_ngh_t_batch[i, num_neighbors - len(ngh_ts):] = ngh_ts
                    out_ngh_eType_batch[i, num_neighbors - len(ngh_eType):] = ngh_eType
        return out_ngh_node_batch, out_ngh_sType_batch, out_ngh_dType_batch, out_ngh_eType_batch, out_ngh_t_batch

    def find_k_hop_temporal(self, src_idx_l, cut_time_l=None, fan_outs=[15], sort_by_time=True):
        """Sample the k-hop temporal sub-graph before the cut time.

        :return: per-hop lists of (node ids, src types, dst types, edge types,
            timestamps) arrays; hop i has one extra ``fan_outs[i]`` dimension.
        """
        x, s, d, y, z = self.get_temporal_neighbor(src_idx_l, cut_time_l, fan_outs[0], sort_by_time=sort_by_time)
        node_records = [x]
        sType_records = [s]
        dType_records = [d]
        eType_records = [y]
        t_records = [z]
        for i in range(1, len(fan_outs)):
            # Expand the frontier: previous hop's nodes become the new seeds.
            ngn_node_est, ngh_t_est = node_records[-1], t_records[-1]  # [N, *([num_neighbors] * (k - 1))]
            orig_shape = ngn_node_est.shape
            ngn_node_est = ngn_node_est.flatten()
            ngn_t_est = ngh_t_est.flatten()
            out_ngh_node_batch, out_ngh_sType_batch, out_ngh_dType_batch, out_ngh_eType_batch, out_ngh_t_batch = self.get_temporal_neighbor(ngn_node_est, ngn_t_est, fan_outs[i])
            out_ngh_node_batch = out_ngh_node_batch.reshape(*orig_shape, fan_outs[i])  # [N, *([num_neighbors] * k)]
            out_ngh_sType_batch = out_ngh_sType_batch.reshape(*orig_shape, fan_outs[i])
            out_ngh_dType_batch = out_ngh_dType_batch.reshape(*orig_shape, fan_outs[i])
            out_ngh_eType_batch = out_ngh_eType_batch.reshape(*orig_shape, fan_outs[i])
            out_ngh_t_batch = out_ngh_t_batch.reshape(*orig_shape, fan_outs[i])
            node_records.append(out_ngh_node_batch)
            sType_records.append(out_ngh_sType_batch)
            dType_records.append(out_ngh_dType_batch)
            eType_records.append(out_ngh_eType_batch)
            t_records.append(out_ngh_t_batch)
        return node_records, sType_records, dType_records, eType_records, t_records
class Graph(object):
    """Thin DGL wrapper around the knowledge-graph edge list with block sampling."""

    def __init__(self, kg_df, num_nodes, device="cpu"):
        self.num_relations = None
        self.device = device
        self.num_nodes = num_nodes
        self.g = self.construct_graph(kg_df)

    def construct_graph(self, kg_df):
        """Build a DGLGraph with edges t -> h and relation ids stored on edges."""
        graph = dgl.DGLGraph()
        graph.add_nodes(self.num_nodes)
        # Edges point from tail to head.
        graph.add_edges(kg_df['t'].astype(np.int32), kg_df['h'].astype(np.int32))
        graph.edata["type"] = torch.LongTensor(kg_df["r"])
        self.num_nodes = graph.num_nodes()
        self.num_relations = kg_df.r.nunique()
        return graph

    def sample_blocks(self, seeds, fan_outs):
        """Sample one message-flow block per fan-out, returned innermost-first."""
        frontier_seeds = torch.LongTensor(np.asarray(seeds))
        blocks = []
        for fan_out in fan_outs:
            frontier = dgl.sampling.sample_neighbors(self.g, frontier_seeds, fan_out, replace=True)
            block = dgl.to_block(frontier, frontier_seeds)
            # The block's source nodes seed the next (outer) hop.
            frontier_seeds = block.srcdata[dgl.NID]
            blocks.insert(0, block)
        return [block.to(self.device) for block in blocks]
def sample_neg_items_for_u(pos_items, start_item_id, end_item_id, n_sample_neg_items, sequential=False):
    """Sample negative item ids for a user by rejection sampling.

    When ``sequential`` is True, ``n_sample_neg_items`` negatives are drawn
    per positive item (each only required to differ from its own positive);
    otherwise a single pool of negatives disjoint from ``pos_items`` is drawn.

    :return: numpy array of the sampled negative item ids.
    """
    drawn = []
    if sequential is True:
        for pos_item in pos_items:
            for _ in range(n_sample_neg_items):
                while True:
                    candidate = np.random.randint(low=start_item_id, high=end_item_id, size=1)[0]
                    if candidate != pos_item and candidate not in drawn:
                        drawn.append(candidate)
                        break
    else:
        while len(drawn) < n_sample_neg_items:
            candidate = np.random.randint(low=start_item_id, high=end_item_id, size=1)[0]
            if candidate not in pos_items and candidate not in drawn:
                drawn.append(candidate)
    return np.array(drawn)
def sequence_data_partition(df, with_time=False, discretize_time=False):
    """
    Partition interactions into train/valid/test per user for sequence modeling.

    Users/items with fewer than 5 unique counterparts are filtered out first
    (hard-coded threshold). Each user's last interaction goes to test, the
    second-to-last to validation and the rest to train; users with fewer than
    3 interactions contribute to train only.

    :param df: interaction frame with userId / itemId / timestamp columns.
    :param with_time: include timestamps in the per-user sequences.
    :param discretize_time: map raw timestamps onto a 1-based integer grid.
    :return: [user_train, user_valid, user_test] dicts keyed by userId.
    """
    # Hard-coded, filtered the invalid items and users.
    user_count = df.groupby("userId")[['itemId']].nunique()
    item_count = df.groupby("itemId")[['userId']].nunique()
    valid_user_count = user_count.query("itemId>=5").reset_index()
    valid_item_count = item_count.query("userId>=5").reset_index()
    df = df.merge(valid_user_count[["userId"]], on="userId", how="right")
    df = df.merge(valid_item_count[["itemId"]], on="itemId", how="right")

    def norm_time(time_vectors):
        # Map timestamps to a 1-based integer grid whose step is the
        # smallest observed gap between consecutive timestamps.
        time_vectors = np.array(time_vectors)
        time_min = time_vectors.min()
        time_diff = np.diff(time_vectors)
        if len(time_diff) <= 1:
            time_scale = 1
        else:
            time_scale = time_diff.min()
        # BUG FIX: duplicate timestamps give a zero gap — guard against
        # division by zero.
        if time_scale <= 0:
            time_scale = 1
        # BUG FIX: int() on an ndarray raises TypeError; convert element-wise.
        time_vectors = (np.round((time_vectors - time_min) / time_scale) + 1).astype(int)
        return time_vectors

    if discretize_time:
        user_dict = generate_user_dict(df, sort=True, with_time=with_time, norm_func=norm_time)
    else:
        user_dict = generate_user_dict(df, sort=True, with_time=with_time, norm_func=None)
    user_train = {}
    user_valid = {}
    user_test = {}
    for user, item_infos in user_dict.items():
        nfeedback = len(item_infos)
        if nfeedback < 3:
            # Too few interactions to hold anything out.
            user_train[user] = item_infos
        else:
            user_train[user] = item_infos[:-2]
            user_valid[user] = []
            user_valid[user].append(item_infos[-2])
            user_test[user] = []
            user_test[user].append(item_infos[-1])
    print('Preparing done...')
    return [user_train, user_valid, user_test]
def generate_user_dict(df, sort=True, with_time=False, norm_func=None):
    """
    Build a per-user interaction dictionary.

    :param df: interaction frame with userId / itemId / timestamp columns.
    :param sort: sort each user's interactions chronologically.
    :param with_time: return (itemId, timestamp) pairs instead of bare ids.
    :param norm_func: optional transform applied to each user's timestamp list
        (only used when with_time is True).
    :return: {userId: [itemId, ...]} or {userId: [(itemId, timestamp), ...]}.
    """
    def _per_user(column):
        # Collect one list of `column` values per user, optionally time-sorted.
        if sort is True:
            collected = df.groupby("userId").apply(lambda sub: sub.sort_values("timestamp")[column].tolist())
        else:
            collected = df.groupby("userId").apply(lambda sub: sub[column].tolist())
        return collected.reset_index().rename(columns={0: column})

    if with_time is True:
        merged = _per_user("itemId").merge(_per_user("timestamp"), on="userId")
        if norm_func is not None:
            merged["timestamp"] = merged.timestamp.apply(lambda time_vectors: norm_func(time_vectors))
        per_user_pairs = [list(zip(items, times))
                          for items, times in zip(merged.itemId.values, merged.timestamp.values)]
        return dict(zip(merged.userId.values, per_user_pairs))
    grouped = _per_user("itemId")
    return dict(zip(grouped.userId.values, grouped.itemId.values))
def computeRePos(time_seq, time_span):
    """Compute the pairwise absolute time gaps, clipped to ``time_span``.

    :param time_seq: 1-D numpy array of timestamps.
    :param time_span: ceiling applied to every gap.
    :return: int32 matrix of shape (len(time_seq), len(time_seq)).
    """
    # Vectorized replacement for the original O(n^2) Python double loop:
    # broadcast the pairwise differences, clip at time_span, then truncate to
    # int32 exactly as assignment into an int32 matrix did.
    gaps = np.abs(time_seq[:, None] - time_seq[None, :])
    return np.minimum(gaps, time_span).astype(np.int32)
62,084 | katrina-m/RecModels_Pytorch | refs/heads/master | /train/train_SASRec.py | import random
from train.parse_args import parse_SASRec_args
from utility.log_helper import *
from utility.metrics import *
from utility.dao_helper import *
from model.SASRec import SASRec
from dao.SASRec_dataloader import FeatureGen
from dao.load_test_data import load_data
# Pin training to GPU 0 and force synchronous CUDA launches so errors
# surface at the failing call (easier debugging).
os.environ['CUDA_VISIBLE_DEVICES'] = '0'
os.environ['CUDA_LAUNCH_BLOCKING'] = "1"
def train(args):
    """End-to-end SASRec training entry point.

    Seeds the RNGs, configures file logging, loads the interaction data,
    builds loaders and model, and delegates the loop to ``model.fit``.

    :param args: namespace produced by ``parse_SASRec_args``.
    """
    random.seed(args.seed)
    np.random.seed(args.seed)
    torch.manual_seed(args.seed)
    log_save_id = create_log_id(args.save_dir)
    logging_config(folder=args.save_dir, name='log{:d}'.format(log_save_id), no_console=False)
    logging.info(args)
    # GPU / CPU
    n_gpu = torch.cuda.device_count()
    if n_gpu > 0:
        torch.cuda.manual_seed_all(args.seed)
    df = load_data(args.corpus_name)
    featureGen = FeatureGen(df, input_max_length=args.maxlen, device=args.device)
    loader_train, loader_val = featureGen.prepare_loader(df, batch_size=args.batch_size, valid_batch_size=args.valid_batch_size)
    model = SASRec(featureGen.num_users, featureGen.num_items, args)
    adam_optimizer = torch.optim.Adam(model.parameters(), lr=args.lr, betas=(0.9, 0.98))
    model.fit(loader_train, loader_val, adam_optimizer)
# Script entry point: parse the CLI arguments and run training.
if __name__ == '__main__':
    args = parse_SASRec_args()
    train(args)
| {"/dao/tgat_data_loader_dgl.py": ["/utility/dao_helper.py"], "/model/SASRec.py": ["/model/BaseModel.py", "/utility/components.py"], "/train/train_TGAT.py": ["/train/parse_args.py", "/dao/load_test_data.py", "/dao/tgat_data_loader_dgl.py", "/model/TGAT.py"], "/model/BaseModel.py": ["/utility/metrics.py"], "/model/SASGFRec.py": ["/model/BaseModel.py", "/utility/components.py"], "/train/train_SASRec.py": ["/train/parse_args.py", "/utility/metrics.py", "/utility/dao_helper.py", "/model/SASRec.py", "/dao/SASRec_dataloader.py", "/dao/load_test_data.py"], "/dao/SASRec_dataloader.py": ["/utility/dao_helper.py"], "/dao/SeqGFRec_dataloader.py": ["/utility/dao_helper.py"]} |
62,085 | katrina-m/RecModels_Pytorch | refs/heads/master | /dao/SASRec_dataloader.py | from torch.utils.data import Dataset
from utility.dao_helper import *
from torch.utils.data.dataloader import DataLoader
import torch
class FeatureGen(object):
    """Builds dense id mappings from a ratings dataframe and produces
    train/valid DataLoaders for SASRec.

    Users are mapped to 0..num_users-1; items to 1..num_items so that 0 can
    serve as the padding item id in sequences.
    """

    def __init__(self, df, input_max_length, device="cpu"):
        self.device = device
        self.input_max_length = input_max_length

        raw_user_ids = list(df.userId.unique())
        self.user_id_map = {uid: idx for idx, uid in enumerate(raw_user_ids)}
        self.num_users = df.userId.nunique()

        raw_item_ids = list(df.itemId.unique())
        # Items start at 1; id 0 is reserved for sequence padding.
        self.item_id_map = {iid: idx for idx, iid in enumerate(raw_item_ids, start=1)}
        self.num_items = df.itemId.nunique()

    def prepare_loader(self, data, batch_size, valid_batch_size):
        """Remap ids, split each user's history, and wrap the splits in loaders."""
        data = self.format_data(data)
        user_train, user_valid, user_test = sequence_data_partition(data)

        def make_dataset(mode):
            # All splits share the same dictionaries; `mode` selects behaviour.
            return SASRecDataset(train_user_dict=user_train, valid_user_dict=user_valid,
                                 test_user_dict=user_test, num_items=self.num_items,
                                 input_max_length=self.input_max_length, mode=mode,
                                 device=self.device)

        train_data = make_dataset("train")
        valid_data = make_dataset("valid")
        test_data = make_dataset("test")  # built but not returned, matching the original API
        loader_train = DataLoader(train_data, batch_size=batch_size, collate_fn=train_data.collate_fn)
        loader_val = DataLoader(valid_data, batch_size=valid_batch_size, collate_fn=valid_data.collate_fn)
        return loader_train, loader_val

    def format_data(self, data):
        """Return a copy of `data` with raw user/item ids replaced by dense ids."""
        remapped = data.copy()
        remapped.loc[:, "userId"] = [self.user_id_map[uid] for uid in remapped.userId]
        remapped.loc[:, "itemId"] = [self.item_id_map[iid] for iid in remapped.itemId]
        return remapped

    def generate_feature(self, userId, itemIds):
        # NOTE(review): relies on self.user_dict, which is never set in
        # __init__ (the assignment is commented out upstream) — calling this
        # will raise AttributeError until that cache is restored.
        return torch.LongTensor(np.array([userId])).to(self.device), \
            torch.LongTensor(np.array(self.user_dict[userId][-self.input_max_length:])).unsqueeze(1).to(self.device), \
            torch.LongTensor(np.array(itemIds)).unsqueeze(0).to(self.device)

    def format_data_single(self, userId, itemIds):
        """Map one raw (user, items) pair to dense ids; (None, None) if user unseen."""
        if userId not in self.user_id_map:
            return None, None
        return self.user_id_map[userId], [self.item_id_map[iid] for iid in itemIds]
class SASRecDataset(Dataset):
    """
    SASRec dataset class in order to use Pytorch DataLoader.

    Each user's interaction history is left-padded with 0 (the padding item
    id) to `input_max_length`.  Depending on `mode`:
      * "train": yields (user, seq, pos, neg) — next-item targets plus one
        sampled negative per position;
      * "valid"/"test": yields (user, seq, candidates) where `candidates` is
        the held-out item followed by 100 sampled negatives (the standard
        SASRec 1-vs-100 ranking evaluation).
    """

    def __init__(self, train_user_dict, num_items, valid_user_dict=None,
                 test_user_dict=None, input_max_length=200, mode="train", device="cpu"):
        super().__init__()
        if mode == "valid":
            assert valid_user_dict is not None
        elif mode == "test":
            # Fixed: the original `assert valid_user_dict, test_user_dict is not None`
            # used the second clause as the assert *message*, never checking it.
            assert valid_user_dict is not None and test_user_dict is not None
        self.mode = mode
        self.device = device
        self.num_items = num_items
        self.input_max_length = input_max_length
        self.train_user_dict = train_user_dict
        self.valid_user_dict = valid_user_dict
        self.test_user_dict = test_user_dict
        self.train_data = list(train_user_dict.items())
        # Tolerate missing valid/test dicts when `mode` does not need them
        # (the original crashed on `None.items()` before the asserts ran).
        self.valid_data = list(valid_user_dict.items()) if valid_user_dict is not None else []
        self.test_data = list(test_user_dict.items()) if test_user_dict is not None else []

    def collate_fn(self, batch):
        """Stack per-sample arrays into LongTensors on the target device."""
        if self.mode == "train":
            user, seq, pos, neg = zip(*batch)
            return (torch.LongTensor(user).to(self.device),
                    torch.LongTensor(seq).to(self.device),
                    torch.LongTensor(pos).to(self.device),
                    torch.LongTensor(neg).to(self.device))
        user, seq, cand_item_idx = zip(*batch)
        return (torch.LongTensor(user).to(self.device),
                torch.LongTensor(seq).to(self.device),
                torch.LongTensor(cand_item_idx).to(self.device))

    def __getitem__(self, index):
        if self.mode == "train":
            user, item_list = self.train_data[index]
            # np.int64 replaces the `np.long` alias removed in numpy >= 1.24.
            seq = np.zeros([self.input_max_length], dtype=np.int64)
            pos = np.zeros([self.input_max_length], dtype=np.int64)
            neg = np.zeros([self.input_max_length], dtype=np.int64)
            nxt = item_list[-1]
            idx = self.input_max_length - 1
            ts = set(item_list)
            # Walk the history backwards so the most recent items fill the tail.
            for i in reversed(item_list[:-1]):
                seq[idx] = i
                pos[idx] = nxt
                if nxt != 0:
                    neg[idx] = sample_neg_items_for_u(ts, n_sample_neg_items=1, start_item_id=1,
                                                      end_item_id=self.num_items, sequential=False)
                nxt = i
                idx -= 1
                if idx == -1:
                    break
            return user, seq, pos, neg
        elif self.mode == "valid":
            seq = np.zeros([self.input_max_length], dtype=np.int64)
            idx = self.input_max_length - 1
            user, target_item = self.valid_data[index]
            for i in reversed(self.train_user_dict[user]):
                seq[idx] = i
                idx -= 1
                if idx == -1: break
            # Negatives must avoid everything the user has interacted with.
            rated = set(self.train_user_dict[user])
            rated.add(target_item[0])
            valid_item_idx = [target_item[0]]
            for _ in range(100):
                t = sample_neg_items_for_u(rated, n_sample_neg_items=1, start_item_id=1,
                                           end_item_id=self.num_items, sequential=False)[0]
                valid_item_idx.append(t)
            return user, seq, valid_item_idx
        elif self.mode == "test":
            seq = np.zeros([self.input_max_length], dtype=np.int64)
            idx = self.input_max_length - 1
            user, target_item = self.test_data[index]
            # The validation item is appended to the history for testing.
            valid_user_info = self.valid_user_dict[user]
            seq[idx] = valid_user_info[0]
            idx -= 1
            for i in reversed(self.train_user_dict[user]):
                seq[idx] = i
                idx -= 1
                if idx == -1: break
            rated = set(self.train_user_dict[user])
            rated.add(target_item[0])
            rated.add(valid_user_info[0])
            test_item_idx = [target_item[0]]
            for _ in range(100):
                t = sample_neg_items_for_u(rated, n_sample_neg_items=1, start_item_id=1,
                                           end_item_id=self.num_items, sequential=False)[0]
                test_item_idx.append(t)
            return user, seq, test_item_idx

    def __len__(self):
        if self.mode == "train":
            return len(self.train_data)
        elif self.mode == "valid":
            return len(self.valid_data)
        elif self.mode == 'test':
            return len(self.test_data)
62,086 | katrina-m/RecModels_Pytorch | refs/heads/master | /dao/SeqGFRec_dataloader.py | from torch.utils.data import Dataset
from torch.utils.data.dataloader import DataLoader
import torch
from utility.dao_helper import NeighborFinder, sequence_data_partition, sample_neg_items_for_u, RandEdgeSampler
from sklearn.preprocessing import LabelEncoder
import numpy as np
import pandas as pd
class FeatureGen(object):
    """Id-mapping and loader factory for the sequential graph-fusion model.

    Items, users and (optionally) knowledge-graph entities are packed into a
    single node-id space starting at 1 (0 is padding).  Items come first so
    item ids stay in [1, num_items]; raw user ids are shifted by
    `user_offset` before mapping so they cannot collide with item/entity ids.
    """

    def __init__(self, df, kg_df, input_max_length, fan_outs, device="cpu"):
        # Kept for API compatibility with the SASRec FeatureGen.
        self.user_id_map = None
        self.item_id_map = None
        self.num_users = None
        self.fan_outs = fan_outs
        self.device = device
        self.input_max_length = input_max_length

        item_ids = set(df.itemId.unique())
        if kg_df is not None:
            node_ids = set(list(kg_df.h.unique()) + list(kg_df.t.unique()))
            # KG nodes that are not items are appended after the users.
            rest_node_ids = node_ids - item_ids
            self.user_offset = max(max(node_ids), max(item_ids))
            # Fixed: the original did `list(df.userId.unique()) + self.user_offset`
            # — list-plus-int, a TypeError.  The intent (see format_data) is to
            # shift every raw user id by the offset.
            user_ids = [u + self.user_offset for u in df.userId.unique()]
            all_nodes = list(item_ids) + user_ids + list(rest_node_ids)
        else:
            self.user_offset = max(item_ids)
            user_ids = [u + self.user_offset for u in df.userId.unique()]
            all_nodes = list(item_ids) + user_ids
        self.node_id_map = dict(zip(all_nodes, range(1, len(all_nodes) + 1)))
        self.num_items = len(item_ids)
        self.num_nodes = len(all_nodes)

    def prepare_loader(self, data, kg_data, batch_size, valid_batch_size, kg_batch_size):
        """Build train/valid loaders plus a KG pre-training loader."""
        data, kg_data = self.format_data(data, kg_data)
        graph = self.create_graph(data, kg_data)
        user_train, user_valid, user_test = sequence_data_partition(data, with_time=True)
        common = dict(train_user_dict=user_train, valid_user_dict=user_valid,
                      test_user_dict=user_test, g=graph, num_items=self.num_items,
                      fan_outs=self.fan_outs, input_max_length=self.input_max_length,
                      device=self.device)
        train_data = SASGFRecDataset(mode="train", **common)
        valid_data = SASGFRecDataset(mode="valid", **common)
        pre_train_graph = self.create_pre_train_graph(kg_data)
        pre_train_data = GraphDataset(kg_data.h, kg_data.t, pre_train_graph,
                                      fan_outs=self.fan_outs, device=self.device)
        loader_kg = DataLoader(pre_train_data, batch_size=kg_batch_size, collate_fn=pre_train_data.collate_fn)
        loader_train = DataLoader(train_data, batch_size=batch_size, collate_fn=train_data.collate_fn)
        loader_val = DataLoader(valid_data, batch_size=valid_batch_size, collate_fn=valid_data.collate_fn)
        return loader_train, loader_val, loader_kg

    def create_graph(self, cf_data, kg_data):
        """
        Create the graph from the item knowledge graph, falling back to the
        user-item interaction data when no KG is supplied.

        :param cf_data: formatted user-item interactions (userId/itemId/timestamp)
        :param kg_data: formatted knowledge-graph triples (h/r/t) or None
        :return: a NeighborFinder over the chosen edge table
        """
        if kg_data is not None:
            # Fixed: the original called kg_data.copy() *before* checking for
            # None, so the no-KG path raised AttributeError instead of running.
            edges = kg_data.copy()
            edges["timestamp"] = np.zeros(len(edges))
            self.num_relations = edges.r.nunique()
        else:
            edges = cf_data[["userId", "itemId", "timestamp"]].rename(
                columns={"userId": "h", "itemId": "t"}).copy()
            edges["r"] = 0
            edges["hType"] = 0
            edges["tType"] = 1
            # Fixed typo: was `self.num_realtions`, leaving num_relations unset.
            self.num_relations = 1
        return NeighborFinder(edges)

    def create_pre_train_graph(self, kg_data):
        """Build the KG-only graph used for embedding pre-training.

        Relation ids are shifted by 1 so that 0 stays free for padding.
        """
        edges = kg_data.copy()
        edges["timestamp"] = np.zeros(len(edges))
        edges["r"] += 1
        edges["hType"] = edges.r
        edges["tType"] = edges.r
        return NeighborFinder(edges)

    def format_data(self, df, kg_df):
        """Return copies of the interaction (and KG) frames remapped to dense node ids."""
        remapped = df.copy()
        remapped.loc[:, "userId"] = [self.node_id_map[u + self.user_offset] for u in remapped.userId]
        remapped.loc[:, "itemId"] = [self.node_id_map[i] for i in remapped.itemId]
        if kg_df is not None:
            remapped_kg = kg_df.copy()
            remapped_kg.loc[:, "h"] = [self.node_id_map[h] for h in remapped_kg.h]
            remapped_kg.loc[:, "t"] = [self.node_id_map[t] for t in remapped_kg.t]
            remapped_kg["r"] = LabelEncoder().fit_transform(remapped_kg.r)
        else:
            remapped_kg = None
        return remapped, remapped_kg

    def generate_feature(self, userId, itemIds):
        # Single-example inference path; not implemented for this model yet.
        pass

    def format_data_single(self, userId, itemIds):
        # Single-example id remapping; not implemented for this model yet.
        pass
class SASGFRecDataset(Dataset):
    """
    SASRec-style sequential dataset that additionally samples k-hop temporal
    neighbourhoods ("blocks") for the most recent `graph_maxlen` positions of
    each user's (item, timestamp) history.
    """

    def __init__(self, train_user_dict, g, num_items, valid_user_dict=None, test_user_dict=None,
                 fan_outs=[20], input_max_length=200, mode="train", device="cpu", graph_maxlen=20):
        super().__init__()
        if mode == "valid":
            assert valid_user_dict is not None
        elif mode == "test":
            # Fixed: the original only asserted valid_user_dict (the second
            # clause was the assert message, never evaluated as a check).
            assert valid_user_dict is not None and test_user_dict is not None
        self.mode = mode
        self.device = device
        self.num_items = num_items
        self.fan_outs = fan_outs
        self.input_max_length = input_max_length
        # Number of trailing sequence positions used as graph seeds.  The
        # original referenced an undefined `self.graph_maxlen` in collate_fn
        # (AttributeError on the first training batch) while hard-coding 20
        # everywhere else; 20 is kept as the default.
        self.graph_maxlen = graph_maxlen
        self.train_user_dict = train_user_dict
        self.valid_user_dict = valid_user_dict
        self.test_user_dict = test_user_dict
        self.train_data = list(train_user_dict.items())
        # Tolerate missing valid/test dicts when `mode` does not need them.
        self.valid_data = list(valid_user_dict.items()) if valid_user_dict is not None else []
        self.test_data = list(test_user_dict.items()) if test_user_dict is not None else []
        self.g = g

    def _blocks_to_tensors(self, blocks, seeds, seeds_ts):
        """Convert per-sample neighbour blocks into batched device tensors, hop
        by hop.  Each hop yields (neighbours, seed ids, src types, dst types,
        edge types, neighbour timestamps, seed timestamps); the neighbours of
        one hop become the seeds of the next."""
        block_tensors = []
        for i, block in enumerate(blocks):
            ngh_batch, ngh_src_type, ngh_dst_type, ngh_edge_type, ngh_ts = zip(*block)
            ngh_batch = torch.LongTensor(ngh_batch).to(self.device)
            ngh_src_type = torch.LongTensor(ngh_src_type).to(self.device)
            ngh_dst_type = torch.LongTensor(ngh_dst_type).to(self.device)
            ngh_edge_type = torch.LongTensor(ngh_edge_type).to(self.device)
            ngh_ts = torch.FloatTensor(ngh_ts).to(self.device)
            fan_out = self.fan_outs[i]
            block_tensors.append((ngh_batch.view(-1, fan_out),
                                  seeds.flatten(),
                                  ngh_src_type.view(-1, fan_out),
                                  ngh_dst_type.view(-1, fan_out),
                                  ngh_edge_type.view(-1, fan_out),
                                  ngh_ts.view(-1, fan_out),
                                  seeds_ts.flatten()))
            seeds = ngh_batch.view(-1)
            seeds_ts = ngh_ts.view(-1)
        return block_tensors

    def collate_fn(self, batch):
        if self.mode == "train":
            user, seq, seq_ts, pos, neg, blocks = zip(*batch)
            seeds = torch.LongTensor(np.array(seq)[:, -self.graph_maxlen:]).to(self.device)
            # Fixed: seed timestamps were sliced from `seq` (item ids) instead
            # of `seq_ts`, so downstream time deltas were computed on item ids.
            seeds_ts = torch.LongTensor(np.array(seq_ts)[:, -self.graph_maxlen:]).to(self.device)
            block_tensors = self._blocks_to_tensors(zip(*blocks), seeds, seeds_ts)
            return (torch.LongTensor(user).to(self.device),
                    torch.LongTensor(seq).to(self.device),
                    torch.LongTensor(seq_ts).to(self.device),
                    torch.LongTensor(pos).to(self.device),
                    torch.LongTensor(neg).to(self.device),
                    block_tensors)
        else:
            user, seq, seq_ts, valid_item_idx, blocks = zip(*batch)
            seeds = torch.LongTensor(np.array(seq)[:, -self.graph_maxlen:]).to(self.device)
            seeds_ts = torch.LongTensor(np.array(seq_ts)[:, -self.graph_maxlen:]).to(self.device)
            block_tensors = self._blocks_to_tensors(zip(*blocks), seeds, seeds_ts)
            return (torch.LongTensor(user).to(self.device),
                    torch.LongTensor(seq).to(self.device),
                    torch.FloatTensor(seq_ts).to(self.device),
                    torch.LongTensor(valid_item_idx).to(self.device),
                    block_tensors)

    def __getitem__(self, index):
        if self.mode == "train":
            user, item_list = self.train_data[index]
            # np.int64 replaces the `np.long` alias removed in numpy >= 1.24.
            seq = np.zeros([self.input_max_length], dtype=np.int64)
            seq_time = np.zeros([self.input_max_length], dtype=np.int64)
            pos = np.zeros([self.input_max_length], dtype=np.int64)
            neg = np.zeros([self.input_max_length], dtype=np.int64)
            nxt, nxt_time = item_list[-1]
            idx = self.input_max_length - 1
            ts = set(item for item, time in item_list)
            for item, when in reversed(item_list[:-1]):
                seq[idx] = item
                seq_time[idx] = when
                pos[idx] = nxt
                if nxt != 0:
                    neg[idx] = sample_neg_items_for_u(ts, n_sample_neg_items=1, start_item_id=1,
                                                      end_item_id=self.num_items, sequential=False)
                nxt = item
                idx -= 1
                if idx == -1:
                    break
            blocks = self.g.find_k_hop_temporal(seq[-self.graph_maxlen:],
                                                seq_time[-self.graph_maxlen:], self.fan_outs)
            return user, seq, seq_time, pos, neg, list(zip(*blocks))
        elif self.mode == "valid":
            seq = np.zeros([self.input_max_length], dtype=np.int64)
            seq_time = np.zeros([self.input_max_length], dtype=np.int64)
            idx = self.input_max_length - 1
            user, target_item = self.valid_data[index]
            for item, when in reversed(self.train_user_dict[user]):
                seq[idx] = item
                seq_time[idx] = when
                idx -= 1
                if idx == -1: break
            rated = set(item for item, time in self.train_user_dict[user])
            rated.add(target_item[0][0])
            valid_item_idx = [target_item[0][0]]
            for _ in range(100):
                t = sample_neg_items_for_u(rated, n_sample_neg_items=1, start_item_id=1,
                                           end_item_id=self.num_items, sequential=False)[0]
                valid_item_idx.append(t)
            blocks = self.g.find_k_hop_temporal(seq[-self.graph_maxlen:],
                                                seq_time[-self.graph_maxlen:], self.fan_outs)
            return user, seq, seq_time, valid_item_idx, blocks and list(zip(*blocks))
        elif self.mode == "test":
            # NOTE(review): this branch still mirrors the plain-SASRec dataset:
            # it treats history entries as bare item ids even though this class
            # stores (item, timestamp) pairs, and it returns no graph blocks,
            # which collate_fn cannot unpack.  Left behaviourally as-is
            # pending a real fix.
            seq = np.zeros([self.input_max_length], dtype=np.int64)
            idx = self.input_max_length - 1
            user, target_item = self.test_data[index]
            valid_user_info = self.valid_user_dict[user]
            seq[idx] = valid_user_info[0]
            idx -= 1
            for i in reversed(self.train_user_dict[user]):
                seq[idx] = i
                idx -= 1
                if idx == -1: break
            rated = set(self.train_user_dict[user])
            rated.add(target_item[0])
            rated.add(valid_user_info[0])
            test_item_idx = [target_item[0]]
            for _ in range(100):
                t = sample_neg_items_for_u(rated, n_sample_neg_items=1, start_item_id=1,
                                           end_item_id=self.num_items, sequential=False)[0]
                test_item_idx.append(t)
            return user, seq, test_item_idx

    def __len__(self):
        if self.mode == "train":
            return len(self.train_data)
        elif self.mode == "valid":
            return len(self.valid_data)
        elif self.mode == 'test':
            return len(self.test_data)
class GraphDataset(Dataset):
    """Edge dataset for KG pre-training: iterates positive (src, dst) pairs
    and, at collate time, samples corrupted sources plus k-hop temporal
    neighbourhoods for source, destination and corrupted-source nodes."""

    def __init__(self, src_idx_list, dst_idx_list, ngh_finder, fan_outs, device="cpu"):
        super().__init__()
        self.device = device
        self.fan_outs = fan_outs
        self.src_idx_list = src_idx_list
        self.dst_idx_list = dst_idx_list
        self.rand_sampler = RandEdgeSampler(src_idx_list, dst_idx_list)
        self.g = ngh_finder

    def __getitem__(self, index):
        # A sample is simply one positive (src, dst) edge.
        return self.src_idx_list[index], self.dst_idx_list[index]

    def collate_fn(self, batch):
        srcs, dsts = zip(*batch)
        # One corrupted (negative) source per positive edge.
        fake_srcs, _fake_dsts = self.rand_sampler.sample(len(srcs))
        zeros = np.zeros_like(srcs)

        def sample_hops(nodes):
            # Cut time 0 / unsorted: pre-training ignores temporal ordering.
            return self.g.find_k_hop_temporal(nodes, cut_time_l=zeros,
                                              fan_outs=self.fan_outs, sort_by_time=False)

        return (self.convert_block_to_gpu(sample_hops(srcs), srcs),
                self.convert_block_to_gpu(sample_hops(dsts), dsts),
                self.convert_block_to_gpu(sample_hops(fake_srcs), fake_srcs))

    def convert_block_to_gpu(self, blocks, seeds):
        """Turn hop-wise neighbour arrays into device tensors; each hop's
        neighbours become the seeds of the following hop."""
        out = []
        seeds = torch.LongTensor(seeds).to(self.device)
        seeds_ts = torch.zeros_like(seeds).to(self.device)
        for hop, (ngh, src_t, dst_t, edge_t, ngh_ts) in enumerate(zip(*blocks)):
            ngh = torch.LongTensor(ngh).to(self.device)
            src_t = torch.LongTensor(src_t).to(self.device)
            dst_t = torch.LongTensor(dst_t).to(self.device)
            edge_t = torch.LongTensor(edge_t).to(self.device)
            ngh_ts = torch.FloatTensor(ngh_ts).to(self.device)
            width = self.fan_outs[hop]
            out.append((ngh.view(-1, width), seeds.flatten(),
                        src_t.view(-1, width), dst_t.view(-1, width),
                        edge_t.view(-1, width), ngh_ts.view(-1, width),
                        seeds_ts.flatten()))
            seeds = ngh.view(-1)
            seeds_ts = ngh_ts.view(-1)
        return out

    def __len__(self):
        return len(self.src_idx_list)
62,087 | katrina-m/RecModels_Pytorch | refs/heads/master | /dao/load_test_data.py | import pandas as pd
import os
import numpy as np
def load_data(data_name, data_dir="/tf/data/chenjiazhen/movie_data/movielens_data/ml_1m/"):
    """Load the preprocessed ratings table for a named corpus.

    :param data_name: corpus identifier; only "ml-1m" is currently supported.
    :param data_dir: directory containing processed_rating.csv.  The default
        keeps the original hard-coded location for backward compatibility,
        but callers can now point at their own data.
    :return: ratings DataFrame, or None for unknown corpora (original behaviour).
    """
    if data_name == "ml-1m":
        return pd.read_csv(os.path.join(data_dir, "processed_rating.csv"))
    # Unknown corpus: fall through to None, matching the original silent pass.
    return None
def load_movie_data(corpus_name, kg=False, data_dir="/tf/data/chenjiazhen/movie_data/movielens_data/"):
    """Load raw MovieLens ratings and, optionally, the knowledge graph.

    When `kg` is True, item ids are remapped to KG entity ids; items absent
    from the KG receive fresh ids appended after the largest entity id.

    :param corpus_name: corpus identifier; only "ml-1m" is supported.
    :param kg: whether to also load and align the knowledge graph.
    :param data_dir: root data directory (default keeps the original
        hard-coded location for backward compatibility).
    :return: (ratings_df, kg_df); kg_df is None when `kg` is False.
        Unknown corpora return None, matching the original fall-through.
    """
    if corpus_name != "ml-1m":
        return None
    rating_df = pd.read_csv(os.path.join(data_dir, "ml_1m", "ratings.csv"))
    if not kg:
        return rating_df, None
    kg_df = pd.read_csv(os.path.join(data_dir, "kg.csv"))
    mapping_df = pd.read_csv(os.path.join(data_dir, "mapping.csv")).astype(np.int32)
    item_id_map = dict(zip(mapping_df.itemId.values, mapping_df.entityId.values))
    # Some itemIds are not in the knowledge graph; give them ids after the
    # largest existing entity id, registering each new id as we go.
    next_id = max(kg_df.h.max(), kg_df.t.max()) + 1
    remapped = []
    for item_id in rating_df.itemId:
        if item_id not in item_id_map:
            item_id_map[item_id] = next_id
            next_id += 1
        remapped.append(item_id_map[item_id])
    rating_df["itemId"] = remapped
    return rating_df, kg_df
| {"/dao/tgat_data_loader_dgl.py": ["/utility/dao_helper.py"], "/model/SASRec.py": ["/model/BaseModel.py", "/utility/components.py"], "/train/train_TGAT.py": ["/train/parse_args.py", "/dao/load_test_data.py", "/dao/tgat_data_loader_dgl.py", "/model/TGAT.py"], "/model/BaseModel.py": ["/utility/metrics.py"], "/model/SASGFRec.py": ["/model/BaseModel.py", "/utility/components.py"], "/train/train_SASRec.py": ["/train/parse_args.py", "/utility/metrics.py", "/utility/dao_helper.py", "/model/SASRec.py", "/dao/SASRec_dataloader.py", "/dao/load_test_data.py"], "/dao/SASRec_dataloader.py": ["/utility/dao_helper.py"], "/dao/SeqGFRec_dataloader.py": ["/utility/dao_helper.py"]} |
62,088 | katrina-m/RecModels_Pytorch | refs/heads/master | /utility/components.py | import torch
import numpy as np
import logging
import dgl
class Aggregator(torch.nn.Module):
    """
    Neighbor aggregator.

    KGAT-style graph convolution over a DGL graph: a relation-aware attention
    score is computed per edge (compute_attention), neighbor features are
    attention-weighted and summed, then combined with each destination node's
    own features using the configured `aggregator_type` ('gcn', 'graphsage'
    or 'bi-interaction'), optionally with a residual connection.
    """
    def __init__(self, in_dim, out_dim, num_relations, dropout, aggregator_type="graphsage", propagate_type="residual"):
        super(Aggregator, self).__init__()
        self.in_dim = in_dim
        self.out_dim = out_dim
        self.dropout = dropout
        self.num_relations = num_relations
        self.aggregator_type = aggregator_type
        self.propagate_type = propagate_type
        self.message_dropout = torch.nn.Dropout(dropout)
        self.relation_embed = torch.nn.Embedding(self.num_relations, self.in_dim, padding_idx=0)  # updated later
        # Per-relation transformation matrices; compute_attention swaps one in
        # at a time as self.W_r.
        self.W_R = torch.nn.Parameter(torch.Tensor(self.num_relations, self.in_dim, self.in_dim))
        if aggregator_type == 'gcn':
            self.W = torch.nn.Linear(self.in_dim, self.out_dim)  # W in Equation (6)
        elif aggregator_type == 'graphsage':
            self.W = torch.nn.Linear(self.in_dim * 2, self.out_dim)  # W in Equation (7)
        elif aggregator_type == 'bi-interaction':
            self.W1 = torch.nn.Linear(self.in_dim, self.out_dim)  # W1 in Equation (8)
            self.W2 = torch.nn.Linear(self.in_dim, self.out_dim)  # W2 in Equation (8)
        else:
            raise NotImplementedError
        if propagate_type == "residual":
            # Project to out_dim when dimensions differ so the residual
            # addition is shape-compatible; otherwise pass through unchanged.
            if self.in_dim != out_dim:
                self.res_fc = torch.nn.Linear(
                    self.in_dim, out_dim, bias=False)
            else:
                self.res_fc = torch.nn.Identity()
        self.activation = torch.nn.LeakyReLU()

    def edge_attention(self, edges):
        """Per-edge attention score using the current relation matrix self.W_r."""
        # NOTE(review): both projections read edges.srcdata — in the KGAT
        # formulation one side is normally the destination (tail) node;
        # confirm whether dstdata was intended for r_mul_t.
        r_mul_t = torch.matmul(edges.srcdata['node_feat'], self.W_r)  # (n_edge, relation_dim)
        r_mul_h = torch.matmul(edges.srcdata['node_feat'], self.W_r)  # (n_edge, relation_dim)
        r_embed = self.relation_embed(edges.data['type'])  # (1, relation_dim)
        att = torch.bmm(r_mul_t.unsqueeze(1), torch.tanh(r_mul_h + r_embed).unsqueeze(2)).squeeze(-1)  # (n_edge, 1)
        return {'attention_score': att}

    def compute_attention(self, g):
        """Compute un-normalised attention for every edge, one relation type
        at a time (self.W_r is swapped in per relation before apply_edges)."""
        with g.local_scope():
            for i in range(self.num_relations):
                edge_idxs = g.filter_edges(lambda edge: edge.data['type'] == i)
                self.W_r = self.W_R[i]
                g.apply_edges(self.edge_attention, edge_idxs)
            return g.edata.pop('attention_score')

    def forward(self, g, entity_embed):
        """Aggregate attention-weighted neighbor features into destination-node
        embeddings.

        :param g: DGL graph or block; edges carry a 'type' relation id.
        :param entity_embed: features aligned with g's (source) nodes.
        :return: updated features for the destination nodes.
        """
        g = g.local_var()
        if g.is_block:
            # Sampled blocks keep separate source/destination node sets;
            # destination nodes are the first num_dst_nodes() of the sources.
            h_src = entity_embed
            h_dst = entity_embed[:g.num_dst_nodes()]
            g.srcdata['node_feat'] = h_src
            g.dstdata['node_feat'] = h_dst
        else:
            g.ndata['node_feat'] = entity_embed
            h_dst = entity_embed
        g.edata["attention_score"] = self.compute_attention(g)
        # Weighted sum of neighbor features using the per-edge attention.
        g.update_all(dgl.function.u_mul_e('node_feat', 'attention_score', 'side_feat'),
                     dgl.function.sum('side_feat', 'neighbor_feat'))
        if self.aggregator_type == 'gcn':
            # Equation (6) & (9)
            out = self.activation(
                self.W(g.dstdata['node_feat'] + g.dstdata['neighbor_feat']))  # (n_users + n_entities, out_dim)
        elif self.aggregator_type == 'graphsage':
            # Equation (7) & (9)
            out = self.activation(
                self.W(torch.cat([g.dstdata['node_feat'], g.dstdata['neighbor_feat']],
                                 dim=1)))  # (n_users + n_entities, out_dim)
        elif self.aggregator_type == 'bi-interaction':
            # Equation (8) & (9)
            out1 = self.activation(
                self.W1(g.dstdata['node_feat'] + g.dstdata['neighbor_feat']))  # (n_users + n_entities, out_dim)
            out2 = self.activation(
                self.W2(g.dstdata['node_feat'] * g.dstdata['neighbor_feat']))  # (n_users + n_entities, out_dim)
            out = out1 + out2
        else:
            raise NotImplementedError
        out = self.message_dropout(out)
        if self.propagate_type == "residual":
            # residual
            if self.res_fc is not None:
                resval = self.res_fc(h_dst).view(h_dst.shape[0], -1, self.out_dim)
                out = out + resval.squeeze(1)
        return out
class TimeEncode(torch.nn.Module):
    """Functional time encoding: maps timestamps [N, L] to sinusoidal features
    [N, L, time_dim] via cos(t * w_k + phi_k), with learnable frequencies w
    (log-spaced over 10^0..10^-9 at init) and phases phi (zero at init)."""

    def __init__(self, time_dim, factor=5):
        super(TimeEncode, self).__init__()
        self.factor = factor
        init_freq = torch.from_numpy(1 / 10 ** np.linspace(0, 9, time_dim)).float()
        self.basis_freq = torch.nn.Parameter(init_freq)
        self.phase = torch.nn.Parameter(torch.zeros(time_dim).float())

    def forward(self, ts):
        """ts: [N, L] -> [N, L, time_dim] cosine features."""
        angles = ts.unsqueeze(-1) * self.basis_freq.view(1, 1, -1) + self.phase.view(1, 1, -1)
        return torch.cos(angles)
class PosEncode(torch.nn.Module):
    """Positional-encoding baseline: ignores timestamp values themselves and
    embeds the rank order (argsort) of each position's timestamp."""

    def __init__(self, time_dim, seq_len):
        super().__init__()
        self.pos_embeddings = torch.nn.Embedding(num_embeddings=seq_len, embedding_dim=time_dim)

    def forward(self, ts):
        """ts: [N, L] -> [N, L, time_dim] embeddings of argsort(ts)."""
        return self.pos_embeddings(ts.argsort())
class EmptyEncode(torch.nn.Module):
    """Null time encoding: returns all-zero features of width time_dim."""

    def __init__(self, time_dim):
        super().__init__()
        self.time_dim = time_dim

    def forward(self, ts):
        """ts: [N, L] -> zeros of shape [N, L, time_dim]."""
        zeros = torch.zeros_like(ts).float().unsqueeze(-1)
        return zeros.expand(zeros.shape[0], zeros.shape[1], self.time_dim)
class PointWiseFeedForward(torch.nn.Module):
    """Position-wise feed-forward block from SASRec: two 1x1 convolutions
    with ReLU and dropout, plus a residual connection."""

    def __init__(self, hidden_units, dropout_rate):
        super(PointWiseFeedForward, self).__init__()
        self.conv1 = torch.nn.Conv1d(hidden_units, hidden_units, kernel_size=1)
        self.dropout1 = torch.nn.Dropout(p=dropout_rate)
        self.relu = torch.nn.ReLU()
        self.conv2 = torch.nn.Conv1d(hidden_units, hidden_units, kernel_size=1)
        self.dropout2 = torch.nn.Dropout(p=dropout_rate)

    def forward(self, inputs):
        # Conv1d expects (N, C, L), so transpose around the conv stack.
        hidden = self.dropout1(self.conv1(inputs.transpose(-1, -2)))
        hidden = self.dropout2(self.conv2(self.relu(hidden)))
        # Residual connection, back in (N, L, C) layout.
        return hidden.transpose(-1, -2) + inputs
class TemporalAggregator(torch.nn.Module):
    """Multi-hop temporal neighborhood aggregator.

    Consumes the hop-wise "blocks" produced by the data loaders (neighbor
    ids, node/edge types and timestamps per hop) and folds them, deepest hop
    first, into one embedding per seed node using time-aware attention
    (or LSTM/mean pooling, per `agg_method`).
    """
    def __init__(self, fan_outs, hidden_units, num_nodes, num_relations, num_layers, use_time, num_heads, drop_out, attn_mode="prod", agg_method="attn"):
        super(TemporalAggregator, self).__init__()
        self.drop_out = drop_out
        self.fan_outs = fan_outs
        self.num_layers = num_layers
        self.num_relations = num_relations
        self.num_nodes = num_nodes
        self.hidden_units = hidden_units
        self.use_time = use_time
        self.agg_method = agg_method
        self.attn_mode = attn_mode
        self.num_heads = num_heads
        self.logger = logging.getLogger(__name__)
        # Id 0 is reserved for padding in both embedding tables.
        self.edge_embed = torch.nn.Embedding(self.num_relations, self.hidden_units, padding_idx=0)
        self.node_embed = torch.nn.Embedding(self.num_nodes + 1, self.hidden_units, padding_idx=0)
        # NOTE(review): W_R and merge_layer are initialised but not used in
        # tem_conv below — presumably consumed elsewhere or left over; confirm.
        self.W_R = torch.nn.Parameter(torch.Tensor(self.num_relations, self.hidden_units, self.hidden_units))
        self.merge_layer = MergeLayer(self.hidden_units, self.hidden_units, self.hidden_units, self.hidden_units)
        if self.use_time == 'time':
            self.logger.info('Using time encoding')
            self.time_encoder = TimeEncode(time_dim=self.hidden_units)
        elif self.use_time == 'pos':
            assert(self.fan_outs is not None)
            self.logger.info('Using positional encoding')
            self.time_encoder = PosEncode(time_dim=self.hidden_units, seq_len=self.fan_outs[0])
        elif self.use_time == 'empty':
            self.logger.info('Using empty encoding')
            self.time_encoder = EmptyEncode(time_dim=self.hidden_units)
        else:
            raise ValueError('invalid time option!')
        if self.agg_method == 'attn':
            self.logger.info('Aggregation uses attention model')
            self.attn_model_list = torch.nn.ModuleList([AttnModel(self.hidden_units,
                                                                  self.hidden_units,
                                                                  self.hidden_units,
                                                                  attn_mode=self.attn_mode,
                                                                  n_head=self.num_heads,
                                                                  drop_out=self.drop_out) for _ in range(self.num_layers)])
        elif self.agg_method == 'lstm':
            self.logger.info('Aggregation uses LSTM model')
            self.attn_model_list = torch.nn.ModuleList([LSTMPool(self.hidden_units,
                                                                 self.hidden_units,
                                                                 self.hidden_units) for _ in range(self.num_layers)])
        elif self.agg_method == 'mean':
            self.logger.info('Aggregation uses constant mean model')
            self.attn_model_list = torch.nn.ModuleList([MeanPool(self.hidden_units,
                                                                 self.hidden_units) for _ in range(self.num_layers)])
        else:
            raise ValueError('invalid agg_method value, use attn or lstm')
        pass

    def forward(self, blocks):
        return self.tem_conv(blocks)

    def tem_conv(self, blocks):
        """Fold the hop blocks into one embedding per innermost seed.

        Iterates the blocks deepest hop first; each hop attends from the
        destination (seed) nodes over their sampled neighbors, and the result
        becomes the neighbor-feature input of the next (shallower) hop.
        """
        for i, (src_ngh_idx, dst_idx, src_node_type, dst_node_type, src_ngh_edge_type, src_ngh_ts, dst_ts) in enumerate(reversed(blocks)):  # The first contains the original src nodes.
            # Reshape
            dst_ts = dst_ts.unsqueeze(1)
            src_node_raw_feat = self.node_embed(src_ngh_idx)  # (batch_size, -1)
            if i == 0:
                src_ngh_feat = src_node_raw_feat
            else:
                # Deeper-hop output becomes this hop's neighbor features.
                # NOTE(review): src_node_raw_feat is unused on this path —
                # confirm the raw embeddings are intentionally discarded here.
                src_ngh_feat = src_ngh_feat.view(-1, self.fan_outs[i], self.hidden_units)
            # query node always has the start time -> time span == 0
            dst_node_t_embed = self.time_encoder(torch.zeros_like(dst_ts))
            dst_node_feat = self.node_embed(dst_idx)
            src_ngh_t_delta = dst_ts - src_ngh_ts
            src_ngh_t_embed = self.time_encoder(src_ngh_t_delta)
            src_ngn_edge_feat = self.edge_embed(src_ngh_edge_type)
            # attention aggregation; padding neighbors (id 0) are masked out
            mask = src_ngh_idx == 0
            attn_m = self.attn_model_list[i]
            local, weight = attn_m(dst_node_feat,
                                   dst_node_t_embed,
                                   src_ngh_feat,
                                   src_ngh_t_embed,
                                   src_ngn_edge_feat,
                                   mask)
            # Residual: seed features plus the attended neighborhood summary.
            src_ngh_feat = dst_node_feat + local
        return src_ngh_feat
class MergeLayer(torch.nn.Module):
    """Two-layer MLP fusing a pair of feature tensors: concat -> fc1 -> ReLU -> fc2."""

    def __init__(self, dim1, dim2, dim3, dim4):
        super().__init__()
        self.fc1 = torch.nn.Linear(dim1 + dim2, dim3)
        self.fc2 = torch.nn.Linear(dim3, dim4)
        self.act = torch.nn.ReLU()
        torch.nn.init.xavier_normal_(self.fc1.weight)
        torch.nn.init.xavier_normal_(self.fc2.weight)

    def forward(self, x1, x2):
        """x1: [B, dim1], x2: [B, dim2] -> [B, dim4]."""
        fused = self.fc1(torch.cat([x1, x2], dim=1))
        return self.fc2(self.act(fused))
class AttnModel(torch.nn.Module):
    """Attention based temporal layers.

    Attends from one query node per batch row over its temporal neighbors,
    with node, edge and time-encoding features concatenated into the
    attention inputs, then merges the attended summary with the raw query
    features.
    """
    def __init__(self, feat_dim, edge_dim, time_dim,
                 attn_mode='prod', n_head=2, drop_out=0.1):
        """
        args:
          feat_dim: dim for the node features
          edge_dim: dim for the temporal edge features
          time_dim: dim for the time encoding
          attn_mode: choose from 'prod' and 'map'
          n_head: number of heads in attention
          drop_out: probability of dropping a neural.
        """
        super(AttnModel, self).__init__()
        self.feat_dim = feat_dim
        self.time_dim = time_dim
        # Node + edge + time features are concatenated into the attention input.
        self.edge_in_dim = (feat_dim + edge_dim + time_dim)
        self.model_dim = self.edge_in_dim
        self.merger = MergeLayer(self.model_dim, feat_dim, feat_dim, feat_dim)
        assert(self.model_dim % n_head == 0)
        self.logger = logging.getLogger(__name__)
        self.attn_mode = attn_mode
        if attn_mode == 'prod':
            self.multi_head_target = MultiHeadAttention(n_head,
                                                        d_model=self.model_dim,
                                                        d_k=self.model_dim // n_head,
                                                        d_v=self.model_dim // n_head,
                                                        dropout=drop_out)
            self.logger.info('Using scaled prod attention')
        elif attn_mode == 'map':
            self.multi_head_target = MapBasedMultiHeadAttention(n_head,
                                                                d_model=self.model_dim,
                                                                d_k=self.model_dim // n_head,
                                                                d_v=self.model_dim // n_head,
                                                                dropout=drop_out)
            self.logger.info('Using map based attention')
        else:
            raise ValueError('attn_mode can only be prod or map')

    def forward(self, src, src_t, seq, seq_t, seq_e, mask):
        """Attention based temporal attention forward pass.

        args:
          src: float Tensor of shape [B, D]
          src_t: float Tensor of shape [B, Dt], Dt == D
          seq: float Tensor of shape [B, N, D]
          seq_t: float Tensor of shape [B, N, Dt]
          seq_e: float Tensor of shape [B, N, De], De == D
          mask: boolean Tensor of shape [B, N], where a true value indicates
            a null (padding) entry in the sequence.
        returns:
          output: float Tensor of shape [B, D]
          weight: float Tensor of shape [B, N]
        """
        src_ext = torch.unsqueeze(src, dim=1)  # src [B, 1, D]
        # Queries carry no edge features; pad with zeros so q and k widths match.
        src_e_ph = torch.zeros_like(src_ext)
        q = torch.cat([src_ext, src_e_ph, src_t], dim=2)  # [B, 1, D + De + Dt]
        k = torch.cat([seq, seq_e, seq_t], dim=2)  # [B, N, D + De + Dt]
        mask = torch.unsqueeze(mask, dim=2)  # mask [B, N, 1]
        mask = mask.permute([0, 2, 1])  # mask [B, 1, N]
        # target-attention
        output, attn = self.multi_head_target(q=q, k=k, v=k, mask=mask)  # output: [B, 1, D + Dt], attn: [B, 1, N]
        # Fixed: the original bare `.squeeze()` also collapsed the batch
        # dimension when B == 1, breaking the merger's dim=1 concat; squeeze
        # only the singleton query dimension, which is shape-safe for any B.
        output = output.squeeze(dim=1)
        attn = attn.squeeze(dim=1)
        output = self.merger(output, src)
        return output, attn
class ScaledDotProductAttention(torch.nn.Module):
    """Scaled dot-product attention: softmax(q·kᵀ / temperature) · v."""

    def __init__(self, temperature, attn_dropout=0.1):
        super().__init__()
        self.temperature = temperature
        self.dropout = torch.nn.Dropout(attn_dropout)
        self.softmax = torch.nn.Softmax(dim=2)

    def forward(self, q, k, v, mask=None, attn_mask=None):
        """Batched attention over [n*b, len, d] tensors.

        Both masks (when given) are boolean with True meaning "block this
        position"; blocked logits are pushed to -1e10 so their softmax
        weight is effectively zero.
        """
        scores = torch.bmm(q, k.transpose(1, 2)) / self.temperature
        if attn_mask is not None:
            scores = scores.masked_fill(attn_mask, -1e10)
        if mask is not None:
            scores = scores.masked_fill(mask, -1e10)
        weights = self.dropout(self.softmax(scores))  # [n*b, l_q, l_k]
        return torch.bmm(weights, v), weights
class MultiHeadAttention(torch.nn.Module):
    """Multi-Head Attention module.

    Standard multi-head scaled dot-product attention with a residual
    connection and post-LayerNorm. Fix: ``mask`` is documented as optional
    (default None) but was repeated unconditionally, which raised an
    AttributeError whenever no mask was supplied; it is now guarded.
    """

    def __init__(self, n_head, d_model, d_k, d_v, dropout=0.1):
        super().__init__()
        self.n_head = n_head
        self.d_k = d_k
        self.d_v = d_v
        # All heads' projections packed into single linear layers.
        self.w_qs = torch.nn.Linear(d_model, n_head * d_k, bias=False)
        self.w_ks = torch.nn.Linear(d_model, n_head * d_k, bias=False)
        self.w_vs = torch.nn.Linear(d_model, n_head * d_v, bias=False)
        torch.nn.init.normal_(self.w_qs.weight, mean=0, std=np.sqrt(2.0 / (d_model + d_k)))
        torch.nn.init.normal_(self.w_ks.weight, mean=0, std=np.sqrt(2.0 / (d_model + d_k)))
        torch.nn.init.normal_(self.w_vs.weight, mean=0, std=np.sqrt(2.0 / (d_model + d_v)))
        # Logits are scaled by sqrt(d_k) inside the attention module.
        self.attention = ScaledDotProductAttention(temperature=np.power(d_k, 0.5), attn_dropout=dropout)
        self.layer_norm = torch.nn.LayerNorm(d_model)
        self.fc = torch.nn.Linear(n_head * d_v, d_model)
        torch.nn.init.xavier_normal_(self.fc.weight)
        self.dropout = torch.nn.Dropout(dropout)

    def forward(self, q, k, v, mask=None, attn_mask=None):
        """Attend ``q`` [B, Lq, d_model] over ``k``/``v`` [B, Lk, d_model].

        Returns (output [B, Lq, d_model], attn [n_head*B, Lq, Lk]).
        Boolean masks mark positions to exclude from attention.
        """
        d_k, d_v, n_head = self.d_k, self.d_v, self.n_head
        sz_b, len_q, _ = q.size()
        sz_b, len_k, _ = k.size()
        sz_b, len_v, _ = v.size()
        residual = q
        q = self.w_qs(q).view(sz_b, len_q, n_head, d_k)
        k = self.w_ks(k).view(sz_b, len_k, n_head, d_k)
        v = self.w_vs(v).view(sz_b, len_v, n_head, d_v)
        # Fold the head axis into the batch axis: (n*b) x len x d.
        q = q.permute(2, 0, 1, 3).contiguous().view(-1, len_q, d_k)  # (n*b) x lq x dk
        k = k.permute(2, 0, 1, 3).contiguous().view(-1, len_k, d_k)  # (n*b) x lk x dk
        v = v.permute(2, 0, 1, 3).contiguous().view(-1, len_v, d_v)  # (n*b) x lv x dv
        if mask is not None:
            # Bug fix: previously repeated unconditionally, crashing on mask=None.
            mask = mask.repeat(n_head, 1, 1)  # (n*b) x lq x lk
        output, attn = self.attention(q, k, v, mask=mask, attn_mask=attn_mask)
        output = output.view(n_head, sz_b, len_q, d_v)
        output = output.permute(1, 2, 0, 3).contiguous().view(sz_b, len_q, -1)  # b x lq x (n*dv)
        output = self.dropout(self.fc(output))
        output = self.layer_norm(output + residual)
        return output, attn
class MapBasedMultiHeadAttention(torch.nn.Module):
    """Multi-Head Attention whose scores come from a learned map (MLP) over
    each concatenated (query, key) pair instead of a dot product.

    Fixes: ``mask`` was repeated unconditionally, crashing for the documented
    default ``mask=None``; the value projection was sized with ``d_k`` while
    forward() views it as ``d_v`` (they coincided in the only call site, but
    any d_k != d_v configuration crashed); a duplicate ``self.dropout``
    assignment is removed.
    """

    def __init__(self, n_head, d_model, d_k, d_v, dropout=0.1):
        super().__init__()
        self.n_head = n_head
        self.d_k = d_k
        self.d_v = d_v
        self.wq_node_transform = torch.nn.Linear(d_model, n_head * d_k, bias=False)
        self.wk_node_transform = torch.nn.Linear(d_model, n_head * d_k, bias=False)
        # Bug fix: output size must be n_head * d_v (was n_head * d_k).
        self.wv_node_transform = torch.nn.Linear(d_model, n_head * d_v, bias=False)
        self.layer_norm = torch.nn.LayerNorm(d_model)
        self.fc = torch.nn.Linear(n_head * d_v, d_model)
        self.act = torch.nn.LeakyReLU(negative_slope=0.2)
        # Maps a concatenated (q, k) pair to one scalar attention logit.
        self.weight_map = torch.nn.Linear(2 * d_k, 1, bias=False)
        torch.nn.init.xavier_normal_(self.fc.weight)
        self.softmax = torch.nn.Softmax(dim=2)
        self.dropout = torch.nn.Dropout(dropout)

    def forward(self, q, k, v, mask=None):
        """Attend ``q`` [B, Lq, d_model] over ``k``/``v`` [B, Lk, d_model].

        Returns (output [B, Lq, d_model], attn [n_head*B, Lq, Lk]).
        """
        d_k, d_v, n_head = self.d_k, self.d_v, self.n_head
        sz_b, len_q, _ = q.size()
        sz_b, len_k, _ = k.size()
        sz_b, len_v, _ = v.size()
        residual = q
        q = self.wq_node_transform(q).view(sz_b, len_q, n_head, d_k)
        k = self.wk_node_transform(k).view(sz_b, len_k, n_head, d_k)
        v = self.wv_node_transform(v).view(sz_b, len_v, n_head, d_v)
        # Broadcast q and k so every (query, key) pair can be concatenated.
        q = q.permute(2, 0, 1, 3).contiguous().view(-1, len_q, d_k)  # (n*b) x lq x dk
        q = torch.unsqueeze(q, dim=2)  # [(n*b), lq, 1, dk]
        q = q.expand(q.shape[0], q.shape[1], len_k, q.shape[3])  # [(n*b), lq, lk, dk]
        k = k.permute(2, 0, 1, 3).contiguous().view(-1, len_k, d_k)  # (n*b) x lk x dk
        k = torch.unsqueeze(k, dim=1)  # [(n*b), 1, lk, dk]
        k = k.expand(k.shape[0], len_q, k.shape[2], k.shape[3])  # [(n*b), lq, lk, dk]
        v = v.permute(2, 0, 1, 3).contiguous().view(-1, len_v, d_v)  # (n*b) x lv x dv
        if mask is not None:
            # Bug fix: repeat only when a mask is supplied (default is None).
            mask = mask.repeat(n_head, 1, 1)  # (n*b) x lq x lk
        # Map based attention: score each (q, k) pair with a linear map.
        q_k = torch.cat([q, k], dim=3)  # [(n*b), lq, lk, dk * 2]
        attn = self.weight_map(q_k).squeeze(dim=3)  # [(n*b), lq, lk]
        if mask is not None:
            attn = attn.masked_fill(mask, -1e10)
        attn = self.softmax(attn)  # [n * b, l_q, l_k]
        attn = self.dropout(attn)  # [n * b, l_q, l_k]
        # [n * b, l_q, l_k] * [n * b, l_v, d_v] >> [n * b, l_q, d_v]
        output = torch.bmm(attn, v)
        output = output.view(n_head, sz_b, len_q, d_v)
        output = output.permute(1, 2, 0, 3).contiguous().view(sz_b, len_q, -1)  # b x lq x (n*dv)
        output = self.dropout(self.act(self.fc(output)))
        output = self.layer_norm(output + residual)
        return output, attn
class MeanPool(torch.nn.Module):
    """Neighborhood aggregator that mean-pools [neighbor | edge] features
    and merges the pooled vector with the source node feature."""

    def __init__(self, feat_dim, edge_dim):
        super(MeanPool, self).__init__()
        self.edge_dim = edge_dim
        self.feat_dim = feat_dim
        self.act = torch.nn.ReLU()
        self.merger = MergeLayer(edge_dim + feat_dim, feat_dim, feat_dim, feat_dim)

    def forward(self, src, src_t, seq, seq_t, seq_e, mask):
        """src: [B, D]; seq: [B, N, D]; seq_e: [B, N, De]; mask: [B, N].

        NOTE: src_t, seq_t and mask are accepted for interface parity with
        the attention aggregators but are not used here — padded entries
        are included in the mean.
        """
        neighbor_repr = torch.cat((seq, seq_e), dim=2)  # [B, N, De + D]
        pooled = neighbor_repr.mean(dim=1)              # [B, De + D]
        return self.merger(pooled, src), None
class LSTMPool(torch.nn.Module):
    """Neighborhood aggregator that runs an LSTM over the temporal neighbor
    sequence and merges its final hidden state with the source feature."""

    def __init__(self, feat_dim, edge_dim, time_dim):
        super(LSTMPool, self).__init__()
        self.feat_dim = feat_dim
        self.time_dim = time_dim
        self.edge_dim = edge_dim
        # Each LSTM step consumes [neighbor | edge | time] features.
        self.att_dim = feat_dim + edge_dim + time_dim
        self.act = torch.nn.ReLU()
        self.lstm = torch.nn.LSTM(input_size=self.att_dim,
                                  hidden_size=self.feat_dim,
                                  num_layers=1,
                                  batch_first=True)
        self.merger = MergeLayer(feat_dim, feat_dim, feat_dim, feat_dim)

    def forward(self, src, src_t, seq, seq_t, seq_e, mask):
        """src: [B, D]; seq/seq_e/seq_t: [B, N, *]; mask is unused here."""
        combined = torch.cat((seq, seq_e, seq_t), dim=2)
        _, (final_hidden, _) = self.lstm(combined)
        summary = final_hidden[-1, :, :]  # last layer's hidden state, [B, feat_dim]
        return self.merger.forward(summary, src), None
| {"/dao/tgat_data_loader_dgl.py": ["/utility/dao_helper.py"], "/model/SASRec.py": ["/model/BaseModel.py", "/utility/components.py"], "/train/train_TGAT.py": ["/train/parse_args.py", "/dao/load_test_data.py", "/dao/tgat_data_loader_dgl.py", "/model/TGAT.py"], "/model/BaseModel.py": ["/utility/metrics.py"], "/model/SASGFRec.py": ["/model/BaseModel.py", "/utility/components.py"], "/train/train_SASRec.py": ["/train/parse_args.py", "/utility/metrics.py", "/utility/dao_helper.py", "/model/SASRec.py", "/dao/SASRec_dataloader.py", "/dao/load_test_data.py"], "/dao/SASRec_dataloader.py": ["/utility/dao_helper.py"], "/dao/SeqGFRec_dataloader.py": ["/utility/dao_helper.py"]} |
62,096 | loywer/model_v2 | refs/heads/master | /atmosphere.py | from math import log10
# Physical constants for the standard-atmosphere model (GOST 4401-81).
MOLAR_MASS = 28.964420  # molar mass of dry air [kg/kmol]
# NOTE(review): name is missing an 'E' (UNIVERSAL); kept for compatibility.
UNIVRSAL_GAS_CONST = 8314.32  # universal gas constant [J/(kmol*K)]
SPECIFIC_GAS_CONST = 287.05287  # specific gas constant of air [J/(kg*K)]
ADIABATIC_EXP = 1.4  # adiabatic exponent of air (appears unused in this module)
CONVENTIONAL_RADIUS = 6356767  # conventional Earth radius [m]
ACCELERATION_OF_GRAVITY = 9.80665  # standard gravity at sea level [m/s^2]
KELVIN_TEMP = 273.15  # 0 deg C in kelvin (appears unused in this module)
T_GRAD = -0.0065  # tropospheric temperature lapse rate [K/m]
class Atmosphere:
    """Static-atmosphere model according to GOST 4401-81 (single linear
    temperature layer).

    Call ``set_H`` with a geometric altitude [m] to refresh the cached
    state, then read it back through the ``get_*`` accessors.
    """

    def __init__(self, T_base=288.15, H_base=0, pressure_base=101325):
        # Base-layer temperature [K], geopotential height [m] and pressure [Pa].
        self.T_base = T_base
        self.H_base = H_base
        self.pressure_base = pressure_base

    def set_H(self, H_in):
        """Recompute gravity, temperature, pressure, density and the speed
        of sound for geometric altitude ``H_in`` [m]."""
        self.H = H_in
        # Geopotential altitude from geometric altitude.
        self.H_geo = (CONVENTIONAL_RADIUS * self.H) / (CONVENTIONAL_RADIUS + self.H)
        # Inverse-square falloff of gravity with altitude.
        radius_ratio = CONVENTIONAL_RADIUS / (CONVENTIONAL_RADIUS + self.H)
        self.accel_of_gravity = ACCELERATION_OF_GRAVITY * (radius_ratio ** 2)
        # Linear temperature lapse within the layer.
        self.T = self.T_base + T_GRAD * (self.H_geo - self.H_base)
        # Barometric formula written via base-10 log/power:
        # p = p0 * (T/T0) ** (-g / (lambda * R)).
        exponent = -ACCELERATION_OF_GRAVITY / (T_GRAD * SPECIFIC_GAS_CONST) * log10(self.T / self.T_base)
        self.pressure = self.pressure_base * 10 ** exponent
        # Ideal-gas density.
        self.Density = self.pressure * MOLAR_MASS / (self.T * UNIVRSAL_GAS_CONST)
        # Speed of sound, a = 20.046796 * sqrt(T).
        self.V_sound = 20.046796 * self.T ** 0.5

    def get_accel_of_gravity(self):
        """Gravitational acceleration [m/s^2] at the last set altitude."""
        return self.accel_of_gravity

    def get_temperature(self):
        """Air temperature [K] at the last set altitude."""
        return self.T

    def get_density(self):
        """Air density [kg/m^3] at the last set altitude."""
        return self.Density

    def get_pressure(self):
        """Static pressure [Pa] at the last set altitude."""
        return self.pressure

    def get_sound_speed(self):
        """Speed of sound [m/s] at the last set altitude."""
        return self.V_sound
| {"/aerodym.py": ["/c172.py"], "/Aircraft_model.py": ["/Engine.py", "/SystemAutomaticControl_release.py", "/aerodym.py", "/atmosphere.py"]} |
62,097 | loywer/model_v2 | refs/heads/master | /SystemAutomaticControl_release.py | import numpy as np
from matplotlib import pyplot as plt
#eps_theta = 0.01 # |(theta_now - theta_spec)| > eps_theta
# Control-loop time step [s].
dt = 0.002
# Elevator-channel PID gains.
kp_elev = 14.0
ki_elev = 2.5
kd_elev = 1.0
# Aileron-channel PD gains.
kp_eleron = 1.8
kd_eleron = 0.005
t = 0  # NOTE(review): appears unused in this module
# Actuator (aperiodic link) time constants [s].
T_elev = 5
T_eleron = 2.1
# Shared elevator-loop state, mutated via ``global`` inside Control:
# integrator accumulator, commanded elevator, previous error sample.
I = 0
elev_spec = 0
last = 0
class Control:
    """Elevator / aileron autopilot loops.

    Elevator channel: PID on the pitch error (theta_now - theta_spec).
    Aileron channel: PD on the roll error with roll-rate damping.
    Both outputs are saturated at the physical control-surface limits.

    NOTE(review): the elevator integrator state (``I``, ``last``,
    ``elev_spec``) lives in module-level globals, so every Control instance
    shares one integrator — confirm only a single instance is created.
    """

    def set_data(self, theta_spec, theta_now, gamma_spec, gamma_now, w0):
        """Latch one control cycle's inputs.

        theta_spec / theta_now: commanded and measured pitch values
        gamma_spec / gamma_now: commanded and measured roll angle [rad]
        w0: body roll rate, used for aileron damping
        """
        self.theta_spec = theta_spec
        self.theta_now = theta_now
        self.gamma_spec = gamma_spec
        self.gamma_now = gamma_now
        self.w0 = w0

    def get_data(self):
        """Run both channels; return (elevator, aileron) deflections [rad]."""
        self.get_elev_and_theta_new(self.theta_spec, self.theta_now)
        self.get_GammaAngle_and_eleron_now(self.gamma_spec, self.gamma_now, self.w0)
        return self.elev_new, self.eleron_now

    def aperiodic_link(self, T):
        """One-step response fraction of a first-order (aperiodic) lag with
        time constant ``T`` over the fixed 0.002 s step."""
        dt = 0.002
        return (1 - np.exp(-dt/T))

    # Computes the elevator position from the pitch error.
    # theta_spec — the commanded value.
    def get_elev_and_theta_new(self, theta_spec, theta_now):
        global last
        global I
        global elev_spec
        # Computed but not applied to the output below.
        self.transition_function_elev = self.aperiodic_link(T_elev)
        self.delta_theta = theta_now - theta_spec
        # Finite-difference derivative of the error over the fixed step dt.
        dd_theta = (self.delta_theta - last) / dt
        I = I + self.delta_theta * (ki_elev*dt)
        last = self.delta_theta
        # PID law: integral + derivative + proportional terms.
        elev_spec = I + dd_theta*kd_elev + self.delta_theta * kp_elev
        #delta_elev = elev_spec - elev_new
        self.elev_new = elev_spec
        # Saturate at the elevator travel limits (+26 / -28 deg).
        if (self.elev_new * 180.0/np.pi >= 26):
            self.elev_new = 26/180.0*np.pi
        if (self.elev_new * 180.0/np.pi <= -28):
            self.elev_new = -28/180.0*np.pi
        return self.elev_new

    # Computes the aileron position from the roll error.
    # gamma_spec — the commanded (desired) roll angle.
    def get_GammaAngle_and_eleron_now(self, gamma_spec, gamma_now, w0):
        # Computed but not applied to the output below.
        self.transition_function_eleron = self.aperiodic_link(T_eleron)
        self.delta_gamma = gamma_spec - gamma_now
        #self.eleron_spec = self.delta_gamma * kp_eleron - 3*w0
        # PD law: proportional on roll error, damping on roll rate.
        eleron_spec = self.delta_gamma * kp_eleron - kd_eleron * w0
        self.eleron_now = eleron_spec
        # Saturate at the aileron travel limits (+20 / -15 deg).
        if (self.eleron_now * 180.0/np.pi >= 20):
            self.eleron_now = 20/180.0*np.pi
        if (self.eleron_now * 180.0/np.pi <= -15):
            self.eleron_now = -15/180.0*np.pi
        return self.eleron_now

# Dead manual-test snippet, disabled by being wrapped in a string literal.
"""
control = Control()
theta = 1
theta_s = 0.5
a1 = control.get_elev_and_theta_new(theta, theta_s)
print(a1)
gamma1 = 0.52
gamma_2 = 0.65
w_0 = 0.01
a2 = control.get_GammaAngle_and_eleron_now(gamma1, gamma_2, w_0)
print(a2)
"""
| {"/aerodym.py": ["/c172.py"], "/Aircraft_model.py": ["/Engine.py", "/SystemAutomaticControl_release.py", "/aerodym.py", "/atmosphere.py"]} |
62,098 | loywer/model_v2 | refs/heads/master | /aerodym.py | import numpy as np
import c172
from math import sin
from math import cos, sqrt
import matplotlib.pyplot as plt
class Aerodunamic():
    """6-DOF rigid-body aerodynamics/kinematics model of a Cessna 172.

    State is kept in body axes (SSK) with rotation matrices to the
    normal-earth frame (NSK) and the velocity/flow frame (VSK); the c172
    module supplies the aerodynamic coefficients. All integration uses a
    fixed 0.002 s step.

    Fix: ``get_alpha_dot`` assigned ``self.apha_dot`` (typo), so
    ``self.alpha_dot`` stayed permanently at 0 and the alpha-rate terms in
    Cy and mz never contributed; the attribute name is corrected.
    """
    def __init__(self):
        self.alpha = 0        # angle of attack [rad]
        self.betta = 0        # sideslip angle [rad]
        self.alpha_dot = 0    # rate of angle of attack [rad/s]
        self.V_abs = 50       # airspeed magnitude [m/s]
        self.elevator = -0.0
        self.rudder = 0
        self.w = np.array([0, 0, 0])  # body angular rates
        self.NSK_SSK = np.array([[0, 0, 0], [0, 0, 0], [0, 0, 0]])
        self.VSK_SSK = np.array([[0, 0, 0], [0, 0, 0], [0, 0, 0]])
        self.V = np.array([50, 0, 0])  # body-frame velocity [m/s]
        self.g = 9.81
        self.G = np.array([0, -self.g, 0])  # gravity in the earth frame
        self.X = np.array([0, 500, 0])      # position; X[1] is altitude [m]
        self.gamma = 0  # roll [rad]
        self.theta = 0  # pitch [rad]
        self.psi = 0    # yaw [rad]
        self.P = np.array([00000, 0, 0])  # thrust vector [N]
        self.ro = 1.25  # air density [kg/m^3]
        self.aileron = 0.0
        # External (engine) moment; currently not applied in the dynamics.
        self.M_d = np.array([0, 0, 0])

    def get_acceleration(self):
        """Translational acceleration in body axes [m/s^2]."""
        Cx = c172.get_Cx(self.alpha, self.betta)
        Cy = c172.get_Cy(self.alpha, self.alpha_dot, self.elevator, self.w, self.V_abs)
        Cz = c172.get_Cz(self.alpha, self.betta, self.w, self.rudder, self.aileron, self.V_abs)
        # Dynamic pressure times S/m, so coefficient * q is an acceleration.
        q = self.ro*self.V_abs**2/(2*c172.mass)*c172.Sw
        F_aero = np.array([-Cx, Cy, Cz])*q
        buff = np.dot(self.VSK_SSK, F_aero)
        # aero + rotating-frame (Coriolis) term + gravity + thrust
        F = buff - np.cross(self.w, self.V) + np.dot(self.NSK_SSK, self.G) + self.P/c172.mass
        return F

    def get_acceleration_angle(self):
        """Angular acceleration in body axes from aerodynamic moments."""
        J = c172.inertia
        mx = c172.get_mx(self.alpha, self.betta, self.w, self.V_abs, self.aileron, self.rudder)
        my = c172.get_my(self.betta, self.w, self.V_abs, self.rudder, self.aileron)
        mz = c172.get_mz(self.alpha, self.alpha_dot, self.w, self.V_abs, self.elevator)
        q = self.ro*self.V_abs**2/(2.0)*c172.Sw
        # Reference arms: span b for roll/yaw, chord c for pitch.
        M = np.array([mx*c172.b, my*c172.b, mz*c172.c])*q
        buff = np.cross(self.w, np.dot(J, self.w))
        # Euler's rigid-body equation: e = J^-1 (M - w x Jw).
        e = np.dot(np.linalg.inv(J), M - buff)
        return e

    def get_NSK_SSK(self):
        """Rotation matrix from the normal-earth frame to body axes,
        built from the current roll/pitch/yaw angles."""
        result = np.array([[cos(self.theta)*cos(self.psi), sin(self.theta), -cos(self.theta)*sin(self.psi)],
                           [sin(self.gamma)*sin(self.psi)-cos(self.gamma)*sin(self.theta)*cos(self.psi), cos(self.gamma)*cos(self.theta), sin(self.gamma)*cos(self.psi)+cos(self.gamma)*sin(self.psi)*sin(self.theta)],
                           [cos(self.gamma)*sin(self.psi)+sin(self.gamma)*sin(self.theta)*cos(self.psi), -sin(self.gamma)*cos(self.theta), cos(self.gamma)*cos(self.psi)-sin(self.gamma)*sin(self.theta)*sin(self.psi)]])
        return result

    def get_VSK_SSK(self):
        """Rotation matrix from the velocity (flow) frame to body axes,
        built from the current alpha/beta angles."""
        result = np.array([[cos(self.betta)*cos(self.alpha), sin(self.alpha), -sin(self.betta)*cos(self.alpha)],
                           [-cos(self.betta)*sin(self.alpha), cos(self.alpha), sin(self.betta)*sin(self.alpha)],
                           [sin(self.betta), 0, cos(self.betta)]])
        return result

    def AngleSpeed_Ailer(self):
        """Euler-angle rates (pitch, roll, yaw) from body angular rates."""
        Speed_teta = self.w[1]*sin(self.gamma) + self.w[2]*cos(self.gamma)
        Speed_gamma = self.w[0] - (self.w[1]*cos(self.gamma) - self.w[2]*sin(self.gamma)) * np.tan(self.theta)
        Speed_psi = 1/cos(self.theta) * (self.w[1]*cos(self.gamma) - self.w[2]*sin(self.gamma))
        return Speed_teta, Speed_gamma, Speed_psi

    def get_V_abs(self):
        """Magnitude of the body-frame velocity vector [m/s]."""
        return sqrt(self.V[0]**2+self.V[1]**2+self.V[2]**2)

    def get_alpha(self):
        """Angle of attack from the body-frame velocity components."""
        self.alpha = -np.arctan2(self.V[1], self.V[0])

    def get_betta(self):
        """Sideslip angle from the body-frame velocity components."""
        self.betta = np.arctan2(self.V[2], self.V[0])

    def get_alpha_dot(self):
        """Finite-difference alpha rate over the fixed 0.002 s step.

        Bug fix: the original assigned ``self.apha_dot`` (typo), leaving
        ``self.alpha_dot`` frozen at its initial value of 0.
        """
        self.alpha_dot = (self.alpha-self.alpha_last)/0.002

    def Integrator(self, left, right, dt):
        """One explicit-Euler integration step: left + right * dt."""
        return left+right*dt

    def set_data(self, elevator, aileron, rudder, P, M_d, ro, g):
        """Latch control-surface deflections, thrust, engine moment,
        air density and gravity for the next integration step."""
        self.elevator = elevator
        self.aileron = aileron
        self.rudder = rudder
        self.P = P
        self.ro = ro
        self.g = g
        self.M_d = M_d
        self.G = np.array([0, -self.g, 0])

    def get_data(self):
        """Advance the state by one 0.002 s step and return the new state:
        (X, n, w, V, V_abs, gamma, theta, psi, alpha, betta)."""
        self.alpha_last = self.alpha
        self.get_alpha()
        self.get_alpha_dot()
        self.get_betta()
        # Sign convention flip for sideslip.
        self.betta = -self.betta
        self.NSK_SSK = self.get_NSK_SSK()
        self.VSK_SSK = self.get_VSK_SSK()
        self.V_abs = self.get_V_abs()
        a = self.get_acceleration()
        # Load factor: acceleration minus gravity, in g units.
        self.n = a - np.dot(self.NSK_SSK, self.G)
        self.n = self.n/self.g
        e = self.get_acceleration_angle()
        s_theta, s_gamma, s_psi = self.AngleSpeed_Ailer()
        self.V = self.Integrator(self.V, a, 0.002)
        self.w = self.Integrator(self.w, e, 0.002)
        self.X = self.Integrator(self.X, np.dot(self.NSK_SSK.T, self.V), 0.002)
        self.theta = self.Integrator(self.theta, s_theta, 0.002)
        self.gamma = self.Integrator(self.gamma, s_gamma, 0.002)
        self.psi = self.Integrator(self.psi, s_psi, 0.002)
        return self.X, self.n, self.w, self.V, self.V_abs, self.gamma, self.theta, self.psi, self.alpha, self.betta
| {"/aerodym.py": ["/c172.py"], "/Aircraft_model.py": ["/Engine.py", "/SystemAutomaticControl_release.py", "/aerodym.py", "/atmosphere.py"]} |
62,099 | loywer/model_v2 | refs/heads/master | /plane.py | from direct.showbase.ShowBase import ShowBase
from direct.showbase import DirectObject
from direct.task import Task
from direct.interval.IntervalGlobal import *
from direct.gui.OnscreenText import OnscreenText
from direct.gui.DirectGui import *
from panda3d.core import *
from numpy import append
from sys import exit
class MyApp(ShowBase, DirectObject.DirectObject):
    """Panda3D application: a start menu that collects flight parameters,
    then loads and animates a plane model with keyboard control."""

    def __init__(self):
        ShowBase.__init__(self)
        # create buttons for menu
        self.menuLbl = DirectLabel(text = "MENU", pos = Vec3(0, 0, 0.9), scale = 0.1, textMayChange = 1)
        self.phiLbl = DirectLabel(text = "Enter latitude", pos = Vec3(0, 0, 0.8), scale = 0.08, textMayChange = 1)
        self.phiEnt = DirectEntry(scale = 0.04, pos = Vec3(-0.2, 0, 0.72))
        self.lambdaLbl = DirectLabel(text = "Enter longitude", pos = Vec3(0, 0, 0.6), scale = 0.08, textMayChange = 1)
        self.lambdaEnt = DirectEntry(scale = 0.04, pos = Vec3(-0.2, 0, 0.5))
        self.heightLbl = DirectLabel(text = "Enter height", pos = Vec3(0, 0, 0.4), scale = 0.08, textMayChange = 1)
        self.heightEnt = DirectEntry(scale = 0.04, pos = Vec3(-0.19, 0, 0.3))
        self.speedLbl = DirectLabel(text = "Enter speed", pos = Vec3(0, 0, 0.2), scale = 0.08, textMayChange = 1)
        self.speedEnt = DirectEntry(scale = 0.04, pos = Vec3(-0.19, 0, 0.1))
        self.rollLbl = DirectLabel(text = "Enter roll angle", pos = Vec3(0, 0, 0), scale = 0.08, textMayChange = 1)
        self.rollEnt = DirectEntry(scale = 0.04, pos = Vec3(-0.19, 0, -0.1))
        self.pitchLbl = DirectLabel(text = "Enter pitch angle", pos = Vec3(0, 0, -0.2), scale = 0.08, textMayChange = 1)
        self.pitchEnt = DirectEntry(scale = 0.04, pos = Vec3(-0.19, 0, -0.3))
        self.yawingLbl = DirectLabel(text = "Enter yawing angle", pos = Vec3(0, 0, -0.4), scale = 0.08, textMayChange = 1)
        self.yawingEnt = DirectEntry(scale = 0.04, pos = Vec3(-0.19, 0, -0.5))
        self.startBtn = DirectButton(text = "Start", scale = 0.1, command = self.setScene, pos = Vec3(0, 0, -0.7))
        # NOTE(review): self.points is never written by any method below
        # (set_coords shadows it with a local) — presumably intended as the
        # click-coordinate log; confirm.
        self.points = []
        # binding keys
        self.accept("mouse1", self.set_coords)
        self.accept("escape", exit)
        # "time-<key>-repeat" events fire continuously while the key is held.
        self.accept("time-a-repeat", self.inc_roll)
        self.accept("time-d-repeat", self.dec_roll)
        self.accept("time-+-repeat", self.inc_speed)
        self.accept("time---repeat", self.dec_speed)
        self.accept("time-w-repeat", self.inc_overload)
        self.accept("time-s-repeat", self.dec_overload)
        self.overload = 0.0 # should be the result of the some function

    def setScene(self):
        """Ask for confirmation before replacing the menu with the 3D scene."""
        self.acceptDlg = YesNoDialog(text = "Are you sure?", command = self.createScene)

    def createScene(self, clickedYes):
        """Dialog callback: on "yes" hide the menu, load the plane model and
        start its back-and-forth animation; on "no" quit the program."""
        if clickedYes:
            # hide menu elements
            self.acceptDlg.hide()
            self.menuLbl.hide()
            self.phiEnt.hide()
            self.phiLbl.hide()
            self.lambdaLbl.hide()
            self.lambdaEnt.hide()
            self.heightLbl.hide()
            self.heightEnt.hide()
            self.speedLbl.hide()
            self.speedEnt.hide()
            self.rollLbl.hide()
            self.rollEnt.hide()
            self.pitchLbl.hide()
            self.pitchEnt.hide()
            self.yawingLbl.hide()
            self.yawingEnt.hide()
            self.startBtn.hide()
            self.disableMouse()
            # NOTE(review): hard-coded absolute model path — breaks on any
            # other machine; consider a relative/configurable path.
            self.plane = loader.loadModel("/c/Panda3D-1.10.6-x64/models/boeing707.egg")
            self.plane.setScale(0.005, 0.005, 0.005)
            self.plane.setPos(0,0,0)
            self.cam.setPos(25.3, 2.26, 2.46)
            self.cam.lookAt(self.plane)
            self.plane.reparentTo(self.render)
            self.taskMgr.add(self.plane_coordiantes, "plane_coordiantes")
            # posInterval = time to move, finalPosition, startPosition
            posInterval1 = self.plane.posInterval(5, Point3(0, -6, -2), startPos=Point3(0,6,2))
            posInterval2 = self.plane.posInterval(5, Point3(0, 6, 2), startPos=Point3(0,-6,-2))
            self.get_Var() # read input from textboxes
            self.planePace = Sequence(posInterval1, posInterval2, name = "planePace")
            self.planePace.loop()
        else:
            exit()

    def plane_coordiantes(self, task):
        """Per-frame task sampling the plane position.

        NOTE(review): the list is local and rebuilt every frame, so the
        sampled position is discarded — likely meant to append to a
        persistent attribute (e.g. self.points); confirm intent.
        """
        cam_coords = []
        cam_coords.append(self.plane.getPos())
        return Task.cont

    def setNameLabel(self):
        """Copy the raw text of every menu entry field onto the instance.

        NOTE(review): values stay strings here; the inc_/dec_ handlers
        convert with float() on each use.
        """
        # read input values
        self.phi = self.phiEnt.get()
        self.lambd = self.lambdaEnt.get()
        self.height = self.heightEnt.get()
        self.speed = self.speedEnt.get()
        self.roll = self.rollEnt.get()
        self.pitch = self.pitchEnt.get()
        self.yawing = self.yawingEnt.get()
        # show new values
        # TODO

    def get_Var(self):
        """Run setNameLabel once through a Func interval."""
        show = Func(self.setNameLabel)
        show.start()

    def set_coords(self):
        """Mouse-click handler: print the clicked screen coordinates.

        NOTE(review): numpy's append(x, y) builds a fresh array each click
        and the local `points` shadows self.points, so nothing accumulates.
        """
        points = []
        if base.mouseWatcherNode.hasMouse():
            x = base.mouseWatcherNode.getMouseX()
            y = base.mouseWatcherNode.getMouseY()
            points = append(x,y)
            print(points)

    # Key-repeat handlers: each converts the stored string/number with
    # float(), steps it by a fixed increment and prints the new value.

    def inc_roll(self, when):
        self.roll = float(self.roll) + 0.5
        print(self.roll)
        return Task.cont

    def inc_speed(self, when):
        self.speed = float(self.speed) + 0.5
        print(self.speed)
        return Task.cont

    def inc_overload(self, when):
        self.overload = float(self.overload) + 0.1
        print(self.overload)
        return Task.cont

    def dec_roll(self, when):
        self.roll = float(self.roll) - 0.5
        print(self.roll)
        return Task.cont

    def dec_speed(self, when):
        self.speed = float(self.speed) - 0.5
        print(self.speed)
        return Task.cont

    def dec_overload(self, when):
        self.overload = float(self.overload) - 0.1
        print(self.overload)
        return Task.cont
# Script entry point: build the app and hand control to Panda3D's main loop.
app = MyApp()
app.run()
| {"/aerodym.py": ["/c172.py"], "/Aircraft_model.py": ["/Engine.py", "/SystemAutomaticControl_release.py", "/aerodym.py", "/atmosphere.py"]} |
62,100 | loywer/model_v2 | refs/heads/master | /Engine.py | import numpy as np
from math import pi
# Simulation time step [s].
dt = 1/500
"Режим 1/0"
# Flight-mode flag; NOTE(review): appears unused — Engine keeps its own self.mode.
mode = 1
# Throttle-mode breakpoints for the rpm lookup below.
mode_arr = [0, 1]
# Engine shaft speed [rpm] at each mode breakpoint.
omega_arr = [1000, 2800]
"Коэф-т J"
# Propeller advance-ratio breakpoints.
J = [0.0, 0.05, 0.1, 0.15, 0.2, 0.25, 0.3, 0.35, 0.4, 0.45, 0.5, 0.55, 0.6, 0.65, 0.7, 0.75, 0.76, 0.77, 0.78, 0.79, 0.8, 0.81, 0.82, 0.83, 0.84, 0.85, 0.86, 0.87, 0.88, 0.89, 0.9, 0.91, 0.92, 0.93, 0.94]
"Коэф-т Ct(J)"
# Thrust coefficient at each advance-ratio breakpoint.
Ct = [0.102122, 0.11097, 0.107621, 0.105191, 0.102446, 0.09947, 0.096775, 0.094706, 0.092341, 0.088912, 0.083878, 0.076336, 0.066669, 0.056342, 0.045688, 0.034716, 0.032492, 0.030253, 0.028001, 0.025735, 0.023453, 0.021159, 0.018852, 0.016529, 0.014194, 0.011843, 0.009479, 0.0071, 0.004686, 0.002278, -0.0002, -0.002638, -0.005145, -0.007641, -0.010188]
# Propeller-engine model.
class Engine(object):
    """Piston-engine / fixed-pitch-propeller model.

    A PID-like controller turns the speed error into a throttle mode in
    [0, 1]; the mode maps to shaft speed, which together with the Ct(J)
    table yields thrust and torque.
    """

    def __init__(self, propeller_R=1.2):
        self.propeller_R = propeller_R  # propeller radius [m]
        self.integral_result = 0
        self.mode = 0.5       # throttle mode in [0, 1]
        self.last_dv = 0      # previous speed error, for the derivative term
        self.I = 0.3          # integrator accumulator

    def function_upr_get_mode(self):
        """Update the throttle mode from the current speed error (PID-like
        law), clamped to the physical range [0, 1]."""
        self.I += self.dv * 0.05
        derivative = (self.dv - self.last_dv) / dt
        self.mode = self.I * dt * 10 + derivative * 0.1 + 0.01 * self.dv
        self.last_dv = self.dv
        if (self.mode > 1):
            self.mode = 1
        elif (self.mode < 0):
            self.mode = 0

    def Set_data(self, current_speed, spec_speed, AirDensity):
        """Latch the measured/commanded speeds [m/s] and air density."""
        self.current_speed = current_speed
        self.spec_speed = spec_speed
        self.air_density = AirDensity
        self.dv = self.delta_speed(self.spec_speed, self.current_speed)

    def Get_mode(self, mode):
        """Map throttle mode [0, 1] to shaft angular rate [rad/s] by linear
        interpolation over the rpm table."""
        rpm = np.interp(mode, mode_arr, omega_arr, omega_arr[0], omega_arr[-1])
        return 2 * pi * rpm / 60

    def get_tractive_power(self, omega, speed):
        """Thrust vector [N] from shaft rate ``omega`` and airspeed."""
        advance_ratio = pi * speed / (omega * self.propeller_R)
        thrust_coeff = self.Get_Ct(advance_ratio)
        thrust = (2 / pi) ** 2 * self.air_density * (omega * self.propeller_R ** 2) ** 2 * thrust_coeff
        return np.array([thrust, 0, 0])

    def Get_Ct(self, J_new):
        """Thrust coefficient for advance ratio ``J_new`` (table lookup)."""
        return np.interp(J_new, J, Ct, Ct[0], Ct[-1])

    def delta_speed(self, speed1, speed2):
        """Speed error: commanded minus current."""
        return speed1 - speed2

    def torque(self, omega, spec_speed):
        """Engine torque vector about the x axis, derived from thrust."""
        thrust_vec = self.get_tractive_power(omega, spec_speed)
        moment_x = (7023.52273 * thrust_vec[0]) / omega
        return np.array([moment_x, 0, 0])

    def Get_data(self):
        """Run one controller update; return (thrust, moment, omega)."""
        self.function_upr_get_mode()
        omega_cmd = self.Get_mode(self.mode)
        thrust = self.get_tractive_power(omega_cmd, self.current_speed)
        moment = self.torque(omega_cmd, self.spec_speed)
        return thrust, moment, omega_cmd
62,101 | loywer/model_v2 | refs/heads/master | /Aircraft_model.py | import Engine as e
import SystemAutomaticControl_release as sys
import aerodym as aero
import atmosphere as atm
import matplotlib.pyplot as plt
import numpy as np
from math import sqrt
class Aircraft:
    """Top-level flight-dynamics assembly: wires the engine, atmosphere,
    aerodynamics and the automatic control system into one closed loop."""

    def __init__(self, H, V, angle, dt):
        self.engine = e.Engine()
        self.atmos = atm.Atmosphere()
        self.atmos.set_H(H)
        self.aerodynamic = aero.Aerodunamic()
        self.control = sys.Control()

    def run(self, v_zad, gama_zad, theta_zad, dt):
        """Advance the closed-loop simulation by one step.

        v_zad / gama_zad / theta_zad: commanded speed, roll and pitch.
        Returns the current pitch angle.
        """
        X, n, w, V, V_abs, gamma, theta, psi, alpha, betta = self.aerodynamic.get_data()
        # Refresh the atmosphere at the new altitude (X[1] is height).
        self.atmos.set_H(X[1])
        air_density = self.atmos.get_density()
        gravity = self.atmos.get_accel_of_gravity()
        # Autopilot: pitch and roll channels.
        self.control.set_data(theta_zad, theta, gama_zad, gamma, w[0])
        elevator, aileron = self.control.get_data()
        # Engine: speed loop.
        self.engine.Set_data(V_abs, v_zad, air_density)
        P, M, omega = self.engine.Get_data()
        # Feed everything back into the airframe model (rudder fixed at 0).
        self.aerodynamic.set_data(elevator, aileron, 0.0, P, M, air_density, gravity)
        return theta
# --- Manual simulation run: track a sinusoidal pitch command ------------
# Initial altitude [m], Euler angles [rad] and body-frame velocity [m/s].
H=2000
angle=np.array([0,0,0])
V = np.array([50,0.0,0])
plane = Aircraft(H,V,angle,0.02)
# Total simulated time [s] and simulation clock.
T=80
t=0
X=[]   # recorded pitch response
TT=[]  # time stamps
while(T>t):
    # Commanded pitch: 1 Hz sine wave, amplitude 0.5 rad.
    theta = np.sin(t*2*np.pi)/2.0
    x=plane.run(55,0.0,theta,0.02)
    t+=0.002
    X.append(x)
    TT.append(t)
#X=np.array(X)
# Plot the pitch response versus time.
plt.plot(TT,X)
plt.grid()
plt.show()
| {"/aerodym.py": ["/c172.py"], "/Aircraft_model.py": ["/Engine.py", "/SystemAutomaticControl_release.py", "/aerodym.py", "/atmosphere.py"]} |
62,102 | loywer/model_v2 | refs/heads/master | /c172.py | import numpy as np
# библиотека геометрических и аэродинамических параметров самолета Cessna 172 в соответствии
# с параметрами для движка JSBSim
# геометрические параметры
Sw = 16.2  # wing reference area [m^2]
b = 10.91184  # wing span [m]
c = 1.49352  # mean aerodynamic chord [m]
mass = 1043.2  # aircraft mass [kg]
# Principal moments of inertia; presumably slug*ft^2 values converted to
# kg*m^2 via the 1.35581 factor — TODO confirm against the JSBSim source.
inertia = np.diag([948, 1346, 1967])*1.35581
# Drag-force coefficients.
# Zero-lift (parasite) drag coefficient.
Cx0 = 0.026
# Drag increment vs angle of attack [rad]; column 0 is alpha, columns 1-4
# are table values (only column 1 is interpolated below).
Cx_alpha =np.array([[-0.0873, 0.0041, 0.0000, 0.0005, 0.0014],
[-0.0698, 0.0013, 0.0004, 0.0025, 0.0041],
[-0.0524, 0.0001, 0.0023, 0.0059, 0.0084],
[-0.0349, 0.0003, 0.0057, 0.0108, 0.0141],
[-0.0175, 0.0020, 0.0105, 0.0172, 0.0212],
[0.0000, 0.0052, 0.0168, 0.0251, 0.0299],
[0.0175, 0.0099, 0.0248, 0.0346, 0.0402],
[0.0349, 0.0162, 0.0342, 0.0457, 0.0521],
[0.0524, 0.0240, 0.0452, 0.0583, 0.0655],
[0.0698, 0.0334, 0.0577, 0.0724, 0.0804],
[0.0873, 0.0442, 0.0718, 0.0881, 0.0968],
[0.1047, 0.0566, 0.0874, 0.1053, 0.1148],
[0.1222, 0.0706, 0.1045, 0.1240, 0.1343],
[0.1396, 0.0860, 0.1232, 0.1442, 0.1554],
[0.1571, 0.0962, 0.1353, 0.1573, 0.1690],
[0.1745, 0.1069, 0.1479, 0.1708, 0.1830],
[0.1920, 0.1180, 0.1610, 0.1849, 0.1975],
[0.2094, 0.1298, 0.1746, 0.1995, 0.2126],
[0.2269, 0.1424, 0.1892, 0.2151, 0.2286],
[0.2443, 0.1565, 0.2054, 0.2323, 0.2464],
[0.2618, 0.1727, 0.2240, 0.2521, 0.2667],
[0.2793, 0.1782, 0.2302, 0.2587, 0.2735],
[0.2967, 0.1716, 0.2227, 0.2507, 0.2653],
[0.3142, 0.1618, 0.2115, 0.2388, 0.2531],
[0.3316, 0.1475, 0.1951, 0.2214, 0.2351],
[0.3491, 0.1097, 0.1512, 0.1744, 0.1866]])
# Drag due to sideslip. NOTE: the leading character is a Cyrillic 'С',
# matching its use inside get_Cx — do not "fix" to Latin C.
Сx_betta = 0.170
def get_Cx(alpha,betta):
    """Total drag coefficient for angle of attack ``alpha`` and sideslip
    ``betta`` [rad]: parasite + alpha-table lookup + sideslip term."""
    alpha_term = np.interp(alpha, Cx_alpha.T[0], Cx_alpha.T[1])
    return Cx0 + alpha_term + Сx_betta*betta
# Side-force coefficients.
# Side force vs sideslip [rad].
Cz_betta=np.array([[-0.3490, 0.1370],
[0.0000, 0.0000],
[0.3490, -0.1370]])
# NOTE: Сz_eleron / Сz_roll_rate start with a Cyrillic 'С' — keep as-is.
Сz_eleron = -0.05
Cz_rudder = 0.1870
Сz_roll_rate = - 0.0370
Cz_yaw_rate = 0.210
def get_Cz(alpha,betta,w,ruder,eleron,V_abs):
    """Side-force coefficient.

    NOTE: ``alpha`` and ``eleron`` are accepted for interface parity but
    unused (Сz_eleron is defined yet never applied).
    """
    half_span_rate = b/(2.0 * V_abs)
    sideslip_term = np.interp(betta, Cz_betta.T[0], Cz_betta.T[1])
    return (sideslip_term + Cz_rudder*ruder
            + Сz_roll_rate*half_span_rate*w[0]
            + Cz_yaw_rate*half_span_rate*w[1])
# Lift coefficients.
# Lift vs angle of attack; column 0 is alpha [rad]; only column 1 is
# interpolated below (column 2 presumably another configuration — TODO confirm).
# NOTE: the name starts with a Cyrillic 'С' — keep as-is.
Сy_alpha = np.array([[-0.0900, -0.2200, -0.2200],
[0.0000, 0.2500, 0.2500],
[0.0900, 0.7300, 0.7300],
[0.1000, 0.8300, 0.7800],
[0.1200, 0.9200, 0.7900],
[0.1400, 1.0200, 0.8100],
[0.1600, 1.0800, 0.8200],
[0.1700, 1.1300, 0.8300],
[0.1900, 1.1900, 0.8500],
[0.2100, 1.2500, 0.8600],
[0.2400, 1.3500, 0.8800],
[0.2600, 1.4400, 0.9000],
[0.2800, 1.4700, 0.9200],
[0.3000, 1.4300, 0.9500],
[0.3200, 1.3800, 0.9900],
[0.3400, 1.3000, 1.0500],
[0.3600, 1.1500, 1.1500]])
Cy_elevator = 0.347
Cy_alpha_dot = 1.7
Cy_pitch = 3.9
def get_Cy(alpha,alpha_dot,elevator,w,V_abs):
    """Lift coefficient: alpha-table lookup plus elevator, alpha-rate and
    pitch-rate contributions (rates normalized by c / 2V)."""
    chord_rate = c/(2.0 * V_abs)
    base = np.interp(alpha, Сy_alpha.T[0], Сy_alpha.T[1])
    from_elevator = Cy_elevator*elevator
    from_alpha_rate = Cy_alpha_dot*chord_rate*alpha_dot
    from_pitch_rate = Cy_pitch*chord_rate*w[2]
    return base + from_elevator + from_alpha_rate + from_pitch_rate
# Rolling-moment coefficients (about the longitudinal body axis).
mx_betta=np.array([[-0.3490, 0.0322],
[0.0000, 0.0000],
[0.3490, -0.0322]])
# Roll moment due to yaw rate, itself a function of alpha.
mx_yaw_rate =np.array([[0.0000, 0.0798],
[0.0940, 0.1869]])
mx_aileron = 0.2290
mx_roll_rate = -0.4840
mx_rudder = 0.0147
def get_mx(alfa,betta,w,V_abs,aileron,rudder):
    """Rolling-moment coefficient: sideslip, roll/yaw-rate damping
    (normalized by b / 2V), aileron and rudder contributions."""
    half_span_rate = b/(2.0 * V_abs)
    sideslip_term = np.interp(betta, mx_betta.T[0], mx_betta.T[1])
    yaw_rate_deriv = np.interp(alfa, mx_yaw_rate.T[0], mx_yaw_rate.T[1])
    return (sideslip_term + mx_roll_rate*half_span_rate*w[0]
            + yaw_rate_deriv*half_span_rate*w[1]
            + mx_aileron * aileron + mx_rudder*rudder)
# Pitching-moment coefficients.
mz_0 = 0.025000
mz_alfa = -1.8000
mz_pitch_rate = -12.4000
mz_alfa_dot = -5.2000
mz_elevator = -1.280
def get_mz(alfa,alfa_dot,w,V_abs,elevator):
    """Pitching-moment coefficient: static term, alpha, pitch-rate and
    alpha-rate damping (normalized by c / 2V), and elevator contribution."""
    chord_rate = c/(2.0*V_abs)
    return (mz_0 + mz_alfa*alfa
            + mz_pitch_rate*chord_rate*w[2]
            + mz_alfa_dot*alfa_dot*chord_rate
            + mz_elevator*elevator)
# Yawing-moment coefficients.
my_betta = np.array([[-0.3490, -0.0205],
[0.0000, 0.0000],
[0.3490, 0.0205]])
my_roll_rate = 0.0278
my_yaw_rate = -0.0937
my_aileron = -0.0053
my_rudder = -0.0430
def get_my(betta,w,V_abs,rudder,aileron):
    """Yawing-moment coefficient: sideslip, roll/yaw-rate damping
    (normalized by b / 2V), aileron and rudder contributions."""
    half_span_rate = b/(2.0 *V_abs)
    sideslip_term = np.interp(betta, my_betta.T[0], my_betta.T[1])
    return (sideslip_term + my_roll_rate*half_span_rate*w[0]
            + my_yaw_rate*half_span_rate*w[1]
            + my_aileron*aileron + my_rudder*rudder)
| {"/aerodym.py": ["/c172.py"], "/Aircraft_model.py": ["/Engine.py", "/SystemAutomaticControl_release.py", "/aerodym.py", "/atmosphere.py"]} |
62,118 | yss-810/test | refs/heads/master | /test_case/test_zhuce.py | import time
import unittest
from unittest import result
from selenium import webdriver
import driver
from driver.browser import chrome_browser
from lib.utils import read_excel
from page.zhuce_page import ZhucePage
class ZhuceTestCase(unittest.TestCase):
    """Selenium UI test for the registration ("zhuce") page."""

    def setUp(self):
        # Fresh Chrome session per test.
        self.driver = chrome_browser()

    def tearDown(self):
        self.driver.quit()

    def test_zhuce(self):
        """Register a fixed user and verify the page reports that username."""
        zp=ZhucePage(self.driver)
        # content=read_excel()
        # print('读取成功',content)
        # uname = content[1][0]
        # email = content[1][1]
        # password = content[1][2]
        # mobile = content[1][3]
        # NOTE(review): credentials are hard-coded; the commented lines above
        # suggest they were meant to come from the Excel fixture — confirm.
        result=zp.zhuce('yss9','1053741200@qq.com','yss123321','yss123321','120537114','15928561321')
        time.sleep(3)
        self.assertEqual("yss9", result)

if __name__ == '__main__':
    unittest.main()
| {"/test_case/test_zhuce.py": ["/driver/browser.py", "/lib/utils.py", "/page/zhuce_page.py"], "/dame/test_suite_a.py": ["/dame/test_dame.py", "/dame/test_dame_baidu_sreach.py"], "/page/zhuce_page.py": ["/page/base_page.py"], "/test_case/test_login.py": ["/driver/browser.py", "/page/login_page.py"]} |
62,119 | yss-810/test | refs/heads/master | /dame/demo_es_houtai.py | """
case:后台添加商品
import:
用户名:admin
密码:LS514320ls
商品名称:牛仔外套
本店售价:100
促销日期:2020-08-20
修改页商品名称:羊毛大衣
step:
1、登录后台
2、进入左侧导航菜单【商品管理-添加新商品】
2.1点击【商品管理】
2.2点击【添加新商品】
3、进入右侧商品添加页面,添加商品信息
3.1进入右侧添加页面
3.2一次输入商品各项信息
3.3点击【确定】按钮完成添加
4、进入商品列表,查看商品
4.1进入右侧列表页面
4.2点击【查看】
4.3切换回上一个窗口
5、进入商品列表,修改商品
5.1进入修改页面
5.2修改商品名
5.3点击保存
6、进入商品列表,删除商品
6.1点击【删除】
6.2处理弹窗确定
7、退出
"""
import time
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions
from selenium.webdriver.support.select import Select
from selenium import webdriver
from selenium.webdriver.support.wait import WebDriverWait

# Back-office smoke script: log in, add a product, view/edit it, then delete it
# (the first delete confirm is dismissed on purpose, the second is accepted).
driver = webdriver.Chrome()
driver.maximize_window()
driver.implicitly_wait(30)
# Open the admin login page.
driver.get('http://192.168.4.223/upload/admin')
driver.implicitly_wait(10)  # implicit wait
time.sleep(2)
print('当前url',driver.current_url)
# Log in.
# driver.find_element_by_name("remember").click()
# driver.implicitly_wait(10)
# driver.find_element_by_name("username").send_keys('admin')
# driver.find_element_by_name("password").send_keys('LS514320ls')
# driver.find_element_by_id("remember").click()
# # driver.find_element_by_name("remember").click()
# driver.find_element_by_class_name("button").click()
# driver.implicitly_wait(10)
# driver.find_element(By.NAME,'remember').click()
driver.find_element(By.NAME,'username').send_keys('admin')
driver.find_element(By.NAME,'password').send_keys('LS514320ls')
# driver.find_element(By.ID,'remember').click()
driver.find_element(By.CLASS_NAME,'button').click()
driver.implicitly_wait(30)
# Window switching (not needed here, kept for reference).
# handles=driver.window_handles
# driver.switch_to.window(handles[-1])
# time.sleep(2)
# print('当前url',driver.current_url)
# Enter the left-hand menu frame.
driver.switch_to.frame('menu-frame')
# Open "commodity management" and pick "add new commodity".
driver.find_element_by_xpath('//ul[@id="menu-ul"]/li[1]').click()
time.sleep(2)
driver.find_element_by_link_text('添加新商品').click()
time.sleep(2)
# Leave the menu frame.
driver.switch_to.parent_frame()
# Enter the main (form) frame and fill in the product details.
driver.switch_to.frame('main-frame')
driver.find_element_by_xpath('//table[@id="general-table"]/tbody/tr[1]/td[2]/input[1]').send_keys('牛仔外套')
driver.find_element_by_xpath('//table[@id="general-table"]/tbody/tr[3]/td[2]/select').click()
time.sleep(2)
# select=Select(locator)
# select.select_by_visible_text('女装')
# Explicit wait until the category option is present.
wait=WebDriverWait(driver,10,0.5)
wait.until(expected_conditions.presence_of_element_located((By.XPATH,'//select[@name="cat_id"]/option[3]')))
driver.find_element_by_xpath('//select[@name="cat_id"]/option[3]').click()
driver.find_element_by_name('shop_price').send_keys('100')
time.sleep(2)
driver.find_element(By.XPATH,'//input[@id="is_promote"]').click()  # enable the promotion price
time.sleep(2)
# Strip the readonly attribute so the date field accepts keyboard input.
js = "document.getElementById('promote_start_date').removeAttribute('readonly')"
driver.execute_script(js)
time.sleep(2)
driver.find_element(By.ID,'promote_start_date').clear()
driver.find_element(By.ID,'promote_start_date').send_keys('2020-08-20')
# Image upload (disabled).
# # driver.find_element(By.XPATH,'//tablet[@id="general-table"]/tbody/tr[15]/td[2]/input[1]').click()
# driver.find_element(By.XPATH,'//table[@id="general-table"]/tbody/tr[15]/td[2]/input[2]').clear()
# time.sleep(2)
# driver.find_element(By.XPATH,'//table[@id="general-table"]/tbody/tr[15]/td[2]/input[2]').send_keys(r'F:\1.jpg')
time.sleep(2)
driver.find_element_by_xpath('//div[@id="tabbody-div"]/form/div/input[2]').click()
time.sleep(3)
# View the new product from the listing.
driver.find_element(By.XPATH,'//div[@id="listDiv"]/table[1]/tbody/tr[3]/td[11]/a[1]/img').click()
time.sleep(2)
# Switch back to the product-management window.
handles=driver.window_handles
driver.switch_to.window(handles[-2])
time.sleep(2)
# Edit the product name.
driver.switch_to.frame('main-frame')  # enter the content frame
driver.find_element(By.XPATH,'//div[@id="listDiv"]/table[1]/tbody/tr[3]/td[11]/a[2]/img').click()
time.sleep(1)
driver.find_element(By.XPATH,'//table[@id="general-table"]/tbody/tr[1]/td[2]/input[1]').clear()
driver.find_element(By.XPATH,'//table[@id="general-table"]/tbody/tr[1]/td[2]/input[1]').send_keys('羊毛大衣')
driver.find_element(By.XPATH,'//div[@id="tabbody-div"]/form/div/input[2]').click()
driver.switch_to.parent_frame()  # leave the content frame
# Delete the product: dismiss the confirm once, then accept it.
driver.switch_to.frame('main-frame')
time.sleep(2)
driver.find_element(By.XPATH,'//div[@id="listDiv"]/table[1]/tbody/tr[3]/td[11]/a[4]/img').click()
time.sleep(2)
driver.switch_to.alert.dismiss()
time.sleep(2)
driver.find_element(By.XPATH,'//div[@id="listDiv"]/table[1]/tbody/tr[3]/td[11]/a[4]/img').click()
time.sleep(2)
driver.switch_to.alert.accept()
driver.switch_to.parent_frame()  # leave the content frame
# driver.implicitly_wait(20)
time.sleep(20)
driver.switch_to.parent_frame()
driver.quit() | {"/test_case/test_zhuce.py": ["/driver/browser.py", "/lib/utils.py", "/page/zhuce_page.py"], "/dame/test_suite_a.py": ["/dame/test_dame.py", "/dame/test_dame_baidu_sreach.py"], "/page/zhuce_page.py": ["/page/base_page.py"], "/test_case/test_login.py": ["/driver/browser.py", "/page/login_page.py"]} |
62,120 | yss-810/test | refs/heads/master | /dame/dame.ec_tongji.py | """
case:后台登录--报表统计--流量分析--客户统计--订单统计-销售明细-销售排行
import:
用户名:admin
密码:yss123321
step:
1、登录后台
2、进入左侧导航菜单【报表统计-流量分析】
2.1点击【报表统计】
2.2点击【流量分析】
3、进入左侧导航菜单【报表统计-客户统计】
3.1进入右侧添加页面
3.2点击【客户统计报表下载】
4、进入左侧导航菜单【报表统计-订单统计】
4.1进入右侧列表页面
4.2搜索订单
4.3切换回上一个窗口
5、进入左侧导航菜单【报表统计-销售明细】
5.1进入右侧页面
5.2搜索销售明细
5.3切换回上一个窗口
6、进入左侧导航菜单【报表统计-销售排行】
6.1进入右侧页面
6.2搜索销售明细
6.3切换回上一个窗口
7、退出
"""
import time
from selenium.webdriver.common.by import By
from selenium.webdriver.support.select import Select
from selenium import webdriver

# Report-statistics smoke script: traffic analysis, customer statistics,
# order statistics, sales detail and sales ranking in the admin back office.
driver = webdriver.Chrome()
driver.maximize_window()
driver.implicitly_wait(30)
# Open the admin login page.
driver.get('http://192.168.4.231/upload/admin')
driver.implicitly_wait(10)  # implicit wait
time.sleep(2)
print('当前url',driver.current_url)
# driver.find_element(By.NAME,'remember').click()
driver.find_element(By.NAME,'username').send_keys('admin')
driver.find_element(By.NAME,'password').send_keys('yss123321')
# driver.find_element(By.ID,'remember').click()
driver.find_element(By.CLASS_NAME,'button').click()
driver.implicitly_wait(30)
# Open "report statistics" -> "traffic analysis" in the menu frame.
driver.switch_to.frame('menu-frame')
time.sleep(1)
driver.find_element(By.XPATH,('//ul[@id="menu-ul"]/li[5]')).click()
time.sleep(1)
driver.find_element(By.LINK_TEXT,('流量分析')).click()
time.sleep(2)
driver.switch_to.parent_frame()
# Traffic analysis: query a date range in the content frame.
driver.switch_to.frame('main-frame')
time.sleep(2)
driver.find_element(By.XPATH,('//form[@id="selectForm"]/input[1]')).clear()
time.sleep(1)
driver.find_element(By.XPATH,('//form[@id="selectForm"]/input[1]')).send_keys('2020-01-01')
time.sleep(1)
driver.find_element(By.XPATH,('//Form[@id="selectForm"]/input[2]')).clear()
time.sleep(1)
driver.find_element(By.XPATH,('//Form[@id="selectForm"]/input[2]')).send_keys('2020-08-17')
time.sleep(1)
driver.find_element(By.XPATH,('//Form[@id="selectForm"]/input[3]')).click()
time.sleep(1)
driver.switch_to.parent_frame()
# Customer statistics.
driver.switch_to.frame('menu-frame')
time.sleep(3)
driver.find_element(By.XPATH,('//ul[@id="menu-ul"]/li[5]/ul/li[2]/a')).click()
time.sleep(1)
driver.switch_to.parent_frame()
# Order statistics: open the page and query the same date range.
driver.switch_to.frame('menu-frame')
time.sleep(2)
driver.find_element(By.LINK_TEXT,('订单统计')).click()
time.sleep(2)
driver.switch_to.parent_frame()
driver.switch_to.frame('main-frame')
driver.find_element(By.XPATH,('//Form[@id="selectForm"]/input[1]')).clear()
time.sleep(1)
driver.find_element(By.XPATH,('//Form[@id="selectForm"]/input[1]')).send_keys('2020-01-01')
time.sleep(1)
driver.find_element(By.XPATH,('//Form[@id="selectForm"]/input[2]')).clear()
time.sleep(1)
driver.find_element(By.XPATH,('//Form[@id="selectForm"]/input[2]')).send_keys('2020-08-17')
time.sleep(1)
driver.find_element(By.XPATH,('//Form[@id="selectForm"]/input[3]')).click()
driver.switch_to.parent_frame()
# Sales detail.
driver.switch_to.frame('menu-frame')
driver.find_element(By.XPATH,('//ul[@id="menu-ul"]/li[5]/ul/li[6]/a')).click()
driver.switch_to.parent_frame()
# Sales ranking.
driver.switch_to.frame('menu-frame')
driver.find_element(By.XPATH,('//ul[@id="menu-ul"]/li[5]/ul/li[8]/a')).click()
driver.switch_to.parent_frame()
time.sleep(3)
driver.quit() | {"/test_case/test_zhuce.py": ["/driver/browser.py", "/lib/utils.py", "/page/zhuce_page.py"], "/dame/test_suite_a.py": ["/dame/test_dame.py", "/dame/test_dame_baidu_sreach.py"], "/page/zhuce_page.py": ["/page/base_page.py"], "/test_case/test_login.py": ["/driver/browser.py", "/page/login_page.py"]} |
62,121 | yss-810/test | refs/heads/master | /dame/test_dame_baidu_sreach.py | import time
import unittest
class BaiDuTestCase(unittest.TestCase):
    """Two Baidu search scenarios driven through a fresh Chrome session."""

    def setUp(self):
        print('开始')
        from selenium import webdriver
        self.driver = webdriver.Chrome()
        self.driver.maximize_window()

    def tearDown(self):
        print('结束')
        self.driver.quit()

    def test_baidu(self):
        """Search for 'python', open the Baike result, and check the title."""
        browser = self.driver
        browser.get("http://www.baidu.com")
        browser.implicitly_wait(20)
        browser.find_element_by_id('kw').send_keys("python")
        browser.find_element_by_id('su').click()
        time.sleep(3)
        browser.find_element_by_partial_link_text('Python(计算机程序设计语言)_百度百科').click()
        time.sleep(5)
        self.assertIn("python", browser.title)

    def test_baidu_search(self):
        """Search once, clear the box, then search again via the class-name locator."""
        browser = self.driver
        browser.get("http://www.baidu.com")
        browser.implicitly_wait(20)
        browser.find_element_by_name('wd').send_keys("linux")
        browser.find_element_by_id('su').click()
        time.sleep(1)
        browser.find_element_by_name('wd').clear()
        time.sleep(1)
        browser.find_element_by_class_name('s_ipt').send_keys("自动化")
        browser.find_element_by_id('su').click()
        time.sleep(5)
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| {"/test_case/test_zhuce.py": ["/driver/browser.py", "/lib/utils.py", "/page/zhuce_page.py"], "/dame/test_suite_a.py": ["/dame/test_dame.py", "/dame/test_dame_baidu_sreach.py"], "/page/zhuce_page.py": ["/page/base_page.py"], "/test_case/test_login.py": ["/driver/browser.py", "/page/login_page.py"]} |
62,122 | yss-810/test | refs/heads/master | /dame/dame.py | import time#引入时间模块
from selenium import webdriver  # webdriver entry point
driver = webdriver.Chrome()  # start Chrome and create the driver instance
driver.maximize_window()  # maximise the browser window
# driver.set_window_size(480,700)  # set an explicit window size
# driver.minimize_window()  # minimise the window
driver.get("http://www.baidu.com")  # open the browser and load the page
driver.back()  # go back one step in history
time.sleep(3)  # hard wait
driver.forward()  # go forward one step
driver.get("http://www.taobao.com")
time.sleep(2)  # hard wait
driver.refresh()
time.sleep(3)  # hard wait
driver.get("http://www.jd.com")
time.sleep(3)  # hard wait
# driver.close()  # close the current window
driver.quit()#关闭并退出 | {"/test_case/test_zhuce.py": ["/driver/browser.py", "/lib/utils.py", "/page/zhuce_page.py"], "/dame/test_suite_a.py": ["/dame/test_dame.py", "/dame/test_dame_baidu_sreach.py"], "/page/zhuce_page.py": ["/page/base_page.py"], "/test_case/test_login.py": ["/driver/browser.py", "/page/login_page.py"]} |
62,123 | yss-810/test | refs/heads/master | /dame/demo_ecshop_tankuang.py | """
case:ecshop收藏本站
step:
1、打开网页
2、登录
3、收藏网页
4、退出
"""
import time
from selenium.webdriver.common.by import By
from selenium.webdriver.support.select import Select
from selenium import webdriver

# ECShop storefront: log in, click "bookmark this site" and accept the JS alert.
driver = webdriver.Chrome()
driver.maximize_window()
# Open the storefront.
driver.get('http://192.168.4.223/upload/')
driver.implicitly_wait(10)  # implicit wait
time.sleep(2)
# Log in.
driver.find_element_by_link_text('登录').click()
time.sleep(2)
# Fill the login form.
driver.find_element_by_name("username").send_keys('admin3')
driver.find_element_by_name("password").send_keys('LS514320ls')
driver.find_element_by_name("remember").click()
driver.find_element_by_name("submit").click()
time.sleep(1)
# Bookmark the site; this raises a browser alert that must be accepted.
driver.find_element(By.LINK_TEXT,'收藏本站').click()
time.sleep(3)
# driver.switch_to.alert.text
driver.switch_to.alert.accept()
time.sleep(3)
driver.quit() | {"/test_case/test_zhuce.py": ["/driver/browser.py", "/lib/utils.py", "/page/zhuce_page.py"], "/dame/test_suite_a.py": ["/dame/test_dame.py", "/dame/test_dame_baidu_sreach.py"], "/page/zhuce_page.py": ["/page/base_page.py"], "/test_case/test_login.py": ["/driver/browser.py", "/page/login_page.py"]} |
62,124 | yss-810/test | refs/heads/master | /dame/text_damo.py | import unittest
class DemoTest(unittest.TestCase):
    """Minimal demo suite; each case only prints a marker string."""

    def test_dame(self):
        print('dame A')

    def test_damo_b(self):
        print("dame B")
# Allow running this module directly.
if __name__=='__main__':
    unittest.main()
| {"/test_case/test_zhuce.py": ["/driver/browser.py", "/lib/utils.py", "/page/zhuce_page.py"], "/dame/test_suite_a.py": ["/dame/test_dame.py", "/dame/test_dame_baidu_sreach.py"], "/page/zhuce_page.py": ["/page/base_page.py"], "/test_case/test_login.py": ["/driver/browser.py", "/page/login_page.py"]} |
62,125 | yss-810/test | refs/heads/master | /page/commodity_add_page.py | import unittest
from selenium.webdriver.common.by import By
class CommodityaddTestCase(unittest.TestCase):
    """Page object for the admin "add new commodity" screen (work in progress).

    Fixes over the original:
    - removed the unfinished ``def ele_`` stub and the stray
      ``self.assertEqual(True, False)`` that made the module a SyntaxError;
    - ``ele_add_goods`` now unpacks its locator tuple with ``*`` the same way
      ``ele_commodity_management`` does (``find_element`` takes by and value
      as two arguments, not one tuple).

    NOTE(review): despite the TestCase base class this is used as a page
    object -- it overrides __init__ with a driver argument, so unittest
    discovery cannot run it; consider dropping the base class.
    """

    def __init__(self, driver):
        self.driver = driver
        # Locators for the left-hand menu (inside the 'menu-frame' frame).
        self.locator_ele_commodity_management = (By.XPATH, ('//ul[@id="menu-ul"]/li[1]'))
        self.locator_ele_add_goods = (By.LINK_TEXT, ('添加新商品'))

    def ele_switchframe(self):
        # Placeholder: switch into the menu frame.
        pass

    def ele_commodity_management(self):
        """Click the "commodity management" menu entry."""
        self.driver.find_element(*self.locator_ele_commodity_management).click()

    def ele_add_goods(self):
        """Click the "add new commodity" link (bug fix: unpack the locator)."""
        self.driver.find_element(*self.locator_ele_add_goods).click()

    def ele_switchparent(self):
        # Placeholder: switch back to the parent frame.
        pass

    def ele_switchframe1(self):
        # Placeholder: switch into the main content frame.
        pass


if __name__ == '__main__':
    unittest.main()
| {"/test_case/test_zhuce.py": ["/driver/browser.py", "/lib/utils.py", "/page/zhuce_page.py"], "/dame/test_suite_a.py": ["/dame/test_dame.py", "/dame/test_dame_baidu_sreach.py"], "/page/zhuce_page.py": ["/page/base_page.py"], "/test_case/test_login.py": ["/driver/browser.py", "/page/login_page.py"]} |
62,126 | yss-810/test | refs/heads/master | /dame/demo_es_wenzhang.py | """
case:后台登录--商品管理--文章管理--文章列表--添加-修改-发布-删除
import:
用户名:admin
密码:LS514320ls
文章标题:逆商
step:
1、登录后台
2、进入左侧导航菜单【文章管理-文章列表】
2.1点击【文章管理】
2.2点击【文章列表】
3、进入右侧文章列表页面,添加文章信息
3.1进入右侧添加页面
3.2一次输入文章各项信息
3.3点击【确定】按钮完成添加
4、进入文章列表,查看文章
4.1进入右侧列表页面
4.2点击【查看】
4.3切换回上一个窗口
5、进入文章列表,修改文章
5.1进入修改页面
5.2修改文章名
5.3点击保存
6、进入文章发布
6.1选择文章
6.2选择时间
6.3点击【批量发布】
7、进入文章列表,删除文章
7.1点击【删除】
7.2处理弹窗确定
8、退出
"""
import time
from selenium.webdriver.common.by import By
from selenium.webdriver.support.select import Select
from selenium import webdriver

# Article-management smoke script: add, view, edit, auto-publish and delete
# an article in the admin back office.
driver = webdriver.Chrome()
driver.maximize_window()
driver.implicitly_wait(30)
# Open the admin login page.
driver.get('http://localhost/upload/admin')
driver.implicitly_wait(10)  # implicit wait
time.sleep(2)
print('当前url',driver.current_url)
# driver.find_element(By.NAME,'remember').click()
driver.find_element(By.NAME,'username').send_keys('admin')
driver.find_element(By.NAME,'password').send_keys('yss123321')
# driver.find_element(By.ID,'remember').click()
driver.find_element(By.CLASS_NAME,'button').click()
driver.implicitly_wait(30)
# Enter the menu frame.
driver.switch_to.frame('menu-frame')
# Open "article management" -> "article list".
driver.find_element_by_xpath('//ul[@id="menu-ul"]/li[6]').click()
time.sleep(1)
driver.find_element_by_link_text('文章列表').click()
time.sleep(1)
# Leave the menu frame.
driver.switch_to.parent_frame()
# Enter the content frame and start a new article.
driver.switch_to.frame('main-frame')
time.sleep(1)
driver.find_element(By.LINK_TEXT,'添加新文章').click()
time.sleep(1)
# Leave the content frame.
driver.switch_to.parent_frame()
# Re-enter it and fill in the article form.
driver.switch_to.frame('main-frame')
time.sleep(1)
driver.find_element(By.XPATH,'//table[@id="general-table"]/tbody/tr[1]/td[2]/input').send_keys('逆商3')
time.sleep(1)
driver.find_element(By.XPATH,'//table[@id="general-table"]/tbody/tr[2]/td[2]/select').click()
time.sleep(1)
driver.find_element(By.XPATH,'//table[@id="general-table"]/tbody/tr[2]/td[2]/select/option[2]').click()
time.sleep(1)
driver.find_element(By.XPATH,'//div[@id="tabbody-div"]/form/div/input[4]').click()
time.sleep(5)
# Leave the content frame.
driver.switch_to.parent_frame()
# Back to the article list via the menu frame.
driver.switch_to.frame('menu-frame')
driver.find_element_by_xpath('//ul[@id="menu-ul"]/li[6]').click()
time.sleep(1)
driver.find_element_by_link_text('文章列表').click()
time.sleep(1)
driver.switch_to.parent_frame()
# View the article from the list.
driver.switch_to.frame('main-frame')
time.sleep(1)
driver.find_element(By.XPATH,'//table[@id="list-table"]/tbody/tr[2]/td[7]/span/a[1]/img').click()
time.sleep(2)
# Switch back to the admin window.
handles=driver.window_handles
driver.switch_to.window(handles[-2])
time.sleep(1)
driver.switch_to.parent_frame()
# Edit the article: navigate to the list again.
driver.switch_to.frame('menu-frame')
driver.find_element_by_xpath('//ul[@id="menu-ul"]/li[6]').click()
time.sleep(1)
driver.find_element_by_link_text('文章列表').click()
time.sleep(1)
driver.switch_to.parent_frame()
# Change the article title and save.
driver.switch_to.frame('main-frame')
driver.find_element(By.XPATH,'//table[@id="list-table"]/tbody/tr[7]/td[7]/span/a[2]/img').click()
time.sleep(1)
driver.find_element(By.XPATH,'//table[@id="general-table"]/tbody/tr[1]/td[2]/input').clear()
time.sleep(1)
driver.find_element(By.XPATH,'//table[@id="general-table"]/tbody/tr[1]/td[2]/input').send_keys('3G普及')
time.sleep(1)
driver.find_element(By.XPATH,'//div[@id="tabbody-div"]/form/div/input[4]').click()
driver.switch_to.parent_frame()  # leave the content frame
# Auto-publish: open the publish page from the menu.
driver.switch_to.frame('menu-frame')
driver.find_element_by_xpath('//ul[@id="menu-ul"]/li[6]').click()
time.sleep(1)
driver.find_element_by_link_text('文章自动发布').click()
time.sleep(1)
driver.switch_to.parent_frame()
# Pick the article and a publish date in the content frame.
driver.switch_to.frame('main-frame')
driver.find_element(By.XPATH,'//div[@id="listDiv"]/table[1]/tbody/tr[2]/td[1]/input').click()
time.sleep(1)
# Strip readonly so the date picker accepts keyboard input.
js="document.getElementById('date').removeAttribute('readonly')"
driver.execute_script(js)
time.sleep(2)
driver.find_element(By.ID,"date").send_keys('2020-08-18')
driver.find_element(By.ID,('btnSubmit1')).click()
driver.switch_to.parent_frame()  # leave the content frame
# Delete the article and accept the confirm dialog.
driver.switch_to.frame('menu-frame')
driver.find_element(By.LINK_TEXT,('文章列表')).click()
driver.switch_to.parent_frame()
driver.switch_to.frame('main-frame')
time.sleep(2)
driver.find_element(By.XPATH,('//table[@id="list-table"]/tbody/tr[7]/td[7]/span/a[3]/img')).click()
time.sleep(2)
driver.switch_to.alert.accept()
time.sleep(2)
driver.switch_to.parent_frame()
time.sleep(3)
driver.quit() | {"/test_case/test_zhuce.py": ["/driver/browser.py", "/lib/utils.py", "/page/zhuce_page.py"], "/dame/test_suite_a.py": ["/dame/test_dame.py", "/dame/test_dame_baidu_sreach.py"], "/page/zhuce_page.py": ["/page/base_page.py"], "/test_case/test_login.py": ["/driver/browser.py", "/page/login_page.py"]} |
62,127 | yss-810/test | refs/heads/master | /dame/dame_es_db.py | """
case:后台登录--数据库管理--数据备份--数据表优化--SQL查询-转换数据
import:
用户名:admin
密码:yss123321
step:
1、登录后台
2、进入左侧导航菜单【数据库管理-数据备份】
2.1点击【数据库管理】
2.2点击【数据备份】
3、进入左侧导航菜单【数据库管理-数据表优化】
3.1点击【数据库管理】
3.2点击【数据表优化】
4、进入左侧导航菜单【数据库管理-SQL查询】
4.1点击【数据库管理】
4.2点击【SQL查询】
5、进入左侧导航菜单【数据库管理-转换数据】
5.1点击【数据库管理】
5.2点击【转换数据】
6、退出
"""
import time
from selenium.webdriver.common.by import By
from selenium.webdriver.support.select import Select
from selenium import webdriver

# Database-management smoke script: log in and run a data backup.
driver = webdriver.Chrome()
driver.maximize_window()
driver.implicitly_wait(30)
# Open the admin login page.
driver.get('http://192.168.4.231/upload/admin')
driver.implicitly_wait(10)  # implicit wait
time.sleep(2)
print('当前url',driver.current_url)
# driver.find_element(By.NAME,'remember').click()
driver.find_element(By.NAME,'username').send_keys('admin')
driver.find_element(By.NAME,'password').send_keys('yss123321')
# driver.find_element(By.ID,'remember').click()
driver.find_element(By.CLASS_NAME,'button').click()
driver.implicitly_wait(30)
# Data backup: open the menu entry, then start the backup in the content frame.
driver.switch_to.frame('menu-frame')
time.sleep(4)
driver.find_element(By.XPATH,('//ul[@id="menu-ul"]/li[11]/ul/li[1]/a')).click()
time.sleep(1)
driver.switch_to.parent_frame()
driver.switch_to.frame('main-frame')
time.sleep(4)
driver.find_element(By.XPATH,('//div[@id="listDiv"]/center/input')).click()
time.sleep(1)
driver.switch_to.parent_frame()
time.sleep(3)
driver.quit() | {"/test_case/test_zhuce.py": ["/driver/browser.py", "/lib/utils.py", "/page/zhuce_page.py"], "/dame/test_suite_a.py": ["/dame/test_dame.py", "/dame/test_dame_baidu_sreach.py"], "/page/zhuce_page.py": ["/page/base_page.py"], "/test_case/test_login.py": ["/driver/browser.py", "/page/login_page.py"]} |
62,128 | yss-810/test | refs/heads/master | /dame/test_dame.py | import unittest
class Dame_a(unittest.TestCase):
    """Three trivial demo cases used for suite/loader experiments.

    Fix: ``tearDown`` previously misspelled its first parameter as ``sel``;
    it is renamed to the conventional ``self`` (unittest calls the hook
    positionally, so behaviour is unchanged, but the override now reads
    correctly and tooling recognises it as a method).
    """

    def setUp(self):
        print("开始")

    def test_A(self):
        print("A用例")

    def test_B(self):
        print("用例B")

    def test_C(self):
        print("用例C")

    def tearDown(self):
        print("结束")
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| {"/test_case/test_zhuce.py": ["/driver/browser.py", "/lib/utils.py", "/page/zhuce_page.py"], "/dame/test_suite_a.py": ["/dame/test_dame.py", "/dame/test_dame_baidu_sreach.py"], "/page/zhuce_page.py": ["/page/base_page.py"], "/test_case/test_login.py": ["/driver/browser.py", "/page/login_page.py"]} |
62,129 | yss-810/test | refs/heads/master | /dame/demo__jde_chf.py | #京东充话费
# JD top-up demo: data-plan recharge and phone-credit recharge flows.
import time
from selenium import webdriver
driver=webdriver.Chrome()
driver.maximize_window()
driver.get('http://www.jd.com')
driver.implicitly_wait("10")  # NOTE(review): wait passed as a string; usually a number -- verify
print("当前title:",driver.title)
print("当前url",driver.current_url)
print("当前窗口句柄",driver.current_window_handle)
print("所有窗口句柄",driver.window_handles)
# Open the phone-credit ("话费") page.
driver.find_element_by_link_text('话费').click()
# Switch to the newly opened window.
handles=driver.window_handles
driver.switch_to.window(handles[1])
print("当前title:",driver.title)
print("当前url",driver.current_url)
print("当前窗口句柄",driver.current_window_handle)
print("所有窗口句柄",driver.window_handles)
# driver.find_element_by_class_name('mobile gray').send_keys('15928561321')
# Data-plan recharge.
driver.find_element_by_xpath('/html/body/div[5]/div/div[1]/ul/li[2]').click()
time.sleep(2)
driver.find_element_by_xpath('/html/body/div[4]/div/ul/li[2]/a').click()
time.sleep(2)
# Enter the data-plan iframe.
driver.switch_to.frame('flowiframe')
driver.find_element_by_xpath('//div[@id="phoneitem"]/div/input').send_keys('15928561321')
time.sleep(2)
driver.find_element_by_xpath('//div[@id="flowItem"]/div/ul/li[3]').click()
time.sleep(2)
# Leave the iframe.
driver.switch_to.parent_frame()
# Phone-credit recharge.
driver.find_element_by_xpath('/html/body/div[5]/div/div[1]/ul/li[1]').click()
time.sleep(2)
# Enter the recharge iframe.
driver.switch_to.frame('fast-cziframe')
driver.find_element_by_xpath('//div[@id="phoneitem"]/div/input').send_keys('15928561321')
time.sleep(1)
driver.find_element_by_xpath('//div[@id="rechargeItem"]/div/ul/li[3]').click()
time.sleep(1)
driver.find_element_by_xpath('//div[@id="submitItem"]/div/input').click()
time.sleep(2)
# Leave the iframe.
driver.switch_to.parent_frame()
time.sleep(5)
driver.quit() | {"/test_case/test_zhuce.py": ["/driver/browser.py", "/lib/utils.py", "/page/zhuce_page.py"], "/dame/test_suite_a.py": ["/dame/test_dame.py", "/dame/test_dame_baidu_sreach.py"], "/page/zhuce_page.py": ["/page/base_page.py"], "/test_case/test_login.py": ["/driver/browser.py", "/page/login_page.py"]} |
62,130 | yss-810/test | refs/heads/master | /page/base_page.py | class BasePage():
    def open(self):
        """Navigate the bound webdriver to this page object's ``url``.

        Subclasses are expected to set ``self.driver`` and ``self.url``
        before calling this.
        """
        self.driver.get(self.url)
62,131 | yss-810/test | refs/heads/master | /dame/run_all.py | import unittest
from HTMLTestRunner import HTMLTestRunner
# Discover every test*.py module under the project root.
discover=unittest.defaultTestLoader.discover(start_dir="../", pattern='test*.py')
# runner=unittest.TextTestRunner()
# runner.run(discover)
# Run the discovered suite and write an HTML report.
with open("report.html", "wb")as file:
    runner=HTMLTestRunner(stream=file,  # note the indentation
                          description="自动化测试报告详情",
                          title="ECShop自动化测试报告")
runner.run(discover) | {"/test_case/test_zhuce.py": ["/driver/browser.py", "/lib/utils.py", "/page/zhuce_page.py"], "/dame/test_suite_a.py": ["/dame/test_dame.py", "/dame/test_dame_baidu_sreach.py"], "/page/zhuce_page.py": ["/page/base_page.py"], "/test_case/test_login.py": ["/driver/browser.py", "/page/login_page.py"]} |
62,132 | yss-810/test | refs/heads/master | /dame/demo_jd_seach.py | import time
from selenium import webdriver
# JD search demo: search for a phone and open a specific result link.
driver = webdriver.Chrome()
driver.maximize_window()
driver.get("http://www.jd.com")
driver.implicitly_wait(10)  # implicit wait
driver.find_element_by_id('key').send_keys('手机')
driver.implicitly_wait(10)
driver.find_element_by_class_name('button').click()
driver.implicitly_wait(10)
driver.find_element_by_partial_link_text('荣耀Play4T Pro 麒麟810芯片').click()
time.sleep(10)
driver.quit() | {"/test_case/test_zhuce.py": ["/driver/browser.py", "/lib/utils.py", "/page/zhuce_page.py"], "/dame/test_suite_a.py": ["/dame/test_dame.py", "/dame/test_dame_baidu_sreach.py"], "/page/zhuce_page.py": ["/page/base_page.py"], "/test_case/test_login.py": ["/driver/browser.py", "/page/login_page.py"]} |
62,133 | yss-810/test | refs/heads/master | /script/login.py | import time
from selenium import webdriver
from selenium.webdriver.common.by import By
# Storefront login via absolute XPaths.
driver=webdriver.Chrome()
# Open the storefront.
driver.get('http://192.168.4.223/upload/')
# Log in: open the login link, fill credentials, submit.
driver.find_element(By.XPATH,('//*[@id="ECS_MEMBERZONE"]/a[1]')).click()
driver.find_element(By.XPATH,('/html/body/div[5]/div[3]/div[1]/form/table/tbody/tr[1]/td[2]/input')).send_keys('admin3')
driver.find_element(By.XPATH,('/html/body/div[5]/div[3]/div[1]/form/table/tbody/tr[2]/td[2]/input')).send_keys('LS514320ls')
driver.find_element(By.XPATH,('/html/body/div[5]/div[3]/div[1]/form/table/tbody/tr[4]/td[2]/input[3]')).click()
time.sleep(3)
driver.quit() | {"/test_case/test_zhuce.py": ["/driver/browser.py", "/lib/utils.py", "/page/zhuce_page.py"], "/dame/test_suite_a.py": ["/dame/test_dame.py", "/dame/test_dame_baidu_sreach.py"], "/page/zhuce_page.py": ["/page/base_page.py"], "/test_case/test_login.py": ["/driver/browser.py", "/page/login_page.py"]} |
62,134 | yss-810/test | refs/heads/master | /dame/test_suite_a.py | import unittest
from dame.test_dame import Dame_a
from dame.test_dame_baidu_sreach import BaiDuTestCase
# Hand-picked suite: one Baidu search case plus one demo case.
suite=unittest.TestSuite()
suite.addTest(BaiDuTestCase("test_baidu_search"))
suite.addTest(Dame_a("test_A"))
runner=unittest.TextTestRunner()
runner.run(suite)
| {"/test_case/test_zhuce.py": ["/driver/browser.py", "/lib/utils.py", "/page/zhuce_page.py"], "/dame/test_suite_a.py": ["/dame/test_dame.py", "/dame/test_dame_baidu_sreach.py"], "/page/zhuce_page.py": ["/page/base_page.py"], "/test_case/test_login.py": ["/driver/browser.py", "/page/login_page.py"]} |
62,135 | yss-810/test | refs/heads/master | /lib/utils.py | import xlrd
import xlwt
# import
def read_excel(file_path=r'F:\gitroot\Autoproject\data\data.xlsx', sheet_name='register'):
    """Read registration test data from an Excel workbook.

    Generalised: the workbook path and worksheet name are now parameters
    with the original hard-coded values as defaults, so existing
    ``read_excel()`` calls behave exactly as before.

    Args:
        file_path: path to the .xlsx workbook.
        sheet_name: worksheet to read.

    Returns:
        list[list]: one entry per data row (header row skipped), columns
        0-3, with column 3 (the mobile number) normalised from Excel's
        float representation back to an integer string.
    """
    workbook = xlrd.open_workbook(file_path)  # open the workbook
    sheet_names = workbook.sheet_names()  # names of all worksheets
    print('获取所有工作表的名字', sheet_names)
    sheet = workbook.sheet_by_name(sheet_name)
    rows = sheet.nrows
    cols = sheet.ncols
    print('总行数=', rows, '总列数=', cols)
    # Collect every data row (row 0 is the header).
    content = []
    for line in range(1, rows):
        lines = sheet.row_values(line, 0, 4)
        # Excel stores the phone number as a float; restore the digit string.
        lines[3] = str(int(lines[3]))
        content.append(lines)
        print('行内容', lines)
    return content
# NOTE(review): executes at import time against a hard-coded local path;
# consider moving under an `if __name__ == '__main__':` guard.
content=read_excel()
print('读取成功',content)
# def read_csv(): | {"/test_case/test_zhuce.py": ["/driver/browser.py", "/lib/utils.py", "/page/zhuce_page.py"], "/dame/test_suite_a.py": ["/dame/test_dame.py", "/dame/test_dame_baidu_sreach.py"], "/page/zhuce_page.py": ["/page/base_page.py"], "/test_case/test_login.py": ["/driver/browser.py", "/page/login_page.py"]} |
62,136 | yss-810/test | refs/heads/master | /page/zhuce_page.py | from selenium.webdriver.common.by import By
from page.base_page import BasePage
class ZhucePage(BasePage):
    """Page object for the ECShop registration form."""

    def __init__(self, driver):
        self.driver = driver
        # Locators: (strategy, selector) for every form field.
        self.locator_ele_username = (By.NAME, ("username"))              # user name
        self.locator_ele_email = (By.NAME, ("email"))                    # e-mail
        self.locator_ele_password = (By.NAME, ("password"))              # password
        self.locator_ele_confirm_password = (By.NAME, ("confirm_password"))  # confirm password
        self.locator_ele_qq = (By.NAME, ("extend_field2"))               # QQ number
        self.locator_ele_mobile = (By.NAME, ("extend_field5"))           # mobile number
        self.locator_ele_Submit = (By.NAME, ("Submit"))                  # submit button
        self.locator_ele_assert = (By.XPATH, ('//font[@id="ECS_MEMBERZONE"]/a[1]'))  # assertion target
        self.url = 'http://localhost/upload/user.php?act=register'

    def _find(self, locator):
        """Locate a single element from a (by, value) pair."""
        return self.driver.find_element(*locator)

    def ele_username(self, username):
        self._find(self.locator_ele_username).send_keys(username)

    def ele_email(self, email):
        self._find(self.locator_ele_email).send_keys(email)

    def ele_password(self, password):
        self._find(self.locator_ele_password).send_keys(password)

    def ele_confirm_password(self, confirm_password):
        self._find(self.locator_ele_confirm_password).send_keys(confirm_password)

    def ele_qq(self, qq):
        self._find(self.locator_ele_qq).send_keys(qq)

    def ele_mobile(self, mobile):
        self._find(self.locator_ele_mobile).send_keys(mobile)

    def ele_Submit(self):
        self._find(self.locator_ele_Submit).click()

    def ele_assert(self):
        """Return the member-zone link text shown after registration."""
        return self._find(self.locator_ele_assert).text

    def zhuce(self, username, email, password, confirm_password, qq, mobile):
        """Open the page, fill every field in order, submit, return shown name."""
        self.open()
        for action, value in (
            (self.ele_username, username),
            (self.ele_email, email),
            (self.ele_password, password),
            (self.ele_confirm_password, confirm_password),
            (self.ele_qq, qq),
            (self.ele_mobile, mobile),
        ):
            action(value)
        self.ele_Submit()
        return self.ele_assert()
| {"/test_case/test_zhuce.py": ["/driver/browser.py", "/lib/utils.py", "/page/zhuce_page.py"], "/dame/test_suite_a.py": ["/dame/test_dame.py", "/dame/test_dame_baidu_sreach.py"], "/page/zhuce_page.py": ["/page/base_page.py"], "/test_case/test_login.py": ["/driver/browser.py", "/page/login_page.py"]} |
62,137 | yss-810/test | refs/heads/master | /driver/browser.py | from selenium import webdriver
"""封装浏览器驱动"""
def chrome_browser():
    """Return a maximised Chrome driver with a 30-second implicit wait."""
    browser = webdriver.Chrome()
    browser.maximize_window()
    browser.implicitly_wait(30)
    return browser
def firefox_browser():
    """Return a maximised Firefox driver with a 30-second implicit wait."""
    browser = webdriver.Firefox()
    browser.maximize_window()
    browser.implicitly_wait(30)
    return browser
# chrome_browser() | {"/test_case/test_zhuce.py": ["/driver/browser.py", "/lib/utils.py", "/page/zhuce_page.py"], "/dame/test_suite_a.py": ["/dame/test_dame.py", "/dame/test_dame_baidu_sreach.py"], "/page/zhuce_page.py": ["/page/base_page.py"], "/test_case/test_login.py": ["/driver/browser.py", "/page/login_page.py"]} |
62,138 | yss-810/test | refs/heads/master | /dame/demo_ecshop_zhuce.py | #有问题
import time
from selenium.webdriver.support.select import Select
from selenium import webdriver

# Storefront registration demo (original file is marked as having problems).
driver = webdriver.Chrome()
driver.maximize_window()
# Open the storefront.
driver.get('http://192.168.4.223/upload/')
driver.implicitly_wait(10)  # implicit wait
time.sleep(2)
# Open the registration form.
driver.find_element_by_link_text('注册').click()
driver.implicitly_wait(10)  # implicit wait
# Fill in every field.
driver.find_element_by_name("username").send_keys('ysss')
driver.find_element_by_name("email").send_keys('120537114@qq.com')
driver.find_element_by_name("password").send_keys('yss123321')
driver.find_element_by_name("confirm_password").send_keys('yss123321')
driver.find_element_by_name("extend_field1").send_keys('120537114@qq.com')
driver.find_element_by_name("extend_field2").send_keys('123321')
driver.find_element_by_name("extend_field3").send_keys('123321')
driver.find_element_by_name("extend_field4").send_keys('15928561321')
driver.find_element_by_name("extend_field5").send_keys('15928561321')
# Pick a security question from the dropdown.
question=driver.find_element_by_name("sel_question")
xuanzeqi=Select(question)
xuanzeqi.select_by_index(1)
driver.find_element_by_name("passwd_answer").send_keys('2020')
# Submit the form.
driver.find_element_by_name("Submit").click()
time.sleep(5)
driver.quit() | {"/test_case/test_zhuce.py": ["/driver/browser.py", "/lib/utils.py", "/page/zhuce_page.py"], "/dame/test_suite_a.py": ["/dame/test_dame.py", "/dame/test_dame_baidu_sreach.py"], "/page/zhuce_page.py": ["/page/base_page.py"], "/test_case/test_login.py": ["/driver/browser.py", "/page/login_page.py"]} |
62,139 | yss-810/test | refs/heads/master | /test_case/test_login.py | import unittest
import time
from selenium import webdriver
from selenium.webdriver.common.by import By
from driver.browser import chrome_browser
from page.login_page import LoginPage
class LoginTestCase(unittest.TestCase):
    """Login flow: authenticate and verify the displayed username."""

    def setUp(self) -> None:
        self.driver = chrome_browser()

    def tearDown(self) -> None:
        self.driver.quit()

    def test_login(self):
        page = LoginPage(self.driver)
        outcome = page.login('yss', 'yss123321')
        time.sleep(3)
        print(outcome)
        # Expected member name shown after a successful login.
        self.assertEqual('yss', outcome)
        # self.assertEqual('admin3', outcome)
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| {"/test_case/test_zhuce.py": ["/driver/browser.py", "/lib/utils.py", "/page/zhuce_page.py"], "/dame/test_suite_a.py": ["/dame/test_dame.py", "/dame/test_dame_baidu_sreach.py"], "/page/zhuce_page.py": ["/page/base_page.py"], "/test_case/test_login.py": ["/driver/browser.py", "/page/login_page.py"]} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.