index
int64 | repo_name
string | branch_name
string | path
string | content
string | import_graph
string |
|---|---|---|---|---|---|
33,552,793
|
nvvaulin/medical_imaging
|
refs/heads/master
|
/models/__init__.py
|
from .basic_classifier import BasicClassifierModel
|
{"/models/__init__.py": ["/models/basic_classifier.py", "/models/svd_densenet.py"], "/utils/__init__.py": ["/utils/load_object.py"], "/train.py": ["/config.py", "/models/__init__.py", "/utils/existed_checkpoint.py", "/datasets/__init__.py", "/utils/__init__.py"], "/datasets/chest_xray.py": ["/datasets/basic_dataset.py"], "/datasets/covid_aid.py": ["/datasets/basic_dataset.py"], "/datasets/__init__.py": ["/datasets/coronahack.py", "/datasets/chest_xray.py", "/datasets/join_datasets.py", "/datasets/covid_chest_xray.py", "/datasets/covid_aid.py"], "/shared_train.py": ["/models/__init__.py", "/utils/existed_checkpoint.py", "/datasets/__init__.py", "/utils/__init__.py"], "/datasets/covid_chest_xray.py": ["/datasets/basic_dataset.py"], "/datasets/coronahack.py": ["/datasets/basic_dataset.py"]}
|
33,613,454
|
mverasotelo/darwinstudios
|
refs/heads/main
|
/darwinstudiosApp/views.py
|
from django.http import HttpResponse
import datetime
from django.template import Template,loader
from django.template.loader import get_template
from django.shortcuts import render
from django.core.mail import send_mail
from .forms import FormContacto
def index(request):
return render(request,"index.html")
def espacios(request):
return render(request,"espacios.html")
def trabajos(request):
return render(request,"trabajos.html")
def nosotros(request):
return render(request,"nosotros.html")
def contacto(request):
if request.method=="POST":
formulario=FormContacto(request.POST)
if formulario.is_valid():
formulario.save()
nombre=formulario.cleaned_data['nombre']
email=formulario.cleaned_data['email']
telefono=str(formulario.cleaned_data['telefono'])
mensaje=formulario.cleaned_data['mensaje']
confirmacion="Tu mensaje ha sido enviado"
send_mail("Consulta de "+nombre, "\n Nombre: "+nombre+"\n Email: "+email+"\n Teléfono: "+telefono+"\n Mensaje: "+mensaje, "merverasotelo@gmail.com", [""], fail_silently=False)
return render(request,"contacto.html", {"formulario": FormContacto(), "confirmacion":confirmacion})
else:
formulario=FormContacto()
mensaje=""
return render(request,"contacto.html",{"formulario": formulario})
|
{"/darwinstudiosApp/admin.py": ["/darwinstudiosApp/models.py"], "/darwinstudiosApp/urls.py": ["/darwinstudiosApp/views.py"]}
|
33,613,455
|
mverasotelo/darwinstudios
|
refs/heads/main
|
/darwinstudiosApp/urls.py
|
from django.contrib import admin
from django.urls import path
from darwinstudiosApp.views import index, espacios, nosotros, trabajos, contacto
urlpatterns = [
path('', index, name="Home"),
path('espacios/', espacios, name="Espacios"),
path('nosotros/', nosotros, name="Nosotros"),
path('trabajos/', trabajos, name="Trabajos"),
path('contacto/', contacto, name="Contacto")
]
|
{"/darwinstudiosApp/admin.py": ["/darwinstudiosApp/models.py"], "/darwinstudiosApp/urls.py": ["/darwinstudiosApp/views.py"]}
|
33,613,456
|
mverasotelo/darwinstudios
|
refs/heads/main
|
/darwinstudiosApp/apps.py
|
from django.apps import AppConfig
class DarwinstudiosappConfig(AppConfig):
name = 'darwinstudiosApp'
|
{"/darwinstudiosApp/admin.py": ["/darwinstudiosApp/models.py"], "/darwinstudiosApp/urls.py": ["/darwinstudiosApp/views.py"]}
|
33,615,563
|
Talanoc/Papybot
|
refs/heads/main
|
/wikipedia.py
|
class Wikipedia:
|
{"/test_methods.py": ["/geocoding.py", "/traitement_question.py", "/wiki.py"], "/app.py": ["/traitement_question.py", "/geocoding.py", "/wiki.py"]}
|
33,615,564
|
Talanoc/Papybot
|
refs/heads/main
|
/app.py
|
# -*- coding: utf-8 -*-
"""
Created on Sat Mar 20 22:01:21 2021
@author:
"""
from flask import Flask, render_template,jsonify,request,url_for
from flask_cors import CORS
from stop_words_french import stop_words
app = Flask(__name__)
CORS(app)
@app.route('/')
def index():
return render_template('index.html')
@app.route('/question/<user_question>',methods=['get','post'])
def question(user_question):
user_question_sw=[]
user_question=user_question.split()
user_question_sw=[element for element in user_question if element not in stop_words]
return jsonify(user_question_sw)
app.run(debug=True)
|
{"/test_methods.py": ["/geocoding.py", "/traitement_question.py", "/wiki.py"], "/app.py": ["/traitement_question.py", "/geocoding.py", "/wiki.py"]}
|
33,615,565
|
Talanoc/Papybot
|
refs/heads/main
|
/papybot.py
|
# -*- coding: utf-8 -*-
"""
Created on Sat Mar 20 22:01:21 2021
@author: 33633
"""
from flask import Flask, render_template
from flask_cors import CORS
app = Flask(__name__)
CORS(app)
@app.route('/')
def index():
return render_template('index.html')
@app.route('/test')
def test():
return "toto"
app.run(debug=True)
|
{"/test_methods.py": ["/geocoding.py", "/traitement_question.py", "/wiki.py"], "/app.py": ["/traitement_question.py", "/geocoding.py", "/wiki.py"]}
|
33,697,891
|
Raun98/Acviss_Assignment
|
refs/heads/main
|
/acviss_app/acvUI/views.py
|
from django.contrib.auth import login, authenticate
from django.contrib.auth.forms import UserCreationForm
from django.shortcuts import render, redirect
from .forms import NumberForm
from .services import populateDB, register_batch_update, load_user_table
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework.permissions import IsAuthenticated
class ApiView(APIView):
permission_classes = (IsAuthenticated,)
def get(self, request):
content = {'message': 'Hello, World!'}
return Response(content)
class SearchView(APIView):
#permission_classes = (IsAuthenticated,)
def get(self, request):
print("search started")
resp = []
search = request.query_params.get('name')
name = request.user
if name.is_authenticated:
name = str(name)
content = load_user_table(name)
print(content)
for i,j in content:
if name == i:
resp.append((i,j))
return Response(resp)
def signup(request):
if request.method == 'POST':
form = UserCreationForm(request.POST)
if form.is_valid():
form.save()
username = form.cleaned_data.get('username')
raw_password = form.cleaned_data.get('password1')
user = authenticate(username=username, password=raw_password)
login(request, user)
return redirect('home')
else:
form = UserCreationForm()
return render(request, 'signup.html', {'form': form})
def FormPage(request):
name = request.user
if name.is_authenticated:
name = str(name)
else:
name = 'Guest'
#number = NumberForm.number_of_codes
print(request.user)
print('test triggered')
print(type(str(request.user)))
result = None
if request.method == 'POST':
form = NumberForm(request.POST)
if form.is_valid():
batch_name = form.cleaned_data['batch_name']
number_of_codes = form.cleaned_data['number_of_codes']
#print(batch_name," ", number_of_codes)
populateDB(number_of_codes, batch_name)
register_batch_update(name, batch_name)
result = load_user_table(name)
form=NumberForm()
return render(request, 'index.html', {'form': form,'result':result})
def HomePage(request):
name = request.user
result = None
if name.is_authenticated:
name = str(name)
result = load_user_table(name)
#print(name," ", result)
return render(request, 'index.html', {'result' : result})
|
{"/acviss_app/acvUI/views.py": ["/acviss_app/acvUI/forms.py", "/acviss_app/acvUI/services.py"]}
|
33,762,348
|
dvaibhavim/NewEnrollapp
|
refs/heads/main
|
/Enrollapp/migrations/0007_auto_20210414_2304.py
|
# Generated by Django 3.1.3 on 2021-04-14 17:34
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('Enrollapp', '0006_auto_20210414_2159'),
]
operations = [
migrations.AddField(
model_name='userprofileinfo',
name='school_namme',
field=models.CharField(default='NagaEd Digital School', max_length=200),
preserve_default=False,
),
migrations.AlterField(
model_name='userprofileinfo',
name='gender',
field=models.CharField(max_length=1),
),
]
|
{"/Enrollment/urls.py": ["/Enrollapp/views.py"], "/Enrollapp/admin.py": ["/Enrollapp/models.py"], "/Enrollapp/forms.py": ["/Enrollapp/models.py"], "/Enrollapp/views.py": ["/Enrollapp/forms.py", "/Enrollapp/models.py"]}
|
33,762,349
|
dvaibhavim/NewEnrollapp
|
refs/heads/main
|
/Enrollapp/migrations/0004_userprofileinfo_sis_id.py
|
# Generated by Django 3.1.3 on 2021-04-13 12:00
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('Enrollapp', '0003_userprofileinfo_standard'),
]
operations = [
migrations.AddField(
model_name='userprofileinfo',
name='sis_id',
field=models.IntegerField(default=1),
),
]
|
{"/Enrollment/urls.py": ["/Enrollapp/views.py"], "/Enrollapp/admin.py": ["/Enrollapp/models.py"], "/Enrollapp/forms.py": ["/Enrollapp/models.py"], "/Enrollapp/views.py": ["/Enrollapp/forms.py", "/Enrollapp/models.py"]}
|
33,762,350
|
dvaibhavim/NewEnrollapp
|
refs/heads/main
|
/Enrollapp/migrations/0006_auto_20210414_2159.py
|
# Generated by Django 3.1.3 on 2021-04-14 16:29
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('Enrollapp', '0005_auto_20210414_2157'),
]
operations = [
migrations.AlterField(
model_name='userprofileinfo',
name='Role',
field=models.CharField(max_length=20),
),
migrations.AlterField(
model_name='userprofileinfo',
name='standard',
field=models.CharField(max_length=4),
),
]
|
{"/Enrollment/urls.py": ["/Enrollapp/views.py"], "/Enrollapp/admin.py": ["/Enrollapp/models.py"], "/Enrollapp/forms.py": ["/Enrollapp/models.py"], "/Enrollapp/views.py": ["/Enrollapp/forms.py", "/Enrollapp/models.py"]}
|
33,762,351
|
dvaibhavim/NewEnrollapp
|
refs/heads/main
|
/Enrollapp/migrations/0003_userprofileinfo_standard.py
|
# Generated by Django 3.1.3 on 2021-03-24 11:55
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('Enrollapp', '0002_schools'),
]
operations = [
migrations.AddField(
model_name='userprofileinfo',
name='standard',
field=models.CharField(choices=[('8', 'Class 8'), ('9', 'Class 9'), ('10', 'Class 10'), ('11', 'Class 11'), ('12', 'Class 12')], default=8, max_length=4),
preserve_default=False,
),
]
|
{"/Enrollment/urls.py": ["/Enrollapp/views.py"], "/Enrollapp/admin.py": ["/Enrollapp/models.py"], "/Enrollapp/forms.py": ["/Enrollapp/models.py"], "/Enrollapp/views.py": ["/Enrollapp/forms.py", "/Enrollapp/models.py"]}
|
33,762,352
|
dvaibhavim/NewEnrollapp
|
refs/heads/main
|
/Enrollapp/migrations/0005_auto_20210414_2157.py
|
# Generated by Django 3.1.3 on 2021-04-14 16:27
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('Enrollapp', '0004_userprofileinfo_sis_id'),
]
operations = [
migrations.AlterField(
model_name='userprofileinfo',
name='date_of_birth',
field=models.CharField(max_length=8),
),
]
|
{"/Enrollment/urls.py": ["/Enrollapp/views.py"], "/Enrollapp/admin.py": ["/Enrollapp/models.py"], "/Enrollapp/forms.py": ["/Enrollapp/models.py"], "/Enrollapp/views.py": ["/Enrollapp/forms.py", "/Enrollapp/models.py"]}
|
33,762,353
|
dvaibhavim/NewEnrollapp
|
refs/heads/main
|
/Enrollapp/views.py
|
from django.shortcuts import render
from django.http import JsonResponse
from Enrollapp.forms import UserForm,UserProfileInfoForm
from canvasapi import Canvas
from django.views.decorators.csrf import csrf_exempt
from Enrollapp.models import UserProfileInfo, Schools
from django.contrib.auth.models import User
# Create your views here.
@csrf_exempt
def success(request):
return render(request,'enrollapp/update_school_sucess.html')
# Create your views here.
def handle404(request, exception):
return render(request, 'enrollapp/404.html', status=404)
#logic to create account in NagaedEd digital and enroll user to corresponsing class courses
@csrf_exempt
def create_account_NDS(request):
if 'term' in request.GET:
qs = Schools.objects.filter(Name__istartswith = request.GET.get('term')) #i stands for case insensitive
school_name = list() #the url in javascript expects an json response
for school in qs:
school_name.append(school.Name)
return JsonResponse(school_name, safe=False)
elif request.method=="POST":
registered = False
user = User.objects.create_user(request.POST.get("q55_emailAddress"), request.POST.get("q55_emailAddress"))
user.set_password(user.set_unusable_password())
user.save()
sis_id = UserProfileInfo.objects.order_by('sis_id').last()
profile = UserProfileInfo.objects.create(user = user,date_of_birth=request.POST.get("q39_birthDate39[day]")+"/"+request.POST.get("q39_birthDate39[month]")+"/"+request.POST.get("q39_birthDate39[year]"),firstname = request.POST.get("q56_name[first]"),lastname = request.POST.get("q56_name[last]"),gender = request.POST.get("Gender"),Role = request.POST.get("role"),standard = request.POST.get("class_school"),sis_id = int(sis_id.sis_id)+1, school_namme = school_name )
profile.save()
API_URL = "https://learn.nagaed.com/"
# Canvas API key
API_KEY = "puoPtPQS1lGkhuaPEhmTreh2MZTtj1clp4OEiZ1UVVpugZBOn76WBue5Zf3MKBl5"
#SIS_ID LOGIC
# Initialize a new Canvas object
canvas = Canvas(API_URL, API_KEY)
if request.method=="POST":
account = canvas.get_account(1)
sis_user_id = int(profile.sis_id)
#sis_user_id generation logic
if sis_user_id <10:
Sis_id = 'S00000'+str(sis_user_id)
elif sis_user_id <100:
Sis_id = 'S0000'+str(sis_user_id)
else:
Sis_id = 'S000'+str(sis_user_id)
user_Canvas = account.create_user(
user={
'name': profile.firstname + " " + profile.lastname,
'skip_registration': False
},
pseudonym={
'sis_user_id': Sis_id,
'unique_id' :request.POST.get("q55_emailAddress"),
'send_confirmation':True,
'address':request.POST.get("q55_emailAddress")
},
communication_channel={
'type': 'email',
'skip_confirmation': False
}
)
registered = True
school_name = "NagaEd Digital School"
sub_accounts = account.get_subaccounts()
for accounts in sub_accounts:
if accounts.name.lower() == school_name.lower():
sub_account = accounts
#logic to get all courses for the standard of corresponding school school_name
courses = sub_account.get_courses()
#get the school code and append the standard to it
standard = request.POST.get("class_school")
standard = int(standard)
print(sub_account, standard)
#school_code = "CHS"
try:
if standard<10:
std ="CL0"+str(standard)
else:
std = "CL"+str(standard)
for c in courses:
if std in c.sis_course_id:
if not c.blueprint:
c.enroll_user(user_Canvas.id)
except Exception as e:
print(e)
else:
return render(request,'enrollapp/Enroll_form_Nagaed_Digital.html')
return render(request,'enrollapp/update_school_sucess.html',{"name":profile.firstname + " " + profile.lastname,"school":school_name})
#logic to register Naged Digital student to another school
@csrf_exempt
def create_account_CHSS(request):
if request.method=="POST":
registered = False
user = User.objects.create_user(request.POST.get("q55_emailAddress"), request.POST.get("q55_emailAddress"))
user.set_password(user.set_unusable_password())
user.save()
sis_id = UserProfileInfo.objects.order_by('sis_id').last()
school_name = "Christian Standard Higher Secondary School"
profile = UserProfileInfo.objects.create(user = user,date_of_birth=request.POST.get("q39_birthDate39[day]")+"/"+request.POST.get("q39_birthDate39[month]")+"/"+request.POST.get("q39_birthDate39[year]"),firstname = request.POST.get("q56_name[first]"),lastname = request.POST.get("q56_name[last]"),gender = request.POST.get("Gender"),Role = request.POST.get("role"),standard = request.POST.get("class_school"),sis_id = int(sis_id.sis_id)+1, school_namme = school_name )
profile.save()
API_URL = "https://learn.nagaed.com/"
# Canvas API key
API_KEY = "puoPtPQS1lGkhuaPEhmTreh2MZTtj1clp4OEiZ1UVVpugZBOn76WBue5Zf3MKBl5"
#SIS_ID LOGIC
# Initialize a new Canvas object
canvas = Canvas(API_URL, API_KEY)
if request.method=="POST":
account = canvas.get_account(1)
sis_user_id = int(profile.sis_id)
#sis_user_id generation logic
if sis_user_id <10:
Sis_id = 'S00000'+str(sis_user_id)
elif sis_user_id <100:
Sis_id = 'S0000'+str(sis_user_id)
else:
Sis_id = 'S000'+str(sis_user_id)
user_Canvas = account.create_user(
user={
'name': profile.firstname + " " + profile.lastname,
'skip_registration': False
},
pseudonym={
'sis_user_id': Sis_id,
'unique_id' :request.POST.get("q55_emailAddress"),
'send_confirmation':True,
'address':request.POST.get("q55_emailAddress")
},
communication_channel={
'type': 'email',
'skip_confirmation': False
}
)
registered = True
sub_accounts = account.get_subaccounts()
for accounts in sub_accounts:
if accounts.name.lower() == school_name.lower():
sub_account = accounts
#logic to get all courses for the standard of corresponding school school_name
courses = sub_account.get_courses()
#get the school code and append the standard to it
standard = request.POST.get("class_school")
standard = int(standard)
#school_code = "CHS"
try:
if standard<10:
std ="CL0"+str(standard)
else:
std = "CL"+str(standard)
for c in courses:
if std in c.sis_course_id:
if not c.blueprint:
c.enroll_user(user_Canvas.id)
except Exception as e:
print(e)
else:
return render(request,'enrollapp/enroll_form.html')
return render(request,'enrollapp/update_school_sucess.html',{"name":profile.firstname + " " + profile.lastname,"school":school_name})
"""function to create new user and send registration email. If the account gets created, course is also assigned to the student. """
@csrf_exempt
def create_account(request):
#Canvas Api Url
API_URL = "https://learn.nagaed.com/"
# Canvas API key
API_KEY = "puoPtPQS1lGkhuaPEhmTreh2MZTtj1clp4OEiZ1UVVpugZBOn76WBue5Zf3MKBl5"
# Initialize a new Canvas object
canvas = Canvas(API_URL, API_KEY)
if request.method=="POST":
account = canvas.get_account(1)
#sis_user_id generation logic
user_Canvas = account.create_user(
user={
'name': request.POST.get("q56_name[first]")+" "+ request.POST.get("q56_name[last]"), # profile.firstname + " " + profile.lastname,
'skip_registration': False
},
pseudonym={
'sis_user_id': 'vd_test6',
'unique_id' :request.POST.get("q55_emailAddress"),
'send_confirmation':True,
'address':request.POST.get("q55_emailAddress")
},
communication_channel={
'type': 'email',
'skip_confirmation': False
}
)
school_name = request.POST.get("schoolname")
sub_accounts = account.get_subaccounts()
for accounts in sub_accounts:
print(accounts)
if accounts.name.lower() == school_name.lower():
sub_account = accounts
#logic to get all courses for the standard of corresponding school school_name
print(sub_account)
courses = sub_account.get_courses()
#get the school code and append the standard to it
school_code = "CHS"
for c in courses:
if school_code in c.sis_course_id:
if not c.blueprint:
c.enroll_user(user_Canvas.id)
return render(request,'enrollapp/update_school_sucess.html')
|
{"/Enrollment/urls.py": ["/Enrollapp/views.py"], "/Enrollapp/admin.py": ["/Enrollapp/models.py"], "/Enrollapp/forms.py": ["/Enrollapp/models.py"], "/Enrollapp/views.py": ["/Enrollapp/forms.py", "/Enrollapp/models.py"]}
|
33,762,354
|
dvaibhavim/NewEnrollapp
|
refs/heads/main
|
/Enrollapp/models.py
|
from django.db import models
from django.contrib.auth.models import User
# Create your models here.
class UserProfileInfo(models.Model):
user = models.OneToOneField(User,on_delete=models.CASCADE)
firstname = models.CharField(max_length=20)
lastname = models.CharField(max_length=20)
#GENDER_CHOICES = (('M', 'Male'), ('F', 'Female'),)
gender = models.CharField(max_length=1)
date_of_birth = models.CharField(max_length=8)
#User_type_choices = (('Student','Student'),('Parent','Parent'), ('Teacher','Teacher'),('Administrator','Administrator'))
Role = models.CharField(max_length=20)
#CLASS_CHOICES = (('8','Class 8'),('9','Class 9'),('10','Class 10'),('11','Class 11'),('12','Class 12'))
standard = models.CharField(max_length=4)
sis_id = models.IntegerField(default=1)
school_namme = models.CharField(max_length=200)
class Schools(models.Model):
Name = models.CharField(max_length=500)
Reg_no = models.CharField(max_length=50)
def __str__(self):
return self.Name
|
{"/Enrollment/urls.py": ["/Enrollapp/views.py"], "/Enrollapp/admin.py": ["/Enrollapp/models.py"], "/Enrollapp/forms.py": ["/Enrollapp/models.py"], "/Enrollapp/views.py": ["/Enrollapp/forms.py", "/Enrollapp/models.py"]}
|
33,767,369
|
rishabhb63/PerSquareFoot-Online-Printing-Store
|
refs/heads/master
|
/persquarefoot/persquarefoot/accounts/migrations/0006_delete_emailmarketingsignup.py
|
# Generated by Django 3.1 on 2020-09-06 22:59
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('accounts', '0005_emailmarketingsignup'),
]
operations = [
migrations.DeleteModel(
name='EmailMarketingSignUp',
),
]
|
{"/shop/admin.py": ["/shop/models.py"], "/orders/admin.py": ["/orders/models.py"], "/shop/views.py": ["/shop/models.py"], "/orders/views.py": ["/orders/models.py"]}
|
33,767,370
|
rishabhb63/PerSquareFoot-Online-Printing-Store
|
refs/heads/master
|
/persquarefoot/persquarefoot/marketing/admin.py
|
from django.contrib import admin
from .models import EmailMarketingSignUp
# Register your models here.
class EmailMarketingSignUpAdmin(admin.ModelAdmin):
list_display = ['email', 'timestamp',]
class Meta:
model = EmailMarketingSignUp
admin.site.register(EmailMarketingSignUp, EmailMarketingSignUpAdmin)
|
{"/shop/admin.py": ["/shop/models.py"], "/orders/admin.py": ["/orders/models.py"], "/shop/views.py": ["/shop/models.py"], "/orders/views.py": ["/orders/models.py"]}
|
33,767,371
|
rishabhb63/PerSquareFoot-Online-Printing-Store
|
refs/heads/master
|
/persquarefoot/persquarefoot/persquarefoot/email_settings.py
|
host = "smtp.gmail.com"
user = "onlinepersquarefoot@gmail.com"
password = "jnkshgrhkjvnhxgf"
|
{"/shop/admin.py": ["/shop/models.py"], "/orders/admin.py": ["/orders/models.py"], "/shop/views.py": ["/shop/models.py"], "/orders/views.py": ["/orders/models.py"]}
|
33,767,372
|
rishabhb63/PerSquareFoot-Online-Printing-Store
|
refs/heads/master
|
/persquarefoot/persquarefoot/accounts/admin.py
|
from django.contrib import admin
from .models import UserStripe, EmailConfirmed
# Register your models here.
admin.site.register(UserStripe)
admin.site.register(EmailConfirmed)
|
{"/shop/admin.py": ["/shop/models.py"], "/orders/admin.py": ["/orders/models.py"], "/shop/views.py": ["/shop/models.py"], "/orders/views.py": ["/orders/models.py"]}
|
33,767,373
|
rishabhb63/PerSquareFoot-Online-Printing-Store
|
refs/heads/master
|
/persquarefoot/persquarefoot/orders/admin.py
|
from django.contrib import admin
from .models import Order, UserAddress, UserDefaultAddress
# Register your models here.
class UserAddressAdmin(admin.ModelAdmin):
class Meta:
model = UserAddress
admin.site.register(Order)
admin.site.register(UserAddress, UserAddressAdmin)
admin.site.register(UserDefaultAddress)
|
{"/shop/admin.py": ["/shop/models.py"], "/orders/admin.py": ["/orders/models.py"], "/shop/views.py": ["/shop/models.py"], "/orders/views.py": ["/orders/models.py"]}
|
33,767,374
|
rishabhb63/PerSquareFoot-Online-Printing-Store
|
refs/heads/master
|
/persquarefoot/persquarefoot/carts/admin.py
|
from django.contrib import admin
from .models import Cart, CartItems
# Register your models here.
class CartAdmin(admin.ModelAdmin):
class Meta:
model = Cart
admin.site.register(Cart, CartAdmin)
admin.site.register(CartItems)
|
{"/shop/admin.py": ["/shop/models.py"], "/orders/admin.py": ["/orders/models.py"], "/shop/views.py": ["/shop/models.py"], "/orders/views.py": ["/orders/models.py"]}
|
33,768,360
|
jeffreypaul15/sunpy
|
refs/heads/main
|
/sunpy/util/_table_attribute.py
|
"""
This file backports the TableAttribute functionality from astropy 4.1 it can be
removed when we depend on astropy 4.1.
"""
from copy import deepcopy
from astropy.table.table import QTable
try:
from astropy.table.table import TableAttribute
except ImportError:
class MetaAttribute:
"""
Descriptor to define custom attribute which gets stored in the object
``meta`` dict and can have a defined default.
This descriptor is intended to provide a convenient way to add attributes
to a subclass of a complex class such as ``Table`` or ``NDData``.
This requires that the object has an attribute ``meta`` which is a
dict-like object. The value of the MetaAttribute will be stored in a
new dict meta['__attributes__'] that is created when required.
Classes that define MetaAttributes are encouraged to support initializing
the attributes via the class ``__init__``. For example::
for attr in list(kwargs):
descr = getattr(self.__class__, attr, None)
if isinstance(descr, MetaAttribute):
setattr(self, attr, kwargs.pop(attr))
The name of a ``MetaAttribute`` cannot be the same as any of the following:
- Keyword argument in the owner class ``__init__``
- Method or attribute of the "parent class", where the parent class is
taken to be ``owner.__mro__[1]``.
:param default: default value
"""
def __init__(self, default=None):
self.default = default
def __get__(self, instance, owner):
# When called without an instance, return self to allow access
# to descriptor attributes.
if instance is None:
return self
# Get the __attributes__ dict and create if not there already.
attributes = instance.meta.setdefault('__attributes__', {})
try:
value = attributes[self.name]
except KeyError:
if self.default is not None:
attributes[self.name] = deepcopy(self.default)
# Return either specified default or None
value = attributes.get(self.name)
return value
def __set__(self, instance, value):
# Get the __attributes__ dict and create if not there already.
attributes = instance.meta.setdefault('__attributes__', {})
attributes[self.name] = value
def __set_name__(self, owner, name):
import inspect
params = [param.name for param in inspect.signature(owner).parameters.values()
if param.kind not in (inspect.Parameter.VAR_KEYWORD,
inspect.Parameter.VAR_POSITIONAL)]
# Reject names from existing params or best guess at parent class
if name in params or hasattr(owner.__mro__[1], name):
raise ValueError(f'{name} not allowed as {self.__class__.__name__}')
self.name = name
def __repr__(self):
return f'<{self.__class__.__name__} name={self.name} default={self.default}>'
class TableAttribute(MetaAttribute):
"""
Descriptor to define a custom attribute for a Table subclass.
The value of the ``TableAttribute`` will be stored in a dict named
``__attributes__`` that is stored in the table ``meta``. The attribute
can be accessed and set in the usual way, and it can be provided when
creating the object.
Defining an attribute by this mechanism ensures that it will persist if
the table is sliced or serialized, for example as a pickle or ECSV file.
See the `~astropy.utils.metadata.MetaAttribute` documentation for additional
details.
Parameters
----------
default : object
Default value for attribute
"""
class QTable(QTable):
def __init__(self, *args, **kwargs):
# Handle custom (subclass) table attributes that are stored in meta.
# These are defined as class attributes using the TableAttribute
# descriptor. Any such attributes get removed from kwargs here and
# stored for use after the table is otherwise initialized. Any values
# provided via kwargs will have precedence over existing values from
# meta (e.g. from data as a Table or meta via kwargs).
meta_table_attrs = {}
if kwargs:
for attr in list(kwargs):
descr = getattr(self.__class__, attr, None)
if isinstance(descr, TableAttribute):
meta_table_attrs[attr] = kwargs.pop(attr)
super().__init__(*args, **kwargs)
__all__ = ['QTable', 'TableAttribute']
|
{"/sunpy/instr/goes.py": ["/sunpy/extern/sunkit_instruments/goes_xrs/__init__.py"]}
|
33,768,361
|
jeffreypaul15/sunpy
|
refs/heads/main
|
/sunpy/visualization/_quadrangle.py
|
# Vendored from astropy.visualization.wcsaxes.patches
# Licensed under a 3-clause BSD style license - see Astropy's LICENSE.rst
#
# This file can be removed when our minimum Astropy dependency is >= 4.2
import numpy as np
from matplotlib.patches import Polygon
from astropy import units as u
__all__ = ['Quadrangle']
class Quadrangle(Polygon):
"""
Create a patch representing a latitude-longitude quadrangle.
The edges of the quadrangle lie on two lines of constant longitude and two
lines of constant latitude (or the equivalent component names in the
coordinate frame of interest, such as right ascension and declination).
Note that lines of constant latitude are not great circles.
Unlike `matplotlib.patches.Rectangle`, the edges of this patch will render
as curved lines if appropriate for the WCS transformation.
Parameters
----------
anchor : tuple or `~astropy.units.Quantity`
This can be either a tuple of two `~astropy.units.Quantity` objects, or
a single `~astropy.units.Quantity` array with two elements.
width : `~astropy.units.Quantity`
The width of the quadrangle in longitude (or, e.g., right ascension)
height : `~astropy.units.Quantity`
The height of the quadrangle in latitude (or, e.g., declination)
resolution : int, optional
The number of points that make up each side of the quadrangle -
increase this to get a smoother quadrangle.
vertex_unit : `~astropy.units.Unit`
The units in which the resulting polygon should be defined - this
should match the unit that the transformation (e.g. the WCS
transformation) expects as input.
Notes
-----
Additional keyword arguments are passed to `~matplotlib.patches.Polygon`
"""
def __init__(self, anchor, width, height, resolution=100, vertex_unit=u.degree, **kwargs):
# Extract longitude/latitude, either from a tuple of two quantities, or
# a single 2-element Quantity.
longitude, latitude = u.Quantity(anchor).to_value(vertex_unit)
# Convert the quadrangle dimensions to the appropriate units
width = width.to_value(vertex_unit)
height = height.to_value(vertex_unit)
# Create progressions in longitude and latitude
lon_seq = longitude + np.linspace(0, width, resolution + 1)
lat_seq = latitude + np.linspace(0, height, resolution + 1)
# Trace the path of the quadrangle
lon = np.concatenate([lon_seq[:-1],
np.repeat(lon_seq[-1], resolution),
np.flip(lon_seq[1:]),
np.repeat(lon_seq[0], resolution)])
lat = np.concatenate([np.repeat(lat_seq[0], resolution),
lat_seq[:-1],
np.repeat(lat_seq[-1], resolution),
np.flip(lat_seq[1:])])
# Create polygon vertices
vertices = np.array([lon, lat]).transpose()
super().__init__(vertices, **kwargs)
|
{"/sunpy/instr/goes.py": ["/sunpy/extern/sunkit_instruments/goes_xrs/__init__.py"]}
|
33,798,821
|
PatCondit03/PyDaVinci
|
refs/heads/master
|
/DaVinci.py
|
import PaintFunctions as PF
import time
import random
size_options = ['medium small','medium large','large']
brush_types_xy =['default brush','calligraphy brush 1','calligraphy brush 2',
'airbrush','oil brush','crayon','marker','natural pencil','oil brush',
'watercolor brush'
]
#in this file it will draw paintings using the current and upcoming functions avaliable in PaintFunctions.py.
fill_tool_xy = (379, 110)
pencil_tool_xy = (345, 110)
canvas_width = 1500
canvas_height = (250, 756) #we used a margin on either side of the height to avoid potential problems in the gui
#iteration variable
x = 0
while x==0:
#maybe input a status parameter in the function call below to not have it loop back through the color scheme
#options
current_colors_xy = PF.create_color_theme()
if current_colors_xy=='exit':
x = 0
else:
feedback = input('Proceed with selected color scheme? (y/n) \n>')
if feedback == 'y':
x = 10
while x > 0:
print(x)
x = x-1
time.sleep(1)
print('Painting started')
try:
# Painting types: Oil, crayon simple
current_brush_type = PF.select_brush(random.choice(brush_types_xy))
count = 0
while count < 300:
choice = random.randint(1,15)
if choice < 2:
PF.fill(current_colors_xy, fill_tool_xy, current_brush_type, canvas_width, canvas_height)
count = count + 1
elif choice < 4:
PF.select_size(random.choice(size_options))
count = count + 1
elif choice < 5:
current_brush_type = PF.select_brush(random.choice(brush_types_xy))
if current_brush_type=='airbrush':
PF.select_size('large')
PF.squarespiral(current_colors_xy, canvas_width, canvas_height)
choice+=1
elif choice < 7:
PF.horizontalLine(current_colors_xy, canvas_width, canvas_height)
count = count + 1
elif choice < 9:
PF.verticalLine(current_colors_xy, canvas_width, canvas_height)
count = count + 1
else:
PF.diamond(current_colors_xy, canvas_width, canvas_height)
count = count + 1
print('Program complete')
except:
print('Program cancelled')
|
{"/Final Project Main.py": ["/ChooseColor.py"], "/DaVinci.py": ["/PaintFunctions.py"], "/Master_PyDaVinci.py": ["/PaintFunctions.py"]}
|
33,798,822
|
PatCondit03/PyDaVinci
|
refs/heads/master
|
/PaintFunctions.py
|
import pyautogui as gui
import random as r
import time
# NOTE(review): this module-level `distance` appears unused — every drawing
# function below defines its own local `distance`; confirm before removing.
distance = 200
# color_horiz_location is the horizontal location of the color grids
# color_horiz_location = [1090, 1123, 1153, 1189, 1223, 1255, 1290, 1323, 1353, 1390]
# Screen coordinates (x, y) of each colour swatch in the MS Paint palette.
# The x values match the commented color_horiz_location grid above; y is
# 100 for the top palette row and 120 for the bottom row.
all_colors_xy = {
    'black': (1090, 100),
    'light_gray': (1123, 120),
    'gray': (1123, 100),
    'white': (1090, 120),
    'red': (1189, 100),
    'maroon': (1153, 100),
    'orange': (1223, 100),
    'light_orange': (1223, 120),
    'yellow': (1255, 100),
    'brown': (1153, 120),
    'pink': (1189, 120),
    'peach': (1255, 120),
    'green': (1290, 100),
    'light_green': (1290, 120),
    'blue': (1353, 100),
    'light_blue': (1323, 100),
    'sky': (1323, 120),
    'navy': (1353, 120),
    'purple': (1390, 100),
    'light_purple': (1390, 120)
}
def create_color_theme():
    """Interactively build the colour scheme for the current painting.

    Prompts the user to either accept a randomly generated scheme or type
    colour names from the library. Returns a dict mapping colour name ->
    (x, y) swatch position in MS Paint, or the string 'exit' if the user
    asked to quit.
    """
    print('Color Library:')
    print('black, gray, light gray, white')
    print('red, maroon')
    print('navy, blue, light blue, sky')
    print('green, light green')
    print('orange, light orange, yellow, brown')
    print('pink, peach, purple, light purple\n')
    print('Would you like to choose your own color scheme? (y/n)')
    choice = input('>')
    if choice == 'n':
        # Randomly generated scheme.
        current_colors_xy = {}
        count = 0
        # Names already chosen, used to reject duplicate random draws.
        current_colors_names = []
        # Number of extra colours (besides black) in this theme.
        number_current_colors = r.randint(2, 3)
        # Black is part of every colour theme.
        current_colors_xy['black'] = (1090, 100)
        current_colors_names.append('black')
        print(str(number_current_colors + 1) + ' colors chosen:\nblack')
        # Draw random colours until number_current_colors distinct new ones
        # are collected, printing each name as it is chosen.
        while count < number_current_colors:
            name, (place, row) = r.choice(list(all_colors_xy.items()))
            if name not in current_colors_names:
                current_colors_xy[name] = (place, row)
                print(name)
                # BUGFIX: the original appended the (x, y) tuple here, so the
                # duplicate check above never matched a colour name and the
                # same colour could be picked more than once.
                current_colors_names.append(name)
                count += 1
    else:
        # User-selected scheme.
        current_colors_xy = {}
        passvar = 0
        while passvar == 0:
            try:
                print('\nPlease enter the colors you would like used, separated by one space. or enter exit to end program')
                colors_desired = input('>').split()
                if str(colors_desired[0]) == 'exit':
                    return('exit')
                for color in colors_desired:
                    current_colors_xy[color] = all_colors_xy[color]
                current_colors_xy['black'] = all_colors_xy['black']
                print('success!')
                passvar = 1
            except (KeyError, IndexError):
                # KeyError: unknown colour name; IndexError: empty input.
                # (Narrowed from a bare except, which also swallowed
                # KeyboardInterrupt.)
                print('ERROR: could not understand your input. Lets try again.')
    return(current_colors_xy)
def select_brush(brush_type):
    """Select a brush in MS Paint's brushes drop-down.

    :param brush_type: name of the brush (a key of the table below)
    :return: the brush name, echoed back so callers can track the
        currently selected brush
    """
    # Screen coordinates of each brush entry in the opened brushes menu.
    # BUGFIX: the original listed 'oil brush' twice; the duplicate key was
    # silently dropped by the dict literal and has been removed.
    brush_types_xy = {
        'default brush': (475, 200),
        'calligraphy brush 1': (525, 200),
        'calligraphy brush 2': (600, 200),
        'airbrush': (650, 200),
        'oil brush': (475, 250),
        'crayon': (525, 250),
        'marker': (600, 250),
        'natural pencil': (650, 250),
        'watercolor brush': (475, 300),
    }
    # Open the brushes drop-down, then double-click the requested brush.
    brushes_xy = (475, 150)
    gui.click(brushes_xy)
    a, b = brush_types_xy[brush_type]
    gui.click(a, b, 2, .3)
    return(brush_type)
def select_size(size):
    """Pick a stroke size from MS Paint's size drop-down."""
    # Screen position of each entry in the opened size menu.
    size_menu = {
        'small': (900, 200),
        'medium small': (900, 250),
        'medium large': (900, 325),
        'large': (900, 375)
    }
    # Open the drop-down, then double-click the requested size.
    gui.click((900, 150))
    x_pos, y_pos = size_menu[size]
    gui.click(x_pos, y_pos, 2, .3)
# Draw a square-shaped spiral at a random canvas position.
def squarespiral(current_colors_xy, canvas_width, canvas_height):
    """Paint a shrinking right/down/left/up spiral in a random colour.

    The side length shrinks by a random increment on each half turn
    until it reaches zero.
    """
    # Choose and activate a random colour from the current scheme.
    _, (col_x, col_y) = r.choice(list(current_colors_xy.items()))
    gui.click(col_x, col_y, 2, 0.2)
    # Random starting point inside the usable canvas area.
    top, bottom = canvas_height
    gui.click(r.randint(0, canvas_width), r.randint(top, bottom))
    side = r.randint(20, 300)
    shrink = r.randint(4, 10)
    while side > 0:
        gui.dragRel(side, 0)    # right
        side -= shrink
        gui.dragRel(0, side)    # down
        gui.dragRel(-side, 0)   # left
        side -= shrink
        gui.dragRel(0, -side)   # up
# function to draw a square diamond
def diamond(current_colors_xy, canvas_width, canvas_height):
    """Draw a square diamond (rotated square) at a random canvas position.

    :param current_colors_xy: dict of colour name -> swatch (x, y)
    :param canvas_width: maximum x coordinate of the canvas
    :param canvas_height: (top, bottom) usable y range of the canvas
    """
    name, (place, row) = r.choice(list(current_colors_xy.items()))
    gui.click(place, row, 2, 0.2)
    # ^^ choose a color
    # BUGFIX: the original hard-coded the y range as (300, 700) and ignored
    # canvas_height; use the caller-supplied range like the other shapes do.
    a, b = canvas_height
    gui.click(r.randint(0, canvas_width), r.randint(a, b))
    distance = r.randint(1, 300)
    gui.dragRel(distance, -distance)   # up-right edge
    gui.dragRel(distance, distance)    # down-right edge
    gui.dragRel(-distance, distance)   # down-left edge
    gui.dragRel(-distance, -distance)  # up-left edge
# Draw a horizontal line of random length in a random colour.
def horizontalLine(current_colors_xy, canvas_width, canvas_height):
    """Click a random canvas point and drag horizontally up to 300 px."""
    _, swatch = r.choice(list(current_colors_xy.items()))
    gui.click(swatch[0], swatch[1], 2, 0.2)  # activate the colour
    top, bottom = canvas_height
    start_x = r.randint(0, canvas_width)
    start_y = r.randint(top, bottom)
    gui.click(start_x, start_y)
    gui.dragRel(r.randint(-300, 300), 0)  # drag left or right
# Draw a vertical line of random length in a random colour.
def verticalLine(current_colors_xy, canvas_width, canvas_height):
    """Click a random canvas point and drag vertically up to 300 px."""
    _, swatch = r.choice(list(current_colors_xy.items()))
    gui.click(swatch[0], swatch[1], 2, 0.2)  # activate the colour
    top, bottom = canvas_height
    gui.click(r.randint(0, canvas_width), r.randint(top, bottom))
    gui.dragRel(0, r.randint(-300, 300))  # drag up or down
# Switch to the "fill" tool and flood-fill a random canvas area.
def fill(current_colors_xy, fill_tool_xy, current_brush_type, canvas_width, canvas_height):
    """Flood-fill a random region, then restore the previous brush.

    Selects a random colour, switches to the fill tool, clicks a random
    point so the enclosing region is filled, and finally re-selects
    current_brush_type so subsequent strokes keep using it.
    """
    _, (col_x, col_y) = r.choice(list(current_colors_xy.items()))
    gui.click(col_x, col_y, 2, 0.2)              # activate the colour
    gui.click(fill_tool_xy[0], fill_tool_xy[1])  # switch to the fill tool
    top, bottom = canvas_height
    gui.click(r.randint(0, canvas_width), r.randint(top, bottom))
    select_brush(current_brush_type)             # back to the drawing brush
# Select a random rectangular region of the canvas and drag it elsewhere.
def cut_and_paste(canvas_width, canvas_height):
    """Use MS Paint's rectangular-select tool to move a random region.

    NOTE(review): the final click/dragRel pair uses small coordinates near
    the screen's top-left corner — presumably to drop/deselect the moved
    region; confirm the intent against the actual MS Paint layout.
    """
    # Screen position of the rectangular-select tool button.
    select_xy = (180, 110)
    a, b = select_xy
    gui.click(a, b, 2, .3)
    # Drag out a random selection rectangle on the canvas.
    a, b = canvas_height
    gui.click(r.randint(0,canvas_width),r.randint(a, b))
    gui.dragRel(r.randint(-600,600), r.randint(-600,600))
    # Nudge the cursor, then pause so the selection is visible before moving.
    gui.move(-30, -30)
    time.sleep(5)
    a, b = r.randint(0,100), r.randint(0,100)
    gui.click(a, b)
    gui.dragRel(a, b)
|
{"/Final Project Main.py": ["/ChooseColor.py"], "/DaVinci.py": ["/PaintFunctions.py"], "/Master_PyDaVinci.py": ["/PaintFunctions.py"]}
|
33,798,823
|
PatCondit03/PyDaVinci
|
refs/heads/master
|
/Master_PyDaVinci.py
|
# import DaVinci.py
import PaintFunctions as PF
# In this file the timing, color theme and function selection of DaVinci
# will be graded and saved into a (json, csv, text?) file.
import time
# Canvas geometry shared with DaVinci.py: full width, plus a (top, bottom)
# y-range with margins to avoid clicking MS Paint's GUI chrome.
canvas_width = 1500
canvas_height = (250, 756)
# Give the user a moment to focus the MS Paint window, then exercise the
# cut-and-paste helper.
time.sleep(2)
PF.cut_and_paste(canvas_width, canvas_height)
|
{"/Final Project Main.py": ["/ChooseColor.py"], "/DaVinci.py": ["/PaintFunctions.py"], "/Master_PyDaVinci.py": ["/PaintFunctions.py"]}
|
33,833,216
|
me76ss/SDM_SMRZA_IC
|
refs/heads/master
|
/ye git dge/Django api image caption/myapp/models.py
|
from django.db import models
class Document(models.Model):
    """An uploaded file, stored under a date-stamped directory."""
    # upload_to is a strftime pattern: files land in e.g. 2021_05_01/.
    image = models.FileField(upload_to='%Y_%m_%d')
|
{"/ye git dge/Django api image caption/myapp/views.py": ["/ye git dge/Django api image caption/myapp/models.py"]}
|
33,833,217
|
me76ss/SDM_SMRZA_IC
|
refs/heads/master
|
/ye git dge/Django api image caption/myapp/views.py
|
from django.http import HttpResponse
from django.shortcuts import render
from django.views.decorators.csrf import csrf_exempt
from .models import Document
from .forms import DocumentForm
@csrf_exempt
def my_view(request):
    """Handle file uploads (POST) and render the document list (GET).

    On a valid POST the uploaded file is saved as a Document and a plain
    text response echoing the stored file name is returned; otherwise the
    list page is rendered with all existing documents and the form.

    NOTE(review): csrf_exempt disables CSRF protection for this endpoint —
    confirm this is intentional (it appears to serve an API client).
    """
    print(f"Great! You're using Python 3.6+. If you fail here, use the right version.")
    message = 'Upload as many files as you want!'
    # Handle file upload
    if request.method == 'POST':
        form = DocumentForm(request.POST, request.FILES)
        if form.is_valid():
            # NOTE(review): reads request.FILES['docfile'] directly instead
            # of form.cleaned_data — assumes the form's file field is named
            # 'docfile'; verify against DocumentForm.
            newdoc = Document(image=request.FILES['docfile'])
            newdoc.save()
            # The return value echoes f(input) -> output (translated from Persian).
            return HttpResponse(f'test: {newdoc.image.name}')
            #.url
        else:
            # Fall through and re-render the list page with the bound form.
            message = 'The form is not valid. Fix the following error:'
    else:
        form = DocumentForm() # An empty, unbound form
    # Load documents for the list page
    documents = Document.objects.all()
    for d in documents:
        # Debug logging of each stored file name.
        print(d.image)
    # Render list page with the documents and the form
    context = {'documents': documents, 'form': form, 'message': message}
    return render(request, 'list.html', context)
|
{"/ye git dge/Django api image caption/myapp/views.py": ["/ye git dge/Django api image caption/myapp/models.py"]}
|
33,872,350
|
kishoresahoo2050/Resume_project_py
|
refs/heads/main
|
/core/views.py
|
from django.shortcuts import render
from .forms import ContactFrm
from .models import ContactModal
# Create your views here.
def index(req):
    """Render the home page with the 'home' nav item highlighted."""
    context = {'home': 'active'}
    return render(req, 'core/index.htm', context)
def contact(req):
    """Show the contact form; on a valid POST, persist the message.

    A valid submission is saved as a ContactModal row and the page is
    re-rendered with a fresh form plus a confirmation message. Invalid
    submissions re-render with the bound form so errors are shown.
    """
    if req.method != "POST":
        # Plain GET: show an empty, unbound form.
        blank = ContactFrm()
        return render(req, 'core/contact.htm',
                      {'contact': 'active', "form": blank})
    submitted = ContactFrm(req.POST)
    if not submitted.is_valid():
        # Re-display with validation errors attached to the bound form.
        return render(req, 'core/contact.htm',
                      {'contact': 'active', "form": submitted})
    data = submitted.cleaned_data
    record = ContactModal(name=data['name'], email=data['email'],
                          sub=data['subject'], msg=data['msg'])
    record.save()
    fresh = ContactFrm()
    return render(req, 'core/contact.htm',
                  {'contact': 'active', "form": fresh,
                   "msg": "Request Send Succcessfully,Our Team Contact Very Soon."})
|
{"/core/views.py": ["/core/forms.py", "/core/models.py"], "/core/admin.py": ["/core/models.py"]}
|
33,872,351
|
kishoresahoo2050/Resume_project_py
|
refs/heads/main
|
/core/admin.py
|
from django.contrib import admin
from .models import ContactModal
# Register your models here.
@admin.register(ContactModal)
class AdminContact(admin.ModelAdmin):
    """Admin changelist for contact submissions."""
    # Columns shown on the ContactModal list page.
    list_display = ['name','email','sub','msg']
|
{"/core/views.py": ["/core/forms.py", "/core/models.py"], "/core/admin.py": ["/core/models.py"]}
|
33,872,352
|
kishoresahoo2050/Resume_project_py
|
refs/heads/main
|
/edu/views.py
|
from django.shortcuts import render
# Create your views here.
def index(req):
    """Render the skills page with the 'skill' nav item highlighted."""
    ctx = {"skill": "active"}
    return render(req, 'serv/skill.htm', ctx)
|
{"/core/views.py": ["/core/forms.py", "/core/models.py"], "/core/admin.py": ["/core/models.py"]}
|
33,872,353
|
kishoresahoo2050/Resume_project_py
|
refs/heads/main
|
/core/forms.py
|
from django import forms
class ContactFrm(forms.Form):
    """Contact-page form: name, email, subject and message.

    Each field carries Bootstrap widget attributes (the "form-control"
    class, an element id and a placeholder) so templates can render the
    fields directly.

    NOTE(review): the email field uses a TextInput widget rather than
    EmailInput — server-side EmailField validation still applies, but the
    browser's email keyboard/hints are lost; confirm this is intended.
    """
    name = forms.CharField(label="Your Name",label_suffix=" ",error_messages={"required":"Your Name Must Be Required"},widget=forms.TextInput(attrs={
        "class":"form-control",
        "id" :"name",
        "placeholder":"Enter Your Name "
    }))
    email = forms.EmailField(label="Your Email",label_suffix=" ",error_messages={"required":"Your Email Must Be Required"},widget=forms.TextInput(attrs={
        "class":"form-control",
        "id" :"email",
        "placeholder":"Enter Your Email "
    }))
    subject = forms.CharField(label="Your Subject",label_suffix=" ",error_messages={"required":"Your Subject Must Be Required"},widget=forms.Textarea(attrs={
        "class":"form-control",
        "id" :"sub",
        "placeholder":"Enter Your Subject ",
        "cols":"",
        "rows":""
    }))
    msg = forms.CharField(label="Your Message",label_suffix=" ",error_messages={"required":"Your Message Must Be Required"},widget=forms.Textarea(attrs={
        "class":"form-control",
        "id" :"msg",
        "placeholder":"Enter Your Message ",
        "cols":"",
        "rows":""
    }))
|
{"/core/views.py": ["/core/forms.py", "/core/models.py"], "/core/admin.py": ["/core/models.py"]}
|
33,872,354
|
kishoresahoo2050/Resume_project_py
|
refs/heads/main
|
/serv/views.py
|
from django.shortcuts import render
# Create your views here.
def index(req):
    """Render the services page with the 'serv' nav item highlighted."""
    page_ctx = {'serv': 'active'}
    return render(req, 'serv/service.htm', page_ctx)
|
{"/core/views.py": ["/core/forms.py", "/core/models.py"], "/core/admin.py": ["/core/models.py"]}
|
33,872,355
|
kishoresahoo2050/Resume_project_py
|
refs/heads/main
|
/core/models.py
|
from django.db import models
# Create your models here.
class ContactModal(models.Model):
    """A message submitted through the contact form."""
    name = models.CharField(max_length=55)     # sender's name
    email = models.EmailField(max_length=100)  # sender's email address
    sub = models.CharField(max_length=55)      # subject line
    msg = models.CharField(max_length=100)     # message body
|
{"/core/views.py": ["/core/forms.py", "/core/models.py"], "/core/admin.py": ["/core/models.py"]}
|
33,893,286
|
jiangyutan/delfin
|
refs/heads/master
|
/delfin/drivers/dell_emc/vnx/vnx_block/navicli_client.py
|
# Copyright 2021 The SODA Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http:#www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from subprocess import Popen, PIPE
from oslo_log import log as logging
from delfin.drivers.dell_emc.vnx.vnx_block import consts
LOG = logging.getLogger(__name__)
class NaviClient(object):
    """Thin wrapper around the NaviSecCli command-line tool.

    Runs the CLI via subprocess and maps known error strings in its
    output to delfin exceptions via consts.EXCEPTION_MAP.
    """
    @staticmethod
    def exec(command_str, stdin_value=None):
        """execute command_str using Popen
        :param command_str: should be list type
        :param stdin_value: same as stdin of Popen
        :return: output of Popen.communicate
        :raises: the exception mapped in consts.EXCEPTION_MAP when its
            key string appears in the command output
        """
        result = None
        # shell=False with a list argv avoids shell-injection issues.
        p = Popen(command_str, stdin=PIPE, stdout=PIPE, stderr=PIPE,
                  shell=False)
        """Call method when input is needed"""
        if stdin_value:
            out, err = p.communicate(
                input=bytes(stdin_value, encoding='utf-8'))
        else:
            """Call method when no input is required"""
            # NOTE(review): this branch reads stdout directly instead of
            # using communicate(); if the child writes enough to stderr
            # the pipe can fill and deadlock — confirm acceptable here.
            out = p.stdout.read()
        if isinstance(out, bytes):
            out = out.decode("utf-8")
        result = out.strip()
        if result:
            """
            Determine whether an exception occurs according
            to the returned information
            """
            # Certificate errors are skipped when no stdin was supplied or
            # when the input was the certificate-store confirmation string.
            for exception_key in consts.EXCEPTION_MAP.keys():
                if stdin_value is None or stdin_value == consts.CER_STORE:
                    if exception_key == consts.CER_ERR:
                        continue
                if exception_key in result:
                    raise consts.EXCEPTION_MAP.get(exception_key)(result)
        return result
|
{"/delfin/tests/unit/drivers/dell_emc/unity/test_emc_unity.py": ["/delfin/drivers/dell_emc/unity/rest_handler.py"], "/delfin/tests/unit/drivers/dell_emc/vmax/test_vmax.py": ["/delfin/drivers/dell_emc/vmax/vmax.py"], "/delfin/tests/unit/drivers/dell_emc/vnx/vnx_block/test_vnx_block.py": ["/delfin/drivers/dell_emc/vnx/vnx_block/navicli_client.py"]}
|
33,893,287
|
jiangyutan/delfin
|
refs/heads/master
|
/delfin/tests/unit/drivers/dell_emc/vmax/test_vmax.py
|
# Copyright 2020 The SODA Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from unittest import TestCase, mock
from requests.sessions import Session
from delfin import context
from delfin import exception
from delfin.common import config # noqa
from delfin.common import constants
from delfin.drivers.dell_emc.vmax.rest import VMaxRest
from delfin.drivers.dell_emc.vmax.vmax import VMAXStorageDriver
class Request:
    """Minimal stand-in for a request object carrying a delfin context."""
    def __init__(self):
        # Mirrors how delfin middleware stashes the RequestContext in environ.
        self.environ = {'delfin.context': context.RequestContext()}
# Driver kwargs used by every test below: storage identity, the Unisphere
# REST endpoint credentials (password is base64-encoded), and the target
# array id passed through extra_attributes.
VMAX_STORAGE_CONF = {
    "storage_id": "12345",
    "vendor": "dell_emc",
    "model": "vmax",
    "rest": {
        "host": "10.0.0.1",
        "port": 8443,
        "username": "user",
        "password": "cGFzc3dvcmQ="
    },
    "extra_attributes": {
        "array_id": "00112233"
    }
}
class TestVMAXStorageDriver(TestCase):
    """Unit tests for VMAXStorageDriver with all Unisphere REST calls mocked.

    Each test patches the VMaxRest methods the driver calls, feeds canned
    Unisphere responses, and checks both the happy path and error
    propagation (mock side_effect lists are consumed in call order).
    """
    @mock.patch.object(VMaxRest, 'get_array_detail')
    @mock.patch.object(VMaxRest, 'get_uni_version')
    @mock.patch.object(VMaxRest, 'get_unisphere_version')
    def test_init(self, mock_unisphere_version,
                  mock_version, mock_array):
        """Driver construction succeeds; connection errors propagate."""
        kwargs = VMAX_STORAGE_CONF
        mock_version.return_value = ['V9.0.2.7', '90']
        mock_unisphere_version.return_value = ['V9.0.2.7', '90']
        mock_array.return_value = {'symmetrixId': ['00112233']}
        driver = VMAXStorageDriver(**kwargs)
        self.assertEqual(driver.client.uni_version, '90')
        self.assertEqual(driver.storage_id, "12345")
        self.assertEqual(driver.client.array_id, "00112233")
        with self.assertRaises(Exception) as exc:
            mock_version.side_effect = exception.InvalidIpOrPort
            VMAXStorageDriver(**kwargs)
        self.assertIn('Invalid ip or port', str(exc.exception))
        with self.assertRaises(Exception) as exc:
            mock_version.side_effect = exception.InvalidUsernameOrPassword
            VMAXStorageDriver(**kwargs)
        self.assertIn('Invalid username or password.', str(exc.exception))
    @mock.patch.object(VMaxRest, 'get_system_capacity')
    @mock.patch.object(VMaxRest, 'get_vmax_array_details')
    @mock.patch.object(VMaxRest, 'get_array_detail')
    @mock.patch.object(VMaxRest, 'get_uni_version')
    @mock.patch.object(VMaxRest, 'get_unisphere_version')
    def test_get_storage(self, mock_unisphere_version,
                         mock_version, mock_array,
                         mock_array_details, mock_capacity):
        """get_storage maps both the 90 and 84 capacity payload shapes."""
        expected = {
            'name': 'VMAX250F-00112233',
            'vendor': 'Dell EMC',
            'description': '',
            'model': 'VMAX250F',
            'firmware_version': '5978.221.221',
            'status': 'normal',
            'serial_number': '00112233',
            'location': '',
            'total_capacity': 109951162777600,
            'used_capacity': 82463372083200,
            'free_capacity': 27487790694400,
            'raw_capacity': 1610612736000,
            'subscribed_capacity': 219902325555200
        }
        # Unisphere 90-style capacity payload (TB units).
        system_capacity = {
            'system_capacity': {
                'usable_total_tb': 100,
                'usable_used_tb': 75,
                'subscribed_total_tb': 200
            },
            'physicalCapacity': {
                'total_capacity_gb': 1500
            }
        }
        # Unisphere 84-style capacity payload (GB units).
        system_capacity_84 = {
            'total_usable_cap_gb': 100 * 1024,
            'total_allocated_cap_gb': 75 * 1024,
            'total_subscribed_cap_gb': 200 * 1024,
            'physicalCapacity': {
                'total_capacity_gb': 1500
            }
        }
        kwargs = VMAX_STORAGE_CONF
        mock_version.return_value = ['V9.0.2.7', '90']
        mock_unisphere_version.return_value = ['V9.0.2.7', '90']
        mock_array.return_value = {'symmetrixId': ['00112233']}
        mock_array_details.return_value = {
            'model': 'VMAX250F',
            'ucode': '5978.221.221',
            'display_name': 'VMAX250F-00112233'}
        mock_capacity.return_value = system_capacity
        driver = VMAXStorageDriver(**kwargs)
        self.assertEqual(driver.storage_id, "12345")
        self.assertEqual(driver.client.array_id, "00112233")
        ret = driver.get_storage(context)
        self.assertDictEqual(ret, expected)
        # Same expected result when the client reports uni_version 84.
        driver.client.uni_version = '84'
        mock_capacity.return_value = system_capacity_84
        ret = driver.get_storage(context)
        self.assertDictEqual(ret, expected)
        mock_array_details.side_effect = exception.StorageBackendException
        with self.assertRaises(Exception) as exc:
            driver.get_storage(context)
        self.assertIn('Failed to get array details from VMAX',
                      str(exc.exception))
        mock_array_details.side_effect = [{
            'model': 'VMAX250F',
            'ucode': '5978.221.221',
            'display_name': 'VMAX250F-00112233'}]
        mock_capacity.side_effect = exception.StorageBackendException
        with self.assertRaises(Exception) as exc:
            driver.get_storage(context)
        self.assertIn('Failed to get capacity from VMAX',
                      str(exc.exception))
    @mock.patch.object(VMaxRest, 'get_srp_by_name')
    @mock.patch.object(VMaxRest, 'get_array_detail')
    @mock.patch.object(VMaxRest, 'get_uni_version')
    @mock.patch.object(VMaxRest, 'get_unisphere_version')
    def test_list_storage_pools(self, mock_unisphere_version,
                                mock_version,
                                mock_array, mock_srp):
        """list_storage_pools maps SRP data; failures in either SRP call raise."""
        expected = [{
            'name': 'SRP_1',
            'storage_id': '12345',
            'native_storage_pool_id': 'SRP_ID',
            'description': 'Dell EMC VMAX Pool',
            'status': 'normal',
            'storage_type': 'block',
            'total_capacity': 109951162777600,
            'used_capacity': 82463372083200,
            'free_capacity': 27487790694400,
            'subscribed_capacity': 219902325555200
        }]
        pool_info = {
            'srp_capacity': {
                'usable_total_tb': 100,
                'usable_used_tb': 75,
                'subscribed_total_tb': 200
            },
            'srpId': 'SRP_ID'
        }
        kwargs = VMAX_STORAGE_CONF
        mock_version.return_value = ['V9.0.2.7', '90']
        mock_unisphere_version.return_value = ['V9.0.2.7', '90']
        mock_array.return_value = {'symmetrixId': ['00112233']}
        # First call lists SRP names, second fetches the named SRP.
        mock_srp.side_effect = [{'srpId': ['SRP_1']}, pool_info]
        driver = VMAXStorageDriver(**kwargs)
        self.assertEqual(driver.storage_id, "12345")
        self.assertEqual(driver.client.array_id, "00112233")
        ret = driver.list_storage_pools(context)
        self.assertDictEqual(ret[0], expected[0])
        mock_srp.side_effect = [{'srpId': ['SRP_1']},
                                exception.StorageBackendException]
        with self.assertRaises(Exception) as exc:
            driver.list_storage_pools(context)
        self.assertIn('Failed to get pool metrics from VMAX',
                      str(exc.exception))
        mock_srp.side_effect = [exception.StorageBackendException, pool_info]
        with self.assertRaises(Exception) as exc:
            driver.list_storage_pools(context)
        self.assertIn('Failed to get pool metrics from VMAX',
                      str(exc.exception))
    @mock.patch.object(VMaxRest, 'get_system_capacity')
    @mock.patch.object(VMaxRest, 'get_storage_group')
    @mock.patch.object(VMaxRest, 'get_volume')
    @mock.patch.object(VMaxRest, 'get_volume_list')
    @mock.patch.object(VMaxRest, 'get_array_detail')
    @mock.patch.object(VMaxRest, 'get_uni_version')
    @mock.patch.object(VMaxRest, 'get_unisphere_version')
    def test_list_volumes(self, mock_unisphere_version,
                          mock_version, mock_array,
                          mock_vols, mock_vol, mock_sg, mock_capacity):
        """list_volumes maps FBA volumes (CKD ones are filtered out)."""
        expected = \
            [
                {
                    'name': 'volume_1',
                    'storage_id': '12345',
                    'description': "Dell EMC VMAX 'thin device' volume",
                    'type': 'thin',
                    'status': 'available',
                    'native_volume_id': '00001',
                    'wwn': 'wwn123',
                    'total_capacity': 104857600,
                    'used_capacity': 10485760,
                    'free_capacity': 94371840,
                    'native_storage_pool_id': 'SRP_1',
                    'compressed': True
                },
                {
                    'name': 'volume_2:id',
                    'storage_id': '12345',
                    'description': "Dell EMC VMAX 'thin device' volume",
                    'type': 'thin',
                    'status': 'available',
                    'native_volume_id': '00002',
                    'wwn': 'wwn1234',
                    'total_capacity': 104857600,
                    'used_capacity': 10485760,
                    'free_capacity': 94371840,
                    'native_storage_pool_id': 'SRP_1'
                }
            ]
        # FBA volume attached to one storage group.
        volumes = {
            'volumeId': '00001',
            'cap_mb': 100,
            'allocated_percent': 10,
            'status': 'Ready',
            'type': 'TDEV',
            'wwn': 'wwn123',
            'num_of_storage_groups': 1,
            'storageGroupId': ['SG_001'],
            'emulation': 'FBA'
        }
        # FBA volume with an identifier and no storage group.
        volumes1 = {
            'volumeId': '00002',
            'volume_identifier': 'id',
            'cap_mb': 100,
            'allocated_percent': 10,
            'status': 'Ready',
            'type': 'TDEV',
            'wwn': 'wwn1234',
            'num_of_storage_groups': 0,
            'storageGroupId': [],
            'emulation': 'FBA'
        }
        # CKD volume — expected to be excluded from the result.
        volumes2 = {
            'volumeId': '00003',
            'cap_mb': 100,
            'allocated_percent': 10,
            'status': 'Ready',
            'type': 'TDEV',
            'wwn': 'wwn1234',
            'num_of_storage_groups': 0,
            'storageGroupId': [],
            'emulation': 'CKD'
        }
        storage_group_info = {
            'srp': 'SRP_1',
            'compression': True
        }
        default_srps = {
            'default_fba_srp': 'SRP_1',
            'default_ckd_srp': 'SRP_2'
        }
        kwargs = VMAX_STORAGE_CONF
        mock_version.return_value = ['V9.0.2.7', '90']
        mock_unisphere_version.return_value = ['V9.0.2.7', '90']
        mock_array.return_value = {'symmetrixId': ['00112233']}
        mock_vols.side_effect = [['volume_1', 'volume_2', 'volume_3']]
        mock_vol.side_effect = [volumes, volumes1, volumes2]
        mock_sg.side_effect = [storage_group_info]
        mock_capacity.return_value = default_srps
        driver = VMAXStorageDriver(**kwargs)
        self.assertEqual(driver.storage_id, "12345")
        self.assertEqual(driver.client.array_id, "00112233")
        ret = driver.list_volumes(context)
        self.assertDictEqual(ret[0], expected[0])
        self.assertDictEqual(ret[1], expected[1])
        # Failures in each of the three REST calls surface the same error.
        mock_vols.side_effect = [['volume_1']]
        mock_vol.side_effect = [volumes]
        mock_sg.side_effect = [exception.StorageBackendException]
        with self.assertRaises(Exception) as exc:
            driver.list_volumes(context)
        self.assertIn('Failed to get list volumes from VMAX',
                      str(exc.exception))
        mock_vols.side_effect = [['volume_1']]
        mock_vol.side_effect = [exception.StorageBackendException]
        mock_sg.side_effect = [storage_group_info]
        with self.assertRaises(Exception) as exc:
            driver.list_volumes(context)
        self.assertIn('Failed to get list volumes from VMAX',
                      str(exc.exception))
        mock_vols.side_effect = [exception.StorageBackendException]
        mock_vol.side_effect = [volumes]
        mock_sg.side_effect = [storage_group_info]
        with self.assertRaises(Exception) as exc:
            driver.list_volumes(context)
        self.assertIn('Failed to get list volumes from VMAX',
                      str(exc.exception))
    @mock.patch.object(VMaxRest, 'post_request')
    @mock.patch.object(VMaxRest, 'get_vmax_array_details')
    @mock.patch.object(VMaxRest, 'get_array_detail')
    @mock.patch.object(VMaxRest, 'get_uni_version')
    @mock.patch.object(VMaxRest, 'get_unisphere_version')
    def test_get_storage_performance(self, mock_unisphere_version,
                                     mock_version, mock_array,
                                     mock_array_details,
                                     mock_performnace):
        """collect_perf_metrics aggregates both historic and real-time samples."""
        # Canned Unisphere performance response with three historic samples.
        vmax_array_perf_resp_historic = {
            "expirationTime": 1600172441701,
            "count": 4321,
            "maxPageSize": 1000,
            "id": "d495891f-1607-42b7-ba8d-44d0786bd335_0",
            "resultList": {
                "result": [
                    {
                        "HostIOs": 296.1,
                        "HostMBWritten": 0.31862956,
                        "ReadResponseTime": 4.4177675,
                        "HostMBReads": 0.05016927,
                        "HostReads": 14.056666,
                        "HostWrites": 25.78,
                        "WriteResponseTime": 4.7228317,
                        "timestamp": 1598875800000
                    },
                    {
                        "HostIOs": 350.22998,
                        "HostMBWritten": 0.40306965,
                        "ReadResponseTime": 4.396796,
                        "HostMBReads": 0.043291014,
                        "HostReads": 13.213333,
                        "HostWrites": 45.97333,
                        "WriteResponseTime": 4.7806735,
                        "timestamp": 1598876100000
                    },
                    {
                        "HostIOs": 297.63333,
                        "HostMBWritten": 0.25046548,
                        "ReadResponseTime": 4.3915706,
                        "HostMBReads": 0.042753905,
                        "HostReads": 13.176666,
                        "HostWrites": 28.643333,
                        "WriteResponseTime": 4.8760557,
                        "timestamp": 1598876400000
                    }
                ]
            }
        }
        # Same shape with a single sample, as returned for real-time queries.
        vmax_array_perf_resp_real_time = {
            "expirationTime": 1600172441701,
            "count": 4321,
            "maxPageSize": 1000,
            "id": "d495891f-1607-42b7-ba8d-44d0786bd335_0",
            "resultList": {
                "result": [
                    {
                        "HostIOs": 296.1,
                        "HostMBWritten": 0.31862956,
                        "ReadResponseTime": 4.4177675,
                        "HostMBReads": 0.05016927,
                        "HostReads": 14.056666,
                        "HostWrites": 25.78,
                        "WriteResponseTime": 4.7228317,
                        "timestamp": 1598875800000
                    }
                ]
            }
        }
        expected_historic = [
            constants.metric_struct(name='responseTime',
                                    labels={
                                        'storage_id': '12345',
                                        'resource_type':
                                            'array'},
                                    values={
                                        1598875800000:
                                            9.1405992,
                                        1598876400000:
                                            9.2676263,
                                        1598876100000:
                                            9.1774695}
                                    ),
            constants.metric_struct(name='throughput',
                                    labels={
                                        'storage_id': '12345',
                                        'resource_type':
                                            'array'},
                                    values={
                                        1598875800000:
                                            0.36879882999999997,
                                        1598876400000:
                                            0.293219385,
                                        1598876100000:
                                            0.446360664}
                                    ),
            constants.metric_struct(name='readThroughput',
                                    labels={
                                        'storage_id': '12345',
                                        'resource_type':
                                            'array'},
                                    values={
                                        1598875800000:
                                            0.05016927,
                                        1598876100000:
                                            0.043291014,
                                        1598876400000:
                                            0.042753905}
                                    ),
            constants.metric_struct(name='writeThroughput',
                                    labels={
                                        'storage_id': '12345',
                                        'resource_type':
                                            'array'},
                                    values={
                                        1598875800000:
                                            0.31862956,
                                        1598876100000:
                                            0.40306965,
                                        1598876400000:
                                            0.25046548}
                                    ),
            constants.metric_struct(name='requests',
                                    labels={
                                        'storage_id': '12345',
                                        'resource_type':
                                            'array'},
                                    values={
                                        1598875800000: 296.1,
                                        1598876100000:
                                            350.22998,
                                        1598876400000:
                                            297.63333}
                                    ),
            constants.metric_struct(name='readRequests',
                                    labels={
                                        'storage_id': '12345',
                                        'resource_type':
                                            'array'},
                                    values={
                                        1598875800000:
                                            14.056666,
                                        1598876100000:
                                            13.213333,
                                        1598876400000:
                                            13.176666}
                                    ),
            constants.metric_struct(name='writeRequests',
                                    labels={
                                        'storage_id': '12345',
                                        'resource_type':
                                            'array'},
                                    values={
                                        1598875800000: 25.78,
                                        1598876100000:
                                            45.97333,
                                        1598876400000:
                                            28.643333}
                                    )
        ]
        expected_realtime = [
            constants.metric_struct(name='responseTime',
                                    labels={
                                        'storage_id': '12345',
                                        'resource_type':
                                            'array'},
                                    values={
                                        1598875800000:
                                            9.1405992
                                    }
                                    ),
            constants.metric_struct(name='throughput',
                                    labels={
                                        'storage_id': '12345',
                                        'resource_type':
                                            'array'},
                                    values={
                                        1598875800000:
                                            0.36879882999999997
                                    }
                                    ),
            constants.metric_struct(name='readThroughput',
                                    labels={
                                        'storage_id': '12345',
                                        'resource_type':
                                            'array'},
                                    values={
                                        1598875800000:
                                            0.05016927
                                    }
                                    ),
            constants.metric_struct(name='writeThroughput',
                                    labels={
                                        'storage_id': '12345',
                                        'resource_type': 'array'
                                    },
                                    values={
                                        1598875800000: 0.31862956
                                    }
                                    ),
            constants.metric_struct(name='requests',
                                    labels={
                                        'storage_id': '12345',
                                        'resource_type':
                                            'array'},
                                    values={
                                        1598875800000: 296.1
                                    }
                                    ),
            constants.metric_struct(name='readRequests',
                                    labels={
                                        'storage_id': '12345',
                                        'resource_type':
                                            'array'},
                                    values={
                                        1598875800000:
                                            14.056666
                                    }
                                    ),
            constants.metric_struct(name='writeRequests',
                                    labels={
                                        'storage_id': '12345',
                                        'resource_type':
                                            'array'},
                                    values={
                                        1598875800000: 25.78
                                    }
                                    )
        ]
        kwargs = VMAX_STORAGE_CONF
        mock_version.return_value = ['V9.0.2.7', '90']
        mock_unisphere_version.return_value = ['V9.0.2.7', '90']
        mock_array.return_value = {'symmetrixId': ['00112233']}
        mock_array_details.return_value = {
            'model': 'VMAX250F',
            'ucode': '5978.221.221',
            'display_name': 'VMAX250F-00112233'}
        mock_performnace.return_value = 200, vmax_array_perf_resp_historic
        driver = VMAXStorageDriver(**kwargs)
        self.assertEqual(driver.storage_id, "12345")
        self.assertEqual(driver.client.array_id, "00112233")
        ret = driver.collect_perf_metrics(context, '12345', "", 10000000,
                                          10900000)
        self.assertEqual(ret, expected_historic)
        # start == end triggers the real-time collection path.
        mock_performnace.return_value = 200, vmax_array_perf_resp_real_time
        ret = driver.collect_perf_metrics(context, '12345', "", 10900000,
                                          10900000)
        self.assertEqual(ret, expected_realtime)
        mock_performnace.side_effect = \
            exception.StoragePerformanceCollectionFailed
        with self.assertRaises(Exception) as exc:
            ret = driver.collect_perf_metrics(context, '12345', "", 10000000,
                                              10900000)
        self.assertIn('Failed to collect performance metrics. Reason',
                      str(exc.exception))
    @mock.patch.object(Session, 'request')
    @mock.patch.object(VMaxRest, 'get_array_detail')
    @mock.patch.object(VMaxRest, 'get_uni_version')
    @mock.patch.object(VMaxRest, 'get_unisphere_version')
    def test_rest(self, mock_unisphere_version,
                  mock_version, mock_array,
                  mock_request):
        """REST session can be reset and re-established without losing state."""
        kwargs = VMAX_STORAGE_CONF
        mock_version.return_value = ['V9.0.2.7', '90']
        mock_unisphere_version.return_value = ['V9.0.2.7', '90']
        mock_array.return_value = {'symmetrixId': ['00112233']}
        driver = VMAXStorageDriver(**kwargs)
        self.assertEqual(driver.client.uni_version, '90')
        self.assertEqual(driver.storage_id, "12345")
        self.assertEqual(driver.client.array_id, "00112233")
        mock_request.return_value = mock.Mock()
        mock_request.return_value.json = mock.Mock(return_value={})
        driver.reset_connection(context, **kwargs)
        # A request with no session forces the client to rebuild one.
        driver.client.rest.session = None
        driver.client.rest.request('/session', 'GET')
        self.assertEqual(driver.client.uni_version, '90')
    @mock.patch.object(VMaxRest, 'get_array_detail')
    @mock.patch.object(VMaxRest, 'get_uni_version')
    @mock.patch.object(VMaxRest, 'get_unisphere_version')
    def test_get_capabilities(self, mock_unisphere_version,
                              mock_version, mock_array):
        """get_capabilities reports historic support and storage-only metrics."""
        kwargs = VMAX_STORAGE_CONF
        mock_version.return_value = ['V9.0.2.7', '90']
        mock_unisphere_version.return_value = ['V9.0.2.7', '90']
        mock_array.return_value = {'symmetrixId': ['00112233']}
        driver = VMAXStorageDriver(**kwargs)
        self.assertEqual(driver.client.uni_version, '90')
        self.assertEqual(driver.storage_id, "12345")
        self.assertEqual(driver.client.array_id, "00112233")
        capabilities = driver.get_capabilities(context)
        self.assertIsNotNone(capabilities)
        self.assertIsInstance(capabilities, dict)
        self.assertEqual(capabilities['is_historic'], True)
        self.assertIsInstance(capabilities['resource_metrics'], dict)
        # Only support storage metrics
        self.assertEqual(len(capabilities['resource_metrics']), 1)
|
{"/delfin/tests/unit/drivers/dell_emc/unity/test_emc_unity.py": ["/delfin/drivers/dell_emc/unity/rest_handler.py"], "/delfin/tests/unit/drivers/dell_emc/vmax/test_vmax.py": ["/delfin/drivers/dell_emc/vmax/vmax.py"], "/delfin/tests/unit/drivers/dell_emc/vnx/vnx_block/test_vnx_block.py": ["/delfin/drivers/dell_emc/vnx/vnx_block/navicli_client.py"]}
|
33,893,288
|
jiangyutan/delfin
|
refs/heads/master
|
/delfin/drivers/dell_emc/vnx/vnx_block/alert_handler.py
|
# Copyright 2021 The SODA Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http:#www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import hashlib
import time
import six
from oslo_log import log as logging
from oslo_utils import units
from delfin import exception
from delfin.common import constants
from delfin.drivers.dell_emc.vnx.vnx_block import consts
from delfin.i18n import _
LOG = logging.getLogger(__name__)
class AlertHandler(object):
    """Convert VNX block alert sources (SNMP traps and navicli event
    logs) into delfin alert-model dicts and de-duplicate them."""
    def __init__(self, navi_handler):
        # navi_handler performs the actual navicli queries
        # (get_domain, get_log) used by list_alerts().
        self.navi_handler = navi_handler
    @staticmethod
    def parse_alert(alert):
        """Build one alert-model dict from an SNMP trap payload.

        :param alert: dict of trap varbinds keyed by OID strings.
        :raises exception.InvalidResults: when expected attributes are
            missing or malformed in the trap message.
        """
        try:
            alert_model = dict()
            # Normalize the message code so it always carries a '0x' prefix.
            alert_model['alert_id'] = AlertHandler.check_event_code(
                alert.get(consts.OID_MESSAGECODE))
            alert_model['alert_name'] = alert.get(consts.OID_DETAILS)
            alert_model['severity'] = consts.TRAP_LEVEL_MAP.get(
                alert.get(consts.OID_SEVERITY),
                constants.Severity.INFORMATIONAL)
            alert_model['category'] = constants.Category.FAULT
            alert_model['type'] = constants.EventType.EQUIPMENT_ALARM
            # Traps carry no usable timestamp here; stamp with receipt
            # time in milliseconds (units.k == 1000).
            alert_model['occur_time'] = int(time.time() * units.k)
            alert_model['description'] = alert.get(consts.OID_DETAILS)
            alert_model['resource_type'] = constants.DEFAULT_RESOURCE_TYPE
            # md5 of the detail text serves as the de-duplication key.
            alert_model['match_key'] = hashlib.md5(
                alert.get(consts.OID_DETAILS, '').encode()).hexdigest()
            return alert_model
        except Exception as e:
            LOG.error(e)
            msg = (_("Failed to build alert model as some attributes missing "
                     "in alert message."))
            raise exception.InvalidResults(msg)
    def handle_alerts(self, alerts):
        """Convert parsed navicli log entries into alert-model dicts.

        :param alerts: iterable of dicts with 'event_code', 'message'
            and 'log_time_stamp' keys (as produced by the log parser).
        """
        alert_list = []
        for alert in alerts:
            alert_model = {
                'alert_id': AlertHandler.check_event_code(
                    alert.get('event_code')),
                'alert_name': alert.get('message'),
                # The first two characters of the event code map to a
                # severity level via SEVERITY_MAP.
                'severity': consts.SEVERITY_MAP.get(
                    alert.get('event_code')[0:2]),
                'category': constants.Category.FAULT,
                'type': constants.EventType.EQUIPMENT_ALARM,
                'occur_time': alert.get('log_time_stamp'),
                'description': alert.get('message'),
                'resource_type': constants.DEFAULT_RESOURCE_TYPE,
                'match_key': hashlib.md5(
                    alert.get('message', '').encode()).hexdigest()
            }
            alert_list.append(alert_model)
        return alert_list
    def list_alerts(self, query_para):
        """Collect alerts from every SP found in the domain (falling
        back to the configured host when no domain info is returned)
        and return the de-duplicated list."""
        alert_lists = []
        domains = self.navi_handler.get_domain()
        host_ip_list = []
        if domains:
            for domain in domains:
                host_ip = domain.get('ip_address')
                if host_ip:
                    host_ip_list.append(host_ip)
        else:
            host_ip_list.append(self.navi_handler.navi_host)
        for host_ip in host_ip_list:
            alerts = self.navi_handler.get_log(host_ip, query_para)
            alert_list = self.handle_alerts(alerts)
            if alert_list:
                alert_lists.extend(alert_list)
        alert_lists = self.remove_duplication_alert(alert_lists)
        return alert_lists
    def get_sort_key(self, alert):
        # Composite sort key (id + description + time): duplicates
        # become adjacent after sorting, which remove_duplication_alert
        # relies on.
        return '%s%s%s' % (
            alert.get('alert_id'), alert.get('description'),
            str(alert.get('occur_time')))
    def remove_duplication_alert(self, alert_lists):
        """Keep only the newest alert of each (alert_id, description)
        pair.

        Sorts in reverse, then walks from the tail comparing each entry
        against the current survivor and removing whichever of the two
        is older. Mutates and returns alert_lists.
        :raises exception.InvalidResults: on unexpected data.
        """
        try:
            if alert_lists:
                alert_lists.sort(key=self.get_sort_key, reverse=True)
                alert = alert_lists[-1]
                for i in range(len(alert_lists) - 2, -1, -1):
                    main_alert_key = '%s%s' % (
                        alert.get('alert_id'), alert.get('description'))
                    other_alert_key = '%s%s' % (alert_lists[i].get('alert_id'),
                                                alert_lists[i].get(
                                                    'description'))
                    if main_alert_key == other_alert_key:
                        if alert.get('occur_time') > alert_lists[i].get(
                                'occur_time'):
                            alert_lists.remove(alert_lists[i])
                        else:
                            alert_lists.remove(alert)
                            alert = alert_lists[i]
                    else:
                        alert = alert_lists[i]
            return alert_lists
        except Exception as e:
            err_msg = "remove duplication failed: %s" % (six.text_type(e))
            LOG.error(err_msg)
            raise exception.InvalidResults(err_msg)
    @staticmethod
    def check_event_code(event_code):
        """Return the event code prefixed with '0x' when it lacks one."""
        if '0x' not in event_code:
            event_code = '0x%s' % event_code
        return event_code
|
{"/delfin/tests/unit/drivers/dell_emc/unity/test_emc_unity.py": ["/delfin/drivers/dell_emc/unity/rest_handler.py"], "/delfin/tests/unit/drivers/dell_emc/vmax/test_vmax.py": ["/delfin/drivers/dell_emc/vmax/vmax.py"], "/delfin/tests/unit/drivers/dell_emc/vnx/vnx_block/test_vnx_block.py": ["/delfin/drivers/dell_emc/vnx/vnx_block/navicli_client.py"]}
|
33,893,289
|
jiangyutan/delfin
|
refs/heads/master
|
/delfin/drivers/utils/tools.py
|
# Copyright 2021 The SODA Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import time
from oslo_log import log as logging
from oslo_utils import units
LOG = logging.getLogger(__name__)
class Tools(object):
    """Helpers for converting between formatted time strings and epoch
    timestamps expressed in milliseconds."""
    def time_str_to_timestamp(self, time_str, time_pattern):
        """Parse *time_str* with *time_pattern* into a millisecond epoch
        timestamp; a falsy input yields an empty string."""
        if not time_str:
            return ''
        parsed = time.strptime(time_str, time_pattern)
        return int(time.mktime(parsed) * units.k)
    def timestamp_to_time_str(self, time_stamp, time_pattern):
        """Render a millisecond epoch *time_stamp* as a local-time string
        using *time_pattern*; a falsy input yields an empty string."""
        if not time_stamp:
            return ''
        local_time = time.localtime(time_stamp / units.k)
        return time.strftime(time_pattern, local_time)
|
{"/delfin/tests/unit/drivers/dell_emc/unity/test_emc_unity.py": ["/delfin/drivers/dell_emc/unity/rest_handler.py"], "/delfin/tests/unit/drivers/dell_emc/vmax/test_vmax.py": ["/delfin/drivers/dell_emc/vmax/vmax.py"], "/delfin/tests/unit/drivers/dell_emc/vnx/vnx_block/test_vnx_block.py": ["/delfin/drivers/dell_emc/vnx/vnx_block/navicli_client.py"]}
|
33,893,290
|
jiangyutan/delfin
|
refs/heads/master
|
/delfin/tests/unit/drivers/dell_emc/vnx/vnx_block/test_vnx_block.py
|
# Copyright 2021 The SODA Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
from unittest import TestCase, mock
sys.modules['delfin.cryptor'] = mock.Mock()
from delfin import context
from delfin.drivers.dell_emc.vnx.vnx_block.navi_handler import NaviHandler
from delfin.drivers.dell_emc.vnx.vnx_block.navicli_client import NaviClient
from delfin.drivers.dell_emc.vnx.vnx_block.vnx_block import VnxBlockStorDriver
# Connection parameters used by every driver/handler built in these
# tests; "password" is base64-encoded ("cGFzc3dvcmQ=").
ACCESS_INFO = {
    "storage_id": "12345",
    "vendor": "dell_emc",
    "model": "vnx_block",
    "cli": {
        "host": "110.143.132.231",
        "port": 22,
        "username": "user",
        "password": "cGFzc3dvcmQ="
    }
}
# Canned navicli command output, fed to the driver via NaviClient.exec
# mocks below.
AGENT_INFOS = """
Agent Rev: 7.33.1 (0.38)
Name: K10
Desc:
Revision: 05.33.000.5.038
Model: VNX5400
Serial No: CETV00000001
"""
DOMAIN_INFOS = """
Node: APM00011111111
IP Address: 111.222.33.55
(Master)
Name: CX300I_33_55
Port: 80
Secure Port: 443
IP Address: 111.222.33.44
Name: CX300I_33_44
Port: 80
Secure Port: 443
"""
DISK_INFOS = """
Bus 0 Enclosure 0 Disk 0
State: Enabled
Capacity: 54969
"""
POOL_INFOS = """
Pool Name: Pool 1
Pool ID: 1
Description:
State: Offline
Status: Storage Pool requires recovery. service provider(0x712d8518)
User Capacity (GBs): 8583.732
Consumed Capacity (GBs): 8479.780
Available Capacity (GBs): 103.953
Total Subscribed Capacity (GBs): 8479.780
"""
RAID_INFOS = """
RaidGroup ID: 0
RaidGroup State: Valid_luns
Raw Capacity (Blocks): 1688426496
Logical Capacity (Blocks): 1688420352
Free Capacity (Blocks,non-contiguous): 522260480
"""
LUN_INFOS = """
LOGICAL UNIT NUMBER 239
Name: sun_data_VNX_2
User Capacity (GBs): 9.000
Consumed Capacity (GBs): 1.753
Pool Name: Migration_pool
Current State: Ready
Status: OK(0x0)
Is Thin LUN: Yes
Is Compressed: No
"""
GET_ALL_LUN_INFOS = """
LOGICAL UNIT NUMBER 186
Name LN_10G_01
RAIDGroup ID: 1
State: Bound
LUN Capacity(Megabytes): 10240
Is Thin LUN: YES
"""
LOG_INFOS = """
09/14/2020 19:03:25 N/A (7606)Thinpool (Migration_pool) is (
03/25/2020 13:30:17 N/A (2006)Able to read events from the W
"""
OTHER_LOG_INFOS = """
03/25/2020 00:13:03 N/A (4600)'Capture the array configurati
03/25/2020 13:30:17 N/A (76cc)Navisphere Agent, version 7.33
09/14/2020 20:03:25 N/A (7606)Thinpool (Migration_pool) is (
"""
# Expected results asserted by the tests below; each mirrors one of the
# raw navicli outputs above after driver parsing/conversion.
AGENT_RESULT = {
    'agent_rev': '7.33.1 (0.38)',
    'name': 'K10',
    'desc': '',
    'revision': '05.33.000.5.038',
    'model': 'VNX5400',
    'serial_no': 'CETV00000001'
}
STORAGE_RESULT = {
    'name': 'APM00011111111',
    'vendor': 'DELL EMC',
    'model': 'VNX5400',
    'status': 'normal',
    'serial_number': 'CETV00000001',
    'firmware_version': '05.33.000.5.038',
    'total_capacity': 10081183274631,
    'raw_capacity': 57639174144,
    'used_capacity': 9702168298782,
    'free_capacity': 379016049590
}
DOMAIN_RESULT = [
    {
        'node': 'APM00011111111',
        'ip_address': '111.222.33.55',
        'master': 'True',
        'name': 'CX300I_33_55',
        'port': '80',
        'secure_port': '443'
    }]
POOLS_RESULT = [
    {
        'name': 'Pool 1',
        'storage_id': '12345',
        'native_storage_pool_id': '1',
        'description': '',
        'status': 'offline',
        'storage_type': 'block',
        'total_capacity': 9216712054407,
        'subscribed_capacity': 9105094444318,
        'used_capacity': 9105094444318,
        'free_capacity': 111618683830
    }]
RAID_RESULT = [
    {
        'raidgroup_id': '0',
        'raidgroup_state': 'Valid_luns',
        'raw_capacity_blocks': '1688426496',
        'logical_capacity_blocks': '1688420352',
        'free_capacity_blocks,non-contiguous': '522260480'
    }]
ALL_LUN_RESULT = [
    {
        'logical_unit_number': '186',
        'name': 'LN_10G_01',
        'raidgroup_id': '1',
        'state': 'Bound',
        'lun_capacitymegabytes': '10240',
        'is_thin_lun': 'YES'
    }]
LOG_RESULT = [
    {
        'log_time': '09/14/2020 19:03:25',
        'log_time_stamp': 1600081405000,
        'event_code': '7606',
        'message': 'Thinpool (Migration_pool) is ('
    }]
POOLS_ANALYSE_RESULT = [{
    'pool_name': 'Pool 1',
    'pool_id': '1',
    'description': '',
    'state': 'Offline',
    'status': 'Storage Pool requires recovery. service provider(0x712d8518)',
    'user_capacity_gbs': '8583.732',
    'consumed_capacity_gbs': '8479.780',
    'available_capacity_gbs': '103.953',
    'total_subscribed_capacity_gbs': '8479.780'
}]
VOLUMES_RESULT = [
    {
        'name': 'sun_data_VNX_2',
        'storage_id': '12345',
        'status': 'normal',
        'native_volume_id': '239',
        'native_storage_pool_id': '',
        'type': 'thin',
        'total_capacity': 9663676416,
        'used_capacity': 1882269417,
        'free_capacity': 7781406998,
        'compressed': False
    }]
ALERTS_RESULT = [
    {
        'alert_id': '0x76cc',
        'alert_name': 'Navisphere Agent, version 7.33',
        'severity': 'Critical',
        'category': 'Fault',
        'type': 'EquipmentAlarm',
        'occur_time': 1585114217000,
        'description': 'Navisphere Agent, version 7.33',
        'resource_type': 'Storage',
        'match_key': 'b969bbaa22b62ebcad4074618cc29b94'
    },
    {
        'alert_id': '7606',
        'alert_name': 'Thinpool (Migration_pool) is (',
        'severity': 'Critical',
        'category': 'Fault',
        'type': 'EquipmentAlarm',
        'occur_time': 1600081405000,
        'description': 'Thinpool (Migration_pool) is (',
        'resource_type': 'Storage',
        'match_key': '65a5b90e11842a2aedf3bfab471f7701'
    }]
ALERT_RESULT = {
    'alert_id': '0x761f',
    'alert_name': 'Unisphere can no longer manage',
    'severity': 'Critical',
    'category': 'Fault',
    'type': 'EquipmentAlarm',
    'occur_time': 1614310456716,
    'description': 'Unisphere can no longer manage',
    'resource_type': 'Storage',
    'match_key': '8e97fe0af779d78bad8f2de52e15c65c'
}
def create_driver():
    """Build a VnxBlockStorDriver with the navicli login stubbed out.

    Fix: the mocked login now returns the version *string* directly.
    The original wrapped it in a one-element set literal
    ({"05.33.000.5.038_test"}), inconsistent with test_init, which
    mocks the same call with the plain string and asserts equality.
    """
    NaviHandler.login = mock.Mock(return_value="05.33.000.5.038_test")
    return VnxBlockStorDriver(**ACCESS_INFO)
class TestVnxBlocktorageDriver(TestCase):
    """Unit tests for VnxBlockStorDriver with all navicli I/O mocked."""
    # Shared driver instance; navicli login is stubbed in create_driver().
    driver = create_driver()
    def test_init(self):
        """Driver stores the version string returned by login."""
        NaviHandler.login = mock.Mock(return_value="05.33.000.5.038_test")
        vnx = VnxBlockStorDriver(**ACCESS_INFO)
        self.assertEqual(vnx.version, "05.33.000.5.038_test")
    def test_get_storage(self):
        # side_effect order must match the sequence of navicli calls
        # issued inside get_storage — TODO confirm against the driver.
        NaviClient.exec = mock.Mock(
            side_effect=[DOMAIN_INFOS, AGENT_INFOS, DISK_INFOS, POOL_INFOS,
                         RAID_INFOS])
        storage = self.driver.get_storage(context)
        self.assertDictEqual(storage, STORAGE_RESULT)
    def test_get_pools(self):
        """Pool listing merges thin-pool and raid-group output."""
        NaviClient.exec = mock.Mock(side_effect=[POOL_INFOS, RAID_INFOS])
        pools = self.driver.list_storage_pools(context)
        self.assertDictEqual(pools[0], POOLS_RESULT[0])
    def test_get_volumes(self):
        NaviClient.exec = mock.Mock(
            side_effect=[LUN_INFOS, POOL_INFOS, GET_ALL_LUN_INFOS])
        volumes = self.driver.list_volumes(context)
        self.assertDictEqual(volumes[0], VOLUMES_RESULT[0])
    def test_get_alerts(self):
        NaviClient.exec = mock.Mock(
            side_effect=[DOMAIN_INFOS, LOG_INFOS, OTHER_LOG_INFOS])
        alerts = self.driver.list_alerts(context, None)
        # occur_time depends on the local timezone of the test host, so
        # copy the actual value into the expectation before comparing.
        ALERTS_RESULT[0]['occur_time'] = alerts[0]['occur_time']
        self.assertDictEqual(alerts[0], ALERTS_RESULT[0])
    def test_parse_alert(self):
        """An SNMP trap payload is converted into an alert model."""
        alert = {
            '1.3.6.1.6.3.1.1.4.1.0': '1.3.6.1.4.1.1981.0.6',
            '1.3.6.1.4.1.1981.1.4.3': 'A-CETV00000001',
            '1.3.6.1.4.1.1981.1.4.4': 'K10',
            '1.3.6.1.4.1.1981.1.4.5': '761f',
            '1.3.6.1.4.1.1981.1.4.6': 'Unisphere can no longer manage',
            '1.3.6.1.4.1.1981.1.4.7': 'VNX5400'
        }
        alert = self.driver.parse_alert(context, alert)
        # occur_time is stamped at parse time; copy it before comparing.
        ALERT_RESULT['occur_time'] = alert['occur_time']
        self.assertDictEqual(alert, ALERT_RESULT)
    def test_cli_res_to_dict(self):
        navi_handler = NaviHandler(**ACCESS_INFO)
        agent_re = navi_handler.cli_res_to_dict(AGENT_INFOS)
        self.assertDictEqual(agent_re, AGENT_RESULT)
    def test_cli_res_to_list(self):
        navi_handler = NaviHandler(**ACCESS_INFO)
        re_list = navi_handler.cli_res_to_list(POOL_INFOS)
        self.assertDictEqual(re_list[0], POOLS_ANALYSE_RESULT[0])
    def test_cli_domain_to_dict(self):
        navi_handler = NaviHandler(**ACCESS_INFO)
        re_list = navi_handler.cli_domain_to_dict(DOMAIN_INFOS)
        self.assertDictEqual(re_list[0], DOMAIN_RESULT[0])
    def test_cli_lun_to_list(self):
        navi_handler = NaviHandler(**ACCESS_INFO)
        re_list = navi_handler.cli_lun_to_list(GET_ALL_LUN_INFOS)
        self.assertDictEqual(re_list[0], ALL_LUN_RESULT[0])
    def test_cli_log_to_list(self):
        navi_handler = NaviHandler(**ACCESS_INFO)
        re_list = navi_handler.cli_log_to_list(LOG_INFOS)
        # log_time_stamp is timezone-dependent; copy before comparing.
        LOG_RESULT[0]['log_time_stamp'] = re_list[0]['log_time_stamp']
        self.assertDictEqual(re_list[0], LOG_RESULT[0])
    @mock.patch.object(NaviClient, 'exec')
    def test_init_cli(self, mock_exec):
        """navi_exe delegates to NaviClient.exec exactly once."""
        mock_exec.return_value = 'test'
        navi_handler = NaviHandler(**ACCESS_INFO)
        re = navi_handler.navi_exe('abc')
        self.assertEqual(re, 'test')
        self.assertEqual(mock_exec.call_count, 1)
    @mock.patch.object(NaviClient, 'exec')
    def test_remove_cer(self, mock_exec):
        navi_handler = NaviHandler(**ACCESS_INFO)
        navi_handler.remove_cer()
        self.assertEqual(mock_exec.call_count, 1)
    def test_err_cli_res_to_dict(self):
        """Non-string input raises a wrapped parsing error."""
        with self.assertRaises(Exception) as exc:
            navi_handler = NaviHandler(**ACCESS_INFO)
            navi_handler.cli_res_to_dict({})
        self.assertIn('arrange resource info error', str(exc.exception))
    def test_err_cli_res_to_list(self):
        with self.assertRaises(Exception) as exc:
            navi_handler = NaviHandler(**ACCESS_INFO)
            navi_handler.cli_res_to_list({})
        self.assertIn('cli resource to list error', str(exc.exception))
|
{"/delfin/tests/unit/drivers/dell_emc/unity/test_emc_unity.py": ["/delfin/drivers/dell_emc/unity/rest_handler.py"], "/delfin/tests/unit/drivers/dell_emc/vmax/test_vmax.py": ["/delfin/drivers/dell_emc/vmax/vmax.py"], "/delfin/tests/unit/drivers/dell_emc/vnx/vnx_block/test_vnx_block.py": ["/delfin/drivers/dell_emc/vnx/vnx_block/navicli_client.py"]}
|
33,893,291
|
jiangyutan/delfin
|
refs/heads/master
|
/delfin/tests/unit/drivers/hpe/hpe_3par/test_hpe_3parstor.py
|
# Copyright 2020 The SODA Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
from unittest import TestCase, mock
from delfin.drivers.hpe.hpe_3par.alert_handler import AlertHandler
sys.modules['delfin.cryptor'] = mock.Mock()
from delfin import exception
from delfin import context
from delfin.drivers.hpe.hpe_3par.hpe_3parstor import Hpe3parStorDriver
from delfin.drivers.hpe.hpe_3par.rest_handler import RestHandler
from delfin.drivers.hpe.hpe_3par.ssh_handler import SSHHandler
from delfin.drivers.utils.rest_client import RestClient
from requests import Session
class Request:
    """Minimal stand-in for a WSGI request carrying a delfin context.

    Fix: dropped the dead ``pass`` statement that followed the
    assignment in ``__init__``.
    """
    def __init__(self):
        # environ mimics the key delfin middleware injects.
        self.environ = {'delfin.context': context.RequestContext()}
# Connection parameters shared by all HPE 3PAR driver tests; the driver
# uses both a REST (WSAPI) and an SSH channel. "cGFzc3dvcmQ=" is
# base64 for "password".
ACCESS_INFO = {
    "storage_id": "12345",
    "vendor": "hpe",
    "model": "3par",
    "rest": {
        "host": "10.0.0.1",
        "port": 8443,
        "username": "user",
        "password": "cGFzc3dvcmQ="
    },
    "ssh": {
        "host": "110.143.132.231",
        "port": 22,
        "username": "user",
        "password": "cGFzc3dvcmQ="
    }
}
def create_driver():
    """Return an Hpe3parStorDriver whose SSH login and REST session-key
    POST are both stubbed out."""
    SSHHandler.login = mock.Mock(
        return_value={"result": "success", "reason": "null"})
    # Fake the WSAPI credentials response consumed during REST login.
    response = mock.MagicMock(status_code=201)
    response.raise_for_status.return_value = 201
    response.json.return_value = {'key': 'deviceid123ABC456'}
    with mock.patch.object(Session, 'post', return_value=response):
        return Hpe3parStorDriver(**ACCESS_INFO)
class TestHpe3parStorageDriver(TestCase):
    """Unit tests for Hpe3parStorDriver with SSH and REST access mocked.

    Test names are prefixed (a/b/d/e/f/h) to force execution order under
    unittest's alphabetical sorting.
    """
    def test_a_init(self):
        """Driver construction succeeds when both logins are stubbed."""
        kwargs = ACCESS_INFO
        SSHHandler.login = mock.Mock(
            return_value={""})
        RestHandler.login = mock.Mock(
            return_value={""})
        Hpe3parStorDriver(**kwargs)
    def test_b_initrest(self):
        """RestHandler can wrap a RestClient whose session-key POST is
        faked."""
        m = mock.MagicMock()
        with mock.patch.object(Session, 'post', return_value=m):
            m.raise_for_status.return_value = 201
            m.json.return_value = {
                'key': '1&2F28CA9FC1EA0B8EAB80E9D8FD'
            }
            kwargs = ACCESS_INFO
            rc = RestClient(**kwargs)
            RestHandler(rc)
    def test_d_get_storage(self):
        """get_storage merges system info, capacity and SSH health state."""
        driver = create_driver()
        expected = {
            'name': 'hp3parf200',
            'vendor': 'HPE',
            'model': 'InServ F200',
            'status': 'abnormal',
            'serial_number': '1307327',
            'firmware_version': '3.1.2.484',
            'location': None,
            'total_capacity': 7793486594048,
            'raw_capacity': 9594956939264,
            'used_capacity': 6087847706624,
            'free_capacity': 1705638887424
        }
        # Canned WSAPI /system response.
        ret = {
            "id": 7327,
            "name": "hp3parf200",
            "systemVersion": "3.1.2.484",
            "IPv4Addr": "100.157.92.213",
            "model": "InServ F200",
            "serialNumber": "1307327",
            "totalNodes": 2,
            "masterNode": 0,
            "onlineNodes": [
                0,
                1
            ],
            "clusterNodes": [
                0,
                1
            ],
            "chunkletSizeMiB": 256,
            "totalCapacityMiB": 9150464,
            "allocatedCapacityMiB": 5805824,
            "freeCapacityMiB": 1626624,
            "failedCapacityMiB": 1718016,
            "timeZone": "Asia/Shanghai"
        }
        RestHandler.get_capacity = mock.Mock(
            return_value={
                "allCapacity": {
                    "totalMiB": 9150464,
                    "allocated": {
                        "system": {
                            "totalSystemMiB": 1232384,
                            "internalMiB": 303104,
                            "spareMiB": 929280,
                            "spareUsedMiB": 307456,
                            "spareUnusedMiB": 621824
                        }
                    }
                }
            }
        )
        # Degraded PDs reported over SSH should yield status 'abnormal'.
        health_state = 'PDs that are degraded'
        SSHHandler.get_health_state = mock.Mock(return_value=health_state)
        m = mock.MagicMock(status_code=200)
        with mock.patch.object(RestHandler, 'call', return_value=m):
            m.raise_for_status.return_value = 200
            m.json.return_value = ret
            storage = driver.get_storage(context)
            self.assertDictEqual(storage, expected)
    def test_e_list_storage_pools(self):
        """CPGs returned by the WSAPI are mapped to delfin pool models;
        a backend failure surfaces as an exception."""
        driver = create_driver()
        expected = [
            {
                'name': 'test',
                'storage_id': '12345',
                'native_storage_pool_id': '0',
                'description': 'Hpe 3par CPG:test',
                'status': 'normal',
                'storage_type': 'block',
                'total_capacity': 2003870679040,
                'subscribed_capacity': 2917892358144,
                'used_capacity': 1448343502848,
                'free_capacity': 555527176192
            }, {
                'name': 'cxd',
                'storage_id': '12345',
                'native_storage_pool_id': '1',
                'description': 'Hpe 3par CPG:cxd',
                'status': 'normal',
                'storage_type': 'block',
                'total_capacity': 1744025157632,
                'subscribed_capacity': 2200095948800,
                'used_capacity': 1696512081920,
                'free_capacity': 47513075712
            }
        ]
        # Canned WSAPI /cpgs response (two CPGs).
        ret = [
            {
                "total": 2,
                "members": [
                    {
                        "id": 0,
                        "uuid": "aa43f218-d3dd-4626-948f-8a160b0eac1d",
                        "name": "test",
                        "numFPVVs": 21,
                        "numTPVVs": 25,
                        "UsrUsage": {
                            "totalMiB": 1381504,
                            "rawTotalMiB": 1842004,
                            "usedMiB": 1376128,
                            "rawUsedMiB": 712703
                        },
                        "SAUsage": {
                            "totalMiB": 140800,
                            "rawTotalMiB": 422400,
                            "usedMiB": 5120,
                            "rawUsedMiB": 15360
                        },
                        "SDUsage": {
                            "totalMiB": 388736,
                            "rawTotalMiB": 518315,
                            "usedMiB": 0,
                            "rawUsedMiB": 0
                        },
                        "SAGrowth": {
                            "incrementMiB": 8192,
                            "LDLayout": {
                                "HA": 3,
                                "diskPatterns": [
                                    {
                                        "diskType": 1
                                    }
                                ]
                            }
                        },
                        "SDGrowth": {
                            "incrementMiB": 32768,
                            "LDLayout": {
                                "RAIDType": 3,
                                "HA": 3,
                                "setSize": 4,
                                "chunkletPosPref": 1,
                                "diskPatterns": [
                                    {
                                        "diskType": 1
                                    }
                                ]
                            }
                        },
                        "state": 1,
                        "failedStates": [],
                        "degradedStates": [],
                        "additionalStates": []
                    },
                    {
                        "id": 1,
                        "uuid": "c392910e-7648-4972-b594-47dd3d28f3ec",
                        "name": "cxd",
                        "numFPVVs": 14,
                        "numTPVVs": 319,
                        "UsrUsage": {
                            "totalMiB": 1418752,
                            "rawTotalMiB": 1702500,
                            "usedMiB": 1417984,
                            "rawUsedMiB": 568934
                        },
                        "SAUsage": {
                            "totalMiB": 56832,
                            "rawTotalMiB": 170496,
                            "usedMiB": 42752,
                            "rawUsedMiB": 128256
                        },
                        "SDUsage": {
                            "totalMiB": 187648,
                            "rawTotalMiB": 225179,
                            "usedMiB": 157184,
                            "rawUsedMiB": 188620
                        },
                        "SAGrowth": {
                            "incrementMiB": 8192,
                            "LDLayout": {
                                "HA": 3,
                                "diskPatterns": [
                                    {
                                        "diskType": 1
                                    }
                                ]
                            }
                        },
                        "SDGrowth": {
                            "incrementMiB": 32768,
                            "LDLayout": {
                                "RAIDType": 3,
                                "HA": 3,
                                "setSize": 6,
                                "chunkletPosPref": 1,
                                "diskPatterns": [
                                    {
                                        "diskType": 1
                                    }
                                ]
                            }
                        },
                        "state": 1,
                        "failedStates": [],
                        "degradedStates": [],
                        "additionalStates": []
                    }
                ]
            }
        ]
        with mock.patch.object(RestHandler, 'get_resinfo_call',
                               side_effect=ret):
            pools = driver.list_storage_pools(context)
            self.assertDictEqual(pools[0], expected[0])
            self.assertDictEqual(pools[1], expected[1])
        with mock.patch.object(RestHandler, 'get_all_pools',
                               side_effect=exception.DelfinException):
            with self.assertRaises(Exception) as exc:
                driver.list_storage_pools(context)
            self.assertIn('An unknown exception occurred',
                          str(exc.exception))
    def test_f_list_volumes(self):
        """Volumes from the WSAPI are mapped to delfin volume models."""
        driver = create_driver()
        expected = [{
            'name': 'admin',
            'storage_id': '12345',
            'description': None,
            'status': 'normal',
            'native_volume_id': '0',
            'native_storage_pool_id': '',
            'wwn': '50002AC000001C9F',
            'type': 'thick',
            'total_capacity': 10737418240,
            'used_capacity': 10737418240,
            'free_capacity': 0,
            'compressed': True,
            'deduplicated': True
        }]
        ret = [{
            "members": [{
                "id": 0,
                "name": "admin",
                "provisioningType": 1,
                "copyType": 1,
                "baseId": 0,
                "readOnly": False,
                "state": 1,
                "userSpace": {
                    "reservedMiB": 10240,
                    "rawReservedMiB": 20480,
                    "usedMiB": 10240,
                    "freeMiB": 0
                },
                "sizeMiB": 10240,
                "wwn": "50002AC000001C9F"
            }]
        }]
        pool_ret = {
            "members": [{
                "id": 0,
                "uuid": "aa43f218-d3dd-4626-948f-8a160b0eac1d",
                "name": "test"
            }]
        }
        RestHandler.get_all_pools = mock.Mock(return_value=pool_ret)
        with mock.patch.object(RestHandler, 'get_resinfo_call',
                               side_effect=ret):
            volumes = driver.list_volumes(context)
            self.assertDictEqual(volumes[0], expected[0])
    def test_h_parse_alert(self):
        """ Success flow with all necessary parameters"""
        driver = create_driver()
        # Trap varbinds keyed by OID string, as delivered by the SNMP
        # trap receiver.
        alert = {
            'sysUpTime': '1399844806',
            'snmpTrapOID': 'alertNotify',
            '1.3.6.1.4.1.12925.1.7.1.5.1': 'test_trap',
            '1.3.6.1.4.1.12925.1.7.1.6.1': 'This is a test trap',
            'nodeID': '0',
            '1.3.6.1.4.1.12925.1.7.1.2.1': '6',
            '1.3.6.1.4.1.12925.1.7.1.3.1': 'test time',
            '1.3.6.1.4.1.12925.1.7.1.7.1': '89',
            '1.3.6.1.4.1.12925.1.7.1.8.1': '2555934',
            '1.3.6.1.4.1.12925.1.7.1.9.1': '5',
            'serialNumber': '1307327',
            'transport_address': '100.118.18.100',
            'storage_id': '1c094309-70f2-4da3-ac47-e87cc1492ad5'
        }
        expected_alert_model = {
            'alert_id': '0x027001e',
            'alert_name': 'CPG growth non admin limit',
            'severity': 'NotSpecified',
            'category': 'Recovery',
            'type': 'EquipmentAlarm',
            'sequence_number': '89',
            'description': 'This is a test trap',
            'resource_type': 'Storage',
            'location': 'test_trap',
            'occur_time': '',
            'clear_category': 'Automatic'
        }
        # Shadows the imported delfin context on purpose; parse_alert
        # does not use it.
        context = {}
        alert_model = driver.parse_alert(context, alert)
        # Verify that all other fields are matching
        self.assertDictEqual(expected_alert_model, alert_model)
    def test_list_alert(self):
        """ Success flow with all necessary parameters"""
        driver = create_driver()
        # Raw `showalert` CLI output retrieved over SSH.
        alert = """
        Id : 1
        State : New
        MessageCode : 0x2200de
        Time : 2015-07-17 20:14:29 PDT
        Severity : Degraded
        Type : Component state change
        Message : Node 0, Power Supply 1, Battery 0 Degraded
        Component: 100.118.18.100
        """
        expected_alert = [{
            'alert_id': '0x2200de',
            'alert_name': 'Component state change',
            'severity': 'Warning',
            'category': 'Fault',
            'type': 'EquipmentAlarm',
            'sequence_number': '1',
            'occur_time': 1437135269000,
            'description': 'Node 0, Power Supply 1, Battery 0 Degraded',
            'resource_type': 'Storage',
            'location': '100.118.18.100'
        }]
        SSHHandler.get_all_alerts = mock.Mock(return_value=alert)
        alert_list = driver.list_alerts(context, None)
        # occur_time depends on the local timezone; copy it before
        # comparing the rest of the model.
        expected_alert[0]['occur_time'] = alert_list[0]['occur_time']
        self.assertDictEqual(alert_list[0], expected_alert[0])
    @mock.patch.object(AlertHandler, 'clear_alert')
    def test_clear_alert(self, mock_clear_alert):
        """clear_alert delegates to AlertHandler exactly once."""
        driver = create_driver()
        alert_id = '230584300921369'
        driver.clear_alert(context, alert_id)
        self.assertEqual(mock_clear_alert.call_count, 1)
|
{"/delfin/tests/unit/drivers/dell_emc/unity/test_emc_unity.py": ["/delfin/drivers/dell_emc/unity/rest_handler.py"], "/delfin/tests/unit/drivers/dell_emc/vmax/test_vmax.py": ["/delfin/drivers/dell_emc/vmax/vmax.py"], "/delfin/tests/unit/drivers/dell_emc/vnx/vnx_block/test_vnx_block.py": ["/delfin/drivers/dell_emc/vnx/vnx_block/navicli_client.py"]}
|
33,893,292
|
jiangyutan/delfin
|
refs/heads/master
|
/delfin/tests/unit/drivers/huawei/oceanstor/test_rest_client.py
|
# Copyright 2020 The SODA Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from unittest import TestCase, mock
from requests.sessions import Session
from delfin import exception
from delfin.common import config # noqa
from delfin.drivers.huawei.oceanstor.rest_client import RestClient
# Connection parameters for the OceanStor REST client under test.
# "cGFzc3dvcmQ=" is base64 for "password".
ACCESS_INFO = {
    "storage_id": "12345",
    "vendor": "huawei",
    "model": "oceanstor",
    "rest": {
        "host": "10.0.0.1",
        "port": 1234,
        "username": "user",
        "password": "cGFzc3dvcmQ="
    },
    "extra_attributes": {
        "array_id": "00112233"
    }
}
# Canned OceanStor REST envelope: error code 0 means success; the data
# section carries both login artifacts (deviceid, iBaseToken) and a
# dummy payload asserted by the tests.
RESP = {
    "error": {
        "code": 0
    },
    "data": {
        "data": "dummy",
        "deviceid": "0123456",
        "iBaseToken": "112233",
        "accountstate": "GREEN"
    }
}
class TestOceanStorRestClient(TestCase):
    """Unit tests for the Huawei OceanStor RestClient with HTTP mocked."""
    def _mock_response(
            self,
            status=200,
            content="CONTENT",
            json_data=None,
            raise_for_status=None):
        """Build a requests-style response mock with the given status,
        content and optional json payload / raise_for_status side effect."""
        mock_resp = mock.Mock()
        mock_resp.raise_for_status = mock.Mock()
        if raise_for_status:
            mock_resp.raise_for_status.side_effect = raise_for_status
        mock_resp.status_code = status
        mock_resp.content = content
        if json_data:
            mock_resp.json = mock.Mock(
                return_value=json_data
            )
        return mock_resp
    # @mock.patch.object(RestClient, 'login')
    @mock.patch.object(Session, 'post')
    def test_init(self, mock_rest):
        """Construction logs in and stores the iBaseToken as a header."""
        mock_resp = self._mock_response(json_data=RESP)
        mock_rest.return_value = mock_resp
        kwargs = ACCESS_INFO
        rest_client = RestClient(**kwargs)
        self.assertEqual(rest_client.rest_host, "10.0.0.1")
        self.assertEqual(rest_client.rest_port, 1234)
        self.assertEqual(rest_client.session.headers['iBaseToken'], '112233')
    @mock.patch.object(RestClient, 'login')
    def test_reset_connection(self, mock_login):
        """A failing login surfaces as an invalid-credentials error."""
        mock_login.return_value = None
        kwargs = ACCESS_INFO
        rest_client = RestClient(**kwargs)
        self.assertEqual(rest_client.rest_host, "10.0.0.1")
        self.assertEqual(rest_client.rest_port, 1234)
        mock_login.side_effect = exception.StorageBackendException
        with self.assertRaises(Exception) as exc:
            RestClient(**kwargs)
        self.assertIn('The credentials are invalid',
                      str(exc.exception))
    @mock.patch.object(RestClient, 'call')
    @mock.patch.object(RestClient, 'login')
    def test_get_storage(self, mock_login, mock_call):
        """get_storage returns the data section and raises on a missing
        data section or a non-zero error code."""
        mock_login.return_value = None
        mock_call.return_value = RESP
        kwargs = ACCESS_INFO
        rest_client = RestClient(**kwargs)
        data = rest_client.get_storage()
        self.assertEqual(data['data'], 'dummy')
        mock_call.return_value = {
            "error": {
                "code": 0
            }
        }
        with self.assertRaises(Exception) as exc:
            rest_client.get_storage()
        self.assertIn('Exception from Storage Backend',
                      str(exc.exception))
        mock_call.return_value['error']['code'] = 1
        with self.assertRaises(Exception) as exc:
            rest_client.get_storage()
        self.assertIn('Exception from Storage Backend',
                      str(exc.exception))
    @mock.patch.object(RestClient, 'call')
    @mock.patch.object(RestClient, 'login')
    def test_get_controller(self, mock_login, mock_call):
        """get_all_controllers issues a GET on /controller."""
        mock_login.return_value = None
        mock_call.return_value = RESP
        kwargs = ACCESS_INFO
        rest_client = RestClient(**kwargs)
        data = rest_client.get_all_controllers()
        self.assertEqual(data['data'], 'dummy')
        mock_call.assert_called_with("/controller",
                                     log_filter_flag=True, method='GET')
    @mock.patch.object(RestClient, 'paginated_call')
    @mock.patch.object(RestClient, 'login')
    def test_get_all_pools(self, mock_login, mock_call):
        """get_all_pools issues a paginated GET on /storagepool."""
        mock_login.return_value = None
        mock_call.return_value = RESP
        kwargs = ACCESS_INFO
        rest_client = RestClient(**kwargs)
        data = rest_client.get_all_pools()
        self.assertEqual(data['data']['data'], 'dummy')
        mock_call.assert_called_with("/storagepool", None,
                                     'GET', log_filter_flag=True)
    @mock.patch.object(RestClient, 'paginated_call')
    @mock.patch.object(RestClient, 'login')
    def test_get_volumes(self, mock_login, mock_call):
        """get_all_volumes issues a paginated GET on /lun."""
        mock_login.return_value = None
        mock_call.return_value = RESP
        kwargs = ACCESS_INFO
        rest_client = RestClient(**kwargs)
        data = rest_client.get_all_volumes()
        self.assertEqual(data['data']['data'], 'dummy')
        mock_call.assert_called_with("/lun", None, 'GET',
                                     log_filter_flag=True)
|
{"/delfin/tests/unit/drivers/dell_emc/unity/test_emc_unity.py": ["/delfin/drivers/dell_emc/unity/rest_handler.py"], "/delfin/tests/unit/drivers/dell_emc/vmax/test_vmax.py": ["/delfin/drivers/dell_emc/vmax/vmax.py"], "/delfin/tests/unit/drivers/dell_emc/vnx/vnx_block/test_vnx_block.py": ["/delfin/drivers/dell_emc/vnx/vnx_block/navicli_client.py"]}
|
33,931,781
|
generalsrobotics/HelmetProject
|
refs/heads/master
|
/detect.py
|
import pyrealsense2 as rs
import numpy as np
import cv2
import json
class Detector:
    """Measures the distance to objects behind a cyclist with a RealSense
    depth camera, displays it, and pushes range updates to a websocket
    server for connected clients."""
    def __init__(self,server,w=640,h=480,fps=30):
        # server: a websocket server exposing send_message_to_all();
        # w/h/fps: stream dimensions and frame rate.
        # Configure depth and color streams
        self.pipeline = rs.pipeline()
        self.config = rs.config()
        self.width = w
        self.height = h
        self.fps = fps
        #configure the realsense's color and depth stream given dimensions for width and height
        self.config.enable_stream(rs.stream.depth, self.width, self.height, rs.format.z16, fps)
        self.config.enable_stream(rs.stream.color, self.width, self.height, rs.format.bgr8, fps)
        # Side length (px) of the central measurement box.
        self.box_width = 64
        self.box_height = 64
        #create array containing range values of interest in feet
        self.ranges = [20,10,5]
        self.colors = [(0,255,0),(0,255,255),(0,0,255)] #Green, Yellow, Red (BGR)
        #defining measure area/box coordinates within image
        self.box_width_min = int((self.width - self.box_width)//2 -1) #coordinates to center box within image. This is the first horizontal coordinate for the measure box
        self.box_height_min = int((self.height - self.box_height)//2 -1) #y-coordinate to center box within image
        self.box_width_max = int(self.box_width_min + self.box_width)
        self.box_height_max = int(self.box_height_min + self.box_height)
        # Set by startStream(); depth_scale converts raw depth units to meters.
        self.profile = None
        self.depth_scale = None
        self.server = server
        # Frame counter used to throttle websocket notifications.
        self.counter = 0
    def startStream(self):
        """Start the camera pipeline and read the device's depth scale.
        Must be called before startDetecting()."""
        #start streaming
        self.profile = self.pipeline.start(self.config)
        #get data scale from the realsense to convert distance to meters
        self.depth_scale = self.profile.get_device().first_depth_sensor().get_depth_scale()
    def startDetecting(self):
        """Run the capture/measure/display loop until interrupted;
        always stops the pipeline on exit."""
        try:
            while True:
                #wait for a coherent pair of frames: depth and color
                frames = self.pipeline.wait_for_frames()
                depth_frame = frames.get_depth_frame()
                color_frame = frames.get_color_frame()
                # NOTE(review): frames are not checked for validity before
                # use — verify wait_for_frames always yields both streams.
                # Convert images to numpy arrays
                depth_image = np.asanyarray(depth_frame.get_data())
                color_image = np.asanyarray(color_frame.get_data())
                #view depth data (as a matrix/array) of a meas_width x meas_height box in the center of the image
                resized_depth_image = depth_image[self.box_height_min : self.box_height_max : 1,self.box_width_min : self.box_width_max : 1].astype(float)
                # Get data scale from the device and convert to meters
                resized_depth_image = resized_depth_image * self.depth_scale
                #averaging range information inside the measurement box at center of image
                avg_dist = cv2.mean(resized_depth_image)[0]
                #convert meters to feet
                avg_dist = round(avg_dist * 3.28084)
                #apply colors according to distance and notify user
                # Default to red (closest band); overridden by the checks below.
                text_color = self.colors[2]
                box_color = text_color
                if avg_dist >= self.ranges[0]:
                    text_color = self.colors[0]
                    box_color = text_color
                elif avg_dist >= self.ranges[1]:
                    text_color = self.colors[1]
                    box_color = text_color
                elif avg_dist >= self.ranges[2]:
                    text_color = self.colors[2]
                    box_color = text_color
                # Notify websocket clients roughly every 11th frame only,
                # to limit message volume.
                if self.counter > 10:
                    self.server.send_message_to_all(json.dumps({"range":self.colors.index(text_color),"distance":avg_dist}))
                    self.counter = 0
                else:
                    self.counter = self.counter + 1
                #print rectangle on color image for cyclist's benefit
                cv2.rectangle(color_image, (self.box_width_min, self.box_height_min), (self.box_width_max, self.box_height_max), box_color, 2)
                #creating an 'image' with dimensions as shown below to display the distance information stacked below the color image
                dist_bar_height = 100
                distance_bar = np.zeros([dist_bar_height,self.width,3], dtype=np.uint8) #numpy array that will represent the 'image' or bar to print the distance measurement
                #filling up numpy array with 127's to create a grey background
                distance_bar.fill(127)
                #print distance measurement onto the distance bar image
                cv2.putText(distance_bar, "Average distance to object: "+str(avg_dist) +" feet", (60,30), cv2.FONT_HERSHEY_COMPLEX, .9, text_color)
                #stack images vertically for displaying
                images = np.vstack((color_image,distance_bar))
                #Show images
                cv2.namedWindow('SmartHelmet', cv2.WND_PROP_FULLSCREEN)
                cv2.setWindowProperty('SmartHelmet',cv2.WND_PROP_FULLSCREEN, cv2.WINDOW_FULLSCREEN)
                cv2.imshow('SmartHelmet', images)
                cv2.waitKey(1)
        finally:
            # Stop streaming
            self.pipeline.stop()
|
{"/server.py": ["/detect.py"], "/app.py": ["/detect.py"], "/detect_class_test.py": ["/detect.py"]}
|
33,931,782
|
generalsrobotics/HelmetProject
|
refs/heads/master
|
/server.py
|
from websocket_server import WebsocketServer
from detect import Detector
import threading
# Called for every client connecting (after handshake)
def new_client(client, server):
    """Log the id assigned to a newly connected websocket client."""
    client_id = client['id']
    print("New client connected and was given id %d" % client_id)
# Called for every client disconnecting
def client_left(client, server):
    """Log a websocket client disconnect."""
    print("Client(%d) disconnected" % (client['id'],))
# Called when a client sends a message
def message_received(client, server, message):
    """Log a message received from a client.

    Bug fix: the original applied the %-format to only client['id'] and
    passed `message` as a second positional argument to print(), which
    raised TypeError ("not enough arguments for format string") on every
    incoming message. The format now receives both values as a tuple.
    """
    print("Client(%d) said %s" % (client['id'], message))
# Wire the websocket server to the RealSense detector and start both.
PORT=9001
# Listen on all interfaces.
server = WebsocketServer(PORT,host="0.0.0.0")
server.set_fn_new_client(new_client)
server.set_fn_client_left(client_left)
server.set_fn_message_received(message_received)
detector = Detector(server)
detector.startStream()
# Run the websocket server on a daemon thread so the main thread can own the
# blocking OpenCV/RealSense detection loop.
threading.Thread(target=server.run_forever, daemon = True).start()
detector.startDetecting()
|
{"/server.py": ["/detect.py"], "/app.py": ["/detect.py"], "/detect_class_test.py": ["/detect.py"]}
|
33,950,226
|
erenarkan04/Python
|
refs/heads/master
|
/Classes/ClassesPt6.py
|
# Can make any new class a child of a built in Python class
class Text(str):
    """str subclass demonstrating inheritance from a builtin."""

    def duplicate(self):
        # Repeating the string twice is equivalent to self + self.
        return self * 2
# Demonstrate the method added on top of the inherited str behaviour.
text = Text("Python")
print(text.duplicate())
class TrackableList(list):
    """list subclass that logs every append call."""

    def append(self, object):
        # Robustness fix: str() so appending non-string items no longer
        # raises TypeError when building the log message.
        print("append called: " + str(object))
        super().append(object)
# Fix: the original assigned the instance to a variable named `list`,
# shadowing the builtin for the rest of the module.
tracked = TrackableList()
tracked.append("1")
print(tracked)
|
{"/PythonStandardLibrary/SQLite.py": ["/PythonStandardLibrary/JSON.py"], "/PopularPackages/PyText.py": ["/PopularPackages/config.py"]}
|
33,950,227
|
erenarkan04/Python
|
refs/heads/master
|
/Intro/TutorialPt2.py
|
# Tutorial: conditionals, chained comparisons, loops and for/else.
age = 22
if age >= 18:
    print("adult")
elif age >= 13:
    print("teenager")
else:
    print("child")
name = "eren"
# Truthiness: a non-empty string is truthy, so this prints nothing here.
if not name:
    print("name is empty")
if age <= 65 and age >= 18:
    print("eligible")
# alternate version
if 18 <= age <= 65:
    print("eligible")
# Ternary operator
print("adult") if age >= 18 else print("not adult")
# Loops
for x in "python":
    print(x)
for x in ["a", "b", "c"]:
    print(x)
for x in range(2, 5):
    print(x)
for x in range(0, 10, 2):
    print(x)
# For...else block - will execute if loop completes without breaking
names = ["john", "eren"]
for name in names:
    if name.startswith('j'):
        print("Found")
        break
else:
    print("Not found")
# While loop
guess = 0
answer = 5
# while guess != answer:
#     guess = int(input("Input: "))
# Functions
def increment(number, by):
return (number, number + by)
# Can return a tuple (a function returning more than one value)
print(increment(2, by=3))
# using keyword argument to make code more readable
# can also set default values
def increment(number, by=1):
return (number, number + by)
print(increment(3))
# can pass array of arguments through function
def multiply(*list):
total = 1
for number in list:
total *= number
return total
print(multiply(1, 2, 3, 4))
# can also pass objects through function
# can also collect keyword arguments into a dict
def save_user(**fields):
    """Print the keyword arguments collected as a dict.

    Fix: the double-starred parameter was named `list`, shadowing the
    builtin. Renaming is safe - **kwargs names are never supplied by callers.
    """
    print(fields)
save_user(id=1, name="Eren")
|
{"/PythonStandardLibrary/SQLite.py": ["/PythonStandardLibrary/JSON.py"], "/PopularPackages/PyText.py": ["/PopularPackages/config.py"]}
|
33,950,228
|
erenarkan04/Python
|
refs/heads/master
|
/Data Structures/lists2.py
|
# Tutorial: sorting, lambdas, map/filter, comprehensions, zip, and a stack.
numbers = [5, 6, 34, 7, 9]
numbers.sort()
print(numbers)
numbers.sort(reverse=True)
print(numbers)
# sorted() returns a new list; .sort() mutates in place.
print(sorted(numbers))
print(sorted(numbers, reverse=True))
items = [("a", 3), ("b", 6), ("c", 1)]
def sort (item):
    # Sort key: the second element of each tuple.
    return item[1]
# items.sort(key=sort)
# same thing using lambda expression (temporary function to pass through the sort function
items.sort(key= lambda item: item[1])
print(items)
# Map function
prices = list(map(lambda item:item[1], items))
print(prices)
# Filter function
filtered_prices = list(filter(lambda item: item[1] >= 3, items))
print(filtered_prices)
# Iterator comprehension - same result as map function
prices2 = [item[1] for item in items]
print(prices2)
print(prices2)
prices3 = [item for item in items if item[1] >= 3]
print(prices3)
# Zip function
list1 = [1, 2, 3]
list2 = [4, 5, 6]
print(list(zip(list1, list2)))
# Stack
# with browser back button logic
browsing_session = []
browsing_session.append(1)
browsing_session.append(2)
browsing_session.append(3)
print(browsing_session.pop())
# An empty list is falsy: no pages left means "exit".
if not browsing_session:
    print("exit")
{"/PythonStandardLibrary/SQLite.py": ["/PythonStandardLibrary/JSON.py"], "/PopularPackages/PyText.py": ["/PopularPackages/config.py"]}
|
33,950,229
|
erenarkan04/Python
|
refs/heads/master
|
/Data Structures/dataStructuresEx.py
|
from pprint import pprint
# Interview exercise: most frequent letter in a sentence.
sentence = "this is a common interview question"
# Collect letters in first-appearance order, skipping spaces and duplicates.
# Fix: use the idiomatic `in` operator rather than calling __contains__.
unique_letters = []
for ch in sentence:
    if ch not in unique_letters and ch != " ":
        unique_letters.append(ch)
print(unique_letters)
# Count occurrences of each unique letter (str.count replaces the manual
# inner loop; output is identical).
values = {}
for letter in unique_letters:
    values[letter] = sentence.count(letter)
pprint(values, width=2)
# .items() return the key-value pairs of each item in the dictionary
sorted_values = sorted(values.items(), key=lambda kv: kv[1], reverse=True)
print(sorted_values[0])
|
{"/PythonStandardLibrary/SQLite.py": ["/PythonStandardLibrary/JSON.py"], "/PopularPackages/PyText.py": ["/PopularPackages/config.py"]}
|
33,950,230
|
erenarkan04/Python
|
refs/heads/master
|
/PopularPackages/config.py
|
# NOTE(security): live API credentials are hardcoded and committed here.
# They should be loaded from environment variables or a secrets manager,
# and these values should be considered compromised and rotated.
api_key = "vsbmPwnvEJuNpzB9Aa7hO6dxr4LxNSWlLbAYIKl_D5qUHkM3lcZx_xvJ6E9_2E_RhXSQoTXyIX8Bc1ZAUNZOrOGTkBXSzWuAOFK_E9GpXhr_NvDcRjdRqkb4J_oqYHYx"
account_sid = "ACc7556a365c48236fbd1a9cbc6ebdabba"
auth_token = "284dd2dce01fc8419d526824135e71db"
|
{"/PythonStandardLibrary/SQLite.py": ["/PythonStandardLibrary/JSON.py"], "/PopularPackages/PyText.py": ["/PopularPackages/config.py"]}
|
33,950,231
|
erenarkan04/Python
|
refs/heads/master
|
/PythonStandardLibrary/SQLite.py
|
import sqlite3
import json
from pathlib import Path
from PythonStandardLibrary.JSON import movies
# movies = json.loads(Path("movies.json").read_text())
# print(movies)
# if SQLite object called 'db' doesn't exist the connect function will create it
# with sqlite3.connect("db.sqlite3") as connection:
#     command = "INSERT INTO Movies VALUES(?, ? , ?)"
#     iterate over the movie list and execute the command for each item
#     for movie in movies:
#         # put each set of key/values in a tuple
#         connection.execute(command, tuple(movie.values()))
#     connection.commit()
# Read every row back from the Movies table (assumes db.sqlite3 and the
# Movies table already exist on disk).
with sqlite3.connect("db.sqlite3") as connection:
    command = "SELECT * FROM Movies"
    cursor = connection.execute(command)
    for row in cursor:
        print(row)
|
{"/PythonStandardLibrary/SQLite.py": ["/PythonStandardLibrary/JSON.py"], "/PopularPackages/PyText.py": ["/PopularPackages/config.py"]}
|
33,950,232
|
erenarkan04/Python
|
refs/heads/master
|
/PythonStandardLibrary/Time.py
|
from datetime import datetime, timedelta
import time
# Tutorial: epoch time, timing a function, and datetime arithmetic.
print(time.time())
def send_emails():
    # Stand-in workload used only for timing below.
    for i in range(1000):
        pass
start = time.time()
send_emails()
end = time.time()
print(end - start)
date = datetime(2018, 1, 1)
now = datetime.now()
# Parse a date string into a datetime (result discarded in this demo).
datetime.strptime("2018/1/1", "%Y/%m/%d")
print(datetime.fromtimestamp(time.time()))
print(f"{date.year}, {date.month}")
# Note: date is in the past, so this timedelta is negative.
duration = date - now
print(duration)
# adds +1 day
date2018 = datetime(2018,1,1) + timedelta(days=1)
print(date2018)
|
{"/PythonStandardLibrary/SQLite.py": ["/PythonStandardLibrary/JSON.py"], "/PopularPackages/PyText.py": ["/PopularPackages/config.py"]}
|
33,950,233
|
erenarkan04/Python
|
refs/heads/master
|
/Data Structures/lists.py
|
# Tutorial: list creation, slicing, unpacking, enumerate, and mutation.
letters = ["a", "b", "c"]
matrix = [[1,2], [3,4]]
zeros = [0] * 5
combined = zeros + letters
print(combined)
# can use list() command to convert any iterable to a list
list1 = list(range(1,20))
print(list1)
print(list("Hello world"))
# len() is the idiomatic spelling of __len__().
print(list1.__len__())
print(len(list1))
print(letters[0:2])
# Can also add a step to get every nth item in the list
print(letters[::2])
print(letters[::-1])
# List unpacking
first, second, third = letters
print(first)
print(second)
print(third)
# can pack the rest of the elements into *others if not needed
a, b, *others = letters
# enumerate gives tuple with the index and value of each item in the list
for letter in enumerate(letters):
    print(letter)
# can also unpack what you enumerate previously to show like this. Can use to get the index while iterating a string
for index, letter in enumerate(letters):
    print(index, letter)
# use .append to add, .insert to add in the middle or beginning
letters.append("d")
letters.append("e")
letters.append("f")
letters.insert(0, "d")
letters.remove("b")
letters.pop()
letters.pop(2)
# letters.clear() to clear the whole list
print(letters)
del letters[0:2]
print(letters)
if "e" in letters:
    # NOTE(review): this index() result is computed but discarded -
    # presumably a print() was intended around it.
    (letters.index("e"))
letters.append("d")
letters.append("d")
print(letters.count("d"))
|
{"/PythonStandardLibrary/SQLite.py": ["/PythonStandardLibrary/JSON.py"], "/PopularPackages/PyText.py": ["/PopularPackages/config.py"]}
|
33,950,234
|
erenarkan04/Python
|
refs/heads/master
|
/Classes/ClassesPt7.py
|
from collections import namedtuple
# For classes with only data (fields) and no methods, a namedtuple is simpler.
Point = namedtuple("Point", ["x", "y"])
p1 = Point(1, 2)
p2 = Point(3, 4)
# namedtuples compare by value, field-wise.
print(p1 == p2)
|
{"/PythonStandardLibrary/SQLite.py": ["/PythonStandardLibrary/JSON.py"], "/PopularPackages/PyText.py": ["/PopularPackages/config.py"]}
|
33,950,235
|
erenarkan04/Python
|
refs/heads/master
|
/PythonStandardLibrary/ecommerce/sales.py
|
def calc_tax():
    """Placeholder for tax calculation."""
    pass
def calc_shipping():
    """Placeholder for shipping calculation."""
    pass
# __name__ will equal __main__ only if the module is being run directly, not if module is called from a different module
if __name__ == "__main__":
    print("sales started")
    calc_tax()
|
{"/PythonStandardLibrary/SQLite.py": ["/PythonStandardLibrary/JSON.py"], "/PopularPackages/PyText.py": ["/PopularPackages/config.py"]}
|
33,950,236
|
erenarkan04/Python
|
refs/heads/master
|
/PythonStandardLibrary/JSON.py
|
import json
from pathlib import Path
# make a list of movie dictionaries
movies = [
    {"id": 1, "title": "Terminator", "year": 1989},
    {"id": 2, "title": "School of Rock", "year": 1993}
]
# store json dump into movies_json variable
movies_json = json.dumps(movies)
# create movies.json file and write contents of json variable to the file
Path("movies.json").write_text(movies_json)
# make new variable with the contents of the json file created and store in variable
data = Path("movies.json").read_text()
# parse the JSON text back into Python objects and print them
print(json.loads(data))
|
{"/PythonStandardLibrary/SQLite.py": ["/PythonStandardLibrary/JSON.py"], "/PopularPackages/PyText.py": ["/PopularPackages/config.py"]}
|
33,950,237
|
erenarkan04/Python
|
refs/heads/master
|
/PythonStandardLibrary/OtherPrograms.py
|
import subprocess
# NOTE(review): the original comment claimed output is hidden, but
# capture_output=True is NOT passed, so the child's output goes straight
# to the terminal.
completed = subprocess.run(["ls", "-l"])
# run another python script
completed2 = subprocess.run(["python3", "ComplexScript.py"])
|
{"/PythonStandardLibrary/SQLite.py": ["/PythonStandardLibrary/JSON.py"], "/PopularPackages/PyText.py": ["/PopularPackages/config.py"]}
|
33,950,238
|
erenarkan04/Python
|
refs/heads/master
|
/PythonStandardLibrary/RandomNumber.py
|
import random
import string
# Tutorial: the random module's main helpers.
print(random.random())
print(random.randint(1, 20))
print(random.choice([1, 2, 3, 4, 5]))
print(random.choice([1, 2, 3, 4, 5]))
print(random.choices([1, 2, 3, 4, 5], k=2))
# generate random PW
# NOTE(security): random is not cryptographically secure; use the
# `secrets` module for real passwords/tokens.
print("".join(random.choices(string.ascii_lowercase + string.digits, k=8)))
# shuffle (in place)
numbers = [1, 2, 3, 4, 5, 6, 7]
random.shuffle(numbers)
print(numbers)
|
{"/PythonStandardLibrary/SQLite.py": ["/PythonStandardLibrary/JSON.py"], "/PopularPackages/PyText.py": ["/PopularPackages/config.py"]}
|
33,950,239
|
erenarkan04/Python
|
refs/heads/master
|
/PopularPackages/PyText.py
|
from twilio.rest import Client
from PopularPackages.config import account_sid, auth_token
# Send a single SMS via Twilio using the credentials from config.py.
# NOTE(security): phone numbers and (in config.py) credentials are hardcoded.
client = Client(account_sid, auth_token)
client.messages.create(
    to="+16123230029",
    from_="+15022375417",
    body="sent from python"
)
|
{"/PythonStandardLibrary/SQLite.py": ["/PythonStandardLibrary/JSON.py"], "/PopularPackages/PyText.py": ["/PopularPackages/config.py"]}
|
33,950,240
|
erenarkan04/Python
|
refs/heads/master
|
/PythonStandardLibrary/SendEmails.py
|
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from email.mime.image import MIMEImage
import smtplib
from string import Template
from pathlib import Path
# Build an HTML email from a template and send it over Gmail SMTP.
template = Template(Path("template.html").read_text())
message = MIMEMultipart()
message["from"] = "eren"
message["to"] = "ahmeta04@gmail.com"
message["subject"] = "test"
# Substitute the $user placeholder in the template.
body = template.substitute({"user": "John"})
message.attach(MIMEText(body, "html"))
with smtplib.SMTP(host="smtp.gmail.com", port=587) as smtp:
    smtp.ehlo()
    smtp.starttls()
    # NOTE(security): a plaintext password is hardcoded and committed here;
    # it should be moved to an environment variable and rotated.
    smtp.login("ahmetarkan0424@gmail.com", "pieguy521")
    smtp.send_message(message)
    print("message sent")
|
{"/PythonStandardLibrary/SQLite.py": ["/PythonStandardLibrary/JSON.py"], "/PopularPackages/PyText.py": ["/PopularPackages/config.py"]}
|
33,950,241
|
erenarkan04/Python
|
refs/heads/master
|
/Data Structures/lists3.py
|
from collections import deque
from array import array
# Tutorial: deque, tuples, arrays, sets, dicts, comprehensions, unpacking.
# Queues
queue = deque([])
queue.append(1)
queue.append(2)
queue.append(3)
# popleft() is O(1); list.pop(0) would be O(n).
queue.popleft()
# Tuples
# NOTE(review): this assignment shadows the builtin `list` for the rest of
# the module (the tutorial later rebinds it again).
list = [1, 2, 3]
tuple(list)
print(type(list))
# Swapping variables
x = 1
y = 2
x, y = y, x
print(x)
print(y)
# Arrays - need to specify typecode "i" = integer
numbers = array("i", [1, 2, 3])
# Sets - can't have duplicate values, defined by {}
unique = set([1, 1, 2, 3])
second = {1, 4, 5}
print(unique | second)
# Print only shared elements in two sets
print(unique & second)
# Print only unique items in first set that dont exist in the second set
print(unique - second)
# Print only items not in both sets
print(unique ^ second)
# Dictionary - similar to hashtable
point = dict(x=1, y=2)
print(point)
print(point["x"])
# get() returns the default instead of raising KeyError.
print(point.get("a", "Null"))
del point["x"]
# loop variable takes the value of the key in each iteration
for key in point:
    print(key, point[key])
# Comprehension
list = []
for x in range(5):
    list.append(x * 2)
# alternate way with comprehension
values = [x * 2 for x in range(5)]
print(values)
# can also use comprehension to create dictionary objects
dictionary = {x : x * 2 for x in range(5)}
print(dictionary)
# Generator objects - can create one by using () with comprehension expressions
values = (x * 2 for x in range(5))
for x in values:
    print(x)
# Unpacking operator
nums = [1, 2, 3]
print(nums)
# Unpacks the list and passes each item as a separate argument to the print function
print(*nums)
# values1 = list(range(5))
values2 = [*range(5)]
# print(values1)
print(values2)
list1 = [1, 2]
list2 = [3, 4]
print([*list1, *list2])
dict1 = {"x": 10, "y":20}
dict2 = {"z": 15, "u":25}
# ** merges dicts; later keys win on collision.
print({**dict1, **dict2})
|
{"/PythonStandardLibrary/SQLite.py": ["/PythonStandardLibrary/JSON.py"], "/PopularPackages/PyText.py": ["/PopularPackages/config.py"]}
|
33,950,242
|
erenarkan04/Python
|
refs/heads/master
|
/Intro/ModulexEx.py
|
from ecommerce.sales import calc_shipping, calc_tax
from ecommerce import sales
# Call the same functions via the module object and via direct imports.
sales.calc_tax()
sales.calc_shipping()
calc_tax()
calc_shipping()
# Introspection attributes every module carries.
print(dir(sales))
print(sales.__name__)
print(sales.__package__)
print(sales.__file__)
|
{"/PythonStandardLibrary/SQLite.py": ["/PythonStandardLibrary/JSON.py"], "/PopularPackages/PyText.py": ["/PopularPackages/config.py"]}
|
33,950,243
|
erenarkan04/Python
|
refs/heads/master
|
/PythonStandardLibrary/Path2.py
|
import shutil
from pathlib import Path
from PythonStandardLibrary import ecommerce
# Copy ecommerce/new.txt into the ecommerce directory using shutil.
path = Path("ecommerce/new.txt")
target = Path("ecommerce")
# can use the shell util library for high level directory commands
shutil.copy(path, target)
|
{"/PythonStandardLibrary/SQLite.py": ["/PythonStandardLibrary/JSON.py"], "/PopularPackages/PyText.py": ["/PopularPackages/config.py"]}
|
33,950,244
|
erenarkan04/Python
|
refs/heads/master
|
/Intro/Exceptions.py
|
# Try block - execute commands in the try block, if exception is encountered execute except block, if not execute else block
try:
    file = open("Exceptions.py")
    age = int(input("Age: "))
    xfactor = 10 / age
except (ValueError, ZeroDivisionError):
    print("invalid input")
else:
    print("No exceptions were thrown")
finally:
    # NOTE(review): if open() itself fails, `file` is never bound and this
    # line raises NameError; binding file = None first (or using `with`)
    # would be safer.
    file.close()
# finally block executes no matter if an exception is thrown, commonly used to close files/database connections
# can also use a with clause to automatically close files/database connections by automatically calling the .__exit__ method
try:
    with open("Exceptions.py") as file, open("FizzBuzz.py") as target:
        print("file opened")
except (ValueError, ZeroDivisionError):
    print("invalid input")
else:
    print("No exceptions were thrown")
# How to throw exceptions
def calculate_xfactor(age):
    """Return 10 / age; raise ValueError for non-positive ages."""
    if age <= 0:
        raise ValueError("age cannot be < 0")
    return 10 / age
try:
    calculate_xfactor(-1)
# Bug fix: the original wrote `except ValueError() as error:`, catching an
# exception *instance*, which raises TypeError the moment a ValueError is
# actually thrown. Except clauses must name the class.
except ValueError as error:
    print(error)
|
{"/PythonStandardLibrary/SQLite.py": ["/PythonStandardLibrary/JSON.py"], "/PopularPackages/PyText.py": ["/PopularPackages/config.py"]}
|
33,950,245
|
erenarkan04/Python
|
refs/heads/master
|
/Intro/TutorialPt1.py
|
import math
# **Strings**
hello = "hello world"
print(hello)
print("*" * 10)
print(len(hello))
print(hello[0])
print(hello[0: 5])
print(hello[:5])
print(hello[6:])
firstname = "eren"
lastname = "arkan"
# Formatted Strings: whats between curly braces replaced at runtime
full = f"{ firstname} {lastname}"
print(full)
print(full.upper())
print(full.lower())
print(full.title())
print(hello.strip())
print(full.find("a"))
print(full.replace("a", "x"))
print("eren" in full)
print("eren" not in full)
# **Integers**
number = 5
number += 1
print(number / 2)
# // is floor division; ** is exponentiation.
print(number // 2)
print(number ** 2)
print(number + 2)
# uppercase denotes constants
PI = 3.14
print(round(PI))
print(round(PI))
# convert variable types
# int()
# float()
# bool()
# str()
#
# NOTE: input() blocks waiting for the user and returns a string.
x = input("x: ")
print(int(x) + 2)
|
{"/PythonStandardLibrary/SQLite.py": ["/PythonStandardLibrary/JSON.py"], "/PopularPackages/PyText.py": ["/PopularPackages/config.py"]}
|
33,950,246
|
erenarkan04/Python
|
refs/heads/master
|
/PythonStandardLibrary/Path.py
|
from ctypes import Array
from pathlib import Path
from time import ctime
# Tutorial: pathlib basics against the local ecommerce directory.
Path().home()
Path("/usr/local/bin")
path = Path("ecommerce")
filepath = Path("ecommerce/new.txt")
print(path.exists())
print(path.is_file())
print(path.is_dir())
print(path.name)
print(path.parent)
# returns a new path with the same object but a different name
path2 = path.with_name("file.txt")
# same with a different suffix
path3 = path.with_suffix(".txt")
print(path3)
# path.mkdir()
# path.rmdir()
# path.rename("ecommerce")
for p in path.iterdir():
    print(p)
paths = [p for p in path.iterdir()]
print(paths)
# glob or rglob (recursive glob) can be used to filter items
paths2 = [p for p in path.glob("*.py")]
print(paths2)
paths3 = [p for p in path.rglob("*.py")]
print(paths3)
# use ctime function to get human readable time of the time file was created
print(ctime(path.stat().st_ctime))
# write_text returns the number of characters written
print(filepath.write_text("abcabc test"))
print(filepath.read_text())
|
{"/PythonStandardLibrary/SQLite.py": ["/PythonStandardLibrary/JSON.py"], "/PopularPackages/PyText.py": ["/PopularPackages/config.py"]}
|
33,950,247
|
erenarkan04/Python
|
refs/heads/master
|
/PythonStandardLibrary/CSV.py
|
import csv
# open in write mode to write data to the file with "w"
with open("data.csv", "w") as file:
    writer = csv.writer(file)
    writer.writerow(["transaction id", "product id", "price"])
    writer.writerow([1000, 1, 5])
    writer.writerow([25, 3, 123])
    # A fresh writer on the same open file appends another row.
    csv.writer(file).writerow([1, 2, 3])
# Read the rows back.
with open("data.csv", "r") as file:
    reader = csv.reader(file)
    # print(list(reader))
    for row in reader:
        print(row)
|
{"/PythonStandardLibrary/SQLite.py": ["/PythonStandardLibrary/JSON.py"], "/PopularPackages/PyText.py": ["/PopularPackages/config.py"]}
|
33,950,248
|
erenarkan04/Python
|
refs/heads/master
|
/PythonStandardLibrary/OpenWebpage.py
|
import webbrowser
# Announce completion, then open Google in the default browser.
print("deployment complete")
webbrowser.open("https://google.com")
|
{"/PythonStandardLibrary/SQLite.py": ["/PythonStandardLibrary/JSON.py"], "/PopularPackages/PyText.py": ["/PopularPackages/config.py"]}
|
33,950,249
|
erenarkan04/Python
|
refs/heads/master
|
/Classes/ClassesPt3.py
|
class Product:
    """Product with a price validated to be non-negative."""

    def __init__(self, price):
        # Goes through the validating setter below.
        self.price = price

    @property
    def price(self):
        """The current (non-negative) price."""
        return self.__price

    @price.setter
    def price(self, value):
        # Reject bad values before storing any state.
        if value < 0:
            raise ValueError("price cannot be < 0")
        self.__price = value
# Inheritance - Animal is the base/parent class; Mamal and Fish derive from it
class Animal:
    """Base class: every animal starts at age 1 and can eat."""
    def __init__(self):
        self.age = 1
    def eat(self):
        print("eat")
class Mamal(Animal):
    """Animal that additionally tracks weight and can walk."""
    # .super() refers to the parent class to call it's constructor inside the constructor of the child class
    def __init__(self):
        super().__init__()
        self.weight = 1
    def walk(self):
        print("walk")
class Fish(Animal):
    """Animal that can swim."""
    def swim(self):
        print("swim")
# isinstance checks the instance; issubclass checks the class relationship.
m = Mamal()
print(isinstance(m, Mamal))
print(issubclass(Mamal, Animal))
|
{"/PythonStandardLibrary/SQLite.py": ["/PythonStandardLibrary/JSON.py"], "/PopularPackages/PyText.py": ["/PopularPackages/config.py"]}
|
33,950,250
|
erenarkan04/Python
|
refs/heads/master
|
/PopularPackages/YelpAPI.py
|
import requests
import config
# Query the Yelp business-search API and print matching businesses.
url = "https://api.yelp.com/v3/businesses/search"
header = {
    "Authorization": "Bearer " + config.api_key
}
params = {
    "term": "bakery",
    "location": "11238"
}
response = requests.get(url, headers=header, params=params)
# Assumes a successful response whose JSON has a "businesses" list.
businesses = response.json()["businesses"]
print(businesses)
for business in businesses:
    print(business["name"])
# Names of only the businesses rated above 4.
businesses2 = [business["name"] for business in businesses if business["rating"] > 4]
print(businesses2)
|
{"/PythonStandardLibrary/SQLite.py": ["/PythonStandardLibrary/JSON.py"], "/PopularPackages/PyText.py": ["/PopularPackages/config.py"]}
|
33,950,251
|
erenarkan04/Python
|
refs/heads/master
|
/PythonStandardLibrary/ZipFiles.py
|
from pathlib import Path
from zipfile import ZipFile
# copying contents of all files in the ecommerce directory into a zip file
# using rglob to recursively find all files that end with "."
# for path in Path("ecommerce").rglob("*.*"):
#     zip.write(path)
# zip.close()
# List the entries of an existing archive (assumes file.zip exists).
with ZipFile("file.zip") as zip:
    print(zip.namelist())
|
{"/PythonStandardLibrary/SQLite.py": ["/PythonStandardLibrary/JSON.py"], "/PopularPackages/PyText.py": ["/PopularPackages/config.py"]}
|
33,950,252
|
erenarkan04/Python
|
refs/heads/master
|
/PythonStandardLibrary/ComplexScript.py
|
# Placeholder script run via subprocess from OtherPrograms.py.
print("This is a complex script")
|
{"/PythonStandardLibrary/SQLite.py": ["/PythonStandardLibrary/JSON.py"], "/PopularPackages/PyText.py": ["/PopularPackages/config.py"]}
|
33,950,253
|
erenarkan04/Python
|
refs/heads/master
|
/Classes/Classes.py
|
class Point:
    """2-D point supporting equality, ordering, addition and printing."""

    default_color = "red "

    def __init__(self, x, y):
        self.x = x
        self.y = y

    @classmethod
    def zero(cls):
        """Alternate constructor: the origin."""
        return cls(0, 0)

    def __str__(self):
        return f"({self.x}, {self.y})"

    def __eq__(self, other):
        # Compare coordinate tuples field-wise.
        return (self.x, self.y) == (other.x, other.y)

    def __gt__(self, other):
        return self.x > other.x and self.y > other.y

    def __add__(self, other):
        return Point(self.x + other.x, self.y + other.y)

    def draw(self):
        print(f"point({self.x}, {self.y})")
# Exercise the dunder methods defined on Point.
point = Point(1, 2)
other = Point(1, 2)
point_zero = Point.zero()
point.draw()
point_zero.draw()
# print() uses __str__; == uses __eq__; + uses __add__.
print(point)
print(point == other)
total = point + other
print(total)
|
{"/PythonStandardLibrary/SQLite.py": ["/PythonStandardLibrary/JSON.py"], "/PopularPackages/PyText.py": ["/PopularPackages/config.py"]}
|
33,950,254
|
erenarkan04/Python
|
refs/heads/master
|
/Classes/ClassesPt4.py
|
from abc import ABC, abstractmethod
# Define custom error class (should inherit from the 'Exception' class)
class InvalidOperationError(Exception):
    """Raised when a stream is opened twice or closed while not open."""
    pass
class Stream(ABC):
    """Abstract stream with open/close bookkeeping; subclasses implement read()."""
    def __init__(self):
        # Tracks whether the stream is currently open.
        self.opened = False
    def open(self):
        # Opening an already-open stream is a usage error.
        if self.opened:
            raise InvalidOperationError("Invalid operation")
        self.opened = True
    def close(self):
        # Closing a stream that is not open is a usage error.
        if not self.opened:
            raise InvalidOperationError("Invalid operation")
        self.opened = False
    @abstractmethod
    def read(self):
        pass
# Have to implement the read() method from the abstract parent class otherwise the children classes will automatically
# become abstract and wont be able to be called
class FileStream(Stream):
    """Stream that reads from a file."""
    def read(self):
        print("read data from file")
class NetworkStream(Stream):
    """Stream that reads from the network."""
    def read(self):
        print("read data from network")
class MemoryStream(Stream):
    """Stream that reads from memory."""
    def read(self):
        print("read data from Memory")
|
{"/PythonStandardLibrary/SQLite.py": ["/PythonStandardLibrary/JSON.py"], "/PopularPackages/PyText.py": ["/PopularPackages/config.py"]}
|
33,950,255
|
erenarkan04/Python
|
refs/heads/master
|
/Intro/TutorailPt3.py
|
# While loop
number = 100
while number > 1:
    print(number)
    number //= 2
command = ""
# Echo loop terminated by typing "quit" (case-insensitive).
while command.lower() != "quit":
    command = input(">")
    print("ECHO" + command)
# Same loop written with an explicit break.
while True:
    command = input(">")
    print("ECHO" + command)
    if command.lower() == "quit":
        break
count = 0
for number in range(1, 10):
    if number % 2 == 0:
        print(number)
        count += 1
print("we have " + str(count))
|
{"/PythonStandardLibrary/SQLite.py": ["/PythonStandardLibrary/JSON.py"], "/PopularPackages/PyText.py": ["/PopularPackages/config.py"]}
|
33,978,191
|
emilbaekdahl/masters-code
|
refs/heads/main
|
/kgdata/subgraph.py
|
import ast
import concurrent.futures as cf
import functools as ft
import multiprocessing as mp
import operator as op
import os
import pathlib as pl
import networkx as nx
import numpy as np
import pandas as pd
import tqdm.auto as tqdm
from . import util
rng = np.random.default_rng()
class NxExtractor:
    """Neighbourhood extraction backed by a networkx graph of the dataset."""
    def __init__(self, dataset):
        self.dataset = dataset
    @util.cached_property
    def graph(self):
        # Graph object built by the dataset.
        return self.dataset.graph
    @util.cached_property
    def undirected(self):
        # Undirected view so neighbourhoods ignore edge direction.
        return self.graph.to_undirected()
    @util.cached_property
    def degree(self):
        # Mean node degree (rounded); used as the sampling fan-out below.
        degrees = list(map(self.graph.degree, self.graph.nodes))
        return round(sum(degrees) / len(degrees))
    def neighbourhood(self, entity, depth=1, stochastic=False):
        """Return the subgraph induced by `entity` plus depth-limited neighbours.

        NOTE(review): the `stochastic` flag is accepted but never read -
        expansion always samples `self.degree` neighbours with replacement;
        confirm the intended behaviour before relying on it.
        """
        neighbours = set(self.undirected.neighbors(entity))
        for _ in range(depth - 1):
            neighbours = ft.reduce(
                lambda acc, current: acc
                | set(
                    rng.choice(
                        list(self.undirected.neighbors(current)),
                        self.degree,
                        replace=True,
                    )
                ),
                neighbours,
                neighbours,
            )
        return self.graph.subgraph(neighbours | set([entity]))
class Extractor:
def __init__(self, dataset):
    # NOTE(review): `Cache` is not defined in this part of the file -
    # presumably defined/imported elsewhere in the module; verify.
    self.dataset = dataset
    self.cache = Cache(dataset.path / "neighbourhoods")
@util.cached_property
def wide_data(self):
    # Triples in wide form; downstream code reads head/relation/tail columns.
    return self.dataset.data
@util.cached_property
def long_data(self):
    # Melt head/tail columns into (relation, role, entity) long form,
    # keeping the original triple index.
    return self.wide_data.melt(
        id_vars="relation", var_name="role", value_name="entity", ignore_index=False
    )
@util.cached_property
def entity_data(self):
    # Series mapping entity -> triple index (non-unique on both sides).
    return self.long_data["entity"].reset_index().set_index("entity")["index"]
@util.cached_property
def index_data(self):
    # Inverse of entity_data: triple index -> entity.
    return self.entity_data.reset_index().set_index("index")["entity"]
@util.cached_property
def graph(self):
    # Directed multigraph; the relation serves as the edge key.
    return nx.MultiDiGraph(
        zip(
            self.wide_data["head"],
            self.wide_data["tail"],
            self.wide_data["relation"],
        )
    )
@ft.lru_cache(maxsize=100_000)
def stochastic_neighbourhood(
    self,
    entity: str,
    depth: int = 1,
    index_only: bool = False,
    use_cache: bool = True,
    cache_namespace=None,
    **kwargs,
) -> pd.DataFrame:
    """Sampled depth-`depth` neighbourhood of `entity` as triple rows
    (or just their index when `index_only`).

    Tries the on-disk cache first (a shared namespace cache when provided),
    then expands from the entity's triples via the sampling reducer.

    NOTE(review): lru_cache on an instance method keeps `self` alive for
    the cache's lifetime and requires all kwargs to be hashable - confirm
    this is acceptable here.
    """
    if use_cache:
        cache = cache_namespace.cache if cache_namespace else self.cache
        cached = cache(entity, depth, True)
    else:
        cached = None
    if cached is not None:
        idx = pd.Index(cached)
    else:
        # Each reduction step expands the triple index by one hop.
        reducer = ft.partial(self._stochastic_neighbourhood_reducer, **kwargs)
        idx = ft.reduce(
            reducer, range(depth - 1), pd.Index(self.entity_data[[entity]])
        ).unique()
    return idx if index_only else self.wide_data.loc[idx]
def _stochastic_neighbourhood_reducer(self, idx, _, triples_per_entity=None):
    """One expansion hop: sample entities reachable from `idx` and union
    in the triples they participate in."""
    if triples_per_entity is None:
        triples_per_entity = self.triples_per_entity
    entities = self.index_data[idx]
    # Down-sample to bound the growth per hop.
    if len(entities) > triples_per_entity:
        entities = entities.sample(triples_per_entity)
    triples = self.entity_data[entities.unique()]
    # triples = (
    #     self.entity_data[self.index_data[idx].unique()]
    #     .groupby(level=0)
    #     .sample(triples_per_entity or self.triples_per_entity, replace=True)
    # )
    return idx.union(triples.unique())
@util.cached_property
def triples_per_entity(self):
    # Mean number of distinct triples an entity participates in (rounded);
    # used as the default sampling budget per hop.
    return round(
        self.long_data.reset_index().groupby("entity")["index"].nunique().mean()
    )
@ft.lru_cache(maxsize=100_000)
def neighbourhood(
    self,
    entity: str,
    depth: int = 1,
    index_only: bool = False,
    use_cache: bool = True,
    cache_namespace=None,
) -> pd.DataFrame:
    """Exhaustive depth-`depth` neighbourhood of `entity` as triple rows
    (or just their index when `index_only`).

    Tries the on-disk cache first (a shared namespace cache when provided),
    then expands hop by hop from the entity's triples.
    """
    if use_cache:
        cache = cache_namespace.cache if cache_namespace else self.cache
        cached = cache(entity, depth, False)
    else:
        cached = None
    # Fix: test `is not None` (consistent with stochastic_neighbourhood).
    # The original `if cached:` treated an empty cached neighbourhood as a
    # cache miss, and array-like cache values would raise on ambiguous
    # truthiness.
    if cached is not None:
        idx = pd.Index(cached)
    else:
        idx = ft.reduce(
            lambda idx, _: idx.union(
                self.entity_data[self.index_data[idx].unique()].unique()
            ),
            range(depth - 1),
            pd.Index(self.entity_data[[entity]]),
        ).unique()
    return idx if index_only else self.wide_data.loc[idx]
def enclosing(self, head, tail, stochastic=False, index_only=False, **kwargs):
    """Triples in the intersection of head's and tail's neighbourhoods."""
    function = self.stochastic_neighbourhood if stochastic else self.neighbourhood
    # Degenerate pair: the enclosing subgraph is just the entity's own.
    if head == tail:
        return function(head, index_only=index_only, **kwargs)
    idx = function(head, index_only=True, **kwargs).intersection(
        function(tail, index_only=True, **kwargs)
    )
    return idx if index_only else self.wide_data.loc[idx]
def all_neighbourhoods(
    self,
    entities=None,
    max_entities: float = None,
    seed: int = None,
    max_workers: int = None,
    depth: int = 1,
    chunk_size: int = None,
    n_chunks: int = 1,
    current_chunk: int = 1,
    **kwargs,
):
    """Extract neighbourhoods for many entities in parallel.

    Returns a concatenated Series of triple indices keyed by entity.
    `max_entities` > 1 is an absolute count, <= 1 a fraction to sample.
    """
    if entities is None:
        entities = self.dataset.entities
    else:
        entities = self.dataset.entities[self.dataset.entities.isin(entities)]
    if max_entities is not None:
        params = {
            "n" if max_entities > 1 else "frac": max_entities,
            "random_state": seed,
        }
        entities = entities.sample(**params)
    # Respect the SLURM CPU allocation when running on a cluster.
    if max_workers is None and "SLURM_CPUS_PER_TASK" in os.environ:
        max_workers = int(os.environ["SLURM_CPUS_PER_TASK"])
    with cf.ProcessPoolExecutor(max_workers=max_workers) as pool:
        if chunk_size is None:
            chunk_size = max(1, len(entities) // pool._max_workers)
        print(f"Workers: {pool._max_workers}. Worker chunksize {chunk_size}")
        # Share one cache object across workers via a manager namespace.
        with mp.Manager() as manager:
            namespace = manager.Namespace()
            namespace.cache = self.cache
            worker = ft.partial(
                self._all_neighbourhoods_worker,
                depth=depth,
                index_only=True,
                cache_namespace=namespace,
                **kwargs,
            )
            jobs = pool.map(worker, entities, chunksize=chunk_size)
            neighbourhoods = list(
                tqdm.tqdm(
                    jobs,
                    total=len(entities),
                    desc=f"Extracting depth-{depth} neighbourhoods (chunk {current_chunk}/{n_chunks})",
                    unit="entities",
                )
            )
    return pd.concat(neighbourhoods)
def _all_neighbourhoods_worker(self, entity, stochastic: bool = False, **kwargs):
    """Extract one entity's neighbourhood, labelling every row with the entity."""
    extract = self.stochastic_neighbourhood if stochastic else self.neighbourhood
    result = extract(entity, **kwargs)
    return pd.Series(result, index=[entity] * len(result))
def all_neighbourhood_sizes(
    self, depth: int = 1, entities=None, max_entities=None, **kwargs
):
    """Compute per-entity neighbourhood sizes (row counts) at `depth`.

    Entities are processed in chunks of roughly 10_000 // depth to bound
    memory use. Returns a frame with columns `size`, `depth`, and `prop`
    (size relative to the whole dataset).
    """
    if entities is None:
        entities = self.dataset.entities
    else:
        entities = self.dataset.entities[self.dataset.entities.isin(entities)]
    if max_entities is not None:
        # > 1 is an absolute count, otherwise a fraction of the entities.
        params = {"n" if max_entities > 1 else "frac": max_entities}
        entities = entities.sample(**params)
    chunks = np.array_split(entities, max(1, len(entities) // (10_000 // depth)))
    return pd.concat(
        [
            (
                self.all_neighbourhoods(
                    entities=chunk,
                    depth=depth,
                    n_chunks=len(chunks),
                    current_chunk=n + 1,
                    **kwargs,
                )
                .groupby(level=0)
                .count()
                .to_frame("size")
                .assign(
                    depth=depth,
                    prop=lambda data: data["size"] / len(self.dataset),
                )
            )
            for n, chunk in enumerate(chunks)
        ]
    )
def all_enclosing(
    self,
    depth: int = 1,
    max_pairs: float = None,
    pairs=None,
    seed: int = None,
    max_workers: int = None,
    chunk_size: int = None,
    **kwargs,
):
    """Extract enclosing subgraphs for (a sample of) entity pairs in parallel.

    Parameters:
        depth: subgraph depth forwarded to the per-pair worker.
        max_pairs: optional sample size -- an absolute count when > 1,
            otherwise a fraction of the pairs.
        pairs: frame of entity pairs; defaults to the dataset's unique pairs.
        seed: random state for the pair sample.
        max_workers: process-pool size; falls back to SLURM_CPUS_PER_TASK.
        chunk_size: pool.map chunksize; defaults to an even worker split.

    Returns:
        The concatenation of every worker's Series, indexed by (ent_1, ent_2).

    Fix: the `pairs` argument was unconditionally overwritten by the
    dataset's pairs; a caller-supplied pair list is now honoured.
    """
    if pairs is None:
        pairs = self.dataset.unique_entity_pairs
    if max_pairs is not None:
        params = {
            "n" if max_pairs > 1 else "frac": max_pairs,
            "random_state": seed,
        }
        pairs = pairs.sample(**params)
    pairs = [tuple(pair) for pair in pairs.itertuples(index=False)]
    if max_workers is None and "SLURM_CPUS_PER_TASK" in os.environ:
        max_workers = int(os.environ["SLURM_CPUS_PER_TASK"])
    with cf.ProcessPoolExecutor(max_workers) as pool:
        if chunk_size is None:
            chunk_size = max(1, len(pairs) // pool._max_workers)
        print(f"Using {pool._max_workers} workers. Worker chunksize {chunk_size}")
        # A managed namespace lets the worker processes share the cache.
        with mp.Manager() as manager:
            namespace = manager.Namespace()
            namespace.cache = self.cache
            worker = ft.partial(
                self._all_enclosing_worker,
                depth=depth,
                index_only=True,
                cache_namespace=namespace,
                **kwargs,
            )
            jobs = pool.map(worker, *zip(*pairs), chunksize=chunk_size)
            subgraphs = list(
                tqdm.tqdm(
                    jobs,
                    total=len(pairs),
                    desc=f"Extracting depth-{depth} enclosing subgraphs",
                    unit="pairs",
                )
            )
    return pd.concat(subgraphs)
def _all_enclosing_worker(self, head, tail, **kwargs):
    """Extract one pair's enclosing subgraph, indexed by (ent_1, ent_2)."""
    subgraph = self.enclosing(head, tail, **kwargs)
    pair_index = pd.MultiIndex.from_tuples(
        [(head, tail)] * len(subgraph), names=("ent_1", "ent_2")
    )
    return pd.Series(subgraph, index=pair_index)
def all_enclosing_sizes(self, depth: int = 1, **kwargs):
    """Per-pair enclosing-subgraph sizes at `depth`, plus the size expressed
    as a proportion of the whole dataset."""
    subgraphs = self.all_enclosing(depth=depth, **kwargs)
    sizes = subgraphs.groupby(level=[0, 1]).count().to_frame("size")
    sizes = sizes.assign(
        depth=depth, prop=lambda frame: frame["size"] / len(self.dataset)
    )
    return sizes
def neighbourhood_sizes(self, depths, max_entities=None, seed=None, **kwargs):
    """Neighbourhood sizes over a range of depths, cached on disk.

    `depths` may be a (min, max) tuple (expanded to the inclusive range) or
    any iterable of depths. Results are cached under
    <dataset>/neighbourhood_sizes[/<split>]/ keyed by the parameters.

    Fix: the cache read and write were disabled by leftover `if False`
    debug guards, unlike the otherwise-identical enclosing_sizes; caching
    is re-enabled for consistency.
    """
    if isinstance(depths, tuple):
        min_depth, max_depth = depths
        depths = range(min_depth, max_depth + 1)
    path = self.dataset.path / "neighbourhood_sizes"
    if self.dataset.split:
        path = path / self.dataset.split
    path.mkdir(exist_ok=True, parents=True)
    path = path / f"depths_{depths}_ents_{max_entities or 'all'}_seed_{seed}.csv"
    if path.exists():
        # Reuse previously computed sizes.
        return pd.read_csv(path, index_col=0)
    sizes = pd.concat(
        [
            self.all_neighbourhoods(
                depth=depth,
                max_entities=max_entities,
                seed=seed,
                **kwargs,
            )
            .map(len)
            .to_frame(name="size")
            .assign(depth=depth, prop=lambda data: data["size"] / len(self.dataset))
            for depth in depths
        ]
    )
    sizes.to_csv(path)
    return sizes
def enclosing_sizes(self, depths, max_pairs=None, seed=None, **kwargs):
    """Enclosing-subgraph sizes over a range of depths, cached on disk.

    `depths` may be a (min, max) tuple (expanded to the inclusive range) or
    any iterable of depths. Results are cached under
    <dataset>/enclosing_sizes[/<split>]/ keyed by the parameters.
    """
    if isinstance(depths, tuple):
        min_depth, max_depth = depths
        depths = range(min_depth, max_depth + 1)
    path = self.dataset.path / "enclosing_sizes"
    if self.dataset.split:
        path = path / self.dataset.split
    path.mkdir(exist_ok=True, parents=True)
    path = path / f"depths_{depths}_pairs_{max_pairs or 'all'}_seed_{seed}.csv"
    if path.exists():
        # Reuse previously computed sizes (two-level (ent_1, ent_2) index).
        return pd.read_csv(path, index_col=(0, 1))
    sizes = pd.concat(
        [
            self.all_enclosing(
                depth=depth, max_pairs=max_pairs, seed=seed, **kwargs
            )
            .map(len)
            .to_frame(name="size")
            .assign(depth=depth, prop=lambda data: data["size"] / len(self.dataset))
            for depth in depths
        ]
    )
    sizes.to_csv(path)
    return sizes
class Cache:
    """File-backed cache of per-entity subgraph data, keyed by depth.

    Each depth is persisted as ``<path>/<depth>.csv`` (or, for stochastic
    extractions, ``<path>/stochastic/<depth>.csv``) and lazily loaded into
    memory on first access.
    """

    def __init__(self, path):
        self.path = path if isinstance(path, pl.Path) else pl.Path(path)
        self.cache = {}
        self.stochastic_cache = {}

    def _file_for(self, depth: int, stochastic: bool) -> pl.Path:
        """CSV file holding the cached data for `depth`."""
        base = self.path / "stochastic" if stochastic else self.path
        return base / f"{depth}.csv"

    def __call__(self, entity: str, depth: int, stochastic: bool):
        """Return the cached "index" rows for `entity` at `depth`, or None on a miss."""
        store = self.stochastic_cache if stochastic else self.cache
        if depth not in store:
            file = self._file_for(depth, stochastic)
            if not file.exists():
                return None
            store[depth] = pd.read_csv(file, index_col=0)
        try:
            return store[depth]["index"].loc[[entity]]
        except KeyError:
            return None

    def save_cache_data(self, data, depth: int, stochastic: bool, exist_ok=False):
        """Persist `data` for `depth`; skip when the file exists unless `exist_ok`."""
        base = self.path / "stochastic" if stochastic else self.path
        base.mkdir(exist_ok=True, parents=True)
        file = base / f"{depth}.csv"
        if exist_ok or not file.exists():
            data.to_csv(file)
|
{"/kgdata/__main__.py": ["/kgdata/dataset.py", "/kgdata/path.py", "/kgdata/sample.py"], "/tune.py": ["/kgdata/model.py"], "/train.py": ["/kgdata/model.py"]}
|
33,978,192
|
emilbaekdahl/masters-code
|
refs/heads/main
|
/kgdata/model.py
|
import argparse
import functools as ft
import pathlib as pl
import typing as tp
import networkx as nx
import numpy as np
import pandas as pd
import pytorch_lightning as ptl
import pytorch_lightning.callbacks
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
import torch.utils.data
import torchmetrics as tm
from . import feature, util
# Module-level RNG shared by Dataset._gen_neg_sample; unseeded, so negative
# sampling differs between runs.
rng = np.random.default_rng()
class KG:
    """Knowledge graph backed by a CSV file of (head, relation, tail) triples.

    Entity and relation labels are mapped to integer indices derived from the
    union of the train/valid/test files living next to `path`.
    """

    # Location of this split's CSV file.
    path: pl.Path

    def __init__(self, path: tp.Union[str, pl.Path]):
        if isinstance(path, str):
            self.path = pl.Path(path)
        else:
            self.path = path

    def __len__(self) -> int:
        return len(self.data)

    def __iter__(self):
        # Iterate the triples as pandas named tuples.
        return self.data.itertuples()

    @util.cached_property
    def all_org_data(self) -> pd.DataFrame:
        """All splits concatenated, with original string labels."""
        files = [
            self.path.parent / file_name
            for file_name in ["train.csv", "valid.csv", "test.csv"]
        ]
        return pd.concat(map(pd.read_csv, files))

    @util.cached_property
    def org_data(self) -> pd.DataFrame:
        """This split's triples with original string labels."""
        return pd.read_csv(self.path, dtype=str)

    @util.cached_property
    def data(self) -> pd.DataFrame:
        """This split's triples with entities and relations as integer indices."""
        return self.org_data.assign(
            head=lambda data: self.entity_to_index.loc[data["head"]].values,
            relation=lambda data: self.relation_to_index.loc[data["relation"]].values,
            tail=lambda data: self.entity_to_index.loc[data["tail"]].values,
        )

    @util.cached_property
    def head_relation_data(self) -> pd.Series:
        """Tails indexed by (head, relation), for fast corruption filtering."""
        return (
            self.data.reset_index().set_index(["head", "relation"])["tail"].sort_index()
        )

    @util.cached_property
    def tail_relation_data(self) -> pd.Series:
        """Heads indexed by (tail, relation), for fast corruption filtering."""
        return (
            self.data.reset_index().set_index(["tail", "relation"])["head"].sort_index()
        )

    @util.cached_property
    def head_index_data(self) -> pd.DataFrame:
        """Triples indexed by head entity; column `index` keeps the row id."""
        return self.data.reset_index().set_index(["head"]).sort_index()

    @util.cached_property
    def head_index_data_idx(self) -> set:
        # Set of head entities, for O(1) membership tests.
        return set(self.head_index_data.index)

    @util.cached_property
    def graph(self) -> nx.MultiDiGraph:
        """Directed multigraph view; each edge stores its triple's row index."""
        return nx.MultiDiGraph(
            zip(
                self.data["head"],
                self.data["tail"],
                self.data["relation"],
                [{"index": index} for index in self.data.index],
            ),
        )

    @util.cached_property
    def entities(self) -> pd.Series:
        """Every entity across all splits; position = integer index."""
        return pd.Series(
            pd.concat([self.all_org_data["head"], self.all_org_data["tail"]]).unique(),
            name="entity",
        )

    @util.cached_property
    def entity_to_index(self):
        # Inverse mapping: entity label -> integer index.
        return pd.Series(self.entities.index, index=self.entities).sort_index()

    @util.cached_property
    def relations(self) -> pd.Series:
        """Every relation across all splits; position = integer index."""
        return pd.Series(self.all_org_data["relation"].unique(), name="relation")

    @util.cached_property
    def relation_to_index(self):
        # Inverse mapping: relation label -> integer index.
        return pd.Series(self.relations.index, index=self.relations).sort_index()

    @util.cached_property
    def degree(self):
        # Total degree (networkx MultiDiGraph.degree = in + out) per node.
        return pd.Series(dict(self.graph.degree))

    @util.cached_property
    def median_degree(self):
        return round(self.degree.median())

    @util.cached_property
    def mean_degree(self):
        return round(self.degree.mean())

    @ft.lru_cache(maxsize=100_000)
    def _neighbourhood_idx(
        self, entity: str, depth: int = 3, sampling: str = None, sample_size: int = None
    ) -> set:
        """Row indices of triples in `entity`'s outgoing neighbourhood.

        Expands `depth` hops by repeatedly following head -> tail edges,
        optionally keeping at most `sample_size` triples per hop. `sampling`
        may be "mean" or "median" to derive that size from degree statistics.

        NOTE(review): lru_cache on an instance method keys on `self` and keeps
        the KG alive for the cache's lifetime; acceptable if one KG is used
        per process -- confirm.
        """
        if entity not in self.head_index_data_idx:
            return set()
        if sample_size is None and sampling is not None:
            if sampling == "mean":
                sample_size = self.mean_degree
            elif sampling == "median":
                sample_size = self.median_degree
            elif sampling is not None:
                raise ValueError(f"subgraph sampling '{sampling}' is unknown")
        data = self.head_index_data.loc[[entity]]
        if sample_size and len(data) > sample_size:
            data = data.sample(sample_size)
        idx = set(data["index"])
        for _ in range(depth - 1):
            tails = data["tail"]
            if len(tails) == 0:
                break
            # Only follow tails that also occur as heads.
            data = self.head_index_data.loc[
                self.head_index_data_idx.intersection(tails)
            ]
            if sample_size and len(data) > sample_size:
                data = data.sample(sample_size)
            idx.update(data["index"])
        return idx

    def _enclosing_idx(self, head: str, tail: str, **kwargs):
        # Rows reachable from both endpoints: intersection of neighbourhoods.
        return self._neighbourhood_idx(head, **kwargs).intersection(
            self._neighbourhood_idx(tail, **kwargs)
        )

    @ft.lru_cache(maxsize=100_000)
    def get_rel_seqs(
        self,
        head: str,
        tail: str,
        min_length: int = 1,
        max_length: int = 3,
        max_paths: int = None,
        subgraph_sampling: str = None,
        no_rel_rep: bool = False,
    ) -> tp.List[np.array]:
        """Unique relation sequences along simple paths from head to tail.

        Parameters:
            min_length / max_length: path length bounds, in edges.
            max_paths: stop once this many sequences are collected.
            subgraph_sampling: when set, restrict the path search to the
                sampled enclosing subgraph of the pair ("mean" or "median").
            no_rel_rep: drop sequences repeating the same relation twice in
                a row.

        Returns a list of integer arrays of relation indices.
        """
        seqs = []
        if subgraph_sampling:
            idx = self._enclosing_idx(
                head, tail, depth=max_length, sampling=subgraph_sampling
            )
            data = self.data.loc[idx]
            graph = self.graph.edge_subgraph(
                zip(data["head"], data["tail"], data["relation"])
            )
        else:
            graph = self.graph
        try:
            for path in nx.all_simple_edge_paths(graph, head, tail, cutoff=max_length):
                if len(path) < min_length:
                    continue
                # Edge keys of a MultiDiGraph built as (head, tail, relation)
                # tuples are the relation indices.
                seq = [relation for _head, _tail, relation in path]
                if seq not in seqs and (
                    not no_rel_rep or KG._is_non_repeating_seq(seq)
                ):
                    seqs.append(seq)
                if max_paths and len(seqs) >= max_paths:
                    break
        except nx.NodeNotFound:
            # Head or tail absent from the (sub)graph: no paths exist.
            pass
        return [np.array(seq) for seq in seqs]

    @staticmethod
    def _is_non_repeating_seq(seq: tp.List[int]) -> bool:
        """True when no relation appears twice in a row."""
        for index in range(1, len(seq)):
            if seq[index - 1] == seq[index]:
                return False
        return True
class Dataset(torch.utils.data.Dataset):
    """Triple-classification dataset yielding positive and corrupted triples.

    Each item is (head, tail, head_sem, tail_sem, relation, rel_seqs, label)
    where `rel_seqs` are padded relation paths between head and tail and the
    `*_sem` tensors are per-entity relation-count features.
    """

    def __init__(
        self,
        path: tp.Union[str, pl.Path],
        split: str = "train",
        neg_rate: float = 1,
        max_paths: int = None,
        min_path_length: int = 1,
        max_path_length: int = 1,
        subgraph_sampling: str = None,
        domain_semantics: bool = False,
        no_rel_rep: bool = False,
    ):
        if isinstance(path, str):
            self.path = pl.Path(path)
        else:
            self.path = path
        self.split = split
        # Ratio of negative to positive samples (> 1 samples with replacement).
        self.neg_rate = neg_rate
        self.max_paths = max_paths
        self.min_path_length = min_path_length
        self.max_path_length = max_path_length
        self.subgraph_sampling = subgraph_sampling
        self.domain_semantics = domain_semantics
        if self.domain_semantics:
            # Precomputed domain semantics only ship with the YAGO dataset.
            assert "yago" in str(
                self.path
            ), f"kg '{self.path}' does not support domain semantics"
        self.no_rel_rep = no_rel_rep

    @util.cached_property
    def kg(self) -> KG:
        # Paths and semantics are always computed on the training graph.
        return KG(self.path / "train.csv")

    @util.cached_property
    def pos_data(self) -> pd.DataFrame:
        """Positive triples of the configured split, as integer indices."""
        return KG(self.path / f"{self.split}.csv").data

    @util.cached_property
    def replace_tail_probs(self) -> pd.Series:
        """Per-relation probability of corrupting the tail: hpt / (hpt + tph)."""
        return self.kg.data.groupby("relation").apply(
            lambda group: pd.Series(
                {
                    "tph": group.groupby("head").size().sum() / group["head"].nunique(),
                    "hpt": group.groupby("tail").size().sum() / group["tail"].nunique(),
                }
            ).agg(lambda data: data["hpt"] / (data["hpt"] + data["tph"]))
        )

    @util.cached_property
    def entity_semantics(self) -> pd.DataFrame:
        """Per-entity features: precomputed domain semantics from sems.csv, or
        relation co-occurrence counts of the training graph."""
        if self.domain_semantics:
            return pd.read_csv(self.path / "sems.csv", index_col=0)
        return feature.rel_counts(self.kg.data).astype("float32")

    @util.cached_property
    def default_entity_semantics(self) -> np.array:
        # Zero vector used for entities without semantics.
        return np.repeat(0, len(self.kg.relations)).astype("float32")

    @util.cached_property
    def idx_map(self):
        """Maps item index -> (positive-triple index, label).

        Negatives reuse a sampled subset of the positive indices; the actual
        corruption happens lazily in __getitem__.
        """
        pos_idx_map = pd.DataFrame({"pos_index": self.pos_data.index, "label": 1})
        neg_idx_map = pos_idx_map.sample(
            frac=self.neg_rate, replace=self.neg_rate > 1
        ).assign(label=0)
        return pd.concat([pos_idx_map, neg_idx_map]).sort_index().reset_index(drop=True)

    def __len__(self) -> int:
        return round(len(self.pos_data) * (self.neg_rate + 1))

    def __getitem__(
        self, idx: int
    ) -> tp.Tuple[
        str, str, torch.Tensor, torch.Tensor, torch.Tensor, torch.Tensor, torch.Tensor
    ]:
        """Return one (possibly corrupted) triple with its features.

        Relation indices are shifted by +1 so that 0 can act as padding.
        """
        pos_idx, label = self.idx_map.iloc[idx]
        pos_sample = self.pos_data.iloc[pos_idx]
        if label == 1:
            sample = pos_sample
        else:
            sample = self._gen_neg_sample(*pos_sample)
        head, relation, tail = sample
        # Entity semantics (fall back to the zero vector for unseen entities)
        if head in self.entity_semantics.index:
            head_sem = self.entity_semantics.loc[head].values
        else:
            head_sem = self.default_entity_semantics
        if tail in self.entity_semantics.index:
            tail_sem = self.entity_semantics.loc[tail].values
        else:
            tail_sem = self.default_entity_semantics
        head_sem = torch.from_numpy(head_sem)
        tail_sem = torch.from_numpy(tail_sem)
        # Relation (+1 reserves index 0 for padding)
        relation = torch.tensor(relation + 1)
        # Relation sequences
        rel_seqs = [
            torch.from_numpy(rel_seq) + 1
            for rel_seq in self.kg.get_rel_seqs(
                head,
                tail,
                min_length=self.min_path_length,
                max_length=self.max_path_length,
                max_paths=self.max_paths,
                subgraph_sampling=self.subgraph_sampling,
                no_rel_rep=self.no_rel_rep,
            )
        ]
        if len(rel_seqs) == 0:
            # No path between the entities: a single all-padding sequence.
            rel_seqs = [torch.tensor([0])]
        rel_seqs = nn.utils.rnn.pad_sequence(rel_seqs, batch_first=True)
        # Label
        label = torch.tensor(label, dtype=torch.float32)
        return head, tail, head_sem, tail_sem, relation, rel_seqs, label

    def _gen_neg_sample(
        self, head: int, relation: int, tail: int
    ) -> tp.Tuple[int, int, int]:
        """Corrupt one side of a positive triple (bern negative sampling).

        The replaced side is drawn per-relation; candidate entities exclude
        those that would form an observed triple. (Annotations corrected:
        values here are the integer indices produced by KG.data.)
        """
        replace_tail = rng.binomial(1, self.replace_tail_probs[relation]) == 1
        try:
            if replace_tail:
                invalid_entities = self.kg.head_relation_data[head, relation]
            else:
                invalid_entities = self.kg.tail_relation_data[tail, relation]
            candidate_entities = self.kg.entities.index[
                ~self.kg.entities.index.isin(invalid_entities)
            ]
        except KeyError:
            # (head, relation) / (tail, relation) unseen: everything is valid.
            candidate_entities = self.kg.entities.index
        (new_entity,) = rng.choice(candidate_entities, 1)
        if replace_tail:
            return head, relation, new_entity
        return new_entity, relation, tail

    @staticmethod
    def collate_fn(batch):
        """Stack a list of __getitem__ tuples into batch tensors; heads and
        tails stay as plain tuples."""
        head, tail, head_sem, tail_sem, relation, seqs, label = zip(*batch)
        return (
            head,
            tail,
            torch.stack(head_sem),
            torch.stack(tail_sem),
            torch.stack(relation),
            Dataset.pad_nested_seqs(seqs),
            torch.stack(label),
        )

    @staticmethod
    def pad_nested_seqs(seq_of_seqs):
        """Zero-pad a batch of (n_paths, path_length) tensors to a common shape."""
        # Largest first and second dimension of all padded seqences.
        dim_1 = max([seq.shape[0] for seq in seq_of_seqs])
        dim_2 = max([seq.shape[1] for seq in seq_of_seqs])
        expanded = [
            torch.nn.functional.pad(
                seq, pad=(0, dim_2 - seq.shape[1], 0, dim_1 - seq.shape[0])
            )
            for seq in seq_of_seqs
        ]
        return torch.stack(expanded)
class DataModule(ptl.LightningDataModule):
    """LightningDataModule wrapping the path-based triple Dataset.

    Builds train/valid/test dataloaders sharing the same construction
    parameters; shuffling is only applied to the training split.
    """

    def __init__(
        self,
        path: str,
        neg_rate: float = 1,
        max_paths: int = None,
        min_path_length: int = 1,
        max_path_length: int = 3,
        subgraph_sampling: str = None,
        batch_size: int = 32,
        num_workers: int = 0,
        prefetch_factor: int = 2,
        shuffle_train: bool = True,
        domain_semantics: bool = False,
        no_rel_rep: bool = False,
    ):
        self.path = path
        self.neg_rate = neg_rate
        self.max_paths = max_paths
        self.min_path_length = min_path_length
        self.max_path_length = max_path_length
        self.subgraph_sampling = subgraph_sampling
        self.batch_size = batch_size
        self.num_workers = num_workers
        self.prefetch_factor = prefetch_factor
        self.shuffle_train = shuffle_train
        self.domain_semantics = domain_semantics
        self.no_rel_rep = no_rel_rep
        super().__init__()

    @util.cached_property
    def kg(self) -> KG:
        # Convenience accessor for the training split's underlying KG.
        return self.train_dataloader().dataset.kg

    def train_dataloader(self):
        return self._create_dataloader("train")

    def val_dataloader(self):
        return self._create_dataloader("valid")

    def test_dataloader(self):
        return self._create_dataloader("test")

    def _create_dataloader(self, split: str) -> torch.utils.data.DataLoader:
        """Dataloader for one split; only "train" may be shuffled."""
        return torch.utils.data.DataLoader(
            Dataset(
                self.path,
                split=split,
                neg_rate=self.neg_rate,
                max_paths=self.max_paths,
                min_path_length=self.min_path_length,
                max_path_length=self.max_path_length,
                subgraph_sampling=self.subgraph_sampling,
                domain_semantics=self.domain_semantics,
                no_rel_rep=self.no_rel_rep,
            ),
            batch_size=self.batch_size,
            num_workers=self.num_workers,
            prefetch_factor=self.prefetch_factor,
            collate_fn=Dataset.collate_fn,
            shuffle=self.shuffle_train and split == "train",
            pin_memory=True,
        )
class Model(ptl.LightningModule):
    """Path-based triple classifier.

    Scores a triple by encoding the relation paths between its entities with
    a recursive composition matrix, optionally fusing entity semantics, and
    comparing each path encoding against the target relation's embedding.
    """

    def __init__(
        self,
        n_rels: int,
        emb_dim: int,
        sem_dim: int = None,
        pooling: str = "avg",
        optimiser: str = "sgd",
        no_early_stopping: bool = False,
        early_stopping: str = "val_loss",
        learning_rate: float = 0.0001,
        batch_size: int = None,
        no_semantics: bool = False,
    ):
        """
        Parameters:
            n_rels: Number of relations in the dataset.
            emb_dim: Dimentionality of relation embeddings.
            sem_dim: Size of the entity-semantics vectors; defaults to n_rels.
            pooling: Aggregation of path/relation similarities ("avg", "lse",
                or "max").
            optimiser: "sgd" or "adam".
            no_early_stopping / early_stopping: disable, or pick the metric
                monitored by, the EarlyStopping callback.
            learning_rate: Optimiser learning rate.
            batch_size: Stored in hparams only; not used by the model itself.
            no_semantics: Skip the entity-semantics branch entirely.
        """
        super().__init__()
        assert pooling in ["avg", "lse", "max"], f"pooling function '{pooling}' unknown"
        assert optimiser in ["sgd", "adam"], f"optimiser '{optimiser}' unknown"
        if sem_dim is None:
            sem_dim = n_rels
        self.save_hyperparameters(
            "n_rels",
            "emb_dim",
            "sem_dim",
            "pooling",
            "optimiser",
            "no_early_stopping",
            "early_stopping",
            "learning_rate",
            "batch_size",
            "no_semantics",
        )
        # (n_rels, emb_dim + 1)
        # +1 to account for padding_idx
        self.rel_emb = nn.Embedding(
            self.hparams.n_rels + 1, self.hparams.emb_dim, padding_idx=0
        )
        # (emb_dim, 2 * emb_dim) -- composes two path encodings into one.
        self.comp = nn.Parameter(
            torch.rand(self.hparams.emb_dim, 2 * self.hparams.emb_dim)
        )
        nn.init.xavier_uniform_(self.comp.data)
        # (emb_dim, 2 * n_rels + emb_dim) -- fuses entity semantics with a
        # path or relation encoding.
        if not self.hparams.no_semantics:
            self.ent_comp = nn.Parameter(
                torch.rand(
                    self.hparams.emb_dim,
                    2 * self.hparams.sem_dim + self.hparams.emb_dim,
                )
            )
            nn.init.xavier_uniform_(self.ent_comp.data)
        # Setup metrics
        metrics = tm.MetricCollection(
            {
                "mrr": tm.RetrievalMRR(),
                **{f"h@{k}": tm.RetrievalPrecision(k=k) for k in [1, 3, 10]},
            }
        )
        self.val_metrics = metrics.clone(prefix="val_")
        self.test_metrics = metrics.clone(prefix="test_")

    def forward(self, path, relation, head_sem=None, tail_sem=None):
        """
        Parameters:
            path: (batch_size, n_paths, path_length)
            relation: (batch_size)
            head_sem / tail_sem: (batch_size, sem_dim); required unless the
                model was built with no_semantics.
        Return:
            (batch_size) -- sigmoid-squashed triple scores.
        """
        n_paths = path.size()[1]
        # (batch_size, n_paths, emb_dim)
        path_emb = self._encode_emb_path(self.rel_emb(path))
        # (batch_size, emb_dim)
        rel_emb = self.rel_emb(relation)
        if not self.hparams.no_semantics:
            # Surround each path encoding with the two entities' semantics.
            path_emb = torch.cat(
                [
                    head_sem.unsqueeze(1).repeat_interleave(n_paths, dim=1),
                    path_emb,
                    tail_sem.unsqueeze(1).repeat_interleave(n_paths, dim=1),
                ],
                dim=2,
            )
            # (batch_size, n_paths, emb_dim)
            path_emb = torch.sigmoid(
                torch.matmul(self.ent_comp, path_emb.unsqueeze(-1)).squeeze(-1)
            )
            rel_emb = torch.cat([head_sem, rel_emb, tail_sem], dim=1)
            rel_emb = torch.sigmoid(
                torch.matmul(self.ent_comp, rel_emb.unsqueeze(-1)).squeeze(-1)
            )
        # (batch_size, n_paths) -- dot-product similarity per path.
        similarities = torch.matmul(path_emb, rel_emb.unsqueeze(-1)).squeeze(-1)
        # (batch_size)
        if self.hparams.pooling == "avg":
            agg = torch.mean(similarities, dim=1)
        elif self.hparams.pooling == "lse":
            agg = torch.logsumexp(similarities, dim=1)
        elif self.hparams.pooling == "max":
            agg, _ = torch.max(similarities, dim=1)
        return torch.sigmoid(agg)

    def configure_optimizers(self):
        if self.hparams.optimiser == "sgd":
            optim_class = optim.SGD
        elif self.hparams.optimiser == "adam":
            optim_class = optim.Adam
        return optim_class(self.parameters(), lr=self.hparams.learning_rate)

    def configure_callbacks(self) -> tp.List[ptl.callbacks.Callback]:
        """Optionally attach EarlyStopping on the configured metric."""
        # val_loss is minimised; every other monitored metric is maximised.
        monitor_mode = "min" if self.hparams.early_stopping == "val_loss" else "max"
        callbacks = []
        # callbacks = [
        #     ptl.callbacks.ModelCheckpoint(
        #         monitor=self.hparams.early_stopping, mode=monitor_mode
        #     )
        # ]
        if not self.hparams.no_early_stopping:
            callbacks.append(
                ptl.callbacks.EarlyStopping(
                    monitor=self.hparams.early_stopping, mode=monitor_mode
                )
            )
        return callbacks

    def training_step(self, batch, _batch_idx):
        _head, _tail, head_sem, tail_sem, relation, path, label = batch
        pred = self(path, relation, head_sem=head_sem, tail_sem=tail_sem)
        loss = F.binary_cross_entropy(pred, label)
        # Log
        self.log("train_loss", loss, sync_dist=True)
        return loss

    def validation_step(self, batch, batch_idx):
        _head, _tail, head_sem, tail_sem, relation, path, label = batch
        pred = self(path, relation, head_sem=head_sem, tail_sem=tail_sem)
        loss = F.binary_cross_entropy(pred, label)
        # Compute and log metrics.
        label = label.int()
        # Retrieval metrics group predictions by query id; here one batch is
        # treated as one query.
        retr_idx = torch.tensor(batch_idx).expand_as(label)
        # retr_idx = batch_idx.clone().detach().expand_as(label)
        metrics = self.val_metrics(pred, label, retr_idx)
        self.log("val_loss", loss, sync_dist=True)
        self.log_dict(metrics, sync_dist=True)

    def test_step(self, batch, batch_idx):
        _head, _tail, head_sem, tail_sem, relation, path, label = batch
        pred = self(path, relation, head_sem=head_sem, tail_sem=tail_sem)
        # Compute and log metrics.
        label = label.int()
        retr_idx = torch.tensor(batch_idx).expand_as(label)
        metrics = self.test_metrics(pred, label, retr_idx)
        self.log_dict(metrics)

    def _encode_emb_path(self, path):
        """Recursively fold a path of relation embeddings into one vector.

        Parameters:
            path: (batch_size, n_paths, path_length, emb_dim)
        Return:
            (batch_size, n_paths, emb_dim)
        """
        # (batch_size, n_paths, path_length - 1, emb_dim), (batch_size, n_paths, 1, emb_dim)
        head, tail = torch.split(path, [path.shape[2] - 1, 1], dim=2)
        # (batch_size, n_paths, emb_dim)
        tail = tail.squeeze(2)
        if head.shape[2] == 0:
            # Single-edge path: its embedding is the encoding.
            return tail
        # (batch_size, n_paths, emb_dim)
        head = self._encode_emb_path(head)
        # (batch_size, n_paths, emb_size * 2)
        stack = torch.cat([head, tail], dim=2)
        # (batch_size, n_paths, emb_dim)
        product = torch.matmul(self.comp, stack.unsqueeze(-1)).squeeze(-1)
        return torch.sigmoid(product)

    @classmethod
    def add_argparse_args(
        cls, parent_parser: argparse.ArgumentParser
    ) -> argparse.ArgumentParser:
        """Register the model's hyper-parameters on an argument parser."""
        group_parser = parent_parser.add_argument_group("Model")
        group_parser.add_argument("--emb_dim", type=int, default=100)
        group_parser.add_argument("--sem_dim", type=int)
        group_parser.add_argument("--pooling", type=str, default="avg")
        group_parser.add_argument("--optimiser", type=str, default="sgd")
        group_parser.add_argument("--early_stopping", type=str, default="val_loss")
        group_parser.add_argument("--no_early_stopping", action="store_true")
        group_parser.add_argument("--learning_rate", type=float, default=0.0001)
        group_parser.add_argument("--no_semantics", action="store_true")
        return parent_parser
|
{"/kgdata/__main__.py": ["/kgdata/dataset.py", "/kgdata/path.py", "/kgdata/sample.py"], "/tune.py": ["/kgdata/model.py"], "/train.py": ["/kgdata/model.py"]}
|
33,978,193
|
emilbaekdahl/masters-code
|
refs/heads/main
|
/kgdata/feature.py
|
import numpy as np
def rel_counts(data):
    """Entity-by-relation co-occurrence counts.

    Melts the triple frame so that heads and tails both count as entity
    occurrences, then tabulates how often each entity appears with each
    relation. Returns a frame indexed by entity with one column per
    relation; missing combinations are filled with 0.
    """
    long_form = data.melt(id_vars="relation", value_name="entity")
    pair_counts = long_form.value_counts(["entity", "relation"])
    return pair_counts.unstack(fill_value=0)
def rel_props(data):
    """Row-normalised rel_counts: each entity's relation counts expressed as
    proportions summing to 1."""
    counts = rel_counts(data)
    return counts.div(counts.sum(axis=1), axis=0)
def rel_dists(data):
    """Softmax-like relation distribution per entity: counts are normalised
    per entity, exponentiated, and renormalised."""
    per_entity = rel_counts(data).T
    normalised = per_entity.apply(lambda column: column / column.sum())
    exponentiated = normalised.apply(np.exp)
    distribution = exponentiated.apply(lambda column: column / column.sum())
    return distribution.T
|
{"/kgdata/__main__.py": ["/kgdata/dataset.py", "/kgdata/path.py", "/kgdata/sample.py"], "/tune.py": ["/kgdata/model.py"], "/train.py": ["/kgdata/model.py"]}
|
33,978,194
|
emilbaekdahl/masters-code
|
refs/heads/main
|
/tune.py
|
import test_tube
from kgdata.model import DataModule, Model
def main():
    """Run the hyper-parameter search.

    NOTE(review): still a stub -- the module-level script configures a SLURM
    cluster but never submits jobs through this function.
    """
    ...
if __name__ == "__main__":
    # Grid search over embedding dimensionality and optimiser choice.
    parser = test_tube.HyperOptArgumentParser(strategy="grid_search")
    parser.opt_list(
        "--emb_dim", type=int, default=100, options=[10, 100, 1000], tunable=True
    )
    parser.opt_list(
        "--optimiser", type=str, default="sgd", options=["sgd", "adam"], tunable=True
    )
    hparams = parser.parse_args()
    print(list(hparams.trials(1)))
    # SLURM configuration: 2 GPUs / 4 CPUs per experiment, e-mail on finish.
    cluster = test_tube.SlurmCluster(
        hyperparam_optimizer=hparams, log_path="test/log/dir"
    )
    cluster.per_experiment_nb_gpus = 2
    cluster.per_experiment_nb_cpus = 4
    cluster.notify_job_status("ebakda16@student.aau.dk", on_done=True, on_fail=True)
    # NOTE(review): no cluster.optimize_parallel_cluster(...) call follows --
    # jobs are configured but never submitted. Presumably unfinished; confirm.
{"/kgdata/__main__.py": ["/kgdata/dataset.py", "/kgdata/path.py", "/kgdata/sample.py"], "/tune.py": ["/kgdata/model.py"], "/train.py": ["/kgdata/model.py"]}
|
33,978,195
|
emilbaekdahl/masters-code
|
refs/heads/main
|
/kgdata/sample.py
|
import concurrent.futures
import os
import numpy as np
import pandas as pd
import tqdm.auto as tqdm
from . import util
# Module-level RNG used by gen_neg_samples; unseeded, so corruption choices
# differ between runs.
rng = np.random.default_rng()
def gen_neg_samples(dataset):
    """Generate one corrupted triple per triple in `dataset`.

    The corrupted side (head or tail) is drawn per relation from a Bernoulli
    distribution parameterised by hpt / (hpt + tph) ("bern" sampling).
    Replacement entities are sampled, per group, from entities not already
    observed on the corrupted side.

    Fix: the previous version wrote replacements through chained indexing
    (``data[mask][col] = ...``), which assigns to a temporary copy and left
    the returned frame uncorrupted. Replacements are now written back with
    ``DataFrame.loc``; ``GroupBy.transform`` keeps the original row index so
    the assignment aligns correctly.
    """
    data = dataset.data.join(_replace_tail_prob(dataset.data), on="relation").assign(
        replace_tail=lambda data: rng.binomial(1, data["replace_tail_prob"])
    )
    tail_mask = data["replace_tail"] == 1
    head_mask = data["replace_tail"] == 0
    data.loc[tail_mask, "tail"] = (
        data.loc[tail_mask]
        .groupby(["head", "relation"])["tail"]
        .transform(
            lambda tail: dataset.entities[~dataset.entities.isin(tail)]
            .sample(len(tail))
            .values
        )
    )
    data.loc[head_mask, "head"] = (
        data.loc[head_mask]
        .groupby(["tail", "relation"])["head"]
        .transform(
            lambda head: dataset.entities[~dataset.entities.isin(head)]
            .sample(len(head))
            .values
        )
    )
    return data[["head", "relation", "tail"]]
def _replace_tail_prob(data):
return (
data.groupby("relation")
.apply(
lambda group: pd.Series(
{
"tph": group.groupby("head").size().sum() / group["head"].nunique(),
"hpt": group.groupby("tail").size().sum() / group["tail"].nunique(),
}
)
)
.agg(lambda data: data["hpt"] / (data["hpt"] + data["tph"]), axis=1)
).rename("replace_tail_prob")
class NegativeSampler:
    """Generates corrupted triples using Bernoulli ("bern") head/tail choice.

    For each relation the probability of replacing the tail is
    hpt / (hpt + tph), so the more varied side is corrupted more often.
    """

    def __init__(self, data, seed=None):
        # data: DataFrame with columns head, relation, tail.
        self.data = data
        self.seed = seed

    def __call__(self, head, relation, tail):
        """Return one corrupted version of the triple (head, relation, tail)."""
        replace_tail = self.rng.binomial(1, self.replace_tail_probs[relation])
        triples = self.data[self.data["relation"] == relation]
        # Entities that would turn the corrupted triple into an observed one.
        if replace_tail:
            invalid_entities = triples[triples["head"] == head]["tail"]
        else:
            invalid_entities = triples[triples["tail"] == tail]["head"]
        # NOTE(review): a fixed `seed` makes every call draw the same sample
        # position from the candidates -- confirm this reproducibility
        # behaviour is intended.
        new_entity = (
            self.entities[~self.entities.isin(invalid_entities)]
            .sample(1, random_state=self.seed)
            .iloc[0]
        )
        if replace_tail:
            return head, relation, new_entity
        return new_entity, relation, tail

    def generate(self, triples, chunk_size=100, max_workers=None):
        """Corrupt an iterable of (head, relation, tail) triples in parallel."""
        if max_workers is None and "SLURM_CPUS_PER_TASK" in os.environ:
            max_workers = int(os.environ["SLURM_CPUS_PER_TASK"])
        with concurrent.futures.ProcessPoolExecutor(max_workers) as pool:
            jobs = pool.map(self, *zip(*triples), chunksize=chunk_size)
            samples = list(tqdm.tqdm(jobs, total=len(triples)))
        return pd.DataFrame(samples, columns=["head", "relation", "tail"])

    @util.cached_property
    def rng(self):
        # Built lazily; presumably so each worker process constructs its own
        # generator after pickling -- confirm.
        return np.random.default_rng(seed=self.seed)

    @util.cached_property
    def entities(self):
        """All entities appearing as head or tail."""
        return pd.Series(
            pd.concat(
                [self.data["head"], self.data["tail"]], ignore_index=True
            ).unique()
        )

    @util.cached_property
    def replace_tail_probs(self):
        """Per-relation tail-replacement probability hpt / (hpt + tph)."""
        probs = self.data.groupby("relation").apply(
            lambda group: pd.Series(
                {
                    "tph": group.groupby("head").size().sum()
                    / len(group["head"].unique()),
                    "hpt": group.groupby("tail").size().sum()
                    / len(group["tail"].unique()),
                }
            )
        )
        return probs["hpt"] / (probs["hpt"] + probs["tph"])

    @staticmethod
    def generate_samples(data, neg_rate=1, max_workers=None, **kwargs):
        """Build a frame of negatives for a `neg_rate`-sized sample of `data`."""
        sampler = NegativeSampler(data, **kwargs)
        pos_samples = data.sample(frac=neg_rate, replace=neg_rate > 1)
        if max_workers is None and "SLURM_CPUS_PER_TASK" in os.environ:
            max_workers = int(os.environ["SLURM_CPUS_PER_TASK"])
        with concurrent.futures.ProcessPoolExecutor(max_workers) as pool:
            neg_samples = list(
                tqdm.tqdm(
                    pool.map(
                        sampler,
                        *zip(*pos_samples.itertuples(index=False)),
                        chunksize=100,
                    ),
                    desc=f"Generating negative samples (rate {neg_rate})",
                    total=len(pos_samples),
                    unit="triples",
                )
            )
        return pd.DataFrame(neg_samples, columns=["head", "relation", "tail"])
|
{"/kgdata/__main__.py": ["/kgdata/dataset.py", "/kgdata/path.py", "/kgdata/sample.py"], "/tune.py": ["/kgdata/model.py"], "/train.py": ["/kgdata/model.py"]}
|
33,978,196
|
emilbaekdahl/masters-code
|
refs/heads/main
|
/kgdata/dataset.py
|
import abc
import itertools as it
import os
import pathlib
import re
import shutil
import networkx as nx
import numpy as np
import pandas as pd
import tqdm
from . import decompress, download, feature, sparql, subgraph, util
@util.delegate(
    "neighbourhood",
    "all_neighbourhoods",
    "enclosing",
    "all_enclosing",
    "all_neighbourhood_sizes",
    "neighbourhood_sizes",
    "enclosing_sizes",
    "all_enclosing_sizes",
    "stochastic_neighbourhood",
    to_attribute="subgraph_extractor",
)
class Dataset:
    """In-memory triple store; the subgraph-extraction methods listed in the
    @util.delegate decorator are forwarded to a subgraph.Extractor."""

    def __init__(self, data):
        # data: DataFrame with columns head, relation, tail.
        self.data = data

    def __len__(self):
        return len(self.data)

    @util.cached_property
    def entities(self):
        """All distinct entities appearing as head or tail."""
        return pd.Series(
            pd.concat(
                [self.data["head"], self.data["tail"]], ignore_index=True
            ).unique()
        )

    @util.cached_property
    def relations(self):
        return pd.Series(self.data["relation"].unique())

    @util.cached_property
    def rel_to_idx(self):
        # relation label -> positional index.
        return dict(zip(self.relations, self.relations.index))

    def rel_seq_to_idx(self, rel_seq):
        """Map a sequence of relation labels to their integer indices."""
        return [self.rel_to_idx[rel] for rel in rel_seq]

    @util.cached_property
    def stats(self):
        """One-row summary: entity/relation/triple counts plus density."""
        return pd.DataFrame(
            {
                "entities": len(self.entities),
                "relations": len(self.relations),
                "triples": len(self),
            },
            index=[self.__class__.__name__],
        ).assign(triples_per_entity=lambda stats: stats["triples"] / stats["entities"])

    @util.cached_property
    def entity_pairs(self):
        """Distinct ordered (head, tail) pairs."""
        return self.data[["head", "tail"]].drop_duplicates(ignore_index=True)

    @util.cached_property
    def unique_entity_pairs(self):
        """Distinct unordered pairs; self-pairs carry the entity in both columns.

        NOTE(review): building the frame from a set makes row order
        non-deterministic across runs -- confirm callers do not rely on it.
        """
        sets = set(map(frozenset, self.entity_pairs.itertuples(index=False)))
        data = pd.DataFrame(sets, columns=["entity_1", "entity_2"])
        # A self-pair collapses to a one-element frozenset, leaving one of
        # the columns NaN; copy the other column over.
        data.loc[data["entity_1"].isna(), "entity_1"] = data[data["entity_1"].isna()][
            "entity_2"
        ]
        data.loc[data["entity_2"].isna(), "entity_2"] = data[data["entity_2"].isna()][
            "entity_1"
        ]
        return data

    @util.cached_property
    def subgraph_extractor(self):
        # Target of the delegated subgraph methods (see class decorator).
        return subgraph.Extractor(self)

    def rel_dists(self):
        """Relation distribution per entity (see feature.rel_dists)."""
        return feature.rel_dists(self.data)

    @util.cached_property
    def graph(self):
        """networkx multigraph view of the triples."""
        return nx.MultiDiGraph(
            zip(self.data["head"], self.data["tail"], self.data["relation"])
        )

    @staticmethod
    def load(path):
        """Read a CSV of triples (all columns as strings) into a Dataset."""
        return Dataset(pd.read_csv(path, dtype=str))
class PersistedDataset(Dataset):
    """Dataset stored on disk as train/valid/test CSV splits.

    Subclasses implement `download` to fetch and normalise the raw files.
    """

    # Optional display name; defaults to the class name (see `name`).
    NAME = None

    def __init__(self, path, split=None):
        self.path = path
        if not isinstance(self.path, pathlib.Path):
            self.path = pathlib.Path(self.path)
        # `split` may be None (all splits), a single name, or a list.
        self.split = split
        if self.split is None:
            self.split = ["train", "valid", "test"]
        elif isinstance(self.split, str):
            self.split = [self.split]

    @util.cached_property
    def name(self):
        return self.__class__.NAME or self.__class__.__name__

    @abc.abstractmethod
    def download(self):
        """Fetch the raw data and write train/valid/test CSVs to self.path."""
        ...

    def load_split(self, split):
        """New instance of the same dataset restricted to one split."""
        return self.__class__(self.path, split=split)

    @util.cached_property
    def split_file_names(self):
        return [self.path / f"{split}.csv" for split in self.split]

    @util.cached_property
    def data(self):
        # Download lazily on first access when the files are missing.
        if not self.path.exists():
            self.download()
        return pd.concat(map(pd.read_csv, self.split_file_names), ignore_index=True)

    def subset(self, size, path, force=False):
        """Write a sampled copy of every split to `path` and return it.

        `size` > 1 is an absolute row count, otherwise a fraction.
        """
        if not isinstance(path, pathlib.Path):
            path = pathlib.Path(path)
        if not path.exists() or force:
            path.mkdir(exist_ok=True, parents=True)
            params = {"n" if size > 1 else "frac": size}
            for file in self.split_file_names:
                pd.read_csv(file).sample(**params).to_csv(path / file.name, index=False)
        return self.__class__(path, split=self.split)
class FB15K237Raw(PersistedDataset):
    """FB15K-237 with raw Freebase MIDs as entity identifiers."""

    NAME = "FB15K237"

    def download(self):
        """Download and unpack FB15K-237.2, converting each split to CSV.

        Fix: the decompressed path returned by decompress_zip was bound to an
        unused local; the call is kept for its side effect only.
        """
        compressed_path = download.download_file(
            "https://download.microsoft.com/download/8/7/0/8700516A-AB3D-4850-B4BB-805C515AECE1/FB15K-237.2.zip",
            self.path,
        )
        decompress.decompress_zip(compressed_path, self.path, keep=True)
        source_dir = self.path / "Release"
        for file_name in tqdm.tqdm(
            ["train.txt", "valid.txt", "test.txt"], desc="Moving files", unit="files"
        ):
            # Re-save the tab-separated split with an explicit header as CSV.
            pd.read_csv(
                source_dir / file_name,
                sep="\t",
                names=["head", "relation", "tail"],
            ).to_csv((self.path / file_name).with_suffix(".csv"), index=False)
        shutil.rmtree(source_dir)
class FB15K237(PersistedDataset):
    """FB15K-237 with Freebase MIDs translated to Wikidata English labels."""

    @util.cached_property
    def raw_dataset(self):
        return FB15K237Raw(self.path, split=self.split)

    @util.cached_property
    def data(self):
        """Raw triples with heads/tails replaced by Wikidata labels."""
        data = self.raw_dataset.data.assign(
            head=self.wikidata_labels.loc[self.raw_dataset.data["head"]].values,
            tail=self.wikidata_labels.loc[self.raw_dataset.data["tail"]].values,
        )
        # Unlabelled entities fall back to raw identifiers.
        # NOTE(review): the fallback reads the *opposite* column of the raw
        # frame (NaN tail <- raw "head", NaN head <- raw "tail"); this looks
        # like a head/tail mix-up -- confirm intended behaviour.
        data.loc[data["tail"].isna(), "tail"] = self.raw_dataset.data[
            data["tail"].isna()
        ]["head"]
        data.loc[data["head"].isna(), "head"] = self.raw_dataset.data[
            data["head"].isna()
        ]["tail"]
        return data

    @util.cached_property
    def wikidata_labels(self):
        # Labels are cached on disk and queried from Wikidata on first use.
        path = self.path / "wikidata_labels.csv"
        if not path.exists():
            self.get_wikidata_labels().to_csv(path)
        return pd.read_csv(path, index_col=0)

    def get_wikidata_labels(self):
        """Query Wikidata for English labels of all Freebase ids (P646).

        Returns a Series mapping each raw entity to its label, or None when
        no label was found.
        """
        query = (
            "SELECT ?fb ?itemLabel "
            "WHERE {{ ?item wdt:P646 ?fb. VALUES ?fb {{ {fb_ids} }} "
            "SERVICE wikibase:label {{ bd:serviceParam wikibase:language 'en'. }} }}"
        ).format(
            fb_ids=" ".join([f"'{entity}'" for entity in self.raw_dataset.entities])
        )
        result = sparql.Wikidata().query(query)
        # NOTE(review): itertools.groupby only groups *consecutive* equal
        # keys; if the bindings are not sorted by ?fb, groups may be split.
        grouped = {
            key: list(value)
            for key, value in it.groupby(
                result.bindings, lambda value: value["fb"]["value"]
            )
        }

        def reduce_group(entity):
            # First label for the entity, or None when absent/malformed.
            try:
                return list(grouped[entity])[0]["itemLabel"]["value"]
            except (IndexError, ValueError, KeyError):
                return None

        return pd.Series(
            {entity: reduce_group(entity) for entity in self.raw_dataset.entities},
            name="wikidata_label",
        )
class WN18RR(PersistedDataset):
    """WordNet WN18RR link-prediction dataset."""

    def download(self):
        """Fetch the WN18RR archive and convert its TSV splits to CSV."""
        compressed_path = download.download_file(
            "https://data.deepai.org/WN18RR.zip", self.path
        )
        # NOTE(review): decompressed_path is unused.
        decompressed_path = decompress.decompress_zip(
            compressed_path, self.path, keep=True
        )
        for file_name in ["train.txt", "valid.txt", "test.txt"]:
            pd.read_csv(
                self.path / "WN18RR" / "text" / file_name,
                sep="\t",
                names=["head", "relation", "tail"],
            ).to_csv((self.path / file_name).with_suffix(".csv"), index=False)
        shutil.rmtree(self.path / "WN18RR")
class YAGO3(PersistedDataset):
    """YAGO3-10 dataset (taken from the ConvE release)."""

    def download(self):
        """Fetch the YAGO3-10 tarball and convert its TSV splits to CSV in place."""
        compressed_path = download.download_file(
            "https://github.com/TimDettmers/ConvE/raw/5feb358eb7dbd1f534978cdc4c20ee0bf919148a/YAGO3-10.tar.gz",
            self.path,
        )
        decompress.decompress_tar(compressed_path, self.path, keep=True)
        for file_name in tqdm.tqdm(
            ["train.txt", "valid.txt", "test.txt"], desc="Moving files"
        ):
            path = self.path / file_name
            pd.read_csv(
                path,
                sep="\t",
                names=["head", "relation", "tail"],
            ).to_csv(path.with_suffix(".csv"), index=False)
            # Drop the original .txt once the .csv is written.
            path.unlink()
class OpenBioLink(PersistedDataset):
    """OpenBioLink high-quality directed benchmark, including negative splits."""

    def download(self):
        """Fetch the OpenBioLink HQ_DIR archive and normalise its split files."""
        compressed_path = download.download_file(
            "https://zenodo.org/record/3834052/files/HQ_DIR.zip", self.path
        )
        decompress.decompress_zip(compressed_path, self.path, keep=True)
        # (source file stem, target split name); negatives get a "_neg" suffix
        # to match the naming used elsewhere in the code base.
        file_pairs = [
            ("train", "train"),
            ("val", "valid"),
            ("test", "test"),
            ("negative_train", "train_neg"),
            ("negative_val", "valid_neg"),
            ("negative_test", "test_neg"),
        ]
        for source, target in tqdm.tqdm(file_pairs, desc="Moving files", unit="files"):
            # Only the first three columns hold the triple; the rest are dropped.
            pd.read_csv(
                self.path / "HQ_DIR" / "train_test_data" / f"{source}_sample.csv",
                sep="\t",
                names=["head", "relation", "tail"],
                usecols=[0, 1, 2],
            ).to_csv(self.path / f"{target}.csv", index=False)
        shutil.rmtree(self.path / "HQ_DIR")
|
{"/kgdata/__main__.py": ["/kgdata/dataset.py", "/kgdata/path.py", "/kgdata/sample.py"], "/tune.py": ["/kgdata/model.py"], "/train.py": ["/kgdata/model.py"]}
|
33,978,197
|
emilbaekdahl/masters-code
|
refs/heads/main
|
/kgdata/__main__.py
|
import pathlib as pl
import click
import pandas as pd
import kgdata.dataset
import kgdata.path
import kgdata.sample
# Maps the CLI's short dataset names to their dataset classes.
DATASET_MAP = {
    "fb": kgdata.dataset.FB15K237Raw,
    "wn": kgdata.dataset.WN18RR,
    "yago": kgdata.dataset.YAGO3,
    "bio": kgdata.dataset.OpenBioLink,
}
# click argument type restricting dataset arguments to the known keys.
dataset_choices = click.Choice(list(DATASET_MAP.keys()))
def dataset_class_from_string(dataset):
    """Resolve a short dataset name (e.g. ``"fb"``) to its dataset class.

    Raises ``ValueError`` for names not present in ``DATASET_MAP``.
    """
    try:
        return DATASET_MAP[dataset]
    except KeyError:
        raise ValueError(f"dataset '{dataset}' unknown") from None
# Root command group; subcommands are registered below via @cli.command().
# (Kept docstring-free so click's --help output is unchanged.)
@click.group()
def cli():
    pass
@cli.command()
@click.argument("dataset", type=dataset_choices)
@click.argument("target", type=click.Path(file_okay=False, writable=True))
def download(dataset, target):
    # Download the chosen dataset into the target folder.
    dataset_class = dataset_class_from_string(dataset)
    dataset_class(target).download()
@cli.command()
@click.argument("dataset", type=dataset_choices)
@click.argument("source", type=click.Path(file_okay=False, exists=True))
@click.option(
    "--split",
    "-s",
    "splits",
    multiple=True,
    default=["train", "valid", "test"],
    show_default=True,
)
@click.option("--neg-rate", "-n", type=float, default=1, show_default=True)
@click.option("--seed", type=int, default=1, show_default=True)
@click.option("--max-workers", type=int)
def neg_samples(dataset, source, splits, neg_rate, seed, max_workers):
    # Generate negative samples for each split and write them to
    # <source>/<split>_neg.csv.
    source = pl.Path(source)
    dataset_class = dataset_class_from_string(dataset)
    for split in splits:
        dataset = dataset_class(source, split=split)
        kgdata.sample.NegativeSampler.generate_samples(
            dataset.data, neg_rate=neg_rate, seed=seed, max_workers=max_workers
        ).to_csv((source / f"{split}_neg.csv"), index=False)
@cli.command()
@click.argument("dataset", type=dataset_choices)
@click.argument("source", type=click.Path(exists=True))
@click.option(
    "--split",
    "-s",
    "splits",
    multiple=True,
    default=["train", "valid", "test", "train_neg", "valid_neg", "test_neg"],
    show_default=True,
)
@click.option("--depth", "-d", type=int, default=2, show_default=True)
@click.option("--length", "-l", type=(int, int), default=(1, 3), show_default=True)
@click.option("--max-pairs", "-m", type=float)
@click.option("--seed", type=int, default=1, show_default=True)
@click.option("--max-workers", type=int)
@click.option("--stochastic/--no-stochastic", default=False)
def paths(
    dataset, source, splits, depth, length, max_pairs, seed, max_workers, stochastic
):
    # Enumerate relation paths between entity pairs of each split and write
    # them to <source>/<split>_paths.csv.
    source = pl.Path(source)
    dataset_class = dataset_class_from_string(dataset)
    min_length, max_length = length
    # A whole-numbered --max-pairs is an absolute count, not a fraction.
    if max_pairs and int(max_pairs) == max_pairs:
        max_pairs = int(max_pairs)
    for split in splits:
        if not (source / f"{split}.csv").exists():
            continue
        # Paths are searched in the split's triples plus the training graph.
        combined_dataset = dataset_class(source, split=set([split, "train"]))
        split_dataset = dataset_class(source, split=split)
        pairs = split_dataset.unique_entity_pairs
        if max_pairs is not None:
            params = {"n" if max_pairs > 1 else "frac": max_pairs, "random_state": seed}
            pairs = pairs.sample(**params)
        pairs = [tuple(pair) for pair in pairs.itertuples(index=False)]
        kgdata.path.all_relation_paths(
            combined_dataset,
            pairs,
            depth=depth,
            min_length=min_length,
            max_length=max_length,
            max_workers=max_workers,
            stochastic=stochastic,
        ).to_csv(source / f"{split}_paths.csv", index=False)
@cli.command()
@click.argument("dataset", type=dataset_choices)
@click.argument("source", type=click.Path(file_okay=False))
@click.argument("target", type=click.Path(dir_okay=False, writable=True))
@click.option("--split")
@click.option("--depth", type=int, default=2, show_default=True)
@click.option("--max-pairs", type=float)
@click.option("--max-workers", type=int)
@click.option("--stochastic/--no-stochastic", default=True)
def enclosing_sizes(
    dataset, source, target, split, depth, max_pairs, max_workers, stochastic
):
    # Compute enclosing-subgraph sizes for entity pairs and write them to the
    # target CSV.
    # A whole-numbered --max-pairs is an absolute count, not a fraction.
    if max_pairs and int(max_pairs) == max_pairs:
        max_pairs = int(max_pairs)
    dataset_class = dataset_class_from_string(dataset)
    data = dataset_class(source, split=split)
    data.all_enclosing_sizes(
        depth=depth, max_pairs=max_pairs, max_workers=max_workers, stochastic=stochastic
    ).to_csv(target)
@cli.command()
@click.argument("dataset", type=dataset_choices)
@click.argument("source", type=click.Path(file_okay=False))
@click.option("--depth", "-d", type=int, default=1)
@click.option("--max-entities", type=float)
@click.option("--max-workers", type=int)
@click.option("--stochastic/--no-stochastic", default=False)
@click.option("--chunk-size", type=int)
def neighbourhoods(
    dataset, source, depth, max_entities, max_workers, stochastic, chunk_size
):
    # Precompute entity neighbourhoods and store them under
    # <source>/neighbourhoods[/stochastic]/<depth>.csv.
    dataset_class = dataset_class_from_string(dataset)
    dataset = dataset_class(source)
    target_folder = pl.Path(source) / "neighbourhoods"
    # A whole-numbered --max-entities is an absolute count, not a fraction.
    if max_entities and int(max_entities) == max_entities:
        max_entities = int(max_entities)
    if stochastic:
        target_folder = target_folder / "stochastic"
    target_folder.mkdir(exist_ok=True, parents=True)
    dataset.all_neighbourhoods(
        depth=depth,
        max_entities=max_entities,
        max_workers=max_workers,
        stochastic=stochastic,
        chunk_size=chunk_size,
    ).groupby(level=0).apply(list).to_csv(target_folder / f"{depth}.csv")
@cli.command()
@click.argument("dataset", type=dataset_choices)
@click.argument("source", type=click.Path(file_okay=False))
@click.option("--depth", "-d", type=int, default=1)
@click.option("--max-pairs", type=float)
@click.option("--max-paths", type=int)
def nx_paths(dataset, source, depth, max_pairs, max_paths):
    # Enumerate relation sequences via networkx and write one file per split
    # to <source>/rel_seqs/<split>_<depth>.csv.
    dataset_class = dataset_class_from_string(dataset)
    dataset = dataset_class(source)
    # A whole-numbered --max-pairs is an absolute count, not a fraction.
    if max_pairs and max_pairs == int(max_pairs):
        max_pairs = int(max_pairs)
    for split in dataset.split:
        # Include the training graph so paths may traverse known triples.
        split_dataset = dataset_class(source, split=set([split] + ["train"]))
        rel_seqs = kgdata.path.all_nx_rel_seqs(
            split_dataset, max_pairs=max_pairs, max_paths=max_paths, depth=depth
        )
        (dataset.path / "rel_seqs").mkdir(exist_ok=True)
        rel_seqs.to_csv(dataset.path / "rel_seqs" / f"{split}_{depth}.csv")
# Entry point: run the click CLI when executed as a script or module.
if __name__ == "__main__":
    cli()
|
{"/kgdata/__main__.py": ["/kgdata/dataset.py", "/kgdata/path.py", "/kgdata/sample.py"], "/tune.py": ["/kgdata/model.py"], "/train.py": ["/kgdata/model.py"]}
|
33,978,198
|
emilbaekdahl/masters-code
|
refs/heads/main
|
/kgdata/path.py
|
import concurrent.futures as cf
import functools as ft
import os
import networkx as nx
import pandas as pd
import tqdm.auto as tqdm
def nx_rel_seqs(dataset, head, tail, max_paths=None, min_length=1, max_length=3):
    """Collect distinct relation-index sequences along simple paths from
    *head* to *tail* in ``dataset.graph``.

    Parameters:
        dataset: Object exposing a ``graph`` (nx.MultiDiGraph) and a
            ``rel_seq_to_idx`` mapping method — assumed project dataset.
        max_paths: Stop once this many distinct sequences are found.
        min_length, max_length: Inclusive bounds on path length in edges.

    Returns:
        List of relation-index sequences, deduplicated, in discovery order.
    """
    seqs = []
    all_paths = (
        path
        for path in nx.all_simple_edge_paths(
            dataset.graph, head, tail, cutoff=max_length
        )
        if len(path) >= min_length
    )
    for path in all_paths:
        # Edge triples are (head, tail, relation); keep only the relation.
        seq = [relation for _head, _tail, relation in path]
        seq = dataset.rel_seq_to_idx(seq)
        # Idiomatic membership test (was `not seq in seqs`).
        if seq not in seqs:
            seqs.append(seq)
        if max_paths and len(seqs) >= max_paths:
            break
    return seqs
def all_nx_rel_seqs(dataset, max_pairs=None, depth=3, **kwargs):
    """Relation-index sequences for (a sample of) the dataset's entity pairs.

    Returns a pd.Series of sequences indexed by (ent_1, ent_2); pairs with
    no connecting path are dropped.
    """
    pairs = dataset.unique_entity_pairs
    if max_pairs:
        # float -> sample a fraction of the pairs, int -> absolute number.
        params = {"frac" if isinstance(max_pairs, float) else "n": max_pairs}
        pairs = pairs.sample(**params)
    all_rel_seqs = [
        (
            (head, tail),
            nx_rel_seqs(dataset, head, tail, max_length=depth, **kwargs),
        )
        for head, tail in tqdm.tqdm(
            pairs.itertuples(index=False),
            total=len(pairs),
            desc=dataset.__class__.__name__,
        )
    ]
    # Drop pairs without any path between them.
    all_rel_seqs = [
        (index, rel_seqs) for index, rel_seqs in all_rel_seqs if len(rel_seqs) > 0
    ]
    # NOTE(review): pd.concat raises ValueError when every pair is pathless
    # (empty list of objects) — confirm whether that case can occur upstream.
    return pd.concat(
        [
            pd.Series(
                rel_seqs,
                index=pd.MultiIndex.from_tuples(
                    [(head, tail)] * len(rel_seqs), names=["ent_1", "ent_2"]
                ),
                name="rel_seq",
            )
            for (head, tail), rel_seqs in all_rel_seqs
        ]
    )
def relation_paths(data, head, tail, min_length=1, max_length=3):
    """Distinct relation-label paths connecting *head* to *tail*.

    Parameters:
        data: Triple dataframe with ``head``/``relation``/``tail`` columns,
            or a pre-built ``nx.MultiDiGraph`` whose edge keys are relations.
        head, tail: Endpoint entities.
        min_length, max_length: Inclusive bounds on path length in edges.

    Returns:
        List of relation-label lists, deduplicated, in discovery order.

    Raises:
        ValueError: If ``data`` is neither a dataframe nor a MultiDiGraph.
    """
    if isinstance(data, pd.DataFrame):
        # Edge keys are the relation labels, so parallel edges are preserved.
        graph = nx.MultiDiGraph(zip(data["head"], data["tail"], data["relation"]))
    elif isinstance(data, nx.MultiDiGraph):
        graph = data
    else:
        # Fixed typo in the error message ("dataframe of a graph").
        raise ValueError("data must be either a dataframe or a graph")
    rel_paths = []
    paths = (
        path
        for path in nx.all_simple_edge_paths(graph, head, tail, cutoff=max_length)
        if len(path) >= min_length
    )
    for path in paths:
        rel_path = [relation for _head, _tail, relation in path]
        if rel_path not in rel_paths:
            rel_paths.append(rel_path)
    return rel_paths
def all_relation_paths(data, pairs, max_workers=None, chunk_size=10, **kwargs):
    """Compute relation paths for many entity pairs in parallel.

    Parameters:
        data: Project dataset passed through to the worker.
        pairs: Iterable of (head, tail) tuples.
        max_workers: Pool size; defaults to $SLURM_CPUS_PER_TASK when set.
        chunk_size: Number of pairs handed to a worker at a time.

    Returns:
        DataFrame with columns ``ent_1``, ``ent_2``, ``path``.
    """
    if max_workers is None and "SLURM_CPUS_PER_TASK" in os.environ:
        # BUG FIX: environment values are strings, but ProcessPoolExecutor
        # requires max_workers to be an int.
        max_workers = int(os.environ["SLURM_CPUS_PER_TASK"])
    with cf.ProcessPoolExecutor(max_workers) as pool:
        function = ft.partial(_all_relation_paths_worker, data, **kwargs)
        jobs = pool.map(function, *zip(*pairs), chunksize=chunk_size)
        paths = list(tqdm.tqdm(jobs, total=len(pairs)))
    return pd.DataFrame(
        [entry for entries in paths for entry in entries],
        columns=["ent_1", "ent_2", "path"],
    )
def _all_relation_paths_worker(data, head, tail, depth=2, stochastic=False, **kwargs):
    # Process-pool worker: restrict the graph to the subgraph enclosing
    # (head, tail) and enumerate relation paths within it.
    subdata = data.enclosing(head, tail, depth=depth, stochastic=stochastic)
    subgraph = data.graph.edge_subgraph(
        zip(subdata["head"], subdata["tail"], subdata["relation"])
    )
    return [
        (head, tail, path) for path in relation_paths(subgraph, head, tail, **kwargs)
    ]
|
{"/kgdata/__main__.py": ["/kgdata/dataset.py", "/kgdata/path.py", "/kgdata/sample.py"], "/tune.py": ["/kgdata/model.py"], "/train.py": ["/kgdata/model.py"]}
|
33,978,199
|
emilbaekdahl/masters-code
|
refs/heads/main
|
/train.py
|
import argparse
import os.path
import pytorch_lightning as ptl
import pytorch_lightning.loggers
from kgdata.model import DataModule, Model
def main(args):
    """Train the model from parsed CLI arguments, then run the test loop."""
    data_module = DataModule.from_argparse_args(args)
    # Log runs under a folder named after the dataset directory.
    trainer = ptl.Trainer.from_argparse_args(
        args,
        logger=ptl.loggers.TensorBoardLogger(
            "lightning_logs", name=os.path.basename(args.path)
        ),
    )
    model = Model(
        n_rels=len(data_module.kg.relations),
        emb_dim=args.emb_dim,
        pooling=args.pooling,
        optimiser=args.optimiser,
        early_stopping=args.early_stopping,
        no_early_stopping=args.no_early_stopping,
        learning_rate=args.learning_rate,
        batch_size=args.batch_size,
        no_semantics=args.no_semantics,
        sem_dim=args.sem_dim,
    )
    trainer.fit(model, data_module)
    trainer.test()
if __name__ == "__main__":
    # Compose CLI flags contributed by the trainer, data module, and model.
    parser = argparse.ArgumentParser()
    parser = ptl.Trainer.add_argparse_args(parser)
    parser = DataModule.add_argparse_args(parser)
    parser = Model.add_argparse_args(parser)
    main(parser.parse_args())
|
{"/kgdata/__main__.py": ["/kgdata/dataset.py", "/kgdata/path.py", "/kgdata/sample.py"], "/tune.py": ["/kgdata/model.py"], "/train.py": ["/kgdata/model.py"]}
|
33,978,200
|
emilbaekdahl/masters-code
|
refs/heads/main
|
/kgdata/util.py
|
try:
from functools import cached_property
except ImportError:
from cached_property import cached_property
def delegate(*methods: str, to_attribute: str):
    """Class decorator that forwards the named methods to an attribute.

    Parameters:
        methods: Names of the methods to delegate.
        to_attribute: Name of the instance attribute that calls are
            forwarded to.

    Returns:
        The decorated class.

    Examples:
        >>> class Aggregator:
        ...     def __init__(self, collection):
        ...         self.collection = collection
        ...     def sum(self):
        ...         return sum(self.collection.values)
        ...     def mean(self):
        ...         return self.sum() / len(self.collection.values)
        >>> @delegate("sum", "mean", to_attribute="aggregator")
        ... class Collection:
        ...     def __init__(self, values):
        ...         self.values = values
        ...         self.aggregator = Aggregator(self)
        >>> collection = Collection([1, 10, 100, 1000])
        >>> collection.sum()
        1111
        >>> collection.mean()
        277.75
    """

    def define_method(name):
        # Resolve the target attribute at call time so reassigned or
        # lazily-set attributes are honoured.
        def forwarder(self, *args, **kwargs):
            return getattr(getattr(self, to_attribute), name)(*args, **kwargs)

        forwarder.__name__ = name
        return forwarder

    def _delegate(klass):
        for method in methods:
            setattr(klass, method, define_method(method))
        return klass

    return _delegate
|
{"/kgdata/__main__.py": ["/kgdata/dataset.py", "/kgdata/path.py", "/kgdata/sample.py"], "/tune.py": ["/kgdata/model.py"], "/train.py": ["/kgdata/model.py"]}
|
33,978,201
|
emilbaekdahl/masters-code
|
refs/heads/main
|
/kgdata/decompress.py
|
import pathlib
import tarfile
import typing as tp
import zipfile
import tqdm
def decompress_tar(path, dest=None, keep=False):
    """Extract a gzipped tarball.

    Parameters:
        path: Location of the ``.tar.gz`` file (str or Path).
        dest: Directory to extract into (current directory when ``None``).
        keep: When false, the archive file is deleted after extraction.

    Returns:
        The destination directory as passed in (``dest``).
    """
    # BUG FIX: accept plain strings as well as Path objects; the unlink()
    # below previously raised AttributeError for str inputs.
    if not isinstance(path, pathlib.Path):
        path = pathlib.Path(path)
    with tarfile.open(path, "r:gz") as tar_file:
        # NOTE(review): extract() trusts member names; malicious archives can
        # escape `dest` via path traversal — consider a member filter if the
        # source is ever untrusted.
        for file in tqdm.tqdm(tar_file.getmembers(), desc="Decompressing"):
            tar_file.extract(file, path=dest)
    if keep is False:
        path.unlink()
    return dest
def decompress_zip(
    path: tp.Union[str, pathlib.Path], dest=None, keep=False
) -> pathlib.Path:
    """Extract a zip archive.

    Parameters:
        path: Location of the zip file to decompress.
        dest: Directory to extract into (current directory when ``None``).
        keep: When false, the archive file is deleted after extraction.

    Returns:
        Path to the decompressed folder (the archive path minus its suffix).
    """
    # BUG FIX: the annotation admits str, but unlink()/with_suffix() below
    # need a Path — coerce first.
    if not isinstance(path, pathlib.Path):
        path = pathlib.Path(path)
    with zipfile.ZipFile(path, "r") as zip_file:
        for file in tqdm.tqdm(zip_file.namelist(), desc="Decompressing"):
            zip_file.extract(file, dest)
    if keep is False:
        path.unlink()
    return path.with_suffix("")
|
{"/kgdata/__main__.py": ["/kgdata/dataset.py", "/kgdata/path.py", "/kgdata/sample.py"], "/tune.py": ["/kgdata/model.py"], "/train.py": ["/kgdata/model.py"]}
|
33,978,202
|
emilbaekdahl/masters-code
|
refs/heads/main
|
/kgdata/download.py
|
import os.path
import pathlib
import typing as tp
import requests as rq
import tqdm
def download_file(url: str, dest_folder: tp.Union[str, pathlib.Path]) -> pathlib.Path:
    """Downloads a single file to a destination folder.

    Parameters:
        url: Location of the file to download.
        dest_folder: Folder to put the downloaded file in.

    Returns:
        Path to the downloaded file.

    Raises:
        requests.HTTPError: If the server answers with an error status.
    """
    if not isinstance(dest_folder, pathlib.Path):
        dest_folder = pathlib.Path(dest_folder)
    dest_folder.mkdir(parents=True, exist_ok=True)
    file_name = os.path.basename(url)
    dest = dest_folder / file_name
    # Simple cache: skip the download when the file already exists.
    if dest.exists():
        return dest
    response = rq.get(url, stream=True)
    # BUG FIX: fail loudly on HTTP errors instead of saving an error page.
    response.raise_for_status()
    # BUG FIX: Content-Length may be absent (chunked responses); fall back to
    # an indeterminate progress bar instead of raising KeyError.
    content_length = response.headers.get("Content-Length")
    file_size = int(content_length) if content_length is not None else None
    with open(dest, "wb") as file, tqdm.tqdm(
        desc="Downloading",
        unit="B",
        unit_scale=True,
        unit_divisor=1024,
        total=file_size,
    ) as pbar:
        for chunk in response.iter_content(chunk_size=1024):
            write_size = file.write(chunk)
            pbar.update(write_size)
    return dest
|
{"/kgdata/__main__.py": ["/kgdata/dataset.py", "/kgdata/path.py", "/kgdata/sample.py"], "/tune.py": ["/kgdata/model.py"], "/train.py": ["/kgdata/model.py"]}
|
33,978,203
|
emilbaekdahl/masters-code
|
refs/heads/main
|
/kgdata/sparql.py
|
import abc
import requests as rq
from . import util
class Endpoint:
    """A SPARQL endpoint identified by its base query URL."""

    def __init__(self, url):
        self.url = url
class Wikidata(Endpoint):
    """Client for the public Wikidata SPARQL endpoint."""

    def __init__(self):
        super().__init__("https://query.wikidata.org/sparql")

    def query(self, query):
        """POST a SPARQL query string and wrap the JSON reply."""
        headers = {
            "Content-Type": "application/sparql-query",
            "Accept": "application/json",
        }
        response = rq.post(
            self.url, params={"format": "json"}, headers=headers, data=query
        )
        return WikidataResult(response.json())
class WikidataResult:
    """Thin wrapper around the JSON payload of a Wikidata SPARQL response."""

    def __init__(self, data):
        self.data = data

    @util.cached_property
    def bindings(self):
        """The variable-binding rows of the result set."""
        results = self.data["results"]
        return results["bindings"]
|
{"/kgdata/__main__.py": ["/kgdata/dataset.py", "/kgdata/path.py", "/kgdata/sample.py"], "/tune.py": ["/kgdata/model.py"], "/train.py": ["/kgdata/model.py"]}
|
33,978,204
|
emilbaekdahl/masters-code
|
refs/heads/main
|
/kgdata/kg.py
|
from __future__ import annotations
import abc
import concurrent.futures as cf
import functools as ft
import operator
import typing as tp
import pandas as pd
from . import util
class Element(abc.ABC):
    """Base class for labelled knowledge-graph elements.

    An element compares equal to any other element with the same label and
    to the bare label string itself; hashing follows the label, so elements
    and their labels are interchangeable as dict/set keys.
    """

    label: str
    kg: KG

    def __init__(self, kg: KG, label: str) -> None:
        self.kg = kg
        self.label = label

    def __eq__(self, other: tp.Any) -> bool:
        if isinstance(other, Element):
            return self.label == other.label
        return isinstance(other, str) and self.label == other

    def __str__(self) -> str:
        return str(self.label)

    def __hash__(self):
        return hash(self.label)
class Entity(Element):
    """An entity node; provides access to its n-hop neighbourhood sub-KG."""

    @ft.lru_cache
    def neighbourhood(self, depth: int = 1) -> SubKG:
        """Sub-KG of triples within *depth* hops of this entity.

        Iteratively widens the selection: start from triples touching this
        entity, then (depth - 1 times) add every triple sharing an entity
        with the current selection.
        """
        # NOTE(review): @ft.lru_cache on an instance method keys on `self`
        # and keeps every Entity alive for the cache lifetime (ruff B019).
        reindexed = self.kg.long_data.reset_index()
        # e_indexed = self.kg.kong_data.set_index("entity")
        idx = ft.reduce(
            lambda acc, _depth: reindexed[
                reindexed["entity"].isin(
                    reindexed.loc[acc][["index"]].merge(reindexed, on="index")["entity"]
                )
            ].index,
            range(depth - 1),
            reindexed[reindexed["entity"] == self].index,
        )
        # "index" holds the original triple index preserved by long_data.
        return SubKG(self.kg, reindexed.loc[idx]["index"].unique())

    @ft.lru_cache
    def _neighbourhood(self, depth: int = 1) -> SubKG:
        # Recursive alternative: the 1-hop sub-KG unioned with the
        # (depth - 1)-hop neighbourhoods of every entity it contains.
        indicies = self.kg.long_data[self.kg.long_data["entity"] == self].index.unique()
        sub_kg = SubKG(self.kg, indicies)
        if depth == 1:
            return sub_kg
        nested_neighbourhoods = [
            entity._neighbourhood(depth=depth - 1) for entity in sub_kg.entities
        ]
        return ft.reduce(operator.__or__, nested_neighbourhoods + [sub_kg])
class Relation(Element):
    """A relation (edge label); all behaviour is inherited from Element."""

    ...
class ElementContainer(abc.ABC):
    """Ordered, label-keyed collection of KG elements.

    Built either from ``labels`` plus an element ``klass`` (elements are
    constructed on the fly) or from pre-built ``elements``.  Supports label
    and positional indexing, membership by label or element, and union /
    intersection between containers of the same KG.
    """

    data: tp.Dict[str, Element]

    def __init__(
        self,
        kg: KG,
        labels: tp.Iterable[str] = None,
        klass: tp.Type[Element] = None,
        elements: tp.Iterable[Element] = None,
    ) -> None:
        self.kg = kg
        self.data = {}
        if elements is not None:
            # All pre-built elements must share one concrete class.
            assert len(set(map(operator.attrgetter("__class__"), elements))) == 1
            self.data = {element.label: element for element in elements}
        if labels is not None and klass is not None:
            self.data = {label: klass(self.kg, label) for label in labels}

    def __getitem__(self, index_or_label: tp.Union[str, int]) -> Element:
        # Integer access is positional (insertion order); strings are labels.
        if isinstance(index_or_label, int):
            return list(self)[index_or_label]
        return self.data[index_or_label]

    def __setitem__(self, label: str, element: Element) -> None:
        # BUG FIX: was misspelled ``__setitem`` (missing trailing
        # underscores) and therefore never hooked ``container[k] = v``.
        self.data[label] = element

    def __iter__(self) -> tp.Iterator[Element]:
        return iter(self.data.values())

    def __contains__(self, other: tp.Any) -> bool:
        if isinstance(other, str):
            return other in self.data
        if isinstance(other, Element):
            return other.label in self.data
        return False

    def __or__(self, other: tp.Any):
        # Union keeps left-hand elements first; operands must share a KG.
        if isinstance(other, self.__class__) and self.kg is other.kg:
            return self.__class__(self.kg, elements=list(self) + list(other))
        raise ValueError()

    def __and__(self, other: tp.Any):
        if isinstance(other, self.__class__) and self.kg is other.kg:
            return self.__class__(self.kg, elements=set(self) & set(other))
        raise ValueError()
class EntityContainer(ElementContainer):
    """ElementContainer that builds ``Entity`` objects from labels."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, klass=Entity, **kwargs)
class RelationContainer(ElementContainer):
    """ElementContainer that builds ``Relation`` objects from labels.

    BUG FIX: previously subclassed ``EntityContainer``, whose ``__init__``
    also injects ``klass=Entity`` — the chained ``super().__init__`` call
    then received ``klass`` twice and raised ``TypeError`` on every
    construction.  Deriving directly from ``ElementContainer`` fixes that.
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, klass=Relation, **kwargs)
class KG:
    """A knowledge graph backed by a (head, relation, tail) dataframe."""

    data: pd.DataFrame

    def __init__(self, data: pd.DataFrame) -> None:
        self.data = data

    def __len__(self) -> int:
        # Number of triples.
        return len(self.data)

    @util.cached_property
    def long_data(self) -> pd.DataFrame:
        # Long format: one row per (triple, role) with role in {head, tail};
        # the original triple index is preserved for sub-KG selection.
        return self.data.melt(
            id_vars="relation", var_name="role", value_name="entity", ignore_index=False
        )

    @util.cached_property
    def entities(self) -> EntityContainer:
        # All distinct entities appearing as head or tail.
        entities = self.long_data["entity"].unique()
        return EntityContainer(self, labels=entities)

    @util.cached_property
    def relations(self) -> RelationContainer:
        relations = self.data["relation"].unique()
        return RelationContainer(self, labels=relations)
class SubKG(KG):
    """A view of a KG restricted to a subset of its triple indices.

    Data is sliced lazily from the parent; element containers reuse the
    parent's element objects rather than constructing new ones.
    """

    super_kg: KG
    indicies: pd.Index

    def __init__(self, super_kg: KG, indicies: pd.Index) -> None:
        # Intentionally does not call KG.__init__; `data` is derived lazily.
        self.super_kg = super_kg
        self.indicies = indicies

    @util.cached_property
    def data(self):
        return self.super_kg.data.loc[self.indicies]

    @util.cached_property
    def long_data(self):
        return self.super_kg.long_data.loc[self.indicies]

    @util.cached_property
    def entities(self):
        # Reuse the parent's Entity objects for the entities present here.
        entities = self.long_data["entity"].unique()
        return EntityContainer(
            self.super_kg,
            elements=[self.super_kg.entities[entity] for entity in entities],
        )

    @util.cached_property
    def relations(self):
        relations = self.data["relation"].unique()
        return RelationContainer(
            self.super_kg,
            elements=[self.super_kg.relations[relation] for relation in relations],
        )

    def __or__(self, other):
        # Union of two views over the same parent KG.
        if isinstance(other, SubKG):
            return SubKG(self.super_kg, self.indicies.union(other.indicies))
        raise ValueError()
|
{"/kgdata/__main__.py": ["/kgdata/dataset.py", "/kgdata/path.py", "/kgdata/sample.py"], "/tune.py": ["/kgdata/model.py"], "/train.py": ["/kgdata/model.py"]}
|
34,027,378
|
stevenmarr/presentation-manager
|
refs/heads/master
|
/db_oauth2.py
|
from dropbox.client import DropboxOAuth2Flow, DropboxClient
from google.appengine.api import taskqueue, modules
from google.appengine.ext import db, ndb
from main import BaseHandler, config, data_cache, admin_required
from secrets import APP_KEY, APP_SECRET
from models import ConferenceData
from main import admin_required, data_cache
import webapp2
import logging
def get_dropbox_auth_flow(session, redirect_uri):
    # Builds the Dropbox OAuth2 flow, storing CSRF state in the webapp2
    # session under the 'dropbox-auth-csrf-token' key.
    return DropboxOAuth2Flow(APP_KEY, APP_SECRET, redirect_uri,
                             session, 'dropbox-auth-csrf-token')
# URL handler for /dropbox-auth-start
class DBAuthStartHandler(BaseHandler):
    @admin_required
    def get(self):
        # Send the admin to Dropbox's consent page; Dropbox redirects back
        # to our /dropbox-auth-finish route when done.
        # (Removed a dead `redirect = self.get` assignment that was
        # immediately overwritten.)
        redirect = self.uri_for('auth-finish', _full=True)
        authorize_url = get_dropbox_auth_flow(self.session, redirect).start()
        self.redirect(authorize_url)
# URL handler for /dropbox-auth-finish
class DBAuthFinishHandler(BaseHandler):
    @admin_required
    def get(self):
        # Complete the OAuth2 dance: exchange the callback params for an
        # access token and persist it on the conference record.
        session = self.session
        params = self.request.params
        #logging.error('Params %s' % params)
        redirect = self.uri_for('auth-finish',_full = True)
        try:
            access_token, user_id, url_state = \
                get_dropbox_auth_flow(session, redirect).finish(params)
            client = DropboxClient(access_token, locale='en_US', rest_client=None)
            conference_data = self.get_conference_data()
            conference_data.dbox_access_token = access_token
            # Invalidate the cached conference record so the new token is seen.
            data_cache.set('%s-conference_data'% self.module, None)
            conference_data.put()
            return self.render_response('utilities.html',
                access_token = access_token)
        # NOTE(review): the error branches below call helpers (http_status,
        # redirect_to, flash, logger) that are not defined or imported in
        # this module and would raise NameError if reached — confirm.
        except DropboxOAuth2Flow.BadRequestException, e:
            http_status(400)
        except DropboxOAuth2Flow.BadStateException, e:
            # Start the auth flow again.
            redirect_to("/db_oauth/dropbox-auth-start")
        except DropboxOAuth2Flow.CsrfException, e:
            http_status(403)
        except DropboxOAuth2Flow.NotApprovedException, e:
            flash('Not approved?  Why not?')
            return redirect_to("/home")
        except DropboxOAuth2Flow.ProviderException, e:
            logger.log("Auth error: %s" % (e,))
            http_status(403)
class DBAuthRevokeHandler(BaseHandler):
    @admin_required
    def get(self):
        # Disable the stored Dropbox token (if any) and clear it from the
        # conference record and cache.
        conference_data = self.get_conference_data()
        access_token = conference_data.dbox_access_token
        if access_token:
            try:
                client = DropboxClient(access_token, locale='en_US', rest_client=None)
                client.disable_access_token()
                conference_data.dbox_access_token = None
                data_cache.set('%s-conference_data' % self.module, None)
                conference_data.put()
            except Exception:
                # Best-effort revoke: fall back to the plain utilities page.
                # BUG FIX: was a bare ``except:`` which also swallowed
                # SystemExit/KeyboardInterrupt.
                return self.render_response('utilities.html')
        return self.render_response('utilities.html', access_token=access_token)
# Route table: maps the Dropbox OAuth endpoints onto their handlers.
app = webapp2.WSGIApplication([
    webapp2.Route ('/db_oauth/dropbox-auth-start', DBAuthStartHandler),
    webapp2.Route ('/db_oauth/dropbox-auth-finish', DBAuthFinishHandler, name = 'auth-finish'),
    webapp2.Route ('/db_oauth/dropbox-auth-revoke', DBAuthRevokeHandler)
], debug=True, config=config)
|
{"/admin.py": ["/email_messages.py", "/constants.py", "/models.py", "/main.py", "/google_to_dropbox.py", "/forms.py"], "/email_messages.py": ["/constants.py"], "/main.py": ["/models.py", "/email_messages.py", "/secrets.py", "/constants.py", "/forms.py", "/admin.py"], "/super_admin_handlers.py": ["/main.py", "/models.py", "/secrets.py", "/forms.py"], "/serve_presentations.py": ["/models.py", "/main.py"], "/google_to_dropbox.py": ["/models.py", "/main.py", "/secrets.py"], "/dropbox_oauth.py": ["/main.py", "/secrets.py"], "/forms.py": ["/models.py"], "/messages.py": ["/main.py", "/email_messages.py", "/constants.py"], "/utilities.py": ["/models.py", "/main.py"], "/templates/OLD-templates/presentations.py": ["/models.py"]}
|
34,027,379
|
stevenmarr/presentation-manager
|
refs/heads/master
|
/google_to_dropbox.py
|
from dropbox.client import DropboxClient
from dropbox import rest as dbrest
from models import SessionData
from google.appengine.ext.webapp import blobstore_handlers
from google.appengine.ext import db, blobstore
from main import admin_required, BaseHandler, config
from secrets import DB_TOKEN
import time
import logging
from google.appengine.api import taskqueue
import webapp2
class CopyBlobstoreToDropBox(blobstore_handlers.BlobstoreDownloadHandler, BaseHandler):
    """Task-queue worker that streams a blobstore file into Dropbox."""
    @admin_required
    def post(self):
        key = self.request.get('session_key')
        blob_info = self.request.get('blob_info')
        logging.info('Task Queues returns key: %s, blob_info: %s.' %(key, blob_info))
        session = SessionData.get(key)
        logging.info('Task Queues returns key: %s, blob_info: %s, retults in session: %s.' %(key, blob_info, session))
        client = DropboxClient(DB_TOKEN, "en_US", rest_client=None)
        if not session.presentation_uploaded_to_db:
            f = session.blob_store_key.open()
            size = session.blob_store_key.size
            uploader = client.get_chunked_uploader(f, size)
            # NOTE(review): if upload_chunked() fails persistently the offset
            # never advances and this loop never terminates; the bare except
            # also hides the underlying error.
            while uploader.offset < size:
                try:
                    upload = uploader.upload_chunked()
                except:
                    logging.error("Drop Box Error")
            filename = session.lastname + '_' + session.filename
            if session.session_date: date = session.session_date
            else: date = 'no-date-provided'
            response = uploader.finish('/beta/%s/%s/%s/%s'% (session.session_room, date, session.lastname, filename), overwrite = True) #folder structure /conf_name/room/date/lastname/filename
            session.presentation_uploaded_to_db = True
            # NOTE(review): stores the response's mime_type in a field named
            # *_db_path — looks like it should be response['path']; confirm.
            session.presentation_db_path = response['mime_type']
            session.presentation_db_size = response['size']
            session.put()
            f.close()
        return
class BuildUploadTasksHandler(BaseHandler):
    """Enqueues one Dropbox-upload task per session that has a stored blob."""
    @admin_required
    def post(self):
        sessions = db.GqlQuery("SELECT * FROM SessionData WHERE blob_store_key != NULL")
        for session in sessions:
            params = {'session_key':session.key(), 'blob_key':session.blob_store_key.key()}
            # Tasks are handled by CopyBlobstoreToDropBox on the db-upload queue.
            taskqueue.add(url='/dropbox/update_dropbox/',params=params, target='db-upload')
        self.render_response('dropbox.html', success = True, message = "Dropbox upload in progress..." )
class DropBoxHandler(BaseHandler):
    """Renders the Dropbox admin page."""
    @admin_required
    def get(self):
        self.render_response('dropbox.html')
        #taskqueue.add(url='/admin/update_dropbox/', target='db-upload')
class ResetSessionDataDBFlagHandler(BaseHandler):
    """Clears the uploaded-to-Dropbox flag so all sessions re-upload."""
    @admin_required
    def post(self):
        sessions = db.GqlQuery("SELECT * FROM SessionData WHERE blob_store_key != NULL")
        for session in sessions:
            if session.presentation_uploaded_to_db:
                session.presentation_uploaded_to_db = False
                session.put()
        self.render_response('dropbox.html', success = True, message = "SessionData DB Reset" )
# Route table for the Dropbox sync admin endpoints.
app = webapp2.WSGIApplication(
    [('/dropbox', DropBoxHandler),
     ('/dropbox/build_upload_dropbox/', BuildUploadTasksHandler),
     ('/dropbox/update_dropbox/', CopyBlobstoreToDropBox),
     ('/dropbox/re_initialize_upload_status/', ResetSessionDataDBFlagHandler)
    ], debug=True, config=config)
|
{"/admin.py": ["/email_messages.py", "/constants.py", "/models.py", "/main.py", "/google_to_dropbox.py", "/forms.py"], "/email_messages.py": ["/constants.py"], "/main.py": ["/models.py", "/email_messages.py", "/secrets.py", "/constants.py", "/forms.py", "/admin.py"], "/super_admin_handlers.py": ["/main.py", "/models.py", "/secrets.py", "/forms.py"], "/serve_presentations.py": ["/models.py", "/main.py"], "/google_to_dropbox.py": ["/models.py", "/main.py", "/secrets.py"], "/dropbox_oauth.py": ["/main.py", "/secrets.py"], "/forms.py": ["/models.py"], "/messages.py": ["/main.py", "/email_messages.py", "/constants.py"], "/utilities.py": ["/models.py", "/main.py"], "/templates/OLD-templates/presentations.py": ["/models.py"]}
|
34,027,380
|
stevenmarr/presentation-manager
|
refs/heads/master
|
/serve_presentations.py
|
from google.appengine.api import taskqueue
from google.appengine.ext import blobstore, db
from models import SessionData, AppEventData
from google.appengine.ext.webapp import blobstore_handlers
from main import BaseHandler, config, user_required, admin_required, data_cache
import forms
import webapp2
import time
import urllib
import logging
class UserDefaultHandler(BaseHandler):
    """Presenter landing page: lists sessions with one-shot upload URLs."""
    @user_required
    def get(self):
        # Presenter's own sessions.
        user_id = self.user.email
        sessions = self.get_sessions(user_id)
        upload_urls = {}
        for session in sessions:
            # Blobstore upload URLs are single-use; mint one per session.
            upload_urls[session.name] = blobstore.create_upload_url('/upload')
        return self.render_response('presenters.html',
            sessions = sessions,
            upload_urls= upload_urls)

    @admin_required
    def post(self):
        # Admin view of an arbitrary user's sessions (user_id posted in).
        user_id = self.request.get('user_id')
        if not user_id:
            # NOTE(review): execution continues after this redirect — looks
            # like a missing `return`; confirm.
            self.redirect('/admin/manage_sessions')
        sessions = self.get_sessions(user_id)
        upload_urls = {}
        for session in sessions:
            upload_urls[session.name] = blobstore.create_upload_url('/upload_admin')
        return self.render_response('presenters.html',
            sessions = sessions,
            upload_urls= upload_urls)
class UploadHandler(blobstore_handlers.BlobstoreUploadHandler, BaseHandler):
    """Receives a presenter's blobstore upload and links it to the session."""
    @user_required
    def post(self):
        upload_files = self.get_uploads('file')
        if upload_files:
            key = self.request.get('session_key')
            session = SessionData.get(key)
            blob_info = upload_files[0]
            session.blob_store_key = blob_info.key()
            session.filename = blob_info.filename
            session.uploaded_to_dbox = False
            # Invalidate the cached session list for this module.
            data_cache.set('%s-sessions'% self.module, None)
            session.put()
            # NOTE(review): sleep presumably papers over datastore eventual
            # consistency — confirm.
            time.sleep(.25)
            if self.upload_to_db():
                params = {'session_key':key, 'blob_key':blob_info}
                taskqueue.add(url='/utilities/update_dropbox/',method='POST',params=params, target='db-upload')
        return self.redirect('/default')
class AdminUploadHandler(blobstore_handlers.BlobstoreUploadHandler, BaseHandler):
    """Admin-side upload: links a blob to a session and re-renders the
    session-management page with a success or failure banner."""
    @admin_required
    def post(self):
        upload_files = self.get_uploads('file')
        form = forms.SessionForm()
        form.users.choices = self.get_users_tuple()
        sessions = self.get_sessions()
        if not upload_files:
            return self.render_response('manage_sessions.html',
                failed = True,
                message = "Presentation upload failed, please try again",
                sessions = sessions,
                form = form)
        key = self.request.get('session_key')
        session = SessionData.get(key)
        blob_info = upload_files[0]
        logging.info("blob key %s | %s | %s | blob_info, blob_info.key, blob_info.key()" %(blob_info, blob_info.key, blob_info.key()))
        session.blob_store_key = blob_info.key()
        session.uploaded_to_dbox = False
        session.filename = blob_info.filename
        # Invalidate the cached session list for this module.
        data_cache.set('%s-sessions'% self.module, None)
        session.put()
        # NOTE(review): sleep presumably papers over datastore eventual
        # consistency — confirm.
        time.sleep(.25)
        logging.info("session.blob_store_key is %s"% session.blob_store_key)
        if self.upload_to_db():
            # Hand the Dropbox mirroring off to the db-upload task queue.
            params = {'session_key':key,
                'conf_key': self.get_conference_data().key(),
                'blob_key':blob_info.key()}
            taskqueue.add(url='/utilities/update_dropbox/',
                method='POST',
                params=params,
                target='db-upload')
        return self.render_response('manage_sessions.html',
            success = True,
            message = 'Presentation for | %s | upload successful'% session.name,
            sessions = sessions,
            form = form)
class ServeHandler(blobstore_handlers.BlobstoreDownloadHandler, BaseHandler):
    """Streams a stored presentation blob back to the user.

    The download filename is taken from the SessionData row referencing the
    blob, so the browser saves the original filename rather than a blob key.
    """
    @user_required
    def get(self, resource):
        resource = str(urllib.unquote(resource))
        blob_info = blobstore.BlobInfo.get(resource)
        # BUG FIX: bind the key as a GQL parameter instead of interpolating
        # the URL path into the query string (GQL-injection hazard), and
        # fetch a single session with .get() so send_blob is called at most
        # once (the old for-loop could emit the blob repeatedly).
        query = db.GqlQuery("SELECT * FROM SessionData WHERE blob_store_key = :1", resource)
        session = query.get()
        if session:
            self.send_blob(blob_info, save_as = session.filename)
class DeleteBlobStoreHandler(BaseHandler):
    """Deletes a session's uploaded presentation blob, clears the session's
    file fields, and queues removal of the mirrored Dropbox copy."""
    @user_required
    def post(self, resource):
        key = self.request.get('session_key')
        resource = str(urllib.unquote(resource))
        blob_info = blobstore.BlobInfo.get(resource)
        session = SessionData.get(key)
        blob_info.delete()
        # Audit-log the file deletion.
        AppEventData(event = session.filename, event_type='file', transaction='DEL', user = self.user.email).put()
        session.blob_store_key = None
        session.filename = None
        session.uploaded_to_dbox = False
        # Queue deletion of the mirrored Dropbox file on the db-upload module.
        params = { 'session_key':key,
                   'conf_key':self.get_conference_data().key(),
                   'db_path':session.dbox_path}
        taskqueue.add(url='/utilities/delete_dropbox/',
                      method='POST',
                      params=params,
                      target='db-upload')
        data_cache.set('file', None)
        session.put()
        # Brief pause for datastore write visibility (eventual consistency).
        time.sleep(.25)
        self.redirect('/admin')
# WSGI route table for the presenter-facing upload/serve/delete endpoints.
# The /serve and /delete routes capture the blob key from the URL path.
app = webapp2.WSGIApplication(
    [
        ('/default', UserDefaultHandler),
        ('/upload', UploadHandler),
        ('/upload_admin', AdminUploadHandler),
        ('/serve/([^/]+)?', ServeHandler),
        ('/delete/([^/]+)?', DeleteBlobStoreHandler)
    ], debug=True, config=config)
|
{"/admin.py": ["/email_messages.py", "/constants.py", "/models.py", "/main.py", "/google_to_dropbox.py", "/forms.py"], "/email_messages.py": ["/constants.py"], "/main.py": ["/models.py", "/email_messages.py", "/secrets.py", "/constants.py", "/forms.py", "/admin.py"], "/super_admin_handlers.py": ["/main.py", "/models.py", "/secrets.py", "/forms.py"], "/serve_presentations.py": ["/models.py", "/main.py"], "/google_to_dropbox.py": ["/models.py", "/main.py", "/secrets.py"], "/dropbox_oauth.py": ["/main.py", "/secrets.py"], "/forms.py": ["/models.py"], "/messages.py": ["/main.py", "/email_messages.py", "/constants.py"], "/utilities.py": ["/models.py", "/main.py"], "/templates/OLD-templates/presentations.py": ["/models.py"]}
|
34,027,381
|
stevenmarr/presentation-manager
|
refs/heads/master
|
/templates/OLD-templates/presenter_html.py
|
# Static HTML page header for the legacy string-built presenter page.
# NOTE(review): this module lives under OLD-templates and appears superseded
# by the Jinja templates rendered elsewhere (e.g. 'manage_sessions.html').
# The literal tokens 'firstname' and '{type}' below look like placeholders
# that were never wired up — confirm before reuse.
header = '''<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<meta http-equiv="Content-type" content="text/html; charset=utf-8" />
<title>Presentation Manager</title>
<script src="/js/jquery-1.11.2.min.js"></script>
<!--<script src="/js/sorttable.js"></script>-->
<!-- <script src="/js/jquery-1.11.2.min.js"></script>-->
<link rel="stylesheet" href="/css/style.css" type="text/css" media="all" />
</head>
<body>
<!-- Header -->
<div id="header">
<div class="shell">
<!-- Logo + Top Nav -->
<div id="top">
<h1><a href="/default">Conference Media Manager</a></h1>
<div id="top-navigation">
Welcome <strong>firstname| Account type: {type}</strong>
<span>|</span>
<a href="http://www.google.com/?gws_rd=ssl#q=help">Help</a>
<span>|</span>
<a href="/logout">Log out</a>
<span>|</span>
<a href="/default">Home</a>
</div>
</div>
</div>
</div>
<!-- End Header -->
<!-- Container -->
<div id="container" >
<div class="shell">
<div id = "warning">
<div class="msg msg-error">
<p><strong>Upload in progress do not navigate away from this page</strong></p>
</div>
</div>
<!-- End Message Error -->
<br />
<!-- Main -->
<div id="main">
<div class="cl"> </div>
<!-- Content -->
<div id="content">
<!-- Box -->
<div class="box">
<!-- Box Head -->
<div class="box-head">
<h2 class="left">firstname's Presentations</h2>
</div>
<!-- End Box Head -->
<!-- Table -->
<div class="presentation-table">
<table width="100%" border="0" cellspacing="0" cellpadding="0">
<tr>
<th width="450px">Session Name</th>
<th width = "80px" class="ac">Session Date</th>
<th width = "100px" class="ac">Filename</th>
<th width ="40px" class="ac">View Presentation</th>
<th width ="40px" class="ac">Delete Presentation</th>
<th width ="150px" class="ac">Upload Presentation</th>
<th></th>
</tr>'''
# Per-session table-row template for the legacy string-built presenter page.
# NOTE(review): this template mixes Jinja-style tags ({% if %}) with
# str.format-style placeholders ({session_name}); str.format would raise on
# the '{%' tokens, so this cannot work as a .format template as written.
# Other apparent defects (left untouched — runtime string): the serve link
# href '/serve/blob_store_key.key()}' is malformed; '{% blob_store_key ==
# None %}' is missing the 'if'; there is a stray '</td>' after the filename
# cell; the upload <form> closes after its enclosing '</td>'. Confirm this
# file is dead code before deleting.
entries = """ <script>$(document).ready(function(){$('#loading-icon').hide();$('#warning').hide();$('#button{i}').click(function(){$('#loading-icon{i}').show();$('#upload-icon{i}').hide();$('#warning').show();});});</script>
<tr height = "75px">
<td>{session_name}</td>
<td>{date_time}</td>
{% if filename %}
<td class="ac">(unknown)</td>
{% else %}
<td class="ac">--</td>
</td>
{% endif %}
<!-- Link to current presentation if it exists -->
{% if blob_store_key == None %}
<td class="ac">--</td>
{% else %}
<td class="ac" >
<a href = "/serve/blob_store_key.key()}"><img src = "/img/open-file-icon.png" width="20" height ="20" alt = "Open file"></a>
</td>
{% endif %}
<!--****************************-->
<!-- Delete presentation -->
{% blob_store_key == None %}
<td class="ac">--</td>
{% else %}
<td class="ac">
<form action="{'/delete/%s' % blob_store_key}" method="POST">
<input type="hidden" name="session_key" value="{key}">
<input type="image" src="/img/delete-icon.png" name="submit" alt="Submit" width="20" height="20"/>
</form>
</td>
{% endif %}
<!--****************************-->
<!-- Upload presentation -->
<td class="ac">
<form action="{upload_url}" method="POST" enctype="multipart/form-data">
<input type="hidden" name="session_key" value="{key}">
<input type="hidden" name="blob_store_key" value="{blob_store_key}">
<input type="file" name="file" class = "button" value="upload file"/>
</td>
<td class="ac">
<div id = "upload-icon"><input type="image" src="/img/upload-icon.png" id="button{i}" name="submit" alt="Submit" width="20" height="20"/></div>
<div id = "loading-icon"><img src="/img/small-loading.gif" width="20" height="20"/></div>
</form>
</td>
</tr>"""
footer = """
</table>
</div>
</div>
<!-- End content -->
<div class="cl"> </div>
</div>
<!-- Main -->
</div>
<!-- End Shell -->
</div>
<!-- End Container -->
<!-- Footer -->
<div id="footer">
<div class="footer-shell">
<span class="left">© 2015 - Steven and Jill Marr</span>
<span class="right">
Powered by <a href="https://cloud.google.com/appengine/" title="Google App Engine">Google App Engine</a>
</span>
</div>
</div>
<!-- End Footer -->
</body>
</html>"""
|
{"/admin.py": ["/email_messages.py", "/constants.py", "/models.py", "/main.py", "/google_to_dropbox.py", "/forms.py"], "/email_messages.py": ["/constants.py"], "/main.py": ["/models.py", "/email_messages.py", "/secrets.py", "/constants.py", "/forms.py", "/admin.py"], "/super_admin_handlers.py": ["/main.py", "/models.py", "/secrets.py", "/forms.py"], "/serve_presentations.py": ["/models.py", "/main.py"], "/google_to_dropbox.py": ["/models.py", "/main.py", "/secrets.py"], "/dropbox_oauth.py": ["/main.py", "/secrets.py"], "/forms.py": ["/models.py"], "/messages.py": ["/main.py", "/email_messages.py", "/constants.py"], "/utilities.py": ["/models.py", "/main.py"], "/templates/OLD-templates/presentations.py": ["/models.py"]}
|
34,027,382
|
stevenmarr/presentation-manager
|
refs/heads/master
|
/admin.py
|
import time
import csv
import webapp2
import logging
import email_messages
import forms
from constants import SENDER
from google.appengine.api import mail, modules, taskqueue
from models import User, SessionData, AppEventData, ConferenceData
from main import BaseHandler, config, user_required, admin_required, jinja2_factory, check_csv, AccountActivateHandler, data_cache
from google.appengine.ext.webapp import blobstore_handlers
from google.appengine.ext import db, blobstore, ndb
from datetime import date
from webapp2_extras.appengine.auth.models import Unique
from dateutil.parser import *
from google.appengine.api import taskqueue
# ISO weekday number (Monday=1 .. Sunday=7) mapped to the day's English name.
weekdays = dict(enumerate(
    ['Monday', 'Tuesday', 'Wednesday', 'Thursday',
     'Friday', 'Saturday', 'Sunday'],
    start=1))
#Render main admin page
class LogsHandler(BaseHandler):
    """Displays the three admin event logs (user / session / file).

    The per-type query/cache boilerplate was triplicated; it is factored
    into a single helper so the cache-key and query strings stay in sync.
    """

    def _cached_events(self, event_type):
        # Return the 50 most recent AppEventData rows of `event_type` for
        # this module, memoized in data_cache under '<module>-<type>_events'.
        cache_key = '%s-%s_events' % (self.module, event_type)
        events = data_cache.get(cache_key)
        if events == None:
            events = db.GqlQuery(
                "SELECT * FROM AppEventData WHERE event_type = '%s' and module = '%s' ORDER BY time_stamp DESC LIMIT 50"
                % (event_type, self.module))
            logging.info('AppEventData DB Query')
            data_cache.set(cache_key, events)
        return events

    @user_required
    def get(self):
        self.render_response( 'logs.html',
                              user_events = self._cached_events('user'),
                              session_events = self._cached_events('session'),
                              file_events = self._cached_events('file'))
class ManageConferenceHandler(BaseHandler):
    """Edit page for the module's ConferenceData (name, start/end dates)."""
    @admin_required
    def get(self):
        conference_data = self.get_conference_data()
        form = forms.ConferenceForm(obj = conference_data)
        # Dates are stored as date objects; render them as strings in the form.
        form.start.data = ('%s'% conference_data.start_date)
        form.end.data = ('%s'% conference_data.end_date)
        return self.render_response('edit_conference.html', form=form)

    # BUG FIX: post() lacked the @admin_required guard that get() has,
    # letting non-admin users modify conference data.
    @admin_required
    def post(self):
        conference_data = self.get_conference_data()
        form = forms.ConferenceForm(self.request.POST, obj = conference_data)
        if not form.validate():
            return self.render_response('edit_conference.html',
                                        form=form,
                                        failed=True,
                                        message="Form failed to validate with errors %s"% form.errors)
        form.populate_obj(conference_data)
        conference_data.start_date = parse('%s'% form.start.data).date()
        conference_data.end_date = parse('%s'% form.end.data).date()
        #TODO: compare values, make sure they are in chronological order.
        conference_data.save()
        # Invalidate the cached conference data so the next read re-queries.
        data_cache.set('%s-conference_data'% self.module, None)
        time.sleep(.25)
        return self.redirect('/admin/conference_data')
#Session Management
class ManageSessionsHandler(BaseHandler):
    """Admin landing page: lists sessions, the add-session form, and the
    distinct session dates (with day-of-week labels) for date filtering."""
    @user_required
    def get(self):
        sessions = self.get_sessions()
        form = forms.SessionForm()
        form.users.choices = self.get_users_tuple()
        result = db.GqlQuery("SELECT date, dotw FROM SessionData WHERE module = '%s' ORDER BY date DESC"% self.module)
        # Collect each distinct date with its day-of-the-week label.
        # BUG FIX: the loop variable was named `date`, shadowing the
        # `datetime.date` import at module scope.
        dates = {}
        for row in result:
            dates.setdefault(row.date, row.dotw)
        return self.render_response("manage_sessions.html",
                                    sessions = sessions,
                                    form = form,
                                    dates = dates)
class EditSessionHandler(BaseHandler):
    """Renders the edit form for a session, pre-populated from the entity."""
    @user_required
    def post(self):
        key = self.request.get('session_key')
        session = SessionData.get(key)
        user = User.query(User.email == session.user_id).get()
        form = forms.SessionForm(obj = session)
        form.users.choices = self.get_users_tuple()
        # Session date/time are stored as strings; parse them back into
        # date/time objects for the form widgets.
        form.date.data = parse('%s'% session.date).date()
        form.time.data = parse('%s'% session.time).time()
        if user:
            # Put the session's current presenter at the top of the choices
            # so it renders pre-selected.
            form.users.choices.insert(0, (session.user_id, user.lastname+', '+user.firstname))
            return self.render_response("edit_session.html",
                                        form = form,
                                        key = key)
        else:
            # Presenter account was deleted since the session was created.
            return self.render_response("edit_session.html",
                                        failed = True,
                                        message = 'That presenter no longer exists in the database, please choose a new presenter',
                                        form = form,
                                        key = key)
class SessionByDateHandler(BaseHandler):
    """Lists every session scheduled on the given date string."""
    @user_required
    def get(self, date):
        # Resolve the day-of-the-week name for the page heading.
        day_name = weekdays[parse(date).date().isoweekday()]
        matching_sessions = db.GqlQuery("SELECT * FROM SessionData WHERE date = '%s'"% date)
        return self.render_response( 'sessions.html',
                                     sessions = matching_sessions,
                                     dotw = day_name)
class UpdateSessionHandler(blobstore_handlers.BlobstoreUploadHandler, BaseHandler):
    """Validates an edited session form and persists the changes."""
    @admin_required
    def post(self):
        key = self.request.get('key')
        session = SessionData.get(key)
        form = forms.SessionForm(self.request.POST, obj=session)
        form.users.choices = self.get_users_tuple()
        if not form.validate():
            logging.error("Session Form Validation Errors in UpdateSessionHandler %s" % form.errors)
            return self.render_response('edit_session.html',
                                        failed = True,
                                        message = 'Invalid form submission',
                                        form=form,
                                        key=key)
        form.populate_obj(session)
        # Normalize date/time fields to the string formats SessionData stores.
        session.date = str(parse('%s'% form.date.data).date())
        session.time = str(parse('%s'% form.time.data).time().isoformat())[0:5]
        session.dotw = parse('%s'% form.date.data).date().strftime("%A")
        session.user_id = form.users.data
        session.save()
        data_cache.set('%s-sessions'% self.module, None)
        time.sleep(.25)
        form = forms.SessionForm()
        form.users.choices = self.get_users_tuple()
        # BUG FIX: previously `return self`, which handed webapp2 the handler
        # object instead of an HTTP response; render the sessions page as the
        # commented-out code clearly intended.
        return self.render_response('manage_sessions.html',
                                    success = True,
                                    message = ('%s - session edited successfully' % session.name),
                                    sessions = self.get_sessions(),
                                    form = form)
class AddSessionHandler(blobstore_handlers.BlobstoreUploadHandler, BaseHandler):
    """Validates the add-session form and creates a SessionData row
    parented to the selected presenter's entity group."""
    @admin_required
    def post(self):
        form = forms.SessionForm(self.request.POST)
        form.users.choices = self.get_users_tuple()
        if not form.validate():
            return self.render_response('manage_sessions.html',
                                        failed = True,
                                        message = 'Invalid add session submission',
                                        sessions= self.get_sessions(),
                                        form = form)
        user_id = form.users.data
        query = ndb.gql("SELECT * FROM User WHERE email = '%s'"% user_id)
        presenter = query.get()
        # BUG FIX: the entity-group argument to a db.Model constructor is the
        # lowercase keyword `parent`; `Parent=` did not establish a parent.
        session = SessionData(parent = presenter)
        form.populate_obj(session)
        # Normalize date/time to the string formats SessionData stores.
        session.date = str(parse('%s'% form.date.data).date())
        session.time = str(parse('%s'% form.time.data).time().isoformat())[0:5]
        session.dotw = parse('%s'% form.date.data).date().strftime("%A")
        session.user_id = user_id
        session.presenter = self.get_users(user_id)
        session.save()
        time.sleep(.25)
        return self.redirect(webapp2.uri_for('sessions'))
class DeleteSessionHandler(BaseHandler):
    """Deletes a session and, if present, its uploaded presentation blob."""
    @admin_required
    def post(self):
        key = self.request.get('session_key')
        session = SessionData.get(key)
        if session:
            if session.blob_store_key:
                # NOTE(review): calling .delete() on blob_store_key is only
                # valid if the property dereferences to a BlobInfo (e.g. a
                # blobstore.BlobReferenceProperty) — confirm in models.py;
                # a bare BlobKey has no delete().
                session.blob_store_key.delete()
            # Audit-log the session deletion and drop the cached event list.
            AppEventData(event = session.name, event_type='session', transaction='DEL', user = self.user.email).put()
            data_cache.set('events', None)
            session.delete()
            # Brief pause for datastore write visibility.
            time.sleep(.25)
        self.redirect('/admin/manage_sessions')
class RetrievePresentationHandler(BaseHandler):
    """Redirects the admin to the serve URL for a session's presentation."""
    @admin_required
    def post(self):
        key = self.request.get('session_key')
        if key:
            # NOTE(review): blob_store_key is assigned blob_info.key() on
            # upload; calling .key() here presumes the property dereferences
            # to a BlobInfo — confirm against models.py.
            self.redirect('/serve/%s' % SessionData.get(key).blob_store_key.key())
        else:
            # BUG FIX: '/manage_sessions' is not a registered route; the
            # sessions page is served at '/admin/manage_sessions'.
            self.redirect('/admin/manage_sessions')
        return
#Uploaded Session Data Management
class RenderConferenceUploadDataHandler(blobstore_handlers.BlobstoreUploadHandler, BaseHandler):
    """Shows a preview of an uploaded conference CSV before it is committed."""
    @admin_required
    def post(self):
        uploads = self.get_uploads('file')
        # Nothing uploaded: bounce back to the sessions page.
        if not uploads:
            return self.redirect('/admin/manage_sessions')
        blob_info = uploads[0]
        reader = csv.reader(blob_info.open())
        self.render_response( 'check_upload.html',
                              file_csv = reader,
                              blob_key = blob_info.key())
class CheckConferenceDataHandler(blobstore_handlers.BlobstoreUploadHandler, BaseHandler):
    """Previews an uploaded conference-data CSV before committing it."""
    @admin_required
    def post(self):
        conference_data_upload = self.get_uploads('file')
        if conference_data_upload:
            blob_info = conference_data_upload[0]
            file_csv = csv.reader(blob_info.open())
            # BUG FIX: previously called .key() on the opened BlobReader
            # (which has no key) and used the absolute template path
            # '/admin/check_upload.html'; use the BlobInfo's key and the
            # bare template name, matching RenderConferenceUploadDataHandler.
            self.render_response('check_upload.html',
                                 file_csv = file_csv,
                                 blob_key = blob_info.key())
        else:
            self.redirect('/admin/manage_sessions')
class DeleteConferenceUploadData(BaseHandler):
    """Discards a previewed conference-data upload from the blobstore."""
    @admin_required
    def post(self):
        blob_info = blobstore.BlobInfo.get(self.request.get('blob_key'))
        logging.error("delete handler %s" % blob_info)
        blob_info.delete()
        self.redirect('/admin/manage_sessions')
class CommitConferenceUploadData(BaseHandler):
    """Imports a previewed conference CSV: creates a presenter account and a
    SessionData row per line, optionally emailing each new user an
    activation link, then deletes the uploaded CSV blob."""
    @admin_required
    def post(self):
        key = self.request.get('blob_key')
        email_user = self.request.get('email_user')
        session_data_info = blobstore.BlobInfo.get(key)
        session_data_file = session_data_info.open()
        file_csv = csv.reader(session_data_file)
        # NOTE(review): if check_csv consumes the reader, the loop below
        # sees no rows — confirm against main.check_csv.
        check_csv(file_csv)
        for row in file_csv:
            # Expected columns: firstname, lastname, email, session name, room.
            firstname = row[0]
            lastname = row[1]
            email = row[2].lower()
            name = row[3]
            room = row[4]
            # Auth ids are namespaced per module so conferences don't collide.
            user_id = ('%s|%s' % (self.module, email))
            unique_properties = []
            created, user = self.user_model.create_user(user_id,
                                                        unique_properties,
                                                        email= email,
                                                        account_type = 'user',
                                                        firstname = firstname,
                                                        lastname = lastname,
                                                        verified = False)
            if created:
                AppEventData(event = email, event_type='user', transaction='CREATE', user = self.user.email).put()
                data_cache.set('events', None)
            if created and email_user == 'on':
                url = self.uri_for('activate', _full=True)
                # BUG FIX: this previously rebound `name`, clobbering the
                # session title read from the CSV row with the person's name.
                display_name = firstname+' '+lastname
                subject = email_messages.new_account[0]
                body = email_messages.new_account[1].format(url = url, name = display_name)
                mail.send_mail( sender = SENDER,
                                to = email,
                                subject = subject,
                                body = body)
            session = SessionData( firstname = firstname,
                                   lastname = lastname,
                                   user_id = email,
                                   name = name,
                                   room = room)
            AppEventData(event = name, event_type='session', transaction='CREATE', user = self.user.email).put()
            data_cache.set('events', None)
            session.put()
        data_cache.set('sessions', None)
        time.sleep(.25)
        session_data_info.delete()
        self.redirect('/admin/manage_sessions')
#User Management
class ManageUserAccountsHandler(BaseHandler):
    """Renders the user-management page with the user list and add form."""
    @admin_required
    def get(self):
        self.render_response("manage_users.html",
                             users = self.get_users(),
                             form = forms.AddUserForm())
class AddUserAccountHandler(BaseHandler):
    """Creates a presenter ('user') account from the add-user form and
    optionally emails the new user an activation link."""
    @admin_required
    def post(self):
        form = forms.AddUserForm(self.request.POST)
        users = self.get_users()
        if not form.validate():
            return self.render_response('manage_users.html', users=users, form=form)
        email = form.email.data.lower()
        firstname = form.firstname.data
        lastname = form.lastname.data
        email_user = form.email_user.data
        # Auth ids are namespaced per module so conferences don't collide.
        user_id = ('%s|%s' % (self.module, email))
        unique_properties = []
        created, user = self.user_model.create_user(user_id,
                                                    unique_properties,
                                                    email= email,
                                                    account_type = 'user',
                                                    firstname= firstname,
                                                    lastname= lastname,
                                                    verified= False)
        if created:
            AppEventData(event = email, event_type='user', transaction='CREATE', user = email).put()
            data_cache.set('events', None)
            time.sleep(.25)
            data_cache.set('%s-users-tuple'% self.module, None)
            if email_user:
                url = self.uri_for('activate', _full=True)
                name = firstname+' '+lastname
                subject = email_messages.new_account[0]
                body = email_messages.new_account[1].format(url = url, name = name)
                mail.send_mail( sender = SENDER,
                                to = email,
                                subject = subject,
                                body = body)
            # NOTE(review): `users` was fetched before the insert, so the
            # rendered list may not include the new account until reload.
            return self.render_response('manage_users.html', success = True,
                                        message = 'User added succesfully',
                                        users = users,
                                        form = form)
        # `not created` is the only remaining case; a plain else replaces
        # the redundant `elif not created`.
        else:
            return self.render_response('manage_users.html', failed = True,
                                        message = 'Duplicate user, please confirm email address',
                                        users = users,
                                        form = form)
class DeleteUserAccountHandler(BaseHandler):
    """Deletes a presenter account and its auth-id uniqueness markers."""
    @admin_required
    def post(self):
        user_id = self.request.get('user_id')
        user = User.get_by_auth_id('%s|%s' % (self.module, user_id))
        if user:
            # Remove the Unique records guarding auth_id uniqueness before
            # deleting the user entity itself.
            Unique.delete_multi( map(lambda s: 'User.auth_id:' + s, user.auth_ids) )
            time.sleep(.25)
            user.key.delete()
            # Audit-log the deletion and drop the affected caches.
            AppEventData(event = user_id, event_type='user', transaction='DEL', user = self.user.email).put()
            data_cache.set('events', None)
            data_cache.set('%s-users-tuple'% self.module, None)
            time.sleep(.25)
            return self.render_response('manage_users.html',
                                        success = True,
                                        message = 'User %s succesfully deleted' % user_id,
                                        form = forms.AddUserForm(),
                                        users = self.get_users())
        # Reached only when the user was not found.
        self.redirect('/admin/manage_users')
"""app = webapp2.WSGIApplication(
[webapp2.Route('/admin', ManageSessionsHandler),
webapp2.Route('/admin/conference_data', ManageConferenceHandler),
webapp2.Route('/admin/manage_sessions', ManageSessionsHandler, name='sessions'),
webapp2.Route('/admin/session/<date>', SessionByDateHandler, name='session_by_date'),
webapp2.Route('/admin/add_session', AddSessionHandler),
webapp2.Route('/admin/edit_session', EditSessionHandler),
webapp2.Route('/admin/update_session', UpdateSessionHandler),
webapp2.Route('/admin/delete_session', DeleteSessionHandler),
webapp2.Route('/admin/retrieve_presentation', RetrievePresentationHandler),
webapp2.Route('/admin/logs', LogsHandler),
webapp2.Route('/admin/upload_conference_data/', RenderConferenceUploadDataHandler),
webapp2.Route('/admin/check_conference_data/', CheckConferenceDataHandler),
webapp2.Route('/admin/delete_upload', DeleteConferenceUploadData),
webapp2.Route('/admin/commit_upload', CommitConferenceUploadData),
webapp2.Route('/admin/manage_users', ManageUserAccountsHandler),
webapp2.Route('/admin/add_user_account', AddUserAccountHandler),
webapp2.Route('/admin/delete_user_account', DeleteUserAccountHandler),
webapp2.Route('/activate', AccountActivateHandler, name='activate')
], debug=True, config=config)"""
|
{"/admin.py": ["/email_messages.py", "/constants.py", "/models.py", "/main.py", "/google_to_dropbox.py", "/forms.py"], "/email_messages.py": ["/constants.py"], "/main.py": ["/models.py", "/email_messages.py", "/secrets.py", "/constants.py", "/forms.py", "/admin.py"], "/super_admin_handlers.py": ["/main.py", "/models.py", "/secrets.py", "/forms.py"], "/serve_presentations.py": ["/models.py", "/main.py"], "/google_to_dropbox.py": ["/models.py", "/main.py", "/secrets.py"], "/dropbox_oauth.py": ["/main.py", "/secrets.py"], "/forms.py": ["/models.py"], "/messages.py": ["/main.py", "/email_messages.py", "/constants.py"], "/utilities.py": ["/models.py", "/main.py"], "/templates/OLD-templates/presentations.py": ["/models.py"]}
|
34,027,383
|
stevenmarr/presentation-manager
|
refs/heads/master
|
/messaging.py
|
# All messages are (subject, body) tuples; bodies are str.format templates
# taking {name} and (where present) {url}.
# NOTE(review): the body text contains user-visible typos ('recieving',
# 'upccoming', 'recieved') — fix deliberately, not here.
from google.appengine.api import mail
account_verification = ('Please verify your account','Dear {name},\
Thank you for activating your account, we look forward to recieving.\
your presentations. To complete the process please activate your account\
by clicking on the following link <a href="{url}">{url}</a>')
password_reset = ('Password Reset','Dear {name},\
Please click on the following link to reset your password <a href="{url}">{url}</a>')
new_account = ('New Account','Dear {name},\
Your account is ready for activation for the upccoming %s , please follow this link to activate your account <a href="{url}">{url}</a>' % 'NCIS')
recieved_presentation = ('Presentation recieved', 'Dear {name},\
Congratulations your presentation has uploaded succesfully, to view your submission and confirm the upload please click <a href="{url}">{url}</a>' )
def send_email(to, name, subject, msg, url=None):
    """Format *msg* with the recipient's name (and optional *url*) and send
    it via the App Engine mail API.

    Returns True when the message was complete enough to send, else False.

    NOTE(review): the original body was garbled — it mixed the format() call
    with EmailMessage keyword arguments and referenced an undefined
    `message`; reconstructed here from the apparent intent. Confirm whether
    a sender address (e.g. constants.SENDER) must be supplied for
    EmailMessage.is_initialized() to succeed.
    """
    if url:
        body = msg.format(url=url, name=name)
    else:
        body = msg.format(name=name)
    message = mail.EmailMessage(to = to,
                                subject = subject,
                                html = body)
    if message.is_initialized():
        message.send()
        return True
    else:
        return False
|
{"/admin.py": ["/email_messages.py", "/constants.py", "/models.py", "/main.py", "/google_to_dropbox.py", "/forms.py"], "/email_messages.py": ["/constants.py"], "/main.py": ["/models.py", "/email_messages.py", "/secrets.py", "/constants.py", "/forms.py", "/admin.py"], "/super_admin_handlers.py": ["/main.py", "/models.py", "/secrets.py", "/forms.py"], "/serve_presentations.py": ["/models.py", "/main.py"], "/google_to_dropbox.py": ["/models.py", "/main.py", "/secrets.py"], "/dropbox_oauth.py": ["/main.py", "/secrets.py"], "/forms.py": ["/models.py"], "/messages.py": ["/main.py", "/email_messages.py", "/constants.py"], "/utilities.py": ["/models.py", "/main.py"], "/templates/OLD-templates/presentations.py": ["/models.py"]}
|
34,027,384
|
stevenmarr/presentation-manager
|
refs/heads/master
|
/localtesting/test_handler_response.py
|
# coding: utf-8
import unittest
import webapp2
from webapp2 import uri_for
import webtest
from google.appengine.ext import testbed
from main import app, BaseHandler
from forms import AddUserForm
from mock import Mock, patch
from models import AppEventData
import admin
import models
import main
class AppTest(unittest.TestCase):
    """Route-existence smoke tests run against the WSGI app via webtest."""
    def setUp(self):
        # Wrap the app with WebTest's TestApp and stand up the GAE service
        # stubs (datastore, memcache, mail) used by the handlers.
        self.testapp = webtest.TestApp(app)
        self.testbed = testbed.Testbed()
        self.testbed.activate()
        self.testbed.init_datastore_v3_stub()
        self.testbed.init_memcache_stub()
        self.testbed.init_mail_stub()
    def tearDown(self):
        # BUG FIX: was named `tear_down`, which unittest never calls, so the
        # testbed was never deactivated between tests.
        self.testbed.deactivate()
    def testLoginHandler(self):
        """Verify existence of route '/''"""
        pass
    def testAccountActivateHandler(self):
        """Verify existence of route '/activate'"""
        pass
    def testAccountVerificationHandler(self):
        pass
def
webapp2.Route('/', LoginHandler, name='home'),
webapp2.Route('/activate', AccountActivateHandler, name='activate'),
webapp2.Route('/signup', AccountActivateHandler, name='activate'),
webapp2.Route('/<type:v|p>/<user_id:\d+>-<signup_token:.+>',
handler=VerificationHandler, name='verification'),
webapp2.Route('/password', SetPasswordHandler),
webapp2.Route('/login', LoginHandler, name='login'),
webapp2.Route('/logout', LogoutHandler, name='logout'),
webapp2.Route('/forgot', ForgotPasswordHandler, name='forgot'),
webapp2.Route('/.*', NotFoundPageHandler),
webapp2.Route('/_ah/upload/.*', BadUploadHandler),
webapp2.Route('/admin', admin.ManageSessionsHandler),
webapp2.Route('/admin/conference_data', admin.ManageConferenceHandler),
webapp2.Route('/admin/manage_sessions', admin.ManageSessionsHandler, name='sessions'),
webapp2.Route('/admin/session/<date>', admin.SessionByDateHandler, name='session_by_date'),
webapp2.Route('/admin/add_session', admin.AddSessionHandler),
webapp2.Route('/admin/edit_session', admin.EditSessionHandler),
webapp2.Route('/admin/update_session', admin.UpdateSessionHandler),
webapp2.Route('/admin/delete_session', admin.DeleteSessionHandler),
webapp2.Route('/admin/retrieve_presentation', admin.RetrievePresentationHandler),
webapp2.Route('/admin/logs', admin.LogsHandler),
webapp2.Route('/admin/upload_conference_data/', admin.RenderConferenceUploadDataHandler),
webapp2.Route('/admin/check_conference_data/', admin.CheckConferenceDataHandler),
webapp2.Route('/admin/delete_upload', admin.DeleteConferenceUploadData),
webapp2.Route('/admin/commit_upload', admin.CommitConferenceUploadData),
webapp2.Route('/admin/manage_users', admin.ManageUserAccountsHandler),
webapp2.Route('/admin/add_user_account', admin.AddUserAccountHandler),
webapp2.Route('/admin/delete_user_account', admin.DeleteUserAccountHandler),
webapp2.Route('/activate', admin.AccountActivateHandler)
|
{"/admin.py": ["/email_messages.py", "/constants.py", "/models.py", "/main.py", "/google_to_dropbox.py", "/forms.py"], "/email_messages.py": ["/constants.py"], "/main.py": ["/models.py", "/email_messages.py", "/secrets.py", "/constants.py", "/forms.py", "/admin.py"], "/super_admin_handlers.py": ["/main.py", "/models.py", "/secrets.py", "/forms.py"], "/serve_presentations.py": ["/models.py", "/main.py"], "/google_to_dropbox.py": ["/models.py", "/main.py", "/secrets.py"], "/dropbox_oauth.py": ["/main.py", "/secrets.py"], "/forms.py": ["/models.py"], "/messages.py": ["/main.py", "/email_messages.py", "/constants.py"], "/utilities.py": ["/models.py", "/main.py"], "/templates/OLD-templates/presentations.py": ["/models.py"]}
|
34,027,385
|
stevenmarr/presentation-manager
|
refs/heads/master
|
/super_admin_handlers.py
|
from google.appengine.ext.webapp import template
from google.appengine.ext import ndb, db, blobstore
from main import BaseHandler, config
from models import User
import webapp2
import time
from webapp2_extras.appengine.users import admin_required as super_admin_required
from webapp2_extras.appengine.auth.models import Unique
import forms
import logging
class SuperAdmin(BaseHandler):
    """Super-admin landing page (GAE application admins only)."""
    @super_admin_required
    def get(self):
        # NOTE(review): other handlers use render_response; this one uses
        # render_template — confirm both exist on BaseHandler.
        return self.render_template('super_admin.html')
class ManageAdminAccountsHandler(BaseHandler):
    """Lists admin-level accounts and renders the add-admin form."""
    @super_admin_required
    def get(self):
        # Presenters are excluded; everything else is an admin-tier account.
        admin_users = ndb.gql("SELECT * FROM User WHERE account_type != 'presenter' ORDER BY account_type DESC ")
        form = forms.AddAdminForm()
        form.account_type.choices = [(c, c) for c in User.account_type_choices]
        return self.render_response("manage_admins.html", users = admin_users, form=form)
class AddAdminAccountHandler(BaseHandler):
    """Creates an admin-level account from the add-admin form."""
    # BUG FIX: this mutating handler lacked the @super_admin_required guard
    # that the other super-admin pages use.
    @super_admin_required
    def post(self):
        form = forms.AddAdminForm(self.request.POST)
        form.account_type.choices = [(choice, choice) for choice in User.account_type_choices]
        if not form.validate():
            logging.info("Form did not Validate *****************")
            users = ndb.gql("SELECT * FROM User WHERE account_type != 'presenter' ORDER BY account_type DESC ")
            return self.render_response("manage_admins.html", form=form, users = users)
        email = form.email.data.lower()
        firstname = form.firstname.data
        lastname = form.lastname.data
        account_type = form.account_type.data
        user_id = ('%s|%s' % (self.module, email))
        unique_properties = []
        # create_user returns (success, user-or-duplicate-info); the first
        # element was previously bound to the misleading name `session`.
        created, user = self.user_model.create_user(user_id,
                                                    unique_properties,
                                                    email= email,
                                                    account_type = account_type,
                                                    firstname = firstname,
                                                    lastname = lastname,
                                                    verified = False)
        time.sleep(.25)
        if not created:
            self.display_message('Unable to create user for email %s because of \
duplicate keys %s' % (email, user))
        self.redirect('/super_admin/manage_users')
class DeleteAdminAccountHandler(BaseHandler):
    """Deletes an admin account (and its auth-id uniqueness markers)."""
    # BUG FIX: this destructive handler lacked the @super_admin_required
    # guard used by the other super-admin pages.
    @super_admin_required
    def post(self):
        email = self.request.get('user_id')
        user_id = ('%s|%s' % (self.module, email))
        user = User.get_by_auth_id(user_id)
        if user:
            # Remove the Unique records guarding auth_id uniqueness, then
            # the user entity itself.
            Unique.delete_multi( map(lambda s: 'User.auth_id:' + s, user.auth_ids) )
            user.key.delete()
            time.sleep(.25)
        self.redirect('/super_admin/manage_users')
# WSGI route table for the super-admin (GAE application admin) pages.
app = webapp2.WSGIApplication(
    [webapp2.Route('/super_admin', SuperAdmin),
     webapp2.Route('/super_admin/manage_users', ManageAdminAccountsHandler),
     webapp2.Route('/super_admin/add_user_account', AddAdminAccountHandler),
     webapp2.Route('/super_admin/delete_user_account', DeleteAdminAccountHandler)
    ], debug=True, config=config)
|
{"/admin.py": ["/email_messages.py", "/constants.py", "/models.py", "/main.py", "/google_to_dropbox.py", "/forms.py"], "/email_messages.py": ["/constants.py"], "/main.py": ["/models.py", "/email_messages.py", "/secrets.py", "/constants.py", "/forms.py", "/admin.py"], "/super_admin_handlers.py": ["/main.py", "/models.py", "/secrets.py", "/forms.py"], "/serve_presentations.py": ["/models.py", "/main.py"], "/google_to_dropbox.py": ["/models.py", "/main.py", "/secrets.py"], "/dropbox_oauth.py": ["/main.py", "/secrets.py"], "/forms.py": ["/models.py"], "/messages.py": ["/main.py", "/email_messages.py", "/constants.py"], "/utilities.py": ["/models.py", "/main.py"], "/templates/OLD-templates/presentations.py": ["/models.py"]}
|
34,027,386
|
stevenmarr/presentation-manager
|
refs/heads/master
|
/constants.py
|
# Default From: address for all outgoing notification email.
SENDER = "Conference Media Support <marr.stevenmarr@gmail.com>"
# Short conference name used in email copy and templates.
CONF_NAME = "NCIS"
|
{"/admin.py": ["/email_messages.py", "/constants.py", "/models.py", "/main.py", "/google_to_dropbox.py", "/forms.py"], "/email_messages.py": ["/constants.py"], "/main.py": ["/models.py", "/email_messages.py", "/secrets.py", "/constants.py", "/forms.py", "/admin.py"], "/super_admin_handlers.py": ["/main.py", "/models.py", "/secrets.py", "/forms.py"], "/serve_presentations.py": ["/models.py", "/main.py"], "/google_to_dropbox.py": ["/models.py", "/main.py", "/secrets.py"], "/dropbox_oauth.py": ["/main.py", "/secrets.py"], "/forms.py": ["/models.py"], "/messages.py": ["/main.py", "/email_messages.py", "/constants.py"], "/utilities.py": ["/models.py", "/main.py"], "/templates/OLD-templates/presentations.py": ["/models.py"]}
|
34,027,387
|
stevenmarr/presentation-manager
|
refs/heads/master
|
/dropbox_oauth.py
|
from main import BaseHandler, config
import webapp2
import urllib
import urllib2
import secrets
import logging
class StartOauthHandler(BaseHandler):
    """Kicks off the Dropbox OAuth2 authorization-code flow by redirecting
    the user to Dropbox's authorize endpoint."""
    def get(self):
        # Query parameters for the authorization-code grant.
        params = { 'response_type':'code',
                   'client_id':secrets.App_key,
                   'redirect_uri':'https://presetationmgr.appspot.com/oauth2',
                   'state':"",
                   'force_reapprove':'false',
                   'disable_signup':'true' }
        authorize_url = 'https://www.dropbox.com/1/oauth2/authorize' + '?' + urllib.urlencode(params)
        self.redirect(authorize_url)
class DBResponseHandler(BaseHandler):
def get(self):
code = self.request.get('code')
logging.error('code %s' % code)
url = "https://api.dropbox.com/1/oauth2/token"
#url = 'http://localhost:8080/'
params = { 'code':code,
'grant_type': 'authorization_code',
'client_id': secrets.App_key,
'client_secret': secrets.App_secret,
'redirect_uri':'https://presetationmgr.appspot.com/oauth2'}
#'redirect_uri':'http://localhost:8080/oauth2'}
data = urllib.urlencode(params)
req = urllib2.Request(url, data)
response = urllib2.urlopen(req)
this_page = response.read()
logging.error("request for request data is %s"% this_page)
app = webapp2.WSGIApplication([
webapp2.Route('/oauth2/init', StartOauthHandler),
('/oauth2#.*', DBResponseHandler),
('/oauth2/', DBResponseHandler),
webapp2.Route ('/oauth2', DBResponseHandler),
webapp2.Route('/oauth2/', DBResponseHandler),
webapp2.Route('/oauth2/init', StartOauthHandler)
], debug=True, config=config)
|
{"/admin.py": ["/email_messages.py", "/constants.py", "/models.py", "/main.py", "/google_to_dropbox.py", "/forms.py"], "/email_messages.py": ["/constants.py"], "/main.py": ["/models.py", "/email_messages.py", "/secrets.py", "/constants.py", "/forms.py", "/admin.py"], "/super_admin_handlers.py": ["/main.py", "/models.py", "/secrets.py", "/forms.py"], "/serve_presentations.py": ["/models.py", "/main.py"], "/google_to_dropbox.py": ["/models.py", "/main.py", "/secrets.py"], "/dropbox_oauth.py": ["/main.py", "/secrets.py"], "/forms.py": ["/models.py"], "/messages.py": ["/main.py", "/email_messages.py", "/constants.py"], "/utilities.py": ["/models.py", "/main.py"], "/templates/OLD-templates/presentations.py": ["/models.py"]}
|
34,027,388
|
stevenmarr/presentation-manager
|
refs/heads/master
|
/main.py
|
#!/usr/bin/env python
from google.appengine.ext.webapp import template
from google.appengine.ext import ndb, db, blobstore
from google.appengine.api import mail, memcache, modules
from google.appengine.ext.webapp import blobstore_handlers
import time
import logging
import os.path
import webapp2
import email_messages
import forms
import datetime
from webapp2_extras import auth
from webapp2_extras import sessions, jinja2
from webapp2_extras import users
from webapp2_extras.auth import InvalidAuthIdError
from webapp2_extras.auth import InvalidPasswordError
from secrets import SECRET_KEY
from models import SessionData, User, ConferenceData
from constants import SENDER
def presenter_required(handler):
"""
Decorator that checks if there's a user associated with the current session.
Will also fail if there's no session present.
"""
def check_login(self, *args, **kwargs):
auth = self.auth
if auth.get_user_by_session():
if auth.get_user_by_session()['account_type'] =='presenter' or 'user' or 'admin' or 'super_admin':
return handler(self, *args, **kwargs)
else: self.redirect('/login')
else: self.redirect('/login')
return check_login
def user_required(handler):
"""
Decorator that checks if there's a user associated with the current session.
Will also fail if there's no session present.
"""
def check_login(self, *args, **kwargs):
auth = self.auth
if auth.get_user_by_session():
if auth.get_user_by_session()['account_type'] == 'user' or 'admin' or 'super_admin':
return handler(self, *args, **kwargs)
else: self.redirect('/login')
else: self.redirect('/login')
return check_login
def admin_required(handler):
"""
Decorator that checks if there's a user associated with the current session.
Will also fail if there's no session present.
"""
def check_login(self, *args, **kwargs):
auth = self.auth
if auth.get_user_by_session():
if auth.get_user_by_session()['account_type'] == 'admin' or 'super_admin':
return handler(self, *args, **kwargs)
else: self.redirect('/default')
elif os.environ['CURRENT_MODULE_ID'] == 'testing':
return handler(self, *args, **kwargs)
else: self.redirect('/default')
return check_login
def super_admin_required(handler):
"""
Decorator that checks if there's a user associated with the current session.
Will also fail if there's no session present.
"""
def check_login(self, *args, **kwargs):
auth = self.auth
if auth.get_user_by_session():
if auth.get_user_by_session()['account_type'] == 'admin':
return handler(self, *args, **kwargs)
else: self.redirect('/default')
else: self.redirect('/default')
return check_login
def google_admin_required(handler):
if users.admin_required(handler):
return handler
else:
return self.redirect('/')
def jinja2_factory(app):
"True ninja method for attaching additional globals/filters to jinja"
j = jinja2.Jinja2(app)
j.environment.globals.update({
'uri_for': webapp2.uri_for,
})
return j
def validate(name, type = 'string'):
return name
def check_csv(csv):
return csv
data_cache = memcache.Client()
class BaseHandler(webapp2.RequestHandler):
module = modules.get_current_module_name()
@webapp2.cached_property
def auth(self):
"""Shortcut to access the auth instance as a property."""
return auth.get_auth()
@webapp2.cached_property
def user_info(self):
"""Shortcut to access a subset of the user attributes that are stored
in the session.
The list of attributes to store in the session is specified in
config['webapp2_extras.auth']['user_attributes'].
:returns
A dictionary with most user information
"""
return self.auth.get_user_by_session()
@webapp2.cached_property
def user(self):
"""Shortcut to access the current logged in user.
Unlike user_info, it fetches information from the persistence layer and
returns an instance of the underlying model.
:returns
The instance of the user model associated to the logged in user.
"""
u = self.user_info
return self.user_model.get_by_id(u['user_id']) if u else None
@webapp2.cached_property
def user_model(self):
"""Returns the implementation of the user model.
It is consistent with config['webapp2_extras.auth']['user_model'], if set.
"""
return self.auth.store.user_model
@webapp2.cached_property
def session(self):
"""Shortcut to access the current session."""
return self.session_store.get_session(backend="datastore")
def render_template(self, view_filename, params=None):#dict method
if not params:
params = {}
user = self.user_info
params['user'] = user
path = os.path.join(os.path.dirname(__file__), 'templates', view_filename)
self.response.out.write(template.render(path, params))
@webapp2.cached_property
def jinja2(self):
return jinja2.get_jinja2(factory=jinja2_factory, app=self.app)
def render_response(self, _template, **context):#jinja
conference_data = self.get_conference_data()
ctx = {'user': self.user_info, 'conference_data': conference_data}
ctx.update(context)
rv = self.jinja2.render_template(_template, **ctx)
self.response.write(rv)
def upload_to_db(self):
return self.get_conference_data().dbox_update
def get_conference_data(self):
conference_data = data_cache.get('%s-conference_data'% self.module)
if not conference_data:
conference_data = ConferenceData.all().filter('module =', self.module).get()
logging.info('CoferenceData DB Query')
data_cache.set('%s-conference_data'% self.module, conference_data)
if not conference_data:
entry=ConferenceData()
entry.put()
time.sleep(.25)
data_cache.set('%s-conference_data'% self.module, None)
return entry
return conference_data
def get_sessions(self, user_id = None):
if user_id:
return SessionData.all().filter('module =', self.module)\
.filter('user_id =', user_id).order('-name')
sessions = data_cache.get('%s-sessions'% self.module)
if not sessions:
sessions = SessionData.all().filter('module =', self.module).order('-name')
logging.info('SessionData DB Query')
data_cache.set('%s-sessions'% self.module, sessions)
if not SessionData.all().get():
return None
return sessions
def get_users(self, user_id = None):
if user_id:
if user_id:
return [[g.lastname, g.firstname, g.email] for g in (User.query(User.auth_ids == '%s|%s'% (self.module, user_id)))][0]
return User.query(User.module == self.module).order(User.lastname)
def get_users_tuple(self):
users = data_cache.get('%s-users-tuple'% self.module)
if users == None:
users = [(g.email, g.lastname+', '+g.firstname+', '+g.email ) for g in self.get_users()]
logging.info('User DB Query')
data_cache.set('%s-users-tuple'% self.module, users)
return users
def display_message(self, message):
"""Utility function to display a template with a simple message."""
params = {
'message': message
}
self.render_template('message.html', params)
# this is needed for webapp2 sessions to work
def dispatch(self):
# Get a session store for this request.
self.session_store = sessions.get_store(request=self.request)
try:
# Dispatch the request.
webapp2.RequestHandler.dispatch(self)
finally:
# Save all sessions.
self.session_store.save_sessions(self.response)
class MainHandler(BaseHandler):
def get(self):
self.render_response('home.html')
class AccountActivateHandler(BaseHandler):
def get(self):
form = forms.ActivateForm()
self.render_response('activate.html', form=form)
def post(self):
form = forms.ActivateForm(self.request.POST)
if not form.validate():
return self.render_response('activate.html', failed = True, form = form)
email = form.email.data.lower()
password = form.password.data
user_id = ('%s|%s' % (self.module, email))
user = self.user_model.get_by_auth_id(user_id)
if not user:
return self.render_response('activate.html',
failed=True,
message='That email address does not match an entry in our records, please try again.',
form=form)
if user.verified == True:
return self.render_response('login.html',
failed=True,
message='That account is already activated, please login below',
form=form)
if user:
user.set_password(password)
user.put()
time.sleep(.25)
user_id = user.get_id()
token = self.user_model.create_signup_token(user_id)
verification_url = self.uri_for('verification',
type = 'v',
user_id = user_id,
signup_token = token,
_full = True)
subject = email_messages.account_verification[0]
name = user.firstname+' '+user.lastname
body = email_messages.account_verification[1].format(url = verification_url, name = name)
mail.send_mail( sender = SENDER,
to = email,
subject = subject,
body = body)
self.render_response('message.html', success = True, message = "An email containing verification information has been sent.")
return
class ForgotPasswordHandler(BaseHandler):
def get(self):
#self._serve_page()
self.render_response('forgot.html')
def post(self):
email = self.request.get('email').lower()
user_id = ('%s|%s' % (self.module, email))
user = self.user_model.get_by_auth_id(user_id)
if not user:
self.render_response('forgot.html',
failed = True,
message = 'That email address does not match an entry in our records, please try again.')
return
user_id = user.get_id()
token = self.user_model.create_signup_token(user_id)
# Generate email message
verification_url = self.uri_for('verification',
type = 'p',
user_id = user_id,
signup_token = token,
_full = True)
subject = email_messages.password_reset[0]
name = user.firstname+' '+user.lastname
body = email_messages.password_reset[1].format(url = verification_url, name = name)
mail.send_mail( sender = SENDER,
to = email,
subject = subject,
body = body)
return self.render_response('login.html', success = True, message = "An email containing password reset information has been sent.")
#self.display_message('An email containing password reset information has been sent.')
#return
class VerificationHandler(BaseHandler):
def get(self, *args, **kwargs):
user = None
user_id = kwargs['user_id']
signup_token = kwargs['signup_token']
verification_type = kwargs['type']
# it should be something more concise like
# self.auth.get_user_by_token(user_id, signup_token)
# unfortunately the auth interface does not (yet) allow to manipulate
# signup tokens concisely
user, ts = self.user_model.get_by_auth_token(int(user_id), signup_token,
'signup')
if not user:
self.abort(404)
# store user data in the session
self.auth.set_session(self.auth.store.user_to_dict(user), remember=True)
if verification_type == 'v':
# remove signup token, we don't want users to come back with an old link
self.user_model.delete_signup_token(user.get_id(), signup_token)
if not user.verified:
user.verified = True
user.put()
self.render_response('message.html', success = True, message = 'User email address has been verified.')
#self.display_message('User email address has been verified.')
return
elif verification_type == 'p':
# supply user to the page
params = {
'user': user,
'token': signup_token
}
self.render_template('resetpassword.html', params)
else:
logging.info('verification type not supported')
self.abort(404)
class SetPasswordHandler(BaseHandler):
@user_required
def post(self):
password = self.request.get('password')
old_token = self.request.get('t')
if not password or password != self.request.get('confirm_password'):
self.render_response('message.html', failed = True, message = 'Passwords do not match')
return
user = self.user
user.set_password(password)
user.put()
# remove signup token, we don't want users to come back with an old link
self.user_model.delete_signup_token(user.get_id(), old_token)
self.render_response('message.html', success = True, message = 'Password updated')
#self.display_message('Password updated')
class LoginHandler(BaseHandler):
def get(self):
if self.user: #redirect to admin if already logged in
self.redirect('/admin')
#self._serve_page()
form = forms.LoginForm()
self.render_response('login.html', form=form)
def post(self):
form = forms.LoginForm(self.request.POST)
if not form.validate():
return self.render_response('login.html',
failed = True,
message = "Invalid login, please try again",
form = form)
email = form.email.data.lower()
user_id = ('%s|%s' % (self.module, email))
user = self.user_model.get_by_auth_id(user_id)
if not user:
return self.render_response('login.html',
failed = True,
message = "Invalid email address for login please try again",
form = form)
else:
if not user.password:
return self.redirect('/activate')
password = form.password.data
try:
u = self.auth.get_user_by_password(user_id, password, remember=True,
save_session=True)
if self.auth.get_user_by_session()['account_type'] == 'admin':
return self.redirect('/admin')
else: self.redirect('/default')
return self.redirect('/default')
except (InvalidAuthIdError, InvalidPasswordError) as e:
return self.render_response('login.html',
failed = True,
message = "Invalid login please try again",
form = form)
def _serve_page(self, failed=False, message = ""):
form = forms.LoginForm()
email = self.request.get('email').lower()
params = {
'email': email,
'failed': failed,
'message': message
}
self.render_template('login.html', params)
class LogoutHandler(BaseHandler):
def get(self):
self.auth.unset_session()
self.redirect(self.uri_for('home'))
class NotFoundPageHandler(BaseHandler):
def get(self):
self.error(404)
self.render_response('message.html', failed = True, message = "Sorry that page doesn\'t exist, if you were attempting a file \
upload please refresh the page prior to upload.")
class BadUploadHandler(BaseHandler):
def get(self):
self.render_response('default.html',
failed = True,
message = "If you were attempting a file upload please refresh the page prior to upload.")
config = {
'webapp2_extras.auth': {
'user_model': 'models.User',
'user_attributes': ['firstname', 'account_type']
},
'webapp2_extras.sessions': {
'secret_key': SECRET_KEY
}
}
import admin
app = webapp2.WSGIApplication(
[
webapp2.Route('/', LoginHandler, name='home'),
webapp2.Route('/activate', AccountActivateHandler, name='activate'),
webapp2.Route('/signup', AccountActivateHandler, name='activate'),
webapp2.Route('/<type:v|p>/<user_id:\d+>-<signup_token:.+>',
handler=VerificationHandler, name='verification'),
webapp2.Route('/password', SetPasswordHandler),
webapp2.Route('/login', LoginHandler, name='login'),
webapp2.Route('/logout', LogoutHandler, name='logout'),
webapp2.Route('/forgot', ForgotPasswordHandler, name='forgot'),
webapp2.Route('/.*', NotFoundPageHandler),
webapp2.Route('/_ah/upload/.*', BadUploadHandler),
webapp2.Route('/admin', admin.ManageSessionsHandler),
webapp2.Route('/admin/conference_data', admin.ManageConferenceHandler),
webapp2.Route('/admin/manage_sessions', admin.ManageSessionsHandler, name='sessions'),
webapp2.Route('/admin/session/<date>', admin.SessionByDateHandler, name='session_by_date'),
webapp2.Route('/admin/add_session', admin.AddSessionHandler),
webapp2.Route('/admin/edit_session', admin.EditSessionHandler),
webapp2.Route('/admin/update_session', admin.UpdateSessionHandler),
webapp2.Route('/admin/delete_session', admin.DeleteSessionHandler),
webapp2.Route('/admin/retrieve_presentation', admin.RetrievePresentationHandler),
webapp2.Route('/admin/logs', admin.LogsHandler),
webapp2.Route('/admin/upload_conference_data/', admin.RenderConferenceUploadDataHandler),
webapp2.Route('/admin/check_conference_data/', admin.CheckConferenceDataHandler),
webapp2.Route('/admin/delete_upload', admin.DeleteConferenceUploadData),
webapp2.Route('/admin/commit_upload', admin.CommitConferenceUploadData),
webapp2.Route('/admin/manage_users', admin.ManageUserAccountsHandler),
webapp2.Route('/admin/add_user_account', admin.AddUserAccountHandler),
webapp2.Route('/admin/delete_user_account', admin.DeleteUserAccountHandler),
webapp2.Route('/activate', admin.AccountActivateHandler)
], debug=True, config=config)
logging.getLogger().setLevel(logging.DEBUG)
|
{"/admin.py": ["/email_messages.py", "/constants.py", "/models.py", "/main.py", "/google_to_dropbox.py", "/forms.py"], "/email_messages.py": ["/constants.py"], "/main.py": ["/models.py", "/email_messages.py", "/secrets.py", "/constants.py", "/forms.py", "/admin.py"], "/super_admin_handlers.py": ["/main.py", "/models.py", "/secrets.py", "/forms.py"], "/serve_presentations.py": ["/models.py", "/main.py"], "/google_to_dropbox.py": ["/models.py", "/main.py", "/secrets.py"], "/dropbox_oauth.py": ["/main.py", "/secrets.py"], "/forms.py": ["/models.py"], "/messages.py": ["/main.py", "/email_messages.py", "/constants.py"], "/utilities.py": ["/models.py", "/main.py"], "/templates/OLD-templates/presentations.py": ["/models.py"]}
|
34,027,389
|
stevenmarr/presentation-manager
|
refs/heads/master
|
/forms.py
|
from wtforms import Form, BooleanField, StringField, validators, TextField, PasswordField, DateField, SelectField
from wtforms.widgets.core import Select
from wtforms import widgets
import re
import logging
from wtforms.ext.appengine import db, ndb
from models import SessionData, User, ConferenceData
from dateutil.parser import *
dateRE = '1[4-9][0-1][1-9][0-3][0-1]-[0-2][1-9]:[0-5][0-9]'
class RegistrationForm(Form):
username = StringField(u'Username', [validators.Length(min=4, max=25)])
email = StringField(u'Email Address', [validators.Length(min=6, max=35)])
accept_rules= BooleanField(u'I accept the site rules', [validators.InputRequired()])
class ActivateForm(Form):
email = StringField('Email',
[
validators.Email()])
password = PasswordField('Password',
[validators.Required()])
verify = PasswordField('Verify Password',
[validators.Required(),
validators.EqualTo('password',
message=(u'Passwords must match.'))])
#logging.info("Verify is %s" % password_confirm.data)
class LoginForm(Form):
email = StringField(u'Email',
[validators.Required(), validators.Email()])
password = PasswordField(u'Password',
[validators.Required()])
class AddUserForm(Form):
email = StringField(u'Email',
[validators.Required(), validators.Email()])
firstname = TextField(u'First Name',
[validators.Required()])
lastname = TextField(u'Last Name',
[validators.Required()])
email_user = BooleanField(u'Email presenter with account activation information')
class AddAdminForm(AddUserForm):
account_type = SelectField(u'User type')
SessionDataForm = db.model_form(SessionData, base_class=Form, field_args={
'users': {
'label': 'User Id',
'description': '',
'validators': [validators.Required()]
},
'name': {
'label': 'Session Name',
'description': '',
'validators': [validators.Required()]
},
'room': {
'label': 'Session Room',
'description': '',
},
}, exclude=('date_time','module',
'blob_store_key',
'filename',
'dbox_path',
'uploaded_to_dbox',
'dbox_size'))
UserDataForm = ndb.model_form(User, base_class=Form, field_args={
'firstname': {
'label': 'First Name',
'description': '',
'validators': [validators.Required()]
},
'lastname': {
'label': 'Last Name',
'description': '',
'validators': [validators.Required()]
},
'email_address': {
'label': 'Email',
'description': '',
'validators': [validators.Required()]
},
}, exclude=('module', 'password', 'account_type'))
ConferenceDataForm = db.model_form(ConferenceData, base_class=Form, field_args={
'c_client': {
'label': 'Client Name',
'description': '',
'validators': [validators.Required()]
},
'name': {
'label': 'Conference Name',
'description': '',
'validators': [validators.Required()]
},
'account_verification_msg': {
'label': 'Email message to send to attendees to verify email address',
'description': '',
'validators': [validators.Required()],
'widget': widgets.TextArea()
},
'password_reset_msg': {
'label': 'Email message to send to attendees to reset password',
'description': '',
'validators': [validators.Required()],
'widget': widgets.TextArea()
},
'new_account_msg': {
'label': 'Email message to send to attendees when they have an account to activate',
'description': '',
'validators': [validators.Required()],
'widget': widgets.TextArea()
},
'dbox_update': {
'label': 'Upload Conference Data to DropBox in realtime',
'description': '',
'widget': widgets.CheckboxInput()
},
}, exclude=('start_date',
'end_date',
'module',
'dbox_access_token',
'db_user_id',
'db_account_info'))
class AddSessionForm(Form):
email = StringField('Email',[validators.Required(),validators.Email()])
name = TextField('Session Name',[validators.Required()])
room = TextField('Session Room')
#date_time = DateField(format='%m %d %y', widget=SelectDateWidget())
date_time = DateField('Session Date mm/dd/yyyy')
date_time = TextField('Session Time hh:mm --')
class SessionForm(SessionDataForm):
#setattr(SelectField, _name, 'user_id')
users = SelectField(u'Presenter',[validators.Required()])
date = TextField(u'Session Date (mm/dd/yyyy)', \
[validators.Required()], widget = widgets.Input(input_type='date'))
time = TextField(u'Session Time (hh:mm --)', \
[], widget = widgets.Input(input_type='time'))
user_id = StringField()
class ConferenceForm(ConferenceDataForm):
start = TextField(u'Start Date', \
[validators.Required()], widget = widgets.Input(input_type='date'))
end = TextField(u'End Date', \
[validators.Required()], widget = widgets.Input(input_type='date'))
|
{"/admin.py": ["/email_messages.py", "/constants.py", "/models.py", "/main.py", "/google_to_dropbox.py", "/forms.py"], "/email_messages.py": ["/constants.py"], "/main.py": ["/models.py", "/email_messages.py", "/secrets.py", "/constants.py", "/forms.py", "/admin.py"], "/super_admin_handlers.py": ["/main.py", "/models.py", "/secrets.py", "/forms.py"], "/serve_presentations.py": ["/models.py", "/main.py"], "/google_to_dropbox.py": ["/models.py", "/main.py", "/secrets.py"], "/dropbox_oauth.py": ["/main.py", "/secrets.py"], "/forms.py": ["/models.py"], "/messages.py": ["/main.py", "/email_messages.py", "/constants.py"], "/utilities.py": ["/models.py", "/main.py"], "/templates/OLD-templates/presentations.py": ["/models.py"]}
|
34,027,390
|
stevenmarr/presentation-manager
|
refs/heads/master
|
/messages.py
|
import webapp2
from main import BaseHandler, config
from google.appengine.api import mail
import email_messages
from constants import SENDER
class SendBulkEmailsHandler(BaseHandler):
def post(self):
category = self.request.get('category')
email = self.request.get('email')
name = self.request.get('name')
if category == 'new_account':
subject = email_messages.new_account[0]
body = email_messages.new_account[1].format(name=name)
mail.send_mail( sender = SENDER,
to = email,
subject = subject,
body = body)
return
#Send some emails
app = webapp2.WSGIApplication([
webapp2.Route('/send_emails', SendBulkEmailsHandler)
], debug=True, config=config)
|
{"/admin.py": ["/email_messages.py", "/constants.py", "/models.py", "/main.py", "/google_to_dropbox.py", "/forms.py"], "/email_messages.py": ["/constants.py"], "/main.py": ["/models.py", "/email_messages.py", "/secrets.py", "/constants.py", "/forms.py", "/admin.py"], "/super_admin_handlers.py": ["/main.py", "/models.py", "/secrets.py", "/forms.py"], "/serve_presentations.py": ["/models.py", "/main.py"], "/google_to_dropbox.py": ["/models.py", "/main.py", "/secrets.py"], "/dropbox_oauth.py": ["/main.py", "/secrets.py"], "/forms.py": ["/models.py"], "/messages.py": ["/main.py", "/email_messages.py", "/constants.py"], "/utilities.py": ["/models.py", "/main.py"], "/templates/OLD-templates/presentations.py": ["/models.py"]}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.