index
int64 | repo_name
string | branch_name
string | path
string | content
string | import_graph
string |
|---|---|---|---|---|---|
10,374
|
ishambhandari/Python
|
refs/heads/master
|
/main.py
|
from purchase import purchase,discountAmount,createInvoice
from readfiles import readInventory
from updateinventory import updateStock
import datetime
print("Hello!!! This is an electronic store.We sell different kinds of mobile phones,laptops and Harddisks.Please Proceed if you wish to buy.")


def main():
    """Run the interactive shop loop: collect purchases until the user says 'n',
    then write an invoice with the 10% discount applied."""
    person_name = input("Enter your full name")
    inventory = readInventory()
    purchases = []
    keep_shopping = True
    while keep_shopping:
        # The original reused `ans` as both a bool flag and the raw input string,
        # and wrapped everything in a bare `except:`; this version keeps one
        # clearly-named flag and catches only the errors user input can cause.
        try:
            answer = input("would you like to make a purchase?(y/n)")
            if answer == "y":
                purchased_item = purchase(inventory)
                if purchased_item:
                    purchases.append(purchased_item)
            elif answer == "n":
                keep_shopping = False
            else:
                print("Please enter y or n")
        except (ValueError, IndexError):
            # purchase() does int() conversions and list indexing on user input.
            print("Please enter correct values.")
    print("We give 10% discount in our product.Discount amount is subtracted in your bills.Enjoy shopping...")
    discount_check = True  # discount is always granted
    createInvoice(person_name, purchases, discount_check)
    print("Thank you for visiting our store..")


main()
|
{"/main.py": ["/purchase.py", "/readfiles.py", "/updateinventory.py"], "/purchase.py": ["/readfiles.py", "/updateinventory.py"]}
|
10,375
|
ishambhandari/Python
|
refs/heads/master
|
/updateinventory.py
|
def updateStock(inventory):
    """Persist the inventory to stock.txt, one "name,price,stock" line per product.

    inventory: list of [name, price, stock] records.
    """
    # `with` guarantees the file is closed even if a write raises (the original
    # leaked the handle on error).
    with open("stock.txt", "w") as stock_file:
        for product in inventory:
            stock_file.write(product[0] + "," + str(product[1]) + "," + str(product[2]) + "\n")
|
{"/main.py": ["/purchase.py", "/readfiles.py", "/updateinventory.py"], "/purchase.py": ["/readfiles.py", "/updateinventory.py"]}
|
10,376
|
ishambhandari/Python
|
refs/heads/master
|
/purchase.py
|
import readfiles
from updateinventory import updateStock
import datetime
# Module-level snapshot of the stock file, loaded once when this module is
# imported.  NOTE(review): the old comment called this a "function", but it
# stores the *result* of readInventory() — a list of product records.
inventory = readfiles.readInventory()
# The functions below implement purchasing, discounting and invoice creation.
def purchase(inventory):
    """Interactively sell one product from *inventory*.

    Prints the product menu, asks for a 1-based choice and a quantity, writes
    the updated stock via updateStock(), and returns [name, price, quantity]
    on success or False when the sale cannot be completed.

    Raises ValueError (from int()) on non-numeric input; the caller handles it.
    """
    for index, product in enumerate(inventory, 1):
        print(str(index) + ". " + product[0])
    choice = int(input("What would you like to purchase? "))
    # BUG FIX: the original indexed inventory[choice - 1] unchecked, so 0
    # silently sold the *last* product (negative index) and large numbers
    # crashed with IndexError.
    if not 1 <= choice <= len(inventory):
        print("Please enter correct values.")
        return False
    name = inventory[choice - 1][0]
    price = inventory[choice - 1][1]
    stock = int(inventory[choice - 1][2])
    print("Price: " + str(price))
    print("Available: " + str(stock))
    quantity = int(input("How many " + name + " would you like to buy?"))
    # BUG FIX: a non-positive quantity previously passed the stock check and
    # silently *increased* the recorded stock.
    if quantity <= 0:
        print("Please enter correct values.")
        return False
    if stock - quantity < 0:
        print("Out of stock!!")
        return False
    inventory[choice - 1][2] = stock - quantity
    updateStock(inventory)
    return [name, price, quantity]
def discountAmount(price):
    """Return the 10% discount owed on *price*."""
    return 0.1 * price
def createInvoice(person_name, purchases, discount_check):
    """Write a per-customer invoice file and print the grand total.

    person_name: customer name, used in the invoice file name.
    purchases: list of [name, price, quantity] records from purchase().
    discount_check: when truthy, subtract the 10% discount from each line.
    """
    # BUG FIX: str(datetime.now()) contains ":" which is illegal in Windows
    # file names; use a colon-free timestamp for the file name instead.
    stamp = datetime.datetime.now()
    invoice_name = person_name + '-' + stamp.strftime('%Y-%m-%d_%H-%M-%S')
    line_totals = []
    # `with` closes the file even if a write raises (the original leaked it).
    with open(invoice_name + ".txt", "w") as file:
        file.write('Person Name: ' + person_name + '\n')
        file.write('Purchase Date ' + str(stamp) + '\n')
        file.write('Purchase details\n' + "\n")
        # Loop variable renamed from `purchase`, which shadowed the imported
        # purchase() function.
        for item in purchases:
            price = item[1]
            quantity = item[2]
            total = price * quantity
            discount = discountAmount(total) if discount_check else 0
            net = total - discount
            file.write("Product Name=" + '\t' + item[0] + '\n')
            file.write("Price=" + '\t' + str(price) + "$" + '\n')
            file.write("Quantity=" + '\t' + str(quantity) + " piece" + '\n')
            file.write("Total=" + '\t' + str(total) + "$" + '\n')
            file.write("Discount amount=" + '\t' + str(discount) + "$" + '\n')
            file.write("Final amount=" + '\t' + str(net) + "$" + '\n' + "\n" + "\n" + "\n")
            # BUG FIX: the original appended int(net), silently truncating
            # cents so the printed grand total disagreed with the line items.
            line_totals.append(net)
        sum_ = float(sum(line_totals))
        file.write("Total amount =" + str(sum_) + "$")
    print("Total amount =", float(sum_), "$" + '\n')
    print("Please check your invoice for further details..")
|
{"/main.py": ["/purchase.py", "/readfiles.py", "/updateinventory.py"], "/purchase.py": ["/readfiles.py", "/updateinventory.py"]}
|
10,378
|
krinish291/Farmer_portal
|
refs/heads/master
|
/Farmer_portal/Farmer_portal/Expert/migrations/0001_initial.py
|
# Generated by Django 2.1.5 on 2020-02-02 15:27
from django.db import migrations, models


class Migration(migrations.Migration):
    """Initial schema for the Expert app: the Expert and gunjan tables."""

    initial = True

    dependencies = []

    operations = [
        migrations.CreateModel(
            name='Expert',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('username', models.CharField(default='', max_length=100)),
                ('password', models.CharField(default='', max_length=20)),
                ('Dp', models.ImageField(default='kisan.jpg', upload_to='Expert_pics')),
                ('category', models.CharField(choices=[('Grains', 'Grains'), ('pulses', 'pulses'), ('Vegetables', 'Vegetables'), ('Fruits', 'Fruits'), ('Other', 'Other')], default='Grains', max_length=15)),
                ('email', models.EmailField(default='', max_length=50)),
                ('File', models.FileField(upload_to='documents/')),
                # NOTE(review): EmailField for a free-text description looks like a
                # copy-paste slip; a later migration changes it to TextField.
                ('description', models.EmailField(blank=True, default='', max_length=50)),
            ],
        ),
        migrations.CreateModel(
            name='gunjan',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(default='', max_length=100)),
            ],
        ),
    ]
|
{"/Farmer_portal/Farmer_portal/blog/urls.py": ["/Farmer_portal/Farmer_portal/blog/views.py"], "/Farmer_portal/Farmer_portal/Expert/views.py": ["/Farmer_portal/Farmer_portal/Expert/forms.py"], "/Farmer_portal/Farmer_portal/blog/views.py": ["/Farmer_portal/Farmer_portal/blog/models.py"], "/Farmer_portal/Farmer_portal/Expert/forms.py": ["/Farmer_portal/Farmer_portal/Expert/models.py"], "/Farmer_portal/Farmer_portal/blog/admin.py": ["/Farmer_portal/Farmer_portal/blog/models.py"]}
|
10,379
|
krinish291/Farmer_portal
|
refs/heads/master
|
/Farmer_portal/Farmer_portal/blog/models.py
|
from django.db import models
from django.utils import timezone
from django.contrib.auth.models import User
from django.urls import reverse
from PIL import Image
from Expert.models import Expert
# (stored value, human-readable label) pairs for the crop category fields.
Category_CHOICES = [
('Grains','Grains'),
('pulses','pulses'),
('Vegetables', 'Vegetables'),
('Fruits','Fruits'),
('Other','Other'),
]
# Grouped choices: one 'Gujrat' group containing its districts.
Locations =[
('Gujrat',
(
('Rajkot','Rajkot'),
('Mehsana','Mehsana'),
('Ahmedabad','Ahmedabad'),
('Anand','Anand'),
('Dahod','Dahod'),
('Kheda','Kheda'),
('Vadodara','Vadodara'),
('Panchmahal','Panchmahal'),
('Aravalli','Aravalli'),
('Banaskantha','Banaskantha'),
('Gandhinagar','Gandhinagar'),
('Patan','Patan'),
('Amreli','Amreli'),
('Bhavnagar','Bhavnagar'),
('Jamnagar','Jamnagar'),
('Junagadh','Junagadh'),
('Morbi','Morbi'),
('Sabarkantha','Sabarkantha'),
('Bharuch','Bharuch'),
('Dang','Dang'),
('Narmada','Narmada'),
('Navsari','Navsari'),
('Surat','Surat'),
('Valsad','Valsad'),
)
)
]
# Fertilizer choices; NPK and DAP variants are grouped sub-choices.
Ferti=[
('Nofertilizer','Nofertilizer'),
('bio-fertilizer','bio-fertilizer'),
('Urea','Urea'),
('N P K',
(
('NPK 19-19-19','NPK 19-19-19'),
('NPK 20-20-20','NPK 20-20-20'),
('NPK 20-20-0','NPK 20-20-0'),
('NPK 46-0-0','NPK 46-0-0'),
)
),
('D A P',
(
('DAP 18-46-0','DAP 18-46-0'),
)
),
]
# Land-area units accepted by Post.area_type; findarea() converts them to m².
area_type=(
('Bigha','Bigha'),
('Guntha','Guntha'),
('Acre','Acre'),
('Hectare','Hectare'),
('Square Meter','Square Meter',)
)
# Square metres per unit for each supported land-area measure.
_SQ_M_PER_UNIT = {
    'Bigha': 1621.344,
    'Guntha': 101.17,
    'Acre': 4046.86,
    'Hectare': 10000,
}


def findarea(area, area_type):
    """Convert *area* expressed in *area_type* units to whole square metres.

    Unknown unit names (including 'Square Meter') fall through to a factor of
    1, matching the original if/elif chain's else branch.  The result is
    truncated to int, as before.
    """
    return int(area * _SQ_M_PER_UNIT.get(area_type, 1))
# Create your models here.
class Post(models.Model):
    """A farmer's crop story: what was grown, where, with what inputs, and the profit."""

    title = models.CharField(max_length=100)
    Tell_your_story = models.TextField(default="")
    date_posted = models.DateField(auto_now=True)
    author = models.ForeignKey(User, on_delete=models.CASCADE)
    location = models.CharField(max_length=15, choices=Locations, default='Gujrat')
    seed = models.CharField(max_length=100, default="")
    fertilizers = models.CharField(max_length=15, choices=Ferti, default='Urea')
    treatment_details = models.TextField(default="")
    category = models.CharField(max_length=15, choices=Category_CHOICES, default='Grains')
    Sowing_date = models.DateField(default=timezone.now)
    Harvest_date = models.DateField(default=timezone.now)
    # NOTE(review): IntegerField with default="" is suspect — Django expects an
    # int here; confirm against the generated migrations.
    area = models.IntegerField(default="")
    area_type = models.CharField(max_length=15, choices=area_type, default='Bigha')
    net_profit_in_INR_rupee = models.IntegerField(default="")
    image = models.ImageField(default='kisan.jpg', upload_to='Story_pics')

    def save(self, *args, **kwargs):
        """Normalise area to square metres, save, then cap the image at 300x300 px."""
        self.area = findarea(self.area, self.area_type)
        super(Post, self).save(*args, **kwargs)
        img = Image.open(self.image.path)
        if img.height > 300 or img.width > 300:
            img.thumbnail((300, 300))
            img.save(self.image.path)

    def __str__(self):
        return self.title

    def get_absolute_url(self):
        return reverse('post-detail', kwargs={'pk': self.pk})

    def likelist(self):
        """Return the user ids of everyone who liked this post."""
        liked_by = [like.user_id.id for like in self.likes.all()]
        print(liked_by)
        return liked_by
class Like(models.Model):
    """One user's like on one post; Post gains a reverse `likes` manager."""

    user_id = models.ForeignKey(User, on_delete=models.CASCADE)
    Post_id = models.ForeignKey(Post, on_delete=models.CASCADE, related_name="likes")
class Query(models.Model):
    """A farmer's question to the experts, optionally with an image."""

    user_id = models.ForeignKey(User, on_delete=models.CASCADE)
    category = models.CharField(max_length=15, choices=Category_CHOICES, default='Grains')
    image = models.ImageField(default='kisan.jpg', upload_to='query_pics')
    Tell_your_Query = models.TextField(default="")
    is_answer = models.BooleanField(default=False)

    def get_absolute_url(self):
        # NOTE(review): 'post-detail' is reversed elsewhere with a pk kwarg;
        # confirm this no-argument reverse actually resolves.
        return reverse('post-detail')
class Query_Answer(models.Model):
    """An expert's reply to a single Query."""

    Query_id = models.ForeignKey(Query, on_delete=models.CASCADE)
    Expert_id = models.ForeignKey(Expert, on_delete=models.CASCADE)
    Query_Reply = models.TextField(default="")
|
{"/Farmer_portal/Farmer_portal/blog/urls.py": ["/Farmer_portal/Farmer_portal/blog/views.py"], "/Farmer_portal/Farmer_portal/Expert/views.py": ["/Farmer_portal/Farmer_portal/Expert/forms.py"], "/Farmer_portal/Farmer_portal/blog/views.py": ["/Farmer_portal/Farmer_portal/blog/models.py"], "/Farmer_portal/Farmer_portal/Expert/forms.py": ["/Farmer_portal/Farmer_portal/Expert/models.py"], "/Farmer_portal/Farmer_portal/blog/admin.py": ["/Farmer_portal/Farmer_portal/blog/models.py"]}
|
10,380
|
krinish291/Farmer_portal
|
refs/heads/master
|
/Farmer_portal/Farmer_portal/Expert/migrations/0003_auto_20200202_2110.py
|
# Generated by Django 2.1.5 on 2020-02-02 15:40
from django.db import migrations, models


class Migration(migrations.Migration):
    """Widen Expert.description from an EmailField to a TextField."""

    dependencies = [
        ('Expert', '0002_auto_20200202_2059'),
    ]

    operations = [
        migrations.AlterField(
            model_name='expert',
            name='description',
            field=models.TextField(default=''),
        ),
    ]
|
{"/Farmer_portal/Farmer_portal/blog/urls.py": ["/Farmer_portal/Farmer_portal/blog/views.py"], "/Farmer_portal/Farmer_portal/Expert/views.py": ["/Farmer_portal/Farmer_portal/Expert/forms.py"], "/Farmer_portal/Farmer_portal/blog/views.py": ["/Farmer_portal/Farmer_portal/blog/models.py"], "/Farmer_portal/Farmer_portal/Expert/forms.py": ["/Farmer_portal/Farmer_portal/Expert/models.py"], "/Farmer_portal/Farmer_portal/blog/admin.py": ["/Farmer_portal/Farmer_portal/blog/models.py"]}
|
10,381
|
krinish291/Farmer_portal
|
refs/heads/master
|
/Farmer_portal/Farmer_portal/Expert/migrations/0004_auto_20200311_1930.py
|
# Generated by Django 3.0.4 on 2020-03-11 14:00
from django.db import migrations, models


class Migration(migrations.Migration):
    """Rename Expert.File to File_Verify and make usernames unique."""

    dependencies = [
        ('Expert', '0003_auto_20200202_2110'),
    ]

    operations = [
        migrations.RenameField(
            model_name='expert',
            old_name='File',
            new_name='File_Verify',
        ),
        migrations.AlterField(
            model_name='expert',
            name='username',
            field=models.CharField(default='', max_length=100, unique=True),
        ),
    ]
|
{"/Farmer_portal/Farmer_portal/blog/urls.py": ["/Farmer_portal/Farmer_portal/blog/views.py"], "/Farmer_portal/Farmer_portal/Expert/views.py": ["/Farmer_portal/Farmer_portal/Expert/forms.py"], "/Farmer_portal/Farmer_portal/blog/views.py": ["/Farmer_portal/Farmer_portal/blog/models.py"], "/Farmer_portal/Farmer_portal/Expert/forms.py": ["/Farmer_portal/Farmer_portal/Expert/models.py"], "/Farmer_portal/Farmer_portal/blog/admin.py": ["/Farmer_portal/Farmer_portal/blog/models.py"]}
|
10,382
|
krinish291/Farmer_portal
|
refs/heads/master
|
/Farmer_portal/Farmer_portal/blog/migrations/0003_query_is_answer.py
|
# Generated by Django 3.0.4 on 2020-03-11 16:17
from django.db import migrations, models


class Migration(migrations.Migration):
    """Add the Query.is_answer flag (default False)."""

    dependencies = [
        ('blog', '0002_query_answer'),
    ]

    operations = [
        migrations.AddField(
            model_name='query',
            name='is_answer',
            field=models.BooleanField(default=False),
        ),
    ]
|
{"/Farmer_portal/Farmer_portal/blog/urls.py": ["/Farmer_portal/Farmer_portal/blog/views.py"], "/Farmer_portal/Farmer_portal/Expert/views.py": ["/Farmer_portal/Farmer_portal/Expert/forms.py"], "/Farmer_portal/Farmer_portal/blog/views.py": ["/Farmer_portal/Farmer_portal/blog/models.py"], "/Farmer_portal/Farmer_portal/Expert/forms.py": ["/Farmer_portal/Farmer_portal/Expert/models.py"], "/Farmer_portal/Farmer_portal/blog/admin.py": ["/Farmer_portal/Farmer_portal/blog/models.py"]}
|
10,383
|
krinish291/Farmer_portal
|
refs/heads/master
|
/Farmer_portal/Farmer_portal/blog/urls.py
|
from django.urls import path
from django.conf.urls import url
from .views import (
PostListView,
PostDetailView,
PostCreateView,
QueryCreateView,
PostUpdateView,
PostDeleteView,
UserPostListView,
typecon,
price,
querygenerate,
addquery,
myQueryans,
)
from . import views
urlpatterns = [
path('', PostListView.as_view(), name='blog-home'),
path('user/<str:username>', UserPostListView.as_view(), name='user-posts'),
path('post/<int:pk>/', PostDetailView.as_view(), name='post-detail'),
path('post/new/', PostCreateView.as_view(), name='post-create'),
path('Query/new/', QueryCreateView.as_view(), name='Query-create'),
path('post/<int:pk>/update/', PostUpdateView.as_view(), name='post-update'),
path('post/<int:pk>/delete/', PostDeleteView.as_view(), name='post-delete'),
path('about/', views.about, name='blog-about'),
url(r'^like/$',views.like,name='like'),
url(r'^disLike/$',views.disLike,name='disLike'),
url(r'^filter/$', typecon),
url(r'^myQueryans/$', myQueryans),
url(r'^price/$', price),
url(r'^addquery/$', addquery),
url(r'^querygenerate/$', querygenerate),
]
|
{"/Farmer_portal/Farmer_portal/blog/urls.py": ["/Farmer_portal/Farmer_portal/blog/views.py"], "/Farmer_portal/Farmer_portal/Expert/views.py": ["/Farmer_portal/Farmer_portal/Expert/forms.py"], "/Farmer_portal/Farmer_portal/blog/views.py": ["/Farmer_portal/Farmer_portal/blog/models.py"], "/Farmer_portal/Farmer_portal/Expert/forms.py": ["/Farmer_portal/Farmer_portal/Expert/models.py"], "/Farmer_portal/Farmer_portal/blog/admin.py": ["/Farmer_portal/Farmer_portal/blog/models.py"]}
|
10,384
|
krinish291/Farmer_portal
|
refs/heads/master
|
/Farmer_portal/Farmer_portal/Expert/views.py
|
from django.shortcuts import render
#from django.shortcuts import render_to_response
from django.views.generic import TemplateView
from django.http import HttpResponseRedirect
from django.contrib import auth
from django.template.context_processors import csrf
from Expert.models import Expert
from blog.models import Query,Query_Answer
from .forms import ExpertRegisterForm,UserUpdateForm
from django.shortcuts import redirect
from django.contrib.auth.decorators import login_required
from django.contrib import messages
def auth_view(request):
    """Authenticate an expert (or the hard-coded admin) from POSTed credentials.

    Redirects to the admin verification page, the expert dashboard, or back to
    the login page on failure.
    """
    username = request.POST.get('username', '')
    password = request.POST.get('password', '')
    try:
        # BUG FIX: the original used `or`, so anyone whose username OR password
        # happened to be 'admin' reached the admin verification page.
        if username == 'admin' and password == 'admin':
            request.session['username'] = username
            return redirect('/Expert/expertverify/')
        user = Expert.objects.get(username=username)
        # NOTE(review): passwords are stored and compared in plain text; they
        # should be hashed (e.g. django.contrib.auth.hashers).
        if user.password == password:
            if user.is_valid:
                request.session['username'] = username
                return HttpResponseRedirect('/Expert/loggedin/')
            messages.info(request, 'You are not verified ')
            return HttpResponseRedirect('/Expert/login/')
        return HttpResponseRedirect('/Expert/login/')
    except Expert.DoesNotExist:
        return HttpResponseRedirect('/Expert/login/')
def loggedin(request):
    """Show the logged-in expert the unanswered queries in their category."""
    # BUG FIX: the original indexed request.session['username'] directly, which
    # raises KeyError when nobody is logged in; .get() lets the redirect fire.
    username = request.session.get('username')
    if username is None:
        return redirect('/Expert/login/')
    expert = Expert.objects.get(username=username)
    queries = Query.objects.filter(category=expert.category, is_answer=False)
    return render(request, 'loggedin.html', {'queries': queries})
def invalidlogin(request):
    """Render the invalid-login error page."""
    return render(request, 'invalidlogin.html')
def logout(request):
    """Drop the session user (if any) and return to the login page."""
    # Both original branches rendered login.html; pop() with a default
    # collapses the membership test and the delete into one step.
    request.session.pop('username', None)
    return render(request, 'login.html')
def login(request):
    """Render the expert login form with a CSRF token in the context."""
    context = {}
    context.update(csrf(request))
    return render(request, 'login.html', context)
def userdoesnotexist(request):
    """Render the 'user does not exist' page with a CSRF token."""
    context = {}
    context.update(csrf(request))
    return render(request, 'userdoesnotexist.html', context)
def register(request):
    """Create a new Expert account from the registration form.

    On a valid POST, saves the expert and redirects to the login page; on an
    invalid POST, re-renders the bound form so field errors are shown.
    """
    # Leftover debug prints (request.FILES, is_valid, errors) removed.
    if request.method == 'POST':
        form = ExpertRegisterForm(request.POST, request.FILES)
        if form.is_valid():
            form.save()
            username = form.cleaned_data.get('username')
            messages.success(request, f'{username}! ,Your account has been created! You are now able to log in')
            return redirect('/Expert/login/')
    else:
        form = ExpertRegisterForm()
    return render(request, 'Expert/register.html', {'form': form})
def submitanswer(request):
    """Save the logged-in expert's reply to a query and mark the query answered."""
    # Debug print removed; the query is also fetched once instead of twice.
    reply_text = request.POST.get("reply", '')
    query_pk = request.POST.get("qid", '')
    expert = Expert.objects.get(username=request.session['username'])
    query = Query.objects.get(id=query_pk)
    Query_Answer(Query_id=query, Query_Reply=reply_text, Expert_id=expert).save()
    query.is_answer = True
    query.save()
    return redirect('/Expert/loggedin')
def allansQuery(request):
    """List every answer the logged-in expert has given so far."""
    expert = Expert.objects.get(username=request.session['username'])
    answers = Query_Answer.objects.filter(Expert_id=expert.id)
    return render(request, 'Past_ans.html', {'que': answers})
def updateQueryans(request):
    """Show the answer-edit form, stashing which answer is being edited.

    The original if/else on request.method had byte-for-byte identical bodies,
    so a single path reproduces the behavior exactly.
    """
    current_answer = request.POST.get("qans", '')
    request.session['qid'] = request.POST.get("qid", '')
    return render(request, 'Expert/Update_Query.html', {'ans': current_answer})
def updatedans(request):
    """Persist an edited answer whose id was stashed in the session."""
    new_reply = request.POST.get("update_ans", '')
    answer = Query_Answer.objects.get(id=request.session['qid'])
    answer.Query_Reply = new_reply
    answer.save()
    return redirect('/Expert/allansQuery/')
def expertverify(request):
    """Admin page: list expert accounts still awaiting verification."""
    context = {
        'experts': Expert.objects.filter(is_valid=False),
        'username': request.session['username'],
    }
    return render(request, 'desktop.html', context)
def varify(request):
    """Approve ("success") or reject ("reject") a pending expert account.

    NOTE(review): the name is a typo for "verify" but is kept — URL configs may
    reference it.  Leftover debug print removed.
    """
    decision = request.POST.get("message", '')
    expert_pk = request.POST.get("eid", '')
    if decision == "success":
        expert = Expert.objects.get(id=expert_pk)
        expert.is_valid = True
        expert.save()
    elif decision == "reject":
        Expert.objects.get(id=expert_pk).delete()
    return redirect('/Expert/expertverify/')
|
{"/Farmer_portal/Farmer_portal/blog/urls.py": ["/Farmer_portal/Farmer_portal/blog/views.py"], "/Farmer_portal/Farmer_portal/Expert/views.py": ["/Farmer_portal/Farmer_portal/Expert/forms.py"], "/Farmer_portal/Farmer_portal/blog/views.py": ["/Farmer_portal/Farmer_portal/blog/models.py"], "/Farmer_portal/Farmer_portal/Expert/forms.py": ["/Farmer_portal/Farmer_portal/Expert/models.py"], "/Farmer_portal/Farmer_portal/blog/admin.py": ["/Farmer_portal/Farmer_portal/blog/models.py"]}
|
10,385
|
krinish291/Farmer_portal
|
refs/heads/master
|
/Farmer_portal/Farmer_portal/blog/views.py
|
from django.shortcuts import render,get_object_or_404
from django.contrib.auth.mixins import LoginRequiredMixin, UserPassesTestMixin
# import json to load json data to python dictionary
import json
# urllib.request to make a request to api
import urllib.request
from django.http import JsonResponse
from django.template.context_processors import csrf
from .models import Post,Query_Answer,Query
from .models import Like
from datetime import datetime,date
from django.contrib import messages
from django.contrib.auth.models import User
from django.views.generic import (
ListView,
DetailView,
CreateView,
UpdateView,
DeleteView
)
#from selenium import webdriver
#from bs4 import BeautifulSoup
#from selenium.webdriver.chrome.options import Options
#options = Options()
#options.add_argument('--headless')
#options.add_argument('--disable-gpu')
def typecon(request):
    """Filter the story feed by category and/or location ('ALL' means no filter)."""
    cat = request.POST.get('cat', '')
    loc = request.POST.get('location', '')
    # The original built four near-identical context dicts, one per branch;
    # chaining lazy queryset filters reproduces every combination.
    posts = Post.objects.all()
    if loc != 'ALL':
        posts = posts.filter(location=loc)
    if cat != 'ALL':
        posts = posts.filter(category=cat)
    context = {
        'posts': posts,
        'data': {},
        # date(today.year, today.month, today.day) is simply today's date.
        'today_date': date.today(),
    }
    return render(request, 'blog/home.html', context)
class PostListView(ListView):
    """Paginated feed of all posts, newest first."""

    model = Post
    template_name = 'blog/home.html'  # <app>/<model>_<viewtype>.html
    context_object_name = 'posts'
    ordering = ['-id']
    paginate_by = 5

    def get_context_data(self, **kwargs):
        """Add today's date so templates can compare against harvest dates."""
        context = super().get_context_data(**kwargs)
        today = date.today()
        context['today_date'] = date(today.year, today.month, today.day)
        return context
class UserPostListView(ListView):
    """Paginated feed of a single user's posts, newest first."""

    model = Post
    template_name = 'blog/user_posts.html'  # <app>/<model>_<viewtype>.html
    context_object_name = 'posts'
    paginate_by = 5

    def get_queryset(self):
        author = get_object_or_404(User, username=self.kwargs.get('username'))
        return Post.objects.filter(author=author).order_by('-id')

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        today = date.today()
        context['today_date'] = date(today.year, today.month, today.day)
        return context
class PostDetailView(DetailView):
    """Detail page for one post, with today's date for template comparisons."""

    model = Post

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        today = date.today()
        context['today_date'] = date(today.year, today.month, today.day)
        return context
class PostCreateView(LoginRequiredMixin, CreateView):
    """Create a post; the logged-in user becomes its author."""

    model = Post
    fields = ['title', 'location','seed','fertilizers','treatment_details','category','Sowing_date','Harvest_date','area','area_type','net_profit_in_INR_rupee','Tell_your_story','image']

    def form_valid(self, form):
        # Stamp the author before the default save runs.
        form.instance.author = self.request.user
        return super().form_valid(form)
class PostUpdateView(LoginRequiredMixin, UserPassesTestMixin, UpdateView):
    """Edit a post; only its author may pass the test."""

    model = Post
    fields = ['title','location','seed','fertilizers','treatment_details','category','Sowing_date','Harvest_date','area','area_type','net_profit_in_INR_rupee', 'Tell_your_story','image']

    def form_valid(self, form):
        form.instance.author = self.request.user
        return super().form_valid(form)

    def test_func(self):
        # UserPassesTestMixin gate: author-only edits.
        return self.request.user == self.get_object().author
class PostDeleteView(LoginRequiredMixin, UserPassesTestMixin, DeleteView):
    """Delete a post; only its author may pass the test."""

    model = Post
    success_url = '/'

    def test_func(self):
        # UserPassesTestMixin gate: author-only deletes.
        return self.request.user == self.get_object().author
def about(request):
    """Show current weather for a POSTed city via the OpenWeatherMap API."""
    data = {}
    if request.method == 'POST':
        city = request.POST.get('city', '')
        # SECURITY/CONFIG NOTE: the API key is hard-coded in the URL; it should
        # live in settings or an environment variable, and may be expired.
        source = urllib.request.urlopen(
            "http://api.openweathermap.org/data/2.5/weather?q=" + city + "&APPID=47ac36fecbf7e55eee286bef7823f521").read()
        list_of_data = json.loads(source)
        # BUG FIX: the original tested `source == None`, but .read() returns
        # bytes, never None, so the "not found" branch was unreachable and an
        # unknown city crashed with KeyError.  Guard on the expected keys.
        if 'sys' in list_of_data and 'main' in list_of_data:
            data = {
                "country_code": str(list_of_data['sys']['country']),
                "coordinate": str(list_of_data['coord']['lon']) + ' '
                + str(list_of_data['coord']['lat']),
                "temp": str(list_of_data['main']['temp']) + 'k',
                "pressure": str(list_of_data['main']['pressure']),
                "humidity": str(list_of_data['main']['humidity']),
            }
        else:
            data = "not found"
    context = {
        'data': data,
    }
    return render(request, 'blog/about.html', context)
def like(request):
    """AJAX endpoint: record a like by the current user on the given post."""
    post = Post.objects.get(id=request.GET.get('postid', ''))
    print(post.likes.all())
    Like(user_id=request.user, Post_id=post).save()
    return JsonResponse("success", safe=False)
def disLike(request):
    """AJAX endpoint: remove the current user's like from the given post."""
    print("ret")
    post = Post.objects.get(id=request.GET.get('postid', ''))
    Like.objects.filter(user_id=request.user, Post_id=post).delete()
    return JsonResponse("success", safe=False)
class QueryCreateView(LoginRequiredMixin, CreateView):
    """Create a Query; the logged-in user becomes its owner."""

    model = Query
    fields = ['category','image','Tell_your_Query']

    def form_valid(self, form):
        form.instance.user_id = self.request.user
        return super().form_valid(form)
def price(request):
    """Render the three mandi price tables scraped by Rate.

    NOTE(review): the Rate class is commented out below (selenium scraper), so
    calling this view currently raises NameError until it is restored.
    """
    scraper = Rate()
    context = {
        'p1_list': scraper.get_Rate1(),
        'p2_list': scraper.get_Rate2(),
        'p3_list': scraper.get_Rate3(),
    }
    return render(request, 'blog/price.html', context)
def querygenerate(request):
    """Render the new-query form with a CSRF token."""
    context = {}
    context.update(csrf(request))
    return render(request, 'myquery.html', context)
def addquery(request):
    """Create a Query from the posted form, then show the user's open queries."""
    # NOTE: the POST keys keep the original (misspelled) names the template uses.
    new_query = Query(
        user_id=request.user,
        category=request.POST.get('catagorys', ''),
        image=request.FILES.get('myimage'),
        Tell_your_Query=request.POST.get('discription', ''),
    )
    new_query.save()
    open_queries = Query.objects.filter(user_id=request.user.id, is_answer=False)
    context = {
        'querys': open_queries,
    }
    context.update(csrf(request))
    return render(request, 'Querys.html', context)
def myQueryans(request):
    """Show the user's open queries plus the answers to their resolved ones."""
    open_queries = Query.objects.filter(user_id=request.user.id, is_answer=False)
    answered = Query.objects.filter(user_id=request.user.id, is_answer=True)
    # Keep this user's answers, preserving Query_Answer iteration order.
    ans_list = [reply for reply in Query_Answer.objects.all()
                for mine in answered if reply.Query_id.id == mine.id]
    print(ans_list)
    context = {
        'querys': open_queries,
        'ans_query': ans_list,
    }
    context.update(csrf(request))
    return render(request, 'Privious_query.html', context)
'''class Rate():
def __init__(self):
self.commodity = ""
self.center = ""
self.price = ""
def get_Rate1(self):
driver = webdriver.Chrome(executable_path = r'C:\chromedriver.exe',chrome_options=options)
url = 'https://www.commodityonline.com/mandiprices/all/gujarat/0/12'
# download html page
driver.get(url)
# print driver.page_source
# create soup
soup = BeautifulSoup(driver.page_source, 'lxml')
div = soup.find('div', class_="boder_left_sp_bottom")
row1=div.find_all('div',class_="row")
#print(row1)
#print("ROW 2\n")
#row2=row1[4]
#print(row1[4])
#row2=row1.nextSibling
#print(row2)
#row2=div.find_all('div',class_="dt_ta_14")
#p=div.find_all('div',class_"dt_ta_14")
#print("parth \n")
#print(row2)
c_list = []
m_list = []
p_list = []
Rate_list = []
n=0
for a in div.find_all('div',class_="dt_ta_14"):
if(n==0):
n=1
p_list.append(a.text)
#print(a.text)
else:
n=0
#for a in div.find_all('div',class_="dt_ta_14"):
# p_list.append(a.text)
#print(p_list)
for a in div.find_all('div',class_="dt_ta_10"):
c_list.append(a.text)
# print(a.text)
for a in div.find_all('div',class_="dt_ta_11"):
m_list.append(a.text)
# print(a.text)
#print(p_list)
#print(c_list)
#print(m_list)
for i in range(0,36):
new_item = Rate()
new_item.commodity = c_list[i]
new_item.center = m_list[i]
new_item.price = p_list[i]
Rate_list.append(new_item)
for one_player in Rate_list:
print(one_player.commodity)
print(one_player.center)
print(one_player.price)
print("\n")
driver.quit()
return Rate_list
def get_Rate2(self):
driver = webdriver.Chrome(executable_path = r'C:\chromedriver.exe',chrome_options=options)
url = 'https://www.commodityonline.com/mandiprices/all/gujarat/0/12/36'
# download html page
driver.get(url)
# print driver.page_source
# create soup
soup = BeautifulSoup(driver.page_source, 'lxml')
div = soup.find('div', class_="boder_left_sp_bottom")
row1=div.find_all('div',class_="row")
#print(row1)
#print("ROW 2\n")
#row2=row1[4]
#print(row1[4])
#row2=row1.nextSibling
#print(row2)
#row2=div.find_all('div',class_="dt_ta_14")
#p=div.find_all('div',class_"dt_ta_14")
#print("parth \n")
#print(row2)
c_list = []
m_list = []
p_list = []
Rate_list = []
n=0
for a in div.find_all('div',class_="dt_ta_14"):
if(n==0):
n=1
p_list.append(a.text)
#print(a.text)
else:
n=0
#for a in div.find_all('div',class_="dt_ta_14"):
# p_list.append(a.text)
#print(p_list)
for a in div.find_all('div',class_="dt_ta_10"):
c_list.append(a.text)
# print(a.text)
for a in div.find_all('div',class_="dt_ta_11"):
m_list.append(a.text)
# print(a.text)
#print(p_list)
#print(c_list)
#print(m_list)
for i in range(0,36):
new_item = Rate()
new_item.commodity = c_list[i]
new_item.center = m_list[i]
new_item.price = p_list[i]
Rate_list.append(new_item)
for one_player in Rate_list:
print(one_player.commodity)
print(one_player.center)
print(one_player.price)
print("\n")
driver.quit()
return Rate_list
def get_Rate3(self):
driver = webdriver.Chrome(executable_path = r'C:\chromedriver.exe',chrome_options=options)
url = 'https://www.commodityonline.com/mandiprices/all/gujarat/0/12/72'
# download html page
driver.get(url)
# print driver.page_source
# create soup
soup = BeautifulSoup(driver.page_source, 'lxml')
div = soup.find('div', class_="boder_left_sp_bottom")
row1=div.find_all('div',class_="row")
#print(row1)
#print("ROW 2\n")
#row2=row1[4]
#print(row1[4])
#row2=row1.nextSibling
#print(row2)
#row2=div.find_all('div',class_="dt_ta_14")
#p=div.find_all('div',class_"dt_ta_14")
#print("parth \n")
#print(row2)
c_list = []
m_list = []
p_list = []
Rate_list = []
n=0
for a in div.find_all('div',class_="dt_ta_14"):
if(n==0):
n=1
p_list.append(a.text)
#print(a.text)
else:
n=0
#for a in div.find_all('div',class_="dt_ta_14"):
# p_list.append(a.text)
#print(p_list)
for a in div.find_all('div',class_="dt_ta_10"):
c_list.append(a.text)
# print(a.text)
for a in div.find_all('div',class_="dt_ta_11"):
m_list.append(a.text)
# print(a.text)
#print(p_list)
#print(c_list)
#print(m_list)
for i in range(0,10):
new_item = Rate()
new_item.commodity = c_list[i]
new_item.center = m_list[i]
new_item.price = p_list[i]
Rate_list.append(new_item)
for one_player in Rate_list:
print(one_player.commodity)
print(one_player.center)
print(one_player.price)
print("\n")
driver.quit()
return Rate_list'''
|
{"/Farmer_portal/Farmer_portal/blog/urls.py": ["/Farmer_portal/Farmer_portal/blog/views.py"], "/Farmer_portal/Farmer_portal/Expert/views.py": ["/Farmer_portal/Farmer_portal/Expert/forms.py"], "/Farmer_portal/Farmer_portal/blog/views.py": ["/Farmer_portal/Farmer_portal/blog/models.py"], "/Farmer_portal/Farmer_portal/Expert/forms.py": ["/Farmer_portal/Farmer_portal/Expert/models.py"], "/Farmer_portal/Farmer_portal/blog/admin.py": ["/Farmer_portal/Farmer_portal/blog/models.py"]}
|
10,386
|
krinish291/Farmer_portal
|
refs/heads/master
|
/Farmer_portal/Farmer_portal/Expert/forms.py
|
from django import forms
from django.contrib.auth.models import User
from django.contrib.auth.forms import UserCreationForm
from .models import Expert
from blog.models import Query_Answer
from django.forms import ModelForm
class ExpertRegisterForm(ModelForm):
    """Registration form for Expert accounts.

    NOTE(review): the password is a plain CharField saved as-is on the model;
    it should be hashed before storage.
    """

    password = forms.CharField(max_length=32, widget=forms.PasswordInput)

    class Meta:
        model = Expert
        fields = ['username', 'password','Dp','category', 'email','File_Verify','description']
class UserUpdateForm(forms.ModelForm):
    """Form for editing the reply text of an existing Query_Answer."""

    class Meta:
        model = Query_Answer
        fields = ['Query_Reply']
|
{"/Farmer_portal/Farmer_portal/blog/urls.py": ["/Farmer_portal/Farmer_portal/blog/views.py"], "/Farmer_portal/Farmer_portal/Expert/views.py": ["/Farmer_portal/Farmer_portal/Expert/forms.py"], "/Farmer_portal/Farmer_portal/blog/views.py": ["/Farmer_portal/Farmer_portal/blog/models.py"], "/Farmer_portal/Farmer_portal/Expert/forms.py": ["/Farmer_portal/Farmer_portal/Expert/models.py"], "/Farmer_portal/Farmer_portal/blog/admin.py": ["/Farmer_portal/Farmer_portal/blog/models.py"]}
|
10,387
|
krinish291/Farmer_portal
|
refs/heads/master
|
/Farmer_portal/Farmer_portal/blog/migrations/0002_query_answer.py
|
# Generated by Django 2.1.5 on 2020-02-02 16:13
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated migration: creates the Query_Answer table linking an
    Expert's reply to a farmer's Query (FKs to Expert.Expert and blog.Query)."""
    dependencies = [
        ('Expert', '0003_auto_20200202_2110'),
        ('blog', '0001_initial'),
    ]
    operations = [
        migrations.CreateModel(
            name='Query_Answer',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('Query_Reply', models.TextField(default='')),
                ('Expert_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='Expert.Expert')),
                ('Query_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='blog.Query')),
            ],
        ),
    ]
|
{"/Farmer_portal/Farmer_portal/blog/urls.py": ["/Farmer_portal/Farmer_portal/blog/views.py"], "/Farmer_portal/Farmer_portal/Expert/views.py": ["/Farmer_portal/Farmer_portal/Expert/forms.py"], "/Farmer_portal/Farmer_portal/blog/views.py": ["/Farmer_portal/Farmer_portal/blog/models.py"], "/Farmer_portal/Farmer_portal/Expert/forms.py": ["/Farmer_portal/Farmer_portal/Expert/models.py"], "/Farmer_portal/Farmer_portal/blog/admin.py": ["/Farmer_portal/Farmer_portal/blog/models.py"]}
|
10,388
|
krinish291/Farmer_portal
|
refs/heads/master
|
/Farmer_portal/Farmer_portal/blog/migrations/0001_initial.py
|
# Generated by Django 2.1.5 on 2020-02-02 14:59
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
    """Auto-generated initial migration for the blog app: creates the Post,
    Query and Like tables (Like gets its FKs added via AddField below)."""
    initial = True
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]
    operations = [
        migrations.CreateModel(
            name='Like',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
            ],
        ),
        migrations.CreateModel(
            name='Post',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=100)),
                ('Tell_your_story', models.TextField(default='')),
                ('date_posted', models.DateField(auto_now=True)),
                ('location', models.CharField(choices=[('Gujrat', (('Rajkot', 'Rajkot'), ('Mehsana', 'Mehsana'), ('Ahmedabad', 'Ahmedabad'), ('Anand', 'Anand'), ('Dahod', 'Dahod'), ('Kheda', 'Kheda'), ('Vadodara', 'Vadodara'), ('Panchmahal', 'Panchmahal'), ('Aravalli', 'Aravalli'), ('Banaskantha', 'Banaskantha'), ('Gandhinagar', 'Gandhinagar'), ('Patan', 'Patan'), ('Amreli', 'Amreli'), ('Bhavnagar', 'Bhavnagar'), ('Jamnagar', 'Jamnagar'), ('Junagadh', 'Junagadh'), ('Morbi', 'Morbi'), ('Sabarkantha', 'Sabarkantha'), ('Bharuch', 'Bharuch'), ('Dang', 'Dang'), ('Narmada', 'Narmada'), ('Navsari', 'Navsari'), ('Surat', 'Surat'), ('Valsad', 'Valsad')))], default='Gujrat', max_length=15)),
                ('seed', models.CharField(default='', max_length=100)),
                ('fertilizers', models.CharField(choices=[('Nofertilizer', 'Nofertilizer'), ('bio-fertilizer', 'bio-fertilizer'), ('Urea', 'Urea'), ('N P K', (('NPK 19-19-19', 'NPK 19-19-19'), ('NPK 20-20-20', 'NPK 20-20-20'), ('NPK 20-20-0', 'NPK 20-20-0'), ('NPK 46-0-0', 'NPK 46-0-0'))), ('D A P', (('DAP 18-46-0', 'DAP 18-46-0'),))], default='Urea', max_length=15)),
                ('treatment_details', models.TextField(default='')),
                ('category', models.CharField(choices=[('Grains', 'Grains'), ('pulses', 'pulses'), ('Vegetables', 'Vegetables'), ('Fruits', 'Fruits'), ('Other', 'Other')], default='Grains', max_length=15)),
                ('Sowing_date', models.DateField(default=django.utils.timezone.now)),
                ('Harvest_date', models.DateField(default=django.utils.timezone.now)),
                ('area', models.IntegerField(default='')),
                ('area_type', models.CharField(choices=[('Bigha', 'Bigha'), ('Guntha', 'Guntha'), ('Acre', 'Acre'), ('Hectare', 'Hectare'), ('Square Meter', 'Square Meter')], default='Bigha', max_length=15)),
                ('net_profit_in_INR_rupee', models.IntegerField(default='')),
                ('image', models.ImageField(default='kisan.jpg', upload_to='Story_pics')),
                ('author', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='Query',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('category', models.CharField(choices=[('Grains', 'Grains'), ('pulses', 'pulses'), ('Vegetables', 'Vegetables'), ('Fruits', 'Fruits'), ('Other', 'Other')], default='Grains', max_length=15)),
                ('image', models.ImageField(default='kisan.jpg', upload_to='query_pics')),
                ('Tell_your_Query', models.TextField(default='')),
                ('user_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.AddField(
            model_name='like',
            name='Post_id',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='likes', to='blog.Post'),
        ),
        migrations.AddField(
            model_name='like',
            name='user_id',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
        ),
    ]
|
{"/Farmer_portal/Farmer_portal/blog/urls.py": ["/Farmer_portal/Farmer_portal/blog/views.py"], "/Farmer_portal/Farmer_portal/Expert/views.py": ["/Farmer_portal/Farmer_portal/Expert/forms.py"], "/Farmer_portal/Farmer_portal/blog/views.py": ["/Farmer_portal/Farmer_portal/blog/models.py"], "/Farmer_portal/Farmer_portal/Expert/forms.py": ["/Farmer_portal/Farmer_portal/Expert/models.py"], "/Farmer_portal/Farmer_portal/blog/admin.py": ["/Farmer_portal/Farmer_portal/blog/models.py"]}
|
10,389
|
krinish291/Farmer_portal
|
refs/heads/master
|
/Farmer_portal/Farmer_portal/blog/admin.py
|
from django.contrib import admin
from .models import Post
from .models import Like
from .models import Query
from .models import Query_Answer
# Register your models here.
# Expose every blog model in the Django admin for inspection/moderation.
admin.site.register(Post)
admin.site.register(Like)
admin.site.register(Query)
admin.site.register(Query_Answer)
|
{"/Farmer_portal/Farmer_portal/blog/urls.py": ["/Farmer_portal/Farmer_portal/blog/views.py"], "/Farmer_portal/Farmer_portal/Expert/views.py": ["/Farmer_portal/Farmer_portal/Expert/forms.py"], "/Farmer_portal/Farmer_portal/blog/views.py": ["/Farmer_portal/Farmer_portal/blog/models.py"], "/Farmer_portal/Farmer_portal/Expert/forms.py": ["/Farmer_portal/Farmer_portal/Expert/models.py"], "/Farmer_portal/Farmer_portal/blog/admin.py": ["/Farmer_portal/Farmer_portal/blog/models.py"]}
|
10,390
|
krinish291/Farmer_portal
|
refs/heads/master
|
/Farmer_portal/Farmer_portal/Expert/migrations/0002_auto_20200202_2059.py
|
# Generated by Django 2.1.5 on 2020-02-02 15:29
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: drops the obsolete 'gunjan' model and adds
    Expert.is_valid (presumably an approval flag -- defaults to False)."""
    dependencies = [
        ('Expert', '0001_initial'),
    ]
    operations = [
        migrations.DeleteModel(
            name='gunjan',
        ),
        migrations.AddField(
            model_name='expert',
            name='is_valid',
            field=models.BooleanField(default=False),
        ),
    ]
|
{"/Farmer_portal/Farmer_portal/blog/urls.py": ["/Farmer_portal/Farmer_portal/blog/views.py"], "/Farmer_portal/Farmer_portal/Expert/views.py": ["/Farmer_portal/Farmer_portal/Expert/forms.py"], "/Farmer_portal/Farmer_portal/blog/views.py": ["/Farmer_portal/Farmer_portal/blog/models.py"], "/Farmer_portal/Farmer_portal/Expert/forms.py": ["/Farmer_portal/Farmer_portal/Expert/models.py"], "/Farmer_portal/Farmer_portal/blog/admin.py": ["/Farmer_portal/Farmer_portal/blog/models.py"]}
|
10,391
|
krinish291/Farmer_portal
|
refs/heads/master
|
/Farmer_portal/Farmer_portal/Expert/models.py
|
from django.db import models
from django.contrib.auth.models import User
from _queue import SimpleQueue
Category_CHOICES = [
('Grains','Grains'),
('pulses','pulses'),
('Vegetables', 'Vegetables'),
('Fruits','Fruits'),
('Other','Other'),
]
class Expert(models.Model):
    """Domain-expert account, kept separate from django.contrib.auth User."""
    username = models.CharField(max_length=100,default='',unique=True)
    # NOTE(review): plain CharField -- appears to hold the raw password rather
    # than a hash; confirm before production use.
    password = models.CharField(max_length=20,default='')
    Dp = models.ImageField(default='kisan.jpg', upload_to='Expert_pics')  # display/profile picture
    category= models.CharField(max_length=15, choices=Category_CHOICES, default='Grains')
    email = models.EmailField(max_length=50,default='' )
    File_Verify =models.FileField(upload_to='documents/')  # credential document uploaded for verification
    description = models.TextField(default="")
    # presumably flipped to True by the verification flow -- TODO confirm
    is_valid = models.BooleanField(default=False)
|
{"/Farmer_portal/Farmer_portal/blog/urls.py": ["/Farmer_portal/Farmer_portal/blog/views.py"], "/Farmer_portal/Farmer_portal/Expert/views.py": ["/Farmer_portal/Farmer_portal/Expert/forms.py"], "/Farmer_portal/Farmer_portal/blog/views.py": ["/Farmer_portal/Farmer_portal/blog/models.py"], "/Farmer_portal/Farmer_portal/Expert/forms.py": ["/Farmer_portal/Farmer_portal/Expert/models.py"], "/Farmer_portal/Farmer_portal/blog/admin.py": ["/Farmer_portal/Farmer_portal/blog/models.py"]}
|
10,392
|
krinish291/Farmer_portal
|
refs/heads/master
|
/Farmer_portal/Farmer_portal/Expert/urls.py
|
from django.urls import path
from Expert.views import login, auth_view, logout,loggedin, invalidlogin,varify,userdoesnotexist,updatedans,register,submitanswer,allansQuery,updateQueryans,expertverify
from django.contrib.auth import views as auth_views
from django.conf.urls import url
# Route table for the Expert app: literal paths mapped straight to views.
# NOTE(review): 'varify' is presumably a typo for 'verify' in the view name;
# renaming would also touch templates/links, so it is left as-is here.
urlpatterns = [
    url(r'^login/$', login),
    url(r'^auth/$', auth_view),
    url(r'^logout/$', logout),
    url(r'^loggedin/$', loggedin),
    url(r'^invalidlogin/$', invalidlogin),
    url(r'^register/$', register),
    url(r'^varify/$', varify),
    url(r'^updatedans/$', updatedans),
    url(r'^expertverify/$', expertverify),
    url(r'^updateQueryans/$', updateQueryans),
    url(r'^allansQuery/$', allansQuery),
    url(r'^submitanswer/$', submitanswer),
    url(r'^userdoesnotexist/$', userdoesnotexist),
]
|
{"/Farmer_portal/Farmer_portal/blog/urls.py": ["/Farmer_portal/Farmer_portal/blog/views.py"], "/Farmer_portal/Farmer_portal/Expert/views.py": ["/Farmer_portal/Farmer_portal/Expert/forms.py"], "/Farmer_portal/Farmer_portal/blog/views.py": ["/Farmer_portal/Farmer_portal/blog/models.py"], "/Farmer_portal/Farmer_portal/Expert/forms.py": ["/Farmer_portal/Farmer_portal/Expert/models.py"], "/Farmer_portal/Farmer_portal/blog/admin.py": ["/Farmer_portal/Farmer_portal/blog/models.py"]}
|
10,423
|
funkonaut/Sos_data_fun
|
refs/heads/master
|
/update_TCAD_data.py
|
"""
File has functions to update TCAD data from the traviscad.org
web page and read the data into a pandas dataframe.
"""
import sys
import os
from datetime import datetime
from logger import logger
import time
import numpy as np
import pandas as pd
import selenium
from selenium import webdriver
import urllib3
from webdriver_manager.chrome import ChromeDriverManager
import meta_data as md
def init_wd():
    """Create and return a headless Chrome selenium webdriver."""
    chrome_opts = webdriver.ChromeOptions()
    chrome_opts.add_argument('headless')
    driver_path = ChromeDriverManager().install()
    return webdriver.Chrome(driver_path, options=chrome_opts)
def nav_url(browser,url=""):
    """Navigate *browser* to *url*.

    Returns 1 on success and 0 on failure (callers treat this as a status
    code).  Fix: the exception detail was previously discarded from both log
    lines; it is now included, matching the login_sos.py variant.
    """
    try:
        time.sleep(1)  # brief pause before navigating
        browser.get(url)
    except Exception as e:
        logger.info(f"Could not navigate to url {e}")
        logger.error(f"Could not navigate to url {e}")
        return 0
    logger.info("Successfully navigated to "+url)
    return 1
def download_data(url_down):
    """Download the TCAD zip from *url_down* into ../data/TCAD and delete the zip.

    Fix: the original built shell command strings with os.system and plain
    concatenation, so a scraped URL containing shell metacharacters could
    inject commands.  subprocess with list argv avoids the shell entirely;
    check=True surfaces failures (os.system silently ignored them).
    """
    import subprocess  # local import so the module's top-level imports stay untouched
    date = str(datetime.date(datetime.now()))
    fn = 'tcad' + date + '.zip'
    od = '../data/TCAD'
    subprocess.run(['curl', url_down, '-o', fn], check=True)
    subprocess.run(['unzip', fn, '-d', od], check=True)
    os.remove(fn)
def scrape_url(url="https://www.traviscad.org/reports-request/"):
    """Open the TCAD reports page, find the appraisal-roll link, download it.

    Exits the process on failure.  Fix: the caught exception was previously
    discarded before sys.exit(); its detail is now logged.
    """
    logger.info(f"Scraping TCAD data")
    try:
        browser = init_wd()
        #wait 10 seconds when doing a find_element
        browser.implicitly_wait(10)
        nav_url(browser,url)
        link = browser.find_element_by_link_text('TCAD APPRAISAL ROLL EXPORT')
        url_down = link.get_attribute("href")
        logger.info("Successfully fetched link "+url_down)
        download_data(url_down)
        logger.info(f"Scraped TCAD data")
    except Exception as e:
        # Log the actual cause before bailing out.
        logger.error(f"Failed to fetch link: {e}")
        sys.exit()
def read_tcad(fn='../data/TCAD/'):
    """Read TCAD property data into a dataframe.

    fn: directory containing the fixed-width PROP.TXT export.
    Returns a dataframe whose columns come from meta_data.tcad_prop_names;
    every cell is uppercased text, with "NAN" converted back to real NaN.
    """
    logger.info(f"Reading TCAD data")
    # Field widths come from meta_data; the export is Latin-1 encoded.
    df_tcad = pd.read_fwf(fn+'PROP.TXT', md.tcad_prop_w, encoding = "ISO-8859-1")
    df_tcad.columns = md.tcad_prop_names
    ##Clean up entries
    # Stringify and uppercase every cell (this also turns NaN into "NAN").
    df_tcad = df_tcad.apply(lambda x: x.astype(str).str.upper())
    ##Upper case all text strip punctuation?
    ##Convert to nan will convert to None in execute_values()
    df_tcad = df_tcad.replace("NAN",np.nan)
    logger.info(f"Successfully read TCAD data")
    return df_tcad
def download_read():
    """Fetch the latest TCAD dump, then parse it into a dataframe."""
    scrape_url()
    return read_tcad()
if __name__ == '__main__':
    # Manual test hook: parse already-downloaded data without re-scraping.
    # download_read()
    print(read_tcad())
|
{"/update_TCAD_data.py": ["/logger.py", "/meta_data.py"], "/database.py": ["/logger.py", "/meta_data.py", "/fwf_read.py"], "/build_database.py": ["/meta_data.py", "/database.py", "/update_TCAD_data.py", "/fwf_read.py", "/logger.py"], "/fwf_read.py": ["/logger.py", "/database.py", "/meta_data.py"]}
|
10,424
|
funkonaut/Sos_data_fun
|
refs/heads/master
|
/login_sos.py
|
"""
File has functions to login into SOS
web page and read the cookie for curling files
"""
import sys
import os
from datetime import datetime
from logger import logger
import time
import shlex
import subprocess
import re
from bs4 import BeautifulSoup
from dotenv import load_dotenv
import numpy as np
import pandas as pd
import selenium
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
import urllib3
from webdriver_manager.chrome import ChromeDriverManager
import meta_data as md
load_dotenv()
def init_wd():
    """Start Chrome under selenium with the 'headless' flag enabled."""
    opts = webdriver.ChromeOptions()
    opts.add_argument('headless')
    return webdriver.Chrome(ChromeDriverManager().install(), options=opts)
driver = init_wd()  # NOTE(review): launches a browser at import time -- import side effect
def nav_url(browser, url=""):
    """Point *browser* at *url*; returns 1 on success, 0 on failure."""
    try:
        time.sleep(1)  # brief pause before each navigation
        browser.get(url)
    except Exception as e:
        logger.info(f"Could not navigate to url {e}")
        logger.error("Could not navigate to url")
        return 0
    logger.info("Successfully navigated to "+url)
    return 1
def build_cmd(dl, cookie=""):
    """Return a single-line curl command string for SOS path *dl*.

    cookie: ASP session token obtained after login, interpolated into the
    Cookie header.  Fixes: the original def line was missing its ':'
    (SyntaxError), referenced an undefined ``cookie`` name (now a parameter
    with an empty default), and defined unused ``date``/``order`` locals
    (removed).
    """
    cmd = f"""curl 'https://direct.sos.state.tx.us/{dl}' \
    -H 'authority: direct.sos.state.tx.us' \
    -H 'cache-control: max-age=0' \
    -H 'sec-ch-ua: "Chromium";v="88", "Google Chrome";v="88", ";Not A Brand";v="99"' \
    -H 'sec-ch-ua-mobile: ?0' \
    -H 'upgrade-insecure-requests: 1' \
    -H 'user-agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4324.96 Safari/537.36' \
    -H 'accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9' \
    -H 'sec-fetch-site: none' \
    -H 'sec-fetch-mode: navigate' \
    -H 'sec-fetch-user: ?1' \
    -H 'sec-fetch-dest: document' \
    -H 'accept-language: en-US,en;q=0.9' \
    -H 'cookie: ASPSESSIONIDAGASDSCB={cookie}; c%5Fclient%5Fid=80793825; c%5Fordering%5Fparty%5Femail=ap%40trla%2Eorg; c%5Fordering%5Fparty%5Ffax=956+968+8823; c%5Fordering%5Fparty%5Fphone=956+447+4800; c%5Fordering%5Fparty%5Fname=TEXAS+RIOGRANDE+LEGAL+AID%2C+INC%2E' \
    --compressed"""
    return cmd
def download_data(cookie):
    """Curl the weekly filing data using the session *cookie*.

    Fixes: ``order`` and ``date`` were referenced but never defined in this
    scope (NameError at runtime), and *cookie* was accepted but never used;
    it is now threaded through to build_cmd.
    """
    date = "1/29/2021"        # TODO: parameterize -- hard-coded order date
    order = "1026032650002"   # TODO: parameterize -- hard-coded order number
    dl = f"corp_bulkorder/corp_bulkorder.asp?submit=download&dn={order}&td={date}"
    cmd = build_cmd(dl, cookie)
    #figure out the actual download link
    out = subprocess.check_output(shlex.split(cmd))
    soup = BeautifulSoup(out, 'html.parser')
    # The page lists the real download link(s); keep the last href found.
    for link in soup.find_all('a', href=True):
        dl = link['href']
    cmd = build_cmd(dl, cookie)
    out = subprocess.check_output(shlex.split(cmd))
def login_download():
    """Log in to SOS direct, then hand the session cookie to download_data().

    Credentials come from the SOS_USERNAME / SOS_PASSWORD environment
    variables (loaded from .env).  After login the billing type is switched
    to 'Client Account' before the cookie is harvested.
    """
    logger.info(f"Logging in.")
    username = os.getenv("SOS_USERNAME")
    password = os.getenv("SOS_PASSWORD")
    try:
        driver.implicitly_wait(10)
        url = "https://direct.sos.state.tx.us/acct/acct-login.asp"
        nav_url(driver,url)
        #login
        driver.find_element_by_name("client_id").send_keys(username)
        driver.find_element_by_name("web_password").send_keys(password)
        driver.find_element_by_name("submit").click()
        logger.info("Successfully logged in.")
        #update billing info
        driver.implicitly_wait(3)
        driver.find_element_by_xpath("//select[@name='payment_type_id']/option[text()='Client Account']").click()
        driver.implicitly_wait(3)
        driver.find_element_by_name("Submit").click()
        driver.implicitly_wait(3)
        #get cookies for curl
        cookies = driver.get_cookies()
        print(cookies)
        cookie = cookies[-1]['value']  # NOTE(review): assumes the session cookie is last -- fragile
        download_data(cookie)
        # logger.info(f"Scraped TCAD data")
    except Exception as e:
        logger.info(f"Failed to login: {e}")
        logger.error(f"Failed to login: {e}")
        # sys.exit()
if __name__ == '__main__':
    # Leftover hooks from update_TCAD_data.py kept for reference:
    # download_read()
    # print(read_tcad())
    login_download()
|
{"/update_TCAD_data.py": ["/logger.py", "/meta_data.py"], "/database.py": ["/logger.py", "/meta_data.py", "/fwf_read.py"], "/build_database.py": ["/meta_data.py", "/database.py", "/update_TCAD_data.py", "/fwf_read.py", "/logger.py"], "/fwf_read.py": ["/logger.py", "/database.py", "/meta_data.py"]}
|
10,425
|
funkonaut/Sos_data_fun
|
refs/heads/master
|
/database.py
|
"""
PostgreSQL database related functions.
"""
import os
import sys
from io import StringIO
from logger import logger
from dotenv import load_dotenv
import pandas as pd
import psycopg2
from psycopg2 import sql
import psycopg2.extras as extras
import meta_data as md
import fwf_read as fwf
load_dotenv()
local_dev = os.getenv("LOCAL_DEV") == "true"
def get_database_connection(local_dev=True):
    """Open a psycopg2 connection; the env var used depends on *local_dev*."""
    env_key = "LOCAL_DATABASE_URL" if local_dev else "DATABASE_URL"
    return psycopg2.connect(os.getenv(env_key))
# Module-wide connection/cursor reused by delete_log and the __main__ path.
conn = get_database_connection(local_dev=local_dev)
cursor = conn.cursor()
def execute_values(conn, df, table):
    """Bulk-insert *df* into *table* via psycopg2.extras.execute_values().

    NOTE(review): returns 1 on failure and None on success -- callers must
    not treat the return value as a boolean success flag; confirm intent.
    """
    #Convert nans to None for SQL and clean up
    df = df.where(pd.notnull(df), None)
    # Create a list of tuples from the dataframe values
    tuples = [tuple(x) for x in df.to_numpy()]
    # Comma-separated dataframe columns
    cols = ','.join(list(df.columns))
    # SQL query to execute (%%s survives %-formatting as the VALUES placeholder)
    query = "INSERT INTO %s(%s) VALUES %%s" % (table, cols)
    cursor = conn.cursor()
    try:
        extras.execute_values(cursor, query, tuples)
        conn.commit()
    except (Exception, psycopg2.DatabaseError) as error:
        logger.error(f"Error {table}: {error}")
        print(f"Error {table}: {error}")
        conn.rollback()
        cursor.close()
        return 1
    logger.info(f"execute_values for {table} done")
    cursor.close()
def filter_df(df,layout_code):
    """Return only the rows and columns belonging to *layout_code*.

    Fix: the original called ``cols.remove("filler")`` directly on the
    shared ``md.NAMES`` entry, permanently mutating module-level metadata
    the first time each layout was filtered; we now work on a copy.
    """
    #totals_log is the 13th entry (index 12) in meta_data.py array NAMES
    if layout_code == 99:
        cols = list(md.NAMES[13-1])
    else:
        cols = list(md.NAMES[layout_code-1])
    if "filler" in cols:
        cols.remove("filler")
    return df.loc[df["layout_code"].eq(layout_code)][cols]
def dump_df(df):
    """Insert all entries into their layout_code tables.

    Splits *df* by layout_code and bulk-inserts each slice into the matching
    table from meta_data; code 99 maps to the 13th slot (totals_log).
    """
    #make sure type is consistant
    df['layout_code'] = df['layout_code'].astype(int)
    for layout_code in df["layout_code"].unique():
        df_f = filter_df(df,layout_code) #filtered dataframe
        if layout_code == 99:
            table = md.TABLE_NAMES[13-1]
        else:
            table = md.TABLE_NAMES[layout_code-1]
        execute_values(conn, df_f, table)
def delete_log(df_del):
    """Delete records for df_del["filing_num"] from all tables.

    Skips tables without a filing_num column (reserved, totals_log,
    delete_all_log).  sql.Identifier safely quotes the table name; the
    filing number is normalized via int -> str because it is stored as TEXT.
    """
    skip = ["reserved", "totals_log", "delete_all_log"]
    tables = [table for table in md.TABLE_NAMES if table not in skip]
    for table in tables:
        for i,row in df_del.iterrows():
            filing_del = row["filing_num"]
            cursor.execute(sql.SQL("DELETE FROM {} WHERE filing_num=%s;").format(sql.Identifier(table)),[str(int(filing_del))])
            conn.commit()
            logger.info(f"Removed delete_all_log entries for {table}")
    return
#Takes in weekly dump from SOS and updates the database maybe put in fwf_read
#is address ever updated without a master filing?
#test this? read meta data more!
def update_database(fn):
    """Read one weekly update file *fn* and merge it into the database.

    Master records (layout code 2) identify the filings being updated;
    their existing rows are deleted from every table first, then the full
    weekly dump is inserted.
    """
    fn = "../data/weekly_updates/"+fn
    data = fwf.read_data(fn)
    df = fwf.split_read_combine(data)
    df_2 = filter_df(df,2)
    #search and replace filing number
    delete_log(df_2)
    dump_df(df)
    return
def dump_to_df(conn,table):
    """Read all entries from *table* into a dataframe.

    NOTE(review): the table name is %-interpolated into the SQL string --
    acceptable for the internal table names used here, but not safe for
    untrusted input.
    """
    df = pd.read_sql_query('SELECT * FROM "%s"'%(table),con=conn)
    return df
if __name__=="__main__":
    #delete logs
    # df_del = dump_to_df(conn, "delete_all_log")
    # delete_log(df_del)
    # Apply one specific weekly update file when run directly.
    update_database("CW030121.txt")
|
{"/update_TCAD_data.py": ["/logger.py", "/meta_data.py"], "/database.py": ["/logger.py", "/meta_data.py", "/fwf_read.py"], "/build_database.py": ["/meta_data.py", "/database.py", "/update_TCAD_data.py", "/fwf_read.py", "/logger.py"], "/fwf_read.py": ["/logger.py", "/database.py", "/meta_data.py"]}
|
10,426
|
funkonaut/Sos_data_fun
|
refs/heads/master
|
/build_database.py
|
"""
Code to build the SOS and TCAD database.
Builds schemas and popuates databases.
Runs quick search sql code and address normalization.
"""
import os
from dotenv import load_dotenv
import meta_data as md
import database as db
import update_TCAD_data as tcad
import fwf_read as fwf
from logger import logger
load_dotenv()
local_dev = os.getenv("LOCAL_DEV") == "true"
def create_data_table_schema(i,name):
    """Create .sql schema file for SOS data.

    i:    zero-based index into the meta_data NAMES/DTYPES arrays.
    name: table name; 'totals_log' is the only table without FILING_NUM.
    Writes ./sql/create_<name>.sql and immediately executes it via psql.
    """
    d = "sql"
    fn = f"create_{name}.sql"
    #totals log is different everything else starts the same
    if name == "totals_log":
        schema = f"CREATE TABLE {name.upper()} (\n LAYOUT_CODE TEXT,"
    else:
        schema = f"CREATE TABLE {name.upper()} (\n FILING_NUM TEXT,\n LAYOUT_CODE TEXT,"
    #go through tables and build schema based on meta_data.py
    for j,entry in enumerate(md.NAMES[i]):
        #not including filler and already have the other two
        if (entry == "layout_code") | (entry == "filing_num") | (entry == "filler"): continue
        #specify type
        if md.DTYPES[i][j] == "N": dtype = "NUMERIC"
        elif md.DTYPES[i][j] == "D": dtype = "DATE"
        else: dtype = "TEXT"
        schema += f"\n {entry.upper()} {dtype},"
    #get rid of trailing , and add in );
    schema = schema[:-1]
    schema += "\n);"
    #create an index on filing_num for more efficient sql
    if name != "totals_log":
        schema += f"\nCREATE INDEX ON {name.upper()}(FILING_NUM);"
    #write it out to a .sql file
    with open(f"./{d}/{fn}", "w") as fh:
        fh.write(schema)
    #actually do the thing
    run_schema(d,fn)
def create_md_table_schema(col, table):
    """Write and immediately run a CREATE TABLE script for one SOS meta-data table.

    col:   list of (numeric_column, text_column) name pairs.
    table: meta-data table name; the script lands in ./sql/create_<table>.sql.
    """
    out_dir = "sql"
    out_fn = f"create_{table}.sql"
    # Assemble the statement piecewise, then trim the trailing comma.
    parts = [f"CREATE TABLE {table.upper()} ("]
    for pair in col:
        parts.append(f"\n {pair[0]} NUMERIC,")
        parts.append(f"\n {pair[1]} TEXT,")
    schema = "".join(parts)[:-1] + "\n);"
    # Persist the script, then execute it.
    with open(f"./{out_dir}/{out_fn}", "w") as fh:
        fh.write(schema)
    run_schema(out_dir, out_fn)
#THIS MIGHT NEED CHANGING WE STILL NEED TO LINK UP METADATA
def populate_meta_data_table(col, table):
    """Populate an SOS meta-data table from md.df_meta.

    col: list of (numeric_column, text_column) name pairs; each pair is
    pulled from md.df_meta (NaN rows dropped) and bulk-inserted into *table*.
    """
    conn = db.get_database_connection(local_dev=local_dev)
    for entry in col:
        df = md.df_meta[list(entry)].dropna()
        db.execute_values(conn,df,table)
def run_schema(d,fn):
    """Run sql schema file ./{d}/{fn} against the Sos_data_fun database.

    NOTE(review): shells out via os.system and assumes `psql` is on PATH and
    the database already exists; failures are not checked.
    """
    os.system(f"psql -d Sos_data_fun -f ./{d}/{fn}")
    print(f"Created sql table ran {fn}")
    return
def create_tcad_schema():
    """Emit and execute the CREATE TABLE script for the TCAD property table.

    Every column is TEXT; names come from md.tcad_prop_names.
    """
    out_dir, table = "sql", "tcad"
    script_name = f"create_{table}.sql"
    pieces = [f"CREATE TABLE {table} ("]
    pieces.extend(f"\n {name.upper()} TEXT," for name in md.tcad_prop_names)
    # Join, drop the trailing comma, close the statement.
    schema = "".join(pieces)[:-1] + "\n);"
    with open(f"./{out_dir}/{script_name}", "w") as fh:
        fh.write(schema)
    run_schema(out_dir, script_name)
def create_sos_schema():
    """Build every SOS table defined in meta_data.

    Iterates layout codes 1..13 (1-indexed), creating each data table
    (skipping the 'reserved' slot) plus, where meta-data column pairs exist,
    the companion meta-data table, which is created and populated.
    """
    #1 indexed cycle thru [1,14) to index into meta data arrays
    for i in range(1,14):
        #Create main database schema
        table = md.TABLE_NAMES[i-1]
        #Dont do reserved
        if table != "reserved":
            create_data_table_schema(i-1,table)
        #Create meta data schema and populate
        col = md.COLS[i-1]
        table = md.MD_TABLE_NAMES[i-1]
        if col is not None:
            create_md_table_schema(col, table)
            populate_meta_data_table(col, table)
def main():
    """Build and populate the full SOS + TCAD database, then run the
    post-load SQL (address normalization, name-search indexes)."""
    #Build schema for SOS data
    logger.info("Creating SOS file schema")
    create_sos_schema()
    #Populate SOS data
    logger.info("Running SOS file reads")
    fwf.main()
    #Create TCAD data schema and populate
    logger.info("Running TCAD file reads")
    #df = tcad.download_read()
    df = tcad.read_tcad()
    create_tcad_schema()
    conn = db.get_database_connection(local_dev=local_dev)
    db.execute_values(conn,df,"tcad")
    #Run normalization code for addresses
    logger.info("Running address normalization schema")
    run_schema("sql","create_normalized_addresses.sql")
    #Create the name search indexes (NOTE(review): log message has an unbalanced paren)
    logger.info("Running index creation for names (biz/person")
    run_schema("sql","create_name_search_index.sql")
#To redo:
#rm error.log
#dropdb Sos_data_fun
#createdb Sos_data_fun
#rm .sql in ./sql EXCEPT create_normalized_addresses.sql and create_name_search_index.sql
#createdb Sos_data_fun
#run code
# Entry point: full rebuild (see the "To redo" notes above for manual steps).
if __name__ == "__main__":
    main()
|
{"/update_TCAD_data.py": ["/logger.py", "/meta_data.py"], "/database.py": ["/logger.py", "/meta_data.py", "/fwf_read.py"], "/build_database.py": ["/meta_data.py", "/database.py", "/update_TCAD_data.py", "/fwf_read.py", "/logger.py"], "/fwf_read.py": ["/logger.py", "/database.py", "/meta_data.py"]}
|
10,427
|
funkonaut/Sos_data_fun
|
refs/heads/master
|
/fwf_read.py
|
"""
The module to read in SOS fwf data into an SQL database.
"""
import os
import sys
from datetime import datetime, date
from itertools import accumulate
from logger import logger
import re
import pandas as pd
import numpy as np
import database as db
import meta_data as md
import clean_up as cu
def read_data(fn: str) -> str:
    """Return the entire contents of *fn* decoded as Latin-1.

    Fix: the previous docstring claimed newlines were stripped; they are
    not -- the raw text, newlines included, is returned (callers split on
    '\\n' themselves).
    """
    with open(fn, "r", encoding='Latin-1') as fh:
        return fh.read()
def convert_date(data):
    """Convert a date string in YYYYMMDD form to a datetime.date.

    Fix: the old docstring said YYYYDDMM, but the format actually used is
    '%Y%m%d' (year-month-day).  Raises ValueError on malformed input.
    """
    return datetime.strptime(data, '%Y%m%d').date()
def split_read_combine(data):
    """Split file contents into records, parse each, combine into a dataframe.

    data: full file contents; records are newline-delimited fixed-width rows.
    Unparseable records are logged and skipped so one bad row cannot abort
    the whole file.
    """
    l = data.split('\n') #entries delimited by \n
    dfs = [] #array of dictionaries
    e = 0      # whole-record read failures
    fw_e = 0   # individual field (type) conversion failures
    for record in l:
        try:
            d,fw_e = read_multi_fwf(record,fw_e)
            dfs.append(d)
        except Exception as error:
            logger.error(f"{error}\n'{record}'")
            e += 1
    logger.info(f"There were {e} record read errors check log for specifics")
    logger.info(f"There were {fw_e} fixed width entry (type) errors check log for specifics")
    return pd.DataFrame(dfs)
#Read sub fwfs according to specified fw from layout_code
def read_multi_fwf(record,fw_e):
    """Split a fwf file entry's fields according to metadata described in corp-bulkorder-layout.doc into a dictionary.

    record: one fixed-width line whose first two characters are the layout code.
    fw_e:   running count of field-level conversion errors, returned updated.
    Returns (field_dict, fw_e).
    """
    #Read in that data
    #compute index from layout_code to use correct metadata
    layout_code = int(record[0:2])
    # layout 99 (totals) lives in slot 13 of the metadata arrays
    if layout_code == 99:
        layout_code = 13
    #Split according to widths spec just makes it easier instead of typing in all start and end pos
    width = md.WIDTHS[layout_code-1]
    # running sums of widths -> (start, end) index pairs per field
    bounds = list(accumulate(width, lambda a,b: a+b))
    col_widths = list(zip(bounds[0::1],bounds[1::1]))
    data_type = md.DTYPES[layout_code-1]
    #Read all the entries according to meta_data and collect them as a list of dicts
    entry = []
    for w,dt in zip(col_widths,data_type):
        data = record[w[0]:w[1]]
        if dt == "C": #Character type
            data = data.rstrip() #left justified space padded
            data = data.upper() #TEXT data should be uppercased do we want to strip punctuation too?
        elif dt == "D": #Date type
            try:
                data = data.lstrip('0') #right justified 0 padded
                data = convert_date(data)
            except Exception as error:
                # blank fields are legitimate; only non-blank failures count
                data = data.strip()
                if data != "":
                    fw_e += 1
                    logger.error(f"{error}: Could not convert {data} to date")
                data = None
        else:# N (numeric type)
            try:
                data = data.lstrip('0')
                data = int(data)
            except Exception as error:
                data = data.strip()
                if data != "":
                    fw_e += 1
                    logger.error(f"{error}: Could not convert {data} to int")
                data = None
        entry.append(data)
    d = dict(zip(md.NAMES[layout_code-1],entry))
    return d,fw_e
def main():
    """Read in all files in data directory and dump them to a data table depending on their layout_code."""
    directory = "../data/"
    logger.info(f"Reading and populating SOS data")
    for fn in os.listdir(directory):
        if fn.endswith(".txt"):#Only read in txt files
            logger.info(f"Reading in file: {fn}")
            data = read_data(directory + fn)
            df = split_read_combine(data)
            logger.info(f"Read in file: {fn}")
            db.dump_df(df)
    #also link meta_data and types?
    #KEEPING DELETE_LOG records
    # cu.delete_records()
    logger.info(f"Read and populated SOS data")
|
{"/update_TCAD_data.py": ["/logger.py", "/meta_data.py"], "/database.py": ["/logger.py", "/meta_data.py", "/fwf_read.py"], "/build_database.py": ["/meta_data.py", "/database.py", "/update_TCAD_data.py", "/fwf_read.py", "/logger.py"], "/fwf_read.py": ["/logger.py", "/database.py", "/meta_data.py"]}
|
10,428
|
funkonaut/Sos_data_fun
|
refs/heads/master
|
/logger.py
|
"""
Logger logs info to std.out and errors to errors.log
"""
#perhaps should implement different file handlers for each module
#so different log files for errors?
import sys
import logging
class log_filter(object):
    """Logging filter passing only records at or below a given severity."""

    def __init__(self, level):
        # Maximum level (inclusive) this filter lets through.
        self.__level = level

    def filter(self, record):
        """True when *record* is no more severe than the configured cutoff."""
        return not (record.levelno > self.__level)
# Root logger wiring: INFO-and-below to stdout, DEBUG-and-above to errors.log.
logger = logging.getLogger()
logger.setLevel(logging.INFO)
formatter = logging.Formatter('[%(asctime)s] %(levelname)s [%(filename)s.%(funcName)s:%(lineno)d] %(message)s',
    datefmt='%a, %d %b %Y %H:%M:%S')
# stdout handler: log_filter caps it at INFO so errors are not duplicated there
sh = logging.StreamHandler(sys.stdout)
sh.setFormatter(formatter)
sh.setLevel(logging.INFO)
sh.addFilter(log_filter(logging.INFO))
# file handler: everything DEBUG and up is persisted to errors.log
fh = logging.FileHandler(filename='errors.log')
fh.setFormatter(formatter)
fh.setLevel(logging.DEBUG)
logger.addHandler(fh)
logger.addHandler(sh)
|
{"/update_TCAD_data.py": ["/logger.py", "/meta_data.py"], "/database.py": ["/logger.py", "/meta_data.py", "/fwf_read.py"], "/build_database.py": ["/meta_data.py", "/database.py", "/update_TCAD_data.py", "/fwf_read.py", "/logger.py"], "/fwf_read.py": ["/logger.py", "/database.py", "/meta_data.py"]}
|
10,429
|
funkonaut/Sos_data_fun
|
refs/heads/master
|
/meta_data.py
|
"""
The module to define meta data constants as specified by corp-bulkorder-layout.doc
"""
import pandas as pd
############META DATA CONSTANTS################
# Meta-data lookup table; dtype=object keeps every id as a string so that
# zero-padded codes are not mangled into integers.
df_meta = pd.read_csv("sos_meta_data.csv",dtype=object)
# Layout columns to link: for each record layout, pairs of (id column in the
# fixed-width record, description column in the meta-data CSV).
cols_2 = [("status_id","status_description"),("corp_type_id","corp_type"),("nonprofit_subtype_id","description_n")]
cols_9 = [("name_status_id","status"),("name_type_id","name_description")]
cols_10 = [("capacity_id","corp_type_id_description")]
# Indexed by record-layout number (0-12); None means no meta-data join for that layout.
COLS = [None, cols_2, None, None, None, None, None, None, cols_9, cols_10, None, None, None]
MD_TABLE_NAMES = [None, "md_master", None, None, None, None, None, None, "md_charter_names", "md_associated_entity", None, None, None]
# Each record is 560 characters of fixed-width fields. Data type 'N' (numeric)
# is right-justified and zero-filled on the left; 'C' (character) is
# left-justified and space-filled on the right, even when the value happens to
# be a number. 'D' (date) is a subset of 'N' that is not specified in the
# layout document but inferred from column names ending in '_date'.
# For each layout below, *_w appears to hold a leading placeholder 0 followed
# by the field widths (they sum to the 560-char record length) — consumed by
# fwf_read.py; *_dt the per-field data types; *_names the column names.
#Record layout code 01 Delete All Command Record
delete_rec_w = [0,2,10,6,542]
delete_rec_dt = ["N","N","C","C"]
delete_rec_names = ["layout_code","filing_num","value_DELETE","filler"]
#Record layout code 02 Master Record
master_rec_w = [0,2,10,2,2,150,2,8,8,8,8,8,11,150,16,4,64,8,16,3,2,8,70]
# TODO(review): confirm boc_date uses the same 'D' date format as the other *_date fields
master_rec_dt = ["N","N","N","N","C","N","N","D","D","D","D","N","C","C","C","C","D","C","N","N","C","C"]
master_rec_names = ["layout_code","filing_num","status_id","corp_type_id","name","perpetual_flag","creation_date","expiration_date","inactive_date","formation_date","report_due_date","tax_id","dba_name","foreign_fein","foreign_state","foreign_country","foreign_formation_date","expiration_Type","nonprofit_subtype_id","boc_flag","boc_date","filler"]
#Record layout code 03 Master Address Record
address_rec_w = [0,2,10,50,50,64,4,9,6,64,301]
address_rec_dt = ["N","N","C","C","C","C","C","C","C","C"]
address_rec_names = ["layout_code","filing_num","address1","address2","city","state","zip_code","zip_extension","country","filler"]
#Record layout code 04 Reserved (unused in the layout document)
reserved_rec_w = []
reserved_rec_dt = []
reserved_rec_names = []
#Record layout code 05 Registered Agent Record - Business Name
ra_business_rec_w = [0,2,10,50,50,64,4,9,6,64,8,150,143]
ra_business_rec_dt = ["N","N","C","C","C","C","C","C","C","D","C","C"]
ra_business_rec_names = ["layout_code","filing_num","address1","address2","city","state","zip_code","zip_extension","country","inactive_date","business_name","filler"]
#Record layout code 06 Registered Agent Record - Personal Name
ra_personal_rec_w = [0,2,10,50,50,64,4,9,6,64,8,50,50,50,6,137]
ra_personal_rec_dt = ["N","N","C","C","C","C","C","C","C","D","C","C","C","C","C"]
ra_personal_rec_names = ["layout_code","filing_num","address1","address2","city","state","zip_code","zip_extension","country","inactive_date","agent_last_name","agent_first_name","agent_middle_name","agent_suffix","filler"]
#Record layout code 07 Charter Officer - Business Name
co_business_rec_w = [0,2,10,50,50,64,4,9,6,64,6,32,150,113]
co_business_rec_dt = ["N","N","C","C","C","C","C","C","C","N","C","C","C"]
co_business_rec_names = ["layout_code","filing_num","address1","address2","city","state","zip_code","zip_extension","country","officer_id","officer_title","business_name","filler"]
#Record layout code 08 Charter Officer - Personal Name
co_personal_rec_w = [0,2,10,50,50,64,4,9,6,64,6,32,50,50,50,6,107]
co_personal_rec_dt = ["N","N","C","C","C","C","C","C","C","N","C","C","C","C","C","C"]
co_personal_rec_names = ["layout_code","filing_num","address1","address2","city","state","zip_code","zip_extension","country","officer_id","officer_title","last_name","first_name","middle_name","suffix","filler"]
#Record layout code 09 Charter Names Record
charter_names_rec_w = [0,2,10,6,150,3,3,8,8,8,8,11,254,5,84]
charter_names_rec_dt = ["N","N","N","C","N","N","D","D","D","C","N","C","C","C"]
charter_names_rec_names = ["layout_code","filing_num","name_id","name","name_status_id","name_type_id","creation_date","inactive_date","expire_date","county_type","consent_filing_number","selected_county_array","reserved","filler"]
#Record layout code 10 Associated Entity Record
associated_entity_rec_w = [0,2,10,6,150,12,8,64,4,8,4,292]
associated_entity_rec_dt = ["N","N","N","C","N","D","C","C","D","N","C"]
associated_entity_rec_names = ["layout_code","filing_num","associated_entity_id","associated_entity_name","entity_filing_number","entity_filing_date","jurisdiction_country","jurisdiction_state","inactive_date","capacity_id","filler"]
#Record layout code 11 Filing History Record 12>10 392>394
filing_hist_rec_w = [0,2,10,14,12,96,8,8,8,2,8,392]
filing_hist_rec_dt = ["N","N","N","N","C","D","D","D","N","D","C"]
filing_hist_rec_names = ["layout_code","filing_num","document_number","filing_type_id","filing_type","entry_date","filing_date","effective_date","effective_cond_flag","inactive_date","filler"]
#Record layout code 12 Corp Audit Log Record
audit_rec_w = [0,2,10,8,4,4,10,300,222]
audit_rec_dt = ["N","N","D","N","N","C","C","C"]
audit_rec_names = ["layout_code","filing_num","audit_date","table_id","field_id","action","current_value","audit_comment"]
#Record layout code 99 Totals Record (per-layout record counts for the run)
code99_rec_w = [0,2,10,8,12,12,12,12,12,12,12,12,12,12,12,12,12,384]
code99_rec_dt = ["N","N","D","N","N","N","N","N","N","N","N","N","N","N","N","N","N"]
code99_rec_names = ["layout_code","all_9s","date_of_run","count_01","count_02","count_03","count_04","count_05","count_06","count_07","count_08","count_09","count_10","count_11","count_12","count_13","filler"]
# Aggregated per-layout lists, indexed by record-layout number (0-12).
WIDTHS = [delete_rec_w,master_rec_w,address_rec_w,reserved_rec_w,ra_business_rec_w,ra_personal_rec_w,co_business_rec_w ,co_personal_rec_w ,charter_names_rec_w,associated_entity_rec_w,filing_hist_rec_w ,audit_rec_w,code99_rec_w]
DTYPES = [delete_rec_dt,master_rec_dt,address_rec_dt,reserved_rec_dt,ra_business_rec_dt,ra_personal_rec_dt,co_business_rec_dt ,co_personal_rec_dt ,charter_names_rec_dt,associated_entity_rec_dt,filing_hist_rec_dt ,audit_rec_dt,code99_rec_dt]
NAMES = [delete_rec_names,master_rec_names,address_rec_names,reserved_rec_names,ra_business_rec_names,ra_personal_rec_names,co_business_rec_names ,co_personal_rec_names ,charter_names_rec_names,associated_entity_rec_names,filing_hist_rec_names ,audit_rec_names,code99_rec_names]
TABLE_NAMES = ["delete_all_log","master","address","reserved","registered_agent_business","registered_agent_personal","charter_officer_business","charter_officer_personal","charter_names","associated_entity","filing_hist","audit_log","totals_log"]
##################TCAD DATA#######################
# (start, end) character offsets of each field in a TCAD property record,
# paired positionally with tcad_prop_names.
tcad_prop_w = [(12,17),(2608,2609),(2033,2058),(2731,2741),(2741,2751),(2751,2761),(1745,1795),(1695,1745),(1675,1685),(1659,1675),(1149,1404),(1915,1930),(1686,1695),(546,596),(0,12),(596, 608),(608,678),(4459,4474),(1039,1049),(1049,1099),(1099,1109),(1109,1139),(1139,1149),(4475,4479),(693,753),(753,813),(813,873),(873,923),(923,974),(978,983),(4135,4175)]
tcad_prop_names = ['ptype','hs','deed','code','code2','code3','lot','block','sub_div','acre','description','value','hood','geo_id','prop_id', 'py_owner_i','prop_owner','st_number','prefix','st_name','suffix','city','zip','unit_num','mail_add_1','mail_add_2','mail_add_3','mail_city','mail_state','mail_zip','DBA']
|
{"/update_TCAD_data.py": ["/logger.py", "/meta_data.py"], "/database.py": ["/logger.py", "/meta_data.py", "/fwf_read.py"], "/build_database.py": ["/meta_data.py", "/database.py", "/update_TCAD_data.py", "/fwf_read.py", "/logger.py"], "/fwf_read.py": ["/logger.py", "/database.py", "/meta_data.py"]}
|
10,442
|
alenmora/styleGAN
|
refs/heads/master
|
/models/commonBlocks.py
|
import torch
import torch.nn as nn
import numpy as np
from torch.nn import functional as F
def getActivation(name):
    """
    Return a freshly constructed activation module matching *name*.

    name (str): one of 'lrelu', 'relu', 'tanh' or 'sigmoid'.

    Raises:
        ValueError: if *name* is not a recognized activation. (The previous
        version printed an error and implicitly returned None, which only
        surfaced later as an obscure 'NoneType is not callable' crash.)
    """
    factories = {
        'lrelu': lambda: nn.LeakyReLU(negative_slope=0.2),
        'relu': nn.ReLU,
        'tanh': nn.Tanh,
        'sigmoid': nn.Sigmoid,
    }
    try:
        return factories[name]()
    except KeyError:
        raise ValueError(f'Activation function ERROR: {name!r} is not a valid activation function') from None
class PixelNorm(nn.Module):
    """
    Normalizes every pixel to unit root-mean-square over the channel
    dimension (dim 1). A small epsilon keeps the rsqrt finite for
    all-zero pixels.
    """
    def __init__(self):
        super().__init__()

    def forward(self, input):
        meanSquare = input.pow(2).mean(dim=1, keepdim=True)
        return input * torch.rsqrt(meanSquare + 1e-8)
class Linear(nn.Module):
    """
    Dense linear layer with optional runtime weight scaling. When
    scaleWeights is True the He-style normalization constant is applied at
    forward time (equalized learning rate); otherwise it is baked into the
    initialization std and only lrmul is applied at runtime.
    """
    def __init__(self, inCh, outCh, gain=np.sqrt(2), bias=True, biasInit = 0, scaleWeights=True, lrmul = 1):
        super().__init__()
        self.lrmul = lrmul
        heConst = gain/np.sqrt(inCh+outCh)
        if scaleWeights:
            initStd = 1./lrmul
            self.wtScale = lrmul*heConst
        else:
            initStd = heConst/lrmul
            self.wtScale = lrmul
        # Bias is stored unscaled; lrmul is applied in forward().
        self.bias = torch.nn.Parameter(torch.zeros(outCh).fill_(biasInit)) if bias else None
        # Single normal_ draw, matching the layer's init contract.
        self.weight = torch.nn.Parameter(torch.zeros(outCh, inCh))
        nn.init.normal_(self.weight, mean=0.0, std=initStd)
        self.name = f'Linear module: {inCh} --> {outCh}'

    def forward(self, x):
        scaledBias = self.bias*self.lrmul if self.bias is not None else None
        return F.linear(x, self.weight*self.wtScale, scaledBias)

    def __repr__(self):
        return self.name
class Conv2D(nn.Module):
    """
    2D convolutional layer with 'same' padding (output and input have the same
    spatial size) and optional runtime weight scaling (equalized learning rate
    using He's normalization constant).

    The kernel size must be an odd integer: with an even kernel the
    kernelSize//2 padding would NOT produce same-sized output, so we assert
    this up front (consistent with ModulatedConv2D below).
    """
    def __init__(self, inCh, outCh, kernelSize, padding='same', gain=np.sqrt(2), scaleWeights=True, bias=True, lrmul = 1):
        super().__init__()
        # BUG FIX: previously even kernels were accepted silently, breaking the
        # documented 'same' padding guarantee.
        assert kernelSize >= 1 and kernelSize % 2 == 1, 'Conv2D Error: The kernel size must be an odd integer bigger than one'
        if padding == 'same': #Make sure the output tensors for each channel are the same size as the input ones
            padding = kernelSize // 2
        self.padding = padding
        self.lrmul = lrmul
        # Bias is stored unscaled; lrmul is applied at forward time.
        if bias:
            self.bias = torch.nn.Parameter(torch.zeros(outCh))
        else:
            self.bias = None
        # He-style scale from fan-in only (outCh deliberately excluded).
        fanIn = inCh*kernelSize*kernelSize
        initStd = 1./lrmul
        self.wtScale = lrmul*gain/np.sqrt(fanIn)
        if not scaleWeights:
            # Bake the constant into the init std instead of scaling at runtime.
            initStd = gain/(lrmul*np.sqrt(fanIn))
            self.wtScale = lrmul
        self.weight = nn.Parameter(torch.zeros(outCh,inCh,kernelSize,kernelSize))
        nn.init.normal_(self.weight, mean=0.0, std=initStd)
        self.name = 'Convolution2D Module '+ str(self.weight.shape)

    def forward(self, x):
        """Convolves x (N x inCh x H x W) -> N x outCh x H x W."""
        output = F.conv2d(x,
                          self.wtScale*self.weight,
                          padding = self.padding,
                          bias = self.bias*self.lrmul if self.bias is not None else None)
        return output

    def __repr__(self):
        return self.name
class ModulatedConv2D(nn.Module):
    """
    Modulated 2D convolutional layer (StyleGAN2). This is a 2D convolutional
    layer whose weights are modulated by the output of a linear network which
    maps the hidden latent vector to a style, and then demodulated (by scaling
    them) back to a standard deviation of one. It also has the option of
    weight scaling which, if true, equalizes the learning rate for both the
    convolutional weights and the linear modulation network.
    """
    def __init__(self, styleCh, inCh, outCh, kernelSize, padding='same', gain=np.sqrt(2), bias=False, lrmul = 1, scaleWeights=True, demodulate = True):
        super().__init__()
        assert kernelSize >= 1 and kernelSize % 2 == 1, 'Conv2D Error: The kernel size must be an odd integer bigger than one'
        if padding == 'same': #Make sure the output tensors for each channel are the same size as the input ones
            padding = kernelSize // 2
        self.kernelSize = kernelSize
        self.padding = padding
        self.lrmul = lrmul
        self.outCh = outCh
        self.demodulate = demodulate
        # Leading singleton dim lets the weights broadcast over the batch when modulating.
        self.weights = nn.Parameter(torch.zeros(1,outCh,inCh,self.kernelSize,self.kernelSize), requires_grad=True)
        # NOTE(review): with bias=True this bias has length outCh, but the
        # grouped conv below produces batchSize*outCh channels — this likely
        # fails for batchSize > 1; confirm (default is bias=False).
        if bias:
            self.bias = nn.Parameter(torch.zeros(outCh), requires_grad=True)
        else:
            self.bias = None
        # calc wt scale (He constant from fan-in only; outCh deliberately left out)
        fanIn = inCh*kernelSize*kernelSize
        initStd = 1./lrmul
        self.wtScale = lrmul*gain/np.sqrt(fanIn)
        if not scaleWeights:
            # bake the constant into the init std instead of scaling at runtime
            initStd = gain/(lrmul*np.sqrt(fanIn))
            self.wtScale = lrmul
        # init
        nn.init.normal_(self.weights, mean=0.0, std=initStd)
        # One scaling (style) parameter per input channel; biasInit=1 so an
        # untrained style starts as the identity modulation.
        self.linear = Linear(styleCh, inCh, scaleWeights=scaleWeights, biasInit=1)
        self.name = f'ModulatedConv2D: convolution {inCh} --> {outCh}; style length: {styleCh}'

    def forward(self, x, y):
        """
        x (tensor): input features, N x inCh x H x W
        y (tensor): disentangled latent (style source), N x styleCh
        Returns N x outCh x H x W.
        """
        batchSize, inCh, h, w = x.shape
        s = self.linear(y).view(batchSize, 1, inCh, 1, 1) #N x 1 x inCh x 1 x 1
        modul = self.wtScale*self.weights.mul(s) #N x outCh x inCh x k x k - Modulate by multiplication over the inCh dimension
        if self.demodulate:
            norm = torch.rsqrt(modul.pow(2).sum([2,3,4], keepdim=True)+1e-8) #N x outCh x 1 x 1 x 1 - per-sample, per-output-channel norm
            modul = modul * norm #N x outCh x inCh x k x k - Demodulate by dividing over the norm
        # Grouped-convolution trick: fold the batch into the channel dimension
        # so each sample is convolved with its own modulated weights in a
        # single conv2d call (groups=batchSize).
        x = x.view(1, batchSize*inCh, h, w)
        modul = modul.view(batchSize*self.outCh, inCh, self.kernelSize, self.kernelSize)
        bias = None
        if self.bias is not None: bias = self.bias*self.lrmul
        output = F.conv2d(x, modul, padding=self.padding, bias = bias, groups=batchSize) #N x outCh x H x W
        return output.view(batchSize, self.outCh, *output.shape[2:])

    def __repr__(self):
        return self.name
|
{"/generator.py": ["/models/generatorNetwork.py", "/config.py"], "/models/generatorNetwork.py": ["/models/generatorBlocks.py", "/models/commonBlocks.py"], "/models/criticNetwork.py": ["/models/commonBlocks.py"], "/trainer.py": ["/models/generatorNetwork.py", "/models/criticNetwork.py", "/misc/dataLoader.py", "/misc/logger.py", "/config.py"], "/decoderTrainer.py": ["/misc/logger.py", "/models/generatorNetwork.py", "/models/decoderNetwork.py", "/config.py"], "/models/decoderNetwork.py": ["/models/commonBlocks.py"], "/models/generatorBlocks.py": ["/models/commonBlocks.py"]}
|
10,443
|
alenmora/styleGAN
|
refs/heads/master
|
/misc/dataLoader.py
|
import torch as torch
import numpy as np
import torchvision.transforms as transforms
from torch.utils.data import DataLoader as torchDataLoader
from torchvision.datasets import ImageFolder
import os
from glob import glob
import logging
from PIL import Image
import math
class DataLoader:
    """
    Wraps a torchvision ImageFolder dataset in a shuffling torch DataLoader.

    If resolution/nCh are not given, they are deduced from the first image
    found under dataPath. The working resolution is always rounded down to a
    power of two (minimum 4), as required by the progressive architecture.
    """
    def __init__(self, dataPath = './data/', resolution = None, nCh = None, batchSize = 24, numWorkers = 0):
        self.dataPath = dataPath
        # BUG FIX: os.path.join discards every earlier component when a later
        # one is absolute, so joining with '/*/*.jpg' used to search the
        # filesystem root instead of dataPath. Use relative patterns.
        self.ims = glob(os.path.join(self.dataPath, '*', '*.jpg'))
        self.ims += glob(os.path.join(self.dataPath, '*', '*.png'))
        self.ims += glob(os.path.join(self.dataPath, '*', '*.jpeg'))
        # logging.error only runs (as the assert message) when the check fails
        assert len(self.ims) > 0, logging.error("dataLoader ERROR: No images found in the given folder")
        if resolution and nCh:
            assert resolution >= 4, logging.error("dataLoader ERROR: The output resolution must be bigger than or equal to 4x4")
            self.resolution = int(resolution)
            assert nCh >= 1, logging.error("dataLoader ERROR: The number of channels must be a positive integer")
            self.nCh = nCh
        else: #deduce resolution and channel count from the first image in the data folder
            firstImg = Image.open(self.ims[0])
            self.resolution = min(firstImg.size)
            self.nCh = len(firstImg.getbands())
        # Round the resolution down to a power of two (>= 4).
        # BUG FIX: this check used the 'resolution' argument, which is None
        # whenever the value was deduced above; use self.resolution instead.
        if self.resolution != 2**(int(np.log2(self.resolution))):
            trueres = 4
            while self.resolution//(trueres*2) != 0:
                trueres = trueres*2
            self.resolution = trueres
        self.numWorkers = numWorkers
        self.batchSize = batchSize
        self.loadData()

    def loadData(self):
        """Builds the ImageFolder dataset and the underlying torch DataLoader."""
        logging.info(f'Loading data from {self.dataPath} with resolution {self.resolution}x{self.resolution}')
        self.dataset = ImageFolder(
                        root=self.dataPath,
                        transform=transforms.Compose([
                            transforms.Resize(size=(self.resolution,self.resolution), interpolation=Image.LANCZOS),
                            transforms.RandomHorizontalFlip(),
                            transforms.ToTensor(),
                        ]))
        self.dataloader = torchDataLoader(
                            dataset=self.dataset,
                            batch_size=self.batchSize,
                            shuffle=True,
                            num_workers=self.numWorkers,
                            drop_last=True,
                            pin_memory = torch.cuda.is_available()
                        )

    def __iter__(self):
        return iter(self.dataloader)

    def __next__(self):
        # BUG FIX: torch's DataLoader is iterable but not an iterator, so the
        # previous next(self.dataloader) raised TypeError. Keep a cached
        # iterator and restart it once exhausted.
        if getattr(self, '_iterator', None) is None:
            self._iterator = iter(self.dataloader)
        try:
            return next(self._iterator)
        except StopIteration:
            self._iterator = None
            raise

    def __len__(self):
        return len(self.dataloader.dataset)

    def get_batch(self):
        """Returns one batch of images rescaled to the pixel range [-1, 1]."""
        dataIter = iter(self.dataloader)
        return next(dataIter)[0].mul(2).add(-1) # pixel range [-1, 1]

    def get(self, n = None):
        """
        Returns exactly n samples (defaults to batchSize), concatenating as
        many shuffled batches as needed.
        """
        if n is None: n = self.batchSize
        x = self.get_batch()
        # BUG FIX: the old loop called torch.nn.cat (which does not exist) and
        # discarded its result; accumulate with torch.cat until n samples.
        while x.size(0) < n:
            x = torch.cat([x, self.get_batch()], 0)
        return x[:n]
|
{"/generator.py": ["/models/generatorNetwork.py", "/config.py"], "/models/generatorNetwork.py": ["/models/generatorBlocks.py", "/models/commonBlocks.py"], "/models/criticNetwork.py": ["/models/commonBlocks.py"], "/trainer.py": ["/models/generatorNetwork.py", "/models/criticNetwork.py", "/misc/dataLoader.py", "/misc/logger.py", "/config.py"], "/decoderTrainer.py": ["/misc/logger.py", "/models/generatorNetwork.py", "/models/decoderNetwork.py", "/config.py"], "/models/decoderNetwork.py": ["/models/commonBlocks.py"], "/models/generatorBlocks.py": ["/models/commonBlocks.py"]}
|
10,444
|
alenmora/styleGAN
|
refs/heads/master
|
/models/__init__.py
|
def toggle_grad(model, requires_grad):
    """
    Enable or disable gradient tracking on every parameter of *model*,
    i.e. switch the whole network between trainable and frozen.
    """
    for _, param in model.named_parameters():
        param.requires_grad_(requires_grad)
|
{"/generator.py": ["/models/generatorNetwork.py", "/config.py"], "/models/generatorNetwork.py": ["/models/generatorBlocks.py", "/models/commonBlocks.py"], "/models/criticNetwork.py": ["/models/commonBlocks.py"], "/trainer.py": ["/models/generatorNetwork.py", "/models/criticNetwork.py", "/misc/dataLoader.py", "/misc/logger.py", "/config.py"], "/decoderTrainer.py": ["/misc/logger.py", "/models/generatorNetwork.py", "/models/decoderNetwork.py", "/config.py"], "/models/decoderNetwork.py": ["/models/commonBlocks.py"], "/models/generatorBlocks.py": ["/models/commonBlocks.py"]}
|
10,445
|
alenmora/styleGAN
|
refs/heads/master
|
/generator.py
|
import torch
from models.generatorNetwork import Generator
from torch import FloatTensor as FT
from misc import utils
import os
import argparse
import math
from config import cfg as opt
import numpy as np
def loadPretrainedWts(dir):
    """
    Load a trained-weights checkpoint from *dir*.

    Returns the deserialized state dictionary, or None when the file does not
    exist or cannot be deserialized (an error is printed in both cases, so
    the CLI caller can fail gracefully).
    """
    if not os.path.isfile(dir):
        print(f'ERROR: The file {dir} does not exist')
        return None
    try:
        # map_location forces CPU so GPU-trained weights load on any machine
        return torch.load(dir, map_location=lambda storage, loc: storage)
    except Exception as err:
        # Boundary handler: the old bare 'except' also swallowed
        # KeyboardInterrupt/SystemExit and hid the failure reason.
        print(f'ERROR: The weights in {dir} could not be loaded ({err})')
        return None
if __name__ == "__main__":
    # CLI entry point: load a pretrained generator and save a grid of samples.
    parser = argparse.ArgumentParser('StyleGAN_GEN')
    parser.add_argument('--nImages', type=int, default=20)
    # When sampling the latent vector during training, extreme values are less likely to appear,
    # and hence the generator is not sufficiently trained in these regions. Hence, we limit the
    # values of the latent vector to be inside (-psiCut, psiCut)
    parser.add_argument('--psiCut', type=float, default=0.2)
    parser.add_argument('--latentSize', nargs='?', type=int)
    parser.add_argument('--nChannels', type=int, default=3)
    parser.add_argument('--wtsFile', type=str, default='./pretrainedModels/64x64_modelCheckpoint_semifinal_paterm_nopsicut_nogridtrain_256.pth.tar')
    parser.add_argument('--outputFolder', type=str, default='./generatedImages/')
    parser.add_argument('--outputFile', type=str, nargs='?')
    parser.add_argument('--config', nargs='?', type=str)
    parser.add_argument('--resolution', nargs='?', type=int)
    parser.add_argument('--createInterpolGif', action='store_true')
    args, _ = parser.parse_known_args()
    if args.config:
        opt.merge_from_file(args.config)
    opt.freeze()
    # Resolution and latent size default to the values encoded in the weights
    # file name, e.g. '64x64_..._256.pth.tar' -> resolution 64, latent 256.
    # NOTE(review): endRes is computed but never used below — confirm intent.
    endRes = int(args.resolution) if args.resolution else int(args.wtsFile.split('/')[-1].split('x')[0])
    latentSize = args.latentSize if args.latentSize else int(args.wtsFile.split('/')[-1].split('_')[-1].split('.')[0])
    device = torch.device('cpu')
    cut = abs(args.psiCut)
    wts = loadPretrainedWts(args.wtsFile)
    n = args.nImages
    folder = utils.createDir(args.outputFolder)
    fname = args.outputFile if args.outputFile else 'generated'
    out = os.path.join(folder, fname+'.png')
    if n <= 0:
        n = 20
    mopt = opt.model
    gopt = opt.model.gen
    # Architecture options shared between generator sub-networks
    common = {
             'fmapMax': mopt.fmapMax,
             'fmapMin': mopt.fmapMin,
             'fmapDecay': mopt.fmapDecay,
             'fmapBase': mopt.fmapBase,
             'activation': mopt.activation,
             'upsample': mopt.sampleMode,
             'downsample': mopt.sampleMode
            }
    gen = Generator(**common, **gopt).to(device)
    gen.load_state_dict(wts['gen'])
    # Resample latent components until every entry lies inside (-cut, cut).
    z = utils.getNoise(bs = n, latentSize = latentSize, device = device)
    ext_comp = (z.abs() > abs(cut)).type(FT)
    while ext_comp.sum() > 0:
        # NOTE(review): the replacement term multiplies fresh noise by
        # z*abs(cut), which still depends on the old extreme values — it looks
        # like 'ext_comp' may have been intended instead of 'z'; confirm.
        z = z*(1-ext_comp)+utils.getNoise(bs = n, latentSize = latentSize, device = device)*z*abs(cut)
        ext_comp = (z.abs() > abs(cut)).type(FT)
    # NOTE(review): cut = abs(args.psiCut) is never negative, so this branch
    # is dead — perhaps args.psiCut was meant here.
    if cut < 0: z = -z
    fakes = gen(z)[0]
    print('single image size: ', str(fakes.shape[2]) + 'x' + str(fakes.shape[2]))
    print(f'number of images: {n}')
    print(f'saving image to: {out}')
    # Pick the largest divisor of n (<= sqrt(n)) as the grid row count
    nrows = 1
    if math.sqrt(n) == int(math.sqrt(n)):
        nrows = int(math.sqrt(n))
    elif n > 5:
        i = int(math.sqrt(n))
        while i > 2:
            if (n % i) == 0:
                nrows = i
                break
            i = i-1
    utils.saveImage(fakes, out, nrow=nrows, padding=5)
|
{"/generator.py": ["/models/generatorNetwork.py", "/config.py"], "/models/generatorNetwork.py": ["/models/generatorBlocks.py", "/models/commonBlocks.py"], "/models/criticNetwork.py": ["/models/commonBlocks.py"], "/trainer.py": ["/models/generatorNetwork.py", "/models/criticNetwork.py", "/misc/dataLoader.py", "/misc/logger.py", "/config.py"], "/decoderTrainer.py": ["/misc/logger.py", "/models/generatorNetwork.py", "/models/decoderNetwork.py", "/config.py"], "/models/decoderNetwork.py": ["/models/commonBlocks.py"], "/models/generatorBlocks.py": ["/models/commonBlocks.py"]}
|
10,446
|
alenmora/styleGAN
|
refs/heads/master
|
/models/generatorNetwork.py
|
import torch
import torch.nn as nn
import numpy as np
import math
from models.generatorBlocks import constantInput, Mapping, Synthesis
from models.commonBlocks import PixelNorm
from random import randint
class Generator(nn.Module):
    """
    StyleGAN2 main generator.

    Composed of two subnetworks, mapping and synthesis: the mapping network
    turns the latent vector z into a disentangled latent w, and the synthesis
    network renders the image from a learned constant 4x4 input whose
    convolutions are modulated by w.
    """
    def __init__(self, latentSize = 256, dLatentSize = 256, mappingLayers = 4, neuronsInMappingLayers = 256, normalizeLatents = True,
                 resolution = 64, fmapBase = 2048, fmapDecay = 1, fmapMax = 256, fmapMin = 1, randomizeNoise = False,
                 activation = 'lrelu', scaleWeights = False, outCh = 3, upsample = 'bilinear', synthesisMode = 'skip', psiCut = 0.7,
                 maxCutLayer = -1, makeConstantInputTrainable = True, **kwargs):
        super().__init__()
        self.normalizeLatents = bool(normalizeLatents)
        if self.normalizeLatents:
            self.norm = PixelNorm()
        self.mapping = Mapping(latentSize = latentSize, dLatentSize = dLatentSize, mappingLayers = mappingLayers,
                               neuronsInMappingLayers = neuronsInMappingLayers, activation = activation,
                               scaleWeights = scaleWeights)
        # Number of feature maps of the first (4x4) synthesis block
        nf1 = np.clip(int(fmapBase /2.0 ** (fmapDecay)), fmapMin, fmapMax)
        self.cInput = constantInput(nf1, resol = 4, makeTrainable = makeConstantInputTrainable)
        # BUG FIX: 'outCh' used to be forwarded as the literal 3, silently
        # ignoring the constructor argument; forward the parameter instead
        # (backward compatible: the default is still 3).
        self.synthesis = Synthesis(dLatentSize = dLatentSize, resolution = resolution, fmapBase = fmapBase, fmapDecay = fmapDecay, fmapMax = fmapMax,
                                   fmapMin = fmapMin, randomizeNoise = randomizeNoise, activation = activation, scaleWeights = scaleWeights, outCh = outCh,
                                   upsample = upsample, mode = synthesisMode)
        self.psiCut = psiCut
        self.maxCutLayer = self.synthesis.nLayers-1 if maxCutLayer < 0 else maxCutLayer

    def forward(self, z, zmix = None, wmix = None, cutLayer = None):
        """
        Forward the generator through the input z.

        z (tensor): latent vector
        zmix (tensor): second latent vector, used when performing mixing regularization
        wmix (tensor): second disentangled latent vector, used for style transfer
        cutLayer (int): layer at which to introduce the new mixing element

        Returns [output, w]: the generated batch and the truncated
        disentangled latent that produced it.
        """
        assert zmix is None or wmix is None, 'Generator ERROR: You must specify only one between: mixing latent (zmix), or mixing latent disentangled (wmix)'
        if self.normalizeLatents:
            z = self.norm(z)
        w = self.mapping.forward(z)
        x = self.cInput(w)
        # Truncation trick: pull w towards its mean (psiCut in [0,1] trades
        # variety for fidelity). NOTE(review): the mean is taken over dim 1 of
        # w — confirm that matches the intended per-sample truncation.
        w = w.mean(dim=1,keepdim=True)+self.psiCut*(w - w.mean(dim=1,keepdim=True))
        if zmix is not None:
            if self.normalizeLatents:
                zmix = self.norm(zmix)
            wmix = self.mapping.forward(zmix)
            wmix = wmix.mean(dim=1,keepdim=True)+self.psiCut*(wmix - wmix.mean(dim=1,keepdim=True))
        if wmix is not None:
            # Style mixing: use w up to a random layer, wmix from there on
            if cutLayer is None:
                cutLayer = self.maxCutLayer-1
            layer = randint(1,cutLayer)
            x, extraOutput = self.synthesis.forwardTo(x, w, layer)
            output = self.synthesis.forwardFrom(x, wmix, extraOutput, layer)
        else:
            output = self.synthesis.forward(x, w)
        return [output, w]

    def paTerm(self, z):
        """
        Calculates the pulling away term, as explained in arXiv:1609.03126v4.
        Believed to improve the variance of the generator and avoid mode collapse.

        z (tensor): latent vector
        """
        bs = z.size(0)
        if bs < 2: #Nothing to do if we only generate one candidate
            return 0
        w = self.mapping.forward(z)
        x = self.cInput(w)
        fakes = self.synthesis.forward(x, w)
        nCh = fakes.size(1)
        fakes = fakes.view(bs, nCh, -1) #N x nCh x (h*w)
        # Pair-wise cosine similarities between batch elements: penalize the
        # image similarity deviating from the latent similarity.
        suma = 0
        for i in range(bs):
            for j in range(i+1,bs):
                fakesim = torch.nn.functional.cosine_similarity(fakes[i],fakes[j],dim=0).mean()
                wsim = torch.nn.functional.cosine_similarity(w[i],w[j],dim=0)
                zsim = torch.nn.functional.cosine_similarity(z[i],z[j],dim=0)
                diff1 = (zsim-wsim)**2/(zsim**2 + 1e-8)
                diff2 = (fakesim-wsim)**2/(wsim**2 + 1e-8)
                suma = suma + (diff1+diff2)/2
        return suma/(bs*(bs-1))
|
{"/generator.py": ["/models/generatorNetwork.py", "/config.py"], "/models/generatorNetwork.py": ["/models/generatorBlocks.py", "/models/commonBlocks.py"], "/models/criticNetwork.py": ["/models/commonBlocks.py"], "/trainer.py": ["/models/generatorNetwork.py", "/models/criticNetwork.py", "/misc/dataLoader.py", "/misc/logger.py", "/config.py"], "/decoderTrainer.py": ["/misc/logger.py", "/models/generatorNetwork.py", "/models/decoderNetwork.py", "/config.py"], "/models/decoderNetwork.py": ["/models/commonBlocks.py"], "/models/generatorBlocks.py": ["/models/commonBlocks.py"]}
|
10,447
|
alenmora/styleGAN
|
refs/heads/master
|
/config.py
|
# StyleGAN2 configuration options
from yacs.config import CfgNode as CN
from torch import cuda
import logging
# Root configuration node; options can be overridden from a YAML file via
# cfg.merge_from_file (see generator.py).
cfg = CN()
############################
# Global options
############################
cfg.device = 'cuda' if cuda.is_available() else 'cpu'
cfg.deviceId = '0'
cfg.preWtsFile = "" # File to get the pretrained weights from
cfg.tick = 1000 #Unit of images shown (to make input compact)
cfg.loops = 3000 #Total number of training ticks
############################
# Data options
############################
cfg.dataLoader = CN()
cfg.dataLoader.dataPath = './data/' # Folder where the training data is stored
cfg.dataLoader.resolution = 64 #Final image resolution. If not specified, gets it from the first image in the training data
cfg.dataLoader.noChannels = 3 #Number of input and output channels. If not specified, gets it from the first image in the training data
cfg.dataLoader.batchSize = 16
cfg.dataLoader.numWorkers = 0
############################
# Training Options
############################
cfg.trainer = CN()
cfg.trainer.resumeTraining = False #Whether to resume a previous training. The user must specify the number of images already shown in the last training session
cfg.trainer.lossFunc = 'NSL' #Loss model used. Default is Non Saturating Loss (NSL). The other options are Wasserstein's Distance (WD) and Logistic
cfg.trainer.applyLossScaling = False #Whether to scale any loss function before calculating any gradient penalization term or not
cfg.trainer.paterm = False #Include a pulling-away term in the generator, similar to arXiv:1609.03126v4
cfg.trainer.lambg = 0. #Weight of the pulling-away term in the generator loss function
cfg.trainer.gLazyReg = 10 #Number of minibatches shown before computing the regularization term for the generator (lazy regularization)
cfg.trainer.styleMixingProb = 0.5 #Probability to mix styles during training
cfg.trainer.meanPathLengthDecay = 0.01 #Decay constant for the exponential running averaging of the path length
cfg.trainer.pathLengthRWeight = 2. #Weight of the path regularization term in the generator loss function
cfg.trainer.nCritPerGen = 1 #Number of critic training loops per generator training loop
cfg.trainer.lambR2 = 0. #Weight of the extra R2 gradient penalization (0 = Deactivated)
cfg.trainer.obj = 450 #Objective value for the gradient norm in R2 regularization (arXiv:1704.00028v3)
cfg.trainer.lambR1 = 10. #Weight of the extra R1 gradient penalization
cfg.trainer.epsilon = 1e-3 #Weight of the loss term related to the magnitude of the real samples' loss from the critic
cfg.trainer.cLazyReg = 20 #Number of minibatches shown before computing the regularization term for the critic (lazy regularization)
cfg.trainer.unrollCritic = 0 #For an integer greater than 1, it unrolls the critic n steps (arXiv:1611.02163v4)
############################
# Common model Options
############################
cfg.model = CN()
cfg.model.fmapMax = 256 #Maximum number of channels in a convolutional block
cfg.model.fmapMin = 1 #Minimum number of channels in a convolutional block
cfg.model.fmapBase = 2048 #Parameter to calculate the number of channels in each block: nChannels = max(min(fmapMax, 4*fmapBase/(resolution**fmapDecay)), fmapMin)
cfg.model.fmapDecay = 1. #Parameter to calculate the number of channels in each block: nChannels = max(min(fmapMax, 4*fmapBase/(resolution**fmapDecay)), fmapMin)
cfg.model.activation = 'lrelu' #Which activation function to use for all networks
cfg.model.sampleMode = 'bilinear' #Algorithm to use for upsampling and downsampling tensors
############################
# Generator model Options
############################
cfg.model.gen = CN()
cfg.model.gen.makeConstantInputTrainable = True #Whether to train the constant input in the generator, or leave it as a tensor of ones
cfg.model.gen.psiCut = 0.8 #Value at which to apply the psi truncation cut in the generator disentangled latent
cfg.model.gen.maxCutLayer = -1 #Maximum generator layer at which to apply the psi cut (-1 = last layer)
cfg.model.gen.synthesisNetwork = 'skip' #Network architecture for the generator synthesis. The other option is 'resnet'
cfg.model.gen.latentSize = 256 #Size of the latent vector (z)
cfg.model.gen.dLatentSize = 256 #Size of the disentangled latent vector (w)
cfg.model.gen.normalizeLatents = False #Whether to normalize the latent vector (z) before feeding it to the mapping network
cfg.model.gen.mappingLayers = 4 #Number of mapping layers
cfg.model.gen.neuronsInMappingLayers = 256 #Number of neurons in each of the mapping layers
cfg.model.gen.randomizeNoise = False #Whether to randomize noise inputs every time
cfg.model.gen.scaleWeights = False #Whether to scale the weights for equalized learning
cfg.optim = CN()
############################
# Gen optimizer Options
############################
cfg.optim.gen = CN()
cfg.optim.gen.lr = 0.001
cfg.optim.gen.beta1 = 0.
cfg.optim.gen.beta2 = 0.99
cfg.optim.gen.eps = 1e-8
cfg.optim.gen.lrDecay =0.1 #Generator learning rate decay constant
cfg.optim.gen.lrDecayEvery = 1000 #(Approx) Number of ticks shown before applying the decay to the generator learning rate
cfg.optim.gen.lrWDecay = 0. #Generator weight decay constant
############################
# Critic model Options
############################
cfg.model.crit = CN()
cfg.model.crit.scaleWeights = True #Whether to use weight scaling as in ProGAN in the discriminator
cfg.model.crit.network = 'resnet' #Network architecture for the critic. The other option is 'skip'
cfg.model.crit.stdDevGroupSize = 4 #Size of the groups to calculate the std dev in the last block of the critic
############################
# Crit optimizer Options
############################
cfg.optim.crit = CN()
cfg.optim.crit.lr = 0.001
cfg.optim.crit.beta1 = 0.
cfg.optim.crit.beta2 = 0.99
cfg.optim.crit.eps = 1e-8
cfg.optim.crit.lrDecay =0.1 #Critic learning rate decay constant
cfg.optim.crit.lrDecayEvery = 1000 #(Approx) Number of ticks shown before applying the decay to the critic learning rate
cfg.optim.crit.lrWDecay = 0. #Critic weight decay constant
############################
# Logging
############################
cfg.logger = CN()
cfg.logger.logPath = './exp4/' #Folder where the training outputs are stored
cfg.logger.logLevel = logging.INFO #Logging level; use the logging module's numeric levels (e.g. logging.INFO == 20, logging.CRITICAL == 50)
cfg.logger.saveModelEvery = 35. #(Approx) Number of ticks shown before saving a checkpoint of the model
cfg.logger.saveImageEvery = 35. #(Approx) Number of ticks shown before generating a set of images and saving them in the log directory
cfg.logger.logStep = 5. #(Approx) Number of ticks shown before writing a log in the log directory
############################
# Decoder options
############################
cfg.dec = CN()
cfg.dec.network = 'resnet' #Network architecture for the decoder
cfg.dec.wtsFile = '' #File with the trained decoder weights
cfg.dec.useCriticWeights = True #Initialize as many parameters of the decoder as possible using the critic trained weights
cfg.dec.resumeTraining = False #Whether to resume a previous decoder training session
cfg.dec.batchSize = 40 #Batch size used for decoder training
############################
# Decoder optimizer Options
############################
cfg.optim.dec = CN()
cfg.optim.dec.lr = 0.003
cfg.optim.dec.beta1 = 0.
cfg.optim.dec.beta2 = 0.99
cfg.optim.dec.eps = 1e-8
cfg.optim.dec.lrDecay =0.1 #Decoder learning rate decay constant
cfg.optim.dec.lrDecayEvery = 2000 #(Approx) Number of ticks shown before applying the decay to the decoder learning rate
cfg.optim.dec.lrWDecay = 0. #Decoder weight decay constant
|
{"/generator.py": ["/models/generatorNetwork.py", "/config.py"], "/models/generatorNetwork.py": ["/models/generatorBlocks.py", "/models/commonBlocks.py"], "/models/criticNetwork.py": ["/models/commonBlocks.py"], "/trainer.py": ["/models/generatorNetwork.py", "/models/criticNetwork.py", "/misc/dataLoader.py", "/misc/logger.py", "/config.py"], "/decoderTrainer.py": ["/misc/logger.py", "/models/generatorNetwork.py", "/models/decoderNetwork.py", "/config.py"], "/models/decoderNetwork.py": ["/models/commonBlocks.py"], "/models/generatorBlocks.py": ["/models/commonBlocks.py"]}
|
10,448
|
alenmora/styleGAN
|
refs/heads/master
|
/misc/logger.py
|
import torch
import numpy as np
import os
from datetime import datetime
import logging
from misc import utils
class _Logger:
    """
    Shared plumbing for the training loggers: root-logger/console setup,
    tick bookkeeping and model-snapshot serialization.
    """

    def __init__(self, trainer, tick, loops, logPath = './log/', logStep = 5, logLevel = logging.INFO, device = torch.device('cpu')):
        """Attach a console handler to the root logger and store the cadence state."""
        # Root logger passes everything; the console handler does the filtering
        self.logger = logging.getLogger()
        self.logger.setLevel(logging.DEBUG)
        self.logLevel = int(logLevel)
        self.console = logging.StreamHandler()
        self.console.setLevel(self.logLevel)
        self.console.setFormatter(logging.Formatter('[%(levelname)s]\t%(message)s\t(%(filename)s)'))
        self.logger.addHandler(self.console)
        # Cadence: logStep is expressed in ticks, stored as an image count
        self.tick = int(tick)
        self.loops = int(loops)
        self.logStep = int(logStep * self.tick)
        self.trainer = trainer    # owner object; provides imShown and the networks
        self.logCounter = -1      # index of the last status line written
        self.logFile = 'netStatus.txt'
        self.logPath = utils.createDir(logPath)
        self.device = device

    def _saveSnapshot(self, title=None, stateDict=None):
        """Serialize stateDict under logPath; default name is derived from imShown."""
        baseName = f'modelCheckpoint_{int(self.trainer.imShown)}' if title is None else title
        torch.save(stateDict, os.path.join(self.logPath, baseName + '.pth.tar'))
class Logger(_Logger):
    """
    Logger for the StyleGAN2 training loop: writes the net-status table,
    periodic sample-image grids, and full model/optimizer snapshots.

    All cadences (logStep, saveImageEvery, saveModelEvery) are given in ticks
    and converted to image counts in the constructor.
    """
    def __init__(self, trainer, latentSize = 256, resumeTraining = False, tick=1000, loops=6500,
                 logPath='./exp1/', logStep = 10, saveImageEvery = 20, saveModelEvery = 20, logLevel = None, device = torch.device('cpu')):
        super().__init__(trainer, tick, loops, logPath, logStep, logLevel, device = device)
        self.saveImageEvery = int(saveImageEvery*self.tick)
        self.saveModelEvery = int(saveModelEvery*self.tick)
        self.latentSize = int(latentSize)
        self.resumeTraining = resumeTraining
        # Fixed latent batch for the periodic sample grid: four scaled copies
        # of the same four latents (scales 1, 1/3, -1/3, -1)
        z = utils.getNoise(bs = 4, latentSize = self.latentSize, device = self.device)
        zs = []
        for i in range(4):
            alpha = 2*(3-i)/3
            interp = z*(alpha - 1)
            zs.append(interp)
        self.z = torch.cat(zs, dim=0)
        # Monitoring accumulators: running sums between two log writes
        self.genLoss = 0
        self.criticLoss = 0
        self.criticLossReals = 0
        self.criticLossFakes = 0
        self.ncAppended = 0   # number of critic losses accumulated since last log
        self.ngAppended = 0   # number of generator losses accumulated since last log
        self.snapCounter = 0
        self.imgCounter = 0
        # Outputs
        self.netStatusHeaderShown = False
        self.archFile = 'architecture.txt'
        self.logFile = 'netStatus.txt'
        self.latentsFile = 'latents.txt'

    def appendGLoss(self, gloss):
        """
        Accumulate one generator loss. Also drives the periodic logging
        (snapshot/image/status), since the generator step closes an iteration.
        """
        self.startLogging() #Log according to size of appendGLoss, so call the function when appending
        if self.logLevel > logging.INFO:
            return
        self.genLoss = (self.genLoss + gloss).detach().requires_grad_(False)
        self.ngAppended += 1  # BUGFIX: was `=+ 1`, which pinned the counter at 1 and turned the logged "mean" into a running sum

    def appendCLoss(self, closs, clossReals, clossFakes):
        """
        Accumulate one critic training step's losses (total, reals-only, fakes-only).
        """
        if self.logLevel > logging.INFO:
            return
        self.criticLoss = (self.criticLoss + closs).detach().requires_grad_(False)
        self.criticLossReals = (self.criticLossReals + clossReals).detach().requires_grad_(False)
        self.criticLossFakes = (self.criticLossFakes + clossFakes).detach().requires_grad_(False)
        self.ncAppended += 1  # BUGFIX: was `=+ 1` (see appendGLoss)

    def startLogging(self):
        """
        Fire any logging actions whose cadence has elapsed: model snapshot,
        sample images, and the status line (which also resets the accumulators).
        """
        snapCounter = int(self.trainer.imShown) // self.saveModelEvery
        imgCounter = int(self.trainer.imShown) // self.saveImageEvery
        if snapCounter > self.snapCounter:
            self.saveSnapshot()
            self.snapCounter = snapCounter
        if imgCounter > self.imgCounter:
            self.outputPictures()
            self.imgCounter = imgCounter
        if self.logLevel > logging.INFO:
            return
        logCounter = int(self.trainer.imShown) // self.logStep
        if logCounter > self.logCounter:
            self.logNetStatus()
            # Release memory: reset the accumulators after the status write
            self.genLoss = 0
            self.criticLoss = 0
            self.criticLossReals = 0
            self.criticLossFakes = 0
            self.ncAppended = 0
            self.ngAppended = 0
            torch.cuda.empty_cache()
            self.logCounter = logCounter

    def logNetStatus(self):
        """
        Print and write mean losses and current status of net (resolution, stage, images shown)
        """
        if self.netStatusHeaderShown == False:
            # NOTE(review): header widths look narrower than the separator
            # (14/9/10/10/10/10 dashes) — padding may have been lost upstream
            colNames = f'time and date |iter |genLoss |critLoss |cLossReal |cLossFake '
            sep = '|'.join(['-'*14,'-'*9,'-'*10,'-'*10,'-'*10,'-'*10])
            self.logger.info(colNames)
            self.logger.info(sep)
            f = os.path.join(self.logPath,self.logFile) #Create a new log file
            if not self.resumeTraining:
                utils.writeFile(f, colNames, 'w')
            utils.writeFile(f, sep, 'a')
            self.netStatusHeaderShown = True
        imShown = int(self.trainer.imShown)
        # Average all stats and log (guard against division by zero when nothing was appended)
        gl = self.genLoss.item()/self.ngAppended if self.ngAppended != 0 else 0.
        cl = self.criticLoss.item()/self.ncAppended if self.ncAppended != 0 else 0.
        clr = self.criticLossReals.item()/self.ncAppended if self.ncAppended != 0 else 0.
        clf = self.criticLossFakes.item()/self.ncAppended if self.ncAppended != 0 else 0.
        stats = f' {datetime.now():%H:%M (%d/%m)}'
        leadingSpaces = 9-len(str(imShown))
        stats = stats + "|"+leadingSpaces*" "+str(imShown)
        stats = stats + "| {:9.4f}| {:9.4f}| {:9.4f}| {:9.4f}".format(gl,cl,clr,clf)
        self.logger.info(stats)
        f = os.path.join(self.logPath,self.logFile)
        utils.writeFile(f, stats, 'a')

    def saveSnapshot(self, title=None):
        """
        Save a full training checkpoint: both networks, optimizers, schedulers,
        and all logger bookkeeping needed to resume training seamlessly.
        """
        if title is None:
            title = f'modelCheckpoint_{int(self.trainer.imShown)}_{self.trainer.latentSize}.pth.tar'
        else:
            title = title+'.pth.tar'
        path = os.path.join(self.logPath,title)
        torch.save({'crit':self.trainer.crit.state_dict(),
                    'cOptimizer':self.trainer.cOptimizer.state_dict(),
                    'clrScheduler':self.trainer.clrScheduler.state_dict(),
                    'gen':self.trainer.gen.state_dict(),
                    'gOptimizer':self.trainer.gOptimizer.state_dict(),
                    'glrScheduler':self.trainer.glrScheduler.state_dict(),
                    'imShown':self.trainer.imShown,
                    'loops':self.loops,
                    'tick':self.tick,
                    'logCounter':self.logCounter,
                    'ncAppended':self.ncAppended,
                    'ngAppended':self.ngAppended,
                    'snapCounter':self.snapCounter,
                    'imgCounter':self.imgCounter,
                    'genLoss':self.genLoss,
                    'criticLoss':self.criticLoss,
                    'criticLossReals':self.criticLossReals,
                    'criticLossFakes':self.criticLossFakes,
                    'batchShown': self.trainer.batchShown,
                    'meanPathLength': self.trainer.meanPathLength,
                   }, path)

    def outputPictures(self):
        """
        outputs a grid of 4 x 4 pictures generated from the same latents
        """
        fake = self.trainer.getFakes(z = self.z)[0]
        fName = '_'.join([str(int(self.trainer.resolution)),str(int(self.trainer.imShown))+'.jpg'])
        path = os.path.join(self.logPath,fName)
        utils.saveImage(fake, path, nrow = 4)
class DecoderLogger(_Logger):
    """
    Logger for training the StyleGAN2 decoder: writes the status table,
    periodic latent-difference dumps, and decoder checkpoints.
    """
    def __init__(self, trainer, latentSize = 256, resumeTraining = False, tick=1000, loops=6500,
                 logPath='./exp1/', logStep = 10, saveDiffEvery = 20, saveModelEvery = 20, logLevel = None):
        super().__init__(trainer, tick, loops, logPath, logStep, logLevel)
        self.saveDiffEvery = int(saveDiffEvery*self.tick)
        self.saveModelEvery = int(saveModelEvery*self.tick)
        self.latentSize = int(latentSize)
        self.resumeTraining = resumeTraining
        # Fixed latent batch (kept on CPU) used for the periodic difference dumps
        self.z = utils.getNoise(bs = 16, latentSize = self.latentSize, device = torch.device('cpu'))
        # Monitoring accumulators
        self.loss = 0
        self.appended = 0   # number of losses accumulated since last log
        self.snapCounter = 0
        self.diffCounter = 0
        # Outputs
        self.netStatusHeaderShown = False
        self.archFile = 'architecture.txt'
        self.logFile = 'netStatus.txt'

    def appendLoss(self, loss):
        """
        Accumulate one decoder loss; also drives the periodic logging.
        """
        self.startLogging()
        if self.logLevel > logging.INFO:
            return
        self.loss = (self.loss + loss).detach().requires_grad_(False)
        self.appended += 1  # BUGFIX: was `=+ 1`, which pinned the counter at 1 and broke the averaging in logNetStatus

    def startLogging(self):
        """
        Fire any logging actions whose cadence has elapsed: checkpoint,
        difference dump, and the status line (which resets the accumulators).
        """
        snapCounter = int(self.trainer.imShown) // self.saveModelEvery
        diffCounter = int(self.trainer.imShown) // self.saveDiffEvery
        if snapCounter > self.snapCounter:
            self.saveSnapshot()
            self.snapCounter = snapCounter
        if diffCounter > self.diffCounter:
            self.outputDifferences()
            self.diffCounter = diffCounter
        if self.logLevel > logging.INFO:
            return
        logCounter = int(self.trainer.imShown) // self.logStep
        if logCounter > self.logCounter:
            self.logNetStatus()
            # Release memory: reset the accumulators after the status write
            self.loss = 0
            self.appended = 0
            torch.cuda.empty_cache()
            self.logCounter = logCounter

    def logNetStatus(self):
        """
        Print and write mean loss and current status of net (resolution, images shown)
        """
        if self.netStatusHeaderShown == False:
            colNames = f'time and date |iter |loss '
            sep = '|'.join(['-'*14,'-'*9,'-'*10])
            self.logger.info(colNames)
            self.logger.info(sep)
            f = os.path.join(self.logPath,self.logFile) #Create a new log file
            if not self.resumeTraining:
                utils.writeFile(f, colNames, 'w')
            utils.writeFile(f, sep, 'a')
            self.netStatusHeaderShown = True
        imShown = int(self.trainer.imShown)
        # Average all stats and log (guard against division by zero)
        dl = self.loss.item()/self.appended if self.appended != 0 else 0.
        stats = f' {datetime.now():%H:%M (%d/%m)}'
        leadingSpaces = 9-len(str(imShown))
        stats = stats + "|"+leadingSpaces*" "+str(imShown)
        stats = stats + "| {:9.4f}".format(dl)
        self.logger.info(stats)
        f = os.path.join(self.logPath,self.logFile)
        utils.writeFile(f, stats, 'a')

    def saveSnapshot(self, title=None):
        """
        Save a decoder checkpoint: network, optimizer, scheduler and the
        logger bookkeeping needed to resume training.
        """
        if title is None:
            # BUGFIX: filename had a stray ')' (…_{latentSize})decoder…) — use '_' separator
            title = f'modelCheckpoint_{int(self.trainer.imShown)}_{self.latentSize}_decoder.pth.tar'
        else:
            title = title+'.pth.tar'
        path = os.path.join(self.logPath,title)
        torch.save({'dec':self.trainer.dec.state_dict(),
                    'dOptimizer':self.trainer.dOptimizer.state_dict(),
                    'dlrScheduler':self.trainer.dlrScheduler.state_dict(),
                    'imShown':self.trainer.imShown,
                    'loops':self.loops,
                    'tick':self.tick,
                    'logCounter':self.logCounter,
                    'appended':self.appended,
                    'snapCounter':self.snapCounter,
                    'diffCounter':self.diffCounter,
                    'dLoss':self.loss,
                   }, path)

    def outputDifferences(self):
        """
        Dump the differences between the mapping network's w and the decoder's
        reconstruction for the same 16 fixed random z inputs.
        """
        w = self.trainer.mapping(self.z.to(self.trainer.device))
        fake = self.trainer.gen(self.z.to(self.trainer.device))
        decoded = self.trainer.dec(fake)
        diff = (w - decoded).cpu()
        fName = '_'.join([str(diff.size(1)),str(int(self.trainer.imShown))+'.pt'])
        # BUGFIX: save inside the log directory, as every other output does
        # (the original saved relative to the current working directory)
        torch.save(diff, os.path.join(self.logPath, fName))
|
{"/generator.py": ["/models/generatorNetwork.py", "/config.py"], "/models/generatorNetwork.py": ["/models/generatorBlocks.py", "/models/commonBlocks.py"], "/models/criticNetwork.py": ["/models/commonBlocks.py"], "/trainer.py": ["/models/generatorNetwork.py", "/models/criticNetwork.py", "/misc/dataLoader.py", "/misc/logger.py", "/config.py"], "/decoderTrainer.py": ["/misc/logger.py", "/models/generatorNetwork.py", "/models/decoderNetwork.py", "/config.py"], "/models/decoderNetwork.py": ["/models/commonBlocks.py"], "/models/generatorBlocks.py": ["/models/commonBlocks.py"]}
|
10,449
|
alenmora/styleGAN
|
refs/heads/master
|
/misc/utils.py
|
from glob import glob
import os
import torch
import numpy as np
import torchvision
import torchvision.transforms as transforms
import torchvision.utils as vutils
from torch import FloatTensor as FT
from datetime import datetime
import math
import PIL.Image as Image
import animeface
from shutil import copyfile
def resize(x, size):
    """
    Resize a tensor image to `size` via a PIL round-trip.

    x: image tensor (CHW). Returns the resized tensor.
    """
    transform = transforms.Compose([
        transforms.ToPILImage(),  # BUGFIX: was transforms.toPILImage — AttributeError at runtime
        transforms.Resize(size),  # BUGFIX: transforms.Scale was deprecated and removed from torchvision
        transforms.ToTensor(),
    ])
    return transform(x)
def writeFile(path, content, mode):
    """
    Write `content` followed by a newline to the file at `path`.

    mode: any text open() mode, typically 'w' (truncate) or 'a' (append).
    """
    # Context manager guarantees the handle is closed even if a write raises
    with open(path, mode) as file:
        file.write(content)
        file.write('\n')
def createDir(dir):
    """
    Ensure the directory `dir` exists, reporting whether it was newly
    created or already present. Returns `dir` unchanged.
    """
    try:
        os.makedirs(dir)
    except FileExistsError:
        print(f'Using previously created folder {dir}')
    else:
        print(f'Created new folder at {dir}')
    return dir
def getNoise(bs, latentSize, device):
    """
    Draw a (bs, latentSize) float tensor of standard-normal noise on `device`.
    """
    noise = FT(bs, latentSize)
    noise.normal_()
    return noise.to(device=device)
# Arrange a batch of images into a single padded grid tensor
def makeImagesGrid(tensor, nrow=8, padding=2, pad_value=0):
    """
    Arrange a mini-batch of images into one 3-channel grid tensor.

    tensor: a 4D (NCHW) tensor, a single image (CHW or HW), or a list of
        image tensors (stacked into a batch).
    nrow: maximum number of images per grid row.
    padding: pixel border inserted between (and around) images.
    pad_value: fill value for the padding.
    Raises TypeError for anything that is not a tensor / list of tensors.
    """
    if not (torch.is_tensor(tensor) or
            (isinstance(tensor, list) and all(torch.is_tensor(t) for t in tensor))):
        raise TypeError('tensor or list of tensors expected, got {}'.format(type(tensor)))
    # if list of tensors, convert to a 4D mini-batch Tensor
    if isinstance(tensor, list):
        tensor = torch.stack(tensor, dim=0)
    if tensor.dim() == 2:  # single image H x W
        tensor = tensor.view(1, tensor.size(0), tensor.size(1))
    if tensor.dim() == 3:  # single image: no grid needed, returned directly
        if tensor.size(0) == 1:  # if single-channel, convert to 3-channel
            tensor = torch.cat((tensor, tensor, tensor), 0)
        return tensor
    if tensor.dim() == 4 and tensor.size(1) == 1:  # single-channel images
        tensor = torch.cat((tensor, tensor, tensor), 1)
    # make the mini-batch of images into a grid (xmaps columns x ymaps rows)
    nmaps = tensor.size(0)
    xmaps = min(nrow, nmaps)
    ymaps = int(math.ceil(float(nmaps) / xmaps))
    height, width = int(tensor.size(2) + padding), int(tensor.size(3) + padding)
    grid = tensor.new(3, height * ymaps + padding, width * xmaps + padding).fill_(pad_value)
    k = 0
    for y in range(ymaps):
        for x in range(xmaps):
            if k >= nmaps:
                break
            # Copy image k into its (y, x) cell; narrow() gives an in-place view
            grid.narrow(1, y * height + padding, height - padding)\
                .narrow(2, x * width + padding, width - padding)\
                .copy_(tensor[k])
            k = k + 1
    return grid
def saveImage(tensor, filename, nrow=8, padding=2, pad_value=0):
    """Save a given Tensor into an image file.

    Args:
        tensor (Tensor or list): Image to be saved. If given a mini-batch tensor,
            saves the tensor as a grid of images by calling ``makeImagesGrid``.
        filename: destination path; format is inferred from the extension.
        nrow, padding, pad_value: forwarded to ``makeImagesGrid``.
    """
    grid = makeImagesGrid(tensor.cpu(), nrow=nrow, padding=padding, pad_value=pad_value)
    # [0,1] float CHW -> [0,255] byte HWC for PIL
    pixels = grid.mul(255).clamp(0, 255).byte().permute(1, 2, 0).numpy()
    Image.fromarray(pixels).save(filename)
def switchTrainable(net,isTrainable):
    """
    Enable or disable gradient tracking on every parameter of `net`.
    """
    for param in net.parameters():
        param.requires_grad = isTrainable
def debugMemory():
    """
    Print the process peak RSS and a census of live tensors grouped by
    (device, dtype, shape). Diagnostic helper; returns None.
    """
    import collections, gc, resource, torch
    print('maxrss = {}'.format(
        resource.getrusage(resource.RUSAGE_SELF).ru_maxrss))
    census = collections.Counter(
        (str(obj.device), obj.dtype, tuple(obj.shape))
        for obj in gc.get_objects()
        if torch.is_tensor(obj))
    for entry in sorted(census.items()):
        print('{}\t{}'.format(*entry))
def cleanImagesFolder(curPath, newPath, res = None, searchFaces = False, faceThreshold = 0.5):
    """
    Copy the .jpg images from curPath into newPath, keeping only those that
    pass the filters.

    res: if given, skip images whose smallest dimension is below `res`.
    searchFaces: if True, keep only images where the animeface library
        detects a face with likelihood >= faceThreshold.
    Unreadable files (OSError) are silently skipped.
    """
    createDir(newPath)
    images = glob(os.path.join(curPath, '*.jpg'))
    for image in images:
        try:
            im = Image.open(image)
            if res != None:
                if min(im.size) < res: continue  # too small for the target resolution
            if searchFaces:
                faces = animeface.detect(im)
                if not faces: continue #Get rid of garbage
                # NOTE(review): only the first detected face's likelihood is checked
                if (faces[0].likelihood < faceThreshold): continue #Get rid of garbage
            imName = image.split('/')[-1]  # assumes POSIX path separators — TODO confirm portability
            newImage = os.path.join(newPath,imName)
            copyfile(image,newImage)
        except OSError:
            continue
|
{"/generator.py": ["/models/generatorNetwork.py", "/config.py"], "/models/generatorNetwork.py": ["/models/generatorBlocks.py", "/models/commonBlocks.py"], "/models/criticNetwork.py": ["/models/commonBlocks.py"], "/trainer.py": ["/models/generatorNetwork.py", "/models/criticNetwork.py", "/misc/dataLoader.py", "/misc/logger.py", "/config.py"], "/decoderTrainer.py": ["/misc/logger.py", "/models/generatorNetwork.py", "/models/decoderNetwork.py", "/config.py"], "/models/decoderNetwork.py": ["/models/commonBlocks.py"], "/models/generatorBlocks.py": ["/models/commonBlocks.py"]}
|
10,450
|
alenmora/styleGAN
|
refs/heads/master
|
/models/criticNetwork.py
|
import torch
import torch.nn as nn
import numpy as np
from torch.nn import functional as F
from models.commonBlocks import Linear, Conv2D, ModulatedConv2D, getActivation
class MiniBatchStdDevLayer(nn.Module):
    """
    Add std to last layer group of critic to improve variance.

    Appends one extra channel holding the mean (over channels and pixels)
    standard deviation computed across minibatch groups of size `groupSize`.
    The batch size must be divisible by `groupSize`.
    """
    def __init__(self, groupSize = 4):
        super().__init__()
        self.groupSize = groupSize

    def forward(self, x):
        shape = list(x.size())                                              # NCHW - Initial size
        xStd = x.view(self.groupSize, -1, shape[1], shape[2], shape[3])     # GMCHW - split minibatch into M groups of size G (= groupSize)
        # BUGFIX: subtract out-of-place. The original in-place `-=` wrote
        # through the view and mutated x itself, so the activations being
        # concatenated below (and scored by the critic) were corrupted.
        xStd = xStd - torch.mean(xStd, dim=0, keepdim=True)                 # GMCHW - Subtract mean over groups
        xStd = torch.mean(xStd ** 2, dim=0, keepdim=False)                  # MCHW - Calculate variance over groups
        xStd = (xStd + 1e-08) ** 0.5                                        # MCHW - Calculate std dev over groups
        xStd = torch.mean(xStd.view(xStd.shape[0], -1), dim=1, keepdim=True).view(-1, 1, 1, 1)
                                                                            # M111 - Take mean over CHW
        xStd = xStd.repeat(self.groupSize, 1, shape[2], shape[3])           # N1HW - Expand to same shape as x with one channel
        output = torch.cat([x, xStd], 1)
        return output

    def __repr__(self):
        return self.__class__.__name__ + '(Group Size = %s)' % (self.groupSize)
class Critic(nn.Module):
    """
    StyleGAN2 critic (discriminator), supporting the 'skip' and 'resnet'
    architectures. Feature-map counts per resolution stage are controlled by
    fmapBase/fmapDecay and clamped to [fmapMin, fmapMax].
    """
    def __init__(self, resolution = 64, fmapBase = 4096, fmapDecay = 1., fmapMax = 256, fmapMin = 1, activation = 'lrelu',
                 scaleWeights = True, inCh = 3, stdGroupSize = 8, downsample = 'bilinear', mode = 'resnet', asRanker = False, **kwargs):
        """
        resolution: input image resolution; must be a power of 2, >= 4.
        inCh: number of image channels (e.g. 3 for RGB).
        stdGroupSize: group size for the minibatch-stddev layer (<=1 disables it).
        mode: 'skip' (per-resolution fromRGB) or 'resnet' (residual connections).
        asRanker: when True, the minibatch-stddev layer is bypassed in the forward pass.
        **kwargs: ignored; allows passing a shared config dict.
        """
        super().__init__()
        self.resolution = resolution
        self.fmapBase = fmapBase
        self.fmapDecay = fmapDecay
        self.fmapMax = fmapMax
        self.fmapMin = fmapMin
        self.activation = getActivation(activation)
        self.scaleWeights = scaleWeights
        self.inCh = inCh
        self.stdGroupSize = stdGroupSize
        self.downsample = downsample
        assert mode in ['skip','resnet'], f'Critic ERROR: Invalid synthesis network architecture {mode}'
        self.mode = mode
        self.asRanker = asRanker
        rlog2 = int(np.log2(self.resolution))
        assert self.resolution == 2**(rlog2) and self.resolution >= 4, 'Critic ERROR: The resolution should be a power of 2 greater than 4'
        def nf(stage): #Get the number of channels per layer
            return np.clip(int(self.fmapBase / (2.0 ** (stage * self.fmapDecay))), self.fmapMin, self.fmapMax)
        self.nLayers = 2*(rlog2-1)-1 #4x4 requires 1 (conv) layer, 8x8 requires 3, 16x16 requires 5,...
        self.convs = nn.ModuleList()   #Keeps the 2D convolutional modules
        self.fromRGB = nn.ModuleList() #Keeps the FromRGB modules
        self.lp = nn.ModuleList()      #Keeps the 2DConv modules for linear projection when performing resnet architecture
        def layer(kernel, layerId): #Constructor of layers
            stage = int((layerId+1)//2) #Resolution stage: (4x4 --> 0), (8x8 --> 1), (16x16 --> 2) ...
            inCh = nf(stage) if layerId % 2 else nf(stage+1) #The even layers receive the input of the resolution block, so their number of inCh must be the same of the outCh for the previous stage
            outCh = nf(stage)
            if not layerId % 2: #Even layer
                if self.mode == 'skip': #add the fromRGB module for the given resolution
                    self.fromRGB.append(nn.Sequential(
                                        Conv2D(inCh=self.inCh, outCh=inCh, kernelSize=1, scaleWeights=self.scaleWeights),
                                        self.activation,
                                    ))
                elif self.mode == 'resnet': #Add the convolution modules for properly matching the channels during the residual connection
                    if layerId > 0: # (the first layer does not require this module)
                        self.lp.append(Conv2D(inCh=inCh, outCh=outCh, kernelSize=1))
            #Add the required convolutional module
            self.convs.append(Conv2D(inCh=inCh, outCh=outCh, kernelSize=kernel))
        for layerId in range(self.nLayers): #Create the layers from to self.nLayers-1
            layer(kernel=3, layerId=layerId)
        if self.mode == 'resnet': #Add the only toRGB module in the resnet architecture
            self.fromRGB.append(nn.Sequential(
                                Conv2D(inCh=self.inCh, outCh=nf((self.nLayers+1)//2), kernelSize=1, scaleWeights=self.scaleWeights),
                                self.activation,
                            ))
        if self.stdGroupSize > 1:
            self.miniBatchLayer = MiniBatchStdDevLayer(self.stdGroupSize)
        # The stddev layer appends one channel before the fully-connected head
        inCh = nf(0) if self.stdGroupSize <= 1 else nf(0)+1
        self.fullyConnected = nn.Sequential(Linear(inCh=inCh*4*4, outCh=nf(0), scaleWeights=self.scaleWeights),
                                            self.activation,
                                            Linear(inCh=nf(0),outCh=1,scaleWeights=self.scaleWeights))

    def forward(self, x):
        """
        Forward function.
        x (tensor): the input image batch (N x inCh x resolution x resolution)
        Returns the critic score, one scalar per batch element.
        """
        if self.mode == 'skip':
            return self.forwardSkip_(x)
        elif self.mode == 'resnet':
            return self.forwardResnet_(x)

    def applyOneLayer(self, x, layer):
        """
        Apply one layer of the critic to
        the tensor x
        """
        x = self.convs[layer](x)
        return self.activation(x)

    def applyLastLayer(self, x):
        """Minibatch-stddev (unless ranking), flatten, and score via the FC head."""
        if self.stdGroupSize > 1 and not self.asRanker:
            x = self.miniBatchLayer(x)
        x = x.view(x.size(0),-1) #Unroll
        return self.fullyConnected(x)

    def forwardSkip_(self, x):
        """
        Perform a forward pass using
        the architecture with skip connections
        """
        t = 0
        for layer in range(self.nLayers-1,-1,-1):
            if not layer % 2: #Even layer: get the fromRGB version of the downsampled image
                t = self.fromRGB[layer//2](x)+t
            t = self.applyOneLayer(t, layer)
            if layer % 2: #Downsample
                t = F.interpolate(t, scale_factor=0.5, mode=self.downsample, align_corners=False)
                x = F.interpolate(x, scale_factor=0.5, mode=self.downsample, align_corners=False)
        t = self.applyLastLayer(t)
        return t

    def forwardResnet_(self, x):
        """
        Perform a forward pass using
        the architecture with residual networks
        """
        x = self.fromRGB[0](x) #Use the only fromRGB for this net
        carryover = None
        for layer in range(self.nLayers-1,-1,-1): #Apply all layers
            if not layer % 2: #Even layer
                if carryover is not None:
                    # Residual merge, scaled to preserve variance
                    x = (carryover + x)/np.sqrt(2)
                carryover = x
            x = self.applyOneLayer(x, layer)
            if layer % 2: #Odd layer, downsample
                x = F.interpolate(x, scale_factor=0.5, mode=self.downsample, align_corners=False)
                carryover = self.lp[layer//2](carryover)
                carryover = F.interpolate(carryover, scale_factor=0.5, mode=self.downsample, align_corners=False)
        x = self.applyLastLayer(x)
        return x
|
{"/generator.py": ["/models/generatorNetwork.py", "/config.py"], "/models/generatorNetwork.py": ["/models/generatorBlocks.py", "/models/commonBlocks.py"], "/models/criticNetwork.py": ["/models/commonBlocks.py"], "/trainer.py": ["/models/generatorNetwork.py", "/models/criticNetwork.py", "/misc/dataLoader.py", "/misc/logger.py", "/config.py"], "/decoderTrainer.py": ["/misc/logger.py", "/models/generatorNetwork.py", "/models/decoderNetwork.py", "/config.py"], "/models/decoderNetwork.py": ["/models/commonBlocks.py"], "/models/generatorBlocks.py": ["/models/commonBlocks.py"]}
|
10,451
|
alenmora/styleGAN
|
refs/heads/master
|
/trainer.py
|
import torch
import torch.nn as nn
import numpy as np
import argparse
from torch.optim import Adam, lr_scheduler
import torch.nn.functional as F
import torch.autograd as autograd
from datetime import datetime
from models.generatorNetwork import Generator
from models.criticNetwork import Critic
from misc.dataLoader import DataLoader
from misc.logger import Logger
from misc import utils
import os
import math
import copy
from random import random
def applyLossScaling(value):
    """Scale `value` by 2**value (loss-scaling helper)."""
    return value * 2 ** value
def undoLossScaling(value):
    """Scale `value` by 2**(-value) (inverse-direction loss-scaling helper)."""
    return value * 2 ** (-value)
def NonSaturatingLoss(value, truth):
    """
    Non-saturating GAN loss: mean of softplus(-truth * value).
    truth is +1 for samples that should score high, -1 otherwise.
    """
    return F.softplus(-truth * value).mean()
def WassersteinLoss(value, truth):
    """
    Wasserstein loss: mean of -truth * value.
    truth is +1 for samples that should score high, -1 otherwise.
    """
    return (-truth * value).mean()
class Trainer:
"""
Trainer class with hyperparams, log, train function etc.
"""
def __init__(self, opt):
    """
    Build the full training state from the config node `opt`: device, logger,
    data loader, generator/critic networks, their optimizers and schedulers,
    then optionally restore pre-trained weights.
    """
    lopt = opt.logger
    topt = opt.trainer
    mopt = opt.model
    gopt = opt.model.gen
    copt = opt.model.crit
    goopt = opt.optim.gen
    coopt = opt.optim.crit
    #CUDA configuration
    if opt.device == 'cuda' and torch.cuda.is_available():
        os.environ['CUDA_VISIBLE_DEVICES'] = opt.deviceId
        torch.backends.cudnn.benchmark = True
    else:
        opt.device = 'cpu'
    self.device = torch.device(opt.device)
    #logger
    self.logger_ = Logger(self, gopt.latentSize, topt.resumeTraining, opt.tick, opt.loops, lopt.logPath, lopt.logStep,
                          lopt.saveImageEvery, lopt.saveModelEvery, lopt.logLevel, self.device)
    self.logger = self.logger_.logger
    #Logging configuration parameters
    if opt.device == 'cuda':
        num_gpus = len(opt.deviceId.split(','))
        self.logger.info("Using {} GPUs.".format(num_gpus))
        self.logger.info("Training on {}.\n".format(torch.cuda.get_device_name(0)))
    #data loader
    dlopt = opt.dataLoader
    self.dataLoader = DataLoader(dlopt.dataPath, dlopt.resolution, dlopt.noChannels, dlopt.batchSize, dlopt.numWorkers)
    self.resolution, self.nCh = self.dataLoader.resolution, self.dataLoader.nCh
    # training opt
    assert opt.tick > 0, self.logger.error(f'The number of ticks should be a positive integer, got {opt.tick} instead')
    self.tick = float(opt.tick)
    assert opt.loops > 0, self.logger.error(f'The number of ticks should be a positive integer, got {opt.loops} instead')
    self.loops = int(opt.loops)
    self.imShown = 0
    self.batchShown = self.imShown // self.dataLoader.batchSize
    assert topt.lossFunc in ['NSL','WD'], self.logger.error(f'The specified loss model is not supported. Please choose between "NSL" or "WD"')
    self.lossFunc = topt.lossFunc
    self.criterion = NonSaturatingLoss if self.lossFunc == 'NSL' else WassersteinLoss
    self.applyLossScaling = bool(topt.applyLossScaling)
    self.paterm = topt.paterm
    self.lambg = float(topt.lambg)
    self.gLazyReg = max(topt.gLazyReg,1)      #generator regularization cadence (in batches)
    self.styleMixingProb = float(topt.styleMixingProb)
    self.meanPathLength = 0.                  #running mean for path-length regularization
    self.plDecay = topt.meanPathLengthDecay
    self.pathRegWeight = topt.pathLengthRWeight
    assert topt.nCritPerGen > 0, self.logger.error(f'Trainer ERROR: The number of critic training loops per generator loop should be an integer >= 1 (got {topt.nCritPerGen})')
    self.nCritPerGen = int(topt.nCritPerGen)
    self.lambR2 = float(topt.lambR2) if topt.lambR2 else 0 #lambda R2
    self.obj = float(topt.obj) if topt.obj else 1 #objective value (1-GP)
    self.lambR1 = float(topt.lambR1) if topt.lambR1 else 0 #lambda R1 (BUGFIX: the gate tested topt.lambR2 — copy-paste)
    self.epsilon = float(topt.epsilon) if topt.epsilon else 0 #epsilon (drift loss)
    self.cLazyReg = max(topt.cLazyReg,1)      #critic regularization cadence (in batches)
    self.kUnroll = int(topt.unrollCritic) if topt.unrollCritic else 0
    assert self.kUnroll >= 0, self.logger.error(f'Trainer ERROR: The unroll parameter is less than zero ({self.kUnroll})')
    #Common model parameters
    common = {
        'fmapMax': mopt.fmapMax,
        'fmapMin': mopt.fmapMin,
        'fmapDecay': mopt.fmapDecay,
        'fmapBase': mopt.fmapBase,
        'activation': mopt.activation,
        'upsample': mopt.sampleMode,
        'downsample': mopt.sampleMode
    }
    #Generator model parameters
    self.gen = Generator(**common, **gopt).to(self.device)
    self.latentSize = self.gen.mapping.latentSize
    self.logger.info(f'Generator constructed. Number of parameters {sum([np.prod([*p.size()]) for p in self.gen.parameters()])}')
    #Critic model parameters
    self.crit = Critic(**mopt, **copt).to(self.device)
    self.logger.info(f'Critic constructed. Number of parameters {sum([np.prod([*p.size()]) for p in self.crit.parameters()])}')
    #Generator optimizer parameters (unpacking relies on the config node's key order)
    glr, beta1, beta2, epsilon, lrDecay, lrDecayEvery, lrWDecay = list(goopt.values())
    assert lrDecay >= 0 and lrDecay <= 1, self.logger.error('Trainer ERROR: The decay constant for the learning rate of the generator must be a constant between [0, 1]')
    assert lrWDecay >= 0 and lrWDecay <= 1, self.logger.error('Trainer ERROR: The weight decay constant for the generator must be a constant between [0, 1]')
    self.gOptimizer = Adam(filter(lambda p: p.requires_grad, self.gen.parameters()), lr = glr, betas=(beta1, beta2), weight_decay=lrWDecay, eps=epsilon)
    if lrDecayEvery and lrDecay:
        self.glrScheduler = lr_scheduler.StepLR(self.gOptimizer, step_size=lrDecayEvery*self.tick, gamma=lrDecay)
    else:
        self.glrScheduler = None
    self.logger.info(f'Generator optimizer constructed')
    #Critic optimizer parameters
    clr, beta1, beta2, epsilon, lrDecay, lrDecayEvery, lrWDecay = list(coopt.values())
    assert lrDecay >= 0 and lrDecay <= 1, self.logger.error('Trainer ERROR: The decay constant for the learning rate of the critic must be a constant between [0, 1]')
    assert lrWDecay >= 0 and lrWDecay <= 1, self.logger.error('Trainer ERROR: The weight decay constant for the critic must be a constant between [0, 1]')
    self.cOptimizer = Adam(filter(lambda p: p.requires_grad, self.crit.parameters()), lr = clr, betas=(beta1, beta2), weight_decay=lrWDecay, eps=epsilon)
    if lrDecayEvery and lrDecay:
        # BUGFIX: the critic scheduler was built on self.gOptimizer (copy-paste),
        # so stepping it decayed the *generator* learning rate
        self.clrScheduler = lr_scheduler.StepLR(self.cOptimizer, step_size=lrDecayEvery*self.tick, gamma=lrDecay)
    else:
        self.clrScheduler = None
    self.logger.info(f'Critic optimizer constructed')
    self.preWtsFile = opt.preWtsFile
    self.resumeTraining = bool(topt.resumeTraining)
    self.loadPretrainedWts(resumeTraining = self.resumeTraining)
    self.logger.info(f'The trainer has been instantiated.... Starting step: {self.imShown}. Resolution: {self.resolution}')
    self.logArchitecture(clr,glr)
def logArchitecture(self, clr, glr):
    """
    Log the loss model and hyperparameters to the console, and dump them
    together with both network architectures into the architecture file.
    """
    lossLine = f'Cost function model: {self.lossFunc}\n'
    paramsLine = (f'HYPERPARAMETERS - res = {self.resolution}|bs = {self.dataLoader.batchSize}|cLR = {clr}|gLR = {glr}|lambdaR2 = {self.lambR2}|'
                  f'obj = {self.obj}|lambdaR1 = {self.lambR1}|epsilon = {self.epsilon}|{self.loops} loops, showing {self.tick} images per loop'
                  f'|Using pulling away regularization? {"Yes" if self.paterm else "No"}')
    archDump = '\n' + str(self.crit) + '\n\n' + str(self.gen) + '\n\n'
    archPath = os.path.join(self.logger_.logPath, self.logger_.archFile)
    self.logger.info(lossLine+paramsLine)
    self.logger.debug(archDump)
    utils.writeFile(archPath, lossLine+paramsLine+archDump, 'w')
def loadPretrainedWts(self, resumeTraining = False):
    """
    Load network weights from self.preWtsFile if it exists.

    When resumeTraining is True, also restore the optimizer/scheduler states
    and all trainer/logger bookkeeping from the checkpoint.
    Returns True on success, False otherwise (errors are logged, not raised).
    """
    dir = self.preWtsFile
    if os.path.isfile(dir):
        try:
            # map_location keeps the load device-agnostic (CPU-safe)
            stateDict = torch.load(dir, map_location=lambda storage, loc: storage)
            self.crit.load_state_dict(stateDict['crit'])
            self.gen.load_state_dict(stateDict['gen'], strict=False) #Since the cached noise buffers are initialized at None
            self.logger.debug(f'Loaded pre-trained weights from {dir}')
            if resumeTraining:
                self.imShown = stateDict['imShown']
                self.loops = stateDict['loops']
                self.tick = stateDict['tick']
                self.logger_.genLoss = stateDict['genLoss']
                self.logger_.criticLoss = stateDict['criticLoss']
                self.logger_.criticLossReals = stateDict['criticLossReals']
                self.logger_.criticLossFakes = stateDict['criticLossFakes']
                self.logger_.logCounter = stateDict['logCounter']
                self.logger_.ncAppended = stateDict['ncAppended']
                self.logger_.ngAppended = stateDict['ngAppended']
                self.logger_.snapCounter = stateDict['snapCounter']
                self.logger_.imgCounter = stateDict['imgCounter']
                self.cOptimizer.load_state_dict(stateDict['cOptimizer'])
                self.gOptimizer.load_state_dict(stateDict['gOptimizer'])
                # NOTE(review): assumes the schedulers were constructed (not None)
                # whenever the checkpoint contains their states — confirm configs match
                self.clrScheduler.load_state_dict(stateDict['clrScheduler'])
                self.glrScheduler.load_state_dict(stateDict['glrScheduler'])
                self.batchShown = stateDict['batchShown']
                self.meanPathLength = stateDict['meanPathLength']
                self.logger.debug(f'And the optimizers states as well')
            return True
        except Exception as e:
            # Best-effort load: log and fall back to training from scratch
            self.logger.error(f'ERROR: The weights in {dir} could not be loaded\n {str(e)}\n Proceding from zero...')
            return False
    else:
        self.logger.error(f'ERROR: The file {dir} does not exist. Proceding from zero...')
    return False
def getReals(self, n = None):
    """
    Fetch n real images from the data loader, moved to the training device.
    """
    batch = self.dataLoader.get(n)
    return batch.to(device = self.device)
def getFakes(self, n = None, z = None):
    """
    Generate n fake images.

    When z is not given, a fresh latent batch is drawn and style mixing is
    applied with probability self.styleMixingProb using a second, per-row
    normalized latent batch. Returns a list: the generator output(s)
    followed by the latents z that produced them.
    """
    if n is None: n = self.dataLoader.batchSize
    if z is None:
        z = utils.getNoise(bs = n, latentSize = self.latentSize, device = self.device)
        if self.styleMixingProb and random() < self.styleMixingProb:
            zmix = utils.getNoise(bs = n, latentSize = self.latentSize, device = self.device)
            # Normalize the mixing latents per row (zero mean, unit std)
            zmix = (zmix - zmix.mean(dim=1, keepdim=True))/(zmix.std(dim=1, keepdim=True)+1e-8)
            output = self.gen(z, zmix = zmix)
        else:
            output = self.gen(z)
    else:
        # Caller-supplied latents are fed through unmodified (no style mixing)
        output = self.gen(z)
    if isinstance(output, list):
        return [*output, z]
    else:
        return [output, z]
def getBatchReals(self):
    """
    Return one full batch of real images on the training device.
    """
    batch = self.dataLoader.get_batch()
    return batch.to(device = self.device)
def getBatchFakes(self):
    """
    Return a batch of fake images plus the latents that generated them
    (delegates to getFakes with default arguments).
    """
    return self.getFakes()
def R2GradientPenalization(self, reals, fakes):
    """
    WGAN-GP-style penalty: pushes the critic's gradient norm on random
    real/fake interpolates towards self.obj, normalized by obj**2.
    """
    # Per-sample interpolation coefficient in [0, 1)
    alpha = torch.rand(reals.size(0), 1, 1, 1, device=reals.device)
    interpols = (alpha*reals + (1-alpha)*fakes).detach().requires_grad_(True)
    cOut = self.crit(interpols).sum()
    if self.applyLossScaling:
        cOut = applyLossScaling(cOut)
    # Gradient of the critic output w.r.t. the interpolates; create_graph
    # keeps it differentiable so the penalty can be backpropagated
    ddx = autograd.grad(outputs=cOut, inputs=interpols,
                        grad_outputs = torch.ones_like(cOut,device=self.device),
                        create_graph = True, retain_graph=True, only_inputs=True)[0]
    ddx = ddx.view(ddx.size(0), -1)
    if self.applyLossScaling:
        ddx = undoLossScaling(ddx)
    return ((ddx.norm(dim=1)-self.obj).pow(2)).mean()/(self.obj+1e-8)**2
def R1GradientPenalization(self, reals):
reals.requires_grad_(True)
cOut = self.crit(reals).sum()
if self.applyLossScaling:
cOut = applyLossScaling(cOut)
ddx = autograd.grad(outputs=cOut, inputs=reals,
grad_outputs = torch.ones_like(cOut,device=self.device),
create_graph = True, retain_graph=True, only_inputs=True)[0]
ddx = ddx.view(ddx.size(0), -1)
if self.applyLossScaling:
ddx = undoLossScaling(ddx)
return 0.5*(ddx.pow(2).sum(dim=1)).mean()
def GradientPathRegularization(self, fakes, latents):
noise = torch.randn_like(fakes) / math.sqrt(fakes.size(2)*fakes.size(3))
ddx = autograd.grad(outputs=(fakes*noise).sum(), inputs=latents, create_graph=True)[0]
pathLengths = ddx.norm(dim=1)
if self.meanPathLength == 0:
self.meanPathLength = pathLengths.mean()
else:
self.meanPathLength = self.meanPathLength + self.plDecay*(pathLengths.mean() - self.meanPathLength)
self.meanPathLength = self.meanPathLength.detach()
return (pathLengths - self.meanPathLength).pow(2).mean()
    def trainCritic(self):
        """
        Perform one optimization step on the critic and log the losses.

        Regularization terms (R2 penalty, drift term, R1 penalty) are applied
        lazily: only once every self.cLazyReg batches.
        """
        # Only the critic's weights are updated during this step
        utils.switchTrainable(self.crit, True)
        utils.switchTrainable(self.gen, False)
        # real
        real = self.dataLoader.get_batch().to(self.device)
        cRealOut = self.crit(x=real)
        # fake (detached so no gradients flow back into the generator)
        fake, *_ = self.getBatchFakes()
        cFakeOut = self.crit(x=fake.detach())
        lossReals = self.criterion(cRealOut, truth = 1)
        lossFakes = self.criterion(cFakeOut, truth = -1)
        loss = lossReals+lossFakes
        # Lazy regularization: expensive penalties only every cLazyReg batches
        if self.batchShown % self.cLazyReg == self.cLazyReg-1:
            if self.lambR2:
                # Scaled by cLazyReg to compensate for the reduced frequency
                loss += self.cLazyReg*self.lambR2*self.R2GradientPenalization(real, fake)
            if self.epsilon:
                # Drift term: keeps the critic's real-sample output near zero
                loss += self.epsilon*(cRealOut**2).mean()
            if self.lambR1:
                # NOTE(review): unlike the R2 term, R1 is not multiplied by
                # cLazyReg here -- confirm whether that is intentional
                loss += self.lambR1*self.R1GradientPenalization(real)
        self.cOptimizer.zero_grad()
        loss.backward(); self.cOptimizer.step()
        if self.clrScheduler is not None: self.clrScheduler.step() #Reduce learning rate
        self.logger_.appendCLoss(loss, lossReals, lossFakes)
    def trainGenerator(self):
        """
        Perform one optimization step on the generator and log the loss.

        Path-length regularization and the pa-term are applied lazily, once
        every self.gLazyReg batches.

        Returns:
            int-sized batch dimension of the generated images, used by the
            caller to count the images shown.
        """
        # Only the generator's weights are updated during this step
        utils.switchTrainable(self.gen, True)
        utils.switchTrainable(self.crit, False)
        fake, *latents = self.getBatchFakes()
        cFakeOut = self.crit(fake)
        # The generator tries to make the critic classify fakes as real
        loss = self.criterion(cFakeOut, truth = 1)
        # Lazy regularization, scaled by gLazyReg to compensate the frequency
        if self.batchShown % self.gLazyReg == self.gLazyReg-1:
            if self.pathRegWeight > 0:
                # First extra output is the disentangled latent used for the
                # path-length penalty
                dlatent = latents[0]
                loss += self.GradientPathRegularization(fake, dlatent)*self.gLazyReg*self.pathRegWeight
            if self.lambg > 0 and self.paterm:
                # Last extra output is the raw latent z
                latent = latents[-1]
                pat = self.gen.paTerm(latent)*self.lambg*self.gLazyReg
                loss += pat
        self.gOptimizer.zero_grad()
        loss.backward(); self.gOptimizer.step()
        if self.glrScheduler is not None: self.glrScheduler.step() #Reduce learning rate
        self.logger_.appendGLoss(loss)
        return fake.size(0)
    def train(self):
        """
        Main train loop: alternates nCritPerGen critic steps with one
        generator step until tick*loops images have been shown, then saves
        a final snapshot through the logger.
        """
        self.logger.info('Starting training...')
        self.logger_.startLogging() #Start the logger
        # loop over images
        while self.imShown < self.tick*self.loops:
            if self.kUnroll:
                # Unrolled critic: remember the critic's state after its
                # first update and restore it once the generator has stepped
                for i in range(self.nCritPerGen):
                    self.trainCritic()
                    if i == 0:
                        self.cBackup = copy.deepcopy(self.crit)
            else:
                for i in range(self.nCritPerGen):
                    self.trainCritic()
            shown = self.trainGenerator() #Use the generator training batches to count for the images shown, not the critic
            if self.kUnroll:
                self.crit.load(self.cBackup)
            self.imShown = self.imShown + int(shown)
            self.batchShown = self.batchShown + 1
            # Reset the lazy-regularization counter once it has passed every
            # regularization interval
            if self.batchShown > max(self.gLazyReg, self.cLazyReg):
                self.batchShown = 0
        self.logger_.saveSnapshot(f'{self.resolution}x{self.resolution}_final_{self.latentSize}')
if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="StyleGAN2 pytorch implementation.")
    parser.add_argument('--config', nargs='?', type=str)
    args = parser.parse_args()

    from config import cfg as opt

    # Optionally override defaults from a YAML config file
    if args.config:
        opt.merge_from_file(args.config)

    opt.freeze()

    # BUG FIX: the original wrote `Trainer = Trainer(opt)`, shadowing the
    # Trainer class with its own instance; use a distinct variable name
    trainer = Trainer(opt)
    trainer.train()
|
{"/generator.py": ["/models/generatorNetwork.py", "/config.py"], "/models/generatorNetwork.py": ["/models/generatorBlocks.py", "/models/commonBlocks.py"], "/models/criticNetwork.py": ["/models/commonBlocks.py"], "/trainer.py": ["/models/generatorNetwork.py", "/models/criticNetwork.py", "/misc/dataLoader.py", "/misc/logger.py", "/config.py"], "/decoderTrainer.py": ["/misc/logger.py", "/models/generatorNetwork.py", "/models/decoderNetwork.py", "/config.py"], "/models/decoderNetwork.py": ["/models/commonBlocks.py"], "/models/generatorBlocks.py": ["/models/commonBlocks.py"]}
|
10,452
|
alenmora/styleGAN
|
refs/heads/master
|
/decoderTrainer.py
|
import torch.nn as nn
import torch
import numpy as np
from torch.optim import Adam, lr_scheduler
from misc import utils
from misc.logger import DecoderLogger
from models.generatorNetwork import Generator
from models.decoderNetwork import Decoder
import os
import math
import copy
from random import random
import argparse
class DecoderTrainer:
    """
    Trainer for the decoder network.

    Loads a pretrained (and frozen) generator, then trains a Decoder to
    recover the latent vector from the generated image, minimizing the
    per-sample Euclidean distance between the decoder output and the true
    latent.
    """
    def __init__(self, config):
        """
        config -- frozen configuration tree (the `cfg` object from config.py)
        """
        # Configuration sub-trees. BUG FIX: the original read the module-level
        # global `opt` instead of the `config` argument, so it only worked
        # when run as a script.
        lopt = config.logger
        topt = config.trainer  # NOTE(review): unused, kept for parity with Trainer
        mopt = config.model
        gopt = config.model.gen
        copt = config.model.crit
        dopt = config.dec
        doopt = config.optim.dec

        #logger
        self.logger_ = DecoderLogger(self, gopt.latentSize, dopt.resumeTraining, config.tick, config.loops, lopt.logPath, lopt.logStep,
                                lopt.saveModelEvery, lopt.logLevel)
        self.logger = self.logger_.logger

        #CUDA configuration parameters
        if config.device == 'cuda':
            os.environ['CUDA_VISIBLE_DEVICES'] = config.deviceId
            num_gpus = len(config.deviceId.split(','))
            self.logger.info("Using {} GPUs.".format(num_gpus))
            self.logger.info("Training on {}.\n".format(torch.cuda.get_device_name(0)))
            torch.backends.cudnn.benchmark = True
        self.device = torch.device(config.device)

        # training options
        assert config.tick > 0, self.logger.error(f'The number of ticks should be a positive integer, got {config.tick} instead')
        self.tick = float(config.tick)
        assert config.loops > 0, self.logger.error(f'The number of ticks should be a positive integer, got {config.loops} instead')
        self.loops = int(config.loops)
        self.imShown = 0

        #Common model parameters
        common = {
            'fmapMax': mopt.fmapMax,
            'fmapMin': mopt.fmapMin,
            'fmapDecay': mopt.fmapDecay,
            'fmapBase': mopt.fmapBase,
            'activation': mopt.activation,
            'upsample': mopt.sampleMode,
            'downsample': mopt.sampleMode
        }

        #Generator model parameters
        self.gen = Generator(**common, **gopt).to(self.device)
        self.latentSize = self.gen.mapping.latentSize
        self.logger.info(f'Generator constructed. Number of parameters {sum([np.prod([*p.size()]) for p in self.gen.parameters()])}')

        #Decoder model parameters
        copt.network = dopt.network
        self.decoder = Decoder(**mopt, **copt).to(self.device)
        # BUG FIX: was self.dec, an attribute that is never defined
        self.logger.info(f'Decoder constructed. Number of parameters {sum([np.prod([*p.size()]) for p in self.decoder.parameters()])}')

        #Decoder optimizer parameters
        clr, beta1, beta2, epsilon, lrDecay, lrDecayEvery, lrWDecay = list(doopt.values())
        assert lrDecay >= 0 and lrDecay <= 1, self.logger.error('Trainer ERROR: The decay constant for the learning rate of the critic must be a constant between [0, 1]')
        assert lrWDecay >= 0 and lrWDecay <= 1, self.logger.error('Trainer ERROR: The weight decay constant for the critic must be a constant between [0, 1]')
        # BUG FIX: optimize the decoder's parameters (was self.crit, which is
        # never defined in this trainer)
        self.dOptimizer = Adam(filter(lambda p: p.requires_grad, self.decoder.parameters()), lr = clr, betas=(beta1, beta2), weight_decay=lrWDecay, eps=epsilon)
        if lrDecayEvery and lrDecay:
            # BUG FIX: schedule the decoder optimizer (was self.gOptimizer, undefined)
            self.dlrScheduler = lr_scheduler.StepLR(self.dOptimizer, step_size=lrDecayEvery, gamma=lrDecay)
        else:
            self.dlrScheduler = None
        self.logger.info(f'Decoder optimizer constructed')

        #Trained data loading
        dir = dopt.wtsFile
        if os.path.isfile(dir):
            try:
                stateDict = torch.load(dir, map_location=lambda storage, loc: storage)
                self.gen.load_state_dict(stateDict['gen'], strict=False) #Since the cached noise buffers are initialized at None
                self.logger.info(f'Loaded generator trained weights from {dir}')
                if dopt.useCriticWeights or dopt.resumeTraining:
                    if 'dec' in stateDict.keys():
                        self.decoder.load_state_dict(stateDict['dec']) #First, try to load a decoder dictionary
                    else:
                        self.decoder.load_state_dict(stateDict['crit'], strict=False) #Last layer won't match, so make strict = False
                    self.logger.info(f'Loaded critic trained weights from {dir}')
                if dopt.resumeTraining:
                    self.imShown = stateDict['imShown']
                    self.loops = stateDict['loops']
                    self.tick = stateDict['tick']
                    # BUG FIX: the logger attribute is self.logger_ (the
                    # original wrote self.Logger_, which does not exist)
                    self.logger_.loss = stateDict['dLoss']
                    self.logger_.logCounter = stateDict['logCounter']
                    self.logger_.appended = stateDict['appended']
                    self.logger_.snapCounter = stateDict['snapCounter']
                    self.logger_.diffCounter = stateDict['diffCounter']
                    self.dOptimizer.load_state_dict(stateDict['dOptimizer'])
                    self.logger.info(f'And the optimizers states as well')
            # BUG FIX: was a bare except, which also swallowed KeyboardInterrupt
            except Exception:
                self.logger.error(f'ERROR: The information in {dir} could not be loaded. Exiting')
                raise IOError
        else:
            # BUG FIX: garbled message ('Proceding Exiting'); training cannot
            # start without pretrained generator weights, so we exit
            self.logger.error(f'ERROR: The file {dir} does not exist. Exiting')
            raise IOError

        # The generator is only used for inference here; freeze it
        utils.switchTrainable(self.gen, False)
        self.batchSize = max(dopt.batchSize, 1)
        self.logger.info(f'The trainer has been instantiated...')

    def getBatch(self):
        """
        Generate a batch of fake images together with the latent vectors
        that produced them.
        """
        z = utils.getNoise(bs = self.batchSize, latentSize = self.latentSize, device = self.device)
        # NOTE(review): assumes the generator returns an (images, latents)
        # pair, since trainDecoder unpacks two values -- confirm against
        # Generator.forward
        return self.gen(z)

    def decoderLoss(self, dout, w):
        """
        Per-sample Euclidean distance between the decoder output and the
        true latent vector, averaged over the batch.
        """
        return (dout-w).norm(dim=1).mean()

    def trainDecoder(self):
        """
        Perform one optimization step on the decoder and log the loss.
        """
        self.dOptimizer.zero_grad()
        # fake
        ims, w = self.getBatch()
        dout = self.decoder(ims)
        loss = self.decoderLoss(dout, w)
        loss.backward(); self.dOptimizer.step()
        if self.dlrScheduler is not None: self.dlrScheduler.step() #Reduce learning rate
        # BUG FIX: count the images shown -- the original never updated
        # imShown, so the training loop never terminated
        self.imShown += ims.size(0)
        # BUG FIX: appendLoss lives on the DecoderLogger (was self.logger)
        self.logger_.appendLoss(loss)

    def train(self):
        """
        Main train loop: train the decoder until tick*loops images have been
        generated, then save a final snapshot.
        """
        self.logger.info('Starting training...')
        # BUG FIX: startLogging/saveSnapshot live on self.logger_; the
        # original also read the undefined attributes self.nLoops and
        # self.res (we use self.loops and the decoder's stored resolution)
        self.logger_.startLogging() #Start the logging
        while self.imShown < self.tick*self.loops: self.trainDecoder()
        res = self.decoder.resolution
        self.logger_.saveSnapshot(f'{res}x{res}_final_{self.latentSize}_decoder')
if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="StyleGAN2 pytorch implementation.")
    parser.add_argument('--config', nargs='?', type=str)
    args = parser.parse_args()

    from config import cfg as opt

    # Optionally override defaults from a YAML config file
    if args.config:
        opt.merge_from_file(args.config)

    opt.freeze()

    # BUG FIX: the original wrote `Trainer = DecoderTrainer(opt)`, binding
    # the instance to a class-like name; use a distinct variable name
    trainer = DecoderTrainer(opt)
    trainer.train()
|
{"/generator.py": ["/models/generatorNetwork.py", "/config.py"], "/models/generatorNetwork.py": ["/models/generatorBlocks.py", "/models/commonBlocks.py"], "/models/criticNetwork.py": ["/models/commonBlocks.py"], "/trainer.py": ["/models/generatorNetwork.py", "/models/criticNetwork.py", "/misc/dataLoader.py", "/misc/logger.py", "/config.py"], "/decoderTrainer.py": ["/misc/logger.py", "/models/generatorNetwork.py", "/models/decoderNetwork.py", "/config.py"], "/models/decoderNetwork.py": ["/models/commonBlocks.py"], "/models/generatorBlocks.py": ["/models/commonBlocks.py"]}
|
10,453
|
alenmora/styleGAN
|
refs/heads/master
|
/models/decoderNetwork.py
|
import torch
import torch.nn as nn
import numpy as np
from torch.nn import functional as F
from models.commonBlocks import Linear, Conv2D, ModulatedConv2D, getActivation
class Decoder(nn.Module):
    """
    StyleGAN2 decoder: maps an image back to the disentangled latent vector
    that generated it. The architecture mirrors a progressive critic
    (downsampling stages in either 'skip' or 'resnet' configuration), but the
    head outputs a dLatentSize-dimensional vector instead of a single score.
    """
    def __init__(self, resolution = 64, dLatentSize = 256, fmapBase = 4096, fmapDecay = 1., fmapMax = 256, fmapMin = 1, activation = 'lrelu',
                scaleWeights = True, inCh = 3, downsample = 'bilinear', mode = 'resnet', **kwargs):
        """
        resolution   -- input image resolution (a power of 2, >= 4)
        dLatentSize  -- dimensionality of the recovered latent vector
        fmapBase, fmapDecay, fmapMax, fmapMin -- control channels per stage
        activation   -- nonlinearity name, resolved via getActivation
        scaleWeights -- use runtime weight scaling in conv/linear layers
        inCh         -- number of channels of the input image
        downsample   -- interpolation mode used to halve the resolution
        mode         -- 'skip' or 'resnet' architecture
        """
        super().__init__()
        self.resolution = resolution
        self.fmapBase = fmapBase
        self.fmapDecay = fmapDecay
        self.fmapMax = fmapMax
        self.fmapMin = fmapMin
        self.activation = getActivation(activation)
        self.scaleWeights = scaleWeights
        self.inCh = inCh
        self.downsample = downsample
        assert mode in ['skip','resnet'], f'Decoder ERROR: Invalid synthesis network architecture {mode}'
        self.mode = mode
        rlog2 = int(np.log2(self.resolution))
        # BUG FIX: the message said 'Critic ERROR' in the Decoder class
        assert self.resolution == 2**(rlog2) and self.resolution >= 4, 'Decoder ERROR: The resolution should be a power of 2 greater than 4'
        def nf(stage): #Get the number of channels per layer
            return np.clip(int(self.fmapBase / (2.0 ** (stage * self.fmapDecay))), self.fmapMin, self.fmapMax)
        self.nLayers = 2*(rlog2-1)-1 #4x4 requires 1 (conv) layer, 8x8 requires 3, 16x16 requires 5,...
        self.convs = nn.ModuleList()   #Keeps the 2D convolutional modules
        self.fromRGB = nn.ModuleList() #Keeps the fromRGB modules
        self.lp = nn.ModuleList()      #Keeps the 2DConv modules for linear projection when performing resnet architecture
        def layer(kernel, layerId): #Constructor of layers
            stage = int((layerId+1)//2) #Resolution stage: (4x4 --> 0), (8x8 --> 1), (16x16 --> 2) ...
            inCh = nf(stage) if layerId % 2 else nf(stage+1) #The even layers receive the input of the resolution block, so their number of inCh must be the same of the outCh for the previous stage
            outCh = nf(stage)
            if not layerId % 2: #Even layer
                if self.mode != 'resnet': #add the fromRGB module for the given resolution
                    self.fromRGB.append(nn.Sequential(
                        Conv2D(inCh=self.inCh, outCh=inCh, kernelSize=1, scaleWeights=self.scaleWeights),
                        self.activation,
                    ))
                else: #Add the convolution modules for properly matching the channels during the residual connection
                    if layerId > 0: # (the first layer does not require this module)
                        self.lp.append(Conv2D(inCh=inCh, outCh=outCh,kernelSize=kernel))
            #Add the required convolutional module
            if layerId == 0:
                # Final 4x4 conv with no padding collapses the spatial dims
                self.convs.append(Conv2D(inCh=inCh, outCh=outCh, kernelSize=4, padding=0))
            else:
                self.convs.append(Conv2D(inCh=inCh, outCh=outCh, kernelSize=kernel))
        for layerId in range(self.nLayers): #Create the layers from 0 to self.nLayers-1
            layer(kernel=3, layerId=layerId)
        if self.mode == 'resnet': #Add the only fromRGB module in the resnet architecture
            self.fromRGB.append(nn.Sequential(
                Conv2D(inCh=self.inCh, outCh=nf((self.nLayers+1)//2), kernelSize=1, scaleWeights=self.scaleWeights),
                self.activation,
            ))
        # BUG FIX: the original instantiated MiniBatchStdDevLayer when
        # self.stdGroupSize > 1, but stdGroupSize was never assigned and
        # MiniBatchStdDevLayer is not imported in this module, so the
        # constructor always raised; the (never used) layer has been removed.
        self.logits = Linear(inCh=nf(0),outCh=dLatentSize,scaleWeights=self.scaleWeights)
    def forward(self, x):
        """
        Forward function.
        x (tensor): the input image batch
        Returns the recovered latent vector batch.
        """
        if self.mode == 'skip':
            return self.forwardSkip_(x)
        elif self.mode == 'resnet':
            return self.forwardResnet_(x)
    def applyOneLayer(self, x, layer):
        """
        Apply the convolution of the given layer index followed by the
        activation to the tensor x.
        """
        x = self.convs[layer](x)
        return self.activation(x)
    def forwardSkip_(self, x):
        """
        Forward pass through the architecture with skip connections: at every
        even layer the downsampled image is re-injected through fromRGB.
        """
        t = 0
        for layer in range(self.nLayers-1,-1,-1):
            if not layer % 2: #Even layer: get the fromRGB version of the downsampled image
                t = self.fromRGB[layer//2](x)+t
            t = self.applyOneLayer(t, layer)
            if layer % 2: #Downsample both the features and the image
                t = F.interpolate(t, scale_factor=0.5, mode=self.downsample, align_corners=False)
                x = F.interpolate(x, scale_factor=0.5, mode=self.downsample, align_corners=False)
        # NOTE(review): self.logits receives the 4D conv output; assumes the
        # project's Linear layer handles/flattens (N, C, 1, 1) -- confirm
        # against models.commonBlocks.Linear
        t = self.logits(t)
        return t
    def forwardResnet_(self, x):
        """
        Forward pass through the architecture with residual connections: each
        resolution block adds a linearly projected, downsampled carryover.
        """
        x = self.fromRGB[0](x) #Use the only fromRGB for this net
        carryover = None
        for layer in range(self.nLayers-1,-1,-1): #Apply all layers
            if not layer % 2: #Even layer
                if carryover is not None:
                    # Residual addition, normalized to preserve variance
                    x = (carryover + x)/np.sqrt(2)
                carryover = x
            x = self.applyOneLayer(x, layer)
            if layer % 2: #Odd layer, downsample features and carryover
                x = F.interpolate(x, scale_factor=0.5, mode=self.downsample, align_corners=False)
                carryover = self.lp[layer//2](carryover)
                carryover = F.interpolate(carryover, scale_factor=0.5, mode=self.downsample, align_corners=False)
        # NOTE(review): see forwardSkip_ about Linear on a 4D tensor
        x = self.logits(x)
        return x
|
{"/generator.py": ["/models/generatorNetwork.py", "/config.py"], "/models/generatorNetwork.py": ["/models/generatorBlocks.py", "/models/commonBlocks.py"], "/models/criticNetwork.py": ["/models/commonBlocks.py"], "/trainer.py": ["/models/generatorNetwork.py", "/models/criticNetwork.py", "/misc/dataLoader.py", "/misc/logger.py", "/config.py"], "/decoderTrainer.py": ["/misc/logger.py", "/models/generatorNetwork.py", "/models/decoderNetwork.py", "/config.py"], "/models/decoderNetwork.py": ["/models/commonBlocks.py"], "/models/generatorBlocks.py": ["/models/commonBlocks.py"]}
|
10,454
|
alenmora/styleGAN
|
refs/heads/master
|
/models/generatorBlocks.py
|
import torch
import torch.nn as nn
import numpy as np
from torch.nn import functional as F
from models.commonBlocks import PixelNorm, Linear, Conv2D, ModulatedConv2D, getActivation
class constantInput(nn.Module):
    """
    Learned constant input map for the StyleGAN2 synthesis network.

    Holds a single (1, nCh, resol, resol) parameter tensor which is repeated
    along the batch dimension of whatever input is passed to forward.
    """
    def __init__(self, nCh, resol=4, makeTrainable = True):
        """
        nCh           -- number of channels of the constant map
        resol         -- spatial resolution of the constant map
        makeTrainable -- whether the map is optimized during training
        """
        super().__init__()
        # BUG FIX/generalization: honor the resol argument -- the original
        # hard-coded a 4x4 map regardless of resol (default is still 4)
        self.cInput = nn.Parameter(torch.randn(1, nCh, resol, resol)) #Constant random input
        self.cInput.requires_grad_(makeTrainable)
    def forward(self, input):
        """Repeat the constant map once per sample in the input batch."""
        batchSize = input.size(0)
        return self.cInput.repeat(batchSize, 1, 1, 1)
class Mapping(nn.Module):
    """
    StyleGAN2 mapping network: transforms the latent vector z into the
    disentangled latent vector w through a stack of fully connected layers.
    """
    def __init__(self, latentSize=256, dLatentSize=256, mappingLayers = 4, neuronsInMappingLayers = 256, lrmul = 0.01,
                activation = 'lrelu', scaleWeights = False, normalizeLayers = False, **kwargs):
        super().__init__()
        self.latentSize = latentSize
        self.dLatentSize = dLatentSize
        self.mappingLayers = mappingLayers
        assert self.mappingLayers > 0, 'Mapping Module ERROR: The number of mapping layers should be a positive integer'
        self.scaleWeights = scaleWeights
        self.nNeurons = neuronsInMappingLayers
        self.activation = getActivation(activation)
        self.lrmul = lrmul
        # Assemble Linear -> activation (-> PixelNorm) blocks; the final
        # Linear projects to the disentangled latent size
        blocks = []
        fanIn = self.latentSize
        lastLayer = self.mappingLayers - 1
        for idx in range(self.mappingLayers):
            fanOut = self.dLatentSize if idx == lastLayer else self.nNeurons
            blocks.append(Linear(fanIn, fanOut, scaleWeights=self.scaleWeights, lrmul=self.lrmul))
            blocks.append(self.activation)
            if normalizeLayers:
                blocks.append(PixelNorm())
            fanIn = fanOut
        self.map = nn.Sequential(*blocks)
        self.name = 'Mapping subnetwork: '+str(self.map)
    def forward(self, x):
        """Map the latent batch x through the fully connected stack."""
        return self.map(x)
    def __repr__(self):
        return self.name
class NoiseLayer(nn.Module):
    """
    Adds learnably scaled per-pixel noise to a feature map.

    The per-channel noise scale (self.weights) starts at zero, so a freshly
    constructed layer is an identity. With randomizeNoise=False a fixed
    noise pattern sampled at construction time is reused on every call;
    otherwise fresh noise is drawn per forward pass.
    """
    def __init__(self, outCh, resolution, randomizeNoise = False):
        super().__init__()
        fixedNoise = torch.randn(1, 1, resolution, resolution)
        self.noise = fixedNoise
        # NOTE(review): the buffer is registered under 'cached_noise' while
        # forward reads the plain attribute self.noise -- confirm the
        # state_dict key is intentional (the trainer loads with strict=False)
        self.register_buffer('cached_noise', self.noise)
        self.randomizeNoise = randomizeNoise
        self.weights = nn.Parameter(torch.zeros(1, outCh, 1, 1), requires_grad=True)
        self.name = 'Noise layer: '+str(outCh)
    def forward(self, x):
        if self.randomizeNoise:
            noise = torch.randn(1, 1, x.size(2), x.size(3), device=x.device)
        else:
            noise = self.noise.to(x.device)
        return x + self.weights*noise
    def __repr__(self):
        return self.name
class StyledConv2D(nn.Module):
    """
    Basic StyleGAN2 synthesis building block: a style-modulated 2D
    convolution followed by noise injection and a nonlinearity.
    """
    def __init__(self, styleCh, inCh, outCh, kernelSize, resolution, padding='same', gain=np.sqrt(2), bias=False, lrmul = 1, scaleWeights=True,
                demodulate = True, randomizeNoise = False, activation = 'lrelu'):
        super().__init__()
        self.conv = ModulatedConv2D(styleCh, inCh, outCh, kernelSize, padding=padding, gain=gain, bias=bias, lrmul=lrmul, scaleWeights=scaleWeights, demodulate=demodulate)
        self.noise = NoiseLayer(outCh, resolution, randomizeNoise=randomizeNoise)
        self.activation = getActivation(activation)
    def forward(self, x, y):
        """x: feature map; y: disentangled latent used to modulate the conv."""
        h = self.conv(x, y)   # style-modulated convolution
        h = self.noise(h)     # add (learnably scaled) per-pixel noise
        return self.activation(h)
    def __repr__(self):
        return 'StyledConv2D based on '+self.conv.__repr__()
class ToRGB(nn.Module):
    """
    Projects a feature map into image space with a 1x1 modulated convolution
    (demodulation disabled) plus a learned per-channel bias.
    """
    def __init__(self, styleCh, inCh, outCh):
        super().__init__()
        self.conv = ModulatedConv2D(styleCh, inCh, outCh, kernelSize = 1, demodulate=False)
        self.bias = nn.Parameter(torch.zeros(1, outCh, 1, 1))
    def forward(self, x, y):
        """x: feature map; y: disentangled latent used to modulate the conv."""
        return self.conv(x, y) + self.bias
    def __repr__(self):
        return f'ToRGB using '+self.conv.__repr__()
class Synthesis(nn.Module):
    """
    StyleGAN2 synthesis network: grows a constant 4x4 feature map up to the
    target resolution, modulating every convolution with the disentangled
    latent w. Supports the 'skip' and 'resnet' architectures.
    """
    def __init__(self, dLatentSize = 256, resolution = 64, fmapBase = 2048, fmapDecay = 1, fmapMax = 256, fmapMin = 1,
                randomizeNoise = False, activation = 'lrelu', scaleWeights = False, outCh = 3, upsample = 'bilinear', mode = 'skip',
                normalizeLayers = False,**kwargs):
        """
        dLatentSize  -- dimensionality of the disentangled latent w
        resolution   -- output image resolution (a power of 2, >= 4)
        fmapBase, fmapDecay, fmapMax, fmapMin -- control channels per stage
        randomizeNoise -- sample fresh noise every forward pass
        activation   -- nonlinearity name for the styled convolutions
        scaleWeights -- use runtime weight scaling in the resnet toRGB conv
        outCh        -- number of image channels produced
        upsample     -- interpolation mode used to double the resolution
        mode         -- 'skip' or 'resnet' architecture
        normalizeLayers -- apply PixelNorm after every styled convolution
        """
        super().__init__()
        self.dLatentSize = dLatentSize
        self.resolution = resolution
        self.fmapBase = fmapBase
        self.fmapDecay = fmapDecay
        self.fmapMax = fmapMax
        self.fmapMin = fmapMin
        self.activation = activation
        # BUG FIX: scaleWeights was accepted but never stored; the resnet
        # branch below reads self.scaleWeights and crashed with AttributeError
        self.scaleWeights = scaleWeights
        self.upsample = upsample
        self.mode = mode
        self.outCh = outCh
        self.normalizeLayers = normalizeLayers
        assert self.mode in ['skip','resnet'], f'Generator ERROR: Invalid synthesis network architecture {self.mode}'
        rlog2 = int(np.log2(self.resolution))
        assert self.resolution == 2**(rlog2) and self.resolution >= 4, f'Synthesis Module ERROR: The resolution should be a power of 2 greater than 4 ({self.resolution})'
        def nf(stage): #Get the number of channels per layer
            return np.clip(int(self.fmapBase / (2.0 ** (stage * self.fmapDecay))), self.fmapMin, self.fmapMax)
        self.nLayers = 2*rlog2-3 #a maximum resolution of 4x4 requires 1 layer, 8x8 requires 3, 16x16 requires 5,...
        self.styleConvs = nn.ModuleList() #Keeps the style convolutional modules
        self.toRGB = nn.ModuleList()      #Keeps the ToRGB modules
        self.lp = nn.ModuleList()         #Keeps the 2DConv modules for linear projection when performing resnet architecture
        if self.normalizeLayers: self.normalizer = PixelNorm() #Pixel normalizer
        def layer(kernel, layerId): #Constructor of layers
            resol = int(2**((layerId+5)//2)) #Recover the resolution of the current layer from its id (0 --> 4), (1 --> 8), (2 --> 8), (3 --> 16),...
            stage = int(np.log2(resol)-2) #Resolution stage: (4x4 --> 0), (8x8 --> 1), (16x16 --> 2) ...
            inCh = nf(stage)
            outCh = nf(stage) if layerId % 2 else nf(stage+1) #The even layers give the output for the resolution block, so their number of outCh must be the same of the inCh for the next stage
            if not layerId % 2: #Even layer
                if self.mode == 'skip': #add the ToRGB module for the given resolution
                    self.toRGB.append(ToRGB(styleCh=self.dLatentSize, inCh=outCh, outCh=self.outCh))
                elif self.mode == 'resnet': #Add the convolution modules for properly matching the channels during the residual connection
                    if layerId < self.nLayers-1: # (the last layer --which is even-- does not require this module)
                        self.lp.append(Conv2D(inCh=inCh, outCh=outCh, kernelSize=1))
            #Add the required modulated convolutional module
            self.styleConvs.append(StyledConv2D(styleCh=self.dLatentSize, inCh=inCh, outCh=outCh, kernelSize=kernel, resolution=resol, randomizeNoise=randomizeNoise, activation=activation))
        for layerId in range(self.nLayers): #Create the layers from 0 to self.nLayers-1
            layer(kernel=3, layerId=layerId)
        if self.mode == 'resnet': #Add the only toRGB module in the resnet architecture
            self.toRGB.append(Conv2D(inCh=nf((self.nLayers+1)//2),outCh=self.outCh, kernelSize=1, scaleWeights=self.scaleWeights))
    def forward(self, x, w):
        """
        Forward function.
        x (tensor): the constant input map
        w (tensor): the disentangled latent vector
        Returns the generated image batch.
        """
        if self.mode == 'skip':
            return self.forwardSkip_(x,w)
        elif self.mode == 'resnet':
            return self.forwardResnet_(x,w)
    def forwardTo(self, x, w, maxLayer):
        """
        Forward the constant map x, modulated by w, up to layer maxLayer,
        returning the intermediate features and the extra carryover/output.
        """
        assert maxLayer <= self.nLayers, f'Module Synthesis ERROR: The maxLayer {maxLayer} value in the forwardTo function is larger than the number of layers in the network {self.nLayers}'
        assert maxLayer >= 0, f'Module Synthesis ERROR: The maxLayer {maxLayer} value in the forwardTo function must be a nonnegative integer'
        if self.mode == 'skip':
            return self.forwardSkip_(x,w,maxLayer=maxLayer, getExtraOutputs=True)
        elif self.mode == 'resnet':
            return self.forwardResnet_(x,w,maxLayer=maxLayer, getExtraOutputs=True)
    def forwardFrom(self, x, w, extraInput, minLayer):
        """
        Resume the forward pass from layer minLayer.
        x (tensor): the feature map produced by the preceding layers
        w (tensor): the disentangled latent vector
        extraInput (tensor): the carryover (resnet) or accumulated output
            (skip) returned by forwardTo
        minLayer (int): the layer from which to restart the forwarding
        """
        assert minLayer <= self.nLayers, f'Module Synthesis ERROR: The minLayer {minLayer} value in the forwardFrom function is larger than the number of layers in the network {self.nLayers}'
        assert minLayer >= 0, f'Module Synthesis ERROR: The minLayer {minLayer} value in the forwardFrom function must be a nonnegative integer'
        if self.mode == 'skip':
            return self.forwardSkip_(x,w,output=extraInput,minLayer=minLayer)
        elif self.mode == 'resnet':
            return self.forwardResnet_(x,w,carryover=extraInput,minLayer=minLayer)
    def forwardSkip_(self, x, w, minLayer = 0, maxLayer = None, output = 0, getExtraOutputs = False):
        """
        Forward pass using the skip-connection architecture: every even layer
        contributes an RGB image that is upsampled and accumulated in output.
        """
        if maxLayer is None:
            maxLayer = self.nLayers
        # NOTE(review): when resuming at an odd minLayer the initial output
        # must already be a tensor (the default 0 cannot be interpolated)
        for layer in range(minLayer, maxLayer): #Apply all layers
            if layer % 2: #Odd layer, so increase size
                x = F.interpolate(x, scale_factor=2, mode=self.upsample, align_corners=False)
                output = F.interpolate(output, scale_factor=2, mode=self.upsample, align_corners=False)
            x = self.styleConvs[layer](x, w)
            if self.normalizeLayers:
                x = self.normalizer(x)
            if not layer % 2: #Even layer, so get the generated output for the given resolution and add it to the final output
                output = output + self.toRGB[layer//2](x, w)
        if getExtraOutputs:
            return x, output
        return output
    def forwardResnet_(self, x, w, minLayer = 0, maxLayer = None, carryover = None, getExtraOutputs = False):
        """
        Forward pass using the residual architecture: each resolution block
        adds a linearly projected, upsampled carryover of its input.
        """
        if maxLayer is None:
            maxLayer = self.nLayers
        for layer in range(minLayer, maxLayer): #Apply all layers
            if layer % 2: #Odd layer, so increase size
                x = F.interpolate(x, scale_factor=2, mode=self.upsample, align_corners=False)
                carryover = self.lp[layer//2](carryover)
                carryover = F.interpolate(carryover, scale_factor=2, mode=self.upsample, align_corners=False)
            x = self.styleConvs[layer](x, w)
            if self.normalizeLayers:
                x = self.normalizer(x)
            if not layer % 2: #Even layer, so add and actualize carryover value
                if carryover is not None: #If there is a carryover, add it to the output
                    # Residual addition, normalized to preserve variance
                    x = (carryover + x)/np.sqrt(2)
                carryover = x
        x = self.toRGB[0](x, w) #Use the only toRGB for this net
        if getExtraOutputs:
            return x, carryover
        return x
|
{"/generator.py": ["/models/generatorNetwork.py", "/config.py"], "/models/generatorNetwork.py": ["/models/generatorBlocks.py", "/models/commonBlocks.py"], "/models/criticNetwork.py": ["/models/commonBlocks.py"], "/trainer.py": ["/models/generatorNetwork.py", "/models/criticNetwork.py", "/misc/dataLoader.py", "/misc/logger.py", "/config.py"], "/decoderTrainer.py": ["/misc/logger.py", "/models/generatorNetwork.py", "/models/decoderNetwork.py", "/config.py"], "/models/decoderNetwork.py": ["/models/commonBlocks.py"], "/models/generatorBlocks.py": ["/models/commonBlocks.py"]}
|
10,461
|
msjha-vedi1995/sudoku-solver-with-image-processing
|
refs/heads/main
|
/videocam.py
|
import cv2
from matplotlib import pyplot as plt
from opencv_part import get_sudo_grid, get_sudoku, solve_sudoku, create_sudoku_img, change_perspective_to_original
# cap = cv2.VideoCapture(0)
# images = []
# while 1:
# ret, frame = cap.read()
# try:
# crp_img, orgnl, pts1, pts2 = get_sudo_grid(frame,900)
# images.append(crp_img)
# if crp_img.shape[0] == 900:
# cv2.imshow('frame',crp_img)
# break
# except:
# if cv2.waitKey(1) & 0xFF == ord('q'):
# break
#
# cap.release()
# cv2.destroyAllWindows()
folder = 'images/'

# BUG FIX: the crop must be loaded in color -- it was read with flag 0
# (grayscale), which made the cv2.COLOR_BGR2RGB conversion below fail,
# since that conversion requires a 3-channel input.
img = cv2.imread("cropped.jpg", 1)
# NOTE(review): `orgnl` is loaded but never used below -- kept for parity
# with main.py, where the original image restores the perspective
orgnl = cv2.imread("original.jpg", 0)
img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)

# Extract the digits and the unsolved puzzle from the cropped grid
sd_img, unsolved_sd_lst = get_sudoku(img, 900)
cv2.imwrite(folder + "sd_img.jpg", sd_img)
print("Numbers are extracted")

# Solve the puzzle and render both the unsolved and the solved boards
solved_sd_lst, unsolved_sd_img = solve_sudoku(unsolved_sd_lst, sd_img.shape)
cv2.imwrite(folder + "unsolved_sd_img.jpg", unsolved_sd_img)
print("Unsolved Sudoku image ready")

solved_sd_img = create_sudoku_img(img, solved_sd_lst, unsolved_sd_lst, False)
cv2.imwrite(folder + "solved_sd_img.jpg", solved_sd_img)
print("Solved sudoku image ready")
|
{"/videocam.py": ["/opencv_part.py"], "/main.py": ["/opencv_part.py"], "/opencv_part.py": ["/prediction.py"]}
|
10,462
|
msjha-vedi1995/sudoku-solver-with-image-processing
|
refs/heads/main
|
/main.py
|
import cv2
from matplotlib import pyplot as plt
from opencv_part import get_sudo_grid, get_sudoku, solve_sudoku, create_sudoku_img, change_perspective_to_original
'''
get_sudoku_grid:-
Input: Img array, Size
Output: cropped_img, original, pts1, pts2
get_sudoku
Input: Cropped_img, size
Output: sudoku_image_with_eroded_digits, unsolved_sudoku_list
solve_sudoku
Input: sudoku_unsolved, shape
Output: sudoku_solved_list, sudoku_unsolved_image
create_sudoku_img
Input: sudoku_image_original, sudoku_solved, sudoku_unsolved, with_lines:bool
Output: solved_sudoku_image
change_perspective_to_original
Input: pts2, pts1, sudoku_image, original
output: Final_Image
'''
folder = 'output/'
name = 'sudoku_images/sudoku5.jpg'

# Load the puzzle photograph in color
source = cv2.imread(name, 1)

# Locate the grid and warp it to a 900x900 square, keeping the
# perspective-transform corner points for the final back-projection
cropped, original, pts1, pts2 = get_sudo_grid(source, 900)
cv2.imwrite(folder + "crpzimg.jpg", cropped)
cv2.imwrite(folder + "orgnl.jpg", original)
print("Image is cropped")

# Extract the digits and the unsolved puzzle from the cropped grid
sudoku_img, unsolved_list = get_sudoku(cropped, 900)
cv2.imwrite(folder + "sd_img.jpg", sudoku_img)
print("Numbers are extracted")

# Solve the puzzle and render the unsolved board
solved_list, unsolved_img = solve_sudoku(unsolved_list, sudoku_img.shape)
cv2.imwrite(folder + "unsolved_sd_img.jpg", unsolved_img)
print("Unsolved Sudoku image ready")

# Render the solved board over the cropped grid
solved_img = create_sudoku_img(cropped, solved_list, unsolved_list, False)
cv2.imwrite(folder + "solved_sd_img.jpg", solved_img)
print("Solved sudoku image ready")

# Project the solution back onto the original photograph
final_img = change_perspective_to_original(pts2, pts1, solved_img, original)
cv2.imwrite(folder + "final.jpg", final_img)
print("Perspective changed to original image")

plt.imshow(final_img)
plt.show()
|
{"/videocam.py": ["/opencv_part.py"], "/main.py": ["/opencv_part.py"], "/opencv_part.py": ["/prediction.py"]}
|
10,463
|
msjha-vedi1995/sudoku-solver-with-image-processing
|
refs/heads/main
|
/opencv_part.py
|
from typing import List, Any, Union
import cv2
from imutils import contours as cnt_sort
import numpy as np
from matplotlib import pyplot as plt
from prediction import predict
SIZE = 9
matrix=[[]]
#======================================================================================================================
#======================================================================================================================
#======================================================================================================================
#======================================================================================================================
def get_sudo_grid(name,size):
    """
    Locate the sudoku grid in an image and return it warped to a square.

    name -- a BGR image array containing a sudoku puzzle (despite its name,
            this is the image itself, not a file name)
    size -- side length in pixels of the returned, perspective-corrected crop

    Returns [cropped_grid, original_image, pts1, pts2], where pts1/pts2 are
    the source/destination corner points of the perspective transform
    (needed later to project the solution back onto the original photo).
    """
    #img = cv2.imread(name,0)
    img = name
    original = img.copy()
    #img = cv2.medianBlur(img,5)
    img = cv2.cvtColor(img,cv2.COLOR_BGR2RGB)
    greymain = cv2.cvtColor(img,cv2.COLOR_RGB2GRAY)
    # Inverted adaptive threshold makes the dark grid lines stand out
    th2 = cv2.adaptiveThreshold(greymain,255,cv2.ADAPTIVE_THRESH_MEAN_C,\
                cv2.THRESH_BINARY_INV,39,10)
    #contours,heirarchy = cv2.findContours(th2,cv2.RETR_EXTERNAL,cv2.CHAIN_APPROX_SIMPLE)
    # OpenCV 3.x returns (image, contours, hierarchy); 4.x drops the image
    major = cv2.__version__.split('.')[0]
    if major == '3':
        ret, contours, hierarchy = cv2.findContours(th2, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
    else:
        contours, hierarchy = cv2.findContours(th2, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
    # The sudoku grid is assumed to be the largest contour in the image
    maxarea = 0
    cnt = contours[0]
    for i in contours:
        if cv2.contourArea(i)>maxarea:
            cnt = i
            maxarea = cv2.contourArea(i)
    # Draw only the grid contour on a blank canvas, then detect its edges
    blank = np.zeros(img.shape,np.uint8)
    image = cv2.drawContours(blank,[cnt],-1,(255,255,255),2)
    edges = cv2.Canny(image,40,150,apertureSize = 3)
    # Hough transform finds the border lines of the grid
    lines = cv2.HoughLines(edges,1,np.pi/180,100)
    createhor = []
    createver = []
    created = []
    anglediff=10
    rhodiff=10
    flag=0
    count = 2
    for line in lines:
        for (rho,theta) in line:
            flag=0
            # Skip lines nearly coincident with an already accepted one
            for (rho1,theta1) in created:
                if abs(rho-rho1)<rhodiff and abs(theta-theta1)<anglediff:
                    flag=1
            if flag==0:
                # Convert (rho, theta) to two far-apart points on the line
                a = np.cos(theta)
                b = np.sin(theta)
                x0 = a*rho
                y0 = b*rho
                x1 = int(x0 + 1000*(-b))
                y1 = int(y0 + 1000*(a))
                x2 = int(x0 - 1000*(-b))
                y2 = int(y0 - 1000*(a))
                d = np.linalg.norm(np.array((x1,y1,0))-np.array((x2,y2,0)))
                cv2.line(img,(x1,y1),(x2,y2),(0,255,0),2)
                # The inverse slope classifies the line as horizontal/vertical
                m=abs(1/np.tan(theta))
                if m<1:
                    createhor.append((rho,theta))
                else:
                    createver.append((rho,theta))
                created.append((rho,theta))
    # Intersect each horizontal with each vertical line to get the corners
    points=[]
    for (rho,theta) in createhor:
        for (rho1,theta1) in createver:
            if (rho,theta)!=(rho1,theta1):
                a=[[np.cos(theta),np.sin(theta)],[np.cos(theta1),np.sin(theta1)]]
                b=[rho,rho1]
                cor=np.linalg.solve(a,b)
                if list(cor) not in points:
                    points.append(list(cor))
    # Order the corners: top-left, top-right, bottom-left, bottom-right
    points.sort()
    if (points[0][1]>points[1][1]):
        points[0],points[1]=points[1],points[0]
    if (points[-1][1]<points[-2][1]):
        points[-1],points[-2]=points[-2],points[-1]
    points[1],points[2]=points[2],points[1]
    # Mark the detected corners (debug visualization)
    for i in points:
        images = cv2.circle(image,(int(i[0]),int(i[1])),4,(0,0,255),-1)
    # Warp the quadrilateral defined by the corners to a size x size square
    pts1 = np.float32(points)
    pts2 = np.float32([[0,0],[size,0],[0,size],[size,size]])
    M = cv2.getPerspectiveTransform(pts1,pts2)
    warped2 = cv2.warpPerspective(blank,M,(size,size))
    img = cv2.warpPerspective(original,M,(size,size))
    return [img, original,pts1,pts2]
#======================================================================================================================
#======================================================================================================================
#======================================================================================================================
#======================================================================================================================
def get_sudoku(img, size=900):
    """Segment a rectified sudoku-board image into 81 cells and OCR each digit.

    Parameters:
        img: board image (converted to grayscale with COLOR_RGB2GRAY below).
        size: unused in this body -- TODO confirm it can be removed.

    Returns:
        [blank, sudoku_unsolved] where `blank` is the cleaned binary digit
        image with a 9x9 grid drawn on it and `sudoku_unsolved` is a
        row-major list of 81 digit strings ('0' for empty cells).

    NOTE(review): the kernel sizes and the order of the morphology passes
    below are tuned together; reordering them changes the segmentation.
    """
    img = cv2.cvtColor(img, cv2.COLOR_RGB2GRAY)
    # Inverted adaptive threshold: digits and grid become white on black.
    thresh = cv2.adaptiveThreshold(img, 255, cv2.ADAPTIVE_THRESH_MEAN_C,
                                   cv2.THRESH_BINARY_INV, 39, 10)
    thresh1 = thresh.copy()  # kept un-morphed; re-used for digit extraction below
    kernel = np.ones((1, 1), np.uint8)
    thresh = cv2.morphologyEx(thresh, cv2.MORPH_OPEN, kernel)
    thresh = cv2.dilate(thresh, kernel, iterations=3)
    # Close along horizontal then vertical runs to reconnect the grid lines.
    kernel = np.ones((1, 10), np.uint8)
    thresh = cv2.morphologyEx(thresh, cv2.MORPH_CLOSE, kernel)
    kernel = np.ones((10, 1), np.uint8)
    thresh = cv2.morphologyEx(thresh, cv2.MORPH_CLOSE, kernel)
    thresh = cv2.bitwise_not(thresh)
    # OpenCV 3.x findContours returns (image, contours, hierarchy);
    # 2.x/4.x return (contours, hierarchy).
    major = cv2.__version__.split('.')[0]
    if major == '3':
        ret, contours, hierarchy = cv2.findContours(thresh, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
    else:
        contours, hierarchy = cv2.findContours(thresh, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
    blank = np.zeros(img.shape, np.uint8)
    finalContours = []
    for cnt in contours:
        epsilon = 0.04 * cv2.arcLength(cnt, True)
        approx = cv2.approxPolyDP(cnt, epsilon, True)  # NOTE(review): overwritten on the next line
        approx = cv2.convexHull(cnt)
        area = cv2.contourArea(approx)
        # keep only small contours (individual cells); drop the outer board frame
        if area <= 9000:
            finalContours.append(approx)
    sudoku_rows, _ = cnt_sort.sort_contours(finalContours, method="left-to-right")
    kernel = np.ones((3, 3), np.uint8)
    thresh1 = cv2.erode(thresh1, kernel, iterations=1)
    blank_base = blank.copy()
    # paint the accepted cell contours as a mask
    for c in sudoku_rows:
        blank = cv2.drawContours(blank, [c], -1, (255), -1)
        blank_base = cv2.drawContours(blank_base, [c], -1, (255), -1)
    # keep only the thresholded pixels that fall inside cell contours
    blank = cv2.bitwise_and(thresh1, blank, mask=blank)
    # de-noise the digit strokes (tuned erode/close/dilate sequence)
    kernel = np.ones((5, 1), np.uint8)
    blank = cv2.erode(blank, kernel, iterations=1)
    kernel = np.ones((6, 6), np.uint8)
    blank = cv2.morphologyEx(blank, cv2.MORPH_CLOSE, kernel)
    kernel = np.ones((1, 5), np.uint8)
    blank = cv2.erode(blank, kernel, iterations=1)
    kernel = np.ones((9, 9), np.uint8)
    blank = cv2.morphologyEx(blank, cv2.MORPH_CLOSE, kernel)
    kernel = np.ones((6, 6), np.uint8)
    blank = cv2.dilate(blank, kernel, iterations=1)
    factor = blank.shape[0] // 9  # pixel size of one cell
    sudoku_unsolved = []
    for i in range(9):
        for j in range(9):
            part = blank[i * factor:(i + 1) * factor, j * factor:(j + 1) * factor]
            part = cv2.resize(part, (28, 28))
            # debug dump of every cell image (side effect: requires images/ dir)
            cv2.imwrite("images/{}_{}.jpg".format(i, j), part)
            num, _ = predict(part)
            sudoku_unsolved.append(str(num))
    # draw the 9x9 grid on the returned debug image
    for i in range(10):
        cv2.line(blank, (0, factor * i), (blank.shape[1], factor * i), (255), 2, 2)
        cv2.line(blank, (factor * i, 0), (factor * i, blank.shape[0]), (255), 2, 2)
    # NOTE(review): this local `matrix` is never used and shadows the
    # module-level `matrix` used by the solver functions -- confirm intent.
    matrix = [row[:] for row in sudoku_unsolved]
    return [blank, sudoku_unsolved]
#======================================================================================================================
#======================================================================================================================
#======================================================================================================================
#======================================================================================================================
def number_unassigned(row, col):
    """Find the first empty cell (value 0) in the module-level sudoku `matrix`.

    Scans row-major over the SIZE x SIZE grid (SIZE and matrix are module
    globals defined elsewhere in the file). The row/col arguments are only
    placeholders kept for the caller's calling convention.

    Returns [row, col, 1] for the first empty cell, or [-1, -1, 0] when the
    grid is completely filled.
    """
    for r in range(SIZE):
        for c in range(SIZE):
            if matrix[r][c] == 0:
                return [r, c, 1]
    return [-1, -1, 0]
#function to check if we can put a
#value in a paticular cell or not
#======================================================================================================================
#======================================================================================================================
#======================================================================================================================
#======================================================================================================================
def is_safe(n, r, c):
    """Return True if value `n` may be placed at matrix[r][c].

    Checks the row, the column, and the 3x3 sub-grid containing (r, c)
    against the module-level `matrix` (SIZE is a module global).
    """
    # row and column checks
    if any(matrix[r][i] == n for i in range(SIZE)):
        return False
    if any(matrix[i][c] == n for i in range(SIZE)):
        return False
    # 3x3 sub-grid check
    box_r, box_c = 3 * (r // 3), 3 * (c // 3)
    for i in range(box_r, box_r + 3):
        for j in range(box_c, box_c + 3):
            if matrix[i][j] == n:
                return False
    return True
#function to check if we can put a
#value in a paticular cell or not
#======================================================================================================================
#======================================================================================================================
#======================================================================================================================
#======================================================================================================================
def solve_sudoku():
    """Solve the module-level sudoku `matrix` in place by backtracking.

    Returns True when the grid is fully (and validly) filled, False when no
    assignment works from the current state.

    NOTE(review): this name is redefined later in the module by
    solve_sudoku(sudoku_unsolved, shape), which shadows this solver --
    confirm which definition callers actually get.
    """
    # number_unassigned mutates nothing; it reports the next empty cell.
    cell = number_unassigned(0, 0)
    if cell[2] == 0:
        return True  # no empty cell left: solved
    r, c = cell[0], cell[1]
    for candidate in range(1, 10):
        if is_safe(candidate, r, c):
            matrix[r][c] = candidate
            if solve_sudoku():
                return True
            # dead end: undo and try the next candidate (backtracking)
            matrix[r][c] = 0
    return False
#======================================================================================================================
#======================================================================================================================
#======================================================================================================================
#======================================================================================================================
def solve_sudoku(sudoku_unsolved, shape):
    """Render the recognised (unsolved) digits onto a blank image.

    NOTE(review): this definition shadows the backtracking solve_sudoku()
    defined earlier in the module -- confirm which one callers expect.

    Returns (sudoku_solved, sudoku_image) where sudoku_solved is a shallow
    copy of the module-level `matrix` rows and sudoku_image is a uint8
    canvas of the given shape with the non-'0' digits and a 9x9 grid drawn.
    """
    sudoku_image = np.zeros(shape, np.uint8)
    sudoku_solved = [row[:] for row in matrix]
    cell = shape[0] // 9
    font = cv2.FONT_HERSHEY_SIMPLEX
    for idx, num in enumerate(sudoku_unsolved):
        col, row_i = idx % 9, idx // 9
        if num != '0':
            text_x = int(cell * col + cell / 2)
            text_y = int(cell * row_i + cell / 2)
            cv2.putText(sudoku_image, str(num), (text_x, text_y), font, 1, (255, 255, 255), 6)
    for i in range(10):
        cv2.line(sudoku_image, (0, cell * i), (shape[1], cell * i), (255), 2, 2)
        cv2.line(sudoku_image, (cell * i, 0), (cell * i, shape[0]), (255), 2, 2)
    return sudoku_solved, sudoku_image
#======================================================================================================================
#======================================================================================================================
#======================================================================================================================
#======================================================================================================================
def create_sudoku_img(sudoku_image, sudoku, sudoku_unsolved, with_lines=True):
    """Draw the solved digits for the originally-empty cells on a fresh canvas.

    Only positions where sudoku_unsolved holds '0' (cells that were empty in
    the photo) get a digit from `sudoku`, so the overlay does not repaint the
    printed clues. The input `sudoku_image` is used only for its shape.
    Optionally draws the 9x9 grid (in colour 0) when `with_lines` is True.
    """
    canvas = np.zeros(sudoku_image.shape, np.uint8)
    cell = canvas.shape[0] // 9
    font = cv2.FONT_HERSHEY_SIMPLEX
    for idx in range(len(sudoku)):
        if sudoku_unsolved[idx] == '0':
            col, row_i = idx % 9, idx // 9
            text_x = int(cell * col + cell / 2)
            text_y = int(cell * row_i + cell / 2 + cell // 4)
            cv2.putText(canvas, sudoku[idx], (text_x, text_y), font, 1.75, (0, 255, 255), 4)
    if with_lines:
        for i in range(10):
            cv2.line(canvas, (0, cell * i), (canvas.shape[1], cell * i), (0), 2, 2)
            cv2.line(canvas, (cell * i, 0), (cell * i, canvas.shape[0]), (0), 2, 2)
    return canvas
#======================================================================================================================
#======================================================================================================================
#======================================================================================================================
#======================================================================================================================
def change_perspective_to_original(pts2, pts1, sudoku_image, original):
    """Warp the rendered solution back into the original photo's perspective.

    Computes the inverse transform (pts2 -> pts1), warps `sudoku_image` to
    the original frame size, then inverts it and ANDs it onto `original` so
    the overlaid digits appear dark on the photo.
    """
    transform = cv2.getPerspectiveTransform(pts2, pts1)
    warped = cv2.warpPerspective(sudoku_image, transform, (original.shape[1], original.shape[0]))
    inverted = cv2.bitwise_not(warped)
    return cv2.bitwise_and(inverted, original)
|
{"/videocam.py": ["/opencv_part.py"], "/main.py": ["/opencv_part.py"], "/opencv_part.py": ["/prediction.py"]}
|
10,464
|
msjha-vedi1995/sudoku-solver-with-image-processing
|
refs/heads/main
|
/prediction.py
|
import numpy as np
import cv2
import scipy.ndimage
from skimage.feature import hog
from skimage import data, color, exposure
from sklearn.model_selection import train_test_split
from sklearn.neighbors import KNeighborsClassifier
import joblib
# Load the pre-trained KNN digit classifier once at import time.
# NOTE: module-level side effect -- importing this module requires
# models/knn_model.pkl to exist relative to the working directory.
knn = joblib.load('models/knn_model.pkl')
def feature_extraction(image):
    """Compute a HOG descriptor for a digit image.

    Converts the image to grayscale, then extracts Histogram-of-Oriented-
    Gradients features (8 orientations, 4x4 pixel cells, 7x7 cell blocks)
    as a 1-D feature vector for the KNN classifier.
    """
    grayscale = color.rgb2gray(image)
    return hog(grayscale, orientations=8, pixels_per_cell=(4, 4), cells_per_block=(7, 7))
def predict(img):
    """Classify a digit image with the module-level KNN model.

    Returns (label, confidence) where confidence is the model's predicted
    probability for the returned label.

    NOTE(review): indexing the probability row by the predicted label assumes
    the class labels are exactly the integers 0..9 in column order -- confirm
    against knn.classes_.
    """
    features = feature_extraction(img).reshape(1, -1)
    label = knn.predict(features)[0]
    probabilities = knn.predict_proba(features)
    return label, probabilities[0][label]
|
{"/videocam.py": ["/opencv_part.py"], "/main.py": ["/opencv_part.py"], "/opencv_part.py": ["/prediction.py"]}
|
10,469
|
gvanmaren/3D-Utilities
|
refs/heads/master
|
/scripts/create_3Dgravity_mains.py
|
import arcpy
import time
import os
import sys
import math
import scripts.common_lib as common_lib
from scripts.common_lib import create_msg_body, msg, trace
from scripts.settings import *
class NoNoDataError(Exception):
    """Raised when an input raster unexpectedly lacks NODATA values."""
    pass
class LicenseError3D(Exception):
    """Raised when the ArcGIS 3D Analyst extension is unavailable."""
    pass
class LicenseErrorSpatial(Exception):
    """Raised when the ArcGIS Spatial Analyst extension is unavailable."""
    pass
class SchemaLock(Exception):
    """Raised when a geodatabase schema lock prevents modification."""
    pass
class NotSupported(Exception):
    """Raised for inputs or configurations this tool does not support."""
    pass
class NoLayerFile(Exception):
    """Raised when an expected .lyrx symbology layer file is missing."""
    pass
class FunctionError(Exception):
    """Raised to wrap and propagate errors from helper functions."""
    pass
class NoFeatures(Exception):
    """Raised when a required feature set or surface could not be produced."""
    pass
# used functions
def GetNearestElevValueForXY(cs, xy, compare_points, elev_attribute, lc_error_elevation, lc_zero_as_error):
    """Return the elevation of the first point in *compare_points* lying within
    ~1 XY unit of *xy*; fall back to *lc_error_elevation* otherwise.

    Parameters:
        cs: unused in this body -- presumably a spatial reference; TODO confirm.
        xy: (x, y) coordinate pair to match against point geometries.
        compare_points: point feature class searched for a nearby vertex.
        elev_attribute: field name read for the candidate elevation.
        lc_error_elevation: sentinel returned for no match / Null values.
        lc_zero_as_error: when True, an elevation of exactly 0 is treated as bad.

    Returns the elevation, or (implicitly) None when an arcpy error is
    swallowed by the handlers below.
    """
    try:
        elev = lc_error_elevation
        x_list = []  # NOTE(review): never used
        y_list = []  # NOTE(review): never used
        x, y = xy
        # get a list of all x and y coordinates, compare xy point with closest end or start points, see if it has an elevation attribute
        with arcpy.da.SearchCursor(compare_points, [elev_attribute, "SHAPE@XY"]) as f_cursor:
            for f_row in f_cursor:
                fx, fy = f_row[1]
                # tolerance of 1 linear unit in each axis (not true distance)
                if abs(fx - x) < 1 and abs(fy - y) < 1:
                    if f_row[0] is None:
                        elev = lc_error_elevation
                    else:
                        if f_row[0] > 0:
                            arcpy.AddMessage("Fixed value...")
                            elev = f_row[0]
                            break
        if elev == 0 and lc_zero_as_error:
            elev = lc_error_elevation
        return elev
    except arcpy.ExecuteWarning:
        print((arcpy.GetMessages(1)))
        arcpy.AddWarning(arcpy.GetMessages(1))
    except arcpy.ExecuteError:
        print((arcpy.GetMessages(2)))
        arcpy.AddError(arcpy.GetMessages(2))
    # Return any other type of error
    except:
        # By default any other errors will be caught here
        e = sys.exc_info()[1]
        print((e.args[0]))
        arcpy.AddError(e.args[0])
def interpolate_3Dvalues_along_2Dline(workspace, input_features, upper_elevation_field, lower_elevation_field,
                                      diameter_field,
                                      lc_elevation_field, line_id_field, line_order_field, error_value, zero_as_error):
    """Explode 2D line features into an ordered point feature class with an
    interpolated elevation per vertex.

    Upper/lower elevation attributes on *input_features* are first adjusted
    in place (invert elevation + half the diameter; Null/zero set to
    *error_value*), then each vertex gets an elevation linearly interpolated
    between the line's upper and lower elevation by its chainage along the
    line. Assumes the line's start vertex is the UPPER end.

    Returns the path of the created "line_2Dpoints" feature class, or
    (implicitly) None when an arcpy error is swallowed by the handlers below.
    """
    try:
        # calculate correct start / end vertex elevations from invert elevation attribute. error elevation is set for Null values and zero if user input
        with arcpy.da.UpdateCursor(input_features,
                                   [upper_elevation_field, lower_elevation_field, diameter_field]) as u_cursor:
            for u_row in u_cursor:
                if u_row[0] is not None:
                    if u_row[0] == 0 and zero_as_error:
                        u_row[0] = error_value  # zero == error: set error elevation on attribute
                    else:
                        u_row[0] = u_row[0] + u_row[2] / 2  # adjust for invert elevation
                else:
                    u_row[0] = error_value  # Null: set error elevation
                if u_row[1] is not None:
                    if u_row[1] == 0 and zero_as_error:  # zero == error: set error elevation on attribute
                        u_row[1] = error_value
                    else:
                        u_row[1] = u_row[1] + u_row[2] / 2  # adjust for invert elevation
                else:
                    u_row[1] = error_value  # Null: set error elevation
                u_cursor.updateRow(u_row)
        # For each line feature: get points, get measurement, interpolate elevation based on start and end elevation and set point order
        LinePoints = os.path.join(workspace, "line_2Dpoints")
        if arcpy.Exists(LinePoints):
            arcpy.Delete_management(LinePoints)
        sr = arcpy.Describe(input_features).spatialReference
        lineOID = arcpy.Describe(input_features).OIDFieldName
        lineOID_field = line_id_field
        # copy line OBJECTID to a new field so it survives into the points
        arcpy.AddField_management(input_features, lineOID_field, "LONG")
        arcpy.CalculateField_management(input_features, lineOID_field, "!" + lineOID + "!", "PYTHON_9.3")
        flds_in = ("SHAPE@", lineOID_field, upper_elevation_field, lower_elevation_field)
        fld_Number = line_order_field  # point number from start point
        fld_Z = lc_elevation_field  # Elevation
        fld_Chainage = "Chainage"  # Distance m from start of polyline
        # create the output featureclass
        geometry_type = "POINT"
        template = ""
        has_m = "DISABLED"  # you could enable M values...
        has_z = "ENABLED"
        ws_path, fc_out_name = os.path.split(LinePoints)
        arcpy.CreateFeatureclass_management(workspace, fc_out_name, geometry_type, template, has_m, has_z, sr)
        # add the fields to the point featureclass
        arcpy.AddField_management(LinePoints, lineOID_field, "LONG")
        arcpy.AddField_management(LinePoints, fld_Number, "LONG")
        arcpy.AddField_management(LinePoints, fld_Z, "DOUBLE")
        arcpy.AddField_management(LinePoints, fld_Chainage, "DOUBLE")
        # fields for insert cursor on output points
        flds_out = ("SHAPE@", lineOID_field, fld_Number, fld_Z, fld_Chainage)
        arcpy.AddMessage("Interpolating elevation values for vertices along line segments...")
        # start insert cursor for output points
        with arcpy.da.InsertCursor(LinePoints, flds_out) as curs_out:
            # start search cursor on lines
            with arcpy.da.SearchCursor(input_features, flds_in) as curs:
                for row in curs:
                    number = 0
                    polyline = row[0]
                    line_ID = row[1]
                    for part in polyline:
                        for pnt in part:
                            number += 1
                            if pnt:
                                ptGeom = arcpy.PointGeometry(pnt, sr)
                                line_length = polyline.length
                                chainage = polyline.measureOnLine(ptGeom)
                                # we assume that the start elevation is the UPPER elevation
                                if chainage == 0:  # start point
                                    # NOTE(review): both branches yield row[2]; kept for symmetry
                                    if row[2] == error_value:
                                        elevation = error_value
                                    else:
                                        elevation = row[2]
                                elif chainage - line_length == 0:  # end point
                                    if row[3] == error_value:
                                        elevation = error_value
                                    else:
                                        elevation = row[3]
                                else:  # in between points: linear interpolation by chainage
                                    if row[2] == error_value or row[3] == error_value:
                                        elevation = error_value
                                    else:
                                        elevation_delta = (row[2] - row[3])
                                        distance_percentage = chainage / line_length
                                        elevation = row[2] - (elevation_delta * distance_percentage)
                                curs_out.insertRow((ptGeom, line_ID, number, elevation, chainage))
        return LinePoints
    except arcpy.ExecuteWarning:
        print((arcpy.GetMessages(1)))
        arcpy.AddWarning(arcpy.GetMessages(1))
    except arcpy.ExecuteError:
        print((arcpy.GetMessages(2)))
        arcpy.AddError(arcpy.GetMessages(2))
    # Return any other type of error
    except:
        # By default any other errors will be caught here
        e = sys.exc_info()[1]
        print((e.args[0]))
        arcpy.AddError(e.args[0])
def GetAttributeRange(local_input_features, attribute):
    """Return [minimum, maximum] of *attribute* across the input features.

    Relies on common_lib.unique_values returning the distinct values in
    ascending order, so the first and last entries are the range bounds.
    Returns (implicitly) None when an arcpy error is swallowed below.
    """
    try:
        ordered_values = common_lib.unique_values(local_input_features, attribute)
        return [ordered_values[0], ordered_values[-1]]
    except arcpy.ExecuteWarning:
        print((arcpy.GetMessages(1)))
        arcpy.AddWarning(arcpy.GetMessages(1))
    except arcpy.ExecuteError:
        print((arcpy.GetMessages(2)))
        arcpy.AddError(arcpy.GetMessages(2))
    # any other error type falls through here
    except:
        e = sys.exc_info()[1]
        print((e.args[0]))
        arcpy.AddError(e.args[0])
def GetUnitVector(v):
    """Return *v* scaled to unit length as an (x, y, z) tuple.

    The input XYZ tuple need not be normalized; the output is.
    Raises ZeroDivisionError for a zero-length input vector.
    """
    length = math.sqrt(v[0] * v[0] + v[1] * v[1] + v[2] * v[2])
    return v[0] / length, v[1] / length, v[2] / length
def GetDistance(v1, v2):
    """Return the Euclidean distance between 3D points *v1* and *v2*.

    Fixes the original implementation, which computed (v1[1] - v1[1]) and
    (v1[2] - v1[2]) -- both always zero -- so only the X component ever
    contributed to the result.
    """
    return math.sqrt(
        (v1[0] - v2[0]) ** 2 + (v1[1] - v2[1]) ** 2 + (v1[2] - v2[2]) ** 2)
def GetSlope(vect1, vect2):
    """Return a slope value in degrees between two 3D vectors, or 0 when the
    first vector does not rise above the second.

    NOTE(review): two suspect spots to confirm against the intended math:
    - GetDistance() as originally written measures only the X component
      (its Y/Z terms subtract a value from itself), so dist_a is not a true
      3D chord distance unless that helper is fixed.
    - math.degrees(math.sin(...)) applies sin to the opposite/adjacent
      ratio; the intended formula is presumably
      math.degrees(math.asin(dist_o / dist_a)).
    """
    uv1 = GetUnitVector(vect1)
    uv2 = GetUnitVector(vect2)
    # chord distance between the two unit vectors (see NOTE above)
    dist_a = GetDistance(uv1, uv2)
    # vertical (z) difference between the unit vectors
    dist_o = uv1[2] - uv2[2]
    if dist_o > 0:
        slope = math.degrees(math.sin(dist_o / dist_a))
    else:
        slope = 0
    return slope
def Create3DlineFromLineAttributes(out_ws, ws, out_name, tin_ws, input_fc, upper_invert_elevation_field,
                                   lower_invert_elevation_field, lc_diameter, lc_default_diameter,
                                   lc_use_nearby_points, zero_as_error, error_elevation, lc_interpolate_errors, verbose):
    """Build 3D line features from 2D lines carrying upper/lower invert
    elevation attributes.

    Pipeline: normalise the diameter attribute, copy the invert elevations to
    working fields, interpolate per-vertex elevations along each line,
    optionally patch error elevations from a surface built through the good
    points, convert to 3D points, rebuild 3D lines, and flag an `error`
    attribute per line (0 = ok, 1 = unresolved error elevation, 2 = fixed).

    NOTE(review): lc_use_nearby_points is accepted but never read in this
    body -- confirm whether nearby-point matching was meant to be wired in.
    Returns the path of the created 3D line feature class, or (implicitly)
    None when an arcpy error is swallowed by the handlers below.
    """
    try:
        lineOID_field = "line_objectid"
        line_order_field = "line_order"  # point number from start point
        elevation_field = "elevation"
        start_elevation_field = "upper_line_elevation"
        end_elevation_field = "lower_line_elevation"
        line_fieldtype = "SHORT"
        elevation_fieldtype = "DOUBLE"  # NOTE(review): never used below
        field_list = ["elevation"]  # NOTE(review): never used below
        error_field = "error"
        # create 3D lines from 2D lines
        arcpy.AddMessage("Extracting Line Points...")
        # set all diameter values on input fc
        # check if diameter attribute exists
        common_lib.delete_add_field(input_fc, DIAMETER_FIELD, "DOUBLE")
        if lc_diameter:
            if common_lib.check_fields(input_fc, [lc_diameter], False, verbose) == 0:
                arcpy.CalculateField_management(input_fc, DIAMETER_FIELD, "!" + lc_diameter + "!", "PYTHON_9.3")
                # Null/negative diameters fall back to the default
                common_lib.set_null_or_negative_to_value_in_fields(input_fc, [DIAMETER_FIELD],
                                                                   [lc_default_diameter],
                                                                   True, verbose)
            else:  # attribute missing: use the default everywhere
                arcpy.CalculateField_management(input_fc, DIAMETER_FIELD, lc_default_diameter, "PYTHON_9.3")
        else:
            arcpy.CalculateField_management(input_fc, DIAMETER_FIELD, lc_default_diameter, "PYTHON_9.3")
        # copy upper and lower elevation attributes so we can modify them
        common_lib.delete_add_field(input_fc, start_elevation_field, "DOUBLE")
        arcpy.CalculateField_management(input_fc, start_elevation_field,
                                        "!" + upper_invert_elevation_field + "!",
                                        "PYTHON_9.3")
        common_lib.delete_add_field(input_fc, end_elevation_field, "DOUBLE")
        arcpy.CalculateField_management(input_fc, end_elevation_field, "!" + lower_invert_elevation_field + "!",
                                        "PYTHON_9.3")
        Points2D_interpolated = interpolate_3Dvalues_along_2Dline(ws, input_fc, start_elevation_field,
                                                                  end_elevation_field, DIAMETER_FIELD,
                                                                  elevation_field, lineOID_field,
                                                                  line_order_field,
                                                                  error_elevation, zero_as_error)
        # use elevation surface through good points to interpolate bad values
        if lc_interpolate_errors:
            Z_field = "Z"
            surface = common_lib.create_surface_from_points(ws, tin_ws, Points2D_interpolated, elevation_field,
                                                            error_elevation)
            if surface:
                arcpy.AddSurfaceInformation_3d(Points2D_interpolated, surface, Z_field, "BILINEAR", 1, 1, 0,
                                               None)
            else:
                raise NoFeatures
            # replace error (or zero, if configured) elevations with surface Z
            with arcpy.da.UpdateCursor(Points2D_interpolated, [elevation_field, Z_field]) as cursor:
                for row in cursor:
                    if row[1]:
                        if zero_as_error:
                            if row[0] == 0 or row[0] == error_elevation:
                                row[0] = row[1]
                        else:
                            if row[0] == error_elevation:
                                row[0] = row[1]
                        cursor.updateRow(row)
        # create 3D points
        points3D = os.path.join(ws, "points_3D")
        if arcpy.Exists(points3D):
            arcpy.Delete_management(points3D)
        arcpy.FeatureTo3DByAttribute_3d(Points2D_interpolated, points3D, elevation_field)
        # create 3D lines
        lines3D = os.path.join(out_ws, out_name + "_3Dlines", )
        if arcpy.Exists(lines3D):
            arcpy.Delete_management(lines3D)
        arcpy.AddMessage("Joining original attributes...")
        arcpy.PointsToLine_management(points3D, lines3D, lineOID_field, line_order_field)
        arcpy.JoinField_management(lines3D, lineOID_field, input_fc, lineOID_field)
        # calculate error field
        common_lib.delete_add_field(lines3D, error_field, line_fieldtype)
        arcpy.AddMessage("Calculating errors ...")
        s = 0  # NOTE(review): counter is incremented but never reported
        z_property = "Z_MAX"
        arcpy.AddZInformation_3d(lines3D, z_property)
        with arcpy.da.UpdateCursor(lines3D,
                                   [start_elevation_field, end_elevation_field, error_field,
                                    z_property]) as cursor:
            for row in cursor:
                # NOTE(review): both branches of zero_as_error are identical here;
                # the distinction was already applied upstream.
                if zero_as_error:  # if zero is error
                    if row[0] == error_elevation or row[1] == error_elevation:  # we have a error value
                        if abs(row[3]) == error_elevation:
                            row[2] = int(1)  # NULL values set to user error elevation
                        else:
                            row[2] = int(2)  # fixed it earlier
                    else:
                        row[2] = int(0)
                else:
                    if row[0] == error_elevation or row[1] == error_elevation:
                        if abs(row[3]) == error_elevation:
                            row[2] = int(1)  # NULL values set to user error elevation
                        else:
                            row[2] = int(2)  # fixed it earlier
                    else:
                        row[2] = int(0)
                cursor.updateRow(row)
                s += 1
        # cleaning up
        common_lib.delete_fields(input_fc, [start_elevation_field, end_elevation_field])
        return lines3D
    except arcpy.ExecuteWarning:
        print((arcpy.GetMessages(1)))
        arcpy.AddWarning(arcpy.GetMessages(1))
    except arcpy.ExecuteError:
        print((arcpy.GetMessages(2)))
        arcpy.AddError(arcpy.GetMessages(2))
    # Return any other type of error
    except:
        # By default any other errors will be caught here
        e = sys.exc_info()[1]
        print((e.args[0]))
        arcpy.AddError(e.args[0])
def main(input_layer, start_vertex_elevation, end_vertex_elevation,
         vertex_elevation_unit, diameter, diameter_unit, default_diameter,
         output_features, output_as_3dobject, use_nearby_points,
         zero_as_error, error_elevation, interpolate_errors,
         debug):
    """Create 3D gravity mains (3D lines and optional 3D buffer objects)
    from 2D lines with upper/lower invert elevation attributes.

    With debug == 0 the tool runs inside an ArcGIS Pro project ("CURRENT");
    with debug != 0 every parameter is overridden with hard-coded test
    paths/values for stand-alone debugging.

    Returns (Line3D_layer, objects3D_layer) layer names on success; the
    handlers below report failures and fall through returning None.

    FIX: time.clock() was removed in Python 3.8 (deprecated since 3.3); it
    is replaced here with time.perf_counter(), which measures the same
    elapsed-time intervals on the Python 3 runtime arcpy.mp requires.
    """
    try:
        # Get Attributes from User
        if debug == 0:
            # script variables (running inside an ArcGIS Pro project)
            aprx = arcpy.mp.ArcGISProject("CURRENT")
            home_directory = aprx.homeFolder
            tin_directory = home_directory + "\\Tins"
            scripts_directory = aprx.homeFolder + "\\Scripts"
            rule_directory = aprx.homeFolder + "\\RulePackages"
            log_directory = aprx.homeFolder + "\\Logs"
            layer_directory = home_directory + "\\LayerFiles"
            project_ws = aprx.defaultGeodatabase
            enableLogging = True
            DeleteIntermediateData = True
            verbose = 0
            in_memory_switch = True
        else:
            # debug: hard-coded test inputs
            input_layer = r'D:\Gert\Work\Esri\Solutions\Utilities\work2.1\3DUtilities\Local_Scene.gdb\mains_2d_test1'
            start_vertex_elevation = "UPELEV"
            end_vertex_elevation = "DOWNELEV"
            vertex_elevation_unit = "Feet"
            diameter = "DIAMETER"
            diameter_unit = "Inches"
            default_diameter = 3
            output_features = r'D:\Gert\Work\Esri\Solutions\Utilities\work2.1\3DUtilities\Local_Scene.gdb\mains_2d_test3D_1'
            output_as_3dobject = True
            use_nearby_points = True
            zero_as_error = True
            error_elevation = 1000
            interpolate_errors = True
            # Create and set workspace location in same directory as input feature class gdb
            home_directory = r'D:\Gert\Work\Esri\Solutions\Utilities\work2.1\3DUtilities'
            rule_directory = home_directory + "\RulePackages"
            layer_directory = home_directory + "\LayerFiles"
            project_ws = home_directory + "\\Results.gdb"
            tin_directory = home_directory + "\TINs"
            scripts_directory = home_directory + "\\Scripts"
            log_directory = home_directory + "\\Logs"
            enableLogging = False
            DeleteIntermediateData = True
            verbose = 1
            in_memory_switch = False
        # set data paths for packing tool so all additional data is stored in the package - ESRI packing only!
        data_directory_pack = ""
        geodatabase = ""
        feature_class = ""
        model_directory_pack = ""
        model_file = ""
        rule_directory_pack = "RulePackages"
        rule_file = "ExtrudePolygon.rpk"
        # note: rename all *.lyrx to *.txt first. This is only needed for packaging.
        layer_directory_pack = "LayerFiles"
        layer_file = "Line3DError.lyrx"
        common_lib.set_data_paths_for_packaging(data_directory_pack, geodatabase, feature_class, model_directory_pack,
                                                model_file, rule_directory_pack, rule_file, layer_directory_pack, layer_file)
        if not os.path.exists(tin_directory):
            os.makedirs(tin_directory)
        common_lib.set_up_logging(log_directory, TOOLNAME1)
        # time.perf_counter replaces time.clock (removed in Python 3.8)
        start_time = time.perf_counter()
        # working field names
        ORIG_FID = "ORIG_FID"
        avg_height_field = "avg_height"
        unique_id_field = "unique_id"
        esri_upper_elevation_field = "esri_upper_elev"
        esri_lower_elevation_field = "esri_lower_elev"
        esri_diameter_field = "esri_diameter"
        slope_field = "calc_slope"
        z_field = "Z"
        scratch_ws = common_lib.create_gdb(home_directory, "Intermediate.gdb")
        output_ws = os.path.dirname(output_features)
        if arcpy.Exists(output_ws):
            arcpy.env.workspace = scratch_ws
            arcpy.env.overwriteOutput = True
            if arcpy.CheckExtension("3D") == "Available":
                arcpy.CheckOutExtension("3D")
                if arcpy.CheckExtension("Spatial") == "Available":
                    arcpy.CheckOutExtension("Spatial")
                    arcpy.AddMessage("Processing input features: " + common_lib.get_name_from_feature_class(input_layer))
                    objects3D = None
                    objects3D_layer = None
                    Line3D = None
                    Line3D_layer = None
                    # make a copy of the input feature class (avoids a schema lock)
                    input_fc = os.path.join(scratch_ws,
                                            common_lib.get_name_from_feature_class(input_layer) + "_copy")
                    if arcpy.Exists(input_fc):
                        arcpy.Delete_management(input_fc)
                    # write to fc
                    arcpy.AddMessage(
                        "Copying " + common_lib.get_name_from_feature_class(input_layer) + " to " + input_fc)
                    arcpy.CopyFeatures_management(input_layer, input_fc)
                    # just because of this schema lock
                    input_layer = input_fc
                    # create 3D line
                    zValues = arcpy.Describe(input_layer).hasZ
                    arcpy.AddMessage("Creating 3D lines...")
                    # check for output directory
                    if not os.path.exists(tin_directory):
                        os.makedirs(tin_directory)
                    # create unique ObjectID attribute
                    lineOID = arcpy.Describe(input_layer).OIDFieldName
                    arcpy.AddField_management(input_layer, unique_id_field, "LONG")
                    arcpy.CalculateField_management(input_layer, unique_id_field, "!" + lineOID + "!", "PYTHON_9.3")
                    # create start and end elevation attributes in segment elevation units
                    layer_unit = common_lib.get_xy_unit(input_layer, verbose)
                    common_lib.delete_add_field(input_layer, esri_upper_elevation_field, "DOUBLE")
                    common_lib.delete_add_field(input_layer, esri_lower_elevation_field, "DOUBLE")
                    if not vertex_elevation_unit:
                        vertex_elevation_unit = layer_unit
                        arcpy.AddMessage(
                            "No invert elevation unit detected. Using XY units instead: " + vertex_elevation_unit)
                    conversion_factor = common_lib.unitConversion(layer_unit, vertex_elevation_unit, verbose)
                    common_lib.calculate_field_from_other_field(input_layer, input_fc, start_vertex_elevation,
                                                                esri_upper_elevation_field,
                                                                "multiply", conversion_factor, verbose)
                    common_lib.calculate_field_from_other_field(input_layer, input_fc, end_vertex_elevation,
                                                                esri_lower_elevation_field,
                                                                "multiply", conversion_factor, verbose)
                    # check if error elevation is larger than max elevation in the data
                    maxValue = arcpy.SearchCursor(input_layer, "", "", "",
                                                  esri_upper_elevation_field + " D").next().getValue(
                        esri_upper_elevation_field)  # Get 1st row in descending cursor sort
                    if maxValue > error_elevation:
                        error_elevation += maxValue
                        arcpy.AddMessage(
                            "Maximum value of " + start_vertex_elevation + " attribute is larger than the error elevation value")
                        arcpy.AddMessage("Setting the error elevation value to: " + str(error_elevation))
                    # create diameter attribute in segment elevation units
                    common_lib.delete_add_field(input_layer, esri_diameter_field, "DOUBLE")
                    if not diameter_unit:
                        diameter_unit = layer_unit
                        arcpy.AddMessage("No Diameter Unit detected. Using XY units instead: " + diameter_unit)
                    if diameter:
                        conversion_factor = common_lib.unitConversion(layer_unit, diameter_unit, verbose)
                        common_lib.calculate_field_from_other_field(input_layer, input_fc, diameter,
                                                                    esri_diameter_field,
                                                                    "multiply", conversion_factor, verbose)
                    else:
                        arcpy.CalculateField_management(input_layer, esri_diameter_field, default_diameter,
                                                        "PYTHON_9.3")
                    output_name = str(os.path.basename(output_features))
                    # NOTE: debug is passed as the helper's `verbose` argument
                    Line3D = Create3DlineFromLineAttributes(output_ws, scratch_ws, output_name, tin_directory,
                                                            input_layer, esri_upper_elevation_field,
                                                            esri_lower_elevation_field, esri_diameter_field,
                                                            default_diameter, use_nearby_points,
                                                            zero_as_error, error_elevation,
                                                            interpolate_errors, debug)
                    Line3D_layer = common_lib.get_name_from_feature_class(Line3D)
                    arcpy.MakeFeatureLayer_management(Line3D, Line3D_layer)
                    if common_lib.get_z_unit(Line3D_layer, 0) == "Feet":
                        SymbologyLayer = layer_directory + "\\Line3DError.lyrx"
                    else:
                        SymbologyLayer = layer_directory + "\\Line3DError_meters.lyrx"
                    if not arcpy.Exists(SymbologyLayer):
                        arcpy.AddWarning("Can't find: " + SymbologyLayer + ". Symbolize features by error attribute to see data errors.")
                    # convert 3D Points to 3D objects
                    if output_as_3dobject:
                        objects3D = os.path.join(output_ws, output_name + "_3Dobjects")
                        if arcpy.Exists(objects3D):
                            arcpy.Delete_management(objects3D)
                        # we must remove self intersections before buffering
                        arcpy.AddMessage("Checking for self intersections (OGC Validation)...")
                        arcpy.RepairGeometry_management(Line3D, "#", "OGC")
                        arcpy.AddMessage("Buffering: " + common_lib.get_name_from_feature_class(Line3D))
                        arcpy.AddMessage("This might take some time depending on the number of lines.")
                        common_lib.delete_add_field(Line3D, RADIUS_FIELD, "DOUBLE")
                        arcpy.CalculateField_management(Line3D, RADIUS_FIELD, "!" + DIAMETER_FIELD + "! / 2",
                                                        "PYTHON_9.3")
                        arcpy.Buffer3D_3d(Line3D, objects3D, RADIUS_FIELD, 'Straight', '10')
                        objects3D_layer = common_lib.get_name_from_feature_class(objects3D)
                        arcpy.MakeFeatureLayer_management(objects3D, objects3D_layer)
                        if common_lib.get_z_unit(objects3D_layer, 0) == "Feet":
                            SymbologyLayer = layer_directory + "\\LineObject3DError.lyrx"
                        else:
                            SymbologyLayer = layer_directory + "\\LineObject3DError_meters.lyrx"
                        if not arcpy.Exists(SymbologyLayer):
                            arcpy.AddWarning("Can't find: " + SymbologyLayer + ". Symbolize features by error attribute to see data errors.")
                        # check if any of the lines failed buffering
                        org_line_ids_line = common_lib.get_row_values_for_fields(None, Line3D, [unique_id_field],
                                                                                 None, "no_expression")
                        org_line_ids_object = common_lib.get_row_values_for_fields(None, objects3D,
                                                                                   [unique_id_field], None,
                                                                                   "no_expression")
                        difference = list(set(org_line_ids_line) - set(org_line_ids_object))
                        if len(difference) > 0:
                            arcpy.AddWarning("Buffering failed for lines with the following OBJECTIDs: " + str(
                                difference) + " Check geometries!")
                    if DeleteIntermediateData:
                        fcs = common_lib.listFcsInGDB(scratch_ws)
                        msg_prefix = "Deleting intermediate data..."
                        msg_body = common_lib.create_msg_body(msg_prefix, 0, 0)
                        common_lib.msg(msg_body)
                        for fc in fcs:
                            arcpy.Delete_management(fc)
                    arcpy.ClearWorkspaceCache_management()
                    end_time = time.perf_counter()
                    msg_body = create_msg_body("Create 3D Gravity Mains completed successfully.", start_time, end_time)
                    msg(msg_body)
                    return Line3D_layer, objects3D_layer
                else:
                    raise LicenseErrorSpatial
            else:
                raise LicenseError3D
    except NoLayerFile:
        print("Can't find Layer file. Exiting...")
        arcpy.AddError("Can't find Layer file. Exiting...")
    except LicenseError3D:
        print("3D Analyst license is unavailable")
        arcpy.AddError("3D Analyst license is unavailable")
    except LicenseErrorSpatial:
        print("Spatial Analyst license is unavailable")
        arcpy.AddError("Spatial Analyst license is unavailable")
    except NoNoDataError:
        print("Input raster does not have NODATA values")
        arcpy.AddError("Input raster does not have NODATA values")
    except ValueError:
        print("Input no flood value is not a number.")
        arcpy.AddError("Input no flood value is not a number.")
    except arcpy.ExecuteError:
        line, filename, synerror = trace()
        msg("Error on %s" % line, ERROR)
        msg("Error in file name: %s" % filename, ERROR)
        msg("With error message: %s" % synerror, ERROR)
        msg("ArcPy Error Message: %s" % arcpy.GetMessages(2), ERROR)
    except FunctionError as f_e:
        messages = f_e.args[0]
        msg("Error in function: %s" % messages["function"], ERROR)
        msg("Error on %s" % messages["line"], ERROR)
        msg("Error in file name: %s" % messages["filename"], ERROR)
        msg("With error message: %s" % messages["synerror"], ERROR)
        msg("ArcPy Error Message: %s" % messages["arc"], ERROR)
    except:
        line, filename, synerror = trace()
        msg("Error on %s" % line, ERROR)
        msg("Error in file name: %s" % filename, ERROR)
        msg("with error message: %s" % synerror, ERROR)
    finally:
        arcpy.CheckInExtension("3D")
        arcpy.CheckInExtension("Spatial")
# for debug only!
if __name__ == "__main__":
main("", "", "", "", "", "", "", "", "", "", "", "", "", 1)
|
{"/scripts/create_3Dgravity_mains.py": ["/scripts/settings.py"], "/scripts/create_surface_hole.py": ["/scripts/settings.py"], "/scripts/gptools.py": ["/scripts/create_3Dgravity_mains.py", "/scripts/create_surface_hole.py", "/scripts/create_3Dlaterals.py", "/scripts/create_3Dmanholes.py", "/scripts/create_elevation_tile_package.py", "/scripts/settings.py"], "/scripts/create_elevation_tile_package.py": ["/scripts/settings.py"], "/scripts/create_3Dmanholes.py": ["/scripts/settings.py"], "/scripts/create_3Dlaterals.py": ["/scripts/settings.py"]}
|
10,470
|
gvanmaren/3D-Utilities
|
refs/heads/master
|
/scripts/settings.py
|
""" Settings that can be modified to customize the behavior of the gptools script """
# Name of the tool.
# Used for logging
TOOLNAME1 = "Create3DGravityMains"
TOOLNAME2 = "CreateLaterals"
TOOLNAME3 = "Create3DManholes"
TOOLNAME4 = "CreateHoleSurface"
TOOLNAME5 = "CreateElevationTilePackage"
# error name
# used when printing errors
ERROR = "error"
WARNING = "warning"
# global fields
UNDEFINED = "Undefined"
DIAMETER_FIELD = "util_diameter"
RADIUS_FIELD = "util_radius"
SLOPE_FIELD = "util_slope"
INVERTELEV_FIELD = "util_invertelev"
HEIGHT_FIELD = "util_height"
|
{"/scripts/create_3Dgravity_mains.py": ["/scripts/settings.py"], "/scripts/create_surface_hole.py": ["/scripts/settings.py"], "/scripts/gptools.py": ["/scripts/create_3Dgravity_mains.py", "/scripts/create_surface_hole.py", "/scripts/create_3Dlaterals.py", "/scripts/create_3Dmanholes.py", "/scripts/create_elevation_tile_package.py", "/scripts/settings.py"], "/scripts/create_elevation_tile_package.py": ["/scripts/settings.py"], "/scripts/create_3Dmanholes.py": ["/scripts/settings.py"], "/scripts/create_3Dlaterals.py": ["/scripts/settings.py"]}
|
10,471
|
gvanmaren/3D-Utilities
|
refs/heads/master
|
/scripts/create_surface_hole.py
|
import arcpy
import time
import os
import scripts.common_lib as common_lib
from scripts.common_lib import create_msg_body, msg, trace
from scripts.settings import *
class FunctionError(Exception):
    """Carries a dict of diagnostics (function, line, filename, synerror, arc)
    raised by helper functions so callers can log structured error details."""
    pass
def create_hole_in_surface(local_sw, local_input_surface, local_input_features, local_depth, local_output_surface, local_verbose):
    # Carves a "hole" into an elevation surface: clips the surface to the
    # extent of the input features, lowers the clipped patch by local_depth,
    # then merges the lowered patch back into the full surface.
    #
    # Parameters:
    #   local_sw             - scratch workspace (file gdb path) for intermediates
    #   local_input_surface  - input elevation raster (layer or dataset path)
    #   local_input_features - features whose extent defines the hole footprint
    #   local_depth          - depth to lower the patch (surface Z units assumed
    #                          — TODO confirm against input surface units)
    #   local_output_surface - output raster path for the modified surface
    #   local_verbose        - 1 to emit progress/timing messages
    #
    # Returns: (extent polygon feature class, local_output_surface)
    # Raises:  FunctionError with diagnostic details on any failure.
    if local_verbose == 1:
        msg("--------------------------")
        msg("Executing create_hole_in_surface...")
    # NOTE(review): time.clock() was removed in Python 3.8; works on older
    # ArcGIS Pro runtimes but should migrate to time.perf_counter() eventually.
    start_time = time.clock()
    try:
        i = 0  # unused; kept from the original template
        msg_prefix = ""
        failed = True
        # get extent of input features
        msg_body = create_msg_body("Creating extent polygon...", 0, 0)
        msg(msg_body)
        extent_poly = common_lib.get_extent_feature(local_sw, local_input_features)
        msg_body = create_msg_body("Clipping terrain...", 0, 0)
        msg(msg_body)
        # clip the input surface
        clipTerrain = local_sw + "\\terrain_clip"
        if arcpy.Exists(clipTerrain):
            arcpy.Delete_management(clipTerrain)
        # clip terrain to extent
        arcpy.Clip_management(local_input_surface, "#", clipTerrain, extent_poly)
        common_lib.get_name_from_feature_class(extent_poly)  # return value unused
        # subtract depth from the clipped patch
        msg_body = create_msg_body("Creating hole...", 0, 0)
        msg(msg_body)
        depthTerrain = local_sw + "\\terrain_depth"
        if arcpy.Exists(depthTerrain):
            arcpy.Delete_management(depthTerrain)
        arcpy.Minus_3d(clipTerrain, local_depth, depthTerrain)
        # build a 0/1 mask of where the clipped patch has NoData
        arcpy.env.extent = common_lib.get_full_path_from_layer(local_input_surface)
        outIsNull = os.path.join(local_sw, "outIsNull")
        if arcpy.Exists(outIsNull):
            arcpy.Delete_management(outIsNull)
        outIsNullRaster = arcpy.sa.IsNull(clipTerrain)
        outIsNullRaster.save(outIsNull)
        # mod the input surface.
        # Create modified raster: where the clip is NoData keep the original
        # surface, elsewhere use the lowered patch.
        if arcpy.Exists(local_output_surface):
            arcpy.Delete_management(local_output_surface)
        outConRaster = arcpy.sa.Con(outIsNull, common_lib.get_full_path_from_layer(local_input_surface), depthTerrain)
        outConRaster.save(local_output_surface)
        arcpy.ResetEnvironments()
        arcpy.env.workspace = local_sw
        arcpy.env.overwriteOutput = True
        msg_prefix = "Function create_hole_in_surface completed successfully."
        failed = False
        return extent_poly, local_output_surface
    except:
        line, filename, synerror = trace()
        failed = True
        msg_prefix = ""
        raise FunctionError(
            {
                "function": "create_hole_in_surface",
                "line": line,
                "filename": filename,
                "synerror": synerror,
                "arc": str(arcpy.GetMessages(2))
            }
        )
    finally:
        # always report timing; on failure the (empty) prefix is logged as ERROR
        end_time = time.clock()
        msg_body = create_msg_body(msg_prefix, start_time, end_time)
        if failed:
            msg(msg_body, ERROR)
        else:
            if local_verbose == 1:
                msg(msg_body)
            pass
def main(input_raster, input_layer, depth, output_raster, debug):
    """The source code of the tool.

    Creates a hole in an elevation surface: clips *input_raster* to the
    extent of *input_layer*, lowers the clipped patch by *depth* and writes
    the merged result to *output_raster*.

    Parameters:
        input_raster  - elevation surface (raster layer / dataset path)
        input_layer   - features whose extent defines the hole footprint
        depth         - depth to lower the surface by (surface Z units)
        output_raster - path for the modified output surface
        debug         - 1 runs against hard-coded local test data,
                        0 uses the current ArcGIS Pro project

    Returns:
        (modified surface, extent layer name) on success; the layer name is
        None when the symbology layer file cannot be found.
    """
    # error classes
    class NoNoDataError(Exception):
        pass

    class LicenseError3D(Exception):
        pass

    class LicenseErrorSpatial(Exception):
        pass

    class SchemaLock(Exception):
        pass

    class NotSupported(Exception):
        pass

    class NoLayerFile(Exception):
        pass

    class FunctionError(Exception):
        pass

    class NoFeatures(Exception):
        pass

    try:
        # Get Attributes from User
        if debug == 0:
            # script variables derived from the current ArcGIS Pro project
            aprx = arcpy.mp.ArcGISProject("CURRENT")
            home_directory = aprx.homeFolder
            tin_directory = home_directory + "\\Tins"
            scripts_directory = aprx.homeFolder + "\\Scripts"
            rule_directory = aprx.homeFolder + "\\RulePackages"
            log_directory = aprx.homeFolder + "\\Logs"
            layer_directory = home_directory + "\\LayerFiles"
            project_ws = aprx.defaultGeodatabase
            enableLogging = True
            DeleteIntermediateData = True
            verbose = 0
            in_memory_switch = True
        else:
            # debug: hard-coded local test data, overrides the arguments
            input_raster = r'D:\Gert\Work\Esri\Solutions\Utilities\work2.1\3DUtilities\Naperville.gdb\DEM_clip_feet'
            input_layer = r'D:\Gert\Work\Esri\Solutions\Utilities\work2.1\3DUtilities\Local_Scene.gdb\manHoles_test1'
            depth = 500
            output_raster = r'D:\Gert\Work\Esri\Solutions\Utilities\work2.1\3DUtilities\Local_Scene.gdb\surface_mod_test'
            # Create and set workspace location in same directory as input feature class gdb
            home_directory = r'D:\Gert\Work\Esri\Solutions\Utilities\work2.1\3DUtilities'
            rule_directory = home_directory + "\RulePackages"
            layer_directory = home_directory + "\LayerFiles"
            project_ws = home_directory + "\\Results.gdb"
            tin_directory = home_directory + "\TINs"
            scripts_directory = home_directory + "\\Scripts"
            log_directory = home_directory + "\\Logs"
            enableLogging = False
            DeleteIntermediateData = True
            verbose = 1
            in_memory_switch = False
        # set data paths for packing tool so all additional data is stored in the package - ESRI packing only!
        data_directory_pack = ""
        geodatabase = ""
        feature_class = ""
        model_directory_pack = ""
        model_file = ""
        rule_directory_pack = "RulePackages"
        rule_file = "ExtrudePolygon.rpk"
        # note: rename all *.lyrx to *.txt first. This is only needed for packaging.
        layer_directory_pack = "LayerFiles"
        layer_file = "Line3DError.lyrx"
        common_lib.set_data_paths_for_packaging(data_directory_pack, geodatabase, feature_class, model_directory_pack,
                                                model_file, rule_directory_pack, rule_file, layer_directory_pack,
                                                layer_file)
        if not os.path.exists(tin_directory):
            os.makedirs(tin_directory)
        # bug fix: log under this tool's name (TOOLNAME4 = "CreateHoleSurface");
        # previously used TOOLNAME3, the 3D manholes tool.
        common_lib.set_up_logging(log_directory, TOOLNAME4)
        # NOTE(review): time.clock() was removed in Python 3.8; kept for parity
        # with the rest of the module, but should become time.perf_counter().
        start_time = time.clock()
        scratch_ws = common_lib.create_gdb(home_directory, "Intermediate.gdb")
        arcpy.env.workspace = scratch_ws
        arcpy.env.overwriteOutput = True
        if arcpy.CheckExtension("3D") == "Available":
            arcpy.CheckOutExtension("3D")
            if arcpy.CheckExtension("Spatial") == "Available":
                arcpy.CheckOutExtension("Spatial")
                arcpy.AddMessage("Processing input raster: " + common_lib.get_name_from_feature_class(input_raster))
                # make a copy of the input feature class
                input_fc = os.path.join(scratch_ws, common_lib.get_name_from_feature_class(input_layer) + "_copy")
                if arcpy.Exists(input_fc):
                    arcpy.Delete_management(input_fc)
                # write to fc
                arcpy.AddMessage(
                    "Copying " + common_lib.get_name_from_feature_class(input_layer) + " to " + input_fc)
                arcpy.CopyFeatures_management(input_layer, input_fc)
                polygon, raster = create_hole_in_surface(scratch_ws, input_raster, input_fc, float(depth),
                                                         output_raster, verbose)
                # add polygon for bottom of hole with mulch texture
                # bug fix: initialize so the return below cannot raise
                # UnboundLocalError when the symbology layer file is missing.
                output_layer = None
                SymbologyLayer = layer_directory + "\\hole_texture2.lyrx"
                if arcpy.Exists(SymbologyLayer):
                    output_layer = common_lib.get_name_from_feature_class(polygon)
                    arcpy.MakeFeatureLayer_management(polygon, output_layer)
                else:
                    msg_body = create_msg_body("Can't find" + SymbologyLayer + " in " + layer_directory, 0, 0)
                    msg(msg_body, WARNING)
                end_time = time.clock()
                # bug fix: corrected typo ("create_usrface_hole") and actually
                # emit the completion message (it was built but never logged).
                msg_body = create_msg_body("create_surface_hole completed successfully.", start_time, end_time)
                msg(msg_body)
                return raster, output_layer
            else:
                raise LicenseErrorSpatial
        else:
            raise LicenseError3D
    except NoLayerFile:
        print("Can't find Layer file. Exiting...")
        arcpy.AddError("Can't find Layer file. Exiting...")
    except LicenseError3D:
        print("3D Analyst license is unavailable")
        arcpy.AddError("3D Analyst license is unavailable")
    except LicenseErrorSpatial:
        print("Spatial Analyst license is unavailable")
        arcpy.AddError("Spatial Analyst license is unavailable")
    except NoNoDataError:
        print("Input raster does not have NODATA values")
        arcpy.AddError("Input raster does not have NODATA values")
    except ValueError:
        print("Input no flood value is not a number.")
        arcpy.AddError("Input no flood value is not a number.")
    except arcpy.ExecuteError:
        line, filename, synerror = trace()
        msg("Error on %s" % line, ERROR)
        msg("Error in file name: %s" % filename, ERROR)
        msg("With error message: %s" % synerror, ERROR)
        msg("ArcPy Error Message: %s" % arcpy.GetMessages(2), ERROR)
    except FunctionError as f_e:
        messages = f_e.args[0]
        msg("Error in function: %s" % messages["function"], ERROR)
        msg("Error on %s" % messages["line"], ERROR)
        msg("Error in file name: %s" % messages["filename"], ERROR)
        msg("With error message: %s" % messages["synerror"], ERROR)
        msg("ArcPy Error Message: %s" % messages["arc"], ERROR)
    except:
        line, filename, synerror = trace()
        msg("Error on %s" % line, ERROR)
        msg("Error in file name: %s" % filename, ERROR)
        msg("with error message: %s" % synerror, ERROR)
    finally:
        # always release the extensions, even on failure
        arcpy.CheckInExtension("3D")
        arcpy.CheckInExtension("Spatial")
# Debug-only entry point: runs the tool directly with empty GP parameters and
# the debug flag set to 1 (uses the hard-coded test paths inside main()).
if __name__ == "__main__":
    main("", "", "", "", 1)
|
{"/scripts/create_3Dgravity_mains.py": ["/scripts/settings.py"], "/scripts/create_surface_hole.py": ["/scripts/settings.py"], "/scripts/gptools.py": ["/scripts/create_3Dgravity_mains.py", "/scripts/create_surface_hole.py", "/scripts/create_3Dlaterals.py", "/scripts/create_3Dmanholes.py", "/scripts/create_elevation_tile_package.py", "/scripts/settings.py"], "/scripts/create_elevation_tile_package.py": ["/scripts/settings.py"], "/scripts/create_3Dmanholes.py": ["/scripts/settings.py"], "/scripts/create_3Dlaterals.py": ["/scripts/settings.py"]}
|
10,472
|
gvanmaren/3D-Utilities
|
refs/heads/master
|
/scripts/gptools.py
|
import arcpy
import time
import os
import math
import sys
import scripts.create_3Dgravity_mains as create_3Dgravity_mains
import scripts.create_surface_hole as create_surface_hole
import scripts.create_3Dlaterals as create_3Dlaterals
import scripts.create_3Dmanholes as create_3Dmanholes
import scripts.create_elevation_tile_package as create_elevation_tile_package
import importlib
importlib.reload(create_3Dgravity_mains) # force reload of the module
importlib.reload(create_3Dlaterals) # force reload of the module
importlib.reload(create_3Dmanholes) # force reload of the module
importlib.reload(create_surface_hole) # force reload of the module
importlib.reload(create_elevation_tile_package) # force reload of the module
import scripts.common_lib as common_lib
from scripts.common_lib import create_msg_body, msg, trace
from scripts.settings import *
class Create3DGravityMains(object):
    # GP tool wrapper: delegates the actual work to create_3Dgravity_mains.main.
    # Parameter index map (shared by updateParameters/updateMessages/execute):
    #   0 input_features, 1 upper_elevation, 2 lower_elevation, 3 invert_unit,
    #   4 diameter, 5 diameter_unit, 6 default_diameter, 7 output_features,
    #   8 output_3dobjects, 9 use_nearby_points, 10 zero_as_error,
    #   11 error_elevation, 12 interpolate_errors, 13-16 derived output layers
    def __init__(self):
        """Define the tool (tool name is the name of the class)."""
        self.label = "Create 3D Gravity Mains"
        self.description = "Creates 3D gravity mains lines from 2D and 3D gravity mains " + \
                           "lines with start invert and end invert elevation attributes."
        self.canRunInBackground = False

    def getParameterInfo(self):
        """Define parameter definitions"""
        input_features = arcpy.Parameter(displayName="Input Features",
                                         name="InputFeatures",
                                         datatype=["DEFeatureClass", "GPLayer"],
                                         parameterType="Required",
                                         direction="Input")
        upper_elevation = arcpy.Parameter(displayName="Upper Invert Elevation",
                                          name="UpperInvertElevation",
                                          datatype="GPString",
                                          parameterType="Required",
                                          direction="Input")
        lower_elevation = arcpy.Parameter(displayName="Lower Invert Elevation",
                                          name="LowerInvertElevation",
                                          datatype="GPString",
                                          parameterType="Required",
                                          direction="Input")
        invert_unit = arcpy.Parameter(displayName="Invert Elevation Unit",
                                      name="InvertElevationUnit",
                                      datatype="GPString",
                                      parameterType="Optional",
                                      direction="Input")
        diameter = arcpy.Parameter(displayName="Diameter",
                                   name="Diameter",
                                   datatype="GPString",
                                   parameterType="Optional",
                                   direction="Input")
        diameter_unit = arcpy.Parameter(displayName="Diameter Unit",
                                        name="DiameterUnit",
                                        datatype="GPString",
                                        parameterType="Required",
                                        direction="Input")
        default_diameter = arcpy.Parameter(displayName="Default Diameter",
                                           name="DefaultDiameter",
                                           datatype="GPDouble",
                                           parameterType="Required",
                                           direction="Input")
        output_features = arcpy.Parameter(displayName="Output Features",
                                          name="OutputFeatures",
                                          datatype="DEFeatureClass",
                                          parameterType="Required",
                                          direction="Output")
        output_3dobjects = arcpy.Parameter(displayName="Output As 3D Objects",
                                           name="OutputAs3DObjects",
                                           datatype="GPBoolean",
                                           parameterType="Required",
                                           direction="Input")
        use_nearby_points = arcpy.Parameter(displayName="Use Nearby Points For Elevation",
                                            name="UseNearbyPointsForElevation",
                                            datatype="GPBoolean",
                                            parameterType="Optional",
                                            direction="Input")
        zero_as_error = arcpy.Parameter(displayName="Treat 0 as Error",
                                        name="Treat0asError",
                                        datatype="GPBoolean",
                                        parameterType="Optional",
                                        direction="Input")
        error_elevation = arcpy.Parameter(displayName="Error Elevation Value",
                                          name="ErrorElevationValue",
                                          datatype="GPDouble",
                                          parameterType="Optional",
                                          direction="Input")
        interpolate_errors = arcpy.Parameter(displayName="Interpolate Errors",
                                             name="InterpolateErrors",
                                             datatype="GPBoolean",
                                             parameterType="Optional",
                                             direction="Input")
        # Derived outputs: one layer parameter per symbology variant
        # (feet/meter lines, feet/meter 3D objects); execute() picks one.
        layer = arcpy.Parameter(displayName="layer",
                                name="layer",
                                datatype="GPFeatureLayer",
                                parameterType="Derived", enabled=True,
                                direction="Output")
        layer2 = arcpy.Parameter(displayName="layer2",
                                 name="layer2",
                                 datatype="GPFeatureLayer",
                                 parameterType="Derived", enabled=True,
                                 direction="Output")
        layer3 = arcpy.Parameter(displayName="layer3",
                                 name="layer3",
                                 datatype="GPFeatureLayer",
                                 parameterType="Derived", enabled=True,
                                 direction="Output")
        layer4 = arcpy.Parameter(displayName="layer4",
                                 name="layer4",
                                 datatype="GPFeatureLayer",
                                 parameterType="Derived", enabled=True,
                                 direction="Output")
        default_diameter.value = 1
        diameter_unit.enabled = False
        diameter_unit.value = UNDEFINED
        invert_unit.value = None
        use_nearby_points.enabled = False
        output_3dobjects.value = False
        zero_as_error.value = False
        interpolate_errors.value = False
        error_elevation.value = 9999
        zero_as_error.category = 'Error Handling'
        error_elevation.category = 'Error Handling'
        interpolate_errors.category = 'Error Handling'
        layer.parameterDependencies = [input_features.name]
        layer2.parameterDependencies = [input_features.name]
        layer3.parameterDependencies = [input_features.name]
        layer4.parameterDependencies = [input_features.name]
        aprx = arcpy.mp.ArcGISProject("CURRENT")
        layer_directory = aprx.homeFolder + "\\LayerFiles"
        layer.symbology = os.path.join(layer_directory, 'Line3DError.lyrx')
        layer2.symbology = os.path.join(layer_directory, 'Line3DError_meters.lyrx')
        layer3.symbology = os.path.join(layer_directory, 'LineObject3DError.lyrx')
        layer4.symbology = os.path.join(layer_directory, 'LineObject3DError_meters.lyrx')
        params = [input_features, upper_elevation, lower_elevation, invert_unit, diameter, diameter_unit, default_diameter,
                  output_features, output_3dobjects, use_nearby_points, zero_as_error, error_elevation, interpolate_errors,
                  layer, layer2, layer3, layer4]
        return params

    def isLicensed(self):
        """Set whether tool is licensed to execute."""
        return True

    def updateParameters(self, params):
        """Modify the values and properties of parameters before internal
        validation is performed. This method is called whenever a parameter
        has been changed."""
        aprx = arcpy.mp.ArcGISProject("CURRENT")  # NOTE(review): unused here
        if params[0].value:
            if arcpy.Exists(params[0].value):
                # offer only numeric fields as elevation/diameter candidates
                fields = arcpy.ListFields(params[0].value)
                real_fields_list = []
                params[1].filter.list = []
                for f in fields:
                    if f.type == "Double" or f.type == "Integer" or f.type == "SmallInteger" or f.type == "Single":
                        real_fields_list.append(f.name)
                params[1].filter.list = sorted(set(real_fields_list))
                params[2].filter.list = sorted(set(real_fields_list))
                if params[1].value and params[2].value:
                    # diameter candidates exclude the two chosen invert fields
                    full_list = sorted(set(real_fields_list))
                    full_list.remove(params[1].value)
                    full_list.remove(params[2].value)
                    params[4].filter.list = full_list
        unitList1 = ["Inches", "Feet", "Millimeters", "Centimeters", "Meters"]
        unitList2 = [UNDEFINED, "Inches", "Feet", "Millimeters", "Centimeters", "Meters"]
        params[3].filter.list = unitList1
        params[5].filter.list = unitList2
        # the diameter unit only makes sense once a diameter field is chosen
        if params[4].value:
            params[5].enabled = True
        else:
            params[5].enabled = False
        return

    def updateMessages(self, params):
        """Modify the messages created by internal validation for each tool
        parameter. This method is called after internal validation."""
        if params[4].value and not params[5].value:
            params[5].setErrorMessage('Diameter Unit is required if a diameter attribute has been selected!')
        return

    def execute(self, parameters, messages):
        class NoLayerFile(Exception):
            pass

        class NoOutput(Exception):
            pass

        try:
            """The source code of the tool."""
            # unpack the 13 user-facing parameters (the last 4 are derived outputs)
            (input_features, upper_elevation, lower_elevation, invert_unit, diameter, diameter_unit,
             default_diameter, output_features, output_3dobjects, use_nearby_points,
             zero_as_error, error_elevation, interpolate_errors) = [p.valueAsText for p in parameters[:-4]]
            if diameter_unit == UNDEFINED:
                diameter_unit = None
            # check if input exists
            if arcpy.Exists(parameters[0].value):
                lines_3d, objects_3d = create_3Dgravity_mains.main(input_layer=parameters[0].value, start_vertex_elevation=upper_elevation, end_vertex_elevation=lower_elevation,
                                                                   vertex_elevation_unit=invert_unit, diameter=diameter, diameter_unit=diameter_unit,
                                                                   default_diameter=parameters[6].value,
                                                                   output_features=output_features,
                                                                   output_as_3dobject=parameters[8].value,
                                                                   use_nearby_points=parameters[9].value,
                                                                   zero_as_error=parameters[10].value,
                                                                   error_elevation=parameters[11].value,
                                                                   interpolate_errors=parameters[12].value,
                                                                   debug=0)
                if lines_3d:
                    arcpy.AddMessage("Adding: " + common_lib.get_name_from_feature_class(lines_3d))
                    # route each output to the derived parameter whose symbology
                    # matches its Z unit (13/15 feet, 14/16 meters)
                    if common_lib.get_z_unit(lines_3d, 0) == "Feet":
                        arcpy.SetParameter(13, lines_3d)
                    else:
                        arcpy.SetParameter(14, lines_3d)
                    if objects_3d:
                        if common_lib.get_z_unit(objects_3d, 0) == "Feet":
                            arcpy.SetParameter(15, objects_3d)
                        else:
                            arcpy.SetParameter(16, objects_3d)
                else:
                    raise NoOutput
            else:
                raise NoLayerFile
        except NoLayerFile:
            print("Can't find Layer file. Exiting...")
            arcpy.AddError("Can't find Layer file. Exiting...")
        except NoOutput:
            print("Can't create output. Exiting...")
            arcpy.AddError("Can't create output. Exiting...")
class Create3DLaterals(object):
    # GP tool wrapper: delegates the actual work to create_3Dlaterals.main.
    # Parameter index map (shared by updateParameters/execute):
    #   0 input_features, 1 input_3Dmains, 2 diameter, 3 diameter_unit,
    #   4 default_diameter, 5 slope, 6 default_slope, 7 output_features,
    #   8 output_3dobjects, 9-12 derived output layers
    def __init__(self):
        """Define the tool (tool name is the name of the class)."""
        self.label = "Create 3D Laterals"
        self.description = "Creates 3D lateral lines from 2D and 3D laterals " + \
                           "using 3D gravity mains as input."
        self.canRunInBackground = False

    def getParameterInfo(self):
        """Define parameter definitions"""
        input_features = arcpy.Parameter(displayName="Input Features",
                                         name="InputFeatures",
                                         datatype=["DEFeatureClass", "GPLayer"],
                                         parameterType="Required",
                                         direction="Input")
        input_3Dmains = arcpy.Parameter(displayName="3D Gravity Mains",
                                        name="3DGravitymains",
                                        datatype=["DEFeatureClass", "GPLayer"],
                                        parameterType="Required",
                                        direction="Input")
        diameter = arcpy.Parameter(displayName="Diameter",
                                   name="Diameter",
                                   datatype="GPString",
                                   parameterType="Optional",
                                   direction="Input")
        diameter_unit = arcpy.Parameter(displayName="Diameter Unit",
                                        name="DiameterUnit",
                                        datatype="GPString",
                                        parameterType="Required",
                                        direction="Input")
        default_diameter = arcpy.Parameter(displayName="Default Diameter",
                                           name="DefaultDiameter",
                                           datatype="GPDouble",
                                           parameterType="Required",
                                           direction="Input")
        slope = arcpy.Parameter(displayName="Slope",
                                name="Slope",
                                datatype="GPString",
                                parameterType="Optional",
                                direction="Input")
        default_slope = arcpy.Parameter(displayName="Default Slope",
                                        name="DefaultSlope",
                                        datatype="GPDouble",
                                        parameterType="Required",
                                        direction="Input")
        output_features = arcpy.Parameter(displayName="Output Features",
                                          name="OutputFeatures",
                                          datatype="DEFeatureClass",
                                          parameterType="Required",
                                          direction="Output")
        output_3dobjects = arcpy.Parameter(displayName="Output As 3D Objects",
                                           name="OutputAs3DObjects",
                                           datatype="GPBoolean",
                                           parameterType="Required",
                                           direction="Input")
        # Derived outputs: one layer parameter per symbology variant
        # (feet/meter lines, feet/meter 3D objects); execute() picks one.
        layer = arcpy.Parameter(displayName="layer",
                                name="layer",
                                datatype="GPFeatureLayer",
                                parameterType="Derived", enabled=True,
                                direction="Output")
        layer2 = arcpy.Parameter(displayName="layer2",
                                 name="layer2",
                                 datatype="GPFeatureLayer",
                                 parameterType="Derived", enabled=True,
                                 direction="Output")
        layer3 = arcpy.Parameter(displayName="layer3",
                                 name="layer3",
                                 datatype="GPFeatureLayer",
                                 parameterType="Derived", enabled=True,
                                 direction="Output")
        layer4 = arcpy.Parameter(displayName="layer4",
                                 name="layer4",
                                 datatype="GPFeatureLayer",
                                 parameterType="Derived", enabled=True,
                                 direction="Output")
        default_diameter.value = 0.5
        diameter_unit.enabled = False
        diameter_unit.value = UNDEFINED
        default_slope.value = 2
        output_3dobjects.value = False
        layer.parameterDependencies = [input_features.name]
        layer2.parameterDependencies = [input_features.name]
        layer3.parameterDependencies = [input_features.name]
        layer4.parameterDependencies = [input_features.name]
        aprx = arcpy.mp.ArcGISProject("CURRENT")
        layer_directory = aprx.homeFolder + "\\LayerFiles"
        layer.symbology = os.path.join(layer_directory, 'LateralLine3D.lyrx')
        layer2.symbology = os.path.join(layer_directory, 'LateralLine3D_meter.lyrx')
        layer3.symbology = os.path.join(layer_directory, 'LateralObject3D.lyrx')
        layer4.symbology = os.path.join(layer_directory, 'LateralObject3D_meter.lyrx')
        params = [input_features, input_3Dmains, diameter, diameter_unit, default_diameter,
                  slope, default_slope,
                  output_features, output_3dobjects,
                  layer, layer2, layer3, layer4]
        return params

    def isLicensed(self):
        """Set whether tool is licensed to execute."""
        return True

    def updateParameters(self, params):
        """Modify the values and properties of parameters before internal
        validation is performed. This method is called whenever a parameter
        has been changed."""
        if params[0].value:
            if arcpy.Exists(params[0].value):
                # offer only numeric fields as diameter/slope candidates
                fields = arcpy.ListFields(params[0].value)
                real_fields_list = []
                for f in fields:
                    if f.type == "Double" or f.type == "Integer" or f.type == "SmallInteger" or f.type == "Single":
                        real_fields_list.append(f.name)
                full_list = sorted(set(real_fields_list))
                params[2].filter.list = full_list
                if params[2].value:
                    # slope candidates exclude the chosen diameter field
                    full_list.remove(params[2].value)
                params[5].filter.list = full_list
        # consistency: use the UNDEFINED constant (was the literal "Undefined")
        # NOTE(review): unit names here are singular ("Meter") while the other
        # tools use plural ("Meters") — kept as-is; verify against
        # create_3Dlaterals.main before changing user-visible values.
        unitList = [UNDEFINED, "Inches", "Feet", "Millimeter", "Centimeter", "Meter"]
        params[3].filter.list = unitList
        # the diameter unit only makes sense once a diameter field is chosen;
        # consistency fix: also re-disable it when the field is cleared
        # (matches Create3DGravityMains / Create3DManholes behavior)
        if params[2].value:
            params[3].enabled = True
        else:
            params[3].enabled = False
        return

    def updateMessages(self, parameters):
        """Modify the messages created by internal validation for each tool
        parameter. This method is called after internal validation."""
        return

    def execute(self, parameters, messages):
        class NoLayerFile(Exception):
            pass

        class NoOutput(Exception):
            pass

        try:
            """The source code of the tool."""
            # unpack the 9 user-facing parameters (the last 4 are derived outputs)
            (input_features, input_3Dmains, diameter, diameter_unit, default_diameter, slope, default_slope,
             output_features, output_3dobjects) = [p.valueAsText for p in parameters[:-4]]
            if diameter_unit == UNDEFINED:
                diameter_unit = None
            # check if input exists
            if arcpy.Exists(parameters[0].value):
                lines_3d, objects_3d = create_3Dlaterals.main(input_layer=parameters[0].value,
                                                              input_3d_mains_layer=parameters[1].value,
                                                              diameter=diameter,
                                                              diameter_unit=diameter_unit,
                                                              # bug fix: default diameter is parameter 4
                                                              # (parameter 3 is the diameter *unit*)
                                                              default_diameter=parameters[4].value,
                                                              slope=slope,
                                                              default_slope=default_slope,
                                                              output_features=output_features,
                                                              # bug fix: the 3D-objects boolean is parameter 8
                                                              # (parameter 7 is the output feature class path)
                                                              output_as_3dobject=parameters[8].value,
                                                              debug=0)
                if lines_3d:
                    arcpy.AddMessage("Adding: " + common_lib.get_name_from_feature_class(lines_3d))
                    # route each output to the derived parameter whose symbology
                    # matches its Z unit (9/11 feet, 10/12 meters)
                    if common_lib.get_z_unit(lines_3d, 0) == "Feet":
                        arcpy.SetParameter(9, lines_3d)
                    else:
                        arcpy.SetParameter(10, lines_3d)
                    if objects_3d:
                        if common_lib.get_z_unit(objects_3d, 0) == "Feet":
                            arcpy.SetParameter(11, objects_3d)
                        else:
                            arcpy.SetParameter(12, objects_3d)
                else:
                    raise NoOutput
            else:
                raise NoLayerFile
        except NoLayerFile:
            print("Can't find Layer file. Exiting...")
            arcpy.AddError("Can't find Layer file. Exiting...")
        except NoOutput:
            print("Can't create output. Exiting...")
            arcpy.AddError("Can't create output. Exiting...")
class Create3DManholes(object):
    # GP tool wrapper: delegates the actual work to create_3Dmanholes.main.
    # Parameter index map (shared by updateParameters/updateMessages/execute):
    #   0 input_features, 1 upper_elevation (rim), 2 lower_elevation (invert),
    #   3 invert_unit, 4 diameter, 5 diameter_unit, 6 default_diameter,
    #   7 output_features, 8 output_3dobjects, 9 zero_as_error,
    #   10 error_elevation, 11 interpolate_errors, 12 input_raster,
    #   13-16 derived output layers
    def __init__(self):
        """Define the tool (tool name is the name of the class)."""
        self.label = "Create 3D Manholes"
        self.description = "Creates 3D manhole points from 2D and 3D manholes " + \
                           "points with rim and invert elevation attributes."
        self.canRunInBackground = False

    def getParameterInfo(self):
        """Define parameter definitions"""
        input_features = arcpy.Parameter(displayName="Input Features",
                                         name="InputFeatures",
                                         datatype=["DEFeatureClass", "GPLayer"],
                                         parameterType="Required",
                                         direction="Input")
        upper_elevation = arcpy.Parameter(displayName="Upper Invert Elevation",
                                          name="UpperInvertElevation",
                                          datatype="GPString",
                                          parameterType="Required",
                                          direction="Input")
        lower_elevation = arcpy.Parameter(displayName="Lower Invert Elevation",
                                          name="LowerInvertElevation",
                                          datatype="GPString",
                                          parameterType="Required",
                                          direction="Input")
        invert_unit = arcpy.Parameter(displayName="Invert Elevation Unit",
                                      name="InvertElevationUnit",
                                      datatype="GPString",
                                      parameterType="Optional",
                                      direction="Input")
        diameter = arcpy.Parameter(displayName="Diameter",
                                   name="Diameter",
                                   datatype="GPString",
                                   parameterType="Optional",
                                   direction="Input")
        diameter_unit = arcpy.Parameter(displayName="Diameter Unit",
                                        name="DiameterUnit",
                                        datatype="GPString",
                                        parameterType="Required",
                                        direction="Input")
        default_diameter = arcpy.Parameter(displayName="Default Diameter",
                                           name="DefaultDiameter",
                                           datatype="GPDouble",
                                           parameterType="Required",
                                           direction="Input")
        output_features = arcpy.Parameter(displayName="Output Features",
                                          name="OutputFeatures",
                                          datatype="DEFeatureClass",
                                          parameterType="Required",
                                          direction="Output")
        output_3dobjects = arcpy.Parameter(displayName="Output As 3D Objects",
                                           name="OutputAs3DObjects",
                                           datatype="GPBoolean",
                                           parameterType="Required",
                                           direction="Input")
        zero_as_error = arcpy.Parameter(displayName="Treat 0 as Error",
                                        name="Treat0asError",
                                        datatype="GPBoolean",
                                        parameterType="Optional",
                                        direction="Input")
        error_elevation = arcpy.Parameter(displayName="Error Elevation Value",
                                          name="ErrorElevationValue",
                                          datatype="GPDouble",
                                          parameterType="Optional",
                                          direction="Input")
        interpolate_errors = arcpy.Parameter(displayName="Interpolate Errors",
                                             name="InterpolateErrors",
                                             datatype="GPBoolean",
                                             parameterType="Optional",
                                             direction="Input")
        input_raster = arcpy.Parameter(displayName="Terrain Surface",
                                       name="TerrainSurface",
                                       datatype="GPRasterLayer",
                                       parameterType="Optional",
                                       direction="Input")
        # Derived outputs: one layer parameter per symbology variant
        # (feet/meter points, feet/meter 3D objects); execute() picks one.
        layer = arcpy.Parameter(displayName="layer",
                                name="layer",
                                datatype="GPFeatureLayer",
                                parameterType="Derived", enabled=True,
                                direction="Output")
        layer2 = arcpy.Parameter(displayName="layer2",
                                 name="layer2",
                                 datatype="GPFeatureLayer",
                                 parameterType="Derived", enabled=True,
                                 direction="Output")
        layer3 = arcpy.Parameter(displayName="layer3",
                                 name="layer3",
                                 datatype="GPFeatureLayer",
                                 parameterType="Derived", enabled=True,
                                 direction="Output")
        layer4 = arcpy.Parameter(displayName="layer4",
                                 name="layer4",
                                 datatype="GPFeatureLayer",
                                 parameterType="Derived", enabled=True,
                                 direction="Output")
        default_diameter.value = 1
        diameter_unit.enabled = False
        diameter_unit.value = UNDEFINED
        invert_unit.value = None
        output_3dobjects.value = False
        zero_as_error.value = False
        interpolate_errors.value = False
        error_elevation.value = 9999
        input_raster.value = None
        input_raster.enabled = False
        zero_as_error.category = 'Error Handling'
        error_elevation.category = 'Error Handling'
        interpolate_errors.category = 'Error Handling'
        input_raster.category = 'Error Handling'
        layer.parameterDependencies = [input_features.name]
        layer2.parameterDependencies = [input_features.name]
        layer3.parameterDependencies = [input_features.name]
        layer4.parameterDependencies = [input_features.name]
        aprx = arcpy.mp.ArcGISProject("CURRENT")
        layer_directory = aprx.homeFolder + "\\LayerFiles"
        layer.symbology = os.path.join(layer_directory, 'Point3DError.lyrx')
        layer2.symbology = os.path.join(layer_directory, 'Point3DError_meter.lyrx')
        layer3.symbology = os.path.join(layer_directory, 'PointObject3DError.lyrx')
        layer4.symbology = os.path.join(layer_directory, 'PointObject3DError_meter.lyrx')
        params = [input_features, upper_elevation, lower_elevation, invert_unit, diameter, diameter_unit, default_diameter,
                  output_features, output_3dobjects, zero_as_error, error_elevation, interpolate_errors, input_raster,
                  layer, layer2, layer3, layer4]
        return params

    def isLicensed(self):
        """Set whether tool is licensed to execute."""
        return True

    def updateParameters(self, params):
        """Modify the values and properties of parameters before internal
        validation is performed. This method is called whenever a parameter
        has been changed."""
        aprx = arcpy.mp.ArcGISProject("CURRENT")  # NOTE(review): unused here
        if params[0].value:
            if arcpy.Exists(params[0].value):
                # offer only numeric fields as elevation/diameter candidates
                fields = arcpy.ListFields(params[0].value)
                real_fields_list = []
                params[1].filter.list = []
                for f in fields:
                    if f.type == "Double" or f.type == "Integer" or f.type == "SmallInteger" or f.type == "Single":
                        real_fields_list.append(f.name)
                params[1].filter.list = sorted(set(real_fields_list))
                params[2].filter.list = sorted(set(real_fields_list))
                if params[1].value and params[2].value:
                    # diameter candidates exclude the two chosen elevation fields
                    full_list = sorted(set(real_fields_list))
                    full_list.remove(params[1].value)
                    full_list.remove(params[2].value)
                    params[4].filter.list = full_list
        unitList1 = ["Inches", "Feet", "Millimeters", "Centimeters", "Meters"]
        unitList2 = [UNDEFINED, "Inches", "Feet", "Millimeters", "Centimeters", "Meters"]
        params[3].filter.list = unitList1
        params[5].filter.list = unitList2
        # the diameter unit only makes sense once a diameter field is chosen
        if params[4].value:
            params[5].enabled = True
        else:
            params[5].enabled = False
        # the terrain surface is only needed when interpolating errors
        if params[11].value:
            params[12].enabled = True
        else:
            params[12].enabled = False
        return

    def updateMessages(self, params):
        """Modify the messages created by internal validation for each tool
        parameter. This method is called after internal validation."""
        if params[4].value and not params[5].value:
            params[5].setErrorMessage('Diameter Unit is required if a diameter attribute has been selected!')
        if params[11].value and not params[12].value:
            params[12].setErrorMessage('Terrain Surface is required if Interpolate Errors is set!')
        return

    def execute(self, parameters, messages):
        class NoLayerFile(Exception):
            pass

        class NoOutput(Exception):
            pass

        try:
            """The source code of the tool."""
            # unpack the 13 user-facing parameters (the last 4 are derived outputs)
            (input_features, upper_elevation, lower_elevation, invert_unit, diameter, diameter_unit,
             default_diameter, output_features, output_3dobjects,
             zero_as_error, error_elevation, interpolate_errors, input_raster) = [p.valueAsText for p in parameters[:-4]]
            if diameter_unit == UNDEFINED:
                diameter_unit = None
            # check if input exists
            if arcpy.Exists(parameters[0].value):
                points_3d, objects_3d = create_3Dmanholes.main(input_layer=parameters[0].value, rim_elevation=upper_elevation, invert_elevation=lower_elevation,
                                                               vertex_elevation_unit=invert_unit, diameter=diameter, diameter_unit=diameter_unit,
                                                               default_diameter=parameters[6].value,
                                                               output_features=output_features,
                                                               output_as_3dobject=parameters[8].value,
                                                               zero_as_error=parameters[9].value,
                                                               error_elevation=parameters[10].value,
                                                               interpolate_errors=parameters[11].value,
                                                               terrain_surface=parameters[12].value,
                                                               debug=0)
                if points_3d:
                    arcpy.AddMessage("Adding: " + common_lib.get_name_from_feature_class(points_3d))
                    # route each output to the derived parameter whose symbology
                    # matches its Z unit (13/15 feet, 14/16 meters)
                    if common_lib.get_z_unit(points_3d, 0) == "Feet":
                        arcpy.SetParameter(13, points_3d)
                    else:
                        arcpy.SetParameter(14, points_3d)
                    if objects_3d:
                        if common_lib.get_z_unit(objects_3d, 0) == "Feet":
                            arcpy.SetParameter(15, objects_3d)
                        else:
                            arcpy.SetParameter(16, objects_3d)
                else:
                    raise NoOutput
            else:
                raise NoLayerFile
        except NoLayerFile:
            print("Can't find Layer file. Exiting...")
            arcpy.AddError("Can't find Layer file. Exiting...")
        except NoOutput:
            print("Can't create output. Exiting...")
            arcpy.AddError("Can't create output. Exiting...")
class CreateSurfaceHole(object):
    # GP tool wrapper: delegates the actual work to create_surface_hole.main.
    # Parameter index map: 0 input_raster, 1 input_features, 2 depth,
    # 3 output_raster, 4 derived modified surface, 5 derived extent layer.
    def __init__(self):
        """Define the tool (tool name is the name of the class)."""
        self.label = "Create Surface Hole"
        self.description = "Creates a surface that can be used to create a hole in the elevation " + \
                           "surface so that utilities can be viewed from below the surface."
        self.canRunInBackground = False

    def getParameterInfo(self):
        """Define parameter definitions"""
        input_raster = arcpy.Parameter(displayName="Input Surface",
                                       name="InputSurface",
                                       datatype="GPRasterLayer",
                                       parameterType="Optional",
                                       direction="Input")
        input_features = arcpy.Parameter(displayName="Input Features",
                                         name="InputFeatures",
                                         datatype=["DEFeatureClass", "GPLayer"],
                                         parameterType="Required",
                                         direction="Input")
        depth = arcpy.Parameter(displayName="Depth",
                                name="Depth",
                                datatype="GPDouble",
                                parameterType="Required",
                                direction="Input")
        output_raster = arcpy.Parameter(displayName="Output Surface",
                                        name="OutputSurface",
                                        datatype="GPRasterLayer",
                                        parameterType="Optional",
                                        direction="Output")
        layer = arcpy.Parameter(displayName="layer",
                                name="layer",
                                datatype="GPLayer",
                                parameterType="Derived", enabled=True,
                                direction="Output")
        # bug fix: this parameter was declared with displayName/name "layer",
        # duplicating the parameter above; GP parameter names must be unique.
        layer2 = arcpy.Parameter(displayName="layer2",
                                 name="layer2",
                                 datatype="GPFeatureLayer",
                                 parameterType="Derived", enabled=True,
                                 direction="Output")
        aprx = arcpy.mp.ArcGISProject("CURRENT")
        layer_directory = aprx.homeFolder + "\\LayerFiles"
        layer2.symbology = os.path.join(layer_directory, 'hole_texture2.lyrx')
        params = [input_raster, input_features, depth, output_raster, layer, layer2]
        return params

    def isLicensed(self):
        """Set whether tool is licensed to execute."""
        return True

    def updateParameters(self, params):
        """Modify the values and properties of parameters before internal
        validation is performed. This method is called whenever a parameter
        has been changed."""
        return

    def updateMessages(self, params):
        """Modify the messages created by internal validation for each tool
        parameter. This method is called after internal validation."""
        return

    def execute(self, parameters, messages):
        class NoLayerFile(Exception):
            pass

        class NoOutput(Exception):
            pass

        try:
            """The source code of the tool."""
            # check if input exists
            if arcpy.Exists(parameters[0].value):
                # main returns (modified surface, extent layer name); the second
                # value is the textured extent polygon layer for the hole bottom
                surface, polygon = create_surface_hole.main(input_raster=parameters[0].value,
                                                            input_layer=parameters[1].value,
                                                            depth=parameters[2].value,
                                                            output_raster=parameters[3].valueAsText,
                                                            debug=0)
                if surface:
                    arcpy.AddMessage("Created: " + common_lib.get_name_from_feature_class(surface))
                    arcpy.SetParameter(4, surface)
                    if polygon:
                        arcpy.AddMessage("Adding: " + common_lib.get_name_from_feature_class(polygon) + " as extent with texture.")
                        arcpy.SetParameter(5, polygon)
                else:
                    raise NoOutput
            else:
                raise NoLayerFile
        except NoLayerFile:
            print("Can't find Layer file. Exiting...")
            arcpy.AddError("Can't find Layer file. Exiting...")
        except NoOutput:
            print("Can't create output. Exiting...")
            arcpy.AddError("Can't create output. Exiting...")
class CreateElevationTilePackage(object):
    """GP tool that builds an Elevation Tile Package (*.tpk) from an elevation source.

    Delegates the actual work to create_elevation_tile_package.main.
    """

    def __init__(self):
        """Define the tool (tool name is the name of the class)."""
        self.label = "Create Elevation Tile Package"
        self.description = "Creates an Elevation Tile Package (*.tpk) from elevation datasource."
        self.canRunInBackground = False

    def getParameterInfo(self):
        """Define parameter definitions."""
        input_raster = arcpy.Parameter(displayName="Input Elevation Source",
                                       name="InputElevationSource",
                                       datatype="GPRasterLayer",
                                       parameterType="Optional",
                                       direction="Input")

        scale = arcpy.Parameter(displayName="Minimum Cached Scale Level",
                                name="MCSL",
                                datatype="GPLong",
                                parameterType="Required",
                                direction="Input")

        pixel_tolerance = arcpy.Parameter(displayName="Pixel Tolerance",
                                          name="PixelTolerance",
                                          datatype="GPDouble",
                                          parameterType="Required",
                                          direction="Input")

        output_workspace = arcpy.Parameter(displayName="Output Cache Directory",
                                           name="OutputCacheDirectory",
                                           datatype="DEWorkspace",
                                           parameterType="Required",
                                           direction="Input")

        # constrain the user input to sensible ranges
        scale.filter.type = 'Range'
        scale.filter.list = [0,19]
        pixel_tolerance.filter.type = 'Range'
        pixel_tolerance.filter.list = [0,1]

        params = [input_raster, scale, pixel_tolerance, output_workspace]
        return params

    def isLicensed(self):
        """Set whether tool is licensed to execute."""
        return True

    def updateParameters(self, params):
        """Modify the values and properties of parameters before internal
        validation is performed. This method is called whenever a parameter
        has been changed."""
        return

    def updateMessages(self, params):
        """Modify the messages created by internal validation for each tool
        parameter. This method is called after internal validation."""
        return

    def execute(self, parameters, messages):
        """Run the tool: delegate to create_elevation_tile_package.main."""
        class NoLayerFile(Exception):
            pass

        class NoOutput(Exception):
            pass

        try:
            # check if input exists
            if arcpy.Exists(parameters[0].value):
                # BUG FIX: the layer name was previously wrapped in
                # get_name_from_feature_class twice; a single call suffices.
                arcpy.AddMessage("Creating tpk for: " + common_lib.get_name_from_feature_class(parameters[0].value))
                cache = create_elevation_tile_package.main(input_raster=parameters[0].value,
                                                           minimum_scale_level=parameters[1].valueAsText,
                                                           pixel_tolerance=parameters[2].valueAsText,
                                                           output_ws=parameters[3].valueAsText,
                                                           debug=0)
                if cache:
                    arcpy.AddMessage("Elevation Tile Package created: " + parameters[3].valueAsText)
                else:
                    raise NoOutput
            else:
                raise NoLayerFile
        except NoLayerFile:
            print("Can't find Layer file. Exiting...")
            arcpy.AddError("Can't find Layer file. Exiting...")
        except NoOutput:
            print("Can't create output. Exiting...")
            arcpy.AddError("Can't create output. Exiting...")
# for debug only!
def main():
    """Debug entry point: run one tool module directly with debug=1.

    Each module's main() switches to hard-coded local test paths when its
    last argument (debug) is 1. Uncomment the call you want to exercise.
    """
    # create_3Dgravity_mains.main("", "", "", "", "", "", "", "", "", "", "", "", "", 1)
    # create_3Dlaterals.main("", "", "", "", "", "", "", "", "", 1)
    # create_3Dmanholes.main("", "", "", "", "", "", "", "", "", "", "", "", "", 1)
    # create_surface_hole.main("", "", "", "", 1)
    create_elevation_tile_package.main("", "", "", "", 1)
if __name__ == "__main__":
    main()
|
{"/scripts/create_3Dgravity_mains.py": ["/scripts/settings.py"], "/scripts/create_surface_hole.py": ["/scripts/settings.py"], "/scripts/gptools.py": ["/scripts/create_3Dgravity_mains.py", "/scripts/create_surface_hole.py", "/scripts/create_3Dlaterals.py", "/scripts/create_3Dmanholes.py", "/scripts/create_elevation_tile_package.py", "/scripts/settings.py"], "/scripts/create_elevation_tile_package.py": ["/scripts/settings.py"], "/scripts/create_3Dmanholes.py": ["/scripts/settings.py"], "/scripts/create_3Dlaterals.py": ["/scripts/settings.py"]}
|
10,473
|
gvanmaren/3D-Utilities
|
refs/heads/master
|
/scripts/create_elevation_tile_package.py
|
#-------------------------------------------------------------------------------
# Name: CreateElevationTilePackageForAGOL
# Purpose:
#
# Author: Gert van Maren
#
# Created: 27/07/2016
# Copyright: (c) Esri 2016
# updated:
# updated:
# updated:
#-------------------------------------------------------------------------------
import arcpy
import os
import sys
import shutil
import re
import time
import scripts.common_lib as common_lib
from scripts.common_lib import create_msg_body, msg, trace
from scripts.settings import *
class LicenseError3D(Exception):
    """Raised when a 3D Analyst license is unavailable."""


class LicenseErrorSpatial(Exception):
    """Raised when a Spatial Analyst license is unavailable."""


class NoFeatures(Exception):
    """Raised when an input layer yields no features."""


class No3DFeatures(Exception):
    """Raised when input features lack 3D (Z) geometry."""
def getNameFromFeatureClass(feature_class):
    """Return the dataset name of *feature_class* via its Describe object."""
    return arcpy.Describe(feature_class).name
# Get Workspace from Building feature class location
def getWorkSpaceFromFeatureClass(feature_class, get_gdb):
    """Return the workspace of *feature_class*.

    When get_gdb == "yes" the geodatabase path is returned; otherwise the
    directory that contains the geodatabase is returned.
    """
    workspace = os.path.dirname(arcpy.Describe(feature_class).catalogPath)
    workspace_desc = arcpy.Describe(workspace)
    # step out of a feature dataset to reach the geodatabase itself
    if hasattr(workspace_desc, "datasetType") and workspace_desc.datasetType == 'FeatureDataset':
        workspace = os.path.dirname(workspace)
    if get_gdb == "yes":
        return workspace
    # otherwise hand back the directory where the gdb lives
    return os.path.dirname(workspace)
def GenerateLERCTilingScheme(input_layer, lc_scheme_dir, error):
    """Generate a LERC tiling scheme XML for *input_layer*.

    Bases the scheme on the predefined ArcGIS Online/Bing/Google scheme found
    in *lc_scheme_dir*, using LERC compression with the given error tolerance.
    Returns the path of the generated tiling-scheme XML, or None if an arcpy
    warning/error was caught.
    """
    try:
        # variables
        method = "PREDEFINED"
        numscales = "#"
        predefScheme = lc_scheme_dir+"\\ArcGIS_Online_Bing_Maps_Google_Maps.xml"
        outputTilingScheme = lc_scheme_dir+"\\"+getNameFromFeatureClass(input_layer)+"_tiling_lerc.xml"
        scales = "#"
        scaleType = "#"
        tileOrigin = "#"
        dpi = "96"
        tileSize = "256 x 256"
        tileFormat = "LERC"
        compQuality = "75"
        storageFormat = "COMPACT"
        lerc_error = error

        if arcpy.Exists(predefScheme):
            arcpy.GenerateTileCacheTilingScheme_management(input_layer, outputTilingScheme, method, numscales, predefScheme,
                scales, scaleType, tileOrigin, dpi, tileSize, tileFormat, compQuality, storageFormat, lerc_error)
        else:
            # BUG FIX: message previously read "Can't creat Tile Package. Exciting."
            arcpy.AddWarning(
                "Can't find: " + predefScheme + ". Can't create Tile Package. Exiting.")
            # BUG FIX: raise with an argument so the generic handler below can
            # report e.args[0] without an IndexError.
            raise FileNotFoundError(predefScheme)

        # return the tiling scheme path
        return (outputTilingScheme)

    except arcpy.ExecuteWarning:
        print((arcpy.GetMessages(1)))
        arcpy.AddWarning(arcpy.GetMessages(1))

    except arcpy.ExecuteError:
        print((arcpy.GetMessages(2)))
        arcpy.AddError(arcpy.GetMessages(2))

    # Return any other type of error
    except:
        # By default any other errors will be caught here
        e = sys.exc_info()[1]
        print((e.args[0]))
        arcpy.AddError(e.args[0])
def ManageTileCache(input_layer, cache_directory, output_scheme, scale_level):
    """Build (recreate) a tile cache for *input_layer* and return its path.

    The cache is written under *cache_directory* using *output_scheme*;
    *scale_level* picks the coarsest cached level from the fixed scale list.
    Returns None if an arcpy warning/error was caught.
    """
    try:
        # fixed ArcGIS Online scale denominators, finest first
        scales = [1128.497176,2256.994353,4513.988705,9027.977411,18055.954822,36111.909643,72223.819286,144447.638572,
                  288895.277144,577790.554289,1155581.108577,2311162.217155,4622324.434309,9244648.868618,18489297.737236,
                  36978595.474472,73957190.948944,147914381.897889,295828763.795777,591657527.591555]
        num_levels = len(scales)

        cache_name = getNameFromFeatureClass(input_layer) + "_cache"
        cache_path = cache_directory + "\\" + cache_name
        # finest cached scale is always the first entry; the coarsest is
        # governed by the requested minimum scale level
        max_cached_scale = str(scales[0])
        min_cached_scale = str(scales[num_levels - 1 - scale_level])

        # wipe any cache left over from a previous run
        if arcpy.Exists(cache_path):
            shutil.rmtree(cache_path)
            arcpy.AddMessage("Deleted old cache directory: " + cache_path)

        arcpy.AddMessage("Creating Tile Cache with " + str(num_levels - scale_level) + " levels: L" + str(scale_level) + ":" + min_cached_scale + " down to L:" + str(num_levels - 1) + ":" + max_cached_scale)

        arcpy.ManageTileCache_management(
            cache_directory, "RECREATE_ALL_TILES", cache_name, input_layer, "IMPORT_SCHEME", output_scheme,
            "#", "#", "#", min_cached_scale, max_cached_scale)

        return cache_path

    except arcpy.ExecuteWarning:
        print((arcpy.GetMessages(1)))
        arcpy.AddWarning(arcpy.GetMessages(1))

    except arcpy.ExecuteError:
        print((arcpy.GetMessages(2)))
        arcpy.AddError(arcpy.GetMessages(2))

    # Return any other type of error
    except:
        # By default any other errors will be caught here
        e = sys.exc_info()[1]
        print((e.args[0]))
        arcpy.AddError(e.args[0])
def ExportTileCache(input_layer, cache_directory, tile_cache):
    """Export *tile_cache* to a tile package (*.tpk) named after *input_layer*.

    Returns the package path (without extension), or None if an arcpy
    warning/error was caught.
    """
    try:
        package_name = getNameFromFeatureClass(input_layer)

        arcpy.AddMessage("Creating Tile Package: " + cache_directory + "\\" + package_name + ".tpk. This might take some time...")
        # NOTE(review): the original called GetMessages() here, presumably to
        # clear the pending geoprocessing message queue — confirm.
        arcpy.GetMessages()

        arcpy.ExportTileCache_management(tile_cache, cache_directory, package_name,
                                         "TILE_PACKAGE")

        return cache_directory + "\\" + package_name

    except arcpy.ExecuteWarning:
        print((arcpy.GetMessages(1)))
        arcpy.AddWarning(arcpy.GetMessages(1))

    except arcpy.ExecuteError:
        print((arcpy.GetMessages(2)))
        arcpy.AddError(arcpy.GetMessages(2))

    # Return any other type of error
    except:
        # By default any other errors will be caught here
        e = sys.exc_info()[1]
        print((e.args[0]))
        arcpy.AddError(e.args[0])
def main(input_raster, minimum_scale_level, pixel_tolerance, output_ws, debug):
    """Create an elevation tile package (*.tpk) from an elevation raster.

    input_raster        -- elevation raster layer or path
    minimum_scale_level -- coarsest cached scale level, as text (0-19)
    pixel_tolerance     -- LERC compression error tolerance, as text
    output_ws           -- directory that receives the cache and the .tpk
    debug               -- 1 = use hard-coded local test paths,
                           0 = run inside the current ArcGIS Pro project
    Returns the tile-package path, or None on failure.
    """
    # error classes
    class NoNoDataError(Exception):
        pass

    class LicenseError3D(Exception):
        pass

    class LicenseErrorSpatial(Exception):
        pass

    class SchemaLock(Exception):
        pass

    class NotSupported(Exception):
        pass

    class NoLayerFile(Exception):
        pass

    class FunctionError(Exception):
        pass

    class NoFeatures(Exception):
        pass

    try:
        # Get Attributes from User
        if debug == 0:
            # script variables
            aprx = arcpy.mp.ArcGISProject("CURRENT")
            home_directory = aprx.homeFolder
            log_directory = aprx.homeFolder + "\\Logs"
            scheme_directory = home_directory + "\\TilingSchemes"
            project_ws = aprx.defaultGeodatabase
            enableLogging = True
            DeleteIntermediateData = True
            verbose = 0
            in_memory_switch = True
        else:
            # debug: hard-coded local test data
            input_raster = r'D:\Gert\Work\Esri\Solutions\Utilities\work2.1\3DUtilities\Naperville.gdb\DEM_clip_feet'
            minimum_scale_level = str(12)
            pixel_tolerance = str(0.5)
            output_ws = r'D:\Gert\Work\Esri\Solutions\Utilities\work2.1\3DUtilities\Cache'

            # Create and set workspace location in same directory as input feature class gdb
            home_directory = r'D:\Gert\Work\Esri\Solutions\Utilities\work2.1\3DUtilities'
            scheme_directory = home_directory + "\\TilingSchemes"
            project_ws = home_directory + "\\Results.gdb"
            log_directory = home_directory + "\\Logs"
            enableLogging = False
            DeleteIntermediateData = True
            verbose = 1
            in_memory_switch = False

        # set data paths for packing tool so all additional data is stored in the package - ESRI packing only!
        data_directory_pack = ""
        geodatabase = ""
        feature_class = ""
        model_directory_pack = ""
        model_file = ""
        rule_directory_pack = "RulePackages"
        rule_file = "ExtrudePolygon.rpk"
        # note: rename all *.lyrx to *.txt first. This is only needed for packaging.
        layer_directory_pack = "LayerFiles"
        layer_file = "Line3DError.lyrx"
        common_lib.set_data_paths_for_packaging(data_directory_pack, geodatabase, feature_class, model_directory_pack,
                                                model_file, rule_directory_pack, rule_file, layer_directory_pack,
                                                layer_file)

        if not os.path.exists(output_ws):
            os.makedirs(output_ws)

        common_lib.set_up_logging(log_directory, TOOLNAME3)
        # BUG FIX: time.clock() was removed in Python 3.8;
        # time.perf_counter() is the documented replacement.
        start_time = time.perf_counter()

        scratch_ws = common_lib.create_gdb(home_directory, "Intermediate.gdb")
        arcpy.env.workspace = scratch_ws
        arcpy.env.overwriteOutput = True

        if arcpy.CheckExtension("3D") == "Available":
            arcpy.CheckOutExtension("3D")

            if arcpy.CheckExtension("Spatial") == "Available":
                arcpy.CheckOutExtension("Spatial")

                arcpy.AddMessage("Processing input raster: " + common_lib.get_name_from_feature_class(input_raster))

                # accept both comma and period as decimal separator
                lercError = float(re.sub(",", ".", pixel_tolerance))
                scaleLevel = re.sub(",", ".", minimum_scale_level)

                outputTilingScheme = GenerateLERCTilingScheme(input_raster, scheme_directory, lercError)
                arcpy.AddMessage("Created LERC Tiling Scheme with LERC error: " + str(lercError))

                tileCache = ManageTileCache(input_raster, output_ws, outputTilingScheme, int(scaleLevel))
                arcpy.AddMessage("Created Tile Cache...")

                arcpy.AddMessage("Exporting to Tile Package...")
                tilePackage = ExportTileCache(input_raster, output_ws, tileCache)

                return tilePackage
            else:
                raise LicenseErrorSpatial
        else:
            raise LicenseError3D

    except NoLayerFile:
        print("Can't find Layer file. Exiting...")
        arcpy.AddError("Can't find Layer file. Exiting...")

    except LicenseError3D:
        print("3D Analyst license is unavailable")
        arcpy.AddError("3D Analyst license is unavailable")

    except LicenseErrorSpatial:
        print("Spatial Analyst license is unavailable")
        arcpy.AddError("Spatial Analyst license is unavailable")

    except NoNoDataError:
        print("Input raster does not have NODATA values")
        arcpy.AddError("Input raster does not have NODATA values")

    except ValueError:
        print("Input no flood value is not a number.")
        arcpy.AddError("Input no flood value is not a number.")

    except arcpy.ExecuteError:
        line, filename, synerror = trace()
        msg("Error on %s" % line, ERROR)
        msg("Error in file name: %s" % filename, ERROR)
        msg("With error message: %s" % synerror, ERROR)
        msg("ArcPy Error Message: %s" % arcpy.GetMessages(2), ERROR)

    except FunctionError as f_e:
        messages = f_e.args[0]
        msg("Error in function: %s" % messages["function"], ERROR)
        msg("Error on %s" % messages["line"], ERROR)
        msg("Error in file name: %s" % messages["filename"], ERROR)
        msg("With error message: %s" % messages["synerror"], ERROR)
        msg("ArcPy Error Message: %s" % messages["arc"], ERROR)

    except:
        line, filename, synerror = trace()
        msg("Error on %s" % line, ERROR)
        msg("Error in file name: %s" % filename, ERROR)
        msg("with error message: %s" % synerror, ERROR)

    finally:
        arcpy.CheckInExtension("3D")
        arcpy.CheckInExtension("Spatial")
# for debug only! Passing debug=1 makes main() use the hard-coded local test paths.
if __name__ == "__main__":
    main("", "", "", "", 1)
|
{"/scripts/create_3Dgravity_mains.py": ["/scripts/settings.py"], "/scripts/create_surface_hole.py": ["/scripts/settings.py"], "/scripts/gptools.py": ["/scripts/create_3Dgravity_mains.py", "/scripts/create_surface_hole.py", "/scripts/create_3Dlaterals.py", "/scripts/create_3Dmanholes.py", "/scripts/create_elevation_tile_package.py", "/scripts/settings.py"], "/scripts/create_elevation_tile_package.py": ["/scripts/settings.py"], "/scripts/create_3Dmanholes.py": ["/scripts/settings.py"], "/scripts/create_3Dlaterals.py": ["/scripts/settings.py"]}
|
10,474
|
gvanmaren/3D-Utilities
|
refs/heads/master
|
/scripts/create_3Dmanholes.py
|
import arcpy
import time
import os
import scripts.common_lib as common_lib
from scripts.common_lib import create_msg_body, msg, trace
from scripts.settings import *
"""The source code of the tool."""
# error classes
class NoNoDataError(Exception):
    """Raised when an input raster lacks NODATA values."""


class LicenseError3D(Exception):
    """Raised when a 3D Analyst license is unavailable."""


class LicenseErrorSpatial(Exception):
    """Raised when a Spatial Analyst license is unavailable."""


class SchemaLock(Exception):
    """Raised when a dataset cannot be locked for writing."""


class NotSupported(Exception):
    """Raised for unsupported inputs or configurations."""


class NoLayerFile(Exception):
    """Raised when a required layer (.lyrx) file cannot be found."""


class FunctionError(Exception):
    """Raised by helper functions; args[0] is a dict with error details."""


class NoFeatures(Exception):
    """Raised when an input layer yields no features."""
def Create3DPointFromPointAttributes(out_ws, ws, out_name, tin_ws, lc_input_layer,
                                     upper_rim_elevation_field, lower_invert_elevation_field,
                                     export_invert_elevation_field,
                                     lc_diameter, lc_default_diameter, export_diameter_field,
                                     export_height_field,
                                     dtm,
                                     error_elevation, lc_interpolate_errors, lc_zero_error, local_verbose):
    """Create 3D manhole points from 2D points using rim/invert elevation attributes.

    Copies rim/invert/diameter attributes into standard export fields,
    optionally interpolates missing elevations from a surface built from the
    good points (and from *dtm* for rim values), flags remaining problems in
    an 'error' field, and writes a 3D point feature class named
    <out_name>_3Dpoints into *out_ws*. Returns the 3D point feature class
    path; raises FunctionError on failure.
    """
    if local_verbose == 1:
        msg("--------------------------")
        msg("Executing Create3DPointFromPointAttributes...")

    # BUG FIX: time.clock() was removed in Python 3.8; use perf_counter().
    start_time = time.perf_counter()

    try:
        i = 0
        msg_prefix = ""
        failed = True

        rimelev_field = "util_rimelev"
        error_field = "error"
        point_fieldtype = "SHORT"

        # set all diameter values
        # check if diameter attribute exists
        if lc_diameter:
            if common_lib.check_fields(lc_input_layer, [lc_diameter], False, local_verbose) == 0:
                common_lib.set_null_or_negative_to_value_in_fields(lc_input_layer, [lc_diameter],
                                                                   [lc_default_diameter], True, local_verbose)
                common_lib.delete_add_field(lc_input_layer, export_diameter_field, "DOUBLE")
                arcpy.CalculateField_management(lc_input_layer, export_diameter_field, "!" + lc_diameter + "!",
                                                "PYTHON_9.3")
            else:  # create a default attribute
                common_lib.delete_add_field(lc_input_layer, export_diameter_field, "DOUBLE")
                arcpy.CalculateField_management(lc_input_layer, export_diameter_field, lc_default_diameter,
                                                "PYTHON_9.3")
                lc_diameter = export_diameter_field
        else:
            common_lib.delete_add_field(lc_input_layer, export_diameter_field, "DOUBLE")
            arcpy.CalculateField_management(lc_input_layer, export_diameter_field, lc_default_diameter,
                                            "PYTHON_9.3")
            lc_diameter = export_diameter_field

        # depth sanity bounds (feet, converted for metric layers)
        if common_lib.get_xy_unit(lc_input_layer, local_verbose) == "Feet":
            conv_factor = 1
        else:
            conv_factor = 0.3048
        min_depth = conv_factor * 1
        max_depth = conv_factor * 100

        # copy attributes to default util ones
        common_lib.delete_add_field(lc_input_layer, export_invert_elevation_field, "DOUBLE")
        common_lib.delete_add_field(lc_input_layer, rimelev_field, "DOUBLE")
        common_lib.delete_add_field(lc_input_layer, export_height_field, "DOUBLE")
        arcpy.CalculateField_management(lc_input_layer, export_invert_elevation_field,
                                        "!" + lower_invert_elevation_field + "!", "PYTHON_9.3")
        arcpy.CalculateField_management(lc_input_layer, rimelev_field, "!" + upper_rim_elevation_field + "!",
                                        "PYTHON_9.3")

        # create surface from good values
        if lc_interpolate_errors:
            Z_field = "Z"
            invertZ_field = "invertZ"

            # interpolate invert elevations
            surface = common_lib.create_surface_from_points(ws, tin_ws, lc_input_layer,
                                                            export_invert_elevation_field, error_elevation)
            if surface:
                arcpy.AddSurfaceInformation_3d(lc_input_layer, surface, Z_field, "BILINEAR", 1, 1, 0, None)
                common_lib.delete_add_field(lc_input_layer, invertZ_field, "DOUBLE")
                arcpy.CalculateField_management(lc_input_layer, invertZ_field, "!" + Z_field + "!",
                                                "PYTHON_9.3")

                # interpolate rim elevations
                if arcpy.Exists(dtm):
                    common_lib.delete_fields(lc_input_layer, [Z_field])
                    arcpy.AddSurfaceInformation_3d(lc_input_layer, dtm, Z_field, "BILINEAR", 1, 1, 0, None)

                # check invert and rim elevation values; replace bad values with
                # the interpolated ones where available
                with arcpy.da.UpdateCursor(lc_input_layer,
                                           [export_invert_elevation_field, invertZ_field, rimelev_field,
                                            Z_field]) as cursor:
                    for row in cursor:
                        if lc_zero_error:
                            if row[0] is None or row[0] == 0 or row[
                                    0] == error_elevation:  # error with invert elevation
                                if row[1]:
                                    row[0] = row[1]
                                else:
                                    row[0] = error_elevation
                            if row[2] is None or row[2] == 0 or row[
                                    2] == error_elevation:  # error with rim elevation
                                if row[3]:
                                    row[2] = row[3]
                                else:
                                    row[2] = error_elevation
                        else:
                            if row[0] is None or row[0] == error_elevation:  # error with invert elevation
                                if row[1]:
                                    row[0] = row[1]
                                else:
                                    row[0] = error_elevation
                            if row[2] is None or row[2] == error_elevation:  # error with rim elevation
                                if row[3]:
                                    row[2] = row[3]
                                else:
                                    row[2] = error_elevation
                        cursor.updateRow(row)
            else:
                arcpy.AddWarning("Can't interpolate values; not enough good points to create surface.")

        # recalculate NULL values to error value
        arcpy.AddMessage("Recalculating NULL values to " + str(error_elevation))
        s = 0
        with arcpy.da.UpdateCursor(lc_input_layer, [export_invert_elevation_field, rimelev_field,
                                                    export_height_field]) as cursor:
            for row in cursor:
                # set invert attribute
                if row[0] is None:
                    row[0] = int(error_elevation)
                else:
                    if lc_zero_error:
                        if row[0] == 0:
                            row[0] = int(error_elevation)

                # set rim attribute
                if row[1] is None:
                    row[1] = int(error_elevation)
                else:
                    if lc_zero_error:
                        if row[1] == 0:
                            row[1] = int(error_elevation)

                # set height attribute
                if row[0] and row[1]:
                    if row[1] > (row[0] + min_depth) and row[1] - row[0] < max_depth:
                        # assume max manhole depth is less than 100 and more than 1
                        if lc_zero_error:
                            if row[0] == 0 or row[1] == 0:
                                row[2] = error_elevation - row[0]
                            else:
                                row[2] = row[1] - row[0]
                        else:
                            row[2] = row[1] - row[0]
                    else:
                        row[2] = error_elevation - row[0]
                else:
                    row[2] = error_elevation
                cursor.updateRow(row)
                s += 1

        # create 3D points
        points3D = os.path.join(out_ws, out_name + "_3Dpoints")
        if arcpy.Exists(points3D):
            arcpy.Delete_management(points3D)
        arcpy.FeatureTo3DByAttribute_3d(lc_input_layer, points3D, export_invert_elevation_field)

        # calculate error field
        common_lib.delete_add_field(points3D, error_field, point_fieldtype)
        arcpy.AddMessage("Calculating errors ...")
        s = 0
        z_property = "Z"
        arcpy.AddZInformation_3d(points3D, z_property)

        # set error_field against original attributes:
        # 0 = ok, 1 = set to the error elevation, 2 = fixed by interpolation
        with arcpy.da.UpdateCursor(points3D,
                                   [lower_invert_elevation_field, error_field, z_property, export_height_field,
                                    upper_rim_elevation_field, rimelev_field]) as cursor:
            for row in cursor:
                if lc_zero_error:  # if zero is error
                    if row[4] == 0 or row[4] is None:
                        if row[5] == error_elevation:
                            row[1] = int(1)
                        else:
                            row[1] = int(2)  # fixed it earlier
                    else:
                        if row[0] == 0 or row[0] is None:
                            if abs(row[2]) == error_elevation:
                                row[1] = int(1)  # NULL values set to user error elevation
                            else:
                                row[1] = int(2)  # fixed it earlier
                        else:
                            row[1] = int(0)
                else:
                    if row[4] is None:
                        if row[5] == error_elevation:
                            row[1] = int(1)
                        else:
                            row[1] = int(2)
                    else:
                        if row[0] is None:
                            if abs(row[2]) == error_elevation:
                                row[1] = int(1)  # NULL values set to user error elevation
                            else:
                                row[1] = int(2)  # fixed it earlier
                        else:
                            row[1] = int(0)
                if row[3] > max_depth:  # assume max manhole depth is less than 100 and larger than 1
                    row[1] = int(1)
                cursor.updateRow(row)
                s += 1

        msg_prefix = "Create3DPointFromPointAttributes completed successfully."
        failed = False
        return points3D

    except:
        line, filename, synerror = trace()
        failed = True
        msg_prefix = ""
        raise FunctionError(
            {
                "function": "Create3DPointFromPointAttributes",
                "line": line,
                "filename": filename,
                "synerror": synerror,
                "arc": str(arcpy.GetMessages(2))
            }
        )

    finally:
        # BUG FIX: time.clock() was removed in Python 3.8; use perf_counter().
        end_time = time.perf_counter()
        msg_body = create_msg_body(msg_prefix, start_time, end_time)
        if failed:
            msg(msg_body, ERROR)
        else:
            if local_verbose == 1:
                msg(msg_body)
        pass
def main(input_layer, rim_elevation, invert_elevation,
         vertex_elevation_unit, diameter, diameter_unit, default_diameter,
         output_features, output_as_3dobject,
         zero_as_error, error_elevation, interpolate_errors, terrain_surface,
         debug):
    """Create 3D manhole points (and optionally 3D objects) from 2D points.

    Copies the input, converts rim/invert/diameter attributes to the layer's
    XY units, builds 3D points via Create3DPointFromPointAttributes and, when
    output_as_3dobject is true, extrudes them to 3D objects. debug=1 switches
    to hard-coded local test paths. Returns (points_layer, objects_layer)
    feature layer names; objects_layer is None when no 3D objects were made.
    """
    try:
        # Get Attributes from User
        if debug == 0:
            # script variables
            aprx = arcpy.mp.ArcGISProject("CURRENT")
            home_directory = aprx.homeFolder
            tin_directory = home_directory + "\\Tins"
            scripts_directory = aprx.homeFolder + "\\Scripts"
            rule_directory = aprx.homeFolder + "\\RulePackages"
            log_directory = aprx.homeFolder + "\\Logs"
            layer_directory = home_directory + "\\LayerFiles"
            project_ws = aprx.defaultGeodatabase

            enableLogging = True
            DeleteIntermediateData = True
            verbose = 0
            in_memory_switch = True
        else:
            # debug: hard-coded local test data
            input_layer = r'D:\Gert\Work\Esri\Solutions\Utilities\work2.1\3DUtilities\Local_Scene.gdb\manHoles_test1'
            rim_elevation = "RIMELEV"
            invert_elevation = "INVERTELEV"
            vertex_elevation_unit = "Feet"
            diameter = "diameter"
            diameter_unit = "Inches"
            default_diameter = 1
            output_features = r'D:\Gert\Work\Esri\Solutions\Utilities\work2.1\3DUtilities\Local_Scene.gdb\manHoles3D_test1'
            output_as_3dobject = True
            zero_as_error = True
            error_elevation = 1000
            interpolate_errors = True
            terrain_surface = r'D:\Gert\Work\Esri\Solutions\Utilities\work2.1\3DUtilities\Naperville.gdb\DEM_clip_feet'

            # Create and set workspace location in same directory as input feature class gdb
            home_directory = r'D:\Gert\Work\Esri\Solutions\Utilities\work2.1\3DUtilities'
            rule_directory = home_directory + "\\RulePackages"
            layer_directory = home_directory + "\\LayerFiles"
            project_ws = home_directory + "\\Results.gdb"
            tin_directory = home_directory + "\\TINs"
            scripts_directory = home_directory + "\\Scripts"
            log_directory = home_directory + "\\Logs"

            enableLogging = False
            DeleteIntermediateData = True
            verbose = 1
            in_memory_switch = False

        # set data paths for packing tool so all additional data is stored in the package - ESRI packing only!
        data_directory_pack = ""
        geodatabase = ""
        feature_class = ""
        model_directory_pack = ""
        model_file = ""
        rule_directory_pack = "RulePackages"
        rule_file = "ExtrudePolygon.rpk"
        # note: rename all *.lyrx to *.txt first. This is only needed for packaging.
        layer_directory_pack = "LayerFiles"
        layer_file = "Line3DError.lyrx"
        common_lib.set_data_paths_for_packaging(data_directory_pack, geodatabase, feature_class, model_directory_pack,
                                                model_file, rule_directory_pack, rule_file, layer_directory_pack,
                                                layer_file)

        if not os.path.exists(tin_directory):
            os.makedirs(tin_directory)

        common_lib.set_up_logging(log_directory, TOOLNAME3)
        # BUG FIX: time.clock() was removed in Python 3.8;
        # time.perf_counter() is the documented replacement.
        start_time = time.perf_counter()

        esri_upper_elevation_field = "esri_upper_elev"
        esri_lower_elevation_field = "esri_lower_elev"
        esri_diameter_field = "esri_diameter"
        extrude_rpk = rule_directory + "\\ExtrudePolygon.rpk"

        scratch_ws = common_lib.create_gdb(home_directory, "Intermediate.gdb")
        output_ws = os.path.dirname(output_features)
        arcpy.env.workspace = scratch_ws
        arcpy.env.overwriteOutput = True

        if arcpy.Exists(output_ws):
            arcpy.env.workspace = scratch_ws
            arcpy.env.overwriteOutput = True

            if arcpy.CheckExtension("3D") == "Available":
                arcpy.CheckOutExtension("3D")

                if arcpy.CheckExtension("Spatial") == "Available":
                    arcpy.CheckOutExtension("Spatial")

                    arcpy.AddMessage(
                        "Processing input features: " + common_lib.get_name_from_feature_class(input_layer))

                    objects3D = None
                    objects3D_layer = None
                    Points3D = None
                    Points3D_layer = None

                    # create 3D points
                    # check if point feature class has Z values. If not generate 3D points from 2D points using attributes
                    zValues = arcpy.Describe(input_layer).hasZ

                    # make a copy of the input feature class
                    input_fc = os.path.join(scratch_ws, common_lib.get_name_from_feature_class(input_layer) + "_copy")
                    if arcpy.Exists(input_fc):
                        arcpy.Delete_management(input_fc)

                    # write to fc
                    arcpy.AddMessage(
                        "Copying " + common_lib.get_name_from_feature_class(input_layer) + " to " + input_fc)
                    arcpy.CopyFeatures_management(input_layer, input_fc)

                    # just because of this schema lock
                    input_layer = input_fc

                    arcpy.AddMessage("Creating 3D points...")

                    # check for output directory
                    if not os.path.exists(tin_directory):
                        os.makedirs(tin_directory)

                    # create start and end elevation attributes in segment elevation units
                    layer_unit = common_lib.get_xy_unit(input_layer, verbose)

                    common_lib.delete_add_field(input_layer, esri_upper_elevation_field, "DOUBLE")
                    common_lib.delete_add_field(input_layer, esri_lower_elevation_field, "DOUBLE")

                    if not vertex_elevation_unit:
                        vertex_elevation_unit = layer_unit
                        arcpy.AddMessage(
                            "No invert elevation unit detected. Using XY units instead: " + vertex_elevation_unit)

                    conversion_factor = common_lib.unitConversion(layer_unit, vertex_elevation_unit, verbose)
                    common_lib.calculate_field_from_other_field(input_layer, input_fc, rim_elevation,
                                                                esri_upper_elevation_field,
                                                                "multiply", conversion_factor, verbose)
                    common_lib.calculate_field_from_other_field(input_layer, input_fc, invert_elevation,
                                                                esri_lower_elevation_field,
                                                                "multiply", conversion_factor, verbose)

                    # check if error elevation is larger than max elevation in the data
                    maxValue = arcpy.SearchCursor(input_layer, "", "", "",
                                                  esri_upper_elevation_field + " D").next().getValue(
                        esri_upper_elevation_field)  # Get 1st row in ascending cursor sort

                    if maxValue > error_elevation:
                        error_elevation += maxValue
                        arcpy.AddMessage(
                            "Maximum value of " + rim_elevation + " attribute is larger than the error elevation value")
                        arcpy.AddMessage("Setting the error elevation value to: " + str(error_elevation))

                    # create diameter attribute in segment elevation units
                    common_lib.delete_add_field(input_layer, esri_diameter_field, "DOUBLE")

                    if not diameter_unit:
                        diameter_unit = layer_unit
                        arcpy.AddMessage("No Diameter Unit detected. Using XY units instead: " + diameter_unit)

                    if diameter:
                        conversion_factor = common_lib.unitConversion(layer_unit, diameter_unit, verbose)
                        common_lib.calculate_field_from_other_field(input_layer, input_fc, diameter,
                                                                    esri_diameter_field,
                                                                    "multiply", conversion_factor, verbose)
                    else:
                        arcpy.CalculateField_management(input_layer, esri_diameter_field, default_diameter,
                                                        "PYTHON_9.3")

                    output_name = str(os.path.basename(output_features))

                    # if not zValues:
                    Points3D = Create3DPointFromPointAttributes(output_ws, scratch_ws, output_name, tin_directory,
                                                                input_layer,
                                                                esri_upper_elevation_field, esri_lower_elevation_field,
                                                                INVERTELEV_FIELD,
                                                                esri_diameter_field, default_diameter, DIAMETER_FIELD,
                                                                HEIGHT_FIELD,
                                                                terrain_surface,
                                                                error_elevation, interpolate_errors, zero_as_error,
                                                                verbose)

                    Points3D_layer = common_lib.get_name_from_feature_class(Points3D)
                    arcpy.MakeFeatureLayer_management(Points3D, Points3D_layer)

                    if common_lib.get_z_unit(Points3D_layer, 0) == "Feet":
                        SymbologyLayer = layer_directory + "\\Point3DError.lyrx"
                    else:
                        SymbologyLayer = layer_directory + "\\Point3DError_meters.lyrx"

                    if not arcpy.Exists(SymbologyLayer):
                        arcpy.AddWarning(
                            "Can't find: " + SymbologyLayer + ". Symbolize features by error attribute to see data errors.")

                    if output_as_3dobject:
                        objects3D = os.path.join(output_ws, output_name + "_3Dobjects")
                        if arcpy.Exists(objects3D):
                            arcpy.Delete_management(objects3D)

                        # convert 3D Points to 3D objects
                        arcpy.AddMessage("Buffering: " + common_lib.get_name_from_feature_class(Points3D))

                        common_lib.delete_add_field(Points3D, RADIUS_FIELD, "DOUBLE")
                        arcpy.CalculateField_management(Points3D, RADIUS_FIELD, "!" + DIAMETER_FIELD + "! / 2",
                                                        "PYTHON_9.3")

                        output3d_objects = common_lib.Point3DToObject(scratch_ws, extrude_rpk, Points3D,
                                                                      INVERTELEV_FIELD,
                                                                      RADIUS_FIELD, HEIGHT_FIELD, objects3D, verbose)

                        objects3D_layer = common_lib.get_name_from_feature_class(output3d_objects)
                        arcpy.MakeFeatureLayer_management(output3d_objects, objects3D_layer)

                        if common_lib.get_z_unit(objects3D_layer, 0) == "Feet":
                            SymbologyLayer = layer_directory + "\\PointObject3DError.lyrx"
                        else:
                            SymbologyLayer = layer_directory + "\\PointObject3DError_meter.lyrx"

                        if not arcpy.Exists(SymbologyLayer):
                            arcpy.AddWarning(
                                "Can't find: " + SymbologyLayer + ". Symbolize features by error attribute to see data errors.")

                    if DeleteIntermediateData:
                        fcs = common_lib.listFcsInGDB(scratch_ws)
                        arcpy.AddMessage("Deleting intermediate data...")
                        for fc in fcs:
                            arcpy.Delete_management(fc)

                    # here goes all the other if/else
                    # BUG FIX: time.clock() was removed in Python 3.8; use perf_counter().
                    end_time = time.perf_counter()
                    msg_body = create_msg_body("PointTo3DManHole completed successfully.", start_time, end_time)
                    msg(msg_body)

                    return Points3D_layer, objects3D_layer
                else:
                    raise LicenseErrorSpatial
            else:
                raise LicenseError3D

    except NoLayerFile:
        print("Can't find Layer file. Exiting...")
        arcpy.AddError("Can't find Layer file. Exiting...")

    except LicenseError3D:
        print("3D Analyst license is unavailable")
        arcpy.AddError("3D Analyst license is unavailable")

    except LicenseErrorSpatial:
        print("Spatial Analyst license is unavailable")
        arcpy.AddError("Spatial Analyst license is unavailable")

    except NoNoDataError:
        print("Input raster does not have NODATA values")
        arcpy.AddError("Input raster does not have NODATA values")

    except ValueError:
        print("Input no flood value is not a number.")
        arcpy.AddError("Input no flood value is not a number.")

    except arcpy.ExecuteError:
        line, filename, synerror = trace()
        msg("Error on %s" % line, ERROR)
        msg("Error in file name: %s" % filename, ERROR)
        msg("With error message: %s" % synerror, ERROR)
        msg("ArcPy Error Message: %s" % arcpy.GetMessages(2), ERROR)

    except FunctionError as f_e:
        messages = f_e.args[0]
        msg("Error in function: %s" % messages["function"], ERROR)
        msg("Error on %s" % messages["line"], ERROR)
        msg("Error in file name: %s" % messages["filename"], ERROR)
        msg("With error message: %s" % messages["synerror"], ERROR)
        msg("ArcPy Error Message: %s" % messages["arc"], ERROR)

    except:
        line, filename, synerror = trace()
        msg("Error on %s" % line, ERROR)
        msg("Error in file name: %s" % filename, ERROR)
        msg("with error message: %s" % synerror, ERROR)

    finally:
        arcpy.CheckInExtension("3D")
        arcpy.CheckInExtension("Spatial")
# for debug only! Passing debug=1 makes main() use the hard-coded local test paths.
if __name__ == "__main__":
    main("", "", "", "", "", "", "", "", "", "", "", "", "", 1)
|
{"/scripts/create_3Dgravity_mains.py": ["/scripts/settings.py"], "/scripts/create_surface_hole.py": ["/scripts/settings.py"], "/scripts/gptools.py": ["/scripts/create_3Dgravity_mains.py", "/scripts/create_surface_hole.py", "/scripts/create_3Dlaterals.py", "/scripts/create_3Dmanholes.py", "/scripts/create_elevation_tile_package.py", "/scripts/settings.py"], "/scripts/create_elevation_tile_package.py": ["/scripts/settings.py"], "/scripts/create_3Dmanholes.py": ["/scripts/settings.py"], "/scripts/create_3Dlaterals.py": ["/scripts/settings.py"]}
|
10,475
|
gvanmaren/3D-Utilities
|
refs/heads/master
|
/scripts/create_3Dlaterals.py
|
import arcpy
import time
import os
import math
import scripts.common_lib as common_lib
from scripts.common_lib import create_msg_body, msg, trace
from scripts.settings import *
class NoNoDataError(Exception):
    """Raised when an input raster unexpectedly has no NODATA values."""
    pass


class LicenseError3D(Exception):
    """Raised when the 3D Analyst extension license cannot be checked out."""
    pass


class LicenseErrorSpatial(Exception):
    """Raised when the Spatial Analyst extension license cannot be checked out."""
    pass


class SchemaLock(Exception):
    """Raised when a schema lock prevents modifying a dataset."""
    pass


class NotSupported(Exception):
    """Raised for inputs or configurations this tool does not support."""
    pass


class NoLayerFile(Exception):
    """Raised when an expected .lyrx symbology layer file is missing."""
    pass


class FunctionError(Exception):
    """Carries a dict with function name, line, filename, synerror and arcpy
    messages from a failed helper up to main()'s error reporting."""
    pass


class NoFeatures(Exception):
    """Raised when a required feature class contains no features."""
    pass


class No3DFeatures(Exception):
    """Raised when the input mains feature class has no Z values."""
    pass
def calculateStartZPointfromSlope(local_start_points, local_end_points, local_elevation_field, local_sort_field,
                                  local_slope_field, local_verbose):
    """Set the Z of each lateral start point from slope and its end point's Z.

    For every feature in local_start_points, the matching end point in
    local_end_points (same value in local_sort_field) is located, and the
    start point's local_elevation_field is written as:

        end_Z + tan(slope_degrees) * horizontal_distance(start, end)

    Parameters:
        local_start_points  -- point feature class to update (written).
        local_end_points    -- matching end points (read only).
        local_elevation_field -- field holding Z values on both inputs.
        local_sort_field    -- key field linking start and end points.
        local_slope_field   -- slope in degrees on the start points.
        local_verbose       -- 1 to emit progress/timing messages.

    Returns 0 on success; raises FunctionError with diagnostics on failure.
    """
    if local_verbose == 1:
        msg("--------------------------")
        msg("Executing calculateStartZPointfromSlope...")
    # time.clock() was removed in Python 3.8; perf_counter() is the
    # documented replacement for interval timing.
    start_time = time.perf_counter()
    try:
        msg_prefix = ""
        failed = True
        # step through start points
        with arcpy.da.UpdateCursor(local_start_points, [local_sort_field, local_elevation_field, "SHAPE@XY",
                                                        local_slope_field]) as cursor:
            for row in cursor:
                line_id_start = row[0]
                # NOTE: O(n^2) -- a fresh SearchCursor scans all end points for
                # every start point. Acceptable for typical lateral counts.
                with arcpy.da.SearchCursor(local_end_points,
                                           [local_sort_field, local_elevation_field, "SHAPE@XY"]) as f_cursor:
                    for f_row in f_cursor:  # find the accompanying end point and get Z
                        if line_id_start == f_row[0]:  # we have the same line
                            z_end = f_row[1]  # we have the end Z
                            sx, sy = row[2]
                            ex, ey = f_row[2]
                            # horizontal (2D) distance between the points
                            distance = math.hypot((sx - ex), (sy - ey))
                            # calculate Z difference based on slope and distance
                            slope_radians = math.radians(row[3])
                            tan_value = math.tan(slope_radians)
                            Z_diff = tan_value * distance
                            row[1] = z_end + Z_diff
                            cursor.updateRow(row)
                            break
        msg_prefix = "Function calculateZStartPointfromSlope completed successfully."
        failed = False
        return 0
    except:
        line, filename, synerror = trace()
        failed = True
        msg_prefix = ""
        raise FunctionError(
            {
                "function": "calculateStartZPointfromSlope",
                "line": line,
                "filename": filename,
                "synerror": synerror,
                "arc": str(arcpy.GetMessages(2))
            }
        )
    finally:
        end_time = time.perf_counter()
        msg_body = create_msg_body(msg_prefix, start_time, end_time)
        if failed:
            msg(msg_body, ERROR)
        else:
            if local_verbose == 1:
                msg(msg_body)
def create_laterals3DfromTIN(cl3D_output_ws, cl3D_ws, cl3D_tin, cl3D_laterals, cl3D_diameter,
                             cl3D_default_diameter, cl3D_slope, cl3D_default_slope, cl3D_building_fp,
                             cl3D_outname, cl3D_verbose):
    """Build 3D lateral lines from 2D laterals using a TIN for end elevations.

    Copies cl3D_laterals, populates DIAMETER_FIELD / SLOPE_FIELD (from the
    given fields or defaults), extracts line start/end points, derives end-Z
    from the TIN and start-Z either from the TIN (when building footprints are
    supplied) or from slope via calculateStartZPointfromSlope, then rebuilds
    3D lines from the points and joins the original attributes back.

    Returns the path to the created 3D line feature class; raises
    FunctionError with diagnostics on failure.
    """
    if cl3D_verbose == 1:
        msg("--------------------------")
        msg("Executing create_laterals3D...")
    # NOTE(review): time.clock() was removed in Python 3.8 -- confirm the
    # target ArcGIS Pro Python version or switch to time.perf_counter().
    start_time = time.clock()
    try:
        i = 0
        msg_prefix = ""
        failed = True
        # intermediate field names/types (some declared for clarity; not all used)
        line_field = "line_order"
        elevation_field = "elevation"
        start_elevation_field = "start_elevation"
        end_elevation_field = "end_elevation"
        line_fieldtype = "SHORT"
        elevation_fieldtype = "DOUBLE"
        field_list = ["elevation"]
        sort_field = "ORIG_FID"
        # make a copy of the input feature class
        input_fc = os.path.join(cl3D_ws, common_lib.get_name_from_feature_class(cl3D_laterals) + "_copy")
        if arcpy.Exists(input_fc):
            arcpy.Delete_management(input_fc)
        # write to fc
        arcpy.AddMessage("Copying " + common_lib.get_name_from_feature_class(cl3D_laterals) + " to " + input_fc)
        arcpy.CopyFeatures_management(cl3D_laterals, input_fc)
        # create 3D lines from 2D lines (note only end points of lines are used to created 3D lines!)
        LineStartPoints = os.path.join(cl3D_ws, "lateral_startpoints")
        if arcpy.Exists(LineStartPoints):
            arcpy.Delete_management(LineStartPoints)
        LineEndPoints = os.path.join(cl3D_ws, "lateral_endpoints")
        if arcpy.Exists(LineEndPoints):
            arcpy.Delete_management(LineEndPoints)
        arcpy.AddMessage("Extracting Start Points...")
        common_lib.delete_add_field(input_fc, DIAMETER_FIELD, "DOUBLE")
        # set diameter values: copy from the user field when valid, else default
        if cl3D_diameter:
            if common_lib.check_fields(input_fc, [cl3D_diameter], True, cl3D_verbose) == 0:
                common_lib.set_null_or_negative_to_value_in_fields(input_fc, [cl3D_diameter],
                                                                   [cl3D_default_diameter], True, cl3D_verbose)
                arcpy.CalculateField_management(input_fc, DIAMETER_FIELD, "!" + cl3D_diameter + "!",
                                                "PYTHON_9.3")
            else:  # create a default attribute
                arcpy.CalculateField_management(input_fc, DIAMETER_FIELD, cl3D_default_diameter, "PYTHON_9.3")
        else:
            arcpy.CalculateField_management(input_fc, DIAMETER_FIELD, cl3D_default_diameter, "PYTHON_9.3")
        common_lib.delete_add_field(input_fc, SLOPE_FIELD, "DOUBLE")
        # set slope values: same pattern as diameter above
        if cl3D_slope:
            if common_lib.check_fields(input_fc, [cl3D_slope], True, cl3D_verbose) == 0:
                common_lib.set_null_or_negative_to_value_in_fields(input_fc, [cl3D_slope], [cl3D_default_slope],
                                                                   True, cl3D_verbose)
                arcpy.CalculateField_management(input_fc, SLOPE_FIELD, "!" + cl3D_slope + "!", "PYTHON_9.3")
            else:  # create a default attribute
                arcpy.CalculateField_management(input_fc, SLOPE_FIELD, cl3D_default_slope, "PYTHON_9.3")
        else:
            arcpy.CalculateField_management(input_fc, SLOPE_FIELD, cl3D_default_slope, "PYTHON_9.3")
        # get start and end points and set line order and elevation attribute
        arcpy.AddMessage("Calculating End Point elevations")
        arcpy.FeatureVerticesToPoints_management(input_fc, LineEndPoints, "END")
        common_lib.delete_add_field(LineEndPoints, elevation_field, elevation_fieldtype)
        arcpy.AddSurfaceInformation_3d(LineEndPoints, cl3D_tin, "Z", "BILINEAR")
        arcpy.CalculateField_management(LineEndPoints, elevation_field, "!Z!", "PYTHON_9.3", None)
        common_lib.set_null_to_value_in_fields(LineEndPoints, [elevation_field], [0], True, cl3D_verbose)
        common_lib.delete_add_field(LineEndPoints, line_field, line_fieldtype)
        arcpy.CalculateField_management(LineEndPoints, line_field, "2", "PYTHON_9.3", None)
        arcpy.AddMessage("Calculating Start Point elevations")
        arcpy.FeatureVerticesToPoints_management(input_fc, LineStartPoints, "START")
        common_lib.delete_add_field(LineStartPoints, elevation_field, elevation_fieldtype)
        # join slope field based on sort_field
        arcpy.JoinField_management(LineStartPoints, sort_field, input_fc, arcpy.Describe(input_fc).OIDFieldName,
                                   [SLOPE_FIELD])
        # if building footprints use these to find the start elevation, else we use the slope variables
        if cl3D_building_fp:
            # NOTE(review): "!Z!" is read here BEFORE AddSurfaceInformation_3d
            # populates Z on the next line -- these two calls look swapped.
            # (This branch is dead in the current tool: main() always passes
            # input_building_fp = None.) Confirm before relying on it.
            arcpy.CalculateField_management(LineStartPoints, elevation_field, "!Z!", "PYTHON_9.3", None)
            arcpy.AddSurfaceInformation_3d(LineStartPoints, cl3D_tin, "Z", "BILINEAR")
        else:
            calculateStartZPointfromSlope(LineStartPoints, LineEndPoints, elevation_field, sort_field,
                                          SLOPE_FIELD, cl3D_verbose)
        common_lib.delete_add_field(LineStartPoints, line_field, line_fieldtype)
        arcpy.CalculateField_management(LineStartPoints, line_field, "1", "PYTHON_9.3", None)
        # merge start and end points
        merged_fc = os.path.join(cl3D_ws, "merged_lateral_points")
        if arcpy.Exists(merged_fc):
            arcpy.Delete_management(merged_fc)
        arcpy.Merge_management([LineStartPoints, LineEndPoints], merged_fc)
        # create 3D points
        points3D = os.path.join(cl3D_ws, "lateral_points_3D")
        if arcpy.Exists(points3D):
            arcpy.Delete_management(points3D)
        arcpy.FeatureTo3DByAttribute_3d(merged_fc, points3D, elevation_field)
        # create 3D lines
        lines3D = os.path.join(cl3D_output_ws, cl3D_outname + "_3Dlines", )
        if arcpy.Exists(lines3D):
            arcpy.Delete_management(lines3D)
        arcpy.AddMessage("Joining original attributes...")
        arcpy.PointsToLine_management(points3D, lines3D, sort_field, line_field)
        join_field = arcpy.Describe(input_fc).OIDFieldName
        arcpy.JoinField_management(lines3D, sort_field, input_fc, join_field)
        msg_prefix = "Function create_laterals3D completed successfully."
        failed = False
        return lines3D
    except:
        line, filename, synerror = trace()
        failed = True
        msg_prefix = ""
        raise FunctionError(
            {
                "function": "create_laterals3D",
                "line": line,
                "filename": filename,
                "synerror": synerror,
                "arc": str(arcpy.GetMessages(2))
            }
        )
    finally:
        end_time = time.clock()
        msg_body = create_msg_body(msg_prefix, start_time, end_time)
        if failed:
            msg(msg_body, ERROR)
        else:
            if cl3D_verbose == 1:
                msg(msg_body)
            pass
def create_laterals(out_ws, ws, tin_ws, lc_laterals, lc_3d_mains, lc_building_fp, lc_diameter,
                    lc_default_diameter, lc_slope, lc_default_slope, lc_outputname, local_verbose):
    """Create 3D laterals by building a TIN from the 3D mains (and optional
    building footprints) and delegating to create_laterals3DfromTIN.

    Parameters:
        out_ws / ws / tin_ws -- output, scratch and TIN workspaces.
        lc_laterals          -- 2D lateral lines to convert.
        lc_3d_mains          -- 3D gravity mains used as TIN hard lines.
        lc_building_fp       -- optional building footprints (hard fill).
        lc_diameter, lc_default_diameter, lc_slope, lc_default_slope,
        lc_outputname, local_verbose -- passed through to the TIN routine.

    Returns the created 3D line feature class; raises FunctionError on error.
    """
    if local_verbose == 1:
        msg("--------------------------")
        msg("Executing create_laterals...")
    # time.clock() was removed in Python 3.8; use perf_counter() instead.
    start_time = time.perf_counter()
    try:
        msg_prefix = ""
        failed = True
        mains_full_name = common_lib.get_full_path_from_layer(lc_3d_mains)
        # TIN feature string: mains as hard lines, footprints (if any) as hard fill
        if lc_building_fp:
            tin_string = "{} Shape.Z Hard_Line <None>;{} Shape.Z Hardvalue_Fill <None>".format(mains_full_name,
                                                                                               lc_building_fp)
        else:
            tin_string = "{} Shape.Z Hard_Line <None>".format(mains_full_name)
        out_tin = os.path.join(tin_ws, "LateralTin")
        if arcpy.Exists(out_tin):
            arcpy.Delete_management(out_tin)
        arcpy.CreateTin_3d(out_tin, arcpy.Describe(lc_laterals).spatialReference, tin_string, "DELAUNAY")
        # create 3D Lines
        Line3D = create_laterals3DfromTIN(out_ws, ws, out_tin, lc_laterals, lc_diameter, lc_default_diameter,
                                          lc_slope, lc_default_slope, lc_building_fp, lc_outputname,
                                          local_verbose)
        msg_prefix = "Function create_laterals completed successfully."
        failed = False
        return Line3D
    except:
        line, filename, synerror = trace()
        failed = True
        msg_prefix = ""
        raise FunctionError(
            {
                "function": "create_laterals",
                "line": line,
                "filename": filename,
                "synerror": synerror,
                "arc": str(arcpy.GetMessages(2))
            }
        )
    finally:
        end_time = time.perf_counter()
        msg_body = create_msg_body(msg_prefix, start_time, end_time)
        if failed:
            msg(msg_body, ERROR)
        else:
            if local_verbose == 1:
                msg(msg_body)
def main(input_layer, input_3d_mains_layer, diameter, diameter_unit, default_diameter,
         slope, default_slope, output_features, output_as_3dobject,
         debug):
    """GP-tool entry point: create 3D lateral lines (and optionally buffered
    3D pipe objects) from 2D laterals and existing 3D gravity mains.

    With debug == 0 all directories come from the current ArcGIS Pro project;
    with debug != 0 hard-coded developer paths are used. Returns a tuple
    (Line3D_layer, objects3D_layer) on success; objects3D_layer is None when
    output_as_3dobject is falsy.
    """
    try:
        # Get Attributes from User
        if debug == 0:
            # script variables
            aprx = arcpy.mp.ArcGISProject("CURRENT")
            home_directory = aprx.homeFolder
            tin_directory = home_directory + "\\Tins"
            scripts_directory = aprx.homeFolder + "\\Scripts"
            rule_directory = aprx.homeFolder + "\\RulePackages"
            log_directory = aprx.homeFolder + "\\Logs"
            layer_directory = home_directory + "\\LayerFiles"
            project_ws = aprx.defaultGeodatabase
            enableLogging = True
            DeleteIntermediateData = True
            verbose = 0
            in_memory_switch = True
        else:
            # debug: hard-coded developer inputs and workspaces
            input_layer = r'D:\Gert\Work\Esri\Solutions\Utilities\work2.1\3DUtilities\Local_Scene.gdb\lateral_test1'
            input_3d_mains_layer = r'D:\Gert\Work\Esri\Solutions\Utilities\work2.1\3DUtilities\Local_Scene.gdb\mains_3d_test1'
            diameter = "DIAMETER"
            diameter_unit = "Inches"
            default_diameter = 3
            slope = "Slope"
            default_slope = 45
            output_features = r'D:\Gert\Work\Esri\Solutions\Utilities\work2.1\3DUtilities\Local_Scene.gdb\lateral_test1_3D'
            output_as_3dobject = True
            # Create and set workspace location in same directory as input feature class gdb
            home_directory = r'D:\Gert\Work\Esri\Solutions\Utilities\work2.1\3DUtilities'
            rule_directory = home_directory + "\RulePackages"
            layer_directory = home_directory + "\LayerFiles"
            project_ws = home_directory + "\\Results.gdb"
            tin_directory = home_directory + "\TINs"
            scripts_directory = home_directory + "\\Scripts"
            log_directory = home_directory + "\\Logs"
            enableLogging = False
            DeleteIntermediateData = True
            verbose = 1
            in_memory_switch = False
        # set data paths for packing tool so all additional data is stored in the package - ESRI packing only!
        data_directory_pack = ""
        geodatabase = ""
        feature_class = ""
        model_directory_pack = ""
        model_file = ""
        rule_directory_pack = "RulePackages"
        rule_file = "ExtrudePolygon.rpk"
        # note: rename all *.lyrx to *.txt first. This is only needed for packaging.
        layer_directory_pack = "LayerFiles"
        layer_file = "Line3DError.lyrx"
        common_lib.set_data_paths_for_packaging(data_directory_pack, geodatabase, feature_class, model_directory_pack,
                                                model_file, rule_directory_pack, rule_file, layer_directory_pack,
                                                layer_file)
        esri_diameter_field = "esri_diameter"
        if not os.path.exists(tin_directory):
            os.makedirs(tin_directory)
        common_lib.set_up_logging(log_directory, TOOLNAME2)
        # NOTE(review): time.clock() was removed in Python 3.8 -- confirm the
        # target ArcGIS Pro Python version or switch to time.perf_counter().
        start_time = time.clock()
        scratch_ws = common_lib.create_gdb(home_directory, "Intermediate.gdb")
        output_ws = os.path.dirname(output_features)
        if arcpy.Exists(output_ws):
            arcpy.env.workspace = scratch_ws
            arcpy.env.overwriteOutput = True
            if arcpy.CheckExtension("3D") == "Available":
                arcpy.CheckOutExtension("3D")
                if arcpy.CheckExtension("Spatial") == "Available":
                    arcpy.CheckOutExtension("Spatial")
                    arcpy.AddMessage(
                        "Processing input features: " + common_lib.get_name_from_feature_class(input_layer))
                    objects3D = None
                    objects3D_layer = None
                    Line3D = None
                    Line3D_layer = None
                    # check if line feature class has Z values. If not generate 3D line from 2D line using attributes
                    line_zValues = arcpy.Describe(input_3d_mains_layer).hasZ
                    if line_zValues:
                        # building footprints are not used by this tool
                        input_building_fp = None
                        arcpy.AddMessage("Creating 3D laterals...")
                        output_name = str(os.path.basename(output_features))
                        # create diameter attribute in segment elevation units
                        layer_unit = common_lib.get_xy_unit(input_layer, verbose)
                        common_lib.delete_add_field(input_layer, esri_diameter_field, "DOUBLE")
                        if not diameter_unit:
                            diameter_unit = layer_unit
                            arcpy.AddMessage(
                                "No Diameter Unit detected. Using XY units instead: " + diameter_unit)
                        if diameter:
                            # convert the user's diameter field into layer XY units
                            expression = "!" + diameter + "! * " + str(
                                common_lib.unitConversion(layer_unit, diameter_unit, verbose))
                            arcpy.CalculateField_management(input_layer, esri_diameter_field, expression,
                                                            "PYTHON_9.3")
                        else:
                            arcpy.CalculateField_management(input_layer, esri_diameter_field,
                                                            default_diameter,
                                                            "PYTHON_9.3")
                        Line3D = create_laterals(output_ws, scratch_ws, tin_directory, input_layer,
                                                 input_3d_mains_layer, input_building_fp, esri_diameter_field,
                                                 default_diameter,
                                                 slope, default_slope, output_name, verbose)
                        Line3D_layer = common_lib.get_name_from_feature_class(Line3D)
                        arcpy.MakeFeatureLayer_management(Line3D, Line3D_layer)
                        # pick symbology by vertical unit
                        if common_lib.get_z_unit(Line3D_layer, 0) == "Feet":
                            SymbologyLayer = layer_directory + "\\LateralLine3D.lyrx"
                        else:
                            SymbologyLayer = layer_directory + "\\LateralLine3D_Meters.lyrx"
                        if not arcpy.Exists(SymbologyLayer):
                            arcpy.AddWarning(
                                "Can't find: " + SymbologyLayer + ". Symbolize features by error attribute to see data errors.")
                        if output_as_3dobject:
                            objects3D = os.path.join(output_ws, output_name + "_3Dobjects")
                            if arcpy.Exists(objects3D):
                                arcpy.Delete_management(objects3D)
                            arcpy.AddMessage("Buffering: " + common_lib.get_name_from_feature_class(Line3D))
                            arcpy.AddMessage("This might take some time depending on the number of lines.")
                            common_lib.delete_add_field(Line3D, RADIUS_FIELD, "DOUBLE")
                            arcpy.CalculateField_management(Line3D, RADIUS_FIELD,
                                                            "!" + DIAMETER_FIELD + "! / 2",
                                                            "PYTHON_9.3")
                            # extrude pipes as round 3D buffers around the lines
                            arcpy.Buffer3D_3d(Line3D, objects3D, RADIUS_FIELD, 'Straight', '10')
                            objects3D_layer = common_lib.get_name_from_feature_class(objects3D)
                            arcpy.MakeFeatureLayer_management(objects3D, objects3D_layer)
                            if common_lib.get_z_unit(objects3D_layer, 0) == "Feet":
                                SymbologyLayer = layer_directory + "\\LateralObject3D.lyrx"
                            else:
                                SymbologyLayer = layer_directory + "\\LateralObject3D_meter.lyrx"
                            if not arcpy.Exists(SymbologyLayer):
                                arcpy.AddWarning(
                                    "Can't find: " + SymbologyLayer + ". Symbolize features by error attribute to see data errors.")
                        end_time = time.clock()
                        msg_body = create_msg_body("Create Laterals completed successfully.", start_time,
                                                   end_time)
                        if DeleteIntermediateData:
                            fcs = common_lib.listFcsInGDB(scratch_ws)
                            msg_prefix = "Deleting intermediate data..."
                            msg_body = common_lib.create_msg_body(msg_prefix, 0, 0)
                            common_lib.msg(msg_body)
                            for fc in fcs:
                                arcpy.Delete_management(fc)
                            arcpy.ClearWorkspaceCache_management()
                        end_time = time.clock()
                        msg_body = create_msg_body("Create 3D Laterals completed successfully.", start_time,
                                                   end_time)
                        msg(msg_body)
                        return Line3D_layer, objects3D_layer
                    else:
                        raise No3DFeatures
                else:
                    raise LicenseErrorSpatial
            else:
                raise LicenseError3D
    # NOTE(review): No3DFeatures has no dedicated handler; it falls through to
    # the bare except below. Several handlers (NoNoDataError, ValueError) look
    # copied from a raster tool and cannot fire here -- confirm intent.
    except NoLayerFile:
        print("Can't find Layer file. Exiting...")
        arcpy.AddError("Can't find Layer file. Exiting...")
    except LicenseError3D:
        print("3D Analyst license is unavailable")
        arcpy.AddError("3D Analyst license is unavailable")
    except LicenseErrorSpatial:
        print("Spatial Analyst license is unavailable")
        arcpy.AddError("Spatial Analyst license is unavailable")
    except NoNoDataError:
        print("Input raster does not have NODATA values")
        arcpy.AddError("Input raster does not have NODATA values")
    except ValueError:
        print("Input no flood value is not a number.")
        arcpy.AddError("Input no flood value is not a number.")
    except arcpy.ExecuteError:
        line, filename, synerror = trace()
        msg("Error on %s" % line, ERROR)
        msg("Error in file name: %s" % filename, ERROR)
        msg("With error message: %s" % synerror, ERROR)
        msg("ArcPy Error Message: %s" % arcpy.GetMessages(2), ERROR)
    except FunctionError as f_e:
        messages = f_e.args[0]
        msg("Error in function: %s" % messages["function"], ERROR)
        msg("Error on %s" % messages["line"], ERROR)
        msg("Error in file name: %s" % messages["filename"], ERROR)
        msg("With error message: %s" % messages["synerror"], ERROR)
        msg("ArcPy Error Message: %s" % messages["arc"], ERROR)
    except:
        line, filename, synerror = trace()
        msg("Error on %s" % line, ERROR)
        msg("Error in file name: %s" % filename, ERROR)
        msg("with error message: %s" % synerror, ERROR)
    finally:
        # always release any extension licenses that were checked out
        arcpy.CheckInExtension("3D")
        arcpy.CheckInExtension("Spatial")
# Script entry point: runs main() in debug mode (last argument = 1) with
# empty GP-tool parameters. For running outside ArcGIS Pro only!
if __name__ == "__main__":
    main("", "", "", "", "", "", "", "", "", 1)
|
{"/scripts/create_3Dgravity_mains.py": ["/scripts/settings.py"], "/scripts/create_surface_hole.py": ["/scripts/settings.py"], "/scripts/gptools.py": ["/scripts/create_3Dgravity_mains.py", "/scripts/create_surface_hole.py", "/scripts/create_3Dlaterals.py", "/scripts/create_3Dmanholes.py", "/scripts/create_elevation_tile_package.py", "/scripts/settings.py"], "/scripts/create_elevation_tile_package.py": ["/scripts/settings.py"], "/scripts/create_3Dmanholes.py": ["/scripts/settings.py"], "/scripts/create_3Dlaterals.py": ["/scripts/settings.py"]}
|
10,547
|
SRLKilling/interactive-rubiks
|
refs/heads/master
|
/cube.py
|
from algorithm import Algorithm
import colorama
class Cube:
    """A 3x3 Rubik's cube modelled as six 9-sticker faces.

    Face indices (see __str__): 0-3 form the horizontal band (rotateLeft
    cycles 0 <- 3 <- 2 <- 1), 4 is the top face, 5 the bottom face.
    Sticker i of a face is row i // 3, column i % 3.

    Fixes vs. the previous revision:
      * b2()/b_() used the undefined name `this` (NameError when called).
      * m()/e()/s() omitted the whole-cube rotation that every other wide
        move (u, d, r, l, f, b) and the uppercase slices (M, E, S) include,
        so e.g. M() followed by m() did not return to the solved state.
    """

    def __init__(self):
        self.reset()
        self.algo = {}
        # user-tweakable behaviour; see setOption()
        self.options = {}
        self.options["verbose"] = 2
        self.options["interactive"] = True
        self.options["rotateMode"] = True
        self.options["coloredOutput"] = True

    def reset(self):
        """Return to the solved state: face i is filled with colour i."""
        self.faces = [[i for j in range(9)] for i in range(6)]

    def pause(self):
        """Block until the user presses Enter (only in interactive mode)."""
        if self.options["interactive"]:
            input('')

    def pr(self, str):
        """Print *str* unless verbosity is 0."""
        if self.options["verbose"]:
            print(str)

    # Primary moves -- every other move is built from these three.
    def rotateLeft(self, n=1):
        """Rotate the whole cube n quarter-turns about the vertical axis."""
        for i in range(n):
            self.faces[0], self.faces[1], self.faces[2], self.faces[3] = self.faces[3], self.faces[0], self.faces[1], self.faces[2]
            # top face spins one way, bottom face the other
            f = self.faces[4]; f[0], f[1], f[2], f[3], f[5], f[6], f[7], f[8] = f[2], f[5], f[8], f[1], f[7], f[0], f[3], f[6]
            f = self.faces[5]; f[0], f[1], f[2], f[3], f[5], f[6], f[7], f[8] = f[6], f[3], f[0], f[7], f[1], f[8], f[5], f[2]

    def rotateDown(self, n=1):
        """Rotate the whole cube n quarter-turns front-face-down."""
        for i in range(n):
            f1, f2 = self.faces[5][::], self.faces[3][::]
            self.faces[1], self.faces[5] = self.faces[4][::], self.faces[1][::]
            # back and top faces are re-read upside down
            f = self.faces[3]; f[0], f[1], f[2], f[3], f[4], f[5], f[6], f[7], f[8] = f1[8], f1[7], f1[6], f1[5], f1[4], f1[3], f1[2], f1[1], f1[0]
            f = self.faces[4]; f[0], f[1], f[2], f[3], f[4], f[5], f[6], f[7], f[8] = f2[8], f2[7], f2[6], f2[5], f2[4], f2[3], f2[2], f2[1], f2[0]
            f = self.faces[2]; f[0], f[1], f[2], f[3], f[5], f[6], f[7], f[8] = f[2], f[5], f[8], f[1], f[7], f[0], f[3], f[6]
            f = self.faces[0]; f[0], f[1], f[2], f[3], f[5], f[6], f[7], f[8] = f[6], f[3], f[0], f[7], f[1], f[8], f[5], f[2]

    def turnFace(self, n=1):
        """Turn the front layer (face 1) n quarter-turns clockwise."""
        for i in range(n):
            f = self.faces[1]; f[0], f[1], f[2], f[3], f[5], f[6], f[7], f[8] = f[6], f[3], f[0], f[7], f[1], f[8], f[5], f[2]
            # cycle the adjacent edge stickers of left/top/right/bottom faces
            f1, f2, f3, f4 = self.faces[0], self.faces[4], self.faces[2], self.faces[5]
            f1[8], f1[5], f1[2], f2[6], f2[7], f2[8], f3[0], f3[3], f3[6], f4[2], f4[1], f4[0] = f4[2], f4[1], f4[0], f1[8], f1[5], f1[2], f2[6], f2[7], f2[8], f3[0], f3[3], f3[6]

    # Simple (single-layer) moves. Each X(n) applies n quarter-turns;
    # X2/X_ are the half-turn and counter-clockwise shorthands.
    def F(self, n=1):
        self.turnFace(n)
        return self

    def F2(self):
        self.turnFace(2)
        return self

    def F_(self):
        self.turnFace(3)
        return self

    def B(self, n=1):
        # turn the back by showing it to the front first
        self.rotateLeft(2)
        self.turnFace(n)
        self.rotateLeft(2)
        return self

    def B2(self):
        return self.B(2)

    def B_(self):
        return self.B(3)

    def U(self, n=1):
        self.rotateDown()
        self.turnFace(n)
        self.rotateDown(3)
        return self

    def U2(self):
        self.U(2)
        return self

    def U_(self):
        self.U(3)
        return self

    def D(self, n=1):
        self.rotateDown(3)
        self.turnFace(n)
        self.rotateDown()
        return self

    def D2(self):
        self.D(2)
        return self

    def D_(self):
        self.D(3)
        return self

    def R(self, n=1):
        self.rotateLeft(3)
        self.turnFace(n)
        self.rotateLeft()
        return self

    def R2(self):
        self.R(2)
        return self

    def R_(self):
        self.R(3)
        return self

    def L(self, n=1):
        self.rotateLeft()
        self.turnFace(n)
        self.rotateLeft(3)
        return self

    def L2(self):
        self.L(2)
        return self

    def L_(self):
        self.L(3)
        return self

    ## Double-layer (wide) moves: the opposite face plus a cube rotation.
    def f(self, n=1):
        self.B(n)
        self.z(n)
        return self

    def f2(self):
        return self.f(2)

    def f_(self):
        return self.f(3)

    def b(self, n=1):
        self.F(n)
        self.z(3*n)
        return self

    def b2(self):
        # BUG FIX: previously `return this.b(2)` (NameError).
        return self.b(2)

    def b_(self):
        # BUG FIX: previously `return this.b(3)` (NameError).
        return self.b(3)

    def u(self, n=1):
        self.D(n)
        self.y(n)
        return self

    def u2(self):
        return self.u(2)

    def u_(self):
        return self.u(3)

    def d(self, n=1):
        self.U(n)
        self.y(3*n)
        return self

    def d2(self):
        return self.d(2)

    def d_(self):
        return self.d(3)

    def r(self, n=1):
        self.L(n)
        self.x(n)
        return self

    def r2(self):
        return self.r(2)

    def r_(self):
        return self.r(3)

    def l(self, n=1):
        self.R(n)
        self.x(3*n)
        return self

    def l2(self):
        return self.l(2)

    def l_(self):
        return self.l(3)

    ## Middle-slice moves: outer layers turned back plus a cube rotation.
    def M(self, n=1):
        self.R(n)
        self.L(3*n)
        self.x(3*n)
        return self

    def m(self, n=1):
        self.L(n)
        self.R(3*n)
        # BUG FIX: the compensating cube rotation was missing, so m() was
        # not the inverse of M().
        self.x(n)
        return self

    def E(self, n=1):
        self.U(n)
        self.D(3*n)
        self.y(3*n)
        return self

    def e(self, n=1):
        self.U(3*n)
        self.D(n)
        # BUG FIX: missing compensating rotation (see m()).
        self.y(n)
        return self

    def S(self, n=1):
        self.F(3*n)
        self.B(n)
        self.z(n)
        return self

    def s(self, n=1):
        self.F(n)
        self.B(3*n)
        # BUG FIX: missing compensating rotation (see m()).
        self.z(3*n)
        return self

    ## Whole-cube rotations.
    def x(self, n=1):
        self.rotateDown(3*n)
        return self

    def x2(self):
        return self.x(2)

    def x_(self):
        return self.x(3)

    def y(self, n=1):
        self.rotateLeft(3*n)
        return self

    def y2(self):
        return self.y(2)

    def y_(self):
        return self.y(3)

    def z(self, n=1):
        # z is rotateDown conjugated by rotateLeft
        self.rotateLeft(3)
        self.rotateDown(n)
        self.rotateLeft()
        return self

    def z2(self):
        return self.z(2)

    def z_(self):
        return self.z(3)

    ## Eval and algorithms
    def eval(self, str):
        """Parse one interactive command line and execute it; redraw on change."""
        algo = Algorithm()
        changer = algo.parseLine(str)
        algo.do(self)
        if changer: self.printCube()

    def do(self, str, m=0, silent=False):
        """Apply a move string such as "R U2 R'" pre-rotated by m z-turns.

        Unknown characters are skipped; a '2' may prefix or suffix a move,
        "'" suffixes an inverse. The applied sequence is echoed (translated
        through zDecal) unless silent or verbosity <= 1.
        """
        self.z(m)
        i = 0
        s = ""
        while i < len(str):
            n, c = 1, str[i]
            if i+1 < len(str) and str[i+1] == "'":
                n = 3
                i += 2
            elif i+1 < len(str) and str[i+1] == '2':
                n = 2
                i += 2
            elif i+1 < len(str) and c == '2':
                # prefix form "2F"
                n = 2
                c = str[i+1]
                i += 2
            else:
                i += 1
            # move letters map 1:1 onto method names
            if c in "FBUDRLfbudrlMmEeSsxyz":
                getattr(self, c)(n)
            else:
                continue
            s += self.zDecal(c, m, n) + " "
        self.z(3*m)
        if self.options["verbose"] > 1 and silent == False:
            if self.options["rotateMode"]: print(s)
            else:
                if m%4 == 0: print(s)
                if m%4 == 1: print('z -', s, "- z'")
                elif m%4 == 2: print('z2 -', s, '- z2')
                elif m%4 == 3: print("z' -", s, '- z')

    def addAlgo(self, name, algo):
        """Register *algo* under *name* for later doAlgo() calls."""
        self.algo[name] = algo

    def doAlgo(self, str):
        """Run a registered algorithm; returns an error string if unknown."""
        if str in self.algo:
            return self.algo[str].do(self)
        else: return "Error: '" + str + "' has not been declared"

    def imp(self, file):
        """Load <file>.algo from disk and register it under *file*."""
        a = Algorithm()
        a.loadFromFile(file + ".algo")
        self.addAlgo(file, a)

    def setOption(self, name, value):
        """Set an option, coercing *value* to the option's current type."""
        if name not in self.options:
            print("Error: '", name, "' is not an option", sep='')
        else:
            if type(self.options[name]) == int:
                self.options[name] = int(value)
            elif type(self.options[name]) == float:
                self.options[name] = float(value)
            elif type(self.options[name]) == bool:
                self.options[name] = (value.lower() in ["true", "yes", "y", "t", "1"])
            else:
                self.options[name] = value

    ## Printing
    def zDecal(self, c, i, n):
        """Translate move letter *c* for display after i z-rotations.

        In rotateMode, face moves are renamed to the face the user actually
        sees (U/L/D/R cycle under z); slice moves additionally flip their
        direction on odd rotations.
        """
        suffix = ['', "2", "'"]
        if self.options["rotateMode"]:
            moves = [['U', 'L', 'D', 'R'], ['u', 'l', 'd', 'r']]
            for move in moves:
                if c in move:
                    return move[(move.index(c) + i) % len(move)] + suffix[n-1]
            moves = [['M', 'E'], ['m', 'e']]
            for move in moves:
                if c in move:
                    if i%4 == 0: return c + suffix[n-1]
                    elif i%4 == 1: return move[(move.index(c) + i) % len(move)] + suffix[n-1]
                    elif i%4 == 2: return c + suffix[-1*(n-2)+1]
                    elif i%4 == 3: return move[(move.index(c) + i) % len(move)] + suffix[-1*(n-2)+1]
        return c + suffix[n-1]

    def __str__(self):
        """Plain-text unfolded net: top, the four side faces, then bottom."""
        s = ''
        f = self.faces
        s += "        |-------|\n"
        for i in range(3):
            s += "        | " + str(f[4][3*i]) + ' ' + str(f[4][3*i+1]) + ' ' + str(f[4][3*i+2]) + ' |\n'
        s += "|-------|-------|-------|-------|\n"
        for i in range(3):
            s += '| ' + str(f[0][3*i]) + ' ' + str(f[0][3*i+1]) + ' ' + str(f[0][3*i+2]) + ' | ' + str(f[1][3*i]) + ' ' + str(f[1][3*i+1]) + ' ' + str(f[1][3*i+2]) + ' | ' + str(f[2][3*i]) + ' ' + str(f[2][3*i+1]) + ' ' + str(f[2][3*i+2]) + ' | ' + str(f[3][3*i]) + ' ' + str(f[3][3*i+1]) + ' ' + str(f[3][3*i+2]) + ' |\n'
        s += "|-------|-------|-------|-------|\n"
        for i in range(3):
            s += "        | " + str(f[5][3*i]) + ' ' + str(f[5][3*i+1]) + ' ' + str(f[5][3*i+2]) + ' |\n'
        s += "        |-------|\n"
        return s

    def printCube(self):
        """Draw the cube net in colour (falls back to __str__ when disabled)."""
        if not self.options["verbose"] > 0:
            return
        if not self.options["coloredOutput"]:
            print(self)
            return
        print('')
        f = self.faces
        for i in range(3):
            print("      ", end='')
            self.printCase(4, 3*i)
            self.printCase(4, 3*i+1)
            self.printCase(4, 3*i+2)
            print('')
        for i in range(3):
            self.printCase(0, 3*i)
            self.printCase(0, 3*i+1)
            self.printCase(0, 3*i+2)
            self.printCase(1, 3*i)
            self.printCase(1, 3*i+1)
            self.printCase(1, 3*i+2)
            self.printCase(2, 3*i)
            self.printCase(2, 3*i+1)
            self.printCase(2, 3*i+2)
            self.printCase(3, 3*i)
            self.printCase(3, 3*i+1)
            self.printCase(3, 3*i+2)
            print('')
        for i in range(3):
            print("      ", end='')
            self.printCase(5, 3*i)
            self.printCase(5, 3*i+1)
            self.printCase(5, 3*i+2)
            print('')
        print('')

    def printCase(self, f, i):
        """Print sticker i of face f as a coloured block via colorama."""
        if self.faces[f][i] == 0: print(colorama.Back.GREEN, " ", colorama.Back.RESET, sep='', end='')
        if self.faces[f][i] == 1: print(colorama.Back.WHITE, " ", colorama.Back.RESET, sep='', end='')
        if self.faces[f][i] == 2: print(colorama.Back.BLUE, " ", colorama.Back.RESET, sep='', end='')
        if self.faces[f][i] == 3: print(colorama.Back.YELLOW, " ", colorama.Back.RESET, sep='', end='')
        if self.faces[f][i] == 4: print(colorama.Back.MAGENTA, " ", colorama.Back.RESET, sep='', end='')
        if self.faces[f][i] == 5: print(colorama.Back.RED, " ", colorama.Back.RESET, sep='', end='')
|
{"/cube.py": ["/algorithm.py"], "/interactive-rubiks.py": ["/cube.py", "/algorithm.py"], "/algorithm.py": ["/pattern.py"]}
|
10,548
|
SRLKilling/interactive-rubiks
|
refs/heads/master
|
/interactive-rubiks.py
|
from cube import Cube
from algorithm import Algorithm
import colorama

# Enable ANSI colour handling on Windows terminals.
colorama.init()

# Build the cube and preload the solver stages shipped as .algo files.
c = Cube()
for stage in ("firstcross", "firstface", "middle", "lastcross", "lastface", "resolve"):
    algo = Algorithm()
    algo.loadFromFile(stage + ".algo")
    c.addAlgo(stage, algo)

print("Welcome to Rubick's Cube player ! :)")
print("Type 'help' to get a list of usable command")
c.printCube()

# REPL: evaluate the previous line, then prompt for the next one,
# stopping as soon as the user types "exit".
inStr = ""
while inStr != "exit":
    c.eval(inStr)
    inStr = input(">> ")
|
{"/cube.py": ["/algorithm.py"], "/interactive-rubiks.py": ["/cube.py", "/algorithm.py"], "/algorithm.py": ["/pattern.py"]}
|
10,549
|
SRLKilling/interactive-rubiks
|
refs/heads/master
|
/pattern.py
|
class Pattern:
    """A sticker pattern matched against a Cube under the four z rotations.

    Each of the 6 faces holds 9 cells: '_' matches anything; a digit is a
    colour *variable* -- all cells with the same digit must show the same
    cube colour, and different digits must show different colours.
    """

    def __init__(self, str=None):
        # default pattern is all wildcards (matches every cube)
        self.faces = [['_' for j in range(9)] for i in range(6)]
        if str != None: self.load(str)

    def load(self, str):
        """Parse a 9-line textual net: 3 top rows, 3 rows of the four side
        faces (12 tokens each), 3 bottom rows."""
        lines = str.split('\n')
        for i in range(3):
            lines[i] = lines[i].strip().split()
            self.faces[4][i*3], self.faces[4][i*3+1], self.faces[4][i*3+2] = lines[i][0], lines[i][1], lines[i][2]
        for i in range(3):
            lines[3+i] = lines[3+i].strip().split()
            self.faces[0][i*3], self.faces[0][i*3+1], self.faces[0][i*3+2] = lines[3+i][0], lines[3+i][1], lines[3+i][2]
            self.faces[1][i*3], self.faces[1][i*3+1], self.faces[1][i*3+2] = lines[3+i][3], lines[3+i][4], lines[3+i][5]
            self.faces[2][i*3], self.faces[2][i*3+1], self.faces[2][i*3+2] = lines[3+i][6], lines[3+i][7], lines[3+i][8]
            self.faces[3][i*3], self.faces[3][i*3+1], self.faces[3][i*3+2] = lines[3+i][9], lines[3+i][10], lines[3+i][11]
        for i in range(3):
            lines[6+i] = lines[6+i].strip().split()
            self.faces[5][i*3], self.faces[5][i*3+1], self.faces[5][i*3+2] = lines[6+i][0], lines[6+i][1], lines[6+i][2]

    def match(self, cube):
        """Try the pattern under each of 4 z rotations of *cube*.

        Returns the rotation index 0-3 that matched, or -1. The cube is
        rotated in place during the search, but the net rotation is always
        cancelled (z() once per failed attempt, z(3*z) on success -> 4 z
        turns total, i.e. identity).
        """
        for z in range(4):
            colors, matched = [], True
            for f in range(6):
                if matched:
                    for i in range(9):
                        if self.matchCase(cube, colors, f, i) == False:
                            cube.z()
                            matched = False
                            break
            if matched:
                cube.z(3*z)
                return z
        return -1

    def matchOnly(self, cube, c):
        """Like match(), but only requires all cells tagged with digit *c*
        to share one cube colour; all other cells are ignored."""
        for z in range(4):
            color, matched = -1, True
            for f in range(6):
                if matched:
                    for i in range(9):
                        if color == -1 and self.faces[f][i] == c:
                            color = cube.faces[f][i]
                        elif color != -1 and self.faces[f][i] == c and cube.faces[f][i] != color:
                            cube.z()
                            matched = False
                            break
            if matched:
                cube.z(3*z)
                return z
        return -1

    def matchCase(self, cube, colors, f, i):
        """Check one cell; *colors* accumulates (pattern digit, cube colour)
        bindings so equal digits map to equal colours and distinct digits to
        distinct colours."""
        if(self.faces[f][i] == '_'):
            return True
        else:
            char, color = int(self.faces[f][i]), -1
            # print(colors, char)
            for c in colors:
                if c[0] == char:
                    color = c[1]
                    break
                elif c[1] == cube.faces[f][i]:
                    # this cube colour is already bound to a different digit
                    return False
            if color == -1:
                colors.append( (char, cube.faces[f][i]) )
                return True
            else:
                return color == cube.faces[f][i]
|
{"/cube.py": ["/algorithm.py"], "/interactive-rubiks.py": ["/cube.py", "/algorithm.py"], "/algorithm.py": ["/pattern.py"]}
|
10,550
|
SRLKilling/interactive-rubiks
|
refs/heads/master
|
/algorithm.py
|
from pattern import Pattern
from random import randint
class AlgoAction:
    """Algorithm step: apply a move string to the cube."""

    def __init__(self, code, silent=False):
        # Move sequence (e.g. "R U R' U'") and whether to suppress echoing.
        self.code, self.silent = code, silent

    def do(self, cube, m):
        """Run the moves under rotation offset *m*; never restarts the algo."""
        cube.do(self.code, m, self.silent)
        return False


class AlgoPrint:
    """Algorithm step: print a message via the cube's verbosity-aware printer."""

    def __init__(self, str):
        self.str = str

    def do(self, cube):
        cube.pr(self.str)
        return False


class AlgoImport:
    """Algorithm step: load another .algo file and register it on the cube."""

    def __init__(self, file):
        self.file = file

    def do(self, cube):
        cube.imp(self.file)
        return False


class AlgoDoAlgo:
    """Algorithm step: run a previously registered algorithm by name."""

    def __init__(self, str):
        self.str = str

    def do(self, cube):
        # Propagate the sub-algorithm's result (may signal restart/error).
        return cube.doAlgo(self.str)


class AlgoPrintCube:
    """Algorithm step: redraw the cube."""

    def do(self, cube):
        cube.printCube()
        return False


class AlgoPause:
    """Algorithm step: wait for the user (no-op in non-interactive mode)."""

    def do(self, cube):
        cube.pause()
        return False


class AlgoReset:
    """Algorithm step: restore the solved state."""

    def do(self, cube):
        cube.reset()
        return False


class AlgoSetOption:
    """Algorithm step: change one of the cube's options."""

    def __init__(self, optname, optval):
        self.name, self.val = optname, optval

    def do(self, cube):
        cube.setOption(self.name, self.val)
        return False
class AlgoRandomize:
    """Algorithm step: scramble the cube with 20 random face turns.

    Fixes vs. the previous revision:
      * randint(0, 18) could yield t == 6, which matched no face and emitted
        an empty move (~1/19 of the time); the draw is now 0..17.
      * inverse moves were suffixed with "_", which Cube.do() does not parse
        (it silently dropped the inversion); the canonical suffix is "'".
    """

    # The six basic face moves, indexed by t = r // 3.
    _MOVES = "FBUDRL"

    def __init__(self, silent=False):
        # When True, cube.do() does not echo the generated sequence.
        self.silent = silent

    def do(self, cube):
        """Apply a random 20-move scramble to *cube*; never restarts."""
        s = ""
        for j in range(20):
            r = randint(0, 17)
            t, n = r // 3, r % 3
            s += self._MOVES[t]
            if n == 1:
                s += "2"
            elif n == 2:
                s += "'"
            s += " "
        cube.do(s, 0, self.silent)
        return False
class Algorithm:
    """A scripted sequence of cube steps, optionally guarded by patterns.

    Nested blocks carry one of the condition kinds below: IF runs the block
    once when a pattern matches, WHILE repeats while it matches, UNTIL
    repeats until it matches.
    """
    IF = 0
    WHILE = 1
    UNTIL = 2
def __init__(self, patterns=[Pattern()], parent=None, conditionType=IF):
self.step = []
self.patterns = patterns
self.conditionType = conditionType
self.parent = parent
self.elseAlgo = None
    def newAlgo(self, patterns, cond):
        """Append and return a nested conditional block (IF/WHILE/UNTIL)."""
        step = Algorithm(patterns, self, cond)
        self.step.append(step)
        return step

    def newDoAlgo(self, str):
        """Append a step that runs the registered algorithm named *str*."""
        self.step.append( AlgoDoAlgo(str) )

    def newMatchdoing(self, pattern, c, action):
        """Append and return a match-doing block (AlgoMatch is defined elsewhere
        in this module)."""
        step = AlgoMatch(self, [pattern], c, action)
        self.step.append(step)
        return step

    def newElseMatchdoing(self, pattern, c, action):
        """Attach a match-doing block as this block's else branch; the new
        block is parented to this block's parent."""
        self.elseAlgo = AlgoMatch(self.parent, [pattern], c, action)
        return self.elseAlgo

    def newElse(self, patterns):
        """Attach and return a plain IF block as this block's else branch."""
        self.elseAlgo = Algorithm(patterns, self.parent, Algorithm.IF)
        return self.elseAlgo

    def newAction(self, str, silent=False):
        """Append a move-string step."""
        self.step.append( AlgoAction(str, silent) )

    def newPrint(self, str):
        """Append a message-printing step."""
        self.step.append( AlgoPrint(str) )

    def newPrintCube(self):
        """Append a cube-redraw step."""
        self.step.append( AlgoPrintCube() )

    def newPause(self):
        """Append a wait-for-user step."""
        self.step.append( AlgoPause() )

    def newRandomize(self, silent=False):
        """Append a scramble step."""
        self.step.append( AlgoRandomize(silent) )

    def newReset(self):
        """Append a reset-to-solved step."""
        self.step.append( AlgoReset() )

    def newImport(self, str):
        """Append a step that loads another .algo file."""
        self.step.append( AlgoImport(str) )

    def newSetOption(self, optname, optval):
        """Append an option-setting step."""
        self.step.append( AlgoSetOption(optname, optval) )
def loadFromFile(self, filepath):
file = None
try:
file = open(filepath, "r")
except IOError:
print("Error: '", filepath, "' no such file", sep='')
return
line = file.readline()
lineno = 1
step = self
while line != '':
line = line.strip(' \t\n\r')
if line.startswith("match "):
param = line[6:].split("doing")
param[0] = param[0].strip(); param[1] = param[1].strip()
str = ''
for i in range(9): str += file.readline()
lineno += 9
step = step.newMatchdoing(Pattern(str), param[0], param[1])
elif line.startswith("elseif-match "):
if self.conditionType != Algorithm.IF:
print("Error in ",filepath,":", lineno,": 'elseif-match' must follow an 'if', 'elseif', 'match', or 'elseif-match' clause", sep='')
return
param = line[13:].split("doing")
param[0] = param[0].strip(); param[1] = param[1].strip()
str = ''
for i in range(9): str += file.readline()
lineno += 9
step = step.newElseMatchdoing(Pattern(str), param[0], param[1])
elif line.startswith("if"):
l = self.parsePatterns(file)
lineno += l[0]
step = step.newAlgo(l[1], Algorithm.IF)
elif line == "elseif":
if self.conditionType != Algorithm.IF:
print("Error in ",filepath,":", lineno,": 'elseif' must follow an 'if', 'elseif', 'match', or 'elseif-match' clause", sep='')
return
l = self.parsePatterns(file)
lineno += l[0]
step = step.newElse(l[1])
elif line == "else":
if self.conditionType != Algorithm.IF:
print("Error in ",filepath,":", lineno,": 'else' must follow an 'if', 'elseif', 'match', or 'elseif-match' clause", sep='')
return
step = step.newElse([Pattern()])
elif line.startswith("while"):
l = self.parsePatterns(file)
lineno += l[0]
step = step.newAlgo(l[1], Algorithm.WHILE)
elif line.startswith("until"):
l = self.parsePatterns(file)
lineno += l[0]
step = step.newAlgo(l[1], Algorithm.UNTIL)
elif line == "end":
if step.parent == None:
print("Error in ",filepath,":", lineno,": Too much 'end'", sep='')
return
step = step.parent
else:
self.parseLine(line, step, filepath, lineno)
line = file.readline()
lineno += 1
def parseLine(self, line, step=None, filename='<input>', lineno=1):
if(step == None): step = self
line = line.strip()
if line.lower() == "help":
print("Here is a list of command :")
print(" - 'randomize' to get a random rubick's cube (if you don't want to print the random moves, use 'randomize silent')")
print(" - 'do' succeeded by a move sequence (moves are L, R, U, D, B, F, l, r, u, d, b, f, M, E, S, m, e, s, x, y, z)")
print(" - 'doalgo <name>' will do the algorithm named <name>, previously loaded with import")
print(" Startup loaded algo: firstcross, firstface, middle, lastcross, lastface, resolve")
print(" - 'import <path>' so that <path>.algo will by imported into the algorithm list")
print(" - 'reset' to start with a new fresh cube")
print(" - 'set <name> = <value>' to set one of the options")
print(" current options are : 'interactive' to enable ('true') or disable ('false') pause during algorithm execution")
print(" 'coloredOutput' to enable a colored output ('true') vs a numbered output ('false')")
print(" 'verbose' to enable all output ('2'), only cube printing ('1'), or no output at all ('0')")
print(" - 'exit' to quit this command prompt")
print("")
print("Basically, if you want to have fun with this software, juste use :")
print(" >> randomize")
print(" >> doalgo resolve")
print("")
elif line.startswith("do "):
if(line[3:].strip().startswith("silent ")): step.newAction(line[3:].strip()[6:], True)
else: step.newAction(line[3:].strip())
return True
elif line.startswith("doalgo "):
step.newDoAlgo(line[7:].strip())
return False
elif line.startswith("print "):
step.newPrint(line[6:].strip())
return False
elif line.lower() == "printcube":
step.newPrintCube()
return False
elif line.lower() == "reset":
step.newReset()
return True
elif line.lower().startswith("import "):
step.newImport(line[7:].strip())
return False
elif line.lower() == "pause":
step.newPause()
return False
elif line.lower().startswith("randomize"):
if(line[10:].strip().startswith("silent")): step.newRandomize(True)
else: step.newRandomize()
return True
elif line.lower().startswith("set"):
opt = line[3:].split("=")
step.newSetOption(opt[0].strip(), opt[1].strip())
elif line != '' and line.startswith('#') == False:
print("Error in ",filename,":", lineno,": Unrecognized syntax", sep='')
# print(line)
return False
def parsePatterns(self, file):
lineno, pat, cont = 0, [], True
while cont:
str = ''
for i in range(9): str += file.readline()
lineno += 9
pat.append(Pattern(str))
s = file.readline()
if(s.strip().lower() != "or"):
lineno += 1
cont = False
return [lineno, pat]
def do(self, cube):
if(self.conditionType == Algorithm.IF):
m = self.match(cube)
if m >= 0:
if self.doSteps(cube, m): return True
elif self.elseAlgo != None:
if self.elseAlgo.do(cube): return True
elif(self.conditionType == Algorithm.WHILE):
m = self.match(cube)
while m >= 0:
if self.doSteps(cube, m): return True
m = self.match(cube)
elif(self.conditionType == Algorithm.UNTIL):
m = self.match(cube)
j = 0
while m < 0 and j <= 50:
if self.doSteps(cube, 0): return True
m = self.match(cube)
j += 1
if j >= 50:
print("Erreur - boucle infinie detectee")
return True
return False
def doSteps(self, cube, m):
for step in self.step:
if isinstance(step, AlgoAction):
if step.do(cube, m): return True
else:
if step.do(cube): return True
return False
def match(self, cube):
for p in self.patterns:
m = p.match(cube)
if m >= 0: return m
return -1
class AlgoMatch(Algorithm):
    """'match <c> doing <action>' step: rotate with `action` until the pattern fits.

    If the pattern partially matches (matchOnly on channel `c`), the step
    repeats `action` up to 4 times until the full pattern matches, then runs
    its child steps; otherwise the else-branch (if any) runs.
    """
    def __init__(self, parent, patterns, c, action):
        self.patterns = patterns
        # Move sequence applied repeatedly to bring the cube into position.
        self.action = action
        # Channel/face selector passed to Pattern.matchOnly.
        self.c = c
        self.parent = parent
        self.conditionType = Algorithm.IF
        self.step = []
        self.elseAlgo = None
    def do(self, cube):
        """Try to align *cube* to the pattern; return True if a step aborts."""
        m = self.patterns[0].matchOnly(cube, self.c)
        if m >= 0:
            n = 0
            m = self.patterns[0].match(cube)
            # Apply `action` silently until the full pattern matches
            # (at most 4 attempts — a full rotation cycle).
            while m < 0 and n < 4:
                cube.do(self.action, m, True)
                m = self.match(cube)
                n += 1
            if n != 4:
                # Echo the effective (z-rotated) action that was applied.
                print( cube.zDecal(self.action, m, n), ' - ', sep='', end='')
                if self.doSteps(cube, m): return True
            elif self.elseAlgo != None:
                if self.elseAlgo.do(cube): return True
        elif self.elseAlgo != None:
            if self.elseAlgo.do(cube): return True
|
{"/cube.py": ["/algorithm.py"], "/interactive-rubiks.py": ["/cube.py", "/algorithm.py"], "/algorithm.py": ["/pattern.py"]}
|
10,551
|
ToddTurnbull/reload
|
refs/heads/master
|
/tests/__init__.py
|
from .context import db
|
{"/examples.py": ["/db/__init__.py", "/db/models/__init__.py"]}
|
10,552
|
ToddTurnbull/reload
|
refs/heads/master
|
/db/models/__init__.py
|
from sqlalchemy import Boolean
from sqlalchemy import CheckConstraint
from sqlalchemy import Column
from sqlalchemy import Date
from sqlalchemy import DateTime
from sqlalchemy import DECIMAL
from sqlalchemy import ForeignKey
from sqlalchemy import func
from sqlalchemy import Index
from sqlalchemy import Integer
from sqlalchemy import PrimaryKeyConstraint
from sqlalchemy import Sequence
from sqlalchemy import String
from sqlalchemy import Text
from sqlalchemy import UniqueConstraint
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship
from functools import partial
def name_table(table, schema=None):
    """Return *table*, qualified as ``schema.table`` when a schema is given."""
    if schema:
        return f"{schema}.{table}"
    return table
# Convenience partial: qualify a table name with this module's schema.
with_schema = partial(name_table, schema="tempdb") # to do: use config.schema
# Declarative base shared by every model in this module.
Base = declarative_base()
Base.metadata.schema = "tempdb" # to do: use config.schema
# Deterministic constraint/index names so migrations can refer to them.
Base.metadata.naming_convention = {
    "ix": "%(table_name)s_%(column_0_name)s_index",
    "uq": "%(table_name)s_%(column_0_name)s_key",
    # "ck": "%(table_name)s_%(column_0_name)s_check",
    "fk": "%(table_name)s_%(column_0_name)s_%(referred_table_name)s_%(referred_column_0_name)s_fkey",
    "pk": "%(table_name)s_pkey"
}
class Data(Base):
    """Legacy flat import table: one wide row per organization record.

    Mirrors the source system's denormalized layout (contacts, addresses and
    source metadata inline); the normalized tables below supersede it.
    """
    __tablename__ = "data"
    # Columns
    recordnumber = Column(String(5), nullable=False, unique=True)
    internalmemo = Column(Text)
    comments = Column(Text)
    # Legacy record id; constrained to the source system's "WRN" prefix.
    recnum = Column(
        String(7),
        CheckConstraint(
            "left(RecNum, 3) = 'WRN'",
            name = "data_recnum_check"
        ),
        nullable=False,
        unique=True
    )
    org1 = Column(String(100))
    org2 = Column(String(70))
    org3 = Column(String(70))
    org4 = Column(String(70))
    org5 = Column(String(70))
    altorg = Column(Text)
    formerorg = Column(Text)
    xref = Column(Text)
    streetbuilding = Column(String(90))
    streetaddress = Column(String(90))
    streetcity = Column(String(40))
    mailcareof = Column(String(60))
    building = Column(String(90))
    address = Column(String(90))
    city = Column(String(40))
    province = Column(String(25))
    postalcode = Column(String(7))
    accessibility = Column(Text)
    location = Column(String(60))
    intersection = Column(String(60))
    officephone = Column(Text)
    fax = Column(Text)
    email = Column(Text)
    www = Column(String(255))
    afterhoursphone = Column(Text)
    crisisphone = Column(Text)
    tddphone = Column(Text)
    data = Column(String(30))
    description = Column(Text)
    pubdescription = Column(Text)
    generalinfo = Column(Text)
    bnd = Column(Text)
    otherresource = Column(Text)
    fees = Column(Text)
    hours = Column(Text)
    dates = Column(Text)
    areaserved = Column(Text)
    eligibility = Column(Text)
    application = Column(Text)
    languages = Column(Text)
    contact1 = Column(String(60))
    contact1title = Column(String(120))
    contact1org = Column(String(90))
    contact1phone = Column(Text)
    contact2 = Column(String(60))
    contact2title = Column(String(120))
    printedmaterial = Column(Text)
    contact2org = Column(String(90))
    contact2phone = Column(Text)
    contact3 = Column(String(60))
    contact3title = Column(String(120))
    contact3org = Column(String(90))
    contact3phone = Column(Text)
    contact4 = Column(String(60))
    contact4title = Column(String(120))
    contact4org = Column(String(90))
    contact4phone = Column(Text)
    dateestablished = Column(String(60))
    elections = Column(String(120))
    funding = Column(Text)
    ddcode = Column(String(10))
    levelofservice = Column(String(60))
    subject = Column(Text)
    usedfor = Column(Text)
    blue = Column(Text)
    seealso = Column(Text)
    localsubjects = Column(Text)
    typeofrecord = Column(String(2))
    qualitylevel = Column(String(20))
    tobedeleted = Column(String(20))
    distribution = Column(Text)
    pub = Column(Text)
    sourceofinfo = Column(String(60))
    sourcetitle = Column(String(60))
    sourceorg = Column(String(60))
    sourcebuilding = Column(String(30))
    sourceaddress = Column(String(60))
    sourcecity = Column(String(30))
    sourceprovince = Column(String(2))
    sourcepostalcode = Column(String(7))
    sourcephone = Column(Text)
    collectedby = Column(String(40))
    datecollected = Column(String(10))
    createdby = Column(String(40))
    updatedby = Column(String(40))
    updatedate = Column(String(10))
    updateschedule = Column(String(10))
    historyofupdate = Column(String(10))
    lastmodified = Column(Text)
    org1_sort = Column(String(100))
    id = Column(Integer, primary_key=True)
    org_name_id = Column(Integer, nullable=False)
# delete?
class Thes(Base):
    """Legacy thesaurus term (superseded by Thesaurus / thes_original)."""
    __tablename__ = "thes"
    # Columns
    id = Column(Integer, primary_key=True)
    term = Column(String(60), nullable=False, index=True)
    note = Column(Text, nullable=False)
    action = Column(String(6))
    cat_id = Column(Integer, ForeignKey("thes_cat.id"))
    sort = Column(String(6))
# delete?
class ThesCat(Base):
    """Legacy thesaurus category lookup."""
    __tablename__ = "thes_cat"
    # Columns
    id = Column(Integer, primary_key=True)
    category = Column(String(30), nullable=False)
# delete?
class ThesTree(Base):
    """Legacy thesaurus hierarchy node (parent links into thes)."""
    __tablename__ = "thes_tree"
    # Columns
    id = Column(Integer, primary_key=True)
    term = Column(Text, nullable=False)
    parent_id = Column(Integer, ForeignKey("thes.id"))
    cat_id = Column(Integer, nullable=False)
# delete?
class City(Base):
    """Simple city-name lookup."""
    __tablename__ = "city"
    # Columns
    id = Column(Integer, primary_key=True)
    city = Column(String(20), nullable=False)
class Pub(Base):
    """A publication (directory) that organizations can be listed in."""
    __tablename__ = "pub"
    # Columns
    id = Column(Integer, primary_key=True)
    code = Column(String(20), nullable=False, unique=True)
    title = Column(String(50), nullable=False, index=True)
    isdefault = Column(Boolean, nullable=False, default=False)
    lastupdated = Column(DateTime)
    note = Column(Text)
    # Relationships
    taxonomy = relationship( # many-to-many
        "TaxLinkNote",
        secondary = with_schema("pubtax")
    )
# delete?
class ThesRelated(Base):
    """Legacy related-term pairs between thes entries."""
    __tablename__ = "thes_related"
    __table_args__ = (
        PrimaryKeyConstraint("thes_id", "related_id"),
    )
    # Columns
    thes_id = Column(Integer, ForeignKey("thes.id"), nullable=False)
    related_id = Column(Integer, ForeignKey("thes.id"), nullable=False)
# delete?
class ThesReject(Base):
    """Legacy rejected-term → accepted-term mapping in the thesaurus."""
    __tablename__ = "thes_reject"
    # SQLAlchemy needs a primary key
    __table_args__ = (
        PrimaryKeyConstraint("thes_id", "accept_id"),
    )
    # Columns
    thes_id = Column(Integer, ForeignKey("thes.id"), nullable=False)
    accept_id = Column(Integer, ForeignKey("thes.id"), nullable=False)
class AddressType(Base):
    """Lookup of address kinds (e.g. street vs. mailing)."""
    __tablename__ = "tlkpaddresstype"
    # Columns
    id = Column(Integer, primary_key=True)
    name = Column(String(50), nullable=False)
class Address(Base):
    """A physical or mailing address, optionally geocoded (UTM or lat/long).

    The table-level check requires coordinate pairs to be set together
    (both-or-neither for utm_x/utm_y and latitude/longitude).
    """
    __tablename__ = "tbladdress"
    __table_args__ = (
        CheckConstraint("""
            (utm_x is null and utm_y is null)
            or
            (utm_x is not null and utm_y is not null)
            or
            (latitude is null and longitude is null)
            or
            (latitude is not null and longitude is not null)
            """,
            name = "tbladdress_check"
        ),
    )
    # Columns
    id = Column(Integer, primary_key=True)
    addresstypeid = Column(Integer, ForeignKey("tlkpaddresstype.id"), nullable=False)
    incareof = Column(String(60))
    building = Column(String(50))
    address = Column(String(50))
    city = Column(String(50), nullable=False)
    province = Column(String(2), default="ON")
    # Canadian postal code format "A1A 1A1" (case-insensitive regex).
    postalcode = Column(
        String(7),
        CheckConstraint(
            "postalcode ~* '[a-z][0-9][a-z] [0-9][a-z][0-9]'",
            name = "tbladdress_postalcode_check"
        )
    )
    intersection = Column(String(255))
    unit = Column(String(10))
    unitvalue = Column(String(10))
    streetnumber = Column(String(10))
    streetsuffix = Column(String(10))
    streetdirection = Column(String(2))
    unitextra = Column(String(25))
    deliverynumber = Column(String(10))
    deliverystation = Column(String(30))
    deliverymode = Column(String(20))
    busroute = Column(String(50))
    utm_x = Column(Integer)
    utm_y = Column(Integer)
    ismappable = Column(Boolean)
    latitude = Column(DECIMAL(11,9))
    longitude = Column(DECIMAL(11,9))
    # Relationships
    type = relationship("AddressType") # many-to-one
    access = relationship(
        "Accessibility",
        secondary= with_schema("treladdressaccessibility"),
        uselist=False # one-to-one
    )
    org = relationship(
        "Org",
        secondary = with_schema("org_address_rel"),
        uselist = False # Org-to-Address is one-to-many
    )
class Accessibility(Base):
    """Lookup of accessibility features for an address."""
    __tablename__ = "tlkpaccessibility"
    # Columns
    id = Column(Integer, primary_key=True)
    name = Column(String(100), nullable=False)
class AddressAccessibility(Base):
    """Link table joining an address to its accessibility entry."""
    __tablename__ = "treladdressaccessibility"
    # Columns
    addressid = Column(Integer, ForeignKey("tbladdress.id"), primary_key=True)
    accessibilityid = Column(Integer, ForeignKey("tlkpaccessibility.id"), nullable=False)
class CommType(Base):
    """Lookup of communication channel kinds (phone, fax, email, web, ...)."""
    __tablename__ = "tlkpcommtype"
    # Columns
    id = Column(Integer, primary_key=True)
    name = Column(String(50), nullable=False, unique=True)
class Comm(Base):
    """A single communication channel value (phone number, email, URL, ...).

    The check constraint validates the value's format per channel type id.
    NOTE(review): type id 2 appears in both the phone-format branch and the
    '911' branch — confirm the intended type-id ↔ format mapping.
    """
    __tablename__ = "tblcomm"
    __table_args__ = (
        CheckConstraint("""
            (commtypeid in (1, 2, 3, 5, 6) and value ~* '[0-9][0-9][0-9]-[0-9][0-9][0-9][0-9]')
            or
            (commtypeid = 2 and value = '911')
            or
            (commtypeid = 4 and value like '_%@%.%')
            or
            (commtypeid = 7 and value like '%.__%')
            or
            commtypeid > 7
            """,
            name = "tblcomm_check"
        ),
    )
    # Columns
    id = Column(Integer, primary_key=True)
    commtypeid = Column(Integer, ForeignKey("tlkpcommtype.id"), nullable=False)
    value = Column(String(255), nullable=False, index=True)
    comment = Column(Text)
    # Relationships
    type = relationship("CommType") # many-to-one
    org = relationship(
        "Org",
        secondary = with_schema("org_comm_rel"),
        uselist = False # Org-to-Comm is one-to-many
    )
class Contact(Base):
    """A named contact person attached to an organization."""
    __tablename__ = "tblcontact"
    # Columns
    id = Column(Integer, primary_key=True)
    name = Column(String(60))
    title = Column(String(120))
    # NOTE(review): this `org` Column is immediately shadowed by the `org`
    # relationship below, so declarative never maps the column — confirm
    # which attribute is intended and rename one of them.
    org = Column(String(90))
    comm = Column(Text)
    contacttype = Column(Integer, default=0, index=True)
    # Relationships
    org = relationship(
        "Org",
        secondary = with_schema("org_contact_rel"),
        uselist = False # Org-to-Contact is one-to-many
    )
    comms = relationship(
        "Comm",
        secondary = with_schema("contact_comm")
    )
class Service(Base):
    """Service details for an organization (descriptions, fees, hours, etc.)."""
    __tablename__ = "tblservice"
    # Columns
    id = Column(Integer, primary_key=True)
    description = Column(Text)
    eligibility = Column(Text)
    info = Column(Text)
    fees = Column(Text)
    hours = Column(Text)
    dates = Column(Text)
    application = Column(Text)
    updated = Column(DateTime)
    ciocdescription = Column(Text)
    cioceligibility = Column(Text)
    ciocapplication = Column(Text)
    # Relationships
    language = relationship(
        "Language",
        secondary = with_schema("trelservicelanguage"),
        uselist = False # one-to-one
    )
    area = relationship(
        "Area",
        secondary = with_schema("trelservicearea"),
        uselist = False # one-to-one
    )
    org = relationship(
        "Org",
        secondary = with_schema("org_service_rel"),
        uselist = False # Org-to-Service is one-to-one
    )
class Language(Base):
    """Lookup of languages a service is offered in."""
    __tablename__ = "tlkplanguage"
    # Columns
    id = Column(Integer, primary_key=True)
    name = Column(Text, nullable=False)
class ServiceLanguage(Base):
    """Link table joining a service to a language."""
    __tablename__ = "trelservicelanguage"
    __table_args__ = (
        PrimaryKeyConstraint("serviceid", "languageid"),
    )
    # Columns
    serviceid = Column(Integer, ForeignKey("tblservice.id"), nullable=False)
    languageid = Column(Integer, ForeignKey("tlkplanguage.id"), nullable=False)
class Area(Base): # see also Areas for area
    """Lookup of service areas (flat; distinct from the hierarchical Areas)."""
    __tablename__ = "tlkparea"
    # Columns
    id = Column(Integer, primary_key=True)
    name = Column(Text, nullable=False)
class ServiceArea(Base):
    """Link table joining a service to an area."""
    __tablename__ = "trelservicearea"
    __table_args__ = (
        PrimaryKeyConstraint("serviceid", "areaid"),
    )
    # Columns
    serviceid = Column(Integer, ForeignKey("tblservice.id"), nullable=False)
    areaid = Column(Integer, ForeignKey("tlkparea.id"), nullable=False)
class OrgName(Base):
    """A (possibly hierarchical) organization name with type and sort keys."""
    __tablename__ = "tblorgname"
    # Columns
    id = Column(Integer, primary_key=True)
    orgnametypeid = Column(Integer, ForeignKey("tlkporgnametype.id"), nullable=False)
    name = Column(String(100), nullable=False, index=True)
    # Self-reference: sub-names point at their parent name.
    parentid = Column(Integer, ForeignKey("tblorgname.id"))
    level = Column(Integer)
    sort = Column(String(100), index=True)
    sort_key = Column(String(100), index=True)
    added = Column(DateTime, default=func.now())
    # Relationships
    type = relationship("OrgNameType") # many-to-one
    org = relationship(
        "Org",
        secondary = with_schema("org_names"),
        back_populates = "names",
        uselist = False # Org-to-Orgname is one-to-many
    )
class OrgNameType(Base):
    """Lookup of organization-name kinds (official, alternate, ...)."""
    __tablename__ = "tlkporgnametype"
    # Columns
    id = Column(Integer, primary_key=True)
    type = Column(String(20), nullable=False)
class OrgNames(Base):
    """Association between an Org and one of its OrgName rows."""
    __tablename__ = "org_names"
    __table_args__ = (
        UniqueConstraint("org_id", "org_name_id"),
        Index("org_names_org_name_id_org_id_index", "org_name_id", "org_id")
    )
    # Columns
    id = Column(Integer, primary_key=True)
    org_id = Column(Integer, ForeignKey("org.id"), nullable=False, index=True)
    org_name_id = Column(Integer, ForeignKey("tblorgname.id"), nullable=False, index=True)
    added = Column(DateTime, default=func.now())
    # Relationships
    name = relationship("OrgName") # many-to-one
class Org(Base):
    """Central organization record tying together names, comms, addresses,
    contacts, service details, publications, thesaurus/taxonomy links, notes,
    update history and InformCanada (IC) records."""
    __tablename__ = "org"
    # Columns
    id = Column(Integer, primary_key=True)
    org_name_id = Column(Integer, ForeignKey("tblorgname.id"), nullable=False)
    update_note = Column(Text)
    cic_id = Column(String(7), nullable=False, unique=True)
    updated = Column(DateTime, default=func.now())
    service_level = Column(String(60), nullable=False)
    created = Column(DateTime, nullable=False, default=func.now(), index=True)
    isactive = Column(Boolean, nullable=False, default=True, index=True)
    iscomplete = Column(Boolean, nullable=False, default=False, index=True)
    modified = Column(DateTime)
    # Four-digit founding year.
    established = Column(
        String(4),
        CheckConstraint(
            "established ~* '[1-2][0-9][0-9][0-9]'",
            name = "org_established_check"
        )
    )
    # CRA charitable business number: 9 digits + "RR" + 4 digits.
    bn = Column(
        String(15),
        CheckConstraint(
            "bn ~* '[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]RR[0-9][0-9][0-9][0-9]'",
            name = "org_bn_check"
        )
    )
    deleted = Column(DateTime)
    # Relationships
    names = relationship( # official names, one-to-many
        "OrgName",
        secondary = with_schema("org_names"),
        back_populates = "org",
        primaryjoin = "and_(Org.id == OrgNames.org_id, OrgName.orgnametypeid == 1)",
        order_by = "OrgName.level"
    )
    alt_names = relationship( # one-to-many
        "OrgName",
        # Consistency fix: was the hard-coded literal "tempdb.org_names";
        # with_schema() produces the same qualified name today and tracks the
        # schema configured at the top of this module (see "to do" there).
        secondary = with_schema("org_names"),
        back_populates = "org",
        primaryjoin = "and_(Org.id == OrgNames.org_id, OrgName.orgnametypeid != 1)"
    )
    comms = relationship( # one-to-many
        "Comm",
        secondary = with_schema("org_comm_rel"),
        back_populates = "org"
    )
    addresses = relationship( # one-to-many
        "Address",
        secondary = with_schema("org_address_rel"),
        back_populates = "org"
    )
    contacts = relationship( # one-to-many
        "Contact",
        secondary = with_schema("org_contact_rel"),
        back_populates = "org"
    )
    service = relationship(
        "Service",
        secondary = with_schema("org_service_rel"),
        uselist = False # Org-to-Service is one-to-one
    )
    # http://docs.sqlalchemy.org/en/rel_1_0/orm/basic_relationships.html#association-object
    pubs = relationship("PubOrg") # one-to-many
    thes_all = relationship( # many-to-many
        "Thesaurus",
        secondary = with_schema("org_thes"),
        secondaryjoin = "OrgThes.thes_id == Thesaurus.id"
    )
    thes_official = relationship( # many-to-many
        "Thesaurus",
        secondary = with_schema("org_thes"),
        secondaryjoin = "and_(OrgThes.thes_id == Thesaurus.id, OrgThes.thes_id == OrgThes.official_id)"
    )
    notes = relationship("OrgNotes") # one-to-many
    updates = relationship("OrgUpdated") # one-to-many
    taxonomy_links = relationship(
        "TaxLinkNote",
        secondary = with_schema("orgtaxlink")
    )
    ic_agency = relationship( # one-to-one
        "ICAgency",
        uselist = False,
        back_populates = "org"
    )
    ic_site = relationship( # one-to-one
        "ICSite",
        uselist = False,
        back_populates = "org"
    )
    ic_service = relationship( # one-to-one
        "ICService",
        uselist = False,
        back_populates = "org"
    )
    sites = relationship("OrgSite") # one-to-many
class OrgComm(Base):
    """Association between an Org and one of its Comm channels."""
    __tablename__ = "org_comm_rel"
    # Columns
    id = Column(Integer, primary_key=True)
    org_id = Column(Integer, ForeignKey("org.id"), nullable=False)
    comm_id = Column(Integer, ForeignKey("tblcomm.id"), nullable=False)
    added = Column(DateTime, nullable=False, default=func.now())
    note = Column(Text)
class OrgAddress(Base):
    """Association between an Org and one of its Address rows."""
    __tablename__ = "org_address_rel"
    # Columns
    id = Column(Integer, primary_key=True)
    org_id = Column(Integer, ForeignKey("org.id"), nullable=False)
    address_id = Column(Integer, ForeignKey("tbladdress.id"), nullable=False)
    added = Column(DateTime, nullable=False, default=func.now())
    note = Column(String(100))
    label = Column(String(50))
class OrgContact(Base):
    """Association between an Org and one of its Contact rows."""
    __tablename__ = "org_contact_rel"
    # Columns
    id = Column(Integer, primary_key=True)
    org_id = Column(Integer, ForeignKey("org.id"), nullable=False)
    contact_id = Column(Integer, ForeignKey("tblcontact.id"), nullable=False)
    added = Column(DateTime, nullable=False, default=func.now())
    note = Column(Text)
class OrgRelatedDeletions(Base):
    """Audit of deleted org-to-related-row links; table_id says which
    association table the deleted link came from (no FKs kept on purpose)."""
    __tablename__ = "org_rel_del"
    # Columns
    id = Column(Integer, primary_key=True)
    org_id = Column(Integer, nullable=False)
    rel_id = Column(Integer, nullable=False)
    added = Column(DateTime, nullable=False)
    note = Column(Text)
    deleted = Column(DateTime, nullable=False)
    table_id = Column(Integer, nullable=False)
class OrgService(Base):
    """Association between an Org and its Service row."""
    __tablename__ = "org_service_rel"
    # Columns
    id = Column(Integer, primary_key=True)
    org_id = Column(Integer, ForeignKey("org.id"), nullable=False)
    service_id = Column(Integer, ForeignKey("tblservice.id"), nullable=False)
    added = Column(DateTime, nullable=False, default=func.now())
    note = Column(Text)
class OrgDeletions(Base):
    """Snapshot of deleted Org rows (no FKs so originals can be purged)."""
    __tablename__ = "org_del"
    # Columns
    id = Column(Integer, primary_key=True)
    org_name_id = Column(Integer, nullable=False)
    update_note = Column(Text)
    cic_id = Column(String(7), nullable=False, unique=True)
    updated = Column(DateTime)
    service_level = Column(String(60))
class PubOrg(Base):
    """Association object listing an Org in a Pub, with per-listing state."""
    __tablename__ = "pub_org"
    __table_args__ = (
        UniqueConstraint("pub_id", "org_id"),
    )
    # Columns
    id = Column(Integer, primary_key=True)
    pub_id = Column(Integer, ForeignKey("pub.id"), nullable=False)
    org_id = Column(Integer, ForeignKey("org.id"), nullable=False)
    added = Column(DateTime, nullable=False, default=func.now())
    org_contact_id = Column(
        Integer,
        # SQLAlchemy defaults to "on delete set null"?
        ForeignKey("org_contact_rel.id", ondelete="set null")
    )
    deleted = Column(DateTime)
    isactive = Column(Boolean, nullable=False, default=True)
    xml = Column(Text)
    # Relationships
    contact = relationship(
        "Contact",
        secondary = with_schema("org_contact_rel"),
        uselist = False # PubOrg-to-Contact is one-to-one
    )
    # http://docs.sqlalchemy.org/en/rel_1_0/orm/basic_relationships.html#association-object
    pub = relationship("Pub") # many-to-one
class Thesaurus(Base):
    """Primary thesaurus term with its typed relations (uf/rt/bt) to others."""
    __tablename__ = "thes_original"
    # Columns
    id = Column(Integer, primary_key=True)
    de = Column(String(100), nullable = False, unique=True)
    use = Column(String(100))
    woo = Column(String(1))
    eq = Column(String(100))
    uf = Column(Text)
    sn = Column(Text)
    bt = Column(String(100))
    nt = Column(Text)
    rt = Column(String(150))
    ca = Column(String(50))
    input = Column(String(50))
    act = Column(String(10), nullable=False)
    msg = Column(String(50))
    cr = Column(String(50))
    up = Column(String(50))
    sort = Column(String(100))
    comments = Column(Text)
    # Relationships
    relations = relationship( # one-to-many
        "ThesRel",
        primaryjoin = "Thesaurus.id == ThesRel.thes_id"
    )
    used_fors = relationship( # one-to-many
        "ThesRel",
        primaryjoin = "and_(Thesaurus.id == ThesRel.thes_id, ThesRel.rel_type == 'uf')"
    )
    see_alsos = relationship( # one-to-many
        "ThesRel",
        primaryjoin = "and_(Thesaurus.id == ThesRel.thes_id, ThesRel.rel_type == 'rt')"
    )
    broader_terms = relationship( # one-to-many but not often
        "ThesRel",
        primaryjoin = "and_(Thesaurus.id == ThesRel.thes_id, ThesRel.rel_type == 'bt')"
    )
class ThesRel(Base):
    """Typed relation (rel_type) from one Thesaurus term to another."""
    __tablename__ = "thes_rel"
    # Columns
    id = Column(Integer, primary_key=True)
    thes_id = Column(Integer, ForeignKey("thes_original.id"), nullable=False)
    rel_id = Column(Integer, ForeignKey("thes_original.id"), nullable=False)
    rel_type = Column(String(2), nullable=False, index=True)
    ca = Column(Integer, ForeignKey("thes_cat.id"))
    sort_key = Column(String(100))
    comments = Column(Text)
    # Relationships
    related = relationship( # many-to-one
        "Thesaurus",
        primaryjoin = "ThesRel.rel_id == Thesaurus.id"
    )
class OrgThes(Base):
    """Association of an Org with a thesaurus term and its official term."""
    __tablename__ = "org_thes"
    __table_args__ = (
        UniqueConstraint("org_id", "thes_id", "official_id"),
    )
    # Columns
    id = Column(Integer, primary_key=True)
    org_id = Column(Integer, ForeignKey("org.id"), nullable=False)
    thes_id = Column(Integer, ForeignKey("thes_original.id"), nullable=False)
    official_id = Column(Integer, ForeignKey("thes_original.id"), nullable=False)
class PubEntry(Base):
    """Entry number assigned to a PubOrg listing for a given publication year."""
    __tablename__ = "pub_entry"
    __table_args__ = (
        UniqueConstraint("pub_org_id", "pub_year"),
        Index("pub_entry_pub_year_entry_index", "pub_year", "entry")
    )
    # Columns
    id = Column(Integer, primary_key=True)
    pub_org_id = Column(Integer, ForeignKey("pub_org.id"), nullable=False)
    entry = Column(Integer, nullable=False)
    pub_year = Column(
        Integer,
        CheckConstraint(
            "pub_year > 2000",
            name = "pub_entry_pub_year_check"
        ),
        nullable = False
    )
class Areas(Base): # see also Area for tlkparea
    """Hierarchical geographic area: each row may be located in a parent area."""
    __tablename__ = "area"
    __table_args__ = (
        UniqueConstraint("name", "locatedin"),
    )
    # Columns
    id = Column(Integer, primary_key=True)
    name = Column(String(255), nullable=False)
    locatedin = Column(Integer, ForeignKey("area.id"))
    # Bug fix: was `alt = String(255)` — a bare type object that declarative
    # never maps; wrapping it in Column() actually creates the alt column.
    alt = Column(String(255))
    # Relationships
    surrounds = relationship("Areas") # one-to-many
    surrounded_by = relationship("Areas", remote_side=[id]) # many-to-one
class Taxonomy(Base):
    """Hierarchical taxonomy term (AIRS-style coded classification)."""
    __tablename__ = "taxonomy"
    # Columns
    id = Column(Integer, primary_key=True)
    name = Column(String(100), nullable=False, index=True)
    code = Column(String(19), unique=True)
    ispreferred = Column(Boolean, nullable=False)
    definition = Column(Text)
    created = Column(Date)
    modified = Column(Date, index=True)
    parentid = Column(Integer, ForeignKey("taxonomy.id"))
    cicmodified = Column(DateTime)
    # Relationships
    relations = relationship( # one-to-many
        "TaxRel",
        primaryjoin = "Taxonomy.id == TaxRel.taxid"
    )
class TaxRel(Base):
    """Typed relation (reltype) from one Taxonomy term to another."""
    __tablename__ = "taxrel"
    __table_args__ = (
        UniqueConstraint("taxid", "relid"),
    )
    # Columns
    id = Column(Integer, primary_key=True)
    taxid = Column(Integer, ForeignKey("taxonomy.id"), nullable=False)
    relid = Column(Integer, ForeignKey("taxonomy.id"), nullable=False)
    reltype = Column(String(2), nullable=False)
    # Relationships
    related = relationship( # one-to-one
        "Taxonomy",
        primaryjoin = "TaxRel.relid == Taxonomy.id"
    )
class Locations(Base): # same as tempdb.area/Areas?
    """Hierarchical location lookup with separate sort/display names."""
    __tablename__ = "locations"
    # Columns
    id = Column(Integer, primary_key=True)
    officialname = Column(String(100), nullable=False)
    locatedin = Column(Integer, ForeignKey("locations.id"))
    sortas = Column(String(100))
    displayas = Column(String(100))
class PubGroupName(Base):
    """Lookup of publication-group names."""
    __tablename__ = "pubgroupname"
    # Columns
    id = Column(Integer, primary_key=True)
    groupname = Column(String(50), nullable=False)
class PubGroup(Base):
    """Association between a Pub and a publication group."""
    __tablename__ = "pubgroup"
    __table_args__ = (
        UniqueConstraint("pubid", "groupid"),
    )
    # Columns
    id = Column(Integer, primary_key=True)
    pubid = Column(Integer, ForeignKey("pub.id"), nullable=False)
    groupid = Column(Integer, ForeignKey("pubgroupname.id"), nullable=False)
class OrgNotes(Base):
    """Typed note attached to an Org, with visibility and optional alert date."""
    __tablename__ = "orgnotes"
    # Columns
    id = Column(Integer, primary_key=True)
    orgid = Column(Integer, ForeignKey("org.id"), nullable=False)
    notetype = Column(Integer, ForeignKey("orgnotetypes.id"), nullable=False)
    note = Column(Text, nullable=False)
    added = Column(DateTime, nullable=False, default=func.now())
    modified = Column(DateTime)
    isactive = Column(Boolean, nullable=False, default=True)
    ispublic = Column(Boolean, nullable=False, default=True)
    alertdate = Column(Date)
    # Relationships
    type = relationship("OrgNoteTypes") # many-to-one
class OrgNoteTypes(Base):
    """Lookup of note kinds for OrgNotes."""
    __tablename__ = "orgnotetypes"
    # Columns
    id = Column(Integer, primary_key=True)
    value = Column(String(30), nullable=False)
class PubThes(Base):
    """Association between a Pub and a thesaurus term."""
    __tablename__ = "pubthes"
    __table_args__ = (
        UniqueConstraint("pubid", "thesid"),
    )
    # Columns
    id = Column(Integer, primary_key=True)
    pubid = Column(Integer, ForeignKey("pub.id"), nullable=False)
    thesid = Column(Integer, ForeignKey("thes_original.id"), nullable=False)
    isactive = Column(Boolean, nullable=False, default=True)
class TaxGroups(Base):
    """Membership of a taxonomy term in a numbered taxonomy group."""
    __tablename__ = "taxgroups"
    __table_args__ = (
        UniqueConstraint("taxgroup", "taxid"),
    )
    # Columns
    id = Column(Integer, primary_key=True)
    taxgroup = Column(Integer, nullable=False)
    taxid = Column(Integer, ForeignKey("taxonomy.id"), nullable=False)
    isactive = Column(Boolean, nullable=False)
    haschildren = Column(Boolean, nullable=False)
    added = Column(DateTime, nullable=False, default=func.now())
    islocal = Column(Boolean, nullable=False, default=False)
    modified = Column(DateTime)
class TempTaxGroup(Base):
    """Staging table for taxonomy-group membership by code (import scratch)."""
    __tablename__ = "temptaxgroup"
    # SQLAlchemy needs a primary key
    __table_args__ = (
        PrimaryKeyConstraint("groupid", "taxcode"),
    )
    # Columns
    groupid = Column(Integer, nullable=False)
    taxcode = Column(String(13), nullable=False)
class TaxChanges(Base):
    """Imported taxonomy change log: old code/name mapped to new, with date."""
    __tablename__ = "taxchanges"
    # SQLAlchemy needs a primary key
    __table_args__ = (
        PrimaryKeyConstraint("changetype", "oldcode", "newcode"),
    )
    # Columns
    changetype = Column(Integer, nullable=False)
    oldcode = Column(String(13), nullable=False)
    newcode = Column(String(13), nullable=False)
    oldname = Column(String(60), nullable=False)
    newname = Column(String(60), nullable=False)
    # US-format date string as supplied by the change feed.
    dateus = Column(String(10), nullable=False)
class OrgUpdated(Base):
    """History of update timestamps for an Org."""
    __tablename__ = "orgupdated"
    __table_args__ = (
        UniqueConstraint("orgid", "updated"),
    )
    # Columns
    id = Column(Integer, primary_key=True)
    orgid = Column(Integer, ForeignKey("org.id"), nullable=False)
    updated = Column(DateTime, nullable=False)
class TaxLink(Base):
    """Association between a TaxLinkNote and a Taxonomy term."""
    __tablename__ = "taxlink"
    __table_args__ = (
        UniqueConstraint("linkid", "taxid"),
    )
    # Columns
    id = Column(Integer, primary_key=True)
    linkid = Column(Integer, ForeignKey("taxlinknote.id"), nullable=False)
    taxid = Column(Integer, ForeignKey("taxonomy.id"), nullable=False)
class OrgTaxLink(Base):
    """Association between an Org and a TaxLinkNote."""
    __tablename__ = "orgtaxlink"
    __table_args__ = (
        UniqueConstraint("orgid", "linkid"),
    )
    # Columns
    id = Column(Integer, primary_key=True)
    orgid = Column(Integer, ForeignKey("org.id"), nullable=False)
    linkid = Column(Integer, ForeignKey("taxlinknote.id"), nullable=False)
    added = Column(DateTime, default=func.now())
class TaxLinkNote(Base):
    """Free-text note attached, many-to-many via the taxlink table, to Taxonomy terms."""
    __tablename__ = "taxlinknote"
    # Columns
    id = Column(Integer, primary_key=True)
    note = Column(Text, nullable=False)
    # Relationships
    taxonomy = relationship( # many-to-many
        "Taxonomy",
        secondary = with_schema("taxlink")
    )
class Cioc(Base):
    """Links a pub (pid, typed by ptype) to a ciocexport run (xid); unique per triple."""
    __tablename__ = "cioc"
    __table_args__ = (
        UniqueConstraint("xid", "ptype", "pid"),
    )
    # Columns
    id = Column(Integer, primary_key=True)
    pid = Column(Integer, ForeignKey("pub.id"), nullable=False)
    ptype = Column(Integer, nullable=False)
    xid = Column(Integer, ForeignKey("ciocexport.id"), nullable=False)
class CiocExport(Base):
    """A CIOC export run: optional updated timestamp plus required free-text notes."""
    __tablename__ = "ciocexport"
    # Columns
    id = Column(Integer, primary_key=True)
    updated = Column(DateTime)
    notes = Column(Text, nullable=False)
class TaxRelTemp(Base):
    """Temp import row for a taxonomy relationship: taxcode -> relcode, 2-char reltype."""
    __tablename__ = "taxreltemp"
    # Columns
    id = Column(Integer, primary_key=True)
    taxcode = Column(String(19), nullable=False)
    relcode = Column(String(19), nullable=False)
    reltype = Column(String(2), nullable=False)
class TempTaxNames(Base):
    """Temp import row for taxonomy names: code/name pair, preferred flag, release tag."""
    __tablename__ = "temptaxnames"
    # SQLAlchemy needs a primary key
    __table_args__ = (
        PrimaryKeyConstraint("code", "name"),
    )
    # Columns
    code = Column(String(19), nullable=False, index=True)
    name = Column(String(100), nullable=False)
    ispreferred = Column(Boolean, nullable=False)
    release = Column(Text)
class TempTaxAlso(Base):
    """Temp import row for a taxonomy 'see also' cross-reference (code -> see)."""
    __tablename__ = "temptaxalso"
    # SQLAlchemy needs a primary key
    __table_args__ = (
        PrimaryKeyConstraint("code", "see"),
    )
    # Columns
    code = Column(String(19), nullable=False, index=True)
    see = Column(String(19), nullable=False, index=True)
    release = Column(Text)
class TempTaxOld(Base):
    """Temp import row mapping a current taxonomy code to a superseded (old) code."""
    __tablename__ = "temptaxold"
    # SQLAlchemy needs a primary key
    __table_args__ = (
        PrimaryKeyConstraint("code", "old"),
    )
    # Columns
    code = Column(String(19), nullable=False, index=True)
    old = Column(String(19), nullable=False, index=True)
    release = Column(Text)
class TempTaxDetails(Base):
    """Temp import row carrying a taxonomy term's definition and created/modified dates."""
    __tablename__ = "temptaxdetails"
    # Columns
    code = Column(String(19), primary_key=True) # SQLAlchemy needs a primary key
    definition = Column(Text, nullable=False)
    created = Column(Date, nullable=False)
    modified = Column(Date, nullable=False)
    release = Column(Text)
class PubTax(Base):
    """Association row linking a pub to a taxonomy link note (unique per pair)."""
    __tablename__ = "pubtax"
    __table_args__ = (
        UniqueConstraint("pubid", "taxid"),
    )
    # Columns
    id = Column(Integer, primary_key=True)
    pubid = Column(Integer, ForeignKey("pub.id"), nullable=False)
    # NOTE(review): despite the name, this FK targets taxlinknote.id, not taxonomy.id.
    taxid = Column(Integer, ForeignKey("taxlinknote.id"), nullable=False)
    added = Column(DateTime, nullable=False, default=func.now())
class ICAgency(Base):
    """Agency record tied one-to-one to an Org (orgid is unique); parent of ICSite rows."""
    __tablename__ = "ic_agencies"
    # Columns
    id = Column(Integer, primary_key=True)
    orgid = Column(Integer, ForeignKey("org.id"), nullable=False, unique=True)
    cnd = Column(String(8))
    name_1 = Column(String(100))
    name_level_1 = Column(Integer)
    name_2 = Column(String(100))
    name_level_2 = Column(Integer)
    # Relationships
    org = relationship( # one-to-one
        "Org",
        back_populates = "ic_agency"
    )
    sites = relationship( # one-to-many
        "ICSite",
        back_populates = "agency"
    )
class ICSite(Base):
    """A site belonging to an ICAgency; also tied one-to-one to an Org row.

    Note: siteid is a foreign key into org.id, not into a site table.
    """
    __tablename__ = "ic_agency_sites"
    __table_args__ = (
        UniqueConstraint("agencyid", "siteid"),
    )
    # Columns
    id = Column(Integer, primary_key=True)
    agencyid = Column(Integer, ForeignKey("ic_agencies.id"), nullable=False)
    siteid = Column(Integer, ForeignKey("org.id"), nullable=False)
    cnd = Column(String(8))
    site_name = Column(String(200), nullable=False) # added nullable=False
    site_name_level = Column(Integer)
    site_name_other = Column(String(3))
    # Relationships
    agency = relationship( # many-to-one
        "ICAgency",
        back_populates = "sites"
    )
    services = relationship( # one-to-many
        "ICService",
        back_populates = "site"
    )
    org = relationship( # one-to-one
        "Org",
        back_populates = "ic_site"
    )
class ICService(Base):
    """A service offered at an ICSite; serviceid is a foreign key into org.id."""
    __tablename__ = "ic_site_services"
    __table_args__ = (
        UniqueConstraint("siteid", "serviceid"),
    )
    # Columns
    id = Column(Integer, primary_key=True)
    siteid = Column(Integer, ForeignKey("ic_agency_sites.id"), nullable=False)
    serviceid = Column(Integer, ForeignKey("org.id"), nullable=False)
    service_name_1 = Column(String(200))
    service_name_2 = Column(String(200))
    # Relationships
    site = relationship( # many-to-one
        "ICSite",
        back_populates = "services"
    )
    org = relationship( # one-to-one
        "Org",
        back_populates = "ic_service"
    )
class PubTree(Base):
    """Adjacency row (id, parent) placing a pub in a publication tree at a given depth."""
    __tablename__ = "pub_tree"
    __table_args__ = (
        PrimaryKeyConstraint("id", "parent"),
    )
    # Columns
    id = Column(Integer, nullable=False, index=True)
    parent = Column(Integer, nullable=False, index=True) # why not a foreign key?
    pub = Column(Integer, ForeignKey("pub.id"), nullable=False, index=True) # rename to pub_id
    note = Column(Text)
    depth = Column(Integer, nullable=False)
    # Relationships
    publication = relationship("Pub") # many-to-one, rename to pub
class Site(Base):
    """A site keyed to a unique org_address_rel row; exposes its Address one-to-one."""
    __tablename__ = "site"
    # Columns
    id = Column(Integer, primary_key=True)
    org_address_id = Column(Integer, ForeignKey("org_address_rel.id"), nullable=False, unique=True)
    context_id = Column(Integer, nullable=False, default=1)
    code = Column(String(20))
    # Relationships
    address = relationship(
        "Address",
        secondary = with_schema("org_address_rel"),
        uselist = False # one-to-one: org_address_id is unique
    )
class OrgTree(Base):
    """Org hierarchy row linking an org to a parent tree node."""
    __tablename__ = "org_tree"
    # Columns
    id = Column(Integer, primary_key=True)
    org_id = Column(Integer, ForeignKey("org.id"), nullable=False)
    # NOTE(review): self-referential FK into org_tree.id -- confirm it is not meant to be org.id.
    super_id = Column(Integer, ForeignKey("org_tree.id"), nullable=False)
class OrgSite(Base):
    """Associates an org with a site under a label (unique triple), with optional
    name/note, a type code, and an optional OrgNames link via org_site_name."""
    __tablename__ = "org_site"
    __table_args__ = (
        UniqueConstraint("org_id", "site_id", "label"),
    )
    # Columns
    id = Column(Integer, primary_key=True)
    org_id = Column(Integer, ForeignKey("org.id"), nullable=False)
    site_id = Column(Integer, ForeignKey("site.id"), nullable=False)
    name = Column(String(100))
    note = Column(Text)
    label = Column(String(100))
    type = Column(Integer, nullable=False, default=3)
    # Relationships
    site = relationship("Site") # many-to-one
    org = relationship("Org") # many-to-one
    org_name = relationship(
        "OrgNames", # org_names
        secondary = with_schema("org_site_name"),
        uselist = False # one-to-one
    )
class OrgSiteName(Base):
    """Association row linking an org_site to an org_names entry."""
    __tablename__ = "org_site_name"
    # Columns
    id = Column(Integer, primary_key=True)
    org_site_id = Column(Integer, ForeignKey("org_site.id"), nullable=False)
    org_names_id = Column(Integer, ForeignKey("org_names.id"), nullable=False)
class OrgThesPub(Base):
    """Associates an org_thes term with a pub (unique pair), with an active flag."""
    __tablename__ = "org_thes_pub"
    __table_args__ = (
        UniqueConstraint("org_thes_id", "pub_id"),
    )
    # Columns
    id = Column(Integer, primary_key=True)
    org_thes_id = Column(Integer, ForeignKey("org_thes.id"), nullable=False)
    pub_id = Column(Integer, ForeignKey("pub.id"), nullable=False)
    is_active = Column(Boolean, nullable=False, default=True)
class TempTaxActive(Base):
    """Temp list of active taxonomy codes (single code column)."""
    __tablename__ = "temptaxactive"
    # Columns
    code = Column(String(25), primary_key=True) # SQLAlchemy needs a primary key
class TempCCAC(Base):
    """Temp CCAC import row keyed by extension; joined to Org via the org.cic_id string key."""
    __tablename__ = "tempccac"
    # Columns
    ext = Column(String(10), primary_key=True) # SQLAlchemy needs a primary key
    # Foreign key added for SQLAlchemy
    id = Column(String(10), ForeignKey("org.cic_id"), nullable=False)
    name = Column(String(200), nullable=False)
    # Relationships
    org = relationship("Org")
class ContactComm(Base):
    """Links a contact to a communication channel, with type/note and creation timestamp."""
    __tablename__ = "contact_comm"
    # Columns
    id = Column(Integer, primary_key=True)
    contact_id = Column(Integer, ForeignKey("tblcontact.id"), nullable=False)
    comm_id = Column(Integer, ForeignKey("tblcomm.id"), nullable=False)
    type = Column(Integer)
    note = Column(String(50))
    added = Column(DateTime, nullable=False, default=func.now())
class External(Base):
    """An external data source definition (name, field, cic mapping, note)."""
    __tablename__ = "external"
    # Columns
    id = Column(Integer, primary_key=True)
    name = Column(String(50), nullable=False)
    field = Column(String(50), nullable=False)
    cic = Column(String(50), nullable=False)
    note = Column(Text, nullable=False)
class ExternalData(Base):
    """A datum imported from an external source (typed via external.id), keyed by
    cic_id and the source's own external_id."""
    __tablename__ = "external_data"
    # Columns
    id = Column(Integer, primary_key=True)
    external_type = Column(Integer, ForeignKey("external.id"), nullable=False)
    cic_id = Column(Integer, nullable=False)
    data = Column(Text, nullable=False)
    external_id = Column(String(50), nullable=False)
|
{"/examples.py": ["/db/__init__.py", "/db/models/__init__.py"]}
|
10,553
|
ToddTurnbull/reload
|
refs/heads/master
|
/edit/__init__.py
|
from flask import Flask
from .context import Session
app = Flask(__name__)
import edit.views
# http://flask.pocoo.org/docs/0.10/patterns/sqlalchemy/
@app.teardown_appcontext
def shutdown_session(exception=None):
    # Called by Flask when the app context tears down: discard the scoped
    # session so the next request gets a fresh one.
    print("shutdown_session() says bye!")
    Session.remove()
|
{"/examples.py": ["/db/__init__.py", "/db/models/__init__.py"]}
|
10,554
|
ToddTurnbull/reload
|
refs/heads/master
|
/db/__init__.py
|
from contextlib import contextmanager
from functools import wraps
from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session
from sqlalchemy.orm import sessionmaker
import click
import json
import config
# Build the PostgreSQL connection URL from config.db and create the engine
# plus a thread-local session registry (scoped_session).
pg = "postgresql://{user}:{password}@localhost:{port}/{database}"
db = pg.format(**config.db)
engine = create_engine(db, echo=False)
session_factory = sessionmaker(bind=engine)
Session = scoped_session(session_factory)
# http://docs.sqlalchemy.org/en/rel_1_0/orm/session_basics.html
@contextmanager
def session_scope():
    """Provide a transactional scope around a series of operations."""
    click.echo("I am session_scope()")
    session = Session()
    try:
        yield session
        # Commit only when the wrapped block raised nothing.
        session.commit()
    except:
        # Bare except on purpose: roll back on *any* failure, then re-raise.
        session.rollback()
        raise
    finally:
        click.echo("Closing session")
        session.close()
def transactional(query_function):
    """
    Decorate a function to use session_scope()
    query_function has only named arguments, including "session"
    """
    # Decoration-time echo: runs once per decorated function, at import.
    click.echo("I am transactional({})".format(query_function.__name__))
    @wraps(query_function)
    def wrapper(**kwargs):
        # Per-call echoes, then run the query inside a commit/rollback scope.
        click.echo("I am transactional.wrapper({})".format(query_function.__name__))
        click.echo(query_function.__doc__)
        with session_scope() as session:
            return query_function(session=session, **kwargs)
    return wrapper
def jsonify(dict_function):
    """Decorate a function to return JSON instead of a dict"""
    # Decoration-time echo: runs once, not per call.
    click.echo("I am jsonify()")
    @wraps(dict_function)
    def wrapper(*args, **kwargs):
        dict_ = dict_function(*args, **kwargs)
        # Pretty-printed; insertion order preserved (sort_keys=False).
        return json.dumps(dict_, sort_keys=False, indent=2)
    return wrapper
|
{"/examples.py": ["/db/__init__.py", "/db/models/__init__.py"]}
|
10,555
|
ToddTurnbull/reload
|
refs/heads/master
|
/examples.py
|
from db import *
from db.models import *
from collections import OrderedDict
import click
@transactional
def address(session=None, address_id=5571):
    """Test joining tbladdress to tlkpaddressaccessibility"""
    # .one() raises if the id is missing or duplicated.
    address = session.query(Address).filter_by(id=address_id).one()
    return_value = "Address {} is '{}'".format(address_id, address.access.name)
    print("Return value should be: {}".format(address.access.name))
    return return_value
@transactional
@jsonify
def test_org(session=None, org_id="WRN2000"):
    """Test joining org to: names, contacts, publications, addresses, etc"""
    org = session.query(Org).filter_by(cic_id=org_id).one()
    # OrderedDict so the JSON output keeps this section order.
    return OrderedDict([
        ("Names", [name.name for name in org.names]),
        ("Alternate Names", [name.name for name in org.alt_names]),
        ("Contacts", [(contact.name, len(contact.comms)) for contact in org.contacts]),
        ("Publications/Contacts", [
            # A pub link may have no contact attached.
            (pub.pub.title, pub.contact.name) if pub.contact
            else (pub.pub.title, None)
            for pub in org.pubs
        ]),
        ("Postal codes", [address.postalcode for address in org.addresses]),
        ("Service Description", org.service.description),
        ("Thesaurus Terms", [thes.de for thes in org.thes_official]),
        ("Notes", [note.note for note in org.notes]),
        ("Update History", [str(update.updated) for update in org.updates]),
        ("Taxonomy", [
            {link.note: [tax.name for tax in link.taxonomy]}
            for link in org.taxonomy_links
        ]),
        ("Agency", "Is an agency" if org.ic_agency else "Is not an agency")
    ])
@transactional
@jsonify
def test_thesaurus(session=None, thes_id=0):
    """Test joining thesaurus term to its related terms"""
    thes = session.query(Thesaurus).filter_by(id=thes_id).one()
    # Each list walks a different relationship collection on Thesaurus.
    return OrderedDict([
        ("Term", thes.de),
        ("Related", [(rel.rel_type, rel.related.de) for rel in thes.relations]),
        ("Used for", [uf.related.de for uf in thes.used_fors]),
        ("See also", [sa.related.de for sa in thes.see_alsos]),
        ("Broader terms", [bt.related.de for bt in thes.broader_terms])
    ])
@transactional
@jsonify
def test_taxonomy(session=None, code="BD"):
    """Test joining taxonomy term to its related terms"""
    tax = session.query(Taxonomy).filter_by(code=code).one()
    return OrderedDict([
        ("Term", tax.name),
        ("Related", [
            (rel.reltype, rel.related.code, rel.related.name)
            for rel in tax.relations
        ])
    ])
@transactional
@jsonify
def test_pub(session=None, pub_id=527):
    """Test joining publication to its taxonomy terms"""
    pub = session.query(Pub).filter_by(id=pub_id).one()
    return OrderedDict([
        ("Title", pub.title),
        # pub.taxonomy yields TaxLinkNote rows; .note is the link's free text.
        ("Taxonomy", [tax.note for tax in pub.taxonomy])
    ])
@transactional
@jsonify
def test_agency(session=None, agency_id=1214):
    """Test joining agency to its org, sites, services"""
    agency = session.query(ICAgency).filter_by(id=agency_id).one()
    return OrderedDict([
        ("Agency", agency.id),
        ("Org", [name.name for name in agency.org.names]),
        ("Sites", [site.site_name for site in agency.sites]),
        # Nested walk: each site with its service name pairs.
        ("Services", [
            (
                site.site_name,
                [(service.service_name_1, service.service_name_2) for service in site.services]
            )
            for site in agency.sites
        ])
    ])
@transactional
@jsonify
def test_site(session=None, site_id=89):
    """Test joining site to its address"""
    site = session.query(Site).filter_by(id=site_id).one()
    return OrderedDict([
        ("Site", site.id),
        ("Address", (site.address.address, site.address.city))
    ])
@transactional
@jsonify
def test_org_site(session=None, org_id="WRN5575"):
    """List sites for an org record"""
    org = session.query(Org).filter_by(cic_id=org_id).one()
    return OrderedDict([
        ("Org", [name.name for name in org.names]),
        ("Sites", [OrderedDict([
            ("Label", site.label),
            ("Site Name", site.name),
            ("Site Address City", site.site.address.city),
            # org_name is an optional one-to-one link to an OrgNames row.
            ("Org Name", site.org_name.name.name if site.org_name else None)
        ]) for site in org.sites
        ])
    ])
if __name__ == "__main__":
    # Smoke-test each query helper against known record ids.
    print(address())
    print(test_org())
    print(test_thesaurus(thes_id=3))
    print(test_taxonomy(code="BD"))
    print(test_pub(pub_id=527))
    print(test_agency(agency_id=1214))
    print(test_site())
    print(test_org_site())
|
{"/examples.py": ["/db/__init__.py", "/db/models/__init__.py"]}
|
10,562
|
gordol/LawnCronPi
|
refs/heads/master
|
/pids.py
|
import configuration
import os
import json
import signal
import errno
def create_dirs():
    """Ensure the configured pid-file directory exists (no-op when unset)."""
    path = configuration.pid_files
    if path == '':
        return
    try:
        os.makedirs(path)
    except OSError as exc:  # Python >2.5
        # "Already exists" is fine only when it really is a directory.
        if not (exc.errno == errno.EEXIST and os.path.isdir(path)):
            raise
def create_pid_file_path(name):
    """Build the pid-file path for *name* under the configured pid directory."""
    base = configuration.pid_files
    return os.path.join(base, str(name))
def read_pid_file(file_path):
    """Load the JSON status file at *file_path*.

    Returns the parsed object, or False when no such file exists
    (callers compare against False explicitly).
    """
    if not os.path.isfile(file_path):
        return False
    # Context manager closes the handle even if parsing raises
    # (the original leaked the handle in that case).
    with open(file_path, 'r') as f:
        return json.load(f)
def status_file_exists(file_path):
    """Return True when a regular file exists at *file_path*."""
    exists = os.path.isfile(file_path)
    return exists
def create_status_file(file_path, schedule_zone, start, end):
    """Write a JSON status file recording this process's pid, zone and run window.

    *start*/*end* may be datetimes (or anything str()-able); they are stored
    as strings.
    """
    contents = {
        "pid": str(os.getpid()),
        "zone": schedule_zone,
        "start": str(start),
        "end": str(end)
    }
    # 'with' guarantees the file is flushed and closed even if the write fails
    # (the original left the handle open on error).
    with open(file_path, "w") as status_file:
        json.dump(contents, status_file)
def delete_status_file(file_path):
    """Remove the status file at *file_path* if present; otherwise do nothing."""
    # Inlined the existence check (same test status_file_exists performs).
    if os.path.isfile(file_path):
        os.remove(file_path)
def kill(pid):
    # Send SIGTERM to the given pid (accepts a numeric string or int).
    os.kill(int(pid), signal.SIGTERM)
create_dirs()
|
{"/pids.py": ["/configuration.py"], "/schedule.py": ["/configuration.py", "/logger.py"], "/logger.py": ["/configuration.py"], "/gpio.py": ["/logger.py", "/configuration.py"], "/valve_driver.py": ["/logger.py", "/gpio.py", "/pids.py"]}
|
10,563
|
gordol/LawnCronPi
|
refs/heads/master
|
/schedule.py
|
from crontab import CronTab
import configuration as conf
import logger
from datetime import datetime
import json
import pika
import subprocess
SCHEDULE = "schedule.py"
# TODO: Specifying this as a global means the service will have to be restarted if the cron file changes. This library
# TODO: holds the cronfile in memory, so if you delete one manually then the next time this library adds one, it'll
# TODO: undo your manual delete. Figure out if this if good behavior or not.
# Global cron file
cron_file = CronTab(tabfile="/etc/cron.d/lawn")
def get_driver_command(schedule_id, zone, duration):
    """Build the shell command line that runs the valve driver for one schedule.

    *duration* is a dict with 'hours'/'minutes' and is converted to seconds.
    """
    duration_in_secs = 60 * (60 * int(duration['hours']) + int(duration['minutes']))
    parts = (conf.python, conf.driver, schedule_id, zone, duration_in_secs)
    return " ".join(str(part) for part in parts)
def add(schedule_id, zone, duration, time, days):
    """Append a cron entry for this schedule to the shared cron file and log it.

    *time* is a dict with 'hours'/'minutes'; *days* is a non-empty list of
    day-of-week values accepted by python-crontab.
    """
    # Create the cron
    job = cron_file.new(comment=schedule_id, command="root " + get_driver_command(schedule_id, zone, duration))
    job.hour.on(time["hours"])
    job.minute.on(time["minutes"])
    # First day set with .on(); any further days appended with .also.on().
    job.dow.on(days[0])
    if len(days) > 1:
        for d in range(1, len(days)):
            job.dow.also.on(days[d])
    # Write to cron file
    cron_file.write()
    # Log
    pretty_time = str(time['hours']) + ":" + str(time['minutes'])
    logger.info(SCHEDULE, "Adding schedule {0} in zone {1} for {2} minutes starting at {3} on {4}" \
        .format(str(schedule_id), str(zone), str(duration), pretty_time, ", ".join(days)))
def delete(schedule_id):
    """Stop the schedule if it is running, then remove its cron entries and persist."""
    stop(schedule_id)
    cron_file.remove_all(comment=schedule_id)
    cron_file.write()
    logger.info(SCHEDULE, "Removing schedule " + schedule_id)
def update(schedule_id, zone, duration, time, days):
    """Replace a schedule's cron entry: delete (which also stops it) then re-add."""
    delete(schedule_id)
    add(schedule_id, zone, duration, time, days)
def play(schedule_id, zone, duration):
    """Run a schedule immediately by spawning the valve driver in the background."""
    logger.info(SCHEDULE, "Playing schedule " + schedule_id)
    cmd = get_driver_command(schedule_id, zone, duration)
    # Fire-and-forget: the driver process manages its own lifetime/pid file.
    subprocess.Popen(cmd.split(" "))
def stop(schedule_id):
    """Ask a running valve driver to stop via its per-schedule local RabbitMQ queue."""
    logger.info(SCHEDULE, "Stopping schedule " + schedule_id)
    message = json.dumps({'action': 'stop', 'ts': str(datetime.now())})
    logger.debug(SCHEDULE, "Sending: " + message + "To: " + schedule_id)
    # Local broker only: valve_driver.py consumes a queue named after the schedule id.
    local_connection = pika.BlockingConnection(pika.ConnectionParameters(host="localhost"))
    schedule_channel = local_connection.channel()
    schedule_channel.queue_declare(queue=schedule_id)
    schedule_channel.basic_publish(exchange='', routing_key=schedule_id, body=message)
    local_connection.close()
def refresh(schedules):
    """Rebuild the whole cron file from *schedules* (a list of schedule dicts)."""
    logger.info(SCHEDULE, "Refreshing cron file")
    # Wipe every entry first, then re-add each schedule from scratch.
    cron_file.remove_all()
    cron_file.write()
    for schedule in schedules:
        add(schedule["id"], schedule['zone'], schedule['duration'], schedule['time'], schedule['days'])
|
{"/pids.py": ["/configuration.py"], "/schedule.py": ["/configuration.py", "/logger.py"], "/logger.py": ["/configuration.py"], "/gpio.py": ["/logger.py", "/configuration.py"], "/valve_driver.py": ["/logger.py", "/gpio.py", "/pids.py"]}
|
10,564
|
gordol/LawnCronPi
|
refs/heads/master
|
/RMQSend.py
|
"""Send a single message to a named RabbitMQ queue.

Usage: python RMQSend.py <queue-name> <message>
"""
__author__ = 'zmiller'
import pika
import sys
import configuration

rpi = sys.argv[1]
message = sys.argv[2]

connection = pika.BlockingConnection(pika.ConnectionParameters(configuration.rmq_host))
channel = connection.channel()
channel.queue_declare(queue=rpi)
channel.basic_publish(exchange='',
                      routing_key=rpi,
                      body=message)

# Parenthesized single-argument print works under both Python 2 and 3.
print('Sent: ' + message + " To: " + rpi)

# 'with' guarantees the log file is closed even if the write fails
# (the original left the handle open and used a stray semicolon).
with open('log', 'w') as f:
    f.write('Sent: ' + message + " To: " + rpi)

connection.close()
|
{"/pids.py": ["/configuration.py"], "/schedule.py": ["/configuration.py", "/logger.py"], "/logger.py": ["/configuration.py"], "/gpio.py": ["/logger.py", "/configuration.py"], "/valve_driver.py": ["/logger.py", "/gpio.py", "/pids.py"]}
|
10,565
|
gordol/LawnCronPi
|
refs/heads/master
|
/logger.py
|
from datetime import datetime
import logging
import configuration
import os
import errno
logging.getLogger("urllib3").setLevel(logging.ERROR)
logging.getLogger("pika").setLevel(logging.ERROR)
logging.basicConfig(filename=configuration.log_file, level=configuration.log_level)
def create_dirs():
    """Ensure the log file's parent directory exists (no-op for a bare filename)."""
    log_dir = os.path.dirname(configuration.log_file)
    if log_dir == '':
        return
    try:
        os.makedirs(log_dir)
    except OSError as exc:  # Python >2.5
        # Only swallow "already exists" when it really is a directory.
        if not (exc.errno == errno.EEXIST and os.path.isdir(log_dir)):
            raise
def get_log_line(identifier, message):
    """Format one tab-separated log line: timestamp, identifier, message."""
    # %s applies str() to each field, matching the original .format() output.
    return "%s\t%s\t%s" % (datetime.now(), identifier, message)
def debug(identifier, message):
    # Log at DEBUG level using the standard tab-separated line format.
    logging.debug(get_log_line(identifier, message))
def info(identifier, message):
    # Log at INFO level using the standard tab-separated line format.
    logging.info(get_log_line(identifier, message))
def warn(identifier, message):
    # Log at WARNING level using the standard tab-separated line format.
    logging.warning(get_log_line(identifier, message))
def error(identifier, message):
    # Log at ERROR level using the standard tab-separated line format.
    logging.error(get_log_line(identifier, message))
create_dirs()
|
{"/pids.py": ["/configuration.py"], "/schedule.py": ["/configuration.py", "/logger.py"], "/logger.py": ["/configuration.py"], "/gpio.py": ["/logger.py", "/configuration.py"], "/valve_driver.py": ["/logger.py", "/gpio.py", "/pids.py"]}
|
10,566
|
gordol/LawnCronPi
|
refs/heads/master
|
/lawn_cron.py
|
import time
import pika
import configuration
import json
import schedule
from threading import Timer
from datetime import datetime, timedelta
import logger
import sys
import os
import pids
import gpio
from multiprocessing.dummy import Pool
import requests
import subprocess
LAWN_CRON = "lawn_cron.py"
network_pool = Pool(10)
def purge_queue(queue):
    # Shell out to rabbitmqctl to drop stale messages; exit status captured but unused.
    response = os.system("rabbitmqctl purge_queue {0}".format(queue))
def ping(hostname):
    """Return True when *hostname* answers a single ICMP ping."""
    # os.system returns the ping exit status; 0 means the host replied.
    return os.system("ping -c 1 " + hostname) == 0
def parse_request(request):
    """Decode a JSON request body; return the parsed object, or False when invalid."""
    try:
        parsed = json.loads(request)
    except ValueError:
        return False
    return parsed
def send_status_notification():
    # Post a heartbeat to the central server on the worker pool (non-blocking),
    # then re-arm a 450 s timer so this repeats for the life of the process.
    logger.debug(LAWN_CRON, "Posting status")
    network_pool.apply_async(requests.post, ['http://lawncron.com/api/status', {"rpi": configuration.id}])
    Timer(450, send_status_notification).start()
def cleanup_pids():
    """Reap finished/stuck valve drivers.

    For every pid file whose recorded end time has passed: turn the zone off,
    kill the driver process and delete its status file.  Re-arms itself on a
    timer so cleanup keeps running periodically.
    """
    for name in os.listdir(configuration.pid_files):
        pid_file = pids.create_pid_file_path(name)
        pid_contents = pids.read_pid_file(pid_file)
        if pid_contents is not False:
            # "end" was written with str(datetime); parse it back the same way.
            end = datetime.strptime(pid_contents["end"], '%Y-%m-%d %H:%M:%S.%f')
            if end < datetime.now():
                gpio.setup(pid_contents["zone"])
                gpio.off(pid_contents["zone"])
                pids.kill(int(pid_contents["pid"]))
                pids.delete_status_file(pid_file)
                logger.info(LAWN_CRON, "Cleaned up pid {0}".format(pid_contents["pid"]))
    Timer(configuration.cleanup_frequency, cleanup_pids).start()
def callback(ch, method, properties, body):
    """RabbitMQ consumer: dispatch an incoming JSON command to the schedule module.

    Recognized methods: add, delete, play, stop, update, refresh.  Missing
    fields default to empty values; unparseable bodies are ignored.
    """
    logger.debug(LAWN_CRON, "Received: " + body)
    request = parse_request(body)
    if request is not False:
        action = str(request['method'])
        # Optional fields default to empty values so handlers get stable types.
        schedule_id = str(request["id"]) if "id" in request else ""
        zone = str(request["zone"]) if "zone" in request else ""
        duration = request["duration"] if "duration" in request else {}
        start_time = request["time"] if "time" in request else {}
        days = request["days"] if "days" in request else []
        schedules = request["schedules"] if "schedules" in request else []
        if action == 'add':
            logger.debug(LAWN_CRON, "Adding :" + body)
            schedule.add(schedule_id, zone, duration, start_time, days)
        elif action == "delete":
            logger.debug(LAWN_CRON, "Deleting: " + body)
            schedule.delete(schedule_id)
        elif action == "play":
            logger.debug(LAWN_CRON, "Playing: " + body)
            schedule.play(schedule_id, zone, duration)
        elif action == "stop":
            logger.debug(LAWN_CRON, "Stopping :" + body)
            schedule.stop(schedule_id)
        elif action == "update":
            logger.debug(LAWN_CRON, "Updating: " + body)
            schedule.update(schedule_id, zone, duration, start_time, days)
        elif action == "refresh":
            logger.debug(LAWN_CRON, "Refreshing schedules.")
            schedule.refresh(schedules)
# Wait until the network is available
while True:
    network_connectivity = ping(configuration.rmq_host)
    if network_connectivity:
        logger.info(LAWN_CRON, "Network connection found")
        break
    time.sleep(5)
# Wait for a bit -- useful in the case of a unexpected reboot
logger.info(LAWN_CRON, "Warming up, this will take a few seconds")
time.sleep(7)
# Set async timers
Timer(configuration.cleanup_frequency, cleanup_pids).start()
Timer(1, send_status_notification).start()
# Setup RMQ
last_error_report = None
# Connect-consume loop: reconnects forever on failure, rate-limiting error logs
# to one WARN per 20 minutes (DEBUG otherwise).
while True:
    try:
        # Purge the queue -- get rid of any old messages
        if last_error_report is None:
            purge_queue(configuration.id)
        # Establish RMQ connection
        connection = pika.BlockingConnection(pika.ConnectionParameters(host=configuration.rmq_host))
        logger.info(LAWN_CRON, "Connected to " + configuration.rmq_host)
        # Create channel
        channel = connection.channel()
        logger.info(LAWN_CRON, "Created channel")
        # Decleare queue
        channel.queue_declare(queue=configuration.id)
        logger.info(LAWN_CRON, "Declaring queue: " + configuration.id)
        # Start listening to RMQ
        channel.basic_consume(callback, queue=configuration.id, no_ack=True)
        logger.info(LAWN_CRON, "Consuming queue: " + configuration.id)
        print(' [*] Waiting for messages. To exit press CTRL+C')
        channel.start_consuming()
    except Exception as e:
        if last_error_report is None or (datetime.now() - last_error_report) > timedelta(minutes=20):
            # NOTE: Python 2 print statement -- this module targets Python 2.
            print e
            logger.warn(LAWN_CRON, "Exception raised.")
            logger.warn(LAWN_CRON, e.message)
            last_error_report = datetime.now()
        else:
            logger.debug(LAWN_CRON, "Exception raised.")
            logger.debug(LAWN_CRON, sys.exc_info()[0])
        time.sleep(15)
        continue
|
{"/pids.py": ["/configuration.py"], "/schedule.py": ["/configuration.py", "/logger.py"], "/logger.py": ["/configuration.py"], "/gpio.py": ["/logger.py", "/configuration.py"], "/valve_driver.py": ["/logger.py", "/gpio.py", "/pids.py"]}
|
10,567
|
gordol/LawnCronPi
|
refs/heads/master
|
/gpio.py
|
import RPi.GPIO as GPIO
import logger
import configuration
GPIOLOG = "gpio.py"
GPIO.setmode(GPIO.BOARD)
def get_pin(zone):
    """Map a zone id (string key) to its GPIO board pin number.

    Unknown zones fall back to zone '4''s pin, with a warning logged.
    """
    pin = configuration.gpio_zone_map['4']
    if zone not in configuration.gpio_zone_map:
        logger.warn(GPIOLOG, "Zone {0} was not found, returning pin {1}".format(str(zone), str(pin)))
        return pin
    pin = configuration.gpio_zone_map[zone]
    logger.info(GPIOLOG, "Returning pin {0} for zone {1}".format(str(pin), str(zone)))
    return pin
def setup(zone):
    # Configure the zone's pin as an output and drive it high (True).
    # NOTE(review): on() also drives True and off() drives False -- confirm
    # which level the relay board treats as energized.
    pin = get_pin(zone)
    GPIO.setup(pin, GPIO.OUT)
    GPIO.output(pin, True)
    logger.info(GPIOLOG, "Setting up pin {0}".format(str(pin)))
def output(pin, state):
    # Thin wrapper over GPIO.output with debug logging.
    GPIO.output(pin, state)
    logger.debug(GPIOLOG, "Outputting pin {0}".format(str(pin)))
def on(zone):
    # Drive the zone's pin high (True) and log it.
    pin = get_pin(zone)
    output(pin, True)
    logger.info(GPIOLOG, "Turning on pin {0}".format(str(pin)))
def off(zone):
    # Drive the zone's pin low (False) and log it.
    pin = get_pin(zone)
    output(pin, False)
    logger.info(GPIOLOG, "Turning off pin {0}".format(str(pin)))
|
{"/pids.py": ["/configuration.py"], "/schedule.py": ["/configuration.py", "/logger.py"], "/logger.py": ["/configuration.py"], "/gpio.py": ["/logger.py", "/configuration.py"], "/valve_driver.py": ["/logger.py", "/gpio.py", "/pids.py"]}
|
10,568
|
gordol/LawnCronPi
|
refs/heads/master
|
/configuration.py
|
import logging
__author__ = 'zmiller'
# Rabbit MQ Configuration
id = "6ec32790"  # this device's queue name (also used as its identity upstream)
rmq_host = "lawncron.com"
# Cron configuration
python = "/usr/bin/python"  # interpreter used in generated cron commands
driver = "/home/zmiller/PycharmProjects/LawnCronPi/valve_driver.py"
pid_files = "/tmp/lcpids"  # directory holding per-schedule status files
log_file="log"
log_level = logging.INFO
# GPIO configuration
# Zone id (string) -> physical board pin number.
gpio_zone_map = {
    '1': 31,
    '2': 33,
    '3': 35,
    '4': 37
}
cleanup_frequency = 60.0 # in seconds
|
{"/pids.py": ["/configuration.py"], "/schedule.py": ["/configuration.py", "/logger.py"], "/logger.py": ["/configuration.py"], "/gpio.py": ["/logger.py", "/configuration.py"], "/valve_driver.py": ["/logger.py", "/gpio.py", "/pids.py"]}
|
10,569
|
gordol/LawnCronPi
|
refs/heads/master
|
/valve_driver.py
|
import sys
import pika
import json
from threading import Timer
from datetime import datetime, timedelta
import logger
import gpio
import pids
VALVE_DRIVER = "valve_driver.py"
pin = 7
schedule_id = sys.argv[1]
zone = sys.argv[2]
duration = sys.argv[3]
start_time = datetime.now()
def shutdown(ch, rk):
    # Publish a 'stop' message to our own queue; rmq_listener then turns the
    # valve off and kills this process.
    message = json.dumps({'action': 'stop', 'ts': str(datetime.now())})
    ch.basic_publish(exchange='', routing_key=rk, body=message)
def parse_message(message):
    """Parse a control message (JSON object with a 'ts' field) into a dict.

    'ts' is converted to a datetime.  Returns False for anything that is not
    valid JSON with a parseable 'ts' (callers test ``is not False``).
    """
    try:
        parsed = json.loads(message)
        # str(datetime.now()) on the sender side produces exactly this format.
        ts = datetime.strptime(parsed['ts'], '%Y-%m-%d %H:%M:%S.%f')
        parsed['ts'] = ts
        return parsed
    # Narrowed from a blanket `except Exception`: these are the failure modes
    # this body can actually produce (bad JSON, non-dict, missing key, bad ts).
    except (ValueError, KeyError, TypeError):
        return False
def rmq_listener(ch, method, properties, body):
    """Handle a control message from this schedule's queue.

    A 'stop' message newer than this driver's start time turns the valve off,
    removes the status file, and finally kills the recorded pid.
    """
    logger.debug(VALVE_DRIVER, "rmq_listener received: " + body)
    message = parse_message(body)
    if message is not False and message['ts'] > start_time and message['action'] == "stop":
        gpio.off(zone)
        pid_file_path = pids.create_pid_file_path(schedule_id)
        pid_file_contents = pids.read_pid_file(pid_file_path)
        pids.delete_status_file(pid_file_path)
        # NOTE(review): the recorded pid is normally this very process, so
        # nothing after this SIGTERM runs.
        pids.kill(int(pid_file_contents["pid"]))
# PID file
pid_file = pids.create_pid_file_path(schedule_id)
# Check if schedule is running in another thread
if pids.status_file_exists(pid_file):
    logger.info(VALVE_DRIVER, "Schedule {0} already running, exiting".format(schedule_id))
    sys.exit(0)
# Write file indicating this schedule is running
pids.create_status_file(pid_file, zone, datetime.now(), datetime.now() + timedelta(seconds=int(duration)))
# Setup GPIO Output
gpio.setup(zone)
gpio.on(zone)
started = False
last_error_report = None
# Connect-consume loop: a one-shot Timer publishes 'stop' to our own queue
# after *duration* seconds; reconnects forever on broker failure, rate-limiting
# error logs to one WARN per 20 minutes.
while True:
    try:
        logger.info(VALVE_DRIVER, "Attempting to establish connection...")
        # Establish local RMQ connection and listen schedule_id channel
        connection = pika.BlockingConnection(pika.ConnectionParameters(host="localhost"))
        logger.info(VALVE_DRIVER, "Connection established with localhost")
        channel = connection.channel()
        logger.info(VALVE_DRIVER, "Created a channel")
        channel.queue_declare(queue=schedule_id)
        logger.info(VALVE_DRIVER, "Declared queue " + schedule_id)
        channel.basic_consume(rmq_listener, queue=schedule_id, no_ack=True)
        logger.info(VALVE_DRIVER, "Consuming queue " + schedule_id)
        # Set the shutdown timer and start consuming
        if not started:
            Timer(float(duration), shutdown, [channel, schedule_id]).start()
            started = True
        channel.start_consuming()
    except Exception:
        if last_error_report is None or (datetime.now() - last_error_report) > timedelta(minutes=20):
            logger.warn(VALVE_DRIVER, "Exception raised.")
            logger.warn(VALVE_DRIVER, sys.exc_info()[0])
            last_error_report = datetime.now()
        else:
            logger.debug(VALVE_DRIVER, "Exception raised.")
            logger.debug(VALVE_DRIVER, sys.exc_info()[0])
        continue
|
{"/pids.py": ["/configuration.py"], "/schedule.py": ["/configuration.py", "/logger.py"], "/logger.py": ["/configuration.py"], "/gpio.py": ["/logger.py", "/configuration.py"], "/valve_driver.py": ["/logger.py", "/gpio.py", "/pids.py"]}
|
10,570
|
fferri/geometric_patterns
|
refs/heads/master
|
/video8.py
|
from common import *
# 2048x2048 polar grid; base image: log-spaced rings XORed with 16 angular sectors.
imgsz=(2048,)*2
r,a=meshgrid_polar(imgsz)
im=np.float32(np.uint8(np.log(1+r)*4%2)^np.uint8(np.sin(a*16)>0))
def draw(t=0, **kwargs):
    """Render frame *t*: box-blur the module-level base image with window
    width 1+2*t, then threshold against a radius-dependent level to get a
    binary frame."""
    im2=im
    # fast box blur:
    for n in (1+2*t,):
        for axis in range(2):
            # Sum of n shifted copies along each axis == unnormalized box filter.
            im2=sum(np.roll(im2,i,axis) for i in range(-n//2,(n+1)//2))
    im2=imnormalize(im2)>(0.25+r/imgsz[0])*255
    return im2
if __name__ == '__main__':
    # Render 1000 numbered frames to video8-%08d.png for later video assembly.
    for t in range(1000):
        print('rendering frame %08d...'%t)
        im2=draw(t)
        imsave(im2,'video8-%08d.png'%t)
|
{"/video8.py": ["/common.py"], "/video4.py": ["/common.py"], "/p04.py": ["/common.py"], "/p13.py": ["/common.py"], "/video5.py": ["/common.py"], "/p20.py": ["/common.py"], "/p01.py": ["/common.py"], "/p10.py": ["/common.py"], "/p24.py": ["/common.py"], "/p09.py": ["/common.py"], "/video8b.py": ["/common.py"], "/p16.py": ["/common.py"], "/video6.py": ["/common.py"], "/p19.py": ["/common.py"], "/p22.py": ["/common.py"], "/video.py": ["/common.py"], "/video9.py": ["/common.py"], "/p03.py": ["/common.py"], "/p18.py": ["/common.py"], "/video3.py": ["/common.py"], "/p02.py": ["/common.py"], "/p17.py": ["/common.py"], "/video8a.py": ["/common.py"], "/p15.py": ["/common.py"], "/p12.py": ["/common.py"], "/p14.py": ["/common.py"], "/video2.py": ["/common.py"], "/p11.py": ["/common.py"], "/p05.py": ["/common.py"], "/p25.py": ["/common.py"], "/p21.py": ["/common.py"], "/p23.py": ["/common.py"], "/video7.py": ["/common.py"]}
|
10,571
|
fferri/geometric_patterns
|
refs/heads/master
|
/more/plasma_spiral.py
|
import math
import numpy as np
from PIL import Image
def ramp(values,positions,n):
    """Piecewise-linear uint8 ramp of length *n*.

    values[i] is placed at index positions[i]; indices in between are
    linearly interpolated.  *positions* must be increasing.
    """
    r=np.zeros((n,),dtype=np.uint8)
    for (vi,vj,pi,pj) in zip(values,values[1:],positions,positions[1:]):
        for h in range(pi,1+pj):
            # Explicit float() keeps this a true division even under Python 2,
            # where int/int truncation would flatten the ramp to steps.
            a=float(h-pi)/(pj-pi)
            r[h]=(1-a)*vi+a*vj
    return r
im=np.zeros((1024,1024,3),dtype=np.uint8)
# 1024-entry RGB palette built from three piecewise-linear ramps.
cmap=np.zeros((1024,3),dtype=np.uint8)
cmap[...,0]=ramp([20,0,100,50],[0,700,900,1023],1024)
cmap[...,1]=ramp([100,0,100,0],[0,255,700,1023],1024)
cmap[...,2]=ramp([255,0,100,0,255],[0,500,950,1000,1023],1024)
print('im.shape:',im.shape)
# Per-pixel: polar coords -> spiral phase (r^2 term + 4*a) plus an angular
# offset, wrapped into [0,1024) and looked up in the palette.
for i in range(im.shape[0]):
    for j in range(im.shape[1]):
        y,x=i-im.shape[0]*0.5,j-im.shape[1]*0.5
        r,a=math.hypot(x,y),math.atan2(y,x)
        v=1023*0.5*(1.+math.sin((0.003*r)**2+4*a))+(8*a*1024/2/math.pi)
        while v<0: v+=1024
        while v>=1024: v-=1024
        for h in range(3):
            im[i,j,h]=cmap[int(v),h]
im=Image.fromarray(im)
#im.save('my.png')
im.show()
|
{"/video8.py": ["/common.py"], "/video4.py": ["/common.py"], "/p04.py": ["/common.py"], "/p13.py": ["/common.py"], "/video5.py": ["/common.py"], "/p20.py": ["/common.py"], "/p01.py": ["/common.py"], "/p10.py": ["/common.py"], "/p24.py": ["/common.py"], "/p09.py": ["/common.py"], "/video8b.py": ["/common.py"], "/p16.py": ["/common.py"], "/video6.py": ["/common.py"], "/p19.py": ["/common.py"], "/p22.py": ["/common.py"], "/video.py": ["/common.py"], "/video9.py": ["/common.py"], "/p03.py": ["/common.py"], "/p18.py": ["/common.py"], "/video3.py": ["/common.py"], "/p02.py": ["/common.py"], "/p17.py": ["/common.py"], "/video8a.py": ["/common.py"], "/p15.py": ["/common.py"], "/p12.py": ["/common.py"], "/p14.py": ["/common.py"], "/video2.py": ["/common.py"], "/p11.py": ["/common.py"], "/p05.py": ["/common.py"], "/p25.py": ["/common.py"], "/p21.py": ["/common.py"], "/p23.py": ["/common.py"], "/video7.py": ["/common.py"]}
|
10,572
|
fferri/geometric_patterns
|
refs/heads/master
|
/video4.py
|
from common import *
def kaleidoscope(x,y):
    """Return a warp function (i,j)->(i',j') that folds the plane into
    mirrored angular sectors around the point (imgsz[0]*y, imgsz[1]*x)."""
    def warp(i,j):
        ctr_y,ctr_x=imgsz[0]*y,imgsz[1]*x
        dy,dx=i-ctr_y,j-ctr_x
        rad=np.sqrt(dy**2+dx**2)
        ang=np.arctan2(dy,dx)
        sector=math.pi*2/4
        # fold the angle into half a sector, then add a (x,y)-dependent phase
        ang=np.abs(np.fmod(1.5*(2*math.pi+ang),sector)-sector/2)*2+math.sin(x+y+math.sin(x+4*y))
        return ctr_y+rad*np.sin(ang),ctr_x+rad*np.cos(ang)
    return warp
def spiral(shape,nbands=16,twist=0.1):
    """Logarithmic spiral intensity field with `nbands` angular bands and a
    radius-modulated twist; brightness decays away from the centre."""
    rad,ang=meshgrid_polar(shape)
    swirl=np.log(1+10*(1+np.sin(rad*0.002)))*twist
    return np.sin(swirl+ang*nbands)+1/(1.2+0.0007*rad)
# Base pattern: product of two counter-rotating 6-band spirals.
imgsz=(1024,1024)
s1,s2=(spiral(imgsz,6,32*i) for i in (-1,1))
im=s1*s2
def draw(t=0, **kwargs):
    """Render frame `t`: warp the base spiral pattern through a kaleidoscope
    whose centre orbits the image, then colorize with the jet colormap."""
    phase=t*0.008
    orbit=0.2+0.15*math.sin(phase*3)
    ctr_x=0.5+orbit*math.sin(phase)
    ctr_y=0.5+orbit*math.cos(phase)
    frame=imwarp(im,kaleidoscope(ctr_x,ctr_y))
    return apply_colormap(frame,colormap.jet)
if __name__ == '__main__':
    # Render 4000 animation frames to numbered PNG files.
    for t in range(4000):
        print('rendering frame %08d...'%t)
        im2=draw(t)
        imsave(im2,'video4-%08d.png'%t)
|
{"/video8.py": ["/common.py"], "/video4.py": ["/common.py"], "/p04.py": ["/common.py"], "/p13.py": ["/common.py"], "/video5.py": ["/common.py"], "/p20.py": ["/common.py"], "/p01.py": ["/common.py"], "/p10.py": ["/common.py"], "/p24.py": ["/common.py"], "/p09.py": ["/common.py"], "/video8b.py": ["/common.py"], "/p16.py": ["/common.py"], "/video6.py": ["/common.py"], "/p19.py": ["/common.py"], "/p22.py": ["/common.py"], "/video.py": ["/common.py"], "/video9.py": ["/common.py"], "/p03.py": ["/common.py"], "/p18.py": ["/common.py"], "/video3.py": ["/common.py"], "/p02.py": ["/common.py"], "/p17.py": ["/common.py"], "/video8a.py": ["/common.py"], "/p15.py": ["/common.py"], "/p12.py": ["/common.py"], "/p14.py": ["/common.py"], "/video2.py": ["/common.py"], "/p11.py": ["/common.py"], "/p05.py": ["/common.py"], "/p25.py": ["/common.py"], "/p21.py": ["/common.py"], "/p23.py": ["/common.py"], "/video7.py": ["/common.py"]}
|
10,573
|
fferri/geometric_patterns
|
refs/heads/master
|
/p04.py
|
from common import *
def square_spiral(shape,num_cycles):
w,b=np.ones(shape,dtype=np.uint8),np.zeros(shape,dtype=np.uint8)
im=np.hstack((b,w))
z=[[w,b],[w,b],[b,w],[b,w]]
for i in range(2,3+num_cycles):
j=(i-2)%4
if i<2+num_cycles:
im=np.vstack((np.kron([1]*i,z[j][0]),im,np.kron([1]*i,z[j][1])))
else:
im=np.vstack((np.kron([1]*i,z[j][0]),im))
im=im.T
return im
def draw(**kwargs):
    """Compose mirrored copies of a small square spiral into a symmetric
    pattern, then tile it to 10x its size."""
    s=square_spiral((4,4),10)
    s1=1-s.T[...,::-1]       # invert colors, transpose, mirror horizontally
    s2=s1[::-1,::-1]         # 180-degree rotated copy
    s2=s2[...,5:]            # drop 5 leading columns (presumably to align the halves — verify)
    q=np.hstack((s1,s2))
    q[-4:,...]=1             # force the bottom 4 rows white
    q=np.vstack((q,q[...,::-1]))   # append a horizontally mirrored copy below
    im=imtile(q,np.array(q.shape)*10)
    return im
if __name__ == '__main__':
    # Render the pattern, show it on screen, and save it as a PNG.
    im=draw()
    imshow(im)
    imsave(im,'p04.png')
|
{"/video8.py": ["/common.py"], "/video4.py": ["/common.py"], "/p04.py": ["/common.py"], "/p13.py": ["/common.py"], "/video5.py": ["/common.py"], "/p20.py": ["/common.py"], "/p01.py": ["/common.py"], "/p10.py": ["/common.py"], "/p24.py": ["/common.py"], "/p09.py": ["/common.py"], "/video8b.py": ["/common.py"], "/p16.py": ["/common.py"], "/video6.py": ["/common.py"], "/p19.py": ["/common.py"], "/p22.py": ["/common.py"], "/video.py": ["/common.py"], "/video9.py": ["/common.py"], "/p03.py": ["/common.py"], "/p18.py": ["/common.py"], "/video3.py": ["/common.py"], "/p02.py": ["/common.py"], "/p17.py": ["/common.py"], "/video8a.py": ["/common.py"], "/p15.py": ["/common.py"], "/p12.py": ["/common.py"], "/p14.py": ["/common.py"], "/video2.py": ["/common.py"], "/p11.py": ["/common.py"], "/p05.py": ["/common.py"], "/p25.py": ["/common.py"], "/p21.py": ["/common.py"], "/p23.py": ["/common.py"], "/video7.py": ["/common.py"]}
|
10,574
|
fferri/geometric_patterns
|
refs/heads/master
|
/p13.py
|
from common import *
# hyperbolic coords checkerboard
def draw(**kwargs):
    """Checkerboard in hyperbolic (u,v) coordinates, mirrored into four quadrants."""
    w,h=2048,2048
    u,v=meshgrid_hyperbolic((w,h))
    u=np.uint(u*10)%2    # parity bands along u
    v=np.uint(v//300)%2  # parity bands along v (NOTE(review): u scales by *10 but v by //300 — confirm the asymmetry is intended)
    im=u^v               # XOR of parities -> checkerboard
    im=np.hstack((im[...,::-1],im))   # mirror left/right
    im=np.vstack((im[::-1,...],im))   # mirror top/bottom
    return im
if __name__ == '__main__':
    # Render the pattern, show it on screen, and save it as a PNG.
    im=draw()
    imshow(im)
    imsave(im,'p13.png')
|
{"/video8.py": ["/common.py"], "/video4.py": ["/common.py"], "/p04.py": ["/common.py"], "/p13.py": ["/common.py"], "/video5.py": ["/common.py"], "/p20.py": ["/common.py"], "/p01.py": ["/common.py"], "/p10.py": ["/common.py"], "/p24.py": ["/common.py"], "/p09.py": ["/common.py"], "/video8b.py": ["/common.py"], "/p16.py": ["/common.py"], "/video6.py": ["/common.py"], "/p19.py": ["/common.py"], "/p22.py": ["/common.py"], "/video.py": ["/common.py"], "/video9.py": ["/common.py"], "/p03.py": ["/common.py"], "/p18.py": ["/common.py"], "/video3.py": ["/common.py"], "/p02.py": ["/common.py"], "/p17.py": ["/common.py"], "/video8a.py": ["/common.py"], "/p15.py": ["/common.py"], "/p12.py": ["/common.py"], "/p14.py": ["/common.py"], "/video2.py": ["/common.py"], "/p11.py": ["/common.py"], "/p05.py": ["/common.py"], "/p25.py": ["/common.py"], "/p21.py": ["/common.py"], "/p23.py": ["/common.py"], "/video7.py": ["/common.py"]}
|
10,575
|
fferri/geometric_patterns
|
refs/heads/master
|
/video5.py
|
from common import *
# plasma effect
imgsz=(1280,800)
y,x=meshgrid_euclidean(imgsz)   # per-pixel row/column coordinate grids
def draw(t=0, **kwargs):
    """Classic plasma frame `t`: sum of four animated sinusoidal fields,
    colorized with the jet colormap.

    The irrational-looking frequency constants keep the component waves from
    locking into a short repeating cycle.
    """
    # coordinates relative to a centre that orbits the screen over time
    cx1,cy1=x-imgsz[1]*0.5*(1+math.cos(t*0.01)),y-imgsz[0]*0.5*(1+math.sin(t*0.01))
    # v1/v3: radial waves about the moving centre; v2/v4: skewed planar waves
    v1=np.sin(np.sqrt(cx1**2+cy1**2)/imgsz[0]*12+t*0.0354837)
    v2=np.sin(9*(1+0.4*math.sin(t*0.04566))*x/imgsz[1]*math.sin(t*0.01)+7*(1+0.6*math.cos(t*0.0463))*y/imgsz[0]*math.cos(t*0.00784)+t*0.0295528)
    v3=np.sin(0.546427+np.sqrt(cx1**2+cy1**2)/imgsz[0]*6+t*0.0156737)
    v4=np.sin(0.4635+3*(1+0.5*math.sin(t*0.06566))*x/imgsz[1]*math.sin(t*0.01)+5*(1+0.6*math.cos(t*0.0463))*y/imgsz[0]*math.cos(t*0.00784)+t*0.0195528)
    im=v1*(0.7+0.6*math.sin(t*0.04526))+v2*(0.8+0.7*math.cos(t*0.05))+v3+v4
    im=apply_colormap(im,colormap.jet)
    return im
if __name__ == '__main__':
    # Render 8000 animation frames to numbered PNG files.
    for t in range(8000):
        print('rendering frame %08d...'%t)
        im=draw(t)
        imsave(im,'video5-%08d.png'%t)
|
{"/video8.py": ["/common.py"], "/video4.py": ["/common.py"], "/p04.py": ["/common.py"], "/p13.py": ["/common.py"], "/video5.py": ["/common.py"], "/p20.py": ["/common.py"], "/p01.py": ["/common.py"], "/p10.py": ["/common.py"], "/p24.py": ["/common.py"], "/p09.py": ["/common.py"], "/video8b.py": ["/common.py"], "/p16.py": ["/common.py"], "/video6.py": ["/common.py"], "/p19.py": ["/common.py"], "/p22.py": ["/common.py"], "/video.py": ["/common.py"], "/video9.py": ["/common.py"], "/p03.py": ["/common.py"], "/p18.py": ["/common.py"], "/video3.py": ["/common.py"], "/p02.py": ["/common.py"], "/p17.py": ["/common.py"], "/video8a.py": ["/common.py"], "/p15.py": ["/common.py"], "/p12.py": ["/common.py"], "/p14.py": ["/common.py"], "/video2.py": ["/common.py"], "/p11.py": ["/common.py"], "/p05.py": ["/common.py"], "/p25.py": ["/common.py"], "/p21.py": ["/common.py"], "/p23.py": ["/common.py"], "/video7.py": ["/common.py"]}
|
10,576
|
fferri/geometric_patterns
|
refs/heads/master
|
/p20.py
|
from common import *
def draw(**kwargs):
    """Checkerboard of 256px squares where black cells carry one stripe
    direction and white cells the inverted perpendicular stripes."""
    sq=np.array((256,)*2)
    imgsz=sq*6
    k=8 # try 128
    rows,cols=meshgrid_euclidean(imgsz)
    h=rows//k%2   # stripe parity along the first axis
    v=cols//k%2   # stripe parity along the second axis
    c=checkerboard(imgsz,sq)
    return c*h+(1-c)*(1-v)
if __name__ == '__main__':
    # Render the pattern, show it on screen, and save it as a PNG.
    im=draw()
    imshow(im)
    imsave(im,'p20.png')
|
{"/video8.py": ["/common.py"], "/video4.py": ["/common.py"], "/p04.py": ["/common.py"], "/p13.py": ["/common.py"], "/video5.py": ["/common.py"], "/p20.py": ["/common.py"], "/p01.py": ["/common.py"], "/p10.py": ["/common.py"], "/p24.py": ["/common.py"], "/p09.py": ["/common.py"], "/video8b.py": ["/common.py"], "/p16.py": ["/common.py"], "/video6.py": ["/common.py"], "/p19.py": ["/common.py"], "/p22.py": ["/common.py"], "/video.py": ["/common.py"], "/video9.py": ["/common.py"], "/p03.py": ["/common.py"], "/p18.py": ["/common.py"], "/video3.py": ["/common.py"], "/p02.py": ["/common.py"], "/p17.py": ["/common.py"], "/video8a.py": ["/common.py"], "/p15.py": ["/common.py"], "/p12.py": ["/common.py"], "/p14.py": ["/common.py"], "/video2.py": ["/common.py"], "/p11.py": ["/common.py"], "/p05.py": ["/common.py"], "/p25.py": ["/common.py"], "/p21.py": ["/common.py"], "/p23.py": ["/common.py"], "/video7.py": ["/common.py"]}
|
10,577
|
fferri/geometric_patterns
|
refs/heads/master
|
/p01.py
|
from common import *
def draw(**kwargs):
    """Nested checkerboard frames: start from a 512px checkerboard and wrap
    it in 6 progressively finer checkerboard borders, inverting the
    outermost pixel ring at each step.

    Bug fix: the stacking calls were swapped — `np.hstack((ch,im,ch))` joins
    along columns but ch has `s` rows vs im's larger row count, raising
    ValueError on the first iteration. Use the
    hstack(cv, vstack(ch, im, ch), cv) arrangement, consistent with p10.py.
    """
    s=2**9
    k=2 # try 1, 2, 3...
    im=checkerboard(s,s//k)
    for i in range(6):
        # invert the outermost row/column ring before adding the next frame
        im[[0,-1],...]^=1
        im[...,[0,-1]]^=1
        s//=2
        ch=checkerboard((s,im.shape[1]),s//k)       # top/bottom strips
        cv=checkerboard((im.shape[0]+2*s,s),s//k)   # left/right strips
        im=np.hstack((cv,np.vstack((ch,im,ch)),cv))
    return im
if __name__ == '__main__':
    # Render the pattern, show it on screen, and save it as a PNG.
    im=draw()
    imshow(im)
    imsave(im,'p01.png')
|
{"/video8.py": ["/common.py"], "/video4.py": ["/common.py"], "/p04.py": ["/common.py"], "/p13.py": ["/common.py"], "/video5.py": ["/common.py"], "/p20.py": ["/common.py"], "/p01.py": ["/common.py"], "/p10.py": ["/common.py"], "/p24.py": ["/common.py"], "/p09.py": ["/common.py"], "/video8b.py": ["/common.py"], "/p16.py": ["/common.py"], "/video6.py": ["/common.py"], "/p19.py": ["/common.py"], "/p22.py": ["/common.py"], "/video.py": ["/common.py"], "/video9.py": ["/common.py"], "/p03.py": ["/common.py"], "/p18.py": ["/common.py"], "/video3.py": ["/common.py"], "/p02.py": ["/common.py"], "/p17.py": ["/common.py"], "/video8a.py": ["/common.py"], "/p15.py": ["/common.py"], "/p12.py": ["/common.py"], "/p14.py": ["/common.py"], "/video2.py": ["/common.py"], "/p11.py": ["/common.py"], "/p05.py": ["/common.py"], "/p25.py": ["/common.py"], "/p21.py": ["/common.py"], "/p23.py": ["/common.py"], "/video7.py": ["/common.py"]}
|
10,578
|
fferri/geometric_patterns
|
refs/heads/master
|
/p10.py
|
from common import *
def draw(**kwargs):
    """Recursively framed checkerboards warped by a swirling radial map."""
    s=256
    im=checkerboard(s,s//2)
    for i in range(6):
        s//=2
        c=checkerboard(s,s//2)
        ch=imtile(c,(c.shape[0],im.shape[1]))                # top/bottom strips
        cv=imtile(c,(im.shape[0]+2*c.shape[0],c.shape[1]))   # left/right strips
        im=np.hstack((cv,np.vstack((ch,im,ch)),cv))
    imgsz=np.uint(im.shape)
    def radial_warp(i,j):
        # angle stretched by 6/4; radius scaled by a rapidly oscillating
        # factor near the centre (sin argument blows up as r -> 0)
        cx,cy=imgsz/2
        a,r=np.arctan2(i-cy,j-cx),np.sqrt((i-cy)**2+(j-cx)**2)
        a=a*6/4
        r=r*np.sin(1000/(1+r))
        return cx+np.cos(a)*r,cy+np.sin(a)*r
    im=imwarp(im,radial_warp,cycle)
    return im
if __name__ == '__main__':
    # Render the pattern, show it on screen, and save it as a PNG.
    im=draw()
    imshow(im)
    imsave(im,'p10.png')
|
{"/video8.py": ["/common.py"], "/video4.py": ["/common.py"], "/p04.py": ["/common.py"], "/p13.py": ["/common.py"], "/video5.py": ["/common.py"], "/p20.py": ["/common.py"], "/p01.py": ["/common.py"], "/p10.py": ["/common.py"], "/p24.py": ["/common.py"], "/p09.py": ["/common.py"], "/video8b.py": ["/common.py"], "/p16.py": ["/common.py"], "/video6.py": ["/common.py"], "/p19.py": ["/common.py"], "/p22.py": ["/common.py"], "/video.py": ["/common.py"], "/video9.py": ["/common.py"], "/p03.py": ["/common.py"], "/p18.py": ["/common.py"], "/video3.py": ["/common.py"], "/p02.py": ["/common.py"], "/p17.py": ["/common.py"], "/video8a.py": ["/common.py"], "/p15.py": ["/common.py"], "/p12.py": ["/common.py"], "/p14.py": ["/common.py"], "/video2.py": ["/common.py"], "/p11.py": ["/common.py"], "/p05.py": ["/common.py"], "/p25.py": ["/common.py"], "/p21.py": ["/common.py"], "/p23.py": ["/common.py"], "/video7.py": ["/common.py"]}
|
10,579
|
fferri/geometric_patterns
|
refs/heads/master
|
/p24.py
|
from common import *
def draw(**kwargs):
    """Log-spaced rings XOR 16 angular wedges, softened by a 65px box blur
    and re-thresholded against a radius-dependent level."""
    imgsz=(2048,)*2
    r,a=meshgrid_polar(imgsz)
    im=np.float32(np.uint8(np.log(1+r)*4%2)^np.uint8(np.sin(a*16)>0))
    # fast box blur:
    for n in (65,):
        for axis in range(2):
            im=sum(np.roll(im,i,axis) for i in range(-n//2,(n+1)//2))
    # threshold rises with radius, shrinking bright areas toward the edges
    im=imnormalize(im)>(0.25+r/imgsz[0])*255
    return im
if __name__ == '__main__':
    # Render the pattern, show it on screen, and save it as a PNG.
    im=draw()
    imshow(im)
    imsave(im,'p24.png')
|
{"/video8.py": ["/common.py"], "/video4.py": ["/common.py"], "/p04.py": ["/common.py"], "/p13.py": ["/common.py"], "/video5.py": ["/common.py"], "/p20.py": ["/common.py"], "/p01.py": ["/common.py"], "/p10.py": ["/common.py"], "/p24.py": ["/common.py"], "/p09.py": ["/common.py"], "/video8b.py": ["/common.py"], "/p16.py": ["/common.py"], "/video6.py": ["/common.py"], "/p19.py": ["/common.py"], "/p22.py": ["/common.py"], "/video.py": ["/common.py"], "/video9.py": ["/common.py"], "/p03.py": ["/common.py"], "/p18.py": ["/common.py"], "/video3.py": ["/common.py"], "/p02.py": ["/common.py"], "/p17.py": ["/common.py"], "/video8a.py": ["/common.py"], "/p15.py": ["/common.py"], "/p12.py": ["/common.py"], "/p14.py": ["/common.py"], "/video2.py": ["/common.py"], "/p11.py": ["/common.py"], "/p05.py": ["/common.py"], "/p25.py": ["/common.py"], "/p21.py": ["/common.py"], "/p23.py": ["/common.py"], "/video7.py": ["/common.py"]}
|
10,580
|
fferri/geometric_patterns
|
refs/heads/master
|
/p09.py
|
from common import *
# 2-sides checkerboard
def draw(**kwargs):
    """Checkerboard/box pattern with finer boxes in the centre quarter,
    pushed through a rippled radial warp with a 6/4 angular fold."""
    imgsz=np.array([2*1024]*2)
    def radial_warp(i,j):
        cx,cy=imgsz/2
        a,r=np.arctan2(i-cy,j-cx),np.sqrt((i-cy)**2+(j-cx)**2)
        r=r*(1+0.1*np.sin(0.008*r))   # gentle radial ripple
        a=a*6/4
        return cx+np.cos(a)*r,cy+np.sin(a)*r
    im=checkerboard(imgsz, imgsz//16)^imtile(boxN(imgsz//8,4),imgsz)
    im2=checkerboard(imgsz, imgsz//16)^imtile(boxN(imgsz//16,4),imgsz)
    # NOTE(review): 512:1536 hard-codes the centre quarter of a 2048px image
    im[512:1536,512:1536]=im2[512:1536,512:1536]
    im=imwarp(im,radial_warp,cycle)
    return im
if __name__ == '__main__':
    # Render the pattern, show it on screen, and save it as a PNG.
    im=draw()
    imshow(im)
    imsave(im,'p09.png')
|
{"/video8.py": ["/common.py"], "/video4.py": ["/common.py"], "/p04.py": ["/common.py"], "/p13.py": ["/common.py"], "/video5.py": ["/common.py"], "/p20.py": ["/common.py"], "/p01.py": ["/common.py"], "/p10.py": ["/common.py"], "/p24.py": ["/common.py"], "/p09.py": ["/common.py"], "/video8b.py": ["/common.py"], "/p16.py": ["/common.py"], "/video6.py": ["/common.py"], "/p19.py": ["/common.py"], "/p22.py": ["/common.py"], "/video.py": ["/common.py"], "/video9.py": ["/common.py"], "/p03.py": ["/common.py"], "/p18.py": ["/common.py"], "/video3.py": ["/common.py"], "/p02.py": ["/common.py"], "/p17.py": ["/common.py"], "/video8a.py": ["/common.py"], "/p15.py": ["/common.py"], "/p12.py": ["/common.py"], "/p14.py": ["/common.py"], "/video2.py": ["/common.py"], "/p11.py": ["/common.py"], "/p05.py": ["/common.py"], "/p25.py": ["/common.py"], "/p21.py": ["/common.py"], "/p23.py": ["/common.py"], "/video7.py": ["/common.py"]}
|
10,581
|
fferri/geometric_patterns
|
refs/heads/master
|
/video8b.py
|
from common import *
imgsz=(2048,)*2
r,a=meshgrid_polar(imgsz)   # per-pixel radius and angle from the image centre
def draw(t=0, **kwargs):
    """Frame `t`: log-spaced rings XOR time-rotating wedges, box-blurred
    with a kernel that grows with `t`, thresholded radially, colorized."""
    discs=np.uint8(np.log(1+r)*4%2)
    bands=np.uint8(np.sin(a*16+0.1*t-np.log(1+r)*t)>0)
    im2=np.float32(discs^bands)
    # fast box blur:
    n=1+2*int(t*5)   # odd kernel width, growing with t
    for axis in range(2):
        im2=sum(np.roll(im2,i,axis) for i in range(-n//2,(n+1)//2))
    im2/=n*n   # normalize the separable blur
    im3=imnormalize(im2)-(0.25+r/imgsz[0])*255
    im3=apply_colormap(im3,colormap.hot)
    return im3
if __name__ == '__main__':
    # Render 1000 frames; the index is scaled by 0.05 to slow the animation.
    for t in range(1000):
        print('rendering frame %08d...'%t)
        im3=draw(0.05*t)
        # Bug fix: the filename used undefined name `frame` (NameError on
        # the first frame); use the loop index `t`.
        imsave(im3,'video8b-%08d.png'%t)
|
{"/video8.py": ["/common.py"], "/video4.py": ["/common.py"], "/p04.py": ["/common.py"], "/p13.py": ["/common.py"], "/video5.py": ["/common.py"], "/p20.py": ["/common.py"], "/p01.py": ["/common.py"], "/p10.py": ["/common.py"], "/p24.py": ["/common.py"], "/p09.py": ["/common.py"], "/video8b.py": ["/common.py"], "/p16.py": ["/common.py"], "/video6.py": ["/common.py"], "/p19.py": ["/common.py"], "/p22.py": ["/common.py"], "/video.py": ["/common.py"], "/video9.py": ["/common.py"], "/p03.py": ["/common.py"], "/p18.py": ["/common.py"], "/video3.py": ["/common.py"], "/p02.py": ["/common.py"], "/p17.py": ["/common.py"], "/video8a.py": ["/common.py"], "/p15.py": ["/common.py"], "/p12.py": ["/common.py"], "/p14.py": ["/common.py"], "/video2.py": ["/common.py"], "/p11.py": ["/common.py"], "/p05.py": ["/common.py"], "/p25.py": ["/common.py"], "/p21.py": ["/common.py"], "/p23.py": ["/common.py"], "/video7.py": ["/common.py"]}
|
10,582
|
fferri/geometric_patterns
|
refs/heads/master
|
/p16.py
|
from common import *
def draw(**kwargs):
    """Diamond-shaped (L1-distance) log-spaced rings XOR 16 angular wedges."""
    imgsz=np.array([2*1024]*2)
    rad,ang=meshgrid_polar(imgsz,dist=distance.L1)
    rings=np.uint(5*np.log(1+rad))%2
    wedges=np.uint(np.floor(ang*16/math.pi/2))%2
    return rings^wedges
if __name__ == '__main__':
    # Render the pattern, show it on screen, and save it as a PNG.
    im=draw()
    imshow(im)
    imsave(im,'p16.png')
|
{"/video8.py": ["/common.py"], "/video4.py": ["/common.py"], "/p04.py": ["/common.py"], "/p13.py": ["/common.py"], "/video5.py": ["/common.py"], "/p20.py": ["/common.py"], "/p01.py": ["/common.py"], "/p10.py": ["/common.py"], "/p24.py": ["/common.py"], "/p09.py": ["/common.py"], "/video8b.py": ["/common.py"], "/p16.py": ["/common.py"], "/video6.py": ["/common.py"], "/p19.py": ["/common.py"], "/p22.py": ["/common.py"], "/video.py": ["/common.py"], "/video9.py": ["/common.py"], "/p03.py": ["/common.py"], "/p18.py": ["/common.py"], "/video3.py": ["/common.py"], "/p02.py": ["/common.py"], "/p17.py": ["/common.py"], "/video8a.py": ["/common.py"], "/p15.py": ["/common.py"], "/p12.py": ["/common.py"], "/p14.py": ["/common.py"], "/video2.py": ["/common.py"], "/p11.py": ["/common.py"], "/p05.py": ["/common.py"], "/p25.py": ["/common.py"], "/p21.py": ["/common.py"], "/p23.py": ["/common.py"], "/video7.py": ["/common.py"]}
|
10,583
|
fferri/geometric_patterns
|
refs/heads/master
|
/video6.py
|
from common import *
imgsz=np.array((512,512))
# two focal points, centred horizontally at 1/4 and 3/4 of the image
c=[imgsz*(0.5,d) for d in (0.25,0.75)]
(r1,a1),(r2,a2)=(meshgrid_polar(imgsz,c[i]) for i in range(2))  # polar grid about each focus
cmap=colormap.rainbow()   # palette, rotated one step per rendered frame by draw()
def draw(t=0, **kwargs):
    """Render frame `t`: interference of the two focal polar fields,
    colorized with a rainbow palette cyclically shifted one step per frame.

    Bug fix: `cmap` is both read and reassigned in this function, which made
    it a local name and raised UnboundLocalError on the np.roll line;
    declare it global so the palette rotation also persists across frames.
    """
    global cmap
    k=0.03*t/250
    im=np.minimum(r1*np.sin(k*r2),r2*np.sin(k*r1))
    cmap=np.roll(cmap,-1,axis=0)
    im=apply_colormap(im,cmap)
    return im
if __name__ == '__main__':
    # Render 2153 animation frames to numbered PNG files.
    for t in range(2153):
        print('rendering frame %08d...'%t)
        im=draw(t)
        imsave(im,'video6-%08d.png'%t)
|
{"/video8.py": ["/common.py"], "/video4.py": ["/common.py"], "/p04.py": ["/common.py"], "/p13.py": ["/common.py"], "/video5.py": ["/common.py"], "/p20.py": ["/common.py"], "/p01.py": ["/common.py"], "/p10.py": ["/common.py"], "/p24.py": ["/common.py"], "/p09.py": ["/common.py"], "/video8b.py": ["/common.py"], "/p16.py": ["/common.py"], "/video6.py": ["/common.py"], "/p19.py": ["/common.py"], "/p22.py": ["/common.py"], "/video.py": ["/common.py"], "/video9.py": ["/common.py"], "/p03.py": ["/common.py"], "/p18.py": ["/common.py"], "/video3.py": ["/common.py"], "/p02.py": ["/common.py"], "/p17.py": ["/common.py"], "/video8a.py": ["/common.py"], "/p15.py": ["/common.py"], "/p12.py": ["/common.py"], "/p14.py": ["/common.py"], "/video2.py": ["/common.py"], "/p11.py": ["/common.py"], "/p05.py": ["/common.py"], "/p25.py": ["/common.py"], "/p21.py": ["/common.py"], "/p23.py": ["/common.py"], "/video7.py": ["/common.py"]}
|
10,584
|
fferri/geometric_patterns
|
refs/heads/master
|
/p19.py
|
from common import *
def spiral(shape,nbands=16,twist=0.1):
    """Logarithmic spiral intensity field with `nbands` angular bands and a
    radius-modulated twist; brightness decays away from the centre."""
    rad,ang=meshgrid_polar(shape)
    swirl=np.log(1+10*(1+np.sin(rad*0.002)))*twist
    return np.sin(swirl+ang*nbands)+1/(1.2+0.0007*rad)
def draw(**kwargs):
    """Product of two counter-twisted 3-band spirals, colorized with a
    custom red/yellow palette overlaid with contour lines."""
    imgsz=(2048,2048)
    s1,s2=(spiral(imgsz,3,16*i) for i in (-1,1))
    im=s1*s2
    cmap=np.zeros((256,3), dtype=np.uint8)
    cmap[0:49,:]=[185,0,0]        # dark red band at the low end
    cmap[155:185,:]=[255,205,0]   # golden band in the upper-middle range
    cmap|=colormap.contours(4,3)  # bitwise-OR contour lines into the palette
    im=apply_colormap(im,cmap)
    return im
if __name__ == '__main__':
    # Render the pattern, show it on screen, and save it as a PNG.
    im=draw()
    imshow(im)
    imsave(im,'p19.png')
|
{"/video8.py": ["/common.py"], "/video4.py": ["/common.py"], "/p04.py": ["/common.py"], "/p13.py": ["/common.py"], "/video5.py": ["/common.py"], "/p20.py": ["/common.py"], "/p01.py": ["/common.py"], "/p10.py": ["/common.py"], "/p24.py": ["/common.py"], "/p09.py": ["/common.py"], "/video8b.py": ["/common.py"], "/p16.py": ["/common.py"], "/video6.py": ["/common.py"], "/p19.py": ["/common.py"], "/p22.py": ["/common.py"], "/video.py": ["/common.py"], "/video9.py": ["/common.py"], "/p03.py": ["/common.py"], "/p18.py": ["/common.py"], "/video3.py": ["/common.py"], "/p02.py": ["/common.py"], "/p17.py": ["/common.py"], "/video8a.py": ["/common.py"], "/p15.py": ["/common.py"], "/p12.py": ["/common.py"], "/p14.py": ["/common.py"], "/video2.py": ["/common.py"], "/p11.py": ["/common.py"], "/p05.py": ["/common.py"], "/p25.py": ["/common.py"], "/p21.py": ["/common.py"], "/p23.py": ["/common.py"], "/video7.py": ["/common.py"]}
|
10,585
|
fferri/geometric_patterns
|
refs/heads/master
|
/p22.py
|
from common import *
def draw(**kwargs):
    """Swirling sawtooth spiral rendered with the hot colormap."""
    imgsz=(2048,)*2
    rad,ang=meshgrid_polar(imgsz)
    logr=np.log(1+rad)
    wave=np.sin(ang*5+np.sin(logr*4)+logr*2)
    sawtooth=np.fmod((1+wave+logr),1)
    return apply_colormap(sawtooth,colormap.hot)
if __name__ == '__main__':
    # Render the pattern, show it on screen, and save it as a PNG.
    im=draw()
    imshow(im)
    imsave(im,'p22.png')
|
{"/video8.py": ["/common.py"], "/video4.py": ["/common.py"], "/p04.py": ["/common.py"], "/p13.py": ["/common.py"], "/video5.py": ["/common.py"], "/p20.py": ["/common.py"], "/p01.py": ["/common.py"], "/p10.py": ["/common.py"], "/p24.py": ["/common.py"], "/p09.py": ["/common.py"], "/video8b.py": ["/common.py"], "/p16.py": ["/common.py"], "/video6.py": ["/common.py"], "/p19.py": ["/common.py"], "/p22.py": ["/common.py"], "/video.py": ["/common.py"], "/video9.py": ["/common.py"], "/p03.py": ["/common.py"], "/p18.py": ["/common.py"], "/video3.py": ["/common.py"], "/p02.py": ["/common.py"], "/p17.py": ["/common.py"], "/video8a.py": ["/common.py"], "/p15.py": ["/common.py"], "/p12.py": ["/common.py"], "/p14.py": ["/common.py"], "/video2.py": ["/common.py"], "/p11.py": ["/common.py"], "/p05.py": ["/common.py"], "/p25.py": ["/common.py"], "/p21.py": ["/common.py"], "/p23.py": ["/common.py"], "/video7.py": ["/common.py"]}
|
10,586
|
fferri/geometric_patterns
|
refs/heads/master
|
/video.py
|
from common import *
# Base pattern, built lazily on the first draw() call.
im = None
def radial_warp(t,imgsz):
    """Return a time-dependent warp (i,j)->(x,y): angle stretched by 6/4 and
    radius modulated by a ripple whose amplitude, frequency and phase all
    drift with frame index `t`."""
    def f(i,j):
        cx,cy=imgsz/2
        a,r=np.arctan2(i-cy,j-cx),np.sqrt((i-cy)**2+(j-cx)**2)
        r=r*(1+(0.1*math.sin(t*0.01)+0.1)*np.sin((0.008+math.sin(0.0007*t)*0.01)*r+t*0.005637))
        a=a*6/4
        return cx+np.cos(a)*r,cy+np.sin(a)*r
    return f
def draw(t=0, **kwargs):
    """Warp the (lazily built) checkerboard/box base image for frame `t`."""
    imgsz=np.array([2*1024]*2)
    global im
    if im is None:
        # coarse boxes everywhere, finer boxes pasted into the centre quarter
        im=checkerboard(imgsz, imgsz//16)^imtile(boxN(imgsz//8,4),imgsz)
        im2=checkerboard(imgsz, imgsz//16)^imtile(boxN(imgsz//16,4),imgsz)
        im[512:1536,512:1536]=im2[512:1536,512:1536]
    return imwarp(im,radial_warp(t,imgsz),cycle)
if __name__ == '__main__':
    # Render 10000 animation frames to numbered PNG files.
    for t in range(10000):
        print('rendering frame %08d...'%t)
        imsave(draw(t),'video%08d.png'%t)
|
{"/video8.py": ["/common.py"], "/video4.py": ["/common.py"], "/p04.py": ["/common.py"], "/p13.py": ["/common.py"], "/video5.py": ["/common.py"], "/p20.py": ["/common.py"], "/p01.py": ["/common.py"], "/p10.py": ["/common.py"], "/p24.py": ["/common.py"], "/p09.py": ["/common.py"], "/video8b.py": ["/common.py"], "/p16.py": ["/common.py"], "/video6.py": ["/common.py"], "/p19.py": ["/common.py"], "/p22.py": ["/common.py"], "/video.py": ["/common.py"], "/video9.py": ["/common.py"], "/p03.py": ["/common.py"], "/p18.py": ["/common.py"], "/video3.py": ["/common.py"], "/p02.py": ["/common.py"], "/p17.py": ["/common.py"], "/video8a.py": ["/common.py"], "/p15.py": ["/common.py"], "/p12.py": ["/common.py"], "/p14.py": ["/common.py"], "/video2.py": ["/common.py"], "/p11.py": ["/common.py"], "/p05.py": ["/common.py"], "/p25.py": ["/common.py"], "/p21.py": ["/common.py"], "/p23.py": ["/common.py"], "/video7.py": ["/common.py"]}
|
10,587
|
fferri/geometric_patterns
|
refs/heads/master
|
/video9.py
|
from common import *
# floor tiles metamorphosis
imgsz=(2048,)*2
x,y=meshgrid_euclidean(imgsz)
c=lambda x: np.cos(math.pi*x)   # cosine with period 2 in normalized tile units
f=8./imgsz[0]                   # spatial frequency: 8 half-periods across the image
def draw(t=0, nf=250, **kwargs):
    """Frame `t` of an `nf`-frame tile-metamorphosis loop.

    b sweeps 0 -> 1 -> 0 over the cycle (clamped blend weight between the
    two cosine fields); q flips the second field's phase during the middle
    half of the cycle and XORs the result.
    """
    b=min(1.,max(0.,1.3*abs(math.fmod(2*t/125.,2)-1)))
    q=int(t>=nf*0.25 and t<=nf*0.75)
    im=(c(y*f)+b*c(x*f)>0)^(c(q+x*f)+b*c(y*f)>0)^q
    return im
if __name__ == '__main__':
    # Bug fix: `nf` was referenced here but never defined at module level
    # (it only existed as draw()'s keyword default), raising NameError;
    # define it explicitly, matching draw()'s default cycle length.
    nf=250
    for t in range(nf):
        print('rendering frame %08d...'%t)
        im=draw(t)
        imsave(im,'video9-%08d.png'%t)
|
{"/video8.py": ["/common.py"], "/video4.py": ["/common.py"], "/p04.py": ["/common.py"], "/p13.py": ["/common.py"], "/video5.py": ["/common.py"], "/p20.py": ["/common.py"], "/p01.py": ["/common.py"], "/p10.py": ["/common.py"], "/p24.py": ["/common.py"], "/p09.py": ["/common.py"], "/video8b.py": ["/common.py"], "/p16.py": ["/common.py"], "/video6.py": ["/common.py"], "/p19.py": ["/common.py"], "/p22.py": ["/common.py"], "/video.py": ["/common.py"], "/video9.py": ["/common.py"], "/p03.py": ["/common.py"], "/p18.py": ["/common.py"], "/video3.py": ["/common.py"], "/p02.py": ["/common.py"], "/p17.py": ["/common.py"], "/video8a.py": ["/common.py"], "/p15.py": ["/common.py"], "/p12.py": ["/common.py"], "/p14.py": ["/common.py"], "/video2.py": ["/common.py"], "/p11.py": ["/common.py"], "/p05.py": ["/common.py"], "/p25.py": ["/common.py"], "/p21.py": ["/common.py"], "/p23.py": ["/common.py"], "/video7.py": ["/common.py"]}
|
10,588
|
fferri/geometric_patterns
|
refs/heads/master
|
/p03.py
|
from common import *
def spiral(shape,nbands=16,twist=0.1):
    """Boolean spiral mask: True where the log-spiral wave is positive."""
    rad,ang=meshgrid_polar(shape)
    return np.sin(np.log(1+rad)*twist+ang*nbands)>0
def draw(**kwargs):
    """XOR of two counter-twisted 16-band spirals (moire interference)."""
    imgsz=(1024,1024)
    fwd,rev=(spiral(imgsz,16,16*d) for d in (1,-1))
    return fwd^rev
if __name__ == '__main__':
    # Render the pattern, show it on screen, and save it as a PNG.
    im=draw()
    imshow(im)
    imsave(im,'p03.png')
|
{"/video8.py": ["/common.py"], "/video4.py": ["/common.py"], "/p04.py": ["/common.py"], "/p13.py": ["/common.py"], "/video5.py": ["/common.py"], "/p20.py": ["/common.py"], "/p01.py": ["/common.py"], "/p10.py": ["/common.py"], "/p24.py": ["/common.py"], "/p09.py": ["/common.py"], "/video8b.py": ["/common.py"], "/p16.py": ["/common.py"], "/video6.py": ["/common.py"], "/p19.py": ["/common.py"], "/p22.py": ["/common.py"], "/video.py": ["/common.py"], "/video9.py": ["/common.py"], "/p03.py": ["/common.py"], "/p18.py": ["/common.py"], "/video3.py": ["/common.py"], "/p02.py": ["/common.py"], "/p17.py": ["/common.py"], "/video8a.py": ["/common.py"], "/p15.py": ["/common.py"], "/p12.py": ["/common.py"], "/p14.py": ["/common.py"], "/video2.py": ["/common.py"], "/p11.py": ["/common.py"], "/p05.py": ["/common.py"], "/p25.py": ["/common.py"], "/p21.py": ["/common.py"], "/p23.py": ["/common.py"], "/video7.py": ["/common.py"]}
|
10,589
|
fferri/geometric_patterns
|
refs/heads/master
|
/p18.py
|
from common import *
# inspired by a Aldous Huxley's book cover
def draw(**kwargs):
    """Sawtooth spiral with 8 petals; the red and blue channels are rotated
    slightly in opposite directions for a chromatic-aberration effect."""
    imgsz=(2048,)*2
    r,a=meshgrid_polar(imgsz)
    # log-radius sawtooth (period 1.4) plus a sharpened 8-lobe angular term
    im=np.fmod(np.float32(3.5*np.log(1+r))+2*np.power(np.abs(np.sin(8*np.float32(a))),0.4),1.4)
    im=apply_colormap(im,colormap.rainbow)
    def warp(o):
        # pure rotation about the image centre by angle offset `o`
        def f(i,j):
            cy,cx=imgsz[0]//2,imgsz[1]//2
            y,x=i-cy,j-cx
            r,a=np.sqrt(x**2+y**2),np.arctan2(y,x)
            return cy+r*np.sin(a+o),cx+r*np.cos(a+o)
        return f
    im[:,:,0]=imwarp(im[:,:,0],warp(-0.02),cycle)
    im[:,:,2]=imwarp(im[:,:,2],warp(0.03),cycle)
    return im
if __name__ == '__main__':
    # Render the pattern, show it on screen, and save it as a PNG.
    im=draw()
    imshow(im)
    imsave(im,'p18.png')
|
{"/video8.py": ["/common.py"], "/video4.py": ["/common.py"], "/p04.py": ["/common.py"], "/p13.py": ["/common.py"], "/video5.py": ["/common.py"], "/p20.py": ["/common.py"], "/p01.py": ["/common.py"], "/p10.py": ["/common.py"], "/p24.py": ["/common.py"], "/p09.py": ["/common.py"], "/video8b.py": ["/common.py"], "/p16.py": ["/common.py"], "/video6.py": ["/common.py"], "/p19.py": ["/common.py"], "/p22.py": ["/common.py"], "/video.py": ["/common.py"], "/video9.py": ["/common.py"], "/p03.py": ["/common.py"], "/p18.py": ["/common.py"], "/video3.py": ["/common.py"], "/p02.py": ["/common.py"], "/p17.py": ["/common.py"], "/video8a.py": ["/common.py"], "/p15.py": ["/common.py"], "/p12.py": ["/common.py"], "/p14.py": ["/common.py"], "/video2.py": ["/common.py"], "/p11.py": ["/common.py"], "/p05.py": ["/common.py"], "/p25.py": ["/common.py"], "/p21.py": ["/common.py"], "/p23.py": ["/common.py"], "/video7.py": ["/common.py"]}
|
10,590
|
fferri/geometric_patterns
|
refs/heads/master
|
/video3.py
|
from common import *
def radial_warp(t):
    """Time-varying polar warp: adds a log-damped cosine ripple to the
    radius and a slowly growing twist to the angle."""
    def f(i,j):
        cx,cy=imgsz/2
        a,r=np.arctan2(i-cy,j-cx),norm.L2(i-cy,j-cx)
        # ripple amplitude shrinks with log(r), so the centre moves most
        r+=(1+np.cos(0.1*t+r*math.pi*8/imgsz[0]))*imgsz[0]/(1+10*np.log(1+r))
        a+=r*t/imgsz[0]/1000   # twist grows with both radius and time
        return cx+np.cos(a)*r,cy+np.sin(a)*r
    return f
# 2048x2048 checkerboard with 128px cells, warped per-frame by draw().
imgsz=np.array([2*1024]*2)
im=checkerboard(imgsz, imgsz//16)
def draw(t=0, **kwargs):
    """Warp the base checkerboard for frame `t` (edges wrap via `cycle`)."""
    return imwarp(im,radial_warp(t),cycle)
if __name__ == '__main__':
    # Render 4000 animation frames to numbered PNG files.
    for t in range(4000):
        print('rendering frame %08d...'%t)
        im1=draw(t)
        imsave(im1,'video3-%08d.png'%t)
|
{"/video8.py": ["/common.py"], "/video4.py": ["/common.py"], "/p04.py": ["/common.py"], "/p13.py": ["/common.py"], "/video5.py": ["/common.py"], "/p20.py": ["/common.py"], "/p01.py": ["/common.py"], "/p10.py": ["/common.py"], "/p24.py": ["/common.py"], "/p09.py": ["/common.py"], "/video8b.py": ["/common.py"], "/p16.py": ["/common.py"], "/video6.py": ["/common.py"], "/p19.py": ["/common.py"], "/p22.py": ["/common.py"], "/video.py": ["/common.py"], "/video9.py": ["/common.py"], "/p03.py": ["/common.py"], "/p18.py": ["/common.py"], "/video3.py": ["/common.py"], "/p02.py": ["/common.py"], "/p17.py": ["/common.py"], "/video8a.py": ["/common.py"], "/p15.py": ["/common.py"], "/p12.py": ["/common.py"], "/p14.py": ["/common.py"], "/video2.py": ["/common.py"], "/p11.py": ["/common.py"], "/p05.py": ["/common.py"], "/p25.py": ["/common.py"], "/p21.py": ["/common.py"], "/p23.py": ["/common.py"], "/video7.py": ["/common.py"]}
|
10,591
|
fferri/geometric_patterns
|
refs/heads/master
|
/p02.py
|
from common import *
def draw(**kwargs):
    """One checkerboard tile XORed with nested squares, repeated over the image."""
    imgsz=np.array([2*1024]*2)
    nested=boxN(imgsz//8, 8)
    checks=checkerboard(imgsz//8, imgsz//16)
    return imtile(checks^nested,imgsz)
if __name__ == '__main__':
    # Render the pattern, show it on screen, and save it as a PNG.
    im=draw()
    imshow(im)
    imsave(im,'p02.png')
|
{"/video8.py": ["/common.py"], "/video4.py": ["/common.py"], "/p04.py": ["/common.py"], "/p13.py": ["/common.py"], "/video5.py": ["/common.py"], "/p20.py": ["/common.py"], "/p01.py": ["/common.py"], "/p10.py": ["/common.py"], "/p24.py": ["/common.py"], "/p09.py": ["/common.py"], "/video8b.py": ["/common.py"], "/p16.py": ["/common.py"], "/video6.py": ["/common.py"], "/p19.py": ["/common.py"], "/p22.py": ["/common.py"], "/video.py": ["/common.py"], "/video9.py": ["/common.py"], "/p03.py": ["/common.py"], "/p18.py": ["/common.py"], "/video3.py": ["/common.py"], "/p02.py": ["/common.py"], "/p17.py": ["/common.py"], "/video8a.py": ["/common.py"], "/p15.py": ["/common.py"], "/p12.py": ["/common.py"], "/p14.py": ["/common.py"], "/video2.py": ["/common.py"], "/p11.py": ["/common.py"], "/p05.py": ["/common.py"], "/p25.py": ["/common.py"], "/p21.py": ["/common.py"], "/p23.py": ["/common.py"], "/video7.py": ["/common.py"]}
|
10,592
|
fferri/geometric_patterns
|
refs/heads/master
|
/p17.py
|
from common import *
def draw(**kwargs):
    """Rainbow-colored logarithmic spiral with 8 angular arms."""
    rad,ang=meshgrid_polar((2048,)*2)
    field=np.sin(ang*8+5*np.log(1+rad))
    return apply_colormap(field,colormap.rainbow)
if __name__ == '__main__':
    # Render the pattern, show it on screen, and save it as a PNG.
    im=draw()
    imshow(im)
    imsave(im,'p17.png')
|
{"/video8.py": ["/common.py"], "/video4.py": ["/common.py"], "/p04.py": ["/common.py"], "/p13.py": ["/common.py"], "/video5.py": ["/common.py"], "/p20.py": ["/common.py"], "/p01.py": ["/common.py"], "/p10.py": ["/common.py"], "/p24.py": ["/common.py"], "/p09.py": ["/common.py"], "/video8b.py": ["/common.py"], "/p16.py": ["/common.py"], "/video6.py": ["/common.py"], "/p19.py": ["/common.py"], "/p22.py": ["/common.py"], "/video.py": ["/common.py"], "/video9.py": ["/common.py"], "/p03.py": ["/common.py"], "/p18.py": ["/common.py"], "/video3.py": ["/common.py"], "/p02.py": ["/common.py"], "/p17.py": ["/common.py"], "/video8a.py": ["/common.py"], "/p15.py": ["/common.py"], "/p12.py": ["/common.py"], "/p14.py": ["/common.py"], "/video2.py": ["/common.py"], "/p11.py": ["/common.py"], "/p05.py": ["/common.py"], "/p25.py": ["/common.py"], "/p21.py": ["/common.py"], "/p23.py": ["/common.py"], "/video7.py": ["/common.py"]}
|
10,593
|
fferri/geometric_patterns
|
refs/heads/master
|
/video8a.py
|
from common import *
imgsz=(2048,)*2
r,a=meshgrid_polar(imgsz)
# Base pattern: log-spaced rings XOR 16 angular wedges.
im=np.float32(np.uint8(np.log(1+r)*4%2)^np.uint8(np.sin(a*16)>0))
im2=im   # running copy, blurred a little more by each draw() call
def draw(t=0, **kwargs):
    """Box-blur the module-level base pattern and threshold it against a
    radius-dependent level; returns a boolean image.

    Bug fix: the original assigned to ``im2`` inside the function, which made
    it a function-local name and raised UnboundLocalError on its first read.
    The blur now runs on a local reference to the module-level array.

    Note ``t`` is currently unused; it is kept so the call signature matches
    the other video scripts.
    """
    blurred = im2  # start from the module-level pattern; loop rebinds locally
    # Fast separable box blur: sum n shifted copies along each axis, then
    # divide by n*n (approximate normalisation; imnormalize rescales anyway).
    for n in (19,):
        for axis in range(2):
            blurred = sum(np.roll(blurred, i, axis) for i in range(-n//2, (n+1)//2))
        blurred /= n*n
    # Threshold grows with radius, darkening the image towards the border.
    return imnormalize(blurred) > (0.25 + r/imgsz[0])*255
if __name__ == '__main__':
    # Render 1000 animation frames to numbered PNG files.
    # Bug fix: the loop variable is ``t``; the original referenced an
    # undefined name ``frame`` and crashed with NameError on the first frame.
    for t in range(1000):
        print('rendering frame %08d...'%t)
        im3=draw(t)
        imsave(im3,'video8a-%08d.png'%t)
|
{"/video8.py": ["/common.py"], "/video4.py": ["/common.py"], "/p04.py": ["/common.py"], "/p13.py": ["/common.py"], "/video5.py": ["/common.py"], "/p20.py": ["/common.py"], "/p01.py": ["/common.py"], "/p10.py": ["/common.py"], "/p24.py": ["/common.py"], "/p09.py": ["/common.py"], "/video8b.py": ["/common.py"], "/p16.py": ["/common.py"], "/video6.py": ["/common.py"], "/p19.py": ["/common.py"], "/p22.py": ["/common.py"], "/video.py": ["/common.py"], "/video9.py": ["/common.py"], "/p03.py": ["/common.py"], "/p18.py": ["/common.py"], "/video3.py": ["/common.py"], "/p02.py": ["/common.py"], "/p17.py": ["/common.py"], "/video8a.py": ["/common.py"], "/p15.py": ["/common.py"], "/p12.py": ["/common.py"], "/p14.py": ["/common.py"], "/video2.py": ["/common.py"], "/p11.py": ["/common.py"], "/p05.py": ["/common.py"], "/p25.py": ["/common.py"], "/p21.py": ["/common.py"], "/p23.py": ["/common.py"], "/video7.py": ["/common.py"]}
|
10,594
|
fferri/geometric_patterns
|
refs/heads/master
|
/p15.py
|
from common import *
def draw(**kwargs):
    """2048x2048 pattern of alternating log-spaced rings, the odd rings
    carved into 16 angular sectors and the even ones into 48, with the
    sector boundaries twisted by radius."""
    imgsz=np.array([2*1024]*2)
    r,a=meshgrid_polar(imgsz,dist=distance.L2)
    # NOTE(review): ``a2=a`` aliases the SAME array, so both ``+=`` below hit
    # it and the shared angle field ends up twisted by 0.002*r.  If two
    # independently twisted fields were intended this should be
    # ``a2=a.copy()`` -- left as-is because the published output depends on it.
    a2=a
    a+=0.001*r
    a2+=0.001*r
    r=np.uint(5*np.log(1+r))%2  # ring parity: alternating log-radius bands
    a=np.uint(np.floor(a*16/math.pi/2))%2  # 16-sector angular parity
    a2=np.uint(np.floor(a2*3*16/math.pi/2))%2  # 48-sector angular parity
    im=r*a|(1-r)*a2  # choose the 16-sector field on odd rings, 48 on even
    return im
if __name__ == '__main__':
    # Script entry point: render the pattern once, preview it, save to p15.png.
    im=draw()
    imshow(im)
    imsave(im,'p15.png')
|
{"/video8.py": ["/common.py"], "/video4.py": ["/common.py"], "/p04.py": ["/common.py"], "/p13.py": ["/common.py"], "/video5.py": ["/common.py"], "/p20.py": ["/common.py"], "/p01.py": ["/common.py"], "/p10.py": ["/common.py"], "/p24.py": ["/common.py"], "/p09.py": ["/common.py"], "/video8b.py": ["/common.py"], "/p16.py": ["/common.py"], "/video6.py": ["/common.py"], "/p19.py": ["/common.py"], "/p22.py": ["/common.py"], "/video.py": ["/common.py"], "/video9.py": ["/common.py"], "/p03.py": ["/common.py"], "/p18.py": ["/common.py"], "/video3.py": ["/common.py"], "/p02.py": ["/common.py"], "/p17.py": ["/common.py"], "/video8a.py": ["/common.py"], "/p15.py": ["/common.py"], "/p12.py": ["/common.py"], "/p14.py": ["/common.py"], "/video2.py": ["/common.py"], "/p11.py": ["/common.py"], "/p05.py": ["/common.py"], "/p25.py": ["/common.py"], "/p21.py": ["/common.py"], "/p23.py": ["/common.py"], "/video7.py": ["/common.py"]}
|
10,595
|
fferri/geometric_patterns
|
refs/heads/master
|
/p12.py
|
from common import *
def draw(**kwargs):
    """Return a 2048x2048 boolean image: log-spaced concentric rings XORed
    with a wavy eight-armed spiral."""
    width, height = 2048, 2048
    gx, gy = np.meshgrid(range(width), range(height))
    cx, cy = gx - width/2, gy - height/2
    radius = np.sqrt(cx**2 + cy**2)
    angle = np.arctan2(cx, cy)
    rings = np.sin(np.log(1 + radius)*math.pi*16) > 0
    spiral = np.sin(4*math.pi*np.cos(angle*8 + 8*np.log(1 + radius))) > 0
    return rings ^ spiral
if __name__ == '__main__':
    # Script entry point: render the pattern once, preview it, save to p12.png.
    im=draw()
    imshow(im)
    imsave(im,'p12.png')
|
{"/video8.py": ["/common.py"], "/video4.py": ["/common.py"], "/p04.py": ["/common.py"], "/p13.py": ["/common.py"], "/video5.py": ["/common.py"], "/p20.py": ["/common.py"], "/p01.py": ["/common.py"], "/p10.py": ["/common.py"], "/p24.py": ["/common.py"], "/p09.py": ["/common.py"], "/video8b.py": ["/common.py"], "/p16.py": ["/common.py"], "/video6.py": ["/common.py"], "/p19.py": ["/common.py"], "/p22.py": ["/common.py"], "/video.py": ["/common.py"], "/video9.py": ["/common.py"], "/p03.py": ["/common.py"], "/p18.py": ["/common.py"], "/video3.py": ["/common.py"], "/p02.py": ["/common.py"], "/p17.py": ["/common.py"], "/video8a.py": ["/common.py"], "/p15.py": ["/common.py"], "/p12.py": ["/common.py"], "/p14.py": ["/common.py"], "/video2.py": ["/common.py"], "/p11.py": ["/common.py"], "/p05.py": ["/common.py"], "/p25.py": ["/common.py"], "/p21.py": ["/common.py"], "/p23.py": ["/common.py"], "/video7.py": ["/common.py"]}
|
10,596
|
fferri/geometric_patterns
|
refs/heads/master
|
/p14.py
|
from common import *
def draw(**kwargs):
    """2048x2048 pattern of nine concentric log-radius bands, the i-th band
    filled with 2**(i-6) alternating angular sectors."""
    size = np.array([2*1024]*2)
    radius, angle = meshgrid_polar(size, dist=distance.L2)
    ring = np.uint(7*np.log(1 + radius))  # integer log-band index per pixel
    canvas = np.zeros(size, dtype=np.uint8)
    for i in range(8, 17):
        lo, hi, sectors = i*3, (i+1)*3, 2**(i-6)
        parity = np.uint(np.floor(angle*sectors/math.pi/2)) % 2
        canvas |= parity * (ring >= lo) * (ring < hi)
    return canvas
if __name__ == '__main__':
    # Script entry point: render the pattern once, preview it, save to p14.png.
    im=draw()
    imshow(im)
    imsave(im,'p14.png')
|
{"/video8.py": ["/common.py"], "/video4.py": ["/common.py"], "/p04.py": ["/common.py"], "/p13.py": ["/common.py"], "/video5.py": ["/common.py"], "/p20.py": ["/common.py"], "/p01.py": ["/common.py"], "/p10.py": ["/common.py"], "/p24.py": ["/common.py"], "/p09.py": ["/common.py"], "/video8b.py": ["/common.py"], "/p16.py": ["/common.py"], "/video6.py": ["/common.py"], "/p19.py": ["/common.py"], "/p22.py": ["/common.py"], "/video.py": ["/common.py"], "/video9.py": ["/common.py"], "/p03.py": ["/common.py"], "/p18.py": ["/common.py"], "/video3.py": ["/common.py"], "/p02.py": ["/common.py"], "/p17.py": ["/common.py"], "/video8a.py": ["/common.py"], "/p15.py": ["/common.py"], "/p12.py": ["/common.py"], "/p14.py": ["/common.py"], "/video2.py": ["/common.py"], "/p11.py": ["/common.py"], "/p05.py": ["/common.py"], "/p25.py": ["/common.py"], "/p21.py": ["/common.py"], "/p23.py": ["/common.py"], "/video7.py": ["/common.py"]}
|
10,597
|
fferri/geometric_patterns
|
refs/heads/master
|
/video2.py
|
from common import *
from functools import reduce
def draw(t=0, **kwargs):
    """Frame ``t`` of an animated interference pattern: concentric rings
    around ten drifting centres (image centre + a rotating triangle + a
    rotating hexagon), with the ring pitch breathing over time."""
    size = np.array((2048, 2048))
    gy, gx = meshgrid_euclidean(size)
    rosettes = ((0, 1, 0),
                (0.2+0.12*math.sin(t*0.03), 3, 0.001*t+math.pi/6),
                (0.4+0.3*math.sin(0.34+0.0174*t), 6, math.sin(0.4+0.0042*t)*math.pi))
    centres = []
    for radius, count, phase in rosettes:
        for k in range(count):
            centres.append([math.cos(phase+k*math.pi*2/count)*radius+0.5,
                            math.sin(phase+k*math.pi*2/count)*radius+0.5])
    dists = [np.sqrt((gx-c[1]*size[1])**2 + (gy-c[0]*size[0])**2) for c in centres]
    nearest = reduce(np.minimum, dists[1:], dists[0])
    return np.sin(nearest*math.pi/(40+10*math.sin(0.43586+0.006342*t))) > 0
if __name__ == '__main__':
    # Render 10000 animation frames to numbered PNG files (no preview).
    for t in range(10000):
        print('rendering frame %08d...'%t)
        im=draw(t)
        imsave(im,'video2-%08d.png'%t)
|
{"/video8.py": ["/common.py"], "/video4.py": ["/common.py"], "/p04.py": ["/common.py"], "/p13.py": ["/common.py"], "/video5.py": ["/common.py"], "/p20.py": ["/common.py"], "/p01.py": ["/common.py"], "/p10.py": ["/common.py"], "/p24.py": ["/common.py"], "/p09.py": ["/common.py"], "/video8b.py": ["/common.py"], "/p16.py": ["/common.py"], "/video6.py": ["/common.py"], "/p19.py": ["/common.py"], "/p22.py": ["/common.py"], "/video.py": ["/common.py"], "/video9.py": ["/common.py"], "/p03.py": ["/common.py"], "/p18.py": ["/common.py"], "/video3.py": ["/common.py"], "/p02.py": ["/common.py"], "/p17.py": ["/common.py"], "/video8a.py": ["/common.py"], "/p15.py": ["/common.py"], "/p12.py": ["/common.py"], "/p14.py": ["/common.py"], "/video2.py": ["/common.py"], "/p11.py": ["/common.py"], "/p05.py": ["/common.py"], "/p25.py": ["/common.py"], "/p21.py": ["/common.py"], "/p23.py": ["/common.py"], "/video7.py": ["/common.py"]}
|
10,598
|
fferri/geometric_patterns
|
refs/heads/master
|
/p11.py
|
from common import *
from functools import reduce
def draw(**kwargs):
    """4096x4096 interference rings around ten fixed centres: the image
    centre, an equilateral triangle, and a hexagon."""
    size = np.array((4096, 4096))
    gy, gx = meshgrid_euclidean(size)
    centres = []
    for radius, count, phase in ((0, 1, 0), (0.2, 3, math.pi/6), (0.4, 6, 0)):
        for k in range(count):
            centres.append([math.cos(phase+k*math.pi*2/count)*radius+0.5,
                            math.sin(phase+k*math.pi*2/count)*radius+0.5])
    dists = [np.sqrt((gx-c[1]*size[1])**2 + (gy-c[0]*size[0])**2) for c in centres]
    nearest = reduce(np.minimum, dists[1:], dists[0])
    return np.sin(nearest*math.pi/40) > 0
if __name__ == '__main__':
    # Script entry point: render the pattern once, preview it, save to p11.png.
    im=draw()
    imshow(im)
    imsave(im,'p11.png')
|
{"/video8.py": ["/common.py"], "/video4.py": ["/common.py"], "/p04.py": ["/common.py"], "/p13.py": ["/common.py"], "/video5.py": ["/common.py"], "/p20.py": ["/common.py"], "/p01.py": ["/common.py"], "/p10.py": ["/common.py"], "/p24.py": ["/common.py"], "/p09.py": ["/common.py"], "/video8b.py": ["/common.py"], "/p16.py": ["/common.py"], "/video6.py": ["/common.py"], "/p19.py": ["/common.py"], "/p22.py": ["/common.py"], "/video.py": ["/common.py"], "/video9.py": ["/common.py"], "/p03.py": ["/common.py"], "/p18.py": ["/common.py"], "/video3.py": ["/common.py"], "/p02.py": ["/common.py"], "/p17.py": ["/common.py"], "/video8a.py": ["/common.py"], "/p15.py": ["/common.py"], "/p12.py": ["/common.py"], "/p14.py": ["/common.py"], "/video2.py": ["/common.py"], "/p11.py": ["/common.py"], "/p05.py": ["/common.py"], "/p25.py": ["/common.py"], "/p21.py": ["/common.py"], "/p23.py": ["/common.py"], "/video7.py": ["/common.py"]}
|
10,599
|
fferri/geometric_patterns
|
refs/heads/master
|
/p05.py
|
from common import *
# 8-sides checkerboard
def draw(**kwargs):
    """Checkerboard pushed through a polar resampling round-trip.

    ``radial_warp`` converts each pixel to polar form and back; algebraically
    that is the identity (arctan2 already lies in (-pi, pi], so the fmod is a
    no-op), so the visible structure presumably comes from the float rounding
    and wrap-around out-of-bounds handling inside imwarp.
    """
    imgsz=np.array([2*1024]*2)
    def radial_warp(i,j):
        # Polar round-trip about the image centre.
        cx,cy=imgsz/2
        a,r=np.arctan2(i-cy,j-cx),np.sqrt((i-cy)**2+(j-cx)**2)
        a=np.fmod(a,math.pi*2)
        return cx+np.cos(a)*r,cy+np.sin(a)*r
    im=checkerboard(imgsz, imgsz//16)
    im=imwarp(im,radial_warp,cycle)  # wrap out-of-range samples around
    return im
if __name__ == '__main__':
    # Script entry point: render the pattern once, preview it, save to p05.png.
    im=draw()
    imshow(im)
    imsave(im,'p05.png')
|
{"/video8.py": ["/common.py"], "/video4.py": ["/common.py"], "/p04.py": ["/common.py"], "/p13.py": ["/common.py"], "/video5.py": ["/common.py"], "/p20.py": ["/common.py"], "/p01.py": ["/common.py"], "/p10.py": ["/common.py"], "/p24.py": ["/common.py"], "/p09.py": ["/common.py"], "/video8b.py": ["/common.py"], "/p16.py": ["/common.py"], "/video6.py": ["/common.py"], "/p19.py": ["/common.py"], "/p22.py": ["/common.py"], "/video.py": ["/common.py"], "/video9.py": ["/common.py"], "/p03.py": ["/common.py"], "/p18.py": ["/common.py"], "/video3.py": ["/common.py"], "/p02.py": ["/common.py"], "/p17.py": ["/common.py"], "/video8a.py": ["/common.py"], "/p15.py": ["/common.py"], "/p12.py": ["/common.py"], "/p14.py": ["/common.py"], "/video2.py": ["/common.py"], "/p11.py": ["/common.py"], "/p05.py": ["/common.py"], "/p25.py": ["/common.py"], "/p21.py": ["/common.py"], "/p23.py": ["/common.py"], "/video7.py": ["/common.py"]}
|
10,600
|
fferri/geometric_patterns
|
refs/heads/master
|
/p25.py
|
from common import *
# floor tiles
def draw(**kwargs):
    """Offset floor-tile pattern: XOR of two blended sine gratings
    (2048x2048 boolean image)."""
    size = (2048, 2048)
    gx, gy = meshgrid_euclidean(size)
    wave = lambda v: np.sin(math.pi*v)
    blend = 0.5
    horiz = wave(gy*8/size[0]) + blend*wave(gx*8/size[1])
    vert = wave(1+gx*8/size[0]) + blend*wave(gy*8/size[1])
    return (horiz > 0) ^ (vert > 0)
if __name__ == '__main__':
    # Script entry point: render the pattern once, preview it, save to p25.png.
    im=draw()
    imshow(im)
    imsave(im,'p25.png')
|
{"/video8.py": ["/common.py"], "/video4.py": ["/common.py"], "/p04.py": ["/common.py"], "/p13.py": ["/common.py"], "/video5.py": ["/common.py"], "/p20.py": ["/common.py"], "/p01.py": ["/common.py"], "/p10.py": ["/common.py"], "/p24.py": ["/common.py"], "/p09.py": ["/common.py"], "/video8b.py": ["/common.py"], "/p16.py": ["/common.py"], "/video6.py": ["/common.py"], "/p19.py": ["/common.py"], "/p22.py": ["/common.py"], "/video.py": ["/common.py"], "/video9.py": ["/common.py"], "/p03.py": ["/common.py"], "/p18.py": ["/common.py"], "/video3.py": ["/common.py"], "/p02.py": ["/common.py"], "/p17.py": ["/common.py"], "/video8a.py": ["/common.py"], "/p15.py": ["/common.py"], "/p12.py": ["/common.py"], "/p14.py": ["/common.py"], "/video2.py": ["/common.py"], "/p11.py": ["/common.py"], "/p05.py": ["/common.py"], "/p25.py": ["/common.py"], "/p21.py": ["/common.py"], "/p23.py": ["/common.py"], "/video7.py": ["/common.py"]}
|
10,601
|
fferri/geometric_patterns
|
refs/heads/master
|
/p21.py
|
from common import *
def draw(**kwargs):
    """32x32 grid of 64-pixel squares, coloured through a red-to-white map
    by how many of the two checker parities (row, column) are set."""
    square = np.array((64,)*2)
    size = square*32
    h, v = [grid//square[0]%2 for grid in meshgrid_euclidean(size)]
    shade = (h+v)/2  # 0, 0.5 or 1 per square
    return apply_colormap(shade, make_colormap([[255,0,0],[255,255,255]],[0,255]))
if __name__ == '__main__':
    # Script entry point: render the pattern once, preview it, save to p21.png.
    im=draw()
    imshow(im)
    imsave(im,'p21.png')
|
{"/video8.py": ["/common.py"], "/video4.py": ["/common.py"], "/p04.py": ["/common.py"], "/p13.py": ["/common.py"], "/video5.py": ["/common.py"], "/p20.py": ["/common.py"], "/p01.py": ["/common.py"], "/p10.py": ["/common.py"], "/p24.py": ["/common.py"], "/p09.py": ["/common.py"], "/video8b.py": ["/common.py"], "/p16.py": ["/common.py"], "/video6.py": ["/common.py"], "/p19.py": ["/common.py"], "/p22.py": ["/common.py"], "/video.py": ["/common.py"], "/video9.py": ["/common.py"], "/p03.py": ["/common.py"], "/p18.py": ["/common.py"], "/video3.py": ["/common.py"], "/p02.py": ["/common.py"], "/p17.py": ["/common.py"], "/video8a.py": ["/common.py"], "/p15.py": ["/common.py"], "/p12.py": ["/common.py"], "/p14.py": ["/common.py"], "/video2.py": ["/common.py"], "/p11.py": ["/common.py"], "/p05.py": ["/common.py"], "/p25.py": ["/common.py"], "/p21.py": ["/common.py"], "/p23.py": ["/common.py"], "/video7.py": ["/common.py"]}
|
10,602
|
fferri/geometric_patterns
|
refs/heads/master
|
/p23.py
|
from common import *
# "the flower of life"
def draw(**kwargs):
    """Render the 'flower of life': overlapping equal circles arranged on a
    triangular lattice, coloured by the weighted number of discs covering
    each pixel."""
    imgsz=(2048,)*2
    r0=imgsz[0]*0.124  # disc radius, roughly 1/8 of the image width
    xy=set([(0,0,0)])  # entries are (x-offset, y-offset, ring index j)
    # Walk the six sectors of the hex lattice; ring j places disc centres on
    # the segment between the two sector corners at distance j*r0, with j-1
    # evenly spaced centres in between.
    for (a1,a2) in [np.array((i,(i+1)%6))*math.pi/3+math.pi/6 for i in range(6)]:
        for j in range(1,5):
            (x1,y1),(x2,y2)=((math.cos(a)*r0*j,math.sin(a)*r0*j) for a in (a1,a2))
            xy.add((x1,y1,j))
            xy.add((x2,y2,j))
            for h in np.linspace(0,1,j+1)[1:-1]:
                xy.add((h*x1+(1-h)*x2,h*y1+(1-h)*y2,j))
    # Each disc contributes its ring index j to the sum, so outer rings are
    # weighted more heavily.
    # NOTE(review): the centre entry has j=0 and therefore adds nothing.
    circles=[1.*j*(meshgrid_distance(imgsz,(imgsz[0]*0.5+x,imgsz[1]*0.5+y))<=r0) for x,y,j in xy]
    from functools import reduce
    im=reduce(lambda a,b: a+b, circles)
    im=apply_colormap(im,colormap.rainbow2)
    return im
if __name__ == '__main__':
    # Script entry point: render the pattern once, preview it, save to p23.png.
    im=draw()
    imshow(im)
    imsave(im,'p23.png')
|
{"/video8.py": ["/common.py"], "/video4.py": ["/common.py"], "/p04.py": ["/common.py"], "/p13.py": ["/common.py"], "/video5.py": ["/common.py"], "/p20.py": ["/common.py"], "/p01.py": ["/common.py"], "/p10.py": ["/common.py"], "/p24.py": ["/common.py"], "/p09.py": ["/common.py"], "/video8b.py": ["/common.py"], "/p16.py": ["/common.py"], "/video6.py": ["/common.py"], "/p19.py": ["/common.py"], "/p22.py": ["/common.py"], "/video.py": ["/common.py"], "/video9.py": ["/common.py"], "/p03.py": ["/common.py"], "/p18.py": ["/common.py"], "/video3.py": ["/common.py"], "/p02.py": ["/common.py"], "/p17.py": ["/common.py"], "/video8a.py": ["/common.py"], "/p15.py": ["/common.py"], "/p12.py": ["/common.py"], "/p14.py": ["/common.py"], "/video2.py": ["/common.py"], "/p11.py": ["/common.py"], "/p05.py": ["/common.py"], "/p25.py": ["/common.py"], "/p21.py": ["/common.py"], "/p23.py": ["/common.py"], "/video7.py": ["/common.py"]}
|
10,603
|
fferri/geometric_patterns
|
refs/heads/master
|
/common.py
|
import math
import numpy as np
from PIL import Image
class norm:
    """Vector norms of a 2-D offset, applied elementwise to scalars or
    numpy arrays."""
    @staticmethod
    def L1(x1, x2):
        """Manhattan norm: |x1| + |x2|."""
        return np.abs(x1) + np.abs(x2)
    @staticmethod
    def L2(x1, x2):
        """Euclidean norm: sqrt(x1^2 + x2^2)."""
        return np.sqrt(x1**2 + x2**2)
    @staticmethod
    def Linf(x1, x2):
        """Chebyshev norm: max(|x1|, |x2|)."""
        return np.maximum(np.abs(x1), np.abs(x2))
# The rest of the module refers to these norms as "distances".
distance = norm
def meshgrid_euclidean(shape):
    """Integer coordinate grids for an image of the given shape.

    Equivalent to ``np.meshgrid(range(shape[0]), range(shape[1]), ...)``;
    np.meshgrid's default 'xy' indexing applies.
    """
    axes = [range(n) for n in shape]
    return np.meshgrid(*axes)
def meshgrid_distance(shape,center=None,dist=distance.L2):
    """Per-pixel distance from ``center`` (default: the image centre) under
    the norm ``dist``, for a grid of the given shape."""
    # NOTE: the two meshgrid outputs are locally named (y, x); for the square
    # images used throughout this repo the distinction only matters off-centre.
    y,x=meshgrid_euclidean(shape)
    if center is None: center=np.array(shape)/2
    y,x=y-center[0],x-center[1]
    return dist(x,y)
def meshgrid_polar(shape,center=None,dist=distance.L2):
    """Per-pixel (radius, angle) about ``center`` (default: image centre).

    Note the angle is ``np.arctan2(x, y)`` -- the arguments are swapped
    relative to the conventional atan2(y, x), so the angle is measured from
    the other axis; all callers in this repo share that convention.
    """
    y,x=meshgrid_euclidean(shape)
    if center is None: center=np.array(shape)/2
    y,x=y-center[0],x-center[1]
    return dist(x,y),np.arctan2(x,y)
def meshgrid_hyperbolic(shape):
    """Hyperbolic-style coordinates for a grid: u is half the log-ratio of
    the (shifted) axes, v their geometric mean.  The +1 shifts avoid
    log(0) on the first row/column."""
    y,x=meshgrid_euclidean(shape)
    u=0.5*(np.log(x+1)-np.log(y+1))
    v=np.sqrt(x*y)
    return u,v
def clip(x,xmax):
    """Clamp coordinate(s) x into the valid index range [0, xmax-1]."""
    lower_bounded = np.maximum(0, x)
    return np.minimum(xmax - 1, lower_bounded)
def cycle(x,xmax):
    """Wrap coordinate(s) x into [0, xmax) -- periodic out-of-bounds handling.

    Bug fix: the original np.ceil + np.fmod construction returned NEGATIVE
    values for negative x (e.g. cycle(-3, 10) == -3); downstream image
    lookups only worked by accident through numpy's negative indexing.
    ``np.mod`` implements the intended floored wrap directly, so
    cycle(-3, 10) == 7.
    """
    return np.mod(x, xmax)
def imwarp(im,fn,oob=clip):
    """Inverse-warp an image: output pixel (i, j) takes the value of input
    pixel ``fn(i, j)``, with out-of-range source coordinates mapped back
    into range by ``oob`` (clamp by default, or e.g. ``cycle`` to wrap).

    ``fn`` must accept and return arrays of row/column coordinates.
    Returns a new array; ``im`` is not modified.
    """
    warped=np.zeros_like(im)
    i,j=meshgrid_euclidean(im.shape)
    h,k=fn(i,j)
    h,k=oob(h,im.shape[0]),oob(k,im.shape[1])
    h,k=np.int32(h),np.int32(k)
    # Flatten so a single fancy-indexing assignment performs the whole copy.
    i,j,h,k=map(lambda x: x.reshape(-1), (i,j,h,k))
    warped[i,j]=im[h,k]
    return warped
def imblt(im,op,x,y,srcim,srcx1=0,srcy1=0,srcx2=None,srcy2=None):
    """Blit a (sub)rectangle of ``srcim`` onto ``im`` at column x, row y,
    combining old and new pixels with the binary array operation ``op``
    (e.g. np.maximum).  Modifies ``im`` in place."""
    patch = srcim[srcy1:srcy2, srcx1:srcx2]
    rows = slice(y, y + patch.shape[0])
    cols = slice(x, x + patch.shape[1])
    im[rows, cols] = op(im[rows, cols], patch)
def imcircle(im,x,y,r):
    """Draw a filled circle of radius r centred at (x, y) into ``im`` in
    place, by max-blitting a boolean disc mask (existing brighter pixels
    are kept)."""
    x,y,r=map(int,(x,y,r))
    s=meshgrid_distance((2*r,)*2)<=r  # 2r x 2r boolean disc mask
    imblt(im,np.maximum,int(x-r),int(y-r),s)
def imtile(im,shape):
    """Tile ``im`` until it covers ``shape`` and crop any overhang."""
    reps = tuple(int(0.5 + shape[k] / im.shape[k]) for k in range(2))
    tiled = np.kron(np.ones(reps, dtype=np.uint8), im)
    return tiled[:shape[0], :shape[1]]
def checkerboard(shape,sqshape,inv=False):
    """Checkerboard of 0/1 squares.

    shape, sqshape: (rows, cols) tuples, or scalars for square values.
    inv: invert the board (swap the 0 and 1 squares).

    Fixes: the original tested ``isinstance(...) == 1`` (isinstance already
    returns a bool), and silently ignored ``inv``; it is now honoured.  The
    default behaviour is unchanged.
    """
    if isinstance(shape,(int,float)): shape=(int(shape),int(shape))
    if isinstance(sqshape,(int,float)): sqshape=(int(sqshape),int(sqshape))
    y,x=np.meshgrid(*map(range,shape))
    board=(x//sqshape[1]%2)^(y//sqshape[0]%2)
    return board^1 if inv else board
def box2(shape,delta):
    """0/1 mask with a rectangle of ones inset ``delta`` pixels from each
    edge of an image of the given shape."""
    mask = np.zeros(shape, dtype=np.uint8)
    rows = slice(delta[0], shape[0] - delta[0])
    cols = slice(delta[1], shape[1] - delta[1])
    mask[rows, cols] = 1
    return mask
def boxN(shape,n):
    """Nested 0/1 square frames: Chebyshev distance from the image centre,
    bucketed into bands of width max(shape)//(2n) and taken mod 2."""
    box=meshgrid_distance(shape,None,distance.Linf)
    l=max(shape)//(n*2)  # band width in pixels
    box=box//l%2
    return np.uint8(box)
def imnormalize(im):
    """Affinely rescale an array so its values span [0, 255].

    Returns a new float array; a constant input comes back as all zeros.

    Fixes two defects of the original: it mutated the caller's array in
    place (``im -= min``), and it raised TypeError on boolean images --
    which most draw() functions in this project produce -- because numpy
    forbids the ``-`` operator on bool arrays.
    """
    im = np.asarray(im, dtype=np.float64)
    im = im - im.min()  # new array: the input is left untouched
    peak = im.max()
    if peak > 0:
        im = im * 255 / peak
    return im
def imshow(im,normalize=True):
    """Display an array with PIL.

    2-D arrays are (optionally) rescaled to [0, 255] and shown as float32
    greyscale; (H, W, 3) arrays are shown as uint8 RGB.
    """
    if len(im.shape)==2:
        if normalize: im=imnormalize(im)
        im=np.float32(im)
    if len(im.shape)==3 and im.shape[2]==3:
        im=np.uint8(im)
    im=Image.fromarray(im)
    im.show()
def imsave(im,filename,normalize=True):
    """Save an array to an image file via PIL.

    2-D arrays are (optionally) rescaled to [0, 255] first; the array is
    cast to uint8 unconditionally, so RGB input is assumed to already be
    in 0-255.
    """
    if len(im.shape)==2:
        if normalize: im=imnormalize(im)
    im=Image.fromarray(np.uint8(im))
    im.save(filename)
def imload(filename):
    """Load an image file into an (H, W, 3) array.

    NOTE(review): assumes a 3-channel image -- a greyscale or RGBA file
    would make the resize below fail or mangle the data; confirm inputs
    are RGB.
    """
    im=Image.open(filename)
    arr=np.asarray(im.getdata())
    arr.resize(im.height, im.width, 3)
    return arr
def apply_colormap(im,cmap,prenormalize=True):
    """Map a scalar image through a 256-entry RGB colormap.

    ``cmap`` may be a (256, 3) array or a zero-argument callable returning
    one (e.g. the colormap.* factories).  Unless ``prenormalize`` is False,
    the image is first rescaled to [0, 255].  Returns an array of shape
    im.shape + (3,).
    """
    if callable(cmap): cmap=cmap()
    if cmap.shape != (256,3): raise ValueError('colormap must be 256x3 uint8 values')
    if prenormalize: im=imnormalize(im)
    return cmap[np.uint8(im.reshape(-1))].reshape(im.shape+(3,))
def make_colormap(colors,positions=None):
    """Build a (256, 3) uint8 colormap by linearly interpolating ``colors``
    between the given 0-255 integer ``positions`` (default: evenly spaced).

    Raises ValueError on malformed colors/positions.  Fix: two consecutive
    equal positions used to raise ZeroDivisionError; such zero-width
    segments now simply write the later color at that position.
    """
    if positions is None: positions=np.uint8(np.linspace(0,255,len(colors)))
    colors=np.array(colors)
    if colors.shape[1] != 3: raise ValueError('colors must be Nx3 uint8 values')
    if len(positions) != colors.shape[0]: raise ValueError('positions must be an array of %d floating point values' % colors.shape[0])
    if any(pos < 0 or pos > 255 for pos in positions): raise ValueError('positions must be between 0 and 255')
    cmap=np.zeros((256,3), dtype=np.uint8)
    for c1,c2,p1,p2 in zip(colors,colors[1:],positions,positions[1:]):
        if p2 == p1:
            # Zero-width segment: no interpolation possible, keep the later color.
            cmap[p1,:]=c2
            continue
        for i in range(p1,p2+1):
            x=(i-p1)/(p2-p1)
            cmap[i,:]=c1*(1-x)+c2*x
    return np.uint8(cmap)
class colormap:
    """Factory functions, each returning a (256, 3) uint8 RGB colormap."""
    @staticmethod
    def rainbow():
        """Full hue wheel: red -> yellow -> green -> cyan -> blue -> magenta -> red."""
        cc=[[255,0,0],[255,255,0],[0,255,0],[0,255,255],[0,0,255],[255,0,255],[255,0,0]]
        pp=[0,25,76,127,178,229,255]
        return make_colormap(cc,pp)
    @staticmethod
    def rainbow2(offset=0.0):
        """Smooth cosine-based rainbow; ``offset`` shifts the hue phase."""
        cmap=np.zeros((256,3), dtype=np.uint8)
        for i in range(256):
            for j in range(3):
                # Each channel is a cosine of the index, phase-shifted by 2pi/3.
                cmap[i,j]=127.5*(1+math.cos(offset+math.pi*(i*2/255-2*j/3+0)))
        return cmap
    @staticmethod
    def jet():
        """Blue -> cyan -> green -> yellow -> red ramp (jet-style)."""
        cc=[[0,0,255],[0,255,255],[130,255,130],[255,255,10],[255,0,0],[130,0,0]]
        pp=[0,95,125,160,235,255]
        return make_colormap(cc,pp)
    @staticmethod
    def hot():
        """Heat-style ramp: black -> red -> yellow -> white."""
        cc=[[0,0,0],[255,0,0],[255,255,0],[255,255,255]]
        pp=[0,95,185,255]
        return make_colormap(cc,pp)
    @staticmethod
    def cold():
        """Blue counterpart of hot(): black -> blue -> cyan -> white."""
        cc=[[0,0,0],[0,0,255],[0,255,255],[255,255,255]]
        pp=[0,95,185,255]
        return make_colormap(cc,pp)
    @staticmethod
    def contours(n,w=1):
        """Black map with n evenly spaced white bands of width ~w pixels,
        for drawing contour lines over a scalar image."""
        cmap=np.zeros((256,3), dtype=np.uint8)
        for p in np.linspace(0,255,2+n)[1:-1]:
            cmap[int(p-w/2):int(p+w/2)+1,:]=[255,255,255]
        return cmap
def mkline(start, end):
    """Rasterize the segment start -> end with Bresenham's line algorithm.

    Returns (X, Y): parallel lists of integer pixel coordinates ordered
    from start to end, inclusive of both endpoints.
    """
    x0, y0 = (int(v) for v in start)
    x1, y1 = (int(v) for v in end)
    # Work in the octant where |slope| <= 1 by transposing steep lines.
    steep = abs(y1 - y0) > abs(x1 - x0)
    if steep:
        x0, y0 = y0, x0
        x1, y1 = y1, x1
    # Always march left-to-right; remember if we must reverse the result.
    flipped = x0 > x1
    if flipped:
        x0, x1 = x1, x0
        y0, y1 = y1, y0
    dx = x1 - x0
    dy = y1 - y0
    err = int(dx / 2.0)
    step = 1 if y0 < y1 else -1
    y = y0
    xs, ys = [], []
    for x in range(x0, x1 + 1):
        # Undo the transposition when recording the pixel.
        xs.append(y if steep else x)
        ys.append(x if steep else y)
        err -= abs(dy)
        if err < 0:
            y += step
            err += dx
    if flipped:
        xs.reverse()
        ys.reverse()
    return xs, ys
def imline(im, start, end, value=255, alpha=1.0):
    """Draw a straight line from start to end into ``im`` in place,
    alpha-blending ``value`` over the existing pixels.

    The first rasterized point (the start pixel itself) is skipped --
    presumably so that chained segments do not double-blend their shared
    endpoints; confirm against callers.
    """
    x, y = mkline(start, end)
    x, y = x[1:], y[1:]
    im[x,y] = alpha*value + (1-alpha)*im[x,y]
|
{"/video8.py": ["/common.py"], "/video4.py": ["/common.py"], "/p04.py": ["/common.py"], "/p13.py": ["/common.py"], "/video5.py": ["/common.py"], "/p20.py": ["/common.py"], "/p01.py": ["/common.py"], "/p10.py": ["/common.py"], "/p24.py": ["/common.py"], "/p09.py": ["/common.py"], "/video8b.py": ["/common.py"], "/p16.py": ["/common.py"], "/video6.py": ["/common.py"], "/p19.py": ["/common.py"], "/p22.py": ["/common.py"], "/video.py": ["/common.py"], "/video9.py": ["/common.py"], "/p03.py": ["/common.py"], "/p18.py": ["/common.py"], "/video3.py": ["/common.py"], "/p02.py": ["/common.py"], "/p17.py": ["/common.py"], "/video8a.py": ["/common.py"], "/p15.py": ["/common.py"], "/p12.py": ["/common.py"], "/p14.py": ["/common.py"], "/video2.py": ["/common.py"], "/p11.py": ["/common.py"], "/p05.py": ["/common.py"], "/p25.py": ["/common.py"], "/p21.py": ["/common.py"], "/p23.py": ["/common.py"], "/video7.py": ["/common.py"]}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.