index
int64 | repo_name
string | branch_name
string | path
string | content
string | import_graph
string |
|---|---|---|---|---|---|
11,623
|
santander-syngenta/rc
|
refs/heads/master
|
/blog/views.py
|
from django.shortcuts import render, redirect
from django.http import HttpResponse
from rest_framework.renderers import TemplateHTMLRenderer
from rest_framework.views import APIView
from rest_framework.response import Response
from django.shortcuts import get_object_or_404
from rest_framework.parsers import MultiPartParser
# Create your views here.
from .models import *
from api.models import *
from api.serializers import *
from django.views.decorators.clickjacking import xframe_options_exempt
import json
def login(request):
    # NOTE(review): renders the home template, not a dedicated login page —
    # confirm this is intentional.
    return render(request, 'blog/home.html')
def home(request):
    """Render the landing page with announcements and content counts.

    Announcements are serialized to JSON keyed by their position in the
    queryset; the four counts feed the dashboard widgets in home2.html.
    """
    # Replace the manual x/d counter loop with enumerate. json.dumps turns the
    # int keys into strings either way, so the payload is unchanged.
    announcements = {i: row for i, row in enumerate(Announcement.objects.values())}
    methods = Form.objects.count()
    links = Link.objects.count()
    contents = Content.objects.count()
    forms = Form2.objects.count()
    context = {'text': json.dumps(announcements),
               'forms': [forms, links, contents, methods]}
    return render(request, 'blog/home2.html', context)
def methods(request):
    # Renders the static methods page.
    return render(request, 'blog/methods.html')


def training(request):
    # Renders the static training page.
    return render(request, 'blog/training.html')


def files(request):
    # Renders the static files page.
    return render(request, 'blog/files.html')
# NOTE(review): the API host was hard-coded four times; hoisted to one
# module-level constant. It still points at an internal address — consider
# moving it to Django settings.
_API_BASE = 'http://172.20.57.135:88/api/'


def display(request, pk):
    """Render the detail page, passing the form-detail API URL for *pk*."""
    url = _API_BASE + 'form-detail/' + pk + '/'
    return render(request, 'blog/display.html', {'url': url})


def display2(request, pk):
    """Render the detail page, passing the content-detail API URL for *pk*."""
    url = _API_BASE + 'content-detail/' + pk + '/'
    return render(request, 'blog/display.html', {'url': url})


def display3(request, pk):
    """Render the detail page, passing the form2-detail API URL for *pk*."""
    url = _API_BASE + 'form2-detail/' + pk + '/'
    return render(request, 'blog/display.html', {'url': url})


def display4(request, pk):
    """Render the alternate detail page for content-detail.

    NOTE(review): same API endpoint as display2 but a different template —
    confirm that is intended.
    """
    url = _API_BASE + 'content-detail/' + pk + '/'
    return render(request, 'blog/display4.html', {'url': url})
def tagDB(request):
    # Renders the tag database page.
    return render(request, 'blog/tag.html')


def resources(request):
    # Renders the resources page.
    return render(request, 'blog/resources.html')


def links(request):
    # Renders the links page.
    return render(request, 'blog/links.html')


def blog(request):
    # Placeholder endpoint for a future blog page.
    return HttpResponse('This will be the blog')


def search(request, pk):
    # Renders the search page with the search term/key passed through.
    context = {'pk': pk}
    return render(request, 'blog/search.html', context)


def forms(request):
    # Renders the forms page.
    return render(request, 'blog/forms.html')


def trainingUpload(request):
    # Renders the training-upload page.
    return render(request, 'blog/trainingUpload.html')


def resourceFormUpload(request):
    # Renders the resource-form-upload page.
    return render(request, 'blog/resourceFormUpload.html')


def linkUpload(request):
    # Renders the link-upload page.
    return render(request, 'blog/linkUpload.html')


def support(request):
    # Renders the user-facing support page.
    return render(request, 'blog/supportUser.html')


def supportAdmin(request):
    # Renders the admin support page.
    return render(request, 'blog/support.html')


def calculator(request):
    # Renders the calculator page.
    return render(request, 'blog/calculator.html')
|
{"/api/admin.py": ["/api/models.py"], "/api/forms.py": ["/api/models.py"], "/blog/views.py": ["/blog/models.py", "/api/models.py", "/api/serializers.py"], "/api/serializers.py": ["/api/models.py"], "/api/views.py": ["/api/serializers.py", "/api/models.py", "/api/forms.py"]}
|
11,624
|
santander-syngenta/rc
|
refs/heads/master
|
/blog/migrations/0003_delete_methods.py
|
# Generated by Django 3.0.8 on 2020-10-13 20:15
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated migration: removes the obsolete ``Methods`` model."""

    dependencies = [
        ('blog', '0002_methods'),
    ]
    operations = [
        migrations.DeleteModel(
            name='Methods',
        ),
    ]
|
{"/api/admin.py": ["/api/models.py"], "/api/forms.py": ["/api/models.py"], "/blog/views.py": ["/blog/models.py", "/api/models.py", "/api/serializers.py"], "/api/serializers.py": ["/api/models.py"], "/api/views.py": ["/api/serializers.py", "/api/models.py", "/api/forms.py"]}
|
11,625
|
santander-syngenta/rc
|
refs/heads/master
|
/api/migrations/0011_content_file2.py
|
# Generated by Django 3.1 on 2020-12-31 13:10
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: adds an optional second file to ``Content``."""

    dependencies = [
        ('api', '0010_auto_20201210_0809'),
    ]
    operations = [
        migrations.AddField(
            model_name='content',
            name='file2',
            field=models.FileField(blank=True, null=True, upload_to='documents/training/'),
        ),
    ]
|
{"/api/admin.py": ["/api/models.py"], "/api/forms.py": ["/api/models.py"], "/blog/views.py": ["/blog/models.py", "/api/models.py", "/api/serializers.py"], "/api/serializers.py": ["/api/models.py"], "/api/views.py": ["/api/serializers.py", "/api/models.py", "/api/forms.py"]}
|
11,626
|
santander-syngenta/rc
|
refs/heads/master
|
/api/migrations/0003_auto_20201006_1457.py
|
# Generated by Django 3.0.8 on 2020-10-06 18:57
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: adds creation timestamps to Form and Link."""

    dependencies = [
        ('api', '0002_auto_20201005_1329'),
    ]
    operations = [
        migrations.AddField(
            model_name='form',
            name='date',
            field=models.DateTimeField(auto_now_add=True, null=True),
        ),
        migrations.AddField(
            model_name='link',
            name='date',
            field=models.DateTimeField(auto_now_add=True, null=True),
        ),
    ]
|
{"/api/admin.py": ["/api/models.py"], "/api/forms.py": ["/api/models.py"], "/blog/views.py": ["/blog/models.py", "/api/models.py", "/api/serializers.py"], "/api/serializers.py": ["/api/models.py"], "/api/views.py": ["/api/serializers.py", "/api/models.py", "/api/forms.py"]}
|
11,627
|
santander-syngenta/rc
|
refs/heads/master
|
/blog/migrations/0005_announcement_text2.py
|
# Generated by Django 3.1 on 2020-12-15 17:19
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: adds the ``text2`` field to Announcement."""

    dependencies = [
        ('blog', '0004_announcement'),
    ]
    operations = [
        migrations.AddField(
            model_name='announcement',
            name='text2',
            field=models.TextField(null=True),
        ),
    ]
|
{"/api/admin.py": ["/api/models.py"], "/api/forms.py": ["/api/models.py"], "/blog/views.py": ["/blog/models.py", "/api/models.py", "/api/serializers.py"], "/api/serializers.py": ["/api/models.py"], "/api/views.py": ["/api/serializers.py", "/api/models.py", "/api/forms.py"]}
|
11,628
|
santander-syngenta/rc
|
refs/heads/master
|
/api/migrations/0001_initial.py
|
# Generated by Django 3.0.8 on 2020-10-05 16:21
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated initial migration: creates the Form and Link models."""

    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='Form',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=300, null=True)),
                ('file', models.FileField(null=True, upload_to='documents/')),
            ],
        ),
        migrations.CreateModel(
            name='Link',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=200)),
                ('url', models.CharField(max_length=200)),
            ],
        ),
    ]
|
{"/api/admin.py": ["/api/models.py"], "/api/forms.py": ["/api/models.py"], "/blog/views.py": ["/blog/models.py", "/api/models.py", "/api/serializers.py"], "/api/serializers.py": ["/api/models.py"], "/api/views.py": ["/api/serializers.py", "/api/models.py", "/api/forms.py"]}
|
11,629
|
santander-syngenta/rc
|
refs/heads/master
|
/api/serializers.py
|
from rest_framework import serializers
from .models import *
class FormSerializer(serializers.ModelSerializer):
    """Serialize every field of ``Form``."""
    # NOTE(review): the original body contained a bare
    # ``serializers.DateField(format=..., input_formats=[...])`` expression
    # that was never assigned to an attribute — it created a field object and
    # immediately discarded it, so it had no effect on serialization. The dead
    # statement has been removed. To actually control the date formatting,
    # declare the field explicitly, e.g.:
    #   date = serializers.DateTimeField(format='%b %d, %Y', required=False)

    class Meta:
        model = Form
        fields = '__all__'


class TagSerializer(serializers.ModelSerializer):
    """Serialize every field of ``FormTags``."""

    class Meta:
        model = FormTags
        fields = '__all__'


class LinkSerializer(serializers.ModelSerializer):
    """Serialize every field of ``Link``."""
    # Dead, unassigned DateField expression removed (see FormSerializer note).

    class Meta:
        model = Link
        fields = '__all__'


class ContentSerializer(serializers.ModelSerializer):
    """Serialize every field of ``Content``."""
    # Dead, unassigned DateField expression removed (see FormSerializer note).

    class Meta:
        model = Content
        fields = '__all__'


class SubjectSerializer(serializers.ModelSerializer):
    """Serialize every field of ``Subject``."""

    class Meta:
        model = Subject
        fields = '__all__'


class Form2Serializer(serializers.ModelSerializer):
    """Serialize every field of ``Form2``."""
    # Dead, unassigned DateField expression removed (see FormSerializer note).

    class Meta:
        model = Form2
        fields = '__all__'


class ContactSerializer(serializers.ModelSerializer):
    """Serialize every field of ``Contact``."""
    # Dead, unassigned DateField expression removed (see FormSerializer note).

    class Meta:
        model = Contact
        fields = '__all__'
|
{"/api/admin.py": ["/api/models.py"], "/api/forms.py": ["/api/models.py"], "/blog/views.py": ["/blog/models.py", "/api/models.py", "/api/serializers.py"], "/api/serializers.py": ["/api/models.py"], "/api/views.py": ["/api/serializers.py", "/api/models.py", "/api/forms.py"]}
|
11,630
|
santander-syngenta/rc
|
refs/heads/master
|
/blog/migrations/0006_auto_20210105_1209.py
|
# Generated by Django 3.1 on 2021-01-05 20:09
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated migration: renames Announcement fields
    (``text2`` -> ``body``, ``text`` -> ``headline``)."""

    dependencies = [
        ('blog', '0005_announcement_text2'),
    ]
    operations = [
        migrations.RenameField(
            model_name='announcement',
            old_name='text2',
            new_name='body',
        ),
        migrations.RenameField(
            model_name='announcement',
            old_name='text',
            new_name='headline',
        ),
    ]
|
{"/api/admin.py": ["/api/models.py"], "/api/forms.py": ["/api/models.py"], "/blog/views.py": ["/blog/models.py", "/api/models.py", "/api/serializers.py"], "/api/serializers.py": ["/api/models.py"], "/api/views.py": ["/api/serializers.py", "/api/models.py", "/api/forms.py"]}
|
11,631
|
santander-syngenta/rc
|
refs/heads/master
|
/api/urls.py
|
from django.urls import path, include, re_path
from . import views
from django.views.generic.base import TemplateView
# URL routes for the REST API. Groups follow a common CRUD pattern:
# list / detail / create / delete / update, plus HTML upload form views.
urlpatterns = [
    path('', views.apiOverview, name='api-overview'),
    # FormTags CRUD
    path('tag-list/', views.tagList, name='tag-list'),
    path('tag-detail/<str:pk>/', views.tagDetail, name='tag-detail'),
    path('tag-create/', views.tagCreate, name='tag-create'),
    path('tag-delete/<str:pk>/', views.tagDelete, name='tag-delete'),
    path('tag-update/<str:pk>/', views.tagUpdate, name='tag-update'),
    # Form CRUD + upload page
    path('form-list/', views.formList, name='form-list'),
    path('form-detail/<str:pk>/', views.formDetail, name='form-detail'),
    path('form-create/', views.formCreate, name='form-create'),
    path('form-delete/<str:pk>/', views.formDelete, name='form-delete'),
    path('form-update/<str:pk>/', views.formUpdate, name='form-update'),
    path('uploadForm/', views.uploadForm, name = 'uploadForm'),
    # Link CRUD
    path('link-list/', views.linkList, name='link-list'),
    path('link-detail/<str:pk>/', views.linkDetail, name='link-detail'),
    path('link-create/', views.linkCreate, name='link-create'),
    path('link-delete/<str:pk>/', views.linkDelete, name='link-delete'),
    path('link-update/<str:pk>/', views.linkUpdate, name='link-update'),
    # Training content (no create route; uploads go through uploadTraining)
    path('uploadTraining/', views.uploadTraining, name = 'uploadTraining'),
    path('content-list/', views.contentList, name = 'content-list'),
    path('content-detail/<str:pk>/', views.contentDetail, name='content-detail'),
    path('content-delete/<str:pk>/', views.contentDelete, name='content-delete'),
    path('content-update/<str:pk>/', views.contentUpdate, name='content-update'),
    path('subject-list/', views.subjectList, name = 'subject-list'),
    # Form2 (resources) CRUD + upload page
    path('form2-list/', views.form2List, name='form2-list'),
    path('form2-detail/<str:pk>/', views.form2Detail, name='form2-detail'),
    path('form2-delete/<str:pk>/', views.form2Delete, name='form2-delete'),
    path('form2-update/<str:pk>/', views.form2Update, name='form2-update'),
    path('uploadResource/', views.uploadResourceFunc, name='uploadResource'),
    # Contact CRUD
    path('contact-list/', views.contactList, name='contact-list'),
    path('contact-detail/<str:pk>/', views.contactDetail, name='contact-detail'),
    path('contact-create/', views.contactCreate, name='contact-create'),
    path('contact-delete/<str:pk>/', views.contactDelete, name='contact-delete'),
    path('contact-update/<str:pk>/', views.contactUpdate, name='contact-update'),
]
|
{"/api/admin.py": ["/api/models.py"], "/api/forms.py": ["/api/models.py"], "/blog/views.py": ["/blog/models.py", "/api/models.py", "/api/serializers.py"], "/api/serializers.py": ["/api/models.py"], "/api/views.py": ["/api/serializers.py", "/api/models.py", "/api/forms.py"]}
|
11,632
|
santander-syngenta/rc
|
refs/heads/master
|
/blog/models.py
|
from django.db import models
# Create your models here.
class Question(models.Model):
    """A free-text question with its submission timestamp."""

    txt = models.TextField()
    asked = models.DateTimeField(auto_now_add=True)


class Answer(models.Model):
    """An answer tied to one Question; deleted along with it."""

    question = models.ForeignKey(Question, on_delete=models.CASCADE)
    answer = models.TextField()
    answered = models.DateTimeField(auto_now_add=True)


class Announcement(models.Model):
    """A site announcement: headline, optional body and optional link."""

    headline = models.TextField()
    body = models.TextField(null=True)
    link = models.CharField(max_length=200, null=True, blank=True)
|
{"/api/admin.py": ["/api/models.py"], "/api/forms.py": ["/api/models.py"], "/blog/views.py": ["/blog/models.py", "/api/models.py", "/api/serializers.py"], "/api/serializers.py": ["/api/models.py"], "/api/views.py": ["/api/serializers.py", "/api/models.py", "/api/forms.py"]}
|
11,633
|
santander-syngenta/rc
|
refs/heads/master
|
/api/views.py
|
from django.shortcuts import render, redirect
from django.http import JsonResponse
from django.contrib.auth.models import User
from rest_framework.decorators import api_view, authentication_classes, parser_classes, renderer_classes
from rest_framework.authentication import SessionAuthentication, BasicAuthentication, TokenAuthentication
from rest_framework.response import Response
from rest_framework.parsers import MultiPartParser, FormParser
from rest_framework.renderers import MultiPartRenderer
from .serializers import *
from .models import *
from .forms import *
# Create your views here.
@api_view(['GET'])
def apiOverview(request):
    """Return a human-readable index of the API's endpoints.

    NOTE(review): some entries look stale relative to urls.py (e.g.
    'Training Detail' points at '/training-list' while the actual route is
    'content-detail') — confirm and update the map.
    """
    api_urls = {
        'Form List': '/form-list/',
        'form Detail': '/form-detail/<str:pk>/',
        'Create form': 'form-create/',
        'Update form': 'form-update/<str:pk>/',
        'Delete form': 'form-delete/<str:pk>/',
        'Tag List': '/tag-list/',
        'Tag Detail': '/tag-detail/<str:pk>/',
        'Create tag': 'tag-create/',
        'Update tag': 'tag-update/<str:pk>/',
        'Delete tag': 'tag-delete/<str:pk>/',
        'Link List': '/link-list',
        'Link Detail': '/link-detail/<str:pk>/',
        'Create link':'/link-create/',
        'Update link': '/link-update/<str:pk>/',
        'Delete link': '/link-delete/<str:pk>/',
        'Training List': '/content-list',
        'Training Detail': '/training-list',
        'Update Training': '/training-update',
        'Delete Training': '/training-delete',
        'Subject List': '/subject-list',
        'Resource List':'/form2-list/',
        'Resource Detail': '/form2-detail/<str:pk>/',
        'Update Resource': '/form2-update/<str:pk>/',
        'Delete Resource': '/form2-delete/',
    }
    return Response(api_urls)
@api_view(['GET'])
def linkList(request):
    """Return every Link, ordered by title."""
    queryset = Link.objects.all().order_by('title')
    return Response(LinkSerializer(queryset, many=True).data)


@api_view(['GET'])
def linkDetail(request, pk):
    """Return a single Link by primary key."""
    record = Link.objects.get(id=pk)
    return Response(LinkSerializer(record, many=False).data)


@api_view(['POST'])
def linkCreate(request):
    """Create a Link from the posted payload."""
    ser = LinkSerializer(data=request.data, many=False)
    if ser.is_valid():
        ser.save()
    return Response(ser.data)


@api_view(['POST'])
def linkUpdate(request, pk):
    """Apply a partial update to an existing Link."""
    record = Link.objects.get(id=pk)
    ser = LinkSerializer(instance=record, data=request.data, partial=True)
    if ser.is_valid():
        ser.save()
    return Response(ser.data)


@api_view(['DELETE'])
def linkDelete(request, pk):
    """Delete a Link by primary key."""
    Link.objects.get(id=pk).delete()
    return Response('Item Successfully Deleted')
@api_view(['GET'])
def contactList(request):
    """Return every Contact, ordered by name."""
    queryset = Contact.objects.all().order_by('name')
    return Response(ContactSerializer(queryset, many=True).data)


@api_view(['GET'])
def contactDetail(request, pk):
    """Return a single Contact by primary key."""
    record = Contact.objects.get(id=pk)
    return Response(ContactSerializer(record, many=False).data)


@api_view(['POST'])
def contactCreate(request):
    """Create a Contact from the posted payload."""
    ser = ContactSerializer(data=request.data, many=False)
    if ser.is_valid():
        ser.save()
    return Response(ser.data)


@api_view(['POST'])
def contactUpdate(request, pk):
    """Apply a partial update to an existing Contact."""
    record = Contact.objects.get(id=pk)
    ser = ContactSerializer(instance=record, data=request.data, partial=True)
    if ser.is_valid():
        ser.save()
    return Response(ser.data)


@api_view(['DELETE'])
def contactDelete(request, pk):
    """Delete a Contact by primary key."""
    Contact.objects.get(id=pk).delete()
    return Response('Item Successfully Deleted!')
@api_view(['GET'])
def tagList(request):
    """Return every FormTags record, ordered by name."""
    queryset = FormTags.objects.all().order_by('name')
    return Response(TagSerializer(queryset, many=True).data)


@api_view(['GET'])
def tagDetail(request, pk):
    """Return a single FormTags record by primary key."""
    record = FormTags.objects.get(id=pk)
    return Response(TagSerializer(record, many=False).data)


@api_view(['POST'])
def tagCreate(request):
    """Create a FormTags record from the posted payload."""
    ser = TagSerializer(data=request.data, many=False)
    if ser.is_valid():
        ser.save()
    return Response(ser.data)


@api_view(['POST'])
def tagUpdate(request, pk):
    """Apply a partial update to an existing FormTags record."""
    record = FormTags.objects.get(id=pk)
    ser = TagSerializer(instance=record, data=request.data, partial=True)
    if ser.is_valid():
        ser.save()
    return Response(ser.data)


@api_view(['DELETE'])
def tagDelete(request, pk):
    """Delete a FormTags record by primary key."""
    FormTags.objects.get(id=pk).delete()
    return Response('Item Successfully Deleted!')
@api_view(['GET'])
def formList(request):
    """Return every Form, ordered by title."""
    queryset = Form.objects.all().order_by('title')
    return Response(FormSerializer(queryset, many=True).data)


@api_view(['GET'])
def formDetail(request, pk):
    """Return a single Form by primary key."""
    record = Form.objects.get(id=pk)
    return Response(FormSerializer(record, many=False).data)


@api_view(['POST'])
@parser_classes([MultiPartParser, FormParser])
def formCreate(request):
    """Create a Form from multipart data (supports file upload)."""
    ser = FormSerializer(data=request.data)
    if ser.is_valid():
        ser.save()
    return Response(ser.data)


@api_view(['POST'])
def formUpdate(request, pk):
    """Apply a partial update to an existing Form."""
    record = Form.objects.get(id=pk)
    ser = FormSerializer(instance=record, data=request.data, partial=True)
    if ser.is_valid():
        ser.save()
    return Response(ser.data)


@api_view(['DELETE'])
def formDelete(request, pk):
    """Delete a Form by primary key."""
    Form.objects.get(id=pk).delete()
    return Response('Item Successfully Deleted!')
def uploadForm(request):
    """Serve and process the Form upload page.

    GET renders an empty form; POST validates it, writes the uploaded file to
    disk via handle_uploaded_file, saves the model, and redirects.
    """
    form = upload()
    if request.method == "POST":
        form = upload(request.POST, request.FILES)
        if form.is_valid():
            print('valid')
            uploaded = request.FILES['file']
            handle_uploaded_file(uploaded, str(uploaded))
            form.save()
            return redirect('blog:files')
        else:
            print('not valid')
    return render(request, 'api/uploadForm.html', {'form': form})
def handle_uploaded_file(f, filename):
    """Stream an uploaded file's chunks into static/images/documents/<filename>."""
    destination_path = 'static/images/documents/' + filename
    with open(destination_path, 'wb+') as out:
        for piece in f.chunks():
            out.write(piece)
def uploadTraining(request):
    """Serve and process the multi-file training content upload page.

    GET renders an empty form; POST validates it, writes each uploaded file to
    disk, saves the model form, and redirects to the training database.
    """
    form = uploadTrainingContent()
    if request.method == "POST":
        form = uploadTrainingContent(request.POST, request.FILES)
        # BUG FIX: the original tested ``form.is_valid`` — the bound method
        # object, which is always truthy — instead of calling it, so invalid
        # submissions were saved anyway. is_valid() is now actually invoked.
        if form.is_valid():
            for f in request.FILES.getlist('file'):
                filename = str(f)
                handle_uploaded_content(f, filename)
            form.save()
            return redirect('blog:trainingDB')
    context = {'form': form}
    return render(request, 'api/trainingUpload.html', context)
def handle_uploaded_content(f, filename):
    """Stream an uploaded file's chunks into static/images/documents/training/<filename>."""
    destination_path = 'static/images/documents/training/' + filename
    with open(destination_path, 'wb+') as out:
        for piece in f.chunks():
            out.write(piece)
@api_view(['GET'])
def contentList(request):
    """Return every training Content record, ordered by title."""
    queryset = Content.objects.all().order_by('title')
    return Response(ContentSerializer(queryset, many=True).data)


@api_view(['GET'])
def contentDetail(request, pk):
    """Return a single training Content record by primary key."""
    record = Content.objects.get(id=pk)
    return Response(ContentSerializer(record, many=False).data)
@api_view(['POST'])
def contentUpdate(request, pk):
    """Apply a partial update to an existing training Content record.

    BUG FIX: the original passed an undefined name ``form`` as the serializer
    instance (a copy-paste slip from formUpdate), raising NameError on every
    call. The serializer now updates the fetched Content object.
    """
    trainingContent = Content.objects.get(id=pk)
    serializer = ContentSerializer(instance=trainingContent, data=request.data, partial=True)
    if serializer.is_valid():
        serializer.save()
    return Response(serializer.data)
@api_view(['DELETE'])
def contentDelete(request, pk):
    """Delete a training Content record by primary key."""
    Content.objects.get(id=pk).delete()
    return Response('Item Successfully Deleted!')


@api_view(['GET'])
def subjectList(request):
    """Return every Subject, ordered by name."""
    queryset = Subject.objects.all().order_by('name')
    return Response(SubjectSerializer(queryset, many=True).data)
@api_view(['GET'])
def form2List(request):
    """Return every Form2 (resource) record, ordered by title."""
    queryset = Form2.objects.all().order_by('title')
    return Response(Form2Serializer(queryset, many=True).data)


@api_view(['GET'])
def form2Detail(request, pk):
    """Return a single Form2 record by primary key."""
    record = Form2.objects.get(id=pk)
    return Response(Form2Serializer(record, many=False).data)


@api_view(['POST'])
def form2Update(request, pk):
    """Apply a partial update to an existing Form2 record."""
    record = Form2.objects.get(id=pk)
    ser = Form2Serializer(instance=record, data=request.data, partial=True)
    if ser.is_valid():
        ser.save()
    return Response(ser.data)


@api_view(['DELETE'])
def form2Delete(request, pk):
    """Delete a Form2 record by primary key."""
    Form2.objects.get(id=pk).delete()
    return Response('Item Successfully Deleted!')
def uploadResourceFunc(request):
    """Serve and process the resource form upload page.

    GET renders an empty form; POST validates it, writes the uploaded file to
    disk via handle_uploaded_form, saves the model, and redirects.
    """
    form = uploadResourceForm()
    if request.method == "POST":
        form = uploadResourceForm(request.POST, request.FILES)
        if form.is_valid():
            uploaded = request.FILES['file']
            handle_uploaded_form(uploaded, str(uploaded))
            form.save()
            return redirect('blog:resourceFormUpload')
    return render(request, 'api/uploadResource.html', {'form': form})
def handle_uploaded_form(f, filename):
    """Stream an uploaded file's chunks into static/images/documents/resourceForms/<filename>."""
    destination_path = 'static/images/documents/resourceForms/' + filename
    with open(destination_path, 'wb+') as out:
        for piece in f.chunks():
            out.write(piece)
|
{"/api/admin.py": ["/api/models.py"], "/api/forms.py": ["/api/models.py"], "/blog/views.py": ["/blog/models.py", "/api/models.py", "/api/serializers.py"], "/api/serializers.py": ["/api/models.py"], "/api/views.py": ["/api/serializers.py", "/api/models.py", "/api/forms.py"]}
|
11,634
|
santander-syngenta/rc
|
refs/heads/master
|
/blog/migrations/0004_announcement.py
|
# Generated by Django 3.1 on 2020-12-10 16:09
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: creates the Announcement model."""

    dependencies = [
        ('blog', '0003_delete_methods'),
    ]
    operations = [
        migrations.CreateModel(
            name='Announcement',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('text', models.TextField()),
                ('link', models.CharField(blank=True, max_length=200, null=True)),
            ],
        ),
    ]
|
{"/api/admin.py": ["/api/models.py"], "/api/forms.py": ["/api/models.py"], "/blog/views.py": ["/blog/models.py", "/api/models.py", "/api/serializers.py"], "/api/serializers.py": ["/api/models.py"], "/api/views.py": ["/api/serializers.py", "/api/models.py", "/api/forms.py"]}
|
11,635
|
santander-syngenta/rc
|
refs/heads/master
|
/api/migrations/0002_auto_20201005_1329.py
|
# Generated by Django 3.0.8 on 2020-10-05 17:29
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: adds FormTags/LinkTags models and links
    them to Form and Link via many-to-many ``tag`` fields."""

    dependencies = [
        ('api', '0001_initial'),
    ]
    operations = [
        migrations.CreateModel(
            name='FormTags',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=300)),
            ],
        ),
        migrations.CreateModel(
            name='LinkTags',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=300)),
            ],
        ),
        migrations.AddField(
            model_name='form',
            name='tag',
            field=models.ManyToManyField(to='api.FormTags'),
        ),
        migrations.AddField(
            model_name='link',
            name='tag',
            field=models.ManyToManyField(to='api.LinkTags'),
        ),
    ]
|
{"/api/admin.py": ["/api/models.py"], "/api/forms.py": ["/api/models.py"], "/blog/views.py": ["/blog/models.py", "/api/models.py", "/api/serializers.py"], "/api/serializers.py": ["/api/models.py"], "/api/views.py": ["/api/serializers.py", "/api/models.py", "/api/forms.py"]}
|
11,640
|
melizalab/gammatone
|
refs/heads/master
|
/gammatone/plot.py
|
# -*- coding: utf-8 -*-
# -*- mode: python -*-
"""
Plotting utilities related to gammatone analysis, primarily for use with
``matplotlib``.
See COPYING for copyright and licensing information.
"""
from __future__ import division
import argparse
import os.path
import matplotlib.pyplot
import matplotlib.ticker
import numpy as np
import scipy.constants
import scipy.io.wavfile
from .filters import erb_point
import gammatone.gtgram
import gammatone.fftweight
class ERBFormatter(matplotlib.ticker.EngFormatter):
    """
    Tick formatter for gammatone filterbank plots.

    Positions in ``[0, 1]`` are mapped onto ERB-spaced frequencies running
    from ``high_freq`` down to ``low_freq`` (note the reversal) and rendered
    like the engineering formatter. Pair it with ``imshow`` calls whose
    ``extent`` is ``[a, b, 1, 0]``.
    """

    def __init__(self, low_freq, high_freq, *args, **kwargs):
        """
        :param low_freq: low end of the gammatone filterbank frequency range
        :param high_freq: high end of the gammatone filterbank frequency range

        ``units``/``places`` are not set here; pass them through the extra
        arguments (typically ``'Hz'`` and ``0``).
        """
        self.high_freq = high_freq
        self.low_freq = low_freq
        super().__init__(*args, **kwargs)

    def _erb_axis_scale(self, fraction):
        # Map an axis fraction in [0, 1] to its ERB-spaced frequency.
        return erb_point(self.low_freq, self.high_freq, fraction)

    def __call__(self, val, pos=None):
        return super().__call__(self._erb_axis_scale(val), pos)
def gtgram_plot(
    gtgram_function,
    axes,
    x,
    fs,
    window_time,
    hop_time,
    channels,
    f_min,
    imshow_args=None,
):
    """
    Draw a spectrogram-like time/frequency magnitude image computed from
    gammatone subband filters onto ``axes``.

    :param gtgram_function: callable with the signature of
        :func:`gammatone.gtgram.gtgram` —
        ``(wave, fs, window_time, hop_time, channels, f_min)``.
    """
    # Label the frequency axis with ERB-spaced ticks.
    axes.yaxis.set_major_formatter(ERBFormatter(f_min, fs / 2, unit="Hz", places=0))
    total_time = len(x) / fs
    # Golden-ratio aspect for the rendered image.
    aspect = total_time / scipy.constants.golden
    magnitudes = gtgram_function(x, fs, window_time, hop_time, channels, f_min)
    image = np.flipud(20 * np.log10(magnitudes))
    return axes.imshow(image, extent=[0, total_time, 1, 0], aspect=aspect)
# Entry point for CLI script
HELP_TEXT = """\
Plots the gammatone filterbank analysis of a WAV file.
If the file contains more than one channel, all channels are averaged before
performing analysis.
"""
def render_audio_from_file(path, duration, function):
    """
    Render ``duration`` seconds of audio from the file at ``path`` with the
    gammatone spectrogram function ``function`` and show the plot.

    :param path: path to a WAV file readable by ``scipy.io.wavfile.read``
    :param duration: seconds of audio to analyse; falsy means the whole file
    :param function: gammatonegram function (see :func:`gtgram_plot`)
    """
    samplerate, data = scipy.io.wavfile.read(path)
    if duration:
        # int() guards against a non-integer frame count; slicing by rows
        # works for both mono (1-D) and multi-channel (2-D) data.
        nframes = int(duration * samplerate)
        data = data[0:nframes]
    # BUG FIX: mono WAVs load as a 1-D array, so the original unconditional
    # ``data.mean(1)`` raised; average across channels only when present.
    if data.ndim > 1:
        signal = data.mean(1)
    else:
        signal = data
    # Default gammatone-based spectrogram parameters
    twin = 0.08
    thop = twin / 2
    channels = 1024
    fmin = 20
    # Set up the plot
    fig = matplotlib.pyplot.figure()
    axes = fig.add_axes([0.1, 0.1, 0.8, 0.8])
    gtgram_plot(function, axes, signal, samplerate, twin, thop, channels, fmin)
    axes.set_title(os.path.basename(path))
    axes.set_xlabel("Time (s)")
    axes.set_ylabel("Frequency")
    matplotlib.pyplot.show()
def main():
    """
    Entry point for CLI application to plot gammatonegrams of sound files.
    """
    parser = argparse.ArgumentParser(description=HELP_TEXT)
    parser.add_argument(
        "sound_file",
        help="The sound file to graph. See the help text for supported formats.",
    )
    # Optional prefix length in seconds; omitted means the whole file.
    parser.add_argument(
        "-d",
        "--duration",
        type=int,
        help="The time in seconds from the start of the audio to use for the "
        "graph (default is to use the whole file).",
    )
    # -a swaps the fast FFT-weight approximation for the full filterbank.
    parser.add_argument(
        "-a",
        "--accurate",
        action="store_const",
        dest="function",
        const=gammatone.gtgram.gtgram,
        default=gammatone.fftweight.fft_gtgram,
        help="Use the full filterbank approach instead of the weighted FFT "
        "approximation. This is much slower, and uses a lot of memory, but"
        " is more accurate.",
    )
    args = parser.parse_args()
    return render_audio_from_file(args.sound_file, args.duration, args.function)
|
{"/gammatone/plot.py": ["/gammatone/gtgram.py", "/gammatone/fftweight.py"], "/tests/test_fft_weights.py": ["/gammatone/fftweight.py"], "/gammatone/fftweight.py": ["/gammatone/gtgram.py"], "/tests/test_specgram.py": ["/gammatone/fftweight.py"], "/tests/test_gammatonegram.py": ["/gammatone/gtgram.py"], "/tests/test_fft_gtgram.py": ["/gammatone/fftweight.py"]}
|
11,641
|
melizalab/gammatone
|
refs/heads/master
|
/tests/test_erb_space.py
|
#!/usr/bin/env python3
# Copyright 2014 Jason Heeris, jason.heeris@gmail.com
#
# This file is part of the gammatone toolkit, and is licensed under the 3-clause
# BSD license: https://github.com/detly/gammatone/blob/master/COPYING
import numpy as np
import scipy.io
from pkg_resources import resource_stream
import gammatone.filters
REF_DATA_FILENAME = 'data/test_erbspace_data.mat'
INPUT_KEY = 'erbspace_inputs'
RESULT_KEY = 'erbspace_results'
INPUT_COLS = ('f_low', 'f_high', 'num_f')
RESULT_COLS = ('cfs',)
def load_reference_data():
    """ Load test data generated from the reference code """
    with resource_stream(__name__, REF_DATA_FILENAME) as test_data:
        data = scipy.io.loadmat(test_data, squeeze_me=False)
    for inputs, refs in zip(data[INPUT_KEY], data[RESULT_KEY]):
        # Squeeze singleton MATLAB dimensions and label the columns.
        yield (
            dict(zip(INPUT_COLS, map(np.squeeze, inputs))),
            dict(zip(RESULT_COLS, map(np.squeeze, refs))),
        )
def test_ERB_space_known_values():
    # Yield one ERBSpaceTester per reference data row.
    # NOTE(review): nose-style yield test generators are not collected by
    # modern pytest — confirm which test runner this suite targets.
    for inputs, refs in load_reference_data():
        args = (
            inputs['f_low'],
            inputs['f_high'],
            inputs['num_f'],
        )
        expected = (refs['cfs'],)
        yield ERBSpaceTester(args, expected)
class ERBSpaceTester:
    """Callable test case: compares erb_space output against reference data."""

    def __init__(self, args, expected):
        self.args = args
        self.expected = expected[0]
        f_low, f_high, num_f = self.args
        # Human-readable label picked up by the test runner.
        self.description = "ERB space for {:.1f} {:.1f} {:d}".format(
            float(f_low), float(f_high), int(num_f)
        )

    def __call__(self):
        result = gammatone.filters.erb_space(*self.args)
        assert np.allclose(result, self.expected, rtol=1e-6, atol=1e-10)
|
{"/gammatone/plot.py": ["/gammatone/gtgram.py", "/gammatone/fftweight.py"], "/tests/test_fft_weights.py": ["/gammatone/fftweight.py"], "/gammatone/fftweight.py": ["/gammatone/gtgram.py"], "/tests/test_specgram.py": ["/gammatone/fftweight.py"], "/tests/test_gammatonegram.py": ["/gammatone/gtgram.py"], "/tests/test_fft_gtgram.py": ["/gammatone/fftweight.py"]}
|
11,642
|
melizalab/gammatone
|
refs/heads/master
|
/tests/test_fft_weights.py
|
#!/usr/bin/env python3
# Copyright 2014 Jason Heeris, jason.heeris@gmail.com
#
# This file is part of the gammatone toolkit, and is licensed under the 3-clause
# BSD license: https://github.com/detly/gammatone/blob/master/COPYING
from __future__ import division
import numpy as np
import scipy.io
from pkg_resources import resource_stream
import gammatone.fftweight
REF_DATA_FILENAME = 'data/test_fft2gtmx_data.mat'
INPUT_KEY = 'fft2gtmx_inputs'
RESULT_KEY = 'fft2gtmx_results'
INPUT_COLS = ('nfft', 'sr', 'nfilts', 'width', 'fmin', 'fmax', 'maxlen')
RESULT_COLS = ('weights', 'gain',)
def load_reference_data():
    """ Load test data generated from the reference code """
    with resource_stream(__name__, REF_DATA_FILENAME) as test_data:
        data = scipy.io.loadmat(test_data, squeeze_me=False)
    for inputs, refs in zip(data[INPUT_KEY], data[RESULT_KEY]):
        # Squeeze singleton MATLAB dimensions and label the columns.
        yield (
            dict(zip(INPUT_COLS, map(np.squeeze, inputs))),
            dict(zip(RESULT_COLS, map(np.squeeze, refs))),
        )
def fft_weights_funcs(args, expected):
    """
    Construct a pair of unit tests for the gains and weights of the FFT to
    gammatonegram calculation. Returns two functions: test_gains, test_weights.
    """
    args = list(args)
    expected_weights = expected[0]
    expected_gains = expected[1]
    # Convert nfft, nfilts, maxlen to ints
    args[0] = int(args[0])
    args[2] = int(args[2])
    args[6] = int(args[6])
    # Computed once here; both inner test functions close over the results.
    weights, gains = gammatone.fftweight.fft_weights(*args)
    # Two runner-visible descriptions; the genexpr yields for "weights" first,
    # matching the unpacking order (test_weights_desc, test_gains_desc).
    (test_weights_desc, test_gains_desc) = (
        "FFT weights {:s} for nfft = {:d}, fs = {:d}, nfilts = {:d}".format(
            label,
            int(args[0]),
            int(args[1]),
            int(args[2]),
        ) for label in ("weights", "gains"))
    def test_gains():
        # Shape must match before the element-wise comparison is meaningful.
        assert gains.shape == expected_gains.shape
        assert np.allclose(gains, expected_gains, rtol=1e-6, atol=1e-12)
    def test_weights():
        assert weights.shape == expected_weights.shape
        assert np.allclose(weights, expected_weights, rtol=1e-6, atol=1e-12)
    test_gains.description = test_gains_desc
    test_weights.description = test_weights_desc
    return test_gains, test_weights
def test_fft_weights():
    """Yield gain and weight checks for every row of reference data."""
    for inputs, refs in load_reference_data():
        call_args = tuple(inputs[col] for col in INPUT_COLS)
        for check in fft_weights_funcs(call_args, (refs['weights'], refs['gain'])):
            yield check
|
{"/gammatone/plot.py": ["/gammatone/gtgram.py", "/gammatone/fftweight.py"], "/tests/test_fft_weights.py": ["/gammatone/fftweight.py"], "/gammatone/fftweight.py": ["/gammatone/gtgram.py"], "/tests/test_specgram.py": ["/gammatone/fftweight.py"], "/tests/test_gammatonegram.py": ["/gammatone/gtgram.py"], "/tests/test_fft_gtgram.py": ["/gammatone/fftweight.py"]}
|
11,643
|
melizalab/gammatone
|
refs/heads/master
|
/tests/__init__.py
|
# Copyright 2014 Jason Heeris, jason.heeris@gmail.com
#
# This file is part of the gammatone toolkit, and is licensed under the 3-clause
# BSD license: https://github.com/detly/gammatone/blob/master/COPYING
# Designate as module
|
{"/gammatone/plot.py": ["/gammatone/gtgram.py", "/gammatone/fftweight.py"], "/tests/test_fft_weights.py": ["/gammatone/fftweight.py"], "/gammatone/fftweight.py": ["/gammatone/gtgram.py"], "/tests/test_specgram.py": ["/gammatone/fftweight.py"], "/tests/test_gammatonegram.py": ["/gammatone/gtgram.py"], "/tests/test_fft_gtgram.py": ["/gammatone/fftweight.py"]}
|
11,644
|
melizalab/gammatone
|
refs/heads/master
|
/gammatone/gtgram.py
|
# -*- coding: utf-8 -*-
# -*- mode: python -*-
"""
This module contains functions for rendering "spectrograms" which use gammatone
filterbanks instead of Fourier transforms.
See COPYING for copyright and licensing information.
"""
import numpy as np
from .filters import make_erb_filters, centre_freqs, erb_filterbank
def round_half_away_from_zero(num):
    """
    Round to the nearest integer with ties going away from zero:
    0.5 -> 1.0 and -0.5 -> -1.0 (unlike banker's rounding).
    """
    magnitude = np.floor(np.abs(num) + 0.5)
    # reattach the original sign; zero input stays zero
    return np.sign(num) * magnitude
def gtgram_strides(fs, window_time, hop_time, filterbank_cols):
    """
    Compute the windowing parameters for a gammatonegram.

    @return a tuple of (window_size, hop_samples, output_columns)
    """
    window_samples = int(round_half_away_from_zero(fs * window_time))
    hop_samples = int(round_half_away_from_zero(fs * hop_time))
    # number of full windows that fit, advancing by hop_samples each time
    n_columns = 1 + int(np.floor((filterbank_cols - window_samples) / hop_samples))
    return (window_samples, hop_samples, n_columns)
def gtgram_xe(wave, fs, channels, f_min, f_max):
    """Return the squared (energy) output of the ERB filterbank for ``wave``."""
    centre_frequencies = centre_freqs(fs, channels, f_min, f_max)
    coefs = np.flipud(make_erb_filters(fs, centre_frequencies))
    filtered = erb_filterbank(wave, coefs)
    return filtered ** 2
def gtgram(
    wave, fs, window_time, hop_time, channels, f_min, f_max=None, return_freqs=False
):
    """
    Compute a spectrogram-like time/frequency magnitude array based on
    gammatone subband filters.

    ``wave`` (sampled at ``fs``) is run through a ``channels``-band gammatone
    auditory filterbank spanning ``f_min`` to ``f_max``; each band's energy is
    averaged over windows of ``window_time`` seconds, advanced by ``hop_time``
    seconds per column. Returns a nonnegative real matrix with ``channels``
    rows, or ``(centre_frequencies, matrix)`` when ``return_freqs`` is true.

    | 2009-02-23 Dan Ellis dpwe@ee.columbia.edu
    |
    | (c) 2013 Jason Heeris (Python implementation)
    """
    energy = gtgram_xe(wave, fs, channels, f_min, f_max)
    nwin, hop, ncols = gtgram_strides(fs, window_time, hop_time, energy.shape[1])
    out = np.zeros((channels, ncols))
    window_offsets = np.arange(nwin)
    for col in range(ncols):
        frame = energy[:, col * hop + window_offsets]
        # RMS of each band's energy over the window
        out[:, col] = np.sqrt(frame.mean(1))
    if not return_freqs:
        return out
    return centre_freqs(fs, channels, f_min, f_max), out
|
{"/gammatone/plot.py": ["/gammatone/gtgram.py", "/gammatone/fftweight.py"], "/tests/test_fft_weights.py": ["/gammatone/fftweight.py"], "/gammatone/fftweight.py": ["/gammatone/gtgram.py"], "/tests/test_specgram.py": ["/gammatone/fftweight.py"], "/tests/test_gammatonegram.py": ["/gammatone/gtgram.py"], "/tests/test_fft_gtgram.py": ["/gammatone/fftweight.py"]}
|
11,645
|
melizalab/gammatone
|
refs/heads/master
|
/gammatone/fftweight.py
|
# -*- coding: utf-8 -*-
# -*- mode: python -*-
"""
This module contains functions for calculating weights to approximate a
gammatone filterbank-like "spectrogram" from a Fourier transform.
See COPYING for copyright and licensing information.
"""
from __future__ import division
import numpy as np
import gammatone.filters as filters
import gammatone.gtgram as gtgram
def specgram_window(
nfft,
nwin,
):
"""
Window calculation used in specgram replacement function. Hann window of
width `nwin` centred in an array of width `nfft`.
"""
halflen = nwin // 2
halff = nfft // 2 # midpoint of win
acthalflen = int(np.floor(min(halff, halflen)))
halfwin = 0.5 * (1 + np.cos(np.pi * np.arange(0, halflen + 1) / halflen))
win = np.zeros((nfft,))
win[halff : halff + acthalflen] = halfwin[0:acthalflen]
win[halff : halff - acthalflen : -1] = halfwin[0:acthalflen]
return win
def specgram(x, n, sr, w, h):
    """Substitute for Matlab's specgram, calculates a simple spectrogram.

    Only the non-negative-frequency half of each FFT frame is kept, so the
    result has ``1 + n//2`` rows and one column per analysis frame.

    :param x: The signal to analyse (1-D array; only ``shape[0]`` is used)
    :param n: The FFT length
    :param sr: The sampling rate (accepted but unused in the calculation)
    :param w: The window length (see :func:`specgram_window`)
    :param h: The hop size (must be greater than zero)
    """
    # Based on Dan Ellis' myspecgram.m,v 1.1 2002/08/04
    assert h > 0, "Must have a hop size greater than 0"
    s = x.shape[0]
    win = specgram_window(n, w)
    c = 0
    # pre-allocate output array
    # NOTE(review): ncols counts 1 + floor((s - n)/h) frames, but the loop
    # below excludes b == s - n; when (s - n) is an exact multiple of h the
    # final column appears to stay all-zero — confirm against the MATLAB
    # reference before changing.
    ncols = 1 + int(np.floor((s - n) / h))
    d = np.zeros(((1 + n // 2), ncols), np.dtype(complex))
    for b in range(0, s - n, h):
        # windowed frame starting at sample b
        u = win * x[b : b + n]
        t = np.fft.fft(u)
        # keep only the non-negative-frequency bins
        d[:, c] = t[0 : (1 + n // 2)].T
        c = c + 1
    return d
def fft_weights(nfft, fs, nfilts, width, fmin, fmax, maxlen):
    """
    Generate a matrix of weights to combine FFT bins into Gammatone bins.

    :param nfft: the source FFT size
    :param fs: sampling rate (Hz)
    :param nfilts: the number of output bands required (default 64)
    :param width: the constant width of each band in Bark (default 1)
    :param fmin: lower limit of frequencies (Hz)
    :param fmax: upper limit of frequencies (Hz)
    :param maxlen: number of bins to truncate the rows to
    :return: a tuple `weights`, `gain` with the calculated weight matrices and
        gain vectors

    Note about `maxlen` parameter: While wts has nfft columns, the second half
    are all zero. Hence, aud spectrum is::

        fft2gammatonemx(nfft,sr)*abs(fft(xincols,nfft))

    `maxlen` truncates the rows to this many bins.

    | (c) 2004-2009 Dan Ellis dpwe@ee.columbia.edu based on rastamat/audspec.m
    | (c) 2012 Jason Heeris (Python implementation)
    """
    # points on the upper half of the unit circle, one per non-negative FFT bin
    ucirc = np.exp(1j * 2 * np.pi * np.arange(0, nfft / 2 + 1) / nfft)[None, ...]
    # Common ERB filter code factored out
    cf_array = filters.erb_space(fmin, fmax, nfilts)[::-1]
    _, A11, A12, A13, A14, _, _, _, B2, gain = filters.make_erb_filters(
        fs, cf_array, width
    ).T
    # add a trailing axis so the coefficients broadcast against ucirc's bins
    A11, A12, A13, A14 = A11[..., None], A12[..., None], A13[..., None], A14[..., None]
    r = np.sqrt(B2)
    theta = 2 * np.pi * cf_array / fs
    # per-channel filter pole on the z-plane (radius r, angle theta)
    pole = (r * np.exp(1j * theta))[..., None]
    GTord = 4
    weights = np.zeros((nfilts, nfft))
    # magnitude response of the GTord-order gammatone filter at each FFT bin
    weights[:, 0 : ucirc.shape[1]] = (
        np.abs(ucirc + A11 * fs)
        * np.abs(ucirc + A12 * fs)
        * np.abs(ucirc + A13 * fs)
        * np.abs(ucirc + A14 * fs)
        * np.abs(fs * (pole - ucirc) * (pole.conj() - ucirc)) ** (-GTord)
        / gain[..., None]
    )
    # drop the (all-zero) upper columns beyond maxlen
    weights = weights[:, 0 : int(maxlen)]
    return weights, gain
def fft_gtgram(wave, fs, window_time, hop_time, channels, f_min):
    """
    Compute a spectrogram-like time/frequency magnitude array based on an
    FFT approximation to gammatone subband filters.

    A weight matrix from :func:`fft_weights` is applied to the magnitude
    spectrogram of ``wave`` (sampled at ``fs``), approximating full ERB
    gammatone filtering (as per :func:`gtgram.gtgram`). ``f_min`` is the
    lower frequency cutoff; ``window_time`` and ``hop_time`` (seconds) set
    the column size and overlap.

    | 2009-02-23 Dan Ellis dpwe@ee.columbia.edu
    |
    | (c) 2013 Jason Heeris (Python implementation)
    """
    width = 1  # fixed here; was a parameter in the MATLAB code
    # smallest power of two that covers two windows
    nfft = int(2 ** np.ceil(np.log2(2 * window_time * fs)))
    nwin, nhop, _ = gtgram.gtgram_strides(fs, window_time, hop_time, 0)
    weights, _ = fft_weights(nfft, fs, channels, width, f_min, fs / 2, nfft / 2 + 1)
    spectrogram = specgram(wave, nfft, fs, nwin, nhop)
    return weights.dot(np.abs(spectrogram)) / nfft
|
{"/gammatone/plot.py": ["/gammatone/gtgram.py", "/gammatone/fftweight.py"], "/tests/test_fft_weights.py": ["/gammatone/fftweight.py"], "/gammatone/fftweight.py": ["/gammatone/gtgram.py"], "/tests/test_specgram.py": ["/gammatone/fftweight.py"], "/tests/test_gammatonegram.py": ["/gammatone/gtgram.py"], "/tests/test_fft_gtgram.py": ["/gammatone/fftweight.py"]}
|
11,646
|
melizalab/gammatone
|
refs/heads/master
|
/tests/test_cfs.py
|
#!/usr/bin/env python3
# Copyright 2014 Jason Heeris, jason.heeris@gmail.com
#
# This file is part of the gammatone toolkit, and is licensed under the 3-clause
# BSD license: https://github.com/detly/gammatone/blob/master/COPYING
from mock import patch
import gammatone.filters
EXPECTED_PARAMS = (
((0, 0, 0), (0, 0, 0)),
((22050, 100, 100), (100, 11025, 100)),
((44100, 100, 100), (100, 22050, 100)),
((44100, 100, 20), (20, 22050, 100)),
((88200, 100, 20), (20, 44100, 100)),
((22050, 100, 10), (10, 11025, 100)),
((22050, 1000, 100), (100, 11025, 1000)),
((160000, 500, 200), (200, 80000, 500)),
)
def test_centre_freqs():
    """Yield one CentreFreqsTester per (args, expected-params) pair."""
    yield from (CentreFreqsTester(a, p) for a, p in EXPECTED_PARAMS)
class CentreFreqsTester:
    """Checks that centre_freqs forwards the expected parameters to erb_space."""

    def __init__(self, args, params):
        self.call_args = args
        self.expected_params = params
        self.description = "Centre freqs for {:g} {:d} {:g}".format(*args)

    @patch('gammatone.filters.erb_space')
    def __call__(self, erb_space_mock):
        # centre_freqs should delegate the frequency spacing to erb_space
        gammatone.filters.centre_freqs(*self.call_args)
        erb_space_mock.assert_called_with(*self.expected_params)
|
{"/gammatone/plot.py": ["/gammatone/gtgram.py", "/gammatone/fftweight.py"], "/tests/test_fft_weights.py": ["/gammatone/fftweight.py"], "/gammatone/fftweight.py": ["/gammatone/gtgram.py"], "/tests/test_specgram.py": ["/gammatone/fftweight.py"], "/tests/test_gammatonegram.py": ["/gammatone/gtgram.py"], "/tests/test_fft_gtgram.py": ["/gammatone/fftweight.py"]}
|
11,647
|
melizalab/gammatone
|
refs/heads/master
|
/tests/test_specgram.py
|
#!/usr/bin/env python3
# Copyright 2014 Jason Heeris, jason.heeris@gmail.com
#
# This file is part of the gammatone toolkit, and is licensed under the 3-clause
# BSD license: https://github.com/detly/gammatone/blob/master/COPYING
from mock import patch
import numpy as np
import scipy.io
from pkg_resources import resource_stream
import gammatone.fftweight
REF_DATA_FILENAME = 'data/test_specgram_data.mat'
INPUT_KEY = 'specgram_inputs'
MOCK_KEY = 'specgram_mocks'
RESULT_KEY = 'specgram_results'
INPUT_COLS = ('name', 'wave', 'nfft', 'fs', 'nwin', 'nhop')
MOCK_COLS = ('window',)
RESULT_COLS = ('res',)
def load_reference_data():
    """Yield (inputs, mocks, results) dict triples from the reference data."""
    with resource_stream(__name__, REF_DATA_FILENAME) as fh:
        mat = scipy.io.loadmat(fh, squeeze_me=False)
    rows = zip(mat[INPUT_KEY], mat[MOCK_KEY], mat[RESULT_KEY])
    for row_in, row_mock, row_ref in rows:
        yield (
            dict(zip(INPUT_COLS, row_in)),
            dict(zip(MOCK_COLS, row_mock)),
            dict(zip(RESULT_COLS, row_ref)),
        )
def test_specgram():
    """Yield one SpecgramTester per reference data row."""
    for inputs, mocks, refs in load_reference_data():
        call_args = tuple(inputs[k] for k in ('nfft', 'fs', 'nwin', 'nhop'))
        yield SpecgramTester(
            inputs['name'][0],
            call_args,
            inputs['wave'],
            mocks['window'],
            refs['res'],
        )
class SpecgramTester:
    """Compares the specgram replacement against reference output."""

    def __init__(self, name, args, sig, window, expected):
        self.signal = np.asarray(sig).squeeze()
        self.expected = np.asarray(expected).squeeze()
        self.args = [int(a.squeeze()) for a in args]
        self.window = window.squeeze()
        self.description = "Specgram for {:s}".format(name)

    def __call__(self):
        # The window calculation is mocked out so only the spectrogram
        # arithmetic itself is under test.
        patcher = patch(
            'gammatone.fftweight.specgram_window', return_value=self.window
        )
        with patcher:
            result = gammatone.fftweight.specgram(self.signal, *self.args)
        max_diff = np.max(np.abs(result - self.expected))
        assert np.allclose(result, self.expected, rtol=1e-6, atol=1e-12), (
            "Maximum difference: {:6e}".format(max_diff)
        )
|
{"/gammatone/plot.py": ["/gammatone/gtgram.py", "/gammatone/fftweight.py"], "/tests/test_fft_weights.py": ["/gammatone/fftweight.py"], "/gammatone/fftweight.py": ["/gammatone/gtgram.py"], "/tests/test_specgram.py": ["/gammatone/fftweight.py"], "/tests/test_gammatonegram.py": ["/gammatone/gtgram.py"], "/tests/test_fft_gtgram.py": ["/gammatone/fftweight.py"]}
|
11,648
|
melizalab/gammatone
|
refs/heads/master
|
/tests/test_gammatone_filters.py
|
#!/usr/bin/env python3
# Copyright 2014 Jason Heeris, jason.heeris@gmail.com
#
# This file is part of the gammatone toolkit, and is licensed under the 3-clause
# BSD license: https://github.com/detly/gammatone/blob/master/COPYING
import numpy as np
import scipy.io
from pkg_resources import resource_stream
import gammatone.filters
REF_DATA_FILENAME = 'data/test_erb_filter_data.mat'
INPUT_KEY = 'erb_filter_inputs'
RESULT_KEY = 'erb_filter_results'
INPUT_COLS = ('fs', 'cfs')
RESULT_COLS = ('fcoefs',)
def load_reference_data():
    """Yield (inputs, results) dict pairs from the MATLAB reference data file."""
    with resource_stream(__name__, REF_DATA_FILENAME) as fh:
        mat = scipy.io.loadmat(fh, squeeze_me=False)
    for row_in, row_ref in zip(mat[INPUT_KEY], mat[RESULT_KEY]):
        yield (
            {key: np.squeeze(val) for key, val in zip(INPUT_COLS, row_in)},
            {key: np.squeeze(val) for key, val in zip(RESULT_COLS, row_ref)},
        )
def test_make_ERB_filters_known_values():
    """Yield one MakeERBFiltersTester per reference (fs, cfs) pair."""
    for inputs, refs in load_reference_data():
        yield MakeERBFiltersTester(
            (inputs['fs'], inputs['cfs']), (refs['fcoefs'],)
        )
class MakeERBFiltersTester:
    """Compares make_erb_filters output against reference coefficients."""

    def __init__(self, args, expected):
        self.fs, self.cfs = args
        self.expected = expected[0]
        self.description = "Gammatone filters for {:f}, {:.1f} ... {:.1f}".format(
            float(self.fs), float(self.cfs[0]), float(self.cfs[-1])
        )

    def __call__(self):
        result = gammatone.filters.make_erb_filters(self.fs, self.cfs)
        assert np.allclose(result, self.expected, rtol=1e-6, atol=1e-12)
|
{"/gammatone/plot.py": ["/gammatone/gtgram.py", "/gammatone/fftweight.py"], "/tests/test_fft_weights.py": ["/gammatone/fftweight.py"], "/gammatone/fftweight.py": ["/gammatone/gtgram.py"], "/tests/test_specgram.py": ["/gammatone/fftweight.py"], "/tests/test_gammatonegram.py": ["/gammatone/gtgram.py"], "/tests/test_fft_gtgram.py": ["/gammatone/fftweight.py"]}
|
11,649
|
melizalab/gammatone
|
refs/heads/master
|
/gammatone/__init__.py
|
# -*- coding: utf-8 -*-
# -*- mode: python -*-
"""gammatone filterbank toolkit
Copyright (C) 2013 Jason Heeris, <jason.heeris@gmail.com>
Copyright (C) 2022 Dan Meliza, Jonah Weissmann, Tyler Robbins <dan@meliza.org>
"""
__version__ = "0.1.1"
|
{"/gammatone/plot.py": ["/gammatone/gtgram.py", "/gammatone/fftweight.py"], "/tests/test_fft_weights.py": ["/gammatone/fftweight.py"], "/gammatone/fftweight.py": ["/gammatone/gtgram.py"], "/tests/test_specgram.py": ["/gammatone/fftweight.py"], "/tests/test_gammatonegram.py": ["/gammatone/gtgram.py"], "/tests/test_fft_gtgram.py": ["/gammatone/fftweight.py"]}
|
11,650
|
melizalab/gammatone
|
refs/heads/master
|
/tests/test_gammatonegram.py
|
#!/usr/bin/env python3
# Copyright 2014 Jason Heeris, jason.heeris@gmail.com
#
# This file is part of the gammatone toolkit, and is licensed under the 3-clause
# BSD license: https://github.com/detly/gammatone/blob/master/COPYING
from mock import patch
import numpy as np
import scipy.io
from pkg_resources import resource_stream
import gammatone.gtgram
REF_DATA_FILENAME = 'data/test_gammatonegram_data.mat'
INPUT_KEY = 'gammatonegram_inputs'
MOCK_KEY = 'gammatonegram_mocks'
RESULT_KEY = 'gammatonegram_results'
INPUT_COLS = ('name', 'wave', 'fs', 'twin', 'thop', 'channels', 'fmin')
MOCK_COLS = ('erb_fb', 'erb_fb_cols')
RESULT_COLS = ('gtgram', 'nwin', 'hopsamps', 'ncols')
def load_reference_data():
    """Yield (inputs, mocks, results) dict triples from the reference data."""
    # squeeze_me=True: scalar struct fields come back as plain values here
    with resource_stream(__name__, REF_DATA_FILENAME) as fh:
        mat = scipy.io.loadmat(fh, squeeze_me=True)
    rows = zip(mat[INPUT_KEY], mat[MOCK_KEY], mat[RESULT_KEY])
    for row_in, row_mock, row_ref in rows:
        yield (
            dict(zip(INPUT_COLS, row_in)),
            dict(zip(MOCK_COLS, row_mock)),
            dict(zip(RESULT_COLS, row_ref)),
        )
def test_nstrides():
    """Test gammatonegram stride calculations."""
    for inputs, mocks, refs in load_reference_data():
        call_args = (
            inputs['fs'],
            inputs['twin'],
            inputs['thop'],
            mocks['erb_fb_cols'],
        )
        expected = (refs['nwin'], refs['hopsamps'], refs['ncols'])
        yield GTGramStrideTester(inputs['name'], call_args, expected)
class GTGramStrideTester:
    """ Testing class for gammatonegram stride calculation """

    def __init__(self, name, inputs, expected):
        self.inputs = inputs        # (fs, twin, thop, filterbank_cols)
        self.expected = expected    # (nwin, hopsamps, ncols)
        self.description = "Gammatonegram strides for {:s}".format(name)

    def __call__(self):
        results = gammatone.gtgram.gtgram_strides(*self.inputs)
        diagnostic = (
            "result: {:s}, expected: {:s}".format(
                str(results),
                str(self.expected)
            )
        )
        # These are integer values, so use direct equality.
        # BUG FIX: the diagnostic string was computed but never attached to
        # the assertion, so a failing test gave no detail about the mismatch.
        assert results == self.expected, diagnostic
# TODO: possibly mock out gtgram_strides
def test_gtgram():
    """Yield one GammatonegramTester per reference data row."""
    for inputs, mocks, refs in load_reference_data():
        call_args = (
            inputs['fs'],
            inputs['twin'],
            inputs['thop'],
            inputs['channels'],
            inputs['fmin'],
        )
        yield GammatonegramTester(
            inputs['name'],
            call_args,
            inputs['wave'],
            mocks['erb_fb'],
            refs['gtgram'],
        )
class GammatonegramTester:
    """Compares the gammatonegram calculation against reference output."""

    def __init__(self, name, args, sig, erb_fb_out, expected):
        self.signal = np.asarray(sig)
        self.expected = np.asarray(expected)
        self.erb_fb_out = np.asarray(erb_fb_out)
        self.args = args
        self.description = "Gammatonegram for {:s}".format(name)

    def __call__(self):
        # The filterbank is mocked out so only the windowing/energy code is
        # under test.
        with patch(
            'gammatone.gtgram.erb_filterbank', return_value=self.erb_fb_out
        ):
            result = gammatone.gtgram.gtgram(self.signal, *self.args)
        max_diff = np.max(np.abs(result - self.expected))
        assert np.allclose(result, self.expected, rtol=1e-6, atol=1e-12), (
            "Maximum difference: {:6e}".format(max_diff)
        )
|
{"/gammatone/plot.py": ["/gammatone/gtgram.py", "/gammatone/fftweight.py"], "/tests/test_fft_weights.py": ["/gammatone/fftweight.py"], "/gammatone/fftweight.py": ["/gammatone/gtgram.py"], "/tests/test_specgram.py": ["/gammatone/fftweight.py"], "/tests/test_gammatonegram.py": ["/gammatone/gtgram.py"], "/tests/test_fft_gtgram.py": ["/gammatone/fftweight.py"]}
|
11,651
|
melizalab/gammatone
|
refs/heads/master
|
/tests/test_filterbank.py
|
#!/usr/bin/env python3
# Copyright 2014 Jason Heeris, jason.heeris@gmail.com
#
# This file is part of the gammatone toolkit, and is licensed under the 3-clause
# BSD license: https://github.com/detly/gammatone/blob/master/COPYING
import numpy as np
import scipy.io
from pkg_resources import resource_stream
import gammatone.filters
REF_DATA_FILENAME = 'data/test_filterbank_data.mat'
INPUT_KEY = 'erb_filterbank_inputs'
RESULT_KEY = 'erb_filterbank_results'
INPUT_COLS = ('fcoefs', 'wave')
RESULT_COLS = ('filterbank',)
def load_reference_data():
    """Yield (inputs, results) dict pairs from the MATLAB reference data file."""
    with resource_stream(__name__, REF_DATA_FILENAME) as fh:
        mat = scipy.io.loadmat(fh, squeeze_me=False)
    for row_in, row_ref in zip(mat[INPUT_KEY], mat[RESULT_KEY]):
        yield (
            {key: np.squeeze(val) for key, val in zip(INPUT_COLS, row_in)},
            {key: np.squeeze(val) for key, val in zip(RESULT_COLS, row_ref)},
        )
def test_ERB_filterbank_known_values():
    """Yield one ERBFilterBankTester per reference data row."""
    for inputs, refs in load_reference_data():
        yield ERBFilterBankTester(
            (inputs['wave'], inputs['fcoefs']), (refs['filterbank'],)
        )
class ERBFilterBankTester:
    """Compares erb_filterbank output against reference data."""

    def __init__(self, args, expected):
        self.signal, self.fcoefs = args
        self.expected = expected[0]
        self.description = (
            "Gammatone filterbank result for {:.1f} ... {:.1f}".format(
                self.fcoefs[0][0], self.fcoefs[0][1]
            )
        )

    def __call__(self):
        result = gammatone.filters.erb_filterbank(self.signal, self.fcoefs)
        assert np.allclose(result, self.expected, rtol=1e-5, atol=1e-12)
|
{"/gammatone/plot.py": ["/gammatone/gtgram.py", "/gammatone/fftweight.py"], "/tests/test_fft_weights.py": ["/gammatone/fftweight.py"], "/gammatone/fftweight.py": ["/gammatone/gtgram.py"], "/tests/test_specgram.py": ["/gammatone/fftweight.py"], "/tests/test_gammatonegram.py": ["/gammatone/gtgram.py"], "/tests/test_fft_gtgram.py": ["/gammatone/fftweight.py"]}
|
11,652
|
melizalab/gammatone
|
refs/heads/master
|
/tests/test_fft_gtgram.py
|
#!/usr/bin/env python3
# Copyright 2014 Jason Heeris, jason.heeris@gmail.com
#
# This file is part of the gammatone toolkit, and is licensed under the 3-clause
# BSD license: https://github.com/detly/gammatone/blob/master/COPYING
from mock import patch
import numpy as np
import scipy.io
from pkg_resources import resource_stream
import gammatone.fftweight
REF_DATA_FILENAME = "data/test_fft_gammatonegram_data.mat"
INPUT_KEY = "fft_gammatonegram_inputs"
MOCK_KEY = "fft_gammatonegram_mocks"
RESULT_KEY = "fft_gammatonegram_results"
INPUT_COLS = ("name", "wave", "fs", "twin", "thop", "channels", "fmin")
MOCK_COLS = ("wts",)
RESULT_COLS = ("res", "window", "nfft", "nwin", "nhop")
def load_reference_data():
    """Yield (inputs, mocks, results) dict triples from the reference data."""
    with resource_stream(__name__, REF_DATA_FILENAME) as fh:
        mat = scipy.io.loadmat(fh, squeeze_me=False)
    rows = zip(mat[INPUT_KEY], mat[MOCK_KEY], mat[RESULT_KEY])
    for row_in, row_mock, row_ref in rows:
        yield (
            dict(zip(INPUT_COLS, row_in)),
            dict(zip(MOCK_COLS, row_mock)),
            dict(zip(RESULT_COLS, row_ref)),
        )
def test_fft_specgram_window():
    """Yield one window tester per reference data row."""
    for inputs, mocks, refs in load_reference_data():
        yield FFTGtgramWindowTester(
            inputs["name"], (refs["nfft"], refs["nwin"]), (refs["window"],)
        )
class FFTGtgramWindowTester:
    """Compares specgram_window output against the reference window."""

    def __init__(self, name, args, expected):
        self.nfft = int(args[0].squeeze())
        self.nwin = int(args[1].squeeze())
        self.expected = expected[0].squeeze()
        self.description = (
            "FFT gammatonegram window for nfft = {:f}, nwin = {:f}".format(
                (self.nfft), (self.nwin)
            )
        )

    def __call__(self):
        result = gammatone.fftweight.specgram_window(self.nfft, self.nwin)
        max_diff = np.max(np.abs(result - self.expected))
        assert np.allclose(result, self.expected, rtol=1e-6, atol=1e-12), (
            "Maximum difference: {:6e}".format(max_diff)
        )
def test_fft_gtgram():
    """Yield one FFT gammatonegram tester per reference data row."""
    for inputs, mocks, refs in load_reference_data():
        call_args = (
            inputs["fs"],
            inputs["twin"],
            inputs["thop"],
            inputs["channels"],
            inputs["fmin"],
        )
        yield FFTGammatonegramTester(
            inputs["name"][0],
            call_args,
            inputs["wave"],
            mocks["wts"],
            refs["window"],
            refs["res"],
        )
class FFTGammatonegramTester:
    """Testing class for FFT-based gammatonegram calculation."""

    def __init__(self, name, args, sig, fft_weights, window, expected):
        self.signal = np.asarray(sig).squeeze()
        self.expected = np.asarray(expected).squeeze()
        self.fft_weights = np.asarray(fft_weights)
        self.args = args
        self.window = window.squeeze()
        self.description = "FFT gammatonegram for {:s}".format(name)

    def __call__(self):
        # The second return value from fft_weights isn't actually used,
        # hence the None in the mocked return value.
        weights_patch = patch(
            "gammatone.fftweight.fft_weights",
            return_value=(self.fft_weights, None),
        )
        window_patch = patch(
            "gammatone.fftweight.specgram_window", return_value=self.window
        )
        with weights_patch, window_patch:
            result = gammatone.fftweight.fft_gtgram(self.signal, *self.args)
        max_diff = np.max(np.abs(result - self.expected))
        assert np.allclose(result, self.expected, rtol=1e-6, atol=1e-12), (
            "Maximum difference: {:6e}".format(max_diff)
        )
|
{"/gammatone/plot.py": ["/gammatone/gtgram.py", "/gammatone/fftweight.py"], "/tests/test_fft_weights.py": ["/gammatone/fftweight.py"], "/gammatone/fftweight.py": ["/gammatone/gtgram.py"], "/tests/test_specgram.py": ["/gammatone/fftweight.py"], "/tests/test_gammatonegram.py": ["/gammatone/gtgram.py"], "/tests/test_fft_gtgram.py": ["/gammatone/fftweight.py"]}
|
11,657
|
Jerommaas/Thunderstruck
|
refs/heads/master
|
/Tools/terrain_generator/pandaWorld.py
|
import sys
import inspect
# Panda
from panda3d.core import *
from direct.showbase.ShowBase import ShowBase
# local
import main
from data_object import *
from camera import Camera
P3D_WIN_WIDTH = 720
P3D_WIN_HEIGHT = 560
#
# This class manages all objects in the scene, loads/saves, etc.
#
import json
def str_to_class(str):
    """Resolve a dotted attribute path (e.g. "module.Class") against this module.

    BUG FIX: ``reduce`` is a builtin only in Python 2; on Python 3 this raised
    NameError. It is now imported from functools (locally, to leave the
    module's import block untouched).
    NOTE(review): the parameter shadows the builtin ``str``; kept as-is for
    backward compatibility with any keyword-argument callers.
    """
    from functools import reduce
    return reduce(getattr, str.split("."), sys.modules[__name__])
class PandaLoader(object):
    """Manages all objects in the scene: JSON load/save and object tracking."""

    def __init__(self, world):
        self.world = world
        self.objects = []
        self.events = []  # list of user actions (add, move, edit object etc.)

    def load_scene(self, file):
        """Populate the world from the JSON scene description in ``file``."""
        print( "PandaLoader.load_scene():\n\t{}".format( file ) )
        self.world.loadInitialEnv() # TEMP
        world = self.world # shorthand
        with open(file) as f:
            data = json.load(f)
        self.json_data = data
        world.name = data.get('name', '<world name>')
        world.version = data.get('version', 0)
        json_objects = data["objects"]
        for obj in json_objects:
            # try to find this type
            # BUG FIX: a bare ``except:`` swallowed every exception here
            # (including SystemExit/KeyboardInterrupt); only a missing name
            # in globals() is expected, so catch KeyError alone.
            try:
                subtype = globals()[obj["type"] ]
            except KeyError:
                print( "unknown type: {}".format(obj["type"]) )
                continue
            # check if this class is a subclass of data_object
            if issubclass(subtype, data_object):
                instance = subtype( obj["data"] )
                instance.name = obj["name"]
                self.objects.append(instance)
            else:
                print("type is not a subclass of data_type!")
        # TODO(victor): super hacky, fix this properly
        for obj in self.objects:
            if hasattr( obj, "model"):
                obj_file = obj.model
                if obj_file:
                    print( "loading model: {}".format(obj_file) )
                    # `loader` is Panda3D's global model loader
                    model = loader.loadModel(obj_file)
                    model.reparentTo(self.world.render)

    def save_scene(self, file):
        """Write the current world state back to ``file`` as JSON."""
        print( "PandaLoader.save_scene():\n\t{}".format( file ) )
        world = self.world
        # start from the data loaded earlier so unknown fields survive
        data = self.json_data
        data["version"] = world.version+1
        data["name"] = world.name
        data["objects"] = []
        print( "saving objects")
        for obj in self.objects:
            json_data = dict()
            json_data["name"] = obj.name
            json_data["type"] = type(obj).__name__
            json_data["data"] = obj.save()
            data["objects"].append( json_data )
        with open(file, "w") as f:
            json.dump(data, f, indent=4)

    def load_object(self, file):
        """Create a new data_object, track it, and return it.

        NOTE(review): ``file`` is accepted but never read, and
        ``data_object.data_object()`` assumes data_object is a module
        containing a class of the same name — confirm against the
        data_object import.
        """
        obj = data_object.data_object()
        self.objects.append( obj )
        return obj
#
# this class constructs the panda frame, used for visualizing the current world
#
class World(ShowBase):
    """Panda3D frame used to visualize the current world."""

    def __init__(self):
        ShowBase.__init__(self)
        # NOTE(review): this overwrites the ``loader`` attribute that
        # ShowBase itself provides — confirm that is intentional.
        self.loader = PandaLoader(self)
        self.accept("escape", sys.exit)
        # add camera object
        self.cam = Camera(self)
        # fields from save file
        self.name = "<world name>"
        self.version = 0

    def loadInitialEnv(self):
        """Load and display the default "environment" model."""
        # Load the environment model.
        self.scene = loader.loadModel("environment")
        # Reparent the model to render.
        self.scene.reparentTo(self.render)
        # Apply scale and position transforms on the model.
        s = 0.02
        self.scene.setScale(s,s,s)
        # self.scene.setPos(-8, 42, 0)

    def step(self):
        # advance the Panda3D task manager by one frame
        taskMgr.step()

    def bindToWindow(self, windowHandle):
        """Re-open the Panda window as a child of an external window handle."""
        wp = WindowProperties().getDefault()
        wp.setOrigin(0,0)
        wp.setSize(P3D_WIN_WIDTH, P3D_WIN_HEIGHT)
        wp.setParentWindow(windowHandle)
        base.openDefaultWindow(props=wp )
        self.wp = wp
# Script entry point: delegate to the application's main module.
if __name__ == "__main__":
    main.main()
|
{"/Tools/terrain_generator/pandaWorld.py": ["/main.py"], "/Tools/terrain_generator/gui.py": ["/main.py"], "/Entities/Objects/Trucks.py": ["/Tools/__init__.py"], "/Tools/terrain_generator/data_object.py": ["/main.py"], "/Controls/Manager.py": ["/Controls/__init__.py"], "/main.py": ["/Controls/__init__.py", "/Entities/__init__.py", "/Entities/Objects/__init__.py"], "/Tools/terrain_generator/camera.py": ["/main.py"]}
|
11,658
|
Jerommaas/Thunderstruck
|
refs/heads/master
|
/Tests/victor_client_server/NetworkObject.py
|
'''
Class containing all functions for sharing an object state between client and server, in either direction
This class has no knowledge of weither it is client or server side,
nor in which direction the info is flowing
'''
import json
class NetworkObject(object):
    """
    Base class for objects whose state is shared between client and server.
    It knows neither which side it lives on nor in which direction the
    information flows.
    """
    MAX_NETWORK_OBJECTS_COUNT = 1000
    objectList = dict()       # all objects, sending and receiving
    pendingObjectList = []    # newly created objects, removed by the network manager
    networkObjectId = 0       # holds the id of any new NetworkObject

    def __init__(self):
        self.class_name = self.__class__.__name__
        # NOTE: derived classes must remain default-constructable
        # (no required arguments in __init__)
        NetworkObject.addObject(self)

    @staticmethod
    def addObject(obj):
        """Register obj in the pending list and under the first free id."""
        # TODO(victor): scanning every id is O(n), but works for now
        NetworkObject.pendingObjectList.append(obj)
        free_ids = (
            i
            for i in range(NetworkObject.MAX_NETWORK_OBJECTS_COUNT)
            if i not in NetworkObject.objectList
        )
        slot = next(free_ids, None)
        if slot is None:
            print("NetworkObject.addObject(): no index available!")
        else:
            NetworkObject.objectList[slot] = obj

    def toConstructMessage(self):
        # string representation used to construct the object remotely
        return ""

    def fromConstructMessage(self, datastring):
        # rebuild the object from its string representation
        pass

    def toUpdate(self):
        # return a string with the update data when an update is required,
        # otherwise None
        return None

    def fromUpdate( self, datastring ):
        # overwrite in derived classes; by default apply the json data to
        # the instance dict. Objects sending many updates may need a more
        # efficient method.
        self.__dict__.update( json.loads(datastring) )

    def remove(self):
        # TODO(vicdie): alert the NetworkManager that this object needs to be removed
        raise NotImplementedError("NetworkObject.remove() not implemented!")
class NetworkEvent(object):
    """Base class for one-shot messages exchanged over the network."""

    def __init__(self, data=None):
        self.class_name = self.__class__.__name__
        if data:
            self.fromMessage(data)

    def toMessage(self):
        """Return a dict representation of this event."""
        return self.__dict__

    def fromMessage(self, data):
        """Restore this event's attributes from a dict."""
        self.__dict__.update(data)
'''
Testclasses for NetworkObject and NetworkEvent
'''
class NetworkPerson(NetworkObject):
    """Example shared object used to exercise NetworkObject."""

    def __init__(self, name="<geen idee>", age=0, posx=0, posy=0):
        # NOTE: deliberately default-constructable; no argument is required
        super().__init__()
        self.name = name
        self.age = age
        self.posx = posx
        self.posy = posy

    def toConstructMessage(self):
        """Dict representation used to construct this object remotely."""
        return self.__dict__

    def fromConstructMessage(self, data):
        """Rebuild this object from its dict representation."""
        self.__dict__.update(data)

    def toUpdateMessage(self):
        # only the age is treated as updatable in this test class;
        # None would mean "no update required"
        return {"age": self.age}

    def fromUpdateMessage(self, data):
        """Apply an update dict to this object's attributes."""
        self.__dict__.update(data)

    def __repr__(self):
        return "<networkPerson: name:{}, age:{}, x:{}, y:{}>".format(
            self.name, self.age, self.posx, self.posy
        )
class eventTextmessage(NetworkEvent):
    """Simple text-message event.

    FIXES: compare against None with ``is`` (identity) rather than ``==``;
    the toMessage/fromMessage overrides were byte-identical to the base
    class implementations and have been removed.
    """

    def __init__(self, message=None, data=None):
        super().__init__(data=data)
        if data is None:
            # only set the attribute when no wire data was supplied;
            # otherwise fromMessage (via the base __init__) already did
            self.message = message

    def __repr__(self):
        return "<eventTextMessage: {}>".format(self.message)
if __name__ == "__main__":
    # Round-trip demo: build a person on the "server", reconstruct it on the
    # "client", then propagate a single-attribute update.
    serverPerson = NetworkPerson(name="Henk", age=27, posx=3.14, posy=2.71)
    message = serverPerson.toConstructMessage()
    clientPerson = NetworkPerson()
    clientPerson.fromConstructMessage(message)
    print("\n=== construct ===")
    for item in (serverPerson, message, clientPerson):
        print(item)
    # mutate on the server side and ship only the update message
    serverPerson.age = 28
    updateMessage = serverPerson.toUpdateMessage()
    clientPerson.fromUpdateMessage(updateMessage)
    print("\n=== update ===")
    for item in (serverPerson, updateMessage, clientPerson):
        print(item)
|
{"/Tools/terrain_generator/pandaWorld.py": ["/main.py"], "/Tools/terrain_generator/gui.py": ["/main.py"], "/Entities/Objects/Trucks.py": ["/Tools/__init__.py"], "/Tools/terrain_generator/data_object.py": ["/main.py"], "/Controls/Manager.py": ["/Controls/__init__.py"], "/main.py": ["/Controls/__init__.py", "/Entities/__init__.py", "/Entities/Objects/__init__.py"], "/Tools/terrain_generator/camera.py": ["/main.py"]}
|
11,659
|
Jerommaas/Thunderstruck
|
refs/heads/master
|
/Tools/terrain_generator/gui.py
|
#
# This class contains the user interface.
# It also contains code to handle the panda frame
#
import os
import sys
import main
from PyQt5.QtCore import *
from PyQt5.QtGui import *
from PyQt5.QtWidgets import *
class tab_file(QWidget):
    '''
    Handles loading, saving and exporting of the scene.
    '''

    def __init__(self, parent, pandaWorld=None):
        super(QWidget, self).__init__(parent)
        self.pandaWorld = pandaWorld
        self.layout = QVBoxLayout(self)
        # save button
        self.saveButton = QPushButton("save button")
        self.saveButton.clicked.connect(self.saveDialog)
        self.layout.addWidget(self.saveButton)
        # load button
        self.loadButton = QPushButton("load button")
        self.loadButton.clicked.connect(self.loadDialog)
        self.layout.addWidget(self.loadButton)
        # Export button
        # BUG FIX: the connect was commented out (and would have connected the
        # button to itself); wire it to the export dialog instead.
        self.exportButton = QPushButton("export button")
        self.exportButton.clicked.connect(self.exportDialog)
        self.layout.addWidget(self.exportButton)
        # layout
        self.setLayout(self.layout)

    def saveDialog(self):
        """Ask the user for a file name and save the current scene to it."""
        options = QFileDialog.Options()
        options |= QFileDialog.DontUseNativeDialog
        # BUG FIX: getSaveFileName returns (fileName, selectedFilter); the
        # original stored the whole tuple, which is always truthy.
        fileName, _ = QFileDialog.getSaveFileName(self, 'Save File', "","All Files (*);;Python Files (*.py)", options=options)
        if fileName:
            print("save file as: {}".format(fileName) )
            self.pandaWorld.loader.save_scene(fileName)

    def loadDialog(self):
        """Ask the user for a file name and load a scene from it."""
        options = QFileDialog.Options()
        options |= QFileDialog.DontUseNativeDialog
        fileName, _ = QFileDialog.getOpenFileName(self,"Load File", "","All Files (*);;Python Files (*.py)", options=options)
        if fileName:
            print("load file: {}".format(fileName) )
            print( self.pandaWorld )
            self.pandaWorld.loader.load_scene(fileName)

    def exportDialog(self):
        """Ask the user for a target file and export the scene to it.

        BUG FIX: this was a second ``saveDialog`` definition, which silently
        shadowed the real save dialog above.
        """
        # TODO: export options
        options = QFileDialog.Options()
        options |= QFileDialog.DontUseNativeDialog
        fileName, _ = QFileDialog.getSaveFileName(self, 'Export To ...', "","All Files (*);;Python Files (*.py)", options=options)
        if fileName:
            self.pandaWorld.loader.save_scene(fileName)
class tab_terrain(QWidget):
    '''
    Generation of terrain, including
    - heightmaps,
    - texture blending,
    - skyboxes, skybox objects (distant buildings/mountains etc.)
    '''

    def __init__(self, parent, pandaWorld=None):
        super(QWidget, self).__init__(parent)
        self.layout = QVBoxLayout(self)
        # placeholder content until the terrain tools are implemented
        placeholder = QLabel(self)
        placeholder.setText("Terrain tab")
        self.layout.addWidget(placeholder)
def populateTree(tree, parent):
    # TODO(victor): move to some util class
    # Recursively mirror a nested dict (or any iterable of keys) into a
    # QStandardItem hierarchy under `parent`.
    for key in sorted(tree):
        item = QStandardItem(key)
        parent.appendRow(item)
        if isinstance(tree, dict):
            # only dicts carry children to recurse into
            populateTree(tree[key], item)
class tab_object(QWidget):
    '''
    adding / editing / removing / animating objects
    '''
    def __init__(self, parent, pandaWorld=None):
        super(QWidget, self).__init__(parent)
        self.layout = QVBoxLayout(self)
        self.pandaWorld = pandaWorld
        # tree view listing the model files available on disk
        self.objectTreeView = QTreeView(self)
        # tree model: filesystem model restricted to directories and *.egg files
        self.treeview_model = QDirModel()
        self.treeview_model.setFilter( QDir.NoSymLinks | QDir.AllDirs | QDir.NoDotAndDotDot | QDir.Files | QDir.DirsFirst)
        self.treeview_model.setNameFilters( ["*.egg"] )
        self.treeview_model.setSorting( QDir.Reversed)
        # TODO(victor): no hardcoded paths, get this at startup
        # NOTE(review): machine-specific absolute path -- breaks on any other checkout
        folder = "C:/Users/Victor/Desktop/thunderstruck/Thunderstruck/Entities/"
        print( "opening treeview in: {}".format(folder) )
        # show only column 0 (the name); hide the size/type/date columns
        self.objectTreeView.setModel(self.treeview_model)
        self.objectTreeView.setColumnHidden(1, True)
        self.objectTreeView.setColumnHidden(2, True)
        self.objectTreeView.setColumnHidden(3, True)
        self.objectTreeView.setRootIndex(self.treeview_model.index(folder))
        self.objectTreeView.setSortingEnabled(True)
        self.objectTreeView.setAnimated(False)
        self.objectTreeView.setSelectionMode(QAbstractItemView.SingleSelection)
        self.objectTreeView.selectionModel().selectionChanged.connect(self.treeSelectionChange)
        # button displaying the absolute browse path
        qdir = QDir(path=folder)
        self.pointlessButton = QPushButton(qdir.absolutePath())
        # label that will show the currently selected file path
        self.label = QLabel(self)
        self.label.setText("<file path>")
        # finalize layout
        self.layout.addWidget(self.pointlessButton)
        self.layout.addWidget(self.objectTreeView)
        self.layout.addWidget(self.label)
        self.setLayout(self.layout)
    def treeSelectionChange(self, index):
        # Selection handler: display the selected file's full path in the label.
        print("Selection changed:")
        for idx in self.objectTreeView.selectedIndexes():
            indexItem = self.treeview_model.index(idx.row(), 0, idx.parent())
            fileName = self.treeview_model.fileName(indexItem)
            filePath = self.treeview_model.filePath(indexItem)
            print( "full path:\t{}\nfile: \t\t{}".format(filePath, fileName))
            self.label.setText(filePath)
        # TODO(victor): detect if this is a file or a folder
        # TODO(victor): disable selecting folders
class tab_texture(QWidget):
    """Texture editing tab (placeholder until implemented)."""

    def __init__(self, parent, pandaWorld=None):
        super(QWidget, self).__init__(parent)
        self.layout = QVBoxLayout(self)
        placeholder = QLabel(self)
        placeholder.setText("Texture tab")
        self.layout.addWidget(placeholder)
class tab_game_elements(QWidget):
    '''
    In this window, things like finish lines, invisible walls, event triggers, etc. can be edited
    '''

    def __init__(self, parent, pandaWorld=None):
        super(QWidget, self).__init__(parent)
        self.layout = QVBoxLayout(self)
        self.pandaWorld = pandaWorld
        # placeholder content until the game-element tools exist
        placeholder = QLabel(self)
        placeholder.setText("Game elements tab")
        self.layout.addWidget(placeholder)
class Gui(QWidget):
    """Main editor widget: a tab bar hosting the individual tool tabs."""

    def __init__(self, parent, pandaWorld=None):
        super(QWidget, self).__init__(parent)
        self.pandaWorld = pandaWorld
        self.layout = QVBoxLayout(self)
        # build the tab container and its pages
        self.tabs = QTabWidget()
        self.tab_file = tab_file(self, pandaWorld=pandaWorld)
        self.tab_terrain = tab_terrain(self, pandaWorld=pandaWorld)
        self.tab_object = tab_object(self, pandaWorld=pandaWorld)
        self.tab_game_elements = tab_game_elements(self, pandaWorld=pandaWorld)
        self.tabs.resize(300, 200)
        # register every page with its caption
        for page, caption in ((self.tab_file, "File"),
                              (self.tab_terrain, "Terrain"),
                              (self.tab_object, "Objects"),
                              (self.tab_game_elements, "Game elements")):
            self.tabs.addTab(page, caption)
        # mount the tab container into this widget
        self.layout.addWidget(self.tabs)
        self.setLayout(self.layout)
if __name__ == "__main__":
    # running this module directly just starts the whole application
    import main
    main.main()
|
{"/Tools/terrain_generator/pandaWorld.py": ["/main.py"], "/Tools/terrain_generator/gui.py": ["/main.py"], "/Entities/Objects/Trucks.py": ["/Tools/__init__.py"], "/Tools/terrain_generator/data_object.py": ["/main.py"], "/Controls/Manager.py": ["/Controls/__init__.py"], "/main.py": ["/Controls/__init__.py", "/Entities/__init__.py", "/Entities/Objects/__init__.py"], "/Tools/terrain_generator/camera.py": ["/main.py"]}
|
11,660
|
Jerommaas/Thunderstruck
|
refs/heads/master
|
/Controls/Keyboard.py
|
from direct.showbase.DirectObject import DirectObject
class Arrows(DirectObject):
    """Keyboard control scheme bound to the arrow keys.

    Delegates steering / throttle / brake to the supplied ControlManager.
    """

    def __init__(self, ControlManager):
        self.CM = ControlManager
        # current state of the steering keys (1 = held down)
        self.LeftPressed = 0
        self.RightPressed = 0
        self.KeyBindings()

    def KeyBindings(self):
        # Map panda3d key events onto the press/release handlers.
        # Subclasses override this method to bind a different key set.
        bindings = (
            ('arrow_left', self.Left_press), ('arrow_left-up', self.Left_rel),
            ('arrow_right', self.Right_press), ('arrow_right-up', self.Right_rel),
            ('arrow_up', self.Up_press), ('arrow_up-up', self.Up_rel),
            ('arrow_down', self.Down_press), ('arrow_down-up', self.Down_rel),
        )
        for event, handler in bindings:
            self.accept(event, handler)

    # Remind: self.CM.Steer() / Throttle() / Brake() come from the control manager.

    # --- steering ---
    def Left_press(self):
        self.LeftPressed = 1
        self.LR_arrows()

    def Left_rel(self):
        self.LeftPressed = 0
        self.LR_arrows()

    def Right_press(self):
        self.RightPressed = 1
        self.LR_arrows()

    def Right_rel(self):
        self.RightPressed = 0
        self.LR_arrows()

    def LR_arrows(self):
        # combine both keys into a single steering axis: -1 (right) .. +1 (left)
        self.CM.Steer(self.LeftPressed - self.RightPressed)

    # --- throttle ---
    def Up_press(self):
        self.CM.Throttle(1)

    def Up_rel(self):
        self.CM.Throttle(0)

    # --- brake ---
    def Down_press(self):
        self.CM.Brake(1)

    def Down_rel(self):
        self.CM.Brake(0)
class WASD(Arrows):
    """Same control scheme as Arrows, bound to the WASD keys instead."""

    def KeyBindings(self):
        for event, handler in (
            ('a', self.Left_press), ('a-up', self.Left_rel),
            ('d', self.Right_press), ('d-up', self.Right_rel),
            ('w', self.Up_press), ('w-up', self.Up_rel),
            ('s', self.Down_press), ('s-up', self.Down_rel),
        ):
            self.accept(event, handler)
|
{"/Tools/terrain_generator/pandaWorld.py": ["/main.py"], "/Tools/terrain_generator/gui.py": ["/main.py"], "/Entities/Objects/Trucks.py": ["/Tools/__init__.py"], "/Tools/terrain_generator/data_object.py": ["/main.py"], "/Controls/Manager.py": ["/Controls/__init__.py"], "/main.py": ["/Controls/__init__.py", "/Entities/__init__.py", "/Entities/Objects/__init__.py"], "/Tools/terrain_generator/camera.py": ["/main.py"]}
|
11,661
|
Jerommaas/Thunderstruck
|
refs/heads/master
|
/Tests/victor_client_server/client.py
|
'''
Client test class
'''
from sys import path
from os import getcwd
import time
path.append(getcwd() + "\\..\\..\\Tools\\game_config\\") # TODO(victor): check if indeed windows
from config import Config
from panda3d.core import QueuedConnectionManager
from panda3d.core import QueuedConnectionListener
from panda3d.core import QueuedConnectionReader
from panda3d.core import ConnectionWriter
#
from panda3d.core import PointerToConnection
from panda3d.core import NetAddress
from panda3d.core import loadPrcFileData
loadPrcFileData("", "window-type none")
# from direct.directbase import DirectStart
# from direct.showbase.ShowBase import ShowBase
from direct.task import Task
from panda3d.core import NetDatagram
class Client(object):
    """Minimal TCP client built on panda3d's queued connection classes."""

    def __init__(self, host="localhost", port=5001, name="client"):
        self.name = name
        self.cManager = QueuedConnectionManager()
        self.cReader = QueuedConnectionReader(self.cManager, 0)
        self.cWriter = ConnectionWriter(self.cManager, 0)
        self.readerCallbacks = []
        taskMgr = Task.TaskManager()
        # give up trying to reach the server after 3 seconds
        timeout_in_miliseconds = 3000
        self.myConnection = self.cManager.openTCPClientConnection(host, port, timeout_in_miliseconds)
        if not self.myConnection:
            print("{}: Failed to connect to server!".format(self.name))
            return
        # start receiving messages from the server
        self.cReader.addConnection(self.myConnection)
        taskMgr.add(self.tskReaderPolling, "Poll the connection reader", -40)
        print("{}: Successfully connected to server {} at {}!".format(self.name, port, host))

    def tskReaderPolling(self, taskdata):
        """Polling task: drain one datagram per step and fan it out to the callbacks."""
        # Check getData's return value; if we were threaded, someone else could
        # have snagged this data before we did.
        datagram = NetDatagram()
        if self.cReader.dataAvailable() and self.cReader.getData(datagram):
            for cb in self.readerCallbacks:
                cb(datagram)
        return Task.cont

    def addReaderCallback(self, callbackFunction):
        # register a function to be invoked for every received datagram
        self.readerCallbacks.append(callbackFunction)

    def ProcessReaderData(self, data):
        # TODO(vicdie): overwrite in derived classes
        pass

    def Close(self):
        # close the connection if it exists
        if self.myConnection:
            self.cManager.closeConnection(self.myConnection)
if __name__ == "__main__":
    print("=== Start ===")
    config = Config()
    client = Client(port=config["server"]["port"], host=config["server"]["host"])
    # busy-wait for ten seconds so the connection stays open
    deadline = time.time() + 10
    while time.time() < deadline:
        pass
    # close
    client.Close()
    print("=== Done! ===")
|
{"/Tools/terrain_generator/pandaWorld.py": ["/main.py"], "/Tools/terrain_generator/gui.py": ["/main.py"], "/Entities/Objects/Trucks.py": ["/Tools/__init__.py"], "/Tools/terrain_generator/data_object.py": ["/main.py"], "/Controls/Manager.py": ["/Controls/__init__.py"], "/main.py": ["/Controls/__init__.py", "/Entities/__init__.py", "/Entities/Objects/__init__.py"], "/Tools/terrain_generator/camera.py": ["/main.py"]}
|
11,662
|
Jerommaas/Thunderstruck
|
refs/heads/master
|
/Tools/EulerAngles.py
|
import numpy as np
import math
# Calculates Rotation Matrix given euler angles
def RotMatDeg(z, y, x):
    """Like RotMat, but the three Euler angles are given in degrees."""
    return RotMat(*np.deg2rad([z, y, x]))
def RotMat(ang_z, ang_y, ang_x):
    """Build the 3-D rotation matrix for Z-Y-X Euler angles (radians).

    Returns a tuple ``(R, R_inv)`` where ``R = Rz @ Ry @ Rx`` and ``R_inv``
    is its inverse.

    IMPROVEMENT: a rotation matrix is orthogonal, so its inverse is exactly
    its transpose; using ``R.T`` instead of ``np.linalg.inv(R)`` is cheaper
    and free of numerical inversion error.
    """
    # hoist the trig evaluations; each angle is used twice per matrix
    cz, sz = math.cos(ang_z), math.sin(ang_z)
    cy, sy = math.cos(ang_y), math.sin(ang_y)
    cx, sx = math.cos(ang_x), math.sin(ang_x)
    R_x = np.array([[1, 0, 0],
                    [0, cx, -sx],
                    [0, sx, cx]])
    R_y = np.array([[cy, 0, sy],
                    [0, 1, 0],
                    [-sy, 0, cy]])
    R_z = np.array([[cz, -sz, 0],
                    [sz, cz, 0],
                    [0, 0, 1]])
    R = np.dot(R_z, np.dot(R_y, R_x))
    return R, R.T
|
{"/Tools/terrain_generator/pandaWorld.py": ["/main.py"], "/Tools/terrain_generator/gui.py": ["/main.py"], "/Entities/Objects/Trucks.py": ["/Tools/__init__.py"], "/Tools/terrain_generator/data_object.py": ["/main.py"], "/Controls/Manager.py": ["/Controls/__init__.py"], "/main.py": ["/Controls/__init__.py", "/Entities/__init__.py", "/Entities/Objects/__init__.py"], "/Tools/terrain_generator/camera.py": ["/main.py"]}
|
11,663
|
Jerommaas/Thunderstruck
|
refs/heads/master
|
/Tests/victor_client_server/server.py
|
'''
Server test class
'''
from sys import path
from os import getcwd
import time
path.append(getcwd() + "\\..\\..\\Tools\\game_config\\") # TODO(victor): check if indeed windows
from config import Config
from panda3d.core import QueuedConnectionManager
from panda3d.core import QueuedConnectionListener
from panda3d.core import QueuedConnectionReader
from panda3d.core import ConnectionWriter
#
from panda3d.core import PointerToConnection
from panda3d.core import NetAddress
from panda3d.core import loadPrcFileData
loadPrcFileData("", "window-type none")
# from direct.directbase import DirectStart
# from direct.showbase.ShowBase import ShowBase
from direct.task import Task
from panda3d.core import NetDatagram
from panda3d.core import Datagram
class Server(object):
    """TCP server based on panda3d's queued connection classes.

    See https://www.panda3d.org/manual/index.php/Client-Server_Connection
    """

    def __init__(self, host="localhost", port=5001):
        taskMgr = Task.TaskManager()
        self.cManager = QueuedConnectionManager()
        self.cListener = QueuedConnectionListener(self.cManager, 0)
        self.cReader = QueuedConnectionReader(self.cManager, 0)
        self.cWriter = ConnectionWriter(self.cManager, 0)
        self.activeConnections = []  # all currently connected clients
        self.readerCallbacks = []    # invoked for every received datagram
        # If we ignore 1,000 connection attempts, something is wrong!
        backlog = 1000
        self.tcpSocket = self.cManager.openTCPServerRendezvous(port, backlog)
        self.cListener.addConnection(self.tcpSocket)
        taskMgr.add(self.tskListenerPolling, "Poll the connection listener", -39)
        taskMgr.add(self.tskReaderPolling, "Poll the connection reader", -40)
        print("started server! ({} at {})".format(port, host))

    def Start(self):
        # derived servers can overwrite this function if needed
        pass

    def tskListenerPolling(self, taskdata):
        """Accept pending client connections and register them with the reader."""
        # TODO(victor): what happens if a client shuts down?
        if self.cListener.newConnectionAvailable():
            rendezvous = PointerToConnection()
            netAddress = NetAddress()
            newConnection = PointerToConnection()
            if self.cListener.getNewConnection(rendezvous, netAddress, newConnection):
                newConnection = newConnection.p()
                self.activeConnections.append(newConnection)  # remember the client
                self.cReader.addConnection(newConnection)     # begin reading from it
                print("server: received new connection!")
        return Task.cont

    def tskReaderPolling(self, taskdata):
        """Polling task: drain one datagram per step and fan it out to the callbacks."""
        # Check getData's return value; if we were threaded, someone else could
        # have snagged this data before we did.
        datagram = NetDatagram()
        if self.cReader.dataAvailable() and self.cReader.getData(datagram):
            for cb in self.readerCallbacks:
                cb(datagram)
        return Task.cont

    def addReaderCallback(self, callbackFunction):
        # register a function to be invoked for every received datagram
        self.readerCallbacks.append(callbackFunction)

    def BroadcastMessage(self, datagram):
        # send the same message to every connected client
        for client in self.activeConnections:
            self.cWriter.send(datagram, client)

    def Close(self):
        # detach every client from the reader, then shut down the listening socket
        for client in self.activeConnections:
            self.cReader.removeConnection(client)
        self.activeConnections = []
        self.cManager.closeConnection(self.tcpSocket)
if __name__ == "__main__":
    print("=== Start ===")
    config = Config()
    server = Server(port=config["server"]["port"], host=config["server"]["host"])
    server.Start()
    # busy-wait for ten seconds so the server stays alive
    deadline = time.time() + 10
    while time.time() < deadline:
        pass
    # close
    server.Close()
    print("=== Done! ===")
|
{"/Tools/terrain_generator/pandaWorld.py": ["/main.py"], "/Tools/terrain_generator/gui.py": ["/main.py"], "/Entities/Objects/Trucks.py": ["/Tools/__init__.py"], "/Tools/terrain_generator/data_object.py": ["/main.py"], "/Controls/Manager.py": ["/Controls/__init__.py"], "/main.py": ["/Controls/__init__.py", "/Entities/__init__.py", "/Entities/Objects/__init__.py"], "/Tools/terrain_generator/camera.py": ["/main.py"]}
|
11,664
|
Jerommaas/Thunderstruck
|
refs/heads/master
|
/Tests/victor_client_server/test.py
|
'''
Test using client and server
start up server, than 2 clients
let them both send and receive some stuff
'''
from sys import path
from os import getcwd
import time
path.append(getcwd() + "\\..\\..\\Tools\\game_config\\")
from config import Config
from client import Client
from server import Server
from panda3d.core import NetDatagram
from panda3d.core import Datagram
from direct.task import Task
class TestServer(Server):
    ''' Test sending a heartbeat to clients '''

    def __init__(self, host="localhost", port=5001):
        super().__init__(host=host, port=port)

    def heartbeat(self):
        # broadcast an (empty) datagram to every connected client
        beat = Datagram()
        print("server: sending heartbeat to {} clients".format(len(self.activeConnections)))
        self.BroadcastMessage(beat)

    def ProcessReaderData(self, data):
        # Todo: figure out who sent it
        print("Server: receiving data")
class TestClient(Client):
    ''' Test receiving heartbeat from server '''

    def ProcessReaderData(self, data):
        print("{}: reading data!".format(self.name))

    def SendMessage(self):
        # push an (empty) datagram to the server
        print("{}: sending message to server".format(self.name))
        payload = Datagram()
        self.cWriter.send(payload, self.myConnection)
if __name__ == "__main__":
    config = Config()
    port = config["server"]["port"]
    host = config["server"]["host"]
    # start server and clients
    server = TestServer(port=port, host=host)
    client1 = TestClient(port=port, host=host, name="Henk")
    client2 = TestClient(port=port, host=host, name="Bert")
    # run test
    # TODO(vicdie): run server and clients in separate threads,
    # move Task.TaskManager().step() stuff

    def run_phase(banner, once_per_second):
        # Step the task manager as fast as possible for ten seconds,
        # invoking `once_per_second` roughly every second.
        print(banner)
        t_start = time.time()
        t_last_beat = t_start
        while time.time() < t_start + 10:
            Task.TaskManager().step()
            if t_last_beat + 1 < time.time():
                once_per_second()
                t_last_beat = time.time()

    run_phase("======= Server->Client =======", server.heartbeat)

    def both_clients_send():
        client1.SendMessage()
        client2.SendMessage()

    run_phase("======= Client->Server =======", both_clients_send)
    # close
    client1.Close()
    client2.Close()
    server.Close()
|
{"/Tools/terrain_generator/pandaWorld.py": ["/main.py"], "/Tools/terrain_generator/gui.py": ["/main.py"], "/Entities/Objects/Trucks.py": ["/Tools/__init__.py"], "/Tools/terrain_generator/data_object.py": ["/main.py"], "/Controls/Manager.py": ["/Controls/__init__.py"], "/main.py": ["/Controls/__init__.py", "/Entities/__init__.py", "/Entities/Objects/__init__.py"], "/Tools/terrain_generator/camera.py": ["/main.py"]}
|
11,665
|
Jerommaas/Thunderstruck
|
refs/heads/master
|
/Entities/Objects/Trucks.py
|
import numpy as np
from Tools import *
modelfolder = "Entities/Objects/Models/"
class Basic:
    """Basic drivable truck: loads the model and integrates simple planar driving physics."""

    eggname = modelfolder + "Truck01/truck_01.egg"
    # eggname = modelfolder+"Environment/pine_tree_01.egg"

    # Static Class Properties
    turnradius = 15   # [m]
    sideacc = 10      # [m/s2] acceleration when steering max
    forwardacc = 10   # [m/s2] without air drag
    topspeed = 40     # [m/s]
    brakespeed = 1    # [s] from topspeed to full stop
    mass = 1000       # [kg]
    # World Properties
    rho = 1.225       # [kg/m3] air density
    gravity = 9.81    # [m/s2]

    def __init__(self, World):
        self.Model()
        self.StartLocation()
        self.TruckParams()
        self.RotationMatrices()
        # register this truck for per-frame Update(dt) calls
        World.Clock.UpdateMe(self)

    def Model(self):
        # `loader` and `render` are panda3d ShowBase globals
        self.m = loader.loadModel(self.eggname)
        self.m.reparentTo(render)

    def StartLocation(self):
        self.m.setPos(0, 0, 20)
        self.m.setHpr(0, 0, 0)

    def TruckParams(self):
        """Initialize control inputs, force constants and velocity state."""
        # User input
        self.Steer = 0
        self.Throttle = 0
        self.Brake = 0
        # Forces
        self.Fengine = self.mass * self.forwardacc
        self.Fbrake = self.mass * self.topspeed / self.brakespeed
        # Aerodynamics: at top speed, Fengine == 1/2 rho v^2 Cd
        self.Cd = self.Fengine * 2 / self.rho / (self.topspeed ** 2)
        # Velocity in body frame and world frame
        self.Vbody = np.array([0., 0., 0.])   # [m/s]
        self.Vworld = np.array([0., 0., 0.])  # [m/s]

    def RotationMatrices(self):
        # Use attitude to compute Euler Transformation Matrices
        self.Truck2World, self.World2Truck = EulerAngles.RotMatDeg(self.m.getH(), self.m.getP(), self.m.getR())

    def Update(self, dt):
        """Advance the truck by dt seconds: turn, accelerate/brake, move.

        BUG FIX: the original recomputed Truck2World/World2Truck into unused
        locals at the top of this method; the cached self.* matrices are what
        is actually used below, so the dead computation was removed.
        """
        # Perform turning
        Yaw = self.m.getH()
        turnrate = self.Steer * 360 / 4  # Hardcoded turnrate for now
        self.m.setH(Yaw + turnrate * dt)
        # Only horizontal driving now, foeck gravity and terrain!
        Fdrag = 0.5 * self.rho * self.Vbody[1] ** 2 * self.Cd
        frontacc = (self.Throttle * self.Fengine - self.Brake * self.Fbrake - Fdrag) / self.mass
        # New forward velocity; clamp at zero (no reversing)
        self.Vbody[1] = max(self.Vbody[1] + frontacc * dt, 0)
        # Change frame of reference (World2Truck is the inverse of Truck2World)
        self.Vworld = np.dot(self.Vbody, self.World2Truck)
        # Update Position
        p = self.m.getPos()
        newP = np.array(p) + self.Vworld * dt
        self.m.setX(newP[0])
        self.m.setY(newP[1])
        self.m.setZ(newP[2])
        # Cache the rotation matrices for the next frame
        self.RotationMatrices()
|
{"/Tools/terrain_generator/pandaWorld.py": ["/main.py"], "/Tools/terrain_generator/gui.py": ["/main.py"], "/Entities/Objects/Trucks.py": ["/Tools/__init__.py"], "/Tools/terrain_generator/data_object.py": ["/main.py"], "/Controls/Manager.py": ["/Controls/__init__.py"], "/main.py": ["/Controls/__init__.py", "/Entities/__init__.py", "/Entities/Objects/__init__.py"], "/Tools/terrain_generator/camera.py": ["/main.py"]}
|
11,666
|
Jerommaas/Thunderstruck
|
refs/heads/master
|
/Entities/__init__.py
|
# Public submodules of the Entities package (consumed by `from Entities import *`).
__all__ = ['Terrain','skyDome','Clock','Camera']
|
{"/Tools/terrain_generator/pandaWorld.py": ["/main.py"], "/Tools/terrain_generator/gui.py": ["/main.py"], "/Entities/Objects/Trucks.py": ["/Tools/__init__.py"], "/Tools/terrain_generator/data_object.py": ["/main.py"], "/Controls/Manager.py": ["/Controls/__init__.py"], "/main.py": ["/Controls/__init__.py", "/Entities/__init__.py", "/Entities/Objects/__init__.py"], "/Tools/terrain_generator/camera.py": ["/main.py"]}
|
11,667
|
Jerommaas/Thunderstruck
|
refs/heads/master
|
/Controls/__init__.py
|
# Public submodules of the Controls package (consumed by `from Controls import *`).
__all__ = ['Manager']
# Keyboard, Controller and xinput are imported in Manager
|
{"/Tools/terrain_generator/pandaWorld.py": ["/main.py"], "/Tools/terrain_generator/gui.py": ["/main.py"], "/Entities/Objects/Trucks.py": ["/Tools/__init__.py"], "/Tools/terrain_generator/data_object.py": ["/main.py"], "/Controls/Manager.py": ["/Controls/__init__.py"], "/main.py": ["/Controls/__init__.py", "/Entities/__init__.py", "/Entities/Objects/__init__.py"], "/Tools/terrain_generator/camera.py": ["/main.py"]}
|
11,668
|
Jerommaas/Thunderstruck
|
refs/heads/master
|
/Entities/Camera.py
|
import numpy as np
from direct.task import Task
class Camera:
    """Chase camera that trails the player's truck at a fixed distance."""

    Dist2Truck = 35  # [m] distance from camera to truck
    Azimuth = 8      # [deg] elevation of the camera relative to the truck

    def __init__(self, World):
        # NOTE: World already provides a world.camera object that we steer here
        self.World = World
        # Reference to the controlled truck!
        # Of course, must be replaced by networkcommunication
        self.Truck = World.Truck1
        # offset of the camera relative to the truck, in the truck's body frame
        self.ComputePosition()

    def ComputePosition(self):
        # place the camera behind and above the truck:
        # (0, -cos(az), sin(az)) scaled by the follow distance
        az = np.deg2rad(self.Azimuth)
        self.Position = self.Dist2Truck * np.array([0, -np.cos(az), np.sin(az)])

    def Update(self):
        # rotate the body-frame offset into the world frame, add the truck position
        truck_pos = np.array(self.Truck.m.getPos())
        offset_world = np.dot(self.Position, self.Truck.World2Truck)
        self.World.camera.setPos(tuple(truck_pos + offset_world))
        # copy only the truck's heading; the camera stays level otherwise
        self.World.camera.setHpr(self.Truck.m.getH(), 0, 0)
|
{"/Tools/terrain_generator/pandaWorld.py": ["/main.py"], "/Tools/terrain_generator/gui.py": ["/main.py"], "/Entities/Objects/Trucks.py": ["/Tools/__init__.py"], "/Tools/terrain_generator/data_object.py": ["/main.py"], "/Controls/Manager.py": ["/Controls/__init__.py"], "/main.py": ["/Controls/__init__.py", "/Entities/__init__.py", "/Entities/Objects/__init__.py"], "/Tools/terrain_generator/camera.py": ["/main.py"]}
|
11,669
|
Jerommaas/Thunderstruck
|
refs/heads/master
|
/Tests/victor_client_server/testNetworkObject.py
|
# python packages
from sys import path
from os import getcwd
import time
import json
# client / server
from panda3d.core import QueuedConnectionManager
from panda3d.core import QueuedConnectionListener
from panda3d.core import QueuedConnectionReader
from panda3d.core import ConnectionWriter
# other panda stuff
from panda3d.core import *
from panda3d.direct import *
from panda3d.core import NetDatagram
from panda3d.core import Datagram
from panda3d.core import PointerToConnection
from panda3d.core import NetAddress
from panda3d.core import loadPrcFileData
loadPrcFileData("", "window-type none")
from direct.task import Task
# self made packages
from NetworkObject import NetworkObject, NetworkEvent, NetworkPerson, eventTextmessage
path.append(getcwd() + "\\..\\..\\Tools\\game_config\\")
# from config import Config
class ClientServerBase(object):
    '''
    Baseclass for client and server.

    The two share almost all functionality; they only differ in who is
    responsible for opening/closing connections and in the fact that a
    server can hold multiple connections.
    '''

    def __init__(self, host="localhost", port=5001, name="client or server"):
        self.name = name
        self.port = port
        self.host = host
        self.cManager = QueuedConnectionManager()
        self.cReader = QueuedConnectionReader(self.cManager, 0)
        self.cWriter = ConnectionWriter(self.cManager, 0)
        # list of connections: 1 item for a client, any number for a server
        self.connections = []
        self.readerCallback = None  # invoked when a new message arrives
        self.writerCallback = None  # invoked when a message needs to be constructed
        self.taskMgr = Task.TaskManager()
        self.taskMgr.add(self.tskReaderPolling, "Poll the connection reader", -40)
        self.taskMgr.add(self.tskWriterPolling, "Send data package", -39)

    def setReaderCallback(self, callbackFunction):
        self.readerCallback = callbackFunction

    def setWriterCallback(self, callbackFunction):
        self.writerCallback = callbackFunction

    def tskReaderPolling(self, taskdata):
        """Polling task: hand every received datagram to the reader callback."""
        if not self.cReader.dataAvailable():
            return Task.cont
        datagram = NetDatagram()
        if not self.cReader.getData(datagram):
            print( "tskReaderPolling(): cannot claim data!" )
            return Task.cont
        if self.readerCallback:
            print( "tskReaderPolling():readerCallback()" )
            self.readerCallback( datagram )
        return Task.cont

    def tskWriterPolling(self, taskdata):
        """Polling task: ask the writer callback for a datagram, send it to all connections.

        CLEANUP: the task parameter used to be named ``data`` and was then
        immediately shadowed by the callback result; renamed for clarity and
        for consistency with tskReaderPolling.
        """
        if not self.writerCallback:
            return Task.cont
        data = self.writerCallback()
        # IDIOM FIX: identity comparison with None (was `data == None`)
        if data is None:
            return Task.cont
        assert(isinstance(data,Datagram))
        print( "tskWriterPolling() sending to : {}".format(len(self.connections) ))
        for con in self.connections:
            if con:
                print( "tskWriterPolling() sending" )
                self.cWriter.send(data,con)
        return Task.cont

    def Close(self):
        # close each of the connections
        for c in self.connections:
            self.cManager.closeConnection(c)

    def ProcessReaderData(self, data):
        raise NotImplementedError("overwrite ProcessReaderData() in client/server implementation")
class Client(ClientServerBase):
    """Client side: opens a single TCP connection to the server."""

    def __init__(self, host="localhost", port=5001, name="client"):
        super().__init__(host=host, port=port, name=name)
        # give up trying to reach the server after 3 seconds
        timeout_in_miliseconds = 3000
        connection = self.cManager.openTCPClientConnection(self.host, self.port, timeout_in_miliseconds)
        # BUG FIX: the original appended first and then tested `self.connections`,
        # which is truthy even when the connection attempt returned None, so a
        # failed connect was never detected or reported.
        if not connection:
            print("{}: Failed to connect to server!".format(self.name) )
            return
        self.connections.append(connection)
        self.cReader.addConnection(connection)
        # BUG FIX: the base class __init__ already registers tskReaderPolling;
        # the original registered it a second time here, which would poll the
        # reader twice per step.
class Server(ClientServerBase):
    """Server side: listens on a rendezvous socket and accepts client connections."""

    def __init__(self, host="localhost", port=5001, name="server"):
        super().__init__(host=host, port=port, name=name)
        backlog = 1000
        self.tcpSocket = self.cManager.openTCPServerRendezvous(port, backlog)
        self.cListener = QueuedConnectionListener(self.cManager, 0)
        self.cListener.addConnection(self.tcpSocket)
        self.taskMgr.add(self.tskListenerPolling, "Poll the connection listener", -39)

    def tskListenerPolling(self, taskdata):
        """Accept pending connections and start reading from them."""
        # TODO(victor): what happens when a connection drops away?
        if self.cListener.newConnectionAvailable():
            rendezvous = PointerToConnection()
            netAddress = NetAddress()
            newConnection = PointerToConnection()
            if self.cListener.getNewConnection(rendezvous, netAddress, newConnection):
                newConnection = newConnection.p()
                self.connections.append(newConnection)     # remember the client
                self.cReader.addConnection(newConnection)  # begin reading from it
                print("server: received new connection!")
        return Task.cont

    def BroadcastMessage(self, datagram):
        # send the same message to all clients
        for con in self.connections:
            self.cWriter.send(datagram, con)

    def Close(self):
        # detach every client from the reader, then close the rendezvous socket
        for con in self.connections:
            self.cReader.removeConnection(con)
        self.connections = []
        self.cManager.closeConnection(self.tcpSocket)
'''
NOTE: base network manager
contains functions for packing all managed objects in a string
and extracting objects from this string
The functions for sending and receiving new objects are almost identical on server and client side
'''
class NetworkManager(object):
    '''
    Network manager baseclass,
    Defines the interface of derived objects, takes care of all generic stuff
    '''
    class MessageEnum:
        # wire-format tags identifying the kind of payload in a datagram
        EVENT = 1
        CONSTRUCT = 2
        UPDATE = 3
        DESTRUCT = 4
    def __init__(self, client_or_server):
        # the transport (a Client or a Server); we hook its reader/writer callbacks
        self.client_or_server = client_or_server
        self.eventQueue = [] # events which will need to be sent in the next event update
        self.newObjectQueue = [] # objects for which a construction message needs to be sent
        self.destructObjectQueue = [] # objects for which a destruction message needs to be sent
        self.client_or_server.setReaderCallback( self.readerCallback )
        self.client_or_server.setWriterCallback( self.writerCallback )
        self.managedNetworkObjects = [] # objects owned by this NetworkManager
        self.sharedNetworkObjects = dict() # self.clientNetworkObjects[connection] = dict(id, object)
        self.receivedNetworkEvents = dict() # per-connection lists of received events
    def readerCallback( self, data ):
        # Decode an incoming datagram.
        # Wire format: a uint8 message type (EVENT/CONSTRUCT/UPDATE/DESTRUCT)
        # followed by a JSON string containing the actual payload.
        print("reader callback!" )
        sender = data.getConnection()
        iterator = DatagramIterator(data)
        messageEnum = iterator.getUint8()
        if not sender in self.receivedNetworkEvents:
            self.receivedNetworkEvents[sender] = []
        if messageEnum == NetworkManager.MessageEnum.EVENT:
            # received an event message: a JSON list of event dicts
            messageString = iterator.getString()
            messageJsonData = json.loads( messageString )
            for obj in messageJsonData:
                # Rebuild each event via introspection: look the class name up
                # in globals() and construct it from the dict payload.
                # NOTE(review): assumes the event class is defined in this
                # module and that the sender is trusted -- confirm before
                # accepting data from untrusted peers.
                class_instance = globals()[obj["class_name"]]( data=obj )
                print( "received event: {}".format(class_instance) )
                # Add new event to list, keep track of who sent it (sender)
                self.receivedNetworkEvents[sender].append( class_instance )
    def writerCallback( self ):
        # Called by the transport's writer task; returns a Datagram containing
        # all queued events, or None when there is nothing to send.
        if not self.eventQueue:
            return None
        # collect the dict representation of every queued event
        data = []
        for event in self.eventQueue:
            data.append( event.toMessage() )
        self.eventQueue = []
        # make a message for the events
        myPyDatagram=Datagram()
        myPyDatagram.add_uint8( NetworkManager.MessageEnum.EVENT )
        datastring = json.dumps(data)
        print( datastring )
        myPyDatagram.add_string(datastring)
        print("server: writer callback")
        return myPyDatagram
    def add(self, newObject ):
        # Take ownership of a new network object and queue its construct message.
        self.managedNetworkObjects.append( newObject )
        self.newObjectQueue.append( newObject)
        print("NetworkManager: added object! {}".format(newObject) )
    def remove( self, removeObject ):
        # Release a managed object and queue its destruct message.
        self.managedNetworkObjects.remove( removeObject )
        self.destructObjectQueue.append( removeObject )
        print("NetworkManager: removed object! {}".format(removeObject) )
    def addEvent( self, event ):
        # Queue an event to be shipped on the next writer pass.
        assert(isinstance(event,NetworkEvent)) # check correct type
        self.eventQueue.append(event)
        print("NetworkManager: sending new event! {}".format(event) )
'''
NOTE: client and server network manager are almost identical.
The client implementation also gets a dictionary with connections, even though there is only one (server)
This is to keep the underlying code exactly the same, so that as much of the code as possible is shared between client and server
'''
class ClientNetworkManager(NetworkManager):
    """Client-side NetworkManager; currently only forwards to the base class."""
    def __init__(self, client):
        # client: transport object exposing setReaderCallback/setWriterCallback
        super().__init__(client)
    def addEvent( self, event ):
        super().addEvent( event )
        # TODO(victor): Do client specific stuff when a new event is sent
    def readerCallback( self, data ):
        super().readerCallback( data )
        print("ClientNetworkManager: reader callback" )
        # TODO(victor): Do client specific stuff when new data arrives
class ServerNetworkManager(NetworkManager):
    """Server-side NetworkManager; currently only forwards to the base class."""
    def __init__(self, server):
        # server: transport object exposing setReaderCallback/setWriterCallback
        super().__init__(server)
    def addEvent( self, event ):
        super().addEvent( event )
        # TODO(victor): Do server specific stuff when a new event is sent
    def readerCallback( self, data ):
        super().readerCallback( data )
        print("ServerNetworkManager: reader callback" )
        # TODO(victor): Do server specific stuff when new data arrive
'''
'''
if __name__ == "__main__":
    # Manual smoke test: run server and client manager in one process and
    # exchange a couple of text-message events over ~2 seconds.
    server = Server()
    serverManager = ServerNetworkManager(server)
    client = Client()
    clientManager = ClientNetworkManager(client)
    # TODO(victor): maybe we want to provide the networkobject with the sendingNetworkManager that it belongs to
    # This way, when the object is destructed, we don't need to tell the network manager about it
    person1 = NetworkPerson()
    # TODO(vicdie): this is needed for as long as client and server are not separately running
    # a static field can be added to the NetworkObject, which points to the NetworkManager where new ones need to register
    serverManager.add( person1 )
    # make some changes to person1
    serverManager.remove( person1 )
    # make sure the receiving side obtains a new NetworkPerson and a text message
    b_send = True
    tStart = time.time()
    while time.time() < tStart + 2:
        # after one second, queue two events exactly once (b_send latch)
        if time.time() > tStart + 1 and b_send:
            b_send = False
            textMessage = eventTextmessage( "server says hoi" )
            serverManager.addEvent( textMessage )
            textMessage2 = eventTextmessage( "server says dag" )
            serverManager.addEvent( textMessage2 )
        Task.TaskManager().step() # perform a step as often as possible
    print("bla")
|
{"/Tools/terrain_generator/pandaWorld.py": ["/main.py"], "/Tools/terrain_generator/gui.py": ["/main.py"], "/Entities/Objects/Trucks.py": ["/Tools/__init__.py"], "/Tools/terrain_generator/data_object.py": ["/main.py"], "/Controls/Manager.py": ["/Controls/__init__.py"], "/main.py": ["/Controls/__init__.py", "/Entities/__init__.py", "/Entities/Objects/__init__.py"], "/Tools/terrain_generator/camera.py": ["/main.py"]}
|
11,670
|
Jerommaas/Thunderstruck
|
refs/heads/master
|
/Entities/Terrain.py
|
from panda3d.core import GeoMipTerrain, Texture, TextureStage,SamplerState
from direct.task import Task
import sys
import os
class Terrain:
    """Heightmap-based GeoMipTerrain with blend-mapped multi-texturing.

    Requires a running Panda3D ShowBase: the implicit globals ``render``,
    ``base``, ``loader`` and ``taskMgr`` must exist before construction.
    """
    # Default map file location.
    # NOTE(review): ``filepath`` is currently unused -- __init__ loads
    # hard-coded relative paths instead; keep or wire up deliberately.
    folder = os.path.dirname(os.path.abspath(__file__))
    subfolder = "/Maps/"
    file = "simple.jpg"
    filepath = folder + subfolder + file

    def __init__(self):
        # BUGFIX: removed ``fn = Filename.fromOsSpecific(self.filepath)`` --
        # ``Filename`` is never imported (NameError at runtime) and the
        # result was never used.
        self.terrain = GeoMipTerrain("mySimpleTerrain")
        self.terrain.setHeightfield("Entities/Maps/heightmap.png")
        self.terrain.getRoot().setSz(40)  # vertical scale of the terrain
        #terrain.setBruteforce(True)
        # Set terrain LOD properties.
        # NOTE(review): near (500) > far (100) looks inverted -- confirm intent.
        self.terrain.setBlockSize(16)
        self.terrain.setNear(500)
        self.terrain.setFar(100)
        self.terrain.setFocalPoint(base.camera)
        # Store the root NodePath for convenience and attach it to the scene
        # graph (BUGFIX: the root was previously reparented to render twice).
        root = self.terrain.getRoot()
        root.reparentTo(render)
        self.terrain.generate()
        # Determine terrain size from the heightfield (maps are 2^n + 1 wide,
        # hence the -1); keep the larger dimension as the overall size.
        self.heightmap = self.terrain.heightfield()
        self.size = max(self.heightmap.getXSize(), self.heightmap.getYSize()) - 1
        self.xsize = self.heightmap.getXSize() - 1
        self.ysize = self.heightmap.getYSize() - 1
        # Set multi texture
        # Source http://www.panda3d.org/phpbb2/viewtopic.php?t=4536
        self.generateSurfaceTextures()
        # Load the blend texture: it steers where each surface texture shows.
        self.blendTexture = loader.loadTexture("Entities/Maps/blendMap.png")
        self.blendTS = TextureStage('blend')
        self.blendTS.setSort(0)
        self.blendTS.setPriority(1)
        # Apply textures to the terrain and connect the custom blend shader.
        self.setSurfaceTextures()

        # Keep updating the terrain (for changing terrain / dynamic resolution).
        def updateTask(task):
            self.terrain.update()
            return task.cont
        taskMgr.add(updateTask, "update")

    def generateSurfaceTextures(self):
        """Load the surface textures and create one TextureStage per layer.

        Stage sort order matters: it is how the blend shader identifies the
        four blendable layers (grass=1, rock=2, sand=3, snow=4) plus the
        always-visible overlay (5).
        """
        self.grassTexture = loader.loadTexture("Entities/Maps/grassy2.png")
        self.grassTexture.setWrapU(Texture.WMRepeat)
        self.grassTexture.setWrapV(Texture.WMRepeat)
        self.grassTexture.setMinfilter(SamplerState.FT_linear_mipmap_linear)
        self.grassTexture.setAnisotropicDegree(8)
        self.grassTS = TextureStage('grass')
        self.grassTS.setSort(1)  # sorting order is relevant for assigning textures to the four layers
        self.rockTexture = loader.loadTexture("Entities/Maps/simple.jpg")
        self.rockTexture.setWrapU(Texture.WMRepeat)
        self.rockTexture.setWrapV(Texture.WMRepeat)
        self.rockTexture.setMinfilter(SamplerState.FT_linear_mipmap_linear)
        #self.rockTexture.setAnisotropicDegree(8)
        self.rockTS = TextureStage('rock')
        self.rockTS.setSort(2)
        self.sandTexture = loader.loadTexture("Entities/Maps/stars.png")
        self.sandTexture.setWrapU(Texture.WMRepeat)
        self.sandTexture.setWrapV(Texture.WMRepeat)
        self.sandTexture.setMinfilter(SamplerState.FT_linear_mipmap_linear)
        #self.sandTexture.setAnisotropicDegree(8)
        self.sandTS = TextureStage('sand')
        self.sandTS.setSort(3)
        self.sandTS.setPriority(5)  # TODO: figure out what this is for...
        self.snowTexture = loader.loadTexture("Entities/Maps/grass.png")
        self.snowTexture.setWrapU(Texture.WMRepeat)
        self.snowTexture.setWrapV(Texture.WMRepeat)
        self.snowTexture.setMinfilter(SamplerState.FT_linear_mipmap_linear)
        #self.snowTexture.setAnisotropicDegree(8)
        self.snowTS = TextureStage('snow')
        self.snowTS.setSort(4)
        self.snowTS.setPriority(0)
        # A foreground texture shown independently of the blend map (consider removal).
        self.overlayTexture = loader.loadTexture("Entities/Maps/heightmap.png")
        self.overlayTexture.setWrapU(Texture.WMRepeat)
        self.overlayTexture.setWrapV(Texture.WMRepeat)
        self.overlayTexture.setMinfilter(SamplerState.FT_linear_mipmap_linear)
        #self.overlayTexture.setAnisotropicDegree(8)
        self.overlayTS = TextureStage('overlay')
        self.overlayTS.setSort(5)
        self.overlayTS.setPriority(10)

    def setSurfaceTextures(self):
        """Bind the texture stages to the terrain root and attach the blend shader.

        NOTE(review): every layer is currently bound to ``self.snowTexture``
        (the trailing comments name the intended textures) -- presumably a
        debugging state; confirm before changing.
        """
        self.ownerview = False
        root = self.terrain.getRoot()
        root.clearTexture()
        root.setTexture( self.blendTS, self.snowTexture )  # this texture determines where the other textures are visible
        root.setTexture( self.grassTS, self.snowTexture )
        #root.setTexScale(self.grassTS, self.size*5, self.size*5)
        root.setTexture( self.rockTS, self.snowTexture )  #rockTexture
        #root.setTexScale(self.rockTS, self.size*5, self.size*5)
        root.setTexture( self.sandTS, self.snowTexture)  #sandTexture
        #root.setTexScale(self.sandTS, self.size*5, self.size*5)
        root.setTexture( self.snowTS, self.snowTexture )  #snowTexture
        #root.setTexScale(self.snowTS, self.size*5, self.size*5)
        #(consider removal)
        root.setTexture( self.overlayTS, self.overlayTexture )  #overlayTexture
        #root.setTexScale(self.overlayTS, self.xsize, self.ysize)
        root.setShaderInput('size', self.xsize, self.ysize, self.size, self.size)
        root.setShader(loader.loadShader('Entities/Maps/terrainblender.sha'))
|
{"/Tools/terrain_generator/pandaWorld.py": ["/main.py"], "/Tools/terrain_generator/gui.py": ["/main.py"], "/Entities/Objects/Trucks.py": ["/Tools/__init__.py"], "/Tools/terrain_generator/data_object.py": ["/main.py"], "/Controls/Manager.py": ["/Controls/__init__.py"], "/main.py": ["/Controls/__init__.py", "/Entities/__init__.py", "/Entities/Objects/__init__.py"], "/Tools/terrain_generator/camera.py": ["/main.py"]}
|
11,671
|
Jerommaas/Thunderstruck
|
refs/heads/master
|
/Entities/Clock.py
|
from direct.task import Task
FPS = 30
class Clock():
    """Fixed-timestep simulation driver.

    Accumulates real frame time from Panda3D's ``globalClock`` and runs the
    physics update in fixed ``1/FPS`` steps, so simulation speed does not
    depend on the render frame rate. The camera is updated once per frame.
    """
    def __init__(self, World):
        self.World = World
        self.dt = 1./FPS        # fixed simulation step size (seconds)
        self.dtcounter = 0      # accumulator of not-yet-simulated wall time
        # List of objects that have an Update(dt) function that needs to be called
        self.UpdateList = []
        # The task for our simulation
        def simulationTask(task):
            # Add the deltaTime for the task to the accumulator
            self.dtcounter += globalClock.getDt()
            while self.dtcounter > self.dt:
                # Remove a stepSize from the accumulator until
                # the accumulated time is less than the stepsize
                self.dtcounter -= self.dt
                # Step the simulation
                for Obj in self.UpdateList:
                    Obj.Update(self.dt)
            # Camera position is only updated once per frame!
            self.World.Camera.Update()
            return task.cont
        taskMgr.add(simulationTask, "Physics Simulation")
    def UpdateMe(self, Obj):
        """Register Obj so its Update(dt) is called every simulation step."""
        # Add the Object to the list of appendables
        self.UpdateList.append(Obj)
|
{"/Tools/terrain_generator/pandaWorld.py": ["/main.py"], "/Tools/terrain_generator/gui.py": ["/main.py"], "/Entities/Objects/Trucks.py": ["/Tools/__init__.py"], "/Tools/terrain_generator/data_object.py": ["/main.py"], "/Controls/Manager.py": ["/Controls/__init__.py"], "/main.py": ["/Controls/__init__.py", "/Entities/__init__.py", "/Entities/Objects/__init__.py"], "/Tools/terrain_generator/camera.py": ["/main.py"]}
|
11,672
|
Jerommaas/Thunderstruck
|
refs/heads/master
|
/Tools/terrain_generator/data_object.py
|
import main
import json
class data_object(object):
    """Base class for JSON-backed editor objects.

    Subclasses set default attribute values in ``__init__``, then call
    ``load`` to overwrite them with whatever the JSON document provides.
    """
    def __init__(self, json_data):
        # Defaults; overridden by load() below.
        self.first_field = 5
        self.second_field = 6
        self.load(json_data)

    def load(self, json_data):
        """Copy every key of ``json_data`` onto this instance as an attribute.

        NOTE(review): trusts the document completely -- any attribute can be
        shadowed by a key of the same name.
        """
        self.__dict__.update(json_data)  # by default, add all fields to dict

    def save(self):
        """Return a JSON-serializable dict of the fields worth persisting.

        The base implementation persists nothing; subclasses override.
        """
        return {}


class terrain_object(data_object):
    """Describes a terrain: texture set, height/blend maps, placement."""
    def __init__(self, json_data):
        # Defaults, overridden by the JSON document.
        self.textures = ["<some texture file>", "<some other texture file>"]  # TODO(victor) multiple textures
        self.height = "<some height map>"
        self.blend = "<some blend map>"
        self.xyz = [0, 0, 0]
        self.rot = [0, 0, 0]
        # FIX: plain call instead of ``return super().__init__(...)``;
        # __init__ must not return a value (it only ever returned None anyway).
        super().__init__(json_data)

    def save(self):
        """Persist all terrain fields."""
        return {"textures": self.textures,
                "height": self.height,
                "blend": self.blend,
                "xyz": self.xyz,
                "rot": self.rot}


class panda_object(data_object):
    """Default placeable object: a model with position and rotation."""
    def __init__(self, json_data):
        # Defaults, overridden by the JSON document.
        self.model = "<some .egg file>"
        self.xyz = [0, 0, 0]
        self.rot = [0, 0, 0]
        # FIX: see terrain_object -- no ``return`` from __init__.
        super().__init__(json_data)

    def save(self):
        """Persist model path and placement."""
        return {"model": self.model,
                "xyz": self.xyz,
                "rot": self.rot}


class skybox_object(data_object):
    """Skybox object: same shape as panda_object, kept distinct for the editor."""
    def __init__(self, json_data):
        # Defaults, overridden by the JSON document.
        self.model = "<some .egg file>"
        self.xyz = [0, 0, 0]
        self.rot = [0, 0, 0]
        # FIX: see terrain_object -- no ``return`` from __init__.
        super().__init__(json_data)

    def save(self):
        """Persist model path and placement."""
        return {"model": self.model,
                "xyz": self.xyz,
                "rot": self.rot}
if __name__ == "__main__":
    # Launched directly: delegate to the terrain-generator entry point.
    main.main()
|
{"/Tools/terrain_generator/pandaWorld.py": ["/main.py"], "/Tools/terrain_generator/gui.py": ["/main.py"], "/Entities/Objects/Trucks.py": ["/Tools/__init__.py"], "/Tools/terrain_generator/data_object.py": ["/main.py"], "/Controls/Manager.py": ["/Controls/__init__.py"], "/main.py": ["/Controls/__init__.py", "/Entities/__init__.py", "/Entities/Objects/__init__.py"], "/Tools/terrain_generator/camera.py": ["/main.py"]}
|
11,673
|
Jerommaas/Thunderstruck
|
refs/heads/master
|
/Controls/Manager.py
|
from direct.showbase.DirectObject import DirectObject
from direct.task import Task
import sys
from . import Keyboard
from . import Controller
#from . import xinput
from operator import attrgetter
class ControlManager(DirectObject):
    """Routes user input (keyboard, optionally an XInput pad) to the truck."""
    def __init__(self, World):
        self.accept('escape', sys.exit)
        # Reference to the controlled truck!
        # Of course, must be replaced by networkcommunication
        self.Truck = World.Truck1
        # Start different control inputs
        self.Arrows = Keyboard.Arrows(self)
        self.WASD = Keyboard.WASD(self)
        #self.SearchForControllers()
    def SearchForControllers(self):
        """Poll every 3 s for an XInput controller; bind it as X360 when found.

        NOTE(review): relies on ``xinput``, whose import is commented out at
        the top of this file -- calling this raises NameError until restored.
        """
        # Init the search for controllers
        def SearchTask(task):
            joys = xinput.XInputJoystick.enumerate_devices()
            if joys:
                # Joystick found: assume that it is X360
                self.X360 = Controller.X360(self, joys[0])
                return task.done
            else:
                task.delayTime = 3
                return task.again
        taskMgr.doMethodLater(0.1, SearchTask, 'Search Controller')
    #########################################################
    # These functions must be replaced by handles to client comm
    #########################################################
    def Steer(self, value):
        # Set steering direction: [-1,1] [left, right]
        print("Steer", value)
        self.Truck.Steer = value
    def Throttle(self, value):
        # Set throttle: [0,1] [idle, full]
        print("Throttle", value*100, '%')
        self.Truck.Throttle = value
    def Brake(self, value):
        # Set Brake: [0,1] [none, full]
        print("Brake", value)
        self.Truck.Brake = value
    #########################################################
|
{"/Tools/terrain_generator/pandaWorld.py": ["/main.py"], "/Tools/terrain_generator/gui.py": ["/main.py"], "/Entities/Objects/Trucks.py": ["/Tools/__init__.py"], "/Tools/terrain_generator/data_object.py": ["/main.py"], "/Controls/Manager.py": ["/Controls/__init__.py"], "/main.py": ["/Controls/__init__.py", "/Entities/__init__.py", "/Entities/Objects/__init__.py"], "/Tools/terrain_generator/camera.py": ["/main.py"]}
|
11,674
|
Jerommaas/Thunderstruck
|
refs/heads/master
|
/Tools/game_config/config.py
|
'''
Read config file, provide an interface for easily reading the data
usage:
config = Config("some_file.json")
client_port = config["client"]["port"]
'''
import os
import json
class Config(object):
    """Read a JSON config file and expose it via ``config[key]`` subscripting.

    The file path is resolved relative to this module's directory, so the
    working directory does not matter.
    """
    def __init__(self, file="config_file.json"):
        here = os.path.dirname(os.path.abspath(__file__))
        full_path = os.path.normpath(os.path.join(here, file))
        with open(full_path, 'r') as f:
            self.json_config = json.load(f)

    def __getitem__(self, key):
        """Return the top-level config entry; log and re-raise on a missing key."""
        try:
            return self.json_config[key]
        except KeyError:
            # FIX: was a bare ``except:``, which also intercepted
            # KeyboardInterrupt/SystemExit; only a missing key is expected here.
            print("ERROR: attempting to obtain config[{}]".format(key))
            raise  # propagate error
if __name__ == "__main__":
    # Smoke test: read the default config and print the client endpoint.
    config_file = "config_file.json"
    config = Config( config_file )
    client_port = config["client"]["port"]
    client_host = config["client"]["host"]
    print( "port: {}; host: {};".format(client_port,client_host) )
|
{"/Tools/terrain_generator/pandaWorld.py": ["/main.py"], "/Tools/terrain_generator/gui.py": ["/main.py"], "/Entities/Objects/Trucks.py": ["/Tools/__init__.py"], "/Tools/terrain_generator/data_object.py": ["/main.py"], "/Controls/Manager.py": ["/Controls/__init__.py"], "/main.py": ["/Controls/__init__.py", "/Entities/__init__.py", "/Entities/Objects/__init__.py"], "/Tools/terrain_generator/camera.py": ["/main.py"]}
|
11,675
|
Jerommaas/Thunderstruck
|
refs/heads/master
|
/Entities/Objects/__init__.py
|
# Modules re-exported by ``from Entities.Objects import *`` (used by main.py).
__all__ = ['Trucks']
|
{"/Tools/terrain_generator/pandaWorld.py": ["/main.py"], "/Tools/terrain_generator/gui.py": ["/main.py"], "/Entities/Objects/Trucks.py": ["/Tools/__init__.py"], "/Tools/terrain_generator/data_object.py": ["/main.py"], "/Controls/Manager.py": ["/Controls/__init__.py"], "/main.py": ["/Controls/__init__.py", "/Entities/__init__.py", "/Entities/Objects/__init__.py"], "/Tools/terrain_generator/camera.py": ["/main.py"]}
|
11,676
|
Jerommaas/Thunderstruck
|
refs/heads/master
|
/Controls/Controller.py
|
from direct.task import Task
class X360():
    """Maps XInput (Xbox 360) controller state changes onto ControlManager calls."""
    deadzone = 0.4  # stick deflections below this magnitude count as centered
    def __init__(self, ControlManager, Controller):
        self.CM = ControlManager
        self.Controller = Controller
        # Remember the last polled state so we only react to packet changes.
        self.Controller._last_state = self.Controller.get_state()
        self.KeyBindings()
        def CheckController(task):
            # Poll the pad each frame; a falsy state means it was unplugged.
            state = self.Controller.get_state()
            if not state:
                # Start searching again for the controller!
                self.CM.SearchForControllers()
                return task.done
            if state.packet_number != self.Controller._last_state.packet_number:
                self.Controller.handle_changed_state(state)
            self.Controller._last_state = state
            return task.cont
        taskMgr.add(CheckController)
    def KeyBindings(self):
        """Bind the X360 controller axes/buttons to game actions."""
        @self.Controller.event
        def on_axis(axis, value):
            if axis=="l_thumb_x": # Left stick, horizontal direction
                if abs(value)<self.deadzone:
                    self.CM.Steer(0)
                else:
                    # NOTE(review): only the upper bound is clamped with min();
                    # a large positive stick value maps below -1 unclamped --
                    # confirm the intended steering range.
                    self.CM.Steer(min(1.,value*-2)) # Axis values range from [-0.5, 0.5]
        @self.Controller.event
        def on_button(button, pressed):
            if button == 13: # A button
                self.CM.Throttle(pressed)
            elif button ==14: # B button
                self.CM.Brake(pressed)
class SteamController(X360):
    """Steam Controller variant of X360; its bindings are not mapped yet."""
    def KeyBindings(self):
        # TODO: Find the key bindings for the Steam Controller
        pass
|
{"/Tools/terrain_generator/pandaWorld.py": ["/main.py"], "/Tools/terrain_generator/gui.py": ["/main.py"], "/Entities/Objects/Trucks.py": ["/Tools/__init__.py"], "/Tools/terrain_generator/data_object.py": ["/main.py"], "/Controls/Manager.py": ["/Controls/__init__.py"], "/main.py": ["/Controls/__init__.py", "/Entities/__init__.py", "/Entities/Objects/__init__.py"], "/Tools/terrain_generator/camera.py": ["/main.py"]}
|
11,677
|
Jerommaas/Thunderstruck
|
refs/heads/master
|
/Tools/__init__.py
|
# Modules re-exported by ``from Tools import *``.
__all__ = ['EulerAngles']
|
{"/Tools/terrain_generator/pandaWorld.py": ["/main.py"], "/Tools/terrain_generator/gui.py": ["/main.py"], "/Entities/Objects/Trucks.py": ["/Tools/__init__.py"], "/Tools/terrain_generator/data_object.py": ["/main.py"], "/Controls/Manager.py": ["/Controls/__init__.py"], "/main.py": ["/Controls/__init__.py", "/Entities/__init__.py", "/Entities/Objects/__init__.py"], "/Tools/terrain_generator/camera.py": ["/main.py"]}
|
11,678
|
Jerommaas/Thunderstruck
|
refs/heads/master
|
/main.py
|
from direct.showbase.ShowBase import ShowBase
# Als je nieuwe modules hebt voor bij *, voeg de verwijzing
# toe in __init__.py van de module!
from Controls import *
from Entities import *
from Entities.Objects import *
import sys
print(sys.version)
class Thunderstruck_server():
    """Placeholder for the authoritative game server.

    Not implemented yet; the outline below sketches the planned
    responsibilities (entities, game logic, physics, network output).
    """
    def __init__(self):
        pass
    # Entities
        # Load terrain hgtmap
        # Load Objects
    # Game Logic
        # Receive control input from client
        # Game Goals/Rules
    # Physics
        # Define truck behavior
    # Output
        # Send data to clients
class Thunderstruck_client(ShowBase):
    """Panda3D game client: builds the world, input handling and camera.

    Instantiating this creates the Panda3D window and the implicit ShowBase
    globals (``render``, ``loader``, ``taskMgr``) that the entity classes
    rely on -- construction order below therefore matters.
    """
    def __init__(self, server):
        ShowBase.__init__(self)
        # Init the Global Clock
        self.Clock = Clock.Clock(self)
        # ----- ENTITIES -----
        # World
        self.Terrain = Terrain.Terrain()
        self.SkyDome = skyDome.skyDome()
        #Light Sources? (or weather, see also particles)
        # Objects
        self.Truck1 = Trucks.Basic(self)
        # Particles
            # Sparks, fire, LIGHTNING
        self.Camera = Camera.Camera(self)
        # User input
        self.CM = Manager.ControlManager(self)
        # Server communication
            # Send User controls
            # Receive Trucks locations
        # Graphics
            # Renderer
            # Camera behavior
            # GUI frontend
        # Sound
            # Music Player
            # Sound effects
if __name__ == "__main__":
    # The server is a stub and does not run standalone yet;
    # the client drives the Panda3D main loop.
    server = Thunderstruck_server()
    #server.run()
    client = Thunderstruck_client(server)
    client.run()
|
{"/Tools/terrain_generator/pandaWorld.py": ["/main.py"], "/Tools/terrain_generator/gui.py": ["/main.py"], "/Entities/Objects/Trucks.py": ["/Tools/__init__.py"], "/Tools/terrain_generator/data_object.py": ["/main.py"], "/Controls/Manager.py": ["/Controls/__init__.py"], "/main.py": ["/Controls/__init__.py", "/Entities/__init__.py", "/Entities/Objects/__init__.py"], "/Tools/terrain_generator/camera.py": ["/main.py"]}
|
11,679
|
Jerommaas/Thunderstruck
|
refs/heads/master
|
/Entities/skyDome.py
|
from panda3d.core import Texture, TextureStage, DirectionalLight, AmbientLight, TexGenAttrib, VBase4
from panda3d.core import ColorBlendAttrib, LPoint3, LVector4
from direct.filter.CommonFilters import CommonFilters
from panda3d.core import PandaNode, NodePath
import sys
import os
class skyDome:
    """Inverted textured sphere parented to the camera to fake an infinite sky.

    Also installs the scene's directional + ambient lighting. Relies on the
    ShowBase globals ``loader``, ``render`` and ``camera``.
    """
    def __init__(self):
        ##-- load a skydome from a bam file --##
        # this skydome is a small inverted sphere (inverted = back culling makes it transparent outside-in instead of inside-out)
        # that is wrapped around the camera (you can see what's happening by turning on base.oobe(), which toggles out of body experience mode)
        # the camera is set as parent, such that the dome will stay centered around the camera.
        # compass makes sure that rotations of the camera are ignored, allowing you to look around the skydome.
        # the sphere is kept small, but disabling depth buffer and ensuring it is the first thing added to the render buffer allows us to create the illusion that it is infinitely far away.
        # note: SkySphere.bam has to be re-created for each Panda3D version. you can do so by running skySphere.py
        # load inverted sphere model, to texture
        #self.SkyDome = loader.loadModel("Entities/Maps/skydome1/InvertedSphere.egg")
        self.SkyDome = loader.loadModel("Entities/Maps/skydome1/spacedome.egg")
        # create 3D texture coordinates on sphere
        #self.SkyDome.setTexGen(TextureStage.getDefault(), TexGenAttrib.MWorldPosition) #* Copies the (x, y, z) position of each vertex, in world space, to the (u, v, w) texture coordinates.
        #self.SkyDome.setTexGen(TextureStage.getDefault(), TexGenAttrib.MEyePosition) # Copies the (x, y, z) position of each vertex, in camera space, to the (u, v, w) texture coordinates.
        #self.SkyDome.setTexGen(TextureStage.getDefault(), TexGenAttrib.MWorldNormal) # Copies the (x, y, z) lighting normal of each vertex, in world space, to the (u, v, w) texture coordinates.
        #self.SkyDome.setTexGen(TextureStage.getDefault(), TexGenAttrib.MEyeNormal) # Copies the (x, y, z) lighting normal of each vertex, in camera space, to the (u, v, w) texture coordinates.
        #self.SkyDome.setTexGen(TextureStage.getDefault(), TexGenAttrib.MEyeSphereMap) #* Generates (u, v) texture coordinates based on the lighting normal and the view vector to apply a standard reflection sphere map.
        #self.SkyDome.setTexGen(TextureStage.getDefault(), TexGenAttrib.MEyeCubeMap) # Generates (u, v, w) texture coordinates based on the lighting normal and the view vector to apply a standard reflection cube map.
        #self.SkyDome.setTexGen(TextureStage.getDefault(), TexGenAttrib.MWorldCubeMap) # Generates (u, v, w) texture coordinates based on the lighting normal and the view vector to apply a standard reflection cube map.
        #self.SkyDome.setTexProjector(TextureStage.getDefault(), render, self.SkyDome) # should only be needed when projecting cube map to sphere...
        # create a cube map texture from 6 separate textures: (# should run 0-5)
        #tex = loader.loadCubeMap('Entities/Maps/skydome1/lakes_#.png')
        # or: get a pre-wrapped texture from the interwebs
        # NOTE(review): ``scene`` is loaded but never applied -- the model's
        # baked-in texture is what is actually shown; confirm before removing.
        scene = loader.loadTexture('Entities/Maps/skydome1/14-Hamarikyu_Bridge_B_8k.jpg') #new
        #ts = TextureStage('ts')
        #self.SkyDome.setTexGen(ts, TexGenAttrib.MWorldPosition) # old
        #self.SkyDome.setTexGen(ts, TexGenAttrib.MEyeSphereMap) # new
        #self.SkyDome.setTexProjector(ts, render, self.SkyDome) # old
        # ts.setMode(TextureStage.MModulateGlow) # old
        # and give it to inverted sphere
        #self.SkyDome.setTexture(TextureStage.getDefault(),scene)
        #self.SkyDome.setTexture(ts,tex)
        #TODO: make sure that this cube map and .egg model are loaded from a BAM file for faster loading. (and don't forget to re-set textProjector after loading!)
        # load model (sphere + texture)
        #self.SkyDome = loader.loadModel("SkySphere.bam")
        # tell renderer how to project the texture to this sphere
        #self.SkyDome.setTexProjector(TextureStage.getDefault(), render, self.SkyDome)
        # origin of model is on the surface. Let's move to the centre
        # (and make it a little larger to prevent it from intersecting the camera's frustum)
        self.SkyDome.setPos(0,0.5,0)
        self.SkyDome.setScale(2)
        # and slave it to the camera
        self.SkyDome.wrtReparentTo(camera) # note: cam vs. camera! (cam will make skydome look normal even in oobe mode)
        # although parented by camera, tell to ignore camera rotations:
        self.SkyDome.setCompass()
        # tell renderer to use it as background (i.e. first to be rendered), and exclude it from depth buffer
        self.SkyDome.set_bin("background", 0)
        self.SkyDome.set_depth_write(0)
        # ignore light effects?
        self.SkyDome.setLightOff()
        #base.oobe()
        #render.setShaderAuto()
        #filters = CommonFilters(base.win, base.cam)
        #filterok = filters.setBloom(blend=(0, 0, 0, 1), desat=-0.5, mintrigger =0.1, intensity=8.0, size="medium")
        # add some light: one dim directional plus a bright ambient fill
        dlight = DirectionalLight('dlight')
        alight = AmbientLight('alight')
        dlnp = render.attachNewNode(dlight)
        alnp = render.attachNewNode(alight)
        dlight.setColor((0.2, 0.2, 0.2, 1))
        alight.setColor((0.7, 0.7, 0.7, 1))
        dlnp.setHpr(60, -60, 0)
        render.setLight(dlnp)
        render.setLight(alnp)
|
{"/Tools/terrain_generator/pandaWorld.py": ["/main.py"], "/Tools/terrain_generator/gui.py": ["/main.py"], "/Entities/Objects/Trucks.py": ["/Tools/__init__.py"], "/Tools/terrain_generator/data_object.py": ["/main.py"], "/Controls/Manager.py": ["/Controls/__init__.py"], "/main.py": ["/Controls/__init__.py", "/Entities/__init__.py", "/Entities/Objects/__init__.py"], "/Tools/terrain_generator/camera.py": ["/main.py"]}
|
11,680
|
Jerommaas/Thunderstruck
|
refs/heads/master
|
/Tools/terrain_generator/main.py
|
import sys
# Panda
from panda3d.core import *
loadPrcFileData("", "window-type none")
from direct.showbase.DirectObject import DirectObject
from panda3d.core import WindowProperties
# QT
from PyQt5.QtCore import *
from PyQt5.QtGui import *
from PyQt5.QtWidgets import *
# local
import gui
from pandaWorld import World
getModelPath().appendDirectory('/c/Panda3D-1.9.4-x64/models/')
class QTPandaWidget(QWidget):
    """Qt widget that hosts the embedded Panda3D window.

    On every Qt resize, forwards its new geometry to the Panda3D window so
    the two stay in sync. Relies on the ShowBase global ``base``.
    """
    def __init__(self, parent=None):
        # BUGFIX: was ``super(QWidget, self).__init__(parent)``, which starts
        # the MRO walk *after* QWidget and therefore skips QWidget.__init__;
        # the zero-argument form initializes this class's MRO correctly.
        super().__init__(parent)
        self.setSizePolicy(QSizePolicy(QSizePolicy.Expanding, QSizePolicy.Expanding))

    def resizeEvent(self, evt):
        """Mirror this widget's size and origin onto the Panda3D window."""
        wp = WindowProperties()
        wp.setSize(self.width(), self.height())
        wp.setOrigin(self.x(), self.y())
        base.win.requestProperties(wp)

    def minimumSizeHint(self):
        """Minimum viewport size Qt may shrink this widget to."""
        return QSize(400, 300)
class QTMainWindow(QDialog):
    """Top-level Qt dialog embedding the Panda3D viewport plus the editor GUI."""
    def __init__(self, pandaWorld=None, parent=None):
        # BUGFIX: was ``super(QDialog, self).__init__(parent)``, which skips
        # QDialog.__init__ in the MRO; the zero-argument form is correct.
        super().__init__(parent)
        self.setWindowTitle("Test")
        s = 80  # scale factor for a 21:9 aspect window
        self.setGeometry(0, 0, 21*s, 9*s)
        self.pandaContainer = QTPandaWidget(self)
        layout = QHBoxLayout()
        layout.addWidget(self.pandaContainer)
        user_interface = gui.Gui(self, pandaWorld=pandaWorld)
        layout.addWidget(user_interface)
        self.setLayout(layout)
        self.pandaWorld = pandaWorld
        pandaWorld.bindToWindow(int(self.winId())) # window.pandaContainer.winId() or window.winId()?
        # this basically creates an idle task
        # TODO(victor): run panda in separate thread if possible
        self.timer = QTimer(self)
        self.timer.timeout.connect( pandaWorld.step )
        # NOTE(review): QTimer.start takes an interval in *milliseconds*;
        # 0.01 is effectively 0 (fire on every event-loop pass) -- confirm
        # whether 10 ms was intended.
        self.timer.start(0.01)
def main():
    """Build the Panda3D world, wrap it in the Qt main window, and run Qt's loop."""
    world = World()
    qt_app = QApplication(sys.argv)
    main_window = QTMainWindow(pandaWorld=world)
    main_window.show()
    # ensure both qt and panda close
    sys.exit(qt_app.exec_())


if __name__ == '__main__':
    main()
|
{"/Tools/terrain_generator/pandaWorld.py": ["/main.py"], "/Tools/terrain_generator/gui.py": ["/main.py"], "/Entities/Objects/Trucks.py": ["/Tools/__init__.py"], "/Tools/terrain_generator/data_object.py": ["/main.py"], "/Controls/Manager.py": ["/Controls/__init__.py"], "/main.py": ["/Controls/__init__.py", "/Entities/__init__.py", "/Entities/Objects/__init__.py"], "/Tools/terrain_generator/camera.py": ["/main.py"]}
|
11,681
|
Jerommaas/Thunderstruck
|
refs/heads/master
|
/Tools/terrain_generator/camera.py
|
import time
import numpy as np
from direct.task import Task
import main
#
# Camera
#
class Camera(object):
    """WASD fly-camera for the terrain-generator preview window.

    Registers key handlers on ``world`` (a DirectObject) and a per-frame task
    that integrates the camera position from the current control vector.
    Requires the Panda3D global ``taskMgr`` at construction time.
    """
    def __init__(self, world):
        self.world = world
        # Key-down handlers set a control axis to +-1; key-up clears it to 0.
        self.world.accept( "w", self.move_y, [1] )
        self.world.accept( "s", self.move_y, [-1])
        self.world.accept( "a", self.move_x, [-1] )
        self.world.accept( "d", self.move_x, [1] )
        self.world.accept( "w-up", self.move_y, [0])
        self.world.accept( "s-up", self.move_y, [0])
        self.world.accept( "a-up", self.move_x, [0])
        self.world.accept( "d-up", self.move_x, [0])
        self.time = time.time()            # timestamp of the previous tick
        self.pos = np.array( [0,0,1.8] )   # camera position in world units
        self.rot = np.array( [0,0,0] )
        self.control = np.array( [0,0] )   # [right/left, forward/back] in {-1,0,1}
        # camera control
        taskMgr.add(self.tick, 'TickCameraMovement')

    def move_x(self, x):
        """Set sideways control input (-1 left, 0 idle, 1 right)."""
        self.control[0] = x

    def move_y(self, y):
        """Set forward control input (-1 back, 0 idle, 1 forward)."""
        self.control[1] = y

    def get_forward_vector(self):
        """Camera forward unit vector, taken from its quaternion.

        CLEANUP: removed a dead yaw computation whose result was never used.
        """
        return self.world.camera.getQuat().getForward()

    def get_right_vector(self):
        """Unit vector to the camera's right (forward x world-up, normalized)."""
        forward = self.get_forward_vector()
        right = np.cross( forward, np.array([0,0,1]) )
        return right / np.linalg.norm( right )

    def tick(self, task ):
        """Per-frame task: integrate position from control input; lock roll to 0.

        CLEANUP: removed unused yaw/pitch/roll conversions, ``vmax`` and the
        hand-rolled basis vectors -- movement uses the quaternion-derived
        basis from get_right_vector()/get_forward_vector().
        """
        t_cur = time.time()
        dt = t_cur - self.time
        cam = self.world.camera
        self.pos = self.pos + (dt * self.control[0] * self.get_right_vector() )
        self.pos = self.pos + (dt * self.control[1] * self.get_forward_vector() )
        cam.set_x( self.pos[0] )
        cam.set_y( self.pos[1] )
        cam.set_z( self.pos[2] )
        cam.set_r( 0 )
        self.time = t_cur
        return task.cont
if __name__ == "__main__":
    # Launched directly: delegate to the terrain-generator entry point.
    main.main()
|
{"/Tools/terrain_generator/pandaWorld.py": ["/main.py"], "/Tools/terrain_generator/gui.py": ["/main.py"], "/Entities/Objects/Trucks.py": ["/Tools/__init__.py"], "/Tools/terrain_generator/data_object.py": ["/main.py"], "/Controls/Manager.py": ["/Controls/__init__.py"], "/main.py": ["/Controls/__init__.py", "/Entities/__init__.py", "/Entities/Objects/__init__.py"], "/Tools/terrain_generator/camera.py": ["/main.py"]}
|
11,682
|
Jerommaas/Thunderstruck
|
refs/heads/master
|
/Tools/terrain_generator/picker.py
|
#
# this class contains the picker node, which handles everything related to selecting 3d objects
#
#
class picker(object):
    """Owns the collision ray used to pick 3D objects under the mouse cursor.

    See: http://www.panda3d.org/manual/index.php/Clicking_on_3D_Objects

    NOTE(review): this module has no imports -- CollisionNode, CollisionRay,
    CollisionTraverser, CollisionHandlerQueue and GeomNode must come from
    ``panda3d.core`` at the file's top; confirm and restore them.
    """
    def __init__(self, mouse, render, camera):
        self.mouse = mouse
        self.render = render
        self.camera = camera
        self.pickerNode = CollisionNode('mouseRay')
        # BUGFIX: the NodePath was stored in a local (``pickerNP``) while the
        # rest of the class used ``self.pickerNP`` -- keep it on the instance.
        self.pickerNP = camera.attachNewNode(self.pickerNode)
        self.pickerNode.setFromCollideMask(GeomNode.getDefaultCollideMask())
        self.pickerRay = CollisionRay()
        self.pickerNode.addSolid(self.pickerRay)
        # BUGFIX: ``self.traverser`` was never created, and addCollider was
        # passed the *result of calling* handle_picker() (None) instead of a
        # CollisionHandler. Create the traverser and a handler queue here and
        # register the collider with the queue.
        self.traverser = CollisionTraverser('mousePicker')
        self.handlerQueue = CollisionHandlerQueue()
        self.traverser.addCollider(self.pickerNP, self.handlerQueue)

    def handle_picker(self):
        """Cast the ray from the current mouse position; return the closest hit.

        Returns the picked NodePath, or None when nothing was hit.
        """
        self.set_picker_ray()
        self.traverser.traverse(self.render)
        if self.handlerQueue.getNumEntries() > 0:
            # Sort so entry 0 is the closest object to the camera.
            self.handlerQueue.sortEntries()
            pickedObj = self.handlerQueue.getEntry(0).getIntoNodePath()
            return pickedObj
        return None

    def set_picker_ray(self):
        """Aim the picker ray through the mouse position on the camera lens."""
        # First we check that the mouse is not outside the screen.
        if not base.mouseWatcherNode.hasMouse():
            return
        mpos = base.mouseWatcherNode.getMouse()
        self.pickerRay.setFromLens(base.camNode, mpos.getX(), mpos.getY())
|
{"/Tools/terrain_generator/pandaWorld.py": ["/main.py"], "/Tools/terrain_generator/gui.py": ["/main.py"], "/Entities/Objects/Trucks.py": ["/Tools/__init__.py"], "/Tools/terrain_generator/data_object.py": ["/main.py"], "/Controls/Manager.py": ["/Controls/__init__.py"], "/main.py": ["/Controls/__init__.py", "/Entities/__init__.py", "/Entities/Objects/__init__.py"], "/Tools/terrain_generator/camera.py": ["/main.py"]}
|
11,730
|
michaelpbAD/Dual-Net-Gaming-with-Python
|
refs/heads/master
|
/VierOpEenRij.py
|
""" Vier op een rij - Client"""
import pygame
from PodSixNet.Connection import ConnectionListener, connection
from time import sleep
class VierOpEenRijGame(ConnectionListener):
    """Pygame client for "Vier op een rij" (networked connect four).

    The Network_* methods are callbacks invoked by ConnectionListener.Pump()
    whenever a message with the matching "action" key arrives from the server.
    """
    def Network_close(self, data):
        # Server closed the game: terminate the client.
        exit()
    def Network_connected(self, data):
        print("Connected to the server.")
    def Network_error(self, data):
        print("Error connecting to the server.")
        exit()
    def Network_disconnected(self, data):
        print("Disconnected from the server.")
        exit()
    def Network_nickname(self,data):
        # Another player announced a nickname; store it under their player number.
        self.playerNaam[data["playerNR"]-1] = data["nickname"]
    def Network_startgame(self, data):
        """The game is full: adopt the server's board size, rebuild the window."""
        self.running = True
        self.num = data["player"]
        self.gameid = data["gameid"]
        self.playerAantal = data["playerAantal"]
        self.boardBoxH = data["boardBoxH"]
        self.boardBoxW = data["boardBoxW"]
        # define game board dimensions
        self.board = [[0 for x in range(self.boardBoxW)] for y in range(self.boardBoxH)]
        # adjust the window dimensions to the game size sent by the server
        # gameboard dimensions px
        self.boardH = self.boardBoxH * self.boxD - (self.boardBoxH - 1) * self.boxB + self.boxB * 4
        self.boardW = self.boardBoxW * self.boxD - (self.boardBoxW - 1) * self.boxB + self.boxB * 4
        # score board height
        self.panelH = 200
        # window dimensions
        self.width = self.boardW
        self.height = self.boardH + self.boxD + self.panelH
        # score board width
        self.panelW = self.width
        # initialize the screen with windows dimensions
        self.screen = pygame.display.set_mode((self.width, self.height))
        pygame.display.set_caption("Vier op een rij")
    def Network_place(self, data):
        """Another player moved the arrow or dropped a box."""
        # get attributes
        self.pijlx = data["pijlx"]
        K_DOWN = data["K_DOWN"]
        # On a drop, mark the top cell with the still-unchanged self.playerTurn:
        # the server sends the *next* player's number in data["playerTurn"], so
        # the old local value is the player who actually moved.
        if K_DOWN == True and self.board[0][self.pijlx] == 0:
            self.board[0][self.pijlx] = self.playerTurn
        self.playerTurn = data["playerTurn"]
        # recolour the arrow for the player now to move
        self.pijl = self.playerBox[self.playerTurn - 1]
    def Network_win(self, data):
        # Somebody won: remember the winner and the updated scores.
        self.wint = data["speler"]
        self.scorePlayer = data["score"]
    def Network_boardWipe(self, data):
        """Server reset the board after a win or a draw."""
        self.wint = data["wint"]
        self.board = data["board"]
        self.playerTurn = data["playerTurn"]
        self.pijl = self.playerBox[self.playerTurn - 1]
    # initialize VierOpEenRijGame
    def __init__(self, socket, nickname):
        """Connect to the server at socket = [ip, port] and block until the game starts."""
        pygame.init()
        pygame.font.init()
        # dimensions tiles game board
        self.boardBoxH = 7
        self.boardBoxW = 14
        # box dimensions and border
        self.boxD = 50  # px length square side
        self.boxB = int(self.boxD / 10)  # px border square
        # gameboard dimensions px
        self.boardH = self.boardBoxH * self.boxD - (self.boardBoxH - 1) * self.boxB + self.boxB * 4
        self.boardW = self.boardBoxW * self.boxD - (self.boardBoxW - 1) * self.boxB + self.boxB * 4
        # score board height
        self.panelH = 200
        # window dimensions
        self.width = self.boardW
        self.height = self.boardH + self.boxD + self.panelH
        # score board width
        self.panelW = self.width
        # initialize the screen with windows dimensions
        self.screen = pygame.display.set_mode((self.width, self.height))
        pygame.display.set_caption("Vier op een rij")
        # initialize pygame clock
        self.clock = pygame.time.Clock()
        self.initGraphics()
        # define game board dimensions
        self.board = [[0 for x in range(self.boardBoxW)] for y in range(self.boardBoxH)]
        # define who starts
        self.playerTurn = 1
        # default player names (overwritten via "nickname" network messages)
        self.playerNaam = ["speler1", "speler2", "speler3", "speler4"]
        # box colour per player
        self.playerBox = [self.greenBox, self.blueBox, self.redBox, self.yellowBox]
        # define scores
        self.scorePlayer = [0, 0, 0, 0]
        self.wint = 0
        # arrow ("pijl") marking where the current player would drop a box
        self.pijl = self.playerBox[self.playerTurn - 1]
        self.pijlx = 0
        self.pijly = 0
        # try to connect
        try:
            self.Connect((socket[0], int(socket[1])))
        except:
            # NOTE(review): connection errors are swallowed here, after which
            # the client blocks forever in the wait loop below — confirm intended.
            pass
        self.gameid = None
        self.num = None
        self.running = False
        # pump the network until the server sends "startgame"
        while not self.running:
            self.Pump()
            connection.Pump()
            sleep(0.001)
        # determine attributes from player # (self.num is 0-based, playerNR 1-based)
        self.playerNR = self.num + 1
        self.playerNaam[self.num] = "me > "+nickname
        connection.Send({"action": "nickname", "nickname": nickname, "gameid": self.gameid, "playerNR": self.playerNR})
    # initialize graphics images
    def initGraphics(self):
        """Load and scale the box sprites used on the board."""
        self.legeBox = pygame.transform.scale(pygame.image.load("img/legeBox.png"), (self.boxD, self.boxD))
        self.greenBox = pygame.transform.scale(pygame.image.load("img/greenBox.png"), (self.boxD, self.boxD))
        self.blueBox = pygame.transform.scale(pygame.image.load("img/blueBox.png"), (self.boxD, self.boxD))
        self.redBox = pygame.transform.scale(pygame.image.load("img/redBox.png"), (self.boxD, self.boxD))
        self.yellowBox = pygame.transform.scale(pygame.image.load("img/yellowBox.png"), (self.boxD, self.boxD))
        #self.scorePanel = pygame.transform.scale(pygame.image.load("img/scorePanel.png"), (self.panelW, self.panelH))
    # update game
    def update(self):
        """Run one frame: pump the network, redraw everything, handle input."""
        connection.Pump()
        self.Pump()
        # cap the frame rate at 60 fps
        self.clock.tick(60)
        # clear the screen
        self.screen.fill((255, 255, 255))
        self.drawBoard()
        self.drawPanel()
        # fill the board with the winner's colour (one row per frame via drawBoard)
        if self.wint != 0:
            for x in range(self.boardBoxW):
                self.board[0][x] = self.wint
        # update the screen
        pygame.display.flip()
        # events/key press
        self.eventAndKeys()
    # handling events and key presses
    def eventAndKeys(self):
        """Process quit events and — when it is our turn — arrow / drop keys."""
        for event in pygame.event.get():
            # quit if the quit button was pressed
            if event.type == pygame.QUIT:
                pygame.display.quit()
                exit()
            # key presses are only honoured when it is this player's turn
            if event.type == pygame.KEYDOWN and self.playerTurn == self.playerNR:
                # move the arrow left on the left-arrow key
                if event.key == pygame.K_LEFT:
                    if 0 < self.pijlx:
                        self.pijlx -= 1
                        connection.Send(
                            {"action": "place", "playerTurn": self.playerTurn, "pijlx": self.pijlx, "K_DOWN": False,
                             "gameid": self.gameid, "playerNR": self.playerNR})
                # move the arrow right on the right-arrow key
                if event.key == pygame.K_RIGHT:
                    if self.pijlx < (self.boardBoxW - 1):
                        self.pijlx += 1
                        connection.Send(
                            {"action": "place", "playerTurn": self.playerTurn, "pijlx": self.pijlx, "K_DOWN": False,
                             "gameid": self.gameid, "playerNR": self.playerNR})
                # drop a box on enter / down-arrow, only into a non-full column
                if (event.key == pygame.K_KP_ENTER or event.key == pygame.K_DOWN) and self.board[0][self.pijlx] == 0:
                    connection.Send(
                        {"action": "place", "playerTurn": self.playerTurn, "pijlx": self.pijlx, "K_DOWN": True,
                         "gameid": self.gameid, "playerNR": self.playerNR})
    # draw dropped boxes, the game board and the arrow
    def drawBoard(self):
        """Cascade floating boxes downwards, then draw the grid and the arrow."""
        # the top-to-bottom sweep lets a box fall all the way down in one call
        for x in range(self.boardBoxW):
            for y in range(self.boardBoxH - 1):
                if self.board[y][x] != 0:
                    if self.board[y + 1][x] == 0:
                        self.board[y + 1][x] = self.board[y][x]
                        self.board[y][x] = 0
        # draw game board (empty sprite for 0 cells, player sprite otherwise)
        for x in range(self.boardBoxW):
            for y in range(self.boardBoxH):
                if self.board[y][x] == 0:
                    self.screen.blit(self.legeBox, [(self.boxB * 2) + ((x) * self.boxD) - self.boxB * x,
                                                    self.boxD + (self.boxB * 2) + ((y) * self.boxD) - self.boxB * y])
                if self.board[y][x] != 0:
                    self.screen.blit(self.playerBox[self.board[y][x] - 1],
                                     [(self.boxB * 2) + ((x) * self.boxD) - self.boxB * x,
                                      self.boxD + (self.boxB * 2) + ((y) * self.boxD) - self.boxB * y])
        # draw the arrow above the board at column self.pijlx
        self.screen.blit(self.pijl, ((self.boxB * 2) + ((self.pijlx) * self.boxD) - self.boxB * self.pijlx,
                                     (self.boxB * 2) + ((self.pijly) * self.boxD) - self.boxB * self.pijly))
    # draw the score panel
    def drawPanel(self):
        """Draw the black score panel with one label per player."""
        panelP = self.height - self.panelH
        # background panel colour (image variant commented out)
        # self.screen.blit(self.scorePanel, [0, panelP])
        pygame.draw.rect(self.screen, (0, 0, 0), [0, panelP, self.panelW, self.panelH])
        # print Player Score Labels — two columns when the window is wide enough
        x, y = 0, panelP
        for i in range(self.playerAantal):
            if (self.width / 2) > 300:
                if i % 2 == 0:
                    x = 25
                    y += 35
                else:
                    x = (self.width / 2) + 25
            else:
                x = 25
                y += 35
            self.printPlayerScoreLabel(x, y, self.playerBox[i], self.playerNaam[i], self.scorePlayer[i])
    # print player scores
    def printPlayerScoreLabel(self, x, y, icon, naam, score):
        """Render one score row (player icon, name, score) at (x, y)."""
        myfont = pygame.font.SysFont(None, 42)
        fScore = myfont.render(str(score), 1, (255, 255, 255))
        fNaam = myfont.render(str(naam), 1, (255, 255, 255))
        wNaam, hNaam = fNaam.get_size()
        self.screen.blit(pygame.transform.scale(icon, (25, 25)), (x, y))
        self.screen.blit(fNaam, (x + 50, y))
        self.screen.blit(fScore, (x + 250, y))
|
{"/start.py": ["/screen_joinorhost.py"], "/screen_hostserver.py": ["/vieropeenrijserver.py"], "/testing material/bootscreen.py": ["/VierOpEenRij.py"], "/screen_joinorhost.py": ["/VierOpEenRij.py", "/screen_hostserver.py"]}
|
11,731
|
michaelpbAD/Dual-Net-Gaming-with-Python
|
refs/heads/master
|
/testing material/vieropeenrijClient.py
|
# connect to the server - optionally pass hostname and port like: ("mccormick.cx", 31425)
from PodSixNet.Connection import ConnectionListener
class MyNetworkListener(ConnectionListener):
    """Prototype listener that logs PodSixNet connection events to stdout."""
    def Network(self, data):
        # Catch-all for messages without a dedicated Network_* handler.
        print('network data:', data)
    def Network_connected(self, data):
        print("connected to the server")
    def Network_error(self, data):
        print("error:", data['error'][1])
    def Network_disconnected(self, data):
        print("disconnected from the server")
    def Network_myaction(self, data):
        # BUG FIX: `self` was missing from the signature, so PodSixNet's
        # bound-method dispatch would raise TypeError on a "myaction" message.
        print("myaction:", data)
class MyPlayerListener(ConnectionListener):
    """Prototype listener that reports the connected-player count."""
    def Network_numplayers(self, data):
        # BUG FIX: `self` was missing from the signature, so this handler
        # would raise TypeError when called as a bound method.
        # update gui element displaying the number of currently connected players
        print(data['players'])
|
{"/start.py": ["/screen_joinorhost.py"], "/screen_hostserver.py": ["/vieropeenrijserver.py"], "/testing material/bootscreen.py": ["/VierOpEenRij.py"], "/screen_joinorhost.py": ["/VierOpEenRij.py", "/screen_hostserver.py"]}
|
11,732
|
michaelpbAD/Dual-Net-Gaming-with-Python
|
refs/heads/master
|
/testing material/vieropeenrijServer.py
|
from time import sleep
from PodSixNet.Server import Server
from PodSixNet.Channel import Channel
class ClientChannel(Channel):
    """Per-client connection (testing prototype); forwards messages to the server."""
    def Network(self, data):
        # Fallback for any message type: remember its game id and log it.
        # NOTE(review): assumes every message carries a "gameid" key — confirm.
        self.gameid = data["gameid"]
        print(data)
    def Network_placeBox(self, data):
        """Forward a box-drop message to the server."""
        turn = data["playerTurn"]
        column = data["pijlx"]
        player_nr = data["playerNR"]
        # game id handed out by the server when the game started
        self.gameid = data["gameid"]
        self._server.placeBox(turn, column, data, self.gameid, player_nr)
    def Network_movePijl(self, data):
        """Forward an arrow-move message to the server."""
        column = data["pijlx"]
        self.gameid = data["gameid"]
        self._server.movePijl(column, self.gameid, data)
    def Close(self):
        """Ask the server to close this channel's game."""
        self._server.close(self.gameid)
class vieropeenrijServer(Server):
    """Prototype matchmaking server (testing material) for two-player games."""
    # Required by PodSixNet: channel class instantiated for every connection.
    channelClass = ClientChannel
    def __init__(self, *args, **kwargs):
        Server.__init__(self, *args, **kwargs)
        self.games = []         # started games
        self.queue = None       # game waiting for a second player
        self.currentIndex = 0   # last assigned game id
        self.numPlayers = 0     # total connections seen (never reset here)
    def Connected(self, channel, addr):
        """Seat a new client; start the queued game once it has enough players."""
        self.numPlayers += 1
        print('new connection:', channel)
        print(self.queue)
        if self.queue is None:  # idiom fix: was `== None`
            # first player: open a new game with a fresh id
            self.currentIndex += 1
            channel.gameid = self.currentIndex
            self.queue = Game(channel, self.currentIndex)
        else:
            channel.gameid = self.currentIndex
            # modulo seat index because numPlayers keeps counting across games
            self.queue.player[(self.numPlayers-1)%self.queue.playerAantal] = channel
        if self.numPlayers > 1 and self.numPlayers%self.queue.playerAantal == 0:
            # game is full: notify all players and start the game
            for i in range(self.queue.playerAantal):
                self.queue.player[i].Send({"action": "startgame", "player": i, "gameid": self.queue.gameid,"playerAantal": self.queue.playerAantal})
            self.games.append(self.queue)
            for a in self.games:
                print(a)
            self.queue = None
    def movePijl(self,pijlx,gameid, data):
        """Route an arrow-move message to the game it belongs to."""
        game = [a for a in self.games if a.gameid == gameid]
        if len(game) == 1:
            game[0].movePijl(pijlx, data)
    def placeBox(self, playerTurn, pijlx, data, gameid, playerNR):
        """Route a box-drop message to the game it belongs to."""
        game = [a for a in self.games if a.gameid == gameid]
        if len(game) == 1:
            game[0].placeBox(playerTurn, pijlx, data, playerNR)
    def close(self,gameid):
        """Best-effort: notify all players of `gameid` that it is closing."""
        try:
            game = [a for a in self.games if a.gameid == gameid][0]
            for i in range(game.playerAantal):
                game.player[i].Send({"action": "close", "gameid": gameid})
        except Exception:
            # the game may already be gone or a channel already closed
            pass
class Game:
    """Minimal two-player match prototype: tracks the turn and relays messages."""
    def __init__(self, player0, currentIndex):
        self.Turn = 1            # 1-based number of the player to move
        self.playerAantal = 2    # fixed at two players in this prototype
        # board size in tiles
        self.boardBoxH = 7
        self.boardBoxW = 14
        # board[y][x]: 0 = empty, otherwise the owning player's number
        self.board = [[0] * self.boardBoxW for _ in range(self.boardBoxH)]
        # seats; seat 0 is the player who opened the game
        self.player = [player0, None, None, None]
        # id assigned by the server
        self.gameid = currentIndex
    def movePijl(self, pijlx, data):
        """Broadcast an arrow-move message to every seated player."""
        for seat in range(self.playerAantal):
            self.player[seat].Send(data)
    def placeBox(self, playerTurn, pijlx, data, playerNR):
        """On an in-turn drop: advance the turn and broadcast the updated message."""
        if playerNR != self.Turn:
            return  # ignore out-of-turn messages
        # next player, wrapping back to 1
        self.Turn = self.Turn + 1 if self.playerAantal > self.Turn else 1
        data["playerTurn"] = self.Turn
        for seat in range(self.playerAantal):
            self.player[seat].Send(data)
# --- entry point: start the matchmaking server and pump it forever ---
print("STARTING SERVER ON LOCALHOST")
# NOTE(review): "LOCALHOST" relies on case-insensitive hostname resolution;
# the conventional spelling is "localhost".
server = vieropeenrijServer(localaddr=("LOCALHOST", 31425))
while 1:
    server.Pump()
    sleep(0.01)
# def updateServer():
#     print("Clock is ticking")
#     vieropenrijServer.Pump()
#     sleep(0.0001)
|
{"/start.py": ["/screen_joinorhost.py"], "/screen_hostserver.py": ["/vieropeenrijserver.py"], "/testing material/bootscreen.py": ["/VierOpEenRij.py"], "/screen_joinorhost.py": ["/VierOpEenRij.py", "/screen_hostserver.py"]}
|
11,733
|
michaelpbAD/Dual-Net-Gaming-with-Python
|
refs/heads/master
|
/start.py
|
"""Entry point: open the join-or-host window and run its update loop."""
# import screen_joinorhost.py
import screen_joinorhost
# create the window for joining or hosting a server
start = screen_joinorhost.joinorhost()
# pump the GUI (and any game/server it spawns) until the window is closed
while not start.closedWindow:
    start.update()
|
{"/start.py": ["/screen_joinorhost.py"], "/screen_hostserver.py": ["/vieropeenrijserver.py"], "/testing material/bootscreen.py": ["/VierOpEenRij.py"], "/screen_joinorhost.py": ["/VierOpEenRij.py", "/screen_hostserver.py"]}
|
11,734
|
michaelpbAD/Dual-Net-Gaming-with-Python
|
refs/heads/master
|
/vieropeenrijserver.py
|
""" Vier op een rij - Server"""
from time import sleep
from PodSixNet.Server import Server
from PodSixNet.Channel import Channel
class ClientChannel(Channel):
    """Per-client connection; forwards incoming network messages to the server."""
    def Network(self, data):
        # Catch-all for messages without a dedicated Network_* handler.
        print(data)
    def Network_place(self, data):
        """Forward a "place" message (arrow move or box drop) to the server."""
        turn = data["playerTurn"]
        column = data["pijlx"]
        dropped = data["K_DOWN"]
        player_nr = data["playerNR"]
        # game id handed out by the server when the game started
        self.gameid = data["gameid"]
        self._server.placeLine(turn, column, dropped, data, self.gameid, player_nr)
    def Network_nickname(self, data):
        """Relay a nickname announcement to the other players in the game."""
        self._server.nickname(data)
    def Close(self):
        """Tell the server to shut down this channel's game."""
        self._server.close(self.gameid)
class vieropeenrijServer(Server):
    """PodSixNet server that groups connecting clients into games of `maxPlayers`."""
    # Required by PodSixNet: channel class instantiated for every connection.
    channelClass = ClientChannel
    def __init__(self, maxPlayers, *args, **kwargs):
        """Listen on kwargs['localaddr']; each game holds `maxPlayers` players (2-4)."""
        Server.__init__(self, *args, **kwargs)
        self.maxPlayers = maxPlayers
        self.games = []         # started games
        self.queue = None       # game currently waiting for players
        self.currentIndex = 0   # last assigned game id
        self.numPlayers = 0     # seats filled in the queued game
    def Connected(self, channel, addr):
        """Seat a new client; start the queued game once it is full."""
        self.numPlayers += 1
        print('new connection:', channel)
        if self.queue is None:
            # first player: open a new game with a fresh id
            self.currentIndex += 1
            channel.gameid = self.currentIndex
            self.queue = Game(channel, self.currentIndex, self.maxPlayers)
        else:
            # later players take the next free seat (replaces the old
            # numPlayers == 2 / 3 / 4 elif chain; numPlayers is reset below
            # before it can ever exceed 4)
            channel.gameid = self.currentIndex
            self.queue.player[self.numPlayers - 1] = channel
        if self.numPlayers >= self.queue.playerAantal:
            # game is full: tell every player to start and begin ticking it
            for i in range(self.queue.playerAantal):
                self.queue.player[i].Send({"action": "startgame", "player": i, "gameid": self.queue.gameid, "playerAantal": self.queue.playerAantal, "boardBoxH": self.queue.boardBoxH, "boardBoxW": self.queue.boardBoxW})
            self.games.append(self.queue)
            self.queue = None
            self.numPlayers = 0
    def placeLine(self, playerTurn, pijlx, K_DOWN, data, gameid, playerNR):
        """Route a "place" message to the game it belongs to."""
        game = [a for a in self.games if a.gameid == gameid]
        if len(game) == 1:
            game[0].placeLine(playerTurn, pijlx, K_DOWN, data, playerNR)
    def tick(self):
        """Reset any finished (won or drawn) game and pump the network."""
        for game in self.games:
            if game.wint != 0 or game.draw:
                game.wint = 0
                game.Turn = 1
                game.draw = False
                game.board = [[0 for _ in range(game.boardBoxW)] for _ in range(game.boardBoxH)]
                # short pause so the players can see the final board
                sleep(2)
                for i in range(game.playerAantal):
                    game.player[i].Send(
                        {"action": "boardWipe", "board": game.board, "playerTurn": game.Turn, "wint": game.wint})
        self.Pump()
    def close(self, gameid):
        """Best-effort: notify every player of `gameid` that the game is closing."""
        try:
            game = [a for a in self.games if a.gameid == gameid][0]
            for i in range(game.playerAantal):
                game.player[i].Send({"action": "close"})
        except Exception:
            # the game may already be gone or a channel already closed
            pass
    def nickname(self, data):
        """Forward a nickname announcement to every *other* player in its game."""
        game = [a for a in self.games if a.gameid == data["gameid"]][0]
        for i in range(game.playerAantal):
            if i != data["playerNR"]-1:
                game.player[i].Send({"action": "nickname", "playerNR": data["playerNR"], "nickname": data["nickname"]})
class Game(object):
    """Server-side state and rules for one vier-op-een-rij (connect four) match."""
    def __init__(self, player0, currentIndex, maxPlayers):
        """Create an empty board.

        player0: channel of the player who opened the game (seat 0).
        currentIndex: unique game id assigned by the server.
        maxPlayers: number of players in this match (2-4).
        """
        # 1-based number of the player whose turn it is
        self.Turn = 1
        self.playerAantal = maxPlayers
        # board size in tiles
        self.boardBoxH = 7
        self.boardBoxW = 14
        # board[y][x]: 0 = empty, otherwise the owning player's number
        self.board = [[0 for _ in range(self.boardBoxW)] for _ in range(self.boardBoxH)]
        # seats; unused seats stay None until the server fills them
        self.player = [player0, None, None, None]
        self.scorePlayer = [0, 0, 0, 0]
        self.wint = 0       # winning player's number, 0 while undecided
        self.draw = False   # True once the board is full without a winner
        self.gameid = currentIndex
    def placeLine(self, playerTurn, pijlx, K_DOWN, data, playerNR):
        """Process a "place" message (arrow move or box drop) from player `playerNR`.

        Out-of-turn messages are dropped. On a drop (K_DOWN) into a non-full
        column the box is placed and the turn passes; in all in-turn cases the
        (updated) message is re-broadcast and the board re-checked for a win.
        """
        if playerNR != self.Turn:
            # not this player's turn: ignore the message
            return
        if K_DOWN and self.board[0][pijlx] == 0:
            # place the box at the top of the chosen column...
            self.board[0][pijlx] = self.Turn
            # ...and pass the turn, wrapping back to player 1
            self.Turn = self.Turn + 1 if self.playerAantal > self.Turn else 1
        # echo the move (with the possibly-advanced turn) to every player
        data["playerTurn"] = self.Turn
        for i in range(self.playerAantal):
            self.player[i].Send(data)
        self.dropBox()
        self.controle()
        if self.wint != 0:
            # somebody won: update the score and notify everyone
            self.scorePlayer[self.wint - 1] += 1
            for i in range(self.playerAantal):
                self.player[i].Send({"action": "win", "speler": self.wint, "score": self.scorePlayer})
    def dropBox(self):
        """Let every floating box fall to the lowest empty cell of its column.

        The top-to-bottom sweep cascades a box all the way down in one call.
        """
        for x in range(self.boardBoxW):
            for y in range(self.boardBoxH - 1):
                if self.board[y][x] != 0 and self.board[y + 1][x] == 0:
                    self.board[y + 1][x] = self.board[y][x]
                    self.board[y][x] = 0
    def controle(self):
        """Scan the whole board for four-in-a-row and for a draw.

        Sets self.wint to the winner's number (if any) and self.draw to True
        when no empty cell remains. Only the directions (0,+1), (+1,0),
        (+1,+1) and (+1,-1) are checked; the opposite directions are covered
        by starting from the other end of the line.
        """
        board_full = True
        for y in range(self.boardBoxH):
            for x in range(self.boardBoxW):
                var = self.board[y][x]
                if var == 0:
                    # an empty cell means no draw (yet)
                    board_full = False
                    continue
                # horizontal
                if x < self.boardBoxW - 3:
                    if var == self.board[y][x + 1] == self.board[y][x + 2] == self.board[y][x + 3]:
                        self.wint = var
                # vertical
                if y < self.boardBoxH - 3:
                    if var == self.board[y + 1][x] == self.board[y + 2][x] == self.board[y + 3][x]:
                        self.wint = var
                # diagonal down-right
                if y < self.boardBoxH - 3 and x < self.boardBoxW - 3:
                    if var == self.board[y + 1][x + 1] == self.board[y + 2][x + 2] == self.board[y + 3][x + 3]:
                        self.wint = var
                # diagonal down-left (needs x >= 3)
                if y < self.boardBoxH - 3 and x > 2:
                    if var == self.board[y + 1][x - 1] == self.board[y + 2][x - 2] == self.board[y + 3][x - 3]:
                        self.wint = var
        self.draw = board_full
|
{"/start.py": ["/screen_joinorhost.py"], "/screen_hostserver.py": ["/vieropeenrijserver.py"], "/testing material/bootscreen.py": ["/VierOpEenRij.py"], "/screen_joinorhost.py": ["/VierOpEenRij.py", "/screen_hostserver.py"]}
|
11,735
|
michaelpbAD/Dual-Net-Gaming-with-Python
|
refs/heads/master
|
/screen_hostserver.py
|
""" Form with tkinter: hosting the server """
# import modules
from tkinter import *
from tkinter import ttk
from time import sleep
class screenServer():
    """Tkinter window that hosts the vier-op-een-rij server and pumps it."""
    def __init__(self, socket, maxPlayers):
        """Create the window and start listening on socket = [ip, port]."""
        self.closedWindow = False
        # create window
        self.root = Tk()
        self.root.title("Vier op een rij: Server")
        self.root.resizable(False, False)
        # frame that holds the widgets
        self.serverframe = ttk.Frame(self.root, padding="80 80 80 80")
        self.serverframe.grid(column=0, row=0, sticky=(N, W, E, S))
        self.serverframe.columnconfigure(0, weight=1)
        self.serverframe.rowconfigure(0, weight=1)
        ttk.Label(self.serverframe, text="Running the server...").grid(column=2, row=1, sticky=(W, E))
        # imported here so the server module is only loaded when actually hosting
        import vieropeenrijserver
        # make the server object; localaddr is the (ip, port) to listen on
        self.hosting = vieropeenrijserver.vieropeenrijServer(maxPlayers, localaddr=(socket[0], int(socket[1])))
        # run on_closing when the window-manager close button is pressed
        self.root.protocol("WM_DELETE_WINDOW", self.on_closing)
    def on_closing(self):
        """Confirm with the user, then flag the caller loop and destroy the window."""
        # BUG FIX: messagebox is a submodule and is NOT exported by
        # `from tkinter import *`, so the original raised NameError here.
        from tkinter import messagebox
        # ask the user if he wants to quit?
        if messagebox.askokcancel("Quit", "Do you want to quit?"):
            # the caller's loop runs until closedWindow = True
            self.closedWindow = True
            # close the window
            self.root.destroy()
    def update(self):
        """One iteration: refresh the GUI, pump the network, tick the games."""
        self.root.update()
        # check for sockets / data / buffers
        self.hosting.Pump()
        sleep(0.01)
        self.hosting.tick()
|
{"/start.py": ["/screen_joinorhost.py"], "/screen_hostserver.py": ["/vieropeenrijserver.py"], "/testing material/bootscreen.py": ["/VierOpEenRij.py"], "/screen_joinorhost.py": ["/VierOpEenRij.py", "/screen_hostserver.py"]}
|
11,736
|
michaelpbAD/Dual-Net-Gaming-with-Python
|
refs/heads/master
|
/testing material/bootscreen.py
|
""" Form with tkinter: nickname, server selection """
# import tkinter / ttk for GUI
from tkinter import *
from tkinter import ttk
# import regex to search for IP adress
import re
import pygame
from VierOpEenRij import *
gstart=False
# checking IP adress
def checkIp(*args):
    """Validate the IP typed into the module-level `ip` entry; on success start the game.

    Sets the module globals `bg` (the game object) and `gstart` (start flag).
    """
    isIp = ip.get()
    print(isIp + " : " + str(len(isIp)))
    # BUG FIX: the shortest valid dotted quad ("0.0.0.0") is 7 characters;
    # the old `< 8` check wrongly rejected it.
    if len(isIp) < 7 or len(isIp) > 15:
        print("This is not an IP address.") # need to generate error
    else:
        # BUG FIX: the first octet was \d{2,3}, wrongly rejecting addresses
        # with a single-digit first octet such as 8.8.8.8.
        patIp = re.compile(r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}')
        matchIp = patIp.search(isIp)
        if matchIp is None or matchIp.group() != isIp:
            print("This is not a right address")
        else:
            print(matchIp.group())
            print("join server")
            global bg
            # NOTE(review): the current client's VierOpEenRijGame requires
            # (socket, nickname) arguments; this prototype call passes none and
            # would raise TypeError — confirm against VierOpEenRij.py.
            bg = VierOpEenRijGame()  # __init__ runs (and connects) right here
            global gstart
            gstart = True
# checking if server can be hosted
def hostServer(*args):
    """Open a bare "server running" window (prototype; no server is actually started).

    NOTE(review): this creates a second Tk() root next to the module-level one
    below; a Toplevel window would be the conventional choice — confirm.
    """
    server = Tk()
    print("host server")
    server.title("Vier op een rij: Server")  # title of window
    serverframe = ttk.Frame(server, padding="80 80 80 80")  # padding of frame
    serverframe.grid(column=0, row=0, sticky=(N, W, E, S))  # grid layout
    serverframe.columnconfigure(0, weight=1)
    serverframe.rowconfigure(0, weight=1)
    ttk.Label(serverframe, text="Running the server....").grid(column=2, row=1, sticky=(W, E))
# --- module-level GUI: one window asking to join or host a server ---
root = Tk()
root.title("Vier op een rij: Client or Server")  # title of window
mainframe = ttk.Frame(root, padding="80 80 80 80")  # padding of frame
mainframe.grid(column=0, row=0, sticky=(N, W, E, S))  # grid layout
mainframe.columnconfigure(0, weight=1)
mainframe.rowconfigure(0, weight=1)
# tkinter variable for entry (input field)
ip = StringVar()
# label for input field
ttk.Label(mainframe, text="Server IP-adress:").grid(column=2, row=1, sticky=(W, E))
# text input ipaddress
ip_entry = ttk.Entry(mainframe, width=20, textvariable=ip)
ip_entry.grid(column=2, row=2, sticky=(N, W, E, S))  # layout text input field ipaddress
ttk.Button(mainframe, text="Join server", command=checkIp).grid(column=2, row=3, sticky=(W, E))
# "or"-label
ttk.Label(mainframe, text="or").grid(column=2, row=4, sticky=(W, E))
# button for hosting the server
ttk.Button(mainframe, text="Host server", command=hostServer).grid(column=2, row=5, sticky=(W, E))
# loop through all child of the frame and add padding to x and y
for child in mainframe.winfo_children():
    child.grid_configure(padx=10, pady=10)
# focus on ip text field when started
ip_entry.focus()
# busy-wait GUI loop (prototype; root.mainloop() would be the usual approach)
while 1:
    root.update()
    # NOTE(review): the current VierOpEenRijGame defines no `stopped`
    # attribute, so this line would raise AttributeError once gstart is True —
    # confirm against VierOpEenRij.py.
    if gstart==True and (not bg.stopped):
        bg.update()
|
{"/start.py": ["/screen_joinorhost.py"], "/screen_hostserver.py": ["/vieropeenrijserver.py"], "/testing material/bootscreen.py": ["/VierOpEenRij.py"], "/screen_joinorhost.py": ["/VierOpEenRij.py", "/screen_hostserver.py"]}
|
11,737
|
michaelpbAD/Dual-Net-Gaming-with-Python
|
refs/heads/master
|
/screen_joinorhost.py
|
""" Form with tkinter: join host or host server """
# import modules
from tkinter import *
from tkinter import messagebox
from tkinter import ttk
import re
class joinorhost():
    """Tkinter form asking the user to either join a game server or host one.

    After a choice is made the form window is destroyed and either
    `self.playVierOpEenRij` (client game) or `self.hostS` (server window) is
    set; update() then keeps pumping whichever one exists.
    """
    def __init__(self):
        # flag polled by start.py's loop
        self.closedWindow = False
        # set by hostServer() / joinServer() respectively
        self.hostS = None
        self.playVierOpEenRij = None
        # ============================ START FORM JOIN OR HOST SERVER =================================
        # make a window
        self.root = Tk()
        # title of window
        self.root.title("Vier op een rij: Client or Server")
        self.root.resizable(False, False)
        # making new derived styles
        s = ttk.Style()
        s.configure('vieropeenrij.TFrame', background='#1ABC9C')
        s.configure('vieropeenrij.TLabel', background='#1ABC9C')
        # frame is part of window (for showing form elements)
        mainframe = ttk.Frame(self.root, padding="80 80 80 80", style="vieropeenrij.TFrame")  # padding of frame
        mainframe.grid(column=0, row=0, sticky=(N, W, E, S))  # grid layout
        mainframe.columnconfigure(0, weight=1)
        mainframe.rowconfigure(0, weight=1)
        # tkinter variables for entries, spinbox
        self.socket = StringVar()
        self.nickname = StringVar()
        self.socketServer = StringVar()
        self.maxPlayers = StringVar()
        # label for text entry
        ttk.Label(mainframe, text="Server IP-adress: Server port", style="vieropeenrij.TLabel").grid(column=2, row=1,
                                                                                                     sticky=(W, E))
        # text entry for "socket" server to joing
        socketEntry = ttk.Entry(mainframe, width=20, textvariable=self.socket)
        socketEntry.grid(column=3, row=1, sticky=(N, W, E, S))
        # label for nickname
        ttk.Label(mainframe, text="Nickname:", style="vieropeenrij.TLabel").grid(column=2, row=2, sticky=(W, E))
        # text entry for nickname
        nicknameEntry = ttk.Entry(mainframe, width=20, textvariable=self.nickname)
        nicknameEntry.grid(column=3, row=2, sticky=(N, W, E, S))
        # button for function joinServer
        ttk.Button(mainframe, text="Join server", command=self.joinServer).grid(column=3, row=3, sticky=(W, E))
        # "or"-label
        ttk.Label(mainframe, text="OR", style="vieropeenrij.TLabel").grid(column=2, row=4, sticky=(W, E))
        # label for text entry server ip and port
        ttk.Label(mainframe, text="Your PC's IP-adress: Server port", style="vieropeenrij.TLabel").grid(column=2, row=5,
                                                                                                        sticky=(W, E))
        # entry for "socketServer"
        serverEntry = ttk.Entry(mainframe, width=15, textvariable=self.socketServer)
        serverEntry.grid(column=3, row=5, sticky=(N, W, E, S))
        # label for maximum number of players in a game
        ttk.Label(mainframe, text="Maximum number of players in a game:", style="vieropeenrij.TLabel").grid(column=2, row=6, sticky=(W, E))
        # spinbox for "maxplayers"
        Spinbox(mainframe, from_=2, to=4, textvariable=self.maxPlayers, width=3).grid(column=3, row=6, sticky=(W))
        # button for hosting the server, function hostServer
        ttk.Button(mainframe, text="Host server", command=self.hostServer).grid(column=3, row=7, sticky=(W, E))
        # loop through all child of the frame and add padding to x and y
        for child in mainframe.winfo_children():
            child.grid_configure(padx=10, pady=10)
        # focus on text entry "socketEntry" when started
        socketEntry.focus()
        # ============================ END FORM JOIN OR HOST SERVER ===================================
        # protocol handler (interaction between application and window manager) for checking if window gets closed (WM_DELETE_WINDOW) and will do function on_closing
        self.root.protocol("WM_DELETE_WINDOW", self.on_closing)
    def joinServer(self):
        """Validate the form, then join a running server as a client."""
        # get socket out of text entry and check if it is valid
        checkedSocket = self.checkSocket(self.socket.get())
        # get nickname out of text entry
        nickname = self.nickname.get().strip()
        # check can't be false and nickname can't be empty
        # NOTE(review): this error text also shows when only the socket was
        # invalid (checkSocket has already popped its own error dialog).
        if not (checkedSocket and nickname != ""):
            messagebox.showerror("Error", "No empty nickname allowed.")
            return False
        else:
            # close the window
            self.root.destroy()
            # import VierOpEenRij.py
            import VierOpEenRij
            print("Joining server at: " + checkedSocket[0] + " : " + checkedSocket[1] + " as " + nickname)
            # join server by making an object from VierOpEenRijGame with arguments: checkedSocket and nickname
            self.playVierOpEenRij = VierOpEenRij.VierOpEenRijGame(checkedSocket, nickname)
    def hostServer(self):
        """Validate the form, then host a server in a new window."""
        # get socket out of text entry and check if it is valid
        checkedSocket = self.checkSocket(self.socketServer.get())
        # try saving maxPlayers as an int
        try:
            maxPlayers = int(self.maxPlayers.get())
        except:
            maxPlayers = 0
        # checkedSocket can't be false and maxPlayers must be 2,3 or 4
        if checkedSocket and (maxPlayers == 2 or maxPlayers == 3 or maxPlayers == 4):
            # close the window
            self.root.destroy()
            # import screen_server.py
            import screen_hostserver
            print("Hosting server at: " + checkedSocket[0] + " : " + checkedSocket[
                1] + " with maximum players in a game " + str(maxPlayers))
            # hosting the server with arguments: checkedSocket, maxPlayers
            self.hostS = screen_hostserver.screenServer(checkedSocket, maxPlayers)
        else:
            messagebox.showerror("Error", "Maximum players is 2, 3 or 4.")
            return False
    # check if socket entered is valid
    def checkSocket(self, socket):
        """Validate an "ip:port" string; return [ip, port] (strings) or False."""
        try:
            # split socket if possible
            isIp, isPort = socket.split(":")
        except:
            messagebox.showerror("Error", "Format is IP:Port")
            return False
        # length of IP address may not be smaller than 7 or higher than 15
        if len(isIp) < 7 or len(isIp) > 15:
            messagebox.showerror("Error", "This can not be a valid IP address.")
            return False
        else:
            # check if pattern of IP is valid (3 dots with groups of 1 to 3 digits)
            # NOTE(review): octets above 255 (e.g. 999.1.1.1) still pass this check.
            patIp = re.compile(r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}')
            matchIp = patIp.search(isIp)
            if matchIp == None or matchIp.group() != isIp:
                messagebox.showerror("Error", "This can not be a valid IP address.")
                return False
        try:
            # check if port is negative
            if int(isPort) != abs(int(isPort)):
                messagebox.showerror("Error", "Not a valid port number.")
                return False
        except:
            messagebox.showerror("Error", "Not a valid port number.")
            return False
        # return the socket
        return [isIp, isPort]
    def on_closing(self):
        # ask the user if he wants to quit?
        if messagebox.askokcancel("Quit", "Do you want to quit?"):
            # start.py loops until closedWindow = True
            self.closedWindow = True
            # close the window
            self.root.destroy()
    # update GUI 1 time
    def update(self):
        """Pump one iteration of whichever window / game currently exists."""
        try:
            self.root.update()
        except:
            # root may already be destroyed after join/host was chosen
            pass
        # only update when object exists
        if self.hostS != None:
            if self.hostS.closedWindow == False:
                self.hostS.update()
            else:
                self.closedWindow = True
        # only update when object exists
        if self.playVierOpEenRij != None:
            self.playVierOpEenRij.update()
|
{"/start.py": ["/screen_joinorhost.py"], "/screen_hostserver.py": ["/vieropeenrijserver.py"], "/testing material/bootscreen.py": ["/VierOpEenRij.py"], "/screen_joinorhost.py": ["/VierOpEenRij.py", "/screen_hostserver.py"]}
|
11,738
|
NicolasDutronc/FlappyBirdRL
|
refs/heads/master
|
/test.py
|
import gym
import gym_ple
import torch.optim as optim
import torch.nn as nn
from agents.dqn import DQNAgent
from models.cnn import CNNModel, DuelingCNNModel
from environment import Environment
# training hyperparameters
lr = 0.00001
momentum = 0.95
num_episodes = 1000000000  # effectively "train until interrupted"
batch_size = 32
# wrap the gym-ple Flappy Bird environment (frames preprocessed by Environment)
env = Environment('FlappyBird-v0')
# DuelingCNNModel sized to the environment's discrete action count
model = DuelingCNNModel(env.action_space())
optimizer = optim.RMSprop(params=model.parameters(), lr=lr, momentum=momentum)
# SmoothL1 (Huber) loss for the DQN targets
loss = nn.SmoothL1Loss()
agent = DQNAgent(environment=env, model=model, optimizer=optimizer, loss=loss)
agent.train(num_episodes=num_episodes, batch_size=batch_size, verbose=True)
|
{"/test.py": ["/agents/dqn.py", "/models/cnn.py", "/environment.py"], "/test_play.py": ["/agents/dqn.py", "/models/cnn.py", "/environment.py"], "/agents/dqn.py": ["/experience_replay.py"]}
|
11,739
|
NicolasDutronc/FlappyBirdRL
|
refs/heads/master
|
/test_play.py
|
# Watch the trained dueling DQN play greedily (play() reloads the saved
# model from ./model.pt at the start of every episode).
import gym
import gym_ple
from agents.dqn import DQNAgent
from models.cnn import DuelingCNNModel
from environment import Environment
import torch

env = Environment('FlappyBird-v0')
model = DuelingCNNModel(env.action_space())
# no optimizer/loss: play() only runs inference
agent = DQNAgent(environment=env, model=model)
agent.play()
|
{"/test.py": ["/agents/dqn.py", "/models/cnn.py", "/environment.py"], "/test_play.py": ["/agents/dqn.py", "/models/cnn.py", "/environment.py"], "/agents/dqn.py": ["/experience_replay.py"]}
|
11,740
|
NicolasDutronc/FlappyBirdRL
|
refs/heads/master
|
/environment.py
|
import gym
import gym_ple
import cv2
import numpy as np
class Environment:
    """Thin wrapper around a gym game that grayscales, rescales and
    normalises rendered frames for the agent."""

    def __init__(self, game, image_shape=(84, 84)):
        self.game = gym.make(game)
        self.image_shape = image_shape

    def reset(self):
        """Restart the episode and return the preprocessed first frame."""
        first_frame = self.game.reset()
        return self.preprocess(first_frame)

    def preprocess(self, image):
        """Turn an RGB frame into a normalised grayscale array of
        shape ``image_shape`` with values in [0, 1]."""
        gray = cv2.cvtColor(image, cv2.COLOR_RGB2GRAY)
        resized = cv2.resize(gray, self.image_shape)
        scaled = resized / 255
        return scaled.reshape(self.image_shape)

    def get_screen(self):
        """Grab the currently rendered frame, preprocessed."""
        return self.preprocess(self.game.render('rgb_array'))

    def step(self, action):
        """Forward `action` to the game; returns the raw gym step tuple."""
        return self.game.step(action)

    def action_space(self):
        """Number of discrete actions in the game."""
        return self.game.action_space.n

    def random_action(self):
        """Sample a uniformly random action."""
        return self.game.action_space.sample()

    def render(self):
        """Render the game window."""
        self.game.render()
|
{"/test.py": ["/agents/dqn.py", "/models/cnn.py", "/environment.py"], "/test_play.py": ["/agents/dqn.py", "/models/cnn.py", "/environment.py"], "/agents/dqn.py": ["/experience_replay.py"]}
|
11,741
|
NicolasDutronc/FlappyBirdRL
|
refs/heads/master
|
/models/cnn.py
|
import torch
import torch.nn as nn
import torch.nn.functional as F
class CNNModel(nn.Module):
    """Plain DQN convolutional Q-network.

    Takes a stack of 4 grayscale 84x84 frames, shape (N, 4, 84, 84),
    and returns one Q-value per action, shape (N, action_space).
    """

    def __init__(self, action_space):
        super(CNNModel, self).__init__()
        # spatial size shrinks 84 -> 20 -> 9 -> 7 through the convolutions
        self.conv1 = nn.Conv2d(in_channels=4, out_channels=32, kernel_size=8, stride=4)  # 20
        self.bn1 = nn.BatchNorm2d(num_features=32)
        self.conv2 = nn.Conv2d(in_channels=32, out_channels=64, kernel_size=4, stride=2)  # 9
        self.bn2 = nn.BatchNorm2d(num_features=64)
        self.conv3 = nn.Conv2d(in_channels=64, out_channels=64, kernel_size=3, stride=1)  # 7
        self.bn3 = nn.BatchNorm2d(num_features=64)
        self.fc1 = nn.Linear(in_features=7 * 7 * 64, out_features=512)
        self.out = nn.Linear(in_features=512, out_features=action_space)

    def forward(self, x):
        """Map a (N, 4, 84, 84) batch to (N, action_space) Q-values."""
        x = F.relu(self.bn1(self.conv1(x)))
        x = F.relu(self.bn2(self.conv2(x)))
        x = F.relu(self.bn3(self.conv3(x)))
        x = x.view(-1, self.num_flat_features(x))
        x = F.relu(self.fc1(x))
        return self.out(x)

    def num_flat_features(self, x):
        """Features per sample when flattening `x` (all dims except batch)."""
        size = x.size()[1:]  # all dimensions except the batch dimension
        num_features = 1
        for s in size:
            num_features *= s
        return num_features

    def weights_init(self):
        """Xavier-initialise all weight matrices.

        Fixes the original implementation, which (a) used the deprecated
        ``nn.init.xavier_normal`` and (b) applied it to every parameter,
        raising ValueError on 1-d tensors (biases, batch-norm parameters)
        for which fan-in/fan-out is undefined. It also printed every
        parameter tensor as a debug side effect.
        """
        for p in self.parameters():
            if p.dim() > 1:
                nn.init.xavier_normal_(p, gain=nn.init.calculate_gain('relu'))
class DuelingCNNModel(nn.Module):
    """Dueling DQN Q-network: shared conv trunk, then separate state-value
    and advantage streams, combined as Q = V + (A - mean(A)).

    Input (N, 4, 84, 84); output (N, action_space).
    """

    def __init__(self, action_space):
        super(DuelingCNNModel, self).__init__()
        # spatial size shrinks 84 -> 20 -> 9 -> 7 through the convolutions
        self.conv1 = nn.Conv2d(in_channels=4, out_channels=32, kernel_size=8, stride=4)  # 20
        self.bn1 = nn.BatchNorm2d(num_features=32)
        self.conv2 = nn.Conv2d(in_channels=32, out_channels=64, kernel_size=4, stride=2)  # 9
        self.bn2 = nn.BatchNorm2d(num_features=64)
        self.conv3 = nn.Conv2d(in_channels=64, out_channels=64, kernel_size=3, stride=1)  # 7
        self.bn3 = nn.BatchNorm2d(num_features=64)
        self.fcValue = nn.Linear(in_features=7 * 7 * 64, out_features=512)
        self.fcAdvantage = nn.Linear(in_features=7 * 7 * 64, out_features=512)
        self.value = nn.Linear(in_features=512, out_features=1)
        self.advantages = nn.Linear(in_features=512, out_features=action_space)

    def forward(self, x):
        """Map a (N, 4, 84, 84) batch to (N, action_space) Q-values."""
        x = F.relu(self.bn1(self.conv1(x)))
        x = F.relu(self.bn2(self.conv2(x)))
        x = F.relu(self.bn3(self.conv3(x)))
        x = x.view(-1, self.num_flat_features(x))
        value = F.relu(self.fcValue(x))
        value = self.value(value)
        advantages = F.relu(self.fcAdvantage(x))
        advantages = self.advantages(advantages)
        # Center advantages per sample over the action dimension.
        # The original used advantages.mean() (a global mean), which for
        # batch size > 1 mixed advantage estimates across samples.
        out = value + (advantages - advantages.mean(dim=1, keepdim=True))
        return out

    def num_flat_features(self, x):
        """Features per sample when flattening `x` (all dims except batch)."""
        size = x.size()[1:]  # all dimensions except the batch dimension
        num_features = 1
        for s in size:
            num_features *= s
        return num_features

    def weights_init(self):
        """Xavier-initialise weight matrices, skipping 1-d parameters
        (biases, batch-norm) for which fan-in/fan-out is undefined.
        Replaces the deprecated ``nn.init.xavier_normal`` and the debug
        print of every parameter."""
        for p in self.parameters():
            if p.dim() > 1:
                nn.init.xavier_normal_(p, gain=nn.init.calculate_gain('relu'))
if __name__ == "__main__":
    # Quick manual check of the architecture. Guarded so that importing
    # models.cnn (as test.py / test_play.py do) no longer constructs and
    # prints a throwaway model as an import-time side effect.
    model = CNNModel(2)
    print(model)
|
{"/test.py": ["/agents/dqn.py", "/models/cnn.py", "/environment.py"], "/test_play.py": ["/agents/dqn.py", "/models/cnn.py", "/environment.py"], "/agents/dqn.py": ["/experience_replay.py"]}
|
11,742
|
NicolasDutronc/FlappyBirdRL
|
refs/heads/master
|
/experience_replay.py
|
import numpy as np
import random
from collections import deque, namedtuple
# One step of interaction with the environment.
Transition = namedtuple('Transition', ['obs', 'action', 'reward', 'next_obs', 'done'])


class Experience_buffer:
    """Fixed-capacity FIFO replay memory of Transition tuples.

    Once ``buffer_size`` transitions are stored, adding a new one evicts
    the oldest.
    """

    def __init__(self, buffer_size=10000):
        # maxlen makes the deque drop its oldest element automatically,
        # replacing the original is_full()/`del buffer[0]` dance
        # (deleting at index 0 of a deque is O(n); maxlen eviction is O(1)).
        self.buffer = deque(maxlen=buffer_size)
        self.buffer_size = buffer_size

    def __len__(self):
        return len(self.buffer)

    def __repr__(self):
        s = 'buffer\n'
        s += 'items:\n'
        for item in self.buffer:
            s += str(item)
            s += '\n'
        s += 'size: ' + str(len(self.buffer))
        return s

    def is_full(self):
        """True when the memory holds buffer_size transitions."""
        return len(self.buffer) == self.buffer_size

    def add(self, *args):
        """Store one transition (obs, action, reward, next_obs, done),
        evicting the oldest transition if the buffer is full."""
        self.buffer.append(Transition(*args))

    def sample(self, size):
        """Return `size` transitions drawn uniformly without replacement."""
        return random.sample(self.buffer, size)
# NOTE: ad-hoc manual test, toggled by moving the quotes (the trailing
# `#'''` line closes the string literal); kept for reference.
'''
buffer = Experience_buffer(10)
for _ in range(15):
    exp = np.random.random((1, 5, 5))
    buffer.add(exp)
print(buffer)
print()
batch = buffer.sample(7)
print(batch)
print(batch.shape)
#'''
|
{"/test.py": ["/agents/dqn.py", "/models/cnn.py", "/environment.py"], "/test_play.py": ["/agents/dqn.py", "/models/cnn.py", "/environment.py"], "/agents/dqn.py": ["/experience_replay.py"]}
|
11,743
|
NicolasDutronc/FlappyBirdRL
|
refs/heads/master
|
/agents/dqn.py
|
import copy
import numpy as np
import math
from collections import deque
from experience_replay import Experience_buffer
import torch
from torch.autograd import Variable
class DQNAgent:
    """(Double) DQN agent with experience replay and a periodically
    synchronised target network.

    NOTE(review): written against the pre-0.4 torch API
    (``Variable``/``volatile``/``loss.data[0]``); keep that in mind
    before upgrading torch.
    """

    def __init__(
            self,
            environment,
            model=None,
            optimizer=None,
            loss=None,
            model_path='./model.pt',
            save_model_freq=5,
            update_target_freq=1000,
            update_model_freq=4,
            replay_size_start=5000,
            action_repeat=4,
            frame_skipping=4,
            discount_factor=0.99,
            exploration_rate_start=0.2,
            exploration_rate_end=0.01,
            exploration_decay=1e5):
        # objects
        self.environment = environment
        self.model = model
        # target network starts as an exact copy of the online model
        self.target_model = copy.deepcopy(self.model)
        self.optimizer = optimizer
        self.loss = loss
        self.model_path = model_path
        # last `action_repeat` frames, stacked to form one state
        self.state_buffer = deque(maxlen=action_repeat)
        self.replay_memory = Experience_buffer()
        # statistics
        self.num_updates = 0
        self.num_steps = 0
        self.last_rewards = deque(maxlen=100)
        # frequences
        self.save_model_freq = save_model_freq
        self.update_target_freq = update_target_freq
        self.update_model_freq = update_model_freq
        # other parameters
        self.replay_size_start = replay_size_start
        self.action_repeat = action_repeat
        self.frame_skipping = frame_skipping
        self.discount_factor = discount_factor
        self.current_best_reward = 0
        self.playing = False
        # exploration parameters (linear epsilon decay)
        self.exploration_rate = exploration_rate_start
        self.exploration_rate_end = exploration_rate_end
        self.exploration_rate_step = (exploration_rate_start - exploration_rate_end) / exploration_decay

    def select_action(self, state):
        """Return an action index for `state`: greedy when playing,
        epsilon-greedy (with linearly decaying epsilon) when training.

        Also increments ``num_steps`` as a side effect.
        """
        self.num_steps += 1
        if self.playing:
            state = Variable(torch.from_numpy(state).unsqueeze(0).float(), volatile=True)
            q_values = self.model(state).data
            return np.argmax(q_values.numpy())
        else:
            # decay epsilon only once the replay-memory warm-up is over
            if self.num_steps > self.replay_size_start and self.exploration_rate > self.exploration_rate_end:
                self.exploration_rate -= self.exploration_rate_step
            if np.random.rand() < self.exploration_rate:
                return self.environment.random_action()
            else:
                state = Variable(torch.from_numpy(state).unsqueeze(0).float(), volatile=True)
                q_values = self.model(state).data
                return np.argmax(q_values.numpy())

    def update(self, data):
        """Run one Double-DQN optimisation step on a batch of Transitions.

        Returns the scalar loss value for reporting.
        """
        observations = Variable(torch.from_numpy(np.array(tuple(data[i].obs for i in range(len(data))))).float())
        actions = Variable(torch.from_numpy(np.array(tuple(data[i].action for i in range(len(data))))).long())
        rewards = Variable(torch.from_numpy(np.array(tuple(data[i].reward for i in range(len(data))))).float())
        next_obs = Variable(torch.from_numpy(np.array(tuple(data[i].next_obs for i in range(len(data))))).float())
        # dones is 0.0 for terminal transitions, 1.0 otherwise, so
        # multiplying by it zeroes the bootstrap term at episode ends
        dones = Variable(torch.from_numpy(np.array(tuple(0. if data[i].done else 1. for i in range(len(data))))).float())
        # Double DQN: target network evaluates the actions that the
        # online network would pick in the next state
        next_max_q_values = self.target_model(next_obs)
        next_max_q_values = Variable(next_max_q_values.data)
        best_actions = self.model(next_obs)
        best_actions = Variable(best_actions.data)
        _, best_actions = best_actions.max(dim=1, keepdim=True)
        next_max_q_values = next_max_q_values.gather(1, best_actions)
        next_max_q_values = next_max_q_values * dones.unsqueeze(1)
        current_q_values = self.model(observations).gather(1, actions.unsqueeze(1)).squeeze()
        target_values = rewards + self.discount_factor * next_max_q_values.squeeze()
        # re-wrap the data to detach targets from the computation graph
        target_values = Variable(target_values.data)
        loss = self.loss(current_q_values, target_values)
        self.optimizer.zero_grad()
        loss.backward()
        # gradient clipping to [-1, 1]
        # NOTE(review): clamp() is NOT in-place; clamp_() was probably
        # intended, so this line likely has no effect — confirm.
        for param in self.model.parameters():
            param.grad.data.clamp(-1, 1)
        self.optimizer.step()
        self.num_updates += 1
        if self.num_updates % self.update_target_freq == 0:
            self.update_target()
        return loss.data[0]

    def save_model(self):
        """Persist the online network's weights to ``model_path``."""
        print('INFO AGENT: SAVING MODEL...')
        torch.save(self.model.state_dict(), self.model_path)

    def load_model(self):
        """Load weights from ``model_path`` into the online network."""
        self.model.load_state_dict(torch.load(self.model_path))

    def update_target(self):
        """Copy the online network's weights into the target network."""
        print('INFO TARGET: target updating... -----------------------------------------------------------------------------------')
        self.target_model.load_state_dict(self.model.state_dict())

    def get_recent_states(self):
        """Stack the buffered frames into one (action_repeat, H, W) state."""
        return np.array(self.state_buffer)

    def play(self, verbose=True):
        """Play greedily forever, reloading the saved model each episode."""
        self.playing = True
        i = 0
        while True:
            self.load_model()
            if verbose:
                print('Episode #', i)
            i += 1
            done = False
            episode_reward = 0
            num_episode_steps = 0
            self.environment.reset()
            # get first observation; the state buffer starts filled with
            # `action_repeat` copies of the first frame
            current_obs = self.environment.get_screen()
            self.state_buffer = deque(maxlen=self.action_repeat)
            for _ in range(self.action_repeat):
                self.state_buffer.append(current_obs)
            while not done:
                current_obs = self.get_recent_states()
                action = self.select_action(current_obs)
                num_episode_steps += 1
                _, reward, done, _ = self.environment.step(action)
                self.state_buffer.append(self.environment.get_screen())
                # reward shaping: +1 for surviving a frame, +5 for a pipe
                if reward == 0:
                    reward = 1
                elif reward == 1:
                    reward = 5
                self.environment.render()
                # update statistics
                episode_reward += reward
            if episode_reward > self.current_best_reward:
                self.current_best_reward = episode_reward
            self.last_rewards.append(episode_reward)
            if verbose:
                print('Reward:', episode_reward)
                print('Current best reward:', self.current_best_reward)
                print('Mean reward over the last 100 episodes:', np.mean(self.last_rewards))
                print('Max reward over the last 100 episodes:', np.max(self.last_rewards))
                print('Min reward over the last 100 episodes:', np.min(self.last_rewards))
                print()

    def train(self, num_episodes=10000, batch_size=32, verbose=True):
        """Train with epsilon-greedy exploration and experience replay.

        Random actions fill the replay memory during the first
        ``replay_size_start`` steps; afterwards the policy is queried and
        the model is optimised every ``update_model_freq`` steps.
        """
        for i in range(num_episodes):
            if verbose:
                print('Episode #', i)
            done = False
            episode_reward = 0
            num_episode_steps = 0
            current_loss = 0
            self.environment.reset()
            # get first observation; the state buffer starts filled with
            # `action_repeat` copies of the first frame
            current_obs = self.environment.get_screen()
            self.state_buffer = deque(maxlen=self.action_repeat)
            for _ in range(self.action_repeat):
                self.state_buffer.append(current_obs)
            while not done:
                current_obs = self.get_recent_states()
                if self.num_steps > self.replay_size_start:
                    # after warm-up: every `frame_skipping`-th step forces
                    # action 1, otherwise ask the epsilon-greedy policy
                    if self.num_steps % self.frame_skipping == 0:
                        action = 1
                        self.num_steps += 1
                    else:
                        action = self.select_action(current_obs)
                    # action = self.select_action(current_obs)
                else:
                    # warm-up phase: fill the replay memory with random play
                    action = self.environment.random_action()
                    self.num_steps += 1
                num_episode_steps += 1
                # skip some frames
                #for _ in range(self.frame_skipping):
                #    _, reward, done, _ = self.environment.step(action)
                #    self.state_buffer.append(self.environment.get_screen())
                #    if done:
                #        break
                _, reward, done, _ = self.environment.step(action)
                self.state_buffer.append(self.environment.get_screen())
                # reward shaping: +1 for surviving a frame, +5 for a pipe
                if reward == 0:
                    reward = 1
                elif reward == 1:
                    reward = 5
                next_obs = self.get_recent_states()
                self.replay_memory.add(current_obs, action, reward, next_obs, done)
                # update statistics
                episode_reward += reward
                # if the buffer is filled enough, periodically update the model
                if len(self.replay_memory) > batch_size and self.num_steps % self.update_model_freq == 0 and len(self.replay_memory) > self.replay_size_start:
                    if verbose:
                        print('INFO: agent updating...')
                    batch = self.replay_memory.sample(batch_size)
                    current_loss = self.update(batch)
            self.last_rewards.append(episode_reward)
            if i % self.save_model_freq == 0 and self.num_steps > self.replay_size_start:
                self.save_model()
            if episode_reward > self.current_best_reward:
                self.current_best_reward = episode_reward
            if verbose:
                print('Reward:', episode_reward)
                print('Mean reward over the last 100 episodes:', np.mean(self.last_rewards))
                print('Max reward over the last 100 episodes:', np.max(self.last_rewards))
                print('Min reward over the last 100 episodes:', np.min(self.last_rewards))
                print('Current loss:', current_loss)
                print('Current exploration rate:', self.exploration_rate)
                print('Number of steps:', self.num_steps)
                print('Number of updates:', self.num_updates)
                print('Current best reward:', self.current_best_reward)
                print()
|
{"/test.py": ["/agents/dqn.py", "/models/cnn.py", "/environment.py"], "/test_play.py": ["/agents/dqn.py", "/models/cnn.py", "/environment.py"], "/agents/dqn.py": ["/experience_replay.py"]}
|
11,744
|
jakirkham/anaconda-project
|
refs/heads/master
|
/anaconda_project/internal/test/test_default_conda_manager.py
|
# -*- coding: utf-8 -*-
# ----------------------------------------------------------------------------
# Copyright © 2016, Continuum Analytics, Inc. All rights reserved.
#
# The full license is in the file LICENSE.txt, distributed with this software.
# ----------------------------------------------------------------------------
from __future__ import absolute_import, print_function
import codecs
import json
import os
import platform
import pytest
import time
from anaconda_project.env_spec import EnvSpec
from anaconda_project.conda_manager import CondaManagerError
from anaconda_project.version import version
from anaconda_project.internal.default_conda_manager import DefaultCondaManager
import anaconda_project.internal.pip_api as pip_api
from anaconda_project.internal.test.tmpfile_utils import with_directory_contents
from anaconda_project.internal.test.test_conda_api import monkeypatch_conda_not_to_use_links
# Paths (relative to an environment prefix) of the binaries the tests
# expect conda/pip to install.
if platform.system() == 'Windows':
    PYTHON_BINARY = "python.exe"
    # NOTE: was "Scripts\ipython.exe" with a single backslash; "\i" is an
    # invalid escape sequence (DeprecationWarning today, SyntaxError in a
    # future Python) that only happened to produce the intended bytes.
    # Now escaped consistently with FLAKE8_BINARY below.
    IPYTHON_BINARY = "Scripts\\ipython.exe"
    FLAKE8_BINARY = "Scripts\\flake8.exe"
else:
    PYTHON_BINARY = "bin/python"
    IPYTHON_BINARY = "bin/ipython"
    FLAKE8_BINARY = "bin/flake8"
test_spec = EnvSpec(name='myenv', conda_packages=['ipython'], pip_packages=['flake8'], channels=[])
def test_conda_create_and_install_and_remove(monkeypatch):
    """End-to-end check: create an env, install conda+pip packages,
    surface pip failures as CondaManagerError, and remove packages again."""
    monkeypatch_conda_not_to_use_links(monkeypatch)
    spec = test_spec
    assert spec.conda_packages == ('ipython', )
    assert spec.pip_packages == ('flake8', )

    # same spec plus a pip package name that cannot exist on PyPI
    spec_with_phony_pip_package = EnvSpec(name='myenv',
                                          conda_packages=['ipython'],
                                          pip_packages=['flake8', 'nope_not_a_thing'],
                                          channels=[])
    assert spec_with_phony_pip_package.conda_packages == ('ipython', )
    assert spec_with_phony_pip_package.pip_packages == ('flake8', 'nope_not_a_thing')
    assert spec_with_phony_pip_package.pip_package_names_set == set(('flake8', 'nope_not_a_thing'))

    # package url is supposed to be on a nonexistent port, if it
    # causes a problem we need to mock
    spec_with_bad_url_pip_package = EnvSpec(name='myenv',
                                            conda_packages=['ipython'],
                                            pip_packages=['flake8', 'https://127.0.0.1:24729/nope#egg=phony'],
                                            channels=[])
    assert spec_with_bad_url_pip_package.conda_packages == ('ipython', )
    assert spec_with_bad_url_pip_package.pip_packages == ('flake8', 'https://127.0.0.1:24729/nope#egg=phony')
    assert spec_with_bad_url_pip_package.pip_package_names_set == set(('flake8', 'phony'))

    def do_test(dirname):
        envdir = os.path.join(dirname, spec.name)
        manager = DefaultCondaManager()

        # nothing exists yet
        assert not os.path.isdir(envdir)
        assert not os.path.exists(os.path.join(envdir, IPYTHON_BINARY))
        assert not os.path.exists(os.path.join(envdir, FLAKE8_BINARY))
        assert not manager._timestamp_file_up_to_date(envdir, spec)

        deviations = manager.find_environment_deviations(envdir, spec)

        assert deviations.missing_packages == ('ipython', )
        assert deviations.missing_pip_packages == ('flake8', )

        # fixing the deviations creates the env, installs both packages
        # and writes the timestamp file for this spec (only)
        manager.fix_environment_deviations(envdir, spec, deviations)

        assert os.path.isdir(envdir)
        assert os.path.isdir(os.path.join(envdir, "conda-meta"))
        assert os.path.exists(os.path.join(envdir, IPYTHON_BINARY))
        assert os.path.exists(os.path.join(envdir, FLAKE8_BINARY))
        assert manager._timestamp_file_up_to_date(envdir, spec)
        assert not manager._timestamp_file_up_to_date(envdir, spec_with_phony_pip_package)

        # test bad pip package throws error
        deviations = manager.find_environment_deviations(envdir, spec_with_phony_pip_package)
        assert deviations.missing_packages == ()
        assert deviations.missing_pip_packages == ('nope_not_a_thing', )

        with pytest.raises(CondaManagerError) as excinfo:
            manager.fix_environment_deviations(envdir, spec_with_phony_pip_package, deviations)
        assert 'Failed to install missing pip packages' in str(excinfo.value)
        assert not manager._timestamp_file_up_to_date(envdir, spec_with_phony_pip_package)

        # test bad url package throws error
        deviations = manager.find_environment_deviations(envdir, spec_with_bad_url_pip_package)
        assert deviations.missing_packages == ()
        assert deviations.missing_pip_packages == ('phony', )

        with pytest.raises(CondaManagerError) as excinfo:
            manager.fix_environment_deviations(envdir, spec_with_bad_url_pip_package, deviations)
        assert 'Failed to install missing pip packages' in str(excinfo.value)
        assert not manager._timestamp_file_up_to_date(envdir, spec_with_bad_url_pip_package)

        # test that we can remove a package
        assert manager._timestamp_file_up_to_date(envdir, spec)
        manager.remove_packages(prefix=envdir, packages=['ipython'])
        assert not os.path.exists(os.path.join(envdir, IPYTHON_BINARY))
        assert not manager._timestamp_file_up_to_date(envdir, spec)

        # test for error removing
        with pytest.raises(CondaManagerError) as excinfo:
            manager.remove_packages(prefix=envdir, packages=['ipython'])
        # different versions of conda word this differently
        assert 'no packages found to remove' in str(excinfo.value) or 'Package not found' in str(excinfo.value)
        assert not manager._timestamp_file_up_to_date(envdir, spec)

        # test failure to exec pip
        def mock_call_pip(*args, **kwargs):
            raise pip_api.PipError("pip fail")

        monkeypatch.setattr('anaconda_project.internal.pip_api._call_pip', mock_call_pip)

        with pytest.raises(CondaManagerError) as excinfo:
            deviations = manager.find_environment_deviations(envdir, spec)
        assert 'pip failed while listing' in str(excinfo.value)

    with_directory_contents(dict(), do_test)
def test_timestamp_file_works(monkeypatch):
    """The timestamp file short-circuits deviation checks until something
    inside the environment (e.g. conda-meta) is modified."""
    monkeypatch_conda_not_to_use_links(monkeypatch)
    spec = test_spec

    def do_test(dirname):
        envdir = os.path.join(dirname, spec.name)
        manager = DefaultCondaManager()

        def print_timestamps(when):
            # debug helper: dump the timestamp-file mtime next to the
            # newest mtime among the compared environment directories
            newest_in_prefix = 0
            for d in manager._timestamp_comparison_directories(envdir):
                try:
                    t = os.path.getmtime(d)
                    if t > newest_in_prefix:
                        newest_in_prefix = t
                except Exception:
                    pass
            timestamp_file = 0
            try:
                timestamp_file = os.path.getmtime(manager._timestamp_file(envdir, spec))
            except Exception:
                pass
            print("%s: timestamp file %d prefix %d" % (when, timestamp_file, newest_in_prefix))

        print_timestamps("before env creation")

        assert not os.path.isdir(envdir)
        assert not os.path.exists(os.path.join(envdir, IPYTHON_BINARY))
        assert not os.path.exists(os.path.join(envdir, FLAKE8_BINARY))
        assert not manager._timestamp_file_up_to_date(envdir, spec)

        deviations = manager.find_environment_deviations(envdir, spec)

        assert deviations.missing_packages == ('ipython', )
        assert deviations.missing_pip_packages == ('flake8', )
        assert not deviations.ok

        manager.fix_environment_deviations(envdir, spec, deviations)

        print_timestamps("after fixing deviations")

        assert os.path.isdir(envdir)
        assert os.path.isdir(os.path.join(envdir, "conda-meta"))
        assert os.path.exists(os.path.join(envdir, IPYTHON_BINARY))
        assert os.path.exists(os.path.join(envdir, FLAKE8_BINARY))
        assert manager._timestamp_file_up_to_date(envdir, spec)

        # spy on pip/conda so we can prove they are NOT called while the
        # timestamp file is up to date
        called = []
        from anaconda_project.internal.pip_api import _call_pip as real_call_pip
        from anaconda_project.internal.conda_api import _call_conda as real_call_conda

        def traced_call_pip(*args, **kwargs):
            called.append(("pip", args, kwargs))
            return real_call_pip(*args, **kwargs)

        monkeypatch.setattr('anaconda_project.internal.pip_api._call_pip', traced_call_pip)

        def traced_call_conda(*args, **kwargs):
            called.append(("conda", args, kwargs))
            return real_call_conda(*args, **kwargs)

        monkeypatch.setattr('anaconda_project.internal.conda_api._call_conda', traced_call_conda)

        deviations = manager.find_environment_deviations(envdir, spec)
        assert [] == called
        assert deviations.missing_packages == ()
        assert deviations.missing_pip_packages == ()
        assert deviations.ok
        assert manager._timestamp_file_up_to_date(envdir, spec)

        # now modify conda-meta and check that we DO call the package managers
        time.sleep(1.1)  # be sure we are in a new second
        conda_meta_dir = os.path.join(envdir, "conda-meta")
        print("conda-meta original timestamp: %d" % os.path.getmtime(conda_meta_dir))
        inside_conda_meta = os.path.join(conda_meta_dir, "thing.txt")
        with codecs.open(inside_conda_meta, 'w', encoding='utf-8') as f:
            f.write(u"This file should change the mtime on conda-meta\n")
        print("file inside conda-meta %d and conda-meta itself %d" % (os.path.getmtime(inside_conda_meta),
                                                                     os.path.getmtime(conda_meta_dir)))
        os.remove(inside_conda_meta)

        print_timestamps("after touching conda-meta")

        assert not manager._timestamp_file_up_to_date(envdir, spec)

        deviations = manager.find_environment_deviations(envdir, spec)
        # both pip and conda were consulted this time
        assert len(called) == 2
        assert deviations.missing_packages == ()
        assert deviations.missing_pip_packages == ()
        # deviations should not be ok (due to timestamp)
        assert not deviations.ok
        assert not manager._timestamp_file_up_to_date(envdir, spec)

        # we want to be sure we update the timestamp file even though
        # there wasn't any actual work to do
        manager.fix_environment_deviations(envdir, spec, deviations)

        print_timestamps("after fixing deviations 2")

        assert manager._timestamp_file_up_to_date(envdir, spec)

    with_directory_contents(dict(), do_test)
def test_timestamp_file_ignores_failed_write(monkeypatch):
    """A failure writing the timestamp file is swallowed (it is only an
    optimization), and a successful write records the project version."""
    monkeypatch_conda_not_to_use_links(monkeypatch)
    spec = test_spec

    def do_test(dirname):
        from codecs import open as real_open
        envdir = os.path.join(dirname, spec.name)
        manager = DefaultCondaManager()

        counts = dict(calls=0)

        def mock_open(*args, **kwargs):
            # first call fails; later calls delegate to the real codecs.open
            counts['calls'] += 1
            if counts['calls'] == 1:
                raise IOError("did not open")
            else:
                return real_open(*args, **kwargs)

        monkeypatch.setattr('codecs.open', mock_open)

        # this should NOT throw but also should not write the
        # timestamp file (we ignore errors)
        filename = manager._timestamp_file(envdir, spec)
        assert filename.startswith(envdir)
        assert not os.path.exists(filename)
        manager._write_timestamp_file(envdir, spec)
        assert not os.path.exists(filename)

        # the second time we really write it (this is to prove we
        # are looking at the right filename)
        manager._write_timestamp_file(envdir, spec)
        assert os.path.exists(filename)

        # check on the file contents
        with real_open(filename, 'r', encoding='utf-8') as f:
            content = json.loads(f.read())
        assert dict(anaconda_project_version=version) == content

    with_directory_contents(dict(), do_test)
|
{"/anaconda_project/internal/test/test_default_conda_manager.py": ["/anaconda_project/internal/default_conda_manager.py"]}
|
11,745
|
jakirkham/anaconda-project
|
refs/heads/master
|
/anaconda_project/internal/default_conda_manager.py
|
# -*- coding: utf-8 -*-
# ----------------------------------------------------------------------------
# Copyright © 2016, Continuum Analytics, Inc. All rights reserved.
#
# The full license is in the file LICENSE.txt, distributed with this software.
# ----------------------------------------------------------------------------
"""Abstract high-level interface to Conda."""
from __future__ import absolute_import
import codecs
import glob
import json
import os
from anaconda_project.conda_manager import CondaManager, CondaEnvironmentDeviations, CondaManagerError
import anaconda_project.internal.conda_api as conda_api
import anaconda_project.internal.pip_api as pip_api
import anaconda_project.internal.makedirs as makedirs
from anaconda_project.version import version
class DefaultCondaManager(CondaManager):
def _timestamp_file(self, prefix, spec):
return os.path.join(prefix, "var", "cache", "anaconda-project", "env-specs", spec.channels_and_packages_hash)
def _timestamp_comparison_directories(self, prefix):
# this is a little bit heuristic; we are trying to detect
# if any packages are installed or removed. This may need
# to become more comprehensive. We don't want to check
# directories that would change at runtime like /var/run,
# and we need this to be reasonably fast (so we can't do a
# full directory walk or something). Remember that on
# Linux at least a new mtime on a directory means
# _immediate_ child directory entries were added or
# removed, changing the files themselves or the files in
# subdirs will not affect mtime. Windows may be a bit
# different.
# Linux
dirs = list(glob.iglob(os.path.join(prefix, "lib", "python*", "site-packages")))
dirs.append(os.path.join(prefix, "bin"))
dirs.append(os.path.join(prefix, "lib"))
# Windows
dirs.append(os.path.join(prefix, "Lib", "site-packages"))
dirs.append(os.path.join(prefix, "Library", "bin"))
dirs.append(os.path.join(prefix, "Scripts"))
# conda-meta
dirs.append(os.path.join(prefix, "conda-meta"))
return dirs
def _timestamp_file_up_to_date(self, prefix, spec):
# The goal here is to return False if 1) the env spec
# has changed (different hash) or 2) the environment has
# been modified (e.g. by pip or conda).
filename = self._timestamp_file(prefix, spec)
try:
stamp_mtime = os.path.getmtime(filename)
except OSError:
return False
dirs = self._timestamp_comparison_directories(prefix)
for d in dirs:
try:
d_mtime = os.path.getmtime(d)
except OSError:
d_mtime = 0
# When we write the timestamp, we put it 1s in the
# future, so we want >= here (if the d_mtime has gone
# into the future from when we wrote the timestamp,
# the directory has changed).
if d_mtime >= stamp_mtime:
return False
return True
def _write_timestamp_file(self, prefix, spec):
filename = self._timestamp_file(prefix, spec)
makedirs.makedirs_ok_if_exists(os.path.dirname(filename))
try:
with codecs.open(filename, 'w', encoding='utf-8') as f:
# we don't read the contents of the file for now, but
# recording the version in it in case in the future
# that is useful. We need to write something to the
# file to bump its mtime if it already exists...
f.write(json.dumps(dict(anaconda_project_version=version)) + "\n")
# set the timestamp 1s in the future, which guarantees
# it doesn't have the same mtime as any files in the
# environment changed by us; if another process
# changes some files during the current second, then
# we would not notice those changes. The alternative
# is that we falsely believe we changed things
# ourselves. Ultimately clock resolution keeps us from
# perfection here without some sort of cross-process
# locking.
actual_time = os.path.getmtime(filename)
next_tick_time = actual_time + 1
os.utime(filename, (next_tick_time, next_tick_time))
except (IOError, OSError):
# ignore errors because this is just an optimization, if we
# fail we will survive
pass
def _find_conda_missing(self, prefix, spec):
try:
installed = conda_api.installed(prefix)
except conda_api.CondaError as e:
raise CondaManagerError("Conda failed while listing installed packages in %s: %s" % (prefix, str(e)))
# TODO: we don't verify that the environment contains the right versions
# https://github.com/Anaconda-Server/anaconda-project/issues/77
missing = set()
for name in spec.conda_package_names_set:
if name not in installed:
missing.add(name)
return sorted(list(missing))
def _find_pip_missing(self, prefix, spec):
# this is an important optimization to avoid a slow "pip
# list" operation if the project has no pip packages
if len(spec.pip_package_names_set) == 0:
return []
try:
installed = pip_api.installed(prefix)
except pip_api.PipError as e:
raise CondaManagerError("pip failed while listing installed packages in %s: %s" % (prefix, str(e)))
# TODO: we don't verify that the environment contains the right versions
# https://github.com/Anaconda-Server/anaconda-project/issues/77
missing = set()
for name in spec.pip_package_names_set:
if name not in installed:
missing.add(name)
return sorted(list(missing))
def find_environment_deviations(self, prefix, spec):
if not os.path.isdir(os.path.join(prefix, 'conda-meta')):
return CondaEnvironmentDeviations(
summary="'%s' doesn't look like it contains a Conda environment yet." % (prefix),
missing_packages=tuple(spec.conda_package_names_set),
wrong_version_packages=(),
missing_pip_packages=tuple(spec.pip_package_names_set),
wrong_version_pip_packages=(),
broken=True)
if self._timestamp_file_up_to_date(prefix, spec):
conda_missing = []
pip_missing = []
timestamp_ok = True
else:
conda_missing = self._find_conda_missing(prefix, spec)
pip_missing = self._find_pip_missing(prefix, spec)
timestamp_ok = False
if len(conda_missing) > 0 or len(pip_missing) > 0:
summary = "Conda environment is missing packages: %s" % (", ".join(conda_missing + pip_missing))
elif not timestamp_ok:
summary = "Conda environment needs to be marked as up-to-date"
else:
summary = "OK"
return CondaEnvironmentDeviations(summary=summary,
missing_packages=conda_missing,
wrong_version_packages=(),
missing_pip_packages=pip_missing,
wrong_version_pip_packages=(),
broken=(not timestamp_ok))
def fix_environment_deviations(self, prefix, spec, deviations=None, create=True):
if deviations is None:
deviations = self.find_environment_deviations(prefix, spec)
command_line_packages = set(spec.conda_packages)
# conda won't let us create a completely empty environment
if len(command_line_packages) == 0:
command_line_packages = set(['python'])
if os.path.isdir(os.path.join(prefix, 'conda-meta')):
missing = deviations.missing_packages
if len(missing) > 0:
specs = spec.specs_for_conda_package_names(missing)
assert len(specs) == len(missing)
try:
conda_api.install(prefix=prefix, pkgs=specs, channels=spec.channels)
except conda_api.CondaError as e:
raise CondaManagerError("Failed to install missing packages: {}: {}".format(", ".join(missing), str(
e)))
elif create:
# Create environment from scratch
try:
conda_api.create(prefix=prefix, pkgs=list(command_line_packages), channels=spec.channels)
except conda_api.CondaError as e:
raise CondaManagerError("Failed to create environment at %s: %s" % (prefix, str(e)))
else:
raise CondaManagerError("Conda environment at %s does not exist" % (prefix))
# now add pip if needed
missing = list(deviations.missing_pip_packages)
if len(missing) > 0:
specs = spec.specs_for_pip_package_names(missing)
assert len(specs) == len(missing)
try:
pip_api.install(prefix=prefix, pkgs=specs)
except pip_api.PipError as e:
raise CondaManagerError("Failed to install missing pip packages: {}: {}".format(", ".join(missing), str(
e)))
# write a file to tell us we can short-circuit next time
self._write_timestamp_file(prefix, spec)
def remove_packages(self, prefix, packages):
    """Uninstall ``packages`` from the conda environment at ``prefix``,
    wrapping any conda failure in a ``CondaManagerError``."""
    try:
        conda_api.remove(prefix, packages)
    except conda_api.CondaError as error:
        message = "Failed to remove packages from %s: %s" % (prefix, str(error))
        raise CondaManagerError(message)
|
{"/anaconda_project/internal/test/test_default_conda_manager.py": ["/anaconda_project/internal/default_conda_manager.py"]}
|
11,848
|
weaverba137/comparator
|
refs/heads/main
|
/comparator/test/__init__.py
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
# -*- coding: utf-8 -*-
"""
comparator.test
===============
Used to initialize the unit test framework.
"""
|
{"/comparator/test/test_top_level.py": ["/comparator/__init__.py"], "/comparator/find.py": ["/comparator/db.py"], "/comparator/initialize.py": ["/comparator/db.py", "/comparator/find.py"]}
|
11,849
|
weaverba137/comparator
|
refs/heads/main
|
/comparator/__init__.py
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst.
# -*- coding: utf-8 -*-
"""
comparator
==========
Obtain filesystem metadata necessary for comparing the same data set
at different locations.
"""
# Package version string; comparator.test.test_top_level checks it against
# a PEP 386/PEP 440 pattern.
__version__ = '0.2.0.dev17'
|
{"/comparator/test/test_top_level.py": ["/comparator/__init__.py"], "/comparator/find.py": ["/comparator/db.py"], "/comparator/initialize.py": ["/comparator/db.py", "/comparator/find.py"]}
|
11,850
|
weaverba137/comparator
|
refs/heads/main
|
/comparator/test/test_top_level.py
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
# -*- coding: utf-8 -*-
"""
comparator.test.test_top_level
==============================
Test top-level comparator functions.
"""
import re
from .. import __version__ as theVersion
def test_version_string():
    """Ensure the version conforms to PEP386/PEP440.

    Uses ``fullmatch`` so that trailing garbage after an otherwise-valid
    version (e.g. ``0.2.0.dev17junk``) is rejected; the previous ``match``
    call only required a valid *prefix*, so any string starting with a
    digit would pass.
    """
    versionre = re.compile(r'([0-9]+!)?([0-9]+)(\.[0-9]+)*((a|b|rc|\.post|\.dev)[0-9]+)?')
    assert versionre.fullmatch(theVersion) is not None
|
{"/comparator/test/test_top_level.py": ["/comparator/__init__.py"], "/comparator/find.py": ["/comparator/db.py"], "/comparator/initialize.py": ["/comparator/db.py", "/comparator/find.py"]}
|
11,851
|
weaverba137/comparator
|
refs/heads/main
|
/comparator/checksum.py
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst.
# -*- coding: utf-8 -*-
"""
comparator.checksum
===================
Obtain checksum files from an authoritative source.
"""
|
{"/comparator/test/test_top_level.py": ["/comparator/__init__.py"], "/comparator/find.py": ["/comparator/db.py"], "/comparator/initialize.py": ["/comparator/db.py", "/comparator/find.py"]}
|
11,852
|
weaverba137/comparator
|
refs/heads/main
|
/setup.py
|
#!/usr/bin/env python
# Licensed under a 3-clause BSD style license - see LICENSE.rst
# NOTE: The configuration for the package, including the name, version, and
# other information are set in the setup.cfg file.
import sys
from setuptools import setup

# First provide helpful messages if contributors try and run legacy commands
# for tests or docs.
TEST_HELP = """
Note: running tests is no longer done using 'python setup.py test'. Instead
you will need to run:
pytest
If you don't already have pytest installed, you can install it with:
pip install pytest
"""
DOCS_HELP = """
Note: building the documentation is no longer done using
'python setup.py {0}'. Instead you will need to run:
sphinx-build -W --keep-going -b html doc doc/_build/html
If you don't already have Sphinx installed, you can install it with:
pip install Sphinx
"""
# Map each legacy setup.py subcommand to the help text describing its
# modern replacement.
message = {'test': TEST_HELP,
           'build_docs': DOCS_HELP.format('build_docs'),
           'build_sphinx': DOCS_HELP.format('build_sphinx'), }
# Abort with guidance if any legacy command appears on the command line.
for m in message:
    if m in sys.argv:
        print(message[m])
        sys.exit(1)
# All real package configuration lives in setup.cfg.
setup()
|
{"/comparator/test/test_top_level.py": ["/comparator/__init__.py"], "/comparator/find.py": ["/comparator/db.py"], "/comparator/initialize.py": ["/comparator/db.py", "/comparator/find.py"]}
|
11,853
|
weaverba137/comparator
|
refs/heads/main
|
/comparator/db.py
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst.
# -*- coding: utf-8 -*-
"""
comparator.db
=============
Contains SQLAlchemy classes.
"""
import os
from sqlalchemy import (ForeignKey, Column, Integer, String, Float,
DateTime, Boolean)
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.ext.hybrid import hybrid_property
from sqlalchemy.orm import (scoped_session, sessionmaker, relationship,
backref, reconstructor)
from sqlalchemy.orm.exc import NoResultFound, MultipleResultsFound
from sqlalchemy.orm.collections import attribute_mapped_collection
from sqlalchemy.types import TypeDecorator
Base = declarative_base()
engine = None
Session = scoped_session(sessionmaker())
_missing = object() # sentinel object for missing values
class cached_hybrid_property(hybrid_property):
    """A ``hybrid_property`` that memoizes its instance-level value.

    The first instance access runs the wrapped getter and stores the result
    in the instance ``__dict__`` under the getter's name; later accesses
    return the stored value. Class-level access still yields the SQL
    expression.
    """

    def __get__(self, instance, owner):
        if instance is None:
            # Class-level access: return the SQL expression.
            return self.expr(owner)
        key = self.fget.__name__
        cached = instance.__dict__.get(key, _missing)
        if cached is _missing:
            cached = self.fget(instance)
            instance.__dict__[key] = cached
        return cached
class FileSystem(Base):
    """A named filesystem rooting a directory tree."""

    __tablename__ = 'filesystem'
    id = Column(Integer, primary_key=True)
    # Absolute path naming this filesystem; unique across rows.
    name = Column(String, nullable=False, unique=True)

    def __repr__(self):
        template = "<FileSystem(id={0.id:d}, name='{0.name}')>"
        return template.format(self)
class Directory(Base):
    """Representation of a directory.

    Rows form an adjacency list: ``parent_id`` points at the parent row,
    and the root row of a filesystem points at itself (see
    :func:`comparator.find.directories`, which seeds the root that way).

    Notes
    -----
    See https://docs.sqlalchemy.org/en/latest/_modules/examples/adjacency_list/adjacency_list.html
    """
    __tablename__ = 'directory'
    id = Column(Integer, primary_key=True)
    # Owning filesystem.
    filesystem_id = Column(Integer, ForeignKey('filesystem.id'), nullable=False)
    # Self-referential key implementing the adjacency list.
    parent_id = Column(Integer, ForeignKey(id), nullable=False, index=True)
    # Number of non-directory entries directly inside this directory.
    nfiles = Column(Integer, nullable=False, default=0)
    # Single path component; the filesystem root uses the empty string.
    name = Column(String, nullable=False)
    filesystem = relationship('FileSystem', back_populates='directories')
    children = relationship("Directory",
                            cascade="all, delete-orphan",  # cascade deletions
                            # many to one + adjacency list - remote_side is
                            # required to reference the 'remote' column
                            # in the join condition.
                            backref=backref("parent", remote_side=id),
                            # children will be represented as a dictionary
                            # on the "name" attribute.
                            collection_class=attribute_mapped_collection("name"))

    def __repr__(self):
        return ("<Directory(id={0.id:d}, " +
                "filesystem_id={0.filesystem_id:d}, " +
                "parent_id={0.parent_id:d}, " +
                "nfiles={0.nfiles:d}, " +
                "name='{0.name}')>").format(self)

    @cached_hybrid_property
    def fullpath(self):
        """Full system directory path.

        Walks the ``parent`` chain up to the root (whose name is the empty
        string), then prefixes the filesystem name. Cached on the instance
        after first access via ``cached_hybrid_property``.
        """
        if not self.name:
            return self.filesystem.name
        fp = [self.name]
        parent = self.parent
        while parent.name:
            fp.insert(0, parent.name)
            parent = parent.parent
        fp.insert(0, self.filesystem.name)
        return os.path.join(*fp)
# Collection side of the FileSystem <-> Directory one-to-many.
FileSystem.directories = relationship('Directory', back_populates='filesystem')
class File(Base):
    """Representation of an ordinary file or a symlink.

    Symlinks are stored with ``link=True`` and the raw link target in
    ``destination``; the scanner records their ``size`` and ``mtime`` as
    zero (see :func:`comparator.find.files`).
    """
    __tablename__ = 'file'
    id = Column(Integer, primary_key=True)
    directory_id = Column(Integer, ForeignKey('directory.id'), nullable=False)
    # mode = Column(String(10), nullable=False)
    # uid = Column(Integer, ForeignKey('users.uid'), nullable=False)
    # gid = Column(Integer, ForeignKey('groups.gid'), nullable=False)
    # Size in bytes (0 for symlinks).
    size = Column(Integer, nullable=False)
    # mtime = Column(AwareDateTime(timezone=True), nullable=False)
    # Modification time as integer seconds (int(st_mtime)).
    mtime = Column(Integer, nullable=False)
    name = Column(String, nullable=False)
    link = Column(Boolean, nullable=False, default=False)
    destination = Column(String, nullable=False, default='')
    directory = relationship('Directory', back_populates='files')

    def __repr__(self):
        # BUG FIX: the original string concatenation ran "link=..." and
        # "destination=..." together with no separating ", ".
        return ("<File(id={0.id:d}, " +
                "directory_id={0.directory_id:d}, " +
                # "mode='{0.mode}', " +
                # "uid={0.uid:d}, " +
                # "gid={0.gid:d}, " +
                "size={0.size:d}, " +
                # "mtime='{0.mtime}', " +
                "mtime={0.mtime:d}, " +
                "name='{0.name}', " +
                "link={0.link}, " +
                "destination='{0.destination}')>").format(self)

    @property
    def path(self):
        """Full system path to the file."""
        return os.path.join(self.directory.fullpath, self.name)

    @property
    def realpath(self):
        """Full system path to the target of a symlink, if the file is a
        symlink; otherwise the same as :attr:`path`.
        """
        if self.link:
            return os.path.realpath(self.path)
        else:
            return self.path
# Ordered collection side of the Directory <-> File one-to-many.
Directory.files = relationship('File', order_by=File.name,
                               back_populates='directory')
|
{"/comparator/test/test_top_level.py": ["/comparator/__init__.py"], "/comparator/find.py": ["/comparator/db.py"], "/comparator/initialize.py": ["/comparator/db.py", "/comparator/find.py"]}
|
11,854
|
weaverba137/comparator
|
refs/heads/main
|
/comparator/find.py
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst.
# -*- coding: utf-8 -*-
"""
comparator.find
===============
Utilities for scanning a filesystem.
"""
import os
from .db import Session, Directory, File
def walk(top):
    """Simplified directory tree generator.

    Adapted from :func:`os.walk`, the yield is similar, but symbolic
    links are *always* treated as files, even if they point to directories,
    and never followed.

    For each directory in the directory tree rooted at `top` (including `top`
    itself, but excluding '.' and '..'), yields a 3-tuple::

        dirpath, dirnames, filenames

    ``dirpath`` is a string, the path to the directory. ``dirnames`` is a
    list of :class:`os.DirEntry` objects for subdirectories in dirpath
    (excluding '.' and '..'). ``filenames`` is a list of :class:`os.DirEntry`
    objects for the non-directory files in ``dirpath``.
    """
    dirs = []
    nondirs = []
    # We may not have read permission for top, in which case we can't
    # get a list of the files the directory contains. os.walk
    # always suppressed the exception then, rather than blow up for a
    # minor reason when (say) a thousand readable directories are still
    # left to visit. That logic is copied here.
    # (Cleanup: the exceptions below were bound to an unused 'error' name.)
    try:
        scandir_it = os.scandir(top)
    except OSError:
        return
    with scandir_it:
        while True:
            try:
                try:
                    entry = next(scandir_it)
                except StopIteration:
                    break
            except OSError:
                # Reading the directory failed mid-scan; stop quietly.
                return
            try:
                is_dir = entry.is_dir(follow_symlinks=False)
            except OSError:
                # If is_dir() raises an OSError, consider that the entry is not
                # a directory, same behaviour than os.path.isdir().
                is_dir = False
            if is_dir:
                dirs.append(entry)
            else:
                nondirs.append(entry)
    yield top, dirs, nondirs
    # Recurse into sub-directories
    for d in dirs:
        new_path = os.path.join(top, d.name)
        # Issue #23605: os.path.islink() is used instead of caching
        # entry.is_symlink() result during the loop on os.scandir() because
        # the caller can replace the directory entry during the "yield"
        # above.
        if not os.path.islink(new_path):
            yield from walk(new_path)
def directories(fs, directory_id=1):
    """Find all physical directories on filesystem `fs`.

    Parameters
    ----------
    fs : :class:`FileSystem`
        The filesystem to scan.
    directory_id : :class:`int`, optional
        The id number of the directory corresponding to the root of `fs`.

    Returns
    -------
    :class:`int`
        The id of the last directory found. If scanning multiple filesystems,
        add one (1) to this number to set the `directory_id` for top of the
        next filesystem.
    """
    # Map of full path -> assigned Directory.id, seeded with the root row,
    # whose parent is itself and whose name is the empty string.
    parents = {fs.name: directory_id}
    Session.add(Directory(id=directory_id, filesystem_id=fs.id,
                          parent_id=parents[fs.name], name=''))
    Session.commit()
    for dirpath, dirnames, filenames in walk(fs.name):
        # walk() always yields a directory before its subdirectories, so
        # parents[dirpath] is guaranteed to be populated by now.
        p = Session.query(Directory).filter(Directory.id == parents[dirpath]).one()
        p.nfiles = len(filenames)
        for d in dirnames:
            directory_id += 1
            parents[os.path.join(dirpath, d.name)] = directory_id
            Session.add(Directory(id=directory_id, filesystem_id=fs.id,
                                  parent_id=parents[dirpath], name=d.name))
        Session.commit()
    return directory_id
def files(directory):
    """Find files in `directory`; identify symlinks.

    Adds one :class:`File` row per non-directory entry, committing once at
    the end. Symlinks are stored with ``size=0, mtime=0`` and their raw
    link target; regular files store their actual size and integer mtime.

    Parameters
    ----------
    directory : :class:`Directory`
        Directory to scan with :func:`os.scandir()`.
    """
    base = directory.fullpath
    with os.scandir(base) as entries:
        for entry in entries:
            if entry.is_dir(follow_symlinks=False):
                continue
            if entry.is_symlink():
                target = os.readlink(os.path.join(base, entry.name))
                row = File(directory_id=directory.id,
                           size=0, mtime=0,
                           name=entry.name,
                           link=True, destination=target)
            else:
                stat = entry.stat(follow_symlinks=False)
                row = File(directory_id=directory.id,
                           size=stat.st_size,
                           mtime=int(stat.st_mtime),
                           name=entry.name)
            Session.add(row)
    Session.commit()
|
{"/comparator/test/test_top_level.py": ["/comparator/__init__.py"], "/comparator/find.py": ["/comparator/db.py"], "/comparator/initialize.py": ["/comparator/db.py", "/comparator/find.py"]}
|
11,855
|
weaverba137/comparator
|
refs/heads/main
|
/comparator/initialize.py
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst.
# -*- coding: utf-8 -*-
"""
comparator.initialize
=====================
Obtain filesystem metadata necessary for comparing the same data set
at different locations.
"""
import os
from sqlalchemy import create_engine, func
from sqlalchemy.orm.exc import NoResultFound, MultipleResultsFound
from .db import engine, Session, Base, FileSystem, Directory, File
from .find import directories, files
def _options():
"""Parse the command-line options.
Returns
-------
The parsed options.
"""
from sys import argv
from argparse import ArgumentParser
xct = os.path.basename(argv[0])
desc = "Obtain filesystem metadata necessary for comparing the same data set at different locations."
prsr = ArgumentParser(description=desc, prog=xct)
prsr.add_argument('-f', '--filesystem', action='append',
dest='filesystem', metavar="DIR",
help='FileSystem(s) to examine.')
prsr.add_argument('-F', '--skip-files', action='store_true',
dest='skip_files', help='Skip the file search stage.')
# prsr.add_argument('-l', '--log-dir', dest='logging', metavar='DIR',
# default=os.path.join(os.environ['HOME'], 'Documents', 'Logs'),
# help='Log files in DIR (default %(default)s).')
# prsr.add_argument('-R', '--root', dest='root', metavar='DIR',
# default='/global/project/projectdirs',
# help='Path containing metadata directory (default %(default)s).')
# prsr.add_argument('-s', '--sql', dest='sql', action='store_true',
# help='Output SQL statements instead of loading database.')
prsr.add_argument('-o', '--overwrite', action='store_true',
dest='overwrite', help='Overwrite any existing database.')
prsr.add_argument('-v', '--verbose', action='store_true', dest='verbose',
help='Log extra debugging information.')
prsr.add_argument('release', metavar='RELEASE',
help='Release to examine, e.g. "dr15".')
prsr.add_argument('database', metavar='DB',
help='Path to database file.')
return prsr.parse_args()
def main():
    """Entry point for command-line scripts.

    Creates/opens the SQLite database, registers the filesystems given on
    the command line, then scans each one for directories and (unless
    ``-F`` was given) files. Each stage is skipped when its data already
    exists, so the script can be re-run to resume an interrupted scan.

    Returns
    -------
    :class:`int`
        An integer suitable for passing to :func:`sys.exit`.
    """
    #
    # Arguments
    #
    options = _options()
    #
    # Initialize database.
    #
    if options.overwrite and os.path.exists(options.database):
        os.remove(options.database)
    # NOTE(review): this rebinds only the local name; the module-level
    # 'engine' imported from .db stays None. The Session binding below is
    # what the rest of the code relies on.
    engine = create_engine('sqlite:///'+options.database, echo=options.verbose)
    Session.remove()
    Session.configure(bind=engine, autocommit=False,
                      autoflush=True, expire_on_commit=True)
    Base.metadata.create_all(engine)
    #
    # Add filesystems. .one() raises NoResultFound on an empty table, which
    # triggers the initial insert.
    # NOTE(review): if several filesystems are already registered, .one()
    # raises MultipleResultsFound, which is NOT caught here — confirm that
    # this path cannot occur or add a handler.
    #
    try:
        q = Session.query(FileSystem).one()
    except NoResultFound:
        Session.add_all([FileSystem(name=os.path.join(root, options.release))
                         for root in options.filesystem])
        Session.commit()
    #
    # Scan Directories.
    #
    last_id = 0
    for fs in Session.query(FileSystem).all():
        if os.path.exists(fs.name):
            try:
                q = Session.query(Directory).filter(Directory.filesystem_id == fs.id).one()
            except NoResultFound:
                # Not scanned yet; ids continue after the previous filesystem.
                last_id = directories(fs, last_id+1)
            except MultipleResultsFound:
                # Already scanned; pick up the highest id used so far.
                last_id = Session.query(func.max(Directory.id)).scalar()
            else:
                #
                # Apparently there was exactly one directory.
                # This is not as weird as it sounds, because the release
                # directory in the filesystem may be present but empty.
                #
                last_id = q.id
    #
    # Scan files.
    #
    if not options.skip_files:
        for fs in Session.query(FileSystem).all():
            if os.path.exists(fs.name):
                try:
                    q = Session.query(File).join(Directory).filter(Directory.filesystem_id == fs.id).one()
                except NoResultFound:
                    # No files recorded yet: scan every non-empty directory.
                    for d in Session.query(Directory).filter(Directory.filesystem_id == fs.id).filter(Directory.nfiles > 0).all():
                        files(d)
                except MultipleResultsFound:
                    #
                    # Already scanned.
                    #
                    pass
                else:
                    #
                    # Apparently there was exactly one file. OK, fine.
                    #
                    pass
    #
    # Exit gracefully.
    #
    Session.close()
    return 0
|
{"/comparator/test/test_top_level.py": ["/comparator/__init__.py"], "/comparator/find.py": ["/comparator/db.py"], "/comparator/initialize.py": ["/comparator/db.py", "/comparator/find.py"]}
|
11,860
|
Wentao795/face_torch
|
refs/heads/master
|
/train_softmax.py
|
from config import config
from model.model import MobileFaceNet,Am_softmax,Arcface,Softmax
from torch.nn import DataParallel
from dataset.dataloder import Train_DATA
import torch.nn as nn
import torch.optim as optim
from torch.utils.data import DataLoader
from symbols.utils import Metric
import os
from torch.autograd import Variable
import torchvision
import torch
import numpy as np
def main():
    """Train MobileFaceNet with the head selected by ``config.loss_type``
    (0 -> Softmax, 1 -> Arcface, otherwise Am_softmax), saving a checkpoint
    after every epoch.
    """
    # Model (replicated across the configured GPUs).
    model = MobileFaceNet(config.embedding_size)
    model = DataParallel(model, device_ids=config.gpu_id)
    if config.loss_type == 0:
        loss_cess = Softmax()
    elif config.loss_type == 1:
        loss_cess = Arcface(config.embedding_size, config.num_classe, config.margin_s, config.margin_m)
    else:
        loss_cess = Am_softmax(config.embedding_size, config.num_classe)
    loss_cess = DataParallel(loss_cess, device_ids=config.gpu_id)
    train_data = Train_DATA(config.train_data)
    train_loader = DataLoader(train_data, batch_size=config.batch_size, shuffle=True,
                              num_workers=config.num_work, pin_memory=True)
    # BUG FIX: the loss, optimizer and per-batch tensors were previously
    # wrapped in DataParallel as well. DataParallel only wraps nn.Modules
    # being replicated; wrapping the optimizer broke optimizer.step(), and
    # the target batch was accidentally rebuilt from the *input* tensor.
    criterion = nn.CrossEntropyLoss()
    optimizer = optim.SGD(model.parameters(), lr=config.lr, momentum=config.momentum,
                          weight_decay=config.weight_decay)
    scheduler = optim.lr_scheduler.StepLR(optimizer, step_size=20, gamma=0.1)
    train_loss = Metric()
    train_acc = Metric()
    best_precision1 = 0
    start_epoch = 0
    fold = 0
    if config.resume:
        # Restore model/optimizer state and bookkeeping from a checkpoint.
        checkpoint = torch.load(config.model_path)
        start_epoch = checkpoint["epoch"]
        fold = checkpoint["fold"]
        best_precision1 = checkpoint["best_precision1"]
        model.load_state_dict(checkpoint["state_dict"])
        optimizer.load_state_dict(checkpoint["optimizer"])
    print("input data ,start run ,this is %d epoch "%(start_epoch))
    if not os.path.exists(config.model_output):
        os.makedirs(config.model_output)
    for epoch in range(start_epoch, config.end_epoch):
        scheduler.step(epoch)
        for iter, (input, target) in enumerate(train_loader):
            model.train()
            input = Variable(input)
            # BUG FIX: target was previously overwritten with the input batch.
            target = Variable(torch.from_numpy(np.array(target)).long())
            optimizer.zero_grad()
            embeddings = model(input)
            output = loss_cess(embeddings, target)
            loss = criterion(output, target)
            loss.backward()
            optimizer.step()
            # Running accuracy from the argmax of the head's logits.
            output = output.data.cpu().numpy()
            output = np.argmax(output, axis=1)
            label = target.data.cpu().numpy()
            acc = np.mean((label == output).astype(int))
            train_loss.updata(loss.data.cpu().numpy(), input.size(0))
            train_acc.updata(acc, input.size(0))
            if iter % 20 == 0:
                print("Add valyue loss:%.3f acc:%.3f"%(train_loss.avg, train_acc.avg))
        # NOTE(review): is_best is computed but never used (checkpoint is
        # saved unconditionally each epoch) — confirm intended.
        is_best = train_acc.avg > best_precision1
        best_precision1 = max(train_acc.avg, best_precision1)
        model_savename = config.model_output+'/'+'epoch%d'%epoch+'_checkpoint.pth.tar'
        torch.save({
            "epoch": epoch+1,
            "model_name": config.model_name,
            "state_dict": model.state_dict(),
            "best_precision1": best_precision1,
            "optimizer": optimizer.state_dict(),
            "fold": fold,
            "train_loss": train_loss.avg
        }, model_savename)
|
{"/train_softmax.py": ["/config.py", "/model/model.py", "/dataset/dataloder.py"], "/dataset/path.py": ["/config.py"]}
|
11,861
|
Wentao795/face_torch
|
refs/heads/master
|
/dataset/dataloder.py
|
from PIL import Image
from torchvision import transforms as T
class Train_DATA(object):
    """Dataset over an index file of tab-separated ``<image path> <label>`` lines.

    Each line of ``root`` names one training image and its integer class
    label. Images are loaded with PIL and passed through a horizontal-flip /
    to-tensor / normalize pipeline.
    """

    def __init__(self, root):
        imgs = []
        # BUG FIX: the index file was opened but never closed; use a
        # context manager so the handle is released promptly.
        with open(root, 'r') as index_file:
            for line in index_file.readlines():
                imgs.append(line.replace('\n', '').split('\t'))
        self.imgs = imgs
        self.transforms = T.Compose([
            T.RandomHorizontalFlip(),
            T.ToTensor(),
            T.Normalize([0.5, 0.5, 0.5], [0.5, 0.5, 0.5])])

    def __getitem__(self, index):
        """Return ``(transformed image tensor, int label)`` for ``index``."""
        img_path = self.imgs[index][0]
        label = int(self.imgs[index][1])
        data = Image.open(img_path)
        data = self.transforms(data)
        return data, label

    def __len__(self):
        """Number of samples listed in the index file."""
        return len(self.imgs)
|
{"/train_softmax.py": ["/config.py", "/model/model.py", "/dataset/dataloder.py"], "/dataset/path.py": ["/config.py"]}
|
11,862
|
Wentao795/face_torch
|
refs/heads/master
|
/dataset/path.py
|
from config import config
import os
def main():
srcfloder = config.train_path
outFile = open(config.train_path.split('/')[-1],'w')
childFolders = os.listdir(srcfloder)
num = 0
for childfloder in childFolders:
secondfile = srcfloder + '/' + childfloder
allFiles = os.listdir(secondfile)
for fileline in allFiles:
print(num)
imgfile = secondfile + '/' + fileline +'\t'+str(num)+'\n'
outFile.write(imgfile)
outFile.flush()
num += 1
outFile.close()
if __name__ == "__main__":
main()
|
{"/train_softmax.py": ["/config.py", "/model/model.py", "/dataset/dataloder.py"], "/dataset/path.py": ["/config.py"]}
|
11,863
|
Wentao795/face_torch
|
refs/heads/master
|
/model/model.py
|
from torch.nn import Linear,Conv2d,BatchNorm1d,BatchNorm2d,PReLU,ReLU,Sigmoid,Dropout2d,Dropout,AvgPool2d,MaxPool2d,AdaptiveAvgPool2d,Sequential,Module,Parameter
import torch.nn.functional as F
import torch
import math
import pdb
from collections import namedtuple
class Flatten(Module):
    """Collapse every dimension after the batch dimension into one."""

    def forward(self, input):
        batch = input.size(0)
        return input.view(batch, -1)
def l2_norm(input, axis=1):
    """Normalize ``input`` to unit L2 norm along ``axis``."""
    # keepdim=True so the norm broadcasts back over the reduced axis.
    norm = torch.norm(input, 2, axis, True)
    return torch.div(input, norm)
##input attetion??
class SEModule(Module):
    """Squeeze-and-excitation gate: rescales each channel by a learned
    sigmoid weight computed from globally average-pooled features."""

    def __init__(self, channels, reduction):
        super(SEModule, self).__init__()
        self.avg_pool = AdaptiveAvgPool2d(1)
        self.fc1 = Conv2d(channels, channels // reduction, kernel_size=1, padding=0, bias=False)
        self.relu = ReLU(inplace=True)
        self.fc2 = Conv2d(channels // reduction, channels, kernel_size=1, padding=0, bias=False)
        self.sigmoid = Sigmoid()

    def forward(self, x):
        gate = self.avg_pool(x)
        gate = self.relu(self.fc1(gate))
        gate = self.sigmoid(self.fc2(gate))
        return x * gate
class bottleneck_IR(Module):
    """Improved-residual (IR) block: BN -> 3x3 conv -> PReLU -> strided
    3x3 conv, added to an identity or 1x1-projection shortcut."""
    def __init__(self, in_channel, depth, stride):
        super(bottleneck_IR, self).__init__()
        if in_channel == depth:
            # Identity shortcut; MaxPool2d(1, stride) only applies the stride.
            # NOTE(review): 'shorcut_layer' is misspelled but kept, since
            # renaming the attribute would change state_dict keys.
            self.shorcut_layer = MaxPool2d(1, stride)
        else:
            # 1x1 projection to match the output depth.
            self.shorcut_layer = Sequential(
                Conv2d(in_channel, depth, (1, 1), stride, bias=False),
                BatchNorm2d(depth)
            )
        self.res_layer = Sequential(
            BatchNorm2d(in_channel),
            Conv2d(in_channel, depth, (3, 3), (1, 1), 1, bias=False),
            PReLU(depth),
            Conv2d(depth, depth, (3, 3), stride, 1, bias=False),
            BatchNorm2d(depth)
        )
    def forward(self, x):
        shortcut = self.shorcut_layer(x)
        res = self.res_layer(x)
        return res + shortcut
class bottleneck_IR_SE(Module):
    """IR residual block with a squeeze-and-excitation gate (SEModule)
    appended to the residual branch."""
    def __init__(self, in_channel, depth, stride):
        super(bottleneck_IR_SE, self).__init__()
        if in_channel == depth:
            # Identity shortcut; MaxPool2d(1, stride) only applies the stride.
            self.shortcut_layer = MaxPool2d(1, stride)
        else:
            # 1x1 projection to match the output depth.
            self.shortcut_layer = Sequential(
                Conv2d(in_channel, depth, (1, 1), stride, bias=False),
                BatchNorm2d(depth)
            )
        self.res_layer = Sequential(
            BatchNorm2d(in_channel),
            Conv2d(in_channel, depth, (3, 3), (1, 1), 1, bias=False),
            PReLU(depth),
            Conv2d(depth, depth, (3, 3), stride, 1, bias=False),
            BatchNorm2d(depth),
            SEModule(depth, 16)
        )
    def forward(self, x):
        shortcut = self.shortcut_layer(x)
        res = self.res_layer(x)
        return res + shortcut
class Bottleneck(namedtuple('Block', ['in_channel', 'depth', 'stride'])):
    """Configuration tuple for one residual unit (see get_block/get_blocks)."""
def get_block(in_channel, depth, num_units, stride=2):
    """One stage: a strided unit followed by ``num_units - 1`` stride-1 units."""
    units = [Bottleneck(in_channel, depth, stride)]
    units.extend(Bottleneck(depth, depth, 1) for _ in range(num_units - 1))
    return units
def get_blocks(num_layers):
    """Return the per-stage Bottleneck configurations for a backbone of
    50, 100 or 152 layers.

    Raises
    ------
    ValueError
        If ``num_layers`` is not one of 50, 100, 152. (The original fell
        through with ``blocks`` unassigned and crashed with
        UnboundLocalError.)
    """
    if num_layers == 50:
        blocks = [
            get_block(in_channel=64, depth=64, num_units=3),
            get_block(in_channel=64, depth=128, num_units=4),
            get_block(in_channel=128, depth=256, num_units=14),
            get_block(in_channel=256, depth=512, num_units=3),
        ]
    elif num_layers == 100:
        blocks = [
            get_block(in_channel=64, depth=64, num_units=3),
            get_block(in_channel=64, depth=128, num_units=13),
            get_block(in_channel=128, depth=256, num_units=30),
            get_block(in_channel=256, depth=512, num_units=3),
        ]
    elif num_layers == 152:
        blocks = [
            get_block(in_channel=64, depth=64, num_units=3),
            get_block(in_channel=64, depth=128, num_units=8),
            get_block(in_channel=128, depth=256, num_units=36),
            get_block(in_channel=256, depth=512, num_units=3),
        ]
    else:
        raise ValueError("num_layers must be 50, 100 or 152, got %r" % (num_layers,))
    return blocks
class Backbone(Module):
    """ResNet-style "IR" face-recognition backbone.

    Maps a 3-channel image batch to an L2-normalized 512-d embedding.
    The Linear(512*7*7, 512) output layer assumes a 7x7 spatial map at
    that point — presumably 112x112 input; TODO confirm.
    """
    def __init__(self, num_layers, drop_ration, mode='ir'):
        super(Backbone, self).__init__()
        assert num_layers in [50, 100, 152]
        assert mode in ['ir', 'ir_se']
        blocks = get_blocks(num_layers)
        # Plain IR blocks or their squeeze-and-excitation variants.
        if mode == 'ir':
            unit_module = bottleneck_IR
        elif mode == 'ir_se':
            unit_module = bottleneck_IR_SE
        self.input_layer = Sequential(
            Conv2d(3, 64, (3, 3), 1, 1, bias=False),
            BatchNorm2d(64),
            PReLU(64)
        )
        self.output_layer = Sequential(
            BatchNorm2d(512),
            Dropout(drop_ration),
            Flatten(),
            Linear(512*7*7, 512),
            BatchNorm1d(512)
        )
        # Flatten the per-stage configurations into one sequential body.
        modules = []
        for block in blocks:
            for bottleneck in block:
                modules.append(unit_module(bottleneck.in_channel,
                                           bottleneck.depth,
                                           bottleneck.stride))
        self.body = Sequential(*modules)
    def forward(self, x):
        x = self.input_layer(x)
        x = self.body(x)
        x = self.output_layer(x)
        # Unit-length embedding.
        return l2_norm(x)
class Conv_block(Module):
    """Conv2d -> BatchNorm2d -> PReLU, with bias-free convolution."""

    def __init__(self, in_c, out_c, kernel=(1, 1), stride=(1, 1), padding=(1, 1), groups=1):
        super(Conv_block, self).__init__()
        self.conv = Conv2d(in_c, out_channels=out_c, kernel_size=kernel, groups=groups,
                           stride=stride, padding=padding, bias=False)
        self.bn = BatchNorm2d(out_c)
        self.prelu = PReLU(out_c)

    def forward(self, x):
        return self.prelu(self.bn(self.conv(x)))
class Linear_block(Module):
    """Conv2d -> BatchNorm2d with no activation ("linear" bottleneck)."""

    def __init__(self, in_c, out_c, kernel=(1, 1), stride=(1, 1), padding=(0, 0), groups=1):
        super(Linear_block, self).__init__()
        self.conv = Conv2d(in_c, out_channels=out_c, kernel_size=kernel, groups=groups,
                           stride=stride, padding=padding, bias=False)
        self.bn = BatchNorm2d(out_c)

    def forward(self, x):
        return self.bn(self.conv(x))
class Depth_Wise(Module):
    """Pointwise expand -> depthwise conv -> pointwise (linear) project,
    with an optional identity residual connection."""

    def __init__(self, in_c, out_c, residual=False, kernel=(3, 3), stride=(2, 2),
                 padding=(1, 1), groups=1):
        super(Depth_Wise, self).__init__()
        self.conv = Conv_block(in_c, out_c=groups, kernel=(1, 1), padding=(0, 0), stride=(1, 1))
        self.conv_dw = Conv_block(groups, groups, groups=groups, kernel=kernel,
                                  padding=padding, stride=stride)
        self.project = Linear_block(groups, out_c, kernel=(1, 1), padding=(0, 0), stride=(1, 1))
        self.residual = residual

    def forward(self, x):
        out = self.project(self.conv_dw(self.conv(x)))
        return x + out if self.residual else out
class Residual(Module):
    """A stack of ``num_block`` residual Depth_Wise units with shared shape."""

    def __init__(self, c, num_block, groups, kernel=(3, 3), stride=(1, 1), padding=(1, 1)):
        super(Residual, self).__init__()
        units = [Depth_Wise(c, c, residual=True, kernel=kernel, padding=padding,
                            stride=stride, groups=groups)
                 for _ in range(num_block)]
        self.model = Sequential(*units)

    def forward(self, x):
        return self.model(x)
class MobileFaceNet(Module):
    """MobileFaceNet embedding network built from depthwise-separable blocks.

    Maps a 3-channel image batch to an L2-normalized embedding of
    ``embedding_size`` dims. The final 7x7 depthwise conv assumes a 7x7
    feature map at that stage — presumably 112x112 input; TODO confirm.
    """
    def __init__(self, embedding_size):
        super(MobileFaceNet, self).__init__()
        self.conv1 = Conv_block(3, 64, kernel=(3, 3), stride=(2, 2), padding=(1, 1))
        self.conv2_dw = Conv_block(64, 64, kernel=(3, 3), stride=(1, 1), padding=(1, 1), groups=64)
        self.conv_23 = Depth_Wise(64, 64, kernel=(3, 3), stride=(2, 2), padding=(1, 1), groups=128)
        self.conv_3 = Residual(64, num_block=4, groups=128, kernel=(3, 3), stride=(1, 1), padding=(1, 1))
        self.conv_34 = Depth_Wise(64, 128, kernel=(3, 3), stride=(2, 2), padding=(1, 1), groups=256)
        self.conv_4 = Residual(128, num_block=6, groups=256, kernel=(3, 3), stride=(1, 1), padding=(1, 1))
        self.conv_45 = Depth_Wise(128, 128, kernel=(3, 3), stride=(2, 2), padding=(1, 1), groups=512)
        # NOTE(review): this Residual uses stride=(2,2) unlike the other
        # residual stages (stride 1); a strided residual add would change
        # spatial shape — confirm this is intentional.
        self.conv_5 = Residual(128, num_block=2, groups=256, kernel=(3, 3), stride=(2, 2), padding=(1, 1))
        self.conv_6_sep = Conv_block(128, 512, kernel=(1, 1), stride=(1, 1), padding=(0, 0))
        # Global depthwise conv collapsing the 7x7 map to 1x1.
        self.conv_6_dw = Linear_block(512, 512, groups=512, kernel=(7, 7), stride=(1, 1), padding=(0, 0))
        self.conv_6_flatten = Flatten()
        self.linear = Linear(512, embedding_size, bias=False)
        self.bn = BatchNorm1d(embedding_size)
    def forward(self, x):
        out = self.conv1(x)
        out = self.conv2_dw(out)
        out = self.conv_23(out)
        out = self.conv_3(out)
        out = self.conv_34(out)
        out = self.conv_4(out)
        out = self.conv_45(out)
        out = self.conv_5(out)
        out = self.conv_6_sep(out)
        out = self.conv_6_dw(out)
        out = self.conv_6_flatten(out)
        out = self.linear(out)
        out = self.bn(out)
        # Unit-length embedding.
        return l2_norm(out)
class Arcface(Module):
    """ArcFace head: additive *angular* margin logits.

    Computes cos(theta) between embeddings and L2-normalized class-weight
    columns, replaces each sample's target-class entry with
    cos(theta + m), and scales everything by s.
    """
    def __init__(self, embedding_size=512, classnum=51332, s=64, m=0.5):
        super(Arcface, self).__init__()
        self.classnum = classnum
        # Weight matrix with one column per class.
        self.kernel = Parameter(torch.Tensor(embedding_size, classnum))
        self.kernel.data.uniform_(-1, 1).renorm_(2, 1, 1e-5).mul_(1e5)
        self.m = m  # angular margin
        self.s = s  # feature scale
        # Precomputed terms for cos(theta + m) = cos*cos_m - sin*sin_m.
        self.cos_m = math.cos(m)
        self.sin_m = math.sin(m)
        self.mm = self.sin_m * m
        # Past this threshold theta + m would exceed pi; a linear fallback
        # (cos_theta - mm) is used there instead.
        self.threshold = math.cos(math.pi - m)
    def forward(self, embedding, label):
        nB = len(embedding)
        # Normalize weight columns so the dot product is a cosine.
        kernel_norm = l2_norm(self.kernel, axis=0)
        cos_theta = torch.mm(embedding, kernel_norm)
        cos_theta = cos_theta.clamp(-1, 1)  # numerical safety
        cos_theta_2 = torch.pow(cos_theta, 2)
        sin_theta_2 = 1 - cos_theta_2
        sin_theta = torch.sqrt(sin_theta_2)
        # cos(theta + m) via the angle-addition formula.
        cos_theta_m = (cos_theta*self.cos_m - sin_theta*self.sin_m)
        cond_v = cos_theta - self.threshold
        cond_mask = cond_v <= 0
        keep_val = (cos_theta - self.mm)
        cos_theta_m[cond_mask] = keep_val[cond_mask]
        output = cos_theta * 1.0  # copy: non-target logits stay plain cosines
        idx_ = torch.arange(0, nB, dtype=torch.long)
        # Apply the margin only at each sample's target class.
        output[idx_, label] = cos_theta_m[idx_, label]
        output *= self.s
        return output
class Am_softmax(Module):
    """Additive-margin softmax head (CosFace): subtracts a fixed margin m
    from the target-class cosine and scales the logits by s."""

    def __init__(self, embedding_size=512, classnum=51332):
        super(Am_softmax, self).__init__()
        self.classnum = classnum
        # Weight matrix with one column per class; same init as Arcface.
        self.kernel = Parameter(torch.Tensor(embedding_size, classnum))
        self.kernel.data.uniform_(-1, 1).renorm_(2, 1, 1e-5).mul_(1e5)
        self.m = 0.35  # additive cosine margin
        self.s = 30    # feature scale

    def forward(self, embbedings, label):
        kernel_norm = l2_norm(self.kernel, axis=0)
        cos_theta = torch.mm(embbedings, kernel_norm)
        cos_theta = cos_theta.clamp(-1, 1)  # numerical safety
        phi = cos_theta - self.m
        # BUG FIX: removed the dead (and misspelled) assignment
        # 'lable = label.view(-1,1)' — it was never used.
        # One-hot mask selecting each sample's target class.
        index = cos_theta.data * 0.0
        index.scatter_(1, label.data.view(-1, 1), 1)
        index = index.byte()
        output = cos_theta * 1.0  # copy: non-target logits unchanged
        output[index] = phi[index]
        output *= self.s
        return output
class Softmax(Module):
    """Placeholder classification head for ``loss_type == 0``.

    NOTE(review): no ``forward`` is implemented, yet train_softmax.py calls
    this head with (embeddings, target); that path will fail at call time —
    presumably a linear classifier was intended here.
    """
    pass
|
{"/train_softmax.py": ["/config.py", "/model/model.py", "/dataset/dataloder.py"], "/dataset/path.py": ["/config.py"]}
|
11,864
|
Wentao795/face_torch
|
refs/heads/master
|
/config.py
|
import os
class Defaultconfig(object):
    """Central training configuration, consumed via the module-level
    ``config`` instance."""
    # Root folder of per-class image subfolders (empty by default; must be
    # set before dataset/path.py is run).
    train_path = ''
    # Index file written by dataset/path.py, named after the last path
    # component of train_path.
    train_data = './dataset/' + train_path.split('/')[-1]
    # Face embedding dimensionality produced by MobileFaceNet.
    embedding_size = 128
    # Head selection: 0 -> Softmax, 1 -> Arcface, any other -> Am_softmax
    # (train_softmax.py's else branch).
    loss_type = 0
    # Number of identity classes for the classification head.
    num_classe = 180000
    # Arcface scale (s) and margin (m).
    margin_s = 64
    margin_m = 0.5
    # GPU ids passed to DataParallel.
    gpu_id = [0, 1, 2, 3]
    # SGD hyperparameters.
    lr = 0.1
    momentum = 0.9
    weight_decay = 5e-4
    batch_size = 512
    # DataLoader worker processes.
    num_work = 128
    # Truthy to resume training from model_path.
    resume = 0
    model_path = ''
    # Directory where per-epoch checkpoints are written.
    model_output = ''
    end_epoch = 100
    model_name = 'face_mobile'


config = Defaultconfig()
|
{"/train_softmax.py": ["/config.py", "/model/model.py", "/dataset/dataloder.py"], "/dataset/path.py": ["/config.py"]}
|
11,902
|
gok03/slack_clone
|
refs/heads/master
|
/core/views.py
|
from core.models import Comments, User
from core.forms import *
from django.shortcuts import render, render_to_response
from django.http import HttpResponse, HttpResponseServerError, HttpResponseRedirect
from django.views.decorators.csrf import csrf_exempt, csrf_protect
from django.contrib.sessions.models import Session
from django.contrib.auth.decorators import login_required
from django.contrib.auth import logout
from django.template import RequestContext
import redis
@login_required
def home(request):
    """Render the chat home page listing the distinct channel names."""
    channels = Comments.objects.order_by().values('channel').distinct()
    return render_to_response('home.html', {'user': request.user, 'room': channels})
@csrf_exempt
def node_api(request):
    """Endpoint for the external chat bridge (CSRF-exempt).

    Resolves the Django user from the posted session id, persists the
    comment, then publishes ``channel~user: comment`` on the Redis ``chat``
    channel for connected listeners.
    """
    try:
        #Get User from sessionid
        session = Session.objects.get(session_key=request.POST.get('sessionid'))
        user_id = session.get_decoded().get('_auth_user_id')
        user = User.objects.get(id=user_id)
        #Create comment
        Comments.objects.create(user=user, text=request.POST.get('comment'), channel= request.POST.get('channel'))
        #Once comment has been created post it to the chat channel
        r = redis.StrictRedis(host='localhost', port=6379, db=0)
        r.publish('chat', request.POST.get('channel') +"~"+ user.username + ': ' + request.POST.get('comment'))
        return HttpResponse("Everything worked :)")
    except Exception as e:
        # Deliberately broad: any failure (bad session, missing user,
        # Redis down) is reported to the bridge as a 500 with the message.
        return HttpResponseServerError(str(e))
@csrf_protect
def register(request):
    """Handle user sign-up.

    POST with a valid form: create the user and redirect to the success
    page.  GET, or POST with an invalid form: render the registration
    template (invalid forms keep their bound errors for display).
    """
    if request.method == 'POST':
        form = RegistrationForm(request.POST)
        if form.is_valid():
            # create_user hashes the password; the returned user object was
            # previously bound to an unused local, now dropped.
            User.objects.create_user(
                username=form.cleaned_data['username'],
                password=form.cleaned_data['password1'],
                email=form.cleaned_data['email']
            )
            return HttpResponseRedirect('/register/success/')
    else:
        form = RegistrationForm()
    variables = RequestContext(request, {
        'form': form
    })
    return render_to_response(
        'registration/register.html',
        variables,
    )
def register_success(request):
    """Static confirmation page shown after a successful registration."""
    return render_to_response('registration/success.html')
def logout_page(request):
    """Terminate the session, then send the user back to the landing page."""
    logout(request)
    return HttpResponseRedirect('/')
@login_required
def homes(request):
    """Render the home template for the authenticated user."""
    context = {'user': request.user}
    return render_to_response('home.html', context)
def channel(request, chatroom):
    """Render the latest 100 messages of channels whose name contains *chatroom*.

    NOTE(review): __contains is a substring match, so 'dev' also hits 'devops'.
    """
    comments = Comments.objects.filter(channel__contains = chatroom)[0:100]
    chat = chatroom
    # locals() deliberately exposes comments/chat/chatroom/request to the
    # template, so these local names are part of the template contract.
    return render(request, 'index.html', locals())
|
{"/core/views.py": ["/core/models.py"]}
|
11,903
|
gok03/slack_clone
|
refs/heads/master
|
/core/models.py
|
from django.db import models
from django.contrib.auth.models import User
# Create your models here.
class Comments(models.Model):
    """A single chat message posted by *user* into *channel*."""
    user = models.ForeignKey(User)  # NOTE(review): Django >= 2.0 requires an on_delete argument here
    text = models.CharField(max_length=255)  # message body
    channel = models.CharField(max_length=50)  # chat room name
|
{"/core/views.py": ["/core/models.py"]}
|
11,999
|
tjacek/hsne
|
refs/heads/master
|
/hsne.py
|
import os,time
import utils,knn,markov,tsne,plot
import numpy as np
from scipy import sparse
from knn import NNGraph
from sklearn.datasets import fetch_mldata
#def make_dataset(dataset_name="MNIST original",out_path="mnist_d/imgs"):
# dataset=utils.downsample_dataset(dataset_name)
# utils.save_as_img(dataset,out_path)
#def make_graph(dataset_name="mnist_d/imgs",out_path="mnist_d/nn_graph",k=100):
# dataset=utils.read_as_dataset(dataset_name)
# print("dataset loaded")
# knn.save_nn_graph(dataset,out_path)
def prepare_hsne(graph_path='mnist_d/nn_graph',
                 scale_path='mnist_d/scale1'):
    """Build the first-scale Markov chain from a saved kNN graph.

    Creates *scale_path* (os.mkdir raises if it already exists) and writes
    the chain's transition matrix and state list there as text files.
    """
    os.mkdir(scale_path)
    trans=scale_path+ "/trans.txt"
    states=scale_path+ "/states.txt"
    nn_graph=knn.read_nn_graph(graph_path)
    print("nn graph loaded")
    t0=time.time()
    mc=markov.make_eff_markov_chain(nn_graph)
    print("markov chain constructed %d" % (time.time()-t0))
    mc.save(trans,states)
def hsne(dataset_name="MNIST original",
         scale_path='mnist_d/scale1',
         weights_in=None):
    """Run one h-SNE scale: compute T, embed it, plot, and persist results.

    *weights_in*, when given, points at the previous scale's directory so
    its W.txt landmark weights are reused.
    """
    landmarks,sparse_pairs=load_hsne(scale_path)
    W=get_weights(weights_in,sparse_pairs)
    T,W_next=tsne.compute_t(landmarks,sparse_pairs,W)
    t_embd=time.time()
    embd=tsne.create_embedding(T)
    print("embeding created %d" % (time.time() - t_embd))
    mnist = fetch_mldata(dataset_name)
    plot.plot_embedding(embd,mnist.target,landmarks,title="beta_threshold=1.5")
    save_hsne(T,embd,W_next,scale_path)
def load_hsne(scale_path):
    """Read one scale's landmark ids and sparse influence pairs."""
    landmarks_path = scale_path + "/landmarks.txt"
    print(landmarks_path)
    landmarks = utils.read_ints(landmarks_path)
    print("landmarks loaded")
    pairs_path = scale_path + "/influence.txt"
    sparse_pairs = utils.read_pairs(pairs_path)
    print("pairs loaded %d" % len(sparse_pairs))
    return landmarks, sparse_pairs
def save_hsne(T,embd,W_next,scale_path):
    """Persist the transition matrix, next-scale weights and embedding."""
    utils.save_object(T, scale_path + "/T.txt")
    utils.save_array(W_next, scale_path + "/W.txt")
    utils.save_object(embd, scale_path + "/embd")
def next_iter(in_scale="mnist_d/scale1",out_scale="mnist_d/scale2" ):
    """Promote one scale: cumulate its T matrix and write it as the next scale.

    NOTE(review): out_scale must already exist — the mkdir call is commented out.
    """
#    os.mkdir(out_scale)
    landmarks,trans=load_iter(in_scale)
    trans=markov.to_cum_matrix(trans)
    # the states file is one comma-separated line of landmark ids
    states_str=",".join([ str(l) for l in landmarks])
    save_iter(trans,states_str,out_scale)
def load_iter(in_scale):
    """Load the landmark ids and pickled transition matrix of *in_scale*."""
    landmarks = utils.read_ints(in_scale + "/landmarks.txt")
    print("landmarks loaded")
    trans = utils.read_object(in_scale + "/T.txt")
    print("trans matrix loaded")
    return landmarks, trans
def save_iter(trans,states_str,out_scale):
    """Write the cumulative transition matrix and its state list as text."""
    utils.save_array(trans, out_scale + "/trans.txt")
    utils.save_str(states_str, out_scale + "/states.txt")
def get_weights(weights_in,sparse_pairs):
    """Return landmark weights as an (n_points, 1) sparse DOK column.

    With weights_in=None every point gets weight 1; otherwise the weights
    are read from <weights_in>/W.txt (one value per line, comma delimiter).
    """
    if weights_in is None:
        n_points = len(sparse_pairs)
        return sparse.dok_matrix(np.ones((n_points, 1)), dtype=np.float32)
    loaded = np.loadtxt(weights_in + "/W.txt", delimiter=',')
    column = np.expand_dims(loaded, axis=1)
    return sparse.dok_matrix(column)
#prepare_hsne(graph_path='mnist_pca/graph',scale_path='mnist_pca/scale1')
#hsne(scale_path="mnist_pca/scale1",weights_in=None)#"mnist/scale1")
# Script mode: promote scale1 of the PCA-reduced MNIST run to scale2.
next_iter(in_scale="mnist_pca/scale1",out_scale="mnist_pca/scale2")
|
{"/hsne.py": ["/utils.py", "/knn.py", "/markov.py", "/tsne.py", "/plot.py"], "/knn.py": ["/markov.py", "/utils.py"], "/tsne.py": ["/knn.py", "/markov.py", "/utils.py", "/plot.py"], "/reconstruct.py": ["/tsne.py", "/utils.py", "/plot.py"], "/preproc.py": ["/knn.py"], "/markov.py": ["/knn.py", "/utils.py"]}
|
12,000
|
tjacek/hsne
|
refs/heads/master
|
/utils.py
|
import numpy as np
import cv2
import pickle,os,re
#from sets import Set
from scipy.sparse import dok_matrix
import sklearn.datasets.base
from sklearn.datasets import fetch_mldata
def downsample_dataset(dataset_name,factor=10):
    """Fetch *dataset_name* and keep every *factor*-th example and label.

    NOTE(review): fetch_mldata was removed in scikit-learn 0.22 — this
    requires an old sklearn (or a port to fetch_openml).
    """
    dataset=fetch_mldata(dataset_name)
    examples=[ example_i
        for i,example_i in enumerate(dataset.data)
        if((i % factor) ==0)]
    target = [ example_i
        for i,example_i in enumerate(dataset.target)
        if((i % factor) ==0)]
    return sklearn.datasets.base.Bunch(data=examples, target=target)
def save_as_img(dataset,out_path,new_shape=(28,28),selected=None):
    """Write each dataset example to *out_path* as '<index>_<label>.png'.

    If *selected* is given, only those indices are written.
    """
    if selected is not None:
        # bugfix: this used `Set` from the removed py2 `sets` module (its
        # import is commented out above), which raised NameError at runtime.
        # The builtin set gives the intended O(1) membership test.
        selected = set(selected)
    def save_helper(i, img_i):
        img_i = np.reshape(img_i, new_shape)
        cat_i = str(int(dataset.target[i]))
        name_i = out_path+'/'+str(i)+'_'+ cat_i +'.png'
        cv2.imwrite(name_i, img_i)
        print(name_i)
    for i, img_i in enumerate(dataset.data):
        if (selected is None) or (i in selected):
            save_helper(i, img_i)
def read_as_dataset(in_path):
    """Rebuild a Bunch dataset from images written by save_as_img.

    The label is recovered from the second integer in each filename
    ('<index>_<label>.png').
    """
    def read_helper(filename_i):
        img_path_i=in_path+'/'+filename_i
        img_i=cv2.imread(img_path_i,0)  # 0 = load as grayscale
        print(img_i.shape)
        img_i=img_i.flatten()
        cat_i=int(extract_int(filename_i)[1])
        return img_i,cat_i
    imgs=[ read_helper(filename_i)
        for filename_i in os.listdir(in_path)]
    data=np.array([ img_i[0] for img_i in imgs])
    target=np.array([ img_i[1] for img_i in imgs])
    return sklearn.datasets.base.Bunch(data=data, target=target)
def extract_int(str_i):
    """Return every run of digits in *str_i* as a list of strings."""
    # raw string: '\d' in a plain literal is an invalid escape sequence
    # (DeprecationWarning today, SyntaxError in future CPython).
    return re.findall(r'\d+', str_i)
def read_ints(filename):
    """Parse *filename* as one integer per line."""
    with open(filename) as handle:
        return [int(line) for line in handle.readlines()]
def save_str(txt,out_path):
    """Write *txt* to *out_path*, overwriting any existing file."""
    # with-statement guarantees the handle is closed even if write() raises
    with open(out_path, "w") as text_file:
        text_file.write(txt)
def save_array(arr,out_path,prec='%.4e'):
    """Dump *arr* as text: comma-separated columns, one row per line, format *prec*."""
    np.savetxt(out_path, arr, fmt=prec, delimiter=',', newline='\n')
def save_object(nn,path):
    """Pickle *nn* to *path* (binary mode)."""
    # with-statement closes the file even if pickling raises (the old
    # explicit open/close leaked the handle on error)
    with open(path, 'wb') as file_object:
        pickle.dump(nn, file_object)
def read_object(path):
    """Load and return the pickled object stored at *path*."""
    # with-statement closes the file even if unpickling raises
    with open(path, 'rb') as file_object:
        return pickle.load(file_object)
def to_sparse_matrix(sparse_pairs,n_states,n_landmarks):
    """Expand per-state (landmark, value) pairs into a DOK sparse matrix."""
    matrix = dok_matrix((n_states, n_landmarks), dtype=np.float32)
    for row, pairs in enumerate(sparse_pairs):
        for col, value in pairs:
            matrix[row, col] = value
    return matrix
def read_pairs(filename):
    """Parse lines of '(key,value)(key,value)...' into lists of (int, float) tuples."""
    def parse_pair(chunk):
        key, value = chunk.split(",")
        return int(key), float(value)
    def parse_line(line):
        chunks = line.split(")(")
        # strip the opening '(' of the first pair and closing ')' of the last
        chunks[0] = chunks[0].replace("(", "")
        chunks[-1] = chunks[-1].replace(")", "")
        return [parse_pair(c) for c in chunks]
    with open(filename) as handle:
        return [parse_line(line) for line in handle.readlines()]
|
{"/hsne.py": ["/utils.py", "/knn.py", "/markov.py", "/tsne.py", "/plot.py"], "/knn.py": ["/markov.py", "/utils.py"], "/tsne.py": ["/knn.py", "/markov.py", "/utils.py", "/plot.py"], "/reconstruct.py": ["/tsne.py", "/utils.py", "/plot.py"], "/preproc.py": ["/knn.py"], "/markov.py": ["/knn.py", "/utils.py"]}
|
12,001
|
tjacek/hsne
|
refs/heads/master
|
/knn.py
|
from sklearn.datasets import fetch_mldata
from sklearn.neighbors import LSHForest,NearestNeighbors
import time
import markov,utils
class NNGraph(object):
    """Precomputed k-nearest-neighbour graph plus per-point labels."""
    def __init__(self,names,distances,target):
        # names[i]     : neighbour indices of point i
        # distances[i] : matching neighbour distances
        # target       : class label per point
        self.names = names
        self.distances = distances
        self.target = target
    def __len__(self):
        """Number of points in the graph."""
        return len(self.names)
    def __getitem__(self,i):
        """Return (neighbour_ids, neighbour_distances) for point *i*."""
        return (self.names[i], self.distances[i])
def make_nn_graph(dataset,k=100):
    """Compute each point's *k* nearest neighbours and wrap them in an NNGraph."""
    nbrs = NearestNeighbors(n_neighbors=k, algorithm='ball_tree').fit(dataset.data)
#    nbrs=LSHForest(n_estimators=20, n_candidates=200,n_neighbors=k).fit(X)
    distances, indices = nbrs.kneighbors(dataset.data)
    print(indices.shape)
    return NNGraph(indices,distances,dataset.target)
def read_nn_graph(in_path):
    """Unpickle a saved NNGraph, printing how long the load took."""
    start = time.time()
    graph = utils.read_object(in_path)
    print(time.time() - start)
    return graph
def save_nn_graph(data,out_path):
    """Build the kNN graph for *data* and pickle it to *out_path*."""
    start = time.time()
    graph = make_nn_graph(data)
    print(time.time() - start)
    utils.save_object(graph, out_path)
if __name__ == "__main__":
    # Script mode: build and store the kNN graph for the full MNIST set.
    dataset=fetch_mldata("MNIST original")
    save_nn_graph(dataset,"mnist/graph")
|
{"/hsne.py": ["/utils.py", "/knn.py", "/markov.py", "/tsne.py", "/plot.py"], "/knn.py": ["/markov.py", "/utils.py"], "/tsne.py": ["/knn.py", "/markov.py", "/utils.py", "/plot.py"], "/reconstruct.py": ["/tsne.py", "/utils.py", "/plot.py"], "/preproc.py": ["/knn.py"], "/markov.py": ["/knn.py", "/utils.py"]}
|
12,002
|
tjacek/hsne
|
refs/heads/master
|
/tsne.py
|
from sklearn.datasets import fetch_mldata
import time
import knn,markov
from knn import NNGraph
import utils
from sklearn.manifold import TSNE
import numpy as np
import plot
def compute_t(landmarks,sparse_pairs,W):
    """Build the next-scale transition matrix T and landmark weights.

    W is the sparse column of per-point weights from the previous scale
    (shape assumed (n_states, 1) — TODO confirm).  Returns (T, W_next):
    T row-normalised, W_next the dense weight vector at landmark level.
    """
    infl_matrix=make_influence_matrix(landmarks,sparse_pairs)
    t_comp=time.time()
    T=markov.get_prob_matrix(infl_matrix,W)
    print("T matrix computed %d" % (time.time() - t_comp))
    print(T.shape)
    def norm_helper(row):
        # normalise one row in place so it sums to 1
        row/=sum(row)
        return row
    T=np.array([norm_helper(t_i) for t_i in T])
    print("norm %d" % check_norm(T))
    # next-scale weights: total influence mass each landmark receives
    W_next=(W.transpose()*infl_matrix).todense()
    print("W_next"+str(type(W_next)))
    print(W_next.shape)
#    W_next=np.expand_dims(W_next,axis=0)
#    print(W_next.shape)
    return T,W_next
def make_influence_matrix(landmarks,sparse_pairs):
    """Assemble the sparse (n_states x n_landmarks) influence matrix.

    All rows are scaled by the first row's sum — presumably every row
    carries the same total walk count; TODO confirm.
    """
    n_landmarks=len(landmarks)
    print("Number of landmarks %d" % n_landmarks)
    t_sparse=time.time()
    n_states=len(sparse_pairs)
    infl_matrix=utils.to_sparse_matrix(sparse_pairs,n_states,n_landmarks)
    print("sparse matrix created %d" % ( time.time()- t_sparse))
    norm_const=infl_matrix[0].sum()
    infl_matrix/=norm_const
    print("Norm const %d" % norm_const)
    return infl_matrix
def check_norm(T):
    """Check each row of *T* sums to ~1; return total mass, or False on failure."""
    row_sums = np.sum(T, axis=1)
    for total in row_sums:
        if (1.0 - total) > 0.01:
            return False
    return sum(row_sums)
def create_embedding(trans):
    """Symmetrise the transition matrix and embed it in 2-D with t-SNE."""
    P=trans.T +trans
    norm_const=2.0 * float(trans.shape[0])
    P/=norm_const  # joint-probability normalisation, as in standard t-SNE
    embd=TSNE(n_components=2,perplexity=20).fit_transform(P)
    return embd
def select_landmarks(dataset,in_file='landmarks.txt',out_file='landmarks'):
    """Save the images of the landmark indices listed in *in_file*.

    Bugfix: utils.save_as_img takes the dataset object itself (it reads
    .data and .target internally); the old call passed dataset.data and
    dataset.target positionally, so out_path received two values and the
    call raised TypeError.
    """
    landmarks = utils.read_ints(in_file)
    utils.save_as_img(dataset, out_path=out_file, new_shape=(28,28), selected=landmarks)
def compute_influence(graph_path,landmark_file):
    """Load the graph and landmarks, then time markov.compute_influence over them.

    The result is discarded — this function only reports timing.
    """
    nn_graph=knn.read_nn_graph(graph_path)
    print("nn graph loaded")
    mc=markov.make_eff_markov_chain(nn_graph)
    print("markov chain built")
    landmarks=utils.read_ints(landmark_file)
    t0=time.time()
    markov.compute_influence(mc,landmarks,beta=100)
    print("Time %d" % (time.time() - t0))
|
{"/hsne.py": ["/utils.py", "/knn.py", "/markov.py", "/tsne.py", "/plot.py"], "/knn.py": ["/markov.py", "/utils.py"], "/tsne.py": ["/knn.py", "/markov.py", "/utils.py", "/plot.py"], "/reconstruct.py": ["/tsne.py", "/utils.py", "/plot.py"], "/preproc.py": ["/knn.py"], "/markov.py": ["/knn.py", "/utils.py"]}
|
12,003
|
tjacek/hsne
|
refs/heads/master
|
/reconstruct.py
|
import numpy as np
import tsne,utils,plot
from sklearn.datasets import fetch_mldata
def reconstruct(matrix_path,embd_path,out_path):
    """Project every original point into embedding space and pickle the result.

    Each point's position is the influence-weighted sum of the landmark
    embedding coordinates (weights = that point's influence-matrix row).
    """
    embd =utils.read_object(embd_path)
    infl=utils.read_object(matrix_path)
    n_points=infl.shape[0]
    n_embd=embd.shape[0]
    def recon_helper(i):
        print(i)
        # weighted landmark positions for point i, summed into one 2-D coord
        embd_weighted=np.array([ infl[i,j]*embd[j] for j in range(n_embd)])
        rec_i=np.sum(embd_weighted,axis=0)
        print(rec_i.shape)
        return rec_i
    reconstruction=np.array([recon_helper(i) for i in range(n_points)])
    utils.save_object(reconstruction,out_path)
def make_embd(scale_path="mnist/scale1"):
    """Embed a scale's pickled T matrix with t-SNE and pickle the result."""
    trans = utils.read_object(scale_path + "/T.txt")
    embedding = tsne.create_embedding(trans)
    utils.save_object(embedding, scale_path + "/emd")
def show_embd(scale_path="mnist/scale1",dataset_name="MNIST original",threshold=1.5,embd_path=None):
    """Plot a previously computed embedding with its landmark labels.

    *embd_path* overrides the default <scale_path>/emd pickle location.
    """
    if(embd_path is None):
        in_file=scale_path+"/emd"
    else:
        in_file=embd_path
    X=utils.read_object(in_file)
    landmark_file= scale_path+"/landmarks.txt"
    landmarks=utils.read_ints(landmark_file)
    mnist = fetch_mldata(dataset_name)
    title="beta_threshold="+str(threshold)
    plot.plot_embedding(X,mnist.target,landmarks,title=title)
def rec_matrix(scale_paths,out_path):
    """Chain the per-scale influence matrices and pickle the dense product."""
    def inf_matrix(scale_i):
        # load one scale's landmarks + influence pairs as a sparse matrix
        landmarks = utils.read_ints(scale_i + "/landmarks.txt")
        sparse_pairs = utils.read_pairs(scale_i + "/influence.txt")
        return tsne.make_influence_matrix(landmarks, sparse_pairs)
    infl_matrixs = [inf_matrix(scale_i) for scale_i in scale_paths]
    # accumulator renamed: the old code rebound `rec_matrix`, shadowing this
    # function's own name inside its body
    product = infl_matrixs[0]
    for infl_i in infl_matrixs[1:]:
        product = product * infl_i
    utils.save_object(product.todense(), out_path)
#print(rec_matrix.shape)
#for infl_i in infl_matrixs:
# print(infl_i.shape)
#make_embd(scale_path="/mnist/scale1")
#show_embd()
# Script mode: multiply the three MNIST scale influence matrices together.
scales=["mnist/scale1","mnist/scale2","mnist/scale3"]
rec_matrix(scales,"mnist/rec_")
#reconstruct("mnist/rec","mnist/scale2/emd",out_path="mnist/embd")
#show_embd(scale_path="mnist/scale1",embd_path="mnist/embd")
|
{"/hsne.py": ["/utils.py", "/knn.py", "/markov.py", "/tsne.py", "/plot.py"], "/knn.py": ["/markov.py", "/utils.py"], "/tsne.py": ["/knn.py", "/markov.py", "/utils.py", "/plot.py"], "/reconstruct.py": ["/tsne.py", "/utils.py", "/plot.py"], "/preproc.py": ["/knn.py"], "/markov.py": ["/knn.py", "/utils.py"]}
|
12,004
|
tjacek/hsne
|
refs/heads/master
|
/preproc.py
|
import time
import numpy as np
from sklearn.decomposition import PCA
from sklearn.datasets import fetch_mldata
from sklearn.datasets.base import Bunch
import knn
def pca_preproc(dataset_name="MNIST original"):
    """PCA-reduce the dataset, keeping components up to ~95% explained variance."""
    dataset=fetch_mldata(dataset_name)
    n_dim=dataset.data.shape[1]
    transform=PCA(n_components=n_dim)  # fit all components, truncate below
    t0=time.time()
    transformed=transform.fit_transform(dataset.data)
    print("PCA transform %d" % (time.time()-t0))
    n_feats=find_suff_size(transform.explained_variance_ratio_ )
    reduced=transformed[:,:n_feats]
    return Bunch(data=reduced,target=dataset.target)
def find_suff_size(expl_variance,threshold=0.95):
    """Return how many leading components reach *threshold* explained variance.

    Bugfix: the old version returned the index i instead of the count i+1,
    so the component that crossed the threshold was dropped — and a single
    component explaining >= threshold yielded 0 features.
    """
    cumulative = 0.0
    for i, var_i in enumerate(expl_variance):
        cumulative += var_i
        if cumulative >= threshold:
            return i + 1
    # threshold never reached: keep everything
    return len(list(expl_variance))
if __name__ == "__main__":
    # Script mode: PCA-reduce MNIST, then build and store its kNN graph.
    dataset=pca_preproc(dataset_name="MNIST original")
    knn.save_nn_graph(dataset,"mnist_pca/graph")
|
{"/hsne.py": ["/utils.py", "/knn.py", "/markov.py", "/tsne.py", "/plot.py"], "/knn.py": ["/markov.py", "/utils.py"], "/tsne.py": ["/knn.py", "/markov.py", "/utils.py", "/plot.py"], "/reconstruct.py": ["/tsne.py", "/utils.py", "/plot.py"], "/preproc.py": ["/knn.py"], "/markov.py": ["/knn.py", "/utils.py"]}
|
12,005
|
tjacek/hsne
|
refs/heads/master
|
/markov.py
|
import numpy as np
import knn,utils
#from sets import Set
import random
class EffMarkovChain(object):
    """Markov chain over a kNN graph, stored as cumulative transition rows.

    trans[i]  : cumulative transition probabilities over point i's k neighbours
    states[i] : the neighbour indices those probabilities refer to
    """
    def __init__(self, trans,states):
        self.trans=trans
        self.states=states
        self.n_states=trans.shape[0]  # number of points
        self.k=trans.shape[1]         # neighbours per point
    def get_states(self):
        """All state indices."""
        return range(self.n_states)
    def __call__(self,beta,theta,start_state):
        """Run *beta* random walks of nominal length *theta* from *start_state*.

        Returns the array of end states.  NOTE(review): current_state is
        never advanced inside the theta loop, so each walk effectively has
        length 1 — preserved as-is, but it looks unintended.
        """
        result=np.zeros((beta,))
        for s in range(beta):  # bugfix: xrange is Python 2 only (NameError on py3)
            current_state=start_state
            for t in range(theta):
                i=self.next_state(current_state)
            result[s]=self.states[current_state][i]
        return result
    def next_state(self,state_i):
        """Sample a neighbour slot from the cumulative row; returns k on overflow."""
        r=random.random()
        for j in range(self.k):
            if(r<self.trans[state_i][j]):
                return j
        return self.k
    def seek_landmark(self,start,landmarks):
        """Random-walk from *start* until a landmark state is reached."""
        current_state=start
        while(not (current_state in landmarks)):
            j=self.next_state(current_state)
            current_state=self.states[current_state][j]
        return current_state
    def save(self,trans_file='trans.txt',states_file='states.txt'):
        """Write transitions (floats) and states (ints) as text files."""
        utils.save_array(self.trans,trans_file)
        utils.save_array(self.states,states_file,prec='%i')
def make_eff_markov_chain(nn_graph):
    """Turn a kNN graph into an EffMarkovChain with cumulative transition rows.

    NOTE(review): weights use np.exp(+d/sigma), which grows with distance —
    a similarity kernel would normally be exp(-d/sigma); confirm against the
    h-SNE paper before changing.
    """
    trans=[]
    states=[]
    for i in range(len(nn_graph)):
        names_i,distances_i=nn_graph[i]
        sigma_i=np.min(distances_i[distances_i!=0])  # smallest non-zero distance
        dist_i=np.exp(distances_i/sigma_i)
        dist_i/=np.sum(dist_i)
        dist_i=np.cumsum(dist_i)  # cumulative row enables O(k) sampling
        trans.append(dist_i)
        states.append(names_i)
    return EffMarkovChain(np.array(trans),np.array(states))
def find_landmarks(markov_chain,beta=100,theta=50,beta_theshold=3.0):
    """Return states hit more than beta_theshold*beta times by random walks."""
    all_states = markov_chain.get_states()
    hits = np.zeros((len(all_states),))
    for state_i in all_states:
        if state_i % 10 == 0:
            print(state_i)  # coarse progress indicator
        end_states = markov_chain(beta, theta, state_i)
        for end_state in end_states:
            hits[end_state] += 1
    cutoff = beta_theshold * beta
    return [i for i, count in enumerate(hits) if count > cutoff]
def compute_influence(markov_chain,landmarks,beta=50):
    """Estimate each state's influence over the landmarks via random walks.

    Runs *beta* walks per state; infl_matrix[s][l] is the fraction of walks
    from state s that terminated at landmark l.
    """
    n_states=len(markov_chain.get_states())
    n_landmarks=len(landmarks)
    infl_matrix=np.zeros((n_states,n_landmarks),dtype=float)
    landmark_dict={ landmark_i:i
        for i,landmark_i in enumerate(landmarks)}
    # bugfix: `Set` came from the removed py2 `sets` module (import is
    # commented out at the top of the file) and raised NameError; use the
    # builtin set for the O(1) membership test seek_landmark needs.
    landmark_set=set(landmarks)
    for state_i in range(n_states):
        print(state_i)
        for j in range(beta):
            end_state=markov_chain.seek_landmark(state_i,landmark_set)
            landmark_index=landmark_dict[end_state]
            infl_matrix[state_i][landmark_index]+=1.0
    infl_matrix/=float(beta)
    return infl_matrix
def get_prob_matrix(infl_matrix,W):
    """Combine the sparse influence matrix with landmark weights W.

    Computes (infl ∘ W)^T · infl as a dense array; rows are normalised
    later by the caller (tsne.compute_t).
    """
    # assumes W is an (n_states, 1) sparse column that broadcasts across
    # columns in multiply() — TODO confirm
    weighted_infl=infl_matrix.multiply(W)
    sp=weighted_infl.transpose()*infl_matrix  # sparse `*` is the matrix product
    T=sp.toarray()
    print(T.shape)
    return T
def to_cum_matrix(matrix):
    """Row-normalise *matrix*, then return the row-wise cumulative sums."""
    inv_row_sums = 1.0 / np.sum(matrix, axis=1)
    print("T")
    normalised = np.array([inv_row_sums[i] * row_i
                           for i, row_i in enumerate(matrix)])
    return np.cumsum(normalised, axis=1)
if __name__ == "__main__":
    # NOTE(review): make_markov_chain is not defined anywhere in this module
    # (only make_eff_markov_chain exists) — running as a script raises NameError.
    make_markov_chain("mnist_graph")
|
{"/hsne.py": ["/utils.py", "/knn.py", "/markov.py", "/tsne.py", "/plot.py"], "/knn.py": ["/markov.py", "/utils.py"], "/tsne.py": ["/knn.py", "/markov.py", "/utils.py", "/plot.py"], "/reconstruct.py": ["/tsne.py", "/utils.py", "/plot.py"], "/preproc.py": ["/knn.py"], "/markov.py": ["/knn.py", "/utils.py"]}
|
12,006
|
tjacek/hsne
|
refs/heads/master
|
/plot.py
|
import matplotlib.pyplot as plt
from matplotlib import offsetbox
import numpy as np
def plot_embedding(X,cats,landmarks,title=None):
    """Scatter-plot a 2-D embedding, drawing each point as its class label.

    Only every get_fraction()-th point is drawn so large embeddings stay legible.
    """
#    print(landmarks)
    n_points=X.shape[0]
    fraction=get_fraction(n_points)  # subsample to at most ~3000 labels
    print("Fraction %d" % fraction)
    y = [cats[l]
        for l in landmarks]
    print("Unique categories")
    print(np.unique(y))
    # rescale coordinates to the unit square before plotting
    x_min, x_max = np.min(X, 0), np.max(X, 0)
    X = (X - x_min) / (x_max - x_min)
    plt.figure()
    ax = plt.subplot(111)
    for i in range(n_points):
        if( (i%fraction) == 0):
            plt.text(X[i, 0], X[i, 1], str(y[i]),
                color=plt.cm.Set3( float(y[i]) / 10.),
                fontdict={'weight': 'bold', 'size': 9})
    plt.xticks([]), plt.yticks([])
    if title is not None:
        plt.title(title)
    plt.show()
def get_fraction(n_points,max_points=3000):
    """Subsampling step so that at most ~max_points of n_points are plotted."""
    return int(n_points / max_points) if n_points > max_points else 1
|
{"/hsne.py": ["/utils.py", "/knn.py", "/markov.py", "/tsne.py", "/plot.py"], "/knn.py": ["/markov.py", "/utils.py"], "/tsne.py": ["/knn.py", "/markov.py", "/utils.py", "/plot.py"], "/reconstruct.py": ["/tsne.py", "/utils.py", "/plot.py"], "/preproc.py": ["/knn.py"], "/markov.py": ["/knn.py", "/utils.py"]}
|
12,008
|
Thomasjkeel/nerc-climate-modelling-practical
|
refs/heads/main
|
/experiments.py
|
import os
import pandas as pd
import numpy as np
from scripts import model
from scripts.model import KRAK_VALS, KRAKATOA_YEAR
import matplotlib.pyplot as plt
import seaborn as sns
import matplotlib.patches as mpatches
## GLOBALS
FORCING_SENSITIVITY = 1  # multiplier applied to every forcing series
COLORS = ['#f7564a', '#e6ac1c', '#5963f0']  # one colour per SSP scenario
sns.set_context('paper')
sns.set_style('white')
## load in data (Move to get_data_func)
def load_data(data_path):
    """Load whitespace-delimited rows from *data_path* as an array of strings."""
    ## TODO: will be extend to allow for getting climate model data on the fly
    return np.loadtxt(data_path, delimiter=None, dtype=str)
def calc_anomaly(data, num_years):
    """Return the anomaly column (row[1]) of the first *num_years* rows as floats.

    Rewritten to build the array in one pass: the old np.append loop
    reallocated on every iteration (O(n^2)) and also accumulated a `years`
    array that was never used or returned.
    """
    return np.array([float(data[row][1]) for row in range(num_years)])
def load_forcing_data(filename, volcanic=True):
    """Load an ERF forcing CSV; optionally splice a replay of the historical
    volcanic record onto the post-2024 period.

    Side effect: fills model.KRAK_VALS with the 1883-1886 Krakatoa forcings.
    Returns (ERF 1850-2020, ERF 1850-2100), both scaled by FORCING_SENSITIVITY.
    """
    ERF_data = pd.read_csv(filename)
    ERF_data = ERF_data.set_index('year')
    if volcanic == True:
        plot_volcanic_record(ERF_data)
        # remember Krakatoa's eruption forcing for the 'krakatwoa' experiment
        KRAK_VALS[1883] = ERF_data['volcanic'][KRAKATOA_YEAR]
        KRAK_VALS[1884] = ERF_data['volcanic'][KRAKATOA_YEAR+1]
        KRAK_VALS[1885] = ERF_data['volcanic'][KRAKATOA_YEAR+2]
        KRAK_VALS[1886] = ERF_data['volcanic'][KRAKATOA_YEAR+3]
        # replay the 1850-2024 volcanic record on top of the future totals
        past_volcanic_record = len(ERF_data['volcanic'].loc[1850:2024])
        new_vals = list(ERF_data['total'].loc[:2024].values)
        new_vals.extend(ERF_data['total'].loc[2024:2023+past_volcanic_record].values + ERF_data['volcanic'].loc[1850:2024].values)
        new_vals.extend(ERF_data['total'].loc[2024+past_volcanic_record+1:].values)
        ERF_data['total'] = new_vals
    ERF = np.array(ERF_data.loc[1850:2020]['total']) * FORCING_SENSITIVITY
    ERF_fut = np.array(ERF_data.loc[1850:2100]['total'] * FORCING_SENSITIVITY)
    return ERF, ERF_fut
def calc_confidence_interval(data):
    """95% normal-approximation confidence interval around the sample mean.

    Returns (upper, lower) bounds.
    """
    n = len(data)
    mean = data.mean()
    margin = 1.960 * np.std(data) / np.sqrt(n)
    return mean + margin, mean - margin
def plot_model(years, model, ax=None, fig=None, legend=True, **kwargs):
    """Plot one temperature series onto a shared (fig, ax), creating them on demand.

    Extra **kwargs are forwarded to plt.plot.  Returns (fig, ax) so callers
    can keep layering series onto the same axes.
    """
    if not ax:
        fig, ax = plt.subplots(1)
    plt.plot(years, model, **kwargs)
    plt.hlines(0,1850,2100, linestyle='--', color='k')  # zero-anomaly reference line
    plt.xlim(1850, 2100)
    plt.ylim(-1,7)
    plt.xlabel('Year', fontsize=12)
    plt.ylabel('Temperature Anomaly (K) (w.r.t. 1961-1990)', fontsize=10)
    ax.grid(True)
    if legend:
        plt.legend(loc='upper left')
    return fig, ax
def plot_volcanic_record(data):
    """Plot the historical volcanic ERF plus its replay over the future period.

    Saves to outputs/volcanic_record_extended.png and closes the figure.
    """
    past_volcanic_record = len(data['volcanic'].loc[1850:2024])
    # print('sum volcanic record added = ', data['volcanic'].loc[1850:1850+77].values.sum())
    sns.set_style('white')
    fig, ax = plt.subplots(1, figsize=(7, 5))
    data['volcanic'].loc[1850:2024].plot(ax=ax)
    ax.grid(axis='y')
    # the same historical record, shifted to start at 2024
    ax.plot(np.arange(2024,2024+past_volcanic_record), data['volcanic'].loc[1850:2024].values)
    plt.vlines(2024, -2, 2,color='k', linestyle='--')
    plt.ylim(-2,2)
    plt.xlim(1850,2100)
    plt.xlabel('Year', size=13)
    plt.title("\'New\' Volcanic record", size=14)
    plt.ylabel('Effective Radiative Forcing (ERF)', size=13)
    plt.savefig('outputs/volcanic_record_extended.png', bbox_inches='tight')
    plt.close()
def plot_temp_anom(data, data2):
    """Plot the HadCRUT record (*data*) next to an SSP5 projection (*data2*).

    Saves to outputs/hadCRUT_time.png and closes the figure.
    """
    fig, ax = plt.subplots(1, figsize=(7, 5))
    ax.plot(np.arange(1850,2021), data, marker='s', label='HadCRUT record')
    ax.plot(np.arange(1850,2101),data2, label='SSP5 projection')
    ax.grid(axis='y')
    plt.hlines(0,1850,2020, linestyle='--', color='k')
    plt.xlim(1850, 2020)
    plt.xlabel('Year', fontsize=12)
    plt.ylabel('Temperature anomaly (K) (w.r.t. 1961-1990)', fontsize=12)
    plt.title("HadCRUT global 2 m temperature record", size=13)
    plt.legend(loc='upper left')
    plt.savefig('outputs/hadCRUT_time.png', bbox_inches='tight')
    plt.close()
def get_non_volcanic_results(scen_file, forcing_scenario_path, temp_anom, VOLCANIC_RESULTS, krakatwoa=False):
    """Fit and project the model without volcanic forcing; record the 2100 CI.

    Mutates *VOLCANIC_RESULTS* in place (and also returns it) under the key
    '<scenario>non_volcanic'.
    """
    ERF, ERF_fut = load_forcing_data(forcing_scenario_path, volcanic=False)
    alpha_val, alpha_stderr = model.get_opt_model(temp_anom=temp_anom, F=ERF)
    projection = model.upper_ocean_temp(t=len(ERF_fut), alpha=alpha_val, F=ERF_fut, krakatwoa=krakatwoa)
    # 0.048 is a hard-coded alpha standard error for the 95% CI — TODO derive from alpha_stderr
    proj_upper = model.upper_ocean_temp(t=len(ERF_fut), alpha=alpha_val+1.96*0.048, F=ERF_fut, krakatwoa=krakatwoa)
    proj_lower = model.upper_ocean_temp(t=len(ERF_fut), alpha=alpha_val-1.96*0.048, F=ERF_fut, krakatwoa=krakatwoa)
    VOLCANIC_RESULTS[scen_file[5:8] + 'non_volcanic'] = [proj_lower[-1], proj_upper[-1]]
    return VOLCANIC_RESULTS
def main(krakatwoa=False, save_filename='outputs/upper_ocean_projection_volcanic.png'):
    """Fit the two-box model to HadCRUT and plot projections for each SSP scenario.

    NOTE(review): reads/writes the global VOLCANIC_RESULTS, which is only
    created inside the __main__ guard — importing this module and calling
    main() directly would raise NameError.
    """
    # array for time, in years and seconds
    t = np.array(range(0,171), dtype='int64')
    years = t + 1850
    t_fut = np.array(range(0,251), dtype='int64')
    years_fut = t_fut + 1850
    ## file locations
    data_dir = './data'
    filename = 'hadCRUT_data.txt'
    path_to_ssp_forcings = os.path.join(data_dir, 'SSPs/')
    ## load data and calc temperature anomaly
    data_path = os.path.join(data_dir, filename)
    model_data_used = load_data(data_path)
    temp_anom = calc_anomaly(model_data_used, num_years=171)
    ## initialise_plot
    fig, ax = plt.subplots(1, figsize=(10,6))
    fig, ax = plot_model(years, temp_anom, label='HadCRUT', fig=fig, ax=ax, marker='s', markersize=2, linewidth=1)
    ## run model under different forcing scenarios
    scenario_files = sorted(os.listdir(path_to_ssp_forcings), reverse=True)
    for ind, scen_file in enumerate(scenario_files):
        forcing_scenario_path = os.path.join(path_to_ssp_forcings, scen_file)
        ## TODO: clean up
        get_non_volcanic_results(scen_file, forcing_scenario_path, temp_anom, VOLCANIC_RESULTS)
        ## TODO: clean up above
        ERF, ERF_fut = load_forcing_data(forcing_scenario_path)
        alpha_val, alpha_stderr = model.get_opt_model(temp_anom=temp_anom, F=ERF)
        projection = model.upper_ocean_temp(t=len(ERF_fut), alpha=alpha_val, F=ERF_fut, krakatwoa=krakatwoa)
        # 0.048 is a hard-coded alpha standard error for the 95% CI band
        proj_upper = model.upper_ocean_temp(t=len(ERF_fut), alpha=alpha_val+1.96*0.048, F=ERF_fut, krakatwoa=krakatwoa)
        proj_lower = model.upper_ocean_temp(t=len(ERF_fut), alpha=alpha_val-1.96*0.048, F=ERF_fut, krakatwoa=krakatwoa)
        if not krakatwoa:
            ## IPCC
            # print("expected temperature anomaly for %s " % (scen_file[5:8]), proj_lower[-1], proj_upper[-1])
            VOLCANIC_RESULTS[scen_file[5:8]] = [proj_lower[-1], proj_upper[-1]]
            # AR5 alpha = 1.04 +/- 0.36: overlay the IPCC feedback-parameter range
            low_proj = model.upper_ocean_temp(t=len(ERF_fut), alpha=1.04-0.36, F=ERF_fut, krakatwoa=krakatwoa)
            high_proj = model.upper_ocean_temp(t=len(ERF_fut), alpha=1.04+0.36, F=ERF_fut, krakatwoa=krakatwoa)
            fig, ax = plot_model(years_fut, low_proj, fig=fig, ax=ax, alpha=.2, linestyle='--', color=COLORS[ind], legend=False)
            fig, ax = plot_model(years_fut, high_proj, fig=fig, ax=ax, alpha=.2, linestyle='--', color=COLORS[ind], legend=False)
            ax.add_patch(mpatches.Rectangle((2105,low_proj[-1]),2, (high_proj[-1]- low_proj[-1]),facecolor=COLORS[ind],
                clip_on=False,linewidth = 0, alpha=.7))
            plt.text(2110, 7, r'AR5 $\alpha$ range')
            plt.text(2108, (high_proj.max() + low_proj.max())/2, '%s – RCP %s.%s' % (scen_file[4:8].upper(), scen_file[8:9], scen_file[9:10]), color=COLORS[ind])
        else:
            # print("krakatwoa: expected temperature anomaly for %s " % (scen_file[5:8]), proj_lower[-1], proj_upper[-1])
            VOLCANIC_RESULTS[scen_file[5:8] + '_krakatwoa'] = [proj_lower[-1], proj_upper[-1]]
            ax.add_patch(mpatches.Rectangle((2105,proj_lower[-1]),2, (proj_upper[-1]- proj_lower[-1]),facecolor=COLORS[ind],
                clip_on=False,linewidth = 0, alpha=.7))
        ## plot and save ouputs
        fig, ax = plot_model(years_fut, projection, label='%s' % (scen_file[:-16].replace('_', '–').upper()), fig=fig, ax=ax, color=COLORS[ind])
        fig, ax = plot_model(years_fut, proj_upper, label=None, fig=fig, ax=ax, alpha=.4, color=COLORS[ind])
        fig, ax = plot_model(years_fut, proj_lower, label=None, fig=fig, ax=ax, alpha=.4, color=COLORS[ind])
    fig.savefig(save_filename, bbox_inches='tight', dpi=300)
    plt.close()
    ## plot temp anomaly
    plot_temp_anom(temp_anom,projection)
if __name__ == '__main__':
    ## Store results
    VOLCANIC_RESULTS = {}  # scenario key -> [lower, upper] 2100 anomaly bounds
    main()
    main(krakatwoa=True, save_filename='outputs/upper_ocean_projection_volcanic_krakatwoa.png')
    print(VOLCANIC_RESULTS)
    ## comparison plot: one panel per scenario, one horizontal bar per experiment
    different_scenarios = ['sp1', 'sp4', 'sp5']
    different_types = ['non_volcanic', 'krakatwoa']
    counter = 2
    fig, axes = plt.subplots(1, 3, sharey=True, figsize=(7,4))
    for ax, scen in zip(axes, different_scenarios):
        ax.set_title('S' + scen.upper(), size=14)
        ax.grid(axis='x')
        for key in VOLCANIC_RESULTS.keys():
            if scen in key:
                # y = 1: non-volcanic, y = 2: volcanic, y = 3: krakatwoa
                if 'non_volcanic' in key:
                    ax.hlines(1, VOLCANIC_RESULTS[key][0], VOLCANIC_RESULTS[key][1], color=COLORS[counter])
                    ax.vlines(VOLCANIC_RESULTS[key][0], 0.9, 1.1, color=COLORS[counter])
                    ax.vlines(VOLCANIC_RESULTS[key][1], 0.9, 1.1, color=COLORS[counter])
                elif 'krakatwoa' in key:
                    ax.hlines(3, VOLCANIC_RESULTS[key][0], VOLCANIC_RESULTS[key][1], color=COLORS[counter])
                    ax.vlines(VOLCANIC_RESULTS[key][0], 2.9, 3.1, color=COLORS[counter])
                    ax.vlines(VOLCANIC_RESULTS[key][1], 2.9, 3.1, color=COLORS[counter])
                else:
                    ax.hlines(2, VOLCANIC_RESULTS[key][0], VOLCANIC_RESULTS[key][1], color=COLORS[counter])
                    ax.vlines(VOLCANIC_RESULTS[key][0], 1.9, 2.1, color=COLORS[counter])
                    ax.vlines(VOLCANIC_RESULTS[key][1], 1.9, 2.1, color=COLORS[counter])
        plt.yticks(np.arange(1,4,1), ['Non-volcanic', 'Volcanic', 'Krakatwoa'])
        counter -= 1
    # plt.ylim(0, 10)
    # plt.xlim(0,5)
    axes[0].set_ylabel('Experiment', size=12)
    axes[1].set_xlabel('Temperature Anomaly (K) (w.r.t 1961-1990)', size=12)
    plt.suptitle('2100 Temperature anomaly', size=14)
    plt.subplots_adjust(top=.85)
    fig.savefig('outputs/compare_results.png', bbox_inches='tight', dpi=300)
|
{"/experiments.py": ["/scripts/model.py"]}
|
12,009
|
Thomasjkeel/nerc-climate-modelling-practical
|
refs/heads/main
|
/scripts/model.py
|
import numpy as np
import pandas as pd
import os
import lmfit
global KRAK_VALS, KRAKATOA_YEAR
KRAK_VALS = {}  # year -> volcanic ERF around Krakatoa, filled by experiments.load_forcing_data
KRAKATOA_YEAR = 1883
# set constants
data_dir = 'data'
# NOTE(review): reads the SSP585 forcing CSV at import time — importing this
# module fails outright if the data directory is missing.
ERF_data = pd.read_csv(os.path.join(data_dir, 'SSPs/','ERF_ssp585_1750-2500.csv'))
ERF_data = ERF_data.set_index('year')
ERF = np.array(ERF_data.loc[1850:2020]['total'])  # default historical forcing series
start_point = 1961-1850  # anomaly baseline window (1961-1990) as array indices
end_point = 1990 -1850
rho = 1000 # density of water kgm-3
c_p = 4218 # specific heat of water Jkg-1K-1
kap = 1e-4 # vertical diffusivity m2s-1
h_u = 100 # upper ocean height m
h_d = 900 # deep ocean height m
gamma = (2*kap*c_p*rho)/(h_u+h_d) # prop constant for heat transfer to deep ocean Wm-2K-1
C_u = rho*c_p*h_u # specific heat of upper ocean Jm-2K-1
C_d = rho*c_p*h_d # specific heat of deep ocean Jm-2K-1
dt = 365*24*60*60 # seconds in year
# Solved second order differential equation to find expression for T_u:
# T_u = Aexp(lambda1*t) + Bexp(lambda2*t) + F/alpha
# where lambda1,2 are found using quadratic formula from homogenous 2nd order ODE solution, and
# A and B are constants, where A + B = -F/alpha (from inhomogenous solution)
def upper_ocean_temp(t, alpha, F=None, krakatwoa=False):
    """Two-box (upper/deep ocean) energy-balance temperature model.

    Integrates *t* yearly steps of forcing F (W m-2) with climate feedback
    *alpha*; returns the upper-ocean temperature anomaly relative to the
    1961-1990 mean.  With krakatwoa=True a doubled Krakatoa eruption is
    injected at simulation years 200-203 (2050-2053 for an 1850 start).
    """
    if type(F) != np.array and type(F) != np.ndarray:
        F = ERF
    if krakatwoa:
        # bugfix: inject the eruption into a local copy.  The old code did
        # F[i] += ... on the caller's array, so the three calls per scenario
        # (projection + upper/lower CI) stacked the eruption forcing 3x.
        F = np.array(F, dtype=float)
        for offset in range(4):
            idx = 200 + offset
            if idx < len(F):
                F[idx] += (KRAK_VALS[KRAKATOA_YEAR + offset] * 2)
    T_u = np.zeros(t)
    T_d = np.zeros(t)
    for i in range(t-1):
        # explicit Euler step of the coupled upper/deep ocean heat budget
        T_u[i+1] = (1/C_u)*(F[i] - (alpha+gamma)*T_u[i] + T_d[i]*gamma)*dt + T_u[i]
        T_d[i+1] = (gamma/C_d)*(T_u[i]-T_d[i])*dt + T_d[i]
    # express as anomaly w.r.t. the 1961-1990 baseline
    T_u = T_u - np.mean(T_u[start_point:end_point])
    return T_u
def get_opt_model(temp_anom, F, t=171):
    """Fit the model's alpha to *temp_anom* under forcing *F*; return (value, stderr)."""
    return opt_alpha(temp_anom=temp_anom, F=F, t=t)
def opt_alpha(temp_anom, F, t=171):
    """Least-squares fit of the feedback parameter alpha via lmfit.

    Returns (best-fit alpha, its standard error).
    """
    mod = lmfit.Model(upper_ocean_temp, F=F)  # F held fixed, alpha free
    params = mod.make_params(alpha=1)  # initial guess alpha = 1
    fit_result = mod.fit(temp_anom, params, t=t)
    return fit_result.params['alpha'].value, fit_result.params['alpha'].stderr
|
{"/experiments.py": ["/scripts/model.py"]}
|
12,021
|
AnikethSDeshpande/Order-Management
|
refs/heads/main
|
/order_management/__init__.py
|
from order_management.catalogue.catalogue import CATALOGUE
|
{"/order_management/__init__.py": ["/order_management/catalogue/catalogue.py"], "/order_management/order/test_order.py": ["/order_management/order/order.py", "/order_management/__init__.py"], "/order_management/order/order.py": ["/order_management/config.py", "/order_management/order_item/order_item.py"], "/order_management/order_item/order_item.py": ["/order_management/__init__.py"], "/order_management/order_item/test_order_item.py": ["/order_management/order/order.py", "/order_management/order_item/order_item.py", "/order_management/catalogue/catalogue.py"]}
|
12,022
|
AnikethSDeshpande/Order-Management
|
refs/heads/main
|
/order_management/order/test_order.py
|
import unittest
from order_management.order.order import Order
from order_management import CATALOGUE
class Test_Order_Creation(unittest.TestCase):
    """Unit tests for Order: id sequencing, customer field, item totals, repr."""
    def test_order_creation_1(self):
        # order ids are assigned sequentially, starting at 0
        order1 = Order()
        self.assertEqual(order1.order_id, 0)
        order2 = Order()
        self.assertEqual(order2.order_id, 1)
    def test_order_customer_compulsory(self):
        # NOTE(review): only prints the default — asserts nothing about the
        # "customer is compulsory" behavior its name implies
        order = Order()
        print(order.customer)
    def test_order_item_addition(self):
        # adding an item should set order_total to qty * catalogue price
        order = Order()
        order.customer = 'Aniketh'
        order.gst_number = '123'
        item_name = 'Pen'
        qty = 100
        order.add_item(item_name, qty)
        self.assertEqual(order.order_total, qty*CATALOGUE[item_name])
    def test_order_repr(self):
        # smoke test: __repr__/__str__ must not raise
        o = Order()
        o.customer = 'Aniketh'
        print(o)
if __name__ == '__main__':
unittest.main()
|
{"/order_management/__init__.py": ["/order_management/catalogue/catalogue.py"], "/order_management/order/test_order.py": ["/order_management/order/order.py", "/order_management/__init__.py"], "/order_management/order/order.py": ["/order_management/config.py", "/order_management/order_item/order_item.py"], "/order_management/order_item/order_item.py": ["/order_management/__init__.py"], "/order_management/order_item/test_order_item.py": ["/order_management/order/order.py", "/order_management/order_item/order_item.py", "/order_management/catalogue/catalogue.py"]}
|
12,023
|
AnikethSDeshpande/Order-Management
|
refs/heads/main
|
/order_management/order/order.py
|
'''
Author: Aniketh Deshpande
Order Class
- Maintains order related information
Fields
- Customer
- GST Number
- Order Items
- Order Value
- Tax
- Delivery Status
'''
import logging
from order_management.config import GST_NUMBER_LENGHT
from order_management.order_item.order_item import OrderItem
import re
class Order:
    """An order: customer details, line items, and derived totals."""

    # Class-level sequence used to hand out sequential order ids.
    _order_ids = [0]

    def __init__(self, customer=None):
        self.order_id = self.get_last_order_id()
        Order._order_ids.append(self.order_id + 1)
        self.order_items = []
        self.customer = customer

    @classmethod
    def get_last_order_id(cls):
        """Return the next order id to be assigned."""
        return Order._order_ids[-1]

    @property
    def customer(self):
        return self._customer

    @customer.setter
    def customer(self, customer):
        """Accept a string customer name (or None); log invalid values."""
        try:
            if not isinstance(customer, str) and customer is not None:
                raise Exception('invalied customer name')
            self._customer = customer
        except Exception as e:
            # Fix: logging.ERROR is an int level constant, not a function;
            # calling it raised TypeError inside the handler. Also the
            # message was missing the f-prefix, so {e} never interpolated.
            logging.error(f'error while setting customer: {e}')

    @property
    def gst_number(self):
        return self._gst_number

    @gst_number.setter
    def gst_number(self, gst_number):
        """Accept a GST number string of the configured length; log otherwise."""
        try:
            if not isinstance(gst_number, str):
                raise Exception('gst_not_string')
            if len(gst_number) != GST_NUMBER_LENGHT:
                raise Exception('gst_len_error')
            self._gst_number = gst_number
        except Exception as e:
            logging.error(f'error while setting gst_number: {e}')

    @property
    def order_total(self):
        '''
        calculates the total value of order based on the items added to the order
        '''
        return sum(order_item.amount for order_item in self.order_items)

    def add_item(self, item_name=None, qty=None, rate=None):
        '''
        add item to the list of items in the order

        NOTE(review): `rate` is accepted but ignored — OrderItem derives
        the rate from the catalogue; confirm whether an override was
        intended here.
        '''
        item = OrderItem(order_id=self.order_id,
                         item_name=item_name,
                         qty=qty
                         )
        self.order_items.append(item)

    def __repr__(self) -> str:
        return f'order_id: {self.order_id}, customer: {self.customer}'

    def print_order(self):
        """Return a printable multi-line summary of the order."""
        order_string = f'Order ID: {self.order_id}\n'
        order_string += f'Customer: {self.customer}\n'
        order_string += f'GST Number: {self.gst_number}\n\n'
        order_string += 'Items'
        for i, item in enumerate(self.order_items):
            order_string += f'\n{i+1}--------------------------------------\n{item}'
        order_string += '\n---------------------------------------'
        order_string += f'\n Total: {self.order_total}'
        return order_string
|
{"/order_management/__init__.py": ["/order_management/catalogue/catalogue.py"], "/order_management/order/test_order.py": ["/order_management/order/order.py", "/order_management/__init__.py"], "/order_management/order/order.py": ["/order_management/config.py", "/order_management/order_item/order_item.py"], "/order_management/order_item/order_item.py": ["/order_management/__init__.py"], "/order_management/order_item/test_order_item.py": ["/order_management/order/order.py", "/order_management/order_item/order_item.py", "/order_management/catalogue/catalogue.py"]}
|
12,024
|
AnikethSDeshpande/Order-Management
|
refs/heads/main
|
/order_management/config.py
|
# Length of the GST number field. The constant name keeps the historic
# "LENGHT" spelling because other modules import it under that name.
GST_NUMBER_LENGHT = 3
|
{"/order_management/__init__.py": ["/order_management/catalogue/catalogue.py"], "/order_management/order/test_order.py": ["/order_management/order/order.py", "/order_management/__init__.py"], "/order_management/order/order.py": ["/order_management/config.py", "/order_management/order_item/order_item.py"], "/order_management/order_item/order_item.py": ["/order_management/__init__.py"], "/order_management/order_item/test_order_item.py": ["/order_management/order/order.py", "/order_management/order_item/order_item.py", "/order_management/catalogue/catalogue.py"]}
|
12,025
|
AnikethSDeshpande/Order-Management
|
refs/heads/main
|
/order_management/order_item/order_item.py
|
'''
Author: Aniketh Deshpande
Order Item
- Maintains information regarding the order items
Fields
- Item Name
- Qty
- Rate
- Amount
'''
import logging
from order_management import CATALOGUE
class OrderItem:
    """A single line item on an order: name, quantity, rate and amount."""

    # Class-level sequence used to hand out sequential item ids.
    _order_item_ids = [0]

    def __init__(self, order_id, item_name, qty):
        self.order_id = order_id
        self.item_name = item_name
        self.qty = qty
        self.order_item_id = self.get_last_order_item_id()
        OrderItem._order_item_ids.append(self.order_item_id + 1)
        # The amount setter ignores the assigned value and derives rate*qty.
        self.amount = 0

    @classmethod
    def get_last_order_item_id(cls):
        """Return the next order-item id to be assigned."""
        return OrderItem._order_item_ids[-1]

    @property
    def item_name(self):
        return self.item_name_

    @item_name.setter
    def item_name(self, item_name):
        """Accept only items present in the catalogue; also sets the rate."""
        try:
            if item_name not in CATALOGUE:
                raise Exception('unknown_item')
            self.item_name_ = item_name
            self.rate = CATALOGUE[item_name]
        except Exception as e:
            # Fix: logging.ERROR is an int level constant, not a function;
            # calling it raised TypeError and masked the real error.
            logging.error(f'{e}: item not in catalogue')

    @property
    def amount(self):
        return self.amount_

    @amount.setter
    def amount(self, _):
        """Derived value: rate * qty; falls back to 0 when unavailable."""
        try:
            self.amount_ = self.rate * self.qty
        # Narrowed from a bare except: rate may be unset (AttributeError)
        # or rate/qty may be non-numeric (TypeError).
        except (AttributeError, TypeError):
            self.amount_ = 0
            # getattr: item_name_ may be unset when the name was rejected;
            # reading the property here would raise inside this handler.
            logging.error(f'error while setting order_item.amount for order_id: '
                          f'{self.order_id} and item: {getattr(self, "item_name_", None)}')
# order_id = 1
# item = 'Book'
# qty = 20
# oi = OrderItem(order_id, item, qty)
# print(oi)
# print(' ')
|
{"/order_management/__init__.py": ["/order_management/catalogue/catalogue.py"], "/order_management/order/test_order.py": ["/order_management/order/order.py", "/order_management/__init__.py"], "/order_management/order/order.py": ["/order_management/config.py", "/order_management/order_item/order_item.py"], "/order_management/order_item/order_item.py": ["/order_management/__init__.py"], "/order_management/order_item/test_order_item.py": ["/order_management/order/order.py", "/order_management/order_item/order_item.py", "/order_management/catalogue/catalogue.py"]}
|
12,026
|
AnikethSDeshpande/Order-Management
|
refs/heads/main
|
/order_management/catalogue/catalogue.py
|
'''
Author: Aniketh Deshpande
'''

# Item catalogue: maps item name -> unit cost.
CATALOGUE = {
    "Book": 200,
    "Pen": 10,
}
|
{"/order_management/__init__.py": ["/order_management/catalogue/catalogue.py"], "/order_management/order/test_order.py": ["/order_management/order/order.py", "/order_management/__init__.py"], "/order_management/order/order.py": ["/order_management/config.py", "/order_management/order_item/order_item.py"], "/order_management/order_item/order_item.py": ["/order_management/__init__.py"], "/order_management/order_item/test_order_item.py": ["/order_management/order/order.py", "/order_management/order_item/order_item.py", "/order_management/catalogue/catalogue.py"]}
|
12,027
|
AnikethSDeshpande/Order-Management
|
refs/heads/main
|
/order_management/order_item/test_order_item.py
|
import unittest
from order_management.order.order import Order
from order_management.order_item.order_item import OrderItem
from order_management.catalogue.catalogue import CATALOGUE
class Test_OrderItem(unittest.TestCase):
    """Checks that an item's amount equals qty times its catalogue rate."""

    def test_order_item_amount(self):
        parent = Order()
        name, quantity = 'Pen', 250
        line = OrderItem(parent.order_id, name, quantity)
        self.assertEqual(line.amount, quantity * CATALOGUE[name])
|
{"/order_management/__init__.py": ["/order_management/catalogue/catalogue.py"], "/order_management/order/test_order.py": ["/order_management/order/order.py", "/order_management/__init__.py"], "/order_management/order/order.py": ["/order_management/config.py", "/order_management/order_item/order_item.py"], "/order_management/order_item/order_item.py": ["/order_management/__init__.py"], "/order_management/order_item/test_order_item.py": ["/order_management/order/order.py", "/order_management/order_item/order_item.py", "/order_management/catalogue/catalogue.py"]}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.