index
int64 | repo_name
string | branch_name
string | path
string | content
string | import_graph
string |
|---|---|---|---|---|---|
6,113
|
gr33ndata/irlib
|
refs/heads/master
|
/irlib/matrixcooccurrence.py
|
'''
Informations Retrieval Library
==============================
MatrixCooccurrence: You give it a Matrix,
and it creates new co-occurrence matrix of its features
'''
# Author: Tarek Amr <@gr33ndata>
import sys, math
from matrix import Matrix
from superlist import SuperList
from itertools import permutations
class MatrixCooccurrence(Matrix):
    '''Builds a term-by-term co-occurrence matrix from a document Matrix.

    Each row/column corresponds to one vocabulary term of the source
    matrix; cell [i][j] counts the documents in which terms i and j
    occur together, and the diagonal [i][i] counts the documents that
    contain term i at all.
    '''

    def __init__(self, mx=None):
        # Keep a handle on the source matrix and its vocabulary.
        self.orig_mx = mx
        self.terms = self.orig_mx.vocabulary()
        n_terms = len(self.terms)
        # n_terms x n_terms grid of zeros.
        cooc = [[0] * n_terms for _ in range(n_terms)]
        for doc in self.orig_mx.docs:
            present = self._nonzeros(doc['terms'])
            # Every ordered pair of co-occurring terms gets a count ...
            for row, col in permutations(present, 2):
                cooc[row][col] += 1
            # ... and each term co-occurs with itself once per document.
            for idx in present:
                cooc[idx][idx] += 1
        # Expose the result through the parent Matrix docs layout:
        # one pseudo-document per term, holding that term's row.
        self.docs = []
        for i, term in enumerate(self.terms):
            self.docs.append({'id': term,
                              'class': '',
                              'terms': cooc[i]})

    def _nonzeros(self, x):
        '''Return the indices of all non-zero entries of x.'''
        return [i for i, val in enumerate(x) if val != 0]

    def normalize(self):
        '''Divide each row by its diagonal entry (the term's own doc count).'''
        for i, doc in enumerate(self.docs):
            row = doc['terms']
            self_count = row[i]
            for j in range(len(row)):
                row[j] = float(row[j]) / self_count
            doc['terms'] = row
if __name__ == '__main__':
    # Demo / smoke test (Python 2 script: uses print statements).
    # Build a tiny 3-document term-frequency matrix ...
    mx = Matrix()
    mx.add_doc(doc_id=1,
               doc_terms=['apple', 'juice', 'fruit'],
               doc_class= '0',
               frequency=True, do_padding=True)
    mx.add_doc(doc_id=2,
               doc_terms=['orange', 'juice', 'fruit'],
               doc_class= '0',
               frequency=True, do_padding=True)
    mx.add_doc(doc_id=3,
               doc_terms=['tomato', 'juice', 'food'],
               doc_class= '0',
               frequency=True, do_padding=True)
    print 'Matrix'
    print mx.vocabulary()
    for doc in mx.docs:
        print doc['terms']
    #print mx
    # ... then derive the term co-occurrence matrix from it.
    mxcc = MatrixCooccurrence(mx)
    print 'MatrixCooccurrence'
    print mxcc.vocabulary()
    for doc in mxcc.docs:
        print doc['id'], doc['terms']
    #print mxcc
    # tf_idf (inherited from Matrix) is used instead of normalize() here.
    print 'MatrixCooccurrence (Normalized)'
    #mxcc.normalize()
    mxcc.tf_idf(do_idf=True)
    print mxcc.vocabulary()
    for doc in mxcc.docs:
        print doc['id'], doc['terms']
|
{"/tests/__init__.py": ["/tests/TestLM.py", "/tests/TestSuperList.py", "/tests/TestPreprocessor.py", "/tests/TestMatrix.py", "/tests/TestMetrics.py", "/tests/TestProgress.py", "/tests/TestAnalysis.py", "/tests/TestEvaluation.py"]}
|
6,114
|
gr33ndata/irlib
|
refs/heads/master
|
/examples/twitter/search.py
|
# Search in tweets
import os, sys
# Adding this to path to be able to import irlib
sys.path.append('../../')
from irlib.preprocessor import Preprocessor
from irlib.matrix import Matrix
def readfiles(fold_path='all-folds/fold1/'):
prep = Preprocessor()
mx = Matrix()
files = os.listdir(fold_path)
for filename in files:
fd = open('%s/%s' % (fold_path, filename), 'r')
file_data = fd.read()
terms = prep.ngram_tokenizer(text=file_data)
mx.add_doc(doc_id=filename, doc_terms=terms,
frequency=True, do_padding=True)
print 'Number of read documents:', len(mx.docs)
print 'Number of read terms', len(mx.terms)
#print mx.terms[0:5], mx.terms[-5:-1]
print mx.terms
print mx.docs
def search():
    '''Interactive search prompt; exits on empty input.

    NOTE(review): the search itself is not implemented -- non-empty
    queries are silently ignored.
    '''
    while True:
        q = raw_input("Search: ")
        q = q.strip()
        if not q:
            return
        else:
            #search here
            pass
def main():
    '''Entry point: load the corpus (interactive search is disabled).'''
    readfiles()
    #search()

if __name__ == "__main__":
    main()
|
{"/tests/__init__.py": ["/tests/TestLM.py", "/tests/TestSuperList.py", "/tests/TestPreprocessor.py", "/tests/TestMatrix.py", "/tests/TestMetrics.py", "/tests/TestProgress.py", "/tests/TestAnalysis.py", "/tests/TestEvaluation.py"]}
|
6,115
|
gr33ndata/irlib
|
refs/heads/master
|
/tests/__init__.py
|
import os
import sys
import irlib
from tests.TestLM import TestLM
from tests.TestSuperList import TestSuperList
from tests.TestPreprocessor import TestPreprocessor
from tests.TestMatrix import TestMatrix
from tests.TestMetrics import TestMetrics
from tests.TestProgress import TestProgress
from tests.TestAnalysis import TestAnalysis
from tests.TestEvaluation import TestEvaluation
|
{"/tests/__init__.py": ["/tests/TestLM.py", "/tests/TestSuperList.py", "/tests/TestPreprocessor.py", "/tests/TestMatrix.py", "/tests/TestMetrics.py", "/tests/TestProgress.py", "/tests/TestAnalysis.py", "/tests/TestEvaluation.py"]}
|
6,116
|
gr33ndata/irlib
|
refs/heads/master
|
/tests/TestPreprocessor.py
|
from unittest import TestCase
from irlib.preprocessor import Preprocessor, my_nltk
class TestPreprocessor(TestCase):
    '''Unit tests for irlib.preprocessor.Preprocessor.'''

    def setUp(self):
        pass

    def test_term2ch(self):
        # term2ch splits a term into its individual characters.
        p = Preprocessor()
        charlist = p.term2ch('help')
        self.assertEqual(charlist, ['h', 'e', 'l', 'p'])

    def test_stemmer(self):
        # NOTE(review): assertTrue(False, ...) makes the test FAIL when
        # NLTK is absent; self.skipTest() would express the intent better.
        p = Preprocessor(stem=True)
        stemmed = p.stemmer('running')
        if my_nltk:
            self.assertEqual(stemmed,'run')
        else:
            self.assertTrue(False,'NLTK is not installed')

    def test_stemmer_lower(self):
        # Lowercasing plus stemming: 'Running' -> 'run'.
        p = Preprocessor(lower=True, stem=True)
        stemmed = p.stemmer('Running')
        if my_nltk:
            self.assertEqual(stemmed,'run')
        else:
            self.assertTrue(False,'NLTK is not installed')

    def test_tokenizer_lower(self):
        p = Preprocessor(lower=True, stem=False)
        tokens = p.tokenizer('This is IRLib')
        self.assertEqual(tokens,['this','is','irlib'])

    def test_2gram_tokenizer(self):
        # Bigrams are overlapping word pairs; punctuation is dropped.
        p = Preprocessor(lower=False, stem=False, ngram=2)
        returned_tokens = p.ngram_tokenizer('how do you do?')
        expected_tokens = ['how do', 'do you', 'you do']
        self.assertEqual(returned_tokens, expected_tokens)

    def test_3gram_tokenizer(self):
        p = Preprocessor(lower=False, stem=False, ngram=3)
        returned_tokens = p.ngram_tokenizer('how do you do?')
        expected_tokens = ['how do you', 'do you do']
        self.assertEqual(returned_tokens, expected_tokens)

    def test_is_mention(self):
        # Static helpers classify Twitter-style tokens.
        is_it = Preprocessor.is_mention('@twitter')
        self.assertEqual(is_it, True)
        is_it = Preprocessor.is_mention('#twitter')
        self.assertEqual(is_it, False)

    def test_is_hashtag(self):
        is_it = Preprocessor.is_hashtag('@twitter')
        self.assertEqual(is_it, False)
        is_it = Preprocessor.is_hashtag('#twitter')
        self.assertEqual(is_it, True)

    def test_is_link(self):
        # http, https and bare www forms all count as links.
        is_it = Preprocessor.is_link('hello world')
        self.assertEqual(is_it, False)
        is_it = Preprocessor.is_link('http://www.yahoo.com')
        self.assertEqual(is_it, True)
        is_it = Preprocessor.is_link('https://www.yahoo.com')
        self.assertEqual(is_it, True)
        is_it = Preprocessor.is_link('www.yahoo.com')
        self.assertEqual(is_it, True)
|
{"/tests/__init__.py": ["/tests/TestLM.py", "/tests/TestSuperList.py", "/tests/TestPreprocessor.py", "/tests/TestMatrix.py", "/tests/TestMetrics.py", "/tests/TestProgress.py", "/tests/TestAnalysis.py", "/tests/TestEvaluation.py"]}
|
6,117
|
gr33ndata/irlib
|
refs/heads/master
|
/setup.py
|
from distutils.core import setup

# Packaging metadata for the irlib library (distutils-based).
setup(
    name='irlib',
    version='0.1.1',
    author='Tarek Amr',
    author_email='gr33ndata@yahoo.com',
    url='https://github.com/gr33ndata/irlib',
    packages=['irlib'],
    license='LICENSE.txt',
    # Fixed typo in the user-facing description: 'Inforamtion' -> 'Information'.
    description='Information Retrieval Library',
    long_description=open('README.rst').read()
)
|
{"/tests/__init__.py": ["/tests/TestLM.py", "/tests/TestSuperList.py", "/tests/TestPreprocessor.py", "/tests/TestMatrix.py", "/tests/TestMetrics.py", "/tests/TestProgress.py", "/tests/TestAnalysis.py", "/tests/TestEvaluation.py"]}
|
6,118
|
gr33ndata/irlib
|
refs/heads/master
|
/tests/TestEvaluation.py
|
from unittest import TestCase
from irlib.evaluation import Evaluation
class TestEvaluation(TestCase):
    '''Unit tests for irlib.evaluation.Evaluation.

    Evaluation.ev(predicted, actual) records one classification outcome;
    the tp/fp/fn/tn counters are then queried per class label.
    '''

    def setUp(self):
        pass

    def test_correct_label_list(self):
        # Every label seen on either side of ev() must be reported.
        # NOTE(review): assertItemsEqual is Python 2 only
        # (assertCountEqual in Python 3).
        e = Evaluation()
        e.ev('Apples', 'Oranges')
        e.ev('Melons', 'Bananas')
        expected_labels = ['Apples', 'Oranges', 'Melons', 'Bananas']
        returned_labels = e.get_classes_labels()
        self.assertItemsEqual(returned_labels, expected_labels)

    def test_correct_overall_accuracy(self):
        # 2 of 4 predictions match their actual label.
        e = Evaluation()
        e.ev('Apples' , 'Oranges')
        e.ev('Oranges', 'Oranges')
        e.ev('Apples' , 'Apples')
        e.ev('Oranges', 'Apples')
        expected_accuracy = 0.5
        returned_accuracy = e.overall_accuracy(percent=False)
        self.assertEqual(returned_accuracy, expected_accuracy)

    def test_correct_overall_fp(self):
        # 'Apples' predicted twice when the actual label was different.
        e = Evaluation()
        e.ev('Apples' , 'Oranges')
        e.ev('Apples' , 'Bananas')
        e.ev('Apples' , 'Apples')
        expected_fp = 2
        returned_fp = e.fp('Apples')
        self.assertEqual(returned_fp, expected_fp)

    def test_correct_overall_tp(self):
        e = Evaluation()
        e.ev('Apples' , 'Oranges')
        e.ev('Apples' , 'Apples')
        e.ev('Apples' , 'Apples')
        expected_tp = 2
        returned_tp = e.tp('Apples')
        self.assertEqual(returned_tp, expected_tp)

    def test_correct_overall_fn(self):
        # Actual 'Apples' missed once (predicted 'Bananas').
        e = Evaluation()
        e.ev('Apples' , 'Oranges')
        e.ev('Bananas', 'Apples')
        e.ev('Apples' , 'Apples')
        expected_fn = 1
        returned_fn = e.fn('Apples')
        self.assertEqual(returned_fn, expected_fn)

    def test_correct_overall_tn(self):
        e = Evaluation()
        e.ev('Apples' , 'Oranges')
        e.ev('Apples' , 'Apples')
        expected_tn = 0
        returned_tn = e.tn('Apples')
        self.assertEqual(returned_tn, expected_tn)
|
{"/tests/__init__.py": ["/tests/TestLM.py", "/tests/TestSuperList.py", "/tests/TestPreprocessor.py", "/tests/TestMatrix.py", "/tests/TestMetrics.py", "/tests/TestProgress.py", "/tests/TestAnalysis.py", "/tests/TestEvaluation.py"]}
|
6,119
|
gr33ndata/irlib
|
refs/heads/master
|
/examples/turing chat/qa.py
|
# Using IR to answer your question
# Not so smart question and answer system
import os, sys
import random
# Adding this to path to be able to import irlib
sys.path.append('../../')
from irlib.preprocessor import Preprocessor
from irlib.matrix import Matrix
from irlib.metrics import Metrics
#qa_list = {'id':{'q': 'question', 'a': 'answer')}
class QA:
def __init__(self):
self.file_name = 'qa.txt'
self.qa_list = {}
self.qa_id = 0
self.prep = Preprocessor()
self.mx = Matrix()
self.metric = Metrics()
def randomize(self, a):
for i in range(len(a)):
a[i] = random.randint(0,1)
def readfile(self):
fd = open(self.file_name,'r')
for line in fd.readlines():
line = line.strip().lower().split(':')
if len(line) != 2:
continue
elif line[0] == 'q':
q_line = ' '.join(line[1:])
self.qa_id += 1
self.qa_list[self.qa_id] = {'q': q_line, 'a': ''}
terms = self.prep.ngram_tokenizer(text=q_line)
self.mx.add_doc(doc_id=self.qa_id, doc_terms=terms,
frequency=True, do_padding=True)
elif line[0] == 'a':
a_line = ' '.join(line[1:])
self.qa_list[self.qa_id]['a'] = a_line
#print 'Number of read questions and answers:', len(self.mx.docs)
#print 'Number of read terms', len(self.mx.terms)
def ask(self, q=''):
q_id = 0
q_distance = 99999
terms = self.prep.ngram_tokenizer(text=q)
q_vector = self.mx.query_to_vector(terms, frequency=False)
if sum(q_vector) == 0:
self.randomize(q_vector)
for doc in self.mx.docs:
distance = self.metric.euclid_vectors(doc['terms'], q_vector)
if distance < q_distance:
q_distance = distance
q_id = doc['id']
print 'Tarek:', self.qa_list[q_id]['a']
def main():
    '''Entry point: load the Q/A corpus, then answer questions until the
    user submits an empty line.'''
    qa = QA()
    qa.readfile()
    while True:
        q = raw_input("\nAsk me something: ")
        q = q.strip()
        if not q:
            return
        else:
            qa.ask(q=q)

if __name__ == "__main__":
    main()
|
{"/tests/__init__.py": ["/tests/TestLM.py", "/tests/TestSuperList.py", "/tests/TestPreprocessor.py", "/tests/TestMatrix.py", "/tests/TestMetrics.py", "/tests/TestProgress.py", "/tests/TestAnalysis.py", "/tests/TestEvaluation.py"]}
|
6,122
|
DinarKH/WebServerFlask
|
refs/heads/master
|
/WebServerFlask/__init__.py
|
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from flask_bcrypt import Bcrypt
from flask_login import LoginManager
import redis

# Application factory-less Flask setup: the app, DB, bcrypt, login
# manager and redis client are module-level singletons imported by routes.
app = Flask(__name__)

# SECURITY(review): database credentials, the SECRET_KEY and DEBUG=True
# are hard-coded here; these should come from environment/config, not
# source control.
POSTGRES_URL = "192.168.99.100:5432"
POSTGRES_USER = "postgresuser"
POSTGRES_PW = "123456"
POSTGRES_DB = "servdb"
REDIS_HOST = '127.0.0.1'
REDIS_PORT = 6379
# Name of the redis sorted set holding posts (score = expiry timestamp).
REDIS_SET = 'post_set'
REDIS_POST_TTL = 300 # 5 minutes in seconds
app.config['SECRET_KEY'] = 'b1af4eff3b8bde7a0982fcbc9905fb82'
app.config['DEBUG'] = True
app.config['SQLALCHEMY_DATABASE_URI'] = 'postgresql+psycopg2' \
    '://{user}:{pw}@{url}/{db}'.format(user=POSTGRES_USER,
                                       pw=POSTGRES_PW,
                                       url=POSTGRES_URL,
                                       db=POSTGRES_DB)
db = SQLAlchemy(app)
bcrypt = Bcrypt(app)
login_manager = LoginManager(app)
login_manager.login_view = 'login'
login_manager.login_message_category = 'info'
r_client = redis.Redis(host=REDIS_HOST, port=REDIS_PORT)

# Imported last (after app/db exist) to avoid a circular import.
from WebServerFlask import routes
|
{"/WebServerFlask/routes.py": ["/WebServerFlask/__init__.py"]}
|
6,123
|
DinarKH/WebServerFlask
|
refs/heads/master
|
/WebServerFlask/routes.py
|
from flask import render_template, url_for, redirect, flash, request
from .forms import RegistationForm, LoginForm, PostForm
from .models import User
from WebServerFlask import app, bcrypt, db, r_client, REDIS_SET, REDIS_POST_TTL
from flask_login import login_user, current_user, logout_user, login_required
import datetime, time
@app.route('/')
def home():
    '''Public landing page.'''
    return render_template('home.html')
@app.route('/posts/', methods=['GET', 'POST'])
@login_required
def posts_page():
    '''
    Show posts from redis cache and delete.

    Posts live in a redis sorted set whose score is their expiry
    timestamp; expired entries are purged on every request. A POST
    request deletes the named post explicitly.
    '''
    dt = datetime.datetime.now()
    curr_time = time.mktime(dt.timetuple())
    r_client.zremrangebyscore(REDIS_SET, min='-inf', max=curr_time)  # Delete old posts
    if request.method == 'POST':
        r_client.zrem(REDIS_SET, request.values.get('post_name'))  # Delete redis post by name
        return redirect(url_for('posts_page'))
    redis_posts = r_client.zrange(REDIS_SET, 0, -1)  # Get redis posts
    return render_template('post.html', redis_posts=redis_posts)
@app.route('/register/', methods=['GET', 'POST'])
def register():
    '''Create a new account; on success redirect to the login page.

    Already-authenticated users are sent straight to the posts page.
    '''
    if current_user.is_authenticated:
        return redirect(url_for('posts_page'))
    form = RegistationForm()
    if form.validate_on_submit():
        # Store only the bcrypt hash, never the raw password.
        hashed_password = bcrypt.generate_password_hash(form.password.data).decode('utf-8')
        user = User(username=form.username.data, email=form.email.data,
                    password=hashed_password)
        db.session.add(user)
        db.session.commit()
        # Fixed user-facing grammar: 'Account create' -> 'Account created'.
        flash('Account created', 'success')
        return redirect(url_for('login'))
    return render_template('register.html', form=form)
@app.route('/login/', methods=['GET', 'POST'])
def login():
    '''Authenticate a user and redirect to ?next= or the posts page.'''
    if current_user.is_authenticated:
        return redirect(url_for('posts_page'))
    form = LoginForm()
    if form.validate_on_submit():
        user = User.query.filter_by(username=form.username.data).first()
        # Same failure message whether the user is unknown or the
        # password is wrong (avoids username enumeration).
        if user and bcrypt.check_password_hash(user.password, form.password.data):
            flash('You log in system', 'success')
            login_user(user, remember=True)
            next_page = request.args.get('next')
            # NOTE(review): next_page is redirected to unvalidated --
            # open-redirect risk; confirm it is same-origin.
            return redirect(next_page) if next_page else redirect(url_for('posts_page'))
        else:
            flash('Invalid data', 'danger')
    return render_template('login.html', form=form)
@app.route('/logout/')
def logout():
    '''End the session. Redirecting to the login_required posts page
    bounces the now-anonymous user to the login view.'''
    logout_user()
    return redirect(url_for('posts_page'))
@app.route('/post/new/', methods=['GET', 'POST'])
@login_required
def post_new():
    '''
    Create new post for redis.

    The post content is stored as a member of the REDIS_SET sorted set
    with its expiry timestamp (now + REDIS_POST_TTL) as the score, so
    posts_page can purge it once expired.
    '''
    form = PostForm()
    if form.validate_on_submit():
        dt = datetime.datetime.now()
        curr_time = time.mktime(dt.timetuple())
        # Create post with time in value = current time + 5 minutes
        r_client.zadd(REDIS_SET, {form.content.data: curr_time + REDIS_POST_TTL})
        flash('Post was created', 'success')
        return redirect(url_for('posts_page'))
    return render_template('new_post.html', form=form)
|
{"/WebServerFlask/routes.py": ["/WebServerFlask/__init__.py"]}
|
6,137
|
mantasarul/Search-Validation
|
refs/heads/main
|
/search_app/views.py
|
from django.http.response import HttpResponseRedirect
from django.views.generic.base import TemplateView
from search_app.models import Index
from search_app.forms import IndexForm
from django.shortcuts import render
from django.views import View
from django.views.generic.edit import CreateView, FormView
# Create your views here.
"""
class SearchView(View):
def get(self, request):
form = IndexForm()
return render(request, 'search_app/search_page.html', {
"form": form
})
def post(self, request):
form = IndexForm(request.POST)
if form.is_valid():
form.save()
return HttpResponseRedirect("/thank-you")
return render(request, "search_app/search_page.html", {
'form': form
})
"""
""" class SearchView(CreateView):
model = Index
fields = '__all__' """
class SearchView(FormView):
    '''Search page: saves a new Index entry unless its link already exists,
    in which case the user is redirected to the error page.'''
    template_name = 'search_app/search_page.html'
    form_class = IndexForm
    success_url = '/thank-you'

    def form_valid(self, form):
        # Let the database check for duplicates instead of scanning every
        # row in Python; this also removes the original's unreachable
        # 'break' that followed a 'return'.
        if Index.objects.filter(link=form.cleaned_data['link']).exists():
            return HttpResponseRedirect("/error.html")
        form.save()
        return super().form_valid(form)
class ThankYouView(TemplateView):
    '''Static confirmation page shown after a successful submission.'''
    template_name = "search_app/thank_you.html"

class ErrorView(TemplateView):
    '''Static page shown when a duplicate link is submitted.'''
    template_name = "search_app/error.html"
"""
if form.cleaned_data['title'] in Index.objects.all():
print("Exist")
else:
print("doesn't exist")
for index in index_object:
if index.title == form.cleaned_data['title']:
print("exist")
else:
print("doesn't exist")
pass
#print(index.link)
print(form.cleaned_data['title'])
"""
|
{"/search_app/views.py": ["/search_app/models.py", "/search_app/forms.py"], "/search_app/forms.py": ["/search_app/models.py"], "/search_app/admin.py": ["/search_app/models.py"]}
|
6,138
|
mantasarul/Search-Validation
|
refs/heads/main
|
/search_app/urls.py
|
from django import views
from django.urls import path
from . import views
# URL routes for search_app.
# NOTE(review): 'from django import views' above is immediately shadowed
# by 'from . import views' and could be removed.
urlpatterns = [
    path('', views.SearchView.as_view(), name='search_url'),
    path('thank-you', views.ThankYouView.as_view()),
    path('error.html', views.ErrorView.as_view()),
]
|
{"/search_app/views.py": ["/search_app/models.py", "/search_app/forms.py"], "/search_app/forms.py": ["/search_app/models.py"], "/search_app/admin.py": ["/search_app/models.py"]}
|
6,139
|
mantasarul/Search-Validation
|
refs/heads/main
|
/search_app/forms.py
|
from django.forms import fields
from search_app.models import Index
from django import forms
class IndexForm(forms.ModelForm):
    '''ModelForm over Index exposing title and link with custom labels.'''
    class Meta:
        model = Index
        # NOTE(review): fields = '__all__' exposes every model field;
        # listing fields explicitly is the safer convention.
        fields = '__all__'
        labels = {
            'title': 'Title',
            'link': 'Links',
        }
"""
class IndexForm(forms.Form):
title = forms.CharField(max_length= 150)
link = forms.CharField(max_length=250)
"""
|
{"/search_app/views.py": ["/search_app/models.py", "/search_app/forms.py"], "/search_app/forms.py": ["/search_app/models.py"], "/search_app/admin.py": ["/search_app/models.py"]}
|
6,140
|
mantasarul/Search-Validation
|
refs/heads/main
|
/search_app/admin.py
|
from django.contrib import admin
from .models import Index
# Register your models here.
class IndexAdmin(admin.ModelAdmin):
    '''Admin changelist for Index showing title and link columns.'''
    list_display = ('title', 'link')

    # NOTE(review): ModelAdmin does not read an inner Meta class --
    # verbose_name_plural here has no effect. It belongs on the Index
    # model's Meta instead.
    class Meta:
        verbose_name_plural = 'Index'

admin.site.register(Index, IndexAdmin)
|
{"/search_app/views.py": ["/search_app/models.py", "/search_app/forms.py"], "/search_app/forms.py": ["/search_app/models.py"], "/search_app/admin.py": ["/search_app/models.py"]}
|
6,141
|
mantasarul/Search-Validation
|
refs/heads/main
|
/search_app/models.py
|
from django.db import models
from django.db.models.base import Model
# Create your models here.
class Index(models.Model):
    '''A searchable entry: a human-readable title plus its link.'''
    title = models.CharField(max_length=150)
    link = models.CharField(max_length=200)   # uniqueness enforced in SearchView, not the DB
    #id = models.BigAutoField(primary_key=True)
|
{"/search_app/views.py": ["/search_app/models.py", "/search_app/forms.py"], "/search_app/forms.py": ["/search_app/models.py"], "/search_app/admin.py": ["/search_app/models.py"]}
|
6,143
|
debasmitadasgupta/Assignment
|
refs/heads/master
|
/mysite/todo/views.py
|
# todo/views.py
# from django.shortcuts import render
from rest_framework import viewsets # add this
from .serializers import TodoSerializer,BucketSerializer # add this
# from .models import Todo # add this
#
# add this
# queryset = Todo.objects.all() # add this
from django.db import connection
from django.shortcuts import render
from .models import Todo,Bucket,User
from rest_framework import status
from rest_framework.decorators import api_view
# Create your views here.
from django.http import HttpResponse, HttpRequest
from django.core import serializers
import json
def index(request):
    '''Plain-text landing page for the todo app.'''
    return HttpResponse("Hello, world. You're at the todos index.")
def get_todos(request):
    '''Return all todo rows as a JSON array.'''
    qs = Todo.objects.raw("SELECT * FROM todo_todo")
    serializer = TodoSerializer(qs, many=True)
    # print(serializer.data)
    # qs_json = serializers.serialize('json', qs)
    # Round-trip through json to get plain dicts/lists.
    output_dict = json.loads(json.dumps(serializer.data))
    print(output_dict)
    # Fixed: the original passed 200 as HttpResponse's second positional
    # argument, which is content_type, not status.
    return HttpResponse(json.dumps(output_dict), content_type='application/json')
def add_todo(request):
    '''Insert a todo row from a JSON body:
    {title, description, completed, bucket_id}.
    '''
    params = json.loads(request.body)
    print(params)
    title = params['title']
    description = params['description']
    completed = params['completed']
    bucket_id = int(params['bucket_id'])
    with connection.cursor() as cursor:
        # Parameterized query: the original interpolated user input
        # directly into the SQL string (SQL injection).
        cursor.execute(
            "INSERT INTO todo_todo (title,description,completed,bucket_id) "
            "VALUES (%s,%s,%s,%s)",
            [title, description, completed, bucket_id])
    # status is a keyword argument; positionally 200 would be content_type.
    return HttpResponse("Success", status=200)
def get_buckets(request):
    '''Return all bucket rows as a JSON array.'''
    qs = Bucket.objects.raw("SELECT * FROM todo_bucket")
    serializer = BucketSerializer(qs, many=True)
    output_dict = json.loads(json.dumps(serializer.data))
    print(output_dict)
    # Fixed: the original passed 200 as HttpResponse's second positional
    # argument, which is content_type, not status.
    return HttpResponse(json.dumps(output_dict), content_type='application/json')
@api_view(['POST'])
def add_bucket(request):
    '''Insert a bucket row from a JSON body: {bucket_name}.'''
    params = json.loads(request.body)
    print(params)
    bucket_name = params['bucket_name']
    with connection.cursor() as cursor:
        # Parameterized query instead of str.format (SQL injection fix).
        cursor.execute("INSERT INTO todo_bucket (bucket_name) VALUES (%s)",
                       [bucket_name])
    return HttpResponse("Success", status=200)
@api_view(['PUT'])
def update_todo(request, todo_id):
    '''Update the todo row identified by todo_id from a JSON body:
    {title, description, completed, bucket_id}.
    '''
    params = json.loads(request.body)
    print(params)
    title = params['title']
    description = params['description']
    completed = params['completed']
    bucket_id = int(params['bucket_id'])
    with connection.cursor() as cursor:
        # Parameterized query instead of str.format (SQL injection fix).
        cursor.execute(
            "UPDATE todo_todo SET title= %s,description= %s,completed= %s,"
            "bucket_id = %s WHERE id = %s",
            [title, description, completed, bucket_id, todo_id])
    return HttpResponse("Success", status=200)
@api_view(['DELETE'])
def delete_todo(request, todo_id):
    '''Delete the todo row identified by todo_id.'''
    with connection.cursor() as cursor:
        # Parameterized query instead of str.format (SQL injection fix).
        cursor.execute("DELETE FROM todo_todo WHERE id = %s", [todo_id])
    return HttpResponse("Success", status=200)
@api_view(['POST'])
def save_userInfo(request):
    '''Insert a user row from a JSON body: {email, password, browser}.

    SECURITY(review): the password is stored in plaintext; it should be
    hashed (e.g. django.contrib.auth.hashers) before insertion.
    '''
    params = json.loads(request.body)
    print(params)
    email = params['email']
    password = params['password']
    browser = params['browser']
    with connection.cursor() as cursor:
        # Parameterized query instead of str.format (SQL injection fix).
        cursor.execute(
            "INSERT INTO todo_user (email,password,browser) VALUES (%s,%s,%s)",
            [email, password, browser])
    return HttpResponse("Success", status=200)
|
{"/mysite/todo/views.py": ["/mysite/todo/serializers.py", "/mysite/todo/models.py"], "/mysite/todo/serializers.py": ["/mysite/todo/models.py"]}
|
6,144
|
debasmitadasgupta/Assignment
|
refs/heads/master
|
/mysite/todo/models.py
|
# todo/models.py
from django.db import models
# Create your models here.
class Bucket(models.Model):
    '''A named grouping that todos belong to.'''
    bucket_name = models.CharField(max_length=100)

# add this
class Todo(models.Model):
    '''A single task; deleting its Bucket cascades to its todos.'''
    title = models.CharField(max_length=120)
    description = models.TextField()
    completed = models.BooleanField(default=False)
    bucket = models.ForeignKey(Bucket, on_delete=models.CASCADE)

class User(models.Model):
    '''Captured login info.

    SECURITY(review): password is stored as plain text -- it should be
    hashed, and email should use EmailField with a uniqueness constraint.
    '''
    email = models.CharField(max_length=120)
    password = models.TextField()
    browser = models.TextField()
|
{"/mysite/todo/views.py": ["/mysite/todo/serializers.py", "/mysite/todo/models.py"], "/mysite/todo/serializers.py": ["/mysite/todo/models.py"]}
|
6,145
|
debasmitadasgupta/Assignment
|
refs/heads/master
|
/mysite/todo/urls.py
|
from django.urls import path
from . import views
# URL routes for the todo app: CRUD endpoints over todos and buckets,
# plus a user-info capture endpoint.
urlpatterns = [
    path('', views.index, name='index'),
    # path('<int:question_id>/', views.detail, name='detail'),
    # # ex: /polls/5/results/
    # path('<int:question_id>/results/', views.results, name='results'),
    # ex: /polls/5/vote/
    # path('<int:question_id>/vote/', views.vote, name='vote'),
    path('todos', views.get_todos, name='getTodos'),
    path('buckets', views.get_buckets, name='buckets'),
    path('addTodo', views.add_todo, name='addTodos'),
    path('addBucket', views.add_bucket, name='addBucket'),
    path('updateTodo/<int:todo_id>/', views.update_todo, name='updateBucket'),
    path('deleteTodo/<int:todo_id>',views.delete_todo, name = 'deleteTodo'),
    path('saveUserInfo',views.save_userInfo, name = 'SaveUserInfo')
]
|
{"/mysite/todo/views.py": ["/mysite/todo/serializers.py", "/mysite/todo/models.py"], "/mysite/todo/serializers.py": ["/mysite/todo/models.py"]}
|
6,146
|
debasmitadasgupta/Assignment
|
refs/heads/master
|
/mysite/polls/views.py
|
from django.shortcuts import render
from .models import Question
# Create your views here.
from django.http import HttpResponse
def index(request):
    '''Plain-text landing page for the polls app.'''
    return HttpResponse("Hello, world. You're at the polls index.")

def detail(request, question_id):
    '''Placeholder detail page; echoes the question id.'''
    return HttpResponse("You're looking at question %s." % question_id)

def results(request, question_id):
    '''Placeholder results page; echoes the question id.'''
    response = "You're looking at the results of question %s."
    return HttpResponse(response % question_id)

def vote(request, question_id):
    '''Placeholder vote endpoint; echoes the question id.'''
    return HttpResponse("You're voting on question %s." % question_id)

#This is the simplest view possible in Django. To call the view, we need to map it to a URL - and for this we need a URLconf.
#To create a URLconf in the polls directory, create a file called urls.py
|
{"/mysite/todo/views.py": ["/mysite/todo/serializers.py", "/mysite/todo/models.py"], "/mysite/todo/serializers.py": ["/mysite/todo/models.py"]}
|
6,147
|
debasmitadasgupta/Assignment
|
refs/heads/master
|
/mysite/todo/serializers.py
|
# todo/serializers.py
from rest_framework import serializers
from .models import Todo,Bucket
class TodoSerializer(serializers.ModelSerializer):
    '''Serializes Todo rows, exposing the bucket as its raw bucket_id.'''
    class Meta:
        model = Todo
        fields = ['id', 'title', 'description', 'completed','bucket_id']

class BucketSerializer(serializers.ModelSerializer):
    '''Serializes Bucket rows (id and name only).'''
    class Meta:
        model = Bucket
        fields = ['id','bucket_name']
|
{"/mysite/todo/views.py": ["/mysite/todo/serializers.py", "/mysite/todo/models.py"], "/mysite/todo/serializers.py": ["/mysite/todo/models.py"]}
|
6,150
|
aclifford3/eq-deeps-parser
|
refs/heads/master
|
/eq_deeps_parser.py
|
"""
This class uses a log puller to read new combat logs every second. It keeps track of character
performances for each fight.
"""
import configparser
import logging
import re
import time
import visualize
from log_puller import LogPuller
logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', datefmt='%d-%b-%y %H:%M:%S',
level=logging.INFO)
class FightReport:
    '''Fight object containing a list of fight participants and their contributions'''

    def __init__(self):
        # participant name -> ContributionAggregate for this fight
        self.contribution_aggregates = {}
        # flips to True once a fight-end (or next fight-start) log is seen
        self.is_complete = False
class ContributionAggregate:
    '''Total contribution for a given participant for a given fight'''

    def __init__(self, participant):
        self.participant = participant
        # running totals, updated as contributions stream in
        self.damage_dealt = self.healing_dealt = 0
class Contribution:
    '''A single contribution for a participant to be added to aggregate contribution'''

    def __init__(self, participant, target, damage_dealt, healing_dealt):
        # who acted, and how much damage/healing they produced this event
        self.participant = participant
        self.damage_dealt = damage_dealt
        self.healing_dealt = healing_dealt
        # whom the action was applied to
        self.target = target
def is_combat_log(log):
    '''Returns true if log is a combat log'''
    # Combat-relevant lines: fight start, fight end, or a damage report.
    return (is_new_fight(log)
            or is_fight_complete(log)
            or 'points of damage' in log)
def is_new_fight(combat_log):
    '''Returns true if a new fight has begun'''
    return combat_log.find('You have entered combat...') != -1
def is_fight_complete(combat_log):
    '''Returns true if the fight is completed'''
    return combat_log.find('You are no longer in combat.') != -1
def is_damage_shield_message(combat_log):
    '''We treat damage shield as a separate participant, if log is for damage shield damage
    returns True
    '''
    return combat_log.find('was hit by non-melee for') != -1
def get_participant(log_message, verb_start_pos):
    '''Gets the participant that is performing the combat action'''
    # Everything before the verb (minus the separating space) is the actor.
    return log_message[:verb_start_pos - 1]
def get_damage_shield_contribution(log_message, verb_start_pos, amount):
    '''Get contribution of damage shield'''
    # The mob named before the verb is the one the shield damaged.
    victim = log_message[:verb_start_pos - 1]
    return Contribution('Damage Shield', victim, amount, 0)
def get_healing_contribution(log_message, amount):
    '''Get participant healing contribution'''
    # '<healer> has/have healed <target> for <amount> ...'
    healed_match = re.search('((has|have) healed)', log_message)
    healer = get_participant(log_message, healed_match.start())
    target_end = log_message.find(' for ')
    target = log_message[healed_match.end() + 1:target_end]
    return Contribution(healer, target, 0, amount)
def get_damage_contribution(log_message, amount, verb_match):
    '''Get participant damage contribution'''
    # '<attacker> <verb> <target> for <amount> points of damage.'
    attacker = get_participant(log_message, verb_match.start())
    target_end = log_message.find(' for ')
    victim = log_message[verb_match.end() + 1:target_end]
    return Contribution(attacker, victim, amount, 0)
def is_healing_log(log_message):
    '''Returns true if log message is a healing event'''
    return re.search('((has|have) healed)', log_message) is not None
def get_contribution(combat_log):
    '''Converts raw combat log into a more usable Contribution object.

    Raises on malformed lines: IndexError if '] ' or ' for ' is absent,
    ValueError if the amount is not an integer, AttributeError if no
    attack verb matches when one is needed.
    '''
    # Split timestamp from the rest of the log message
    log_message = combat_log.split('] ')[1]
    # The amount is the first token after ' for ' ('... for 15 points ...').
    amount = int(((log_message.split(' for ')[1]).split(' '))[0])
    # Locate the attack verb to use in figuring out the participant and target
    verb_match = re.search('(was )?(bite[s]?|bash[es]?|strike[s]?|slash[es]?|punch[es]?|hit[s]?'
                           '|pierce[s]?|crush[es]?|gore[s]?|kick[s]?|slap[s]?|claw[s]?|maul[s]?'
                           '|shoot[s]?|sting[s]?)',
                           log_message)
    # Order matters: damage-shield lines also match the verb regex
    # ('was hit'), so they must be recognised before the damage path.
    if is_damage_shield_message(log_message):
        return get_damage_shield_contribution(log_message, verb_match.start(), amount)
    if is_healing_log(log_message):
        return get_healing_contribution(log_message, amount)
    return get_damage_contribution(log_message, amount, verb_match)
def update_fight_contribution(contribution_aggregates, combat_log):
    '''Updates a participant's fight contribution based on combat log.

    Parsing failures are logged and skipped so one malformed log line
    does not kill the parser loop.
    '''
    try:
        contribution = get_contribution(combat_log)
    except (KeyError, IndexError, ValueError, AttributeError):
        # get_contribution actually raises AttributeError (no verb
        # matched), IndexError (missing '] '/' for ' segment) or
        # ValueError (non-integer amount); the original caught only
        # KeyError, which nothing in the parse path raises, so malformed
        # logs crashed the loop instead of being skipped.
        logging.exception('Failed to get combat event for log: %s \n ', combat_log)
        return
    logging.debug("Created contribution for actor %s", contribution.participant)
    # Fold the event into the participant's running totals.
    aggregate = contribution_aggregates.get(contribution.participant)
    if aggregate is None:
        aggregate = ContributionAggregate(contribution.participant)
    aggregate.damage_dealt += contribution.damage_dealt
    aggregate.healing_dealt += contribution.healing_dealt
    contribution_aggregates[contribution.participant] = aggregate
def process_combat_logs(logs, fight_reports):
    '''Given new combat logs, create fight reports.

    Resumes the most recent report (popped off fight_reports) and pushes
    it back when done, so repeated calls keep extending the same fight.
    '''
    if len(fight_reports) > 0:
        current_fight_report = fight_reports.pop()
    else:
        current_fight_report = FightReport()
    for combat_log in logs:
        if is_new_fight(combat_log):
            if not current_fight_report.is_complete:
                # NOTE(review): a fight-start while the current fight is
                # still open only marks it complete; the new fight's
                # events then accumulate into the SAME report object --
                # confirm this merging is intended.
                current_fight_report.is_complete = True
            else:
                current_fight_report = FightReport()
            # NOTE(review): the current report is appended again after
            # the loop, so it can end up in fight_reports twice.
            fight_reports.append(current_fight_report)
        elif is_fight_complete(combat_log):
            current_fight_report.is_complete = True
        else:
            update_fight_contribution(current_fight_report.contribution_aggregates, combat_log)
    fight_reports.append(current_fight_report)
def filter_combat_logs(logs):
    '''Filter out logs other than combat logs'''
    return [log for log in logs if is_combat_log(log)]
def get_log_file_path():
    '''Gets the EQ log file path from configuration file.

    Reads ./config.ini relative to the current working directory and
    returns its DEFAULT section's COMBAT_LOG_PATH value (raises KeyError
    if the key is missing).
    '''
    config = configparser.ConfigParser()
    config.read('./config.ini')
    return config['DEFAULT']['COMBAT_LOG_PATH']
if __name__ == '__main__':
    # Main polling loop: every 4 seconds pull new log lines, fold them
    # into fight reports, keep only the most recent report, and plot it.
    path = get_log_file_path()
    logging.info("Starting parser.")
    fights_reports = []
    log_puller = LogPuller(path)
    MAX_FIGHTS_TO_RETAIN = 1
    while True:
        new_logs = log_puller.pull_new_logs()
        combat_logs = filter_combat_logs(new_logs)
        process_combat_logs(combat_logs, fights_reports)
        # Drop oldest reports beyond the retention cap.
        while len(fights_reports) > MAX_FIGHTS_TO_RETAIN:
            fights_reports.pop(0)
        for fight_report in fights_reports:
            logging.debug('Found %s total fights', len(fights_reports))
            logging.debug('Found fight report %s', fight_report.contribution_aggregates)
        visualize.plot(fights_reports[0])
        time.sleep(4)
|
{"/eq_deeps_parser.py": ["/visualize.py", "/log_puller.py"], "/test_eq_deeps_parser.py": ["/eq_deeps_parser.py"]}
|
6,151
|
aclifford3/eq-deeps-parser
|
refs/heads/master
|
/test_eq_deeps_parser.py
|
'''Tests for eq_deeps_parser.py'''
import unittest
import eq_deeps_parser
class TestEqDeepsParser(unittest.TestCase):
    '''Tests for eq_deeps_parser.py'''

    def _assert_same_contribution(self, expected, actual):
        '''Assert two Contribution objects match field by field.'''
        self.assertEqual(expected.participant, actual.participant)
        self.assertEqual(expected.target, actual.target)
        self.assertEqual(expected.damage_dealt, actual.damage_dealt)
        self.assertEqual(expected.healing_dealt, actual.healing_dealt)

    def test_get_contribution_from_melee_dmg_log(self):
        '''Get a contribution when combat log is a damage dealt event'''
        log = '[Tue Jul 21 05:12:05 2020] You kick Sssszzz the Stone for 1 point of damage.'
        actual = eq_deeps_parser.get_contribution(log)
        expected = eq_deeps_parser.Contribution('You', 'Sssszzz the Stone', 1, 0)
        self._assert_same_contribution(expected, actual)

    def test_get_contribution_from_healing_log(self):
        '''Get contribution when combat log is healing event'''
        log = '[Tue Jul 21 05:12:05 2020] Wocas has healed you for 15 points of damage.'
        actual = eq_deeps_parser.get_contribution(log)
        expected = eq_deeps_parser.Contribution('Wocas', 'you', 0, 15)
        self._assert_same_contribution(expected, actual)

    def test_get_contribution_damage_shield_log(self):
        '''In different logs participant capitalization is different'''
        log = '[Fri Jul 24 19:32:04 2020] a belligerent beach bum was hit by non-melee for 7 points of damage.'
        actual = eq_deeps_parser.get_contribution(log)
        expected = eq_deeps_parser.Contribution('Damage Shield', 'a belligerent beach bum', 7, 0)
        self._assert_same_contribution(expected, actual)
# Allow running this test module directly: python test_eq_deeps_parser.py
if __name__ == '__main__':
    unittest.main()
|
{"/eq_deeps_parser.py": ["/visualize.py", "/log_puller.py"], "/test_eq_deeps_parser.py": ["/eq_deeps_parser.py"]}
|
6,152
|
aclifford3/eq-deeps-parser
|
refs/heads/master
|
/visualize.py
|
'''Creates visualizations of fight reports'''
import logging
import matplotlib.pyplot as plt
import pandas as pd
def plot(fight_report):
    '''Plots fight report on a horizontal bar graph'''
    aggregates = fight_report.contribution_aggregates
    if not aggregates:
        logging.debug('Nothing to plot')
        return
    rows = []
    participants = []
    # one row of [damage, healing] per participant
    for participant, performance in aggregates.items():
        rows.append([performance.damage_dealt, performance.healing_dealt])
        participants.append(participant)
    frame = pd.DataFrame(rows, columns=['Damage', 'Healing'], index=participants)
    frame = frame.sort_values(by=['Damage'])
    axes = frame.plot.barh()
    axes.set_xlabel('')
    plt.show()
|
{"/eq_deeps_parser.py": ["/visualize.py", "/log_puller.py"], "/test_eq_deeps_parser.py": ["/eq_deeps_parser.py"]}
|
6,153
|
aclifford3/eq-deeps-parser
|
refs/heads/master
|
/log_puller.py
|
import csv
import logging
"""
This class pulls logs from a log file. It is given a file path and starts reading from the end of the log file.
"""
def get_starting_line(path):
    """Count the rows already present in the log file at *path*."""
    with open(path) as log_file:
        total_rows = sum(1 for _ in csv.reader(log_file))
        logging.debug("Starting on line %s", total_rows)
        return total_rows
class LogPuller:
    """Incrementally reads an EQ log file from disk.

    On construction the puller skips everything already in the file, so
    only lines appended afterwards are ever returned.
    """
    def __init__(self, path):
        self.path = path
        # start past the current end of file: old events are ignored
        self.last_processed_log_line = get_starting_line(path)

    def pull_new_logs(self):
        """Return the raw log lines appended since the previous call."""
        new_entries = []
        with open(self.path) as log_file:
            reader = csv.reader(log_file)
            for line_number, row in enumerate(reader, start=1):
                # skip empty rows and everything already handed out
                if row and line_number > self.last_processed_log_line:
                    self.last_processed_log_line = line_number
                    new_entries.append(row[0])
        return new_entries
|
{"/eq_deeps_parser.py": ["/visualize.py", "/log_puller.py"], "/test_eq_deeps_parser.py": ["/eq_deeps_parser.py"]}
|
6,193
|
DevanshSoni/RESTAPI_USING_Django
|
refs/heads/master
|
/REST_api_UsingDjango/tech_youth_api/techAPI/webapp/migrations/0005_auto_20190806_1216.py
|
# Generated by Django 2.2.3 on 2019-08-06 06:46
from django.db import migrations, models
class Migration(migrations.Migration):
    """Drop the surrogate user_id column and promote emailid to primary key."""
    dependencies = [
        ('webapp', '0004_auto_20190806_1215'),
    ]
    operations = [
        # user_id goes away; emailid becomes the new primary key below
        migrations.RemoveField(
            model_name='user',
            name='user_id',
        ),
        migrations.AlterField(
            model_name='user',
            name='emailid',
            field=models.CharField(max_length=40, primary_key=True, serialize=False),
        ),
    ]
|
{"/REST_api_UsingDjango/tech_youth_api/techAPI/webapp/interestModel.py": ["/REST_api_UsingDjango/tech_youth_api/techAPI/webapp/userModel.py"], "/REST_api_UsingDjango/tech_youth_api/techAPI/webapp/admin.py": ["/REST_api_UsingDjango/tech_youth_api/techAPI/webapp/interestModel.py", "/REST_api_UsingDjango/tech_youth_api/techAPI/webapp/userModel.py"], "/REST_api_UsingDjango/tech_youth_api/techAPI/webapp/userModel.py": ["/REST_api_UsingDjango/tech_youth_api/techAPI/webapp/interestModel.py"]}
|
6,194
|
DevanshSoni/RESTAPI_USING_Django
|
refs/heads/master
|
/REST_api_UsingDjango/tech_youth_api/techAPI/webapp/migrations/0006_auto_20190806_2208.py
|
# Generated by Django 2.2.3 on 2019-08-06 16:38
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
    """Re-introduce an AutoField user_id primary key (reverting 0005) and
    link interest.temp_id to user; emailid goes back to a unique CharField.
    """
    dependencies = [
        ('webapp', '0005_auto_20190806_1216'),
    ]
    operations = [
        # NOTE(review): default=django.utils.timezone.now here is the one-off
        # placeholder Django asked for when adding a non-null column to
        # existing rows; it is discarded right away (preserve_default=False).
        migrations.AddField(
            model_name='interest',
            name='temp_id',
            field=models.ForeignKey(default=django.utils.timezone.now, max_length=40, on_delete=django.db.models.deletion.PROTECT, to='webapp.user'),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='user',
            name='user_id',
            field=models.AutoField(default=django.utils.timezone.now, max_length=40, primary_key=True, serialize=False),
            preserve_default=False,
        ),
        migrations.AlterField(
            model_name='interest',
            name='id',
            field=models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
        ),
        migrations.AlterField(
            model_name='user',
            name='emailid',
            field=models.CharField(max_length=40, unique=True),
        ),
    ]
|
{"/REST_api_UsingDjango/tech_youth_api/techAPI/webapp/interestModel.py": ["/REST_api_UsingDjango/tech_youth_api/techAPI/webapp/userModel.py"], "/REST_api_UsingDjango/tech_youth_api/techAPI/webapp/admin.py": ["/REST_api_UsingDjango/tech_youth_api/techAPI/webapp/interestModel.py", "/REST_api_UsingDjango/tech_youth_api/techAPI/webapp/userModel.py"], "/REST_api_UsingDjango/tech_youth_api/techAPI/webapp/userModel.py": ["/REST_api_UsingDjango/tech_youth_api/techAPI/webapp/interestModel.py"]}
|
6,195
|
DevanshSoni/RESTAPI_USING_Django
|
refs/heads/master
|
/REST_api_UsingDjango/tech_youth_api/techAPI/webapp/interestModel.py
|
from django.db import models
# import datetime
from .userModel import *
class interest(models.Model):
    """A single area of interest belonging to a user."""
    # Owning user; PROTECT blocks deleting a user that still has interests.
    # NOTE(review): max_length has no meaning on a ForeignKey — confirm and drop.
    temp_id=models.ForeignKey(to=user,max_length=40,on_delete=models.PROTECT)
    # Free-text name of the interest area.
    InterestArea=models.CharField(max_length=40)
    def __str__(self):
        return f"{self.InterestArea}"
|
{"/REST_api_UsingDjango/tech_youth_api/techAPI/webapp/interestModel.py": ["/REST_api_UsingDjango/tech_youth_api/techAPI/webapp/userModel.py"], "/REST_api_UsingDjango/tech_youth_api/techAPI/webapp/admin.py": ["/REST_api_UsingDjango/tech_youth_api/techAPI/webapp/interestModel.py", "/REST_api_UsingDjango/tech_youth_api/techAPI/webapp/userModel.py"], "/REST_api_UsingDjango/tech_youth_api/techAPI/webapp/userModel.py": ["/REST_api_UsingDjango/tech_youth_api/techAPI/webapp/interestModel.py"]}
|
6,196
|
DevanshSoni/RESTAPI_USING_Django
|
refs/heads/master
|
/REST_api_UsingDjango/tech_youth_api/techAPI/webapp/admin.py
|
from django.contrib import admin
# from . models import *
from . interestModel import interest
from . userModel import user
# Register your models here.
# Expose user and interest in the Django admin; the questions model was
# deleted in migration 0003, hence the commented-out registration below.
admin.site.register(user)
admin.site.register(interest)
# admin.site.register(questions)
|
{"/REST_api_UsingDjango/tech_youth_api/techAPI/webapp/interestModel.py": ["/REST_api_UsingDjango/tech_youth_api/techAPI/webapp/userModel.py"], "/REST_api_UsingDjango/tech_youth_api/techAPI/webapp/admin.py": ["/REST_api_UsingDjango/tech_youth_api/techAPI/webapp/interestModel.py", "/REST_api_UsingDjango/tech_youth_api/techAPI/webapp/userModel.py"], "/REST_api_UsingDjango/tech_youth_api/techAPI/webapp/userModel.py": ["/REST_api_UsingDjango/tech_youth_api/techAPI/webapp/interestModel.py"]}
|
6,197
|
DevanshSoni/RESTAPI_USING_Django
|
refs/heads/master
|
/REST_api_UsingDjango/tech_youth_api/techAPI/webapp/userModel.py
|
from django.db import models
# import datetime
from . interestModel import *
# Create your models here.
class user(models.Model):
    """Application user account, identified by an auto-increment user_id."""
    # NOTE(review): max_length has no effect on AutoField — confirm and drop.
    user_id=models.AutoField(primary_key=True,max_length=40)
    username=models.CharField(max_length=40)
    # Unique natural key used by __str__.
    emailid=models.CharField(unique=True,max_length=40)
    # NOTE(review): password is stored as a plain CharField here, bypassing
    # Django's hashed auth.User handling — verify this is intentional.
    password=models.CharField(max_length=40)
    def __str__(self):
        return f"{self.emailid}"
|
{"/REST_api_UsingDjango/tech_youth_api/techAPI/webapp/interestModel.py": ["/REST_api_UsingDjango/tech_youth_api/techAPI/webapp/userModel.py"], "/REST_api_UsingDjango/tech_youth_api/techAPI/webapp/admin.py": ["/REST_api_UsingDjango/tech_youth_api/techAPI/webapp/interestModel.py", "/REST_api_UsingDjango/tech_youth_api/techAPI/webapp/userModel.py"], "/REST_api_UsingDjango/tech_youth_api/techAPI/webapp/userModel.py": ["/REST_api_UsingDjango/tech_youth_api/techAPI/webapp/interestModel.py"]}
|
6,198
|
DevanshSoni/RESTAPI_USING_Django
|
refs/heads/master
|
/REST_api_UsingDjango/tech_youth_api/techAPI/webapp/migrations/0004_auto_20190806_1215.py
|
# Generated by Django 2.2.3 on 2019-08-06 06:45
from django.db import migrations, models
class Migration(migrations.Migration):
    """Drop the unique constraint on user.emailid (plain CharField again)."""
    dependencies = [
        ('webapp', '0003_auto_20190806_1213'),
    ]
    operations = [
        migrations.AlterField(
            model_name='user',
            name='emailid',
            field=models.CharField(max_length=40),
        ),
    ]
|
{"/REST_api_UsingDjango/tech_youth_api/techAPI/webapp/interestModel.py": ["/REST_api_UsingDjango/tech_youth_api/techAPI/webapp/userModel.py"], "/REST_api_UsingDjango/tech_youth_api/techAPI/webapp/admin.py": ["/REST_api_UsingDjango/tech_youth_api/techAPI/webapp/interestModel.py", "/REST_api_UsingDjango/tech_youth_api/techAPI/webapp/userModel.py"], "/REST_api_UsingDjango/tech_youth_api/techAPI/webapp/userModel.py": ["/REST_api_UsingDjango/tech_youth_api/techAPI/webapp/interestModel.py"]}
|
6,199
|
DevanshSoni/RESTAPI_USING_Django
|
refs/heads/master
|
/REST_api_UsingDjango/tech_youth_api/techAPI/webapp/migrations/0003_auto_20190806_1213.py
|
# Generated by Django 2.2.3 on 2019-08-06 06:43
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
    """Rename interest.interest to InterestArea, detach interest from emailid,
    add a CharField user_id primary key, and delete the questions model.
    """
    dependencies = [
        ('webapp', '0002_questions_time_and_date'),
    ]
    operations = [
        migrations.RenameField(
            model_name='interest',
            old_name='interest',
            new_name='InterestArea',
        ),
        migrations.RemoveField(
            model_name='interest',
            name='emailid',
        ),
        # NOTE(review): default=django.utils.timezone.now on a CharField PK is
        # the auto-generated one-off default for existing rows; it is dropped
        # immediately (preserve_default=False).
        migrations.AddField(
            model_name='user',
            name='user_id',
            field=models.CharField(default=django.utils.timezone.now, max_length=40, primary_key=True, serialize=False),
            preserve_default=False,
        ),
        migrations.AlterField(
            model_name='interest',
            name='id',
            field=models.CharField(max_length=40, primary_key=True, serialize=False),
        ),
        migrations.AlterField(
            model_name='user',
            name='emailid',
            field=models.CharField(max_length=40, unique=True),
        ),
        migrations.DeleteModel(
            name='questions',
        ),
    ]
|
{"/REST_api_UsingDjango/tech_youth_api/techAPI/webapp/interestModel.py": ["/REST_api_UsingDjango/tech_youth_api/techAPI/webapp/userModel.py"], "/REST_api_UsingDjango/tech_youth_api/techAPI/webapp/admin.py": ["/REST_api_UsingDjango/tech_youth_api/techAPI/webapp/interestModel.py", "/REST_api_UsingDjango/tech_youth_api/techAPI/webapp/userModel.py"], "/REST_api_UsingDjango/tech_youth_api/techAPI/webapp/userModel.py": ["/REST_api_UsingDjango/tech_youth_api/techAPI/webapp/interestModel.py"]}
|
6,201
|
Julien-V/P5
|
refs/heads/master
|
/main.py
|
#!/usr/bin/python3
# coding : utf-8
from openff import core
def main(debug=False):
    """Build the application core and start its main loop.

    :param debug: when True, core.App prints step-by-step debug information
    """
    app = core.App(debug)
    app.run()
# Script entry point: run without debug output.
if __name__ == "__main__":
    main(False)
|
{"/init_db.py": ["/config.py"], "/openff/core.py": ["/init_db.py", "/config.py"], "/openff/models/db.py": ["/config.py"], "/openff/controllers/product.py": ["/config.py"], "/openff/controllers/controller.py": ["/config.py"]}
|
6,202
|
Julien-V/P5
|
refs/heads/master
|
/init_db.py
|
#!/usr/bin/python3
# coding : utf-8
import json
import requests
import config as cfg
from openff.controllers import product, category
class Populate:
    """Fetch every product of one category from the OpenFoodFacts API
    and insert them in the database.
    """
    def __init__(self, param, db, cat_id):
        """Initialize the populator.

        :param param: url arguments for the request (must contain "tag_0"
            and "page")
        :param db: database cursor/object used by the controllers
        :param cat_id: category id in table Categories
        """
        self.name = param["tag_0"]
        self.param = param
        self.db = db
        self.cat_id = cat_id
        self.url = cfg.url
        self.headers = cfg.headers
        self.count = 0          # total number of products the API reports
        self.result_list = []   # products accumulated across pages

    def get_and_load(self):
        """Perform the HTTP request and decode the returned JSON.

        :return: JSON decoded by json.loads()
        """
        requesting = True
        while requesting:
            try:
                r = requests.get(
                    self.url,
                    headers=self.headers,
                    params=self.param)
                requesting = False
            except requests.exceptions.Timeout:
                print("[!] Timeout.")
            except requests.exceptions.RequestException as e:
                # NOTE(review): non-timeout errors are retried forever here —
                # acceptable for an interactive script, but worth confirming.
                print(f"[!] Error : {e}")
        result = json.loads(r.text)
        return result

    def keep_nutri_g_only(self):
        """Keep only products that carry a nutrition grade."""
        self.result_list = [
            prod for prod in self.result_list
            if "nutrition_grades" in prod.keys()]

    def insert(self):
        """Insert the current category and its products in the database."""
        if not self.result_list:
            print("resultList empty")
            return
        cat_obj = category.Category(self.name, self.db)
        cat_obj.insert()
        print("lastrowid ", self.cat_id)
        for prod in self.result_list:
            prod_obj = product.Product(self.db, self.cat_id)
            prod_obj.get_validate_insert(prod)

    def run(self):
        """Fetch all pages of products, filter them and insert them."""
        result = self.get_and_load()
        if "count" in result.keys():
            self.count = int(result["count"])
        if "products" in result.keys():
            self.result_list += result["products"]
        # BUGFIX: the condition was inverted (`self.count < len(...)`), so
        # pagination never ran and only the first page was ever fetched.
        # Keep requesting pages until we hold every product the API reported.
        while len(self.result_list) < self.count:
            self.param["page"] = int(self.param["page"]) + 1
            result = self.get_and_load()
            products = result.get("products")
            if not products:
                # no (new) products returned: stop to avoid looping forever
                break
            self.result_list += products
        print(len(self.result_list))
        self.keep_nutri_g_only()
        print(len(self.result_list))
        self.insert()
        return
if __name__ == "__main__":
pass
|
{"/init_db.py": ["/config.py"], "/openff/core.py": ["/init_db.py", "/config.py"], "/openff/models/db.py": ["/config.py"], "/openff/controllers/product.py": ["/config.py"], "/openff/controllers/controller.py": ["/config.py"]}
|
6,203
|
Julien-V/P5
|
refs/heads/master
|
/openff/core.py
|
#!/usr/bin/python3
# coding : utf-8
import init_db
from openff.models import db
from openff.models import req_sql
import config as cfg
from openff.controllers import product
class App:
    """Main Class : controllers, views and models used by this class
    >>> app = core.App()
    >>> app.run()
    or with debug information :
    >>> app = core.App(True)
    >>> app.run()
    """
    def __init__(self, debug=False):
        """Class initialization and database loading
        :param debug: debug
        """
        self.debug = debug
        self.running = False
        self.first = False
        # Const
        self.sql = req_sql.sql.copy()
        self.result = []
        self.item = dict()
        self.cat_id = None
        # NOTE(review): self.product (read by update_prod) is only created
        # later in run(), at the 'subs_choice' step — calling update_prod
        # before that raises AttributeError; confirm the step order makes
        # that impossible.
        self.prod = None
        self.prod_s = None
        # Step
        self.step = cfg.step_app.copy()
        self.cwp = self.step
        self.cws = None
        self.old_path = list()
        # DB
        # while not self.db ?
        self.load_db()
    def load_db(self):
        """Load db.DB() class and execute a query to test
        the presence of rows in database
        (if not, core.App.first_run() is called)
        """
        self.db = db.DB()
        self.cursor = self.db.get_cursor()
        # select * from Categories;
        query = self.sql['test']
        self.cursor.execute(query)
        result = self.cursor.fetchall()
        # if Categories return empty set:
        if not result:
            self.first = True
            self.first_run()
        else:
            self.first = False
    def first_run(self):
        """Create a Populate object for each category
        in openff.models.config.cat to get products and insert them in db
        """
        for cat_id, category in enumerate(cfg.cat):
            param = cfg.param.copy()
            param['tag_0'] = category
            pop = init_db.Populate(param, self.cursor, cat_id+1)
            pop.run()
    def display_view(self, view, ctrl, args, r):
        """This method chooses which view is given by the current step
        then create this view with appropriate controller
        :param view: a view (openff.views.menu_models)
        :param ctrl: a controller (openff.controllers.controller)
        :param args: args for the view
        :param r: previous user's answer
        """
        if view == cfg.choice:
            self.view = view(args[0], **args[1])
            self.ctrl = ctrl(self.view, self.cursor)
            rep = self.ctrl.choice_menu(self.debug)
        elif view == cfg.print_line and self.result:
            # clamp the previous answer to a valid index of the result list
            if r >= len(self.result):
                val = self.result[0]
            else:
                val = self.result[r]
            self.view = view(val, **args[1])
            self.ctrl = ctrl(self.view, self.cursor)
            rep = self.ctrl.print_line_db(self.debug)
            # NOTE(review): 777/999 appear to be the back/exit sentinels
            # (cfg.back / cfg.exit) — confirm they match config.py
            if not self.view.substitute and rep not in [777, 999]:
                rep = r
        else:
            rep = 777
        return rep
    def format_display(self, formatting_rules):
        """format result following the list of formatting rules
        of the current step (lambda function)
        :param formatting_rules: list of lambda functions
        :return formatted: result (list) formatted
        >>> app.result
        ["test"]
        >>> fRules = [
        ...     (lambda i: "[*] " + i),
        ...     (lambda i: i + "."),
        ... ]
        >>> app.format_display(fRules)
        "[*] test."
        """
        formatted = []
        for elem in self.result:
            r = elem
            for rule in formatting_rules:
                r = rule(r)
            formatted.append(r)
        return formatted
    def query(self, query, rep=None):
        """This method executes a query on the loaded database
        :param query: SQL string
        :param rep: answer of the previous step, set by default to None
        :return: result of the query
        """
        if isinstance(rep, int) and "%s" in query:
            q = query
            # rep is a 0-based menu index; DB ids are 1-based
            q_args = (rep+1,)
            self.cursor.execute(q, q_args)
        else:
            q = query
            self.cursor.execute(q)
        return self.cursor.fetchall()
    def process_result(self, process_rules):
        """This method processes the result of the query following the list
        of processing rules of the current step
        :param process_rules: list of lambda functions
        :return: return processed data
        """
        p = self
        for rule in process_rules:
            p = rule(p)
        return p
    def update_prod(self):
        """This method updates a product by adding a substitute_id"""
        # NOTE(review): reads self.product, which is only assigned in run()
        # at the 'subs_choice' step — see __init__ note.
        if self.product and self.prod_s:
            subs = self.prod_s
            prod = self.product
            prod_obj = product.Product(self.cursor, prod['category_id'])
            update = {'substitute_id': subs['id']}
            prod_obj.get_validate_insert(prod, update)
        else:
            if self.debug:
                print(f'prod/prod_s not loaded : {self.prod}//{self.prod_s}')
    def run(self):
        """This method handles steps organization by :
        doing the actions required by the current step
        displaying the result
        analysing user answer
        and changing the current step for the next/previous one
        """
        print('core.App running')
        self.running = True
        rep = 0
        old_rep = list()
        previous = False
        while self.running:
            result_formatted = []
            if previous:
                previous = False
                if old_rep:
                    rep = old_rep.pop()
                else:
                    rep = 0
            exit_requested = False
            # param = [view, controller, [list, kwargs]]
            if 'param' not in self.cwp.keys():
                self.cwp_is_intersection = False
                if self.cws is None:
                    key = list(self.cwp.keys())[0]
                    self.cws = key
                param = self.cwp[self.cws].copy()
            else:
                self.cwp_is_intersection = True
                self.cws = None
                param = self.cwp['param'].copy()
            # loading params
            view, ctrl = param[0], param[1]
            if param[2]:
                args = param[2].copy()
            if len(param) == 4:
                param_ext = param[3]
            else:
                param_ext = dict()
            if self.debug:
                print(f"cws: {self.cws}")
                print(f"param: {param}")
                print(f"len(old_path): {len(self.old_path)}")
                print(f"old_rep: {old_rep}")
                print(f"rep: {rep}")
            # query
            if 'query' in param_ext.keys():
                if self.result:
                    self.item = self.result[rep]
                else:
                    self.item = self.item
                if '4query' in param_ext.keys():
                    print(self.item.keys())
                    for_query = param_ext['4query']
                    if for_query in self.item.keys():
                        rep = self.item[for_query]-1
                self.result = self.query(param_ext['query'], rep)
            # process
            if 'process' in param_ext.keys():
                self.result = self.process_result(param_ext['process'])
            # format choice list
            if 'format' in param_ext.keys() and self.result:
                result_formatted = self.format_display(param_ext['format'])
                args[0] = result_formatted
            if self.debug:
                print(f"len(result): {len(self.result)}")
            # display choice list
            if view and ctrl and args:
                rep = self.display_view(view, ctrl, args, rep)
            else:
                rep = None
            # previous and exit
            if rep == int(cfg.back) or rep == int(cfg.exit):
                if rep == int(cfg.back):
                    previous = True
                else:
                    exit_requested = True
            else:
                pass
            # changing view
            if self.cwp_is_intersection and not exit_requested:
                if previous and self.old_path:
                    self.cwp = self.old_path.pop()
                elif not previous:
                    selected_key = list(self.cwp.keys())[rep]
                    self.old_path.append(self.cwp)
                    self.cwp = self.cwp[selected_key]
                    old_rep.append(rep)
            elif not self.cwp_is_intersection and not exit_requested:
                keys = list(self.cwp.keys())
                if self.cws in keys:
                    index_key = keys.index(self.cws)
                    if index_key > 0 and previous:
                        previousKey = keys[index_key-1]
                        self.cws = previousKey
                    elif index_key == 0 and previous:
                        self.cwp = self.old_path.pop()
                    elif index_key < len(keys)-1 and not previous:
                        old_rep.append(rep)
                        next_key = keys[index_key+1]
                        self.cws = next_key
                        if next_key == 'prod_update':
                            self.prod_s = self.result[rep]
                            self.update_prod()
                        elif next_key == 'end':
                            self.cwp = self.step.copy()
                            self.cws = None
                            self.result = list()
                            self.old_path = list()
                            old_rep = list()
                        elif next_key == 'subs_choice':
                            self.product = self.result[rep]
            elif exit_requested:
                self.running = False
                print('Bye')
|
{"/init_db.py": ["/config.py"], "/openff/core.py": ["/init_db.py", "/config.py"], "/openff/models/db.py": ["/config.py"], "/openff/controllers/product.py": ["/config.py"], "/openff/controllers/controller.py": ["/config.py"]}
|
6,204
|
Julien-V/P5
|
refs/heads/master
|
/openff/views/menu_models.py
|
#!/usr/bin/python3
# coding : utf-8
from openff.views import menu_component as menu_c
class ChoiceList:
    """Assembles components from openff.views.menu_component into a view
    that displays a numbered list and then prompts the user for an answer.
    Title, extra lines before the prompt and prompt customization are optional.
    >>> cl = menu_models.ChoiceList(values)
    >>> cl.get()
    """
    def __init__(self, values, **kwargs):
        """Store the values to display and build the view via self.gen().
        :param values: list of values to display
        :param kwargs: optional dict for optional display
        """
        self.values = values
        # falsy placeholder when no optional parameters were given
        self.kwargs = kwargs if kwargs.keys() else False
        self.text = ''
        self.resized_result = None
        # components are displayed in this type order
        self.disp_order = [
            menu_c.Title,
            menu_c.PrintList,
            menu_c.PrintLine,
        ]
        self.gen()

    def process_kwargs(self):
        """Turn the optional kwargs into extra display components."""
        keys = self.kwargs.keys()
        if 'lines' in keys:
            for line in self.kwargs['lines']:
                self.queue.append(menu_c.PrintLine(line))
        if 'title' in keys:
            self.queue.append(menu_c.Title(self.kwargs['title']))
        if 'text' in keys:
            self.text = self.kwargs['text']

    def gen(self):
        """Create the needed menu components and queue them in display order."""
        self.queue = []
        listing = menu_c.PrintList(self.values)
        self.resized_result = listing.result
        self.queue.append(listing)
        if self.kwargs:
            self.process_kwargs()
        # stable re-ordering of the queue by component type
        self.queue = [component
                      for kind in self.disp_order
                      for component in self.queue
                      if isinstance(component, kind)]
        # Input:
        self.prompt_obj = menu_c.Prompt(self.text)

    def get(self):
        """Print every queued component, then prompt the user.
        :return: the result of the openff.views.menu_component.Prompt get()
        """
        for component in self.queue:
            for line in component.get():
                print(line)
        return self.prompt_obj.get()
class PrintLineDB:
    """This class gathers multiple components from openff.views.menu_component
    in order to create and display a view.
    This view displays a product's details (dict), side by side with its
    substitute's details when present.
    Title, some lines before the input and input customization are optional
    >>> pl = menu_models.PrintLineDB(values)
    >>> pl.get()
    """
    def __init__(self, val, **kwargs):
        """This method initializes the class and call self.gen()
        :param val: a product (dict)
        :param kwargs: optional dict for optional display
        """
        self.val = val
        if kwargs.keys():
            self.kwargs = kwargs
        else:
            self.kwargs = False
        self.text = ''
        # product columns vs substitute columns, split by gen()
        self.prod, self.substitute = dict(), dict()
        self.disp_order = [
            menu_c.Title,
            menu_c.PrintLine
        ]
        # Get terminal size
        # a throwaway PrintLine is built only to read terminal width + colors
        init_geom = menu_c.PrintLine(' ')
        self.col = init_geom.col
        self.colors = init_geom.colors
        self.gen()
    def process_kwargs(self):
        """This method processes optionals parameters in self.kwargs"""
        keys = self.kwargs.keys()
        if 'text' in keys:
            self.text = self.kwargs['text']
    def comp(self):
        """This method adds to queue the details of a product and
        his substitute, laid out as two columns separated by '||'
        """
        b = self.colors['blue']
        B = self.colors['bold']
        end = self.colors['endc']
        for key in self.prod.keys():
            size_key = len(key)+6 # something like ' [ key ] '
            val = str(self.prod[key])
            prod_size = len(val)
            # half the terminal width, minus the key decoration and margins
            max_size = int((self.col/2)-(size_key/2)-3)
            if key in self.substitute:
                val_s = str(self.substitute[key])
                subs_size = len(val_s)
            else:
                # empty list keeps the slicing below a no-op
                val_s = list()
                subs_size = 0
            # wrap both values into chunks of max_size characters
            range_p_lines = range(0, prod_size, max_size)
            range_s_lines = range(0, subs_size, max_size)
            p_lines = [val[i:i+max_size] for i in range_p_lines]
            s_lines = [val_s[i:i+max_size] for i in range_s_lines]
            (pl, sl) = (len(p_lines), len(s_lines))
            # key on top
            side = " "*max_size
            txt = f"{end}{side} {b}{B}[ {key} ]{end} {side}"
            line_obj = menu_c.PrintLine(txt)
            self.queue.append(line_obj)
            # recompute the column width for the ' || ' separator layout
            max_size = int((self.col/2)-(len(' || ')/2)-3)
            # pad the shorter column with empty lines so the rows pair up
            if pl > sl:
                for i in range(0, pl-sl):
                    s_lines.append("")
            elif pl < sl:
                for i in range(0, sl-pl):
                    p_lines.append("")
            for i, elem in enumerate(p_lines):
                left = (max_size-len(elem))*" "
                s = s_lines[i]
                right = " "*(max_size-len(s))
                txt = (
                    f"{end}{left}{elem} "
                    f"{B}{'||'}{end} {s}{right}")
                line_obj = menu_c.PrintLine(txt)
                self.queue.append(line_obj)
            # horizontal separator between keys
            sep = menu_c.PrintLine("-"*(self.col-4))
            self.queue.append(sep)
    def gen(self):
        """This method creates the menu components needed and
        add them to queue (list)
        """
        self.queue = []
        if self.kwargs:
            self.process_kwargs()
        # items
        # keys ending in 'S' are presumably the substitute's columns
        # (SQL aliases) — TODO confirm against the query that builds val
        for item in self.val.items():
            (key, val) = item
            if key[-1] == 'S':
                self.substitute[key[:-1]] = val
            else:
                self.prod[key] = val
        if self.substitute:
            self.comp()
        else:
            for item in self.prod.items():
                # modify display
                (key, val) = item
                g = self.colors['green']
                end = self.colors['endc']
                txt = f"{end}{g}{key} :{end} {val}{end}"
                self.queue.append(menu_c.PrintLine(txt))
        # Input:
        self.prompt_obj = menu_c.Prompt(self.text)
    def get(self):
        """This method prints the result of get() method in each
        menu_components added to queue
        :return: the result of an input
        """
        # sorting
        temp = list()
        for obj in self.disp_order:
            for elem in self.queue:
                if isinstance(elem, obj):
                    temp.append(elem)
        self.queue = temp
        # display
        for elem in self.queue:
            for line in elem.get():
                print(line)
        # default to "0" when the user just presses Enter
        return self.prompt_obj.get() or "0"
|
{"/init_db.py": ["/config.py"], "/openff/core.py": ["/init_db.py", "/config.py"], "/openff/models/db.py": ["/config.py"], "/openff/controllers/product.py": ["/config.py"], "/openff/controllers/controller.py": ["/config.py"]}
|
6,205
|
Julien-V/P5
|
refs/heads/master
|
/openff/views/menu.py
|
#!/usr/bin/python3
# coding : utf-8
import os
import shutil
class MenuItem:
    """Parent class of every menu_component: holds terminal geometry,
    ANSI color codes and the lines to display (self.result).
    """
    def __init__(self, indent=1):
        """Initialize geometry, indentation and colors.
        :param indent: indentation divisor, set by default to 1
        """
        self.geometry = shutil.get_terminal_size()
        self.col = self.geometry.columns
        self.rows = self.geometry.lines
        # a falsy indent falls back to 1
        self.indent = indent if indent else 1
        self.result = []
        self.colors = {
            'yellow': '\033[93m',
            'blue': '\033[96m',
            'red': '\033[91m',
            'purple': '\033[95m',
            'green': '\033[92m',
            'bold': '\033[1m',
            'endc': '\033[0m',
        }
        # ANSI escapes are only assumed to work on POSIX terminals
        if os.name != "posix":
            self.colors = {key: '' for key in self.colors}

    def get(self):
        """Yield every line of self.result, centered for the terminal."""
        width = int(self.col / self.indent)
        for line in self.result:
            yield line.center(width)
|
{"/init_db.py": ["/config.py"], "/openff/core.py": ["/init_db.py", "/config.py"], "/openff/models/db.py": ["/config.py"], "/openff/controllers/product.py": ["/config.py"], "/openff/controllers/controller.py": ["/config.py"]}
|
6,206
|
Julien-V/P5
|
refs/heads/master
|
/openff/models/db.py
|
#!/usr/bin/python3
# coding : utf-8
import os
import getpass
import mysql.connector as mysql_c
# from mysql.connector import errorcode
import config as cfg
class DB:
    """This class connects to, or creates, the MySQL database."""
    def __init__(self):
        """Read connection settings from config and call self.connect()"""
        self.name = cfg.db["name"]
        self.user = cfg.db['connect']['user']
        self.sql_filename = cfg.db["create_file_name"]
        self.sql_filepath = cfg.db["create_file_path"]
        # True once the cursor returns rows as dicts (see get_cursor)
        self.dict_result = False
        self.exist = False
        self.connect()
    def connect(self):
        """Attempt a connection to MySQL, create the database if it does
        not exist yet, then switch to it.
        """
        try:
            self.cnx = mysql_c.connect(
                user=self.user,
                password=getpass.getpass())
        # BUGFIX: mysql.connector exposes its base exception as `Error`
        # (capital E); `mysql_c.error` does not exist and raised
        # AttributeError as soon as a real connection error occurred.
        except mysql_c.Error as e:
            if e.errno in cfg.db["error"].keys():
                print(cfg.db["error"][e.errno])
            else:
                print(cfg.db["Uerror"].format(e.msg))
            return
        self.cursor = self.cnx.cursor()
        self.cursor.execute(cfg.db["show"])
        databases = [elem[0] for elem in self.cursor.fetchall()]
        if self.name not in databases:
            self.create()
        # parameter insertion doesn't seem to work with database name
        self.cursor.execute("USE {}".format(self.name))
    def create(self):
        """This method is called by connect()
        and create database with a *.sql file
        """
        dirs = os.listdir(self.sql_filepath)
        if self.sql_filename not in dirs:
            print(self.sql_filename, " not found")
            return
        path = os.path.join(self.sql_filepath, self.sql_filename)
        with open(path, "r") as file_a:
            # the schema file holds several ';'-separated statements
            queries = file_a.read().split(";")
            for query in queries:
                try:
                    self.cursor.execute(query)
                # BUGFIX: was `mysql_c.error` (attribute does not exist)
                except mysql_c.Error as e:
                    print(e)
                    print(query)
    def get_cursor(self):
        """This method returns a cursor to execute queries"""
        # result will always be returned as a dict
        if not self.dict_result:
            self.cursor.close()
            self.cursor = self.cnx.cursor(dictionary=True)
            self.dict_result = True
        return self.cursor
    def save(self):
        """This method commits modification into database"""
        self.cnx.commit()
    def __del__(self):
        """This method saves, closes cursor and connection to DB
        when this object is removed
        """
        # NOTE(review): __del__ may run during interpreter shutdown; if the
        # connection failed in connect(), self.cnx does not exist and this
        # raises — confirm whether a guard is wanted here.
        self.save()
        self.cursor.close()
        self.cnx.close()
|
{"/init_db.py": ["/config.py"], "/openff/core.py": ["/init_db.py", "/config.py"], "/openff/models/db.py": ["/config.py"], "/openff/controllers/product.py": ["/config.py"], "/openff/controllers/controller.py": ["/config.py"]}
|
6,207
|
Julien-V/P5
|
refs/heads/master
|
/openff/models/req_sql.py
|
#!/usr/bin/python3
# coding : utf-8
# SQL
#########################
# Central registry of raw SQL statements, keyed by purpose.
sql = {}
# Sanity-check / first-run detection query.
sql["test"] = "SELECT * from Categories"
# Inserts.
sql["insert_cat"] = "INSERT INTO Categories (category_name) VALUES (%s)"
sql["insert_PiC"] = (
    "INSERT INTO Prod_in_Cat "
    "(category_id, product_id) VALUES (%s, %s)")
sql["insert_prod"] = (
    "INSERT INTO Products (product_name, brands, code, "
    "categories, nutrition_grades, stores, url, added_timestamp) "
    "VALUES (%s, %s, %s, %s, %s, %s, %s, %s)")
sql['prod_update'] = (
    "UPDATE Products "
    "SET substitute_id = %s, "
    "updated_timestamp = %s "
    "WHERE id = %s")
# Substitute-menu queries.
sql['displayByCat'] = (
    "SELECT * FROM Products LEFT JOIN Prod_in_Cat "
    "ON Products.id=Prod_in_Cat.product_id "
    "WHERE Prod_in_Cat.category_id=%s")
sql['subst'] = sql['displayByCat'] + " ORDER BY nutrition_grades"
sql['prod'] = "SELECT * FROM Products WHERE id = %s"
# Display-menu queries.
sql['displayAll'] = (
    "SELECT * FROM Products WHERE substitute_id IS NOT NULL")
sql['disp'] = sql['displayAll']
|
{"/init_db.py": ["/config.py"], "/openff/core.py": ["/init_db.py", "/config.py"], "/openff/models/db.py": ["/config.py"], "/openff/controllers/product.py": ["/config.py"], "/openff/controllers/controller.py": ["/config.py"]}
|
6,208
|
Julien-V/P5
|
refs/heads/master
|
/openff/views/menu_component.py
|
#!/usr/bin/python3
# coding : utf-8
from . import menu
class Title(menu.MenuItem):
    """Bold, centered title component surrounded by blank lines."""
    def __init__(self, text, indent=1):
        super().__init__(indent)
        self.text = text
        self.gen()

    def gen(self):
        """Build the three display lines: blank, bold title, blank."""
        bold = self.colors["bold"]
        reset = self.colors["endc"]
        # \n self.text \n
        self.result = ["\n", f"{bold}{self.text}{reset}", "\n"]

    def get(self):
        """Yield each line centered for the terminal width."""
        width = int(self.col / self.indent)
        for line in self.result:
            yield line.center(width)
class Prompt(menu.MenuItem):
    """Input prompt component: '[+] text >>> ' with ANSI decoration."""
    def __init__(self, text=""):
        super().__init__()
        self.text = text
        self.line = ""
        self.gen()

    def gen(self):
        """Build the prompt string shown by get()."""
        green = self.colors["green"]
        bold = self.colors["bold"]
        reset = self.colors["endc"]
        # [+] self.text >>>
        self.line = f"{bold}[{green}+{reset}{bold}] {self.text} >>> {reset}"

    def get(self):
        """Display the prompt and return the user's raw input."""
        return input(self.line)
class PrintList(menu.MenuItem):
    """This class (a subclass of menu.MenuItem) prints a list"""
    def __init__(self, values, num=True, indent=3, limit=15):
        """This method initializes the class and call gen() method
        :param values: list to display
        :param num: numerotation beside list elem, set by default to True
        :param indent: number of indent, set by default to 3
        :param limit: max number of list element to be displayed,
        set by default to 15
        """
        super().__init__(indent)
        self.values = values
        self.num = num
        self.limit = limit
        self.pages = list()
        self.page = 0
        self.gen()
    def lines_length_check(self):
        """This method adds extra spaces to all element
        .center() is sensible to the length of a str
        """
        # pad every line to the longest one so .center() aligns them all
        line_size = max([len(x) for x in self.result])
        temp = []
        for line in self.result:
            if len(line) < line_size:
                line += " "*(line_size-len(line))
            temp.append(line)
        self.result = temp
        self.result.append("\n\n")
    def gen(self):
        """This method formats all elements in self.values
        and add them to self.result then self.result is resized to
        match self.limit
        """
        b = self.colors["blue"]
        B = self.colors["bold"]
        end = self.colors["endc"]
        # "[] val"
        self.result.append("\n\n")
        for id_item, item in enumerate(self.values):
            if self.num:
                # [id_item] item
                line = (
                    f"{B}[{b}{id_item}{end}{B}]{end} "
                    f"{item}")
                self.result.append(line)
            else:
                # [*] item
                line = (
                    f"{B}[{b}*{end}{B}]{end} "
                    f"{item}")
                self.result.append(line)
        # list into sublists of size self.limit
        # for future implementation of pages
        lim = self.limit
        r = self.result
        # slice a list into multiple sublist (with their length matching limit)
        temp = [r[i:i+lim] for i in range(0, len(r), lim)]
        self.pages = temp
        self.result = temp[self.page]
        self.lines_length_check()
    def gen_next_page(self):
        """This method switches self.result to the next page of the
        list, wrapping back to the first page after the last one.
        .. note: method unused
        """
        # BUG FIX: the old check compared self.page against
        # len(self.result) - 1 (the number of *lines on the current
        # page*), not the number of pages, so paging wrapped at the
        # wrong point.  Compare against the page count instead.
        # NOTE(review): unlike gen(), this does not re-run
        # lines_length_check() on the new page — confirm intended.
        if self.page < len(self.pages) - 1:
            self.page += 1
        else:
            self.page = 0
        self.result = self.pages[self.page]
    def get(self):
        """Yield each line of the current page, centered."""
        for line in self.result:
            yield line.center(int(self.col/self.indent))
class PrintLine(menu.MenuItem):
    """Menu item rendering a single '[*] <text>' bullet line."""

    def __init__(self, text, indent=1000):
        """Store the text and pre-render the bullet line."""
        super().__init__(indent)
        self.text = text
        self.gen()

    def gen(self):
        """Append the colored '[*] text' line to self.result."""
        blue = self.colors["blue"]
        bold = self.colors["bold"]
        reset = self.colors["endc"]
        self.result.append(f"{bold}{blue}[*] {reset}{bold}{self.text}{reset}")

    def get(self):
        """Yield each rendered line centered in the usable width."""
        width = int(self.col / self.indent)
        for line in self.result:
            yield line.center(width)
|
{"/init_db.py": ["/config.py"], "/openff/core.py": ["/init_db.py", "/config.py"], "/openff/models/db.py": ["/config.py"], "/openff/controllers/product.py": ["/config.py"], "/openff/controllers/controller.py": ["/config.py"]}
|
6,209
|
Julien-V/P5
|
refs/heads/master
|
/openff/controllers/product.py
|
#!/usr/bin/python3
# coding : utf-8
from datetime import datetime
from openff.models import req_sql
import config as cfg
class Product:
    """This class represents a product,
    control and insert its caracteristics into database
    """
    def __init__(self, model, category_id):
        """This method initializes the class
        :param model: cursor object linked to database
        :param category_id: id of the category this product belongs to
        """
        self.cursor = model
        self.cat_id = category_id
        self.spec = dict()
    def get_validate_insert(self, prod, update=False):
        """Copy the checked fields of *prod* into self.spec, validate
        them, then insert the product (or update an existing row).

        :param prod: product (dict)
        :param update: False for a plain insert; otherwise a dict of
            fields to update (it gains a 'substitued_id' key here)
        """
        for key in cfg.db['product_check']:
            if key in prod.keys():
                if key == 'code':
                    # barcodes arrive as strings; store them as ints
                    self.spec[key] = int(prod[key])
                else:
                    self.spec[key] = prod[key]
            else:
                # missing keys are reported later by
                # _validate_product_spec(); nothing to do here
                pass
        ts = int(datetime.now().timestamp())
        self.spec['added_timestamp'] = ts
        self.spec['category_id'] = self.cat_id
        test = self._validate_product_spec()
        if test and not update:
            self._insert()
        elif test and update:
            # 'substitued_id' (sic) is the key _update() reads
            update['substitued_id'] = prod['id']
            self._update(update)
    def _validate_product_spec(self):
        """This method validates types and length of self.spec
        :return: True if self.spec is valided else False
        """
        s = self.spec
        missing = []
        for key in cfg.db['product_check']:
            if key in s.keys():
                if isinstance(s[key], cfg.db['product_type'][key]):
                    pass
                else:
                    # BUG FIX: report the expected type for *this* key
                    # instead of dumping the whole product_type mapping.
                    # NOTE(review): a type mismatch only warns; it does
                    # not fail validation — confirm intended.
                    print(f"{key} not a {cfg.db['product_type'][key]}")
                if key == 'nutrition_grades':
                    # nutrition grade must be a single character
                    if len(s['nutrition_grades']) != 1:
                        return False
            else:
                missing.append(key)
        if missing:
            # print(missing)
            return False
        return True
    def _insert(self):
        """This method inserts the product into database"""
        s = self.spec
        sql = req_sql.sql['insert_prod']
        sql_args = (
            s['product_name'], s['brands'], s['code'],
            s['categories'], s['nutrition_grades'],
            s['stores'], s['url'], s['added_timestamp']
        )
        sql2 = req_sql.sql['insert_PiC']
        try:
            self.cursor.execute(sql, sql_args)
            # link the freshly inserted product to its category
            sql2_args = (self.cat_id, self.cursor.lastrowid)
            self.cursor.execute(sql2, sql2_args)
        except Exception as e:
            print(e)
            return
    def _update(self, update):
        """This method updates a product row
        :param update: a dict with information to update
            (keys 'substitute_id' and 'substitued_id' are read)
        """
        ts = int(datetime.now().timestamp())
        # sql = UPDATE Products SET {} = {}, {}, {} WHERE id= {}
        sql = req_sql.sql['prod_update']
        key = 'substitute_id'
        sql_args = (
            update[key],
            ts,
            update['substitued_id'])
        self.cursor.execute(sql, sql_args)
|
{"/init_db.py": ["/config.py"], "/openff/core.py": ["/init_db.py", "/config.py"], "/openff/models/db.py": ["/config.py"], "/openff/controllers/product.py": ["/config.py"], "/openff/controllers/controller.py": ["/config.py"]}
|
6,210
|
Julien-V/P5
|
refs/heads/master
|
/openff/controllers/controller.py
|
#!/usr/bin/python3
# coding : utf-8
import os
import config as cfg
class Controller:
    """Validates user answers coming from the menu views."""

    def __init__(self, view, model):
        """Keep references to the view (answer source) and the model.

        :param view: a view (openff.views.menu_models)
        :param model: a model, database or cursor
        """
        self.view = view
        self.model = model

    def _clear(self):
        """Wipe the terminal before redrawing a menu."""
        os.system('clear')

    def choice_menu(self, debug=False):
        """Ask until the answer is a valid list index or a nav command.

        :param debug: when True, skip clearing the screen between tries
        :return: int of user's anwser (may be cfg.back / cfg.exit)
        """
        while True:
            if not debug:
                self._clear()
            answer = self.view.get()
            # the first and last entries of the rendered list are the
            # surrounding newlines, not selectable choices
            options = self.view.resized_result.copy()
            options.remove(options[0])
            options.remove(options[-1])
            if answer in (cfg.back, cfg.exit):
                break
            try:
                options[int(answer)]
            except Exception as err:
                print(err)
                continue
            break
        return int(answer)

    def print_line_db(self, debug=False):
        """Ask until the answer is an integer or a nav command.

        :param debug: when True, skip clearing the screen between tries
        :return: int of user's anwser
        """
        while True:
            if not debug:
                self._clear()
            answer = self.view.get()
            if answer in (cfg.back, cfg.exit):
                break
            try:
                answer = int(answer)
            except Exception as err:
                print(err)
                continue
            break
        return int(answer)
|
{"/init_db.py": ["/config.py"], "/openff/core.py": ["/init_db.py", "/config.py"], "/openff/models/db.py": ["/config.py"], "/openff/controllers/product.py": ["/config.py"], "/openff/controllers/controller.py": ["/config.py"]}
|
6,211
|
Julien-V/P5
|
refs/heads/master
|
/openff/controllers/category.py
|
#!/usr/bin/python3
# coding : utf-8
from openff.models import req_sql
class Category:
    """Represents a food category; validates it and persists it."""

    def __init__(self, name, model):
        """Store the category name and the DB cursor.

        :param name: category name
        :param model: cursor linked to database
        """
        self.name = name
        self.cursor = model
        self.prod_list = []

    def add_product(self, product):
        """Append *product* to this category's product list."""
        self.prod_list.append(product)

    def _validate(self):
        """Return True when the category name is a string.

        :return: boolean
        """
        if isinstance(self.name, str):
            return True
        print(f"{self.name} not str")
        return False

    def insert(self):
        """Insert this category into the database (no-op when invalid).

        DB errors are printed and swallowed.
        """
        if not self._validate():
            return
        try:
            self.cursor.execute(req_sql.sql['insert_cat'], (self.name,))
        except Exception as err:
            print(err)
            return
|
{"/init_db.py": ["/config.py"], "/openff/core.py": ["/init_db.py", "/config.py"], "/openff/models/db.py": ["/config.py"], "/openff/controllers/product.py": ["/config.py"], "/openff/controllers/controller.py": ["/config.py"]}
|
6,212
|
Julien-V/P5
|
refs/heads/master
|
/config.py
|
#!/usr/bin/python3
# coding : utf-8
import getpass
from openff.models import req_sql
# from openff.views import menu
from openff.views import menu_models as mm
from openff.controllers import controller as ctrl
# Application-wide identity / UI constants.
locale = "fr"
title = "Projet 5"
# Sentinel answers the controllers treat as navigation commands.
back = '777'
exit = '999'  # NOTE: shadows the builtin exit(); kept for compatibility
# DB
#########################
# Database settings plus the product validation schema.
db = {
    "name": "openff",
    "create_file_name": "create-db.sql",
    "create_file_path": "openff/models/",
    # connect using the current OS user name
    "connect": {'user': getpass.getuser()},
    # fields a product must provide, with their expected types
    "product_check": {
        'product_name': str,
        'brands': str,
        'code': int,
        'categories': str,
        'nutrition_grades': str,
        'stores': str,
        'url': str
    },
}
# full type map = user-provided fields + fields added at insert time
db["product_type"] = db["product_check"].copy()
db["product_type"]["category_id"] = int
db["product_type"]["added_timestamp"] = int
db["product_val"] = db['product_type'].keys()
# DB error codes -> user-facing message ("" = silent)
db["error"] = {
    1044: "",
    1698: "Invalid Password, Access Denied"
}
db["Uerror"] = "Unknown Error : \n {}"
db["show"] = "SHOW DATABASES"
# Requests
#########################
# HTTP headers sent to OpenFoodFacts (identifies this client).
headers = {'user-agent': 'OC_P5/0.1'}
# url = "https://{}.openfoodfacts.org/categorie/{}.json"
# Search endpoint for the configured locale.
url = "https://{}.openfoodfacts.org/cgi/search.pl?".format(locale)
# Default query-string parameters for the category search.
param = {
    "action": "process",
    "page": 1,
    # only 200 to avoid timeout
    "page_size": 200,
    "tagtype_0": "categories",
    "tag_contains_0": "contains",
    "tag_0": "",
    "json": True
}
# Categories fetched/displayed by the application.
cat = ["desserts-au-chocolat", "boissons-instantanees"]
cat.append("cereales-au-chocolat")
# Text for Menu
#########################
# Each entry: [list_of_choices, kwargs] consumed by the menu views
# (kwargs keys seen here: 'text' = prompt label, 'title' = screen title).
text = dict()
text['start'] = [
    ["Substituer un aliment", "Afficher les aliments substitués"], {
        'text': 'Menu',
        'title': title
    }]
# Substitute text
text['cat_choice'] = [
    cat, {
        'text': 'Catégorie',
        'title': 'Choix de la catégorie'
    }
]
text['prod_choice'] = [
    [], {
        'text': 'Produit',
        'title': 'Choix du produit'
    }
]
text['prod_details'] = [
    [], {
        'text': '',
        'title': ''
    }
]
text['subs_choice'] = [
    [], {
        'text': 'Substitution',
        'title': 'Choix du produit à substituer'
    }
]
text['subs_details'] = [
    [], {
        'text': '777 to go back, enter to save substitute',
        'title': ''
    }
]
# Display subs text
all_s = "Tout mes produits substitués"
text['display'] = [
    ["Par catégorie", all_s], {
        'text': 'Afficher',
        'title': 'Afficher'
    }
]
text['displayByCat'] = [
    cat, {
        'text': "Catégorie",
        'title': "Afficher par catégorie"
    }
]
text['disp_choice'] = [
    [], {
        'text': "Produit"
    }
]
# paramExt
#########################
# paramExt = dict()
# paramExt['format'] = [
# (lambda i: i['product_name'] + " // " + str(i['barcode'])
# ]
paramExt = dict()
# Extra parameters attached to each menu step:
#   '4query'  : name of the runtime value substituted into 'query'
#   'query'   : SQL statement executed to populate the step
#   'process' : pipeline of callables applied to the raw result set
#   'format'  : pipeline of callables rendering each row for display
# (semantics inferred from usage here — confirm against the menu views)
# cat_choice
param_ext_cc = {
    'query': req_sql.sql['test']
}
# prod_choice
param_ext_pc = {
    '4query': 'category_id',
    'query': req_sql.sql['displayByCat'],
    'format': [
        lambda i: [i, f"{i['product_name']} // {i['brands']}"],
        lambda i: f"{i[1]} // {i[0]['nutrition_grades']}"
    ]
}
# prod_details
nG = 'nutrition_grades'
# substChoice
param_ext_sc = {
    '4query': 'category_id',
    'query': req_sql.sql['subst'],
    # keep only rows whose nutrition grade letter sorts strictly lower
    # (i.e. better) than the selected item's
    'process': [lambda i: [x for x in i.result if x[nG] < i.item[nG]]],
    'format': [
        lambda i: [i, f"{i['product_name']}"],
        lambda i: f"{i[1]} // {i[0]['nutrition_grades']}"
    ],
}
# display
prod_by_id = req_sql.sql['prod']
param_ext_dc = {
    'query': req_sql.sql['displayAll'],
    # pipeline: fetch each substitute row by id, suffix its keys with
    # 'S', then merge it into the substituted product's row in place
    'process': [
        lambda i: [
            i,
            [i.query(prod_by_id, x['substitute_id']-1) for x in i.result]
        ],
        lambda i: [
            i[0],
            [{k+'S': v for k, v in list(elem[0].items())} for elem in i[1]]
        ],
        lambda i: [
            i[0],
            [i[0].result[idD].update(elem) for idD, elem in enumerate(i[1])]
        ],
        lambda i: i[0].result
    ],
    'format': [
        lambda i: [i, f"{i['product_name']} // {i[nG]}"],
        lambda i: [i[0], f"{i[1]} --> {i[0]['product_nameS']}"],
        lambda i: f"{i[1]} // {i[0]['nutrition_gradesS']}"
    ]
}
# Menu :
#########################
""" STEP
kwargs.keys() = ['text', 'title', lines'] # possible key
args = [listValues, **kwargs]
var = [view, controller, args, paramExt]
"""
# Aliases for the view/controller classes wired into each step.
choice = mm.ChoiceList
print_line = mm.PrintLineDB
ctrller = ctrl.Controller
# Step descriptors: [view_class, controller_class, args(, paramExt)]
cat_choice = [choice, ctrller, text['cat_choice'], param_ext_cc]
prod_choice = [choice, ctrller, text['prod_choice'], param_ext_pc]
prod_details = [print_line, ctrller, text['prod_details']]
# subs_choice = [choice, ctrller, text['subs_choice'], param_ext_sc]
subs_prop = [print_line, ctrller, text['subs_details'], param_ext_sc]
end = [None, None, None] # the end
disp_choice = [choice, ctrller, text['disp_choice'], param_ext_dc]
# Substitute
step_sub = {
    'cat_choice': cat_choice,
    'prod_choice': prod_choice,
    'subs_choice': subs_prop,
    'prod_update': end,
    'end': end
}
# Display
step_disp_all = {
    'disp_choice': disp_choice,
    'prod_details': prod_details,
    'end': end
}
"""Group of steps contained in a dict with a key 'param'
'param': [view, controller, args]
with args = [listValues, **kwargs]
"""
step_app = {
    'substitute': step_sub,
    'display': step_disp_all,
    'param': [choice, ctrller, text['start']]
}
|
{"/init_db.py": ["/config.py"], "/openff/core.py": ["/init_db.py", "/config.py"], "/openff/models/db.py": ["/config.py"], "/openff/controllers/product.py": ["/config.py"], "/openff/controllers/controller.py": ["/config.py"]}
|
6,262
|
LucienXian/NBA_search_engine
|
refs/heads/master
|
/search/views.py
|
from django.http import HttpResponse
from django.http import JsonResponse
from django.shortcuts import render
from elasticsearch import Elasticsearch
from search.elasticsearch import ElasticSearchClass
import datetime
# Create your views here.
try:
from django.utils import simplejson as json
except ImportError:
import json
class DateEncoder(json.JSONEncoder):
    """JSON encoder that serializes date/datetime objects as strings.

    datetime.datetime -> "YYYY-MM-DD HH:MM:SS"
    datetime.date     -> "YYYY-MM-DD"
    Anything else falls through to the default encoder (TypeError).
    """
    def default(self, obj):
        # Check datetime first: datetime.datetime is a subclass of
        # datetime.date, so branch order matters.
        if isinstance(obj, datetime.datetime):
            return obj.strftime('%Y-%m-%d %H:%M:%S')
        elif isinstance(obj, datetime.date):
            # BUG FIX: bare `date` was never imported (the module only
            # does `import datetime`), so this branch raised NameError
            # at runtime instead of encoding the date.
            return obj.strftime("%Y-%m-%d")
        else:
            return json.JSONEncoder.default(self, obj)
def toShortName(name):
    """Return the short team name embedded at the end of *name*.

    Team names end with a trailing suffix character; this strips it and
    keeps the 4-, 3- or 2-character short name, longest match first.
    """
    four_chars = name[-5:-1]
    if four_chars == '凯尔特人':
        return four_chars
    three_chars = name[-4:-1]
    if three_chars in ('76人', '步行者', '独行侠', '森林狼'):
        return three_chars
    return name[-3:-1]
def index(request):
    """Single search endpoint for the site.

    GET  -> renders the search page.
    POST -> runs a search selected by the 'label' field:
        '0' everything (news + teams + players), '1' news only,
        '2' teams only, '3' players only, '4' news via the pre-built
        label index.  Results are returned as JSON (dates encoded by
        DateEncoder).

    NOTE(review): if 'label' is missing or not one of '0'..'4', `temp`
    is never bound and passed['result'] = temp below raises NameError —
    confirm the frontend always sends a valid label.
    """
    if request.method == 'GET':
        passed = {}
        return render(request, 'index.html', passed)
    print("xxxxxxxxxxxxx")
    es = ElasticSearchClass()
    category = request.POST.get('label', None)
    if category == '0':
        # this is all
        es.make_index_table()
        keyword_list = request.POST.get('keyword', None)
        source = request.POST.getlist('source[]', None)
        team = request.POST.getlist('team[]', None)
        player = request.POST.getlist('star[]', None)
        search_str = keyword_list
        search_type = "news"
        search_order = "by score"
        search_source = source
        # when teams are selected, search their short names instead of
        # the typed keyword
        team_str = ""
        if team != []:
            for i in team:
                team_str = team_str + toShortName(i) + " "
        if team_str != "":
            search_str = team_str[0:-1]
        list2 = es.search_keywords(search_type, search_str)
        list2 = es.filter(list2, 'source', search_source)
        # optional recency filter (1 / 7 / 30 days)
        recent = request.POST.get('recent', None)
        if recent == '1':
            list2 = es.filter(list2, 'recent_1day', [])
        elif recent == '2':
            list2 = es.filter(list2, 'recent_7day', [])
        elif recent == '3':
            list2 = es.filter(list2, 'recent_30day', [])
        temp = {}
        temp['recent_list'] = es.count_recent(list2)
        temp['source_list'] = es.count_source(list2)
        temp['data_list'] = list2
        #print(temp['recent_list'])
        #print(temp['source_list'])
        #print(list2)
        #list2 = es.sort(list2, search_order)
        # team search + 2017-season stats per matched team
        keyword_list = request.POST.get('keyword', None)
        search_str = keyword_list
        search_type = "team"
        search_order = "by defen"
        list2 = es.search_team(search_str)
        list2 = es.filter(list2, 'by team', team)
        temp['team_list'] = list2
        list0 = []
        i = 0
        for i in list2:
            name = i['_source']['name']
            short_name = toShortName(name)
            listx = es.search_data_season(short_name, "2017")
            listx = es.sort(listx, search_order)
            list0.append(listx)
        temp['team_data_list'] = list0
        # player search + season stats (only for the first match: break)
        keyword_list = request.POST.get('keyword', None)
        search_str = keyword_list
        search_type = "player"
        search_order = "by season"
        list2 = es.search_player(search_str)
        list2 = es.filter(list2, 'by player', player)
        temp['player_list'] = list2
        list0 = []
        for i in list2:
            name = i['_source']['中文名']
            listx = es.search_data(name)
            listx = es.sort(listx, search_order)
            list0.append(listx)
            break
        temp['player_data_list'] = list0
    # plain 'if' (not elif) — harmless here since '0' and '1' are
    # mutually exclusive values of category
    if category == '1':
        # this is a news
        es.make_index_table()
        keyword_list = request.POST.get('keyword', None)
        source = request.POST.getlist('source[]', None)
        search_str = keyword_list
        search_type = "news"
        search_order = "by score"
        search_source = source
        list2 = es.search_keywords(search_type, search_str)
        list2 = es.filter(list2, 'source', search_source)
        recent = request.POST.get('recent', None)
        if recent == '1':
            list2 = es.filter(list2, 'recent_1day', [])
        elif recent == '2':
            list2 = es.filter(list2, 'recent_7day', [])
        elif recent == '3':
            list2 = es.filter(list2, 'recent_30day', [])
        temp = {}
        temp['recent_list'] = es.count_recent(list2)
        temp['source_list'] = es.count_source(list2)
        temp['data_list'] = list2
        #print(temp['recent_list'])
        #print(temp['source_list'])
        #print(list2)
        #list2 = es.sort(list2, search_order)
    elif category == '2':
        # this is a team
        temp = {}
        keyword_list = request.POST.get('keyword', None)
        team = request.POST.getlist('team[]', None)
        search_str = keyword_list
        search_type = "team"
        search_order = "by defen"
        list2 = es.search_team(search_str)
        list2 = es.filter(list2, 'by team', team)
        temp = {}
        temp['team_list'] = list2
        list0 = []
        i = 0
        for i in list2:
            name = i['_source']['name']
            short_name = toShortName(name)
            listx = es.search_data_season(short_name, "2017")
            listx = es.sort(listx, search_order)
            list0.append(listx)
        temp['team_data_list'] = list0
        #temp['data_list'] =
    elif category == '3':
        # this is a player
        temp = {}
        keyword_list = request.POST.get('keyword', None)
        player = request.POST.getlist('star[]', None)
        search_str = keyword_list
        search_type = "player"
        search_order = "by season"
        list2 = es.search_player(search_str)
        list2 = es.filter(list2, 'by player', player)
        temp = {}
        temp['player_list'] = list2
        list0 = []
        for i in list2:
            name = i['_source']['中文名']
            listx = es.search_data(name)
            listx = es.sort(listx, search_order)
            list0.append(listx)
            break
        temp['player_data_list'] = list0
    elif category == '4':
        # this is a news of label
        keyword_list = request.POST.get('keyword', None)
        source = request.POST.getlist('source[]', None)
        search_str = keyword_list
        search_type = "news"
        search_order = "by score"
        search_source = source
        list2 = es.get_by_index(keyword_list)
        list2 = es.filter(list2, 'source', search_source)
        recent = request.POST.get('recent', None)
        if recent == '1':
            list2 = es.filter(list2, 'recent_1day', [])
        elif recent == '2':
            list2 = es.filter(list2, 'recent_7day', [])
        elif recent == '3':
            list2 = es.filter(list2, 'recent_30day', [])
        temp = {}
        temp['recent_list'] = es.count_recent(list2)
        temp['source_list'] = es.count_source(list2)
        temp['data_list'] = list2
        #print(temp['recent_list'])
        #print(temp['source_list'])
        #print(list2)
        #list2 = es.sort(list2, search_order)
    '''
    #print(es.search_all("news"))
    search_str = ["詹姆斯"]
    search_type = "data"
    search_order = "by season"
    search_source = ["all"]
    list = es.search_keywords(search_type, search_str)
    list = es.filter(list, 'source', search_source)
    list = es.sort(list, search_order)
    print(list)
    '''
    passed = {}
    passed['result'] = temp
    #print(passed['list'])
    print("xxxxxxxxxxxxx")
    #print(list2)
    return HttpResponse(json.dumps(passed, cls=DateEncoder), content_type='application/json')
|
{"/search/views.py": ["/search/elasticsearch.py"], "/data/views.py": ["/search/elasticsearch.py"]}
|
6,263
|
LucienXian/NBA_search_engine
|
refs/heads/master
|
/data/views.py
|
from django.http import HttpResponse
from elasticsearch import Elasticsearch
import es_client
from elasticsearch import helpers
from search.elasticsearch import ElasticSearchClass
import xlrd
# Create your views here.
# NBA team abbreviation -> Chinese short name; used to label the
# per-team spreadsheet data with a human-readable team name.
TEAM_DIC = {'ATL':'老鹰','BKN':'篮网','BOS':'凯尔特人','CHI':'公牛','CHA':'黄蜂','CLE':'骑士','MIA':'热火',
            'DET':'活塞','NYK':'尼克斯','ORL':'魔术','IND':'步行者','PHI':'76人','WAS':'奇才','MIL':'雄鹿',
            'TOR':'猛龙','GSW':'勇士','DEN':'掘金','DAL':'独行侠','LAC':'快船','MIN':'森林狼','HOU':'火箭',
            'LAL':'湖人','OKC':'雷霆','MEM':'灰熊','PHO':'太阳','POR':'开拓者','NOH':'鹈鹕','SAC':'国王',
            'UTA':'爵士','SAS':'马刺'}
# News site display name -> list of crawled JSON file basenames.
NEWS = {'新浪体育' : ['sina1', 'sina5', 'sina6', 'sina12', 'sina14', 'sina15', 'sina25'],
        '搜狐体育' : ['sohu'],
        '网易体育' : ['netease'],
        '虎扑体育' : ['hupu']
        }
# Expected corpus sizes (informational constants).
DATA_SIZE = 22000
TEAM_SIZE = 30
# Prefer Django's bundled simplejson when available.
try:
    from django.utils import simplejson as json
except ImportError:
    import json
def replaceAll(old, new, str):
    """Repeatedly replace *old* with *new* until no occurrence remains.

    Unlike a single str.replace(), this also collapses matches created
    by earlier replacements (e.g. '<br><br><br>' -> '<br>' when
    old='<br><br>', new='<br>').
    Note: the third parameter shadows the builtin str; the name is kept
    for call compatibility.
    """
    while old in str:
        str = str.replace(old, new)
    return str
'''
es.index("news", {"name": "14日夏季联赛综述:伯顿压哨抛投绝杀魔术", "label" : "魔术 魔术王", "my_time" : "2018-07-14", "source": "baidu", "link" : "http://nbachina.qq.com/a/20180714/013195.htm", "content" : "尼克斯102-83轻取鹈鹕,丹尼尔-奥切夫22分8篮板,泰瑞斯-沃克16分5篮板6助攻5抢断,丹伊恩-多特森15分6篮板5助攻,米切尔-罗宾逊14分12篮板5封盖;鹈鹕方面,加尔伦-格林23分,DJ-霍格16分,查森-兰德尔11分3篮板。"})
es.index("news", {"name": "曝若安东尼恢复自由身 火箭将最有希望得到他", "label" : "魔术王", "time" : "2018-07-13", "source": "yahoo", "link" : "http://nbachina.qq.com/a/20180714/003886.htm", "content" : "北京时间7月14日,据雅虎记者沙姆斯-查拉尼亚报道,消息人士透露,一旦尼克斯前锋卡梅隆-安东尼成为自由球员,那么休斯顿火箭将会是青睐甜瓜的所有球队中的领跑者。"})
es.index("player", {"name": "詹姆斯", "content" : "Lebron James", "中文名字" : "勒布朗·詹姆斯", "英文名字" : "Lebron James", "身高" : "2.03m", "体重" : "113kg", "出生日期" : "1984-12-30", "出生地点" : "俄亥俄州阿克伦城", "位置" : "前锋/后卫"})
es.index("team", {"name": "湖人", "content" : "Lakers", "中文名字" : "湖人队", "英文名字" : "Lakers", "所属地区" : "西部", "成立时间" : "1948", "主球馆" : "斯台普斯中心球馆", "拥有者" : "珍妮-巴斯(Jeanie Buss)", "赛区" : "太平洋区", "主教练" : "卢克-沃顿(Luke Walton)"})
es.index("data", {"name": "", "team" : "湖人", "content" : "湖人 詹姆斯 2018-2019", "season" : "2018-2019", "player" : "詹姆斯", "出场次数" : "82", "首发次数" : "82", "场均上场时间" : "46.5", "投篮命中率" : "60.0", "场均投篮出手次数" : "25.0", "场均投篮命中次数" : "15.0", "三分球命中率" : "36.5", "场均三分出手次数" : "9.0", "场均三分命中次数" : "3.3", "罚球命中率" : "90.0", "场均罚球出手次数" : "10.0", "场均罚球命中次数" : "9.0", "场均篮板" : "15.0", "前场篮板" : "7.0", "后场篮板" : "8.0", "场均助攻" : "10.2", "场均抢断" : "2.0", "场均盖帽" : "1.2", "场均失误" : "6.2", "场均犯规" : "4.3", "场均得分" : "36.4"})
es.index("data", {"name": "", "team" : "骑士", "content" : "骑士 詹姆斯 2017-2018", "season" : "2017-2018", "player" : "詹姆斯", "出场次数" : "82", "首发次数" : "82", "场均上场时间" : "46.5", "投篮命中率" : "60.0", "场均投篮出手次数" : "25.0", "场均投篮命中次数" : "15.0", "三分球命中率" : "36.5", "场均三分出手次数" : "9.0", "场均三分命中次数" : "3.3", "罚球命中率" : "90.0", "场均罚球出手次数" : "10.0", "场均罚球命中次数" : "9.0", "场均篮板" : "15.0", "前场篮板" : "7.0", "后场篮板" : "8.0", "场均助攻" : "10.2", "场均抢断" : "2.0", "场均盖帽" : "1.2", "场均失误" : "6.2", "场均犯规" : "4.3", "场均得分" : "36.4"})
'''
def read_news(f, source):
    """Index every news article found in JSON file *f* under *source*.

    :param f: open file containing a JSON array of article dicts
        (keys read: 'content', 'title', 'label', 'time', 'url')
    :param source: display name of the news site (e.g. '新浪体育')
    Any failure aborts the whole file with a printed message.
    """
    es = ElasticSearchClass()
    data = json.load(f)
    try:
        for i in data:
            content = i['content']
            # normalize: newlines -> <br>, drop full-width spaces,
            # collapse repeated <br> runs
            content = content.replace('\n','<br>')
            content = content.replace('\u3000', "")
            content = replaceAll('<br><br>', '<br>', content)
            try:
                # strip a leading '<br>'
                # NOTE(review): [4:-1] also drops the *last* character
                # of the article — probably meant [4:]; confirm.
                if content.index('<br>') == 0:
                    content = content[4:-1]
            except Exception:
                print("None extra <br>")
            es.index("news", {"name" : i['title'], "label" : i['label'],
            "my_time": i['time'], "source" : source, "link" : i['url'], "content" : content})
    except Exception:
        print('File add error')
def read_team(table):
    """Index every row of the team_info spreadsheet as a 'team' doc.

    :param table: xlrd sheet; row 0 is the header, data rows follow.
    The loop ends when row_values() raises past the last row — the
    broad except doubles as the loop terminator.
    """
    es = ElasticSearchClass()
    title = table.row_values(0)  # header row (unused)
    line = 1
    try:
        while len(table.row_values(line)) > 0 :
            body = {}  # NOTE(review): unused
            p = table.row_values(line)
            es.index("team", {"name": p[1], "content" : p[2], "中文名字" : p[1], "英文名字" : p[2], "所属地区" : p[3], "成立时间" : p[4], "主球馆" : p[5], "拥有者" : p[6], "赛区" : p[7], "主教练" : p[8], "image_link" : p[9]})
            line = line + 1
    except Exception:
        print("read team over")
def read_player(table):
    """Index every row of the player_info spreadsheet as a 'player' doc.

    :param table: xlrd sheet; row 0 is the header, data rows follow.
    The loop ends when row_values() raises past the last row.
    """
    es = ElasticSearchClass()
    title = table.row_values(0)  # header row (unused)
    line = 1
    try:
        while len(table.row_values(line)) > 0 :
            body = {}  # NOTE(review): unused
            p = table.row_values(line)
            es.index("player", {"name": p[1], "content" : p[7], "位置" : p[2], "身高" : p[3], "体重" : p[4], "出生年月" : p[5], "出生城市" : p[6], "中文名" : p[7], "image_link" : p[8]})
            line = line + 1
    except Exception:
        print("read player over")
def read_data(table):
    """Index spreadsheet rows as 'data' documents.

    :param table: xlrd sheet; row 0 is the header, data rows follow.
    NOTE(review): the indexed fields are identical to read_player()
    (位置/身高/体重/... player-info columns) even though the doc type is
    "data" — looks like a copy-paste leftover; the season-stat schema
    used elsewhere for "data" docs has different keys. Confirm.
    """
    es = ElasticSearchClass()
    title = table.row_values(0)
    print(title)
    line = 1
    try:
        while len(table.row_values(line)) > 0 :
            body = {}  # NOTE(review): unused
            p = table.row_values(line)
            es.index("data", {"name": p[1], "content" : p[7], "位置" : p[2], "身高" : p[3], "体重" : p[4], "出生年月" : p[5], "出生城市" : p[6], "中文名" : p[7], "image_link" : p[8]})
            line = line + 1
    except Exception:
        print("read data over")
def index(request):
    """Admin/bootstrap view for the 'data' app.

    Historically this view ran the whole bulk-import pipeline:
      - news: one JSON file per source listed in NEWS, via read_news()
      - teams: 'team_info.xlsx' via read_team()
      - players: 'nba_player_info.xlsx' via read_player()
      - per-season team stats: 'nba_team_reg_data(<ABBR>).xlsx' for
        every team in TEAM_DIC, seasons 1946-2017, indexed as 'data'
        documents keyed by Chinese stat names (出场, 得分, ...), with
        '总计' (totals) rows skipped.
    That pipeline had been disabled by wrapping it in a triple-quoted
    string (a no-op expression evaluated and discarded on every
    request).  The dead blob is removed here to keep the handler
    readable; recover it from version control if a re-import is needed.

    :param request: Django request (unused)
    :return: HttpResponse containing the total document count
    """
    es = ElasticSearchClass()
    return HttpResponse(es.count_all())
|
{"/search/views.py": ["/search/elasticsearch.py"], "/data/views.py": ["/search/elasticsearch.py"]}
|
6,264
|
LucienXian/NBA_search_engine
|
refs/heads/master
|
/search/elasticsearch.py
|
from elasticsearch import Elasticsearch
from elasticsearch import helpers
import datetime
import time
# Target length (in characters) of a generated abstract/snippet; only
# referenced by the disabled abstract-building code in make_abstract().
abstract_length = 40
def toShortName(name):
    """Return the short team name embedded at the end of *name*.

    Team names end with a trailing suffix character; this strips it and
    keeps the 4-, 3- or 2-character short name, longest match first.
    """
    four_chars = name[-5:-1]
    if four_chars == '凯尔特人':
        return four_chars
    three_chars = name[-4:-1]
    if three_chars in ('76人', '步行者', '独行侠', '森林狼'):
        return three_chars
    return name[-3:-1]
class ElasticSearchClass(object):
    """Thin wrapper around the local Elasticsearch node ('my_index')."""
    # Document types the app knows about; "all" targets every type.
    type_list = ["all", "news", "team", "player", "data"]
    # NOTE(review): declared but not used in the visible code — confirm.
    max_window_size = 1000
    # Counter of documents indexed via index() (class-level default;
    # the first increment creates a per-instance attribute).
    idx = 0
    # label -> [news doc ids], built by make_index_table().
    index_table = {}
    def __init__(self):
        """Open a client connection to the local Elasticsearch node."""
        self.es = Elasticsearch(['localhost:9200'])
    def count_all(self):
        """Return the total number of documents (all indices/types)."""
        return self.es.count(index = None, doc_type = None)['count']
    def index(self, type_name, body):
        """Index *body* under doc type *type_name* and bump the counter.

        :param type_name: one of type_list (except "all")
        :param body: document dict to store
        """
        self.es.index(index = 'my_index', doc_type = type_name, id = None ,body = body)
        # progress counter, printed so long imports show activity
        self.idx = self.idx + 1
        print(self.idx)
    def delete_all(self):
        """Delete every document in the index, one by one.

        Failures on individual documents are printed and skipped.
        """
        allDoc = self.search_all("all")
        for i in allDoc:
            j = i['_type']
            try:
                print("delete "+ j + " " + i['_id'])
                self.es.delete(index = 'my_index', doc_type = j, id = i['_id'])
            except Exception:
                print("delete "+ j + " " + i['_id'] + " error")
                continue
    def get_by_id(self, id):
        """Return the single document whose _id equals *id*.

        :param id: document id (note: parameter shadows builtin id)
        :return: the hit dict, or {} when missing / on error
        """
        try:
            query = {'query': {"term": { "_id" : id}}}
            allDoc = self.es.search(None, None, query)
            return allDoc['hits']['hits'][0]
        except Exception as err:
            print(err)
            return {}
def get_by_index(self, index):
list = []
try:
for i in self.index_table[index]:
list.append(self.get_by_id(i))
except Exception as err:
print(err)
return list
def make_index_table(self):
list = self.search_all('news')
if self.index_table != {}:
self.index_table = {}
for i in list:
for j in i['_source']['label'].strip(',').split(' '):
try:
self.index_table[j].append(i['_id'])
except Exception:
self.index_table[j] = [i['_id']]
    def make_abstract(self, list_step1, search_str):
        """Annotate each hit with where and which search term matched.

        For every hit, sets in _source: 'keyword' (the matched term, or
        the whole search string if nothing matched), 'place' ('title' |
        'content' | 'nowhere'), 'index' (match position, -1 when none),
        and sets the hit's _score to
        title_occurrences + 0.05 * content_occurrences.

        :param list_step1: raw hits from search()
        :param search_str: space-separated search terms; the literal
            'Untitled' anywhere in it disables term matching
        :return: the annotated hits (same objects, mutated in place)
        """
        list_step2 = []
        abstract = ""
        list_search_str = []
        if('Untitled' not in search_str):
            list_i = search_str.strip(',').split(' ')
            list_search_str.extend(list_i)
        for j in list_step1:
            content = j['_source']['content']
            title = j['_source']['name']
            j['_source']['keyword'] = search_str
            j['_score'] = 0
            index = -1
            # first term found in the title wins, then the content
            for temp in list_search_str:
                try:
                    if title.count(temp) != 0:
                        index = title.index(temp)
                        j['_source']['place'] = 'title'
                        j['_source']['keyword'] = temp
                        j['_score'] = title.count(temp) + content.count(temp) * 0.05
                        '''
                        abstract = title[index : index + abstract_length]
                        if(len(abstract) < abstract_length):
                            abstract = '...' + title[index + len(abstract) - abstract_length: index + abstract_length]
                        else:
                            abstract = abstract + '...'
                        '''
                        break
                    if content.count(temp) != 0:
                        index = content.index(temp)
                        j['_source']['place'] = 'content'
                        j['_source']['keyword'] = temp
                        j['_score'] = title.count(temp) + content.count(temp) * 0.05
                        '''
                        abstract = content[index : index + abstract_length]
                        if(len(abstract) < abstract_length):
                            abstract = '...' + content[index + len(abstract) - abstract_length: index + abstract_length]
                        else:
                            abstract = abstract + '...'
                        '''
                        break
                except Exception:
                    continue
            else:
                # for-else: runs only when no term broke out of the loop
                j['_source']['place'] = 'nowhere'
            j['_source']['index'] = index
            #j['_source']['abstract'] = abstract
            list_step2.append(j)
        return list_step2
def search_all(self, type_name):
if type_name == 'all':
result = helpers.scan(
self.es,
query = {
'query': {
'match_all': {}
}
},
index = 'my_index',
doc_type = None
)
final_result = []
for item in result:
final_result.append(item)
allDoc = final_result
else:
result = helpers.scan(
self.es,
query = {
'query': {
'match_all': {}
}
},
index = 'my_index',
doc_type = type_name
)
final_result = []
for item in result:
final_result.append(item)
allDoc = final_result
list_step1 = allDoc
'''
for j in allDoc:
for k in list_step1:
if j['_id'] == k['_id']:
break
else:
list_step1.append(j)
'''
#list_step1 = self.make_abstract(list_step1, ['Untitled'])
return list_step1
def search(self, type_name, keywords):
if type_name == 'all':
result = helpers.scan(
self.es,
query = {
'query':{
"multi_match":{
"query" : keywords,
"fields": [ "name", "content" ]
}
}
},
index = 'my_index',
doc_type = None
)
final_result = []
for item in result:
final_result.append(item)
allDoc = final_result
else:
result = helpers.scan(
self.es,
query = {
'query': {
"multi_match": {
"query" : keywords,
"fields": [ "name", "content" ]
}
}
},
index = 'my_index',
doc_type = type_name
)
final_result = []
for item in result:
final_result.append(item)
allDoc = final_result
return allDoc
def search_keywords(self, search_type, search_str):
list_step1 = []
'''
for i in search_str:
temp = self.search(search_type, i)
for j in temp:
for k in list_step1:
if j['_id'] == k['_id']:
break
else:
list_step1.append(j)
'''
list_step1 = self.search(search_type, search_str)
list_step1 = self.make_abstract(list_step1, search_str)
return list_step1
def search_team(self, search_str):
temp = []
search_list = search_str.strip(',').split(' ')
if len(search_list) < 1:
return []
list_all_team = self.search_all("team")
for i in list_all_team:
for j in search_list:
#print(i['_source']['name'])
if j in i['_source']['name'] or j.upper() in i['_source']['content'].upper():
temp.append(i)
break
return temp
def search_player(self, search_str):
temp = []
search_list = search_str.strip(',').split(' ')
if len(search_list) < 1:
return []
list_all_team = self.search_all("player")
for i in list_all_team:
for j in search_list:
#print(i['_source']['name'])
if j in i['_source']['name'] or j.upper() in i['_source']['content'].upper():
temp.append(i)
break
return temp
def search_data(self, search_str):
temp = []
search_list = search_str.strip(',').split(' ')
if len(search_list) < 1:
return []
list_all_data = self.search_all("data")
for i in list_all_data:
for j in search_list:
#print(i['_source']['name'])
if j in i['_source']['name'] or j in i['_source']['team']:
temp.append(i)
break
return temp
def search_data_season(self, search_str, season):
temp = []
search_list = search_str.strip(',').split(' ')
if len(search_list) < 1:
return []
list_all_data = self.search_all("data")
for i in list_all_data:
for j in search_list:
#print(i['_source']['name'])
if j in i['_source']['team'] and season in i['_source']['season']:
temp.append(i)
break
return temp
    def sort(self, list, sign) :
        # In-place O(n^2) swap sort of ES hits; returns the same list.
        #   "by time"   -> descending '_source.my_time' (newest first)
        #   "by season" -> descending '_source.season'
        #   "by defen"  -> rows named '全队数据'/'对手数据' bubbled to the
        #                  front, the rest by descending float('得分').
        # NOTE(review): the parameter shadows the builtin `list`; kept as
        # is to preserve the signature -- consider renaming.
        lenl = len(list)
        if sign == "by time":
            for i in range(0, lenl):
                for j in range(i, lenl):
                    #print(list[i]['_source']['my_time'], list [j]['_source']['my_time'])
                    if(list[i]['_source']['my_time'] < list [j]['_source']['my_time']):
                        list[i], list[j] = list[j], list[i]
        if sign == "by season":
            for i in range(0, lenl):
                for j in range(i, lenl):
                    #print(list[i]['_source']['season'], list [j]['_source']['season'])
                    if(list[i]['_source']['season'] < list [j]['_source']['season']):
                        list[i], list[j] = list[j], list[i]
        if sign == "by defen":
            for i in range(0, lenl):
                for j in range(i, lenl):
                    #print(list[i]['_source']['season'], list [j]['_source']['season'])
                    # Special whole-team / opponent rows always win the swap,
                    # which drags them toward the front of the list.
                    if(list [j]['_source']['name'] == '全队数据'):
                        list[i], list[j] = list[j], list[i]
                    elif(list [j]['_source']['name'] == '对手数据'):
                        list[i], list[j] = list[j], list[i]
                    elif(float(list[i]['_source']['得分']) < float(list [j]['_source']['得分'])):
                        list[i], list[j] = list[j], list[i]
        return list
    def filter(self, list, condition, keywordlist):
        # In-place filter of ES hits by *condition*; returns the same list.
        # Supported conditions: 'source', 'label', 'by team', 'by player',
        # 'recent_1day', 'recent_7day', 'recent_30day'.  Any other value
        # (e.g. 'recent_all' used by count_recent) matches no branch and
        # the list is returned unchanged.
        # NOTE(review): in every keyword branch the `else: del list[j]` is
        # attached to the `if`, not the `for` -- an item is deleted once per
        # NON-matching keyword, shifting indices mid-scan. Looks buggy but
        # preserved as-is; confirm intended semantics before changing.
        # NOTE(review): the `except ...: continue` handlers do not advance
        # j, so a hit that keeps raising (e.g. missing key) loops forever.
        if("all" in keywordlist):
            return list
        if(condition == 'source'):
            if keywordlist == []:
                return list
            j = 0
            while j < len(list):
                try:
                    if condition == 'source':
                        temp = list[j]['_source']['source']
                        list_temp = temp.strip(',').split(' ')
                        for i in keywordlist:
                            if i in list_temp:
                                j = j + 1
                                break
                            else:
                                del list[j]
                except Exception as err:
                    continue
        elif(condition == 'label'):
            j = 0
            while j < len(list):
                if condition == 'label':
                    temp = list[j]['_source']['label']
                    list_temp = temp.strip(',').split(' ')
                    for i in keywordlist:
                        if i in list_temp:
                            j = j + 1
                            break
                        else:
                            del list[j]
        elif(condition == 'by team'):
            if(keywordlist == []):
                return list
            j = 0
            while j < len(list):
                if condition == 'by team':
                    temp1 = list[j]['_source']['name']
                    temp2 = list[j]['_source']['content']
                    list_temp1 = temp1.strip(',').split(' ')
                    list_temp2 = temp2.strip(',').split(' ')
                    for i in keywordlist:
                        if i in list_temp1 or i in list_temp2:
                            j = j + 1
                            break
                        else:
                            del list[j]
        elif(condition == 'by player'):
            if(keywordlist == []):
                return list
            j = 0
            while j < len(list):
                if condition == 'by player':
                    temp1 = list[j]['_source']['name']
                    temp2 = list[j]['_source']['content']
                    list_temp1 = temp1.strip(',').split(' ')
                    list_temp2 = temp2.strip(',').split(' ')
                    for i in keywordlist:
                        if i in list_temp1 or i in list_temp2:
                            j = j + 1
                            break
                        else:
                            del list[j]
        elif(condition == 'recent_1day'):
            # NOTE(review): the "1 day" window uses days=2 (7 -> 8, 30 -> 31
            # below); presumably intentional padding -- confirm.
            now = datetime.datetime.now()
            recent_1day = (now - datetime.timedelta(days=2)).strftime("%Y-%m-%d %H:%M:%S")
            j = 0
            while j < len(list):
                try:
                    temp = list[j]['_source']['my_time']
                    # Timestamps are compared as "%Y-%m-%d %H:%M:%S" strings,
                    # which orders correctly for this fixed format.
                    if(temp < recent_1day):
                        del list[j]
                    else:
                        j = j + 1
                except Exception as err:
                    continue
        elif(condition == 'recent_7day'):
            now = datetime.datetime.now()
            recent_1day = (now - datetime.timedelta(days=8)).strftime("%Y-%m-%d %H:%M:%S")
            j = 0
            while j < len(list):
                try:
                    temp = list[j]['_source']['my_time']
                    if(temp < recent_1day):
                        del list[j]
                    else:
                        j = j + 1
                except Exception as err:
                    continue
        elif(condition == 'recent_30day'):
            now = datetime.datetime.now()
            recent_1day = (now - datetime.timedelta(days=31)).strftime("%Y-%m-%d %H:%M:%S")
            j = 0
            while j < len(list):
                try:
                    temp = list[j]['_source']['my_time']
                    if(temp < recent_1day):
                        del list[j]
                    else:
                        j = j + 1
                except Exception as err:
                    continue
        return list
def count_recent(self, list):
try:
list1 = []
list2 = []
list3 = []
list1.extend(list)
list2.extend(list)
list3.extend(list)
list_recent_1day = self.filter(list1, 'recent_1day', [])
list_recent_7day = self.filter(list2, 'recent_7day', [])
list_recent_30day = self.filter(list3, 'recent_30day', [])
list_recent_all = self.filter(list, 'recent_all', [])
count_recent_1day = len(list_recent_1day)
count_recent_7day = len(list_recent_7day)
count_recent_30day = len(list_recent_30day)
count_recent_all = len(list)
return {'count_recent_all' : count_recent_all,
'count_recent_1day' : count_recent_1day,
'count_recent_7day' : count_recent_7day,
'count_recent_30day' : count_recent_30day}
except Exception:
return {}
def count_source(self, list):
try:
list1 = []
list2 = []
list3 = []
list4 = []
list1.extend(list)
list2.extend(list)
list3.extend(list)
list4.extend(list)
list_source_souhu = self.filter(list1, 'source', ['搜狐体育'])
list_source_sina = self.filter(list2, 'source', ['新浪体育'])
list_source_wangyi = self.filter(list3, 'source', ['网易体育'])
list_source_hupu = self.filter(list4, 'source', ['虎扑体育'])
count_source_souhu = len(list_source_souhu)
count_source_sina = len(list_source_sina)
count_source_wangyi = len(list_source_wangyi)
count_source_hupu = len(list_source_hupu)
return {'count_source_souhu' : count_source_souhu,
'count_source_sina' : count_source_sina,
'count_source_wangyi' : count_source_wangyi,
'count_source_hupu' : count_source_hupu}
except Exception:
return {}
|
{"/search/views.py": ["/search/elasticsearch.py"], "/data/views.py": ["/search/elasticsearch.py"]}
|
6,265
|
pravallika-ganji/bloggy
|
refs/heads/master
|
/blog/urls.py
|
from blog.views import *
from django.urls import path
from django.contrib.auth import views as v
urlpatterns = [
    # Front page and article detail
    path('',home,name="home"),
    path('article/<int:pk>',ArticleView.as_view(), name = "detail"),
    # Post CRUD (create is a CBV; update/delete are function views)
    path('createpost/',PostCreate.as_view(),name="create"),
    path('postupdate/<int:m>',PostUpdate,name="update"),
    path('postdelete/<int:n>',PostDelete,name="delete"),
    # Category management and per-category listing
    path('createcat/',CategoryCreate.as_view(),name="createcat"),
    path('category/<str:cats>/',CategoryView,name="category"),
    path('category/',CategoryListView,name="catlist"),
    # Likes, comments and search
    path('likes/<int:pk>',LikeView,name='like'),
    path('article/<int:pk>/comment/',AddCommentView.as_view(),name="comment"),
    path('search/',searchView,name="search"),
]
|
{"/blog/urls.py": ["/blog/views.py"], "/blog/forms.py": ["/blog/models.py"], "/blog/views.py": ["/blog/models.py", "/blog/forms.py"], "/blog/admin.py": ["/blog/models.py"], "/bloggers/urls.py": ["/bloggers/views.py"], "/bloggers/views.py": ["/bloggers/forms.py", "/blog/models.py", "/blog/forms.py"], "/bloggers/forms.py": ["/blog/models.py"]}
|
6,266
|
pravallika-ganji/bloggy
|
refs/heads/master
|
/blog/models.py
|
from django.db import models
from django.contrib.auth.models import User
from django.urls import reverse
class Category(models.Model):
    # Post category, referenced by name from Post.category (free text).
    name = models.CharField(max_length=200)
    def __str__(self):
        return self.name
    def get_absolute_url(self):
        # After creating a category, redirect back to the home page.
        return reverse('home')
class Profile(models.Model):
    """Extra per-user data: bio, avatar and social links."""
    user = models.OneToOneField(User, null=True,on_delete=models.CASCADE)
    bio = models.TextField()
    propic = models.ImageField(upload_to = 'blogimages/profile',blank=True)
    # NOTE(review): unique=True together with blank=True means two users
    # cannot both leave a URL empty ('' collides with '') -- confirm intent.
    website_url = models.CharField(max_length=200, unique=True,blank=True)
    fb_url = models.CharField(max_length=200, unique=True,blank=True)
    instagram_url = models.CharField(max_length=200, unique=True,blank=True)
    linkedin_url = models.CharField(max_length=200, unique=True,blank=True)
    def __str__(self):
        return str(self.user)
    def get_absolute_url(self):
        # Fixed typo: was 'gey_absolute_url', a name Django's CBVs never
        # look up, so post-save redirects relied on explicit success_url.
        return reverse('home')
    # Backward-compatible alias for any caller using the old misspelling.
    gey_absolute_url = get_absolute_url
class Post(models.Model):
    """A blog article authored by a User, with likes and comments."""
    # approval = models.BooleanField(default=False)
    title = models.CharField(max_length=200, unique=True)
    description = models.TextField(blank=True, null=True)
    image = models.ImageField(upload_to = 'blogimages/')
    created_at = models.DateTimeField(auto_now_add = True)
    updated_at = models.DateTimeField(auto_now = True)
    user = models.ForeignKey(User,on_delete = models.CASCADE)
    # Category stored as free text; the choices list lives in blog/forms.py.
    category = models.CharField(max_length = 255, default='coding')
    likes = models.ManyToManyField(User,related_name='blog_posts')
    def total_likes(self):
        # Number of users who liked this post.
        return self.likes.count()
    def __str__(self):
        return self.title + ' | ' +str(self.user)
    def get_absolute_url(self):
        # Fixed typo: was 'gey_absolute_url' (never called by Django).
        return reverse('home')
    # Backward-compatible alias for the old misspelled name.
    gey_absolute_url = get_absolute_url
class Comment(models.Model):
    # A named comment attached to a Post; reverse accessor: post.comments.
    post = models.ForeignKey(Post,related_name="comments", on_delete = models.CASCADE)
    name = models.CharField(max_length=200)
    comment = models.TextField()
    commented_at = models.DateTimeField(auto_now_add = True)
    def __str__(self):
        return self.post.title
|
{"/blog/urls.py": ["/blog/views.py"], "/blog/forms.py": ["/blog/models.py"], "/blog/views.py": ["/blog/models.py", "/blog/forms.py"], "/blog/admin.py": ["/blog/models.py"], "/bloggers/urls.py": ["/bloggers/views.py"], "/bloggers/views.py": ["/bloggers/forms.py", "/blog/models.py", "/blog/forms.py"], "/bloggers/forms.py": ["/blog/models.py"]}
|
6,267
|
pravallika-ganji/bloggy
|
refs/heads/master
|
/blog/forms.py
|
from blog.models import *
from django import forms
from django.contrib.auth.models import User
# Category choices used by the Select widgets below.
# NOTE(review): this queryset is evaluated at import time; importing this
# module on a fresh database (before migrations) can fail. Consider moving
# the lookup into the forms' __init__.
choices = Category.objects.all().values_list('name','name')
choice_list = list(choices)
class PostForm(forms.ModelForm):
    """Create-post form; the user field is rendered as a hidden input."""
    class Meta:
        model = Post
        fields = ["title","category","user","image","description"]
        widgets = {
            "title":forms.TextInput(attrs={
                "class":"form-control my-2",
                "placeholder":"Enter Blog title",
            }),
            "category":forms.Select(choices=choice_list,attrs={
                "class":"form-control my-2",
                "placeholder":"select",
            }),
            # Hidden field; its value is filled in client-side (id 'elder').
            "user":forms.TextInput(attrs={
                "class":"form-control my-2",
                "value":"",
                "id":'elder',
                "type":'hidden'
            }),
            "description":forms.Textarea(attrs={
                "class":"form-control my-2",
                "placeholder":"Write your description here.",
                # "rows":8,
            }),
        }
class EditPostForm(forms.ModelForm):
    """Edit-post form: like PostForm but without the hidden user field."""
    class Meta:
        model = Post
        fields = ["title","category","image","description"]
        widgets = {
            "title":forms.TextInput(attrs={
                "class":"form-control my-2",
                "placeholder":"Enter Blog title",
            }),
            "category":forms.Select(choices=choice_list,attrs={
                "class":"form-control my-2",
                "placeholder":"select",
            }),
            "description":forms.Textarea(attrs={
                "class":"form-control my-2",
                "placeholder":"Write your description here.",
                "rows":8,
            }),
        }
class CommentForm(forms.ModelForm):
    """Comment form; the target post is attached by the view, not the form."""
    class Meta:
        model = Comment
        fields = ["name","comment"]
        widgets = {
            "name":forms.TextInput(attrs={
                "class":"form-control my-2",
                "placeholder":"Enter name",
            }),
            "comment":forms.Textarea(attrs={
                "class":"form-control my-2",
                "placeholder":"Write your comment here.",
                # "rows":8,
            }),
        }
|
{"/blog/urls.py": ["/blog/views.py"], "/blog/forms.py": ["/blog/models.py"], "/blog/views.py": ["/blog/models.py", "/blog/forms.py"], "/blog/admin.py": ["/blog/models.py"], "/bloggers/urls.py": ["/bloggers/views.py"], "/bloggers/views.py": ["/bloggers/forms.py", "/blog/models.py", "/blog/forms.py"], "/bloggers/forms.py": ["/blog/models.py"]}
|
6,268
|
pravallika-ganji/bloggy
|
refs/heads/master
|
/blog/views.py
|
from django.shortcuts import render,redirect,get_object_or_404
from blog.models import *
from django.views.generic import View, TemplateView, CreateView, FormView, DetailView, ListView
from .forms import *
from django.contrib import messages
from django.contrib.auth import authenticate, login, logout
from django.core.paginator import Paginator
from django.urls import reverse_lazy, reverse
from django.core.mail import send_mail
from django.http import HttpResponseRedirect
from django.contrib.auth.mixins import LoginRequiredMixin
from django.urls import reverse_lazy
# Create your views here.
def home(request):
    """Render the front page with every post and all categories."""
    context = {
        'posts': Post.objects.all(),
        'c': Category.objects.all(),
    }
    return render(request, 'home.html', context)
def homee(request):
    """Render the navbar partial with the list of categories."""
    return render(request, 'navbar.html', {'c': Category.objects.all()})
class AddCommentView(CreateView):
    # Creates a Comment for the post whose pk is in the URL.
    form_class = CommentForm
    template_name = 'addcomment.html'
    success_url = ('/')
    def form_valid(self,form):
        # Attach the new comment to the post referenced in the URL kwargs.
        form.instance.post_id = self.kwargs['pk']
        return super().form_valid(form)
def CategoryListView(request):
    """Render the full category list."""
    return render(request, 'catlist.html', {'c': Category.objects.all()})
class ArticleView(LoginRequiredMixin,DetailView):
    """Post detail page; adds like count and whether the viewer liked it."""
    login_url='login'
    model = Post
    template_name = 'detail.html'
    def get_context_data(self,*args,**kwargs):
        context = super(ArticleView,self).get_context_data(*args,**kwargs)
        stuff = get_object_or_404(Post,id=self.kwargs['pk'])
        liked = False
        # True when the logged-in user is among this post's likers.
        if stuff.likes.filter(id=self.request.user.id).exists():
            liked=True
        total_likes = stuff.total_likes()
        context['total_likes'] = total_likes
        context['liked'] = liked
        return context
def CategoryView(request,cats):
    """List posts of one category; the URL slug uses '-' for spaces."""
    posts = Post.objects.filter(category=cats.replace('-', " "))
    label = cats.title().replace('-', " ")
    return render(request, 'category.html',
                  {'cats': label, 'category_posts': posts})
class PostCreate(LoginRequiredMixin,CreateView):
    # Login-protected post creation using PostForm; redirects home on success.
    login_url='login'
    model = Post
    form_class=PostForm
    template_name = 'createpost.html'
    success_url = reverse_lazy('home')
class CategoryCreate(CreateView):
    # Category creation with an auto-generated form over all model fields;
    # redirect target comes from Category.get_absolute_url.
    model = Category
    template_name = 'createcat.html'
    fields= "__all__"
def PostUpdate(request,m):
    """Edit an existing post; flash a message and go home on success.

    Fix: an invalid POST previously fell through and rebuilt a fresh
    unbound form, silently discarding validation errors; the bound form
    is now re-rendered so the user can see what failed.
    """
    k = Post.objects.get(id=m)
    if request.method == "POST":
        e = PostForm(request.POST,request.FILES,instance=k)
        if e.is_valid():
            e.save()
            messages.warning(request,"{} Blog Updated Successfully".format(k.title))
            return redirect('/')
    else:
        e = PostForm(instance=k)
    return render(request,'update.html',{'x':e,'k':k})
def PostDelete(request,n):
    # Confirm-then-delete: GET renders the confirmation page, POST deletes.
    v = Post.objects.get(id=n)
    if request.method == "POST":
        v.delete()
        messages.info(request,"{} BLog Deleted Successfully".format(v.title))
        return redirect('/')
    return render(request,'delete.html',{'q':v})
def LikeView(request,pk):
    # Toggle the current user's like on a post, then return to its detail page.
    # NOTE(review): the post id is read from POST data while pk (from the
    # URL) is only used for the redirect -- confirm both always agree.
    post = get_object_or_404(Post,id=request.POST.get('post_id'))
    liked = False
    if post.likes.filter(id=request.user.id).exists():
        post.likes.remove(request.user)
        liked=False
    else:
        post.likes.add(request.user)
        liked=True
    return HttpResponseRedirect(reverse('detail',args=[str(pk)]))
def searchView(request):
    # Search posts by a single term submitted via POST.
    if request.method == "POST":
        # NOTE(review): raises KeyError when 'searched' is absent from the
        # POST body, and the chained icontains filters AND the title and
        # description conditions -- an OR (Q objects) may have been
        # intended. Confirm before changing.
        searched = request.POST['searched']
        p = Post.objects.filter(title__icontains=searched,description__icontains=searched)
        return render(request,'search.html',{'posts':p})
    else:
        return render(request,'search.html',{})
|
{"/blog/urls.py": ["/blog/views.py"], "/blog/forms.py": ["/blog/models.py"], "/blog/views.py": ["/blog/models.py", "/blog/forms.py"], "/blog/admin.py": ["/blog/models.py"], "/bloggers/urls.py": ["/bloggers/views.py"], "/bloggers/views.py": ["/bloggers/forms.py", "/blog/models.py", "/blog/forms.py"], "/bloggers/forms.py": ["/blog/models.py"]}
|
6,269
|
pravallika-ganji/bloggy
|
refs/heads/master
|
/blog/admin.py
|
from django.contrib import admin
from .models import Post,Category,Profile,Comment
# Register your models here.
# Expose all blog models in the Django admin with the default ModelAdmin.
admin.site.register(Post)
admin.site.register(Category)
admin.site.register(Profile)
admin.site.register(Comment)
|
{"/blog/urls.py": ["/blog/views.py"], "/blog/forms.py": ["/blog/models.py"], "/blog/views.py": ["/blog/models.py", "/blog/forms.py"], "/blog/admin.py": ["/blog/models.py"], "/bloggers/urls.py": ["/bloggers/views.py"], "/bloggers/views.py": ["/bloggers/forms.py", "/blog/models.py", "/blog/forms.py"], "/bloggers/forms.py": ["/blog/models.py"]}
|
6,270
|
pravallika-ganji/bloggy
|
refs/heads/master
|
/bloggers/urls.py
|
from bloggers.views import *
from django.urls import path
from django.contrib.auth import views as auth_views
urlpatterns = [
    # Account registration and profile editing
    path('register/',UserRegisterView.as_view(),name = "register"),
    path('editprofile/',UserEditView.as_view(),name = "editprofile"),
    path('pwd/',PasswordsChangeView.as_view(),name="change"),
    path('pwdsuccess/',PasswordSuccess,name="pwdsucc"),
    path('<int:pk>/profile/',ShowProfileView.as_view(),name="profile"),
    path('<int:pk>/editprofile/',EditProfileView.as_view(),name="edituserprofile"),
    path('createprofile/',CreateProfilePageView.as_view(),name="createuserprofile"),
    # Password-reset flow using Django's built-in auth views with
    # project-specific templates.
    path('password-reset/',
         auth_views.PasswordResetView.as_view(
             template_name='registration/passwordreset.html',
             # success_url='/login/'
         ),
         name='password-reset'),
    path('password-reset/done/',
         auth_views.PasswordResetDoneView.as_view(
             template_name='registration/passwordresetdone.html'
         ),
         name='password_reset_done'),
    path('password-reset-confirm/<uidb64>/<token>/',
         auth_views.PasswordResetConfirmView.as_view(
             template_name='registration/passwordresetconfirm.html'
         ),
         name='password_reset_confirm'),
    path('password-reset-complete/',
         auth_views.PasswordResetCompleteView.as_view(
             template_name='registration/passwordresetcomplete.html'
         ),
         name='password_reset_complete'),
]
|
{"/blog/urls.py": ["/blog/views.py"], "/blog/forms.py": ["/blog/models.py"], "/blog/views.py": ["/blog/models.py", "/blog/forms.py"], "/blog/admin.py": ["/blog/models.py"], "/bloggers/urls.py": ["/bloggers/views.py"], "/bloggers/views.py": ["/bloggers/forms.py", "/blog/models.py", "/blog/forms.py"], "/bloggers/forms.py": ["/blog/models.py"]}
|
6,271
|
pravallika-ganji/bloggy
|
refs/heads/master
|
/bloggers/views.py
|
from django.shortcuts import render,HttpResponseRedirect,redirect,get_object_or_404,reverse
from django.views import generic
from django.views.generic import DetailView,UpdateView,CreateView
from django.contrib.auth.forms import UserCreationForm,UserChangeForm
from django.urls import reverse_lazy
from .forms import RegisterForm,EditProfileForm,PasswordChangingForm,ProfilePageForm,EditProfilePageForm
from django.contrib.auth.views import *
from blog.models import Profile
from blog.forms import *
# Create your views here.
class CreateProfilePageView(CreateView):
    # First-time profile creation; the profile is bound to the current user.
    form_class = ProfilePageForm
    template_name = 'registration/createuser.html'
    success_url = reverse_lazy('home')
    def form_valid(self,form):
        # Bind the new profile to the logged-in user before saving.
        form.instance.user = self.request.user
        return super().form_valid(form)
class ShowProfileView(DetailView):
    """Public profile page: the Profile plus a list of posts.

    Fix: removed an unused `Profile.objects.all()` local that issued a
    dead database query on every page view.
    """
    model = Profile
    template_name= "registration/userprof.html"
    def get_context_data(self,*args,**kwargs):
        # NOTE(review): posts are filtered by the *logged-in* user, not by
        # the profile being viewed (page_user) -- confirm this is intended.
        user = self.request.user
        p = Post.objects.filter(user=user)
        context = super(ShowProfileView,self).get_context_data(*args,**kwargs)
        page_user = get_object_or_404(Profile,id=self.kwargs['pk'])
        context['page_user'] = page_user
        context['posts'] = p
        return context
class EditProfileView(UpdateView):
    # Edit the current user's Profile (bio, avatar, social links).
    form_class = EditProfilePageForm
    template_name = 'registration/edituser.html'
    success_url = reverse_lazy('home')
    def get_object(self):
        # Always edit the logged-in user's own profile, ignoring URL pk.
        return self.request.user.profile
class UserRegisterView(generic.CreateView):
    # Account sign-up; on success the user is sent to the login page.
    form_class = RegisterForm
    template_name = 'registration/register.html'
    success_url = reverse_lazy('login')
class UserEditView(generic.UpdateView):
    # Edit the built-in User fields (name, email) of the current user.
    form_class = EditProfileForm
    template_name = 'registration/editprofile.html'
    success_url = reverse_lazy('home')
    def get_object(self):
        # Always edit the logged-in user; no pk is taken from the URL.
        return self.request.user
class PasswordsChangeView(PasswordChangeView):
    # Password change using the styled form; redirects to the success page.
    form_class = PasswordChangingForm
    success_url = reverse_lazy('pwdsucc')
    template_name = 'registration/changepwd.html'
def PasswordSuccess(request):
    """Confirmation page shown after a successful password change."""
    return render(request, 'registration/pwdsucc.html', {})
|
{"/blog/urls.py": ["/blog/views.py"], "/blog/forms.py": ["/blog/models.py"], "/blog/views.py": ["/blog/models.py", "/blog/forms.py"], "/blog/admin.py": ["/blog/models.py"], "/bloggers/urls.py": ["/bloggers/views.py"], "/bloggers/views.py": ["/bloggers/forms.py", "/blog/models.py", "/blog/forms.py"], "/bloggers/forms.py": ["/blog/models.py"]}
|
6,272
|
pravallika-ganji/bloggy
|
refs/heads/master
|
/bloggers/forms.py
|
from blog.models import *
from django import forms
from django.contrib.auth.models import User
from django.contrib.auth.forms import UserCreationForm,PasswordChangeForm,UserChangeForm
class RegisterForm(UserCreationForm):
    """Sign-up form with Bootstrap-styled widgets on every field."""
    password1 = forms.CharField(max_length=100,widget=forms.PasswordInput(attrs={'class':'form-control','type':'password'}))
    password2 = forms.CharField(max_length=100,widget=forms.PasswordInput(attrs={'class':'form-control','type':'password'}))
    class Meta:
        model = User
        fields = ["username","first_name","last_name","email","password1","password2"]
        widgets = {
            "username":forms.TextInput(attrs={
                "class":"form-control my-2",
                "placeholder":"Enter Username",
            }),
            "first_name":forms.TextInput(attrs={
                "class":"form-control my-2",
                "placeholder":"Enter firstname",
            }),
            "last_name":forms.TextInput(attrs={
                "class":"form-control my-2",
                "placeholder":"Enter lastname",
            }),
            "email":forms.EmailInput(attrs={
                "class":"form-control my-2",
                "placeholder":"Enter email address",
            }),
        }
class EditProfileForm(UserChangeForm):
    """Edit form for the built-in User account fields."""
    class Meta:
        model = User
        fields = ["username","first_name","last_name","email"]
        widgets = {
            "username":forms.TextInput(attrs={
                "class":"form-control my-2",
                "placeholder":"Enter Username",
            }),
            "first_name":forms.TextInput(attrs={
                "class":"form-control my-2",
                "placeholder":"Enter firstname",
            }),
            "last_name":forms.TextInput(attrs={
                "class":"form-control my-2",
                "placeholder":"Enter lastname",
            }),
            "email":forms.EmailInput(attrs={
                "class":"form-control my-2",
                "placeholder":"Enter email address",
            }),
        }
class PasswordChangingForm(PasswordChangeForm):
    """Password-change form with Bootstrap-styled password inputs."""
    old_password = forms.CharField(widget=forms.PasswordInput(attrs={'class':'form-control','type':'password'}))
    new_password1 = forms.CharField(max_length=100,widget=forms.PasswordInput(attrs={'class':'form-control','type':'password'}))
    new_password2 = forms.CharField(max_length=100,widget=forms.PasswordInput(attrs={'class':'form-control','type':'password'}))
    class Meta:
        model = User
        fields = ('old_password','new_password2','new_password1')
class ProfilePageForm(forms.ModelForm):
    """Create-profile form for the Profile model.

    Fix: every widget's placeholder read "Enter bio here" (copy-paste);
    each field now carries its own placeholder text.
    """
    class Meta:
        model = Profile
        fields = ('bio','propic','website_url','fb_url','instagram_url','linkedin_url')
        widgets = {
            "bio":forms.Textarea(attrs={
                "class":"form-control my-2",
                "placeholder":"Enter bio here",
            }),
            "website_url":forms.TextInput(attrs={
                "class":"form-control my-2",
                "placeholder":"Enter website URL here",
            }),
            "fb_url":forms.TextInput(attrs={
                "class":"form-control my-2",
                "placeholder":"Enter Facebook URL here",
            }),
            "instagram_url":forms.TextInput(attrs={
                "class":"form-control my-2",
                "placeholder":"Enter Instagram URL here",
            }),
            "linkedin_url":forms.TextInput(attrs={
                "class":"form-control my-2",
                "placeholder":"Enter LinkedIn URL here",
            }),
        }
class EditProfilePageForm(forms.ModelForm):
    """Edit-profile form for the Profile model.

    Fix: every widget's placeholder read "Enter bio here" (copy-paste);
    each field now carries its own placeholder text.
    """
    class Meta:
        model = Profile
        fields = ('bio','propic','website_url','fb_url','instagram_url','linkedin_url')
        widgets = {
            "bio":forms.Textarea(attrs={
                "class":"form-control my-2",
                "placeholder":"Enter bio here",
            }),
            "website_url":forms.TextInput(attrs={
                "class":"form-control my-2",
                "placeholder":"Enter website URL here",
            }),
            "fb_url":forms.TextInput(attrs={
                "class":"form-control my-2",
                "placeholder":"Enter Facebook URL here",
            }),
            "instagram_url":forms.TextInput(attrs={
                "class":"form-control my-2",
                "placeholder":"Enter Instagram URL here",
            }),
            "linkedin_url":forms.TextInput(attrs={
                "class":"form-control my-2",
                "placeholder":"Enter LinkedIn URL here",
            }),
        }
|
{"/blog/urls.py": ["/blog/views.py"], "/blog/forms.py": ["/blog/models.py"], "/blog/views.py": ["/blog/models.py", "/blog/forms.py"], "/blog/admin.py": ["/blog/models.py"], "/bloggers/urls.py": ["/bloggers/views.py"], "/bloggers/views.py": ["/bloggers/forms.py", "/blog/models.py", "/blog/forms.py"], "/bloggers/forms.py": ["/blog/models.py"]}
|
6,273
|
pravallika-ganji/bloggy
|
refs/heads/master
|
/bloggers/apps.py
|
from django.apps import AppConfig
class BloggersConfig(AppConfig):
    # Standard Django application configuration for the 'bloggers' app.
    default_auto_field = 'django.db.models.BigAutoField'
    name = 'bloggers'
|
{"/blog/urls.py": ["/blog/views.py"], "/blog/forms.py": ["/blog/models.py"], "/blog/views.py": ["/blog/models.py", "/blog/forms.py"], "/blog/admin.py": ["/blog/models.py"], "/bloggers/urls.py": ["/bloggers/views.py"], "/bloggers/views.py": ["/bloggers/forms.py", "/blog/models.py", "/blog/forms.py"], "/bloggers/forms.py": ["/blog/models.py"]}
|
6,274
|
Lugiax/AutomaticControl
|
refs/heads/master
|
/Subrutinas.py
|
# -*- coding: utf-8 -*-
"""
Created on Thu Jan 14 22:11:43 2016
@author: root
"""
from __future__ import division
import numpy as np
import random as rnd
import matplotlib.pyplot as plt
###############################################################################
def Perturbaciones(rango,dt=0.01,n_perts=50,plot=0,tipo=None,seed=None,sin_pert=False):
    """Generate *n_perts* random noise series with optional perturbations.

    rango    : (start, end) of the time span; the sample count is
               end/dt - start, as in the original formulation.
    tipo     : 1=ramp, 2=step, 3=narrow-Gaussian ("Dirac") pulse;
               None picks a random type per perturbation.
    sin_pert : when True, ~20% of the series are emitted unperturbed.
    Returns a list of numpy arrays of equal length.

    Fixes: the sample count is now an int (np.random.rand rejects float
    arguments on modern NumPy), and a random `tipo` is re-drawn for each
    perturbation instead of being frozen after the first draw.
    """
    rnd.seed(seed);np.random.seed(seed)
    def f_pert(x,mag,tipo,mitad):
        # Perturbation shape evaluated at normalised position x in [0, 1].
        if tipo==1:      # ramp
            return((x-inicio/l_L)*mag)
        elif tipo==2:    # step
            return(mag)
        elif tipo==3:    # narrow Gaussian, approximates a Dirac pulse
            return (mag*np.exp(-(x-mitad)**2/(2*0.001**2)))
    x0,xmax=rango
    xmax/=dt
    # int() cast: np.random.rand requires integer dimensions.
    L=(np.random.rand(int(round(xmax-x0)))*2-1)*.1
    l_L=len(L)
    perts=list()
    for i in range(n_perts):
        L_p=np.copy(L)
        if sin_pert and rnd.random()>0.8:
            # Emit the base noise without any perturbation.
            perts.append(L_p)
            if plot==1:
                plt.plot(L_p)
                plt.show()
            continue
        proporcion=rnd.random()      # fraction of the span perturbed
        prop_inicio=rnd.random()     # where the perturbation starts
        prop_magnitud=rnd.random()   # scaling of the magnitude
        # Draw a fresh random type per perturbation when none was fixed.
        tipo_i = tipo if tipo else rnd.randint(1,3)
        if tipo_i==1:    # ramp
            magnitud=rnd.randint(-10,10)*prop_magnitud
        elif tipo_i==2:  # step
            magnitud=rnd.randint(-5,5)*prop_magnitud
        elif tipo_i==3:  # pulse
            magnitud=rnd.randint(-10,10)*prop_magnitud
        inicio=int((1-proporcion)*prop_inicio*l_L)
        num_datos_modificados=int(l_L*proporcion)
        for j in range(num_datos_modificados):
            L_p[inicio+j]+=f_pert((inicio+j)/l_L,magnitud,tipo_i,(num_datos_modificados/2+inicio)/l_L)
            # Propagate the new level to the remaining samples.
            L_p[inicio+j+1:]+=L_p[inicio+j]
        perts.append(L_p)
        if plot==1:
            plt.plot(L_p)
            plt.show()
    return(perts)
###############################################################################
def Perturbar(pert=None,Lvar0=None,inter=(0,1),dt=0.01):
    """Apply one predefined perturbation to a copy of *Lvar0*.

    pert selects the shape: 0/1 add/subtract a Gaussian bump, 2/3
    add/subtract a linear ramp, 4/5 combine ramp and bump; any other
    value returns an unchanged copy.  inter is the (start, end) window
    in time units and dt the sampling step.
    """
    salida = np.copy(Lvar0)
    bump = lambda t: 10*np.exp(-(t-0.5)**2/(2*0.05**2))
    arranque = inter[0]
    tope = int(arranque + (inter[1] - inter[0]))
    x = 0
    for j in range(int(arranque/dt), int(tope/dt)):
        t = x*dt
        if pert == 0:
            salida[j] = salida[j] + bump(t)
        elif pert == 1:
            salida[j] = salida[j] - bump(t)
        elif pert == 2:
            salida[j] = salida[j] + t*3
        elif pert == 3:
            salida[j] = salida[j] - t*3
        elif pert == 4:
            salida[j] = salida[j] - t*3 + bump(t)
        elif pert == 5:
            salida[j] = salida[j] + t*3 - bump(t)
        x += 1
    return(salida)
###############################################################################
def norm(a,maxmin=None):
    """Min-max normalise *a* onto [0, 1].

    Without *maxmin* the array's own extrema are used and the function
    returns (normalised, (max, min)); with maxmin=(max, min) supplied it
    applies that scale and returns only the normalised array.
    """
    datos = np.copy(a)
    if maxmin:
        maximo, minimo = maxmin
        return((datos-minimo)/(maximo-minimo))
    minimo = np.min(datos)
    maximo = np.max(datos)
    return((datos-minimo)/(maximo-minimo), (maximo, minimo))
def denorm(a,maxmin):
    """Invert norm(): map values from [0, 1] back onto [min, max]."""
    maximo, minimo = maxmin
    escalado = np.copy(a) * (maximo - minimo)
    return(escalado + minimo)
|
{"/columna2.py": ["/numericos.py"], "/Main2.py": ["/columna2.py", "/RN2.py", "/Subrutinas.py"]}
|
6,275
|
Lugiax/AutomaticControl
|
refs/heads/master
|
/Main3.py
|
# -*- coding: utf-8 -*-
"""
Created on Mon Jan 4 11:24:45 2016
Entrenamiento de la red neuronal con los resultados de Main.py
@author: carlos
"""
from __future__ import division
import numpy as np
from RN2 import RedNeuronal
import matplotlib.pyplot as plt
import re
from Subrutinas import norm
# Load the genetic-algorithm results dump and extract the tagged fields.
with open('resultadosAG.txt','r') as f:
    datos=f.read()
datosx=re.findall('\<pert0\>([-\d.,]+)',datos)
datosy=re.findall('\<par\>([-\d.,]+)',datos)
interx=float(re.findall('\<interx\>([-\d.]+)',datos)[0])
intery=float(re.findall('\<intery\>([-\d.]+)',datos)[0])
dt=float(re.findall('\<dt\>([-\d.]+)',datos)[0])
tabla_resultados=list()
npert=len(datosx)
# Per-controller gain histories: [proportional, derivative, integral].
contr_l=[list(),list(),list()]
for i in range(npert):
    datox=datosx[i].split(',')
    datox=[float(x) for x in datox]
    datoy=datosy[i].split(',')
    datoy=[float(x) for x in datoy]
    contr_l[0].append(datoy[0])
    contr_l[1].append(datoy[1])
    contr_l[2].append(datoy[2])
    # Keep only samples with exactly 100 inputs and 3 gains.
    if len(datox)==100 and len(datoy)==3:
        tabla_resultados.append([datox,datoy])
    else:
        # NOTE(review): this prints len(datosx)/len(datosy) (totals), not
        # len(datox)/len(datoy) for the offending sample -- likely a typo.
        print 'La perturbacion {} no cumple con las dimensiones-> x:{} ; y:{}'.format(i+1,len(datosx),len(datosy))
# Visual sanity check of the three gain series.
plt.figure(figsize=(15,7))
plt.subplot(1,3,1)
plt.plot(contr_l[0])
plt.title('Controlador Proporcional')
plt.subplot(1,3,2)
plt.plot(contr_l[1])
plt.title('Controlador Derivativo')
plt.subplot(1,3,3)
plt.plot(contr_l[2])
plt.title('Controlador Integrativo')
plt.show()
# Normalise inputs to [-interx, interx] and gains to [0, intery].
x_ent=list();y_ent=list()
for res in tabla_resultados:
    x=list(norm(res[0],(interx,-interx)))
    y=list(norm(res[1],(intery,0)))
    x_ent.append(list(x))
    y_ent.append(list(y))
'''
Entrenamiento de la red neuronal
'''
tipo_ent='EL'
datos=(np.array(x_ent),np.array(y_ent))
print '\nInicio de la red'
estructura=[10,10,5]
red=RedNeuronal(estructura=estructura,deb=True)
pesos=red.Entrenar(datos_ent=datos,tipo_entrenamiento=tipo_ent,max_iter=1000000)
print '\nFinalización de entrenamiento, se comienza a almacenar los pesos'
# Persist the trained weights with the normalisation metadata so they can
# be reloaded and de-normalised later.
with open('./pruebasent.txt','w') as f:
    info='<entrenamiento><estructura>{}</estructura><interx>{}</interx><intery>{}</intery><dt>{}</dt>\n'.format(','.join([str(k) for k in red.estructura]),interx,intery,dt)
    f.write(info)
    p=''
    for i in pesos:
        for k in i:
            for l in k:
                p=p+str(l)+','
    w=p[:-1]+'\n' #Todo junto
    f.write('<pesos>'+w+'</pesos>\</entrenamiento>')
print '\nFin de la operación'
|
{"/columna2.py": ["/numericos.py"], "/Main2.py": ["/columna2.py", "/RN2.py", "/Subrutinas.py"]}
|
6,276
|
Lugiax/AutomaticControl
|
refs/heads/master
|
/RN2.py
|
# -*- coding: utf-8 -*-
"""
Created on Wed Dec 23 08:49:02 2015
@author: carlos
Se creará la clase de Red Neuronal
"""
from __future__ import division
import numpy as np
import numpy.random as rnd
from AGmultivar import AG
class RedNeuronal(object):
    def __init__(self,estructura,datos_de_entrenamiento=None,neurodos_entrada_salida=None,deb=False,bias=1,seed=None):
        ## The network structure is given as a tuple/list with the number
        ## of neurons per hidden layer: e.g. for a net with three hidden
        ## layers of five neurons each, pass (5,5,5).
        self.estructura=estructura[::]
        self.datos_de_entrenamiento=datos_de_entrenamiento
        # If explicit input/output sizes are supplied, prepend and append
        # them to the hidden-layer structure.
        if isinstance(neurodos_entrada_salida,(list,tuple)):
            entrada,salida=neurodos_entrada_salida
            self.estructura.insert(0,entrada);self.estructura.append(salida)
        # Otherwise, if training data is given, infer the input/output
        # sizes from its dimensions.
        if isinstance(datos_de_entrenamiento,(list,tuple)):
            nx=len(datos_de_entrenamiento[0]);ny=len(datos_de_entrenamiento[1])
            nueva_estructura=self.estructura[::]
            nueva_estructura.insert(0,nx);nueva_estructura.append(ny)
            self.estructura=nueva_estructura
        ## Instance state: weights start unset; bias/seed/debug as given.
        self.pesos=None
        self.bias=bias
        self.deb=deb
        self.seed=seed
    def FP(self,pesos=None,xi=None,seed=None,estructura=None):
        # Forward propagation: returns the list of per-layer activations,
        # with activaciones[0] == xi and activaciones[-1] the net output.
        # pesos/estructura fall back to the instance's stored values.
        if not estructura:
            estructura=self.estructura
        if not isinstance(pesos,(np.ndarray,list)):
            pesos=self.pesos
        ## With weights and inputs resolved, run the forward pass
        ## layer by layer.
        activaciones=[xi]
        for capa in range(len(estructura)-1):
            x=activaciones[capa]
            # Append the bias term to this layer's input vector.
            x=np.append(x,[[self.bias]])
            z=np.dot(pesos[capa],x)
            activaciones.append(self.sigmoide(z))
        return(activaciones)
def Entrenar(self,datos_ent=None,pesos=None,alpha=0.3,max_iter=500000,seed=None,tipo_entrenamiento='EL',parametrosAG=(30,500),pruebasAG=1):
##Parametros AG:(Nind, Ngen) ; agpruebas-> Numero de pruebas para asegurar convergencia
##El tipo de entrenamiento por defecto es Fuera de Linea (FL), también
##puede seleccionarse el tipo En Linea (EL)
if not seed: seed=self.seed
##Se confirma que los datos ingresados sean correctos, además de obtner
##el número de datos de entrada y de salida
if not isinstance(datos_ent,(np.ndarray,list,tuple)):
xi,yi=self.datos_de_entrenamiento
nx,mx=verificarDatosEnt(xi);ny,my=verificarDatosEnt(yi)
else:
xi,yi=datos_ent
nx,mx=verificarDatosEnt(xi);ny,my=verificarDatosEnt(yi)
nueva_estructura=self.estructura[::]
nueva_estructura.insert(0,nx);nueva_estructura.append(ny)
self.estructura=nueva_estructura
if mx!=my:
raise ValueError('Las dimensiones de muestras de x y y no concuerdan')
##Se copia la estructura predeterminada de la red y se añade
##la cantidad de neurodos en la entrada y a la salida
matrices,npesos=dimensionarMatricesDePesos(self.estructura)
if self.deb:print 'Estructura/Matrices:',nueva_estructura,matrices
##Si no se ingresan los pesos, se crean aleatoriamente unos
if not isinstance(pesos,np.ndarray):
pesos=self.AsignarPesos(nueva_estructura,seed)
if self.deb:print 'Pesos asignados'
else:
##Se verifican los pesos para que puedan ser utilizados
verificarPesos(nueva_estructura,pesos)
if self.deb:print 'Pesos verificados'
## Entrenamiento
if self.deb:print '\nInicio de entrenamiento ,tipo: {}'.format(tipo_entrenamiento)
if tipo_entrenamiento=='EL':
'''
Entrenamiento tipo En Linea, por cada muestra se actualizan los pesos
'''
for ent in range(max_iter):
##Seleccion de datos de entrenamiento
num_entrenamiento=rnd.randint(0,mx)
x=xi[num_entrenamiento];y=yi[num_entrenamiento]
##Propagacion hacia adelante
activaciones=self.FP(pesos=pesos,xi=x,seed=seed)
y_red=activaciones[-1]
error=y-y_red
##Se calculan y almacenan las deltas de cada capa
##Para la capa final:
d_final=(np.atleast_2d(error*self.sig_prim(y_red))).T
deltas=[d_final]
##Para las demás capas:
for i in range(len(activaciones)-2,0,-1):
filas=range(len(pesos[i].T)-1)
wi=(pesos[i].T)[filas,:]
act=np.array([activaciones[i]]).T
delta=np.dot(wi,deltas[-1])*self.sig_prim(act)
deltas.append(delta)
##Se invierten para facilidad de uso posterior
deltas.reverse()
##Actualizacion de pesos en linea
for i in range(len(pesos)):
act=np.atleast_2d(np.append(activaciones[i],[self.bias]))
delta_pesos=np.dot(deltas[i],act)
pesos[i]+=alpha*delta_pesos
if ent%int(max_iter*.1)==0 and self.deb: print '\nIteracion:',ent,' error:',.5*np.sum(error)**2,'\n',y_red,'->',y
elif tipo_entrenamiento=='FL':
'''
Entrenamiento fuera de linea, se prueba con todas las muestras y se actualizan después los pesos
'''
for ent in range(max_iter):
deltas_pesos=list()
for m in range(mx):
# print '\nMuestra',m+1,'-'*10
##Seleccion de datos de entrenamiento
x=xi[m];y=yi[m][0]
##Propagacion hacia adelante, se almacenan todas las activaciones
##de los neurodos
activaciones=self.FP(pesos=pesos,xi=x,seed=seed)
y_red=activaciones[-1]
error=y-y_red#-y*np.log(y_red)+(1-y)*np.log(1-y_red)
##Se calculan y almacenan las deltas de cada capa
##Para la capa final:
d_final=(np.atleast_2d(error*self.sig_prim(y_red))).T
deltas=[d_final]
##Para las demás capas:
for i in range(len(activaciones)-2,0,-1):
filas=range(len(pesos[i].T)-1)
wi=(pesos[i].T)[filas,:]
act=np.array([activaciones[i]]).T
delta=np.dot(wi,deltas[-1])*self.sig_prim(act)
deltas.append(delta)
##Se invierten para facilidad de uso posterior
deltas.reverse()
##Actualizacion de pesos con el error total
for i in range(len(pesos)):
# print '\nPesos',i+1
act=np.atleast_2d(np.append(activaciones[i],[self.bias]))
delta_pesos=np.dot(deltas[i],act)
try:
# print 'Anteriores',deltas_pesos[i]
# print 'A añadir',delta_pesos
deltas_pesos[i]+=delta_pesos/mx
#print 'Se suma la matriz de delta_pesos'
except:
# print 'Añadidos',delta_pesos
deltas_pesos.append(delta_pesos/mx)
#print 'Se agrega la matriz de delta_pesos'
for i in range(len(deltas_pesos)):
pesos[i]+=alpha*delta_pesos
if ent%int(max_iter*.1)==0 and self.deb: print '\nIteracion:{:^8} - error:{:^5.4e}'.format(ent,.5*np.sum(error)**2)
elif tipo_entrenamiento=='AG':
'''
Entrenamiento por medio de algoritmo genético!!! :D
'''
if not parametrosAG: ##Revisando los parámetros
raise ValueError('No hay parametros para el Algoritmo genético')
else:
num_individuos,num_generaciones=parametrosAG
ag=AG(deb=self.deb)##Se inicia el AG y se asignan parámetros
ag.parametros(optim=0,Nind=num_individuos,Ngen=num_generaciones,pruebas=pruebasAG)
##Se define la función objetivo, la que deberá optimizarse
def fobj(pesos,est):
error_tot=0
for m in range(mx):#mx):
x=xi[m]
W=redimensionarPesos(pesos,est)
y_red=self.FP(pesos=W,xi=x)[-1]
y=yi[m]
error_par=(y-y_red)**2#-y*np.log(a)-(1-y)*np.log(1-a)
error_tot+=np.array([np.sum(error_par)])
return(error_tot/(m+1))
ag.variables(comun=[npesos,-50,50])
ag.Fobj(fobj,matrices)
Went,error=ag.start()
pesos=redimensionarPesos(Went,matrices)
print 'Error min: {:.4e}'.format(error[0])
elif tipo_entrenamiento=='CM':
'''
Entrenamiento combinado entre Algoritmo Genético y
BackPropagation
'''
if not parametrosAG: ##Revisando los parámetros
raise ValueError('No hay parametros para el Algoritmo genético')
else:
num_individuos,num_generaciones=parametrosAG
ag=AG(deb=self.deb)##Se inicia el AG y se asignan parámetros
ag.parametros(optim=0,Nind=num_individuos,Ngen=num_generaciones,pruebas=pruebasAG)
##Se define la función objetivo, la que deberá optimizarse
def fobj(pesos,est):
error_tot=0
for m in range(mx):#mx):
x=xi[m]
W=redimensionarPesos(pesos,est)
y_red=self.FP(pesos=W,xi=x)[-1]
y=yi[m]
error_par=(y-y_red)**2#-y*np.log(a)-(1-y)*np.log(1-a)
error_tot+=np.array([np.sum(error_par)])
return(error_tot/(m+1))
ag.variables(comun=[npesos,-50,50])
ag.Fobj(fobj,matrices)
Went,error=ag.start()
pesos=redimensionarPesos(Went,matrices)
if self.deb:print 'Error min: {:.4e}\n\nComienza BackProp'.format(error[0])
for ent in range(max_iter):
##Seleccion de datos de entrenamiento
num_entrenamiento=rnd.randint(0,mx)
x=xi[num_entrenamiento];y=yi[num_entrenamiento]
##Propagacion hacia adelante
activaciones=self.FP(pesos=pesos,xi=x,seed=seed)
y_red=activaciones[-1]
error=y-y_red
##Se calculan y almacenan las deltas de cada capa
##Para la capa final:
d_final=(np.atleast_2d(error*self.sig_prim(y_red))).T
deltas=[d_final]
##Para las demás capas:
for i in range(len(activaciones)-2,0,-1):
filas=range(len(pesos[i].T)-1)
wi=(pesos[i].T)[filas,:]
act=np.array([activaciones[i]]).T
delta=np.dot(wi,deltas[-1])*self.sig_prim(act)
deltas.append(delta)
##Se invierten para facilidad de uso posterior
deltas.reverse()
##Actualizacion de pesos en linea
for i in range(len(pesos)):
act=np.atleast_2d(np.append(activaciones[i],[self.bias]))
delta_pesos=np.dot(deltas[i],act)
pesos[i]+=alpha*delta_pesos
if ent%int(max_iter*.1)==0 and self.deb: print '\nIteracion:',ent,' error:',.5*np.sum(error)**2,'\n',y_red,'->',y
if self.deb:print 'Fin del entrenamiento'
self.pesos=pesos
return(pesos)
def AsignarPesos(self,estructura,seed=None):
rnd.seed(seed)
##Si no hay pesos asignados se crea la matriz de pesos aleatorios
est,num_total_pesos=dimensionarMatricesDePesos(estructura)
pesos_sin_formato=2*rnd.rand(num_total_pesos)-1
pesos=redimensionarPesos(pesos_sin_formato,est)
return(pesos)
def sigmoide(self,a):
return(1.0/(1.0+np.exp(-a)))
def sig_prim(self,a):
return(a*(1.-a))
'''
-------------------------------------------------------------------------------
'''
def verificarDatosEnt(datos):
    """Validate that every sample in *datos* has the same length.

    Parameters:
        datos -- iterable of sized samples (rows of a list-of-lists
                 or of a 2-D array).

    Returns:
        (l, m) -- l is the common sample length (None for an empty
        input), m is the number of samples.

    Raises:
        ValueError -- if two samples have different lengths.
    """
    l = None
    m = 0
    for muestra in datos:
        li = len(muestra)
        ## 'l is None' instead of the original 'not l': with a
        ## zero-length first sample, 'not l' stayed true forever, so l
        ## was re-assigned on every row and mismatches against length 0
        ## were never detected.  (The 'break' after the raise was
        ## unreachable and has been dropped.)
        if l is None:
            l = li
        elif li != l:
            raise ValueError('La configuración de los datos no es la misma')
        m += 1
    return (l, m)
def verificarPesos(est,pesos):
    """Check that each weight matrix in *pesos* has the shape implied
    by the architecture *est* (as computed by
    dimensionarMatricesDePesos).

    Raises:
        ValueError -- on the first shape mismatch.
    """
    matriz=dimensionarMatricesDePesos(est)[0]
    for i in range(len(pesos)):
        if matriz[i]!=pesos[i].shape:
            ## fixed typo in the message: 'don' -> 'con'
            raise ValueError('La matriz de pesos no concuerda con la estructura')
def dimensionarMatricesDePesos(arq):
    """Return the per-layer weight-matrix shapes and the total weight
    count for an architecture given as neurons per layer.

    Each shape is (neurons_out, neurons_in + 1); the extra column
    holds the bias neuron's weight.
    """
    formas = [(salida, entrada + 1)
              for entrada, salida in zip(arq, arq[1:])]
    total = sum(filas * columnas for filas, columnas in formas)
    return (formas, total)
def redimensionarPesos(pesos,estructura):
    """Slice the flat weight vector *pesos* into one numpy array per
    shape listed in *estructura*, in order, and return them as a list.
    """
    matrices = []
    inicio = 0
    for forma in estructura:
        tam = forma[0] * forma[1]
        matrices.append(np.reshape(pesos[inicio:inicio + tam], forma))
        inicio += tam
    return (matrices)
'''
-------------------------------------------------------------------------------
'''
if __name__ == '__main__':
    ## Demo: train the network on the XOR problem with the combined
    ## GA + backpropagation mode and print its predictions.
    est=[2,2]#the real one is [2,2,2,1]
    nn=RedNeuronal(est,deb=True)
    xi=np.array([[0,0],
                 [1,0],
                 [0,1],
                 [1,1]])
    yi=[[0],[1],[1],[0]]
    yi=np.array(yi)
#    xi=np.array([[.05,.1]])
#    yi=np.array([[.01,.99]])
    nuevos_pesos=nn.Entrenar(datos_ent=(xi,yi),tipo_entrenamiento='CM',parametrosAG=(20,60),max_iter=3000)
    ## FP returns the per-layer activations; the last one is the output
    res=nn.FP(xi=[xi[0]],pesos=nuevos_pesos)
    print '\nPredicciones'
    print xi[0],res[-1]
    res=nn.FP(xi=[xi[1]],pesos=nuevos_pesos)
    print xi[1],res[-1]
    res=nn.FP(xi=[xi[2]],pesos=nuevos_pesos)
    print xi[2],res[-1]
    res=nn.FP(xi=[xi[3]],pesos=nuevos_pesos)
    print xi[3],res[-1]
|
{"/columna2.py": ["/numericos.py"], "/Main2.py": ["/columna2.py", "/RN2.py", "/Subrutinas.py"]}
|
6,277
|
Lugiax/AutomaticControl
|
refs/heads/master
|
/columna2.py
|
# -*- coding: utf-8 -*-
"""
Created on Thu Oct 15 08:53:04 2015
@author: carlos
Modulo que contiene las diferentes clases que componen a una
columna de destilación
"""
from numericos import newton
import numpy as np
import matplotlib.pyplot as plt
'''
Rehervidor
Su única alimentación es el líquido que baja del primer plato.
Se añade además una cantidad de calor y se extraen dos corrientes:
una de fondos y otra de vapor.
La temperatura de referencia es 273.15K=0ºC
'''
class reboiler():
    """Dynamic reboiler model with PD control loops.

    The only feed is the liquid leaving the first tray; heat is added
    and two streams are withdrawn (bottoms and vapour).  Reference
    temperature is 273.15 K = 0 C.  Integrated with a hand-rolled
    RK4 step in 'actualizar'.
    """
    def __init__(self,deb=False):
        self.deb=deb
        '''Entradas'''
        ## L: feed flow, xl: feed composition, hl: feed enthalpy,
        ## Pt: total pressure
        self.L=0;self.xl=0;self.hl=0;self.Pt=0
        '''Salidas'''
        ## V/H: vapour flow and enthalpy; B/x/h: bottoms flow,
        ## composition, enthalpy; M: holdup, T: temperature
        self.V=0;self.H=0
        self.B=0;self.x=0;self.h=0
        self.M=0;self.T=0
        '''Propiedades de mezcla y substancias'''
        self.lamvapsubs=(854,2260)    # heats of vaporisation of the two pure components
        self.cpsubs=(2.42,4.18)       # heat capacities of the two pure components
        self.tono=((8.20417,1642.89,230.3),(8.0713,1730.63,233.426))  # Antoine constants (A,B,C) per component
        self.alpha=2.54               # relative volatility
        '''Constantes de Control'''
        self.kcb=0.7;self.tdb=0.9;self.Bref=9 ## bottoms controller gains and set point
        self.kcq=0.5;self.tdq=0.5 ## reboiler heat controller gains
        self.Mref=30## holdup set point
        '''Ecuaciones diferenciales'''
        ## ODE right-hand sides; Qvap is a 0/1 flag set in 'actualizar'
        ## that gates vaporisation.
        self.dhdt=lambda M,B,h,Q,x:(self.L*self.hl - (self.Qvap*Q/self.lamvap_f(x)*(h+self.lamvap_f(x))+B*h) + Q - h * (self.L-(Q/self.lamvap_f(x)+B)))/M
        self.dMdt=lambda B,Q,x:self.L-(self.Qvap*Q/self.lamvap_f(x)+B)
        self.dQdt=lambda M,B,Q,x:-self.kcq*self.lamvap_f(x)*(self.Mref-M-self.tdq*(self.L-(self.Qvap*Q/self.lamvap_f(x)+B)))
        self.dBdt=lambda B:self.kcb*(self.Bref-B)/(1+self.tdb*self.kcb)
        self.dxdt=lambda M,B,Q,x:(self.L*self.xl-(Q/self.lamvap_f(x)*self.equil(x)+B*x)-x*(self.L-(self.Qvap*Q/self.lamvap_f(x)+B)))/M
        '''Ecuaciones no diferenciales'''
        ## Linear mixing rules for latent heat and heat capacity, and
        ## the constant-volatility vapour-liquid equilibrium.
        self.lamvap_f=lambda x:(self.lamvapsubs[0]-self.lamvapsubs[1])*x+self.lamvapsubs[1]
        self.cp=lambda x:(self.cpsubs[0]-self.cpsubs[1])*x+self.cpsubs[1]
        self.equil=lambda x:self.alpha*x/(1+(self.alpha-1)*x)
    def Teb(self):
        """Boiling temperature of the current mixture at self.Pt,
        solved with Newton's method from the Antoine equations."""
        def tonof(indice,T):
            ## Antoine vapour pressure of component *indice* at T
            A,B,C=self.tono[indice]
            return(10**(A-B/(C+T)))
        def fobj(T):
            ## Equilibrium condition for the binary mixture
            Pa=tonof(0,T)
            Pb=tonof(1,T)
            x= (self.Pt-Pb)/(Pa-Pb)-self.x
            return(x)
        def teb(T):
            ## Pure-component boiling condition, used as initial guess
            return(tonof(0,T)-self.Pt)
        return(newton(fobj,newton(teb,0)))
    def condini(self):
        ## Initial conditions:
        self.L=20;self.xl=0.8
        self.M=30.;self.Q=1.1e4;self.B=9
        ## Steady state: self.M=30;self.Q=1.08e4;self.B=9;self.V=11
        self.x=self.xl;self.y=self.equil(self.x);lamvap=self.lamvap_f(self.x)
        self.hl=220.;self.h=self.hl;self.H=self.h+lamvap
        self.T=self.h/self.cp(self.x);self.V=self.Q/self.lamvap_f(self.x)
        self.Pt=760.##mmHg
    def actualizar(self,t,paso=0.1):
        """Advance the state one step of size *paso* with a classic
        4-stage Runge-Kutta scheme over (h, M, Q, B, x)."""
        Teb=self.Teb()
        M=self.M;B=self.B;h=self.h;x=self.x;Q=self.Q
        ## Vaporisation only happens at or above the boiling point
        if self.T>=Teb:
            self.Qvap=1
        else:
            self.Qvap=0
        ## Stage 1
        k11=self.dhdt(M,B,h,Q,x)
        k12=self.dMdt(B,Q,x)
        k13=self.dQdt(M,B,Q,x)
        k14=self.dBdt(B)
        k15=self.dxdt(M,B,Q,x)
        ## Stage 2 (half step)
        k21=self.dhdt(M+paso*k12/2,B+paso*k14/2,h+paso*k11/2,Q+paso*k13/2,x+paso*k15/2)
        k22=self.dMdt(B+paso*k14/2,Q+paso*k13/2,x+paso*k15/2)
        k23=self.dQdt(M+paso*k12/2,B+paso*k14/2,Q+paso*k13/2,x+paso*k15/2)
        k24=self.dBdt(B+paso*k14/2)
        k25=self.dxdt(M+paso*k12/2,B+paso*k14/2,Q+paso*k13/2,x+paso*k15/2)
        ## Stage 3 (half step)
        k31=self.dhdt(M+paso*k22/2,B+paso*k24/2,h+paso*k21/2,Q+paso*k23/2,x+paso*k25/2)
        k32=self.dMdt(B+paso*k24/2,Q+paso*k23/2,x+paso*k25/2)
        k33=self.dQdt(M+paso*k22/2,B+paso*k24/2,Q+paso*k23/2,x+paso*k25/2)
        k34=self.dBdt(B+paso*k24/2)
        k35=self.dxdt(M+paso*k22/2,B+paso*k24/2,Q+paso*k23/2,x+paso*k25/2)
        ## Stage 4 (full step)
        k41=self.dhdt(M+paso*k32,B+paso*k34,h+paso*k31,Q+paso*k33,x+paso*k35)
        ## NOTE(review): 'x+paso*k35/2' below uses a half step in a
        ## stage-4 evaluation; the siblings use the full 'x+paso*k35'
        ## -- looks like a typo, confirm before changing.
        k42=self.dMdt(B+paso*k34,Q+paso*k33,x+paso*k35/2)
        k43=self.dQdt(M+paso*k32,B+paso*k34,Q+paso*k33,x+paso*k35)
        k44=self.dBdt(B+paso*k34)
        k45=self.dxdt(M+paso*k32,B+paso*k34,Q+paso*k33,x+paso*k35)
        ## x and M are only integrated while there is holdup
        if M>0:
            dx=paso*(k15+2*k25+2*k35+k45)/6
            dM=paso*(k12+2*k22+2*k32+k42)/6
        else:
            dx=0;dM=0
        self.x+=dx
        self.M+=dM
        self.Q+=paso*(k13+2*k23+2*k33+k43)/6
        self.h+=paso*(k11+2*k21+2*k31+k41)/6
        self.B+=paso*(k14+2*k24+2*k34+k44)/6
        ## Heat input cannot be negative
        if self.Q<0:
            self.Q=0
        self.V=self.Qvap*self.Q/self.lamvap_f(self.x)
        self.T=self.h/self.cp(self.x)
    def simular(self,controladores,Lvar,dt=0.01,plot=0):
        """Run a full simulation.

        controladores -- (kcb, tdb, kcq, tdq) controller constants,
        Lvar -- sequence of feed-flow values (one per step),
        dt -- step size, plot -- 1 to draw the four summary plots.

        Returns the integrated absolute deviation of the last 25
        holdup values from 20 (the optimisation cost).
        """
        self.kcb,self.tdb,self.kcq,self.tdq=controladores
        self.condini()
        t=0
        Ml,Bl,Vl,tl,Tl,xli=[self.M],[self.B],[self.V],[t],[self.T],[self.x]
        for i in range(len(Lvar)):
            t+=dt
            self.L=float(Lvar[i])
            self.actualizar(t,dt)
            Ml.append(self.M),Bl.append(self.B),Vl.append(self.V),Tl.append(self.T),tl.append(t),xli.append(self.x)
        if plot==1:
            print('Graficando la simulacion')
            plt.figure(figsize=(16,10))
            plt.subplot(2,2,1);plt.grid(True)
            plt.plot(tl,Ml,'b.',label='Acumulacion')
            plt.plot(tl,Bl,'g.',label='Fondos')
            plt.plot(tl,Vl,'r.',label='Vapor')
            plt.xlabel('tiempo');plt.ylabel('kg/min')
            plt.legend(loc=4)
            plt.subplot(2,2,2);plt.grid(True)
            plt.plot(tl,Tl,'b.')
            plt.xlabel('tiempo');plt.ylabel('Temperatura')
            plt.subplot(2,2,3);plt.grid(True)
            plt.title('Perturbacion')
            plt.plot(Lvar)
            plt.xlabel('tiempo');plt.ylabel('Flujo de entrada')
            plt.subplot(2,2,4);plt.grid(True)
            plt.title('FraccionMolar')
            plt.plot(tl,xli,'b.')
            plt.xlabel('tiempo');plt.ylabel('X')
            plt.show()
        return(np.trapz(np.abs(np.array(Ml[-25:])-20)))
if __name__ == '__main__':
    # This module is used as a library; the simulation is driven from a
    # separate script ("Cambia de Script" -- switch scripts).
    pass
|
{"/columna2.py": ["/numericos.py"], "/Main2.py": ["/columna2.py", "/RN2.py", "/Subrutinas.py"]}
|
6,278
|
Lugiax/AutomaticControl
|
refs/heads/master
|
/Main.py
|
# -*- coding: utf-8 -*-
"""
Created on Mon Dec 7 10:13:00 2015
@author: carlosaranda
Genera datos para el entrenamiento de la red neuronal
"""
from __future__ import division
from ModeloReboiler import Reboiler
from AGmultivar import AG
import numpy as np
import time
from Subrutinas import Perturbaciones
import matplotlib.pyplot as plt
## Script body: for each random feed perturbation, run the genetic
## algorithm to find controller constants that minimise the Reboiler
## cost, appending each result to 'resultadosAG.txt' in an ad-hoc
## XML-like format consumed later by the training scripts.
dt=0.01
numero_de_perturbaciones=100
## Baseline feed flow: noise in [-1, 1) around 20
Lvar0=np.random.random(int(1/dt))*2-1+20
set_de_perturbaciones=Perturbaciones((0,1),dt=dt,
    n_perts=numero_de_perturbaciones,
    plot=0,sin_pert=True)
print 'Generación de perturbaciones exitosa'
interx=10 #magnitude of the perturbations
intery=3 #maximum value the controller parameters may take
AGS=AG(deb=True)
AGnInd=30;AGnGen=100; cores=6
AGS.parametros(optim=0,Nind=AGnInd,Ngen=AGnGen,cores=cores)
AGS.variables(comun=[3,0,intery])## 3 controller variables, bounded
## between 0 and intery
#tabla_resultados=list()
archivo='resultadosAG.txt' ##File where the results are stored
with open(archivo,'w') as f:
    f.write('<prueba>\n<parametros> <nind>{}</nind> <ngen>{}</ngen> \
    <interx>{}</interx> <intery>{}</intery> <dt>{}</dt> </parametros> \
    Ahora se suma tambien los parametros a la funcion a minimizar\n'\
    .format(AGnInd,AGnGen,interx,intery,dt))
print 'Inicio de pruebas'
for prueba in range(numero_de_perturbaciones):
    perturbacion=set_de_perturbaciones[prueba]
    t1=time.time()
    print '--------------------------------\nPerturbación:',prueba+1
    Lvar=perturbacion+Lvar0
    # plt.plot(Lvar)
    # plt.show()
    ## The GA minimises the Reboiler cost over this feed trajectory
    AGS.Fobj(Reboiler,Lvar)
    print 'Inicio correcto del AG'
    resultados=AGS.start()
    t2=time.time()
    #Reboiler(resultados[0],Lvar,plot=1)
    ## Append the perturbation and the best controller constants found
    with open(archivo,'a') as f:
        res0str=[str(x) for x in Lvar]
        res1str=[str(x) for x in perturbacion]
        res2str=[str(x) for x in resultados[0]]
        w='<pert>'+','.join(res0str)+'</pert> <pert0>'+','.join(res1str)+\
        '</pert0> <par>'+','.join(res2str)+'</par>\n'
        f.write(w)
    #tabla_resultados.append([Lvar,set_de_perturbaciones[prueba],resultados[0]])
    print 'Tiempo por prueba:',(t2-t1)/60,' error:',resultados[1]
with open(archivo,'a') as f:
    f.write('</prueba>')
|
{"/columna2.py": ["/numericos.py"], "/Main2.py": ["/columna2.py", "/RN2.py", "/Subrutinas.py"]}
|
6,279
|
Lugiax/AutomaticControl
|
refs/heads/master
|
/RN.py
|
# -*- coding: utf-8 -*-
"""
Created on Fri Oct 16 18:40:50 2015
@author: carlos
"""
import numpy as np
class RN(object):
    """Earlier forward-only neural network (superseded by RN2's
    RedNeuronal): holds an architecture and weight dict and evaluates
    samples with FP, accumulating the squared error against the
    training targets when they are available."""
    def __init__(self,deb=False):
        self.deb=deb
        ## Network parameters:
        self.bias=-1
        ##Extra matrices
#        self.xelim=[]
#        self.xenorm=[]
#        self.yelim=[]
#        self.yenorm=[]
        self.nxe=None;self.nye=None
        self.sig= lambda mat: 1/(1+np.exp(-mat))  # logistic activation
    def parametros(self,arq=None,W=None,arq_io=None):
        """Set the hidden-layer architecture *arq*, optional weights
        *W*, and optionally prepend/append the (input, output) sizes
        given in *arq_io*."""
        self.arq=arq
        self.W=W
        if arq_io:
            self.arq.insert(0,arq_io[0])
            self.arq.append(arq_io[1])
        self.N=len(self.arq)
    def datosEnt(self,datos):
        """Store the training set (xe, ye), validating it, and extend
        the architecture with the inferred input/output sizes."""
        self.xe=datos[0];self.ye=datos[1]
        #Check both data sets have the same number of samples and store their sizes
        self.nxe,mx=verificarDatosEnt(self.xe);self.nye,my=verificarDatosEnt(self.ye)
        if mx!=my:
            raise ValueError('Diferente numero de muestras en x-y')
        self.m=mx#number of samples in the data
#        almacenar(self.xe,self.xenorm,self.xelim)
#        almacenar(self.ye,self.yenorm,self.yelim)
        #Number of variables per observation
#        self.nxe=len(self.xe[0])
#        self.nye=len(self.ye[0])
        if self.deb:print('No. DatEnt:',self.xe,'-',self.ye)
        self.arq.insert(0,self.nxe)
        self.arq.append(self.nye)
        self.N=len(self.arq)
    def generarPesos(self,seed=None):
        ## Weight generation: one (n_out, n_in+1) random matrix per
        ## layer transition, keyed by the layer index as a string.
        np.random.seed(seed)
        self.W=dict()
        for capa in range(1,self.N):
            nxcapa=self.arq[capa]
            nxcapa_anterior=self.arq[capa-1]
            #print nxcapa, nxcapa_anterior
            self.W[str(capa)]=np.random.random([nxcapa,nxcapa_anterior+1])
            #print w[str(capa)]
    def FP(self,W=None,xi=None):
        """Forward-propagate every sample in *xi* (defaults to the
        stored training inputs) through weights *W* (defaults to
        self.W).  Returns (outputs, accumulated squared error)."""
        Ys=list()#per-sample list of layer outputs
        yi=list()
        self.error=0
        if not W:
            W=self.W
        if not isinstance(xi,np.ndarray):
            xi=self.xe
        ## Forward propagation
        for m in range(len(xi)): #for each sample
            if self.deb:print('\nMuestra%d'%(m+1))
            Ys.append(list())#new list to hold this sample's outputs
            for i in range(1,self.N):#for every layer after the first
                if self.deb:print('\nFP:',i)
                if i==1:#the second layer receives the raw inputs
                    x=xi[m]
#                    x=list()
#                    for j in range(self.nxe):
#                        x.append(self.xe[m][j])
                else:
                    x=Ys[m][i-2]
                ## The (activated) bias input is appended to each layer
                x=np.append(x,[self.sig(self.bias)])
                v=np.dot(W[str(i)],np.array(x))
                y=self.sig(v)[0]
                Ys[m].append(y)
                if self.deb:print('x{}{}: {}'.format(m,i,x))
                if self.deb:print('Pesos\n',W[str(i)])
                if self.deb:print('y{}{}: {}'.format(m,i,Ys[m][i-1]))
            ## Accumulate the squared error when targets are known
            if self.nye:
                for y in range(self.nye):
                    yred=Ys[m][-1][y]
                    ye=self.ye[m][y]
                    er=0.5*(yred-ye)**2
                    self.error+=er
                    if self.deb:print('Error{}-> red:{:.4f} - ent:{:.4f} - error:{:.4f}'.format(y,yred,ye,er))
            yi.append(Ys[m][-1])
        if self.deb:print('Error Tot',self.error)
        return(yi,self.error)
#def almacenar(datos,norm,lim,cont=0):
# print('Datos',datos,type(datos[0]))
# if isinstance(datos[cont],(np.float32,np.float64,np.int32,np.int64,)):
# lim.append([min(datos),max(datos)])
# norm.append((datos-lim[cont][0])/(lim[cont][1]-lim[cont][0]))
# elif isinstance(datos[0],np.ndarray):
# for i in range(len(datos)):
# almacenar(datos[i],norm,lim,cont=i)
def verificarDatosEnt(datos):
    """Validate that every sample in *datos* has the same length.

    (Same helper as in RN2.py; kept consistent with that copy.)

    Returns:
        (l, m) -- common sample length (None for empty input) and the
        number of samples.

    Raises:
        ValueError -- if two samples have different lengths.
    """
    l = None
    m = 0
    for muestra in datos:
        li = len(muestra)
        ## 'l is None' instead of 'not l': the original re-assigned l
        ## on every zero-length row, so mismatches against length 0
        ## went undetected; the 'break' after raise was unreachable.
        if l is None:
            l = li
        elif li != l:
            raise ValueError('La configuración de los datos no es la misma')
        m += 1
    return (l, m)
def dimensionarMatricesPesos(arq):
    """Weight-matrix shapes and total weight count for the given
    layer-size sequence; each shape's extra input column is the bias.
    """
    shapes = []
    cuenta = 0
    for anterior, actual in zip(arq[:-1], arq[1:]):
        shapes.append((actual, anterior + 1))
        cuenta += actual * (anterior + 1)
    return (shapes, cuenta)
def redimensionarPesos(pesos, estructura):
    """Split the flat weight vector *pesos* into per-layer matrices.

    Returns a dict keyed '1', '2', ... in layer order; each value is a
    one-element list holding the reshaped numpy array (the shape comes
    from the corresponding entry of *estructura*).
    """
    capas = dict()
    inicio = 0
    for num_capa, forma in enumerate(estructura, 1):
        tam = forma[0] * forma[1]
        capas[str(num_capa)] = [np.reshape(pesos[inicio:inicio + tam], forma)]
        inicio += tam
    return (capas)
if __name__=='__main__':
    ## Smoke test: build a tiny network, run one forward pass over two
    ## one-variable samples, and print the accumulated error.
    print('Hola :D')
    xe=np.array([[0.1],
                 [0.9]])
    ye=np.array([[0.05],[0.1]])
    red=RN(True)
    red.parametros(arq=[2,2])
    red.datosEnt((xe,ye))
    red.generarPesos(1)   # fixed seed for reproducibility
    print('Arquitectura:',red.arq)
    y,err=red.FP()
    print(err)
|
{"/columna2.py": ["/numericos.py"], "/Main2.py": ["/columna2.py", "/RN2.py", "/Subrutinas.py"]}
|
6,280
|
Lugiax/AutomaticControl
|
refs/heads/master
|
/ModeloReboiler.py
|
# -*- coding: utf-8 -*-
"""
Created on Wed Feb 3 21:08:01 2016
@author: root
"""
from __future__ import division
import numpy
import numericos
import matplotlib
#import matplotlib.pyplot as plt
def Reboiler(Controladores,Lvar,
             mezcla=('C8','C10'),dt=0.01,plot=0,delay=0):
    """Simulate a binary-mixture reboiler under a PID-like controller.

    Controladores -- (kc, tau_D, tau_I) controller constants, applied
        after a fraction *delay* of the horizon has elapsed.
    Lvar -- feed-flow value per time step (kmol/min).
    mezcla -- pair of component keys into the property database.
    dt -- integration step; plot -- draw summary plots when truthy.

    Returns the optimisation cost: integrated |M - Mref| plus a
    log-smoothness penalty plus the sum of the controller constants.
    """
    ##Database of the compounds used. Data taken from:
    ## Perry R. (2010) "Manual del Ingeniero Quimico" 7Ed. McGraw-Hill:España
    ## Alkane and water data
    bd_subs={
            'C8':{'Pvap':(96.084,-7900.2,-11.003,7.1802e-6,2), #Perry 2-55
                  'Dens':(0.53731,0.26115,568.7,0.28034), #Perry 2-101
                  'Lamvap':(5.518e7,0.38467,0,0), #Perry 2-163
                  'Cp':(2.2483e5,-1.8663e2,9.5891e-1,0,0), #Perry 2-177
                  'Tc':568.7,#K  #Perry 2-55
                  'MM':114.231 ##kg/kmol
                  },
            'C9':{'Pvap':(109.35,-9030.4,-12.882,7.8544e-6,2),
                  'Dens':(0.48387,0.26147,594.6,0.28281),
                  'Lamvap':(6.037e7,0.38522,0,0),
                  'Cp':(3.8308e5,-1.1398e3,2.7101,0,0),
                  'Tc':594.6,#K
                  'MM':128.258 ##kg/kmol
                  },
            'C10':{'Pvap':(112.73,-9749.6,-13.245,7.1266e-6,2),
                   'Dens':(0.42831,0.25795,617.7,0.28912),
                   'Lamvap':(6.6126e7,0.39797,0,0),
                   'Cp':(2.7862e5,-1.9791e2,1.0737,0,0),
                   'Tc':617.7,#K
                   'MM':142.285 ##kg/kmol
                   },
            'H2O':{'Pvap':(73.649,-7258.2,-7.3037,4.1653e-6,2),
                   'Lamvap':(5.2053e7,0.3199,-0.212,0.25795),
                   'Cp':(2.7637e5,-2.0901e3,8.125,-1.4116e-2,9.3701e-6),
                   'Tc':647.13,#K
                   'MM':18.015 ##kg/kmol
                   },
            'EtOH':{'Pvap':(74.475,-7164.3,-7.327,3.134e-6,2),
                    'Lamvap':(5.69e7,0.3359,0,0),
                    'Cp':(1.0264e5,-1.3963e2,-3.0341e-2,2.0386e-3,0),
                    'Tc':513.92,#K
                    'MM':46.069 ##kg/kmol
                    }
            }
    ## Temperatures are in Kelvin throughout
    def Densidad(substancia,T):## Density correlation
        C1,C2,C3,C4=bd_subs[substancia]['Dens']
        return C1/C2**(1+(1-T/C3)**C4) ## kmol/m3
    def Cp(substancia,T):## Heat-capacity correlation
        C1,C2,C3,C4,C5=bd_subs[substancia]['Cp']
        return C1+C2*T+C3*T**2+C4*T**3+C5*T**4 ## kJ/(kmol K)
    def Lamvap(substancia,T):## Heat-of-vaporisation correlation
        C1,C2,C3,C4=bd_subs[substancia]['Lamvap']
        Tc=bd_subs[substancia]['Tc']; Tr=T/Tc
        return (C1*(1-Tr)**(C2+C3*Tr+C4*Tr**2))#/1000 ## kJ/kmol
    def Entalpia(substancia,T): ## Enthalpy (integrated Cp polynomial)
        C1,C2,C3,C4,C5=bd_subs[substancia]['Cp']
        return C1*T+C2*T**2/2.+C3*T**3/3.+C4*T**4/4.+C5*T**5/5. ## kJ/kmol
    def Pvap(substancia,T): ## Vapour-pressure correlation
        C1,C2,C3,C4,C5=bd_subs[substancia]['Pvap']
        return numpy.exp(C1+C2/T+C3*numpy.log(T)+C4*T**C5) ## Pa
    def F_alpha(mezcla,T): ## Relative volatility of the pair
        Pvap1=Pvap(mezcla[0],T);Pvap2=Pvap(mezcla[1],T)
        return Pvap1/Pvap2
    def Ponderacion(Prop1,Prop2,x1): ## Linear mixing rule for mixture properties
        return (Prop1-Prop2)*x1+Prop2
    ## Helper functions specific to distillation
    def TempEbullicion(subs,x1=1,T0=300.,Pt=101325.): ##Pressure must be given in Pa
        def Funcion_objetivo(T):## Objective handed to the root finder
            if isinstance(subs, tuple): ##Two components
                Pvap1=Pvap(subs[0],T)
                Pvap2=Pvap(subs[1],T)
                return (Pt-Pvap2)/(Pvap1-Pvap2)-x1 ## Equilibrium condition
            else: ## Single component
                return Pvap(subs,T)-Pt##Single-component equilibrium condition
        ## The false-position method is used because it fails less often
        ## than Newton-Raphson here, and the working interval is known
        ## from the start -> [T0, T0+100K]
        if isinstance(subs,tuple):
            ## Start from an approximation: the boiling point of the
            ## more volatile component
            T0_metodo=TempEbullicion(subs[0],T0=T0,Pt=Pt)
            return numericos.FalsaPosicion(Funcion_objetivo,T0_metodo,T0_metodo+100)
        else:
            return numericos.FalsaPosicion(Funcion_objetivo,T0,T0+100)
    def Equilibrio(x,alpha): ##Constant-volatility vapour-liquid equilibrium
        return alpha*x/(1+(alpha-1)*x)
###############################################################################
    def Er_rel(a,b): ##Relative error between two values
        return(2*abs(a-b)/(abs(a)+abs(b)))
    def RK4(vals,paso):
        ## One RK4 step over (M, x, h, Q, intM); an Euler step is also
        ## taken so the caller can bound the local error.
        M0,x0,h0,Q0,intM0=vals
        #Euler step (used only for the error estimate)
        M_E=M0+paso*dMdt(*vals)
        x_E=x0+paso*dxdt(*vals)
        h_E=h0+paso*dhdt(*vals)
        Q_E=Q0+paso*dQdt(*vals)
        #All the k1 slopes
        k11=dMdt(*vals);k12=dxdt(*vals);k13=dhdt(*vals);k14=dQdt(*vals)
        #Trapezoid pieces of the (Mref-M) integral
        M_1_2=(Mref-M0)+paso*k11/2.;intM1=(M0+M_1_2)*paso/4.
        #All the k2 slopes
        vals1=(M0+paso*k11/2.,x0+paso*k12/2.,h0+paso*k13/2.,Q0+paso*k14/2.,intM1)
        k21=dMdt(*vals1);k22=dxdt(*vals1);k23=dhdt(*vals1);k24=dQdt(*vals1)
        #All the k3 slopes
        vals2=M0+paso*k21/2.,x0+paso*k22/2.,h0+paso*k23/2.,Q0+paso*k24/2.,intM1
        k31=dMdt(*vals2);k32=dxdt(*vals2);k33=dhdt(*vals2);k34=dQdt(*vals2)
        #Second-half trapezoid pieces of the integral
        M1=M_1_2+paso*k11/2.;intM2=(M1+M_1_2)*paso/4.;n_intM=intM1+intM2
        #All the k4 slopes
        vals3=M0+paso*k31/2.,x0+paso*k32/2,h0+paso*k33/2.,Q0+paso*k34/2.,n_intM
        k41=dMdt(*vals3);k42=dxdt(*vals3);k43=dhdt(*vals3);k44=dQdt(*vals3)
        dM=paso*(k11+2*k21+2*k31+k41)/6.
        dx=paso*(k12+2*k22+2*k32+k42)/6.
        dh=paso*(k13+2*k23+2*k33+k43)/6.
        dQ=paso*(k14+2*k24+2*k34+k44)/6.
        M_RK4=M0+dM;x_RK4=x0+dx;h_RK4=h0+dh;Q_RK4=Q0+dQ
        ## Largest Euler-vs-RK4 relative error drives step refinement
        errores=(Er_rel(h_E,h_RK4),
                 Er_rel(M_E,M_RK4),
                 Er_rel(Q_E,Q_RK4),
                 Er_rel(x_E,x_RK4))
        error_max=max(errores)
        #print 'Errores',errores
        return(error_max,M_RK4,x_RK4,h_RK4,Q_RK4,n_intM)
    def Recur(vals,paso,recursion=0,tol=.1):
        ## Adaptive stepping: halve the step recursively until the
        ## RK4-vs-Euler error is within *tol*.
        if recursion>=50: ## maximum number of recursions
            return(None)
        if recursion: ## Inside a recursion the half-step runs twice
            repeticiones=2
        else: ## Otherwise just once
            repeticiones=1
        for dummy_i in range(repeticiones):
            resRK4=RK4(vals,paso)
            if resRK4[0]<=tol: ## Error within tolerance
                vals=resRK4[1:] ## Accept the updated values
            else: ## Otherwise recurse with half the step
                vals=Recur(vals,paso/2,recursion+1,tol)
            #print resRK4[0]
        return(vals)
###############################################################################
    ## Differential equations of the system model.  These close over
    ## the simulation state below (L, B, Evap, LamvapMezcla, ...);
    ## several parameters are unused in individual equations but kept
    ## so all four share the same (M,x,h,Q,intM) signature for RK4.
    def dMdt(M,x,h,Q,intM):
        return L-(Evap*Q/LamvapMezcla+B)
    def dxdt(M,x,h,Q,intM):
        return (L*(x_L-x)+Evap*Q/LamvapMezcla*(x-y))/M
    def dhdt(M,x,h,Q,intM):
        return (L*(h_L-h)+Evap*Q/LamvapMezcla*(h-H)+Q)/M
    def dQdt(M,x,h,Q,intM):
        return -LamvapH2O*kc*(Mref-M\
                              -tau_D*(L-(Evap*Q/LamvapMezcla+B))\
                              +tau_I*intM)
###############################################################################
#######################INICIO DE LA SIMULACION#################################
###############################################################################
    substancia1=mezcla[0]
    substancia2=mezcla[1]
    Pt=101325 # Pa
    ## Initial conditions:
    x_L=0.6 #kmol/min
    T_L=412 # K
    ## Treating the heat capacity as constant, the C1 coefficient of
    ## the Cp correlation can be used directly
    Cp1=bd_subs[substancia1]['Cp'][0];Cp2=bd_subs[substancia2]['Cp'][0]
    Cp_L=Ponderacion(Cp1,Cp2,x_L)
    ## ...which gives the feed enthalpy directly
    h_L=Cp_L*T_L
    #print h_L
    '''
    Método anterior para calcular entalpías y Cps...
    h_L1=Entalpia(substancia1,T_L);h_L2=Entalpia(substancia2,T_L)
    h_L=Ponderacion(h_L1,h_L2,x_L) #kJ/kmol
    print h_L
    Cp1=Cp(substancia1,T_L);Cp2=Cp(substancia2,T_L);CpMez=Ponderacion(Cp1,Cp2,x_L)
    CpMez=h_L/T_L
    '''
    ## Initial data for the heating steam
    Q=4.5e8;LamvapH2O=Lamvap('H2O',TempEbullicion('H2O',Pt=2*Pt)) # kJ/kmol
    Teb=TempEbullicion(mezcla,x_L,Pt=Pt) ## Boiling temperature of the mixture
    M=30;B=10 #kmol/min
    x=0.445
    T=Teb## So the mixture starts at its boiling point
    ## With constant heat capacity, C1 again gives the mixture Cp
    CpMez=Ponderacion(Cp1,Cp2,x)
    h=CpMez*T
    y=Equilibrio(x,F_alpha(mezcla,T))
    ## Heat of vaporisation of the mixture
    LamvapM1=Lamvap(substancia1,T);LamvapM2=Lamvap(substancia2,T)
    LamvapMezcla=Ponderacion(LamvapM1,LamvapM2,x) # kJ/kmol
    #print '{:.4e} - {:.4e} - T:{}'.format(h_L1,h_L2,h_L/CpMez)
    #print 'Teb Volatil:',TempEbullicion(substancia1,Pt=Pt),' Teb Mezcla',Teb
    #print LamvapMezcla,LamvapH2O,h
    ## Vapour enthalpy: liquid enthalpy plus the heat of vaporisation
    H=h+LamvapMezcla # kJ/kmol
    Evap=1 # Flag signalling whether evaporation occurs
    intM=0 # Running value of the integral of (Mref-M)
    ## Control constants
    kc,tau_D,tau_I=1,.5,.5 ; parametros_iniciales=True
    Mref=30
    ## Simulation loop
    t=0
    t_l,M_l,B_l,V_l,x_l,T_l,Teb_l,Q_l,LVM_l,y_l=[t],[M],[B],[Q/LamvapMezcla],[x],[T],[Teb],[Q],[LamvapMezcla],[y]
    #T_p=np.linspace(500,600,100)
    #plt.plot(T_p,Lamvap(substancia1,T_p))
    #plt.show()
    #print '{:^5}|{:^8}|{:^8}|{:^8}|{:^8}|{:^8}|{:^8}'.format('t','M','h','Q','V','T','LVM')
    for i in range(len(Lvar)):
        ## After the delay fraction of the horizon, switch from the
        ## default controller constants to the ones under test
        if i>len(Lvar)*delay and parametros_iniciales:
            kc,tau_D,tau_I=Controladores; parametros_iniciales=False
        L=Lvar[i]
        if T>Teb:
            Evap=1
        else:
            Evap=0
#        Cp1=Cp(substancia1,T);Cp2=Cp(substancia2,T);CpMez=Ponderacion(Cp1,Cp2,x)
#        print 'CpMez 1:{} ; 2:{}'.format(Cp1,Cp2)
#        print 'LamvapMez 1:{} ; 2:{}'.format(LamvapM1,LamvapM2)
#        print 'Temperatura:',T
        #if i%int(len(Lvar)*.1)==0:print '{:^5}|{:^8.2f}|{:^6.2e}|{:^6.2e}|{:^6.2e}|{:^8.2f}|{:^6.2e}'.format(t,M,h,Q,Evap*Q/LamvapMezcla,T,LamvapMezcla)
        M,x,h,Q,intM=Recur((M,x,h,Q,intM),paso=dt,tol=0.001)
        ## Physical clamps on the integrated state
        if M>=50:
            M=50
        elif M<=0:
            M=0.0001
        if Q<=0:#Guard in case Q goes negative
            Q=0
        elif Q/LamvapMezcla>=20:#...and in case the vapour flow exceeds its maximum
            Q=20*LamvapMezcla #No more heat can enter the system
        t+=dt
        t_l.append(t);M_l.append(M);B_l.append(B);x_l.append(x)
        V_l.append(Evap*Q/LamvapMezcla);Teb_l.append(Teb);Q_l.append(Q)
        LVM_l.append(LamvapMezcla);y_l.append(y)
        if T==0:
            T_l.append(T_l[-1])
        else:
            T_l.append(T)
        ## Update the derived state for the next step
        CpMez=Ponderacion(Cp1,Cp2,x)
        T=h/CpMez
        #if i%int(tf/dt*.1)==0:print (L*(h_L-h)+Evap*Q/LamvapMezcla*(h-H)+Q)/M
        ## NOTE(review): this recomputes Teb at the *feed* composition
        ## x_L, not the current pot composition x -- confirm intended.
        Teb=TempEbullicion(mezcla,x_L,Pt=Pt)
        y=Equilibrio(x,F_alpha(mezcla,T))
#        LamvapM1=Lamvap(substancia1,T);LamvapM2=Lamvap(substancia2,T)
#        LamvapMezcla=Ponderacion(LamvapM1,LamvapM2,x)
        H=h+LamvapMezcla
    if plot:
        ## NOTE(review): only 'import matplotlib' appears at module
        ## level; 'matplotlib.pyplot' works here only if pyplot was
        ## imported elsewhere first -- confirm.
        plt=matplotlib.pyplot
        plt.figure(figsize=(100,100))
        plt.subplot(2,2,1);plt.grid();plt.title('Flujos Molares')
        plt.plot(t_l,M_l,t_l,B_l,t_l,V_l,t_l[:-1],Lvar)#,t_l,numpy.array(V_l)+B_l)
        plt.subplot(2,2,2);plt.grid();plt.title('Fraccion molar')
        plt.plot(t_l,x_l,t_l,y_l)
        plt.subplot(2,2,3);plt.grid();plt.title('Temperaturas')
        plt.plot(t_l,T_l,t_l,Teb_l)
        plt.subplot(2,2,4);plt.grid();plt.title('Calores')
        plt.plot(t_l,Q_l,t_l,LVM_l)
        plt.show()
    ## Cost: tracking error + smoothness penalty + controller effort
    return numpy.trapz(numpy.abs(numpy.array(M_l)-Mref))+\
           numpy.log10(numpy.sum(numpy.abs(numpy.diff(M_l))+\
           numpy.abs(numpy.diff(V_l))+numpy.abs(numpy.diff(Q_l))))+\
           sum((kc,tau_D,tau_I))
if __name__=='__main__':
    ## Demo run: 10 time units of random feed flow around 15 with
    ## fixed controller constants; prints the resulting cost.
    dt=0.01
    tf=10
    Lvar=numpy.random.random(int(tf/dt))*2-1+15
    controladores=(2,.5,.5)   # (kc, tau_D, tau_I)
    reb=Reboiler(controladores,Lvar,dt=dt,plot=1)
    print reb
|
{"/columna2.py": ["/numericos.py"], "/Main2.py": ["/columna2.py", "/RN2.py", "/Subrutinas.py"]}
|
6,281
|
Lugiax/AutomaticControl
|
refs/heads/master
|
/Main2.py
|
# -*- coding: utf-8 -*-
"""
Created on Mon Dec 14 12:45:43 2015
@author: carlosaranda
Prueba final Red Neuronal para control
"""
import re
from columna2 import reboiler
from RN2 import RedNeuronal, redimensionarPesos, dimensionarMatricesDePesos
import numpy as np
from Subrutinas import Perturbaciones, denorm, norm
import matplotlib.pyplot as plt
# Load the trained-network parameters from the tag-formatted text file.
with open('pruebasent.txt','r') as f:
    archivo=f.read()
cont=-1
# NOTE(review): cont is never used below — confirm it can be removed.
pesos=re.findall('\<pesos\>([-\d.,]+)',archivo)[0]
estructura=re.findall('\<estructura\>([\d,]+)',archivo)[0]
interx=float(re.findall('\<interx\>([-\d.]+)',archivo)[0])
intery=float(re.findall('\<intery\>([-\d.]+)',archivo)[0])
dt=float(re.findall('\<dt\>([-\d.]+)',archivo)[0])
pesos=[float(x) for x in pesos.split(',')]
estructura=[int(x) for x in estructura.split(',')]
'''
Se inicializa el reboiler con todos sus parámetros
'''
# (Above: "the reboiler is initialised with all its parameters".)
n_perts=20
set_de_perturbaciones=Perturbaciones((0,1),n_perts=n_perts)
Lvar=[]
base=20
for i in range(2*n_perts):
    # NOTE(review): with i in [0, 2*n_perts) the test i%1000==3 only fires
    # at i==3, so nearly every segment is random noise — confirm intent.
    if i%1000==3:
        for k in set_de_perturbaciones[int(i/2)]:
            Lvar.append(k+base)
    else:
        Lvar0=(np.random.rand(int(1/dt))*2-1)*.1+base
        for k in Lvar0:
            Lvar.append(k)
    base=Lvar[-1]
plt.plot(Lvar)
plt.show()
# the perturbations are added
'''
Se inicia la red neuronal
'''
# (Above: "the neural network is started".)
ar=estructura[1:-1]
neurodos_entrada_salida=(estructura[0],estructura[-1])
red=RedNeuronal(estructura=ar,neurodos_entrada_salida=neurodos_entrada_salida)
est,nPesos=dimensionarMatricesDePesos(red.estructura)
W=redimensionarPesos(pesos,est)
reb=reboiler()
reb.condini()
t=0
Ml,Bl,Vl,tl,Tl,xli,kc,td=[reb.M],[reb.B],[reb.V],[t],[reb.T],[reb.x],[reb.kcq],[reb.tdq]
# Main simulation loop: step the reboiler, then let the network retune the
# controller constants from the last one-second window of the disturbance.
for i in range(len(Lvar)):
    t+=dt
    reb.L=Lvar[i]
    reb.actualizar(t,dt)
    Ml.append(reb.M),Bl.append(reb.B),Vl.append(reb.V),Tl.append(reb.T)
    tl.append(t),xli.append(reb.x),kc.append(reb.kcq),td.append(reb.tdq)
    if i < int(1/dt):
        continue
    else:
        ventana=[Lvar[i-int(1/dt):i]]
        ventana_norm=norm(ventana,(interx+20,20-interx))
        # NOTE(review): the network receives the raw window (xi=ventana)
        # while ventana_norm is only plotted — confirm which was intended.
        y=red.FP(pesos=W,xi=ventana)[-1]
        reb.kcb,reb.tdb,reb.kcq,reb.tdq=denorm(y,(intery,0))
        if i%1000==0:
            # Periodic snapshot of the last window.
            plt.figure(figsize=(16,10))
            plt.subplot(2,2,1);plt.grid(True)
            plt.plot(tl[i-int(1/dt):i],Ml[i-int(1/dt):i],'b.',label='Acumulacion')
            plt.plot(tl[i-int(1/dt):i],Bl[i-int(1/dt):i],'g.',label='Fondos')
            plt.plot(tl[i-int(1/dt):i],Vl[i-int(1/dt):i],'r.',label='Vapor')
            plt.xlabel('tiempo');plt.ylabel('kg/min')
            #plt.legend(loc=4)
            plt.subplot(2,2,2);plt.grid(True)
            plt.title('Control Proporcional')
            plt.plot(tl[i-int(1/dt):i],kc[i-int(1/dt):i],'b.')
            plt.xlabel('tiempo');plt.ylabel('Var Prop')
            plt.subplot(2,2,3);plt.grid(True)
            plt.title('Perturbacion')
            plt.plot(tl[i-int(1/dt):i],ventana_norm[0])#Lvar[i-int(1/dt):i])
            plt.xlabel('tiempo');plt.ylabel('Flujo de entrada')
            plt.subplot(2,2,4);plt.grid(True)
            plt.title('Control Derivativo')
            plt.plot(tl[i-int(1/dt):i],td[i-int(1/dt):i],'b.')
            plt.xlabel('tiempo');plt.ylabel('Var Derivativa')
            plt.show()
# Final summary plots over the whole run.
print('Graficando la simulacion')
plt.figure(figsize=(16,10))
plt.subplot(2,2,1);plt.grid(True)
plt.xlim((0,2*n_perts))
plt.plot(tl,Ml,'b.',label='Acumulacion')
plt.plot(tl,Bl,'g.',label='Fondos')
plt.plot(tl,Vl,'r.',label='Vapor')
plt.xlabel('tiempo');plt.ylabel('kg/min')
#plt.legend(loc=4)
plt.subplot(2,2,2);plt.grid(True)
plt.xlim((0,2*n_perts))
plt.title('Control Proporcional')
plt.plot(tl,kc,'b.')
plt.xlabel('tiempo');plt.ylabel('control proporcional')
plt.subplot(2,2,3);plt.grid(True)
plt.title('Perturbacion')
plt.plot(norm(Lvar,(interx+20,20-interx)))
plt.xlabel('tiempo');plt.ylabel('Flujo de entrada')
plt.subplot(2,2,4);plt.grid(True)
plt.xlim((0,2*n_perts))
plt.title('Control derivativo')
plt.plot(tl,td,'b.')
plt.xlabel('tiempo');plt.ylabel('X')
plt.show()
|
{"/columna2.py": ["/numericos.py"], "/Main2.py": ["/columna2.py", "/RN2.py", "/Subrutinas.py"]}
|
6,282
|
Lugiax/AutomaticControl
|
refs/heads/master
|
/numericos.py
|
# -*- coding: utf-8 -*-
"""
Created on Sun Aug 23 19:07:23 2015
Compilación de métodos numéricos
@author: carlos
"""
from __future__ import division
def newton(f,x0,it=20,dstep=0.0001):
    """Find a root of f near x0 with Newton's method.

    The derivative is estimated with a central finite difference of
    half-width dstep. Iterates at most `it` times and stops early if the
    slope estimate is exactly zero (to avoid division by zero).

    Bug fixed: the central difference (f(x+h) - f(x-h)) must be divided
    by 2*h; the original divided by h, doubling the slope and halving
    every Newton step.
    """
    x = x0
    for _ in range(it):
        df = (f(x+dstep)-f(x-dstep))/(2*dstep)
        if df == 0:
            # Flat slope: cannot step further; return current estimate.
            break
        x = x - f(x)/df
    return x
def FalsaPosicion(f,x1,x2,xr=0,it=40,tol=1e-5):
    """Regula falsi (false position) with the Illinois anti-stagnation fix.

    Finds a root of f assumed bracketed by [x1, x2]; returns the last
    estimate xr. Stops after `it` iterations, when the relative change in
    xr drops below `tol`, or when an exact root is hit.

    Bug fixed: each branch checked the stagnation counter it had just
    reset to zero, so the Illinois halving never executed and one
    endpoint could stagnate. Each branch now halves the function value of
    the endpoint that was actually retained twice in a row.
    """
    f1 = f(x1)
    f2 = f(x2)
    repeticiones1 = 0; repeticiones2 = 0  # consecutive retentions of x1 / x2
    for i in range(it):
        xr_viejo = xr
        # Secant-style interpolation between the bracketing endpoints.
        xr = x2-(f2*(x1-x2))/(f1-f2)
        error = abs((xr_viejo-xr)/xr)
        fr = f(xr)
        comprobacion = f1*fr
        if comprobacion < 0:
            # Root lies in [x1, xr]: move x2; x1 is retained.
            x2 = xr; f2 = fr
            repeticiones2 = 0
            repeticiones1 += 1
            if repeticiones1 >= 2: f1 = f1/2  # Illinois: halve the stale endpoint
        elif comprobacion > 0:
            # Root lies in [xr, x2]: move x1; x2 is retained.
            x1 = xr; f1 = fr
            repeticiones1 = 0
            repeticiones2 += 1
            if repeticiones2 >= 2: f2 = f2/2
        else:
            # fr == 0: xr is an exact root.
            error = 0
        if error <= tol:
            break
    return xr
def Euler(f, x=0,t=0, step=0.1, inter=[0,10]):
    """Integrate dx/dt = f(x, t) with the explicit (forward) Euler method.

    Parameters: f(x, t) is the derivative; x, t are the initial state and
    time; `step` is the time step; `inter` is the [start, end] interval
    whose length (divided by step) fixes the iteration count.
    Returns (tlist, xlist): the time points and states after each step.

    Bugs fixed: time was assigned (t = step) instead of accumulated, so
    f was always evaluated at t == step; leftover debug prints removed.
    Note: the mutable default inter=[0,10] is kept for interface
    compatibility — it is never mutated here.
    """
    xlist=[]
    tlist=[]
    for i in range(inter[0], int(inter[1]/step)):
        # Update the state with the slope at the *current* time, then advance t.
        x += f(x,t)*step
        t += step
        xlist.append(x)
        tlist.append(t)
    return(tlist,xlist)
if __name__ == '__main__':
    pass  # no demo here; this module is import-only
|
{"/columna2.py": ["/numericos.py"], "/Main2.py": ["/columna2.py", "/RN2.py", "/Subrutinas.py"]}
|
6,284
|
LialinMaxim/Alnicko
|
refs/heads/master
|
/test_sender.py
|
import multiprocessing
import time
from unittest import TestCase, mock
from files_sender import Uploader
def mock_file_sender(path_to_file, ):
    """Fake upload used in place of real HTTP in tests and demos.

    Sleeps briefly to imitate network latency, then derives a status
    from the path: names containing '7' fail with 403, names containing
    '9' fail with 404, everything else succeeds with 200.

    :param path_to_file: string
    :return: (path_to_file, status code, status name)
    """
    time.sleep(.6)
    if '7' in path_to_file:
        status, reason = 403, 'Forbidden'
    elif '9' in path_to_file:
        status, reason = 404, 'Not Found'
    else:
        status, reason = 200, 'OK'
    result = path_to_file, status, reason
    print(f'{multiprocessing.current_process().name} -> {result}')
    return result
class TestUploader(TestCase):
    """Unit tests for Uploader using nonexistent paths and mocked HTTP."""
    def test_file_not_found(self):
        # A path that does not exist yields a synthetic 404 tuple without
        # any network call.
        uploader = Uploader([], 2, None)
        result = uploader.file_sender('3.txt')
        expect = ('3.txt', 404, 'File not found')
        self.assertEqual(result, expect)
    def test_send_file(self):
        # With requests.post mocked out, an existing file takes the happy
        # path and echoes its own name back.
        uploader = Uploader([], 2, None)
        with mock.patch('requests.post'):
            file, status_code, reason = uploader.file_sender('requirements.txt')
        self.assertEqual(file, 'requirements.txt')
    def test_start(self):
        # All 7 fake files are missing, so each must be recorded as a 404
        # error and produce exactly one progress message on the queue.
        test_files_list = [f'{i}.txt' for i in range(7)]
        m = multiprocessing.Manager()
        q = m.Queue()
        uploader = Uploader(test_files_list, 2, q)
        uploader.start()
        expect_errors = {'0.txt': 404, '1.txt': 404, '2.txt': 404, '3.txt': 404,
                         '4.txt': 404, '5.txt': 404, '6.txt': 404}
        self.assertEqual(uploader.errors, expect_errors)
        q_result = []
        while uploader.is_active():
            q_result.append(q.get())
        self.assertEqual(len(q_result), 7)
|
{"/test_sender.py": ["/files_sender.py"], "/files_sender.py": ["/test_sender.py"]}
|
6,285
|
LialinMaxim/Alnicko
|
refs/heads/master
|
/files_sender.py
|
"""
Write module to upload some files to the remote server.
Uploading should be done in parallel.
Python multiprocessing module should be used.
Real uploading is not part of this task, use some dummy function for emulate upload.
Input data:
- List of files to upload
- Maximum number of parallel uploading process
- Queue for passing progress to the caller
Output data:
- Uploading progress
- Final uploading report (uploaded files and not uploaded)
"""
import multiprocessing
import ntpath
import os
import time
import datetime
import requests
class Uploader:
    """Upload a list of files to a remote server in parallel processes.

    Per-file progress is pushed onto `queue` as small dicts; failures are
    collected in `errors` (path -> status code) and successes counted in
    `done`.
    """
    server_url = 'http://my_site.com/test'
    def __init__(self, files, num_process, queue, worker=None):
        # `worker` defaults to the real HTTP sender; tests/demos inject a stub.
        self.files_list = files
        self.num_process = num_process
        self.queue = queue
        self.worker = worker or self.file_sender
        self.errors = {}          # file path -> non-200 status code
        self.done = 0             # number of successful uploads
        self._complete = 0        # number of processed files (either outcome)
        self._star_time = None    # NOTE(review): likely a typo for _start_time
        self._end_time = None
        self._is_terminate = None # None = never started; False/True once running
    def __str__(self):
        return f'< Uploader obj - ' \
               f'files: {len(self.files_list)}, ' \
               f'done: {self.done}, ' \
               f'errors: {len(self.errors)}, ' \
               f'loading time: {self.get_loading_time()} sec >'
    def start(self):
        """Run the worker pool over files_list; blocks until done or stop().

        NOTE(review): the default worker is a bound method holding the
        queue proxy; confirm it pickles on spawn-based platforms.
        """
        self._star_time = datetime.datetime.utcnow()
        self._is_terminate = False
        with multiprocessing.Pool(processes=self.num_process) as pool:
            for result in pool.imap_unordered(self.worker, self.files_list):
                f, status_code, status_name = result
                if status_code != 200:
                    self.errors[f] = status_code
                    self.queue.put({
                        'file': f,
                        'error': status_name
                    })
                else:
                    self.done += 1
                    self.queue.put({
                        'file': f,
                        'done': status_name
                    })
                self._complete += 1
                # interrupted all process
                if self._is_terminate:
                    pool.terminate()
                    self._end_time = datetime.datetime.utcnow()
                    return 'The pool was terminated'
        self._end_time = datetime.datetime.utcnow()
    def stop(self):
        # Signal start() to terminate the pool after the current result.
        self._is_terminate = True
    def file_sender(self, path_to_file):
        """POST one file to server_url; returns (path, status, reason).

        Missing files short-circuit to a synthetic (path, 404,
        'File not found') without touching the network.
        """
        time.sleep(.3)
        if os.path.isfile(path_to_file):
            head_path, file_name = ntpath.split(path_to_file)
            with open(path_to_file, 'rb') as f:
                r = requests.post(self.server_url, files={file_name: f})
                return path_to_file, r.status_code, r.reason
        else:
            return path_to_file, 404, 'File not found'
    def is_active(self):
        # True while unread progress messages remain on the queue.
        return self.queue.empty() is False
    def get_loading_time(self):
        """Seconds between start and finish; 0 if the run never finished."""
        loading_time = 0
        if self._end_time:
            loading_time = (self._end_time - self._star_time).total_seconds()
        return loading_time
if __name__ == '__main__':
    """ Shows how it works. """
    from test_sender import mock_file_sender
    test_files_list = [f'{i}.txt' for i in range(14)]
    m = multiprocessing.Manager()
    q = m.Queue()
    uploader = Uploader(test_files_list, 3, q, worker=mock_file_sender)
    uploader.start()
    # Bug fixed: the message previously read '---Lading complete---'.
    print('\n---Loading complete---')
    while uploader.is_active():
        print('Result:', q.get())
    print('\n', uploader)
    print('loading errors:', uploader.errors)
    print('loading time:', uploader.get_loading_time(), 'seconds')
|
{"/test_sender.py": ["/files_sender.py"], "/files_sender.py": ["/test_sender.py"]}
|
6,288
|
vdeleon/clickTwitch
|
refs/heads/master
|
/clickTwitch/__init__.py
|
'''
clickTwitch is an automation tool that claims your twitch reward while watching a stream.
API:
========
`detectPosition()`
`clickPosition()`
`randomMovement()`
`isLetterY()`
'''
import mouse
def detectPosition():
    '''
    Blocks until the middle mouse button is clicked, then stores the
    cursor coordinates on the function object (detectPosition.x and
    detectPosition.y) for later use by clickPosition().
    '''
    print('Position your mouse where the claiming button is. When you are ready, click the middle button on your mouse to confirm the location')
    mouse.wait(button='middle')
    detectPosition.x, detectPosition.y = mouse.get_position()
def clickPosition(x, y):
    '''
    Glides the cursor to the absolute screen position (x, y) and performs
    a left-button click there.
    '''
    mouse.move(x, y, absolute=True, duration=0.1)
    mouse.click(button='left')
def randomMovement():
    '''
    Nudges the cursor by a random offset within +/-50 px of its current
    position (relative move).
    '''
    import random
    dx = random.randrange(-50, 50)
    dy = random.randrange(-50, 50)
    mouse.move(dx, dy, absolute=False, duration=0.1)
def isLetterY(letter):
    '''
    Return True if `letter` is the string 'y', case-insensitively.

    Bug fixed: str.lower() returns a new string; the original discarded
    its result, so an uppercase 'Y' was incorrectly rejected.
    '''
    return letter.lower() == 'y'
|
{"/main.py": ["/clickTwitch/__init__.py"]}
|
6,289
|
vdeleon/clickTwitch
|
refs/heads/master
|
/main.py
|
import subprocess
import clickTwitch
from time import sleep
# Set up.
try:
    # Anything non-numeric or <= 1 falls back to the 30 s default below.
    clickInterval = int(input('Set time in seconds the mouse will click (default = 30): '))
    if clickInterval <= 1:
        raise ValueError
except ValueError:
    clickInterval = 30
finally:
    subprocess.run('cls', shell=True)  # clear screen (Windows-only command)
wantsRandom = str(input('Do you want to make a random movement every second? Y/n: '))
subprocess.run('cls', shell=True)
# Ask the user to mark the claim button; coordinates are stored on the
# detectPosition function object.
clickTwitch.detectPosition()
# Loops until window is closed.
while True:
    subprocess.run('cls', shell=True)
    i = 0
    clickTwitch.clickPosition(clickTwitch.detectPosition.x, clickTwitch.detectPosition.y)
    # Count down clickInterval seconds, optionally jiggling the mouse each
    # second so the stream does not register as idle.
    while i < clickInterval:
        sleep(1)
        i += 1
        print('%is' % i)
        if clickTwitch.isLetterY(wantsRandom):
            clickTwitch.randomMovement()
|
{"/main.py": ["/clickTwitch/__init__.py"]}
|
6,295
|
NURx2/CleaningManager
|
refs/heads/master
|
/src/modules/executor.py
|
from src.database import db
class Executor(db.Model):
    """A person who can be assigned cleaning duties."""
    id = db.Column(db.Integer, primary_key=True)
    # Display name of the executor (as read from the duty spreadsheet).
    name = db.Column(db.String(16))
    # Telegram account identifier — presumably used to contact the
    # executor; TODO confirm against the bot code.
    telegram_id = db.Column(db.String(32))
# db.session.add(my_object)
|
{"/src/database/additions.py": ["/src/modules/executor.py"], "/main.py": ["/src/parser/parser.py"], "/src/parser/parser.py": ["/src/database/additions.py", "/src/static/constants.py"]}
|
6,296
|
NURx2/CleaningManager
|
refs/heads/master
|
/src/database/additions.py
|
from . import db
from typing import List
from src.modules.executor import Executor
def update_executors(names: List[str]):
    """Create an Executor row for each name and persist them.

    Bug fixed: the original constructed Executor objects but never added
    them to the session or committed, so nothing reached the database
    (the hint `# db.session.add(my_object)` was left in executor.py).
    """
    for name in names:
        executor = Executor(name=name)
        db.session.add(executor)
    db.session.commit()
|
{"/src/database/additions.py": ["/src/modules/executor.py"], "/main.py": ["/src/parser/parser.py"], "/src/parser/parser.py": ["/src/database/additions.py", "/src/static/constants.py"]}
|
6,297
|
NURx2/CleaningManager
|
refs/heads/master
|
/setup.py
|
from setuptools import setup, find_packages
# NOTE(review): find_packages is imported but never passed to setup(), so
# the distribution currently declares no packages — confirm intent.
# Long description is taken verbatim from the README.
with open("README.md", "r") as fh:
    long_description = fh.read()
setup(
    name="Cleaning Manager",
    author="NURx2",
    description="It allows to track the execution and quality of a cleaning.",
    long_description=long_description,
    long_description_content_type="text/markdown",
    classifiers=[
        "Programming Language :: Python :: 3",
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
    ],
    python_requires='>=3.7',
)
|
{"/src/database/additions.py": ["/src/modules/executor.py"], "/main.py": ["/src/parser/parser.py"], "/src/parser/parser.py": ["/src/database/additions.py", "/src/static/constants.py"]}
|
6,298
|
NURx2/CleaningManager
|
refs/heads/master
|
/main.py
|
from flask import Flask
from src.database import db
from src.parser.parser import parse

app = Flask(__name__)
# connection string of database
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///data.db'
# Bug fixed: the key was misspelled 'SQLACLHEMY_TRACK_MODIFICATIONS', so
# Flask-SQLAlchemy silently ignored the setting.
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
db.init_app(app)


def main():
    """Run the duty-spreadsheet parser."""
    parse()


if __name__ == '__main__':
    main()
|
{"/src/database/additions.py": ["/src/modules/executor.py"], "/main.py": ["/src/parser/parser.py"], "/src/parser/parser.py": ["/src/database/additions.py", "/src/static/constants.py"]}
|
6,299
|
NURx2/CleaningManager
|
refs/heads/master
|
/src/parser/parser.py
|
import openpyxl
from src.database.additions import update_executors
from src.static.constants import cnt_weeks
def colored(cell):
    # Stub: always reports the cell as colored.
    # TODO(review): implement real fill-colour detection (e.g. via
    # cell.fill) — until then every cell counts as a duty assignment.
    return True
def parse():
    """Read table.xlsx and walk the duty grid (people x weeks)."""
    table = openpyxl.load_workbook(filename='table.xlsx')
    main_sheet = table['Лист1']
    # Column A (skipping the header row) holds the executors' names.
    names = [value.value for value in main_sheet['A'][1:]]
    update_executors(names)
    n = len(names)
    start_row = 2
    start_col = ord('B')
    # NOTE(review): main_sheet[j][i] indexes row j at cell position i, but
    # i ranges over ord('B')..ord('B')+cnt_weeks (i.e. 66..), which looks
    # like an off-by-ord('A') bug in column addressing — confirm.
    for i in range(start_col, start_col + cnt_weeks):
        for j in range(start_row, start_row + n):
            if colored(main_sheet[j][i]):
                # Column B of the same row presumably holds the duty date
                # — TODO confirm; the value is currently unused.
                date_of_the_duty = main_sheet[j][1]
|
{"/src/database/additions.py": ["/src/modules/executor.py"], "/main.py": ["/src/parser/parser.py"], "/src/parser/parser.py": ["/src/database/additions.py", "/src/static/constants.py"]}
|
6,300
|
NURx2/CleaningManager
|
refs/heads/master
|
/src/static/constants.py
|
cnt_weeks = 8  # number of week columns scanned by the spreadsheet parser
|
{"/src/database/additions.py": ["/src/modules/executor.py"], "/main.py": ["/src/parser/parser.py"], "/src/parser/parser.py": ["/src/database/additions.py", "/src/static/constants.py"]}
|
6,303
|
OpiumSmoke/django-tutorial-todolist
|
refs/heads/master
|
/todos/urls.py
|
from django.urls import path
from . import views
app_name = 'todos'  # URL namespace: reverse as 'todos:<name>'
# Todo CRUD routes followed by the Category CRUD routes.
urlpatterns = [
    path('', views.index, name='index'),
    path('details/<int:id>', views.details, name='detail'),
    path('add', views.add, name='add'),
    path('update/<int:id>', views.update, name='update'),
    path('delete/<int:id>', views.delete, name='delete'),
    path('category', views.category_index, name='category/index'),
    path('category/add', views.category_add, name='category/add'),
    path('category/details/<int:category_id>', views.category_detail, name='category/details'),
    path('category/update/<int:category_id>', views.category_update, name='category/update'),
    path('category/delete/<int:category_id>', views.category_delete, name='category/delete'),
]
|
{"/tools/.pythonrc.py": ["/todos/models.py"], "/tools/populate-test-models.py": ["/todos/models.py"], "/todos/views.py": ["/todos/models.py"]}
|
6,304
|
OpiumSmoke/django-tutorial-todolist
|
refs/heads/master
|
/tools/.pythonrc.py
|
import os, sys, importlib
from django.test.utils import setup_test_environment
setup_test_environment()
# Import the project's settings package named by DJANGO_SETTINGS_MODULE.
prj = importlib.__import__(os.environ['DJANGO_SETTINGS_MODULE'].split('.')[0], fromlist=('settings',))
settings = prj.settings
# Import every non-django installed app so they are handy in the shell.
app_names = [ app.split('.')[0] for app in settings.INSTALLED_APPS if not app.startswith('django') ]
apps = {}
for app in app_names:
    apps[app] = importlib.__import__(app)
### The code so far is general and reusable.
### Add project specific code from here.
from todos.models import Todo
from todos.models import Category
|
{"/tools/.pythonrc.py": ["/todos/models.py"], "/tools/populate-test-models.py": ["/todos/models.py"], "/todos/views.py": ["/todos/models.py"]}
|
6,305
|
OpiumSmoke/django-tutorial-todolist
|
refs/heads/master
|
/tools/populate-test-models.py
|
import os, sys, django
def add_test_todos():
    """Insert five sample Todo rows, skipping any title that already exists.

    Idempotent: re-running the script never duplicates rows. The loop
    preserves the original insert order.
    """
    print('Populating Todo objects...')
    # (title, text) pairs for the fixture rows.
    samples = [
        ('My 1st Todo', 'Reading...'),
        ('My 2nd Todo', 'Playing with my dog...'),
        ('My 3rd Todo', 'Eating...'),
        ('My 4th Todo', 'Sleeping...'),
        ('My 5th Todo', 'Debugging...'),
    ]
    for title, text in samples:
        if not Todo.objects.filter(title=title):
            Todo(title=title, text=text).save()
def add_test_categories():
    """Insert four sample Category rows, skipping names that already exist.

    Idempotent: re-running the script never duplicates rows.
    """
    print('Populating Category objects...')
    for name in ('work', 'life', 'hobby', 'study'):
        if not Category.objects.filter(name=name):
            Category(name=name).save()
def add_relations():
    """Attach fixture Todos to their Categories.

    Idempotent: a category that already has any todos is left untouched.
    The mapping mirrors the original hand-written assignments.
    """
    print('Populating relations between objects...')
    relations = [
        ('work', ['My 5th Todo']),
        ('life', ['My 3rd Todo', 'My 4th Todo']),
        ('hobby', ['My 1st Todo', 'My 2nd Todo']),
        ('study', ['My 1st Todo']),
    ]
    for cat_name, titles in relations:
        category = Category.objects.filter(name=cat_name).get()
        if not category.todos.all():
            for title in titles:
                category.todos.add(Todo.objects.filter(title=title).get())
if __name__ == "__main__":
sys.path.append(os.path.abspath(os.path.curdir))
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "todolist.settings")
django.setup()
from todos.models import Todo
from todos.models import Category
if Todo.objects.all():
print('You have data in the db.')
print('Executing this script may mess with the existing data.')
print('Still want to proceed? (y/n)')
y = input().lower()
if y != 'y' and y != 'yes':
print('Aborting...')
sys.exit()
add_test_todos()
add_test_categories()
add_relations()
|
{"/tools/.pythonrc.py": ["/todos/models.py"], "/tools/populate-test-models.py": ["/todos/models.py"], "/todos/views.py": ["/todos/models.py"]}
|
6,306
|
OpiumSmoke/django-tutorial-todolist
|
refs/heads/master
|
/todos/views.py
|
from django.shortcuts import render, redirect, get_object_or_404
from django.urls import reverse
from django.contrib import messages
# from django.http import HttpResponse
from .models import Todo, Category
def index(request):
    """List the first 10 todos."""
    todos = Todo.objects.all()[:10]
    context = {
        'todos':todos
    }
    return render(request, 'index.html', context)
def details(request, id):
    """Show one todo and the categories attached to it (404 if missing)."""
    todo = get_object_or_404(Todo, pk=id)
    context = {
        'todo':todo,
        'categories':todo.category_set.all()
    }
    return render(request, 'details.html', context)
def add(request):
    """Create a todo from POST data; on GET render the empty form."""
    if (request.method == 'POST'):
        title = request.POST['title']
        text = request.POST['text']
        todo = Todo(title=title, text=text)
        todo.save()
        # Attach each category selected in the multi-select, if any.
        categories = request.POST.getlist('category-select')
        if categories:
            for id in categories:
                category = Category.objects.get(id=id)
                todo.category_set.add(category)
        messages.success(request, 'Todo Added : %s' % todo.title)
        return redirect(reverse('todos:index'))
    else:
        # GET: the form needs the category list for its multi-select.
        categories = Category.objects.all()
        context = {
            'categories':categories
        }
        return render(request, 'add.html', context)
def update(request, id):
    """Edit a todo: replace its fields and category links from POST data."""
    todo = get_object_or_404(Todo, pk=id)
    if (request.method == 'POST'):
        todo.title = request.POST['title']
        todo.text = request.POST['text']
        # Replace the category links wholesale with the new selection.
        categories = request.POST.getlist('category-select')
        todo.category_set.clear()
        if categories:
            for cid in categories:
                category = Category.objects.get(id=cid)
                todo.category_set.add(category)
        todo.save()
        messages.success(request, 'Todo Updated : %s' % todo.title)
        return redirect(reverse('todos:detail', args=[ id ]))
    else:
        # GET: mark each category with whether it is already attached so
        # the template can pre-check the corresponding options.
        attached_categories = todo.category_set.all()
        categories = [(True,c) if attached_categories.filter(id=c.id) else (False, c) for c in Category.objects.all() ]
        context = {
            'todo':todo,
            'categories':categories
        }
        return render(request, 'edit.html', context)
def delete(request, id):
    """Delete a todo on POST, then return to the index.

    NOTE(review): a non-POST request falls through and returns None,
    which Django rejects — confirm this view is only reached from a
    POST form.
    """
    if (request.method == 'POST'):
        todo = get_object_or_404(Todo, pk=id)
        messages.success(request, 'Todo Deleted : %s' % todo.title)
        todo.delete()
        return redirect(reverse('todos:index'))
def category_index(request):
    """List all categories."""
    categories = Category.objects.all()
    context = {
        'categories':categories
    }
    return render(request, 'category/index.html', context)
def category_add(request):
    """Create a category from POST data; on GET render the form."""
    if (request.method == 'POST'):
        name = request.POST['name']
        category = Category(name=name)
        category.save()
        messages.success(request, 'Category Added : %s' % category.name)
        return redirect(reverse('todos:index'))
    else:
        categories = Category.objects.all()
        context = {
            'categories':categories
        }
        return render(request, 'category/add.html', context)
def category_detail(request, category_id):
    """Show one category and its attached todos (404 if missing)."""
    category = get_object_or_404(Category, pk=category_id)
    context = {
        'category':category,
        'todos':category.todos.all()
    }
    return render(request, 'category/details.html', context)
def category_update(request, category_id):
    """Rename a category on POST; on GET render the edit form."""
    category = get_object_or_404(Category, pk=category_id)
    if (request.method == 'POST'):
        category.name = request.POST['name']
        category.save()
        messages.success(request, 'Category Updated : %s' % category.name)
        return redirect(reverse('todos:category/details', args=[ category_id ]))
    else:
        categories = Category.objects.all()
        context = {
            'categories':categories,
            'category':category
        }
        return render(request, 'category/edit.html', context)
def category_delete(request, category_id):
    """Delete a category on POST, then return to the index.

    NOTE(review): like delete() above, a non-POST request returns None —
    confirm it is only reached from a POST form.
    """
    if (request.method == 'POST'):
        category = get_object_or_404(Category, pk=category_id)
        messages.success(request, 'Category Deleted : %s' % category.name)
        category.delete()
        return redirect(reverse('todos:index'))
|
{"/tools/.pythonrc.py": ["/todos/models.py"], "/tools/populate-test-models.py": ["/todos/models.py"], "/todos/views.py": ["/todos/models.py"]}
|
6,307
|
OpiumSmoke/django-tutorial-todolist
|
refs/heads/master
|
/todos/models.py
|
from django.db import models
from django.utils import timezone
class Todo(models.Model):
    """A single todo item with a title, free text, and creation time."""
    title = models.CharField(max_length=200)
    text = models.TextField()
    # Defaults to "now" at save time; blank=True keeps admin forms lenient.
    created_at = models.DateTimeField(default=timezone.now, blank=True)
    def __str__(self):
        return '%s: %s' % (self.id, self.title)
class Category(models.Model):
    """A named grouping; each todo may belong to many categories."""
    name = models.CharField(max_length=50)
    todos = models.ManyToManyField(Todo)
    def __str__(self):
        return '%s: %s' % (self.id, self.name)
|
{"/tools/.pythonrc.py": ["/todos/models.py"], "/tools/populate-test-models.py": ["/todos/models.py"], "/todos/views.py": ["/todos/models.py"]}
|
6,312
|
eltondornelas/django-semana-dev-python-treinaweb
|
refs/heads/master
|
/gerenciador_tarefas/urls.py
|
"""gerenciador_tarefas URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
urlpatterns = [
    path('admin/', admin.site.urls),
    path('', include('app.urls')),
    # bare path with no view: requests are delegated to the included app
]
# mysql -u root
# create database gerenciador_tarefas;
# use gerenciador_tarefas;
# show tables;
# desc app_task;
# select * from app_task
# select * from auth_user
# desc app_task;
|
{"/app/services/task_service.py": ["/app/models.py"], "/app/urls.py": ["/app/views/task_views.py"]}
|
6,313
|
eltondornelas/django-semana-dev-python-treinaweb
|
refs/heads/master
|
/app/views/task_views.py
|
from django.contrib.auth.decorators import login_required
from django.http import HttpResponse
from django.shortcuts import render, redirect
from ..forms import TaskForm
from app.entities.task import Task
from ..services import task_service
# @login_required is a decorator that checks whether the user is logged in;
# if not, the user is redirected.
# Note that the default redirect target is account/login; to change it,
# set LOGIN_URL in settings.py.
@login_required()
def task_list(request):
    """Render the logged-in user's task list."""
    tasks = task_service.task_list(request.user)
    # request.user is the authenticated user
    return render(request, 'tasks/task_list.html',
                  {"tasks": tasks})
@login_required()
def register_task(request):
    """Create a task from the submitted form; on GET show an empty form."""
    if request.method == 'POST':
        form_task = TaskForm(request.POST)
        # TaskForm validates the fields automatically
        if form_task.is_valid():
            title = form_task.cleaned_data['title']
            description = form_task.cleaned_data['description']
            expiration_date = form_task.cleaned_data['expiration_date']
            priority = form_task.cleaned_data['priority']
            new_task = Task(title, description,
                            expiration_date, priority, request.user)
            task_service.register_task(new_task)
            return redirect('task_list_route')
    else:
        form_task = TaskForm()
    return render(request, 'tasks/form_task.html', {"form_task": form_task})
@login_required()
def edit_task(request, id):
    """Edit an existing task; only its owner may modify it."""
    task_db = task_service.task_list_id(id)
    # Reject edits to tasks owned by other users.
    if task_db.user != request.user:
        return HttpResponse('Não Permitido!')
    form_task = TaskForm(request.POST or None, instance=task_db)
    if form_task.is_valid():
        title = form_task.cleaned_data['title']
        description = form_task.cleaned_data['description']
        expiration_date = form_task.cleaned_data['expiration_date']
        priority = form_task.cleaned_data['priority']
        new_task = Task(title, description,
                        expiration_date, priority, request.user)
        task_service.edit_task(task_db, new_task)
        return redirect('task_list_route')
    return render(request, 'tasks/form_task.html', {"form_task": form_task})
@login_required()
def remove_task(request, id):
    """Delete a task after a POST confirmation; only its owner may do so."""
    task_db = task_service.task_list_id(id)
    # Reject deletions of tasks owned by other users.
    if task_db.user != request.user:
        return HttpResponse('Não Permitido!')
    if request.method == 'POST':
        task_service.remove_task(task_db)
        return redirect('task_list_route')
    # GET: show the confirmation page first.
    return render(request, 'tasks/confirmation.html', {'task': task_db})
|
{"/app/services/task_service.py": ["/app/models.py"], "/app/urls.py": ["/app/views/task_views.py"]}
|
6,314
|
eltondornelas/django-semana-dev-python-treinaweb
|
refs/heads/master
|
/app/models.py
|
from django.db import models
from django.contrib.auth.models import User
class Task(models.Model):
    """A user's task with a priority level and an expiration date."""
    PRIORITY_CHOICES = [
        ('H', 'High'),
        ('N', 'Normal'),
        ('L', 'Low')
    ]
    title = models.CharField(max_length=30, null=False, blank=False)
    description = models.CharField(max_length=100, null=False, blank=False)
    expiration_date = models.DateField(null=False, blank=False)
    priority = models.CharField(max_length=1, choices=PRIORITY_CHOICES,
                                null=False, blank=False)
    # One user owns many tasks (1-to-N via ForeignKey).
    # null=True because legacy rows exist without an owner;
    # CASCADE deletes a user's tasks together with the user.
    user = models.ForeignKey(User, null=True, on_delete=models.CASCADE)
|
{"/app/services/task_service.py": ["/app/models.py"], "/app/urls.py": ["/app/views/task_views.py"]}
|
6,315
|
eltondornelas/django-semana-dev-python-treinaweb
|
refs/heads/master
|
/app/templatetags/my_filters.py
|
from django import template
register = template.Library()
@register.filter(name='add_class')
def add_class(value, arg):
    """Template filter: render a form field's widget with a CSS class.

    `value` is a form field (e.g. form_task.title); `arg` is the class
    string to inject into the rendered widget's attributes.
    """
    return value.as_widget(attrs={'class': arg})
# Receives a form input and a class name, then renders that input with
# the class applied (form_task.title is the input in this case).
|
{"/app/services/task_service.py": ["/app/models.py"], "/app/urls.py": ["/app/views/task_views.py"]}
|
6,316
|
eltondornelas/django-semana-dev-python-treinaweb
|
refs/heads/master
|
/app/services/task_service.py
|
from app.models import Task
def register_task(task):
    """Persist a new Task row from an entity-like object."""
    Task.objects.create(title=task.title, description=task.description,
                        expiration_date=task.expiration_date,
                        priority=task.priority, user=task.user)
def task_list(user):
    """Return all tasks owned by `user`."""
    return Task.objects.filter(user=user).all()
    # the filter restricts the query to the given user's rows
    # (roughly: SELECT * FROM app_task WHERE user_id = ...)
def task_list_id(id):
    """Return the task with the given primary key."""
    return Task.objects.get(id=id)
def edit_task(task_db, new_task):
    """Copy the editable fields of new_task onto task_db and update it."""
    task_db.title = new_task.title
    task_db.description = new_task.description
    task_db.expiration_date = new_task.expiration_date
    task_db.priority = new_task.priority
    task_db.save(force_update=True)
def remove_task(task_db):
    """Delete the given task row."""
    task_db.delete()
|
{"/app/services/task_service.py": ["/app/models.py"], "/app/urls.py": ["/app/views/task_views.py"]}
|
6,317
|
eltondornelas/django-semana-dev-python-treinaweb
|
refs/heads/master
|
/app/urls.py
|
from django.urls import path
from .views.task_views import *
from .views.user_views import *
urlpatterns = [
    path('task_list/', task_list, name='task_list_route'),
    path('register_task/', register_task, name='register_task_route'),
    path('edit_task/<int:id>', edit_task, name='edit_task_route'),
    path('remove_task/<int:id>', remove_task, name='remove_task_route'),
    path('register_user/', register_user, name='register_user_route'),
    path('login_user/', login_user, name='login_user_route'),
    path('logout_user/', logout_user, name='logout_user_route'),
    # url, view function, route name
    # <type:param_name> -> the name must match the view's parameter
]
|
{"/app/services/task_service.py": ["/app/models.py"], "/app/urls.py": ["/app/views/task_views.py"]}
|
6,318
|
jowr/jopy
|
refs/heads/master
|
/jopy/styles/__init__.py
|
import matplotlib.pyplot as plt
try:
from .plots import Figure
except:
from jopy.styles.plots import Figure
def get_figure(orientation='landscape',width=110,fig=None,axs=False):
    """Create (or adapt) a figure sized as a scaled A4 sheet.

    Parameters
    ----------
    orientation : str
        'landscape' or 'portrait'; anything else raises ValueError.
    width : float
        Target width in mm; values below 10 are treated as a scale
        factor applied to the A4 long/short side instead.
    fig : matplotlib.figure.Figure, jopy.plots.Figure
        Figure to reuse; None creates a fresh one.
    axs : boolean
        When True, a single subplot (111) is added.

    Returns
    -------
    jopy.plots.Figure
        The (re)configured figure object.
    """
    if fig is None:
        fig = plt.figure()
    fig.__class__ = Figure
    mm_to_inch = 3.93700787401575/100.0  # factor mm to inch
    long_side = 297. * mm_to_inch   # A4 height
    short_side = 210. * mm_to_inch  # A4 width
    if width < 0:
        raise ValueError("size cannot be less than zero.")
    width *= mm_to_inch
    if orientation == 'landscape':
        base_w, base_h = long_side, short_side
    elif orientation == 'portrait':
        base_w, base_h = short_side, long_side
    else:
        raise ValueError("Unknown orientation")
    # Widths under 10 mm are interpreted as a fraction of the base width.
    if width < 10*mm_to_inch:
        width *= base_w
    scale = width/base_w
    fig.set_size_inches(base_w*scale, base_h*scale)
    if axs:
        fig.add_subplot(111)
    return fig
def plot_axis(data,kind,ax=None):
    # Stub: currently only resolves the target axis; `data` and `kind`
    # are ignored and nothing is drawn (implicitly returns None).
    if ax is None: ax = plt.gca()
if __name__ == "__main__":
from jopy.utils import module_class_dict
import jopy.styles.mplib as mpl
dic = module_class_dict(mpl)
for i in dic:
line_fig,map_fig = dic[i]()._show_info()
line_fig.savefig(i+"_lines.pdf")
map_fig.savefig(i+"_maps.pdf")
|
{"/jopy/styles/__init__.py": ["/jopy/styles/plots.py", "/jopy/utils.py", "/jopy/styles/mplib.py"], "/jopy/recip/__init__.py": ["/jopy/recip/mechanisms.py"], "/jopy/styles/mplib.py": ["/jopy/base.py"], "/test/test_thermo/__init__.py": ["/jopy/thermo/__init__.py"], "/jopy/thermo/__init__.py": ["/jopy/utils.py", "/jopy/thermo/utils.py"], "/test/test_jopy.py": ["/jopy/recip/mechanisms.py", "/jopy/styles/mplib.py", "/jopy/utils.py"], "/jopy/recip/mechanisms.py": ["/jopy/base.py"]}
|
6,319
|
jowr/jopy
|
refs/heads/master
|
/jopy/recip/__init__.py
|
# -*- coding: utf-8 -*-
from __future__ import print_function, division
if __name__ == "__main__":
from jopy.recip.mechanisms import RecipExplicit, RecipImplicit
#from math import pi
import numpy as np
import matplotlib.pyplot as plt
me = RecipImplicit()
metoo = RecipExplicit()
cr = 0.05
cl = 0.1
bo = 0.1
pp = 0.75*cr
cv = 20e-6
me.set_geometry(cr, cl, bo, pp, cv)
metoo.set_geometry(cr, cl, bo, pp, cv)
full = me.revolution(1000)
pos = me.l(full)
postoo = metoo.l(full)
#fultoo = metoo._calc_theta_from_distance(postoo)
#plt.figure()
#plt.plot(full,pos)
#plt.plot(full,postoo)
#plot(fultoo,postoo,':')
print(" TDC : BDC ")
print("{0:8.2f} : {1:8.2f}".format(np.degrees(me.theta_0_TDC), np.degrees(me.theta_0_BDC)))
print("{0:8.2f} : {1:8.2f}".format(np.degrees(metoo.theta_0_TDC), np.degrees(metoo.theta_0_BDC)))
print("{0:8.2f} : {1:8.2f}".format(np.degrees(me.TDC()), np.degrees(me.BDC())))
print("{0:8.2f} : {1:8.2f}".format(np.degrees(metoo.TDC()), np.degrees(metoo.BDC())))
print("{0:8.4f} : {1:8.4f}".format(me.l_cr_max, me.l_cr_min))
print("{0:8.4f} : {1:8.4f}".format(metoo.l_cr_max, metoo.l_cr_min))
print("{0:8.4f} : {1:8.4f}".format(np.min(pos), np.max(pos)))
print("{0:8.4f} : {1:8.4f}".format(np.min(postoo),np.max(postoo)))
#print(pi)
plt.figure()
for p in np.linspace(0, 0.8, 5):
pp = p*cr
me.set_geometry(cr, cl, bo, pp, cv)
metoo.set_geometry(cr, cl, bo, pp, cv)
diff = (me.V(full)-metoo.V(full))/metoo.V(full)*100.0
plt.plot(full,diff,label=str(p)+": (Dubbel-Bjarne)/Bjarne")
plt.legend(loc=3)
plt.savefig("recip_vol_dif.pdf")
plt.figure()
plt.plot(full,me.V(full)*1e6,label='Dubbel')
plt.plot(full,metoo.V(full)*1e6,label='Bjarne')
plt.legend(loc=3)
plt.savefig("recip_vol.pdf")
plt.figure()
#plt.plot(full,me.V(full)*1e6,label='Dubbel')
plt.plot(full,metoo.dVdtheta(full)*1e6,label='Bjarne')
plt.legend(loc=3)
plt.savefig("recip_dvoldtheta.pdf")
plt.figure()
#plt.plot(full,me.V(full)*1e6,label='Dubbel')
plt.plot(full,metoo.d2Vdtheta2(full)*1e6,label='Bjarne')
plt.legend(loc=3)
plt.savefig("recip_d2voldtheta2.pdf")
plt.show()
me.info()
metoo.info()
|
{"/jopy/styles/__init__.py": ["/jopy/styles/plots.py", "/jopy/utils.py", "/jopy/styles/mplib.py"], "/jopy/recip/__init__.py": ["/jopy/recip/mechanisms.py"], "/jopy/styles/mplib.py": ["/jopy/base.py"], "/test/test_thermo/__init__.py": ["/jopy/thermo/__init__.py"], "/jopy/thermo/__init__.py": ["/jopy/utils.py", "/jopy/thermo/utils.py"], "/test/test_jopy.py": ["/jopy/recip/mechanisms.py", "/jopy/styles/mplib.py", "/jopy/utils.py"], "/jopy/recip/mechanisms.py": ["/jopy/base.py"]}
|
6,320
|
jowr/jopy
|
refs/heads/master
|
/jopy/styles/mplib.py
|
# -*- coding: utf-8 -*-
from __future__ import print_function, division
from ..base import JopyBaseClass
import matplotlib as mpl
import matplotlib.cm as mplcm
import numpy as np
from matplotlib.colors import LinearSegmentedColormap
import brewer2mpl
from itertools import cycle
import sys
import platform
class BaseStyle(JopyBaseClass):
default_map = "cubehelix_kindl"
default_lst = 4
def __init__(self):
JopyBaseClass.__init__(self)
self._figure = None
self._lines = {}
self._ccycle = None # Colour cycle
self._scycle = None # Style cycle
self._mcycle = None # Marker cycle
self._black = "#222222"
#self._black = "#000000"
#self._black = 'green'
self._lgrey = "#FBFBFB" #"GhostWhite"
self._linewi = 0.75
self._color_maps = {}
self._register_color_maps()
self._color_lists = {}
self._register_color_lists()
def update_rc_params(self):
#mpl.rcParams['legend.fontsize'] = 'medium'
mpl.rcParams['font.size'] = 11.0
mpl.rcParams['font.serif'] = "Bitstream Vera Serif, New Century Schoolbook, Century Schoolbook L, Utopia, ITC Bookman, Bookman, Nimbus Roman No9 L, Times New Roman, Times, Palatino, Charter, serif"
mpl.rcParams['font.sans-serif'] = "Bitstream Vera Sans, Lucida Grande, Verdana, Geneva, Lucid, Arial, Helvetica, Avant Garde, sans-serif"
mpl.rcParams['font.cursive'] = "Apple Chancery, Textile, Zapf Chancery, Sand, cursive"
mpl.rcParams['font.fantasy'] = "Comic Sans MS, Chicago, Charcoal, Impact, Western, fantasy"
mpl.rcParams['font.monospace'] = "Bitstream Vera Sans Mono, Andale Mono, Nimbus Mono L, Courier New, Courier, Fixed, Terminal, monospace"
#mpl.rcParams['text.usetex'] = True
mpl.rcParams['text.latex.unicode']=True
mpl.rcParams['text.latex.preamble'] = self._mplpreamble()
mpl.rcParams['mathtext.fontset'] = "cm" # Should be 'cm' (Computer Modern), 'stix', 'stixsans'
if platform.system() == 'Windows':
mpl.rcParams['pdf.fonttype'] = 42
mpl.rcParams['ps.fonttype'] = 42
# ######################
mpl.rcParams["patch.edgecolor"] = self._black
mpl.rcParams["text.color"] = self._black
mpl.rcParams["axes.edgecolor"] = self._black # axes edge color
mpl.rcParams["axes.linewidth"] = self._linewi # edge linewidth
mpl.rcParams["axes.labelcolor"] = self._black
mpl.rcParams["xtick.major.pad"] = 6 # distance to major tick label in points
mpl.rcParams["xtick.minor.pad"] = 6 # distance to the minor tick label in points
mpl.rcParams["xtick.color"] = self._black # color of the tick labels
mpl.rcParams["ytick.major.pad"] = 6 # distance to major tick label in points
mpl.rcParams["ytick.minor.pad"] = 6 # distance to the minor tick label in points
mpl.rcParams["ytick.color"] = self._black # color of the tick labels
mpl.rcParams["grid.color"] = self._lgrey # grid color
mpl.rcParams["legend.numpoints"] = 1 # the number of points in the legend line
mpl.rcParams["legend.fontsize"] = "medium"
mpl.rcParams["legend.scatterpoints"] = 1 # number of scatter points
mpl.rcParams["path.simplify"] = True # When True, simplify paths by removing "invisible" points
mpl.rcParams["savefig.dpi"] = 300 # figure dots per inch
#mpl.rcParams["savefig.format"] = "pdf" # png, ps, pdf, svg
mpl.rcParams["savefig.bbox"] = "tight" # 'tight' or 'standard'.
#
# Update the colours to be black again
#mpl.rcParams["patch.edgecolor"] = self._black
#mpl.rcParams["text.color"] = self._black
#mpl.rcParams["axes.edgecolor"] = self._black # axes edge color
#mpl.rcParams["axes.labelcolor"] = self._black
#mpl.rcParams["xtick.color"] = "000000" # color of the tick labels
#mpl.rcParams["ytick.color"] = "000000" # color of the tick labels
#
mpl.rcParams['contour.negative_linestyle'] = 'solid'
#
#mpl.use("pgf")
#mpl.rcParams['text.usetex'] = True
mpl.rcParams["pgf.texsystem"] = "pdflatex"
mpl.rcParams["pgf.preamble"] = self._mplpreamble()
mpl.rcParams["legend.handlelength"] = 1.75 # the length of the legend lines in fraction of fontsize
#: [
# r"\usepackage[utf8x]{inputenc}",
# r"\usepackage[T1]{fontenc}",
# r"\usepackage{cmbright}",
# ]
# }
#mpl.rcParams["axes.color_cycle"] = ', '.join([self.rgb_to_hex(col) for col in self.color_cycle()])
#print(self.cycle_to_list(self.color_cycle()))
mpl.rcParams["axes.color_cycle"] = self.cycle_to_list(self.color_cycle())
#axes.color_cycle : b, g, r, c, m, y, k # color cycle for plot lines
# as list of string colorspecs:
# single letter, long name, or
# web-style hex
def _mplpreamble(self):
preamble = []
preamble.append(r'\usepackage[binary-units=true,abbreviations=true]{siunitx}')
preamble.append(r'\usepackage{amsmath}')
#preamble.append(r'\usepackage{amssymb}')
preamble.append(r'\usepackage[version=3]{mhchem}')
return preamble
def _add_to_list(self,objs,lst):
"""Takes a list of object, adds them to lst and returns the new list. If any of the objects was in the list before, it will be removed."""
# https://docs.python.org/2/library/itertools.html#recipes
#seen = set()
#seen_add = seen.add
#for element in ifilterfalse(seen.__contains__, objs+lst):
# seen_add(element)
# yield element
res = []
for i in objs+lst:
try: iy = i.strip()
except: iy = i; pass
if iy not in res:
res.append(iy)
return res
def _add_to_rc_font_list(self,objs,lst):
mpl.rcParams[lst] = self._add_to_list(objs.split(','), mpl.rcParams[lst])
def cycle_to_list(self, cyc):
"""Convert a cycle to a list of unique entries
Takes a cycle object and extract elements until
it receives and object that has been extracted before
"""
lst = []
while True:
obj = next(cyc)
if obj not in lst: lst.append(obj)
else: return lst
def multiply_list(self, lst, doubles=1):
out = []
for i in lst:
for _ in range(doubles):
out.append(i)
return out
def multiply_cycle(self, cyc, doubles=1):
out = self.cycleToList(cyc)
out = self.multiplyList(out, doubles=doubles)
return cycle(out)
# http://stackoverflow.com/questions/214359/converting-hex-color-to-rgb-and-vice-versa
def hex_to_rgb(self,hex_string):
rgb = mpl.colors.hex2color(hex_string)
return tuple([int(255*x) for x in rgb])
def rgb_to_hex(self,rgb_tuple):
return mpl.colors.rgb2hex([1.0*x/255 for x in rgb_tuple])
def _to_rgb(self,inp):
cc = mpl.colors.ColorConverter()
return cc.to_rgb(inp)
def get_color_map(self,name=None,reverse=False):
"""A function to get a matplotlib colour map by name"""
if name is None: name = self.default_map
if name.endswith('_r'):
name = name[:-2] # remove "_r"
reverse = not reverse
# Use the standard maps
if reverse:
cm = mplcm.get_cmap(name+'_r')
else:
cm = mplcm.get_cmap(name)
return cm #LinearSegmentedColormap.from_list(cm)
def _register_color_maps(self):
"""A function to create and register the custom colour map objects
in a way matplotlib can digest. The cubehelix (including Kindl et al.,
the Brewer3 colour maps (YlOrRd, PuBuGn, YlGnBu) all provide proper
desaturation in grey-scale.
"""
specs = {}
# We start out with the custom cubehelix maps
#========= =======================================================
#Keyword Description
#========= =======================================================
#gamma gamma factor to emphasise either low intensity values
# (gamma < 1), or high intensity values (gamma > 1);
# defaults to 1.0.
#s the start color; defaults to 0.5 (i.e. purple).
#r the number of r,g,b rotations in color that are made
# from the start to the end of the color scheme; defaults
# to -1.5 (i.e. -> B -> G -> R -> B).
#h the hue parameter which controls how saturated the
# colors are. If this parameter is zero then the color
# scheme is purely a greyscale; defaults to 1.0.
#========= =======================================================
# 0 = blue, 1 = red, 2 = green
specs['cubehelix_alt'] = mpl._cm.cubehelix(h=1.5) # standard colours but more intensity
specs['cubehelix_blue'] = mpl._cm.cubehelix(s=0.3,r=-0.5,h=1.5) # blue colours and higher intensity
specs['cubehelix_red'] = mpl._cm.cubehelix(s=1.3,r=-0.5,h=1.5) # blue colours and higher intensity
specs['cubehelix_green'] = mpl._cm.cubehelix(s=2.3,r=-0.5,h=1.5) # blue colours and higher intensity
specs['cubehelix_kindl'] = mpl._cm.cubehelix(gamma=1.4,s=0.4,r=-0.8,h=2.0) # blue colours and higher intensity
# A Python version of Matteo Niccoli's colourmaps
# http://nbviewer.ipython.org/github/kwinkunks/notebooks/blob/master/Matteo_colourmaps.ipynb
#
## The original data is approximated with polynomials
#p0 = np.array([ -3.00669779e-36, 6.26525719e-33, -5.87240910e-30, 3.25751282e-27, -1.18087586e-24, 2.89863878e-22, -4.75604889e-20, 4.67614077e-18, -1.13599364e-16, -4.16063333e-14, 7.27326802e-12, -6.41161566e-10, 3.52560300e-08, -1.23850903e-06, 2.67527478e-05, -3.20955377e-04, 1.51205802e-03, 8.78290363e-03, 2.40494252e-02])
#p1 = np.array([ 8.13719543e-37, -1.77388587e-33, 1.75833582e-30, -1.04750030e-27, 4.17412745e-25, -1.17054612e-22, 2.36196641e-20, -3.43234035e-18, 3.50200516e-16, -2.30787699e-14, 6.79825384e-13, 3.17731005e-11, -4.59193023e-09, 2.36050767e-07, -6.49812536e-06, 9.42940406e-05, -6.24155259e-04, 4.04636648e-03, 1.11088863e-02])
#p2 = np.array([ -1.85874163e-35, 4.32740491e-32, -4.60790627e-29, 2.97271126e-26, -1.29744258e-23, 4.05138291e-21, -9.33419995e-19, 1.61295373e-16, -2.10418623e-14, 2.06972791e-12, -1.52201298e-10, 8.23377786e-09, -3.19603306e-07, 8.58912760e-06, -1.52305419e-04, 1.67708019e-03, -1.05536314e-02, 3.80789592e-02, 5.82194596e-03])
#x = range(256)
#LinL = np.empty((256,3))
#LinL[:,0] = np.polyval(p0,x)
#LinL[:,1] = np.polyval(p1,x)
#LinL[:,2] = np.polyval(p2,x)
#LinL = np.clip(LinL,0,1)
LinL = np.array([[ 1.43000000e-02, 1.43000000e-02, 1.43000000e-02],
[ 4.04000000e-02, 1.25000000e-02, 3.25000000e-02],
[ 5.16000000e-02, 1.54000000e-02, 4.43000000e-02],
[ 6.16000000e-02, 1.84000000e-02, 5.30000000e-02],
[ 6.99000000e-02, 2.15000000e-02, 6.15000000e-02],
[ 8.14000000e-02, 2.29000000e-02, 6.87000000e-02],
[ 8.57000000e-02, 2.73000000e-02, 7.63000000e-02],
[ 9.28000000e-02, 3.05000000e-02, 8.05000000e-02],
[ 1.00800000e-01, 3.30000000e-02, 8.46000000e-02],
[ 1.06400000e-01, 3.56000000e-02, 9.39000000e-02],
[ 1.11000000e-01, 3.96000000e-02, 9.79000000e-02],
[ 1.18400000e-01, 4.15000000e-02, 1.02000000e-01],
[ 1.22400000e-01, 4.53000000e-02, 1.06200000e-01],
[ 1.26300000e-01, 4.89000000e-02, 1.10500000e-01],
[ 1.30800000e-01, 5.18000000e-02, 1.15000000e-01],
[ 1.35700000e-01, 5.41000000e-02, 1.20000000e-01],
[ 1.41300000e-01, 5.55000000e-02, 1.25600000e-01],
[ 1.45300000e-01, 5.90000000e-02, 1.25600000e-01],
[ 1.50600000e-01, 6.03000000e-02, 1.30900000e-01],
[ 1.53300000e-01, 6.37000000e-02, 1.34400000e-01],
[ 1.56900000e-01, 6.64000000e-02, 1.38500000e-01],
[ 1.62400000e-01, 6.78000000e-02, 1.42500000e-01],
[ 1.65400000e-01, 7.08000000e-02, 1.47100000e-01],
[ 1.70300000e-01, 7.29000000e-02, 1.50400000e-01],
[ 1.74200000e-01, 7.55000000e-02, 1.54200000e-01],
[ 1.79600000e-01, 7.68000000e-02, 1.59500000e-01],
[ 1.80900000e-01, 7.83000000e-02, 1.77500000e-01],
[ 1.79800000e-01, 8.07000000e-02, 1.96700000e-01],
[ 1.78300000e-01, 8.29000000e-02, 2.15900000e-01],
[ 1.78200000e-01, 8.43000000e-02, 2.34100000e-01],
[ 1.76500000e-01, 8.66000000e-02, 2.51400000e-01],
[ 1.77000000e-01, 8.86000000e-02, 2.64600000e-01],
[ 1.76100000e-01, 9.11000000e-02, 2.78200000e-01],
[ 1.75400000e-01, 9.33000000e-02, 2.92200000e-01],
[ 1.77300000e-01, 9.51000000e-02, 3.02600000e-01],
[ 1.75800000e-01, 9.72000000e-02, 3.17400000e-01],
[ 1.75100000e-01, 9.99000000e-02, 3.29000000e-01],
[ 1.74400000e-01, 1.02500000e-01, 3.40500000e-01],
[ 1.73500000e-01, 1.04700000e-01, 3.53400000e-01],
[ 1.74800000e-01, 1.07100000e-01, 3.62700000e-01],
[ 1.74700000e-01, 1.09400000e-01, 3.73900000e-01],
[ 1.72600000e-01, 1.12200000e-01, 3.85800000e-01],
[ 1.73100000e-01, 1.15300000e-01, 3.94000000e-01],
[ 1.73100000e-01, 1.17500000e-01, 4.05100000e-01],
[ 1.73100000e-01, 1.19700000e-01, 4.16100000e-01],
[ 1.72000000e-01, 1.22400000e-01, 4.26800000e-01],
[ 1.73000000e-01, 1.26100000e-01, 4.33000000e-01],
[ 1.71600000e-01, 1.28500000e-01, 4.44500000e-01],
[ 1.71000000e-01, 1.31400000e-01, 4.54000000e-01],
[ 1.70600000e-01, 1.34000000e-01, 4.64200000e-01],
[ 1.66400000e-01, 1.38100000e-01, 4.73900000e-01],
[ 1.58200000e-01, 1.48200000e-01, 4.71700000e-01],
[ 1.48700000e-01, 1.58300000e-01, 4.68300000e-01],
[ 1.42200000e-01, 1.65300000e-01, 4.69900000e-01],
[ 1.35200000e-01, 1.72900000e-01, 4.69400000e-01],
[ 1.28100000e-01, 1.79600000e-01, 4.70800000e-01],
[ 1.25400000e-01, 1.85400000e-01, 4.71900000e-01],
[ 1.20700000e-01, 1.90400000e-01, 4.76200000e-01],
[ 1.16700000e-01, 1.96200000e-01, 4.77300000e-01],
[ 1.16600000e-01, 2.00400000e-01, 4.81400000e-01],
[ 1.14700000e-01, 2.05700000e-01, 4.82300000e-01],
[ 1.13700000e-01, 2.09300000e-01, 4.88800000e-01],
[ 1.09100000e-01, 2.14900000e-01, 4.90400000e-01],
[ 1.08900000e-01, 2.19000000e-01, 4.94400000e-01],
[ 1.07400000e-01, 2.23400000e-01, 4.98400000e-01],
[ 1.10800000e-01, 2.27000000e-01, 5.02200000e-01],
[ 1.09600000e-01, 2.31500000e-01, 5.05600000e-01],
[ 1.05800000e-01, 2.36700000e-01, 5.07700000e-01],
[ 1.04800000e-01, 2.40200000e-01, 5.14500000e-01],
[ 1.04700000e-01, 2.44400000e-01, 5.18200000e-01],
[ 1.06800000e-01, 2.48200000e-01, 5.22300000e-01],
[ 1.08600000e-01, 2.52100000e-01, 5.26400000e-01],
[ 1.06700000e-01, 2.56800000e-01, 5.29000000e-01],
[ 1.06100000e-01, 2.60700000e-01, 5.34600000e-01],
[ 1.05900000e-01, 2.64800000e-01, 5.38600000e-01],
[ 1.05600000e-01, 2.69000000e-01, 5.42700000e-01],
[ 9.69000000e-02, 2.76900000e-01, 5.34300000e-01],
[ 8.79000000e-02, 2.84700000e-01, 5.25100000e-01],
[ 8.32000000e-02, 2.90700000e-01, 5.21800000e-01],
[ 7.93000000e-02, 2.97200000e-01, 5.15300000e-01],
[ 6.86000000e-02, 3.04400000e-01, 5.06800000e-01],
[ 6.39000000e-02, 3.10600000e-01, 5.00600000e-01],
[ 5.86000000e-02, 3.16600000e-01, 4.95500000e-01],
[ 5.36000000e-02, 3.22700000e-01, 4.88800000e-01],
[ 3.88000000e-02, 3.29200000e-01, 4.81700000e-01],
[ 4.09000000e-02, 3.34300000e-01, 4.78600000e-01],
[ 3.45000000e-02, 3.40100000e-01, 4.72200000e-01],
[ 3.00000000e-02, 3.45800000e-01, 4.66500000e-01],
[ 2.90000000e-02, 3.50600000e-01, 4.64700000e-01],
[ 2.26000000e-02, 3.56400000e-01, 4.57800000e-01],
[ 1.54000000e-02, 3.61900000e-01, 4.52900000e-01],
[ 1.46000000e-02, 3.67000000e-01, 4.48700000e-01],
[ 1.69000000e-02, 3.71800000e-01, 4.46400000e-01],
[ 1.17000000e-02, 3.77300000e-01, 4.40000000e-01],
[ 5.50000000e-03, 3.82800000e-01, 4.33400000e-01],
[ 5.20000000e-03, 3.87300000e-01, 4.32700000e-01],
[ 8.00000000e-04, 3.92700000e-01, 4.26700000e-01],
[ 0.00000000e+00, 3.97700000e-01, 4.22000000e-01],
[ 3.00000000e-04, 4.02400000e-01, 4.20000000e-01],
[ 1.30000000e-03, 4.07200000e-01, 4.16600000e-01],
[ 0.00000000e+00, 4.13200000e-01, 4.10700000e-01],
[ 0.00000000e+00, 4.17900000e-01, 4.07100000e-01],
[ 0.00000000e+00, 4.24200000e-01, 3.97700000e-01],
[ 0.00000000e+00, 4.30200000e-01, 3.91900000e-01],
[ 0.00000000e+00, 4.34500000e-01, 3.89000000e-01],
[ 0.00000000e+00, 4.39500000e-01, 3.84900000e-01],
[ 0.00000000e+00, 4.45600000e-01, 3.77600000e-01],
[ 0.00000000e+00, 4.50800000e-01, 3.72800000e-01],
[ 0.00000000e+00, 4.56300000e-01, 3.66600000e-01],
[ 0.00000000e+00, 4.61300000e-01, 3.59700000e-01],
[ 0.00000000e+00, 4.67000000e-01, 3.54200000e-01],
[ 0.00000000e+00, 4.71600000e-01, 3.50400000e-01],
[ 0.00000000e+00, 4.76300000e-01, 3.46400000e-01],
[ 0.00000000e+00, 4.82000000e-01, 3.37500000e-01],
[ 0.00000000e+00, 4.87000000e-01, 3.33100000e-01],
[ 0.00000000e+00, 4.91800000e-01, 3.25600000e-01],
[ 0.00000000e+00, 4.96900000e-01, 3.19800000e-01],
[ 0.00000000e+00, 5.02400000e-01, 3.12600000e-01],
[ 0.00000000e+00, 5.06000000e-01, 3.10100000e-01],
[ 0.00000000e+00, 5.11800000e-01, 3.01200000e-01],
[ 0.00000000e+00, 5.16600000e-01, 2.93800000e-01],
[ 0.00000000e+00, 5.21400000e-01, 2.87100000e-01],
[ 0.00000000e+00, 5.26800000e-01, 2.81600000e-01],
[ 0.00000000e+00, 5.31500000e-01, 2.72600000e-01],
[ 0.00000000e+00, 5.35600000e-01, 2.67500000e-01],
[ 0.00000000e+00, 5.40700000e-01, 2.59700000e-01],
[ 0.00000000e+00, 5.46100000e-01, 2.55200000e-01],
[ 0.00000000e+00, 5.51900000e-01, 2.50600000e-01],
[ 0.00000000e+00, 5.56500000e-01, 2.46900000e-01],
[ 0.00000000e+00, 5.62400000e-01, 2.39600000e-01],
[ 0.00000000e+00, 5.67800000e-01, 2.36000000e-01],
[ 0.00000000e+00, 5.72700000e-01, 2.33800000e-01],
[ 0.00000000e+00, 5.77800000e-01, 2.28700000e-01],
[ 0.00000000e+00, 5.82900000e-01, 2.25000000e-01],
[ 0.00000000e+00, 5.88300000e-01, 2.18000000e-01],
[ 0.00000000e+00, 5.93100000e-01, 2.14600000e-01],
[ 0.00000000e+00, 5.99100000e-01, 2.08900000e-01],
[ 0.00000000e+00, 6.03600000e-01, 2.05600000e-01],
[ 0.00000000e+00, 6.08400000e-01, 1.99900000e-01],
[ 0.00000000e+00, 6.13800000e-01, 1.96100000e-01],
[ 0.00000000e+00, 6.18900000e-01, 1.89900000e-01],
[ 0.00000000e+00, 6.23900000e-01, 1.84800000e-01],
[ 0.00000000e+00, 6.29000000e-01, 1.75900000e-01],
[ 0.00000000e+00, 6.34500000e-01, 1.70700000e-01],
[ 0.00000000e+00, 6.38100000e-01, 1.63800000e-01],
[ 0.00000000e+00, 6.43800000e-01, 1.59200000e-01],
[ 0.00000000e+00, 6.48900000e-01, 1.51900000e-01],
[ 0.00000000e+00, 6.53600000e-01, 1.41000000e-01],
[ 0.00000000e+00, 6.59000000e-01, 1.32200000e-01],
[ 0.00000000e+00, 6.64500000e-01, 1.22200000e-01],
[ 0.00000000e+00, 6.65600000e-01, 9.09000000e-02],
[ 0.00000000e+00, 6.64400000e-01, 3.22000000e-02],
[ 3.51000000e-02, 6.66000000e-01, 0.00000000e+00],
[ 7.97000000e-02, 6.70300000e-01, 0.00000000e+00],
[ 1.12900000e-01, 6.73900000e-01, 0.00000000e+00],
[ 1.39200000e-01, 6.77600000e-01, 0.00000000e+00],
[ 1.56600000e-01, 6.81400000e-01, 0.00000000e+00],
[ 1.76500000e-01, 6.84700000e-01, 0.00000000e+00],
[ 1.89000000e-01, 6.89000000e-01, 0.00000000e+00],
[ 2.03000000e-01, 6.92800000e-01, 0.00000000e+00],
[ 2.16700000e-01, 6.96600000e-01, 0.00000000e+00],
[ 2.29900000e-01, 7.00300000e-01, 0.00000000e+00],
[ 2.39100000e-01, 7.04400000e-01, 0.00000000e+00],
[ 2.51700000e-01, 7.08100000e-01, 0.00000000e+00],
[ 2.57400000e-01, 7.12400000e-01, 0.00000000e+00],
[ 2.67900000e-01, 7.16200000e-01, 0.00000000e+00],
[ 2.79000000e-01, 7.20000000e-01, 0.00000000e+00],
[ 2.87800000e-01, 7.24000000e-01, 0.00000000e+00],
[ 2.96500000e-01, 7.28000000e-01, 0.00000000e+00],
[ 3.05200000e-01, 7.31900000e-01, 0.00000000e+00],
[ 3.10100000e-01, 7.36200000e-01, 0.00000000e+00],
[ 3.18700000e-01, 7.40200000e-01, 0.00000000e+00],
[ 3.27200000e-01, 7.44100000e-01, 0.00000000e+00],
[ 3.34500000e-01, 7.48200000e-01, 0.00000000e+00],
[ 3.40600000e-01, 7.52300000e-01, 0.00000000e+00],
[ 3.60400000e-01, 7.54900000e-01, 0.00000000e+00],
[ 3.89800000e-01, 7.56300000e-01, 0.00000000e+00],
[ 4.16900000e-01, 7.57400000e-01, 0.00000000e+00],
[ 4.46100000e-01, 7.58000000e-01, 0.00000000e+00],
[ 4.68100000e-01, 7.59400000e-01, 0.00000000e+00],
[ 4.90000000e-01, 7.61200000e-01, 0.00000000e+00],
[ 5.08900000e-01, 7.62700000e-01, 0.00000000e+00],
[ 5.30400000e-01, 7.63700000e-01, 0.00000000e+00],
[ 5.50000000e-01, 7.64900000e-01, 0.00000000e+00],
[ 5.69800000e-01, 7.66000000e-01, 0.00000000e+00],
[ 5.82500000e-01, 7.68800000e-01, 0.00000000e+00],
[ 5.99900000e-01, 7.70100000e-01, 0.00000000e+00],
[ 6.17300000e-01, 7.71300000e-01, 0.00000000e+00],
[ 6.31400000e-01, 7.73000000e-01, 0.00000000e+00],
[ 6.48700000e-01, 7.74100000e-01, 0.00000000e+00],
[ 6.63200000e-01, 7.76300000e-01, 0.00000000e+00],
[ 6.75700000e-01, 7.78200000e-01, 0.00000000e+00],
[ 6.91200000e-01, 7.79500000e-01, 0.00000000e+00],
[ 7.06100000e-01, 7.80800000e-01, 0.00000000e+00],
[ 7.22200000e-01, 7.81800000e-01, 0.00000000e+00],
[ 7.30500000e-01, 7.85200000e-01, 0.00000000e+00],
[ 7.44200000e-01, 7.86600000e-01, 0.00000000e+00],
[ 7.58000000e-01, 7.88000000e-01, 0.00000000e+00],
[ 7.70900000e-01, 7.89600000e-01, 0.00000000e+00],
[ 7.83300000e-01, 7.91500000e-01, 0.00000000e+00],
[ 7.87200000e-01, 7.89100000e-01, 9.51000000e-02],
[ 7.97200000e-01, 7.90300000e-01, 1.98800000e-01],
[ 8.07200000e-01, 7.91700000e-01, 2.56000000e-01],
[ 8.11600000e-01, 7.94900000e-01, 3.00100000e-01],
[ 8.21100000e-01, 7.96400000e-01, 3.39700000e-01],
[ 8.30800000e-01, 7.98000000e-01, 3.71000000e-01],
[ 8.35000000e-01, 8.01100000e-01, 4.02800000e-01],
[ 8.45000000e-01, 8.02600000e-01, 4.29200000e-01],
[ 8.54800000e-01, 8.04100000e-01, 4.55500000e-01],
[ 8.60200000e-01, 8.07300000e-01, 4.73500000e-01],
[ 8.65800000e-01, 8.10000000e-01, 4.99300000e-01],
[ 8.75800000e-01, 8.11600000e-01, 5.18400000e-01],
[ 8.85600000e-01, 8.13000000e-01, 5.40200000e-01],
[ 8.89500000e-01, 8.16400000e-01, 5.60200000e-01],
[ 8.99400000e-01, 8.18000000e-01, 5.77500000e-01],
[ 9.07700000e-01, 8.20200000e-01, 5.91800000e-01],
[ 9.10600000e-01, 8.24100000e-01, 6.09400000e-01],
[ 9.20500000e-01, 8.25700000e-01, 6.25300000e-01],
[ 9.28400000e-01, 8.27800000e-01, 6.42000000e-01],
[ 9.34300000e-01, 8.30700000e-01, 6.57600000e-01],
[ 9.41500000e-01, 8.32900000e-01, 6.76200000e-01],
[ 9.51200000e-01, 8.34800000e-01, 6.86800000e-01],
[ 9.54900000e-01, 8.38400000e-01, 7.02600000e-01],
[ 9.62200000e-01, 8.40800000e-01, 7.17000000e-01],
[ 9.71200000e-01, 8.42900000e-01, 7.28700000e-01],
[ 9.70800000e-01, 8.48200000e-01, 7.40900000e-01],
[ 9.71300000e-01, 8.53000000e-01, 7.55500000e-01],
[ 9.69100000e-01, 8.59100000e-01, 7.65500000e-01],
[ 9.69900000e-01, 8.64200000e-01, 7.74600000e-01],
[ 9.70300000e-01, 8.69100000e-01, 7.87100000e-01],
[ 9.71000000e-01, 8.74000000e-01, 7.99900000e-01],
[ 9.69500000e-01, 8.80000000e-01, 8.06700000e-01],
[ 9.69600000e-01, 8.85100000e-01, 8.18800000e-01],
[ 9.68600000e-01, 8.90800000e-01, 8.27800000e-01],
[ 9.68100000e-01, 8.96200000e-01, 8.37800000e-01],
[ 9.68800000e-01, 9.01300000e-01, 8.46700000e-01],
[ 9.69600000e-01, 9.06400000e-01, 8.55700000e-01],
[ 9.70300000e-01, 9.11500000e-01, 8.64700000e-01],
[ 9.70800000e-01, 9.16300000e-01, 8.77300000e-01],
[ 9.69100000e-01, 9.22400000e-01, 8.83800000e-01],
[ 9.69200000e-01, 9.27300000e-01, 8.96100000e-01],
[ 9.69900000e-01, 9.32300000e-01, 9.05100000e-01],
[ 9.69300000e-01, 9.38100000e-01, 9.10800000e-01],
[ 9.71400000e-01, 9.42500000e-01, 9.23000000e-01],
[ 9.71200000e-01, 9.47800000e-01, 9.31100000e-01],
[ 9.70000000e-01, 9.53700000e-01, 9.38100000e-01],
[ 9.70700000e-01, 9.58700000e-01, 9.47000000e-01],
[ 9.71300000e-01, 9.63800000e-01, 9.56000000e-01],
[ 9.72600000e-01, 9.68700000e-01, 9.64800000e-01],
[ 9.73800000e-01, 9.73800000e-01, 9.71100000e-01],
[ 9.78000000e-01, 9.78000000e-01, 9.78000000e-01],
[ 9.82400000e-01, 9.82400000e-01, 9.82400000e-01],
[ 9.86800000e-01, 9.86800000e-01, 9.86800000e-01],
[ 9.91200000e-01, 9.91200000e-01, 9.91200000e-01],
[ 9.95600000e-01, 9.95600000e-01, 9.95600000e-01],
[ 1.00000000e+00, 1.00000000e+00, 1.00000000e+00]])
b3 = LinL[:,2] # value of blue at sample n
b2 = LinL[:,2] # value of blue at sample n
b1 = np.linspace(0, 1, len(b2)) # position of sample n - ranges from 0 to 1
# Setting up columns for tuples
g3 = LinL[:,1]
g2 = LinL[:,1]
g1 = np.linspace(0,1,len(g2))
r3 = LinL[:,0]
r2 = LinL[:,0]
r1 = np.linspace(0,1,len(r2))
# Creating tuples
R = zip(r1,r2,r3)
G = zip(g1,g2,g3)
B = zip(b1,b2,b3)
# Transposing
RGB = zip(R,G,B)
rgb = zip(*RGB)
# Creating dictionary
k = ['red', 'green', 'blue']
specs['matteoniccoli'] = dict(zip(k,rgb))
for name in specs:
mplcm.register_cmap(name=name, data=specs[name])
mplcm.register_cmap(name=name+"_r", data=mplcm._reverse_cmap_spec(specs[name]))
self._color_maps[name] = self.get_color_map(name)
self._color_maps[name+"_r"] = self.get_color_map(name+"_r")
def _register_color_lists(self, length=default_lst):
cc = mpl.colors.ColorConverter()
self._color_lists['matplotlib'] = ['b', 'g', 'r', 'c', 'm', 'y', 'k']
self._color_lists['simple'] = ['#FF0000', '#FFD300', '#3914AF', '#00CC00']
self._color_lists['brewer'] = ['#e66101', '#fdb863', '#b2abd2', '#5e3c99']
for i in self._color_lists:
self._color_lists[i] = [cc.to_rgb(item) for item in self._color_lists[i]]
self._color_lists['cblind'] = [(0.9,0.6,0.0), (0.35,0.70,0.90), (0.00,0.60,0.50), (0.00,0.45,0.70), (0.80,0.40,0.00), (0.80,0.60,0.70)]
self._color_lists['brewer1'] = brewer2mpl.get_map('Set1', 'qualitative', length).mpl_colors
self._color_lists['brewer2'] = brewer2mpl.get_map('Set2', 'qualitative', length).mpl_colors
def color_cycle(self, name='map', cmap=None, length=None):
'''Returns the current colour cycle, creates a new one if necessary.
Parameters
----------
name : str
selector for colour cycle:
'cmap': use a colourmap to generate cycle, see http://matplotlib.org/1.2.1/examples/pylab_examples/show_colormaps.html
'DTU': uses the colours from the DTU design guide
'DTU_dark': uses the colours from the DTU design guide, darker colours first, good for presentations
'cblind': A scheme for colourblind people
'matplotlib': The standard matplotlib scheme
'simple': A simple four-colour scheme that work for greyscale and colourblind people
'brewer1' and 'brewer2': See http://colorbrewer2.org, works for colourblind and greyscale
'''
if name=='map':
if length is None: length = self.default_lst
if cmap is None: cmap = self.default_map
cmap = self.get_color_map(cmap)
clist = [cmap(i) for i in np.linspace(0.25, 0.75, length)]
else:
clist = self._color_lists[name]
if length is not None:
if length<1:
return cycle(['none'])
elif length<=len(clist):
return cycle(list(clist)[0:length])
elif length>len(clist):
self.autolog("Colour cycle is too short, cannot extend it.")
return cycle(clist)
def _show_info(self,show=True):
self.update_rc_params()
lsts = self._color_lists.keys()
l = len(lsts)
import matplotlib.pyplot as plt
line_fig = plt.figure()
xdata = np.linspace(0,6)
for i, m in enumerate(lsts):
plt.subplot(1,l,i+1)
plt.axis("off")
for j in self._color_lists[m]:
plt.plot(xdata,np.random.normal(size=len(xdata)),lw=1.5,color=j)
plt.title(m,rotation=45)
plt.tight_layout()
xdata=np.outer(np.arange(0,1,0.01),np.ones(10))
maps = [m for m in self._color_maps.keys() if not m.endswith("_r")]
l=len(maps)
map_fig = plt.figure()
for i, m in enumerate(maps):
plt.subplot(1,l,i+1)
plt.axis("off")
plt.imshow(xdata,aspect='auto',cmap=plt.get_cmap(m),origin="lower")
plt.title(m,rotation=45)
plt.tight_layout()
if show: plt.show()
#else: plt.close('all')
return line_fig,map_fig
class DtuStyle(BaseStyle):
def __init__(self):
BaseStyle.__init__(self)
def update_rc_params(self):
BaseStyle.update_rc_params(self)
self._add_to_rc_font_list('Myriad Pro, Arial, Verdana','font.sans-serif')
self._add_to_rc_font_list('Minion Pro, Times New Roman, Times','font.serif')
#mpl.rcParams['font.monospace'] = 'CMU Typewriter Text'
#mpl.rcParams['font.family'] = 'sans-serif'
#mpl.rcParams['mathtext.it'] = 'Myriad Pro:italic'
#mpl.rcParams['mathtext.bf'] = 'Myriad Pro:bold'
mpl.rcParams['font.family'] = 'serif'
#mpl.rcParams['mathtext.it'] = 'Minion Pro:italic'
#mpl.rcParams['mathtext.bf'] = 'Minion Pro:bold'
mpl.rcParams['mathtext.fontset'] = "custom" # Should be 'cm' (Computer Modern), 'stix', 'stixsans'
mpl.rcParams['mathtext.cal'] = 'serif:italic'
mpl.rcParams['mathtext.rm'] = 'serif'
mpl.rcParams['mathtext.tt'] = 'monospace'
mpl.rcParams['mathtext.it'] = 'serif:italic'
mpl.rcParams['mathtext.bf'] = 'serif:bold'
mpl.rcParams['mathtext.sf'] = 'sans'
def _mplpreamble(self):
preamble = BaseStyle._mplpreamble(self)
preamble.append(r'\usepackage{MnSymbol}')
preamble.append(r'\usepackage[lf]{MyriadPro} % Sans font')
preamble.append(r'\usepackage[lf]{MinionPro} % Serif font')
return preamble
def _register_color_maps(self):
BaseStyle._register_color_maps(self)
cc = mpl.colors.ColorConverter()
redwhite = [cc.to_rgba('#FF0000',alpha=1),cc.to_rgba('#FFFFFF',alpha=1)]
greenwhite = [cc.to_rgba('#99CC33',alpha=1),cc.to_rgba('#FFFFFF',alpha=1)]
purplewhite = [cc.to_rgba('#660099',alpha=1),cc.to_rgba('#FFFFFF',alpha=1)]
yellowwhite = [cc.to_rgba('#FFCC00',alpha=1),cc.to_rgba('#FFFFFF',alpha=1)]
# create map and register it together with reversed colours
maps = []
maps.append(LinearSegmentedColormap.from_list('DTU1', redwhite))
maps.append(LinearSegmentedColormap.from_list('DTU1_r', redwhite[::-1]))
maps.append(LinearSegmentedColormap.from_list('DTU2', greenwhite))
maps.append(LinearSegmentedColormap.from_list('DTU2_r', greenwhite[::-1]))
maps.append(LinearSegmentedColormap.from_list('DTU3', purplewhite))
maps.append(LinearSegmentedColormap.from_list('DTU3_r', purplewhite[::-1]))
maps.append(LinearSegmentedColormap.from_list('DTU4', yellowwhite))
maps.append(LinearSegmentedColormap.from_list('DTU4_r', yellowwhite[::-1]))
for cmap in maps:
mplcm.register_cmap(cmap=cmap)
self._color_maps[cmap.name] = cmap
def _register_color_lists(self, length=BaseStyle.default_lst):
BaseStyle._register_color_lists(self)
cc = mpl.colors.ColorConverter()
self._color_lists['DTU'] = [ cc.to_rgb(item) for item in ['#FF0000', '#99CC33', '#660099', '#FFCC00', '#999999', '#000000', '#33CCFF', '#3366CC', '#FF9900', '#CC3399', '#66CC00'] ]
self._color_lists['DTU_dark'] = [ cc.to_rgb(item) for item in ['#FF0000', '#660099', '#99CC33', '#3366CC', '#999999', '#FFCC00', '#000000', '#33CCFF', '#FF9900', '#CC3399', '#66CC00'] ]
class IpuStyle(BaseStyle):
    """Plot style with the IPU colour scheme (green/grey tones)."""

    default_map = "IPU"

    def __init__(self):
        BaseStyle.__init__(self)

    def update_rc_params(self):
        """Set rcParams: sans-serif fonts and a custom mathtext font set."""
        BaseStyle.update_rc_params(self)
        self._add_to_rc_font_list('Helvetica, Arial, cmbright, Verdana','font.sans-serif')
        self._add_to_rc_font_list('Times New Roman, Times','font.serif')
        self._add_to_rc_font_list('sourcecodepro','font.monospace')
        mpl.rcParams['font.family'] = 'sans-serif'
        #mpl.rcParams['font.monospace'] = 'CMU Typewriter Text'
        mpl.rcParams['mathtext.fontset'] = "custom" # Should be 'cm' (Computer Modern), 'stix', 'stixsans'
        mpl.rcParams['mathtext.cal'] = 'sans:italic'
        mpl.rcParams['mathtext.rm'] = 'sans'
        mpl.rcParams['mathtext.tt'] = 'monospace'
        mpl.rcParams['mathtext.it'] = 'sans:italic'
        mpl.rcParams['mathtext.bf'] = 'sans:bold'
        mpl.rcParams['mathtext.sf'] = 'sans'

    def _mplpreamble(self):
        """Extend the base LaTeX preamble with sans-serif font packages."""
        preamble = BaseStyle._mplpreamble(self)
        preamble.append(r'\usepackage{cmbright}')
        preamble.append(r'\usepackage{helvet}')
        #preamble.append(r'\usepackage{sansmath}')
        #preamble.append(r'\sansmath')
        #preamble.append(r'\renewcommand*{\familydefault}{\sfdefault}')
        return preamble

    def _register_color_maps(self):
        """Register the 'IPU' colormap and its reversed variant."""
        BaseStyle._register_color_maps(self)
        rgb = [
            (  0./255. ,   0./255. ,   0./255.),
            (  0./255. , 102./255. ,  51./255.),
            (114./255. , 121./255. , 126./255.),
            ( 91./255. , 172./255. ,  38./255.),
            (217./255. , 220./255. , 222./255.),
            (255./255. , 255./255. , 255./255.)
            ]
        # create map and register it together with reversed colours
        maps = []
        maps.append(LinearSegmentedColormap.from_list('IPU' , rgb))
        maps.append(LinearSegmentedColormap.from_list('IPU_r', rgb[::-1]))
        for cmap in maps:
            mplcm.register_cmap(cmap=cmap)
            self._color_maps[cmap.name] = cmap

    def _register_color_lists(self, length=BaseStyle.default_lst):
        """Register the ordered 'IPU' colour cycle (white entry omitted)."""
        BaseStyle._register_color_lists(self)
        self._color_lists['IPU'] = [
            (  0./255. ,   0./255. ,   0./255.),
            (  0./255. , 102./255. ,  51./255.),
            (114./255. , 121./255. , 126./255.),
            ( 91./255. , 172./255. ,  38./255.),
            (217./255. , 220./255. , 222./255.),
            #(255./255. , 255./255. , 255./255.)
            ]

    def color_cycle(self, name='IPU', cmap=None, length=None):
        """Return a colour cycle, defaulting to the 'IPU' list.

        Bug fix: the original passed the *builtin* ``map`` instead of the
        ``cmap`` argument through to ``BaseStyle.color_cycle``.
        """
        return BaseStyle.color_cycle(self, name, cmap, length)
class VdgStyle(BaseStyle):
    """Plot style with the VDG colour scheme (blue/green tones)."""

    default_map = "VDG"

    def __init__(self):
        BaseStyle.__init__(self)
        mpl.rcParams['lines.linewidth'] = 1.25 # line width in points

    def update_rc_params(self):
        """Set rcParams: sans-serif fonts and a custom mathtext font set."""
        BaseStyle.update_rc_params(self)
        self._add_to_rc_font_list('Helvetica, Arial, cmbright, Verdana','font.sans-serif')
        self._add_to_rc_font_list('Times New Roman, Times','font.serif')
        self._add_to_rc_font_list('sourcecodepro','font.monospace')
        mpl.rcParams['font.family'] = 'sans-serif'
        mpl.rcParams['mathtext.fontset'] = "custom" # Should be 'cm' (Computer Modern), 'stix', 'stixsans'
        mpl.rcParams['mathtext.cal'] = 'sans:italic'
        mpl.rcParams['mathtext.rm'] = 'sans'
        mpl.rcParams['mathtext.tt'] = 'monospace'
        mpl.rcParams['mathtext.it'] = 'sans:italic'
        mpl.rcParams['mathtext.bf'] = 'sans:bold'
        mpl.rcParams['mathtext.sf'] = 'sans'

    def _mplpreamble(self):
        """Extend the base LaTeX preamble with sans-serif font packages."""
        preamble = BaseStyle._mplpreamble(self)
        preamble.append(r'\usepackage{cmbright}')
        preamble.append(r'\usepackage{helvet}')
        #preamble.append(r'\usepackage{sansmath}')
        #preamble.append(r'\sansmath')
        #preamble.append(r'\renewcommand*{\familydefault}{\sfdefault}')
        return preamble

    def _register_color_maps(self):
        """Register the 'VDG' colormap and its reversed variant."""
        BaseStyle._register_color_maps(self)
        rgb = [
            ( 25./255. ,  13./255. ,  25./255.),
            (  0./255. ,  51./255. , 102./255.),
            #(205./255. ,  51./255. ,  51./255.),
            (  0./255. , 109./255. , 148./255.),
            (127./255. , 186./255. ,  50./255.),
            (255./255. , 255./255. , 255./255.)]
        # create map and register it together with reversed colours
        maps = []
        maps.append(LinearSegmentedColormap.from_list('VDG' , rgb))
        maps.append(LinearSegmentedColormap.from_list('VDG_r', rgb[::-1]))
        for cmap in maps:
            mplcm.register_cmap(cmap=cmap)
            self._color_maps[cmap.name] = cmap

    def _register_color_lists(self, length=BaseStyle.default_lst):
        """Register the ordered 'VDG' colour cycle (white entry omitted)."""
        BaseStyle._register_color_lists(self)
        self._color_lists['VDG'] = [
            (  0./255. ,  51./255. , 102./255.),
            (205./255. ,  51./255. ,  51./255.),
            (  0./255. , 109./255. , 148./255.),
            (127./255. , 186./255. ,  50./255.),
            #(255./255. , 255./255. , 255./255.)
            ]

    def color_cycle(self, name='VDG', cmap=None, length=None):
        """Return a colour cycle, defaulting to the 'VDG' list.

        Bug fix: the original passed the *builtin* ``map`` instead of the
        ``cmap`` argument through to ``BaseStyle.color_cycle``.
        """
        return BaseStyle.color_cycle(self, name, cmap, length)
|
{"/jopy/styles/__init__.py": ["/jopy/styles/plots.py", "/jopy/utils.py", "/jopy/styles/mplib.py"], "/jopy/recip/__init__.py": ["/jopy/recip/mechanisms.py"], "/jopy/styles/mplib.py": ["/jopy/base.py"], "/test/test_thermo/__init__.py": ["/jopy/thermo/__init__.py"], "/jopy/thermo/__init__.py": ["/jopy/utils.py", "/jopy/thermo/utils.py"], "/test/test_jopy.py": ["/jopy/recip/mechanisms.py", "/jopy/styles/mplib.py", "/jopy/utils.py"], "/jopy/recip/mechanisms.py": ["/jopy/base.py"]}
|
6,321
|
jowr/jopy
|
refs/heads/master
|
/jopy/data/sources.py
|
# -*- coding: utf-8 -*-
from __future__ import print_function, division
import os
from blaze.interactive import Data
def get_sqlite_handle(path):
    """Gets a blaze object for an sqlite database
    Does not simplify things, but helps me remember to use blaze more
    Parameters
    ----------
    path : str
        The actual path to the sqlite file
    Returns
    -------
    blaze.interactive.Data
        The database handle or None
    """
    full_path = os.path.abspath(path)
    # Guard clause: report and bail out when the file does not exist.
    if not os.path.isfile(full_path):
        print("Could not find "+path+".")
        return None
    #full_path = pathname2url(full_path)
    return Data('sqlite:///'+full_path) # A SQLite database
|
{"/jopy/styles/__init__.py": ["/jopy/styles/plots.py", "/jopy/utils.py", "/jopy/styles/mplib.py"], "/jopy/recip/__init__.py": ["/jopy/recip/mechanisms.py"], "/jopy/styles/mplib.py": ["/jopy/base.py"], "/test/test_thermo/__init__.py": ["/jopy/thermo/__init__.py"], "/jopy/thermo/__init__.py": ["/jopy/utils.py", "/jopy/thermo/utils.py"], "/test/test_jopy.py": ["/jopy/recip/mechanisms.py", "/jopy/styles/mplib.py", "/jopy/utils.py"], "/jopy/recip/mechanisms.py": ["/jopy/base.py"]}
|
6,322
|
jowr/jopy
|
refs/heads/master
|
/jopy/styles/plots.py
|
# -*- coding: utf-8 -*-
from __future__ import print_function, division
import matplotlib
import matplotlib.pyplot as plt
import copy
class Figure(matplotlib.figure.Figure):
    """``matplotlib.figure.Figure`` subclass with convenience helpers for
    legend customisation and exporting to files.
    """
    def _get_axis(self,**kwargs):
        # Return a single axis: the one given as ``ax`` or the first resolved one.
        ax = kwargs.pop('ax', self._get_axes()[0])
        return ax
    def _get_axes(self,**kwargs):
        # Return a list of axes taken from ``ax``/``axs`` kwargs; fall back to
        # the parent class lookup and finally to ``plt.gca()``.
        ax = kwargs.pop('ax', [])
        ax = kwargs.pop('axs', ax)
        if ax is None or len(ax)<1:
            # NOTE(review): relies on a parent ``_get_axes``; if matplotlib does
            # not provide it, the bare except silently falls back to plt.gca().
            try: ax = super(Figure, self)._get_axes()
            except: ax = [plt.gca()]; pass
        return ax
    def get_legend_handles_labels_axis(self,ax=None,axs=None):
        """Extracts the handles and labels from an axis or from a list of axes.
        Useful for manual legend processing and customisation.
        Returns the combined handles, the combined labels and the first axis.
        """
        ax = self._get_axes(ax=ax,axs=axs)
        handles = []; labels = []
        for a in ax:
            handlestmp, labelstmp = a.get_legend_handles_labels()
            handles.extend(handlestmp)
            labels.extend(labelstmp)
        return handles, labels, ax[0]
    def draw_legend(self, **kwargs):
        """Puts a legend on the provided axis.
        Can be used with kwargs like ncol=2 and alike, which are passed
        on to the corrresponding pyplot routines.
        Handles are copied so the original artists keep their alpha values.
        """
        # Accept both spellings; default to the rcParams text colour.
        tc = kwargs.pop('textcolour', matplotlib.rcParams["text.color"])
        tc = kwargs.pop('textcolor', tc)
        #kwargs.setdefault('loc', 0)
        #kwargs.setdefault('frameon', True)
        h, l, a = self.get_legend_handles_labels_axis(ax=kwargs.pop('ax', None),axs=kwargs.pop('axs', None))
        #handles = copy.copy(kwargs.pop('handles', handles))
        # Copy each handle and force it opaque inside the legend only.
        handles = []
        for h in kwargs.pop('handles', h):
            handles.append(copy.copy(h))
            handles[-1].set_alpha(1.0)
        labels = []
        for l in kwargs.pop('labels', l):
            labels.append(copy.copy(l))
        legend = a.legend(handles,labels,**kwargs)
        try:
            # Style the legend frame to match the grid colour.
            rect = legend.get_frame()
            rect.set_facecolor(matplotlib.rcParams["grid.color"])
            rect.set_linewidth(0)
            rect.set_edgecolor(tc)
            # Change the alpha value, make sure it is visible
            def set_alpha(objList):
                for o in objList:
                    try: o.set_alpha(1.0)
                    except: matplotlib.artist.setp(o, alpha=1.0); pass
                    #mpl.artist.setp(o, markersize=6)
                    #mpl.artist.setp(o, alpha=np.max([1.0,o.get_alpha()]))
            # h.set_alpha(np.max([1.0,h.get_alpha()]))
            # #mpl.artist.setp(h, alpha=np.max([1.0,h.get_alpha()]))
            # mpl.artist.setp(h, markersize=6)
            set_alpha(legend.legendHandles)
            set_alpha(legend.get_lines())
            set_alpha(legend.get_patches())
            #
            #for h in legend.legendHandles:
            # h.set_alpha(np.max([1.0,h.get_alpha()]))
            # #mpl.artist.setp(h, alpha=np.max([1.0,h.get_alpha()]))
            # mpl.artist.setp(h, markersize=6)
            # Change the legend label colors to almost black, too
            for t in legend.texts:
                t.set_color(tc)
        except AttributeError:
            # There are no labled objects
            pass
        return legend
    def to_file(self, name, **kwargs):
        # Save with tight bounding box by default; kwargs override.
        dic = dict(bbox_inches='tight')
        dic.update(**kwargs)
        self.savefig(name, **dic)
    def to_raster(self, name, **kwargs):
        # Save as a 300 dpi raster image; only png/jpg are accepted.
        dic = dict(dpi=300)
        dic.update(**kwargs)
        if name.endswith(".png") or name.endswith(".jpg"):
            self.to_file(name, **dic)
        else:
            raise ValueError("You can only save jpg and png images as raster images.")
    def to_power_point(self, name, **kwargs):
        # High-resolution transparent png suitable for slide decks.
        dic = dict(dpi=600, transparent=True)
        dic.update(**kwargs)
        if name.endswith(".png"):
            self.to_raster(name, **dic)
        else:
            raise ValueError("You should use png images with MS PowerPoint.")
|
{"/jopy/styles/__init__.py": ["/jopy/styles/plots.py", "/jopy/utils.py", "/jopy/styles/mplib.py"], "/jopy/recip/__init__.py": ["/jopy/recip/mechanisms.py"], "/jopy/styles/mplib.py": ["/jopy/base.py"], "/test/test_thermo/__init__.py": ["/jopy/thermo/__init__.py"], "/jopy/thermo/__init__.py": ["/jopy/utils.py", "/jopy/thermo/utils.py"], "/test/test_jopy.py": ["/jopy/recip/mechanisms.py", "/jopy/styles/mplib.py", "/jopy/utils.py"], "/jopy/recip/mechanisms.py": ["/jopy/base.py"]}
|
6,323
|
jowr/jopy
|
refs/heads/master
|
/test/test_thermo/__init__.py
|
# -*- coding: utf-8 -*-
from __future__ import print_function, division
import jopy.thermo
class TestUtils(object):
    """Tests for :py:func:`jopy.thermo.lmtd`."""

    @classmethod
    def setup_class(cls):
        pass

    def test_lmtd(self):
        # Zero differences on both sides give a zero LMTD.
        res = jopy.thermo.lmtd(0.0, 0.0)
        assert res == 0.0
        # Equal differences fall inside the dead band and return their mean.
        # Bug fix: the original only printed this value instead of asserting it.
        res = jopy.thermo.lmtd(10.0, 10.0)
        print(res,10.0)
        assert abs(res - 10.0) < 1e-9

    @classmethod
    def teardown_class(cls):
        pass
|
{"/jopy/styles/__init__.py": ["/jopy/styles/plots.py", "/jopy/utils.py", "/jopy/styles/mplib.py"], "/jopy/recip/__init__.py": ["/jopy/recip/mechanisms.py"], "/jopy/styles/mplib.py": ["/jopy/base.py"], "/test/test_thermo/__init__.py": ["/jopy/thermo/__init__.py"], "/jopy/thermo/__init__.py": ["/jopy/utils.py", "/jopy/thermo/utils.py"], "/test/test_jopy.py": ["/jopy/recip/mechanisms.py", "/jopy/styles/mplib.py", "/jopy/utils.py"], "/jopy/recip/mechanisms.py": ["/jopy/base.py"]}
|
6,324
|
jowr/jopy
|
refs/heads/master
|
/jopy/thermo/utils.py
|
# -*- coding: utf-8 -*-
from __future__ import print_function, division
import numpy as np
import warnings
class UnitError(ValueError):
    """Raised when a physical quantity lies outside its valid range."""
    pass
def check_T(T):
    """Validate a temperature given in Kelvin.

    Parameters
    ----------
    T : float or numpy.array in Kelvin [K]

    Returns
    -------
    bool
        True when the value is acceptable.

    Raises
    ------
    UnitError
        For negative or implausibly high temperatures.
    """
    # Hard limits raise, soft limits only warn.
    if np.any(T < 0.0):
        raise UnitError("negative temperature: " + str(T))
    if np.any(T > 1e4):
        raise UnitError("too high temperature: " + str(T))
    if np.any(T < 250.0):
        warnings.warn("very low temperature: " + str(T))
    if np.any(T > 1.0e3):
        warnings.warn("very high temperature: " + str(T))
    return True
def check_p(p):
    """Validate a pressure given in Pascal.

    Parameters
    ----------
    p : float or numpy.array in Pascal [Pa]

    Returns
    -------
    bool
        True when the value is acceptable.

    Raises
    ------
    UnitError
        For negative or implausibly high pressures.
    """
    # Hard limits raise, soft limits only warn.
    if np.any(p < 0.0):
        raise UnitError("negative pressure: " + str(p))
    if np.any(p > 1e8):
        raise UnitError("too high pressure: " + str(p))
    if np.any(p < 100.0):
        warnings.warn("very low pressure: " + str(p))
    if np.any(p > 1.0e7):
        warnings.warn("very high pressure: " + str(p))
    return True
|
{"/jopy/styles/__init__.py": ["/jopy/styles/plots.py", "/jopy/utils.py", "/jopy/styles/mplib.py"], "/jopy/recip/__init__.py": ["/jopy/recip/mechanisms.py"], "/jopy/styles/mplib.py": ["/jopy/base.py"], "/test/test_thermo/__init__.py": ["/jopy/thermo/__init__.py"], "/jopy/thermo/__init__.py": ["/jopy/utils.py", "/jopy/thermo/utils.py"], "/test/test_jopy.py": ["/jopy/recip/mechanisms.py", "/jopy/styles/mplib.py", "/jopy/utils.py"], "/jopy/recip/mechanisms.py": ["/jopy/base.py"]}
|
6,325
|
jowr/jopy
|
refs/heads/master
|
/jopy/utils.py
|
# -*- coding: utf-8 -*-
from __future__ import print_function, division
import numpy as np
from numpy import pi,e
def module_class_dict(mod):
    """Map class names to class objects for classes defined in *mod*.

    Parameters
    ----------
    mod : Python module
        The module to extract the classes from.

    Returns
    -------
    dict
        ``{name: class}`` for classes whose repr mentions the module name,
        i.e. classes defined in the module rather than merely imported.
    """
    # str(mod) looks like "<module 'name' ...>"; grab the quoted name.
    mod_name = str(mod).split("'")[1]
    return {name: cls
            for name, cls in mod.__dict__.items()
            if isinstance(cls, type) and mod_name in str(cls)}
# def module_class_dict(mod):
# """
# Returns a list of names of the abstract base
# classes that should not be instantiated. Can
# be used to build an ignore list.
# """
# mod_name = str(mod).split("'")[1]
# print(mod_name)
# ignList = {}
# for i in inspect.getmembers(mod):
# if inspect.isclass(i[1]):
# ignList[i[0]] = i[1]
# return ignList
#dict([(name, cls) for name, cls in mod.__dict__.items() if isinstance(cls, type)])
def transition_factor(start=0.25, stop=0.75, position=0.5, order=2):
    """Weighting factor for smooth transition (from 0 to 1)
    This function returns a value between 0 and 1. A smooth transition
    is achieved by means of defining the position and the transition
    interval from start to stop parameter. Outside this interval,
    the 0 and the 1 are returned, respectively. This transition function
    with up to two smooth derivatives was taken from [1]. If you provide
    an order higher than 2, the generalised logistic function [2] will be
    used to calculated the transition curve.
    Parameters
    ----------
    start : float
        start of transition interval; default 0.25
    stop : float
        end of transition interval; default 0.75
    position : float
        current position; default 0.5
    order : integer
        Smooth up to which derivative?; default 2
    Returns
    -------
    float
        smooth transition between 0 and 1 from start to stop [-]
    Use tFactor in an equation like this:
    tFactor = transition_factor(start=start,stop=stop,position=position);
    smoothed = tFactor*1stValue + (1 - tFactor)*2ndValue;
    References
    ----------
    [1] Christoph C Richter, Proposal of New Object-Oriented Equation-Based Model
    Libraries for Thermodynamic Systems, PhD thesis, Technical University
    Carolo-Wilhelmina Braunschweig, 2008
    [2] Generalised logistic function on http://en.wikipedia.org/wiki/Generalised_logistic_function
    """
    a_map = [-1./2., -2./pi, -3./4., -8./pi] #First parameters
    b_map = [ 1./2., 1./2., 1./2., 1./2.] #Second parameters
    #Rename variables to match with Richter2008, p.68ff
    phi = 0.0 #"current phase";
    a = 0.0 # "multiplier";
    b = 0.0 # "addition";
    x_t = 0.0 # "Start of transition";
    x = 0.0 # "Current position";
    DELTAx = 0.0 # "Length of transition";
    #Parameters for generalised logistic function
    A = 0. #"Lower asymptote";
    K = 1. #"Upper asymptote";
    B = 8. #"Growth rate";
    nu= 1. #"Symmetry changes";
    Q = nu #"Zero correction";
    M = nu #"Maximum growth for Q = nu";
    X = 0.
    END = 0.
    START = 0.
    factor = 0.
    order = int(order)
    if order < 0:
        raise ValueError("This function only supports positive values for the order of smooth derivatives.")
    # Accept a reversed interval by swapping; remember the swap so the
    # result can be mirrored before returning.
    swapper = None
    if start>stop:
        swapper = start
        start = stop
        stop = swapper
        #raise Exception("There is only support for positive differences, please provide start < stop.")
    position = np.array(position)
    res = np.zeros_like(position)
    # Saturate outside the interval. Note that res is inverted (1-res)
    # on return for the non-swapped case, so 1.0 here means factor 0.
    res[position < start] = 1.0
    res[position > stop ] = 0.0
    # Only positions inside the interval need the smooth evaluation.
    theMask = (position >= start) & (position <= stop)
    position = position[theMask]
    #0th to 2nd order
    if order <= 2:
        a = a_map[order];
        b = b_map[order];
        x = position;
        DELTAx = stop - start;
        x_t = start + 0.5*DELTAx;
        # Map the interval onto a phase angle in [-pi/2, pi/2].
        phi = (x - x_t) / DELTAx * pi;
    else: #higher order
        #We need to do some arbitrary scaling:
        END = 4.0
        START = -2.0
        factor= (END-START) / (stop-start)
        X = START + (position - start) * factor
    resTMP = np.zeros_like(position)
    if (order == 0):
        for i in range(len(position)):
            resTMP[i] = a * np.sin(phi[i]) + b
    elif (order == 1):
        for i in range(len(position)):
            resTMP[i] = a * ( 1./2. * np.cos(phi[i]) * np.sin(phi[i]) + 1./2.*phi[i]) + b
    elif (order == 2):
        for i in range(len(position)):
            resTMP[i] = a * ( 1./3. * np.cos(phi[i])**2. * np.sin(phi[i]) + 2./3. * np.sin(phi[i])) + b
    else:
        # Generalised logistic function evaluated on the rescaled X.
        for i in range(len(position)):
            resTMP[i] = 1. - (A + (K-A) / np.power( 1. + Q * np.power(e,(-B*(X[i] - M))),1./nu))
    res[theMask] = resTMP
    if swapper is None: return 1-res
    else: return res
def transition_factor_alt(center = 0.5, length = 1.0, position = 0.0, order = 2):
    """Alternative parametrisation of :py:func:`.transition_factor`.

    The transition window is given by its centre and total length rather
    than by explicit start/stop values.
    """
    start = center - 0.5 * length
    stop = center + 0.5 * length
    return transition_factor(start=start, stop=stop, position=position, order=order)
|
{"/jopy/styles/__init__.py": ["/jopy/styles/plots.py", "/jopy/utils.py", "/jopy/styles/mplib.py"], "/jopy/recip/__init__.py": ["/jopy/recip/mechanisms.py"], "/jopy/styles/mplib.py": ["/jopy/base.py"], "/test/test_thermo/__init__.py": ["/jopy/thermo/__init__.py"], "/jopy/thermo/__init__.py": ["/jopy/utils.py", "/jopy/thermo/utils.py"], "/test/test_jopy.py": ["/jopy/recip/mechanisms.py", "/jopy/styles/mplib.py", "/jopy/utils.py"], "/jopy/recip/mechanisms.py": ["/jopy/base.py"]}
|
6,326
|
jowr/jopy
|
refs/heads/master
|
/jopy/thermo/__init__.py
|
# -*- coding: utf-8 -*-
from __future__ import print_function, division
import matplotlib.pyplot as plt
import numpy as np
from ..utils import transition_factor
from .utils import check_T
def eta_carnot(T_cold, T_hot):
    """Carnot efficiency for the given reservoir temperatures in Kelvin."""
    # Both temperatures are validated (raises UnitError if implausible).
    for temperature in (T_cold, T_hot):
        check_T(temperature)
    return 1. - T_cold / T_hot
def __lmtd(Delta_T1, Delta_T2):
    """Textbook logarithmic mean temperature difference (no safeguards)."""
    ratio = Delta_T1 / Delta_T2
    return (Delta_T1 - Delta_T2) / np.log(ratio)
def _lmtd_libpf(Delta_T1, Delta_T2, deadBand=0.1):
    """Logarithmic mean temperature difference
    This function returns the logarithmic mean temperature
    difference as calculated from temperature differences at
    both end. It is a relatively robust solution that also
    is used in libpf for process simulations.
    Parameters
    ----------
    Delta_T1 : scalar or array_like
        Temperature difference on one side of the heat exchanger
    Delta_T2 : scalar or array_like
        Temperature difference on the other side of the heat exchanger
    deadBand : scalar or array_like
        Absolute temperature difference that is considered to be 0.0 K
    Returns
    -------
    scalar or array_like
        The calculated logarithmic mean temperature difference
    """
    # Bug fix: the "equal differences" test used a hard-coded 0.1 instead
    # of the deadBand parameter (behaviour unchanged for the default).
    same = np.abs(Delta_T1-Delta_T2) <= deadBand
    if (np.abs(Delta_T1) <= deadBand) or same: # small first value
        if (np.abs(Delta_T2) <= deadBand) or same: # small second value
            LMTD = (Delta_T1 + Delta_T2) / 2.0
        else: # normal second value
            Delta_T1clip = deadBand*Delta_T2 / np.abs(Delta_T2) #{absolute value is set to deadBand, sign is the same as Delta_T2}
            LMTD = ((Delta_T1clip - Delta_T2) / (Delta_T1 - Delta_T2)) * __lmtd(Delta_T1clip,Delta_T2)
    else:
        if (np.abs(Delta_T2) <= deadBand):
            Delta_T2clip = deadBand*Delta_T1/np.abs(Delta_T1)
            LMTD = ((Delta_T1 - Delta_T2clip) / (Delta_T1 - Delta_T2)) * __lmtd(Delta_T1,Delta_T2clip)
        else:
            if ((Delta_T1 * Delta_T2) <= 0.0): # The deltas have different signs
                if (np.abs(Delta_T1) <= np.abs(Delta_T2)): # smaller first value
                    Delta_T1clip = deadBand*Delta_T2/np.abs(Delta_T2)
                    LMTD = ((Delta_T1clip - Delta_T2) / (Delta_T1 - Delta_T2)) * __lmtd(Delta_T1clip,Delta_T2)
                else: # smaller second value
                    Delta_T2clip = deadBand*Delta_T1/np.abs(Delta_T1)
                    LMTD = ((Delta_T1 - Delta_T2clip) / (Delta_T1 - Delta_T2)) * __lmtd(Delta_T1,Delta_T2clip)
            else:
                LMTD = __lmtd(Delta_T1,Delta_T2)
    return LMTD
def _lmtd_jopy(Delta_T1, Delta_T2, dead_band=0.1):
    """Logarithmic mean temperature difference
    This function returns the logarithmic mean temperature
    difference as calculated from temperature differences at
    both end.
    Parameters
    ----------
    Delta_T1 : scalar or array_like
        Temperature difference on one side of the heat exchanger
    Delta_T2 : scalar or array_like
        Temperature difference on the other side of the heat exchanger
    dead_band : scalar or array_like
        Absolute temperature difference that is considered to be 0.0 K
    Returns
    -------
    scalar or array_like
        The calculated logarithmic mean temperature difference
    """
    # Bug fix: work on float *copies*. np.asarray can return a view of the
    # caller's ndarray, so the clipping below mutated the caller's data;
    # a float dtype also avoids silent truncation for integer inputs.
    Delta_T1 = np.array(Delta_T1, dtype=float)
    Delta_T2 = np.array(Delta_T2, dtype=float)
    res = np.empty_like(Delta_T1)
    # Clip the smaller of the two differences away from zero (dead_band),
    # keeping the sign of the larger difference.
    mask = np.abs(Delta_T1) < np.abs(Delta_T2)
    res[mask] = np.sign(Delta_T2[mask])
    Delta_T1[mask] = res[mask] * np.maximum(res[mask] * Delta_T1[mask], dead_band)
    mask = np.logical_not(mask)
    res[mask] = np.sign(Delta_T1[mask])
    Delta_T2[mask] = res[mask] * np.maximum(res[mask] * Delta_T2[mask], dead_band)
    # Now the inputs are sanitised
    # Nearly equal differences would make the log formula singular; use
    # the arithmetic mean instead.
    mask = np.abs(Delta_T1-Delta_T2) <= dead_band
    res[mask] = (Delta_T1[mask] + Delta_T2[mask]) / 2.0
    #res[mask] = transition_factor(-dead_band, dead_band, Delta_T1-Delta_T2[mask])
    mask = np.logical_not(mask)
    res[mask] = __lmtd(Delta_T1[mask], Delta_T2[mask])
    return res
def lmtd(Delta_T1, Delta_T2, dead_band=0.1):
    """Logarithmic mean temperature difference
    This function returns the logarithmic mean temperature
    difference as calculated from temperature differences at
    both end.
    Parameters
    ----------
    Delta_T1 : scalar or array_like
        Temperature difference on one side of the heat exchanger
    Delta_T2 : scalar or array_like
        Temperature difference on the other side of the heat exchanger
    dead_band : scalar or array_like
        Absolute temperature difference that is considered to be the minimum
    Returns
    -------
    array_like
        The calculated logarithmic mean temperature difference
    Please see :py:func:`._lmtd_jopy` for more documentation.
    """
    # Delegate to the vectorised numpy implementation; the commented line
    # is the alternative scalar libpf variant wrapped with np.vectorize.
    return _lmtd_jopy(Delta_T1, Delta_T2, dead_band)
    #return np.vectorize(_lmtd_libpf)(Delta_T1, Delta_T2, dead_band)
if __name__ == "__main__":
    # Visual smoke test: LMTD of a sweep of differences against a constant 10 K.
    var = np.linspace(-20, 20)
    con = np.zeros_like(var)+10
    plt.plot(var,lmtd(var,con),'o')
    plt.show()
    #print(np.vectorize(_lmtd_libpf)([0,1,5,10,10,10,10],[10,10,10,10,5,1,0]))
    #print(np.vectorize(_lmtd_libpf)([-0,-1,-5,-10,-10,-10,-10],[10,10,10,10,5,1,0]))
    #res = _lmtd_libpf(10.0, 10.0)
    #print(res,10.0)
|
{"/jopy/styles/__init__.py": ["/jopy/styles/plots.py", "/jopy/utils.py", "/jopy/styles/mplib.py"], "/jopy/recip/__init__.py": ["/jopy/recip/mechanisms.py"], "/jopy/styles/mplib.py": ["/jopy/base.py"], "/test/test_thermo/__init__.py": ["/jopy/thermo/__init__.py"], "/jopy/thermo/__init__.py": ["/jopy/utils.py", "/jopy/thermo/utils.py"], "/test/test_jopy.py": ["/jopy/recip/mechanisms.py", "/jopy/styles/mplib.py", "/jopy/utils.py"], "/jopy/recip/mechanisms.py": ["/jopy/base.py"]}
|
6,327
|
jowr/jopy
|
refs/heads/master
|
/jopy/base.py
|
# -*- coding: utf-8 -*-
from __future__ import print_function, division
class JopyBaseClass(object):
    """Common ancestor of all jopy objects.

    Provides a ``DEBUG`` flag plus a centralised logging helper that
    prepends the calling function's name and location to each message.
    """

    def __init__(self):
        self.DEBUG = False

    @property
    def DEBUG(self):
        # Debug flag; when True, autolog also echoes messages to stdout.
        return self._DEBUG

    @DEBUG.setter
    def DEBUG(self, value):
        self._DEBUG = value

    @DEBUG.deleter
    def DEBUG(self):
        del self._DEBUG

    def autolog(self, message):
        """Centralised logging facility.

        Writes *message* to the debug log, automatically annotated with
        the calling function's name, file and first line number. When
        ``self.DEBUG`` is set, the message is also printed.

        Parameters
        ----------
        message : str
            message to log
        """
        import inspect, logging
        # Inspect one frame up the stack: the caller, not autolog itself.
        caller = inspect.currentframe().f_back.f_code
        formatted = "%s: %s in %s:%i" % (
            message,
            caller.co_name,
            caller.co_filename,
            caller.co_firstlineno
        )
        # Dump the annotated message to the log.
        logging.debug(formatted)
        if self.DEBUG:
            print(formatted)
|
{"/jopy/styles/__init__.py": ["/jopy/styles/plots.py", "/jopy/utils.py", "/jopy/styles/mplib.py"], "/jopy/recip/__init__.py": ["/jopy/recip/mechanisms.py"], "/jopy/styles/mplib.py": ["/jopy/base.py"], "/test/test_thermo/__init__.py": ["/jopy/thermo/__init__.py"], "/jopy/thermo/__init__.py": ["/jopy/utils.py", "/jopy/thermo/utils.py"], "/test/test_jopy.py": ["/jopy/recip/mechanisms.py", "/jopy/styles/mplib.py", "/jopy/utils.py"], "/jopy/recip/mechanisms.py": ["/jopy/base.py"]}
|
6,328
|
jowr/jopy
|
refs/heads/master
|
/dev/builder.py
|
import jinja2
import os
import requests
import json
from distutils.version import LooseVersion #, StrictVersion
import codecs
import datetime
from jinja2.environment import Environment
import subprocess
import sys
#import conda_api
#import sys
# Paths: templates live next to this script; output goes to the repo root.
template_dir = os.path.dirname(os.path.abspath(__file__))
target_dir = os.path.abspath(os.path.join(os.path.dirname(__file__),'..'))
docs_dir = os.path.abspath(os.path.join(os.path.dirname(__file__),'..','docs'))
# Package metadata injected into all rendered templates.
author = 'Jorrit Wronski'
email = 'jopy@jorrit.org'
# Header prepended to every generated file:
# {0} = comment prefix + timestamp, {1} = originating template name.
tpl_first_line = "{0} CAUTION: This file is automatically generated from {1}, do not edit it manually\n"
mtime = datetime.datetime.fromtimestamp(os.path.getmtime(__file__)).strftime('%Y-%m-%d %H:%M')
# Package lists: conda tooling basics, the custom scientific stack,
# pip-only extras and the development supersets built from them.
bas_pkgs = ["conda-env", "conda-build", "binstar", "binstar-build", "jinja2"]
cus_pkgs = ["numpy", "scipy", "matplotlib", "pandas", "blaze"]
pip_cus_pkgs = ["coolprop", "texttable", "brewer2mpl","future"]
dev_pkgs = cus_pkgs + ["pip", "pyyaml", "nose", "sphinx", "jinja2"]
pip_dev_pkgs = pip_cus_pkgs + ["coveralls", "nose-cov", "codecov", "tox"]
# Platform-specific conda environment activation command.
if sys.platform.lower().startswith('win'):
    #conda_api.set_root_prefix(r'C:\Miniconda3')
    activa = r'C:\Miniconda3\Scripts\activate.bat'
    source = ''
else:
    #conda_api.set_root_prefix(r'/opt/miniconda')
    activa = r'source /opt/miniconda/bin/activate'
    source = 'source'
#print(str(conda_api.get_conda_version()))
#print(str(conda_api.search(spec='ipython')))
def run_command(cmd, **kwargs):
    '''Run *cmd* in a subprocess and return its (stdout, stderr) tuple.

    stdout, stderr and stdin default to PIPE; keyword arguments override
    these defaults and are passed through to ``subprocess.Popen``.
    '''
    kw = dict(stdout=subprocess.PIPE,
              stderr=subprocess.PIPE,
              stdin=subprocess.PIPE)
    kw.update(**kwargs)
    # Bug fix: the merged ``kw`` dict was built but ``kwargs`` was passed
    # to Popen, silently discarding the PIPE defaults.
    return subprocess.Popen(cmd, **kw).communicate()
def find_packages(full):
    """Return the subset of *full* that is available as a conda package.

    Each (lower-cased) name is probed with ``conda search``; a name whose
    first result line starts with the package name counts as available.
    The original also accumulated a never-returned pip list (dead code,
    removed here).
    """
    con_pkgs = []
    for pkg in [f.lower() for f in full]:
        res = run_command(['conda','search',pkg])[0].decode("utf-8").splitlines()
        if len(res) >= 2:
            fields = res[1].split()
            if len(fields) > 0 and fields[0] == pkg:
                print("Found {0} on binstar.".format(fields[0]))
                con_pkgs.append(pkg)
    return con_pkgs
# Experimental dependency-resolution pass: create a throwaway conda env,
# pip-install everything, and sort packages into conda vs pip sources.
# Disabled by default; set to an env name string (e.g. 'tmpEnv') to enable.
tmp_env = False #'tmpEnv'
if tmp_env:
    all_dev_pkgs = list(dev_pkgs + pip_dev_pkgs)
    con_dev_pkgs = list(find_packages(all_dev_pkgs))
    pip_dev_pkgs = list(set(all_dev_pkgs).difference(con_dev_pkgs))
    all_cus_pkgs = list(cus_pkgs + pip_cus_pkgs)
    con_cus_pkgs = list(set(all_cus_pkgs).intersection(con_dev_pkgs))
    pip_cus_pkgs = list(set(all_cus_pkgs).difference(con_dev_pkgs))
    new_dev_pkgs = []
    print(pip_dev_pkgs)
    print(run_command(['conda','create','-yn',tmp_env,'pip','setuptools'])[0].decode("utf-8"))
    print(run_command([activa,tmp_env,'&','pip','install']+list(pip_dev_pkgs))[0].decode("utf-8"))
    # Parse `pip freeze` output ("name==version") to find transitive deps.
    for res in run_command([activa,tmp_env,'&','pip','freeze'])[0].decode("utf-8").splitlines():
        re = res.split('==')
        if len(re)==2:
            #print("New dependency: "+re[0])
            new_dev_pkgs.append(re[0].lower())
    all_new_pkgs = list(set(new_dev_pkgs).difference(all_dev_pkgs))
    con_new_pkgs = list(find_packages(all_new_pkgs))
    pip_new_pkgs = list(set(all_new_pkgs).difference(con_new_pkgs))
    print("cus conda:"+' '.join(con_cus_pkgs))
    print("dev conda:"+' '.join(con_dev_pkgs))
    print("new conda:"+' '.join(con_new_pkgs))
    print(" cus pip:"+' '.join(pip_cus_pkgs))
    print(" dev pip:"+' '.join(pip_dev_pkgs))
    print(" new pip:"+' '.join(pip_new_pkgs))
    # Clean up the temporary environment again.
    print(run_command(['conda','remove','-y','--all',tmp_env])[0].decode("utf-8"))
# #{% if upload %}binstar login{% endif %}
# tpl_string_start = """
# conda create -n {{ env }} pip setuptools binstar
# {{ source }}activate {{ env }}
# {% if upload %}conda config --set binstar_upload yes{% endif %}
# {% if not upload %}conda config --set binstar_upload no{% endif %}
# {% for pkg in pip_pkgs %}
# conda skeleton pypi {{ pkg }}
# conda build --python all -c https://conda.binstar.org/jowr {{ pkg }}
# """
# tpl_string_stop = """
# {% endfor %}
# {{ source }}deactivate
# conda remove --all {{ env }}
# conda config --set binstar_upload no
# exit 0
# """
# tpl_string_cent = """
# {% if upload %}binstar upload `conda build --output --python all {{ pkg }}`{% endif %}
# """
#{% if upload %}binstar login{% endif %}
# Jinja template for the repo-maintenance shell/batch scripts: builds the
# pip-only packages as conda packages inside a temporary environment.
tpl_string = """
conda create -n {{ env }} pip setuptools binstar
{{ source }}activate {{ env }}
{% if upload %}conda config --set binstar_upload yes{% endif %}
{% if not upload %}conda config --set binstar_upload no{% endif %}
{% for pkg in pip_pkgs %}
conda skeleton pypi {{ pkg }}
conda build --python all -c https://conda.binstar.org/jowr {{ pkg }}
{% endfor %}
{{ source }}deactivate
conda remove --all {{ env }}
conda config --set binstar_upload no
exit 0
"""
#binstar login
#conda skeleton pypi somepypipackage
#conda build somepypipackage
#binstar upload somepypipackage
#conda config --add channels jowr
#conda install --use-local option
upload = True
# Render the bash flavour of the maintenance script.
target = 'maintain_repo.bsh'
template =Environment().from_string(tpl_string)
f = codecs.open(os.path.join(target_dir,target),mode='wb',encoding='utf-8')
f.write(tpl_first_line.format("#!/bin/bash\n# "+mtime,'builder.py'))
# NOTE(review): pip_new_pkgs is only defined inside the "if tmp_env:" block
# above; with tmp_env = False this line raises NameError -- confirm intent.
f.write(template.render(env="tmpEnv",source="source ", upload=upload, pip_pkgs=pip_new_pkgs+pip_dev_pkgs))
f.close()
# tpl_string_cent = """
# set nameValue=
# for /f "delims=" %%a in ('conda build --output --python all {{ pkg }}') do @set nameValue=%%a
# {% if upload %}binstar upload %nameValue% {% endif %}
# """
# Render the Windows batch flavour from the same template.
target = 'maintain_repo.bat'
template =Environment().from_string(tpl_string)
f = codecs.open(os.path.join(target_dir,target),mode='wb',encoding='utf-8')
f.write(tpl_first_line.format(":: "+mtime,'builder.py'))
f.write(template.render(env="tmpEnv",source="" , upload=upload, pip_pkgs=pip_new_pkgs+pip_dev_pkgs))
f.close()
# Switch to the template directory; the Jinja loader also searches 'jopy'.
os.chdir(template_dir)
loader = jinja2.FileSystemLoader(['.','jopy'])
environment = jinja2.Environment(loader=loader)
# Fetch release tags from GitHub into a name -> commit-sha mapping.
tags = {}
#r = requests.get('https://api.github.com/repos/coolprop/coolprop/tags')
r = requests.get('https://api.github.com/repos/jowr/jopy/tags')
if(r.ok):
    item = json.loads(r.text or r.content)
    for com in item:
        tags[com['name']] = com['commit']['sha']
#tag = sorted(tags.keys())[-1]
# The version is currently pinned instead of being derived from the newest tag.
tag = "v0.0.1"
if tag[0]=='v': version = tag[1:]
else: version = tag
# Common context passed to every template rendered below.
local_dict = dict(
    author = author,
    email = email,
    version = version,
    bas_pkgs = bas_pkgs,
    cus_pkgs = cus_pkgs,
    pip_cus_pkgs = pip_cus_pkgs,
    dev_pkgs = dev_pkgs,
    pip_dev_pkgs = pip_dev_pkgs
    )
# Render .travis.yml (note the leading dot on the output file name).
target = 'travis.yml'
template_path = target+'.tpl'
template = environment.get_template(template_path)
f = codecs.open(os.path.join(target_dir,"."+target),mode='wb',encoding='utf-8')
f.write(tpl_first_line.format("# "+mtime,template_path))
f.write(template.render(**local_dict))
f.close()
# Render appveyor.yml.
target = 'appveyor.yml'
template_path = target+'.tpl'
template = environment.get_template(template_path)
f = codecs.open(os.path.join(target_dir,target),mode='wb',encoding='utf-8')
f.write(tpl_first_line.format("# "+mtime,template_path))
f.write(template.render(**local_dict))
f.close()
# Render requirements.txt.
target = 'requirements.txt'
template_path = target+'.tpl'
template = environment.get_template(template_path)
f = codecs.open(os.path.join(target_dir,target),mode='wb',encoding='utf-8')
f.write(tpl_first_line.format("# "+mtime,template_path))
f.write(template.render(**local_dict))
f.close()
# Render setup.py with an extra shebang line.
target = 'setup.py'
template_path = target+'.tpl'
template = environment.get_template(template_path)
f = codecs.open(os.path.join(target_dir,target),mode='wb',encoding='utf-8')
f.write("#!/usr/bin/env python \n")
f.write(tpl_first_line.format("# "+mtime,template_path))
f.write(template.render(**local_dict))
f.close()
# Render the package __init__.py (written into the jopy/ subdirectory).
target = '__init__.py'
template_path = target+'.tpl'
template = environment.get_template(template_path)
f = codecs.open(os.path.join(target_dir,'jopy',target),mode='wb',encoding='utf-8')
f.write(tpl_first_line.format("# "+mtime,__file__))
f.write(template.render(**local_dict))
f.close()
# Render the conda recipe meta.yaml.
target = 'meta.yaml'
template_path = target+'.tpl'
template = environment.get_template(template_path)
f = codecs.open(os.path.join(target_dir,target),mode='wb',encoding='utf-8')
f.write(tpl_first_line.format("# "+mtime,template_path))
f.write(template.render(**local_dict))
f.close()
# Build the docs automatically
#print(run_command(["make","html"], shell=True, cwd=docs_dir)[0].decode("utf-8"))
print(run_command(["make","html"], shell=True, cwd=docs_dir))
#target = 'meta.yaml'
#template = environment.get_template(os.path.join(template_dir,target+'.tpl'))
#tags = {}
#r = requests.get('https://api.github.com/repos/coolprop/coolprop/tags')
#if(r.ok):
#item = json.loads(r.text or r.content)
#for com in item:
#tags[com['name']] = com['commit']['sha']
##tag = sorted(tags.keys())[-1]
##for tag in sorted(tags.keys()):
## print tag
## r = requests.get('https://api.github.com/repos/coolprop/coolprop/git/tags/'+tags[tag])
## if(r.ok):
## items = json.loads(r.text or r.content)
## print str(items)
##def cmp(x,y): return LooseVersion(x).__cmp__(y)
##tag = sorted(tags.keys(),cmp=cmp)[-1]
#tag = sorted(tags.keys())[-1]
##from pkg_resources import parse_version
##>>> parse_version('1.4') > parse_version('1.4-rc2')
#if tag[0]=='v': version = tag[1:]
#else: version = tag
#f = codecs.open(os.path.join(target_dir,target),mode='wb',encoding='utf-8')
##f = open(name,mode='w')
#f.write(template.render(version=version,tag=tag,pkgs=pkgs))
#f.close()
|
{"/jopy/styles/__init__.py": ["/jopy/styles/plots.py", "/jopy/utils.py", "/jopy/styles/mplib.py"], "/jopy/recip/__init__.py": ["/jopy/recip/mechanisms.py"], "/jopy/styles/mplib.py": ["/jopy/base.py"], "/test/test_thermo/__init__.py": ["/jopy/thermo/__init__.py"], "/jopy/thermo/__init__.py": ["/jopy/utils.py", "/jopy/thermo/utils.py"], "/test/test_jopy.py": ["/jopy/recip/mechanisms.py", "/jopy/styles/mplib.py", "/jopy/utils.py"], "/jopy/recip/mechanisms.py": ["/jopy/base.py"]}
|
6,329
|
jowr/jopy
|
refs/heads/master
|
/test/test_jopy.py
|
# -*- coding: utf-8 -*-
from __future__ import print_function, division
from matplotlib.figure import Figure
from jopy.recip.mechanisms import RecipExplicit, RecipImplicit, RecipBase
import numpy as np
from jopy.styles.mplib import BaseStyle, DtuStyle, IpuStyle
from jopy.utils import module_class_dict
import jopy.styles.mplib as mpl
LOCAL=True
class TestJopy(object):
    """Smoke-test skeleton for the jopy package; all hooks are no-ops."""

    @classmethod
    def setup_class(cls):
        """No shared fixtures are required."""
        pass

    def test_something(self):
        """Placeholder test that always succeeds."""
        pass

    @classmethod
    def teardown_class(cls):
        """Nothing to clean up."""
        pass
class TestJopyRecip(object):
    """Cross-check the explicit and implicit reciprocating-machine models."""

    exp = RecipExplicit()
    imp = RecipImplicit()

    @classmethod
    def setup_class(cls):
        """Give both machines an identical test geometry."""
        crank_radius = 0.05
        conrod_length = 0.15
        bore = 0.09
        pin_offset = 0.00
        clearance_volume = 20e-6
        TestJopyRecip.exp.set_geometry(crank_radius, conrod_length, bore,
                                       pin_offset, clearance_volume)
        TestJopyRecip.imp.set_geometry(crank_radius, conrod_length, bore,
                                       pin_offset, clearance_volume)

    def test_recip_objects(self):
        """Both objects share the common base class but not each other's type."""
        assert not isinstance(TestJopyRecip.exp, RecipImplicit)
        assert isinstance(TestJopyRecip.imp, RecipBase)

    def test_recip_functions(self):
        """Mean relative volume deviation between the models stays below 0.5 %."""
        angles = TestJopyRecip.exp.revolution(100)
        v_exp = TestJopyRecip.exp.volume(angles)
        v_imp = TestJopyRecip.imp.volume(angles)
        rel_diff = np.abs((v_exp - v_imp) / v_exp)
        #print(np.max(rel_diff),np.mean(rel_diff))
        assert np.mean(rel_diff) < 0.005  # less than 0.5%

    @classmethod
    def teardown_class(cls):
        pass
class TestJopyStyle(object):
    """Instantiate every style class in jopy.styles.mplib and render its demo."""

    objs = []

    @classmethod
    def setup_class(cls):
        pass

    def test_style_show(self):
        """Each discovered style produces two matplotlib Figure objects."""
        styles = module_class_dict(mpl)
        for style_name in styles:
            lines_fig, colours_fig = styles[style_name]()._show_info(show=False)
            assert isinstance(lines_fig, Figure)
            assert isinstance(colours_fig, Figure)

    @classmethod
    def teardown_class(cls):
        pass
|
{"/jopy/styles/__init__.py": ["/jopy/styles/plots.py", "/jopy/utils.py", "/jopy/styles/mplib.py"], "/jopy/recip/__init__.py": ["/jopy/recip/mechanisms.py"], "/jopy/styles/mplib.py": ["/jopy/base.py"], "/test/test_thermo/__init__.py": ["/jopy/thermo/__init__.py"], "/jopy/thermo/__init__.py": ["/jopy/utils.py", "/jopy/thermo/utils.py"], "/test/test_jopy.py": ["/jopy/recip/mechanisms.py", "/jopy/styles/mplib.py", "/jopy/utils.py"], "/jopy/recip/mechanisms.py": ["/jopy/base.py"]}
|
6,330
|
jowr/jopy
|
refs/heads/master
|
/jopy/recip/mechanisms.py
|
# -*- coding: utf-8 -*-
from __future__ import print_function, division
from ..base import JopyBaseClass
import numpy as np
from numpy import pi
from texttable import Texttable
from scipy.optimize import minimize, minimize_scalar
from abc import ABCMeta, abstractmethod
class RecipBase(JopyBaseClass):
    """
    The basic object that handles many tasks for the reciprocating
    machine. It exploits as many interrelations as possible and the
    object can be optimised heavily by using explicit equations
    instead of the solver calls.

    Derived quantities (dead-centre angles, clearance heights) are
    cached lazily in the ``_l_*``/``_theta_0_*`` attributes.
    """
    # Python 2-style abstract base class declaration.
    __metaclass__ = ABCMeta

    def __init__(self):
        super(RecipBase, self).__init__()
        # direct geometry
        self._cr = None  # crankshaft radius [m]
        self._cl = None  # conrod length [m]
        self._bo = None  # bore [m]
        self._pp = None  # piston pin offset [m]
        # Angle values used for offset calculations
        self._theta_0_TDC = None
        self._theta_0_BDC = None
        # Determines whether to use offset or not
        self._of = None
        # Lazily-computed caches used to save calculation time
        self._l_min = None  # clearance height from dead volume
        self._l_max = None
        self._l_cr_max = None
        self._l_cr_min = None

    @property
    def cr(self):
        """Crankshaft radius [m]"""
        return self._cr

    @property
    def cl(self):
        """Conrod length [m]"""
        return self._cl

    @property
    def bo(self):
        """Cylinder bore (diameter) [m]"""
        return self._bo

    @property
    def pp(self):
        """Piston pin offset from crankshaft centre [m]"""
        return self._pp

    @property
    def l_min(self):
        """Clearance height at TDC/minimum volume [m]"""
        if self._l_min is None: self._l_min = self._calc_distance_to_head(self.theta_0_TDC)
        return self._l_min

    @property
    def l_max(self):
        """Clearance height at BDC/maximum volume [m]"""
        if self._l_max is None: self._l_max = self._calc_distance_to_head(self.theta_0_BDC)
        return self._l_max

    @property
    def l_cr_min(self):
        """Distance from crankshaft centre to piston pin at BDC [m]"""
        # Doc fix: the original docstring was copied from l_min; this value is
        # the shaft-to-pin distance evaluated at BDC.
        if self._l_cr_min is None: self._l_cr_min = self._calc_distance_to_shaft(self.theta_0_BDC)
        return self._l_cr_min

    @property
    def l_cr_max(self):
        """Distance from crankshaft centre to piston pin at TDC [m]"""
        if self._l_cr_max is None: self._l_cr_max = self._calc_distance_to_shaft(self.theta_0_TDC)
        return self._l_cr_max

    @property
    def theta_0_TDC(self):
        """Crankshaft angle at TDC without offset [rad] - 0 = crankshaft top, not TDC"""
        if self._theta_0_TDC is None: self._theta_0_TDC = self._calc_theta_0_TDC()
        return self._theta_0_TDC

    @property
    def theta_0_BDC(self):
        """Crankshaft angle at BDC without offset [rad] - pi = crankshaft bottom, not BDC"""
        if self._theta_0_BDC is None: self._theta_0_BDC = self._calc_theta_0_BDC()
        return self._theta_0_BDC

    def set_geometry(self,cr,cl,bo,pp=0,cv=0,of=False):
        """
        Update the geometric variables and perform some calculations.

        Parameters
        ----------
        cr : float
            crankshaft radius [m]
        cl : float
            conrod length [m]
        bo : float
            bore [m]
        pp : float
            piston pin offset [m]
        cv : float
            clearance volume at TDC in [m³]
        of : bool
            offset for the crank angle position [boolean]
            Allow for theta_TDC other than 0. If false (theta_TDC = 0)
            is assured by precalculating the angular offset.
        """
        self._cr = cr
        self._cl = cl
        self._bo = bo
        self._pp = pp
        # Angle values used for offset calculations
        self._theta_0_TDC = None
        self._theta_0_BDC = None
        # Determines whether to use offset or not
        self._of = of
        # Invalidate the caches; l_min follows directly from the clearance volume.
        self._l_min = cv / self.A() # distance from piston to head at TDC
        self._l_max = None
        self._l_cr_max = None
        self._l_cr_min = None

    def _calc_theta_0(self,theta):
        """Process the crankshaft angle input to yield a value without offset."""
        if not self._of: return theta + self.theta_0_TDC
        else: return theta

    def _calc_theta(self,theta_0):
        """Process the crankshaft angle input to yield a value with offset."""
        if not self._of: return theta_0 - self.theta_0_TDC
        else: return theta_0

    def _head_to_crank(self,position):
        """Convert a head-to-pin distance into a shaft-to-pin distance.

        The mapping is its own inverse (same expression as _crank_to_head).
        """
        return self.l_cr_max - position + self.l_min

    def _crank_to_head(self,position):
        """Convert a shaft-to-pin distance into a head-to-pin distance."""
        return self.l_cr_max - position + self.l_min

    @staticmethod
    def _calc_dthetadt(rpm):
        """Return the radians per time for a speed given in rev/min."""
        return 2.0*pi*rpm/60.0

    def _calc_theta_bounds(self,TDCtoBDC=None,BDCtoTDC=None,dldtheta=None,dldt=None):
        """Determine whether we travel from TDC to BDC or the other way.

        Exactly one keyword argument must carry the stroke-direction
        information; its sign selects the angular search interval per
        element. Returns (bounds, downstroke_mask, upstroke_mask) and
        raises ValueError when the direction cannot be determined.
        """
        small = 1e-12
        downstroke = None
        if TDCtoBDC is not None:
            downstroke = np.asanyarray(TDCtoBDC)*1.
        elif BDCtoTDC is not None:
            downstroke = np.asanyarray(BDCtoTDC)*-1.
        elif dldtheta is not None:
            downstroke = np.asanyarray(dldtheta)*-1.
        elif dldt is not None:
            downstroke = np.asanyarray(dldt)*-1.
        else:
            self.autolog(str(TDCtoBDC))
            self.autolog(str(BDCtoTDC))
            self.autolog(str(dldtheta))
            self.autolog(str(dldt))
            raise ValueError("Unable to detect downstroke or upstroke.")
        goingdo = downstroke > small
        goingup = downstroke < -small
        undefin = np.logical_not(np.logical_or(goingdo,goingup))
        if np.sum(undefin)>0:
            self.autolog(str(TDCtoBDC))
            self.autolog(str(BDCtoTDC))
            self.autolog(str(dldtheta))
            self.autolog(str(dldt))
            raise ValueError("Unable to detect downstroke or upstroke, the derivative is close to zero.")
        ret = np.empty(downstroke.shape, dtype=(float,2))
        ret[goingdo] = ( self.TDC(), self.BDC())
        ret[goingup] = (-2.0*pi+self.BDC(), self.TDC())
        return ret,goingdo,goingup

    def stroke(self):
        """Stroke length of the current geometry"""
        return self.l(self.BDC())-self.l(self.TDC())

    def TDC(self):
        """Crankshaft angle at TDC"""
        return self._calc_theta(self.theta_0_TDC)

    def BDC(self):
        """Crankshaft angle at BDC"""
        return self._calc_theta(self.theta_0_BDC)

    # Mandatory functions needed to fill the geometry variables,
    # the methods are abstract even though they have an implementation
    # to force the user to actively reimplement the calls. This is
    # done to draw attention to the fact the solvers are highly
    # inefficient.
    @abstractmethod
    def _calc_theta_0_TDC(self):
        """Calculate the crankshaft angle at TDC (solver-based fallback)."""
        def f(x): return self._calc_distance_to_head(x)
        res = minimize_scalar(f, bounds=(-0.5*pi, 0.5*pi), method='bounded')
        self.autolog(str(res))
        return res.x

    @abstractmethod
    def _calc_theta_0_BDC(self):
        """Calculate the crankshaft angle at BDC (solver-based fallback)."""
        def f(x): return -self._calc_distance_to_head(x)
        res = minimize_scalar(f, bounds=(0.5*pi, 1.5*pi), method='bounded')
        self.autolog(str(res))
        return res.x

    # Some of the functions are written in a way that causes
    # indefinite recursion - two functions calling each other
    # require a redefinition of at least one of them.
    @abstractmethod
    def _calc_distance_to_head(self,theta_0):
        """Calculate the distance from cylinder head to piston pin"""
        return self._crank_to_head(self._calc_distance_to_shaft(theta_0))

    @abstractmethod
    def _calc_distance_to_shaft(self,theta_0):
        """Calculate the distance from crankshaft centre to piston pin"""
        return self._head_to_crank(self._calc_distance_to_head(theta_0))

    @abstractmethod
    def _calc_theta_0_from_head(self,distance,TDCtoBDC=None,BDCtoTDC=None,dldtheta=None,dldt=None):
        """Calculate the crankshaft angle from the distance between piston pin and head"""
        return self._calc_theta_0_from_crank(self._head_to_crank(distance),TDCtoBDC=TDCtoBDC,BDCtoTDC=BDCtoTDC,dldtheta=dldtheta,dldt=dldt)

    @abstractmethod
    def _calc_theta_0_from_crank(self,distance,TDCtoBDC=None,BDCtoTDC=None,dldtheta=None,dldt=None):
        """Calculate the crankshaft angle from the distance between piston pin and crankshaft centre"""
        return self._calc_theta_0_from_head(self._crank_to_head(distance),TDCtoBDC=TDCtoBDC,BDCtoTDC=BDCtoTDC,dldtheta=dldtheta,dldt=dldt)

    # Some shorthand notations for further calculations
    def A(self):
        """Piston surface facing the control volume [m²]"""
        return pi * np.power(self.bo,2.0) / 4.0

    def V(self,theta):
        """Volume in cylinder [m³]"""
        return self._calc_distance_to_head(self._calc_theta_0(theta)) * self.A()

    def l(self,theta):
        """Piston position from TDC + clearance height [m]"""
        return self._calc_distance_to_head(self._calc_theta_0(theta))

    def theta(self,position,TDCtoBDC=None,BDCtoTDC=None,dldtheta=None,dldt=None):
        """Crankshaft angle for a given piston position (inverse of ``l``).

        Bug fix: the original returned the bound method object
        ``self._calc_theta_0_from_head`` instead of calling it, silently
        discarding all arguments. The offset-free result is converted back
        to the public angle convention, mirroring TDC()/BDC().
        """
        theta_0 = self._calc_theta_0_from_head(position,TDCtoBDC=TDCtoBDC,BDCtoTDC=BDCtoTDC,dldtheta=dldtheta,dldt=dldt)
        return self._calc_theta(theta_0)

    def dldtheta(self,theta):
        """dl/dtheta - derivative of piston position w.r.t. crankshaft angle"""
        raise NotImplementedError("Missing function")

    def dldt(self,theta,rpm):
        """dl/dt - derivative of piston position w.r.t. time a.k.a. the piston velocity"""
        return self.dldtheta(theta)*self._calc_dthetadt(rpm)

    def d2ldtheta2(self,theta):
        """d2l/dtheta2 - 2nd derivative of piston position w.r.t. crankshaft angle"""
        raise NotImplementedError("Missing function")

    def d2ldt2(self,theta,rpm):
        """d2l/dt2 - 2nd derivative of piston position w.r.t. time a.k.a. the piston acceleration"""
        return self.d2ldtheta2(theta)*np.power(self._calc_dthetadt(rpm),2.0)

    def dVdtheta(self,theta):
        """dV/dtheta - derivative of volume w.r.t. crankshaft angle"""
        return self.dldtheta(theta)*self.A()

    def d2Vdtheta2(self,theta):
        """d2V/dtheta2 - 2nd derivative of volume w.r.t. crankshaft angle"""
        return self.d2ldtheta2(theta)*self.A()

    def info(self):
        """Print a formatted table of the machine geometry and key values."""
        table = Texttable()
        table.set_deco(Texttable.HEADER)
        # table.set_chars(['-', '|', '+', '='])
        table.set_cols_dtype(['t',  # text
                              'f',  # float (decimal)
                              't',  # text
                              't']) # text
        table.set_cols_align(["l", "r", "l", "l"])
        table.set_cols_valign(["m", "m", "m", "m"])
        # table.set_cols_width([10, 12, 13, 13, 13])
        table.header(["Variable","Value","Unit","Description"])
        table.add_row(["cr", self._cr,"m","crankshaft radius"])
        table.add_row(["cl", self._cl,"m","conrod length"])
        table.add_row(["bo", self._bo,"m","bore"])
        table.add_row(["pp", self._pp,"m","piston pin offset"])
        table.add_row(["cv", self.V(self.TDC())*1e6,"cm3","clearance volume at TDC"])
        table.add_row(["TDC", np.degrees(self.TDC()),"deg","angle of piston TDC"])
        table.add_row(["BDC", np.degrees(self.BDC()),"deg","angle of piston BDC"])
        table.add_row(["Max V", self.V(self.BDC())*1e6,"cm3","volume at BDC"])
        table.add_row(["Min V", self.V(self.TDC())*1e6,"cm3","volume at TDC"])
        print("\n"+table.draw()+"\n")

    def revolution(self,num):
        """Return `num` equally spaced crank angles covering [-pi, pi)."""
        # num: steps you get
        full = np.linspace(-pi,pi,num+1)
        return full[:-1]

    def plotVolume(self):
        """Plot cylinder volume (and, in DEBUG mode, piston position) over a revolution.

        NOTE(review): relies on ``self.volume`` and ``self._position``, which
        are not defined in this class -- presumably provided elsewhere in the
        package; verify before use.
        """
        full = self.revolution(1000)
        volu = self.volume(full)
        import matplotlib as mpl
        #mpl.use('Qt4Agg')
        from matplotlib.pyplot import plot, show
        import matplotlib.pylab as plt
        if self.DEBUG:
            fig, axs = plt.subplots(2, 1, sharex=True)
            rev = self.revolution(2000)
            pos = self._position(rev)
            axs[0].plot(rev*180/pi,pos*100)
            axs[0].plot(self.TDC()*180/pi,self._position(self.TDC())*100,'o')
            iMin = np.where(pos==pos.min())
            axs[0].plot(rev[iMin]*180/pi,self._position(rev[iMin])*100,'o')
            iMax = np.where(pos==pos.max())
            axs[0].plot(rev[iMax]*180/pi,self._position(rev[iMax])*100,'o')
            axs[0].set_ylabel(r'Piston position (cm)')
            ax = axs[1]
            self.autolog("Position: ", str(pos.min()), str(pos.max()), str(self.stroke()))
            self.autolog("Volume: ",str(volu.min()), str(volu.max()))
        else:
            fig, ax = plt.subplots(1, 1)
        ax.plot(full*180/pi,volu*1e6)
        ax.plot(self.TDC()*180/pi,self.volume(self.TDC())*1e6,'o')
        iMin = np.where(volu==volu.min())
        ax.plot(full[iMin]*180/pi,self.volume(full[iMin])*1e6,'o')
        iMax = np.where(volu==volu.max())
        ax.plot(full[iMax]*180/pi,self.volume(full[iMax])*1e6,'o')
        ax.set_xlabel(r'Crankshaft angle $\theta$ (deg)')
        ax.set_ylabel(r'Cylinder volume $V$ (cm$^3$)')
        show()
class RecipExplicit(RecipBase):
    """Class that implements the formulations from Bjarne Dindler Rasmussen's PhD
    thesis and adds an explicit equation for the position to crankshaft angle
    conversion."""

    def __init__(self):
        super(RecipExplicit, self).__init__()
        # Dimensionless properties
        self._sigma = None   # piston pin offset ratio: pp / cl
        self._lambda = None  # crank radius ratio: cr / cl

    def set_geometry(self,cr,cl,bo,pp=0,cv=0,of=False):
        """
        Update the geometric variables and perform some calculations.

        cr : float
            crankshaft radius [m]
        cl : float
            conrod length [m]
        bo : float
            bore [m]
        pp : float
            piston pin offset [m]
        cv : float
            clearance volume at TDC in [m3]
        of : float
            offset for the crank angle position [boolean]
            Allow for theta_TDC other than 0. If false (theta_TDC = 0)
            is assured by precalculating the angular offset.
        """
        super(RecipExplicit, self).set_geometry(cr,cl,bo,pp,cv,of)
        # Dimensionless properties
        self._sigma = pp / cl
        self._lambda = cr / cl
        #Carry out some basic comparisons
        # NOTE(review): the two values below feed only the commented-out
        # sanity checks and are otherwise dead code.
        _l_cr_max = np.sqrt(np.power(self.cl+self.cr,2.0)-np.power(self.pp,2.0))
        _l_cr_min = np.sqrt(np.power(self.cl-self.cr,2.0)-np.power(self.pp,2.0))
        #if self.l_cr_max <> _l_cr_max:
        #    raise ValueError("l_cr_max is not equal: {1:7.5f} vs. {1:7.5f}".format(self.l_cr_max,_l_cr_max))
        #if self.l_cr_min <> _l_cr_min:
        #    raise ValueError("l_cr_min is not equal: {1:7.5f} vs. {1:7.5f}".format(self.l_cr_min,_l_cr_min))

    # Mandatory functions needed to fill the geometry variables
    def _calc_theta_0_TDC(self):
        """Calculate the crankshaft angle at TDC (closed form)."""
        return -np.arcsin(self.pp/(self.cl+self.cr))

    def _calc_theta_0_BDC(self):
        """Calculate the crankshaft angle at BDC (closed form)."""
        return -np.arcsin(self.pp/(self.cl-self.cr)) + pi

    # Some of the functions are written in a way that causes
    # indefinite recursion - two functions calling each other
    # require a redefinition of at least one of them.
    def _calc_distance_to_head(self,theta_0):
        """Calculate the distance from cylinder head to piston pin"""
        return super(RecipExplicit, self)._calc_distance_to_head(theta_0)

    def _calc_distance_to_shaft(self,theta_0):
        """Calculate the distance from crankshaft centre to piston pin"""
        def p(x): return np.power(x,2)
        return self.cl*np.sqrt(1.0 - p(self.cr*np.sin(theta_0) + self.pp)/p(self.cl)) + self.cr*np.cos(theta_0)

    def _calc_theta_0_from_head(self,distance,TDCtoBDC=None,BDCtoTDC=None,dldtheta=None,dldt=None):
        """Calculate the crankshaft angle from the distance between piston pin and head"""
        return super(RecipExplicit, self)._calc_theta_0_from_head(distance,TDCtoBDC=TDCtoBDC,BDCtoTDC=BDCtoTDC,dldtheta=dldtheta,dldt=dldt)

    def _calc_theta_0_from_crank(self,distance,TDCtoBDC=None,BDCtoTDC=None,dldtheta=None,dldt=None):
        """Calculate the crankshaft angle from the distance between piston pin and crankshaft centre"""
        def p(x): return np.power(x,2)
        # The stroke direction decides the sign of the square root.
        bounds,do,up = self._calc_theta_bounds(TDCtoBDC, BDCtoTDC, dldtheta, dldt)
        x = np.empty_like(distance)
        # NOTE(review): the right-hand sides are full-size arrays assigned to
        # masked subsets; this only broadcasts when the mask selects every
        # element or `distance` is scalar -- verify against callers.
        x[do] = - np.sqrt(self._calc_theta_of_pos_root(distance))
        x[up] = + np.sqrt(self._calc_theta_of_pos_root(distance))
        return -2.0*np.arctan((2.0*self.cr*self.pp + x)/(-p(self.cl) + p(self.cr) + 2.0*self.cr*distance + p(distance) + p(self.pp)))

    def _calc_theta_of_pos_root(self, position):
        """Radicand used by the explicit position-to-angle conversion."""
        def p(x): return np.power(x,2)
        return -1.0*(p(self.cl) - 2.0*self.cl*self.cr + p(self.cr) - p(position) - p(self.pp))*(p(self.cl) + 2.0*self.cl*self.cr + p(self.cr) - p(position) - p(self.pp))

    def dldtheta(self,theta):
        """dl/dtheta - derivative of piston position w.r.t. crankshaft angle"""
        def p(x): return np.power(x,2)
        theta_0 = self._calc_theta_0(theta)
        dcrankdtheta = -self.cr*np.sin(theta_0) - self.cr*(self.cr*np.sin(theta_0) + self.pp)*np.cos(theta_0)/(self.cl*np.sqrt(1.0 - p(self.cr*np.sin(theta_0) + self.pp)/p(self.cl)))
        # Head distance shrinks when shaft distance grows, hence the sign flip.
        return dcrankdtheta * -1.0

    def d2ldtheta2(self,theta):
        """d2l/dtheta2 - 2nd derivative of piston position w.r.t. crankshaft angle"""
        def p(x,y=2.0): return np.power(x,y)
        theta_0 = self._calc_theta_0(theta)
        d2crankdtheta2 = -self.cr*np.cos(theta_0) - p(self.cr)*p(np.cos(theta_0))/(self.cl*np.sqrt(1.0 - p(self.cr*np.sin(theta_0) + self.pp)/p(self.cl))) + self.cr*(self.cr*np.sin(theta_0) + self.pp)*np.sin(theta_0)/(self.cl*np.sqrt(1.0 - p(self.cr*np.sin(theta_0) + self.pp)/p(self.cl))) - p(self.cr)*p(self.cr*np.sin(theta_0) + self.pp)*p(np.cos(theta_0))/(p(self.cl,3)*p(1.0 - p(self.cr*np.sin(theta_0) + self.pp)/p(self.cl),3./2.))
        #d2crankdtheta2 = -cr*cos(theta) - cr**2*cos(theta)**2/(cl*sqrt(1.0 - (cr*sin(theta) + pp)**2/cl**2)) + cr*(cr*sin(theta) + pp)*sin(theta)/(cl*sqrt(1.0 - (cr*sin(theta) + pp)**2/cl**2)) - cr**2*(cr*sin(theta) + pp)**2*cos(theta)**2/(cl**3*(1.0 - (cr*sin(theta) + pp)**2/cl**2)**(3/2))
        return d2crankdtheta2 * -1.0
class RecipImplicit(RecipBase):
    """Alternative definition of the reciprocating machine that uses solvers to determine
    minimum and maximum values. Mostly used for testing of the analytical solutions.
    Definition of piston movement and formulae taken from Dubbel, pages P5 to P7"""

    def set_geometry(self,cr,cl,bo,pp=0,cv=0,of=False):
        """
        Update the geometric variables and perform some calculations.

        cr : float
            crankshaft radius [m]
        cl : float
            conrod length [m]
        bo : float
            bore [m]
        pp : float
            piston pin offset [m]
        cv : float
            clearance volume at TDC in [m3]
        of : float
            offset for the crank angle position [boolean]
            Allow for theta_TDC other than 0. If false (theta_TDC = 0)
            is assured by precalculating the angular offset.
        """
        super(RecipImplicit, self).set_geometry(cr,cl,bo,pp,cv,of)
        #Carry out some basic comparisons
        # NOTE(review): these values feed only the commented-out sanity checks
        # and are otherwise dead code.
        _l_max = np.sin( self.theta_0_TDC+0.5*pi) * (self.cl + self.cr)
        _l_min = np.cos(pi-self.theta_0_BDC ) * (self.cl - self.cr)
        #if self.l_max <> _l_max:
        #    raise ValueError("l_cr_max is not equal: {1:7.5f} vs. {1:7.5f}".format(self.l_max,_l_max))
        #if self.l_min <> _l_min:
        #    raise ValueError("l_cr_min is not equal: {1:7.5f} vs. {1:7.5f}".format(self.l_min,_l_min))

    # Custom helper functions
    def _beta(self,_theta_0):
        """Conrod angle term.

        NOTE(review): this expression equals sin(beta) rather than the angle
        beta itself, yet the caller wraps it in np.cos(); verify against the
        Dubbel reference before relying on it.
        """
        return (self._pp+self._cr*np.sin(_theta_0))/self._cl

    # Mandatory functions needed to fill the geometry variables
    def _calc_theta_0_TDC(self):
        """Calculate the crankshaft angle at TDC via the base-class solver."""
        return super(RecipImplicit, self)._calc_theta_0_TDC()

    def _calc_theta_0_BDC(self):
        """Calculate the crankshaft angle at BDC via the base-class solver."""
        return super(RecipImplicit, self)._calc_theta_0_BDC()

    # Some of the functions are written in a way that causes
    # indefinite recursion - two functions calling each other
    # require a redefinition of at least one of them.
    def _calc_distance_to_head(self,theta_0):
        """Calculate the distance from cylinder head to piston pin"""
        return np.sqrt(np.power(self.cl+self.cr,2.0)-np.power(self.pp,2.0))-self.cl*np.cos(self._beta(-theta_0 + pi))+self.cr*np.cos(-theta_0 + pi) + self.l_min

    def _calc_distance_to_shaft(self,theta_0):
        """Calculate the distance from crankshaft centre to piston pin"""
        def p(x): return np.power(x,2)
        return self.cl*np.sqrt(1.0 - p(self.cr*np.sin(theta_0) + self.pp)/p(self.cl)) + self.cr*np.cos(theta_0)

    def _calc_theta_0_from_head(self,distance,TDCtoBDC=None,BDCtoTDC=None,dldtheta=None,dldt=None):
        """Calculate the crankshaft angle from the distance between piston pin and head"""
        bounds,do,up = self._calc_theta_bounds(TDCtoBDC, BDCtoTDC, dldtheta, dldt)
        pos = np.asarray(distance)
        guess = np.ones_like(pos) * np.mean(bounds)
        # Least-squares fit of the crank angle(s) to the requested position(s).
        def f(x): return np.sum(np.power(pos-self.l(x),2.0))
        res = minimize(f, guess, bounds=bounds, method='L-BFGS-B')
        self.autolog(str(res))
        return res.x

    def _calc_theta_0_from_crank(self,distance,TDCtoBDC=None,BDCtoTDC=None,dldtheta=None,dldt=None):
        """Calculate the crankshaft angle from the distance between piston pin and crankshaft centre.

        Bug fix: the original delegated to super()._calc_theta_0_from_head,
        which converts head->crank and calls this method again, producing
        infinite recursion. Delegating to super()._calc_theta_0_from_crank
        converts crank->head exactly once and then dispatches to the
        solver-based _calc_theta_0_from_head defined above.
        """
        return super(RecipImplicit, self)._calc_theta_0_from_crank(distance,TDCtoBDC=TDCtoBDC,BDCtoTDC=BDCtoTDC,dldtheta=dldtheta,dldt=dldt)
# class CylinderHetaTransfer(object):
#
# def basicRePrCorrelation(self,fluid,T_f,rho_f,char_vel,char_len,a,b,c,DEBUG=False):
#
# # sanitise two-phase situations
# Q = PropsSI('Q', 'T', T_f, 'D', rho_f, fluid)
#
# mu_f = numpy.array([])
# lambda_f = numpy.array([])
# cp_f = numpy.array([])
# for i,qVal in enumerate(Q):
# mu_f = numpy.append(mu_f, [PropsSI('V', 'T', T_f[i], 'D', rho_f[i], fluid)])
# lambda_f = numpy.append(lambda_f,[PropsSI('L', 'T', T_f[i], 'D', rho_f[i], fluid)])
# if 0>Q[i] or 1<Q[i]:
# #mu_f = numpy.append(mu_f, [PropsSI('V', 'T', T_f[i], 'D', rho_f[i], fluid)])
# #lambda_f = numpy.append(lambda_f,[PropsSI('L', 'T', T_f[i], 'D', rho_f[i], fluid)])
# cp_f = numpy.append(cp_f, [PropsSI('C', 'T', T_f[i], 'D', rho_f[i], fluid)])
# else:
# #mu_f = numpy.append(mu_f, [-1])
# #lambda_f = numpy.append(lambda_f,[-1])
# cp_f = numpy.append(cp_f, [1e5])
#
#
#
# if mu_f.any <= 0: print "Invalid viscosity, make sure transport properties are calculated."
# if lambda_f.any <= 0: print "Invalid thermal conductivity, make sure transport properties are calculated."
# if cp_f.any <= 0: print "Invalid heat capacity, make sure properties are calculated correctly."
# Pr = cp_f * mu_f / lambda_f
# Re = (rho_f * char_vel * char_len) / mu_f
# Nu = a * numpy.power(Re,b) * numpy.power(Pr,c)
# h = Nu * lambda_f / char_len
# if DEBUG:
# printDebug(lambda_f,namespace=locals())
# printDebug(cp_f,namespace=locals())
# printDebug(mu_f,namespace=locals())
# printDebug(Pr,namespace=locals())
# printDebug(Re,namespace=locals())
# printDebug(Nu,namespace=locals())
# printDebug(h,namespace=locals())
# return h,Nu
#
# def Kornhauser1994(self,fluid,T_f,rho_f,char_vel,char_len,a=0.56,b=0.69,DEBUG=False):
# lambda_f = CP.PropsU('L', 'T', T_f, 'D', rho_f, fluid, 'SI')
# cp_f = CP.PropsU('C', 'T', T_f, 'D', rho_f, fluid, 'SI')
# alpha_f = lambda_f / (rho_f * cp_f)
# Pe = (char_vel * char_len**2) / (4*alpha_f)
# Nu = a * Pe**b
# h = Nu * lambda_f / char_len
# if DEBUG:
# printDebug(lambda_f,namespace=locals())
# printDebug(cp_f,namespace=locals())
# printDebug(alpha_f,namespace=locals())
# printDebug(Pe,namespace=locals())
# printDebug(Nu,namespace=locals())
# printDebug(h,namespace=locals())
# return h,Nu
#
#
# def Destoop1986(self,fluid,T_f,rho_f,Lambda,Gamma,DEBUG=False):
# """
# Destoop's simplified correlation for ammonia compressors
# """
# h,Nu = self.basicRePrCorrelation(fluid,T_f,rho_f,Lambda,Gamma,0.6,0.8,0.6,DEBUG=DEBUG)
# return h,Nu
#
#
# def Annand1963(self,fluid,T_f,rho_f,Lambda,Gamma,DEBUG=False):
# """
# Annand's IC engine correlation
# """
# h,Nu = self.basicRePrCorrelation(fluid,T_f,rho_f,Lambda,Gamma,0.575,0.7,0.0,DEBUG=DEBUG)
# return h,Nu
#
#
# def Annand1963b(self,fluid,T_f,rho_f,char_vel,char_len,T_w,DEBUG=False):
# """
# Annand's SI engine correlation
# """
# c1 = 0.575
# #c1 = 0.35 # for comparison with Irimescu
# c2 = 4.3e-9
#
# mu_f = CP.PropsU('V', 'T', T_f, 'D', rho_f, fluid, 'SI')
# lambda_f = CP.PropsU('L', 'T', T_f, 'D', rho_f, fluid, 'SI')
# #cp_f = CP.PropsU('C', 'T', T_f, 'D', rho_f, fluid, 'SI')
# if mu_f.any <= 0: print "Invalid viscosity, make sure transport properties are calculated."
# if lambda_f.any <= 0: print "Invalid thermal conductivity, make sure transport properties are calculated."
# #if cp_f.any <= 0: print "Invalid heat capacity, make sure properties are calculated correctly."
# #Pr = cp_f * mu_f / lambda_f
# Re = (rho_f * char_vel * char_len) / mu_f
#
# h = c1 * lambda_f / char_len * numpy.power(Re,0.7) + c2 * (numpy.power(T_f,4)-numpy.power(T_w,4))/(T_f-T_w)
# Nu = h / lambda_f * char_len
#
# return h,Nu
#
#
# def Woschni1967(self,fluid,T_f,rho_f,Lambda,Gamma,DEBUG=False):
# """
# Annand's IC engine correlation
# """
# h,Nu = self.basicRePrCorrelation(fluid,T_f,rho_f,Lambda,Gamma,0.035,0.7,0.0,DEBUG=DEBUG)
# return h,Nu
#
#
# def Adair1972(self,theta,omega,T_f,rho_f,fluid,bore,position,DEBUG=False):
# """
# Adair's heat transfer correlation
# theta: crankshaft angle in rad with TDC at 0.
# omega: angular velocity RPS (revolutions per second), get converted into rad/s
# T_f: bulk fluid temperature
# rho_f: bulk fluid density
# fluid: fluid string
# bore: cylinder bore
# position: distance piston-head
# """
#
# theta = numpy.mod(theta-math.pi,2*math.pi) # Fix TDC-BDC problem
# #omega = der(crankshaftAngle)
# # Equation 15 from Adair et al.
# omega = 2.*math.pi*omega
# # TODO: Check conversion from 1/s to rad/s
# omega_g1 = 2.*omega * (1.04+numpy.cos(2.*theta))
# omega_g2 = 2.*omega * 0.5 * (1.04+numpy.cos(2.*theta))
#
# omega_g = 0. * omega_g1
#
# for i in range(len(theta)):
# if theta[i]>0.5*math.pi and theta[i]<1.5*math.pi:
# omega_g[i] = omega_g2[i]
# else:
# omega_g[i] = omega_g1[i]
#
# #surfaceArea = pistonCrossArea + 2. * math.sqrt(pistonCrossArea*math.pi)*position
# volume = math.pi * numpy.power(bore,2.) / 4. * position #Get volumes"
# surfaceArea = math.pi * numpy.power(bore,2.) / 4. * 2. + math.pi * bore * position
#
# d_e = 6. / surfaceArea * volume
#
# Gamma = d_e
# Lambda = 0.5 * d_e * omega_g
#
# h,Nu = self.basicRePrCorrelation(fluid,T_f,rho_f,Lambda,Gamma,0.053,0.8,0.6,DEBUG=DEBUG)
#
# return h,Nu
# # # There is a small mistake in equation 19 of the paper, DeltaT goes in the numerator.
# # Ts = PropsSI('T', 'H', h_f, 'P', p_f, fluid)
# # q_w = -1. * h * (Ts - T_wall)
# # Q_flows[i] = surfaceAreas[i]*q_w[i]
#
#
# def BasicGnielinski(self,fluid,T_f,rho_f,char_vel,char_len,L,zeta,xtra,K,DEBUG=False):
#
# D = char_len
#
# mu_f = CP.PropsU('V', 'T', T_f, 'D', rho_f, fluid, 'SI')
# lambda_f = CP.PropsU('L', 'T', T_f, 'D', rho_f, fluid, 'SI')
# cp_f = CP.PropsU('C', 'T', T_f, 'D', rho_f, fluid, 'SI')
# if mu_f.any <= 0: print "Invalid viscosity, make sure transport properties are calculated."
# if lambda_f.any <= 0: print "Invalid thermal conductivity, make sure transport properties are calculated."
# if cp_f.any <= 0: print "Invalid heat capacity, make sure properties are calculated correctly."
# Pr = cp_f * mu_f / lambda_f
# Re = (rho_f * char_vel * char_len) / mu_f
# numerator = (zeta/8.) * (Re-xtra) * Pr
# denominator = 1 + 12.7 * numpy.sqrt(zeta/8.) * (numpy.power(Pr,2./3.)-1.)
# Nu = numerator / denominator * (1 + numpy.power(D/L,2./3.)) * K
# h = Nu * lambda_f / char_len
# if DEBUG:
# printDebug(lambda_f,namespace=locals())
# printDebug(cp_f,namespace=locals())
# printDebug(mu_f,namespace=locals())
# printDebug(Pr,namespace=locals())
# printDebug(Re,namespace=locals())
# printDebug(Nu,namespace=locals())
# printDebug(h,namespace=locals())
# return h,Nu
#
#
# def Gnielinski1976(self,fluid,T_f,rho_f,char_vel,char_len,L,T_w,rho_w,DEBUG=False):
# mu_f = CP.PropsU('V', 'T', T_f, 'D', rho_f, fluid, 'SI')
# mu_w = CP.PropsU('V', 'T', T_w, 'D', rho_w, fluid, 'SI')
# if mu_f.any <= 0: print "Invalid viscosity, make sure transport properties are calculated."
# Re = (rho_f * char_vel * char_len) / mu_f
#
# zeta = numpy.power((1.82 * numpy.log10(Re)-1.64),-2)
# xtra = 1000.
# K = numpy.power(mu_f/mu_w,0.14)
# #for xi in numpy.array(numpy.power(mu_f/mu_w,0.14)):
# # print xi
#
# h,Nu = self.BasicGnielinski(fluid,T_f,rho_f,char_vel,char_len,L,zeta,xtra,K,DEBUG=DEBUG)
# return h,Nu
#
#
# def Gnielinski2010(self,fluid,T_f,rho_f,char_vel,char_len,L,T_w,DEBUG=False):
# mu_f = CP.PropsU('V', 'T', T_f, 'D', rho_f, fluid, 'SI')
# if mu_f.any <= 0: print "Invalid viscosity, make sure transport properties are calculated."
# Re = (rho_f * char_vel * char_len) / mu_f
#
# zeta = numpy.power((1.80 * numpy.log10(Re)-1.50),-2)
# xtra = 0.
# K = 1. #numpy.power(T_f/T_w,0.25)
#
# h,Nu = self.BasicGnielinski(fluid,T_f,rho_f,char_vel,char_len,L,zeta,xtra,K,DEBUG=DEBUG)
# return h,Nu
#
# class SteadyState(object):
#
# def idealNozzle(self,fluid,h_up,p_up,p_down,DEBUG=False):
# """Model of a nozzle flow using perfect gas parameter gamma.
#
# """
# from pyrp.refpropClasses import RefpropSI
# RP = RefpropSI()
# RP.SETUPFLEX(FluidNames=fluid)
# T_up,p_up,rho_up,Dl_up,Dv_up,q_up,e_up,h_up,s_up,cv_up,cp_up,w_up = RP.PHFLSH(p_up,h_up)
# v_up = 1/rho_up
# gamma0 = cp_up / cv_up
#
# def function(gamma):
# p_thr_crit = p_up*numpy.power(2/(gamma+1),gamma/(gamma-1))
# p_thr_a = max(p_thr_crit,p_down)
# v_thr_a = ((p_up*v_up**gamma)/p_thr_a)**(1./gamma)
# T_thr,p_thr,rho_thr,Dl_thr,Dv_thr,q_thr,e_thr,h_thr,s_thr,cv_thr,cp_thr,w_thr = RP.PSFLSH(p_thr_a,s_up)
# return (v_thr_a-1/rho_thr)**2
#
# res = minimize(function, gamma0)
# #if DEBUG:
# # print res, "\n"
# gamma = res.x[0]
# p_thr_crit = p_up*(2/(gamma+1))**(gamma/(gamma-1))
# p_thr = max(p_thr_crit,p_down)
# T_thr,p_thr,rho_thr,Dl_thr,Dv_thr,q_thr,e_thr,h_thr,s_thr,cv_thr,cp_thr,w_thr = RP.PSFLSH(p_thr,s_up)
# vel_thr = ((h_up - h_thr)*2.)**(1/2.)
# if DEBUG:
# print p_thr, vel_thr, w_thr, "\n"
# return T_thr,p_thr,rho_thr,Dl_thr,Dv_thr,q_thr,e_thr,h_thr,s_thr,cv_thr,cp_thr,w_thr,vel_thr,gamma
#
#
# def simplyfiedModel(self,fluid,r,l,q,b=0.1,c=20e-6):
# mechanism = MechanicsAlt()
# mechanism.setGeometry(r,l,q,b=b,c=c)
#
# fluid = "pentane"
#
# from pyrp.refpropClasses import RefpropSI
# RP = RefpropSI()
# RP.SETUPFLEX(FluidNames=fluid)
#
# d_thr_su = 0.022
# d_leak = 0.00022
# d_thr_ex = 0.022
#
# N_exp = 1000.
# N_exp_s = N_exp / 60.
#
# V_s = 750e-6
# V_0 = 36e-6
#
# p_supply = 15e5
# T_supply = 155+273.15
# T_su,p_su,rho_su,Dl_su,Dv_su,q_su,e_su,h_su,s_su,cv_su,cp_su,w_su = RP.TPFLSH(T_supply,p_supply)
#
# p_exhaust = 1.5e5
# T_exhaust = 90+273.15
# T_ex,p_ex,rho_ex,Dl_ex,Dv_ex,q_ex,e_ex,h_ex,s_ex,cv_ex,cp_ex,w_ex = RP.TPFLSH(T_exhaust,p_exhaust)
#
# StSt = SteadyState()
#
# def cycle(r_p_su1,r_M_dot_leak):
# p_su1 = r_p_su1 * p_su
# T_thr_su,p_thr_su,rho_thr_su,Dl_thr_su,Dv_thr_su,q_thr_su,e_thr_su,h_thr_su,s_thr_su,cv_thr_su,cp_thr_su,w_thr_su,vel_thr_su,gamma_thr_su = StSt.idealNozzle(fluid, h_su, p_su, p_su1,DEBUG=True)
# A_thr_su = (pi*d_thr_su^2)/4
# V_dot_thr_su = vel_thr_su*A_thr_su
# M_dot = V_dot_thr_su * rho_thr_su
# T_su1,p_su1,rho_su1,Dl_su1,Dv_su1,q_su1,e_su1,h_su1,s_su1,cv_su1,cp_su1,w_su1 = RP.PHFLSH(p_su1,h_su)
# # No heat transfer
# T_su2,p_su2,rho_su2,Dl_su2,Dv_su2,q_su2,e_su2,h_su2,s_su2,cv_su2,cp_su2,w_su2 = RP.PHFLSH(p_su1,h_su)
# # Leakage
# T_leak,p_leak,rho_leak,Dl_leak,Dv_leak,q_leak,e_leak,h_leak,s_leak,cv_leak,cp_leak,w_leak,vel_leak,gamma_leak = StSt.idealNozzle(fluid, h_su2, p_su2, p_ex,DEBUG=True)
# A_leak = (pi*d_leak^2)/4
# V_dot_leak = vel_leak*A_leak
# M_dot_leak = V_dot_leak * rho_leak
#
# r_M_dot_leak = M_dot_leak / M_dot
# M_dot_in = M_dot - M_dot_leak
#
# W_dot_suc = N_exp_s*(V_s-V_0)*p_su2
#
if __name__ == "__main__":
    # This module is a library; executing it directly is an error.
    raise ValueError("Do not call this package directly.")
# StSt = SteadyState()
# p = linspace(5e5,12.5e5,10)
# for pi in p:
# StSt.idealNozzle("pentane", 6e5, 15e5, pi,DEBUG=True)
#
# import matplotlib as mpl
# mpl.use('Qt4Agg')
#
# from matplotlib.pyplot import plot, show
#
# me = Mechanics()
# r = 0.05
# l = 0.11
# q = 0.03
# me.setGeometry(r,l,q,b=0.1,c=20e-6)
#
# full = me.revolution(1000)
# posi = me.position(full)
#
#
# print posi.max(), posi[0], posi.min()
# print me.stroke()
# print posi.max() - posi.min()
#
# plot(full,posi)
# plot(full[0],posi[0],'o')
#
# print me.position(me.TDC())
# print me.position(me.BDC())
#
# print me.theta(0.3*me.position(me.BDC()), afterTDC=True, result=True)
# print me.theta(0.3*me.position(me.BDC()), afterTDC=True) * 180 / pi
# print ""
#
# print me.theta(0.3*me.position(me.BDC()), afterTDC=False, result=True)
# print me.theta(0.3*me.position(me.BDC()), afterTDC=False) * 180 / pi
# print ""
#
# me.info()
#
# show()
#print ""
#print me.stroke(), me.position(0) - me.position(me.offsetBDC()-me.offsetTDC())
#class Mechanics2(object):
# def __init__(self):
# self.DEBUG = False
#
# self.stroke = 0.1
# self.bore = 0.1
# self.V_min = 20e-6
# self.steps = 400
# self.theta = self.getAngles(self.steps)
# self.l_con = 0.15
#
# self.A_pis = pi * self.bore**2 / 4
# self.x_min = self.V_min / self.A_pis
# self.V_max = self.V_min + self.stroke * self.A_pis
# self.x = self.positionNoOffset(self.theta, None)
# self.V = self.x * self.A_pis
#
# def info(self):
# print "stroke: " + str(self.stroke)
# print "bore: " + str(self.bore)
# print "piston position:" + str(self.x_min) + " to " + str(self.x_min+self.stroke)
# print "maximum volume: " + str(self.V_max)
# print "minimum volume: " + str(self.V_min)
#
# def validate(self):
# if self.DEBUG: print self.V_min, self.V_max
# if self.DEBUG: print self.x[0]*self.A_pis, self.x[round(self.steps/2)]*self.A_pis
# if self.DEBUG: print "\nThis should be a sequence from 0 to 180:"
# if self.DEBUG: print self.thetaNoOffset(numpy.linspace(me.x[0],me.x[round(me.steps/2)],10))*180/pi
#
# def positionNoOffset(self,theta,pos):
# # r: crankshaft radius
# r = self.stroke / 2.
# # l: conrod length
# l = self.l_con
# # theta: crankshaft angle from TDC
# theta = numpy.array(theta)
# position = numpy.array(r - r*cos(theta)+l-sqrt(l**2-(r*sin(theta))**2)) + self.x_min
# if pos is None:
# return position
# else:
# residuals = position - pos
# return residuals
#
# def thetaNoOffset(self,pos):
# pos = numpy.array(pos)
# x0 = [(0.25*pi) for s in pos] # initial guess for cut-off
# #bounds = (0.1*pi, 0.9*pi)
# #res = minimize(self.positionNoOffset, x0, bounds=bounds, args=(pos))
# res = fsolve(self.positionNoOffset, x0, args=(pos), xtol=1e-06)
# #print "Found best x of ",res.x," with an error of ",residuals," after ",iterationCounter," iterations."
# return res
#
# def getAngles(self,num):
# # num: steps you get
# full = numpy.linspace(0,2*pi,num+1)
# return full[:-1]
|
{"/jopy/styles/__init__.py": ["/jopy/styles/plots.py", "/jopy/utils.py", "/jopy/styles/mplib.py"], "/jopy/recip/__init__.py": ["/jopy/recip/mechanisms.py"], "/jopy/styles/mplib.py": ["/jopy/base.py"], "/test/test_thermo/__init__.py": ["/jopy/thermo/__init__.py"], "/jopy/thermo/__init__.py": ["/jopy/utils.py", "/jopy/thermo/utils.py"], "/test/test_jopy.py": ["/jopy/recip/mechanisms.py", "/jopy/styles/mplib.py", "/jopy/utils.py"], "/jopy/recip/mechanisms.py": ["/jopy/base.py"]}
|
6,334
|
sbxg/sbxg
|
refs/heads/master
|
/sbxg/subcomponent.py
|
# Copyright (c) 2017 Jean Guyomarc'h
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import os
import subprocess
class Subcomponent(object):
    """Aggregate rendered component templates and drive the external
    ``subcomponent`` tool.

    Component descriptions are rendered through *templater* and concatenated
    into one text buffer; :meth:`call` writes that buffer as a single
    ``components.sub`` file and invokes *program* to fetch the components.
    """
    def __init__(self, templater, program='subcomponent'):
        # templater: object exposing template_file(filename) -> str
        self._templater = templater
        # Name/path of the external executable run by call().
        self._program = program
        # Accumulated rendered component configurations.
        self._text = ""
    def add_components(self, components_names):
        """Render and accumulate every component named in *components_names*."""
        for component_name in components_names:
            self.add_component(component_name)
    def add_component(self, component_name):
        """Render the template ``<component_name>.j2`` and append its text."""
        filename = component_name + ".j2"
        self._text += '\n' + self._templater.template_file(filename)
    def call(self, in_directory, **kwargs):
        """Write the aggregated configuration and run the fetch step.

        in_directory: working directory; a ``subcomponent/`` subdirectory
        is created there when missing.
        Keyword arguments: ``no_download=True`` appends ``--dry-run``.
        Raises subprocess.CalledProcessError when the external tool fails.
        """
        # Create the directory structure if it does not already exist.
        # exist_ok avoids the check-then-create race of the former
        # os.path.exists() / os.makedirs() pair.
        path_dir = os.path.join(in_directory, 'subcomponent')
        os.makedirs(path_dir, exist_ok=True)
        # Generate the main subcomponent file by aggregating all different
        # configurations together.
        path = os.path.join(path_dir, 'components.sub')
        with open(path, 'w') as stream:
            stream.write("subcomponents {")
            stream.write(self._text)
            stream.write("}\n")
        # Subcomponent!
        cmd = [self._program, "-C", in_directory, "fetch"]
        if kwargs.get('no_download') is True:
            cmd.append("--dry-run")
        subprocess.check_call(cmd)
|
{"/sbxg/__main__.py": ["/sbxg/utils.py"]}
|
6,335
|
sbxg/sbxg
|
refs/heads/master
|
/tests/test_boostrap.py
|
# Copyright (c) 2017 Jean Guyomarc'h
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import os
import pytest
import subprocess
import sys
import tempfile
# Absolute path to the repository root (the parent of this tests/ directory).
TOP_SRC_DIR = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
def test_bootstrap_in_source_dir(env):
    """Bootstrapping from within the source directory must be rejected."""
    bootstrap_cmd = [
        sys.executable, "bootstrap.py",
        "--board", "cubietruck", "--toolchain", "local"
    ]
    with pytest.raises(subprocess.CalledProcessError):
        subprocess.check_call(bootstrap_cmd, cwd=TOP_SRC_DIR)
def test_no_toolchain_for_board(env):
    """
    NOTE(review): this test's body and docstring are byte-for-byte identical
    to test_bootstrap_in_source_dir above (same command line, same cwd), so
    despite its name it does not exercise a missing-toolchain scenario --
    this looks like a copy-paste. Confirm the intended arguments (presumably
    a valid cwd plus a toolchain the board does not support).
    """
    with pytest.raises(subprocess.CalledProcessError):
        subprocess.check_call([
            sys.executable, "bootstrap.py",
            "--board", "cubietruck", "--toolchain", "local"
        ], cwd=TOP_SRC_DIR)
@pytest.mark.parametrize("variant", [None, "xen", "board"])
@pytest.mark.parametrize("toolchain", ["armv7-eabihf"])
def test_quick_cubietruck_bootstrap(env, variant, toolchain):
    """
    Bootstrap each cubietruck variant with every available toolchain.
    Nothing is downloaded (--no-download).
    """
    bootstrap = os.path.join(TOP_SRC_DIR, "bootstrap.py")
    cmd = [
        sys.executable,
        bootstrap,
        "--board", "cubietruck",
        "--toolchain", toolchain,
        "--no-download",
    ]
    # None means "no variant option at all", not an empty value.
    if variant is not None:
        cmd += ['--board-variant', variant]
    subprocess.check_call(cmd, cwd=env.build_dir)
@pytest.mark.parametrize("source", ["linux-4.12.0"])
@pytest.mark.parametrize("config", [
    "linux-4.12-sunxi", "linux-4.12-sunxi-xen-dom0", "linux-4.12-xen-domu"
])
@pytest.mark.parametrize("toolchain", ["armv7-eabihf"])
def test_bootstrap_kernel_only(env, source, config, toolchain):
    """Bootstrap with only a kernel (no board) for each kernel config."""
    # Use the context-manager form so the scratch directory is removed
    # deterministically, even when check_call raises, instead of relying on
    # garbage collection of the TemporaryDirectory object.
    with tempfile.TemporaryDirectory() as build_dir:
        subprocess.check_call([
            sys.executable,
            os.path.join(TOP_SRC_DIR, "bootstrap.py"),
            "--kernel", source, config,
            "--toolchain", toolchain,
            "--no-download",
        ], cwd=build_dir)
@pytest.mark.parametrize("source", ["2017.07"])
@pytest.mark.parametrize("config", ["2017.07-minimal"])
@pytest.mark.parametrize("toolchain", ["armv7-eabihf"])
def test_bootstrap_uboot_only(env, source, config, toolchain):
    """Bootstrap with only a U-Boot source/config pair (no board)."""
    # Context manager guarantees the scratch directory is deleted even on
    # failure; the previous bare TemporaryDirectory() leaked it until GC.
    with tempfile.TemporaryDirectory() as build_dir:
        subprocess.check_call([
            sys.executable,
            os.path.join(TOP_SRC_DIR, "bootstrap.py"),
            "--uboot", source, config,
            "--toolchain", toolchain,
            "--no-download",
        ], cwd=build_dir)
@pytest.mark.parametrize("source", ["4.8.2"])
@pytest.mark.parametrize("config", ["4.8-sunxi"])
@pytest.mark.parametrize("toolchain", ["armv7-eabihf"])
def test_bootstrap_xen_only(env, source, config, toolchain):
    """Bootstrap with only a Xen source/config pair (no board)."""
    # Context manager guarantees the scratch directory is deleted even on
    # failure; the previous bare TemporaryDirectory() leaked it until GC.
    with tempfile.TemporaryDirectory() as build_dir:
        subprocess.check_call([
            sys.executable,
            os.path.join(TOP_SRC_DIR, "bootstrap.py"),
            "--xen", source, config,
            "--toolchain", toolchain,
            "--no-download",
        ], cwd=build_dir)
|
{"/sbxg/__main__.py": ["/sbxg/utils.py"]}
|
6,336
|
sbxg/sbxg
|
refs/heads/master
|
/sbxg/utils.py
|
# Copyright (c) 2017 Jean Guyomarc'h
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import os
import subprocess
# This is derivated from https://stackoverflow.com/a/287944
# I don't want to use a module just for color, as this is an extra dependency
# that adds more failure paths. SBXG will only run on Linux anyway, as we are
# compiling U-Boot and Linux. We can still stop echoing colors on demand.
# Symbolic names for ANSI terminal escape sequences; emit 'endc' to reset.
ANSI_STYLE = {
    'header': '\033[95m',
    'okblue': '\033[94m',
    'okgreen': '\033[92m',
    'warning': '\033[93m',
    'fail': '\033[91m',
    'endc': '\033[0m',
    'bold': '\033[1m',
    'underline': '\033[4m',
}
def get_board_config(search_dirs, board, filename):
    """Return ``(config_file_path, board_directory)`` for *board*.

    Searches each directory of *search_dirs*, in order, for
    ``<board>/<filename>.yml`` and raises FileNotFoundError when no
    directory contains it.
    """
    relative_cfg = os.path.join(board, filename + '.yml')
    for candidate_dir in search_dirs:
        candidate = os.path.join(candidate_dir, relative_cfg)
        if os.path.isfile(candidate):
            return candidate, os.path.join(candidate_dir, board)
    raise FileNotFoundError(relative_cfg)
def _get_lib_config(lib_dirs, kind, component, filename):
    """Return the first existing ``<lib_dir>/<kind>/<component>/<filename>``.

    *lib_dirs* is scanned in order; FileNotFoundError is raised when the
    file is present in none of them.
    """
    candidates = (os.path.join(d, kind, component, filename) for d in lib_dirs)
    for candidate in candidates:
        if os.path.isfile(candidate):
            return candidate
    raise FileNotFoundError(filename)
def get_toolchain(lib_dirs, toolchain):
    """Locate the YAML source descriptor for *toolchain*."""
    yml = '{}.yml'.format(toolchain)
    return _get_lib_config(lib_dirs, kind="sources", component="toolchain",
                           filename=yml)
def get_kernel_source(lib_dirs, kernel):
    """Locate the YAML source descriptor for the kernel named *kernel*."""
    yml = '{}.yml'.format(kernel)
    return _get_lib_config(lib_dirs, kind="sources", component="kernel",
                           filename=yml)
def get_kernel_config(lib_dirs, kernel):
    """Locate the kernel build configuration file named *kernel*."""
    return _get_lib_config(lib_dirs, kind="configs", component="kernel",
                           filename=kernel)
def get_uboot_source(lib_dirs, uboot):
    """Locate the YAML source descriptor for the U-Boot release *uboot*."""
    yml = '{}.yml'.format(uboot)
    return _get_lib_config(lib_dirs, kind="sources", component="uboot",
                           filename=yml)
def get_uboot_config(lib_dirs, uboot):
    """Locate the U-Boot build configuration file named *uboot*."""
    return _get_lib_config(lib_dirs, kind="configs", component="uboot",
                           filename=uboot)
def get_xen_source(lib_dirs, xen):
    """Locate the YAML source descriptor for the Xen release *xen*."""
    yml = '{}.yml'.format(xen)
    return _get_lib_config(lib_dirs, kind="sources", component="xen",
                           filename=yml)
def get_xen_config(lib_dirs, xen):
    """Locate the Xen build configuration file named *xen*."""
    return _get_lib_config(lib_dirs, kind="configs", component="xen",
                           filename=xen)
def get_arch():
    """
    Return the architecture name as Linux would determine it: this is a
    rewrite of the SUBARCH variable assignment in Linux' top-level Makefile.
    """
    # Same normalization rules as the kernel Makefile, applied via sed.
    sed_rules = (
        "s/i.86/x86/",
        "s/x86_64/x86/",
        "s/sun4u/sparc64/",
        "s/arm.*/arm/",
        "s/sa110/arm/",
        "s/s390x/s390/",
        "s/parisc64/parisc/",
        "s/ppc.*/powerpc/",
        "s/mips.*/mips/",
        "s/sh[234].*/sh/",
        "s/aarch64.*/arm64/",
    )
    command = "uname -m | sed " + " ".join("-e " + rule for rule in sed_rules)
    output = subprocess.check_output(command, shell=True,
                                     universal_newlines=True)
    return output.rstrip()
|
{"/sbxg/__main__.py": ["/sbxg/utils.py"]}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.