index int64 | repo_name string | branch_name string | path string | content string | import_graph string |
|---|---|---|---|---|---|
59,437 | fpwanderley/InfoMe-Backend | refs/heads/master | /infome/views.py | from django.shortcuts import render
from rest_framework.schemas import get_schema_view
from rest_framework_swagger.renderers import SwaggerUIRenderer, OpenAPIRenderer
# Create your views here.
# View for accessing the Swagger interface.
# Exposes the auto-generated schema via the raw OpenAPI renderer and the
# interactive Swagger UI renderer.
schema_view = get_schema_view(title='InfoMe API', renderer_classes=[OpenAPIRenderer, SwaggerUIRenderer])
| {"/infome/admin.py": ["/infome/models.py"], "/test_db.py": ["/infome/models.py"], "/infome/hand/serializers.py": ["/infome/models.py"], "/infome/call/serializers.py": ["/infome/models.py"], "/infome/call/endpoints.py": ["/infome/models.py", "/infome/call/serializers.py"], "/infome/hand/endpoints.py": ["/infome/models.py", "/infome/hand/serializers.py"], "/infome/call/urls.py": ["/infome/call/endpoints.py"], "/infome/hand/urls.py": ["/infome/hand/endpoints.py"]} |
59,438 | fpwanderley/InfoMe-Backend | refs/heads/master | /infome/call/serializers.py | # -*- coding: utf-8 -*-
from rest_framework import serializers
from django.db import transaction
from infome.models import Call, Location
from infome.location.serializers import LocationSerializer
class CallSerializer(serializers.ModelSerializer):
    """Serializes a Call together with its nested one-to-one Location."""

    location = LocationSerializer(many=False)

    class Meta:
        model = Call
        fields = ('id', 'description', 'location')

    @transaction.atomic
    def create(self, validated_data):
        """Create a Call and its nested Location in one transaction.

        The nested ``location`` payload is popped first because
        ``Call.objects.create`` cannot accept a dict for a relation.
        """
        location_data = validated_data.pop('location')
        location = Location.objects.create(**location_data)
        call = Call.objects.create(**validated_data)
        call.location = location
        call.save()
        return call

    @transaction.atomic
    def update(self, instance, validated_data):
        """Update an existing Call (and its Location) in place.

        Fixes over the previous revision: a leftover ``pdb.set_trace()``
        breakpoint is removed; a brand-new Call is no longer created on
        update (DRF expects ``instance`` to be mutated and returned); and
        ``Location.objects.update(**data)`` — a queryset-wide update that
        returns an int — is replaced by updating the related Location row.
        The ``@transaction.atomic`` guard is restored as well.
        """
        location_data = validated_data.pop('location', None)
        if location_data is not None:
            if instance.location is None:
                instance.location = Location.objects.create(**location_data)
            else:
                for attr, value in location_data.items():
                    setattr(instance.location, attr, value)
                instance.location.save()
        for attr, value in validated_data.items():
            setattr(instance, attr, value)
        instance.save()
        return instance
| {"/infome/admin.py": ["/infome/models.py"], "/test_db.py": ["/infome/models.py"], "/infome/hand/serializers.py": ["/infome/models.py"], "/infome/call/serializers.py": ["/infome/models.py"], "/infome/call/endpoints.py": ["/infome/models.py", "/infome/call/serializers.py"], "/infome/hand/endpoints.py": ["/infome/models.py", "/infome/hand/serializers.py"], "/infome/call/urls.py": ["/infome/call/endpoints.py"], "/infome/hand/urls.py": ["/infome/hand/endpoints.py"]} |
59,439 | fpwanderley/InfoMe-Backend | refs/heads/master | /infome/apps.py | from django.apps import AppConfig
class InfomeConfig(AppConfig):
    """Django application configuration for the ``infome`` app."""

    name = 'infome'
| {"/infome/admin.py": ["/infome/models.py"], "/test_db.py": ["/infome/models.py"], "/infome/hand/serializers.py": ["/infome/models.py"], "/infome/call/serializers.py": ["/infome/models.py"], "/infome/call/endpoints.py": ["/infome/models.py", "/infome/call/serializers.py"], "/infome/hand/endpoints.py": ["/infome/models.py", "/infome/hand/serializers.py"], "/infome/call/urls.py": ["/infome/call/endpoints.py"], "/infome/hand/urls.py": ["/infome/hand/endpoints.py"]} |
59,440 | fpwanderley/InfoMe-Backend | refs/heads/master | /infome/models.py | # -*- coding: utf-8 -*-
from django.db import models
class Call(models.Model):
    """A help request ("call") opened by a user at a given location."""

    # Status choices (Portuguese labels); a new Call starts as "Aberta" (open).
    DEFAULT_STATUS = (1, 'Aberta')
    STATUS = (
        DEFAULT_STATUS,
        (2, 'Resolvida'),
        (3, 'Cancelada')
    )
    description = models.TextField(max_length=300, default='')
    tags = models.ManyToManyField('Tag')
    # Nullable so a Call can exist before a Location is attached
    # (see CallSerializer.create, which links it after creation).
    location = models.OneToOneField('Location', null=True)
    status = models.IntegerField(choices=STATUS, default=DEFAULT_STATUS[0], null=False, blank=False)
    created = models.DateTimeField(auto_now_add=True, auto_now=False, null=False)
class Hand(models.Model):
    """An offer of help ("hand") tied to exactly one Call."""

    call = models.OneToOneField('Call')
    location = models.OneToOneField('Location')
    created = models.DateTimeField(auto_now_add=True, auto_now=False, null=False)
class Location(models.Model):
    """A geographic point; 6 decimal places is roughly 0.1 m of precision."""

    latitude = models.DecimalField(max_digits=9, decimal_places=6, null=False, blank=False)
    longitude = models.DecimalField(max_digits=9, decimal_places=6, null=False, blank=False)
class Tag(models.Model):
    """A free-form label attachable to many Calls (see Call.tags)."""

    description = models.CharField(max_length=50, null=False, blank=False)
| {"/infome/admin.py": ["/infome/models.py"], "/test_db.py": ["/infome/models.py"], "/infome/hand/serializers.py": ["/infome/models.py"], "/infome/call/serializers.py": ["/infome/models.py"], "/infome/call/endpoints.py": ["/infome/models.py", "/infome/call/serializers.py"], "/infome/hand/endpoints.py": ["/infome/models.py", "/infome/hand/serializers.py"], "/infome/call/urls.py": ["/infome/call/endpoints.py"], "/infome/hand/urls.py": ["/infome/hand/endpoints.py"]} |
59,441 | fpwanderley/InfoMe-Backend | refs/heads/master | /infome/call/endpoints.py | # -*- coding: utf-8 -*-
"""Call Endpoints."""
from rest_framework import permissions, viewsets, status
from infome.models import Call
from infome.call.serializers import CallSerializer
class CallViewSet(viewsets.ModelViewSet):
    """
    A simple ViewSet for viewing creating and editing Calls.
    Filter fields such as default, name and group are available for search.
    """
    # Open endpoint: no authentication required for any CRUD operation.
    permission_classes = (permissions.AllowAny,)
    queryset = Call.objects.all()
    serializer_class = CallSerializer
| {"/infome/admin.py": ["/infome/models.py"], "/test_db.py": ["/infome/models.py"], "/infome/hand/serializers.py": ["/infome/models.py"], "/infome/call/serializers.py": ["/infome/models.py"], "/infome/call/endpoints.py": ["/infome/models.py", "/infome/call/serializers.py"], "/infome/hand/endpoints.py": ["/infome/models.py", "/infome/hand/serializers.py"], "/infome/call/urls.py": ["/infome/call/endpoints.py"], "/infome/hand/urls.py": ["/infome/hand/endpoints.py"]} |
59,442 | fpwanderley/InfoMe-Backend | refs/heads/master | /infome/hand/endpoints.py | # -*- coding: utf-8 -*-
"""Call Endpoints."""
from rest_framework import permissions, viewsets, status
from infome.models import Hand
from infome.hand.serializers import HandSerializer
class HandViewSet(viewsets.ModelViewSet):
    """
    A simple ViewSet for viewing creating and editing Hands.
    Filter fields such as default, name and group are available for search.
    """
    # Open endpoint: no authentication required for any CRUD operation.
    permission_classes = (permissions.AllowAny,)
    queryset = Hand.objects.all()
    serializer_class = HandSerializer
| {"/infome/admin.py": ["/infome/models.py"], "/test_db.py": ["/infome/models.py"], "/infome/hand/serializers.py": ["/infome/models.py"], "/infome/call/serializers.py": ["/infome/models.py"], "/infome/call/endpoints.py": ["/infome/models.py", "/infome/call/serializers.py"], "/infome/hand/endpoints.py": ["/infome/models.py", "/infome/hand/serializers.py"], "/infome/call/urls.py": ["/infome/call/endpoints.py"], "/infome/hand/urls.py": ["/infome/hand/endpoints.py"]} |
59,443 | fpwanderley/InfoMe-Backend | refs/heads/master | /infome/call/urls.py | # -*- coding: utf-8 -*-
from rest_framework import routers
from infome.call.endpoints import CallViewSet
# Router without trailing slashes: routes are /calls and /calls/<pk>.
call_router = routers.SimpleRouter(trailing_slash=False)
call_router.register(r'calls', CallViewSet)
| {"/infome/admin.py": ["/infome/models.py"], "/test_db.py": ["/infome/models.py"], "/infome/hand/serializers.py": ["/infome/models.py"], "/infome/call/serializers.py": ["/infome/models.py"], "/infome/call/endpoints.py": ["/infome/models.py", "/infome/call/serializers.py"], "/infome/hand/endpoints.py": ["/infome/models.py", "/infome/hand/serializers.py"], "/infome/call/urls.py": ["/infome/call/endpoints.py"], "/infome/hand/urls.py": ["/infome/hand/endpoints.py"]} |
59,444 | fpwanderley/InfoMe-Backend | refs/heads/master | /infome/hand/urls.py | # -*- coding: utf-8 -*-
from rest_framework import routers
from infome.hand.endpoints import HandViewSet
# Router without trailing slashes: routes are /hands and /hands/<pk>.
hand_router = routers.SimpleRouter(trailing_slash=False)
hand_router.register(r'hands', HandViewSet)
| {"/infome/admin.py": ["/infome/models.py"], "/test_db.py": ["/infome/models.py"], "/infome/hand/serializers.py": ["/infome/models.py"], "/infome/call/serializers.py": ["/infome/models.py"], "/infome/call/endpoints.py": ["/infome/models.py", "/infome/call/serializers.py"], "/infome/hand/endpoints.py": ["/infome/models.py", "/infome/hand/serializers.py"], "/infome/call/urls.py": ["/infome/call/endpoints.py"], "/infome/hand/urls.py": ["/infome/hand/endpoints.py"]} |
59,446 | mohamed-aziz/realworld-flask | refs/heads/master | /conduit/extensions.py | # -*- coding: utf-8 -*-
"""Extensions module. Each extension is initialized in the app factory located in app.py."""
from flask_bcrypt import Bcrypt
from flask_caching import Cache
from flask_migrate import Migrate
from flask_sqlalchemy import SQLAlchemy
from flask_jwt import JWT
from flask_cors import CORS
# Extension singletons; each is bound to the app in app.create_app().
bcrypt = Bcrypt()
db = SQLAlchemy()
migrate = Migrate()
cache = Cache()
cors = CORS()
# Imported after the singletons exist — presumably to break a circular
# import (conduit.utils also imports from this package's models); confirm
# before moving this to the top of the file.
from conduit.utils import authenticate, jwt_identity  # noqa
jwt = JWT(authentication_handler=authenticate, identity_handler=jwt_identity)
| {"/conduit/extensions.py": ["/conduit/utils.py"], "/conduit/user/views.py": ["/conduit/utils.py", "/conduit/extensions.py"], "/conduit/app.py": ["/conduit/extensions.py"]} |
59,447 | mohamed-aziz/realworld-flask | refs/heads/master | /tests/test_articles.py | # coding: utf-8
from flask import url_for
class TestArticleViews:
    """Webtest-driven integration tests for the articles endpoints."""

    def test_get_articles_by_author(self, testapp, user):
        # Log in first: article creation requires a JWT.
        user = user.get()
        resp = testapp.post_json(url_for('user.login_user'), {'user': {
            'email': user.email,
            'password': 'myprecious'
        }})
        token = str(resp.json['user']['token'])
        for _ in range(2):
            testapp.post_json(url_for('articles.make_article'), {
                "article": {
                    "title": "How to train your dragon {}".format(_),
                    "description": "Ever wonder how?",
                    "body": "You have to believe",
                    "tagList": ["reactjs", "angularjs", "dragons"]
                }
            }, headers={
                'Authorization': 'Token {}'.format(token)
            })
        # Filtering by author should return exactly the two articles above.
        resp = testapp.get(url_for('articles.get_articles', author=user.username))
        assert len(resp.json['articles']) == 2

    def test_favorite_an_article(self, testapp, user):
        user = user.get()
        resp = testapp.post_json(url_for('user.login_user'), {'user': {
            'email': user.email,
            'password': 'myprecious'
        }})
        token = str(resp.json['user']['token'])
        resp1 = testapp.post_json(url_for('articles.make_article'), {
            "article": {
                "title": "How to train your dragon",
                "description": "Ever wonder how?",
                "body": "You have to believe",
                "tagList": ["reactjs", "angularjs", "dragons"]
            }
        }, headers={
            'Authorization': 'Token {}'.format(token)
        })
        # Favoriting the just-created article must be reflected in the response.
        resp = testapp.post(url_for('articles.favorite_an_article',
                                    slug=resp1.json['article']['slug']),
                            headers={
                                'Authorization': 'Token {}'.format(token)
                            }
                            )
        assert resp.json['article']['favorited']

    def test_get_articles_by_favoriter(self, testapp, user):
        # NOTE(review): this is a verbatim copy of test_get_articles_by_author —
        # nothing is ever favorited and the query filters by author, so the
        # "articles by favoriter" path is not actually exercised. It should
        # favorite articles and query with a favorited-style parameter.
        user = user.get()
        resp = testapp.post_json(url_for('user.login_user'), {'user': {
            'email': user.email,
            'password': 'myprecious'
        }})
        token = str(resp.json['user']['token'])
        for _ in range(2):
            testapp.post_json(url_for('articles.make_article'), {
                "article": {
                    "title": "How to train your dragon {}".format(_),
                    "description": "Ever wonder how?",
                    "body": "You have to believe",
                    "tagList": ["reactjs", "angularjs", "dragons"]
                }
            }, headers={
                'Authorization': 'Token {}'.format(token)
            })
        resp = testapp.get(url_for('articles.get_articles', author=user.username))
        assert len(resp.json['articles']) == 2

    def test_make_article(self, testapp, user):
        user = user.get()
        resp = testapp.post_json(url_for('user.login_user'), {'user': {
            'email': user.email,
            'password': 'myprecious'
        }})
        token = str(resp.json['user']['token'])
        resp = testapp.post_json(url_for('articles.make_article'), {
            "article": {
                "title": "How to train your dragon",
                "description": "Ever wonder how?",
                "body": "You have to believe",
                "tagList": ["reactjs", "angularjs", "dragons"]
            }
        }, headers={
            'Authorization': 'Token {}'.format(token)
        })
        # The created article is attributed to the logged-in user and keeps its body.
        assert resp.json['article']['author']['email'] == user.email
        assert resp.json['article']['body'] == 'You have to believe'
| {"/conduit/extensions.py": ["/conduit/utils.py"], "/conduit/user/views.py": ["/conduit/utils.py", "/conduit/extensions.py"], "/conduit/app.py": ["/conduit/extensions.py"]} |
59,448 | mohamed-aziz/realworld-flask | refs/heads/master | /conduit/user/views.py | # -*- coding: utf-8 -*-
"""User views."""
from flask import Blueprint
from .serializers import user_schema
from .models import User
from conduit.profile.models import UserProfile
from flask_jwt import current_identity, jwt_required
from flask_apispec import use_kwargs, marshal_with
from sqlalchemy.exc import IntegrityError
from conduit.exceptions import USER_ALREADY_REGISTERED, InvalidUsage, USER_NOT_FOUND
from conduit.utils import jwt_optional
from conduit.database import db
from conduit.extensions import cors
blueprint = Blueprint('user', __name__)
# Enable CORS for every route registered on this blueprint.
cors.init_app(blueprint)
@blueprint.route('/api/users', methods=('POST',))
@use_kwargs(user_schema)
@marshal_with(user_schema)
def register_user(username, password, email, **kwargs):
    """Create a new User plus its UserProfile; raise InvalidUsage on duplicates."""
    try:
        # User(...).save() returns the saved instance, which seeds the profile.
        userprofile = UserProfile(User(username, email, password=password, **kwargs).save()).save()
    except IntegrityError:
        # Unique constraint violated (username/email already taken): roll back.
        db.session.rollback()
        raise InvalidUsage(**USER_ALREADY_REGISTERED)
    return userprofile.user
@blueprint.route('/api/users/login', methods=('POST',))
@jwt_optional()
@use_kwargs(user_schema)
@marshal_with(user_schema)
def login_user(email, password, **kwargs):
    """Return the user matching ``email``/``password``; raise InvalidUsage otherwise."""
    found = User.query.filter_by(email=email).first()
    # Guard clause: reject when no such account or the password does not match.
    if found is None or not found.check_password(password):
        raise InvalidUsage(**USER_NOT_FOUND)
    return found
@blueprint.route('/api/user', methods=('GET',))
@jwt_required()
@marshal_with(user_schema)
def get_user():
    """Return the currently authenticated user (resolved from the JWT)."""
    return current_identity
@blueprint.route('/api/user', methods=('PUT',))
@jwt_required()
@use_kwargs(user_schema)
@marshal_with(user_schema)
def update_user(**kwargs):
    """Update the authenticated user's fields; a new password is hashed, not stored raw."""
    account = current_identity
    # Pull the password out so it goes through set_password instead of update().
    new_password = kwargs.pop('password', None)
    if new_password:
        account.set_password(new_password)
    account.update(**kwargs)
    return account
| {"/conduit/extensions.py": ["/conduit/utils.py"], "/conduit/user/views.py": ["/conduit/utils.py", "/conduit/extensions.py"], "/conduit/app.py": ["/conduit/extensions.py"]} |
59,449 | mohamed-aziz/realworld-flask | refs/heads/master | /conduit/app.py | # -*- coding: utf-8 -*-
"""The app module, containing the app factory function."""
from flask import Flask
from conduit.extensions import bcrypt, cache, db, migrate, jwt
from conduit import commands, user, profile, articles
from conduit.settings import ProdConfig
from conduit.exceptions import InvalidUsage
def create_app(config_object=ProdConfig):
    """An application factory, as explained here:
    http://flask.pocoo.org/docs/patterns/appfactories/.

    :param config_object: The configuration object to use.
    :returns: The fully configured Flask application.
    """
    app = Flask(__name__.split('.')[0])
    # Treat /foo and /foo/ as the same route.
    app.url_map.strict_slashes = False
    app.config.from_object(config_object)
    register_extensions(app)
    register_blueprints(app)
    register_errorhandlers(app)
    register_shellcontext(app)
    register_commands(app)
    return app
def register_extensions(app):
    """Bind each Flask extension singleton to the application instance."""
    for extension in (bcrypt, cache, db):
        extension.init_app(app)
    # Migrate takes the db handle in addition to the app; jwt goes last,
    # preserving the original initialization order.
    migrate.init_app(app, db)
    jwt.init_app(app)
def register_blueprints(app):
    """Attach every package blueprint (user, profile, articles) to the app."""
    for package in (user, profile, articles):
        app.register_blueprint(package.views.blueprint)
def register_errorhandlers(app):
    """Install a JSON error handler for the package's InvalidUsage exception."""
    def handle_invalid_usage(error):
        # InvalidUsage knows how to serialize itself and carries its own status.
        payload = error.to_json()
        payload.status_code = error.status_code
        return payload
    app.errorhandler(InvalidUsage)(handle_invalid_usage)
def register_shellcontext(app):
    """Register shell context objects."""
    def shell_context():
        """Objects pre-imported into ``flask shell`` sessions."""
        return {
            'db': db,
            'User': user.models.User,
            'UserProfile': profile.models.UserProfile,
            'Article': articles.models.Article,
            'Tag': articles.models.Tags,
        }
    app.shell_context_processor(shell_context)
def register_commands(app):
    """Register the project's Click commands on the app CLI."""
    for command in (commands.test, commands.lint, commands.clean, commands.urls):
        app.cli.add_command(command)
| {"/conduit/extensions.py": ["/conduit/utils.py"], "/conduit/user/views.py": ["/conduit/utils.py", "/conduit/extensions.py"], "/conduit/app.py": ["/conduit/extensions.py"]} |
59,450 | mohamed-aziz/realworld-flask | refs/heads/master | /conduit/utils.py | # -*- coding: utf-8 -*-
"""Helper utilities and decorators."""
from flask import flash, _request_ctx_stack
from functools import wraps
from flask_jwt import _jwt
import jwt
def jwt_optional(realm=None):
    """Decorator: resolve a JWT identity if one is present, but never reject.

    Unlike flask_jwt's ``jwt_required``, a missing or undecodable token is
    not an error — the view simply runs unauthenticated. When the token
    decodes, the resolved identity is stashed on the request context so
    ``current_identity`` works inside the view.

    ``realm`` is accepted for signature parity with ``jwt_required`` but is
    unused here.
    """
    def wrapper(fn):
        @wraps(fn)
        def decorator(*args, **kwargs):
            token = _jwt.request_callback()
            try:
                payload = _jwt.jwt_decode_callback(token)
            except jwt.exceptions.DecodeError:
                # Bad or absent token: proceed anonymously.
                pass
            else:
                _request_ctx_stack.top.current_identity = _jwt.identity_callback(payload)
            return fn(*args, **kwargs)
        return decorator
    return wrapper
from conduit.user.models import User # noqa
def flash_errors(form, category='warning'):
    """Flash all errors for a form."""
    # One flash message per (field, error) pair, labeled with the field's label text.
    for field, errors in form.errors.items():
        for error in errors:
            flash('{0} - {1}'.format(getattr(form, field).label.text, error), category)
def jwt_identity(payload):
    """Identity handler for flask_jwt: map a decoded payload to its User."""
    return User.get_by_id(payload['identity'])
def authenticate(email, password):
    """Authentication handler for flask_jwt.

    Returns the User whose email and password match, or None (implicitly
    signalling failure to flask_jwt) otherwise.
    """
    candidate = User.query.filter_by(email=email).first()
    if candidate is None:
        return None
    if not candidate.check_password(password):
        return None
    return candidate
| {"/conduit/extensions.py": ["/conduit/utils.py"], "/conduit/user/views.py": ["/conduit/utils.py", "/conduit/extensions.py"], "/conduit/app.py": ["/conduit/extensions.py"]} |
59,454 | jacobandreas/unnatural-language | refs/heads/master | /exp/overnight/collate.py | #!/usr/bin/env python3
"""Collect dev-set accuracies from */*/predict.log and emit a LaTeX table."""
import os

# results[dataset][experiment] -> accuracy parsed from that run's predict.log.
results = {}
for dataset in os.listdir("."):
    if not os.path.isdir(dataset):
        continue
    results[dataset] = {}
    for experiment in os.listdir(dataset):
        result_line = None
        result_filename = os.path.join(dataset, experiment, "predict.log")
        if not os.path.exists(result_filename):
            continue
        with open(result_filename) as f:
            # Keep the LAST matching stats line in the log.
            for line in f:
                if "Stats for iter=1.dev" in line:
                    result_line = line.strip()
        if result_line is None:
            continue
        parts = result_line.split()
        correct = parts[3]
        assert "correct" in correct
        score = float(correct.split("=")[1])
        results[dataset][experiment] = score

# BUG FIX: the experiment list was previously seeded from
# results["basketball"] only, which raised KeyError when that dataset was
# missing and silently dropped experiments unique to other datasets.
# Use the union of experiments across all datasets instead.
all_experiments = set()
for per_dataset in results.values():
    all_experiments.update(per_dataset)
experiments = sorted(all_experiments)
datasets = sorted(results)

print("\\documentclass{article} \\begin{document}")
print("\\begin{tabular}{l" + ("c" * len(results)) + "}")
print("& " + " & ".join(datasets) + " \\\\")
for experiment in experiments:
    line = [experiment]
    for dataset in datasets:
        if experiment not in results[dataset]:
            line.append("")
        else:
            line.append("{:.2f}".format(results[dataset][experiment]))
    print(" & ".join(line) + " \\\\")
print("\\end{tabular}")
print("\\end{document}")
| {"/predict_interactive.py": ["/common.py"], "/train.py": ["/common.py"], "/preproc.py": ["/common.py", "/util.py"], "/predict.py": ["/common.py"], "/models/sim.py": ["/util.py"], "/rep_service.py": ["/common.py"], "/models/seq2seq.py": ["/models/torchdec.py", "/util.py"], "/common.py": ["/models/sim.py", "/models/seq2seq.py", "/util.py"]} |
59,455 | jacobandreas/unnatural-language | refs/heads/master | /predict_interactive.py | #!/usr/bin/env python3
import common
from common import _model
from absl import app, flags
import numpy as np
import os
import sexpdata
import sys
FLAGS = flags.FLAGS
def main(argv):
    """Read utterances from stdin, one per line, and print a predicted LF for each."""
    model = _model()
    model.load(FLAGS.write_model)
    for line in sys.stdin:
        utterance = line.strip()
        # No gold logical form is available in interactive mode, hence None.
        pred_lf = model.predict(utterance, None)
        print(pred_lf, file=sys.stderr)
        print(pred_lf)
        # Flush so a consumer driving us over a pipe sees each answer immediately.
        sys.stdout.flush()
        sys.stderr.flush()
if __name__ == "__main__":
app.run(main)
| {"/predict_interactive.py": ["/common.py"], "/train.py": ["/common.py"], "/preproc.py": ["/common.py", "/util.py"], "/predict.py": ["/common.py"], "/models/sim.py": ["/util.py"], "/rep_service.py": ["/common.py"], "/models/seq2seq.py": ["/models/torchdec.py", "/util.py"], "/common.py": ["/models/sim.py", "/models/seq2seq.py", "/util.py"]} |
59,456 | jacobandreas/unnatural-language | refs/heads/master | /train.py | #!/usr/bin/env python3
import common
from common import _model
from absl import app, flags
import os
import sexpdata
FLAGS = flags.FLAGS
def main(argv):
    """Train a model on the overnight-style paraphrase training examples."""
    model = _model()
    fake_utts = []
    real_utts = []
    lfs = []
    train_file = os.path.join(FLAGS.data_dir, "data", "{}.paraphrases.train.examples".format(FLAGS.dataset))
    with open(train_file) as f:
        train_str = f.read()
    # The file is a sequence of s-expressions; wrap in parens to parse as one list.
    train_data = sexpdata.loads("({})".format(train_str))
    # Optionally cap the number of examples, then take a fraction of that.
    num_train = len(train_data)
    if FLAGS.max_examples is not None:
        num_train = min(num_train, FLAGS.max_examples)
    num_train = int(num_train * FLAGS.train_frac)
    train_data = train_data[:num_train]
    for datum in train_data:
        # Positional layout: datum[1][1] = real paraphrase, datum[2][1] =
        # canonical ("fake") utterance, datum[3][1] = logical form —
        # TODO confirm against the dataset format.
        real = datum[1][1]
        fake = datum[2][1]
        # sexpdata escapes dots in symbols; undo that in the LF string.
        lf = sexpdata.dumps(datum[3][1]).replace("\\.", ".")
        fake_utts.append(fake)
        real_utts.append(real)
        lfs.append(lf)
    model.train(real_utts, fake_utts, lfs)
    model.save(FLAGS.write_model)
if __name__ == "__main__":
app.run(main)
| {"/predict_interactive.py": ["/common.py"], "/train.py": ["/common.py"], "/preproc.py": ["/common.py", "/util.py"], "/predict.py": ["/common.py"], "/models/sim.py": ["/util.py"], "/rep_service.py": ["/common.py"], "/models/seq2seq.py": ["/models/torchdec.py", "/util.py"], "/common.py": ["/models/sim.py", "/models/seq2seq.py", "/util.py"]} |
59,457 | jacobandreas/unnatural-language | refs/heads/master | /preproc.py | #!/usr/bin/env python3
import common
from common import _sent_representer, _word_representer, _device
import util
from absl import app, flags
import json
import numpy as np
import os
import sexpdata
import torch
from tqdm import tqdm
FLAGS = flags.FLAGS
def _pad_cat(reps):
max_len = max(rep.shape[1] for rep in reps)
data = np.zeros((max_len, len(reps), reps[0].shape[2]), dtype=np.float32)
for i, rep in enumerate(reps):
data[:rep.shape[1], i, :] = rep[0, ...]
return data
def main(argv):
    """Precompute sentence- and word-level representations for every
    canonical utterance.

    Builds a word vocabulary from the training paraphrases plus the
    canonical utterances, encodes each canonical utterance, and writes the
    vocab, representation arrays, utterances, and logical forms to the
    paths given by the --write_* flags.
    """
    canonical_utt_file = os.path.join(FLAGS.data_dir, "genovernight.out", FLAGS.dataset, "utterances_formula.tsv")
    train_file = os.path.join(FLAGS.data_dir, "data", "{}.paraphrases.train.examples".format(FLAGS.dataset))
    vocab = {}
    with open(train_file) as f:
        train_str = f.read()
    # Wrap in parens so the whole file parses as one s-expression list.
    train_data = sexpdata.loads("({})".format(train_str))
    # Vocabulary pass 1: words from the real (paraphrase) utterances.
    for datum in train_data:
        real = datum[1][1]
        words = util.word_tokenize(real)
        for word in words:
            if word not in vocab:
                vocab[word] = len(vocab)
    # Vocabulary pass 2: words from the canonical utterances (tab-separated
    # "utterance<TAB>formula" lines).
    with open(canonical_utt_file) as f:
        for line in f:
            utt, _ = line.strip().split("\t")
            words = util.word_tokenize(utt)
            for word in words:
                if word not in vocab:
                    vocab[word] = len(vocab)
    sent_representer = _sent_representer(vocab)
    word_representer = _word_representer(vocab)
    sent_reps = []
    word_reps = []
    utts = []
    lfs = []
    # Encode every canonical utterance once; move tensors to CPU numpy for saving.
    with open(canonical_utt_file) as f:
        for line in tqdm(f):
            utt, lf = line.strip().split("\t")
            sent_reps.append(sent_representer(utt).squeeze(0).detach().cpu().numpy())
            word_reps.append(word_representer(utt).detach().cpu().numpy())
            utts.append(utt)
            lfs.append(lf)
    with open(FLAGS.write_vocab, "w") as f:
        json.dump(vocab, f)
    np.save(FLAGS.write_utt_reps, sent_reps)
    # Word-level reps vary in length, so pad into a single dense array.
    np.save(FLAGS.write_word_reps, _pad_cat(word_reps))
    with open(FLAGS.write_utts, "w") as f:
        json.dump(utts, f)
    with open(FLAGS.write_lfs, "w") as f:
        json.dump(lfs, f)
| {"/predict_interactive.py": ["/common.py"], "/train.py": ["/common.py"], "/preproc.py": ["/common.py", "/util.py"], "/predict.py": ["/common.py"], "/models/sim.py": ["/util.py"], "/rep_service.py": ["/common.py"], "/models/seq2seq.py": ["/models/torchdec.py", "/util.py"], "/common.py": ["/models/sim.py", "/models/seq2seq.py", "/util.py"]} |
59,458 | jacobandreas/unnatural-language | refs/heads/master | /predict.py | #!/usr/bin/env python3
import common
from common import _model
from absl import app, flags
import numpy as np
import os
import sexpdata
FLAGS = flags.FLAGS
def main(argv):
    """Evaluate the trained model on the test examples and print exact-match accuracy."""
    model = _model()
    model.load(FLAGS.write_model)
    test_file = os.path.join(FLAGS.data_dir, "data", "{}.paraphrases.test.examples".format(FLAGS.dataset))
    with open(test_file) as f:
        data_str = f.read()
    # Wrap in parens so the whole file parses as one s-expression list.
    data = sexpdata.loads("({})".format(data_str))
    predictions = []
    scores = []
    for datum in data:
        utterance = datum[1][1]
        # Undo sexpdata's dot-escaping so LFs compare as plain strings.
        lf = sexpdata.dumps(datum[3][1]).replace("\\.", ".")
        print()
        print(utterance)
        pred_lf = model.predict(utterance, lf)
        predictions.append(pred_lf)
        # Exact string match against the gold logical form.
        scores.append(pred_lf == lf)
        print(pred_lf == lf)
    print(np.mean(scores))
if __name__ == "__main__":
app.run(main)
| {"/predict_interactive.py": ["/common.py"], "/train.py": ["/common.py"], "/preproc.py": ["/common.py", "/util.py"], "/predict.py": ["/common.py"], "/models/sim.py": ["/util.py"], "/rep_service.py": ["/common.py"], "/models/seq2seq.py": ["/models/torchdec.py", "/util.py"], "/common.py": ["/models/sim.py", "/models/seq2seq.py", "/util.py"]} |
59,459 | jacobandreas/unnatural-language | refs/heads/master | /models/sim.py | from util import lf_detokenize
from absl import flags
import numpy as np
import torch
from torch import nn, optim
from tqdm import tqdm
import sys
FLAGS = flags.FLAGS
flags.DEFINE_enum("sim__scorer", "bilinear", ["", "linear", "bilinear", "rnn"], "scoring function to use")
flags.DEFINE_integer("sim__hidden_size", 1024, "size of hidden state for similarity model")
flags.DEFINE_boolean("sim__supervised", False, "train on examples")
def dist_l2(query, target):
    """Row-wise Euclidean distance between two [batch, dim] tensors."""
    difference = query - target
    return difference.pow(2).sum(dim=1).sqrt()
def dist_cos(query, target):
    """Cosine distance scaled to [0, 1]: 0 = parallel, 1 = antiparallel."""
    q_unit = query / query.norm(dim=1).unsqueeze(1)
    t_unit = target / target.norm(dim=1).unsqueeze(1)
    cosine = (q_unit * t_unit).sum(dim=1)
    return (1 - cosine) / 2
class Scorer(nn.Module):
    """Learned similarity scorer between a query and a target representation.

    The scoring family is selected by --sim__scorer:
      * linear   — linear score of the query plus a linear score of the
                   scaled elementwise product,
      * bilinear — linear score of the query plus a bilinear form,
      * rnn      — encode both token sequences with separate BiLSTMs and
                   dot the final hidden states.
    """

    def __init__(self, size, emb_size):
        super().__init__()
        self.query_score = nn.Linear(size, 1)
        if FLAGS.sim__scorer == "linear":
            self.pred_score = nn.Linear(size, 1)
        elif FLAGS.sim__scorer == "bilinear":
            self.pred_score = nn.Bilinear(size, size, 1)
        elif FLAGS.sim__scorer == "rnn":
            # The rnn scorer consumes token-level embeddings, which are only
            # produced when BERT features are enabled.
            assert(FLAGS.bert_features)
            self.encoder_query = nn.LSTM(emb_size, FLAGS.sim__hidden_size, 1, bidirectional=True)
            self.encoder_target = nn.LSTM(emb_size, FLAGS.sim__hidden_size, 1, bidirectional=True)
        else:
            assert False

    def forward(self, query_rep, target_rep):
        """Score each (query, target) row pair; returns a [batch] tensor.

        BUG FIX: the linear/bilinear branches previously referenced the
        undefined names ``query``/``target`` (NameError at runtime); they
        now use the actual ``query_rep``/``target_rep`` parameters.
        """
        if FLAGS.sim__scorer == "linear":
            return (self.query_score(query_rep)
                    + self.pred_score(query_rep * target_rep * np.sqrt(query_rep.shape[1]))).squeeze(1)
        elif FLAGS.sim__scorer == "bilinear":
            return (self.query_score(query_rep) + self.pred_score(query_rep, target_rep)).squeeze(1)
        elif FLAGS.sim__scorer == "rnn":
            _, (query_enc, _) = self.encoder_query(query_rep)
            _, (target_enc, _) = self.encoder_target(target_rep)
            return (query_enc * target_enc).sum(dim=2).sum(dim=0)
        else:
            assert False
class DistScorer(nn.Module):
    """Parameter-free scorer: plain cosine distance between representations."""
    def __init__(self, size):
        # ``size`` is accepted for interface parity with Scorer but unused.
        super().__init__()
    def forward(self, query, target):
        return dist_cos(query, target)
class SimModel(object):
    """Nearest-neighbor semantic parser: score a query utterance against
    every canonical utterance and return the logical form of the closest
    match. Lower score = closer, for both scorer variants."""

    def __init__(self, utt_reps, utt_embs, utts, lfs, representer, embedder, device):
        # Precomputed sentence-level reps, token-level embeddings, the raw
        # canonical utterances, and their aligned logical forms.
        self.utt_reps = utt_reps
        self.utt_embs = utt_embs
        self.utts = utts
        self.lfs = lfs
        self.representer = representer
        self.embedder = embedder
        self.device = device
        n_features = utt_reps.shape[-1]
        n_emb_features = utt_embs.shape[-1]
        if FLAGS.sim__supervised:
            self.scorer = Scorer(n_features, n_emb_features)
        else:
            self.scorer = DistScorer(n_features)
        self.scorer.to(device)

    def _pad_cat(self, reps):
        # Stack [1, len, feat] tensors into one zero-padded [max_len, n, feat] tensor.
        max_len = max(rep.shape[1] for rep in reps)
        data = torch.zeros((max_len, len(reps), reps[0].shape[2])).to(self.device)
        for i, rep in enumerate(reps):
            data[:rep.shape[1], i, :] = rep[0, ...]
        return data

    def train(self, real_utts, fake_utts, lfs):
        """Train the scorer on (real paraphrase, canonical utterance) pairs.

        BUG FIX: this previously called ``self.train_cls``, whose definition
        had been commented out (and contained a syntax error), so train()
        always raised AttributeError. The dead block is removed and training
        dispatches to the metric-learning trainer that actually exists.
        """
        self.train_metric(real_utts, fake_utts, lfs)

    def train_metric(self, real_utts, fake_utts, lfs):
        """Margin-ranking training: a matched (real, fake) pair should score
        at least 1 lower than a mismatched pair. No-op for the
        parameter-free DistScorer."""
        if isinstance(self.scorer, DistScorer):
            return
        real_reps = [self.representer(utt) for utt in tqdm(real_utts)]
        fake_reps = [self.representer(utt) for utt in tqdm(fake_utts)]
        real_embs = [self.embedder(utt) for utt in tqdm(real_utts)]
        fake_embs = [self.embedder(utt) for utt in tqdm(fake_utts)]
        opt = optim.Adam(self.scorer.parameters(), lr=0.001)
        total_loss = 0
        for i in range(FLAGS.train_iters):
            # Log (and reset) the running loss every 100 iterations.
            if (i+1) % 100 == 0:
                print("{:.3f}".format(total_loss / 100), file=sys.stderr)
                total_loss = 0
            true_indices = np.random.randint(len(real_reps), size=FLAGS.batch_size)
            false_indices = np.random.randint(len(real_reps), size=FLAGS.batch_size)
            if FLAGS.sim__scorer == "rnn":
                # Token-level path: pad variable-length embedding sequences.
                pred_reps = self._pad_cat([real_embs[i] for i in true_indices])
                true_reps = self._pad_cat([fake_embs[i] for i in true_indices])
                false_reps = self._pad_cat([fake_embs[i] for i in false_indices])
            else:
                pred_reps = torch.cat([real_reps[i] for i in true_indices], dim=0)
                true_reps = torch.cat([fake_reps[i] for i in true_indices], dim=0)
                false_reps = torch.cat([fake_reps[i] for i in false_indices], dim=0)
            true_dist = self.scorer(true_reps, pred_reps)
            false_dist = self.scorer(false_reps, pred_reps)
            # Hinge loss: want true_dist + 1 <= false_dist.
            diff = true_dist - false_dist + 1
            loss = torch.max(diff, torch.zeros_like(diff)).mean()
            opt.zero_grad()
            loss.backward()
            opt.step()
            total_loss += loss.item()

    def save(self, location):
        """Persist the scorer's parameters."""
        torch.save(self.scorer.state_dict(), location)

    def load(self, location):
        """Restore the scorer's parameters saved by ``save``."""
        self.scorer.load_state_dict(torch.load(location))

    def predict(self, utt, gold_lf):
        """Return the logical form of the lowest-scoring canonical utterance.

        ``gold_lf`` is only used to annotate the debug dump of the n-best
        list on stderr; pass None when no gold LF is available.
        """
        if FLAGS.sim__scorer == "rnn":
            rep = self.embedder(utt).squeeze(0).unsqueeze(1).expand(-1, self.utt_embs.shape[1], -1)
            scores = self.scorer(self.utt_embs, rep)
        else:
            rep = self.representer(utt).expand_as(self.utt_reps)
            scores = self.scorer(self.utt_reps, rep)
        best = scores.argmin()
        nbest = scores.argsort().cpu().numpy()
        for n, i in enumerate(nbest):
            gold = "*" if self.lfs[i] == gold_lf else " "
            if n < 10 or gold == "*":
                print("{:4d} {:4d} {} {:0.3f}".format(n, i, gold, scores[i]), self.utts[i], file=sys.stderr)
        print(best, file=sys.stderr)
        return self.lfs[best]
| {"/predict_interactive.py": ["/common.py"], "/train.py": ["/common.py"], "/preproc.py": ["/common.py", "/util.py"], "/predict.py": ["/common.py"], "/models/sim.py": ["/util.py"], "/rep_service.py": ["/common.py"], "/models/seq2seq.py": ["/models/torchdec.py", "/util.py"], "/common.py": ["/models/sim.py", "/models/seq2seq.py", "/util.py"]} |
59,460 | jacobandreas/unnatural-language | refs/heads/master | /rep_service.py | #!/usr/bin/env python3
import common
from common import _representer
from absl import app, flags
import numpy as np
import os
import sexpdata
import sys
FLAGS = flags.FLAGS
def main(argv):
    """Serve utterance representations over stdin/stdout, one per line."""
    # Empty vocab: presumably the representer only needs pretrained
    # features in this mode — TODO confirm against _representer.
    rep = _representer(vocab=dict())
    for line in sys.stdin:
        utterance = line.strip()
        # First row of the representation, formatted to 4 decimal places.
        pred_rep = rep(utterance)[0, :].detach().cpu().numpy().tolist()
        pred_rep = ["%.4f" % v for v in pred_rep]
        print(" ".join(pred_rep))
        # Flush so a consumer driving us over a pipe sees each answer immediately.
        sys.stdout.flush()
        sys.stderr.flush()
if __name__ == "__main__":
app.run(main)
| {"/predict_interactive.py": ["/common.py"], "/train.py": ["/common.py"], "/preproc.py": ["/common.py", "/util.py"], "/predict.py": ["/common.py"], "/models/sim.py": ["/util.py"], "/rep_service.py": ["/common.py"], "/models/seq2seq.py": ["/models/torchdec.py", "/util.py"], "/common.py": ["/models/sim.py", "/models/seq2seq.py", "/util.py"]} |
59,461 | jacobandreas/unnatural-language | refs/heads/master | /models/seq2seq.py | from models.torchdec import Vocab, Encoder, Decoder, SimpleAttention, batch_seqs
import util
import sexpdata
from absl import flags
import numpy as np
import torch
from torch import nn, optim
import sys
FLAGS = flags.FLAGS
flags.DEFINE_boolean("seq2seq__pretrained_enc", False, "use pretrained representations for encoder")
flags.DEFINE_boolean("seq2seq__fixed_enc", False, "attend directly to pretrained representation")
flags.DEFINE_integer("seq2seq__hidden_size", 1024, "size of hidden state for seq2seq model")
flags.DEFINE_integer("seq2seq__embed_size", 256, "size of embedding for seq2seq model")
class Implementation(nn.Module):
    """Encoder/decoder network underlying Seq2SeqModel.

    Depending on the seq2seq__* flags, utterances are encoded either from
    pretrained (BERT-style) per-word vectors or with a learned LSTM encoder
    over token ids; a single attentive LSTM decoder emits logical-form tokens.
    """

    def __init__(self, rep_size, vocab, embedder, device):
        super().__init__()
        hidden_size = FLAGS.seq2seq__hidden_size
        embed_size = FLAGS.seq2seq__embed_size
        self.vocab = vocab
        self.embedder = embedder
        self.device = device
        # Pretrained word vectors are deterministic per utterance, so cache them.
        self._emb_cache = {}
        if FLAGS.seq2seq__pretrained_enc:
            if FLAGS.seq2seq__fixed_enc:
                # Attend directly to a linear map of the pretrained vectors.
                self.encoder = nn.Linear(rep_size, hidden_size)
            else:
                # NOTE(review): this LSTM is declared with input size rep_size,
                # but _encode_pretrained feeds it pre_proj output of size
                # embed_size -- confirm rep_size == embed_size or adjust.
                self.encoder = nn.LSTM(rep_size, hidden_size, 1, bidirectional=True)
        else:
            self.encoder = Encoder(vocab, embed_size, hidden_size, 1)
        # Projections shared by the encoder variants.
        self.pre_proj = nn.Linear(rep_size, embed_size)
        self.proj = nn.Linear(hidden_size * 2, hidden_size)
        self.decoder = Decoder(
            vocab,
            embed_size,
            hidden_size,
            1,
            copy=False,
            attention=[SimpleAttention(hidden_size, hidden_size)],
        )

    def _encode_pretrained(self, utts_raw):
        """Encode raw utterance strings via cached pretrained word vectors.

        Returns (enc_words, enc_utt, att_toks): per-word encodings (T x B x H),
        an utterance summary (1 x B x H), and token ids used only to derive
        the attention padding mask.
        """
        emb_words = []
        for utt in utts_raw:
            if utt in self._emb_cache:
                emb = self._emb_cache[utt]
            else:
                # embedder returns (1, T, D); permute to time-major (T, 1, D).
                emb = self.embedder(utt).permute(1, 0, 2)
                self._emb_cache[utt] = emb
            emb_words.append(emb)
        max_len = max(e.shape[0] for e in emb_words)
        att_toks = np.ones((max_len, len(emb_words)), dtype=np.int64) * self.vocab.pad()
        for i in range(len(emb_words)):
            # BUG FIX: the column index `, i` was missing, so every iteration
            # overwrote all columns; with max_len equal to the longest
            # utterance, no position was ever left as pad and the attention
            # padding mask was effectively disabled.
            att_toks[:emb_words[i].shape[0], i] = self.vocab.unk()
        att_toks = torch.tensor(att_toks).to(self.device)
        # Zero-pad every utterance to max_len along the time dimension.
        emb_words = [
            torch.cat((
                e,
                torch.zeros(max_len - e.shape[0], 1, e.shape[2]).to(self.device)
            ), dim=0)
            for e in emb_words
        ]
        emb_words = torch.cat(emb_words, dim=1)
        if FLAGS.seq2seq__fixed_enc:
            enc_words = self.encoder(emb_words)
            enc_utt = enc_words.mean(dim=0, keepdim=True)
        else:
            proj_words = self.pre_proj(emb_words)
            enc_words, (enc_utt, _) = self.encoder(proj_words)
            # Fold the bidirectional outputs back down to hidden_size.
            enc_words = self.proj(enc_words)
            enc_utt = self.proj(torch.cat(enc_utt.split(1), dim=2))
        return enc_words, enc_utt, att_toks

    def _encode_basic(self, utt_data):
        """Encode a batch of token-id sequences with the learned LSTM encoder."""
        enc_words, (enc_utt, _) = self.encoder(utt_data)
        enc_words = self.proj(enc_words)
        enc_utt = self.proj(torch.cat(enc_utt.split(1), dim=2))
        return enc_words, enc_utt, utt_data

    def forward(self, utts_raw, utt_data, lf_data):
        """Teacher-forced decode; returns logits shaped (T_lf, B, |vocab|)."""
        if FLAGS.seq2seq__pretrained_enc:
            enc_words, enc_utt, att_toks = self._encode_pretrained(utts_raw)
        else:
            enc_words, enc_utt, att_toks = self._encode_basic(utt_data)
        dec_state = (enc_utt, torch.zeros_like(enc_utt))
        logits, *_ = self.decoder(
            dec_state,
            lf_data.shape[0],
            ref_tokens=lf_data,
            att_features=(enc_words,),
            att_tokens=(att_toks,),
        )
        return logits

    def predict(self, utt_raw, utt_data):
        """Greedy-decode token-id sequences for a batch of utterances."""
        if FLAGS.seq2seq__pretrained_enc:
            enc_words, enc_utt, att_toks = self._encode_pretrained(utt_raw)
        else:
            enc_words, enc_utt, att_toks = self._encode_basic(utt_data)
        dec_state = (enc_utt, torch.zeros_like(enc_utt))
        preds, _ = self.decoder.sample(
            dec_state,
            100,
            att_features=(enc_words,),
            att_tokens=(att_toks,),
            greedy=True
        )
        return preds
class Seq2SeqModel(nn.Module):
    """Seq2seq utterance -> logical-form model over a closed set of LFs.

    Builds a joint vocabulary from `base_vocab` plus all tokens appearing in
    the candidate logical forms, and delegates the network itself to
    `Implementation`.
    """

    def __init__(self, rep_size, base_vocab, utt_reps, utts, lfs, embedder, device):
        super().__init__()
        # utt_reps is accepted for interface parity with SimModel but unused here.
        self.utts = utts
        self.lfs = lfs
        self.device = device
        vocab = Vocab()
        for word in base_vocab:
            vocab.add(word)
        for lf in lfs:
            for token in util.lf_tokenize(lf):
                vocab.add(token)
        self.vocab = vocab
        self.implementation = Implementation(rep_size, vocab, embedder, device)
        self.loss = nn.CrossEntropyLoss(ignore_index=vocab.pad())
        self.device = device
        self.to(device)

    # NOTE(review): this overrides nn.Module.train(mode), changing its
    # signature and meaning; calling code must never rely on the stock
    # train()/eval() toggle on this wrapper object.
    def train(self, real_utts, fake_utts, lfs):
        """Train on (real, paraphrase, lf) triples by sampled minibatches."""
        lfs_raw = lfs
        # Deduplicate utterances while keeping each paired with its LF.
        utts_raw = []
        utts_indexed = []
        lfs_indexed = []
        for real, fake, lf in zip(real_utts, fake_utts, lfs):
            real_indexed = self.vocab.encode(util.word_tokenize(real))
            fake_indexed = self.vocab.encode(util.word_tokenize(fake))
            lf_indexed = self.vocab.encode(util.lf_tokenize(lf))
            if real not in utts_raw:
                utts_raw.append(real)
                utts_indexed.append(real_indexed)
                lfs_indexed.append(lf_indexed)
            if fake not in utts_raw:
                utts_raw.append(fake)
                utts_indexed.append(fake_indexed)
                lfs_indexed.append(lf_indexed)
        opt = optim.Adam(self.parameters(), lr=0.0003)
        # Decay the learning rate by 10x halfway through training.
        opt_sched = optim.lr_scheduler.StepLR(opt, step_size=FLAGS.train_iters//2, gamma=0.1)
        total_loss = 0
        self.implementation.train()
        for i in range(FLAGS.train_iters):
            # Report mean loss over the last 10 iterations.
            if (i+1) % 10 == 0:
                print("{:.3f}".format(total_loss / 10), file=sys.stderr)
                sys.stderr.flush()
                total_loss = 0
            indices = np.random.randint(len(utts_indexed), size=FLAGS.batch_size)
            batch_utts_raw = [utts_raw[i] for i in indices]
            batch_utts_indexed = [utts_indexed[i] for i in indices]
            batch_utt_data = batch_seqs(batch_utts_indexed).to(self.device)
            # NOTE(review): rebinds the `lfs` parameter -- harmless here but
            # confusing; lfs_raw preserved above is the original argument.
            lfs = [lfs_indexed[i] for i in indices]
            lf_data = batch_seqs(lfs).to(self.device)
            # Shift-by-one teacher forcing: context excludes the last token,
            # targets exclude the first.
            lf_ctx = lf_data[:-1, :]
            lf_tgt = lf_data[1:, :].view(-1)
            logits = self.implementation(batch_utts_raw, batch_utt_data, lf_ctx)
            logits = logits.view(-1, logits.shape[-1])
            loss = self.loss(logits, lf_tgt)
            opt.zero_grad()
            loss.backward()
            opt.step()
            opt_sched.step()
            total_loss += loss.item()
        #correct = 0
        #total = 0
        #for utt, lf in zip(real_utts, lfs_raw):
        #    pred_lf = self.predict(utt, lf)
        #    print(len(util.lf_tokenize(pred_lf)))
        #    print(utt)
        #    print(lf)
        #    print(pred_lf)
        #    print(pred_lf == utt)
        #    print()
        #    total += 1
        #    correct += int(lf == pred_lf)
        #print(correct / total)

    def save(self, location):
        """Persist only the inner network's weights to `location`."""
        torch.save(self.implementation.state_dict(), location)

    def load(self, location):
        """Restore the inner network's weights from `location`."""
        self.implementation.load_state_dict(torch.load(location))

    def predict(self, utt, gold_lf):
        """Greedy-decode an LF for `utt`; gold_lf is accepted but unused.

        Falls back to a uniformly random known LF when the decoded string is
        not in the closed candidate set.
        """
        self.implementation.eval()
        utt_raw = [utt]
        utt_data = batch_seqs([self.vocab.encode(util.word_tokenize(utt), unk=True)]).to(self.device)
        preds = self.implementation.predict(utt_raw, utt_data)
        if len(preds) == 0:
            return None
        lfs = [util.lf_detokenize(self.vocab.decode(pred)) for pred in preds]
        print("best guess", lfs[0], file=sys.stderr)
        lfs = [lf for lf in lfs if lf in self.lfs]
        if len(lfs) > 0:
            return lfs[0]
        return self.lfs[np.random.randint(len(self.lfs))]
| {"/predict_interactive.py": ["/common.py"], "/train.py": ["/common.py"], "/preproc.py": ["/common.py", "/util.py"], "/predict.py": ["/common.py"], "/models/sim.py": ["/util.py"], "/rep_service.py": ["/common.py"], "/models/seq2seq.py": ["/models/torchdec.py", "/util.py"], "/common.py": ["/models/sim.py", "/models/seq2seq.py", "/util.py"]} |
59,462 | jacobandreas/unnatural-language | refs/heads/master | /common.py | from models.sim import SimModel
from models.seq2seq import Seq2SeqModel
import util
from absl import flags
import json
import numpy as np
from pytorch_transformers import BertModel, BertTokenizer, GPT2Model, GPT2Tokenizer
import torch
import torch.nn.functional as F
FLAGS = flags.FLAGS

# Data and model selection.
flags.DEFINE_string("data_dir", None, "location of overnight data")
flags.DEFINE_string("dataset", None, "dataset to use")
flags.DEFINE_string("bert_version", "bert-base-uncased", "version of BERT pretrained weights to use")
flags.DEFINE_string("device", "cuda:0", "torch device")
flags.DEFINE_enum("model", "sim", ["sim", "seq2seq"], "model to train")
# Feature toggles.
flags.DEFINE_boolean("lex_features", True, "use lexical features")
flags.DEFINE_boolean("bert_features", True, "use bert features")
# Training-loop settings.
flags.DEFINE_integer("max_examples", None, "maximum number of examples to read")
flags.DEFINE_float("train_frac", 1, "fraction of examples to train on")
flags.DEFINE_integer("batch_size", 100, "batch size")
flags.DEFINE_integer("train_iters", 1000, "number of training iterations")
# Artifact paths shared between preprocessing, training, and prediction.
flags.DEFINE_string("write_vocab", "vocab.json", "")
flags.DEFINE_string("write_utt_reps", "utt_reps.npy", "")
flags.DEFINE_string("write_word_reps", "word_reps.npy", "")
flags.DEFINE_string("write_utts", "utts.json", "")
flags.DEFINE_string("write_lfs", "lfs.json", "")
flags.DEFINE_string("write_model", "model.p", "")

# Process-wide cache so the BERT tokenizer/model are loaded at most once.
BERT_SINGLETONS = {}
def _ensure_bert():
    """Lazily construct and cache the shared BERT tokenizer and encoder.

    Both objects are created at most once per process and stored in
    BERT_SINGLETONS; subsequent calls return the cached instances.
    """
    if "tokenizer" not in BERT_SINGLETONS:
        BERT_SINGLETONS["tokenizer"] = BertTokenizer.from_pretrained(FLAGS.bert_version)
    if "representer" not in BERT_SINGLETONS:
        model = BertModel.from_pretrained(FLAGS.bert_version, output_hidden_states=True)
        BERT_SINGLETONS["representer"] = model.to(_device())
    return BERT_SINGLETONS["tokenizer"], BERT_SINGLETONS["representer"]
def _device():
    """Resolve the --device flag into a torch.device instance."""
    selected = FLAGS.device
    return torch.device(selected)
def _sent_representer(vocab):
    """Build a closure mapping an utterance string to a 1 x D sentence vector.

    Depending on the feature flags, the vector concatenates L2-normalized
    mean-pooled BERT embedding-layer and top-layer states, and a normalized
    bag-of-words indicator over `vocab`.
    """
    if FLAGS.bert_features:
        tokenizer, representer = _ensure_bert()
    else:
        tokenizer = representer = None

    def represent(utt):
        out = []
        if FLAGS.bert_features:
            utt_enc = torch.tensor([tokenizer.encode(utt)]).to(_device())
            with torch.no_grad():
                _, _, hiddens = representer(utt_enc)
            # hiddens[0] is the embedding layer, hiddens[-1] the top layer;
            # mean-pool each over the token dimension.
            word_rep = hiddens[0].mean(dim=1)
            seq_rep = hiddens[-1].mean(dim=1)
            out.append(F.normalize(word_rep, dim=1))
            out.append(F.normalize(seq_rep, dim=1))
        if FLAGS.lex_features:
            # Bag-of-words indicator over the lemma vocabulary.
            utt_lex = np.zeros((1, len(vocab)), dtype=np.float32)
            for word in util.word_tokenize(utt):
                if word in vocab:
                    utt_lex[0, vocab[word]] = 1
            out.append(F.normalize(torch.tensor(utt_lex).to(_device()), dim=1))
        if len(out) == 1:
            return out[0].detach()
        else:
            return torch.cat(out, dim=1).detach()
    return represent
def _word_representer(vocab):
    """Build a closure mapping an utterance to per-token features (1 x T x D)."""
    tokenizer, representer = _ensure_bert()

    def represent(utt):
        out = []
        utt_words = util.word_tokenize(utt)
        utt_enc = torch.tensor([tokenizer.encode(utt)]).to(_device())
        if FLAGS.bert_features:
            with torch.no_grad():
                _, _, hiddens = representer(utt_enc)
            # Embedding-layer and top-layer states, both (1, T, H).
            out.append(hiddens[0])
            out.append(hiddens[-1])
        if FLAGS.lex_features:
            one_hot = torch.zeros(1, utt_enc.shape[1], len(vocab))
            j = 0
            # NOTE(review): utt_enc has shape (1, T), so len(utt_enc) == 1 and
            # this loop only visits i == 0, decoding the whole sequence at
            # once; the wordpiece/word alignment below therefore never gets
            # past the first position. Likely intended to iterate
            # range(utt_enc.shape[1]) and decode single token ids -- confirm
            # and fix together with special-token ([CLS]/[SEP]) handling.
            for i in range(len(utt_enc)):
                dec = tokenizer.decode(utt_enc[i])
                if not dec.startswith("##"):
                    word = utt_words[j]
                    if word in vocab:
                        one_hot[0, i, vocab[word]] = 1
                    j += 1
            one_hot = one_hot.to(_device())
            out.append(one_hot)
        if len(out) == 1:
            return out[0].detach()
        else:
            return torch.cat(out, dim=2).detach()
    return represent
def _model():
    """Load preprocessed artifacts from the write_* flag paths and build the model.

    Returns a SimModel or Seq2SeqModel according to the --model flag; both
    receive the cached utterance/word representations produced by preproc.
    """
    with open(FLAGS.write_vocab) as f:
        vocab = json.load(f)
    with open(FLAGS.write_utts) as f:
        utts = json.load(f)
    with open(FLAGS.write_lfs) as f:
        lfs = json.load(f)
    utt_reps = torch.tensor(np.load(FLAGS.write_utt_reps)).to(_device())
    word_reps = torch.tensor(np.load(FLAGS.write_word_reps).astype(np.float32)).to(_device())
    representer = _sent_representer(vocab)
    embedder = _word_representer(vocab)
    if FLAGS.model == "sim":
        model = SimModel(utt_reps, word_reps, utts, lfs, representer, embedder, _device())
    elif FLAGS.model == "seq2seq":
        # word_reps has shape (N, T, D); D is the per-word feature size.
        model = Seq2SeqModel(word_reps.shape[2], vocab, utt_reps, utts, lfs, embedder, _device())
    else:
        assert False
    return model
| {"/predict_interactive.py": ["/common.py"], "/train.py": ["/common.py"], "/preproc.py": ["/common.py", "/util.py"], "/predict.py": ["/common.py"], "/models/sim.py": ["/util.py"], "/rep_service.py": ["/common.py"], "/models/seq2seq.py": ["/models/torchdec.py", "/util.py"], "/common.py": ["/models/sim.py", "/models/seq2seq.py", "/util.py"]} |
59,463 | jacobandreas/unnatural-language | refs/heads/master | /util.py | import spacy
NLP = spacy.load("en_core_web_sm")
def word_tokenize(utt):
    """Tokenize an utterance into lemmas using the shared spaCy pipeline."""
    return [token.lemma_ for token in NLP(utt)]
def lf_tokenize(lf):
    """Split an s-expression string into tokens, with parens as own tokens."""
    spaced = lf.replace("(", "( ").replace(")", " )")
    return spaced.split()
def lf_detokenize(tokens):
    """Inverse of lf_tokenize: rejoin tokens into an s-expression string."""
    joined = " ".join(tokens)
    return joined.replace("( ", "(").replace(" )", ")")
| {"/predict_interactive.py": ["/common.py"], "/train.py": ["/common.py"], "/preproc.py": ["/common.py", "/util.py"], "/predict.py": ["/common.py"], "/models/sim.py": ["/util.py"], "/rep_service.py": ["/common.py"], "/models/seq2seq.py": ["/models/torchdec.py", "/util.py"], "/common.py": ["/models/sim.py", "/models/seq2seq.py", "/util.py"]} |
59,464 | jacobandreas/unnatural-language | refs/heads/master | /models/torchdec.py | from collections import namedtuple
import numpy as np
import torch
from torch import nn
import torch.nn.functional as F
#_VF = torch._C._VariableFunctions
# Small constant added to probabilities before log() to avoid log(0).
EPS = 1e-7
def batch_seqs(seqs):
    """Pack variable-length id sequences into a time-major LongTensor.

    Result has shape (max_len, batch); short sequences are zero-padded.
    """
    height = max(len(s) for s in seqs)
    width = len(seqs)
    data = np.zeros((height, width))
    for col, seq in enumerate(seqs):
        data[:len(seq), col] = seq
    return torch.LongTensor(data)
class Encoder(nn.Module):
    """LSTM encoder over time-major token-id batches.

    Embeds the ids (pad index from `vocab`), applies dropout, and returns
    the raw nn.LSTM output: (all hidden states, (h_n, c_n)).
    """

    def __init__(
        self,
        vocab,
        n_embed,
        n_hidden,
        n_layers,
        bidirectional=True,
        dropout=0,
    ):
        super().__init__()
        self.vocab = vocab
        self.embed = nn.Embedding(len(vocab), n_embed, vocab.pad())
        self.embed_dropout = nn.Dropout(dropout)
        self.rnn = nn.LSTM(
            n_embed, n_hidden, n_layers, bidirectional=bidirectional
        )

    def forward(self, data):
        embedded = self.embed(data)
        embedded = self.embed_dropout(embedded)
        return self.rnn(embedded)
class SimpleAttention(nn.Module):
    """Dot-product attention with optional learned key/value projections.

    Inputs are time-major: features (T, B, F), hidden (1, B, H), mask (T, B)
    with 1.0 at positions to exclude. Returns the (1, B, F-or-H) summary and
    the (T, B) attention distribution.
    """

    def __init__(self, n_features, n_hidden, key=True, value=False):
        super().__init__()
        self.key = key
        self.value = value
        self.make_key = nn.Linear(n_features, n_hidden)
        self.make_val = nn.Linear(n_features, n_hidden)
        self.n_out = n_hidden

    def forward(self, features, hidden, mask):
        # Optionally project features into key space before scoring.
        key = self.make_key(features) if self.key else features
        # Dot-product scores; masked positions get a very large negative score.
        query = hidden.expand_as(key)
        scores = (key * query).sum(dim=2) + mask * -99999  # "infinity"
        distribution = F.softmax(scores, dim=0)
        # Probability-weighted sum of the (unprojected) features.
        weights = distribution.unsqueeze(2).expand_as(features)
        summary = (features * weights).sum(dim=0, keepdim=True)
        if self.value:
            return self.make_val(summary), distribution
        return summary, distribution
# Per-step decoding state threaded through Decoder.forward / Decoder.step.
DecoderState = namedtuple("DecoderState", "feed rnn_state hiddens tokens")
# One hypothesis in beam search; `parent` links back to the previous state.
BeamState = namedtuple("BeamState", "feed rnn_state hiddens tokens score parent done")
class Decoder(nn.Module):
    """Attentive LSTM decoder with optional copy mechanism and self-attention.

    All sequences are time-major. Decoding proceeds one step at a time via
    `step`; `forward` drives teacher forcing or an arbitrary token_picker,
    while `sample` and `beam` provide free-running decoding.
    """

    def __init__(
        self,
        vocab,
        n_embed,
        n_hidden,
        n_layers,
        attention=None,
        copy=False,
        self_attention=False,
        dropout=0
    ):
        super().__init__()
        # setup
        self.vocab = vocab
        self.n_hidden = n_hidden
        self.n_layers = n_layers
        self.copy = copy
        self.self_attention = self_attention
        # attention
        if attention is None:
            attention = ()
        attention = tuple(attention)
        if self_attention:
            # Self-attention over the decoder's own previous hidden states is
            # appended as one more attention head.
            attention = attention + (SimpleAttention(n_hidden, n_hidden),)
        self.attention = attention
        # Register heads explicitly so their parameters are tracked.
        for i, att in enumerate(attention):
            self.add_module("attention_%d" % i, att)
        # modules
        self.embed = nn.Embedding(len(vocab), n_embed, vocab.pad())
        self.combine = nn.Linear(n_hidden * (1 + len(attention)), n_hidden)
        self.dropout_in = nn.Dropout(dropout)
        self.predict = nn.Linear(n_hidden, len(vocab))
        self.copy_switch = nn.Linear(n_hidden, 1 + len(attention))
        # Input feeding: previous combined features are concatenated with the
        # token embedding, hence n_embed + n_hidden input size.
        self.rnn = nn.LSTM(n_embed + n_hidden, n_hidden, n_layers)
        self.dropout_out = nn.Dropout(dropout)

    def step(
        self,
        decoder_state,
        att_features,
        att_tokens,
        att_masks,
        att_token_proj,
        self_att_proj
    ):
        """Advance the decoder by one token.

        Returns (pred_logits, comb_features, rnn_state, hidden,
        direct_logits, copy_logits); comb_features is the next step's feed.
        """
        # advance rnn
        emb = self.embed(decoder_state.tokens[-1, :])
        inp = self.dropout_in(torch.cat((emb, decoder_state.feed), dim=1))
        hidden, rnn_state = self.rnn(inp.unsqueeze(0), decoder_state.rnn_state)
        hiddens = torch.cat(decoder_state.hiddens + [hidden], dim=0)
        # prep self-attention
        if self.self_attention:
            att_features = tuple(att_features) + (hiddens,)
            att_tokens = tuple(att_tokens) + (decoder_state.tokens,)
            att_masks = att_masks + (
                (decoder_state.tokens == self.vocab.pad()).float(),
            )
            att_token_proj = att_token_proj + (self_att_proj,)
        # advance attention
        attended = [
            attention(features, hidden, mask)
            for attention, features, mask in zip(
                self.attention, att_features, att_masks
            )
        ]
        if len(attended) > 0:
            summary, distribution = zip(*attended)
        else:
            summary = distribution = ()
        # Combine the rnn state with every attention summary, then predict.
        all_features = torch.cat([hidden] + list(summary), dim=2)
        comb_features = self.dropout_out(self.combine(all_features).squeeze(0))
        pred_logits = self.predict(comb_features)
        # copy mechanism
        ### if self.copy:
        ###     pred_probs = F.softmax(pred_logits, dim=1)
        ###     copy_probs = [
        ###         (dist.unsqueeze(2) * proj).sum(dim=0)
        ###         for dist, proj in zip(distribution, att_token_proj)
        ###     ]
        ###     all_probs = torch.stack([pred_probs] + copy_probs, dim=1)
        ###     copy_weights = F.softmax(self.copy_switch(comb_features), dim=1)
        ###     comb_probs = (copy_weights.unsqueeze(2) * all_probs).sum(dim=1)
        ###     pred_logits = torch.log(comb_probs)
        if self.copy:
            # Mix the generation distribution with a copy distribution built
            # from the (single) attention head; the <copy> token's own
            # probability acts as the mixing weight.
            pred_probs = F.softmax(pred_logits, dim=1)
            dist, = distribution
            proj, = att_token_proj
            copy_probs = (dist.unsqueeze(2) * proj).sum(dim=0)
            copy_probs += EPS
            copy_weights = pred_probs[:, self.vocab.copy()].unsqueeze(1)
            comb_probs = (
                copy_weights * copy_probs + (1 - copy_weights) * pred_probs
            )
            direct_logits = pred_logits
            copy_logits = torch.log(copy_probs)
            pred_logits = torch.log(comb_probs)
        else:
            direct_logits = pred_logits
            copy_logits = None
        # done
        return (
            pred_logits,
            comb_features,
            rnn_state,
            hidden,
            direct_logits, copy_logits
        )

    def _make_projection(self, tokens):
        """One-hot projection (T, B, |V|) of a (T, B) token-id tensor."""
        proj = tokens.new_zeros(
            tokens.shape[0], tokens.shape[1], len(self.vocab)
        ).float()
        for i in range(tokens.shape[0]):
            # Per-row fancy indexing: position (i, b) gets a 1 at its token id.
            proj[i, range(tokens.shape[1]), tokens[i, :]] = 1
            #proj[i, :, tokens[i, :]] = 1
        return proj

    def forward(
        self,
        rnn_state,
        max_len,
        ref_tokens=None,
        att_features=None,
        att_tokens=None,
        token_picker=None
    ):
        """Unroll the decoder for up to max_len steps.

        With ref_tokens (and no token_picker) this is teacher forcing;
        otherwise token_picker(t, logits) supplies each step's input tokens
        and the self-attention projection, returning (None, None) to stop.
        """
        # token picker
        if token_picker is None:
            self_att_proj = self._make_projection(ref_tokens)
            token_picker = lambda t, logits: (
                (ref_tokens[t, :], self_att_proj[:t+1, :, :])
            )
        # attention
        if att_features is None:
            att_features = ()
            att_tokens = ()
            att_masks = ()
            att_token_proj = ()
        else:
            assert isinstance(att_features, list) \
                or isinstance(att_features, tuple)
            # Mask out pad positions in every attention source.
            att_masks = tuple(
                (toks == self.vocab.pad()).float() for toks in att_tokens
            )
            att_token_proj = tuple(
                self._make_projection(toks) for toks in att_tokens
            )
        # init
        pred = None
        dummy_tokens, _ = token_picker(0, pred)
        feed = dummy_tokens.new_zeros(
            dummy_tokens.shape[0], self.n_hidden
        ).float()
        hiddens = []
        all_tokens = []
        all_preds = []
        all_extra = []
        # iter
        for t in range(max_len):
            tokens, self_att_proj = token_picker(t, pred)
            if tokens is None:
                break
            all_tokens.append(tokens)
            decoder_state = DecoderState(
                feed, rnn_state, hiddens, torch.stack(all_tokens)
            )
            pred, feed, rnn_state, hidden, *extra = self.step(
                decoder_state,
                att_features,
                att_tokens,
                att_masks,
                att_token_proj,
                self_att_proj,
            )
            hiddens.append(hidden)
            all_preds.append(pred)
            all_extra.append(extra)
        return (
            torch.stack(all_preds),
            torch.stack(all_tokens),
            rnn_state,
            list(zip(*all_extra))
        )

    def sample(
        self,
        rnn_state,
        max_len,
        att_features=None,
        att_tokens=None,
        greedy=False
    ):
        """Free-running decode (greedy or sampled) for a whole batch.

        Returns (token lists truncated at </s>, per-sequence logit scores).
        """
        # init
        n_batch = rnn_state[0].shape[1]
        device = rnn_state[0].device
        done = [False for _ in range(n_batch)]
        running_proj = torch.zeros(max_len, n_batch, len(self.vocab)).to(device)

        def token_picker(t, logits):
            # first step
            if t == 0:
                toks = torch.LongTensor(
                    [self.vocab.sos() for _ in range(n_batch)]
                ).to(device)
                running_proj[0, range(n_batch), toks] = 1
                #running_proj[0, :, toks] = 1
                return toks, running_proj[:1, :, :]
            if all(done):
                return None, None
            # sample
            probs = F.softmax(logits, dim=1)
            probs = probs.detach().cpu().numpy()
            tokens = []
            for i, row in enumerate(probs):
                if done[i]:
                    # Finished rows keep emitting pad.
                    tokens.append(self.vocab.pad())
                    continue
                # Never emit the internal <copy> marker.
                row[self.vocab.copy()] = 0
                if greedy:
                    choice = np.argmax(row)
                else:
                    row /= row.sum()
                    choice = np.random.choice(len(self.vocab), p=row)
                tokens.append(choice)
                if choice == self.vocab.eos():
                    done[i] = True
            toks = torch.LongTensor(tokens).to(device)
            # NOTE(review): this sets every (row, token) pair via fancy
            # indexing, unlike the per-row range(n_batch) form used at t == 0
            # above -- confirm whether `[t, range(n_batch), toks]` was meant
            # (only matters when self-attention copy projections are used).
            running_proj[t, :, toks] = 1
            return toks, running_proj[:t+1, :, :]

        preds, tokens, rnn_state, *_ = self(
            rnn_state,
            max_len,
            att_features=att_features,
            att_tokens=att_tokens,
            token_picker=token_picker
        )
        # Truncate each row at its </s> and accumulate its logit score.
        tok_arr = tokens.detach().cpu().numpy().transpose()
        tok_out = []
        score_out = [0 for _ in range(tok_arr.shape[0])]
        for i, row in enumerate(tok_arr):
            row_out = []
            for t, c in enumerate(row):
                row_out.append(c)
                score_out[i] += preds[t, i, c].item()
                if c == self.vocab.eos():
                    break
            tok_out.append(row_out)
        return tok_out, score_out

    def beam(
        self,
        rnn_state,
        beam_size,
        max_len,
        att_features=None,
        att_tokens=None,
    ):
        """Beam-search decode for a single example (batch size must be 1).

        Returns the token lists of the final beam, best-scoring first.
        """
        assert rnn_state[0].shape[1] == 1
        device = rnn_state[0].device
        # init attention
        if att_features is None:
            att_features = ()
            att_tokens = ()
            att_masks = ()
            att_token_proj = ()
        else:
            assert isinstance(att_features, list) \
                or isinstance(att_features, tuple)
            att_masks = tuple(
                (toks == self.vocab.pad()).float() for toks in att_tokens
            )
            att_token_proj = tuple(
                self._make_projection(toks) for toks in att_tokens
            )
        # initialize beam
        beam = [BeamState(
            rnn_state[0].new_zeros(self.n_hidden),
            [s.squeeze(1) for s in rnn_state],
            [],
            [self.vocab.sos()],
            0.,
            None,
            False
        )]
        for t in range(max_len):
            if all(s.done for s in beam):
                break
            # Re-batch the surviving hypotheses so they advance in one step.
            rnn_state = [
                torch.stack([s.rnn_state[i] for s in beam], dim=1)
                for i in range(len(beam[0].rnn_state))
            ]
            # Finished hypotheses repeat their last token as filler.
            tokens = torch.LongTensor([
                [s.tokens[tt] if tt < len(s.tokens) else s.tokens[-1] for s in beam]
                for tt in range(t+1)
            ]).to(device)
            decoder_state = DecoderState(
                torch.stack([s.feed for s in beam]),
                rnn_state,
                [torch.stack(
                    [s.hiddens[tt] if tt < len(s.hiddens) else s.hiddens[-1] for s in beam],
                    dim=1) for tt in range(t)],
                tokens,
            )
            self_att_proj = self._make_projection(tokens)
            # Attention inputs are broadcast across the current beam width.
            pred, feed, rnn_state, hidden, *_ = self.step(
                decoder_state,
                tuple(f.expand(f.shape[0], len(beam), f.shape[2]) for f in att_features),
                tuple(t.expand(t.shape[0], len(beam)) for t in att_tokens),
                tuple(m.expand(m.shape[0], len(beam)) for m in att_masks),
                tuple(p.expand(p.shape[0], len(beam), p.shape[2]) for p in att_token_proj),
                self_att_proj
            )
            logprobs = F.log_softmax(pred, dim=1)
            next_beam = []
            for i, row in enumerate(logprobs):
                # Never expand through the internal <copy> marker.
                row[self.vocab.copy()] = -np.inf
                scores, toks = row.topk(beam_size)
                if beam[i].done:
                    next_beam.append(beam[i])
                else:
                    # Note: the comprehension variable `s` below is scoped to
                    # the comprehension and does not clobber the score `s`.
                    for s, t in zip(scores, toks):
                        next_beam.append(BeamState(
                            feed[i, :],
                            [s[:, i, :] for s in rnn_state],
                            beam[i].hiddens + [hidden[:, i, :]],
                            beam[i].tokens + [t.item()],
                            beam[i].score + s,
                            beam[i],
                            t == self.vocab.eos()
                        ))
            next_beam = sorted(next_beam, key=lambda x: -x.score)
            beam = next_beam[:beam_size]
        return [s.tokens for s in beam]
class Vocab(object):
    """Bidirectional token <-> id mapping with reserved special symbols.

    The five special symbols below are inserted first (ids 0..4) so that
    pad()/sos()/eos()/copy()/unk() are stable across vocabularies.
    """

    PAD = '<pad>'
    SOS = '<s>'
    EOS = '</s>'
    COPY = '<copy>'
    UNK = '<unk>'

    def __init__(self):
        self._contents = {}
        self._rev_contents = {}
        for special in (self.PAD, self.SOS, self.EOS, self.COPY, self.UNK):
            self.add(special)

    def add(self, sym):
        """Insert `sym` if new; return its id either way."""
        if sym not in self._contents:
            idx = len(self._contents)
            self._contents[sym] = idx
            self._rev_contents[idx] = sym
        return self._contents[sym]

    def __getitem__(self, sym):
        return self._contents[sym]

    def __contains__(self, sym):
        return sym in self._contents

    def __len__(self):
        return len(self._contents)

    def encode(self, seq, unk=False):
        """Map symbols to ids wrapped in <s>...</s>; unk=True maps OOVs to <unk>."""
        if unk:
            seq = [s if s in self else self.UNK for s in seq]
        return [self.sos()] + [self[s] for s in seq] + [self.eos()]

    def decode(self, seq):
        """Map ids back to symbols, dropping <s> and </s> (other specials kept)."""
        symbols = [self._rev_contents[i] for i in seq]
        return [s for s in symbols if s not in (self.SOS, self.EOS)]

    def get(self, i):
        return self._rev_contents[i]

    def pad(self):
        return self._contents[self.PAD]

    def sos(self):
        return self._contents[self.SOS]

    def eos(self):
        return self._contents[self.EOS]

    def copy(self):
        return self._contents[self.COPY]

    def unk(self):
        return self._contents[self.UNK]

    def __str__(self):
        lines = ["Vocab("]
        lines += ["\t%s:\t%s" % pair for pair in self._contents.items()]
        lines.append(")")
        return "\n".join(lines)
| {"/predict_interactive.py": ["/common.py"], "/train.py": ["/common.py"], "/preproc.py": ["/common.py", "/util.py"], "/predict.py": ["/common.py"], "/models/sim.py": ["/util.py"], "/rep_service.py": ["/common.py"], "/models/seq2seq.py": ["/models/torchdec.py", "/util.py"], "/common.py": ["/models/sim.py", "/models/seq2seq.py", "/util.py"]} |
59,466 | polesonair/task_5_1 | refs/heads/main | /dirty_main.py | from main import *
# Доступные функции
application.db.people.get_employees()
application.salary.calculate_salary()
| {"/dirty_main.py": ["/main.py"], "/main.py": ["/application/salary.py", "/application/db/people.py"]} |
59,467 | polesonair/task_5_1 | refs/heads/main | /application/db/people.py | def get_employees():
print('Сотрудники') | {"/dirty_main.py": ["/main.py"], "/main.py": ["/application/salary.py", "/application/db/people.py"]} |
59,468 | polesonair/task_5_1 | refs/heads/main | /main.py | from datetime import date
import application.salary
import application.db.people
def main():
    """Print the payroll calculation, the employee list, and today's date."""
    # FIX: application/salary.py defines counting_salary, not calculate_salary;
    # the old call raised AttributeError at runtime.
    application.salary.counting_salary()
    application.db.people.get_employees()
    print(date.today().strftime("%m.%d.%Y"))


if __name__ == '__main__':
    main()
59,469 | polesonair/task_5_1 | refs/heads/main | /application/salary.py | def counting_salary():
print('Подсчет зарплаты') | {"/dirty_main.py": ["/main.py"], "/main.py": ["/application/salary.py", "/application/db/people.py"]} |
59,475 | chooyee/pdfocr | refs/heads/master | /models/intepret.py | import pandas as pd
import csv
import spacy
from infra.logger import Logger
# print(dataframe)
# for ind in dataframe.index:
# print(dataframe[3][ind])
# find_name(dataframe[3][ind])
# # data = dataframe.style.set_properties(align="left")
# #Converting it in a excel-file
# dataframe.to_csv("output.csv")
def read_text(dataframe):
    """Run entity extraction over the last column of an OCR result frame.

    Iterates every row, feeding the final column's text to find_name, and
    accumulates the detected bank names and account numbers.

    Returns:
        (bank_names, bank_accounts) lists. On error the rows processed so
        far are returned -- previously the except path implicitly returned
        None, which broke callers that unpack two values.
    """
    bankNames = []
    bankAccs = []
    try:
        last_col = dataframe.columns[-1]
        for ind in dataframe.index:
            bname, bacc = find_name(dataframe[last_col][ind])
            bankNames += bname
            bankAccs += bacc
    except Exception as e:
        # Log and fall through so the caller still receives two lists.
        Logger.Error(str(e))
    return bankNames, bankAccs
def find_name(str):
    """Extract bank organization names and candidate account numbers from text.

    Returns (bankName, bankAcc): ORG-entity token texts and numeric strings
    longer than 2 characters respectively.

    NOTE(review): the parameter shadows the builtin `str`, and a fresh spaCy
    transformer pipeline is loaded on every call (very slow) -- consider
    renaming the parameter and loading the model once at module level.
    """
    bankName = []
    bankAcc = []
    nlp = spacy.load("en_core_web_trf")
    # nlp = spacy.load("en_core_web_sm")
    ruler = nlp.add_pipe("entity_ruler")
    # Known Malaysian bank names force-labelled as ORG entities.
    patterns = [{"label": "ORG", "pattern": "AmBank (M) Berhad"},
                {"label": "ORG", "pattern": "CIMB Bank Berhad"},
                {"label": "ORG", "pattern": "Public Bank Berhad"},
                {"label": "ORG", "pattern": "Malayan Banking Berhad"},
                {"label": "ORG", "pattern": "Bank Islam Malaysia"},
                {"label": "ORG", "pattern": "Alliance Bank Malaysia"},
                {"label": "ORG", "pattern": "Agro Bank Malaysia"},
                {"label": "ORG", "pattern": "Hong Leong Bank Berhad"},
                {"label": "ORG", "pattern": "RHB Bank Berhad"}]
    ruler.add_patterns(patterns)
    # Merge multi-token entities so each bank name arrives as one token.
    nlp.add_pipe("merge_entities")
    doc = nlp(str)
    for token in doc:
        ent = [token.text, token.ent_iob_, token.ent_type_]
        if (token.text.strip()!=''):
            # print(ent)
            if token.ent_type_=='ORG':
                bankName.append(token.text)
            elif (token.ent_type_=='CARDINAL' or token.ent_type_=='') and len(token.text)>2 and token.text.isnumeric():
                # Digit runs longer than 2 chars are treated as account numbers.
                bankAcc.append(token.text)
    return bankName, bankAcc
    # mytuple = (bankName, bankAcc)
    # print(mytuple)
59,476 | chooyee/pdfocr | refs/heads/master | /controllers/conlist.py | from db.sqlite import ConMan
def GetConList(uuid):
    """Fetch all conlist rows recorded under the given batch uuid."""
    query = '''select * from conlist where uuid=?'''
    with ConMan() as db:
        return db.Select(query, (uuid,))
59,477 | chooyee/pdfocr | refs/heads/master | /infra/logger.py | import logging
import datetime
# logging.debug('This message should go to the log file')
# logging.info('So should this')
# logging.warning('And this, too')
# logging.error('And non-ASCII stuff, too, like Øresund and Malmö')
class Logger:
    """Thin wrapper around stdlib logging that writes to a date-stamped file.

    Each static method builds a throwaway instance whose constructor calls
    logging.basicConfig with today's date as the filename; basicConfig is a
    no-op once the root logger is configured, so the first call wins.
    """

    def __init__(self):
        stamp = datetime.datetime.now().strftime("%Y-%m-%d")
        logging.basicConfig(filename=stamp + '.log', level=logging.DEBUG)

    def _debug(self, msg):
        logging.debug(msg)

    def _info(self, msg):
        logging.info(msg)

    def _warning(self, msg):
        logging.warning(msg)

    def _error(self, msg):
        logging.error(msg)

    @staticmethod
    def Debug(msg):
        Logger()._debug(msg)

    @staticmethod
    def Info(msg):
        Logger()._info(msg)

    @staticmethod
    def Warning(msg):
        Logger()._warning(msg)

    @staticmethod
    def Error(msg):
        Logger()._error(msg)
59,478 | chooyee/pdfocr | refs/heads/master | /db/sqlite.py | import sqlite3
from sqlite3 import Error
from dotenv import load_dotenv
from infra.logger import Logger
import os
class ConMan:
    """SQLite connection manager usable as a context manager.

    The database path is taken from the `dbname` entry of ./config.env
    (loaded via python-dotenv) relative to the current working directory.
    Connection errors are logged rather than raised, matching the original
    best-effort behavior.
    """

    def __init__(self):
        root = os.getcwd()
        load_dotenv(root + '/config.env')
        self.database = root + '/' + os.environ.get("dbname")
        self._create_connection()

    def _create_connection(self):
        """Open the SQLite connection; log (not raise) on failure."""
        self.Conn = None
        try:
            self.Conn = sqlite3.connect(self.database)
        except Error as e:
            print(e)
            Logger.Error(str(e))

    def _create_tables(self):
        """Create the conlist table if it does not already exist."""
        con = self.Conn
        cur = con.cursor()
        cur.execute('''CREATE TABLE if not exists "conlist" (
            "id"	INTEGER,
            "uuid"	TEXT,
            "listdate"	TEXT,
            "bankname"	TEXT,
            "bankaccno"	TEXT,
            PRIMARY KEY("id" AUTOINCREMENT)
        )''')
        con.commit()

    def Execute(self, sql, params):
        """Run an INSERT/UPDATE/DELETE; a list of params triggers executemany."""
        try:
            con = self.Conn
            cur = con.cursor()
            if type(params) is list:
                cur.executemany(sql, params)
            else:
                cur.execute(sql, params)
            con.commit()
        except Exception as e:
            Logger.Error(str(e))

    def Select(self, sql, params):
        """Run a SELECT with bound params; return all rows, or [] on error."""
        try:
            con = self.Conn
            cur = con.cursor()
            cur.execute(sql, params)
            return cur.fetchall()
        except Exception as e:
            Logger.Error("Select: " + str(e))
            Logger.Error("sql: " + sql)
            # FIX: params is usually a tuple; the previous "Params: " + params
            # concatenation raised TypeError inside this error handler.
            Logger.Error("Params: " + str(params))
            return []

    def Close_connection(self):
        """Close the underlying connection if one was opened."""
        if self.Conn:
            self.Conn.close()

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        self.Close_connection()
# with ConMan() as dbConn:
# dbConn.method() | {"/models/intepret.py": ["/infra/logger.py"], "/controllers/conlist.py": ["/db/sqlite.py"], "/db/sqlite.py": ["/infra/logger.py"], "/main.py": ["/controllers/comprehend.py", "/db/sqlite.py"], "/app.py": ["/controllers/comprehend.py", "/db/sqlite.py", "/controllers/conlist.py"], "/controllers/comprehend.py": ["/models/pdf.py", "/db/sqlite.py", "/models/intepret.py", "/infra/logger.py"]} |
59,479 | chooyee/pdfocr | refs/heads/master | /main.py | from controllers.comprehend import Comprehend
from db.sqlite import ConMan
import os
import sys
# unique_filename = str(uuid.uuid4())
# folderPath = './temp/'
# decryptedPdfFileName = folderPath+ unique_filename + ' _decrypted.pdf'
# decrypt_pdf('Laporan-CCIS-Akaun-Mule-Bank23-07-2020.pdf', decryptedPdfFileName, 'CCID20200723')
# convert_img(decryptedPdfFileName, folderPath + unique_filename)
# if __name__ == '__main__':
# with ConMan() as con:
# con._create_tables();
# params = sys.argv[1:]
# filename = params[0]
# password = params[1]
# # ocr_table(params[0])
# if not os.path.exists('temp'):
# os.makedirs('temp')
# with Comprehend() as comprehend:
# comprehend.Start(filename=filename, password=password, decrypt=True) | {"/models/intepret.py": ["/infra/logger.py"], "/controllers/conlist.py": ["/db/sqlite.py"], "/db/sqlite.py": ["/infra/logger.py"], "/main.py": ["/controllers/comprehend.py", "/db/sqlite.py"], "/app.py": ["/controllers/comprehend.py", "/db/sqlite.py", "/controllers/conlist.py"], "/controllers/comprehend.py": ["/models/pdf.py", "/db/sqlite.py", "/models/intepret.py", "/infra/logger.py"]} |
59,480 | chooyee/pdfocr | refs/heads/master | /app.py | import imghdr
import os
from flask import Flask, render_template, request, redirect, url_for, abort, send_from_directory, jsonify
from werkzeug.utils import secure_filename
from controllers.comprehend import Comprehend
from db.sqlite import ConMan
from controllers.conlist import GetConList
from waitress import serve
app = Flask(__name__)
# Reject request bodies larger than 2 MiB (surfaced by the 413 handler below).
app.config['MAX_CONTENT_LENGTH'] = 2 * 1024 * 1024
# NOTE(review): this whitelist is never enforced — the extension check in
# upload_files is commented out; confirm whether that is intentional.
app.config['UPLOAD_EXTENSIONS'] = ['.jpg', '.png', '.gif']
app.config['UPLOAD_PATH'] = 'uploads'
# Create the SQLite tables once at startup.
with ConMan() as con:
    con._create_tables();
# Working directory for decrypted PDFs and rendered page images.
if not os.path.exists('temp'):
    os.makedirs('temp')
def validate_image(stream):
    """Sniff the image type from the first 512 bytes of *stream*.

    The stream is rewound afterwards so callers can still save it.
    Returns a file extension like '.png'/'.jpg', or None when the header
    is not a recognised image format.
    """
    header = stream.read(512)
    stream.seek(0)
    detected = imghdr.what(None, header)
    if detected is None:
        return None
    # imghdr reports 'jpeg'; the site convention is a '.jpg' extension.
    return '.jpg' if detected == 'jpeg' else '.' + detected
@app.errorhandler(413)
def too_large(e):
    """413 handler: the request body exceeded MAX_CONTENT_LENGTH (2 MiB)."""
    return "File is too large", 413
@app.route('/')
def index():
    """Home page: list previously uploaded files from the uploads directory."""
    files = os.listdir(app.config['UPLOAD_PATH'])
    return render_template('index.html', files=files)
@app.route('/', methods=['POST'])
def upload_files():
    """Accept a PDF upload plus its password, run the OCR pipeline, and
    redirect to the result listing for the new batch.

    On any failure the exception text is returned as the response body
    (same best-effort behaviour as before).
    """
    try:
        uploaded_file = request.files['file']
        password = request.form['password']
        filename = secure_filename(uploaded_file.filename)
        if filename != '':
            pdfFileName = os.path.join(app.config['UPLOAD_PATH'], filename)
            uploaded_file.save(pdfFileName)
            with Comprehend() as comprehend:
                uuid = comprehend.Start(filename=pdfFileName, password=password, decrypt=True)
                # FIX: was `flask.redirect(...)`, but this module imports
                # names from flask (`from flask import ...`), never the
                # `flask` module itself, so that line raised NameError.
                # (Also removed the debug print of the raw password —
                # secrets must not be written to stdout/logs.)
                return redirect("/conlist/" + uuid)
        # NOTE(review): an empty filename falls through and returns None
        # (an HTTP 500 in Flask) — unchanged from the original; confirm
        # whether a 400 response is wanted here.
    except Exception as e:
        return str(e)
@app.route('/uploads/<filename>')
def upload(filename):
    """Serve a previously uploaded file back from the uploads directory."""
    return send_from_directory(app.config['UPLOAD_PATH'], filename)
@app.route('/conlist/<uuid>')
def getConList(uuid):
    """Render the extracted bank/account rows for one upload batch *uuid*."""
    resultList = GetConList(uuid)
    # (Removed leftover debug prints that dumped every row to stdout.)
    return render_template('result.html', rows=resultList)
if __name__ == "__main__":
    #app.run(host='0.0.0.0')
    # Serve with waitress (a production WSGI server) instead of the Flask
    # development server.
    serve(app, host='0.0.0.0', port=8080)
59,481 | chooyee/pdfocr | refs/heads/master | /controllers/comprehend.py | from models.pdf import decrypt_pdf, convert_img
from db.sqlite import ConMan
from models.ocr import ocr_table
from models.intepret import read_text
from infra.logger import Logger
import uuid
import os
import datetime
class Comprehend:
    """Pipeline driver: decrypt a PDF report, OCR each rendered page image,
    extract bank-name / account-number pairs, and persist them to SQLite
    under a single batch uuid.
    """

    def __init__(self):
        pass

    def Start(self, filename, password=None, decrypt=False):
        """Run the whole pipeline for one PDF.

        Args:
            filename: path of the (possibly encrypted) source PDF.
            password: PDF password, used when *decrypt* is True.
            decrypt: when True, write a decrypted copy before converting.

        Returns:
            The batch uuid (str) on success; None when any step raised
            (errors are logged — same swallow-and-log behaviour as before).
        """
        try:
            listdate = datetime.datetime.now().strftime("%Y-%m-%d")
            unique_filename = str(uuid.uuid4())
            folderPath = './temp/'
            decryptedPdfFileName = folderPath + unique_filename + ' _decrypted.pdf'
            if (decrypt):
                decrypt_pdf(filename, decryptedPdfFileName, password)
            # NOTE(review): when decrypt is False the decrypted file was never
            # written, yet it is still the conversion source — confirm intended.
            images = convert_img(decryptedPdfFileName, folderPath + unique_filename)
            with ConMan() as con:
                # One loop counter instead of the original parallel i/j pair.
                for page_no in range(len(images)):
                    page_image = folderPath + unique_filename + '_page' + str(page_no) + '.jpg'
                    print(page_image)
                    dataframe = ocr_table(page_image)
                    print('=============================Read text======================================')
                    bankNames, bankAccs = read_text(dataframe)
                    if (len(bankNames) != len(bankAccs)):
                        # Roll back everything inserted for this batch, then abort.
                        # (The original also had an unreachable `break` after raise.)
                        sql, param = self._cleanBatchRecordSql(unique_filename)
                        con.Execute(sql, param)
                        raise Exception('Number of banks and account number does not match!')
                    print('=============================Massage======================================')
                    params = self._massage(bankNames, bankAccs, listdate, unique_filename)
                    sql = '''insert into conlist (uuid, listdate, bankname, bankaccno) values (?,?,?,?)'''
                    print('=============================Insert======================================')
                    con.Execute(sql, params)
            return unique_filename
        except Exception as e:
            print("Oops!", str(e), "occurred.")
            print("Oops!", e.__class__, "occurred.")
            Logger.Error(str(e))

    def _cleanBatchRecordSql(self, uuid):
        """Return (sql, params) deleting every row of the given batch.

        FIX: the original statement was `delete * from conlist ...`, which is
        not valid SQL (DELETE takes no column list), and the parameter was
        `(uuid)` — a bare string, not the 1-tuple the DB-API expects.
        """
        sql = "delete from conlist where uuid = ?"
        param = (uuid,)
        return sql, param

    def _massage(self, bankNames, bankAccs, listdate, uuid):
        """Zip the parallel name/account lists into insert-ready row tuples."""
        params = []
        for i in range(len(bankNames)):
            params.append((uuid, listdate, bankNames[i], bankAccs[i]))
        return params

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        pass
59,482 | chooyee/pdfocr | refs/heads/master | /models/pdf.py | import pikepdf
# import module
from pdf2image import convert_from_path
def decrypt_pdf(input_path, output_path, password):
    """Save a decrypted copy of a password-protected PDF.

    Opens *input_path* with *password* via pikepdf and writes the
    unencrypted document to *output_path*. A wrong password raises
    pikepdf's password error — presumably pikepdf.PasswordError; confirm
    against the pikepdf documentation.
    """
    with pikepdf.open(input_path, password=password) as pdf:
        pdf.save(output_path)
def convert_img(decrypted_pdf, unique_filename):
    """Render each PDF page to a JPEG named '<unique_filename>_page<i>.jpg'.

    Returns the list of page images produced by pdf2image so callers can
    count pages.
    """
    pages = convert_from_path(decrypted_pdf)
    for page_no, page in enumerate(pages):
        page.save('{}_page{}.jpg'.format(unique_filename, page_no), 'JPEG')
    return pages
# if __name__ == '__main__':
# unique_filename = str(uuid.uuid4())
# folderPath = './temp/'
# decryptedPdfFileName = folderPath+ unique_filename + ' _decrypted.pdf'
# decrypt_pdf('Laporan-CCIS-Akaun-Mule-Bank23-07-2020.pdf', decryptedPdfFileName, 'CCID20200723')
# convert_img(decryptedPdfFileName, folderPath + unique_filename) | {"/models/intepret.py": ["/infra/logger.py"], "/controllers/conlist.py": ["/db/sqlite.py"], "/db/sqlite.py": ["/infra/logger.py"], "/main.py": ["/controllers/comprehend.py", "/db/sqlite.py"], "/app.py": ["/controllers/comprehend.py", "/db/sqlite.py", "/controllers/conlist.py"], "/controllers/comprehend.py": ["/models/pdf.py", "/db/sqlite.py", "/models/intepret.py", "/infra/logger.py"]} |
59,483 | gregplechaty/homepage | refs/heads/main | /manage.py | ### Site Generator created 1/24 ###
### refactored 2/3/21 ###
### jinja2 added 2/13/21###
import sys
import utils
# A sub-command is required; argv[0] is the script name itself.
if len(sys.argv) == 1:
    print("Please specify ’build’ or ’new’ as an argument")
    quit()
command = sys.argv[1]
if command == "build":
    print("Build was specified")
    # NOTE(review): this __main__ guard is nested inside the build branch,
    # so importing this module with the same argv would print but never
    # build — confirm the nesting is intentional.
    if __name__ == "__main__":
        utils.main()
elif command == "new":
    print("New page was specified")
    file_name_input = input("Please provide new file name: ")
    new_file_name = utils.create_new_html_page(file_name_input)
    print('Content page "' + new_file_name + '''" was created in 'content' folder.''')
else:
    print("Please specify ’build’ or ’new’")
| {"/manage.py": ["/utils.py"]} |
59,484 | gregplechaty/homepage | refs/heads/main | /utils.py | ### Site Generator created 1/24 ###
### refactored 2/3/21 ###
### jinja2 added 2/13/21###
import datetime
import glob
import os
from jinja2 import Template
import re
##########################################################
### Create dictionaries - pages and blog_posts
def create_page_list():
    """Collect metadata for every top-level page under content/.

    Returns a list of dicts per page: 'title' (capitalised file stem),
    'filename' (source path under ./content), 'output' (target path under
    ./docs) and 'link' (bare file name).
    """
    pages = []
    for source_path in glob.glob("content/*.html"):
        page_file = os.path.basename(source_path)
        stem, _ext = os.path.splitext(page_file)
        pages.append({
            "title": stem.capitalize(),
            "filename": "./content/" + page_file,
            "output": "./docs/" + page_file,
            "link": page_file,
        })
    return pages
def create_blog_posts_list():
    """Scan blog/*.html and build a metadata dict per post.

    Each post page must embed an <img src=... class...> tag, an <h6> date,
    an <h1> title and an <h3> subtitle. Returns a list of dicts with keys
    filename/date/title/subtitle/output/image.
    """
    all_blog_posts = glob.glob("blog/*.html")
    blog_posts_dict = []
    for post in all_blog_posts:
        post_filename = os.path.basename(post)
        filename = "blog/" + post_filename
        output = "./docs/" + post_filename
        # Read the post markup; close the handle promptly.
        with open(filename) as fh:
            blog_post_html = fh.read()
        image_search = re.search(r'src=(.*?) class', blog_post_html)
        date_search = re.search(r'<h6>(.*?)</h6>', blog_post_html)
        title_search = re.search(r'<h1>(.*?)</h1>', blog_post_html)
        subtitle_search = re.search(r'<h3>(.*?)</h3>', blog_post_html)
        # FIX: the original guard was the placeholder `if False:` and called
        # .group(1) directly on the search results, so a post missing any
        # field crashed with AttributeError instead of printing the intended
        # abort message below.
        if None in (image_search, date_search, title_search, subtitle_search):
            print("Blog information in html files is incomplete. These must be filled in before site can be generated. Aborting site generation.")
            quit()
        blog_posts_dict.append({
            "filename": filename,
            "date": date_search.group(1),
            "title": title_search.group(1),
            "subtitle": subtitle_search.group(1),
            "output": output,
            "image": image_search.group(1),
        })
    return blog_posts_dict
### Create Blank html page (in content directory)
def create_new_html_page(file_name_input):
    """Create a skeleton content page under ./content and return its name.

    FIX: the file handle is now closed via a context manager (it was left
    open), and the content directory is created if missing so the command
    works in a fresh checkout.
    """
    content_template_html = """<div class="row"></div>"""
    os.makedirs("./content", exist_ok=True)
    with open("./content/" + file_name_input, "w+") as fh:
        fh.write(content_template_html)
    return file_name_input
### Read input files
def read_content(file_name):
    """Return the full text of *file_name*.

    FIX: uses a context manager so the handle is closed promptly instead
    of leaking until garbage collection.
    """
    with open(file_name) as fh:
        return fh.read()
### Placeholder replacement, using Jinja
def placeholder_replacement_base(base, page_title, content, pages):
    """Render the site-wide base template.

    Args:
        base: text of templates/base.html (a Jinja2 template string).
        page_title: value for the template's 'title' variable.
        content: pre-rendered page body for the 'content' slot.
        pages: page dicts used by the navigation ('pages_dict').

    Returns the complete page HTML.
    """
    template = Template(base)
    context = {
        'pages_dict': pages,
        'content': content,
        'title': page_title,
        'get_year': datetime.datetime.now().strftime("%Y"),
    }
    return template.render(context)
### write 'thoughts' blog pages
def write_blog_posts(blog_posts, base, pages):
    """Render every blog post into a complete page inside the base template.

    Args:
        blog_posts: dicts with 'filename' (source fragment) and 'output'
            (destination under ./docs).
        base: text of templates/base.html (a Jinja2 template string).
        pages: page dicts used by the navigation.

    FIX: the base template is compiled once (it was rebuilt per post) and
    all file handles are closed via context managers.
    """
    base_template = Template(base)
    year = datetime.datetime.now().strftime("%Y")
    for post in blog_posts:
        with open(post['filename']) as src:
            blog_content = src.read()
        blog_page_final = base_template.render({
            'title': 'Thoughts',
            'pages_dict': pages,
            'content': blog_content,
            'get_year': year,
        })
        with open(post['output'], "w+") as dst:
            dst.write(blog_page_final)
def write_thoughts_blog_past_posts(blog_posts_info, past_posts_html):
    """Build the concatenated 'past posts' HTML, newest post first.

    Args:
        blog_posts_info: post metadata dicts (see create_blog_posts_list).
        past_posts_html: path of the per-post card template.

    FIX: the card template was re-read from disk and recompiled on every
    loop iteration; it is now loaded once. String accumulation uses
    ''.join instead of repeated +=.
    """
    with open(past_posts_html) as fh:
        card_template = Template(fh.read())
    rendered_cards = []
    for post in reversed(blog_posts_info):
        rendered_cards.append(card_template.render({
            'blog_post_link': post['output'].replace('/docs', ''),
            'blog_post_title': post['title'],
            'blog_post_date': post['date'],
            'blog_post_subtitle': post['subtitle'],
            'blog_post_image': post['image'],
        }))
    return ''.join(rendered_cards)
def write_thoughts_content(thoughts_base, blog_posts, blog_past_posts):
    """Render the Thoughts landing page: feature the newest post, append
    the pre-rendered past-post cards.

    Returns the rendered HTML, or None when *blog_posts* is empty
    (matching the original fall-through behaviour).

    FIX: the original looped over reversed(blog_posts) with a `last` flag
    and returned on the first iteration — equivalent to simply using the
    final (newest) list entry.
    """
    if not blog_posts:
        return None
    latest = blog_posts[-1]
    return Template(thoughts_base).render(
        blog_post_image=latest['image'],
        blog_post_link=latest['output'].replace('/docs', ''),
        blog_post_title=latest['title'],
        blog_post_subtitle=latest['subtitle'],
        blog_past_posts=blog_past_posts,
    )
###########################################################################################
def main():
    """Generate the static site: render every content page (plus the blog)
    into ./docs using the shared base template.

    FIX: the base template and the blog-post list are loop-invariant and
    are now read/computed once instead of once per page; output files are
    written via context managers.
    """
    pages = create_page_list()
    with open("./templates/base.html") as fh:
        base = fh.read()
    blog_posts_dict = create_blog_posts_list()
    for page in pages:
        file_name = page['filename']
        file_output = page['output']
        file_title = page['title']
        base_html = read_content(file_name)
        if page['title'] == 'Thoughts':
            # Render each individual post page first.
            write_blog_posts(blog_posts_dict, base, pages)
            # Then the Thoughts landing page: past-post cards + newest post.
            blog_past_posts = write_thoughts_blog_past_posts(blog_posts_dict, "./templates/blog_past_post_base.html")
            thought_content = write_thoughts_content(base_html, blog_posts_dict, blog_past_posts)
            complete_page = placeholder_replacement_base(base, file_title, thought_content, pages)
        else:
            complete_page = placeholder_replacement_base(base, file_title, base_html, pages)
        with open(file_output, "w+") as out:
            out.write(complete_page)
    print('Site complete! Please review for accuracy.')
###########################################################################################
| {"/manage.py": ["/utils.py"]} |
59,485 | gregplechaty/homepage | refs/heads/main | /old blog_posts list of dictionaries.py |
# Legacy, hand-maintained blog-post metadata.
# NOTE(review): utils.create_blog_posts_list() now derives the same fields by
# scraping blog/*.html, so this module appears to be dead code — confirm
# before deleting.
blog_posts = [
    {
        "filename": "blog/1_too_many_pieces.html",
        "date": "January 15th, 2021",
        "title": "Too Many Pieces?",
        "subtitle": "When the quantity of tools is immense, and your skills are not, how do you know where to start?",
        "output": "./docs/1_too_many_pieces.html",
        "image": "./images/legos.jpg",
    },
    {
        "filename": "blog/2_one_month_in.html",
        "date": "February 7th, 2021",
        "title": "Balance is hard.",
        "subtitle": "Doing something new takes time. What to give up?",
        "output": "./docs/2_one_month_in.html",
        "image": "./images/balance.jpg",
    },
    {
        "filename": "blog/3.html",
        "date": "February 10th, 2021",
        "title": "My thoughts on Python so far",
        "subtitle": "A lot can be done with just a few commands.",
        "output": "./docs/3.html",
        "image": "./images/coconuts.jpg",
    },
]
| {"/manage.py": ["/utils.py"]} |
59,491 | wangxiaojiani/learning_excise_ui | refs/heads/master | /test_case/Conftest.py | # -*- coding: utf-8 -*-
#@Time :2019/10/10 0:31
#@Author :xj
#@Email :1027867874@qq.com
#@File :Conftest.py
#@Software :PyCharm
import pytest
from page.login_page import LoginPage
from page.home_oage import HomePage
from selenium.webdriver import Firefox
@pytest.fixture(scope='class')
def init_driver():
    """Class-scoped fixture: start Firefox and build the page objects.

    Yields the tuple (driver, LoginPage, HomePage); the browser is closed
    after the test class finishes.
    """
    print('开始执行login类用例')
    browser = Firefox()
    browser.maximize_window()
    login_page = LoginPage(browser)
    home_page = HomePage(browser)
    yield browser, login_page, home_page
    browser.quit()
    print('loggin类结束用例结束')
| {"/test_case/Conftest.py": ["/page/login_page.py", "/page/home_oage.py"], "/common/screen_shot.py": ["/common/path.py"], "/runner.py": ["/common/path.py"], "/common/mylog.py": ["/common/path.py"], "/page/login_page.py": ["/common/base.py", "/page/home_oage.py"], "/test_case/login_sta_test.py": ["/page/home_oage.py", "/page/login_page.py"], "/page/home_oage.py": ["/common/base.py"], "/common/base.py": ["/common/mylog.py", "/common/screen_shot.py"]} |
59,492 | wangxiaojiani/learning_excise_ui | refs/heads/master | /common/screen_shot.py | # -*- coding: utf-8 -*-
#@Time :2019/10/9 11:00
#@Author :xj
#@Email :1027867874@qq.com
#@File :screen_shot.py
#@Software :PyCharm
from selenium import webdriver
import random
import time,os
from common.path import project_path
from common import mylog
logger=mylog.MyLog('WJ')
def insert_img(driver):
    """Save a screenshot of the current page under <project>/img.

    The file name is a timestamp plus a random numeric suffix so repeated
    captures within the same second cannot collide; the path is logged.
    """
    suffix = ''.join(str(random.randint(100, 1000)) for _ in range(3))
    stamp = time.strftime('%y-%m-%d %H-%M-%S_{}').format(suffix)
    img_path = os.path.join(project_path, 'img', '{}.png').format(stamp)
    driver.get_screenshot_as_file(img_path)
    logger.info('当前截图文件路径为{}'.format(img_path))
if __name__ =='__main__':
    # Ad-hoc smoke test: open Chrome on baidu.com and grab one screenshot.
    driver=webdriver.Chrome()
    driver.get('http://www.baidu.com')
    insert_img(driver)
| {"/test_case/Conftest.py": ["/page/login_page.py", "/page/home_oage.py"], "/common/screen_shot.py": ["/common/path.py"], "/runner.py": ["/common/path.py"], "/common/mylog.py": ["/common/path.py"], "/page/login_page.py": ["/common/base.py", "/page/home_oage.py"], "/test_case/login_sta_test.py": ["/page/home_oage.py", "/page/login_page.py"], "/page/home_oage.py": ["/common/base.py"], "/common/base.py": ["/common/mylog.py", "/common/screen_shot.py"]} |
59,493 | wangxiaojiani/learning_excise_ui | refs/heads/master | /main.py | # -*- coding: utf-8 -*-
#@Time :2019/10/10 1:06
#@Author :xj
#@Email :1027867874@qq.com
#@File :main.py
#@Software :PyCharm
import pytest
import pytest_rerunfailures
if __name__=='__main__':
    # pytest.main(['-s','-m sm','--reruns=2','--reruns-delay=5',r'--html=result\report\test.html'])
    # Run only tests marked "sm", quietly; retry failures twice with a 5 s
    # delay; write raw Allure results to result/report/allure/.
    pytest.main (['-s', '-q', '-m sm', '--reruns=2', '--reruns-delay=5', r'--alluredir=result/report/allure/'])
    # pytest.main(['-s','-m sm',r'--junitxml=result\report\test.xml'])  # JUnit-XML report variant
    # pytest.main(['-m login and not demo',r'--resultlog=result\log\testlog.log'])  # plain-log report variant
59,494 | wangxiaojiani/learning_excise_ui | refs/heads/master | /test_data/login_data.py | # -*- coding: utf-8 -*-
#@Time :2019/10/9 16:00
#@Author :xj
#@Email :1027867874@qq.com
#@File :login_data.py
#@Software :PyCharm
# Negative cases: empty or ill-formed phone number / empty password, with the
# inline form hint each one should produce.
login_info_error=[{'account':'','pwd':'123213','expect_result':'请输入手机号'},
{'account':'1234','pwd':'32424','expect_result':'请输入正确的手机号'},{'account':'13296662567','pwd':'','expect_result':'请输入密码'},{'account':'','pwd':'','expect_result':'请输入手机号'}]
# Wrong-password case with the expected error-dialog message.
login_pwd_error=[{'account':'18684720553','pwd':'21321','expect_result':'此账号没有经过授权,请联系管理员!'}]
# Valid credentials; expected text fragment on the home page after login.
login_success_info=[{'account':'18684720553','pwd':'python','expect_result':'我的帐户'}]
59,495 | wangxiaojiani/learning_excise_ui | refs/heads/master | /runner.py | # -*- coding: utf-8 -*-
#@Time :2019/10/9 16:17
#@Author :xj
#@Email :1027867874@qq.com
#@File :runner.py
#@Software :PyCharm
import unittest
from test_case import login_sta
from HTMLTestRunnerNew import HTMLTestRunner
import time
from common.path import current_report_path
# Build a unittest suite from the login test module.
suit=unittest.TestSuite()
loader=unittest.TestLoader()
# NOTE(review): the test file on disk is login_sta_test.py; confirm that the
# imported name `login_sta` still matches an existing module.
suit.addTest(loader.loadTestsFromModule(login_sta))
# Timestamped HTML report written under result/report.
now =time.strftime('%Y-%m-%d %H-%M-%S')
file_path=current_report_path + '/'+now+ r'_test_report.html'
with open(file_path,'wb') as f:
    runner=HTMLTestRunner(stream=f,description='这是练习的测试用例',verbosity=2,title='输出测试报告',tester='wj')
    runner.run(suit)
| {"/test_case/Conftest.py": ["/page/login_page.py", "/page/home_oage.py"], "/common/screen_shot.py": ["/common/path.py"], "/runner.py": ["/common/path.py"], "/common/mylog.py": ["/common/path.py"], "/page/login_page.py": ["/common/base.py", "/page/home_oage.py"], "/test_case/login_sta_test.py": ["/page/home_oage.py", "/page/login_page.py"], "/page/home_oage.py": ["/common/base.py"], "/common/base.py": ["/common/mylog.py", "/common/screen_shot.py"]} |
59,496 | wangxiaojiani/learning_excise_ui | refs/heads/master | /common/path.py | # -*- coding: utf-8 -*-
#@Time :2019/10/9 10:45
#@Author :xj
#@Email :1027867874@qq.com
#@File :path.py
#@Software :PyCharm
import os
# Directory containing this module (…/common).
current_dir_path=os.path.dirname(__file__)
# Project root (parent of common/).
project_path=os.path.split(current_dir_path)[0]
# Destination file for test-run logs.
current_log_path=os.path.join(project_path,'result','log','test_log.text')
# Root directory for generated reports.
current_report_path=os.path.join(project_path,'result','report')
| {"/test_case/Conftest.py": ["/page/login_page.py", "/page/home_oage.py"], "/common/screen_shot.py": ["/common/path.py"], "/runner.py": ["/common/path.py"], "/common/mylog.py": ["/common/path.py"], "/page/login_page.py": ["/common/base.py", "/page/home_oage.py"], "/test_case/login_sta_test.py": ["/page/home_oage.py", "/page/login_page.py"], "/page/home_oage.py": ["/common/base.py"], "/common/base.py": ["/common/mylog.py", "/common/screen_shot.py"]} |
59,497 | wangxiaojiani/learning_excise_ui | refs/heads/master | /common/mylog.py | # -*- coding: utf-8 -*-
#@Time :2019/10/9 10:12
#@Author :xj
#@Email :1027867874@qq.com
#@File :mylog.py
#@Software :PyCharm
import logging
from common.path import current_log_path
class MyLog:
    """Thin wrapper around `logging` that attaches fresh console and file
    handlers for each record, emits it, then detaches them again so
    repeated calls never duplicate output.
    """

    def __init__(self, name):
        # Name of the underlying logging.Logger to emit through.
        self.name = name

    def mylog(self, levelname, msg):
        """Emit *msg* at *levelname* ('DEBUG'/'INFO'/'WARNING'/'ERROR'/'CRITICAL').

        An unknown level prints the original (Chinese) warning and emits
        nothing; handlers are always detached afterwards.
        """
        logger = logging.getLogger(self.name)
        logger.setLevel('DEBUG')
        formatter = logging.Formatter('[%(asctime)s]-[%(levelname)s]-[日志信息]:%(message)s-【%(pathname)s】-%(filename)s-%(module)s-%(funcName)s-%(lineno)d')
        # Console channel.
        ch = logging.StreamHandler()
        ch.setFormatter(formatter)
        ch.setLevel('DEBUG')
        # File channel (current_log_path comes from common.path).
        fh = logging.FileHandler(filename=current_log_path, encoding='utf-8')
        fh.setFormatter(formatter)
        fh.setLevel('DEBUG')
        logger.addHandler(fh)
        logger.addHandler(ch)
        # Dispatch table replaces the original if/elif ladder.
        emit = {
            'DEBUG': logger.debug,
            'INFO': logger.info,
            'WARNING': logger.warning,
            'ERROR': logger.error,
            'CRITICAL': logger.critical,
        }.get(levelname)
        if emit is not None:
            emit(msg)
        else:
            print('你输入的级别有误')
        logger.removeHandler(ch)
        logger.removeHandler(fh)
        # FIX: close the FileHandler so its file descriptor is released;
        # previously every call leaked an open handle to the log file.
        fh.close()

    def debug(self, msg):
        self.mylog('DEBUG', msg)

    def info(self, msg):
        self.mylog('INFO', msg)

    def warning(self, msg):
        self.mylog('WARNING', msg)

    def error(self, msg):
        self.mylog('ERROR', msg)

    def critical(self, msg):
        self.mylog('CRITICAL', msg)
if __name__ =='__main__':
    # Ad-hoc smoke test: emit one DEBUG and one ERROR record.
    logger = MyLog ('WJ')
    logger.debug ('dad')
    logger.error ('222')
59,498 | wangxiaojiani/learning_excise_ui | refs/heads/master | /page/login_page.py | # -*- coding: utf-8 -*-
#@Time :2019/10/9 15:06
#@Author :xj
#@Email :1027867874@qq.com
#@File :login_page.py
#@Software :PyCharm
"登录页面交互细节的封装(元素以及行为)"
from common.base import Page
from page.home_oage import HomePage
class LoginPage(Page):
    """Page object for the sign-in screen.

    Composes a HomePage so the shared entry point `login` can click the
    home-page login link before the form appears.
    """

    def __init__(self, selenium_driver):
        super(LoginPage, self).__init__(selenium_driver)
        self.p_home = HomePage(self.driver)

    # --- locators (XPath) ---
    # phone-number input
    account = "//input[@name='phone']"
    # password input
    pwd = "//input[@name='password']"
    # submit button of the login form
    login_button = "//button[@class='btn btn-special']"
    # inline hint for an empty/ill-formed phone number or empty password
    empty_pm = "//div[@class='form-error-info']"
    # dialog shown when the password is wrong
    pwd_error_pm = "//div[@class='layui-layer-content']"

    # --- actions ---
    def type_account(self, username):
        """Type the phone number into the account field."""
        account_input = self.find_element(self.account)
        account_input.send_keys(username)

    def type_pwd(self, password):
        """Type the password into the password field."""
        pwd_input = self.find_element(self.pwd)
        pwd_input.send_keys(password)

    def click_login_button(self):
        """Submit the login form."""
        self.find_element(self.login_button).click()

    def empty_hint(self):
        """Return the inline hint text (empty/ill-formed phone or password)."""
        return self.find_element(self.empty_pm).text

    def pwd_error_hint(self):
        """Return the wrong-password dialog text."""
        return self.find_element(self.pwd_error_pm).text

    def login(self, username, password):
        """Single entry point: open the form from the home page and sign in."""
        self.p_home.click_login_button()
        # The account field becoming visible confirms the navigation worked.
        self.wait_eleVisible(self.account)
        self.type_account(username)
        self.type_pwd(password)
        self.click_login_button()
| {"/test_case/Conftest.py": ["/page/login_page.py", "/page/home_oage.py"], "/common/screen_shot.py": ["/common/path.py"], "/runner.py": ["/common/path.py"], "/common/mylog.py": ["/common/path.py"], "/page/login_page.py": ["/common/base.py", "/page/home_oage.py"], "/test_case/login_sta_test.py": ["/page/home_oage.py", "/page/login_page.py"], "/page/home_oage.py": ["/common/base.py"], "/common/base.py": ["/common/mylog.py", "/common/screen_shot.py"]} |
59,499 | wangxiaojiani/learning_excise_ui | refs/heads/master | /test_case/login_sta_test.py | # -*- coding: utf-8 -*-
#@Time :2019/10/9 15:37
#@Author :xj
#@Email :1027867874@qq.com
#@File :login_sta.py
#@Software :PyCharm
import unittest,pytest
from selenium.webdriver import Firefox
from ddt import ddt,data
from page.home_oage import HomePage
from page.login_page import LoginPage
from test_data import login_data
@pytest.mark.demo
@pytest.mark.usefixtures('init_driver')
class TestLogin:
    """UI login scenarios.

    The class-scoped `init_driver` fixture yields the tuple
    (driver, login_page, home_page); tests unpack it positionally.
    """

    base_url = 'http://120.78.128.25:8765'

    @pytest.mark.sm
    @pytest.mark.parametrize('case', login_data.login_info_error)
    def test_login_error(self, case, init_driver):
        """Empty or ill-formed credentials must show the inline form hint."""
        driver, login_page, _ = init_driver
        driver.get(self.base_url)
        login_page.login(case['account'], case['pwd'])
        assert login_page.empty_hint() == case['expect_result']

    @pytest.mark.st
    # FIX: this wrong-password scenario was parametrized with
    # login_data.login_info_error (the empty/ill-formed data set), so the
    # expected messages could never match the password-error dialog read by
    # pwd_error_hint(). The original's commented-out
    # `@data(*login_data.login_pwd_error)` confirms the intended data set.
    @pytest.mark.parametrize('case', login_data.login_pwd_error)
    def test_login_empty_error(self, case, init_driver):
        """A wrong password must show the unauthorised-account dialog."""
        driver, login_page, _ = init_driver
        driver.get(self.base_url)
        login_page.login(case['account'], case['pwd'])
        assert login_page.pwd_error_hint() == case['expect_result']

    @pytest.mark.sa
    @pytest.mark.parametrize('case', login_data.login_success_info)
    def test_loginsuccess(self, case, init_driver):
        """Valid credentials land on the home page showing the account menu."""
        driver, login_page, home_page = init_driver
        driver.get(self.base_url)
        login_page.login(case['account'], case['pwd'])
        assert case['expect_result'] in home_page.select_username()
#
# if __name__=='__main__':
# pytest.main(['-s','-m sm','--reruns=2','--reruns-delay=5',r'--html=result\report\test.html'])
| {"/test_case/Conftest.py": ["/page/login_page.py", "/page/home_oage.py"], "/common/screen_shot.py": ["/common/path.py"], "/runner.py": ["/common/path.py"], "/common/mylog.py": ["/common/path.py"], "/page/login_page.py": ["/common/base.py", "/page/home_oage.py"], "/test_case/login_sta_test.py": ["/page/home_oage.py", "/page/login_page.py"], "/page/home_oage.py": ["/common/base.py"], "/common/base.py": ["/common/mylog.py", "/common/screen_shot.py"]} |
59,500 | wangxiaojiani/learning_excise_ui | refs/heads/master | /page/home_oage.py | # -*- coding: utf-8 -*-
#@Time :2019/10/9 15:23
#@Author :xj
#@Email :1027867874@qq.com
#@File :home_oage.py
#@Software :PyCharm
from common.base import Page
class HomePage(Page):
    """Page object for the landing page."""

    # 'Login' link in the page header
    login_button = "//a[text()='登录']"
    # logged-in account element next to the avatar image
    username = "//img[@class='mr-5']/parent::a"

    def click_login_button(self):
        """Open the sign-in form from the landing page."""
        self.find_element(self.login_button).click()

    def select_username(self):
        """Return the displayed account name of the signed-in user."""
        account_element = self.find_element(self.username)
        return account_element.text
| {"/test_case/Conftest.py": ["/page/login_page.py", "/page/home_oage.py"], "/common/screen_shot.py": ["/common/path.py"], "/runner.py": ["/common/path.py"], "/common/mylog.py": ["/common/path.py"], "/page/login_page.py": ["/common/base.py", "/page/home_oage.py"], "/test_case/login_sta_test.py": ["/page/home_oage.py", "/page/login_page.py"], "/page/home_oage.py": ["/common/base.py"], "/common/base.py": ["/common/mylog.py", "/common/screen_shot.py"]} |
59,501 | wangxiaojiani/learning_excise_ui | refs/heads/master | /common/base.py | # -*- coding: utf-8 -*-
#@Time :2019/10/9 11:46
#@Author :xj
#@Email :1027867874@qq.com
#@File :base.py
#@Software :PyCharm
"""所有页面的基类"""
from selenium import webdriver
from selenium.webdriver import Chrome
from common.mylog import MyLog
from selenium.webdriver.common.by import By
from selenium.common.exceptions import *
import time
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from common.screen_shot import insert_img
logger=MyLog('WJ')
class Page:
    """Base page object: wraps a Selenium driver with logged explicit waits,
    screenshot-on-failure element lookup, alert handling and JS execution.
    """

    def __init__(self, selenium_driver: Chrome):
        self.driver = selenium_driver
        # Implicit wait as a safety net; the explicit waits below do the real work.
        selenium_driver.implicitly_wait(30)

    def _wait_for(self, expected_condition, locator, by, wait_times):
        """Shared body of the three wait_* methods (they differed only in the
        expected-condition factory). Validates the locator strategy, waits,
        logs elapsed time, and screenshots + re-raises on timeout.
        """
        if by not in By.__dict__.values():
            logger.error("定位类型[{0}]不在支持的类型中,请修改定位类型".format(by))
            raise InvalidSelectorException
        t1 = time.time()
        try:
            WebDriverWait(self.driver, wait_times).until(expected_condition((by, locator)))
            t2 = time.time()
            logger.info('等待结束,等待开始时间:{},等待结束时间:{},等待时间长{}'.format(t1, t2, t2 - t1))
        except TimeoutException as e:
            logger.error("等待元素超时,截取当前页面")
            insert_img(self.driver)
            raise e
        except InvalidSelectorException as e:
            logger.error('元素定位表达式[{}]不正确,请修证'.format(locator))
            raise e

    def wait_eleExist(self, locator, by=By.XPATH, wait_times=40):
        """Wait until the element is present in the DOM (it may be invisible)."""
        self._wait_for(EC.presence_of_element_located, locator, by, wait_times)

    def wait_eleVisible(self, locator, by=By.XPATH, wait_times=40):
        """Wait until the element is visible on the page."""
        self._wait_for(EC.visibility_of_element_located, locator, by, wait_times)

    def wait_eleClickable(self, locator, by=By.XPATH, wait_times=40):
        """Wait until the element is visible and enabled (clickable)."""
        self._wait_for(EC.element_to_be_clickable, locator, by, wait_times)

    def find_element(self, locator, by=By.XPATH, wait_times=40, type='visible'):
        """Locate a single element after an explicit wait.

        Args:
            locator: locator expression (XPath by default).
            by: locator strategy, one of selenium's By values.
            wait_times: maximum seconds to wait (default 40).
            type: wait condition — 'visible' (default), 'exist', or anything
                else for clickable. (Parameter name kept for caller
                compatibility even though it shadows the builtin.)

        Returns:
            The WebElement; on lookup failure a screenshot is captured and
            the exception re-raised.
        """
        logger.info('当前元素风味的类型为{},当前查找元素的表达式为:{}'.format(by, locator))
        if type == 'visible':
            logger.info('开始等待元素在页面可见')
            self.wait_eleVisible(locator, by, wait_times)
        elif type == 'exist':
            logger.info('开始等待元素在当前页面存在')
            self.wait_eleExist(locator, by, wait_times)
        else:
            logger.info('开始等待元素可被点击')
            self.wait_eleClickable(locator, by, wait_times)
        try:
            return self.driver.find_element(by, locator)
        except NoSuchElementException as e:
            logger.error('元素查找失败,找不到元素。开始截去当前页面图像')
            insert_img(self.driver)
            raise e

    def alert_handler(self, action='accept'):
        """Wait for a JS alert, accept (default) or dismiss it, return its text."""
        WebDriverWait(self.driver, 10, 0.5).until(EC.alert_is_present())
        alert = self.driver.switch_to.alert
        message = alert.text
        if action == 'accept':
            alert.accept()
        else:
            alert.dismiss()
        return message

    def execute_script(self, js):
        """Execute raw JavaScript in the page and return its result."""
        return self.driver.execute_script(js)
if __name__=='__main__':
    # Ad-hoc check: an unsupported `by` value must be rejected with
    # InvalidSelectorException. Note this still launches a real Chrome session.
    Page(Chrome()).wait_eleExist('//input',by='xxx')
| {"/test_case/Conftest.py": ["/page/login_page.py", "/page/home_oage.py"], "/common/screen_shot.py": ["/common/path.py"], "/runner.py": ["/common/path.py"], "/common/mylog.py": ["/common/path.py"], "/page/login_page.py": ["/common/base.py", "/page/home_oage.py"], "/test_case/login_sta_test.py": ["/page/home_oage.py", "/page/login_page.py"], "/page/home_oage.py": ["/common/base.py"], "/common/base.py": ["/common/mylog.py", "/common/screen_shot.py"]} |
59,509 | weihengSu/basketApp | refs/heads/master | /login_check.py | import psycopg2
def create_user(username, email, password):
    """Insert a new row into basket_user with a parameterized query.

    Args:
        username, email, password: values for user_id / user_email /
            user_password.

    Returns None. FIX: the original ended with
    `return render_template('/showSignUp')`, but render_template is not
    imported in this module, so every successful insert then raised
    NameError; routing belongs to the Flask caller, not this helper. The
    connection is now closed even when the INSERT raises.
    """
    # NOTE(review): credentials are hard-coded here and in app.py — move to
    # configuration.
    conn = psycopg2.connect(database="basketball", user="postgres", password="password")
    try:
        cur = conn.cursor()
        cur.execute("INSERT INTO basket_user (user_id, user_email,user_password) VALUES (%s, %s, %s)", (username, email, password))
        conn.commit()
        cur.close()
    finally:
        conn.close()
| {"/app.py": ["/login_check.py", "/models.py"]} |
59,510 | weihengSu/basketApp | refs/heads/master | /app.py | from flask import Flask, render_template,json, request, redirect, url_for, request, get_flashed_messages
from login_check import *
from flask.ext.login import LoginManager, UserMixin, current_user, login_user, logout_user, login_required
from models import *
from flask import session
from nocache import *
#The web app is based on python 3.4+. python2 may not be compatible.
app = Flask(__name__)
# NOTE(review): hard-coded, guessable secret key — session cookies can be
# forged; load this from configuration/environment instead.
app.secret_key = 'secret'
# Wire Flask-Login into the app; the user_loader callback below restores
# User objects from the session cookie.
login_manager = LoginManager()
login_manager.init_app(app)
@login_manager.user_loader
def load_user(user_id):
    """Flask-Login callback: rebuild the User for *user_id*, or None if unknown."""
    stored = User.get(user_id)
    if stored is None:
        return None
    return User(user_id)
#sess = Session()
@app.route("/")
@nocache
def main():
    """Serve the public landing page."""
    page = render_template('index.html')
    return page
@app.route('/showSignUp')
@nocache
def showSignUp():
    """Render the account-registration form."""
    page = render_template('signup.html')
    return page
@app.route('/signUp',methods=['GET','POST'])
@nocache
def signUp():
    """Create a new account from the sign-up form.

    POST inserts the user unless the id is taken, then redirects home;
    GET re-renders the form.

    Fixes: the connection is closed in a ``finally`` block (the close calls
    used to sit after the returns and never ran), and the duplicate check is
    one WHERE query instead of fetching every user id.
    NOTE(review): password stored in plain text — hash it.
    """
    if request.method != 'POST':
        return render_template("signup.html")
    conn = psycopg2.connect(database="basketball", user="postgres", password="password")
    try:
        cur = conn.cursor()
        username = request.form['username']
        email = request.form['email']
        password = request.form['password']
        cur.execute("SELECT 1 FROM basket_user WHERE user_id = %s", (username,))
        if cur.fetchone() is None:
            cur.execute(
                "INSERT INTO basket_user (user_id, user_email,user_password) VALUES (%s, %s, %s)",
                (username, email, password))
            conn.commit()
            return redirect(url_for('main'))
        return render_template('error.html',error = 'User already exists. Please try again')
    finally:
        conn.close()
@app.route('/showSignin')
@nocache
def showSignin():
    """Render the sign-in form."""
    page = render_template('signin.html')
    return page
@app.route('/login', methods=['GET','POST'])
@nocache
def login():
    """Authenticate a user and start a Flask-Login session.

    Fixes: a single query fetches id/email/password together (was three
    round trips), and ``finally`` no longer raises NameError when
    ``connect()`` itself fails (cur/conn used to be referenced there
    unconditionally).
    NOTE(review): plain-text password comparison — hash and use a
    constant-time compare once the schema allows.
    """
    conn = None
    try:
        username = request.form['username']
        password = request.form['password']
        email = request.form['email']
        conn = psycopg2.connect("dbname='basketball' user='postgres' "
                                "host='localhost' password='password'")
        cur = conn.cursor()
        cur.execute(
            "SELECT user_id, user_email, user_password FROM basket_user WHERE user_id = %(id)s",
            {'id': username})
        row = cur.fetchone()
        if row is not None and email == row[1] and password == row[2]:
            user = User(username)
            login_user(user)
            session['user'] = row[0]
            return redirect(url_for('userHome'))
        return render_template('error.html',error = 'Wrong Email address or password.')
    except Exception as e:
        return render_template('error.html',error = str(e))
    finally:
        if conn is not None:
            conn.close()
@app.route('/userHome')
@nocache
@login_required
def userHome():
    """Render the dashboard for the session user; reject anonymous access."""
    current = session.get('user')
    if not current:
        return render_template('error.html',error = 'Unauthorized access.')
    return render_template('userHome.html', user=current)
@app.route('/logout')
@nocache
@login_required
def logout():
    """End the Flask-Login session, drop the session user, and go home."""
    logout_user()
    session.pop('user', None)
    landing = url_for('main')
    return redirect(landing)
@app.route('/showViewUser')
@nocache
def showViewUser():
    """Gate for the user-list page: only admins proceed to /viewUsers.

    Fixes: anonymous visitors now get an error page (the original fell off
    the end and returned None, a 500), and ``finally`` no longer references
    cur/conn that are unbound when ``connect()`` fails.  A missing user row
    now yields the not-admin message instead of an IndexError page.
    """
    username = session.get('user')
    if not username:
        return render_template('error.html', error = 'Unauthorized access.')
    conn = None
    try:
        conn = psycopg2.connect(database="basketball", user="postgres", password="password")
        cur = conn.cursor()
        cur.execute("SELECT user_admin FROM basket_user WHERE user_id = %(id)s", {'id': username })
        result = cur.fetchall()
        if result and result[0][0] == True:
            return redirect(url_for('viewUsers'))
        return render_template('error.html', error = 'You must be an admin to access the page')
    except Exception as e:
        return render_template('error.html', error = str(e))
    finally:
        if conn is not None:
            conn.close()
@app.route('/viewUsers')
@nocache
@login_required
def viewUsers():
    """List all users (admins only) through USERS_VIEW.

    Fixes: CREATE OR REPLACE VIEW (plain CREATE VIEW raised "already
    exists" on every visit after the first), guarded ``finally`` (cur/conn
    were unbound when connect() failed), removed the unused sub_data list,
    and anonymous access returns an error page instead of None.
    """
    username = session.get('user')
    if not username:
        return render_template('error.html', error = 'Unauthorized Access')
    conn = None
    try:
        conn = psycopg2.connect(database="basketball", user="postgres", password="password")
        cur = conn.cursor()
        cur.execute("SELECT user_admin FROM basket_user WHERE user_id = %(id)s", {'id': username })
        result = cur.fetchall()
        if result and result[0][0] == True:
            cur.execute("CREATE OR REPLACE VIEW USERS_VIEW AS SELECT user_id, user_email, user_admin FROM basket_user;")
            cur.execute("SELECT * FROM USERS_VIEW;")
            users_data = list(cur.fetchall())
            return render_template('view_user.html', users_data = users_data, user = username)
        return render_template('error.html', error = 'Unauthorized Access')
    except Exception as e:
        return render_template('error.html', error = str(e))
    finally:
        if conn is not None:
            conn.close()
@app.route('/showPlayerInfo')
@nocache
def showPlayerInfo():
    """Render the player-information entry form."""
    viewer = session.get('user')
    return render_template('player_info.html', user=viewer)
@app.route('/addPlayerInfo',methods=['GET','POST'])
@nocache
def addPlayerInfo():
    """Insert a new player row unless the id already exists.

    Fixes: the duplicate check is one WHERE query instead of fetching every
    player id, and the connection is closed in a ``finally`` block (the
    close calls sat after the returns and never ran — a connection leak).
    """
    if request.method != 'POST':
        return render_template("userHome.html")
    conn = psycopg2.connect(database="basketball", user="postgres", password="password")
    try:
        cur = conn.cursor()
        player_id = request.form['player_id']
        player_name = request.form['player_name']
        player_position = request.form['player_position']
        team_name = request.form['team_name']
        cur.execute("SELECT 1 FROM player_info WHERE player_id = %s", (player_id,))
        if cur.fetchone() is None:
            cur.execute(
                "INSERT INTO player_info (player_id, player_name, player_position, team_name) VALUES (%s, %s, %s, %s)",
                (player_id, player_name, player_position, team_name))
            conn.commit()
            return redirect(url_for('userHome'))
        return render_template('error.html',error = 'Player already exists. Try to add another player.')
    finally:
        conn.close()
@app.route('/showViewPlayer')
@nocache
def showViewPlayer():
    """Alias that forwards to the player listing."""
    target = url_for('viewPlayers')
    return redirect(target)
@app.route('/viewPlayers')
@nocache
@login_required
def viewPlayers():
    """List all players through PLAYERS_VIEW.

    Fixes: CREATE OR REPLACE VIEW (plain CREATE VIEW raised "already
    exists" on every visit after the first, sending users to the error
    page) and a guarded ``finally`` (cur/conn were referenced there even
    when connect() itself failed).
    """
    conn = None
    try:
        conn = psycopg2.connect(database="basketball", user="postgres", password="password")
        cur = conn.cursor()
        cur.execute("CREATE OR REPLACE VIEW PLAYERS_VIEW AS SELECT player_id, player_name, player_position, team_name FROM player_info;")
        cur.execute("SELECT * FROM PLAYERS_VIEW;")
        players_data = cur.fetchall()
        return render_template('view_playerInfo.html', players_data = players_data, user = session.get('user'))
    except Exception as e:
        return render_template('error.html', error = str(e))
    finally:
        if conn is not None:
            conn.close()
@app.route('/deletePlayer',methods=['GET','POST'])
@nocache
def deletePlayer():
    """Delete a player, but only when no stat/injury rows still reference it.

    Fixes: the three membership tests are per-id WHERE queries instead of
    full table scans, and the connection is closed in a ``finally`` block
    (the close calls sat after the returns and never ran).
    """
    if request.method != 'POST':
        return render_template("view_playerInfo.html")
    conn = psycopg2.connect(database="basketball", user="postgres", password="password")
    try:
        cur = conn.cursor()
        player_id = request.form['player_id']
        cur.execute("SELECT 1 FROM player_info WHERE player_id = %s", (player_id,))
        exists = cur.fetchone() is not None
        cur.execute("SELECT 1 FROM player_stat WHERE player_id = %s", (player_id,))
        has_stats = cur.fetchone() is not None
        cur.execute("SELECT 1 FROM player_injury WHERE player_id = %s", (player_id,))
        has_injury = cur.fetchone() is not None
        if exists and not has_stats and not has_injury:
            cur.execute("DELETE from player_info where player_id = %(id)s",{'id': player_id})
            conn.commit()
            return render_template("player_info.html")
        elif exists and has_injury:
            return render_template('error.html',error = "Please delete player's injury first and then delete from this table. ")
        elif exists and has_stats:
            return render_template('error.html',error = "Please delete player's statistics first and then delete from this table. ")
        else:
            return render_template('error.html',error = "Player does not exist ")
    finally:
        conn.close()
@app.route('/showPlayerStat')
@nocache
def showPlayerStat():
    """Render the player-statistics entry form."""
    viewer = session.get('user')
    return render_template('player_stat.html', user=viewer)
@app.route('/addPlayerStat',methods=['GET','POST'])
@nocache
def addPlayerStat():
    """Insert a statistics row for an existing player.

    Fixes: the assists value was read from the 'rebounds' form field
    (copy-paste bug — every player's assists silently equalled rebounds);
    membership tests are WHERE queries instead of full-table fetches; and
    the connection is closed in a ``finally`` block (the close calls sat
    after the returns and never ran).
    """
    if request.method != 'POST':
        return render_template("userHome.html")
    conn = psycopg2.connect(database="basketball", user="postgres", password="password")
    try:
        cur = conn.cursor()
        player_id = request.form['playerS_id']
        player_name = request.form['playerS_name']
        player_points = request.form['player_points']
        rebounds = request.form['rebounds']
        assists = request.form['assists']  # was request.form['rebounds']
        steals = request.form['steals']
        cur.execute("SELECT 1 FROM player_stat WHERE player_id = %s", (player_id,))
        already = cur.fetchone() is not None
        cur.execute("SELECT 1 FROM player_info WHERE player_id = %s", (player_id,))
        known = cur.fetchone() is not None
        if not already and known:
            cur.execute(
                "INSERT INTO player_stat (player_id, player_name, player_points, rebounds, assists, steals) VALUES (%s, %s, %s, %s, %s, %s)",
                (player_id, player_name, player_points, rebounds, assists, steals))
            conn.commit()
            return redirect(url_for('userHome'))
        return render_template('error.html',error = 'Player statistics information already exists or player has not been added. Try to add another player.')
    finally:
        conn.close()
@app.route('/showViewPlayerStat')
@nocache
def showViewPlayerStat():
    """Alias that forwards to the player-statistics listing."""
    target = url_for('viewPlayerStat')
    return redirect(target)
@app.route('/viewPlayerStat')
@nocache
@login_required
def viewPlayerStat():
    """List all player statistics through PLAYERSTAT_VIEW.

    Fixes: CREATE OR REPLACE VIEW (plain CREATE VIEW raised "already
    exists" on every visit after the first) and a guarded ``finally``
    (cur/conn were unbound there when connect() failed).
    """
    conn = None
    try:
        conn = psycopg2.connect(database="basketball", user="postgres", password="password")
        cur = conn.cursor()
        cur.execute("CREATE OR REPLACE VIEW PLAYERSTAT_VIEW AS SELECT player_id, player_name, player_points, rebounds, assists, steals FROM player_stat;")
        cur.execute("SELECT * FROM PLAYERSTAT_VIEW;")
        players_data = cur.fetchall()
        return render_template('view_playerStat.html', players_data = players_data, user = session.get('user'))
    except Exception as e:
        return render_template('error.html', error = str(e))
    finally:
        if conn is not None:
            conn.close()
@app.route('/deletePlayerStat',methods=['GET','POST'])
@nocache
def deletePlayerStat():
    """Delete one player's statistics row.

    Fixes: the existence test is a WHERE query (was a full-table fetch) and
    the connection is closed in a ``finally`` block (the close calls sat
    after the returns and never ran).
    """
    if request.method != 'POST':
        return render_template("view_playerStat.html")
    conn = psycopg2.connect(database="basketball", user="postgres", password="password")
    try:
        cur = conn.cursor()
        player_id = request.form['player_id']
        cur.execute("SELECT 1 FROM player_stat WHERE player_id = %s", (player_id,))
        if cur.fetchone() is not None:
            cur.execute("DELETE from player_stat where player_id = %(id)s",{'id': player_id})
            conn.commit()
            return render_template("player_stat.html")
        return render_template('error.html',error = "No statistics information for this player. ")
    finally:
        conn.close()
@app.route('/showTeamInfo')
@nocache
def showTeamInfo():
    """Render the team-information entry form."""
    viewer = session.get('user')
    return render_template('team_info.html', user=viewer)
@app.route('/addTeamInfo',methods=['GET','POST'])
@nocache
def addTeamInfo():
    """Insert a new team row unless the id already exists.

    Fixes: duplicate check via WHERE instead of fetching every team id, and
    the connection is closed in a ``finally`` block (the close calls sat
    after the returns and never ran).
    """
    if request.method != 'POST':
        return render_template("userHome.html")
    conn = psycopg2.connect(database="basketball", user="postgres", password="password")
    try:
        cur = conn.cursor()
        team_id = request.form['team_id']
        team_name = request.form['team_name']
        division_name = request.form['division_name']
        cur.execute("SELECT 1 FROM team WHERE team_id = %s", (team_id,))
        if cur.fetchone() is None:
            cur.execute(
                "INSERT INTO team (team_id, team_name, division_name) VALUES (%s, %s, %s)",
                (team_id, team_name, division_name))
            conn.commit()
            return redirect(url_for('userHome'))
        return render_template('error.html',error = 'Team already exists or division has not been added. Try to add another team and add a new division.')
    finally:
        conn.close()
@app.route('/showViewTeam')
@nocache
def showViewTeam():
    """Alias that forwards to the team listing."""
    target = url_for('viewTeams')
    return redirect(target)
@app.route('/viewTeams')
@nocache
@login_required
def viewTeams():
    """List all teams through TEAM_VIEW.

    Fixes: CREATE OR REPLACE VIEW (plain CREATE VIEW raised "already
    exists" on every visit after the first) and a guarded ``finally``
    (cur/conn were unbound there when connect() failed).
    """
    conn = None
    try:
        conn = psycopg2.connect(database="basketball", user="postgres", password="password")
        cur = conn.cursor()
        cur.execute("CREATE OR REPLACE VIEW TEAM_VIEW AS SELECT team_id, team_name, division_name FROM team;")
        cur.execute("SELECT * FROM TEAM_VIEW;")
        teams_data = cur.fetchall()
        return render_template('view_teamInfo.html', teams_data = teams_data, user = session.get('user'))
    except Exception as e:
        return render_template('error.html', error = str(e))
    finally:
        if conn is not None:
            conn.close()
@app.route('/deleteTeam',methods=['GET','POST'])
@nocache
def deleteTeam():
    """Delete a team, but only when no stat/coach rows still reference it.

    Fixes: per-id WHERE queries replace three full table scans, and the
    connection is closed in a ``finally`` block (the close calls sat after
    the returns and never ran).
    """
    if request.method != 'POST':
        return render_template("view_playerInfo.html")
    conn = psycopg2.connect(database="basketball", user="postgres", password="password")
    try:
        cur = conn.cursor()
        team_id = request.form['team_id']
        cur.execute("SELECT 1 FROM team WHERE team_id = %s", (team_id,))
        exists = cur.fetchone() is not None
        cur.execute("SELECT 1 FROM team_stat WHERE team_id = %s", (team_id,))
        has_stats = cur.fetchone() is not None
        cur.execute("SELECT 1 FROM team_coach WHERE team_id = %s", (team_id,))
        has_coach = cur.fetchone() is not None
        if exists and not has_stats and not has_coach:
            cur.execute("DELETE from team where team_id = %(id)s",{'id': team_id})
            conn.commit()
            return render_template("team_info.html")
        elif exists and has_coach:
            return render_template('error.html',error = "Please delete the entry in coach table first and then delete the team from this table. ")
        elif exists and has_stats:
            return render_template('error.html',error = "Please delete the team's statistics first and then delete the team from this table. ")
        else:
            return render_template('error.html',error = "Team does not exist ")
    finally:
        conn.close()
@app.route('/showTeamStat')
@nocache
def showTeamStat():
    """Render the team-statistics entry form."""
    viewer = session.get('user')
    return render_template('team_stat.html', user=viewer)
@app.route('/addTeamStat',methods=['GET','POST'])
@nocache
def addTeamStat():
    """Insert a statistics row for an existing team.

    Fixes: membership tests are WHERE queries instead of full-table fetches,
    and the connection is closed in a ``finally`` block (the close calls
    sat after the returns and never ran).
    """
    if request.method != 'POST':
        return render_template("userHome.html")
    conn = psycopg2.connect(database="basketball", user="postgres", password="password")
    try:
        cur = conn.cursor()
        team_id = request.form['team_id']
        team_name = request.form['team_name']
        division_name = request.form['division_name']
        team_points = request.form['team_points']
        rebounds = request.form['rebounds']
        assists = request.form['assists']
        steals = request.form['steals']
        cur.execute("SELECT 1 FROM team_stat WHERE team_id = %s", (team_id,))
        already = cur.fetchone() is not None
        cur.execute("SELECT 1 FROM team WHERE team_id = %s", (team_id,))
        known = cur.fetchone() is not None
        if not already and known:
            cur.execute(
                "INSERT INTO team_stat (team_id, team_name, division_name, team_points, rebounds, assists, steals) VALUES (%s, %s, %s, %s, %s, %s, %s)",
                (team_id, team_name, division_name, team_points, rebounds, assists, steals))
            conn.commit()
            return redirect(url_for('userHome'))
        return render_template('error.html',error = 'Team statistics information already exists or Team has not been added. Try to add a Team first.')
    finally:
        conn.close()
@app.route('/showViewTeamStat')
@nocache
def showViewTeamStat():
    """Alias that forwards to the team-statistics listing."""
    target = url_for('viewTeamStat')
    return redirect(target)
@app.route('/viewTeamStat')
@nocache
@login_required
def viewTeamStat():
    """List all team statistics through TEAMSTAT_VIEW.

    Fixes: CREATE OR REPLACE VIEW (plain CREATE VIEW raised "already
    exists" on every visit after the first) and a guarded ``finally``
    (cur/conn were unbound there when connect() failed).
    """
    conn = None
    try:
        conn = psycopg2.connect(database="basketball", user="postgres", password="password")
        cur = conn.cursor()
        cur.execute("CREATE OR REPLACE VIEW TEAMSTAT_VIEW AS SELECT team_id, team_name, division_name, team_points, rebounds, assists, steals FROM team_stat;")
        cur.execute("SELECT * FROM TEAMSTAT_VIEW;")
        teams_data = cur.fetchall()
        return render_template('view_teamStat.html', teams_data = teams_data, user = session.get('user'))
    except Exception as e:
        return render_template('error.html', error = str(e))
    finally:
        if conn is not None:
            conn.close()
@app.route('/deleteTeamStat',methods=['GET','POST'])
@nocache
def deleteTeamStat():
    """Delete one team's statistics row.

    Fixes: existence test via WHERE (was a full-table fetch) and the
    connection is closed in a ``finally`` block (the close calls sat after
    the returns and never ran).
    """
    if request.method != 'POST':
        return render_template("view_teamStat.html")
    conn = psycopg2.connect(database="basketball", user="postgres", password="password")
    try:
        cur = conn.cursor()
        team_id = request.form['team_id']
        cur.execute("SELECT 1 FROM team_stat WHERE team_id = %s", (team_id,))
        if cur.fetchone() is not None:
            cur.execute("DELETE from team_stat where team_id = %(id)s",{'id': team_id})
            conn.commit()
            return render_template("team_stat.html")
        return render_template('error.html',error = "No statistics information for this team. ")
    finally:
        conn.close()
@app.route('/showPlayerInjury')
@nocache
def showPlayerInjury():
    """Render the player-injury entry form."""
    viewer = session.get('user')
    return render_template('player_injury.html', user=viewer)
@app.route('/addPlayerInjury',methods=['GET','POST'])
@nocache
def addPlayerInjury():
    """Insert an injury row for an existing player.

    Fixes: membership tests are WHERE queries instead of full-table
    fetches, and the connection is closed in a ``finally`` block (the close
    calls sat after the returns and never ran).
    """
    if request.method != 'POST':
        return render_template("userHome.html")
    conn = psycopg2.connect(database="basketball", user="postgres", password="password")
    try:
        cur = conn.cursor()
        player_id = request.form['player_id']
        player_name = request.form['player_name']
        injury_name = request.form['injury_name']
        cur.execute("SELECT 1 FROM player_injury WHERE player_id = %s", (player_id,))
        already = cur.fetchone() is not None
        cur.execute("SELECT 1 FROM player_info WHERE player_id = %s", (player_id,))
        known = cur.fetchone() is not None
        if not already and known:
            cur.execute(
                "INSERT INTO player_injury (player_id, player_name, injury_name) VALUES (%s, %s, %s)",
                (player_id, player_name, injury_name))
            conn.commit()
            return redirect(url_for('userHome'))
        return render_template('error.html',error = 'Player Injury information already exists or Team has not been added. Try to add another player.')
    finally:
        conn.close()
@app.route('/showViewPlayerInjury')
@nocache
def showViewPlayerInjury():
    """Alias that forwards to the player-injury listing."""
    target = url_for('viewPlayerInjury')
    return redirect(target)
@app.route('/viewPlayerInjury')
@nocache
@login_required
def viewPlayerInjury():
    """List all player injuries through PLAYERINJURY_VIEW.

    Fixes: CREATE OR REPLACE VIEW (plain CREATE VIEW raised "already
    exists" on every visit after the first) and a guarded ``finally``
    (cur/conn were unbound there when connect() failed).
    """
    conn = None
    try:
        conn = psycopg2.connect(database="basketball", user="postgres", password="password")
        cur = conn.cursor()
        cur.execute("CREATE OR REPLACE VIEW PLAYERINJURY_VIEW AS SELECT player_id, player_name, injury_name FROM player_injury;")
        cur.execute("SELECT * FROM PLAYERINJURY_VIEW;")
        players_data = cur.fetchall()
        return render_template('view_playerInjury.html', players_data = players_data, user = session.get('user'))
    except Exception as e:
        return render_template('error.html', error = str(e))
    finally:
        if conn is not None:
            conn.close()
@app.route('/deletePlayerInjury',methods=['GET','POST'])
@nocache
def deletePlayerInjury():
    """Delete one player's injury row.

    Fixes: existence test via WHERE (was a full-table fetch) and the
    connection is closed in a ``finally`` block (the close calls sat after
    the returns and never ran).
    """
    if request.method != 'POST':
        return render_template("view_playerStat.html")
    conn = psycopg2.connect(database="basketball", user="postgres", password="password")
    try:
        cur = conn.cursor()
        player_id = request.form['player_id']
        cur.execute("SELECT 1 FROM player_injury WHERE player_id = %s", (player_id,))
        if cur.fetchone() is not None:
            cur.execute("DELETE from player_injury where player_id = %(id)s",{'id': player_id})
            conn.commit()
            return render_template("player_injury.html")
        return render_template('error.html',error = "No injury information for this player. ")
    finally:
        conn.close()
@app.route('/showCoach')
@nocache
def showCoach():
    """Render the team-coach entry form."""
    viewer = session.get('user')
    return render_template('team_coach.html', user=viewer)
@app.route('/addCoach',methods=['GET','POST'])
@nocache
def addCoach():
    """Insert a coach row for an existing team.

    Fixes: membership tests are WHERE queries instead of full-table
    fetches, and the connection is closed in a ``finally`` block (the close
    calls sat after the returns and never ran).
    """
    if request.method != 'POST':
        return render_template("userHome.html")
    conn = psycopg2.connect(database="basketball", user="postgres", password="password")
    try:
        cur = conn.cursor()
        team_id = request.form['team_id']
        team_name = request.form['team_name']
        coach_name = request.form['coach_name']
        cur.execute("SELECT 1 FROM team_coach WHERE team_id = %s", (team_id,))
        already = cur.fetchone() is not None
        cur.execute("SELECT 1 FROM team WHERE team_id = %s", (team_id,))
        known = cur.fetchone() is not None
        if not already and known:
            cur.execute(
                "INSERT INTO team_coach (team_id, team_name, coach_name) VALUES (%s, %s, %s)",
                (team_id, team_name, coach_name))
            conn.commit()
            return redirect(url_for('userHome'))
        return render_template('error.html',error = 'Coach information already exists or Team has not been added. Try to add a team or a coach of an existing team.')
    finally:
        conn.close()
@app.route('/showViewCoach')
@nocache
def showViewCoach():
    """Alias that forwards to the coach listing."""
    target = url_for('viewCoach')
    return redirect(target)
@app.route('/viewCoach')
@nocache
@login_required
def viewCoach():
    """List all coaches through COACH_VIEW.

    Fixes: CREATE OR REPLACE VIEW (plain CREATE VIEW raised "already
    exists" on every visit after the first) and a guarded ``finally``
    (cur/conn were unbound there when connect() failed).
    """
    conn = None
    try:
        conn = psycopg2.connect(database="basketball", user="postgres", password="password")
        cur = conn.cursor()
        cur.execute("CREATE OR REPLACE VIEW COACH_VIEW AS SELECT team_id, team_name, coach_name FROM team_coach;")
        cur.execute("SELECT * FROM COACH_VIEW;")
        coaches_data = cur.fetchall()
        return render_template('view_coach.html', coaches_data = coaches_data, user = session.get('user'))
    except Exception as e:
        return render_template('error.html', error = str(e))
    finally:
        if conn is not None:
            conn.close()
@app.route('/deleteCoach',methods=['GET','POST'])
@nocache
def deleteCoach():
    """Delete one team's coach row.

    Fixes: existence test via WHERE (was a full-table fetch) and the
    connection is closed in a ``finally`` block (the close calls sat after
    the returns and never ran).
    """
    if request.method != 'POST':
        return render_template("view_coach.html")
    conn = psycopg2.connect(database="basketball", user="postgres", password="password")
    try:
        cur = conn.cursor()
        coach_team_id = request.form['team_id']
        cur.execute("SELECT 1 FROM team_coach WHERE team_id = %s", (coach_team_id,))
        if cur.fetchone() is not None:
            cur.execute("DELETE from team_coach where team_id = %(id)s",{'id': coach_team_id})
            conn.commit()
            return render_template("team_coach.html")
        return render_template('error.html',error = "No coach information for this team. ")
    finally:
        conn.close()
@app.route('/showDivision')
@nocache
def showDivision():
    """Render the division entry form."""
    viewer = session.get('user')
    return render_template('division.html', user=viewer)
@app.route('/addDivision',methods=['GET','POST'])
@nocache
def addDivision():
    """Insert a new division unless the id already exists.

    Fixes: duplicate check via WHERE (was a full-table fetch) and the
    connection is closed in a ``finally`` block (the close calls sat after
    the returns and never ran).
    """
    if request.method != 'POST':
        return render_template("userHome.html")
    conn = psycopg2.connect(database="basketball", user="postgres", password="password")
    try:
        cur = conn.cursor()
        division_id = request.form['division_id']
        division_name = request.form['division_name']
        cur.execute("SELECT 1 FROM division WHERE division_id = %s", (division_id,))
        if cur.fetchone() is None:
            cur.execute(
                "INSERT INTO division (division_id, division_name) VALUES (%s, %s)",
                (division_id, division_name))
            conn.commit()
            return redirect(url_for('userHome'))
        return render_template('error.html',error = 'Division already exists. Try to add another division.')
    finally:
        conn.close()
@app.route('/showViewDivision')
@nocache
def showViewDivision():
    """Alias that forwards to the division listing."""
    target = url_for('viewDivision')
    return redirect(target)
@app.route('/viewDivision')
@nocache
@login_required
def viewDivision():
    """List all divisions through DIVISION_VIEW.

    Fixes: CREATE OR REPLACE VIEW (plain CREATE VIEW raised "already
    exists" on every visit after the first) and a guarded ``finally``
    (cur/conn were unbound there when connect() failed).
    """
    conn = None
    try:
        conn = psycopg2.connect(database="basketball", user="postgres", password="password")
        cur = conn.cursor()
        cur.execute("CREATE OR REPLACE VIEW DIVISION_VIEW AS SELECT division_id, division_name FROM division;")
        cur.execute("SELECT * FROM DIVISION_VIEW;")
        divisions_data = cur.fetchall()
        return render_template('view_division.html', divisions_data = divisions_data, user = session.get('user'))
    except Exception as e:
        return render_template('error.html', error = str(e))
    finally:
        if conn is not None:
            conn.close()
@app.route('/deleteDivision',methods=['GET','POST'])
@nocache
def deleteDivision():
    """Delete a division, but only when no attendance/referee rows reference it.

    Fixes: per-id WHERE queries replace three full table scans, and the
    connection is closed in a ``finally`` block (the close calls sat after
    the returns and never ran).
    """
    if request.method != 'POST':
        return render_template("view_playerInfo.html")
    conn = psycopg2.connect(database="basketball", user="postgres", password="password")
    try:
        cur = conn.cursor()
        division_id = request.form['division_id']
        cur.execute("SELECT 1 FROM division WHERE division_id = %s", (division_id,))
        exists = cur.fetchone() is not None
        cur.execute("SELECT 1 FROM attendance WHERE division_id = %s", (division_id,))
        has_attendance = cur.fetchone() is not None
        cur.execute("SELECT 1 FROM referee WHERE division_id = %s", (division_id,))
        has_referee = cur.fetchone() is not None
        if exists and not has_attendance and not has_referee:
            cur.execute("DELETE from division where division_id = %(id)s",{'id': division_id})
            conn.commit()
            return render_template("division.html")
        elif exists and has_referee:
            return render_template('error.html',error = "Please delete the entry in referee table first and then delete the division information from this table. ")
        elif exists and has_attendance:
            return render_template('error.html',error = "Please delete the entry in attendance table first and then delete the division information from this table. ")
        else:
            return render_template('error.html',error = "Division does not exist ")
    finally:
        conn.close()
@app.route('/showAttendance')
@nocache
def showAttendance():
    """Render the attendance entry form."""
    viewer = session.get('user')
    return render_template('attendance.html', user=viewer)
@app.route('/addAttendance',methods=['GET','POST'])
@nocache
def addAttendance():
    """Insert an attendance row for an existing division.

    Fixes: membership tests are WHERE queries instead of full-table
    fetches, and the connection is closed in a ``finally`` block (the close
    calls sat after the returns and never ran).
    """
    if request.method != 'POST':
        return render_template("userHome.html")
    conn = psycopg2.connect(database="basketball", user="postgres", password="password")
    try:
        cur = conn.cursor()
        division_id = request.form['division_id']
        division_name = request.form['division_name']
        num_attendee = request.form['number_of_attendee']
        cur.execute("SELECT 1 FROM attendance WHERE division_id = %s", (division_id,))
        already = cur.fetchone() is not None
        cur.execute("SELECT 1 FROM division WHERE division_id = %s", (division_id,))
        known = cur.fetchone() is not None
        if not already and known:
            cur.execute(
                "INSERT INTO attendance (division_id, division_name, number_of_attendee) VALUES (%s, %s, %s)",
                (division_id, division_name, num_attendee))
            conn.commit()
            return redirect(url_for('userHome'))
        return render_template('error.html',error = 'Attendance information already exists or division has not been added. Try to add a division or the attendance information of an existing team.')
    finally:
        conn.close()
@app.route('/showViewAttendance')
@nocache
def showViewAttendance():
    """Alias that forwards to the attendance listing."""
    target = url_for('viewAttendance')
    return redirect(target)
@app.route('/viewAttendance')
@nocache
@login_required
def viewAttendance():
    """List all attendance figures through Attendance_VIEW.

    Fixes: CREATE OR REPLACE VIEW (plain CREATE VIEW raised "already
    exists" on every visit after the first) and a guarded ``finally``
    (cur/conn were unbound there when connect() failed).
    """
    conn = None
    try:
        conn = psycopg2.connect(database="basketball", user="postgres", password="password")
        cur = conn.cursor()
        cur.execute("CREATE OR REPLACE VIEW Attendance_VIEW AS SELECT division_id, division_name, number_of_attendee FROM attendance;")
        cur.execute("SELECT * FROM Attendance_VIEW;")
        attendance_data = cur.fetchall()
        return render_template('view_attendance.html', attendance_data = attendance_data, user = session.get('user'))
    except Exception as e:
        return render_template('error.html', error = str(e))
    finally:
        if conn is not None:
            conn.close()
@app.route('/deleteAttendance',methods=['GET','POST'])
@nocache
def deleteAttendance():
    """Delete one division's attendance row.

    Fixes: existence test via WHERE (was a full-table fetch) and the
    connection is closed in a ``finally`` block (the close calls sat after
    the returns and never ran).
    """
    if request.method != 'POST':
        return render_template("view_attendance.html")
    conn = psycopg2.connect(database="basketball", user="postgres", password="password")
    try:
        cur = conn.cursor()
        division_id = request.form['division_id']
        cur.execute("SELECT 1 FROM attendance WHERE division_id = %s", (division_id,))
        if cur.fetchone() is not None:
            cur.execute("DELETE from attendance where division_id = %(id)s",{'id': division_id})
            conn.commit()
            return render_template("attendance.html")
        return render_template('error.html',error = "No attendance information for this division. ")
    finally:
        conn.close()
@app.route('/showReferee')
@nocache
def showReferee():
    """Render the referee entry form."""
    viewer = session.get('user')
    return render_template('referee.html', user=viewer)
@app.route('/addReferee',methods=['GET','POST'])
@nocache
def addReferee():
    """Insert a referee row for an existing division.

    Fixes: membership tests are WHERE queries instead of full-table
    fetches, and the connection is closed in a ``finally`` block (the close
    calls sat after the returns and never ran).
    """
    if request.method != 'POST':
        return render_template("userHome.html")
    conn = psycopg2.connect(database="basketball", user="postgres", password="password")
    try:
        cur = conn.cursor()
        division_id = request.form['division_id']
        division_name = request.form['division_name']
        referee_id = request.form['referee_id']
        referee_name = request.form['referee_name']
        cur.execute("SELECT 1 FROM referee WHERE referee_id = %s", (referee_id,))
        ref_exists = cur.fetchone() is not None
        cur.execute("SELECT 1 FROM division WHERE division_id = %s", (division_id,))
        div_known = cur.fetchone() is not None
        if div_known and not ref_exists:
            cur.execute(
                "INSERT INTO referee (division_id, division_name, referee_id, referee_name) VALUES (%s, %s, %s, %s)",
                (division_id, division_name, referee_id, referee_name))
            conn.commit()
            return redirect(url_for('userHome'))
        return render_template('error.html',error = 'Referee information already exists. Try to add another referee information of an existing team.')
    finally:
        conn.close()
@app.route('/showViewReferee')
@nocache
def showViewReferee():
    """Alias that forwards to the referee listing."""
    target = url_for('viewReferee')
    return redirect(target)
@app.route('/viewReferee')
@nocache
@login_required
def viewReferee():
    """List all referees through referee_VIEW.

    Fixes: CREATE OR REPLACE VIEW (plain CREATE VIEW raised "already
    exists" on every visit after the first) and a guarded ``finally``
    (cur/conn were unbound there when connect() failed).
    """
    conn = None
    try:
        conn = psycopg2.connect(database="basketball", user="postgres", password="password")
        cur = conn.cursor()
        cur.execute("CREATE OR REPLACE VIEW referee_VIEW AS SELECT division_id, division_name, referee_id, referee_name FROM referee;")
        cur.execute("SELECT * FROM referee_VIEW;")
        referee_data = cur.fetchall()
        return render_template('view_referee.html', referee_data = referee_data, user = session.get('user'))
    except Exception as e:
        return render_template('error.html', error = str(e))
    finally:
        if conn is not None:
            conn.close()
@app.route('/deleteReferee',methods=['GET','POST'])
@nocache
def deleteReferee():
    """Delete one referee row.

    Fixes: existence test via WHERE (was a full-table fetch) and the
    connection is closed in a ``finally`` block (the close calls sat after
    the returns and never ran).
    """
    if request.method != 'POST':
        return render_template("view_referee.html")
    conn = psycopg2.connect(database="basketball", user="postgres", password="password")
    try:
        cur = conn.cursor()
        referee_id = request.form['referee_id']
        cur.execute("SELECT 1 FROM referee WHERE referee_id = %s", (referee_id,))
        if cur.fetchone() is not None:
            cur.execute("DELETE from referee where referee_id = %(id)s",{'id': referee_id})
            conn.commit()
            return render_template("referee.html")
        return render_template('error.html',error = "No referee information for this referee. ")
    finally:
        conn.close()
@app.route('/showChampions')
@nocache
def showChampions():
    """Render the champions entry form."""
    viewer = session.get('user')
    return render_template('champions.html', user=viewer)
@app.route('/addChampions',methods=['GET','POST'])
@nocache
def addChampions():
    """Insert a champions row for an existing division.

    Fixes: the original queried champions for existing division ids but
    never used the result, so duplicate rows slipped through even though
    the error message promises otherwise — the duplicate check is now
    applied (matching the pattern of every other add* route; TODO confirm
    intended semantics).  Membership tests are WHERE queries, and the
    connection is closed in a ``finally`` block (the close calls sat after
    the returns and never ran).
    """
    if request.method != 'POST':
        return render_template("userHome.html")
    conn = psycopg2.connect(database="basketball", user="postgres", password="password")
    try:
        cur = conn.cursor()
        division_id = request.form['division_id']
        division_name = request.form['division_name']
        division_champ = request.form['division_champion']
        final_champ = request.form['final_champion']
        cur.execute("SELECT 1 FROM champions WHERE division_id = %s", (division_id,))
        already = cur.fetchone() is not None
        cur.execute("SELECT 1 FROM division WHERE division_id = %s", (division_id,))
        known = cur.fetchone() is not None
        if not already and known:
            cur.execute(
                "INSERT INTO champions (division_id, division_name, division_champion, final_champion) VALUES (%s, %s, %s, %s)",
                (division_id, division_name, division_champ, final_champ))
            conn.commit()
            return redirect(url_for('userHome'))
        return render_template('error.html',error = 'Champion information already exists or division has not been added. Try to add a division or the referee information of an existing team.')
    finally:
        conn.close()
@app.route('/showViewChampion')
@nocache
def showViewChampion():
    """Alias that forwards to the champion listing."""
    target = url_for('viewChampion')
    return redirect(target)
@app.route('/viewChampion')
@nocache
@login_required
def viewChampion():
    """List all champions through champion_VIEW.

    Fixes: CREATE OR REPLACE VIEW (plain CREATE VIEW raised "already
    exists" on every visit after the first) and a guarded ``finally``
    (cur/conn were unbound there when connect() failed).
    """
    conn = None
    try:
        conn = psycopg2.connect(database="basketball", user="postgres", password="password")
        cur = conn.cursor()
        cur.execute("CREATE OR REPLACE VIEW champion_VIEW AS SELECT division_id, division_name, division_champion, final_champion FROM champions;")
        cur.execute("SELECT * FROM champion_VIEW;")
        champion_data = cur.fetchall()
        return render_template('view_champion.html', champion_data = champion_data, user = session.get('user'))
    except Exception as e:
        return render_template('error.html', error = str(e))
    finally:
        if conn is not None:
            conn.close()
@app.route('/deleteChampion',methods=['GET','POST'])
@nocache
def deleteChampion():
    """Delete a champions row by division id (POST), or show the view page (GET)."""
    if request.method == 'POST':
        conn = psycopg2.connect(database="basketball", user="postgres", password="password")
        cur = conn.cursor()
        try:
            champId = request.form['division_id']
            cur.execute("SELECT division_id FROM champions;")
            champ_list = [row[0] for row in cur.fetchall()]
            if champId in champ_list:
                # Parameterised delete keeps user input out of the SQL text
                cur.execute("DELETE from champions where division_id = %(id)s",{'id': champId})
                conn.commit()
                return render_template("champions.html")
            else:
                return render_template('error.html',error = "No information of this division. ")
        finally:
            # BUG FIX: these close calls sat after the returns and never ran,
            # leaking one connection per request.
            cur.close()
            conn.close()
    else:
        return render_template("view_champion.html")
#@app.route('/showSearchPlayer')
#@nocache
#def showSearchPlayer():
# return render_template('search_player.html', user = session.get('user'))
@app.route('/showSearchPlayer')
@nocache
def showSearchPlayer():
    """Render the player-search page for the signed-in user (if any)."""
    current_user = session.get('user')
    return render_template('search_player.html', user=current_user)
@app.route('/addSearchPlayer',methods=['GET','POST'])
@nocache
def addSearchPlayer():
    """Search players by (partial) name and show their joined stats."""
    if request.method == 'POST':
        conn = psycopg2.connect(database="basketball", user="postgres", password="password")
        cur = conn.cursor()
        try:
            playerName = request.form['player_name']
            # Parameterised LIKE keeps the user input out of the SQL text
            cur.execute("SELECT player_name FROM player_info where player_name LIKE %(id)s", {'id': '%'+ playerName +'%'})
            matches = [row[0] for row in cur.fetchall()]
            if matches:
                cur.execute("SELECT player_info.player_name, team_name, player_info.player_id, player_position, player_points, rebounds, assists, steals from player_info full join player_stat on player_info.player_name = player_stat.player_name where player_info.player_name LIKE %(id)s", {'id': '%'+ playerName +'%'})
                search_data = cur.fetchall()
                return render_template('search_player.html', search_data = search_data, user = session.get('user'))
            else:
                return render_template('error.html',error = 'No player was found. please try again.')
        finally:
            # BUG FIX: these close calls sat after the returns and never ran,
            # leaking one connection per request.
            cur.close()
            conn.close()
    else:
        return render_template("userHome.html")
@app.route('/showSearchTeam')
@nocache
def showSearchTeam():
    """Render the team-search page for the signed-in user (if any)."""
    current_user = session.get('user')
    return render_template('search_team.html', user=current_user)
@app.route('/addSearchTeam',methods=['GET','POST'])
@nocache
def addSearchTeam():
    """Search teams by (partial) name and show their joined stats."""
    if request.method == 'POST':
        conn = psycopg2.connect(database="basketball", user="postgres", password="password")
        cur = conn.cursor()
        try:
            teamName = request.form['team_name']
            # Parameterised LIKE keeps the user input out of the SQL text
            cur.execute("SELECT team_name FROM team where team_name LIKE %(id)s", {'id': '%'+ teamName +'%'})
            matches = [row[0] for row in cur.fetchall()]
            if matches:
                cur.execute("SELECT team.team_name, team.team_id, team.division_name,coach_name, team_points, rebounds, assists, steals from team full join team_stat on team.team_name = team_stat.team_name full join team_coach on team.team_name = team_coach.team_name where team.team_name LIKE %(id)s", {'id': '%'+ teamName +'%'})
                search_data = cur.fetchall()
                return render_template('search_team.html', search_data = search_data, user = session.get('user'))
            else:
                return render_template('error.html',error = 'No team was found. please try again.')
        finally:
            # BUG FIX: these close calls sat after the returns and never ran,
            # leaking one connection per request.
            cur.close()
            conn.close()
    else:
        return render_template("userHome.html")
if __name__ =="__main__":
    # NOTE(review): debug=True enables the Werkzeug debugger and reloader,
    # which allows arbitrary code execution — development only, never production.
    app.run(debug=True)
59,511 | weihengSu/basketApp | refs/heads/master | /models.py | from flask.ext.login import UserMixin
import psycopg2
# Take care of User model
class UserNotFoundException(Exception):
    """Raised when a user id has no row in the basket_user snapshot."""
class User(UserMixin):
    """Flask-Login user backed by the basket_user table.

    NOTE(review): the class-level queries below run exactly once, at import
    time, and the connection is closed immediately afterwards. Users added to
    the database later are invisible until the process restarts — confirm this
    snapshot behaviour is intentional.
    """
    CONN = psycopg2.connect("dbname='basketball' user='postgres' "
                            "host='localhost' password='password'")
    CUR = CONN.cursor()
    # user_id -> stored password
    CUR.execute('SELECT user_id, user_password FROM basket_user')
    USER = dict(CUR.fetchall())
    # user_id -> e-mail address
    CUR.execute('SELECT user_id, user_email FROM basket_user')
    EMAIL = dict(CUR.fetchall())
    # user_id -> admin flag
    CUR.execute('SELECT user_id, user_admin FROM basket_user')
    USER_ADMIN = dict(CUR.fetchall())
    CUR.close()
    CONN.close()
    def __init__(self, user_id):
        # Reject ids missing from any of the three snapshots taken above
        if user_id not in self.USER or user_id not in self.EMAIL or user_id not in self.USER_ADMIN:
            raise UserNotFoundException
        self.user_id = user_id
        self.password = self.USER[user_id]
        self.name = self.EMAIL[user_id]
        self.user_admin = self.USER_ADMIN[user_id]
    # The three predicates below satisfy the Flask-Login user protocol
    def is_authenticated(self):
        return True
    def is_active(self):
        return True
    def is_anonymous(self):
        return False
    def get_id(self):
        return self.user_id
    def get_role(self):
        return self.user_admin
    def get_name(self):
        return self.name
    @classmethod
    def get(cls, user_id):
        # NOTE(review): returns the stored *password* for the id (or None),
        # not a User instance — verify that callers expect this.
        try:
            return cls.USER[user_id]
        except KeyError:
            return None
59,513 | Eelviny/ticksys | refs/heads/master | /codereader.py | #!/usr/bin/env python3
# Code Reader Module
from checksum import sumRead
from dbinterface import *
# Find the corresponding database record by submitting a query
def codeRead(ticketID):
    """Validate a human-readable ticket code.

    Converts *ticketID* to its digit-list form and verifies the embedded
    checksum. Returns the digit list on success; raises ValueError when the
    code contains a non-hex character or fails the checksum. (The database
    lookup promised by the original comment is performed by the caller.)
    """
    listID = codeConv(ticketID)
    # Check the checksum first before continuing onto the database
    if not sumRead(listID):
        raise ValueError
    # BUG FIX: the function previously returned None even for valid codes;
    # return the converted list so callers can use it (backward-compatible).
    return listID
# Take a human-readable code and turn it into an integer list for the checksum generator
def codeConv(ticketID):
    """Convert a hex-character code string into a list of integer digits."""
    return [int(char, 16) for char in ticketID]
| {"/codereader.py": ["/checksum.py", "/dbinterface.py"], "/codegenerator.py": ["/checksum.py", "/dbinterface.py"], "/reader.py": ["/codereader.py", "/dbinterface.py", "/fileaccess.py"], "/editor.py": ["/codereader.py", "/dbinterface.py", "/fileaccess.py"], "/fileaccess.py": ["/dbinterface.py"], "/creator.py": ["/codegenerator.py", "/dbinterface.py", "/fileaccess.py"]} |
59,514 | Eelviny/ticksys | refs/heads/master | /codegenerator.py | #!/usr/bin/env python3
# Code Generator Module
import random
from checksum import *
from dbinterface import *
def randomCode():
    """Return four random nibbles (0-15) forming the auth part of a code."""
    return [random.randint(0, 15) for _ in range(4)]
# The heart of the module - creating the code.
def codeGen(dbid, randomauth=""):
    """Build the full nine-digit code list for a database ID.

    Layout: three decimal ID digits + four random auth digits + two checksum
    digits. If no *randomauth* list is supplied, one is generated.
    """
    if randomauth == "":
        randomauth = randomCode()
    # Left-pad the database ID to at least three decimal digits
    dblist = [int(digit) for digit in str(dbid).zfill(3)]
    # Delegate checksum generation to the checksum module
    csum = sumGen(dblist + randomauth)
    return dblist + randomauth + csum
# Take an inputted, raw code list and turn it into a human readable string
def codePrint(code):
    """Render a raw code list as an upper-case hexadecimal string."""
    return "".join(format(digit, 'X') for digit in code)
def newCode(db):
    # Find the next available database ID and generate a code from it.
    # NOTE(review): assumes the caller inserts the matching user_info row
    # before another code is generated, or the embedded ID will be stale.
    dbid = db.nextAvail("user_info")
    return codeGen(dbid)
| {"/codereader.py": ["/checksum.py", "/dbinterface.py"], "/codegenerator.py": ["/checksum.py", "/dbinterface.py"], "/reader.py": ["/codereader.py", "/dbinterface.py", "/fileaccess.py"], "/editor.py": ["/codereader.py", "/dbinterface.py", "/fileaccess.py"], "/fileaccess.py": ["/dbinterface.py"], "/creator.py": ["/codegenerator.py", "/dbinterface.py", "/fileaccess.py"]} |
59,515 | Eelviny/ticksys | refs/heads/master | /createevent.py | #!/usr/bin/env python3
# Create Event Module
| {"/codereader.py": ["/checksum.py", "/dbinterface.py"], "/codegenerator.py": ["/checksum.py", "/dbinterface.py"], "/reader.py": ["/codereader.py", "/dbinterface.py", "/fileaccess.py"], "/editor.py": ["/codereader.py", "/dbinterface.py", "/fileaccess.py"], "/fileaccess.py": ["/dbinterface.py"], "/creator.py": ["/codegenerator.py", "/dbinterface.py", "/fileaccess.py"]} |
59,516 | Eelviny/ticksys | refs/heads/master | /statistics.py | #!/usr/bin/env python3
# Statistics Modules
| {"/codereader.py": ["/checksum.py", "/dbinterface.py"], "/codegenerator.py": ["/checksum.py", "/dbinterface.py"], "/reader.py": ["/codereader.py", "/dbinterface.py", "/fileaccess.py"], "/editor.py": ["/codereader.py", "/dbinterface.py", "/fileaccess.py"], "/fileaccess.py": ["/dbinterface.py"], "/creator.py": ["/codegenerator.py", "/dbinterface.py", "/fileaccess.py"]} |
59,517 | Eelviny/ticksys | refs/heads/master | /reader.py | #!/usr/bin/env python3
import gi
gi.require_version('Gtk', '3.0')
from gi.repository import Gtk
import codereader
import dbinterface
import fileaccess
# Start the reader program class
class Reader():
    """Ticket-reader window: a hexadecimal keypad feeds characters into
    entry1 and, once nine characters are present, a valid code pops up the
    matching order details."""

    def __init__(self):
        # use Gtk Builder to build the UI from file
        self.builder = Gtk.Builder()
        self.builder.add_from_file("reader.glade")
        # Connect all the events to the functions in this class
        self.builder.connect_signals(self)
        self.window = self.builder.get_object("window1")
        # The code entry is updated programmatically, so keep a handle on it
        self.entry1 = self.builder.get_object("entry1")
        self.window.show_all()
        # Ask for a database before doing anything else; bail out if refused
        self.db = fileaccess.openDialog(self.window)
        if self.db == None:
            raise SystemExit(0)
        # To prevent repeated database reads, cache the ticket types up front
        self.tickets = self.db.read("ticket_types")
        print(self.db.read("user_info"))

    def textGet(self):
        """Return the current entry text, normalised to upper case."""
        return self.entry1.get_text().upper()

    def textSet(self, text):
        """Replace the entry text."""
        self.entry1.set_text(text)

    def textAdd(self, text):
        """Append one keypad character, clearing any previous error first."""
        if self.textGet() == "Error!":
            self.textSet("")
        self.textSet(self.textGet() + text)

    def textUpdate(self):
        """Validate the code once it reaches full length and show the order."""
        if len(self.textGet()) >= 9:
            try:
                code = self.textGet()
                # Raises ValueError on a bad character or bad checksum
                codereader.codeRead(code)
                # Raises IndexError when the code is not in the database
                ticket = self.db.returnOrders("code='{0}'".format(code))[0]
                # Assign the popup window and set the title to the code
                popup = self.builder.get_object("window2")
                popup.set_title(ticket[2])
                # Show the customer's name at the top
                self.builder.get_object("label1").set_text("Name: " + ticket[0] + " " + ticket[1])
                # Display a row for each ticket type
                for i in range(4):
                    self.builder.get_object("label{0}".format(str(i+2))).set_text(self.tickets[i][1] + ": " + str(ticket[3][i]))
                # Show the window, with all attributes set
                popup.show_all()
                # Reset the code reader for the next use
                self.entry1.set_text("")
            # BUG FIX: this was a bare `except:` that also swallowed
            # programming errors (and even KeyboardInterrupt); only the two
            # exceptions that mean "invalid code" are handled here.
            except (ValueError, IndexError):
                # Give a friendly error message instead of crashing
                self.textSet("Error!")
                self.entry1.grab_focus()

    # Close all windows on the deletion of the top-level window
    def on_window1_delete_event(self, *args):
        raise SystemExit(0)

    # The entry's clear icon wipes the current input
    def on_entry1_icon_press(self, *args):
        self.textSet("")

    # One handler per keypad button, 0-9 then A-F
    def on_button1_clicked(self, *args):
        self.textAdd("0")
    def on_button2_clicked(self, *args):
        self.textAdd("1")
    def on_button3_clicked(self, *args):
        self.textAdd("2")
    def on_button4_clicked(self, *args):
        self.textAdd("3")
    def on_button5_clicked(self, *args):
        self.textAdd("4")
    def on_button6_clicked(self, *args):
        self.textAdd("5")
    def on_button7_clicked(self, *args):
        self.textAdd("6")
    def on_button8_clicked(self, *args):
        self.textAdd("7")
    def on_button9_clicked(self, *args):
        self.textAdd("8")
    def on_button10_clicked(self, *args):
        self.textAdd("9")
    def on_button11_clicked(self, *args):
        self.textAdd("A")
    def on_button12_clicked(self, *args):
        self.textAdd("B")
    def on_button13_clicked(self, *args):
        self.textAdd("C")
    def on_button14_clicked(self, *args):
        self.textAdd("D")
    def on_button15_clicked(self, *args):
        self.textAdd("E")
    def on_button16_clicked(self, *args):
        self.textAdd("F")

    # Detect changes to entry1, and trigger the text update
    def on_entry1_changed(self, *args):
        self.textUpdate()

    # When the popup is finished with, don't destroy it - hide it for reuse.
    # The popup's close button is disabled, so button17 is the only way out.
    def on_button17_clicked(self, *args):
        self.builder.get_object("window2").hide()

    def on_filechooserbutton1_file_set(self, *args):
        pass
# Create the main event loop
# NOTE(review): this runs at import time — there is no __main__ guard, so
# merely importing this module launches the GUI.
main = Reader()
Gtk.main()
| {"/codereader.py": ["/checksum.py", "/dbinterface.py"], "/codegenerator.py": ["/checksum.py", "/dbinterface.py"], "/reader.py": ["/codereader.py", "/dbinterface.py", "/fileaccess.py"], "/editor.py": ["/codereader.py", "/dbinterface.py", "/fileaccess.py"], "/fileaccess.py": ["/dbinterface.py"], "/creator.py": ["/codegenerator.py", "/dbinterface.py", "/fileaccess.py"]} |
59,518 | Eelviny/ticksys | refs/heads/master | /dbinterface.py | #!/usr/bin/env python3
# Database Interface Module
import sqlite3
# File access will initialise the class
class Database():
    """Thin wrapper around an SQLite ticket database.

    Holds one connection (self.conn) and one cursor (self.c). Table and
    column names are hard-coded by the callers; only row values ever come
    from user input.
    """

    def __init__(self, path=":memory:"):
        # An in-memory database vanishes as soon as the connection closes
        if path == ":memory:":
            print("WARNING: Database is in memory! Nothing will be saved")
        self.conn = sqlite3.connect(path)
        self.c = self.conn.cursor()

    def commit(self):
        """Flush pending changes to disk.

        BUG FIX: added because editor.py calls db.commit(), which previously
        raised AttributeError; callers no longer need to reach into self.conn.
        """
        self.conn.commit()

    def close(self, save=True):
        """Close the connection, committing first unless *save* is False."""
        if save == True:
            self.conn.commit()
        self.conn.close()
        # (the original ended with `del self`, which has no effect — removed)

    def read(self, table, query=""):
        """Return all rows of *table*, optionally filtered by a WHERE clause.

        NOTE: *table* and *query* are interpolated into the SQL text, so they
        must come from trusted (hard-coded) call sites, never from user input.
        """
        if query == "":
            # By default the function returns all rows
            self.c.execute('SELECT * FROM {0}'.format(table))
        else:
            self.c.execute('SELECT * FROM {0} WHERE {1}'.format(table, query))
        return self.c.fetchall()

    def write(self, table, values):
        """Insert one row into *table*; returns the ID the row will receive."""
        # For callers that need the id the new row gets
        dbid = self.nextAvail(table)
        # BUG FIX: values used to be spliced in via str.format(tuple), which
        # broke on apostrophes in the data and invited SQL injection; use
        # bound parameters instead.
        if table == 'ticket_types':
            self.c.execute('INSERT INTO ticket_types (tName, tPrice, tInfo) VALUES (?, ?, ?)', tuple(values))
        if table == 'user_info':
            self.c.execute('INSERT INTO user_info (fName, lName, code) VALUES (?, ?, ?)', tuple(values))
        if table == 'orders':
            self.c.execute('INSERT INTO orders (quantity, userID, ticketTypeID) VALUES (?, ?, ?)', tuple(values))
        return dbid

    def update(self, table, column, query, value):
        """Set *column* to *value* for rows matching *query*; True on success."""
        try:
            self.c.execute('UPDATE {0} SET {1} = {2} WHERE {3}'.format(table, column, value, query))
            return True
        except sqlite3.Error:
            # Narrowed from a bare `except` so programming errors still surface
            return False

    def newEntry(self, fName, lName, code, tickets):
        """Store one customer plus one orders row per non-zero ticket count.

        *tickets* is a list of per-type quantities, index-aligned with the
        ticket_types table (type ID = index + 1).
        """
        # BUG FIX: the original str()-converted only tickets[0:3] (missing the
        # fourth type) and mutated the caller's list while doing so; with
        # parameterised writes no conversion is needed at all.
        dbid = self.write("user_info", (fName, lName, code))
        for typ, quant in enumerate(tickets):
            # Only write orders to the database if there are 1 or more tickets
            if int(quant) > 0:
                # Database index starts from 1, so add 1 to typ
                self.write("orders", (int(quant), dbid, typ + 1))
        self.conn.commit()

    def returnOrders(self, query=""):
        """Return [fName, lName, code, [qty per ticket type]] per matching user."""
        users = []
        for a in self.read("user_info", query):
            orders = []
            for b in self.read("ticket_types"):
                order = self.read("orders", "ticketTypeID={0} AND userID={1}".format(b[0], a[0]))
                if order != []:
                    orders.append(order[0][1])
                else:
                    orders.append(0)
            users.append([a[1], a[2], a[3], orders])
        return users

    def nextAvail(self, table):
        """Return the ID the next inserted row will receive (max + 1, or 1)."""
        self.c.execute('SELECT max(ID) FROM {0}'.format(table))
        value = self.c.fetchone()[0]
        if value == None:
            # An empty table starts numbering at 1
            return 1
        else:
            return value + 1

    def verify(self):
        """Return True when the file contains this program's three tables.

        BUG FIX: previously compared one sqlite_master row against an exact
        hard-coded tuple (including the rootpage number), which failed on any
        legitimate variation between SQLite versions; now it simply checks
        that the expected tables exist.
        """
        self.c.execute("SELECT name FROM sqlite_master WHERE type='table'")
        names = {row[0] for row in self.c.fetchall()}
        return {'ticket_types', 'user_info', 'orders'} <= names
# Creates a new database with the correct tables
def newDB(path, keepalive=True):
    """Create a database at *path* containing the three empty tables.

    Returns the open Database when *keepalive* is True; otherwise closes it
    immediately after creation (and returns None).
    """
    db = Database(path)
    table_defs = (
        'CREATE TABLE ticket_types (ID INTEGER PRIMARY KEY AUTOINCREMENT, tName TEXT, tPrice DECIMAL(10,2), tInfo TEXT);',
        'CREATE TABLE user_info (ID INTEGER PRIMARY KEY AUTOINCREMENT, fName TEXT, lName TEXT, code CHAR(10));',
        'CREATE TABLE orders (ID INTEGER PRIMARY KEY AUTOINCREMENT, quantity INTEGER, userID INTEGER, ticketTypeID INTEGER);',
    )
    for statement in table_defs:
        db.c.execute(statement)
    db.conn.commit()
    if keepalive:
        # Hand the live database object back to the caller
        return db
    db.close()
# Purely for debugging and testing purposes. Create an already populated database for test usage
def sampleDB(path=":memory:"):
    """Build a throwaway database pre-filled with test data (debugging only)."""
    db = newDB(path)
    sample_types = (
        ("Adult", "20.08", "A fully grown human being."),
        ("Child", "10.50", "A slightly smaller human being."),
        ("Student", "12.10", "A youthful human being."),
        ("Senior", "18.46", "An old human being."),
    )
    for row in sample_types:
        db.write("ticket_types", row)
    db.newEntry("Elvin", "Luff", "001E5CBC5", [3, 0, 5, 0])
    return db
| {"/codereader.py": ["/checksum.py", "/dbinterface.py"], "/codegenerator.py": ["/checksum.py", "/dbinterface.py"], "/reader.py": ["/codereader.py", "/dbinterface.py", "/fileaccess.py"], "/editor.py": ["/codereader.py", "/dbinterface.py", "/fileaccess.py"], "/fileaccess.py": ["/dbinterface.py"], "/creator.py": ["/codegenerator.py", "/dbinterface.py", "/fileaccess.py"]} |
59,519 | Eelviny/ticksys | refs/heads/master | /editor.py | #!/usr/bin/env python3
import gi
gi.require_version('Gtk', '3.0')
from gi.repository import Gtk
import locale # Used for formatting strings to local currency
locale.setlocale( locale.LC_ALL, 'en_GB.UTF-8' )
import codereader
import dbinterface
import fileaccess
# Start the reader program class
class Editor():
    """Administration window: lists the ticket types and orders of a Database
    and provides popup dialogs for editing them."""

    def __init__(self):
        # No database is open yet; several toolbar buttons stay disabled
        self.firstrun = True
        # Use the Gtk Builder to read the interface file
        self.builder = Gtk.Builder()
        self.builder.add_from_file("editor.glade")
        self.builder.connect_signals(self)
        # Model behind the ticket-type list: ID, name, price, info
        self.liststore1 = Gtk.ListStore(int, str, str, str)
        self.window = self.builder.get_object("window1")
        # Build the ticket-type treeview columns
        treeview1 = self.builder.get_object("treeview1")
        for i, column_title in enumerate(["#", "Ticket Type", "Price", "Information"]):
            renderer = Gtk.CellRendererText()
            column = Gtk.TreeViewColumn(column_title, renderer, text=i)
            treeview1.append_column(column)
        treeview1.set_model(self.liststore1)
        # Model behind the orders list: code, customer name, order summary
        self.liststore2 = Gtk.ListStore(str, str, str)
        treeview2 = self.builder.get_object("treeview2")
        for i, column_title in enumerate(["Ticket Code", "Name", "Orders"]):
            renderer = Gtk.CellRendererText()
            column = Gtk.TreeViewColumn(column_title, renderer, text=i)
            treeview2.append_column(column)
        treeview2.set_model(self.liststore2)
        self.statusPush("Welcome to the Ticket Editor! Open or create a database to continue.")
        self.window.show_all()

    def statusPush(self, message):
        """Show *message* in the status bar."""
        status = self.builder.get_object("statusbar1")
        status.push(status.get_context_id(""), message)

    def newdbfile(self):
        """Ask the user for a database file and, if valid, make it current."""
        newdb = fileaccess.openDialog(self.window)
        # Do nothing if no valid path is given
        if newdb != None:
            # Close the old database, if one was open
            try:
                self.db.close()
            except Exception:
                # BUG FIX: narrowed from a bare except; on the very first
                # open there is simply no self.db yet
                pass
            self.db = newdb
            # Refresh both tables from the new database
            self.updateTickets()
            self.updateOrders()
            if self.firstrun == True:
                # Enable the buttons that require an open database
                self.builder.get_object("toolbutton4").set_sensitive(True)
                self.builder.get_object("toolbutton5").set_sensitive(True)
                self.builder.get_object("toolbutton6").set_sensitive(True)
                self.builder.get_object("toolbutton8").set_sensitive(True)
                self.builder.get_object("toolbutton9").set_sensitive(True)
                self.firstrun = False
            self.statusPush("Database opened successfully!")
        else:
            self.statusPush("The file you've selected is invalid. Please try another.")

    def updateTickets(self):
        """Reload the ticket-type list from the database."""
        self.liststore1.clear()
        # Show every column except the internal row ID formatting
        for row in self.db.read("ticket_types"):
            self.liststore1.append([int(row[0]), str(row[1]), str(locale.currency(row[2])), str(row[3])])

    def updateOrders(self):
        """Reload the orders list, summarising each customer's tickets."""
        self.liststore2.clear()
        for i in self.db.returnOrders():
            name = i[0] + " " + i[1]
            code = i[2]
            tickets = ""
            for a, b in enumerate(i[3]):
                if b != 0:
                    typename = self.db.read("ticket_types", "ID={0}".format(a+1))[0][1]
                    tickets += str(str(b) + " " + typename + ", ")
            # Drop the trailing ", " separator
            tickets = tickets[:-2]
            self.liststore2.append([code, name, tickets])

    def orderPopup(self, code=None):
        """Open the order editor popup; code=None means a brand-new order."""
        self.omode = code
        popup = self.builder.get_object("window2")
        for i in range(1,5):
            self.builder.get_object("label{0}".format(i)).set_text(self.db.read("ticket_types", "ID={0}".format(i))[0][1] + " Ticket")
        if code == None:
            user = ["", "", "", [0, 0, 0, 0]]
        else:
            user = self.db.returnOrders("code='{0}'".format(code))[0]
        self.builder.get_object("entry1").set_text(user[0])
        self.builder.get_object("entry2").set_text(user[1])
        for i in range(4):
            self.builder.get_object("adjustment{0}".format(i+1)).set_value(user[3][i])
        popup.show_all()

    def ticketPopup(self, dbid=None):
        """Open the ticket-type editor popup; dbid=None means a new type."""
        self.tmode = dbid
        popup = self.builder.get_object("window3")
        if dbid == None:
            ticket = (1, '', 0.00, '')
        else:
            ticket = self.db.read("ticket_types", "ID='{0}'".format(dbid))[0]
        self.builder.get_object("entry3").set_text(ticket[1])
        self.builder.get_object("adjustment5").set_value(float(ticket[2]))
        self.builder.get_object("entry4").set_text(ticket[3])
        popup.show_all()

    # Close all windows on the deletion of the top-level window
    def on_window1_delete_event(self, *args):
        raise SystemExit(0)

    # New button
    def on_toolbutton1_clicked(self, *args):
        # BUG FIX: this called fileaccess.saveDialog(), which does not exist
        # (AttributeError on every click). There is no working "create new
        # database" dialog yet, so fall back to the Open dialog for now.
        # TODO(review): implement a real new-database dialog in fileaccess.
        self.newdbfile()

    # Open button
    def on_toolbutton2_clicked(self, *args):
        self.newdbfile()

    # Save button
    def on_toolbutton4_clicked(self, *args):
        # BUG FIX: Database has no commit() method; commit on the connection
        self.db.conn.commit()
        self.statusPush("Database saved successfully.")

    # Save As button
    def on_toolbutton5_clicked(self, *args):
        # BUG FIX: was self.db.commit() followed by a call to the
        # non-existent fileaccess.saveDialog(); commit the current database,
        # then copy it into a file of the user's choosing.
        self.db.conn.commit()
        fileaccess.saveAsDialog(self.db, self.window)

    # Ticket Edit button
    def on_toolbutton6_clicked(self, *args):
        self.ticketPopup(self.ticketSelected())

    # Order Edit button
    def on_toolbutton7_clicked(self, *args):
        self.orderPopup(self.orderSelected())

    # Order Remove button (not implemented yet)
    def on_toolbutton8_clicked(self, *args):
        pass

    # Order Add button
    def on_toolbutton9_clicked(self, *args):
        self.orderPopup()

    # Ticket Add button
    def on_toolbutton10_clicked(self, *args):
        self.ticketPopup()

    # Ticket Remove button (not implemented yet)
    def on_toolbutton11_clicked(self, *args):
        pass

    def on_treeview1_row_activated(self, *args):
        print("rowactivate1", *args)

    def on_treeview2_row_activated(self, *args):
        print("rowactivate2", *args)

    # Close the order and ticket popups respectively
    def on_button2_clicked(self, *args):
        self.builder.get_object("window2").hide()

    def on_button4_clicked(self, *args):
        self.builder.get_object("window3").hide()

    def orderSelected(self):
        """Return the ticket code of the currently selected order row."""
        model, liststore = self.builder.get_object("treeview-selection2").get_selected()
        return str(model[liststore][0])

    def ticketSelected(self):
        """Return the database ID of the currently selected ticket-type row."""
        model, liststore = self.builder.get_object("treeview-selection1").get_selected()
        return int(model[liststore][0])
# Create the main event loop
# NOTE(review): this runs at import time — there is no __main__ guard, so
# merely importing this module launches the GUI.
main = Editor()
Gtk.main()
| {"/codereader.py": ["/checksum.py", "/dbinterface.py"], "/codegenerator.py": ["/checksum.py", "/dbinterface.py"], "/reader.py": ["/codereader.py", "/dbinterface.py", "/fileaccess.py"], "/editor.py": ["/codereader.py", "/dbinterface.py", "/fileaccess.py"], "/fileaccess.py": ["/dbinterface.py"], "/creator.py": ["/codegenerator.py", "/dbinterface.py", "/fileaccess.py"]} |
59,520 | Eelviny/ticksys | refs/heads/master | /validation.py | #!/usr/bin/env python3
# Validation Module
| {"/codereader.py": ["/checksum.py", "/dbinterface.py"], "/codegenerator.py": ["/checksum.py", "/dbinterface.py"], "/reader.py": ["/codereader.py", "/dbinterface.py", "/fileaccess.py"], "/editor.py": ["/codereader.py", "/dbinterface.py", "/fileaccess.py"], "/fileaccess.py": ["/dbinterface.py"], "/creator.py": ["/codegenerator.py", "/dbinterface.py", "/fileaccess.py"]} |
59,521 | Eelviny/ticksys | refs/heads/master | /checksum.py | #!/usr/bin/env python3
# Checksum Module
# Generate a checksum
def sumGen(ticketID):
    """Compute the two-digit checksum for the first seven code digits.

    Each digit is weighted by its 1-based position, the weighted sum is
    rendered as hexadecimal, and the result is reduced to exactly two hex
    digits (first + last when longer, zero-padded when shorter). Returns the
    two checksum digits as a list of two ints.
    """
    idno = 0
    for i in range(7):
        # BUG FIX: was `ticketID[i] * i+1`, which parses as (ticketID[i]*i)+1
        # and silently gave the first digit a weight of zero — contradicting
        # the stated "multiply each value by its position" intent.
        # NOTE(review): codes generated by the old formula will no longer
        # validate against this one.
        idno += ticketID[i] * (i + 1)
    # BUG FIX: zero-pad so sums below 0x10 don't crash the two-digit
    # indexing below with an IndexError.
    hexno = format(idno, '02x')
    # Take first and last values if the hex is larger than 2 digits
    if len(hexno) > 2:
        hexno = hexno[0] + hexno[-1]
    # Return the two values as decimals in a list
    return [int(hexno[0], 16), int(hexno[1], 16)]
def sumRead(ticketID):
    """Return True when the code's embedded checksum matches a recomputed one."""
    # Recompute the checksum from the seven payload digits
    expected = sumGen(ticketID[0:7])
    # Extract the checksum digits embedded in the code
    embedded = ticketID[7:9]
    return expected == embedded
| {"/codereader.py": ["/checksum.py", "/dbinterface.py"], "/codegenerator.py": ["/checksum.py", "/dbinterface.py"], "/reader.py": ["/codereader.py", "/dbinterface.py", "/fileaccess.py"], "/editor.py": ["/codereader.py", "/dbinterface.py", "/fileaccess.py"], "/fileaccess.py": ["/dbinterface.py"], "/creator.py": ["/codegenerator.py", "/dbinterface.py", "/fileaccess.py"]} |
59,522 | Eelviny/ticksys | refs/heads/master | /fileaccess.py | #!/usr/bin/env python3
# File Access Module
import gi
gi.require_version('Gtk', '3.0')
from gi.repository import Gtk
from dbinterface import *
#from shutil import copyfile
def setFile(path):
    """Open and validate an existing ticket database.

    Returns the open Database on success. Raises TypeError when the path
    does not end in .db, is not an SQLite file, or lacks this program's
    schema — the exception type the dialog callers already handle.
    """
    # Cheap filename check before touching the file at all
    if path[-3:] != '.db':
        raise TypeError
    db = Database(path)
    # BUG FIX: the original comment promised to catch exceptions but never
    # did — a non-SQLite file made verify() raise sqlite3.DatabaseError,
    # which escaped past the dialogs' TypeError/NameError handlers.
    try:
        valid = db.verify()
    except sqlite3.Error:
        db.close(False)
        raise TypeError
    if not valid:
        # A real SQLite file, but not our schema; close without saving
        db.close(False)
        raise TypeError
    # Return the new database object
    return db
def newFile(path):
    """Return *path* guaranteed to carry the .db extension."""
    return path if path.endswith(".db") else path + ".db"
# Create a dialog box for selecting the sqlite file
def openDialog(parent=None):
    """Show a file chooser and return an opened Database.

    Returns None when the user cancels or picks an invalid file; any other
    non-OK dialog response is passed through unchanged (matching the
    original behaviour).
    """
    chooser = Gtk.FileChooserDialog("Please choose a file", parent, Gtk.FileChooserAction.OPEN, (Gtk.STOCK_CANCEL, Gtk.ResponseType.CANCEL, Gtk.STOCK_OPEN, Gtk.ResponseType.OK))
    result = chooser.run()
    # Default to the raw response for any unhandled button/close action
    outcome = result
    if result == Gtk.ResponseType.OK:
        chosen = chooser.get_filename()
        print("File selected: " + chosen) # debug code
        try:
            # setFile validates the extension and schema before returning
            outcome = setFile(chosen)
        except (TypeError, NameError):
            outcome = None
            print("This is an invalid file!") # debug code
    elif result == Gtk.ResponseType.CANCEL:
        print("Cancel clicked") # debug code
        outcome = None
    # The dialog widget is no longer needed
    chooser.destroy()
    return outcome
def saveAsDialog(db, parent=None):
    """Copy the contents of *db* into a newly chosen file.

    Returns the new Database object on success, or None when the user
    cancels. The source database is left open and unchanged.
    """
    dialog = Gtk.FileChooserDialog("Save Database As...", parent, Gtk.FileChooserAction.SAVE, (Gtk.STOCK_CANCEL, Gtk.ResponseType.CANCEL, Gtk.STOCK_SAVE, Gtk.ResponseType.OK))
    response = dialog.run()
    if response == Gtk.ResponseType.OK:
        print("File selected: " + dialog.get_filename() + "(.db)") # debug code
        # Create a fresh database at the chosen path and copy every table
        # across row by row (IDs are regenerated by AUTOINCREMENT).
        newdb = newDB(newFile(dialog.get_filename()))
        for ticket in db.read("ticket_types"):
            newdb.write("ticket_types", ticket[1:4])
        for user in db.read("user_info"):
            newdb.write("user_info", user[1:4])
        for order in db.read("orders"):
            newdb.write("orders", order[1:4])
        # BUG FIX: the copy was never committed (silent data loss) and the
        # caller received the raw Gtk response code instead of the database.
        newdb.conn.commit()
        response = newdb
    elif response == Gtk.ResponseType.CANCEL:
        print("Cancel clicked") # debug code
        response = None
    # Remember to get rid of the dialog box
    dialog.destroy()
    return response
def newDialog(parent=None):
    # NOTE(review): unfinished stub — `db` is not defined anywhere in this
    # function, so calling it raises NameError. It appears to be meant to
    # seed a freshly created database with four placeholder ticket types;
    # confirm the intent before wiring it up to the GUI.
    for i in range(4):
        print(db.write('ticket_types', ('Undefined', '0.0', 'Undefined')))
| {"/codereader.py": ["/checksum.py", "/dbinterface.py"], "/codegenerator.py": ["/checksum.py", "/dbinterface.py"], "/reader.py": ["/codereader.py", "/dbinterface.py", "/fileaccess.py"], "/editor.py": ["/codereader.py", "/dbinterface.py", "/fileaccess.py"], "/fileaccess.py": ["/dbinterface.py"], "/creator.py": ["/codegenerator.py", "/dbinterface.py", "/fileaccess.py"]} |
59,523 | Eelviny/ticksys | refs/heads/master | /creator.py | #!/usr/bin/env python3
import gi
gi.require_version('Gtk', '3.0')
from gi.repository import Gtk
import locale # Used for formatting strings to local currency
locale.setlocale( locale.LC_ALL, 'en_GB.UTF-8' )
import codegenerator
import dbinterface
import fileaccess
# Start the reader program class
class Creator():
    """Ticket-creation window.

    Loads the Glade UI, asks the operator for a database file, lets them
    build an order of up to four ticket types for a named customer, and
    saves the order together with a freshly generated ticket code.
    """

    def __init__(self):
        # Use the Gtk Builder to read the interface file
        self.builder = Gtk.Builder()
        self.builder.add_from_file("creator.glade")
        self.builder.connect_signals(self)
        # Model behind the treeview: (ticket type, formatted price) rows.
        self.liststore1 = Gtk.ListStore(str, str)
        # Find items that need to be dynamic
        self.window = self.builder.get_object("window1")
        self.entry1 = self.builder.get_object("entry1")  # first-name entry
        self.entry2 = self.builder.get_object("entry2")  # last-name entry
        # Find the treeview object and attach a cell renderer to it
        self.treeview = self.builder.get_object("treeview1")
        # For every item in the list, create a column for it
        for i, column_title in enumerate(["Ticket Type", "Price"]):
            renderer = Gtk.CellRendererText()
            column = Gtk.TreeViewColumn(column_title, renderer, text=i)
            self.treeview.append_column(column)
        # Attach model to treeview
        self.treeview.set_model(self.liststore1)
        # With all elements set, show the window
        self.window.show_all()
        # A database is mandatory; abort if the open dialog was cancelled.
        if self.newdbfile() == False:
            raise SystemExit(0)
        print(self.tickets) # debug code

    def newdbfile(self):
        """Ask for a database file and (re)load the four ticket types.

        Returns True when a database was opened, False when the dialog
        was cancelled.
        """
        newdb = fileaccess.openDialog(self.window)
        if newdb != None:
            print("New File") # debug code
            # Close any previously opened database first.
            try:
                self.db.close()
            except:
                pass
            self.db = newdb
            # Translate the database ticket types into a GUI list
            self.dbticket = self.db.read("ticket_types")
            self.tickets = []
            # For every ticket type, label a button for it
            # NOTE(review): assumes ticket_types always holds at least four
            # rows shaped (id, name, price, description) -- confirm against
            # the database schema.
            for i in range(4):
                self.tickets.append([self.dbticket[i][1], str(locale.currency(self.dbticket[i][2]))])
                button = self.builder.get_object("button{0}".format(str(i+1)))
                button.set_label(self.dbticket[i][1])
                # Set a tooltip description that can be set by the user
                button.set_tooltip_text(self.dbticket[i][3])
            self.clearTable()
            return True
        else:
            return False

    def addValue(self, value):
        """Add one ticket of type *value* (index 0-3) to the current order."""
        # Add the ticket to the list view
        self.liststore1.append(self.tickets[value])
        # Also add to internal list
        self.ticketlist[value] += 1
        print(self.ticketlist) # debug code
        # Make sure the price is up to date
        self.updatePrice()

    def clearTable(self):
        """Reset the order: clear the list, name boxes, and issue a new code."""
        # Make sure all lists are reset
        self.liststore1.clear()
        # Gtk lists are not very manageable - this is a more pythonic list that mirrors its changes
        self.ticketlist = [0,0,0,0]
        # Reset the name boxes
        self.entry1.set_text("")
        self.entry2.set_text("")
        # Create a new code for a new ticket
        self.code = codegenerator.codePrint(codegenerator.newCode(self.db))
        self.builder.get_object("label2").set_text(str("Code: "+self.code))
        # Set the price to 0 by updating it to the empty list
        self.updatePrice()

    def updatePrice(self):
        """Recompute the order total and display it in the price label."""
        # Price starts as a 0 decimal number
        price = 0.0
        # Take each ticket in turn, and multiply it by the quantity
        for i in range(4):
            price += self.ticketlist[i] * self.dbticket[i][2]
        # Set the label object to show the price
        self.builder.get_object("label1").set_text(str(locale.currency(price)))

    # Link buttons and objects to events
    def on_window1_delete_event(self, *args):
        self.db.close()
        # When the top-level window is closed, close everything
        raise SystemExit(0)

    def on_button1_clicked(self, *args):
        self.addValue(0)

    def on_button2_clicked(self, *args):
        self.addValue(1)

    def on_button3_clicked(self, *args):
        self.addValue(2)

    def on_button4_clicked(self, *args):
        self.addValue(3)

    def on_toolbutton1_clicked(self, *args):
        # Toolbar: clear the current order.
        self.clearTable()

    def on_button5_clicked(self, *args):
        # "Open database" button.
        self.newdbfile()

    def on_toolbutton2_clicked(self, *args):
        """Validate the name fields and save the current order."""
        # Fetch the names written into the text fields
        fName = self.entry1.get_text()
        lName = self.entry2.get_text()
        # Do not allow saving if fields are empty or read "Incomplete"
        if self.ticketlist != [0,0,0,0] and fName != "" and fName != "Incomplete" and lName != "" and lName != "Incomplete":
            # use the newEntry function to place all the info in the correct tables
            self.db.newEntry(fName, lName, self.code, self.ticketlist)
            # Once saved, clear the table for the next user
            self.clearTable()
            print(self.db.read("user_info")) # debug code
            print(self.db.read("orders")) # debug code
        # Alert the user to an incomplete section by setting "Incomplete" in it
        if fName == "":
            self.entry1.set_text("Incomplete")
        if lName == "":
            self.entry2.set_text("Incomplete")
# Assign the class and start the event loop.
# (Creator.__init__ raises SystemExit when no database file is chosen.)
main = Creator()
Gtk.main()
| {"/codereader.py": ["/checksum.py", "/dbinterface.py"], "/codegenerator.py": ["/checksum.py", "/dbinterface.py"], "/reader.py": ["/codereader.py", "/dbinterface.py", "/fileaccess.py"], "/editor.py": ["/codereader.py", "/dbinterface.py", "/fileaccess.py"], "/fileaccess.py": ["/dbinterface.py"], "/creator.py": ["/codegenerator.py", "/dbinterface.py", "/fileaccess.py"]} |
59,527 | russab0/distractor_generation | refs/heads/master | /src/__init__.py | import sys
import os

# Put the project root (parent of this package) on sys.path so the
# absolute "src.*" imports below resolve regardless of the caller's CWD.
dir_path = os.path.dirname(os.path.realpath(__file__))
print('dir', dir_path)  # NOTE(review): debug output emitted at import time
sys.path.append(os.path.abspath(os.path.join(dir_path, os.pardir)))

# Re-export the package's public pieces at the top level.
from src.model import *
from src.utility import *
import src.dump
import src.train
import src.eval
| {"/src/__init__.py": ["/src/utility/__init__.py"], "/src/utility/__init__.py": ["/src/utility/eval_metric.py"]} |
59,528 | russab0/distractor_generation | refs/heads/master | /setup.py | from setuptools import find_packages, setup
# Minimal setuptools configuration: packages everything discoverable
# under the repository (the "src" package).
setup(
    name='src',
    packages=find_packages(),
    version='0.1.0',
    description='A short description of the project.',
    author='Ruslan Sabirov - student, Vladimir Ivanov - superviser (Innopolis University)',
    license='',
)
| {"/src/__init__.py": ["/src/utility/__init__.py"], "/src/utility/__init__.py": ["/src/utility/eval_metric.py"]} |
59,529 | russab0/distractor_generation | refs/heads/master | /src/utility/__init__.py | from .dataset import *
from .eval_metric import *
from .logger import *
from .loss import *
from .model_loader import *
from .tok import * | {"/src/__init__.py": ["/src/utility/__init__.py"], "/src/utility/__init__.py": ["/src/utility/eval_metric.py"]} |
59,530 | russab0/distractor_generation | refs/heads/master | /src/model/once/__init__.py | from .dataloader import get_data_from_file, get_feature_from_data, preprocessing_data
from .model import Model
| {"/src/__init__.py": ["/src/utility/__init__.py"], "/src/utility/__init__.py": ["/src/utility/eval_metric.py"]} |
59,531 | russab0/distractor_generation | refs/heads/master | /src/utility/eval_metric.py | from collections import defaultdict
import string
import re
from collections import Counter
def _normalize_answer(s):
"""Lower text and remove punctuation, articles and extra whitespace."""
def remove_articles(text):
if len(text) > 1:
return re.sub(r'\b(a|an|the)\b', ' ', text)
else:
return text
def white_space_fix(text):
return ' '.join(text.split())
def remove_punc(text):
exclude = set(string.punctuation)
return ''.join(ch for ch in text if ch not in exclude)
def lower(text):
return text.lower()
return white_space_fix(remove_articles(remove_punc(lower(s))))
def _f1_score(prediction, ground_truth):
    """Token-level F1 between two strings after answer normalization.

    Returns 0 when the normalized token multisets share nothing.
    """
    pred_tokens = _normalize_answer(prediction).split()
    truth_tokens = _normalize_answer(ground_truth).split()
    # Multiset intersection counts each shared token at most min(freq) times.
    overlap = sum((Counter(pred_tokens) & Counter(truth_tokens)).values())
    if overlap == 0:
        return 0
    precision = float(overlap) / len(pred_tokens)
    recall = float(overlap) / len(truth_tokens)
    return (2 * precision * recall) / (precision + recall)
class EvalMetric:
    """Accumulates (input, prediction, target) records per task and scores them.

    cal_score() supports three metric families selected by substring match:
    "emf1" (exact match / token F1), "nlg" (BLEU/ROUGE/... via the optional
    nlg-eval package) and "clas" (multi-label classification via scikit-learn).
    """

    def __init__(self, tokenizer, max_candidate=6):
        # task name -> {'input': [...], 'predicted': [...], 'predicteds': [...],
        #               'target': [...], 'targets': [...]}
        self.tasks = defaultdict(lambda: defaultdict(list))
        # NLG target lists are padded with "" up to this many candidates.
        self.max_candidate = max_candidate
        self.tokenizer = tokenizer
        # task name -> {target string -> occurrence count}; doubles as the
        # label vocabulary for classification scoring.
        self.target_list = defaultdict(lambda: defaultdict(int))

    def tokenize_text(self, text):
        """Round-trip *text* through the tokenizer to normalize its surface form."""
        return self.tokenizer.convert_tokens_to_string(self.tokenizer.tokenize(text))

    def add_record(self, input, predicted, target, task='default'):
        """Normalize one example and append it to *task*'s record lists.

        *input*, *predicted* and *target* may each be a string or a list of
        strings; a string target may pack several candidates separated by
        "[SEP]".  NOTE(review): *input* shadows the builtin of the same name,
        and list arguments are tokenized in place (mutating the caller's list).
        """
        if isinstance(input, str):
            input = self.tokenize_text(input.strip())
        if isinstance(input, list):
            for i, t in enumerate(input):
                input[i] = self.tokenize_text(t.strip())
        if isinstance(predicted, str):
            predicted = self.tokenize_text(predicted)
        if isinstance(predicted, list):
            for i, t in enumerate(predicted):
                predicted[i] = self.tokenize_text(t.strip())
        if isinstance(target, str):
            targets = []
            if "[SEP]" in target:
                targets.extend([self.tokenize_text(st.strip()) for st in target.split("[SEP]")])
            else:
                targets.append(self.tokenize_text(target.strip()))
        if isinstance(target, list):
            for i, t in enumerate(target):
                target[i] = self.tokenize_text(t.strip())
            targets = target
        # Pad NLG tasks to a fixed candidate count so reference lists line up.
        if self.max_candidate - len(targets) > 0 and "nlg" in task:
            targets.extend([""] * (self.max_candidate - len(targets)))
        for t in targets:
            self.target_list[task][t] += 1
        self.tasks[task]['input'].append(input)
        self.tasks[task]['predicted'].append(predicted)
        # 'predicteds' keeps each prediction wrapped in a list (candidate shape).
        self.tasks[task]['predicteds'].append([predicted])
        self.tasks[task]['target'].append(target)
        self.tasks[task]['targets'].append(targets)

    def get_record(self, task='default'):
        """Return the raw record dict accumulated for *task*."""
        return self.tasks[task]

    def cal_score(self, metric):
        """Yield (task_name, result, data_score) for every recorded task.

        *metric* selects the scorer by substring ("emf1", "nlg", "clas").
        data_score is a per-example list of [prediction, matched target,
        score dict], sorted by the family's headline score.
        NOTE(review): the "clas" branch mutates the stored task records.
        """
        data_score = []
        for task_name, task in self.tasks.items():
            print("Task : " + task_name + " report ")
            if "emf1" in metric:
                em = 0
                total = 0
                f1 = 0
                for pos, predict in enumerate(task['predicted']):
                    em_list = []
                    f1_list = []
                    # Score against every candidate target; keep the best.
                    for target in task['targets'][pos]:
                        # Exact match when normalized strings agree (and are
                        # non-empty), or when both raw strings are empty.
                        if _normalize_answer(str(predict)) == _normalize_answer(str(target)) and len(
                                _normalize_answer(str(predict))) > 0 or len(str(predict)) == len(str(target)) == 0:
                            em_score = 1
                            f1_score = 1
                        else:
                            em_score = 0
                            f1_score = _f1_score(str(predict), str(target))
                        em_list.append(em_score)
                        f1_list.append(f1_score)
                    em += max(em_list)
                    f1 += max(f1_list)
                    data_score.append([predict, task['targets'][pos][em_list.index(max(em_list))],
                                       {'em': max(em_list), 'f1': max(f1_list)}])
                    total += 1
                # "(total or not total)" is 1 when total == 0, avoiding a
                # ZeroDivisionError on an empty task.
                result = {"EM": em / (total or not total), "F1": f1 / (total or not total)}
                data_score = sorted(data_score, key=lambda i: i[2]['em'], reverse=True)
            if "nlg" in metric:
                try:
                    from nlgeval import NLGEval
                except ImportError:
                    print(
                        "nlg-eval package not install, plz install it: pip install git+https://github.com/voidful/nlg-eval.git ; nlg-eval --setup ./nlg-eval-data/")
                    raise
                nlgeval = NLGEval(no_skipthoughts=True, no_glove=True, metrics_to_omit=["METEOR"])
                targets = task['targets']
                predicted = task['predicted']
                # Per-example scores first, then the corpus-level result.
                for t, p in zip(targets, predicted):
                    data_score.append([p, t, nlgeval.compute_metrics(ref_list=list(map(list, zip(t))), hyp_list=[p])])
                result = nlgeval.compute_metrics(ref_list=list(map(list, zip(*task['targets']))),  # transpose
                                                 hyp_list=predicted)
                data_score = sorted(data_score, key=lambda i: i[2]['ROUGE_L'])
            if "clas" in metric:
                from sklearn.metrics import classification_report
                from sklearn.preprocessing import MultiLabelBinarizer
                from sklearn.metrics import precision_recall_fscore_support
                # Label vocabulary: every non-empty target ever recorded.
                target_key = [t for t in self.target_list[task_name].keys() if len(t) > 0]
                mlb = MultiLabelBinarizer().fit([target_key])
                # remove all blank target
                task['targets'] = [[j for j in sub if len(j) > 0] for sub in task['targets']]
                # modify for tagging result: flatten per-token labels into
                # one single-label record each, padding predictions with ''.
                if isinstance(task['predicteds'][0][0], list):
                    task['targets'] = sum([[[j] for j in sub] for sub in task['targets']], [])
                    task['predicteds'] = sum([[[j] for j in sub] for sub in task['predicted']], [])
                    if len(task['targets']) != len(task['predicteds']):
                        diff = len(task['targets']) - len(task['predicteds'])
                        task['predicteds'].extend([['']] * diff)
                targets = task['targets']
                predicted = task['predicteds']
                for p, t in zip(predicted, targets):
                    score = dict(zip(["precision", "recall", "fbeta_score", "support"],
                                     precision_recall_fscore_support(mlb.transform([t]), mlb.transform([p]),
                                                                     average='weighted')))
                    data_score.append([p, t, score])
                print(mlb.classes_)
                result = classification_report(
                    mlb.transform(targets),
                    mlb.transform(predicted),
                    target_names=list(mlb.classes_))
                data_score = sorted(data_score, key=lambda i: i[2]['fbeta_score'])
            yield (task_name, result, data_score)
| {"/src/__init__.py": ["/src/utility/__init__.py"], "/src/utility/__init__.py": ["/src/utility/eval_metric.py"]} |
59,535 | wanzhaohong/graphical_password_crafting_system | refs/heads/master | /sql_connect.py | import sqlite3
from sqlite3 import Error
#connect to the database
def connect(db_file="mydb"):
    """Open a connection to the SQLite database.

    Args:
        db_file: path of the database file. Defaults to the project's
            "mydb" file, so existing callers are unaffected; pass
            ":memory:" for an in-memory database (useful in tests).

    Returns:
        An open sqlite3 Connection, or None when the connection failed
        (the error is printed, matching the original behaviour).
    """
    conn = None
    try:
        #connect to the database
        conn = sqlite3.connect(db_file)
    except Error as e:
        # Report and fall through: callers receive None on failure.
        print(e)
    return conn
#insert the username into the tables, and make the username as the primary key
def insert_username(info_name):
    """Create (or reset) the row for *info_name* in the User table.

    username is the primary key, so INSERT OR REPLACE wipes an existing
    row: all failure counters, timings and stored passwords are reset.
    """
    statement = """INSERT OR REPLACE INTO User(username, fail_email, fail_shopping, fail_banking, time_email_start, time_email_end, time_shopping_start, time_shopping_end, time_banking_start, time_banking_end, email_password, shopping_password, banking_password) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?)"""
    # Zeroed counters and empty strings for every other column.
    row = (info_name, 0, 0, 0, '', '', '', '', '', '', '', '', '')
    connection = connect()
    cursor = connection.cursor()
    cursor.execute(statement, row)
    connection.commit()
    cursor.close()
    connection.close()
#insert the assigned password into the tables by using the username
def insert_password(info_name, info_epass, password_type):
    """Store an assigned graphical password for a user.

    Args:
        info_name: username (primary key of the User table).
        info_epass: space-separated colour string to store.
        password_type: which column to update -- "email_password",
            "shopping_password" or "banking_password".

    Raises:
        ValueError: for an unknown password_type. (Previously an unknown
            type crashed later with an UnboundLocalError on `sql`.)
    """
    # Map each accepted type to its UPDATE statement; the column name is
    # fixed here, never taken from user input.
    statements = {
        "email_password": """UPDATE User SET email_password = ? WHERE username = ?""",
        "shopping_password": """UPDATE User SET shopping_password = ? WHERE username = ?""",
        "banking_password": """UPDATE User SET banking_password = ? WHERE username = ?""",
    }
    if password_type not in statements:
        raise ValueError("unknown password_type: %r" % (password_type,))
    sql = statements[password_type]
    value = (info_epass, info_name)
    conn = connect()
    cur = conn.cursor()
    cur.execute(sql, value)
    conn.commit()
    cur.close()
    conn.close()
#select the password from the tables by using the username
def get_password(info_name, password_type):
    """Fetch a user's stored password as a list of colour names.

    Args:
        info_name: username to look up.
        password_type: "email_password", "shopping_password" or
            "banking_password" -- the column to read.

    Returns:
        The stored colours as a list of strings (empty tokens removed).

    Raises:
        ValueError: for an unknown password_type.
        IndexError: when no row exists for *info_name* (unchanged from
            the original behaviour).
    """
    queries = {
        "email_password": """SELECT email_password FROM User WHERE username = ?""",
        "shopping_password": """SELECT shopping_password FROM User WHERE username = ?""",
        "banking_password": """SELECT banking_password FROM User WHERE username = ?""",
    }
    if password_type not in queries:
        raise ValueError("unknown password_type: %r" % (password_type,))
    sql = queries[password_type]
    value = (info_name,)
    conn = connect()
    cur = conn.cursor()
    cur.execute(sql, value)
    ar = [str(r[0]) for r in cur.fetchall()]
    cur.close()
    conn.close()
    # Passwords are stored as "colour colour ... " with a trailing
    # separator, so splitting yields empty tokens; drop them all.
    # (The old index-based truncation loop could raise IndexError when an
    # empty token appeared anywhere but at the end of the list.)
    return [token for token in ar[0].split(' ') if token != '']
#insert the time of failures into the table by using username
def count_failure(info_name, info_count, password_type):
    """Record the number of failed attempts for one password type.

    *password_type* selects the counter column: "email_password",
    "shopping_password" or "banking_password".
    """
    # Resolve the counter column for the requested password type.
    if password_type == "email_password":
        column = "fail_email"
    elif password_type == "shopping_password":
        column = "fail_shopping"
    elif password_type == "banking_password":
        column = "fail_banking"
    # The column name is one of the fixed values above, never user input.
    sql = """UPDATE User SET {0} = ? WHERE username = ?""".format(column)
    parameters = (info_count, info_name)
    connection = connect()
    cursor = connection.cursor()
    cursor.execute(sql, parameters)
    connection.commit()
    cursor.close()
    connection.close()
#insert the time cost into the table by using the username
def count_time(info_name, info_time_start, info_time_end, password_type):
    """Record when the user started and finished entering one password.

    Writes the *_start and *_end timestamp columns selected by
    *password_type* ("email_password" / "shopping_password" /
    "banking_password") for user *info_name*.
    """
    # Resolve the column prefix for the requested password type.
    if password_type == "email_password":
        prefix = "time_email"
    elif password_type == "shopping_password":
        prefix = "time_shopping"
    elif password_type == "banking_password":
        prefix = "time_banking"
    # Prefix is one of the fixed values above, never user input.
    start_sql = """UPDATE User SET {0}_start = ? WHERE username = ?""".format(prefix)
    end_sql = """UPDATE User SET {0}_end = ? WHERE username = ?""".format(prefix)
    connection = connect()
    cursor = connection.cursor()
    cursor.execute(start_sql, (info_time_start, info_name))
    cursor.execute(end_sql, (info_time_end, info_name))
    connection.commit()
    cursor.close()
    connection.close()
| {"/program.py": ["/sql_connect.py", "/password.py"]} |
59,536 | wanzhaohong/graphical_password_crafting_system | refs/heads/master | /program.py | import Tkinter as tk
from Tkinter import *
import datetime
from sql_connect import *
from password import *
LARGE_FONT = ("Verdana", 12)
#create the GUI window of the application
class first(tk.Tk):
    """Root window: instantiates every page frame once and raises one at a time."""

    def __init__(self, *args, **kwargs):
        tk.Tk.__init__(self, *args, **kwargs)
        first_page = tk.Frame(self)
        first_page.pack(side="top", fill="both", expand=True)
        first_page.grid_rowconfigure(0, weight=1)
        first_page.grid_columnconfigure(0, weight=1)
        # Page-class -> frame-instance map used by show_frame().
        self.frames = {}
        # Instantiate every page up front so show_frame can switch between them.
        for F in (login, instruction_page, Email_practice_page, Shopping_practice_page, Banking_practice_page, prepare_page, real_email_page, real_shopping_page, real_banking_page, end_page):
            frame = F(first_page, self)
            self.frames[F] = frame
            # All frames share the same grid cell; the raised one is visible.
            frame.grid(row=0, column=0, sticky="nsew")
        self.show_frame(login)

    #function to move to next frame
    def show_frame(self,cont):
        """Raise the frame registered for page class *cont*."""
        frame = self.frames[cont]
        frame.tkraise()
#login page for the tester to enter their username, and it will be collect into the database
class login(tk.Frame):
    """First page: collects the tester's username and stores it in the database."""

    #helper function to input username into the database, and then switch to the next page.
    def get_username(self):
        username_info = self.username_entry.get()
        insert_username(username_info)
        # Stash the username on the controller so later pages can read it.
        self.controller.SomeVar = username_info
        #send the username to the next frame as a variable
        self.controller.frames[instruction_page].name_label()

    def __init__(self, parent, controller):
        tk.Frame.__init__(self,parent)
        self.controller = controller
        tk.Label(self, text="Please enter your username", pady=10, padx=10, font=LARGE_FONT).pack()
        #get the username from the user's input
        username_label = Label(self, text="Username")
        username_label.pack()
        self.username_entry = Entry(self)
        self.username_entry.pack()
        # Saves the username, then advances to the instruction page.
        # NOTE(review): the local name "next" shadows the builtin.
        next = tk.Button(self, text="Enter", fg="white", bg="#263D42", command=lambda:[self.get_username(), controller.show_frame(instruction_page)]).pack()
#Testing Email password
class instruction_page(tk.Frame):
    """Explains the controls before the three practice pages begin."""

    def __init__(self, parent, controller):
        self.controller = controller
        tk.Frame.__init__(self, parent)
        # Username label; filled in by name_label() once the user logs in.
        self.name = tk.Label(self, text="", pady=5, padx=10, font=LARGE_FONT)
        self.name.pack()
        tk.Label(self, text="Before the practice,", pady=5, padx=10, font=LARGE_FONT).pack()
        tk.Label(self, text="In the following pages, you need to remember the face which will be shown on the left side,", pady=5, padx=10, font=LARGE_FONT).pack()
        tk.Label(self, text="Press CHECK button to check if you entered the right password.", pady=5, padx=10, font=LARGE_FONT).pack()
        tk.Label(self, text="Press Back button to go to the last page.", pady=5, padx=10, font=LARGE_FONT).pack()
        tk.Label(self, text="Press CONTINUE button to go to the next page.", pady=5, padx=10, font=LARGE_FONT).pack()
        tk.Label(self, text="Press submit button on right side to coloured the face.", pady=5, padx=10, font=LARGE_FONT).pack()
        tk.Label(self, text="To contnue, press the NEXT PAGE button below", pady=5, padx=10, font=LARGE_FONT).pack()
        # Advance to the first practice page (email password).
        Next = tk.Button(self, text="NEXT PAGE", fg="white", bg="#263D42", command=lambda: controller.show_frame(Email_practice_page)).pack(side=BOTTOM)

    def name_label(self):
        """Show the logged-in username and forward it to the next page."""
        self.name.config(text=self.controller.SomeVar)
        self.controller.frames[Email_practice_page].name_label2()
#Pracetice page for the Email password
class Email_practice_page(tk.Frame):
    """Practice page for the email password.

    Shows a randomly coloured clown face (the assigned password) on the
    left, a blank face on the right that the user colours via the
    dropdowns, and buttons to check the attempt against the stored value.
    """

    def name_label2(self):
        """Show the username and propagate it to the shopping practice page."""
        self.name.config(text=self.controller.SomeVar)
        self.controller.frames[Shopping_practice_page].name_label3()

    #get colours from the input and changes the colour of the clown face
    def colouring(self, list):
        # NOTE(review): "list" shadows the builtin; it holds the 12 dropdown
        # selections. Local "re" below also shadows the re module name.
        colors = list
        h = colors[0]
        f = colors[1]
        n = colors[2]
        lc = colors[3]
        rc = colors[4]
        m = colors[5]
        le = colors[6]
        re = colors[7]
        lew = colors[8]
        rew = colors[9]
        ha = colors[10]
        coh = colors[11]
        # Repaint the user-coloured face (module-global "basic").
        change_color(basic, h, f, n, lc, rc, m, le, re, lew, rew, ha, coh)

    #get the username
    def get(self):
        x = self.name["text"]
        return x

    #record the assigned password into database
    def get_name(self, h, f, n, lc, rc, m, le, re, lew, rew, ha, coh, p):
        """Serialize the 12 assigned colours and store them under column *p*."""
        array = ['', '', '', '', '', '', '', '', '', '', '', '']
        array[0] = h
        array[1] = f
        array[2] = n
        array[3] = lc
        array[4] = rc
        array[5] = m
        array[6] = le
        array[7] = re
        array[8] = lew
        array[9] = rew
        array[10] = ha
        array[11] = coh
        # Space-separated colour string (with trailing space), the format
        # expected by sql_connect.get_password().
        str = ""
        for i in array:
            str += i + " "
        user = self.get()
        insert_password(user, str, p)

    #compare the input password and the assigned password
    def check(self, lst, p):
        """Compare the entered colours *lst* with the stored password *p*."""
        user = self.get()
        str = ""
        for i in lst:
            str+= i + " "
        assigned_array = get_password(user, p)
        str2 = ""
        for j in assigned_array:
            str2+= j + " "
        # Feedback pop-ups are created as extra Tk root windows.
        if str == str2:
            window = tk.Tk()
            tk.Label(window, text="You have entered the correct password.").pack(side=TOP)
        else:
            window2 = tk.Tk()
            tk.Label(window2, text="Wrong password, try again.").pack(side=TOP)

    def __init__(self, parent, controller):
        tk.Frame.__init__(self, parent)
        self.controller = controller
        # Widgets shared with the helper methods via module-level globals.
        global left, right, selection, basic, assigned
        self.page = tk.Label(self, text="Email_practice_page", pady=5, padx=10, font=LARGE_FONT)
        self.page.pack()
        #username
        self.name = tk.Label(self, text="", pady=5, padx=10, font=LARGE_FONT)
        self.name.pack()
        self.label = tk.Label(self, text="", pady=5, padx=10, font=LARGE_FONT)
        self.label.pack()
        canvas = Canvas(self, height=370, width=1000, bg="#263D42")
        canvas.pack()
        #building left frame for showing the assigned password, and the right frame to show the entered password, and the selection frame is for the user to enter their password.
        left = Frame(canvas, bg="white")
        right = Frame(canvas, bg="white")
        selection = Frame(canvas, bg ="white")
        left.place(x=10, y=10, height=330, width=300, anchor='nw')
        right.place(x=320, y=10, height=330, width=300, anchor='nw')
        selection.place(x=630, y=10, height=330, width=350, anchor='nw')
        Label(left, text="Here is your assigned password", font=("Times", "8")).pack(side=TOP)
        Label(right, text="Here is the graph you have coloured", font=("Times", "8")).pack(side=TOP)
        Label(selection, text ="Please select the colour for each part, then press submit", font=("Times", "8")).pack(side=TOP)
        #show the assigned password on left frame
        hair, face, nose, leftc, rightc, mouth, lefteye, righteye, lefteyebow, righteyebow, hat, conhat = random_colour()
        assigned = clown_face(left, hair, face, nose, leftc, rightc, mouth, lefteye, righteye, lefteyebow, righteyebow, hat, conhat)
        assigned.pack()
        #show the basic password you have on right frame
        basic = clown_face(right, "white", "white", "white", "white", "white", "white", "white", "white", "white", "white", "white", "white")
        basic.pack()
        #option menu
        list = dropbox(selection)
        Submit = tk.Button(selection, text="Submit", command=lambda:self.colouring(list)).pack(side=RIGHT)
        # Check/Continue both persist the assigned colours to the database
        # first, so get_password() always has a row to compare against.
        Back = tk.Button(self, text="Back", fg="white", bg="#263D42", command=lambda: controller.show_frame(instruction_page)).pack(side=LEFT, expand=YES)
        Check = tk.Button(self, text="Check", fg="white", bg="#263D42", command=lambda: [self.get_name(hair, face, nose, leftc, rightc, mouth, lefteye, righteye, lefteyebow, righteyebow, hat, conhat, "email_password"), self.check(list, "email_password")]).pack(side=LEFT, expand=YES)
        Continue = tk.Button(self, text="Continue", fg="white", bg="#263D42", command=lambda: [self.get_name(hair, face, nose, leftc, rightc, mouth, lefteye, righteye, lefteyebow, righteyebow, hat, conhat, "email_password"), controller.show_frame(Shopping_practice_page)]).pack(side=LEFT, expand=YES)
#Pracetice page for the Shopping password
class Shopping_practice_page(tk.Frame):
    """Practice page for the shopping password (same layout as the email page)."""

    def name_label3(self):
        """Show the username and propagate it to the banking practice page."""
        self.name.config(text=self.controller.SomeVar)
        self.controller.frames[Banking_practice_page].name_label4()

    def colouring2(self, list):
        """Repaint the user-coloured face from the 12 dropdown selections.

        NOTE(review): "list" shadows the builtin; "re" shadows the module name.
        """
        colors = list
        h = colors[0]
        f = colors[1]
        n = colors[2]
        lc = colors[3]
        rc = colors[4]
        m = colors[5]
        le = colors[6]
        re = colors[7]
        lew = colors[8]
        rew = colors[9]
        ha = colors[10]
        coh = colors[11]
        change_color(basic_2, h, f, n, lc, rc, m, le, re, lew, rew, ha, coh)

    def get(self):
        # Read the username back from the label set by name_label3().
        x = self.name["text"]
        return x

    def get_name(self, h, f, n, lc, rc, m, le, re, lew, rew, ha, coh, p):
        """Serialize the 12 assigned colours and store them under column *p*."""
        array = ['', '', '', '', '', '', '', '', '', '', '', '']
        array[0] = h
        array[1] = f
        array[2] = n
        array[3] = lc
        array[4] = rc
        array[5] = m
        array[6] = le
        array[7] = re
        array[8] = lew
        array[9] = rew
        array[10] = ha
        array[11] = coh
        # Space-separated colour string with trailing space, as expected
        # by sql_connect.get_password().
        str = ""
        for i in array:
            str += i + " "
        user = self.get()
        insert_password(user, str, p)

    def check(self, lst, p):
        """Compare the entered colours *lst* with the stored password *p*."""
        user = self.get()
        str = ""
        for i in lst:
            str+= i + " "
        assigned_array = get_password(user, p)
        str2 = ""
        for j in assigned_array:
            str2+= j + " "
        if str == str2:
            window = tk.Tk()
            tk.Label(window, text="You have entered the correct password.").pack(side=TOP)
        else:
            window2 = tk.Tk()
            tk.Label(window2, text="Wrong password, try again.").pack(side=TOP)

    def __init__(self, parent, controller):
        tk.Frame.__init__(self, parent)
        self.controller = controller
        self.page = tk.Label(self, text="Shopping_practice_page", pady=5, padx=10, font=LARGE_FONT)
        self.page.pack()
        self.name = tk.Label(self, text="", pady=5, padx=10, font=LARGE_FONT)
        self.name.pack()
        canvas = Canvas(self, height=370, width=1000, bg="#263D42")
        canvas.pack()
        #building left frame for showing the assigned password, and the right frame to show the entered password, and the selection frame is for the user to enter their password.
        global left_2, right_2, selection_2, basic_2, assigned_2
        left_2 = Frame(canvas, bg="white")
        right_2 = Frame(canvas, bg="white")
        selection_2 = Frame(canvas, bg ="white")
        left_2.place(x=10, y=10, height=330, width=300, anchor='nw')
        right_2.place(x=320, y=10, height=330, width=300, anchor='nw')
        selection_2.place(x=630, y=10, height=330, width=350, anchor='nw')
        Label(left_2, text="Here is your assigned password", font=("Times", "8")).pack(side=TOP)
        Label(right_2, text="Here is the graph you have coloured", font=("Times", "8")).pack(side=TOP)
        Label(selection_2, text ="Please select the colour for each part, then press submit", font=("Times", "8")).pack(side=TOP)
        #show the assigned password on left frame
        hair, face, nose, leftc, rightc, mouth, lefteye, righteye, lefteyebow, righteyebow, hat, conhat = random_colour()
        assigned_2 = clown_face(left_2, hair, face, nose, leftc, rightc, mouth, lefteye, righteye, lefteyebow, righteyebow, hat, conhat)
        assigned_2.pack()
        #show the basic password you have on right frame
        basic_2 = clown_face(right_2, "white", "white", "white", "white", "white", "white", "white", "white", "white", "white", "white", "white")
        basic_2.pack()
        #option menu
        list = dropbox(selection_2)
        Submit = tk.Button(selection_2, text="Submit", command=lambda: self.colouring2(list)).pack(side=RIGHT)
        # Check/Continue both persist the assigned colours first, so
        # get_password() always has a row to compare against.
        Back = tk.Button(self, text="Back", fg="white", bg="#263D42", command=lambda: controller.show_frame(Email_practice_page)).pack(side=LEFT, expand=YES)
        Check = tk.Button(self, text="Check", fg="white", bg="#263D42", command=lambda: [self.get_name(hair, face, nose, leftc, rightc, mouth, lefteye, righteye, lefteyebow, righteyebow, hat, conhat, "shopping_password"), self.check(list, "shopping_password")]).pack(side=LEFT, expand=YES)
        Continue = tk.Button(self, text="Continue", fg="white", bg="#263D42", command=lambda: [self.get_name(hair, face, nose, leftc, rightc, mouth, lefteye, righteye, lefteyebow, righteyebow, hat, conhat, "shopping_password"), controller.show_frame(Banking_practice_page)]).pack(side=LEFT, expand=YES)
#Pracetice page for the Shopping password
class Banking_practice_page(tk.Frame):
    """Practice page for the banking password (same layout as the email page)."""

    def name_label4(self):
        """Show the username and propagate it to the pre-test warning page."""
        self.name.config(text=self.controller.SomeVar)
        self.controller.frames[prepare_page].name_label5()

    def colouring3(self, list):
        """Repaint the user-coloured face from the 12 dropdown selections.

        NOTE(review): "list" shadows the builtin; "re" shadows the module name.
        """
        colors = list
        h = colors[0]
        f = colors[1]
        n = colors[2]
        lc = colors[3]
        rc = colors[4]
        m = colors[5]
        le = colors[6]
        re = colors[7]
        lew = colors[8]
        rew = colors[9]
        ha = colors[10]
        coh = colors[11]
        change_color(basic_3, h, f, n, lc, rc, m, le, re, lew, rew, ha, coh)

    def get(self):
        # Read the username back from the label set by name_label4().
        x = self.name["text"]
        return x

    def get_name(self, h, f, n, lc, rc, m, le, re, lew, rew, ha, coh, p):
        """Serialize the 12 assigned colours and store them under column *p*."""
        array = ['', '', '', '', '', '', '', '', '', '', '', '']
        array[0] = h
        array[1] = f
        array[2] = n
        array[3] = lc
        array[4] = rc
        array[5] = m
        array[6] = le
        array[7] = re
        array[8] = lew
        array[9] = rew
        array[10] = ha
        array[11] = coh
        # Space-separated colour string with trailing space, as expected
        # by sql_connect.get_password().
        str = ""
        for i in array:
            str += i + " "
        user = self.get()
        insert_password(user, str, p)

    def check(self, lst, p):
        """Compare the entered colours *lst* with the stored password *p*."""
        user = self.get()
        str = ""
        for i in lst:
            str+= i + " "
        assigned_array = get_password(user, p)
        str2 = ""
        for j in assigned_array:
            str2+= j + " "
        if str == str2:
            window = tk.Tk()
            tk.Label(window, text="You have entered the correct password.").pack(side=TOP)
        else:
            window2 = tk.Tk()
            tk.Label(window2, text="Wrong password, try again.").pack(side=TOP)

    def __init__(self, parent, controller):
        tk.Frame.__init__(self, parent)
        self.controller = controller
        self.page = tk.Label(self, text="Banking_practice_page", pady=5, padx=10, font=LARGE_FONT)
        self.page.pack()
        self.name = tk.Label(self, text="", pady=5, padx=10, font=LARGE_FONT)
        self.name.pack()
        canvas = Canvas(self, height=370, width=1000, bg="#263D42")
        canvas.pack()
        #building left frame for showing the assigned password, and the right frame to show the entered password, and the selection frame is for the user to enter their password.
        global left_3, right_3, selection_3, basic_3, assigned_3
        left_3 = Frame(canvas, bg="white")
        right_3 = Frame(canvas, bg="white")
        selection_3 = Frame(canvas, bg ="white")
        left_3.place(x=10, y=10, height=330, width=300, anchor='nw')
        right_3.place(x=320, y=10, height=330, width=300, anchor='nw')
        selection_3.place(x=630, y=10, height=330, width=350, anchor='nw')
        Label(left_3, text="Here is your assigned password", font=("Times", "8")).pack(side=TOP)
        Label(right_3, text="Here is the graph you have coloured", font=("Times", "8")).pack(side=TOP)
        Label(selection_3, text ="Please select the colour for each part, then press submit", font=("Times", "8")).pack(side=TOP)
        #show the assigned password on left frame
        hair, face, nose, leftc, rightc, mouth, lefteye, righteye, lefteyebow, righteyebow, hat, conhat = random_colour()
        assigned_3 = clown_face(left_3, hair, face, nose, leftc, rightc, mouth, lefteye, righteye, lefteyebow, righteyebow, hat, conhat)
        assigned_3.pack()
        #show the basic password you have on right frame
        basic_3 = clown_face(right_3, "white", "white", "white", "white", "white", "white", "white", "white", "white", "white", "white", "white")
        basic_3.pack()
        #option menu
        list = dropbox(selection_3)
        Submit = tk.Button(selection_3, text="Submit", command=lambda: self.colouring3(list)).pack(side=RIGHT)
        # Check/Continue both persist the assigned colours first, so
        # get_password() always has a row to compare against.
        Back = tk.Button(self, text="Back", fg="white", bg="#263D42", command=lambda: controller.show_frame(Shopping_practice_page)).pack(side=LEFT, expand=YES)
        Check = tk.Button(self, text="Check", fg="white", bg="#263D42", command=lambda: [self.get_name(hair, face, nose, leftc, rightc, mouth, lefteye, righteye, lefteyebow, righteyebow, hat, conhat, "banking_password"), self.check(list, "banking_password")]).pack(side=LEFT, expand=YES)
        Continue = tk.Button(self, text="Continue", fg="white", bg="#263D42", command=lambda: [self.get_name(hair, face, nose, leftc, rightc, mouth, lefteye, righteye, lefteyebow, righteyebow, hat, conhat, "banking_password"), controller.show_frame(prepare_page)]).pack(side=LEFT, expand=YES)
class prepare_page(tk.Frame):
    """Warning page shown before the real (scored) test starts."""

    def __init__(self, parent, controller):
        self.controller = controller
        tk.Frame.__init__(self, parent)
        # Username label; filled in by name_label5().
        self.name = tk.Label(self, text="", pady=5, padx=10, font=LARGE_FONT)
        self.name.pack()
        tk.Label(self, text="Before the real test,", pady=5, padx=10, font=LARGE_FONT).pack()
        tk.Label(self, text="make sure you are ready,", pady=5, padx=10, font=LARGE_FONT).pack()
        tk.Label(self, text="once you press continue,", pady=5, padx=10, font=LARGE_FONT).pack()
        tk.Label(self, text="you can not go back to check your assigned password.", pady=5, padx=10, font=LARGE_FONT).pack()
        tk.Label(self, text="Good luck!!!!!", pady=5, padx=10, font=LARGE_FONT).pack()
        # Back returns to the last practice page; NEXT PAGE starts the
        # real email test (no way back once there).
        Next = tk.Button(self, text="NEXT PAGE", fg="white", bg="#263D42", command=lambda: controller.show_frame(real_email_page)).pack(side=BOTTOM)
        Back = tk.Button(self, text="Back", fg="white", bg="#263D42", command=lambda: controller.show_frame(Banking_practice_page)).pack(side=BOTTOM)

    def name_label5(self):
        """Show the username and propagate it to the real email test page."""
        self.name.config(text=self.controller.SomeVar)
        self.controller.frames[real_email_page].name_label6()
#Real testing page for emai;
class real_email_page(tk.Frame):
    """Scored test page for the email password.

    The participant recolours the clown face to match the combination
    previously assigned for "email_password"; failures and elapsed time
    are written to the database via count_failure / count_time.
    """
    def name_label6(self):
        """Show the participant name on this page and cascade it to the next page."""
        self.name.config(text=self.controller.SomeVar)
        self.controller.frames[real_shopping_page].name_label7()
    def email_colouring(self, list):
        """Apply the 12 selected colours to the preview face.

        `list` (shadows the builtin) holds the dropbox selections in the
        fixed order hair .. circle-on-hat used throughout this program.
        """
        colors = list
        h = colors[0]
        f = colors[1]
        n = colors[2]
        lc = colors[3]
        rc = colors[4]
        m = colors[5]
        le = colors[6]
        re = colors[7]
        lew = colors[8]
        rew = colors[9]
        ha = colors[10]
        coh = colors[11]
        change_color(email_basic, h, f, n, lc, rc, m, le, re, lew, rew, ha, coh)
    def get(self):
        """Return the participant name currently shown in the name label."""
        x = self.name["text"]
        return x
    def get_e(self):
        """Return the current timestamp (captured as the page start time)."""
        a = datetime.datetime.now()
        return a
    #get the current time and insert it into the database
    def get_time_e(self, t):
        """Record the time spent between `t` and now under "email_password"."""
        time = datetime.datetime.now()
        user = self.get()
        self.controller.end_time = time
        count_time(user, t, time, "email_password")
    def __init__(self, parent, controller):
        tk.Frame.__init__(self, parent)
        # NOTE(review): the start time is captured when the frame is built at
        # application start-up, not when the page is first shown -- confirm intended.
        start_e = self.get_e()
        self.controller = controller
        self.page = tk.Label(self, text="Real_Email_page", pady=5, padx=10, font=LARGE_FONT)
        self.page.pack()
        self.name = tk.Label(self, text="", pady=5, padx=10, font=LARGE_FONT)
        self.name.pack()
        # Un-packed label used purely as storage for the failed-attempt count.
        self.count = tk.Label(self, text="0", pady=5, padx=10, font=LARGE_FONT)
        canvas = Canvas(self, height=370, width=700, bg="#263D42")
        canvas.pack()
        #building left frame for showing the assigned password, and the right frame to show the entered password, and the selection frame is for the user to enter their password.
        global email_pass, email_selection, email_basic
        email_pass = Frame(canvas, bg="white")
        email_selection = Frame(canvas, bg ="white")
        email_pass.place(x=10, y=10, height=330, width=300, anchor='nw')
        email_selection.place(x=320, y=10, height=330, width=350, anchor='nw')
        Label(email_pass, text="Here is the graph you have coloured", font=("Times", "8")).pack(side=TOP)
        Label(email_selection, text ="Please select the colour for each part, then press submit", font=("Times", "8")).pack(side=TOP)
        #show the basic password you have on right frame
        email_basic = clown_face(email_pass, "white", "white", "white", "white", "white", "white", "white", "white", "white", "white", "white", "white")
        email_basic.pack()
        #option menu
        list = dropbox(email_selection)
        Submit = tk.Button(email_selection, text="Submit", command=lambda: self.email_colouring(list)).pack(side=RIGHT)
        #check function
        def check(lst, p):
            """Compare the entered colours against the stored password `p`."""
            user = self.get()
            str = ""
            #entered password
            for i in lst:
                str+= i + " "
            #get the assigned password from the database
            assigned_array = get_password(user, p)
            str2 = ""
            for j in assigned_array:
                str2+= j + " "
            #compare the two password
            if str == str2:
                # NOTE(review): every correct check opens a fresh tk.Tk() root
                # window and packs another Continue button -- confirm intended.
                window = tk.Tk()
                tk.Label(window, text="You have entered the correct password.").pack(side=TOP)
                Continue = tk.Button(self, text="Continue", fg="white", bg="#263D42", command=lambda: [self.get_time_e(start_e), controller.show_frame(real_shopping_page)]).pack(side=BOTTOM)
            else:
                email_allow = self.count["text"]
                counter = int(email_allow)
                window2 = tk.Tk()
                tk.Label(window2, text="Wrong password").pack(side=TOP)
                # Third failure (counter == 2) moves on automatically; the
                # counter is still incremented and recorded afterwards.
                if counter == 2:
                    self.get_time_e(start_e)
                    controller.show_frame(real_shopping_page)
                counter += 1
                count_failure(user, counter, "email_password")
                self.count.config(text=counter)
        #Back button to return to the instruction_page, and continue button to go the next page.
        Check = tk.Button(self, text="Check", fg="white", bg="#263D42", command=lambda: check(list, "email_password")).pack(side=LEFT, expand=YES)
#Real testing page for shopping;
class real_shopping_page(tk.Frame):
    """Scored test page for the shopping password (mirrors real_email_page)."""
    def name_label7(self):
        """Show the participant name here and cascade it to the banking page."""
        self.name.config(text=self.controller.SomeVar)
        self.controller.frames[real_banking_page].name_label8()
    def shopping_colouring(self, list):
        """Apply the 12 selected colours (hair .. circle-on-hat) to the preview face."""
        colors = list
        h = colors[0]
        f = colors[1]
        n = colors[2]
        lc = colors[3]
        rc = colors[4]
        m = colors[5]
        le = colors[6]
        re = colors[7]
        lew = colors[8]
        rew = colors[9]
        ha = colors[10]
        coh = colors[11]
        change_color(shopping_basic, h, f, n, lc, rc, m, le, re, lew, rew, ha, coh)
    def get(self):
        """Return the participant name currently shown in the name label."""
        x = self.name["text"]
        return x
    def get_s(self):
        """Return the current timestamp."""
        a = datetime.datetime.now()
        return a
    def get_time_s(self):
        """Record time spent on this page, measured from the previous page's end."""
        t = self.controller.end_time
        time = datetime.datetime.now()
        user = self.get()
        self.controller.end_time = time
        count_time(user, t, time, "shopping_password")
    def __init__(self, parent, controller):
        tk.Frame.__init__(self, parent)
        self.controller = controller
        # NOTE(review): start_s is never read -- timing uses controller.end_time
        # inside get_time_s() instead; confirm which start point is intended.
        start_s = self.get_s()
        self.page = tk.Label(self, text="Real_Shopping_page", pady=5, padx=10, font=LARGE_FONT)
        self.page.pack()
        self.name = tk.Label(self, text="", pady=5, padx=10, font=LARGE_FONT)
        self.name.pack()
        # Un-packed label used purely as storage for the failed-attempt count.
        self.count = tk.Label(self, text="0", pady=5, padx=10, font=LARGE_FONT)
        canvas = Canvas(self, height=370, width=700, bg="#263D42")
        canvas.pack()
        #building left frame for showing the assigned password, and the right frame to show the entered password, and the selection frame is for the user to enter their password.
        global shopping_pass, shopping_selection, shopping_basic
        shopping_pass = Frame(canvas, bg="white")
        shopping_selection = Frame(canvas, bg ="white")
        shopping_pass.place(x=10, y=10, height=330, width=300, anchor='nw')
        shopping_selection.place(x=320, y=10, height=330, width=350, anchor='nw')
        Label(shopping_pass, text="Here is the graph you have coloured", font=("Times", "8")).pack(side=TOP)
        Label(shopping_selection, text ="Please select the colour for each part, then press submit", font=("Times", "8")).pack(side=TOP)
        #show the basic password you have on right frame
        shopping_basic = clown_face(shopping_pass, "white", "white", "white", "white", "white", "white", "white", "white", "white", "white", "white", "white")
        shopping_basic.pack()
        #option menu
        list = dropbox(shopping_selection)
        Submit = tk.Button(shopping_selection, text="Submit", command=lambda: self.shopping_colouring(list)).pack(side=RIGHT)
        #check function
        def check(lst, p):
            """Compare the entered colours against the stored password `p`."""
            user = self.get()
            str = ""
            for i in lst:
                str+= i + " "
            assigned_array = get_password(user, p)
            str2 = ""
            for j in assigned_array:
                str2+= j + " "
            if str == str2:
                window = tk.Tk()
                tk.Label(window, text="You have entered the correct password.").pack(side=TOP)
                Continue = tk.Button(self, text="Continue", fg="white", bg="#263D42", command=lambda: [self.get_time_s(), controller.show_frame(real_banking_page)]).pack(side=BOTTOM)
            else:
                # NOTE(review): `email_allow` is a leftover name copied from the
                # email page; it holds *this* page's failure count.
                email_allow = self.count["text"]
                counter = int(email_allow)
                window2 = tk.Tk()
                tk.Label(window2, text="Wrong password").pack(side=TOP)
                # Third failure (counter == 2) moves on automatically; the
                # counter is still incremented and recorded afterwards.
                if counter == 2:
                    self.get_time_s()
                    controller.show_frame(real_banking_page)
                counter += 1
                count_failure(user, counter, "shopping_password")
                self.count.config(text=counter)
        #Back button to return to the instruction_page, and continue button to go the next page.
        Check = tk.Button(self, text="Check", fg="white", bg="#263D42", command=lambda: check(list, "shopping_password")).pack(side=LEFT, expand=YES)
# Real testing page for banking.
class real_banking_page(tk.Frame):
    """Scored test page for the banking password (mirrors real_email_page)."""
    def name_label8(self):
        """Show the participant name on this page (last page in the cascade)."""
        self.name.config(text=self.controller.SomeVar)
    def banking_colouring(self, list):
        """Apply the 12 selected colours (hair .. circle-on-hat) to the preview face."""
        colors = list
        h = colors[0]
        f = colors[1]
        n = colors[2]
        lc = colors[3]
        rc = colors[4]
        m = colors[5]
        le = colors[6]
        re = colors[7]
        lew = colors[8]
        rew = colors[9]
        ha = colors[10]
        coh = colors[11]
        change_color(banking_basic, h, f, n, lc, rc, m, le, re, lew, rew, ha, coh)
    def get(self):
        """Return the participant name currently shown in the name label."""
        x = self.name["text"]
        return x
    def get_b(self):
        """Return the current timestamp."""
        a = datetime.datetime.now()
        return a
    def get_time_b(self):
        """Record time spent on this page, measured from the previous page's end."""
        t = self.controller.end_time
        time = datetime.datetime.now()
        user = self.get()
        self.controller.end_time = time
        count_time(user, t, time, "banking_password")
    def __init__(self, parent, controller):
        tk.Frame.__init__(self, parent)
        self.controller = controller
        # NOTE(review): start_b is never read -- timing uses controller.end_time
        # inside get_time_b() instead; confirm which start point is intended.
        start_b = self.get_b()
        self.page = tk.Label(self, text="Real_Banking_page", pady=5, padx=10, font=LARGE_FONT)
        self.page.pack()
        self.name = tk.Label(self, text="", pady=5, padx=10, font=LARGE_FONT)
        self.name.pack()
        # Un-packed label used purely as storage for the failed-attempt count.
        self.count = tk.Label(self, text="0", pady=5, padx=10, font=LARGE_FONT)
        canvas = Canvas(self, height=370, width=700, bg="#263D42")
        canvas.pack()
        #building left frame for showing the assigned password, and the right frame to show the entered password, and the selection frame is for the user to enter their password.
        global banking_pass, banking_selection, banking_basic
        banking_pass = Frame(canvas, bg="white")
        banking_selection = Frame(canvas, bg ="white")
        banking_pass.place(x=10, y=10, height=330, width=300, anchor='nw')
        banking_selection.place(x=320, y=10, height=330, width=350, anchor='nw')
        Label(banking_pass, text="Here is the graph you have coloured", font=("Times", "8")).pack(side=TOP)
        Label(banking_selection, text ="Please select the colour for each part, then press submit", font=("Times", "8")).pack(side=TOP)
        #show the basic password you have on right frame
        banking_basic = clown_face(banking_pass, "white", "white", "white", "white", "white", "white", "white", "white", "white", "white", "white", "white")
        banking_basic.pack()
        #option menu
        list = dropbox(banking_selection)
        Submit = tk.Button(banking_selection, text="Submit", command=lambda: self.banking_colouring(list)).pack(side=RIGHT)
        #check function
        def check(lst, p):
            """Compare the entered colours against the stored password `p`."""
            user = self.get()
            str = ""
            for i in lst:
                str+= i + " "
            assigned_array = get_password(user, p)
            str2 = ""
            for j in assigned_array:
                str2+= j + " "
            if str == str2:
                window = tk.Tk()
                tk.Label(window, text="You have entered the correct password.").pack(side=TOP)
                Continue = tk.Button(self, text="Continue", fg="white", bg="#263D42", command=lambda: [self.get_time_b(), controller.show_frame(end_page)]).pack(side=BOTTOM)
            else:
                # NOTE(review): `email_allow` is a leftover name copied from the
                # email page; it holds *this* page's failure count.
                email_allow = self.count["text"]
                counter = int(email_allow)
                window2 = tk.Tk()
                tk.Label(window2, text="Wrong password").pack(side=TOP)
                # Third failure (counter == 2) moves on automatically; the
                # counter is still incremented and recorded afterwards.
                if counter == 2:
                    self.get_time_b()
                    controller.show_frame(end_page)
                counter += 1
                count_failure(user, counter, "banking_password")
                self.count.config(text=counter)
        #Back button to return to the instruction_page, and continue button to go the next page.
        Check = tk.Button(self, text="Check", fg="white", bg="#263D42", command=lambda: check(list, "banking_password")).pack(side=LEFT, expand=YES)
class end_page(tk.Frame):
    """Final page shown once all three real password tests are completed."""
    def __init__(self, parent, controller):
        self.controller = controller
        tk.Frame.__init__(self, parent)
        # Placeholder for the participant name, followed by the closing messages.
        self.name = tk.Label(self, text="", pady=5, padx=10, font=LARGE_FONT)
        self.name.pack()
        for message in ("Thank you for the testing,", "Well done!!!!!"):
            tk.Label(self, text=message, pady=5, padx=10, font=LARGE_FONT).pack()
def main():
    """Build the application window and run the Tk event loop."""
    first().mainloop()

main()
| {"/program.py": ["/sql_connect.py", "/password.py"]} |
59,537 | wanzhaohong/graphical_password_crafting_system | refs/heads/master | /password.py | import Tkinter as tk
from Tkinter import *
import random
import time
#the function to create the assigned password
def random_colour():
    """Draw a random colour for each of the twelve face parts.

    Returns a 12-tuple of colour names, in the fixed order: hair, face,
    nose, left cheek, right cheek, mouth, left eye, right eye, left
    eyebrow, right eyebrow, hat, circle on hat.
    """
    palette = ["red", "blue", "green", "pink", "yellow", "black", "white"]
    # Twelve independent draws, one per face part, in display order.
    return tuple(random.choice(palette) for _ in range(12))
#the function to create the GUI of the dropbox(password enter space), and return the selected value from the dropbox
def dropbox(frame):
    """Build the twelve colour drop-down selectors inside `frame`.

    One OptionMenu plus a caption label is placed per face part.  Returns
    a list of 12 strings (initially empty) that is updated *in place* by
    variable traces whenever the user picks a colour -- callers keep the
    returned list and read it when their Submit/Check buttons fire.

    Order of entries: hair, face, nose, left cheek, right cheek, mouth,
    left eye, right eye, left eyebrow, right eyebrow, hat, circle on hat.
    """
    colours = ["red", "blue", "green", "pink", "yellow", "black", "white"]
    # (caption, menu x, caption x, shared y) for each of the twelve parts;
    # the first ten sit in the left column, the last two in the right one.
    # This table replaces twelve copy-pasted widget/trace stanzas.
    layout = [
        ("<-hair", 10, 100, 20),
        ("<-face", 10, 100, 50),
        ("<-nose", 10, 100, 80),
        ("<-left cheek", 10, 100, 110),
        ("<-right cheek", 10, 100, 140),
        ("<-mouth", 10, 100, 170),
        ("<-left eye", 10, 100, 200),
        ("<-right eye", 10, 100, 230),
        ("<-left eyebrow", 10, 100, 260),
        ("<-right eyebrow", 10, 100, 290),
        ("<-hat", 160, 250, 20),
        ("<-circle on hat", 160, 250, 50),
    ]
    selections = ['', '', '', '', '', '', '', '', '', '', '', '']

    def bind_trace(index, variable):
        # Equivalent of the former command/command2/... callbacks: copy the
        # chosen value into the shared list whenever the variable changes.
        def store(*args):
            selections[index] = variable.get()
        variable.trace("w", store)

    for index, (caption, menu_x, caption_x, y) in enumerate(layout):
        variable = StringVar(frame)
        OptionMenu(frame, variable, *colours).place(x=menu_x, y=y, anchor="nw")
        Label(frame, text=caption, font=("Times", "12")).place(x=caption_x, y=y, anchor="nw")
        bind_trace(index, variable)
    return selections
#function the create the clown face in GUI
def clown_face(frame, h, f, n, lc, rc, m, le, re, lew, rew, ha, coh):
    """Draw the clown face on a new Canvas inside `frame` and return the canvas.

    The twelve colour arguments fill, in order: hair, face, nose, left
    cheek, right cheek, mouth, left eyeball, right eyeball, left eyebrow,
    right eyebrow, hat and the circle on the hat.

    The canvas item ids are rebound to module globals (c_hair, ...) on
    every call, so change_color() always recolours the face created by the
    *most recent* call to this function.
    """
    canvas = Canvas(frame)
    global c_hair, c_face, c_nose, c_left_circle, c_right_circle, c_mouth, c_left_eyeball, c_right_eyeball, c_left_eyebrow, c_right_eyebrow, c_hat, c_circle_on_hat
    #the clown face
    #1
    c_hair = canvas.create_polygon((60, 60, 80, 150, 220, 150, 240, 60, 180, 80, 120, 80), fill=h, outline="black")
    #2
    c_face = canvas.create_oval(80, 80, 220, 220, fill=f)
    #3
    c_nose = canvas.create_oval(140, 140, 160, 160, fill=n)
    #4
    c_left_circle = canvas.create_oval(90, 140, 120, 150, fill=lc)
    #5
    c_right_circle = canvas.create_oval(180, 140, 210, 150, fill=rc)
    #6
    c_mouth = canvas.create_oval(110, 170, 190, 200, fill=m)
    # Fixed (non-recolourable) items: mouth line and the white of each eye.
    c_line = canvas.create_line(130, 185, 170, 185)
    c_left_eye = canvas.create_oval(120, 110, 140, 130, fill="white")
    c_right_eye = canvas.create_oval(160, 110, 180, 130, fill="white")
    #7
    c_left_eyeball = canvas.create_oval(125, 115, 135, 125, fill=le)
    #8
    c_right_eyeball = canvas.create_oval(165, 115, 175, 125, fill=re)
    #9
    c_left_eyebrow = canvas.create_polygon((120, 105, 130, 90, 140, 105), fill=lew, outline="black")
    #10
    c_right_eyebrow = canvas.create_polygon((160, 105, 170, 90, 180, 105), fill=rew, outline="black")
    #11
    c_hat = canvas.create_polygon((120, 80, 150, 30, 180, 80), fill=ha, outline="black")
    #12
    c_circle_on_hat = canvas.create_oval(145, 60, 155, 70, fill=coh)
    return canvas
#function to change the colour of the clown face in the GUI
def change_color(canvas, h, f, n, lc, rc, m, le, re, lew, rew, ha, coh):
    """Recolour the twelve configurable parts of an existing clown face.

    Relies on the module globals (c_hair, ...) bound by the most recent
    clown_face() call, so `canvas` should be the canvas that call returned.
    """
    recolouring = (
        (c_hair, h), (c_face, f), (c_nose, n),
        (c_left_circle, lc), (c_right_circle, rc), (c_mouth, m),
        (c_left_eyeball, le), (c_right_eyeball, re),
        (c_left_eyebrow, lew), (c_right_eyebrow, rew),
        (c_hat, ha), (c_circle_on_hat, coh),
    )
    for item, colour in recolouring:
        canvas.itemconfig(item, fill=colour)
59,547 | ngoiz/model-sharpy-hale | refs/heads/main | /aircraft.py | #! /usr/bin/env python3
import h5py as h5
import configobj
import numpy as np
from structure import HaleStructure
from aero import HaleAero
import os
import sharpy.sharpy_main
class Hale:
    """Assembles and runs a SHARPy HALE aircraft case.

    Holds the structural and aerodynamic model builders plus the solver
    settings, and knows how to generate, clean and run the case files
    under ``case_route``.
    """

    def __init__(self, case_name, case_route, output_route):
        self.case_name = case_name
        self.case_route = case_route
        self.output_route = output_route

        self.structure = None   # HaleStructure, set by init_structure()
        self.aero = None        # HaleAero, set by init_aero()
        self.settings = None    # dict of solver settings, set by create_settings()

    def init_structure(self, **kwargs):
        """Create the structural model builder (kwargs forwarded to HaleStructure)."""
        self.structure = HaleStructure(self.case_name, self.case_route, **kwargs)

    def init_aero(self, m, **kwargs):
        """Create the aerodynamic model builder with ``m`` chordwise panels.

        Requires init_structure() to have been called first, since the aero
        model is built on top of the structural one.
        """
        self.aero = HaleAero(m, self.structure, self.case_name, self.case_route, **kwargs)

    def set_flight_controls(self, thrust=0., elevator=0., rudder=0.):
        """Set thrust on the structure and, if an aero model exists, the
        elevator/rudder deflections."""
        self.structure.set_thrust(thrust)
        if self.aero is not None:
            self.aero.cs_deflection = elevator
            self.aero.rudder_deflection = rudder

    def generate(self):
        """Write the structural (and, if present, aerodynamic) input files,
        creating ``case_route`` if needed."""
        if not os.path.isdir(self.case_route):
            os.makedirs(self.case_route)
        self.structure.generate()
        if self.aero is not None:
            self.aero.generate()

    def create_settings(self, settings):
        """Write ``settings`` to the case .sharpy config file and keep a copy
        on the instance."""
        file_name = self.case_route + '/' + self.case_name + '.sharpy'
        config = configobj.ConfigObj()
        config.filename = file_name
        for k, v in settings.items():
            config[k] = v
        config.write()
        self.settings = settings

    def clean(self):
        """Delete any previously generated case files; missing files are skipped.

        Single loop replaces the former five copy-pasted isfile/remove pairs.
        """
        for extension in ('.fem.h5', '.dyn.h5', '.aero.h5', '.sharpy', '.flightcon.txt'):
            file_name = self.case_route + '/' + self.case_name + extension
            if os.path.isfile(file_name):
                os.remove(file_name)

    def run(self):
        """Execute SHARPy on the generated .sharpy settings file."""
        sharpy.sharpy_main.main(['', self.case_route + '/' + self.case_name + '.sharpy'])
| {"/aircraft.py": ["/structure.py", "/aero.py"], "/aero.py": ["/structure.py"]} |
59,548 | ngoiz/model-sharpy-hale | refs/heads/main | /structure.py | #! /usr/bin/env python3
import h5py as h5
import numpy as np
# MODEL GEOMETRY
# beam
# Geometry values; presumably SI units (m, rad) -- not stated in the file.
span_main = 16.0                # main-wing semi-span: each side is built from 0 to span_main
lambda_main = 0.25              # fraction of the semi-span forming the outer (dihedral) section
lambda_dihedral = 20*np.pi/180  # dihedral angle of the outer wing section [rad]
ea_main = 0.3                   # NOTE(review): unused in this module; aero.py defines its own ea_main
length_fuselage = 10
offset_fuselage = 1.25*0        # vertical offset at the fuselage tail end (currently zero)
sigma_fuselage = 10             # fuselage stiffness scaling relative to the wing
m_bar_fuselage = 0.3            # fuselage mass per unit length -- presumably kg/m
j_bar_fuselage = 0.1            # fuselage torsional inertia per unit length
# Wing sectional stiffness constants (EA, GA, GJ, EIy, EIz in the diag below).
ea = 1e4
ga = 1e4
gj = 1e4
eiy = 2e4
eiz = 70*eiy
# eiz = 4e6
m_bar_main = 0.75 + 3.125 / 2   # wing mass per unit length
j_bar_main = 0.4
span_tail = 2.5                 # horizontal-tail semi-span
ea_tail = 0.5                   # NOTE(review): unused in this module (see aero.py)
fin_height = 2.5
ea_fin = 0.5                    # NOTE(review): unused in this module (see aero.py)
sigma_tail = 10                 # tail/fin stiffness scaling relative to the wing
m_bar_tail = 0.3
j_bar_tail = 0.1
class HaleStructure:
    """Builds the HALE beam model and writes the SHARPy .fem.h5 input file.

    The aircraft is assembled from six beams sharing root node 0:
    right/left main wing (inner straight part plus outer dihedral part),
    fuselage, fin, and right/left horizontal tail.
    """
    def __init__(self, case_name, case_route, **kwargs):
        """Store case identifiers and model options.

        Key-Word Arguments:
        - sigma (float): stiffness scaling factor for the wing (default 1)
        - n_elem_multiplier (float): mesh refinement factor (default 1.5)
        - thrust (float): point force applied at the root node (default 0.)
        """
        self.sigma = kwargs.get('sigma', 1)
        self.n_elem_multiplier = kwargs.get('n_elem_multiplier', 1.5)
        self.route = case_route
        self.case_name = case_name
        self.thrust = kwargs.get('thrust', 0.)
        # Mesh sizes and nodal coordinates; populated by generate().
        self.n_elem = None
        self.n_node = None
        self.n_node_elem = 3
        self.x = None
        self.y = None
        self.z = None
        self.n_elem_main = None
        self.n_elem_fuselage = None
        self.n_elem_fin = None
        self.n_elem_tail = None
        self.n_node_main = None
        self.n_node_fuselage = None
        self.n_node_fin = None
        self.n_node_tail = None
        self.span_main = span_main
        self.span_tail = span_tail
    def set_thrust(self, value):
        """Set the root-node thrust force written to app_forces by generate()."""
        self.thrust = value
    def generate(self):
        """Assemble the beam discretisation and write <route>/<case_name>.fem.h5.

        Also stores the resulting mesh sizes and nodal coordinates on self
        so the aerodynamic builder can reuse them.
        """
        n_elem_multiplier = self.n_elem_multiplier
        sigma = self.sigma
        n_elem_main = int(4*n_elem_multiplier)
        n_elem_tail = int(2*n_elem_multiplier)
        n_elem_fin = int(2*n_elem_multiplier)
        n_elem_fuselage = int(2*n_elem_multiplier)
        # lumped masses
        n_lumped_mass = 1
        lumped_mass_nodes = np.zeros((n_lumped_mass, ), dtype=int)
        lumped_mass = np.zeros((n_lumped_mass, ))
        lumped_mass[0] = 5   # single lump of 5 (mass units) at the root node
        lumped_mass_inertia = np.zeros((n_lumped_mass, 3, 3))
        lumped_mass_position = np.zeros((n_lumped_mass, 3))
        # beam processing
        n_node_elem = self.n_node_elem
        # Split of the semi-span into inner (straight) and outer (dihedral) parts.
        span_main1 = (1.0 - lambda_main)*span_main
        span_main2 = lambda_main*span_main
        n_elem_main1 = round(n_elem_main*(1 - lambda_main))
        n_elem_main2 = n_elem_main - n_elem_main1
        # total number of elements
        n_elem = 0
        n_elem += n_elem_main1 + n_elem_main1
        n_elem += n_elem_main2 + n_elem_main2
        n_elem += n_elem_fuselage
        n_elem += n_elem_fin
        n_elem += n_elem_tail + n_elem_tail
        # number of nodes per part (3-node elements share end nodes)
        n_node_main1 = n_elem_main1*(n_node_elem - 1) + 1
        n_node_main2 = n_elem_main2*(n_node_elem - 1) + 1
        n_node_main = n_node_main1 + n_node_main2 - 1
        n_node_fuselage = n_elem_fuselage*(n_node_elem - 1) + 1
        n_node_fin = n_elem_fin*(n_node_elem - 1) + 1
        n_node_tail = n_elem_tail*(n_node_elem - 1) + 1
        # total number of nodes (the -1 terms remove nodes shared at junctions)
        n_node = 0
        n_node += n_node_main1 + n_node_main1 - 1
        n_node += n_node_main2 - 1 + n_node_main2 - 1
        n_node += n_node_fuselage - 1
        n_node += n_node_fin - 1
        n_node += n_node_tail - 1
        n_node += n_node_tail - 1
        # stiffness and mass matrices (0: wing, 1: fuselage, 2: tail/fin)
        n_stiffness = 3
        base_stiffness_main = sigma*np.diag([ea, ga, ga, gj, eiy, eiz])
        base_stiffness_fuselage = base_stiffness_main.copy()*sigma_fuselage
        base_stiffness_fuselage[4, 4] = base_stiffness_fuselage[5, 5]
        base_stiffness_tail = base_stiffness_main.copy()*sigma_tail
        base_stiffness_tail[4, 4] = base_stiffness_tail[5, 5]
        n_mass = 3
        base_mass_main = np.diag([m_bar_main, m_bar_main, m_bar_main, j_bar_main, 0.5*j_bar_main, 0.5*j_bar_main])
        base_mass_fuselage = np.diag([m_bar_fuselage,
                                      m_bar_fuselage,
                                      m_bar_fuselage,
                                      j_bar_fuselage,
                                      j_bar_fuselage*0.5,
                                      j_bar_fuselage*0.5])
        base_mass_tail = np.diag([m_bar_tail,
                                  m_bar_tail,
                                  m_bar_tail,
                                  j_bar_tail,
                                  j_bar_tail*0.5,
                                  j_bar_tail*0.5])
        # beam arrays written to the .fem.h5 file
        x = np.zeros((n_node, ))
        y = np.zeros((n_node, ))
        z = np.zeros((n_node, ))
        structural_twist = np.zeros((n_elem, n_node_elem))
        beam_number = np.zeros((n_elem, ), dtype=int)
        frame_of_reference_delta = np.zeros((n_elem, n_node_elem, 3))
        conn = np.zeros((n_elem, n_node_elem), dtype=int)
        stiffness = np.zeros((n_stiffness, 6, 6))
        elem_stiffness = np.zeros((n_elem, ), dtype=int)
        mass = np.zeros((n_mass, 6, 6))
        elem_mass = np.zeros((n_elem, ), dtype=int)
        boundary_conditions = np.zeros((n_node, ), dtype=int)
        app_forces = np.zeros((n_node, 6))
        stiffness[0, ...] = base_stiffness_main
        stiffness[1, ...] = base_stiffness_fuselage
        stiffness[2, ...] = base_stiffness_tail
        mass[0, ...] = base_mass_main
        mass[1, ...] = base_mass_fuselage
        mass[2, ...] = base_mass_tail
        # we/wn: running element/node write offsets as each beam is appended
        we = 0
        wn = 0
        # inner right wing
        beam_number[we:we + n_elem_main1] = 0
        y[wn:wn + n_node_main1] = np.linspace(0.0, span_main1, n_node_main1)
        for ielem in range(n_elem_main1):
            conn[we + ielem, :] = ((np.ones((3, ))*(we + ielem)*(n_node_elem - 1)) +
                                   [0, 2, 1])
            for inode in range(n_node_elem):
                frame_of_reference_delta[we + ielem, inode, :] = [-1.0, 0.0, 0.0]
        elem_stiffness[we:we + n_elem_main1] = 0
        elem_mass[we:we + n_elem_main1] = 0
        # SHARPy BC codes: presumably 1 = clamped root, -1 = free tip -- confirm
        # against the SHARPy documentation.
        boundary_conditions[0] = 1
        # Thrust enters as a point force along the local y axis at the root node.
        app_forces[0] = [0, self.thrust, 0, 0, 0, 0]
        we += n_elem_main1
        wn += n_node_main1
        # outer right wing
        beam_number[we:we + n_elem_main1] = 0
        y[wn:wn + n_node_main2 - 1] = y[wn - 1] + np.linspace(0.0, np.cos(lambda_dihedral)*span_main2, n_node_main2)[1:]
        z[wn:wn + n_node_main2 - 1] = z[wn - 1] + np.linspace(0.0, np.sin(lambda_dihedral)*span_main2, n_node_main2)[1:]
        for ielem in range(n_elem_main2):
            conn[we + ielem, :] = ((np.ones((3, ))*(we + ielem)*(n_node_elem - 1)) +
                                   [0, 2, 1])
            for inode in range(n_node_elem):
                frame_of_reference_delta[we + ielem, inode, :] = [-1.0, 0.0, 0.0]
        elem_stiffness[we:we + n_elem_main2] = 0
        elem_mass[we:we + n_elem_main2] = 0
        boundary_conditions[wn + n_node_main2 - 2] = -1
        we += n_elem_main2
        wn += n_node_main2 - 1
        # inner left wing
        # NOTE(review): uses n_elem_main1 - 1 where the mirrored right-wing
        # block uses n_elem_main1, leaving one element with beam_number 0 --
        # possible off-by-one; confirm against the reference SHARPy model.
        beam_number[we:we + n_elem_main1 - 1] = 1
        y[wn:wn + n_node_main1 - 1] = np.linspace(0.0, -span_main1, n_node_main1)[1:]
        for ielem in range(n_elem_main1):
            conn[we + ielem, :] = ((np.ones((3, ))*(we+ielem)*(n_node_elem - 1)) +
                                   [0, 2, 1])
            for inode in range(n_node_elem):
                frame_of_reference_delta[we + ielem, inode, :] = [1.0, 0.0, 0.0]
        conn[we, 0] = 0   # attach the left wing to root node 0
        elem_stiffness[we:we + n_elem_main1] = 0
        elem_mass[we:we + n_elem_main1] = 0
        we += n_elem_main1
        wn += n_node_main1 - 1
        # outer left wing
        beam_number[we:we + n_elem_main2] = 1
        y[wn:wn + n_node_main2 - 1] = y[wn - 1] + np.linspace(0.0, -np.cos(lambda_dihedral)*span_main2, n_node_main2)[1:]
        z[wn:wn + n_node_main2 - 1] = z[wn - 1] + np.linspace(0.0, np.sin(lambda_dihedral)*span_main2, n_node_main2)[1:]
        for ielem in range(n_elem_main2):
            conn[we + ielem, :] = ((np.ones((3, ))*(we+ielem)*(n_node_elem - 1)) +
                                   [0, 2, 1])
            for inode in range(n_node_elem):
                frame_of_reference_delta[we + ielem, inode, :] = [1.0, 0.0, 0.0]
        elem_stiffness[we:we + n_elem_main2] = 0
        elem_mass[we:we + n_elem_main2] = 0
        boundary_conditions[wn + n_node_main2 - 2] = -1
        we += n_elem_main2
        wn += n_node_main2 - 1
        # fuselage
        beam_number[we:we + n_elem_fuselage] = 2
        x[wn:wn + n_node_fuselage - 1] = np.linspace(0.0, length_fuselage, n_node_fuselage)[1:]
        z[wn:wn + n_node_fuselage - 1] = np.linspace(0.0, offset_fuselage, n_node_fuselage)[1:]
        for ielem in range(n_elem_fuselage):
            conn[we + ielem, :] = ((np.ones((3,))*(we + ielem)*(n_node_elem - 1)) +
                                   [0, 2, 1])
            for inode in range(n_node_elem):
                frame_of_reference_delta[we + ielem, inode, :] = [0.0, 1.0, 0.0]
        conn[we, 0] = 0   # fuselage also starts at root node 0
        elem_stiffness[we:we + n_elem_fuselage] = 1
        elem_mass[we:we + n_elem_fuselage] = 1
        we += n_elem_fuselage
        wn += n_node_fuselage - 1
        # Published as a module global (end_of_fin_node below is local only).
        global end_of_fuselage_node
        end_of_fuselage_node = wn - 1
        # fin
        beam_number[we:we + n_elem_fin] = 3
        x[wn:wn + n_node_fin - 1] = x[end_of_fuselage_node]
        z[wn:wn + n_node_fin - 1] = z[end_of_fuselage_node] + np.linspace(0.0, fin_height, n_node_fin)[1:]
        for ielem in range(n_elem_fin):
            conn[we + ielem, :] = ((np.ones((3,))*(we + ielem)*(n_node_elem - 1)) +
                                   [0, 2, 1])
            for inode in range(n_node_elem):
                frame_of_reference_delta[we + ielem, inode, :] = [-1.0, 0.0, 0.0]
        conn[we, 0] = end_of_fuselage_node
        elem_stiffness[we:we + n_elem_fin] = 2
        elem_mass[we:we + n_elem_fin] = 2
        we += n_elem_fin
        wn += n_node_fin - 1
        end_of_fin_node = wn - 1
        # right tail
        beam_number[we:we + n_elem_tail] = 4
        x[wn:wn + n_node_tail - 1] = x[end_of_fin_node]
        y[wn:wn + n_node_tail - 1] = np.linspace(0.0, span_tail, n_node_tail)[1:]
        z[wn:wn + n_node_tail - 1] = z[end_of_fin_node]
        for ielem in range(n_elem_tail):
            conn[we + ielem, :] = ((np.ones((3, ))*(we + ielem)*(n_node_elem - 1)) +
                                   [0, 2, 1])
            for inode in range(n_node_elem):
                frame_of_reference_delta[we + ielem, inode, :] = [-1.0, 0.0, 0.0]
        conn[we, 0] = end_of_fin_node
        elem_stiffness[we:we + n_elem_tail] = 2
        elem_mass[we:we + n_elem_tail] = 2
        boundary_conditions[wn + n_node_tail - 2] = -1
        we += n_elem_tail
        wn += n_node_tail - 1
        # left tail
        beam_number[we:we + n_elem_tail] = 5
        x[wn:wn + n_node_tail - 1] = x[end_of_fin_node]
        y[wn:wn + n_node_tail - 1] = np.linspace(0.0, -span_tail, n_node_tail)[1:]
        z[wn:wn + n_node_tail - 1] = z[end_of_fin_node]
        for ielem in range(n_elem_tail):
            conn[we + ielem, :] = ((np.ones((3, ))*(we + ielem)*(n_node_elem - 1)) +
                                   [0, 2, 1])
            for inode in range(n_node_elem):
                frame_of_reference_delta[we + ielem, inode, :] = [1.0, 0.0, 0.0]
        conn[we, 0] = end_of_fin_node
        elem_stiffness[we:we + n_elem_tail] = 2
        elem_mass[we:we + n_elem_tail] = 2
        boundary_conditions[wn + n_node_tail - 2] = -1
        we += n_elem_tail
        wn += n_node_tail - 1
        # Write every dataset SHARPy expects in the .fem.h5 input file.
        with h5.File(self.route + '/' + self.case_name + '.fem.h5', 'a') as h5file:
            coordinates = h5file.create_dataset('coordinates', data=np.column_stack((x, y, z)))
            conectivities = h5file.create_dataset('connectivities', data=conn)
            num_nodes_elem_handle = h5file.create_dataset(
                'num_node_elem', data=n_node_elem)
            num_nodes_handle = h5file.create_dataset(
                'num_node', data=n_node)
            num_elem_handle = h5file.create_dataset(
                'num_elem', data=n_elem)
            stiffness_db_handle = h5file.create_dataset(
                'stiffness_db', data=stiffness)
            stiffness_handle = h5file.create_dataset(
                'elem_stiffness', data=elem_stiffness)
            mass_db_handle = h5file.create_dataset(
                'mass_db', data=mass)
            mass_handle = h5file.create_dataset(
                'elem_mass', data=elem_mass)
            frame_of_reference_delta_handle = h5file.create_dataset(
                'frame_of_reference_delta', data=frame_of_reference_delta)
            structural_twist_handle = h5file.create_dataset(
                'structural_twist', data=structural_twist)
            bocos_handle = h5file.create_dataset(
                'boundary_conditions', data=boundary_conditions)
            beam_handle = h5file.create_dataset(
                'beam_number', data=beam_number)
            app_forces_handle = h5file.create_dataset(
                'app_forces', data=app_forces)
            lumped_mass_nodes_handle = h5file.create_dataset(
                'lumped_mass_nodes', data=lumped_mass_nodes)
            lumped_mass_handle = h5file.create_dataset(
                'lumped_mass', data=lumped_mass)
            lumped_mass_inertia_handle = h5file.create_dataset(
                'lumped_mass_inertia', data=lumped_mass_inertia)
            lumped_mass_position_handle = h5file.create_dataset(
                'lumped_mass_position', data=lumped_mass_position)
        # Expose the mesh sizing so HaleAero can build matching surfaces.
        self.n_elem = n_elem
        self.n_node = n_node
        self.x = x
        self.y = y
        self.z = z
        self.n_elem_main = n_elem_main
        self.n_elem_fuselage = n_elem_fuselage
        self.n_elem_fin = n_elem_fin
        self.n_elem_tail = n_elem_tail
        self.n_node_main = n_node_main
        self.n_node_fuselage = n_node_fuselage
        self.n_node_fin = n_node_fin
        self.n_node_tail = n_node_tail
| {"/aircraft.py": ["/structure.py", "/aero.py"], "/aero.py": ["/structure.py"]} |
59,549 | ngoiz/model-sharpy-hale | refs/heads/main | /aero.py | #! /usr/bin/env python3
import h5py as h5
import numpy as np
from structure import span_main
from sharpy.utils.geo_utils import generate_naca_camber
# Lifting-surface planform constants: chords and elastic-axis fractions.
chord_main = 1.0   # main-wing chord -- presumably metres, matching structure.py
chord_tail = 0.5
chord_fin = 0.5
ea_main = 0.3      # elastic-axis position as a fraction of chord -- presumably; confirm
ea_fin = 0.5
ea_tail = 0.5
# reference area
area_ref = chord_main * 2 * span_main   # span_main is the semi-span in structure.py, hence the factor 2
class HaleAero:
    """Builds the aerodynamic input data for the HALE model.

    Five lifting surfaces are generated (right wing, left wing, fin,
    right tailplane, left tailplane); the fuselage beam carries no aero
    grid. ``generate()`` writes everything to '<case_name>.aero.h5'.
    """
    def __init__(self, m, structure, case_name, case_route, **kwargs):
        """
        Key-Word Arguments:
            - cs_deflection (float): Elevator control surface deflection
            - rudder_deflection (float): rudder deflection
            - polars (np.array): 4-column array for AoA (rad), Cl, Cd, Cm of the airfoil polar
        """
        self.m = m                    # number of chordwise panels per surface
        self.structure = structure    # structural model (element/node counts)
        self.route = case_route
        self.case_name = case_name
        self.cs_deflection = kwargs.get('cs_deflection', 0.)
        self.rudder_deflection = kwargs.get('rudder_deflection', 0.)
        self.chord_main = chord_main
        self.chord_tail = chord_tail
        self.chord_fin = chord_fin
        self.polars = kwargs.get('polars', None)
    def generate(self):
        """Assemble per-element aero data and write '<case_name>.aero.h5'.

        Surface numbering (matching the comments per section below):
        0 right wing, 1 left wing, 2 fin, 3 right tail, 4 left tail.
        Control surface 0 is the elevator (tail), 1 the rudder (fin).
        """
        n_surfaces = 5
        structure = self.structure
        n_elem = structure.n_elem
        n_node_elem = structure.n_node_elem
        n_control_surfaces = 2
        n_elem_main = structure.n_elem_main
        n_node_main = structure.n_node_main
        m = self.m
        n_elem_fuselage = structure.n_elem_fuselage
        n_node_fuselage = structure.n_node_fuselage
        n_elem_fin = structure.n_elem_fin
        n_node_fin = structure.n_node_fin
        n_elem_tail = structure.n_elem_tail
        n_node_tail = structure.n_node_tail
        # aero arrays, sized per element/node of the whole structure.
        # surface_distribution and control_surface use -1 as "not assigned".
        airfoil_distribution = np.zeros((structure.n_elem, structure.n_node_elem), dtype=int)
        surface_distribution = np.zeros((structure.n_elem,), dtype=int) - 1
        surface_m = np.zeros((n_surfaces, ), dtype=int)
        m_distribution = 'uniform'
        aero_node = np.zeros((structure.n_node,), dtype=bool)
        twist = np.zeros((structure.n_elem, structure.n_node_elem))
        sweep = np.zeros((structure.n_elem, structure.n_node_elem))
        chord = np.zeros((structure.n_elem, structure.n_node_elem,))
        elastic_axis = np.zeros((structure.n_elem, structure.n_node_elem,))
        control_surface = np.zeros((n_elem, n_node_elem), dtype=int) - 1
        control_surface_type = np.zeros((n_control_surfaces, ), dtype=int)
        control_surface_deflection = np.zeros((n_control_surfaces, ))
        control_surface_chord = np.zeros((n_control_surfaces, ), dtype=int)
        control_surface_hinge_coord = np.zeros((n_control_surfaces, ), dtype=float)
        # control surface type 0 = static
        # control surface type 1 = dynamic
        control_surface_type[0] = 0
        control_surface_deflection[0] = self.cs_deflection
        control_surface_chord[0] = m
        control_surface_hinge_coord[0] = -0.25 # nondimensional wrt elastic axis (+ towards the trailing edge)
        control_surface_type[1] = 0
        control_surface_deflection[1] = self.rudder_deflection
        control_surface_chord[1] = m // 2
        control_surface_hinge_coord[1] = -0. # nondimensional wrt elastic axis (+ towards the trailing edge)
        # we / wn: running element and node write-offsets into the
        # global arrays as each beam's data is filled in sequence.
        we = 0
        wn = 0
        # right wing (surface 0, beam 0)
        i_surf = 0
        airfoil_distribution[we:we + n_elem_main, :] = 0
        surface_distribution[we:we + n_elem_main] = i_surf
        surface_m[i_surf] = m
        aero_node[wn:wn + n_node_main] = True
        temp_chord = np.linspace(chord_main, chord_main, n_node_main)
        temp_sweep = np.linspace(0.0, 0*np.pi/180, n_node_main)
        node_counter = 0
        for i_elem in range(we, we + n_elem_main):
            for i_local_node in range(n_node_elem):
                # first local node of an element is shared with the
                # previous element, so only advance on the others
                if not i_local_node == 0:
                    node_counter += 1
                chord[i_elem, i_local_node] = temp_chord[node_counter]
                elastic_axis[i_elem, i_local_node] = ea_main
                sweep[i_elem, i_local_node] = temp_sweep[node_counter]
        we += n_elem_main
        wn += n_node_main
        # left wing (surface 1, beam 1)
        i_surf = 1
        airfoil_distribution[we:we + n_elem_main, :] = 0
        # airfoil_distribution[wn:wn + n_node_main - 1] = 0
        surface_distribution[we:we + n_elem_main] = i_surf
        surface_m[i_surf] = m
        aero_node[wn:wn + n_node_main - 1] = True
        # chord[wn:wn + num_node_main - 1] = np.linspace(main_chord, main_tip_chord, num_node_main)[1:]
        # chord[wn:wn + num_node_main - 1] = main_chord
        # elastic_axis[wn:wn + num_node_main - 1] = main_ea
        temp_chord = np.linspace(chord_main, chord_main, n_node_main)
        node_counter = 0
        # NOTE: temp_sweep is reused from the right wing (mirrored below)
        for i_elem in range(we, we + n_elem_main):
            for i_local_node in range(n_node_elem):
                if not i_local_node == 0:
                    node_counter += 1
                chord[i_elem, i_local_node] = temp_chord[node_counter]
                elastic_axis[i_elem, i_local_node] = ea_main
                sweep[i_elem, i_local_node] = -temp_sweep[node_counter]
        we += n_elem_main
        wn += n_node_main - 1
        # fuselage carries no aero surface; skip its elements/nodes
        we += n_elem_fuselage
        wn += n_node_fuselage - 1 - 1
        #
        # # fin (surface 2, beam 3)
        i_surf = 2
        airfoil_distribution[we:we + n_elem_fin, :] = 1
        # airfoil_distribution[wn:wn + n_node_fin] = 0
        surface_distribution[we:we + n_elem_fin] = i_surf
        surface_m[i_surf] = m
        aero_node[wn:wn + n_node_fin] = True
        # chord[wn:wn + num_node_fin] = fin_chord
        for i_elem in range(we, we + n_elem_fin):
            for i_local_node in range(n_node_elem):
                chord[i_elem, i_local_node] = chord_fin
                elastic_axis[i_elem, i_local_node] = ea_fin
                control_surface[i_elem, i_local_node] = 1
        # twist[end_of_fuselage_node] = 0
        # twist[wn:] = 0
        # elastic_axis[wn:wn + num_node_main] = fin_ea
        we += n_elem_fin
        wn += n_node_fin - 1
        # last fin element: no rudder (control surface cleared to -1)
        control_surface[we - 1, :] = -1
        #
        # # # right tail (surface 3, beam 4)
        i_surf = 3
        airfoil_distribution[we:we + n_elem_tail, :] = 2
        # airfoil_distribution[wn:wn + n_node_tail] = 0
        surface_distribution[we:we + n_elem_tail] = i_surf
        surface_m[i_surf] = m
        # XXX not very elegant
        aero_node[wn:] = True
        # chord[wn:wn + num_node_tail] = tail_chord
        # elastic_axis[wn:wn + num_node_main] = tail_ea
        for i_elem in range(we, we + n_elem_tail):
            for i_local_node in range(n_node_elem):
                twist[i_elem, i_local_node] = -0
        for i_elem in range(we, we + n_elem_tail):
            for i_local_node in range(n_node_elem):
                chord[i_elem, i_local_node] = chord_tail
                elastic_axis[i_elem, i_local_node] = ea_tail
                control_surface[i_elem, i_local_node] = 0
        we += n_elem_tail
        wn += n_node_tail
        #
        # # left tail (surface 4, beam 5)
        i_surf = 4
        airfoil_distribution[we:we + n_elem_tail, :] = 2
        # airfoil_distribution[wn:wn + n_node_tail - 1] = 0
        surface_distribution[we:we + n_elem_tail] = i_surf
        surface_m[i_surf] = m
        aero_node[wn:wn + n_node_tail - 1] = True
        # chord[wn:wn + num_node_tail] = tail_chord
        # elastic_axis[wn:wn + num_node_main] = tail_ea
        # twist[we:we + num_elem_tail] = -tail_twist
        for i_elem in range(we, we + n_elem_tail):
            for i_local_node in range(n_node_elem):
                twist[i_elem, i_local_node] = -0
        for i_elem in range(we, we + n_elem_tail):
            for i_local_node in range(n_node_elem):
                chord[i_elem, i_local_node] = chord_tail
                elastic_axis[i_elem, i_local_node] = ea_tail
                control_surface[i_elem, i_local_node] = 0
        we += n_elem_tail
        wn += n_node_tail
        # Write all assembled data to the SHARPy aero input file.
        with h5.File(self.route + '/' + self.case_name + '.aero.h5', 'a') as h5file:
            airfoils_group = h5file.create_group('airfoils')
            # add one airfoil
            naca_airfoil_main = airfoils_group.create_dataset('0', data=np.column_stack(
                generate_naca_camber(P=0, M=0)))
            naca_airfoil_tail = airfoils_group.create_dataset('1', data=np.column_stack(
                generate_naca_camber(P=0, M=0)))
            naca_airfoil_fin = airfoils_group.create_dataset('2', data=np.column_stack(
                generate_naca_camber(P=0, M=0)))
            # chord
            chord_input = h5file.create_dataset('chord', data=chord)
            dim_attr = chord_input .attrs['units'] = 'm'
            # twist
            twist_input = h5file.create_dataset('twist', data=twist)
            dim_attr = twist_input.attrs['units'] = 'rad'
            # sweep
            sweep_input = h5file.create_dataset('sweep', data=sweep)
            dim_attr = sweep_input.attrs['units'] = 'rad'
            # airfoil distribution
            airfoil_distribution_input = h5file.create_dataset('airfoil_distribution', data=airfoil_distribution)
            surface_distribution_input = h5file.create_dataset('surface_distribution', data=surface_distribution)
            surface_m_input = h5file.create_dataset('surface_m', data=surface_m)
            m_distribution_input = h5file.create_dataset('m_distribution', data=m_distribution.encode('ascii', 'ignore'))
            aero_node_input = h5file.create_dataset('aero_node', data=aero_node)
            elastic_axis_input = h5file.create_dataset('elastic_axis', data=elastic_axis)
            control_surface_input = h5file.create_dataset('control_surface', data=control_surface)
            control_surface_deflection_input = h5file.create_dataset('control_surface_deflection', data=control_surface_deflection)
            control_surface_chord_input = h5file.create_dataset('control_surface_chord', data=control_surface_chord)
            control_surface_hinge_coord_input = h5file.create_dataset('control_surface_hinge_coord', data=control_surface_hinge_coord)
            control_surface_types_input = h5file.create_dataset('control_surface_type', data=control_surface_type)
            if self.polars is not None:
                polars_group = h5file.create_group('polars')
                for i_airfoil in range(3):  # there are three airfoils
                    polars_group.create_dataset('{:g}'.format(i_airfoil), data=self.polars)
| {"/aircraft.py": ["/structure.py", "/aero.py"], "/aero.py": ["/structure.py"]} |
59,609 | bpiktel/tile_map_generator | refs/heads/main | /gui.py | from tkinter import (
Frame, Canvas, Scrollbar, Label, Tk, Button, Toplevel, Entry, colorchooser,
messagebox, Menu, filedialog)
import tkinter as tk
from PIL import ImageTk
from src.tile_map_io import TileMapIO
from src.visualisation import TileMapVisualisation
from src.tile_map import TileMap
from src.generator import TileMapGenerator
from src.tile import Tile, TileTreeNode
class MapViewer(Frame):
    """
    Window in which the generated tile-map image is displayed.

    Adds vertical/horizontal scrollbars for maps larger than the window
    and a File->Save menu entry for exporting the image as PNG.
    """
    def __init__(self, root, map_image):
        """Build the viewer inside *root* showing PIL image *map_image*."""
        Frame.__init__(self, root)
        root.title("Map Viewer")
        self.pack(fill=tk.BOTH)
        top_menu = Menu(root)
        file_menu = Menu(top_menu, tearoff=0)
        file_menu.add_command(label="Save", command=self.save_image)
        top_menu.add_cascade(label="File", menu=file_menu)
        root.config(menu=top_menu)
        self.map_image = map_image
        img_width, img_height = map_image.size
        canv = Canvas(self)
        canv.config(width=img_width, height=img_height)
        scrollbar_v = Scrollbar(self, orient=tk.VERTICAL)
        scrollbar_h = Scrollbar(self, orient=tk.HORIZONTAL)
        scrollbar_v.config(command=canv.yview)
        scrollbar_h.config(command=canv.xview)
        canv.config(yscrollcommand=scrollbar_v.set)
        canv.config(xscrollcommand=scrollbar_h.set)
        scrollbar_v.pack(side=tk.RIGHT, fill=tk.Y)
        scrollbar_h.pack(side=tk.BOTTOM, fill=tk.X)
        canv.pack(side=tk.TOP, expand=tk.YES, fill=tk.BOTH)
        canv.config(scrollregion=(0, 0, img_width, img_height))
        # Keep a reference on self: tkinter does not hold PhotoImage
        # objects, so a local would be garbage collected and the canvas
        # would render blank.
        self.image = ImageTk.PhotoImage(map_image)
        self.img = canv.create_image(0, 0, anchor="nw", image=self.image)
        self.update()
        # Cap the window size to image + scrollbar thickness (+ slack).
        width_max = img_width+scrollbar_h.winfo_height()+4
        height_max = img_height+scrollbar_v.winfo_width()+4
        root.maxsize(width=width_max, height=height_max)

    def save_image(self):
        """Ask the user for a path and export the displayed map as PNG."""
        try:
            file_path = filedialog.asksaveasfilename(
                title="Save map",
                filetypes=(("PNG", "*.png"), ("All files", "*.*")))
            TileMapIO.save_map_image(self.map_image, file_path)
        except Exception as e:
            # Fixed typo in the dialog title ("Coulnd't" -> "Couldn't").
            messagebox.showerror("Couldn't save image", str(e))
class GenerationGUI(Frame):
    """
    Main window of the application.

    Shows the editable tile hierarchy, map-size entries and buttons to
    generate, re-color and view the map, plus a File menu for saving and
    loading maps as pickle files.
    """
    def __init__(self, root):
        Frame.__init__(self, root)
        self.root = root
        root.title("Tile Map Generator")
        top_menu = Menu(root)
        file_menu = Menu(top_menu, tearoff=0)
        file_menu.add_command(label="Save", command=self.save_map)
        file_menu.add_command(label="Open", command=self.load_map)
        top_menu.add_cascade(label="File", menu=file_menu)
        root.config(menu=top_menu)
        tk.Grid.rowconfigure(self, 1, weight=1)
        tk.Grid.columnconfigure(self, 0, weight=1)
        tk.Grid.columnconfigure(self, 1, weight=1)
        tk.Grid.columnconfigure(self, 2, weight=1)
        self.pack(fill=tk.BOTH)
        self.tile_info_container = Frame(self)
        self.tiles_info_list = self.get_default_tiles_info()
        self.update_tiles_info()
        self.tile_info_container.grid(
            row=0, column=0, columnspan=3, padx=10, pady=10)
        self.map_size_frame = Frame(self)
        self.map_size_label = Label(self.map_size_frame, text="Map size:")
        self.map_size_label.grid(row=0, column=0)
        self.x_map_size = Entry(self.map_size_frame, width=7)
        self.x_map_size.grid(row=0, column=1)
        self.map_size_x_label = Label(self.map_size_frame, text="x")
        self.map_size_x_label.grid(row=0, column=2)
        self.y_map_size = Entry(self.map_size_frame, width=7)
        self.y_map_size.grid(row=0, column=3)
        self.map_size_frame.grid(row=3, column=0, columnspan=3)
        self.generate_button = Button(
            self, text="Generate", command=self.generate_map)
        self.generate_button.grid(row=4, column=0, padx=10, pady=10)
        self.update_tiles_button = Button(
            self, text="Update tiles", command=self.change_tiles_on_map)
        self.update_tiles_button.grid(row=4, column=1, padx=10, pady=10)
        self.view_map_button = Button(
            self, text="View map", command=self.view_map)
        self.view_map_button.grid(row=4, column=2, padx=10, pady=10)
        # initializing map with sample tiles
        self.tiles = self.construct_ttn(self.tiles_info_list[0])
        self.map_ = TileMap(10, 10, self.tiles)

    def view_map(self):
        """Open a MapViewer window rendering the current map."""
        if self.map_ is not None:
            self._map_window = Toplevel(self.root)
            image = TileMapVisualisation.get_map_image(self.map_)
            MapViewer(self._map_window, image).pack(
                side="top", fill="both", expand=True)

    def change_tiles_on_map(self):
        """Re-apply the edited tile data to the existing map and show it."""
        try:
            self.update_tiles()
            self.view_map()
        except Exception as e:
            messagebox.showerror("Cannot generate map", str(e))

    def update_tiles(self):
        """Rebuild the tile tree from the GUI; raises if a tile is unsaved."""
        for tile in self.tiles_info_list:
            if tile.edit_mode:
                raise TileInEditModeError("All tiles must be saved")
        self.tiles = self.construct_ttn(self.tiles_info_list[0])
        self.map_.update_tiles(self.tiles)

    def generate_map(self):
        """Validate the size entries and generate a brand-new map."""
        try:
            size_x = self.x_map_size.get()
            size_y = self.y_map_size.get()
            if len(size_x) == 0 or len(size_y) == 0:
                raise EmptyEntryFieldError("Map size must be specified")
            # isdigit also rejects negatives and floats
            if not size_x.isdigit() or not size_y.isdigit():
                raise TypeError("Map size must be positive integer")
            size_x = int(size_x)
            size_y = int(size_y)
            self.update_tiles()
            self.map_ = TileMap(size_y, size_x, self.tiles)
            self.map_ = TileMapGenerator().generate_map(self.map_)
            self.view_map()
        except Exception as e:
            messagebox.showerror("Cannot generate map", str(e))

    def get_default_tiles_info(self):
        """Returns sample data"""
        return [RootTileInfoSegment(self.tile_info_container, self),
                TileInfoSegment(self.tile_info_container, self, 0, 1,
                                name="grass", color="green", level=1)]

    def update_tiles_info(self):
        """Updates tiles info displayed in main window"""
        for row, block in enumerate(self.tiles_info_list):
            block.grid(row=row, sticky="W")

    def insert_new_tile(self, new_tile, parent_tile_id):
        """Insert *new_tile* directly after its parent in the display list."""
        for i, tile in enumerate(self.tiles_info_list):
            if tile.tile_id == parent_tile_id:
                self.tiles_info_list.insert(i+1, new_tile)
                return

    def remove_tile_from_list(self, tile_info):
        self.tiles_info_list.remove(tile_info)

    def save_map(self):
        """Serialize the current map to a user-chosen pickle file."""
        try:
            file_path = filedialog.asksaveasfilename(
                title="Save map",
                filetypes=(("Pickle files", "*.pickle"), ("All files", "*.*")))
            TileMapIO.save_map_to_file(self.map_, file_path)
        except Exception as e:
            # Fixed typo in the dialog title ("Couldn;t" -> "Couldn't").
            messagebox.showerror("Couldn't save map", str(e))

    def load_map(self):
        """Load a pickled map and rebuild the tile widgets from it."""
        try:
            file_path = filedialog.askopenfilename(
                title="Load map",
                filetypes=(("Pickle files", "*.pickle"), ("All files", "*.*")))
            load = TileMapIO.load_map_from_file(file_path)
            if load is None:
                return
            self.map_ = load
            self.tiles = self.map_.get_tiles()
            self.load_tiles_info_from_tiles()
            self.update_tiles_info()
        except Exception as e:
            messagebox.showerror("Couldn't load map", str(e))

    def construct_ttn(self, first_tile):
        """Constructs TileTreeNode from parsed tiles data"""
        ttn = TileTreeNode(first_tile.construct_tile_object())
        for tile in self.tiles_info_list:
            if tile.parent_tile_id == first_tile.tile_id:
                child_ttn = self.construct_ttn(tile)
                ttn.add_child(child_ttn)
        return ttn

    def load_tiles_info_from_tiles(self):
        """Creates new tiles info list from loaded tiles data"""
        new_tiles_info = []
        root_tile = self.tiles.get_tile()
        new_tiles_info.append(RootTileInfoSegment(
            self.tile_info_container, self,
            root_tile.get_name(), root_tile.get_color()))
        new_tiles_info += self.load_ttn(self.tiles, root_tile.get_id(), 1)
        self.tiles_info_list = new_tiles_info

    def load_ttn(self, ttn, parent_id, level=1):
        """Loads tiles info from TileTreeNode, creates tile hierarchy"""
        tiles_info = []
        for child_ttn in ttn.get_children():
            tile = child_ttn.get_tile()
            tiles_info.append(
                self.get_tile_info_segment(tile, self, parent_id, level))
            tiles_info += self.load_ttn(child_ttn, tile.get_id(), level+1)
        return tiles_info

    def get_tile_info_segment(self, tile, root, parent_id, level):
        """Returns tile info segment representing data in Tile object"""
        return TileInfoSegment(
            self.tile_info_container, self, parent_id, tile_id=tile.get_id(),
            name=tile.get_name(), color=tile.get_color(), fill=tile.get_fill(),
            islands=tile.get_islands(), level=level)
class TileInfoSegment(Frame):
    """
    Widget containing single tile data and options to edit it.

    Shows the tile's id, name, color, fill and island count in one row,
    with buttons: E (edit), S (save), + (add child tile), - (remove).
    """
    def __init__(self, root, main_gui, parent_tile_id, tile_id=0, name="name",
                 color="#000000", fill=0.5, islands=1, level=0):
        Frame.__init__(self, root)
        self.main_gui = main_gui
        self.tile_id = tile_id
        self.parent_tile_id = parent_tile_id
        self.tile_name = name
        self.tile_color = color
        self.tile_fill = fill
        self.tile_islands = islands
        # Indent by hierarchy depth so children appear nested.
        self.indent_label = Label(self, text=level*"\t")
        self.indent_label.grid(row=0, column=0)
        self.id_label = Label(self, text="ID:")
        self.id_label.grid(row=0, column=1)
        id_text = tk.StringVar()
        id_text.set(self.tile_id)
        self.id_entry = Entry(self, width=2, textvariable=id_text)
        self.id_entry.grid(row=0, column=2)
        self.name_label = Label(self, text="name:")
        self.name_label.grid(row=0, column=3)
        name_text = tk.StringVar()
        name_text.set(self.tile_name)
        self.name_entry = Entry(self, width=15, textvariable=name_text)
        self.name_entry.grid(row=0, column=4)
        self.color_button = Button(self, bg=self.tile_color, text=" ",
                                   command=lambda: self.choose_color())
        self.color_button.grid(row=0, column=5, padx=2)
        self.fill_label = Label(self, text="fill:")
        self.fill_label.grid(row=0, column=6)
        fill_text = tk.StringVar()
        fill_text.set(self.tile_fill)
        self.fill_entry = Entry(self, width=4, textvariable=fill_text)
        self.fill_entry.grid(row=0, column=7)
        self.islands_label = Label(self, text="islands:")
        self.islands_label.grid(row=0, column=8)
        islands_text = tk.StringVar()
        islands_text.set(self.tile_islands)
        self.islands_entry = Entry(self, width=2, textvariable=islands_text)
        self.islands_entry.grid(row=0, column=9)
        self.edit_mode = False
        self.set_edit_mode(self.edit_mode)
        self.edit_button = Button(
            self, text="E", command=lambda: self.enter_edit_mode())
        self.edit_button.grid(row=0, column=10)
        # save button is gridded only while in edit mode
        self.save_button = Button(
            self, text="S", command=lambda: self.save_tile())
        self.add_button = Button(
            self, text="+", command=lambda: self.add_new_tile(root, level))
        self.add_button.grid(row=0, column=11)
        self.remove_button = Button(
            self, text="-", command=lambda: self.remove_tile())
        self.remove_button.grid(row=0, column=12)

    def add_new_tile(self, root, level):
        """Create a child tile segment below this one and open it for edit."""
        new_segment = TileInfoSegment(
            root, self.main_gui, self.tile_id, level=level+1)
        new_segment.enter_edit_mode()
        self.main_gui.insert_new_tile(new_segment, self.tile_id)
        self.main_gui.update_tiles_info()

    def remove_tile(self):
        """Remove this tile and all of its descendants from the GUI."""
        to_remove = self.find_children_tiles()
        to_remove.append(self)
        for element in to_remove:
            self.main_gui.remove_tile_from_list(element)
            element.grid_forget()
        self.main_gui.update_tiles_info()

    def find_children_tiles(self):
        """Recursively collect descendant tile segments of this tile."""
        if self.tile_id == 0:  # handling not saved tiles that have id of 0
            return []  # returning empty list because 0 is root tile
        children_tiles = []
        parent_id = self.tile_id
        for tile in self.main_gui.tiles_info_list:
            if parent_id == tile.parent_tile_id:
                children_tiles.append(tile)
                children_tiles += tile.find_children_tiles()
        return children_tiles

    def enter_edit_mode(self):
        """Make all entries editable and swap the E button for S."""
        self.edit_mode = True
        self.set_edit_mode(self.edit_mode)
        self.edit_button.grid_forget()
        self.save_button.grid(row=0, column=10)
        self.add_button.config(state="disabled")

    def set_edit_mode(self, edit_mode=False):
        """Toggle entry/button states according to *edit_mode*."""
        if edit_mode:
            state = "normal"
            button_state = "normal"
        else:
            state = "readonly"
            button_state = "disabled"
        self.id_entry.config(state=state)
        self.name_entry.config(state=state)
        self.fill_entry.config(state=state)
        self.islands_entry.config(state=state)
        self.color_button.config(state=button_state)

    def save_tile(self):
        """Validate the entries, store them and leave edit mode."""
        try:
            self.validate_input()
            self.update_tile_data()
            self.edit_mode = False
            self.set_edit_mode(self.edit_mode)
            self.save_button.grid_forget()
            self.edit_button.grid(row=0, column=10)
            self.add_button.config(state="normal")
        except Exception as e:
            messagebox.showerror("Invalid tile data", str(e))

    def update_tile_data(self):
        """Copy (already validated) entry values into the tile attributes."""
        self.tile_id = int(self.id_entry.get())
        self.tile_name = self.name_entry.get()
        self.tile_fill = float(self.fill_entry.get())
        self.tile_islands = int(self.islands_entry.get())

    def validate_input(self):
        """Checks if tile data provided by user is valid"""
        if len(self.id_entry.get()) == 0:
            raise EmptyFieldError("Tile ID must be provided")
        try:
            t_id = int(self.id_entry.get())
        except ValueError:
            raise ValueError("Tile ID must be integer")
        if not self.check_if_uniqe_id(t_id):
            # Fixed typo in the user-facing message ("uniqe" -> "unique").
            raise NotUniqeIDError("Tile ID must be unique")
        if t_id < 0:
            raise ValueError("ID cannot be negative")
        if len(self.name_entry.get()) == 0:
            raise EmptyFieldError("Tile name must be provided")
        if len(self.fill_entry.get()) == 0:
            raise EmptyFieldError("Tile fill must be provided")
        try:
            t_fill = float(self.fill_entry.get())
        except ValueError:
            raise ValueError("Tile fill must be float")
        if t_fill < 0 or t_fill > 1:
            raise ValueError("Tile fill must be float from 0 to 1")
        if len(self.islands_entry.get()) == 0:
            raise EmptyFieldError("Tile islands number must be provided")
        try:
            t_islands = int(self.islands_entry.get())
        except ValueError:
            raise ValueError("Tile islands must be integer")
        if t_islands < 1:
            raise ValueError("Island number must be at least 1")

    def choose_color(self):
        """Open a color picker and apply the chosen color to the tile."""
        color = colorchooser.askcolor()
        # Bug fix: askcolor returns (None, None) when the dialog is
        # cancelled; configuring bg=None raised a TclError before.
        if color[1] is None:
            return
        self.color_button.config(bg=color[1])
        self.tile_color = color[1]

    def construct_tile_object(self):
        """Build a Tile model object from this segment's data."""
        tile = Tile(self.tile_id, self.tile_name, self.tile_color,
                    self.tile_fill, self.tile_islands)
        return tile

    def check_if_uniqe_id(self, t_id):
        """Return True if no other saved tile already uses *t_id*.

        NOTE: method name keeps its original (misspelled) form because
        external callers may rely on it.
        """
        for existing_tile in self.main_gui.tiles_info_list:
            if existing_tile.tile_id == t_id and not existing_tile.edit_mode:
                return False
        return True
class RootTileInfoSegment(TileInfoSegment):
    """
    Widget of the root tile: fixed id 0, full fill, no fill/islands/remove
    controls; only the name and color can be edited.
    """
    def __init__(self, root, main_gui, name="ocean", color="blue"):
        # Bug fix: forward the name/color arguments instead of always
        # passing the hardcoded defaults -- load_tiles_info_from_tiles
        # supplies the stored root tile's name and color here, which
        # were previously ignored.
        TileInfoSegment.__init__(self, root, main_gui, 'root', tile_id=0,
                                 name=name, color=color, fill=1, islands=1,
                                 level=0)
        self.remove_button.grid_forget()
        self.fill_label.grid_forget()
        self.fill_entry.grid_forget()
        self.islands_label.grid_forget()
        self.islands_entry.grid_forget()
        # root id is fixed at 0 and never editable
        self.id_entry.config(state="disabled")

    def set_edit_mode(self, edit_mode=False):
        """Toggle editability of the root tile (name and color only)."""
        if edit_mode:
            state = "normal"
            button_state = "normal"
        else:
            state = "readonly"
            button_state = "disabled"
        self.name_entry.config(state=state)
        self.color_button.config(state=button_state)
class NotUniqeIDError(Exception):
    """Raised when a tile ID duplicates the ID of an already saved tile."""
    pass
class EmptyFieldError(Exception):
    """Raised when a required tile entry field is left empty."""
    pass
class TileInEditModeError(Exception):
    """Raised when generation is attempted while a tile is still unsaved."""
    pass
class EmptyEntryFieldError(Exception):
    """Raised when the map-size entry fields are left empty."""
    pass
# Launch the GUI when this module is executed as a script.
if __name__ == "__main__":
    root = Tk()
    GenerationGUI(root).pack(side="top", fill="both", expand=True)
    root.mainloop()
| {"/gui.py": ["/src/tile_map_io.py", "/src/visualisation.py", "/src/tile_map.py", "/src/generator.py", "/src/tile.py"], "/tests/test_generator.py": ["/src/generator.py"], "/src/tile_map_io.py": ["/src/visualisation.py"], "/tests/test_tile.py": ["/src/tile.py"], "/tests/test_tile_map.py": ["/src/tile_map.py", "/src/tile.py"], "/src/tile_map.py": ["/src/tile.py"]} |
59,610 | bpiktel/tile_map_generator | refs/heads/main | /tests/test_generator.py | import numpy as np
from src.generator import BorderGeneration as BG
def test_searching_for_coordinates():
    """get_coordinate_tuples returns the coordinates of every cell with id 1."""
    grid = [[0, 0, 1, 2],
            [3, 4, 1, 3],
            [8, 3, 4, 1]]
    bg = BG(grid, 1)
    assert bg.get_coordinate_tuples(1) == [(1, 3), (2, 3), (3, 4)]
def test_getting_adjacent_coords():
    """Neighbours of (1, 2) come back in a fixed order."""
    expected = [(0, 2), (1, 3), (1, 1), (2, 2)]
    assert BG.get_adj_coords((1, 2)) == expected
def test_border_tile_check():
    """check_if_border_tile distinguishes border from interior cells."""
    grid = [[0, 0, 1, 2],
            [3, 4, 1, 3],
            [8, 3, 4, 1]]
    bg = BG(grid, 0)
    assert bg.check_if_border_tile((2, 2), 0)
    assert not bg.check_if_border_tile((3, 3), 0)
def test_mask():
    """After masking id 1, the trimmed map is not entirely -1."""
    grid = [[0, 0, 0, 0],
            [0, 1, 1, 0],
            [0, 0, 0, 0]]
    bg = BG(grid, 1)
    bg.apply_mask(1)
    trimmed = bg.get_trimmed_map()
    assert not np.all(trimmed == -1)
def test_randomizing_fills():
    """A total fill of 0.2 split over 3 islands yields 3 values."""
    island_fills = BG.get_fill_per_island(0.2, 3)
    assert len(island_fills) == 3
def test_lists_comparision():
    """coords_in_both_lists keeps only tuples present in both inputs."""
    left = [(0, 0), (2, 4), (4, 3)]
    right = [(0, 3), (0, 0), (1, 1), (4, 3)]
    assert BG.coords_in_both_lists(left, right) == [(0, 0), (4, 3)]
def test_applying_section():
    """Applying the generated section onto id 1 yields an all-1 grid here."""
    target = np.array([[1, 1, 1, 1], [3, 4, 1, 1], [1, 1, 1, 1]]).reshape(3, 4)
    section = np.array([[3, 5, 0, 0], [1, 1, 0, 0], [0, 0, -1, 0]]).reshape(3, 4)
    target = BG.apply_generated_section(target, section, 1)
    assert np.all(target == 1)
| {"/gui.py": ["/src/tile_map_io.py", "/src/visualisation.py", "/src/tile_map.py", "/src/generator.py", "/src/tile.py"], "/tests/test_generator.py": ["/src/generator.py"], "/src/tile_map_io.py": ["/src/visualisation.py"], "/tests/test_tile.py": ["/src/tile.py"], "/tests/test_tile_map.py": ["/src/tile_map.py", "/src/tile.py"], "/src/tile_map.py": ["/src/tile.py"]} |
59,611 | bpiktel/tile_map_generator | refs/heads/main | /src/tile_map_io.py | import pickle
import os
import src.visualisation as tmv
class TileMapIO:
    """
    TileMapIO class contains methods of loading and saving map in different
    forms (image, string, serialization)
    """
    @staticmethod
    def save_map_image(image, path):
        """Save a PIL image to *path*, appending '.png' when missing.

        Does nothing when *path* is empty (e.g. a cancelled file dialog).
        """
        if len(path) == 0:
            return
        if os.path.splitext(path)[1] != '.png':
            path += '.png'
        image.save(path)

    @staticmethod
    def display_map_in_termial(map_):
        """Print the string representation of *map_* to stdout.

        NOTE: method name keeps its original (misspelled) form because
        external callers may rely on it.
        """
        # Bug fix: get_string_map_representation is a staticmethod of
        # TileMapVisualisation, not a module-level function of
        # src.visualisation, so the old call raised AttributeError.
        print(tmv.TileMapVisualisation.get_string_map_representation(map_))

    @staticmethod
    def save_map_to_file(map_, path):
        """Pickle *map_* to *path*, appending '.pickle' when missing.

        Does nothing when *path* is empty.
        """
        if len(path) == 0:
            return
        if os.path.splitext(path)[1] != '.pickle':
            path += '.pickle'
        with open(path, "wb") as f:
            pickle.dump(map_, f)

    @staticmethod
    def load_map_from_file(path):
        """Unpickle and return a map from *path*; None for an empty path.

        NOTE(review): pickle.load can execute arbitrary code -- only open
        files produced by this application.
        """
        if len(path) == 0:
            return None
        with open(path, "rb") as pickle_in:
            map_ = pickle.load(pickle_in)
        return map_
| {"/gui.py": ["/src/tile_map_io.py", "/src/visualisation.py", "/src/tile_map.py", "/src/generator.py", "/src/tile.py"], "/tests/test_generator.py": ["/src/generator.py"], "/src/tile_map_io.py": ["/src/visualisation.py"], "/tests/test_tile.py": ["/src/tile.py"], "/tests/test_tile_map.py": ["/src/tile_map.py", "/src/tile.py"], "/src/tile_map.py": ["/src/tile.py"]} |
59,612 | bpiktel/tile_map_generator | refs/heads/main | /src/tile.py | class Tile:
"""
Tile object contains data used to generate and visualize tile map.
:param id: Unique number that TileMap class uses to store maps, must be
positive or 0
:type id: int
:param name: Name of tile type
:type name: str
:param color: Color of tile when exported to image
:type color: str
:param fill: Value from 0 to 1 that expresses how much of parent tile type
will be converted to this tile type
:type fill: float
:param islands: Number of separate bodies of this tile type
:type islands: int
:raises: :class:'ValueError': Fill value must be from range of 0 to 1
:raises: :class:'ValueError': ID cannot be negative number
"""
def __init__(self, id_, name, color, fill=0.2, islands=1):
if id_ < 0:
raise ValueError("ID cannot be negative number")
self._id = id_
self._name = name
self._color = color
if fill < 0 or fill > 1:
raise ValueError("Fill value must be from range of 0 to 1")
self._fill = fill
self._islands = islands
def get_id(self):
return self._id
def get_name(self):
return self._name
def get_color(self):
return self._color
def get_fill(self):
return self._fill
def get_islands(self):
return self._islands
def get_info(self):
"""Returns string with basic tile information"""
return f"{self._id}, {self._name} - color: {self._color}, " + \
f"fill: {self._fill}, islands: {self._islands}"
class TileTreeNode:
    """Node of the tile hierarchy: one Tile plus its child nodes.

    The tree expresses which tile types may be generated on top of which
    parent type; traversal helpers collect names and (id, color) pairs
    depth-first, this node first.
    """
    def __init__(self, tile, children=None):
        self._tile = tile
        self._children = [] if children is None else children

    def get_tile(self):
        """Return the Tile wrapped by this node."""
        return self._tile

    def get_children(self):
        """Return the list of child TileTreeNode objects."""
        return self._children

    def add_child(self, child):
        """Append *child* (a TileTreeNode) under this node."""
        self._children.append(child)

    def get_names_list(self):
        """Return tile names of this subtree, depth-first."""
        collected = [self._tile.get_name()]
        for node in self._children:
            collected += node.get_names_list()
        return collected

    def get_id_color_tuple(self):
        """Return the (id, color) pair of this node's tile."""
        return (self._tile.get_id(), self._tile.get_color())

    def get_colors_list(self):
        """Return (id, color) pairs of this subtree, depth-first."""
        pairs = [self.get_id_color_tuple()]
        for node in self._children:
            pairs += node.get_colors_list()
        return pairs
| {"/gui.py": ["/src/tile_map_io.py", "/src/visualisation.py", "/src/tile_map.py", "/src/generator.py", "/src/tile.py"], "/tests/test_generator.py": ["/src/generator.py"], "/src/tile_map_io.py": ["/src/visualisation.py"], "/tests/test_tile.py": ["/src/tile.py"], "/tests/test_tile_map.py": ["/src/tile_map.py", "/src/tile.py"], "/src/tile_map.py": ["/src/tile.py"]} |
59,613 | bpiktel/tile_map_generator | refs/heads/main | /tests/test_tile.py | import pytest
from src.tile import Tile, TileTreeNode
def test_ttn_structure():
    """A child node exposes its wrapped Tile data unchanged."""
    leaf = TileTreeNode(Tile(3, '3', 'red'))
    tree = TileTreeNode(
        Tile(0, '0', 'red'),
        [TileTreeNode(Tile(1, '1', 'green', 0.3, 2)),
         TileTreeNode(Tile(2, '2', 'blue'), [leaf])])
    child = tree.get_children()[0].get_tile()
    assert child.get_id() == 1
    assert child.get_name() == "1"
    assert child.get_color() == 'green'
    assert child.get_fill() == 0.3
    assert child.get_islands() == 2
def test_malformed_tile_info():
    """Negative ids and out-of-range fills are rejected."""
    for bad_args in ((-1, '', 'red', 0.2, 1), (0, '', 'red', 1.2, 1)):
        with pytest.raises(ValueError):
            Tile(*bad_args)
def test_tiles_info():
    """get_info formats id, name, color, fill and islands in one line."""
    tile = Tile(0, 'n', 'red', 0.2, 1)
    expected = "0, n - color: red, fill: 0.2, islands: 1"
    assert tile.get_info() == expected
def test_color_list_generation():
    """get_colors_list walks the tree depth-first, root first."""
    tree = TileTreeNode(
        Tile(0, '0', 'red'),
        [TileTreeNode(Tile(1, '1', 'green')),
         TileTreeNode(Tile(2, '2', 'blue'),
                      [TileTreeNode(Tile(3, '3', 'red'))])])
    expected = [(0, 'red'), (1, 'green'), (2, 'blue'), (3, 'red')]
    assert tree.get_colors_list() == expected
def test_names_list():
    """get_names_list walks the tree depth-first, root first."""
    tree = TileTreeNode(
        Tile(0, '0', 'red'),
        [TileTreeNode(Tile(1, '1', 'green')),
         TileTreeNode(Tile(2, '2', 'blue'),
                      [TileTreeNode(Tile(3, '3', 'red'))])])
    assert tree.get_names_list() == ['0', '1', '2', '3']
| {"/gui.py": ["/src/tile_map_io.py", "/src/visualisation.py", "/src/tile_map.py", "/src/generator.py", "/src/tile.py"], "/tests/test_generator.py": ["/src/generator.py"], "/src/tile_map_io.py": ["/src/visualisation.py"], "/tests/test_tile.py": ["/src/tile.py"], "/tests/test_tile_map.py": ["/src/tile_map.py", "/src/tile.py"], "/src/tile_map.py": ["/src/tile.py"]} |
59,614 | bpiktel/tile_map_generator | refs/heads/main | /tests/test_tile_map.py | import pytest
import numpy as np
from src.tile_map import TileMap
from src.tile import Tile
from src.tile import TileTreeNode
def test_constructor():
    """A fresh map has the requested shape and starts all zeros."""
    tile_map = TileMap(3, 5, TileTreeNode(Tile(0, "", "")))
    grid = tile_map.get_map()
    assert grid.shape == (3, 5)
    assert np.count_nonzero(grid) == 0
def test_wrong_data():
    """Negative sizes raise ValueError; a non-node tiles arg raises TypeError."""
    with pytest.raises(ValueError):
        TileMap(-2, 4, TileTreeNode(Tile(0, "", "")))
    with pytest.raises(TypeError):
        TileMap(2, 4, 5)
| {"/gui.py": ["/src/tile_map_io.py", "/src/visualisation.py", "/src/tile_map.py", "/src/generator.py", "/src/tile.py"], "/tests/test_generator.py": ["/src/generator.py"], "/src/tile_map_io.py": ["/src/visualisation.py"], "/tests/test_tile.py": ["/src/tile.py"], "/tests/test_tile_map.py": ["/src/tile_map.py", "/src/tile.py"], "/src/tile_map.py": ["/src/tile.py"]} |
59,615 | bpiktel/tile_map_generator | refs/heads/main | /src/visualisation.py | from PIL import Image, ImageDraw
class TileMapVisualisation():
    """
    TileMapVisualisation class containing methods of string or
    image representation of tile map
    """
    @staticmethod
    def get_string_map_representation(t_map):
        """Return the tile tree followed by the id grid, as one string."""
        tiles = t_map.get_tiles()
        raw_map = t_map.get_map()
        string_map = ""
        string_map += TileMapVisualisation.get_tiles_tree(tiles)
        string_map += '\n'
        string_map += TileMapVisualisation.get_map_of_ids(raw_map)
        return string_map

    @staticmethod
    def get_tiles_tree(tiles, step=0):
        """Return the tile hierarchy as indented text, one tile per line."""
        tree = ""
        tree += '\t' * step + tiles.get_tile().get_info()
        for child in tiles.get_children():
            tree += '\n'
            tree += TileMapVisualisation.get_tiles_tree(child, step + 1)
        return tree

    @staticmethod
    def get_map_of_ids(id_map):
        """Return the id grid as text: left-justified ids, one row per line."""
        p_map = ""
        for row in id_map:
            for id_ in row:
                p_map += f"{str(id_):2}"
                p_map += ' '
            p_map += '\n'
        return p_map

    @staticmethod
    def get_map_image(tile_map, tile_size=10):
        """Returns PIL.Image object of tile map

        Each map cell becomes a *tile_size* px square filled with its
        tile's color and a black outline.
        """
        raw_map = tile_map.get_map()
        tiles = tile_map.get_tiles()
        map_shape = raw_map.shape
        size = (map_shape[1]*tile_size, map_shape[0]*tile_size)
        map_image = Image.new('RGB', size)
        fill_colors = {}
        for id_color_tuple in tiles.get_colors_list():
            fill_colors[id_color_tuple[0]] = id_color_tuple[1]
        # Perf fix: create the Draw object once instead of once per cell.
        draw = ImageDraw.Draw(map_image)
        for row_number, row in enumerate(raw_map):
            for column_number, id_ in enumerate(row):
                # white color if tile data is missing
                fill_color = fill_colors.get(id_, 'white')
                draw.rectangle([
                    (column_number*tile_size, row_number*tile_size),
                    ((column_number+1)*tile_size, (row_number+1)*tile_size)],
                    outline='#000', fill=fill_color)
        return map_image
| {"/gui.py": ["/src/tile_map_io.py", "/src/visualisation.py", "/src/tile_map.py", "/src/generator.py", "/src/tile.py"], "/tests/test_generator.py": ["/src/generator.py"], "/src/tile_map_io.py": ["/src/visualisation.py"], "/tests/test_tile.py": ["/src/tile.py"], "/tests/test_tile_map.py": ["/src/tile_map.py", "/src/tile.py"], "/src/tile_map.py": ["/src/tile.py"]} |
59,616 | bpiktel/tile_map_generator | refs/heads/main | /src/generator.py | from random import choice, randrange
from math import floor
import numpy as np
class TileMapGenerator:
    """
    TileMapGenerator class contains methods of splitting tile map generation
    into steps of generating single tile type.
    """

    def generate_map(self, tile_map):
        """Splits map into map of ids and tiles object and combines
        generated map of ids with tiles"""
        raw_map = tile_map.get_map()
        tiles = tile_map.get_tiles()
        # generate_section rebinds its own local, but the underlying array is
        # mutated in place (BorderGeneration.apply_generated_section writes
        # into it), so raw_map here reflects the generated result.
        self.generate_section(raw_map, tiles)
        tile_map.update_map(raw_map)
        return tile_map

    def generate_section(self, raw_map, tile_tree_node):
        """Calls generation of each tile id, depth-first over the tile tree."""
        parent_tile = tile_tree_node.get_tile()
        children = tile_tree_node.get_children()
        for tile_node in children:
            tile = tile_node.get_tile()
            # fresh BorderGeneration per child: it keeps a padded, masked
            # working copy restricted to the parent id
            gen = BorderGeneration(raw_map, parent_tile.get_id())
            raw_map = gen.generate_tile(raw_map,
                                        parent_tile.get_id(),
                                        tile.get_id(),
                                        tile.get_fill(),
                                        tile.get_islands())
            # recurse so this child's own children are generated on top of it
            self.generate_section(raw_map, tile_node)
class BorderGeneration:
    """
    BorderGeneration class collects methods used to generate 'islands' of child
    id type on parent id type on numpy 2D array.

    :param raw_map: 2D array of tiles ids.
    :type raw_map: :class:'numpy.ndarray'
    :param parent_id: Parent tile id, on which tile islands will be generated.
    :type parent_id: int
    """

    def __init__(self, raw_map, parent_id):
        """Adds padding around map to avoid getting out of bounds and masks
        all ids not suitable for generation"""
        # a one-cell border of -1 means get_adj_coords never indexes out of
        # bounds of the working copy
        self._map = np.pad(
            raw_map, (1, 1), mode='constant', constant_values=-1)
        # anything that is not the parent id is off-limits for generation
        self._map[self._map != parent_id] = -1
        self._parent_id = parent_id

    def generate_tile(self, raw_map, parent_tile, tile_id, fill, islands=1):
        """Generates single tile type. Creates non connecting islands
        one by one and applying mask around them to avoid connections

        NOTE(review): `parent_tile` is never used (self._parent_id from
        __init__ is used instead), and the loop variable below shadows the
        `fill` parameter -- confirm both are intended.
        """
        number_of_tiles = len(self.get_coordinate_tuples(self._parent_id))
        for fill in self.get_fill_per_island(fill, islands):
            self.apply_mask(tile_id)  # apply mask to avoid connections
            n_tiles_to_gen = floor(number_of_tiles * fill)
            self.generate_island(n_tiles_to_gen, self._parent_id, tile_id)
        gen_map = self.get_trimmed_map()
        raw_map = self.apply_generated_section(raw_map, gen_map, tile_id)
        return raw_map

    def apply_mask(self, tile_id):
        """Applies mask of -1 around existing islands to avoid connections"""
        for coord in self.get_coordinate_tuples(tile_id):
            for c in self.get_adj_coords(coord, mode='all'):
                if self._map[c] != tile_id:
                    self._map[c] = -1
        # NOTE(review): np.put writes -1 at the *flat index* `tile_id`, i.e.
        # one arbitrary cell of the padded map -- this looks unintended
        # (perhaps meant to mask the tile_id cells themselves); confirm.
        np.put(self._map, tile_id, -1)

    @staticmethod
    def apply_generated_section(output_map, map_to_apply, id_to_apply):
        """Applies generated tile id on map (mutates and returns output_map)"""
        for row_number, row in enumerate(map_to_apply):
            for column_number, id_ in enumerate(row):
                if id_ == id_to_apply:
                    output_map[row_number, column_number] = id_
        return output_map

    @staticmethod
    def get_fill_per_island(fill, islands, size_diff=5):
        """Randomizes island sizes. Biggest islands can be
        size_diff times bigger that smallest islands

        NOTE(review): randrange(1, size_diff) excludes size_diff itself, so
        the actual maximum size ratio is size_diff - 1.
        """
        random_sizes = [randrange(1, size_diff) for i in range(islands)]
        sum_of_sizes = sum(random_sizes)
        # normalize so the per-island fills sum to the requested total fill
        fills = [size / sum_of_sizes * fill for size in random_sizes]
        return fills

    def generate_island(self, tiles_to_generate, parent_id, child_id):
        """Main generation code. Selects seed around which new tiles will
        appear. First chooeses tile that borders with parent tile
        and generates new tile on random side of selected tile.
        Note that island number has priority over fill so if there are no
        locations to generate new tile result will have less fill, but
        number of islands will be preserved"""
        # selecting seed
        coord_tuples = self.get_coordinate_tuples(parent_id)
        seed = self.get_seed_coordinates(coord_tuples)
        self._map[seed] = child_id
        border_tiles = [seed]
        for x in range(tiles_to_generate - 1):
            # exit if no places to generate
            if len(border_tiles) == 0:
                return
            # generating new tile
            selected_tile = self.get_chosen_tile_coord(border_tiles, parent_id)
            self._map[selected_tile] = child_id
            # check if any adjecent tiles stopped being border tiles
            tiles_adj = self.get_adj_coords(selected_tile)
            tiles_to_check = self.coords_in_both_lists(
                tiles_adj, border_tiles)
            for tile_coord in tiles_to_check:
                if not self.check_if_border_tile(tile_coord, parent_id):
                    border_tiles.remove(tile_coord)
            # check if generated tile becomes border tile
            if self.check_if_border_tile(selected_tile, parent_id):
                border_tiles.append(selected_tile)

    def get_coordinate_tuples(self, searched_id):
        """Returns list of tuples of coordinates of all tiles on map
        with searched id"""
        tuples = []
        for row_number, row in enumerate(self._map):
            for column, id_ in enumerate(row):
                if id_ == searched_id:
                    tuples.append((row_number, column))
        return tuples

    def get_seed_coordinates(self, coord_tuples):
        """Returns random seed coordinate from list of suitable coordinates"""
        return choice(coord_tuples)

    def get_chosen_tile_coord(self, border_tiles, parent_id):
        """Returns coordinate of tile to fill from suitable locations
        around selected border tile"""
        coord = choice(border_tiles)
        # only sides (4-connectivity) still holding the parent id qualify
        options = [c for c in self.get_adj_coords(coord)
                   if self._map[c] == parent_id]
        return choice(options)

    def check_if_border_tile(self, coord, parent_id):
        """Checks if around tile there are any spaces left to generate"""
        for c in self.get_adj_coords(coord):
            if self._map[c] == parent_id:
                return True
        return False

    @staticmethod
    def get_adj_coords(coord, mode='sides'):
        """Returns coordinates adjacent to selected with mode being only sides,
        only corners or all coordinates around"""
        SIDES = [(-1, 0), (0, 1), (0, -1), (1, 0)]
        CORNERS = [(-1, 1), (-1, -1), (1, 1), (1, -1)]
        if mode == 'sides':
            adj = SIDES
        elif mode == 'corners':
            adj = CORNERS
        elif mode == 'all':
            adj = SIDES + CORNERS
        return [(y + coord[0], x + coord[1]) for y, x in adj]

    @staticmethod
    def coords_in_both_lists(list1, list2):
        # intersection of two coordinate lists, preserving list1's order
        return [c for c in list1 if c in list2]

    def get_trimmed_map(self):
        """Returns map trimmed of added bounds"""
        return self._map[1:-1, 1:-1]
| {"/gui.py": ["/src/tile_map_io.py", "/src/visualisation.py", "/src/tile_map.py", "/src/generator.py", "/src/tile.py"], "/tests/test_generator.py": ["/src/generator.py"], "/src/tile_map_io.py": ["/src/visualisation.py"], "/tests/test_tile.py": ["/src/tile.py"], "/tests/test_tile_map.py": ["/src/tile_map.py", "/src/tile.py"], "/src/tile_map.py": ["/src/tile.py"]} |
59,617 | bpiktel/tile_map_generator | refs/heads/main | /src/tile_map.py | import numpy as np
from src.tile import TileTreeNode
class TileMap:
    """
    TileMap stores a 2D array of tile ids and the tile definitions
    corresponding to it.

    :param size_y: number of rows
    :param size_x: number of columns
    :param tiles: root of the tile definition tree
    :type tiles: :class:'tile.TileTreeNode'
    :raises ValueError: if either size is smaller than 1
    :raises TypeError: if tiles is not a TileTreeNode
    """

    def __init__(self, size_y, size_x, tiles):
        """Validate the arguments, then build the map pre-filled with the
        background (root) tile id."""
        if size_x < 1 or size_y < 1:
            raise ValueError(f"Can't create map of size {size_y}x{size_x}")
        if not isinstance(tiles, TileTreeNode):
            raise TypeError(f"Expected TileTreeNode type but got {type(tiles)}")
        self._tiles = tiles
        # Allocate once with the background id directly, instead of creating
        # a zeros array and overwriting it with full_like afterwards.
        self._map = np.full((size_y, size_x),
                            self.get_background_tile_id(), dtype=int)

    def update_map(self, raw_map):
        """Replaces map array with new one without changing tiles list."""
        self._map = raw_map

    def update_tiles(self, new_tiles):
        """Replaces map tiles definitions without changing map."""
        self._tiles = new_tiles

    def get_map(self):
        return self._map

    def get_tiles(self):
        return self._tiles

    def get_background_tile_id(self):
        # the root of the tile tree defines the background
        return self._tiles.get_tile().get_id()
| {"/gui.py": ["/src/tile_map_io.py", "/src/visualisation.py", "/src/tile_map.py", "/src/generator.py", "/src/tile.py"], "/tests/test_generator.py": ["/src/generator.py"], "/src/tile_map_io.py": ["/src/visualisation.py"], "/tests/test_tile.py": ["/src/tile.py"], "/tests/test_tile_map.py": ["/src/tile_map.py", "/src/tile.py"], "/src/tile_map.py": ["/src/tile.py"]} |
59,618 | zhuokaizhao/artifice | refs/heads/master | /artifice/sharedobjects/__init__.py | from .shared import * # noqa
| {"/artifice/sharedobjects/__init__.py": ["/artifice/sharedobjects/shared.py"], "/artifice/tform.py": ["/artifice/__init__.py"], "/artifice/mod.py": ["/artifice/__init__.py"], "/artifice/lay.py": ["/artifice/__init__.py"], "/artifice/sparse/sparse.py": ["/artifice/__init__.py", "/artifice/sparse/__init__.py"], "/scripts/arcing_sphere.py": ["/artifice/utils.py"], "/scripts/two_spheres.py": ["/artifice/utils.py"], "/test_utils/experiment.py": ["/artifice/utils.py", "/artifice/__init__.py"], "/artifice/conversions.py": ["/artifice/__init__.py"], "/test_utils/springs.py": ["/artifice/__init__.py"], "/test_utils/annotate.py": ["/artifice/utils.py"], "/artifice/sparse/sparse_lib.py": ["/artifice/__init__.py"], "/artifice/windows/__init__.py": ["/artifice/windows/annotator_window.py"], "/artifice/dat.py": ["/artifice/__init__.py"], "/artifice/prio.py": ["/artifice/__init__.py"], "/artifice/vis.py": ["/artifice/__init__.py"], "/artifice/main.py": ["/artifice/__init__.py"], "/artifice/sparse/__init__.py": ["/artifice/sparse/sparse.py"], "/artifice/ann.py": ["/artifice/sharedobjects/__init__.py", "/artifice/__init__.py"]} |
59,619 | zhuokaizhao/artifice | refs/heads/master | /artifice/docs.py | description = "Artifice."
# Help/description strings for artifice's command-line flags; "todo" entries
# are placeholders awaiting real help text.
# NOTE(review): presumably consumed as argparse help= values -- confirm in main.py.
commands = "Artifice command to run. Can be multiple."
# file settings
data_root = "todo"
model_root = "todo"
overwrite = "Overwrite existing models."
deep = "todo"
figs_dir = "todo"
# data settings
convert_mode = "todo"
transformation = "todo"
identity_prob = "todo"
priority_mode = "todo"
labeled = """use the labeled and not the annotated set for training. This is
subtly different from the AUGMENT option. If LABELED is true, then AUGMENT
should not be used, but if LABELED is false, augment may or may not be used,
since an annotated set can be used with or without augmentation."""
# annotation settings
annotation_mode = "todo"
record_size = "Number of examples to save in each annotated tfrecord."
annotation_delay = "todo"
# data sizes
image_shape = "Shape of the image as: HEIGHT WIDTH CHANNELS"
base_shape = "todo"
data_size = "Number of examples per training epoch."
test_size = "Number of examples withheld for testing."
batch_size = "Batch size."
subset_size = "Number of examples to annotate."
num_objects = "Maximum number of objects."
pose_dim = "todo"
num_shuffle = "todo"
# model architecture
base_size = "Height/width of the output of the first layer of the lower level."
level_filters = "Number of filters for each level in the unet."
level_depth = "todo"
# sparse eval and other optimization settings
model = "Which model to use."
multiscale = "todo"
use_var = "todo"
# model hyperparameters
dropout = "todo"
initial_epoch = """Initial epoch, starting at 0."""
epochs = """Number of training epochs. Default is 1."""
learning_rate = """Learning rate."""
tol = "todo"
# runtime settings
num_parallel_calls = "Threadpool size. Default (-1) uses available cores."
verbose = "Artifice verbosity. Default is 2 (debug level)."
keras_verbose = "Keras verbosity. Default is 1 (progress bars)."
patient = "Disable eager execution."
show = "Show plots rather than save them."
cache = "cache the pipelined dataset"
seconds = """Limits runtime for "prioritize" and "annotate" commands. For "train," sets
the time after which the dataset is no longer reloaded every epoch, and caching
can occur."""
| {"/artifice/sharedobjects/__init__.py": ["/artifice/sharedobjects/shared.py"], "/artifice/tform.py": ["/artifice/__init__.py"], "/artifice/mod.py": ["/artifice/__init__.py"], "/artifice/lay.py": ["/artifice/__init__.py"], "/artifice/sparse/sparse.py": ["/artifice/__init__.py", "/artifice/sparse/__init__.py"], "/scripts/arcing_sphere.py": ["/artifice/utils.py"], "/scripts/two_spheres.py": ["/artifice/utils.py"], "/test_utils/experiment.py": ["/artifice/utils.py", "/artifice/__init__.py"], "/artifice/conversions.py": ["/artifice/__init__.py"], "/test_utils/springs.py": ["/artifice/__init__.py"], "/test_utils/annotate.py": ["/artifice/utils.py"], "/artifice/sparse/sparse_lib.py": ["/artifice/__init__.py"], "/artifice/windows/__init__.py": ["/artifice/windows/annotator_window.py"], "/artifice/dat.py": ["/artifice/__init__.py"], "/artifice/prio.py": ["/artifice/__init__.py"], "/artifice/vis.py": ["/artifice/__init__.py"], "/artifice/main.py": ["/artifice/__init__.py"], "/artifice/sparse/__init__.py": ["/artifice/sparse/sparse.py"], "/artifice/ann.py": ["/artifice/sharedobjects/__init__.py", "/artifice/__init__.py"]} |
59,620 | zhuokaizhao/artifice | refs/heads/master | /artifice/img.py | """
Util functions for manipulating images in artifice.
"""
import numpy as np
from PIL import Image
from skimage import draw
from artifice.log import logger # noqa: F401
"""
Basic image utilities.
"""
def grayscale(image):
    """Convert an n-channel, 3D image to grayscale.

    Uses the [luminosity weighted average]
    (https://www.johndcook.com/blog/2009/08/24/algorithms-convert-color-grayscale/)  # noqa: E501
    if there are three channels. Otherwise, just uses the per-pixel average.

    :param image: image to convert; 2D or 3D array-like
    :returns: new grayscale image of shape (H, W, 1), dtype uint8 for
        multi-channel input
    """
    image = np.array(image)
    out_shape = (image.shape[0], image.shape[1], 1)
    if image.ndim == 2:
        return image.reshape(*out_shape)
    assert image.ndim == 3
    if image.shape[2] == 3:
        W = np.array([0.21, 0.72, 0.07])
        # The weights sum to 1, so the weighted *sum* keeps output in the
        # input's intensity range; the previous `.mean` divided by 3 and
        # darkened the result to a third of the correct luminosity.
        return (image * W).sum(axis=2).reshape(*out_shape).astype(np.uint8)
    else:
        return image.mean(axis=2).reshape(*out_shape).astype(np.uint8)
def rgb(image, copy=False):
    """Return a 3-channel (RGB) version of *image*.

    Grayscale input is replicated across three channels; images with more
    than three channels are truncated to the first three.

    :param image: array-like image, grayscale or multi-channel
    :param copy: when True, an already-RGB image is copied rather than
        returned as-is
    :returns: HxWx3 array
    :raises RuntimeError: for shapes that cannot be interpreted as an image
    """
    squeezed = np.squeeze(image)
    if squeezed.ndim == 2:
        # replicate the single channel three times
        return np.stack((squeezed,) * 3, axis=-1)
    if squeezed.ndim == 3:
        channels = squeezed.shape[2]
        if channels > 3:
            return squeezed[:, :, :3]
        if channels == 3:
            return squeezed.copy() if copy else squeezed
    raise RuntimeError(f"couldn't handle image shape {squeezed.shape}")
def open_as_array(fname):
    """Open the image file at *fname* as a numpy uint8 array.

    'L' images come back as HxW; 'RGB' as HxWx3; 'P' (palette) and 'RGBA'
    images are converted to RGB first.

    :param fname: path to an image file readable by PIL
    :returns: numpy array of the pixel data
    :raises NotImplementedError: for any other PIL mode
    """
    im = Image.open(fname)
    if im.mode == 'L':
        # PIL's size is (width, height); numpy wants (height, width)
        image = np.array(im).reshape(im.size[1], im.size[0])
    elif im.mode == 'RGB':
        image = np.array(im).reshape(im.size[1], im.size[0], 3)
    elif im.mode == 'P':
        image = np.array(im.convert('RGB')).reshape(im.size[1], im.size[0], 3)
    elif im.mode == 'RGBA':
        # drops the alpha channel
        image = np.array(im.convert('RGB')).reshape(im.size[1], im.size[0], 3)
    else:
        raise NotImplementedError("Cannot create image mode '{}'".format(im.mode))
    return image
def as_float(image, atleast_3d=True):
    """Return *image* as float32, with integer types scaled from [0, 255] to [0, 1].

    :param image: numpy array of a float or integer dtype
    :param atleast_3d: when True, promote the result to at least 3 dimensions
    :raises ValueError: for unsupported dtypes
    """
    dtype = image.dtype
    if dtype in (np.float32, np.float64):
        converted = image.astype(np.float32)
    elif dtype in (np.uint8, np.int32, np.int64):
        converted = image.astype(np.float32) / 255.
    else:
        raise ValueError(f"image dtype '{image.dtype}' not allowed")
    return np.atleast_3d(converted) if atleast_3d else converted
def as_uint(image):
    """Return *image* as uint8, clipping values outside the valid range.

    Floats are assumed to lie in [0, 1] and are rescaled to [0, 255];
    integers are clipped to [0, 255]. uint8 input is returned unchanged.

    :raises ValueError: for unsupported dtypes
    """
    dtype = image.dtype
    if dtype == np.uint8:
        return image
    if dtype in (np.float32, np.float64):
        return (255 * np.clip(image, 0, 1.0)).astype(np.uint8)
    if dtype in (np.int32, np.int64):
        return np.clip(image, 0, 255).astype(np.uint8)
    raise ValueError(f"image dtype '{image.dtype}' not allowed")
def open_as_float(image_path):
    """Open the image at *image_path* as a float32 array scaled to [0, 1]."""
    raw = open_as_array(image_path)
    return as_float(raw, atleast_3d=False)
def save(fname, image):
    """Save the array image to png in fname.

    The array is converted to uint8 (clipping/rescaling as in as_uint) and
    squeezed so HxWx1 images are written as grayscale.
    """
    image = np.squeeze(as_uint(image))
    im = Image.fromarray(image)
    im.save(fname)
"""
Drawing functions.
"""
def draw_x(image, x, y, size=12, channel=0):
    """Draw an anti-aliased 'x' marker centered at (x, y).

    :param image: image to draw on; converted to RGB, float valued in [0, 1)
    :param x: row position of the marker center (first index)
    :param y: column position of the marker center (second index)
    :param size: marker diameter in pixels, default 12
    :param channel: channel index to draw in; default (0) makes a red x
    :returns: the RGB image with the marker drawn in
    """
    image = rgb(image)
    # half-extent of each diagonal stroke so the full mark spans ~size px
    h = int(size / (2 * np.sqrt(2)))
    i = int(x)
    j = int(y)
    # first diagonal; out-of-frame pixels are filtered by get_inside
    rr, cc, val = draw.line_aa(i - h, j - h, i + h, j + h)
    rr, cc, val = get_inside(rr, cc, image.shape, vals=val)
    image[rr, cc, channel] = val
    # second diagonal
    rr, cc, val = draw.line_aa(i - h, j + h, i + h, j - h)
    rr, cc, val = get_inside(rr, cc, image.shape, vals=val)
    image[rr, cc, channel] = val
    return image
def draw_t(image, x, y, size=12, channel=1):
    """Draw an anti-aliased plus-shaped ('t') marker centered at (x, y).

    Unlike draw_x, this draws one horizontal and one vertical stroke.

    :param image: image to draw on; converted to RGB, float valued in [0, 1)
    :param x: row position of the marker center (first index)
    :param y: column position of the marker center (second index)
    :param size: marker diameter in pixels, default 12
    :param channel: channel index to draw in; default (1) makes a green mark
    :returns: the RGB image with the marker drawn in
    """
    image = rgb(image)
    h = size // 2
    # NOTE(review): draw_x truncates with int(x) while this floors; the two
    # differ for negative coordinates -- confirm which is intended.
    i = int(np.floor(x))
    j = int(np.floor(y))
    # vertical stroke; out-of-frame pixels are filtered by get_inside
    rr, cc, val = draw.line_aa(i - h, j, i + h, j)
    rr, cc, val = get_inside(rr, cc, image.shape, vals=val)
    image[rr, cc, channel] = val
    # horizontal stroke
    rr, cc, val = draw.line_aa(i, j - h, i, j + h)
    rr, cc, val = get_inside(rr, cc, image.shape, vals=val)
    image[rr, cc, channel] = val
    return image
def draw_xs(image, xs, ys, **kwargs):
    """Draw an 'x' marker at every (x, y) pair; kwargs are forwarded to draw_x."""
    for x_pos, y_pos in zip(xs, ys):
        image = draw_x(image, x_pos, y_pos, **kwargs)
    return image
def draw_ts(image, xs, ys, **kwargs):
    """Draw a plus ('t') marker at every (x, y) pair; kwargs are forwarded to draw_t."""
    for x_pos, y_pos in zip(xs, ys):
        image = draw_t(image, x_pos, y_pos, **kwargs)
    return image
"""
Manipulations on indices.
"""
def indices_from_regions(regions, num_objects):
    """Collect per-region pixel indices from an image-shaped region map.

    :param regions: image-shaped array whose entries are region labels
        0..num_objects
    :param num_objects: number of object regions (label 0 is included too)
    :returns: `[(xs_0, ys_0), (xs_1, ys_1), ...]`, one `np.where` index
        tuple per label 0..num_objects
    :rtype: list of tuples of index arrays
    """
    labels = np.squeeze(regions)
    return [np.where(labels == label) for label in range(num_objects + 1)]
def inside(xs, ys, shape):
    """Boolean mask of which (x, y) index pairs fall inside *shape*.

    :param xs: numpy array of row indices
    :param ys: numpy array of column indices
    :param shape: image shape; only the first two dimensions are used
    :returns: 1-D boolean array, True where 0 <= x < shape[0] and
        0 <= y < shape[1]
    """
    return (xs >= 0) & (ys >= 0) & (xs < shape[0]) & (ys < shape[1])
def get_inside(xs, ys, shape, vals=None):
    """Filter index arrays down to those inside *shape*.

    :param xs: row indices
    :param ys: column indices
    :param shape: image shape to compare with
    :param vals: optional array filtered alongside the indices
    :returns: (xs, ys) or (xs, ys, vals) restricted to in-bounds positions
    """
    xs = np.array(xs)
    ys = np.array(ys)
    # same predicate as inside(), applied here directly
    keep = (xs >= 0) & (ys >= 0) & (xs < shape[0]) & (ys < shape[1])
    if vals is None:
        return xs[keep], ys[keep]
    return xs[keep], ys[keep], vals[keep]
def fill_negatives(image):
    """Return a copy of *image* with negative entries replaced by gaussian noise.

    The noise is drawn from N(mean, std) of the non-negative entries; the
    input array is left untouched.

    :param image: numpy array whose negative values mark pixels to fill
    """
    filled = image.copy()
    valid = filled >= 0
    mean = filled[valid].mean()
    std = filled[valid].std()
    holes = filled < 0
    filled[holes] = np.random.normal(mean, std, size=filled[holes].shape)
    return filled
def compute_object_patch(mask, pad=True):
    """Given a numpy mask, compute the smallest square patch around it.

    image[i:i+si, j:j+sj] will give the proper patch.

    If pad = True, patch may not be strictly square, since it could be clipped
    by the mask shape.

    :param mask: boolean array giving object location; must contain at least
        one True entry
    :param pad: pad the patch until it is a square with each side the length
        of the diagonal of the original patch
    :returns: `[i, j, si, sj]` upper left coordinate of the patch, sizes
    :rtype: list
    """
    vertical = np.where(np.any(mask, axis=1))[0]
    horizontal = np.where(np.any(mask, axis=0))[0]
    i = np.min(vertical)
    j = np.min(horizontal)
    # Tight square side: the larger of the row and column extents. The
    # column extent must be measured relative to j; the previous code
    # compared the raw column index, inflating the patch for objects far
    # from column 0.
    size = max(np.max(vertical) - i, np.max(horizontal) - j) + 1
    if pad:
        # grow each side so the patch side approaches the tight diagonal
        diagonal = size * np.sqrt(2)
        padding = int(np.ceil((diagonal - size + 1) / 2))
        i = max(0, i - padding)
        j = max(0, j - padding)
        # clip the grown patch against the mask bounds
        si = min(size + padding, mask.shape[0] - i)
        sj = min(size + padding, mask.shape[1] - j)
    else:
        si = sj = size
    return [i, j, si, sj]
| {"/artifice/sharedobjects/__init__.py": ["/artifice/sharedobjects/shared.py"], "/artifice/tform.py": ["/artifice/__init__.py"], "/artifice/mod.py": ["/artifice/__init__.py"], "/artifice/lay.py": ["/artifice/__init__.py"], "/artifice/sparse/sparse.py": ["/artifice/__init__.py", "/artifice/sparse/__init__.py"], "/scripts/arcing_sphere.py": ["/artifice/utils.py"], "/scripts/two_spheres.py": ["/artifice/utils.py"], "/test_utils/experiment.py": ["/artifice/utils.py", "/artifice/__init__.py"], "/artifice/conversions.py": ["/artifice/__init__.py"], "/test_utils/springs.py": ["/artifice/__init__.py"], "/test_utils/annotate.py": ["/artifice/utils.py"], "/artifice/sparse/sparse_lib.py": ["/artifice/__init__.py"], "/artifice/windows/__init__.py": ["/artifice/windows/annotator_window.py"], "/artifice/dat.py": ["/artifice/__init__.py"], "/artifice/prio.py": ["/artifice/__init__.py"], "/artifice/vis.py": ["/artifice/__init__.py"], "/artifice/main.py": ["/artifice/__init__.py"], "/artifice/sparse/__init__.py": ["/artifice/sparse/sparse.py"], "/artifice/ann.py": ["/artifice/sharedobjects/__init__.py", "/artifice/__init__.py"]} |
59,621 | zhuokaizhao/artifice | refs/heads/master | /artifice/windows/annotator_window.py | """Create the annotator window for use by ann.py."""
from PyQt5.QtWidgets import QApplication, QLabel, QMainWindow, QWidget
from PyQt5.QtGui import QIcon, QPixmap
from PyQt5.QtCore import Qt
class AnnotatorWindow(QWidget):
    """Top-level Qt widget for the annotation tool (used by ann.py)."""

    def __init__(self, *args, **kwargs):
        """Forward all arguments to QWidget and set the window title."""
        super().__init__(*args, **kwargs)
        self.setWindowTitle("Annotator")
def main():
    """Manual test entry point: show an empty AnnotatorWindow until closed."""
    app = QApplication([])
    window = AnnotatorWindow()
    window.show()
    # blocks until the window is closed
    app.exec_()


if __name__ == '__main__':
    main()
| {"/artifice/sharedobjects/__init__.py": ["/artifice/sharedobjects/shared.py"], "/artifice/tform.py": ["/artifice/__init__.py"], "/artifice/mod.py": ["/artifice/__init__.py"], "/artifice/lay.py": ["/artifice/__init__.py"], "/artifice/sparse/sparse.py": ["/artifice/__init__.py", "/artifice/sparse/__init__.py"], "/scripts/arcing_sphere.py": ["/artifice/utils.py"], "/scripts/two_spheres.py": ["/artifice/utils.py"], "/test_utils/experiment.py": ["/artifice/utils.py", "/artifice/__init__.py"], "/artifice/conversions.py": ["/artifice/__init__.py"], "/test_utils/springs.py": ["/artifice/__init__.py"], "/test_utils/annotate.py": ["/artifice/utils.py"], "/artifice/sparse/sparse_lib.py": ["/artifice/__init__.py"], "/artifice/windows/__init__.py": ["/artifice/windows/annotator_window.py"], "/artifice/dat.py": ["/artifice/__init__.py"], "/artifice/prio.py": ["/artifice/__init__.py"], "/artifice/vis.py": ["/artifice/__init__.py"], "/artifice/main.py": ["/artifice/__init__.py"], "/artifice/sparse/__init__.py": ["/artifice/sparse/sparse.py"], "/artifice/ann.py": ["/artifice/sharedobjects/__init__.py", "/artifice/__init__.py"]} |
59,622 | zhuokaizhao/artifice | refs/heads/master | /batch/make_scripts.py | """Make all the scripts for training and analysis.
Unlike most artifice scripts, this should be run from ROOT/batch."""
import os
import itertools
# sbatch template for one training run. Single-brace placeholders are filled
# by str.format in the loop below; doubled braces ({{data}}) survive
# formatting as literal shell ${data} expansions.
train_template = """#!/bin/bash
#SBATCH --job-name={mode}
#SBATCH --output={out_name}
#SBATCH --error={err_name}
#SBATCH -p gpu2
#SBATCH --gres=gpu:1
#SBATCH --time=24:00:00
#SBATCH --account=pi-glk
#SBATCH --mem-per-cpu=32000
module unload python
module load ffmpeg
module load cuda/9.0
module load Anaconda3/2018.12
source activate tf-gpu-1.12.0
module load povray/3.7
cd /project2/glk/artifice
epochs={epochs}
mode={mode}
data={data}
subset_size={subset_size} # ignored if mode is 'full'
query_size={query_size} # ignored if mode is 'full'
echo "Starting training..."
python artifice.py train --mode $mode -i data/$data \\
--overwrite -e $epochs \\
-m models/${{data}}_${{mode}}{subset_addon} \\
--subset-size $subset_size --query-size $query_size \\
--verbose 2 --keras-verbose 2
echo "Finished."
"""
# sbatch template for post-training analysis; {cmd} is 'detect' or
# 'visualize'. Sections for older datasets are kept commented out (inside the
# shell script) for reference.
analysis_template = """#!/bin/bash
#SBATCH --job-name={mode}
#SBATCH --output={out_name}
#SBATCH --error={err_name}
#SBATCH -p gpu2
#SBATCH --gres=gpu:1
#SBATCH --time=01:00:00
#SBATCH --account=pi-glk
#SBATCH --mem-per-cpu=32000
module unload python
module load ffmpeg
module load cuda/9.0
module load Anaconda3/2018.12
source activate tf-gpu-1.12.0
module load povray/3.7
cd /project2/glk/artifice
mode={mode}
data={data}
subset_size={subset_size}
# echo "Starting coupled_spheres..."
# python artifice.py {cmd} --mode $mode -i data/coupled_spheres \\
# -m models/${{data}}_${{mode}}{subset_addon} \\
# --verbose 2 --keras-verbose 2
# echo "Finished."
# echo "Starting coupled_spheres_tethered..."
# python artifice.py {cmd} --mode $mode -i data/coupled_spheres_tethered \\
# -m models/${{data}}_${{mode}}{subset_addon} \\
# --verbose 2 --keras-verbose 2
# echo "Finished."
# echo "Starting waltzing_spheres..."
# python artifice.py {cmd} --mode $mode -i data/waltzing_spheres \\
# -m models/${{data}}_${{mode}}{subset_addon} \\
# --splits 0 0 2401 \\
# --verbose 2 --keras-verbose 2
# echo "Finished."
# echo "Starting shadowed_right_spheres..."
# python artifice.py {cmd} --mode $mode -i data/shadowed_right_spheres \\
# -m models/${{data}}_${{mode}}{subset_addon} \\
# --splits 0 0 2401 \\
# --verbose 2 --keras-verbose 2
# echo "Finished."
echo "Starting harper_waltzing_spheres..."
python artifice.py {cmd} --mode $mode -i data/harper_waltzing_spheres \\
-m models/${{data}}_${{mode}}{subset_addon} \\
--splits 0 0 2401 \\
--verbose 2 --keras-verbose 2
echo "Finished."
"""
# sweep configuration: one script set per (epochs, mode, dataset, subset size)
num_active_epochs = 10
which_epochs = [20]
modes = ['full', 'random', 'active',
         'augmented-full', 'augmented-random', 'augmented-active']
datas = ['harper_spheres', 'harper_spheres_tethered', 'coupled_spheres',
         'coupled_spheres_tethered']
subset_sizes = [10, 100]
for t in itertools.product(which_epochs, modes, datas, subset_sizes):
    print(t)
    epochs, mode, data, subset_size = t
    # 'full' modes use the whole dataset, so no subset suffix in the dir name
    dir_name = f"train/{mode}_{data}" + ("" if 'full' in mode
                                         else f"_subset{subset_size}")
    if not os.path.exists(dir_name):
        os.mkdir(dir_name)
    subset_addon = "" if 'full' in mode else "_subset${subset_size}"
    query_size = subset_size // num_active_epochs
    # training script
    out_name = os.path.join(os.getcwd(), dir_name, 'train.out')
    err_name = os.path.join(os.getcwd(), dir_name, 'train.err')
    script = train_template.format(
        epochs=epochs, mode=mode, data=data, subset_size=subset_size,
        query_size=query_size, subset_addon=subset_addon,
        out_name=out_name, err_name=err_name)
    with open(os.path.join(dir_name, 'train.batch'), 'w') as f:
        f.write(script)
    # detection script
    cmd = 'detect'
    out_name = os.path.join(os.getcwd(), dir_name, f'{cmd}.out')
    err_name = os.path.join(os.getcwd(), dir_name, f'{cmd}.err')
    script = analysis_template.format(
        cmd=cmd, mode=mode, data=data, subset_size=subset_size,
        subset_addon=subset_addon, out_name=out_name, err_name=err_name)
    with open(os.path.join(dir_name, 'detect.batch'), 'w') as f:
        f.write(script)
    # visualization script
    cmd = 'visualize'
    out_name = os.path.join(os.getcwd(), dir_name, f'{cmd}.out')
    err_name = os.path.join(os.getcwd(), dir_name, f'{cmd}.err')
    script = analysis_template.format(
        cmd=cmd, mode=mode, data=data, subset_size=subset_size,
        subset_addon=subset_addon, out_name=out_name, err_name=err_name)
    with open(os.path.join(dir_name, 'visualize.batch'), 'w') as f:
        f.write(script)
| {"/artifice/sharedobjects/__init__.py": ["/artifice/sharedobjects/shared.py"], "/artifice/tform.py": ["/artifice/__init__.py"], "/artifice/mod.py": ["/artifice/__init__.py"], "/artifice/lay.py": ["/artifice/__init__.py"], "/artifice/sparse/sparse.py": ["/artifice/__init__.py", "/artifice/sparse/__init__.py"], "/scripts/arcing_sphere.py": ["/artifice/utils.py"], "/scripts/two_spheres.py": ["/artifice/utils.py"], "/test_utils/experiment.py": ["/artifice/utils.py", "/artifice/__init__.py"], "/artifice/conversions.py": ["/artifice/__init__.py"], "/test_utils/springs.py": ["/artifice/__init__.py"], "/test_utils/annotate.py": ["/artifice/utils.py"], "/artifice/sparse/sparse_lib.py": ["/artifice/__init__.py"], "/artifice/windows/__init__.py": ["/artifice/windows/annotator_window.py"], "/artifice/dat.py": ["/artifice/__init__.py"], "/artifice/prio.py": ["/artifice/__init__.py"], "/artifice/vis.py": ["/artifice/__init__.py"], "/artifice/main.py": ["/artifice/__init__.py"], "/artifice/sparse/__init__.py": ["/artifice/sparse/sparse.py"], "/artifice/ann.py": ["/artifice/sharedobjects/__init__.py", "/artifice/__init__.py"]} |
59,623 | zhuokaizhao/artifice | refs/heads/master | /artifice/tform.py | """Transformation utils, used to build up augmentations.
Note that these transformations are expected to adjust not just the image but
also the label to match. This is easily done for translation, where the
corresponding label dimension (position) is known, but less so for other pose
dimensions. Custom code may be written here to allow for this (or we may add
more standard dimensions to the label, in addition to position, to allow for )
These will be wrapped in py_function, ensuring eager execution (fine since this
is just preparing data). Each function should take `image, label, annotation,
background` as arguments and return a [new_image, new_label] list.
Because these are wrapped in py_function, things can be turned into numpy
arrays and back.
These transformations are applied to entire images, not patches, so
interpolation should be limited to as small a region as possible.
"""
import numpy as np
import tensorflow as tf
from artifice.log import logger
from artifice import img
def swap(t):
    """Exchange the two entries of a length-2 tensor.

    Used below to flip a (row, col) offset into the order expected by
    tf.contrib.image.translate.
    """
    return tf.gather(t, [1, 0])
def normal_translate(image, label, annotation, background):
    """Translate each object with a random offset, normal distributed.

    :param image: image tensor containing the objects (eager, has .numpy())
    :param label: per-object label tensor; label[:, :2] holds positions
    :param annotation: image-shaped tensor of per-pixel object indices
    :param background: background image used to fill vacated pixels
    :returns: [new_image, new_label] list of numpy arrays
    """
    # boilerplate code: pull all eager tensors into numpy
    image = image.numpy()
    label = label.numpy()
    annotation = annotation.numpy()
    background = background.numpy()
    new_image = image.copy()
    new_label = label.copy()
    for l in range(label.shape[0]):
        mask = annotation == l
        if not mask.any():
            logger.warning(f"no {l}'th object")
            continue
        i, j, si, sj = img.compute_object_patch(mask)
        image_patch = image[i:i + si, j:j + sj].copy()
        mask_patch = mask[i:i + si, j:j + sj].copy()
        # todo; figure out if this is worth it.
        # replace the original object with background
        new_image[i:i + si, j:j + sj][mask_patch] = \
            background[i:i + si, j:j + sj][mask_patch]
        # get the translation, adjust the patch values
        mask_patch = mask_patch.astype(np.float32)
        translation = np.random.normal(loc=0, scale=5, size=2).astype(np.float32)
        # only the sub-pixel remainder is handled by tf; the integral part is
        # applied by shifting the patch origin below. swap() converts to the
        # (x, y) order of tf.contrib.image.translate.
        offset = swap(translation % 1)
        # NOTE(review): tf.contrib was removed in TF 2.x; this requires TF 1.x
        # with eager execution (tfa.image.translate is the modern equivalent).
        image_patch = tf.contrib.image.translate(
            image_patch, offset, interpolation='BILINEAR').numpy()
        mask_patch = tf.contrib.image.translate(
            mask_patch, offset, interpolation='NEAREST').numpy()
        # adjust the coordinates of the patches, and their sizes
        new_label[l, :2] += translation
        i += int(np.floor(translation[0]))
        j += int(np.floor(translation[1]))
        if i + si < 0 or i >= mask.shape[0] or j + sj < 0 or j >= mask.shape[1]:
            # patch got shifted entirely outside of the frame
            continue
        # clip the patch against each frame edge in turn
        # NOTE(review): for a top/left-clipped patch these slices keep the
        # *first* rows/cols while the bottom/right cases keep the last ones;
        # the two pairs look inverted relative to each other -- confirm.
        if i < 0:
            si += i
            i = 0
            image_patch = image_patch[:si]
            mask_patch = mask_patch[:si]
        if i + si > mask.shape[0]:
            si = mask.shape[0] - i
            image_patch = image_patch[-si:]
            mask_patch = mask_patch[-si:]
        if j < 0:
            sj += j
            j = 0
            image_patch = image_patch[:, :sj]
            mask_patch = mask_patch[:, :sj]
        if j + sj > mask.shape[1]:
            sj = mask.shape[1] - j
            image_patch = image_patch[:, -sj:]
            mask_patch = mask_patch[:, -sj:]
        # NOTE(review): np.bool is removed in numpy >= 1.24; use plain bool.
        mask_patch = mask_patch.astype(np.bool)
        out = image_patch[mask_patch]
        new_image[i:i + si, j:j + sj][mask_patch] = out
    return [new_image, new_label]
def uniform_rotate(image, label, annotation, background):
    """Rotate every object by an angle drawn uniformly from [0, 2*pi).

    Placeholder matching the common transformation signature; not implemented.
    """
    raise NotImplementedError
def normal_scale(image, label, annotation, background):
    """Rescale every object by a factor drawn from N(1, 0.1).

    Placeholder matching the common transformation signature; not implemented.
    """
    raise NotImplementedError
def combine_1_2(image, label, annotation, background):
    """Apply the random translation and rotation transformations together.

    Placeholder matching the common transformation signature; not implemented.
    """
    raise NotImplementedError
def combine_1_2_3(image, label, annotation, background):
    """Apply translation, rotation, and scaling together, potentially
    adjusting the scale dimension of `label`.

    Placeholder matching the common transformation signature; not implemented.
    """
    raise NotImplementedError
# Dispatch table: integer transformation id -> augmentation function. All
# entries share the (image, label, annotation, background) signature; only
# normal_translate is implemented in the visible code.
transformations = {0: normal_translate,
                   1: uniform_rotate,
                   2: normal_scale,
                   3: combine_1_2,
                   4: combine_1_2_3}
| {"/artifice/sharedobjects/__init__.py": ["/artifice/sharedobjects/shared.py"], "/artifice/tform.py": ["/artifice/__init__.py"], "/artifice/mod.py": ["/artifice/__init__.py"], "/artifice/lay.py": ["/artifice/__init__.py"], "/artifice/sparse/sparse.py": ["/artifice/__init__.py", "/artifice/sparse/__init__.py"], "/scripts/arcing_sphere.py": ["/artifice/utils.py"], "/scripts/two_spheres.py": ["/artifice/utils.py"], "/test_utils/experiment.py": ["/artifice/utils.py", "/artifice/__init__.py"], "/artifice/conversions.py": ["/artifice/__init__.py"], "/test_utils/springs.py": ["/artifice/__init__.py"], "/test_utils/annotate.py": ["/artifice/utils.py"], "/artifice/sparse/sparse_lib.py": ["/artifice/__init__.py"], "/artifice/windows/__init__.py": ["/artifice/windows/annotator_window.py"], "/artifice/dat.py": ["/artifice/__init__.py"], "/artifice/prio.py": ["/artifice/__init__.py"], "/artifice/vis.py": ["/artifice/__init__.py"], "/artifice/main.py": ["/artifice/__init__.py"], "/artifice/sparse/__init__.py": ["/artifice/sparse/sparse.py"], "/artifice/ann.py": ["/artifice/sharedobjects/__init__.py", "/artifice/__init__.py"]} |
59,624 | zhuokaizhao/artifice | refs/heads/master | /artifice/mod.py | """Implements artifice's detection scheme from end to end.
"""
import os
from time import time
import itertools
import numpy as np
from stringcase import snakecase
import tensorflow as tf
from tensorflow import keras
from artifice.log import logger
from artifice import dat
from artifice import utils
from artifice import lay
def _get_optimizer(learning_rate):
    """Return an Adadelta optimizer suited to the current execution mode.

    Eager execution gets the tf.train optimizer; graph mode gets keras'.
    """
    if not tf.executing_eagerly():
        return keras.optimizers.Adadelta(learning_rate)
    return tf.train.AdadeltaOptimizer(learning_rate)
def _update_hist(a, b):
"""Concat the lists in b onto the lists in a.
If b has elements that a does not, includes them. Behavior is undefined for
elements that are not lists.
:param a:
:param b:
:returns:
:rtype:
"""
c = a.copy()
for k, v in b.items():
if isinstance(v, list) and isinstance(c.get(k), list):
c[k] += v
else:
c[k] = v
return c
def _unbatch_outputs(outputs):
"""Essentially transpose the batch dimension to the outer dimension outputs.
:param outputs: batched outputs of the model, like
[[pose 0, pose 1, ....],
[output_0 0, output_0 1, ...],
[output_1 0, output_1 1, ...]]
:returns: result after unbatching, like
[[pose 0, output_0 0, output_1 0, ...],
[pose 1, output_0 1, output_1 1, ...],
...]
"""
unbatched_outputs = []
for i in range(outputs[0].shape[0]):
unbatched_outputs.append([output[i] for output in outputs])
return unbatched_outputs
def crop(inputs, shape=None, size=None):
    """Center-crop a 4D (NHWC) tensor to the given spatial size.

    :param inputs: tensor to crop
    :param shape: full output shape; its [1:3] slice is used when `size` is None
    :param size: (height, width) to crop to; takes precedence over `shape`
    :returns: cropped tensor
    """
    if size is None:
        assert shape is not None, 'one of `size` or `shape` must be provided'
        size = shape[1:3]
    # split any odd excess so the extra pixel is removed from the bottom/right
    excess_h = int(inputs.shape[1] - size[0])
    excess_w = int(inputs.shape[2] - size[1])
    cropping = ((int(np.floor(excess_h / 2)), int(np.ceil(excess_h / 2))),
                (int(np.floor(excess_w / 2)), int(np.ceil(excess_w / 2))))
    return keras.layers.Cropping2D(cropping=cropping,
                                   input_shape=inputs.shape)(inputs)
def _crop_like_conv(inputs,
                    kernel_size=[3, 3],
                    padding='valid'):
    """Crop the height/width dims of inputs as if convolved with stride 1.

    :param inputs: 4D (NHWC) tensor
    :param kernel_size: kernel of the convolution being mimicked
    :param padding: 'same' (no-op) or 'valid' (shrink by kernel_size - 1)
    :returns: cropped tensor (or `inputs` unchanged for 'same')
    """
    assert padding in {'same', 'valid'}
    if padding == 'same':
        return inputs
    # a valid conv trims kernel//2 leading and (kernel-1)//2 trailing pixels
    crop_h = (kernel_size[0] // 2, (kernel_size[0] - 1) // 2)
    crop_w = (kernel_size[1] // 2, (kernel_size[1] - 1) // 2)
    return keras.layers.Cropping2D(cropping=(crop_h, crop_w),
                                   input_shape=inputs.shape)(inputs)
def conv(inputs,
         filters,
         kernel_shape=[3, 3],
         activation='relu',
         padding='valid',
         kernel_initializer='glorot_normal',
         norm=True,
         mask=None,
         batch_size=None,
         activation_name=None,
         norm_name=None,
         **kwargs):
    """Convolution block: conv (dense or sparse) -> optional BN -> activation.

    The activation is applied *after* batch norm, which is why the conv layer
    itself gets activation=None and use_bias=False.

    :param inputs: input tensor
    :param filters: number of filters or kernels
    :param kernel_shape: convolution kernel size, default 3x3
    :param activation: keras activation to use. Default is 'relu'; pass None
      to skip the activation layer entirely.
    :param padding: 'valid' or 'same'
    :param kernel_initializer: initializer for the conv kernel
    :param norm: whether or not to perform batch normalization on the output
    :param mask: if not None, performs a sparse convolution with this mask.
    :param batch_size: needed for sparse layers. Required if mask is not None
    :param activation_name: layer name for the Activation layer (output names)
    :param norm_name: layer name for the BatchNormalization layer
    Other kwargs (including `name`) are passed to the convolutional layer.
    :returns: output tensor of the block
    """
    if mask is None:
        inputs = keras.layers.Conv2D(
            filters,
            kernel_shape,
            activation=None,
            padding=padding,
            use_bias=False,
            kernel_initializer=kernel_initializer,
            **kwargs)(inputs)
    else:
        # SBNet-style sparse convolution; computes only where mask is active
        inputs = lay.SparseConv2D(
            filters,
            kernel_shape,
            batch_size=batch_size,
            activation=None,
            padding=padding,
            use_bias=False,
            kernel_initializer=kernel_initializer,
            **kwargs)([inputs, mask])
    if norm:
        inputs = keras.layers.BatchNormalization(name=norm_name)(inputs)
    if activation is not None:
        inputs = keras.layers.Activation(activation, name=activation_name)(inputs)
    return inputs
def conv_upsample(inputs,
                  filters,
                  size=2,
                  activation='relu',
                  mask=None,
                  batch_size=None,
                  **kwargs):
    """Upsample the inputs in dimensions 1,2 with a transpose convolution.

    :param inputs: 4D (NHWC) tensor
    :param filters: number of output filters
    :param size: scale by which to upsample. Can be an int or a list of 2
      ints, specifying scale in each direction. Also used as the kernel size
      (with matching strides), so output pixels don't overlap.
    :param activation: relu by default
    :param mask: if not None, use a SparseConv2DTranspose layer.
    :param batch_size: required by the sparse layer when `mask` is given
    Additional kwargs passed to the conv transpose layer.
    :returns: upsampled tensor
    """
    size = utils.listify(size, 2)
    if mask is None:
        inputs = keras.layers.Conv2DTranspose(
            filters, size,
            strides=size,
            padding='same',
            activation=activation,
            use_bias=False,
            **kwargs)(inputs)
    else:
        # sparse variant: only upsamples where the mask is active
        inputs = lay.SparseConv2DTranspose(
            filters,
            size,
            batch_size=batch_size,
            strides=size,
            padding='same',
            activation=activation,
            use_bias=False,
            **kwargs)([inputs, mask])
    return inputs
def upsample(inputs, size=2, interpolation='nearest'):
    """Upsample `inputs` spatially by `size` using simple interpolation.

    :param inputs: 4D (NHWC) tensor
    :param size: int or 2-list of ints to scale the spatial dims by
    :param interpolation: interpolation mode for UpSampling2D
    :returns: upsampled tensor
    """
    resize_layer = keras.layers.UpSampling2D(size, interpolation=interpolation)
    return resize_layer(inputs)
class Builder(type):
    """Metaclass that invokes `build()` on every new instance, after
    __init__ returns but before the constructor call completes."""

    def __call__(cls, *args, **kwargs):
        instance = super().__call__(*args, **kwargs)
        instance.build()
        return instance
class ArtificeModel(metaclass=Builder):
    """A wrapper around keras models.

    If loading an existing model, this class is sufficient, since the save file
    will have the model topology and optimizer. Otherwise, a subclass should
    implement the `forward()` and `compile()` methods, which are called during
    instantiation (by the Builder metaclass, after __init__ finishes). In this
    case, super().__init__() should be called last in the subclass __init__().
    """

    def __init__(self, input_shape, model_dir='.', learning_rate=0.1,
                 overwrite=False):
        """Describe a model using keras' functional API.

        The keras model is assembled and compiled in `build()` (invoked by the
        Builder metaclass), so all other instantiation should be finished here.

        :param input_shape: shape of the model's input tensor (no batch dim)
        :param model_dir: directory to save the model. Default is cwd.
        :param learning_rate: passed to the optimizer in `compile()`
        :param overwrite: prefer to create a new model rather than load an
          existing one in `model_dir`. Note that if a subclass uses
          overwrite=False, then the loaded architecture may differ from the
          stated architecture in the subclass, although the structure of the
          saved model names should prevent this.
        """
        self.input_shape = input_shape
        self.overwrite = overwrite
        self.model_dir = model_dir
        self.learning_rate = learning_rate
        # the snake_cased class name keys every on-disk artifact of this model
        self.name = snakecase(type(self).__name__).lower()
        self.model_path = os.path.join(self.model_dir, f"{self.name}.hdf5")
        self.checkpoint_path = os.path.join(
            self.model_dir, f"{self.name}_ckpt.hdf5")
        self.history_path = os.path.join(
            self.model_dir, f"{self.name}_history.json")

    def build(self):
        """Assemble and compile the model; called by Builder after __init__()."""
        inputs = keras.layers.Input(self.input_shape)
        outputs = self.forward(inputs)
        self.model = keras.Model(inputs, outputs)
        self.compile()
        if not self.overwrite:
            self.load_weights()

    def __str__(self):
        output = f"{self.name}:\n"
        for layer in self.model.layers:
            output += "layer:{} -> {}:{}\n".format(
                layer.input_shape, layer.output_shape, layer.name)
        return output

    @property
    def layers(self):
        return self.model.layers

    def forward(self, inputs):
        """Map input tensor(s) to output tensor(s); defines the topology."""
        raise NotImplementedError("subclasses should implement")

    def compile(self):
        """Compile self.model with an optimizer and loss(es)."""
        raise NotImplementedError("subclasses should implement")

    @property
    def callbacks(self):
        # checkpoint the weights after every epoch (weights only; see the
        # note after save() about include_optimizer)
        return [keras.callbacks.ModelCheckpoint(
            self.checkpoint_path, verbose=1, save_weights_only=True)]

    def load_weights(self, checkpoint_path=None):
        """Update the model weights from the checkpoint file.

        :param checkpoint_path: checkpoint path to use. If not provided, uses
          the class name to construct a checkpoint path.
        """
        if checkpoint_path is None:
            checkpoint_path = self.checkpoint_path
        if os.path.exists(checkpoint_path):
            self.model.load_weights(checkpoint_path, by_name=True)  # todo: by_name?
            logger.info(f"loaded model weights from {checkpoint_path}")
        else:
            logger.info(f"no checkpoint at {checkpoint_path}")

    def save(self, filename=None, overwrite=True):
        """Save the model topology (without optimizer) to `filename`."""
        if filename is None:
            filename = self.model_path
        return keras.models.save_model(self.model, filename, overwrite=overwrite,
                                       include_optimizer=False)
    # todo: would like include_optimizer to be True, but a custom loss
    # function can't be found in the keras library. Look into it during
    # training. For now, we're fine with just weights in the checkpoint file.

    def fit(self, art_data, hist=None, cache=False, **kwargs):
        """Thin wrapper around model.fit(). Preferred method is `train()`.

        :param art_data: ArtificeData set providing `training_input()`
        :param hist: existing history dict. If None, starts from scratch. Use
          `train()` for loading from an existing history file.
        :param cache: cache the dataset.
        :returns: updated history dict (also saved to self.history_path)
        """
        kwargs['callbacks'] = kwargs.get('callbacks', []) + self.callbacks
        new_hist = self.model.fit(art_data.training_input(cache=cache),
                                  steps_per_epoch=art_data.steps_per_epoch,
                                  **kwargs).history
        new_hist = utils.jsonable(new_hist)
        if hist is not None:
            new_hist = _update_hist(hist, new_hist)
        utils.json_save(self.history_path, new_hist)
        return new_hist

    def train(self, art_data, initial_epoch=0, epochs=1, seconds=0,
              **kwargs):
        """Fits the model, saving it along the way, reloading data every epoch.

        :param art_data: ArtificeData set
        :param initial_epoch: epoch that training is starting from
        :param epochs: epoch number to stop at. If -1, training continues forever.
        :param seconds: seconds after which to stop reloading every epoch. If -1,
          reload is never stopped. If 0, dataset is loaded only once, at beginning.
        :returns: history dictionary
        """
        if (initial_epoch > 0
                and os.path.exists(self.history_path)
                and not self.overwrite):
            hist = utils.json_load(self.history_path)
        else:
            hist = {}
        start_time = time()
        epoch = initial_epoch
        # One-epoch fits, reloading the dataset between them, while the reload
        # budget lasts. Bug fix: the old condition
        # `time() - start_time > seconds > 0` was false on the first iteration
        # (elapsed ~ 0) and always false for seconds == -1, so the documented
        # per-epoch reloading never actually happened.
        while epoch != epochs and (seconds < 0 or time() - start_time < seconds):
            logger.info("reloading dataset (not cached)...")
            hist = self.fit(art_data, hist=hist, initial_epoch=epoch,
                            epochs=(epoch + 1), **kwargs)
            epoch += 1
        # finish the remaining epochs in a single fit (dataset loaded once)
        if epoch != epochs:
            hist = self.fit(art_data, hist=hist, initial_epoch=epoch,
                            epochs=epochs, **kwargs)
        self.save()
        return hist

    def predict(self, art_data, multiscale=False):
        """Run prediction, reassembling tiles, with the Artifice data.

        :param art_data: ArtificeData object
        :returns: iterator over predictions
        """
        raise NotImplementedError("subclasses should implement.")

    def predict_visualization(self, art_data):
        """Run prediction, reassembling tiles, intended for visualization.

        Implementation will depend on the model.

        :param art_data: ArtificeData object
        :returns: iterator over (image, field, prediction)
        """
        raise NotImplementedError()

    def predict_outputs(self, art_data):
        """Run prediction for single tile images with the Artifice data.

        Returns the raw outputs, with no prediction. Depends on subclass
        implementation.

        :param art_data: ArtificeData object
        :returns: iterator over (tile, prediction, model_outputs)
        """
        raise NotImplementedError("subclasses should implement")

    def evaluate(self, art_data):
        """Run evaluation for object detection with the ArtificeData object.

        Depends on the structure of the model.

        :param art_data: ArtificeData object
        :returns: `errors, total_num_failed` error matrix and number of
          objects not detected
        :rtype: np.ndarray, int
        """
        # fixed typo in the original message ('implmement')
        raise NotImplementedError('subclasses should implement')

    def uncertainty_on_batch(self, images):
        """Estimate the model's uncertainty for each image.

        :param images: a batch of images
        :returns: "uncertainty" for each image.
        """
        raise NotImplementedError("uncertainty estimates not implemented")
class UNet(ArtificeModel):
    """Multi-scale U-Net for object detection; see __init__ for details."""

    def __init__(self, *, base_shape, level_filters, num_channels, pose_dim,
                 level_depth=2, dropout=0.5, **kwargs):
        """Create a U-Net model for object detection.

        Regresses a distance proxy at every level for multi-scale tracking.
        Model output consists first of the `pose_dim`-channel pose image,
        followed by multi-scale fields from smallest (lowest on the U) to
        largest (original image dimension).

        :param base_shape: the height/width of the output of the first layer in
          the lower level. This determines input and output tile shapes. Can be
          a tuple, specifying different height/width, or a single integer.
        :param level_filters: number of filters at each level (bottom to top).
        :param level_depth: number of layers per level
        :param dropout: dropout to use for concatenations
        :param num_channels: number of channels in the input
        :param pose_dim: number of pose channels regressed per pixel (the pose
          output has 1 + pose_dim channels; channel 0 is a confidence/weight)
        """
        self.base_shape = utils.listify(base_shape, 2)
        self.level_filters = level_filters
        self.num_channels = num_channels
        self.pose_dim = pose_dim
        self.level_depth = level_depth
        self.dropout = dropout
        self.num_levels = len(self.level_filters)
        # tile geometry follows from the level structure: each valid 3x3 conv
        # shrinks each spatial dim by 2, i.e. 2*level_depth per level side
        self.input_tile_shape = self.compute_input_tile_shape()
        self.output_tile_shapes = self.compute_output_tile_shapes()
        self.output_tile_shape = self.output_tile_shapes[-1]
        super().__init__(self.input_tile_shape + [self.num_channels], **kwargs)

    @staticmethod
    def compute_input_tile_shape_(base_shape, num_levels, level_depth):
        """Compute the shape of the input tiles.

        Walks the encoder path in reverse: undo the convolutions of the bottom
        level, then per upper level undo the 2x pooling and the convolutions.

        :param base_shape: shape of the output of the last layer in the
          lower level.
        :param num_levels: number of levels
        :param level_depth: layers per level (per side)
        :returns: shape of the input tiles
        """
        tile_shape = np.array(base_shape)
        tile_shape += 2 * level_depth
        for _ in range(num_levels - 1):
            tile_shape *= 2
            tile_shape += 2 * level_depth
        return list(tile_shape)

    def compute_input_tile_shape(self):
        return self.compute_input_tile_shape_(
            self.base_shape, self.num_levels, self.level_depth)

    @staticmethod
    def compute_output_tile_shape_(base_shape, num_levels, level_depth):
        # walk the decoder path: per upper level, 2x upsample then shrink by
        # the level's valid convolutions
        tile_shape = np.array(base_shape)
        for _ in range(num_levels - 1):
            tile_shape *= 2
            tile_shape -= 2 * level_depth
        return list(tile_shape)

    def compute_output_tile_shape(self):
        return self.compute_output_tile_shape_(
            self.base_shape, self.num_levels, self.level_depth)

    @staticmethod
    def compute_output_tile_shapes_(base_shape, num_levels, level_depth):
        """Compute the shape of the output tiles at every level, bottom to top."""
        shapes = []
        tile_shape = np.array(base_shape)
        shapes.append(list(tile_shape))
        for _ in range(num_levels - 1):
            tile_shape *= 2
            tile_shape -= 2 * level_depth
            shapes.append(list(tile_shape))
        return shapes

    def compute_output_tile_shapes(self):
        return self.compute_output_tile_shapes_(
            self.base_shape, self.num_levels, self.level_depth)

    @staticmethod
    def compute_level_input_shapes_(base_shape, num_levels, level_depth):
        """Compute the shape of the *inputs* to every level, bottom to top."""
        shapes = []
        tile_shape = np.array(base_shape)
        shapes.append(list(tile_shape + 2 * level_depth))
        for _ in range(num_levels - 1):
            tile_shape *= 2
            shapes.append(list(tile_shape))
            tile_shape -= 2 * level_depth
        return shapes

    def _fix_level_index(self, level):
        # normalize negative (python-style) level indices; -1 is the top level
        if level >= 0:
            return level
        l = self.num_levels + level
        if l < 0 or l >= self.num_levels:
            raise ValueError(f"bad level: {level}")
        return l

    def convert_point_between_levels(self, point, level, new_level):
        """Convert len-2 point in the tile-space at `level` to `new_level`.

        Level 0 is the lowest level, by convention. -1 can mean the highest
        (original resolution) level.

        NOTE(review): the in-place *=, += mutate `point` when it is an array,
        and the downward conversion applies /2 *before* subtracting
        level_depth, which is not the exact inverse of the upward conversion
        — confirm this ordering is intended.

        :param point:
        :param level: level to which the point belongs (last layer in that level).
        :param new_level: level of the space to which the point should be converted.
        :returns: the converted point
        """
        level = self._fix_level_index(level)
        new_level = self._fix_level_index(new_level)
        while level < new_level:
            point *= 2
            point += self.level_depth
            level += 1
        while level > new_level:
            point /= 2
            point -= self.level_depth
            level -= 1
        return point

    def convert_distance_between_levels(self, distance, level, new_level):
        # distances just scale by 2 per level difference
        level = self._fix_level_index(level)
        new_level = self._fix_level_index(new_level)
        return distance * 2**(new_level - level)

    @staticmethod
    def pose_loss(pose, pred):
        # channel 0 of the target acts as a per-pixel weight; only the pose
        # channels (1:) are regressed
        return tf.losses.mean_squared_error(pose[:, :, :, 1:],
                                            pred[:, :, :, 1:],
                                            weights=pose[:, :, :, :1])

    def compile(self):
        # weighted MSE on the pose image plus plain MSE on each of the
        # num_levels multi-scale distance outputs
        optimizer = _get_optimizer(self.learning_rate)
        self.model.compile(optimizer=optimizer, loss=[self.pose_loss]
                           + ['mse'] * self.num_levels)

    def forward(self, inputs):
        level_outputs = []
        outputs = []
        # encoder (downsampling) path; level_filters is listed bottom-to-top,
        # hence reversed here
        for level, filters in enumerate(reversed(self.level_filters)):
            for _ in range(self.level_depth):
                inputs = conv(inputs, filters)
            if level < self.num_levels - 1:
                level_outputs.append(inputs)
                inputs = keras.layers.MaxPool2D()(inputs)
            else:
                # bottom of the U: coarsest distance-field output
                outputs.append(conv(inputs, 1, kernel_shape=[1, 1], activation=None,
                                    norm=False, name='output_0'))
        level_outputs = reversed(level_outputs)
        # decoder (upsampling) path with cropped skip connections
        for i, filters in enumerate(self.level_filters[1:]):
            inputs = conv_upsample(inputs, filters)
            cropped = crop(next(level_outputs), inputs.shape)
            dropped = keras.layers.Dropout(rate=self.dropout)(cropped)
            inputs = keras.layers.Concatenate()([dropped, inputs])
            for _ in range(self.level_depth):
                inputs = conv(inputs, filters)
            # per-level distance-field output
            outputs.append(conv(inputs, 1, kernel_shape=[1, 1], activation=None,
                                norm=False, name=f'output_{i+1}'))
        pose_image = conv(
            inputs,
            1 + self.pose_dim,
            kernel_shape=[1, 1],
            activation=None,
            padding='same',
            norm=False,
            name='pose')
        return [pose_image] + outputs

    def predict(self, art_data, multiscale=False):
        """Run prediction, reassembling tiles, with the Artifice data."""
        if tf.executing_eagerly():
            outputs = []
            for i, batch in enumerate(art_data.prediction_input()):
                if i % 100 == 0:
                    logger.info(f"batch {i} / {art_data.steps_per_epoch}")
                outputs += _unbatch_outputs(self.model.predict_on_batch(batch))
                # emit one prediction per full image's worth of tiles
                while len(outputs) >= art_data.num_tiles:
                    prediction = art_data.analyze_outputs(outputs, multiscale=multiscale)
                    yield prediction
                    del outputs[:art_data.num_tiles]
        else:
            raise NotImplementedError(
                "enable eager execution for eval (remove --patient)")
            # NOTE(review): everything below is unreachable (dead) code — a
            # stashed graph-mode implementation kept for reference.
            outputs = []
            next_batch = (art_data
                          .prediction_input()
                          .make_one_shot_iterator()
                          .get_next())
            with tf.Session() as sess:
                sess.run(tf.global_variables_initializer())
                for i in itertools.count():
                    try:
                        batch = sess.run(next_batch)
                    except tf.errors.OutOfRangeError:
                        return
                    if i % 100 == 0:
                        logger.info(f"batch {i} / {art_data.steps_per_epoch}")
                    outputs += _unbatch_outputs(self.model.predict_on_batch(batch))
                    while len(outputs) >= art_data.num_tiles:
                        prediction = art_data.analyze_outputs(
                            outputs, multiscale=multiscale)
                        yield prediction
                        del outputs[:art_data.num_tiles]

    def predict_visualization(self, art_data):
        """Run prediction, reassembling tiles, yielding visualizable triples
        (image, dist_image, prediction)."""
        if tf.executing_eagerly():
            tiles = []
            dist_tiles = []
            outputs = []
            # strip the tiling pad so untiled images match the originals
            p = art_data.image_padding()
            for batch in art_data.prediction_input():
                tiles += [tile[p[0][0]:, p[1][0]:] for tile in list(batch)]
                new_outputs = _unbatch_outputs(self.model.predict_on_batch(batch))
                outputs += new_outputs
                # last model output is the full-resolution distance field
                dist_tiles += [output[-1] for output in new_outputs]
                while len(outputs) >= art_data.num_tiles:
                    image = art_data.untile(tiles[:art_data.num_tiles])
                    dist_image = art_data.untile(dist_tiles[:art_data.num_tiles])
                    prediction = art_data.analyze_outputs(outputs)
                    yield (image, dist_image, prediction)
                    del outputs[:art_data.num_tiles]
                    del tiles[:art_data.num_tiles]
                    del dist_tiles[:art_data.num_tiles]
        else:
            raise NotImplementedError("patient prediction")

    def predict_outputs(self, art_data):
        """Run prediction for single tiles images with the Artifice data."""
        # temporarily treat every tile as its own image
        num_tiles = art_data.num_tiles
        art_data.num_tiles = 1
        if tf.executing_eagerly():
            tiles = []
            outputs = []
            p = art_data.image_padding()
            for batch in art_data.prediction_input():
                tiles += [tile[p[0][0]:, p[1][0]:] for tile in list(batch)]
                outputs += _unbatch_outputs(self.model.predict_on_batch(batch))
                while outputs:
                    tile = art_data.untile(tiles[:1])
                    yield (tile, outputs[0])  # todo: del line
                    del outputs[0]
                    del tiles[0]
        else:
            raise NotImplementedError("patient prediction")
        art_data.num_tiles = num_tiles

    def evaluate(self, art_data, multiscale=False):
        """Runs evaluation for UNet, comparing detections to labels.

        :returns: (errors array, number of objects not detected)
        """
        if tf.executing_eagerly():
            tile_labels = []
            errors = []
            outputs = []
            total_num_failed = 0
            for i, (batch_tiles, batch_labels) in enumerate(
                    art_data.evaluation_input()):
                if i % 10 == 0:
                    logger.info(f"evaluating batch {i} / {art_data.steps_per_epoch}")
                tile_labels += list(batch_labels)
                outputs += _unbatch_outputs(self.model.predict_on_batch(batch_tiles))
                while len(outputs) >= art_data.num_tiles:
                    label = art_data.untile_points(tile_labels[:art_data.num_tiles])
                    prediction = art_data.analyze_outputs(outputs, multiscale=multiscale)
                    error, num_failed = dat.evaluate_prediction(label, prediction)
                    total_num_failed += num_failed
                    # rows with a negative first entry mark failed detections
                    errors += error[error[:, 0] >= 0].tolist()
                    del tile_labels[:art_data.num_tiles]
                    del outputs[:art_data.num_tiles]
        else:
            raise NotImplementedError("evaluation on patient execution")
        return np.array(errors), total_num_failed

    def uncertainty_on_batch(self, images):
        """Estimate the model's uncertainty for each image."""
        batch_outputs = _unbatch_outputs(self.model.predict_on_batch(images))
        confidences = np.empty(len(batch_outputs), np.float32)
        for i, outputs in enumerate(batch_outputs):
            detections = dat.multiscale_detect_peaks(outputs[1:])
            # mean pose output (channel image 0) at each detected peak
            confidences[i] = np.mean([outputs[0][x, y] for x, y in detections])
        return 1 - confidences
class SparseUNet(UNet):
    """UNet variant whose decoder uses SBNet-style sparse convolutions,
    computing only in blocks where the previous level's output mask exceeds
    `tol`."""

    def __init__(self, *, batch_size=None, block_size=[8, 8], tol=0.5, **kwargs):
        """Create a UNet-like architecture using multi-scale tracking.

        NOTE(review): `block_size=[8, 8]` is a mutable default argument;
        `utils.listify` presumably returns a fresh list — confirm, otherwise
        instances could share (and mutate) the default.

        :param batch_size: determines whether variables will be used in sparse
          layers for the scatter operation.
        :param block_size: width/height of the blocks used for sparsity, at
          the scale of the original resolution; rescaled at each level.
        :param tol: absolute threshold value for sbnet attention.
        """
        super().__init__(**kwargs)
        self.batch_size = batch_size
        self.block_size = utils.listify(block_size, 2)
        self.tol = tol

    def forward(self, inputs):
        # sparse layers need a fully-known batch dimension
        if self.batch_size is not None:
            inputs.set_shape([self.batch_size] + list(inputs.shape)[1:])
        level_outputs = []
        outputs = []
        # encoder path is dense, identical to UNet.forward
        for level, filters in enumerate(reversed(self.level_filters)):
            for _ in range(self.level_depth):
                inputs = conv(inputs, filters)
            if level < self.num_levels - 1:
                level_outputs.append(inputs)
                inputs = keras.layers.MaxPool2D()(inputs)
            else:
                # bottom-level distance output doubles as the sparsity mask
                mask = conv(inputs, 1, kernel_shape=[1, 1], activation=None,
                            norm=False, name='output_0')
                outputs.append(mask)
        level_outputs = reversed(level_outputs)
        # decoder path: every conv is sparse, gated by the running mask
        for i, filters in enumerate(self.level_filters[1:]):
            inputs = conv_upsample(inputs, filters, mask=mask, tol=self.tol,
                                   block_size=self.block_size,
                                   batch_size=self.batch_size)
            # keep the mask at the same resolution as the features
            mask = upsample(mask, size=2, interpolation='nearest')
            cropped = crop(next(level_outputs), inputs.shape)
            dropped = keras.layers.Dropout(rate=self.dropout)(cropped)
            inputs = keras.layers.Concatenate()([dropped, inputs])
            for _ in range(self.level_depth):
                inputs = conv(inputs, filters, mask=mask, tol=self.tol,
                              block_size=self.block_size,
                              batch_size=self.batch_size)
                # each valid conv shrinks the features; shrink the mask to match
                mask = _crop_like_conv(mask)
            # this level's distance output becomes the next level's mask
            mask = conv(
                inputs,
                1,
                kernel_shape=[1, 1],
                activation=None,
                norm=False,
                padding='same',
                mask=mask,
                tol=self.tol,
                block_size=self.block_size,
                batch_size=self.batch_size,
                name=f'output_{i+1}')
            outputs.append(mask)
        pose_image = conv(
            inputs,
            1 + self.pose_dim,
            kernel_shape=[1, 1],
            activation=None,
            padding='same',
            norm=False,
            mask=mask,
            block_size=self.block_size,
            tol=self.tol,
            batch_size=self.batch_size,
            name='pose')
        outputs = [pose_image] + outputs
        return outputs
class BetterSparseUNet(SparseUNet):
    """An extension of the SparseUNet that foregoes repeated gather and scatter
    operations. Although it uses the same inputs and outputs as the other models,
    it only uses the distance output of the first (lowest) and last level,
    outputting empty tensors (or zeros) at the other levels, not used in the
    loss.

    Uses the block_size as the base_shape for computing what is essentially a
    more dynamic UNet, with a second tiling phase. The block stride is computed
    based on the output block size.
    """

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        if self.batch_size is None:
            # todo: make this not required?
            raise ValueError(
                f'SparseScatter layer requires the batch size.')
        # per-level input tile sizes, used to crop skip connections
        self.level_input_tile_sizes = self.compute_level_input_shapes_(
            self.base_shape, self.num_levels, self.level_depth)
        # block sizes: gathered input blocks are 2x the base block; the
        # level's valid convs then shrink them by 2*level_depth
        self.level_input_block_size = list(2 * np.array(self.block_size))
        self.level_output_block_size = list(
            np.array(self.level_input_block_size) - 2 * self.level_depth)
        # block strides
        assert self.level_depth % 2 == 0, 'must have even level_depth for strides'
        self.block_stride = list(np.array(self.block_size) - self.level_depth // 2)
        self.level_input_block_stride = list(2 * np.array(self.block_stride))
        # output blocks tile exactly (stride == size)
        self.level_output_block_stride = self.level_output_block_size

    @staticmethod
    def compute_level_input_block_strides_(input_block_stride,
                                           num_levels,
                                           level_depth):
        """Compute the block stride at the input to every level.

        The stride doubles with each level's 2x upsampling.
        """
        strides = []
        stride = np.array(input_block_stride)
        strides.append(list(stride))
        for _ in range(num_levels - 1):
            stride *= 2
            strides.append(list(stride))
        return strides

    def forward(self, inputs):
        level_outputs = []
        outputs = []
        # dense encoder path, identical to UNet.forward
        for level, filters in enumerate(reversed(self.level_filters)):
            for _ in range(self.level_depth):
                inputs = conv(inputs, filters)
            if level < self.num_levels - 1:
                level_outputs.append(inputs)
                inputs = keras.layers.MaxPool2D()(inputs)
            else:
                mask = conv(inputs, 1, kernel_shape=[1, 1], activation=None,
                            norm=False, name='output_0')
                outputs.append(mask)
        # sparsify based on the first mask; reused for every decoder level
        bin_counts, active_block_indices = lay.ReduceMask(
            block_size=self.block_size,
            block_stride=self.block_stride,
            tol=self.tol)(mask)
        level_outputs = reversed(level_outputs)
        # decoder: gather active blocks once per level, process them densely
        # as a batch of small tiles, and scatter back
        for i, filters in enumerate(self.level_filters[1:]):
            level = i + 1
            blocks = lay.SparseGather(
                block_size=self.block_size,
                block_stride=self.block_stride)(
                    [inputs, bin_counts, active_block_indices])
            blocks = conv_upsample(blocks, filters)
            # gather the matching skip-connection blocks at 2x geometry
            level_output = next(level_outputs)
            cropped = crop(level_output, size=self.level_input_tile_sizes[level])
            dropped = keras.layers.Dropout(rate=self.dropout)(cropped)
            blocked = lay.SparseGather(
                block_size=self.level_input_block_size,
                block_stride=self.level_input_block_stride)(
                    [dropped, bin_counts, active_block_indices])
            blocks = keras.layers.concatenate([blocked, blocks])
            for j in range(self.level_depth):
                blocks = conv(blocks, filters)
            if level == self.num_levels - 1:
                # make the blocks for the pose_image
                blocks = conv(
                    blocks,
                    1 + self.pose_dim,
                    kernel_shape=[1, 1],
                    activation=None,
                    padding='same',
                    norm=False)
                name = 'pose'
            else:
                name = None
            inputs = lay.SparseScatter(
                [self.batch_size] + self.output_tile_shapes[level] + [blocks.shape[3]],
                block_size=self.level_output_block_size,
                block_stride=self.level_output_block_stride,
                name=name)(
                    [blocks, bin_counts, active_block_indices])
            # convolve the level's mask at full size, use it to gather the next level
            mask_blocks = conv(
                blocks,
                1,
                kernel_shape=[1, 1],
                activation=None,
                norm=False,
                padding='same')
            mask = lay.SparseScatter(
                [self.batch_size] + self.output_tile_shapes[level] + [1],
                block_size=self.level_output_block_size,
                block_stride=self.level_output_block_stride,
                name=f'output_{level}')(
                    [mask_blocks, bin_counts, active_block_indices])
            outputs.append(mask)
        # `inputs` is now the scattered pose image from the last level
        outputs = [inputs] + outputs
        return outputs
class AutoSparseUNet(BetterSparseUNet):
    """An extension of the BetterSparseUNet that doesn't use the distance proxy
    but rather a learned mask for each level.

    Each level's mask is produced with a relu activation and trained with a
    sparsity loss, and the active block set is re-derived from the new mask
    for the next level.

    :param gamma: sparsity loss coefficient.
    """

    def __init__(self, *args, gamma=0.01, **kwargs):
        """AutoSparseUNet.

        :param gamma: sparsity loss coefficient (stored; not referenced
          elsewhere in this class's visible code).
        """
        super().__init__(*args, **kwargs)
        self.gamma = gamma

    # note: a byte-identical duplicate of the inherited
    # compute_level_input_block_strides_ staticmethod was removed here.

    @staticmethod
    def sparsity_loss(_, mask):
        """Choice of sparsity measure: l2/l1 ratio (smaller is sparser).

        Alternatives considered are kept below for reference.
        """
        # Gausian entropy: -inf at 0
        # return tf.reduce_sum(tf.log(tf.square(mask)))
        # Hoyer:
        # sqrt_N = tf.sqrt(tf.cast(tf.size(mask), tf.float32))
        # sos = tf.reduce_sum(tf.square(mask))
        # num = sqrt_N - sos / tf.sqrt(sos)
        # den = sqrt_N - 1
        # return num / den
        # l_2 / l_1
        return tf.norm(mask, ord=2) / tf.norm(mask, ord=1)

    def compile(self):
        """Weighted pose loss plus a sparsity loss on every mask output."""
        optimizer = _get_optimizer(self.learning_rate)
        self.model.compile(
            optimizer=optimizer,
            loss=[self.pose_loss] + [self.sparsity_loss] * self.num_levels,
            loss_weights=[1.] + [1. / self.num_levels] * self.num_levels)

    # todo: resolve similarities between this and other forward functions
    def forward(self, inputs):
        level_outputs = []
        outputs = []
        # dense encoder path
        for level, filters in enumerate(reversed(self.level_filters)):
            for _ in range(self.level_depth):
                inputs = conv(inputs, filters)
            if level < self.num_levels - 1:
                level_outputs.append(inputs)
                inputs = keras.layers.MaxPool2D()(inputs)
            else:
                # learned attention mask at the bottom level; relu keeps it
                # non-negative for the sparsity loss
                mask = conv(inputs,
                            1,
                            kernel_shape=[1, 1],
                            activation='relu',
                            norm=False,
                            activation_name='output_0')
                outputs.append(mask)
        # sparsify based on the first mask
        bin_counts, active_block_indices = lay.ReduceMask(
            block_size=self.block_size,
            block_stride=self.block_stride,
            tol=self.tol)(mask)
        level_outputs = reversed(level_outputs)
        # decoder: gather active blocks, process densely, scatter back
        for i, filters in enumerate(self.level_filters[1:]):
            level = i + 1
            blocks = lay.SparseGather(
                block_size=self.block_size,
                block_stride=self.block_stride)(
                    [inputs, bin_counts, active_block_indices])
            blocks = conv_upsample(blocks, filters)
            level_output = next(level_outputs)
            cropped = crop(level_output, size=self.level_input_tile_sizes[level])
            dropped = keras.layers.Dropout(rate=self.dropout)(cropped)
            blocked = lay.SparseGather(
                block_size=self.level_input_block_size,
                block_stride=self.level_input_block_stride)(
                    [dropped, bin_counts, active_block_indices])
            blocks = keras.layers.concatenate([blocked, blocks])
            for j in range(self.level_depth):
                blocks = conv(blocks, filters)
            if level == self.num_levels - 1:
                # make the blocks for the pose_image
                blocks = conv(
                    blocks,
                    1 + self.pose_dim,
                    kernel_shape=[1, 1],
                    activation=None,
                    padding='same',
                    norm=False)
                name = 'pose'
            else:
                name = None
            inputs = lay.SparseScatter(
                [self.batch_size] + self.output_tile_shapes[level] + [blocks.shape[3]],
                block_size=self.level_output_block_size,
                block_stride=self.level_output_block_stride,
                name=name)(
                    [blocks, bin_counts, active_block_indices])
            # convolve the level's mask at full size, use it to gather the next level
            mask_blocks = conv(
                blocks,
                1,
                kernel_shape=[1, 1],
                activation='relu',
                norm=False,
                padding='same')
            mask = lay.SparseScatter(
                [self.batch_size] + self.output_tile_shapes[level] + [1],
                block_size=self.level_output_block_size,
                block_stride=self.level_output_block_stride,
                name=f'output_{level}')(
                    [mask_blocks, bin_counts, active_block_indices])
            outputs.append(mask)
            # Bug fix: this assignment previously targeted a misspelled
            # `bin_couns`, so the refreshed bin counts were discarded while
            # active_block_indices was updated, leaving the two out of sync
            # for the next level's gathers and scatters.
            bin_counts, active_block_indices = lay.ReduceMask(
                block_size=self.block_size,
                block_stride=self.block_stride,
                tol=self.tol)(mask)
        outputs = [inputs] + outputs
        return outputs
| {"/artifice/sharedobjects/__init__.py": ["/artifice/sharedobjects/shared.py"], "/artifice/tform.py": ["/artifice/__init__.py"], "/artifice/mod.py": ["/artifice/__init__.py"], "/artifice/lay.py": ["/artifice/__init__.py"], "/artifice/sparse/sparse.py": ["/artifice/__init__.py", "/artifice/sparse/__init__.py"], "/scripts/arcing_sphere.py": ["/artifice/utils.py"], "/scripts/two_spheres.py": ["/artifice/utils.py"], "/test_utils/experiment.py": ["/artifice/utils.py", "/artifice/__init__.py"], "/artifice/conversions.py": ["/artifice/__init__.py"], "/test_utils/springs.py": ["/artifice/__init__.py"], "/test_utils/annotate.py": ["/artifice/utils.py"], "/artifice/sparse/sparse_lib.py": ["/artifice/__init__.py"], "/artifice/windows/__init__.py": ["/artifice/windows/annotator_window.py"], "/artifice/dat.py": ["/artifice/__init__.py"], "/artifice/prio.py": ["/artifice/__init__.py"], "/artifice/vis.py": ["/artifice/__init__.py"], "/artifice/main.py": ["/artifice/__init__.py"], "/artifice/sparse/__init__.py": ["/artifice/sparse/sparse.py"], "/artifice/ann.py": ["/artifice/sharedobjects/__init__.py", "/artifice/__init__.py"]} |
59,625 | zhuokaizhao/artifice | refs/heads/master | /artifice/__init__.py | from .logger import logger, set_verbosity
__all__ = [logger, set_verbosity]
| {"/artifice/sharedobjects/__init__.py": ["/artifice/sharedobjects/shared.py"], "/artifice/tform.py": ["/artifice/__init__.py"], "/artifice/mod.py": ["/artifice/__init__.py"], "/artifice/lay.py": ["/artifice/__init__.py"], "/artifice/sparse/sparse.py": ["/artifice/__init__.py", "/artifice/sparse/__init__.py"], "/scripts/arcing_sphere.py": ["/artifice/utils.py"], "/scripts/two_spheres.py": ["/artifice/utils.py"], "/test_utils/experiment.py": ["/artifice/utils.py", "/artifice/__init__.py"], "/artifice/conversions.py": ["/artifice/__init__.py"], "/test_utils/springs.py": ["/artifice/__init__.py"], "/test_utils/annotate.py": ["/artifice/utils.py"], "/artifice/sparse/sparse_lib.py": ["/artifice/__init__.py"], "/artifice/windows/__init__.py": ["/artifice/windows/annotator_window.py"], "/artifice/dat.py": ["/artifice/__init__.py"], "/artifice/prio.py": ["/artifice/__init__.py"], "/artifice/vis.py": ["/artifice/__init__.py"], "/artifice/main.py": ["/artifice/__init__.py"], "/artifice/sparse/__init__.py": ["/artifice/sparse/sparse.py"], "/artifice/ann.py": ["/artifice/sharedobjects/__init__.py", "/artifice/__init__.py"]} |
59,626 | zhuokaizhao/artifice | refs/heads/master | /artifice/lay.py | import numpy as np
import tensorflow as tf
from tensorflow import keras
from tensorflow.python.ops import array_ops
from artifice.log import logger # noqa
from artifice import utils
from artifice import sparse
from artifice import conv_utils
from artifice import img, vis
NEG_INF = np.finfo(np.float32).min  # padding value: border pixels are compared against -inf neighbors
class PeakDetection(keras.layers.Layer):
  """Finds local maxima in each channel of the image.

  Does this pretty crudely by comparing each pixel with all of those <= 2 units
  away. That is, (i,j) is a local max if inputs[i,j] is greater than the pixels
  marked x shown below:

  |---|---|---|---|---|---|---|
  |   |   |   |   |   |   |   |
  |---|---|---|---|---|---|---|
  |   |   |   | x |   |   |   | 0
  |---|---|---|---|---|---|---|
  |   |   | x | x | x |   |   | 1
  |---|---|---|---|---|---|---|
  |   | x | x |i,j| x | x |   | 2
  |---|---|---|---|---|---|---|
  |   |   | x | x | x |   |   | 3
  |---|---|---|---|---|---|---|
  |   |   |   | x |   |   |   | 4
  |---|---|---|---|---|---|---|
  |   |   |   |   |   |   |   | 5
  |---|---|---|---|---|---|---|
        0   1   2   3   4   5

  We consider pixels near the edge to be local maxima if they satisfy the
  above, assuming marked positions outside the image domain are at -inf.
  In cases of ties, both points are returned.

  Notes:
  * Loop indices work out like so:
    0 => 2, 2+1
    1 => 1, 3+1
    2 => 0, 4+1
    3 => 1, 3+1
    4 => 2, 2+1

  :param threshold_abs: if given, a pixel must additionally exceed this
      absolute value to count as a peak.
  """
  def __init__(self, threshold_abs=None, **kwargs):
    self.threshold_abs = threshold_abs
    super().__init__(**kwargs)

  def compute_output_shape(self, input_shape):
    """`[num_peaks, rank]`: one row of coordinates per detected peak."""
    return (None, len(input_shape))

  def call(self, inputs):
    """Return the coordinates of local maxima in each channel of `inputs`.

    :param inputs: `[N, H, W, C]` image batch.
    :returns: `[num_peaks, 4]` int64 tensor of `(n, i, j, c)` coordinates,
        as produced by `tf.where`.
    """
    # Pad with -inf so border pixels can still qualify as maxima.
    padded = tf.pad(inputs, [[0, 0], [2, 2], [2, 2], [0, 0]],
                    constant_values=NEG_INF)
    # Bug fix: use tf.ones_like rather than np.ones_like so the layer also
    # works on symbolic tensors in graph mode (np.ones_like cannot consume
    # a placeholder, and tf.bool is not a numpy dtype).
    mask = tf.ones_like(inputs, dtype=tf.bool)
    # AND together comparisons against the diamond neighborhood of radius 2
    # sketched in the class docstring (the center comparison is x >= x,
    # always true, and harmless).
    for di in range(5):
      start = abs(di - 2)
      stop = -abs(di - 2) + 4
      for dj in range(start, stop + 1):
        mask = tf.logical_and(
            mask,
            inputs >= padded[:, di: di + inputs.shape[1],
                             dj: dj + inputs.shape[2], :])
    if self.threshold_abs is not None:
      mask = tf.logical_and(mask, inputs > tf.constant(
          self.threshold_abs, tf.float32))
    return tf.where(mask)
class SparseConv2D(keras.layers.Layer):
  """2D convolution using the sbnet library.

  The input to this layer should therefore be a list of tensors `[inputs,
  mask]` where `mask` has shape `[N, W, H, 1]`.

  In theory, additional performance gain can be achieved by making inputs a
  tf.Variable. We have not tested this.

  :param filters: number of output channels
  :param kernel_size: int or length-2 kernel size
  :param batch_size: if provided, allows SparseConv2D to use sparse_scatter_var
  (assuming eager execution is not enabled)
  :param strides: int or length-2 strides
  :param padding: 'valid' or 'same' (applied manually via ZeroPadding2D)
  :param activation: activation name/callable, wrapped in an Activation layer
  :param use_bias:
  :param kernel_initializer:
  :param bias_initializer:
  :param kernel_regularizer:
  :param bias_regularizer:
  :param kernel_constraint:
  :param bias_constraint:
  :param block_size: `[BSZH, BSZW]` size of gathered blocks
  :param tol: pooling threshold for a block to count as active
  :param avgpool: use average instead of max pooling in reduce_mask
  """
  def __init__(self,
               filters,
               kernel_size,
               batch_size=None,  # todo: replace with a use_var option
               strides=[1, 1],
               padding='valid',
               activation=None,
               use_bias=True,
               kernel_initializer='glorot_uniform',
               bias_initializer='zeros',
               kernel_regularizer=None,
               bias_regularizer=None,
               kernel_constraint=None,
               bias_constraint=None,
               block_size=[16, 16],
               tol=0.5,
               avgpool=False,
               **kwargs):
    super().__init__(**kwargs)
    self.filters = filters
    self.kernel_size = utils.listify(kernel_size, 2)
    self.batch_size = batch_size
    # The scatter-into-variable fast path needs a fixed batch size and
    # graph mode.
    self.use_var = batch_size is not None and not tf.executing_eagerly()
    self.strides = utils.listify(strides, 2)
    self.padding = padding
    # NOTE: Activation(None) is the identity, so self.activation is never
    # None even when no activation was requested.
    self.activation = keras.layers.Activation(activation)
    self.use_bias = use_bias
    self.kernel_initializer = kernel_initializer
    self.bias_initializer = bias_initializer
    self.kernel_regularizer = kernel_regularizer
    self.bias_regularizer = bias_regularizer
    self.kernel_constraint = kernel_constraint
    self.bias_constraint = bias_constraint
    # Input blocks are gathered at block_size; after a VALID convolution
    # each block shrinks to output_block_size, which also serves as the
    # stride between blocks on both sides.
    self.block_size = utils.listify(block_size, 2)
    self.output_block_size = [conv_utils.conv_output_length(
        self.block_size[i],
        self.kernel_size[i],
        'valid',
        self.strides[i]) for i in [0, 1]]
    self.block_offset = [0, 0]
    self.output_block_offset = self.block_offset
    self.block_stride = self.output_block_size
    self.output_block_stride = self.output_block_size
    self.tol = tol
    self.avgpool = avgpool
    if self.padding == 'valid':
      pad_size = [0, 0]
    else:
      # 'same' padding is applied to the full image up front; the blocks
      # themselves are always convolved with VALID padding.  Asymmetric
      # for even kernel sizes.
      pad_h = self.kernel_size[0] // 2
      pad_w = (self.kernel_size[1] - 1) // 2
      pad_size = [pad_h, pad_w]
    self.pad = keras.layers.ZeroPadding2D(pad_size)

  def build(self, input_shape):
    # input_shape is a pair: [inputs shape, mask shape].
    input_shape, mask_shape = input_shape
    # Number of blocks along each spatial dimension.
    self.block_count = [utils.divup(input_shape[1], self.block_stride[0]),
                        utils.divup(input_shape[2], self.block_stride[1])]
    if len(input_shape) != 4:
      raise ValueError(f'Inputs should have rank 4. Received input shape: '
                       f'{input_shape}')
    if input_shape[3] is None:
      raise ValueError('The channel dimension of the inputs '
                       'should be defined. Found `None`.')
    input_dim = int(input_shape[3])
    kernel_shape = self.kernel_size + [input_dim, self.filters]
    self.kernel = self.add_weight(
        name='kernel',
        shape=kernel_shape,
        dtype=tf.float32,
        initializer=self.kernel_initializer,
        regularizer=self.kernel_regularizer,
        constraint=self.kernel_constraint,
        trainable=True)
    if self.use_bias:
      self.bias = self.add_weight(
          name='bias',
          shape=(self.filters,),
          dtype=tf.float32,
          initializer=self.bias_initializer,
          regularizer=self.bias_regularizer,
          constraint=self.bias_constraint,
          trainable=True)
    else:
      self.bias = None
    if self.use_var:
      # Preallocate the output buffer so sparse_scatter_var can write into
      # it in place (requires the fixed batch_size given at construction).
      output_shape = list(self.compute_output_shape([input_shape, mask_shape]))
      self.outputs = self.add_variable(
          name='outputs',
          shape=[self.batch_size] + output_shape[1:],
          dtype=tf.float32,
          initializer='zeros',
          trainable=False,
          use_resource=False)

  def compute_output_shape(self, input_shape):
    # The mask shape does not influence the output shape.
    input_shape, mask_shape = input_shape
    shape = conv_utils.conv_output_shape(
        input_shape,
        self.filters,
        self.kernel_size,
        self.padding,
        self.strides)
    return tf.TensorShape(shape)

  def call(self, inputs):
    # inputs is a pair [inputs, mask]; mask selects the active blocks.
    inputs, mask = inputs
    if self.use_var:
      # Zero the persistent output variable before scattering into it.
      self.outputs.assign(tf.zeros_like(self.outputs))
      outputs = self.outputs
    else:
      # Dynamic batch size: build a fresh zero tensor each call.
      output_shape = list(
          self.compute_output_shape([inputs.shape, mask.shape]))
      batch_size = array_ops.shape(inputs)[0]
      outputs = tf.zeros([batch_size] + output_shape[1:], tf.float32)
    if self.padding == 'same':
      inputs = self.pad(inputs)
      mask = self.pad(mask)
    # Reduce the mask to the set of active block indices ...
    indices = sparse.reduce_mask(
        mask,
        block_count=self.block_count,
        bsize=self.block_size,
        boffset=self.block_offset,
        bstride=self.block_stride,
        tol=self.tol,
        avgpool=self.avgpool)
    # ... gather those blocks into a [nBlocks, BSZH, BSZW, C] stack ...
    blocks = sparse.gather(
        inputs,
        indices.bin_counts,
        indices.active_block_indices,
        bsize=self.block_size,
        boffset=self.block_offset,
        bstride=self.block_stride)
    # ... run a dense VALID convolution over the stack ...
    strides = [1, self.strides[0], self.strides[1], 1]
    blocks = tf.nn.conv2d(
        blocks,
        self.kernel,
        strides=strides,
        padding='VALID')
    if self.use_bias:
      blocks = tf.nn.bias_add(blocks, self.bias, data_format='NHWC')
    if self.activation is not None:
      blocks = self.activation(blocks)
    # ... and scatter the shrunken blocks back onto the output tensor.
    outputs = sparse.scatter(
        blocks,
        indices.bin_counts,
        indices.active_block_indices,
        outputs,
        bsize=self.output_block_size,
        boffset=self.output_block_offset,
        bstride=self.output_block_stride,
        use_var=self.use_var)
    if self.use_var:
      # Relax the batch dimension so downstream layers accept any batch.
      outputs.set_shape([None] + outputs.shape.as_list()[1:])
    return outputs
class SparseConv2DTranspose(keras.layers.Layer):
  """2D transpose convolution using the sbnet library.

  Takes `[inputs, mask]` like SparseConv2D; only `padding='same'` is
  implemented (see `call`).

  :param filters: number of output channels
  :param kernel_size: int or length-2 kernel size
  :param batch_size: needed to allocate space for outputs, if using a variable
  :param strides: int or length-2 strides
  :param padding: only 'same' is supported at call time
  :param activation: activation name/callable, wrapped in an Activation layer
  :param use_bias:
  :param kernel_initializer:
  :param bias_initializer:
  :param kernel_regularizer:
  :param bias_regularizer:
  :param kernel_constraint:
  :param bias_constraint:
  :param block_size: `[BSZH, BSZW]` size of gathered blocks
  :param tol: pooling threshold for a block to count as active
  :param avgpool: use average instead of max pooling in reduce_mask
  """
  def __init__(self,
               filters,
               kernel_size,
               batch_size,
               strides=[1, 1],
               padding='valid',
               activation=None,
               use_bias=True,
               kernel_initializer='glorot_uniform',
               bias_initializer='zeros',
               kernel_regularizer=None,
               bias_regularizer=None,
               kernel_constraint=None,
               bias_constraint=None,
               block_size=[16, 16],
               tol=0.5,
               avgpool=False,
               **kwargs):
    super().__init__(**kwargs)
    self.filters = filters
    self.kernel_size = utils.listify(kernel_size, 2)
    self.batch_size = batch_size
    # Variable-based scatter only works in graph mode with a known batch.
    self.use_var = batch_size is not None and not tf.executing_eagerly()
    self.strides = utils.listify(strides, 2)
    self.padding = padding
    # NOTE: Activation(None) is the identity, never None.
    self.activation = keras.layers.Activation(activation)
    self.use_bias = use_bias
    self.kernel_initializer = kernel_initializer
    self.bias_initializer = bias_initializer
    self.kernel_regularizer = kernel_regularizer
    self.bias_regularizer = bias_regularizer
    self.kernel_constraint = kernel_constraint
    self.bias_constraint = bias_constraint
    # Transpose convolution grows each block from block_size to
    # output_block_size.
    self.block_size = utils.listify(block_size, 2)
    self.output_block_size = [conv_utils.deconv_output_length(
        self.block_size[i],
        self.kernel_size[i],
        'valid',
        stride=self.strides[i]) for i in [0, 1]]
    self.block_offset = [0, 0]
    self.output_block_offset = self.block_offset
    self.block_stride = self.block_size
    self.output_block_stride = self.output_block_size  # might not be correct
    self.tol = tol
    self.avgpool = avgpool

  def build(self, input_shape):
    # input_shape is a pair: [inputs shape, mask shape].
    input_shape, mask_shape = input_shape
    # Number of blocks along each spatial dimension.
    self.block_count = [utils.divup(input_shape[1], self.block_stride[0]),
                        utils.divup(input_shape[2], self.block_stride[1])]
    if len(input_shape) != 4:
      raise ValueError(f'Inputs should have rank 4. Received input shape: '
                       f'{input_shape}')
    if input_shape[3] is None:
      raise ValueError('The channel dimension of the inputs '
                       'should be defined. Found `None`.')
    input_dim = int(input_shape[3])
    # Transpose-conv kernels are laid out [h, w, out_channels, in_channels].
    kernel_shape = self.kernel_size + [self.filters, input_dim]
    self.kernel = self.add_weight(
        name='kernel',
        shape=kernel_shape,
        initializer=self.kernel_initializer,
        regularizer=self.kernel_regularizer,
        constraint=self.kernel_constraint,
        trainable=True,
        dtype=tf.float32)
    if self.use_bias:
      self.bias = self.add_weight(
          name='bias',
          shape=(self.filters,),
          initializer=self.bias_initializer,
          regularizer=self.bias_regularizer,
          constraint=self.bias_constraint,
          trainable=True,
          dtype=tf.float32)
    else:
      self.bias = None
    if self.use_var:
      # Preallocate the output buffer for sparse_scatter_var.
      output_shape = self.compute_output_shape([input_shape, mask_shape])
      self.outputs = self.add_variable(
          name='outputs',
          shape=[self.batch_size] + list(output_shape)[1:],
          dtype=tf.float32,
          initializer='zeros',
          trainable=False,
          use_resource=False)

  def compute_output_shape(self, input_shape):
    # The mask shape does not influence the output shape.
    input_shape, mask_shape = input_shape
    shape = conv_utils.deconv_output_shape(
        input_shape,
        self.filters,
        self.kernel_size,
        self.padding,
        self.strides)
    return tf.TensorShape(shape)

  def call(self, inputs):
    inputs, mask = inputs
    if self.padding == 'valid':
      raise NotImplementedError('valid padding for transpose convolution')
    # Reduce the mask to active block indices and gather those blocks.
    indices = sparse.reduce_mask(
        mask,
        block_count=self.block_count,
        bsize=self.block_size,
        boffset=self.block_offset,
        bstride=self.block_stride,
        tol=self.tol,
        avgpool=self.avgpool)
    blocks = sparse.gather(
        inputs,
        indices.bin_counts,
        indices.active_block_indices,
        bsize=self.block_size,
        boffset=self.block_offset,
        bstride=self.block_stride)
    blocks_shape = array_ops.shape(blocks)
    num_blocks = blocks_shape[0]
    height, width = blocks_shape[1], blocks_shape[2]
    kernel_h, kernel_w = self.kernel_size
    stride_h, stride_w = self.strides
    out_pad_h = out_pad_w = None  # output padding not implemented
    # Infer the dynamic output shape:
    out_height = conv_utils.deconv_output_length(
        height,
        kernel_h,
        padding='valid',
        output_padding=out_pad_h,
        stride=stride_h)
    out_width = conv_utils.deconv_output_length(
        width,
        kernel_w,
        padding='valid',
        output_padding=out_pad_w,
        stride=stride_w)
    blocks_output_shape = (num_blocks, out_height, out_width, self.filters)
    strides = [1, self.strides[0], self.strides[1], 1]
    blocks = tf.nn.conv2d_transpose(
        blocks,
        self.kernel,
        blocks_output_shape,
        strides=strides,
        padding='VALID',
        data_format='NHWC')
    if not tf.executing_eagerly():
      # Infer the static output shape:
      # NOTE(review): this derives the shape from blocks.shape (the block
      # stack) rather than inputs.shape, and uses self.padding while the
      # conv above used 'VALID' — confirm this is intended.
      out_shape = self.compute_output_shape([blocks.shape, mask.shape])
      blocks.set_shape(out_shape)
    if self.use_bias:
      blocks = tf.nn.bias_add(blocks, self.bias, data_format='NHWC')
    if self.activation is not None:
      blocks = self.activation(blocks)
    if self.use_var:
      # Zero the persistent output variable before scattering into it.
      self.outputs.assign(tf.zeros_like(self.outputs))
      outputs = self.outputs
    else:
      output_shape = list(
          self.compute_output_shape([inputs.shape, mask.shape]))
      batch_size = array_ops.shape(inputs)[0]
      outputs = tf.zeros([batch_size] + output_shape[1:],
                         tf.float32)  # todo: might not work
    # Scatter the enlarged blocks back onto the output tensor.
    outputs = sparse.scatter(
        blocks,
        indices.bin_counts,
        indices.active_block_indices,
        outputs,
        bsize=self.output_block_size,
        boffset=self.output_block_offset,
        bstride=self.output_block_stride,
        use_var=self.use_var)
    if self.use_var:
      # Relax the batch dimension so downstream layers accept any batch.
      outputs.set_shape([None] + outputs.shape.as_list()[1:])
    return outputs
class ReduceMask(keras.layers.Layer):
  """Layer wrapper around `sparse.reduce_mask`.

  Maps a `[N, H, W, 1]` mask to the list `[bin_counts,
  active_block_indices]` (a plain list rather than the namedtuple the
  underlying op returns).

  :param block_size: `[BSZH, BSZW]` block size
  :param block_offset: `[BOFFSH, BOFFSW]` block offset
  :param block_stride: `[BSTRH, BSTRW]` block stride
  :param tol: pooling threshold to consider a block as active
  :param avgpool: pool with mean instead of max
  """
  def __init__(self,
               block_size=[16, 16],
               block_offset=[0, 0],
               block_stride=[16, 16],
               tol=0.5,
               avgpool=False,
               **kwargs):
    super().__init__(**kwargs)
    # Normalize all block geometry to length-2 lists.
    self.block_size = utils.listify(block_size, 2)
    self.block_offset = utils.listify(block_offset, 2)
    self.block_stride = utils.listify(block_stride, 2)
    self.tol = tol
    self.avgpool = avgpool

  def build(self, mask_shape):
    # Blocks needed to tile each spatial dimension of the mask.
    self.block_count = [
        utils.divup(mask_shape[i + 1], self.block_stride[i]) for i in (0, 1)]

  def compute_output_shape(self, _):
    # A scalar count plus an [nBlocks, 3] index tensor.
    return [tf.TensorShape([]), tf.TensorShape([None, 3])]

  def call(self, mask_):
    result = sparse.reduce_mask(
        mask_,
        block_count=self.block_count,
        bsize=self.block_size,
        boffset=self.block_offset,
        bstride=self.block_stride,
        tol=self.tol,
        avgpool=self.avgpool)
    return [result.bin_counts, result.active_block_indices]
class SparseGather(keras.layers.Layer):
  """Layer wrapper around `sparse.gather`.

  Expects inputs `[inputs, bin_counts, active_block_indices]` and returns
  the stack of active blocks.

  :param block_size: `[BSZH, BSZW]` block size
  :param block_offset: `[BOFFSH, BOFFSW]` block offset
  :param block_stride: `[BSTRH, BSTRW]` block stride
  """
  def __init__(self,
               block_size=[16, 16],
               block_offset=[0, 0],
               block_stride=[16, 16],
               **kwargs):
    super().__init__(**kwargs)
    # Normalize all block geometry to length-2 lists.
    for attr, value in (('block_size', block_size),
                        ('block_offset', block_offset),
                        ('block_stride', block_stride)):
      setattr(self, attr, utils.listify(value, 2))

  def compute_output_shape(self, input_shape):
    # [nBlocks, BSZH, BSZW, C]; block count is unknown statically.
    tensor_shape = input_shape[0]
    return tf.TensorShape(
        [None, self.block_size[0], self.block_size[1], tensor_shape[3]])

  def call(self, inputs):
    tensor, bin_counts, active_block_indices = inputs
    return sparse.gather(
        tensor,
        bin_counts,
        active_block_indices,
        bsize=self.block_size,
        boffset=self.block_offset,
        bstride=self.block_stride)
class SparseScatter(keras.layers.Layer):
  """Layer wrapper around `sparse.scatter`.

  Scatters a stack of blocks onto a fresh zero tensor of the fixed shape
  given at construction.  Expects inputs `[block_stack, bin_counts,
  active_block_indices]`.

  :param output_shape: full static shape of the scatter target
  :param block_size: `[BSZH, BSZW]` block size
  :param block_offset: `[BOFFSH, BOFFSW]` block offset
  :param block_stride: `[BSTRH, BSTRW]` block stride
  :param use_var: forwarded to `sparse.scatter` (variable-based path)
  """
  def __init__(self,
               output_shape,
               block_size=[16, 16],
               block_offset=[0, 0],
               block_stride=[16, 16],
               use_var=False,
               **kwargs):
    super().__init__(**kwargs)
    # Trailing underscore avoids clashing with Layer.output_shape.
    self.output_shape_ = list(output_shape)
    # Normalize all block geometry to length-2 lists.
    self.block_size = utils.listify(block_size, 2)
    self.block_offset = utils.listify(block_offset, 2)
    self.block_stride = utils.listify(block_stride, 2)
    self.use_var = use_var

  def compute_output_shape(self, _):
    return tf.TensorShape(self.output_shape_)

  def call(self, inputs):
    block_stack, bin_counts, active_block_indices = inputs
    base = tf.zeros(self.output_shape_, tf.float32)
    return sparse.scatter(
        block_stack,
        bin_counts,
        active_block_indices,
        base,
        bsize=self.block_size,
        boffset=self.block_offset,
        bstride=self.block_stride,
        use_var=self.use_var)
def main():
  """Smoke test: train a tiny two-layer SparseConv2D model on four disk images.

  The image itself doubles as its own sparsity mask and as the regression
  target (an autoencoding setup).  Writes an example figure to
  ../figs/sparse_conv2d_example.pdf.
  """
  # tf.enable_eager_execution()
  inputs = keras.layers.Input(shape=(100, 100, 1))
  # Each SparseConv2D takes [inputs, mask]; here the image is its own mask.
  x = SparseConv2D(1, [3, 3], 4, padding='same')([inputs, inputs])
  x = SparseConv2D(1, [1, 1], 4, padding='same')([x, x])
  # x = SparseConv2DTranspose(1, [2, 2], strides=[2, 2], padding='same')([x, x]) # noqa
  # x = keras.layers.MaxPool2D()(x)
  model = keras.Model(inputs, x)
  model.compile(optimizer=tf.train.AdadeltaOptimizer(0.1), loss='mse',
                metrics=['mae'])
  images = np.array([
      img.open_as_float('../data/disks_100x100/images/1001.png'),
      img.open_as_float('../data/disks_100x100/images/1002.png'),
      img.open_as_float('../data/disks_100x100/images/1003.png'),
      img.open_as_float('../data/disks_100x100/images/1004.png')])
  images = images[:, :, :, np.newaxis]  # add the channel dimension
  # Autoencode: inputs are also the targets.
  dataset = tf.data.Dataset.from_tensor_slices((images, images))
  dataset = dataset.batch(4).repeat(-1)
  model.fit(dataset, epochs=5, steps_per_epoch=1000)
  x = images
  y = model.predict(images)
  # Plot originals next to reconstructions.
  vis.plot_image(*x, *y, columns=4, vmin=0., vmax=1.)
  vis.show('../figs/sparse_conv2d_example.pdf')


if __name__ == '__main__':
  main()
| {"/artifice/sharedobjects/__init__.py": ["/artifice/sharedobjects/shared.py"], "/artifice/tform.py": ["/artifice/__init__.py"], "/artifice/mod.py": ["/artifice/__init__.py"], "/artifice/lay.py": ["/artifice/__init__.py"], "/artifice/sparse/sparse.py": ["/artifice/__init__.py", "/artifice/sparse/__init__.py"], "/scripts/arcing_sphere.py": ["/artifice/utils.py"], "/scripts/two_spheres.py": ["/artifice/utils.py"], "/test_utils/experiment.py": ["/artifice/utils.py", "/artifice/__init__.py"], "/artifice/conversions.py": ["/artifice/__init__.py"], "/test_utils/springs.py": ["/artifice/__init__.py"], "/test_utils/annotate.py": ["/artifice/utils.py"], "/artifice/sparse/sparse_lib.py": ["/artifice/__init__.py"], "/artifice/windows/__init__.py": ["/artifice/windows/annotator_window.py"], "/artifice/dat.py": ["/artifice/__init__.py"], "/artifice/prio.py": ["/artifice/__init__.py"], "/artifice/vis.py": ["/artifice/__init__.py"], "/artifice/main.py": ["/artifice/__init__.py"], "/artifice/sparse/__init__.py": ["/artifice/sparse/sparse.py"], "/artifice/ann.py": ["/artifice/sharedobjects/__init__.py", "/artifice/__init__.py"]} |
59,627 | zhuokaizhao/artifice | refs/heads/master | /artifice/sparse/sparse.py | """The implementations in this file rely on SBNet, a project from
uber-research. The original repository containing this code can be found
[here](https://github.com/uber-research/sbnet), and the paper describing it
[here](https://arxiv.org/abs/1801.02108). Documentation for my wrapper
functions comes mostly from the git repo README, albeit in a more pythonic
format.
"""
import numpy as np
import tensorflow as tf
from artifice.log import logger # noqa
from artifice import utils
# whether to use custom sparse ops from sbnet
# todo: check if compiled sbnet available
# sbnet's ops require a CUDA-enabled GPU build of TensorFlow; without one,
# fall back to the pure-TensorFlow port in sparse_lib.  Note `sbnet` is only
# imported on the GPU path, so code must check _use_sbnet before using it.
_use_sbnet = tf.test.is_gpu_available() and tf.test.is_built_with_cuda()
if _use_sbnet:
  from artifice import sbnet
else:
  from . import sparse_lib
def reduce_mask(mask, *,
                block_count,
                bsize,
                boffset,
                bstride,
                tol=0.5,
                avgpool=False):
  """Pool `mask` down to the set of active block indices.

  Thin dispatcher: uses the compiled sbnet op when available, otherwise the
  pure-TensorFlow implementation in `sparse_lib`.

  A block `(ni, hi, wi)` covers the slice
  `mask[ni, BOFFSH+BSTRH*hi : BOFFSH+BSTRH*hi+BSZH,
        BOFFSW+BSTRW*wi : BOFFSW+BSTRW*wi+BSZW, :]`
  and is considered active when its pooled value exceeds `tol`.
  See https://arxiv.org/abs/1801.02108 for more info.

  :param mask: `[N, H, W, 1]` shape tensor containing mask values.
  :param block_count: `[BCH, BCW]` block counts in height and width
  :param bsize: `[BSZH, BSZW]` block size
  :param boffset: `[BOFFSH, BOFFSW]` block offset
  :param bstride: `[BSTRH, BSTRW]` block stride
  :param tol: pooling threshold to consider a block as active
  :param avgpool: if True, use average pooling instead of max pooling
  :returns: namedtuple of `bin_counts` and `active_block_indices`, for
      passing to `gather` and `scatter`.
  """
  if _use_sbnet:
    return sbnet.reduce_mask(
        mask,
        block_count,
        dynamic_bsize=bsize,
        dynamic_boffset=boffset,
        dynamic_bstride=bstride,
        tol=tol,
        avgpool=avgpool)
  return sparse_lib.reduce_mask(
      mask,
      block_count,
      bsize=bsize,
      boffset=boffset,
      bstride=bstride,
      tol=tol,
      avgpool=avgpool)
def gather(inputs, bin_counts, active_block_indices, *, bsize, boffset,
           bstride, transpose=False):
  """Stack the active blocks of `inputs` into a block stack.

  Equivalent pseudocode:

  ```
  for (ni, hi, wi) in active_block_indices:
    block_stack[ni] = inputs[ni, BOFFSH+BSTRH*hi : BOFFSH+BSTRH*hi+BSZH,
                             BOFFSW+BSTRW*wi : BOFFSW+BSTRW*wi+BSZW, :]
  ```

  :param inputs: `[N, H, W, C]` shaped input tensor
  :param bin_counts: number of active blocks
  :param active_block_indices: `[nBlocks, 3]` set of active block indices
  :param bsize: `[BSZH, BSZW]` block size
  :param boffset: `[BOFFSH, BOFFSW]` block offset
  :param bstride: `[BSTRH, BSTRW]` block stride
  :param transpose: fuse a transpose so blocks come out `[nBlocks, C, BSZH,
      BSZW]`.  Only forwarded on the sbnet path; the pure-TF fallback does
      not receive this flag.
  :returns: `[nBlocks, BSZH, BSZW, C]` tensor stack of blocks
  """
  if _use_sbnet:
    stacked = sbnet.sparse_gather(
        inputs,
        bin_counts,
        active_block_indices,
        dynamic_bsize=bsize,
        dynamic_boffset=boffset,
        dynamic_bstride=bstride,
        transpose=transpose)
  else:
    stacked = sparse_lib.gather(
        inputs,
        bin_counts,
        active_block_indices,
        bsize=bsize,
        boffset=boffset,
        bstride=bstride)
  if not tf.executing_eagerly():
    # Pin the static block shape; only the block count stays dynamic.
    stacked.set_shape(
        tf.TensorShape([None, bsize[0], bsize[1], inputs.shape[3]]))
  return stacked
def scatter(block_stack,
            bin_counts,
            active_block_indices,
            outputs, *,
            bsize,
            boffset,
            bstride,
            add=False,
            atomic=False,
            transpose=False,
            use_var=False):
  """Scatter blocks in `block_stack` back onto `outputs`.

  Inverse of `gather`: each block is written on top of (or, with `add`,
  added into) the corresponding region of the base tensor:

  ```
  for (ni, hi, wi) in active_block_indices:
    region = outputs[ni, BOFFSH+BSTRH*hi : BOFFSH+BSTRH*hi+BSZH,
                     BOFFSW+BSTRW*wi : BOFFSW+BSTRW*wi+BSZW, :]
    region += block_stack[ni] if add else region = block_stack[ni]
  ```

  TensorFlow cannot modify an intermediate tensor in place unless it is a
  tf.Variable, so the plain-tensor path copies the base tensor inside the
  op.  The sbnet authors therefore provide `sparse_scatter_var`, which
  mutates a Variable in place; pass `use_var=True` (graph mode only) with a
  Variable `outputs` for maximum performance.

  :param block_stack: `[nBlocks, BSZH, BSZW, C]` tensor stack of blocks
  :param bin_counts: number of active blocks
  :param active_block_indices: `[nBlocks, 3]` set of active block indices
  :param outputs: base tensor (or Variable, with `use_var`) to write onto
  :param bsize: `[BSZH, BSZW]` block size
  :param boffset: `[BOFFSH, BOFFSW]` block offset
  :param bstride: `[BSTRH, BSTRW]` block stride
  :param add: add blocks into the base instead of overwriting
  :param atomic: use atomic adds (sbnet path only)
  :param transpose: fuse a `[N,C,H,W]`->`[N,H,W,C]` transpose (sbnet path)
  :param use_var: use the in-place Variable op (forbids eager execution)
  :returns: the scattered output tensor
  """
  shape = outputs.shape
  if use_var:
    assert not tf.executing_eagerly(), 'use_var forbids eager execution'
  if _use_sbnet:
    op = sbnet.sparse_scatter_var if use_var else sbnet.sparse_scatter
    outputs = op(
        block_stack,
        bin_counts,
        active_block_indices,
        outputs,
        dynamic_bsize=bsize,
        dynamic_boffset=boffset,
        dynamic_bstride=bstride,
        add=add,
        atomic=atomic,
        transpose=transpose)
  else:
    op = sparse_lib.scatter_var if use_var else sparse_lib.scatter
    outputs = op(
        block_stack,
        bin_counts,
        active_block_indices,
        outputs,
        bsize=bsize,
        boffset=boffset,
        bstride=bstride,
        add=add)
  if not tf.executing_eagerly():
    # Restore the base tensor's static shape on the result.
    outputs.set_shape(shape)
  return outputs
def main():
  """For testing/understanding sbnet.

  Bug fix: this demo calls `sbnet.*` directly, but `sbnet` is only imported
  at module load when `_use_sbnet` is true (CUDA GPU present).  Bail out
  early on CPU-only machines instead of crashing with a NameError.
  """
  if not _use_sbnet:
    logger.warning('compiled sbnet ops unavailable (no CUDA GPU); skipping')
    return
  # tf.enable_eager_execution()
  # Specify input tensor dimensions and block-sparsity parameters
  batch = 4
  hw = 256
  channels = 64
  blockSize = [16, 16]
  blockStride = [14, 14]
  blockOffset = [0, 0]
  blockCount = [utils.divup(hw, blockStride[0]),
                utils.divup(hw, blockStride[1])]
  # build kwargs to simplify op calls
  inBlockParams = {"dynamic_bsize": blockSize,
                   "dynamic_boffset": blockOffset,
                   "dynamic_bstride": blockStride}
  outBlockParams = {"dynamic_bsize": [blockSize[0] - 2, blockSize[1] - 2],
                    "dynamic_boffset": blockOffset,
                    "dynamic_bstride": blockStride}
  # create a random mask representing attention/a priori sparsity
  # threshold the mask to a specified percentile sparsity
  mask = np.random.randn(batch, blockCount[0], blockCount[1],
                         channels).astype(np.float32)
  threshold = np.percentile(mask, 90)
  sparseMask = np.greater(mask, threshold).astype(np.float32)
  # upsample the mask to full resolution
  upsampledMask = sparseMask.repeat(blockStride[0],  # noqa
                                    axis=1).repeat(blockStride[1], axis=2)
  # create a random input tensor
  x = tf.constant(np.random.randn(batch, hw, hw, channels).astype(np.float32))
  # create a random weight tensor
  w = tf.constant(np.random.randn(3, 3, channels, channels).astype(np.float32))
  # reduce the mask to indices by using a fused pooling+indexing operation
  indices = sbnet.reduce_mask(mask, blockCount, tol=0.5, **inBlockParams)
  print("using gpu:", tf.test.is_gpu_available()
        and tf.test.is_built_with_cuda())
  print("bin_counts:", indices.bin_counts)
  print("bin_counts:", indices.bin_counts.shape)
  print("active_block_indices:", indices.active_block_indices)
  print("active_block_indices:", indices.active_block_indices.shape)
  # stack active overlapping tiles to batch dimension (NCHW via transpose)
  blockStack = sbnet.sparse_gather(x, indices.bin_counts,
                                   indices.active_block_indices,
                                   transpose=True, **inBlockParams)
  print("block_stack:", blockStack.shape)
  # perform dense convolution on a sparse stack of tiles
  convBlocks = tf.nn.conv2d(blockStack, w, strides=[1, 1, 1, 1],
                            padding='VALID', data_format='NCHW')
  # convBlocks = keras.layers.Conv2D(channels, (3, 3), padding='valid',
  #                                  data_format='channels_first')(blockStack)
  # write/scatter the tiles back on top of original tensor. Note that the
  # output tensor is reduced by 1 on each side due to 'VALID' convolution
  validX = x[:, 1:hw - 1, 1:hw - 1, :]
  y = sbnet.sparse_scatter(
      convBlocks, indices.bin_counts, indices.active_block_indices,
      validX, transpose=True, add=False, atomic=False, **outBlockParams)
  if not tf.executing_eagerly():
    sess = tf.Session()
    y_output, = sess.run([y])


if __name__ == '__main__':
  main()
| {"/artifice/sharedobjects/__init__.py": ["/artifice/sharedobjects/shared.py"], "/artifice/tform.py": ["/artifice/__init__.py"], "/artifice/mod.py": ["/artifice/__init__.py"], "/artifice/lay.py": ["/artifice/__init__.py"], "/artifice/sparse/sparse.py": ["/artifice/__init__.py", "/artifice/sparse/__init__.py"], "/scripts/arcing_sphere.py": ["/artifice/utils.py"], "/scripts/two_spheres.py": ["/artifice/utils.py"], "/test_utils/experiment.py": ["/artifice/utils.py", "/artifice/__init__.py"], "/artifice/conversions.py": ["/artifice/__init__.py"], "/test_utils/springs.py": ["/artifice/__init__.py"], "/test_utils/annotate.py": ["/artifice/utils.py"], "/artifice/sparse/sparse_lib.py": ["/artifice/__init__.py"], "/artifice/windows/__init__.py": ["/artifice/windows/annotator_window.py"], "/artifice/dat.py": ["/artifice/__init__.py"], "/artifice/prio.py": ["/artifice/__init__.py"], "/artifice/vis.py": ["/artifice/__init__.py"], "/artifice/main.py": ["/artifice/__init__.py"], "/artifice/sparse/__init__.py": ["/artifice/sparse/sparse.py"], "/artifice/ann.py": ["/artifice/sharedobjects/__init__.py", "/artifice/__init__.py"]} |
59,628 | zhuokaizhao/artifice | refs/heads/master | /data/disks/view.py | from sys import argv
from PIL import Image
import matplotlib.pyplot as plt
import numpy as np
# Display the image named on the command line in grayscale.
im = Image.open(argv[1])
# Bug fix: np.array(im) already has shape (height, width[, channels]); the
# previous reshape(im.size[0], im.size[1]) used PIL's (width, height) order,
# which transposes the pixel buffer for any non-square grayscale image and
# fails outright for images with color channels.
image = np.array(im)
plt.imshow(image, cmap='gray')
plt.show()
| {"/artifice/sharedobjects/__init__.py": ["/artifice/sharedobjects/shared.py"], "/artifice/tform.py": ["/artifice/__init__.py"], "/artifice/mod.py": ["/artifice/__init__.py"], "/artifice/lay.py": ["/artifice/__init__.py"], "/artifice/sparse/sparse.py": ["/artifice/__init__.py", "/artifice/sparse/__init__.py"], "/scripts/arcing_sphere.py": ["/artifice/utils.py"], "/scripts/two_spheres.py": ["/artifice/utils.py"], "/test_utils/experiment.py": ["/artifice/utils.py", "/artifice/__init__.py"], "/artifice/conversions.py": ["/artifice/__init__.py"], "/test_utils/springs.py": ["/artifice/__init__.py"], "/test_utils/annotate.py": ["/artifice/utils.py"], "/artifice/sparse/sparse_lib.py": ["/artifice/__init__.py"], "/artifice/windows/__init__.py": ["/artifice/windows/annotator_window.py"], "/artifice/dat.py": ["/artifice/__init__.py"], "/artifice/prio.py": ["/artifice/__init__.py"], "/artifice/vis.py": ["/artifice/__init__.py"], "/artifice/main.py": ["/artifice/__init__.py"], "/artifice/sparse/__init__.py": ["/artifice/sparse/sparse.py"], "/artifice/ann.py": ["/artifice/sharedobjects/__init__.py", "/artifice/__init__.py"]} |
59,629 | zhuokaizhao/artifice | refs/heads/master | /scripts/coupled_spheres.py | """Create a dataset of two spheres coupled by an invisible spring, floating
without gravity. Outputs a tfrecord in data/coupled_spheres. (Should be run from
$ARTIFICE)
# TODO: add realistic physics simulation as in
https://gist.github.com/Zulko/f828b38421dfbee59daf, using package 'ode'.
"""
import vapory
import numpy as np
import matplotlib.pyplot as plt
from test_utils import experiment
import logging
logger = logging.getLogger('experiment')
# Main parameters
debug = False
seconds = 100  # 3000 frames, at 30fps. Take 2000 for training?
tether = True  # Tether the (large) ball to center.
# dataset parameters
root = "/project2/glk/killeen/probal/data/harper_spheres/"  # root dir for fname
fps = 30                     # frame rate of the video
frame_step = 1/float(fps)    # time per frame (DERIVED)
steps_per_frame = 1          # number of simulated time steps per frame
time_step = steps_per_frame * frame_step  # delta t for simulation
N = int(fps * seconds)       # number of frames (DERIVED)
output_formats = {'png', 'mp4'}  # output formats
image_shape = (256, 256)     # image shape
num_classes = 3              # including background
# Configure initial parameters. 1 povray unit = 1 cm
# ball 1 in povray unites
r1 = 5     # radius (cm)
m1 = 1     # mass (kg)
x1 = 50    # initial x position (cm)
y1 = 0     # initial y position
vx1 = -20  # initial x velocity (cm/s)
vy1 = 40   # initial y velocity
# ball 2
r2 = 5
m2 = 27
x2 = 0
y2 = 0
vx2 = 0
vy2 = 0
# Spring parameters
k = 5.     # Hooke's constant (N / m)
relaxed_length = image_shape[0] / 2.  # For Hooke's law (cm)
minimum_length = r1 + r2              # Nonlinear boundary of spring (cm)
# attractor/tether parameters
attractor_center = np.zeros(2, np.float64)
# Add walls at the boundary of the image plane
do_walls = True  # TODO: fix this behavior
#################### CONFIGURABLE OPTIONS ABOVE ####################
# spring:
# Convert the scene lengths from povray units (cm) to SI meters for the
# force calculations below.
l_relaxed = relaxed_length / 100.
l_min = minimum_length / 100.
def spring(l):
    """Radial force in Newtons exerted by the spring at length ``l``.

    :param l: distance between masses, in meters
    :returns: force in Newtons; negative is attractive, positive repulsive.

    In center-of-mass polar coordinates this acts as a radial force. The
    linear Hooke's-law term is augmented by a repulsive term that diverges
    as the separation approaches ``l_min``, discouraging occlusion. This
    function is the place to encode further non-linearities if desired.
    """
    if l > l_min:
        # Repulsive boundary; the coefficient may require tuning.
        repulsion = 0.1 / np.square(l - l_min)
    else:
        # Shouldn't happen if the coefficient above is tuned correctly.
        repulsion = 10000
    return repulsion - k * (l - l_relaxed)
# attractor:
attractor_relaxed = 0
attractor_k = 50.
def attractor(l):
    """Spring-like restoring force pulling an object toward the attractor.

    :param l: distance from object to attractor, in meters
    :returns: attractive force in Newtons
    """
    displacement = l - attractor_relaxed
    return -attractor_k * displacement
def calculate_acceleration(x1, x2):
    """Accelerations of both spheres from the equations of motion.

    :param x1: position vector of sphere 1, meters
    :param x2: position vector of sphere 2, meters
    :returns: tuple ``(a1, a2)`` of acceleration vectors, m/s^2
    """
    # Spring force along the line between the two spheres.
    separation = x1 - x2
    dist = np.linalg.norm(separation)
    force = spring(dist)
    direction = separation / dist
    a1 = force * direction / m1
    a2 = -force * direction / m2
    # Optional tether: sphere 2 is also pulled toward attractor_center.
    if tether:
        offset = x2 - attractor_center
        offset_len = np.linalg.norm(offset)
        if offset_len > 0:  # guard against a zero-length direction vector
            a2 += attractor(offset_len) * (offset / offset_len) / m2
    return a1, a2
def impose_walls():
    """Reflect the CURRENT state of the system off the image-plane walls.

    `walls` consists of the top, left, bottom, right bounds (in meters).
    A bounce mirrors the offending coordinate across the wall and flips
    the corresponding velocity component. No-op when `do_walls` is False.
    """
    if not do_walls:
        return
    global current, walls
    # (name, axis index, wall value, whether the wall is an upper bound)
    checks = (('left', 0, walls[1], False),
              ('right', 0, walls[3], True),
              ('top', 1, walls[0], True),
              ('bottom', 1, walls[2], False))
    for suffix in ('1', '2'):
        pos = current['x' + suffix]
        vel = current['v' + suffix]
        for name, axis, wall, upper in checks:
            crossed = pos[axis] > wall if upper else pos[axis] < wall
            if crossed:
                logger.debug(f"bouncing off {name} wall at {wall}")
                pos[axis] = 2*wall - pos[axis]  # mirror across the wall
                vel[axis] *= -1
def step(n=1):
    """Advance the system by n time steps of length `time_step`.

    Uses the velocity Verlet algorithm, as on
    https://en.wikipedia.org/wiki/Verlet_integration

    Works directly in cartesian coordinates, which stays simple even with
    multiple objects flying around.
    """
    global initial, current
    dt = time_step
    for _ in range(n):
        # Snapshot the state at the start of this step.
        for key in current.keys():
            initial[key] = current[key]
        # 1. Half-step velocities.
        v1_half = initial['v1'] + 0.5*initial['a1'] * dt
        v2_half = initial['v2'] + 0.5*initial['a2'] * dt
        # 2. New positions from the half-step velocities.
        current['x1'] = initial['x1'] + v1_half * dt
        current['x2'] = initial['x2'] + v2_half * dt
        # 3. New accelerations from the equations of motion.
        current['a1'], current['a2'] = calculate_acceleration(current['x1'],
                                                              current['x2'])
        # 4. Full-step velocities.
        current['v1'] = v1_half + 0.5*current['a1'] * dt
        current['v2'] = v2_half + 0.5*current['a2'] * dt
        # Correct for bouncing off of walls.
        impose_walls()
        logger.debug("position:{},{}".format(current['x1'], current['x2']))
# Number of simulation steps taken so far; updated by update_to_step().
# (A `global step_cnt` statement previously preceded this line, but `global`
# is a no-op at module scope, so it has been removed.)
step_cnt = 0
def update_to_step(t):
    """Advance the simulation to physical time step t.

    t is proportional to the frame number; does nothing if the simulation
    has already reached step t.
    """
    global step_cnt
    if t <= step_cnt:
        return
    if debug: print("updating to step", t)
    step(n=t - step_cnt)
    step_cnt = t
# experiment spheres: whichever one is called first updates the global
# state. Then each of them translates the global state back into cartesian
# coordinates. Takes the frame number as argument.
def argsf1(fn):
    """Position/radius arguments for sphere 1 at frame `fn`.

    Whichever of argsf1/argsf2 runs first for a frame advances the shared
    simulation state; both then read positions back in povray (cm) units.
    """
    update_to_step(steps_per_frame * fn)
    px, py = 100 * current['x1']  # meters -> cm
    return [px, py, 0], r1
def argsf2(fn):
    """Position/radius arguments for sphere 2 at frame `fn`."""
    update_to_step(steps_per_frame * fn)  # TODO: fix
    px, py = 100 * current['x2']  # meters -> cm
    return [px, py, 0], r2
def main():
    """Build and run the coupled-spheres experiment.

    Sets up the initial physical state (converted from cm to SI units),
    constructs the two povray spheres, lights, and background plane,
    derives the wall bounds in meters, then either renders a single debug
    frame or runs the full dataset generation.
    """
    # helpers
    color = lambda col : vapory.Texture(vapory.Pigment('color', col))
    # initial state, in SI units (cm -> m)
    global initial, current
    initial = {}
    initial['x1'] = np.array([x1, y1]) / 100.
    initial['v1'] = np.array([vx1, vy1]) / 100.
    initial['x2'] = np.array([x2, y2]) / 100.
    initial['v2'] = np.array([vx2, vy2]) / 100.
    # Calculate initial acceleration with equations of motion
    initial['a1'], initial['a2'] = calculate_acceleration(
        initial['x1'], initial['x2'])
    current = initial.copy()
    # Begin setup: the two tracked spheres with distinct semantic labels.
    s1 = experiment.ExperimentSphere(argsf1, vapory.Texture('White_Wood'),
                                     semantic_label=1)
    s2 = experiment.ExperimentSphere(argsf2, vapory.Texture('White_Wood'),
                                     semantic_label=2)
    # experiment
    exp = experiment.Experiment(image_shape=image_shape,
                                num_classes=num_classes,
                                N=N, data_root=root,
                                output_format=output_formats,
                                fps=fps, mode='L')
    exp.add_object(vapory.LightSource([0, 5*image_shape[0], 0],
                                      'color', [1,1,1]))
    exp.add_object(vapory.LightSource([5*image_shape[0], 0, -2*image_shape[0]],
                                      'color', [1,1,1]))
    # Background
    # TODO: make this an actually interesting experiment with a background image.
    exp.add_object(vapory.Plane(
        [0,0,1], 10*max(r1, r2), vapory.Texture(
            vapory.Pigment(vapory.ImageMap('png', '"scripts/images/harper.png"')),
            'scale', '300', 'translate', [image_shape[0] // 2, 2*image_shape[1] // 3, 0])))
    if do_walls:
        global walls
        border = min(r1, r2)  # keep at least a sphere's radius of margin
        walls = np.zeros(4) # top, left, bottom, right
        # Unproject the image-plane border pixels into world coordinates.
        walls[:2] = exp.unproject_to_image_plane((border, border))[:2]
        walls[2:] = exp.unproject_to_image_plane(
            (image_shape[0] - border, image_shape[1] - border))[:2]
        walls /= 100. # convert to meters
    exp.add_object(s1)
    exp.add_object(s2)
    if debug:
        # Render and display a single frame instead of generating data.
        (image, _) , _ = exp.render_scene(0)
        plt.imshow(np.squeeze(image), cmap='gray')
        plt.show()
    else:
        exp.run()
if __name__ == "__main__":
main()
| {"/artifice/sharedobjects/__init__.py": ["/artifice/sharedobjects/shared.py"], "/artifice/tform.py": ["/artifice/__init__.py"], "/artifice/mod.py": ["/artifice/__init__.py"], "/artifice/lay.py": ["/artifice/__init__.py"], "/artifice/sparse/sparse.py": ["/artifice/__init__.py", "/artifice/sparse/__init__.py"], "/scripts/arcing_sphere.py": ["/artifice/utils.py"], "/scripts/two_spheres.py": ["/artifice/utils.py"], "/test_utils/experiment.py": ["/artifice/utils.py", "/artifice/__init__.py"], "/artifice/conversions.py": ["/artifice/__init__.py"], "/test_utils/springs.py": ["/artifice/__init__.py"], "/test_utils/annotate.py": ["/artifice/utils.py"], "/artifice/sparse/sparse_lib.py": ["/artifice/__init__.py"], "/artifice/windows/__init__.py": ["/artifice/windows/annotator_window.py"], "/artifice/dat.py": ["/artifice/__init__.py"], "/artifice/prio.py": ["/artifice/__init__.py"], "/artifice/vis.py": ["/artifice/__init__.py"], "/artifice/main.py": ["/artifice/__init__.py"], "/artifice/sparse/__init__.py": ["/artifice/sparse/sparse.py"], "/artifice/ann.py": ["/artifice/sharedobjects/__init__.py", "/artifice/__init__.py"]} |
59,630 | zhuokaizhao/artifice | refs/heads/master | /scripts/parse_log.py | import re
import numpy as np
import matplotlib.pyplot as plt
# Log files to compare, as (path, active-learning query size) pairs.
# NOTE(review): paths are hard-coded to a developer machine.
logfiles = [(f"/Users/Benjamin/projects/artifice/batch/train/"
             f"augmented-active_harper_spheres_tethered_subset100/train.err", 10),
            (f"/Users/Benjamin/projects/artifice/batch/train/"
             f"augmented-active_harper_spheres_tethered_subset10/train.err", 1)]
for filename, query_size in logfiles:
    with open(filename, 'r') as f:
        log = f.read()
    # Grab the literal printed after each "uncertainties " log line;
    # presumably a list of (index, uncertainty) pairs — verify against trainer.
    queries = re.findall(r'uncertainties (?P<q>.*)\n', log)
    # NOTE(review): eval() on log content executes arbitrary code if the log
    # is untrusted; ast.literal_eval would be safer here.
    uncertainties = -np.array([[t[1] for t in eval(q)] for q in queries])
    # assumes exactly 9 epochs of queries per run — TODO confirm
    plt.plot(np.arange(1,10), uncertainties.mean(axis=1),
             label=f"Query size {query_size}")
plt.title("Actively Selected Queries")
plt.ylabel("Mean Peak Value for Query")
plt.xlabel("Epoch")
plt.legend()
plt.savefig("/Users/Benjamin/projects/artifice/docs/peak_values.pdf",
            transparent=True, pad_inches=0)
59,631 | zhuokaizhao/artifice | refs/heads/master | /scripts/arcing_sphere.py | """Create a (test) dataset of a single sphere flying in a parabolic arc.
Outputs a tfrecord in data/arcing_sphere. (Should be run from $ARTIFICE)
"""
import vapory
import numpy as np
import matplotlib.pyplot as plt
from test_utils import experiment
from artifice.utils import dataset
debug = False  # if True, render and show one frame instead of writing data
# helpers
color = lambda col : vapory.Texture(vapory.Pigment('color', col))
# dataset parameters
root = "data/arcing_sphere/" # root dir for fname
fps = 30 # frame rate of the video
time_step = 1/float(fps) # time per frame, seconds
seconds = 5 # number of seconds in the video
N = int(seconds / time_step) # number of frames
output_formats = {'mp4'} # write to a video
fname = root + 'arcing_sphere' # extensions from output_formats
image_shape = (512, 512) # image shape (pixels)
num_classes = 2 # including background
# physical sphere parameters. 1 povray unit = 1 cm
radius = 50 # radius (cm)
mass = 2 # mass in kilograms
x = -200 # initial x position in world (cm)
y = -200 # initial y position in world (cm)
vx = 200 # initial x velocity (cm/s)
vy = 500 # initial y velocity (cm/s)
g = -981 # gravity acceleration (cm/s^2)
def argsf(t_):
    """Center position and radius of the ball at frame index `t_`.

    Closed-form parabolic trajectory: constant x velocity, constant
    gravitational acceleration in y.
    """
    t = t_ * time_step
    px = vx*t + x
    py = 0.5*g*t**2 + vy*t + y
    return ([px, py, 0], radius)
# The single tracked sphere, positioned per-frame by argsf.
ball = experiment.ExperimentSphere(argsf, color('Red'))
# experiment
exp = experiment.Experiment(image_shape=image_shape,
                            num_classes=num_classes,
                            N=N, fname=fname,
                            output_format=output_formats,
                            fps=fps, mode='L')
exp.add_object(vapory.LightSource([0, 5*image_shape[0], -5*image_shape[1]],
                                  'color', [1,1,1]))
exp.add_object(vapory.Plane([0,1,0], y - radius, color('White'))) # ground
exp.add_object(vapory.Plane([0,0,1], 5*radius, color('White'))) # back wall
exp.add_object(ball)
if debug:
    # Render and display the frame at t = 2 seconds.
    image, annotation = exp.render_scene(2*fps)
    plt.imshow(image[:,:,0], cmap='gray')
    plt.show()
else:
    exp.run(verbose=True)
| {"/artifice/sharedobjects/__init__.py": ["/artifice/sharedobjects/shared.py"], "/artifice/tform.py": ["/artifice/__init__.py"], "/artifice/mod.py": ["/artifice/__init__.py"], "/artifice/lay.py": ["/artifice/__init__.py"], "/artifice/sparse/sparse.py": ["/artifice/__init__.py", "/artifice/sparse/__init__.py"], "/scripts/arcing_sphere.py": ["/artifice/utils.py"], "/scripts/two_spheres.py": ["/artifice/utils.py"], "/test_utils/experiment.py": ["/artifice/utils.py", "/artifice/__init__.py"], "/artifice/conversions.py": ["/artifice/__init__.py"], "/test_utils/springs.py": ["/artifice/__init__.py"], "/test_utils/annotate.py": ["/artifice/utils.py"], "/artifice/sparse/sparse_lib.py": ["/artifice/__init__.py"], "/artifice/windows/__init__.py": ["/artifice/windows/annotator_window.py"], "/artifice/dat.py": ["/artifice/__init__.py"], "/artifice/prio.py": ["/artifice/__init__.py"], "/artifice/vis.py": ["/artifice/__init__.py"], "/artifice/main.py": ["/artifice/__init__.py"], "/artifice/sparse/__init__.py": ["/artifice/sparse/sparse.py"], "/artifice/ann.py": ["/artifice/sharedobjects/__init__.py", "/artifice/__init__.py"]} |
59,632 | zhuokaizhao/artifice | refs/heads/master | /scripts/two_spheres.py | """Create a dataset of scenes with two spheres, red and blue. Vary the position
and radius of these spheres randomly within the image frame.
Outputs a tfrecord in current data/two_spheres. (Should be run from $ARTIFICE)
"""
import vapory
import numpy as np
from test_utils import experiment
from artifice.utils import dataset
import matplotlib.pyplot as plt
debug = False  # small run + preview when True
# Parameters
root = "data/two_spheres/" # Root dir for fname
N = 5 if debug else 5000 # number of examples
image_shape = (512, 512) # first two dimensions of output images
num_classes = 2 # number of semantic classes
output_format = 'tfrecord'
fname = root + "two_spheres" # tfrecord to write to
min_radius = 64 # minimum radius of either sphere
max_radius = 128 # maximum radius of either sphere
# experiment
color = lambda col : vapory.Texture(vapory.Pigment('color', col))
exp = experiment.BallExperiment(image_shape=image_shape,
                                num_classes=num_classes,
                                N=N, fname=fname,
                                output_format=output_format)
exp.add_object(vapory.Background('White'))
exp.add_object(vapory.LightSource([0, image_shape[0], -2*image_shape[1]],
                                  'color', [1,1,1]))
# Random center (within the frame) and radius, drawn fresh per scene.
argsf = lambda : (
    [np.random.randint(-image_shape[1]/2, image_shape[1]/2),
     np.random.randint(-image_shape[0]/2, image_shape[0]/2),
     np.random.randint(-max_radius, max_radius)],
    np.random.randint(min_radius, max_radius+1))
# grayscale images, but objects will result in the same class anyway
red_ball = experiment.ExperimentSphere(argsf, color('Red'))
blue_ball = experiment.ExperimentSphere(argsf, color('Blue'))
exp.add_object(red_ball)
exp.add_object(blue_ball)
# Run the experiment, creating the tfrecord
exp.run(verbose=debug)
# Save the first two images of the data, only works in sinteractive
if debug:
    dataset.save_first_scene(fname)
| {"/artifice/sharedobjects/__init__.py": ["/artifice/sharedobjects/shared.py"], "/artifice/tform.py": ["/artifice/__init__.py"], "/artifice/mod.py": ["/artifice/__init__.py"], "/artifice/lay.py": ["/artifice/__init__.py"], "/artifice/sparse/sparse.py": ["/artifice/__init__.py", "/artifice/sparse/__init__.py"], "/scripts/arcing_sphere.py": ["/artifice/utils.py"], "/scripts/two_spheres.py": ["/artifice/utils.py"], "/test_utils/experiment.py": ["/artifice/utils.py", "/artifice/__init__.py"], "/artifice/conversions.py": ["/artifice/__init__.py"], "/test_utils/springs.py": ["/artifice/__init__.py"], "/test_utils/annotate.py": ["/artifice/utils.py"], "/artifice/sparse/sparse_lib.py": ["/artifice/__init__.py"], "/artifice/windows/__init__.py": ["/artifice/windows/annotator_window.py"], "/artifice/dat.py": ["/artifice/__init__.py"], "/artifice/prio.py": ["/artifice/__init__.py"], "/artifice/vis.py": ["/artifice/__init__.py"], "/artifice/main.py": ["/artifice/__init__.py"], "/artifice/sparse/__init__.py": ["/artifice/sparse/sparse.py"], "/artifice/ann.py": ["/artifice/sharedobjects/__init__.py", "/artifice/__init__.py"]} |
59,633 | zhuokaizhao/artifice | refs/heads/master | /data/disks_100x100/fix.py | """Fix world-space coords from glk.
"""
from os.path import join, basename
from glob import glob
import numpy as np
shape = [100,100]
miny = 1 # index 0
maxy = -1
minx = -1
maxx = 1
def lerp(omin, omax, imin, xx, imax):
    """Linearly map `xx` from the input range [imin, imax] onto [omin, omax]."""
    t = (xx - imin) / (imax - imin)
    return omin*(1. - t) + omax*t
def w2i(label, center="node"):
    """Convert world-space (x, y) label coordinates to image (row, col) indices.

    :param label: array whose columns 0 and 1 hold world x and y
    :param center: pixel-center convention; only "node" is implemented
    :returns: a new array with column 0 = row index, column 1 = column index
    :raises NotImplementedError: for any center other than "node"
    """
    if center != "node":
        raise NotImplementedError
    converted = label.copy()
    # World x maps to the column index, world y to the row index.
    converted[:,1] = lerp(0, shape[0] - 1, minx, label[:,0], maxx)
    converted[:,0] = lerp(0, shape[1] - 1, miny, label[:,1], maxy)
    return converted
# Rewrite every label file from old_labels/ into labels/ in image coords.
paths = sorted(glob("old_labels/*.txt"))
for i, path in enumerate(paths):
    if i % 500 == 0:
        # Progress indicator every 500 files.
        print(f"{i} / {len(paths)}")
    label = np.loadtxt(path)
    label = w2i(label)
    np.savetxt(join('labels', basename(path)), label, '%.8f')
| {"/artifice/sharedobjects/__init__.py": ["/artifice/sharedobjects/shared.py"], "/artifice/tform.py": ["/artifice/__init__.py"], "/artifice/mod.py": ["/artifice/__init__.py"], "/artifice/lay.py": ["/artifice/__init__.py"], "/artifice/sparse/sparse.py": ["/artifice/__init__.py", "/artifice/sparse/__init__.py"], "/scripts/arcing_sphere.py": ["/artifice/utils.py"], "/scripts/two_spheres.py": ["/artifice/utils.py"], "/test_utils/experiment.py": ["/artifice/utils.py", "/artifice/__init__.py"], "/artifice/conversions.py": ["/artifice/__init__.py"], "/test_utils/springs.py": ["/artifice/__init__.py"], "/test_utils/annotate.py": ["/artifice/utils.py"], "/artifice/sparse/sparse_lib.py": ["/artifice/__init__.py"], "/artifice/windows/__init__.py": ["/artifice/windows/annotator_window.py"], "/artifice/dat.py": ["/artifice/__init__.py"], "/artifice/prio.py": ["/artifice/__init__.py"], "/artifice/vis.py": ["/artifice/__init__.py"], "/artifice/main.py": ["/artifice/__init__.py"], "/artifice/sparse/__init__.py": ["/artifice/sparse/sparse.py"], "/artifice/ann.py": ["/artifice/sharedobjects/__init__.py", "/artifice/__init__.py"]} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.