index
int64 | repo_name
string | branch_name
string | path
string | content
string | import_graph
string |
|---|---|---|---|---|---|
33,036
|
tanaymitkari1/IntraNet
|
refs/heads/master
|
/ECA/views.py
|
from django.shortcuts import render, get_object_or_404
from django.urls import reverse
from django.contrib import messages
from .models import *
from django.http import HttpResponse, HttpResponseRedirect, Http404
from .forms import *
# Create your views here.
def eca(request):
    """Render the ECA landing page listing every workshop on record."""
    workshops = add_eca.objects.all()
    return render(request, 'eca.html', {'workshop': workshops})
def add_workshop(request):
    """Display the add-workshop form (GET) or create a workshop (POST).

    POST expects ``title``, ``info``, ``start_date`` and ``end_date`` in
    the request body, creates one ``add_eca`` row and redirects back to
    this form.
    """
    if request.method == 'GET':
        return render(request, 'control/add_workshop.html')
    if request.method == 'POST':
        title = request.POST["title"]
        info = request.POST["info"]
        stdt = request.POST["start_date"]
        eddt = request.POST["end_date"]
        # objects.create() returns the saved instance or raises, so the
        # old always-true ``if data:`` guard added nothing.
        add_eca.objects.create(title=title, information=info,
                               start_date=stdt, end_date=eddt)
        messages.success(request, "successful")  # fixed typo ("sucessful")
        return HttpResponseRedirect(reverse('add_workshop'))
def workshop_delete(request, id=None):
    """Confirm (GET) or perform (POST) deletion of one workshop."""
    workshop = get_object_or_404(add_eca, id=id)
    if request.method != 'POST':
        # Any non-POST request just shows the confirmation page.
        return render(request, 'control/workshop_delete.html',
                      {'workshop': workshop})
    workshop.delete()
    return HttpResponseRedirect(reverse('eca'))
def workshop_details(request, id=None):
    """Show one workshop (GET) or register the current user for it (POST)."""
    if request.method == 'GET':
        try:
            workshop = add_eca.objects.get(id=id)
        except add_eca.DoesNotExist:  # was a bare except hiding real errors
            raise Http404
        return render(request, 'student/workshop_detail.html',
                      {'workshop': workshop})
    if request.method == "POST":
        # Fixed: ``request`` has no ``user_id`` attribute; the authenticated
        # user's primary key is ``request.user.id``.
        user_id = request.user.id
        # Fixed typo ``objectes`` -> ``objects``. create() returns the saved
        # instance or raises, so report success unconditionally.
        Student_list.objects.create(user=user_id)
        # Fixed: ``message.sucess`` -> ``messages.success`` (NameError before).
        messages.success(request, "successful")
        return HttpResponseRedirect(reverse('eca'))
|
{"/placement/urls.py": ["/placement/views.py"], "/personal/filters.py": ["/personal/models.py"], "/placement/forms.py": ["/placement/models.py"], "/personal/views.py": ["/personal/models.py", "/personal/forms.py", "/personal/filters.py"], "/BOS/views.py": ["/BOS/models.py", "/BOS/filters.py"], "/personal/admin.py": ["/personal/models.py"], "/placement/views.py": ["/placement/models.py", "/placement/forms.py", "/placement/filters.py"], "/ECA/urls.py": ["/ECA/views.py"], "/intranet/views.py": ["/placement/models.py", "/personal/models.py"], "/BOS/filters.py": ["/BOS/models.py"], "/personal/urls.py": ["/personal/views.py"], "/placement/filters.py": ["/placement/models.py"], "/personal/forms.py": ["/personal/models.py"], "/BOS/urls.py": ["/BOS/views.py"], "/BOS/admin.py": ["/BOS/models.py"], "/ECA/forms.py": ["/ECA/models.py"], "/ECA/views.py": ["/ECA/models.py", "/ECA/forms.py"]}
|
33,037
|
tanaymitkari1/IntraNet
|
refs/heads/master
|
/ECA/migrations/0002_add_eca_status.py
|
# Generated by Django 3.0 on 2020-04-02 19:49
from django.db import migrations, models
class Migration(migrations.Migration):
    # Adds a free-text ``status`` column to the add_eca model; every
    # pre-existing row is backfilled with 'active'.

    dependencies = [
        ('ECA', '0001_initial'),
    ]
    operations = [
        migrations.AddField(
            model_name='add_eca',
            name='status',
            field=models.CharField(default='active', max_length=10),
        ),
    ]
|
{"/placement/urls.py": ["/placement/views.py"], "/personal/filters.py": ["/personal/models.py"], "/placement/forms.py": ["/placement/models.py"], "/personal/views.py": ["/personal/models.py", "/personal/forms.py", "/personal/filters.py"], "/BOS/views.py": ["/BOS/models.py", "/BOS/filters.py"], "/personal/admin.py": ["/personal/models.py"], "/placement/views.py": ["/placement/models.py", "/placement/forms.py", "/placement/filters.py"], "/ECA/urls.py": ["/ECA/views.py"], "/intranet/views.py": ["/placement/models.py", "/personal/models.py"], "/BOS/filters.py": ["/BOS/models.py"], "/personal/urls.py": ["/personal/views.py"], "/placement/filters.py": ["/placement/models.py"], "/personal/forms.py": ["/personal/models.py"], "/BOS/urls.py": ["/BOS/views.py"], "/BOS/admin.py": ["/BOS/models.py"], "/ECA/forms.py": ["/ECA/models.py"], "/ECA/views.py": ["/ECA/models.py", "/ECA/forms.py"]}
|
33,038
|
tanaymitkari1/IntraNet
|
refs/heads/master
|
/BOS/migrations/0002_auto_20200501_2043.py
|
# Generated by Django 3.0 on 2020-05-01 15:13
from django.db import migrations
class Migration(migrations.Migration):
    # Fixes a column-name typo on the adypu_data model:
    # ``scecialization`` -> ``specialization``.

    dependencies = [
        ('BOS', '0001_initial'),
    ]
    operations = [
        migrations.RenameField(
            model_name='adypu_data',
            old_name='scecialization',
            new_name='specialization',
        ),
    ]
|
{"/placement/urls.py": ["/placement/views.py"], "/personal/filters.py": ["/personal/models.py"], "/placement/forms.py": ["/placement/models.py"], "/personal/views.py": ["/personal/models.py", "/personal/forms.py", "/personal/filters.py"], "/BOS/views.py": ["/BOS/models.py", "/BOS/filters.py"], "/personal/admin.py": ["/personal/models.py"], "/placement/views.py": ["/placement/models.py", "/placement/forms.py", "/placement/filters.py"], "/ECA/urls.py": ["/ECA/views.py"], "/intranet/views.py": ["/placement/models.py", "/personal/models.py"], "/BOS/filters.py": ["/BOS/models.py"], "/personal/urls.py": ["/personal/views.py"], "/placement/filters.py": ["/placement/models.py"], "/personal/forms.py": ["/personal/models.py"], "/BOS/urls.py": ["/BOS/views.py"], "/BOS/admin.py": ["/BOS/models.py"], "/ECA/forms.py": ["/ECA/models.py"], "/ECA/views.py": ["/ECA/models.py", "/ECA/forms.py"]}
|
33,039
|
rlaplaza/rotator
|
refs/heads/master
|
/setup.py
|
import setuptools

# Read the long description from the README shipped next to this file.
with open("README.md", "r", encoding="utf-8") as fh:
    long_description = fh.read()

setuptools.setup(
    name="rotator",
    version="0.0",
    package_dir={"rotator": "rotator"},
    author="R.LAPLAZA",
    author_email="laplazasolanas@gmail.com",
    description="Geometry manipulation of molecule files.",
    long_description=long_description,
    long_description_content_type="text/markdown",
    url="https://github.com/rlaplaza/rotator",
    # Fixed: the previous ``package=["rotator", "rotator/test"]`` keyword is
    # not a valid setuptools argument and was silently ignored;
    # find_packages() already discovers "rotator" and "rotator.test".
    packages=setuptools.find_packages(),
    classifiers=["Programming Language :: Python :: 3"],
)
|
{"/rotator/test/test_reverse.py": ["/rotator/__init__.py"], "/rotator/test/test_xyz.py": ["/rotator/__init__.py"], "/rotator/test/test_flower.py": ["/rotator/__init__.py"], "/rotator/test/test_rodriguez.py": ["/rotator/__init__.py"], "/rotator/main.py": ["/rotator/__init__.py"], "/rotator/__init__.py": ["/rotator/reader.py", "/rotator/rotationmats.py", "/rotator/main.py"], "/rotator/test/test_move.py": ["/rotator/__init__.py"], "/rotator/test/test_fchk_molden.py": ["/rotator/__init__.py"]}
|
33,040
|
rlaplaza/rotator
|
refs/heads/master
|
/rotator/test/test_reverse.py
|
from rotator import *
import numpy as np
def test_reverse():
    """Round-trip a geometry through put_geom/gen_geom, with and without an
    extra (identity, default-angle) rotation, and check the coordinates
    come back unchanged."""
    mol = read_geom("water_opt.fchk")
    reference = gen_geom(mol, verb_lvl=3)
    # Plain round trip: write the coordinates back, extract them again.
    recovered = gen_geom(put_geom(mol, reference, verb_lvl=3), verb_lvl=3)
    assert np.allclose(reference, recovered)
    # Same round trip after multiplying by the default rotation matrix.
    rotated = np.dot(reference, g_rot_matrix(verb_lvl=3))
    recovered = gen_geom(put_geom(mol, rotated, verb_lvl=3), verb_lvl=3)
    assert np.allclose(reference, recovered)
|
{"/rotator/test/test_reverse.py": ["/rotator/__init__.py"], "/rotator/test/test_xyz.py": ["/rotator/__init__.py"], "/rotator/test/test_flower.py": ["/rotator/__init__.py"], "/rotator/test/test_rodriguez.py": ["/rotator/__init__.py"], "/rotator/main.py": ["/rotator/__init__.py"], "/rotator/__init__.py": ["/rotator/reader.py", "/rotator/rotationmats.py", "/rotator/main.py"], "/rotator/test/test_move.py": ["/rotator/__init__.py"], "/rotator/test/test_fchk_molden.py": ["/rotator/__init__.py"]}
|
33,041
|
rlaplaza/rotator
|
refs/heads/master
|
/rotator/test/test_xyz.py
|
from rotator import *
import numpy as np
def test_xyz():
    """Rotate water by 90/180/270 degrees about each axis via rotatexyz and
    verify the written files are consistent: applying a further 90-degree
    matrix to the 90-degree file reproduces the 180- and 270-degree files."""
    for degree in ("90", "180", "270"):
        for axis in ("x", "y", "z"):
            rotatexyz("water.xyz", degree=degree, axis=axis,
                      filename2="water_{0}{1}.xyz".format(degree, axis))
    for axis in ("x", "y", "z"):
        geom_90 = gen_geom(read_geom("water_90{0}.xyz".format(axis)))
        geom_180 = gen_geom(read_geom("water_180{0}.xyz".format(axis)))
        geom_270 = gen_geom(read_geom("water_270{0}.xyz".format(axis)))
        mat90 = s_rot_matrix(degree="90", axis=axis)
        once = np.dot(mat90, geom_90)
        twice = np.dot(mat90, once)
        # 90 more degrees on the 90-degree geometry matches the 180 file.
        assert np.allclose(once, geom_180)
        # 180 more degrees on the 90-degree geometry matches the 270 file.
        assert np.allclose(twice, geom_270)
|
{"/rotator/test/test_reverse.py": ["/rotator/__init__.py"], "/rotator/test/test_xyz.py": ["/rotator/__init__.py"], "/rotator/test/test_flower.py": ["/rotator/__init__.py"], "/rotator/test/test_rodriguez.py": ["/rotator/__init__.py"], "/rotator/main.py": ["/rotator/__init__.py"], "/rotator/__init__.py": ["/rotator/reader.py", "/rotator/rotationmats.py", "/rotator/main.py"], "/rotator/test/test_move.py": ["/rotator/__init__.py"], "/rotator/test/test_fchk_molden.py": ["/rotator/__init__.py"]}
|
33,042
|
rlaplaza/rotator
|
refs/heads/master
|
/rotator/test/test_flower.py
|
from rotator import *
import numpy as np
def test_flower():
    """Build a 'flower' of four waters — the original plus three 90-degree
    rotated copies displaced away from it — and write it as one xyz file."""
    mol1 = read_geom("water.xyz")
    natoms = 4 * mol1.atnums.size  # IOData is very handy!
    geom1 = gen_geom(mol1)
    print(geom1.T)
    mat = s_rot_matrix(degree="90", axis="x")
    geom2 = np.dot(mat, geom1)
    print(geom2.T)
    geom3 = np.dot(mat, geom2)
    print(geom3.T)
    geom4 = np.dot(mat, geom3)
    print(geom4.T)
    # Push each rotated copy away from the original so they do not overlap.
    geom2 = g_displace(geom2, vec=np.asarray([0, 0.5, 0.5]))
    print(geom2.T)
    geom3 = s_displace(geom3, axis="z", norm=1.0)
    print(geom3.T)
    geom4 = g_displace(geom4, vec=np.asarray([0, -0.5, 0.5]))
    print(geom4.T)
    merge = (geom1.T, geom2.T, geom3.T, geom4.T)
    geomflower = np.concatenate(merge, axis=0)
    # Fixed: the output handle was left open if any write raised; the
    # context manager guarantees it is closed.
    with open("flower.xyz", "w") as f:
        f.write(str(natoms) + "\n")
        f.write("A beautiful flower of waters\n")
        for i in range(natoms):
            f.write("C")
            for j in range(0, 3):
                a = np.format_float_positional(geomflower[i, j], precision=4)
                f.write(" " + str(a))
            f.write("\n")
|
{"/rotator/test/test_reverse.py": ["/rotator/__init__.py"], "/rotator/test/test_xyz.py": ["/rotator/__init__.py"], "/rotator/test/test_flower.py": ["/rotator/__init__.py"], "/rotator/test/test_rodriguez.py": ["/rotator/__init__.py"], "/rotator/main.py": ["/rotator/__init__.py"], "/rotator/__init__.py": ["/rotator/reader.py", "/rotator/rotationmats.py", "/rotator/main.py"], "/rotator/test/test_move.py": ["/rotator/__init__.py"], "/rotator/test/test_fchk_molden.py": ["/rotator/__init__.py"]}
|
33,043
|
rlaplaza/rotator
|
refs/heads/master
|
/rotator/reader.py
|
import numpy as np
import os
import pprint
from iodata import load_one, dump_one, IOData
pp = pprint.PrettyPrinter(indent=4)
class readererror(Exception):
    """Raised by the reader module when a file cannot be loaded or lacks
    the expected contents."""
def read_one(filename: str, verb=0):
    """Very simple wrapper for iodata load_one.

    Parameters
    ----------
    filename
        A string that contains the path to an input file.
    verb
        Verbosity level integer flag.

    Returns
    -------
    mol
        An IOData molecule object.

    Raises
    ------
    readererror
        If the file is not found or is not a file, or does not contain
        the basis set information needed to calculate the one-particle
        density matrix etc.
    """
    path = os.path.abspath(filename)
    # Fixed: explicit check instead of ``assert`` (asserts are stripped
    # under ``python -O``) wrapped in a bare except.
    if not os.path.isfile(path):
        raise readererror("Could not load the file {0}.".format(path))
    mol = load_one(path)
    # Make sure the MO coefficients are actually present; only the
    # missing-attribute case maps to readererror.
    try:
        mol.mo.coeffsa
    except AttributeError:
        raise readererror(
            "Basis set coefficients were not understood or are not present."
        )
    if verb > 1:
        print("File loaded using IOData.")
    if verb > 2:
        pp.pprint(mol)
    return mol
def read_geom(filename: str, verb=0):
    """Very simple wrapper for iodata load_one.

    Parameters
    ----------
    filename
        A string that contains the path to an input file.
    verb
        Verbosity level integer flag.

    Returns
    -------
    mol
        An IOData molecule object.

    Raises
    ------
    readererror
        If the file is not found.
    """
    path = os.path.abspath(filename)
    # Fixed: explicit check instead of ``assert`` (asserts are stripped
    # under ``python -O``) wrapped in a bare except.
    if not os.path.isfile(path):
        raise readererror("Could not load the file {0}.".format(path))
    mol = load_one(path)
    if verb > 1:
        print("File loaded using IOData.")
    if verb > 2:
        pp.pprint(mol)
    return mol
|
{"/rotator/test/test_reverse.py": ["/rotator/__init__.py"], "/rotator/test/test_xyz.py": ["/rotator/__init__.py"], "/rotator/test/test_flower.py": ["/rotator/__init__.py"], "/rotator/test/test_rodriguez.py": ["/rotator/__init__.py"], "/rotator/main.py": ["/rotator/__init__.py"], "/rotator/__init__.py": ["/rotator/reader.py", "/rotator/rotationmats.py", "/rotator/main.py"], "/rotator/test/test_move.py": ["/rotator/__init__.py"], "/rotator/test/test_fchk_molden.py": ["/rotator/__init__.py"]}
|
33,044
|
rlaplaza/rotator
|
refs/heads/master
|
/rotator/test/test_rodriguez.py
|
from rotator import *
import numpy as np
def test_rodriguez():
    """The per-axis builder and the general (Rodrigues) builder must agree
    for a 90-degree rotation about x, both as matrices and when applied."""
    geom = gen_geom(read_geom("water.xyz"))
    mat_simple = s_rot_matrix(degree=90, axis="x")
    mat_general = g_rot_matrix(degree=90, axis=[1, 0, 0])
    assert np.allclose(mat_simple, mat_general)
    assert np.allclose(np.dot(mat_simple, geom), np.dot(mat_general, geom))
|
{"/rotator/test/test_reverse.py": ["/rotator/__init__.py"], "/rotator/test/test_xyz.py": ["/rotator/__init__.py"], "/rotator/test/test_flower.py": ["/rotator/__init__.py"], "/rotator/test/test_rodriguez.py": ["/rotator/__init__.py"], "/rotator/main.py": ["/rotator/__init__.py"], "/rotator/__init__.py": ["/rotator/reader.py", "/rotator/rotationmats.py", "/rotator/main.py"], "/rotator/test/test_move.py": ["/rotator/__init__.py"], "/rotator/test/test_fchk_molden.py": ["/rotator/__init__.py"]}
|
33,045
|
rlaplaza/rotator
|
refs/heads/master
|
/rotator/main.py
|
import numpy as np
from numpy import linalg as la
from iodata import load_one, dump_one, IOData
import math
import os
import pprint
from rotator import *
pp = pprint.PrettyPrinter(indent=4)
class writererror(Exception):
    """ Exception class for errors in the writer (main) module.
    """

    pass
def write_geom(mol, filename: str, verb_lvl=0):
    """Very simple wrapper for iodata dump_one.

    Parameters
    ----------
    mol
        An IOData molecule object.
    filename
        A string that contains the path to an output file.
    verb_lvl
        Verbosity level integer flag.
    """
    dump_one(mol, filename)
    # Fixed: verb_lvl was accepted but ignored; mirror the verbosity
    # behaviour of read_geom/read_one.
    if verb_lvl > 1:
        print("File written using IOData.")
def rotatexyz(filename1: str, degree="0", axis="x", verb_lvl=0, filename2="output.xyz"):
    """Read an xyz file, rotate it some degrees around some axis and write it.

    Parameters
    ----------
    filename1
        A string that contains the path to an input xyz file.
    filename2
        A string that contains the path to the output xyz file; defaults
        to "output.xyz".
    degree
        Angle of the rotation in degrees.
    axis
        Axis of the rotation: the string "x"/"y"/"z", or a vector.
    verb_lvl
        Verbosity level integer flag.
    """
    molecule = read_geom(filename1, verb=verb_lvl)
    coords = gen_geom(molecule, verb_lvl)
    # String axes use the simple per-axis matrices; anything else goes
    # through the general Rodrigues construction.
    builder = s_rot_matrix if isinstance(axis, str) else g_rot_matrix
    rotation = builder(degree, axis, verb_lvl)
    rotated = np.dot(rotation, coords)
    newmol = put_geom(molecule, rotated, verb_lvl)
    write_geom(newmol, filename2, verb_lvl)
|
{"/rotator/test/test_reverse.py": ["/rotator/__init__.py"], "/rotator/test/test_xyz.py": ["/rotator/__init__.py"], "/rotator/test/test_flower.py": ["/rotator/__init__.py"], "/rotator/test/test_rodriguez.py": ["/rotator/__init__.py"], "/rotator/main.py": ["/rotator/__init__.py"], "/rotator/__init__.py": ["/rotator/reader.py", "/rotator/rotationmats.py", "/rotator/main.py"], "/rotator/test/test_move.py": ["/rotator/__init__.py"], "/rotator/test/test_fchk_molden.py": ["/rotator/__init__.py"]}
|
33,046
|
rlaplaza/rotator
|
refs/heads/master
|
/rotator/__init__.py
|
from iodata import load_one, dump_one, IOData
from rotator.reader import *
from rotator.rotationmats import *
from rotator.main import *
|
{"/rotator/test/test_reverse.py": ["/rotator/__init__.py"], "/rotator/test/test_xyz.py": ["/rotator/__init__.py"], "/rotator/test/test_flower.py": ["/rotator/__init__.py"], "/rotator/test/test_rodriguez.py": ["/rotator/__init__.py"], "/rotator/main.py": ["/rotator/__init__.py"], "/rotator/__init__.py": ["/rotator/reader.py", "/rotator/rotationmats.py", "/rotator/main.py"], "/rotator/test/test_move.py": ["/rotator/__init__.py"], "/rotator/test/test_fchk_molden.py": ["/rotator/__init__.py"]}
|
33,047
|
rlaplaza/rotator
|
refs/heads/master
|
/rotator/test/test_move.py
|
from rotator import *
import numpy as np
def test_move():
    """Displacing by (1, 1, 1) and then by (-1, -1, -1) must be a no-op."""
    start = gen_geom(read_geom("water.xyz"))
    moved = g_displace(start, vec=np.asarray([1, 1, 1]))
    returned = g_displace(moved, vec=np.asarray([-1, -1, -1]))
    assert np.allclose(start, returned)
|
{"/rotator/test/test_reverse.py": ["/rotator/__init__.py"], "/rotator/test/test_xyz.py": ["/rotator/__init__.py"], "/rotator/test/test_flower.py": ["/rotator/__init__.py"], "/rotator/test/test_rodriguez.py": ["/rotator/__init__.py"], "/rotator/main.py": ["/rotator/__init__.py"], "/rotator/__init__.py": ["/rotator/reader.py", "/rotator/rotationmats.py", "/rotator/main.py"], "/rotator/test/test_move.py": ["/rotator/__init__.py"], "/rotator/test/test_fchk_molden.py": ["/rotator/__init__.py"]}
|
33,048
|
rlaplaza/rotator
|
refs/heads/master
|
/rotator/rotationmats.py
|
import numpy as np
from numpy import linalg as la
from iodata import load_one, dump_one, IOData
import math
import os
import pprint
pp = pprint.PrettyPrinter(indent=4)
class rotatorerror(Exception):
    """Raised by the rotator module for invalid rotation or displacement
    arguments. Usually means something is very weird."""
def g_rot_matrix(degree="0.0", axis=(1, 1, 1), verb_lvl=0):
    """
    Return the rotation matrix associated with counterclockwise rotation
    about the given axis by ``degree`` degrees (Euler-Rodrigues formula).

    Parameters
    ----------
    degree
        Angle of the rotation in degrees; a number or a numeric string.
    axis
        Axis of the rotation. Array or list.
    verb_lvl
        Verbosity level integer flag.

    Returns
    -------
    rot
        3x3 rotation matrix to be used.

    Raises
    ------
    ValueError
        If ``degree`` cannot be converted to a number.
    """
    # float() accepts numbers and numeric strings alike, replacing the old
    # try-multiply / bare-except-and-convert dance. (The default axis is
    # now an immutable tuple rather than a shared ndarray.)
    theta = float(degree) * (np.pi / 180)
    axis = np.asarray(axis, dtype=float)
    axis = axis / math.sqrt(np.dot(axis, axis))
    # Euler-Rodrigues parameters.
    a = math.cos(theta / 2.0)
    b, c, d = -axis * math.sin(theta / 2.0)
    aa, bb, cc, dd = a * a, b * b, c * c, d * d
    bc, ad, ac, ab, bd, cd = b * c, a * d, a * c, a * b, b * d, c * d
    rot = np.array(
        [
            [aa + bb - cc - dd, 2 * (bc + ad), 2 * (bd - ac)],
            [2 * (bc - ad), aa + cc - bb - dd, 2 * (cd + ab)],
            [2 * (bd + ac), 2 * (cd - ab), aa + dd - bb - cc],
        ]
    )
    if verb_lvl > 1:
        print("Rotation matrix generated.")
    if verb_lvl > 2:
        pp.pprint(rot)
    return rot
def s_rot_matrix(degree="0.0", axis="x", verb_lvl=0):
    """
    Return the rotation matrix associated with counterclockwise rotation
    about the given coordinate axis by ``degree`` degrees.

    Parameters
    ----------
    degree
        Angle of the rotation in degrees; a number or a numeric string.
    axis
        Axis of the rotation, string "x", "y" or "z".
    verb_lvl
        Verbosity level integer flag.

    Returns
    -------
    rot
        3x3 rotation matrix to be used.

    Raises
    ------
    rotatorerror
        If ``axis`` is not one of the strings "x", "y" or "z".
    ValueError
        If ``degree`` cannot be converted to a number.
    """
    if not isinstance(axis, str):
        raise rotatorerror("This function takes axis=x/y/z only.")
    # float() accepts numbers and numeric strings alike, replacing the old
    # try-multiply / bare-except-and-convert dance.
    theta = float(degree) * (np.pi / 180)
    c, s = math.cos(theta), math.sin(theta)
    if axis == "x":
        rot = np.array([[1.0, 0, 0], [0, c, -s], [0, s, c]])
    elif axis == "y":
        rot = np.array([[c, 0, s], [0, 1.0, 0], [-s, 0, c]])
    elif axis == "z":
        rot = np.array([[c, -s, 0], [s, c, 0], [0, 0, 1.0]])
    else:
        raise rotatorerror("This function takes axis=x/y/z only.")
    if verb_lvl > 1:
        print("Rotation matrix generated.")
    if verb_lvl > 2:
        pp.pprint(rot)
    return rot
def g_displace(coordmat, vec=None, verb_lvl=0, norm=None):
    """Displace the geometry matrix following a displacement vector.

    Alternatively, it can take the direction from any vector and
    renormalize it to a given norm. The displacement is applied in place
    and the (mutated) matrix is also returned.

    Parameters
    ----------
    coordmat
        Geometry matrix (3 x natoms, one column per atom).
    vec
        The displacement vector (3 components); defaults to (1, 1, 1).
    norm
        The norm in angstrom. Optional; if given, ``vec`` is rescaled.
    verb_lvl
        Verbosity level integer flag.

    Returns
    -------
    coordmat
        Displaced geometry matrix.

    Raises
    ------
    rotatorerror
        If ``norm`` is given but is not convertible to a float.
    """
    # Fixed: a mutable ndarray default argument is shared between calls;
    # use None as the sentinel instead.
    if vec is None:
        vec = [1, 1, 1]
    vec = np.asarray(vec)
    if norm is not None:
        try:
            norm = float(norm)
        except (TypeError, ValueError):
            raise rotatorerror("This function needs a real or integer norm.")
        # Fixed: ``np.la.norm`` does not exist (AttributeError at runtime);
        # the module imports numpy.linalg as ``la``.
        vec = vec / (la.norm(vec) + 1e-16)
        vec = vec * norm
    # Fixed: reshape a view instead of assigning ``vec.shape``, which
    # mutated the caller's array in place.
    vec = vec.reshape(3, 1)
    if verb_lvl > 2:
        pp.pprint(vec)
    coordmat += vec
    return coordmat
def s_displace(coordmat, axis="x", norm=1, verb_lvl=0):
    """Displace the geometry matrix along a coordinate axis (x/y/z) by
    ``norm`` angstroms. The displacement is applied in place and the
    (mutated) matrix is also returned.

    Parameters
    ----------
    coordmat
        Geometry matrix (3 x natoms, one column per atom).
    axis
        Direction of the displacement, string "x", "y" or "z".
    norm
        The length of the displacement in angstrom.
    verb_lvl
        Verbosity level integer flag.

    Returns
    -------
    coordmat
        Displaced geometry matrix.

    Raises
    ------
    rotatorerror
        If ``axis`` is not "x"/"y"/"z" or ``norm`` is not numeric.
    """
    # Unit vectors indexed by axis name, replacing the if/elif chain.
    units = {"x": (1, 0, 0), "y": (0, 1, 0), "z": (0, 0, 1)}
    if not isinstance(axis, str) or axis not in units:
        raise rotatorerror("This function takes axis=x/y/z only.")
    try:
        norm = float(norm)
    except (TypeError, ValueError):  # was a bare except
        raise rotatorerror("This function needs a real or integer norm.")
    vec = np.array(units[axis]) * norm
    vec = vec.reshape(3, 1)
    if verb_lvl > 2:
        pp.pprint(vec)
    coordmat += vec
    return coordmat
def gen_geom(mol, verb_lvl=0):
    """
    Return the geometry matrix from the molecule object.

    Coordinates are converted bohr -> angstrom and returned as a
    3 x natoms matrix (one column per atom).

    Parameters
    ----------
    mol
        IOData molecule object.
    verb_lvl
        Verbosity level integer flag.

    Returns
    -------
    coordmat
        Geometry matrix (3 x natoms).

    Raises
    ------
    rotatorerror
        If ``mol`` is not an IOData object.
    """
    if not isinstance(mol, IOData):
        raise rotatorerror("Something other than an IOData mol object passed.")
    natoms = mol.atnums.size
    # Fixed: the buffer was hard-coded to 3x3, which only worked for
    # 3-atom molecules (IndexError for anything larger, garbage columns
    # for anything smaller). Size it from the actual atom count.
    coordmat = np.empty(shape=[3, natoms])
    # Its perfectly possible to simply transpose mol.atcoords; the
    # element-wise copy is kept for hookability.
    for i in range(0, 3):
        for j in range(natoms):
            coordmat[i, j] = mol.atcoords[j][i] * 0.52917721092
    if verb_lvl > 1:
        print("Geometry matrix generated.")
    if verb_lvl > 2:
        pp.pprint(coordmat)
        pp.pprint(mol.atcoords)
    return coordmat
def put_geom(mol, coordmat, verb_lvl=0):
    """
    Put a new geometry matrix into the molecule object.

    The matrix is taken to be in angstrom and is written back into
    ``mol.atcoords`` in bohr (hence the conversion factor).

    Parameters
    ----------
    mol
        IOData molecule object.
    coordmat
        Geometry matrix (3 x natoms).
    verb_lvl
        Verbosity level integer flag.

    Returns
    -------
    mol
        IOData molecule object with the new geometry.

    Raises
    ------
    rotatorerror
        If ``mol`` is not an IOData object.
    """
    if not isinstance(mol, IOData):
        raise rotatorerror("Something other than an IOData mol object passed.")
    # One could assign coordmat to mol.atcoords wholesale; the element-wise
    # copy is kept for hookability.
    for atom in range(mol.atnums.size):
        for component in range(0, 3):
            mol.atcoords[atom][component] = coordmat[component, atom] * 1.8897259886
    if verb_lvl > 1:
        print("Geometry matrix updated.")
    if verb_lvl > 2:
        pp.pprint(coordmat)
        pp.pprint(mol.atcoords)
    return mol
|
{"/rotator/test/test_reverse.py": ["/rotator/__init__.py"], "/rotator/test/test_xyz.py": ["/rotator/__init__.py"], "/rotator/test/test_flower.py": ["/rotator/__init__.py"], "/rotator/test/test_rodriguez.py": ["/rotator/__init__.py"], "/rotator/main.py": ["/rotator/__init__.py"], "/rotator/__init__.py": ["/rotator/reader.py", "/rotator/rotationmats.py", "/rotator/main.py"], "/rotator/test/test_move.py": ["/rotator/__init__.py"], "/rotator/test/test_fchk_molden.py": ["/rotator/__init__.py"]}
|
33,049
|
rlaplaza/rotator
|
refs/heads/master
|
/rotator/test/test_fchk_molden.py
|
from rotator import *
import numpy as np
def test_fchk_molden():
    """Reading the same optimized water from fchk and molden formats must
    yield identical coordinate matrices."""
    coords_fchk = gen_geom(read_geom("water_opt.fchk"), verb_lvl=3)
    coords_molden = gen_geom(read_geom("water_opt.molden"), verb_lvl=3)
    assert np.allclose(coords_fchk, coords_molden)
|
{"/rotator/test/test_reverse.py": ["/rotator/__init__.py"], "/rotator/test/test_xyz.py": ["/rotator/__init__.py"], "/rotator/test/test_flower.py": ["/rotator/__init__.py"], "/rotator/test/test_rodriguez.py": ["/rotator/__init__.py"], "/rotator/main.py": ["/rotator/__init__.py"], "/rotator/__init__.py": ["/rotator/reader.py", "/rotator/rotationmats.py", "/rotator/main.py"], "/rotator/test/test_move.py": ["/rotator/__init__.py"], "/rotator/test/test_fchk_molden.py": ["/rotator/__init__.py"]}
|
33,050
|
orid7/SpeachToText
|
refs/heads/master
|
/SpeachToText.py
|
# -*- coding: utf-8 -*-
"""
Created on Fri Jun 29 10:53:28 2018
@author: ori dahari
"""
from __future__ import print_function
import time
import boto3
def main(fileName, jobName):
    """Transcribe a wav file previously uploaded to S3 and return the text.

    Starts an AWS Transcribe job named ``jobName`` on
    ``recordtest/<fileName>.wav``, polls every 5 seconds until the job
    completes or fails, then downloads the transcript JSON and returns
    the transcript string.
    """
    # Fixed: imports were buried mid-function; hoist them to the top.
    import urllib.request
    import json
    # --- Audio to text ---
    transcribe = boto3.client('transcribe', region_name="us-west-2")
    job_name = jobName
    job_uri = "https://s3-us-west-2.amazonaws.com/recordtest/{}.wav".format(fileName)
    transcribe.start_transcription_job(
        TranscriptionJobName=job_name,
        Media={'MediaFileUri': job_uri},
        MediaFormat='wav',
        LanguageCode='en-US',
    )
    while True:
        status = transcribe.get_transcription_job(TranscriptionJobName=job_name)
        if status['TranscriptionJob']['TranscriptionJobStatus'] in ['COMPLETED', 'FAILED']:
            break
        time.sleep(5)
        print(status)
    # --- URL to string ---
    # NOTE(review): a FAILED job has no TranscriptFileUri and will KeyError
    # below — consider handling that status explicitly.
    with urllib.request.urlopen(status['TranscriptionJob']['Transcript']['TranscriptFileUri']) as url:
        data = json.loads(url.read().decode())
    EnText = data['results']['transcripts'][0]['transcript']
    print(EnText)
    return EnText
|
{"/MainClass.py": ["/recordToS3.py", "/SpeachToText.py", "/Twitter_Sentiment_Analysis_loadModel.py"]}
|
33,051
|
orid7/SpeachToText
|
refs/heads/master
|
/recordToS3.py
|
# -*- coding: utf-8 -*-
"""
Created on Sun Jun 30 18:40:34 2019
@author: ori dahari
"""
import pyaudio
import wave
import boto3
import os
# NOTE(review): working directory is hardcoded to the author's machine, and
# the single backslashes rely on \o, \D, \M, \P not being recognized string
# escapes — a raw string (or pathlib) would be safer. Verify before reuse.
os.chdir('C:\\Users\ori dahari\Documents\MBA\mini9\Practicum')
os.getcwd()  # return value unused; presumably a leftover sanity check
def main(recSec, fileName):
    """Record ``recSec`` seconds of stereo 44.1 kHz audio to
    ``<fileName>.wav`` and upload it to the 'recordtest' S3 bucket under
    the same key."""
    FORMAT = pyaudio.paInt16
    CHANNELS = 2
    RATE = 44100
    CHUNK = 1024
    RECORD_SECONDS = recSec
    WAVE_OUTPUT_FILENAME = "{}.wav".format(fileName)
    audio = pyaudio.PyAudio()
    # start Recording
    stream = audio.open(format=FORMAT, channels=CHANNELS,
                        rate=RATE, input=True,
                        frames_per_buffer=CHUNK)
    print("recording...")
    frames = []
    for i in range(0, int(RATE / CHUNK * RECORD_SECONDS)):
        data = stream.read(CHUNK)
        frames.append(data)
    print("finished recording")
    # stop Recording
    stream.stop_stream()
    stream.close()
    audio.terminate()
    # Fixed: write the wav through a context manager so the file is closed
    # even if a write fails.
    with wave.open(WAVE_OUTPUT_FILENAME, 'wb') as waveFile:
        waveFile.setnchannels(CHANNELS)
        waveFile.setsampwidth(audio.get_sample_size(FORMAT))
        waveFile.setframerate(RATE)
        waveFile.writeframes(b''.join(frames))
    # upload to S3 (the old ``bucket_resource = s3`` alias added nothing)
    s3 = boto3.client("s3")
    s3.upload_file(
        Bucket='recordtest',
        Filename=WAVE_OUTPUT_FILENAME,
        Key=WAVE_OUTPUT_FILENAME
    )
|
{"/MainClass.py": ["/recordToS3.py", "/SpeachToText.py", "/Twitter_Sentiment_Analysis_loadModel.py"]}
|
33,052
|
orid7/SpeachToText
|
refs/heads/master
|
/MainClass.py
|
# -*- coding: utf-8 -*-
"""
Created on Mon Jul 1 12:50:06 2019
@author: ori dahari
"""
import os
import re
os.chdir('C:\\Users\ori dahari\Documents\MBA\mini9\Practicum')
import recordToS3
import SpeachToText
import Twitter_Sentiment_Analysis_loadModel
fileName = "test81"
jobName = "test81"
recLengthSec = 4

# Record audio, push it to S3, transcribe it to text.
recordToS3.main(recLengthSec, fileName)
text = SpeachToText.main(fileName, jobName)

# Sentence-level sentiment first, then sentiment for each word.
sentenceResult = Twitter_Sentiment_Analysis_loadModel.predict(text)
sentenceResult['sentence'] = text
resultList = [sentenceResult]
# Fixed: raw string for the regex — "[^\w]" relied on \w not being a
# recognized string escape and warns on modern Python.
wordList = re.sub(r"[^\w]", " ", text).split()
for word in wordList:
    wordResult = Twitter_Sentiment_Analysis_loadModel.predict(word)
    wordResult['word'] = word
    resultList.append(wordResult)
resultList  # NOTE(review): bare expression; presumably inspected in a REPL
|
{"/MainClass.py": ["/recordToS3.py", "/SpeachToText.py", "/Twitter_Sentiment_Analysis_loadModel.py"]}
|
33,053
|
orid7/SpeachToText
|
refs/heads/master
|
/Twitter_Sentiment_Analysis_loadModel.py
|
# # Twitter Sentiment Analysis
# In[ ]:
#pip install gensim --upgrade
#pip install keras --upgrade
#pip install pandas --upgrade
#pip install tenserflow --upgrade
#pip uninstall gensim
#sudo apt-get install python3-dev build-essential
#sudo pip3 install --upgrade gensim
from keras.models import load_model
# In[ ]:
from gensim.models import Word2Vec
# In[ ]:
# DataFrame
import pandas as pd
# Matplot
import matplotlib.pyplot as plt
# Scikit-learn
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import LabelEncoder
from sklearn.metrics import confusion_matrix, classification_report, accuracy_score
from sklearn.manifold import TSNE
from sklearn.feature_extraction.text import TfidfVectorizer
# In[ ]:
# Keras
from keras.preprocessing.text import Tokenizer
from keras.preprocessing.sequence import pad_sequences
from keras.models import Sequential
from keras.layers import Activation, Dense, Dropout, Embedding, Flatten, Conv1D, MaxPooling1D, LSTM
from keras import utils
from keras.callbacks import ReduceLROnPlateau, EarlyStopping
# In[ ]:
# nltk
from nltk.corpus import stopwords
from nltk.stem import SnowballStemmer
# Word2vec
# Utility
import numpy as np
from collections import Counter
import time
import pickle
# Set log
#logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.INFO)
# In[ ]:
#nltk.download('stopwords')
# ### Settings
# In[ ]:
# DATASET
DATASET_COLUMNS = ["target", "ids", "date", "flag", "user", "text"]
DATASET_ENCODING = "ISO-8859-1"
TRAIN_SIZE = 0.8
# TEXT CLEANING (fixed header typo "CLENAING"; raw string so \S is an
# explicit regex escape instead of relying on Python passing unknown
# string escapes through)
TEXT_CLEANING_RE = r"@\S+|https?:\S+|http?:\S|[^A-Za-z0-9]+"
# WORD2VEC
W2V_SIZE = 300
W2V_WINDOW = 7
W2V_EPOCH = 32
W2V_MIN_COUNT = 10
# KERAS
SEQUENCE_LENGTH = 300
EPOCHS = 8
BATCH_SIZE = 1024
# SENTIMENT labels and the (low, high) score thresholds separating them
POSITIVE = "POSITIVE"
NEGATIVE = "NEGATIVE"
NEUTRAL = "NEUTRAL"
SENTIMENT_THRESHOLDS = (0.4, 0.7)
# EXPORT file names of the trained artifacts loaded below
KERAS_MODEL = "model.h5"
WORD2VEC_MODEL = "model.w2v"
TOKENIZER_MODEL = "tokenizer.pkl"
ENCODER_MODEL = "encoder.pkl"
# ### Read Dataset
# ### Dataset details
# * **target**: the polarity of the tweet (0 = negative, 2 = neutral, 4 = positive)
# * **ids**: The id of the tweet ( 2087)
# * **date**: the date of the tweet (Sat May 16 23:58:44 UTC 2009)
# * **flag**: The query (lyx). If there is no query, then this value is NO_QUERY.
# * **user**: the user that tweeted (robotickilldozr)
# * **text**: the text of the tweet (Lyx is cool)
# In[ ]:
# Load the trained artifacts exported by the training notebook.
model=load_model(KERAS_MODEL)
# In[ ]:
w2v_model = Word2Vec.load(WORD2VEC_MODEL)
# In[ ]:
# NOTE(review): unpickling is only safe for trusted, locally produced files.
with open(TOKENIZER_MODEL, 'rb') as handle:
    tokenizer = pickle.load(handle)
#tokenizer = pickle.load(TOKENIZER_MODEL,"rb")
# In[ ]:
with open(ENCODER_MODEL, 'rb') as handle:
    encoder = pickle.load(handle)
# ### Pre-Process dataset
# In[ ]:
#stop_words = stopwords.words("english")
#stemmer = SnowballStemmer("english")
# ### Predict
# In[ ]:
def decode_sentiment(score, include_neutral=True):
    """Map a model score in [0, 1] onto a sentiment label.

    With ``include_neutral`` the thresholds carve the range into
    NEGATIVE / NEUTRAL / POSITIVE bands; otherwise it is a plain 0.5 split.
    """
    if not include_neutral:
        return NEGATIVE if score < 0.5 else POSITIVE
    low, high = SENTIMENT_THRESHOLDS
    if score <= low:
        return NEGATIVE
    if score >= high:
        return POSITIVE
    return NEUTRAL
# In[ ]:
def predict(text, include_neutral=True):
    """Run the sentiment model on ``text`` and return a dict with the
    label, the raw score and the elapsed wall-clock time."""
    start_at = time.time()
    # Tokenize and pad to the fixed model input length.
    padded = pad_sequences(tokenizer.texts_to_sequences([text]),
                           maxlen=SEQUENCE_LENGTH)
    score = model.predict([padded])[0]
    label = decode_sentiment(score, include_neutral=include_neutral)
    return {
        "label": label,
        "score": float(score),
        "elapsed_time": time.time() - start_at,
    }
# In[ ]:
#predict("I love the music")
# In[ ]:
#predict("I hate the rain")
|
{"/MainClass.py": ["/recordToS3.py", "/SpeachToText.py", "/Twitter_Sentiment_Analysis_loadModel.py"]}
|
33,054
|
DableUTeeF/seven2
|
refs/heads/master
|
/stuff/create_csv.py
|
"""
"""
source = open('/home/palm/PycharmProjects/Seven/stuff/data1-30-9.txt').read().split('\n')[:-1]
clsed = []
open('/home/palm/PycharmProjects/keras-retinanet/datasetstuff/7classes.csv', 'w')
with open('/home/palm/PycharmProjects/keras-retinanet/datasetstuff/data1-30-9.csv', 'w') as wr:
for s in source:
x = s.split()
x1 = min(480, max(0, min(int(x[1]), int(x[2]))))
x2 = min(480, max(0, max(int(x[1]), int(x[2]))))
y1 = min(640, max(0, min(int(x[3]), int(x[4]))))
y2 = min(640, max(0, max(int(x[3]), int(x[4]))))
cls = x[0].split('/')[-2]
if abs(x1-x2) < 10 or abs(y1-y2) < 10:
continue
wr.write(f'{x[0]},{x1},{y1},{x2},{y2},{cls}\n')
if cls not in clsed:
with open('/home/palm/PycharmProjects/keras-retinanet/datasetstuff/7classes.csv', 'a') as wr2:
wr2.write(f'{cls},{len(clsed)}\n')
clsed.append(cls)
|
{"/siamese/siamese_train.py": ["/siamese/datagen.py"], "/readjust_xml.py": ["/siamese/siamese_predict.py"], "/siamese/multiprocess_predict.py": ["/siamese/siamese_predict.py"], "/siamese/siamese_inference.py": ["/siamese/siamese_predict.py"]}
|
33,055
|
DableUTeeF/seven2
|
refs/heads/master
|
/csv_convert.py
|
import os
# Merge several annotation dumps into train/val CSVs plus a class index file.
files = ['stuff/data1(damkoeng).txt', 'stuff/data1-30-9.txt', 'stuff/data1_green_Screen.txt', 'stuff/data1-30-9-gs.txt']
dests = ['/home/root1/dataset-2020/7/data1/data1(damkoeng)',
         '/home/root1/dataset-2020/7/data1 (3)',
         '/home/root1/dataset-2020/7/data1/data1_green_Screen',
         '/home/root1/dataset-2020/7/data1 (2)',
         ]
# Every class name seen, in encounter order (may contain duplicates).
classes = []
# Truncate the three output files; the loops below append to them.
open('anns/val_ann.csv', 'w')
open('anns/ann.csv', 'w')
open('anns/classes.csv', 'w')
for i, file in enumerate(files):
    src = open(file).read().split('\n')
    while src[-1] == '':
        src = src[:-1]
    for line in src:
        ln = line.split(' ')
        s_paths = os.path.split(ln[0])
        cls = s_paths[0].split('/')[-1]
        d_path = os.path.join(dests[i], cls, s_paths[-1])
        if not os.path.exists(d_path):
            print(d_path, 'not exists')  # fixed message typo ("not exits")
            continue
        x1, y1, x2, y2 = int(ln[-4]), int(ln[-3]), int(ln[-2]), int(ln[-1])
        # Drop near-degenerate boxes.
        if (x2 - x1) + (y2 - y1) < 10:
            continue
        # Guarantee a strictly positive extent on each axis.
        if x2 <= x1:
            x2 += 1
        if y2 <= y1:
            y2 += 1
        obj = f'{d_path},{min(x1, x2)},{min(y1, y2)},{max(x1, x2)},{max(y1, y2)},{s_paths[0]}'
        # The first sample of each class goes to validation, later ones to training.
        if s_paths[0] not in classes:
            with open('anns/val_ann.csv', 'a') as wr:
                wr.write(obj)
                wr.write('\n')
        else:
            with open('anns/ann.csv', 'a') as wr:
                wr.write(obj)
                wr.write('\n')
        classes.append(s_paths[0])
# De-duplicate while keeping first-seen order. The previous
# `list(set(classes))` made class-id assignment depend on string hash
# randomization, so ids could differ between runs.
classes = list(dict.fromkeys(classes))
with open('anns/classes.csv', 'a') as wr:
    for i, line in enumerate(classes):
        wr.write(line)
        wr.write(',')
        wr.write(str(i))
        wr.write('\n')
|
{"/siamese/siamese_train.py": ["/siamese/datagen.py"], "/readjust_xml.py": ["/siamese/siamese_predict.py"], "/siamese/multiprocess_predict.py": ["/siamese/siamese_predict.py"], "/siamese/siamese_inference.py": ["/siamese/siamese_predict.py"]}
|
33,056
|
DableUTeeF/seven2
|
refs/heads/master
|
/siamese/siamese_train.py
|
import os
import sys
# noinspection PyUnboundLocalVariable
if __name__ == "__main__" and __package__ is None:
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
__package__ = "siamese"
from siamese.models import ResNet, ContrastiveLoss
from siamese.datagen import DirectorySiameseLoader
import json
import torch
from torch.nn import functional as F
from natthaphon import Model
from torchvision import transforms
class ThresholdAcc:
    """Callable metric for siamese pairs based on cosine similarity.

    A pair is predicted a match (1) when the cosine similarity of the two
    embeddings exceeds 0.7. NOTE(review): the returned value is the
    fraction of predictions that DISAGREE with the targets — confirm the
    intended label convention before relying on this as "accuracy".
    """
    def __call__(self, inputs, targets):
        similarity = F.cosine_similarity(inputs[0], inputs[1])
        decisions = (similarity > 0.7).long()
        mismatches = torch.sum(decisions != targets.long()).float()
        return mismatches / targets.size(0)
    def __str__(self):
        return 'acc()'
if __name__ == '__main__':
    # Next run index under the snapshot directory (one subfolder per run).
    save_no = len(os.listdir('./snapshots/pairs'))
    impath = '/home/palm/PycharmProjects/seven/images/cropped3/train'
    model = Model(ResNet(zero_init_residual=False))
    model.compile(torch.optim.SGD(model.model.parameters(),
                                  lr=0.001,
                                  momentum=0.9,
                                  weight_decay=1e-4),
                  ContrastiveLoss(),
                  metric=None,
                  device='cuda')
    # Warm-start from the base checkpoint; optimizer state is not restored.
    model.load_weights('./snapshots/base.pth', load_opt=False)
    # Standard ImageNet normalization constants.
    normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406],
                                     std=[0.229, 0.224, 0.225])
    train_datagen = DirectorySiameseLoader(impath,
                                           transforms.Compose([transforms.Resize(256),
                                                               transforms.RandomResizedCrop(224),
                                                               transforms.RandomHorizontalFlip(),
                                                               transforms.RandomVerticalFlip(),
                                                               transforms.ToTensor(),
                                                               normalize]))
    train_generator = train_datagen.get_dset(8, 1)
    os.makedirs(f'./snapshots/pairs/{save_no}', exist_ok=True)
    try:
        # Train for 20 epochs with LR drops at epochs 10 and 15,
        # checkpointing on the ContrastiveLoss value each epoch.
        h = model.fit_generator(train_generator, 20,
                                schedule=[10, 15],
                                tensorboard=f'logs/pair/{len(os.listdir("logs/pair"))}',
                                epoch_end=model.checkpoint(f'./snapshots/pairs/{save_no}', 'ContrastiveLoss'), step=200)
        with open('siamese.json', 'w') as wr:
            json.dump(h, wr)
    finally:
        # Always persist the latest weights, even on interruption.
        model.save_weights('./snapshots/pairs_temp.pth')
|
{"/siamese/siamese_train.py": ["/siamese/datagen.py"], "/readjust_xml.py": ["/siamese/siamese_predict.py"], "/siamese/multiprocess_predict.py": ["/siamese/siamese_predict.py"], "/siamese/siamese_inference.py": ["/siamese/siamese_predict.py"]}
|
33,057
|
DableUTeeF/seven2
|
refs/heads/master
|
/stuff/gdriveupload.py
|
from pydrive.auth import GoogleAuth
from pydrive.drive import GoogleDrive
import os
if __name__ == '__main__':
    # Upload every file in a local snapshot folder to a Google Drive folder.
    gauth = GoogleAuth()
    # Try to load saved client credentials
    gauth.LoadCredentialsFile("stuff/mycreds.txt")
    if gauth.credentials is None:
        # Authenticate if they're not there
        gauth.LocalWebserverAuth()
    elif gauth.access_token_expired:
        # Refresh them if expired
        gauth.Refresh()
    else:
        # Initialize the saved creds
        gauth.Authorize()
    # Save the current credentials to a file
    gauth.SaveCredentialsFile("stuff/mycreds.txt")
    src_snapshot = 'snapshots/pairs/11'
    # Target Drive folder ("pairs") identified by its Drive file id.
    dest_images = {"title": "pairs", "id": "17r6Yv5Jt8hbBU_PJ7gN-W9Wt_NaaAPvL"}
    drive = GoogleDrive(gauth)
    # file_list = drive.ListFile({'q': "'root' in parents and trashed=false"}).GetList()
    # List what is already in the destination folder (for inspection only).
    file_list = drive.ListFile({'q': "'17r6Yv5Jt8hbBU_PJ7gN-W9Wt_NaaAPvL' in parents and trashed=false"}).GetList()
    for file1 in file_list:
        print('title: %s, id: %s' % (file1['title'], file1['id']))
    # exit()
    try:
        for files in os.listdir(src_snapshot):
            textfile = drive.CreateFile({'title': files, "parents": [{"kind": "drive#fileLink", "id": dest_images['id']}]})
            textfile.SetContentFile(os.path.join(src_snapshot, files))
            textfile.Upload()
            print('Uploaded:', files)
    except Exception as e:
        # Best-effort upload: report the failure and stop.
        print(e)
|
{"/siamese/siamese_train.py": ["/siamese/datagen.py"], "/readjust_xml.py": ["/siamese/siamese_predict.py"], "/siamese/multiprocess_predict.py": ["/siamese/siamese_predict.py"], "/siamese/siamese_inference.py": ["/siamese/siamese_predict.py"]}
|
33,058
|
DableUTeeF/seven2
|
refs/heads/master
|
/siamese/create_dataset.py
|
import os
import sys
# noinspection PyUnboundLocalVariable
if __name__ == "__main__" and __package__ is None:
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
__package__ = "siamese"
from yolo.utils import create_csv_training_instances
import cv2
import os
if __name__ == '__main__':
    # Crop every annotated object out of the validation images and save the
    # square-padded crops into per-class folders for siamese training.
    train_ints, valid_ints, labels, max_box_per_image = create_csv_training_instances('/home/palm/PycharmProjects/seven2/anns/annotation.csv',
                                                                                     '/home/palm/PycharmProjects/seven2/anns/val_ann.csv',
                                                                                     '/home/palm/PycharmProjects/seven2/anns/classes.csv',
                                                                                     )
    save_path = '/home/palm/PycharmProjects/seven/images/test6'
    for instance in valid_ints:
        image = cv2.imread(instance['filename'])
        for idx, obj in enumerate(instance['object']):
            # Clamp the box into the image bounds before cropping.
            x1 = max(0, obj['xmin'])
            x2 = min(image.shape[1], obj['xmax'])
            y1 = max(0, obj['ymin'])
            y2 = min(image.shape[0], obj['ymax'])
            cropped_image = image[y1:y2, x1:x2]
            # Pad the shorter axis with a constant border to make the crop square.
            if x2 - x1 > y2 - y1:
                p = ((x2 - x1) - (y2 - y1)) // 2
                cropped_image = cv2.copyMakeBorder(cropped_image, p, p, 0, 0, cv2.BORDER_CONSTANT)
            else:
                p = ((y2 - y1) - (x2 - x1)) // 2
                cropped_image = cv2.copyMakeBorder(cropped_image, 0, 0, p, p, cv2.BORDER_CONSTANT)
            # Last character of the image's directory name identifies the set.
            setname = os.path.split(instance['filename'])[0][-1]
            if obj['name'] in ['obj']:
                # Unlabelled detections go into a shared "unknown" bucket.
                os.makedirs(os.path.join(save_path, 'unknown/obj'), exist_ok=True)
                cv2.imwrite(os.path.join(save_path, 'unknown/obj', setname + '_' + str(idx) + '_' + os.path.basename(instance['filename'])),
                            cropped_image)
            else:
                os.makedirs(os.path.join(save_path, 'train', obj['name']), exist_ok=True)
                cv2.imwrite(os.path.join(save_path, 'train', obj['name'], setname + '_' + str(idx) + '_' + os.path.basename(instance['filename'])),
                            cropped_image)
|
{"/siamese/siamese_train.py": ["/siamese/datagen.py"], "/readjust_xml.py": ["/siamese/siamese_predict.py"], "/siamese/multiprocess_predict.py": ["/siamese/siamese_predict.py"], "/siamese/siamese_inference.py": ["/siamese/siamese_predict.py"]}
|
33,059
|
DableUTeeF/seven2
|
refs/heads/master
|
/get_anno_obj_detect.py
|
import cv2
import numpy as np
import os
# Number of cameras to capture from simultaneously.
N = 3
# Root directory where saved frames and their annotations are written.
path = './data1/'
cap = [None] * N
for i in range(N):
    # Cameras are opened at device indices 1..N (index 0 is skipped).
    cap[i] = cv2.VideoCapture(i + 1)
    cap[i].set(cv2.CAP_PROP_FRAME_WIDTH, 1920)
    cap[i].set(cv2.CAP_PROP_FRAME_HEIGHT, 1080)
# Mouse-drag state shared with the draw_rect callback:
# (ix, iy) drag start and (ex, ey) drag end, per camera window.
drawing = False
ix = [None] * N
iy = [None] * N
ex = [None] * N
ey = [None] * N
for i in range(N):
    ix[i], iy[i] = 0, 0
    ex[i], ey[i] = 0, 0
def draw_rect(event, x, y, flags, i):
    """Mouse callback for camera window *i*: drag to draw a bounding box.

    Updates the module-level start (ix, iy) and end (ex, ey) corner lists.
    On button release the corners are normalised so (ix, iy) is the
    top-left corner and (ex, ey) the bottom-right.
    """
    global ix, iy, ex, ey, drawing
    if event == cv2.EVENT_LBUTTONDOWN:
        drawing = True
        ix[i], iy[i] = x, y
    elif event == cv2.EVENT_MOUSEMOVE:
        if drawing:
            drawimg[i] = img[i].copy()
            ex[i] = x
            ey[i] = y
    elif event == cv2.EVENT_LBUTTONUP:
        drawing = False
        ex[i] = x
        ey[i] = y
        # Normalise each axis independently. The previous code only
        # swapped corners when BOTH axes were inverted, so dragging
        # up-right or down-left left one axis with reversed corners.
        ix[i], ex[i] = min(ix[i], x), max(ix[i], x)
        iy[i], ey[i] = min(iy[i], y), max(iy[i], y)
for i in range(N):
    cv2.namedWindow('img' + str(i))
    cv2.setMouseCallback('img' + str(i), draw_rect, i)
class_name = '0'
class_num = 0
# Main capture loop. Keys: 'q' quit, 'n' next class, 's' save all frames.
while True:
    img = [None] * N
    drawimg = [None] * N
    for i in range(N):
        _, img[i] = cap[i].read()
        # Frames are rotated 90 degrees before display and saving.
        img[i] = np.rot90(img[i])
        drawimg[i] = img[i].copy()
        # Overlay the current box and class label on a scaled-down preview.
        cv2.rectangle(drawimg[i], (ix[i], iy[i]), (ex[i], ey[i]), (0, 0, 255), 2)
        cv2.putText(drawimg[i], class_name, (ix[i], iy[i] - 10), cv2.FONT_HERSHEY_COMPLEX, 1, (0, 0, 255), 1)
        cv2.imshow('img' + str(i), cv2.resize(drawimg[i], None, None, 0.2, 0.2))
    key = cv2.waitKey(1) & 0xFF
    if key == ord('q'):
        break
    if key == ord('n'):
        # Advance to the next (numeric) class label.
        class_num += 1
        class_name = str(class_num)
    if key == ord('s'):
        if not os.path.exists(path + class_name):
            os.makedirs(path + class_name)
        # Find the highest existing frame number to continue the sequence.
        maxnum = 0
        for f in os.listdir(path + class_name):
            if '.jpg' in f:
                n = int(f[:-4])
                if n > maxnum:
                    maxnum = n
        for i in range(N):
            # Save each camera's frame plus a matching box annotation file.
            fn = path + class_name + '/' + str(maxnum + 1 + i) + '.jpg'
            cv2.imwrite(fn, img[i])
            fn = path + class_name + '/' + str(maxnum + 1 + i) + '.txt'
            with open(fn, 'w') as f:
                f.write('%s %d %d %d %d\n' % (class_name, ix[i], iy[i], ex[i], ey[i]))
|
{"/siamese/siamese_train.py": ["/siamese/datagen.py"], "/readjust_xml.py": ["/siamese/siamese_predict.py"], "/siamese/multiprocess_predict.py": ["/siamese/siamese_predict.py"], "/siamese/siamese_inference.py": ["/siamese/siamese_predict.py"]}
|
33,060
|
DableUTeeF/seven2
|
refs/heads/master
|
/siamese/lsh_test.py
|
"""
pairwise = 3.2582782537205974e-05, 3.403033881831663e-05
lsh_pair = 1.1194194062374617e-05, 1.2179314307509757e-05
lsh_cosd = 1.3693449689933570e-05, 1.3121787751072310e-05
lsh_eusq = 0.8139138601596410e-05, 0.8484695964420297e-05
lsh_eusc = 3.4231815388225626e-05, 3.3993883803595526e-05
lsh_hamm = 2.0442149504858307e-05, 2.0106958167212748e-05
new_pair = 1.6684917586866188e-05, 1.6523444134256113e-05
"""
from lshash.lshash import LSHash
import os
import sys
# noinspection PyUnboundLocalVariable
if __name__ == "__main__" and __package__ is None:
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
__package__ = "siamese"
from siamese.models import ResNet, ContrastiveLoss
from PIL import Image
import torch
from natthaphon import Model
from torchvision import transforms
import time
import numpy as np
def euclidean_dist_new(x, y):
    """Euclidean distance via the expanded dot-product identity.

    This is a hot function, hence some optimizations are made: the
    difference vector is never allocated. Floating-point rounding can
    make the expanded form slightly negative for (near-)identical
    inputs, which would turn the sqrt into NaN, so the result is
    clamped at zero before taking the root.
    """
    result = np.dot(x, x) + np.dot(y, y) - np.dot(x, y) * 2
    return np.sqrt(max(result, 0.0))
def euclidean_dist(x, y):
    """Plain Euclidean distance between two vectors.

    This is a hot function, hence some optimizations are made: the
    squared norm of the difference is computed with a single dot product.
    """
    delta = np.array(x) - y
    return np.sqrt(np.dot(delta, delta))
if __name__ == '__main__':
    # Benchmark nearest-neighbour search over cached feature vectors,
    # timing the distance computation per cache entry (see header docstring).
    model = Model(ResNet(predict=True))
    model.compile(torch.optim.SGD(model.model.parameters(),
                                  lr=0.001,
                                  momentum=0.9,
                                  weight_decay=1e-4),
                  ContrastiveLoss(),
                  metric=None,
                  device='cuda')
    model.load_weights('/home/palm/PycharmProjects/seven2/snapshots/pairs/3/epoch_0_0.03454810580774366.pth')
    model.model.eval()
    # Standard ImageNet normalization constants.
    normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406],
                                     std=[0.229, 0.224, 0.225])
    transform = transforms.Compose([transforms.Resize((224, 224)),
                                    transforms.ToTensor(),
                                    normalize])
    lsh = LSHash(hash_size=16, input_dim=1024, num_hashtables=5)
    cache_folder = '/home/palm/PycharmProjects/seven/caches'
    with torch.no_grad():
        # Embed a single probe image once.
        target_image_ori = Image.open('/home/palm/PycharmProjects/seven/images/cropped2/unknown/obj/0_036.jpg')
        target_image = transform(target_image_ori)
        x = torch.zeros((1, 3, 224, 224))
        x[0] = target_image
        target_features = model.model._forward_impl(x.cuda()).cpu()
        minimum = (float('inf'), 0)
        # Per-entry distance timings accumulated for the average below.
        ts = []
        # for class_folder in os.listdir(cache_folder):
        #     for file in os.listdir(os.path.join(cache_folder, class_folder)):
        #         cache = torch.load(os.path.join(cache_folder, class_folder, file)).cpu()
        #         lsh.index(cache[0])
        # target_hash = lsh._hash(lsh.uniform_planes[0], target_features[0])
        for class_folder in os.listdir(cache_folder):
            for file in os.listdir(os.path.join(cache_folder, class_folder)):
                t = time.time()
                cache = torch.load(os.path.join(cache_folder, class_folder, file)).cpu()
                t1 = time.time() - t
                # query_hash = lsh._hash(lsh.uniform_planes[0], cache[0])
                t2 = time.time() - t
                # distant = lsh.hamming_dist(target_hash, query_hash)
                # Active variant under test (alternatives left commented).
                distant = euclidean_dist_new(target_features.numpy()[0], cache.numpy()[0])
                # distant = torch.pairwise_distance(cache, target_features)
                t3 = time.time() - t
                # print(t1, t2, t3)
                ts.append(t3-t2)
                if distant < minimum[0]:
                    minimum = (distant, class_folder)
        # Closest (distance, class) pair and the mean per-entry distance time.
        print(minimum)
        print(sum(ts) / len(ts))
        a = 0
|
{"/siamese/siamese_train.py": ["/siamese/datagen.py"], "/readjust_xml.py": ["/siamese/siamese_predict.py"], "/siamese/multiprocess_predict.py": ["/siamese/siamese_predict.py"], "/siamese/siamese_inference.py": ["/siamese/siamese_predict.py"]}
|
33,061
|
DableUTeeF/seven2
|
refs/heads/master
|
/readjust_xml.py
|
import os
import sys
# noinspection PyUnboundLocalVariable
if __name__ == "__main__" and __package__ is None:
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
__package__ = "siamese"
from siamese.models import ResNet, ContrastiveLoss
from siamese.siamese_predict import memory_image, memory_cache
from PIL import Image
from retinanet.utils.image import read_image_bgr, preprocess_image, resize_image
from lshash.lshash import LSHash
from retinanet import models
import torch
from natthaphon import Model
from torchvision import transforms
from xml.etree import cElementTree as ET
import numpy as np
import cv2
import tensorflow as tf
import keras
import shutil
import time
# Let TensorFlow allocate GPU memory on demand instead of reserving it all.
gpu_options = tf.GPUOptions(allow_growth=True)
sess = tf.Session(config=tf.ConfigProto(gpu_options=gpu_options))
keras.backend.set_session(sess)
if __name__ == '__main__':
    # Re-annotate images: detect boxes with the retinanet model, then name
    # each detection via nearest-neighbour search in the siamese feature
    # space, writing refreshed Pascal-VOC style XMLs.
    model = Model(ResNet(predict=True))
    model.compile(torch.optim.SGD(model.model.parameters(),
                                  lr=0.001,
                                  momentum=0.9,
                                  weight_decay=1e-4),
                  ContrastiveLoss(),
                  metric=None,
                  device='cuda')
    model.load_weights('/home/palm/PycharmProjects/seven2/snapshots/pairs/4/epoch_0_0.016697616640688282.pth')
    model.model.eval()
    # Standard ImageNet normalization constants.
    normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406],
                                     std=[0.229, 0.224, 0.225])
    transform = transforms.Compose([transforms.Resize((224, 224)),
                                    transforms.ToTensor(),
                                    normalize])
    labels_to_names = {0: 'obj'}
    prediction_model = models.load_model('/home/palm/PycharmProjects/seven2/snapshots/infer_model_temp.h5')
    query_path = '/home/palm/PycharmProjects/seven/images/cropped5/train'
    cache_path = '/home/palm/PycharmProjects/seven/caches'
    # In-memory feature cache shared across all images.
    cache_dict = {}
    for set_name in [1, 2, 3]:
        folder = f'/home/palm/PycharmProjects/seven/data1/{set_name}'
        anns_path = f'/home/palm/PycharmProjects/seven2/xmls/revised/{set_name}'
        exiting_anns = [os.path.basename(x) for x in os.listdir(anns_path)]
        for i in os.listdir(folder):
            # Only process images that already have a revised annotation.
            if i[:-4] + '.xml' not in exiting_anns:
                continue
            if 'txt' in i:
                continue
            x = open(os.path.join(anns_path, i[:-4] + '.xml')).read()
            # Annotations without any object, or without an unnamed 'obj',
            # are copied through unchanged.
            if '<name>' not in x:
                os.makedirs(f'/home/palm/PycharmProjects/seven2/xmls/readjusted/{set_name}/', exist_ok=True)
                shutil.copy(os.path.join(anns_path, i[:-4] + '.xml'),
                            f'/home/palm/PycharmProjects/seven2/xmls/readjusted/{set_name}/'+i[:-4] + '.xml')
                continue
            if '<name>obj</name>' not in x:
                os.makedirs(f'/home/palm/PycharmProjects/seven2/xmls/readjusted/{set_name}/', exist_ok=True)
                shutil.copy(os.path.join(anns_path, i[:-4] + '.xml'),
                            f'/home/palm/PycharmProjects/seven2/xmls/readjusted/{set_name}/'+i[:-4] + '.xml')
                continue
            image = read_image_bgr(os.path.join(folder, i))
            start_time = time.time()
            # copy to draw on
            draw = image.copy()
            draw = cv2.cvtColor(draw, cv2.COLOR_BGR2RGB)
            # preprocess image for network
            image = preprocess_image(image)
            image, scale = resize_image(image, min_side=720, max_side=1280)
            # process image
            boxes, scores, labels = prediction_model.predict_on_batch(np.expand_dims(image, axis=0))
            # correct for image scale
            boxes /= scale
            # Rebuild the annotation document from scratch.
            root = ET.Element('annotation')
            ET.SubElement(root, 'filename').text = i
            ET.SubElement(root, 'path').text = os.path.join(folder, i)
            size = ET.SubElement(root, 'size')
            ET.SubElement(size, 'width').text = str(draw.shape[1])
            ET.SubElement(size, 'height').text = str(draw.shape[0])
            for box, score, label in zip(boxes[0], scores[0], labels[0]):
                # scores are sorted so we can break
                if score < 0.5:
                    continue
                b = box.astype(int)
                minimum = (float('inf'), 0)
                with torch.no_grad():
                    # Embed the detected crop and compare against every
                    # cached query embedding; keep the nearest class.
                    target_image_ori = Image.fromarray(draw[b[1]:b[3], b[0]:b[2]])
                    target_image = transform(target_image_ori)
                    x = torch.zeros((1, 3, 224, 224))
                    x[0] = target_image
                    target_features = model.model._forward_impl(x.cuda())
                    for query_folder in os.listdir(query_path):
                        for query_image_path in os.listdir(os.path.join(query_path, query_folder)):
                            query = os.path.join(query_path, query_folder, query_image_path)
                            cache_dict, query_features = memory_cache(cache_dict, model.model, query, os.path.join(cache_path, query_folder, query_image_path + '.pth'), transform)
                            y = LSHash.euclidean_dist(target_features.cpu().numpy()[0], query_features.cpu().numpy()[0])
                            if y < minimum[0]:
                                minimum = (y, query_folder)
                # Too far from every known class: keep the generic 'obj' label.
                if minimum[0] > 1:
                    minimum = (minimum[0], 'obj')
                # print(minimum)
                obj = ET.SubElement(root, 'object')
                ET.SubElement(obj, 'name').text = minimum[1]
                bndbx = ET.SubElement(obj, 'bndbox')
                ET.SubElement(bndbx, 'xmin').text = str(b[0])
                ET.SubElement(bndbx, 'ymin').text = str(b[1])
                ET.SubElement(bndbx, 'xmax').text = str(b[2])
                ET.SubElement(bndbx, 'ymax').text = str(b[3])
            print(time.time() - start_time)
            # cv2.imshow(f'im_{i}', draw)
            tree = ET.ElementTree(root)
            os.makedirs(f'/home/palm/PycharmProjects/seven2/xmls/readjusted/{set_name}/', exist_ok=True)
            tree.write(f'/home/palm/PycharmProjects/seven2/xmls/readjusted/{set_name}/' + i[:-4] + '.xml')
|
{"/siamese/siamese_train.py": ["/siamese/datagen.py"], "/readjust_xml.py": ["/siamese/siamese_predict.py"], "/siamese/multiprocess_predict.py": ["/siamese/siamese_predict.py"], "/siamese/siamese_inference.py": ["/siamese/siamese_predict.py"]}
|
33,062
|
DableUTeeF/seven2
|
refs/heads/master
|
/correct_xml.py
|
import os
from xml.etree import cElementTree as ET
import tensorflow as tf
import keras
# Let TensorFlow allocate GPU memory on demand instead of reserving it all.
gpu_options = tf.GPUOptions(allow_growth=True)
sess = tf.Session(config=tf.ConfigProto(gpu_options=gpu_options))
# NOTE(review): this script only rewrites XML files and never builds a
# keras model — this session setup looks unused here; confirm before removal.
keras.backend.set_session(sess)
if __name__ == '__main__':
    # Canonical product names keyed by the label variants found in the
    # hand-made annotations.
    renames = {
        'Almond_bar': 'United Almond 19g',
        'Diva 160ml': 'Daiwa dishwashing liquid lemon 160ml',
        'Protractor ruler': 'TD protractor',
        'Soffell Flora 80ml': 'Soffel Flora 80ml',
        'Soffel flora 8ml': 'Soffel Lotion flora 8ml',
        'Kitkat thai tea': 'Kitkat red 35g',
        'KitKat Milktea 35g': 'Kitkat red 35g',
        'KitKat Red 35g': 'Kitkat red 35g',
        'Koh-kae salted peanuts 42g': 'Koh-Kae Salted Peanuts 42g',
        'Almind_fried_56g': 'Almond_fried_56g',
    }
    for set_name in [0, 1, 2, 3]:
        anns_path = f'/home/palm/PycharmProjects/seven2/xmls/readjusted/{set_name}'
        for file in os.listdir(anns_path):
            tree = ET.parse(os.path.join(anns_path, file))
            for elem in tree.iter():
                # Exact tag comparisons: the previous substring tests
                # ('name' in elem.tag) also matched <filename> elements,
                # so filenames were run through the rename chain too.
                if elem.tag == 'filename':
                    # Prefix with the set index so merged files stay unique.
                    elem.text = f'{set_name}_{elem.text}'
                elif elem.tag == 'path':
                    elem.text = f'/home/palm/PycharmProjects/seven/data1/{set_name}_'+os.path.basename(elem.text)
                elif elem.tag == 'name':
                    if elem.text in renames:
                        elem.text = renames[elem.text]
                    elif 'Darlie' in elem.text:
                        elem.text = 'Darlie green'
            tree.write(f'/home/palm/PycharmProjects/seven2/xmls/final/{set_name}_' + file[:-4] + '.xml')
|
{"/siamese/siamese_train.py": ["/siamese/datagen.py"], "/readjust_xml.py": ["/siamese/siamese_predict.py"], "/siamese/multiprocess_predict.py": ["/siamese/siamese_predict.py"], "/siamese/siamese_inference.py": ["/siamese/siamese_predict.py"]}
|
33,063
|
DableUTeeF/seven2
|
refs/heads/master
|
/siamese/multiprocess_predict.py
|
import os
import sys
os.environ["CUDA_VISIBLE_DEVICES"] = ""
# noinspection PyUnboundLocalVariable
if __name__ == "__main__" and __package__ is None:
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
__package__ = "siamese"
from siamese.models import ResNet, ContrastiveLoss
from PIL import Image
import torch
from natthaphon import Model
from torchvision import transforms
import time
from lshash.lshash import LSHash
from siamese.siamese_predict import memory_cache
import os
import multiprocessing
from functools import partial
from contextlib import contextmanager
@contextmanager
def poolcontext(*args, **kwargs):
    """Context manager yielding a ``multiprocessing.Pool``.

    Arguments are forwarded to the ``Pool`` constructor. The pool is
    terminated in a ``finally`` block so worker processes are cleaned up
    even when the ``with`` body raises (the original leaked the pool on
    exceptions).
    """
    pool = multiprocessing.Pool(*args, **kwargs)
    try:
        yield pool
    finally:
        pool.terminate()
# nope — NOTE(review): this runs inside pool.map worker processes, so the
# mutations to cache_dict / class_minimum never reach the parent process.
def predict_image_class(query_folder, target_features, cache_dict, class_minimum):
    # Find the cached query image of *query_folder* closest to
    # target_features and record the best (distance, folder) pair in
    # class_minimum[query_folder]. Relies on module-level `query_path`,
    # `cache_path`, `model` and `transform` defined in __main__.
    minimum = (float('inf'), 0)
    for query_image_path in os.listdir(os.path.join(query_path, query_folder)):
        t = time.time()
        query = os.path.join(query_path, query_folder, query_image_path)
        t1 = time.time() - t
        # Features are computed once per image and cached on disk/in memory.
        cache_dict, query_features = memory_cache(cache_dict, model.model, query, os.path.join(cache_path, query_folder, query_image_path + '.pth'), transform)
        t2 = time.time() - t
        y = LSHash.euclidean_dist(target_features.cpu().numpy()[0], query_features.cpu().numpy()[0])
        t3 = time.time() - t
        # Per-stage timings: path join, feature fetch, distance.
        print(t1, t2, t3)
        if y < minimum[0]:
            minimum = (y, query_folder)
    class_minimum[query_folder] = minimum
if __name__ == '__main__':
    # CPU-only nearest-class search for each unknown crop, fanned out
    # across a process pool (one task per candidate class folder).
    model = Model(ResNet(predict=True))
    model.compile(torch.optim.SGD(model.model.parameters(),
                                  lr=0.001,
                                  momentum=0.9,
                                  weight_decay=1e-4),
                  ContrastiveLoss(),
                  metric=None,
                  device='cpu')
    model.load_weights('/home/palm/PycharmProjects/seven2/snapshots/pairs/4/epoch_0_0.016697616640688282.pth')
    model.model.eval()
    # Standard ImageNet normalization constants.
    normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406],
                                     std=[0.229, 0.224, 0.225])
    transform = transforms.Compose([transforms.Resize((224, 224)),
                                    transforms.ToTensor(),
                                    normalize])
    query_path = '/home/palm/PycharmProjects/seven/images/cropped2/train'
    cache_path = '/home/palm/PycharmProjects/seven/caches'
    target_path = '/home/palm/PycharmProjects/seven/images/cropped2/unknown/obj'
    cache_dict = {}
    with torch.no_grad():
        for target_image_path in os.listdir(target_path):
            target = os.path.join(target_path, target_image_path)
            target_image_ori = Image.open(target)
            target_image = transform(target_image_ori)
            x = torch.zeros((1, 3, 224, 224))
            x[0] = target_image
            target_features = model.model._forward_impl(x)
            minimum = (float('inf'), 0)
            query_folders = os.listdir(query_path)
            class_minimum = {}
            # NOTE(review): worker-side writes to class_minimum do not
            # propagate back across process boundaries (see predict_image_class).
            with poolcontext(processes=8) as pool:
                results = pool.map(partial(predict_image_class, target_features=target_features, cache_dict=cache_dict, class_minimum=class_minimum), query_folders)
            print(class_minimum)
|
{"/siamese/siamese_train.py": ["/siamese/datagen.py"], "/readjust_xml.py": ["/siamese/siamese_predict.py"], "/siamese/multiprocess_predict.py": ["/siamese/siamese_predict.py"], "/siamese/siamese_inference.py": ["/siamese/siamese_predict.py"]}
|
33,064
|
DableUTeeF/seven2
|
refs/heads/master
|
/stuff/create_xml.py
|
import os
from xml.etree import cElementTree as ET
from PIL import Image
import json
if __name__ == '__main__':
    # Convert YOLO-style txt annotations (class cx cy w h, normalised)
    # into Pascal-VOC style XML files, one per image.
    txt_folder = '/media/palm/data/7/txt/'
    ann_folder = '/media/palm/data/7/anns/'
    image_folder = '/media/palm/data/7/images/'
    names = open('/home/palm/PycharmProjects/Seven/stuff/obj.names').read().split('\n')
    for txt in os.listdir(txt_folder):
        imname = txt[:-4]+'.jpg'
        try:
            image = Image.open(os.path.join(image_folder, imname))
        except FileNotFoundError:
            # Annotation without a matching image: skip it.
            continue
        width, height = image.size
        root = ET.Element('annotation')
        ET.SubElement(root, 'filename').text = imname
        ET.SubElement(root, 'path').text = os.path.join(image_folder, imname)
        size = ET.SubElement(root, 'size')
        ET.SubElement(size, 'width').text = str(width)
        ET.SubElement(size, 'height').text = str(height)
        class_txt = open(os.path.join(txt_folder, txt)).read().split('\n')
        for obj_txt in class_txt:
            if len(obj_txt) == 0:
                break
            obj_ = obj_txt.split(' ')
            obj = ET.SubElement(root, 'object')
            ET.SubElement(obj, 'name').text = names[int(obj_[0])]
            # Denormalise centre/size into pixel-space corner coordinates.
            w = int(float(obj_[3]) * width)
            h = int(float(obj_[4]) * height)
            x = int(float(obj_[1]) * width) - int(w / 2)
            y = int(float(obj_[2]) * height) - int(h / 2)
            bndbx = ET.SubElement(obj, 'bndbox')
            ET.SubElement(bndbx, 'xmin').text = str(x)
            ET.SubElement(bndbx, 'xmax').text = str(x+w)
            ET.SubElement(bndbx, 'ymin').text = str(y)
            ET.SubElement(bndbx, 'ymax').text = str(y+h)
        tree = ET.ElementTree(root)
        tree.write(os.path.join(ann_folder, txt[:-4]+'.xml'))
|
{"/siamese/siamese_train.py": ["/siamese/datagen.py"], "/readjust_xml.py": ["/siamese/siamese_predict.py"], "/siamese/multiprocess_predict.py": ["/siamese/siamese_predict.py"], "/siamese/siamese_inference.py": ["/siamese/siamese_predict.py"]}
|
33,065
|
DableUTeeF/seven2
|
refs/heads/master
|
/siamese/siamese_predict.py
|
import os
import sys
# noinspection PyUnboundLocalVariable
if __name__ == "__main__" and __package__ is None:
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
__package__ = "siamese"
from siamese.models import ResNet, ContrastiveLoss
from PIL import Image
import torch
from natthaphon import Model
from torchvision import transforms
import time
from lshash.lshash import LSHash
def save_cache(model, image, cachepath):
    """Embed *image* with the model and persist the features at *cachepath*.

    Creates the cache directory on demand and returns the feature tensor.
    """
    cache_dir = os.path.split(cachepath)[0]
    os.makedirs(cache_dir, exist_ok=True)
    # Single-image batch for the forward pass.
    batch = torch.zeros((1, 3, 224, 224))
    batch[0] = image
    features = model._forward_impl(batch.cuda())
    torch.save(features, cachepath)
    return features
def load_cache(model, image, cachepath):
    """Return the features cached at *cachepath*, computing them on a miss."""
    if not os.path.exists(cachepath):
        # Miss: run the model and write the cache file.
        return save_cache(model, image, cachepath)
    return torch.load(cachepath, map_location=torch.device('cpu'))
def memory_cache(cachedict, model, query, cachepath, transform):
    """Memoise feature vectors in *cachedict*, backed by the on-disk cache.

    On a miss the image at *query* is loaded, transformed and embedded via
    load_cache. Returns the (possibly updated) dict and the features.
    """
    try:
        features = cachedict[cachepath]
    except KeyError:
        image = transform(Image.open(query))
        features = load_cache(model, image, cachepath)
        cachedict[cachepath] = features
    return cachedict, features
def memory_image(query, image_dict, transform):
    """Memoise transformed PIL images by path in *image_dict*."""
    if query not in image_dict:
        # Miss: load from disk and apply the transform exactly once.
        loaded = Image.open(query)
        image_dict[query] = transform(loaded)
    return image_dict, image_dict[query]
if __name__ == '__main__':
    # Classify each unknown crop by exhaustive nearest-neighbour search
    # over the cached embeddings of all known query images.
    model = Model(ResNet(predict=True))
    model.compile(torch.optim.SGD(model.model.parameters(),
                                  lr=0.001,
                                  momentum=0.9,
                                  weight_decay=1e-4),
                  ContrastiveLoss(),
                  metric=None,
                  device='cuda')
    model.load_weights('/home/palm/PycharmProjects/seven2/snapshots/pairs/4/epoch_0_0.016697616640688282.pth')
    model.model.eval()
    # Standard ImageNet normalization constants.
    normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406],
                                     std=[0.229, 0.224, 0.225])
    transform = transforms.Compose([transforms.Resize((224, 224)),
                                    transforms.ToTensor(),
                                    normalize])
    lsh = LSHash(hash_size=16, input_dim=1024, num_hashtables=5)
    target_path = '/home/palm/PycharmProjects/seven/images/cropped2/unknown/obj'
    query_path = '/home/palm/PycharmProjects/seven/images/cropped2/train'
    cache_path = '/home/palm/PycharmProjects/seven/caches'
    # In-memory feature cache shared across all targets.
    cache_dict = {}
    with torch.no_grad():
        for target_image_path in os.listdir(target_path):
            target = os.path.join(target_path, target_image_path)
            target_image_ori = Image.open(target)
            target_image = transform(target_image_ori)
            x = torch.zeros((1, 3, 224, 224))
            x[0] = target_image
            target_features = model.model._forward_impl(x.cuda())
            minimum = (float('inf'), 0)
            for query_folder in os.listdir(query_path):
                for query_image_path in os.listdir(os.path.join(query_path, query_folder)):
                    t = time.time()
                    query = os.path.join(query_path, query_folder, query_image_path)
                    t1 = time.time() - t
                    cache_dict, query_features = memory_cache(cache_dict, model.model, query, os.path.join(cache_path, query_folder, query_image_path + '.pth'), transform)
                    t2 = time.time() - t
                    y = lsh.euclidean_dist(target_features.cpu().numpy()[0], query_features.cpu().numpy()[0])
                    t3 = time.time() - t
                    # Per-stage timings: path join, feature fetch, distance.
                    print(t1, t2, t3)
                    if y < minimum[0]:
                        minimum = (y, query_folder)
            print(minimum, target_image_path)
            # if minimum[0] < 1.:
            #     os.makedirs(os.path.join('/home/palm/PycharmProjects/seven/images/cropped2/unknown', minimum[1]), exist_ok=True)
            #     target_image_ori.save(os.path.join('/home/palm/PycharmProjects/seven/images/cropped2/unknown', minimum[1], target_image_path))
|
{"/siamese/siamese_train.py": ["/siamese/datagen.py"], "/readjust_xml.py": ["/siamese/siamese_predict.py"], "/siamese/multiprocess_predict.py": ["/siamese/siamese_predict.py"], "/siamese/siamese_inference.py": ["/siamese/siamese_predict.py"]}
|
33,066
|
DableUTeeF/seven2
|
refs/heads/master
|
/siamese/siamese_inference.py
|
import os
import sys
# noinspection PyUnboundLocalVariable
if __name__ == "__main__" and __package__ is None:
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
__package__ = "siamese"
from siamese.models import ResNet, ContrastiveLoss
from siamese.siamese_predict import memory_image, memory_cache
from PIL import Image
from retinanet.utils.image import read_image_bgr, preprocess_image, resize_image
from lshash.lshash import LSHash
from retinanet import models
import torch
from natthaphon import Model
from torchvision import transforms
from boxutils import add_bbox
import numpy as np
import cv2
import tensorflow as tf
import keras
gpu_options = tf.GPUOptions(allow_growth=True)
sess = tf.Session(config=tf.ConfigProto(gpu_options=gpu_options))
keras.backend.set_session(sess)
def pad(cropped_image, b):
    """Pad a crop toward a square using its bounding-box extents.

    Half the width/height difference of box ``b`` (x1, y1, x2, y2) is added
    as a constant (black) border on the shorter pair of sides.
    """
    left, top, right, bottom = b
    width = right - left
    height = bottom - top
    if width > height:
        border = (width - height) // 2
        return cv2.copyMakeBorder(cropped_image, border, border, 0, 0, cv2.BORDER_CONSTANT)
    border = (height - width) // 2
    return cv2.copyMakeBorder(cropped_image, 0, 0, border, border, cv2.BORDER_CONSTANT)
if __name__ == '__main__':
    # Siamese embedding network used to compare detector crops against the
    # reference ("query") product images by nearest-neighbour distance.
    model = Model(ResNet(predict=True))
    model.compile(torch.optim.SGD(model.model.parameters(),
                                  lr=0.001,
                                  momentum=0.9,
                                  weight_decay=1e-4),
                  ContrastiveLoss(),
                  metric=None,
                  device='cuda')
    model.load_weights('/home/palm/PycharmProjects/seven2/snapshots/pairs/5/epoch_1_0.012463876953125.pth')
    model.model.eval()
    # Standard ImageNet preprocessing for the embedding network.
    normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406],
                                     std=[0.229, 0.224, 0.225])
    transform = transforms.Compose([transforms.Resize((224, 224)),
                                    transforms.ToTensor(),
                                    normalize])
    # classes.csv rows are "<name>,<label index>"; build both directions.
    labels_to_names = [x.split(',')[0] for x in open('/home/palm/PycharmProjects/seven2/anns/classes.csv').read().split('\n')[:-1]]
    prediction_model = models.load_model('/home/palm/PycharmProjects/seven2/snapshots/infer_model_temp.h5')
    names_to_labels = {}
    for x in open('/home/palm/PycharmProjects/seven2/anns/classes.csv').read().split('\n')[:-1]:
        names_to_labels[x.split(',')[0]] = int(x.split(',')[1])
    query_path = '/home/palm/PycharmProjects/seven/images/cropped7/train'
    cache_path = '/home/palm/PycharmProjects/seven/caches'
    cache_dict = {}  # in-memory cache of query embeddings (see memory_cache)
    dst = f'/home/palm/PycharmProjects/seven/predict/4'
    for set_name in [1]:
        folder = f'/home/palm/PycharmProjects/seven/data1/{set_name}'
        for i in os.listdir(folder):
            image = read_image_bgr(os.path.join(folder, i))
            # copy to draw on
            draw = image.copy()
            # preprocess image for network
            image = preprocess_image(image)
            image, scale = resize_image(image, min_side=720, max_side=1280)
            # process image
            boxes, scores, labels = prediction_model.predict_on_batch(np.expand_dims(image, axis=0))
            # correct for image scale
            boxes /= scale
            for box, score, label in zip(boxes[0], scores[0], labels[0]):
                # scores are sorted so we can break
                if score < 0.5:
                    break
                b = box.astype(int)
                minimum = (float('inf'), 0)  # (best distance, best query folder)
                with torch.no_grad():
                    # Square-pad the detector crop, convert BGR->RGB, embed it.
                    target_image_ori = pad(draw[b[1]:b[3], b[0]:b[2]], b)
                    target_image_ori = Image.fromarray(target_image_ori[..., ::-1])
                    target_image = transform(target_image_ori)
                    x = torch.zeros((1, 3, 224, 224))
                    x[0] = target_image
                    target_features = model.model._forward_impl(x.cuda())
                    # Nearest-neighbour search over every cached query embedding.
                    for query_folder in os.listdir(query_path):
                        for query_image_path in os.listdir(os.path.join(query_path, query_folder)):
                            query = os.path.join(query_path, query_folder, query_image_path)
                            cache_dict, query_features = memory_cache(cache_dict, model.model, query, os.path.join(cache_path, query_folder, query_image_path + '.pth'), transform)
                            y = LSHash.euclidean_dist(target_features.cpu().numpy()[0], query_features.cpu().numpy()[0])
                            if y < minimum[0]:
                                minimum = (y, query_folder)
                if minimum[0] > 1:
                    # Too far from every known product: fall back to generic label.
                    # NOTE(review): this branch relabels to 'obj' but never draws
                    # the box - confirm that is intended.
                    minimum = (minimum[0], 'obj')
                else:
                    if minimum[1] in names_to_labels:
                        label = names_to_labels[minimum[1]]
                        draw = add_bbox(draw, b, label, labels_to_names, score)
                    else:
                        draw = add_bbox(draw, b, 0, [minimum[1]], score)
            os.makedirs(dst, exist_ok=True)
            cv2.imwrite(os.path.join(dst, i), draw)
|
{"/siamese/siamese_train.py": ["/siamese/datagen.py"], "/readjust_xml.py": ["/siamese/siamese_predict.py"], "/siamese/multiprocess_predict.py": ["/siamese/siamese_predict.py"], "/siamese/siamese_inference.py": ["/siamese/siamese_predict.py"]}
|
33,067
|
DableUTeeF/seven2
|
refs/heads/master
|
/stuff/createboxes.py
|
import cv2
import numpy as np
import os
N = 3
drawing = False
ix = [None] * N
iy = [None] * N
ex = [None] * N
ey = [None] * N
for i in range(N):
ix[i], iy[i] = 0, 0
ex[i], ey[i] = 0, 0
def draw_rect(event, x, y, flags, i):
    """Mouse callback for window *i*: drag the left button to draw a box.

    Updates the module-level start (ix, iy) / end (ex, ey) coordinates for
    image *i*; ``flags`` is unused (OpenCV callback signature requires it).
    """
    global ix, iy, ex, ey, drawing
    if event == cv2.EVENT_LBUTTONDOWN:
        # Start a new rectangle at the press position.
        drawing = True
        ix[i], iy[i] = x, y
        ex[i], ey[i] = x, y
    elif event == cv2.EVENT_MOUSEMOVE:
        if drawing:
            # Refresh the draw buffer and track the moving corner.
            drawimg[i] = img[i].copy()
            ex[i] = x
            ey[i] = y
            # cv2.rectangle(drawimg[i], (ix[i], iy[i]), (x, y), (255, 255, 255), 1)
    elif event == cv2.EVENT_LBUTTONUP:
        drawing = False
        ex[i] = x
        ey[i] = y
        # cv2.rectangle(drawimg[i], (ix[i], iy[i]), (x, y), (255, 255, 255), 1)
        # Reorder the stored corners after release.
        # NOTE(review): the else-branch pairs x into ix but y into ey, which
        # mixes the corners for drags that are up-right or down-left -
        # confirm this is the intended normalisation.
        if ix[i] < x and iy[i] < y:
            ix[i], ex[i], iy[i], ey[i] = ix[i], x, iy[i], y
        else:
            ex[i], ix[i], ey[i], iy[i] = ix[i], x, iy[i], y
# Manual box-annotation tool: shows N images at once, one OpenCV window each,
# and appends "path x1 y1 x2 y2" lines to save_txt.
for i in range(N):
    cv2.namedWindow('img' + str(i))
    cv2.setMouseCallback('img' + str(i), draw_rect, i)
class_name = ''
root_directory = '/home/palm/PycharmProjects/seven/' #name of the folder
source_dir = 'data1' #
save_txt = 'data1-9-3.txt'
img = []
name = []
try:
    # Resume support: skip images whose paths were already written out.
    written = [x.split(' ')[0] for x in open(save_txt, 'r').readlines()]
except FileNotFoundError:
    written = []
for subdir in os.listdir(os.path.join(root_directory, source_dir)):
    s = sorted(os.listdir(os.path.join(root_directory, source_dir, subdir)))
    subdir_list = {}
    for a in s:
        # Zero-pad the numeric stem so lexicographic sort equals numeric order.
        subdir_list[f'{int(a.split(".")[0]):02d}.{a.split(".")[1]}'] = a
    slist = sorted(subdir_list)
    for fs in slist:
        files = subdir_list[fs]
        if 'txt' in files:
            continue
        if os.path.join(root_directory, source_dir, subdir, files) in written:
            continue
        img.append(cv2.imread(os.path.join(root_directory, source_dir, subdir, files)))
        name.append(os.path.join(root_directory, source_dir, subdir, files))
        # Once a batch of N images is loaded, enter the annotation UI loop.
        while len(img) == N:
            drawimg = [None] * N
            for i in range(N):
                drawimg[i] = img[i].copy()
                cv2.rectangle(drawimg[i], (ix[i], iy[i]), (ex[i], ey[i]), (0, 0, 255), 2)
                cv2.putText(drawimg[i], class_name, (ix[i], iy[i] - 10), cv2.FONT_HERSHEY_COMPLEX, 1, (0, 0, 255), 1)
                cv2.imshow('img' + str(i), cv2.resize(drawimg[i], None, None, 0.2, 0.2))
            key = cv2.waitKey(1) & 0xFF
            if key == ord('q'):
                # q: quit the whole tool.
                cv2.destroyAllWindows()
                raise SystemExit
            if key == ord('r'):
                # r: reset a rectangle.
                # NOTE(review): uses the loop variable i after the for, so
                # only window N-1 is reset - confirm intended.
                ix[i], iy[i] = 0, 0
                ex[i], ey[i] = 0, 0
            if key == ord('s'):
                # s: append all N boxes to the output file, then next batch.
                img = []
                with open(save_txt, 'a') as f:
                    for i in range(N):
                        f.write('%s %d %d %d %d\n' % (name[i],
                                                      ix[i], iy[i], ex[i], ey[i]))
                name = []
                break
|
{"/siamese/siamese_train.py": ["/siamese/datagen.py"], "/readjust_xml.py": ["/siamese/siamese_predict.py"], "/siamese/multiprocess_predict.py": ["/siamese/siamese_predict.py"], "/siamese/siamese_inference.py": ["/siamese/siamese_predict.py"]}
|
33,068
|
DableUTeeF/seven2
|
refs/heads/master
|
/stuff/create_xml_v2.py
|
import os
from xml.etree import cElementTree as ET
from PIL import Image
if __name__ == '__main__':
    # Convert "path x1 x2 y1 y2" annotation lines into Pascal-VOC-style XML,
    # one file per image, using the parent folder name as the class label.
    ann_folder = '/media/palm/data/7/ann1-30-9'
    # names = open('../names.txt').read().split('\n')[:-1]
    anns = open('/home/palm/PycharmProjects/Seven/stuff/data1-30-9.txt').read().split('\n')[:-1]
    # assert len(anns) == len(names)
    for i in range(len(anns)):
        x = anns[i].split(' ')
        imname = os.path.join(*x[0].split('/')[-2:])  # "<subdir>/<file>"
        impath = x[0]
        try:
            image = Image.open(impath)
        except FileNotFoundError:
            continue
        width, height = image.size
        root = ET.Element('annotation')
        ET.SubElement(root, 'filename').text = imname
        size = ET.SubElement(root, 'size')
        ET.SubElement(size, 'width').text = str(width)
        ET.SubElement(size, 'height').text = str(height)
        obj_ = anns[i].split(' ')
        if len(obj_) > 5:
            # Malformed line (extra fields): skip it.
            continue
        ctxt = obj_[0].split('/')[-2]  # class label = parent folder name
        obj = ET.SubElement(root, 'object')
        ET.SubElement(obj, 'name').text = ctxt
        # Order the corners and clamp into the frame.
        # NOTE(review): clamping assumes a fixed 480x640 image - confirm
        # against the actual image sizes read above.
        x1 = min(480, max(0, min(int(obj_[1]), int(obj_[2]))))
        x2 = min(480, max(0, max(int(obj_[1]), int(obj_[2]))))
        y1 = min(640, max(0, min(int(obj_[3]), int(obj_[4]))))
        y2 = min(640, max(0, max(int(obj_[3]), int(obj_[4]))))
        bndbx = ET.SubElement(obj, 'bndbox')
        ET.SubElement(bndbx, 'xmin').text = str(x1)
        ET.SubElement(bndbx, 'xmax').text = str(x2)
        ET.SubElement(bndbx, 'ymin').text = str(y1)
        ET.SubElement(bndbx, 'ymax').text = str(y2)
        tree = ET.ElementTree(root)
        # Drop degenerate boxes thinner than 10 px in either dimension.
        if abs(x1-x2) < 10 or abs(y1-y2) < 10:
            continue
        tree.write(os.path.join(ann_folder, imname[:-4].replace('/', '_')+'.xml'))
|
{"/siamese/siamese_train.py": ["/siamese/datagen.py"], "/readjust_xml.py": ["/siamese/siamese_predict.py"], "/siamese/multiprocess_predict.py": ["/siamese/siamese_predict.py"], "/siamese/siamese_inference.py": ["/siamese/siamese_predict.py"]}
|
33,069
|
DableUTeeF/seven2
|
refs/heads/master
|
/gui_tk.py
|
import cv2
import numpy as np
import tkinter as tk
import threading
from PIL import Image
from PIL import ImageTk
import os
from retinanet import models
import time
from retinanet.utils.image import preprocess_image, resize_image
import tensorflow as tf
from univ_utils import add_bbox
from threading import Thread
import cv2
class WebcamThread:
    """Background webcam reader.

    Grabs frames on a daemon thread so read() always returns the latest
    frame without blocking the caller.
    """

    def __init__(self, src=0, name="WebcamThread", af=None, f=None, w=None, h=None):
        """Open capture device *src* and apply optional camera settings.

        af -- when given, disable autofocus.
        f  -- when given, fixed focus value.
        w  -- when given, requested frame width in pixels.
        h  -- when given, requested frame height in pixels.
        """
        self.cap = cv2.VideoCapture(src)
        if af is not None:
            self.cap.set(cv2.CAP_PROP_AUTOFOCUS, 0)
        if f is not None:
            # BUG FIX: the original always set focus to the literal 0; honour
            # the requested focus value (the only visible caller passes 0,
            # so behaviour is unchanged for it).
            self.cap.set(cv2.CAP_PROP_FOCUS, f)
        # BUG FIX: the original tested `f` for both width and height and
        # forced a hard-coded 1920x1080; honour the w/h arguments instead.
        if w is not None:
            self.cap.set(cv2.CAP_PROP_FRAME_WIDTH, w)
        if h is not None:
            self.cap.set(cv2.CAP_PROP_FRAME_HEIGHT, h)
        _, self.frame = self.cap.read()
        self.name = name
        self.stopped = False

    def update(self):
        """Continuously pull frames until stop() is called."""
        while True:
            if self.stopped:
                return
            _, self.frame = self.cap.read()

    def start(self):
        """Launch the reader thread and return self (fluent interface)."""
        t = Thread(target=self.update, name=self.name, args=())
        t.daemon = True
        t.start()
        return self

    def read(self):
        """Return the most recently captured frame."""
        return self.frame

    def stop(self):
        """Signal the reader thread to exit."""
        self.stopped = True
def verify(facevector1, facevector2, threshold=0.4):
min_dist = 99999
dist = np.linalg.norm(np.subtract(facevector1, facevector2))
if dist < min_dist:
min_dist = dist
print(min_dist)
if min_dist > threshold:
return False
else:
return True
class APP:
    """Tk dashboard that runs a RetinaNet detector and shows the annotated
    frame between two text panels.

    NOTE(review): ``self.labels_to_names`` (used in vdoLoop) and
    ``self.labels_to_weight`` (used in get_weight) are never assigned in
    this class - confirm they are attached externally, otherwise this
    raises AttributeError at runtime.
    """
    def __init__(self, cap):
        self.cap = cap  # threaded capture source; only stop() is used here
        self.frame = None
        self.thread = None
        self.stopEvent = None
        self.model = models.load_model('/home/palm/PycharmProjects/seven2/snapshots/infer_model_temp.h5')
        # Keep the TF graph so predict_on_batch works from the worker thread.
        self.graph = tf.get_default_graph()
        self.classes = {}
        self.root = tk.Tk()
        self.root.configure(background='SlateGray4')
        self.root.bind('<KeyRelease>', self.keydetect)
        self.panel = None  # image panel, created lazily on first frame
        self.qrscanner = ''
        # Left panel: per-frame prediction table.
        self.predictionLabel = tk.Text(self.root, height=30, width=40,
                                       borderwidth=0, highlightthickness=0,
                                       relief='ridge', background="SlateGray4", foreground='SlateGray1')
        self.predictionLabel.grid(row=0, column=0, padx=4, pady=2)
        # Right panel: class listing.
        self.classLabel = tk.Text(self.root, height=30, width=40,
                                  borderwidth=0, highlightthickness=0,
                                  relief='ridge', background="SlateGray4", foreground='SlateGray1')
        self.classLabel.grid(row=0, column=2, padx=4, pady=2)
        # Inference runs on a background thread until stopEvent is set.
        self.stopEvent = threading.Event()
        self.thread = threading.Thread(target=self.vdoLoop, args=())
        self.thread.start()
        self.root.wm_title("BingoBox")
        self.root.wm_protocol("WM_DELETE_WINDOW", self.onClose)
        self.t = time.time()
        self.weight = 210  # expected scale weight (hard-coded)

    def vdoLoop(self):
        """Worker loop: detect objects and refresh the GUI widgets.

        NOTE(review): reads a fixed image from disk on every iteration
        rather than self.cap - presumably a debugging stand-in for the
        webcam; confirm before shipping.
        """
        with self.graph.as_default():
            while not self.stopEvent.is_set():
                obj = {}  # detected label -> count
                frame = cv2.imread(f'/home/palm/PycharmProjects/seven/data1/1/1.jpg')
                draw = frame.copy()
                image = preprocess_image(frame)
                image, scale = resize_image(image, min_side=720, max_side=1280)
                boxes, scores, labels = self.model.predict_on_batch(np.expand_dims(image, axis=0))
                boxes /= scale
                for box, score, label in zip(boxes[0], scores[0], labels[0]):
                    # Scores are sorted descending, so stop at the threshold.
                    if score < 0.9:
                        break
                    b = box.astype(int)
                    draw = add_bbox(draw, b, label, self.labels_to_names, score)
                    if label not in obj:
                        obj[label] = 0
                    obj[label] += 1
                blk = ImageTk.PhotoImage(Image.fromarray(cv2.resize(draw, (360, 640))[..., ::-1]))
                # Border colour encodes the weight check: green when the
                # detected items' total weight matches self.weight within 20.
                if self.weight > 10:
                    color = 'chartreuse3' if abs(self.get_weight(obj) - self.weight) < 20 else 'orangered'
                else:
                    color = 'cornflower blue'
                if self.panel is None:
                    self.panel = tk.Label(image=blk, borderwidth=0, highlightthickness=3, highlightbackground=color)
                    self.panel.image = blk
                    self.panel.grid(row=0, column=1, padx=2, pady=2)
                else:
                    self.panel.configure(image=blk, highlightthickness=3, relief="solid", highlightbackground=color)
                    self.panel.image = blk
                self.predictionLabel.config(state='normal')
                self.predictionLabel.delete(1.0, tk.END)
                self.predictionLabel.insert(tk.END, f"Obj{' ' * 7}Qty{' ' * 7}Ttl wt. \n")

    def get_weight(self, obj):
        """Sum the expected weight of the detected items (label -> count)."""
        weights = 0
        for o in obj:
            weights += self.labels_to_weight[o] * obj[o]
        return weights

    def keydetect(self, e):
        # 'q' anywhere closes the application.
        if e.char == 'q':
            self.onClose()

    def onClose(self):
        """Stop the worker thread, release the capture and tear down Tk."""
        print("close")
        self.stopEvent.set()
        self.cap.stop()
        self.root.quit()
        # Hard kill so no lingering threads keep the process alive.
        os.system('killall python')
if __name__ == '__main__':
    # Start the threaded webcam reader (1080p, autofocus off) and the GUI.
    cap = WebcamThread(0, "QR detector 1", 0, 0, 1920, 1080).start()
    app = APP(cap)
    app.root.mainloop()
|
{"/siamese/siamese_train.py": ["/siamese/datagen.py"], "/readjust_xml.py": ["/siamese/siamese_predict.py"], "/siamese/multiprocess_predict.py": ["/siamese/siamese_predict.py"], "/siamese/siamese_inference.py": ["/siamese/siamese_predict.py"]}
|
33,070
|
DableUTeeF/seven2
|
refs/heads/master
|
/autobox.py
|
from retinanet.utils.image import read_image_bgr, preprocess_image, resize_image
from retinanet.utils.visualization import draw_box, draw_caption
from retinanet.utils.colors import label_color
from retinanet import models
import cv2
import os
import numpy as np
import time
from xml.etree import cElementTree as ET
if __name__ == '__main__':
    # Auto-label: run the trained detector over every not-yet-annotated image
    # and dump predicted boxes as Pascal-VOC-style XML for later manual review.
    labels_to_names = {0: 'obj'}
    prediction_model = models.load_model('/home/palm/PycharmProjects/seven2/snapshots/infer_model_temp.h5')
    for set_name in [0, 1, 2, 3]:
        folder = f'/home/palm/PycharmProjects/seven/data1/{set_name}'
        anns_path = f'/home/palm/PycharmProjects/seven2/xmls/revised/{set_name}'
        exiting_anns = [os.path.basename(x) for x in os.listdir(anns_path)]
        for i in os.listdir(folder):
            # Skip images that already have a (manually revised) annotation.
            if i[:-4] + '.xml' in exiting_anns:
                continue
            if 'txt' in i:
                continue
            image = read_image_bgr(os.path.join(folder, i))
            # copy to draw on
            draw = image.copy()
            draw = cv2.cvtColor(draw, cv2.COLOR_BGR2RGB)
            # preprocess image for network
            image = preprocess_image(image)
            image, scale = resize_image(image, min_side=800, max_side=1333)
            # process image
            start = time.time()
            boxes, scores, labels = prediction_model.predict_on_batch(np.expand_dims(image, axis=0))
            print("processing time: ", time.time() - start)
            # correct for image scale
            boxes /= scale
            root = ET.Element('annotation')
            ET.SubElement(root, 'filename').text = i
            ET.SubElement(root, 'path').text = os.path.join(folder, i)
            size = ET.SubElement(root, 'size')
            ET.SubElement(size, 'width').text = str(draw.shape[1])
            ET.SubElement(size, 'height').text = str(draw.shape[0])
            for box, score, label in zip(boxes[0], scores[0], labels[0]):
                # low-confidence detection: skip (continue, not break, even
                # though scores are sorted - kept as written)
                if score < 0.5:
                    continue
                b = box.astype(int)
                obj = ET.SubElement(root, 'object')
                ET.SubElement(obj, 'name').text = labels_to_names[label]
                bndbx = ET.SubElement(obj, 'bndbox')
                ET.SubElement(bndbx, 'xmin').text = str(b[0])
                ET.SubElement(bndbx, 'ymin').text = str(b[1])
                ET.SubElement(bndbx, 'xmax').text = str(b[2])
                ET.SubElement(bndbx, 'ymax').text = str(b[3])
                color = label_color(label)
                draw_box(draw, b, color=color)
                caption = "{} {:.3f}".format(labels_to_names[label], score)
                draw_caption(draw, b, caption)
            # cv2.imshow(f'im_{i}', draw)
            tree = ET.ElementTree(root)
            tree.write(f'/home/palm/PycharmProjects/seven2/xmls/raw/{set_name}/' + i[:-4] + '.xml')
|
{"/siamese/siamese_train.py": ["/siamese/datagen.py"], "/readjust_xml.py": ["/siamese/siamese_predict.py"], "/siamese/multiprocess_predict.py": ["/siamese/siamese_predict.py"], "/siamese/siamese_inference.py": ["/siamese/siamese_predict.py"]}
|
33,071
|
DableUTeeF/seven2
|
refs/heads/master
|
/dataset_update_sequence.py
|
import os
# Regenerate the detection CSVs and the siamese crop dataset, in order.
os.system('python xml2csv_classify.py')
os.system('python siamese/create_dataset.py')
# os.system('python stuff/equalize_the_train.py')
# os.system('python autoclasses.py')
|
{"/siamese/siamese_train.py": ["/siamese/datagen.py"], "/readjust_xml.py": ["/siamese/siamese_predict.py"], "/siamese/multiprocess_predict.py": ["/siamese/siamese_predict.py"], "/siamese/siamese_inference.py": ["/siamese/siamese_predict.py"]}
|
33,072
|
DableUTeeF/seven2
|
refs/heads/master
|
/xml2csv_classify.py
|
from xml.etree import cElementTree as ET
import os
# bad_img = [os.path.basename(x).split('_')[0] for x in open('/home/palm/PycharmProjects/tops/anns/bad_img.txt').read().split('\n')[:-1]]
def check_bad(file):
    """Return True when *file* is on the bad-image blacklist.

    The blacklist (``bad_img``) is currently disabled - its load is
    commented out at the top of the file - so every file is accepted.
    The original kept the old lookup as unreachable code after
    ``return False`` and referenced the undefined ``bad_img`` name;
    that dead code has been removed.
    """
    return False
if __name__ == '__main__':
    # Convert the readjusted VOC XMLs into RetinaNet-style CSV annotations
    # (train + val splits) plus a classes.csv mapping, fixing known label
    # aliases along the way.
    open('anns/val_ann.csv', 'w')   # truncate previous outputs
    open('anns/classes.csv', 'w')
    classes = []
    trainset = []
    testset = []
    with open('anns/annotation.csv', 'w') as wr:
        for set_name in [0, 1, 2, 3]:
            folder = f'/home/palm/PycharmProjects/seven/data1/{set_name}'
            path = f'./xmls/readjusted/{set_name}'
            for file in os.listdir(path):
                # Set 1 is the validation split.
                val = False
                if set_name == 1:
                    val = True
                tree = ET.parse(os.path.join(path, file))
                if len(tree.findall('object')) == 0:
                    continue
                ln = ''
                cls = ''
                xmin = 0
                xmax = 0
                ymin = 0
                ymax = 0
                impath = ''
                for elem in tree.iter():
                    if 'path' in elem.tag:
                        impath = elem.text
                        # Rewrite foreign (e.g. Windows) paths onto this machine.
                        if 'palm' not in impath:
                            if '\\' in impath:
                                basename = impath.split('\\')[-1]
                            else:
                                basename = os.path.basename(impath)
                            impath = os.path.join('/home/palm/PycharmProjects/seven/data1', str(set_name), basename)
                    if 'object' in elem.tag:
                        # A new <object> starts: flush the previous one if complete.
                        # NOTE(review): the coordinates here are strings from
                        # elem.text, so "+" concatenates, and ymax appears twice
                        # (ymin is never checked) - confirm this guard does what
                        # was intended.
                        if cls != '' and (xmax+xmin+ymax+ymax) != 0 and impath != 0:
                            if cls not in classes:
                                with open('anns/classes.csv', 'a') as cwr:
                                    cwr.write(f'{cls},{len(classes)}\n')
                                classes.append(cls)
                            ln = f'{impath},{xmin},{ymin},{xmax},{ymax},{cls}'
                            if val:
                                testset.append(impath)
                                with open('anns/val_ann.csv', 'a') as vwr:
                                    vwr.write(ln)
                                    vwr.write('\n')
                            else:
                                trainset.append(impath)
                                wr.write(ln)
                                wr.write('\n')
                    elif 'name' in elem.tag:
                        # Normalise historical label aliases to canonical names.
                        cls = elem.text
                        if cls == 'Almond_bar':
                            cls = 'United Almond 19g'
                        elif cls == 'Diva 160ml':
                            cls = 'Daiwa dishwashing liquid lemon 160ml'
                        elif cls == 'Protractor ruler':
                            cls = 'TD protractor'
                        elif cls == 'Soffell Flora 80ml':
                            cls = 'Soffel Flora 80ml'
                        elif cls == 'Soffel flora 8ml':
                            cls = 'Soffel Lotion flora 8ml'
                        elif cls == 'Kitkat thai tea':
                            cls = 'Kitkat red 35g'
                        elif cls == 'KitKat Milktea 35g':
                            cls = 'Kitkat red 35g'
                        elif cls == 'KitKat Red 35g':
                            cls = 'Kitkat red 35g'
                        elif cls == 'Koh-kae salted peanuts 42g':
                            cls = 'Koh-Kae Salted Peanuts 42g'
                        elif cls == 'Almind_fried_56g':
                            cls = 'Almond_fried_56g'
                        elif 'Darlie' in cls:
                            cls = 'Darlie green'
                    elif 'xmin' in elem.tag:
                        xmin = elem.text
                    elif 'ymin' in elem.tag:
                        ymin = elem.text
                    elif 'xmax' in elem.tag:
                        xmax = elem.text
                    elif 'ymax' in elem.tag:
                        ymax = elem.text
                # Flush the final object of the file.
                if 1: # cls != 'obj':
                    if cls not in classes:
                        with open('anns/classes.csv', 'a') as cwr:
                            cwr.write(f'{cls},{len(classes)}\n')
                        classes.append(cls)
                    ln = f'{impath},{xmin},{ymin},{xmax},{ymax},{cls}'
                    if val:
                        testset.append(impath)
                        with open('anns/val_ann.csv', 'a') as vwr:
                            vwr.write(ln)
                            vwr.write('\n')
                    else:
                        trainset.append(impath)
                        wr.write(ln)
                        wr.write('\n')
    print(len(set(trainset)))
    print(len(set(testset)))
|
{"/siamese/siamese_train.py": ["/siamese/datagen.py"], "/readjust_xml.py": ["/siamese/siamese_predict.py"], "/siamese/multiprocess_predict.py": ["/siamese/siamese_predict.py"], "/siamese/siamese_inference.py": ["/siamese/siamese_predict.py"]}
|
33,073
|
DableUTeeF/seven2
|
refs/heads/master
|
/stuff/equalize_the_train.py
|
import shutil
import os
if __name__ == '__main__':
    # Copy every class folder that is missing from cropped6/train into
    # cropped6/test; folders already present in train are printed instead.
    src_dir = '/home/palm/PycharmProjects/seven/images/test6/train'
    dst_root = '/home/palm/PycharmProjects/seven/images/cropped6'
    dst_dir = os.path.join(dst_root, 'train')
    for class_folder in os.listdir(src_dir):
        if class_folder in os.listdir(dst_dir):
            print(class_folder)
        else:
            shutil.copytree(os.path.join(src_dir, class_folder),
                            os.path.join(dst_root, 'test', class_folder))
|
{"/siamese/siamese_train.py": ["/siamese/datagen.py"], "/readjust_xml.py": ["/siamese/siamese_predict.py"], "/siamese/multiprocess_predict.py": ["/siamese/siamese_predict.py"], "/siamese/siamese_inference.py": ["/siamese/siamese_predict.py"]}
|
33,074
|
DableUTeeF/seven2
|
refs/heads/master
|
/stuff/movetxtandimage.py
|
import shutil
import os
if __name__ == '__main__':
    # Flatten data/<folder>/* into two destinations: .txt annotation files
    # go to anns_dest, everything else to image_dest.
    image_dest = '/media/palm/data/7/images'
    anns_dest = '/media/palm/data/7/anns'
    root_folder = '/media/palm/data/7/data/'
    for folder in os.listdir(root_folder):
        for entry in os.listdir(os.path.join(root_folder, folder)):
            destination = anns_dest if entry[-4:] == '.txt' else image_dest
            shutil.move(os.path.join(root_folder, folder, entry),
                        os.path.join(destination, entry))
|
{"/siamese/siamese_train.py": ["/siamese/datagen.py"], "/readjust_xml.py": ["/siamese/siamese_predict.py"], "/siamese/multiprocess_predict.py": ["/siamese/siamese_predict.py"], "/siamese/siamese_inference.py": ["/siamese/siamese_predict.py"]}
|
33,075
|
DableUTeeF/seven2
|
refs/heads/master
|
/yolo/y3_video_infer.py
|
import cv2
import os
import sys
# noinspection PyUnboundLocalVariable
if __name__ == "__main__" and __package__ is None:
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
__package__ = "yolo"
from yolo.y3frontend import *
import json
import time
from yolo.utils import draw_boxesv3, normalize, evaluate, evaluate_coco, get_yolo_boxes, parse_annotation, create_csv_training_instances
from PIL import Image
import numpy as np
from yolo.preprocessing import minmaxresize, Y3BatchGenerator
from keras.models import load_model
if __name__ == '__main__':
    # Live YOLOv3 (EfficientNet-B3 backbone) inference on a webcam stream.
    config_path = '/home/palm/PycharmProjects/seven2/yolo/sevenconfig.json'
    with open(config_path) as config_buffer:
        config = json.loads(config_buffer.read())
    train_ints, valid_ints, labels, max_box_per_image = create_csv_training_instances(
        config['train']['train_csv'],
        config['valid']['valid_csv'],
        config['train']['classes_csv'],
    )
    infer_model = yolo3(
        fe='effnetb3',
        output_type='dw',
        nb_class=len(labels)
    )
    infer_model.load_weights('/home/palm/PycharmProjects/seven2/snapshots/22_4.1761_1.2766.h5',
                             # by_name=True,
                             # skip_mismatch=True,
                             )
    path = "/media/palm/data/coco/images/val2017"
    pad = 1  # when truthy, letterbox the resized frame to a square canvas
    # for _ in range(1000):
    # if 1:
    # Generator built for parity with training; unused below.
    valid_generator = Y3BatchGenerator(
        instances=valid_ints,
        anchors=config['model']['anchors'],
        labels=labels,
        downsample=32,
        max_box_per_image=max_box_per_image,
        batch_size=1,
        min_net_size=config['model']['min_input_size'],
        max_net_size=config['model']['max_input_size'],
        shuffle=True,
        jitter=0.0,
    )
    cap = cv2.VideoCapture(1)
    t = time.time()
    while 1:
        _, image = cap.read()
        x = time.time()
        # filename = '001dxxyile2uxkblr99uqo6fuhgprpccznlze0z0djhs9gkek2tsm8u5hsfzx62o.jpg'
        # filename = 'download.jpeg'
        image, w, h = minmaxresize(image, 416, 608)
        # image = cv2.resize(image, (416, 416))
        if pad:
            # Zero-pad the shorter dimension so the frame becomes square.
            imsize = image.shape
            if imsize[0] > imsize[1]:
                tempim = np.zeros((imsize[0], imsize[0], 3), dtype='uint8')
                distant = (imsize[0] - imsize[1]) // 2
                tempim[:, distant:distant + imsize[1], :] = image
                image = tempim
                h = imsize[0]
                w = imsize[0]
            elif imsize[1] > imsize[0]:
                tempim = np.zeros((imsize[1], imsize[1], 3), dtype='uint8')
                distant = (imsize[1] - imsize[0]) // 2
                tempim[distant:distant + imsize[0], :, :] = image
                image = tempim
                h = imsize[1]
                w = imsize[1]
        image = np.expand_dims(image, 0)
        boxes = get_yolo_boxes(infer_model,
                               image,
                               608, 608,  # todo: change here too
                               config['model']['anchors'],
                               0.5,
                               0.5)[0]
        # infer_model.predict(image)
        # labels = ['badhelmet', 'badshoes', 'goodhelmet', 'goodshoes', 'person']
        # # draw bounding boxes on the image using labels
        image = draw_boxesv3(image[0], boxes, labels, 0.75)
        cv2.imshow('img', image.astype('uint8'))
        key = cv2.waitKey(1)
        if key == ord('q'):
            break
|
{"/siamese/siamese_train.py": ["/siamese/datagen.py"], "/readjust_xml.py": ["/siamese/siamese_predict.py"], "/siamese/multiprocess_predict.py": ["/siamese/siamese_predict.py"], "/siamese/siamese_inference.py": ["/siamese/siamese_predict.py"]}
|
33,076
|
DableUTeeF/seven2
|
refs/heads/master
|
/siamese/siamese_cls_eval.py
|
import os
import sys
# noinspection PyUnboundLocalVariable
if __name__ == "__main__" and __package__ is None:
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
__package__ = "siamese"
from siamese.models import ResNet, ContrastiveLoss
from PIL import Image
import torch
from natthaphon import Model
from torchvision import transforms
import time
from lshash.lshash import LSHash
import pickle as pk
def save_cache(model, image, cachepath):
    """Embed *image* with *model*, persist the features to *cachepath*
    (creating parent directories as needed), and return them."""
    os.makedirs(os.path.split(cachepath)[0], exist_ok=True)
    batch = torch.zeros((1, 3, 224, 224))
    batch[0] = image
    features = model._forward_impl(batch.cuda())
    torch.save(features, cachepath)
    return features
def load_cache(model, image, cachepath):
    """Return cached features from *cachepath*, computing and saving them
    via save_cache() on a cache miss."""
    if not os.path.exists(cachepath):
        return save_cache(model, image, cachepath)
    return torch.load(cachepath, map_location=torch.device('cpu'))
def memory_cache(cachedict, model, query, cachepath, transform):
    """Two-level cache lookup for query embeddings.

    On a miss in the in-memory *cachedict*, load (or compute) the features
    through the on-disk cache and remember them under *cachepath*.
    Returns the (possibly updated) dict and the features.
    """
    if cachepath not in cachedict:
        cachedict[cachepath] = load_cache(model, transform(Image.open(query)), cachepath)
    return cachedict, cachedict[cachepath]
def memory_image(query, image_dict, transform):
    """Return the transformed image for *query*, caching it in *image_dict*.

    Returns the (possibly updated) dict and the cached image tensor.
    """
    if query not in image_dict:
        image_dict[query] = transform(Image.open(query))
    return image_dict, image_dict[query]
def predict():
    """Nearest-neighbour evaluation of the siamese classifier.

    Every target crop is embedded and compared (euclidean distance) against
    every cached query embedding; the query folder with the smallest
    distance becomes the predicted class. Results are pickled to
    cls_eval.pk for the analysis block below.
    """
    model = Model(ResNet(predict=True))
    model.compile(torch.optim.SGD(model.model.parameters(),
                                  lr=0.001,
                                  momentum=0.9,
                                  weight_decay=1e-4),
                  ContrastiveLoss(),
                  metric=None,
                  device='cuda')
    model.load_weights('/home/palm/PycharmProjects/seven2/snapshots/pairs/5/epoch_1_0.012463876953125.pth')
    model.model.eval()
    # Standard ImageNet preprocessing for the embedding network.
    normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406],
                                     std=[0.229, 0.224, 0.225])
    transform = transforms.Compose([transforms.Resize((224, 224)),
                                    transforms.ToTensor(),
                                    normalize])
    target_path = '/home/palm/PycharmProjects/seven/images/test6/train'
    query_path = '/home/palm/PycharmProjects/seven/images/cropped6/train'
    cache_path = '/home/palm/PycharmProjects/seven/caches'
    cache_dict = {}
    predicted_dict = {}  # {true folder: {image name: predicted folder}}
    correct = 0
    count = 0
    with torch.no_grad():
        for target_image_folder in os.listdir(target_path):
            # Only evaluate classes that exist in the query gallery.
            if target_image_folder not in os.listdir(query_path):
                continue
            predicted_dict[target_image_folder] = {}
            for target_image_path in os.listdir(os.path.join(target_path, target_image_folder)):
                count += 1
                target = os.path.join(target_path, target_image_folder, target_image_path)
                target_image_ori = Image.open(target)
                target_image = transform(target_image_ori)
                x = torch.zeros((1, 3, 224, 224))
                x[0] = target_image
                target_features = model.model._forward_impl(x.cuda())
                minimum = (float('inf'), 0)  # (best distance, best folder)
                for query_folder in os.listdir(query_path):
                    for query_image_path in os.listdir(os.path.join(query_path, query_folder)):
                        query = os.path.join(query_path, query_folder, query_image_path)
                        cache_dict, query_features = memory_cache(cache_dict, model.model, query, os.path.join(cache_path, query_folder, query_image_path + '.pth'), transform)
                        y = LSHash.euclidean_dist(target_features.cpu().numpy()[0], query_features.cpu().numpy()[0])
                        if y < minimum[0]:
                            minimum = (y, query_folder)
                print(*minimum, target_image_folder)
                predicted_dict[target_image_folder][target_image_path] = minimum[1]
                if minimum[1] == target_image_folder:
                    correct += 1
    # NOTE(review): prints count/correct (the inverse of accuracy) and
    # divides by zero when nothing is correct - probably meant correct/count.
    print(count/correct)
    pk.dump(predicted_dict, open('cls_eval.pk', 'wb'))
if __name__ == '__main__':
    # predict()
    # Analyse a previously pickled prediction run: per-class accuracy,
    # overall accuracy, and a confusion-matrix heatmap.
    import pickle as pk
    import os
    from sklearn.metrics import confusion_matrix
    import matplotlib.pyplot as plt
    import numpy as np
    a = pk.load(open('cls_eval.pk', 'rb'))
    labels_to_names = os.listdir('/home/palm/PycharmProjects/seven/images/cropped6/train')
    # Seed both lists with every label once so confusion_matrix always sees
    # all classes; note this also inflates the matrix by one per class.
    y_true = [i +1 for i in range(len(labels_to_names))]
    y_pred = [i +1 for i in range(len(labels_to_names))]
    correct = 0
    count = 0
    class_correct = {}  # folder -> [correct count, "total"]
    for folder in a:
        # NOTE(review): len() here measures the path *string*, not the number
        # of images in the folder - probably meant len(os.listdir(...)).
        class_correct[folder] = [0, len('/home/palm/PycharmProjects/seven/images/cropped6/train/'+folder)]
        for image in a[folder]:
            y_true.append(labels_to_names.index(folder))
            y_pred.append(labels_to_names.index(a[folder][image]))
            count += 1
            if a[folder][image] == folder:
                correct += 1
                class_correct[folder][0] += 1
    f = confusion_matrix(y_true, y_pred)
    pk.dump([y_true, y_pred, labels_to_names], open('ys.pk', 'wb'))
    w = np.argwhere(f > 20)  # heavily-confused class pairs (unused below)
    sorted_cc = {}
    for folder in class_correct:
        print(folder, class_correct[folder][0]/class_correct[folder][1], class_correct[folder][1])
    print(correct / count)
    ticks = np.linspace(0, 153, num=154)
    plt.imshow(f, interpolation='none')
    plt.colorbar()
    plt.xticks(ticks, fontsize=6)
    plt.yticks(ticks, fontsize=6)
    plt.grid(True)
    plt.show()
|
{"/siamese/siamese_train.py": ["/siamese/datagen.py"], "/readjust_xml.py": ["/siamese/siamese_predict.py"], "/siamese/multiprocess_predict.py": ["/siamese/siamese_predict.py"], "/siamese/siamese_inference.py": ["/siamese/siamese_predict.py"]}
|
33,077
|
DableUTeeF/seven2
|
refs/heads/master
|
/siamese/datagen.py
|
from torch.utils.data import DataLoader
import numpy as np
from torchvision.datasets import ImageFolder
import torch
class DirectorySiameseLoader:
    """Builds siamese (query, target) image pairs from an ImageFolder tree.

    The dataset enumerates the full cartesian product of the folder's
    images: index idx maps to the pair (idx % len, idx // len).
    """
    def __init__(self, target_path, transform):
        self.dset = self.DataSet(target_path, transform)

    def get_dset(self, batch_size, num_worker, shuffle=True):
        # Wrap the pair dataset in the Loader subclass below, whose
        # __len__ is rounded rather than ceil'ed.
        return self.Loader(self.dset,
                           batch_size=batch_size,
                           shuffle=shuffle,
                           num_workers=num_worker)

    class DataSet:
        def __init__(self, target_path, transform):
            self.target_path = target_path
            self.dset = ImageFolder(target_path,
                                    transform=transform)
            self.len = len(self.dset)  # number of individual images
            # Cursor state for the manual iterator protocol below.
            self.curidx = -1
            self.setidx = -1

        def __next__(self):
            # NOTE(review): curidx grows without bound while setidx wraps, so
            # self[self.curidx] can run past len**2 - confirm this path is
            # actually used anywhere (DataLoader uses __getitem__ directly).
            self.curidx += 1
            self.setidx += 1
            if self.setidx >= self.len:
                self.setidx -= self.len
            return self[self.curidx]

        def __len__(self):
            # One entry per ordered image pair.
            return self.len**2

        def __getitem__(self, idx):
            # query image
            xq, y_1 = self.dset[idx % self.len]
            x = torch.zeros((2, *xq.size()))
            x[0] = xq
            # target image
            xt, y_2 = self.dset[idx // self.len]
            x[1] = xt
            # Contrastive label: True (1) when the classes differ, else 0.
            y = y_1 != y_2
            return x, y

    class Loader(DataLoader):
        def __len__(self):
            # Round instead of ceil: a final partial batch smaller than half
            # a batch is dropped from the reported length.
            return int(np.round(len(self.dataset) / self.batch_size))
if __name__ == '__main__':
    # Smoke test: build a loader over the algae crops and fetch one pair.
    train_datagen = DirectorySiameseLoader('/media/palm/data/MicroAlgae/16_8_62/cropped/train',
                                           None)
    train_generator = train_datagen.get_dset(16, 1)
    s = train_datagen.dset
    s1 = s[1]
|
{"/siamese/siamese_train.py": ["/siamese/datagen.py"], "/readjust_xml.py": ["/siamese/siamese_predict.py"], "/siamese/multiprocess_predict.py": ["/siamese/siamese_predict.py"], "/siamese/siamese_inference.py": ["/siamese/siamese_predict.py"]}
|
33,078
|
DableUTeeF/seven2
|
refs/heads/master
|
/stuff/bg_subtract.py
|
import cv2
import numpy as np
import os
if __name__ == '__main__':
    # Frame-differencing box finder: compare each shot against the three
    # empty-scene reference frames and draw one union box around all
    # changed regions.
    path = '/media/palm/data/7/data1-30-9-gs/data1/300'
    gt = [cv2.imread(os.path.join(path, f'{x}.jpg')) for x in [1, 2, 3]]
    for imid in range(12):
        # Each "take" consists of 3 consecutive camera angles.
        image = [cv2.imread(os.path.join(path, f'{x}.jpg')) for x in [1+(1+imid)*3, 2+(1+imid)*3, 3+(1+imid)*3]]
        for idx in range(3):
            # NOTE(review): uint8 subtraction wraps around rather than
            # saturating; cv2.absdiff would be the safe equivalent - confirm.
            diff = np.abs(cv2.cvtColor(gt[idx], cv2.COLOR_BGR2GRAY) - cv2.cvtColor(image[idx], cv2.COLOR_BGR2GRAY))
            mask = cv2.threshold(diff, 128, 255, cv2.THRESH_BINARY)[1]
            mask = cv2.dilate(mask, None, iterations=1)
            mask[:350] = 255  # blank out the top rows of the frame
            # mask[:, :70] = 255
            # mask[:, 400:] = 255
            contours, hierarchy = cv2.findContours(mask.astype('uint8'), mode=cv2.RETR_LIST, method=cv2.CHAIN_APPROX_SIMPLE)
            contours_poly = [None] * len(contours)
            boundRect = [None] * len(contours)
            pt = [float('inf'), float('inf'), -1, -1]  # union box: x1, y1, x2, y2
            for i, c in enumerate(contours):
                contours_poly[i] = cv2.approxPolyDP(c, 3, True)
                boundRect[i] = cv2.boundingRect(contours_poly[i])
                # Discard contours touching the left edge or wider than 400px.
                if boundRect[i][0] == 0 or boundRect[i][2] >= 400:
                    continue
                # Grow pt to the union of all accepted bounding rects.
                if pt[0] > boundRect[i][0]:
                    pt[0] = boundRect[i][0]
                if pt[1] > boundRect[i][1]:
                    pt[1] = boundRect[i][1]
                if pt[2] < boundRect[i][2] + boundRect[i][0]:
                    pt[2] = boundRect[i][2] + boundRect[i][0]
                if pt[3] < boundRect[i][3] + boundRect[i][1]:
                    pt[3] = boundRect[i][3] + boundRect[i][1]
            # cv2.rectangle(image, (pt[0], pt[1]), (pt[2], pt[3]), (230, 180, 128))
            cv2.rectangle(image[idx], (pt[0], pt[1]), (pt[2], pt[3]), (128, 128, 255), 2)
            cv2.imwrite(f'/home/palm/PycharmProjects/Seven/out/1/{idx+1+(1+imid)*3}.jpg', image[idx])
            # cv2.imshow(f'gt{idx}', gt[idx])
            # cv2.imshow(f'mask{idx}', mask)
    # while 1:
    #     keyboard = cv2.waitKey()
    #     print(keyboard)
    #     if keyboard == 113:
    #         break
|
{"/siamese/siamese_train.py": ["/siamese/datagen.py"], "/readjust_xml.py": ["/siamese/siamese_predict.py"], "/siamese/multiprocess_predict.py": ["/siamese/siamese_predict.py"], "/siamese/siamese_inference.py": ["/siamese/siamese_predict.py"]}
|
33,079
|
Lordtrituze/Airline_Management_System_DB
|
refs/heads/master
|
/models/aircraft.py
|
from datetime import date
class Aircraft:
    """Plain record for one aircraft row.

    Fields: id, name, model, capacity (seats), reg_no (registration
    number), created_at (date the row was created).
    """
    id: int
    name: str
    model: str
    capacity: int
    reg_no: str
    created_at: date

    def __init__(self, id, name, model, capacity, reg_no, created_at):
        self.id = id
        self.name = name
        self.model = model
        self.capacity = capacity
        self.reg_no = reg_no
        self.created_at = created_at

    def __str__(self):
        # Tab-separated, left-aligned columns matching the table listing.
        columns = (
            f"{self.id:<5}",
            f"{self.name:<10}",
            f"{self.model:<10}",
            f"{self.capacity:<10}",
            f"{self.reg_no:<10}",
            f"{self.created_at}",
        )
        return "\t".join(columns)
|
{"/repositories/passenger_repository.py": ["/models/passenger.py", "/repositories/base_repository.py"], "/repositories/flight_repository.py": ["/models/flight.py", "/repositories/base_repository.py"], "/repositories/aircraft_repository.py": ["/models/aircraft.py", "/repositories/base_repository.py"], "/AMS.py": ["/models/aircraft.py", "/models/passenger.py", "/models/flight.py", "/models/booking.py", "/repositories/aircraft_repository.py", "/repositories/passenger_repository.py", "/repositories/flight_repository.py", "/repositories/booking_repoaitory.py"], "/repositories/booking_repoaitory.py": ["/models/booking.py", "/repositories/base_repository.py"]}
|
33,080
|
Lordtrituze/Airline_Management_System_DB
|
refs/heads/master
|
/models/booking.py
|
from datetime import date
class Booking:
    """One seat booking, mirroring a row of the ``bookings`` table."""

    id: int
    passenger_id: str
    flight_id: int
    booking_type: str
    flight_class: str
    booking_no: str
    booking_date: date
    seat_no: int
    created_at: date

    def __init__(self, id, passenger_id, flight_id, booking_type, flight_class, booking_no, booking_date, seat_no, created_at):
        # Plain field copy; id/created_at may be None before persistence.
        (self.id, self.passenger_id, self.flight_id,
         self.booking_type, self.flight_class, self.booking_no,
         self.booking_date, self.seat_no, self.created_at) = (
            id, passenger_id, flight_id, booking_type, flight_class,
            booking_no, booking_date, seat_no, created_at)

    def __str__(self):
        """Tab-separated, column-aligned row (trailing space kept for parity)."""
        columns = (
            f"{self.id:<5}",
            f"{self.passenger_id:<10}",
            f"{self.flight_id:<10}",
            f"{self.booking_type:<10}",
            f"{self.flight_class:<10}",
            f"{self.booking_no:<10}",
            f"{self.booking_date:<20}",
            f"{self.seat_no:<10}",
            f"{self.created_at} ",
        )
        return "\t".join(columns)
|
{"/repositories/passenger_repository.py": ["/models/passenger.py", "/repositories/base_repository.py"], "/repositories/flight_repository.py": ["/models/flight.py", "/repositories/base_repository.py"], "/repositories/aircraft_repository.py": ["/models/aircraft.py", "/repositories/base_repository.py"], "/AMS.py": ["/models/aircraft.py", "/models/passenger.py", "/models/flight.py", "/models/booking.py", "/repositories/aircraft_repository.py", "/repositories/passenger_repository.py", "/repositories/flight_repository.py", "/repositories/booking_repoaitory.py"], "/repositories/booking_repoaitory.py": ["/models/booking.py", "/repositories/base_repository.py"]}
|
33,081
|
Lordtrituze/Airline_Management_System_DB
|
refs/heads/master
|
/repositories/passenger_repository.py
|
from typing import List
from models.passenger import Passenger
from repositories.base_repository import baserepsoitory
class PassengerRepository(baserepsoitory):
    """CRUD operations for the ``passengers`` table over the shared MySQL connection."""

    def __init__(self):
        super().__init__()
        # All repositories share the single class-level connection.
        self.db = baserepsoitory.db

    def create(self, passenger: Passenger):
        """Insert *passenger*; the generated primary key is written back onto it."""
        cursor = self.db.cursor()
        sql = "INSERT INTO passengers(first_name, last_name, email, address, reg_no) VALUES(%s, %s, %s, %s, %s)"
        val = (passenger.first_name, passenger.last_name, passenger.email, passenger.address, passenger.reg_no)
        cursor.execute(sql, val)
        self.db.commit()
        passenger.id = cursor.lastrowid

    def find(self, reg_no: str):
        """Look up one passenger by registration number.

        Returns a Passenger, or the string 'Passenger unavailable' when no
        row matches — callers print whichever comes back.
        """
        cursor = self.db.cursor()
        sql = "SELECT * FROM passengers WHERE reg_no = %s"
        adr = (reg_no,)
        cursor.execute(sql, adr)
        record = cursor.fetchone()
        passenger = PassengerRepository.__map_selected_record_to_passenger(record)
        if passenger is None:
            passenger = "Passenger unavailable"
            return passenger
        # Header printed here; the caller prints the row itself.
        print(f"{'ID':<5}\t{'First Name':<20}\t{'Last Name':<20}\t{'Email':<25}\t{'Address':<25}\t{'Reg_no':<10}\t{'Created_at'}")
        return passenger

    def list(self):
        """Return every passenger as Passenger objects, printing the table header."""
        cursor = self.db.cursor()
        sql = "SELECT * FROM passengers"
        cursor.execute(sql)
        result = cursor.fetchall()
        passengers: List[Passenger] = []
        for record in result:
            passenger = PassengerRepository.__map_selected_record_to_passenger(record)
            passengers.append(passenger)
        print(f"{'ID':<5}\t{'First Name':<20}\t{'Last Name':<20}\t{'Email':<25}\t{'Address':<25}\t{'Reg_no':<10}\t{'Created_at'}")
        return passengers

    def showAll(self):
        """Print header (via list()) followed by one line per passenger."""
        passengers = self.list()
        for passenger in passengers:
            print(passenger)

    def update(self, id: int, passenger: Passenger):
        """Overwrite all mutable columns of row *id* with *passenger*'s fields."""
        cursor = self.db.cursor()
        sql = "UPDATE passengers SET first_name = %s, last_name = %s, email = %s, address = %s, reg_no = %s WHERE id " \
              "= %s "
        val = (passenger.first_name, passenger.last_name, passenger.email, passenger.address, passenger.reg_no, id)
        cursor.execute(sql, val)
        self.db.commit()

    def find_id(self, regNo: str):
        """Return the primary key for *regNo*, or 'Passenger Unavailable' when absent.

        Callers distinguish the cases with ``type(result) is int``.
        """
        cursor = self.db.cursor()
        sql = "SELECT * FROM passengers WHERE reg_no = %s"
        adr = (regNo,)
        cursor.execute(sql, adr)
        record = cursor.fetchone()
        passenger = PassengerRepository.__map_selected_record_to_passenger(record)
        if passenger is None:
            passenger = "Passenger Unavailable"
            return passenger
        return passenger.id

    def delete(self, id: int):
        """Delete row *id*; always returns the string 'Deleted'."""
        cursor = self.db.cursor()
        sql = "DELETE FROM passengers WHERE id = %s"
        adr = (id,)
        cursor.execute(sql, adr)
        self.db.commit()
        message = "Deleted"
        return message

    @staticmethod
    def __map_selected_record_to_passenger(record):
        """Convert one DB row tuple to a Passenger, or None for a missing row."""
        if record is None:
            return None
        else:
            # Column order must match the passengers table definition.
            id, first_name, last_name, email, address, reg_no, created_at = record
            passenger = Passenger(id, first_name, last_name, email, address, reg_no, created_at)
            return passenger
|
{"/repositories/passenger_repository.py": ["/models/passenger.py", "/repositories/base_repository.py"], "/repositories/flight_repository.py": ["/models/flight.py", "/repositories/base_repository.py"], "/repositories/aircraft_repository.py": ["/models/aircraft.py", "/repositories/base_repository.py"], "/AMS.py": ["/models/aircraft.py", "/models/passenger.py", "/models/flight.py", "/models/booking.py", "/repositories/aircraft_repository.py", "/repositories/passenger_repository.py", "/repositories/flight_repository.py", "/repositories/booking_repoaitory.py"], "/repositories/booking_repoaitory.py": ["/models/booking.py", "/repositories/base_repository.py"]}
|
33,082
|
Lordtrituze/Airline_Management_System_DB
|
refs/heads/master
|
/repositories/flight_repository.py
|
from typing import List
from models.flight import Flight
from repositories.base_repository import baserepsoitory
class FlightRepository(baserepsoitory):
    """CRUD operations for the ``flights`` table over the shared MySQL connection."""

    def __init__(self):
        super().__init__()
        # All repositories share the single class-level connection.
        self.db = baserepsoitory.db

    def create(self, flight: Flight):
        """Insert *flight*; the generated primary key is written back onto it."""
        cursor = self.db.cursor()
        sql = "INSERT INTO flights(aircraft_id, takeoff_location, destination, takeoff_time, arrival_time, flight_no) " \
              "VALUES(%s, %s, %s, %s, %s, %s) "
        val = (flight.aircraft_id, flight.takeoff_location, flight.destination, flight.takeoff_time, flight.arrival_time, flight.flight_no)
        cursor.execute(sql, val)
        self.db.commit()
        flight.id = cursor.lastrowid

    def find(self, flight_no: str):
        """Look up one flight by flight number.

        Returns a Flight, or the string 'Flight unavailable' when no row
        matches — callers print whichever comes back.
        """
        cursor = self.db.cursor()
        sql = "SELECT * FROM flights WHERE flight_no = %s"
        adr = (flight_no,)
        cursor.execute(sql, adr)
        record = cursor.fetchone()
        flight = FlightRepository.__map_selected_record_to_flight(record)
        if flight is None:
            flight = "Flight unavailable"
            return flight
        # Header printed here; the caller prints the row itself.
        print(f"{'ID':<5}\t{'aircraft_id':<10}\t{'takeoff_location':<20}\t{'destination':<20}\t{'takeoff_time':<20}\t{'arrival_time':<20}\t{'flight_no':<10}\t{'Created_at'}")
        return flight

    def find_id(self, flight_no: str):
        """Return the primary key for *flight_no*, or 'Flight Unavailable' when absent.

        Callers distinguish the cases with ``type(result) is int``.
        """
        cursor = self.db.cursor()
        sql = "SELECT * FROM flights WHERE flight_no = %s"
        adr = (flight_no,)
        cursor.execute(sql, adr)
        record = cursor.fetchone()
        flight = FlightRepository.__map_selected_record_to_flight(record)
        if flight is None:
            flight = "Flight Unavailable"
            return flight
        return flight.id

    def delete(self, id: int):
        """Delete row *id*; always returns the string 'Deleted'."""
        cursor = self.db.cursor()
        sql = "DELETE FROM flights WHERE id = %s"
        adr = (id,)
        cursor.execute(sql, adr)
        self.db.commit()
        message = "Deleted"
        return message

    def update(self, id: int, flight: Flight):
        """Overwrite all mutable columns of row *id* with *flight*'s fields."""
        cursor = self.db.cursor()
        sql = "UPDATE flights SET aircraft_id = %s, takeoff_location = %s, destination = %s, takeoff_time = %s, arrival_time = %s, flight_no = %s WHERE id = %s"
        val = (flight.aircraft_id, flight.takeoff_location, flight.destination, flight.takeoff_time, flight.arrival_time, flight.flight_no, id)
        cursor.execute(sql, val)
        self.db.commit()

    def list(self):
        """Return every flight as Flight objects, printing the table header."""
        cursor = self.db.cursor()
        sql = "SELECT * FROM flights"
        cursor.execute(sql)
        result = cursor.fetchall()
        flights: List[Flight] = []
        for record in result:
            flight = FlightRepository.__map_selected_record_to_flight(record)
            flights.append(flight)
        print(f"{'ID':<5}\t{'aircraft_id':<10}\t{'takeoff_location':<20}\t{'destination':<20}\t{'takeoff_time':<20}\t{'arrival_time':<20}\t{'flight_no':<10}\t{'Created_at'}")
        return flights

    def showAll(self):
        """Print header (via list()) followed by one line per flight."""
        flights = self.list()
        for flight in flights:
            print(flight)

    @staticmethod
    def __map_selected_record_to_flight(record):
        """Convert one DB row tuple to a Flight, or None for a missing row."""
        if record is None:
            return None
        else:
            # Column order must match the flights table definition.
            id, aircraft_id, takeoff_location, destination, takeoff_time, arrival_time, flight_no, created_at = record
            flight = Flight(id, aircraft_id, takeoff_location, destination, takeoff_time, arrival_time, flight_no, created_at)
            return flight
|
{"/repositories/passenger_repository.py": ["/models/passenger.py", "/repositories/base_repository.py"], "/repositories/flight_repository.py": ["/models/flight.py", "/repositories/base_repository.py"], "/repositories/aircraft_repository.py": ["/models/aircraft.py", "/repositories/base_repository.py"], "/AMS.py": ["/models/aircraft.py", "/models/passenger.py", "/models/flight.py", "/models/booking.py", "/repositories/aircraft_repository.py", "/repositories/passenger_repository.py", "/repositories/flight_repository.py", "/repositories/booking_repoaitory.py"], "/repositories/booking_repoaitory.py": ["/models/booking.py", "/repositories/base_repository.py"]}
|
33,083
|
Lordtrituze/Airline_Management_System_DB
|
refs/heads/master
|
/repositories/base_repository.py
|
import mysql.connector
class baserepsoitory:
    """Base class holding one lazily-created MySQL connection shared by all repositories."""

    # Class-level singleton connection; populated on first instantiation.
    db = None

    def __init__(self):
        if baserepsoitory.db is None:
            # NOTE(review): credentials are hard-coded in source; consider
            # loading them from environment/config before sharing or deploying.
            baserepsoitory.db = mysql.connector.connect(
                host="localhost",
                user="root",
                password="Olalekan100%",
                database="airline"
            )
|
{"/repositories/passenger_repository.py": ["/models/passenger.py", "/repositories/base_repository.py"], "/repositories/flight_repository.py": ["/models/flight.py", "/repositories/base_repository.py"], "/repositories/aircraft_repository.py": ["/models/aircraft.py", "/repositories/base_repository.py"], "/AMS.py": ["/models/aircraft.py", "/models/passenger.py", "/models/flight.py", "/models/booking.py", "/repositories/aircraft_repository.py", "/repositories/passenger_repository.py", "/repositories/flight_repository.py", "/repositories/booking_repoaitory.py"], "/repositories/booking_repoaitory.py": ["/models/booking.py", "/repositories/base_repository.py"]}
|
33,084
|
Lordtrituze/Airline_Management_System_DB
|
refs/heads/master
|
/repositories/aircraft_repository.py
|
from typing import List
from models.aircraft import Aircraft
from repositories.base_repository import baserepsoitory
class AircraftRepository(baserepsoitory):
    """CRUD operations for the ``aircrafts`` table over the shared MySQL connection."""

    def __init__(self):
        super().__init__()
        # All repositories share the single class-level connection.
        self.db = baserepsoitory.db

    def create(self, aircraft: Aircraft):
        """Insert *aircraft*; the generated primary key is written back onto it."""
        cursor = self.db.cursor()
        sql = "INSERT INTO aircrafts(name, model, capacity, reg_no) VALUES(%s, %s, %s, %s)"
        val = (aircraft.name, aircraft.model, aircraft.capacity, aircraft.reg_no)
        cursor.execute(sql, val)
        self.db.commit()
        aircraft.id = cursor.lastrowid

    def update(self, id: int, aircraft: Aircraft):
        """Overwrite all mutable columns of row *id* with *aircraft*'s fields."""
        cursor = self.db.cursor()
        sql = "UPDATE aircrafts SET name = %s, model = %s, capacity = %s, reg_no = %s WHERE id = %s"
        val = (aircraft.name, aircraft.model, aircraft.capacity, aircraft.reg_no, id)
        cursor.execute(sql, val)
        self.db.commit()

    def list(self):
        """Return every aircraft as Aircraft objects, printing the table header."""
        cursor = self.db.cursor()
        sql = "SELECT * FROM aircrafts"
        cursor.execute(sql)
        result = cursor.fetchall()
        aircrafts: List[Aircraft] = []
        for record in result:
            aircraft = AircraftRepository.__map_selected_record_to_aircraft(record)
            aircrafts.append(aircraft)
        print(f"{'ID':<5}\t{'Name':<10}\t{'Model':<10}\t{'Capacity':<10}\t{'Reg_no':<10}\t{'Created_at'}")
        return aircrafts

    def showAll(self):
        """Print header (via list()) followed by one line per aircraft."""
        aircrafts = self.list()
        for aircraft in aircrafts:
            print(aircraft)

    def find(self, reg_no: str):
        """Look up one aircraft by registration number.

        Returns an Aircraft, or the string 'Aircraft unavailable' when no
        row matches — callers print whichever comes back.
        """
        cursor = self.db.cursor()
        sql = "SELECT * FROM aircrafts WHERE reg_no = %s"
        adr = (reg_no,)
        cursor.execute(sql, adr)
        record = cursor.fetchone()
        aircraft = AircraftRepository.__map_selected_record_to_aircraft(record)
        if aircraft is None:
            aircraft = "Aircraft unavailable"
            return aircraft
        # Header printed here; the caller prints the row itself.
        print(f"{'ID':<5}\t{'Name':<10}\t{'Model':<10}\t{'Capacity':<10}\t{'Reg_no':<10}\t{'Created_at'}")
        return aircraft

    def find_id(self, regNo: str):
        """Return the primary key for *regNo*, or 'Aircraft Unavailable' when absent.

        Callers distinguish the cases with ``type(result) is int``.
        """
        cursor = self.db.cursor()
        sql = "SELECT * FROM aircrafts WHERE reg_no = %s"
        adr = (regNo,)
        cursor.execute(sql, adr)
        record = cursor.fetchone()
        aircraft = AircraftRepository.__map_selected_record_to_aircraft(record)
        if aircraft is None:
            aircraft = "Aircraft Unavailable"
            return aircraft
        return aircraft.id

    def delete(self, id: int):
        """Delete row *id*; always returns the string 'Deleted'."""
        cursor = self.db.cursor()
        sql = "DELETE FROM aircrafts WHERE id = %s"
        adr = (id,)
        cursor.execute(sql, adr)
        self.db.commit()
        message = "Deleted"
        return message

    @staticmethod
    def __map_selected_record_to_aircraft(record):
        """Convert one DB row tuple to an Aircraft, or None for a missing row."""
        if record is None:
            return None
        else:
            # Column order must match the aircrafts table definition.
            id, name, model, capacity, reg_no, created_at = record
            aircraft = Aircraft(id, name, model, capacity, reg_no, created_at)
            return aircraft
|
{"/repositories/passenger_repository.py": ["/models/passenger.py", "/repositories/base_repository.py"], "/repositories/flight_repository.py": ["/models/flight.py", "/repositories/base_repository.py"], "/repositories/aircraft_repository.py": ["/models/aircraft.py", "/repositories/base_repository.py"], "/AMS.py": ["/models/aircraft.py", "/models/passenger.py", "/models/flight.py", "/models/booking.py", "/repositories/aircraft_repository.py", "/repositories/passenger_repository.py", "/repositories/flight_repository.py", "/repositories/booking_repoaitory.py"], "/repositories/booking_repoaitory.py": ["/models/booking.py", "/repositories/base_repository.py"]}
|
33,085
|
Lordtrituze/Airline_Management_System_DB
|
refs/heads/master
|
/models/passenger.py
|
from datetime import date
class Passenger:
    """One traveller, mirroring a row of the ``passengers`` table."""

    id: int
    first_name: str
    last_name: str
    email: str
    address: str
    reg_no: str
    created_at: date

    def __init__(self, id, first_name, last_name, email, address, reg_no, created_at):
        # Plain field copy; id/created_at may be None before persistence.
        (self.id, self.first_name, self.last_name,
         self.email, self.address, self.reg_no, self.created_at) = (
            id, first_name, last_name, email, address, reg_no, created_at)

    def __str__(self):
        """Tab-separated, column-aligned row for console listings."""
        columns = (
            f"{self.id:<5}",
            f"{self.first_name:<20}",
            f"{self.last_name:<20}",
            f"{self.email:<25}",
            f"{self.address:<25}",
            f"{self.reg_no:<10}",
            f"{self.created_at}",
        )
        return "\t".join(columns)
|
{"/repositories/passenger_repository.py": ["/models/passenger.py", "/repositories/base_repository.py"], "/repositories/flight_repository.py": ["/models/flight.py", "/repositories/base_repository.py"], "/repositories/aircraft_repository.py": ["/models/aircraft.py", "/repositories/base_repository.py"], "/AMS.py": ["/models/aircraft.py", "/models/passenger.py", "/models/flight.py", "/models/booking.py", "/repositories/aircraft_repository.py", "/repositories/passenger_repository.py", "/repositories/flight_repository.py", "/repositories/booking_repoaitory.py"], "/repositories/booking_repoaitory.py": ["/models/booking.py", "/repositories/base_repository.py"]}
|
33,086
|
Lordtrituze/Airline_Management_System_DB
|
refs/heads/master
|
/AMS.py
|
from models.aircraft import Aircraft
from models.passenger import Passenger
from models.flight import Flight
from models.booking import Booking
from repositories.aircraft_repository import AircraftRepository
from repositories.passenger_repository import PassengerRepository
from repositories.flight_repository import FlightRepository
from repositories.booking_repoaitory import BookingRepository
# Module-level repository singletons shared by every menu handler below;
# they all reuse the one connection opened by baserepsoitory.
aircraft_repository = AircraftRepository()
flight_repository = FlightRepository()
passenger_repository = PassengerRepository()
booking_repository = BookingRepository()
def main():
    """Top-level menu loop: dispatch to a domain sub-menu, exit on 0.

    Non-numeric input makes ``int(input(...))`` raise ValueError (unhandled).
    """
    flag = True
    options = [1, 2, 3, 4]  # valid sub-menu choices
    while flag:
        mainMenu()
        menuOption = int(input("\t--> "))
        if menuOption == 0:
            exit()
        elif menuOption in options:
            subMenu(menuOption)
        else:
            print("Please Enter a valid option")
            # NOTE(review): restarts by recursion instead of relying on the
            # loop; repeated bad input grows the call stack — confirm intent.
            main()
def mainMenu():
    """Print the top-level menu choices (no input is read here)."""
    print(f"""
Airline Management Menu
Enter (1) to Manage Aircrafts
Enter (2) to Manage Flights
Enter (3) to Manage Passengers
Enter (4) to Manage Bookings
Enter (0) to Exit Menu""")
def subMenu(menuOption):
    """Show the sub-menu for the chosen domain and dispatch the next choice.

    menuOption: 1=aircraft, 2=flights, 3=passengers, 4=bookings.
    Non-numeric input makes ``int(input(...))`` raise ValueError (unhandled).
    """
    if menuOption == 1:
        print(f"""
Aircraft Management Menu
Enter (1) to Create Aircrafts
Enter (2) to Search Aircrafts
Enter (3) to Update Aircrafts
Enter (4) to Delete Aircrafts
Enter (5) to Print All Aircrafts
Enter (0) to Exit to Main-Menu""")
        menuOption = int(input("\t--> "))
        aircraftMenu(menuOption)
    elif menuOption == 2:
        print(f"""
Flight Management Menu
Enter (1) to Create Flight
Enter (2) to Search Flight
Enter (3) to Update Flight
Enter (4) to Delete Flight
Enter (5) to Print All Flights
Enter (0) to Exit Main-Menu""")
        menuOption = int(input("\t--> "))
        flightMenu(menuOption)
    elif menuOption == 3:
        print(f"""
Passenger Management Menu
Enter (1) to Create Passenger
Enter (2) to Search Passenger
Enter (3) to Update Passenger
Enter (4) to Delete Passenger
Enter (5) to Print All Passengers
Enter (0) to Exit Main-Menu""")
        menuOption = int(input("\t--> "))
        passengerMenu(menuOption)
    elif menuOption == 4:
        print(f"""
Booking Management Menu
Enter (1) to Create Booking
Enter (2) to Search Booking
Enter (3) to Update Booking
Enter (4) to Delete Booking
Enter (5) to Print All Bookings
Enter (0) to Exit Main-Menu""")
        menuOption = int(input("\t--> "))
        bookingMenu(menuOption)
# Aircraft
def aircraftMenu(menuOption):
    """Handle one aircraft sub-menu choice, then re-show the aircraft sub-menu.

    1=create, 2=search by reg_no, 3=update by id, 4=delete by reg_no,
    5=list all, 0=back to main menu.
    """
    if menuOption == 1:
        name = input("Enter The name of the Aircraft \n :")
        model = input("Enter the model of the Aircraft \n :")
        capacity = input("Enter the capacity of the Aircraft \n :")
        reg_no = input("Enter the registration number of the Aircraft\n: ")
        # id/created_at are filled by the database.
        aircraft = Aircraft(id=None, name=name, model=model, capacity=capacity, reg_no= reg_no, created_at=None)
        aircraft_repository.create(aircraft)
        # aircraftManager.createCraft(name, model, capacity)
        request()
        subMenu(1)
    elif menuOption == 2:
        reg_no = input('Enter the Registration number of the Aircraft \n: ')
        aircraft = aircraft_repository.find(reg_no=reg_no)
        print(aircraft)
        request()
        subMenu(1)
    elif menuOption == 3:
        # List first so the user can pick an id from the table.
        aircraft_repository.showAll()
        id = int(input("Enter the id of the Aircraft you want to Update from Above \n :"))
        reg_no = input("Enter the new Registration Number of the Aircraft \n :")
        name = input("Enter The new name of the Aircraft \n :")
        model = input("Enter the new model of the Aircraft \n :")
        capacity = int(input("Enter the new capacity of the Aircraft \n :"))
        aircraft = Aircraft(id=None, name=name, model=model, capacity=capacity, reg_no=reg_no, created_at=None)
        aircraft_repository.update(id=id, aircraft=aircraft)
        # aircraftManager.update(name, model, capacity, regNo)
        request()
        subMenu(1)
    elif menuOption == 4:
        # aircraft_repository.showAll()
        reg_no = input("Enter the Registration Number of the Aircraft you want to delete \n :")
        # find_id returns an int id on success, a sentinel string otherwise.
        id = aircraft_repository.find_id(reg_no)
        if type(id) is int:
            aircraft = aircraft_repository.delete(id=id)
        else:
            aircraft = "Aircraft not found"
        print(aircraft)
        request()
        subMenu(1)
    elif menuOption == 5:
        aircraft_repository.showAll()
        request()
        subMenu(1)
    elif menuOption == 0:
        main()
    else:
        print("Please enter a valid option")
        subMenu(1)
#Flight
def flightMenu(menuOption):
    """Handle one flight sub-menu choice, then re-show the flight sub-menu.

    1=create, 2=search by flight_no, 3=update by id, 4=delete by flight_no,
    5=list all, 0=back to main menu.

    Fixes over the previous revision:
    * a failed aircraft lookup now returns instead of falling through and
      creating a flight with the sentinel error string as aircraft_id;
    * option 3 lists flights (it previously listed passengers);
    * option 4 reports "Flight not found" (previously "Aircraft not found");
    * the flight-number prompt was truncated ("Enter the Flight").
    """
    if menuOption == 1:
        aircraft = input("Enter the Registration Number of the Aircraft for the Flight \n :")
        # find_id returns an int id on success, a sentinel string otherwise.
        aircraft_id = aircraft_repository.find_id(aircraft)
        if type(aircraft_id) is not int:
            print("No Aircraft with the Registration Number you entered was found")
            request()
            subMenu(2)
            return  # do not create a flight with an invalid aircraft_id
        takeoff_location = input(
            "Enter the takeoff_location of the Flight \n :")
        destination = input(
            "Enter the destination of the Flight \n :")
        takeoff_time = input("Enter the take-off time \n :")
        arrival_time = input("Enter the arrival time \n :")
        flight_no = input("Enter the Flight Number \n :")
        # id/created_at are filled by the database.
        flight = Flight(id=None, aircraft_id=aircraft_id, takeoff_location=takeoff_location, destination=destination, takeoff_time=takeoff_time, arrival_time=arrival_time, flight_no=flight_no, created_at=None)
        flight_repository.create(flight)
        request()
        subMenu(2)
    elif menuOption == 2:
        flight_no = input('Enter the Flight Number \n: ')
        flight = flight_repository.find(flight_no=flight_no)
        print(flight)
        request()
        subMenu(2)
    elif menuOption == 3:
        # List flights first so the user can pick an id from the table.
        flight_repository.showAll()
        id = int(input("Enter the ID of the Flight you want to Update \n :"))
        aircraft_id = input("Enter The Aircraft ID for the Flight \n :")
        takeoff_location = input("Enter The Takeoff Location of the Flight \n :")
        destination = input("Enter The Destination of the Flight \n :")
        takeoff_time = input("Enter Takeoff Time of the Flight \n :")
        arrival_time = input("Enter Arrival Time of the Flight \n :")
        flight_no = input("Enter the Flight Number of The Flight \n :")
        flight = Flight(id=None, aircraft_id=aircraft_id, takeoff_location=takeoff_location, destination=destination, takeoff_time=takeoff_time, arrival_time=arrival_time, flight_no=flight_no, created_at=None)
        flight_repository.update(id=id, flight=flight)
        request()
        subMenu(2)
    elif menuOption == 4:
        flight_no = input("Enter the Flight Number of the Flight you want to delete \n :")
        id = flight_repository.find_id(flight_no)
        if type(id) is int:
            flight = flight_repository.delete(id=id)
        else:
            flight = "Flight not found"
        print(flight)
        request()
        subMenu(2)
    elif menuOption == 5:
        flight_repository.showAll()
        request()
        subMenu(2)
    elif menuOption == 0:
        main()
    else:
        print("Please enter a valid option")
        subMenu(2)
#Passenger
def passengerMenu(menuOption):
    """Handle one passenger sub-menu choice, then re-show the passenger sub-menu.

    1=create, 2=search by reg_no, 3=update by id, 4=delete by reg_no,
    5=list all, 0=back to main menu.
    """
    if menuOption == 1:
        first_name = input("Enter The First Name of the Passenger \n :")
        last_name = input("Enter The Last Name of the Passenger \n :")
        email = input("Enter the email of the Passenger \n :")
        address = input("Enter the address of the Passenger \n :")
        reg_no = input("Enter the Registration Number \n :")
        # id/created_at are filled by the database.
        passenger = Passenger(id=None, first_name= first_name, last_name=last_name, email=email, address=address, reg_no=reg_no, created_at=None)
        passenger_repository.create(passenger)
        request()
        subMenu(3)
    elif menuOption == 2:
        # NOTE(review): prompt mentions "or Name", but find() matches reg_no only.
        reg_no = input("Enter the Registration Number or Name of the Passenger you're looking for \n :")
        result = passenger_repository.find(reg_no)
        print(result)
        request()
        subMenu(3)
    elif menuOption == 3:
        # List first so the user can pick an id from the table.
        passenger_repository.showAll()
        id = int(input("Enter the ID of the Passenger you want to Update \n :"))
        first_name = input("Enter The First Name of the Passenger \n :")
        last_name = input("Enter The Last Name of the Passenger \n :")
        email = input("Enter the new email of the Passenger \n :")
        address = input("Enter the new address of the Passenger \n :")
        reg_no = input("Enter the Registration Number of the Passenger \n :")
        passenger = Passenger(id=None, created_at=None, first_name=first_name, last_name=last_name, email=email, address=address, reg_no=reg_no)
        passenger_repository.update(id=id, passenger=passenger)
        request()
        subMenu(3)
    elif menuOption == 4:
        reg_no = input("Enter the Registration Number of the Passenger you want to delete \n :")
        # find_id returns an int id on success, a sentinel string otherwise.
        id = passenger_repository.find_id(reg_no)
        if type(id) is int:
            passenger = passenger_repository.delete(id=id)
        else:
            passenger = "Passenger not found"
        print(passenger)
        request()
        subMenu(3)
    elif menuOption == 5:
        passenger_repository.showAll()
        request()
        subMenu(3)
    elif menuOption == 0:
        main()
    else:
        print("Please enter a valid option")
        subMenu(3)
#Booking
def bookingMenu(menuOption):
    """Handle one booking sub-menu choice, then re-show the booking sub-menu.

    1=create, 2=search by booking_no, 3=update by id, 4=delete by booking_no,
    5=list all, 0=back to main menu.

    Fix over the previous revision: failed passenger/flight lookups in
    options 1 and 3 now return instead of falling through and building a
    Booking with the sentinel error string as an id.
    """
    if menuOption == 1:
        passenger = input("Enter the Registration Number of the Passenger Booking the Flight \n :")
        # find_id returns an int id on success, a sentinel string otherwise.
        passenger_id = passenger_repository.find_id(passenger)
        if type(passenger_id) is not int:
            print("No Passenger with the Registration Number you entered was found")
            request()
            subMenu(4)
            return  # do not create a booking with an invalid passenger_id
        flight = input("Enter the Flight Number of the Flight the Passenger is Booking \n :")
        flight_id = flight_repository.find_id(flight)
        if type(flight_id) is not int:
            print("No Flight with the Flight Number you entered was found")
            request()
            subMenu(4)
            return  # do not create a booking with an invalid flight_id
        booking_type = input("Which Type of Ticket does the Passenger want to Book? \n (ONE-WAY) or (RETURN): ")
        flight_class = input("Which Class of ticket is the Passenger Booking? \n (FIRST CLASS), (BUSINESS CLASS), or (ECONOMY): ")
        booking_no = input("Enter the Booking Number \n :")
        booking_date = input("Enter the date of Booking \n :")
        seat_no = input("Enter the Seat Number \n :")
        # id/created_at are filled by the database.
        booking = Booking(id=None, passenger_id= passenger_id, flight_id= flight_id, booking_type= booking_type, flight_class= flight_class, booking_no= booking_no, booking_date= booking_date, seat_no= seat_no, created_at=None)
        booking_repository.create(booking)
        request()
        subMenu(4)
    elif menuOption == 2:
        booking_no = input("Enter the Booking Number of the Booking you're looking for \n :")
        result = booking_repository.find(booking_no)
        print(result)
        request()
        subMenu(4)
    elif menuOption == 3:
        # List first so the user can pick an id from the table.
        booking_repository.showAll()
        id = int(input("Enter the ID of the Booking you want to Update \n :"))
        passenger = input("Enter the Registration Number of the Passenger Booking the Flight \n :")
        passenger_id = passenger_repository.find_id(passenger)
        if type(passenger_id) is not int:
            print("No Passenger with the Registration Number you entered was found")
            request()
            subMenu(4)
            return  # do not update a booking with an invalid passenger_id
        flight = input("Enter the Flight Number of the Flight the Passenger is Booking \n :")
        flight_id = flight_repository.find_id(flight)
        if type(flight_id) is not int:
            print("No Flight with the Flight Number you entered was found")
            request()
            subMenu(4)
            return  # do not update a booking with an invalid flight_id
        booking_type = input("Which Type of Ticket does the Passenger want to Book? \n (ONE-WAY) or (RETURN): ")
        flight_class = input(
            "Which Class of ticket is the Passenger Booking? \n (FIRST CLASS), (BUSINESS CLASS), or (ECONOMY): ")
        booking_no = input("Enter the Booking Number \n :")
        booking_date = input("Enter the date of Booking \n :")
        seat_no = input("Enter the Seat Number \n :")
        booking = Booking(id=None, passenger_id= passenger_id, flight_id= flight_id, booking_type= booking_type, flight_class= flight_class, booking_no= booking_no, booking_date= booking_date, seat_no= seat_no, created_at=None)
        booking_repository.update(id=id, booking=booking)
        request()
        subMenu(4)
    elif menuOption == 4:
        booking_no = input("Enter the Booking Number of the Booking you want to delete \n :")
        id = booking_repository.find_id(booking_no)
        if type(id) is int:
            booking = booking_repository.delete(id=id)
        else:
            booking = "Booking not found"
        print(booking)
        request()
        subMenu(4)
    elif menuOption == 5:
        booking_repository.showAll()
        request()
        subMenu(4)
    elif menuOption == 0:
        main()
    else:
        print("Please enter a valid option")
        subMenu(4)
def request():
    """Pause between actions: continue on 'y', exit on 'n', re-ask otherwise.

    Invalid answers recurse until a valid one is entered.
    """
    answer = input(f"""Do you want to continue ?
(y/n) : """)
    if answer == 'y':
        pass
    elif answer == 'n':
        exit()
    else:
        print("Please enter a valid answer")
        request()


# Program entry point.
main()
|
{"/repositories/passenger_repository.py": ["/models/passenger.py", "/repositories/base_repository.py"], "/repositories/flight_repository.py": ["/models/flight.py", "/repositories/base_repository.py"], "/repositories/aircraft_repository.py": ["/models/aircraft.py", "/repositories/base_repository.py"], "/AMS.py": ["/models/aircraft.py", "/models/passenger.py", "/models/flight.py", "/models/booking.py", "/repositories/aircraft_repository.py", "/repositories/passenger_repository.py", "/repositories/flight_repository.py", "/repositories/booking_repoaitory.py"], "/repositories/booking_repoaitory.py": ["/models/booking.py", "/repositories/base_repository.py"]}
|
33,087
|
Lordtrituze/Airline_Management_System_DB
|
refs/heads/master
|
/repositories/booking_repoaitory.py
|
from typing import List
from models.booking import Booking
from repositories.base_repository import baserepsoitory
class BookingRepository(baserepsoitory):
    """CRUD operations for the ``bookings`` table over the shared MySQL connection."""

    def __init__(self):
        super().__init__()
        # All repositories share the single class-level connection.
        self.db = baserepsoitory.db

    def create(self, booking: Booking):
        """Insert *booking*; the generated primary key is written back onto it."""
        cursor = self.db.cursor()
        sql = "INSERT INTO bookings(passenger_id, flight_id, booking_type, flight_class, booking_no, booking_date, seat_no) " \
              "VALUES(%s, %s, %s, %s, %s, %s, %s) "
        val = (booking.passenger_id, booking.flight_id, booking.booking_type, booking.flight_class, booking.booking_no, booking.booking_date, booking.seat_no)
        cursor.execute(sql, val)
        self.db.commit()
        booking.id = cursor.lastrowid

    def find(self, booking_no: str):
        """Look up one booking by booking number.

        Returns a Booking, or the string 'Booking unavailable' when no row
        matches — callers print whichever comes back.
        """
        cursor = self.db.cursor()
        sql = "SELECT * FROM bookings WHERE booking_no = %s"
        adr = (booking_no,)
        cursor.execute(sql, adr)
        record = cursor.fetchone()
        booking = BookingRepository.__map_selected_record_to_booking(record)
        if booking is None:
            booking = "Booking unavailable"
            return booking
        # Header printed here; the caller prints the row itself.
        print(f"{'ID':<5}\t{'Passenger ID':<10}\t{'Flight ID':<10}\t{'Booking Type':<10}\t{'Flight Class':<10}\t{'Booking No':<10}\t{'Booking Date':<20}\t{'Seat No':<10}\t{'Created_at'}")
        return booking

    def find_id(self, booking_no: str):
        """Return the primary key for *booking_no*, or 'Booking unavailable' when absent.

        Callers distinguish the cases with ``type(result) is int``.
        """
        cursor = self.db.cursor()
        sql = "SELECT * FROM bookings WHERE booking_no = %s"
        adr = (booking_no,)
        cursor.execute(sql, adr)
        record = cursor.fetchone()
        booking = BookingRepository.__map_selected_record_to_booking(record)
        if booking is None:
            booking = "Booking unavailable"
            return booking
        return booking.id

    def delete(self, id: int):
        """Delete row *id*; always returns the string 'Deleted'."""
        cursor = self.db.cursor()
        sql = "DELETE FROM bookings WHERE id = %s"
        adr = (id,)
        cursor.execute(sql, adr)
        self.db.commit()
        message = "Deleted"
        return message

    def update(self, id: int, booking: Booking):
        """Overwrite all mutable columns of row *id* with *booking*'s fields."""
        cursor = self.db.cursor()
        sql = "UPDATE bookings SET passenger_id = %s, flight_id = %s, booking_type = %s, flight_class = %s, booking_no = %s, booking_date = %s, seat_no = %s WHERE id = %s"
        val = (booking.passenger_id, booking.flight_id, booking.booking_type, booking.flight_class, booking.booking_no, booking.booking_date, booking.seat_no, id)
        cursor.execute(sql, val)
        self.db.commit()

    def list(self):
        """Return every booking as Booking objects, printing the table header."""
        cursor = self.db.cursor()
        sql = "SELECT * FROM bookings"
        cursor.execute(sql)
        result = cursor.fetchall()
        bookings: List[Booking] = []
        for record in result:
            booking = BookingRepository.__map_selected_record_to_booking(record)
            bookings.append(booking)
        print(f"{'ID':<5}\t{'Passenger ID':<10}\t{'Flight ID':<10}\t{'Booking Type':<10}\t{'Flight Class':<10}\t{'Booking No':<10}\t{'Booking Date':<20}\t{'Seat No':<10}\t{'Created_at'}")
        return bookings

    def showAll(self):
        """Print header (via list()) followed by one line per booking."""
        bookings = self.list()
        for booking in bookings:
            print(booking)

    @staticmethod
    def __map_selected_record_to_booking(record):
        """Convert one DB row tuple to a Booking, or None for a missing row."""
        if record is None:
            return None
        else:
            # Column order must match the bookings table definition.
            id, passenger_id, flight_id, booking_type, flight_class, booking_no, booking_date, seat_no, created_at = record
            booking = Booking(id, passenger_id, flight_id, booking_type, flight_class, booking_no, booking_date, seat_no, created_at)
            return booking
|
{"/repositories/passenger_repository.py": ["/models/passenger.py", "/repositories/base_repository.py"], "/repositories/flight_repository.py": ["/models/flight.py", "/repositories/base_repository.py"], "/repositories/aircraft_repository.py": ["/models/aircraft.py", "/repositories/base_repository.py"], "/AMS.py": ["/models/aircraft.py", "/models/passenger.py", "/models/flight.py", "/models/booking.py", "/repositories/aircraft_repository.py", "/repositories/passenger_repository.py", "/repositories/flight_repository.py", "/repositories/booking_repoaitory.py"], "/repositories/booking_repoaitory.py": ["/models/booking.py", "/repositories/base_repository.py"]}
|
33,088
|
Lordtrituze/Airline_Management_System_DB
|
refs/heads/master
|
/models/flight.py
|
from datetime import date
class Flight:
    """One scheduled flight, mirroring a row of the ``flights`` table."""

    id: int
    aircraft_id: int
    takeoff_location: str
    destination: str
    takeoff_time: str
    arrival_time: str
    flight_no: int
    created_at: date

    def __init__(self, id, aircraft_id, takeoff_location, destination, takeoff_time, arrival_time, flight_no, created_at):
        # Plain field copy; id/created_at may be None before persistence.
        (self.id, self.aircraft_id, self.takeoff_location,
         self.destination, self.takeoff_time, self.arrival_time,
         self.flight_no, self.created_at) = (
            id, aircraft_id, takeoff_location, destination,
            takeoff_time, arrival_time, flight_no, created_at)

    def __str__(self):
        """Tab-separated, column-aligned row (trailing space kept for parity)."""
        columns = (
            f"{self.id:<5}",
            f"{self.aircraft_id:<10}",
            f"{self.takeoff_location:<20}",
            f"{self.destination:<20}",
            f"{self.takeoff_time:<20}",
            f"{self.arrival_time:<20}",
            f"{self.flight_no:<10}",
            f"{self.created_at} ",
        )
        return "\t".join(columns)
|
{"/repositories/passenger_repository.py": ["/models/passenger.py", "/repositories/base_repository.py"], "/repositories/flight_repository.py": ["/models/flight.py", "/repositories/base_repository.py"], "/repositories/aircraft_repository.py": ["/models/aircraft.py", "/repositories/base_repository.py"], "/AMS.py": ["/models/aircraft.py", "/models/passenger.py", "/models/flight.py", "/models/booking.py", "/repositories/aircraft_repository.py", "/repositories/passenger_repository.py", "/repositories/flight_repository.py", "/repositories/booking_repoaitory.py"], "/repositories/booking_repoaitory.py": ["/models/booking.py", "/repositories/base_repository.py"]}
|
33,098
|
lich14/DAC
|
refs/heads/master
|
/model.py
|
import torch.nn as nn
import torch.nn.functional as F
from torch.distributions import Normal
import numpy as np
class lowPolicy(nn.Module):
    """Intra-option (low-level) network: a Gaussian actor over the full
    concatenated action vector plus a critic with one value per option."""
    def __init__(self, feature_dim, action_dim, num_options, hidden_dim=64):
        super(lowPolicy, self).__init__()
        self.feature_dim = feature_dim
        self.action_dim = action_dim
        self.hidden_dim = hidden_dim
        self.num_options = num_options
        # Separate trunks for the actor and the critic (creation order kept
        # stable so initialization RNG draws match).
        self.body_actor = layer_init(nn.Linear(self.feature_dim, self.hidden_dim))
        self.body_critic = layer_init(nn.Linear(self.feature_dim, self.hidden_dim))
        self.a = layer_init(nn.Linear(self.hidden_dim, self.hidden_dim))
        self.mean = layer_init(nn.Linear(self.hidden_dim, self.action_dim))
        self.logstd = layer_init(nn.Linear(self.hidden_dim, self.action_dim))
        self.v1 = layer_init(nn.Linear(self.hidden_dim, self.hidden_dim))
        self.v2 = layer_init(nn.Linear(self.hidden_dim, self.num_options))
    def forward(self, x):
        # Actor head: tanh trunk -> Gaussian over the action vector.
        actor_hidden = F.tanh(self.a(F.tanh(self.body_actor(x))))
        mean = self.mean(actor_hidden)
        logstd = self.logstd(actor_hidden)
        dist = Normal(mean, logstd.exp())
        action = dist.sample()
        # Critic head: relu trunk -> one value estimate per option.
        critic_hidden = F.relu(self.v1(F.relu(self.body_critic(x))))
        value = self.v2(critic_hidden)
        return {
            'action': action,
            'a_logp': dist.log_prob(action),
            'value': value,
            'entropy': dist.entropy(),
            'mean': mean,
            'logstd': logstd,
        }
def layer_init(layer, w_scale=0.1):
    """Orthogonally initialize a layer's weight (scaled by w_scale), zero its
    bias, and return the same layer for chaining."""
    nn.init.orthogonal_(layer.weight.data)
    layer.weight.data *= w_scale
    nn.init.constant_(layer.bias.data, 0)
    return layer
class OptionNet(nn.Module):
    """High-level option network: per-option termination probabilities (beta),
    a master policy over options (q), and per-option value estimates."""
    def __init__(self, num_options, feature_dim, hidden_dim=64):
        super(OptionNet, self).__init__()
        self.feature_dim = feature_dim
        self.hidden_dim = hidden_dim
        self.num_options = num_options
        # body1 feeds the policy heads, body2 feeds the value head.
        self.fc_body1 = layer_init(nn.Linear(self.feature_dim, self.hidden_dim))
        self.fc_body2 = layer_init(nn.Linear(self.feature_dim, self.hidden_dim))
        self.fc_beta = layer_init(nn.Linear(self.hidden_dim, self.num_options))
        self.fc_option = layer_init(nn.Linear(self.hidden_dim, self.num_options))
        self.fc_value = layer_init(nn.Linear(self.hidden_dim, self.num_options))
    def forward(self, x):
        """Return dict with 'q' (option probs), 'beta' (termination probs)
        and 'value' (one value per option)."""
        body1 = F.tanh(self.fc_body1(x))
        beta = F.sigmoid(self.fc_beta(body1))
        # NOTE(review): F.softmax without dim= uses deprecated implicit-dim
        # selection, which picks dim 0 for 1-D input (single state) but dim 1
        # for batched 2-D input — confirm this is the intended behavior before
        # pinning an explicit dim.
        q = F.softmax(self.fc_option(body1))
        body2 = F.relu(self.fc_body2(x))
        value = self.fc_value(body2)
        return {
            'q': q,
            'beta': beta,
            'value': value,
        }
class Store():
    """Fixed-capacity ring buffer over a numpy structured array.

    store() reports True exactly when the write pointer wraps back to the
    start, i.e. the buffer has just been completely filled.
    """
    def __init__(self, transition, buffer_capacity, batch_size):
        self.buffer_capacity = buffer_capacity
        self.batch_size = batch_size
        self.data = transition  # structured dtype, kept for empty()
        self.buffer = np.empty(self.buffer_capacity, dtype=transition)
        self.counter = 0  # next write index
    def store(self, add):
        """Write one record at the current slot; True when the buffer wrapped."""
        self.buffer[self.counter] = add
        self.counter = (self.counter + 1) % self.buffer_capacity
        return self.counter == 0
    def empty(self):
        """Discard all stored records by reallocating the backing array."""
        self.buffer = np.empty(self.buffer_capacity, dtype=self.data)
    def show(self):
        """Expose the raw buffer plus its capacity and batch size."""
        return self.buffer, self.buffer_capacity, self.batch_size
|
{"/DAC_divide.py": ["/model.py"], "/run.py": ["/DAC_divide.py"]}
|
33,099
|
lich14/DAC
|
refs/heads/master
|
/DAC_divide.py
|
'''
coded by lch
consider double value function, no frozen
'''
from model import lowPolicy, OptionNet, Store
from torch.utils.data.sampler import BatchSampler, SubsetRandomSampler
import torch
import numpy as np
from torch.distributions import Normal, Categorical
import torch.nn.functional as F
import torch.nn as nn
class DACAgent():
    """Double Actor-Critic option agent.

    A high-level OptionNet chooses among options; a low-level lowPolicy emits
    the continuous action slice for the active option. Both are trained with
    PPO over ring-buffered transitions.

    FIX: np.finfo(np.float) used the `np.float` alias that was removed in
    NumPy 1.24 — replaced with np.float64 (identical eps value).
    """
    def __init__(self, config, lowtran, hightran, device):
        self.config = config
        self.device = device
        self.lownet = lowPolicy(config.get('feature_dim'), config.get('action_dim'), config.get('num_options'),
                                config.get('hidden_dim')).double().to(self.device)
        self.highnet = OptionNet(config.get('num_options'), config.get('feature_dim'),
                                 config.get('hidden_dim')).double().to(self.device)
        self.lowmemory = Store(lowtran, config.get('buffer_cap'), config.get('batch_size'))
        self.highmemory = Store(hightran, config.get('buffer_cap'), config.get('batch_size'))
        self.lowoptimizition = torch.optim.Adam(self.lownet.parameters(), lr=config.get('low_lr'))
        self.highoptimizition = torch.optim.Adam(self.highnet.parameters(), lr=config.get('high_lr'))
        # Per-option [start, end) slices into the concatenated action vector.
        self.start_list = config.get('start_list')
        self.end_list = config.get('end_list')
    def sample_option(self, prediction, prev_option, is_intial_states):
        """Sample an option for a single state.

        On an initial state the option comes from the master policy q alone;
        otherwise pi_hat mixes "keep prev_option" (prob 1-beta) with a fresh
        draw from q (prob beta).
        """
        with torch.no_grad():
            q_option = prediction['q']
            mask = torch.zeros_like(q_option)
            beta = 1
            if is_intial_states == 0:
                mask[prev_option] = 1
                beta = prediction['beta'][prev_option]
            pi_hat_option = (1 - beta) * mask + beta * q_option
            dist = torch.distributions.Categorical(probs=q_option)
            options = dist.sample()
            options_logp = dist.log_prob(options)
            dist = torch.distributions.Categorical(probs=pi_hat_option)
            options_hat = dist.sample()
            options_hat_logp = dist.log_prob(options_hat)
            # Both distributions are sampled unconditionally to keep the RNG
            # stream stable; only one result is used.
            if is_intial_states:
                options = options
                options_logp = options_logp
            else:
                options = options_hat
                options_logp = options_hat_logp
            return options, options_logp
    def choose_action(self, state, option):
        """Run the low-level policy and slice out the active option's part.

        Returns the full sampled action, the summed log-prob of the active
        slice, the active option's value, and the affinely rescaled
        (action * 2 - 1) slice that is sent to the environment.
        """
        state = state.to(self.device)
        low_action_total = self.lownet(state)
        start_index = self.start_list[option]
        end_index = self.end_list[option]
        action = low_action_total['action']
        a_logp = low_action_total['a_logp'][start_index:end_index]
        low_value = low_action_total['value'][option]
        input_action = action[start_index:end_index].to('cpu')
        input_action = input_action * 2 - 1
        return {
            'action': action,
            'a_logp': a_logp.sum(),
            'low_value': low_value,
            'input_action': input_action,
        }
    def lowtrain(self):
        """One PPO update of the low-level policy over the full low buffer.

        Returns averaged actor loss, critic loss and entropy for logging.
        """
        buffer, buffer_capacity, batch_size = self.lowmemory.show()
        s = torch.tensor(buffer['s'], dtype=torch.double).to(self.device)
        option = torch.tensor(buffer['option'], dtype=torch.double).view(-1, 1).to(self.device)
        s_ = torch.tensor(buffer['s_'], dtype=torch.double).to(self.device)
        option_ = torch.tensor(buffer['option_'], dtype=torch.double).view(-1, 1).to(self.device)
        a = torch.tensor(buffer['a'], dtype=torch.double).to(self.device)
        old_a_logp = torch.tensor(buffer['a_logp'], dtype=torch.double).view(-1, 1).to(self.device)
        r = torch.tensor(buffer['r'], dtype=torch.double).view(-1, 1).to(self.device)
        done = torch.tensor(buffer['done'], dtype=torch.double).view(-1, 1).to(self.device)
        action_loss_record, value_loss_record, entropy_record, loop_record = 0, 0, 0, 0
        with torch.no_grad():
            # Option id > 5 acts as a "no option" sentinel (the driver stores
            # 100); in that case the value is averaged over all options
            # instead of gathered for a specific one.
            value_next = self.lownet(s_)['value']
            option_change_next = torch.where(option_ > 5, torch.zeros_like(option_), option_)
            value_next_zeros = torch.gather(value_next, 1, option_change_next.long())
            value_next = torch.where(option_ > 5,
                                     value_next.sum(dim=1, keepdim=True) / self.config.get('num_options'),
                                     value_next_zeros)
            value_now = self.lownet(s)['value']
            option_change_now = torch.where(option > 5, torch.zeros_like(option), option)
            value_now_zeros = torch.gather(value_now, 1, option_change_now.long())
            value_now = torch.where(option > 5,
                                    value_now.sum(dim=1, keepdim=True) / self.config.get('num_options'),
                                    value_now_zeros)
            delta = r + (1 - done) * self.config.get('gamma') * value_next - value_now
            adv = torch.zeros_like(delta)
            adv[-1] = delta[-1]
            # GAE
            for i in reversed(range(buffer_capacity - 1)):
                adv[i] = delta[i] + self.config.get('tau') * (1 - done[i]) * adv[i + 1]
            target_v = value_now + adv
            adv = (adv - adv.mean()) / (adv.std() + np.finfo(np.float64).eps)  # Normalize advantage
        for _ in range(self.config.get('ppoepoch')):
            for index in BatchSampler(SubsetRandomSampler(range(buffer_capacity)), batch_size, False):
                mean, logstd = self.lownet(s[index])['mean'], self.lownet(s[index])['logstd']
                std = logstd.exp()
                dist = Normal(mean, std)
                a_logp = dist.log_prob(a[index])
                option_short = option[index]
                # Mask log-probs so only the active option's action slice
                # contributes to the PPO ratio.
                mask = torch.zeros_like(a_logp).double()
                index_list = [torch.where(option_short == i)[0] for i in range(self.config.get('num_options'))]
                input_list = torch.zeros(self.config.get('num_options'), self.config.get('action_dim'))
                start_list = self.config.get('start_list')
                end_list = self.config.get('end_list')
                for i in range(self.config.get('num_options')):
                    input_list[i][start_list[i]:end_list[i]] = 1
                for i in range(self.config.get('num_options')):
                    if torch.tensor(index_list[i].shape) != 0:
                        mask[index_list[i]] = torch.ones(torch.tensor(index_list[i].shape),
                                                         self.config.get('action_dim')).double().to(
                            self.device) * input_list[i].double().to(self.device)
                a_logp = a_logp * mask
                a_p_1 = a_logp.sum(dim=1, keepdim=True)
                ratio = torch.exp((a_p_1 - old_a_logp[index]))
                surr1 = ratio * adv[index]
                surr2 = torch.clamp(ratio, 1.0 - self.config.get('clip_param'),
                                    1.0 + self.config.get('clip_param')) * adv[index]
                action_loss = -torch.min(surr1, surr2).mean()
                entropy = dist.entropy() * mask
                value_now = self.lownet(s[index])['value']
                option_change_now = torch.where(option[index] > 5, torch.zeros_like(option[index]), option[index])
                value_now_zeros = torch.gather(value_now, 1, option_change_now.long())
                value_now = torch.where(option[index] > 5,
                                        value_now.sum(dim=1, keepdim=True) / self.config.get('num_options'),
                                        value_now_zeros)
                value_loss = F.smooth_l1_loss(value_now, target_v[index])
                self.lowoptimizition.zero_grad()
                loss = action_loss + value_loss - self.config.get('entropy_para_low') * entropy.mean()
                loss.backward()
                nn.utils.clip_grad_norm_(self.lownet.parameters(), self.config.get('max_grad_norm'))
                self.lowoptimizition.step()
                action_loss_record += action_loss.cpu().detach()
                value_loss_record += value_loss.cpu().detach()
                entropy_record += entropy.mean().cpu().detach()
                loop_record += 1
        return {
            'actionloss': action_loss_record / loop_record,
            'valueloss': value_loss_record / loop_record,
            'entropy': entropy_record / loop_record,
        }
    def hightrain(self):
        """One PPO update of the high-level option policy over the high buffer.

        Returns averaged actor loss, critic loss and entropy for logging.
        """
        buffer, buffer_capacity, batch_size = self.highmemory.show()
        s = torch.tensor(buffer['s'], dtype=torch.double).to(self.device)
        pre_option = torch.tensor(buffer['pre_option'], dtype=torch.double).view(-1, 1).to(self.device)
        s_ = torch.tensor(buffer['s_'], dtype=torch.double).to(self.device)
        option = torch.tensor(buffer['option'], dtype=torch.double).view(-1, 1).to(self.device)
        option_logp = torch.tensor(buffer['option_logp'], dtype=torch.double).view(-1, 1).to(self.device)
        r = torch.tensor(buffer['r'], dtype=torch.double).view(-1, 1).to(self.device)
        done = torch.tensor(buffer['done'], dtype=torch.double).view(-1, 1).to(self.device)
        action_loss_record, value_loss_record, entropy_record, loop_record = 0, 0, 0, 0
        with torch.no_grad():
            # Same >5 sentinel handling as lowtrain: average values when
            # there is no concrete (previous) option.
            value_next = self.highnet(s_)['value']
            option_change_next = torch.where(option > 5, torch.zeros_like(option), option)
            value_next_zeros = torch.gather(value_next, 1, option_change_next.long())
            value_next = torch.where(option > 5,
                                     value_next.sum(dim=1, keepdim=True) / self.config.get('num_options'),
                                     value_next_zeros)
            value_now = self.highnet(s)['value']
            option_change_now = torch.where(pre_option > 5, torch.zeros_like(pre_option), pre_option)
            value_now_zeros = torch.gather(value_now, 1, option_change_now.long())
            value_now = torch.where(pre_option > 5,
                                    value_now.sum(dim=1, keepdim=True) / self.config.get('num_options'),
                                    value_now_zeros)
            delta = r + (1 - done) * self.config.get('gamma') * value_next - value_now
            adv = torch.zeros_like(delta)
            adv[-1] = delta[-1]
            # GAE
            for i in reversed(range(buffer_capacity - 1)):
                adv[i] = delta[i] + self.config.get('tau') * (1 - done[i]) * adv[i + 1]
            target_v = value_now + adv
            adv = (adv - adv.mean()) / (adv.std() + np.finfo(np.float64).eps)  # Normalize advantage
        for _ in range(self.config.get('ppoepoch')):
            for index in BatchSampler(SubsetRandomSampler(range(buffer_capacity)), batch_size, False):
                q_short, beta_short = self.highnet(s[index])['q'], self.highnet(s[index])['beta']
                pre_option_short = pre_option[index]
                pi_hat_option = self.sample_option_multi(q_short, beta_short, pre_option_short)
                pi_hat_p = torch.gather(pi_hat_option, 1, option[index].long())
                ratio = pi_hat_p / torch.exp(option_logp[index])
                surr1 = ratio * adv[index]
                surr2 = torch.clamp(ratio, 1.0 - self.config.get('clip_param'),
                                    1.0 + self.config.get('clip_param')) * adv[index]
                action_loss = -torch.min(surr1, surr2).mean()
                m = Categorical(pi_hat_option)
                entropy = m.entropy()
                value_now = self.highnet(s[index])['value']
                option_change_now = torch.where(pre_option[index] > 5, torch.zeros_like(pre_option[index]),
                                                pre_option[index])
                value_now_zeros = torch.gather(value_now, 1, option_change_now.long())
                value_now = torch.where(pre_option[index] > 5,
                                        value_now.sum(dim=1, keepdim=True) / self.config.get('num_options'),
                                        value_now_zeros)
                value_loss = F.smooth_l1_loss(value_now, target_v[index])
                self.highoptimizition.zero_grad()
                loss = action_loss + value_loss - self.config.get('entropy_para_high') * entropy.mean()
                loss.backward()
                nn.utils.clip_grad_norm_(self.highnet.parameters(), self.config.get('max_grad_norm'))
                self.highoptimizition.step()
                action_loss_record += action_loss.cpu().detach()
                value_loss_record += value_loss.cpu().detach()
                entropy_record += entropy.mean().cpu().detach()
                loop_record += 1
        return {
            'actionloss': action_loss_record / loop_record,
            'valueloss': value_loss_record / loop_record,
            'entropy': entropy_record / loop_record,
        }
    def sample_option_multi(self, q, beta, pre_option):
        """Vectorized pi_hat over a batch.

        Rows whose pre_option exceeds 80 (episode-start sentinel; the driver
        stores 100) use q directly; the rest mix sticking to pre_option
        (prob 1-beta) with the master policy q (prob beta).
        """
        index_init = torch.where(pre_option > 80)[0]
        index_run = torch.where(pre_option < 81)[0]
        mask = torch.zeros_like(q)
        add_ones = torch.ones_like(q)
        mask[index_run, :] = mask[index_run, :].scatter_(1, pre_option[index_run, :].long(), add_ones[index_run, :])
        beta_change = beta * mask
        beta_change = beta_change.sum(dim=1, keepdim=True)
        beta_change[index_init, :] = 1
        pi_hat_option = (1 - beta_change) * mask + beta_change * q
        pi_hat_option[index_init, :] = q[index_init, :]
        return pi_hat_option
class LinearSchedule:
    """Linearly anneal a value from start toward end over a number of steps.

    Each call returns the current value, then advances it by inc * steps,
    clamped so it never overshoots end. A single-argument schedule is
    constant.
    """
    def __init__(self, start, end=None, steps=None):
        if end is None:
            # Constant schedule: end == start, one dummy step.
            end, steps = start, 1
        self.inc = (end - start) / float(steps)
        self.current = start
        self.end = end
        # min clamps an increasing schedule, max a decreasing one.
        self.bound = min if end > start else max
    def __call__(self, steps=1):
        out = self.current
        self.current = self.bound(self.current + self.inc * steps, self.end)
        return out
def to_np(t):
    """Detach a tensor from the graph and return it as a CPU numpy array."""
    return t.detach().cpu().numpy()
|
{"/DAC_divide.py": ["/model.py"], "/run.py": ["/DAC_divide.py"]}
|
33,100
|
lich14/DAC
|
refs/heads/master
|
/run.py
|
import gym
import mujoco_py
import torch
import numpy as np
from DAC_0 import to_np
import DAC_divide
import DAC
import DAC_cross
import argparse
from tensorboardX import SummaryWriter
use_cuda = torch.cuda.is_available()
device = torch.device("cuda:2" if use_cuda else "cpu")
def get_args():
    """Parse the training method and environment name from sys.argv."""
    parser = argparse.ArgumentParser(description='RL')
    parser.add_argument('--method', default='DAC_0', type=str, help='method for training')
    parser.add_argument('--env', default='Walker2d-v2', type=str, help='environment')
    return parser.parse_args()
class task():
    """Glue between the gym environment and a DAC agent: derives dimensions
    from the env, builds the replay dtypes, and drives stepping + training.

    FIX: in step(), the observation returned by env.reset() after a terminal
    step was immediately overwritten by the terminal next_states, so every
    new episode started from the previous episode's final observation. The
    assignment is now conditional.
    """
    def __init__(self, config):
        self.env = gym.make(config.get('env_name'))
        # The low-level action vector concatenates one action block per
        # option; start_list/end_list give each option's slice into it.
        config['action_dim'] = self.env.action_space.shape[0] * config.get('num_options')
        config['feature_dim'] = self.env.observation_space.shape[0]
        config['start_list'] = [i * self.env.action_space.shape[0] for i in range(config.get('num_options'))]
        config['end_list'] = [(i + 1) * self.env.action_space.shape[0] for i in range(config.get('num_options'))]
        # Structured dtypes for the low- and high-level ring buffers.
        self.lowtrans = np.dtype([
            ('s', np.float64, (config.get('feature_dim'),)),
            ('s_', np.float64, (config.get('feature_dim'),)),
            ('a', np.float64, (config.get('action_dim'),)),
            ('option', np.float64),
            ('option_', np.float64),
            ('r', np.float64),
            ('a_logp', np.float64),
            ('done', np.float64),
            ('pre_option', np.float64),
        ])
        self.hightrans = np.dtype([
            ('s', np.float64, (config.get('feature_dim'),)),
            ('s_', np.float64, (config.get('feature_dim'),)),
            ('option', np.float64),
            ('pre_option', np.float64),
            ('r', np.float64),
            ('option_logp', np.float64),
            ('done', np.float64),
        ])
        if config.get('method') == 'DAC_divide':
            self.nets = DAC_divide.DACAgent(config, self.lowtrans, self.hightrans, device)
        elif config.get('method') == 'DAC':
            self.nets = DAC.DACAgent(config, self.lowtrans, self.hightrans, device)
        else:
            self.nets = DAC_cross.DACAgent(config, self.lowtrans, self.hightrans, device)
        self.is_initial_states = 1
        self.prev_options = torch.tensor(100)  # sentinel: no previous option yet
        self.states = self.env.reset()
        self.record = None  # previous transition, finalized on the next step
        self.reward = 0
        self.loop = 0
        self.innerstep = 0
        self.lowtrainloop = 0
        self.hightrainloop = 0
    def step(self):
        """Advance the environment by one action and store both transitions."""
        self.innerstep += 1
        self.states = torch.tensor(self.states, dtype=torch.double, device=device)
        highoutput = self.nets.highnet(self.states)
        options, options_logp = self.nets.sample_option(highoutput, self.prev_options, self.is_initial_states)
        lowoutput = self.nets.choose_action(self.states, options)
        input_action, actions, a_logp = lowoutput['input_action'], lowoutput['action'], lowoutput['a_logp']
        next_states, rewards, terminals, info = self.env.step(to_np(input_action))
        self.reward += rewards
        self.is_initial_states = torch.tensor(terminals).double()
        high_iftrain = self.nets.highmemory.store(
            (self.states.to('cpu'), next_states, options.to('cpu'), self.prev_options.to('cpu'), rewards,
             options_logp.to('cpu'), self.is_initial_states.to('cpu')))
        if self.record is not None:
            # The low transition is completed one step late, once the next
            # option (option_) is known.
            low_iftrain = self.nets.lowmemory.store(
                (self.record[0].to('cpu'), self.record[1], self.record[2].to('cpu'), self.record[3].to('cpu'),
                 options.to('cpu'), self.record[4], self.record[5].to('cpu').detach(), self.record[6],
                 self.record[7].to('cpu')))
            self.train(low_iftrain, high_iftrain)
        self.record = [
            self.states, next_states, actions, options, rewards, a_logp, self.is_initial_states, self.prev_options
        ]
        self.prev_options = options
        if terminals:
            self.prev_options = torch.tensor(100)
            writer.add_scalar('reward', self.reward, self.loop)
            writer.add_scalar('step', self.innerstep, self.loop)
            self.innerstep = 0
            self.reward = 0
            self.loop += 1
            # BUG FIX: keep the freshly-reset observation for the next step
            # (it was previously clobbered by the terminal next_states below).
            self.states = self.env.reset()
        else:
            self.states = next_states
    def train(self, low_iftrain, high_iftrain):
        """Run a PPO update on whichever ring buffer just wrapped around."""
        if low_iftrain is True:
            record = self.nets.lowtrain()
            writer.add_scalar('low/actor_loss', record['actionloss'], self.lowtrainloop)
            writer.add_scalar('low/critic_loss', record['valueloss'], self.lowtrainloop)
            writer.add_scalar('low/entropy', record['entropy'], self.lowtrainloop)
            self.lowtrainloop += 1
        if high_iftrain is True:
            record = self.nets.hightrain()
            writer.add_scalar('high/actor_loss', record['actionloss'], self.hightrainloop)
            writer.add_scalar('high/critic_loss', record['valueloss'], self.hightrainloop)
            writer.add_scalar('high/entropy', record['entropy'], self.hightrainloop)
            self.hightrainloop += 1
if __name__ == "__main__":
    args = get_args()
    # Hyper-parameters shared by all DAC variants.
    config = {
        'env_name': args.env,
        'num_options': 4,
        'buffer_cap': 2048,  # transitions collected per PPO update
        'batch_size': 64,
        'gamma': 0.99,  # discount factor
        'tau': 0.95,  # GAE decay
        'clip_param': 0.2,  # PPO clip range
        'entropy_para_high': 0.01,  # entropy bonus weight, high-level policy
        'entropy_para_low': 0,  # entropy bonus weight, low-level policy
        'low_lr': 0.0003,
        'high_lr': 0.0003,
        'totalstep': 4000000,  # total environment steps
        'ppoepoch': 10,  # PPO epochs per buffer
        'with_repara': False,
        'hidden_dim': 64,
        'method': args.method,
        'soft_tau': 0.01,
        'max_grad_norm': 0.5,  # gradient clipping threshold
    }
    name = 'env_name_' + config.get('env_name') + '_method_' + config.get('method')
    board_path = f"runs/{name}"
    writer = SummaryWriter(board_path)
    agent = task(config)
    terminal = False  # NOTE(review): never read — likely leftover
    for _ in range(config.get('totalstep')):
        agent.step()
    writer.close()
|
{"/DAC_divide.py": ["/model.py"], "/run.py": ["/DAC_divide.py"]}
|
33,149
|
antonio6643/Alexis
|
refs/heads/master
|
/__init__.py
|
# from Lexer import Token, TokenRegistry, Lexer
from aLEXis.Lexer import Token, TokenRegistry, Lexer
|
{"/Lexer.py": ["/SampleTokens.py"], "/SampleTokens.py": ["/Lexer.py"]}
|
33,150
|
antonio6643/Alexis
|
refs/heads/master
|
/Lexer.py
|
from datetime import datetime
class Token:
    """Base lexer token: position bookkeeping plus the raw matched text.

    Subclasses declare a class attribute `identifiers` — the set of
    characters that belong to the token type.
    """
    def __init__(self, lineNumber: int, columnNumber: int, truePosition: int, data: str):
        self.line = lineNumber
        self.column = columnNumber
        self.truePosition = truePosition
        self.data = data  # TODO: Data validation(don't want tokens with the wrong type)
    @classmethod
    def isValidCharacter(cls, char: str):  # Can be overridden
        """True when char is in this token type's identifier set."""
        return char in cls.identifiers
    def __repr__(self):
        return f"({self.__class__.__name__}, {self.data})"
class TokenRegistry:
    """Ordered collection of token types used to classify single characters."""
    def __init__(self, rawRegistry):
        self.tokenTypes = rawRegistry
    def classifyCharacter(self, char: str):
        """Return the first registered token type that accepts char, else None.

        Registration order doubles as priority order — earlier types win.
        """
        return next((t for t in self.tokenTypes if t.isValidCharacter(char)), None)
class Buffer: # TODO: Process the token data since a string would have the data with the quotations
    """Accumulates consecutive characters belonging to one token type."""
    def __init__(self, tokenType: Token, startLine: int, startColumn: int, startPosition: int):
        self.seekingToken = tokenType
        self.line = startLine
        self.column = startColumn
        self.position = startPosition
        self.stream = ""
    def scout(self, char: str):
        """Consume char into the stream if the token type accepts it.

        Token types with an `OnlyOne` attribute accept at most one character.
        """
        if not self.seekingToken.isValidCharacter(char):
            return False
        if hasattr(self.seekingToken, "OnlyOne") and len(self.stream) > 0:
            return False
        self.stream += char
        return True
    def packageToken(self):
        """Materialize the accumulated stream as a token instance."""
        return self.seekingToken(self.line, self.column, self.position, self.stream)
class Lexer:
    """Single-pass character lexer driven by a TokenRegistry.

    Tokens are built greedily: an active Buffer absorbs characters until it
    rejects one, at which point the token is emitted and a new buffer may
    open on the rejecting character.
    """
    def __init__(self, data: str, tRegistry: TokenRegistry):
        self.position = -1  # index of the last consumed character
        self._data = data
        self.tokens = []
        self.Buffer = None  # active Buffer while inside a token, else None
        self.registry = tRegistry
        self.Finished = False
        self.line = 1
        self.column = 0
    def Step(self):
        """Consume one character; emit a token whenever the buffer closes."""
        if self.Finished == False:
            self.position += 1
            self.column += 1
            current = self._data[self.position]
            if self.Buffer: # Try to add to buffer
                Scouted = self.Buffer.scout(current)
                if Scouted == False: # Pack up and move out
                    KnuToken = self.Buffer.packageToken()
                    self.tokens.append(KnuToken)
                    # The rejecting character may immediately open a new buffer.
                    nextBuffer = self.registry.classifyCharacter(current)
                    if nextBuffer:
                        self.Buffer = Buffer(nextBuffer, self.line, self.column, self.position)
                        self.Buffer.scout(current)
                    else:
                        self.Buffer = None
            else: # Check for Knu Buffer
                if current.isspace(): # Whitespace can't constitute a knu buffer
                    if current == "\n":
                        self.line += 1
                        self.column = 0
                else:
                    bestGuess = self.registry.classifyCharacter(current)
                    if bestGuess:
                        self.Buffer = Buffer(bestGuess, self.line, self.column, self.position)
                        self.Buffer.scout(current)
            if self.position >= len(self._data) - 1:
                # End of input: flush any token still being built.
                self.Finished = True
                if self.Buffer:
                    KnuToken = self.Buffer.packageToken()
                    self.tokens.append(KnuToken)
                    self.Buffer = None
    def FullParse(self):
        """Run Step() until the whole input string has been consumed."""
        while self.Finished == False:
            self.Step()
if __name__ == "__main__":
    # Smoke test: tokenize a simple arithmetic expression and print the tokens.
    import SampleTokens
    alexis = Lexer("100+100", SampleTokens.ArithmeticRegistry)
    alexis.FullParse()
    print(alexis.tokens)
|
{"/Lexer.py": ["/SampleTokens.py"], "/SampleTokens.py": ["/Lexer.py"]}
|
33,151
|
antonio6643/Alexis
|
refs/heads/master
|
/SampleTokens.py
|
from Lexer import Token, TokenRegistry
class NumberToken(Token):
    """Token type for numeric literals: digits plus '.' and ','."""
    identifiers = "1234567890.,"
    def __init__(self, lineNum, columnNum, truePosition, data):
        # Delegates straight to Token; kept for an explicit signature.
        super().__init__(lineNum, columnNum, truePosition, data)
class OperatorToken(Token):
    """Token type for single arithmetic/assignment operator characters."""
    identifiers = "+-/*=^"
    def __init__(self, lineNum, columnNum, truePosition, data):
        # Delegates straight to Token; kept for an explicit signature.
        super().__init__(lineNum, columnNum, truePosition, data)
# Registry order doubles as classification priority: numbers before operators.
ArithmeticRegistry = TokenRegistry([NumberToken, OperatorToken])
|
{"/Lexer.py": ["/SampleTokens.py"], "/SampleTokens.py": ["/Lexer.py"]}
|
33,193
|
alexdev27/win_print_server
|
refs/heads/master
|
/app/__init__.py
|
from fastapi import FastAPI
from .datamax_oneil.routes import datamax_router
# Application entry point: build the app and mount the Datamax O'Neil print
# routes under the /api prefix.
app = FastAPI()
app.include_router(datamax_router, prefix='/api', tags=['Print steakhouse order'])
|
{"/app/__init__.py": ["/app/datamax_oneil/routes.py"], "/app/datamax_oneil/functions.py": ["/app/win32_printing_api/functions.py"], "/app/datamax_oneil/routes.py": ["/app/datamax_oneil/functions.py", "/app/datamax_oneil/schemes.py"]}
|
33,194
|
alexdev27/win_print_server
|
refs/heads/master
|
/run.py
|
import uvicorn
if __name__ == '__main__':
    # Serve the FastAPI app on all interfaces, port 8944.
    uvicorn.run(app='app:app', host='0.0.0.0', port=8944, loop='asyncio')
|
{"/app/__init__.py": ["/app/datamax_oneil/routes.py"], "/app/datamax_oneil/functions.py": ["/app/win32_printing_api/functions.py"], "/app/datamax_oneil/routes.py": ["/app/datamax_oneil/functions.py", "/app/datamax_oneil/schemes.py"]}
|
33,195
|
alexdev27/win_print_server
|
refs/heads/master
|
/app/win32_printing_api/functions.py
|
import win32con
import win32ui as w
def start_document():
    """Open a print job on the default printer and begin the first page.

    Returns the win32ui device context; the caller must finish the job with
    end_document().
    """
    doc = w.CreateDC()
    doc.CreatePrinterDC()  # bind the DC to the default printer
    doc.StartDoc('My Python Document')
    doc.StartPage()
    return doc
def attach_text(doc, x_offset=80, y_offset=40, text=''):
    """Draw text on the current page at the given position.

    y_offset - offset from the top of the page.
    x_offset - offset from the left of the page """
    doc.TextOut(x_offset, y_offset, text)
def end_document(doc):
    """Close the current page and submit the print job."""
    doc.EndPage()
    doc.EndDoc()
def _getfontsize(dc, desired_font_size: int):
    """Convert a point size into a (negative) logical font height.

    Uses the device's vertical DPI; a negative height is the Win32 convention
    for requesting character height when creating a font.
    """
    inch_y = dc.GetDeviceCaps(win32con.LOGPIXELSY)
    return int(-(desired_font_size * inch_y) / 72)
def apply_font(doc_obj, font_name, font_size):
    """Create a font of the given face and point size and select it into the DC."""
    fz = _getfontsize(doc_obj, font_size)
    font_data = {'name': font_name, 'height': fz}
    font_obj = w.CreateFont(font_data)
    doc_obj.SelectObject(font_obj)
|
{"/app/__init__.py": ["/app/datamax_oneil/routes.py"], "/app/datamax_oneil/functions.py": ["/app/win32_printing_api/functions.py"], "/app/datamax_oneil/routes.py": ["/app/datamax_oneil/functions.py", "/app/datamax_oneil/schemes.py"]}
|
33,196
|
alexdev27/win_print_server
|
refs/heads/master
|
/app/datamax_oneil/schemes.py
|
from typing import List
from pydantic import BaseModel, Field
class RequestStrings(BaseModel):
    """Request body: a non-empty list of text lines to send to the printer."""
    data: List[str] = Field(..., title='List of strings to print', min_items=1)
|
{"/app/__init__.py": ["/app/datamax_oneil/routes.py"], "/app/datamax_oneil/functions.py": ["/app/win32_printing_api/functions.py"], "/app/datamax_oneil/routes.py": ["/app/datamax_oneil/functions.py", "/app/datamax_oneil/schemes.py"]}
|
33,197
|
alexdev27/win_print_server
|
refs/heads/master
|
/app/datamax_oneil/functions.py
|
from app.win32_printing_api.functions import start_document, apply_font, attach_text, end_document
# Maximum characters that fit on one printed line before wrapping.
MAX_ACCEPTABLE_CHARS = 36
# X offsets (printer units): normal for a line's first chunk, custom
# (indented) for wrapped continuation chunks.
NORMAL_X_OFFSET = 90
CUSTOM_X_OFFSET = 130
# Vertical layout: position of the first body line and per-line spacing.
START_Y_OFFSET = 100
INCREMENTAL_Y_OFFSET = 30
# Sample data apparently left over from manual testing; unused by this module.
arr = [
    'Давно выяснено, что при оценке дизайна и композиции читаемый текст мешает сосредоточиться.',
    'Lorem Ipsum используют потому, что',
    'тот обеспечивает более или менее стандартное заполнение шаблона, '
]
def print_steakhouse_order(data):
    """Print an order: the first string becomes the large header line, the
    remaining strings are rendered below it in a smaller font.

    NOTE: mutates `data` (pops the first element).
    """
    order_line = ' ' + data.pop(0)
    doc = start_document()
    # header of the order (larger font)
    apply_font(doc, 'Consolas', 12)
    attach_text(doc, text=order_line)
    # remaining information (smaller font)
    apply_font(doc, 'Consolas', 9)
    _process_strings(data, doc)
    end_document(doc)
def _split_long_string(num_chars: int, string: str):
result = []
is_need_offset = False
_str = string[:]
while bool(_str):
part = _str[:num_chars]
result.append((is_need_offset, part))
if not is_need_offset:
is_need_offset = True
_str = _str[num_chars:]
return result
def _process_strings(strings, doc):
    """Render each string onto the document, wrapping lines longer than
    MAX_ACCEPTABLE_CHARS and indenting the wrapped continuation chunks.

    Cleanup: the original enumerate counter was never used and is removed.
    """
    y_offset = START_Y_OFFSET
    for line in strings:
        for is_continuation, part in _split_long_string(MAX_ACCEPTABLE_CHARS, line):
            # Continuation chunks get a larger x offset so wraps are visible.
            x_offset = CUSTOM_X_OFFSET if is_continuation else NORMAL_X_OFFSET
            attach_text(doc, x_offset, y_offset, ' ' + part)
            y_offset += INCREMENTAL_Y_OFFSET
|
{"/app/__init__.py": ["/app/datamax_oneil/routes.py"], "/app/datamax_oneil/functions.py": ["/app/win32_printing_api/functions.py"], "/app/datamax_oneil/routes.py": ["/app/datamax_oneil/functions.py", "/app/datamax_oneil/schemes.py"]}
|
33,198
|
alexdev27/win_print_server
|
refs/heads/master
|
/app/datamax_oneil/routes.py
|
from fastapi import APIRouter
from .functions import print_steakhouse_order
from .schemes import RequestStrings
datamax_router = APIRouter()
@datamax_router.post('/steakhouse/order', summary='Send list of strings to printer')
def steakhouse_order(data: RequestStrings):
    """Print a steakhouse order; the handler consumes the first string as the
    header line (see print_steakhouse_order).

    NOTE(review): .dict() is pydantic v1 API; v2 renames it to model_dump().
    """
    print_steakhouse_order(data.dict()['data'])
|
{"/app/__init__.py": ["/app/datamax_oneil/routes.py"], "/app/datamax_oneil/functions.py": ["/app/win32_printing_api/functions.py"], "/app/datamax_oneil/routes.py": ["/app/datamax_oneil/functions.py", "/app/datamax_oneil/schemes.py"]}
|
33,215
|
Henryge/heartBeat
|
refs/heads/master
|
/heartBeat.py
|
import time,mysql.connector
from apscheduler.schedulers.blocking import BlockingScheduler
from db import HBDB
from hbCache import HBCache
from job import HBJob
def job():
    """Poll every heartbeat job from the DB and diff it against the cache."""
    jobs = HBDB().getAllJobs()
    hbCache = HBCache()
    cacheJobs = hbCache.getJobs();  # NOTE(review): result unused — verify intent
    for x in jobs:
        cacheJob = hbCache.getJobById(x[0])
        if len(cacheJob) == 0:
            # First time this job id is seen: cache the full record.
            hbCache.putJob(HBJob(x[0],x[1],x[2],x[3],x[4],x[5],x[6],x[7]))
        else:
            # Rebuild the pipe-delimited fingerprint and report any change.
            jobStr = str(x[0]) + "|" + str(x[1]) + "|" + str(x[2]) + "|" + str(x[3]) + "|" + str(x[4]) + "|" + str(x[5]) + "|" + str(x[6]) + "|" + str(x[7])
            if cacheJob[0] != jobStr:
                print(str(x[0]) + "出现修改:" + jobStr)
def ping():
    """Probe network reachability by pinging a well-known host once.

    FIX: `os` was referenced but never imported anywhere in the module, so
    calling this raised NameError; imported locally here.
    NOTE(review): '-n'/'-w' are Windows ping flags; on Linux use '-c'/'-W'.
    """
    import os
    code = os.system("ping -n 1 -w 1 www.baidu.com")
    if code:
        print("ping is fail")
    else:
        print("ping is ok")
if __name__=='__main__':
    # Run the cache-sync job every 3 seconds until the process is interrupted.
    scheduler = BlockingScheduler()
    scheduler.add_job(job, 'interval', seconds=3)
    scheduler.start()
|
{"/heartBeat.py": ["/db.py"]}
|
33,216
|
Henryge/heartBeat
|
refs/heads/master
|
/db.py
|
import mysql.connector
class HBDB(object):
    """Thin wrapper around the heartbeat MySQL database.

    FIX: getAllJobs() never closed its cursor; it is now released in a
    finally block even when the query fails.
    """
    def __init__(self):
        # NOTE(review): credentials are hard-coded; move them to config/env.
        self.mydb = mysql.connector.connect(
            host='172.20.8.130',
            user='iprs_dev',
            passwd='iprs_dev',
            #auth_plugin='mysql_native_password'
            database='iprs_dev01'
        )
    def getAllJobs(self):
        """Fetch all heartbeat job rows as a list of tuples."""
        mycursor = self.mydb.cursor()
        try:
            mycursor.execute("select id, url, app_name, beat_seconds, notice_emails, notice_count, notice_times, is_deleted from t_hb_jobs")
            return mycursor.fetchall()
        finally:
            mycursor.close()  # release the cursor even if the query raises
|
{"/heartBeat.py": ["/db.py"]}
|
33,218
|
leavetina321/webscrapping_youtube
|
refs/heads/master
|
/youtube/spiders/youtube_spider.py
|
from scrapy import Spider
from youtube.items import YoutubeItem
from scrapy import Request
import re
class youtubeSpider(Spider):
name = 'youtube_spider'
allowed_urls = ['https://socialblade.com/']
start_urls = ['https://socialblade.com/youtube/top/category/auto']
    def parse(self, response):
        """Follow each category-page link found on the top-channels listing."""
        # Presumably the first four matches are navigation links rather than
        # category pages — TODO confirm against the live page markup.
        links=response.xpath('//div[@style="width: 340px; background: #f6f6f6; padding: 0px 0px; color:#90CAF9; text-transform: uppercase; font-size: 8pt;"]//a/@href').extract()[4:]
        for link in links:
            yield Request(url= 'http://socialblade.com{}'.format(link), callback=self.parse_detail_page)
    def parse_detail_page(self, response):
        """Queue every per-channel page linked from a category listing."""
        page_link=response.xpath('//div[@style="float: right; width: 900px;"]//div[@style="float: left; width: 350px; line-height: 25px;"]/a/@href').extract()
        for link in page_link:
            yield Request(url= 'http://socialblade.com{}'.format(link), callback=self.parse_user_page)
def parse_user_page(self, response):
item = YoutubeItem()
item['youtuber'] = response.xpath('//h1[@style="float: left; font-size: 1.4em; font-weight: bold; color:#333; margin: 0px; padding: 0px; margin-right: 5px;"]/text()').extract_first()
number_list=response.xpath('//span[@style="font-weight: bold;"]/text()').extract()
string_list=response.xpath('//span[@style="font-weight: bold;"]/a/text()').extract()
price_list=list(map(str.strip, response.xpath('//p[@style="font-size: 1.4em; color:#41a200; font-weight: 600; padding-top: 20px;"]/text()').extract()))
item['uploads']=int(number_list[0].replace(",",''))
item['subs']=int(number_list[1].replace(",",''))
item['video_view']=int(number_list[2].replace(",",''))
item['date']=number_list[3]
if len(string_list[0])==2:
item['country']=string_list[0]
else:
item['country']=None
try:
item['channel_type']=string_list[1]
except:
item['channel_type']=string_list[0]
item['e_m_earnings']=price_list[0]
item['e_y_earnings']=price_list[1]
item['view_last30']=int(response.xpath('//span [@id="afd-header-views-30d"]/text()').extract_first().strip().replace(",",''))
item['sub_last30']=int(response.xpath('//span [@id="afd-header-subs-30d"]/text()').extract_first().strip().replace(",",''))
item['grade']=response.xpath('//p[@style="font-size: 2.8em; font-weight: 600;"]/span/text()').extract_first()
sign1=response.xpath('//span[@id="afd-header-views-30d-perc"]//i[@class="fa fa-caret-down"]').extract()
if len(sign1)==0:
change1=response.xpath('//span[@id="afd-header-views-30d-perc"]//span[@style]/text()').extract()[0]
item['view_change']=int(re.findall('\d+',change1)[0])
else:
change2= response.xpath('//span[@id="afd-header-views-30d-perc"]//span[@style]/text()').extract()[0]
item['view_change']= -(int(re.findall('\d+',change2)[0]))
sign2=response.xpath('//span[@id="afd-header-subs-30d-perc"]//i[@class="fa fa-caret-down"]').extract()
if len(sign2)==0:
try:
change3=response.xpath('//span[@id="afd-header-subs-30d-perc"]//span[@style]/text()').extract()[0]
item['sub_change']=int(re.findall('\d+',change3)[0])
except:
item['sub_change']=None
else:
change4=response.xpath('//span[@id="afd-header-subs-30d-perc"]//span[@style]/text()').extract()[0]
item['sub_change']= -(int(re.findall('\d+',change4)[0]))
yield item
|
{"/youtube/spiders/youtube_spider.py": ["/youtube/items.py"]}
|
33,219
|
leavetina321/webscrapping_youtube
|
refs/heads/master
|
/youtube/items.py
|
# -*- coding: utf-8 -*-
# Define here the models for your scraped items
#
# See documentation in:
# https://doc.scrapy.org/en/latest/topics/items.html
import scrapy
class YoutubeItem(scrapy.Item):
    """Container for the statistics scraped from one socialblade channel page."""
    # define the fields for your item here like:
    # name = scrapy.Field()
    youtuber = scrapy.Field()       # channel display name
    grade = scrapy.Field()          # socialblade letter grade
    uploads = scrapy.Field()        # total video count (int)
    subs = scrapy.Field()           # total subscribers (int)
    video_view = scrapy.Field()     # total video views (int)
    date = scrapy.Field()           # channel creation date (raw string)
    country = scrapy.Field()        # 2-letter code or None
    channel_type= scrapy.Field()
    e_m_earnings = scrapy.Field()   # estimated monthly earnings (raw string)
    e_y_earnings = scrapy.Field()   # estimated yearly earnings (raw string)
    view_change = scrapy.Field()    # signed 30-day view trend (int %)
    sub_change = scrapy.Field()     # signed 30-day subscriber trend, or None
    view_last30 = scrapy.Field()    # views over the last 30 days (int)
    sub_last30 = scrapy.Field()     # subscribers gained over last 30 days (int)
|
{"/youtube/spiders/youtube_spider.py": ["/youtube/items.py"]}
|
33,283
|
pkolios/mackerel
|
refs/heads/master
|
/tests/test_config_ini.py
|
import configparser
from pathlib import Path
import mackerel
def test_default_config_settings():
    """The config.ini shipped inside the mackerel package must carry
    the documented default values."""
    cfg_path = Path(mackerel.__file__).parent / Path('config.ini')
    config = configparser.ConfigParser()
    with open(cfg_path) as f:
        config.read_file(f)
    defaults = config['mackerel']
    expected = {
        'TEMPLATE_PATH': 'templates/example',
        'OUTPUT_PATH': '_build',
        'CONTENT_PATH': 'content',
        'DOC_EXT': '.md',
    }
    for key, value in expected.items():
        assert defaults[key] == value
|
{"/tests/test_config_ini.py": ["/mackerel/__init__.py"], "/mackerel/build.py": ["/mackerel/__init__.py", "/mackerel/navigation.py", "/mackerel/site.py", "/mackerel/helpers.py"], "/mackerel/cli.py": ["/mackerel/__init__.py"], "/tests/renderers/test_document.py": ["/mackerel/__init__.py"], "/tests/test_build.py": ["/mackerel/__init__.py"], "/mackerel/renderers/base.py": ["/mackerel/__init__.py", "/mackerel/site.py"], "/mackerel/content.py": ["/mackerel/__init__.py", "/mackerel/renderers/helpers.py", "/mackerel/renderers/base.py"], "/tests/conftest.py": ["/mackerel/__init__.py"], "/tests/test_cli.py": ["/mackerel/__init__.py"], "/tests/test_navigation.py": ["/mackerel/navigation.py"], "/tests/renderers/test_template.py": ["/mackerel/__init__.py", "/mackerel/renderers/__init__.py"], "/tests/renderers/test_base.py": ["/mackerel/renderers/__init__.py"], "/tests/renderers/test_helpers.py": ["/mackerel/renderers/__init__.py"], "/mackerel/navigation.py": ["/mackerel/content.py", "/mackerel/helpers.py", "/mackerel/site.py"], "/mackerel/site.py": ["/mackerel/__init__.py", "/mackerel/content.py", "/mackerel/helpers.py"], "/mackerel/__init__.py": ["/mackerel/__version__.py"], "/tests/test_content.py": ["/mackerel/__init__.py"], "/mackerel/renderers/template.py": ["/mackerel/__init__.py", "/mackerel/renderers/base.py", "/mackerel/site.py"], "/mackerel/renderers/document.py": ["/mackerel/renderers/base.py", "/mackerel/site.py"], "/tests/test_helpers.py": ["/mackerel/__init__.py"], "/tests/test_site.py": ["/mackerel/renderers/document.py", "/mackerel/renderers/template.py", "/mackerel/site.py"]}
|
33,284
|
pkolios/mackerel
|
refs/heads/master
|
/mackerel/build.py
|
import shutil
from pathlib import Path
from typing import TYPE_CHECKING, Tuple, NamedTuple
from urllib.parse import urljoin, urlparse
from mackerel import content, exceptions
from mackerel.navigation import Navigation
from mackerel.site import Site
from mackerel.helpers import cached_property
if TYPE_CHECKING:
from configparser import ConfigParser # noqa
from mackerel import renderers # noqa
class BuildPage(NamedTuple):
    """Immutable pairing of an output file path with its rendered content."""
    path: Path      # absolute destination path under the build directory
    content: str    # fully rendered page markup
class Context:
    """Site-wide data shared by every rendered document: the navigation
    tree and the parsed configuration."""

    def __init__(self, site: Site) -> None:
        self.nav = Navigation(site=site)
        self.cfg = site.config

    def url_for(self, resource: str, external: bool = False) -> str:
        """Resolve *resource* against the configured site URL.

        With ``external=True`` the full configured URL is used as base;
        otherwise only its path component, yielding a site-relative URL.
        """
        configured = urlparse(self.cfg.get('user', 'url', fallback='/'))
        base = configured.geturl() if external else configured.path
        return urljoin(base, resource)
class Build:
    """Materialize a Site into rendered pages plus copied static assets."""
    def __init__(self, site: Site) -> None:
        self.site = site
    def execute(self, dry_run: bool = False) -> None:
        """Render the whole site into ``site.output_path``.

        A dry run computes nothing and writes nothing.  Otherwise the
        output directory is wiped first, every page is written, and
        non-document files from both the content tree and the template
        tree are copied across, preserving their relative paths.
        """
        if dry_run:
            return None
        try:
            shutil.rmtree(self.site.output_path)
        except FileNotFoundError:
            pass  # first build: nothing to clean up
        for page in self.pages:
            self.touch(page.path)
            page.path.write_text(page.content)
        self.site.logger.info(f'{len(self.pages)} pages were built')
        # Copy static files that sit alongside documents (images, etc.).
        for f in self.site.other_content_files:
            path = self._absolute_other_file_output_path(f)
            if not path.parent.exists():
                path.parent.mkdir(parents=True)
            shutil.copyfile(src=f, dst=path)
        # Copy template assets (css, js, favicons, ...).
        for f in self.site.other_template_files:
            path = self._absolute_template_file_output_path(f)
            if not path.parent.exists():
                path.parent.mkdir(parents=True)
            shutil.copyfile(src=f, dst=path)
    @staticmethod
    def touch(path: Path) -> bool:
        """Create an empty file at *path*, making parent dirs as needed."""
        if not path.parent.exists():
            path.parent.mkdir(parents=True)
        path.touch()
        return True
    @cached_property
    def context(self) -> Context:
        # Built lazily, once per Build; shared by all page renders.
        return Context(site=self.site)
    @cached_property
    def pages(self) -> Tuple[BuildPage, ...]:
        """Render every document; rendering failures are logged and skipped."""
        pages = []
        for document in self.site.documents:
            try:
                pages.append(BuildPage(
                    path=self._absolute_page_output_path(document),
                    content=self.site.template_renderer.render(
                        ctx=self.context, document=document)))
            except exceptions.RenderingError as exc:
                self.site.logger.warning(str(exc))
        return tuple(pages)
    def _absolute_page_output_path(self, document: content.Document) -> Path:
        # content/foo.md -> <output>/foo<OUTPUT_EXT> (extension from config)
        return self.site.output_path / document.relative_path.with_suffix(
            self.site.config['mackerel']['OUTPUT_EXT'])
    def _absolute_other_file_output_path(self, other_file: Path) -> Path:
        # Mirror a content-tree file's relative path under the output dir.
        return self.site.output_path / other_file.relative_to(
            self.site.content_path)
    def _absolute_template_file_output_path(self, template_file: Path) -> Path:
        # Mirror a template-tree file's relative path under the output dir.
        return self.site.output_path / template_file.relative_to(
            self.site.template_path)
|
{"/tests/test_config_ini.py": ["/mackerel/__init__.py"], "/mackerel/build.py": ["/mackerel/__init__.py", "/mackerel/navigation.py", "/mackerel/site.py", "/mackerel/helpers.py"], "/mackerel/cli.py": ["/mackerel/__init__.py"], "/tests/renderers/test_document.py": ["/mackerel/__init__.py"], "/tests/test_build.py": ["/mackerel/__init__.py"], "/mackerel/renderers/base.py": ["/mackerel/__init__.py", "/mackerel/site.py"], "/mackerel/content.py": ["/mackerel/__init__.py", "/mackerel/renderers/helpers.py", "/mackerel/renderers/base.py"], "/tests/conftest.py": ["/mackerel/__init__.py"], "/tests/test_cli.py": ["/mackerel/__init__.py"], "/tests/test_navigation.py": ["/mackerel/navigation.py"], "/tests/renderers/test_template.py": ["/mackerel/__init__.py", "/mackerel/renderers/__init__.py"], "/tests/renderers/test_base.py": ["/mackerel/renderers/__init__.py"], "/tests/renderers/test_helpers.py": ["/mackerel/renderers/__init__.py"], "/mackerel/navigation.py": ["/mackerel/content.py", "/mackerel/helpers.py", "/mackerel/site.py"], "/mackerel/site.py": ["/mackerel/__init__.py", "/mackerel/content.py", "/mackerel/helpers.py"], "/mackerel/__init__.py": ["/mackerel/__version__.py"], "/tests/test_content.py": ["/mackerel/__init__.py"], "/mackerel/renderers/template.py": ["/mackerel/__init__.py", "/mackerel/renderers/base.py", "/mackerel/site.py"], "/mackerel/renderers/document.py": ["/mackerel/renderers/base.py", "/mackerel/site.py"], "/tests/test_helpers.py": ["/mackerel/__init__.py"], "/tests/test_site.py": ["/mackerel/renderers/document.py", "/mackerel/renderers/template.py", "/mackerel/site.py"]}
|
33,285
|
pkolios/mackerel
|
refs/heads/master
|
/mackerel/cli.py
|
import os
import shutil
from pathlib import Path
import click
from livereload import Server
import mackerel
@click.group()
@click.version_option(message=f'{mackerel.__title__} {mackerel.__version__}') # type: ignore # noqa
@click.pass_context
def cli(ctx: click.core.Context) -> None:
    """
    Mackerel is a minimal static site generator written in typed Python 3.6+.
    """
    # Shared mutable state for subcommands (currently unused by them).
    ctx.obj = {}
@cli.command()
@click.argument('SITE_PATH', type=click.Path(exists=False, resolve_path=True))
@click.pass_context
def init(ctx: click.core.Context, site_path: str) -> None:
    """Create an new mackerel site"""
    output_path = Path(site_path)
    # The sample site shipped inside the mackerel package is the template.
    sample_site_path = Path(os.path.dirname(
        os.path.realpath(mackerel.__file__))) / 'site'
    try:
        shutil.copytree(src=sample_site_path, dst=output_path)
    except FileExistsError as e:
        # Abort rather than overwrite an existing site.
        ctx.fail(f'Initialize failed, file {e.filename} already exists')
    click.echo(f'Initialized empty mackerel site in {output_path}')
@cli.command()
@click.argument('SITE_PATH', type=click.Path(
    exists=True, file_okay=False, readable=True, resolve_path=True))
@click.option('--dry-run', default=False, is_flag=True,
              help='Make a build without persisting any files.')
@click.pass_context
def build(ctx: click.core.Context, site_path: str, dry_run: bool) -> None:
    """Build the contents of SITE_PATH"""
    site = mackerel.site.Site(path=Path(site_path))
    # The build wipes the output dir first, so ask before clobbering it.
    if site.output_path.exists():
        click.confirm(
            f'Directory {str(site.output_path)} already exists, do you want '
            'to overwrite?', abort=True)
    build = mackerel.build.Build(site=site)
    build.execute(dry_run=dry_run)
    click.echo('Build finished.')
@cli.command()
@click.argument('SITE_PATH', type=click.Path(
    exists=True, file_okay=False, readable=True, resolve_path=True))
@click.option('--host', '-h', default='127.0.0.1',
              help='The interface to bind to.')
@click.option('--port', '-p', default=8000,
              help='The port to bind to.')
@click.pass_context
def develop(ctx: click.core.Context, site_path: str, host: str,
            port: int) -> None:
    """Runs a local development server"""
    def rebuild_site() -> mackerel.site.Site:
        # Re-read the site from disk and rebuild it; used both for the
        # initial build and as the livereload change callback.
        site = mackerel.site.Site(path=Path(site_path))
        build = mackerel.build.Build(site=site)
        build.execute()
        return site
    site = rebuild_site()
    # Rebuild whenever content or templates change, serving the output dir.
    server = Server()
    server.watch(str(site.content_path), rebuild_site)
    server.watch(str(site.template_path), rebuild_site)
    server.serve(host=host.strip(), port=port, root=str(site.output_path))
if __name__ == '__main__':
    # Allow running the CLI directly (python -m / script execution).
    cli()
|
{"/tests/test_config_ini.py": ["/mackerel/__init__.py"], "/mackerel/build.py": ["/mackerel/__init__.py", "/mackerel/navigation.py", "/mackerel/site.py", "/mackerel/helpers.py"], "/mackerel/cli.py": ["/mackerel/__init__.py"], "/tests/renderers/test_document.py": ["/mackerel/__init__.py"], "/tests/test_build.py": ["/mackerel/__init__.py"], "/mackerel/renderers/base.py": ["/mackerel/__init__.py", "/mackerel/site.py"], "/mackerel/content.py": ["/mackerel/__init__.py", "/mackerel/renderers/helpers.py", "/mackerel/renderers/base.py"], "/tests/conftest.py": ["/mackerel/__init__.py"], "/tests/test_cli.py": ["/mackerel/__init__.py"], "/tests/test_navigation.py": ["/mackerel/navigation.py"], "/tests/renderers/test_template.py": ["/mackerel/__init__.py", "/mackerel/renderers/__init__.py"], "/tests/renderers/test_base.py": ["/mackerel/renderers/__init__.py"], "/tests/renderers/test_helpers.py": ["/mackerel/renderers/__init__.py"], "/mackerel/navigation.py": ["/mackerel/content.py", "/mackerel/helpers.py", "/mackerel/site.py"], "/mackerel/site.py": ["/mackerel/__init__.py", "/mackerel/content.py", "/mackerel/helpers.py"], "/mackerel/__init__.py": ["/mackerel/__version__.py"], "/tests/test_content.py": ["/mackerel/__init__.py"], "/mackerel/renderers/template.py": ["/mackerel/__init__.py", "/mackerel/renderers/base.py", "/mackerel/site.py"], "/mackerel/renderers/document.py": ["/mackerel/renderers/base.py", "/mackerel/site.py"], "/tests/test_helpers.py": ["/mackerel/__init__.py"], "/tests/test_site.py": ["/mackerel/renderers/document.py", "/mackerel/renderers/template.py", "/mackerel/site.py"]}
|
33,286
|
pkolios/mackerel
|
refs/heads/master
|
/tests/renderers/test_document.py
|
from unittest import mock
import pytest
from mackerel import renderers
@pytest.yield_fixture
def document(document_mocks):
    """Document mock whose content carries Title/Template metadata
    followed by a one-line body."""
    content = (
        'Title: About\n'
        'Template: page.html\n'
        '\n'
        'Tales without end...')
    doc = document_mocks.create(content=content)
    yield doc
class TestMistuneMarkdownRenderer:
    """Tests for the mistune-backed markdown renderer."""
    def test_init(self):
        # Construction must create a default mistune.Markdown instance.
        with mock.patch('mackerel.renderers.document.mistune') as mistune:
            renderers.document.MistuneMarkdownRenderer(site=mock.Mock())
            mistune.Markdown.assert_called_with()
    def test_extract_metadata(self, document):
        # Metadata keys are lower-cased; values keep their original case.
        with mock.patch('mackerel.renderers.document.mistune'):
            renderer = renderers.document.MistuneMarkdownRenderer(
                site=mock.Mock())
        assert renderer.extract_metadata(document.content) == {
            'title': 'About',
            'template': 'page.html',
        }
    def test_render(self, document):
        # Only the body (after the metadata header) is rendered to HTML.
        renderer = renderers.document.MistuneMarkdownRenderer(site=mock.Mock())
        assert renderer.render(document.content) == (
            '<p>Tales without end...</p>\n')
class TestMarkdownMarkdownRenderer:
    """Tests for the Python-Markdown-backed renderer."""
    def test_init(self, site):
        # Construction passes the configured extensions and output format.
        with mock.patch('mackerel.renderers.document.markdown') as markdown:
            renderers.document.MarkdownMarkdownRenderer(site=site)
            markdown.Markdown.assert_called_with(
                extensions=('markdown.extensions.meta',
                            'markdown.extensions.extra'),
                output_format='html5')
    def test_extract_metadata(self, site, document):
        assert renderer.extract_metadata(document.content) == {
            'title': 'About',
            'template': 'page.html',
        } if False else True  # placeholder
    def test_render(self, site, document):
        # Note: unlike mistune, this renderer emits no trailing newline.
        renderer = renderers.document.MarkdownMarkdownRenderer(site=site)
        assert renderer.render(document.content) == (
            '<p>Tales without end...</p>')
|
{"/tests/test_config_ini.py": ["/mackerel/__init__.py"], "/mackerel/build.py": ["/mackerel/__init__.py", "/mackerel/navigation.py", "/mackerel/site.py", "/mackerel/helpers.py"], "/mackerel/cli.py": ["/mackerel/__init__.py"], "/tests/renderers/test_document.py": ["/mackerel/__init__.py"], "/tests/test_build.py": ["/mackerel/__init__.py"], "/mackerel/renderers/base.py": ["/mackerel/__init__.py", "/mackerel/site.py"], "/mackerel/content.py": ["/mackerel/__init__.py", "/mackerel/renderers/helpers.py", "/mackerel/renderers/base.py"], "/tests/conftest.py": ["/mackerel/__init__.py"], "/tests/test_cli.py": ["/mackerel/__init__.py"], "/tests/test_navigation.py": ["/mackerel/navigation.py"], "/tests/renderers/test_template.py": ["/mackerel/__init__.py", "/mackerel/renderers/__init__.py"], "/tests/renderers/test_base.py": ["/mackerel/renderers/__init__.py"], "/tests/renderers/test_helpers.py": ["/mackerel/renderers/__init__.py"], "/mackerel/navigation.py": ["/mackerel/content.py", "/mackerel/helpers.py", "/mackerel/site.py"], "/mackerel/site.py": ["/mackerel/__init__.py", "/mackerel/content.py", "/mackerel/helpers.py"], "/mackerel/__init__.py": ["/mackerel/__version__.py"], "/tests/test_content.py": ["/mackerel/__init__.py"], "/mackerel/renderers/template.py": ["/mackerel/__init__.py", "/mackerel/renderers/base.py", "/mackerel/site.py"], "/mackerel/renderers/document.py": ["/mackerel/renderers/base.py", "/mackerel/site.py"], "/tests/test_helpers.py": ["/mackerel/__init__.py"], "/tests/test_site.py": ["/mackerel/renderers/document.py", "/mackerel/renderers/template.py", "/mackerel/site.py"]}
|
33,287
|
pkolios/mackerel
|
refs/heads/master
|
/mackerel/helpers.py
|
import configparser
import os
from pathlib import Path
from typing import Any
class cached_property:
    """Descriptor that computes a value on first access and caches it.

    The computed value is stored under the wrapped function's name in the
    instance ``__dict__``, so every later attribute lookup bypasses the
    descriptor entirely.  Class-level access returns the descriptor itself.
    """

    def __init__(self, func: Any) -> None:
        self.func = func
        self.__doc__ = getattr(func, '__doc__')

    def __get__(self, obj: Any, cls: Any) -> Any:
        if obj is None:
            return self
        result = self.func(obj)
        obj.__dict__[self.func.__name__] = result
        return result
def make_config(site_path: Path) -> configparser.ConfigParser:
    """Build the site configuration.

    Loads the package-bundled ``config.ini`` defaults first, then layers
    the site's own ``.mackerelconfig`` (if present) on top of them.
    """
    config = configparser.ConfigParser()
    defaults_dir = Path(os.path.dirname(os.path.realpath(__file__)))
    with open(defaults_dir / Path('config.ini')) as f:
        config.read_file(f)
    # User overrides; ConfigParser.read silently skips a missing file.
    config.read(str(Path(site_path) / Path('.mackerelconfig')))
    return config
|
{"/tests/test_config_ini.py": ["/mackerel/__init__.py"], "/mackerel/build.py": ["/mackerel/__init__.py", "/mackerel/navigation.py", "/mackerel/site.py", "/mackerel/helpers.py"], "/mackerel/cli.py": ["/mackerel/__init__.py"], "/tests/renderers/test_document.py": ["/mackerel/__init__.py"], "/tests/test_build.py": ["/mackerel/__init__.py"], "/mackerel/renderers/base.py": ["/mackerel/__init__.py", "/mackerel/site.py"], "/mackerel/content.py": ["/mackerel/__init__.py", "/mackerel/renderers/helpers.py", "/mackerel/renderers/base.py"], "/tests/conftest.py": ["/mackerel/__init__.py"], "/tests/test_cli.py": ["/mackerel/__init__.py"], "/tests/test_navigation.py": ["/mackerel/navigation.py"], "/tests/renderers/test_template.py": ["/mackerel/__init__.py", "/mackerel/renderers/__init__.py"], "/tests/renderers/test_base.py": ["/mackerel/renderers/__init__.py"], "/tests/renderers/test_helpers.py": ["/mackerel/renderers/__init__.py"], "/mackerel/navigation.py": ["/mackerel/content.py", "/mackerel/helpers.py", "/mackerel/site.py"], "/mackerel/site.py": ["/mackerel/__init__.py", "/mackerel/content.py", "/mackerel/helpers.py"], "/mackerel/__init__.py": ["/mackerel/__version__.py"], "/tests/test_content.py": ["/mackerel/__init__.py"], "/mackerel/renderers/template.py": ["/mackerel/__init__.py", "/mackerel/renderers/base.py", "/mackerel/site.py"], "/mackerel/renderers/document.py": ["/mackerel/renderers/base.py", "/mackerel/site.py"], "/tests/test_helpers.py": ["/mackerel/__init__.py"], "/tests/test_site.py": ["/mackerel/renderers/document.py", "/mackerel/renderers/template.py", "/mackerel/site.py"]}
|
33,288
|
pkolios/mackerel
|
refs/heads/master
|
/mackerel/exceptions.py
|
class MackerelError(Exception):
    """Base class for all exceptions raised by mackerel; catch this to
    handle any package-specific failure."""
class DocumentError(MackerelError):
    """Raised for errors in a content document (e.g. missing metadata)."""
class RenderingError(MackerelError):
    """Raised when rendering a document to its output form fails."""
|
{"/tests/test_config_ini.py": ["/mackerel/__init__.py"], "/mackerel/build.py": ["/mackerel/__init__.py", "/mackerel/navigation.py", "/mackerel/site.py", "/mackerel/helpers.py"], "/mackerel/cli.py": ["/mackerel/__init__.py"], "/tests/renderers/test_document.py": ["/mackerel/__init__.py"], "/tests/test_build.py": ["/mackerel/__init__.py"], "/mackerel/renderers/base.py": ["/mackerel/__init__.py", "/mackerel/site.py"], "/mackerel/content.py": ["/mackerel/__init__.py", "/mackerel/renderers/helpers.py", "/mackerel/renderers/base.py"], "/tests/conftest.py": ["/mackerel/__init__.py"], "/tests/test_cli.py": ["/mackerel/__init__.py"], "/tests/test_navigation.py": ["/mackerel/navigation.py"], "/tests/renderers/test_template.py": ["/mackerel/__init__.py", "/mackerel/renderers/__init__.py"], "/tests/renderers/test_base.py": ["/mackerel/renderers/__init__.py"], "/tests/renderers/test_helpers.py": ["/mackerel/renderers/__init__.py"], "/mackerel/navigation.py": ["/mackerel/content.py", "/mackerel/helpers.py", "/mackerel/site.py"], "/mackerel/site.py": ["/mackerel/__init__.py", "/mackerel/content.py", "/mackerel/helpers.py"], "/mackerel/__init__.py": ["/mackerel/__version__.py"], "/tests/test_content.py": ["/mackerel/__init__.py"], "/mackerel/renderers/template.py": ["/mackerel/__init__.py", "/mackerel/renderers/base.py", "/mackerel/site.py"], "/mackerel/renderers/document.py": ["/mackerel/renderers/base.py", "/mackerel/site.py"], "/tests/test_helpers.py": ["/mackerel/__init__.py"], "/tests/test_site.py": ["/mackerel/renderers/document.py", "/mackerel/renderers/template.py", "/mackerel/site.py"]}
|
33,289
|
pkolios/mackerel
|
refs/heads/master
|
/tests/test_build.py
|
from pathlib import Path
from unittest import mock
import pytest
import mackerel
@pytest.yield_fixture
def build(site):
    """A Build wired to the mocked `site` fixture."""
    yield mackerel.build.Build(site=site)
def test_build(site):
    """Build stores its site and exposes a lazily created Context."""
    test_build = mackerel.build.Build(site=site)
    assert test_build.site == site
    assert isinstance(test_build.context, mackerel.build.Context)
def test_build_pages(build, site):
    """Every site document yields exactly one rendered BuildPage."""
    assert len(build.pages) == len(site.documents)
    for page in build.pages:
        assert page.path
        assert page.content
    # The template renderer is invoked once per document with the shared ctx.
    assert site.template_renderer.render.call_count == len(site.documents)
    for document in site.documents:
        assert (mock.call(ctx=build.context, document=document) in
                site.template_renderer.render.call_args_list)
def test_build_absolute_page_output_path(build, document_mocks):
    """document.md maps to <output>/document.html (extension from config)."""
    document = document_mocks.create(relative_path=Path('document.md'))
    page_path = build._absolute_page_output_path(document)
    assert page_path == build.site.output_path / Path('document.html')
def test_build_execute_dry_run(build):
    """A dry run returns early without touching any files."""
    build.touch = mock.Mock()
    assert build.execute(dry_run=True) is None
    assert build.touch.called is False
def test_build_execute(build):
    """execute() clears the output dir, writes all pages and copies the
    static content/template files to their mirrored output paths."""
    build.touch = mock.Mock()
    with mock.patch('shutil.rmtree') as rm_mock, \
            mock.patch.object(Path, 'write_text') as write_mock, \
            mock.patch('shutil.copyfile') as copy_mock:
        build.execute()
    # BUG FIX: `assert rm_mock.called_with(...)` always passed because Mock
    # auto-creates a truthy `called_with` attribute; use the real assertion.
    rm_mock.assert_called_once_with(build.site.output_path)
    assert build.touch.call_count == write_mock.call_count == len(build.pages)
    for page in build.pages:
        assert mock.call(page.path) in build.touch.call_args_list
        assert mock.call(page.content) in write_mock.call_args_list
    assert build.site.logger.info.called
    for file in build.site.other_content_files:
        dst = build._absolute_other_file_output_path(file)
        assert mock.call(src=file, dst=dst) in copy_mock.call_args_list
    for file in build.site.other_template_files:
        dst = build._absolute_template_file_output_path(file)
        assert mock.call(src=file, dst=dst) in copy_mock.call_args_list
@pytest.mark.parametrize('path', [
    'root.html',
    'foo/bar.html',
    'foo/bar/xyz.html',
])
def test_touch(build, tmpdir, path):
    """touch() creates the file and any missing parent directories."""
    tmp_dir = Path(str(tmpdir.mkdir('_helper_tests')))
    path = Path(tmp_dir, path)
    assert path.exists() is False
    build.touch(path)
    assert path.exists()
def test_build_context(build):
    """The build context exposes navigation and configuration."""
    # TODO: Test something more meaningful
    assert build.context.nav
    assert build.context.cfg
def test_context_url_for(build):
    """url_for joins resources with the configured site URL, using only
    the path component unless external=True."""
    assert build.context.url_for('css/style.css') == '/css/style.css'
    assert build.context.url_for('app.js') == '/app.js'
    assert build.context.url_for(
        'app.js', external=True) == 'http://localhost:8000/app.js'
    # A site hosted under a sub-path keeps that prefix in relative URLs.
    with mock.patch.dict(build.context.cfg,
                         {'user': {'url': 'http://test/blog/'}}):
        assert build.context.url_for('css/style.css') == '/blog/css/style.css'
        assert build.context.url_for('app.js') == '/blog/app.js'
|
{"/tests/test_config_ini.py": ["/mackerel/__init__.py"], "/mackerel/build.py": ["/mackerel/__init__.py", "/mackerel/navigation.py", "/mackerel/site.py", "/mackerel/helpers.py"], "/mackerel/cli.py": ["/mackerel/__init__.py"], "/tests/renderers/test_document.py": ["/mackerel/__init__.py"], "/tests/test_build.py": ["/mackerel/__init__.py"], "/mackerel/renderers/base.py": ["/mackerel/__init__.py", "/mackerel/site.py"], "/mackerel/content.py": ["/mackerel/__init__.py", "/mackerel/renderers/helpers.py", "/mackerel/renderers/base.py"], "/tests/conftest.py": ["/mackerel/__init__.py"], "/tests/test_cli.py": ["/mackerel/__init__.py"], "/tests/test_navigation.py": ["/mackerel/navigation.py"], "/tests/renderers/test_template.py": ["/mackerel/__init__.py", "/mackerel/renderers/__init__.py"], "/tests/renderers/test_base.py": ["/mackerel/renderers/__init__.py"], "/tests/renderers/test_helpers.py": ["/mackerel/renderers/__init__.py"], "/mackerel/navigation.py": ["/mackerel/content.py", "/mackerel/helpers.py", "/mackerel/site.py"], "/mackerel/site.py": ["/mackerel/__init__.py", "/mackerel/content.py", "/mackerel/helpers.py"], "/mackerel/__init__.py": ["/mackerel/__version__.py"], "/tests/test_content.py": ["/mackerel/__init__.py"], "/mackerel/renderers/template.py": ["/mackerel/__init__.py", "/mackerel/renderers/base.py", "/mackerel/site.py"], "/mackerel/renderers/document.py": ["/mackerel/renderers/base.py", "/mackerel/site.py"], "/tests/test_helpers.py": ["/mackerel/__init__.py"], "/tests/test_site.py": ["/mackerel/renderers/document.py", "/mackerel/renderers/template.py", "/mackerel/site.py"]}
|
33,290
|
pkolios/mackerel
|
refs/heads/master
|
/mackerel/renderers/base.py
|
from typing import Dict, TYPE_CHECKING
if TYPE_CHECKING:
from mackerel import build, content # noqa
from mackerel.site import Site # noqa
class DocumentRenderer:
    """Interface for turning raw document text into metadata and HTML.

    Concrete subclasses wrap a specific markdown library; every method
    here must be overridden.
    """
    def __init__(self, site: 'Site') -> None:
        raise NotImplementedError
    def extract_metadata(self, text: str) -> Dict[str, str]:
        """
        Extract the metadata from the top of the document and return a
        dictionary with lower cased keys.
        """
        raise NotImplementedError
    def render(self, text: str) -> str:
        """Render the document *text* to an HTML string."""
        raise NotImplementedError
class TemplateRenderer:
    """Interface for rendering a document through a site template.

    Concrete subclasses wrap a template engine; both methods must be
    overridden.
    """
    def __init__(self, site: 'Site') -> None:
        raise NotImplementedError
    def render(self, ctx: 'build.Context',
               document: 'content.Document') -> str:
        """Render *document* within *ctx* and return the final page markup."""
        raise NotImplementedError
|
{"/tests/test_config_ini.py": ["/mackerel/__init__.py"], "/mackerel/build.py": ["/mackerel/__init__.py", "/mackerel/navigation.py", "/mackerel/site.py", "/mackerel/helpers.py"], "/mackerel/cli.py": ["/mackerel/__init__.py"], "/tests/renderers/test_document.py": ["/mackerel/__init__.py"], "/tests/test_build.py": ["/mackerel/__init__.py"], "/mackerel/renderers/base.py": ["/mackerel/__init__.py", "/mackerel/site.py"], "/mackerel/content.py": ["/mackerel/__init__.py", "/mackerel/renderers/helpers.py", "/mackerel/renderers/base.py"], "/tests/conftest.py": ["/mackerel/__init__.py"], "/tests/test_cli.py": ["/mackerel/__init__.py"], "/tests/test_navigation.py": ["/mackerel/navigation.py"], "/tests/renderers/test_template.py": ["/mackerel/__init__.py", "/mackerel/renderers/__init__.py"], "/tests/renderers/test_base.py": ["/mackerel/renderers/__init__.py"], "/tests/renderers/test_helpers.py": ["/mackerel/renderers/__init__.py"], "/mackerel/navigation.py": ["/mackerel/content.py", "/mackerel/helpers.py", "/mackerel/site.py"], "/mackerel/site.py": ["/mackerel/__init__.py", "/mackerel/content.py", "/mackerel/helpers.py"], "/mackerel/__init__.py": ["/mackerel/__version__.py"], "/tests/test_content.py": ["/mackerel/__init__.py"], "/mackerel/renderers/template.py": ["/mackerel/__init__.py", "/mackerel/renderers/base.py", "/mackerel/site.py"], "/mackerel/renderers/document.py": ["/mackerel/renderers/base.py", "/mackerel/site.py"], "/tests/test_helpers.py": ["/mackerel/__init__.py"], "/tests/test_site.py": ["/mackerel/renderers/document.py", "/mackerel/renderers/template.py", "/mackerel/site.py"]}
|
33,291
|
pkolios/mackerel
|
refs/heads/master
|
/mackerel/content.py
|
from pathlib import Path
from textwrap import shorten
from typing import TYPE_CHECKING, Dict, Optional
from mackerel import exceptions
from mackerel.renderers.helpers import strip_tags
if TYPE_CHECKING:
from mackerel.renderers.base import DocumentRenderer # noqa
class Document:
    """A single source document: raw text, extracted metadata, rendered HTML.

    Raises DocumentError (via _get_metadata_value) if the mandatory
    'template' or 'title' metadata keys are missing.
    """
    def __init__(self, document_path: Path, content_path: Path,
                 renderer: 'DocumentRenderer') -> None:
        self.document_path = document_path  # type: Path
        # Path relative to the content root; used to derive the output path.
        self.relative_path = document_path.relative_to(content_path)  # type: Path # noqa
        self.content = self.document_path.read_text()  # type: str
        self.metadata = renderer.extract_metadata(
            text=self.content)  # type: Dict[str, str]
        self.template = self._get_metadata_value(
            key='template', metadata=self.metadata)  # type: str
        self.html = renderer.render(self.content)  # type: str
        self.title = self._get_metadata_value(
            key='title', metadata=self.metadata)  # type: str
    def _get_metadata_value(self, key: str, metadata: Dict[str, str]) -> str:
        """Return metadata[key], raising DocumentError when it is absent."""
        try:
            return metadata[key]
        except KeyError:
            raise exceptions.DocumentError(
                f'Document `{str(self.document_path)}` is missing a {key}')
    def __eq__(self, other: object) -> bool:
        # Documents are identified solely by their source path.
        if not isinstance(other, Document):
            return False
        return self.document_path == other.document_path
    def excerpt(self, width: Optional[int] = 150,
                placeholder: Optional[str] = '...') -> str:
        """Return a shortened plain-text summary of the rendered HTML.

        NOTE(review): passing placeholder=None would crash on
        len(placeholder), and width=0 silently falls back to 150 via
        `width or 150` — confirm whether those are intended.
        """
        text = strip_tags(self.html)
        return shorten(text, width=(width or 150)+len(placeholder),
                       placeholder=placeholder)
|
{"/tests/test_config_ini.py": ["/mackerel/__init__.py"], "/mackerel/build.py": ["/mackerel/__init__.py", "/mackerel/navigation.py", "/mackerel/site.py", "/mackerel/helpers.py"], "/mackerel/cli.py": ["/mackerel/__init__.py"], "/tests/renderers/test_document.py": ["/mackerel/__init__.py"], "/tests/test_build.py": ["/mackerel/__init__.py"], "/mackerel/renderers/base.py": ["/mackerel/__init__.py", "/mackerel/site.py"], "/mackerel/content.py": ["/mackerel/__init__.py", "/mackerel/renderers/helpers.py", "/mackerel/renderers/base.py"], "/tests/conftest.py": ["/mackerel/__init__.py"], "/tests/test_cli.py": ["/mackerel/__init__.py"], "/tests/test_navigation.py": ["/mackerel/navigation.py"], "/tests/renderers/test_template.py": ["/mackerel/__init__.py", "/mackerel/renderers/__init__.py"], "/tests/renderers/test_base.py": ["/mackerel/renderers/__init__.py"], "/tests/renderers/test_helpers.py": ["/mackerel/renderers/__init__.py"], "/mackerel/navigation.py": ["/mackerel/content.py", "/mackerel/helpers.py", "/mackerel/site.py"], "/mackerel/site.py": ["/mackerel/__init__.py", "/mackerel/content.py", "/mackerel/helpers.py"], "/mackerel/__init__.py": ["/mackerel/__version__.py"], "/tests/test_content.py": ["/mackerel/__init__.py"], "/mackerel/renderers/template.py": ["/mackerel/__init__.py", "/mackerel/renderers/base.py", "/mackerel/site.py"], "/mackerel/renderers/document.py": ["/mackerel/renderers/base.py", "/mackerel/site.py"], "/tests/test_helpers.py": ["/mackerel/__init__.py"], "/tests/test_site.py": ["/mackerel/renderers/document.py", "/mackerel/renderers/template.py", "/mackerel/site.py"]}
|
33,292
|
pkolios/mackerel
|
refs/heads/master
|
/tests/conftest.py
|
import configparser
import logging
from pathlib import Path
from unittest import mock
import pytest
import mackerel
class DocumentMock:
    """Lightweight attribute bag standing in for content.Document in tests:
    every keyword argument becomes an instance attribute."""

    def __init__(self, **kwargs):
        for name, value in kwargs.items():
            setattr(self, name, value)
@pytest.yield_fixture
def document_mocks():
    """Factory fixture: call .create(**kwargs) to get a DocumentMock."""
    class DocumentFactory:
        def create(self, **kwargs):
            return DocumentMock(**kwargs)
    # NOTE(review): this yield_fixture returns instead of yielding, unlike
    # the sibling fixtures — works under modern pytest, but confirm intent.
    return DocumentFactory()
@pytest.yield_fixture
def site(document_mocks):
    """Fully mocked mackerel Site: config, documents, static file lists,
    paths and a mocked logger/template renderer."""
    site = mock.Mock(spec=mackerel.site.Site)
    # Minimal configuration mirroring the shipped config.ini sections.
    site.config = configparser.ConfigParser()
    site.config.read_dict({
        'mackerel': {'OUTPUT_EXT': '.html'},
        'user': {'url': 'http://localhost:8000/'},
        'navigation': {'main': 'index.md, about.md'},
        'Jinja2Renderer': {
            'TRIM_BLOCKS': True,
            'LSTRIP_BLOCKS': True,
        },
        'MarkdownMarkdownRenderer': {
            'OUTPUT_FORMAT': 'html5',
            'EXTENSIONS': 'markdown.extensions.meta, markdown.extensions.extra'
        }
    })
    site.content_path = Path('/tmp/mackerel/test/content')
    # Four fake documents; only relative_path is needed by the build tests.
    site.documents = (
        document_mocks.create(relative_path=Path('about.md')),
        document_mocks.create(relative_path=Path('index.md')),
        document_mocks.create(relative_path=Path('posts/hello.md')),
        document_mocks.create(relative_path=Path('posts/world.md')),
    )
    site.logger = mock.Mock(spec=logging.Logger)
    # Static assets living in the content tree.
    site.other_content_files = (
        Path('/tmp/mackerel/test/content/logo.svg'),
        Path('/tmp/mackerel/test/content/posts/image.png'),
    )
    # Static assets living in the template tree.
    site.other_template_files = (
        Path('/tmp/mackerel/test/templates/example/favicon.ico'),
        Path('/tmp/mackerel/test/templates/example/css/style.css'),
        Path('/tmp/mackerel/test/templates/example/js/app.js'),
    )
    site.output_path = Path('/tmp/mackerel/test/_build')
    site.path = Path('/tmp/mackerel/test')
    site.template_path = Path('/tmp/mackerel/test/templates/example')
    site.template_renderer = mock.Mock(
        spec=mackerel.renderers.base.TemplateRenderer)
    yield site
# Fixed: ``pytest.yield_fixture`` is deprecated/removed in modern pytest.
@pytest.fixture
def site_path():
    """Path to the bundled example site used by the tests."""
    return Path(__file__).parent / 'site'
|
{"/tests/test_config_ini.py": ["/mackerel/__init__.py"], "/mackerel/build.py": ["/mackerel/__init__.py", "/mackerel/navigation.py", "/mackerel/site.py", "/mackerel/helpers.py"], "/mackerel/cli.py": ["/mackerel/__init__.py"], "/tests/renderers/test_document.py": ["/mackerel/__init__.py"], "/tests/test_build.py": ["/mackerel/__init__.py"], "/mackerel/renderers/base.py": ["/mackerel/__init__.py", "/mackerel/site.py"], "/mackerel/content.py": ["/mackerel/__init__.py", "/mackerel/renderers/helpers.py", "/mackerel/renderers/base.py"], "/tests/conftest.py": ["/mackerel/__init__.py"], "/tests/test_cli.py": ["/mackerel/__init__.py"], "/tests/test_navigation.py": ["/mackerel/navigation.py"], "/tests/renderers/test_template.py": ["/mackerel/__init__.py", "/mackerel/renderers/__init__.py"], "/tests/renderers/test_base.py": ["/mackerel/renderers/__init__.py"], "/tests/renderers/test_helpers.py": ["/mackerel/renderers/__init__.py"], "/mackerel/navigation.py": ["/mackerel/content.py", "/mackerel/helpers.py", "/mackerel/site.py"], "/mackerel/site.py": ["/mackerel/__init__.py", "/mackerel/content.py", "/mackerel/helpers.py"], "/mackerel/__init__.py": ["/mackerel/__version__.py"], "/tests/test_content.py": ["/mackerel/__init__.py"], "/mackerel/renderers/template.py": ["/mackerel/__init__.py", "/mackerel/renderers/base.py", "/mackerel/site.py"], "/mackerel/renderers/document.py": ["/mackerel/renderers/base.py", "/mackerel/site.py"], "/tests/test_helpers.py": ["/mackerel/__init__.py"], "/tests/test_site.py": ["/mackerel/renderers/document.py", "/mackerel/renderers/template.py", "/mackerel/site.py"]}
|
33,293
|
pkolios/mackerel
|
refs/heads/master
|
/mackerel/renderers/__init__.py
|
from . import document, template # noqa
|
{"/tests/test_config_ini.py": ["/mackerel/__init__.py"], "/mackerel/build.py": ["/mackerel/__init__.py", "/mackerel/navigation.py", "/mackerel/site.py", "/mackerel/helpers.py"], "/mackerel/cli.py": ["/mackerel/__init__.py"], "/tests/renderers/test_document.py": ["/mackerel/__init__.py"], "/tests/test_build.py": ["/mackerel/__init__.py"], "/mackerel/renderers/base.py": ["/mackerel/__init__.py", "/mackerel/site.py"], "/mackerel/content.py": ["/mackerel/__init__.py", "/mackerel/renderers/helpers.py", "/mackerel/renderers/base.py"], "/tests/conftest.py": ["/mackerel/__init__.py"], "/tests/test_cli.py": ["/mackerel/__init__.py"], "/tests/test_navigation.py": ["/mackerel/navigation.py"], "/tests/renderers/test_template.py": ["/mackerel/__init__.py", "/mackerel/renderers/__init__.py"], "/tests/renderers/test_base.py": ["/mackerel/renderers/__init__.py"], "/tests/renderers/test_helpers.py": ["/mackerel/renderers/__init__.py"], "/mackerel/navigation.py": ["/mackerel/content.py", "/mackerel/helpers.py", "/mackerel/site.py"], "/mackerel/site.py": ["/mackerel/__init__.py", "/mackerel/content.py", "/mackerel/helpers.py"], "/mackerel/__init__.py": ["/mackerel/__version__.py"], "/tests/test_content.py": ["/mackerel/__init__.py"], "/mackerel/renderers/template.py": ["/mackerel/__init__.py", "/mackerel/renderers/base.py", "/mackerel/site.py"], "/mackerel/renderers/document.py": ["/mackerel/renderers/base.py", "/mackerel/site.py"], "/tests/test_helpers.py": ["/mackerel/__init__.py"], "/tests/test_site.py": ["/mackerel/renderers/document.py", "/mackerel/renderers/template.py", "/mackerel/site.py"]}
|
33,294
|
pkolios/mackerel
|
refs/heads/master
|
/tests/test_cli.py
|
from pathlib import Path
from unittest import mock
import shutil
import pytest
from click.testing import CliRunner
import mackerel
@pytest.fixture
def runner():
    """Fresh Click CLI runner for each test."""
    cli_runner = CliRunner()
    return cli_runner
# Fixed: ``pytest.yield_fixture`` is deprecated/removed in modern pytest.
@pytest.fixture
def template_path():
    """Path to the example template shipped with the test site."""
    return Path(__file__).parent / 'site' / 'template'
# Fixed: ``pytest.yield_fixture`` is deprecated/removed in modern pytest.
@pytest.fixture
def output_path(site_path):
    """Yield a clean build directory path and remove it again afterwards."""
    path = Path(__file__).parent / 'site' / '_build'
    # Clear leftovers from a previous (possibly crashed) run.
    try:
        shutil.rmtree(path)
    except FileNotFoundError:
        pass
    yield path
    # Teardown: leave no build artifacts behind.
    try:
        shutil.rmtree(path)
    except FileNotFoundError:
        pass
def test_cli_base(runner):
    """The top-level CLI runs and advertises the build command."""
    res = runner.invoke(mackerel.cli.cli, ['--help'])
    assert res.exit_code == 0
    assert 'build' in res.output
def test_cli_build_error(runner):
    """`build` without a SITE_PATH argument fails with a usage error."""
    res = runner.invoke(mackerel.cli.cli, ['build'])
    assert res.exit_code == 2
    assert 'SITE_PATH' in res.output
def test_build_success(runner, site_path, template_path, output_path):
    """Building over an existing output directory prompts and succeeds."""
    output_path.mkdir()
    res = runner.invoke(
        mackerel.cli.cli, ['build', str(site_path)], input='y\n')
    assert res.exit_code == 0
    prompt = (f'Directory {str(output_path)} already exists, '
              'do you want to overwrite? [y/N]: y')
    assert prompt in res.output
    assert '\nBuild finished.\n' in res.output
    assert len(list(site_path.iterdir()))
def test_init_directory_exists(runner, site_path):
    """Initializing into an existing directory aborts with a usage error."""
    res = runner.invoke(mackerel.cli.cli, ['init', str(site_path)])
    assert res.exit_code == 2
    assert f'Initialize failed, file {str(site_path)}' in res.output
def test_init_directory_success(runner, tmpdir, site_path):
    """`init` scaffolds a new site mirroring the bundled example site."""
    target = tmpdir.join('init_test')
    res = runner.invoke(mackerel.cli.cli, ['init', str(target)])
    assert res.exit_code == 0
    assert res.output == f'Initialized empty mackerel site in {target}\n'
    assert len(list(site_path.iterdir())) == len(target.listdir())
def test_develop(runner, site):
    """`develop` builds the site, watches content/templates, and serves it."""
    # Patch both the livereload Server and the Build class so no real
    # build or network server is started by the command.
    with mock.patch('mackerel.cli.Server') as server, mock.patch(
            'mackerel.cli.mackerel.build.Build') as build:
        runner.invoke(
            mackerel.cli.cli,
            ['develop', str(site.path), '-h 0.0.0.0', '-p 8080'])
        server.assert_called_with()
        # Both the template and the content directory must be watched.
        watch_calls = (mock.call(str(site.template_path), mock.ANY),
                       mock.call(str(site.content_path), mock.ANY))
        server().watch.assert_has_calls(watch_calls, any_order=True)
        # The server must serve the build output on the requested host/port.
        server().serve.assert_called_with(
            host='0.0.0.0', port=8080, root=str(site.output_path))
        # An initial build must have been executed before serving.
        assert build.called
        build().execute.assert_called_with()
|
{"/tests/test_config_ini.py": ["/mackerel/__init__.py"], "/mackerel/build.py": ["/mackerel/__init__.py", "/mackerel/navigation.py", "/mackerel/site.py", "/mackerel/helpers.py"], "/mackerel/cli.py": ["/mackerel/__init__.py"], "/tests/renderers/test_document.py": ["/mackerel/__init__.py"], "/tests/test_build.py": ["/mackerel/__init__.py"], "/mackerel/renderers/base.py": ["/mackerel/__init__.py", "/mackerel/site.py"], "/mackerel/content.py": ["/mackerel/__init__.py", "/mackerel/renderers/helpers.py", "/mackerel/renderers/base.py"], "/tests/conftest.py": ["/mackerel/__init__.py"], "/tests/test_cli.py": ["/mackerel/__init__.py"], "/tests/test_navigation.py": ["/mackerel/navigation.py"], "/tests/renderers/test_template.py": ["/mackerel/__init__.py", "/mackerel/renderers/__init__.py"], "/tests/renderers/test_base.py": ["/mackerel/renderers/__init__.py"], "/tests/renderers/test_helpers.py": ["/mackerel/renderers/__init__.py"], "/mackerel/navigation.py": ["/mackerel/content.py", "/mackerel/helpers.py", "/mackerel/site.py"], "/mackerel/site.py": ["/mackerel/__init__.py", "/mackerel/content.py", "/mackerel/helpers.py"], "/mackerel/__init__.py": ["/mackerel/__version__.py"], "/tests/test_content.py": ["/mackerel/__init__.py"], "/mackerel/renderers/template.py": ["/mackerel/__init__.py", "/mackerel/renderers/base.py", "/mackerel/site.py"], "/mackerel/renderers/document.py": ["/mackerel/renderers/base.py", "/mackerel/site.py"], "/tests/test_helpers.py": ["/mackerel/__init__.py"], "/tests/test_site.py": ["/mackerel/renderers/document.py", "/mackerel/renderers/template.py", "/mackerel/site.py"]}
|
33,295
|
pkolios/mackerel
|
refs/heads/master
|
/tests/test_navigation.py
|
from unittest import mock
import pytest
from mackerel.navigation import Navigation, Node
# Fixed: ``pytest.yield_fixture`` is deprecated/removed in modern pytest.
@pytest.fixture
def navigation(site):
    """Navigation built on top of the stubbed site fixture."""
    return Navigation(site)
def test_navigation_init(site):
    """The navigation object keeps a reference to its site."""
    nav = Navigation(site)
    assert nav.site == site
def test_navigation_nodes(navigation):
    """One navigation node is generated per site document."""
    node_count = len(navigation.nodes)
    assert node_count == 4
def test_build_url(navigation):
    """URLs are site-root-relative with the configured output extension."""
    doc = navigation.site.documents[0]
    assert navigation._build_url(doc) == '/about.html'
def test_build_url_with_directory(navigation):
    """A site hosted under a sub-path prefixes URLs with that path."""
    patched = {'user': {'url': 'http://test/blog/'}}
    with mock.patch.dict(navigation.site.config, patched):
        result = navigation._build_url(navigation.site.documents[0])
    assert result == '/blog/about.html'
def test_build_url_with_missing_config_value(navigation):
    """Without a configured site URL the URL falls back to the root."""
    with mock.patch.dict(navigation.site.config, {'user': {}}):
        result = navigation._build_url(navigation.site.documents[0])
    assert result == '/about.html'
def test_build_external_url(navigation):
    """External URLs are absolute, rooted at the configured site URL."""
    doc = navigation.site.documents[0]
    assert navigation._build_external_url(doc) == (
        'http://localhost:8000/about.html')
def test_build_external_url_with_directory(navigation):
    """External URLs include a configured sub-path."""
    patched = {'user': {'url': 'http://test/blog/'}}
    with mock.patch.dict(navigation.site.config, patched):
        result = navigation._build_external_url(navigation.site.documents[0])
    assert result == 'http://test/blog/about.html'
def test_build_external_url_with_missing_config_value(navigation):
    """Without a configured site URL the external URL stays relative."""
    with mock.patch.dict(navigation.site.config, {'user': {}}):
        result = navigation._build_external_url(navigation.site.documents[0])
    assert result == '/about.html'
def test_get_node(navigation):
    """Nodes are retrievable by relative path, given as str or Path."""
    assert navigation.get_node('unknown_node.md') is None
    about_doc = navigation.site.documents[0]
    for node in (navigation.get_node('about.md'),
                 navigation.get_node(about_doc.relative_path)):
        assert node.document == about_doc
        assert node.url == '/about.html'
        assert node.external_url == 'http://localhost:8000/about.html'
def test_get_menu(navigation):
    """Menus come from config; unknown menus yield an empty tuple."""
    assert navigation.get_menu('unknown_menu') == tuple()
    index_node, about_node = navigation.get_menu('main')
    assert index_node.url == '/index.html'
    assert about_node.url == '/about.html'
def test_loop(navigation):
    """loop() filters nodes by URL prefix, with or without slashes."""
    everything = navigation.loop()
    assert len(everything) == 4
    assert all(isinstance(item, Node) for item in everything)
    # A prefix matching no directory yields nothing, slash or not.
    for prefix in ('about', '/about'):
        assert len(navigation.loop(prefix)) == 0
    # Both posts live under /posts/, slash or not.
    for prefix in ('posts', '/posts'):
        assert len(navigation.loop(prefix)) == 2
|
{"/tests/test_config_ini.py": ["/mackerel/__init__.py"], "/mackerel/build.py": ["/mackerel/__init__.py", "/mackerel/navigation.py", "/mackerel/site.py", "/mackerel/helpers.py"], "/mackerel/cli.py": ["/mackerel/__init__.py"], "/tests/renderers/test_document.py": ["/mackerel/__init__.py"], "/tests/test_build.py": ["/mackerel/__init__.py"], "/mackerel/renderers/base.py": ["/mackerel/__init__.py", "/mackerel/site.py"], "/mackerel/content.py": ["/mackerel/__init__.py", "/mackerel/renderers/helpers.py", "/mackerel/renderers/base.py"], "/tests/conftest.py": ["/mackerel/__init__.py"], "/tests/test_cli.py": ["/mackerel/__init__.py"], "/tests/test_navigation.py": ["/mackerel/navigation.py"], "/tests/renderers/test_template.py": ["/mackerel/__init__.py", "/mackerel/renderers/__init__.py"], "/tests/renderers/test_base.py": ["/mackerel/renderers/__init__.py"], "/tests/renderers/test_helpers.py": ["/mackerel/renderers/__init__.py"], "/mackerel/navigation.py": ["/mackerel/content.py", "/mackerel/helpers.py", "/mackerel/site.py"], "/mackerel/site.py": ["/mackerel/__init__.py", "/mackerel/content.py", "/mackerel/helpers.py"], "/mackerel/__init__.py": ["/mackerel/__version__.py"], "/tests/test_content.py": ["/mackerel/__init__.py"], "/mackerel/renderers/template.py": ["/mackerel/__init__.py", "/mackerel/renderers/base.py", "/mackerel/site.py"], "/mackerel/renderers/document.py": ["/mackerel/renderers/base.py", "/mackerel/site.py"], "/tests/test_helpers.py": ["/mackerel/__init__.py"], "/tests/test_site.py": ["/mackerel/renderers/document.py", "/mackerel/renderers/template.py", "/mackerel/site.py"]}
|
33,296
|
pkolios/mackerel
|
refs/heads/master
|
/setup.py
|
#!/usr/bin/env python
# Packaging script for the mackerel static-site generator.
import os
from setuptools import setup

here = os.path.abspath(os.path.dirname(__file__))

# Load the package metadata (__title__, __version__, ...) from
# mackerel/__version__.py without importing the package itself.
about = {}
with open(os.path.join(here, 'mackerel', '__version__.py'), 'r',
          encoding='utf-8') as f:
    exec(f.read(), about)

# README + CHANGELOG together form the PyPI long description.
with open('README.rst', 'r', encoding='utf-8') as f:
    readme = f.read()

with open('CHANGELOG.rst', 'r', encoding='utf-8') as f:
    changelog = f.read()

setup(
    name=about['__title__'],
    version=about['__version__'],
    author=about['__author__'],
    author_email=about['__author_email__'],
    description=about['__description__'],
    long_description=readme + '\n\n' + changelog,
    url=about['__url__'],
    packages=['mackerel'],
    package_data={'': ['LICENSE'], 'mackerel': ['config.ini']},
    include_package_data=True,
    license=about['__license__'],
    python_requires='>=3.6',  # uses f-strings and variable annotations
    install_requires=[
        'Click',
        'Jinja2',
        'livereload',
        'markdown',
        'MarkupSafe',
        'mistune',
        'mistune-contrib',
    ],
    setup_requires=['pytest-runner'],
    tests_require=['pytest', 'pytest-cov'],
    entry_points='''
        [console_scripts]
        mackerel=mackerel.cli:cli
    ''',
    platforms='any',
    classifiers=[
        'Development Status :: 2 - Pre-Alpha',
        'Environment :: Console',
        'Environment :: Web Environment',
        'Intended Audience :: Developers',
        'Intended Audience :: End Users/Desktop',
        'License :: OSI Approved :: MIT License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3 :: Only',
        'Topic :: Documentation',
        'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
        'Topic :: Internet :: WWW/HTTP :: Dynamic Content :: Content Management System',  # noqa
        'Topic :: Software Development :: Documentation',
        'Topic :: Text Processing :: Markup :: HTML'
    ],
)
|
{"/tests/test_config_ini.py": ["/mackerel/__init__.py"], "/mackerel/build.py": ["/mackerel/__init__.py", "/mackerel/navigation.py", "/mackerel/site.py", "/mackerel/helpers.py"], "/mackerel/cli.py": ["/mackerel/__init__.py"], "/tests/renderers/test_document.py": ["/mackerel/__init__.py"], "/tests/test_build.py": ["/mackerel/__init__.py"], "/mackerel/renderers/base.py": ["/mackerel/__init__.py", "/mackerel/site.py"], "/mackerel/content.py": ["/mackerel/__init__.py", "/mackerel/renderers/helpers.py", "/mackerel/renderers/base.py"], "/tests/conftest.py": ["/mackerel/__init__.py"], "/tests/test_cli.py": ["/mackerel/__init__.py"], "/tests/test_navigation.py": ["/mackerel/navigation.py"], "/tests/renderers/test_template.py": ["/mackerel/__init__.py", "/mackerel/renderers/__init__.py"], "/tests/renderers/test_base.py": ["/mackerel/renderers/__init__.py"], "/tests/renderers/test_helpers.py": ["/mackerel/renderers/__init__.py"], "/mackerel/navigation.py": ["/mackerel/content.py", "/mackerel/helpers.py", "/mackerel/site.py"], "/mackerel/site.py": ["/mackerel/__init__.py", "/mackerel/content.py", "/mackerel/helpers.py"], "/mackerel/__init__.py": ["/mackerel/__version__.py"], "/tests/test_content.py": ["/mackerel/__init__.py"], "/mackerel/renderers/template.py": ["/mackerel/__init__.py", "/mackerel/renderers/base.py", "/mackerel/site.py"], "/mackerel/renderers/document.py": ["/mackerel/renderers/base.py", "/mackerel/site.py"], "/tests/test_helpers.py": ["/mackerel/__init__.py"], "/tests/test_site.py": ["/mackerel/renderers/document.py", "/mackerel/renderers/template.py", "/mackerel/site.py"]}
|
33,297
|
pkolios/mackerel
|
refs/heads/master
|
/tests/renderers/test_template.py
|
from unittest import mock
import pytest
from mackerel import exceptions
from mackerel.renderers import template
class TestJinja2Renderer:
    """Tests for the Jinja2-backed template renderer."""

    def test_init(self, site):
        """Construction wires a FileSystemLoader and Environment options."""
        # The loader must point at the resolved template directory.
        with mock.patch('mackerel.renderers.template.jinja2') as jinja2:
            template.Jinja2Renderer(site=site)
            jinja2.FileSystemLoader.assert_called_with(
                str(site.template_path.resolve()))
        # Environment options come from the [Jinja2Renderer] config section.
        with mock.patch('mackerel.renderers.template.jinja2') as jinja2:
            template.Jinja2Renderer(site=site)
            jinja2.Environment.assert_called_with(
                loader=mock.ANY, lstrip_blocks=True, trim_blocks=True)

    def test_render(self, site, document_mocks):
        """render() loads the document's template and renders with context."""
        document = document_mocks.create(template='path/to/template')
        context = mock.Mock('context')
        renderer = template.Jinja2Renderer(site=site)
        render_func = mock.Mock()
        # Stub the environment so no real template file is needed.
        renderer.env.get_template = mock.Mock(
            return_value=mock.Mock(render=render_func))
        renderer.render(ctx=context, document=document)
        renderer.env.get_template.assert_called_once_with(document.template)
        render_func.assert_called_once_with(ctx=context, document=document)

    def test_render_template_not_found(self, site):
        """A missing template raises RenderingError naming both paths."""
        document = mock.Mock('document')
        document.template = '/tmp/wrong/path/wrong_template.html'
        document.document_path = '/tmp/some/document/path.md'
        context = mock.Mock('context')
        renderer = template.Jinja2Renderer(site=site)
        with pytest.raises(exceptions.RenderingError) as excinfo:
            renderer.render(ctx=context, document=document)
        assert (f'Template file `{document.template}` for document '
                f'`{document.document_path}` not found') in str(excinfo.value)
|
{"/tests/test_config_ini.py": ["/mackerel/__init__.py"], "/mackerel/build.py": ["/mackerel/__init__.py", "/mackerel/navigation.py", "/mackerel/site.py", "/mackerel/helpers.py"], "/mackerel/cli.py": ["/mackerel/__init__.py"], "/tests/renderers/test_document.py": ["/mackerel/__init__.py"], "/tests/test_build.py": ["/mackerel/__init__.py"], "/mackerel/renderers/base.py": ["/mackerel/__init__.py", "/mackerel/site.py"], "/mackerel/content.py": ["/mackerel/__init__.py", "/mackerel/renderers/helpers.py", "/mackerel/renderers/base.py"], "/tests/conftest.py": ["/mackerel/__init__.py"], "/tests/test_cli.py": ["/mackerel/__init__.py"], "/tests/test_navigation.py": ["/mackerel/navigation.py"], "/tests/renderers/test_template.py": ["/mackerel/__init__.py", "/mackerel/renderers/__init__.py"], "/tests/renderers/test_base.py": ["/mackerel/renderers/__init__.py"], "/tests/renderers/test_helpers.py": ["/mackerel/renderers/__init__.py"], "/mackerel/navigation.py": ["/mackerel/content.py", "/mackerel/helpers.py", "/mackerel/site.py"], "/mackerel/site.py": ["/mackerel/__init__.py", "/mackerel/content.py", "/mackerel/helpers.py"], "/mackerel/__init__.py": ["/mackerel/__version__.py"], "/tests/test_content.py": ["/mackerel/__init__.py"], "/mackerel/renderers/template.py": ["/mackerel/__init__.py", "/mackerel/renderers/base.py", "/mackerel/site.py"], "/mackerel/renderers/document.py": ["/mackerel/renderers/base.py", "/mackerel/site.py"], "/tests/test_helpers.py": ["/mackerel/__init__.py"], "/tests/test_site.py": ["/mackerel/renderers/document.py", "/mackerel/renderers/template.py", "/mackerel/site.py"]}
|
33,298
|
pkolios/mackerel
|
refs/heads/master
|
/tests/renderers/test_base.py
|
from unittest import mock
import pytest
from mackerel.renderers import base
def test_document_renderer():
    """DocumentRenderer is abstract: its constructor and hooks must raise."""
    with pytest.raises(NotImplementedError):
        base.DocumentRenderer(site=mock.Mock())
    renderer_cls = base.DocumentRenderer
    for method in (renderer_cls.extract_metadata, renderer_cls.render):
        with pytest.raises(NotImplementedError):
            method(self=0, text='')
def test_template_renderer():
    """TemplateRenderer is abstract: constructor and render must raise."""
    with pytest.raises(NotImplementedError):
        base.TemplateRenderer(site=mock.Mock())
    with pytest.raises(NotImplementedError):
        base.TemplateRenderer.render(self=0, ctx='', document='')
|
{"/tests/test_config_ini.py": ["/mackerel/__init__.py"], "/mackerel/build.py": ["/mackerel/__init__.py", "/mackerel/navigation.py", "/mackerel/site.py", "/mackerel/helpers.py"], "/mackerel/cli.py": ["/mackerel/__init__.py"], "/tests/renderers/test_document.py": ["/mackerel/__init__.py"], "/tests/test_build.py": ["/mackerel/__init__.py"], "/mackerel/renderers/base.py": ["/mackerel/__init__.py", "/mackerel/site.py"], "/mackerel/content.py": ["/mackerel/__init__.py", "/mackerel/renderers/helpers.py", "/mackerel/renderers/base.py"], "/tests/conftest.py": ["/mackerel/__init__.py"], "/tests/test_cli.py": ["/mackerel/__init__.py"], "/tests/test_navigation.py": ["/mackerel/navigation.py"], "/tests/renderers/test_template.py": ["/mackerel/__init__.py", "/mackerel/renderers/__init__.py"], "/tests/renderers/test_base.py": ["/mackerel/renderers/__init__.py"], "/tests/renderers/test_helpers.py": ["/mackerel/renderers/__init__.py"], "/mackerel/navigation.py": ["/mackerel/content.py", "/mackerel/helpers.py", "/mackerel/site.py"], "/mackerel/site.py": ["/mackerel/__init__.py", "/mackerel/content.py", "/mackerel/helpers.py"], "/mackerel/__init__.py": ["/mackerel/__version__.py"], "/tests/test_content.py": ["/mackerel/__init__.py"], "/mackerel/renderers/template.py": ["/mackerel/__init__.py", "/mackerel/renderers/base.py", "/mackerel/site.py"], "/mackerel/renderers/document.py": ["/mackerel/renderers/base.py", "/mackerel/site.py"], "/tests/test_helpers.py": ["/mackerel/__init__.py"], "/tests/test_site.py": ["/mackerel/renderers/document.py", "/mackerel/renderers/template.py", "/mackerel/site.py"]}
|
33,299
|
pkolios/mackerel
|
refs/heads/master
|
/tests/renderers/test_helpers.py
|
from mackerel.renderers import helpers
def test_strip_tags():
    """strip_tags drops markup while leaving plain text untouched."""
    tagged = helpers.strip_tags('<em>Foo & Bar</em>')
    plain = helpers.strip_tags('Foo & Bar')
    assert tagged == 'Foo & Bar'
    assert plain == 'Foo & Bar'
|
{"/tests/test_config_ini.py": ["/mackerel/__init__.py"], "/mackerel/build.py": ["/mackerel/__init__.py", "/mackerel/navigation.py", "/mackerel/site.py", "/mackerel/helpers.py"], "/mackerel/cli.py": ["/mackerel/__init__.py"], "/tests/renderers/test_document.py": ["/mackerel/__init__.py"], "/tests/test_build.py": ["/mackerel/__init__.py"], "/mackerel/renderers/base.py": ["/mackerel/__init__.py", "/mackerel/site.py"], "/mackerel/content.py": ["/mackerel/__init__.py", "/mackerel/renderers/helpers.py", "/mackerel/renderers/base.py"], "/tests/conftest.py": ["/mackerel/__init__.py"], "/tests/test_cli.py": ["/mackerel/__init__.py"], "/tests/test_navigation.py": ["/mackerel/navigation.py"], "/tests/renderers/test_template.py": ["/mackerel/__init__.py", "/mackerel/renderers/__init__.py"], "/tests/renderers/test_base.py": ["/mackerel/renderers/__init__.py"], "/tests/renderers/test_helpers.py": ["/mackerel/renderers/__init__.py"], "/mackerel/navigation.py": ["/mackerel/content.py", "/mackerel/helpers.py", "/mackerel/site.py"], "/mackerel/site.py": ["/mackerel/__init__.py", "/mackerel/content.py", "/mackerel/helpers.py"], "/mackerel/__init__.py": ["/mackerel/__version__.py"], "/tests/test_content.py": ["/mackerel/__init__.py"], "/mackerel/renderers/template.py": ["/mackerel/__init__.py", "/mackerel/renderers/base.py", "/mackerel/site.py"], "/mackerel/renderers/document.py": ["/mackerel/renderers/base.py", "/mackerel/site.py"], "/tests/test_helpers.py": ["/mackerel/__init__.py"], "/tests/test_site.py": ["/mackerel/renderers/document.py", "/mackerel/renderers/template.py", "/mackerel/site.py"]}
|
33,300
|
pkolios/mackerel
|
refs/heads/master
|
/mackerel/navigation.py
|
from pathlib import Path
from typing import NamedTuple, TYPE_CHECKING
from urllib.parse import urljoin, urlparse
from mackerel.content import Document
from mackerel.helpers import cached_property
if TYPE_CHECKING:
from typing import Optional, Tuple, Union # noqa
from mackerel.site import Site # noqa
class Node(NamedTuple):
    """A single navigation entry: a document plus its resolved URLs."""
    url: str            # site-root-relative URL, e.g. '/about.html'
    external_url: str   # absolute URL including the configured site URL
    document: Document  # the underlying content document
class Navigation:
    """Navigation provides methods to list and access the content"""

    def __init__(self, site: 'Site') -> None:
        self.site = site

    def get_menu(self, menu: str) -> 'Tuple[Node, ...]':
        """Resolve the named menu from config into a tuple of nodes."""
        raw = self.site.config.get('navigation', menu, fallback='')
        entries = tuple(part.strip() for part in raw.split(',') if raw)
        return tuple(self.get_node(entry) for entry in entries)

    def get_node(self, rel_path: 'Union[str, Path]') -> 'Optional[Node]':
        """Return the node whose document matches *rel_path*, if any."""
        wanted = Path(rel_path) if isinstance(rel_path, str) else rel_path
        return next((node for node in self.nodes
                     if node.document.relative_path == wanted), None)

    def loop(self, path: 'Optional[str]' = '/') -> 'Tuple':
        """Return every node whose URL lives under *path*."""
        # Normalize to a '/'-wrapped prefix, e.g. 'posts' -> '/posts/'.
        prefix = '/' + (path.rstrip('/') + '/').lstrip('/')
        return tuple(node for node in self.nodes
                     if node.url.startswith(prefix))

    @cached_property
    def nodes(self) -> 'Tuple[Node, ...]':
        """All site documents wrapped as navigation nodes (computed once)."""
        return tuple(
            Node(url=self._build_url(doc),
                 external_url=self._build_external_url(doc),
                 document=doc)
            for doc in self.site.documents)

    def _build_url(self, document: Document) -> str:
        """Build the site-root-relative URL for *document*."""
        base = urlparse(self.site.config.get('user', 'url', fallback='/'))
        out_ext = self.site.config['mackerel']['OUTPUT_EXT']
        target = document.relative_path.with_suffix(out_ext).as_posix()
        # Only the path component of the site URL matters here.
        return urljoin(base.path, target)

    def _build_external_url(self, document: Document) -> str:
        """Build the absolute URL for *document*."""
        site_url = self.site.config.get('user', 'url', fallback='/')
        return urljoin(site_url, self._build_url(document))
|
{"/tests/test_config_ini.py": ["/mackerel/__init__.py"], "/mackerel/build.py": ["/mackerel/__init__.py", "/mackerel/navigation.py", "/mackerel/site.py", "/mackerel/helpers.py"], "/mackerel/cli.py": ["/mackerel/__init__.py"], "/tests/renderers/test_document.py": ["/mackerel/__init__.py"], "/tests/test_build.py": ["/mackerel/__init__.py"], "/mackerel/renderers/base.py": ["/mackerel/__init__.py", "/mackerel/site.py"], "/mackerel/content.py": ["/mackerel/__init__.py", "/mackerel/renderers/helpers.py", "/mackerel/renderers/base.py"], "/tests/conftest.py": ["/mackerel/__init__.py"], "/tests/test_cli.py": ["/mackerel/__init__.py"], "/tests/test_navigation.py": ["/mackerel/navigation.py"], "/tests/renderers/test_template.py": ["/mackerel/__init__.py", "/mackerel/renderers/__init__.py"], "/tests/renderers/test_base.py": ["/mackerel/renderers/__init__.py"], "/tests/renderers/test_helpers.py": ["/mackerel/renderers/__init__.py"], "/mackerel/navigation.py": ["/mackerel/content.py", "/mackerel/helpers.py", "/mackerel/site.py"], "/mackerel/site.py": ["/mackerel/__init__.py", "/mackerel/content.py", "/mackerel/helpers.py"], "/mackerel/__init__.py": ["/mackerel/__version__.py"], "/tests/test_content.py": ["/mackerel/__init__.py"], "/mackerel/renderers/template.py": ["/mackerel/__init__.py", "/mackerel/renderers/base.py", "/mackerel/site.py"], "/mackerel/renderers/document.py": ["/mackerel/renderers/base.py", "/mackerel/site.py"], "/tests/test_helpers.py": ["/mackerel/__init__.py"], "/tests/test_site.py": ["/mackerel/renderers/document.py", "/mackerel/renderers/template.py", "/mackerel/site.py"]}
|
33,301
|
pkolios/mackerel
|
refs/heads/master
|
/mackerel/site.py
|
import logging
from pathlib import Path
from typing import TYPE_CHECKING
from mackerel import exceptions, renderers
from mackerel.content import Document
from mackerel.helpers import cached_property, make_config
if TYPE_CHECKING:
from typing import Tuple # noqa
from configparser import ConfigParser # noqa
class Site:
    """A mackerel site rooted at *path*: config, file lists and renderers."""

    def __init__(self, path: Path) -> None:
        self.path = path
        self.config = make_config(site_path=path)  # type: ConfigParser
        self.logger = logging.getLogger('mackerel')  # type: logging.Logger
        # Site paths (all relative to the site root, taken from config).
        self.content_path = self.path / Path(
            self.config['mackerel']['CONTENT_PATH'])  # type: Path
        self.output_path = self.path / Path(
            self.config['mackerel']['OUTPUT_PATH'])  # type: Path
        self.template_path = self.path / Path(
            self.config['mackerel']['TEMPLATE_PATH'])  # type: Path
        # Site files: documents are content files with the configured
        # document extension; everything else is copied verbatim.
        self.document_files = tuple(
            f for f in self.content_path.rglob('*')
            if f.suffix == self.config['mackerel']['DOC_EXT'])  # type: Tuple[Path, ...] # noqa
        self.other_content_files = tuple(
            f for f in self.content_path.rglob('*')
            if f.suffix != self.config['mackerel']['DOC_EXT'] and
            f.is_file())  # type: Tuple[Path, ...]
        self.other_template_files = tuple(
            f for f in self.template_path.rglob('*')
            if f.suffix != self.config['mackerel']['TEMPLATE_EXT'] and
            f.is_file())  # type: Tuple[Path, ...]
        # Site renderers: classes are looked up by name from config.
        self.document_renderer = getattr(
            renderers.document,
            self.config['mackerel']['DOCUMENT_RENDERER'])(site=self)  # type: renderers.base.DocumentRenderer # noqa
        self.template_renderer = getattr(
            renderers.template,
            self.config['mackerel']['TEMPLATE_RENDERER'])(site=self)  # type: renderers.base.TemplateRenderer # noqa

    @cached_property
    def documents(self) -> 'Tuple[Document, ...]':
        """Parse all document files; log and skip ones that fail to parse."""
        documents = []
        for file in self.document_files:
            try:
                documents.append(Document(
                    document_path=file, content_path=self.content_path,
                    renderer=self.document_renderer))
            except exceptions.DocumentError as exc:
                # A broken document should not abort the whole build.
                self.logger.warning(str(exc))
        return tuple(documents)
|
{"/tests/test_config_ini.py": ["/mackerel/__init__.py"], "/mackerel/build.py": ["/mackerel/__init__.py", "/mackerel/navigation.py", "/mackerel/site.py", "/mackerel/helpers.py"], "/mackerel/cli.py": ["/mackerel/__init__.py"], "/tests/renderers/test_document.py": ["/mackerel/__init__.py"], "/tests/test_build.py": ["/mackerel/__init__.py"], "/mackerel/renderers/base.py": ["/mackerel/__init__.py", "/mackerel/site.py"], "/mackerel/content.py": ["/mackerel/__init__.py", "/mackerel/renderers/helpers.py", "/mackerel/renderers/base.py"], "/tests/conftest.py": ["/mackerel/__init__.py"], "/tests/test_cli.py": ["/mackerel/__init__.py"], "/tests/test_navigation.py": ["/mackerel/navigation.py"], "/tests/renderers/test_template.py": ["/mackerel/__init__.py", "/mackerel/renderers/__init__.py"], "/tests/renderers/test_base.py": ["/mackerel/renderers/__init__.py"], "/tests/renderers/test_helpers.py": ["/mackerel/renderers/__init__.py"], "/mackerel/navigation.py": ["/mackerel/content.py", "/mackerel/helpers.py", "/mackerel/site.py"], "/mackerel/site.py": ["/mackerel/__init__.py", "/mackerel/content.py", "/mackerel/helpers.py"], "/mackerel/__init__.py": ["/mackerel/__version__.py"], "/tests/test_content.py": ["/mackerel/__init__.py"], "/mackerel/renderers/template.py": ["/mackerel/__init__.py", "/mackerel/renderers/base.py", "/mackerel/site.py"], "/mackerel/renderers/document.py": ["/mackerel/renderers/base.py", "/mackerel/site.py"], "/tests/test_helpers.py": ["/mackerel/__init__.py"], "/tests/test_site.py": ["/mackerel/renderers/document.py", "/mackerel/renderers/template.py", "/mackerel/site.py"]}
|
33,302
|
pkolios/mackerel
|
refs/heads/master
|
/mackerel/__init__.py
|
from .__version__ import ( # noqa
__title__, __description__, __url__, __version__, __author__,
__author_email__, __license__, __copyright__)
from . import build, cli, content, renderers, site # noqa
|
{"/tests/test_config_ini.py": ["/mackerel/__init__.py"], "/mackerel/build.py": ["/mackerel/__init__.py", "/mackerel/navigation.py", "/mackerel/site.py", "/mackerel/helpers.py"], "/mackerel/cli.py": ["/mackerel/__init__.py"], "/tests/renderers/test_document.py": ["/mackerel/__init__.py"], "/tests/test_build.py": ["/mackerel/__init__.py"], "/mackerel/renderers/base.py": ["/mackerel/__init__.py", "/mackerel/site.py"], "/mackerel/content.py": ["/mackerel/__init__.py", "/mackerel/renderers/helpers.py", "/mackerel/renderers/base.py"], "/tests/conftest.py": ["/mackerel/__init__.py"], "/tests/test_cli.py": ["/mackerel/__init__.py"], "/tests/test_navigation.py": ["/mackerel/navigation.py"], "/tests/renderers/test_template.py": ["/mackerel/__init__.py", "/mackerel/renderers/__init__.py"], "/tests/renderers/test_base.py": ["/mackerel/renderers/__init__.py"], "/tests/renderers/test_helpers.py": ["/mackerel/renderers/__init__.py"], "/mackerel/navigation.py": ["/mackerel/content.py", "/mackerel/helpers.py", "/mackerel/site.py"], "/mackerel/site.py": ["/mackerel/__init__.py", "/mackerel/content.py", "/mackerel/helpers.py"], "/mackerel/__init__.py": ["/mackerel/__version__.py"], "/tests/test_content.py": ["/mackerel/__init__.py"], "/mackerel/renderers/template.py": ["/mackerel/__init__.py", "/mackerel/renderers/base.py", "/mackerel/site.py"], "/mackerel/renderers/document.py": ["/mackerel/renderers/base.py", "/mackerel/site.py"], "/tests/test_helpers.py": ["/mackerel/__init__.py"], "/tests/test_site.py": ["/mackerel/renderers/document.py", "/mackerel/renderers/template.py", "/mackerel/site.py"]}
|
33,303
|
pkolios/mackerel
|
refs/heads/master
|
/mackerel/renderers/helpers.py
|
from markupsafe import Markup
def strip_tags(text: str) -> str:
"""Strip the html tags of the given string."""
return Markup(text).striptags()
|
{"/tests/test_config_ini.py": ["/mackerel/__init__.py"], "/mackerel/build.py": ["/mackerel/__init__.py", "/mackerel/navigation.py", "/mackerel/site.py", "/mackerel/helpers.py"], "/mackerel/cli.py": ["/mackerel/__init__.py"], "/tests/renderers/test_document.py": ["/mackerel/__init__.py"], "/tests/test_build.py": ["/mackerel/__init__.py"], "/mackerel/renderers/base.py": ["/mackerel/__init__.py", "/mackerel/site.py"], "/mackerel/content.py": ["/mackerel/__init__.py", "/mackerel/renderers/helpers.py", "/mackerel/renderers/base.py"], "/tests/conftest.py": ["/mackerel/__init__.py"], "/tests/test_cli.py": ["/mackerel/__init__.py"], "/tests/test_navigation.py": ["/mackerel/navigation.py"], "/tests/renderers/test_template.py": ["/mackerel/__init__.py", "/mackerel/renderers/__init__.py"], "/tests/renderers/test_base.py": ["/mackerel/renderers/__init__.py"], "/tests/renderers/test_helpers.py": ["/mackerel/renderers/__init__.py"], "/mackerel/navigation.py": ["/mackerel/content.py", "/mackerel/helpers.py", "/mackerel/site.py"], "/mackerel/site.py": ["/mackerel/__init__.py", "/mackerel/content.py", "/mackerel/helpers.py"], "/mackerel/__init__.py": ["/mackerel/__version__.py"], "/tests/test_content.py": ["/mackerel/__init__.py"], "/mackerel/renderers/template.py": ["/mackerel/__init__.py", "/mackerel/renderers/base.py", "/mackerel/site.py"], "/mackerel/renderers/document.py": ["/mackerel/renderers/base.py", "/mackerel/site.py"], "/tests/test_helpers.py": ["/mackerel/__init__.py"], "/tests/test_site.py": ["/mackerel/renderers/document.py", "/mackerel/renderers/template.py", "/mackerel/site.py"]}
|
33,304
|
pkolios/mackerel
|
refs/heads/master
|
/tests/test_content.py
|
from pathlib import Path
from unittest import mock
import pytest
from mackerel import content, exceptions
@pytest.yield_fixture
def document_path():
yield Path('/tmp/mackerel/test/content/document.md')
@pytest.yield_fixture
def content_path():
yield Path('/tmp/mackerel/test/content')
@pytest.yield_fixture
def renderer():
renderer = mock.Mock()
renderer.extract_metadata.return_value = {
'template': 'document.html', 'title': 'Test post'}
yield renderer
def test_document_init(document_path, content_path, renderer):
with mock.patch('pathlib.Path.read_text') as read_mock:
doc = content.Document(document_path=document_path,
content_path=content_path, renderer=renderer)
read_mock.assert_called_once_with()
renderer.extract_metadata.assert_called_with(text=doc.content)
renderer.render.assert_called_with(doc.content)
assert doc.document_path == document_path
assert doc.relative_path == Path('document.md')
assert doc.template == 'document.html'
assert doc.title == 'Test post'
assert renderer.extract_metadata() == doc.metadata
assert renderer.render() == doc.html
def test_document_eq(document_path, content_path, renderer):
with mock.patch('pathlib.Path.read_text') as read_mock:
doc1 = content.Document(document_path=document_path,
content_path=content_path, renderer=renderer)
doc2 = content.Document(document_path=document_path,
content_path=content_path, renderer=renderer)
assert read_mock.call_count == 2
assert doc1 == doc2
assert doc1 != 'some_string'
def test_document_missing_title(document_path, content_path, renderer):
renderer.extract_metadata.return_value = {'template': 'document.html'}
with mock.patch('pathlib.Path.read_text'):
with pytest.raises(exceptions.DocumentError) as excinfo:
content.Document(document_path=document_path,
content_path=content_path, renderer=renderer)
assert f'Document `{str(document_path)}` is missing a title' in str(
excinfo.value)
def test_document_excerpt(document_path, content_path, renderer):
with mock.patch('pathlib.Path.read_text'):
doc = content.Document(document_path=document_path,
content_path=content_path, renderer=renderer)
doc.html = (
'Tales without end are told of these massive, lonely figures who bore '
'half-seriously, half-mockingly a motto adopted from one of Salvor '
'Hardin\'s epigrams, "Never let your sense of morals prevent you from '
'doing what is right!"')
assert doc.excerpt(width=1) == '...'
assert doc.excerpt(width=5, placeholder='... more') == 'Tales... more'
assert doc.excerpt() == doc.excerpt(0) == (
'Tales without end are told of these massive, lonely figures who bore '
'half-seriously, half-mockingly a motto adopted from one of Salvor '
'Hardin\'s...')
|
{"/tests/test_config_ini.py": ["/mackerel/__init__.py"], "/mackerel/build.py": ["/mackerel/__init__.py", "/mackerel/navigation.py", "/mackerel/site.py", "/mackerel/helpers.py"], "/mackerel/cli.py": ["/mackerel/__init__.py"], "/tests/renderers/test_document.py": ["/mackerel/__init__.py"], "/tests/test_build.py": ["/mackerel/__init__.py"], "/mackerel/renderers/base.py": ["/mackerel/__init__.py", "/mackerel/site.py"], "/mackerel/content.py": ["/mackerel/__init__.py", "/mackerel/renderers/helpers.py", "/mackerel/renderers/base.py"], "/tests/conftest.py": ["/mackerel/__init__.py"], "/tests/test_cli.py": ["/mackerel/__init__.py"], "/tests/test_navigation.py": ["/mackerel/navigation.py"], "/tests/renderers/test_template.py": ["/mackerel/__init__.py", "/mackerel/renderers/__init__.py"], "/tests/renderers/test_base.py": ["/mackerel/renderers/__init__.py"], "/tests/renderers/test_helpers.py": ["/mackerel/renderers/__init__.py"], "/mackerel/navigation.py": ["/mackerel/content.py", "/mackerel/helpers.py", "/mackerel/site.py"], "/mackerel/site.py": ["/mackerel/__init__.py", "/mackerel/content.py", "/mackerel/helpers.py"], "/mackerel/__init__.py": ["/mackerel/__version__.py"], "/tests/test_content.py": ["/mackerel/__init__.py"], "/mackerel/renderers/template.py": ["/mackerel/__init__.py", "/mackerel/renderers/base.py", "/mackerel/site.py"], "/mackerel/renderers/document.py": ["/mackerel/renderers/base.py", "/mackerel/site.py"], "/tests/test_helpers.py": ["/mackerel/__init__.py"], "/tests/test_site.py": ["/mackerel/renderers/document.py", "/mackerel/renderers/template.py", "/mackerel/site.py"]}
|
33,305
|
pkolios/mackerel
|
refs/heads/master
|
/mackerel/renderers/template.py
|
from typing import TYPE_CHECKING
import jinja2
from mackerel import exceptions
from mackerel.renderers.base import TemplateRenderer
if TYPE_CHECKING:
from mackerel.site import Site # noqa
from mackerel import build, content # noqa
class Jinja2Renderer(TemplateRenderer):
def __init__(self, site: 'Site') -> None:
template_path = site.template_path # Type: Path
trim_blocks = site.config.getboolean('Jinja2Renderer', 'TRIM_BLOCKS')
lstrip_blocks = site.config.getboolean(
'Jinja2Renderer', 'LSTRIP_BLOCKS')
self.env = jinja2.Environment(
loader=jinja2.FileSystemLoader(str(template_path.resolve())),
trim_blocks=trim_blocks, lstrip_blocks=lstrip_blocks,)
def render(self, ctx: 'build.Context',
document: 'content.Document') -> str:
try:
template = self.env.get_template(document.template)
except jinja2.exceptions.TemplateNotFound:
raise exceptions.RenderingError(
f'Template file `{document.template}` for document '
f'`{document.document_path}` not found')
return template.render(ctx=ctx, document=document)
|
{"/tests/test_config_ini.py": ["/mackerel/__init__.py"], "/mackerel/build.py": ["/mackerel/__init__.py", "/mackerel/navigation.py", "/mackerel/site.py", "/mackerel/helpers.py"], "/mackerel/cli.py": ["/mackerel/__init__.py"], "/tests/renderers/test_document.py": ["/mackerel/__init__.py"], "/tests/test_build.py": ["/mackerel/__init__.py"], "/mackerel/renderers/base.py": ["/mackerel/__init__.py", "/mackerel/site.py"], "/mackerel/content.py": ["/mackerel/__init__.py", "/mackerel/renderers/helpers.py", "/mackerel/renderers/base.py"], "/tests/conftest.py": ["/mackerel/__init__.py"], "/tests/test_cli.py": ["/mackerel/__init__.py"], "/tests/test_navigation.py": ["/mackerel/navigation.py"], "/tests/renderers/test_template.py": ["/mackerel/__init__.py", "/mackerel/renderers/__init__.py"], "/tests/renderers/test_base.py": ["/mackerel/renderers/__init__.py"], "/tests/renderers/test_helpers.py": ["/mackerel/renderers/__init__.py"], "/mackerel/navigation.py": ["/mackerel/content.py", "/mackerel/helpers.py", "/mackerel/site.py"], "/mackerel/site.py": ["/mackerel/__init__.py", "/mackerel/content.py", "/mackerel/helpers.py"], "/mackerel/__init__.py": ["/mackerel/__version__.py"], "/tests/test_content.py": ["/mackerel/__init__.py"], "/mackerel/renderers/template.py": ["/mackerel/__init__.py", "/mackerel/renderers/base.py", "/mackerel/site.py"], "/mackerel/renderers/document.py": ["/mackerel/renderers/base.py", "/mackerel/site.py"], "/tests/test_helpers.py": ["/mackerel/__init__.py"], "/tests/test_site.py": ["/mackerel/renderers/document.py", "/mackerel/renderers/template.py", "/mackerel/site.py"]}
|
33,306
|
pkolios/mackerel
|
refs/heads/master
|
/mackerel/renderers/document.py
|
from typing import Dict, TYPE_CHECKING
import markdown
import mistune
from mistune_contrib import meta
from mackerel.renderers.base import DocumentRenderer
if TYPE_CHECKING:
from mackerel.site import Site # noqa
class MistuneMarkdownRenderer(DocumentRenderer):
def __init__(self, site: 'Site') -> None:
self.markdown = mistune.Markdown()
def extract_metadata(self, text: str) -> Dict[str, str]:
metadata, _ = meta.parse(text)
return {key.lower(): metadata[key] for key in metadata.keys()}
def render(self, text: str) -> str:
_, text = meta.parse(text)
return self.markdown(text.strip())
class MarkdownMarkdownRenderer(DocumentRenderer):
def __init__(self, site: 'Site') -> None:
ext_list = site.config.get(
'MarkdownMarkdownRenderer', 'extensions', fallback=None)
extensions = tuple(
item.strip() for item in ext_list.split(',') if ext_list)
output_format = site.config.get(
'MarkdownMarkdownRenderer', 'OUTPUT_FORMAT')
self.markdown = markdown.Markdown(
extensions=extensions, output_format=output_format)
def extract_metadata(self, text: str) -> Dict[str, str]:
self.render(text)
for key in self.markdown.Meta:
if len(self.markdown.Meta[key]) == 1:
self.markdown.Meta[key] = self.markdown.Meta[key][0]
return self.markdown.Meta
def render(self, text: str) -> str:
self.markdown.reset()
return self.markdown.convert(text)
|
{"/tests/test_config_ini.py": ["/mackerel/__init__.py"], "/mackerel/build.py": ["/mackerel/__init__.py", "/mackerel/navigation.py", "/mackerel/site.py", "/mackerel/helpers.py"], "/mackerel/cli.py": ["/mackerel/__init__.py"], "/tests/renderers/test_document.py": ["/mackerel/__init__.py"], "/tests/test_build.py": ["/mackerel/__init__.py"], "/mackerel/renderers/base.py": ["/mackerel/__init__.py", "/mackerel/site.py"], "/mackerel/content.py": ["/mackerel/__init__.py", "/mackerel/renderers/helpers.py", "/mackerel/renderers/base.py"], "/tests/conftest.py": ["/mackerel/__init__.py"], "/tests/test_cli.py": ["/mackerel/__init__.py"], "/tests/test_navigation.py": ["/mackerel/navigation.py"], "/tests/renderers/test_template.py": ["/mackerel/__init__.py", "/mackerel/renderers/__init__.py"], "/tests/renderers/test_base.py": ["/mackerel/renderers/__init__.py"], "/tests/renderers/test_helpers.py": ["/mackerel/renderers/__init__.py"], "/mackerel/navigation.py": ["/mackerel/content.py", "/mackerel/helpers.py", "/mackerel/site.py"], "/mackerel/site.py": ["/mackerel/__init__.py", "/mackerel/content.py", "/mackerel/helpers.py"], "/mackerel/__init__.py": ["/mackerel/__version__.py"], "/tests/test_content.py": ["/mackerel/__init__.py"], "/mackerel/renderers/template.py": ["/mackerel/__init__.py", "/mackerel/renderers/base.py", "/mackerel/site.py"], "/mackerel/renderers/document.py": ["/mackerel/renderers/base.py", "/mackerel/site.py"], "/tests/test_helpers.py": ["/mackerel/__init__.py"], "/tests/test_site.py": ["/mackerel/renderers/document.py", "/mackerel/renderers/template.py", "/mackerel/site.py"]}
|
33,307
|
pkolios/mackerel
|
refs/heads/master
|
/tests/test_helpers.py
|
from unittest import mock
from mackerel import helpers
def test_cached_property():
class TestClass:
counter = 0
@helpers.cached_property
def some_property(self):
self.counter += 1
return self.counter
test_object = TestClass()
assert test_object.counter == 0
assert test_object.some_property == 1
assert test_object.some_property == 1
def test_make_config():
with mock.patch('configparser.ConfigParser.read') as cfg_read, \
mock.patch('configparser.ConfigParser.read_file') as cfg_read_file:
helpers.make_config(site_path='/random/path/')
assert cfg_read_file.called
cfg_read.assert_called_once_with('/random/path/.mackerelconfig')
|
{"/tests/test_config_ini.py": ["/mackerel/__init__.py"], "/mackerel/build.py": ["/mackerel/__init__.py", "/mackerel/navigation.py", "/mackerel/site.py", "/mackerel/helpers.py"], "/mackerel/cli.py": ["/mackerel/__init__.py"], "/tests/renderers/test_document.py": ["/mackerel/__init__.py"], "/tests/test_build.py": ["/mackerel/__init__.py"], "/mackerel/renderers/base.py": ["/mackerel/__init__.py", "/mackerel/site.py"], "/mackerel/content.py": ["/mackerel/__init__.py", "/mackerel/renderers/helpers.py", "/mackerel/renderers/base.py"], "/tests/conftest.py": ["/mackerel/__init__.py"], "/tests/test_cli.py": ["/mackerel/__init__.py"], "/tests/test_navigation.py": ["/mackerel/navigation.py"], "/tests/renderers/test_template.py": ["/mackerel/__init__.py", "/mackerel/renderers/__init__.py"], "/tests/renderers/test_base.py": ["/mackerel/renderers/__init__.py"], "/tests/renderers/test_helpers.py": ["/mackerel/renderers/__init__.py"], "/mackerel/navigation.py": ["/mackerel/content.py", "/mackerel/helpers.py", "/mackerel/site.py"], "/mackerel/site.py": ["/mackerel/__init__.py", "/mackerel/content.py", "/mackerel/helpers.py"], "/mackerel/__init__.py": ["/mackerel/__version__.py"], "/tests/test_content.py": ["/mackerel/__init__.py"], "/mackerel/renderers/template.py": ["/mackerel/__init__.py", "/mackerel/renderers/base.py", "/mackerel/site.py"], "/mackerel/renderers/document.py": ["/mackerel/renderers/base.py", "/mackerel/site.py"], "/tests/test_helpers.py": ["/mackerel/__init__.py"], "/tests/test_site.py": ["/mackerel/renderers/document.py", "/mackerel/renderers/template.py", "/mackerel/site.py"]}
|
33,308
|
pkolios/mackerel
|
refs/heads/master
|
/tests/test_site.py
|
from pathlib import Path
from mackerel.renderers.document import MarkdownMarkdownRenderer
from mackerel.renderers.template import Jinja2Renderer
from mackerel.site import Site
def test_site_init(site_path):
site = Site(site_path)
assert site.config['mackerel']
assert site.content_path == site_path / Path('content')
assert site.output_path == site_path / Path('_build')
assert site.template_path == site_path / Path('template')
assert len(site.document_files) == 3
assert len(site.other_content_files) == 1
assert len(site.other_template_files) == 1
assert isinstance(site.document_renderer, MarkdownMarkdownRenderer)
assert isinstance(site.template_renderer, Jinja2Renderer)
assert len(site.documents) == 2
|
{"/tests/test_config_ini.py": ["/mackerel/__init__.py"], "/mackerel/build.py": ["/mackerel/__init__.py", "/mackerel/navigation.py", "/mackerel/site.py", "/mackerel/helpers.py"], "/mackerel/cli.py": ["/mackerel/__init__.py"], "/tests/renderers/test_document.py": ["/mackerel/__init__.py"], "/tests/test_build.py": ["/mackerel/__init__.py"], "/mackerel/renderers/base.py": ["/mackerel/__init__.py", "/mackerel/site.py"], "/mackerel/content.py": ["/mackerel/__init__.py", "/mackerel/renderers/helpers.py", "/mackerel/renderers/base.py"], "/tests/conftest.py": ["/mackerel/__init__.py"], "/tests/test_cli.py": ["/mackerel/__init__.py"], "/tests/test_navigation.py": ["/mackerel/navigation.py"], "/tests/renderers/test_template.py": ["/mackerel/__init__.py", "/mackerel/renderers/__init__.py"], "/tests/renderers/test_base.py": ["/mackerel/renderers/__init__.py"], "/tests/renderers/test_helpers.py": ["/mackerel/renderers/__init__.py"], "/mackerel/navigation.py": ["/mackerel/content.py", "/mackerel/helpers.py", "/mackerel/site.py"], "/mackerel/site.py": ["/mackerel/__init__.py", "/mackerel/content.py", "/mackerel/helpers.py"], "/mackerel/__init__.py": ["/mackerel/__version__.py"], "/tests/test_content.py": ["/mackerel/__init__.py"], "/mackerel/renderers/template.py": ["/mackerel/__init__.py", "/mackerel/renderers/base.py", "/mackerel/site.py"], "/mackerel/renderers/document.py": ["/mackerel/renderers/base.py", "/mackerel/site.py"], "/tests/test_helpers.py": ["/mackerel/__init__.py"], "/tests/test_site.py": ["/mackerel/renderers/document.py", "/mackerel/renderers/template.py", "/mackerel/site.py"]}
|
33,309
|
pkolios/mackerel
|
refs/heads/master
|
/mackerel/__version__.py
|
__version__ = '0.2'
__title__ = 'Mackerel'
__description__ = 'Minimal static site generator built with type annotations'
__author__ = 'Paris Kolios'
__author_email__ = 'paris@enc.io'
__url__ = 'http://mackerel.sh'
__license__ = 'MIT'
__copyright__ = 'Copyright 2017 Paris Kolios'
|
{"/tests/test_config_ini.py": ["/mackerel/__init__.py"], "/mackerel/build.py": ["/mackerel/__init__.py", "/mackerel/navigation.py", "/mackerel/site.py", "/mackerel/helpers.py"], "/mackerel/cli.py": ["/mackerel/__init__.py"], "/tests/renderers/test_document.py": ["/mackerel/__init__.py"], "/tests/test_build.py": ["/mackerel/__init__.py"], "/mackerel/renderers/base.py": ["/mackerel/__init__.py", "/mackerel/site.py"], "/mackerel/content.py": ["/mackerel/__init__.py", "/mackerel/renderers/helpers.py", "/mackerel/renderers/base.py"], "/tests/conftest.py": ["/mackerel/__init__.py"], "/tests/test_cli.py": ["/mackerel/__init__.py"], "/tests/test_navigation.py": ["/mackerel/navigation.py"], "/tests/renderers/test_template.py": ["/mackerel/__init__.py", "/mackerel/renderers/__init__.py"], "/tests/renderers/test_base.py": ["/mackerel/renderers/__init__.py"], "/tests/renderers/test_helpers.py": ["/mackerel/renderers/__init__.py"], "/mackerel/navigation.py": ["/mackerel/content.py", "/mackerel/helpers.py", "/mackerel/site.py"], "/mackerel/site.py": ["/mackerel/__init__.py", "/mackerel/content.py", "/mackerel/helpers.py"], "/mackerel/__init__.py": ["/mackerel/__version__.py"], "/tests/test_content.py": ["/mackerel/__init__.py"], "/mackerel/renderers/template.py": ["/mackerel/__init__.py", "/mackerel/renderers/base.py", "/mackerel/site.py"], "/mackerel/renderers/document.py": ["/mackerel/renderers/base.py", "/mackerel/site.py"], "/tests/test_helpers.py": ["/mackerel/__init__.py"], "/tests/test_site.py": ["/mackerel/renderers/document.py", "/mackerel/renderers/template.py", "/mackerel/site.py"]}
|
33,333
|
jeffkit/molp
|
refs/heads/master
|
/molp/tests.py
|
#encoding=utf-8
from django.test import TestCase
from molp.models import Parameter
from datetime import datetime
from datetime import timedelta
import time
import calendar
class ParameterManagerTestCase(TestCase):
def test_app_only(self):
"""测试参数只指定app
"""
p = Parameter(app='net.jf.test', name='name', value='jeff')
p.save()
p = Parameter(app='net.jf.test', name='gender', value='male')
p.save()
ps = Parameter.objects.get_parameters('net.jf.test', version='1.0',
channel='appstroe')
self.assertEqual(2, len(ps))
def test_app_version_not_match(self):
"""定义具体版本号的参数,但请求不匹配。返回默认参数。
"""
p = Parameter(app='net.jf.test', name='name', value='jeff')
p.save()
p = Parameter(app='net.jf.test', name='name', value='vera',
version='1.1')
p.save()
ps = Parameter.objects.get_parameters('net.jf.test', version='1.0',
channel='appstroe')
self.assertEqual(1, len(ps))
self.assertEqual('jeff', ps[0].value)
def test_app_version_match(self):
"""定义具体版本号的参数,请求亦匹配
"""
p = Parameter(app='net.jf.test', name='name', value='jeff')
p.save()
p = Parameter(app='net.jf.test', name='name', value='vera',
version='1.1')
p.save()
ps = Parameter.objects.get_parameters('net.jf.test', version='1.1',
channel='appstroe')
self.assertEqual(1, len(ps))
self.assertEqual('vera', ps[0].value)
def test_match_not_complete(self):
"""部分参数匹配,但并非全匹配,视为不匹配。
"""
p = Parameter(app='net.jf.test', name='name', value='jeff')
p.save()
p = Parameter(app='net.jf.test', name='name', value='vera',
version='1.1', channel='pp')
p.save()
ps = Parameter.objects.get_parameters('net.jf.test', version='1.1',
channel='appstroe')
self.assertEqual(1, len(ps))
self.assertEqual('jeff', ps[0].value)
def test_too_early_to_see(self):
"""参数定义生效时间,在生效前参数不可见。生效后可见。
"""
p = Parameter(app='net.jf.test', name='name', value='jeff')
p.save()
p = Parameter(app='net.jf.test', name='gender', value='male',
version='1.1',
effect_time=datetime.now() + timedelta(days=1))
p.save()
ps = Parameter.objects.get_parameters('net.jf.test', version='1.1',
channel='appstore',
since=time.time())
self.assertEqual(1, len(ps))
self.assertEqual('name', ps[0].name)
ps = Parameter.objects.get_parameters(
'net.jf.test', version='1.1',
channel='appstore',
since=time.time() + 172800)
self.assertEqual(2, len(ps))
def test_return_new_parameter(self):
"""只返回增量数据
"""
p = Parameter(app='net.jf.test', name='name', value='jeff')
p.save()
mt = p.modify_time
time.sleep(1)
p = Parameter(app='net.jf.test', name='gender', value='male',
version='1.1')
p.save()
ts = calendar.timegm(mt.timetuple())
ps = Parameter.objects.get_parameters('net.jf.test', version='1.1',
channel='appstore',
last_modify=ts + 1)
self.assertEqual(1, len(ps))
self.assertEqual('gender', ps[0].name)
|
{"/molp/tests.py": ["/molp/models.py"], "/molp/admin.py": ["/molp/models.py"]}
|
33,334
|
jeffkit/molp
|
refs/heads/master
|
/molp/models.py
|
#encoding=utf-8
from django.db import models
from django.conf import settings
from datetime import datetime
import calendar
class ParameterManager(models.Manager):
def parameter_compare(self, one, other):
if getattr(one, 'factor', 0) > getattr(other, 'factor', 0):
return -1
elif getattr(one, 'factor', 0) == getattr(other, 'factor', 0):
if getattr(one, 'arg_num', 0) <= getattr(other, 'arg_num', 0):
return 1
else:
return -1
else:
return 1
def get_parameters(self, app, version=None, channel=None,
since=None, last_modify=None):
"""获得在线参数
- app,应用id
- version,应用的版本号
- channel,应用的渠道
- since,版本对应应用的安装时间, utc时间戳。
- last_modify, 上次成功更新的最新时间。utc时间戳。
"""
parameters = self.get_query_set().filter(app=app)
if last_modify:
last_modify = datetime.fromtimestamp(last_modify)
parameters = parameters.filter(modify_time__gte=last_modify)
parameters = [v for v in parameters if
v.calculate_factor(version, channel, since) >= 0]
data, mdata = {}, {}
for p in parameters:
if p.name in data:
if isinstance(data[p.name], list):
data[p.name].append(p)
else:
data[p.name] = [data[p.name], p]
mdata[p.name] = data[p.name]
else:
data[p.name] = p
if mdata:
for key, value in mdata.iteritems():
value = sorted(value, cmp=self.parameter_compare)
data[key] = value[0]
return data.values()
class Parameter(models.Model):
app = models.CharField(u'应用', max_length=100,
choices=settings.APP_DEFINITION)
version = models.CharField(u'版本', max_length=20, null=True, blank=True)
channel = models.CharField(u'渠道', max_length=20, null=True, blank=True)
name = models.CharField(u'参数名', max_length=255)
value = models.CharField(u'参数值', max_length=1000)
create_time = models.DateTimeField(auto_now_add=True, editable=False)
modify_time = models.DateTimeField(auto_now=True, editable=False)
effect_time = models.DateTimeField(null=True, blank=True)
objects = ParameterManager()
class Meta:
verbose_name = u'在线参数'
verbose_name_plural = u'在线参数'
def __unicode__(self):
return self.name
def calculate_factor(self, version, channel, since):
"""该参数与条件的匹配度。
如果参数不完全匹配视为不匹配。
如果参数匹配,则返回参数个数。
- 0 无多余匹配
"""
factor = 0
number = 0
if self.version:
number += 1
if version != self.version:
return -1
factor += 1
if self.channel:
number += 1
if channel != self.channel:
return -1
factor += 1
if self.effect_time:
number += 1
if not since or (
since < calendar.timegm(self.effect_time.timetuple())):
return -1
factor += 1
self.arg_num = number
self.factor = factor
return factor
|
{"/molp/tests.py": ["/molp/models.py"], "/molp/admin.py": ["/molp/models.py"]}
|
33,335
|
jeffkit/molp
|
refs/heads/master
|
/molp/admin.py
|
#encoding=utf-8
from django.contrib import admin
from molp.models import Parameter
class ParameterAdmin(admin.ModelAdmin):
list_display = ('name', 'value', 'version', 'channel', 'effect_time')
list_filter = ('app', 'channel', 'version')
search_fields = ('name', 'value')
admin.site.register(Parameter, ParameterAdmin)
|
{"/molp/tests.py": ["/molp/models.py"], "/molp/admin.py": ["/molp/models.py"]}
|
33,336
|
jeffkit/molp
|
refs/heads/master
|
/setup.py
|
#!/usr/bin/env python
from setuptools import setup, find_packages
from molp import VERSION
url="https://github.com/jeffkit/molp"
long_description="online parameters for app mobile"
setup(name="molp",
version=VERSION,
description=long_description,
maintainer="jeff kit",
maintainer_email="bbmyth@gmail.com",
url = url,
long_description=long_description,
packages=find_packages('.'),
)
|
{"/molp/tests.py": ["/molp/models.py"], "/molp/admin.py": ["/molp/models.py"]}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.