index int64 0 1,000k | blob_id stringlengths 40 40 | code stringlengths 7 10.4M |
|---|---|---|
class User:
    """A simple bank account; every instance is registered on the class."""

    # shared registry of all accounts ever opened
    account = []

    def __init__(self, balance, int_rate):
        self.balance = balance
        self.int_rate = int_rate
        User.account.append(self)

    def dep(self, amount):
        """Add `amount` to the balance; chainable."""
        self.balance = self.balance + amount
        return self

    def make_withdrawal(self, amount):
        """Take `amount` out when covered; otherwise charge a $5 penalty."""
        if self.balance - amount >= 0:
            self.balance = self.balance - amount
        else:
            print("Insufficient funds:Charging a $5 fee")
            self.balance = self.balance - 5
        return self

    def display_account_info(self):
        """Print the current balance; chainable."""
        print(self.balance)
        return self

    def yield_interest(self):
        """Apply one period of simple interest; chainable."""
        self.balance = self.balance + self.balance * self.int_rate
        return self

    @classmethod
    def we_call_cls(cls):
        """Print the balance of every registered account."""
        for acct in cls.account:
            acct.display_account_info()
class Jedi:
    """A named character who owns two bank accounts keyed by side."""

    def __init__(self, name):
        self.name = name
        # creating a Jedi also registers both Users in User.account
        self.account = {
            "Grey": User(5000, .3),
            "light": User(300, .33)
        }
# Demo: build one Jedi (which also opens two User accounts), show its
# name, then print every registered account's balance via the classmethod.
prey=Jedi('prey')
print(prey.name)
prey.we_call_cls()
|
6,001 | 72c1226d40b3cdce29ef28493344c3cf68892149 | # Generated by Django 3.0.4 on 2020-03-29 19:51
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: make several fields on `information` and
    `myclass` optional (blank=True, null=True)."""

    dependencies = [
        ('index', '0003_auto_20200330_0444'),
    ]

    operations = [
        migrations.AlterField(
            model_name='information',
            name='comment',
            field=models.CharField(blank=True, max_length=200, null=True),
        ),
        migrations.AlterField(
            model_name='information',
            name='picture',
            field=models.ImageField(blank=True, null=True, upload_to='images/'),
        ),
        # max_length=1: day is stored as a single-character code --
        # presumably a weekday index; confirm against the model definition.
        migrations.AlterField(
            model_name='myclass',
            name='day',
            field=models.CharField(blank=True, max_length=1, null=True),
        ),
        migrations.AlterField(
            model_name='myclass',
            name='period',
            field=models.CharField(blank=True, max_length=10, null=True),
        ),
        migrations.AlterField(
            model_name='myclass',
            name='place',
            field=models.CharField(blank=True, max_length=50, null=True),
        ),
    ]
|
6,002 | 780dc49c3eaef3fb25ca0aac760326b1c3adc633 | #!/usr/bin/env python
# Copyright (C) 2014 Open Data ("Open Data" refers to
# one or more of the following companies: Open Data Partners LLC,
# Open Data Research LLC, or Open Data Capital LLC.)
#
# This file is part of Hadrian.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import math
from titus.fcn import Fcn
from titus.fcn import LibFcn
from titus.signature import Sig
from titus.signature import Sigs
from titus.datatype import *
from titus.errors import *
from titus.util import callfcn, div
import titus.P as P
from functools import reduce
provides = {}

def provide(fcn):
    """Register a PFA library function under its fully qualified name."""
    provides[fcn.name] = fcn
prefix = "la."
def np():
    """Return the numpy module, imported lazily on first use."""
    import numpy as _numpy
    return _numpy

def arraysToMatrix(x):
    """List-of-lists -> numpy matrix of doubles."""
    return np().matrix(x, dtype=np().double)

def arrayToRowVector(x):
    """Flat list -> numpy column vector (n x 1)."""
    return np().matrix(x, dtype=np().double).T

def rowVectorToArray(x):
    """Column vector -> flat Python list."""
    return x.T.tolist()[0]

def matrixToArrays(x):
    """numpy matrix -> list of lists."""
    return x.tolist()

def mapsToMatrix(x, rows, cols):
    """Map-of-maps -> numpy matrix in the given key order; missing cells are 0.0."""
    data = [[x.get(r, {}).get(c, 0.0) for c in cols] for r in rows]
    return np().matrix(data, dtype=np().double)

def mapToRowVector(x, keys):
    """Map -> numpy column vector ordered by `keys`; missing entries are 0.0."""
    return np().matrix([x.get(k, 0.0) for k in keys], dtype=np().double).T

def rowVectorToMap(x, keys):
    """Column vector -> dict keyed by `keys`."""
    return dict(list(zip(keys, x.T.tolist()[0])))

def matrixToMaps(x, rows, cols):
    """numpy matrix -> map-of-maps labeled by `rows` and `cols`."""
    return dict((r, dict(list(zip(cols, values)))) for r, values in zip(rows, x.tolist()))
def raggedArray(x):
    """True when the inner lists of x differ in length."""
    lengths = [len(row) for row in x]
    return max(lengths) != min(lengths)

def raggedMap(x):
    """True when the inner maps of x differ in size."""
    sizes = set(len(row) for row in list(x.values()))
    return len(sizes) != 1
class MapApply(LibFcn):
    """la.map -- apply a scalar function to every element of a matrix."""
    name = prefix + "map"
    sig = Sigs([Sig([{"x": P.Array(P.Array(P.Double()))}, {"fcn": P.Fcn([P.Double()], P.Double())}], P.Array(P.Array(P.Double()))),
                Sig([{"x": P.Map(P.Map(P.Double()))}, {"fcn": P.Fcn([P.Double()], P.Double())}], P.Map(P.Map(P.Double())))])
    errcodeBase = 24000
    def __call__(self, state, scope, pos, paramTypes, x, fcn):
        if isinstance(x, (list, tuple)) and all(isinstance(row, (list, tuple)) for row in x):
            # array-of-arrays: element-wise application, shape preserved
            return [[callfcn(state, scope, fcn, [value]) for value in row] for row in x]
        elif isinstance(x, dict) and all(isinstance(row, dict) for row in x.values()):
            # map-of-maps: row and column keys preserved
            return dict((r, dict((c, callfcn(state, scope, fcn, [value])) for c, value in row.items())) for r, row in x.items())
provide(MapApply())
class Scale(LibFcn):
    """la.scale -- multiply every element of a vector or matrix by alpha."""
    name = prefix + "scale"
    sig = Sigs([Sig([{"x": P.Array(P.Double())}, {"alpha": P.Double()}], P.Array(P.Double())),
                Sig([{"x": P.Array(P.Array(P.Double()))}, {"alpha": P.Double()}], P.Array(P.Array(P.Double()))),
                Sig([{"x": P.Map(P.Double())}, {"alpha": P.Double()}], P.Map(P.Double())),
                Sig([{"x": P.Map(P.Map(P.Double()))}, {"alpha": P.Double()}], P.Map(P.Map(P.Double())))])
    errcodeBase = 24010
    def __call__(self, state, scope, pos, paramTypes, x, alpha):
        # dispatch on the runtime container shape: matrix before vector
        if isinstance(x, (list, tuple)) and all(isinstance(row, (list, tuple)) for row in x):
            return [[value * alpha for value in row] for row in x]
        elif isinstance(x, (list, tuple)):
            return [value * alpha for value in x]
        elif isinstance(x, dict) and all(isinstance(row, dict) for row in x.values()):
            return dict((r, dict((c, value * alpha) for c, value in row.items())) for r, row in x.items())
        else:
            return dict((k, value * alpha) for k, value in x.items())
provide(Scale())
class ZipMap(LibFcn):
    """la.zipmap -- combine two matrices element-wise with a two-argument function."""
    name = prefix + "zipmap"
    sig = Sigs([Sig([{"x": P.Array(P.Array(P.Double()))}, {"y": P.Array(P.Array(P.Double()))}, {"fcn": P.Fcn([P.Double(), P.Double()], P.Double())}], P.Array(P.Array(P.Double()))),
                Sig([{"x": P.Map(P.Map(P.Double()))}, {"y": P.Map(P.Map(P.Double()))}, {"fcn": P.Fcn([P.Double(), P.Double()], P.Double())}], P.Map(P.Map(P.Double())))])
    errcodeBase = 24020
    def __call__(self, state, scope, pos, paramTypes, x, y, fcn):
        # array-of-arrays case: both operands must have identical shape
        if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple)) for xi in x) and \
           isinstance(y, (list, tuple)) and all(isinstance(yi, (list, tuple)) for yi in y):
            if len(x) != len(y) or any(len(xi) != len(yi) for xi, yi in zip(x, y)):
                raise PFARuntimeException("misaligned matrices", self.errcodeBase + 0, self.name, pos)
            return [[callfcn(state, scope, fcn, [xj, yj]) for xj, yj in zip(xi, yi)] for xi, yi in zip(x, y)]
        # map-of-maps case: iterate the union of keys, missing cells default to 0.0
        elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in list(x.keys())) and \
             isinstance(y, dict) and all(isinstance(y[i], dict) for i in list(y.keys())):
            rows = rowKeys(x).union(rowKeys(y))
            cols = colKeys(x).union(colKeys(y))
            return dict((i, dict((j, callfcn(state, scope, fcn, [x.get(i, {}).get(j, 0.0), y.get(i, {}).get(j, 0.0)])) for j in cols)) for i in rows)
provide(ZipMap())
class Add(LibFcn):
    """la.add -- element-wise sum of two vectors or matrices."""
    name = prefix + "add"
    sig = Sigs([Sig([{"x": P.Array(P.Double())}, {"y": P.Array(P.Double())}], P.Array(P.Double())),
                Sig([{"x": P.Array(P.Array(P.Double()))}, {"y": P.Array(P.Array(P.Double()))}], P.Array(P.Array(P.Double()))),
                Sig([{"x": P.Map(P.Double())}, {"y": P.Map(P.Double())}], P.Map(P.Double())),
                Sig([{"x": P.Map(P.Map(P.Double()))}, {"y": P.Map(P.Map(P.Double()))}], P.Map(P.Map(P.Double())))])
    errcodeBase = 24030
    def __call__(self, state, scope, pos, paramTypes, x, y):
        if isinstance(x, (list, tuple)) and all(isinstance(r, (list, tuple)) for r in x) and \
           isinstance(y, (list, tuple)) and all(isinstance(r, (list, tuple)) for r in y):
            # matrix + matrix: shapes must match exactly
            if len(x) != len(y) or any(len(a) != len(b) for a, b in zip(x, y)):
                raise PFARuntimeException("misaligned matrices", self.errcodeBase + 0, self.name, pos)
            return [[a + b for a, b in zip(ra, rb)] for ra, rb in zip(x, y)]
        elif isinstance(x, (list, tuple)) and isinstance(y, (list, tuple)):
            # vector + vector
            if len(x) != len(y):
                raise PFARuntimeException("misaligned matrices", self.errcodeBase + 0, self.name, pos)
            return [a + b for a, b in zip(x, y)]
        elif isinstance(x, dict) and all(isinstance(r, dict) for r in x.values()) and \
             isinstance(y, dict) and all(isinstance(r, dict) for r in y.values()):
            # map matrices: union of keys, missing cells are 0.0
            rows = rowKeys(x).union(rowKeys(y))
            cols = colKeys(x).union(colKeys(y))
            return dict((r, dict((c, x.get(r, {}).get(c, 0.0) + y.get(r, {}).get(c, 0.0)) for c in cols)) for r in rows)
        else:
            # map vectors
            rows = rowKeys(x).union(rowKeys(y))
            return dict((r, x.get(r, 0.0) + y.get(r, 0.0)) for r in rows)
provide(Add())
class Sub(LibFcn):
    """la.sub -- element-wise difference of two vectors or matrices."""
    name = prefix + "sub"
    sig = Sigs([Sig([{"x": P.Array(P.Double())}, {"y": P.Array(P.Double())}], P.Array(P.Double())),
                Sig([{"x": P.Array(P.Array(P.Double()))}, {"y": P.Array(P.Array(P.Double()))}], P.Array(P.Array(P.Double()))),
                Sig([{"x": P.Map(P.Double())}, {"y": P.Map(P.Double())}], P.Map(P.Double())),
                Sig([{"x": P.Map(P.Map(P.Double()))}, {"y": P.Map(P.Map(P.Double()))}], P.Map(P.Map(P.Double())))])
    errcodeBase = 24040
    def __call__(self, state, scope, pos, paramTypes, x, y):
        if isinstance(x, (list, tuple)) and all(isinstance(r, (list, tuple)) for r in x) and \
           isinstance(y, (list, tuple)) and all(isinstance(r, (list, tuple)) for r in y):
            # matrix - matrix: shapes must match exactly
            if len(x) != len(y) or any(len(a) != len(b) for a, b in zip(x, y)):
                raise PFARuntimeException("misaligned matrices", self.errcodeBase + 0, self.name, pos)
            return [[a - b for a, b in zip(ra, rb)] for ra, rb in zip(x, y)]
        elif isinstance(x, (list, tuple)) and isinstance(y, (list, tuple)):
            # vector - vector
            if len(x) != len(y):
                raise PFARuntimeException("misaligned matrices", self.errcodeBase + 0, self.name, pos)
            return [a - b for a, b in zip(x, y)]
        elif isinstance(x, dict) and all(isinstance(r, dict) for r in x.values()) and \
             isinstance(y, dict) and all(isinstance(r, dict) for r in y.values()):
            # map matrices: union of keys, missing cells are 0.0
            rows = rowKeys(x).union(rowKeys(y))
            cols = colKeys(x).union(colKeys(y))
            return dict((r, dict((c, x.get(r, {}).get(c, 0.0) - y.get(r, {}).get(c, 0.0)) for c in cols)) for r in rows)
        else:
            # map vectors
            rows = rowKeys(x).union(rowKeys(y))
            return dict((r, x.get(r, 0.0) - y.get(r, 0.0)) for r in rows)
provide(Sub())
class Dot(LibFcn):
    """la.dot -- matrix-matrix or matrix-vector product.

    Unlike the other functions here, dispatch is on the DECLARED PFA type of
    the second argument (paramTypes[1]), not on the runtime container shape.
    """
    name = prefix + "dot"
    sig = Sigs([Sig([{"x": P.Array(P.Array(P.Double()))}, {"y": P.Array(P.Double())}], P.Array(P.Double())),
                Sig([{"x": P.Map(P.Map(P.Double()))}, {"y": P.Map(P.Double())}], P.Map(P.Double())),
                Sig([{"x": P.Array(P.Array(P.Double()))}, {"y": P.Array(P.Array(P.Double()))}], P.Array(P.Array(P.Double()))),
                Sig([{"x": P.Map(P.Map(P.Double()))}, {"y": P.Map(P.Map(P.Double()))}], P.Map(P.Map(P.Double())))])
    errcodeBase = 24050
    def __call__(self, state, scope, pos, paramTypes, x, y):
        if paramTypes[1]["type"] == "array":
            if isinstance(paramTypes[1]["items"], dict) and paramTypes[1]["items"]["type"] == "array":
                # array matrix-matrix case
                # non-finite values are detected up front; the error is raised
                # only after the shape check so "too few rows/cols" wins
                bad = any(any(math.isnan(z) or math.isinf(z) for z in row) for row in x) or \
                      any(any(math.isnan(z) or math.isinf(z) for z in row) for row in y)
                xmat = arraysToMatrix(x)
                ymat = arraysToMatrix(y)
                if xmat.shape[0] == 0 or xmat.shape[1] == 0 or ymat.shape[0] == 0 or ymat.shape[1] == 0:
                    raise PFARuntimeException("too few rows/cols", self.errcodeBase + 1, self.name, pos)
                try:
                    if bad: raise PFARuntimeException("contains non-finite value", self.errcodeBase + 2, self.name, pos)
                    return matrixToArrays(np().dot(xmat, ymat))
                except ValueError:
                    # numpy raises ValueError on incompatible inner dimensions
                    raise PFARuntimeException("misaligned matrices", self.errcodeBase + 0, self.name, pos)
            else:
                # array matrix-vector case
                bad = any(any(math.isnan(z) or math.isinf(z) for z in row) for row in x) or \
                      any(math.isnan(z) or math.isinf(z) for z in y)
                xmat = arraysToMatrix(x)
                ymat = arrayToRowVector(y)
                if xmat.shape[0] == 0 or xmat.shape[1] == 0 or ymat.shape[0] == 0 or ymat.shape[1] == 0:
                    raise PFARuntimeException("too few rows/cols", self.errcodeBase + 1, self.name, pos)
                try:
                    if bad: raise PFARuntimeException("contains non-finite value", self.errcodeBase + 2, self.name, pos)
                    return rowVectorToArray(np().dot(xmat, ymat))
                except ValueError:
                    raise PFARuntimeException("misaligned matrices", self.errcodeBase + 0, self.name, pos)
        elif paramTypes[1]["type"] == "map":
            if isinstance(paramTypes[1]["values"], dict) and paramTypes[1]["values"]["type"] == "map":
                # map matrix-matrix case: the shared ("inner") dimension is the
                # union of x's columns and y's rows; missing cells become 0.0,
                # so map products cannot be misaligned
                bad = any(any(math.isnan(z) or math.isinf(z) for z in list(row.values())) for row in list(x.values())) or \
                      any(any(math.isnan(z) or math.isinf(z) for z in list(row.values())) for row in list(y.values()))
                rows = list(rowKeys(x))
                inter = list(colKeys(x).union(rowKeys(y)))
                cols = list(colKeys(y))
                xmat = mapsToMatrix(x, rows, inter)
                ymat = mapsToMatrix(y, inter, cols)
                if xmat.shape[0] == 0 or xmat.shape[1] == 0 or ymat.shape[0] == 0 or ymat.shape[1] == 0:
                    raise PFARuntimeException("too few rows/cols", self.errcodeBase + 1, self.name, pos)
                if bad: raise PFARuntimeException("contains non-finite value", self.errcodeBase + 2, self.name, pos)
                return matrixToMaps(np().dot(xmat, ymat), rows, cols)
            else:
                # map matrix-vector case: result is keyed by x's row labels
                bad = any(any(math.isnan(z) or math.isinf(z) for z in list(row.values())) for row in list(x.values())) or \
                      any(math.isnan(z) or math.isinf(z) for z in list(y.values()))
                rows = list(rowKeys(x))
                cols = list(colKeys(x).union(rowKeys(y)))
                xmat = mapsToMatrix(x, rows, cols)
                ymat = mapToRowVector(y, cols)
                if xmat.shape[0] == 0 or xmat.shape[1] == 0 or ymat.shape[0] == 0 or ymat.shape[1] == 0:
                    raise PFARuntimeException("too few rows/cols", self.errcodeBase + 1, self.name, pos)
                if bad: raise PFARuntimeException("contains non-finite value", self.errcodeBase + 2, self.name, pos)
                return rowVectorToMap(np().dot(xmat, ymat), rows)
provide(Dot())
class Transpose(LibFcn):
    """la.transpose -- swap the rows and columns of a matrix."""
    name = prefix + "transpose"
    sig = Sigs([Sig([{"x": P.Array(P.Array(P.Double()))}], P.Array(P.Array(P.Double()))),
                Sig([{"x": P.Map(P.Map(P.Double()))}], P.Map(P.Map(P.Double())))])
    errcodeBase = 24060
    def __call__(self, state, scope, pos, paramTypes, x):
        if isinstance(x, (list, tuple)) and all(isinstance(row, (list, tuple)) for row in x):
            nrow = len(x)
            if nrow < 1:
                raise PFARuntimeException("too few rows/cols", self.errcodeBase + 0, self.name, pos)
            ncol = len(x[0])
            if ncol < 1:
                raise PFARuntimeException("too few rows/cols", self.errcodeBase + 0, self.name, pos)
            if raggedArray(x):
                raise PFARuntimeException("ragged columns", self.errcodeBase + 1, self.name, pos)
            return [[row[c] for row in x] for c in range(ncol)]
        elif isinstance(x, dict) and all(isinstance(row, dict) for row in x.values()):
            rows = rowKeys(x)
            cols = colKeys(x)
            if len(rows) < 1 or len(cols) < 1:
                raise PFARuntimeException("too few rows/cols", self.errcodeBase + 0, self.name, pos)
            if raggedMap(x):
                raise PFARuntimeException("ragged columns", self.errcodeBase + 1, self.name, pos)
            return dict((c, dict((r, x[r][c]) for r in rows)) for c in cols)
provide(Transpose())
class Inverse(LibFcn):
    """la.inverse -- matrix inverse via numpy's .I (the pseudo-inverse when
    the input is not square)."""
    name = prefix + "inverse"
    sig = Sigs([Sig([{"x": P.Array(P.Array(P.Double()))}], P.Array(P.Array(P.Double()))),
                Sig([{"x": P.Map(P.Map(P.Double()))}], P.Map(P.Map(P.Double())))])
    errcodeBase = 24070
    def __call__(self, state, scope, pos, paramTypes, x):
        if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple)) for xi in x):
            rows = len(x)
            if rows < 1:
                raise PFARuntimeException("too few rows/cols", self.errcodeBase + 0, self.name, pos)
            cols = len(x[0])
            if cols < 1:
                raise PFARuntimeException("too few rows/cols", self.errcodeBase + 0, self.name, pos)
            if raggedArray(x):
                raise PFARuntimeException("ragged columns", self.errcodeBase + 1, self.name, pos)
            return matrixToArrays(arraysToMatrix(x).I)
        elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in list(x.keys())):
            rows = list(rowKeys(x))
            cols = list(colKeys(x))
            if len(rows) < 1 or len(cols) < 1:
                raise PFARuntimeException("too few rows/cols", self.errcodeBase + 0, self.name, pos)
            xmat = mapsToMatrix(x, rows, cols)
            # key roles are deliberately swapped: the (pseudo-)inverse of an
            # r x c matrix is c x r, so cols label its rows and rows its columns
            return matrixToMaps(xmat.I, cols, rows)
provide(Inverse())
class Trace(LibFcn):
    """la.trace -- sum of the diagonal elements."""
    name = prefix + "trace"
    sig = Sigs([Sig([{"x": P.Array(P.Array(P.Double()))}], P.Double()),
                Sig([{"x": P.Map(P.Map(P.Double()))}], P.Double())])
    errcodeBase = 24080
    def __call__(self, state, scope, pos, paramTypes, x):
        if isinstance(x, (list, tuple)) and all(isinstance(row, (list, tuple)) for row in x):
            if len(x) == 0:
                # an empty matrix has trace 0 by convention here
                return 0.0
            if raggedArray(x):
                raise PFARuntimeException("ragged columns", self.errcodeBase + 0, self.name, pos)
            diag = min(len(x), len(x[0]))
            return sum(x[i][i] for i in range(diag))
        elif isinstance(x, dict) and all(isinstance(row, dict) for row in x.values()):
            # diagonal = keys that label both a row and a column
            shared = rowKeys(x).intersection(colKeys(x))
            return sum(x[k][k] for k in shared)
provide(Trace())
class Det(LibFcn):
    """la.det -- determinant of a square matrix; NaN if any cell is non-finite."""
    name = prefix + "det"
    sig = Sigs([Sig([{"x": P.Array(P.Array(P.Double()))}], P.Double()),
                Sig([{"x": P.Map(P.Map(P.Double()))}], P.Double())])
    errcodeBase = 24090
    def __call__(self, state, scope, pos, paramTypes, x):
        if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple)) for xi in x):
            rows = len(x)
            if rows < 1:
                raise PFARuntimeException("too few rows/cols", self.errcodeBase + 0, self.name, pos)
            cols = len(x[0])
            if cols < 1:
                raise PFARuntimeException("too few rows/cols", self.errcodeBase + 0, self.name, pos)
            if raggedArray(x):
                raise PFARuntimeException("ragged columns", self.errcodeBase + 1, self.name, pos)
            if rows != cols:
                raise PFARuntimeException("non-square matrix", self.errcodeBase + 2, self.name, pos)
            if any(any(math.isnan(z) or math.isinf(z) for z in row) for row in x):
                return float("nan")
            else:
                return float(np().linalg.det(arraysToMatrix(x)))
        elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in list(x.keys())):
            # map matrices are made square over the union of row and column
            # keys, with missing cells filled by 0.0 in mapsToMatrix
            keys = list(rowKeys(x).union(colKeys(x)))
            if len(keys) < 1 or all(len(row) == 0 for row in list(x.values())):
                raise PFARuntimeException("too few rows/cols", self.errcodeBase + 0, self.name, pos)
            if any(any(math.isnan(z) or math.isinf(z) for z in list(row.values())) for row in list(x.values())):
                return float("nan")
            else:
                return float(np().linalg.det(mapsToMatrix(x, keys, keys)))
provide(Det())
class Symmetric(LibFcn):
    """la.symmetric -- True when x equals its transpose within tolerance tol."""
    name = prefix + "symmetric"
    sig = Sigs([Sig([{"x": P.Array(P.Array(P.Double()))}, {"tol": P.Double()}], P.Boolean()),
                Sig([{"x": P.Map(P.Map(P.Double()))}, {"tol": P.Double()}], P.Boolean())])
    errcodeBase = 24100
    @staticmethod
    def same(x, y, tol):
        """Tolerant equality: same-signed infinities match, NaN matches NaN,
        finite values match within tol; any other inf/NaN mix does not."""
        if math.isinf(x) and math.isinf(y) and ((x > 0.0 and y > 0.0) or (x < 0.0 and y < 0.0)):
            return True
        elif math.isnan(x) and math.isnan(y):
            return True
        elif not math.isinf(x) and not math.isnan(x) and not math.isinf(y) and not math.isnan(y):
            return abs(x - y) < tol
        else:
            return False
    def __call__(self, state, scope, pos, paramTypes, x, tol):
        if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple)) for xi in x):
            rows = len(x)
            if rows < 1:
                raise PFARuntimeException("too few rows/cols", self.errcodeBase + 0, self.name, pos)
            cols = len(x[0])
            if cols < 1:
                raise PFARuntimeException("too few rows/cols", self.errcodeBase + 0, self.name, pos)
            if raggedArray(x):
                raise PFARuntimeException("ragged columns", self.errcodeBase + 1, self.name, pos)
            if rows != cols:
                raise PFARuntimeException("non-square matrix", self.errcodeBase + 2, self.name, pos)
            # compare every (i, j) with its mirror (j, i)
            return all(all(self.same(x[i][j], x[j][i], tol) for j in range(cols)) for i in range(rows))
        elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in list(x.keys())):
            # map matrices: square over the union of keys, missing cells are 0.0
            keys = list(rowKeys(x).union(colKeys(x)))
            if len(keys) < 1 or all(len(row) == 0 for row in list(x.values())):
                raise PFARuntimeException("too few rows/cols", self.errcodeBase + 0, self.name, pos)
            return all(all(self.same(x.get(i, {}).get(j, 0.0), x.get(j, {}).get(i, 0.0), tol) for j in keys) for i in keys)
provide(Symmetric())
class EigenBasis(LibFcn):
    """la.eigenBasis -- basis of eigenvectors of (the symmetrized) x, each row
    scaled by 1/sqrt(|eigenvalue|)."""
    name = prefix + "eigenBasis"
    sig = Sigs([Sig([{"x": P.Array(P.Array(P.Double()))}], P.Array(P.Array(P.Double()))),
                Sig([{"x": P.Map(P.Map(P.Double()))}], P.Map(P.Map(P.Double())))])
    errcodeBase = 24110
    def calculate(self, x, size):
        """Eigendecompose the symmetrized matrix and return the scaled basis.

        Each eigenvector's sign is flipped so its first component is
        non-negative; rows are ordered by increasing 1/sqrt(|eigenvalue|),
        i.e. largest-|eigenvalue| components first.
        """
        symm = (x + x.T) * 0.5
        evals, evects = np().linalg.eig(symm)
        evects = np().array(evects)
        evects2 = [evects[:,i] * (-1.0 if evects[0,i] < 0.0 else 1.0) for i in range(size)]
        # div is the project-local safe division helper -- presumably it maps
        # a zero eigenvalue to inf rather than raising; confirm in titus.util
        eigvalm2 = [div(1.0, math.sqrt(abs(ei))) for ei in evals]
        order = np().argsort(eigvalm2)
        out = np().empty((size, size), dtype=np().double)
        for i in range(size):
            for j in range(size):
                out[i,j] = evects2[order[i]][j] * eigvalm2[order[i]]
        return out
    def __call__(self, state, scope, pos, paramTypes, x):
        if isinstance(x, (list, tuple)) and all(isinstance(xi, (list, tuple)) for xi in x):
            rows = len(x)
            if rows < 1:
                raise PFARuntimeException("too few rows/cols", self.errcodeBase + 0, self.name, pos)
            cols = len(x[0])
            if cols < 1:
                raise PFARuntimeException("too few rows/cols", self.errcodeBase + 0, self.name, pos)
            if raggedArray(x):
                raise PFARuntimeException("ragged columns", self.errcodeBase + 1, self.name, pos)
            if rows != cols:
                raise PFARuntimeException("non-square matrix", self.errcodeBase + 2, self.name, pos)
            if any(any(math.isnan(z) or math.isinf(z) for z in row) for row in x):
                raise PFARuntimeException("non-finite matrix", self.errcodeBase + 3, self.name, pos)
            return matrixToArrays(self.calculate(arraysToMatrix(x), rows))
        elif isinstance(x, dict) and all(isinstance(x[i], dict) for i in list(x.keys())):
            keys = list(rowKeys(x).union(colKeys(x)))
            if len(keys) < 1 or all(len(z) == 0 for z in list(x.values())):
                raise PFARuntimeException("too few rows/cols", self.errcodeBase + 0, self.name, pos)
            if any(any(math.isnan(z) or math.isinf(z) for z in list(row.values())) for row in list(x.values())):
                raise PFARuntimeException("non-finite matrix", self.errcodeBase + 3, self.name, pos)
            # result rows are labeled "0", "1", ... (basis index), columns keep
            # the original keys
            return matrixToMaps(self.calculate(mapsToMatrix(x, keys, keys), len(keys)), list(map(str, range(len(keys)))), keys)
provide(EigenBasis())
class Truncate(LibFcn):
    """la.truncate -- keep only the first `keep` rows (array form) or the
    rows whose labels appear in `keep` (map form)."""
    name = prefix + "truncate"
    sig = Sigs([Sig([{"x": P.Array(P.Array(P.Double()))}, {"keep": P.Int()}], P.Array(P.Array(P.Double()))),
                Sig([{"x": P.Map(P.Map(P.Double()))}, {"keep": P.Array(P.String())}], P.Map(P.Map(P.Double())))])
    errcodeBase = 24120
    def __call__(self, state, scope, pos, paramTypes, x, keep):
        if isinstance(keep, int) and keep < 0:
            keep = 0  # a negative row count behaves like zero
        if isinstance(x, (list, tuple)) and all(isinstance(row, (list, tuple)) for row in x):
            if len(x) < 1:
                raise PFARuntimeException("too few rows/cols", self.errcodeBase + 0, self.name, pos)
            if len(x[0]) < 1:
                raise PFARuntimeException("too few rows/cols", self.errcodeBase + 0, self.name, pos)
            if raggedArray(x):
                raise PFARuntimeException("ragged columns", self.errcodeBase + 1, self.name, pos)
            return x[:keep]
        elif isinstance(x, dict) and all(isinstance(row, dict) for row in x.values()):
            if len(rowKeys(x)) < 1 or len(colKeys(x)) < 1:
                raise PFARuntimeException("too few rows/cols", self.errcodeBase + 0, self.name, pos)
            return dict((label, x[label]) for label in rowKeys(x) if label in keep)
provide(Truncate())
|
6,003 | 699410536c9a195024c5abbcccc88c17e8e095e3 | ############################################################
# Hierarchical Reinforcement Learning for Relation Extraction
# Multiprocessing with CUDA
# Require: PyTorch 0.3.0
# Author: Tianyang Zhang, Ryuichi Takanobu
# E-mail: keavilzhangzty@gmail.com, truthless11@gmail.com
############################################################
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.autograd as autograd
class TopModel(nn.Module):
    """High-level policy: per token, consumes the biLSTM word encoding, the
    embedding of the last predicted relation, and its running state, and
    emits a distribution over rel_count relations plus option 0."""
    def __init__(self, dim, statedim, rel_count):
        super(TopModel, self).__init__()
        self.dim = dim
        # input width dim*3 + statedim: word encoding is 2*dim (forward +
        # backward LSTM halves, see Model.forward) plus a dim-sized relation
        # embedding plus the statedim-sized memory
        self.hid2state = nn.Linear(dim*3 + statedim, statedim)
        # rel_count relation options plus option 0 ("no relation")
        self.state2prob = nn.Linear(statedim, rel_count+1)
    def forward(self, top_word_vec, rel_vec, memory, training):
        """Return (next state, option-probability vector) for one step."""
        inp = torch.cat([top_word_vec, rel_vec, memory])
        # dropout is active only when training; F.tanh is the PyTorch-0.3-era
        # API this file targets (deprecated in modern releases)
        outp = F.dropout(F.tanh(self.hid2state(inp)), training=training)
        prob = F.softmax(self.state2prob(outp), dim=0)
        return outp, prob
class BotModel(nn.Module):
    """Low-level policy: tags entities for one chosen relation, with a
    separate 7-way output head per relation type."""
    def __init__(self, dim, statedim, rel_count):
        super(BotModel, self).__init__()
        self.dim = dim
        # input width dim*3 + statedim*2: word encoding (2*dim) + entity-type
        # embedding (dim) + memory (statedim) + target state from the top level
        self.hid2state = nn.Linear(dim*3 + statedim*2, statedim)
        # one classifier per relation; indexed with rel-1 in forward (rel is 1-based)
        self.state2probL = nn.ModuleList([nn.Linear(statedim, 7) for i in range(0, rel_count)])
    def forward(self, ent_vec, bot_word_vec, memory, rel, target, training):
        """Return (next state, 7-way entity-tag distribution) for one token."""
        inp = torch.cat([bot_word_vec, ent_vec, memory, target])
        outp = F.dropout(F.tanh(self.hid2state(inp)), training=training)
        prob = F.softmax(self.state2probL[rel-1](outp), dim=0)
        return outp, prob
class Model(nn.Module):
    """Hierarchical RL model for joint relation extraction and entity tagging:
    a top-level relation policy scans the sentence; whenever it emits a
    relation, a bottom-level policy tags the entities for it.

    NOTE(review): tensors are created with torch.cuda.* throughout, so the
    model is CUDA-only as written (consistent with the file header).
    NOTE(review): the `lr` constructor argument is never used here --
    presumably consumed by the caller that builds the optimizer.
    """
    def __init__(self, lr, dim, statedim, wv, rel_count):
        super(Model, self).__init__()
        self.dim = dim
        self.statedim = statedim
        self.rel_count = rel_count
        self.topModel = TopModel(dim, statedim, rel_count)
        self.botModel = BotModel(dim, statedim, rel_count)
        # word embeddings initialized from the pretrained matrix `wv`
        wvTensor = torch.FloatTensor(wv)
        self.wordvector = nn.Embedding(wvTensor.size(0), wvTensor.size(1))
        self.wordvector.weight = nn.Parameter(wvTensor)
        # option 0 of both embeddings doubles as the "none yet" placeholder
        self.relationvector = nn.Embedding(rel_count+1, dim)
        self.entitytypevector = nn.Embedding(7, dim)
        # forward / backward cells of the bidirectional sentence encoder
        self.preLSTML = nn.LSTMCell(dim, dim)
        self.preLSTMR = nn.LSTMCell(dim, dim)
        # state adapters passed between the two policy levels
        self.top2target = nn.Linear(statedim, statedim)
        self.top2bot = nn.Linear(statedim, statedim)
        self.bot2top = nn.Linear(statedim, statedim)
    def sample(self, prob, training, preoptions, position):
        """Pick an action: argmax at test time, teacher-forced from the preset
        sequence when one is supplied, otherwise sampled from the policy."""
        if not training:
            return torch.max(prob, 0)[1]
        elif preoptions is not None:
            return autograd.Variable(torch.cuda.LongTensor(1, ).fill_(preoptions[position]))
        else:
            return torch.multinomial(prob, 1)
    def forward(self, mode, text, preoptions=None, preactions=None):
        """Run both policies over `text` (a list of word ids).

        mode: controls behavior -- "test" in mode disables sampling/dropout,
        "NER" in mode enables the second (entity-tagging) layer.
        Returns (top_action, top_actprob, bot_action, bot_actprob); the
        probability lists hold plain numbers at test time and Variables
        (for policy-gradient loss) during training.
        """
        textin = torch.cuda.LongTensor(text)
        wvs = self.wordvector(autograd.Variable(textin))
        top_action, top_actprob = [], []
        bot_action, bot_actprob = [], []
        training = True if "test" not in mode else False
        #-----------------------------------------------------------------
        # Prepare: bidirectional LSTM pass; wordin[x] = [forward; backward]
        prehid = autograd.Variable(torch.cuda.FloatTensor(self.dim, ).fill_(0))
        prec = autograd.Variable(torch.cuda.FloatTensor(self.dim, ).fill_(0))
        front, back = [0 for i in range(len(text))], [0 for i in range(len(text))]
        for x in range(len(text)):
            prehid, prec = self.preLSTML(wvs[x], (prehid, prec))
            front[x] = prehid
        prehid = autograd.Variable(torch.cuda.FloatTensor(self.dim, ).fill_(0))
        prec = autograd.Variable(torch.cuda.FloatTensor(self.dim, ).fill_(0))
        for x in range(len(text))[::-1]:
            prehid, prec = self.preLSTMR(wvs[x], (prehid, prec))
            back[x] = prehid
        wordin = []
        for x in range(len(text)):
            wordin.append(torch.cat([front[x], back[x]]))
        #------------------------------------------------------------------
        # First Layer: relation options, one per token
        mem = autograd.Variable(torch.cuda.FloatTensor(self.statedim, ).fill_(0))
        action = autograd.Variable(torch.cuda.LongTensor(1, ).fill_(0))
        rel_action = autograd.Variable(torch.cuda.LongTensor(1, ).fill_(0))
        for x in range(len(text)):
            mem, prob = self.topModel(wordin[x],\
                    self.relationvector(rel_action)[0], mem, training)
            action = self.sample(prob, training, preoptions, x)
            # remember the last non-zero relation for the next step's input
            if action.data[0] != 0:
                rel_action = action
            actprob = prob[action]
            top_action.append(action.cpu().data[0])
            if not training:
                top_actprob.append(actprob.cpu().data[0])
            else:
                top_actprob.append(actprob)
            #----------------------------------------------------------------
            # Second Layer: entity tagging, triggered by a non-zero relation
            if "NER" in mode and action.data[0] > 0:
                rel = action.data[0]
                target = self.top2target(mem)
                actionb = autograd.Variable(torch.cuda.LongTensor(1, ).fill_(0))
                actions, actprobs = [], []
                mem = self.top2bot(mem)
                for y in range(len(text)):
                    mem, probb = self.botModel(\
                            self.entitytypevector(actionb)[0], wordin[y], \
                            mem, rel, target, training)
                    actionb = self.sample(probb, training, preactions[x] if preactions is not None else None, y)
                    actprobb = probb[actionb]
                    actions.append(actionb.cpu().data[0])
                    if not training:
                        actprobs.append(actprobb.cpu().data[0])
                    else:
                        actprobs.append(actprobb)
                mem = self.bot2top(mem)
                bot_action.append(actions)
                bot_actprob.append(actprobs)
        return top_action, top_actprob, bot_action, bot_actprob
|
6,004 | a89724be31b4ccc1a3d83305509d9624da364a0c | import sys
def solution(input, extra=None):
    """Return the largest k such that the sorted values can cover 1..k.

    Greedy over the values in increasing order: a value v can "pay for"
    position k whenever v >= k, advancing the counter.

    NOTE(review): the driver below passes a SECOND list, which the original
    single-parameter definition rejected with TypeError. It is accepted
    (and ignored) here to keep the original algorithm's result; confirm
    whether the second input line should actually take part in the answer.
    """
    k = 1
    for v in sorted(input):
        if v >= k:
            k += 1
    return k - 1
# Driver: first stdin line is the number of test cases; each case spans
# three lines, the first of which is discarded (presumably the array
# length N -- confirm against the problem statement).
testcase = sys.stdin.readline()
for i in range(int(testcase)):
    sys.stdin.readline()  # skip per-case header line
    line1 = sys.stdin.readline().rstrip('\n')
    line2 = sys.stdin.readline().rstrip('\n')
    # NOTE(review): solution() is defined above with a single parameter but
    # is called with two lists here -- as written this raises TypeError.
    ans = solution(
        [ int(x) for x in line1.split(' ') ],
        [ int(x) for x in line2.split(' ') ],
    )
    print("Case #{}: {}".format(i+1, ans))
|
6,005 | 21c8078a18ee4579fa9b4b1b667d6ea0c1ce99b3 | # Generated by Django 2.1.3 on 2019-04-10 11:04
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: add French verbose_name labels to the
    `estArchive` boolean flags on `article` and `projet`."""

    dependencies = [
        ('blog', '0014_auto_20190409_1917'),
    ]

    operations = [
        migrations.AlterField(
            model_name='article',
            name='estArchive',
            field=models.BooleanField(default=False, verbose_name="Archiver l'article"),
        ),
        migrations.AlterField(
            model_name='projet',
            name='estArchive',
            field=models.BooleanField(default=False, verbose_name='Archiver le projet'),
        ),
    ]
|
6,006 | e08ab06be0957e5e173df798742abc493eac84d0 | import time
import random

import cv2
import matplotlib.image as mpimg
import matplotlib.pyplot as plt
import numpy as np
import scipy
import scipy.spatial.distance  # bare `import scipy` does not expose scipy.spatial (used in calculate_distance)
import skimage
import skimage.transform  # bare `import skimage` does not expose skimage.transform (used in warp_images)
from PIL import Image
def readimg(dirs, imgname):
    """Load dirs+imgname from disk and return it as a grayscale image.

    Bug fix: cv2.imread returns pixels in BGR channel order, so grayscale
    conversion must use COLOR_BGR2GRAY; the original COLOR_RGB2GRAY applied
    the luma weights to the wrong channels.
    """
    img = cv2.imread(dirs + imgname)
    img = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
    return img
def readimg_color(dirs, imgname):
    """Load a color image and rescale its intensities to floats in [0, 1]."""
    raw = cv2.imread(dirs + imgname)
    return cv2.normalize(raw.astype('float'), None, 0.0, 1.0, cv2.NORM_MINMAX)
def sift_descriptor(img):
    """Return (keypoints, descriptors) detected by SIFT on a grayscale image."""
    detector = cv2.xfeatures2d.SIFT_create()
    return detector.detectAndCompute(img, None)
def show_sift(kp, img):
    """Display `img` with its SIFT keypoints drawn on top."""
    # show the img with descriptors
    # drawKeypoints renders into the copy so the input image stays untouched
    copyimg = img.copy()
    copyimg = cv2.drawKeypoints(img, kp, copyimg)
    plt.imshow(copyimg)
    plt.show()
def calculate_distance(kp1, kp2, dsp1, dsp2, num_threshold):
    """Pair keypoints whose SIFT descriptors are closer than num_threshold.

    Returns an (n, 4) array of matched coordinates [x1, y1, x2, y2].
    """
    # squared Euclidean distance between every descriptor pair, in one C call
    dist = scipy.spatial.distance.cdist(dsp1, dsp2, 'sqeuclidean')
    # indices of all pairs under the threshold, aligned row-to-column
    rows, cols = np.where(dist < num_threshold)[0], np.where(dist < num_threshold)[1]
    coord1 = np.array([kp1[r].pt for r in rows])
    coord2 = np.array([kp2[c].pt for c in cols])
    # side-by-side coordinate pairs
    return np.concatenate((coord1, coord2), axis=1)
def get_errors(matches, H):
    """Squared distance between each img2 point and its img1 point mapped by H.

    `matches` rows are [x1, y1, x2, y2]; returns one error per row.
    """
    n = len(matches)
    # homogeneous img1 points: [x, y, 1]
    pts1 = np.concatenate((matches[:, 0:2], np.ones((1, n)).T), axis=1)
    pts2 = matches[:, 2:4]
    # map each img1 point through H and de-homogenize
    projected = np.zeros((n, 2))
    for k in range(n):
        mapped = np.matmul(H, pts1[k])
        projected[k] = (mapped / mapped[-1])[0:2]
    return np.linalg.norm(pts2 - projected, axis=1) ** 2
def compute_H(subset):
    """Fit a homography to point pairs via the DLT / SVD null-space method.

    `subset` rows are [x1, y1, x2, y2]; returns a 3x3 H normalized so that
    H[2, 2] == 1, mapping homogeneous img1 points onto img2 points.
    """
    equations = []
    for i in range(subset.shape[0]):
        src = np.append(subset[i][0:2], 1)
        dst = np.append(subset[i][2:4], 1)
        # two linear equations per correspondence
        equations.append([0, 0, 0,
                          src[0], src[1], src[2],
                          -dst[1]*src[0], -dst[1]*src[1], -dst[1]*src[2]])
        equations.append([src[0], src[1], src[2],
                          0, 0, 0,
                          -dst[0]*src[0], -dst[0]*src[1], -dst[0]*src[2]])
    A = np.array(equations)
    # the homography is the right-singular vector of the smallest singular value
    U, s, V = np.linalg.svd(A)
    H = V[len(V)-1].reshape(3, 3)
    # fix the scale ambiguity
    return H / H[2, 2]
def show_inlier_matches(img1, img2, inliers):
    """Display the two grayscale images side by side with a line per inlier.

    `inliers` rows are [x1, y1, x2, y2]; img2's x-coordinates are shifted by
    img1's width so line endpoints land correctly on the composite canvas.
    """
    print("num of inliers shown in the matching: " + str(len(inliers)))
    h1, w1 = img1.shape
    h2, w2 = img2.shape
    # composite canvas wide enough for both images placed side by side
    vis = np.zeros((max(h1, h2), w1 + w2), np.uint8)
    vis[:, :w1] = img1
    vis[:h2, w1:] = img2
    fig, ax = plt.subplots()
    ax.imshow(vis)
    ax.plot([inliers[:,0], inliers[:,2] + w1],[inliers[:,1], inliers[:,3]])
    plt.show()
def ransac(img1, img2, matches, thres_ransac):
    """Estimate the best homography from noisy matches with RANSAC.

    Repeatedly fits H to random 4-point subsets, counts inliers whose
    squared reprojection error is below thres_ransac, and keeps the H with
    the most inliers. Shows the inlier matches and returns the best H.

    Fixes over the original:
    - an iteration with zero inliers no longer divides by zero when
      computing the average residual;
    - raises ValueError when no iteration produced a usable H instead of
      referencing unbound variables;
    - the reported average residual now belongs to the BEST iteration
      (the original printed the last iteration's residual).
    """
    itertimes = 1000
    best_H = None
    best_inliers = None
    max_inliners = 0
    avg_residual = 0.0
    for _ in range(itertimes):
        subset_idx = random.sample(range(matches.shape[0]), k=4)
        H = compute_H(matches[subset_idx])
        # a rank-deficient fit cannot be a homography
        if np.linalg.matrix_rank(H) < 3:
            continue
        errors = get_errors(matches, H)
        idx = np.where(errors < thres_ransac)[0]
        inliers = len(idx)
        if inliers > 0 and inliers >= max_inliners:
            best_inliers = matches[idx].copy()
            max_inliners = inliers
            best_H = H.copy()
            avg_residual = errors[idx].sum() / inliers
    if best_H is None:
        raise ValueError("RANSAC failed: no iteration produced a valid homography")
    print("num of inliners: " + str(max_inliners) + " average residual: " + str(avg_residual))
    show_inlier_matches(img1, img2, best_inliers)
    return best_H
# function provided by Maghav at Piazza @450
def warp_images(image0, image1, H):
    """Warp image1 into image0's frame using homography H and blend them.

    Returns the merged RGB panorama as a uint8 numpy array. Overlapping
    regions are averaged (each pixel divided by its coverage count).
    """
    transform = skimage.transform.ProjectiveTransform(H)
    warp = skimage.transform.warp
    r, c = image1.shape[:2]
    # Note that transformations take coordinates in (x, y) format,
    # not (row, column), in order to be consistent with most literature
    corners = np.array([[0, 0],
                        [0, r],
                        [c, 0],
                        [c, r]])
    # Warp the image corners to their new positions
    warped_corners = transform(corners)
    # Find the extents of both the reference image and the warped
    # target image
    all_corners = np.vstack((warped_corners, corners))
    corner_min = np.min(all_corners, axis=0)
    corner_max = np.max(all_corners, axis=0)
    output_shape = (corner_max - corner_min)
    output_shape = np.ceil(output_shape[::-1])
    # translate everything into the positive quadrant of the output canvas
    offset = skimage.transform.SimilarityTransform(translation=-corner_min)
    # cval=-1 marks pixels outside each source image so coverage can be counted
    image0_ = warp(image0, offset.inverse, output_shape=output_shape, cval=-1)
    image1_ = warp(image1, (transform + offset).inverse, output_shape=output_shape, cval=-1)
    # the same warps with cval=0 contribute the actual pixel values
    image0_zeros = warp(image0, offset.inverse, output_shape=output_shape, cval=0)
    image1_zeros = warp(image1, (transform + offset).inverse, output_shape=output_shape, cval=0)
    # per-pixel coverage count (1 or 2); clamp 0 to 1 to avoid dividing by zero
    overlap = (image0_ != -1.0 ).astype(int) + (image1_ != -1.0).astype(int)
    overlap += (overlap < 1).astype(int)
    merged = (image0_zeros+image1_zeros)/overlap
    im = Image.fromarray((255*merged).astype('uint8'), mode='RGB')
    im = np.asarray(im)
    return im
def main(leftimg, rightimg, leftimgcolor, rightimgcolor):
    """Stitch two images: SIFT features -> matching -> RANSAC -> warp.

    Grayscale inputs drive feature detection; colour inputs are warped
    into the final mosaic, which is returned as an array.
    """
    # Thresholds: 7000 / 0.5 suit the 2-picture set, 9000 / 1.0 the
    # 3-picture set.
    match_thresh = 9000
    ransac_thresh = 1.0
    keys_left, desc_left = sift_descriptor(leftimg)
    keys_right, desc_right = sift_descriptor(rightimg)
    # Candidate correspondences between the two images.
    candidates = calculate_distance(keys_left, keys_right, desc_left, desc_right, match_thresh)
    homography = ransac(leftimg, rightimg, candidates, ransac_thresh)
    return warp_images(rightimgcolor, leftimgcolor, homography)
def main_2pic():
    """Stitch the two-image 'park' data set and display the result."""
    base = 'MP3_part1_data/' + 'park/'
    gray_left = readimg(base, 'left.jpg')
    gray_right = readimg(base, 'right.jpg')
    color_left = readimg_color(base, 'left.jpg')
    color_right = readimg_color(base, 'right.jpg')
    mosaic = main(gray_left, gray_right, color_left, color_right)
    plt.imshow(mosaic)
    plt.show()
def main_3pic():
    """Stitch a three-image panorama by chaining two pairwise stitches."""
    base = 'MP3_part1_data/' + 'pier/'  # ledge pier hill
    gray_left = readimg(base, '1.jpg')
    gray_mid = readimg(base, '2.jpg')
    gray_right = readimg(base, '3.jpg')
    color_left = readimg_color(base, '1.jpg')
    color_mid = readimg_color(base, '2.jpg')
    color_right = readimg_color(base, '3.jpg')
    # First stitch left + middle, then stitch that mosaic with the right.
    first_pass = main(gray_left, gray_mid, color_left, color_mid)
    plt.imshow(first_pass)
    plt.show()
    first_pass_gray = cv2.cvtColor(first_pass, cv2.COLOR_RGB2GRAY)
    second_pass = main(first_pass_gray, gray_right, first_pass, color_right)
    plt.imshow(second_pass)
    plt.show()
if __name__ == '__main__':
    # Entry point: run the 3-picture demo (2-picture variant kept for reference).
    #main_2pic()
    main_3pic()
|
6,007 | c77db71844c65eb96946ac0cc384de43ad49ca99 | import math
def math_builtins():
    """Smoke-test the numeric builtins (abs, divmod, pow, round, sum...)."""
    assert abs(-123) == 123
    assert abs(-123.456) == 123.456
    # Complex abs is the Euclidean magnitude.
    assert abs(2+3j) == math.sqrt(2**2 + 3**2)
    assert divmod(5, 2) == (2, 1)
    assert max(1, 2, 3, 4) == 4
    assert min(1, 2, 3, 4) == 1
    a, b, c = 2, 3, 7
    assert pow(a, b) == a ** b
    # Three-argument pow is modular exponentiation.
    assert pow(a, b, c) == a ** b % c
    assert round(123.05) == 123
    assert round(123.65) == 124
    assert round(-123.05) == -123
    assert round(-123.65) == -124
    assert round(123.65, 1) == 123.7
    assert round(-123.65, 1) == -123.7
    assert sum([1, 2, 3]) == 6
def math_module_constants():
    """Check math-module constants and the NaN/infinity predicates."""
    assert math.pi == 3.141592653589793
    assert math.tau == 6.283185307179586
    assert math.e == 2.718281828459045
    assert math.isnan(float('NaN'))
    assert math.isinf(float('inf'))
    assert math.isinf(math.inf)
    assert math.isinf(-math.inf)
def math_module():
    """math.fabs returns the absolute value as a float."""
    assert math.fabs(-1.23) == 1.23
if __name__ == "__main__":
math_builtins()
math_module_constants()
math_module()
|
6,008 | f69544a9123f1738cd7d21c1b4fc02dd73fb9d1b | '''Module main'''
import argparse
import api
import quoridor
import quoridorx
def analyser_commande():
    """Parse the command line for the Quoridor client.

    Positional `idul` plus three boolean flags: -l list past games,
    -a automatic play, -x graphical display.
    """
    parser = argparse.ArgumentParser(description='Jeu Quoridor - phase 3')
    parser.add_argument("idul", help="IDUL du joueur.")
    parser.add_argument(
        "-l", '--lister', action='store_true',
        help="Lister les identifiants de vos 20 dernières parties.")
    parser.add_argument(
        "-a", '--automatique', action='store_true',
        help="Activer le mode automatique.")
    parser.add_argument(
        "-x", '--graphique', action='store_true',
        help="Activer le mode graphique.")
    return parser.parse_args()
if __name__ == "__main__":
COMMANDE = analyser_commande()
if COMMANDE.lister:
print(api.lister_parties(COMMANDE.idul))
# Mode automatique avec graphique (commande : python main.py -ax idul)
elif COMMANDE.automatique and COMMANDE.graphique:
DEBUTER = api.débuter_partie(COMMANDE.idul)
JEU = quoridorx.QuoridorX(DEBUTER[1]['joueurs'], DEBUTER[1]['murs'])
ID_PARTIE = DEBUTER[0]
JEU.afficher()
GAGNANT = True
while GAGNANT:
try:
COUP = JEU.jouer_coup(1)
JOUER = api.jouer_coup(ID_PARTIE, COUP[0], COUP[1])
JEU.liste_joueurs = JOUER['joueurs']
JEU.liste_murs = JOUER['murs']
JEU.afficher()
except StopIteration as err:
GAGNANT = False
print(f'Le gagnant est: {err}')
except RuntimeError as err:
print(err)
# Mode automatique (commande : python main.py -a idul)
elif COMMANDE.automatique:
DEBUTER = api.débuter_partie(COMMANDE.idul)
JEU = quoridor.Quoridor(DEBUTER[1]['joueurs'], DEBUTER[1]['murs'])
ID_PARTIE = DEBUTER[0]
print(JEU)
GAGNANT = True
while GAGNANT:
try:
COUP = JEU.jouer_coup(1)
JOUER = api.jouer_coup(ID_PARTIE, COUP[0], COUP[1])
JEU.liste_joueurs = JOUER['joueurs']
JEU.liste_murs = JOUER['murs']
print(JEU)
except StopIteration as err:
GAGNANT = False
print(f'Le gagnant est: {err}')
except RuntimeError as err:
print(err)
# Mode manuel avec graphique (commande : python main.py -x idul)
elif COMMANDE.graphique:
DEBUTER = api.débuter_partie(COMMANDE.idul)
JEU = quoridorx.QuoridorX(DEBUTER[1]['joueurs'], DEBUTER[1]['murs'])
ID_PARTIE = DEBUTER[0]
JEU.afficher()
GAGNANT = True
while GAGNANT:
OK_CHOIX = True
while OK_CHOIX:
CHOIX_COUP = input('Choisir votre coup("D","MH", "MV"): ')
POS = input('Entrer les coordonnées (x,y): ')
try:
JOUER = api.jouer_coup(ID_PARTIE, CHOIX_COUP, POS)
OK_CHOIX = False
JEU.liste_joueurs = JOUER['joueurs']
JEU.liste_murs = JOUER['murs']
JEU.afficher()
except StopIteration as err:
OK_CHOIX = False
GAGNANT = False
print(f'Le gagnant est: {err}')
except RuntimeError as err:
print(err)
# Mode manuel contre le serveur (commande : python main.py idul)
else:
DEBUTER = api.débuter_partie(COMMANDE.idul)
JEU = quoridor.Quoridor(DEBUTER[1]['joueurs'], DEBUTER[1]['murs'])
ID_PARTIE = DEBUTER[0]
print(JEU)
GAGNANT = True
while GAGNANT:
OK_CHOIX = True
while OK_CHOIX:
CHOIX_COUP = input('Choisir votre coup("D","MH", "MV"): ')
POS = input('Entrer les coordonnées (x,y): ')
try:
JOUER = api.jouer_coup(ID_PARTIE, CHOIX_COUP, POS)
OK_CHOIX = False
JEU.liste_joueurs = JOUER['joueurs']
JEU.liste_murs = JOUER['murs']
print(JEU)
except StopIteration as err:
OK_CHOIX = False
GAGNANT = False
print(f'Le gagnant est: {err}')
except RuntimeError as err:
print(err)
|
6,009 | ef3fa538828315845de5e2f7d4949f690e44276e | """
Flask app for testing the OpenID Connect extension.
"""
import json
from unittest.mock import MagicMock, Mock
from flask import Flask, g
import flask_oidc
from tests.json_snippets import *
oidc = None
def index():
    """Plain-text landing page used to probe authenticated access."""
    headers = {'Content-Type': 'text/plain; charset=utf-8'}
    return "too many secrets", 200, headers
def get_at():
    """Return the current OIDC access token as plain text."""
    headers = {'Content-Type': 'text/plain; charset=utf-8'}
    return oidc.get_access_token(), 200, headers
def get_rt():
    """Return the current OIDC refresh token as plain text."""
    headers = {'Content-Type': 'text/plain; charset=utf-8'}
    return oidc.get_refresh_token(), 200, headers
def get_test1():
    """Marker endpoint for the /test1 authorization check."""
    return ("successful call to test1", 200,
            {'Content-Type': 'text/plain; charset=utf-8'})
def get_test2():
    """Marker endpoint for the /test2 authorization check."""
    return ("successful call to test2", 200,
            {'Content-Type': 'text/plain; charset=utf-8'})
def get_test3():
    """Marker endpoint for the /test3 authorization check."""
    return ("successful call to test3", 200,
            {'Content-Type': 'text/plain; charset=utf-8'})
def get_unprotected():
    """Marker endpoint for the route registered without protection."""
    return ("successful call to unprotected", 200,
            {'Content-Type': 'text/plain; charset=utf-8'})
def raw_api():
    # Expose the token info that accept_token stored on flask.g.
    return {'token': g.oidc_token_info}
def api():
    # JSON-rendered variant of raw_api (bound to the /api route).
    return json.dumps(raw_api())
def get_test4():
    """Marker endpoint for the /test4 callback-validated check."""
    return ("successful call to test4", 200,
            {'Content-Type': 'text/plain; charset=utf-8'})
# Shared mock so tests can program/inspect the check_authorization callback.
callback_method = Mock()
def create_app(config, oidc_overrides=None):
    """Build the Flask test app and register every OIDC-protected route."""
    global oidc
    app = Flask(__name__)
    app.config.update(config)
    if oidc_overrides is None:
        oidc_overrides = {}
    app.oidc = flask_oidc.OpenIDConnect(app, **oidc_overrides)
    # Keep a module-level handle so the view functions above can reach it.
    oidc = app.oidc
    app.route('/')(app.oidc.check(index))
    app.route('/at')(app.oidc.check(get_at))
    app.route('/rt')(app.oidc.check(get_rt))
    # Check standalone usage
    rendered = app.oidc.accept_token(True, ['openid'], auth_header_key='Authorization')(api)
    app.route('/api', methods=['GET', 'POST'])(rendered)
    configure_keycloak_test_uris(app)
    # Check combination with an external API renderer like Flask-RESTful
    unrendered = app.oidc.accept_token(True, ['openid'], render_errors=False, auth_header_key='Authorization')(raw_api)
    def externally_rendered_api(*args, **kwds):
        # Wrap the unrendered view, JSON-encoding either the bare body
        # or the (body, status, headers) tuple it may return.
        inner_response = unrendered(*args, **kwds)
        if isinstance(inner_response, tuple):
            raw_response, response_code, headers = inner_response
            rendered_response = json.dumps(raw_response), response_code, headers
        else:
            rendered_response = json.dumps(inner_response)
        return rendered_response
    app.route('/external_api', methods=['GET', 'POST'])(externally_rendered_api)
    return app
def configure_keycloak_test_uris(app):
    """Register the /test1..4 and /unprotected routes used by the Keycloak tests."""
    test1 = app.oidc.check_authorization(True)(get_test1)
    app.route('/test1', methods=['GET', 'POST'])(test1)
    test2 = app.oidc.check_authorization(True)(get_test2)
    app.route('/test2', methods=['GET', 'POST'])(test2)
    test3 = app.oidc.check_authorization(True)(get_test3)
    app.route('/test3', methods=['GET', 'POST'])(test3)
    # /test4 additionally runs the shared mock as a validation callback.
    callback_method.return_value = True
    test4 = app.oidc.check_authorization(True, validation_func=callback_method)(get_test4)
    app.route('/test4', methods=['GET', 'POST'])(test4)
    unprotected = app.oidc.check_authorization(False)(get_unprotected)
    app.route('/unprotected', methods=['GET'])(unprotected)
def _configure_mock_object(test_app):
    """Stub out token validation and the Keycloak API on the test app."""
    test_app.oidc.validate_token = Mock()
    test_app.oidc.validate_token.return_value = True
    test_app.oidc.keycloakApi = MagicMock(autospec=flask_oidc.KeycloakAPI)
    test_app.oidc.keycloakApi.authorize = Mock()
    test_app.oidc.keycloakApi.authorize.return_value = valid_rpt
    test_app.oidc.keycloakApi.get_access_token = Mock()
    test_app.oidc.keycloakApi.get_access_token.return_value = access_token
    test_app.oidc.keycloakApi._get_realm_pub_key = Mock()
    test_app.oidc.keycloakApi._get_realm_pub_key.return_value = "abc"
def configure_mock_object_version1(test_app):
    """Mock setup: JWT grants permissions for test1 and test2 resources."""
    _configure_mock_object(test_app)
    test_app.oidc.keycloakApi.jwt_decode = Mock()
    test_app.oidc.keycloakApi.jwt_decode.return_value = decoded_jwt_with_permission_test1_and_test2
    test_app.oidc.keycloakApi.get_resource_info = Mock()
    # side_effect yields test1's info on the first call, test2's on the second.
    test_app.oidc.keycloakApi.get_resource_info.side_effect = [resource_test1, resource_test2]
def configure_mock_version2(test_app):
    """Mock setup: JWT grants permission for the test3 resource only."""
    _configure_mock_object(test_app)
    test_app.oidc.keycloakApi.jwt_decode.return_value = decoded_jwt_with_permission_test3
    test_app.oidc.keycloakApi.get_resource_info = Mock()
    test_app.oidc.keycloakApi.get_resource_info.side_effect = [resource_test3]
def configure_mock_version3(test_app):
    """Mock setup: JWT decode fails (returns None) to exercise the error path."""
    _configure_mock_object(test_app)
    test_app.oidc.keycloakApi.jwt_decode.return_value = None
    test_app.oidc.keycloakApi.get_resource_info = Mock()
    test_app.oidc.keycloakApi.get_resource_info.side_effect = [resource_test3]
|
6,010 | 9dfc8414628a8b09de3c24c504dd4163efdd3d35 | # This is main file where we create the instances of Movie class
# and run the file to view the movie website page
# we have to import media where class Movie is defined and
# fresh_tomatoes python files
import fresh_tomatoes
import media
# Each instance has 8 arguments: Title, story line, poster image,
# trailer url, rating, category, director, duration
alien_covenant = media.Movie("Alien: Covenant", "The crew of a colony ship, "
"bound for a remote planet, discover an "
"uncharted paradise with a threat beyond"
"their imagination,"
"and must attempt a harrowing escape.",
"https://upload.wikimedia.org/wikipedia/en/3/33/"
"Alien_Covenant_Teaser_Poster.jpg",
"https://www.youtube.com/watch?v=H0VW6sg50Pk",
"R",
"Science fiction horror",
"Ridley Scott",
"123 Minutes")
avatar = media.Movie("Avatar", "A marine on an alien planet",
"http://upload.wikimedia.org/wikipedia/en/"
"b/b0/Avatar-Teaser-Poster.jpg",
"http://www.youtube.com/watch?v=5PSNL1qE6VY",
"PG-13",
"Epic science fiction",
"James Cameron",
"162 Minutes")
okja = media.Movie("Okja", "A young girl named Mija risks everything to "
"prevent a powerful, multi-national company "
"from kidnapping her best friend,"
"a massive animal named Okja",
"https://upload.wikimedia.org/wikipedia/en/f/f6/Okja.png",
"https://www.youtube.com/watch?v=AjCebKn4iic",
"R",
"Action-Adventure",
"Bong Joon-ho",
"120 Minutes")
gonegirl = media.Movie("Gone Girl",
"A sad story",
"http://upload.wikimedia.org/wikipedia/en/0/05/"
"Gone_Girl_Poster.jpg",
"http://www.youtube.com/watch?v=Ym3LB0lOJ0o",
"R",
"Crime",
"David Fincher",
"149 Minutes")
avenger = media.Movie("Avenger",
"A story about superheroes",
"http://upload.wikimedia.org/wikipedia/en/3/37/"
"Captain_America_The_First_Avenger_poster.jpg",
"http://www.youtube.com/watch?v=hIR8Ar-Z4hw",
"PG-13",
"Action",
"Joss Whedon",
"143 Minutes")
dark_knight = media.Movie("Dark knight rises",
"A story about batman",
"http://upload.wikimedia.org/wikipedia/en/8/83/"
"Dark_knight_rises_poster.jpg",
"http://www.youtube.com/watch?v=g8evyE9TuYk",
"PG-13",
"Action",
"Christopher Nolan",
"165 Minutes")
# Creating a list of all instances
movies = [alien_covenant, avatar, okja, gonegirl, avenger, dark_knight]
# Calling open_movies_page function to create fresh_tomatoes.html
# file which contains a movie web page
fresh_tomatoes.open_movies_page(movies)
|
6,011 | 83733e707a1be131335c4980cdf4beed365eb530 | from simulating_blobs_of_fluid.simulation import Simulation
from simulating_blobs_of_fluid.fluid_renderer import FluidRenderer
import arcade
def main():
    """Build a 50-particle fluid simulation and start the arcade loop."""
    sim = Simulation(particle_count=50, dt=0.016, box_width=250)
    # The renderer registers itself with arcade as a side effect.
    FluidRenderer(sim.box_width, 800, sim)
    arcade.run()
if __name__ == "__main__":
main()
|
6,012 | a4492af775899ec2dcc0cac44b2740edd8422273 | import copy
import random
def parse_arrow(string):
    """Split a wiring rule 'expr -> dest' into the (expr, dest) tuple."""
    parts = string.split(' -> ')
    return tuple(parts)
def parse_sig(string, vals=None):
    """Evaluate the expression side of a wiring rule against *vals*.

    Supports a bare token, NOT x, and the binary AND/OR/LSHIFT/RSHIFT
    forms; 16-bit unsigned semantics for NOT.
    """
    parts = string.split()
    if len(parts) == 1:
        return resolve(parts[0], vals)
    if parts[0] == 'NOT':
        # Emulate a 16-bit unsigned bitwise NOT.
        return 2 ** 16 + ~resolve(parts[1], vals)
    lhs = resolve(parts[0], vals)
    op = parts[1]
    if op == 'AND':
        return lhs & resolve(parts[2], vals)
    if op == 'OR':
        return lhs | resolve(parts[2], vals)
    if op == 'LSHIFT':
        return lhs << int(parts[2])
    if op == 'RSHIFT':
        return lhs >> int(parts[2])
    raise NotImplementedError
def resolve(string, vals):
    """Resolve a token to an int.

    Numeric literals convert directly; anything else is a wire name
    looked up in *vals*.  Raises NotReady when the wire is still unknown.
    """
    try:
        return int(string)
    except ValueError:
        try:
            return vals[string]
        except KeyError:
            raise NotReady
class NotReady(Exception):
    """Signals that a rule's inputs are not all resolved yet."""
    pass
def parse_line(line, vals):
    """Evaluate one wiring rule and store the result on its destination wire."""
    expr, dest = parse_arrow(line)
    vals[dest] = parse_sig(expr, vals)
def clean(set_of_lines):
    """Filter the rules for the second pass.

    Drops the rule feeding wire 'b' (its value is overridden) and any
    plain integer assignment such as '123 -> x'.  The original check
    `isinstance(parse_arrow(line)[0], int)` could never fire because
    split() always yields strings; test the string content instead.
    """
    kept = set()
    for line in set_of_lines:
        left = line.split(' -> ')[0]
        if left.isdigit():
            continue  # constant assignment, e.g. '123 -> x'
        if line.endswith('-> b'):
            continue
        kept.add(line)
    return kept
def run_it(stored_lines, vals):
    # Repeatedly pick a random pending rule; rules whose inputs are not
    # resolved yet raise NotReady and stay in the pool for a later pass.
    # NOTE(review): random.sample on a set is rejected on Python 3.11+;
    # this file targets Python 2 (print statements below).
    while stored_lines:
        line = random.sample(stored_lines, 1)[0]
        try:
            parse_line(line, vals)
            stored_lines.remove(line)
        except NotReady:
            pass
if __name__ == "__main__":
# this is apparently non-deterministic.
# I get different answers at different times.
# luckily, it worked for me the first time I ran it...
lines = set([x.strip() for x in open('input/input7.txt').readlines()])
vals = {}
stored_lines = copy.deepcopy(lines)
run_it(stored_lines, vals)
answer = vals['a']
print answer
vals = {'b': answer}
stored_lines = clean(lines)
run_it(stored_lines, vals)
print vals['a']
|
6,013 | 918db455fc50b49ca2b40dd78cecdec4ba08dcb8 | import math
# 计算像素点属于哪个中心点
from utils.util import distance
def attenuation(color, last_mean):
    """Map the colour distance to a (0, 1) similarity-decay weight."""
    scaled = distance(color, last_mean) / 80
    return 1 - math.exp(-(scaled ** 2))
def get_Count_By_distance(centers, pixel_use, d):
    """Return the index of the closest center, or -1 if none is close enough.

    *d* is the acceptance threshold: set too low it spawns many
    near-duplicate centers; set too high it merges dissimilar pixels
    into one cluster.
    """
    best_score = 1
    threshold = d
    best_index = 0
    for index in range(len(centers)):
        score = attenuation(centers[index], pixel_use)
        if score < best_score:
            best_score = score
            best_index = index
    if best_score < threshold:
        return best_index
    return -1
|
6,014 | b66f588149d160c119f9cc24af3acb9f64432d6e | import dash
import dash_html_components as html
# Minimal Dash app: a single heading served on localhost:8080.
app = dash.Dash(__name__)
app.layout = html.H1("Hello dashboard")
if __name__ == "__main__":
    app.run_server(debug=False, port=8080, host="127.0.0.1")
|
6,015 | 10bf7959f178d3b5c0ce6e97253e665d32363af7 | #!/usr/bin/env python
# KMeans
# 参考 https://qiita.com/g-k/items/0d5d22a12a4507ecbf11
#
# データを適当なクラスタに分けた後、クラスタの平均を用いてうまい具合にデータがわかれるように調整させていくアルゴリズム
# 任意の指定のk個のクラスタを作成するアルゴリズムであることから、k-means法(k点平均法と呼ばれている)
# k-meansの初期値選択の弱点を解消したのが、k-means++
# k-means++では、中心点が互いに遠いところに配置されるような確率が高くなるように操作する。
# 教師なし学習のアルゴリズム
# 主に正解ラベルの無いベクトル形式のデータをクラスタリングするのに用いられる。
# 1 1つ目の中心点を、データ点の中から均等な確率でランダムに選ぶ。
# 2 残り全てのデータ点について、既存の中心点との距離の2乗を計算して足し合わせる。
# 3 2.の結果を合計した値で、それぞれの距離の2乗を割る。
# 4 3.の結果を新たな確率として、2つ目の中心点を選ぶ。
# 5 2.~4.を、クラスター数と同じ数の中心点が出来るまで繰り返す。
import matplotlib.font_manager as fm
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import seaborn as sns
from matplotlib import cm
from mpl_toolkits.mplot3d import Axes3D
from sklearn.model_selection import train_test_split
# 入力:データ、クラスター数、中心点の初期値、繰り返し回数
# 出力:各クラスターの中心点、各データ点の属するクラスター番号
def kmeansplus(X,K,n_iter):
    """k-means clustering with k-means++-style seeding.

    Inputs: data X (n x features), cluster count K, iteration count.
    Returns (idx, centers): per-point cluster index and the K centers.

    NOTE(review): canonical k-means++ weights each candidate by its
    squared distance to the *nearest* existing center; this code sums
    the squared distances over all centers chosen so far — confirm the
    deviation is intended.  Also, an empty cluster makes the .mean()
    below produce NaN centers.
    """
    n = X.shape[0]
    idx = np.zeros(X.shape[0])
    distance = np.zeros(n*K).reshape(n,K)
    centers = np.zeros(X.shape[1]*K).reshape(K,-1)
    # The first pick uses a uniform probability.
    pr = np.repeat(1/n,n)
    # Choose the first center at random.
    centers[0,:] = X[np.random.choice(np.arange(n),1,p=pr),]
    distance[:,0] = np.sum((X-centers[0,:])**2,axis=1)
    for k in np.arange(1,K):
        # Re-weight by accumulated squared distances, then draw center k.
        pr = np.sum(distance,axis=1)/np.sum(distance)
        centers[k,:] = X[np.random.choice(np.arange(n),1,p=pr),]
        distance[:,k] = np.sum((X-centers[k,:])**2,axis=1)
    for _ in range(n_iter):
        # Assign each point to its nearest center.
        for i in range(X.shape[0]):
            idx[i] = np.argmin(np.sum((X[i,:] - centers)**2,axis=1))
        # Move each center to the mean of its assigned points.
        for k in range(K):
            centers[k,:] = X[idx==k,:].mean(axis=0)
    return idx,centers
def main():
    """Generate 4 Gaussian clusters, cluster them, and plot the result."""
    # Sample data: 4 groups of 20 points drawn from 2-D normal
    # distributions (80 points total).
    np.random.seed(123)
    x1 = np.r_[np.random.normal(size=20,loc=1,scale=2),np.random.normal(size=20,loc=8,scale=2)
              ,np.random.normal(size=20,loc=15,scale=2),np.random.normal(size=20,loc=25,scale=2)]
    x2 = np.r_[np.random.normal(size=20,loc=15,scale=2),np.random.normal(size=20,loc=1,scale=2)
              ,np.random.normal(size=20,loc=20,scale=2),np.random.normal(size=20,loc=0,scale=2)]
    X = np.c_[x1,x2]
    # Show the raw, unlabelled data.
    plt.figure(figsize=(6,6))
    plt.scatter(X[:,0],X[:,1],c="black",s=10,alpha=0.5)
    plt.show()
    # Cluster into K=4 groups with a small, fixed iteration budget.
    # NOTE(review): this `centers` value is immediately overwritten by
    # kmeansplus below (k-means++ picks its own seeds) — dead code.
    K=4
    centers = np.array([[0,5],[5,0],[10,15],[20,10]])
    inter = 9
    idx, centers = kmeansplus(X,K,inter)
    data = pd.DataFrame(X,columns=["X","Y"])
    data["idx"] = idx
    data0 = data[data.idx==0]
    data1 = data[data.idx==1]
    data2 = data[data.idx==2]
    data3 = data[data.idx==3]
    # Plot each cluster in its own colour plus the final centers.
    plt.figure(figsize=(6,6))
    plt.scatter(data0.X,data0.Y,color="r",s=10,alpha=0.5)
    plt.scatter(data1.X,data1.Y,color="b",s=10,alpha=0.5)
    plt.scatter(data2.X,data2.Y,color="g",s=10,alpha=0.5)
    plt.scatter(data3.X,data3.Y,color="orange",s=10,alpha=0.5)
    plt.scatter(centers[:,0],centers[:,1],color=["r","b","g","orange"])
    plt.show()
    # NOTE(review): duplicated show() call — the second is a no-op.
    plt.show()
if __name__ == "__main__":
main()
|
6,016 | 16cc85324b555f0cfec8d577b776b86872578822 | # Given an array of integers, return indices of the two numbers such that they add up to a specific target.
# You may assume that each input would have exactly one solution, and you may not use the same element twice.
# Example:
# Given nums = [2, 7, 11, 15], target = 9,
# Because nums[0] + nums[1] = 2 + 7 = 9,
# return [0, 1].
class Solution:
    def twoSum(self, nums, target):
        """Return indices (i, j) of two distinct entries summing to target.

        Builds a value -> index map (later indices win on duplicates),
        then looks up each element's complement in O(n) total.
        """
        index_of = {value: i for i, value in enumerate(nums)}
        for i, value in enumerate(nums):
            complement = target - value
            # Guard against pairing an element with itself when the
            # complement maps back to the same position.
            if complement in index_of and index_of[complement] != i:
                return (i, index_of[complement])
        # The problem guarantees a solution; keep the original fallback.
        return ("No solution available")
# initialize a test case
s = Solution()
nums = [7,2,7,15]
target = 14
a = s.twoSum(nums,target)
print(a)
# create a dictionary that stores the indices as the keys and the integers as the values
# iterate through the array, attempting to find the target minus the integer as a key in the dictionary
# return the indice of the integer and the value of the key
# watch out for arrays that involve duplicates, such as [3,3,7,2], target 6
6,017 | f98d6dd9ac4714c24ce070a1a81dc4610d04b97e | # -*- coding: UTF-8 -*-
# File Name: ll.py
# Author: Sam
# mail: samyunwei@163.com
# Created Time: 2016年03月09日 星期三 19时18分02秒
#########################################################################
#!/usr/bin/env python
def checkmark(marks):
    """Average a list of scores and map the mean to a letter grade."""
    if not isinstance(marks, list):
        return 'marks Error'
    mark = float(sum(marks)) / len(marks)
    if mark >= 90:
        return 'A'
    if mark >= 80:
        return 'B'
    if mark >= 70:
        return 'C'
    if mark >= 60:
        return 'D'
    return 'F'
##l = [100,80,90,90]
#print checkmark(l)
def getfl(thestr):
    # Print character pairs walking inward from both ends
    # (Python 2 print-statement syntax).
    for i in range(len(thestr)):
        print thestr[i]," ",thestr[-i-1]
def mycmp(astr, bstr):
    """Return True when the two strings are identical.

    Fixes the original loop, whose else-clause was attached to the
    inner if: it answered True after comparing only the first character
    and returned None for two empty strings.
    """
    if len(astr) != len(bstr):
        return False
    for i in range(len(astr)):
        if astr[i] != bstr[i]:
            return False
    return True
#print mycmp('hellO','hello')
def myrcmp(astr, bstr):
    """Return True when astr equals bstr reversed.

    Fixes the same loop bug as mycmp: the original answered after the
    first character comparison and returned None for empty inputs.
    """
    if len(astr) != len(bstr):
        return False
    for i in range(len(astr)):
        if astr[i] != bstr[-i - 1]:
            return False
    return True
#print myrcmp('ollhh','hello')
def getrstr(thestr):
    """Return the string followed by its mirror image (a palindrome)."""
    return ''.join([thestr, thestr[::-1]])
#print getrstr("hello")
def mystrip(thestr):
    """Strip leading and trailing spaces (hand-rolled str.strip(' ')).

    Fixes the original off-by-one: it sliced thestr[begl:len-1-endl],
    which dropped one extra character from the end; the correct end
    index is len-endl.
    """
    n = len(thestr)
    begl = 0
    while begl < n and thestr[begl] == ' ':
        begl += 1
    endl = 0
    # Stop before re-counting the leading run on all-space input.
    while endl < n - begl and thestr[n - 1 - endl] == ' ':
        endl += 1
    return thestr[begl:n - endl]
# Manual checks (Python 2 print statements).
print mystrip('hello '),'test','test'
print mystrip(' hello ')
print mystrip(' hello')
|
6,018 | 8b2911586e21162bec074732216c410c591f18a8 | from django.shortcuts import render
from django.http import HttpResponse
from django.contrib.auth.models import User
from .models import Museo, Distrito, Comentario, Favorito, Like, Titulo, Letra, Color
from django.views.decorators.csrf import csrf_exempt
from django.contrib.auth import authenticate, login
from django.contrib.auth import logout
from web.parser import parseXML
import operator
from django.template.loader import get_template
from django.template import Context
import datetime
def getMuseums():
    """Map every museum id to its comment count."""
    counts = {}
    for museo in Museo.objects.all():
        counts[museo.ID_ENTIDAD] = museo.comentario_set.count()
    return counts
def getAccessibleMuseums():
    """Map accessible museums (ACCESIBILIDAD == '1') to comment counts."""
    counts = {}
    for museo in Museo.objects.all():
        if museo.ACCESIBILIDAD == '1':
            counts[museo.ID_ENTIDAD] = museo.comentario_set.count()
    return counts
def getRanking():
    """(museum_id, comment_count) pairs, most-commented first."""
    pairs = sorted(getMuseums().items(), key=operator.itemgetter(1))
    pairs.reverse()
    return pairs
def getAccessibleRanking():
    """(museum_id, comment_count) pairs for accessible museums, most-commented first."""
    pairs = sorted(getAccessibleMuseums().items(), key=operator.itemgetter(1))
    pairs.reverse()
    return pairs
@csrf_exempt
def mainPage(request):
    """Landing page: top-5 most-commented museums, HTML list + map markers.

    GET (or POST accion='mostrar') ranks all museums; POST
    accion='ocultar' ranks only accessible ones.  Fixes the typo
    'No disponbile' in the accessible branch, which made the
    coordinate-availability check always true and emitted JS markers
    with invalid coordinates.

    NOTE(review): the two branches are near-duplicates and build HTML
    by string concatenation — candidates for template extraction.
    """
    template = get_template('index.html')
    topFive = range(5)
    list = '<br>'
    markers = ''
    if request.method == 'GET' or (request.method == 'POST' and request.POST['accion'] == 'mostrar'):
        ranking = getRanking()
        list = (list + "<center><form action='/' method='post'><input type='hidden' name='accion' value='ocultar'>" +
                "<input class='desplegable' type='submit' value='Mostrar museos accesibles'></form></center><div id='scroll'>")
        if len(ranking) > 0:
            for item in topFive:
                if ranking[item][1] != 0:
                    museum = Museo.objects.get(ID_ENTIDAD = ranking[item][0])
                    list = list + "<center><a class='titulos' href=" + museum.CONTENT_URL + '>' + museum.NOMBRE + '</a><br><b>' + str(museum.comentario_set.count()) + ' Comentarios - ' + str(museum.like_set.count()) + ' Likes</b></br></br>'
                    list = list + "<a class='direccion'>" + museum.CLASE_VIAL + ' ' + museum.NOMBRE_VIA + ', Nº ' + museum.NUM + ', ' + museum.LOCALIDAD + '</a></br></br>'
                    list = list + "<a class='info' href=" + "/museos/" + museum.ID_ENTIDAD + '/>Más información</a></center></br></br>'
                    # Only emit a Google Maps marker when real coordinates exist.
                    if museum.LATITUD != 'No disponible' and museum.LONGITUD != 'No disponible':
                        markers = (markers +
                            "var " + "X" + museum.ID_ENTIDAD + "info = new google.maps.InfoWindow({" +
                            "content:'<h1>" + museum.NOMBRE + "</h1>'});" +
                            "var " + "X" + museum.ID_ENTIDAD + "marker = new google.maps.Marker({" +
                            "position: {lat: " + museum.LATITUD + ", lng: " + museum.LONGITUD + " },map: map});" +
                            "X" + museum.ID_ENTIDAD + "marker.addListener('click', function() {" +
                            "X" + museum.ID_ENTIDAD + "info.open(map," + "X" + museum.ID_ENTIDAD + "marker);" +
                            "});")
            if ranking[0][1] == 0:
                list = list + "<a class='titulos'><center>" + 'No hay museos con comentarios, ¡sé el primero en comentar!' + '</center></a></br></br></div>'
            else:
                list = list + '</div>'
                list = list + "<center><a class='info' href='/xml'>XML de la página</a></center>"
        else:
            list = list + "<a class='titulos'><center>" + 'No hay museos con comentarios, ¡sé el primero en comentar!' + '</center></a></br></br></div>'
    elif request.method == 'POST' and request.POST['accion'] == 'ocultar':
        ranking = getAccessibleRanking()
        list = (list + "<center><form action='/' method='post'><input type='hidden' name='accion' value='mostrar'>" +
                "<input class='desplegable' type='submit' value='Mostrar todos los museos'></form></center><div id='scroll'>")
        if len(ranking) > 0:
            for item in topFive:
                if ranking[item][1] != 0:
                    museum = Museo.objects.get(ID_ENTIDAD = ranking[item][0])
                    list = list + "<center><a class='titulos' href=" + museum.CONTENT_URL + '>' + museum.NOMBRE + '</a><br><b>' + str(museum.comentario_set.count()) + ' Comentarios - ' + str(museum.like_set.count()) + ' Likes</b></br></br>'
                    list = list + "<a class='direccion'>" + museum.CLASE_VIAL + ' ' + museum.NOMBRE_VIA + ', Nº ' + museum.NUM + ', ' + museum.LOCALIDAD + '</a></br></br>'
                    list = list + "<a class='info' href=" + "/museos/" + museum.ID_ENTIDAD + '/>Más información</a></center></br></br>'
                    # BUGFIX: was 'No disponbile' (typo), so the guard never filtered.
                    if museum.LATITUD != 'No disponible' and museum.LONGITUD != 'No disponible':
                        markers = (markers +
                            "var " + "X" + museum.ID_ENTIDAD + "info = new google.maps.InfoWindow({" +
                            "content:'<h1>" + museum.NOMBRE + "</h1>'});" +
                            "var " + "X" + museum.ID_ENTIDAD + "marker = new google.maps.Marker({" +
                            "position: {lat: " + museum.LATITUD + ", lng: " + museum.LONGITUD + " },map: map});" +
                            "X" + museum.ID_ENTIDAD + "marker.addListener('click', function() {" +
                            "X" + museum.ID_ENTIDAD + "info.open(map," + "X" + museum.ID_ENTIDAD + "marker);" +
                            "});")
            if ranking[0][1] == 0:
                list = list + "<a class='titulos'><center>" + 'No hay museos accesibles con comentarios, ¡sé el primero en comentar!' + '</center></a></br></br></div>'
            else:
                list = list + '</div>'
                list = list + "<center><a class='info' href='/xml'>XML de la página</a></center>"
        else:
            list = list + "<a class='titulos'><center>" + 'No hay museos accesibles con comentarios, ¡sé el primero en comentar!' + '</center></a></br></br></div>'
    # Per-user theme (background colour, font size) with defaults.
    style = ''
    if request.user.is_authenticated():
        login = 1
        try:
            color = Color.objects.get(usuario = request.user)
            color = color.color
        except Color.DoesNotExist:
            color = 'EEF4F8'
        try:
            letra = Letra.objects.get(usuario = request.user)
            letra = letra.letra
        except Letra.DoesNotExist:
            letra = '9'
        style = ("body{font-family: 'Helvetica', sans-serif;"
        "color: #444444;"
        "font-size: " + letra + "pt;"
        "background-color: #" + color + ";}")
    else:
        login = 0
    # Sidebar listing of every user page, titled when the user set one.
    users = User.objects.all()
    userList = ''
    for user in users:
        try:
            title = Titulo.objects.get(usuario = user.username)
            userList = userList + "<li><a href='/" + user.username + "'>" + title.titulo + ' - ' + user.username + "</a></li></br>"
        except Titulo.DoesNotExist:
            userList = userList + "<li><a href='/" + user.username + "'>Página de " + user.username + "</a></li></br>"
    return HttpResponse(template.render(Context({'body': list, 'login': login, 'user': request.user, 'userList': userList, 'formato': style, 'markers': markers})))
@csrf_exempt
def museumsPage(request):
    """Museum listing page; POST filters by the selected district.

    Builds an HTML list plus Google Maps JS markers for museums with
    known coordinates, and renders the user's saved colour/font theme.
    """
    template = get_template('museos.html')
    if request.method == 'GET':
        museos = Museo.objects.all()
    elif request.method == 'POST':
        distrito = Distrito.objects.get(nombre = request.POST['distrito'])
        museos = distrito.museo_set.all()
    list = ''
    markers = ''
    # NOTE(review): `i` is never used below.
    i = 1
    for museo in museos:
        list = list + "<center><a class='titulos'>" + museo.NOMBRE + '</a></br>'
        list = list + "<a class='info' href=" + "/museos/" + museo.ID_ENTIDAD + '/>Más información</a></center></br></br>'
        # Only emit a marker when real coordinates exist.
        if museo.LATITUD != 'No disponible' and museo.LONGITUD != 'No disponible':
            markers = (markers +
                "var " + "X" + museo.ID_ENTIDAD + "info = new google.maps.InfoWindow({" +
                "content:'<h1>" + museo.NOMBRE + "</h1>'});" +
                "var " + "X" + museo.ID_ENTIDAD + "marker = new google.maps.Marker({" +
                "position: {lat: " + museo.LATITUD + ", lng: " + museo.LONGITUD + " },map: map});" +
                "X" + museo.ID_ENTIDAD + "marker.addListener('click', function() {" +
                "X" + museo.ID_ENTIDAD + "info.open(map," + "X" + museo.ID_ENTIDAD + "marker);" +
                "});")
    # Per-user theme (background colour, font size) with defaults.
    style = ''
    if request.user.is_authenticated():
        login = 1
        try:
            color = Color.objects.get(usuario = request.user)
            color = color.color
        except Color.DoesNotExist:
            color = 'EEF4F8'
        try:
            letra = Letra.objects.get(usuario = request.user)
            letra = letra.letra
        except Letra.DoesNotExist:
            letra = '9'
        style = ("body{font-family: 'Helvetica', sans-serif;"
        "color: #444444;"
        "font-size: " + letra + "pt;"
        "background-color: #" + color + ";}")
    else:
        login = 0
    # District selector options for the filter form.
    distritos = Distrito.objects.all()
    districtList = ''
    for distrito in distritos:
        districtList = districtList + "<option value='" + distrito.nombre + "'>" + distrito.nombre + "</option>"
    return HttpResponse(template.render(Context({'body': list, 'login': login, 'user': request.user, 'districtList': districtList, 'formato': style, 'markers': markers})))
@csrf_exempt
def museumPage(request, museumID):
    """Render the detail page of one museum.

    POST actions: 'comentario' adds a comment, 'añadir'/'quitar' add or
    remove the museum from the user's favourites, 'mas'/'menos' add or
    remove a like.  The page body, the favourite/like buttons, the map
    marker and the user's CSS are built as inline HTML/JS strings.
    """
    template = get_template('museo.html')
    museum = Museo.objects.get(ID_ENTIDAD = museumID)
    if request.method == 'POST' and 'comentario' in request.POST:
        comment = Comentario(texto = request.POST['comentario'], museo = museum, usuario = request.user.username)
        comment.save()
    elif request.method == 'POST' and 'añadir' in request.POST:
        fav = Favorito(museo = museum, usuario = request.user)
        fav.save()
    elif request.method == 'POST' and 'quitar' in request.POST:
        Favorito.objects.filter(museo = museum, usuario = request.user).delete()
    elif request.method == 'POST' and 'mas' in request.POST:
        like = Like(museo = museum, usuario = request.user)
        like.save()
    elif request.method == 'POST' and 'menos' in request.POST:
        Like.objects.filter(museo = museum, usuario = request.user).delete()
    comments = museum.comentario_set.all()
    message = ("<center><b><a class='titulos_museo'>" + museum.NOMBRE + "</a></b></center><div id='scroll'></br>"
        "<center><b><a class='titulos_museo'>Descripción</a></b></center></br>"
        "<center><a class='texto_museo'>" + museum.DESCRIPCION_ENTIDAD + '</a></center></br>'
        "<center><b><a class='titulos_museo'>Horario</a></b></center></br>"
        "<center><a class='texto_museo'>" + museum.HORARIO + '</a></center></br>'
        "<center><b><a class='titulos_museo'>Accesibilidad</a></b></center></br>"
        "<center><a class='texto_museo'>" + museum.ACCESIBILIDAD + '</a></center></br>'
        "<center><b><a class='titulos_museo'>Dirección</a></b></center></br>"
        "<center><a class='texto_museo'>" + museum.CLASE_VIAL + ' ' + museum.NOMBRE_VIA + ', Nº ' + museum.NUM + ', ' + museum.LOCALIDAD + '</a><center></br>'
        "<center><a class='texto_museo'>Barrio: " + museum.BARRIO + '</a></center></br>'
        "<center><a class='texto_museo'>Distrito: " + str(museum.DISTRITO) + '</a></center></br>'
        "<center><b><a class='titulos_museo'>Datos de contacto</a></b></center></br>"
        "<center><a class='texto_museo'>Teléfono: " + museum.TELEFONO + '</a></center></br>'
        "<center><a class='texto_museo'>Email: " + museum.EMAIL + '</a></center></br>'
        "<center><b><a class='titulos_museo'>Comentarios</a></b></center></br>")
    allComments = ''
    for comment in comments:
        # +2h converts stored UTC timestamps to local (CEST) display time
        allComments = allComments + "<center><a class='texto_museo'><b>" + 'Anónimo</b>: ' + comment.texto + ', ' + (datetime.timedelta(hours=2) + comment.fecha).strftime("%H:%M:%S %d-%m-%Y") + '</a></center></br>'
    message = message + allComments
    style = ''
    if request.user.is_authenticated():
        login = 1
        try:
            favorito = Favorito.objects.get(museo = museum, usuario = request.user)
            favoriteButton = ("<center><form action='/museos/" + museumID + "/' method='post'><input type='hidden' name='quitar' value='fav'>" +
                "<input class='desplegable' type='submit' value='Quitar de favoritos'></form></center>")
        except Favorito.DoesNotExist:
            favoriteButton = ("<center><form action='/museos/" + museumID + "/' method='post'><input type='hidden' name='añadir' value='fav'>" +
                "<input class='desplegable' type='submit' value='Añadir a favoritos'></form></center>")
        try:
            like = Like.objects.get(museo = museum, usuario = request.user)
            likeButton = ("<center><form action='/museos/" + museumID + "/' method='post'><input type='hidden' name='menos' value='like'>" +
                "<input class='desplegable' type='submit' value='Dislike'></form></center>")
        except Like.DoesNotExist:
            likeButton = ("<center><form action='/museos/" + museumID + "/' method='post'><input type='hidden' name='mas' value='like'>" +
                "<input class='desplegable' type='submit' value='Like'></form></center>")
        try:
            color = Color.objects.get(usuario = request.user)
            color = color.color
        except Color.DoesNotExist:
            color = 'EEF4F8'
        try:
            letra = Letra.objects.get(usuario = request.user)
            letra = letra.letra
        except Letra.DoesNotExist:
            letra = '9'
        style = ("body{font-family: 'Helvetica', sans-serif;"
                 "color: #444444;"
                 "font-size: " + letra + "pt;"
                 "background-color: #" + color + ";}")
    else:
        login = 0
        favoriteButton = ''
        likeButton = ''
    # BUGFIX: sentinel was misspelled 'No disponbile', so museums without
    # coordinates still produced a broken map marker.
    if museum.LATITUD != 'No disponible' and museum.LONGITUD != 'No disponible':
        marker = ("var " + "X" + museum.ID_ENTIDAD + "info = new google.maps.InfoWindow({" +
            "content:'<h1>" + museum.NOMBRE + "</h1>'});" +
            "var " + "X" + museum.ID_ENTIDAD + "marker = new google.maps.Marker({" +
            "position: {lat: " + museum.LATITUD + ", lng: " + museum.LONGITUD + " },map: map});" +
            "X" + museum.ID_ENTIDAD + "marker.addListener('click', function() {" +
            "X" + museum.ID_ENTIDAD + "info.open(map," + "X" + museum.ID_ENTIDAD + "marker);" +
            "});")
    else:
        marker = ''
    return HttpResponse(template.render(Context({'body': message, 'login': login, 'user': request.user, 'id': museumID, 'fav': favoriteButton, 'like': likeButton, 'formato': style, 'marker': marker})))
@csrf_exempt
def loginPage(request):
    """Process a login or registration POST, then render the main page.

    'login' POST: authenticate the credentials and start a session.
    'registro' POST: if the username already exists it is treated as a
    login attempt; otherwise the account is created.
    NOTE(review): a freshly created account is not logged in
    automatically — confirm this is intended.
    """
    if request.method == 'POST':
        if not request.user.is_authenticated() and 'login' in request.POST:
            username = request.POST['Usuario']
            password = request.POST['Contraseña']
            user = authenticate(username=username, password=password)
            if user is not None:
                login(request, user)
        elif not request.user.is_authenticated() and 'registro' in request.POST:
            username = request.POST['Usuario']
            password = request.POST['Contraseña']
            try:
                # existing username: fall back to an authentication attempt
                user = User.objects.get(username = username)
                user = authenticate(username = username, password = password)
                if user is not None:
                    login(request, user)
            except User.DoesNotExist:
                user = User.objects.create_user(username = username, password = password)
                user.save()
    # force a GET so mainPage does not re-process this POST's form data
    request.method = 'GET'
    return mainPage(request)
def logoutPage(request):
    """End the current session and show the main page again."""
    logout(request)
    response = mainPage(request)
    return response
def userPage(request, user, number):
    """Render the personal page of *user*: their favourite museums,
    paginated five per page, plus map markers, pagination links and the
    list of all registered users.

    user   -- username whose favourites are shown (a string)
    number -- 1-based page number from the URL; None defaults to 1
    """
    if number == None:
        number = 1
    template = get_template('personal.html')
    # listTotal collects ALL favourites as HTML, with ';' inserted after
    # every 5th entry so it can later be split into pages.
    listTotal = ''
    favoritos = Favorito.objects.filter(usuario = user)
    group = range(5)
    count = 0;
    markers = ''
    for favorito in favoritos:
        count = count + 1;
        museum = Museo.objects.get(NOMBRE = favorito.museo)
        listTotal = listTotal + "<a class='titulos' href=" + museum.CONTENT_URL + '>' + museum.NOMBRE + '</a><br><b>' + str(museum.comentario_set.count()) + ' Comentarios - ' + str(museum.like_set.count()) + ' Likes</b></br></br>'
        listTotal = listTotal + "<a class='direccion'>" + museum.CLASE_VIAL + ' ' + museum.NOMBRE_VIA + ', Nº ' + museum.NUM + ', ' + museum.LOCALIDAD + '</a></br></br>'
        listTotal = listTotal + "<a class='info' href=" + "/museos/" + museum.ID_ENTIDAD + '/>Más información</a> <b>Fecha de guardado:' + (datetime.timedelta(hours=2) + favorito.fecha).strftime("%H:%M:%S %d-%m-%Y") + '</b></br></br></br>'
        # only museums with known coordinates get a map marker
        if museum.LATITUD != 'No disponible' and museum.LONGITUD != 'No disponible':
            markers = (markers +
                "var " + "X" + museum.ID_ENTIDAD + "info = new google.maps.InfoWindow({" +
                "content:'<h1>" + museum.NOMBRE + "</h1>'});" +
                "var " + "X" + museum.ID_ENTIDAD + "marker = new google.maps.Marker({" +
                "position: {lat: " + museum.LATITUD + ", lng: " + museum.LONGITUD + " },map: map});" +
                "X" + museum.ID_ENTIDAD + "marker.addListener('click', function() {" +
                "X" + museum.ID_ENTIDAD + "info.open(map," + "X" + museum.ID_ENTIDAD + "marker);" +
                "});")
        if (count % 5) == 0:
            listTotal = listTotal + ';'
    # pick the requested page's chunk of HTML
    group = listTotal.split(';')[int(number) - 1]
    list = ''
    if (favoritos.count() % 5) == 0:
        pages = int(favoritos.count() / 5)
    else:
        pages = int(favoritos.count() / 5) + 1
    pagesRange = range(pages)
    # build the pagination bar («, page numbers, ») when needed
    if pages > 1:
        list = '<br>'
        if int(number) > 1:
            list = list + "<center><div class='pagination'><a href='/" + user + "/" + str(int(number) - 1) + "'>«</a>"
        else:
            list = list + "<center><div class='pagination'><a href='/" + user + "/" + str(number) + "'>«</a>"
        for page in pagesRange:
            if page == (int(number) - 1):
                list = list + "<a class='active' href='/" + user + "/" + str(page + 1) + "'>" + str(page + 1) + "</a>"
            else:
                list = list + "<a href='/" + user + "/" + str(page + 1) + "'>" + str(page + 1) + "</a>"
        if int(number) == pages:
            list = list + "<a href='/" + user + "/" + str(number) + "'>»</a></div></center></br>"
        else:
            list = list + "<a href='/" + user + "/" + str(int(number) + 1) + "'>»</a></div></center></br>"
    list = list + "<div id='scroll'><center>"
    for item in group:
        list = list + item
    # placeholder messages when there is nothing to show
    if (list == '' or list == "<div id='scroll'><center>") and user != 'AnonymousUser':
        list = "<center><a class='titulos'>" + 'Para que aparezcan museos en esta página, ' + user + ' tiene que añadirlos.' + '</a></center></br></br>'
    elif (list == '' or list == "<div id='scroll'><center>") and user == 'AnonymousUser':
        list = "<center><a class='titulos'>" + 'Para ver tu página personal, primero tienes que loguearte.' + '</a></center></br></br>'
    else:
        list = list + "<center><a class='info' href='/" + user + "/xml'>XML del usuario</a></center>"
    list = list + '</center></div>'
    users = User.objects.all()
    userList = ''
    # NOTE(review): this loop rebinds the `user` parameter; the original
    # username is no longer available after this point.
    for user in users:
        try:
            title = Titulo.objects.get(usuario = user.username)
            userList = userList + "<li><a href='/" + user.username + "'>" + title.titulo + ' - ' + user.username + "</a></li></br>"
        except Titulo.DoesNotExist:
            userList = userList + "<li><a href='/" + user.username + "'>Página de " + user.username + "</a></li></br>"
    style = ''
    if request.user.is_authenticated():
        login = 1
        try:
            color = Color.objects.get(usuario = request.user)
            color = color.color
        except Color.DoesNotExist:
            color = 'EEF4F8'
        try:
            letra = Letra.objects.get(usuario = request.user)
            letra = letra.letra
        except Letra.DoesNotExist:
            letra = '9'
        style = ("body{font-family: 'Helvetica', sans-serif;"
                 "color: #444444;"
                 "font-size: " + letra + "pt;"
                 "background-color: #" + color + ";}")
    else:
        login = 0
    return HttpResponse(template.render(Context({'body': list, 'login': login, 'user': request.user, 'userList': userList, 'formato': style, 'markers': markers})))
def userXMLPage(request, user):
    """Serve the favourite museums of *user* as an XML document."""
    template = get_template("personalXML.xml")
    favoriteList = [favorite.museo for favorite in Favorito.objects.filter(usuario = user)]
    return HttpResponse(template.render(Context({'favoriteList': favoriteList, 'user': user})), content_type = "text/xml")
def XMLPage(request):
    """Serve the five best-ranked museums as an XML document."""
    template = get_template("personalXML.xml")
    user = ''
    topList = []
    ranking = getRanking()
    for position in range(5):
        # a zero score means the slot is unused
        if ranking[position][1] != 0:
            topList.append(Museo.objects.get(ID_ENTIDAD = ranking[position][0]))
    return HttpResponse(template.render(Context({'favoriteList': topList, 'user': user})), content_type = "text/xml")
def XMLAccesiblePage(request):
    """Serve the five best-ranked accessible museums as an XML document."""
    template = get_template("personalXML.xml")
    user = ''
    topList = []
    ranking = getAccessibleRanking()
    for position in range(5):
        # a zero score means the slot is unused
        if ranking[position][1] != 0:
            topList.append(Museo.objects.get(ID_ENTIDAD = ranking[position][0]))
    return HttpResponse(template.render(Context({'favoriteList': topList, 'user': user})), content_type = "text/xml")
@csrf_exempt
def preferencesPage(request, user):
    """Render the preferences page and persist submitted settings.

    A POST may carry one of 'color', 'tamaño' or 'título'; the matching
    per-user record is updated, or created if it does not exist yet.
    """
    template = get_template("preferencias.html")
    if request.method == 'POST':
        if 'color' in request.POST:
            try:
                preference = Color.objects.get(usuario = user)
                preference.color = request.POST['color']
            except Color.DoesNotExist:
                preference = Color(usuario = user, color = request.POST['color'])
            preference.save()
        elif 'tamaño' in request.POST:
            try:
                preference = Letra.objects.get(usuario = user)
                preference.letra = request.POST['tamaño']
            except Letra.DoesNotExist:
                preference = Letra(usuario = user, letra = request.POST['tamaño'])
            preference.save()
        elif 'título' in request.POST:
            try:
                preference = Titulo.objects.get(usuario = user)
                preference.titulo = request.POST['título']
            except Titulo.DoesNotExist:
                preference = Titulo(usuario = user, titulo = request.POST['título'])
            preference.save()
    style = ''
    if request.user.is_authenticated():
        login = 1
        try:
            color = Color.objects.get(usuario = request.user).color
        except Color.DoesNotExist:
            color = 'EEF4F8'
        try:
            letra = Letra.objects.get(usuario = request.user).letra
        except Letra.DoesNotExist:
            letra = '9'
        style = ("body{font-family: 'Helvetica', sans-serif;"
                 "color: #444444;"
                 "font-size: " + letra + "pt;"
                 "background-color: #" + color + ";}")
    else:
        login = 0
    return HttpResponse(template.render(Context({'login': login, 'user': user, 'formato': style})))
def aboutPage(request):
    """Render the static 'about' page, styled with the user's settings."""
    template = get_template('about.html')
    # anonymous visitors get the default (empty) style
    if not request.user.is_authenticated():
        return HttpResponse(template.render(Context({'login': 0, 'user': request.user, 'formato': ''})))
    try:
        color = Color.objects.get(usuario = request.user).color
    except Color.DoesNotExist:
        color = 'EEF4F8'
    try:
        letra = Letra.objects.get(usuario = request.user).letra
    except Letra.DoesNotExist:
        letra = '9'
    style = ("body{font-family: 'Helvetica', sans-serif;"
             "color: #444444;"
             "font-size: " + letra + "pt;"
             "background-color: #" + color + ";}")
    return HttpResponse(template.render(Context({'login': 1, 'user': request.user, 'formato': style})))
def updateDB(request):
    """Re-import museum data from web/museos.xml into the database.

    First ensures every referenced Distrito exists, then inserts each
    museum that is not already stored (matched on ID-ENTIDAD).  Any XML
    field that is missing defaults to the sentinel 'No disponible' used
    throughout the app.  Finally renders the main page.
    """
    museos = parseXML('web/museos.xml')
    # pass 1: create any missing district rows
    for museo in museos:
        try:
            Distrito.objects.get(nombre = museos[museo]['DISTRITO'])
        except Distrito.DoesNotExist:
            Distrito(nombre = museos[museo]['DISTRITO']).save()
    # pass 2: insert museums that are not stored yet
    for museo in museos:
        datos = museos[museo]

        def campo(clave):
            # replaces the original per-field try/except KeyError blocks
            return datos.get(clave, 'No disponible')

        if 'DISTRITO' in datos:
            distrito = Distrito.objects.get(nombre = datos['DISTRITO'])
        else:
            distrito = 'No disponible'
        identidad = campo('ID-ENTIDAD')
        try:
            Museo.objects.get(ID_ENTIDAD = identidad)
        except Museo.DoesNotExist:
            Museo(
                ID_ENTIDAD = identidad,
                NOMBRE = campo('NOMBRE'),
                DESCRIPCION_ENTIDAD = campo('DESCRIPCION-ENTIDAD'),
                HORARIO = campo('HORARIO'),
                TRANSPORTE = campo('TRANSPORTE'),
                ACCESIBILIDAD = campo('ACCESIBILIDAD'),
                CONTENT_URL = campo('CONTENT-URL'),
                NOMBRE_VIA = campo('NOMBRE-VIA'),
                CLASE_VIAL = campo('CLASE-VIAL'),
                TIPO_NUM = campo('TIPO-NUM'),
                NUM = campo('NUM'),
                LOCALIDAD = campo('LOCALIDAD'),
                PROVINCIA = campo('PROVINCIA'),
                CODIGO_POSTAL = campo('CODIGO-POSTAL'),
                BARRIO = campo('BARRIO'),
                DISTRITO = distrito,
                COORDENADA_X = campo('COORDENADA-X'),
                COORDENADA_Y = campo('COORDENADA-Y'),
                LATITUD = campo('LATITUD'),
                LONGITUD = campo('LONGITUD'),
                TELEFONO = campo('TELEFONO'),
                FAX = campo('FAX'),
                EMAIL = campo('EMAIL'),
                TIPO = campo('TIPO')).save()
    return mainPage(request)
#!/usr/bin/env python3
"""
This file contains all the required methods for the street prediction utilizing
the Hough transform.
"""
import numpy as np
import scipy.ndimage as ndi
from skimage.draw import polygon
from skimage.transform import hough_line
def draw_roads(roads, shape):
    """
    Paint roads as filled rectangles into a float image.

    Parameters:
        roads -- ndarray of (strength, angle, distance, width) rows,
                 exactly as returned by find_roads
        shape -- (height, width) of the output image

    Returns:
        Float ndarray of shape `shape`: background pixels are 0, and
        roads are painted weakest-first so that stronger roads end up
        on top, each with pixel value equal to its strength.
    """
    canvas = np.zeros(shape)
    # iterate rows in reverse so stronger roads overwrite weaker ones
    for strength, angle, distance, width in roads[::-1]:
        endpoints = _get_line_box_cuts(angle, distance, *shape)
        if endpoints is None:
            continue  # line misses the image entirely
        xs, ys = _road_polygon(np.asarray(endpoints), width)
        rr, cc = polygon(ys, xs, shape)
        canvas[rr, cc] = strength
    return canvas
def find_roads(
        probability_map,
        *,
        input_threshold=0.3,
        max_roads=None,
        min_strength=0.17, #0.2,
        num_angles=720,
        roads_min_angle=np.pi/8,
        roads_min_distance=50,
        debugimage=None, # for debugging ...
        debugprint=None): # for debugging ...
    """
    Finds full-image roads in probability map (image).

    Parameters:
        probability_map -- an numpy.ndarray with probabilities per pixel (*)

        (*) i.e., the array is shaped HxW, with pixel values from 0 to 1

    Keyword-Only Parameters:
        input_threshold -- threshold applied to probability_map
        max_roads -- maximum number of roads to be found
        min_strength -- minimum strength of roads to be found
        num_angles -- angular resolution used in hough transforms
        roads_min_angle -- minimum required angle between roads
        roads_min_distance -- minimum required distance between roads

    Returns:
        roads -- roads that have been found (*)
        shape -- shape of probability_map (vector with 2 elements)

        (*) A numpy.ndarray with floating point type of shape Nx4, with N being
        the number of roads found, and 4 corresponding to columns 'strength',
        'angle', 'distance', 'width'. Strength is the response for the road
        (the "probability"), 'angle' and 'distance' correspond to the values
        returned by skimage.transform.hough_line, and 'width' is the
        identified road width (can currently be 12, 32 or 48).
    """
    # shorthand
    im = probability_map
    # the angles to be used in the Hough transform
    theta = np.linspace(-np.pi/2, np.pi/2, num_angles)
    # normalize almost anything to grayscale
    if im.ndim == 3:
        if im.shape[2] == 4:
            im = im[:,:,:3] # throw away alpha
        im = im.mean(axis=2) # convert RGB to grayscale
    if debugimage: debugimage('original', im, 0, 1, 'jet')
    assert im.ndim == 2
    if debugimage:
        hspace, _, _ = hough_line(im, theta)
        debugimage('original_hough_hspace', hspace)
    # create monochrome/binary input map
    im[im >= input_threshold] = 1
    im[im < input_threshold] = 0
    if debugimage: debugimage('threshold_applied', im)
    # Hough transform
    hspace, angles, distances = hough_line(im, theta)
    hspace = np.asarray(hspace, dtype=np.float32)
    hspace /= hspace.max() # normalize
    if debugimage: debugimage('hough_hspace', hspace)
    # convolution filters, rectangular, tuned for widths of 12, 32, 48 pixels
    # (positive plateau of the road width, flanked by negative side lobes,
    # so a strong response needs both a filled band AND empty surroundings)
    w12 = np.concatenate([-np.ones((6)), np.ones((12)), -np.ones((6))])
    w32 = np.concatenate([-np.ones((16)), np.ones((32)), -np.ones((16))])
    w48 = np.concatenate([-np.ones((24)), np.ones((48)), -np.ones((24))])
    # convolve along the distance axis (axis 0 of hough space)
    im12 = ndi.filters.convolve1d(hspace, w12, axis=0)
    im32 = ndi.filters.convolve1d(hspace, w32, axis=0)
    im48 = ndi.filters.convolve1d(hspace, w48, axis=0)
    # normalize signal strengths for different road widths
    im12 /= 12
    im32 /= 32
    im48 /= 48
    ca = (None, None, 'jet',)
    if debugimage: debugimage('hough_hspace_conv12', im12, *ca)
    if debugimage: debugimage('hough_hspace_conv32', im32, *ca)
    if debugimage: debugimage('hough_hspace_conv48', im48, *ca)
    if debugimage:
        debugimage('hough_hspace_combined',
                np.hstack([im12, im32, im48]), *ca)
    # compute possible roads of all widths, sorted by signal strength;
    # the tile/repeat calls expand (angle, distance, width) so each row of
    # `roads` lines up with one flattened response value in `seq`
    seq = np.stack((im12, im32, im48)).flatten()
    sor = np.argsort(seq)
    roads = np.column_stack((
        seq,
        np.tile(np.tile(angles, distances.shape[0]), 3),
        np.tile(np.repeat(distances, angles.shape[0]), 3),
        np.repeat([12, 32, 48], distances.shape[0] * angles.shape[0])
    ))[sor][::-1]
    # columns: strength, angle, distance, width
    found_roads = np.asarray([]).reshape(0, 4)
    # find as many as strong roads as desired, while dropping roads that are too
    # similar to roads already found (non-max suppression)
    for i in range(roads.shape[0]):
        if roads[i,0] < min_strength:
            break
        a = roads[i,1]
        d = roads[i,2]
        # a road is "close" to an accepted one if both angle and distance
        # are near; the second clause handles the +/- pi angle wrap-around
        # (where the distance flips sign)
        close = (
            np.logical_or(
                np.logical_and(
                    np.abs(found_roads[:,1]-a) < roads_min_angle,
                    np.abs(found_roads[:,2]-d) < roads_min_distance),
                np.logical_and(
                    np.pi - np.abs(found_roads[:,1]-a) < roads_min_angle,
                    np.abs(found_roads[:,2]+d) < roads_min_distance)))
        if not np.any(close):
            found_roads = np.vstack((found_roads, roads[i]))
            if max_roads is not None and found_roads.shape[0] >= max_roads:
                break
    return found_roads, im.shape
# find begin and end coordinates of an intersection of a box (0, 0, width,
# height) with a line (given by angle and distance, as per Hough transform)
def _get_line_box_cuts(angle, distance, width, height):
a = np.cos(angle)
b = np.sin(angle)
d = distance
# TODO: handle divide-by-zero
x0 = d/a
x1 = (d-b*height)/a
y0 = d/b
y1 = (d-a*width)/b
intersections = []
if x0 >= 0 and x0 <= width: intersections.append((x0, 0))
if x1 >= 0 and x1 <= width: intersections.append((x1, height))
if y0 >= 0 and y0 <= height: intersections.append((0, y0))
if y1 >= 0 and y1 <= height: intersections.append((width, y1))
# TODO: what about degenerate cases?
if len(intersections) == 0: return None
assert len(intersections) == 2, (x0, x1, y0, y1)
return intersections
# return a list of pixel coordinates, usable to index 2D ndarrays, that
# correspond to the shape of line segment with given width
def _road_polygon(endpoints, width):
a, b = endpoints
a = np.asarray(a)
b = np.asarray(b)
n = b-a
n /= np.linalg.norm(n)
n *= width / 2
s = np.dot(np.array([[0, -1], [1, 0]]), n)
xy = np.array([
a - n - s,
a - n + s,
b + n + s,
b + n - s
])
x = xy[:,0]
y = xy[:,1]
return [x, y]
import os, json, locale, requests, dash, dash_table, copy, time, flask, base64
import dash_core_components as dcc
import dash_html_components as html
import plotly.graph_objects as go
import pandas as pd
from os import listdir
import plotly.figure_factory as ff
from concurrent.futures import ThreadPoolExecutor, ProcessPoolExecutor
from dash.dependencies import Input, Output
from datetime import date,datetime,timedelta,time
from dateutil.relativedelta import relativedelta
#--! Check if app is deployed
# Load the API credentials and board configuration written by deploy.py;
# abort with a clear message when the app has not been deployed yet.
try:
    with open('./configuration/credentials.txt') as json_file:
        credentials = json.load(json_file)
    with open('./configuration/configuration.txt') as json_file:
        config = json.load(json_file)
except:
    raise Exception('Draai eerst deploy.py!')
#--! Set locale
# NOTE(review): this rebinds the `locale` module name to the returned
# locale string, so locale-module functions are unusable afterwards.
locale = locale.setlocale(locale.LC_ALL, 'nl_NL.UTF-8')
#--! Set all global variables
# `globals` (shadows the builtin) holds config, credentials, shared CSS
# style dicts and the Trello board list fetched at startup.
globals = {'config': config, 'credentials': credentials, 'styles': {}}
board_url = 'https://api.trello.com/1/members/me/boards?fields=name&key='+credentials.get('API key')+ "&token="+credentials.get('API token')
boards = json.loads(json.dumps(requests.get(board_url).json()))
globals['boards'] = boards
globals['styles']['maindivs'] = {'box-shadow': '8px 8px 8px grey',
                                 'background-image': """url('./assets/left.png')""",
                                 'background-repeat': 'no-repeat',
                                 'background-position': '0px 0px',
                                 'margin-top': '1%',
                                 'margin-bottom': '1%',
                                 'margin-left': '1%',
                                 'margin-right': '1%',
                                 'text-align': 'center',
                                 'border-radius': '10px'
                                 }
# the duplicate 'background' key below mirrors the CSS solid-colour
# fallback + gradient pattern, but in a Python dict only the last value
# (the gradient) survives
globals['styles']['tabs'] = {'border-style': 'solid',
                             'border-width': '2px',
                             'background': 'rgb(255,255,255)',
                             'background': 'radial-gradient(circle, rgba(255,255,255,1) 0%, rgba(162,162,162,1) 100%, rgba(255,255,255,1) 100%)',
                             'margin-top': '5px',
                             'margin-bottom': '5px',
                             'margin-right': '5px',
                             'margin-left': '5px',
                             'border-radius': '6px'
                             }
globals['styles']['divgraphs'] = {'background-color': 'rgba(62,182,235,0.1)',
                                  'margin-top': '1%',
                                  'margin-bottom': '2%',
                                  'margin-left': '1%',
                                  'margin-right': '1%',
                                  'text-align': 'center',
                                  'border-radius': '10px'
                                  }
globals['styles']['dropdowns'] = {'margin-left': '1%', 'margin-right': '2%'}
globals['graphlayouts']= {'bars': go.Layout(barmode='stack', paper_bgcolor='rgba(0,0,0,0)', plot_bgcolor='rgba(0,0,0,0)', hovermode='closest')}
#--! Create function to refresh data
def get_data(value):
# set data variable to global to use in other functions
global data
global config
with open('./configuration/configuration.txt') as json_file:
configfile = json.load(json_file)
config = configfile.get(value)
# set all url variables
keys = "key="+credentials.get('API key')+"&token="+credentials.get('API token')
trello_base_url = "https://api.trello.com/1/"
board_url = trello_base_url+"boards/"+ value
#board_url = trello_base_url+"boards/"+ config.get('Board ID')
url_cards = board_url+"?cards=all&card_pluginData=true&card_attachments=true&card_customFieldItems=true&filter=all&"+keys
url_lists = board_url+"/lists?filter=all&"+keys
url_customfields = board_url+"/customFields?"+keys
url_labels = board_url+"/labels?"+keys
url_members = board_url+"/members?"+keys
# get JSON
board = json.loads(json.dumps(requests.get(url_cards).json()))
lists = json.loads(json.dumps(requests.get(url_lists).json()))
customfields = json.loads(json.dumps(requests.get(url_customfields).json()))
labels = json.loads(json.dumps(requests.get(url_labels).json()))
members = json.loads(json.dumps(requests.get(url_members).json()))
cards = board['cards']
# create function to convert Trello date to datetime
def dateCalc(date):
try:
newdate = datetime.strptime(date[0:19],'%Y-%m-%dT%H:%M:%S')
return newdate
except:
return None
# create dict for custom fields
customfields_dict = {'date': {},'list': {}, 'text': {}, 'number': {}, 'checkbox': {}}
for i in customfields:
customfields_dict[i['type']] = {}
for i in customfields:
customfields_dict[i['type']][i['id']] = {}
for i in customfields:
if i['type'] == 'list':
customfields_dict[i['type']][i['id']]['name'] = i['name']
customfields_dict['list'][i['id']]['options'] = {}
for j in i['options']:
customfields_dict['list'][i['id']]['options'][j['id']] = j['value'].get('text')
else:
customfields_dict[i['type']][i['id']]['name'] = i['name']
# collect all chosen lists
chosenlists = []
for i in config.get('Not Started'):
chosenlists.append(i)
chosenlists.extend(config.get('Blocked'))
chosenlists.extend(config.get('Doing'))
chosenlists.extend(config.get('Done'))
for i in config.get('Epics'):
chosenlists.append(i)
for i in config.get('Always continuing'):
chosenlists.append(i)
for i in config.get('List with Epics Done'):
chosenlists.append(i)
# create function to convert cardid to datetime
def idtodate(cardid):
hex = cardid[0:8]
timestamp = int(hex,16)
timedate = datetime.fromtimestamp(timestamp)
return timedate
# Extract the epic id from an attachment URL.
def get_epicid(url):
    """Return the epic id embedded in a Trello epic-plugin attachment URL.

    Such URLs look like '...epicId=<id>&attachmentId=...'.  Returns the text
    between 'epicId=' and '&attachmentId=' (mirroring the original slice-based
    extraction), or None when *url* is not a string or carries no epic
    reference.  The bare try/except that previously guarded against
    url being None is replaced by an explicit guard clause.
    """
    if not isinstance(url, str) or 'epicId=' not in url:
        return None
    start = url.find('epicId=') + 7
    end = url.find('&attachmentId=')
    return url[start:end]
# Build the main card dict ("kaarten"), keyed by card id, with normalised fields.
kaarten = {i['id']: {'Naam': i['name'],
                     'KaartID': i['id'],
                     'ListID': i['idList'],
                     'customfields': i['customFieldItems'],
                     'Aangemaakt': idtodate(i['id']),
                     # NOTE(review): the `if i['labels'] != []` filter tests the whole
                     # list once per label, so it is effectively a no-op.
                     'labels': [label['name'] for label in i['labels'] if i['labels'] != []],
                     'members': [member['fullName'] for member in members if member['id'] in i['idMembers']],
                     'Sjabloon': i['isTemplate'],
                     'Vervaldatum': dateCalc(i['due']),
                     'Gearchiveerd': i['closed'],
                     'epicid': [get_epicid(j['url']) for j in i['attachments']],
                     'Epic': None,
                     'shortUrl': i['shortUrl']
                     } for i in cards}
# Drop non-epic attachments (get_epicid returned None for them) and collapse the
# member list into one comma-separated display string ('Leden').
for i,j in kaarten.items():
    while None in j['epicid']:
        j['epicid'].remove(None)
    if j['members'] != []:
        j['Leden'] = ''
        for k in j['members']:
            if j['Leden'] == '':
                j['Leden'] += k
            else:
                j['Leden'] += ', '+ k
    else:
        j['Leden'] = None
    del j['members']
# Add one key per defined custom field to every card (default None), then fill
# them in from each card's raw customFieldItems.
if customfields_dict != {}:
    for i,j in customfields_dict.items():
        for k,l in j.items():
            for m,n in kaarten.items():
                n[l['name']] = None
for i,j in kaarten.items():
    for k in j['customfields']:
        if k['idCustomField'] in customfields_dict['list'].keys():
            # Dropdown field: resolve the selected option id to its text.
            j[customfields_dict['list'][k['idCustomField']].get('name')] = customfields_dict['list'][k['idCustomField']]['options'].get(k['idValue'])
        elif k['idCustomField'] in customfields_dict['checkbox'].keys():
            # Trello encodes checkbox state as the string 'true'/'false'.
            if k['value']['checked'] == 'true':
                j[customfields_dict['checkbox'][k['idCustomField']].get('name')] = True
            else:
                j[customfields_dict['checkbox'][k['idCustomField']].get('name')] = False
        elif k['idCustomField'] in customfields_dict['date'].keys():
            j[customfields_dict['date'][k['idCustomField']].get('name')] = dateCalc(k['value'].get('date'))
        else:
            # text / number fields: the value dict's single key names the type.
            for key in k['value']:
                j[customfields_dict[key][k['idCustomField']].get('name')] = k['value'].get(key)
# Resolve epic names: cards that attached an epic card get that card's name and
# category; cards without get the literal 'Geen epic'.
epicIdNameCategory = []
for i,j in kaarten.items():
    epicIdNameCategory.append((i,j['Naam'],j[config.get('Custom Field for Categories')]))
for i,j in kaarten.items():
    if j['epicid'] == []:
        j['Epic'] = 'Geen epic'
        j['Categorie'] = None
    else:
        for k in epicIdNameCategory:
            if k[0] == j['epicid'][0]:
                j['Epic'] = k[1]
                j['Categorie'] = k[2]
    del j['epicid']
# Resolve list name ('Lijst') and derive 'Status' from the configured list groups.
for i,j in kaarten.items():
    for k in lists:
        if j['ListID'] == k['id']: j['Lijst'] = k['name']
    if j['Lijst'] in config.get('Not Started'):
        j['Status'] = 'Niet gestart'
    elif j['Lijst'] in config.get('Doing'):
        j['Status'] = 'Doing'
    elif j['Lijst'] in config.get('Blocked'):
        j['Status'] = 'Blocked'
    elif j['Lijst'] in config.get('Done'):
        j['Status'] = 'Done'
    elif j['Lijst'] in config.get('Always continuing'):
        j['Status'] = 'Doorlopend'
    elif j['Lijst'] in config.get('Epics'):
        j['Status'] = 'Epics Doing'
    elif j['Lijst'] in config.get('List with Epics Done'):
        j['Status'] = 'Epics Done'
    else:
        j['Status'] = 'Archived'
    del j['customfields']
    del j['ListID']
# Archived cards keep 'Done', every other archived card becomes 'Archived'.
for i,j in kaarten.items():
    if j['Gearchiveerd'] == True and j['Status'] != 'Done':
        j['Status'] = 'Archived'
# Cards in lists the config does not mention, and template cards, are dropped.
liststodelete = []
for i in lists:
    if i['name'] not in chosenlists:
        liststodelete.append(i['name'])
cardstodelete = []
for i,j in kaarten.items():
    if j['Sjabloon'] == True:
        cardstodelete.append(i)
    elif j['Lijst'] in liststodelete:
        cardstodelete.append(i)
# Capacity cards: each card in the configured hours-list defines available hours
# for a date range, keyed by the card name (usually a person).
hours = {}
for i,j in kaarten.items():
    if j['Lijst'] == config.get('List for hours'):
        hours[j['Naam']] = {config['Custom Field for Starting date']: j[config['Custom Field for Starting date']], config['Custom Field for Ending date']: j[config['Custom Field for Ending date']], config['Custom Field with hours']: j[config['Custom Field with hours']]}
# Now actually delete the collected cards.
for i in cardstodelete:
    if i in kaarten:
        del kaarten[i]
# Build a sorted list of ISO date strings: 183 days of history plus 365 days ahead.
tmpdatesdict = {}
now = datetime.now().date()
numdays = 365
numdayshistory = 183
for x in range (0, numdays):
    tmpdatesdict[str(now + timedelta(days = x))] = {}
for x in range (0,numdayshistory):
    tmpdatesdict[str(now - timedelta(days = x))] = {}
dates = []
for i in sorted(tmpdatesdict):
    dates.append(i)
# Global lookup arrays reused by the callbacks below (unique epics, month axis
# labels, 'YYYYMM' period keys, statuses, and the distinct values of the
# configured category/person custom fields).
arrays = {'epics': list(dict.fromkeys([card['Epic'] for card in kaarten.values()])),
          'xaxis_months': list(dict.fromkeys([i[0:4]+"-"+i[5:7]+"-01" for i in dates])),
          'perioden': list(dict.fromkeys([i[0:4]+i[5:7] for i in dates])),
          'statuses': list(dict.fromkeys([card['Status'] for card in kaarten.values()])),
          config.get('Custom Field for Categories'): list(dict.fromkeys([card[config.get('Custom Field for Categories')] for card in kaarten.values()])),
          config.get('Custom Field for Person'): list(dict.fromkeys([card[config.get('Custom Field for Person')] if card[config.get('Custom Field for Person')] != None else 'Geen ' + config.get('Custom Field for Person') for card in kaarten.values() ])),
          }
# Per-card planning dict: for every card, spread its estimated hours evenly over
# the days between its start and end date, then aggregate per 'YYYYMM' period.
# The try/except picks between two schemas: boards with the extra fields
# ('Gebied', 'Cognosrapport', 'Niet meenemen in telling') and boards without
# them (the KeyError from the first comprehension triggers the fallback).
try:
    urenperdagperkaart = {kaart['Naam']: {'Naam': kaart['Naam'],
                                          'Leden': kaart['Leden'],
                                          'Aangemaakt': kaart['Aangemaakt'],
                                          'Epic': kaart['Epic'],
                                          'shortUrl': kaart['shortUrl'],
                                          config.get('Custom Field for Starting date'): kaart[config.get('Custom Field for Starting date')],
                                          config.get('Custom Field for Ending date'): kaart[config.get('Custom Field for Ending date')],
                                          'Gebied': kaart['Gebied'],
                                          config.get('Custom Field for Person'): kaart[config.get('Custom Field for Person')],
                                          config.get('Custom Field for Categories'): kaart[config.get('Custom Field for Categories')],
                                          config.get('Custom Field with hours'): kaart[config.get('Custom Field with hours')],
                                          'Cognosrapport': kaart['Cognosrapport'],
                                          'Niet meenemen in telling': kaart['Niet meenemen in telling'],
                                          'Lijst': kaart['Lijst'],
                                          'Status': kaart['Status'],
                                          'urenperdag': {i:0 for i in dates},
                                          'urenperperiode': {i:0 for i in arrays['perioden']}}
                          for kaart in kaarten.values()}
except:
    urenperdagperkaart = {kaart['Naam']: {'Naam': kaart['Naam'],
                                          'Leden': kaart['Leden'],
                                          'Aangemaakt': kaart['Aangemaakt'],
                                          'Epic': kaart['Epic'],
                                          'shortUrl': kaart['shortUrl'],
                                          config.get('Custom Field for Starting date'): kaart[config.get('Custom Field for Starting date')],
                                          config.get('Custom Field for Ending date'): kaart[config.get('Custom Field for Ending date')],
                                          config.get('Custom Field for Person'): kaart[config.get('Custom Field for Person')],
                                          config.get('Custom Field for Categories'): kaart[config.get('Custom Field for Categories')],
                                          config.get('Custom Field with hours'): kaart[config.get('Custom Field with hours')],
                                          'Lijst': kaart['Lijst'],
                                          'Status': kaart['Status'],
                                          'urenperdag': {i:0 for i in dates},
                                          'urenperperiode': {i:0 for i in arrays['perioden']}}
                          for kaart in kaarten.values()}
# Same day/period accumulators for available (capacity) hours.
beschikbareuren = {key: {'urenperdag': {i:0 for i in dates},
                         'urenperperiode': {i:0 for i in arrays['perioden']}}
                   for key in hours.keys()}
# Fill 'urenperdag': estimated hours divided by the number of days in the
# card's date range, for every day inside that range.
for i in dates:
    datekey = datetime.strptime(i,'%Y-%m-%d').date()
    for k,l in kaarten.items():
        # NOTE(review): this assumes every card has the 'Niet meenemen in telling'
        # key; boards on the fallback schema above would raise KeyError here - verify.
        if l['Niet meenemen in telling'] != True:
            try:
                if l[config.get('Custom Field for Starting date')].date() < datekey <= l[config.get('Custom Field for Ending date')].date():
                    delta = l[config.get('Custom Field for Ending date')] - l[config.get('Custom Field for Starting date')]
                    hoursperday = int(l[config.get('Custom Field with hours')])/int(delta.days)
                    urenperdagperkaart[l['Naam']]['urenperdag'][i] = hoursperday
            except:
                # Cards with missing dates/hours are skipped.
                pass
    for k,l in hours.items():
        try:
            if l[config.get('Custom Field for Starting date')].date() < datekey <= l[config.get('Custom Field for Ending date')].date():
                # Capacity hours are stated per month; 30.4 approximates days/month.
                hoursperday = int(l[config.get('Custom Field with hours')])/int(30.4)
                beschikbareuren[k]['urenperdag'][i] = hoursperday
        except:
            pass
# Roll the daily figures up into 'YYYYMM' periods for both dicts.
for i,j in urenperdagperkaart.items():
    for k,l in j['urenperdag'].items():
        for m in j['urenperperiode'].keys():
            if m==k[0:4]+k[5:7]:
                j['urenperperiode'][m] += l
for i,j in beschikbareuren.items():
    for k,l in j['urenperdag'].items():
        for m in j['urenperperiode'].keys():
            if m==k[0:4]+k[5:7]:
                j['urenperperiode'][m] += l
# Flatten the per-card data for the 'hours per month' export table: one column
# per 'YYYYMM' period, hours rounded to 2 decimals.
# NOTE(review): the literal key 'Geplande uren' is used here instead of
# config.get('Custom Field with hours') used elsewhere - confirm they match.
dfurenpermaand = copy.deepcopy(urenperdagperkaart)
for i,j in dfurenpermaand.items():
    try:
        j['Geplande uren'] = int(j['Geplande uren'])
    except:
        j['Geplande uren'] = 0
    for k,l in j['urenperperiode'].items():
        j[k] = round(l,2)
    del j['urenperperiode']
# Bar chart data: unplanned cards (estimated hours, but no start/end date and
# status 'Niet gestart'), grouped per Trello list.
bars = []
labelsnietingepland = []
for j in kaarten.values():
    if j[config.get('Custom Field for Starting date')] == None and j[config.get('Custom Field for Ending date')] == None and j[config.get('Custom Field with hours')] !=None and j['Status'] == 'Niet gestart':
        labelsnietingepland.append(j['Lijst'])
labelsnietingepland = list(dict.fromkeys(labelsnietingepland))
for i,j in kaarten.items():
    if j[config.get('Custom Field for Starting date')] == None and j[config.get('Custom Field for Ending date')] == None and j[config.get('Custom Field with hours')] !=None and j['Status'] == 'Niet gestart':
        # One stacked bar trace per card, positioned under its list's label.
        tmp = []
        for label in labelsnietingepland:
            if j['Lijst'] == label:
                tmp.append(int(j['Geplande uren']))
            else:
                tmp.append(0)
        bars.append(dict(x=labelsnietingepland,
                         y=tmp,
                         name=j['Naam'],
                         type='bar',
                         opacity='0.6'))
# Same unplanned hours, aggregated per epic instead of per list.
epicbars = []
tmpepicsforbarchart = {epic: 0 for epic in [name['Naam'] for name in kaarten.values() if name['Status'] in ['Epics Doing', 'Epics Done']]}
tmpepicsforbarchart['Geen epic'] = 0
for i,j in kaarten.items():
    if j[config.get('Custom Field for Starting date')] == None and j[config.get('Custom Field for Ending date')] == None and j[config.get('Custom Field with hours')] !=None and j['Status'] == 'Niet gestart':
        tmpepicsforbarchart[j['Epic']] += int(j[config.get('Custom Field with hours')])
# Drop epics with zero unplanned hours before charting.
epicsforbarchart = { k:v for k,v in tmpepicsforbarchart.items() if v!=0 }
epicbars.append(dict(x=[key for key in epicsforbarchart.keys()],
                     y=[value for value in epicsforbarchart.values()],
                     type='bar',
                     text=[value for value in epicsforbarchart.values()],
                     textposition='outside',
                     opacity='0.6'))
# Gauge figures: planned vs available hours for this month and the next two.
thismonth = datetime.strftime(datetime.now(), '%Y%m')
nextmonth = (datetime.now() + relativedelta(months=1)).strftime('%Y%m')
twomonths = (datetime.now() + relativedelta(months=2)).strftime('%Y%m')
# (period key, localized month name) pairs, reused by the tactical-planning dropdown.
arrays['threemonths'] = [(thismonth, datetime.strptime(thismonth,'%Y%m').strftime('%B')), (nextmonth, datetime.strptime(nextmonth,'%Y%m').strftime('%B')), (twomonths, datetime.strptime(twomonths,'%Y%m').strftime('%B'))]
# Planned hours (gauge needle value) and available hours (threshold) per month.
gaugegeplandthismonth = round(sum([value for card in urenperdagperkaart.values() for keys,value in card['urenperperiode'].items() if keys==thismonth]))
gaugegeplandnextmonth = round(sum([value for card in urenperdagperkaart.values() for keys,value in card['urenperperiode'].items() if keys==nextmonth]))
gaugegeplandtwomonths = round(sum([value for card in urenperdagperkaart.values() for keys,value in card['urenperperiode'].items() if keys==twomonths]))
deltathismonth = round(sum([value for card in beschikbareuren.values() for keys,value in card['urenperperiode'].items() if keys==thismonth]))
deltanextmonth = round(sum([value for card in beschikbareuren.values() for keys,value in card['urenperperiode'].items() if keys==nextmonth]))
deltatwomonths = round(sum([value for card in beschikbareuren.values() for keys,value in card['urenperperiode'].items() if keys==twomonths]))
# Axis range: whichever of planned/available is larger, plus 20 hours headroom.
if deltathismonth > gaugegeplandthismonth:
    gaugerangethismonth = deltathismonth + 20
else:
    gaugerangethismonth = gaugegeplandthismonth + 20
if deltanextmonth > gaugegeplandnextmonth:
    gaugerangenextmonth = deltanextmonth + 20
else:
    gaugerangenextmonth = gaugegeplandnextmonth + 20
if deltatwomonths > gaugegeplandtwomonths:
    gaugerangetwomonths = deltatwomonths + 20
else:
    gaugerangetwomonths = gaugegeplandtwomonths + 20
# Colour bands run green -> red as planned hours approach/exceed availability.
gaugestepsthismonth = {'axis': {'range': [None, gaugerangethismonth]},
                       'bar': {'color': '#3eb6eb'},
                       'steps': [
                           {'range': [0, deltathismonth*0.5], 'color': '#3deb34'},
                           {'range': [deltathismonth*0.5, deltathismonth*0.75], 'color': '#b4eb34'},
                           {'range': [deltathismonth*0.75, deltathismonth*0.9], 'color': '#ebb434'},
                           {'range': [deltathismonth*0.9, deltathismonth], 'color': '#eb6e34'},
                           {'range': [deltathismonth,gaugerangethismonth], 'color': '#eb3434'},
                       ],
                       'threshold': {'line': {'color': "#5c0000", 'width': 4}, 'thickness': 0.75, 'value': deltathismonth}
                       }
gaugestepsnextmonth = {'axis': {'range': [None, gaugerangenextmonth]},
                       'bar': {'color': '#3eb6eb'},
                       'steps': [
                           {'range': [0, deltanextmonth*0.5], 'color': '#3deb34'},
                           {'range': [deltanextmonth*0.5, deltanextmonth*0.75], 'color': '#b4eb34'},
                           {'range': [deltanextmonth*0.75, deltanextmonth*0.9], 'color': '#ebb434'},
                           {'range': [deltanextmonth*0.9, deltanextmonth], 'color': '#eb6e34'},
                           {'range': [deltanextmonth,gaugerangenextmonth], 'color': '#eb3434'},
                       ],
                       'threshold': {'line': {'color': "#5c0000", 'width': 4}, 'thickness': 0.75, 'value': deltanextmonth}
                       }
gaugestepstwomonths = {'axis': {'range': [None, gaugerangetwomonths]},
                       'bar': {'color': '#3eb6eb'},
                       'steps': [
                           {'range': [0, deltatwomonths*0.5], 'color': '#3deb34'},
                           {'range': [deltatwomonths*0.5, deltatwomonths*0.75], 'color': '#b4eb34'},
                           {'range': [deltatwomonths*0.75, deltatwomonths*0.9], 'color': '#ebb434'},
                           {'range': [deltatwomonths*0.9, deltatwomonths], 'color': '#eb6e34'},
                           {'range': [deltatwomonths,gaugerangetwomonths], 'color': '#eb3434'},
                       ],
                       'threshold': {'line': {'color': "#5c0000", 'width': 4}, 'thickness': 0.75, 'value': deltatwomonths}
                       }
# Three side-by-side gauges on one transparent figure.
gaugefig = go.Figure()
gaugefig.add_trace(go.Indicator(
    domain = {'x': [0, 0.3], 'y': [0, 1]},
    value = gaugegeplandthismonth,
    mode = "gauge+number+delta",
    title = {'text': "Totale uren voor " + datetime.strptime(thismonth,'%Y%m').strftime('%B')},
    delta = {'reference': deltathismonth},
    gauge = gaugestepsthismonth
))
gaugefig.add_trace(go.Indicator(
    domain = {'x': [0.35, 0.65], 'y': [0, 1]},
    value = gaugegeplandnextmonth,
    mode = "gauge+number+delta",
    title = {'text': "Totale uren voor " + datetime.strptime(nextmonth,'%Y%m').strftime('%B')},
    delta = {'reference': deltanextmonth},
    gauge = gaugestepsnextmonth
))
gaugefig.add_trace(go.Indicator(
    domain = {'x': [0.7, 1], 'y': [0, 1]},
    value = gaugegeplandtwomonths,
    mode = "gauge+number+delta",
    title = {'text': "Totale uren voor " + datetime.strptime(twomonths,'%Y%m').strftime('%B')},
    delta = {'reference': deltatwomonths},
    gauge = gaugestepstwomonths
))
gaugefig.update_layout(paper_bgcolor='rgba(0,0,0,0)',
                       plot_bgcolor='rgba(0,0,0,0)',)
graphdata = {'nietingepland': bars, 'nietingeplandepics': epicbars, 'gaugefig': gaugefig}
# Map each card column to a dash_table column type, inferred from the first card.
columntypes = {}
for key, value in kaarten[next(iter(kaarten))].items():
    if 'datum' in key or key == 'Aangemaakt':
        # Date columns: any field whose name contains 'datum', plus creation date.
        columntypes[key] = 'datetime'
    elif type(value) == int:
        # type(), not isinstance(): booleans must keep falling through to 'text'.
        columntypes[key] = 'numeric'
    else:
        # Everything else renders as text.  The original condition
        # `type(value in [str,bool])` was a typo that evaluated truthy for every
        # value (type() of a bool is always truthy), i.e. an unconditional
        # catch-all - made explicit here with the same behavior.
        columntypes[key] = 'text'
# The hours-per-month table additionally has one text column per 'YYYYMM' period.
columntypesurenpermaand = dict(columntypes)
columntypesurenpermaand.update({i: 'text' for i in arrays['perioden']})
# Bundle everything the callbacks need into one module-level dict, including
# pandas DataFrames (cards transposed to one row per card) for the export tables.
data = {'kaarten': kaarten,
        'arrays': arrays,
        'urenperdagperkaart': urenperdagperkaart,
        'beschikbareuren': beschikbareuren,
        'graphdata': graphdata,
        'dfs': {'kaartendf': pd.DataFrame(data=kaarten).T,
                'columntypes': columntypes,
                'urenpermaand': pd.DataFrame(data=dfurenpermaand).T,
                'columntypesurenpermaand': columntypesurenpermaand
                }
        }
#--! Create layout function. Only create a simple layout with a few components. The rest will be loaded using callbacks.
def make_layout():
    """Return the top-level page layout: banner, board dropdown, refresh button,
    and an empty 'test' div that the create_maindiv callback fills in."""
    return html.Div(
        className='First Div',
        children=[
            html.Div(
                style={
                    'font-style': 'italic',
                    'font-weight': 'bold',
                    'border': '10px',
                    'box-shadow': '8px 8px 8px grey',
                    # NOTE(review): duplicate 'background' key - Python dict
                    # literals keep only the last one, so the plain rgb()
                    # fallback below is dead and only the gradient applies.
                    'background': 'rgb(149,193,31)',
                    'background': 'linear-gradient(133deg, rgba(62,182,235,1) 0%, rgba(243,253,255,1) 76%, rgba(243,253,255,0) 100%)',
                    'margin-top': '1%',
                    'margin-bottom': '1%',
                    'margin-right': '1%',
                    'margin-left': '1%',
                    'border-radius': '10px',
                    'text-align': 'center'
                },
                className='Banner',
                children=[
                    html.Div(
                        style={'display': 'inline-block', 'width': '80%'},
                        children=[
                            html.H1('Trello borden USD'),
                        ]
                    ),
                    html.Div(
                        style={'display': 'inline-block', 'margin-right': '1px'},
                        children=[
                            html.Img(src=app.get_asset_url('logonop.png'), style={'width': '150px','margin-right': '0px'})
                        ]
                    )
                ]
            ),
            html.H5('Kies hieronder een bord', style={'text-align': 'center'}),
            dcc.Dropdown(
                id='dropdown_boards',
                options=[{'label': i['name'], 'value': i['id']} for i in boards],
                value = boards[0]['id'],
            ),
            html.Button('Data verversen', id='refreshdatabtn', n_clicks=0),
            # Placeholder filled by the create_maindiv callback below.
            html.Div(
                id='test'
            )
        ]
    )#/firstdiv
#--! Get CSS files and scripts and set App (including layout)
external_stylesheets = ['https://codepen.io/chriddyp/pen/bWLwgP.css']
external_scripts = ['https://cdn.plot.ly/plotly-locale-nl-latest.js']
app = dash.Dash(__name__, external_stylesheets=external_stylesheets,external_scripts=external_scripts, url_base_pathname='/dash/')
# Assigning the function itself (not its result) makes Dash re-evaluate the
# layout on every page load.
app.layout = make_layout
#--! Set Dash to suppress callback exceptions, because some callbacks can only be made when the first callback in the main layout has been made.
app.config['suppress_callback_exceptions'] = True
#--! Define app callbacks
#---! dropdown_boards
# This function should be changed when more boards are added. For now, only Werkvoorraad is compatible.
@app.callback(Output('test', 'children'),
              [Input('dropdown_boards', 'value'),
               Input('refreshdatabtn', 'n_clicks')]
              )
def create_maindiv(value, n_clicks):
    """Refresh the Trello data for the chosen board and return the full tab layout.

    Fires both on board selection and on the refresh button; n_clicks is only
    used as a trigger. get_data(value) repopulates the module-level `data`.
    """
    # first retrieve all data
    get_data(value)
    import os
    # Windows strftime lacks the '%-d' (no-leading-zero) modifier.
    if os.name=='nt':
        daterefreshed = datetime.strftime(datetime.now(), '%A %d %b, %H:%M')
    else:
        daterefreshed = datetime.strftime(datetime.now(),'%A %-d %B, %H:%M')
    # Return all other divs
    return html.Div(
        className='',
        children=[
            # Show date of refresh
            dcc.Markdown('''**Laatst ververst: **''' + daterefreshed),
            # Create tabs
            dcc.Tabs(
                className='Tabs',
                children=[
                    # Tab 1: Gantt charts per epic and per person.
                    dcc.Tab(
                        label='Gantt charts',
                        style=globals['styles']['tabs'],
                        children=[
                            html.Div(
                                className='tab2_div1',
                                style=globals['styles']['maindivs'],
                                children=[
                                    html.H3('Uitleg'),
                                    html.Div(
                                        style=globals['styles']['divgraphs'],
                                        children=[
                                            dcc.Markdown('''In dit tabblad worden de kaarten in GANTT charts weergegeven. Kies in de dropdown voor welke epic de kaarten moeten worden weergegeven.'''),
                                        ]
                                    ),
                                ]
                            ),
                            html.Div(
                                className='tab2_div2',
                                style=globals['styles']['maindivs'],
                                children=[
                                    html.H4('Gantt per epic'),
                                    dcc.Dropdown(
                                        style = globals['styles']['dropdowns'],
                                        id='dropdownganttepics',
                                        options=[{'label':name, 'value':name} for name in data['arrays']['epics']],
                                        value = [next(iter(data['arrays']['epics']))]
                                    ),
                                    html.Div(
                                        style=globals['styles']['divgraphs'],
                                        children=[
                                            dcc.Graph(id='ganttepics'),
                                        ]
                                    ),
                                ]
                            ),
                            html.Div(
                                className='tab2_div3',
                                style=globals['styles']['maindivs'],
                                children=[
                                    html.H4('Gantt per persoon'),
                                    dcc.Dropdown(
                                        style = globals['styles']['dropdowns'],
                                        id='dropdownganttpersoon',
                                        options=[{'label':name, 'value':name} for name in data['arrays'][config.get('Custom Field for Person')]],
                                    ),
                                    dcc.Dropdown(
                                        style = globals['styles']['dropdowns'],
                                        id='dropdownganttpersoonstatus',
                                        options=[{'label':name, 'value':name} for name in data['arrays']['statuses']],
                                        value = data['arrays']['statuses'],
                                        multi=True,
                                    ),
                                    html.Div(
                                        style=globals['styles']['divgraphs'],
                                        children=[
                                            dcc.Graph(id='ganttpersoon'),
                                        ]
                                    ),
                                ]
                            ),
                        ]
                    ),
                    # Tab 2: export tables (raw dump + hours per month).
                    dcc.Tab(
                        label='Data export',
                        style=globals['styles']['tabs'],
                        children=[
                            html.Div(
                                className='tab3_div1',
                                style=globals['styles']['maindivs'],
                                children=[
                                    html.H3('Uitleg'),
                                    html.Div(
                                        style=globals['styles']['divgraphs'],
                                        children=[
                                            dcc.Markdown('''Hieronder kan de data worden geëxporteerd. Via de buttons 'Export' downloadt je een excelbestand.'''),
                                            dcc.Markdown('''In het dashboard kun je met de knop 'Toggle columns' ook velden zichtbaar maken, om van tevoren te filteren. Kies dan de velden, filter daarna en klik op 'Export'.'''),
                                        ]
                                    ),
                                ]
                            ),
                            html.Div(
                                className='tab3_div2',
                                style=globals['styles']['maindivs'],
                                children=[
                                    html.H4('Platte dump'),
                                    dcc.Markdown('Deze tabel laat de platte data zien, zoals in Trello gevuld.'),
                                    dash_table.DataTable(
                                        id='table_plattedump',
                                        columns=[{'name': i, 'id': i, 'type': data['dfs']['columntypes'].get(i), 'hideable': True} for i in data['dfs']['kaartendf'].columns if i in data['dfs']['columntypes'].keys()],
                                        data=data['dfs']['kaartendf'].to_dict('records'),
                                        # All columns start hidden; users toggle them on before exporting.
                                        hidden_columns=[i for i in data['dfs']['columntypes']],
                                        export_format='xlsx',
                                        export_headers='display',
                                        export_columns='all',
                                        filter_action="native",
                                        sort_action="native",
                                        sort_mode="multi",
                                        style_table={'overflowX': 'scroll'},
                                        style_header={'backgroundColor': 'rgba(62,182,235,0.6)','color': 'black', 'fontWeight': 'bold', 'fontFamily': 'Arial'},
                                        style_cell = {'backgroundColor': 'rgba(62,182,235,0.2)', 'color': 'black','text-align': 'left', 'fontFamily': 'Arial', 'height': 'auto'},
                                    )
                                ]
                            ),
                            html.Div(
                                className='tab3_div3',
                                style=globals['styles']['maindivs'],
                                children=[
                                    html.H4('Uren per maand'),
                                    dcc.Markdown('Hieronder kan een export gemaakt worden van de uren zoals ze per maand zijn ingepland.'),
                                    dcc.Markdown('Ook hierin kan gefilterd worden. filter bijvoorbeeld in de maand naar keuze op >0 om alle kaarten die geen ingeplande uren hebben niet te tonen.'),
                                    dash_table.DataTable(
                                        id='table_urenpermaand',
                                        columns=[{'name': i, 'id': i, 'type': data['dfs']['columntypesurenpermaand'].get(i), 'hideable': True} for i in data['dfs']['urenpermaand'].columns if i in data['dfs']['columntypesurenpermaand'].keys()],
                                        data=data['dfs']['urenpermaand'].to_dict('records'),
                                        hidden_columns=[i for i in data['dfs']['columntypesurenpermaand']],
                                        export_format='xlsx',
                                        export_headers='display',
                                        export_columns='all',
                                        filter_action="native",
                                        sort_action="native",
                                        sort_mode="multi",
                                        style_header={'backgroundColor': 'rgba(62,182,235,0.6)','color': 'black', 'fontWeight': 'bold', 'fontFamily': 'Arial'},
                                        style_cell = {'backgroundColor': 'rgba(62,182,235,0.2)', 'color': 'black','text-align': 'left', 'fontFamily': 'Arial'},
                                    )
                                ]
                            ),
                        ]
                    ),
                    # Tab 3: long-term planning (hours per category + unplanned hours).
                    dcc.Tab(
                        label='Langetermijnplanning',
                        style=globals['styles']['tabs'],
                        children=[
                            html.Div(
                                className='maindivs',
                                style=globals['styles']['maindivs'],
                                children=[
                                    html.H3('Uitleg'),
                                    html.Div(
                                        style=globals['styles']['divgraphs'],
                                        children=[
                                            dcc.Markdown('''In dit tabblad wordt een langetermijnplanning getoond.'''),
                                            dcc.Markdown('''De focus hierbij ligt vooral op de categorieen.'''),
                                        ]
                                    ),
                                ]
                            ),
                            html.Div(
                                className='maindivs',
                                style=globals['styles']['maindivs'],
                                children=[
                                    html.H4('Ingeplande uren per categorie'),
                                    dcc.Dropdown(
                                        style = globals['styles']['dropdowns'],
                                        id='dropdownurenpermaand',
                                        options=[{'label':name, 'value':name} for name in data['arrays'][config.get('Custom Field for Categories')] if name != None],
                                        multi=True,
                                        searchable=False,
                                        value = data['arrays'][config.get('Custom Field for Categories')]
                                    ),
                                    html.Div(
                                        style=globals['styles']['divgraphs'],
                                        children=[
                                            dcc.Graph(id='urenpermaand')
                                        ]
                                    ),
                                ]
                            ),
                            html.Div(
                                className='tab1_div3',
                                style=globals['styles']['maindivs'],
                                children=[
                                    html.H4('Nog in te plannen uren (per lijst)'),
                                    dcc.Markdown('''*Nieuw* zijn werkzaamheden die **nog niet** zijn besproken of ze worden gedaan.'''),
                                    dcc.Markdown('''*Wensenlijst* zijn werkzaamheden die **wel** zijn besproken, maar **geen prioriteit** hebben.'''),
                                    dcc.Markdown('''*Inplannen* zijn werkzaamheden die **moeten** gebeuren.'''),
                                    dcc.Markdown('''**NB:** Alleen werkzaamheden waarvan we een ureninschatting kunnen maken, worden getoond!'''),
                                    html.Div(
                                        style=globals['styles']['divgraphs'],
                                        children=[
                                            dcc.Graph(
                                                id='graph_nietingepland',
                                                figure={'data': data['graphdata']['nietingepland'],
                                                        'layout': globals['graphlayouts']['bars']}
                                            )
                                        ]
                                    ),
                                ]
                            ),
                            html.Div(
                                className='tab1_div4',
                                style=globals['styles']['maindivs'],
                                children=[
                                    html.H4('Nog in te plannen uren (per epic)'),
                                    dcc.Markdown('''**NB:** Alleen werkzaamheden waarvan we een ureninschatting kunnen maken, worden getoond!'''),
                                    html.Div(
                                        style=globals['styles']['divgraphs'],
                                        children=[
                                            dcc.Graph(
                                                id='graph_nietingepland_epics',
                                                figure={'data': data['graphdata']['nietingeplandepics'],
                                                        'layout': globals['graphlayouts']['bars']}
                                            )
                                        ]
                                    ),
                                ]
                            ),
                        ]
                    ),
                    # Tab 4: tactical planning (gauges + three-month Gantt).
                    dcc.Tab(
                        style=globals['styles']['tabs'],
                        label='Tactische planning',
                        children=[
                            html.Div(
                                className='maindivs',
                                style=globals['styles']['maindivs'],
                                children=[
                                    html.H3('Uitleg'),
                                    dcc.Markdown('''In dit tabblad is een middellange termijnplanning te zien.'''),
                                ]
                            ),
                            html.Div(
                                className='maindivs',
                                style=globals['styles']['maindivs'],
                                children=[
                                    html.H4('Totalen'),
                                    dcc.Markdown('''Hieronder staan twee totaaloverzichten van de aankomende maanden.'''),
                                    dcc.Markdown('''De blauwe balk geeft de ingeplande uren weer. De streep geeft de beschikbare uren aan.'''),
                                    dcc.Markdown('''Het kleine getal eronder geeft aan hoeveel uren tekort/over zijn voor die maand.'''),
                                    html.Div(
                                        style=globals['styles']['divgraphs'],
                                        children=[
                                            dcc.Graph(
                                                figure=(data['graphdata']['gaugefig'])
                                            )
                                        ]
                                    )
                                ]
                            ),
                            html.Div(
                                className='maindivs',
                                style=globals['styles']['maindivs'],
                                children=[
                                    html.H4('Gantt'),
                                    dcc.Dropdown(
                                        style = globals['styles']['dropdowns'],
                                        id='dropdowngantttactisch',
                                        options=[{'label':j, 'value': i} for i,j in data['arrays']['threemonths']],
                                        multi=False,
                                        searchable=False,
                                        value = data['arrays']['threemonths'][0][0],
                                    ),
                                    html.Div(
                                        style=globals['styles']['divgraphs'],
                                        children=[
                                            dcc.Graph(id='gantttactisch'
                                            )
                                        ]
                                    )
                                ]
                            ),
                        ]
                    ),
                    # dcc.Tab(
                    #     style=globals['styles']['tabs'],
                    #     label='Configuratie',
                    #     children=[
                    #         html.Div(
                    #             className='maindivs',
                    #             style=globals['styles']['maindivs'],
                    #             children=[
                    #                 html.H3('Uitleg'),
                    #                 dcc.Markdown('''Klik op de button hieronder om de huidige configuratie te downloaden.'''),
                    #                 html.A(id='export_link', href='/dash/configuration/', children=[html.Button(id='export_button', type='button', children=['Export'])]),
                    #                 dcc.Markdown('''Pas het bestand aan en upload deze hieronder.'''),
                    #                 dcc.Upload(
                    #                     id='configupload',
                    #                     children=html.Div([
                    #                         'Sleep het bestand of ',
                    #                         html.A('selecteer het bestand')
                    #                     ]),
                    #                     style=globals['styles']['divgraphs'],
                    #                     multiple=False,
                    #                 ),
                    #                 html.Div(id='confirmupload',style=globals['styles']['divgraphs'])
                    #             ]
                    #         ),
                    #     ]
                    # )
                ]
            )
        ]
    )
#---! gantttactisch
def _shift_month(yyyymm, delta):
    """Shift a 'YYYYMM' period string by *delta* months (delta may be negative).

    Fixes the former hand-rolled arithmetic that only special-cased December
    (+1) and November (+2): e.g. '202412' + 2 used to yield the invalid
    period '202414'; it now correctly yields '202502'.
    """
    year, month = divmod(int(yyyymm), 100)
    month += delta - 1
    year += month // 12
    month = month % 12 + 1
    return '%04d%02d' % (year, month)
@app.callback(Output('gantttactisch', 'figure'),
              [Input('dropdowngantttactisch','value')]
              )
def update_gantttactisch(v1):
    """Gantt chart for the tactical-planning tab.

    Shows all open cards ('Niet gestart'/'Doing'/'Blocked') whose date range
    covers the selected 'YYYYMM' period *v1*, clamped to the window
    [v1 - 1 month, v1 + 2 months), with vertical markers at the start and end
    of the selected month.  Returns an empty transparent figure when no
    period is selected.
    """
    if v1 != None:
        v1plus1 = _shift_month(v1, 1)
        v1min1 = _shift_month(v1, -1)
        v1plus2 = _shift_month(v1, 2)
        # (the former unused `import random` was removed)
        import numpy as np
        from operator import itemgetter
        ganttdata = []
        monthkey = int(v1)
        for i,j in data['kaarten'].items():
            if j['Status'] in ['Niet gestart', 'Doing', 'Blocked']:
                try:
                    if int(datetime.strftime(j['Begindatum'], '%Y%m')) <= monthkey and int(datetime.strftime(j['Einddatum'], '%Y%m')) >= monthkey:
                        # Clamp the bar to the display window around v1.
                        if j['Begindatum'].date() < datetime.strptime(v1min1+'01','%Y%m%d').date():
                            start = datetime.strptime(v1min1+'01','%Y%m%d').date()
                        else:
                            start = j['Begindatum'].date()
                        if j['Einddatum'].date() >= datetime.strptime(v1plus2+'01','%Y%m%d').date():
                            eind = datetime.strptime(v1plus2+'01','%Y%m%d').date()
                        else:
                            eind = j['Einddatum'].date()
                        ganttdata.append(dict(Task=j['Epic'],
                                              Start=start,
                                              Finish=eind,
                                              Resource=j['Naam'] + ' (uren: ' + str(round(data['urenperdagperkaart'][j['Naam']]['urenperperiode'][v1])) + ')'
                                              ))
                except Exception:
                    # Cards without begin/end dates (or missing hour totals) are skipped.
                    pass
        result = sorted(ganttdata, key=itemgetter('Task'))
        # One random RGB colour per bar.
        rgb = []
        for c in range(len(result)):
            r = list(np.random.choice(range(256), size=3))
            rgb.append('rgb(' + ','.join(map(str, r)) + ')')
        fig = ff.create_gantt(result, index_col='Resource', show_colorbar=True, group_tasks=False, showgrid_x=True, showgrid_y=True, colors=rgb)
        fig['layout'].update(paper_bgcolor='rgba(0,0,0,0)',
                             plot_bgcolor='rgba(0,0,0,0)',)
        # Vertical black lines marking the start and end of the selected month.
        fig.add_trace(go.Scatter(mode='lines', x=[v1[0:4]+'-'+v1[4:]+'-01',v1[0:4]+'-'+v1[4:]+'-01'],y=[-1,len(result)], line={'shape': 'spline', 'color': 'black', 'width': 4}, showlegend=False))
        fig.add_trace(go.Scatter(mode='lines', x=[v1plus1[0:4]+'-'+v1plus1[4:]+'-01',v1plus1[0:4]+'-'+v1plus1[4:]+'-01'],y=[-1,len(result)], line={'shape': 'spline', 'color': 'black', 'width': 4}, showlegend=False))
        return fig
    else:
        return {'data': [go.Pie()],'layout': go.Layout(paper_bgcolor='rgba(0,0,0,0)', plot_bgcolor='rgba(0,0,0,0)')}
# #---! configupload
# @app.callback(Output('confirmupload', 'children'),
# [Input('configupload','contents')]
# )
# def confirm_upload(contents):
# global newconfig
# if contents is not None:
# try:
# newconfig = json.loads(base64.b64decode(contents[23:]).decode('ASCII'))
# d = {}
# for key,value in newconfig.items():
# if type(value) == list:
# d[key] = ''
# for i in value:
# if d[key] == '':
# d[key] += i
# else:
# if i == value[-1]:
# d[key] += (', '+i)
# else:
# d[key] = value
# return html.Div(
# id='returneddiv',
# style=globals['styles']['divgraphs'],
# children=[
# dcc.Markdown('''Check hieronder of de juiste data is ingevoerd. Klik daarna daaronder op 'Opslaan'.'''),
# dash_table.DataTable(
# style_header={'backgroundColor': 'rgba(62,182,235,0.6)','color': 'black', 'fontWeight': 'bold', 'fontFamily': 'Arial'},
# style_cell = {'backgroundColor': 'rgba(62,182,235,0.2)', 'color': 'black','text-align': 'left', 'fontFamily': 'Arial'},
# columns=[{'name': 'Sleutel', 'id': 'Sleutel'}, {'name': 'Waarde', 'id': 'Waarde'}],
# data=[{'Sleutel': key, 'Waarde': value} for key, value in d.items()]
# ),
# html.Button(
# 'Opslaan',
# id='save_button',
# n_clicks=0
# ),
# html.Div(
# id='savedornot',
# )
# ]
# )
# except:
# return html.H5('Het bestand is incorrect. Download en upload opnieuw!')
# else:
# return
# #---! save-button
# @app.callback(Output('savedornot','children'),
# [Input('save_button','n_clicks'),])
# def save_fnct(n_clicks):
# if n_clicks > 0:
# with open('./configuration/configuration.txt','w') as outfile:
# json.dump(newconfig, outfile, indent=4, sort_keys=True)
# return 'Opgeslagen. Refresh de page.'
# else:
# return
#---! ganttpersoon
@app.callback(Output('ganttpersoon','figure'),
              [Input('dropdownganttpersoon','value'),
              Input('dropdownganttpersoonstatus', 'value')])
def update_ganttpersoon(v1, v2):
    """Gantt chart of non-archived cards assigned to person *v1*, limited to statuses *v2*."""
    person_field = config.get('Custom Field for Person')
    start_field = config.get('Custom Field for Starting date')
    end_field = config.get('Custom Field for Ending date')
    tasks = []
    for kaart in data['kaarten'].values():
        if kaart[person_field] != v1 or kaart['Status'] == 'Archived' or kaart['Status'] not in v2:
            continue
        try:
            tasks.append({'Task': kaart['Naam'],
                          'Start': kaart[start_field].date(),
                          'Finish': kaart[end_field].date(),
                          'Resource': kaart['Epic']})
        except:
            # Cards without both dates cannot be drawn; skip them.
            pass
    if not tasks:
        # Nothing to draw: return an empty transparent figure.
        return {'data': [go.Pie()],'layout': go.Layout(paper_bgcolor='rgba(0,0,0,0)', plot_bgcolor='rgba(0,0,0,0)')}
    fig = ff.create_gantt(tasks, index_col='Resource', show_colorbar=True, showgrid_x=True, showgrid_y=True)
    fig['layout'].update(paper_bgcolor='rgba(0,0,0,0)',
                         plot_bgcolor='rgba(0,0,0,0)',)
    return fig
#---! ganttepics
@app.callback(Output('ganttepics','figure'),
              [Input('dropdownganttepics','value')])
def update_ganttepics(value):
    """Gantt chart of every non-archived card belonging to the selected epic,
    coloured by card status."""
    start_field = config.get('Custom Field for Starting date')
    end_field = config.get('Custom Field for Ending date')
    tasks = []
    for kaart in data['kaarten'].values():
        if kaart['Epic'] != value or kaart['Status'] == 'Archived':
            continue
        try:
            tasks.append({'Task': kaart['Naam'],
                          'Start': kaart[start_field].date(),
                          'Finish': kaart[end_field].date(),
                          'Resource': kaart['Status']})
        except:
            # Cards without both dates cannot be drawn; skip them.
            pass
    if not tasks:
        # Nothing to draw: return an empty transparent figure.
        return {'data': [go.Pie()],'layout': go.Layout(paper_bgcolor='rgba(0,0,0,0)', plot_bgcolor='rgba(0,0,0,0)')}
    fig = ff.create_gantt(tasks, index_col='Resource', show_colorbar=True, showgrid_x=True, showgrid_y=True)
    fig['layout'].update(paper_bgcolor='rgba(0,0,0,0)',
                         plot_bgcolor='rgba(0,0,0,0)',)
    return fig
#---! urenpermaand callback
@app.callback(Output('urenpermaand', 'figure'),
              [Input('dropdownurenpermaand', 'value')]
              )
def update_urenpermaand(value):
    """Stacked area chart of planned hours per month for the selected categories,
    with a black line showing total available hours.

    'Regulier werk' is handled first so it always forms the bottom layer of the
    stack.  NOTE: the comprehension variable `value` below shadows the callback
    parameter only inside the comprehensions (Python 3 comprehension scope),
    so the parameter itself is untouched.
    """
    layout = go.Layout(paper_bgcolor='rgba(0,0,0,0)',
                       plot_bgcolor='rgba(0,0,0,0)',
                       xaxis={'title': 'Datum', 'gridcolor': 'gray'},
                       yaxis={'title': 'Ingeplande uren', 'gridcolor': 'gray'})
    bars = []
    if 'Regulier werk' in value:
        yaxis = []
        for i in data['arrays']['perioden']:
            yaxis.append(round(sum([value['urenperperiode'][i] for value in data['urenperdagperkaart'].values() if value[config.get('Custom Field for Categories')] == 'Regulier werk']),0))
        bars.append(dict(x=data['arrays']['xaxis_months'],
                         y=yaxis,
                         name='Regulier werk',
                         line = {'shape': 'spline', 'smoothing': 0.4},
                         mode='lines+markers',
                         marker= {'symbol': 'triangle-up-open', 'size': 10},
                         stackgroup='one',
                         ))
    # Remaining selected categories, one stacked trace each.
    for categorie in data['arrays'][config.get('Custom Field for Categories')]:
        if categorie in value and categorie != 'Regulier werk':
            if categorie == None:
                categorienaam = 'Geen categorie'
            else:
                categorienaam = categorie
            yaxis = []
            for i in data['arrays']['perioden']:
                yaxis.append(round(sum([value['urenperperiode'][i] for value in data['urenperdagperkaart'].values() if value[config.get('Custom Field for Categories')] == categorie]),0))
            bars.append(dict(x=data['arrays']['xaxis_months'],
                             y=yaxis,
                             name=categorienaam,
                             line = {'shape': 'spline', 'smoothing': 0.4},
                             mode='lines+markers',
                             marker= {'symbol': 'triangle-up-open', 'size': 10},
                             stackgroup='one',
                             ))
    # Total available capacity as a thick black (non-stacked) line.
    yaxis = []
    for i in data['arrays']['perioden']:
        yaxis.append(round(sum([value['urenperperiode'][i] for value in data['beschikbareuren'].values()]),0))
    bars.append(dict(name='Totaal beschikbare uren',
                     mode = 'lines',
                     x = data['arrays']['xaxis_months'],
                     y = yaxis,
                     size=10,
                     line = {'shape': 'spline', 'smoothing': 0.3, 'width':6, 'color': 'black'},
                     ))
    return {
        'data': bars,
        'layout': layout}
#--! App routes
@app.server.route("/dash/configuration/")
def download_file():
    """Flask route on the underlying server: serve the configuration file as
    a download."""
    # cache_timeout=0 disables browser caching so the freshest file is served.
    # NOTE(review): Flask 2.0 renamed attachment_filename -> download_name and
    # cache_timeout -> max_age; confirm the pinned Flask version predates 2.0.
    return flask.send_file('./configuration/configuration.txt',
                           attachment_filename="configuration.txt",
                           as_attachment=True,
                           cache_timeout=0
                           )
#--! Check if this is the main app and if so, run Dash!
if __name__ == '__main__':
    # Listen on all interfaces, port 8050, with debug reloading disabled.
    app.run_server(debug=False,host='0.0.0.0', port=8050)
|
6,021 | 95163a28a35cc88240d9d6edc2e9b416e5493909 | import json
import sys
with open(sys.argv[1], 'r') as f:
x = json.load(f)
with open('my_wire_to_quartus_wire.json', 'r') as f:
wirenamemap = json.load(f)
print("----- There are {} muxes in the database".format(len(x)))
print("----- There are {} routing pairs in the database".format(sum((len(v) for k, v in x.items()))))
def bits2str(bits):
    """Render a 2-D bit matrix as newline-terminated rows of '1'/'0' chars."""
    rows = ("".join("1" if bit else "0" for bit in row) for row in bits)
    return "".join(row + "\n" for row in rows)
def parse_xyi(inp):
    """Parse a wire name of the form '...X<x>Y<y>I<i>' into an (x, y, i) tuple.

    The X, Y and I markers must appear in that order; anything before the
    first 'X' (e.g. a 'R:' direction prefix) is ignored.
    """
    ix, iy, ii = inp.find('X'), inp.find('Y'), inp.find('I')
    assert 0 <= ix < iy < ii
    return (int(inp[ix + 1:iy]), int(inp[iy + 1:ii]), int(inp[ii + 1:]))
def parse_xysi(inp):
    """Parse 'X<x>Y<y>S<s>I<i>' into (x, y, i), asserting the S field is 0."""
    ix, iy, isub, ii = (inp.find(ch) for ch in "XYSI")
    assert 0 <= ix < iy < isub < ii
    # Only sub-tile 0 is expected in this database.
    assert int(inp[isub + 1:ii]) == 0
    return (int(inp[ix + 1:iy]), int(inp[iy + 1:isub]), int(inp[ii + 1:]))
def anybits(bits):
    """Return True if any entry in the 2-D matrix is falsy.

    NOTE(review): despite the name, this reports the presence of a *clear*
    bit, not a set one — the original tested `if not x`.
    """
    return any(not bit for row in bits for bit in row)
def decodemux(bits):
A = not bits[0][0]
B = not bits[0][1]
C = not bits[0][2]
D = not bits[0][3]
E = not bits[1][0]
F = not bits[1][1]
G = not bits[1][2]
H = not bits[1][3]
assert G + C + D + H == 1
assert A + B + E + F == 1 or (A + B + E + F == 0 and G)
if G:
assert A + B + C + D + E + F + H == 0
if G:
return 0
if C:
if A: return 1
if B: return 2
if E: return 3
if F: return 4
if D:
if A: return 5
if B: return 6
if E: return 7
if F: return 8
if H:
if A: return 9
if B: return 10
if E: return 11
if F: return 12
def flipv(muxbits):
    """Mirror a mux bit matrix vertically (reverse the row order)."""
    return muxbits[::-1]
def fliph(muxbits):
    """Mirror a mux bit matrix horizontally (reverse each row)."""
    return [row[::-1] for row in muxbits]
# # print(x)
# uniq_r_muxes = []
# for _ in range(8):
# uniq_r_muxes.append(set())
# for X in range(2, 8):
# for Y in range(1, 5):
# for N in range(8):
# mux = "R:X{}Y{}I{}".format(X, Y, N)
# muxvals = x[mux]
# # print(muxvals)
# for muxsrc, muxbits in muxvals.items():
# uniq_r_muxes[N].add(bits2str(muxbits))
# # print(uniq_r_muxes)
# for N in range(8):
# print("~~~~~ R{} ~~~~~".format(N))
# for xx in sorted(list(uniq_r_muxes[N])):
# print(xx)
# # print(x)
# uniq_l_muxes = []
# for _ in range(8):
# uniq_l_muxes.append(set())
# # print(x)
# uniq_l2_muxes = []
# for _ in range(8):
# uniq_l2_muxes.append(set())
# for X in [8]:
# for Y in range(1, 5):
# for N in range(8):
# mux = "L2:X{}Y{}I{}".format(X, Y, N)
# muxvals = x[mux]
# # print(muxvals)
# for muxsrc, muxbits in muxvals.items():
# uniq_l2_muxes[N].add(bits2str(muxbits))
# # print(uniq_l2_muxes)
# for N in range(8):
# print("~~~~~ L2:{} ~~~~~".format(N))
# for xx in sorted(list(uniq_l2_muxes[N])):
# print(xx)
# # print(x)
# uniq_l_muxes = []
# for _ in range(8):
# uniq_l_muxes.append(set())
# for X in range(3, 9):
# for Y in range(1, 5):
# for N in range(8):
# mux = "L:X{}Y{}I{}".format(X, Y, N)
# muxvals = x[mux]
# # print(muxvals)
# for muxsrc, muxbits in muxvals.items():
# uniq_l_muxes[N].add(bits2str(muxbits))
# # print(uniq_l_muxes)
# for N in range(8):
# print("~~~~~ L{} ~~~~~".format(N))
# for xx in sorted(list(uniq_l_muxes[N])):
# print(xx)
# uniq_u_muxes = []
# for _ in range(7):
# uniq_u_muxes.append(set())
# for X in [8]:#range(2, 8):
# for Y in range(1, 5):
# for N in range(7):
# mux = "U:X{}Y{}I{}".format(X, Y, N)
# muxvals = x[mux]
# # print(muxvals)
# for muxsrc, muxbits in muxvals.items():
# uniq_u_muxes[N].add(bits2str(muxbits))
# # print(uniq_r_muxes)
# for N in range(7):
# print("~~~~~ U{} ~~~~~".format(N))
# for xx in sorted(list(uniq_u_muxes[N])):
# print(xx)
# uniq_d_muxes = []
# for _ in range(7):
# uniq_d_muxes.append(set())
# for X in [8]:#range(2, 8):
# for Y in range(1, 5):
# for N in range(7):
# mux = "D:X{}Y{}I{}".format(X, Y, N)
# muxvals = x[mux]
# # print(muxvals)
# for muxsrc, muxbits in muxvals.items():
# uniq_d_muxes[N].add(bits2str(muxbits))
# # print(uniq_r_muxes)
# for N in range(7):
# print("~~~~~ D{} ~~~~~".format(N))
# for xx in sorted(list(uniq_d_muxes[N])):
# print(xx)
# uniq_l_li_muxes = []
# for _ in range(18):
# uniq_l_li_muxes.append(set())
# for Y in range(1, 5):
# for N in range(18):
# mux = "LOCAL_INTERCONNECT:X1Y{}S0I{}".format(Y, N)
# muxvals = x[mux]
# # print(muxvals)
# for muxsrc, muxbits in muxvals.items():
# uniq_l_li_muxes[N].add(bits2str(muxbits))
# # print(uniq_r_muxes)
# for N in range(18):
# print("~~~~~ LOCAL_INTERCONNECT:X1 {} ~~~~~".format(N))
# for xx in sorted(list(uniq_l_li_muxes[N])):
# print(xx)
# uniq_li_muxes = []
# for _ in range(26):
# uniq_li_muxes.append(set())
# for X in range(2, 8):
# for Y in range(1, 5):
# for N in range(26):
# mux = "LOCAL_INTERCONNECT:X{}Y{}S0I{}".format(X, Y, N)
# muxvals = x[mux]
# # print(muxvals)
# for muxsrc, muxbits in muxvals.items():
# uniq_li_muxes[N].add(bits2str(muxbits))
# # print(uniq_r_muxes)
# for N in range(26):
# print("~~~~~ LOCAL_INTERCONNECT:X1 {} ~~~~~".format(N))
# for xx in sorted(list(uniq_li_muxes[N])):
# print(xx)
# uniq_top_li_muxes = []
# for _ in range(10):
# uniq_top_li_muxes.append(set())
# for X in range(2, 8):
# for N in range(10):
# mux = "LOCAL_INTERCONNECT:X{}Y5S0I{}".format(X, N)
# muxvals = x[mux]
# # print(muxvals)
# for muxsrc, muxbits in muxvals.items():
# uniq_top_li_muxes[N].add(bits2str(muxbits))
# # print(uniq_r_muxes)
# for N in range(10):
# print("~~~~~ LOCAL_INTERCONNECT:Y5 {} ~~~~~".format(N))
# for xx in sorted(list(uniq_top_li_muxes[N])):
# print(xx)
# Print template for a decoded mux: the header row names the eight active-low
# configuration bits, and row k+1 shows which bits are 0 when mux input k is
# selected (indices match what decodemux() returns).
LABELS = [
    "|G|C|D|H|A|B|E|F|",
    "|0| | | | | | | | ",
    "| |0| | |0| | | | ",
    "| |0| | | |0| | | ",
    "| |0| | | | |0| | ",
    "| |0| | | | | |0| ",
    "| | |0| |0| | | | ",
    "| | |0| | |0| | | ",
    "| | |0| | | |0| | ",
    "| | |0| | | | |0| ",
    "| | | |0|0| | | | ",
    "| | | |0| |0| | | ",
    "| | | |0| | |0| | ",
    "| | | |0| | | |0| ",
]
# Main pass: for every destination mux, normalize each source's bit pattern by
# mirroring it according to the wire's direction and tile position, so that a
# single canonical layout (decodemux) applies everywhere; then print the
# decoded input table for the mux.
for dst, srcs in x.items():
    srcs_decoded = [None] * 13   # mux input index -> source wire name
    is_tb_io = False             # destination sits in a top/bottom IO tile
    for src, muxbits in srcs.items():
        if dst.startswith("R:"):
            _, _, I = parse_xyi(dst)
            # Lower/upper halves of the tile are vertical mirrors.
            if I >= 4:
                muxbits = flipv(muxbits)
        elif dst.startswith("L:") or dst.startswith("L2"):
            _, _, I = parse_xyi(dst)
            # Left-going wires are horizontal mirrors of right-going ones.
            muxbits = fliph(muxbits)
            if I >= 4:
                muxbits = flipv(muxbits)
        elif dst.startswith("U:"):
            X, _, I = parse_xyi(dst)
            if X == 8:
                muxbits = fliph(muxbits)
            if I == 0 and X != 8:
                muxbits = fliph(muxbits)
            if I >= 4:
                muxbits = flipv(muxbits)
        elif dst.startswith("D:"):
            X, _, I = parse_xyi(dst)
            if X == 8:
                muxbits = fliph(muxbits)
            if I == 6 and X != 8:
                muxbits = fliph(muxbits)
            if I >= 3:
                muxbits = flipv(muxbits)
        elif dst.startswith("LOCAL_INTERCONNECT:"):
            # Strip the 19-char "LOCAL_INTERCONNECT:" prefix before parsing.
            X, Y, I = parse_xysi(dst[19:])
            if X == 1:
                muxbits = fliph(muxbits)
                if I > 8:
                    muxbits = flipv(muxbits)
            elif X == 8:
                if I > 8:
                    muxbits = flipv(muxbits)
            else:
                if Y == 0 or Y == 5:
                    # Top/bottom IO tiles have no mux input 0 (see below).
                    is_tb_io = True
                    if Y == 0:
                        muxbits = flipv(muxbits)
                    if I < 5:
                        muxbits = fliph(muxbits)
                else:
                    if I in range(0, 5) or I in range(13, 18):
                        muxbits = fliph(muxbits)
                    if I >= 13:
                        muxbits = flipv(muxbits)
        else:
            # Unknown destination kind: skip this routing pair.
            continue
        muxidx = decodemux(muxbits)
        # Two sources must never decode to the same mux input.
        if srcs_decoded[muxidx] is not None:
            print(dst, src, srcs_decoded[muxidx])
        assert srcs_decoded[muxidx] is None
        srcs_decoded[muxidx] = src
    print("~~~~~ {} ~~~~~".format(dst))
    print(LABELS[0])
    if is_tb_io:
        assert srcs_decoded[0] is None
    for i in range(len(srcs_decoded)):
        if is_tb_io and i == 0:
            continue
        print(LABELS[i + 1], end='')
        src = srcs_decoded[i]
        if src is None:
            print("???")
        else:
            print(src, end='')
            # Append the friendly Quartus wire name when we know it.
            if src in wirenamemap:
                print(" ({})".format(wirenamemap[src]))
            else:
                print()
# if dst.startswith("LOCAL_INTERCONNECT:"):
# continue
# print(dst, src)
# if dst.startswith("L:"):
# _, _, I = parse_xyi(dst)
# muxbits = fliph(muxbits)
# if I >= 4:
# muxbits = flipv(muxbits)
# if dst.startswith("R:"):
# _, _, I = parse_xyi(dst)
# if I >= 4:
# muxbits = flipv(muxbits)
# if dst.startswith("D:"):
# X, _, I = parse_xyi(dst)
# if I >= 3:
# muxbits = flipv(muxbits)
# if I == 6:
# muxbits = fliph(muxbits)
# if X == 8:
# muxbits = fliph(muxbits)
# if dst.startswith("U:"):
# X, _, I = parse_xyi(dst)
# if I >= 4:
# muxbits = flipv(muxbits)
# if I == 0:
# muxbits = fliph(muxbits)
# if X == 8:
# muxbits = fliph(muxbits)
# if dst.startswith("L2:"):
# _, _, I = parse_xyi(dst)
# if I >= 4:
# muxbits = flipv(muxbits)
# decodemux(muxbits)
|
6,022 | f5a953d91e95d82e84e3e6d18ee89d28ba1b1515 | import asyncio
import multiprocessing
from concurrent.futures import ProcessPoolExecutor
from apscheduler.schedulers.asyncio import AsyncIOScheduler
from datetime import datetime
import time
from apscheduler.schedulers.blocking import BlockingScheduler
from apscheduler.triggers.combining import OrTrigger
from apscheduler.triggers.cron import CronTrigger
def day_limits():
    """Daytime cron callback: raise the shared limit to 90 and log it."""
    daytime_limit = 90
    variable.value = daytime_limit
    print('Day Variable: ', variable.value)
def night_limits():
    """Nighttime cron callback: lower the shared limit to 65 and log it."""
    nighttime_limit = 65
    variable.value = nighttime_limit
    print('Night Variable: ', variable.value)
def thread_2(variable):
    """Log the shared value with a HH:MM:SS stamp every 2 seconds, forever.

    Runs inside the process-pool worker; `variable` is a Manager proxy.
    """
    while True:
        hh, mm, ss = (int(datetime.now().strftime(fmt)) for fmt in ("%H", "%M", "%S"))
        print('%02d:%02d:%02d - Variable: %d ' % (hh, mm, ss, variable.value))
        time.sleep(2)
if __name__ == "__main__":
    # Shared integer that the schedulers mutate and the worker process reads.
    m = multiprocessing.Manager()
    variable = m.Value('i', 60)
    schedfortest = BlockingScheduler()
    trigger_test = OrTrigger([
        CronTrigger(minute='*/1')
    ])
    # BUG FIX: the original also passed minute='*/2' as a bare keyword next to
    # an explicit trigger object, which add_job() does not accept.
    # NOTE(review): `callbacktotal` is not defined anywhere in this file —
    # confirm where it is meant to be imported from.
    schedfortest.add_job(callbacktotal,
                         trigger_test,
                         max_instances=10)
    # NOTE(review): BlockingScheduler.start() blocks this thread; everything
    # below only runs after that scheduler is stopped.
    schedfortest.start()
    scheduler = AsyncIOScheduler()
    scheduler.add_job(day_limits, 'cron', hour=7, misfire_grace_time=3600, timezone='GB')
    scheduler.add_job(night_limits, 'cron', hour=19, minute=32, misfire_grace_time=3600, timezone='GB')
    scheduler.start()
    scheduler.print_jobs()
    executor = ProcessPoolExecutor(1)
    loop = asyncio.get_event_loop()
    # BUG FIX: asyncio.async() was removed and `async` is a keyword since
    # Python 3.7 (SyntaxError); asyncio.ensure_future is the replacement.
    baa = asyncio.ensure_future(loop.run_in_executor(executor, thread_2, variable))  # Need to pass variable explicitly
    try:
        loop.run_forever()
    except (KeyboardInterrupt, Exception):
        loop.stop()
        scheduler.shutdown()
6,023 | ca0aedcfb997299240870649823fb872e0d9f99a | from accessor import *
from order import Order
from copy import deepcopy
import pandas as pd
import numpy as np
import util
class Broker:
    """Routes orders from the global order_queue into order_execute and
    delegates fills to an Execute instance.

    Relies on module-level state imported from `accessor` (order_queue,
    order_execute, add_position_long_order, add_position_short_order, the
    trade-log lists and position_list).
    """
    def __init__(self, equity):
        self.execute = Execute(equity)  # Execute
    def make_order(self, unit, limit_price, stop_loss, stop_profit):
        # Queue a new order; it is priced/validated later in check_order().
        order_queue.append(Order(unit, limit_price, stop_loss, stop_profit))
    def check_order(self, ohlc, date, commission):
        """
        check the order and set the information to order by different condition

        ohlc is (open, high, low, close) for the current bar; queued orders
        are priced (limit price, else this bar's open), fractional unit sizes
        are converted to share counts, stop levels are precomputed, and the
        orders are moved to order_execute for filling.
        """
        op = ohlc[0]
        for o in order_queue:
            # A lone order that adds to (not closes) an existing position is
            # demoted to a child ("add position") order.
            if position() != 0 and position() + o.units != 0 and len(order_queue) == 1:
                o.is_parents = False
            if o.limit_price:
                trading_price = o.limit_price
            else:
                trading_price = op
            setattr(o, 'trading_price', trading_price)
            setattr(o, 'trading_date', date)
            if o.is_long:
                # Fractional units (0 < u < 1) mean "this fraction of equity".
                if 1 > o.units > 0:
                    size = int((self.execute.equity * o.units) / trading_price)
                    setattr(o, 'units', size)
                if o.stop_loss:
                    stop_loss_price = o.trading_price * (1 - o.stop_loss)
                    setattr(o, 'stop_loss_prices', stop_loss_price)
                if o.stop_profit:
                    stop_profit_price = o.trading_price * (1 + o.stop_profit)
                    setattr(o, 'stop_profit_prices', stop_profit_price)
                if not o.is_parents:
                    add_position_long_order.append(o)
            elif o.is_short:
                if -1 < o.units < 0:
                    size = int((self.execute.equity * o.units) / trading_price)
                    setattr(o, 'units', size)
                # Short stops are mirrored: loss above, profit below entry.
                if o.stop_loss:
                    stop_loss_price = o.trading_price * (1 + o.stop_loss)
                    setattr(o, 'stop_loss_prices', stop_loss_price)
                if o.stop_profit:
                    stop_profit_price = o.trading_price * (1 - o.stop_profit)
                    setattr(o, 'stop_profit_prices', stop_profit_price)
                if not o.is_parents:
                    add_position_short_order.append(o)
            order_execute.append(o)
        self.work(ohlc, date=date, commission=commission)
        order_queue.clear()
        self.check_if_sl_or_sp(ohlc=ohlc, date=date, commission=commission)
    def check_if_sl_or_sp(self, ohlc, date, commission):
        """Turn working orders whose stop-loss/stop-profit level was touched
        (against this bar's close, ohlc[3]) into closing orders and fill them.

        NOTE(review): child orders are removed from order_execute while it is
        being iterated — confirm this cannot skip the following element.
        """
        for t in order_execute:
            # Snapshot the parent flag before replace() mutates the order.
            origin_o = deepcopy(t).is_parents
            if util.touch_stop_loss(order=t, price=ohlc[3], date=date) :
                t.replace(_unit=-t.units, _trading_price=t.stop_loss_prices, trading_date=date, _is_fill=False,
                          _is_parent=False, stop_loss=None)
            elif util.touch_stop_profit(order=t, price=ohlc[3], date=date):
                t.replace(_unit=-t.units, _trading_price=t.stop_profit_prices, trading_date=date, _is_fill=False,
                          _is_parent=False, stop_loss=None)
            if not origin_o:
                order_execute.remove(t)
        self.work(ohlc, date=date, commission=commission)
    def work(self, price, date, commission):
        # Delegate the actual fills to the Execute engine.
        self.execute.trading(price, date, commission)
    def liquidation(self, pos, price, date, commission):
        """
        clean the last position

        Emits a market order for -pos at this bar's open and fills it.
        """
        o = Order(-1 * pos, limit_price=None, stop_loss=None, stop_profit=None, is_fill=False)
        setattr(o, 'trading_price', price[0])
        setattr(o, 'trading_date', date)
        order_execute.append(o)
        self.work(price=price, date=date, commission=commission)
    def get_log(self):
        """Build the trade log as a DataFrame, then clear the global lists."""
        log_dict = {'BuyDate': buy_date, 'BuyPrice': buy_price, 'BuyUnits': buy_unit, 'CashPaying': amnt_paying,
                    'SellDate': sell_date, 'SellPrice': sell_price, 'SellUnits': sell_unit,
                    'CashReceiving': amnt_receiving}
        log = pd.DataFrame(log_dict)
        # Reset the accumulators so the next run starts from an empty log.
        for i in list(log_dict.values()):
            i.clear()
        return log
class Execute:
    """Fills orders from the global order_execute list against a running cash
    balance and appends each fill to the global trade-log lists."""
    def __init__(self, equity):
        # Cash balance; name-mangled, exposed read-only via the property below.
        self.__equity = equity
    def trading(self, price, date, commission):
        """Fill every not-yet-filled order at `price` ([open, high, low, close])."""
        # `c` is only used by the commented-out stop-loss code below.
        c = price[3]
        for t in order_execute:
            if not t.is_filled:
                position_list.append(t.units)
                # A parent order that reverses existing add-position orders is
                # split into its components before filling.
                if t.is_short and add_position_long_order and t.is_parents:
                    self.split_add_pos_order(t, add_position_long_order, commission)
                elif t.is_long and add_position_short_order and t.is_parents:
                    self.split_add_pos_order(t, add_position_short_order, commission)
                else:
                    self.fill(t, commission)
            # if self._touch_stop_loss(order=t, price=c):
            #     origin_o = deepcopy(t).is_parents
            #     t.replace(units=-t.units, trading_prices=t.stop_loss_price, trading_date=date, is_filled=False,
            #               is_parent=False, stop_loss=None)
            #     if not origin_o:
            #         order_execute.remove(t)
            # Once flat, drop all orders up to and including this one.
            if position() == 0 and t in order_execute: del order_execute[: order_execute.index(t) + 1]
    def fill(self, t, commission):
        """Book a single order: update equity and append to the buy/sell log."""
        adj_price = util.adjust_price(trade=t, commission=commission)
        if t.is_long:
            assert self.__equity >= adj_price * t.units, 'Your money is empty'
            buy_price.append(t.trading_price)
            buy_date.append(t.trading_date)
            buy_unit.append(t.units)
            amnt_paying.append(adj_price * t.units)
            self.__equity -= t.units * adj_price
            setattr(t, 'is_filled', True)
        elif t.is_short:
            sell_price.append(t.trading_price)
            sell_date.append(t.trading_date)
            sell_unit.append(t.units)
            amnt_receiving.append(abs(t.units) * adj_price)
            self.__equity += abs(t.units) * adj_price
            setattr(t, 'is_filled', True)
    def split_add_pos_order(self, trade_order, add_position_order: list, commission):
        """
        split the order which include overweight order into a list of single order and fill them
        e.g. a sell order [with 6 units has an parent order and an overweight order] becomes
        [an parent order with -4 units , an order with -2 units]
        """
        temp_order_list = []
        origin_trader_order_sign = np.sign(trade_order.units)
        # Carve the add-position sizes out of the parent order's units.
        if trade_order.is_short:
            parents_unit = trade_order.units + sum(abs(_o.units) for _o in add_position_order)
        else:
            parents_unit = trade_order.units - sum(abs(_o.units) for _o in add_position_order)
        trade_order.units = parents_unit
        if trade_order.units != 0:
            temp_order_list.append(trade_order)
        for _t in add_position_order:
            if np.sign(_t.units) == origin_trader_order_sign:
                temp_order_list.append(_t)
            else:
                # Opposite-signed adds become mirrored closing orders priced
                # like the parent.
                ct = deepcopy(_t)
                ct.units = -_t.units
                ct.trading_date = trade_order.trading_date
                ct.trading_prices = trade_order.trading_price
                temp_order_list.append(ct)
        for temp_o in temp_order_list:
            self.fill(temp_o, commission)
        add_position_order.clear()
    @property
    def equity(self):
        # Read-only view of the current cash balance.
        return self.__equity
def position():
    """Net open position: the sum of every recorded order size."""
    return sum(position_list)
|
6,024 | 4156b003210a41d6ec8f30e2d20adfb1f4b3deb0 | import torch
from torchvision import datasets, transforms
from torch.utils.data import Dataset, DataLoader
# load the data Set
from torch.utils.data import random_split
from torchvision.datasets import ImageFolder
# Dataset location: one subdirectory per class, as ImageFolder expects.
batch_size = 256
data_dir = 'nut_snacks/dataset/'
# RandomResizedCrop yields 128x128 crops; ToTensor scales pixels to [0, 1].
data_transforms = transforms.Compose(
    [transforms.RandomResizedCrop(128),
     transforms.ToTensor(),
     ])
dataset = ImageFolder(data_dir, transform=data_transforms)
print('Total dataset images: ',len(dataset))
# No shuffling needed — we only accumulate statistics over every image once.
loader = torch.utils.data.DataLoader(
    dataset, batch_size=batch_size)
def mean_std(loader):
    """Per-channel statistics over every image served by *loader*.

    Returns (mean, std), each a tensor of shape (channels,). The std is the
    average of the per-image stds, not the pooled dataset std.
    """
    mean_acc = 0.0
    std_acc = 0.0
    for batch, _ in loader:
        # Flatten each image's spatial dims: (B, C, H, W) -> (B, C, H*W).
        flat = batch.view(batch.size(0), batch.size(1), -1)
        mean_acc = mean_acc + flat.mean(2).sum(0)
        std_acc = std_acc + flat.std(2).sum(0)
    n_images = len(loader.dataset)
    return mean_acc / n_images, std_acc / n_images
mean, std = mean_std(loader)
print(f'Mean: {mean}')
print(f'Std: {std}')
|
6,025 | 9bd63181de024c2f4517defa9ed51bdbc8d610d2 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
from urllib import request,parse
# req = request.Request('https://api.douban.com/v2/book/2129650')
# req.add_header('User-Agent', 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/62.0.3202.62 Safari/537.36')
# with request.urlopen(req) as f:
# data = f.read()
# print('Status:', f.status, f.reason)
# for k, v in f.getheaders():
# print('%s:%s' % (k, v))
# print('Data:', data.decode('utf-8'))
# Console login demo: read credentials, POST them form-encoded, and dump the
# response status, headers and body.
print('Login to weibo.com')
email = input('Email:')
passwd = input('Password:')
# Build the application/x-www-form-urlencoded request body.
login_data = parse.urlencode([
    ('username', email),
    ('password', passwd),
    ('entry', 'mwei'),
    ('client_id', ''),
    ('savestate', 1),
    ('ec', ''),
    ('pagerefer', 'https://passport.weibo.cn/signin/welcome?entry=mweibo&r=http%3A%2F%2Fm.weibo.cn%2F')
])
# NOTE(review): despite the weibo prompt above, the request is actually sent
# to chenshuaijun.com — confirm this redirection is intentional.
req = request.Request('https://chenshuaijun.com')
req.add_header('Host', 'chenshuaijun.com')
req.add_header('User-Agent', 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/62.0.3202.62 Safari/537.36')
# Supplying data= turns this into a POST; the body must be bytes.
with request.urlopen(req, data=login_data.encode('utf-8')) as f:
    print('Status:', f.status, f.reason)
    for k, v in f.getheaders():
        print('%s: %s' % (k, v))
    print('Data:', f.read().decode('utf-8'))
|
6,026 | 3605e8b8b2f8f49cc7c40fc436c147578b12091c | from . import metrics
from . import matrices
from .pairwise import apply_pairwise_rect, apply_pairwise_sparse, apply_running_rect
from . import numba_tools as nb_tools
from . import running_metrics as running
__all__ = ['metrics',
'apply_pairwise_rect',
'apply_pairwise_sparse',
'apply_running_rect',
'nb_tools',
'matrices',
'running'] |
6,027 | 1dd223854c10e69a397098511eab50b9ebd347c8 | # My Godzilla Hat Code - @alt_bier
from adafruit_circuitplayground.express import cpx
import random
#cpx.pixels.brightness = 0.5 # 50 pct
cpx.pixels.fill((0, 0, 0)) # Turn off the NeoPixels if they're on!
# Function to give us a nice color swirl on the built in NeoPixel (R,G,B)
def wheeln(pos, sft):
    """Color-wheel lookup: shift `pos` by `sft` (wrapping at 256) and map the
    0-255 phase onto a red->green->blue->red RGB fade."""
    shifted = pos + sft
    phase = shifted - 256 if shifted > 255 else shifted
    if not 0 <= phase <= 255:
        return (0, 0, 0)
    if phase < 85:
        # Red fading into green.
        return (int(255 - phase * 3), int(phase * 3), 0)
    if phase < 170:
        # Green fading into blue.
        offset = phase - 85
        return (0, int(255 - offset * 3), int(offset * 3))
    # Blue fading back into red.
    offset = phase - 170
    return (int(offset * 3), 0, int(255 - offset * 3))
# Function to flash random colors
def randcolor():
randgr = randrd = randbl = 0
# determine if all colors off
if (random.randint(0,14) == 1):
# if on then determine if each color is off and return an intensity value if on
if (random.randint(0,1) == 1):
randgr = random.randint(1,255)
if (random.randint(0,1) == 1):
randrd = random.randint(1,255)
if (random.randint(0,1) == 1):
randbl = random.randint(1,255)
return (randgr, randrd, randbl)
# Function to simulate a flame effect on built in NeoPixel (R,G,B)
def flame(pos, clr, sft):
    """Flame-effect color for one NeoPixel.

    pos: 0-255 animation position; sft: per-pixel phase shift; clr: 0-5 color
    selector (red, red+green, green, green+blue, blue, blue+red). The lit
    channels ramp up and down twice per 256-step cycle, with dark gaps.
    Returns an (R, G, B) tuple.
    """
    # Wrap the shifted position back into 0..255.
    if (pos + sft) > 255:
        pos = (pos + sft) - 256
    else:
        pos = (pos + sft)
    # Intensity envelope: off / ramp-up / ramp-down segments over the cycle.
    if pos < 32:
        rval = 0
    elif pos < 64:
        rval = int((pos * 8) - 249)     # low -> high
    elif pos < 96:
        rval = int(767 - (pos * 8))     # high -> low
    elif pos < 128:
        rval = 0
    elif pos < 160:
        rval = int((pos * 8) - 1017)    # low -> high
    elif pos < 192:
        rval = int(1535 - (pos * 8))    # high -> low
    else:
        rval = 0
    # Map the intensity onto the selected color channel(s).
    if clr == 0:
        return (rval, 0, 0)             # red
    elif clr == 1:
        return (rval, rval, 0)          # red & green
    elif clr == 2:
        return (0, rval, 0)             # green
    elif clr == 3:
        return (0, rval, rval)          # green & blue
    elif clr == 4:
        # BUG FIX: the original returned (0, rval, rval) here — identical to
        # clr == 3 — although its own comment labeled this branch "Blue".
        return (0, 0, rval)             # blue
    elif clr == 5:
        return (rval, 0, rval)          # blue & red
    else:
        return (0, 0, 0)
# Function to turn off all the built in NeoPixels
def alloff():
    """Blank the whole NeoPixel ring."""
    black = (0, 0, 0)
    cpx.pixels.fill(black)
# Animation state: `mode` selects the effect (0=off, 1=flame, 2=rainbow,
# 3=sparkle), pusha/pushb latch button presses until a cycle boundary,
# `clr` cycles the flame color, `i` is the 0-255 animation position.
mode = 1
pusha = 0
pushb = 0
clr = 0
i = 0
while True:
    # NeoPixels are cpx.pixels[0-9]
    if (mode == 1):
        # Flame effect, phase-shifted symmetrically out from the two
        # center pixels (4 and 5).
        cpx.pixels[0] = flame(i, clr, 32)
        cpx.pixels[1] = flame(i, clr, 24)
        cpx.pixels[2] = flame(i, clr, 16)
        cpx.pixels[3] = flame(i, clr, 8)
        cpx.pixels[4] = flame(i, clr, 0)
        cpx.pixels[5] = flame(i, clr, 0)
        cpx.pixels[6] = flame(i, clr, 8)
        cpx.pixels[7] = flame(i, clr, 16)
        cpx.pixels[8] = flame(i, clr, 24)
        cpx.pixels[9] = flame(i, clr, 32)
    elif (mode == 2):
        # Rainbow swirl: each pixel offset 24 steps around the color wheel.
        cpx.pixels[0] = wheeln(i, 0)
        cpx.pixels[1] = wheeln(i, 24)
        cpx.pixels[2] = wheeln(i, 48)
        cpx.pixels[3] = wheeln(i, 72)
        cpx.pixels[4] = wheeln(i, 96)
        cpx.pixels[5] = wheeln(i, 120)
        cpx.pixels[6] = wheeln(i, 144)
        cpx.pixels[7] = wheeln(i, 168)
        cpx.pixels[8] = wheeln(i, 192)
        cpx.pixels[9] = wheeln(i, 216)
    elif (mode == 3):
        # Random sparkle on every pixel.
        cpx.pixels[0] = randcolor()
        cpx.pixels[1] = randcolor()
        cpx.pixels[2] = randcolor()
        cpx.pixels[3] = randcolor()
        cpx.pixels[4] = randcolor()
        cpx.pixels[5] = randcolor()
        cpx.pixels[6] = randcolor()
        cpx.pixels[7] = randcolor()
        cpx.pixels[8] = randcolor()
        cpx.pixels[9] = randcolor()
    else:
        # Mode = 0 so turn All Off
        alloff()
    # Button A is bottom button on hat
    if cpx.button_a:
        print("Button A on Bottom Pressed! Changing mode to ALL OFF.")
        pusha = 1
    # Button B is top button on hat
    if cpx.button_b:
        print("Button B on Top Pressed! Changing mode.")
        pushb = 1
    i = (i+1) % 256
    #print (i)
    if (i == 255):
        clr = (clr+1) % 6
    # Latched button presses take effect only at quarter-cycle boundaries,
    # so the animation never changes mid-sweep.
    if ((i == 63) | (i == 127) | (i == 191) | (i >= 255)) and (pusha == 1):
        mode = 0
        pusha = 0
        i = 0
    if ((i == 63) | (i == 127) | (i == 191) | (i >= 255)) and (pushb == 1):
        mode = (mode+1)
        pushb = 0
        i = 0
    if (mode > 3):
        # Wrap back to the first effect.
        mode = 1
6,028 | ecbc1da3efb39300b60aeb47897fb01b6bd7af31 |
import code2
print ("Main en code1: %s\n" % __name__)
|
6,029 | 4d9064add28302fe173a8b0a81ee7d187db8aead | from typing import Any
from typing import List
from xsdata.codegen.mixins import RelativeHandlerInterface
from xsdata.codegen.models import Attr
from xsdata.codegen.models import Class
from xsdata.models.enums import Tag
from xsdata.utils.namespaces import build_qname
class ClassEnumerationHandler(RelativeHandlerInterface):
    """Enumeration class processor."""

    __slots__ = ()

    def process(self, target: Class):
        """
        Process class receiver.

        Steps:
            1. Filter attrs not derived from xs:enumeration
            2. Flatten attrs derived from xs:union of enumerations
            3. Promote inner enumeration classes to root classes
        """
        self.filter(target)
        self.flatten(target)
        self.promote(target)

    @classmethod
    def filter(cls, target: Class):
        """Filter attrs not derived from xs:enumeration if there are any
        xs:enumeration attrs."""
        enumerations = [attr for attr in target.attrs if attr.is_enumeration]
        # Only drop the non-enumeration attrs when at least one enumeration
        # attr exists; otherwise leave the class untouched.
        if enumerations:
            target.attrs = enumerations

    def flatten(self, target: Class):
        """
        Flatten attrs derived from xs:union of enumeration classes.

        Find the enumeration classes and merge all of their members in
        the target class.
        """
        # Only applies to a class consisting of a single xs:union attr.
        if len(target.attrs) != 1 or target.attrs[0].tag != Tag.UNION:
            return
        enums: List[Any] = []
        for attr_type in target.attrs[0].types:
            if attr_type.forward:
                # Forward references point at this class's own inner classes.
                enums.extend(target.inner)
            elif not attr_type.native:
                enums.append(self.container.find(attr_type.qname))
            else:
                # A native (xsd builtin) member makes the union non-mergeable.
                enums.append(None)
        # Merge only if every union member resolved to an enumeration class.
        merge = all(isinstance(x, Class) and x.is_enumeration for x in enums)
        if merge:
            target.attrs.clear()
            target.inner.clear()
            target.attrs.extend(attr.clone() for enum in enums for attr in enum.attrs)

    def promote(self, target: Class):
        """
        Promote inner enumeration classes to root classes.

        Steps:
            1. Find inner enumerations
            2. Clone and update their qualified name
            3. Update attributes types
        """
        # Iterate a copy since inner classes are removed during the loop.
        for inner in list(target.inner):
            if inner.is_enumeration:
                target.inner.remove(inner)
                clone = self.clone_enumeration(inner, target.name)
                self.container.add(clone)
                # Repoint every attr that referenced the inner class.
                for attr in target.attrs:
                    self.update_types(attr, inner.qname, clone.qname)

    @classmethod
    def clone_enumeration(cls, inner: Class, name: str) -> Class:
        # Prefix the inner name with the parent's to keep it unique at root.
        clone = inner.clone()
        clone.qname = build_qname(clone.target_namespace, f"{name}_{clone.name}")
        return clone

    @classmethod
    def update_types(cls, attr: Attr, search: str, replace: str):
        # Rewrite forward references to `search` so they point at the promoted
        # root class and are no longer marked as forward.
        for attr_type in attr.types:
            if attr_type.qname == search and attr_type.forward:
                attr_type.qname = replace
                attr_type.forward = False
|
6,030 | bc8bc5c3b6954302d005fe618827c644f93ad14e | ### 15/04/2020
### Author: Omer Goder
### Looping through a list
# BUG FIX: the month name 'fabruary' was misspelled; corrected to 'february'.
months = ['january', 'february', 'march', 'april', 'may', 'june', 'july',
          'august', 'september', 'october', 'november', 'december']
# Using a for loop to print a list
for month in months:
    print("The next month is:\t" + month)
    print('\n')
print("\nEnd of program\n")  # Print out once - not in the loop
#example for indexing using enumeration (considers non-pythonic)
#for index, month in enumerate(months):
#    print(index, month.title() + " is a name of a month\n")
|
6,031 | 2464da1c4d2ddab3a053f0a14e3cc9a8beabe031 | from MyFeistel import MyFeistel, LengthPreservingCipher
import pytest
import base64
import os
class TestMyFeistel:
    """Round-trip tests for the MyFeistel cipher."""
    def test_Functionality(self):
        """decrypt(encrypt(msg)) must return msg for random keys and messages."""
        key = base64.urlsafe_b64encode(os.urandom(16))
        feistel = MyFeistel(key, 10)
        # decrypt(encrypt(msg)) == msg
        # BUG FIX: xrange is Python 2 only (NameError on Python 3); use range.
        for i in range(20):
            msg = os.urandom(6)
            assert feistel.decrypt(feistel.encrypt(msg)) == msg
    def test_OddLengthMessage(self):
        # TODO: exercise messages whose byte length is odd.
        pass
class TestLengthPreservingCipher:
    """Round-trip tests for the length-preserving cipher wrapper."""
    def test_Functionality(self):
        """decrypt(encrypt(msg)) must return msg for random keys and messages."""
        key = base64.urlsafe_b64encode(os.urandom(16))
        lpc = LengthPreservingCipher(key, 10)
        # decrypt(encrypt(msg)) == msg
        # BUG FIX: xrange is Python 2 only (NameError on Python 3); use range.
        for i in range(20):
            msg = os.urandom(6)
            assert lpc.decrypt(lpc.encrypt(msg)) == msg
|
6,032 | e0075e4afafba9da70bbcb2ee073b5c1f7782d7d | import numpy as np
import scipy.signal as sp
from common import *
class Processor:
    """STFT analysis/synthesis with sqrt-Hann windows and overlap-add.

    Helper functions (roundUpToPowerOf2, getNFrame, getWindow, getFrame,
    getFrameRange, magnPhaseToFSig) come from the `common` module.
    """
    def __init__(self, sr, **kwargs):
        # Sample rate in Hz.
        self.samprate = float(sr)
        # Hop defaults to ~5 ms, rounded up to a power of two.
        self.hopSize = kwargs.get("hopSize", roundUpToPowerOf2(self.samprate * 0.005))
        # Overlap-add factor: analysis frames per hop.
        self.olaFac = int(kwargs.get("olaFac", 2))
    def analyze(self, x):
        """STFT of signal x.

        Returns (magnList, phaseList), each of shape (nFrame, nBin) with
        nBin == hopSize + 1; phases are unwrapped along the frequency axis.
        """
        assert(self.olaFac > 0)
        # constant
        nX = len(x)
        nHop = getNFrame(nX, self.hopSize)
        nFrame = nHop * self.olaFac
        nBin = self.hopSize + 1
        windowFunc, B, windowMean = getWindow("hanning")
        windowSize = 2 * self.hopSize
        halfWindowSize = self.hopSize
        # sqrt-Hann so analysis*synthesis windows multiply back to Hann.
        window = np.sqrt(windowFunc(windowSize))
        windowNormFac = 2.0 / (windowMean * windowSize)
        # do calculate
        magnList = np.zeros((nFrame, nBin), dtype = np.float64)
        phaseList = np.zeros((nFrame, nBin), dtype = np.float64)
        for iFrame in range(nFrame):
            frame = getFrame(x, iFrame * self.hopSize // self.olaFac, windowSize)
            frame *= window
            # Zero-phase alignment: swap the frame halves before the FFT.
            tSig = np.zeros(windowSize, dtype = np.float64)
            tSig[:halfWindowSize] = frame[halfWindowSize:]
            tSig[-halfWindowSize:] = frame[:halfWindowSize]
            fSig = np.fft.rfft(tSig)
            magnList[iFrame] = np.abs(fSig) * windowNormFac
            phaseList[iFrame] = np.unwrap(np.angle(fSig))
        return magnList, phaseList
    def synth(self, *args):
        """Inverse transform via windowed overlap-add.

        Accepts either (fSigList,) of complex spectra or
        (magnList, phaseList); returns the reconstructed signal.
        """
        # constant
        nFrame, nBin = args[0].shape
        nHop = nFrame // self.olaFac
        nOut = nHop * self.hopSize
        windowFunc, B, windowMean = getWindow("hanning")
        windowSize = 2 * self.hopSize
        halfWindowSize = self.hopSize
        window = np.sqrt(windowFunc(windowSize))
        # check input
        assert(nBin == self.hopSize + 1)
        # synth
        out = np.zeros(nOut, dtype = np.float64)
        if(len(args) == 1):
            fSigList = args[0]
        elif(len(args) == 2):
            fSigList = magnPhaseToFSig(*args)
        else:
            raise ValueError("Bad input.")
        # Undo the analysis normalization (in place: mutates the caller's array).
        fSigList *= halfWindowSize
        for iFrame in range(nFrame):
            tSig = np.fft.irfft(fSigList[iFrame])
            # NOTE(review): analyze() swaps the frame halves before the FFT,
            # but this path does not swap them back after irfft — confirm
            # this asymmetry is intended.
            ob, oe, ib, ie = getFrameRange(nOut, iFrame * self.hopSize // self.olaFac, windowSize)
            out[ib:ie] += (tSig * window)[ob:oe]
        # Compensate for the overlap factor.
        out /= self.olaFac
        return out
|
6,033 | 8b4bc312bf4b64f98c4f84f4bf89984291be0428 | # Generated by Django 3.1.7 on 2021-03-19 14:38
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated: re-declare Author.author as a one-to-one link to the
    configured user model with the verbose name 'Автор'."""

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('news', '0002_auto_20210317_1400'),
    ]

    operations = [
        migrations.AlterField(
            model_name='author',
            name='author',
            field=models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL, verbose_name='Автор'),
        ),
    ]
|
6,034 | 75741d11bebcd74b790efe7e5633d4507e65a25f | class HashTable:
def __init__(self):
self.size = 11
self.slots = [None] * self.size
self.data = [None] * self.size
def put(self, key, data):
# there are three situations,
#1. the hashvalue returned by hashfunction of the slot is empty, just put the key in that slot, and the data in the datalist
hashvalue = self.hashfunction(key, len(self.slots))
if self.slots[hashvalue] == None:
self.slots[hashvalue] = key
self.data[hashvalue] = data
else:
#2. the hashvalue returned by the hashfunction of the slot is not empty and is the same of the key , replace the data
if self.slots[hashvalue] == key:
self.data[hashvalue] = data #replace
else:
#3. the hashvalue returned by the hashfunction of the slot is not empty and is different from the key, you need to do rehashing
# while the rehashing value is not the same as the key and is not empty
nextslot = self.rehash(hashvalue, len(self.slots))
while nextslot != None and self.slots[nextslot] != key:
nextslot = self.rehash(nextslot, len(self.slots))
#3.1 the reshashing value is empty
if self.slots[nextslot] == None:
self.slots[nextslot] = key
self.data[nextslot] = data
#3.2 the reshashing value is the same as the key in the current slot
else:
self.data[nextslot] = data #replace
def hashfunction(self, key, size):
return key%size
def rehash(self,oldhash,size):
return (oldhash + 1)%size
def get(self, key):
# there are some auguments: startslot(the initial hashvalue of the key by the hashfunction), data(the corresponding data of the key)
#stop( a boolean value indicating whether to stop or not, position (the position you indicated in the slot)),#found(indicator)
startslot = self.hashfunction(key, len(self.slots))
position = startslot
stop = False
found = False
data = None
while position is not None and not stop and not found :
if self.slots[position] == key:
found = True
data = self.data[position]
else:
position = self.rehash(position, len(self.slots))
if position == startslot:
stop = True
return data
def __getitem__(self, key):
return self.get(key)
def __setitem__(self, key, data):
self.put(key,data)
|
6,035 | 12f05f42c9ed56d6a2c95fb56a8619fae47a2f1a | /home/runner/.cache/pip/pool/9b/88/a0/f20a7b2f367cd365add3353eba0cf34569d5f62a33587f96cebe6d4360 |
6,036 | e11c479a99ab68755de8ab565e3d360d557129cf | # Copyright 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
import traceback
import getopt
#
# Load Buzz library (if available...)
#
# Make the parent directory importable so the bundled `buzz` package is found.
# NOTE: this file is Python 2 (print statements) throughout.
try:
    sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
    import buzz
except ImportError:
    # Import failed: show the module docstring as usage help.
    # NOTE(review): execution continues after this block even though `buzz`
    # is missing -- presumably it should exit here; confirm.
    print 'Error importing Buzz library!!!'
    print '-' * 80
    print __doc__
    print '-' * 80

# Module-level state shared with GetLoginData().
token = verification_code = buzz_client = ''
#
# Function to obtain login data
#
def GetLoginData():
    'Obtains login information from either the command line or by querying the user'
    # Fills the module-level token/verification_code and leaves buzz_client
    # holding a valid OAuth access token, or exits via sys.exit(0) on failure.
    global token, verification_code, buzz_client
    key = secret = ''
    # Check first if anything is specified in the command line
    if len(sys.argv[1:]):
        try:
            (opts, args) = getopt.getopt(sys.argv[1:], 'k:s:v:', ['key','secret', 'vercode'])
            if (len(args)): raise getopt.GetoptError('bad parameter')
        except getopt.GetoptError:
            # Bad arguments: print usage and bail out.
            print '''
Usage: %s <-t access token> <-a verification_code>
    -k (--key): OPTIONAL, previously obtained access token key
    -s (--secret): OPTIONAL, previously obtained access token secret
Exiting...
''' % (sys.argv[0])
            sys.exit(0)
        for (opt, arg) in opts:
            if opt in ('-k', '--key'):
                key = arg
            elif opt in ('-s', '--secret'):
                secret = arg
    # Query the user for data otherwise - we need key and secret for our OAuth request token.
    if ((key == '') or (secret == '')):
        # Full OAuth dance: request token -> user authorizes in browser ->
        # verification code -> access token.
        token = buzz_client.fetch_oauth_request_token ('oob')
        token = buzz_client.oauth_request_token
        print '''
Please access the following URL to confirm access to Google Buzz:
  %s
Once you're done enter the verification code to continue: ''' % (buzz_client.build_oauth_authorization_url(token)),
        verification_code = raw_input().strip()
        buzz_client.fetch_oauth_access_token (verification_code, token)
    else:
        # Reuse a previously obtained key/secret pair.
        buzz_client.build_oauth_access_token(key, secret)
    # Do we have a valid OAUth access token?
    if (buzz_client.oauth_token_info().find('Invalid AuthSub signature') != (-1)):
        print 'Access token is invalid!!!'
        sys.exit(0)
    else:
        print '''
Your access token key is \'%s\', secret is \'%s\'
Keep this data handy in case you want to reuse the session later!
''' % (buzz_client.oauth_access_token.key, buzz_client.oauth_access_token.secret)
#
# Main program starts here
#
# Main program: build an anonymous-consumer Buzz client, obtain an OAuth
# access token via GetLoginData(), and print it for later reuse.
try:
    buzz_client = buzz.Client()
    buzz_client.oauth_scopes=[buzz.FULL_ACCESS_SCOPE]
    buzz_client.use_anonymous_oauth_consumer()
    GetLoginData()
    print 'Got an access token! key: %s, secret %s' % (buzz_client.oauth_access_token.key, buzz_client.oauth_access_token.secret)
    print 'Token info: ' + buzz_client.oauth_token_info()
    print '\nAll done'
except:
    # NOTE(review): bare except also catches SystemExit raised by sys.exit()
    # inside GetLoginData, so the intended exit status is swallowed here.
    print '\nBzzzz! Something broke!!!'
    print '-' * 50
    traceback.print_exc()
    print '-' * 50
|
6,037 | 0b7bba826b82c3751c072395431e17bc1dc9bb90 | import numpy as np
from scipy import fft
import math
from sklearn import svm
from activity_recognition import WiiGesture
class WiiGestureClassifier():
    """
    SVM-based recognizer for Wiimote gestures.

    This class uses the FFT on the average of all three sensor values
    to provide the training data for the SVM.
    Three good distinguishable gestures are:
        Fast circle movement
        Still, doing nothing
        Fast swing movement from behind the shoulder (like a whip)
    """

    def __init__(self):
        # BUG FIX: `super(self.__class__, self)` recurses infinitely when this
        # class is subclassed; name the class explicitly instead.
        super(WiiGestureClassifier, self).__init__()

    def train(self, gestureList):
        """Parse, trim and FFT the samples of every gesture, then fit the SVM.

        Returns an error-message string when a gesture has no samples.
        """
        self.gestureList = gestureList
        self.parsedGestureList = []
        self.parseArrays(self.gestureList)
        if self.checkListForEmpty():
            return "\na gesture has no trained samples"
        self.minlen = self.calcMinLength()
        self.cutGestureList()
        self.getFrequencies()
        self.buildClassifier()

    def parseArrays(self, data):
        """Convert every gesture's raw sensor tuples into averaged samples."""
        for gesture in data:
            parsedGesture = WiiGesture(gesture.name)
            parsedGesture.trainingsData = [self.parseDataset(dataSet)
                                           for dataSet in gesture.trainingsData]
            self.parsedGestureList.append(parsedGesture)

    def parseDataset(self, dataSet):
        """Return the per-sample average of the three axes.

        512 is subtracted from each axis, i.e. the difference from the
        sensor's default value is used.
        """
        avg = []
        for values in dataSet:
            avg.append((values[0] - 512 + values[1] - 512 + values[2] - 512) / 3)
        return avg

    def calcMinLength(self):
        """Return the length of the shortest sample series over all gestures."""
        samples = []  # renamed from `all`, which shadowed the builtin
        for gesture in self.parsedGestureList:
            samples += gesture.trainingsData
        return min([len(x) for x in samples])

    def cutGestureList(self):
        """Trim every sample series to the common minimum length."""
        for gesture in self.parsedGestureList:
            gesture.trainingsData = [l[:self.minlen] for l in gesture.trainingsData]

    def getFrequencies(self):
        """Attach the FFT magnitude spectrum (DC bin dropped) to each gesture."""
        # BUG FIX: `len(l) / 2` is a float under Python 3 and breaks slicing;
        # integer division is identical under Python 2.
        for gesture in self.parsedGestureList:
            gesture.frequencies = [
                np.abs(fft(l) / len(l))[1:len(l) // 2] for l in gesture.trainingsData]

    def buildClassifier(self):
        """Fit the SVM with one integer category per gesture."""
        self.c = svm.SVC()
        count = 0
        categories = []
        trainingData = []
        for gesture in self.parsedGestureList:
            categories += [count] * len(gesture.frequencies)
            trainingData += gesture.frequencies
            count += 1
        try:
            self.c.fit(trainingData, categories)
        except ValueError:
            return 'More traininsdata for some gestures required'

    def classify(self, gesture):
        """Return the predicted category index for a single recorded gesture."""
        parsedGesture = WiiGesture(gesture.name)
        parsedGesture.trainingsData = [self.parseDataset(dataSet)
                                       for dataSet in gesture.trainingsData]
        # Pad short recordings with zeros, then trim to the training length.
        if len(parsedGesture.trainingsData[0]) < self.minlen:
            missingValues = self.minlen - len(parsedGesture.trainingsData[0])
            for x in range(missingValues):
                parsedGesture.trainingsData[0].append(0)
        parsedGesture.trainingsData = [l[:self.minlen] for l in parsedGesture.trainingsData]
        # BUG FIX: integer division for the slice bound (see getFrequencies).
        parsedGesture.frequencies = [np.abs(fft(l) / len(l))[1:len(l) // 2] for l in parsedGesture.trainingsData]
        return self.c.predict(parsedGesture.frequencies[0])

    def checkListForEmpty(self):
        """Return True when there are no gestures or any gesture lacks samples."""
        # BUG FIX: the original returned False as soon as the FIRST gesture was
        # non-empty, so empty gestures later in the list went undetected.
        if len(self.parsedGestureList) <= 0:
            return True
        for gesture in self.parsedGestureList:
            if len(gesture.trainingsData) <= 0:
                return True
        return False
6,038 | 3218a9e82cd19bab1680079aee5f09a97992629e | from flask import Flask
app = Flask(__name__)
import orderapi, views, models, processing

if __name__=="__main__":
    # NOTE(review): app.run() blocks, so the views server below is only
    # started after the orderapi server stops -- confirm whether the two
    # services were meant to run in separate processes.
    orderapi.app.debug = True
    orderapi.app.run(host='0.0.0.0', port=34203)
    views.app.debug = True
    views.app.run(host='0.0.0.0', port=42720)
|
6,039 | ade300f2921ca860bbe92aa351df2c88238b7996 | import sys, string, math
# Read a line from stdin and print the Unicode code point of the character.
# NOTE(review): ord() raises TypeError unless the input is exactly one
# character -- no validation is done here.
s = input()
print(ord(s))
|
6,040 | 848374ea7d706bbd2ef5a76489cabeff998acb82 | # Generated by Django 3.1.5 on 2021-05-30 14:27
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Make movementpassmodel.movement_type nullable with DO_NOTHING on delete."""

    dependencies = [
        ('fuser', '0009_movement_type'),
    ]

    operations = [
        migrations.AlterField(
            model_name='movementpassmodel',
            name='movement_type',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='fuser.movement_type'),
        ),
    ]
|
6,041 | 6e6f153857879da625f57f0382f1997fcae4f6c8 | from django.db import models
from django.contrib.auth.models import User, Group
from userena.models import UserenaBaseProfile
from django.db.models.signals import post_save
from tastypie.models import create_api_key
class UserProfile(UserenaBaseProfile):
    """Userena profile linked one-to-one to the auth User.

    Optionally stores the user's Facebook id.
    """

    # user reference
    user = models.OneToOneField(User)
    facebook_id = models.CharField(max_length = 128, blank = True, null = True)

    class Meta:
        # Object-level permissions (used by userena/guardian-style checks).
        permissions = (
            ('change_profile', 'Change profile'),
            ('view_profile', 'View profile'),
            ('delete_profile', 'Delete profile'),
        )
def create_user_profile(sender, instance, created, **kwargs):
    """
    Create user profie and set the permissions

    post_save handler for User: builds the UserProfile for a freshly created
    user (negative pks are skipped) and adds the "default_users" group on a
    best-effort basis.
    """
    if created and instance.pk >= 0:
        UserProfile.objects.create(user=instance)
        # get default group, but not for anonymous
        try:
            default_group = Group.objects.get(name = "default_users")
            instance.groups.add(default_group)
        except Exception:
            # BUG FIX: was a bare `except:`, which also swallows SystemExit and
            # KeyboardInterrupt.  Group assignment stays best-effort: a missing
            # "default_users" group is silently ignored.
            pass
# Create the profile (and default group) whenever a User row is saved.
post_save.connect(create_user_profile, sender=User)
# generate api key for the user when the user is created
post_save.connect(create_api_key, sender=User)
6,042 | 7b7705cdaa8483f6abbc3f4fb3fa1ca506742da8 | import math,random,numpy as np
def myt():
    """Toy differential-privacy experiment (Python 2).

    Draws 100000 uniform digits 0-9, one-hot encodes each draw, adds
    Laplace(0, 2) noise to every one-hot vector, then prints the true
    histogram next to the noisy column sums.
    """
    x=[0]*10           # true histogram of drawn digits
    y=[]               # noisy one-hot vectors, one per draw
    for i in range(100000):
        tmp = int(random.random()*10)
        x[tmp] = x[tmp]+1
        tmpy=[0]*10
        tmpy[tmp] = 1
        for j in range(10):
            tmpy[j] = tmpy[j] + np.random.laplace(0,2,None)
        y.append(tmpy)
    # Sum the noisy vectors column-wise to recover an estimate of x.
    result=[0]*10
    for i in range(10):
        for j in range(100000):
            result[i] = result[i]+y[j][i]
    print x
    print result

if __name__ == '__main__':
    myt()
6,043 | 31a2fa5b2febc2ef80b57e45c2ebb662b886c4b7 | '''Чи можна в квадратному залі площею S помістити круглу сцену радіусом R так,
щоб від стіни до сцени був прохід не менше K?'''
from math import sqrt
# Can a round stage of radius R be placed in a square hall of area S while
# keeping a walkway at least K wide between stage and walls?
s = int(input('Input your area of square (S): '))
r = int(input('Input your radius of scene (R): '))
k = int(input('Input your width of passage (K): '))
# Hall side is sqrt(s); the gap from wall to a centered stage is side/2 - r.
k2 = sqrt(s) / 2 - r
if k2 >= k:
    print(" Yes, the scene can be set.")
else:
    print(" Sorry, but the scene can't be set.")
|
6,044 | 286801b69546046853d123c5708f24eaaa2e8cec | from __future__ import annotations
from collections import Counter
from distribution import Distribution, Normal
class GoodKind:
    """A named category of good, e.g. "Vegtable", "Iron Ore" or "Rocket Fuel".

    Instances hash on their repr so they can be used as dictionary keys.
    """

    def __init__(self, name: str):
        # A good must carry a non-empty name.
        assert len(name) > 0
        self.name = name

    def __repr__(self):
        return f"GoodKind('{self.name}')"

    def __hash__(self):
        # Hash on the repr so identically-named goods share a hash.
        # TODO, look again at Dataclasses
        return hash(repr(self))
class BagOfGoods(Counter):
    """A multiset of goods supporting whole-number scaling and division."""

    def several(self, times: int):
        """Return a new bag holding `times` copies of every good in this one."""
        return BagOfGoods({good: count * times for good, count in self.items()})

    def divide(self, other: BagOfGoods):
        """Whole-number division: how many complete `other` bags fit here.

        Natural division only (no negatives or floats).  Raises
        ZeroDivisionError when `other` holds no goods at all.
        """
        if not any(other.elements()):
            raise ZeroDivisionError()
        return min(self[good] // other[good] for good in other.keys())

    def divide_with_remainder(self, other: BagOfGoods):
        """Return (quotient, leftover bag) after dividing by `other`."""
        quotient = self.divide(other)
        leftover = BagOfGoods(
            {good: self[good] - other[good] * quotient for good in self.keys()}
        )
        return quotient, leftover

    def equals(self, other: BagOfGoods):
        """True when both bags list exactly the same goods with equal counts."""
        if self.keys() != other.keys():
            return False
        return all(self[good] == other[good] for good in self | other)
class Recipe:
    """The goods and labor needed to produce one unit of an output good,
    plus the random variation sources applied during production."""

    def __init__(
        self,
        labor_amount: float,
        required_goods: BagOfGoods,
        planet_variation: Distribution,
        person_variation: Distribution,
        labor_variation: Distribution,
        output_good: GoodKind,
    ):
        self.labor_amount = labor_amount
        self.required_goods = required_goods
        self.planet_variation = planet_variation
        self.person_variation = person_variation
        self.labor_variation = labor_variation
        self.output_good = output_good

    def draw_planet_variation(self):
        """Sample the planet-level variation, clamped at zero."""
        sample = self.planet_variation.draw()
        return sample if sample > 0 else 0

    def draw_person_variation(self):
        """Sample the person-level variation, clamped at zero."""
        sample = self.person_variation.draw()
        return sample if sample > 0 else 0

    def draw_labor_variation(self):
        """Sample the labor variation, clamped at zero."""
        sample = self.labor_variation.draw()
        return sample if sample > 0 else 0

    def determine_required_goods(self, output_amount: int):
        """Scale the per-unit requirements up to `output_amount` units.

        Returns (bag of goods, labor) for the whole batch.
        """
        return (
            self.required_goods.several(output_amount),
            self.labor_amount * output_amount,
        )

    def __hash__(self):
        # Hash on the repr so recipes can serve as dictionary keys.
        # TODO, look again at Dataclasses1G
        return hash(repr(self))

    def __str__(self):
        return f"Recipe(for '{self.output_good}')"
class FactoryKind:
    """A template for factories: which recipe they run, how fast, and a label."""

    def __init__(self, recipe: Recipe, rate: float, name: str = ""):
        self.recipe = recipe
        self.rate = rate
        self.name = name
# Registry of every GoodKind created through generate_good, keyed by name.
good_index = {}


def generate_good(good_name: str):
    """Create a GoodKind, record it in good_index, and return it."""
    good = GoodKind(good_name)
    good_index[good_name] = good
    return good


food = generate_good("Food")
wood = generate_good("Wood")
# Registry of the basic (raw-material) recipes, keyed by output good name.
basic_recipe_index = {}


def generate_basic_recipe(
    labor: int,
    good: GoodKind,
    planet_variation: Distribution,
    person_variation: Distribution,
    labor_variation: Distribution,
    required_goods=BagOfGoods(),
):
    """Build a Recipe for `good`, record it in basic_recipe_index, return it.

    NOTE(review): the mutable default `required_goods=BagOfGoods()` is a
    single shared instance across calls -- safe only while callers never
    mutate it; confirm.
    """
    recipe = Recipe(
        labor, required_goods, planet_variation, person_variation, labor_variation, good
    )
    basic_recipe_index[good.name] = recipe
    return recipe


basic_food_recipe = generate_basic_recipe(
    1, food, Normal(0.75, 0.5), Normal(1, 0.3), Normal(1, 0.05)
)
basic_wood_recipe = generate_basic_recipe(
    1, wood, Normal(1, 0.5), Normal(1, 0.2), Normal(1, 0.05)
)
6,045 | 9a183b1f81681b3dec1132a27b17e389438ab725 | """
Copyright (c) 2017 - Philip Paquette
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
# Modified from https://raw.githubusercontent.com/Newmu/dcgan_code/master/lib/rng.py
# MIT License
import numpy as np
from theano.sandbox.rng_mrg import MRG_RandomStreams as RandomStreams
import theano.tensor.shared_randomstreams
from random import Random
# Module-level random sources, all seeded identically for reproducibility.
seed = 42
py_rng = Random(seed)                 # Python stdlib RNG
np_rng = np.random.RandomState(seed)  # NumPy RNG
t_rng = RandomStreams(seed)           # Theano MRG random streams
t_rng_2 = theano.tensor.shared_randomstreams.RandomStreams(seed)  # legacy Theano streams
def set_seed(n):
    """Re-seed every module-level RNG with `n`.

    BUG FIX: the original rebuilt py_rng/np_rng/t_rng but left t_rng_2 on the
    old seed (it was also missing from the `global` list), so code drawing
    from t_rng_2 was not reproducible after re-seeding.
    """
    global seed, py_rng, np_rng, t_rng, t_rng_2
    seed = n
    py_rng = Random(seed)
    np_rng = np.random.RandomState(seed)
    t_rng = RandomStreams(seed)
    t_rng_2 = theano.tensor.shared_randomstreams.RandomStreams(seed)
|
6,046 | 02a28b61ad9d664c89829df019f4887c2c869f91 | import input_data
import tensorflow as tf
from infogan import InfoGAN
if __name__ == '__main__':
    # get input data
    mnist_data = input_data.load_mnist_dataset('../../dataset/mnist_data', one_hot=True)
    num_sample = mnist_data.train.num_examples
    dataset = 'mnist'
    if dataset == 'mnist':
        input_dim = 784  # 28x28 flattened images
    # define latent dimension: noise z plus discrete and continuous codes c
    z_dim = 16
    c_discrete_dim = 10
    c_continuous_dim = 2
    num_epoch = 1000000
    batch_size = 32
    # Launch the session
    with tf.Session() as sess:
        gan = InfoGAN(sess, num_epoch=num_epoch, batch_size=batch_size,
                      dataset=dataset, input_dim=input_dim, z_dim=z_dim, c_discrete_dim=c_discrete_dim,
                      c_continuous_dim=c_continuous_dim)
        # build generative adversarial network
        gan.build_net()
        # train the model
        gan.train(mnist_data.train, num_sample)
|
6,047 | 3abeac4fb80244d2da14e14a6048c09b0c0c1393 | """
You are given two arrays (without duplicates) nums1 and nums2 where nums1’s elements are subset of nums2. Find all the next greater numbers for nums1's elements in the corresponding places of nums2.
The Next Greater Number of a number x in nums1 is the first greater number to its right in nums2. If it does not exist, output -1 for this number.
https://leetcode.com/problems/next-greater-element-i/?tab=Description
"""
class Solution(object):
    def nextGreaterElement(self, findNums, nums):
        """For each value in findNums (a subset of nums), find the first value
        to its right in nums that is greater; -1 when none exists.

        :type findNums: List[int]
        :type nums: List[int]
        :rtype: List[int]  (findNums is updated in place and returned)

        Improvement: the original rescanned nums for every query, O(m*n).
        A monotonic stack precomputes every next-greater value in one O(n)
        pass; each query then becomes a dict lookup.  Values absent from
        nums map to -1, matching the original ValueError branch.
        """
        # next_greater[v] = first element greater than v to its right in nums
        # (nums has no duplicates per the problem statement).
        next_greater = {}
        stack = []  # values still waiting for a greater element
        for value in nums:
            while stack and stack[-1] < value:
                next_greater[stack.pop()] = value
            stack.append(value)
        # Values left on the stack have no greater element; dict.get defaults
        # them (and values missing from nums entirely) to -1.
        for k, v in enumerate(findNums):
            findNums[k] = next_greater.get(v, -1)
        return findNums
def test():
    """Smoke test: expects [3, -1] for findnums [2, 4] within nums [1, 2, 3, 4]."""
    sol = Solution()
    findnums = [2,4]
    nums = [1,2,3,4]
    print(sol.nextGreaterElement(findnums,nums))

test()
6,048 | 58d144b2c6c307719cef0b5097945c8206135ccf | """CPU functionality."""
import sys
# LS-8 opcode values.  The top two bits encode the operand count; bit 4 marks
# instructions that set the PC themselves (CALL/RET/JMP/JEQ/JNE) -- see run().
HLT = 0b00000001
LDI = 0b10000010
PRN = 0b01000111
MUL = 0b10100010
PUSH = 0b01000101
POP = 0b01000110
CMP = 0b10100111
CALL = 0b01010000
RET = 0b00010001
ADD = 0b10100000
CMP = 0b10100111  # NOTE(review): duplicate definition (same value) -- harmless but redundant
JMP = 0b01010100
JEQ = 0b01010101
JNE = 0b01010110
AND = 0b10101000
NOT = 0b01101001
OR = 0b10101010
XOR = 0b10101011
SHL = 0b10101100
SHR = 0b10101101
MOD = 0b10100100
class CPU:
    """LS-8 CPU emulator: 8 general registers, 256 bytes of RAM, a stack
    pointer initialised from R7 and a flags byte holding CMP results."""

    def __init__(self):
        """Construct a new CPU and wire up the opcode dispatch table."""
        self.reg = [0] * 8
        self.pc = 0                  # program counter
        self.ram = [0] * 256
        self.running = True
        self.reg[7] = 0xf4           # conventional top-of-stack address
        self.sp = self.reg[7]        # NOTE(review): sp copies the value; R7
                                     # itself is never updated afterwards -- confirm
        self.fl = 0b00000000         # CMP flags byte: 0b00000LGE
        # Opcode -> handler dispatch table.
        self.branchtable = {
            HLT: self.op_hlt,
            LDI: self.op_ldi,
            PRN: self.op_prn,
            MUL: self.op_mul,
            PUSH: self.op_push,
            POP: self.op_pop,
            CALL: self.op_call,
            RET: self.op_ret,
            ADD: self.op_add,
            CMP: self.op_cmp,
            JMP: self.op_jmp,
            JEQ: self.op_jeq,
            JNE: self.op_jne,
            AND: self.op_and,
            NOT: self.op_not,
            OR: self.op_or,
            XOR: self.op_xor,
            SHL: self.op_shl,
            SHR: self.op_shr,
            MOD: self.op_mod,
        }

    def ram_read(self, MAR):
        """Return the byte stored at address MAR."""
        return self.ram[MAR]

    def ram_write(self, MAR, MDR):
        """Store value MDR at address MAR."""
        self.ram[MAR] = MDR

    def op_hlt(self, operand_a, operand_b):
        """HLT: stop the run loop."""
        self.running = False

    def op_ldi(self, operand_a, operand_b):
        """LDI reg, value: load an immediate into a register."""
        self.reg[operand_a] = operand_b

    def op_prn(self, operand_a, operand_b):
        """PRN reg: print a register's value."""
        print('prn:', self.reg[operand_a])

    def op_mul(self, operand_a, operand_b):
        self.alu('MUL', operand_a, operand_b)

    def op_push(self, operand_a, operand_b):
        """PUSH reg: grow the stack downward and store the register there."""
        self.sp -= 1
        self.ram_write(self.sp, self.reg[operand_a])

    def op_pop(self, operand_a, operand_b):
        """POP reg: load the top of stack into the register and shrink it."""
        self.reg[operand_a] = self.ram_read(self.sp)
        self.sp += 1

    def op_call(self, operand_a, operand_b):
        """CALL reg: push the return address, then jump to the subroutine."""
        ret_addr = self.pc + 2
        self.sp -= 1
        self.ram_write(self.sp, ret_addr)  # save where we're coming from
        self.pc = self.reg[operand_a]      # jump to where we're going

    def op_ret(self, operand_a, operand_b):
        """RET: pop the saved return address back into the PC."""
        ret_addr = self.ram_read(self.sp)
        self.sp += 1
        self.pc = ret_addr

    def op_add(self, operand_a, operand_b):
        self.alu('ADD', operand_a, operand_b)

    def op_cmp(self, operand_a, operand_b):
        self.alu('CMP', operand_a, operand_b)

    def op_jmp(self, operand_a, operand_b):
        """JMP reg: jump to the address held in the register."""
        self.pc = self.reg[operand_a]

    def op_jeq(self, operand_a, operand_b):
        """JEQ reg: jump when the last CMP set the equal flag."""
        if self.fl == 0b00000001:
            self.op_jmp(operand_a, operand_b)
        else:
            self.pc += 2

    def op_jne(self, operand_a, operand_b):
        """JNE reg: jump when the last CMP did not set the equal flag."""
        if self.fl != 0b00000001:
            self.op_jmp(operand_a, operand_b)
        else:
            self.pc += 2

    # BUG FIX: the bitwise handlers below previously dispatched to the WRONG
    # ALU operations (OR ran 'ADD', XOR ran 'CMP', NOT ran 'ADD', SHL ran
    # 'CMP', SHR ran 'ADD', MOD ran 'CMP'), so every bitwise instruction
    # except AND computed an incorrect result.
    def op_and(self, operand_a, operand_b):
        self.alu('AND', operand_a, operand_b)

    def op_or(self, operand_a, operand_b):
        self.alu('OR', operand_a, operand_b)

    def op_xor(self, operand_a, operand_b):
        self.alu('XOR', operand_a, operand_b)

    def op_not(self, operand_a, operand_b):
        self.alu('NOT', operand_a, operand_b)

    def op_shl(self, operand_a, operand_b):
        self.alu('SHL', operand_a, operand_b)

    def op_shr(self, operand_a, operand_b):
        self.alu('SHR', operand_a, operand_b)

    def op_mod(self, operand_a, operand_b):
        self.alu('MOD', operand_a, operand_b)

    def load(self, filename):
        """Load a program into memory.

        The file holds one binary instruction per line; '#' starts a comment.
        """
        address = 0
        with open(filename) as file:
            for line in file:
                val = line.split("#")[0].strip()
                if val == '':
                    continue
                self.ram[address] = int(val, 2)
                address += 1

    def alu(self, op, reg_a, reg_b):
        """ALU operations: arithmetic, comparison and bitwise math on registers."""
        if op == 'ADD':
            self.reg[reg_a] = self.reg[reg_a] + self.reg[reg_b]
        elif op == 'MUL':
            self.reg[reg_a] = self.reg[reg_a] * self.reg[reg_b]
        elif op == 'CMP':
            # Flags byte layout: 0b00000LGE.
            if self.reg[reg_a] < self.reg[reg_b]:
                self.fl = 0b00000100
            elif self.reg[reg_a] > self.reg[reg_b]:
                self.fl = 0b00000010
            else:
                self.fl = 0b00000001
        elif op == 'AND':
            self.reg[reg_a] = self.reg[reg_a] & self.reg[reg_b]
        elif op == 'OR':
            self.reg[reg_a] = self.reg[reg_a] | self.reg[reg_b]
        elif op == 'XOR':
            self.reg[reg_a] = self.reg[reg_a] ^ self.reg[reg_b]
        elif op == 'NOT':
            self.reg[reg_a] = ~self.reg[reg_a]
        elif op == 'SHL':
            self.reg[reg_a] = self.reg[reg_a] << self.reg[reg_b]
        elif op == 'SHR':
            self.reg[reg_a] = self.reg[reg_a] >> self.reg[reg_b]
        elif op == 'MOD':
            if self.reg[reg_b] == 0:
                print('ERROR: divide by 0')
                # BUG FIX: op_hlt was called with no arguments and raised a
                # TypeError instead of halting the CPU.
                self.op_hlt(None, None)
            else:
                self.reg[reg_a] = self.reg[reg_a] % self.reg[reg_b]
        else:
            raise Exception("Unsupported ALU operation")

    def trace(self):
        """
        Handy function to print out the CPU state. You might want to call this
        from run() if you need help debugging.
        """
        print(f"TRACE: %02X | %02X %02X %02X |" % (
            self.pc,
            self.ram_read(self.pc),
            self.ram_read(self.pc + 1),
            self.ram_read(self.pc + 2)
        ), end='')
        for i in range(8):
            print(" %02X" % self.reg[i], end='')
        print()

    def run(self):
        """Fetch/decode/execute loop until HLT clears self.running."""
        self.trace()
        while self.running is True:
            IR = self.ram_read(self.pc)
            operand_a = self.ram_read(self.pc + 1)
            operand_b = self.ram_read(self.pc + 2)
            # Top two opcode bits encode the operand count; bit 4 marks
            # instructions (CALL/RET/JMP/...) that set the PC themselves, so
            # the PC only auto-advances when that bit is clear.
            op_size = IR >> 6
            ins_set = ((IR >> 4) & 0b1) == 1
            if not ins_set:
                self.pc += op_size + 1
            if IR in self.branchtable:
                self.branchtable[IR](operand_a, operand_b)
6,049 | 0054921928838d9aee63cf58f50a0a01ee12635d | from django.db import models
class crontab(models.Model):
    # Named scheduled-job entry.
    name = models.CharField(max_length=20)


class converter(models.Model):
    # Named converter entry.
    name = models.CharField(max_length=20)


class MainTable(models.Model):
    # Market snapshot of a coin: rank, identity, and price/volume statistics.
    rank = models.IntegerField(null=True)
    coinid = models.CharField(max_length=30,null=True)
    symbol = models.CharField(max_length=10)
    name = models.CharField(max_length=30)
    thumbimg = models.CharField(max_length=30)
    marketcap = models.FloatField(null=True)
    totalvolume = models.FloatField(null=True)
    price_change = models.FloatField(null=True)
    pricechangepercentage = models.FloatField(null=True)
    onehourchange = models.FloatField(null=True)
    sevendaychange = models.FloatField(null=True)
    circulating_supply = models.FloatField(null=True)


class Table(models.Model):
    # Coin identity plus two image URL fields.
    name = models.CharField(max_length=30)
    coinid = models.CharField(max_length=30)
    symbol = models.CharField(max_length=20)
    img = models.CharField(max_length=50)
    image = models.CharField(max_length=50)


class Price(models.Model):
    # Standalone price value.
    price = models.FloatField(null=True)


class Marketdata(models.Model):
    # 24h price movement, absolute and percentage.
    price_change_24h = models.FloatField(null=True)
    price_change_percentage_24h = models.FloatField(null=True)
6,050 | ea12ede51881f6e826a044df5d7aba457c434658 | """
Problem Link: https://practice.geeksforgeeks.org/problems/palindrome/0
Given an integer, check whether it is a palindrome or not.
Input:
The first line of input contains an integer T denoting the number of test cases.
For each test case there will be single line containing single integer N.
Output:
Print "Yes" or "No" (without quotes) depending on whether the number is palindrome or not.
Constraints:
1 <= T <= 1000
1 <= N <= 10000
Example:
Input:
3
6
167
55555
Output:
Yes
No
Yes
"""
# For each of T test cases, reverse N digit-by-digit and compare with N.
for _ in range(int(input())):
    n = int(input())
    temp = n
    rev = 0
    while temp:
        rev = (rev*10)+(temp%10)  # append temp's last digit to rev
        temp //= 10
    print("Yes" if rev == n else "No")
6,051 | 25b3defc8410c72c7c6f25288af91bd0c826f2ed | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'ui/about.ui'
#
# Created by: PyQt5 UI code generator 5.15.4
#
# WARNING: Any manual changes made to this file will be lost when pyuic5 is
# run again. Do not edit this file unless you know what you are doing.
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_aboutDialog(object):
    """pyuic5-generated UI class for the About dialog.

    NOTE: auto-generated from ui/about.ui -- regenerate with pyuic5 instead
    of editing by hand (see the warning in the file header).
    """

    def setupUi(self, aboutDialog):
        # Build the fixed-geometry labels (author, homepage, version, license).
        aboutDialog.setObjectName("aboutDialog")
        aboutDialog.resize(400, 175)
        self.label = QtWidgets.QLabel(aboutDialog)
        self.label.setGeometry(QtCore.QRect(20, 10, 51, 16))
        self.label.setObjectName("label")
        self.label_2 = QtWidgets.QLabel(aboutDialog)
        self.label_2.setGeometry(QtCore.QRect(40, 40, 201, 21))
        self.label_2.setObjectName("label_2")
        self.label_3 = QtWidgets.QLabel(aboutDialog)
        self.label_3.setGeometry(QtCore.QRect(40, 70, 261, 21))
        self.label_3.setOpenExternalLinks(True)  # homepage link opens in browser
        self.label_3.setObjectName("label_3")
        self.label_4 = QtWidgets.QLabel(aboutDialog)
        self.label_4.setGeometry(QtCore.QRect(40, 100, 91, 21))
        self.label_4.setObjectName("label_4")
        self.label_5 = QtWidgets.QLabel(aboutDialog)
        self.label_5.setGeometry(QtCore.QRect(40, 130, 91, 21))
        self.label_5.setObjectName("label_5")

        self.retranslateUi(aboutDialog)
        QtCore.QMetaObject.connectSlotsByName(aboutDialog)

    def retranslateUi(self, aboutDialog):
        # Assign all user-visible strings through Qt's translation layer.
        _translate = QtCore.QCoreApplication.translate
        aboutDialog.setWindowTitle(_translate("aboutDialog", "About"))
        self.label.setText(_translate("aboutDialog", "About"))
        self.label_2.setText(_translate("aboutDialog", "Author: Andrew Christiansen"))
        self.label_3.setText(_translate("aboutDialog", "Homepage: <a href=\"https://github.com/drewtchrist/pylabeler\">https://github.com/drewtchrist/pylabeler</a>"))
        self.label_4.setText(_translate("aboutDialog", "Version: 0.1.0"))
        self.label_5.setText(_translate("aboutDialog", "License: MIT"))
|
6,052 | ac0e301e58ea64465ccd4b2b9aa4ae69283d6d0c | import FWCore.ParameterSet.Config as cms
# CMSSW configuration: dump CTPPS Roman Pot geometry info using the
# misaligned alignment tags from a local sqlite database.
process = cms.Process("GeometryInfo")

# minimum of logs
process.MessageLogger = cms.Service("MessageLogger",
    cerr = cms.untracked.PSet(
        enable = cms.untracked.bool(False)
    ),
    cout = cms.untracked.PSet(
        enable = cms.untracked.bool(True),
        threshold = cms.untracked.string('INFO')
    )
)

# geometry
process.load("Geometry.VeryForwardGeometry.geometryRPFromDD_2018_cfi")
#process.load("Geometry.VeryForwardGeometry.geometryRPFromDD_2017_cfi")

# no events to process
process.source = cms.Source("EmptyIOVSource",
    timetype = cms.string('runnumber'),
    firstValue = cms.uint64(1),
    lastValue = cms.uint64(1),
    interval = cms.uint64(1)
)
process.maxEvents = cms.untracked.PSet(
    input = cms.untracked.int32(1)
)

#Database output service
process.load("CondCore.CondDB.CondDB_cfi")
# input database (in this case local sqlite file)
process.CondDB.connect = 'sqlite_file:CTPPSRPAlignment.db'

process.PoolDBESSource = cms.ESSource("PoolDBESSource",
    process.CondDB,
    DumpStat=cms.untracked.bool(True),
    toGet = cms.VPSet(
        cms.PSet(
            record = cms.string('RPMisalignedAlignmentRecord'),
            tag = cms.string("CTPPSRPAlignment_misaligned")
        )
    )
)

# Analyzer printing per-RP and per-sensor geometry for the misaligned set.
process.ctppsGeometryInfo = cms.EDAnalyzer("CTPPSGeometryInfo",
    geometryType = cms.untracked.string("misaligned"),
    printRPInfo = cms.untracked.bool(True),
    printSensorInfo = cms.untracked.bool(True)
)

process.p = cms.Path(
    process.ctppsGeometryInfo
)
|
6,053 | 64ac007faeebe0e71ba0060e74fa07154e6291e2 | from django.urls import path
from .views import PollsList, SinglePollsView, PollsCreate, PollsAnswer
app_name = "authors"
# app_name enables namespaced reverse look-ups later (e.g. "authors:...").

# Poll list/create/detail plus answer submission endpoints.
urlpatterns = [
    path('polls/', PollsList.as_view()),
    path('polls/create', PollsCreate.as_view()),
    path('polls/<int:pk>', SinglePollsView.as_view()),
    path('answers/', PollsAnswer.as_view()),
]
6,054 | 2a5f69fbb26bd1f94c10ff0da687391bf5bd3c23 | import fs
# NOTE(review): this fragment references names (g2, capUrl, re, base64) that
# are not defined or imported in this file -- it appears extracted from a
# larger grab-based scraping script; confirm the missing context before use.
gInfo = {
    'obj': g2.go(capUrl),
    'Headers-C-T': g2.response.headers['Content-Type'],
    'url': g2.response.url,
    'urlDetails': g2.response.url_details()
}
# NOTE(review): duplicated assignment target `capHtml = capHtml = ...` --
# the first target is redundant.
capHtml = capHtml = gInfo['obj'].unicode_body(ignore_errors=True, fix_special_entities=True)
# Extract the base64-encoded captcha image embedded in the page HTML.
b64cap = re.findall(r'base64,(.*?)\\" id=', capHtml, re.DOTALL)
# NOTE(review): the base64 TEXT is written to captcha.png, then read back and
# decoded into ffcap.jpeg -- the intermediate file holds text, not an image.
savecaptcha = open(file="/home/ubuntu/captcha.png", mode="w")
savecaptcha.write(b64cap[0])
savecaptcha.close()
f = open(file="/home/ubuntu/captcha.png", mode="rb")
r = f.read()
i = base64.b64decode(r)
f.close()
fincapfile = open(file="/home/ubuntu/workspace/ffcap.jpeg", mode="wb")
capsave = fincapfile.write(i)
fincapfile.close()
6,055 | 1145050d82e614d5c248fc7e6a71720e6ff72414 | # -*- coding: utf-8 -*-
"""
# @Time : 2018/6/11 下午6:45
# @Author : zhanzecheng
# @File : 542.01矩阵1.py
# @Software: PyCharm
"""
# 一个简单的循环方式来解决这个问题
# 这一题的思路不错,用多次循环来计数
# TODO: check 1
class Solution:
    def updateMatrix(self, matrix):
        """
        01-matrix: replace every cell with its distance to the nearest 0.

        Strategy: repeated sweeps.  On pass `cur`, any cell equal to `cur`
        whose in-bounds neighbours are all >= `cur` gets incremented; sweeps
        continue until a full pass changes nothing.

        :type matrix: List[List[int]]
        :rtype: List[List[int]]  (matrix is modified in place)
        """
        rows = len(matrix)
        cols = len(matrix[0])
        cur = 0
        while True:
            cur += 1
            changed = False
            for i in range(rows):
                for j in range(cols):
                    if matrix[i][j] != cur:
                        continue
                    # The cell grows only while no in-bounds neighbour is
                    # strictly closer to a zero than `cur`.
                    above_ok = i - 1 < 0 or matrix[i - 1][j] >= cur
                    left_ok = j - 1 < 0 or matrix[i][j - 1] >= cur
                    below_ok = i + 1 >= rows or matrix[i + 1][j] >= cur
                    right_ok = j + 1 >= cols or matrix[i][j + 1] >= cur
                    if above_ok and left_ok and below_ok and right_ok:
                        matrix[i][j] += 1
                        changed = True
            if not changed:
                return matrix
if __name__ == '__main__':
    # Manual check on a simple grid.
    solution = Solution()
    data = [
        [0, 0, 0, 0],
        [1, 1, 1, 1],
        [1, 1, 1, 1],
        [1, 1, 1, 1]
    ]
    print(solution.updateMatrix(data))
    # Larger 10x10 input kept for reference.
    data =[
        [1, 0, 1, 1, 0, 0, 1, 0, 0, 1],
        [0, 1, 1, 0, 1, 0, 1, 0, 1, 1],
        [0, 0, 1, 0, 1, 0, 0, 1, 0, 0],
        [1, 0, 1, 0, 1, 1, 1, 1, 1, 1],
        [0, 1, 0, 1, 1, 0, 0, 0, 0, 1],
        [0, 0, 1, 0, 1, 1, 1, 0, 1, 0],
        [0, 1, 0, 1, 0, 1, 0, 0, 1, 1],
        [1, 0, 0, 0, 1, 1, 1, 1, 0, 1],
        [1, 1, 1, 1, 1, 1, 1, 0, 1, 0],
        [1, 1, 1, 1, 0, 1, 0, 0, 1, 1]
    ]
    # `result` is a wrong attempt kept for comparison; `true_result` is the
    # expected answer for the 10x10 input above.
    result = [
        [1,0,1,1,0,0,1,0,0,1],
        [0,1,1,0,1,0,1,0,1,1],
        [0,0,1,0,1,0,0,1,0,0],
        [1,0,1,0,1,1,1,1,1,1],
        [0,1,0,1,1,0,0,0,0,1],
        [0,0,1,0,1,1,1,0,1,0],
        [0,1,0,1,0,1,0,0,1,1],
        [1,0,0,0,1,2,1,1,0,1],
        [2,1,1,1,1,1,1,0,1,0],
        [1,2,1,1,0,1,0,0,1,1]
    ]
    true_result = [
        [1,0,1,1,0,0,1,0,0,1],
        [0,1,1,0,1,0,1,0,1,1],
        [0,0,1,0,1,0,0,1,0,0],
        [1,0,1,0,1,1,1,1,1,1],
        [0,1,0,1,1,0,0,0,0,1],
        [0,0,1,0,1,1,1,0,1,0],
        [0,1,0,1,0,1,0,0,1,1],
        [1,0,0,0,1,2,1,1,0,1],
        [2,1,1,1,1,2,1,0,1,0],
        [3,2,2,1,0,1,0,0,1,1]
    ]
|
6,056 | 96708216c5ffa56a60475b295c21b18225e6eed9 | from django.urls import path
from rest_framework.routers import DefaultRouter
from . import views
app_name = "rooms"

# ViewSet routing: the router generates list/detail/create/update/delete
# routes for RoomViewSet at the app root.
router = DefaultRouter()
router.register("", views.RoomViewSet)
urlpatterns = router.urls
#
# urlpatterns = [
# # path("list/", views.ListRoomsView.as_view()),
# # path("list/", views.rooms_view),
# path("list/",views.RoomsView.as_view()),
# path('<int:pk>/',views.RoomView.as_view()),
# path('search/',views.room_search)
# ]
|
6,057 | a74f2050a057f579a8a8b77ac04ef09073cdb6cf | import matplotlib.pyplot as plt
import numpy as np
import random
plt.ion()
def draw_board(grid_size, hole_pos, wall_pos):
    """Build a grid_size x grid_size board: 1 = floor, 10 = wall, 0 = goal."""
    cells = np.ones((grid_size, grid_size))
    cells[wall_pos] = 10  # walls first so the goal can overwrite them
    cells[hole_pos] = 0
    return cells
class Game():
    """
    A class which implements the Gobble game. Initializes with a grid_size
    and path_radius. There is an "example" method to illustrate how the
    game is played.
    """

    def __init__(self, grid_size):
        self.grid_size = grid_size
        #self.player_pos = (np.random.randint(grid_size),np.random.randint(grid_size))
        self.start_game(grid_size)
        #self.show_board()
        plt.title("Nate's Lame Game")

    def start_game(self, grid_size):
        # Reset score, rebuild the board with a wall row and the goal at the
        # origin, and place the player marker (value .5) at (9, 9).
        # NOTE(review): the (9, 9) start assumes grid_size > 9 -- confirm.
        self.score = 0
        self.goal_pos = (0,0)
        self.wall_pos = (grid_size//2,np.arange(5))
        self.board = draw_board(grid_size, self.goal_pos, self.wall_pos)
        self.player_pos = (9,9)
        self.board[self.player_pos] = .5
        # self.board[self.player_pos] = .5

    def show_board(self):
        # Render the current board with matplotlib.
        plt.imshow(self.board)

    def update_board(self, new_pos, show_plt=False):
        """Move the player to new_pos; returns True when the game restarted."""
        # if np.sum(np.abs(np.array(new_pos) - np.array(self.goal_pos))) < np.sum(np.abs(np.array(self.player_pos) - np.array(self.goal_pos))):
        #     self.score += 1
        # else:
        #     self.score -= 1
        # Big reward for stepping adjacent to the goal.
        if np.sum(np.abs(np.array(new_pos) - np.array(self.goal_pos))) == 1:
            self.score += 100
        self.board[self.player_pos] = 1
        self.board[new_pos] = .5
        self.player_pos = new_pos
        if show_plt:
            self.show_board()
        if self.check_end():
            print('Game over yo')
            self.start_game(self.grid_size)
            return True
        return False

    def get_actions(self):
        """Return the in-bounds, non-wall neighbour positions of the player."""
        x,y = self.player_pos
        actions = [(x+1,y), (x,y+1),
                   (x-1,y), (x,y-1)]
        v_dim = self.board.shape[0]
        valid = []
        for a in actions:
            if a[0] < v_dim and a[1] < v_dim and a[0] > -1 and a[1] > -1 and self.board[a] != 10:
                valid.append(a)
        return valid

    def check_end(self):
        # The game ends when the player reaches the goal; score resets.
        if self.player_pos == self.goal_pos:
            print('game is finished')
            self.score = 0
            return True
        else:
            return False

    def example(self):
        """
        Illustrates how to play the game.
        """
        # Random walk until the goal is reached, animating each step.
        while self.check_end() == False:
            plt.pause(0.25)
            end = self.update_board(random.choice(self.get_actions()), True)
|
6,058 | 1935cab249bf559aeadf785ce7abcecb03344c04 | from .signals import get_restaurant_coordinates, count_average_price, count_total_calories
from .dish import Dish
from .ingredients import Ingredient
from .restaurants import Restaurant
|
6,059 | 81ae5bbc8e3e712ee4f54656bc28f385a0b4a29f | # -*- coding: utf-8 -*-
"""
Created on Thu Jun 18 23:54:17 2015
@author: rein
@license: MIT
@version: 0.1
"""
from __future__ import print_function
import numpy as np
import footballpy.processing.ragged_array as ra
""" Ranking dictionary necessary to determine the column number
of each player.
The type system depends on the type of the raw data.
Type A: Elaborate positioning scheme
Type B: Simple scheme
Type C: Amisco-scheme
"""
# Tactical-position rank tables, one per tracking-system scheme.
# Lower rank = further back / further left in the final column layout.
__position_ranking = {
    'A': {
        'TW': 1, 'LV': 2, 'IVL': 3, 'IVZ': 4, 'IVR': 5, 'RV': 6,
        'DML': 7, 'DMZ': 8, 'DMR': 9,
        'LM': 10, 'HL': 11, 'MZ': 12, 'HR': 13, 'RM': 14,
        'OLM': 15, 'ZO': 16, 'ORM': 17,
        'HST': 18, 'LA': 19, 'STL': 20, 'STR': 21, 'RA': 22,
        'STZ': 23
    },
    'B': {
        'G': 1, 'D': 2, 'M': 3, 'A': 4
    },
    'C': {
        'goalie': 1, 'defenseman': 2, 'mid-fielder': 3,
        'forward': 4
    }
}
def sort_position_data(pos, type='A'):
    """Order player tuples from goalkeeper to forwards.

    Each entry of *pos* carries its playing-position label at index 2;
    entries are sorted by that label's rank in the scheme selected by
    *type* ('A' elaborate, 'B' simple, 'C' Amisco).

    Args:
        pos: list of tuples with the position label at index 2.
        type: ranking scheme key (kept as-is for callers, although it
            shadows the `type` builtin inside this function).
    Returns:
        A new, sorted list; *pos* is left untouched.
    """
    rank = __position_ranking[type]
    return sorted(pos, key=lambda entry: rank[entry[2]])
def stitch_position_data(pos,ball,NO_PLAYERS=11):
    """Puts position data into a single array.

    stitch_position_data does not change the ordering of the data and
    stitches the position data together as given. Therefore, if the playing
    position must be controlled sort_position_data must be called first.

    Args:
        pos: position data list (indexed ragged array)
        ball: matrix for one half; column 0 must hold consecutive frame numbers
        NO_PLAYERS: default = 11
    Returns:
        output_fields: one row per ball frame, player x/y columns side by side
    Raises:
        IndexError: when the ball frame column is not a gap-free sequence.
    """
    # magic numbers
    _MISSING_ = -2.0**13      # sentinel for frames a player has no sample at
    _NO_DIM_ = 2 # x- and y-coordinates
    _POST_LOOK_ = 20          # NOTE(review): unused — confirm and remove
    # end magic numbers
    frames = ball[:,0]
    min_frame = min(frames)
    max_frame = max(frames)
    no_frames = ball.shape[0]
    # Guard: the downstream expansion assumes one row per consecutive frame.
    if no_frames != (max_frame - min_frame + 1):
        raise IndexError("No of ball frames doesn't match")
    no_players_input = len(pos)  # NOTE(review): unused — confirm and remove
    input_fields = ra.expand_indexed_ragged_array(pos, frames,
            lambda x: x[1], _MISSING_)
    # NOTE(review): the cleaned array below is computed but the condense step
    # uses the raw `input_fields` instead — possibly a latent bug; confirm
    # against ragged_array's intended pipeline before changing.
    input_fields_clean = ra.drop_expanded_ragged_entries(input_fields,NO_PLAYERS*_NO_DIM_,_MISSING_)
    output_fields = ra.condense_expanded_ragged_array(input_fields, missing_id = _MISSING_)
    return output_fields
def determine_playing_direction(goalie):
    """Infer a team's attacking direction from its goalkeeper.

    The pitch origin sits at midfield, so a goalie whose mean x-position is
    negative defends the left goal and the team plays left-to-right.

    Args:
        goalie: 2-column array of the goalie's x/y positions over time.
    Returns:
        'l2r' when the goalie averages on the negative-x side, else 'r2l'.
    """
    mean_x = np.average(goalie[:, 0])
    if mean_x < 0:
        return 'l2r'
    return 'r2l'
def switch_playing_direction(position_coords):
    """Mirror all x-coordinates around the pitch's vertical midline, in place.

    Columns are laid out as interleaved x0, y0, x1, y1, ...; with the origin
    at the pitch center, flipping the sign of every x column swaps
    left-to-right play for right-to-left play (and vice versa).

    Args:
        position_coords: 2-D array of interleaved x/y player coordinates.
    Returns:
        None — the array is modified in place.
    """
    # Even-indexed columns are the x-coordinates; negate them in place.
    position_coords[:, 0::2] = -position_coords[:, 0::2]
def rescale_playing_coords(position_coords, pitch_dim):
    """Re-map centered pitch coordinates to a [0, 10] x [0, 10] frame, in place.

    Input coordinates have their origin at the pitch center (x along the
    length, y along the width). This shifts the origin to the bottom-left
    corner and scales both axes independently so each runs 0..10.

    Args:
        position_coords: 2-D array of interleaved x0, y0, x1, y1, ... columns.
        pitch_dim: dict with 'length' (x extent) and 'width' (y extent).
    Returns:
        None — the array is modified in place.
    """
    length = pitch_dim['length']
    width = pitch_dim['width']
    # Slices are numpy views, so in-place arithmetic hits the original array.
    xs = position_coords[:, 0::2]
    ys = position_coords[:, 1::2]
    xs += length / 2          # origin to the left touchline end
    ys += width / 2           # origin to the bottom touchline
    xs *= 10.0 / length       # normalize length axis to [0, 10]
    ys *= 10.0 / width        # normalize width axis to [0, 10]
def clamp_values(result, vmin=0.0, vmax=10.0):
    """Clamp every coordinate matrix held in *result* to [vmin, vmax], in place.

    Args:
        result: mapping from team/ball key to a list of numpy arrays
            (one per half).
        vmin: lower clamp bound.
        vmax: upper clamp bound.
    Returns:
        None — each array is clamped in place.
    """
    for key in result:
        for half in result[key]:
            # out=half performs the clamp without allocating a new array.
            np.clip(half, vmin, vmax, out=half)
def run(pos_data,ball_data,match,ranking_type='A'):
    """Driver routine to run all processing steps.

    Pipeline per half: sort players by tactical position, stitch their
    trajectories into one matrix, normalize playing direction so home always
    attacks right-to-left, then rescale everything to the [0, 10] frame and
    clamp. The ball of the half in which home played left-to-right is
    mirrored to match.

    Args:
        pos_data: {'home'|'guest': {'1st'|'2nd': indexed ragged arrays}}
        ball_data: two matrices (1st and 2nd half), frame number in column 0.
        match: match metadata; match['stadium'] supplies pitch dimensions.
        ranking_type: Specifies which position_ranking system should be used.
    Returns:
        dict with per-half matrices for 'home', 'guest' and 'ball'.
    """
    roles = ['home','guest']
    sections = ['1st','2nd']
    result = {'home':[0]*2, 'guest':[0]*2, 'ball':[0]*2}
    # switch for l2r switching mode
    l2r_section = 0
    # processing player position data first
    for sec in sections:
        home_direction = 'r2l'
        for role in roles:
            print('Processing: %s-%s...' % (role,sec))
            sorted_pos_data = sort_position_data(pos_data[role][sec], ranking_type)
            stitched_data = stitch_position_data(sorted_pos_data,ball_data[sec!='1st'])
            if role == 'home':
                # Direction is judged from the home goalie (first two columns
                # after sorting); the same mirroring is then applied to the
                # guest pass of this half because 'home' is processed first.
                home_direction = determine_playing_direction(stitched_data[:,0:2])
            if home_direction == 'l2r':
                switch_playing_direction(stitched_data)
                l2r_section = 0 if sec=='1st' else 1
            rescale_playing_coords(stitched_data,match['stadium'])
            result[role][0 if sec=='1st' else 1] = stitched_data
        print('done')
    # processing ball data
    print('Processing ball...')
    # NOTE(review): only the half recorded in l2r_section is mirrored —
    # presumably correct because teams swap ends at half time; confirm.
    switch_playing_direction(ball_data[l2r_section][:,1:3])
    for i in [0,1]:
        rescale_playing_coords(ball_data[i][:,1:3],match['stadium'])
    result['ball'][0] = ball_data[0][:,1:3]
    result['ball'][1] = ball_data[1][:,1:3]
    #correct value ranges.
    print('clamping values.')
    clamp_values(result)
    print('done.')
    return result
if __name__ == '__main__':
    # NOTE(review): interactive scratch code — pos_data, ball_data and match
    # are not defined in this module, so running the file directly raises
    # NameError; they were presumably loaded in an interpreter session.
    #teams, match, pos_data,ball_data
    section = '2nd'
    kk = pos_data['home'][section]
    kks = sort_position_data(kk)
    bb = ball_data[section!='1st']
    ss = stitch_position_data(kks,bb)
    data_transformed = run(pos_data,ball_data,match)
|
6,060 | 3e7d2bacb15c39658ef5044685b73068deb1c145 | from math import pi
from root_regula_falsi import *
r = 1.0       # sphere radius
ρs = 200.0    # sphere density
ρw = 1000.0   # water density

def f(h):
    """Buoyancy residual for a sphere submerged to depth *h*.

    Positive when the weight of displaced water exceeds the sphere's
    weight; the root in [0, 2r] is the floating equilibrium depth.
    """
    sphere_volume = 4 * pi * r**3 / 3
    # Spherical cap of height h above the waterline is not displacing water.
    cap_volume = pi * h**2 * (3 * r - h) / 3
    displaced_water = sphere_volume - cap_volume
    return ρw * displaced_water - ρs * sphere_volume
# Bracket the equilibrium depth between fully emerged (0) and fully
# submerged (2r) and solve by the regula-falsi method.
xr = root_regula_falsi(f, 0.0, 2*r)
6,061 | 7f21fcc1265be8b3263971a4e76470616459f433 | from django.core.exceptions import ObjectDoesNotExist
from django.shortcuts import render, HttpResponseRedirect, Http404
from django.contrib.auth import authenticate, login, logout
from accounts.forms import RegistrationForm, LoginForm, StudentDetailsForm, companyDetailsForm, SocietyDetailsForm
from accounts.models import MyUser, studentData, CompanyData, SoietyData
from accounts.helper_functions import password_check, email_check
# Create your views here.
def login_page(request):
    """Authenticate a user; redirect to home or to profile completion.

    Python 2 / legacy Django code (`print` statement, callable
    is_authenticated). Users with a studentData profile go to /home;
    otherwise they are sent to complete registration for their account type.
    """
    if request.user.is_authenticated():
        return HttpResponseRedirect("/")
    else:
        form = LoginForm(request.POST or None)
        next_url = request.GET.get('next')  # NOTE(review): unused
        if form.is_valid():
            username = form.cleaned_data['email']
            password = form.cleaned_data['password']
            # SECURITY NOTE(review): prints the plaintext password to the
            # server log — remove before deployment.
            print username, password
            user = authenticate(username=username, password=password)
            if user is not None:
                try:
                    user_details = studentData.objects.get(id=user.id)
                    login(request, user)
                    return HttpResponseRedirect('/home')
                except ObjectDoesNotExist:
                    account = MyUser.objects.get(id=user.id)
                    # NOTE(review): "tyoe" looks like a typo carried over
                    # from the model method name — confirm in MyUser.
                    account_type = account.get_account_tyoe()
                    return HttpResponseRedirect("complete_registration/" + account_type +"/"+str(user.id))
        context = {
            "form": form
        }
        return render(request, "generalPages/loginpage.html", context)
def register_page(request):
    """Render the registration landing page (form handling is disabled)."""
    # if request.user.is_authenticated():
    #     return HttpResponseRedirect("/")
    # else:
    #     form = RegistrationForm(request.POST or None)
    #     context = {
    #         "form": RegistrationForm(),
    #         "action_value_society": "register/society",
    #         "action_value_student": "register/student",
    #         "action_value_company": "register/company",
    #         "submit_btn_value": "Register"
    #
    #     }
    #     return render(request, "generalPages/register.html", context)
    return render(request, "generalPages/register.html")
def student_reg(request):
    """Render the student registration page (account creation disabled)."""
    # if request.user.is_authenticated():
    #     return HttpResponseRedirect("/")
    # else:
    #     form = RegistrationForm(request.POST or None)
    #     print form
    #
    #     if form.is_valid():
    #         email = form.cleaned_data["email"]
    #         password = form.cleaned_data["password2"]
    #
    #         print email + password
    #
    #         user = MyUser.objects.create_user(email=email, password=password, userType="student")
    #         #todo: send out confirmation email
    #
    #
    #         # get the ID so i can pass it in the URL to the complete registration page
    #         user_id = user.id
    #         return HttpResponseRedirect("/complete_registration/student/" + str(user_id))
    #
    #     else:
    #         #todo: change this that it raises username already in use error
    #         print "form is invalid"
    #         # todo: add a parameter that tells them, the username or password was incorrect
    #         return HttpResponseRedirect("/register")
    return render(request, "student/CompleteStudentRegistration.html")
def company_reg(request):
    """Render the company registration page (account creation disabled)."""
    # if request.user.is_authenticated():
    #     return HttpResponseRedirect("/")
    # else:
    #     form = RegistrationForm(request.POST or None)
    #     print form
    #
    #     if form.is_valid():
    #         email = form.cleaned_data["email"]
    #         password = form.cleaned_data["password2"]
    #
    #         print email + password
    #
    #         user = MyUser.objects.create_user(email=email, password=password, userType="company")
    #         # todo: send out confirmation email
    #
    #         # get the ID so i can pass it in the URL to the complete registration page
    #         user_id = user.id
    #         return HttpResponseRedirect("/complete_registration/company/" + str(user_id))
    #
    #     else:
    #         print "form is invalid"
    #         # todo: add a parameter that tells them, the username or password was incorrect
    #         return HttpResponseRedirect("/register")
    return render(request, "company/completeCompanyregistration.html")
def society_reg(request):
    """Render the society registration page (account creation disabled)."""
    # if request.user.is_authenticated():
    #     return HttpResponseRedirect("/")
    # else:
    #     form = RegistrationForm(request.POST or None)
    #     print form
    #
    #     if form.is_valid():
    #         email = form.cleaned_data["email"]
    #         password = form.cleaned_data["password2"]
    #
    #         print email + password
    #
    #         user = MyUser.objects.create_user(email=email, password=password, userType="society")
    #         # todo: send out confirmation email
    #
    #         # get the ID so i can pass it in the URL to the complete registration page
    #         user_id = user.id
    #         return HttpResponseRedirect("/complete_registration/society/" + str(user_id))
    #
    #     else:
    #         print "form is invalid"
    #         # todo: add a parameter that tells them, the username or password was incorrect
    #         return HttpResponseRedirect("/register")
    return render(request, "society/completeSocietyRegistration.html")
def complete_student_registration(request):
    """Stub: dumps the POST payload and redirects home.

    NOTE(review): unlike the company/society variants this view takes no
    `id` parameter and its real logic is fully commented out — the URLconf
    presumably still routes here; confirm before re-enabling.
    """
    print request.POST
    return HttpResponseRedirect("/")
    # # check if the id is the one that matchest to their email:
    #
    #
    # # print "in their"
    # # print request
    # #
    # # return HttpResponseRedirect("/")
    # if request.user.is_authenticated():
    #     return HttpResponseRedirect("/")
    # else:
    #     try:
    #         user = MyUser.objects.get(id=id)
    #
    #     except ObjectDoesNotExist:
    #         return HttpResponseRedirect("/register")
    #     except:
    #         return HttpResponseRedirect("/login")
    #
    #     try:
    #         user_details = studentData.objects.get(id=id)
    #         login(request, user)
    #         return HttpResponseRedirect('/home')
    #     except ObjectDoesNotExist:
    #
    #         if user.user_type == 'student':
    #             form = StudentDetailsForm(request.POST or None)
    #
    #             if form.is_valid():
    #                 f_name = form.cleaned_data["first_name"]
    #                 s_name= form.cleaned_data["surname"]
    #                 studyCunt = form.cleaned_data["countryOfStudy"]
    #                 course= form.cleaned_data['course']
    #                 university = form.cleaned_data['university']
    #
    #                 studentData.objects.create(id=user, first_name=f_name, surname=s_name,
    #                                            countryOfStudy=studyCunt, course=course, university=university)
    #                 login(request, user)
    #                 return HttpResponseRedirect("/home")
    #             # else:
    #             #     print "form is invalid"
    #             context = {
    #                 "form": StudentDetailsForm(),
    #
    #             }
    #             return render(request, "student/CompleteStudentRegistration.html", context)
    #
    #             pass
    #         else:
    #             return HttpResponseRedirect('/login')
    #     except:
    #         return HttpResponseRedirect("/404")
def complete_company_registration(request, id):
    """Collect company profile details for the MyUser with the given id.

    If a CompanyData row already exists the user is logged in and sent to
    /company_home; otherwise the details form is shown and, on success,
    the CompanyData row is created.
    NOTE(review): `id` shadows the builtin and is trusted from the URL —
    any visitor can complete another user's registration; confirm intent.
    """
    # check if the id is the one that matchest to their email:
    # print "in their"
    # print request
    #
    # return HttpResponseRedirect("/")
    if request.user.is_authenticated():
        return HttpResponseRedirect("/")
    else:
        try:
            user = MyUser.objects.get(id=id)
        except ObjectDoesNotExist:
            return HttpResponseRedirect("/register")
        # NOTE(review): bare except hides real errors — narrow it.
        except:
            return HttpResponseRedirect("/login")
        try:
            user_details = CompanyData.objects.get(id=id)
            login(request, user)
            return HttpResponseRedirect('/company_home')
        except ObjectDoesNotExist:
            if user.user_type == 'company':
                form = companyDetailsForm(request.POST or None)
                if form.is_valid():
                    print "there"
                    company_name = form.cleaned_data["company_name"]
                    website = form.cleaned_data["company_website"]
                    city = form.cleaned_data["HQ_city"]
                    industry = form.cleaned_data["industry"]
                    CompanyData.objects.create(id=user, Company_name=company_name, company_website=website,
                                               HQ_city=city, description=None, industry=industry)
                    login(request, user)
                    return HttpResponseRedirect("/company_home")
                # else:
                #     print "form is invalid"
                context = {
                    "form": companyDetailsForm(),
                }
                return render(request, "company/completeCompanyregistration.html", context)
                pass
            else:
                return HttpResponseRedirect('/login')
        # NOTE(review): bare except masks template/DB errors as a 404 redirect.
        except:
            return HttpResponseRedirect("/404")
def complete_society_registration(request, id):
    """Collect society profile details for the MyUser with the given id.

    Mirrors complete_company_registration but creates a SoietyData row
    (the model name typo is preserved — it must match models.py).
    """
    print "hey"
    if request.user.is_authenticated():
        return HttpResponseRedirect("/")
    else:
        print "ho"
        try:
            user = MyUser.objects.get(id=id)
        except ObjectDoesNotExist:
            return HttpResponseRedirect("/register")
        # NOTE(review): bare except hides real errors — narrow it.
        except:
            return HttpResponseRedirect("/login")
        try:
            user_details = SoietyData.objects.get(id=id)
            login(request, user)
            return HttpResponseRedirect('/home')
        except ObjectDoesNotExist:
            print "lets "
            if user.user_type == 'society':
                form = SocietyDetailsForm(request.POST or None)
                if form.is_valid():
                    name = form.cleaned_data['society_name']
                    university = form.cleaned_data['society_university']
                    fb = form.cleaned_data['society_FB']
                    website = form.cleaned_data['society_website']
                    SoietyData.objects.create(id=user, society_name=name, society_university=university,
                                              society_facebook=fb, society_website=website)
                    login(request, user)
                    return HttpResponseRedirect("/society_home")
                # else:
                #     print "form is invalid"
                context = {
                    "form": SocietyDetailsForm(),
                }
                print "go"
                return render(request, "society/completeSocietyRegistration.html", context)
            else:
                return HttpResponseRedirect('/login')
        # NOTE(review): bare except masks template/DB errors behind this URL.
        except:
            return HttpResponseRedirect("/thisisaknownerror")
def logout_call(request):
    """Log the current user out and return to the landing page."""
    logout(request)
    return HttpResponseRedirect('/')
|
6,062 | 61484d9a08f2e3fcd15573ce89be4118a442dc2e | # Generated by Django 3.1 on 2020-09-26 03:46
import datetime
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
    # Auto-generated schema migration — do not hand-edit applied migrations.
    # Adds a creation date to study_material and relaxes/defaults several
    # fields on exam and study_material.

    dependencies = [
        ('bcs', '0002_auto_20200915_2245'),
    ]
    operations = [
        migrations.AddField(
            model_name='study_material',
            name='study_materail_date',
            # preserve_default=False: timezone.now only backfills existing rows.
            field=models.DateField(auto_now_add=True, default=django.utils.timezone.now),
            preserve_default=False,
        ),
        migrations.AlterField(
            model_name='exam',
            name='exam_date',
            # NOTE(review): default is the date the migration was generated
            # (frozen at makemigrations time), not "today" at runtime.
            field=models.DateField(blank=True, default=datetime.date(2020, 9, 26)),
        ),
        migrations.AlterField(
            model_name='exam',
            name='exam_time',
            field=models.IntegerField(default=10),
        ),
        migrations.AlterField(
            model_name='study_material',
            name='study_image',
            field=models.ImageField(blank=True, null=True, upload_to='images/'),
        ),
    ]
|
6,063 | 1ccb23435d8501ed82debf91bd6bf856830d01cb | from flask import Blueprint, render_template, request, session, url_for, redirect
from flask_socketio import join_room, leave_room, send, emit
from models.game.game import Game
from models.games.games import Games
from decorators.req_login import requires_login
# Blueprint grouping all /game routes; registered by the app factory.
game_blueprint = Blueprint('game', __name__)
@game_blueprint.route('/<string:game_id>')
@requires_login
def game_index(game_id):
    """Serve the game page for a logged-in user.

    NOTE(review): game_id is accepted but unused — presumably the template
    fetches game state client-side via sockets; confirm.
    """
    return render_template('game/game.html')
|
6,064 | 0d18272f8056f37eddabb024dd769a2793f88c24 | #!/usr/bin/env python
import argparse
import xml.etree.cElementTree as ET
from datetime import datetime, timedelta
from requests import codes as requests_codes
from requests_futures.sessions import FuturesSession
from xml.etree import ElementTree as ET
parser = argparse.ArgumentParser(description='Fetch dqm images')
parser.add_argument('-H', '--host', metavar='ADDRESS', required=True)
args = parser.parse_args()
# XML namespace prefixes used in the Vaisala SOAP observation payloads.
namespaces = {'SOAP-ENV': 'http://schemas.xmlsoap.org/soap/envelope/',
              'SOAP-ENC': 'http://schemas.xmlsoap.org/soap/encoding/',
              'xsi': 'http://www.w3.org/2001/XMLSchema-instance',
              'xsd': 'http://www.w3.org/2001/XMLSchema',
              'vai4': 'http://www.vaisala.com/schema/ice/iceMsgCommon/v1',
              'vai1': 'http://www.vaisala.com/wsdl/ice/uploadIceObservation/v2',
              'vai3': 'http://www.vaisala.com/schema/ice/obsMsg/v2',
              }
# Compatibility shim: very old ElementTree versions lack register_namespace,
# so fall back to poking the private _namespace_map (Python 2 era).
try:
    register_namespace = ET.register_namespace
except AttributeError:
    def register_namespace(prefix, uri):
        ET._namespace_map[uri] = prefix
# Python 2 only (`iteritems`): register every prefix so serialized XML
# uses the vai*/SOAP prefixes instead of generated ns0, ns1, ...
for prefix, uri in namespaces.iteritems():
    register_namespace(prefix, uri)
class ObsV2XML:
    """Incremental builder for a vai3 <observation> XML document.

    Usage: add_station(target) once per station, then add_value(...) per
    measurement; xml() serializes the tree. Byte-exact output matters to the
    receiving service, so the element/attribute layout must not change.
    """
    def __init__(self):
        # Root <vai3:observation> element; children are added in call order.
        self.observation = ET.Element("{%s}observation" % namespaces['vai3'],
                                      attrib={'version': '2.0',
                                              'fastTrackQC': 'false'})
        # target tuple -> its <instance> element
        self.instances = {}
        # (target, timestamp) -> its <resultOf> element (one per timestamp)
        self.resultOfs = {}
    def add_station(self, target):
        """Create the <instance>/<target> skeleton for one station."""
        # target = (idType, id)
        self.instances[target] = ET.SubElement(self.observation,
                                               "{%s}instance" % namespaces['vai3'])
        targettag = ET.SubElement(self.instances[target],
                                  "{%s}target" % namespaces['vai3'])
        idtypetag = ET.SubElement(targettag, "{%s}idType" % namespaces['vai4'])
        idtypetag.text = target[0]
        idtag = ET.SubElement(targettag, "{%s}id" % namespaces['vai4'])
        idtag.text = target[1]
    def add_timestamp(self, target, timestamp):
        """Ensure a <resultOf> container exists for (target, timestamp)."""
        if (target,timestamp) not in self.resultOfs:
            self.resultOfs[(target, timestamp)] = ET.SubElement(self.instances[target],
                                                                "{%s}resultOf" % namespaces['vai3'],
                                                                attrib={'codespace': 'NTCIP',
                                                                        'timestamp': timestamp,
                                                                        'reason': 'scheduled',
                                                                        'version': '0.0.1'})
    def add_value(self, target, timestamp, code, value, quality="1"):
        """Append one <value> measurement under the matching <resultOf>."""
        self.add_timestamp(target, timestamp)
        tag = ET.SubElement(self.resultOfs[(target, timestamp)],
                            "{%s}value" % namespaces['vai3'],
                            attrib={'code': code,
                                    'quality': quality})
        # str(None) becomes the literal text "None" for missing readings —
        # NOTE(review): confirm the receiver expects that.
        tag.text = str(value)
    def xml(self):
        """Serialize the observation tree, including the XML declaration."""
        return ET.tostring(self.observation, encoding="UTF-8")
# Script body (Python 2): pull the last full minute of DQM station values,
# rebuild them as vai3 observation XML and POST each station's payload
# wrapped in a SOAP envelope; finally report per-station upload status.
now = datetime.utcnow()
# Round down to the previous whole minute (and step one extra minute back).
roundednow = now - timedelta(seconds=(now.second + 60), microseconds=now.microsecond)
session = FuturesSession(max_workers=10)
r = session.get('http://%s/api/v1/dqmData/values?geo=90,-180,-90,180&exactTime=%s' % (args.host, roundednow.strftime('%Y%m%dT%H%M%S')), auth=('demo', 'demovai'), verify=False).result()
meta = session.get('http://%s/api/v1/dqmData/meta?geo=90,-180,-90,180&period=P5M&queryMode=insertionTime' % args.host, auth=('demo', 'demovai'), verify=False).result()
# Map numeric station id -> XML target name; tolerate the "stationnId" typo
# some API versions emit.
stations = {}
for station in meta.json()['metaData']:
    if 'stationnId' in station:
        stn_id = station['stationnId']
    else:
        stn_id = station['stationId']
    stations[stn_id] = station['xmlTargetName']
history = []
for observation in r.json()['observations']:
    stationId = observation[u'stationId']
    stationName = observation[u'stationName']
    xml_target_name = stations[stationId]
    target = ('stationFullName', xml_target_name)
    obsv2 = ObsV2XML()
    obsv2.add_station(target)
    for dataset in observation['dataSet']:
        timestamp = dataset['time']
        # Normalize "YYYY-MM-DD HH:..." to ISO "YYYY-MM-DDTHH:..." form.
        if timestamp[10] == ' ':
            timestamp = timestamp.replace(' ', 'T')
        for record in dataset['values']:
            symbol = record['symbol']
            # NTCIP scaling: temperatures/wind/etc. are sent in tenths,
            # layer thicknesses in hundredths; everything else unscaled.
            if symbol.startswith(('essIce','essAvgWindS','essSpotWindS','essMaxWindGustS','essAirT','essWetbulbT','essDewpointT','essMaxT','essMinT','spectroRelativeHumidity','spectroSurfaceTemp','essSurfaceTemp','essSurfaceFreeze','essVisibility.','essAtmosphericPressure.','spectroAirTemp','essSubSurfaceTemperature','essPavementTemperature', 'spectroSurfaceFrictionIndex')):
                multiplier = 10
            elif symbol.startswith(('spectroSurfaceIceLayer','spectroSurfaceWaterLayer','spectroSurfaceSnowLayer')):
                multiplier = 100
            else:
                multiplier = 1
            value = record['nvalue']
            if value is not None:
                value = value*multiplier
            # Any failed QC flag downgrades the reading's quality code.
            if record['qcFailed'] > 0:
                quality = "-100"
            else:
                quality = "1"
            obsv2.add_value(target, timestamp, symbol, value, quality)
    # Strip the inner XML declaration, then wrap the body in a SOAP envelope.
    payload = obsv2.xml().replace("<?xml version='1.0' encoding='UTF-8'?>", '')
    payload = """<?xml version="1.0" encoding="UTF-8"?>
<SOAP-ENV:Envelope xmlns:SOAP-ENV="http://schemas.xmlsoap.org/soap/envelope/" xmlns:SOAP-ENC="http://schemas.xmlsoap.org/soap/encoding/" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:vai4="http://www.vaisala.com/schema/ice/iceMsgCommon/v1" xmlns:vai1="http://www.vaisala.com/wsdl/ice/uploadIceObservation/v2" xmlns:vai3="http://www.vaisala.com/schema/ice/obsMsg/v2"><SOAP-ENV:Body>%s</SOAP-ENV:Body></SOAP-ENV:Envelope>""" % payload
    # Fire-and-collect: posts run concurrently via the futures session.
    history.append((session.post('http://db.vaicld.com:40001', data=payload), timestamp, stationId, stationName))
for future, timestamp, stationId, stationName in history:
    r = future.result()
    if r.status_code == requests_codes.ok:
        tree = ET.fromstring(r.text)
        status = tree.find('.//{http://www.vaisala.com/schema/ice/iceMsgCommon/v1}status').text
        message = tree.find('.//{http://www.vaisala.com/schema/ice/iceMsgCommon/v1}text').text.replace('\n', ' | ')
        print "OK", timestamp, stationId, stationName, status, message
    else:
        print "ERROR", timestamp, stationId, stationName, r.text
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
|
6,065 | 7cc77de31adff5b4a394f117fc743cd6dd4bc06c | import base
import telebot
import markups
from starter import start_bot, bot
@bot.message_handler(commands=['start'])
def start(message):
    """Entry point: ask for a UI language, then hand off to llanguage."""
    chat = message.chat
    # welcome(msg)
    msg = bot.send_message(chat.id, "Select a language in the list", reply_markup=markups.language())
    bot.register_next_step_handler(msg, llanguage)
    # base.create_user(chat.id)
def llanguage(msg):
    """Persist the chosen language for the user and ask for confirmation."""
    chat = msg.chat
    base.create_user(msg.chat.id, msg.text)
    markup = telebot.types.ReplyKeyboardMarkup(True, True)
    markup.row("ok")
    # NOTE(review): `str` shadows the builtin — rename when convenient.
    str = bot.send_message(msg.chat.id, base.get_text(msg.chat.id,"confirm"), reply_markup=markup)
    bot.register_next_step_handler(str, welcome)
def welcome(msg):
    """Show the support chat button and the localized welcome text."""
    bot.send_message(msg.chat.id, "Чат-поддержка", reply_markup=markups.addWelcome())
    bot.send_message(msg.chat.id, base.get_text(msg.chat.id, 'welcome_inf') % msg.from_user.first_name,
                     reply_markup=markups.welcome(), parse_mode='html')
@bot.callback_query_handler(func=lambda call: call.data == 'currency')
def select_currency(call):
    """Inline-button path to the currency selection menu."""
    chat = call.message.chat
    bot.edit_message_text(base.get_text(chat.id,'currency'), chat.id, call.message.message_id, reply_markup=markups.currency())
# NOTE(review): this redefines select_currency — harmless because telebot
# registers handlers at decoration time, but confusing; consider renaming.
@bot.message_handler(regexp="Выбор валюты")
def select_currency(msg):
    """Keyboard-text path to the currency selection menu."""
    chat = msg.chat
    bot.send_message(chat.id, base.get_text(chat.id,'currency'), reply_markup=markups.currency())
@bot.callback_query_handler(func=lambda call: call.data[:4] == 'ccur')
def currency(call):
    """A concrete currency was picked ('ccur<name>'); show the main menu."""
    current_currency = call.data[4:]  # selected currency (currently unused)
    chat = call.message.chat
    bot.edit_message_text(base.get_text(chat.id,'operations'), chat.id,
                          call.message.message_id, reply_markup=markups.menu())
def langg():
    """Build the inline keyboard for language selection (lang<code> data)."""
    markup = telebot.types.InlineKeyboardMarkup()
    bt_eng = telebot.types.InlineKeyboardButton(text="English", callback_data="langeng")
    bt_rus = telebot.types.InlineKeyboardButton(text="Русский", callback_data="langrus")
    bt_ukr = telebot.types.InlineKeyboardButton(text="Украiнський", callback_data="langukr")
    markup.add(bt_eng)
    markup.add(bt_rus)
    markup.add(bt_ukr)
    return markup
@bot.callback_query_handler(func=lambda call: call.data[:4] == "lang")
def lan(call):
    """A language button was pressed; return to the settings menu.

    NOTE(review): new_lan is never persisted — presumably base should be
    updated here; confirm.
    """
    chat = call.message.chat
    new_lan = call.data[4:]
    bot.edit_message_text( "Вы выбрали язык",chat.id,call.message.message_id,reply_markup=markups.settings())
@bot.callback_query_handler(func=lambda call: call.data == 'requests')
def my_requests(call):
    """Show the (currently empty) list of the user's trade requests."""
    text = base.get_text(call.message.chat.id, 'no_req')
    bot.edit_message_text(text, call.message.chat.id, call.message.message_id)
    bot.edit_message_reply_markup(call.message.chat.id, call.message.message_id,
                                  reply_markup=markups.add_request(call.message.chat.id))
# NOTE(review): second function named `currency` in this module — works
# because registration happens at decoration time, but rename for clarity.
@bot.callback_query_handler(func=lambda call: call.data == 'backtomenu')
def currency(call):
    """'Back to menu' inline button: redraw the operations menu."""
    chat = call.message.chat
    bot.edit_message_text(base.get_text(chat.id,'operations'), chat.id,
                          call.message.message_id, reply_markup=markups.menu())
@bot.message_handler(regexp="Назад")
def back(msg):
    """Reply-keyboard 'Back': restore the welcome keyboard + main menu."""
    bot.send_message(msg.chat.id, "Операции покупки или продажи", reply_markup=markups.addWelcome())
    bot.send_message(msg.chat.id, base.get_text(msg.chat.id,"operations"), reply_markup=markups.menu())
@bot.message_handler(regexp="Обменные операции")
def exchange(msg):
    """Open the exchange section (buy/sell keyboards)."""
    bot.send_message(msg.chat.id, "Купить/Продать", reply_markup=markups.exchangeR())
    bot.send_message(msg.chat.id, base.get_text(msg.chat.id,"exchamge"), reply_markup=markups.exchangeI())
@bot.callback_query_handler(func=lambda call: call.data == 'buy')
def buy(call):
    """'Buy' inline button: show the buy-currency picker."""
    chat = call.message.chat
    bot.send_message(chat.id, "Покупка", reply_markup=markups.exchangeR())
    bot.send_message(chat.id, base.get_text(chat.id,'buycur'), reply_markup=markups.buyI_sellI())
@bot.callback_query_handler(func=lambda call: call.data == 'monero')
def monero(call):
    """Monero picked: show its payment-method keyboard."""
    chat = call.message.chat
    bot.send_message(chat.id, "Покупка/Продажа Monero", reply_markup=markups.payments())
@bot.callback_query_handler(func=lambda call: call.data == 'sell')
def sell(call):
    """'Sell' inline button: show the sell-currency picker."""
    chat = call.message.chat
    bot.send_message(chat.id, "Продажа", reply_markup=markups.exchangeR())
    bot.send_message(chat.id, base.get_text(chat.id,'sellcur'), reply_markup=markups.buyI_sellI())
@bot.message_handler(regexp="Кошелёк")
def wallet(msg):
    """Open the wallet section (deposit/withdraw)."""
    bot.send_message(msg.chat.id, "Кошелёк", reply_markup=markups.exchangeR())
    bot.send_message(msg.chat.id, base.get_text(msg.chat.id,'wallet'), reply_markup=markups.wallet())
@bot.callback_query_handler(func=lambda call: call.data == 'bringin')
def bring_in(call):
    """Deposit flow: ask which currency account to credit."""
    msg = call.message
    bot.edit_message_text("Выберете валюту на счёт которой придут бабосы", msg.chat.id,
                          msg.message_id, reply_markup=markups.bringin())
@bot.callback_query_handler(func=lambda call: call.data[:6] == 'bbring')
def bbring(call):
    """Deposit flow: a currency ('bbring<name>') was chosen."""
    msg = call.message
    bot.edit_message_text("Внесите " + call.data[6:], msg.chat.id, msg.message_id)
@bot.callback_query_handler(func=lambda call: call.data == 'withdraw')
def withdraw(call):
    """Withdrawal flow: ask which currency account to debit."""
    msg=call.message
    bot.edit_message_text("С какой валюты списать бобосы",msg.chat.id,msg.message_id,reply_markup=markups.withdraw())
@bot.callback_query_handler(func=lambda call: call.data[:5] == 'wwith')
def wwithdraw(call):
    """Withdrawal flow: a currency ('wwith<name>') was chosen."""
    msg=call.message
    bot.edit_message_text("Введите сколько вывести" + call.data[5:],msg.chat.id,msg.message_id)
@bot.callback_query_handler(func=lambda call: call.data == "my requests")
def user_requests(call):
    """Placeholder for the user's request list; real logic is commented out."""
    bot.send_message(call.message.chat.id, "Если нужно,то просто раскомменти")
    # markup = telebot.types.InlineKeyboardMarkup()
    # data = base.get_user_requests(call.message.chat.id)
    # val = base.get_user_value(call.message.chat.id)
    # if not data:
    #     btn_add = telebot.types.InlineKeyboardButton("📝 Добавить объявление", callback_data='add request')
    #     back = telebot.types.InlineKeyboardButton(text="Назад",
    #                                               callback_data='exchange')
    #     markup.row(btn_add, back)
    #     bot.edit_message_text("У вас нет объявлений", call.message.chat.id, call.message.message_id)
    #     bot.edit_message_reply_markup(call.message.chat.id, call.message.message_id,
    #                                   reply_markup=markup)
    #
    #
    # else:
    #     for each in data:
    #         btn = telebot.types.InlineKeyboardButton(
    #             text=each.rType + ", " + each.paymentMethod + ", " + each.rate + " " + each.currency,
    #             callback_data=each.currency + "->" + each.rid)
    #         markup.row(btn)
    #     btn_add = telebot.types.InlineKeyboardButton("📝 Добавить объявление", callback_data='add request')
    #     back = telebot.types.InlineKeyboardButton(text="Назад",
    #                                               callback_data='exchange')
    #     markup.row(btn_add, back)
    #     bot.edit_message_text("Что-то там про объявления",
    #                           call.message.chat.id, call.message.message_id, parse_mode="markdown")
    #     bot.edit_message_reply_markup(call.message.chat.id, call.message.message_id, reply_markup=markup)
@bot.callback_query_handler(func=lambda call: call.data == 'add request')
def add_request(call):
    """New trade advert: step 1, choose the currency."""
    msg = call.message
    bot.edit_message_text("Выберите валюту", msg.chat.id, msg.message_id, reply_markup=markups.request_curr())
@bot.callback_query_handler(func=lambda call: call.data[:4] == 'rreq')
def req_cur(call):
    """New trade advert: step 2, choose the advert type (cur is unused yet)."""
    cur = call.data[4:]
    msg = call.message
    bot.edit_message_text("Выберите тип объявления", msg.chat.id, msg.message_id, reply_markup=markups.request_type())
# Stacked decorators: one handler serves both 'reqsell' and 'reqbuy'
# callbacks (callback_query_handler registers at decoration time).
@bot.callback_query_handler(func=lambda call: call.data == 'reqsell')
@bot.callback_query_handler(func=lambda call: call.data == 'reqbuy')
def req_buy(call):
    """New trade advert: step 3, ask for a payment method, then the rate."""
    msg = call.message
    ms = bot.send_message(msg.chat.id, "Метод оплаты", reply_markup=markups.pay_method())
    bot.register_next_step_handler(ms, rate)
def rate(msg):
    """New trade advert: step 4 stub — prompt for the exchange rate."""
    bot.send_message(msg.chat.id, "Курс")
@bot.message_handler(regexp="Настройки")
def settings(msg):
bot.send_message(msg.chat.id, base.get_text(msg.chat.id,'settings'), reply_markup=markups.settings())
@bot.callback_query_handler(func=lambda call: call.data == 'settings')
def setings(call):
msg = call.message
bot.edit_message_text(base.get_text(msg.chat.id,'settings'), msg.chat.id, msg.message_id, reply_markup=markups.settings())
@bot.callback_query_handler(func=lambda call: call.data == "chooselanguage")
def lang(call):
chat = call.message.chat
bot.edit_message_text( "Выберите язык",chat.id,call.message.message_id, reply_markup=langg())
@bot.callback_query_handler(func=lambda call: call.data == 'rate')
def rat(call):
msg = call.message
bot.edit_message_text("Выберите источник актульного курса", msg.chat.id, msg.message_id,
reply_markup=markups.rate())
@bot.callback_query_handler(func=lambda call: call.data[:5] == 'burse')
def burses(call):
number_of_burse = call.data[5:]
msg = call.message
markup = telebot.types.InlineKeyboardMarkup()
bt_back_to_rates = telebot.types.InlineKeyboardButton(text="Вернуться к выбору биржы", callback_data='rate')
markup.add(bt_back_to_rates)
bot.edit_message_text("Для пары BTC/RUB теперь используются котировки биржи ...название...", msg.chat.id,
msg.message_id, reply_markup=markup)
@bot.callback_query_handler(func=lambda call: call.data == 'address')
def address_cur(call):
msg = call.message
bot.edit_message_text("Выберите валюту", msg.chat.id, msg.message_id, reply_markup=markups.address())
# Handles every callback whose data starts with 'adrs' (per-currency buttons).
@bot.callback_query_handler(func=lambda call: call.data[:4] == 'adrs')
def address(call):
    """Prompt for a wallet address and hand the reply to enter_address()."""
    msg = call.message
    mes = bot.edit_message_text("Введите адрес", msg.chat.id, msg.message_id)
    bot.register_next_step_handler(mes, enter_address)
def enter_address(msg):
    """Next-step handler: receives the user's wallet-address message.

    The original stored the incoming message in an unused local
    (`new_address = msg`) — dead code, removed.

    TODO(review): the bot confirms "Информация сохранена" but the address
    is never persisted anywhere; save msg.text for this user before
    sending the confirmation.
    """
    bot.send_message(msg.chat.id, "Информация сохранена")
@bot.message_handler(regexp="О сервисе")
def service(msg):
bot.send_message(msg.chat.id,"Нужно придумать")
if __name__ == "__main__":
bot.polling()
# start_bot()
|
6,066 | 42371760d691eac9c3dfe5693b03cbecc13fd94d | __source__ = 'https://leetcode.com/problems/merge-two-binary-trees/'
# Time: O(n)
# Space: O(n)
#
# Description: Leetcode # 617. Merge Two Binary Trees
#
# Given two binary trees and imagine that when you put one of them to cover the other,
# some nodes of the two trees are overlapped while the others are not.
#
# You need to merge them into a new binary tree. The merge rule is that if two nodes overlap,
# then sum node values up as the new value of the merged node. Otherwise,
# the NOT null node will be used as the node of new tree.
#
# Example 1:
# Input:
# Tree 1 Tree 2
# 1 2
# / \ / \
# 3 2 1 3
# / \ \
# 5 4 7
# Output:
# Merged tree:
# 3
# / \
# 4 5
# / \ \
# 5 4 7
# Note: The merging process must start from the root nodes of both trees.
#
# Hide Company Tags Amazon
# Hide Tags Tree
#
import unittest
# Definition for a binary tree node.
class TreeNode(object):
    """Plain binary-tree node: a value plus optional left/right children."""

    def __init__(self, x):
        self.val = x
        self.left = None
        self.right = None


class Solution(object):
    def mergeTrees(self, t1, t2):
        """Merge two binary trees into a brand-new tree.

        Overlapping nodes are summed into fresh nodes; where only one tree
        has a node, that node (and its whole subtree) is reused as-is.

        :type t1: TreeNode
        :type t2: TreeNode
        :rtype: TreeNode
        """
        # If either side is missing, the merge is just the other side.
        if not (t1 and t2):
            return t1 or t2
        merged = TreeNode(t1.val + t2.val)
        merged.left = self.mergeTrees(t1.left, t2.left)
        merged.right = self.mergeTrees(t1.right, t2.right)
        return merged
class TestMethods(unittest.TestCase):
    """Trivial smoke test that keeps the unittest wiring exercised."""

    def test_Local(self):
        # Always-true sanity assertion.
        self.assertEqual(1, 1)
# Run the unit tests when executed as a script.
if __name__ == '__main__':
    unittest.main()
Java = '''
# Thought: https://leetcode.com/problems/merge-two-binary-trees/solution/
/**
* Definition for a binary tree node.
* public class TreeNode {
* int val;
* TreeNode left;
* TreeNode right;
* TreeNode(int x) { val = x; }
* }
*/
# DFS
# 10ms 40.59%
class Solution {
public TreeNode mergeTrees(TreeNode t1, TreeNode t2) {
if (t1 != null && t2 != null) {
TreeNode root = new TreeNode(t1.val + t2.val);
root.left = mergeTrees(t1.left, t2.left);
root.right = mergeTrees(t1.right, t2.right);
return root;
} else if (t1 == null) {
return t2;
} else {
return t1;
}
}
}
# DFS
# 6ms 98.05%
class Solution {
public TreeNode mergeTrees(TreeNode t1, TreeNode t2) {
if (t1 == null)
return t2;
if (t2 == null)
return t1;
t1.val += t2.val;
t1.left = mergeTrees(t1.left, t2.left);
t1.right = mergeTrees(t1.right, t2.right);
return t1;
}
}
# BFS
# 8ms 69.45%
class Solution {
public TreeNode mergeTrees(TreeNode t1, TreeNode t2) {
if (t1 == null)
return t2;
Stack < TreeNode[] > stack = new Stack < > ();
stack.push(new TreeNode[] {t1, t2});
while (!stack.isEmpty()) {
TreeNode[] t = stack.pop();
if (t[0] == null || t[1] == null) {
continue;
}
t[0].val += t[1].val;
if (t[0].left == null) {
t[0].left = t[1].left;
} else {
stack.push(new TreeNode[] {t[0].left, t[1].left});
}
if (t[0].right == null) {
t[0].right = t[1].right;
} else {
stack.push(new TreeNode[] {t[0].right, t[1].right});
}
}
return t1;
}
}
'''
|
6,067 | dafefc65335a0d7e27057f51b43e52b286f5bc6b | from haven import haven_utils as hu
import itertools, copy
# Experiment registry: each entry expands a hyper-parameter dict into the
# cartesian product of its list-valued fields (haven's cartesian_exp_group).
EXP_GROUPS = {}
EXP_GROUPS['starter_issam'] = hu.cartesian_exp_group({
    'batch_size': 32,
    'opt': {'name': 'adamW', 'lr': 0.0001, 'wd': 1e-6},
    'model': {'name': 'resnext50_32x4d_ssl'},
    'loss_func': {'name': 'cross_entropy'},
    'max_epoch': [50]
})
EXP_GROUPS['clip'] = hu.cartesian_exp_group({
'batch_size': 32,
'model': {'name': 'clip'},
'max_epoch': [30],
}) |
6,068 | 67b1cdfa514aac4fdac3804285ec8d0aebce944d | from Bio.PDB import *
import urllib.request
import numpy as np
import pandas as pd
from math import sqrt
import time
import os
import heapq
from datetime import datetime
dir_path = os.getcwd()
# Load the M-CSA peptidase table and keep only per-residue annotations.
peptidasesList = pd.read_csv("./MCSA_EC3.4_peptidases.csv")
peptidasesList = peptidasesList[peptidasesList.iloc[:, 4] == "residue"]
peptidasesList = peptidasesList.reset_index(drop=True)
print(len(peptidasesList))
# Build {PDB id: {chain: [residue ids]}} for the annotated binding sites.
# NOTE(review): the `elif` branch REPLACES the whole per-PDB dict, dropping
# any previously collected chains for that protein — confirm this is intended.
bindingSiteDic = {}
for i in range(len(peptidasesList)):
    # print(bindingSiteDic)
    if peptidasesList.loc[i, "PDB"] not in bindingSiteDic:
        bindingSiteDic[peptidasesList.loc[i, "PDB"]] = {
            peptidasesList.loc[i, "chain/kegg compound"]: [peptidasesList.loc[i, "resid/chebi id"]]}
    elif peptidasesList.loc[i, "chain/kegg compound"] not in bindingSiteDic[peptidasesList.loc[i, "PDB"]]:
        bindingSiteDic[peptidasesList.loc[i, "PDB"]] = {
            peptidasesList.loc[i, "chain/kegg compound"]: [peptidasesList.loc[i, "resid/chebi id"]]}
    else:
        bindingSiteDic[peptidasesList.loc[i, "PDB"]][peptidasesList.loc[i, "chain/kegg compound"]].append(
            peptidasesList.loc[i, "resid/chebi id"])
# Deduplicate residue ids and coerce them to int.
for protein in bindingSiteDic:
    for chain in bindingSiteDic[protein]:
        bindingSiteDic[protein][chain] = [int(x) for x in list(set(bindingSiteDic[protein][chain]))]
# Unique (PDB, chain) pairs to process.
uniqueList = peptidasesList[["PDB", "chain/kegg compound"]].drop_duplicates()
# NOTE(review): this expression has no effect (result discarded) — probably
# meant to be assigned back to uniqueList.
uniqueList.reset_index(drop=True).iloc[20:, ]
# Backbone atom names excluded when computing side-chain centroids.
backbone = ["N", "CA", "C", "O"]
# Standard residue three-letter codes. NOTE(review): "TRP" appears twice.
aminoAcidCodes = ["ALA", "ARG", "ASN", "ASP", "CYS", "GLN", "GLY", "GLU", "HIS", "ILE", "LEU", "LYS",
                  "MET", "PHE", "PRO", "PYL", "SER", "SEC", "THR", "TRP", "TYR", "TRP", "VAL"]
# Accumulators for the nearest-neighbour pass below.
neighhor_df = pd.DataFrame(columns=["proteinid", "chain", "aaid", "neighborid"])
n_bigger = 5
target_list = []
start_time = datetime.now()
# Main pass: for every (PDB, chain), fetch the structure, compute a
# side-chain centroid + CA->centroid direction per residue, then the full
# pairwise distance matrix, and record each residue's n_bigger nearest
# neighbours into neighhor_df.
for eachRow in range(0, len(uniqueList)):
    pdbID = uniqueList.iloc[eachRow, 0]
    chainOrder = uniqueList.iloc[eachRow, 1]
    PDB = PDBList()
    PDB.retrieve_pdb_file(pdb_code=pdbID, pdir="../pdb", file_format="pdb")
    p = PDBParser()
    structure = p.get_structure("X", "../pdb/pdb" + pdbID + ".ent")
    oneChain = pd.DataFrame(columns=["Seq", "Residue", "Center", "Direction"])
    protein_start_time = datetime.now()
    # Skip low-resolution structures (> 3.0 A).
    if structure.header["resolution"] <= 3.0:
        if chainOrder in [x.id for x in list(structure[0].get_chains())]:
            chain = chainOrder
            for residue in structure[0][chainOrder]:
                if residue.get_resname() in aminoAcidCodes:
                    if len(list(residue.get_atoms())) > 3:
                        if residue.get_resname() != "GLY":
                            # Centroid of the side-chain (non-backbone) atoms.
                            point = vectors.Vector([0, 0, 0])
                            for atom in residue:
                                if (atom.get_name() not in backbone):
                                    point = point + atom.get_vector()
                            center = point.__div__(len(residue) - 4)
                            cToRGroup = residue["CA"].get_vector() - center
                            oneChain.loc[len(oneChain)] = [residue.get_id()[1], residue.get_resname(), center,
                                                           cToRGroup]
                        else:
                            # Glycine has no side chain: use CA and point away
                            # from the backbone C/N/O centroid.
                            center = residue["CA"].get_vector()
                            cToRGroup = center - (residue["C"].get_vector() + residue["N"].get_vector() + residue[
                                "O"].get_vector()).__div__(3)
                            oneChain.loc[len(oneChain)] = [residue.get_id()[1], residue.get_resname(), center,
                                                           cToRGroup]
            columns = np.array(list(oneChain.iloc[:, 0]))
            row_index = oneChain.iloc[:, 0]
            distanceMatrix = pd.DataFrame(columns=list(oneChain.iloc[:, 0]), index=list(oneChain.iloc[:, 0]))
            print(time.time())
            numResidue = len(oneChain)
            # O(n^2) pairwise Euclidean distances between residue centroids.
            for row in range(0, numResidue):
                if row % 50 == 0:
                    print(str(row) + "th row")
                for column in range(0, numResidue):
                    coordinatesSubstraction = list(oneChain.loc[row, "Center"] - oneChain.loc[column, "Center"])
                    distanceMatrix.iloc[row, column] = sqrt(sum(list(map(lambda x: x * x, coordinatesSubstraction))))
                    # distanceMatrix.iloc[row, column] = sqrt(sum(list(map(lambda x: x * x, coordinatesSubstraction))))
                # n_bigger smallest distances include the residue itself (d=0).
                row_list = list(distanceMatrix.iloc[row, :])
                result = list(map(row_list.index, heapq.nsmallest(n_bigger, row_list)))
                target_col = columns[result]
                target_list.append(target_col)
                neighhor_df.loc[len(neighhor_df)] = [pdbID, chain, row_index[row], str(target_col)]
    protein_end_time = datetime.now()
    print(pdbID, " Duration: {}".format(protein_end_time - protein_start_time))
end_time = datetime.now()
print("The total Duration: {}".format(end_time - start_time))
print(time.time())
# Exploratory pass on a single entry (row 35): same centroid/distance logic
# as above, but with n_bigger=3 and using nlargest (farthest residues).
pdbID = uniqueList.iloc[35, 0]
chainOrder = uniqueList.iloc[35, 1]
PDB = PDBList()
# Pre-download every PDB file that is not already cached locally.
for pdbid in uniqueList.iloc[:, 0]:
    # BUG FIX: the existence check used the fixed `pdbID` (row 35) instead of
    # the loop variable `pdbid`, so the cache test always looked at the wrong
    # file and most downloads were skipped or repeated incorrectly.
    exist = os.path.isfile('../pdb/pdb' + pdbid + '.ent')
    if not exist:
        PDB.retrieve_pdb_file(pdb_code=pdbid, pdir="../pdb", file_format="pdb")
p = PDBParser()
structure = p.get_structure("X", "../pdb/pdb" + pdbID + ".ent")
oneChain = pd.DataFrame(columns=["Seq", "Residue", "Center", "Direction", "pdbid", "chain"])
if structure.header["resolution"] <= 3.0:
    if chainOrder in [x.id for x in list(structure[0].get_chains())]:  # Chain information not in pdb file
        for residue in structure[0][chainOrder]:
            if residue.get_resname() in aminoAcidCodes: # Only treat common amino acid
                if len(list(residue.get_atoms())) > 3:
                    if residue.get_resname() != "GLY": # Glysine as a special case
                        point = vectors.Vector([0, 0, 0])
                        for atom in residue:
                            if (atom.get_name() not in backbone):
                                point = point + atom.get_vector()
                        center = point.__div__(len(residue) - 4)
                        cToRGroup = residue["CA"].get_vector() - center
                        oneChain.loc[len(oneChain)] = [residue.get_id()[1], residue.get_resname(), center, cToRGroup,
                                                       pdbID, chainOrder]
                    else:
                        center = residue["CA"].get_vector()
                        cToRGroup = center - (residue["C"].get_vector() + residue["N"].get_vector() + residue[
                            "O"].get_vector()).__div__(3)
                        oneChain.loc[len(oneChain)] = [residue.get_id()[1], residue.get_resname(), center, cToRGroup,
                                                       pdbID, chainOrder]
distanceMatrix = pd.DataFrame(columns=list(oneChain.iloc[:, 0]), index=list(oneChain.iloc[:, 0]))
print(len(oneChain))
print(time.time())
numResidue = len(oneChain)
columns = np.array(list(oneChain.iloc[:, 0]))
n_bigger = 3
target_list = []
for row in range(0, numResidue):
    if row % 50 == 0:
        print(str(row) + "th row")
    for column in range(0, numResidue):
        coordinatesSubstraction = list(oneChain.loc[row, "Center"] - oneChain.loc[column, "Center"])
        distanceMatrix.iloc[row, column] = sqrt(sum(list(map(lambda x: x * x, coordinatesSubstraction))))
    # Farthest n_bigger residues for this row (note: nlargest, not nsmallest).
    row_list = list(distanceMatrix.iloc[row, :])
    result = list(map(row_list.index, heapq.nlargest(n_bigger, row_list)))
    target_col = columns[result]
    target_list.append(target_col)
print(time.time())
# Row-wise sorted distances; the slices below pick the 10 largest values /
# their indices (results discarded — exploratory only).
sortedDistance = distanceMatrix.apply(lambda x: np.sort(x), axis=1)
sortedD = np.array(sortedDistance.tolist())
# get 10 biggest value
sortedD[:, len(oneChain) - 10:]
# get the index 10 biggest value
distanceMatrix.apply(lambda x: np.argsort(x), axis=1).iloc[:, len(oneChain) - 10:]
# Third pass: near-duplicate of the first loop but only materialises the
# distance matrix (no neighbour bookkeeping). NOTE(review): consider folding
# this and the first loop into one parameterised function.
for eachRow in range(0, len(uniqueList)):
    pdbID = uniqueList.iloc[eachRow, 0]
    chainOrder = uniqueList.iloc[eachRow, 1]
    PDB = PDBList()
    PDB.retrieve_pdb_file(pdb_code=pdbID, pdir="../pdb", file_format="pdb")
    p = PDBParser()
    structure = p.get_structure("X", "../pdb/pdb" + pdbID + ".ent")
    oneChain = pd.DataFrame(columns=["Seq", "Residue", "Center", "Direction"])
    if structure.header["resolution"] <= 3.0:
        if chainOrder in [x.id for x in list(structure[0].get_chains())]:
            for residue in structure[0][chainOrder]:
                if residue.get_resname() in aminoAcidCodes:
                    if len(list(residue.get_atoms())) > 3:
                        if residue.get_resname() != "GLY":
                            # Side-chain centroid (backbone atoms excluded).
                            point = vectors.Vector([0, 0, 0])
                            for atom in residue:
                                if (atom.get_name() not in backbone):
                                    point = point + atom.get_vector()
                            center = point.__div__(len(residue) - 4)
                            cToRGroup = residue["CA"].get_vector() - center
                            oneChain.loc[len(oneChain)] = [residue.get_id()[1], residue.get_resname(), center,
                                                           cToRGroup]
                        else:
                            # Glycine: CA position, direction away from C/N/O.
                            center = residue["CA"].get_vector()
                            cToRGroup = center - (residue["C"].get_vector() + residue["N"].get_vector() + residue[
                                "O"].get_vector()).__div__(3)
                            oneChain.loc[len(oneChain)] = [residue.get_id()[1], residue.get_resname(), center,
                                                           cToRGroup]
            distanceMatrix = pd.DataFrame(columns=list(oneChain.iloc[:, 0]), index=list(oneChain.iloc[:, 0]))
            print(time.time())
            numResidue = len(oneChain)
            for row in range(0, numResidue):
                if row % 50 == 0:
                    print(str(row) + "th row")
                for column in range(0, numResidue):
                    coordinatesSubstraction = list(oneChain.loc[row, "Center"] - oneChain.loc[column, "Center"])
                    distanceMatrix.iloc[row, column] = sqrt(sum(list(map(lambda x: x * x, coordinatesSubstraction))))
            print(time.time())
|
6,069 | 1f01989f10be5404d415d4abd1ef9ab6c8695aba | from valuate.predict import *
def get_profit_rate(intent, popularity):
    """Return the price ratio, relative to the weighted average price, for a
    given trade intent and popularity grade.

    :param intent: trade type, one of 'sell', 'buy', 'release', 'private',
        'lowest', 'cpo', 'replace', 'auction', 'avg-buy', 'avg-sell'.
    :param popularity: popularity grade used to index gl.PROFITS.
    :raises ValueError: for an unknown intent (previously this fell through
        and crashed later with UnboundLocalError).
    """
    # Fixed per-trade-type margins relative to the listing price, graded by
    # how well the model sells.
    profits = gl.PROFITS
    profit = profits[popularity]
    if intent == 'sell':
        # Dealer purchase price relative to the weighted average price.
        profit_rate = 1 - profit[0] - profit[1]
    elif intent == 'buy':
        # Dealer real selling price relative to the weighted average price.
        profit_rate = 1 - profit[0]
    elif intent == 'release':
        # Suggested listing price relative to the weighted average price.
        profit_rate = 1
    elif intent == 'private':
        # C2C price relative to the weighted average price.
        profit_rate = 1 - profit[0] - profit[2]
    elif intent == 'lowest':
        # Lowest transaction price relative to the weighted average price.
        profit_rate = 1 - profit[0] - profit[1] - profit[3]
    elif intent == 'cpo':
        # Certified pre-owned price relative to the weighted average price.
        profit_rate = 1 - profit[0] - profit[8]
    elif intent == 'replace':
        # 4S-store trade-in price relative to the weighted average price.
        profit_rate = 1 - profit[0] - profit[4]
    elif intent == 'auction':
        # Auction price relative to the weighted average price.
        profit_rate = 1 - profit[0] - profit[5]
    elif intent == 'avg-buy':
        # Average buying price relative to the weighted average price.
        profit_rate = 1 - profit[0] - profit[7]
    elif intent == 'avg-sell':
        # Average selling price relative to the weighted average price.
        profit_rate = 1 - profit[0] - profit[6]
    else:
        # Fail fast instead of crashing later with UnboundLocalError.
        raise ValueError("Unknown intent: %r" % (intent,))
    return profit_rate
def cal_intent_condition(prices, price_bn):
    """Compute the four condition-grade prices for every trade type.

    :param prices: vector of per-intent predicted prices; prices[2] is
        assumed to be the suggested listing price ('release') —
        NOTE(review): confirm against gl.INTENT_TYPE ordering.
    :param price_bn: new-car guide price used as an upper cap.
    :return: DataFrame with columns intent/excellent/good/fair/bad (int).
    """
    # Cap: the listing price plus 3% must not exceed the new-car price;
    # if it does, scale all prices down proportionally.
    if(prices[2] * 1.03) > price_bn:
        rate = (prices[2] * 1.03) / price_bn
        prices = prices / rate
    # Outer product: per-intent price x per-condition coefficient.
    df1 = pd.DataFrame(prices)
    df2 = pd.DataFrame([gl.CAR_CONDITION_COEFFICIENT_VALUES])
    all_map = df1.dot(df2)
    all_map.columns = ['excellent', 'good', 'fair', 'bad']
    all_map['intent'] = pd.Series(gl.INTENT_TYPE).values
    all_map = all_map.loc[:, ['intent', 'excellent', 'good', 'fair', 'bad']]
    all_map[['excellent', 'good', 'fair', 'bad']] = all_map[['excellent', 'good', 'fair', 'bad']].astype(int)
    return all_map
def process_mile(price, use_time, mile):
    """Adjust a predicted price for mileage.

    Guide (from the original notes): ~25k km/year is the baseline; below it
    the price floats up to +3.5%, between 25k-50k it drops 3.5-7.5%, above
    50k it drops 7.5-12.5%.

    NOTE(review): the code divides mileage by `use_time` (months) while the
    thresholds are named after yearly figures — confirm gl.MILE_THRESHOLD_*
    are expressed per month.
    """
    monthly = mile / use_time
    if monthly < gl.MILE_THRESHOLD_2_5:
        return price + 0.035 * (1 - monthly / gl.MILE_THRESHOLD_2_5) * price
    if monthly < gl.MILE_THRESHOLD_5:
        return price - (0.04 * (monthly / gl.MILE_THRESHOLD_5) + 0.035) * price
    if monthly < gl.MILE_THRESHOLD_10:
        return price - (0.05 * (monthly / gl.MILE_THRESHOLD_5) + 0.075) * price
    return price - 0.125 * price
def process_profit_rate(df):
    """Row-wise helper: profit ratio for the row's own intent/popularity."""
    intent = df['intent']
    popularity = df['popularity']
    return get_profit_rate(intent, popularity)
def process_buy_profit_rate(df):
    """Row-wise helper: profit ratio using the row's source intent."""
    source_intent = df['intent_source']
    popularity = df['popularity']
    return get_profit_rate(source_intent, popularity)
def process_unreasonable_history_price(data, nums):
    """Smooth an unreasonable historical price series in place.

    The tail (everything after data[0]) is sorted ascending, then each of
    the first *nums* steps is forced to a ~0.83% monthly rise whenever the
    step is non-increasing or its growth falls outside [0.3%, 1.57%].

    :param data: mutable list of int prices, oldest adjustment base first.
    :param nums: number of consecutive steps to validate (0 = no-op).
    :return: the same list, mutated.
    """
    if nums == 0:
        return data
    data[1:] = sorted(data[1:])
    for idx in range(nums):
        prev, curr = data[idx], data[idx + 1]
        growth = (curr - prev) / curr
        if prev >= curr or growth < 0.003 or growth > 0.0157:
            data[idx + 1] = int(prev * 1.0083)
    return data
def process_unreasonable_future_price(data, nums):
    """Smooth an unreasonable future price series in place.

    The tail (everything after data[0]) is sorted descending, then each of
    the first *nums* steps is forced to a 10% yearly drop whenever the step
    is non-decreasing or its decline falls outside [3.6%, 18.8%].

    :param data: mutable list of int prices, current price first.
    :param nums: number of consecutive steps to validate.
    :return: the same list, mutated.
    """
    data[1:] = sorted(data[1:], reverse=True)
    for idx in range(nums):
        prev, nxt = data[idx], data[idx + 1]
        drop = (prev - nxt) / prev
        if prev <= nxt or drop < 0.036 or drop > 0.188:
            data[idx + 1] = int(prev * 0.9)
    return data
def process_fill_zero(hedge):
    """Left-pad *hedge* with '0' characters to a fixed width of 18.

    Hedge-rate columns store six 3-digit values packed into one integer;
    leading zeros are lost on the round-trip and restored here. Strings of
    length >= 18 are returned unchanged. (rjust pads with literal zeros and,
    unlike zfill, never relocates a sign character.)
    """
    return hedge.rjust(18, '0')
def predict_from_db(model_detail_slug, city, use_time):
    """Look up the B2C and C2C hedge ratios from the production DB.

    Each DB column 'b2c_year_K' / 'c2c_year_K' packs six monthly hedge
    values as six 3-digit groups of one integer; this picks column
    K = (use_time-1)//6 and the (use_time % 6)-th group, scaled by /1000.

    :return: (dealer_hedge, cpersonal_hedge) as floats in [0, 1).
    """
    # Resolve city and trim-level ids.
    city_id = province_city_map.loc[city, 'city_id']
    model_detail_slug_id = model_detail_map.loc[model_detail_slug, 'final_model_detail_slug_id']
    # Column index and month-within-column (1-based).
    if (use_time % 6) == 0:
        column_num = str(int(use_time / 6) - 1)
        month_num = 6
    else:
        column_num = str(int(use_time / 6))
        month_num = use_time % 6
    # Query the packed record.
    record = db_operate.query_valuate(model_detail_slug_id, city_id, column_num, use_time)
    # Unpack the 3-digit group for this month.
    dealer_hedge = str(record.loc[0, 'b2c_year_'+column_num])
    dealer_hedge = process_fill_zero(dealer_hedge)
    dealer_hedge = dealer_hedge[(month_num-1)*3:month_num*3]
    dealer_hedge = int(dealer_hedge) / 1000
    cpersonal_hedge = str(record.loc[0, 'c2c_year_'+column_num])
    cpersonal_hedge = process_fill_zero(cpersonal_hedge)
    cpersonal_hedge = cpersonal_hedge[(month_num-1)*3:month_num*3]
    cpersonal_hedge = int(cpersonal_hedge) / 1000
    return dealer_hedge, cpersonal_hedge
def predict_from_db_history(model_detail_slug, city, use_time):
    """Query up to 7 months of historical hedge ratios from the production DB.

    Returns [[dealer_hedge, cpersonal_hedge, use_time], ...] ordered from
    oldest to newest (reverse of the packed digit order).
    NOTE(review): every entry carries the same `use_time` value — confirm
    callers rely only on position, not on that field, for the month offset.
    """
    # Resolve city and trim-level ids.
    city_id = province_city_map.loc[city, 'city_id']
    model_detail_slug_id = model_detail_map.loc[model_detail_slug, 'final_model_detail_slug_id']
    # Column index and month-within-column (1-based).
    if (use_time % 6) == 0:
        column_num = int(use_time / 6) - 1
        month_num = 6
    else:
        column_num = int(use_time / 6)
        month_num = use_time % 6
    # The DB may return one packed column (enough history) or two
    # neighbouring columns that must be concatenated.
    dealer_hedge, cpersonal_hedge = db_operate.query_valuate_history(model_detail_slug_id, city_id, column_num, use_time)
    # Unpack the 3-digit monthly groups.
    result = []
    if len(dealer_hedge) == 1:
        dealer_hedge = process_fill_zero(dealer_hedge[0])
        cpersonal_hedge = process_fill_zero(cpersonal_hedge[0])
        for i in range(0, use_time):
            dealer_per = dealer_hedge[i*3:(i+1)*3]
            cpersonal_per = cpersonal_hedge[i * 3:(i + 1) * 3]
            result.append([int(dealer_per)/1000, int(cpersonal_per)/1000, use_time])
        result.reverse()
    elif len(dealer_hedge) == 2:
        dealer_hedge = process_fill_zero(dealer_hedge[0])+process_fill_zero(dealer_hedge[1])
        cpersonal_hedge = process_fill_zero(cpersonal_hedge[0])+process_fill_zero(cpersonal_hedge[1])
        for i in range(month_num-1, month_num+6):
            dealer_per = dealer_hedge[i*3:(i+1)*3]
            cpersonal_per = cpersonal_hedge[i * 3:(i + 1) * 3]
            result.append([int(dealer_per)/1000, int(cpersonal_per)/1000, use_time])
        result.reverse()
    return result
def predict_from_db_future(model_detail_slug, city, use_time, times):
    """Query *times* future-year hedge ratios from the production DB.

    Samples the same month position every 12 months ahead (column index
    advances by 2 since each column holds 6 months).

    :return: [[dealer_hedge, cpersonal_hedge, use_time + i*12], ...]
    """
    # Resolve city and trim-level ids.
    city_id = province_city_map.loc[city, 'city_id']
    model_detail_slug_id = model_detail_map.loc[model_detail_slug, 'final_model_detail_slug_id']
    # Column index and month-within-column (1-based).
    if (use_time % 6) == 0:
        column_num = int(use_time / 6) - 1
        month_num = 6
    else:
        column_num = int(use_time / 6)
        month_num = use_time % 6
    # Fetch all packed columns for this model/city once.
    record = db_operate.query_valuate_future(model_detail_slug_id, city_id)
    # Unpack the same month slot from each yearly column.
    result = []
    for i in range(0, times):
        dealer_hedge = str(record.loc[0, 'b2c_year_' + str(column_num+i*2)])
        dealer_hedge = process_fill_zero(dealer_hedge)
        dealer_hedge = dealer_hedge[(month_num - 1) * 3:month_num * 3]
        dealer_hedge = int(dealer_hedge) / 1000
        cpersonal_hedge = str(record.loc[0, 'c2c_year_' + str(column_num+i*2)])
        cpersonal_hedge = process_fill_zero(cpersonal_hedge)
        cpersonal_hedge = cpersonal_hedge[(month_num - 1) * 3:month_num * 3]
        cpersonal_hedge = int(cpersonal_hedge) / 1000
        result.append([dealer_hedge, cpersonal_hedge, use_time+i*12])
    return result
def process_prices_relate(dealer_price, cpersonal_price):
    """Enforce consistency between the three price kinds.

    The dealer (B2C) price is expected to sit 0-12% above the C2C price;
    outside that band the C2C price is re-derived as 91.25% of the dealer
    price. The dealer-buyback ('sell') price is always 94.75% of the
    (possibly corrected) C2C price.

    BUG FIX: `sell` was previously computed inside the `if` block, so any
    price pair that was already consistent crashed with UnboundLocalError
    at the return statement. It is now computed unconditionally.

    :return: (buy, private, sell) — buy unchanged, private/sell as int.
    """
    buy = dealer_price
    private = cpersonal_price
    # Relative gap between dealer and C2C prices.
    private_buy_rate = (buy - private) / private
    if (private_buy_rate < 0) | (abs(private_buy_rate) > 0.12):
        private = int(buy * (1 - 0.0875))
    sell = int(private * (1 - 0.0525))
    return buy, private, sell
def process_adjust_profit(model_detail_slug, popularity):
    """Look up the manual profit-adjustment rate for a trim/popularity pair.

    Returns 0 when no adjustment entry exists for the combination.
    """
    key = '{0}_{1}'.format(model_detail_slug, popularity)
    if key not in model_detail_slug_popularity_index:
        return 0
    return adjust_profit.loc[key, 'rate']
def check_params_value(city, model_detail_slug, use_time, mile, category):
    """Validate valuation parameters, raising Api* errors on bad input.

    :param category: 'valuate', 'history' or 'future' — selects the error
        message used for an out-of-range use_time (the valid range is the
        same, 1-240 months, for all three).
    :raises ApiParamsValueError: unknown city/model, negative mile,
        out-of-range use_time.
    :raises ApiParamsTypeError: non-numeric mile, non-int use_time.
    """
    # Validate city.
    if city not in cities:
        raise ApiParamsValueError('city', city, 'Unknown city!')
    # Validate model trim.
    if model_detail_slug not in models:
        raise ApiParamsValueError('model_detail_slug', model_detail_slug, 'Unknown model!')
    # Validate mile.
    if not ((isinstance(mile, int)) | (isinstance(mile, float))):
        raise ApiParamsTypeError('mile', mile, 'Mile must be int or float!')
    elif mile < 0:
        raise ApiParamsValueError('mile', mile, 'Mile must be greater than zero!')
    # Validate use_time.
    if not isinstance(use_time, int):
        raise ApiParamsTypeError('use_time', use_time, 'Use_time must be int!')
    # The three categories shared a triplicated, identical 1-240 range check
    # differing only in wording — collapsed into a message map. An unknown
    # category skips the range check, matching the original behavior.
    use_time_messages = {
        'valuate': 'The use_time of Forecast must be in 1-240!',
        'history': 'The use_time of historical trend must be in 1-240!',
        'future': 'The use_time of future trend must be in 1-240!',
    }
    message = use_time_messages.get(category)
    if message is not None and ((use_time < 1) | (use_time > 240)):
        raise ApiParamsValueError('use_time', use_time, message)
class Predict(object):
    """Valuation front-end: combines DB hedge ratios, mileage and popularity
    adjustments into per-intent, per-condition prices and trends."""

    def __init__(self):
        """Initialise empty result/model holders."""
        self.result = []
        self.valuate_model = []
    def add_process_intent(self, buy, private, sell, popularity, price_bn):
        """Derive every trade-type price from the buy/private/sell anchors.

        Fills `self.result` with the final intent x condition price table.
        """
        # Assemble the per-intent frame from the template.
        self.result = result_map.copy()
        self.result.loc[(self.result['intent'] == 'buy'), 'predict_price'] = buy
        self.result.loc[(self.result['intent'] == 'private'), 'predict_price'] = private
        self.result.loc[(self.result['intent'] == 'sell'), 'predict_price'] = sell
        self.result['predict_price'] = self.result['predict_price'].fillna(buy)
        self.result['popularity'] = popularity
        # Re-base each anchor price to the weighted average, then scale to
        # the target intent's ratio.
        self.result['profit_rate'] = self.result.apply(process_profit_rate, axis=1)
        self.result['buy_profit_rate'] = self.result.apply(process_buy_profit_rate, axis=1)
        self.result['predict_price'] = self.result['predict_price'] / self.result['buy_profit_rate']
        self.result['predict_price'] = self.result['profit_rate'] * self.result['predict_price']
        # Expand to the four condition grades (capped by price_bn).
        self.result = cal_intent_condition(self.result.predict_price.values, price_bn)
    def follow_process(self, use_time, mile, price_bn, dealer_hedge, cpersonal_hedge, province, model_slug, model_detail_slug):
        """Shared post-processing: hedge -> price, mileage, consistency,
        popularity lookup and final manual adjustment.

        :return: (buy, private, sell, popularity)
        """
        # Hedge ratio x new-car price.
        dealer_price, cpersonal_price = dealer_hedge * price_bn, cpersonal_hedge * price_bn
        # Mileage adjustment.
        dealer_price = process_mile(dealer_price, use_time, mile)
        cpersonal_price = process_mile(cpersonal_price, use_time, mile)
        # Keep the three price kinds mutually consistent.
        buy, private, sell = process_prices_relate(dealer_price, cpersonal_price)
        # Popularity grade for this model in this province ('C' by default).
        index = str(model_slug) + '_' + str(province)
        if index in province_popularity_index:
            popularity = province_popularity_map.loc[index, 'popularity']
        else:
            popularity = 'C'
        # Final manual adjustment rate.
        rate = process_adjust_profit(model_detail_slug, popularity)
        buy, private, sell = buy * (1 + rate), private * (1 + rate), sell * (1 + rate)
        return buy, private, sell, popularity
    def predict(self, city='深圳', model_detail_slug='model_25023_cs', use_time=12, mile=2, ret_type='records'):
        """Single-point valuation.

        :return: dict records (default) or a DataFrame of
            intent x condition prices.
        """
        # Validate parameters.
        check_params_value(city, model_detail_slug, use_time, mile, category='valuate')
        # Resolve new-car guide price and the remapped trim.
        price_bn = model_detail_map.loc[model_detail_slug, 'final_price_bn']
        price_bn = price_bn * 10000
        province = province_city_map.loc[city, 'province']
        model_slug = model_detail_map.loc[model_detail_slug, 'model_slug']
        final_model_detail_slug = model_detail_map.loc[model_detail_slug, 'final_model_detail_slug']
        # Hedge ratios from the production DB.
        dealer_hedge, cpersonal_hedge = predict_from_db(final_model_detail_slug, city, use_time)
        buy, private, sell, popularity = self.follow_process(use_time, mile, price_bn, dealer_hedge, cpersonal_hedge, province, model_slug, model_detail_slug)
        # Expand to all trade types / conditions.
        self.add_process_intent(buy, private, sell, popularity, price_bn)
        if ret_type == 'records':
            return self.result.to_dict('records')
        else:
            return self.result
    def predict_for_history(self, city='深圳', model_detail_slug='model_25023_cs', use_time=12, mile=2):
        """Raw historical price series (buy/private/sell lists)."""
        # Validate parameters.
        check_params_value(city, model_detail_slug, use_time, mile, category='valuate')
        # Resolve new-car guide price and the remapped trim.
        price_bn = model_detail_map.loc[model_detail_slug, 'final_price_bn']
        price_bn = price_bn * 10000
        province = province_city_map.loc[city, 'province']
        model_slug = model_detail_map.loc[model_detail_slug, 'model_slug']
        final_model_detail_slug = model_detail_map.loc[model_detail_slug, 'final_model_detail_slug']
        # One price triple per historical month.
        data_buy = []
        data_sell = []
        data_private = []
        result = predict_from_db_history(final_model_detail_slug, city, use_time)
        for dealer_hedge, cpersonal_hedge, use_time_per in result:
            buy, private, sell, popularity = self.follow_process(use_time_per, mile, price_bn, dealer_hedge,
                                                                 cpersonal_hedge, province, model_slug,
                                                                 model_detail_slug)
            data_buy.append(int(buy))
            data_private.append(int(private))
            data_sell.append(int(sell))
        return data_buy, data_private, data_sell
    def predict_for_future(self, city='深圳', model_detail_slug='model_25023_cs', use_time=12, mile=2, times=3):
        """Raw future price series (buy/private/sell lists), yearly steps."""
        # Validate parameters.
        check_params_value(city, model_detail_slug, use_time, mile, category='valuate')
        # Resolve new-car guide price and the remapped trim.
        price_bn = model_detail_map.loc[model_detail_slug, 'final_price_bn']
        price_bn = price_bn * 10000
        province = province_city_map.loc[city, 'province']
        model_slug = model_detail_map.loc[model_detail_slug, 'model_slug']
        final_model_detail_slug = model_detail_map.loc[model_detail_slug, 'final_model_detail_slug']
        # One price triple per future year.
        data_buy = []
        data_sell = []
        data_private = []
        result = predict_from_db_future(final_model_detail_slug, city, use_time, times)
        for dealer_hedge, cpersonal_hedge, use_time_per in result:
            buy, private, sell, popularity = self.follow_process(use_time_per, mile, price_bn, dealer_hedge,
                                                                 cpersonal_hedge, province, model_slug,
                                                                 model_detail_slug)
            data_buy.append(int(buy))
            data_private.append(int(private))
            data_sell.append(int(sell))
        return data_buy, data_private, data_sell
    def history_price_trend(self, city='深圳', model_detail_slug='model_25023_cs', use_time=12, mile=2, ret_type='records'):
        """Smoothed historical price trend (up to 6 months back)."""
        # Validate parameters.
        check_params_value(city, model_detail_slug, use_time, mile, category='history')
        # Month offsets: '0' .. '-6', truncated for young cars.
        times_str = ['0', '-1', '-2', '-3', '-4', '-5', '-6']
        nums = 6
        if use_time <= 6:
            times_str = []
            nums = use_time-1
            for i in range(0, nums+1):
                times_str.append(str(-i))
        # Historical B2C / C2B / C2C series.
        data_buy, data_private, data_sell = self.predict_for_history(city, model_detail_slug, use_time, mile)
        # Smooth implausible movements.
        data_buy = process_unreasonable_history_price(data_buy, nums)
        data_sell = process_unreasonable_history_price(data_sell, nums)
        data_private = process_unreasonable_history_price(data_private, nums)
        result_b_2_c = pd.DataFrame([data_buy], columns=times_str)
        result_b_2_c['type'] = 'buy'
        result_c_2_b = pd.DataFrame([data_sell], columns=times_str)
        result_c_2_b['type'] = 'sell'
        result_c_2_c = pd.DataFrame([data_private], columns=times_str)
        result_c_2_c['type'] = 'private'
        result = result_b_2_c.append(result_c_2_b, ignore_index=True)
        result = result.append(result_c_2_c, ignore_index=True)
        if ret_type == 'records':
            return result.to_dict('records')
        else:
            return result
    def future_price_trend(self, city='深圳', model_detail_slug='model_25023_cs', use_time=365, mile=2, ret_type='records'):
        """Smoothed future price trend (up to 3 years ahead).

        NOTE(review): the default use_time=365 exceeds the validated 1-240
        range and would raise — confirm the intended default.
        """
        # Validate parameters.
        check_params_value(city, model_detail_slug, use_time, mile, category='future')
        # Month offsets: '0', '12', '24', '36', truncated near the 240 cap.
        times_str = ['0', '12', '24', '36']
        nums = 3
        if use_time > 204:
            times_str = []
            nums = int((240-use_time) / 12)
            for i in range(0, nums+1):
                times_str.append(str(i*12))
        # Future B2C / C2B / C2C series.
        data_buy, data_private, data_sell = self.predict_for_future(city, model_detail_slug, use_time, mile, len(times_str))
        data_buy = process_unreasonable_future_price(data_buy, nums)
        data_sell = process_unreasonable_future_price(data_sell, nums)
        data_private = process_unreasonable_future_price(data_private, nums)
        result_b_2_c = pd.DataFrame([data_buy], columns=times_str)
        result_b_2_c['type'] = 'buy'
        result_c_2_b = pd.DataFrame([data_sell], columns=times_str)
        result_c_2_b['type'] = 'sell'
        result_c_2_c = pd.DataFrame([data_private], columns=times_str)
        result_c_2_c['type'] = 'private'
        result = result_b_2_c.append(result_c_2_b, ignore_index=True)
        result = result.append(result_c_2_c, ignore_index=True)
        if ret_type == 'records':
            return result.to_dict('records')
        else:
            return result
|
6,070 | 70c9d75dabfa9eac23e34f94f34d39c08e21b3c0 | import rospy
#: the parameter namespace for the arni_countermeasure node
ARNI_CTM_NS = "arni/countermeasure/"
#: the parameter namespace for configuration files
#: of the arni_countermeasure node
ARNI_CTM_CFG_NS = ARNI_CTM_NS + "config/"
def get_param_num(param):
    """Fetch a numeric parameter from the ROS parameter server.

    Logs an error and requests node shutdown if the parameter is missing or
    not a number; a dummy value of 1 is returned in that case so the caller
    can still proceed while the node shuts down.

    :param param: full name of the parameter to read.
    :return: the parameter's value, or 1 on error.
    """
    #dummy val
    value = 1
    try:
        value = rospy.get_param(param)
        # NOTE: `long` exists only on Python 2 — this module targets
        # Python 2 ROS distributions.
        if not isinstance(value, (int, float, long)):
            err_msg = (
                "Param %s is not an number" % param)
            rospy.logerr(err_msg)
            rospy.signal_shutdown(err_msg)
    except KeyError:
        err_msg = (
            "Param %s is not set" % param
            + " and its default value has been forcefully removed")
        rospy.logerr(err_msg)
        rospy.signal_shutdown(err_msg)
    return value
def get_param_duration(param):
    """Fetch a parameter and convert it to a rospy.Duration.

    Logs an error and requests node shutdown when the value cannot be
    parsed as a Duration; a dummy Duration(1) is returned in that case so
    the caller can proceed while the node shuts down.

    :return: the param's value as a duration.
    :rtype: rospy.Duration
    """
    try:
        # get_param_num already handles missing / non-numeric params.
        return rospy.Duration(get_param_num(param))
    except ValueError:
        err_msg = (
            "Param %s has the invalid value '%s'."
            % (param, rospy.get_param(param)))
        rospy.logerr(err_msg)
        rospy.signal_shutdown(err_msg)
        # Dummy value returned while shutdown completes.
        return rospy.Duration(1)
|
6,071 | 5e68233fde741c0d2a94bf099afb6a91c08e2a29 | def test_corr_callable_method(self, datetime_series):
my_corr = (lambda a, b: (1.0 if (a == b).all() else 0.0))
s1 = Series([1, 2, 3, 4, 5])
s2 = Series([5, 4, 3, 2, 1])
expected = 0
tm.assert_almost_equal(s1.corr(s2, method=my_corr), expected)
tm.assert_almost_equal(datetime_series.corr(datetime_series, method=my_corr), 1.0)
tm.assert_almost_equal(datetime_series[:15].corr(datetime_series[5:], method=my_corr), 1.0)
assert np.isnan(datetime_series[::2].corr(datetime_series[1::2], method=my_corr))
df = pd.DataFrame([s1, s2])
expected = pd.DataFrame([{
0: 1.0,
1: 0,
}, {
0: 0,
1: 1.0,
}])
tm.assert_almost_equal(df.transpose().corr(method=my_corr), expected) |
6,072 | 49a9fb43f3651d28d3ffac5e33d10c428afd08fd | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
def calcLuckyNumber(x):
    """Return the set of "lucky numbers" n with 1 < n <= x, where
    n = 3**i * 5**j * 7**k for non-negative exponents i, j, k.

    The original implementation iterated fixed exponent ranges of 30
    (27,000 pow() calls regardless of x, and silently incomplete for
    astronomically large x). This version bounds each exponent by x itself,
    so it is correct for every positive x and does no wasted work; for
    x <= 0 it returns an empty set, exactly as before.
    """
    resultSet = set()
    p3 = 1
    while p3 <= x:
        p5 = p3
        while p5 <= x:
            p7 = p5
            while p7 <= x:
                # Exclude 1 (the i=j=k=0 product), matching the original.
                if p7 > 1:
                    resultSet.add(p7)
                p7 *= 7
            p5 *= 5
        p3 *= 3
    return resultSet
x = input("input number: ")
if x != '':
x = int(x)
if x > 0:
result = calcLuckyNumber(x)
print(len(result))
|
6,073 | 4bf140ae01f2eaa0c67f667766c3ec921d552066 | import pulumi
import pulumi_aws as aws
# Data-source lookup of an existing ElastiCache replication group by id
# (reads state only; creates no resource).
bar = aws.elasticache.get_replication_group(replication_group_id="example")
|
6,074 | 7254e74ff3f562613cc610e4816a2d92b6b1cd4c | name = 'Ледяная скорбь'
# Item flavour text (Russian, user-facing — do not translate in code).
description = 'Тот кто держит этот клинок, должен обладать бесконечной силой. Подобно тому, как он разрывает плоть, он разрывает души.'
# Shop price in game currency.
price = 3000
# Whether the item can be used in combat (see fight_use below).
fightable = True
def fight_use(user, reply, room):
return 200 |
6,075 | 79a8ff0000f3be79a62d693ed6bae7480673d970 | import argparse
from ray.tune.config_parser import make_parser
from ray.tune.result import DEFAULT_RESULTS_DIR
EXAMPLE_USAGE = """
Training example:
python ./train.py --run DQN --env CartPole-v0 --no-log-flatland-stats
Training with Config:
python ./train.py -f experiments/flatland_random_sparse_small/global_obs/ppo.yaml
Note that -f overrides all other trial-specific command-line options.
"""
def create_parser(parser_creator=None):
    """Build the training CLI parser on top of ray.tune's base parser.

    All trial-specific options can be overridden wholesale by a config file
    passed via -f/--config-file.

    :param parser_creator: optional factory forwarded to ray.tune's make_parser.
    :return: an argparse-compatible parser with all training options registered.
    """
    parser = make_parser(
        parser_creator=parser_creator,
        formatter_class=argparse.RawDescriptionHelpFormatter,
        description="Train a reinforcement learning agent.",
        epilog=EXAMPLE_USAGE)

    # See also the base parser definition in ray/tune/config_parser.py
    parser.add_argument(
        "--ray-address",
        default=None,
        type=str,
        help="Connect to an existing Ray cluster at this address instead "
        "of starting a new one.")
    parser.add_argument(
        "--ray-num-cpus",
        default=None,
        type=int,
        help="--num-cpus to use if starting a new cluster.")
    parser.add_argument(
        "--ray-num-gpus",
        default=None,
        type=int,
        help="--num-gpus to use if starting a new cluster.")
    parser.add_argument(
        "--ray-num-nodes",
        default=None,
        type=int,
        help="Emulate multiple cluster nodes for debugging.")
    parser.add_argument(
        "--ray-redis-max-memory",
        default=None,
        type=int,
        help="--redis-max-memory to use if starting a new cluster.")
    parser.add_argument(
        "--ray-memory",
        default=None,
        type=int,
        help="--memory to use if starting a new cluster.")
    parser.add_argument(
        "--ray-object-store-memory",
        default=None,
        type=int,
        help="--object-store-memory to use if starting a new cluster.")
    parser.add_argument(
        "--experiment-name",
        default="default",
        type=str,
        help="Name of the subdirectory under `local_dir` to put results in.")
    parser.add_argument(
        "--local-dir",
        default=DEFAULT_RESULTS_DIR,
        type=str,
        help="Local dir to save training results to. Defaults to '{}'.".format(
            DEFAULT_RESULTS_DIR))
    parser.add_argument(
        "--upload-dir",
        default="",
        type=str,
        help="Optional URI to sync training results to (e.g. s3://bucket).")
    parser.add_argument(
        "-v", action="store_true", help="Whether to use INFO level logging.")
    parser.add_argument(
        "-vv", action="store_true", help="Whether to use DEBUG level logging.")
    parser.add_argument(
        "--resume",
        action="store_true",
        help="Whether to attempt to resume previous Tune experiments.")
    parser.add_argument(
        "--torch",
        action="store_true",
        help="Whether to use PyTorch (instead of tf) as the DL framework.")
    parser.add_argument(
        "--eager",
        action="store_true",
        help="Whether to attempt to enable TF eager execution.")
    parser.add_argument(
        "--trace",
        action="store_true",
        help="Whether to attempt to enable tracing for eager mode.")
    parser.add_argument(
        "--log-flatland-stats",
        action="store_true",
        default=True,
        help="Whether to log additional flatland specfic metrics such as percentage complete or normalized score.")
    # Bug fix: with action="store_true" and default=True the flag above could
    # never be switched off, yet EXAMPLE_USAGE advertises
    # --no-log-flatland-stats.  Provide the paired disabling flag.
    parser.add_argument(
        "--no-log-flatland-stats",
        action="store_false",
        dest="log_flatland_stats",
        help="Disable logging of additional flatland specific metrics.")
    parser.add_argument(
        "-e",
        "--eval",
        action="store_true",
        help="Whether to run evaluation. Default evaluation config is default.yaml "
        "to use custom evaluation config set (eval_generator:high_eval) under configs")
    parser.add_argument(
        "--bind-all",
        action="store_true",
        default=False,
        help="Whether to expose on network (binding on all network interfaces).")
    parser.add_argument(
        "--env", default=None, type=str, help="The gym environment to use.")
    parser.add_argument(
        "--queue-trials",
        action="store_true",
        help=(
            "Whether to queue trials when the cluster does not currently have "
            "enough resources to launch one. This should be set to True when "
            "running on an autoscaling cluster to enable automatic scale-up."))
    parser.add_argument(
        "-f",
        "--config-file",
        default=None,
        type=str,
        help="If specified, use config options from this file. Note that this "
        "overrides any trial-specific options set via flags above.")
    return parser
|
6,076 | ff959a388438a6d9c6d418e28c676ec3fd196ea0 | from django.conf.urls import url, include
from api.resources import PlayerResource, GameResource
from . import views
# Tastypie-style resource instances whose url patterns are mounted under /api/.
player_resource = PlayerResource()
game_resource = GameResource()
urlpatterns = [
    url(r'^$', views.index, name='index'),           # site landing page
    url(r'^api/', include(player_resource.urls)),    # /api/player/...
    url(r'^api/', include(game_resource.urls)),      # /api/game/...
] |
6,077 | e5b5a0c8c0cbe4862243548b3661057240e9d8fd | # -*- coding: utf-8 -*-
import numpy as np
import pandas as pd
import pandas
import numpy
import json
import torch.utils.data as data
import os
import torch
def load_json(file):
    """Parse the JSON document stored at path `file` and return it."""
    with open(file) as handle:
        return json.load(handle)
class VideoDataSet(data.Dataset):
    """Per-video dataset for the temporal evaluation module (TEM).

    Each item yields the (D, T) snippet feature matrix; in "train" mode it also
    yields three length-T ground-truth probability sequences
    (actionness / start / end).
    """

    def __init__(self, opt, subset="train"):
        self.temporal_scale = opt["temporal_scale"]    # number of snippets T the video is rescaled to
        self.temporal_gap = 1. / self.temporal_scale   # normalized temporal extent of one snippet
        self.subset = subset                           # "train", "validation", "test" or "full"
        self.mode = opt["mode"]                        # "train" or "test"
        self.feature_path = opt["feature_path"]        # root directory of the per-video feature csvs
        self.boundary_ratio = opt["boundary_ratio"]    # relative width of the widened start/end regions
        self.video_info_path = opt["video_info"]       # csv listing videos and their subset
        self.video_anno_path = opt["video_anno"]       # json with the temporal annotations
        self._getDatasetDict()
        self.check_csv()

    def check_csv(self):
        """Remove videos whose feature csv is missing or known to be corrupt."""
        # Iterate over a snapshot: removing from the list while iterating it
        # would silently skip the element that follows each removed one.
        for video in list(self.video_list):
            if not os.path.exists(self.feature_path + "csv_mean_" + str(self.temporal_scale) + "/" + video + ".csv"):
                print("video :{} feature csv is not existed".format(video))
                self.video_list.remove(video)
                del self.video_dict[video]
        # Known-bad samples.
        del_videl_list = ['v_5HW6mjZZvtY']
        for v in del_videl_list:
            if v in self.video_dict:
                print("del " + v + ' video')
                self.video_list.remove(v)
                del self.video_dict[v]
        print("After check: csv \n %s subset video numbers: %d" % (self.subset, len(self.video_list)))

    def _getDatasetDict(self):
        """Build video_dict / video_list for the requested subset."""
        anno_df = pd.read_csv(self.video_info_path)
        anno_database = load_json(self.video_anno_path)
        self.video_dict = {}   # video name -> annotation record (incl. ground truth)
        for i in range(len(anno_df)):
            video_name = anno_df.video.values[i]
            video_info = anno_database[video_name]
            video_subset = anno_df.subset.values[i]   # "training" / "validation" / "testing"
            if self.subset == "full":   # keep every video regardless of subset
                self.video_dict[video_name] = video_info
            if self.subset in video_subset:
                self.video_dict[video_name] = video_info
        self.video_list = list(self.video_dict.keys())
        print("Before check: csv \n %s subset video numbers: %d" % (self.subset, len(self.video_list)))

    def __getitem__(self, index):
        video_data, anchor_xmin, anchor_xmax = self._get_base_data(index)
        if self.mode == "train":
            match_score_action, match_score_start, match_score_end = self._get_train_label(index, anchor_xmin, anchor_xmax)
            return video_data, match_score_action, match_score_start, match_score_end
        else:
            return index, video_data, anchor_xmin, anchor_xmax

    def _get_base_data(self, index):
        """Load one video's feature matrix plus the per-snippet anchor boundaries."""
        video_name = self.video_list[index]
        anchor_xmin = [self.temporal_gap * i for i in range(self.temporal_scale)]           # 0.00 .. 0.99
        anchor_xmax = [self.temporal_gap * i for i in range(1, self.temporal_scale + 1)]    # 0.01 .. 1.00
        try:
            video_df = pd.read_csv(self.feature_path + "csv_mean_" + str(self.temporal_scale) + "/" + video_name + ".csv")
        except Exception:
            # Report which file failed, then re-raise: the original swallowed the
            # error and crashed one line later with an unrelated NameError.
            print('Error in ' + video_name + ".csv")
            raise
        video_data = video_df.values[:, :]
        video_data = torch.Tensor(video_data)           # [T, D], e.g. [100, 400]
        video_data = torch.transpose(video_data, 0, 1)  # [D, T] for 1-D temporal convolution
        video_data = video_data.float()                 # assign the result: Tensor.float() is not in-place
        return video_data, anchor_xmin, anchor_xmax

    def _get_train_label(self, index, anchor_xmin, anchor_xmax):
        """Build the three length-T target probability sequences."""
        video_name = self.video_list[index]
        video_info = self.video_dict[video_name]
        video_frame = video_info['duration_frame']
        video_second = video_info['duration_second']
        feature_frame = video_info['feature_frame']
        # Sliding-window extraction covers slightly fewer frames than the clip;
        # rescale annotated times to the span the features actually cover.
        corrected_second = float(feature_frame) / video_frame * video_second
        video_labels = video_info['annotations']
        gt_bbox = []
        for j in range(len(video_labels)):   # normalize every segment into [0, 1]
            tmp_info = video_labels[j]
            tmp_start = max(min(1, tmp_info['segment'][0] / corrected_second), 0)
            tmp_end = max(min(1, tmp_info['segment'][1] / corrected_second), 0)
            gt_bbox.append([tmp_start, tmp_end])
        gt_bbox = np.array(gt_bbox)
        gt_xmins = gt_bbox[:, 0]
        gt_xmaxs = gt_bbox[:, 1]
        gt_lens = gt_xmaxs - gt_xmins
        # Width of the widened starting/ending regions (at least one snippet).
        gt_len_small = np.maximum(self.temporal_gap, self.boundary_ratio * gt_lens)
        gt_start_bboxs = np.stack((gt_xmins - gt_len_small / 2, gt_xmins + gt_len_small / 2), axis=1)
        gt_end_bboxs = np.stack((gt_xmaxs - gt_len_small / 2, gt_xmaxs + gt_len_small / 2), axis=1)
        # Per-snippet targets: max intersection-over-anchor against the segments
        # / widened regions.  (O(T * num_gt) loops: simple and identical in
        # result to a vectorized form; cheap for T ~ 100.)
        match_score_action = []
        for jdx in range(len(anchor_xmin)):
            match_score_action.append(np.max(self._ioa_with_anchors(anchor_xmin[jdx], anchor_xmax[jdx], gt_xmins, gt_xmaxs)))
        match_score_start = []
        for jdx in range(len(anchor_xmin)):
            match_score_start.append(np.max(self._ioa_with_anchors(anchor_xmin[jdx], anchor_xmax[jdx], gt_start_bboxs[:, 0], gt_start_bboxs[:, 1])))
        match_score_end = []
        for jdx in range(len(anchor_xmin)):
            match_score_end.append(np.max(self._ioa_with_anchors(anchor_xmin[jdx], anchor_xmax[jdx], gt_end_bboxs[:, 0], gt_end_bboxs[:, 1])))
        match_score_action = torch.Tensor(match_score_action)
        match_score_start = torch.Tensor(match_score_start)
        match_score_end = torch.Tensor(match_score_end)
        return match_score_action, match_score_start, match_score_end

    def _ioa_with_anchors(self, anchors_min, anchors_max, box_min, box_max):
        """Intersection length normalized by the anchor length (IoA)."""
        len_anchors = anchors_max - anchors_min
        int_xmin = np.maximum(anchors_min, box_min)
        int_xmax = np.minimum(anchors_max, box_max)
        inter_len = np.maximum(int_xmax - int_xmin, 0.)
        scores = np.divide(inter_len, len_anchors)
        return scores

    def _ioa(self, anchors, gts):
        """Vectorized IoA between [N, 2] anchors and [M, 2] gts -> [N, M].

        The original body was unfinished (it ended in a stray broadcasting
        experiment and returned nothing); completed here by analogy with
        _ioa_with_anchors.  Currently unused by this class.
        """
        len_anchors = anchors[:, 1] - anchors[:, 0]
        int_min = np.maximum(anchors[:, 0][:, None], gts[:, 0])
        int_max = np.minimum(anchors[:, 1][:, None], gts[:, 1])
        inter_len = np.maximum(int_max - int_min, 0.)
        return inter_len / len_anchors[:, None]

    def __len__(self):
        return len(self.video_list)
class ProposalDataSet(data.Dataset):
    """Proposal-level dataset for PEM: yields BSP features per proposal and,
    in train mode, the precomputed matched-IoU regression targets."""

    def __init__(self, opt, subset="train"):
        self.subset = subset
        self.mode = opt["mode"]
        if self.mode == "train":
            # Fewer proposals per video are kept for training than at inference.
            self.top_K = opt["pem_top_K"]
        else:
            self.top_K = opt["pem_top_K_inference"]
        self.video_info_path = opt["video_info"]
        self.video_anno_path = opt["video_anno"]
        self.feature_path = opt["feature_path"]        # root directory of per-video feature csvs
        self.temporal_scale = opt["temporal_scale"]    # temporal length T (e.g. 100 snippets)
        self._getDatasetDict()
        self.check_csv()

    def check_csv(self):
        """Remove videos whose feature csv is missing or known to be corrupt."""
        # Iterate over a snapshot: removing from the list while iterating it
        # would silently skip the element that follows each removed one.
        for video in list(self.video_list):
            if not os.path.exists(self.feature_path + "csv_mean_" + str(self.temporal_scale) + "/" + video + ".csv"):
                print("video :{} feature csv is not existed".format(video))
                self.video_list.remove(video)
                del self.video_dict[video]
        # Known-bad samples.
        del_videl_list = ['v_5HW6mjZZvtY']
        for v in del_videl_list:
            if v in self.video_dict:
                print("del " + v + ' video')
                self.video_list.remove(v)
                del self.video_dict[v]
        print("After check: csv \n %s subset video numbers: %d" % (self.subset, len(self.video_list)))

    def _getDatasetDict(self):
        """Collect the videos that belong to the requested subset."""
        anno_df = pd.read_csv(self.video_info_path)
        anno_database = load_json(self.video_anno_path)
        self.video_dict = {}
        for i in range(len(anno_df)):
            video_name = anno_df.video.values[i]
            video_info = anno_database[video_name]
            video_subset = anno_df.subset.values[i]
            if self.subset == "full":
                self.video_dict[video_name] = video_info
            if self.subset in video_subset:
                self.video_dict[video_name] = video_info
        self.video_list = list(self.video_dict.keys())
        print("%s subset video numbers: %d" % (self.subset, len(self.video_list)))

    def __len__(self):
        return len(self.video_list)

    def __getitem__(self, index):
        video_name = self.video_list[index]
        # Top-K candidate proposals produced by the PGM stage.
        pdf = pandas.read_csv("./output/PGM_proposals/" + video_name + ".csv")
        pdf = pdf[:self.top_K]
        video_feature = numpy.load("./output/PGM_feature/" + video_name + ".npy")  # BSP feature per proposal
        video_feature = video_feature[:self.top_K, :]
        video_feature = torch.Tensor(video_feature)
        if self.mode == "train":
            # IoU with ground truth (precomputed during TEM inference) is the target.
            video_match_iou = torch.Tensor(pdf.match_iou.values[:])
            return video_feature, video_match_iou   # [K, F], [K]
        else:
            # At inference also return each proposal's boundary locations and scores.
            video_xmin = pdf.xmin.values[:]
            video_xmax = pdf.xmax.values[:]
            video_xmin_score = pdf.xmin_score.values[:]
            video_xmax_score = pdf.xmax_score.values[:]
            return video_feature, video_xmin, video_xmax, video_xmin_score, video_xmax_score
def load_json(file):
    """Read and deserialize a JSON file.

    NOTE(review): re-defines the load_json declared near the top of this file
    with identical behavior; the later definition wins at import time.
    """
    with open(file) as fh:
        parsed = json.load(fh)
    return parsed
class BMN_VideoDataSet(data.Dataset):
    """Per-video dataset for the BMN network: yields the (D, T) feature matrix
    and, in train mode, start/end probability targets plus the (T, T)
    duration-by-start IoU confidence map."""

    def __init__(self, opt, subset="train"):
        self.temporal_scale = opt["temporal_scale"]    # number of snippets T (e.g. 100)
        self.temporal_gap = 1. / self.temporal_scale   # normalized extent of one snippet
        self.subset = subset
        self.mode = opt["mode"]
        self.feature_path = opt["feature_path"]
        self.video_info_path = opt["video_info"]
        self.video_anno_path = opt["video_anno"]
        self._getDatasetDict()
        self.check_csv()
        self._get_match_map()

    def check_csv(self):
        """Remove videos whose feature csv is missing or known to be corrupt."""
        # Iterate over a snapshot: removing from the list while iterating it
        # would silently skip the element that follows each removed one.
        for video in list(self.video_list):
            if not os.path.exists(self.feature_path + "csv_mean_" + str(self.temporal_scale) + "/" + video + ".csv"):
                print("video :{} feature csv is not existed".format(video))
                self.video_list.remove(video)
                del self.video_dict[video]
        # Known-bad samples.
        del_videl_list = ['v_5HW6mjZZvtY']
        for v in del_videl_list:
            if v in self.video_dict:
                print("del " + v + ' video')
                self.video_list.remove(v)
                del self.video_dict[v]
        print("After check: csv \n %s subset video numbers: %d" % (self.subset, len(self.video_list)))

    def _getDatasetDict(self):
        """Collect the videos that belong to the requested subset."""
        anno_df = pd.read_csv(self.video_info_path)
        anno_database = load_json(self.video_anno_path)
        self.video_dict = {}
        for i in range(len(anno_df)):
            video_name = anno_df.video.values[i]
            video_info = anno_database[video_name]
            video_subset = anno_df.subset.values[i]
            if self.subset in video_subset:
                self.video_dict[video_name] = video_info
        self.video_list = list(self.video_dict.keys())
        print("%s subset video numbers: %d" % (self.subset, len(self.video_list)))

    def __getitem__(self, index):
        video_data = self._load_file(index)   # [D, T] feature matrix
        if self.mode == "train":
            # [T], [T], [T, T]
            match_score_start, match_score_end, confidence_score = self._get_train_label(index, self.anchor_xmin,
                                                                                         self.anchor_xmax)
            return video_data, confidence_score, match_score_start, match_score_end
        else:
            return index, video_data

    def _get_match_map(self):
        """Precompute the flattened (duration, start) -> [xmin, xmax] lookup
        table and the per-snippet anchor boundaries."""
        match_map = []
        for idx in range(self.temporal_scale):
            tmp_match_window = []
            xmin = self.temporal_gap * idx                # normalized start location
            for jdx in range(1, self.temporal_scale + 1):
                xmax = xmin + self.temporal_gap * jdx     # start + candidate duration
                tmp_match_window.append([xmin, xmax])
            match_map.append(tmp_match_window)
        match_map = np.array(match_map)                   # [start, duration, 2]: span of each candidate proposal
        match_map = np.transpose(match_map, [1, 0, 2])    # [duration, start, 2]
        match_map = np.reshape(match_map, [-1, 2])        # flattened duration-major [T*T, 2]
        self.match_map = match_map                        # rows share a duration, columns share a start
        self.anchor_xmin = [self.temporal_gap * (i - 0.5) for i in range(self.temporal_scale)]         # snippet start times
        self.anchor_xmax = [self.temporal_gap * (i + 0.5) for i in range(1, self.temporal_scale + 1)]  # snippet end times
        # NOTE(review): unlike BSN these anchors are shifted by half a snippet;
        # whether the +-0.5 offset matches how the offline features were
        # extracted is untested -- verify experimentally if boundary precision
        # matters.

    def _load_file(self, index):
        """Load one video's feature csv as a [D, T] float tensor."""
        video_name = self.video_list[index]
        video_df = pd.read_csv(self.feature_path + "csv_mean_" + str(self.temporal_scale) + "/" + video_name + ".csv")
        video_data = video_df.values[:, :]
        video_data = torch.Tensor(video_data)
        video_data = torch.transpose(video_data, 0, 1)
        video_data = video_data.float()   # assign the result: Tensor.float() is not in-place
        return video_data

    def _get_train_label(self, index, anchor_xmin, anchor_xmax):
        """Build the start/end probability targets and the (T, T) IoU map."""
        video_name = self.video_list[index]
        video_info = self.video_dict[video_name]
        video_frame = video_info['duration_frame']
        video_second = video_info['duration_second']
        feature_frame = video_info['feature_frame']
        corrected_second = float(feature_frame) / video_frame * video_second  # there are some frames not used
        video_labels = video_info['annotations']  # the measurement is second, not frame

        # Convert the annotations from seconds to normalized [0, 1] and build the
        # IoU between every candidate proposal in match_map and each ground truth.
        gt_bbox = []
        gt_iou_map = []
        for j in range(len(video_labels)):
            tmp_info = video_labels[j]
            tmp_start = max(min(1, tmp_info['segment'][0] / corrected_second), 0)
            tmp_end = max(min(1, tmp_info['segment'][1] / corrected_second), 0)
            gt_bbox.append([tmp_start, tmp_end])
            tmp_gt_iou_map = iou_with_anchors(
                self.match_map[:, 0], self.match_map[:, 1], tmp_start, tmp_end)
            tmp_gt_iou_map = np.reshape(tmp_gt_iou_map,
                                        [self.temporal_scale, self.temporal_scale])
            gt_iou_map.append(tmp_gt_iou_map)
        gt_iou_map = np.array(gt_iou_map)         # [num_gt, T, T]
        gt_iou_map = np.max(gt_iou_map, axis=0)   # best IoU over all ground truths -> [T, T]
        gt_iou_map = torch.Tensor(gt_iou_map)

        # Widened start/end regions R_s and R_e (fixed width of 3 snippets).
        gt_bbox = np.array(gt_bbox)
        gt_xmins = gt_bbox[:, 0]
        gt_xmaxs = gt_bbox[:, 1]
        gt_len_small = 3 * self.temporal_gap
        gt_start_bboxs = np.stack((gt_xmins - gt_len_small / 2, gt_xmins + gt_len_small / 2), axis=1)
        gt_end_bboxs = np.stack((gt_xmaxs - gt_len_small / 2, gt_xmaxs + gt_len_small / 2), axis=1)

        # Per-snippet boundary targets: max intersection-over-anchor against any
        # widened region.
        match_score_start = []
        for jdx in range(len(anchor_xmin)):
            match_score_start.append(np.max(
                ioa_with_anchors(anchor_xmin[jdx], anchor_xmax[jdx], gt_start_bboxs[:, 0], gt_start_bboxs[:, 1])))
        match_score_end = []
        for jdx in range(len(anchor_xmin)):
            match_score_end.append(np.max(
                ioa_with_anchors(anchor_xmin[jdx], anchor_xmax[jdx], gt_end_bboxs[:, 0], gt_end_bboxs[:, 1])))
        match_score_start = torch.Tensor(match_score_start)
        match_score_end = torch.Tensor(match_score_end)
        return match_score_start, match_score_end, gt_iou_map   # [T], [T], [T, T]

    def __len__(self):
        return len(self.video_list)
def ioa_with_anchors(anchors_min, anchors_max, box_min, box_max):
    """Intersection-over-anchor between one anchor span and the given boxes.

    The overlap is normalized by the anchor length only (not the union); this
    is the supervision signal used for the boundary probability sequences.
    """
    anchor_len = anchors_max - anchors_min
    overlap_lo = np.maximum(anchors_min, box_min)
    overlap_hi = np.minimum(anchors_max, box_max)
    overlap = np.maximum(overlap_hi - overlap_lo, 0.)
    return np.divide(overlap, anchor_len)
def iou_with_anchors(anchors_min, anchors_max, box_min, box_max):
    """Compute jaccard score (IoU) between a box and the anchors."""
    anchor_len = anchors_max - anchors_min
    inter_lo = np.maximum(anchors_min, box_min)
    inter_hi = np.minimum(anchors_max, box_max)
    inter = np.maximum(inter_hi - inter_lo, 0.)
    union = anchor_len - inter + box_max - box_min
    return np.divide(inter, union)
if __name__ == '__main__':
    import opts
    opt = opts.parse_opt()
    opt = vars(opt)
    # test dataset for BMN network
    # Smoke test: build one train loader and print the shapes of a single batch
    # (feature matrix plus the three label tensors), then stop.
    train_loader = torch.utils.data.DataLoader(BMN_VideoDataSet(opt, subset="train"),
                                               batch_size=opt["bmn_batch_size"], shuffle=True,
                                               num_workers=8, pin_memory=True)
    for a,b,c,d in train_loader:
        print(a.shape,b.shape,c.shape,d.shape)
        break |
6,078 | f08677430e54822abbce61d0cac5a6fea14d3872 | from a10sdk.common.A10BaseClass import A10BaseClass
class MacAgeTime(A10BaseClass):
    """Configure the aging period for all MAC interfaces.

    Supports CRUD operations and inherits from `common/A10BaseClass`;
    this class is the `"PARENT"` class for this module.

    :param aging_time: {"description": "Set aging period in seconds for all MAC interfaces (default 300 seconds)", "format": "number", "default": 300, "optional": true, "maximum": 600, "minimum": 10, "type": "number"}
    :param DeviceProxy: The device proxy for REST operations and session handling. Refer to `common/device_proxy.py`

    URL for this object::

        `https://<Hostname|Ip address>//axapi/v3/mac-age-time`.
    """

    def __init__(self, **kwargs):
        self.ERROR_MSG = ""
        self.required = []
        self.b_key = "mac-age-time"
        self.a10_url = "/axapi/v3/mac-age-time"
        self.DeviceProxy = ""
        self.aging_time = ""
        # Any attribute (e.g. aging_time) may be overridden at construction time.
        for attr_name, attr_value in kwargs.items():
            setattr(self, attr_name, attr_value)
|
6,079 | e55115a65ebee5d41dcd01a5cbabc328acf152da | from flask import Flask
from flask import request, redirect, render_template
from flask_bootstrap import Bootstrap
import urllib.request
import urllib.parse
import json
import uuid
import yaml
import hashlib
from Crypto import Random
from Crypto.Cipher import AES
import base64
app = Flask(__name__)
Bootstrap(app)
# NOTE(review): yaml.load without an explicit Loader can construct arbitrary
# objects; prefer yaml.safe_load for config files (low risk here: local file).
with open("app_config.yml", 'r') as ymlfile:
    cfg = yaml.load(ymlfile)
postapikey = cfg['app']['postapikey']  # API token used for all mokum requests
mainurl = cfg['app']['mainurl']        # public url prefix of published posts
appurl = cfg['app']['appurl']          # base url of this app (used in comment links)
secretkey = cfg['app']['secret']       # passphrase used to encrypt post ids
# Some crypto stuff
BLOCK_SIZE = 16  # AES block size in bytes; also used as the IV length
def trans(key):
    """Hash a text passphrase down to a fixed 16-byte MD5 digest (AES-128 key)."""
    digest = hashlib.md5(key.encode("utf-8"))
    return digest.digest()
def encrypt(message, passphrase):
    """AES-CFB encrypt `message`; returns base32 text of IV || ciphertext."""
    key = trans(passphrase)
    iv = Random.new().read(BLOCK_SIZE)
    cipher = AES.new(key, AES.MODE_CFB, iv)
    payload = iv + cipher.encrypt(message)
    return base64.b32encode(payload).decode("utf-8")
def decrypt(encrypted, passphrase):
    """Reverse encrypt(): base32-decode, split off the IV, AES-CFB decrypt."""
    key = trans(passphrase)
    raw = base64.b32decode(encrypted)
    iv, ciphertext = raw[:BLOCK_SIZE], raw[BLOCK_SIZE:]
    cipher = AES.new(key, AES.MODE_CFB, iv)
    return cipher.decrypt(ciphertext).decode("utf-8")
def mokum_message(message):
    """Create a new post on mokum.place.

    Returns the new post id on success, or False if the request or the
    response parsing fails.  Best-effort by design: callers only check
    truthiness of the result.
    """
    try:
        postdata = {"post": {"timelines": ["user"],
                             "text": message,
                             "comments_disabled": True,
                             "nsfw": False},
                    "_uuid": str(uuid.uuid4())
                    }
        req = urllib.request.Request("https://mokum.place/api/v1/posts.json")
        req.add_header('Content-Type', 'application/json')
        req.add_header('Accept', 'application/json')
        req.add_header('X-API-Token', postapikey)
        resp = urllib.request.urlopen(req, json.dumps(postdata).encode("utf-8"))
        message = json.loads(resp.read().decode("utf-8"))
        if message['post']['id']:
            return message['post']['id']
        # NOTE: falls through returning None (also falsy) when the API answers
        # without an id -- preserved from the original behavior.
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit are no
        # longer swallowed; any network/API/JSON failure yields False.
        return False
def mokum_comment(messageid, comment):
    """Attach a comment to the post with id `messageid` on mokum.place.

    Returns the new comment id on success, or False if the request or the
    response parsing fails.  Best-effort by design, like mokum_message().
    """
    try:
        posturl = "https://mokum.place/api/v1/posts/" + str(messageid) + "/comments.json"
        postdata = {"comment": {"text": comment,
                                # "platform": "anonymous device"
                                },
                    "_uuid": str(uuid.uuid4())}
        req = urllib.request.Request(posturl)
        req.add_header('Content-Type', 'application/json')
        req.add_header('Accept', 'application/json')
        req.add_header('X-API-Token', postapikey)
        resp = urllib.request.urlopen(req, json.dumps(postdata).encode("utf-8"))
        message = json.loads(resp.read().decode("utf-8"))
        if message['id']:
            return message['id']
        # NOTE: returns None (also falsy) when the API answers without an id.
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit are no
        # longer swallowed; any network/API/JSON failure yields False.
        return False
@app.route('/')
def main():
    # Landing page: the form for composing a new anonymous post.
    return render_template('post.html')
@app.route('/post', methods=['POST'])
def post():
    # Publish the submitted text, then append a comment containing the encrypted
    # post id so readers can reach the anonymous comment form for this post.
    posttext = request.form['post']
    id = mokum_message(posttext)  # NOTE(review): local `id` shadows the builtin
    mokum_comment(id, "click to comment --> " + appurl + "/c/" + encrypt(str(id), secretkey))
    return redirect(mainurl + str(id))
@app.route('/c/<cid>')
def comm(cid):
    # Render the anonymous comment form; `cid` is the encrypted post id from the link.
    return render_template('comment.html', cid=cid)
@app.route('/comment', methods=['POST'])
def commented():
    # Decrypt the post id carried by the hidden form field, attach the anonymous
    # comment to that post, then bounce the browser back to the post page.
    postid = decrypt(request.form['cid'], secretkey)
    posttext = request.form['comment']
    mokum_comment(postid, posttext)
    return redirect(mainurl + postid)
app.run(debug=True)
|
6,080 | a5a764586faabb5af58f4649cdd20b6b18236a99 | import numpy as np
class Layer:
    """Base class for all network layers.

    Layers without trainable parameters inherit the default need_update of
    False; parameterized layers (e.g. FC) override it.
    """

    def __init__(self):
        pass

    @property
    def need_update(self):
        """Whether the optimizer must call update() on this layer."""
        return False
class FC(Layer):
    """Fully connected (affine) layer trained by SGD with a step-decay schedule."""

    def __init__(self, W, b, lr, decay, epoch_drop, l2=0):
        self.W = W.copy()
        self.b = b.copy()
        self.alpha_0 = lr              # initial learning rate
        self.decay = decay             # multiplicative decay factor
        self.epoch_drop = epoch_drop   # number of updates between decays
        self.l2 = l2                   # L2 regularization strength
        self.count = 0                 # update counter driving the schedule

    def forward(self, x):
        """Return x @ W + b, caching x for the backward pass."""
        self.x = x.copy()
        self.m, self.n = x.shape
        return self.x @ self.W + self.b

    def backprop(self, back_grad):
        """Accumulate parameter gradients and return the gradient w.r.t. x."""
        self.grad_W = self.x.T @ back_grad + self.l2 * self.W
        self.grad_b = np.ones(self.m) @ back_grad
        self.grad = back_grad @ self.W.T
        return self.grad

    def l_rate(self):
        """Step decay: alpha_0 * decay^floor((1 + count) / epoch_drop)."""
        exponent = np.floor((1 + self.count) / self.epoch_drop)
        current = self.alpha_0 * (self.decay ** exponent)
        self.count += 1
        return current

    def update(self):
        """Apply one SGD step with the scheduled learning rate."""
        step = self.l_rate()
        self.W -= step * self.grad_W
        self.b -= step * self.grad_b

    @property
    def need_update(self):
        return True
class Sigmoid(Layer):
    """Logistic activation: sigma(x) = 1 / (1 + e^-x)."""

    def forward(self, x):
        self.x = x.copy()
        self.sig_res = 1 / (1 + np.exp(-x))
        return self.sig_res

    def backprop(self, back_grad):
        # d/dx sigma(x) = sigma(x) * (1 - sigma(x)); reuse the cached forward output.
        return back_grad * self.sig_res * (1 - self.sig_res)
class Relu(Layer):
    """Rectified linear unit: max(x, 0)."""

    def forward(self, x):
        self.x = x.copy()
        return np.maximum(x, 0)

    def backprop(self, back_grad):
        # Gradient is blocked where the input was negative, passed through otherwise.
        return np.where(self.x < 0, 0, back_grad)
class Leaky_Relu(Layer):
    """Leaky ReLU with a fixed negative-side slope of 0.01."""

    def forward(self, x):
        self.x = x.copy()
        return np.where(x < 0, 0.01 * x, x)

    def backprop(self, back_grad):
        # Slope 1 on the positive side, 0.01 on the negative side.
        return np.where(self.x < 0, back_grad * 0.01, back_grad)
class Tanh(Layer):
    """Hyperbolic tangent activation."""

    def forward(self, x):
        self.x = x.copy()
        self.tanh = np.tanh(x)
        return self.tanh

    def backprop(self, back_grad):
        # d/dx tanh(x) = 1 - tanh(x)^2; reuse the cached forward output.
        return back_grad * (1 - np.square(self.tanh))
class Arctan(Layer):
    """Arctangent activation."""

    def forward(self, x):
        self.x = x.copy()
        return np.arctan(self.x)

    def backprop(self, back_grad):
        # d/dx arctan(x) = 1 / (1 + x^2).
        return back_grad / (1 + np.square(self.x))
class SoftPlus(Layer):
    """Softplus activation: log(1 + e^x), a smooth approximation of ReLU."""

    def forward(self, x):
        self.x = x.copy()
        return np.log(1 + np.exp(self.x))

    def backprop(self, back_grad):
        # d/dx log(1 + e^x) = sigmoid(x) = 1 / (1 + e^-x).
        return back_grad / (1 + np.exp(-self.x))
class SoftSign(Layer):
    """Softsign activation: x / (1 + |x|)."""

    def forward(self, x):
        self.x = x.copy()
        return self.x / (1 + np.abs(self.x))

    def backprop(self, back_grad):
        # d/dx [x / (1 + |x|)] = 1 / (1 + |x|)^2.
        # Bug fix: the previous code divided by (1 + |x|^2) -- the arctan-style
        # denominator -- which is wrong for every |x| != 1.
        grad = back_grad / (1 + np.abs(self.x)) ** 2
        return grad
class Softmax(Layer):
    """Softmax + mean cross-entropy loss over integer class labels."""

    def forward(self, x, y):
        # Shift each row by its max before exponentiating: softmax is invariant
        # to a per-row shift, and this prevents overflow in np.exp for large
        # logits.  Bug fix: the original shifted into self.x but then called
        # np.exp on the UNSHIFTED x, defeating the protection it documented.
        self.x = x.copy() - x.max(axis=1).reshape(-1, 1)
        self.y = y.copy()
        self.m, self.n = self.x.shape
        exp_x = np.exp(self.x)
        self.denom = np.sum(exp_x, axis=1).reshape((-1, 1))
        self.softmax = exp_x / self.denom
        loss = 0
        for i in range(self.m):
            loss -= np.log(self.softmax[i, y[i]])
        return loss / self.m

    def dirac(self, a, b):
        """Kronecker delta: 1 if a == b else 0 (kept for API compatibility)."""
        return 1 if a == b else 0

    def backprop(self):
        """Gradient of the mean CE loss w.r.t. the logits: (softmax - onehot(y)) / m."""
        grad = np.zeros([self.m, self.n])
        for i in range(self.m):
            for j in range(self.n):
                grad[i, j] = (self.softmax[i, j] -
                              self.dirac(j, self.y[i])) / self.m
        return grad
def get_act_func(layer_name):
    """Look up an activation layer class by name and return a fresh instance.

    Raises KeyError for unknown names.
    """
    registry = {
        "arctan": Arctan,
        "l_relu": Leaky_Relu,
        "relu": Relu,
        "sigmoid": Sigmoid,
        "tanh": Tanh,
        "softplus": SoftPlus,
        "softsign": SoftSign,
    }
    return registry[layer_name]()
|
6,081 | d867d17b2873de7c63d0ff29eb585cce1a68dda6 | import sys
pdb = open(sys.argv[1])  # input PDB file
name = sys.argv[2]       # prefix for the generated fragment files
# Group consecutive ATOM records by residue id (PDB columns 23-26).
res = []
resid = None
for l in pdb:
    if not l.startswith("ATOM"):
        continue
    if int(l[22:26]) != resid:
        res.append([])
        resid = int(l[22:26])
    res[-1].append(l)
# Write every overlapping window of 3 consecutive residues to its own file.
# NOTE(review): Python 2 print syntax below -- this script cannot run on Python 3.
for i in range(len(res)-2):
    outp = open("%s%d-%dr.pdb"%(name,i+1,i+3), "w")
    for r in res[i:i+3]:
        for j in r:
            print >> outp, j,
|
6,082 | 7ed6d475bfe36fdd0b6cd2f0902a0bccb22f7f60 | # -*- coding: utf-8 -*-
"""
项目:爬取思否网站首页推荐文章
作者:cho
时间:2019.9.23
"""
import json
import parsel
import scrapy
from scrapy import Request
from SF.items import SfItem
class SfCrawlSpider(scrapy.Spider):
    """Crawl SegmentFault's recommended-article API feed and scrape each article page."""

    name = 'sf_crawl'
    allowed_domains = ['segmentfault.com']
    header = {
        'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/76.0.3809.132 Safari/537.36',
        'referer': 'https://segmentfault.com/',
        'content-type': 'application/json; charset=UTF-8',
    }

    def start_requests(self):
        # The recommend API is paginated; walk the first 99 pages.
        for page in range(1, 100):
            urls = 'https://segmentfault.com/api/timelines/recommend?page={}&_=4f2739f6f7dc1221704e01a1dfb7b8c7'.format(page)
            yield Request(url=urls, headers=self.header, callback=self.parse)

    def parse(self, response):
        """Turn one API page into partially-filled items and follow each article link."""
        datas = json.loads(response.text)["data"]
        if datas and len(datas) > 0:
            for data in datas:
                name = data['user'][2]
                user_url = 'https://segmentfault.com' + data['user'][3]
                uid = data['user_id']  # renamed from `id` to avoid shadowing the builtin
                title = data['title']
                excerpt = data['excerpt']
                date = data['createdDate']
                views = data['viewsWord']
                votes = data['votes']
                a_url = str('https://segmentfault.com' + data['url'])
                item = SfItem(name=name, user_url=user_url, id=uid, title=title, excerpt=excerpt, date=date, views=views, votes=votes)
                # Pass the item along so parse_content can finish filling it in.
                yield Request(url=a_url, headers=self.header, callback=self.parse_content, meta={'keys': item})

    def parse_content(self, response):
        """Complete the item with fields scraped from the article page itself."""
        item = response.meta['keys']
        sel = parsel.Selector(response.text)
        item['blog_name'] = sel.xpath('//div[@class="article__authormeta"]/a[2]/text()').extract_first()
        item['blog_url'] = sel.xpath('//div[@class="article__authormeta"]/a[2]/@href').extract_first()
        # Bug fix: the original expression 'div[@class="row"]/text()' was relative
        # to the document root (whose only child is <html>), so it always yielded
        # None; anchor it with '//' like the expressions above.
        item['content'] = sel.xpath('//div[@class="row"]/text()').extract_first()
        yield item
|
6,083 | 14b6dc403be76abef5fde2cca5d773c88faa4b40 | #!usr/bin/python
#--*--coding:utf-8--*--
import sys
import re
if __name__ == '__main__':
    # Match MediaWiki category links; '.' matches any character except a newline.
    category = re.compile('\[\[Category\:.*\]\]')
    for line in open(sys.argv[1]):
        if category.search(line) is not None:  # search() returns a match object or None
            print line.strip() |
6,084 | f0b98a3d6015d57a49e315ac984cac1cccf0b382 | import sys
def input(_type=str):
    # Deliberately shadows the builtin input(): read one line from stdin,
    # strip the trailing newline/whitespace, and cast it to _type.
    return _type(sys.stdin.readline().strip())
def main():
    # N: value upper bound (read but unused), K: number of rules, D: target rank.
    # Each rule (A, B, C) generates the arithmetic progression A, A+C, ... up to B;
    # the task is to find the D-th smallest value generated by the union of rules.
    N, K, D = map(int, input().split())
    rules = [tuple(map(int, input().split())) for _ in range(K)]
    minv, maxv = min([r[0] for r in rules]), max([r[1] for r in rules])
    # Binary search the smallest v such that at least D generated numbers are <= v;
    # max_in tracks the largest generated number that is <= the current midpoint.
    while minv + 1 < maxv:
        midv = (minv + maxv)//2
        cnt, max_in = 0, 0
        for A, B, C in rules:
            if midv < A:
                continue
            # n: index of the last progression term <= min(midv, B).
            n = (min(midv, B)-A)//C
            max_in = max(A + n * C, max_in)
            cnt += n + 1
        # print(minv, midv, maxv, max_in, cnt)
        if cnt >= D:
            maxv = max_in  # enough numbers at or below midv: tighten the upper bound
        else:
            minv = midv + 1
    if minv < maxv:
        # The loop can end with minv one below maxv; re-count at minv to check
        # whether the answer is actually the largest generated value <= minv.
        cnt, max_in = 0, 0
        for A, B, C in rules:
            if minv < A:
                continue
            max_in = max(A + (min(minv, B)-A)//C * C, max_in)
            cnt += (min(minv, B) - A)//C + 1
        if cnt >= D:
            maxv = max_in
    print(maxv)
main()
# 10 20 30 40 50
# 30 60 90
# 20 45 70
# 70 95 |
6,085 | 4276fd61ad48b325961cd45be68eea6eab51f916 | import os
os.environ['CITY_CONF']='/opt/ris-web/city/duisburg.py'  # city config read by webapp at import time
from webapp import app
app.run(debug=True, host='0.0.0.0')  # NOTE(review): debug server bound to all interfaces -- dev use only
|
6,086 | ade4d797a83eaa06e8bde90972a56376d7e0f55a | import pprint
class ErrorResponseCollection(object):
    """Template describing a documented error response (HTTP status + JSON body)."""

    def __init__(self, status, message, param="message"):
        self.status = status    # HTTP status code
        self.message = message  # human-readable error text
        self.param = param      # key under which the message appears in the body

    def as_md(self):
        """Render the error body as a fenced markdown snippet."""
        template = '\n\n> **%s**\n\n```\n{\n\n\t"%s": "%s"\n\n}\n\n```'
        return template % (self.message, self.param, self.message)
# Canned 401: DRF-style authentication failure, keyed on "detail" not "message".
GET_401 = ErrorResponseCollection(
    status= 401,
    message = "Authentication credentials were not provided.",
    param = "detail"
)
# Canned 404 for the repository status endpoint.
GET_REPO_STATUS_404 = ErrorResponseCollection(
    status = 404,
    message = "NOT FOUND"
)
class ResponseCollection(object):
    """Template describing a documented success response (message + JSON payload)."""

    def __init__(self, message=None, data=None):
        self.message = message  # headline shown above the payload
        self.data = data        # payload rendered via pprint in as_md()
        if self.message is None:  # idiom fix: identity check, not `== None`
            self.message = " "

    def as_md(self):
        """Render the payload as a fenced ```json markdown block."""
        return '\n\n> **%s**\n\n```json\n%s\n\n```' % \
            (self.message, pprint.pformat(self.data, width=20, indent=4))
GET_BRANCH_STATUS_200 = ResponseCollection(
message = "HTTP_200_OK",
data = dict(branches=[
'master',
'develop',
'feature/get_repo'
])
)
GET_REPO_STATUS_200 = ResponseCollection(
message = "HTTP_200_OK",
data = {
"repositories": [
{
"name": "dogproject",
"url": "https://github.com/<user>~~~~~~.git",
"latest_commit": "2019-09-12",
"latest_scan": "2019-09-15",
},
{
"name": "catproject1234533",
"url": "https://github.com/<user>~~~~~~.git",
"latest_commit": "2019-10-11",
"latest_scan": "2019-10-11",
},
],
"repository_size": 31
}
)
GET_COMMIT_STATUS_200 = ResponseCollection(
message = "HTTP_200_OK",
data ={
'commit': [
{'sha': '123133010b97571286b568432f63395d18a49e05',
'message': 'fix : remove comments and fix code'},
{'sha': '312313fc750cdea348e23145948d2ee58e29f483b',
'message': 'Update : korea_api crawling and yara convert Update : korea_api crawling and yara rule convert'},
{'sha': '464d238123137e8502a455f97dca165cb2d28612', 'message': 'Initial commit'}]
}
)
GET_CODE_DETECT_STATUS_200 = ResponseCollection(
message = "HTTP_200_OK",
data = {
"category": [
"log_",
"Token",
"룰추가따라 늘어남",
"..."
],
"log_": [
{
"file_name": ".gitignore",
"line_number": 1,
"strings": "a",
"line1": "",
"line2": "# Created by https://www.gitignore.io/api/git,python,django,pycharm+all",
"line3": "## HUFORMATION ##"
}
],
"Token": [
{
"file_name": "파일이름",
"line_number": 10,
"strings": "ddddd",
"line1": "탐지 줄 앞",
"line2": "탐지된 줄",
"line3": "탐지줄 다음"
},
{
"file_name": ".gitignore",
"line_number": 1,
"strings": "a",
"line1": "",
"line2": "# Created by https://www.gitignore.io/api/git,python,django,pycharm+all",
"line3": "## HUFORMATION ##"
}
],
"룰추가따라 늘어남": [
{
"file_name": "파일이름",
"line_number": 302,
"strings": "ddddd",
"line1": "탐지 줄 앞",
"line2": "탐지된 줄",
"line3": "탐지줄 다음"
},
{
"file_name": ".gitignore",
"line_number": 1,
"strings": "a",
"line1": "aa",
"line2": "~~a~~~",
"line3": "다음줄"
},
{
"file_name": ".gitignore",
"line_number": 1,
"strings": "a",
"line1": "aa",
"line2": "~~a~~~",
"line3": "다음줄"
},
],
"...": [
{
"file_name": ".gitignore",
"line_number": 1,
"strings": "a",
"line1": "aa",
"line2": "~~a~~~",
"line3": "다음줄"
},
]
}
) |
6,087 | 092c6d637fe85136b4184d05f0ac7db17a8efb3b | # -*- coding:utf-8 -*-
import time
from abc import ABCMeta, abstractmethod
from xlreportform.worksheet import WorkSheet
__author__ = "Andy Yang"
class Bases(metaclass=ABCMeta):
def __init__(self):
pass
@abstractmethod
def set_style(self):
"""set workshet's style, indent,border,font,and so on"""
@abstractmethod
def query(self):
"""query from mysql, sqlserver"""
@abstractmethod
def clean(self):
"""clean data"""
@abstractmethod
def export(self):
"""export data"""
class ReportForm(Bases, WorkSheet):
def __init__(self, visible=False, filename=None, sheetname=None):
WorkSheet.__init__(self, visible, filename, sheetname)
def __new__(cls, *args, **kwargs):
cls.query(cls)
cls.clean(cls)
cls.set_style(cls)
cls.export(cls)
return object.__new__(cls)
class DayRport(ReportForm):
def query(self):
print('query')
def set_style(self):
print('set_style')
def export(self):
print('export')
if __name__ == '__main__':
d = DayRport(visible=True, filename='okok.xlsx', sheetname='dageda')
time.sleep(5)
print(d) |
6,088 | c9b1956d66f0b8ae8a7ce7e509259747c8b7709e | #program, ktory zisti, ci zadany rok je prestupny
rok=input("Zadaj rok: ")
rok_int= int(rok)
if rok_int% 4==0:
if rok_int % 100 != 0:
if rok_int % 400:
print(f'Rok {rok_int} je priestupny')
else:
print("rok je neprestupny")
else:
print("rok je prestupny")
else:
print(f"Rok {rok_int} nie je priestupny")
#
#pridame rozsah rokov
rok_od = int(input("Zadaj rok od: "))
rok_do = int(input("Zadaj rok do: "))
for rok in range(rok_od, rok_do+1):
if ((rok%4 == 0) and (rok % 100 != 0)) or rok %400 == 0:
print(f"Rok {rok} je prestupny")
|
6,089 | 7ba8f0bd962413f6ff825df27330447b11360f10 | from .base import BaseLevel
from map_objects import DefinedMap
from entity.monster import Daemon
from entity.weapons import Axe
class FinalLevel(BaseLevel):
def __init__(self):
lvl_map = DefinedMap('levels/demon_lair.xp')
super().__init__(lvl_map.width, lvl_map.height)
self.map = lvl_map
self.set_entrance(50, 29)
boss = Daemon(8, 27, 10)
self.add_entity(boss)
def add_player(self, player):
super().add_player(player)
self.player.fov = 100
self.player.weapon = Axe() |
6,090 | dc261b29c1c11bb8449ff20a7f2fd120bef9efca | #颜色选择对话框
import tkinter
import tkinter.colorchooser
root = tkinter.Tk()
root.minsize(300,300)
#添加颜色选择按钮
def select():
#打开颜色选择器
result = tkinter.colorchooser.askcolor(title = '内裤颜色种类',initialcolor = 'purple')
print(result)
#改变按钮颜色
btn1['bg'] = result[1]
btn1 = tkinter.Button(root,text = '请选择你的内裤颜色',command = select)
btn1.pack()
root.mainloop() |
6,091 | e99d557808c7ae32ebfef7e7fb2fddb04f45b13a | class Config(object):
DEBUG = False
TESTING = False
SQLALCHEMY_TRACK_MODIFICATIONS = False
class Production(Config):
SQLALCHEMY_DATABASE_URI = '<Production DB URL>'
class Development(Config):
# psql postgresql://Nghi:nghi1996@localhost/postgres
DEBUG = True
SQLALCHEMY_DATABASE_URI = 'postgresql://Nghi:nghi1996@localhost/postgres'
SQLALCHEMY_ECHO = False
JWT_SECRET_KEY = 'JWT_SECRET_NGHI!123'
SECRET_KEY = 'SECRET_KEY_NGHI_ABC!123'
SECURITY_PASSWORD_SALT = 'SECURITY_PASSWORD_SALT_NGHI_ABC!123'
MAIL_DEFAULT_SENDER = 'dev2020@localhost'
MAIL_SERVER = 'smtp.gmail.com'
MAIL_PORT = 465
MAIL_USERNAME = 'nghidev2020@gmail.com'
MAIL_PASSWORD = 'nghi1996'
MAIL_USE_TLS = False
MAIL_USE_SSL = True
UPLOAD_FOLDER = 'images'
class Testing(Config):
TESTING = True
# SQLALCHEMY_DATABASE_URI = 'postgresql://Nghi:nghi1996@localhost/postgres_test'
SQLALCHEMY_ECHO = False
JWT_SECRET_KEY = 'JWT_SECRET_NGHI!123'
SECRET_KEY = 'SECRET_KEY_NGHI_ABC!123'
SECURITY_PASSWORD_SALT = 'SECURITY_PASSWORD_SALT_NGHI_ABC!123'
MAIL_DEFAULT_SENDER = 'dev2020@localhost'
MAIL_SERVER = 'smtp.gmail.com'
MAIL_PORT = 465
MAIL_USERNAME = 'nghidev2020@gmail.com'
MAIL_PASSWORD = 'nghi1996'
MAIL_USE_TLS = False
MAIL_USE_SSL = True
UPLOAD_FOLDER = 'images'
|
6,092 | 6b0b60ec571cf026d0f0cff3d9517362c16b459b | import re
from collections import OrderedDict
OPENING_TAG = '<{}>'
CLOSING_TAG= '</{}>'
U_LIST = '<ul>{}</ul>'
LIST_ITEM = '<li>{}</li>'
STRONG = '<strong>{}</strong>'
ITALIC = '<em>{}</em>'
PARAGRAPH = '<p>{}</p>'
HEADERS = OrderedDict({'######': 'h6',
'#####': 'h5',
'####': 'h4',
'###:': 'h3',
'##': 'h2',
'#': 'h1'})
def replace_header_tags(l=''):
for k,v in HEADERS.items():
line_with_header = re.match(f'{k} (.*)', l)
if line_with_header:
rest_string = line_with_header.group(1)
return OPENING_TAG.format(v) + rest_string + CLOSING_TAG.format(v)
return l
def replace_bold_tags(l=''):
line_with_bold = re.match('(.*)__(.*)__(.*)', l)
if line_with_bold:
return line_with_bold.group(1) + \
STRONG.format(line_with_bold.group(2)) + line_with_bold.group(3)
return l
def replace_italic_tags(l=''):
line_with_ital = re.match('(.*)_(.*)_(.*)', l)
if line_with_ital:
return line_with_ital.group(1) + \
ITALIC.format(line_with_ital.group(2)) + line_with_ital.group(3)
return l
def apply_p_tag_if_no_tag(l=''):
return l if re.match('<h|<ul|<p|<li', l) else PARAGRAPH.format(l)
def check_if_list_item(l=''):
list_item = re.match(r'\* (.*)', l)
if list_item:
return LIST_ITEM.format(list_item.group(1))
return False
def is_last_line(i, _list):
return _list.index(i) == len(_list) - 1
def parse(markdown):
lines = markdown.split('\n')
res = ''
current_list = ''
for i in lines:
line = replace_header_tags(i)
line = replace_bold_tags(line)
line = replace_italic_tags(line)
list_item = check_if_list_item(line)
if list_item:
current_list += list_item
res += U_LIST.format(current_list) if is_last_line(i, lines) else ''
else:
res += U_LIST.format(current_list) if current_list else ''
current_list = ''
res += apply_p_tag_if_no_tag(line)
return res
|
6,093 | 43792a647243b9d667d6d98b62a086d742e8e910 | from datetime import timedelta
from django import template
from django.conf import settings
from django.core.exceptions import ObjectDoesNotExist
from django.core.urlresolvers import reverse
from django.utils import timezone
from api.analysis import *
from api.models import Service
register = template.Library()
# Takes a timdelta object and returns a string indicating how many
# weeks, days, hours it is. Does not round, only truncates!
@register.filter
def td_humanize(diff):
if diff.total_seconds() < 0:
return "Meni jo!"
days = diff.days
if days >= 7:
weeks, days = divmod(days, 7)
result = str(weeks) + " vk"
if days:
result += " " + str(days) + " pv"
return result
elif days:
hours, remainder = divmod(diff.seconds, 3600)
result = str(days) + " pv"
if hours:
result += " " + str(hours) + " h"
return result
else:
hours, remainder = divmod(diff.seconds, 3600)
minutes, seconds = divmod(remainder, 60)
if minutes >= 30:
hours += 1
result = str(hours) + " h"
return result
# Takes a datetime object and returns the difference between now and then
@register.filter
def time_from_now(datetime):
now = timezone.now()
if datetime != "Ei tiedossa":
return td_humanize(datetime - now)
else:
return "Ei tiedossa"
# Check if the given service code is among supported service codes. If it is, return the same code.
# If not, return code "180".
@register.filter
def parse_service_code(service_code):
if Service.objects.filter(service_code=service_code).exists():
return service_code
else:
return "180"
# Returns the service name based on given service code. This is done because somtimes
# service_name is in the wrong language
@register.filter
def get_service_name(service_code):
try:
service = Service.objects.get(service_code=service_code)
except ObjectDoesNotExist:
return "Muu"
return service.service_name
# Check if the feedback really is open or not. Return true if:
# - status == open/moderation
# - detailed_status contains specified substrings
# If ALLOW_HELSINKI_SPECIFIC_FEATURES == False just return basic status
@register.filter
def is_open(feedback):
if settings.ALLOW_HELSINKI_SPECIFIC_FEATURES:
open_strings = ["PUBLIC_WORKS_NEW", "PUBLIC_WORKS_COMPLETED_SCHEDULED_LATER"]
if feedback.status in ["open", "moderation"]:
return True
else:
for string in open_strings:
if string in feedback.detailed_status:
return True
return False
else:
return (feedback.status in ["open", "moderation"])
# Returns the real status string of the feedback
@register.filter
def real_status(feedback):
if is_open(feedback):
return "Avoin"
else:
return "Suljettu"
# If the expected_datetime is empty, return median estimation
@register.filter
def get_expected_datetime(feedback):
if feedback.expected_datetime:
return feedback.expected_datetime
else:
time = calc_fixing_time(feedback.service_code)
if time > 0:
median = timedelta(milliseconds=time)
return (feedback.requested_datetime + median)
else:
return "Ei tiedossa"
# Highlights the active navbar link
@register.simple_tag
def navbar_link_class(request, urls):
if request.path in (reverse(url) for url in urls.split()):
return "active"
return ""
# Checks if the user has already voted this feedback and returns a proper class. Uses session data.
@register.simple_tag
def feedback_vote_icon_status(request, item):
if "vote_id_list" in request.session:
if str(item.id) in request.session["vote_id_list"]:
return "icon_disabled"
return "icon_enabled"
|
6,094 | e7b1ccbcbb81ff02561d858a4db54d49a2aa0f8a | from django import forms
from django.contrib.auth.models import User
from django.contrib.auth.forms import UserCreationForm
from .models import Upload
class DocumentForm(forms.ModelForm):
class Meta:
model = Upload
fields = ('document',)
|
6,095 | cbbb314a3262713f6cb2bb2dd90709d7bf1ca8eb | # i have created this file-hitu
from django.http import HttpResponse
from django.shortcuts import render
from .forms import Sign_Up, Login
from .models import Student
# render is used to create and impot the templates
# render takes first arg = request, 2nd arg = name of the file you want to import, 3rd arg = parameters or variable name
def index(request):
return render(request, 'index.html')
def get_name(request):
# if this is a POST request we need to process the form data
if request.method == 'POST':
# create a form instance and populate it with data from the request:
form = Sign_Up(request.POST)
# check whether it's valid:
if form.is_valid():
firstName = form.cleaned_data['first_name']
lastName = form.cleaned_data['last_name']
email = form.cleaned_data['email']
password = form.cleaned_data['password']
details = Student(first_name=firstName, last_name=lastName, email=email,
password=password) # these are models variable in red
# process the data in form.cleaned_data as required
details.save() # this is used to save all the details
# ...
# redirect to a new URL:
return render(request, 'login/new_index.html', {'form': form})
# if a GET (or any other method) we'll create a blank form
else:
form = Sign_Up()
return render(request, 'login/new_index.html', {'form': form})
def login_name(request):
# if this is a POST request we need to process the form data
if request.method == 'POST':
# create a form instance and populate it with data from the request:
form = Login(request.POST)
# check whether it's valid:
if form.is_valid():
email = form.cleaned_data['email']
password = form.cleaned_data['password']
return render(request, 'login/new_index.html', {'form': form})
# if a GET (or any other method) we'll create a blank form
else:
form = Login()
return render(request, 'login/new_index.html', {'form': form})
|
6,096 | 4a8e8994ec8734664a5965b81da9d146d8504f8d | import weakref
from soma.controller import Controller
from soma.functiontools import SomaPartial
from traits.api import File, Undefined, Instance
class MatlabConfig(Controller):
executable = File(Undefined, output=False,
desc='Full path of the matlab executable')
def load_module(capsul_engine, module_name):
capsul_engine.add_trait('matlab', Instance(MatlabConfig))
capsul_engine.matlab = MatlabConfig()
capsul_engine.matlab.on_trait_change(SomaPartial(update_execution_context,
weakref.proxy(capsul_engine)))
def init_module(capul_engine, module_name, loaded_module):
pass
def update_execution_context(capsul_engine):
if capsul_engine.matlab.executable is not Undefined:
capsul_engine.execution_context.environ['MATLAB_EXECUTABLE'] \
= capsul_engine.matlab.executable
|
6,097 | d549303228e860ae278a5a9497a4a3a68989aeca | from packer.utils import hello_world
|
6,098 | 4c63072b6242507c9b869c7fd38228488fda2771 | """Test that Chopsticks remote processes can launch tunnels."""
from unittest import TestCase
from chopsticks.helpers import output_lines
from chopsticks.tunnel import Local, Docker, RemoteException
from chopsticks.facts import python_version
def ping_docker():
"""Start a docker container and read out its Python version."""
with Docker('unittest-36', image='python:3.6') as tun:
return tun.call(python_version)[:2]
def recursive():
"""Infinite recursion, requiring depth limit to stop."""
with Local() as tun:
tun.call(recursive)
class RecursiveTest(TestCase):
docker_name = 'unittest-36'
def tearDown(self):
ls = output_lines(['docker', 'ps', '-a'])
images = []
for l in ls[1:]:
ws = l.split()
images.append(ws[-1])
assert self.docker_name not in images, \
"Image %r remained running after test" % self.docker_name
def test_python_version(self):
"""We can start a sub-tunnel from within a tunnel."""
with Local() as tun:
res = tun.call(ping_docker)
self.assertEqual(
res,
[3, 6]
)
def test_depth_limit(self):
"""Recursive tunneling is limited by a depth limit."""
with self.assertRaisesRegexp(
RemoteException,
r'.*DepthLimitExceeded: Depth limit of 2 ' +
'exceeded at localhost -> localhost -> localhost'):
recursive()
|
6,099 | 4736f4e06f166b3c3fd8379a2021eb84a34fcbd3 | import socket
import threading
import os
import time
import psutil
import shutil
class server:
def __init__(self):
self.commandSock = socket.socket()
self.commandPort = 8080
self.transferSock = socket.socket()
self.transferPort = 8088
self.chatSock=socket.socket()
self.chatPort=8085
self.host = ''
self.bindsocket()
def bindsocket(self):
self.commandSock.bind((self.host, self.commandPort))
self.transferSock.bind((self.host, self.transferPort))
self.chatSock.bind((self.host,self.chatPort))
self.commandSock.listen(10)
self.transferSock.listen(10)
self.chatSock.listen(10)
self.filename = ""
print ("Waiting for a connection.....")
self.clientTransferSock, self.transferAddr = self.transferSock.accept()
self.clientCommandSock, self.commandAddr = self.commandSock.accept()
self.clientChatSock , self.chatAddr = self.chatSock.accept()
print("Got a transfer connection from %s" % str(self.transferAddr))
print("Got a command connection from %s" % str(self.commandAddr))
print("Got a chat connection from %s" % str(self.chatAddr))
self.sendPartitions()
self.clientCommandSock.send(('Partitions Sent').encode('utf-8'))
print('Partitions Sent!')
def closeServer(self):
self.clientCommandSock.close()
self.clientTransferSock.close()
self.clientChatSock.close()
def dicision(self):
while True:
self.message = (self.clientCommandSock.recv(32)).decode('utf-8')
#(self.message)
if self.message == 'Delete Request':
self.clientCommandSock.send('Delete Request Received'.encode('utf-8'))
self.delete()
elif self.message == 'Copy Request':
self.clientCommandSock.send('Copy Request Received'.encode('utf-8'))
self.copy()
elif self.message == 'Send File Request':
self.clientCommandSock.send('Send File Request Received'.encode('utf-8'))
self.sendFile()
elif self.message == 'Listdir Request':
self.clientCommandSock.send('Listdir Request Received'.encode('utf-8'))
self.listdir()
elif self.message == 'Chat Request':
self.clientCommandSock.send('Chat Request Received'.encode('utf-8'))
self.chat()
elif self.message == 'Mkdir Request':
self.clientCommandSock.send('Mkdir Request Received'.encode('utf-8'))
self.mkdir()
def chat(self):
self.chatfile=open('chatfile.txt','w')
self.message = self.clientChatSock.recv(128).decode('utf-8')
self.chatfile.write(self.message+'\n')
self.chatfile.close()
print(self.message)
def mkdir(self):
self.mkdirPath = self.clientCommandSock.recv(128).decode('utf-8')
try:
os.mkdir(self.mkdirPath)
self.clientCommandSock.send('Directory Made'.encode('utf-8'))
print ('Directory Made Successfully!')
except:
self.clientCommandSock.send('Directory Already Exist'.encode('utf-8'))
print ('Directory Already Exist')
def send(self, directory):
print(directory)
self.filename = directory.split('\\')[len(directory.split('\\')) - 1]
self.filename = self.filename.encode('utf-8')
self.nameSize = len(self.filename)
self.nameSize = str(self.nameSize).encode('utf-8')
self.clientTransferSock.send(self.nameSize)
while (self.clientTransferSock.recv(32)).decode('utf-8') != 'Name Size Received':
print('Waiting for Name Size to deliver...')
time.sleep(1)
else:
print('Name Size Delivered!')
self.clientTransferSock.send(self.filename)
while (self.clientTransferSock.recv(32)).decode('utf-8') != 'File Name Received':
print('Waiting for File Name to deliver...')
time.sleep(1)
else:
print('File Name Delivered!')
self.filename = self.filename.decode('utf-8')
# filename = os.path.join(path,filename)
self.fileSize = os.path.getsize(directory)
self.fileSize = str(self.fileSize).encode('utf-8')
self.clientTransferSock.send(self.fileSize)
while (self.clientTransferSock.recv(32)).decode('utf-8') != 'File Size Received':
print('Waiting for File Size to deliver...')
time.sleep(1)
else:
print('File Size Delivered!')
file_to_send = open(directory, 'rb')
lines = file_to_send.read()
self.clientTransferSock.sendall(lines)
file_to_send.close()
while (self.clientTransferSock.recv(32)).decode('utf-8') != 'File Received':
print('Waiting for File to deliver...')
time.sleep(1)
else:
print('File Delivered Successfully!')
def delete(self):
self.deleteDirectory = self.clientCommandSock.recv(128).decode('utf-8')
try:
os.remove(self.deleteDirectory)
self.clientCommandSock.send('File Deleted'.encode('utf-8'))
print ('Delete successfully!')
except:
self.clientCommandSock.send('File Not Found'.encode('utf-8'))
print ('File not found!')
def copy(self):
self.pathes = (self.clientCommandSock.recv(128).decode('utf-8')).split(',')
print(self.pathes)
#shutil.copy2(self.pathes[0], self.pathes[1])
try:
shutil.copy2(self.pathes[0], self.pathes[1])
self.clientCommandSock.send('File Copied'.encode('utf-8'))
print ('Copied successfully!')
except:
self.clientCommandSock.send('File Not Found or Access Denied'.encode('utf-8'))
print ('File Not Found or Access Denied')
def sendFile(self):
self.sendFileDirectory = self.clientCommandSock.recv(128).decode('utf-8')
self.clientCommandSock.send('File Directory Received'.encode('utf-8'))
threading.Thread(target=self.send, args=(self.sendFileDirectory,)).start()
def sendPartitions(self):
self.dps_defualt = psutil.disk_partitions()
fmt_str = "{:<8}"
fmt_str.format("Opts")
self.dps = [chr(x) + ":" for x in range(65, 90) if os.path.exists(chr(x) + ":")]
self.clientCommandSock.send((str(self.dps)).encode('utf-8'))
def listdir(self):
self.listdirPath = self.clientCommandSock.recv(128).decode('utf-8')
self.clientCommandSock.send('Listdir Path Received'.encode('utf-8'))
self.clientCommandSock.send(str(len(str(os.listdir(self.listdirPath)))).encode('utf-8'))
while (self.clientCommandSock.recv(32)).decode('utf-8') != 'Listdir Size Received':
print('Waiting for Listdir Size to deliver...')
time.sleep(1)
else:
print('Listdir Size Delivered!')
self.clientCommandSock.sendall(str(os.listdir(self.listdirPath)).encode('utf-8'))
while (self.clientCommandSock.recv(32)).decode('utf-8') != 'Listdir Received':
print('Waiting for Listdir to deliver...')
time.sleep(1)
else:
print('Listdir Delivered!')
if __name__ == '__main__':
myServer = server()
threading.Thread(target=myServer.dicision()).start()
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.