index int64 | repo_name string | branch_name string | path string | content string | import_graph string |
|---|---|---|---|---|---|
77,667 | gellyfisher/masterproef | refs/heads/master | /results.py | import math
import numpy
from scipy.stats import norm
import time
from prices import *
from basket import *
from rainbow import *
import matplotlib.pyplot as plt
import pandas as pd
import seaborn as sns
def tabel12():
    """Produce the LaTeX rows for tables 1 and 2 of the thesis.

    Runs a Monte Carlo price and an analytic approximation for a basket
    option over a grid of maturities, strikes, drifts, volatilities and
    correlations, and prints the results as LaTeX table rows.

    NOTE(review): indentation was reconstructed from a flattened dump —
    confirm the placement of the ``\\hline``/blank-line appends against
    the original file.  Statement order matters here: the numpy RNG is
    seeded once, so reordering any call that consumes randomness would
    change every result.
    """
    numpy.random.seed(3141592653)
    amount=500000 # number of Monte Carlo simulations
    N=5
    thetas=[0.35,0.25,0.20,0.15,0.05]  # basket weights
    rhoArray = [0.1,0.5]
    volatilitiesArray = [[0.2] * N, [0.5] * N]
    driftsArray = [[0.05] * N,[0.1] * N]
    initials = [100] * N
    TArray=[1,3]
    KArray=[100,90,110]
    result=""
    # Calibrated nu per (T, drift, volatility, rho); filled at K==100 and
    # reused for the other strikes (K==100 is listed first in KArray).
    nus={}
    for T in TArray:
        for K in KArray:
            for drifts in driftsArray:
                for volatilities in volatilitiesArray:
                    for rho in rhoArray:
                        # Equicorrelation matrix: rho off-diagonal, 1 on the diagonal.
                        correlations = numpy.ones((N,N))*rho+numpy.diag([1-rho]*N)
                        prob=Probability(correlations,initials,drifts,volatilities)
                        basket=Basket(prob,K,prob.prices,T,thetas)
                        if (K==100):
                            # At-the-money: calibrate nu (tolerance 1e-9) and cache it.
                            simulated,error,nu=calibrate(prob,basket,amount,1e-09)
                            nus[(T,drifts[0],volatilities[0],rho)]=nu
                        else:
                            # Reuse the nu calibrated for the same parameter set at K==100.
                            nu=nus[(T,drifts[0],volatilities[0],rho)]
                            basket.setNu(nu)
                            simulated,error = prob.simulate(basket,drifts[0],T,amount)
                        approx=basket.approximate(0)
                        # One LaTeX row: K, drift, vol, rho, MC price, MC error,
                        # approximation, relative error, calibrated nu.
                        result+=("%d & %.2f & %.1f & %.1f & %6.4f & %.4f & %.4f & %.4f & %.4f \\\\\n")%(K,drifts[0],volatilities[0],rho, simulated,error,approx,abs(simulated-approx)/simulated,nu)
            result+="\\hline\n"
        result+="\n" # so that T=1 and T=3 can easily be separated
    print(result)
def tabel345():
    """Produce the LaTeX rows for tables 3, 4 and 5.

    Compares three basket approximations (calibrated, integral/product and
    gamma) against Monte Carlo, collecting three LaTeX tables: prices,
    absolute errors, and wall-clock timings.

    NOTE(review): ``sum`` and ``format`` shadow Python builtins — harmless
    here but worth renaming.  Indentation reconstructed from a flattened
    dump; verify the ``\\hline`` placement.
    """
    numpy.random.seed(3141592653)
    amount=500000 # number of Monte Carlo simulations
    N=5
    thetas=[0.35,0.25,0.20,0.15,0.05]
    rhoArray = [0.1,0.5]
    volatilitiesArray = [[0.2] * N, [0.5] * N]
    driftsArray = [[0.05] * N,[0.1] * N]
    initials = [100] * N
    TArray=[1,3]
    KArray=[100,90,110]
    sum=0      # accumulated |approx2 - simulated| over all runs
    sumalt=0   # accumulated |approx - simulated| over all runs
    result=""  # the approximations themselves
    result2="" # absolute errors
    result3="" # timings
    for T in TArray:
        for K in KArray:
            for drifts in driftsArray:
                for volatilities in volatilitiesArray:
                    for rho in rhoArray:
                        correlations = numpy.ones((N,N))*rho+numpy.diag([1-rho]*N)
                        prob=Probability(correlations,initials,drifts,volatilities)
                        basket=Basket(prob,K,prob.prices,T,thetas,method="integral",productmethod=True)
                        basket2=Basket(prob,K,prob.prices,T,thetas,nu=1,method="calibrate")
                        # Time each pricing method in milliseconds.
                        start = time.time_ns()
                        simulated,error = prob.simulate(basket,drifts[0],T,amount)
                        simulatedTime = (time.time_ns() - start) / (10**6) # in ms
                        start = time.time_ns()
                        gamma = basket.approxGamma()
                        gammaTime = (time.time_ns() - start) / (10**6) # in ms
                        start = time.time_ns()
                        approx = basket.approximate(0)
                        approxTime = (time.time_ns() - start) / (10**6) # in ms
                        start = time.time_ns()
                        approx2 = basket2.approximate(0)
                        approx2Time = (time.time_ns() - start) / (10**6) # in ms
                        errors=[abs(a-simulated) for a in [approx2,approx,gamma]]
                        best=min(errors)
                        # Best of the first two methods only (gamma excluded).
                        errors2=[abs(a-simulated) for a in [approx2,approx]]
                        best2=min(errors2)
                        # Build the row format, bold-facing the best value and
                        # starring the better of the two non-gamma methods.
                        format = "%d & %d & %.2f & %.1f & %.1f & %6.4f & %.4f &"
                        for i in range(3):
                            if errors[i]==best:
                                format+=" \\bfseries %.4f "
                            else:
                                format+=" %.4f "
                            if i!=2:
                                if (errors2[i]==best2):
                                    format+=" \\textsuperscript{*} "
                                format+="&"
                        format+="\\\\\n"
                        sum+=abs(approx2-simulated)
                        sumalt+=abs(approx-simulated)
                        result+=format%(T,K,drifts[0],volatilities[0],rho, simulated, error, approx2, approx, gamma)
                        result2+=format%(T,K,drifts[0],volatilities[0],rho, simulated, error, approx2-simulated, approx-simulated, gamma-simulated)
                        result3+=("%d & %d & %.2f & %.1f & %.1f & %.2f \\text{ ms} & %.2f \\text{ ms} & %.2f \\text{ ms} & %.2f \\text{ ms} \\\\\n")%(T,K,drifts[0],volatilities[0],rho, simulatedTime, approx2Time, approxTime, gammaTime)
            result+="\\hline\n"
            result2+="\\hline\n"
            result3+="\\hline\n"
    print(result)
    print()
    print()
    print(result2)
    print()
    print()
    print(result3)
    print()
    print()
    print(sum)
    print(sumalt)
def figuur2(): # we generate random basket options
    """Generate 48 random basket options, price each by Monte Carlo and by
    two approximations, and save box plots of approximation/MC ratios to
    ``../randombasketboxplots.png``.

    Statement order matters: a single numpy seed drives all random draws.
    """
    numpy.random.seed(3141592653)
    amount=100000 # number of Monte Carlo simulations
    N=5
    sum=0     # NOTE(review): shadows the builtin and is never updated here
    sumalt=0
    res=[]    # approx / simulated ratios (integral method)
    res2=[]   # approx2 / simulated ratios (calibrate method)
    for i in range(48):
        thetas = numpy.random.rand(N)
        temp=numpy.random.rand(N,N)
        correlations=0.5*(temp+temp.transpose()) # make the matrix symmetric
        numpy.fill_diagonal(correlations,N); # make sure the matrix is positive definite
        correlations = correlations/N # rescale so that it becomes a correlation matrix
        volatilities = numpy.random.randn(N)
        drifts = [numpy.random.uniform(0,0.5)]*N
        initials = numpy.random.rand(N)*100
        T = numpy.random.uniform(0.1,3)
        K = numpy.random.uniform(0,10)
        prob=Probability(correlations,initials,drifts,volatilities)
        basket=Basket(prob,K,prob.prices,T,thetas,method="integral")
        basket2=Basket(prob,K,prob.prices,T,thetas,nu=1,method="calibrate")
        simulated,error = prob.simulate(basket,drifts[0],T,amount)
        approx = basket.approximate(0)
        approx2 = basket2.approximate(0)
        res.append(approx/simulated)
        res2.append(approx2/simulated)
        # print(approx,approx2,simulated)
        # print("SUMS",sum,sumalt)
        # print()
    # Two stacked box plots sharing the x axis: one per approximation.
    df=pd.DataFrame()
    df['benadering']=res2
    df['alt. benadering']=res
    fig, ax = plt.subplots(2,1,sharex=True)
    bp1=sns.boxplot(df['benadering'],ax=ax[0])
    bp2=sns.boxplot(df['alt. benadering'],ax=ax[1])
    plt.setp(bp1.artists, edgecolor = 'k', facecolor='sandybrown')
    plt.setp(bp2.artists, edgecolor = 'k', facecolor='sandybrown')
    plt.setp(bp1.lines, color='k')
    plt.setp(bp2.lines, color='k')
    ax[0].tick_params(axis='x',labelbottom=True)
    ax[0].set_yticks([])
    ax[1].set_yticks([])
    plt.tight_layout()
    plt.savefig("../randombasketboxplots.png")
def tabel67(): # rainbow results
    """Produce the LaTeX rows for tables 6 and 7 (rainbow options).

    Compares the integral and calibrate approximations of a rainbow option
    against Monte Carlo over a parameter grid; prints prices and absolute
    errors as LaTeX table rows.

    NOTE(review): indentation reconstructed from a flattened dump; verify
    the ``\\hline`` placement against the original file.
    """
    numpy.random.seed(3141592653)
    amount=500000 # number of Monte Carlo simulations
    N=5
    rhoArray = [0.1,0.5]
    volatilitiesArray = [[0.2] * N, [0.5] * N]
    driftsArray = [[0.05] * N,[0.1] * N]
    initials = [100] * N
    TArray=[1,3]
    KArray=[100,90,110]
    sum=0     # NOTE(review): shadows builtin; never updated in this function
    sumalt=0
    result=""  # the approximations themselves
    result2="" # absolute errors
    for T in TArray:
        for K in KArray:
            for drifts in driftsArray:
                for volatilities in volatilitiesArray:
                    for rho in rhoArray:
                        correlations = numpy.ones((N,N))*rho+numpy.diag([1-rho]*N)
                        prob=Probability(correlations,initials,drifts,volatilities)
                        rainbow=Rainbow(prob,K,prob.prices,T,method="integral")
                        rainbow2=Rainbow(prob,K,prob.prices,T,method="calibrate")
                        simulated,error = prob.simulate(rainbow,drifts[0],T,amount)
                        approx = rainbow.approximate(0)
                        approx2 = rainbow2.approximate(0)
                        errors=[abs(a-simulated) for a in [approx2,approx]]
                        best=min(errors)
                        # Build the row format, bold-facing the better method.
                        format = "%d & %d & %.2f & %.1f & %.1f & %6.4f & %.4f &"
                        for i in range(2):
                            if errors[i]==best:
                                format+=" \\bfseries %.4f "
                            else:
                                format+=" %.4f "
                            if i!=1:
                                format+="&"
                        format+="\\\\\n"
                        result+=format%(T,K,drifts[0],volatilities[0],rho, simulated, error, approx2, approx)
                        result2+=format%(T,K,drifts[0],volatilities[0],rho, simulated, error, approx2-simulated, approx-simulated)
            result+="\\hline\n"
            result2+="\\hline\n"
    print(result)
    print()
    print()
    print(result2)
def figuur3(): # we generate random rainbow options
    """Generate 48 random rainbow options, price each by Monte Carlo and two
    approximations, and save box plots of approximation/MC ratios to
    ``../randomrainbowboxplots.png``.

    NOTE(review): ``thetas`` is drawn but never passed to ``Rainbow`` — it
    only advances the RNG stream; removing it would change all later draws.
    """
    numpy.random.seed(3141592653)
    amount=100000 # number of Monte Carlo simulations
    N=5
    sum=0     # NOTE(review): shadows builtin; never updated, printed as 0
    sumalt=0
    res=[]    # approx / simulated ratios (integral method)
    res2=[]   # approx2 / simulated ratios (calibrate method)
    for i in range(48):
        thetas = numpy.random.randn(N)
        temp=numpy.random.rand(N,N)
        correlations=0.5*(temp+temp.transpose()) # make the matrix symmetric
        numpy.fill_diagonal(correlations,N); # make sure the matrix is positive definite
        correlations = correlations/N # rescale so that it becomes a correlation matrix
        volatilities = numpy.random.randn(N)
        drifts = [numpy.random.uniform(0,0.5)]*N
        initials = numpy.random.rand(N)*100
        T = numpy.random.uniform(0.1,3)
        K = numpy.random.uniform(0,10)
        prob=Probability(correlations,initials,drifts,volatilities)
        rainbow=Rainbow(prob,K,prob.prices,T,method="integral")
        rainbow2=Rainbow(prob,K,prob.prices,T,nu=1,method="calibrate")
        simulated,error = prob.simulate(rainbow,drifts[0],T,amount)
        approx = rainbow.approximate(0)
        approx2 = rainbow2.approximate(0)
        res.append(approx/simulated)
        res2.append(approx2/simulated)
    # Two stacked box plots sharing the x axis: one per approximation.
    df=pd.DataFrame()
    df['benadering']=res2
    df['alt. benadering']=res
    fig, ax = plt.subplots(2,1,sharex=True)
    bp1=sns.boxplot(df['benadering'],ax=ax[0])
    bp2=sns.boxplot(df['alt. benadering'],ax=ax[1])
    plt.setp(bp1.artists, edgecolor = 'k', facecolor='sandybrown')
    plt.setp(bp2.artists, edgecolor = 'k', facecolor='sandybrown')
    plt.setp(bp1.lines, color='k')
    plt.setp(bp2.lines, color='k')
    ax[0].tick_params(axis='x',labelbottom=True)
    ax[0].set_yticks([])
    ax[1].set_yticks([])
    plt.tight_layout()
    plt.savefig("../randomrainbowboxplots.png")
    print(sum)
    print(sumalt)
def tabel89():
    """Produce the LaTeX rows for tables 8 and 9.

    Compares the basket "integral" approximation with and without the
    product method on random correlation matrices; prints prices and
    absolute errors, plus signed error sums.

    NOTE(review): indentation reconstructed from a flattened dump; verify
    the ``\\hline`` placement against the original file.
    """
    numpy.random.seed(3141592653)
    amount=500000 # number of Monte Carlo simulations
    N=5
    thetas=[0.35,0.25,0.20,0.15,0.05]
    rhoArray = [0.1,0.5]
    volatilitiesArray = [[0.2] * N, [0.5] * N]
    driftsArray = [[0.05] * N,[0.1] * N]
    initials = [100] * N
    TArray=[1,3]
    KArray=[100,90,110]
    sum=0     # signed error sum for basket2 (no product method)
    sumalt=0  # signed error sum for basket (product method)
    result=""  # the approximations themselves
    result2="" # absolute errors
    for T in TArray:
        for K in KArray:
            for drifts in driftsArray:
                for volatilities in volatilitiesArray:
                    for rho in rhoArray:
                        # Random symmetric, positive-definite matrix rescaled to a
                        # correlation matrix; rho itself is unused in this variant.
                        temp=numpy.random.randn(N,N)
                        correlations=0.5*(temp+temp.transpose()) # make the matrix symmetric
                        numpy.fill_diagonal(correlations,N); # make sure the matrix is positive definite
                        correlations = correlations/N # rescale so that it becomes a correlation matrix
                        # correlations = numpy.ones((N,N))*rho+numpy.diag([1-rho]*N)
                        prob=Probability(correlations,initials,drifts,volatilities)
                        basket2=Basket(prob,K,prob.prices,T,thetas,method="integral",productmethod=False)
                        basket=Basket(prob,K,prob.prices,T,thetas,nu=1,method="integral",productmethod=True)
                        simulated,error = prob.simulate(basket,drifts[0],T,amount)
                        approx = basket.approximate(0)
                        approx2 = basket2.approximate(0)
                        sum+=approx2-simulated
                        sumalt+=approx-simulated
                        errors=[abs(a-simulated) for a in [approx2,approx]]
                        best=min(errors)
                        # Build the row format, bold-facing the better method.
                        format = "%d & %d & %.2f & %.1f & %.1f & %6.4f & %.4f &"
                        for i in range(2):
                            if errors[i]==best:
                                format+=" \\bfseries %.4f "
                            else:
                                format+=" %.4f "
                            if i!=1:
                                format+="&"
                        format+="\\\\\n"
                        result+=format%(T,K,drifts[0],volatilities[0],rho, simulated, error, approx2, approx)
                        result2+=format%(T,K,drifts[0],volatilities[0],rho, simulated, error, approx2-simulated, approx-simulated)
            result+="\\hline\n"
            result2+="\\hline\n"
    print(result)
    print()
    print()
    print(result2)
    print()
    print()
    print(sum)
    print(sumalt)
def figuur1():
    """Produce figure 1: box plots of approximation/MC price ratios over the
    full parameter grid (48 combinations), plus a LaTeX table of the ratios.
    Saves the plot to ``../boxplots.png``.
    """
    numpy.random.seed(3141592653)
    amount=500000 # number of Monte Carlo simulations
    N=5
    thetas=[0.35,0.25,0.20,0.15,0.05]
    rhoArray = [0.1,0.5]
    volatilitiesArray = [[0.2] * N, [0.5] * N]
    driftsArray = [[0.05] * N,[0.1] * N]
    initials = [100] * N
    TArray=[1,3]
    KArray=[100,90,110]
    sum=0     # accumulated approx2/simulated ratio (mean printed at the end)
    sumalt=0  # accumulated approx/simulated ratio
    res=[]    # approx / simulated ratios (integral, no product method)
    res2=[]   # approx2 / simulated ratios (calibrate method)
    result=""  # the approximations themselves
    result2="" # absolute errors (NOTE(review): never filled or printed here)
    for T in TArray:
        for K in KArray:
            for drifts in driftsArray:
                for volatilities in volatilitiesArray:
                    for rho in rhoArray:
                        correlations = numpy.ones((N,N))*rho+numpy.diag([1-rho]*N)
                        prob=Probability(correlations,initials,drifts,volatilities)
                        basket=Basket(prob,K,prob.prices,T,thetas,method="integral",productmethod=False)
                        basket2=Basket(prob,K,prob.prices,T,thetas,nu=1,method="calibrate")
                        simulated,error = prob.simulate(basket,drifts[0],T,amount)
                        approx = basket.approximate(0)
                        approx2 = basket2.approximate(0)
                        sum+=approx2/simulated
                        sumalt+=approx/simulated
                        res.append(approx/simulated)
                        res2.append(approx2/simulated)
                        errors=[abs(1-a/simulated) for a in [approx2,approx]]
                        best=min(errors)
                        # Build the row format, bold-facing the better method.
                        format = "%d & %d & %.2f & %.1f & %.1f & %6.4f & %.4f &"
                        for i in range(2):
                            if errors[i]==best:
                                format+=" \\bfseries %.4f "
                            else:
                                format+=" %.4f "
                            if i!=1:
                                format+="&"
                        format+="\\\\\n"
                        result+=format%(T,K,drifts[0],volatilities[0],rho, simulated, error, approx2/simulated, approx/simulated)
            result+="\\hline\n"
    print(result)
    print()
    print()
    print(sum/48) # 0.9418153089164248
    print(sumalt/48) # 1.0774573386623003
    print()
    # Two stacked box plots sharing the x axis: one per approximation.
    df=pd.DataFrame()
    df['benadering']=res2
    df['alt. benadering']=res
    fig, ax = plt.subplots(2,1,sharex=True)
    bp1=sns.boxplot(df['benadering'],ax=ax[0])
    bp2=sns.boxplot(df['alt. benadering'],ax=ax[1])
    plt.setp(bp1.artists, edgecolor = 'k', facecolor='sandybrown')
    plt.setp(bp2.artists, edgecolor = 'k', facecolor='sandybrown')
    plt.setp(bp1.lines, color='k')
    plt.setp(bp2.lines, color='k')
    ax[0].tick_params(axis='x',labelbottom=True)
    ax[0].set_yticks([])
    ax[1].set_yticks([])
    plt.tight_layout()
    plt.savefig("../boxplots.png")
# Script entry point: uncomment the experiment to run.
# NOTE(review): the commented names tabel123/tabel45 do not match the
# functions defined above (tabel12/tabel345) — likely stale after a rename.
if __name__=="__main__":
    # tabel123()
    # tabel45()
    figuur2()
    # tabel67()
    # figuur3()
    # tabel89() # compare between product method and no product method
    # figuur1()
| {"/tests.py": ["/prices.py", "/basket.py", "/exchange.py", "/option.py", "/rainbow.py"], "/rainbow.py": ["/prices.py", "/basket.py"], "/basket.py": ["/prices.py", "/option.py"], "/option.py": ["/prices.py"], "/results.py": ["/prices.py", "/basket.py", "/rainbow.py"]} |
77,668 | gellyfisher/masterproef | refs/heads/master | /ceo.py | import math
import numpy
from scipy.stats import norm
class CEO:
    """Compound exchange option (CEO): the right to exchange one call option
    for another at maturity, i.e. payoff max(0, C1(T) - C2(T)).

    Three pricing approximations are provided (`approximate`, `approximate2`,
    `approximate3`), all based on lognormal approximations of the two call
    prices.

    NOTE(review): `approximate3` references `Probability` and `Exchange`,
    and `approximate2` uses `self.prob.motions` — but this module only
    imports math/numpy/norm, so `approximate3` raises NameError as written.
    An import from the prices/exchange modules appears to be missing.
    """

    def __init__(self,prob,call1,call2):
        self.call1 = call1
        self.call2 = call2
        # Maturity of the compound option is taken from the first call.
        self.maturity = call1.maturity
        self.prob = prob
        self.nu = 1  # volatility scaling factor used by `approximate`

    def payoff(self):
        """Terminal payoff: positive part of call1's payoff minus call2's."""
        return max(0,self.call1.payoff()-self.call2.payoff())
        # return max(0,self.call1.blackScholes(T)-self.call2.blackScholes(T))

    def approximate3(self,t):
        """Price by mapping both calls to lognormal assets and pricing the
        resulting exchange option with the Margrabe formula.

        The effective volatility of each call is sigma_i / (1 + k_i), where
        k_i measures the gap between the call price and its delta-value.
        """
        r = self.call1.price.drift  # NOTE(review): assigned but unused here
        sigma1 = self.call1.price.volatility
        sigma2 = self.call2.price.volatility
        delta1 = self.call1.partialDerivative(0,self.call1.price)
        delta2 = self.call2.partialDerivative(0,self.call2.price)
        k1 = (self.call1.approximate(0)/(self.call1.price.approximate(t) * delta1))-1
        k2 = (self.call2.approximate(0)/(self.call2.price.approximate(t) * delta2))-1
        rho = self.prob.getCorrelation(self.call1.price,self.call2.price)
        correlations = numpy.matrix([[1,rho],[rho,1]],dtype='float64')
        volatilities = [sigma1/(1+k1),sigma2/(1+k2)]
        drifts = [0.04]*2  # NOTE(review): hard-coded drift — confirm intent
        initials = [self.call1.approximate(0),self.call2.approximate(0)]
        newprob=Probability(correlations,initials,drifts,volatilities)
        xchg=Exchange(newprob,newprob.prices[1],newprob.prices[0],self.maturity)
        return xchg.approximate(t)

    def approximate2(self,t):
        """Price by Monte Carlo over shifted lognormal approximations of the
        two call prices (100000 samples), discounting the exchange payoff."""
        amount=100000
        total=0
        T=self.maturity
        for i in range(amount):
            # Draw a fresh joint sample of the underlying prices.
            self.prob.samplePrices(0)
            r = self.call1.price.drift
            sigma1 = self.call1.price.volatility
            sigma2 = self.call2.price.volatility
            delta1 = self.call1.partialDerivative(0,self.call1.price)
            delta2 = self.call2.partialDerivative(0,self.call2.price)
            gamma1 = self.call1.gamma(0)
            gamma2 = self.call2.gamma(0)
            # Volatility adjustment from the calls' gamma (convexity) terms.
            c1 = (sigma1 * self.call1.price.approximate(t) * gamma1)/delta1
            c2 = (sigma2 * self.call2.price.approximate(t) * gamma2)/delta2
            sigmat1 = sigma1 + c1
            sigmat2 = sigma2 + c2
            # Drift-corrected Brownian samples.
            W1Ster = self.prob.motions[0].getSampled()-0.5*sigmat1*t
            W2Ster = self.prob.motions[1].getSampled()-0.5*sigmat2*t
            k1 = (self.call1.approximate(0)/(self.call1.price.approximate(t) * delta1))-1
            k2 = (self.call2.approximate(0)/(self.call2.price.approximate(t) * delta2))-1
            # Shifted-lognormal terminal values of the two call prices.
            U1 = self.call1.approximate(0) * math.exp(r*T) * (math.exp(sigmat1 *W1Ster)+k1)/(1+k1)
            U2 = self.call2.approximate(0) * math.exp(r*T) * (math.exp(sigmat2 *W2Ster)+k2)/(1+k2)
            total+=math.exp(-r*T)*max(U1-U2,0)
        return total/amount

    def approximate(self,t): # for now we assume t is 0
        """Closed-form approximation: treat C1 and C2 as correlated lognormals
        with effective volatilities sigmat1/sigmat2 and apply a Margrabe-style
        formula.  Degenerate cases (zero call price) are handled explicitly.
        """
        C1=self.call1.approximate(t)
        C2=self.call2.approximate(t)
        P1=0  # NOTE(review): P1, P2 and r are assigned but never used
        P2=0
        r = self.call1.price.drift
        tau = self.maturity-t
        S1 = self.call1.price.approximate(t)
        S2 = self.call2.price.approximate(t)
        sigma1 = self.call1.price.volatility
        sigma2 = self.call2.price.volatility
        rho12 = self.prob.getCorrelation(self.call1.price,self.call2.price)
        pdv1 = self.call1.partialDerivative(t,self.call1.price)
        pdv2 = self.call2.partialDerivative(t,self.call2.price)
        # Effective lognormal volatility of each call; zero if the call is worthless.
        sigmat1 = 0 if C1==0 else self.nu * math.sqrt((pdv1**2) * (sigma1 **2) * (S1**2))/C1
        sigmat2 = 0 if C2==0 else self.nu * math.sqrt((pdv2**2) * (sigma2 **2) * (S2**2))/C2
        if (sigmat1==0 or sigmat2==0):
            # Degenerate: only one source of randomness remains.
            gamma1=sigmat1+sigmat2
        else:
            beta12 = (rho12*sigma1*sigma2*S1*S2*pdv1*pdv2)/(sigmat1*sigmat2*C1*C2)
            gamma1 = math.sqrt(sigmat1**2+sigmat2**2-2*sigmat1*sigmat2*beta12)
        if (C1!=0 and C2!=0):
            d1plus = (math.log(C1/C2) + tau*(gamma1**2)/2)/(gamma1*math.sqrt(tau))
            d1min = (math.log(C1/C2) - tau*(gamma1**2)/2)/(gamma1*math.sqrt(tau))
        else:
            # One call is worthless: the exchange is exercised with certainty.
            d1plus=math.inf
            d1min=math.inf
        E1 = C1*norm.cdf(d1plus) - C2* norm.cdf(d1min)
        E2 = 0
        return E1+E2
77,669 | gellyfisher/masterproef | refs/heads/master | /exchange.py | import math
import numpy
from scipy.stats import norm
class Exchange:
    """European exchange option paying max(0, S2(T) - S1(T)).

    Priced in closed form with the Margrabe formula, which is exact for
    correlated geometric Brownian motions.
    """

    def __init__(self, prob, price1, price2, maturity):
        self.prob = prob
        self.price1 = price1
        self.price2 = price2
        self.maturity = maturity

    def payoff(self):
        """Terminal payoff: positive part of price2 minus price1."""
        spread = self.price2.value - self.price1.value
        return max(0, spread)

    def approximate(self, t):  # actually exact in this case
        """Margrabe price at time t: S2*N(d+) - S1*N(d-)."""
        vol1 = self.price1.volatility
        vol2 = self.price2.volatility
        corr = self.prob.getCorrelation(self.price1, self.price2)
        # Volatility of the log-ratio S2/S1.
        sigmat = math.sqrt(vol1 ** 2 + vol2 ** 2 - 2 * vol1 * vol2 * corr)
        r = self.price1.drift  # kept from the original; not used by the formula
        tau = self.maturity - t
        s1 = self.price1.approximate(t)
        s2 = self.price2.approximate(t)
        log_ratio = math.log(s2 / s1)
        half_var = tau * (sigmat ** 2) / 2
        denom = sigmat * math.sqrt(tau)
        dplus = (log_ratio + half_var) / denom
        dmin = (log_ratio - half_var) / denom
        return s2 * norm.cdf(dplus) - s1 * norm.cdf(dmin)
77,672 | EelcoWiechert/JKU2017_music_events_impact | refs/heads/master | /create_objects_for_d3.py | from statsmodels.tsa.seasonal import seasonal_decompose
import json
import pandas as pd
from collections import OrderedDict
import numpy as np
import matplotlib.pylab as plt
import time
def read_top_artists(a_file):
    """Read the top-artists file and return the artist names as a list.

    Each line of `a_file` holds one artist name; trailing newlines are
    stripped.

    Fixes vs. original: the file handle is now closed deterministically
    (the original iterated an unclosed `open(...)`), and a dead call to
    `read_artists_reversed` — whose result was never used but forced a full
    read of the large LFM-1b artists file — has been removed.
    """
    with open(a_file) as f:
        return [line.rstrip('\n') for line in f]
# Load the generated event file into a pandas DataFrame.
def read_events(a_file):
    """Read the event CSV and return it with fixed column names."""
    column_names = ['id', 'description', 'year', 'month', 'day', 'category']
    return pd.read_csv(a_file, sep=",", names=column_names)
# Read artists file, returns a dictionary of {id: name}.
def read_artists(a_file):
    """Parse the tab-separated artists file into {numpy.int32 id: name}.

    Malformed lines (not exactly two fields) are reported and skipped.
    """
    mapping = {}
    with open(a_file, 'r') as f:
        for raw in f:
            fields = raw.strip().split('\t')
            if len(fields) != 2:
                print('Problem encountered with ', fields)
                continue
            mapping[np.int32(fields[0])] = fields[1]
    return mapping
# Read artists file, returns a dictionary of {name: id}.
def read_artists_reversed(a_file):
    """Parse the tab-separated artists file into {name: numpy.int32 id}.

    Malformed lines (not exactly two fields) are reported and skipped.
    """
    mapping = {}
    with open(a_file, 'r') as f:
        for raw in f:
            fields = raw.strip().split('\t')
            if len(fields) != 2:
                print('Problem encountered with ', fields)
                continue
            mapping[fields[1]] = np.int32(fields[0])
    return mapping
# Read genres of each artist, returns a dict of {name: list_of_genre_ids}.
def read_artist_genre(a_file):
    """Parse the artist-genre file into {artist_name: [int genre ids]}.

    Lines with only a name and no genre ids are skipped.
    """
    genres_by_artist = {}
    with open(a_file, 'r') as f:
        for raw in f:
            fields = raw.strip().split('\t')
            if len(fields) > 1:
                genres_by_artist[fields[0]] = [int(g) for g in fields[1:]]
    return genres_by_artist
# Load the genre-name table (one genre per line, tab-separated, no header).
def read_genre_id(a_file):
    """Return the genre coding table as a headerless DataFrame."""
    return pd.read_csv(a_file, sep="\t", header=None)
def load_country_id(a_file):
    """Load the tab-separated country-id table, indexed by its first column."""
    return pd.read_csv(a_file, sep="\t", index_col=0)
def create_object_list(time_series_dic, LOCATION_OBJECT_LIST):
    """Flatten the nested playcount dict and dump it to a JSON file.

    `time_series_dic` is shaped {year: {country: {genre: {week: playcount}}}}.
    One record per (year, country, genre, week) is emitted, including the
    synthetic 'total_playcount' genre.  `relative_play` is the genre's share
    of the country's total playcount for that week; it falls back to 0 when
    no (or zero) total is recorded.

    Fix vs. original: the bare ``except:`` around the ratio is narrowed to
    the exceptions that expression can actually raise, so unrelated bugs
    (e.g. typos) are no longer silently swallowed.
    """
    object_list = []
    for year, countries in time_series_dic.items():
        for country, genres in countries.items():
            for genre_, week in genres.items():
                for w, playc in week.items():
                    event = {
                        'year': year,
                        'country': country,
                        'genre': genre_,
                        'week': w,
                        'playcount': playc,
                    }
                    try:
                        total = float(time_series_dic[year][country]['total_playcount'][w])
                        event['relative_play'] = float(playc) / total
                    except (KeyError, ZeroDivisionError, TypeError, ValueError):
                        # Missing or zero weekly total: define the share as 0.
                        event['relative_play'] = 0
                    object_list.append(event)
    with open(LOCATION_OBJECT_LIST, 'w') as fp:
        json.dump(object_list, fp, sort_keys=True, indent=4)
def create_event_dic():
    """Aggregate yearly listening-event CSVs (2005-2014) into a nested dict
    {year: {country: {genre: {date_string: playcount}}}}, including a
    synthetic 'total_playcount' genre per country.

    Lines that cannot be parsed (missing artist, bad ints, out-of-range
    country index, ...) are counted and skipped via the broad except.

    NOTE(review): indentation reconstructed from a flattened dump — in
    particular the 'total_playcount' accumulation is placed inside the
    per-genre loop here (harmless since genres is sliced to one entry via
    [:1], but confirm against the original).
    """
    # LOAD DATA
    artist_id = read_artists("data/time_series_analysis/LFM-1b_artists.txt")
    artist_genre = read_artist_genre("data/time_series_analysis/LFM-1b_artist_genres_allmusic.txt")
    genre_coding = read_genre_id("data/time_series_analysis/genres_allmusic.txt")
    country_id = load_country_id("data/time_series_analysis/country_ids_filter_itemLE_10000_userLE_1000.csv")
    files = ["2005", "2006", "2007", "2008", "2009", "2010", "2011", "2012", "2013", "2014"]
    # VARIABLES
    time_series_dic = dict()
    error = 0  # count of unparseable lines
    # FUNCTION
    for file in files:
        print(file)
        print("\nStarted the year " + str(file) + " : " + str(time.ctime()) + "\n")
        with open("../data/itemLE_10000_userLE_1000/y" + file + "-m-d-c-a-pc.csv", 'r') as f:
            next(f)  # skip header line
            for line in f:
                try:
                    # Row layout: year, month, day, country-index, artist-id, playcount.
                    x = line.strip().split('\t')
                    x = list(map(int, x))
                    # get variable names
                    year = x[0]
                    country_code = country_id.iloc[x[3]]['country']
                    # Only the artist's first genre is used ([:1]).
                    genres = list(map(lambda x: genre_coding.iloc[x][0], artist_genre[artist_id[x[4]]][:1]))
                    weeknumber_event = (str(x[0]) + '-' + str(x[1]) + '-' + str(x[2]))
                    # Add year to dic
                    if year not in time_series_dic:
                        time_series_dic[int(year)] = dict()
                    # Add country to dic
                    if country_code not in time_series_dic[year]:
                        time_series_dic[year][country_code] = dict()
                    # Add genre to dic
                    for genre in genres: # find list of genres
                        if genre not in time_series_dic[year][country_code]:
                            time_series_dic[year][country_code][genre] = dict()
                        if genre == 'total_playcount':
                            # Reserved key: never let a real genre named like
                            # the synthetic total collide with it.
                            print('next line')
                            continue
                        # add the playcount under the event's date
                        if weeknumber_event not in time_series_dic[year][country_code][genre]:
                            time_series_dic[year][country_code][genre][weeknumber_event] = 0
                        time_series_dic[year][country_code][genre][weeknumber_event] += x[5]
                        # Maintain the per-country running total as well.
                        if 'total_playcount' not in time_series_dic[year][
                            country_code]:
                            time_series_dic[year][country_code]['total_playcount'] = dict()
                        if weeknumber_event not in \
                            time_series_dic[year][country_code]['total_playcount']:
                            time_series_dic[year][country_code]['total_playcount'][weeknumber_event] = 0
                        time_series_dic[year][country_code]['total_playcount'][weeknumber_event] += x[5]
                except:
                    # Best-effort ingest: count and skip bad rows.
                    error += 1
    print('Number of lines which could not be read: %s' % error)
    return time_series_dic
def time_series_analysis(LOCATION_OBJECT_LIST, SAVE_TIME_SERIES):
    """Decompose the per-(country, genre) relative-play series into trend,
    seasonal and residual components and save everything as JSON records.

    Only the first 5 countries are processed.  Intermediate decomposition
    plots are built and immediately closed (the plt.show() is commented out).

    NOTE(review): `final` is created through the try/except NameError trick
    on its first use — fragile but intentional.  `DataFrame.append` was
    removed in pandas 2.0 and `seasonal_decompose(..., freq=...)` became
    `period=` in statsmodels >= 0.13; this code needs the older libraries.
    """
    needed_columns = ['date', 'country', 'genre', 'original', 'trend', 'seasonal', 'residual']
    with open(LOCATION_OBJECT_LIST) as data_file:
        data = json.load(data_file)
    # LOAD COUNTRY ID
    country_id = load_country_id("data/time_series_analysis/country_ids_filter_itemLE_10000_userLE_1000.csv")
    country_list = country_id['country'].tolist()
    genre_coding = read_genre_id("data/time_series_analysis/genres_allmusic.txt")
    genre_list = genre_coding[0].tolist()
    genre_list.remove("children's")
    # CREATE DATAFRAME AND ADD NEW COLUMNS FOR TIME SERIES ANALYSIS
    df = pd.DataFrame(data)
    df['week'] = pd.to_datetime(df['week'], format='%Y-%m-%d')
    df['date'] = df['week']
    df.set_index('week', inplace=True)
    df['trend'] = 0
    df['seasonal'] = 0
    df['residual'] = 0
    dic = {}  # NOTE(review): filled with empty dicts but never read afterwards
    for country in country_list[:5]:
        dic[country] = {}
        for genre in genre_list:
            dic[country][genre] = {}
            print(country, genre)
            # Chronologically sorted relative-play series for this pair.
            ts_log = df[(df.country == country) & (df.genre == genre)].sort_index(axis=0).filter(
                items=['week', 'relative_play'])
            print(ts_log.head())
            F = df[(df.country == country) & (df.genre == genre)].sort_index(axis=0)
            decomposition = seasonal_decompose(ts_log.values, freq=10)
            trend = decomposition.trend
            seasonal = decomposition.seasonal
            residual = decomposition.resid
            # Re-serialize the dates as y-m-d strings for the JSON output.
            dates = []
            for d in F['date'].tolist():
                dates.append((str(d.year) + "-" + str(d.month) + "-" + str(d.day)))
            try:
                # Append to `final` if it already exists...
                temp = pd.DataFrame(
                    np.column_stack(
                        [dates, F['country'].tolist(), F['genre'].tolist(), ts_log, trend, seasonal, residual]),
                    columns=needed_columns)
                final = final.append(temp, ignore_index=True)
            except:
                # ...otherwise (first iteration, NameError) create it.
                final = pd.DataFrame(
                    np.column_stack(
                        [dates, F['country'].tolist(), F['genre'].tolist(), ts_log, trend, seasonal, residual]),
                    columns=needed_columns)
            # Diagnostic 4-panel decomposition plot (display disabled).
            plt.subplot(411)
            plt.plot(ts_log, label='Original')
            plt.legend(loc='best')
            plt.subplot(412)
            plt.plot(trend, label='Trend')
            plt.legend(loc='best')
            plt.subplot(413)
            plt.plot(seasonal, label='Seasonality')
            plt.legend(loc='best')
            plt.subplot(414)
            plt.plot(residual, label='Residuals')
            plt.legend(loc='best')
            plt.tight_layout()
            # plt.show()
            plt.close()
    # Round-trip through to_json to get plain JSON-serializable records.
    x = final.to_json(orient='records')
    with open(SAVE_TIME_SERIES, 'w') as fp:
        json.dump(json.loads(x), fp, sort_keys=True, indent=4)
def time_series_analysis2(LOCATION_OBJECT_LIST, SAVE_TIME_SERIES):
    """Variant of `time_series_analysis` that decomposes only the absolute
    'total_playcount' series per country (no per-genre loop, no plots).

    NOTE(review): shares the same fragile patterns as its sibling — the
    try/except NameError accumulator for `final`, pandas `DataFrame.append`
    (removed in pandas 2.0), and `seasonal_decompose(freq=...)` (renamed to
    `period=` in statsmodels >= 0.13).  `genre_list` is built and pruned but
    then unused.  Consider merging the two functions with a parameter.
    """
    needed_columns = ['date', 'country', 'genre', 'original', 'trend', 'seasonal', 'residual']
    with open(LOCATION_OBJECT_LIST) as data_file:
        data = json.load(data_file)
    # LOAD COUNTRY ID
    country_id = load_country_id("data/time_series_analysis/country_ids_filter_itemLE_10000_userLE_1000.csv")
    country_list = country_id['country'].tolist()
    genre_coding = read_genre_id("data/time_series_analysis/genres_allmusic.txt")
    genre_list = genre_coding[0].tolist()
    genre_list.remove("children's")
    # CREATE DATAFRAME AND ADD NEW COLUMNS FOR TIME SERIES ANALYSIS
    df = pd.DataFrame(data)
    df['week'] = pd.to_datetime(df['week'], format='%Y-%m-%d')
    df['date'] = df['week']
    df.set_index('week', inplace=True)
    df['trend'] = 0
    df['seasonal'] = 0
    df['residual'] = 0
    dic = {}
    for country in country_list[:5]:
        dic[country] = {}
        for genre in ['total_playcount']:
            dic[country][genre] = {}
            print(country, genre)
            # Chronologically sorted absolute playcount series.
            ts_log = df[(df.country == country) & (df.genre == genre)].sort_index(axis=0).filter(
                items=['week', 'playcount'])
            print(ts_log.head())
            F = df[(df.country == country) & (df.genre == genre)].sort_index(axis=0)
            decomposition = seasonal_decompose(ts_log.values, freq=10)
            trend = decomposition.trend
            seasonal = decomposition.seasonal
            residual = decomposition.resid
            # Re-serialize the dates as y-m-d strings for the JSON output.
            dates = []
            for d in F['date'].tolist():
                dates.append((str(d.year) + "-" + str(d.month) + "-" + str(d.day)))
            try:
                # Append to `final` if it already exists...
                temp = pd.DataFrame(
                    np.column_stack(
                        [dates, F['country'].tolist(), F['genre'].tolist(), ts_log, trend, seasonal, residual]),
                    columns=needed_columns)
                final = final.append(temp, ignore_index=True)
            except:
                # ...otherwise (first iteration, NameError) create it.
                final = pd.DataFrame(
                    np.column_stack(
                        [dates, F['country'].tolist(), F['genre'].tolist(), ts_log, trend, seasonal, residual]),
                    columns=needed_columns)
    # Round-trip through to_json to get plain JSON-serializable records.
    x = final.to_json(orient='records')
    with open(SAVE_TIME_SERIES, 'w') as fp:
        json.dump(json.loads(x), fp, sort_keys=True, indent=4)
77,673 | EelcoWiechert/JKU2017_music_events_impact | refs/heads/master | /linewriter.py | import csv
def write_event(PATH, line):
    """Append `line` (a sequence of fields) as one CSV row to the file at PATH.

    Fix vs. original: the file is opened with ``newline=''`` as the csv
    module requires; without it the writer emits an extra blank line after
    every row on Windows.
    """
    with open(PATH, "a", newline="") as csv_file:
        writer = csv.writer(csv_file, delimiter=',', dialect='excel')
        writer.writerow(line)
77,674 | EelcoWiechert/JKU2017_music_events_impact | refs/heads/master | /collect_google_trend_events.py | import random
from bs4 import BeautifulSoup
import requests
import pandas as pd
from pytrends.request import TrendReq
import calendar
import time
import csv
'''
This script collects events from Google Trends by searching for the search
peak value in time.
'''
# Input: per-country/year Google Trends chart ids (semicolon-separated CSV).
LOCATION_TREND_SOURCE_CSV = '../data/country_cid.csv'
# Output: one detected event per row, appended incrementally.
EVENT_FILE = '../data/events.csv'
def write_event(PATH, line):
    """Append `line` (a sequence of fields) as one CSV row to the file at PATH.

    Fix vs. original: the file is opened with ``newline=''`` as the csv
    module requires; without it the writer emits an extra blank line after
    every row on Windows.  (Duplicate of linewriter.write_event — consider
    importing it instead.)
    """
    with open(PATH, "a", newline="") as csv_file:
        writer = csv.writer(csv_file, delimiter=',', dialect='excel')
        writer.writerow(line)
class get_trend_topics_google(object):
    """Scrape Google Trends "top charts" widgets for trending search topics.

    For every (country, year, cid) row of the source CSV, fetches the
    trending-chart widget HTML and collects the topic titles into a new
    'trending_topics' column.

    NOTE(review): `google_trend_links['trending_topics'][index] = ...` is
    chained indexing — pandas may assign to a temporary copy instead of the
    frame (SettingWithCopyWarning); `.at[index, 'trending_topics']` would be
    the safe form.  The broad except hides all network/parse failures.
    """

    def __init__(self, LOCATION_TREND_SOURCE_CSV):
        # Path of the CSV with cid/country/year rows.
        self.link = LOCATION_TREND_SOURCE_CSV

    def get_data(self):
        """Return the source frame with a 'trending_topics' list per row."""
        google_trend_links = pd.read_csv(self.link, header=0, sep=';', dtype={'year': 'str'})
        google_trend_links = google_trend_links.fillna('')
        google_trend_links['trending_topics'] = ""
        for index, row in google_trend_links.iterrows():
            # construct link
            link = 'https://trends.google.com/trends/topcharts/widget?cid=' + str(row['cid']) + '&geo=' + str(
                row['country']) + '&date=' + str(row['year']) + '&vm=trendingchart&h=413'
            # "https://trends.google.nl/trends/topcharts/widget?cid=zg406&geo=&date=2012&vm=trendingchart&h=413
            try:
                time.sleep(3)  # rate-limit the scraping
                response = requests.post(link)
                soup = BeautifulSoup(response.text, "html.parser")
                print(link)
                result = soup.find_all("div", {"class": "widget-single-item-detailed-title-container"})
                trending_searches = []
                for i in result:
                    trending_searches.append(i.text)
                google_trend_links['trending_topics'][index] = trending_searches
                print(trending_searches)
                if len(trending_searches) == 0:
                    # Fallback: the "chart" widget layout uses different markup.
                    print('2')
                    # construct link
                    link = 'https://trends.google.com/trends/topcharts/widget?cid=' + str(row['cid']) + '&geo=' + str(
                        row['country']) + '&date=' + str(row['year']) + '&vm=chart&h=413'
                    time.sleep(3)
                    response = requests.post(link)
                    soup = BeautifulSoup(response.text, "html.parser")
                    result = soup.find_all("span", {"class": "widget-title-in-list"})
                    trending_searches = []
                    for i in result:
                        trending_searches.append(i.text)
                    google_trend_links['trending_topics'][index] = trending_searches
                    print(trending_searches)
            except:
                # Best-effort scrape: leave this row's topics empty on failure.
                print('Could not find link')
        return google_trend_links
def event_date(topic, search_year):
    """Return the date within `search_year` on which Google search interest
    for `topic` peaked.

    Two-pass search against the Trends API: a year-wide query locates the
    peak month, then a daily query inside that month pins the peak day.
    """
    # Login to Google.
    pytrend = TrendReq()
    year = str(search_year)
    # Coarse pass: interest over the whole year locates the peak month.
    pytrend.build_payload(kw_list=[topic], timeframe=year + '-01-01 ' + year + '-12-30')
    interest_over_time_df = pytrend.interest_over_time()
    peak_month = interest_over_time_df[topic].idxmax(axis=0).month
    last_day = str(calendar.monthrange(int(search_year), peak_month)[1])  # last day of that month
    time.sleep(3)  # rate-limit before the second request
    # Fine pass: daily interest within the peak month.
    month = str(peak_month)
    timeframe = year + '-' + month + '-01 ' + year + '-' + month + '-' + last_day
    pytrend.build_payload(kw_list=[topic], timeframe=timeframe)
    interest_over_time_df = pytrend.interest_over_time()
    return interest_over_time_df[topic].idxmax(axis=0)
'''
START SCRIPT
'''
# Scrape the trending topics per (country, year), then locate each topic's
# peak search day and append it as an event row to EVENT_FILE.
x = pd.DataFrame(get_trend_topics_google(LOCATION_TREND_SOURCE_CSV).get_data())
n = 0  # running event id
for index, row in x.iterrows():
    search_year = row['year']
    search_cat = row['cat']
    for topic in row['trending_topics']:
        # Random delay to avoid being rate-limited by Google Trends.
        time.sleep(random.randint(1, 20))
        trend_date_of_event = event_date(topic, search_year)
        # Event row: id, description, year, month, day, category.
        event = []
        event.append(n)
        event.append(topic)
        event.append(trend_date_of_event.year)
        event.append(trend_date_of_event.month)
        event.append(trend_date_of_event.day)
        event.append(search_cat)
        write_event(EVENT_FILE,event)
        n+=1
        print(event)
77,675 | EelcoWiechert/JKU2017_music_events_impact | refs/heads/master | /time_serie_analysis.py | import pandas as pd
import numpy as np
import datetime
import matplotlib.pyplot as plt
import collections
# PARAMETERS
DISPLAY = 'rel'   # plot relative ('rel') playcounts — presumably vs absolute; confirm usage below
YEAR = '2005'     # year of the listening-event file to analyse
# One distinct plot colour per genre/series.
COLORS = ['black','gray','rosybrown','red','sienna','bisque','gold','olivedrab','darkgreen','mediumspringgreen','lightseagreen','paleturquoise','darkcyan','deepskyblue','royalblue','navy','blue','plum','m','deeppink','crimson']
COUNTRIES = ['US', 'UK', 'RU', 'DE', 'FI', 'SE', 'NL', 'AU']
# Load the generated event file into a pandas DataFrame.
def read_events(a_file):
    """Read the event CSV and return it with fixed column names."""
    column_names = ['id', 'description', 'year', 'month', 'day', 'category']
    return pd.read_csv(a_file, sep=",", names=column_names)
# Read artists file, returns a dictionary of {id: name}.
def read_artists(a_file):
    """Parse the tab-separated artists file into {numpy.int32 id: name}.

    Malformed lines (not exactly two fields) are reported and skipped.
    """
    mapping = {}
    with open(a_file, 'r') as f:
        for raw in f:
            fields = raw.strip().split('\t')
            if len(fields) != 2:
                print('Problem encountered with ', fields)
                continue
            mapping[np.int32(fields[0])] = fields[1]
    return mapping
# Read genres of each artist, returns a dic of {name:list_of_genres}
def read_artist_genre(a_file):
    """Read artist->genre mapping (name<TAB>id<TAB>id...) into {name: [int, ...]}.

    Lines without at least one genre id are skipped.
    """
    mapping = {}
    with open(a_file, 'r') as handle:
        for raw in handle:
            fields = raw.strip().split('\t')
            if len(fields) > 1:
                mapping[fields[0]] = [int(g) for g in fields[1:]]
    return mapping
# Load a pandas dataframe that
def read_genre_id(a_file):
    """Load the tab-separated genre-id table (no header row) as a DataFrame."""
    return pd.read_csv(a_file, sep="\t", header=None)
def load_country_id(a_file):
    """Load the tab-separated country-id table, indexed by its first column."""
    return pd.read_csv(a_file, sep="\t", index_col=0)
# LOAD DATA
# Helper loaders defined above; paths are relative to the repo root.
events = read_events('data/events.csv')
artist_id = read_artists("data/time_series_analysis/LFM-1b_artists.txt")
artist_genre = read_artist_genre("data/time_series_analysis/LFM-1b_artist_genres_allmusic.txt")
genre_coding = read_genre_id("data/time_series_analysis/genres_allmusic.txt")
country_id = load_country_id("data/time_series_analysis/country_ids_filter_itemLE_10000_userLE_1000.csv")
genre_list = genre_coding[0].tolist()

# Build {country: {genre: {iso_week_number: playcount}}} from the 2005 dump.
time_series_dic = {}
error=0
with open("data/time_series_analysis/y2005-m-d-c-a-pc.csv", 'r') as f:
    next(f)  # skip header line
    for line in f:
        try:
            # Columns (by usage below): year, month, day, country index,
            # artist id, playcount.
            x = line.strip().split('\t')
            x = list(map(int, x))
            # get variable names
            country_code = country_id.iloc[x[3]]['country']
            genres = list(map(lambda x: genre_coding.iloc[x][0], artist_genre[artist_id[x[4]]]))
            weeknumber_event = datetime.date(x[0], x[1], x[2]).isocalendar()[1]
            # Add country to dic
            if country_code not in time_series_dic:
                time_series_dic[country_code] = {}
            # add genre
            for genre in genres: # find list of genres
                if genre not in time_series_dic[country_code]:
                    time_series_dic[country_code][genre] = {}
                # add week number of listening event
                if weeknumber_event not in time_series_dic[country_code][genre]:
                    time_series_dic[country_code][genre][weeknumber_event] = 0
                time_series_dic[country_code][genre][weeknumber_event] += x[5]
        except:
            # NOTE(review): bare except counts *any* failure (bad row, unknown
            # artist, missing genre mapping) as an unreadable line.
            error+=1
print('Number of lines which could not be read: %s' % (error))
print(time_series_dic)

# Collect ISO week numbers and descriptions of the events in the chosen YEAR.
events_filtered = events[(events['year'] == int(YEAR))]
weeks_of_events = []
names_of_events = []
for index, row in events_filtered.iterrows():
    weeknumber_event = datetime.date(row['year'], row['month'], row['day']).isocalendar()[1]
    weeks_of_events.append(weeknumber_event)
    names_of_events.append(row['description'])
print(names_of_events)
# One figure per country: plot each genre's weekly playcount (absolute or
# relative to the country's weekly total) with vertical lines at event weeks.
for COUNTRY_OF_INTEREST in COUNTRIES:
    # Total playcount
    # Sum playcounts over all genres per week for the current country.
    total_playcount ={}
    for gen in genre_list:
        try:
            for week, playcount in time_series_dic[COUNTRY_OF_INTEREST][gen].items():
                if week not in total_playcount:
                    total_playcount[week] = playcount
                else:
                    total_playcount[week] += playcount
        except:
            print('genre %s for %s resulted in an error' % (gen, COUNTRY_OF_INTEREST))
    total_sorted = dict(collections.OrderedDict(sorted(total_playcount.items())))
    n = 0
    for gen in genre_list:
        try:
            data = dict(collections.OrderedDict(sorted(time_series_dic[COUNTRY_OF_INTEREST][gen].items())))
            if DISPLAY == 'rel':
                # NOTE(review): zipping this genre's values against the totals
                # assumes both dicts cover the same weeks in the same order —
                # a week missing on one side silently misaligns; verify.
                plt.plot(list(data.keys()), [spec / total for spec, total in zip(list(data.values()), total_sorted.values())], label=gen, c=COLORS[n])
            else:
                # NOTE(review): rebinding the module-level DISPLAY here makes
                # every later label/filename use 'abs' as well.
                DISPLAY = 'abs'
                plt.plot(list(data.keys()), list(data.values()), label=gen, c=COLORS[n])
            n += 1
        except:
            print('genre %s for %s resulted in an error' % (gen, COUNTRY_OF_INTEREST))
    # Mark every event of the selected year with a labelled vertical line.
    m=0
    for e in weeks_of_events:
        plt.axvline(x=e)
        plt.text((e+0.1), 0.2, names_of_events[m], rotation=90, fontsize=4)
        m+=1
    plt.title('Popularity evolution for ' + COUNTRY_OF_INTEREST + ' in ' + YEAR + '(Total playcounts: ' + str(sum(total_sorted.values())) +')')
    plt.xlabel('Week', fontsize=12)
    plt.ylabel('Playcount (' + DISPLAY + ')', fontsize=12)
    plt.grid(True)
    lgd = plt.legend(bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0., fontsize=6)
    plt.savefig('data/' + COUNTRY_OF_INTEREST + '_' + YEAR + '_' + DISPLAY +'.png', dpi=400, bbox_extra_artists=(lgd,), bbox_inches='tight')
    plt.close()
77,676 | EelcoWiechert/JKU2017_music_events_impact | refs/heads/master | /read_users_per_day.py |
'''
This script is used to make a list of object,
where each object comprises a date and the number of unique users on that day
'''
import json
from datetime import datetime
import pandas as pd
import matplotlib.pyplot as plt
# Plot the number of unique listeners per day from the pre-built JSON dump.
# BUG FIX: this file imports `from datetime import datetime` (the class, not
# the module), so `datetime.datetime(...)` raised AttributeError here.
with open('../data/unique_users_per_day.json') as json_fp:
    file = json.load(json_fp)
df = pd.DataFrame(file)
df['date'] = pd.to_datetime(df['date'], format='%Y-%m-%d')
print(df.head())
# 'number_of_unqiue_listeners' (sic) matches the key used when the JSON was
# written below — do not "fix" the spelling on only one side.
df.plot(x='date', y='number_of_unqiue_listeners')
plt.xlim([datetime(2005, 1, 1), datetime(2014, 12, 31)])
plt.title('From 1-1-2005 to 31-12-2014')
plt.suptitle('Number of unique users per day')
plt.savefig('unique_users_per_day_LFM.png', dpi=300)
CREATE_LIST = False # Set to true if you would like to create an object list
CREATE_PLOT = True # Set to true if you would like to plot the object list
print('start script')
# DEFINE VARIABLES
LOCATION_LFM_LE_FILE = '../data/LFM-1b/LFM-1b_LEs.txt' # Location of listening event file
LOCATION_OBJECT_LIST = '../data/unique_users_per_day.json' # where to place the new json file that contains the number of users
users_per_day = dict() # temp dic
users_per_day_final = dict() # temp dic (NOTE(review): never used below)
event_list = [] # object list
if CREATE_LIST:
    # OPEN THE LISTENING EVENT FILE AND SET LINE COUNTER TO 0
    n = 0
    print('create objects')
    print(datetime.now())
    with open(LOCATION_LFM_LE_FILE) as f:
        for line in f:
            n+=1
            # PRINT PROGRESS
            # 1088161692 is the known total line count of the LFM-1b dump.
            if n % 1000000 == 0:
                print(round(n/1088161692, 4))
            # CONVERT TIMESTAMP TO YYYY-MM-DD
            date_raw = datetime.fromtimestamp(int(line.split()[4]))
            # NOTE(review): month/day are not zero-padded, so these keys sort
            # lexicographically, not chronologically.
            date = str(date_raw.year) + '-' + str(date_raw.month) + '-' + str(date_raw.day)
            # WHEN WE ENCOUNTER THE DATE FOR THE FIRST TIME, ADD TO DIC
            if date not in users_per_day:
                users_per_day[date] = []
            # ADD THE DATE AS KEY, {USER_1 : ''} AS VALUE
            # THIS WAY DOUBLE USERS ARE OVERWRITTEN
            users_per_day[date].append(int(line.split()[0]))
    print('Creating objects')
    print(datetime.now())
    n = 0
    for date, users in users_per_day.items():
        n+=1
        print(date)
        if n % 100 == 0:
            print(n)
        # ONLY GET THE UNIQUE IDS IN LIST
        usersUnique = list(set(users))
        # CREATE AN OBJECT
        x = {'date': date, 'number_of_unqiue_listeners': len(usersUnique)}
        # ADD OBJECT TO LIST
        event_list.append(x)
    with open(LOCATION_OBJECT_LIST, 'w') as fp:
        json.dump(event_list, fp, sort_keys=True, indent=4)
if CREATE_PLOT:
    # Re-load the generated object list and plot unique listeners per day.
    with open(LOCATION_OBJECT_LIST) as json_fp:
        file = json.load(json_fp)
    df = pd.DataFrame(file)
    df['date'] = pd.to_datetime(df['date'], format='%Y-%m-%d')
    df.plot(x='date', y='number_of_unqiue_listeners')
    # BUG FIX: `datetime` is the class imported via `from datetime import
    # datetime`; `datetime.datetime(...)` raised AttributeError here.
    plt.xlim([datetime(2005, 1, 1), datetime(2014, 12, 31)])
    plt.title('From 1-1-2005 to 31-12-2014')
    plt.suptitle('Number of unique users per day')
    plt.savefig('../data/unique_users_per_day_LFM.png', dpi=300)
| {"/main.py": ["/create_objects_for_d3.py", "/create_objects_for_d3_artist.py"], "/create_objects_for_d3_artist.py": ["/create_objects_for_d3.py"], "/trendAPI.py": ["/google_trend.py", "/linewriter.py"]} |
77,677 | EelcoWiechert/JKU2017_music_events_impact | refs/heads/master | /main.py | from create_objects_for_d3 import *
from create_objects_for_d3_artist import *
# MAKE OBJECT LIST
# Selects which pipeline runs: genre-level (0) or artist-level (1).
OBJECT_LIST = 1
# 0 = on genre
# 1 = on artists
CREATE_OBJECT_LIST = False
TIME_SERIES = True
LOCATION_OBJECT_LIST = "../data/allCountries_relativePlaycount_Genre.json"
LOCATION_OBJECT_LIST_ARTISTS = "../data/data_rel_playcount_artist.json"
SAVE_TIME_SERIES = "../data/time_series_analysis_artist2.json"
# START SCRIPT
# Step 1 (optional): build the flattened object list consumed by d3.
if CREATE_OBJECT_LIST:
    if OBJECT_LIST == 0:
        # THIS FUNCTION CREATES A DICTIONARY => YEAR, COUNTRY, GENRE, WEEK
        time_series_dic = create_event_dic()
        # CREATE OBJECT LIST
        create_object_list(time_series_dic, LOCATION_OBJECT_LIST)
    if OBJECT_LIST == 1:
        # THIS FUNCTION CREATES A DICTIONARY => YEAR, COUNTRY, ARTIST, WEEK
        time_series_dic = create_event_dic_artists()
        # CREATE OBJECT LIST
        create_object_list_artists(time_series_dic, LOCATION_OBJECT_LIST_ARTISTS)
# IF TRUE, TIME SERIE ANALYSIS WILL BE ADDED TO THE OBJECT LIST
if TIME_SERIES:
    if OBJECT_LIST == 0:
        time_series_analysis2(LOCATION_OBJECT_LIST, SAVE_TIME_SERIES)
    if OBJECT_LIST == 1:
        time_series_analysis_artist(LOCATION_OBJECT_LIST_ARTISTS, SAVE_TIME_SERIES)
# Nothing below this point runs.
exit()
"""
# PARAMETERS
DISPLAY = 'rel'
YEAR = '2005'
COLORS = ['black','gray','rosybrown','red','sienna','bisque','gold','olivedrab','darkgreen','mediumspringgreen','lightseagreen','paleturquoise','darkcyan','deepskyblue','royalblue','navy','blue','plum','m','deeppink','crimson']
COUNTRIES = ['US', 'UK', 'RU', 'DE', 'FI', 'SE', 'NL', 'AU']
events = read_events('data/events.csv')
events_filtered = events[(events['year'] == int(YEAR))]
weeks_of_events = []
names_of_events = []
for index, row in events_filtered.iterrows():
weeknumber_event = datetime.date(row['year'], row['month'], row['day']).isocalendar()[1]
weeks_of_events.append(weeknumber_event)
names_of_events.append(row['description'])
for COUNTRY_OF_INTEREST in COUNTRIES:
# Total playcount
total_playcount ={}
for gen in genre_list:
try:
for week, playcount in time_series_dic[int(YEAR)][COUNTRY_OF_INTEREST][gen].items():
if week not in total_playcount:
total_playcount[week] = playcount
else:
total_playcount[week] += playcount
except:
print('genre %s for %s resulted in an error' % (gen, COUNTRY_OF_INTEREST))
total_sorted = dict(collections.OrderedDict(sorted(total_playcount.items())))
n = 0
for gen in genre_list:
try:
data = dict(collections.OrderedDict(sorted(time_series_dic[COUNTRY_OF_INTEREST][gen].items())))
if DISPLAY == 'rel':
plt.plot(list(data.keys()), [spec / total for spec, total in zip(list(data.values()), total_sorted.values())], label=gen, c=COLORS[n])
else:
DISPLAY = 'abs'
plt.plot(list(data.keys()), list(data.values()), label=gen, c=COLORS[n])
n += 1
except:
print('genre %s for %s resulted in an error' % (gen, COUNTRY_OF_INTEREST))
m=0
for e in weeks_of_events:
plt.axvline(x=e)
plt.text((e+0.1), 0.2, names_of_events[m], rotation=90, fontsize=4)
m+=1
plt.title('Popularity evolution for ' + COUNTRY_OF_INTEREST + ' in ' + YEAR + '(Total playcounts: ' + str(sum(total_sorted.values())) +')')
plt.xlabel('Week', fontsize=8)
plt.ylabel('Playcount (' + DISPLAY + ')', fontsize=8)
plt.grid(True)
lgd = plt.legend(bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0., fontsize=6)
plt.savefig('data/' + COUNTRY_OF_INTEREST + '_' + YEAR + '_' + DISPLAY +'.png', dpi=400, bbox_extra_artists=(lgd,), bbox_inches='tight')
plt.close()
""" | {"/main.py": ["/create_objects_for_d3.py", "/create_objects_for_d3_artist.py"], "/create_objects_for_d3_artist.py": ["/create_objects_for_d3.py"], "/trendAPI.py": ["/google_trend.py", "/linewriter.py"]} |
77,678 | EelcoWiechert/JKU2017_music_events_impact | refs/heads/master | /create_objects_for_d3_artist.py | from create_objects_for_d3 import *
def create_event_dic_artists():
    """Build {year: {country: {artist: {date: playcount}}}} for a fixed
    whitelist of top-20 artist ids from the yearly per-day dump files.

    Also accumulates a per-country 'total_playcount' entry per date so callers
    can later compute relative playcounts.
    """
    # LOAD DATA
    artist_id = read_artists("../data/LFM-1b_artists.txt")
    country_id = load_country_id("../data/country_ids_filter_itemLE_10000_userLE_1000.csv")
    # VARIABLES
    time_series_dic = dict()
    error = 0
    files = ["2005", "2006", "2007", "2008", "2009", "2010", "2011", "2012", "2013", "2014"]
    # READ THE FILES AS CREATED BY MARKUS LINE BY LINE
    for file in files:
        print(file)
        print("\nStarted the year " + str(file) + " : " + str(time.ctime()) + "\n")
        with open("../data/itemLE_10000_userLE_1000/y" + file + "-m-d-c-a-pc.csv", 'r') as f:
            next(f)  # skip header line
            for line in f:
                try:
                    # Columns (by usage): year, month, day, country index,
                    # artist id, playcount.
                    x = line.strip().split('\t')
                    x = list(map(int, x))
                    # SAVE THE INFORMATION FROM THE FILE PER LINE INTO VARIABLES
                    year = x[0]
                    country_code = country_id.iloc[x[3]]['country']
                    artist = artist_id[x[4]]
                    date_event = (str(x[0]) + '-' + str(x[1]) + '-' + str(x[2]))
                    # AS WE ARE ONLY INTERESTED IN A COUPLE OF ARTISTS (TOP 20), SKIP THE OTERS
                    if x[4] not in {1602, 54, 761458, 320, 153, 55, 4115, 2966994, 27, 470, 283, 16, 137, 140, 245, 99, 2893933, 135, 402, 1648, 172}:
                        continue
                    # CREATE A DICTIONARY {year:{country:{artist:{date}}}}
                    # Add year to dic
                    if year not in time_series_dic:
                        time_series_dic[int(year)] = dict()
                    # Add country to dic
                    if country_code not in time_series_dic[year]:
                        time_series_dic[year][country_code] = dict()
                    # Add artist to dic
                    if artist not in time_series_dic[year][country_code]:
                        time_series_dic[year][country_code][artist] = dict()
                    # add week number of listening event
                    if date_event not in time_series_dic[year][country_code][artist]:
                        time_series_dic[year][country_code][artist][date_event] = 0
                    time_series_dic[year][country_code][artist][date_event] += x[5]
                    # CALCULATE THE TOTAL NUMBER OF SONGS PLAYED IN ORDER TO CALCULATE
                    # THE RELATIVE NUMBER OF SONGS PLAYED
                    if 'total_playcount' not in time_series_dic[year][country_code]:
                        time_series_dic[year][country_code]['total_playcount'] = dict()
                    if date_event not in time_series_dic[year][country_code]['total_playcount']:
                        time_series_dic[year][country_code]['total_playcount'][date_event] = 0
                    time_series_dic[year][country_code]['total_playcount'][date_event] += x[5]
                except:
                    # NOTE(review): bare except treats any malformed line (or
                    # unknown artist id) as unreadable.
                    error += 1
    print('Number of lines which could not be read: %s' % (error))
    return time_series_dic
def create_object_list_artists(time_series_dic, LOCATION_OBJECT_LIST_ARTISTS):
    """Flatten {year: {country: {artist: {date: playcount}}}} into a list of
    JSON-serializable event objects and write it to disk.

    Each object carries year, country, artist, the date key (stored under
    'week'), the raw playcount, and the playcount relative to the country's
    'total_playcount' bookkeeping entry (0 when that cannot be computed).
    """
    object_list = []
    for year, per_country in time_series_dic.items():
        for country, per_artist in per_country.items():
            for artist_name, per_date in per_artist.items():
                # The bookkeeping entry is not an artist; skip it.
                if artist_name == 'total_playcount':
                    continue
                for day, playcount in per_date.items():
                    try:
                        total = float(per_artist['total_playcount'][day])
                        relative = float(playcount) / total
                    except:
                        relative = 0
                    object_list.append({
                        'year': year,
                        'country': country,
                        'artist': artist_name,
                        'week': day,
                        'playcount': playcount,
                        'relative_play': relative,
                    })
    with open(LOCATION_OBJECT_LIST_ARTISTS, 'w') as fp:
        json.dump(object_list, fp, sort_keys=True, indent=4)
def time_series_analysis_artist(LOCATION_OBJECT_LIST, SAVE_TIME_SERIES):
    """Run a seasonal decomposition (trend/seasonal/residual) on each top
    artist's relative-playcount series and dump the combined table as JSON.

    Only the first country of the country table is processed (see the loop
    slice below).
    """
    needed_columns = ['date', 'country', 'artist', 'original', 'trend', 'seasonal', 'residual']
    with open(LOCATION_OBJECT_LIST) as data_file:
        data = json.load(data_file)
    # LOAD COUNTRY ID
    country_id = load_country_id("../data/country_ids_filter_itemLE_10000_userLE_1000.csv")
    country_list = country_id['country'].tolist()
    artist_list = read_top_artists('../data/top_artists.txt')
    #artist_list = ["Michael Jackson"]
    # CREATE DATAFRAME AND ADD NEW COLUMNS FOR TIME SERIES ANALYSIS
    df = pd.DataFrame(data)
    df['date'] = pd.to_datetime(df['week'], format='%Y-%m-%d')
    df.set_index('date', inplace=True)
    df['trend'] = 0
    df['seasonal'] = 0
    df['residual'] = 0
    dic = {}
    for country in country_list[:1]: # ONLY THE COUNTRY WITH THE MOST PLAYCOUNTS
        dic[country] = {}
        for artist in artist_list:
            dic[country][artist] = {}
            print(country, artist)
            ts = df[(df.country == country) & (df.artist == artist)].sort_index(axis=0).filter(
                items=['date', 'relative_play'])
            F = df[(df.country == country) & (df.artist == artist)].sort_index(axis=0)
            print('')
            print('---------------------------------------------')
            print('')
            print(F.head())
            print('')
            print('---------------------------------------------')
            print('')
            # NOTE(review): `freq` was renamed `period` in newer statsmodels
            # releases — confirm the pinned version before upgrading.
            decomposition = seasonal_decompose(ts.values, freq=10)
            trend = decomposition.trend
            seasonal = decomposition.seasonal
            residual = decomposition.resid
            dates = []
            for d in F.index.tolist():
                dates.append((str(d.year) + "-" + str(d.month) + "-" + str(d.day)))
            # `final` does not exist on the first pass; the NameError from the
            # append path falls into the except branch which creates it.
            try:
                temp = pd.DataFrame(
                    np.column_stack(
                        [dates, F['country'].tolist(), F['artist'].tolist(), ts, trend, seasonal, residual]),
                    columns=needed_columns)
                # NOTE(review): DataFrame.append was removed in pandas 2.0;
                # this requires an older pandas (pd.concat is the replacement).
                final = final.append(temp, ignore_index=True)
            except:
                final = pd.DataFrame(
                    np.column_stack(
                        [dates, F['country'].tolist(), F['artist'].tolist(), ts, trend, seasonal, residual]),
                    columns=needed_columns)
    x = final.to_json(orient='records')
    with open(SAVE_TIME_SERIES, 'w') as fp:
        json.dump(json.loads(x), fp, sort_keys=True, indent=4)
77,679 | EelcoWiechert/JKU2017_music_events_impact | refs/heads/master | /google_trend.py | from bs4 import BeautifulSoup
import requests
import pandas as pd
from pytrends.request import TrendReq
import calendar
import time
class get_trend_topics_google(object):
    """Scrape the yearly 'top charts' widget from Google Trends for every row
    of the source CSV and attach the scraped topic list to each row."""

    def __init__(self, LOCATION_TREND_SOURCE_CSV):
        # Path to a ';'-separated CSV with (at least) cid, country, year columns.
        self.link = LOCATION_TREND_SOURCE_CSV

    def get_data(self):
        """Return the source DataFrame with an added 'trending_topics' column."""
        google_trend_links = pd.read_csv(self.link, header=0, sep=';', dtype={'year': 'str'})
        google_trend_links = google_trend_links.fillna('')
        google_trend_links['trending_topics'] = ""
        for index, row in google_trend_links.iterrows():
            # construct link
            link = 'https://trends.google.com/trends/topcharts/widget?cid=' + str(row['cid']) + '&geo=' + str(
                row['country']) + '&date=' + str(row['year']) + '&vm=trendingchart&h=413'
            # "https://trends.google.nl/trends/topcharts/widget?cid=zg406&geo=&date=2012&vm=trendingchart&h=413
            try:
                time.sleep(3)  # crude rate limiting against Google
                response = requests.post(link)
                soup = BeautifulSoup(response.text, "html.parser")
                result = soup.find_all("div", {"class": "widget-single-item-detailed-title-container"})
                trending_searches = []
                for i in result:
                    trending_searches.append(i.text)
                # NOTE(review): chained-indexing assignment — pandas may emit
                # SettingWithCopyWarning; .loc[index, 'trending_topics'] is the
                # safe form.
                google_trend_links['trending_topics'][index] = trending_searches
            except:
                print('Could not find link')
        return google_trend_links
def event_date(topic, search_year):
    """Return the date of peak Google-Trends interest for *topic* within
    *search_year*: first find the peak month over the year, then the peak day
    inside that month. The result comes from idxmax on the interest series."""
    # Login to Google.
    pytrend = TrendReq()
    # high level search for week
    pytrend.build_payload(kw_list=[topic], timeframe=str(search_year) + '-01-01 ' + str(search_year) + '-12-30')
    interest_over_time_df = pytrend.interest_over_time()
    last_day = str(calendar.monthrange(int(search_year), interest_over_time_df[topic].idxmax(axis=0).month)[1]) # find last day of month
    time.sleep(3)  # pause between the two Trends requests
    # low level search for day
    pytrend.build_payload(kw_list=[topic], timeframe=str(search_year) + '-' + str(interest_over_time_df[topic].
        idxmax(axis=0).month) + '-01 ' + str(search_year) + '-' + str(interest_over_time_df[topic].idxmax(axis=0).month) + '-' + last_day)
    interest_over_time_df = pytrend.interest_over_time()
    return interest_over_time_df[topic].idxmax(axis=0)
77,680 | EelcoWiechert/JKU2017_music_events_impact | refs/heads/master | /trendAPI.py | from google_trend import *
from linewriter import *
import time
import random
# Input: ';'-separated CSV mapping Google Trends chart ids to country/year.
LOCATION_TREND_SOURCE_CSV = 'data/country_cid.csv'
# Output CSV: one scraped event per row (id, topic, year, month, day, category).
EVENT_FILE = 'data/events.csv'
x = pd.DataFrame(get_trend_topics_google(LOCATION_TREND_SOURCE_CSV).get_data())
n = 0
for index, row in x.iterrows():
    search_year = row['year']
    search_cat = row['cat']
    for topic in row['trending_topics']:
        # Randomized delay to stay under Google's rate limits.
        time.sleep(random.randint(1, 20))
        trend_date_of_event = event_date(topic, search_year)
        event = []
        event.append(n)
        event.append(topic)
        event.append(trend_date_of_event.year)
        event.append(trend_date_of_event.month)
        event.append(trend_date_of_event.day)
        event.append(search_cat)
        write_event(EVENT_FILE,event)
        n+=1
        print(event)
| {"/main.py": ["/create_objects_for_d3.py", "/create_objects_for_d3_artist.py"], "/create_objects_for_d3_artist.py": ["/create_objects_for_d3.py"], "/trendAPI.py": ["/google_trend.py", "/linewriter.py"]} |
77,686 | preetgur/EMS | refs/heads/master | /Employee/views.py | from django.shortcuts import render , redirect,HttpResponse
from Employee.forms import Emp_form
from Employee.models import Employee
# Create your views here.
def emp(request):
    """Create a new employee from the submitted form (POST), or render an
    empty creation form (GET).

    On a save error the bound form is re-rendered; on a validation failure a
    plain error response is returned.
    """
    if request.method == "POST":
        # Bind the form to the submitted data.
        form = Emp_form(request.POST)
        if form.is_valid():
            try:
                form.save()
                return redirect("/show")
            except:
                # Saving can fail (e.g. the unique emp_id constraint);
                # fall through and re-render the bound form below.
                print("some error occurred")
        else:
            # BUG FIX: corrected the user-facing typos ("Employed id alreday
            # exits!").
            return HttpResponse("Employee id already exists!")
    else:
        form = Emp_form()
    return render(request, "Employee/index.html", {"form_html": form})
def show(request):
    """Render the listing page with every employee row."""
    employees = Employee.objects.all()
    return render(request, "Employee/show.html", {"all_emp": employees})
def edit(request, id):
    """Render the edit page pre-filled with the selected employee."""
    selected = Employee.objects.get(id=id)
    return render(request, "Employee/edit.html", {"employee": selected})
def update(request, id):
    """Persist changes to an existing employee; on validation failure the
    edit page is re-rendered."""
    employee = Employee.objects.get(id=id)
    # filled the form with selected employee
    print(employee)
    # Binding with instance= makes save() update the existing row in place.
    form = Emp_form(request.POST, instance=employee)
    if form.is_valid():
        form.save()
        return redirect("/show")
    print("error")
    return render(request, 'Employee/edit.html', {'employee': employee})
def destroy(request, id):
    """Delete the selected employee and return to the listing."""
    Employee.objects.get(id=id).delete()
    return redirect("/show")
def basic(request):
    """Render the static demo template."""
    return render(request, "Employee/basic.html")
77,687 | preetgur/EMS | refs/heads/master | /Employee/models.py | from django.db import models
# Create your models here.
class Employee(models.Model):
    """A single employee record for the EMS app."""

    # Business identifier entered by the user; unique at the database level.
    emp_id = models.CharField(unique=True, max_length=10)
    emp_fname = models.CharField(max_length=20, default="")
    emp_lname = models.CharField(max_length=20, default="")
    emp_email = models.EmailField()
    # Phone number kept as a CharField (text), not an integer field.
    emp_mobile = models.CharField(max_length=12)
    emp_address = models.TextField(max_length=200)

    def __str__(self):
        # Display string used by the admin / debugging prints.
        return "Emp_id : " + self.emp_id
77,688 | preetgur/EMS | refs/heads/master | /Employee/migrations/0002_auto_20200220_1710.py | # Generated by Django 3.0.2 on 2020-02-20 11:40
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: splits Employee.emp_name into emp_fname and
    emp_lname (both defaulting to '')."""

    dependencies = [
        ('Employee', '0001_initial'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='employee',
            name='emp_name',
        ),
        migrations.AddField(
            model_name='employee',
            name='emp_fname',
            field=models.CharField(default='', max_length=20),
        ),
        migrations.AddField(
            model_name='employee',
            name='emp_lname',
            field=models.CharField(default='', max_length=20),
        ),
    ]
| {"/Employee/views.py": ["/Employee/models.py"]} |
77,689 | preetgur/EMS | refs/heads/master | /Employee/urls.py |
from django.urls import path
from Employee import views
# Route table for the Employee app. edit/update/delete take the row's
# auto-generated primary key as <int:id>.
urlpatterns = [
    path("", views.emp),
    path("show", views.show),
    path("edit/<int:id>", views.edit),
    path("update/<int:id>", views.update),
    path("delete/<int:id>", views.destroy),
    path("basic", views.basic),
]
| {"/Employee/views.py": ["/Employee/models.py"]} |
77,690 | preetgur/EMS | refs/heads/master | /Employee/migrations/0001_initial.py | # Generated by Django 3.0.2 on 2020-02-20 11:36
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated initial migration creating the Employee table."""

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Employee',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('emp_id', models.CharField(max_length=10, unique=True)),
                ('emp_name', models.CharField(max_length=100)),
                ('emp_email', models.EmailField(max_length=254)),
                # NOTE(review): max_length is not a valid IntegerField option
                # (newer Django rejects it); the model later switched this
                # field to CharField. Do not edit applied migrations — just be
                # aware when re-running against a newer Django.
                ('emp_mobile', models.IntegerField(max_length=11)),
                ('emp_address', models.TextField(max_length=200)),
            ],
        ),
    ]
| {"/Employee/views.py": ["/Employee/models.py"]} |
77,698 | Yifei-G/multi-functional-device | refs/heads/master | /devices/MDF1.py | from sys import path
path.append("../package")
from packages.printer import Printer
from packages.scanner import Scanner
class MDF1(Scanner, Printer):
    """Entry-level multi-functional device with fixed scan/print resolutions."""

    def __init__(self, serial_number, scan_resolution="360ppi", print_resolution="480ppi"):
        self.serial_number = serial_number
        self.scan_resolution = scan_resolution
        self.print_resolution = print_resolution

    def scan_document(self):
        # Simulated scan: report which device handled it.
        template = "The document has been scanned by device {}"
        print(template.format(self.serial_number))

    def get_scanner_status(self):
        print("The device is:", self.serial_number)
        print("The resolution is:", self.scan_resolution)

    def print_document(self):
        # Simulated print: report which device handled it.
        template = "The document has been printed by device {}"
        print(template.format(self.serial_number))

    def get_printer_status(self):
        print("The device is:", self.serial_number)
        print("The resolution is:", self.print_resolution)
| {"/devices/MDF1.py": ["/packages/printer.py", "/packages/scanner.py"], "/main.py": ["/devices/MDF1.py", "/devices/MDF3.py"], "/devices/MDF3.py": ["/packages/printer.py", "/packages/scanner.py"]} |
77,699 | Yifei-G/multi-functional-device | refs/heads/master | /main.py | from sys import path
from devices.MDF1 import MDF1
from devices.MDF2 import MDF2
from devices.MDF3 import MDF3
separate_line = "*" * 20
print(separate_line)
print("MDF type 1:")
mdf1 = MDF1("GYF2046")
mdf1.print_document()
mdf1.get_printer_status()
mdf1.scan_document()
mdf1.get_scanner_status()
print(separate_line)
print("MDF type 2:")
mdf2_1 = MDF2("GYF3000", "test.pdf")
mdf2_1.print_document()
mdf2_1.get_printer_status()
mdf2_1.scan_document()
mdf2_1.get_scanner_status()
mdf2_2 = MDF2("GYF3000", "exam.pdf")
mdf2_3 = MDF2("GYF3000", "password.txt")
mdf2_2.print_document()
mdf2_2.scan_document()
mdf2_3.print_document()
mdf2_3.get_printer_status()
mdf2_3.scan_document()
mdf2_3.get_scanner_status()
MDF2.get_print_history()
print(separate_line)
print("MDF type 3:")
mdf3_1 = MDF3("GYF3000", "book1.pdf")
mdf3_1.print_document()
mdf3_1.get_printer_status()
mdf3_1.scan_document()
mdf3_1.get_scanner_status()
mdf3_2 = MDF2("GYF3000", "book2.pdf")
mdf3_3 = MDF2("GYF3000", "recipe.txt")
mdf3_2.print_document()
mdf3_2.scan_document()
mdf3_3.print_document()
mdf3_3.get_printer_status()
mdf3_3.scan_document()
mdf3_3.get_scanner_status()
MDF3.get_print_history()
| {"/devices/MDF1.py": ["/packages/printer.py", "/packages/scanner.py"], "/main.py": ["/devices/MDF1.py", "/devices/MDF3.py"], "/devices/MDF3.py": ["/packages/printer.py", "/packages/scanner.py"]} |
77,700 | Yifei-G/multi-functional-device | refs/heads/master | /devices/MDF3.py | from sys import path
path.append("../package")
from packages.printer import Printer
from packages.scanner import Scanner
class MDF3(Scanner, Printer):
    """High-end multi-functional device that records a class-level history of
    every document printed by any MDF3 instance."""

    # Shared across all MDF3 instances.
    printed_documents = []

    def __init__(self, serial_number, document_name, scan_resolution="1080ppi", print_resolution="1080ppi"):
        self.serial_number = serial_number
        self.scan_resolution = scan_resolution
        self.print_resolution = print_resolution
        self.document_name = document_name

    def scan_document(self):
        template = "The document {} has been scanned by device {}"
        print(template.format(self.document_name, self.serial_number))

    def get_scanner_status(self):
        print("The device is:", self.serial_number)
        print("The resolution is:", self.scan_resolution)

    def print_document(self):
        template = "The document {} has been printed by device {}"
        print(template.format(self.document_name, self.serial_number))
        # Record the job in the shared history.
        MDF3.printed_documents.append(self.document_name)

    @classmethod
    def get_print_history(cls):
        """Dump every document name printed so far (class-wide)."""
        print("Printing history:")
        for name in cls.printed_documents:
            print("The file has been printend:", name, end="\n")
        print()

    def get_printer_status(self):
        print("The device is:", self.serial_number)
        print("The resolution is:", self.print_resolution)
| {"/devices/MDF1.py": ["/packages/printer.py", "/packages/scanner.py"], "/main.py": ["/devices/MDF1.py", "/devices/MDF3.py"], "/devices/MDF3.py": ["/packages/printer.py", "/packages/scanner.py"]} |
77,701 | Yifei-G/multi-functional-device | refs/heads/master | /packages/printer.py | import abc
class Printer(abc.ABC):
    """Interface for anything that can print a document."""

    @abc.abstractmethod
    def print_document(self):
        """Send the current document to the printing hardware."""

    @abc.abstractmethod
    def get_printer_status(self):
        """Report identification / resolution details of the printer."""
| {"/devices/MDF1.py": ["/packages/printer.py", "/packages/scanner.py"], "/main.py": ["/devices/MDF1.py", "/devices/MDF3.py"], "/devices/MDF3.py": ["/packages/printer.py", "/packages/scanner.py"]} |
77,702 | Yifei-G/multi-functional-device | refs/heads/master | /packages/scanner.py | import abc
class Scanner(abc.ABC):
    """Interface for anything that can scan a document."""

    @abc.abstractmethod
    def scan_document(self):
        """Digitize the current document."""

    @abc.abstractmethod
    def get_scanner_status(self):
        """Report identification / resolution details of the scanner."""
| {"/devices/MDF1.py": ["/packages/printer.py", "/packages/scanner.py"], "/main.py": ["/devices/MDF1.py", "/devices/MDF3.py"], "/devices/MDF3.py": ["/packages/printer.py", "/packages/scanner.py"]} |
77,703 | dingus9/myui | refs/heads/master | /myui/controllers/example.py | import tornado.web
from myui import BaseHandler
class params:
    """Routing metadata for this controller, read by the plugin loader."""
    route = '/example'
class Handler(BaseHandler):
    """Tornado handler serving the /example page."""

    @tornado.web.removeslash
    def get(self):
        # GET /example -> render the static example template.
        self.render('example.html')
| {"/myui/controllers/example.py": ["/myui/__init__.py"]} |
77,704 | dingus9/myui | refs/heads/master | /myui/__init__.py | import os
from importlib import import_module
import tornado.web
import tornado.httpserver
import tornado.ioloop
import tornado.options
import tornado.wsgi
import logging
import json
access_log = logging.getLogger("tornado.access")
app_log = logging.getLogger("tornado.application")
# gen_log = logging.getLogger("tornado.general")
SETTINGS = None
_application = None
class Application(tornado.web.Application):
    """Thin wrapper that expands a settings dict into keyword arguments."""

    def __init__(self, handlers, settings):
        tornado.web.Application.__init__(self, handlers, **settings)
class BaseHandler(tornado.web.RequestHandler):
    """Common base class for all plugin request handlers."""
    pass
def parse_log_file_option(option):
    """Translate a logger option string into a config dict.

    Recognised forms: 'file://<path>' -> {'type': 'file', 'path': ...},
    'console' -> {'type': 'console'}, 'rsyslog://<uri>' ->
    {'type': 'rsyslog', 'uri': ...}. Raises ValueError for anything else.
    """
    if 'file://' in option:
        return {'type': 'file', 'path': option[len('file://'):]}
    if 'console' in option:
        return {'type': 'console'}
    if 'rsyslog://' in option:
        return {'type': 'rsyslog', 'uri': option[len('rsyslog://'):]}
    raise ValueError('Invalid logger option %s' % option)
def plugin_options():
    """Parse the plugin options from CLI as JSON string into dict and combine into plugin_config."""
    cli_opts = json.loads(tornado.options.options.plugin_opts)
    # (This file targets Python 2 — basestring/iteritems are used elsewhere.)
    for plugin, values in cli_opts.iteritems():
        if plugin not in tornado.options.options.plugin_config:
            # BUG FIX: previously assigned the whole cli_opts dict here
            # instead of this plugin's own option dict (`values` was unused).
            tornado.options.options.plugin_config[plugin] = values
        else:  # Merge and override plugin options in config file.
            for key, value in values.iteritems():
                tornado.options.options.plugin_config[plugin][key] = value
def parse_options():
    """Define and parse all CLI / config-file options.

    Order matters: the CLI is parsed first (final=False) so --config_file can
    point at a config file, which is then parsed as the final pass.
    """
    # General options CLI + Config
    # NOTE(review): help text "webui port" is a copy-paste from the port option.
    tornado.options.define("config_file",
                           default=os.environ.get('MYUI_CONFIG', "/etc/myui.conf"),
                           help="webui port")
    tornado.options.define("app_title", default='My-UI')
    tornado.options.define("plugins", default="",
                           help="comma-separated list of plugins that should be loaded")
    tornado.options.define("plugin_opts",
                           default='{}',
                           help="JSON string of plugin specific options merged over "
                                "plugin_config dict")
    # Merge CLI plugin options into plugin_config after every parse pass.
    tornado.options.add_parse_callback(plugin_options)
    tornado.options.define("port", default="3000", help="webui port")
    tornado.options.define("login_url", default='/login')
    tornado.options.define("template_path", default=os.path.join(os.path.dirname(
        os.path.realpath(__file__)), 'templates'), help="templates directory name")
    tornado.options.define("static_path",
                           default=os.path.join(os.path.dirname(os.path.realpath(__file__)),
                                                'static'),
                           help="static files dirctory name")
    tornado.options.define("cookie_secret", default='this is my secret. you dont know it.')
    tornado.options.define("debug", default=True, help="enable tornado debug mode")
    # Config File Only Options
    tornado.options.parse_command_line(final=False)
    tornado.options.define("plugin_config",
                           default={},
                           help="Dictionary of config options")
    tornado.options.parse_config_file(tornado.options.options.config_file, final=True)
def gen_settings(mode='server'):
    """Generate settings dict from tornado.options.options"""
    # `mode` is currently unused; kept for callers that pass it.
    # Probe whether options were already defined; parse them on first use.
    try:
        tornado.options.options.port
        tornado.options.options.config_file
    except AttributeError:
        parse_options()
    return dict(template_path=tornado.options.options.template_path,
                login_url=tornado.options.options.login_url,
                static_path=tornado.options.options.static_path,
                cookie_secret=tornado.options.options.cookie_secret,
                debug=tornado.options.options.debug,
                plugin_config=tornado.options.options.plugin_config,
                app_title=tornado.options.options.app_title)
def init_models(plugin):
    """Initialize models with settings loaded from tornado settings.
    Typically called from inside the controller of a plugin except during model migrations
    and creations etc."""
    settings = gen_settings()
    # Generating list of models
    models = {}
    cursors = {}
    # Bootstrap plugin model settings if they exist
    try:
        plugin_model_opts = settings['plugin_config'][plugin]
    except KeyError:
        plugin_model_opts = None
    app_log.info('Loading models... ({0})'.format(plugin))
    list_of_models = generate_models(plugin)
    for model in list_of_models:
        models[model] = import_module('{0}.models.{1}'.format(plugin, model))
        try:
            # Initialize model
            cursors[model] = models[model].get_tables(plugin_model_opts)
        except Exception as e:
            # A failing model is logged and skipped; the rest keep loading.
            # (e.message exists on Python 2 exceptions only.)
            app_log.error('Failed to load tables for %s.%s: %s' % (plugin, model, e.message))
    return cursors
def generate_models(plugin):
    """Return the model module names exported by ``<plugin>.models.__all__``."""
    models_pkg = import_module('{0}.models'.format(plugin))
    return list(models_pkg.__all__)
def generate_controllers(plugin):
    """Return the controller module names exported by ``<plugin>.controllers.__all__``."""
    controllers_pkg = import_module('{0}.controllers'.format(plugin))
    return list(controllers_pkg.__all__)
def load_controllers():
    """Import every controller module declared by the configured plugins.

    Returns
    -------
    dict
        Mapping of controller module name -> imported module object.
    """
    app_log.info('Loading controllers...')
    controllers = {}
    # ``plugins`` is a comma-separated list of plugin package names.
    for plugin in tornado.options.options.plugins.split(','):
        for controller in generate_controllers(plugin):
            module_path = '{0}.controllers.{1}'.format(plugin, controller)
            controllers[controller] = import_module(module_path)
            app_log.info('Controller[{0}] loaded'.format(controller))
    return controllers
def create_models():
    """Run model init"""
    # Calls ``create(plugin_config)`` on every model module of every plugin.
    settings = gen_settings()
    for plugin in tornado.options.options.plugins.split(','):
        app_log.info('Running create on models in... (%s)' % plugin)
        for model in generate_models(plugin):
            modelObj = import_module('{0}.models.{1}'.format(plugin, model))
            # NOTE(review): raises KeyError if the plugin has no entry in
            # settings['plugin_config'] -- unlike init_models, no fallback here.
            modelObj.create(settings['plugin_config'][plugin])
def upgrade_models():
    """Run model upgrades"""
    # Calls ``upgrade(plugin_config)`` on every model module of every plugin.
    settings = gen_settings()
    for plugin in tornado.options.options.plugins.split(','):
        app_log.info('Running upgrade on models in... (%s)' % plugin)
        for model in generate_models(plugin):
            modelObj = import_module('{0}.models.{1}'.format(plugin, model))
            # NOTE(review): same KeyError exposure as create_models when the
            # plugin is missing from settings['plugin_config'].
            modelObj.upgrade(settings['plugin_config'][plugin])
def application():
    """Return the module-cached Application wired with all plugin controllers.

    Builds the route table from each controller module's ``params.route``
    (either a single pattern string or an iterable of patterns).
    """
    global _application
    if _application:  # return existing cached Appliction object stored in this module
        return _application
    settings = gen_settings()
    # Check to see if the plugin has uimodules
    try:
        settings['ui_modules'] = {'uimodules': import_module(
            '{0}.uimodules'.format(tornado.options.options.plugins))}
    except ImportError:
        pass
    controllers = load_controllers()
    # Build handlers
    handlers = []
    for controller in controllers:
        c = controllers[controller]
        c.Handler.logger = app_log
        # NOTE(review): ``basestring`` exists only on Python 2; under
        # Python 3 this line raises NameError -- confirm target runtime.
        if isinstance(c.params.route, basestring):
            handlers.append((c.params.route, c.Handler))
        else:
            for uri_string in c.params.route:
                handlers.append((uri_string, c.Handler))
    app_log.info('%s routes loaded for %s controllers' % (len(handlers), len(controllers)))
    _application = Application(handlers, settings)
    return _application
def server():
    """Run dev server"""
    # Blocking call: starts the IOLoop and serves until interrupted.
    http_server = tornado.httpserver.HTTPServer(application())
    http_server.listen(tornado.options.options.port)
    app_log.info('Server up: listening on %s' % tornado.options.options.port)
    tornado.ioloop.IOLoop.instance().start()
def wsgiapp(*params):
    """WSGI entry point: adapt the cached Tornado application for WSGI servers."""
    return tornado.wsgi.WSGIAdapter(application())(*params)
| {"/myui/controllers/example.py": ["/myui/__init__.py"]} |
77,705 | kastnerkyle/deconstructionism | refs/heads/master | /tfdllib.py | from __future__ import print_function
import tensorflow as tf
import numpy as np
import uuid
from scipy import linalg
from scipy.stats import truncnorm
from scipy.misc import factorial
import tensorflow as tf
import shutil
import socket
import os
import re
import copy
import sys
try:
from StringIO import StringIO
except ImportError:
from io import StringIO
import logging
from collections import OrderedDict
# Module-wide logging: INFO to the root handler plus a StringIO handler so
# the accumulated log can later be rendered as HTML (see get_logger()).
logging.basicConfig(level=logging.INFO,
                    format='%(message)s')
logger = logging.getLogger(__name__)
string_f = StringIO()
ch = logging.StreamHandler(string_f)
# Automatically put the HTML break characters on there for html logger
formatter = logging.Formatter('%(message)s<br>')
ch.setFormatter(formatter)
logger.addHandler(ch)
def get_logger():
    """Return the module-level logger (also mirrored into an HTML StringIO)."""
    return logger
# Deeply nested graph construction can exceed Python's default recursion limit.
sys.setrecursionlimit(40000)
# Storage of internal shared
# Global registry of shared parameters, keyed by layer-derived names.
_lib_shared_params = OrderedDict()
def _get_name():
return str(uuid.uuid4())
def _get_shared(name):
    """Look up a previously registered shared variable by name.

    Raises
    ------
    NameError
        If ``name`` was never registered via ``_set_shared`` -- callers use
        this as the "create the variable now" signal.
    """
    # Idiom fix: ``name in dict`` instead of ``name in dict.keys()``.
    if name in _lib_shared_params:
        logger.info("Found name %s in shared parameters" % name)
        return _lib_shared_params[name]
    raise NameError("Name not found in shared params!")
def _set_shared(name, variable):
    """Register ``variable`` under ``name``; a name may only be set once.

    Raises
    ------
    ValueError
        If ``name`` already exists in the shared-parameter registry.
    """
    # Idiom fix: direct membership test instead of ``.keys()``.
    if name in _lib_shared_params:
        raise ValueError("Trying to set key %s which already exists!" % name)
    _lib_shared_params[name] = variable
def get_params_dict():
    """Return the global name -> shared variable registry (an OrderedDict)."""
    return _lib_shared_params
# Module-level default: whether layers apply weight normalization.
weight_norm_default = False
def get_weight_norm_default():
    """Return the module-wide weight normalization default."""
    return weight_norm_default
# Module-level default: whether layers error on shared-parameter name reuse.
strict_mode_default = False
def get_strict_mode_default():
    """Return the module-wide strict-mode default."""
    return strict_mode_default
def print_network(params_dict):
    """Log a human-readable summary (name, shape, parameter count) of params_dict."""
    logger.info("=====================")
    logger.info("Model Summary")
    logger.info("format: {name} {shape}, {parameter_count}")
    logger.info("---------------------")
    for k, v in params_dict.items():
        #strip_name = "_".join(k.split("_")[1:])
        strip_name = k
        shp = tuple(shape(v))
        # Per-tensor count reported in thousands (K).
        k_count = np.prod(shp) / float(1E3)
        logger.info("{} {}, {}K".format(strip_name, shp, k_count))
    params = params_dict.values()
    # Grand total reported in millions (M).
    n_params = sum([np.prod(shape(p)) for p in params])
    logger.info("---------------------")
    logger.info(" ")
    logger.info("Total: {}M".format(n_params / float(1E6)))
    logger.info("=====================")
def shape(x):
    """Return the static shape of tensor ``x`` as a list of ints.

    Unknown (None) dimensions are encoded as -1 so downstream numpy-style
    arithmetic never sees None.
    """
    r = x.get_shape().as_list()
    # Idiom fix: identity comparison against None (``is not None``), not ``!=``.
    r = [ri if ri is not None else -1 for ri in r]
    #if len([ri for ri in r if ri == -1]) > 1:
    #    raise ValueError("Too many None shapes in shape dim {}, should only 1 -1 dim at most".format(r))
    return r
def ndim(x):
    """Return the rank (number of dimensions) of tensor ``x``."""
    return len(shape(x))
def sigmoid(x):
    """Elementwise logistic sigmoid (thin wrapper over tf.sigmoid)."""
    return tf.sigmoid(x)
def tanh(x):
    """Elementwise hyperbolic tangent (thin wrapper over tf.tanh)."""
    return tf.tanh(x)
def np_zeros(shape):
    """Return a float32 numpy array of zeros.

    Parameters
    ----------
    shape : tuple of ints
        Shape of the array to build.

    Returns
    -------
    ndarray
        float32 array of zeros with the requested shape.
    """
    return np.zeros(shape, dtype="float32")
def np_normal(shape, random_state, scale=0.01):
    """Gaussian-initialized float32 array with standard deviation ``scale``.

    ``shape`` is either a plain tuple of ints, or (for convnets) a pair of
    tuples ((n_in_kernels, kw, kh), (n_out_kernels, kw, kh)); in the latter
    case the realized shape is (n_out, n_in, kw, kh).

    Parameters
    ----------
    shape : tuple of ints, or tuple of tuples
    random_state : numpy.random.RandomState
    scale : float, default 0.01

    Returns
    -------
    ndarray of float32 normal samples
    """
    if type(shape[0]) is tuple:
        realized = (shape[1][0], shape[0][0]) + shape[1][1:]
    else:
        realized = shape
    samples = random_state.randn(*realized)
    return (scale * samples).astype("float32")
def np_truncated_normal(shape, random_state, scale=0.075):
    """Float32 samples from a normal truncated at +/- 2 sigma (sigma = scale).

    ``shape`` is either a plain tuple of ints, or (for convnets) a pair of
    tuples ((n_in_kernels, kw, kh), (n_out_kernels, kw, kh)); in the latter
    case the realized shape is (n_out, n_in, kw, kh).

    Parameters
    ----------
    shape : tuple of ints, or tuple of tuples
    random_state : numpy.random.RandomState
    scale : float, default 0.075

    Returns
    -------
    ndarray of float32 truncated-normal samples
    """
    if type(shape[0]) is tuple:
        realized = (shape[1][0], shape[0][0]) + shape[1][1:]
    else:
        realized = shape
    sigma = scale
    # truncnorm takes its bounds in standard-deviation units: +/- 2 sigma.
    lower = -2 * sigma
    upper = 2 * sigma
    mu = 0
    n_total = np.prod(realized)
    draws = truncnorm.rvs(
        (lower - mu) / float(sigma), (upper - mu) / float(sigma),
        loc=mu, scale=sigma, size=n_total, random_state=random_state)
    return draws.reshape(realized).astype("float32")
def np_tanh_fan_normal(shape, random_state, scale=1.):
    """
    Builds a numpy variable filled with random values

    Normal samples with std = scale * sqrt(2 / (fan_in + fan_out)).

    Parameters
    ----------
    shape, tuple of ints or tuple of tuples
        shape of values to initialize
        tuple of ints should be single shape
        tuple of tuples is primarily for convnets and should be of form
        ((n_in_kernels, kernel_width, kernel_height),
         (n_out_kernels, kernel_width, kernel_height))

    random_state, numpy.random.RandomState() object

    scale, float (default 1.)
        default of 1. results in normal random values
        with sqrt(2 / (fan in + fan out)) scale

    Returns
    -------
    initialized_fan, array-like
        float32 array of random values the same size as shape parameter

    References
    ----------
    Understanding the difficulty of training deep feedforward neural networks
        X. Glorot, Y. Bengio
    """
    # The . after the 2 is critical! shape has dtype int...
    if type(shape[0]) is tuple:
        kern_sum = np.prod(shape[0]) + np.prod(shape[1])
        shp = (shape[1][0], shape[0][0]) + shape[1][1:]
    else:
        kern_sum = np.sum(shape)
        shp = shape
    var = scale * np.sqrt(2. / kern_sum)
    # BUG FIX: previously the float64 scalar ``var`` multiplied an
    # already-cast float32 array, promoting the result back to float64 --
    # inconsistent with every other initializer here. Cast the final product.
    return (var * random_state.randn(*shp)).astype("float32")
def np_variance_scaled_uniform(shape, random_state, scale=1.):
    """Uniform init with half-width scale * sqrt(3 / fan_in) (LeCun-style).

    ``shape`` is either a plain tuple of ints, or (for convnets) a pair of
    tuples ((n_in_kernels, kw, kh), (n_out_kernels, kw, kh)); in the latter
    case the realized shape is (n_out, n_in, kw, kh).

    Parameters
    ----------
    shape : tuple of ints, or tuple of tuples
    random_state : numpy.random.RandomState
    scale : float, default 1.

    Returns
    -------
    ndarray of float32 uniform samples in [-bound, bound]

    References
    ----------
    Efficient Backprop, Y. LeCun, L. Bottou, G. Orr, K. Muller
    """
    if type(shape[0]) is tuple:
        realized = (shape[1][0], shape[0][0]) + shape[1][1:]
        kern_sum = np.prod(shape[0])
    else:
        realized = shape
        kern_sum = shape[0]
    # sqrt(3) converts the target std into a uniform half-width.
    bound = scale * np.sqrt(3. / float(kern_sum))
    draws = random_state.uniform(low=-bound, high=bound, size=realized)
    return draws.astype("float32")
def np_glorot_uniform(shape, random_state, scale=1.):
    """Glorot/Xavier uniform init with half-width scale * sqrt(6 / (fan_in + fan_out)).

    Parameters
    ----------
    shape : tuple of ints
    random_state : numpy.random.RandomState
    scale : float, default 1.

    Returns
    -------
    ndarray of float32 uniform samples in [-bound, bound]
    """
    fan_total = sum(shape)
    bound = scale * np.sqrt(6. / float(fan_total))
    draws = random_state.uniform(low=-bound, high=bound, size=shape)
    return draws.astype("float32")
def np_ortho(shape, random_state, scale=1.):
    """
    Builds a numpy variable filled with orthonormal random values

    Parameters
    ----------
    shape, tuple of ints or tuple of tuples
        shape of values to initialize
        tuple of ints should be single shape
        tuple of tuples is primarily for convnets and should be of form
        ((n_in_kernels, kernel_width, kernel_height),
         (n_out_kernels, kernel_width, kernel_height))

    random_state, numpy.random.RandomState() object

    scale, float (default 1.)
        default of 1. results in orthonormal random values scaled by 1.

    Returns
    -------
    initialized_ortho, array-like
        float32 array of random values the same size as shape parameter

    References
    ----------
    Exact solutions to the nonlinear dynamics of learning in deep linear
    neural networks
        A. Saxe, J. McClelland, S. Ganguli
    """
    if type(shape[0]) is tuple:
        shp = (shape[1][0], shape[0][0]) + shape[1][1:]
        # BUG FIX: ``np.prd`` does not exist (AttributeError on this path);
        # trailing dims must be flattened with np.prod before the SVD.
        flat_shp = (shp[0], np.prod(shp[1:]))
    else:
        shp = shape
        flat_shp = shape
    g = random_state.randn(*flat_shp)
    U, S, VT = linalg.svd(g, full_matrices=False)
    res = U if U.shape == flat_shp else VT  # pick the factor with the correct shape
    res = res.reshape(shp)
    return (scale * res).astype("float32")
def make_numpy_biases(bias_dims):
    """Return one float32 zero bias vector per entry of ``bias_dims``."""
    biases = []
    for dim in bias_dims:
        biases.append(np.zeros((dim,)).astype("float32"))
    return biases
def make_numpy_weights(in_dim, out_dims, random_state, init=None,
                       scale="default"):
    """
    Will return as many things as are in the list of out_dims
    You *must* get a list back, even for 1 element return
    blah, = make_weights(...)
    or
    [blah] = make_weights(...)
    """
    # One init function (ff) and scale (fs) chosen per requested output dim.
    ff = [None] * len(out_dims)
    fs = [scale] * len(out_dims)
    for i, out_dim in enumerate(out_dims):
        if init is None:
            # Default policy: orthogonal init for square matrices,
            # variance-scaled uniform otherwise.
            if in_dim == out_dim:
                ff[i] = np_ortho
                fs[i] = 1.
            else:
                ff[i] = np_variance_scaled_uniform
                fs[i] = 1.
        elif init == "ortho":
            if in_dim != out_dim:
                raise ValueError("Unable to use ortho init for non-square matrices!")
            ff[i] = np_ortho
            fs[i] = 1.
        elif init == "glorot_uniform":
            ff[i] = np_glorot_uniform
        elif init == "normal":
            ff[i] = np_normal
            fs[i] = 0.01
        elif init == "truncated_normal":
            ff[i] = np_truncated_normal
            fs[i] = 0.075
        elif init == "embedding_normal":
            # Scale shrinks with embedding fan-in.
            ff[i] = np_truncated_normal
            fs[i] = 1. / np.sqrt(in_dim)
        else:
            raise ValueError("Unknown init type %s" % init)
    ws = []
    for i, out_dim in enumerate(out_dims):
        if fs[i] == "default":
            # Let the chosen init function use its own default scale.
            ws.append(ff[i]((in_dim, out_dim), random_state))
        else:
            ws.append(ff[i]((in_dim, out_dim), random_state, scale=fs[i]))
    return ws
def dot(a, b):
    """Matrix-multiply ``a`` (rank 2 or 3) by rank-2 ``b`` on the last axis."""
    # Generalized dot for nd sequences, assumes last axis is projection
    # b must be rank 2
    a_tup = shape(a)
    b_tup = shape(b)
    if len(a_tup) == 2 and len(b_tup) == 2:
        return tf.matmul(a, b)
    elif len(a_tup) == 3 and len(b_tup) == 2:
        # more generic, supports multiple -1 axes
        return tf.einsum("ijk,kl->ijl", a, b)
        #a_i = tf.reshape(a, [-1, a_tup[-1]])
        #a_n = tf.matmul(a_i, b)
        #a_nf = tf.reshape(a_n, list(a_tup[:-1]) + [b_tup[-1]])
        #return a_nf
    else:
        raise ValueError("Shapes for arguments to dot() are {} and {}, not supported!".format(a_tup, b_tup))
def scan(fn, sequences, outputs_info):
    """Theano-style scan wrapper around tf.scan.

    ``outputs_info`` must have one entry per return value of ``fn``: an
    initial tensor for recurrent outputs, or None for outputs that are
    stacked across steps but not fed back into the next call.
    """
    # Positions of the non-recurrent (None) outputs.
    nonepos = [n for n, o in enumerate(outputs_info) if o is None]
    nonnone = [o for o in outputs_info if o is not None]
    sequences_and_nonnone = sequences + nonnone
    # One symbolic call on the first slice of each sequence, used only to
    # infer shapes/dtypes for zero initializers of the None entries.
    sliced = [s[0] for s in sequences] + nonnone
    inf_ret = fn(*sliced)
    if len(outputs_info) < len(inf_ret):
        raise ValueError("More outputs from `fn` than elements in outputs_info. Expected {} outs, given outputs_info of length {}, but `fn` returns {}. Pass None in outputs_info for returns which don't accumulate".format(len(outputs_info), len(outputs_info), len(inf_ret)))
    initializers = []
    for n in range(len(outputs_info)):
        if outputs_info[n] is not None:
            initializers.append(outputs_info[n])
        else:
            # Zero tensor with the inferred shape/dtype for non-recurrent slots.
            initializers.append(0. * inf_ret[n])
    def wrapwrap(nonepos, initializers):
        type_class = "list" if isinstance(initializers, list) else "tuple"
        def fnwrap(accs, inps):
            # Feed back only the recurrent accumulators; tf.scan still stacks
            # every return value of ``fn`` across time.
            inps_then_accs = inps + [a for n, a in enumerate(accs) if n not in nonepos]
            fn_rets = fn(*inps_then_accs)
            return [fr for fr in fn_rets]
        return fnwrap
    this_fn = wrapwrap(nonepos, initializers)
    r = tf.scan(this_fn, sequences, initializers)
    return r
def Embedding(indices, n_symbols, output_dim, random_state=None,
              init="gaussian",
              strict=None, name=None):
    """
    Symbol embedding lookup of shape (n_symbols, output_dim).

    Last dimension of indices tensor must be 1!!!!
    (2D inputs with a different last dim are auto-expanded with a warning.)
    """
    if name is None:
        name = _get_name()
    if random_state is None:
        raise ValueError("Must pass random_state argument to Embedding")
    name_w = name + "_embedding_w"
    if strict is None:
        strict = get_strict_mode_default()
    if strict:
        cur_defs = get_params_dict()
        if name_w in cur_defs:
            raise ValueError("Name {} already created in params dict!".format(name_w))
    if init != "gaussian":
        raise ValueError("Currently unsupported init type {}".format(init))
    # Reuse the shared table if this name was built before, else create it.
    try:
        vectors = _get_shared(name_w)
    except NameError:
        # NOTE(review): log message says "Linear layer" -- looks like a
        # copy-paste from Linear().
        logger.info("Linear layer {} initialized using init {}".format(name, init))
        vectors_weight = random_state.randn(n_symbols, output_dim).astype("float32")
        vectors = tf.Variable(vectors_weight, trainable=True)
        _set_shared(name_w, vectors)
    ii = tf.cast(indices, "int32")
    shp = shape(ii)
    nd = ndim(ii)
    if shp[-1] != 1:
        if nd < 3:
            logger.info("Embedding input should have last dimension 1, inferring dimension to 1, from shape {} to {}".format(shp, tuple(list(shp) + [1])))
            ii = tf.expand_dims(ii, axis=-1)
        else:
            raise ValueError("Embedding layer input must have last dimension 1 for input size > 3D, got {}".format(shp))
    shp = shape(ii)
    nd = len(shp)
    lu = tf.nn.embedding_lookup(vectors, ii)
    # Drop the singleton lookup axis so the output keeps the input batch dims.
    if nd == 3:
        lu = lu[:, :, 0]
    elif nd == 2:
        lu = lu[:, 0]
    else:
        raise ValueError("Input dimension not handled, Embedding input shape {} results in shape {}".format(shp, shape(lu)))
    return lu
def Linear(list_of_inputs, list_of_input_dims, output_dim, random_state=None,
           name=None, init=None, scale="default", biases=True, bias_offset=0.,
           strict=None):
    """Affine layer: concat(list_of_inputs, last axis) @ W (+ b).

    Parameters
    ----------
    list_of_inputs : list of tensors, concatenated along their last axis
    list_of_input_dims : list of ints, last-axis dim of each input
    output_dim : int
    init : None, str init name, or a (weight_array, bias_array) tuple of
        pre-made values (e.g. from a parent layer like LSTMCell)
    scale : scale forwarded to make_numpy_weights ("default" uses per-init default)
    biases : bool, include an additive bias
    bias_offset : float added to the initial bias values
    strict : bool or None; when True, error if the shared names already exist

    Returns
    -------
    Tensor named ``<name>_linear_out``. Parameters are registered in, and
    reused from, the shared-parameter registry under ``<name>_linear_w/_b``.
    """
    if random_state is None:
        raise ValueError("Must pass random_state to Linear")
    nd = ndim(list_of_inputs[0])
    input_var = tf.concat(list_of_inputs, axis=nd - 1)
    input_dim = sum(list_of_input_dims)
    if init is None or type(init) is str:
        logger.info("Linear layer {} initialized using init {}".format(name, init))
        weight_values, = make_numpy_weights(input_dim, [output_dim],
                                            random_state=random_state,
                                            init=init, scale=scale)
    else:
        # rely on announcement from parent class
        weight_values=init[0]
    if name is None:
        name = _get_name()
    name_w = name + "_linear_w"
    name_b = name + "_linear_b"
    name_out = name + "_linear_out"
    if strict is None:
        strict = get_strict_mode_default()
    if strict:
        cur_defs = get_params_dict()
        if name_w in cur_defs:
            raise ValueError("Name {} already created in params dict!".format(name_w))
        if name_b in cur_defs:
            raise ValueError("Name {} already created in params dict!".format(name_b))
    # Reuse shared weight if already registered, else create and register it.
    try:
        weight = _get_shared(name_w)
    except NameError:
        weight = tf.Variable(weight_values, trainable=True, name=name_w)
        _set_shared(name_w, weight)
    out = dot(input_var, weight)
    if biases:
        if (init is None) or (type(init) is str):
            b, = make_numpy_biases([output_dim])
        else:
            b = init[1]
        b = b + bias_offset
        # NOTE(review): ``biases`` here shadows the boolean parameter --
        # harmless (the flag is not read again) but confusing.
        try:
            biases = _get_shared(name_b)
        except NameError:
            biases = tf.Variable(b, trainable=True, name=name_b)
            _set_shared(name_b, biases)
        out = out + biases
    out = tf.identity(out, name=name_out)
    return out
def SimpleRNNCell(list_of_inputs, list_of_input_dims, previous_hidden,
                  num_units, output_dim, random_state=None,
                  name=None, init=None, scale="default", strict=None):
    """Single step of a vanilla (Elman) RNN with tanh nonlinearity.

    Returns
    -------
    (output, (hidden,))
        ``output`` is the projected value for following layers; the tuple is
        the recurrent state to feed into the next call.
    """
    # output is the thing to use in following layers, state is a tuple that contains things to feed into the next call
    if random_state is None:
        raise ValueError("Must pass random_state")
    if name is None:
        name = _get_name()
    hidden_dim = num_units
    # Input -> hidden projection (carries the bias for the recurrence too).
    inp_to_h = Linear(list_of_inputs, list_of_input_dims, hidden_dim, random_state=random_state,
                      name=name + "_simple_rnn_inp_to_h",
                      init=init, strict=strict)
    # Hidden -> hidden recurrence; bias omitted (inp_to_h already has one).
    h_to_h = Linear([previous_hidden], [hidden_dim], hidden_dim, random_state=random_state,
                    name=name + "_simple_rnn_h_to_h", biases=False,
                    init=init, strict=strict)
    h = tf.nn.tanh(inp_to_h + h_to_h)
    h_to_out = Linear([h], [hidden_dim], output_dim, random_state=random_state,
                      name=name + "_simple_rnn_h_to_out",
                      init=init, strict=strict)
    return h_to_out, (h,)
def LSTMCell(list_of_inputs, list_of_input_dims,
             previous_hidden, previous_cell,
             num_units,
             output_dim=None,
             input_mask=None,
             random_state=None,
             name=None, init=None, scale="default",
             forget_bias=1., strict=None):
    """Single step of an LSTM with fused i/j/f/o projection.

    Parameters
    ----------
    list_of_inputs / list_of_input_dims : step inputs and their last-axis dims
    previous_hidden, previous_cell : recurrent state from the prior step
    num_units : int, hidden size
    output_dim : optional int; when given, an extra Linear maps h to this size
    input_mask : optional (batch,) tensor; where 0, state is carried forward
    forget_bias : float added to the forget gate pre-activation
    scale : unused here; kept for signature compatibility with sibling cells

    Returns
    -------
    (final_out, (h, c))
        ``final_out`` is h (or its projection when ``output_dim`` is set);
        the tuple is the state to feed into the next call.
    """
    # output is the thing to use in following layers, state is a tuple that feeds into the next call
    if random_state is None:
        raise ValueError("Must pass random_state")
    if name is None:
        name = _get_name()
    input_dim = sum(list_of_input_dims)
    # All four gates computed by one projection of width 4 * num_units.
    hidden_dim = 4 * num_units
    if init is None or init == "truncated_normal":
        inp_init = "truncated_normal"
        h_init = "truncated_normal"
        out_init = "truncated_normal"
    elif init == "glorot_uniform":
        inp_init = "glorot_uniform"
        h_init = "glorot_uniform"
        out_init = "glorot_uniform"
    elif init == "normal":
        inp_init = "normal"
        h_init = "normal"
        out_init = "normal"
    else:
        raise ValueError("Unknown init argument {}".format(init))
    comb_w_np, = make_numpy_weights(input_dim + num_units, [hidden_dim],
                                    random_state=random_state,
                                    init=inp_init)
    comb_b_np, = make_numpy_biases([hidden_dim])
    logger.info("LSTMCell {} input to hidden initialized using init {}".format(name, inp_init))
    logger.info("LSTMCell {} hidden to hidden initialized using init {}".format(name, h_init))
    lstm_proj = Linear(list_of_inputs + [previous_hidden], list_of_input_dims + [hidden_dim],
                       hidden_dim,
                       random_state=random_state,
                       name=name + "_lstm_proj",
                       init=(comb_w_np, comb_b_np), strict=strict)
    i, j, f, o = tf.split(lstm_proj, 4, axis=-1)
    c = tf.sigmoid(f + forget_bias) * previous_cell + tf.sigmoid(i) * tf.tanh(j)
    if input_mask is not None:
        # Masked (padding) steps keep the previous cell state.
        c = input_mask[:, None] * c + (1. - input_mask[:, None]) * previous_cell
    h = tf.sigmoid(o) * tf.tanh(c)
    if input_mask is not None:
        # BUG FIX: this line previously mixed ``h`` with itself (a no-op);
        # masked steps must carry the previous hidden state forward, mirroring
        # the cell-state masking above.
        h = input_mask[:, None] * h + (1. - input_mask[:, None]) * previous_hidden
    if output_dim is not None:
        h_to_out_w_np, = make_numpy_weights(num_units, [output_dim],
                                            random_state=random_state,
                                            init=out_init)
        h_to_out_b_np, = make_numpy_biases([output_dim])
        h_to_out = Linear([h], [num_units], output_dim, random_state=random_state,
                          name=name + "_lstm_h_to_out",
                          init=(h_to_out_w_np, h_to_out_b_np), strict=strict)
        final_out = h_to_out
        logger.info("LSTMCell {} hidden to output initialized using init {}".format(name, out_init))
    else:
        final_out = h
    return final_out, (h, c)
def GaussianAttentionCell(list_of_step_inputs, list_of_step_input_dims,
                          previous_state_list,
                          previous_attention_position,
                          full_conditioning_tensor,
                          full_conditioning_tensor_dim,
                          num_units,
                          previous_attention_weight,
                          att_dim=10,
                          attention_scale=1.,
                          cell_type="lstm",
                          name=None,
                          input_mask=None,
                          conditioning_mask=None,
                          random_state=None, strict=None, init=None):
    """Graves-style Gaussian-mixture attention step over a conditioning sequence."""
    #returns w_t, k_t, phi_t, state
    # where state is the state tuple retruned by the inner cell_type
    if name is None:
        name = _get_name()
    name = name + "_gaussian_attention"
    check = any([len(shape(si)) != 2 for si in list_of_step_inputs])
    if check:
        raise ValueError("Unable to support step_input with n_dims != 2")
    if init is None or init == "truncated_normal":
        rnn_init = "truncated_normal"
        forward_init = "truncated_normal"
    else:
        raise ValueError("init != None not supported")
    if cell_type == "gru":
        raise ValueError("NYI")
    elif cell_type == "lstm":
        # Inner recurrent cell consumes the step inputs plus the previous
        # attention-weighted context.
        att_rnn_out, state = LSTMCell(list_of_step_inputs + [previous_attention_weight],
                                      list_of_step_input_dims + [full_conditioning_tensor_dim],
                                      previous_state_list[0], previous_state_list[1],
                                      num_units,
                                      input_mask=input_mask,
                                      random_state=random_state,
                                      name=name + "_gauss_att_lstm",
                                      init=rnn_init)
    else:
        raise ValueError("Unsupported cell_type %s" % cell_type)
    # Project RNN output to the 3 * att_dim window parameters (a, b, k).
    ret = Linear(
        list_of_inputs=[att_rnn_out], list_of_input_dims=[num_units],
        output_dim=3 * att_dim, name=name + "_group",
        random_state=random_state,
        strict=strict, init=forward_init)
    a_t = ret[:, :att_dim]
    b_t = ret[:, att_dim:2 * att_dim]
    k_t = ret[:, 2 * att_dim:]
    k_tm1 = previous_attention_position
    cond_dim = full_conditioning_tensor_dim
    ctx = full_conditioning_tensor
    ctx_mask = conditioning_mask
    """
    ctx = Linear(
        list_of_inputs=[full_conditioning_tensor],
        list_of_input_dims=[full_conditioning_tensor_dim],
        output_dim=next_proj_dim, name=name + "_proj_ctx",
        weight_norm=weight_norm,
        random_state=random_state,
        strict=strict, init=ctx_forward_init)
    """
    # exp keeps amplitudes (a) and inverse widths (b) strictly positive.
    a_t = tf.exp(a_t)
    b_t = tf.exp(b_t)
    a_t = tf.identity(a_t, name=name + "_a_scale")
    b_t = tf.identity(b_t, name=name + "_b_scale")
    # Monotonic position update: k only moves forward by a positive step.
    step_size = attention_scale * tf.exp(k_t)
    k_t = k_tm1 + step_size
    k_t = tf.identity(k_t, name=name + "_position")
    # tf.shape and tensor.shape are not the same...
    u = tf.cast(tf.range(0., limit=tf.shape(full_conditioning_tensor)[0], delta=1.), dtype=tf.float32)
    u = tf.expand_dims(tf.expand_dims(u, axis=0), axis=0)
    def calc_phi(lk_t, la_t, lb_t, lu):
        # Mixture-of-Gaussians window over conditioning positions lu.
        la_t = tf.expand_dims(la_t, axis=2)
        lb_t = tf.expand_dims(lb_t, axis=2)
        lk_t = tf.expand_dims(lk_t, axis=2)
        phi = tf.exp(-tf.square(lk_t - lu) * lb_t) * la_t
        phi = tf.reduce_sum(phi, axis=1, keep_dims=True)
        return phi
    phi_t = calc_phi(k_t, a_t, b_t, u)
    phi_t = tf.identity(phi_t, name=name + "_phi")
    """
    # Notes from pytorch tests
    # sanity check shapes for proper equivalent to np.dot
    aaaa = np.random.randn(50, 1, 46)
    bbbb = np.random.randn(50, 46, 400)
    r = np.matmul(aaaa, bbbb)
    # r has shape ms, 1, embed_dim
    # since aaaa and bbbb are > 2d, treated as stack of matrices, matrix dims on last 2 axes
    # this means 50, 1, 46 x 50, 46, 400 is 50 reps of 1, 46 x 46, 400
    # leaving shape 50, 1, 400
    # equivalent to dot for 1 matrix is is (aaaa[0][:, :, None] * bbbb[0][None, :, :]).sum(axis=-2)
    # so for all 50, (aaaa[:, :, :, None] * bbbb[:, None, :, :]).sum(axis=-2)
    # ((aaaa[:, :, :, None] * bbbb[:, None, :, :]).sum(axis=-2) == r).all()
    _a = Variable(th.FloatTensor(aaaa))
    _b = Variable(th.FloatTensor(bbbb))
    e_a = _a[:, :, :, None].expand(_a.size(0), _a.size(1), _a.size(2), _b.size(2))
    e_b = _b[:, None, :, :].expand(_b.size(0), _a.size(1), _b.size(1), _b.size(2))
    # In [17]: np.sum(((e_a * e_b).sum(dim=-2)[:, :, 0].data.numpy() - r) ** 2)
    # Out[17]: 1.6481219193765024e-08
    # equivalent to comb = th.matmul(phi, c), for backwards compat
    e_phi = phi[:, :, :, None].expand(phi.size(0), phi.size(1), phi.size(2), c.size(2))
    e_c = c[:, None, :, :].expand(c.size(0), phi.size(1), c.size(1), c.size(2))
    comb = (e_phi * e_c).sum(dim=-2)[:, :, 0]
    # comb has shape minibatch_size, 1, embed_size
    # w_t has shape minibatch_size, embed_size
    w_t = comb[:, 0, :]
    """
    # Weighted sum of the conditioning sequence by phi; optionally masked.
    if conditioning_mask is not None:
        w_t_pre = phi_t * tf.transpose(ctx, (1, 2, 0))
        w_t_masked = w_t_pre * (tf.transpose(ctx_mask, (1, 0))[:, None])
        w_t = tf.reduce_sum(w_t_masked, axis=-1)[:, None]
    else:
        w_t = tf.matmul(phi_t, tf.transpose(ctx, (1, 0, 2)))
    phi_t = phi_t[:, 0]
    w_t = w_t[:, 0]
    w_t = tf.identity(w_t, name=name + "_post_weighting")
    return w_t, k_t, phi_t, state
def LogitBernoulliAndCorrelatedLogitGMM(
        list_of_inputs, list_of_input_dims, output_dim=2, name=None, n_components=10,
        random_state=None, strict=None, init=None):
    """
    returns logit_bernoulli, logit_coeffs, mus, logit_sigmas, corr

    Output heads for a bernoulli + correlated bivariate GMM (handwriting-style
    models); all parameters are returned pre-activation ("logit") except mus
    and the tanh-squashed correlations.
    """
    assert n_components >= 1
    if name is None:
        name = _get_name()
    else:
        # NOTE(review): the suffix is only appended when a name is passed in;
        # auto-generated names skip it -- confirm this asymmetry is intended.
        name = name + "_logit_bernoulli_and_correlated_logit_gaussian_mixture"
    def _reshape(l, d=n_components):
        # (..., output_dim * d) -> (..., output_dim, d); d == 1 instead keeps
        # the projection intact and prepends a singleton axis.
        if d == 1:
            shp = shape(l)
            t = tf.reshape(l, shp[:-1] + [1, shp[-1]])
            return t
        if len(shape(l)) == 2:
            t = tf.reshape(l, (-1, output_dim, d))
        elif len(shape(l)) == 3:
            shp = shape(l)
            t = tf.reshape(l, (-1, shp[1], output_dim, d))
        else:
            raise ValueError("input ndim not supported for gaussian "
                             "mixture layer")
        return t
    if output_dim != 2:
        raise ValueError("General calculation for GMM not yet implemented")
    mus = Linear(
        list_of_inputs=list_of_inputs, list_of_input_dims=list_of_input_dims,
        output_dim=n_components * output_dim, name=name + "_mus_pre",
        random_state=random_state,
        strict=strict, init=init)
    mus = _reshape(mus)
    mus = tf.identity(mus, name=name + "_mus")
    logit_sigmas = Linear(
        list_of_inputs=list_of_inputs, list_of_input_dims=list_of_input_dims,
        output_dim=n_components * output_dim, name=name + "_logit_sigmas_pre",
        random_state=random_state,
        strict=strict, init=init)
    logit_sigmas = _reshape(logit_sigmas)
    logit_sigmas = tf.identity(logit_sigmas, name=name + "_logit_sigmas")
    """
    coeffs = Linear(
        list_of_inputs=list_of_inputs, list_of_input_dims=list_of_input_dims,
        output_dim=n_components, name=name + "_coeffs_pre",
        weight_norm=weight_norm, random_state=random_state,
        strict=strict, init=init)
    coeffs = tf.nn.softmax(coeffs)
    coeffs = _reshape(coeffs, 1)
    coeffs = tf.identity(coeffs, name=name + "_coeffs")
    """
    logit_coeffs = Linear(
        list_of_inputs=list_of_inputs, list_of_input_dims=list_of_input_dims,
        output_dim=n_components, name=name + "_logit_coeffs_pre",
        random_state=random_state,
        strict=strict, init=init)
    logit_coeffs = _reshape(logit_coeffs, 1)
    logit_coeffs = tf.identity(logit_coeffs, name=name + "_logit_coeffs")
    # Number of correlation parameters per component; evaluates to 1 for the
    # bivariate (output_dim == 2) case enforced above.
    calc_corr = int(factorial(output_dim ** 2 // 2 - 1))
    corrs = Linear(
        list_of_inputs=list_of_inputs, list_of_input_dims=list_of_input_dims,
        output_dim=n_components * calc_corr, name=name + "_corrs_pre",
        random_state=random_state,
        strict=strict, init=init)
    # tanh keeps correlations in (-1, 1).
    corrs = tf.tanh(corrs)
    corrs = _reshape(corrs, calc_corr)
    corrs = tf.identity(corrs, name + "_corrs")
    logit_bernoullis = Linear(
        list_of_inputs=list_of_inputs, list_of_input_dims=list_of_input_dims,
        output_dim=1, name=name + "_logit_bernoullis_pre",
        random_state=random_state,
        strict=strict, init=init)
    logit_bernoullis = tf.identity(logit_bernoullis, name + "_logit_bernoullis")
    return logit_bernoullis, logit_coeffs, mus, logit_sigmas, corrs
# from A d B
# https://github.com/adbrebs/handwriting/blob/master/model.py
def _logsumexp(inputs, axis=-1):
    """Numerically stable log(sum(exp(inputs))) along ``axis``."""
    # Subtract the max before exponentiating to avoid overflow.
    max_i = tf.reduce_max(inputs, axis=axis)
    z = tf.log(tf.reduce_sum(tf.exp(inputs - max_i[..., None]), axis=axis)) + max_i
    return z
def LogitBernoulliAndCorrelatedLogitGMMCost(
        logit_bernoulli_values, logit_coeff_values, mu_values, logit_sigma_values, corr_values,
        true_values, name=None):
    """
    Logit bernoulli combined with correlated gaussian mixture model negative log
    likelihood compared to true_values.

    This is typically paired with LogitBernoulliAndCorrelatedLogitGMM

    Based on implementation from Junyoung Chung.

    Parameters
    ----------
    logit_bernoulli_values : tensor, shape
        The predicted values out of some layer, normally a linear layer
    logit_coeff_values : tensor, shape
        The predicted values out of some layer, normally a linear layer
    mu_values : tensor, shape
        The predicted values out of some layer, normally a linear layer
    logit_sigma_values : tensor, shape
        The predicted values out of some layer, normally a linear layer
    true_values : tensor, shape[:-1]
        Ground truth values. Must be the same shape as mu_values.shape[:-1].

    Returns
    -------
    nll : tensor, shape predicted_values.shape[1:]
        The cost per sample, or per sample per step if 3D

    References
    ----------
    [1] University of Utah Lectures
        http://www.cs.utah.edu/~piyush/teaching/gmm.pdf

    [2] Statlect.com
        http://www.statlect.com/normal_distribution_maximum_likelihood.htm
    """
    if name == None:  # NOTE(review): idiomatic form is ``name is None``
        name = _get_name()
    else:
        name = name
    tv = true_values
    # Insert a singleton mixture axis so targets broadcast against the
    # per-component parameters.
    if len(shape(tv)) == 3:
        true_values = tf.expand_dims(tv, axis=2)
    elif len(shape(tv)) == 2:
        true_values = tf.expand_dims(tv, axis=1)
    else:
        raise ValueError("shape of labels not currently supported {}".format(shape(tv)))
    def _subslice(arr, idx):
        # Select coordinate ``idx`` from the output-dim axis (3D or 4D input).
        if len(shape(arr)) == 3:
            return arr[:, idx]
        elif len(shape(arr)) == 4:
            return arr[:, :, idx]
        raise ValueError("Unsupported ndim {}".format(shape(arr)))
    mu_values = tf.identity(mu_values, name=name + "_mus")
    mu_1 = _subslice(mu_values, 0)
    mu_2 = _subslice(mu_values, 1)
    corr_values = _subslice(corr_values, 0)
    corr_values = tf.identity(corr_values, name=name + "_corrs")
    # exp keeps sigma strictly positive; epsilon guards the divisions below.
    sigma_values = tf.exp(logit_sigma_values) + 1E-12
    sigma_values = tf.identity(sigma_values, name=name + "_sigmas")
    sigma_1 = _subslice(sigma_values, 0)
    sigma_2 = _subslice(sigma_values, 1)
    bernoulli_values = tf.nn.sigmoid(logit_bernoulli_values)
    bernoulli_values = tf.identity(bernoulli_values, name=name + "_bernoullis")
    logit_coeff_values = _subslice(logit_coeff_values, 0)
    coeff_values = tf.nn.softmax(logit_coeff_values, dim=-1)
    coeff_values = tf.identity(coeff_values, name=name + "_coeffs")
    """
    logit_sigma_1 = _subslice(logit_sigma_values, 0)
    logit_sigma_2 = _subslice(logit_sigma_values, 1)
    logit_coeff_values = _subslice(logit_coeff_values, 0)
    """
    # true_0: binary (bernoulli) target; true_1, true_2: 2D coordinates.
    true_0 = true_values[..., 0]
    true_1 = true_values[..., 1]
    true_2 = true_values[..., 2]
    # don't be clever
    buff = (1. - tf.square(corr_values)) + 1E-6
    x_term = (true_1 - mu_1) / sigma_1
    y_term = (true_2 - mu_2) / sigma_2
    # Density of a correlated bivariate Gaussian, per mixture component.
    Z = tf.square(x_term) + tf.square(y_term) - 2. * corr_values * x_term * y_term
    N = 1. / (2. * np.pi * sigma_1 * sigma_2 * tf.sqrt(buff)) * tf.exp(-Z / (2. * buff))
    ep = tf.reduce_sum(true_0 * bernoulli_values + (1. - true_0) * (1. - bernoulli_values), axis=-1)
    rp = tf.reduce_sum(coeff_values * N, axis=-1)
    # Epsilons avoid log(0); nll sums the mixture and bernoulli terms.
    nll = -tf.log(rp + 1E-8) - tf.log(ep + 1E-8)
    """
    ll_b = -tf.reduce_sum(tf.nn.sigmoid_cross_entropy_with_logits(labels=true_0, logits=logit_bernoulli_values), axis=-1)
    ll_b = tf.identity(ll_b, name=name + "_binary_ll")
    buff = 1 - corr_values ** 2 + 1E-8
    inner1 = (0.5 * tf.log(buff) +
              logit_sigma_1 + logit_sigma_2 + tf.log(2 * np.pi))
    z1 = ((true_1 - mu_1) ** 2) / tf.exp(2 * logit_sigma_1)
    z2 = ((true_2 - mu_2) ** 2) / tf.exp(2 * logit_sigma_2)
    zr = (2 * corr_values * (true_1 - mu_1) * (true_2 - mu_2)) / (
        tf.exp(logit_sigma_1 + logit_sigma_2))
    z = z1 + z2 - zr
    inner2 = .5 * (1. / buff)
    ll_g = -(inner1 + z * inner2)
    ll_g = tf.identity(ll_g, name=name + "_gaussian_ll")
    ll_sm = tf.nn.log_softmax(logit_coeff_values, dim=-1)
    ll_sm = tf.identity(ll_sm, name=name + "_coeff_ll")
    nllp1 = -_logsumexp(ll_g + ll_sm,
                        axis=len(shape(logit_coeff_values)) - 1)
    nllp1 = tf.identity(nllp1, name=name + "_gmm_nll")
    nllp2 = - ll_b
    nllp2 = tf.identity(nllp2, name=name + "_b_nll")
    nll = nllp1 + nllp2
    nll = tf.identity(nll, name=name + "_full_nll")
    """
    return nll
def BernoulliAndCorrelatedGMMCost(
    bernoulli_values, coeff_values, mu_values_list, sigma_values_list,
    corr_values, true_values_bernoulli, true_values_coord_list, name=None):
    """
    Bernoulli combined with correlated gaussian mixture model negative log
    likelihood compared to true_values.

    This is typically paired with BernoulliAndLogitGMM

    Based on implementation from Junyoung Chung.

    Parameters
    ----------
    bernoulli_values : tensor, shape
        The predicted values out of some layer, normally a sigmoid layer
    coeff_values : tensor, shape
        The predicted values out of some layer, normally a softmax layer
    mu_values_list: tensor, shape
        The predicted values out of some layer, normally a linear layer
    sigma_values_list: tensor, shape
        list of predicted values out of some layer, normally an exp or softplus layer
    corr_values: tensor, shape
    true_values_bernoulli : tensor, shape[:-1]
        Ground truth values. Must be the same shape as mu_values.shape[:-1],
        assumes the bernoulli true values are on the first entry ([:, :, 0])
    true_values_coord_list :

    Returns
    -------
    nll : tensor, shape predicted_values.shape[1:]
        The cost per sample, or per sample per step if 3D

    References
    ----------
    [1] University of Utah Lectures
        http://www.cs.utah.edu/~piyush/teaching/gmm.pdf
    [2] Statlect.com
        http://www.statlect.com/normal_distribution_maximum_likelihood.htm
    """
    if name is None:
        name = _get_name()
    xs = true_values_coord_list[0]
    ys = true_values_coord_list[1]
    es = true_values_bernoulli
    txs = shape(xs)
    if txs[-1] != 1:
        raise ValueError("Targets must be 1 dimensional")
    tys = shape(ys)
    tes = shape(es)
    if tys != txs:
        raise ValueError("Targets must have the same dimension")
    if tes != txs:
        raise ValueError("Targets must have the same dimension")
    # seq length generally -1
    batch_size = txs[1]
    def _2d(a):
        # flatten (time, batch) together, keep the trailing feature axis
        return tf.reshape(a, (-1, shape(a)[-1]))
    true_values_bernoulli = _2d(true_values_bernoulli)
    true_values_coord_list = [_2d(tvc) for tvc in true_values_coord_list]
    coeff_values = _2d(coeff_values)
    bernoulli_values = _2d(bernoulli_values)
    corr_values = _2d(corr_values)
    mu_values_list = [_2d(mv) for mv in mu_values_list]
    sigma_values_list = [_2d(sv) for sv in sigma_values_list]
    error_msg = "Dimension of variable {} not supported, got {}. Must be 2"
    if len(shape(true_values_bernoulli)) != 2:
        raise ValueError(error_msg.format("true_values_bernoulli", len(shape(true_values_bernoulli))))
    elif any([len(shape(tvc)) != 2 for tvc in true_values_coord_list]):
        # previously referenced a misspelled name (truce_values_coord_list)
        # which raised NameError instead of the intended ValueError
        raise ValueError(error_msg.format("true_values_coord_list", [len(shape(tvc)) for tvc in true_values_coord_list]))
    elif len(shape(bernoulli_values)) != 2:
        raise ValueError(error_msg.format("bernoulli_values", len(shape(bernoulli_values))))
    elif len(shape(coeff_values)) != 2:
        raise ValueError(error_msg.format("coeff_values", len(shape(coeff_values))))
    elif any([len(shape(m)) != 2 for m in mu_values_list]):
        # previously referenced undefined `mu_values` - report the real list
        raise ValueError(error_msg.format("mu_values", [len(shape(m)) for m in mu_values_list]))
    elif any([len(shape(s)) != 2 for s in sigma_values_list]):
        # previously referenced undefined `sigma_values` - report the real list
        raise ValueError(error_msg.format("sigma_values", [len(shape(s)) for s in sigma_values_list]))
    elif len(shape(corr_values)) != 2:
        raise ValueError(error_msg.format("corr_values", len(shape(corr_values))))
    if len(true_values_coord_list) != 2:
        raise ValueError("Only 2D GMM currently supported, got {} inputs in list for true coordinates".format(len(true_values_coord_list)))
    if len(mu_values_list) != 2:
        raise ValueError("Only 2D GMM currently supported, got {} inputs in list for mu values".format(len(mu_values_list)))
    if len(sigma_values_list) != 2:
        raise ValueError("Only 2D GMM currently supported, got {} inputs in list for sigma values".format(len(sigma_values_list)))
    mu_1 = mu_values_list[0]
    mu_1 = tf.identity(mu_1, name=name + "_mu_1")
    mu_2 = mu_values_list[1]
    mu_2 = tf.identity(mu_2, name=name + "_mu_2")
    corr_values = tf.identity(corr_values, name=name + "_corrs")
    sigma_1 = sigma_values_list[0]
    sigma_1 = tf.identity(sigma_1, name=name + "_sigma_1")
    sigma_2 = sigma_values_list[1]
    sigma_2 = tf.identity(sigma_2, name=name + "_sigma_2")
    bernoulli_values = tf.identity(bernoulli_values, name=name + "_bernoullis")
    coeff_values = tf.identity(coeff_values, name=name + "_coeffs")
    true_0 = true_values_bernoulli
    true_1 = true_values_coord_list[0]
    true_2 = true_values_coord_list[1]
    # don't be clever
    buff = (1. - tf.square(corr_values)) + 1E-6
    x_term = (true_1 - mu_1) / sigma_1
    y_term = (true_2 - mu_2) / sigma_2
    Z = tf.square(x_term) + tf.square(y_term) - 2. * corr_values * x_term * y_term
    N = 1. / (2. * np.pi * sigma_1 * sigma_2 * tf.sqrt(buff)) * tf.exp(-Z / (2. * buff))
    ep = true_0 * bernoulli_values + (1. - true_0) * (1. - bernoulli_values)
    assert shape(ep)[-1] == 1
    ep = ep[:, 0]
    rp = tf.reduce_sum(coeff_values * N, axis=-1)
    nll = -tf.log(rp + 1E-8) - tf.log(ep + 1E-8)
    # back to (time, batch)
    nll = tf.reshape(nll, (-1, batch_size))
    return nll
'''
def BernoulliAndCorrelatedGMMCost(
bernoulli_values, coeff_values, mu_values_list, sigma_values_list,
corr_values, true_values_bernoulli, true_values_coord_list, name=None):
"""
Logit bernoulli combined with correlated gaussian mixture model negative log
likelihood compared to true_values.
This is typically paired with LogitBernoulliAndCorrelatedLogitGMM
Based on implementation from Junyoung Chung.
Parameters
----------
bernoulli_values : tensor, shape
The predicted values out of some layer, normally a sigmoid layer
coeff_values : tensor, shape
The predicted values out of some layer, normally a softmax layer
mu_values_list: tensor, shape
The predicted values out of some layer, normally a linear layer
sigma_values_list: tensor, shape
list of predicted values out of some layer, normally an exp or softplus layer
corr_values: tensor, shape
true_values_bernoulli : tensor, shape[:-1]
Ground truth values. Must be the same shape as mu_values.shape[:-1],
assumes the bernoulli true values are on the first entry ([:, :, 0])
true_values_coords_list :
Returns
-------
nll : tensor, shape predicted_values.shape[1:]
The cost per sample, or per sample per step if 3D
References
----------
[1] University of Utah Lectures
http://www.cs.utah.edu/~piyush/teaching/gmm.pdf
[2] Statlect.com
http://www.statlect.com/normal_distribution_maximum_likelihood.htm
"""
if name == None:
name = _get_name()
else:
name = name
error_msg = "Dimension of variable {} not supported, got {}. Must be 2"
if len(shape(true_values_bernoulli)) != 2:
raise ValueError(error_msg.format("true_values_bernoulli", len(shape(true_values_bernoulli))))
elif any([len(shape(tvc)) != 2 for tvc in true_values_coord_list]):
raise ValueError(error_msg.format("true_values_coord_list", [len(shape(true_values_coord_list[0])), len(shape(truce_values_coord_list[1]))]))
elif len(shape(bernoulli_values)) != 2:
raise ValueError(error_msg.format("bernoulli_values", len(shape(bernoulli_values))))
elif len(shape(coeff_values)) != 2:
raise ValueError(error_msg.format("coeff_values", len(shape(coeff_values))))
elif any([len(shape(m)) != 2 for m in mu_values_list]):
raise ValueError(error_msg.format("mu_values", [len(shape(mu_values[0])), len(shape(mu_values_list[1]))]))
elif any([len(shape(s)) != 2 for s in sigma_values_list]):
raise ValueError(error_msg.format("sigma_values", [len(shape(sigma_values[0])), len(shape(sigma_values[1]))]))
elif len(shape(corr_values)) != 2:
raise ValueError(error_msg.format("corr_values", len(shape(corr_values))))
mu_1 = mu_values_list[0]
mu_1 = tf.identity(mu_1, name=name + "_mu_1")
mu_2 = mu_values_list[1]
mu_2 = tf.identity(mu_2, name=name + "_mu_2")
corr_values = tf.identity(corr_values, name=name + "_corrs")
sigma_1 = sigma_values_list[0]
sigma_1 = tf.identity(sigma_1, name=name + "_sigma_1")
sigma_2 = sigma_values_list[1]
sigma_2 = tf.identity(sigma_2, name=name + "_sigma_2")
bernoulli_values = tf.identity(bernoulli_values, name=name + "_bernoullis")
coeff_values = tf.identity(coeff_values, name=name + "_coeffs")
true_0 = true_values_bernoulli
true_1 = true_values_coord_list[0]
true_2 = true_values_coord_list[1]
# don't be clever
buff = (1. - tf.square(corr_values)) + 1E-6
x_term = (true_1 - mu_1) / sigma_1
y_term = (true_2 - mu_2) / sigma_2
Z = tf.square(x_term) + tf.square(y_term) - 2. * corr_values * x_term * y_term
N = 1. / (2. * np.pi * sigma_1 * sigma_2 * tf.sqrt(buff)) * tf.exp(-Z / (2. * buff))
ep = true_0 * bernoulli_values + (1. - true_0) * (1. - bernoulli_values)
rp = tf.reduce_sum(coeff_values * N, axis=-1)
nll = -tf.log(rp + 1E-8) - tf.log(ep + 1E-8)
return nll
'''
| {"/train.py": ["/tfdllib.py"]} |
77,706 | kastnerkyle/deconstructionism | refs/heads/master | /extras.py | from __future__ import print_function
# Author: Kyle Kastner
# License: BSD 3-clause
# Thanks to Jose (@sotelo) for tons of guidance and debug help
# Credit also to Junyoung (@jych) and Shawn (@shawntan) for help/utility funcs
# Strangeness in init could be from onehots, via @igul222. Ty init for one hot layer as N(0, 1) just as in embedding
# since oh.dot(w) is basically an embedding
import os
import re
import tarfile
from collections import Counter, OrderedDict
from bs4 import BeautifulSoup as Soup
import sys
import pickle
import numpy as np
import fnmatch
from scipy import linalg
from functools import wraps
import exceptions
from pthbldr import pe
class base_iterator(object):
    """Minibatch iterator base over a list of parallel containers.

    Subclasses provide the actual slicing via ``_slice_without_masks`` /
    ``_slice_with_masks``; this base handles index bookkeeping, reset
    behavior, and the iterator protocol.
    """
    def __init__(self, list_of_containers, minibatch_size,
                 axis,
                 start_index=0,
                 stop_index=np.inf,
                 make_mask=False,
                 one_hot_class_size=None):
        self.list_of_containers = list_of_containers
        self.minibatch_size = minibatch_size
        self.make_mask = make_mask
        self.start_index = start_index
        self.stop_index = stop_index
        # cursor for the next slice; rewound by reset()
        self.slice_start_ = start_index
        self.axis = axis
        if axis not in [0, 1]:
            raise ValueError("Unknown sample_axis setting %i" % axis)
        self.one_hot_class_size = one_hot_class_size
        if one_hot_class_size is not None:
            assert len(self.one_hot_class_size) == len(list_of_containers)

    def reset(self):
        """Rewind the iteration cursor to ``start_index``."""
        self.slice_start_ = self.start_index

    def __iter__(self):
        return self

    def next(self):
        # python 2 iterator protocol delegates to __next__
        return self.__next__()

    def __next__(self):
        self.slice_end_ = self.slice_start_ + self.minibatch_size
        if self.slice_end_ > self.stop_index:
            # TODO: Think about boundary issues with weird shaped last mb
            self.reset()
            raise StopIteration("Stop index reached")
        ind = slice(self.slice_start_, self.slice_end_)
        self.slice_start_ = self.slice_end_
        # pick the subclass slicer once, then apply the shared size check
        slicer = (self._slice_without_masks if self.make_mask is False
                  else self._slice_with_masks)
        res = slicer(ind)
        # TODO: Check that things are even
        if not all(self.minibatch_size in r.shape for r in res):
            self.reset()
            raise StopIteration("Partial slice returned, end of iteration")
        return res

    def _slice_without_masks(self, ind):
        raise AttributeError("Subclass base_iterator and override this method")

    def _slice_with_masks(self, ind):
        raise AttributeError("Subclass base_iterator and override this method")
class list_iterator(base_iterator):
    """Iterator over lists of (possibly ragged, possibly object-dtype) sequences.

    Ragged containers are zero-padded to the longest sequence in the slice,
    arranged time-major (length, minibatch, features); 1D sequences are
    flagged in ``self.is_thin`` and returned 2D. Masks mark valid steps with 1.
    """
    def _slice_without_masks(self, ind, return_shapes=False):
        sliced_c = [np.asarray(c[ind]) for c in self.list_of_containers]
        # object arrays
        shapes = [[sci.shape for sci in sc] for sc in sliced_c]
        if min([len(i) for i in sliced_c]) < self.minibatch_size:
            self.reset()
            raise StopIteration("Invalid length slice")
        self.is_thin = [False for lc in self.list_of_containers]
        for n in range(len(sliced_c)):
            sc = sliced_c[n]
            if self.one_hot_class_size is not None:
                convert_it = self.one_hot_class_size[n]
                if convert_it is not None:
                    raise ValueError("One hot conversion not implemented")
            # builtin `object` instead of the np.object alias removed in numpy 1.24
            if not isinstance(sc, np.ndarray) or sc.dtype == object:
                maxlen = max([len(i) for i in sc])
                # Assume they at least have the same internal dtype
                if len(sc[0].shape) > 1:
                    total_shape = (maxlen, sc[0].shape[1])
                elif len(sc[0].shape) == 1:
                    total_shape = (maxlen, 1)
                else:
                    raise ValueError("Unhandled array size in list")
                if self.axis == 0:
                    # (dead padding code for axis 0 removed - it was unreachable)
                    raise ValueError("Unsupported axis of iteration")
                else:
                    # time-major padded buffer: (maxlen, minibatch, features)
                    new_sc = np.zeros((total_shape[0], len(sc),
                                       total_shape[1]))
                    new_sc = new_sc.astype(sc[0].dtype)
                for m, sc_i in enumerate(sc):
                    if len(sc_i.shape) == 1:
                        self.is_thin[n] = True
                        # if the array is 1D still broadcast fill
                        sc_i = sc_i[:, None]
                    new_sc[:len(sc_i), m, :] = sc_i
                if self.is_thin[n]:
                    # drop the dummy feature axis for 1D sequences
                    sliced_c[n] = new_sc[..., 0]
                else:
                    sliced_c[n] = new_sc
        if not return_shapes:
            return sliced_c
        else:
            return sliced_c, shapes

    def _slice_with_masks(self, ind):
        cs, cs_shapes = self._slice_without_masks(ind, return_shapes=True)
        if self.axis == 0:
            ms = [np.zeros_like(c[:, 0]) for c in cs]
        elif self.axis == 1:
            ms = [np.zeros_like(c)
                  if self.is_thin[n] else np.zeros_like(c[:, :, 0])
                  for n, c in enumerate(cs)]
        for ni, csi in enumerate(cs):
            for ii in range(len(cs_shapes[ni])):
                if cs_shapes[ni][ii][0] < 1:
                    raise AttributeError("Minibatch has invalid content size {}".format(cs_shapes[ni][ii][0]))
                assert cs_shapes[ni][ii]
                # mark the valid (unpadded) steps of sequence ii with 1
                ms[ni][:cs_shapes[ni][ii][0], ii] = 1.
        assert len(cs) == len(ms)
        # interleave data and masks: [c0, m0, c1, m1, ...]
        return [i for sublist in list(zip(cs, ms)) for i in sublist]
def dense_to_one_hot(labels_dense, num_classes=10):
    """Convert class labels from scalars to one-hot vectors.

    Appends a trailing axis of size ``num_classes``; works for labels of
    any input shape.
    """
    orig_shape = labels_dense.shape
    flat = labels_dense.reshape([-1])
    n = flat.shape[0]
    onehot = np.zeros((n, num_classes))
    # write the 1s via flat indexing: row i gets column flat[i]
    onehot.flat[np.arange(n) * num_classes + flat.ravel()] = 1
    return onehot.reshape(orig_shape + (num_classes,))
def tokenize_ind(phrase, vocabulary):
    """Map a string to a one-hot index sequence, appending a trailing space."""
    vocabulary_size = len(vocabulary.keys())
    indexed = np.array([vocabulary[char_] for char_ in phrase + " "],
                       dtype='int32').ravel()
    return dense_to_one_hot(indexed, vocabulary_size)
# https://mrcoles.com/blog/3-decorator-examples-and-awesome-python/
def rsync_fetch(fetch_func, machine_to_fetch_from, *args, **kwargs):
    """
    Run ``fetch_func(*args, **kwargs)``; if it raises IOError for a missing
    local file, rsync that file's directory from ``machine_to_fetch_from``
    and retry the fetch once.

    Call it as
        rsync_fetch(fetch_func, machine_name)
    not
        rsync_fetch(fetch_func(), machine_name)
    """
    try:
        r = fetch_func(*args, **kwargs)
    except Exception as e:
        if isinstance(e, IOError):
            # path of the file the fetch failed on (assumes e.filename is set
            # by the failing open - TODO confirm for all fetch funcs)
            full_path = e.filename
            filedir = str(os.sep).join(full_path.split(os.sep)[:-1])
            if not os.path.exists(filedir):
                if filedir[-1] != "/":
                    fd = filedir + "/"
                else:
                    fd = filedir
                os.makedirs(fd)
            # recompute fd so it is defined even when the dir already existed
            if filedir[-1] != "/":
                fd = filedir + "/"
            else:
                fd = filedir
            if not os.path.exists(full_path):
                # pull the whole containing directory from the remote machine
                sdir = str(machine_to_fetch_from) + ":" + fd
                cmd = "rsync -avhp --progress %s %s" % (sdir, fd)
                pe(cmd, shell=True)
        else:
            print("unknown error {}".format(e))
        # retry once; a still-missing file will raise from fetch_func itself
        r = fetch_func(*args, **kwargs)
    return r
def plot_lines_iamondb_example(X, title="", save_name=None):
    """Plot a single IAM-OnDB example as connected pen strokes.

    X : array, shape (steps, 3)
        Column 0 is the pen-lift flag (1 ends a stroke); columns 1 and 2
        are cumulatively summed here, so they are assumed to be x/y offsets
        -- TODO confirm against caller.
    save_name : str or None
        If None, show the figure interactively; otherwise save to this path.
    """
    import matplotlib.pyplot as plt
    f, ax = plt.subplots()
    # integrate offsets back into absolute pen positions
    x = np.cumsum(X[:, 1])
    y = np.cumsum(X[:, 2])
    size_x = x.max() - x.min()
    size_y = y.max() - y.min()
    # scale the figure to the handwriting's aspect ratio
    f.set_size_inches(5 * size_x / size_y, 5)
    # break the trace wherever the pen was lifted
    cuts = np.where(X[:, 0] == 1)[0]
    start = 0
    for cut_value in cuts:
        ax.plot(x[start:cut_value], y[start:cut_value],
                'k-', linewidth=1.5)
        start = cut_value + 1
    ax.axis('equal')
    ax.axes.get_xaxis().set_visible(False)
    ax.axes.get_yaxis().set_visible(False)
    ax.set_title(title)
    if save_name is None:
        plt.show()
    else:
        plt.savefig(save_name, bbox_inches='tight', pad_inches=0)
    plt.close()
def implot(arr, title="", cmap="gray", save_name=None):
    """Show (or save) a 2D array as an image with a squared-off aspect."""
    import matplotlib.pyplot as plt
    f, ax = plt.subplots()
    ax.matshow(arr, cmap=cmap)
    plt.axis("off")

    def autoaspect(x_range, y_range):
        """
        The aspect to make a plot square with ax.set_aspect in Matplotlib
        """
        mx = max(x_range, y_range)
        mn = min(x_range, y_range)
        if x_range <= y_range:
            return mx / float(mn)
        else:
            return mn / float(mx)
    x1 = arr.shape[0]
    y1 = arr.shape[1]
    asp = autoaspect(x1, y1)
    ax.set_aspect(asp)
    plt.title(title)
    if save_name is None:
        plt.show()
    else:
        plt.savefig(save_name)
def check_fetch_iamondb():
    """Check for IAMONDB data, warning with download instructions if absent.

    This dataset cannot be downloaded automatically!

    Returns
    -------
    partial_path : str
        Directory where the IAM-OnDB tarball is expected to live
        (created if missing).
    """
    #partial_path = get_dataset_dir("iamondb")
    partial_path = os.sep + "Tmp" + os.sep + "kastner" + os.sep + "iamondb"
    if not os.path.exists(partial_path):
        os.makedirs(partial_path)
    combined_data_path = os.path.join(partial_path, "original-xml-part.tar.gz")
    # (an unused untarred_data_path local was removed - untarring is handled
    # by fetch_iamondb)
    if not os.path.exists(combined_data_path):
        files = "original-xml-part.tar.gz"
        url = "http://www.iam.unibe.ch/fki/databases/"
        url += "iam-on-line-handwriting-database/"
        url += "download-the-iam-on-line-handwriting-database"
        err = "Path %s does not exist!" % combined_data_path
        err += " Download the %s files from %s" % (files, url)
        err += " and place them in the directory %s" % partial_path
        print("WARNING: {}".format(err))
    return partial_path
"""
- all points:
>> [[x1, y1, e1], ..., [xn, yn, en]]
- indexed values
>> [h1, ... hn]
"""
def distance(p1, p2, axis=None):
    """Euclidean distance between p1 and p2, reduced over `axis`."""
    delta = p1 - p2
    return np.sqrt(np.sum(delta * delta, axis=axis))
def clear_middle(pts):
    """Drop points whose summed distance to both neighbors exceeds 1500.

    Endpoints are always kept; returns a new array of the surviving rows.
    """
    drop = set()
    for idx in range(1, len(pts) - 1):
        prev_p, cur_p, next_p = pts[idx - 1: idx + 2, :2]
        if distance(prev_p, cur_p) + distance(cur_p, next_p) > 1500:
            drop.add(idx)
    kept = [p for i, p in enumerate(pts) if i not in drop]
    return np.array(kept)
def separate(pts):
    """Split `pts` into segments wherever consecutive points are > 600 apart."""
    breaks = [i + 1 for i in range(0, len(pts) - 1)
              if distance(pts[i], pts[i + 1]) > 600]
    starts = [0] + breaks
    ends = breaks + [len(pts)]
    return [pts[b:e] for b, e in zip(starts, ends)]
def iamondb_extract(partial_path):
    """
    Parse the IAM-OnDB xml stroke files under ``partial_path/original`` and
    cache normalized strokes, label index lists, and the char->index
    translation dict to ``partial_path/preprocessed_data``.

    Lightly modified from https://github.com/Grzego/handwriting-generation/blob/master/preprocess.py
    """
    # ElementTree and the html unescaper were used here without ever being
    # imported at module level (NameError at runtime) - import them locally.
    from xml.etree import ElementTree
    try:
        from html import unescape  # python 3
    except ImportError:
        from HTMLParser import HTMLParser  # python 2
        unescape = HTMLParser().unescape
    data = []
    charset = set()
    file_no = 0
    pth = os.path.join(partial_path, "original")
    for root, dirs, files in os.walk(pth):
        for file in files:
            file_name, extension = os.path.splitext(file)
            if extension == '.xml':
                file_no += 1
                print('[{:5d}] File {} -- '.format(file_no, os.path.join(root, file)), end='')
                xml = ElementTree.parse(os.path.join(root, file)).getroot()
                transcription = xml.findall('Transcription')
                if not transcription:
                    print('skipped')
                    continue
                texts = [unescape(s.get('text')) for s in transcription[0].findall('TextLine')]
                points = [s.findall('Point') for s in xml.findall('StrokeSet')[0].findall('Stroke')]
                strokes = []
                mid_points = []
                for ps in points:
                    pts = np.array([[int(p.get('x')), int(p.get('y')), 0] for p in ps])
                    # mark the last point of the stroke as a pen lift
                    pts[-1, 2] = 1
                    pts = clear_middle(pts)
                    if len(pts) == 0:
                        continue
                    seps = separate(pts)
                    for pss in seps:
                        if len(seps) > 1 and len(pss) == 1:
                            continue
                        pss[-1, 2] = 1
                        xmax, ymax = max(pss, key=lambda x: x[0])[0], max(pss, key=lambda x: x[1])[1]
                        xmin, ymin = min(pss, key=lambda x: x[0])[0], min(pss, key=lambda x: x[1])[1]
                        strokes += [pss]
                        mid_points += [[(xmax + xmin) / 2., (ymax + ymin) / 2.]]
                # split strokes into text lines at the largest gaps between
                # consecutive stroke midpoints
                distances = [-(abs(p1[0] - p2[0]) + abs(p1[1] - p2[1]))
                             for p1, p2 in zip(mid_points, mid_points[1:])]
                splits = sorted(np.argsort(distances)[:len(texts) - 1] + 1)
                lines = []
                for b, e in zip([0] + splits, splits + [len(strokes)]):
                    lines += [[p for pts in strokes[b:e] for p in pts]]
                print('lines = {:4d}; texts = {:4d}'.format(len(lines), len(texts)))
                charset |= set(''.join(texts))
                data += [(texts, lines)]
    print('data = {}; charset = ({}) {}'.format(len(data), len(charset), ''.join(sorted(charset))))
    # index 0 is reserved for the null character
    translation = {'<NULL>': 0}
    for c in ''.join(sorted(charset)):
        translation[c] = len(translation)

    def translate(txt):
        # string -> list of vocabulary indices
        return list(map(lambda x: translation[x], txt))
    dataset = []
    labels = []
    for texts, lines in data:
        for text, line in zip(texts, lines):
            line = np.array(line, dtype=np.float32)
            # anchor x at 0 and center y per line
            line[:, 0] = line[:, 0] - np.min(line[:, 0])
            line[:, 1] = line[:, 1] - np.mean(line[:, 1])
            dataset += [line]
            labels += [translate(text)]
    whole_data = np.concatenate(dataset, axis=0)
    # scale both coordinates by the global std of y
    std_y = np.std(whole_data[:, 1])
    norm_data = []
    for line in dataset:
        line[:, :2] /= std_y
        norm_data += [line]
    dataset = norm_data
    print('datset = {}; labels = {}'.format(len(dataset), len(labels)))
    save_path = os.path.join(partial_path, 'preprocessed_data')
    try:
        os.makedirs(save_path)
    except OSError:
        # FileExistsError does not exist on python 2; swallow only
        # "already exists", re-raise anything else
        if not os.path.isdir(save_path):
            raise
    # dtype=object: the sequences are ragged, and modern numpy refuses to
    # build a ragged array implicitly
    np.save(os.path.join(save_path, 'dataset'), np.array(dataset, dtype=object))
    np.save(os.path.join(save_path, 'labels'), np.array(labels, dtype=object))
    with open(os.path.join(save_path, 'translation.pkl'), 'wb') as file:
        pickle.dump(translation, file)
    print("Preprocessing finished and cached at {}".format(save_path))
def fetch_iamondb():
    """Load the IAM-OnDB dataset, untarring/preprocessing on first use.

    Returns
    -------
    dataset_storage : dict
        "data": list of (steps, 3) arrays of [dx, dy, pen_lift] offsets,
        "target": list of label-index arrays,
        "target_phrases": list of the decoded label strings,
        "vocabulary_size": int, "vocabulary": sorted list of characters.
    """
    partial_path = check_fetch_iamondb()
    combined_data_path = os.path.join(partial_path, "original-xml-part.tar.gz")
    untarred_data_path = os.path.join(partial_path, "original")
    if not os.path.exists(untarred_data_path):
        print("Now untarring {}".format(combined_data_path))
        tar = tarfile.open(combined_data_path, "r:gz")
        tar.extractall(partial_path)
        tar.close()
    saved_dataset_path = os.path.join(partial_path, 'preprocessed_data')
    if not os.path.exists(saved_dataset_path):
        iamondb_extract(partial_path)
    dataset_path = os.path.join(saved_dataset_path, "dataset.npy")
    labels_path = os.path.join(saved_dataset_path, "labels.npy")
    translation_path = os.path.join(saved_dataset_path, "translation.pkl")
    # the cached arrays are object (ragged) arrays, which numpy >= 1.16.3
    # refuses to load without allow_pickle=True
    dataset = np.load(dataset_path, allow_pickle=True)
    dataset = [np.array(d) for d in dataset]
    temp = []
    for d in dataset:
        # dataset stores actual pen points, but we will train on differences between consecutive points
        offs = d[1:, :2] - d[:-1, :2]
        ends = d[1:, 2]
        temp += [np.concatenate([[[0., 0., 1.]], np.concatenate([offs, ends[:, None]], axis=1)], axis=0)]
    # because lines are of different length, we store them in python array (not numpy)
    dataset = temp
    labels = np.load(labels_path, allow_pickle=True)
    labels = [np.array(l) for l in labels]
    with open(translation_path, 'rb') as f:
        translation = pickle.load(f)
    # be sure of consistent ordering
    new_translation = OrderedDict()
    for k in sorted(translation.keys()):
        new_translation[k] = translation[k]
    translation = new_translation
    dataset_storage = {}
    dataset_storage["data"] = dataset
    dataset_storage["target"] = labels
    inverse_translation = {v: k for k, v in translation.items()}
    dataset_storage["target_phrases"] = ["".join([inverse_translation[ci] for ci in labels[i]]) for i in range(len(labels))]
    dataset_storage["vocabulary_size"] = len(translation)
    dataset_storage["vocabulary"] = sorted(translation.keys())
    return dataset_storage
| {"/train.py": ["/tfdllib.py"]} |
77,707 | kastnerkyle/deconstructionism | refs/heads/master | /train.py | from __future__ import print_function
import os
import argparse
import numpy as np
import tensorflow as tf
from collections import namedtuple
from utils import next_experiment_path
from batch_generator import BatchGenerator
import logging
import shutil
from tfdllib import get_logger
from tfdllib import Linear
from tfdllib import LSTMCell
from tfdllib import GaussianAttentionCell
from tfdllib import BernoulliAndCorrelatedGMMCost
from tfdllib import scan
# fixed graph-level seed for reproducibility
tf.set_random_seed(2899)
# TODO: add help info
parser = argparse.ArgumentParser()
parser.add_argument('--seq_len', dest='seq_len', default=256, type=int)
parser.add_argument('--batch_size', dest='batch_size', default=64, type=int)
parser.add_argument('--epochs', dest='epochs', default=8, type=int)
parser.add_argument('--window_mixtures', dest='window_mixtures', default=10, type=int)
parser.add_argument('--output_mixtures', dest='output_mixtures', default=20, type=int)
parser.add_argument('--lstm_layers', dest='lstm_layers', default=3, type=int)
parser.add_argument('--units_per_layer', dest='units', default=400, type=int)
parser.add_argument('--restore', dest='restore', default=None, type=str)
args = parser.parse_args()
# small constant to avoid log(0)/division-by-zero
epsilon = 1e-8
h_dim = args.units
# initialization schemes for feedforward and recurrent weights
forward_init = "truncated_normal"
rnn_init = "truncated_normal"
# fixed numpy seed shared by all parameter initializers below
random_state = np.random.RandomState(1442)
output_mixtures = args.output_mixtures
window_mixtures = args.window_mixtures
num_units = args.units
def mixture(inputs, input_size, num_mixtures, bias, init="truncated_normal"):
    """Project `inputs` to bernoulli + correlated 2D GMM output parameters.

    Returns (e, pi, mu1, mu2, std1, std2, rho):
    e          - pen-lift bernoulli probability (sigmoid)
    pi         - mixture weights (softmax, sharpened by `bias` when sampling)
    mu1, mu2   - component means (linear)
    std1, std2 - component std devs (exp, reduced by `bias` when sampling)
    rho        - per-component correlation in (-1, 1) (tanh)
    """
    forward_init = init
    e = Linear([inputs], [input_size], 1, random_state=random_state,
               init=forward_init, name="mdn_e")
    pi = Linear([inputs], [input_size], num_mixtures, random_state=random_state,
                init=forward_init, name="mdn_pi")
    mu1 = Linear([inputs], [input_size], num_mixtures, random_state=random_state,
                 init=forward_init, name="mdn_mu1")
    mu2 = Linear([inputs], [input_size], num_mixtures, random_state=random_state,
                 init=forward_init, name="mdn_mu2")
    std1 = Linear([inputs], [input_size], num_mixtures, random_state=random_state,
                  init=forward_init, name="mdn_std1")
    std2 = Linear([inputs], [input_size], num_mixtures, random_state=random_state,
                  init=forward_init, name="mdn_std2")
    rho = Linear([inputs], [input_size], num_mixtures, random_state=random_state,
                 init=forward_init, name="mdn_rho")
    return tf.nn.sigmoid(e), \
        tf.nn.softmax(pi * (1. + bias), dim=-1), \
        mu1, mu2, \
        tf.exp(std1 - bias), tf.exp(std2 - bias), \
        tf.nn.tanh(rho)
def create_graph(num_letters, batch_size,
                 num_units=400, lstm_layers=3,
                 window_mixtures=10, output_mixtures=20):
    """Build the conditional handwriting-synthesis graph.

    Parameters
    ----------
    num_letters : int
        Character vocabulary size (one-hot dim of `sequence`).
    batch_size : int
        Minibatch size baked into all placeholders.
    num_units : int
        Hidden size of the attention cell and both LSTM layers.
    lstm_layers : int
        NOTE(review): accepted but never used below - the network is
        hard-coded to attention cell + 2 LSTMs. Confirm before relying on it.
    window_mixtures : int
        Number of Gaussian attention window mixtures.
    output_mixtures : int
        Number of output GMM components.

    Returns
    -------
    (graph, train_model) : (tf.Graph, namedtuple)
        The graph and a namedtuple bundling the training tensors/ops.
    """
    graph = tf.Graph()
    with graph.as_default():
        tf.set_random_seed(2899)
        # all sequence inputs are time-major: (time, batch, features)
        coordinates = tf.placeholder(tf.float32, shape=[None, batch_size, 3])
        coordinates_mask = tf.placeholder(tf.float32, shape=[None, batch_size])
        sequence = tf.placeholder(tf.float32, shape=[None, batch_size, num_letters])
        sequence_mask = tf.placeholder(tf.float32, shape=[None, batch_size])
        # sampling bias; defaults to 0 (training)
        bias = tf.placeholder_with_default(tf.zeros(shape=[]), shape=[])
        # initial recurrent states, fed as numpy zeros by the caller
        att_w_init = tf.placeholder(tf.float32, shape=[batch_size, num_letters])
        att_k_init = tf.placeholder(tf.float32, shape=[batch_size, window_mixtures])
        att_h_init = tf.placeholder(tf.float32, shape=[batch_size, num_units])
        att_c_init = tf.placeholder(tf.float32, shape=[batch_size, num_units])
        h1_init = tf.placeholder(tf.float32, shape=[batch_size, num_units])
        c1_init = tf.placeholder(tf.float32, shape=[batch_size, num_units])
        h2_init = tf.placeholder(tf.float32, shape=[batch_size, num_units])
        c2_init = tf.placeholder(tf.float32, shape=[batch_size, num_units])
        def create_model(generate=None):
            # NOTE(review): `generate` is accepted but not referenced in the
            # body - both calls below build the same ops. Confirm intent.
            # model predicts coordinates[t+1] from coordinates[t]
            in_coordinates = coordinates[:-1, :, :]
            in_coordinates_mask = coordinates_mask[:-1]
            out_coordinates = coordinates[1:, :, :]
            #noise = tf.random_normal(tf.shape(out_coordinates), seed=random_state.randint(5000))
            #noise_pwr = tf.sqrt(tf.reduce_sum(tf.square(out_coordinates[:, :, :-1]), axis=-1)) / 2.
            #out_coordinates_part = noise_pwr[:, :, None] * noise[:, :, :-1] + out_coordinates[:, :, :-1]
            #out_coordinates = tf.concat([out_coordinates_part, out_coordinates[:, :, -1][:, :, None]],
            # axis=-1)
            out_coordinates_mask = coordinates_mask[1:]
            def step(inp_t, inp_mask_t,
                     att_w_tm1, att_k_tm1, att_h_tm1, att_c_tm1,
                     h1_tm1, c1_tm1, h2_tm1, c2_tm1):
                # one timestep: gaussian attention window over the character
                # sequence, then two stacked LSTMs with skip connections
                o = GaussianAttentionCell([inp_t], [3],
                                          (att_h_tm1, att_c_tm1),
                                          att_k_tm1,
                                          sequence,
                                          num_letters,
                                          num_units,
                                          att_w_tm1,
                                          input_mask=inp_mask_t,
                                          conditioning_mask=sequence_mask,
                                          attention_scale = 1. / 25.,
                                          name="att",
                                          random_state=random_state,
                                          init=rnn_init)
                att_w_t, att_k_t, att_phi_t, s = o
                att_h_t = s[0]
                att_c_t = s[1]
                output, s = LSTMCell([inp_t, att_w_t, att_h_t],
                                     [3, num_letters, num_units],
                                     h1_tm1, c1_tm1, num_units,
                                     input_mask=inp_mask_t,
                                     random_state=random_state,
                                     name="rnn1", init=rnn_init)
                h1_t = s[0]
                c1_t = s[1]
                output, s = LSTMCell([inp_t, att_w_t, h1_t],
                                     [3, num_letters, num_units],
                                     h2_tm1, c2_tm1, num_units,
                                     input_mask=inp_mask_t,
                                     random_state=random_state,
                                     name="rnn2", init=rnn_init)
                h2_t = s[0]
                c2_t = s[1]
                return output, att_w_t, att_k_t, att_phi_t, att_h_t, att_c_t, h1_t, c1_t, h2_t, c2_t
            # unroll step over time; None entries get no initial state
            r = scan(step,
                     [in_coordinates, in_coordinates_mask],
                     [None, att_w_init, att_k_init, None, att_h_init, att_c_init,
                      h1_init, c1_init, h2_init, c2_init])
            output = r[0]
            att_w = r[1]
            att_k = r[2]
            att_phi = r[3]
            att_h = r[4]
            att_c = r[5]
            h1 = r[6]
            c1 = r[7]
            h2 = r[8]
            c2 = r[9]
            #output = tf.reshape(output, [-1, num_units])
            # project to bernoulli + correlated GMM output parameters
            mo = mixture(output, num_units, output_mixtures, bias)
            e, pi, mu1, mu2, std1, std2, rho = mo
            #coords = tf.reshape(out_coordinates, [-1, 3])
            #xs, ys, es = tf.unstack(tf.expand_dims(coords, axis=2), axis=1)
            xs = out_coordinates[..., 0][..., None]
            ys = out_coordinates[..., 1][..., None]
            es = out_coordinates[..., 2][..., None]
            cc = BernoulliAndCorrelatedGMMCost(e, pi,
                                               [mu1, mu2],
                                               [std1, std2],
                                               rho,
                                               es,
                                               [xs, ys],
                                               name="cost")
            # mask + reduce_mean, slightly unstable
            #cc = in_coordinates_mask * cc
            #loss = tf.reduce_mean(cc)
            # mask + true weighted, better (flat) but also unstable
            #loss = tf.reduce_sum(cc / (tf.reduce_sum(in_coordinates_mask)))
            # no mask on loss - 0s become a form of biasing / noise?
            loss = tf.reduce_mean(cc)
            # save params for easier model loading and prediction
            for param in [('coordinates', coordinates),
                          ('in_coordinates', in_coordinates),
                          ('out_coordinates', out_coordinates),
                          ('coordinates_mask', coordinates_mask),
                          ('in_coordinates_mask', in_coordinates_mask),
                          ('out_coordinates_mask', out_coordinates_mask),
                          ('sequence', sequence),
                          ('sequence_mask', sequence_mask),
                          ('bias', bias),
                          ('e', e), ('pi', pi),
                          ('mu1', mu1), ('mu2', mu2),
                          ('std1', std1), ('std2', std2),
                          ('rho', rho),
                          ('att_w_init', att_w_init),
                          ('att_k_init', att_k_init),
                          ('att_h_init', att_h_init),
                          ('att_c_init', att_c_init),
                          ('h1_init', h1_init),
                          ('c1_init', c1_init),
                          ('h2_init', h2_init),
                          ('c2_init', c2_init),
                          ('att_w', att_w),
                          ('att_k', att_k),
                          ('att_phi', att_phi),
                          ('att_h', att_h),
                          ('att_c', att_c),
                          ('h1', h1),
                          ('c1', c1),
                          ('h2', h2),
                          ('c2', c2)]:
                tf.add_to_collection(*param)
            with tf.name_scope('training'):
                steps = tf.Variable(0.)
                # halve the learning rate every 10000 steps
                learning_rate = tf.train.exponential_decay(0.001, steps, staircase=True,
                                                           decay_steps=10000, decay_rate=0.5)
                optimizer = tf.train.AdamOptimizer(learning_rate=learning_rate, use_locking=True)
                grad, var = zip(*optimizer.compute_gradients(loss))
                # clip global gradient norm at 3 for stability
                grad, _ = tf.clip_by_global_norm(grad, 3.)
                train_step = optimizer.apply_gradients(zip(grad, var), global_step=steps)
            with tf.name_scope('summary'):
                # TODO: add more summaries
                summary = tf.summary.merge([
                    tf.summary.scalar('loss', loss)
                ])
            things_names = ["coordinates",
                            "coordinates_mask",
                            "sequence",
                            "sequence_mask",
                            "att_w_init",
                            "att_k_init",
                            "att_h_init",
                            "att_c_init",
                            "h1_init",
                            "c1_init",
                            "h2_init",
                            "c2_init",
                            "att_w",
                            "att_k",
                            "att_phi",
                            "att_h",
                            "att_c",
                            "h1",
                            "c1",
                            "h2",
                            "c2",
                            "loss",
                            "train_step",
                            "learning_rate",
                            "summary"]
            things_tf = [coordinates,
                         coordinates_mask,
                         sequence,
                         sequence_mask,
                         att_w_init,
                         att_k_init,
                         att_h_init,
                         att_c_init,
                         h1_init,
                         c1_init,
                         h2_init,
                         c2_init,
                         att_w,
                         att_k,
                         att_phi,
                         att_h,
                         att_c,
                         h1,
                         c1,
                         h2,
                         c2,
                         loss,
                         train_step,
                         learning_rate,
                         summary]
            return namedtuple('Model', things_names)(*things_tf)
        train_model = create_model(generate=None)
        _ = create_model(generate=True) # just to create ops for generation
    return graph, train_model
def make_mask(arr):
    """Build a (time, batch) mask that zeros out trailing all-zero padding.

    Parameters
    ----------
    arr : ndarray, shape (time, batch, features)

    Returns
    -------
    mask : ndarray, shape (time, batch)
        1.0 on valid steps; 0.0 from the first step where the remainder of
        that sequence is entirely zero.
    """
    mask = np.ones_like(arr[:, :, 0])
    # NOTE: a `last_step` array was also tracked here but never read or
    # returned - removed as dead code.
    for mbi in range(arr.shape[1]):
        for step in range(arr.shape[0]):
            # once the rest of this sequence is all zeros it is padding
            if arr[step:, mbi].min() == 0. and arr[step:, mbi].max() == 0.:
                mask[step:, mbi] = 0.
                break
    return mask
def main():
    """Train the attention RNN; resume from a checkpoint when --restore is set.

    Fixes over the original:
      * ``num_units`` / ``window_mixtures`` were referenced without being
        defined (NameError when building the zero-state arrays); they are now
        taken from ``args``, matching the ``create_graph`` call.
      * ``att_c_np`` read ``r[5]`` (duplicating ``h1``) instead of ``r[4]``.
      * Recurrent state was written to ``*_init`` names but the feed dict read
        ``*_init_np``, so state never carried across batches and the
        ``*_init *= reset`` lines hit unbound locals on the first reset; the
        ``*_init_np`` arrays are now updated and reset consistently.
    """
    restore_model = args.restore
    seq_len = args.seq_len
    batch_size = args.batch_size
    num_epoch = args.epochs
    # Needed below for the zero-state array shapes.
    num_units = args.units
    window_mixtures = args.window_mixtures
    batches_per_epoch = 1000
    batch_generator = BatchGenerator(batch_size, seq_len, 2177)
    g, vs = create_graph(batch_generator.num_letters, batch_size,
                         num_units=num_units, lstm_layers=args.lstm_layers,
                         window_mixtures=window_mixtures,
                         output_mixtures=args.output_mixtures)
    with tf.Session(graph=g) as sess:
        model_saver = tf.train.Saver(max_to_keep=2)
        if restore_model:
            model_file = tf.train.latest_checkpoint(os.path.join(restore_model, 'models'))
            experiment_path = restore_model
            # Checkpoints are named model-<epoch>; resume at the next epoch.
            epoch = int(model_file.split('-')[-1]) + 1
            model_saver.restore(sess, model_file)
        else:
            sess.run(tf.global_variables_initializer())
            experiment_path = next_experiment_path()
            epoch = 0
        logger = get_logger()
        fh = logging.FileHandler(os.path.join(experiment_path, "experiment_run.log"))
        fh.setLevel(logging.INFO)
        logger.addHandler(fh)
        logger.info(" ")
        logger.info("Using experiment path {}".format(experiment_path))
        logger.info(" ")
        # Snapshot the training script and library next to the checkpoints.
        shutil.copy2(os.getcwd() + "/" + __file__, experiment_path)
        shutil.copy2(os.getcwd() + "/" + "tfdllib.py", experiment_path)
        for k, v in args.__dict__.items():
            logger.info("argparse argument {} had value {}".format(k, v))
        logger.info(" ")
        logger.info("Model information")
        for t_var in tf.trainable_variables():
            logger.info(t_var)
        logger.info(" ")
        summary_writer = tf.summary.FileWriter(experiment_path, graph=g, flush_secs=10)
        summary_writer.add_session_log(tf.SessionLog(status=tf.SessionLog.START),
                                       global_step=epoch * batches_per_epoch)
        logger.info(" ")
        num_letters = batch_generator.num_letters
        # Zero initial attention/recurrent state, carried across batches below.
        att_w_init_np = np.zeros((batch_size, num_letters))
        att_k_init_np = np.zeros((batch_size, window_mixtures))
        att_h_init_np = np.zeros((batch_size, num_units))
        att_c_init_np = np.zeros((batch_size, num_units))
        h1_init_np = np.zeros((batch_size, num_units))
        c1_init_np = np.zeros((batch_size, num_units))
        h2_init_np = np.zeros((batch_size, num_units))
        c2_init_np = np.zeros((batch_size, num_units))
        for e in range(epoch, num_epoch):
            logger.info("Epoch {}".format(e))
            for b in range(1, batches_per_epoch + 1):
                coords, seq, reset, needed = batch_generator.next_batch2()
                coords_mask = make_mask(coords)
                seq_mask = make_mask(seq)
                if needed:
                    # Zero the state rows whose sequences just restarted.
                    att_w_init_np *= reset
                    att_k_init_np *= reset
                    att_h_init_np *= reset
                    att_c_init_np *= reset
                    h1_init_np *= reset
                    c1_init_np *= reset
                    h2_init_np *= reset
                    c2_init_np *= reset
                feed = {vs.coordinates: coords,
                        vs.coordinates_mask: coords_mask,
                        vs.sequence: seq,
                        vs.sequence_mask: seq_mask,
                        vs.att_w_init: att_w_init_np,
                        vs.att_k_init: att_k_init_np,
                        vs.att_h_init: att_h_init_np,
                        vs.att_c_init: att_c_init_np,
                        vs.h1_init: h1_init_np,
                        vs.c1_init: c1_init_np,
                        vs.h2_init: h2_init_np,
                        vs.c2_init: c2_init_np}
                outs = [vs.att_w, vs.att_k, vs.att_phi,
                        vs.att_h, vs.att_c,
                        vs.h1, vs.c1, vs.h2, vs.c2,
                        vs.loss, vs.summary, vs.train_step]
                r = sess.run(outs, feed_dict=feed)
                # Unpack positionally; the original misindexed att_c as r[5].
                (att_w_np, att_k_np, att_phi_np, att_h_np, att_c_np,
                 h1_np, c1_np, h2_np, c2_np, l, s, _) = r
                # Carry the final timestep's state into the next batch's feed
                # (assumes time-major outputs — matches the original's [-1]).
                att_w_init_np = att_w_np[-1]
                att_k_init_np = att_k_np[-1]
                att_h_init_np = att_h_np[-1]
                att_c_init_np = att_c_np[-1]
                h1_init_np = h1_np[-1]
                c1_init_np = c1_np[-1]
                h2_init_np = h2_np[-1]
                c2_init_np = c2_np[-1]
                summary_writer.add_summary(s, global_step=e * batches_per_epoch + b)
                print('\r[{:5d}/{:5d}] loss = {}'.format(b, batches_per_epoch, l), end='')
                logger.info("\n[{:5d}/{:5d}] loss = {}".format(b, batches_per_epoch, l))
            logger.info(" ")
            model_saver.save(sess, os.path.join(experiment_path, 'models', 'model'),
                             global_step=e)
# Entry point: run training only when executed as a script, not on import.
if __name__ == '__main__':
    main()
| {"/train.py": ["/tfdllib.py"]} |
77,708 | anilkumarpendela888/Mysite | refs/heads/master | /mysite/polls/forms.py | from django import forms
from polls.models import *
class QuestionForm(forms.ModelForm):
    """ModelForm for Question; requires the question text to end with '?'."""
    class Meta:
        model = Question
        fields = ['pub_date','question_text']
    def clean_question_text(self):
        """Validate that the question text ends in a question mark.

        Replaces the original last-character indexing (``que.strip('')[l-1]``),
        which shadowed the builtin ``str``, stripped nothing (``strip('')`` is
        a no-op), and raised IndexError on empty input instead of a form error.
        """
        que = self.cleaned_data['question_text']
        if not que.endswith('?'):
            raise forms.ValidationError("You must enter '?'")
        return que
class NewForm(forms.Form):
    """Simple demo form asking for a visitor's name."""
    your_name = forms.CharField(label="Your_name",max_length=100,required=False)
    def clean_your_name(self):
        """Reject names of two characters or fewer."""
        name = self.cleaned_data['your_name']
        if len(name) <= 2:
            raise forms.ValidationError("Your name is too short")
        return name
class RegForm(forms.ModelForm):
    """ModelForm exposing only RegModel.user, used by the registration view."""
    class Meta:
        model = RegModel
        fields = ['user']
class AuthorForm(forms.ModelForm):
    """ModelForm for Author.

    The original subclassed bare ``ModelForm`` (never imported in this module
    — only ``forms`` is) and ended with a dangling ``field`` name, both of
    which raise at import time. Uses ``forms.ModelForm`` and an explicit
    ``fields`` list covering Author's editable fields.
    """
    class Meta:
        model = Author
        fields = ['name', 'title', 'birth_date']
| {"/mysite/polls/views.py": ["/mysite/polls/models.py", "/mysite/polls/forms.py"]} |
77,709 | anilkumarpendela888/Mysite | refs/heads/master | /mysite/polls/urls.py | from django.conf.urls import url
from . import views
app_name = 'polls'
urlpatterns = [
url(r'^create/$',views.create,name='create'),
url(r'^$', views.index, name='index'),
url(r'^(?P<question_id>[0-9]+)/$', views.detail, name='detail'),
url(r'^(?P<question_id>[0-9]+)/results/$', views.results, name='results'),
url(r'^(?P<question_id>[0-9]+)/vote/$', views.vote, name='vote'),
url(r'^forms/',views.forms,name="forms"),
url(r'^registration/',views.registration,name="registration"),
] | {"/mysite/polls/views.py": ["/mysite/polls/models.py", "/mysite/polls/forms.py"]} |
77,710 | anilkumarpendela888/Mysite | refs/heads/master | /mysite/polls/models.py | from __future__ import unicode_literals
from django.db import models
from django.contrib.auth.models import User
class Question(models.Model):
    """A poll question and the date it was published."""
    question_text = models.CharField(max_length=200)
    pub_date = models.DateTimeField('date published')
    def __str__(self):
        # Shown in the admin and anywhere the object renders as text.
        return self.question_text
class Choice(models.Model):
    """One selectable answer for a Question, with its running vote count."""
    question = models.ForeignKey(Question, on_delete=models.CASCADE)
    choice_text = models.CharField(max_length=200)
    votes = models.IntegerField(default=0)
    def __str__(self):
        return self.choice_text
class RegModel(models.Model):
    """Registration record linking exactly one auth User to the polls app."""
    # on_delete added for consistency with Choice.question above and to avoid
    # the implicit-CASCADE removal on Django 2.0+.
    user = models.OneToOneField(User, on_delete=models.CASCADE)
    def __str__(self):
        return self.user.username
# Honorific choices for Author.title: (stored value, human-readable label).
TITLE_CHOICES = (
    ('MR','Mr.'),
    ('MRS','Mrs.'),
    ('MS','Ms.'),
)
class Author(models.Model):
    """An author with an honorific title and an optional birth date."""
    name = models.CharField(max_length=100)
    title = models.CharField(max_length=3,choices = TITLE_CHOICES)
    # Fixed typo: the original passed blanl=True, which Field.__init__
    # rejects with TypeError at class-definition time.
    birth_date = models.DateField(blank=True,null=True)
    def __str__(self):
        return self.name
77,711 | anilkumarpendela888/Mysite | refs/heads/master | /mysite/polls/views.py | from __future__ import unicode_literals
from django.http import HttpResponse,HttpResponseRedirect
from django.http import Http404
from django.urls import reverse
from django.shortcuts import get_object_or_404
from django.shortcuts import render
from .models import Choice, Question,RegModel
from django import forms
from django.http import HttpResponseRedirect
from .forms import *
from django.contrib.auth import authenticate
def index(request):
    """Render the poll index with every Question."""
    return render(request, 'polls/index.html',
                  {'latest_question_list': Question.objects.all()})
def detail(request, question_id):
    """Show the voting form for one Question (404 when it does not exist)."""
    return render(request, 'polls/detail.html',
                  {'question': get_object_or_404(Question, pk=question_id)})
def vote(request, question_id):
    """Record a vote for the POSTed choice; re-render the form on bad input."""
    question = get_object_or_404(Question, pk=question_id)
    try:
        chosen = question.choice_set.get(pk=request.POST['choice'])
    except (KeyError, Choice.DoesNotExist):
        # Missing 'choice' key or an id that is not one of this question's.
        context = {
            'question': question,
            'error_message': "You didn't select a choice.",
        }
        return render(request, 'polls/detail.html', context)
    chosen.votes += 1
    chosen.save()
    # Redirect after POST so a reload cannot double-count the vote.
    return HttpResponseRedirect(reverse('polls:results', args=(question.id,)))
def results(request, question_id):
    """Display vote tallies for one Question (404 when it does not exist)."""
    return render(request, 'polls/results.html',
                  {'question': get_object_or_404(Question, pk=question_id)})
def create(request):
    """Create a Question via QuestionForm; redirect to the index on success."""
    bound = request.method == "POST"
    form = QuestionForm(request.POST if bound else None)
    if bound and form.is_valid():
        form.save()
        return HttpResponseRedirect('/polls/')
    return render(request, "polls/create.html", {'form': form})
def forms(request):
    """Validate a NewForm submission; redirect to /polls/ on success."""
    # NOTE(review): this view's name shadows the module-level
    # `from django import forms` import once the module finishes loading;
    # nothing visible here uses the django.forms module directly, but renaming
    # the view (and its urls.py reference) would be safer — confirm first.
    if request.method=="POST":
        form = NewForm(request.POST)
        if form.is_valid():
            return HttpResponseRedirect('/polls/')
    else:
        form = NewForm()
    return render(request,'polls/new_form.html',{'form':form})
def registration(request):
    """Register a user via RegForm; redirect to the poll index on success."""
    bound = request.method == "POST"
    form = RegForm(request.POST if bound else None)
    if bound and form.is_valid():
        form.save()
        return HttpResponseRedirect('/polls/')
    return render(request, "polls/regform.html", {'form': form})
| {"/mysite/polls/views.py": ["/mysite/polls/models.py", "/mysite/polls/forms.py"]} |
77,716 | wchill/WatchBot | refs/heads/master | /cytube_bot.py | import os
import asyncio
import collections
import discord
from discord.ext import commands
from utils import ask_for_int, parse_timestamp, escape_code_block, format_file_entry, format_dir_entry
import media_player
import file_explorer
class CytubeBot(object):
    """Discord command handler that queues local media files and streams them
    to an RTMP endpoint (e.g. for a CyTube channel).

    Commands live under the ``!stream`` group plus the filesystem helpers
    ``!ls``, ``!cd`` and ``!ezcd``.

    Fixes over the original:
      * ``rew_stream`` tested ``current + time < 0`` (never true for positive
        values) and could request a negative seek; it now clamps at 0.
      * Track selection passed ``upper_bound=len(tracks) + 1`` to
        :func:`ask_for_int`, allowing a one-past-the-end track number.
      * ``list_current_dir`` scanned and sorted the file list twice and wrote
        the listing cache twice; the redundant pass is gone (same output).
    """

    def __init__(self, bot, stream_url, rtmp_endpoint, media_directory, channel_whitelist):
        self._bot = bot
        self._stream_url = stream_url
        self._rtmp_endpoint = rtmp_endpoint
        self._channel_whitelist = channel_whitelist
        self._file_explorer = file_explorer.FileExplorer(media_directory)
        self._media_player = media_player.DiscordMediaPlayer(self._rtmp_endpoint)
        # (dirs, files) from the last listing, so numeric shortcuts stay stable.
        self._last_ls_cache = (None, None)
        # Start the media queue consumer task.
        self._media_queue = collections.deque()
        asyncio.ensure_future(self._process_media_queue())
        # While paused, holds the remaining queue (current video first).
        self._backup_queue = None

    async def set_bot_presence(self, name=None):
        """Advertise *name* as a 'streaming' presence, or clear it when None."""
        bot_game = None
        if name:
            bot_game = discord.Game(name=name, url=self._stream_url, type=1)
        await self._bot.change_presence(game=bot_game, status=None, afk=False)

    async def on_ready(self):
        """discord.py event hook: log a banner once the bot is connected."""
        print('Logged in as {}'.format(self._bot.user.name))
        print('--------------')

    async def _start_stream(self, relative_path: str):
        """Ask for track choices (when ambiguous) and enqueue the file."""
        await self._bot.say('Selected file: `{}`.'.format(escape_code_block(os.path.basename(relative_path))))
        absolute_path = self._file_explorer.get_complete_path(relative_path)
        audio_tracks, subtitle_tracks = self._media_player.get_human_readable_track_info(absolute_path)
        audio_track = 1
        subtitle_track = 1 if len(subtitle_tracks) > 0 else None
        # Ask user to select audio track if multiple present.
        # Valid choices are 1..len(tracks); the original allowed len+1.
        if len(audio_tracks) > 1:
            ask_str = 'Please select an audio track:\n```{}```'.format(escape_code_block('\n'.join(audio_tracks)))
            audio_track = await ask_for_int(self._bot, ask_str, lower_bound=1,
                                            upper_bound=len(audio_tracks), default=1)
        # Ask user to select subtitle track if multiple present.
        if len(subtitle_tracks) > 1:
            ask_str = 'Please select a subtitle track:\n```{}```'.format(escape_code_block('\n'.join(subtitle_tracks)))
            subtitle_track = await ask_for_int(self._bot, ask_str, lower_bound=1,
                                               upper_bound=len(subtitle_tracks), default=1)
        await self._bot.say('Added to queue (#{}).'.format(len(self._media_queue) + 1))
        self._media_queue.append(
            media_player.Video(absolute_path, audio_track=audio_track, subtitle_track=subtitle_track))

    async def _process_media_queue(self):
        """Background task: pop queued videos and play them one at a time."""
        while True:
            video = None
            while video is None:
                try:
                    video = self._media_queue.popleft()
                except IndexError:
                    # Queue empty; poll again shortly.
                    await asyncio.sleep(1)
            await self.set_bot_presence(video.name)
            # play_video blocks (cooperatively) until the video finishes or
            # is stopped, then the loop advances to the next queue entry.
            await self._media_player.play_video(video)
            await self.set_bot_presence()

    @commands.group(name='stream', pass_context=True, no_pm=True)
    async def stream(self, ctx):
        """Parent command for the stream-control subcommands."""
        if ctx.invoked_subcommand is None:
            await self._bot.say('Invalid stream command passed.')

    @stream.command(name='play', no_pm=True)
    async def start_stream(self, *, file: str):
        """Queue a file by name, or by its number from the last ``!ls``."""
        try:
            num = int(file)
            _, files = self._last_ls_cache
            if files is None:
                _, files = self.get_sorted_files_and_dirs()
            if num < 1 or num > len(files):
                await self._bot.say('Invalid option.')
                return
            file = files[num - 1].name
        except ValueError:
            # Not a number: treat the argument as a (relative) file name.
            pass
        if not self._file_explorer.file_exists(file):
            await self._bot.say('File does not exist.')
            return
        await self._start_stream(file)

    @stream.command(name='skip', no_pm=True)
    async def skip_stream(self):
        """Stop the current video so the queue advances to the next one."""
        if not self._media_player.is_video_playing():
            await self._bot.say('Stream not currently playing.')
            return
        await self._bot.say('Skipping current video.')
        await self._media_player.stop_video()

    @stream.command(name='pause', no_pm=True)
    async def pause_stream(self):
        """Stop playback, remembering queue and position for ``resume``."""
        if not self._media_player.is_video_playing():
            await self._bot.say('Stream not currently playing.')
            return
        self._backup_queue = collections.deque()
        self._backup_queue.extend(self._media_queue)
        self._media_queue.clear()
        # Record the current position on the video so resume restarts there.
        video = self._media_player.get_current_video()
        video.seek_time, _ = self._media_player.get_video_time()
        self._backup_queue.appendleft(video)
        await self._media_player.stop_video()
        await self.set_bot_presence()
        await self._bot.say('Stream paused at {}.'.format(self._media_player.convert_secs_to_str(video.seek_time)))

    @stream.command(name='resume', no_pm=True)
    async def resume_stream(self):
        """Re-queue everything saved by ``pause``."""
        if self._backup_queue is None:
            await self._bot.say('Stream not currently paused.')
            return
        self._media_queue.extend(self._backup_queue)
        self._backup_queue = None
        await self._bot.say('Resuming stream.')

    @stream.command(name='stop', no_pm=True)
    async def stop_stream(self):
        """Stop playback and discard the queue."""
        if not self._media_player.is_video_playing():
            await self._bot.say('Stream not currently playing.')
            return
        self._media_queue.clear()
        _, current_time, _ = await self._media_player.stop_video()
        await self.set_bot_presence()
        if current_time:
            await self._bot.say('Stream stopped at {}.'.format(self._media_player.convert_secs_to_str(current_time)))
        else:
            await self._bot.say('Stream stopped.')

    async def _seek_stream(self, time):
        """Restart the current video at *time* seconds by re-queueing it first."""
        if not self._media_player.is_video_playing():
            await self._bot.say('Stream not currently playing.')
            return
        await self._bot.say('Restarting stream at {}.'.format(self._media_player.convert_secs_to_str(time)))
        video = self._media_player.get_current_video()
        video.seek_time = time
        self._media_queue.appendleft(video)
        await self._media_player.stop_video()

    @stream.command(name='seek', no_pm=True)
    async def seek_stream(self, timestamp: str):
        """Jump to an absolute '[hh:][mm:]ss' position."""
        time = parse_timestamp(timestamp)
        if time:
            await self._seek_stream(time)
        else:
            await self._bot.say('Invalid parameter.')

    @stream.command(name='ff', no_pm=True)
    async def ff_stream(self, length: str):
        """Fast-forward by a '[hh:][mm:]ss' amount."""
        time = parse_timestamp(length)
        if time:
            current, _ = self._media_player.get_video_time()
            await self._seek_stream(current + time)
        else:
            await self._bot.say('Invalid parameter.')

    @stream.command(name='rew', no_pm=True)
    async def rew_stream(self, length: str):
        """Rewind by a '[hh:][mm:]ss' amount, clamped at the video start."""
        time = parse_timestamp(length)
        if time:
            current, _ = self._media_player.get_video_time()
            # The original tested `current + time < 0` (never true for
            # positive values) and could seek to a negative offset.
            await self._seek_stream(max(current - time, 0))
        else:
            await self._bot.say('Invalid parameter.')

    @commands.command(name='ls', no_pm=True)
    async def list_current_dir(self):
        """List subdirectories and playable files in the current directory."""
        output_str = ('```diff\n'
                      '=== Contents of {path} ===\n'
                      '```{dirs}{files}')
        dirs, files = self.get_sorted_files_and_dirs()
        dir_str = '\n'.join([format_dir_entry(i + 1, len(dirs), entry) for i, entry in enumerate(dirs)])
        if len(dir_str) > 0:
            dir_str = '```c\n' + dir_str + '```'
        file_str = '\n'.join([format_file_entry(i + 1, len(files), entry) for i, entry in enumerate(files)])
        if len(file_str) > 0:
            file_str = '```c\n' + file_str + '```'
        await self._bot.say(output_str.format(
            path=self._file_explorer.get_current_path(),
            dirs=dir_str,
            files=file_str
        ))

    def get_sorted_files_and_dirs(self):
        """Scan the current directory, sort entries by name, refresh the cache."""
        dirs = self._file_explorer.get_dirs_in_current_dir()
        dirs.sort(key=lambda x: x.name)
        files = self._file_explorer.get_files_in_current_dir(extensions=['.mkv', '.mp4', '.avi'])
        files.sort(key=lambda x: x.name)
        self._last_ls_cache = (dirs, files)
        return self._last_ls_cache

    async def _change_directory(self, path: str):
        """Change the sandboxed cwd; a leading '/' is relative to the media root."""
        if path[0] == '/':
            path = self._file_explorer.build_absolute_path(path[1:])
            res = self._file_explorer.change_directory(path, relative=False)
        else:
            res = self._file_explorer.change_directory(path)
        # Any cached listing is stale once the directory changes.
        self._last_ls_cache = (None, None)
        if res:
            send_str = 'Changed directory to `{}`'.format(escape_code_block(self._file_explorer.get_current_path()))
        else:
            send_str = 'Failed to change directory.'
        await self._bot.say(send_str)

    @commands.command(name='cd', no_pm=True)
    async def change_directory(self, path: str):
        """Change directory by path."""
        await self._change_directory(path)

    @commands.command(name='ezcd', no_pm=True)
    async def change_directory_ez(self, num: int):
        """Change directory by its number from the last ``!ls`` listing."""
        dirs, _ = self._last_ls_cache
        if dirs is None:
            dirs, _ = self.get_sorted_files_and_dirs()
        if num < 1 or num > len(dirs):
            await self._bot.say('Invalid option.')
            return
        await self._change_directory(dirs[num - 1].name)
| {"/cytube_bot.py": ["/utils.py", "/media_player.py", "/file_explorer.py"], "/app.py": ["/cytube_bot.py"]} |
77,717 | wchill/WatchBot | refs/heads/master | /media_player.py | import asyncio
import os
import re
import ffmpy3
from pymediainfo import MediaInfo
import ruamel.yaml
CONFIG_FILE = 'config.yaml'
# Load settings once at import time; FONT_FILE is consumed by the ffmpeg
# drawtext filter that burns the current timestamp into the video below.
with open(CONFIG_FILE, 'r') as f:
    settings = ruamel.yaml.load(f.read(), ruamel.yaml.RoundTripLoader)
FONT_FILE = settings['ffmpeg']['font_file']
class Video(object):
    """Playback request: a media file plus track selection and resume offset."""

    def __init__(self, absolute_path, name=None, seek_time=0.0, audio_track=1, subtitle_track=None):
        # The display name defaults to the file name without its extension.
        base = os.path.basename(absolute_path)
        self.filename = base
        self.name = name or os.path.splitext(base)[0]
        self.absolute_path = absolute_path
        self.seek_time = seek_time
        # Track numbers are 1-based; subtitle_track None means "no subtitles".
        self.audio_track = audio_track
        self.subtitle_track = subtitle_track
class DiscordMediaPlayer(object):
    """Wraps an ffmpeg subprocess that transcodes a local file to an RTMP URL,
    tracking playback progress by parsing ffmpeg's stderr output.
    """
    # "Duration: HH:MM:SS.cc" printed once in ffmpeg's input summary.
    TOTAL_DURATION_REGEX = re.compile(r'Duration: (?P<hrs>[\d]+):(?P<mins>[\d]+):(?P<secs>[\d]+)\.(?P<ms>[\d]+)')
    # "time=HH:MM:SS.cc" from ffmpeg's rolling progress line.
    CURRENT_PROGRESS_REGEX = re.compile(r'time=(?P<hrs>[\d]+):(?P<mins>[\d]+):(?P<secs>[\d]+)\.(?P<ms>[\d]+)')
    def __init__(self, stream_url):
        # RTMP endpoint ffmpeg publishes to.
        self._stream_url = stream_url
        self._ffmpeg_process = None
        # Seconds encoded in the current run (excludes the initial seek offset).
        self._offset_time = 0
        self._total_duration = None
        self._current_video = None
    @staticmethod
    def get_human_readable_track_info(file_path):
        """Return ([audio descriptions], [subtitle descriptions]) for a file,
        each entry formatted as a numbered, human-readable line."""
        mi = MediaInfo.parse(file_path)
        audio_tracks, subtitle_tracks = [], []
        for track in mi.tracks:
            if track.track_type == 'Audio':
                # stream_identifier is 0-based; present tracks 1-based to users.
                audio_tracks.append(
                    '{num}) {name} ({lang}, {codec} - {channels})'.format(
                        num=int(track.stream_identifier or '0') + 1,
                        name=track.title or 'Untitled',
                        lang=(track.other_language or ['Unknown language'])[0],
                        codec=track.format or 'Unknown codec',
                        channels=(str(track.channel_s) or 'Unknown') + ' channels'
                    )
                )
            elif track.track_type == 'Text':
                subtitle_tracks.append(
                    '{num}) {name} ({lang})'.format(
                        num=int(track.stream_identifier or '0') + 1,
                        name=track.title or 'Untitled',
                        lang=(track.other_language or ['Unknown language'])[0]
                    )
                )
        return audio_tracks, subtitle_tracks
    @staticmethod
    def convert_to_secs(hrs, mins, secs, ms):
        """Combine H/M/S plus centiseconds (ffmpeg's .cc field) into seconds."""
        return int(hrs) * 3600 + int(mins) * 60 + int(secs) + int(ms) * 0.01
    @staticmethod
    def convert_secs_to_str(secs):
        """Format seconds as 'H:MM:SS.ss', omitting hours when zero."""
        hrs, secs = int(secs // 3600), secs % 3600
        mins, secs = int(secs // 60), secs % 60
        if hrs > 0:
            return '{}:{:02d}:{:05.2f}'.format(hrs, mins, secs)
        else:
            return '{}:{:05.2f}'.format(mins, secs)
    def is_video_playing(self):
        # A live run exists and its process has not set a return code yet.
        return self._ffmpeg_process and self._ffmpeg_process.process.returncode is None
    def get_video_time(self):
        """Return (current position, total duration) in seconds; the position
        includes the initial seek offset of the current video."""
        return self._current_video.seek_time + self._offset_time, self._total_duration
    def get_current_video(self):
        return self._current_video
    async def stop_video(self):
        """Terminate ffmpeg if running.

        Returns (exit code or None, current position, total duration).
        """
        if self.is_video_playing():
            try:
                print('Stopping FFmpeg')
                self._ffmpeg_process.process.terminate()
                await self._ffmpeg_process.process.wait()
            except ffmpy3.FFRuntimeError:
                # Expected: wait() raises when ffmpeg exits non-zero after
                # being terminated.
                pass
        if not self._ffmpeg_process or not self._ffmpeg_process.process:
            exitcode = None
        else:
            exitcode = self._ffmpeg_process.process.returncode
        current, total = self.get_video_time()
        return exitcode, current, total
    async def play_video(self, video):
        """Transcode *video* to the RTMP endpoint, parsing progress as it runs.

        Blocks (cooperatively) until ffmpeg exits; returns its exit code.
        Raises FileNotFoundError when the source file is missing.
        """
        if not os.path.exists(video.absolute_path):
            raise FileNotFoundError('File not found: {}'.format(video.filename))
        self._current_video = video
        output_params = [
            # Select the first video track (if there are multiple)
            '-map', '0:v:0',
            # Select the specified audio track (if there are multiple) - note that it's 0 indexed
            '-map', '0:a:{}'.format(video.audio_track - 1)
        ]
        # Build filtergraph
        # First filter: change frame timestamps so that they are correct when starting at seek_time
        vf_str = 'setpts=PTS+{}/TB,'.format(video.seek_time)
        # Second filter: render embedded subtitle track from the media file
        # Note that subtitles rely on the above timestamps and that tracks are 0 indexed
        if video.subtitle_track:
            vf_str += 'subtitles=\'{}\':si={},'.format(video.absolute_path, video.subtitle_track - 1)
        # Third filter: Draw timestamp for current frame in the video to make seeking easier
        # TODO: make these parameters more configurable
        vf_str += 'drawtext=\'fontfile={}: fontcolor=white: x=0: y=h-line_h-5: fontsize=24: boxcolor=black@0.5: box=1: text=%{{pts\\:hms}}\','.format(FONT_FILE)
        vf_str += 'setpts=PTS-STARTPTS'
        # TODO: make these more configurable
        output_params += [
            # Filtergraph options from above
            '-vf', vf_str,
            # Use the following encoding settings:
            # Encode using x264 veryfast preset (decent performance/quality for realtime streaming)
            '-vcodec', 'libx264',
            '-preset', 'veryfast',
            # Specify max bitrate of 4.5Mbps with buffer size of 1.125Mbps (0.25 sec buffer for faster stream startup)
            '-maxrate', '4500k',
            '-bufsize', '1125k',
            # Use YUV color space, 4:2:0 chroma subsampling, 8-bit render depth
            '-pix_fmt', 'yuv420p',
            # Set keyframe interval to 24
            # (RTMP clients need to wait for the next keyframe, so this is a 1 second startup time)
            '-g', '24',
            # Use AAC-LC audio codec, 128Kbps stereo at 44.1KHz sampling rate
            '-c:a', 'libfdk_aac',
            '-ab', '128k',
            '-ac', '2',
            '-ar', '44100',
            # Some more options to reduce startup time
            '-probesize', '32',
            '-analyzeduration', '500000',
            '-flush_packets', '1',
            # Output format is FLV
            '-f', 'flv'
        ]
        self._ffmpeg_process = ffmpy3.FFmpeg(
            global_options=[
                # Tell ffmpeg to start encoding from seek_time seconds into the video
                '-ss', str(video.seek_time),
                # Read input file at the frame rate it's encoded at (crucial for live streams and synchronization)
                '-re',
            ],
            inputs={video.absolute_path: None},
            outputs={self._stream_url: output_params},
        )
        print('Starting FFmpeg')
        print(self._ffmpeg_process.cmd)
        # Start FFmpeg, redirect stderr so we can keep track of encoding progress
        self._ffmpeg_process.run_async(stderr=asyncio.subprocess.PIPE)
        # Buffer for incomplete line output
        line_buf = bytearray()
        my_stderr = self._ffmpeg_process.process.stderr
        while True:
            # Read some FFmpeg output (128 bytes is about 1 line worth)
            in_buf = await my_stderr.read(128)
            # Break if EOF
            if not in_buf:
                break
            # FFmpeg encoding progress is displayed on the same line using CR, so replace with LF if present
            in_buf = in_buf.replace(b'\r', b'\n')
            # Append to the buffer
            line_buf.extend(in_buf)
            # Process each line present in the buffer
            while b'\n' in line_buf:
                line, _, line_buf = line_buf.partition(b'\n')
                # NOTE(review): str() on bytes yields the "b'...'" repr rather
                # than decoding; the regexes below still match inside that
                # repr, but line.decode() would be the cleaner fix — confirm
                # before changing.
                line = str(line)
                # print(line)
                if self._total_duration is None:
                    # Get total video duration
                    match = self.TOTAL_DURATION_REGEX.search(line)
                    if match:
                        self._total_duration = self.convert_to_secs(**match.groupdict())
                else:
                    # Get current video playback duration
                    match = self.CURRENT_PROGRESS_REGEX.search(line)
                    if match:
                        self._offset_time = self.convert_to_secs(**match.groupdict())
        # At this point, FFmpeg will already have stopped without us having to wait explicitly on it
        # because it will close stderr when it is complete (breaking the loop)
        print('FFmpeg finished')
        return self._ffmpeg_process.process.returncode
| {"/cytube_bot.py": ["/utils.py", "/media_player.py", "/file_explorer.py"], "/app.py": ["/cytube_bot.py"]} |
77,718 | wchill/WatchBot | refs/heads/master | /app.py | import ruamel.yaml
from cytube_bot import CytubeBot
from discord.ext import commands
CONFIG_FILE = 'config.yaml'
with open(CONFIG_FILE, 'r') as f:
settings = ruamel.yaml.load(f.read(), ruamel.yaml.RoundTripLoader)
DISCORD_CLIENT_KEY = settings['login']['discord_client_key']
STREAM_URL = settings['stream']['stream_url']
RTMP_ENDPOINT = settings['stream']['rtmp_endpoint']
MEDIA_DIRECTORY = settings['stream']['media_directory']
CHANNEL_WHITELIST = settings['channels']['whitelist']
bot = commands.Bot(command_prefix=commands.when_mentioned_or('!'), description='A bot that plays videos on CyTube')
bot.add_cog(CytubeBot(bot, STREAM_URL, RTMP_ENDPOINT, MEDIA_DIRECTORY, CHANNEL_WHITELIST))
bot.run(DISCORD_CLIENT_KEY) | {"/cytube_bot.py": ["/utils.py", "/media_player.py", "/file_explorer.py"], "/app.py": ["/cytube_bot.py"]} |
77,719 | wchill/WatchBot | refs/heads/master | /utils.py | import re
import humanize
from io import StringIO
async def ask_for_int(bot, message, lower_bound=None, upper_bound=None, timeout=30, timeout_msg=None, default=None):
    """Prompt the channel for an integer, optionally bounded (inclusive).

    Waits up to *timeout* seconds for a message consisting of digits within
    [lower_bound, upper_bound]; returns *default* when nobody answers in time.
    """
    def within_bounds(candidate):
        text = candidate.content
        if not text.isdigit():
            return False
        value = int(text)
        if lower_bound is not None and value < lower_bound:
            return False
        return not (upper_bound is not None and value > upper_bound)
    await bot.say(message)
    reply = await bot.wait_for_message(timeout=timeout, check=within_bounds)
    if reply is None:
        await bot.say(timeout_msg or 'No response received within 30 seconds. Using default value.')
        return default
    return int(reply.content)
def escape_msg(msg):
    """Backslash-escape Discord markdown metacharacters in *msg*."""
    return re.sub(r'([`*_\[\]~])', r'\\\1', msg)
def escape_code_block(msg):
    """Escape triple-backtick fences so *msg* can sit inside a code block."""
    return msg.replace('```', '\\`\\`\\`')
def parse_timestamp(time_str):
    """Parse '[hh:][mm:]ss[.cc]' into seconds (float); None if unparseable."""
    match = re.search(r'(?:(\d+):)?(?:(\d+):)?(?:(\d+)(?:\.(\d+))?)', time_str)
    if not match:
        return None
    hrs, mins, secs, ms = match.group(1, 2, 3, 4)
    if hrs and mins is None:
        # Only one ':' present: the first captured group is really minutes.
        hrs, mins = None, hrs
    return (3600 * int(hrs or 0)
            + 60 * int(mins or 0)
            + int(secs)
            + 0.01 * int(ms or 0))
def format_dir_entry(num, max_num, entry):
    """Right-align *num* to the width of *max_num*, then the escaped dir name."""
    pad = ' ' * (len(str(max_num)) - len(str(num)))
    return '{}{}) {}'.format(pad, num, escape_code_block(entry.name))
def format_file_entry(num, max_num, entry):
    """Format a numbered file-listing line with a right-aligned human size.

    When the size column would not fit within MAX_WIDTH columns, it wraps to
    the next line. (Column accounting intentionally uses the unescaped name
    length, matching the original layout.)
    """
    MAX_WIDTH = 78
    out = StringIO()
    prefix = '{}{}) '.format(' ' * (len(str(max_num)) - len(str(num))), num)
    out.write(prefix)
    out.write(escape_code_block(entry.name))
    used = len(prefix) + len(entry.name)
    size_str = humanize.naturalsize(entry.stat().st_size)
    if MAX_WIDTH - used <= len(size_str):
        out.write('\n')
        used = 0
    out.write(' ' * (MAX_WIDTH - used - len(size_str)))
    out.write(size_str)
    return out.getvalue()
| {"/cytube_bot.py": ["/utils.py", "/media_player.py", "/file_explorer.py"], "/app.py": ["/cytube_bot.py"]} |
77,720 | wchill/WatchBot | refs/heads/master | /file_explorer.py | import os
PROJECT_ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
class PseudoDirEntry:
    """Minimal stand-in for os.DirEntry, used for synthetic entries like '..'.

    Lazily computes and caches stat/type information, keyed on follow_symlinks.

    Fixes over the original:
      * ``is_dir`` returned from the ``_is_file`` cache (KeyError / wrong
        value) instead of ``_is_dir``.
      * ``is_dir``/``is_file`` tested ``not self.is_symlink`` — the bound
        method object, which is always truthy — instead of calling it.
    """

    def __init__(self, name, scandir_path):
        self.name = name
        self._scandir_path = scandir_path
        self.path = os.path.join(scandir_path, name)
        # Caches keyed by follow_symlinks (True/False).
        self._stat = dict()
        self._is_symlink = None
        self._is_file = dict()
        self._is_dir = dict()

    def inode(self):
        """Inode of the entry itself (never follows symlinks)."""
        if False not in self._stat:
            self._stat[False] = self.stat(follow_symlinks=False)
        return self._stat[False].st_ino

    def is_dir(self, *, follow_symlinks=True):
        """True when the entry is a directory (optionally excluding symlinks)."""
        if follow_symlinks not in self._is_dir:
            self._is_dir[follow_symlinks] = os.path.isdir(self.path) and (follow_symlinks or not self.is_symlink())
        return self._is_dir[follow_symlinks]

    def is_file(self, *, follow_symlinks=True):
        """True when the entry is a regular file (optionally excluding symlinks)."""
        if follow_symlinks not in self._is_file:
            self._is_file[follow_symlinks] = os.path.isfile(self.path) and (follow_symlinks or not self.is_symlink())
        return self._is_file[follow_symlinks]

    def is_symlink(self):
        """True when the entry itself is a symbolic link."""
        if self._is_symlink is None:
            self._is_symlink = os.path.islink(self.path)
        return self._is_symlink

    def stat(self, *, follow_symlinks=True):
        """Cached os.stat of the entry."""
        if follow_symlinks not in self._stat:
            self._stat[follow_symlinks] = os.stat(self.path, follow_symlinks=follow_symlinks)
        return self._stat[follow_symlinks]
class FileExplorer(object):
    """Sandboxed directory navigator rooted at *root_path*.

    All navigation and lookups are confined to the root; paths escaping it
    (via '..' or symlinks) are rejected.

    Fix over the original: ``is_safe_path`` used a string-prefix test, which
    wrongly accepted sibling paths sharing a prefix (e.g. ``/srv/database``
    for a root of ``/srv/data``); it now compares with os.path.commonpath.
    """

    def __init__(self, root_path=None):
        self._root_path = os.path.realpath(root_path) if root_path else PROJECT_ROOT_DIR
        self._current_path = self._root_path

    def is_safe_path(self, path, follow_symlinks=True):
        """Return True when *path* stays inside the sandbox root.

        With follow_symlinks, both sides are resolved through symlinks first.
        """
        root = os.path.realpath(self._root_path) if follow_symlinks else self._root_path
        target = os.path.realpath(path) if follow_symlinks else os.path.abspath(path)
        try:
            return os.path.commonpath([target, root]) == root
        except ValueError:
            # Different drives (Windows) or mixed absolute/relative inputs.
            return False

    def get_root_path(self):
        return self._root_path

    def get_current_path(self, relative=True):
        """Current directory, as '/...'-style path relative to the root by default."""
        if relative:
            my_path = os.path.relpath(self._current_path, self._root_path)
            return '/' + my_path if my_path != '.' else '/'
        return self._current_path

    def build_absolute_path(self, offset_abs_path):
        """Join a root-relative path onto the sandbox root."""
        return os.path.join(self._root_path, offset_abs_path)

    def get_files_in_current_dir(self, hidden=False, extensions=None):
        """List file entries here, optionally filtered by extension; hidden
        (dot-prefixed) files are skipped unless requested."""
        files = []
        for entry in os.scandir(self._current_path):
            if self.is_safe_path(entry.path) and entry.is_file() and (hidden or entry.name[0] != '.'):
                if extensions is None or os.path.splitext(entry.name)[1] in extensions:
                    files.append(entry)
        return files

    def get_dirs_in_current_dir(self, hidden=False):
        """List subdirectory entries here, plus a synthetic '..' while the
        parent is still inside the sandbox."""
        dirs = []
        for entry in os.scandir(self._current_path):
            if self.is_safe_path(entry.path) and entry.is_dir() and (hidden or entry.name[0] != '.'):
                dirs.append(entry)
        if self.is_safe_path(self.get_complete_path('..')):
            dirs.append(PseudoDirEntry('..', self._current_path))
        return dirs

    def change_directory(self, path, relative=True):
        """Move to *path* (relative to the cwd by default); returns success."""
        if relative:
            new_absolute_path = os.path.normpath(os.path.join(self._current_path, path))
        else:
            new_absolute_path = path
        if self.is_safe_path(new_absolute_path) and os.path.exists(new_absolute_path):
            self._current_path = new_absolute_path
            return True
        return False

    def change_to_root_dir(self):
        return self.change_directory(self._root_path, relative=False)

    def get_complete_path(self, relative_path):
        """Absolute path for *relative_path* under the current directory."""
        complete_path = os.path.join(self._current_path, relative_path)
        return complete_path

    def file_exists(self, path, relative=True):
        """True when *path* names an existing regular file inside the sandbox."""
        if relative:
            new_absolute_path = os.path.join(self._current_path, path)
        else:
            new_absolute_path = path
        return self.is_safe_path(new_absolute_path) and os.path.exists(new_absolute_path) and os.path.isfile(new_absolute_path)

    @staticmethod
    def filter_filenames_by_ext(filenames, extensions):
        """Keep only file names whose extension is in *extensions*."""
        filtered_filenames = [f for f in filenames if os.path.splitext(f)[1] in extensions]
        return filtered_filenames
| {"/cytube_bot.py": ["/utils.py", "/media_player.py", "/file_explorer.py"], "/app.py": ["/cytube_bot.py"]} |
77,725 | ChNajib/livre_dor | refs/heads/master | /home/sib_sdk.py | # Include the SendinBlue library\
from __future__ import print_function
import time
import sib_api_v3_sdk
from sib_api_v3_sdk.rest import ApiException
from pprint import pprint
configuration = sib_api_v3_sdk.Configuration()
configuration.api_key['api-key'] = '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'
# sib_api_v3_sdk.configuration.api_key_prefix['api-key'] = 'Bearer'
api_instance = sib_api_v3_sdk.EmailCampaignsApi()
# Define the campaign settings\
email_campaigns = sib_api_v3_sdk.CreateEmailCampaign(
name= "Livre d'Or",
subject= "",
sender= { "name": "From name", "email": "paulo.najib@gmail.com"},
type= "classic",
# Content that will be sent\
html_content= "Congratulations! You successfully sent this example campaign via the SendinBlue API.",
# Select the recipients\
recipients= {"listIds": [2, 7]},
# Schedule the sending in one hour\
scheduled_at= "2018-01-01 00:00:01"
)
# Make the call to the client\
try:
api_response = api_instance.create_email_campaign(email_campaigns)
print(api_response)
except ApiException as e:
print("Exception when calling EmailCampaignsApi->create_email_campaign: %s\n" % e)
| {"/home/views.py": ["/home/models.py"]} |
77,726 | ChNajib/livre_dor | refs/heads/master | /home/views.py | import datetime
from django.contrib.auth import authenticate,login,logout
from home.forms import MessageForm, UserForm
from .models import Message
from django.shortcuts import render, get_object_or_404
import sib_sdk
import sib_api_v3_sdk
import sib_api_v3_sdk.models.send_template_email
def send_to(email, template_id):
    """Send the SendinBlue transactional template *template_id* to *email*.

    Builds a fresh API client per call; template_id must identify an existing
    template in the SendinBlue account (2 = creation, 5 = deletion, per the
    callers below).
    """
    send_email = sib_api_v3_sdk.SendEmail([email])
    # NOTE(review): no API key is set on this Configuration; presumably auth
    # is configured globally via the `import sib_sdk` side effect above --
    # confirm before reusing this function in isolation.
    configuration = sib_api_v3_sdk.Configuration()
    api_instance = sib_api_v3_sdk.SMTPApi(sib_api_v3_sdk.ApiClient(configuration))
    api_instance.send_template(template_id, send_email)
########## REDIRECTING TO HOME PAGE ##########
def index(request):
    """Render the guest-book home page, or the login page for anonymous users."""
    if not request.user.is_authenticated():
        return render(request, 'home/login_user.html')
    context = {
        'all_messages': Message.objects.all(),
        'current_user': request.user,
        'is_staff': bool(request.user.is_staff),
    }
    return render(request, 'home/index.html', context)
########## CREATING NEW MESSAGES ##########
def create_message(request):
    """Show the new-message form; on valid POST, save and email the author.

    The saved message is stamped with the current user and a pre-formatted
    timestamp string, then SendinBlue template 2 is sent to the author.
    """
    if not request.user.is_authenticated():
        return render(request, 'home/login_user.html')
    else:
        form = MessageForm(request.POST or None)
        is_staff = False
        if request.user.is_staff:
            is_staff = True
        if form.is_valid():
            message = form.save(commit=False)
            message.user = request.user
            # Stored as a display string (Message.date is a CharField).
            message.date = datetime.datetime.now().strftime('%d %b %H:%M')
            message.save()
            send_to(message.user.email, 2)
            # NOTE(review): rendering instead of redirecting after a POST
            # allows duplicate submissions on browser refresh (no PRG).
            return render(request, 'home/index.html', {'all_messages': Message.objects.all(), 'current_user': request.user, 'is_staff': is_staff})
        return render(request, 'home/message_form.html', {'form': form})
########## DELETING EXISTING MESSAGES ##########
def delete_message(request, message_id):
    """Delete message *message_id* if the requester owns it or is staff.

    Sends SendinBlue template 5 to the requester only when a deletion actually
    happened; everyone is then shown the (updated) message list.
    """
    # Use the already-imported get_object_or_404 so a bogus id yields a 404
    # instead of an unhandled Message.DoesNotExist (500).
    msg = get_object_or_404(Message, pk=message_id)
    user_messages = Message.objects.filter(user=request.user)
    all_messages = Message.objects.all()
    users_email = request.user.email
    is_staff = bool(request.user.is_staff)
    if msg.user == request.user or is_staff:
        msg.delete()
        send_to(users_email, 5)
    # QuerySets are lazy: all_messages evaluates at template-render time,
    # i.e. after the deletion above, so the list is current.
    context = {'all_messages': all_messages,
               'current_user': request.user,
               'user_messages': user_messages,
               'is_staff': is_staff}
    return render(request, 'home/index.html', context)
##################################################
# def update_message(request, message_id,title,content):
# msg = Message.objects.get(pk=message_id)
# user_messages = Message.objects.filter(user=request.user)
# all_messages = Message.objects.all()
# if msg.user == request.user :
# if(title != None and content!= None):
# msg.title = title
# msg.content = content
# # return render(request, 'home/index.html',{'all_messages':all_messages})
# context ={'all_messages': all_messages,
# 'current_user': request.user,
# 'user_messages' : user_messages}
# return render(request, 'home/index.html', context)
########## USER LOGIN ##########
def login_user(request):
    """Authenticate a username/password POST and open a session.

    GET (or fall-through) renders the login form; failed authentication or a
    disabled account re-renders the form with an error message.
    """
    if request.method == "POST":
        username = request.POST['username']
        password = request.POST['password']
        user = authenticate(username=username, password=password)
        if user is not None:
            if user.is_active:
                login(request, user)
                # BUG FIX: is_staff used to be read from request.user *before*
                # login, i.e. from the anonymous user, so it was always False
                # and staff never saw the staff UI after logging in. Read it
                # from the authenticated user instead.
                return render(request, 'home/index.html', {'all_messages': Message.objects.all(), 'current_user': request.user, 'is_staff': user.is_staff})
            else:
                return render(request, 'home/login_user.html', {'error_message': 'Your account has been disabled'})
        else:
            return render(request, 'home/login_user.html', {'error_message': 'Invalid login'})
    return render(request, 'home/login_user.html')
########## CREATING NEW ACCOUNT ##########
def register(request):
    """Create an account from the registration form and log the new user in."""
    form = UserForm(request.POST or None)
    if form.is_valid():
        new_user = form.save(commit=False)
        username = form.cleaned_data['username']
        password = form.cleaned_data['password']
        new_user.set_password(password)
        new_user.save()
        # Re-authenticate so the session backend is attached to the user.
        new_user = authenticate(username=username, password=password)
        if new_user is not None and new_user.is_active:
            login(request, new_user)
            return render(request, 'home/index.html',
                          {'all_messages': Message.objects.all(),
                           'current_user': request.user})
    return render(request, 'home/register.html',
                  {'form': form,
                   'user_is_in': request.user.is_authenticated()})
###########################################
########## USER LOGOUT ##########
def logout_user(request):
    """Log out the current user and show the login page with a blank form."""
    logout(request)
    context = {'form': UserForm(request.POST or None)}
    return render(request, 'home/login_user.html', context)
| {"/home/views.py": ["/home/models.py"]} |
77,727 | ChNajib/livre_dor | refs/heads/master | /home/urls.py | from django.conf.urls import url, include
from django.views.generic import RedirectView
from . import views
# Route table for the guest-book ("home") app.
urlpatterns = [
    url(r'^$', views.index, name='index'),
    # url(r'^(?P<message_id>[0-9]+)/$', views.detail,name='detail'),
    url(r'^create_message/$', views.create_message, name='create_message'),
    url(r'^(?P<message_id>[0-9]+)/delete_message/$', views.delete_message, name='delete_message'),
    #url(r'^(?P<message_id>[0-9]+)/update_message/$', views.update_message, name='update_message'),
    url(r'^register/$', views.register, name='register'),
    url(r'^login_user/$', views.login_user, name='login_user'),
    url(r'^logout_user/$', views.logout_user, name='logout_user'),
    # Catch-all redirect to /home; must remain last -- anything after it is
    # unreachable.
    url(r'^.*$', RedirectView.as_view(url='home', permanent=False), name='index')
    #url(r'^', include('home.urls', namespace='home')),
]
| {"/home/views.py": ["/home/models.py"]} |
77,728 | ChNajib/livre_dor | refs/heads/master | /home/models.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import datetime
from django.contrib.auth.models import Permission, User
from django.db import models
class Message(models.Model):
    """A guest-book entry posted by a user."""
    # NOTE(review): ForeignKey without on_delete implies Django < 2.0.
    user = models.ForeignKey(User, default=1)
    title = models.CharField(max_length=250)
    content = models.CharField(max_length=1000)
    # BUG FIX: pass the callable, not its result. `datetime.date.today()`
    # was evaluated once at import time, freezing the default date for the
    # lifetime of the server process; the callable is re-evaluated per row.
    date = models.CharField(max_length=1000, default=datetime.date.today)

    def __str__(self):
        return self.title
| {"/home/views.py": ["/home/models.py"]} |
77,729 | Valdecir190199/PythonEmCasa | refs/heads/master | /Adocao/servicos/urls.py | from django.urls import path
from .views import *
# Route table for the "servicos" app: a single static services page.
urlpatterns = [
    path('servicos/', PaginaServicoView.as_view(), name="servicos")
]
| {"/Adocao/servicos/urls.py": ["/Adocao/servicos/views.py"], "/Adocao/animais/urls.py": ["/Adocao/animais/views.py"]} |
77,730 | Valdecir190199/PythonEmCasa | refs/heads/master | /Adocao/animais/urls.py | from django.urls import path
from .views import *
# Route table for the "animais" app: three static pages.
urlpatterns = [
    path('inicio/', PaginaInicialView.as_view(), name="index"),
    path('sobre/', PaginaSobreView.as_view(), name="sobre"),
    path('portfolio/', PaginaPortfolioView.as_view(), name="portfolio"),
]
| {"/Adocao/servicos/urls.py": ["/Adocao/servicos/views.py"], "/Adocao/animais/urls.py": ["/Adocao/animais/views.py"]} |
77,731 | Valdecir190199/PythonEmCasa | refs/heads/master | /Adocao/animais/views.py | from django.shortcuts import render
#importando a classe genérica para exibir
#uma pagina simples
from django.views.generic import TemplateView
# Create your views here.
class PaginaInicialView(TemplateView):
    """Static landing page (renders index.html)."""
    template_name = "index.html"
class PaginaSobreView(TemplateView):
    """Static "about" page (renders sobre.html)."""
    template_name = "sobre.html"
class PaginaPortfolioView(TemplateView):
    """Static portfolio page (renders portfolio.html)."""
    template_name = "portfolio.html"
| {"/Adocao/servicos/urls.py": ["/Adocao/servicos/views.py"], "/Adocao/animais/urls.py": ["/Adocao/animais/views.py"]} |
77,732 | Valdecir190199/PythonEmCasa | refs/heads/master | /Adocao/servicos/views.py | from django.shortcuts import render
from django.views.generic import TemplateView
# Create your views here.
class PaginaServicoView(TemplateView):
    """Static services page (renders servicos.html)."""
    template_name = "servicos.html"
| {"/Adocao/servicos/urls.py": ["/Adocao/servicos/views.py"], "/Adocao/animais/urls.py": ["/Adocao/animais/views.py"]} |
77,733 | Valdecir190199/PythonEmCasa | refs/heads/master | /django2019/bin/django-admin.py | #!/home/valdecir/Envs/django2019/bin/python3
# Virtualenv entry point for Django's command-line utility.
from django.core import management
if __name__ == "__main__":
    management.execute_from_command_line()
| {"/Adocao/servicos/urls.py": ["/Adocao/servicos/views.py"], "/Adocao/animais/urls.py": ["/Adocao/animais/views.py"]} |
77,734 | hanwen0529/Image-Colorization-Super_resolution-With-Unet | refs/heads/master | /regression_train.py | from __future__ import print_function
import os
import time
import numpy as np
import torch
import torch.nn as nn
from torch import optim
from data_processor import process_reg, get_batch, get_torch_vars, plot_reg
from load_data import load_cifar10
from model.regressioncnn import RegressionCNN
class AttrDict(dict):
    """A dict whose keys are also readable/writable as attributes."""

    def __init__(self, *args, **kwargs):
        dict.__init__(self, *args, **kwargs)
        # Aliasing __dict__ to the mapping itself makes d.key <-> d['key'].
        self.__dict__ = self
def train(args):
    """Train RegressionCNN to colourize greyscale CIFAR-10 horse images.

    Args:
        args: attribute-style config with fields seed, experiment_name,
            kernel, num_filters, learn_rate, batch_size, epochs, gpu,
            visualize and checkpoint.

    Returns:
        The trained RegressionCNN model.
    """
    # Set the maximum number of threads to prevent crash in Teaching Labs
    torch.set_num_threads(5)
    # Numpy random seed (controls the data shuffles in process_reg)
    np.random.seed(args.seed)
    # Save directory; created if missing
    save_dir = "outputs/" + args.experiment_name
    if not os.path.exists(save_dir):
        os.makedirs(save_dir)
    # Load the model
    cnn = RegressionCNN(args.kernel, args.num_filters)
    # Pixelwise L2 regression loss
    criterion = nn.MSELoss()
    optimizer = optim.Adam(cnn.parameters(), lr=args.learn_rate)
    # Loading & transforming data
    print("Loading data...")
    (x_train, y_train), (x_test, y_test) = load_cifar10()
    train_rgb, train_grey = process_reg(x_train, y_train)
    test_rgb, test_grey = process_reg(x_test, y_test)
    print("Beginning training ...")
    if args.gpu:
        cnn.cuda()
    start = time.time()
    for epoch in range(args.epochs):
        # Train the Model
        cnn.train()  # Change model to 'train' mode
        for i, (xs, ys) in enumerate(get_batch(train_grey,
                                               train_rgb,
                                               args.batch_size)):
            # BUG FIX: get_torch_vars has signature (xs, ys, gpu, reg). The
            # original passed (True, args.gpu), i.e. always-GPU and
            # reg=args.gpu; correct for this regression task is gpu=args.gpu
            # and reg=True (float targets for MSELoss).
            images, labels = get_torch_vars(xs, ys, args.gpu, True)
            # Forward + Backward + Optimize
            optimizer.zero_grad()
            outputs = cnn(images)
            loss = criterion(outputs, labels)
            loss.backward()
            optimizer.step()
        print('Epoch [%d/%d], Loss: %.4f' % (epoch + 1, args.epochs, loss.data.item()))
        # Evaluate the model
        cnn.eval()  # Change model to 'eval' mode (BN uses moving mean/var).
        losses = []
        for i, (xs, ys) in enumerate(get_batch(test_grey,
                                               test_rgb,
                                               args.batch_size)):
            images, labels = get_torch_vars(xs, ys, args.gpu)
            outputs = cnn(images)
            val_loss = criterion(outputs, labels)
            losses.append(val_loss.data.item())
        val_loss = np.mean(losses)
        print('Epoch [%d/%d], Val Loss: %.4f' % (epoch + 1, args.epochs, val_loss))
    # Plot predictions for the last validation batch of the final epoch.
    print("Generating predictions...")
    plot_reg(xs, ys, outputs.cpu().data,
             path=save_dir + "/regression_output.png", visualize=args.visualize)
    if args.checkpoint:
        print('Saving model...')
        torch.save(cnn.state_dict(), args.checkpoint)
    return cnn
if __name__ == '__main__':
    # Hyper-parameters for the regression experiment; AttrDict gives the
    # attribute-style access that train() expects.
    args = AttrDict()
    args_dict = {
        'gpu': True,            # requires a CUDA device
        'valid': False,
        'checkpoint': "",       # empty string disables model saving
        'kernel': 3,
        'num_filters': 32,
        'learn_rate': 0.001,
        'batch_size': 100,
        'epochs': 5,
        'seed': 0,
        'plot': True,
        'experiment_name': 'regression_cnn',
        'visualize': False,
        'downsize_input': False,
        # NOTE(review): 'valid', 'plot' and 'downsize_input' are not read by
        # this file's train() -- presumably kept for parity with
        # classification_train; confirm before removing.
    }
    args.update(args_dict)
    cnn = train(args)
77,735 | hanwen0529/Image-Colorization-Super_resolution-With-Unet | refs/heads/master | /model/regressioncnn.py | from __future__ import print_function
import torch
import torch.nn as nn
import torch.nn.functional as F
import math
"""
torch.nn是专门为神经网络设计的模块化接口。nn构建于autograd之上,可以用来定义和运行神经网络。
nn.Module是nn中十分重要的类,包含网络各层的定义及forward方法;Module既可以表示神经网络中的某个层(layer),也可以表示一个包含很多层的神经网络。
定义自已的网络:
需要继承nn.Module类,并实现forward方法。
一般把网络中具有可学习参数的层放在构造函数__init__()中,不具有可学习参数的层(如ReLU)可放在构造函数中,也可不放在构造函数中(而在forward中使
用nn.functional来代替)。
只要在nn.Module的子类中定义了forward函数,backward函数就会被自动实现(利用Autograd). 在forward函数中可以使用任何Variable支持的函数,
因为在整个pytorch构建的图中,是Variable在流动。还可以使用if,for,print,log等python语法.
注:Pytorch基于nn.Module构建的模型中,只支持mini-batch的Variable输入方式
"""
class MyConv2d(nn.Module):
    """Simplified re-implementation of ``nn.Conv2d`` (2D convolution, stride 1).

    Weight and bias are registered as ``nn.Parameter`` so they show up in
    ``parameters()`` (and thus in the optimizer and the saved state dict),
    while intermediate values do not.
    """

    def __init__(self, in_channels, out_channels, kernel_size, padding=None):
        # Custom layers must subclass nn.Module and call its constructor.
        super(MyConv2d, self).__init__()
        self.in_channels = in_channels
        self.out_channels = out_channels
        self.kernel_size = kernel_size
        # Default to "same" padding for odd kernel sizes.
        self.padding = kernel_size // 2 if padding is None else padding
        self.weight = nn.parameter.Parameter(
            torch.Tensor(out_channels, in_channels, kernel_size, kernel_size))
        self.bias = nn.parameter.Parameter(torch.Tensor(out_channels))
        self.reset_parameters()

    def reset_parameters(self):
        """Initialise weight and bias uniformly in [-1/sqrt(fan_in), 1/sqrt(fan_in)]."""
        fan_in = self.in_channels * self.kernel_size * self.kernel_size
        bound = 1. / math.sqrt(fan_in)
        self.weight.data.uniform_(-bound, bound)
        self.bias.data.uniform_(-bound, bound)

    def forward(self, input):
        return F.conv2d(input, self.weight, self.bias, padding=self.padding)
class RegressionCNN(nn.Module):
    """Encoder-decoder CNN regressing RGB colours from a 1-channel greyscale image.

    Two MaxPool downsampling stages, a bottleneck convolution, two Upsample
    stages, and a final plain convolution producing 3 output channels. The
    input's spatial size must be divisible by 4 for the output resolution to
    match the input.
    """
    def __init__(self, kernel, num_filters):
        super(RegressionCNN, self).__init__()
        padding = kernel // 2
        # 1 -> num_filters channels, spatial size halved
        self.downconv1 = nn.Sequential(
            nn.Conv2d(1, num_filters, kernel_size=kernel, padding=padding),
            nn.BatchNorm2d(num_filters),
            nn.ReLU(),
            nn.MaxPool2d(2),)
        # num_filters -> 2*num_filters channels, spatial size halved again
        self.downconv2 = nn.Sequential(
            nn.Conv2d(num_filters, num_filters*2, kernel_size=kernel, padding=padding),
            nn.BatchNorm2d(num_filters*2),
            nn.ReLU(),)
        # (continued) bottleneck at 1/4 resolution
        self.rfconv = nn.Sequential(
            nn.Conv2d(num_filters*2, num_filters*2, kernel_size=kernel, padding=padding),
            nn.BatchNorm2d(num_filters*2),
            nn.ReLU(),)
        # upsample back to 1/2 resolution
        self.upconv1 = nn.Sequential(
            nn.Conv2d(num_filters*2, num_filters, kernel_size=kernel, padding=padding),
            nn.BatchNorm2d(num_filters),
            nn.ReLU(),
            nn.Upsample(scale_factor=2),)
        # upsample back to full resolution, 3 colour channels
        self.upconv2 = nn.Sequential(
            nn.Conv2d(num_filters, 3, kernel_size=kernel, padding=padding),
            nn.BatchNorm2d(3),
            nn.ReLU(),
            nn.Upsample(scale_factor=2),)
        # final RGB refinement conv (custom layer defined above)
        self.finalconv = MyConv2d(3, 3, kernel_size=kernel)
    def forward(self, x):
        out = self.downconv1(x)
        out = self.downconv2(out)
        out = self.rfconv(out)
        out = self.upconv1(out)
        out = self.upconv2(out)
        out = self.finalconv(out)
        # Intermediate results are all named `out` so Python can release
        # activations that are no longer referenced; tensors still needed for
        # backprop are kept alive by the autograd graph's own references.
        # The return value is a Variable/Tensor.
        return out
77,736 | hanwen0529/Image-Colorization-Super_resolution-With-Unet | refs/heads/master | /classification_train.py | from __future__ import print_function
import os
import time
import numpy as np
import matplotlib.pyplot as plt
import torch
import torch.nn as nn
from data_processor import process_cls, get_rgb_cat, get_batch, get_torch_vars, compute_loss, plot_cls, run_validation_step, plot_activation
from load_data import load_cifar10
from model.colourizationcnn import CNN,UNet
class AttrDict(dict):
    """Dictionary with attribute-style access: d.key is d['key']."""

    def __init__(self, *args, **kwargs):
        dict.__init__(self, *args, **kwargs)
        self.__dict__ = self  # attribute namespace and mapping are one object
def train(args, cnn=None):
    """Train a pixelwise colour-classification network (CNN or UNet).

    Args:
        args: attribute-style config (colours path, model, kernel,
            num_filters, learn_rate, batch_size, epochs, seed, gpu, plot,
            checkpoint, experiment_name, visualize, downsize_input).
        cnn: optional pre-built model to continue training; when None a fresh
            CNN/UNet is constructed according to args.model.

    Returns:
        The trained model.
    """
    # Set the maximum number of threads to prevent crash in Teaching Labs
    torch.set_num_threads(5)
    # Numpy random seed
    np.random.seed(args.seed)
    # Save directory
    save_dir = "outputs/" + args.experiment_name
    # LOAD THE COLOURS CATEGORIES
    # NOTE(review): on NumPy >= 1.16.3 loading this pickled .npy also needs
    # allow_pickle=True -- confirm against the numpy version in use.
    colours = np.load(args.colours,encoding='bytes')[0]
    num_colours = np.shape(colours)[0]
    # INPUT CHANNEL: greyscale (1) normally, blurred RGB (3) for the
    # super-resolution experiment.
    num_in_channels = 1 if not args.downsize_input else 3
    # LOAD THE MODEL
    if cnn is None:
        if args.model == "CNN":
            cnn = CNN(args.kernel, args.num_filters, num_colours, num_in_channels)
        elif args.model == "UNet":
            cnn = UNet(args.kernel, args.num_filters, num_colours, num_in_channels)
    # LOSS FUNCTION: pixelwise classification over the colour categories.
    criterion = nn.CrossEntropyLoss()
    optimizer = torch.optim.Adam(cnn.parameters(), lr=args.learn_rate)
    # DATA
    print("Loading data...")
    (x_train, y_train), (x_test, y_test) = load_cifar10()
    print("Transforming data...")
    train_rgb, train_grey = process_cls(x_train, y_train, downsize_input=args.downsize_input)
    train_rgb_cat = get_rgb_cat(train_rgb, colours)
    test_rgb, test_grey = process_cls(x_test, y_test, downsize_input=args.downsize_input)
    test_rgb_cat = get_rgb_cat(test_rgb, colours)
    # Create the outputs folder if not created already
    if not os.path.exists(save_dir):
        os.makedirs(save_dir)
    print("Beginning training ...")
    if args.gpu:
        cnn.cuda()
    start = time.time()
    train_losses = []
    valid_losses = []
    valid_accs = []
    for epoch in range(args.epochs):
        # Train the Model
        cnn.train()  # change model to 'train' mode
        losses = []
        for i, (xs, ys) in enumerate(get_batch(train_grey,
                                               train_rgb_cat,
                                               args.batch_size)):
            # reg=False: integer category labels for CrossEntropyLoss.
            images, labels = get_torch_vars(xs, ys, args.gpu, False)
            # Forward + Backward + Optimize
            optimizer.zero_grad()
            outputs = cnn(images)
            loss = compute_loss(criterion,
                                outputs,
                                labels,
                                batch_size=args.batch_size,
                                num_colours=num_colours)
            loss.backward()
            optimizer.step()
            losses.append(loss.data.item())
        # plot training images (last batch of the epoch)
        if args.plot:
            _, predicted = torch.max(outputs.data, 1, keepdim=True)
            plot_cls(xs, ys, predicted.cpu().numpy(), colours,
                     save_dir + '/train_%d.png' % epoch,
                     args.visualize,
                     args.downsize_input)
        # record mean training loss for the epoch
        avg_loss = np.mean(losses)
        train_losses.append(avg_loss)
        time_elapsed = time.time() - start
        print('Epoch [%d/%d], Loss: %.4f, Time (s): %d' % (
            epoch + 1, args.epochs, avg_loss, time_elapsed))
        # Evaluate the model
        cnn.eval()  # Change model to 'eval' mode (BN uses moving mean/var).
        val_loss, val_acc = run_validation_step(cnn,
                                                criterion,
                                                test_grey,
                                                test_rgb_cat,
                                                args.batch_size,
                                                colours,
                                                save_dir + '/test_%d.png' % epoch,
                                                args.visualize,
                                                args.downsize_input,
                                                args.gpu, False)
        time_elapsed = time.time() - start
        valid_losses.append(val_loss)
        valid_accs.append(val_acc)
        print('Epoch [%d/%d], Val Loss: %.4f, Val Acc: %.1f%%, Time(s): %d' % (
            epoch + 1, args.epochs, val_loss, val_acc, time_elapsed))
    # Plot training curve
    plt.figure()
    plt.plot(train_losses, "ro-", label="Train")
    plt.plot(valid_losses, "go-", label="Validation")
    plt.legend()
    plt.title("Loss")
    plt.xlabel("Epochs")
    plt.savefig(save_dir + "/training_curve.png")
    if args.checkpoint:
        print('Saving model...')
        torch.save(cnn.state_dict(), args.checkpoint)
    return cnn
if __name__ == '__main__':
    # Default experiment: plain CNN colourization. Switch 'model' to "UNet"
    # and/or 'downsize_input' to True for the other experiments described in
    # the example block below.
    args = AttrDict()
    args_dict = {
        'gpu': True,
        'valid': False,
        'checkpoint': "",
        'colours': './data/colours/colour_kmeans24_cat7.npy',
        'model': "CNN", # ["CNN","Unet"]
        'kernel': 3,
        'num_filters': 32,
        'learn_rate': 0.001,
        'batch_size': 100,
        'epochs': 5,
        'seed': 0,
        'plot': True,
        'experiment_name': 'colourization_cnn',
        'visualize': False,
        'downsize_input': False, # [False, True] Using 'True' to do super-resolution experiment
    }
    args.update(args_dict)
    cnn = train(args)
    '''
    # To visualize CNN
    args = AttrDict()
    args_dict = {
    'colours':'./data/colours/colour_kmeans24_cat7.npy',
    'index':0,
    'experiment_name': 'colourization_cnn',
    'downsize_input':False,
    }
    args.update(args_dict)
    plot_activation(args, cnn, False)
    # To visualize Unet
    args = AttrDict()
    args_dict = {
    'colours':'./data/colours/colour_kmeans24_cat7.npy',
    'index':0,
    'experiment_name': 'colourization_unet',
    'downsize_input':False,
    }
    args.update(args_dict)
    plot_activation(args, unet_cnn, False)
    # To visualize super-resolution
    args = AttrDict()
    args_dict = {
    'colours':'./data/colours/colour_kmeans24_cat7.npy',
    'index':0,
    'experiment_name': 'super_res_unet',
    'downsize_input':True,
    }
    args.update(args_dict)
    plot_activation(args, sr_cnn, False)
    '''
77,737 | hanwen0529/Image-Colorization-Super_resolution-With-Unet | refs/heads/master | /data_processor.py | """
Colourization of CIFAR-10 Horses via regression/classification.
"""
from __future__ import print_function
import os
import scipy.misc
import numpy as np
import matplotlib.pyplot as plt
from PIL import Image
import torch
import torch.nn as nn
from torch.autograd import Variable
from load_data import load_cifar10
HORSE_CATEGORY = 7
######################################################################
# Torch Helper
######################################################################
def get_torch_vars(xs, ys, gpu=True, reg=True):
    """Convert numpy arrays to autograd Variables, optionally on GPU.

    Args:
        xs (numpy array): greyscale input batch
        ys (numpy array): targets -- float colours when reg=True,
            integer category labels when reg=False
        gpu (bool): move the tensors to CUDA when True
        reg (bool): regression (float targets) vs classification (long targets)

    Returns:
        (Variable(xs), Variable(ys))
    """
    inputs = torch.from_numpy(xs).float()
    targets = torch.from_numpy(ys)
    targets = targets.float() if reg else targets.long()
    if gpu:
        inputs = inputs.cuda()
        targets = targets.cuda()
    return Variable(inputs), Variable(targets)
def compute_loss(criterion, outputs, labels, batch_size, num_colours):
    """Flatten pixelwise predictions/targets and apply *criterion*.

    Since this is a pixelwise prediction task, the [B, num_colours, 32, 32]
    logits and [B, 1, 32, 32] integer targets are moved to channel-last
    layout and collapsed to 2D/1D, as e.g. CrossEntropyLoss requires.

    Args:
        criterion: pytorch loss criterion
        outputs (tensor): predicted per-pixel logits
        labels (tensor): ground-truth per-pixel categories
        batch_size (int): batch size used for training
        num_colours (int): number of colour categories

    Returns:
        scalar loss tensor
    """
    n_pixels = batch_size * 32 * 32
    flat_out = outputs.transpose(1, 3).contiguous().view(n_pixels, num_colours)
    flat_lab = labels.transpose(1, 3).contiguous().view(n_pixels)
    return criterion(flat_out, flat_lab)
def get_batch(x, y, batch_size):
    """Yield successive (batch_x, batch_y) slices of at most *batch_size* items.

    x and y must share the same leading dimension; the final batch may be
    smaller when the dataset size is not a multiple of batch_size.

    Args:
        x: input values, [N, C, H, W]
        y: output values, [N, C', H, W]
        batch_size: maximum size of each batch

    Yields:
        (batch_x, batch_y) tuples.
    """
    n = np.shape(x)[0]
    assert n == np.shape(y)[0]
    start = 0
    while start < n:
        yield (x[start:start + batch_size, :, :, :],
               y[start:start + batch_size, :, :, :])
        start += batch_size
######################################################################
# Regression Data related code
######################################################################
def process_reg(xs, ys, max_pixel=256.0, downsize_input=False):
    """Keep only horse images, shuffle, normalise to [0, 1], and build greys.

    Args:
        xs: colour RGB pixel values, [N, 3, H, W]
        ys: category labels; only rows equal to HORSE_CATEGORY are kept
        max_pixel: maximum pixel value in the original data (normalisation divisor)
        downsize_input: additionally blur the greyscale with 4x avg-pool
            followed by bilinear upsampling

    Returns:
        (xs, grey): normalised, shuffled colour images and their greyscale
        (per-pixel channel mean) counterparts, [N', 3, H, W] and [N', 1, H, W].
    """
    xs = xs / max_pixel
    xs = xs[np.where(ys == HORSE_CATEGORY)[0], :, :, :]
    np.random.shuffle(xs)
    # Greyscale = mean over the colour channels, keeping a channel dim of 1.
    grey = np.mean(xs, axis=1, keepdims=True)
    if downsize_input:
        blur = nn.Sequential(
            nn.AvgPool2d(2), nn.AvgPool2d(2),
            nn.Upsample(scale_factor=2, mode='bilinear'),
            nn.Upsample(scale_factor=2, mode='bilinear'))
        grey = blur.forward(torch.from_numpy(grey).float()).data.numpy()
    return (xs, grey)
def plot_reg(input, gtlabel, output, path, visualize):
    """Save or show a grid of greyscale inputs, ground-truth and predicted RGB.

    Rows: greyscale input tiled to 3 channels, ground-truth colour images,
    regression outputs; only the first k=10 images of the batch are drawn.

    Args:
        input: greyscale input batch, [B, 1, H, W]
        gtlabel: ground-truth RGB batch, [B, 3, H, W]
        output: predicted RGB batch, [B, 3, H, W]
        path: file path the figure is saved to when visualize is False
        visualize: show the figure interactively instead of saving
    """
    k = 10
    # Channel-last layout for imshow.
    grey = np.transpose(input[:k,:,:,:], [0,2,3,1])
    gtcolor = np.transpose(gtlabel[:k,:,:,:], [0,2,3,1])
    predcolor = np.transpose(output[:k,:,:,:], [0,2,3,1])
    # Stack the three rows into one image: grey / ground truth / prediction.
    img = np.vstack([
        np.hstack(np.tile(grey, [1,1,1,3])),
        np.hstack(gtcolor),
        np.hstack(predcolor)])
    #plt.figure()
    plt.grid('off')
    plt.imshow(img, vmin=0., vmax=1.)
    if visualize:
        plt.show()
    else:
        plt.savefig(path)
######################################################################
# Classification Data related code
######################################################################
def get_rgb_cat(xs, colours):
    """Map RGB images to nearest-colour category indices, in memory-safe chunks.

    _get_rgb_cat is memory-intensive, so large inputs are split into chunks
    of 100 images and the per-chunk results concatenated.

    Args:
        xs: float numpy array of RGB images in [B, C, H, W] format
        colours: numpy array of colour categories and their RGB values

    Returns:
        result: int numpy array of shape [B, 1, H, W]
    """
    if np.shape(xs)[0] < 100:
        # BUG FIX: the original called _get_rgb_cat(xs) without the required
        # `colours` argument, raising TypeError for batches under 100 images.
        return _get_rgb_cat(xs, colours)
    batch_size = 100
    nexts = []
    for i in range(0, np.shape(xs)[0], batch_size):
        nexts.append(_get_rgb_cat(xs[i:i + batch_size, :, :, :], colours))
    return np.concatenate(nexts, axis=0)
def _get_rgb_cat(xs, colours):
"""
Get colour categories given RGB values. This is done by choosing
the colour in `colours` that is the closest (in RGB space) to
each point in the image `xs`. This function is a little memory
intensive, and so the size of `xs` should not be too large.
Args:
xs: float numpy array of RGB images in [B, C, H, W] format
colours: numpy array of colour categories and their RGB values
Returns:
result: int numpy array of shape [B, 1, H, W]
"""
num_colours = np.shape(colours)[0]
xs = np.expand_dims(xs, 0)
cs = np.reshape(colours, [num_colours, 1, 3, 1, 1])
dists = np.linalg.norm(xs - cs, axis=2) # 2 = colour axis
cat = np.argmin(dists, axis=0)
cat = np.expand_dims(cat, axis=1)
return cat
def get_cat_rgb(cats, colours):
    """Look up RGB values for colour-category indices.

    Args:
        cats: integer numpy array of colour categories (any shape)
        colours: numpy array [num_colours, 3] of category RGB values

    Returns:
        numpy array of RGB colours with shape cats.shape + (3,)
    """
    return colours[cats]
def process_cls(xs, ys, max_pixel=256.0, downsize_input=False):
    """Keep only horse images, shuffle, normalise to [0, 1], and build inputs.

    Args:
        xs: colour RGB pixel values, [N, 3, H, W]
        ys: category labels; only rows equal to HORSE_CATEGORY are kept
        max_pixel: maximum pixel value in the original data (normalisation divisor)
        downsize_input: when True (super-resolution experiment), the second
            return value is the blurred *colour* images (4x avg-pool then 2x+2x
            upsample) instead of the greyscale images

    Returns:
        (xs, grey_or_downsized): normalised shuffled colour images, paired
        with either [N', 1, H, W] greyscale or [N', 3, H, W] blurred colour.
    """
    xs = xs / max_pixel
    xs = xs[np.where(ys == HORSE_CATEGORY)[0], :, :, :]
    np.random.shuffle(xs)
    grey = np.mean(xs, axis=1, keepdims=True)
    if not downsize_input:
        return (xs, grey)
    blur = nn.Sequential(nn.AvgPool2d(2),
                         nn.AvgPool2d(2),
                         nn.Upsample(scale_factor=2),
                         nn.Upsample(scale_factor=2))
    xs_downsized = blur.forward(torch.from_numpy(xs).float()).data.numpy()
    return (xs, xs_downsized)
def run_validation_step(cnn, criterion, test_grey, test_rgb_cat, batch_size,
                        colours, plotpath=None, visualize=True, downsize_input=False, gpu=False, reg=True):
    """Evaluate *cnn* over the whole validation set.

    Args:
        cnn: the colourization model (caller is expected to set eval mode)
        criterion: loss applied through compute_loss (e.g. CrossEntropyLoss)
        test_grey: validation inputs
        test_rgb_cat: validation per-pixel category labels
        batch_size: evaluation batch size
        colours: colour palette (provides num_colours and plot lookups)
        plotpath: when set, each processed batch is rendered to this path
            (the file is overwritten, so it ends up showing the last batch)
        visualize: show the plot interactively instead of saving
        downsize_input: forwarded to plot_cls (super-resolution layout)
        gpu, reg: forwarded to get_torch_vars

    Returns:
        (val_loss, val_acc): mean loss over batches and pixel accuracy in %.
    """
    correct = 0.0
    total = 0.0
    losses = []
    num_colours = np.shape(colours)[0]
    for i, (xs, ys) in enumerate(get_batch(test_grey,
                                           test_rgb_cat,
                                           batch_size)):
        images, labels = get_torch_vars(xs, ys, gpu, reg)
        outputs = cnn(images)
        val_loss = compute_loss(criterion,
                                outputs,
                                labels,
                                batch_size=batch_size,
                                num_colours=num_colours)
        losses.append(val_loss.data.item())
        # Per-pixel argmax over the colour-category dimension.
        _, predicted = torch.max(outputs.data, 1, keepdim=True)
        total += labels.size(0) * 32 * 32
        correct += (predicted == labels.data).sum()
        if plotpath:  # only plot if a path is provided
            plot_cls(xs, ys, predicted.cpu().numpy(), colours,
                     plotpath, visualize=visualize, compare_bilinear=downsize_input)
    val_loss = np.mean(losses)
    # NOTE(review): `correct` becomes a torch tensor after the += above, so
    # val_acc may be a 0-dim tensor rather than a float -- confirm callers
    # only format/print it.
    val_acc = 100 * correct / total
    return val_loss, val_acc
def plot_cls(input, gtlabel, output, colours, path, visualize, compare_bilinear=False):
    """Save or show a grid comparing input, ground truth and prediction.

    Rows: input (greyscale tiled to 3 channels, or the downsized colour
    input), ground-truth colours, predicted colours; only the first 10
    images of the batch are drawn. With compare_bilinear=True a fourth row
    shows a 4x avg-pool + bilinear upsample of the ground truth, for the
    super-resolution comparison.

    Args:
        input: model input batch, [B, C, H, W]
        gtlabel: ground-truth category indices, [B, 1, H, W]
        output: predicted category indices, [B, 1, H, W]
        colours: palette for category -> RGB lookup
        path: output figure path (used when visualize is False)
        visualize: display the figure inline instead of saving to path
        compare_bilinear: add the bilinear-upsampled ground-truth row
    """
    grey = np.transpose(input[:10, :, :, :], [0, 2, 3, 1])
    gtcolor = get_cat_rgb(gtlabel[:10, 0, :, :], colours)
    predcolor = get_cat_rgb(output[:10, 0, :, :], colours)
    img_stack = [
        np.hstack(np.tile(grey, [1, 1, 1, 3])),
        np.hstack(gtcolor),
        np.hstack(predcolor)]
    if compare_bilinear:
        # Blur the ground truth the same way the super-resolution input was
        # blurred, to compare the model against naive bilinear upsampling.
        downsize_module = nn.Sequential(nn.AvgPool2d(2),
                                        nn.AvgPool2d(2),
                                        nn.Upsample(scale_factor=2, mode='bilinear'),
                                        nn.Upsample(scale_factor=2, mode='bilinear'))
        gt_input = np.transpose(gtcolor, [0, 3, 1, 2, ])
        color_bilinear = downsize_module.forward(torch.from_numpy(gt_input).float())
        color_bilinear = np.transpose(color_bilinear.data.numpy(), [0, 2, 3, 1])
        # Input is already 3-channel here, so no tiling on the first row.
        img_stack = [
            np.hstack(np.transpose(input[:10, :, :, :], [0, 2, 3, 1])),
            np.hstack(gtcolor),
            np.hstack(predcolor),
            np.hstack(color_bilinear)]
    img = np.vstack(img_stack)
    plt.grid('off')
    plt.imshow(img, vmin=0., vmax=1.)
    if visualize:
        plt.show()
    else:
        plt.savefig(path)
def toimage(img, cmin, cmax):
    """Clip *img* to [cmin, cmax], scale by 255 and return an 8-bit PIL image."""
    return Image.fromarray((img.clip(cmin, cmax) * 255).astype(np.uint8))
def plot_activation(args, cnn, reg=True):
    """Save input/output images and per-layer activation grids for one test image.

    Writes input_<id>.png, input_<id>_gt.png, output_<id>.png and
    conv<i>_out_<id>.png under outputs/<experiment_name>/act<id>.

    Args:
        args: config with fields colours, index, experiment_name,
            downsize_input and gpu (gpu is read by get_torch_vars below even
            though the example configs omit it -- NOTE(review): confirm).
        cnn: trained model; must expose .out1 .. .out5 activation attributes
            populated during forward (NOTE(review): RegressionCNN as defined
            does not set these -- presumably only CNN/UNet do).
        reg: forwarded to get_torch_vars (float vs long labels)
    """
    # LOAD THE COLOURS CATEGORIES
    # NOTE(review): unlike the training path, no encoding= here; newer NumPy
    # also needs allow_pickle=True for this pickled .npy -- confirm.
    colours = np.load(args.colours)[0]
    num_colours = np.shape(colours)[0]
    (x_train, y_train), (x_test, y_test) = load_cifar10()
    test_rgb, test_grey = process_cls(x_test, y_test, downsize_input=args.downsize_input)
    test_rgb_cat = get_rgb_cat(test_rgb, colours)
    # Take the index of the test image
    id = args.index
    outdir = "outputs/" + args.experiment_name + '/act' + str(id)
    if not os.path.exists(outdir):
        os.makedirs(outdir)
    images, labels = get_torch_vars(np.expand_dims(test_grey[id], 0),
                                    np.expand_dims(test_rgb_cat[id], 0),
                                    args.gpu, reg)
    # Run on CPU so activations can be read without device transfers.
    cnn.cpu()
    outputs = cnn(images)
    _, predicted = torch.max(outputs.data, 1, keepdim=True)
    predcolor = get_cat_rgb(predicted.cpu().numpy()[0, 0, :, :], colours)
    img = predcolor
    toimage(predcolor, cmin=0, cmax=1) \
        .save(os.path.join(outdir, "output_%d.png" % id))
    # Save the model input (tiled to 3 channels when greyscale).
    if not args.downsize_input:
        img = np.tile(np.transpose(test_grey[id], [1, 2, 0]), [1, 1, 3])
    else:
        img = np.transpose(test_grey[id], [1, 2, 0])
    toimage(img, cmin=0, cmax=1) \
        .save(os.path.join(outdir, "input_%d.png" % id))
    img = np.transpose(test_rgb[id], [1, 2, 0])
    toimage(img, cmin=0, cmax=1) \
        .save(os.path.join(outdir, "input_%d_gt.png" % id))
    def add_border(img):
        # 1-pixel white border around each filter tile.
        return np.pad(img, 1, "constant", constant_values=1.0)
    def draw_activations(path, activation, imgwidth=4):
        # Tile the activation channels into a grid, imgwidth per row.
        # NOTE(review): scipy.misc.imsave was removed in SciPy >= 1.2; this
        # requires an old SciPy (or porting to imageio.imwrite).
        img = np.vstack([
            np.hstack([
                add_border(filter) for filter in
                activation[i * imgwidth:(i + 1) * imgwidth, :, :]])
            for i in range(activation.shape[0] // imgwidth)])
        scipy.misc.imsave(path, img)
    for i, tensor in enumerate([cnn.out1, cnn.out2, cnn.out3, cnn.out4, cnn.out5]):
        draw_activations(
            os.path.join(outdir, "conv%d_out_%d.png" % (i + 1, id)),
            tensor.data.cpu().numpy()[0])
    print("visualization results are saved to %s" % outdir)
77,738 | hanwen0529/Image-Colorization-Super_resolution-With-Unet | refs/heads/master | /load_data.py | import os
from six.moves.urllib.request import urlretrieve
import tarfile
import numpy as np
import pickle
import sys
def get_file(fname,
             origin,
             untar=False,
             extract=False,
             archive_format='auto',
             cache_dir='data'):
    """Download *origin* into cache_dir/fname unless it is already cached.

    Args:
        fname: target file name (for untar=True this is the name without
            the '.tar.gz' suffix)
        origin: URL to download from
        untar: treat the download as a .tar.gz and extract it into cache_dir
        extract: alternatively extract via _extract_archive (defined elsewhere)
        archive_format: forwarded to _extract_archive
        cache_dir: local cache directory (created if missing)

    Returns:
        Path to the downloaded file, or to the extracted directory when
        untar=True.
    """
    # BUG FIX: URLError/HTTPError were referenced below without ever being
    # imported, so any failed download raised NameError instead of the
    # intended message. Prefer the six shim (py2 compat, as the module's
    # other imports do) and fall back to the stdlib location.
    try:
        from six.moves.urllib.error import HTTPError, URLError
    except ImportError:
        from urllib.error import HTTPError, URLError
    datadir = os.path.join(cache_dir)
    if not os.path.exists(datadir):
        os.makedirs(datadir)
    if untar:
        untar_fpath = os.path.join(datadir, fname)
        fpath = untar_fpath + '.tar.gz'
    else:
        fpath = os.path.join(datadir, fname)
    print(fpath)
    if not os.path.exists(fpath):
        print('Downloading data from', origin)
        error_msg = 'URL fetch failure on {}: {} -- {}'
        try:
            try:
                urlretrieve(origin, fpath)
            # BUG FIX: HTTPError subclasses URLError, so it must be caught
            # first; in the original order the HTTPError clause was dead.
            except HTTPError as e:
                raise Exception(error_msg.format(origin, e.code, e.msg))
            except URLError as e:
                raise Exception(error_msg.format(origin, e.errno, e.reason))
        except (Exception, KeyboardInterrupt):
            # Remove a partial download before propagating.
            if os.path.exists(fpath):
                os.remove(fpath)
            raise
    if untar:
        if not os.path.exists(untar_fpath):
            print('Extracting file.')
            with tarfile.open(fpath) as archive:
                archive.extractall(datadir)
        return untar_fpath
    if extract:
        _extract_archive(fpath, datadir, archive_format)
    return fpath
'''
<data_batch_1, data_batch_2, ..., data_batch_5,test_batch>
Loaded in this way, each of the batch files contains a dictionary with the following elements:
data -- a 10000x3072 numpy array of uint8s. Each row of the array stores a 32x32xRGB colour image.
labels -- a list of 10000 numbers in the range 0-9. The number at index i indicates the label of the ith image in the array data.
<batches.meta>
It contains a Python dictionary object with the following elements:
label_names -- a 10-element list which gives meaningful names to the numeric labels in the labels array described above.
For example, label_names[0] == "airplane"
'''
def load_batch(fpath, label_key='labels'):
    """Internal utility for parsing CIFAR data.

    # Arguments
        fpath: path to the pickled batch file to parse.
        label_key: key for label data in the retrieved dictionary.

    # Returns
        A tuple `(data, labels)`; `data` has shape (N, 3, 32, 32), dtype uint8.
    """
    # `with` guarantees the descriptor is closed even when unpickling
    # raises (the original leaked the handle on error).
    with open(fpath, 'rb') as f:
        if sys.version_info < (3,):
            d = pickle.load(f)
        else:
            # CIFAR batches were pickled under Python 2: load raw bytes,
            # then decode the dict keys to native strings.
            d = pickle.load(f, encoding='bytes')
            d = {k.decode('utf8'): v for k, v in d.items()}
    data = d['data']
    labels = d[label_key]
    # Rows are flat 3072-byte images; restore channel-first layout.
    data = data.reshape(data.shape[0], 3, 32, 32)
    return data, labels
def load_cifar10(transpose=False):
    """Loads the CIFAR10 dataset, downloading and caching it on first use.

    # Returns
        Tuple of Numpy arrays: `(x_train, y_train), (x_test, y_test)`.
    """
    dirname = 'cifar-10-batches-py'
    origin = 'http://www.cs.toronto.edu/~kriz/cifar-10-python.tar.gz'
    path = get_file(dirname, origin=origin, untar=True)

    num_train_samples = 50000
    x_train = np.zeros((num_train_samples, 3, 32, 32), dtype='uint8')
    y_train = np.zeros((num_train_samples,), dtype='uint8')

    # Training data ships as five pickled batches of 10000 images each.
    batch_size = 10000
    for batch_idx in range(1, 6):
        batch_path = os.path.join(path, 'data_batch_' + str(batch_idx))
        data, labels = load_batch(batch_path)
        start = (batch_idx - 1) * batch_size
        x_train[start:start + batch_size, :, :, :] = data
        y_train[start:start + batch_size] = labels

    x_test, y_test = load_batch(os.path.join(path, 'test_batch'))

    # Labels as column vectors of shape (N, 1).
    y_train = np.reshape(y_train, (len(y_train), 1))
    y_test = np.reshape(y_test, (len(y_test), 1))
    if transpose:
        # NCHW -> NHWC
        x_train = x_train.transpose(0, 2, 3, 1)
        x_test = x_test.transpose(0, 2, 3, 1)
    return (x_train, y_train), (x_test, y_test)
######################################################################
# Download CIFAR datasets and other related files
######################################################################
# Script entry point: pre-download the colour k-means centres and the
# CIFAR-10 archive so later runs find them cached under data/.
if __name__ == '__main__':
    colours_fpath = get_file(fname='colours',
                             origin='http://www.cs.toronto.edu/~jba/kmeans_colour_a2.tar.gz',
                             untar=True)
    # colours_fpath == "data/colours"
    m = load_cifar10()
    # m == ((x_train, y_train), (x_test, y_test));
    # x_train has shape (N, num_channel, height, width)
77,739 | hanwen0529/Image-Colorization-Super_resolution-With-Unet | refs/heads/master | /model/colourizationcnn.py | from __future__ import print_function
from model.regressioncnn import MyConv2d
import torch
import torch.nn as nn
######################################################################
# MODELS
######################################################################
class CNN(nn.Module):
    """Plain encoder/decoder colourization network (no skip connections).

    Two down-sampling stages, one refinement stage, two up-sampling
    stages, then a final convolution producing one logit per colour
    class at every pixel.
    """

    def __init__(self, kernel, num_filters, num_colours, num_in_channels):
        super(CNN, self).__init__()
        pad = kernel // 2  # "same" padding for odd kernel sizes

        def down(cin, cout):
            # conv -> BN -> ReLU -> 2x down-sample
            return nn.Sequential(
                MyConv2d(cin, cout, kernel_size=kernel, padding=pad),
                nn.BatchNorm2d(cout),
                nn.ReLU(),
                nn.MaxPool2d(2),
            )

        def up(cin, cout):
            # conv -> BN -> ReLU -> 2x up-sample
            return nn.Sequential(
                MyConv2d(cin, cout, kernel_size=kernel, padding=pad),
                nn.BatchNorm2d(cout),
                nn.ReLU(),
                nn.Upsample(scale_factor=2),
            )

        # Submodules are registered in the same order as the original
        # implementation, so state_dict keys are unchanged.
        self.downconv1 = down(num_in_channels, num_filters)
        self.downconv2 = down(num_filters, num_filters * 2)
        self.rfconv = nn.Sequential(
            MyConv2d(num_filters * 2, num_filters * 2, kernel_size=kernel, padding=pad),
            nn.BatchNorm2d(num_filters * 2),
            nn.ReLU(),
        )
        self.upconv1 = up(num_filters * 2, num_filters)
        self.upconv2 = up(num_filters, num_colours)
        self.finalconv = MyConv2d(num_colours, num_colours, kernel_size=kernel)

    def forward(self, x):
        # Intermediate activations are kept on self for inspection,
        # mirroring the original implementation.
        self.out1 = self.downconv1(x)
        self.out2 = self.downconv2(self.out1)
        self.out3 = self.rfconv(self.out2)
        self.out4 = self.upconv1(self.out3)
        self.out5 = self.upconv2(self.out4)
        self.out_final = self.finalconv(self.out5)
        return self.out_final
class UNet(nn.Module):
    """Colourization network with U-Net style skip connections.

    Same encoder/decoder layout as CNN, but each decoder stage consumes
    the channel-wise concatenation of the matching encoder activation
    with the previous decoder output, and the final convolution also
    sees the raw input -- hence the widened input channel counts
    (4*num_filters, 2*num_filters, num_colours + num_in_channels).
    """
    def __init__(self, kernel, num_filters, num_colours, num_in_channels):
        super(UNet, self).__init__()
        padding = kernel // 2  # "same" padding for odd kernel sizes
        self.downconv1 = nn.Sequential(
            MyConv2d(num_in_channels, num_filters, kernel_size = kernel, padding = padding),
            nn.MaxPool2d(2),
            nn.BatchNorm2d(num_filters),
            nn.ReLU())
        self.downconv2 = nn.Sequential(
            MyConv2d(num_filters, num_filters * 2, kernel_size = kernel,
                     padding = padding),
            nn.MaxPool2d(2),
            nn.BatchNorm2d(num_filters * 2),
            nn.ReLU())
        # Refinement stage at the lowest resolution (no resampling).
        self.rfconv = nn.Sequential(
            MyConv2d(num_filters * 2, num_filters * 2, kernel_size = kernel,
                     padding = padding),
            nn.BatchNorm2d(num_filters * 2),
            nn.ReLU())
        # Input channels doubled: receives cat(out2, out3).
        self.upconv1 = nn.Sequential(
            MyConv2d(4 * num_filters, num_filters, kernel_size = kernel,
                     padding = padding),
            nn.Upsample(scale_factor = 2),
            nn.BatchNorm2d(num_filters),
            nn.ReLU())
        # Input channels doubled: receives cat(out1, out4).
        self.upconv2 = nn.Sequential(
            MyConv2d(2 * num_filters, num_colours, kernel_size = kernel, padding = padding),
            nn.Upsample(scale_factor = 2),
            nn.BatchNorm2d(num_colours),
            nn.ReLU())
        # Final conv also consumes the raw input x.
        self.finalconv = MyConv2d(num_colours + num_in_channels, num_colours, kernel_size = kernel)
    def forward(self, x):
        self.out1 = self.downconv1(x)
        self.out2 = self.downconv2(self.out1)
        self.out3 = self.rfconv(self.out2)
        # Skip connections concatenate along the channel axis (dim 1).
        self.out4 = self.upconv1(torch.cat((self.out2, self.out3), 1))
        self.out5 = self.upconv2(torch.cat((self.out1, self.out4), 1))
        self.out_final = self.finalconv(torch.cat((self.out5, x), 1))
        return self.out_final
| {"/regression_train.py": ["/data_processor.py", "/load_data.py", "/model/regressioncnn.py"], "/classification_train.py": ["/data_processor.py", "/load_data.py", "/model/colourizationcnn.py"], "/data_processor.py": ["/load_data.py"], "/model/colourizationcnn.py": ["/model/regressioncnn.py"]} |
77,749 | SeongHanC/FYP_WEB | refs/heads/master | /json_only_beta.py | import json
# Parse a small inline JSON document and print the suppliers list.
json_string = """{"Event Suppliers":[
{"state":"Selangor"}
]}"""
input_s = json.loads(json_string)
# print() works on both Python 2 and 3; the original used the
# Python-2-only statement form, a SyntaxError under Python 3.
print(input_s['Event Suppliers'])
77,750 | SeongHanC/FYP_WEB | refs/heads/master | /__init__.py | from flask import Flask, render_template,flash,request,redirect,url_for,jsonify,session
from pyld import jsonld
import json
import sqlite3
from datetime import datetime
from flask_login import LoginManager,login_user,logout_user,current_user,login_required
from wtforms import Form, BooleanField,StringField,validators
from RegistrationForm import Registration
from DBConnect import connection
from MySQLdb import escape_string as thwart
import gc
#from rdflib_search import get_types,get_states
import rdflib_search
from rdflib import Graph,Namespace,RDF
import MySQLdb
app = Flask(__name__)
app.secret_key = '\x88\xe4\x18H\xf3> d\x08\xa2\xe9U\r\xfc\xff,\x88\xa8\xe6\x87\x99u\x9b\x84'
login_manager = LoginManager()
login_manager.init_app(app)
login_manager.login_view = 'login'
@app.route('/')
def welcome():
    """Landing page: render the static welcome template."""
    return render_template('welcome_page.html')
# Demo supplier catalogue keyed by (event-service type, state).
# Each value: (company name, address, offered items). Data copied
# verbatim from the original if/elif chain.
_SUPPLIERS = {
    ('Concert', 'Selangor'): ('BLM Music Solution', '69, Jalan USJ 8',
                              'Music Equipment (Guitar, Violin, etc), PA System'),
    ('Costumes', 'Pulau Pinang'): ('Ian Costumes Factory', '12, Jalan PP, Gelugor',
                                   'All types of costumes (Halloween costumes, party costumes, etc)'),
    ('Festival', 'Selangor'): ("Adi's Fireworks Solution", '9, Jalan Dato Huri 11, Damansara Utama',
                               'Fireworks for festivals, celebration, etc.'),
    ('Food & Beverage', 'Perak'): ('Ho Jiak Catering', '11, Jalan Perak 89',
                                   'Catering (Western, Malay, Chinese, Indian, Fusion)'),
    ('Music Equipment', 'Melaka'): ("Nigel's", '25, Jalan Selamat',
                                    'Music Instruments rental services.'),
    ('Music Equipment', 'Selangor'): ('BLM Music Solution', '69, Jalan USJ 8',
                                      'Music Equipment (Guitar, Violin, etc), PA System'),
    ('Photography', 'Johor'): ("Bean's Photography & Studio", '19, Jalan Johor Selatan',
                               'Camera, Camera parts, Photography service for all occasions.'),
}


def _record_history(select_state, select_et):
    """Append the searched state/service to the in-memory history lists,
    but only when they are known values from the ontology."""
    if select_state in get_states():
        hist_list_state.append(select_state)
    if select_et in get_types():
        hist_list_service.append(select_et)


@app.route('/homepage', methods=["GET", "POST"])
def homepage():
    """Search page matching a (service, state) pair against the catalogue.

    POST renders result.html for a catalogue hit, result1.html for the
    special Venue/Kuala Lumpur case, or homepage.html with an error when
    nothing matches. GET renders the empty search form.
    """
    services = remove_duplicates(get_types())
    states = remove_duplicates(get_states())
    error = ""
    try:
        if request.method == 'POST':
            select_state = request.form.get('state')
            select_et = request.form.get('service')
            supplier = _SUPPLIERS.get((select_et, select_state))
            if supplier is not None:
                co_name, location, items = supplier
                _record_history(select_state, select_et)
                return render_template("result.html", co_name=co_name,
                                       state=select_state, location=location,
                                       items=items, error=error,
                                       states=states, services=services)
            if select_et == "Venue" and select_state == "Kuala Lumpur":
                # Venue listings have their own template without the
                # single-supplier fields.
                _record_history(select_state, select_et)
                return render_template("result1.html", error=error,
                                       states=states, services=services)
            error = "No match found. Please try again."
            return render_template("homepage.html", error=error,
                                   states=states, services=services)
        # GET: the original fell off the end and returned None, which
        # Flask turns into a 500; always render the search form instead.
        return render_template("homepage.html", error=error,
                               states=states, services=services)
    except Exception:
        return render_template("homepage.html", error=error,
                               states=states, services=services)
@app.route('/login', methods=["GET", "POST"])
def login():
    """Demo login page: accepts only the hard-coded edward/admin pair."""
    error = ''
    try:
        if request.method == "POST":
            username = request.form['username']
            password = request.form['password']
            # NOTE: credentials are hard-coded for the demo.
            if (username, password) == ("edward", "admin"):
                return redirect(url_for('homepage'))
            error = "Invalid credentials. Try Again."
        return render_template("login.html", error=error)
    except Exception:
        return render_template("login.html", error=error)
@app.route('/register', methods=["GET", "POST"])
def register():
    """Create a new account from the registration form.

    On success the user is marked logged-in on the session and redirected
    to the login page; a duplicate username re-renders the form with an
    error. Any other exception is returned as plain text (demo behaviour).
    """
    error = ""
    try:
        form = Registration(request.form)
        if request.method == "POST" and form.validate():
            username = form.username.data
            password = form.password.data
            state = form.state.data
            location = form.location.data
            c, conn = connection()
            # Parameterised queries: the original interpolated raw form
            # input straight into the SQL (injection-prone), and its
            # INSERT contained a stray quote ('%s'','%s') that produced
            # malformed SQL.
            x = c.execute("SELECT * FROM USERS WHERE USERNAME = %s",
                          (username,))
            if int(x) > 0:
                error = "Username is already taken. Please choose another username"
                return render_template('register.html', form=form, error=error)
            # NOTE(review): password is stored in plaintext -- consider hashing.
            c.execute("INSERT INTO USERS (USERNAME, PASSWORD, STATE, LOCATION) "
                      "VALUES (%s, %s, %s, %s)",
                      (username, password, state, location))
            conn.commit()
            flash("Congrats! You have been registered!")
            c.close()
            conn.close()
            gc.collect()
            session['logged_in'] = True
            session['username'] = username
            return redirect(url_for('login'))
        return render_template("register.html", form=form)
    except Exception as e:
        return (str(e))
@app.route('/logout')
def logout():
    """End the flask-login session and return to the welcome page."""
    logout_user()
    return redirect(url_for('welcome'))
@app.route('/user_profile')
def user_profile():
    """Show the (hard-coded) user's search history."""
    username = "Edward"
    # Copy the module-level history lists before handing them to the template.
    output_state = list(hist_list_state)
    output_service = list(hist_list_service)
    return render_template("user_profile.html", username=username,
                           states=output_state, services=output_service)
# NOTE(review): everything below -- the DB handle, the rdflib graph `g`,
# the history lists, and the helper functions -- only exists when this
# module is executed directly. The route handlers above reference these
# names, so importing the module via a WSGI server would raise NameError;
# consider moving them to module level. Confirm intended deployment mode.
if __name__ == '__main__':
    db = MySQLdb.connect(host="localhost", user="root", passwd="t1213121", db="User")
    cursor = db.cursor()
    g = Graph()
    g.parse("rdf_output.owl")
    # In-memory search history (lost on restart, shared by all visitors).
    hist_list_state = []
    hist_list_service = []
    my_namespace = Namespace("http://www.semanticweb.org/seonghan/ontologies/2016/7/untitled-ontology-3#")
    # rdflib helpers: read supplier attributes from the RDF ontology/owl file.
    def get_co_name():
        """All supplier company names (ES_Name)."""
        co_name = []
        for name in g.subjects(RDF.type, my_namespace.Event_suppliers):
            co_name.append(g.value(name, my_namespace.ES_Name).toPython())
        return co_name
    def get_types():
        """All supplier service types (ES_Type), unsorted, with duplicates."""
        types = []
        for type in g.subjects(RDF.type, my_namespace.Event_suppliers):
            types.append(g.value(type, my_namespace.ES_Type).toPython())
        return types
    def get_states():
        """All supplier states (ES_State), sorted, with duplicates."""
        states = []
        for state in g.subjects(RDF.type, my_namespace.Event_suppliers):
            states.append(g.value(state, my_namespace.ES_State).toPython())
        states.sort()
        return states
    def get_loc():
        """All supplier addresses (ES_Location)."""
        loc = []
        for location in g.subjects(RDF.type, my_namespace.Event_suppliers):
            loc.append(g.value(location, my_namespace.ES_Location).toPython())
        return loc
    def get_items():
        """All supplier item descriptions (ES_Items), sorted."""
        items = []
        for item in g.subjects(RDF.type, my_namespace.Event_suppliers):
            items.append(g.value(item, my_namespace.ES_Items).toPython())
        items.sort()
        return items
    def remove_duplicates(a_list):
        """Return a copy of a_list without duplicates, first occurrence kept."""
        seen = set()
        output_list = []
        for i in a_list:
            if i not in seen:
                output_list.append(i)
                seen.add(i)
        return output_list
    app.run(debug=True)
| {"/__init__.py": ["/RegistrationForm.py", "/DBConnect.py", "/rdflib_search.py"]} |
77,751 | SeongHanC/FYP_WEB | refs/heads/master | /LoginForm.py | from flask_wtf import Form
from wtforms import StringField,PasswordField,validators,IntegerField
class LoginForm(Form):
    """Simple username/password login form.

    The original passed a bare ``validators.DataRequired()`` instance as
    the second positional argument; WTForms fields expect a *list* of
    validators, so validation raised a TypeError instead of running.
    """
    username = StringField('Username', [validators.DataRequired()])
    password = PasswordField('Password', [validators.DataRequired()])
77,752 | SeongHanC/FYP_WEB | refs/heads/master | /createHistoryDB.py | import MySQLdb
# One-shot setup script: (re)create the search-history table.
# NOTE(review): DB credentials are hard-coded; consider a config file.
db = MySQLdb.connect(host="localhost", user="root", passwd="t1213121", db="User")
cursor = db.cursor()
# Destructive: any existing history rows are discarded.
cursor.execute("DROP TABLE IF EXISTS HISTORY")
sql = """CREATE TABLE HISTORY(
         TIMESTAMP TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
         STATE CHAR(20),
         SERVICE CHAR(20))
      """
cursor.execute(sql)
db.close()
77,753 | SeongHanC/FYP_WEB | refs/heads/master | /jsonld_beta.py | import json
from pyld import jsonld
def print_json():
    """Return the parsed JSON document stored in project1_json.owl."""
    with open("project1_json.owl") as handle:
        document = json.load(handle)
    return document
def try_json_dump():
    """Serialise the document from print_json() back to pretty JSON."""
    return json.dumps(print_json(), indent=2)
def do_filter():
    """Round-trip the document through dumps/loads and print the result.

    Returns None (the __main__ caller prints that return value too).
    """
    input_dict = json.loads(try_json_dump())
    # print() is valid on both Python 2 and 3; the original used the
    # Python-2-only print statement.
    print(input_dict)
# Ad-hoc manual test entry point. do_filter() returns None, so this
# prints the document followed by "None".
if __name__ == '__main__':
    #print(try_json_dump())
    #try_json_dump()
    print (do_filter())
77,754 | SeongHanC/FYP_WEB | refs/heads/master | /rdflib_beta.py | from rdflib import Graph,Namespace,RDF
# Exploratory script: load the ontology and collect each Event_suppliers
# individual's literal properties into parallel lists.
g = Graph()
g.parse("rdf_output.owl")
# a_list = g.serialize(destination="hello.owl")
my_namespace = Namespace("http://www.semanticweb.org/seonghan/ontologies/2016/7/untitled-ontology-3#")
co_name = []
types = []
states = []
loc = []
items = []
# One pass per supplier subject; index i across the five lists
# describes the same supplier record.
for name in g.subjects(RDF.type,my_namespace.Event_suppliers):
    #output.append(states)
    co_name.append(g.value(name,my_namespace.ES_Name).toPython())
    types.append(g.value(name,my_namespace.ES_Type).toPython())
    states.append(g.value(name,my_namespace.ES_State).toPython())
    loc.append(g.value(name,my_namespace.ES_Location).toPython())
    items.append(g.value(name,my_namespace.ES_Items).toPython())
#print output
| {"/__init__.py": ["/RegistrationForm.py", "/DBConnect.py", "/rdflib_search.py"]} |
77,755 | SeongHanC/FYP_WEB | refs/heads/master | /RegistrationForm.py | from flask_wtf import Form
from wtforms import StringField,PasswordField,validators,IntegerField,BooleanField
class Registration(Form):
    """Account sign-up form: credentials with confirmation, address
    fields, and a mandatory terms-of-service checkbox."""
    username = StringField('Username', [validators.Length(min=4, max=25)])
    # Password must be non-empty and match the `confirm` field.
    password = PasswordField('New Password', [
        validators.DataRequired(),
        validators.EqualTo('confirm', message='Passwords must match')
    ])
    confirm = PasswordField('Repeat Password')
    state = StringField('State', [validators.Length(min=4, max=25)])
    location = StringField('Location', [validators.Length(min=4, max=25)])
    accept_tos = BooleanField('I accept the Terms of Service and Privacy Notice', [validators.DataRequired()])
77,756 | SeongHanC/FYP_WEB | refs/heads/master | /DBConnect.py | import MySQLdb
def connection():
    """Open a MySQL connection to the User database.

    Returns a (cursor, connection) pair; the caller is responsible for
    closing both.
    """
    conn = MySQLdb.connect(host="localhost", user="root",
                           passwd="t1213121", db="User")
    return conn.cursor(), conn
77,757 | SeongHanC/FYP_WEB | refs/heads/master | /rdflib_search.py | from rdflib import Graph,Namespace,RDF
def get_co_name():
    """Return the ES_Name of every Event_suppliers individual."""
    return [g.value(subj, my_namespace.ES_Name).toPython()
            for subj in g.subjects(RDF.type, my_namespace.Event_suppliers)]
def get_types():
    """Return the sorted list of supplier service types (ES_Type)."""
    return sorted(g.value(subj, my_namespace.ES_Type).toPython()
                  for subj in g.subjects(RDF.type, my_namespace.Event_suppliers))
def get_states():
    """Return supplier states (ES_State), de-duplicated in first-seen order."""
    raw = [g.value(subj, my_namespace.ES_State).toPython()
           for subj in g.subjects(RDF.type, my_namespace.Event_suppliers)]
    return remove_duplicates(raw)
def get_loc():
    """Return every supplier's address (ES_Location)."""
    return [g.value(subj, my_namespace.ES_Location).toPython()
            for subj in g.subjects(RDF.type, my_namespace.Event_suppliers)]
def get_items():
    """Return every supplier's item description (ES_Items), unsorted."""
    return [g.value(subj, my_namespace.ES_Items).toPython()
            for subj in g.subjects(RDF.type, my_namespace.Event_suppliers)]
def get_all():
    """Return each Event_suppliers individual's Event_suppliers property value.

    The original accessed ``my_namespace.Event.suppliers``, which raises
    AttributeError (``my_namespace.Event`` yields a URIRef string that
    has no ``suppliers`` attribute). Ontology terms in this namespace use
    underscores, so ``Event_suppliers`` matches the predicate used in the
    type query on the same line. NOTE(review): confirm the intended
    predicate against the ontology.
    """
    items = []
    for item in g.subjects(RDF.type, my_namespace.Event_suppliers):
        items.append(g.value(item, my_namespace.Event_suppliers).toPython())
    return items
def selangor_music():
    """Name, location, and items of the first supplier record.

    NOTE(review): relies on a fixed graph iteration order -- fragile.
    """
    return [get_co_name()[0], get_loc()[0], get_items()[0]]
def selangor_fnb():
    """Name, location, and items of the second supplier record.

    NOTE(review): relies on a fixed graph iteration order -- fragile.
    """
    return [get_co_name()[1], get_loc()[1], get_items()[1]]
def pp_cos():
    """Name, location, and items of the third supplier record.

    NOTE(review): relies on a fixed graph iteration order -- fragile.
    """
    return [get_co_name()[2], get_loc()[2], get_items()[2]]
def test():
    """Return all service types joined into one comma-separated string."""
    return ",".join(str(item) for item in get_types())
def remove_duplicates(a_list):
    """Return a copy of *a_list* without duplicates, preserving the
    order of first occurrence. Elements must be hashable."""
    seen = set()
    unique = []
    for element in a_list:
        if element in seen:
            continue
        seen.add(element)
        unique.append(element)
    return unique
# NOTE(review): `g` and `my_namespace` only exist when run as a script;
# importing this module and calling the helpers above raises NameError.
if __name__ == '__main__':
    g = Graph()
    g.parse("rdf_output.owl")
    my_namespace = Namespace("http://www.semanticweb.org/seonghan/ontologies/2016/7/untitled-ontology-3#")
    #print get_co_name()
    #print get_types()
    print get_types()
    #print get_states()
    # print selangor_music()
    # print selangor_fnb()
    # print pp_cos()
    #print get_all()
| {"/__init__.py": ["/RegistrationForm.py", "/DBConnect.py", "/rdflib_search.py"]} |
77,758 | SeongHanC/FYP_WEB | refs/heads/master | /createUserDB.py | import MySQLdb
db = MySQLdb.connect(host="localhost", user="root", passwd="t1213121", db="User")
cursor = db.cursor()
cursor.execute("DROP TABLE IF EXISTS USERS")
sql = """CREATE TABLE USERS(
USERNAME CHAR(20) NOT NULL,
PASSWORD VARCHAR(20) NOT NULL,
STATE CHAR(20),
LOCATION CHAR(20))
"""
cursor.execute(sql)
db.close()
| {"/__init__.py": ["/RegistrationForm.py", "/DBConnect.py", "/rdflib_search.py"]} |
77,759 | mattwbarry/py_package_scaffold | refs/heads/master | /py_package_scaffold/package_scaffold.py | # TODO: update docstrings
import os
from jinja2 import Environment, PackageLoader
def create_package(location, name, template_args):
    """
    Create a Python package scaffold on the filesystem.

    Builds the package and tests module directories under
    ``location/name``, then renders the setup/dependency template files
    using *template_args*.

    :param location: directory in which the scaffold directory is created
    :param name: package (and scaffold directory) name
    :param template_args: mapping passed to the Jinja2 templates
    :return: None
    """
    create_modules(location, name)
    create_files(location, name, template_args)
def create_modules(location, name):
    """
    Create the scaffold base directory plus the package and tests
    modules, each containing an empty ``__init__.py``.

    :param location: parent directory (must already exist)
    :param name: scaffold/package name
    :return: None
    """
    scaffold_path = os.path.join(location, name)
    os.mkdir(scaffold_path)
    for module_name in (name, 'tests'):
        module_path = os.path.join(scaffold_path, module_name)
        os.mkdir(module_path)
        # Empty __init__.py marks the directory as a package.
        open(os.path.join(module_path, '__init__.py'), 'w').close()
def create_files(location, name, template_kwargs):
    """
    Render the scaffold's setup and dependency files from the packaged
    Jinja2 templates into ``location/name``.

    Template names mirror the file names with dots replaced by
    underscores (e.g. ``setup.py`` -> ``setup_py``).

    :param template_kwargs: mapping expanded into each template's render()
    :return: None
    """
    env = Environment(
        loader=PackageLoader('py_package_scaffold', 'templates'),
    )
    target_dir = os.path.join(location, name)
    for filename in ('.gitignore', 'MANIFEST.in', 'pytest.ini', 'README.md',
                     'requirements.txt', 'requirements_dev.txt', 'run_tests',
                     'setup', 'setup.py'):
        template = env.get_template(filename.replace('.', '_'))
        rendered = template.render(**template_kwargs)
        with open(os.path.join(target_dir, filename), 'w') as scaffold_file:
            scaffold_file.write(rendered)
| {"/py_package_scaffold/cli.py": ["/py_package_scaffold/package_scaffold.py"]} |
77,760 | mattwbarry/py_package_scaffold | refs/heads/master | /py_package_scaffold/__init__.py | __author__ = 'mwbarry'
| {"/py_package_scaffold/cli.py": ["/py_package_scaffold/package_scaffold.py"]} |
77,761 | mattwbarry/py_package_scaffold | refs/heads/master | /setup.py | import pip
from setuptools import setup, find_packages

APP_NAME = 'py_package_scaffold'
VERSION = '0.0.1'


def _read_requirements(path='requirements.txt'):
    """Return the requirement strings listed in *path*.

    Blank lines and comment lines are skipped. ``pip.req.parse_requirements``
    was removed from pip's public API (pip >= 10), so the file is read
    directly instead.
    """
    with open(path) as req_file:
        return [
            line.strip()
            for line in req_file
            if line.strip() and not line.lstrip().startswith('#')
        ]


REQUIRED = _read_requirements()

SETTINGS = {
    'name': APP_NAME,
    'version': VERSION,
    'author': 'Matt Barry',
    'author_email': 'mattwbarry@gmail.com',
    'packages': find_packages(exclude=['tests']),
    'include_package_data': True,
    'url': 'https://github.com/essessinc/py_package_scaffold.git',
    'license': 'None',
    'description': 'Scaffold your Python packages.',
    'long_description': open('README.md').read(),
    'install_requires': REQUIRED,
    'classifiers': [
        'Intended Audience :: Developers',
        'Natural Language :: English',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
    ],
    'entry_points': {
        'console_scripts': [
        ],
    }
}

setup(**SETTINGS)
| {"/py_package_scaffold/cli.py": ["/py_package_scaffold/package_scaffold.py"]} |
77,762 | mattwbarry/py_package_scaffold | refs/heads/master | /py_package_scaffold/cli.py | #!/usr/bin/env python
from getpass import getuser
import click
from .package_scaffold import create_package
@click.group(invoke_without_command=True)
@click.pass_context
def cli(ctx):
    """Interactively collect package metadata and create the scaffold.

    When invoked with a subcommand, defer to it instead of prompting.
    (The original body `pass`ed on that branch, so the prompts ran even
    when a subcommand had been requested.)
    """
    if ctx.invoked_subcommand is not None:
        return
    location = click.prompt('Package location:', type=str)
    name = click.prompt('Package name:', type=str)
    package_url = click.prompt('Package url:', type=str)
    description = click.prompt('Description:', type=str)
    author_email = click.prompt('Author email:', type=str)
    license = click.prompt('License:', type=str)
    author = getuser()
    # Collect arbitrary extra key/value pairs until an empty key is entered.
    extra_args = {}
    while True:
        extra_key = click.prompt('Extra key', type=str, default='')
        if not extra_key:
            break
        extra_args[extra_key] = click.prompt('Extra val', type=str)
    extra_args.update({
        'package_url': package_url,
        'description': description,
        'author_email': author_email,
        'license': license,
        'author': author,
    })
    create_package(
        location,
        name,
        extra_args,
    )
# Allow running the CLI directly as a script.
if __name__ == '__main__':
    cli()
| {"/py_package_scaffold/cli.py": ["/py_package_scaffold/package_scaffold.py"]} |
77,780 | shimaomao/sanicdemo | refs/heads/master | /Message/db/helper.py | from sqlalchemy import Column
from sqlalchemy import create_engine
from sqlalchemy.sql import expression
from sqlalchemy import desc
from sqlalchemy.types import CHAR
from sqlalchemy.types import Integer
from sqlalchemy.types import Float
from sqlalchemy.types import String
from sqlalchemy.types import VARCHAR
from sqlalchemy.types import TIMESTAMP
from sqlalchemy.types import Text
from sqlalchemy.types import Date
from sqlalchemy.orm.exc import MultipleResultsFound
from sqlalchemy.orm.exc import NoResultFound
from sqlalchemy.dialects.postgresql import VARCHAR
from sqlalchemy.dialects.postgresql import SMALLINT
from sqlalchemy.dialects.postgresql import INTEGER
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
from sqlalchemy import func
from sqlalchemy import or_
from sqlalchemy import not_
from sqlalchemy.exc import IntegrityError
from sqlalchemy.orm.exc import NoResultFound
from sqlalchemy.sql.expression import func
from sqlalchemy.schema import UniqueConstraint
from datetime import datetime
import logging
import traceback
from uuid import uuid4
import random
import string
import hashlib
import uuid
import os
import time
from Message.config import database_config
# Connection settings come from Message.config.database_config, with
# sensible local-development defaults for everything but the password
# and database name.
user = database_config.get('user', 'postgres')
password = database_config.get('password')
port = database_config.get('port', 5432)
host = database_config.get('host', '127.0.0.1')
db_name = database_config.get('db_name')
# Engine is created at import time; connections are opened lazily by
# SQLAlchemy on first use.
engine = create_engine('postgresql://{user}:{password}@{host}:{port}/{db_name}'.format(user=user, password=password,
                                                                                      host=host, port=port,
                                                                                      db_name=db_name))
base = declarative_base()
# NOTE(review): despite the name, this is a session *factory*
# (sessionmaker), not a session instance.
session = sessionmaker(bind=engine)
class Task(base):
    """Persistent record of a background task and its lifecycle state."""
    __tablename__ = 'task'
    id = Column(INTEGER, primary_key=True)
    type = Column(VARCHAR)           # task kind/category
    status = Column(SMALLINT)        # numeric state code; mapping defined by callers
    failed_reason = Column(VARCHAR)  # populated when the task fails
    # Both timestamps default to CURRENT_TIMESTAMP(3) at INSERT time.
    # NOTE(review): update_time has no onupdate, so the ORM does NOT
    # refresh it on UPDATE -- confirm whether a DB trigger handles that.
    create_time = Column(TIMESTAMP, server_default=expression.text('CURRENT_TIMESTAMP(3)'))
    update_time = Column(TIMESTAMP, server_default=expression.text('CURRENT_TIMESTAMP(3)'))
# Running this module directly creates all mapped tables against the
# configured database.
if __name__ == '__main__':
    base.metadata.create_all(engine)
77,781 | shimaomao/sanicdemo | refs/heads/master | /structure/service.py | from base.service import BaseService
from sanic.exceptions import SanicException
import random
class SalaryService(BaseService):
    """Read-side service for salary reference data and job lookups.

    All data access goes through ``self.model['salary']``, supplied by
    the BaseService environment.
    """

    async def get_base_info(self, show_id=False):
        """Return the city/industry/scale/nature reference lists in one payload."""
        city_list = await self.model['salary'].get_all_city(show_id)
        industry_list = await self.model['salary'].get_all_industry(show_id)
        scope_list = await self.model['salary'].get_all_scope(show_id)
        nature_list = await self.model['salary'].get_all_nature(show_id)
        return {
            'city': city_list,
            'industry': industry_list,
            'scale': scope_list,
            'nature': nature_list,
        }

    async def get_city_list(self):
        """All known cities."""
        return await self.model['salary'].get_all_city()

    async def get_category_list(self):
        """All job categories."""
        return await self.model['salary'].get_all_category()

    async def get_dep_cate_mapping(self):
        """Department -> category mapping."""
        return await self.model['salary'].get_dep_cate_mapping()

    async def get_job_dep_mapping(self):
        """Job -> department mapping."""
        return await self.model['salary'].get_job_dep_mapping()

    async def get_job_info(self, show_id=False):
        """Return job ranks plus the job/category mapping."""
        rank_list = await self.model['salary'].get_all_rank(show_id)
        job_mapping = await self.model['salary'].get_job_cate_mapping()
        return {
            'rank': rank_list,
            'job_category': job_mapping,
        }

    async def get_job_by_cate_rank(self, category_id, rank_id):
        """Jobs filtered by category and rank."""
        return await self.model['salary'].get_job_by_cate_rank(category_id, rank_id)

    async def get_job_info_by_name(self, name_list):
        """Return market info for each name in *name_list*, preserving order.

        Names with no matching job record get ``market_info: None``.
        """
        jobs = await self.model['salary'].get_job_info_by_name(name_list)
        job_dict = {}
        for job in jobs:
            job_dict[job['name_zh']] = {
                'name': job['name_zh'],
                'code': job['code'],
                'rank_code': job['job_grade_code'],
                'rank_name': job['job_grade_name'],
                'job_category_code': job['job_category_code'],
                'job_category_name': job['job_category_name'],
                # NOTE(review): market_50 is a random placeholder, not
                # real market data -- confirm before shipping.
                'market_50': random.randint(10000, 20000)
            }
        # Build the result with a real list comprehension; the original
        # abused a comprehension for its .append() side effects.
        return [{'job_name': name, 'market_info': job_dict.get(name)}
                for name in name_list]
77,782 | shimaomao/sanicdemo | refs/heads/master | /demo/webshop/app.py | import os
from cubes.server.base import create_server, run_server
from cubes.server.utils import str_to_bool
# Resolve the slicer configuration file: honour $SLICER_CONFIG when set,
# otherwise fall back to slicer.ini in the current working directory.
try:
    CONFIG_PATH = os.environ["SLICER_CONFIG"]
except KeyError:
    CONFIG_PATH = os.path.join(os.getcwd(), "slicer.ini")
# The original passed the literal "slicer.ini" here, silently ignoring
# CONFIG_PATH (and therefore the SLICER_CONFIG override).
run_server(CONFIG_PATH)
77,783 | shimaomao/sanicdemo | refs/heads/master | /middlemare.py | from sanic import response
async def success(req, resp, env=None):
    """Response middleware: wrap the handler result in the standard success envelope."""
    payload = {
        'code': 0,
        'msg': 'operation successful',
        'id': 0,
        'data': resp,
    }
    return response.json(payload)
| {"/Message/db/helper.py": ["/Message/config.py"], "/structure/service.py": ["/base/service.py"], "/SanicGateway/controller/structure.py": ["/base/controller.py"], "/structure/model.py": ["/base/model.py"], "/base/application.py": ["/base/exception.py"], "/service.py": ["/base/service.py", "/base/exception.py"], "/model.py": ["/base/model.py", "/base/environment.py"], "/base/service.py": ["/structure/model.py", "/base/environment.py"], "/base/model.py": ["/base/sql_db.py"], "/route.py": ["/controller.py", "/middlemare.py"], "/controller.py": ["/base/controller.py", "/service.py"], "/base/environment.py": ["/base/sql_db.py", "/base/web_utils.py", "/base/model.py"], "/structure/server.py": ["/base/application.py", "/structure/config.py", "/base/environment.py", "/structure/route.py"], "/Message/script.py": ["/Message/config.py"], "/structure/route.py": ["/structure/controller.py", "/structure/middlemare.py"], "/script/db_helper.py": ["/script/create_data.py"], "/structure/controller.py": ["/base/controller.py", "/structure/config.py", "/structure/service.py"]} |
77,784 | shimaomao/sanicdemo | refs/heads/master | /SanicGateway/controller/structure.py | from base.controller import BaseHandler | {"/Message/db/helper.py": ["/Message/config.py"], "/structure/service.py": ["/base/service.py"], "/SanicGateway/controller/structure.py": ["/base/controller.py"], "/structure/model.py": ["/base/model.py"], "/base/application.py": ["/base/exception.py"], "/service.py": ["/base/service.py", "/base/exception.py"], "/model.py": ["/base/model.py", "/base/environment.py"], "/base/service.py": ["/structure/model.py", "/base/environment.py"], "/base/model.py": ["/base/sql_db.py"], "/route.py": ["/controller.py", "/middlemare.py"], "/controller.py": ["/base/controller.py", "/service.py"], "/base/environment.py": ["/base/sql_db.py", "/base/web_utils.py", "/base/model.py"], "/structure/server.py": ["/base/application.py", "/structure/config.py", "/base/environment.py", "/structure/route.py"], "/Message/script.py": ["/Message/config.py"], "/structure/route.py": ["/structure/controller.py", "/structure/middlemare.py"], "/script/db_helper.py": ["/script/create_data.py"], "/structure/controller.py": ["/base/controller.py", "/structure/config.py", "/structure/service.py"]} |
77,785 | shimaomao/sanicdemo | refs/heads/master | /structure/middlemare.py | from sanic import response
import logging
import datetime
logging.getLogger().setLevel(logging.INFO)
async def success(req, resp, env=None):
    """Response middleware: wrap a handler result in the uniform JSON envelope."""
    body = dict(code=0, msg='operation successful', id=0, data=resp)
    return response.json(body)
async def log(req, env=None):
    """Request middleware: record path, payload and timestamp for every call.

    Returns None so request processing continues normally.
    """
    payload = req.body if req.method == "POST" else req.query_string
    stamp = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    logging.info({'path': req.path, 'query': payload, 'time': stamp})
| {"/Message/db/helper.py": ["/Message/config.py"], "/structure/service.py": ["/base/service.py"], "/SanicGateway/controller/structure.py": ["/base/controller.py"], "/structure/model.py": ["/base/model.py"], "/base/application.py": ["/base/exception.py"], "/service.py": ["/base/service.py", "/base/exception.py"], "/model.py": ["/base/model.py", "/base/environment.py"], "/base/service.py": ["/structure/model.py", "/base/environment.py"], "/base/model.py": ["/base/sql_db.py"], "/route.py": ["/controller.py", "/middlemare.py"], "/controller.py": ["/base/controller.py", "/service.py"], "/base/environment.py": ["/base/sql_db.py", "/base/web_utils.py", "/base/model.py"], "/structure/server.py": ["/base/application.py", "/structure/config.py", "/base/environment.py", "/structure/route.py"], "/Message/script.py": ["/Message/config.py"], "/structure/route.py": ["/structure/controller.py", "/structure/middlemare.py"], "/script/db_helper.py": ["/script/create_data.py"], "/structure/controller.py": ["/base/controller.py", "/structure/config.py", "/structure/service.py"]} |
77,786 | shimaomao/sanicdemo | refs/heads/master | /structure/model.py | from base.model import BaseModel
import random
class SalaryModel(BaseModel):
    """Read model over the salary reference tables (city, industry, job, ...).

    All queries go through ``self.db.find`` (provided by ``BaseModel``).
    NOTE(review): conditions are interpolated with str.format — only safe for
    trusted, internally generated codes; parameterize before exposing to
    user-controlled input.
    """
    def data_format(self, data, show_id=True):
        """Normalize raw rows.

        :param data: iterable of rows with 'code' / 'name_zh' keys
        :param show_id: True  -> [{'code': ..., 'name': ...}, ...]
                        False -> [name, ...]
        """
        if show_id:
            return [{'code': item['code'], 'name': item['name_zh'].strip()}
                    for item in data if item]
        return [item['name_zh'].strip() for item in data if item]
    async def get_all_city(self, show_id=False):
        """All cities, formatted via data_format."""
        data = await self.db.find('city', 'list', {})
        return self.data_format(data, show_id)
    async def get_all_industry(self, show_id=False):
        """All industries, formatted via data_format."""
        data = await self.db.find('industry', 'list', {})
        return self.data_format(data, show_id)
    async def get_all_scope(self, show_id=False):
        """All company scopes, formatted via data_format."""
        data = await self.db.find('company_scope', 'list', {})
        return self.data_format(data, show_id)
    async def get_all_nature(self, show_id=False):
        """All company natures, formatted via data_format.

        NOTE(review): this reads 'company_scope', same as get_all_scope —
        looks like a copy-paste slip (should be 'company_nature'?); left
        unchanged until the schema is confirmed.
        """
        data = await self.db.find('company_scope', 'list', {})
        return self.data_format(data, show_id)
    async def get_dep_cate_mapping(self):
        """Departments grouped by category: {category_id: [{dep_id: dep_name}, ...]}."""
        data = await self.db.find('x_department as dep', 'list', {
            'fields': ['dep.id', 'dep.name as dep_name', 'cate.name as cate_name', 'category_id'],
            'join': 'x_category as cate on cate.id = dep.category_id'
        })
        mapping_dict = {}
        for item in data:
            mapping_dict.setdefault(item['category_id'], []).append(
                {item['id']: item['dep_name'].strip()})
        return mapping_dict
    async def get_job_cate_mapping(self):
        """Jobs grouped by category: [{'code', 'name', 'jobs': [...]}, ...]."""
        data = await self.db.find('job', 'list', {
            'fields': ['job.code', 'job.name_zh', 'cate.name_zh as category_name', 'job_category_code'],
            'join': 'job_category as cate on cate.code = job.job_category_code'
        })
        mapping_dict = {}
        job_category_name_mapping = {}
        for item in data:
            mapping_dict.setdefault(item['job_category_code'], []).append(
                {'code': item['code'], 'name': item['name_zh'].strip()})
            # Overwrites on every row, but the value is constant per category.
            job_category_name_mapping[item['job_category_code']] = item['category_name']
        return [{'code': k, 'name': job_category_name_mapping.get(k), 'jobs': v}
                for k, v in mapping_dict.items()]
    async def get_all_rank(self, show_id=False):
        """All job grades; show_id now defaults to False like the other getters."""
        data = await self.db.find('job_grade', 'list', {})
        return self.data_format(data, show_id)
    async def get_job_by_cate_rank(self, category_code, rank_code):
        """Jobs matching a category and a grade, with a placeholder market_50 value."""
        data = await self.db.find('job', 'list', {
            'condition': 'job_category_code={} and job_grade_code={}'.format(category_code, rank_code),
        })
        # market_50 is random demo data, not a real market median.
        return [{'code': item['code'], 'name': item['name_zh'].strip(),
                 'market_50': random.randint(10000, 20000)}
                for item in data if item]
    async def get_job_info_by_name(self, name_list):
        """Full job rows (with category/grade names) for the given job name(s)."""
        condition = ''
        if isinstance(name_list, (list, tuple)):
            # Bug fix: `isinstance(x, list or tuple)` evaluated to
            # `isinstance(x, list)` and silently rejected tuples.
            if len(name_list) > 1:
                condition = 'job.name_zh in {}'.format(str(tuple(name_list)))
            else:
                # Bug fix: formatting the whole list produced `name_zh = ['x']`
                # (invalid SQL); quote the single name instead.
                condition = "job.name_zh = '{}'".format(name_list[0])
        data = await self.db.find('job', 'list', {
            'condition': condition,
            'fields': ['job.*, job_category.name_zh as job_category_name, job_grade.name_zh as job_grade_name'],
            'join': 'job_category on job_category.code = job.job_category_code left join job_grade on job_grade.code = job.job_grade_code'
        })
        return data
| {"/Message/db/helper.py": ["/Message/config.py"], "/structure/service.py": ["/base/service.py"], "/SanicGateway/controller/structure.py": ["/base/controller.py"], "/structure/model.py": ["/base/model.py"], "/base/application.py": ["/base/exception.py"], "/service.py": ["/base/service.py", "/base/exception.py"], "/model.py": ["/base/model.py", "/base/environment.py"], "/base/service.py": ["/structure/model.py", "/base/environment.py"], "/base/model.py": ["/base/sql_db.py"], "/route.py": ["/controller.py", "/middlemare.py"], "/controller.py": ["/base/controller.py", "/service.py"], "/base/environment.py": ["/base/sql_db.py", "/base/web_utils.py", "/base/model.py"], "/structure/server.py": ["/base/application.py", "/structure/config.py", "/base/environment.py", "/structure/route.py"], "/Message/script.py": ["/Message/config.py"], "/structure/route.py": ["/structure/controller.py", "/structure/middlemare.py"], "/script/db_helper.py": ["/script/create_data.py"], "/structure/controller.py": ["/base/controller.py", "/structure/config.py", "/structure/service.py"]} |
77,787 | shimaomao/sanicdemo | refs/heads/master | /script/create_data.py | # --*-- coding:utf-8 --*--
import xlrd
import uuid
import random
import logging
CITY_LIST = ['北京', '上海', '天津', '重庆', '广州', '深圳', '杭州', '苏州', '成都', '西安', '大连', '沈阳', '珠海']
CATEGORY_LIST = set()
DEP_DICT = {}
JOB_DICT = {}
GRADE_LIST = []
TIME_LIST = []
work_table = xlrd.open_workbook('/Users/panyang/Documents/市场数据库_样本数据.xlsx')
salary_page = 0
para_page = 1
structure_page = 2
code_set = set()
job_mapping = {}
industry_mapping = {}
print(CATEGORY_LIST, DEP_DICT, JOB_DICT)
logging.getLogger().setLevel(logging.INFO)
def gen_code():
    """Draw a 4-digit code that has not been handed out yet (tracked in ``code_set``)."""
    candidate = random.randint(1000, 9999)
    while candidate in code_set:
        candidate = random.randint(1000, 9999)
    code_set.add(candidate)
    return candidate
async def read_structure(pool):
    """Load the job-structure sheet and populate JOB_CATEGORY, JOB_GRADE and JOB.

    Builds bidirectional name<->code mappings as a side effect; the
    module-level ``job_mapping`` is reused later by ``create_salary``.
    NOTE(review): SQL is built with str.format — acceptable only because the
    input is a trusted local spreadsheet.

    :param pool: async DB connection pool exposing ``acquire()``.
    """
    job_grade_set = set()
    job_category_set = set()
    job_category_mapping = {}
    job_grade_mapping = {}
    structure_table = work_table.sheet_by_index(structure_page)
    # First pass: collect the distinct grades (col 2) and categories (col 1);
    # row 0 is assumed to be the header.
    for row in range(1, structure_table.nrows):
        one_structure = structure_table.row_values(row)
        job_grade_set.add(one_structure[2])
        job_category_set.add(one_structure[1])
    # Insert categories, remembering code<->name in both directions.
    async with pool.acquire() as conn:
        for job_category in job_category_set:
            code = gen_code()
            job_category_mapping[code] = job_category
            job_category_mapping[job_category] = code
            await conn.execute('insert into JOB_CATEGORY (CODE, NAME_ZH) VALUES ( \'{}\', \'{}\')'.format(code, job_category))
    # Insert grades, same bidirectional mapping scheme.
    async with pool.acquire() as conn:
        for job_grade in job_grade_set:
            code = gen_code()
            job_grade_mapping[code] = job_grade
            job_grade_mapping[job_grade] = code
            await conn.execute('insert into JOB_GRADE (CODE, NAME_ZH) VALUES ( \'{}\', \'{}\')'.format(code, job_grade))
    # Second pass: insert each job row linked to its grade/category codes.
    async with pool.acquire() as conn:
        for row in range(1, structure_table.nrows):
            one_structure = structure_table.row_values(row)
            code = gen_code()
            job = one_structure[0]
            job_mapping[code] = job
            job_mapping[job] = code
            job_category_code = job_category_mapping[one_structure[1]]
            job_grade_code = job_grade_mapping[one_structure[2]]
            await conn.execute('insert into JOB (CODE, NAME_ZH, JOB_GRADE_CODE, JOB_CATEGORY_CODE) '
                               'VALUES ( \'{}\', \'{}\', \'{}\', \'{}\')'.format(code, job, job_grade_code, job_category_code))
async def create_city(pool):
    """Insert every name in CITY_LIST into CITY with a freshly generated code."""
    async with pool.acquire() as conn:
        for item in CITY_LIST:
            await conn.execute('insert into CITY (NAME_ZH, code) VALUES ({}, {})'.format('\'' + item + '\'', gen_code()))
async def create_nature_and_scope(pool):
    """Placeholder: prints three fresh codes instead of inserting any rows."""
    for _ in range(3):
        print(gen_code())
async def create_industry(pool):
    """Read industry names from the parameter sheet and insert them into INDUSTRY.

    Also fills the module-level ``industry_mapping`` (name<->code, both
    directions) used later by ``create_salary``.
    """
    para_table = work_table.sheet_by_index(para_page)
    async with pool.acquire() as conn:
        # NOTE(review): rows 1..27 are hard-coded — presumably the sheet's
        # industry section; confirm against the spreadsheet layout.
        for row in range(1, 28):
            one_structure = para_table.row_values(row)
            name = one_structure[0]
            code = gen_code()
            industry_mapping[name] = code
            industry_mapping[code] = name
            await conn.execute('insert into INDUSTRY (NAME_ZH, CODE) VALUES (\'{}\', \'{}\')'.format(name, code))
async def create_salary(pool):
    """Truncate market_salary_data and refill it from the salary sheet.

    Requires ``job_mapping`` / ``industry_mapping`` to be populated first
    (see ``read_structure`` / ``create_industry``).
    """
    salary_table = work_table.sheet_by_index(salary_page)
    logging.info('start create data')
    async with pool.acquire() as conn:
        await conn.execute('TRUNCATE TABLE market_salary_data')
        # Row 0/1 are assumed to be headers; each data row holds one job.
        for row in range(2, salary_table.nrows):
            one_structure = salary_table.row_values(row)
            job_code = job_mapping[one_structure[0]]
            industry_code = industry_mapping[one_structure[1]]
            # Fixed demo codes — presumably created in an earlier run;
            # TODO confirm they exist in the target DB.
            city_code = 8308
            scope_code = 8010
            nature_code = 7164
            source = 'eraod'
            # Columns 5..103 hold the individual salary samples for this row.
            for i in range(5, 104):
                salary = one_structure[i]
                await conn.execute('insert into market_salary_data (SOURCE, city_code, job_code, industry_code, scope_code'
                                   ', nature_code, base_salary) VALUES (\'{}\', {}, {},{}, {},{}, {})'.format(source,city_code,job_code,industry_code, scope_code,nature_code, salary))
            logging.info('finish line {}'.format(row))
async def create_data(pool):
    """Run the full import pipeline in dependency order.

    create_city is commented out — presumably CITY is already populated.
    """
    #await create_city(pool)
    await create_industry(pool)
    await create_nature_and_scope(pool)
    await read_structure(pool)
    await create_salary(pool)
| {"/Message/db/helper.py": ["/Message/config.py"], "/structure/service.py": ["/base/service.py"], "/SanicGateway/controller/structure.py": ["/base/controller.py"], "/structure/model.py": ["/base/model.py"], "/base/application.py": ["/base/exception.py"], "/service.py": ["/base/service.py", "/base/exception.py"], "/model.py": ["/base/model.py", "/base/environment.py"], "/base/service.py": ["/structure/model.py", "/base/environment.py"], "/base/model.py": ["/base/sql_db.py"], "/route.py": ["/controller.py", "/middlemare.py"], "/controller.py": ["/base/controller.py", "/service.py"], "/base/environment.py": ["/base/sql_db.py", "/base/web_utils.py", "/base/model.py"], "/structure/server.py": ["/base/application.py", "/structure/config.py", "/base/environment.py", "/structure/route.py"], "/Message/script.py": ["/Message/config.py"], "/structure/route.py": ["/structure/controller.py", "/structure/middlemare.py"], "/script/db_helper.py": ["/script/create_data.py"], "/structure/controller.py": ["/base/controller.py", "/structure/config.py", "/structure/service.py"]} |
77,788 | shimaomao/sanicdemo | refs/heads/master | /base/application.py | from collections import deque
from inspect import isawaitable
from traceback import format_exc
from sanic import Sanic
from sanic.exceptions import ServerError
from sanic.log import log
from sanic.response import HTTPResponse
from sanic.response import json
from base.exception import BaseExcep
class Application(Sanic):
    """Sanic subclass wired from declarative dicts.

    Routes (``{uri: handler}``), error handlers (``{exception: handler}``)
    and middleware (``{'request': [...], 'response': [...]}``) are supplied
    to the constructor; decorator-based registration is deliberately
    disabled. An ``env`` object is passed to every handler and middleware.
    """
    def __init__(self, route_dict, err_route, middlewmare, env):
        assert isinstance(route_dict, dict)
        assert isinstance(middlewmare, dict)
        assert isinstance(err_route, dict)
        super(Application, self).__init__()
        for k, v in route_dict.items():
            self._add_route(v, k)
        for k, v in err_route.items():
            self.error_handler.add(k, v)
        self.env = env
        # Missing keys simply mean "no middleware of that phase".
        self.request_middleware = deque(
            middlewmare['request']) if 'request' in middlewmare else deque()
        self.response_middleware = deque(
            middlewmare['response']) if 'response' in middlewmare else deque()
    def middleware(self, *args, **kwargs):
        # Disabled: middleware must be passed to the constructor.
        raise NotImplementedError
    def route(self, uri, methods=frozenset({'GET'}), host=None,
              strict_slashes=False):
        # Disabled: routes must be passed to the constructor.
        raise NotImplementedError
    def _route(self, uri, methods=frozenset({'GET'}), host=None,
               strict_slashes=False):
        """Decorate a function to be registered as a route
        :param uri: path of the URL
        :param methods: list or tuple of methods allowed
        :param host:
        :return: decorated function
        """
        # Fix case where the user did not prefix the URL with a /
        # and will probably get confused as to why it's not working
        if not uri.startswith('/'):
            uri = '/' + uri
        def response(handler):
            self.router.add(uri=uri, methods=methods, handler=handler,
                            host=host, strict_slashes=strict_slashes)
            return handler
        return response
    def _add_route(self, handler, uri, methods=frozenset({'POST', 'GET'}), host=None,
                   strict_slashes=False):
        """A helper method to register class instance or
        functions as a handler to the application url
        routes.
        :param handler: function or class instance
        :param uri: path of the URL
        :param methods: list or tuple of methods allowed, these are overridden
            if using a HTTPMethodView
        :param host:
        :return: function or class instance
        """
        # Handle HTTPMethodView differently
        # if hasattr(handler, 'view_class'):
        #     methods = set()
        #
        #     for method in HTTP_METHODS:
        #         if getattr(handler.view_class, method.lower(), None):
        #             methods.add(method)
        #
        #     # handle composition view differently
        #     if isinstance(handler, CompositionView):
        #         methods = handler.handlers.keys()
        self._route(uri=uri, methods=methods, host=host,
                    strict_slashes=strict_slashes)(handler)
        return handler
    def add_route(self, handler, uri, methods=frozenset({'GET'}), host=None,
                  strict_slashes=False):
        # Disabled: use the constructor's route_dict (goes through _add_route).
        raise NotImplementedError
    async def handle_request(self, request, write_callback=None, stream_callback=None):
        """Take a request from the HTTP server and produce a response.

        The HTTP server only expects a response object, so all exception
        handling must happen here.
        :param request: HTTP Request object
        :param write_callback: called with the finished response
        :param stream_callback: accepted for interface compatibility but
            unused here — NOTE(review): streaming responses unsupported
        :return: Nothing
        """
        try:
            # -------------------------------------------- #
            # Request Middleware
            # -------------------------------------------- #
            response = False
            # The if improves speed. I don't know why
            if self.request_middleware:
                for middleware in self.request_middleware:
                    response = middleware(request, env=self.env)
                    if isawaitable(response):
                        response = await response
                    if response:
                        break
            # No middleware results
            if not response:
                # -------------------------------------------- #
                # Execute Handler
                # -------------------------------------------- #
                # Fetch handler from router
                handler, args, kwargs = self.router.get(request)
                if handler is None:
                    raise ServerError(
                        ("'None' was returned while requesting a "
                         "handler from the router"))
                # Run response handler
                # handler(request, env, ...) constructs the controller; the
                # trailing () invokes it.
                response = handler(request, self.env, *args, **kwargs)()
                if isawaitable(response):
                    response = await response
                # -------------------------------------------- #
                # Response Middleware
                # -------------------------------------------- #
                if self.response_middleware:
                    for middleware in self.response_middleware:
                        _response = middleware(request, response, env=self.env)
                        if isawaitable(_response):
                            _response = await _response
                        if _response:
                            response = _response
                            break
        except BaseExcep as e:
            # Domain exceptions become a normal 200 envelope carrying their code.
            if e.log:
                log.exception(e.args)
            response = json({'code': e.code, 'data': e.data, 'id': None,
                             'msg': e.msg}, 200)
        except Exception as e:
            # -------------------------------------------- #
            # Response Generation Failed
            # -------------------------------------------- #
            try:
                log.exception(e.args)
                response = self.error_handler.response(request, e)
                if isawaitable(response):
                    response = await response
                if response.status == 500:
                    # Unhandled 500s are rewrapped as a 200 JSON envelope;
                    # tracebacks are exposed only in debug mode.
                    if self.debug:
                        response = json({'code': -1, 'data': format_exc(),
                                         'msg': e.__repr__(), 'id': None}, 200)
                    else:
                        response = json({'code': -1, 'data': e.__repr__(),
                                         'msg': '系统出错', 'id': None}, 200)
            except Exception as e:
                if self.debug:
                    response = HTTPResponse(
                        "Error while handling error: {}\nStack: {}".format(
                            e, format_exc()))
                else:
                    response = HTTPResponse(
                        "An error occured while handling an error")
        # NOTE(review): write_callback is assumed non-None here despite its
        # default — confirm the server layer always supplies it.
        write_callback(response)
| {"/Message/db/helper.py": ["/Message/config.py"], "/structure/service.py": ["/base/service.py"], "/SanicGateway/controller/structure.py": ["/base/controller.py"], "/structure/model.py": ["/base/model.py"], "/base/application.py": ["/base/exception.py"], "/service.py": ["/base/service.py", "/base/exception.py"], "/model.py": ["/base/model.py", "/base/environment.py"], "/base/service.py": ["/structure/model.py", "/base/environment.py"], "/base/model.py": ["/base/sql_db.py"], "/route.py": ["/controller.py", "/middlemare.py"], "/controller.py": ["/base/controller.py", "/service.py"], "/base/environment.py": ["/base/sql_db.py", "/base/web_utils.py", "/base/model.py"], "/structure/server.py": ["/base/application.py", "/structure/config.py", "/base/environment.py", "/structure/route.py"], "/Message/script.py": ["/Message/config.py"], "/structure/route.py": ["/structure/controller.py", "/structure/middlemare.py"], "/script/db_helper.py": ["/script/create_data.py"], "/structure/controller.py": ["/base/controller.py", "/structure/config.py", "/structure/service.py"]} |
77,789 | shimaomao/sanicdemo | refs/heads/master | /service.py | from base.service import BaseService
from sanic.exceptions import SanicException
import base64
import logging
from datetime import datetime, timedelta
from time import time
from base.exception import *
from scipy.stats import chi2
class SalaryService(BaseService):
    """Thin service facade that delegates straight to the 'salary' model."""
    async def get_city_list(self):
        """All cities known to the salary model."""
        return await self.model['salary'].get_all_city()
    async def get_category_list(self):
        """All categories known to the salary model."""
        return await self.model['salary'].get_all_category()
    async def get_dep_cate_mapping(self):
        """Department-to-category mapping from the salary model."""
        return await self.model['salary'].get_dep_cate_mapping()
    async def get_job_dep_mapping(self):
        """Job-to-department mapping from the salary model."""
        return await self.model['salary'].get_job_dep_mapping()
| {"/Message/db/helper.py": ["/Message/config.py"], "/structure/service.py": ["/base/service.py"], "/SanicGateway/controller/structure.py": ["/base/controller.py"], "/structure/model.py": ["/base/model.py"], "/base/application.py": ["/base/exception.py"], "/service.py": ["/base/service.py", "/base/exception.py"], "/model.py": ["/base/model.py", "/base/environment.py"], "/base/service.py": ["/structure/model.py", "/base/environment.py"], "/base/model.py": ["/base/sql_db.py"], "/route.py": ["/controller.py", "/middlemare.py"], "/controller.py": ["/base/controller.py", "/service.py"], "/base/environment.py": ["/base/sql_db.py", "/base/web_utils.py", "/base/model.py"], "/structure/server.py": ["/base/application.py", "/structure/config.py", "/base/environment.py", "/structure/route.py"], "/Message/script.py": ["/Message/config.py"], "/structure/route.py": ["/structure/controller.py", "/structure/middlemare.py"], "/script/db_helper.py": ["/script/create_data.py"], "/structure/controller.py": ["/base/controller.py", "/structure/config.py", "/structure/service.py"]} |
77,790 | shimaomao/sanicdemo | refs/heads/master | /model.py | from base.model import BaseModel
from base.environment import r_cache
from collections import defaultdict
import asyncio
import itertools
try:
import uvloop as async_loop
except ImportError:
async_loop = asyncio
try:
import ujson as json
except ImportError:
import json
loop = async_loop.new_event_loop()
asyncio.set_event_loop(loop=loop)
class SalaryModel(BaseModel):
    """Queries over the x_city / x_category / x_department / x_job tables."""
    @staticmethod
    def _group_rows(rows, key_field, name_field):
        """Group rows into {key_field value: [{row id: stripped name}, ...]}.

        Shared by both mapping queries — the original duplicated this loop.
        """
        grouped = defaultdict(list)
        for item in rows:
            grouped[item[key_field]].append({item['id']: item[name_field].strip()})
        return dict(grouped)
    async def get_all_city(self):
        """Return [{city_id: city_name}, ...] for every row of x_city."""
        data = await self.db.find('x_city', 'list', {})
        return [{item['id']: item['name'].strip()} for item in data]
    async def get_all_category(self):
        """Return [{category_id: category_name}, ...] for every row of x_category."""
        data = await self.db.find('x_category', 'list', {})
        return [{item['id']: item['name'].strip()} for item in data]
    async def get_dep_cate_mapping(self):
        """Departments grouped by their category_id."""
        data = await self.db.find('x_department as dep', 'list', {
            'fields': ['dep.id', 'dep.name as dep_name', 'cate.name as cate_name', 'category_id'],
            'join': 'x_category as cate on cate.id = dep.category_id'
        })
        return self._group_rows(data, 'category_id', 'dep_name')
    async def get_job_dep_mapping(self):
        """Jobs grouped by their department_id."""
        data = await self.db.find('x_job as job', 'list', {
            'fields': ['job.id', 'job.name as job_name', 'dep.name as dep_name', 'department_id'],
            'join': 'x_department as dep on dep.id = job.department_id'
        })
        return self._group_rows(data, 'department_id', 'job_name')
| {"/Message/db/helper.py": ["/Message/config.py"], "/structure/service.py": ["/base/service.py"], "/SanicGateway/controller/structure.py": ["/base/controller.py"], "/structure/model.py": ["/base/model.py"], "/base/application.py": ["/base/exception.py"], "/service.py": ["/base/service.py", "/base/exception.py"], "/model.py": ["/base/model.py", "/base/environment.py"], "/base/service.py": ["/structure/model.py", "/base/environment.py"], "/base/model.py": ["/base/sql_db.py"], "/route.py": ["/controller.py", "/middlemare.py"], "/controller.py": ["/base/controller.py", "/service.py"], "/base/environment.py": ["/base/sql_db.py", "/base/web_utils.py", "/base/model.py"], "/structure/server.py": ["/base/application.py", "/structure/config.py", "/base/environment.py", "/structure/route.py"], "/Message/script.py": ["/Message/config.py"], "/structure/route.py": ["/structure/controller.py", "/structure/middlemare.py"], "/script/db_helper.py": ["/script/create_data.py"], "/structure/controller.py": ["/base/controller.py", "/structure/config.py", "/structure/service.py"]} |
77,791 | shimaomao/sanicdemo | refs/heads/master | /Message/config.py | route_config = {
'excel': {
'host': '127.0.0.1',
'port': '',
'exchange': 'excel_exchange',
'type': 'direct',
'queue': 'excel_queue',
'binding_key': 'excel'
}
}
database_config = {
'host': '127.0.0.1',
'port': '5432',
'user': '',
'password': '',
'db_name': 'Message',
} | {"/Message/db/helper.py": ["/Message/config.py"], "/structure/service.py": ["/base/service.py"], "/SanicGateway/controller/structure.py": ["/base/controller.py"], "/structure/model.py": ["/base/model.py"], "/base/application.py": ["/base/exception.py"], "/service.py": ["/base/service.py", "/base/exception.py"], "/model.py": ["/base/model.py", "/base/environment.py"], "/base/service.py": ["/structure/model.py", "/base/environment.py"], "/base/model.py": ["/base/sql_db.py"], "/route.py": ["/controller.py", "/middlemare.py"], "/controller.py": ["/base/controller.py", "/service.py"], "/base/environment.py": ["/base/sql_db.py", "/base/web_utils.py", "/base/model.py"], "/structure/server.py": ["/base/application.py", "/structure/config.py", "/base/environment.py", "/structure/route.py"], "/Message/script.py": ["/Message/config.py"], "/structure/route.py": ["/structure/controller.py", "/structure/middlemare.py"], "/script/db_helper.py": ["/script/create_data.py"], "/structure/controller.py": ["/base/controller.py", "/structure/config.py", "/structure/service.py"]} |
77,792 | shimaomao/sanicdemo | refs/heads/master | /base/service.py | from structure.model import SalaryModel
from .environment import Environment
all_model = {'salary':SalaryModel}
class ModelDict(dict):
    """Model registry that lazily asks its owning service to load missing keys."""
    def __init__(self, service):
        super().__init__()
        self.service = service
    def __getitem__(self, key):
        if super().__contains__(key):
            return super().__getitem__(key)
        # Not cached yet: let the service import (and usually cache) it.
        return self.service.import_model(key)
class BaseService:
    """Base for services: lazily resolves model classes declared in ``all_model``."""
    def __init__(self, env, connection=None):
        # ``connection`` is accepted for API compatibility but unused here.
        self.env = env
        self.model = ModelDict(self)
    def import_model(self, name):
        """Instantiate, cache and return the model registered as *name*.

        :raises KeyError: when *name* has no entry in ``all_model``.
        """
        model_cls = all_model.get(name)
        if model_cls:
            model = model_cls(self.env)
            self.model[name] = model
        else:
            # Bug fix: `raise (KeyError, msg)` raises TypeError in Python 3
            # (a tuple is not an exception); raise the KeyError properly.
            raise KeyError('no model named {} in service.py'.format(name))
return model | {"/Message/db/helper.py": ["/Message/config.py"], "/structure/service.py": ["/base/service.py"], "/SanicGateway/controller/structure.py": ["/base/controller.py"], "/structure/model.py": ["/base/model.py"], "/base/application.py": ["/base/exception.py"], "/service.py": ["/base/service.py", "/base/exception.py"], "/model.py": ["/base/model.py", "/base/environment.py"], "/base/service.py": ["/structure/model.py", "/base/environment.py"], "/base/model.py": ["/base/sql_db.py"], "/route.py": ["/controller.py", "/middlemare.py"], "/controller.py": ["/base/controller.py", "/service.py"], "/base/environment.py": ["/base/sql_db.py", "/base/web_utils.py", "/base/model.py"], "/structure/server.py": ["/base/application.py", "/structure/config.py", "/base/environment.py", "/structure/route.py"], "/Message/script.py": ["/Message/config.py"], "/structure/route.py": ["/structure/controller.py", "/structure/middlemare.py"], "/script/db_helper.py": ["/script/create_data.py"], "/structure/controller.py": ["/base/controller.py", "/structure/config.py", "/structure/service.py"]} |
77,793 | shimaomao/sanicdemo | refs/heads/master | /base/middlemare.py |
from sanic.request import Request
from sanic import Blueprint, Sanic
class MjsonMiddlemare:
    """Request middleware: attach identification, data and session to the request.

    NOTE(review): ``Sanic.middleware`` is used as a class-level decorator and
    ``session_mgr`` is not defined in this module — this class looks
    unfinished or unused; verify before relying on it.
    """
    @staticmethod
    @Sanic.middleware('request')
    def setup(request):
        # Entry point: runs the three setup steps in order.
        assert isinstance(request, Request)
        MjsonMiddlemare.setup_identification(request)
        MjsonMiddlemare.setup_session(request)
        MjsonMiddlemare.fix_lang(request)
    @staticmethod
    def setup_identification(req):
        # Copies 'identication' and 'data' out of the JSON body onto the request.
        # NOTE(review): 'identication' looks like a typo for 'identification',
        # but it must match what clients actually send — confirm before fixing.
        assert isinstance(req, Request)
        setattr(req, 'identification', req.json.get('identication'))
        setattr(req, 'data', req.json.get('data'))
    @staticmethod
    def setup_session(req):
        # Attach an existing session (looked up by session_id) or a fresh one.
        sid = req.identification.get('session_id')
        if sid:
            req.session = session_mgr.get(sid)
        else:
            req.session = session_mgr.new()
    @staticmethod
    def fix_lang(req):
        # Normalize the client-supplied lang to zh_CN / en_US on the session.
        assert req.session
        lang = req.data.get('lang')
        if lang:
            if lang.find('zh') > -1:
                req.session.context['lang'] = 'zh_CN'
            elif lang.find('en') > -1:
                req.session.context['lang'] = 'en_US'
| {"/Message/db/helper.py": ["/Message/config.py"], "/structure/service.py": ["/base/service.py"], "/SanicGateway/controller/structure.py": ["/base/controller.py"], "/structure/model.py": ["/base/model.py"], "/base/application.py": ["/base/exception.py"], "/service.py": ["/base/service.py", "/base/exception.py"], "/model.py": ["/base/model.py", "/base/environment.py"], "/base/service.py": ["/structure/model.py", "/base/environment.py"], "/base/model.py": ["/base/sql_db.py"], "/route.py": ["/controller.py", "/middlemare.py"], "/controller.py": ["/base/controller.py", "/service.py"], "/base/environment.py": ["/base/sql_db.py", "/base/web_utils.py", "/base/model.py"], "/structure/server.py": ["/base/application.py", "/structure/config.py", "/base/environment.py", "/structure/route.py"], "/Message/script.py": ["/Message/config.py"], "/structure/route.py": ["/structure/controller.py", "/structure/middlemare.py"], "/script/db_helper.py": ["/script/create_data.py"], "/structure/controller.py": ["/base/controller.py", "/structure/config.py", "/structure/service.py"]} |
77,794 | shimaomao/sanicdemo | refs/heads/master | /base/model.py | from .sql_db import PostgresDb
from aioredis import Redis
import asyncio
try:
import ujson as json
except ImportError:
import json
class BaseModel:
    """Base for models: keeps the environment and a shortcut to its DB handle."""
    def __init__(self, env):
        self.env = env
        self.db = env.db
REDIS_CACHE_NAME = 'redis_model_cache'
| {"/Message/db/helper.py": ["/Message/config.py"], "/structure/service.py": ["/base/service.py"], "/SanicGateway/controller/structure.py": ["/base/controller.py"], "/structure/model.py": ["/base/model.py"], "/base/application.py": ["/base/exception.py"], "/service.py": ["/base/service.py", "/base/exception.py"], "/model.py": ["/base/model.py", "/base/environment.py"], "/base/service.py": ["/structure/model.py", "/base/environment.py"], "/base/model.py": ["/base/sql_db.py"], "/route.py": ["/controller.py", "/middlemare.py"], "/controller.py": ["/base/controller.py", "/service.py"], "/base/environment.py": ["/base/sql_db.py", "/base/web_utils.py", "/base/model.py"], "/structure/server.py": ["/base/application.py", "/structure/config.py", "/base/environment.py", "/structure/route.py"], "/Message/script.py": ["/Message/config.py"], "/structure/route.py": ["/structure/controller.py", "/structure/middlemare.py"], "/script/db_helper.py": ["/script/create_data.py"], "/structure/controller.py": ["/base/controller.py", "/structure/config.py", "/structure/service.py"]} |
77,795 | shimaomao/sanicdemo | refs/heads/master | /route.py | from controller import CityList, CategoryList, CateDepMapping, DepJobMapping, CompanyDetail, SalaryData
from middlemare import success
route = {
'/get_all_city': CityList,
'/get_all_category': CategoryList,
'/get_department_mapping': CateDepMapping,
'/get_job_mapping': DepJobMapping,
'/get_company_detail': CompanyDetail,
'/upload_excel': SalaryData
}
middleware = {
'response': [success]
}
err_route = {} | {"/Message/db/helper.py": ["/Message/config.py"], "/structure/service.py": ["/base/service.py"], "/SanicGateway/controller/structure.py": ["/base/controller.py"], "/structure/model.py": ["/base/model.py"], "/base/application.py": ["/base/exception.py"], "/service.py": ["/base/service.py", "/base/exception.py"], "/model.py": ["/base/model.py", "/base/environment.py"], "/base/service.py": ["/structure/model.py", "/base/environment.py"], "/base/model.py": ["/base/sql_db.py"], "/route.py": ["/controller.py", "/middlemare.py"], "/controller.py": ["/base/controller.py", "/service.py"], "/base/environment.py": ["/base/sql_db.py", "/base/web_utils.py", "/base/model.py"], "/structure/server.py": ["/base/application.py", "/structure/config.py", "/base/environment.py", "/structure/route.py"], "/Message/script.py": ["/Message/config.py"], "/structure/route.py": ["/structure/controller.py", "/structure/middlemare.py"], "/script/db_helper.py": ["/script/create_data.py"], "/structure/controller.py": ["/base/controller.py", "/structure/config.py", "/structure/service.py"]} |
77,796 | shimaomao/sanicdemo | refs/heads/master | /base/utils.py | import logging
from decimal import Decimal, ROUND_HALF_UP
# module-wide logging: INFO level configured once at import time
logging.basicConfig(
    level=logging.INFO,
)
# NOTE(review): getLogger('') returns the *root* logger -- presumably a
# module-named logger (getLogger(__name__)) was intended; confirm
logger = logging.getLogger('')
class FrozenDict(dict):
    """An immutable dictionary: every mutating dict method raises NotImplementedError.

    Unlike a plain dict, instances are hashable (provided all values are
    hashable), so they can be used as dict keys or set members.
    """

    def __delitem__(self, key):
        raise NotImplementedError("'__delitem__' not supported on frozendict")

    def __setitem__(self, key, val):
        raise NotImplementedError("'__setitem__' not supported on frozendict")

    def clear(self):
        raise NotImplementedError("'clear' not supported on frozendict")

    def pop(self, key, default=None):
        raise NotImplementedError("'pop' not supported on frozendict")

    def popitem(self):
        raise NotImplementedError("'popitem' not supported on frozendict")

    def setdefault(self, key, default=None):
        raise NotImplementedError("'setdefault' not supported on frozendict")

    def update(self, *args, **kwargs):
        raise NotImplementedError("'update' not supported on frozendict")

    def __hash__(self):
        # bugfix: dict.iteritems() does not exist on Python 3 (AttributeError
        # as soon as an instance was hashed) -- use items() instead
        return hash(
            frozenset((key, hash(val)) for key, val in self.items()))
def dict_num_sum(value):
    """Recursively sum every numeric leaf of a (possibly nested) dict.

    Falsy input (None, {}, 0, '') yields 0.0; non-dict leaves are coerced
    with float(), and anything unconvertible counts as 0.0 (logged).
    """
    if not value:
        return 0.0
    if isinstance(value, dict):
        # sum the recursive totals of all nested values
        return sum(dict_num_sum(nested) for nested in value.values())
    try:
        return float(value)
    except Exception as exc:
        logger.exception(exc)
        return 0.0
def decimal_round(number, precision):
    """Round *number* half-up to *precision* decimal places via Decimal.

    Decimal is used to avoid binary-float representation errors skewing the
    rounding. Empty values ('' / None) round as 0. A negative precision on a
    float/int input returns the number unchanged. On any conversion error the
    result falls back to 0 rounded at the same precision.
    """
    # normalize '' / None (but not 0) to zero
    if not number and number != 0:
        return decimal_round(0, precision)
    if isinstance(number, (float, int)):
        if precision < 0:
            # negative precision is not supported for numeric input
            return number
        # repr() gives the shortest exact decimal text for the float
        number = repr(number)
    try:
        # quantum: 1 for whole numbers, else e.g. '0.01' for precision == 2
        quantum = 1 if precision == 0 else '0.' + '0' * (precision - 1) + '1'
        # ROUND_HALF_UP: classic commercial rounding
        return Decimal(number).quantize(Decimal(quantum),
                                        rounding=ROUND_HALF_UP)
    except Exception as exc:
        logger.exception(exc)
        return decimal_round(0, precision)
def get_formative_money(money, precision=2):
    """Format a monetary amount for display.

    Rounds half-up to *precision* decimals (so 0 -> 0.00, 100 -> 100.00)
    and inserts thousands separators.
    """
    rounded = decimal_round(money, precision)
    return "{:,}".format(rounded)
def delete_zero(value, code='', dict1=None):
    """Recursively strip zero-valued entries (and then-empty subtrees) from a nested dict.

    Mutates *value* in place. Leaves are kept when float() fails on them.
    :param value: dict to prune, or a leaf value (leaves are returned as-is)
    :param code: key of *value* inside its parent *dict1* (used when pruning
        an emptied subtree out of the parent)
    :param dict1: parent dict; defaults to a fresh private dict at top level
    :return: the leaf value for non-dict input, otherwise None
    """
    # bugfix: the original default `dict1={}` was a shared mutable default
    if dict1 is None:
        dict1 = {}
    if isinstance(value, dict):
        # bugfix: snapshot the items -- popping from `value` while iterating
        # value.items() raises RuntimeError on Python 3
        for sub_code, sub_value in list(value.items()):
            sub_value = delete_zero(sub_value, sub_code, value)
            try:
                if float(sub_value) == 0:
                    value.pop(sub_code)
            except Exception:
                # non-numeric leaf (or None from a pruned subtree): keep it
                pass
        # if this subtree emptied out, remove it from the parent as well
        if dict1.get(code) == {}:
            dict1.pop(code)
    else:
        return value
| {"/Message/db/helper.py": ["/Message/config.py"], "/structure/service.py": ["/base/service.py"], "/SanicGateway/controller/structure.py": ["/base/controller.py"], "/structure/model.py": ["/base/model.py"], "/base/application.py": ["/base/exception.py"], "/service.py": ["/base/service.py", "/base/exception.py"], "/model.py": ["/base/model.py", "/base/environment.py"], "/base/service.py": ["/structure/model.py", "/base/environment.py"], "/base/model.py": ["/base/sql_db.py"], "/route.py": ["/controller.py", "/middlemare.py"], "/controller.py": ["/base/controller.py", "/service.py"], "/base/environment.py": ["/base/sql_db.py", "/base/web_utils.py", "/base/model.py"], "/structure/server.py": ["/base/application.py", "/structure/config.py", "/base/environment.py", "/structure/route.py"], "/Message/script.py": ["/Message/config.py"], "/structure/route.py": ["/structure/controller.py", "/structure/middlemare.py"], "/script/db_helper.py": ["/script/create_data.py"], "/structure/controller.py": ["/base/controller.py", "/structure/config.py", "/structure/service.py"]} |
77,797 | shimaomao/sanicdemo | refs/heads/master | /controller.py | from sanic import response
from sanic.exceptions import ServerError
from base.controller import BaseHandler
import logging
from service import SalaryService
try:
import ujson as json
except ImportError:
import json
# NOTE(review): module-level `n` is never read anywhere in this file --
# presumably leftover debug state; confirm before removing
n=0
class CityList(BaseHandler):
    """Endpoint: return the full city list."""

    async def handle(self):
        try:
            service = SalaryService(env=self.env)
            return await service.get_city_list()
        except Exception as exc:
            # surface any failure as a sanic ServerError
            raise ServerError(str(exc.args))
class CategoryList(BaseHandler):
    """Endpoint: return the full category list."""

    async def handle(self):
        try:
            service = SalaryService(env=self.env)
            return await service.get_category_list()
        except Exception as exc:
            # surface any failure as a sanic ServerError
            raise ServerError(str(exc.args))
class CateDepMapping(BaseHandler):
    """Endpoint: return the department-to-category mapping."""

    async def handle(self):
        try:
            service = SalaryService(env=self.env)
            return await service.get_dep_cate_mapping()
        except Exception as exc:
            # surface any failure as a sanic ServerError
            raise ServerError(str(exc.args))
class DepJobMapping(BaseHandler):
    """Endpoint: return the job-to-department mapping."""

    async def handle(self):
        try:
            service = SalaryService(env=self.env)
            return await service.get_job_dep_mapping()
        except Exception as exc:
            # surface any failure as a sanic ServerError
            raise ServerError(str(exc.args))
class CompanyDetail(BaseHandler):
    """Endpoint: return static option lists describing a company.

    Each category maps to [{"id": 1, "name": ...}, ...] with ids numbered
    from 1 in declaration order.
    """

    def handle(self, *args, **kwargs):
        def options(*labels):
            # build the id/name records, ids starting at 1
            return [{"id": idx, 'name': label}
                    for idx, label in enumerate(labels, start=1)]

        return {
            "nature": options("国企", "上市公司", "私营单位"),
            "stage": options("初创", "成长", "稳定", "衰退"),
            "scale": options("1-50", "50-500", "500-5000"),
            "record": options("本科", "硕士", "博士"),
        }
class SalaryData(BaseHandler):
def handle(self, *args, **kwargs):
import time
print(self.request) | {"/Message/db/helper.py": ["/Message/config.py"], "/structure/service.py": ["/base/service.py"], "/SanicGateway/controller/structure.py": ["/base/controller.py"], "/structure/model.py": ["/base/model.py"], "/base/application.py": ["/base/exception.py"], "/service.py": ["/base/service.py", "/base/exception.py"], "/model.py": ["/base/model.py", "/base/environment.py"], "/base/service.py": ["/structure/model.py", "/base/environment.py"], "/base/model.py": ["/base/sql_db.py"], "/route.py": ["/controller.py", "/middlemare.py"], "/controller.py": ["/base/controller.py", "/service.py"], "/base/environment.py": ["/base/sql_db.py", "/base/web_utils.py", "/base/model.py"], "/structure/server.py": ["/base/application.py", "/structure/config.py", "/base/environment.py", "/structure/route.py"], "/Message/script.py": ["/Message/config.py"], "/structure/route.py": ["/structure/controller.py", "/structure/middlemare.py"], "/script/db_helper.py": ["/script/create_data.py"], "/structure/controller.py": ["/base/controller.py", "/structure/config.py", "/structure/service.py"]} |
77,798 | shimaomao/sanicdemo | refs/heads/master | /base/controller.py | from collections import namedtuple
from inspect import isawaitable
from sanic.request import Request
from config import db_config
from aiohttp import client
try:
import ujson as json
except:
import json
# Per-handler feature flags naming the setup phases a handler may run
# (see BaseHandler.initialize / the setup_<field> hook convention)
setup = namedtuple('setup', ['identification', 'session', 'lang', 'db'])
class BaseHandler:
    """Base request handler: runs setup hooks, dispatches to handle(),
    then persists the session and closes any transaction connection.
    """

    # all phases off by default; subclasses override (cf. MjsonHandler)
    setuper = setup(False, False, False, False)

    def __init__(self, req, env):
        self.request = req
        self.env = env
        self.data = None            # parsed request payload (set by setup hooks)
        self.identification = None  # client identification block, if any
        self.session = None         # session object once setup_session ran
        self.trans = None           # lazily-created DB transaction
        self.trans_conn = None      # dedicated connection backing self.trans

    async def initialize(self):
        """Run each available setup_<field> hook, awaiting async ones.

        NOTE(review): `item` is the *field name* string, which is always
        truthy -- the True/False values inside `setuper` are never consulted,
        only whether a setup_<field> method exists. Presumably
        `getattr(self.setuper, item)` was intended; confirm before changing.
        """
        for item in self.setuper._fields:
            if item and hasattr(self, 'setup_'+item):
                result = getattr(self, 'setup_'+item)()
                if isawaitable(result):
                    await result

    async def __call__(self, *args, **kwargs):
        """Full request lifecycle: initialize -> handle -> save session ->
        close the transaction connection (if one was opened)."""
        await self.initialize()
        result = self.handle(*args, **kwargs)
        if isawaitable(result):
            result = await result
        if self.session:
            await self.env.session_mgr.save(self.session)
        if self.trans_conn:
            await self.trans_conn.close()
        return result

    async def handle(self, *args, **kwargs):
        # endpoint logic lives in subclasses
        raise NotImplementedError

    async def transaction(self):
        """(Re)acquire a dedicated connection and create a transaction on it."""
        if self.trans_conn:
            await self.trans_conn.close()
        self.trans_conn = await self.env.db.connection()
        self.trans = self.trans_conn.transaction()

    async def trans_start(self):
        # lazily create the transaction on first use
        if not self.trans:
            await self.transaction()
        await self.trans.start()

    async def trans_commit(self):
        if not self.trans:
            await self.transaction()
        await self.trans.commit()

    async def trans_rollback(self):
        # NOTE(review): with no existing transaction this creates one just to
        # roll it back -- harmless-looking but confirm the intent
        if not self.trans:
            await self.transaction()
        await self.trans.rollback()
class MjsonHandler(BaseHandler):
    """JSON-envelope handler: enables every setup phase (identification,
    session, language, per-company db switching)."""

    setuper = setup(True, True, True, True)

    def setup_identification(self):
        """Split the JSON body into the identification block and the payload."""
        assert isinstance(self.request, Request)
        body = self.request.json
        # NOTE(review): 'identication' looks like a typo for 'identification',
        # but it is part of the wire protocol -- do not rename unilaterally
        self.identification = body.get('identication', {})
        self.data = body.get('data', {})

    async def setup_session(self):
        """Load the session referenced by the client, or start a fresh one."""
        sid = self.identification.get('session_id')
        if sid:
            self.session = await self.env.session_mgr.get(sid)
        else:
            self.session = self.env.session_mgr.new()

    def setup_lang(self):
        """Record the client language (zh_CN / en_US) in the session context."""
        assert self.session
        lang = self.identification.get('language')
        if not lang:
            return
        if 'zh' in lang:
            self.session.context['lang'] = 'zh_CN'
        elif 'en' in lang:
            self.session.context['lang'] = 'en_US'

    async def setup_db(self):
        """Switch the shared pool to the company's database when config pins none."""
        pinned = db_config.get('database')
        company_db = self.request.headers.get('X-Company-Code', '').lower()
        # only multi-tenant switch when config does not pin a database
        if company_db and not pinned:
            self.env.db.pool = await self.env.db.create_pool(database=company_db)
class JsonHandler(BaseHandler):
    """Handler that parses the raw request body as JSON into self.data."""

    async def initialize(self):
        await super().initialize()
        self.data = json.loads(self.request.body)
class RedirectHandler(object):
    """Minimal handler without BaseHandler's setup pipeline.

    Fixes relative to the original:
    - initialize() assigned from an undefined global ``request`` and would
      raise NameError on every call; the request is already bound in
      __init__, so the method is now an overridable no-op hook.
    - __call__ read ``self.session`` / ``self.trans_conn`` which were never
      assigned (AttributeError); both are now initialised to None, keeping
      the lifecycle identical to BaseHandler.__call__.
    """

    def __init__(self, req, env):
        self.request = req
        self.env = env
        self.data = None
        self.session = None      # populated by subclasses that use sessions
        self.trans_conn = None   # populated by subclasses that open a connection

    async def initialize(self):
        # hook for subclasses; self.request is already set in __init__
        pass

    async def __call__(self, *args, **kwargs):
        """Initialize, dispatch to handle(), then persist session / close
        the transaction connection when a subclass set them."""
        await self.initialize()
        result = self.handle(*args, **kwargs)
        if isawaitable(result):
            result = await result
        if self.session:
            await self.env.session_mgr.save(self.session)
        if self.trans_conn:
            await self.trans_conn.close()
        return result

    async def handle(self, *args, **kwargs):
        # subclasses implement the redirect logic
        raise NotImplementedError
| {"/Message/db/helper.py": ["/Message/config.py"], "/structure/service.py": ["/base/service.py"], "/SanicGateway/controller/structure.py": ["/base/controller.py"], "/structure/model.py": ["/base/model.py"], "/base/application.py": ["/base/exception.py"], "/service.py": ["/base/service.py", "/base/exception.py"], "/model.py": ["/base/model.py", "/base/environment.py"], "/base/service.py": ["/structure/model.py", "/base/environment.py"], "/base/model.py": ["/base/sql_db.py"], "/route.py": ["/controller.py", "/middlemare.py"], "/controller.py": ["/base/controller.py", "/service.py"], "/base/environment.py": ["/base/sql_db.py", "/base/web_utils.py", "/base/model.py"], "/structure/server.py": ["/base/application.py", "/structure/config.py", "/base/environment.py", "/structure/route.py"], "/Message/script.py": ["/Message/config.py"], "/structure/route.py": ["/structure/controller.py", "/structure/middlemare.py"], "/script/db_helper.py": ["/script/create_data.py"], "/structure/controller.py": ["/base/controller.py", "/structure/config.py", "/structure/service.py"]} |
77,799 | shimaomao/sanicdemo | refs/heads/master | /base/web_utils.py | from hashlib import sha1
import os
from time import time
from random import random
from aioredis import Redis
from aioredis.pool import RedisPool
import asyncio
import re
from aiohttp import request
import logging
try:
import ujson as json
except ImportError:
import json
_sha1_re = re.compile(r'^[a-f0-9]{40}$')
class Session(dict):
    """Dict-backed session object.

    __setattr__ below falls back to item assignment whenever the attribute
    does not already exist on the instance; since __init__ runs before any
    instance attributes exist, its assignments (id, db, uid, context) land
    in the *dict payload* rather than instance __dict__, and __getattr__
    serves them back from the dict. Net effect: attribute access is session-
    field access, and all fields serialize with dict(session).
    """

    def __init__(self, sid, db, data=None):
        self.id = sid       # session id (stored as a dict entry, see above)
        self.db = db        # db handle/name passed through by the manager
        self.uid = None     # authenticated user id, if any
        self.context = {}   # per-session context (e.g. 'lang')
        if data:
            # merge previously-persisted fields over the defaults
            super(Session, self).__init__(data)

    def __getattr__(self, attr):
        # unknown attributes resolve to dict entries, None when absent
        return self.get(attr, None)

    def __setattr__(self, k, v):
        try:
            object.__getattribute__(self, k)
        except:
            # no such instance attribute -> store as session data
            return self.__setitem__(k, v)
        # genuine instance attribute already exists -> update it normally
        object.__setattr__(self, k, v)

    def get_context(self, uid):
        # placeholder -- not implemented
        pass
class SessionManager(object):
    """Redis-backed session store.

    Sessions persist as redis hashes under ``key_template.format(sid)`` with
    a sliding ``timeout`` (seconds, default 24h).

    Fix relative to the original: ``delete`` and ``save`` branched on /
    returned aioredis command coroutines without awaiting them (contrast the
    awaited ``hgetall`` in :meth:`get`), so deletes and saves never actually
    executed. They are now awaited.
    """

    def __init__(self, db, redis_pool, company_code=None,
                 key_template='oe-session:{}', timeout=60 * 60 * 24,
                 session_class=Session):
        self.redis_pool = redis_pool
        self.db = db
        self.key_template = key_template
        self.timeout = timeout
        self.company_code = company_code or ''
        self.session_class = session_class
        self.access_timestamp = time()

    @staticmethod
    def _urandom():
        # os.urandom is the real entropy source; random() is a weak fallback
        if hasattr(os, 'urandom'):
            return os.urandom(30)
        return str(random()).encode('ascii')

    @staticmethod
    def generate_key(salt=None):
        """Return a fresh random 40-hex-char session id."""
        if salt is None:
            salt = repr(salt).encode('ascii')
        return sha1(b''.join([
            salt,
            str(time()).encode('ascii'),
            SessionManager._urandom()
        ])).hexdigest()

    def is_valid_key(self, key):
        """Check if a key has the correct format (40 lowercase hex chars)."""
        return _sha1_re.match(key) is not None

    def get_session_key(self, sid):
        """Map a session id onto its redis key via the template."""
        return self.key_template.format(sid)

    async def get(self, sid):
        """Load the session for *sid* and refresh its TTL; new session when
        the id is invalid or nothing is stored.

        NOTE(review): the company_code prefix is applied here but not in
        delete()/save(); with the default company_code='' the keys agree --
        confirm before ever passing a non-empty company_code.
        """
        if not self.is_valid_key(sid):
            return self.new()
        key = self.company_code + self.get_session_key(sid)
        async with self.redis_pool.get() as conn:
            saved = await conn.hgetall(key)
            if saved:
                data = {}
                for k, v in saved.items():
                    # redis returns bytes: restore ints, decode everything else
                    data[k.decode()] = int(v) if v.isdigit() else v.decode()
                await conn.expire(key, self.timeout)
                if isinstance(saved, dict) and 'context' in data:
                    data['context'] = json.loads(data['context'])
                return self.session_class(sid, self.db, data=data)
            else:
                return self.new()

    def new(self):
        """Create a fresh, empty session with a random id."""
        return self.session_class(self.generate_key(), self.db)

    async def delete(self, sid):
        """Remove the stored session for *sid*."""
        key = self.get_session_key(sid)
        async with self.redis_pool.get() as conn:
            # bugfix: was returned un-awaited, so the delete never ran
            return await conn.delete(key)

    async def save(self, session):
        """Serialize *session* into its redis hash and reset the TTL."""
        key = self.get_session_key(session.id)
        session.access_timestamp = time()
        session = dict(session)
        for k, v in session.items():
            # non-scalar fields (e.g. context) are stored as JSON text
            if not isinstance(v, (str, int, float)):
                session[k] = json.dumps(v)
        async with self.redis_pool.get() as conn:
            logging.info('save session {}'.format(session))
            # bugfix: hmset_dict/expire were never awaited, so nothing was
            # actually written to redis
            if await conn.hmset_dict(key, session):
                return await conn.expire(key, self.timeout)
| {"/Message/db/helper.py": ["/Message/config.py"], "/structure/service.py": ["/base/service.py"], "/SanicGateway/controller/structure.py": ["/base/controller.py"], "/structure/model.py": ["/base/model.py"], "/base/application.py": ["/base/exception.py"], "/service.py": ["/base/service.py", "/base/exception.py"], "/model.py": ["/base/model.py", "/base/environment.py"], "/base/service.py": ["/structure/model.py", "/base/environment.py"], "/base/model.py": ["/base/sql_db.py"], "/route.py": ["/controller.py", "/middlemare.py"], "/controller.py": ["/base/controller.py", "/service.py"], "/base/environment.py": ["/base/sql_db.py", "/base/web_utils.py", "/base/model.py"], "/structure/server.py": ["/base/application.py", "/structure/config.py", "/base/environment.py", "/structure/route.py"], "/Message/script.py": ["/Message/config.py"], "/structure/route.py": ["/structure/controller.py", "/structure/middlemare.py"], "/script/db_helper.py": ["/script/create_data.py"], "/structure/controller.py": ["/base/controller.py", "/structure/config.py", "/structure/service.py"]} |
77,800 | shimaomao/sanicdemo | refs/heads/master | /base/sql_db.py | import asyncio
from asyncpg import pool, create_pool
from config import db_config, base_config
class PostgresDb:
    """Thin asyncpg wrapper: a singleton pool holder plus a string-based SQL builder.

    NOTE(review): every query below is assembled by plain string
    interpolation with no parameter binding or escaping -- callers MUST NOT
    feed untrusted input into fields/conditions/values (SQL injection risk).
    """
    _instance = None

    def __new__(cls, *args, **kwargs):
        # singleton: the first instance created is reused forever
        if not hasattr(cls, '_instance') or not cls._instance:
            # kwargs are dropped so object.__new__ accepts the call; note
            # positional args are still forwarded, so PostgresDb(pool) called
            # positionally would raise TypeError -- the visible caller
            # (Environment) uses the db_pool= keyword
            kwargs = {}
            cls._instance = super(PostgresDb, cls).__new__(cls, *args, **kwargs)
        return cls._instance

    def __init__(self, db_pool=None):
        if not db_pool:
            # NOTE(review): run_until_complete() is called with no coroutine,
            # which raises TypeError immediately -- this no-pool path cannot
            # work as written; intent unclear, confirm before relying on it
            loop = asyncio.get_event_loop()
            loop.stop()
            loop.run_until_complete()
        self.dicConfig = {}
        self.pool = db_pool

    async def create_pool(self, host=None, database=None, user=None,
                          password=None):
        """Create (or reuse) a pool for *database*, defaulting to db_config values.

        NOTE(review): a newly created pool is returned but NOT stored on
        self.pool -- the caller is expected to assign it (cf.
        MjsonHandler.setup_db doing `env.db.pool = await ...`).
        """
        if self.pool and self.pool._connect_kwargs['database'] == database:
            return self.pool
        db_host = db_config.get('host', '127.0.0.1') if not host else host
        database = db_config.get('database') if not database else database
        db_user = db_config.get('user') if not user else user
        db_pwd = db_config.get('password') if not password else password
        # !!! different companies switch databases; rely on server-side
        # pooling and watch the client-side pool count
        db_pool = await create_pool(min_size=2, host=db_host,
                                    database=database,
                                    user=db_user, password=db_pwd)
        return db_pool

    async def connection(self):
        # hand out a raw connection; the caller is responsible for it
        conn = await self.pool.acquire()
        return conn

    async def _execute(self, str_sql, connection=None):
        # run a statement on the given connection, or borrow one from the pool
        if not connection:
            async with self.pool.acquire() as conn:
                data = await conn.execute(str_sql)
        else:
            data = await connection.execute(str_sql)
        return data

    async def _fetch(self, str_sql, connection=None):
        # fetch all rows
        if not connection:
            async with self.pool.acquire() as conn:
                data = await conn.fetch(str_sql)
        else:
            data = await connection.fetch(str_sql)
        return data

    async def _fetchval(self, str_sql, connection=None):
        # fetch a single scalar value
        if not connection:
            async with self.pool.acquire() as conn:
                data = await conn.fetchval(str_sql)
        else:
            data = await connection.fetchval(str_sql)
        return data

    async def _fetchrow(self, str_sql, connection=None):
        # fetch a single row
        if not connection:
            async with self.pool.acquire() as conn:
                data = await conn.fetchrow(str_sql)
        else:
            data = await connection.fetchrow(str_sql)
        return data

    async def find(self, str_table_name, str_type, dic_data, boo_format_data=True, connection=None):
        """Read a result set.

        @params str_table_name string table name
        @params str_type string 'list' (all rows) or 'first' (single row)
        @prams dic_data dict query description (fields/condition/join/...)
        @params boo_format_data bool normalize dic_data first (default True)

        NOTE(review): any str_type other than 'list'/'first' leaves `data`
        unbound and raises UnboundLocalError at the return.
        """
        if boo_format_data:
            dic_data = self.formatData(dic_data)
        str_table_name = self.build_table_name(str_table_name)
        str_fields = self.build_fields(dic_data['fields'])
        str_condition = self.build_condition(dic_data['condition'])
        str_join = self.build_join(dic_data['join'])
        str_limit = self.build_limit(dic_data['limit'])
        str_group = self.build_group(dic_data['group'])
        str_order = self.build_order(dic_data['order'])
        str_select = self.build_select(dic_data['distinct'])
        str_sql = "%s %s from %s %s %s %s %s %s" % (str_select, str_fields, str_table_name, str_join, str_condition, str_group,
                                                    str_order, str_limit)
        #print(str_sql)
        if str_type == 'list':
            data = await self._fetch(str_sql, connection=connection)
        elif str_type == 'first':
            data = await self._fetchrow(str_sql, connection=connection)
        return data

    async def insert(self, str_table_name, dic_data, connection=None):
        """Insert one row; returns the new id (RETURNING id).

        @params str_table_name string table name
        @params dic_data dict with 'key' (column list) and 'val' (value list)
        """
        dic_data = self.formatData(dic_data)
        str_table_name = self.build_table_name(str_table_name)
        str_sql = "insert into %s (%s) values (%s) RETURNING id" % (str_table_name, dic_data['key'], dic_data['val'])
        # print str_sql
        data = await self._fetchval(str_sql, connection=connection)
        return data

    async def update(self, str_table_name, dic_data, connection=None):
        """Update rows matching dic_data['condition'] with dic_data['fields'].

        @params str_table_name string table name
        @params dic_data dict data description
        """
        dic_data = self.formatData(dic_data)
        str_table_name = self.build_table_name(str_table_name)
        str_fields = dic_data['fields']
        str_condition = self.build_condition(dic_data['condition'])
        str_sql = "update %s set %s %s" % (str_table_name, str_fields, str_condition)
        data = await self._execute(str_sql, connection=connection)
        return data

    async def delete(self, str_table_name, dic_data, connection=None):
        """Delete rows matching dic_data['condition'].

        @params str_table_name string table name
        @params dic_data dict data description
        """
        dic_data = self.formatData(dic_data)
        str_table_name = self.build_table_name(str_table_name)
        str_condition = self.build_condition(dic_data['condition'])
        str_sql = "delete from %s %s" % (str_table_name, str_condition)
        # print str_sql
        data = await self._execute(str_sql, connection=connection)
        return data

    def formatData(self, dic_data):
        """Normalize a query-description dict: default every clause key and
        flatten 'key'/'val' lists into SQL fragments.

        NOTE(review): 'val' items are wrapped in single quotes with no
        escaping -- values containing quotes will break the statement.
        """
        dic_data['fields'] = dic_data['fields'] if 'fields' in dic_data else ''
        dic_data['join'] = dic_data['join'] if 'join' in dic_data else ''
        dic_data['condition'] = dic_data['condition'] if 'condition' in dic_data else ''
        dic_data['order'] = dic_data['order'] if 'order' in dic_data else ''
        dic_data['group'] = dic_data['group'] if 'group' in dic_data else ''
        dic_data['limit'] = dic_data['limit'] if 'limit' in dic_data else ''
        dic_data['distinct'] = dic_data['distinct'] if 'distinct' in dic_data else False
        if 'key' in dic_data:
            if isinstance(dic_data['key'], list):
                dic_data['key'] = ','.join(dic_data['key'])
        else:
            dic_data['key'] = ''
        if 'val' in dic_data:
            if isinstance(dic_data['val'], list):
                dic_data['val'] = map(lambda f: '\''+f+'\'', dic_data['val'])
                dic_data['val'] = ','.join(dic_data['val'])
        else:
            dic_data['val'] = ''
        return dic_data

    def build_table_name(self, str_table_name):
        """Build the table name (table-prefix support is currently disabled).

        @params str_table_name string table name
        """
        # str_table_name = self.dicConfig['DB_TABLEPRE'] + str_table_name if self.dicConfig.has_key('DB_TABLEPRE') and \
        # self.dicConfig['DB_TABLEPRE'] else str_table_name
        return str_table_name

    def build_fields(self, lis_fields):
        """Build the select-field list; '*' when none given.

        @params lis_fields list of field names
        """
        str_fields = ','.join(lis_fields) if lis_fields else '*'
        return str_fields

    def build_join(self, str_join):
        """Build the JOIN clause (LEFT JOIN only).

        @params str_join string join fragment
        """
        return 'LEFT JOIN %s' % str_join if str_join else ''

    def build_condition(self, str_condition):
        """Build the WHERE clause.

        @params str_condition string raw condition fragment
        """
        return 'where %s' % str_condition if str_condition else ''

    def build_group(self, str_group):
        """Build the GROUP BY clause.

        @params str_group string raw group fragment
        """
        return 'group by ' + str_group if str_group else ''

    def build_order(self, str_order):
        """Build the ORDER BY clause.

        @params str_order string raw order fragment
        """
        return 'order by ' + str_order if str_order else ''

    def build_limit(self, lis_limit):
        """Build the LIMIT clause.

        @params lis_limit list of limit parts (joined with commas)
        """
        str_limit = ','.join(lis_limit) if lis_limit else ''
        return 'limit ' + str_limit if str_limit else ''

    def build_select(self, distinct):
        """Build the SELECT keyword.

        :param distinct: bool whether to include DISTINCT
        :return: str
        """
        return 'select distinct' if distinct else 'select'
| {"/Message/db/helper.py": ["/Message/config.py"], "/structure/service.py": ["/base/service.py"], "/SanicGateway/controller/structure.py": ["/base/controller.py"], "/structure/model.py": ["/base/model.py"], "/base/application.py": ["/base/exception.py"], "/service.py": ["/base/service.py", "/base/exception.py"], "/model.py": ["/base/model.py", "/base/environment.py"], "/base/service.py": ["/structure/model.py", "/base/environment.py"], "/base/model.py": ["/base/sql_db.py"], "/route.py": ["/controller.py", "/middlemare.py"], "/controller.py": ["/base/controller.py", "/service.py"], "/base/environment.py": ["/base/sql_db.py", "/base/web_utils.py", "/base/model.py"], "/structure/server.py": ["/base/application.py", "/structure/config.py", "/base/environment.py", "/structure/route.py"], "/Message/script.py": ["/Message/config.py"], "/structure/route.py": ["/structure/controller.py", "/structure/middlemare.py"], "/script/db_helper.py": ["/script/create_data.py"], "/structure/controller.py": ["/base/controller.py", "/structure/config.py", "/structure/service.py"]} |
77,801 | shimaomao/sanicdemo | refs/heads/master | /base/environment.py | import asyncio
from asyncpg import pool, create_pool
from config import db_config, base_config
from base.sql_db import PostgresDb
from time import time
from base.web_utils import SessionManager
try:
import uvloop as async_loop
except ImportError:
async_loop = asyncio
try:
import ujson as json
except ImportError:
import json
import logging
from base.model import BaseModel
class Environment:
    """
    Per-company shared context (db pool, redis pools, session manager).
    Built once at server start; it is NOT created/torn down per request.
    """

    def __init__(self, loop=None, db_pool=None, redis_pool=None, redis_cache_pool=None):
        self.company_code = base_config.get('company_code', '')
        self.db = PostgresDb(db_pool=db_pool)
        self.redis_pool = redis_pool
        self.redis_cache_pool = redis_cache_pool
        self.lang = base_config.get('lang')
        self.currency_symbol = {}   # currency_id -> symbol cache, filled lazily
        self.loop = loop
        # Pufa-project compatibility: share the auth redis with "center" by
        # using an unprefixed key template when no company code is configured
        if not self.company_code:
            key_template = '{}'
        else:
            key_template = self.company_code + 'oe-session:{}'
        # NOTE(review): SessionManager's first parameter is `db`, but the
        # database *name* string is passed here, not a db object -- it is
        # only attached to Session instances; confirm it is really unused
        self.session_mgr = SessionManager(
            db_config.get('database'),
            self.redis_pool, key_template=key_template)

    async def get_currency_symbol(self, currency_id):
        """Return the display symbol for *currency_id*, caching db lookups
        in memory; None when the currency is unknown."""
        symbol = self.currency_symbol.get(currency_id)
        if not symbol:
            data = await self.db.find('res_currency as rc', 'list', {
                'fields': ['rc.symbol'],
                'condition': 'rc.id = {}'.format(currency_id)
            })
            if not data:
                return None
            symbol = data[0]['symbol']
            self.currency_symbol[currency_id] = symbol
        return symbol

    async def get_hash_cache_info(self, table_name: str, identification, fields=None, exist_time=None):
        """
        author: PAN Yang
        Row-level cache of a db table inside one redis hash. For each row id
        the hash stores a '<id>' field (the JSON row) and a sibling
        '<id>expire_at' field whose timestamp decides staleness.
        :param table_name: table whose row is cached
        :param identification: integer row id (anything else returns {})
        :param fields: optional list of column names; when exactly one field
            is requested, its bare value is returned instead of a dict
        :param exist_time: optional TTL in seconds for the cached row
        :return: row dict, single value, or {} for invalid input
        """
        # parameter checks
        if not identification or not isinstance(identification, int):
            return {}
        if fields:
            assert isinstance(fields, list)
        else:
            fields = []
        if exist_time:
            assert isinstance(exist_time, int)
        # redis key / field naming
        str_key = 'environment_cache_' + self.company_code + '_' + str(table_name)
        str_id = str(identification)
        str_id_expire = str(identification) + 'expire_at'
        async with self.redis_pool.get() as conn:
            data = await conn.hget(str_key, str_id)
            # expiry timestamp for this row
            expire_at = await conn.hget(str_key, str_id_expire)
            # fields that still need a db lookup
            uncovered_fields = []
            # cached data with no expiry, or not yet expired
            if data and (not expire_at or (expire_at and float(expire_at) >= time())):
                data = json.loads(data)
                # (disabled) refresh only the fields missing from the cache:
                # [uncovered_fields.append(single) for single in fields if single not in data]
            else:
                # expired (or absent): drop the cache and re-query everything
                data = {}
                uncovered_fields = ['*']
            # re-query whatever is not covered by the cache
            if uncovered_fields:
                added_data = await self.db.find(str(table_name), 'list', {
                    'fields': uncovered_fields,
                    'condition': 'id={}'.format(int(identification))
                })
                # on update, rewrite the redis cache and reset the expiry
                # TODO possible issue: each single-field refresh resets the
                # lifetime of the whole cached row
                if added_data:
                    data.update(dict(added_data[0]))
                    update_dict = {str_id: json.dumps(data)}
                    if exist_time:
                        update_dict.update({str_id_expire: time() + exist_time})
                    await conn.hmset_dict(str_key, update_dict)
                    logging.warning('using db')
        # single-field queries return the bare value
        if len(fields) == 1:
            data = data.get(fields[0])
        return data
def r_cache(key=None, identification=None, time=None, company_code=None):
    """
    Async caching decorator for model methods: caches the JSON-serialized
    return value in redis under a name built from key + selected positional
    arguments.
    :param key: cache-name stem (used when the method lives on a BaseModel)
    :param identification: positional-arg index (int) or list of indexes
        whose values are appended to the cache name
    :param time: optional TTL in seconds
    :param company_code: overrides the model environment's company code
    :return: the decorator
    """
    def _deco(func):
        async def wrapper(*args, **kwargs):
            # convention: the wrapped callable is a method, args[0] is the model
            model = args[0]
            async with model.env.redis_pool.get() as conn:
                if company_code is None:
                    str_name = 'redis_model_cache_' + model.env.company_code + '_'
                else:
                    str_name = 'redis_model_cache_' + company_code + '_'
                if key and isinstance(model, BaseModel):
                    str_name += str(key)
                # NOTE(review): `id` here is the *builtin* (always truthy) --
                # presumably `identification` was meant; as written the int
                # branch runs whenever identification is an int, which happens
                # to match the intent, but confirm and fix the shadowed check
                if id and isinstance(identification, int):
                    str_name += str(args[identification])
                elif isinstance(identification, list):
                    for item in identification:
                        str_name += str(args[item])
                cache = await conn.get(str_name)
                if cache:
                    ret = json.loads(cache)
                    return trans_redis_type(ret)
                ret = await (func(*args, **kwargs))
                cache = json.dumps(ret)
                # str_name always starts with the prefix, so this is always true
                if str_name:
                    await conn.set(str_name, cache)
                    if isinstance(time, int):
                        # NOTE(review): expire() is a coroutine and is not
                        # awaited here, so the TTL is never applied -- TODO
                        conn.expire(str_name, time)
                return ret
        return wrapper
    return _deco
def trans_redis_type(data):
    """Normalize redis payloads: map the b'null' marker back to None.

    Dicts are rebuilt with b'null' values replaced by None; a bare b'null'
    becomes None; anything else passes through unchanged.
    """
    if isinstance(data, dict):
        return {key: (None if val == b'null' else val)
                for key, val in data.items()}
    if isinstance(data, bytes) and data == b'null':
        return None
    return data
def format_num(self):
    # NOTE(review): takes `self` yet appears at module level in this dump and
    # is never called here -- possibly meant to be an Environment method;
    # confirm placement before implementing
    # TODO format according to settings, e.g. thousands separators
    pass
| {"/Message/db/helper.py": ["/Message/config.py"], "/structure/service.py": ["/base/service.py"], "/SanicGateway/controller/structure.py": ["/base/controller.py"], "/structure/model.py": ["/base/model.py"], "/base/application.py": ["/base/exception.py"], "/service.py": ["/base/service.py", "/base/exception.py"], "/model.py": ["/base/model.py", "/base/environment.py"], "/base/service.py": ["/structure/model.py", "/base/environment.py"], "/base/model.py": ["/base/sql_db.py"], "/route.py": ["/controller.py", "/middlemare.py"], "/controller.py": ["/base/controller.py", "/service.py"], "/base/environment.py": ["/base/sql_db.py", "/base/web_utils.py", "/base/model.py"], "/structure/server.py": ["/base/application.py", "/structure/config.py", "/base/environment.py", "/structure/route.py"], "/Message/script.py": ["/Message/config.py"], "/structure/route.py": ["/structure/controller.py", "/structure/middlemare.py"], "/script/db_helper.py": ["/script/create_data.py"], "/structure/controller.py": ["/base/controller.py", "/structure/config.py", "/structure/service.py"]} |
77,802 | shimaomao/sanicdemo | refs/heads/master | /structure/server.py | from base.application import Application
from asyncpg import create_pool
import asyncio
from structure.config import db_config, server_config, redis_config
from base.environment import Environment
from aioredis import create_pool as create_redis_pool
from structure.route import route, err_route, middleware
try:
import ujson as json
except ImportError:
import json
try:
import uvloop as async_loop
except ImportError:
async_loop = asyncio
# event loop: uvloop when installed, plain asyncio otherwise (see imports)
loop = async_loop.new_event_loop()
asyncio.set_event_loop(loop=loop)
# placeholder; the real Environment is attached to the app in init_db()
env = None
async def init_db(*args):
    """after_start hook: build the postgres and redis pools, then attach a
    fully-wired Environment to the running application.

    Called by sanic with (application, loop).
    """
    application, loop = args[0], args[1]
    # postgres pool from config (host defaults to localhost)
    db_pool = await create_pool(
        max_size=50,
        host=db_config.get('host', '127.0.0.1'),
        database=db_config.get('database'),
        user=db_config.get('user'),
        password=db_config.get('password'),
        loop=loop)
    # main redis pool (sessions etc.)
    main_conf = redis_config['redis']
    redis_pool = await create_redis_pool(
        (main_conf.get('host'), main_conf.get('port')),
        db=main_conf.get('db'), loop=loop)
    # dedicated redis pool for caching
    cache_conf = redis_config['redis_cache']
    redis_cache_pool = await create_redis_pool(
        (cache_conf.get('host'), cache_conf.get('port')),
        db=cache_conf.get('db'), loop=loop)
    application.env = Environment(loop=loop, db_pool=db_pool,
                                  redis_pool=redis_pool,
                                  redis_cache_pool=redis_cache_pool)
# NOTE(review): `env` is still None at this point -- the application runs
# without an environment until init_db() assigns application.env after start
app = Application(route, err_route, middleware, env)
host = server_config.get('host')
port = server_config.get('port')
app.run(host=host, port=port, after_start=init_db, debug=False) | {"/Message/db/helper.py": ["/Message/config.py"], "/structure/service.py": ["/base/service.py"], "/SanicGateway/controller/structure.py": ["/base/controller.py"], "/structure/model.py": ["/base/model.py"], "/base/application.py": ["/base/exception.py"], "/service.py": ["/base/service.py", "/base/exception.py"], "/model.py": ["/base/model.py", "/base/environment.py"], "/base/service.py": ["/structure/model.py", "/base/environment.py"], "/base/model.py": ["/base/sql_db.py"], "/route.py": ["/controller.py", "/middlemare.py"], "/controller.py": ["/base/controller.py", "/service.py"], "/base/environment.py": ["/base/sql_db.py", "/base/web_utils.py", "/base/model.py"], "/structure/server.py": ["/base/application.py", "/structure/config.py", "/base/environment.py", "/structure/route.py"], "/Message/script.py": ["/Message/config.py"], "/structure/route.py": ["/structure/controller.py", "/structure/middlemare.py"], "/script/db_helper.py": ["/script/create_data.py"], "/structure/controller.py": ["/base/controller.py", "/structure/config.py", "/structure/service.py"]} |
77,803 | shimaomao/sanicdemo | refs/heads/master | /base/exception.py | try:
import ujson as json
except ImportError:
import json
from collections import defaultdict
from sanic.exceptions import SanicException
class BaseExcep(Exception):
    """Root application exception carrying an API-style payload.

    str()/repr() render the standard JSON envelope
    {"code": ..., "data": ..., "msg": ..., "id": null}.

    :param msg: human-readable message
    :param code: numeric error code (default 1)
    :param data: optional payload dict; defaults to a fresh empty dict
    :param log: whether the caller should log this error
    """

    def __init__(self, *args, msg='', code=1, data=None, log=False, **kwargs):
        self.code = code
        # bugfix: the original default `data={}` was a mutable default shared
        # by every instance that did not pass data explicitly
        self.data = {} if data is None else data
        self.msg = msg
        self.log = log
        super().__init__(*args, **kwargs)

    def __repr__(self):
        return json.dumps({
            "code": self.code,
            "data": self.data,
            "msg": self.msg,
            "id": None
        })

    __str__ = __repr__
class LoginError(SanicException):
    """Raised on failed authentication (code 110); message is localized
    via a defaultdict keyed by locale."""

    status_code = 110
    msg = defaultdict(lambda: 'Wrong login/password!', {
        'en_US': 'Wrong login/password!',
        'zh_CN': '用户名或密码错误!'
    })
    reason = None

    def __init__(self, code=None, message=None, reason=None):
        # fall back to the class-level localized messages / status code
        self.message = message if message else self.msg
        self.code = code if (code and isinstance(code, int)) else self.status_code
        if reason:
            self.reason = reason
        super(LoginError, self).__init__(message=self.message,
                                         status_code=self.code)
class NoPwdError(LoginError):
    """Login attempted for an account that has no password set (code 111)."""
    status_code = 111
    # NOTE(review): plain string, unlike the localized defaultdicts on the
    # sibling classes -- this message is not localized; confirm intent
    msg = 'no password'


class PwdRetryLimitError(LoginError):
    """Too many wrong password attempts (code 113)."""
    status_code = 113
    # NOTE(review): the localized texts duplicate WrongPwdError's and differ
    # from the 'retry limit' default -- looks like copy-paste; confirm
    msg = defaultdict(lambda: 'retry limit', {
        'zh_CN': '工资单密码错误',
        'en_US': 'Wrong password'
    })


class WrongPwdError(LoginError):
    """Wrong payslip password (code 112)."""
    status_code = 112
    msg = defaultdict(lambda: 'Wrong password', {
        'zh_CN': '工资单密码错误',
        'en_US': 'Wrong password'
    })
class SecurityStrategyError(SanicException):
    """Base error for security-policy violations."""
    status_code = 120
    msg = defaultdict(lambda: 'Security Strategy Error', {
        'zh_CN': '安全策略错误',
        'en_US': 'Security Strategy Error'
    })
    reason = None

    def __init__(self, code=None, message=None, reason=None):
        # Use the class defaults unless explicit values were supplied.
        self.message = message if message else self.msg
        self.code = code if code and isinstance(code, int) else self.status_code
        if reason:
            self.reason = reason
        super(SecurityStrategyError, self).__init__(message=self.message,
                                                    status_code=self.code)
class SessionExpiredError(SecurityStrategyError):
    """The user's session has timed out; a fresh login is required."""
    # NOTE(review): negative code, unlike the 12x codes used by siblings —
    # presumably a protocol-level sentinel; confirm with the client contract.
    status_code = -5
    msg = defaultdict(lambda: 'Session expired. Please retry', {
        'zh_CN': '会话过期,请重新登录',
        'en_US': 'Session expired. Please retry'
    })
class PreventAppError(SecurityStrategyError):
    """The user's security policy forbids logging in from the mobile app."""
    status_code = 121
    # Bug fix: corrected the typo "Your are" -> "You are" in the
    # user-facing English messages; the zh_CN text is unchanged.
    msg = defaultdict(lambda: 'You are not allowed to login via APP', {
        'zh_CN': '您未被允许使用APP登录',
        'en_US': 'You are not allowed to login via APP'
    })
class ForceChgpwError(SecurityStrategyError):
    """Policy requires the user to change their password before proceeding."""
    # NOTE(review): 303 breaks the 12x numbering of the other strategy
    # errors — confirm it is not confused with the HTTP 303 redirect code.
    status_code = 303
    msg = defaultdict(lambda: 'Please change your password', {
        'zh_CN': '请修改密码',
        'en_US': 'Please change your password'
    })
class PwdLockError(SecurityStrategyError):
    """The account's password has been locked by policy."""
    status_code = 123
    msg = defaultdict(
        lambda: 'Your password has been locked. Please contact system admin', {
            'zh_CN': '您的密码被锁定,请联系管理员',
            'en_US': 'Your password has been locked. Please contact system admin'
        })
class UserLockError(SecurityStrategyError):
    """The whole user account is locked and must be released by an admin."""
    status_code = 124
    msg = defaultdict(
        lambda: 'You are not allowed login, please contact Admin to unlock!', {
            'zh_CN': '你已经被锁定, 请联系管理员解锁!',
            'en_US': 'You are not allowed login, please contact Admin to unlock!'
        })
| {"/Message/db/helper.py": ["/Message/config.py"], "/structure/service.py": ["/base/service.py"], "/SanicGateway/controller/structure.py": ["/base/controller.py"], "/structure/model.py": ["/base/model.py"], "/base/application.py": ["/base/exception.py"], "/service.py": ["/base/service.py", "/base/exception.py"], "/model.py": ["/base/model.py", "/base/environment.py"], "/base/service.py": ["/structure/model.py", "/base/environment.py"], "/base/model.py": ["/base/sql_db.py"], "/route.py": ["/controller.py", "/middlemare.py"], "/controller.py": ["/base/controller.py", "/service.py"], "/base/environment.py": ["/base/sql_db.py", "/base/web_utils.py", "/base/model.py"], "/structure/server.py": ["/base/application.py", "/structure/config.py", "/base/environment.py", "/structure/route.py"], "/Message/script.py": ["/Message/config.py"], "/structure/route.py": ["/structure/controller.py", "/structure/middlemare.py"], "/script/db_helper.py": ["/script/create_data.py"], "/structure/controller.py": ["/base/controller.py", "/structure/config.py", "/structure/service.py"]} |
77,804 | shimaomao/sanicdemo | refs/heads/master | /structure/config.py | db_config = dict({
'port': 6625,
'host': '127.0.0.1',
'database': 'salary',
'user': 'panyang',
'password':'',
})
# Network interface/port the HTTP server binds to.
# Cleanup: the redundant `dict({...})` wrappers (build a literal, then copy
# it) are replaced with plain dict literals; the resulting values are equal.
server_config = {
    'port': 6623,
    'host': '0.0.0.0',
}
# Redis connection settings.
# NOTE(review): 'redis' and 'redis_cache' are identical (same host/port/db 3)
# — confirm the cache was not meant to use a separate db.
redis_config = {
    'redis': {
        'host': '127.0.0.1',
        'port': 6379,
        'db': 3,
        'user_name': '',
        'password': ''
    },
    'redis_cache': {
        'host': '127.0.0.1',
        'port': 6379,
        'db': 3,
        'user_name': '',
        'password': ''
    }
}
# Placeholder for application-wide settings.
base_config = {}
# RabbitMQ settings for the asynchronous excel job channel
# (exchange/queue/binding consumed by the Message scripts).
route_config = {
    'excel': {
        'host': '127.0.0.1',
        'port': '',  # NOTE(review): empty string, not an int — presumably the default AMQP port is used; confirm.
        'exchange': 'excel_exchange',
        'type': 'direct',
        'queue': 'excel_queue',
        'binding_key': 'excel'
    }
} | {"/Message/db/helper.py": ["/Message/config.py"], "/structure/service.py": ["/base/service.py"], "/SanicGateway/controller/structure.py": ["/base/controller.py"], "/structure/model.py": ["/base/model.py"], "/base/application.py": ["/base/exception.py"], "/service.py": ["/base/service.py", "/base/exception.py"], "/model.py": ["/base/model.py", "/base/environment.py"], "/base/service.py": ["/structure/model.py", "/base/environment.py"], "/base/model.py": ["/base/sql_db.py"], "/route.py": ["/controller.py", "/middlemare.py"], "/controller.py": ["/base/controller.py", "/service.py"], "/base/environment.py": ["/base/sql_db.py", "/base/web_utils.py", "/base/model.py"], "/structure/server.py": ["/base/application.py", "/structure/config.py", "/base/environment.py", "/structure/route.py"], "/Message/script.py": ["/Message/config.py"], "/structure/route.py": ["/structure/controller.py", "/structure/middlemare.py"], "/script/db_helper.py": ["/script/create_data.py"], "/structure/controller.py": ["/base/controller.py", "/structure/config.py", "/structure/service.py"]} |
77,805 | shimaomao/sanicdemo | refs/heads/master | /Message/script.py | import pika
from Message.config import route_config
rabbitmq_config = route_config['excel']
def excel_mq():
    """Recreate the RabbitMQ exchange/queue used for excel jobs.

    Deletes the exchange, re-declares it as durable, declares the queue and
    binds it with the configured names from ``rabbitmq_config``.
    """
    exchange_name = rabbitmq_config.get('exchange')
    exchange_type = rabbitmq_config.get('type')
    queue_name = rabbitmq_config.get('queue')
    connection = pika.BlockingConnection(pika.ConnectionParameters(host=rabbitmq_config.get('host')))
    # Bug fix: the connection used to leak if any channel operation raised;
    # always close it.
    try:
        channel = connection.channel()
        channel.exchange_delete(exchange=exchange_name)
        # NOTE(review): modern pika names this parameter `exchange_type`;
        # `type=` only works on old releases — confirm the pinned version.
        channel.exchange_declare(exchange=exchange_name, type=exchange_type, durable=True)
        channel.queue_declare(queue_name, exclusive=False)
        channel.queue_bind(queue_name, exchange_name)
    finally:
        connection.close()
# Allow running this module directly as a one-off broker-setup script.
if __name__ == '__main__':
    excel_mq()
| {"/Message/db/helper.py": ["/Message/config.py"], "/structure/service.py": ["/base/service.py"], "/SanicGateway/controller/structure.py": ["/base/controller.py"], "/structure/model.py": ["/base/model.py"], "/base/application.py": ["/base/exception.py"], "/service.py": ["/base/service.py", "/base/exception.py"], "/model.py": ["/base/model.py", "/base/environment.py"], "/base/service.py": ["/structure/model.py", "/base/environment.py"], "/base/model.py": ["/base/sql_db.py"], "/route.py": ["/controller.py", "/middlemare.py"], "/controller.py": ["/base/controller.py", "/service.py"], "/base/environment.py": ["/base/sql_db.py", "/base/web_utils.py", "/base/model.py"], "/structure/server.py": ["/base/application.py", "/structure/config.py", "/base/environment.py", "/structure/route.py"], "/Message/script.py": ["/Message/config.py"], "/structure/route.py": ["/structure/controller.py", "/structure/middlemare.py"], "/script/db_helper.py": ["/script/create_data.py"], "/structure/controller.py": ["/base/controller.py", "/structure/config.py", "/structure/service.py"]} |
77,806 | shimaomao/sanicdemo | refs/heads/master | /structure/route.py | from structure.controller import GetBaseInfo, GetJobInfo, GetJobByCateAndRank, JobMapping
from structure.middlemare import success, log
# URL -> handler class mapping; presumably registered by the server
# bootstrap (structure/server.py) — confirm.
route = {
    '/get_base_info': GetBaseInfo,
    '/get_job_info': GetJobInfo,
    '/get_job_by_cate_and_rank': GetJobByCateAndRank,
    '/job_mapping': JobMapping
}
# Middleware hooks: `log` on every request, `success` on every response.
middleware = {
    'request': [log],
    'response': [success]
}
err_route = {} | {"/Message/db/helper.py": ["/Message/config.py"], "/structure/service.py": ["/base/service.py"], "/SanicGateway/controller/structure.py": ["/base/controller.py"], "/structure/model.py": ["/base/model.py"], "/base/application.py": ["/base/exception.py"], "/service.py": ["/base/service.py", "/base/exception.py"], "/model.py": ["/base/model.py", "/base/environment.py"], "/base/service.py": ["/structure/model.py", "/base/environment.py"], "/base/model.py": ["/base/sql_db.py"], "/route.py": ["/controller.py", "/middlemare.py"], "/controller.py": ["/base/controller.py", "/service.py"], "/base/environment.py": ["/base/sql_db.py", "/base/web_utils.py", "/base/model.py"], "/structure/server.py": ["/base/application.py", "/structure/config.py", "/base/environment.py", "/structure/route.py"], "/Message/script.py": ["/Message/config.py"], "/structure/route.py": ["/structure/controller.py", "/structure/middlemare.py"], "/script/db_helper.py": ["/script/create_data.py"], "/structure/controller.py": ["/base/controller.py", "/structure/config.py", "/structure/service.py"]} |
77,807 | shimaomao/sanicdemo | refs/heads/master | /112.py | from psycopg2.pool import ThreadedConnectionPool
import psycopg2
import threading
import uuid
import time
# Benchmark configuration: compares direct psycopg2 connects vs a shared pool.
bouncer_name = 'testdb'  # NOTE(review): unused in this script — presumably a pgbouncer db name; confirm.
database_name = 'salary'
port = 5432
# Shared pool: 5 min / 50 max connections, matching the 50 worker threads below.
pool = ThreadedConnectionPool(5, 50, database= database_name, port=port)
n= 0  # global iteration counter, shared (unsynchronized) by all threads
def test_func(use_pool=False):
    """Worker loop: run a fixed SELECT until ~30000 total iterations.

    use_pool: take connections from the shared pool instead of opening a
    fresh psycopg2 connection per iteration.

    NOTE(review): `n` is incremented without a lock, so the total iteration
    count is approximate — acceptable for a throughput benchmark, but worth
    confirming that exactness is not required.
    """
    global n
    while n < 30000:
        try:
            if use_pool:
                conn = pool.getconn()
            else:
                conn = psycopg2.connect(database=database_name, port=port)
        except Exception:
            # NOTE(review): a persistent connect failure makes this spin
            # forever; consider a backoff or abort.
            continue
        # Bug fix: the cursor was never closed and the connection leaked
        # whenever the query raised; release both deterministically.
        try:
            cr = conn.cursor()
            try:
                cr.execute('select * from market_salary_data where id = 100')
                cr.fetchall()  # complete the round trip; result is discarded
            finally:
                cr.close()
        finally:
            if use_pool:
                pool.putconn(conn)
            else:
                conn.close()
        if n % 30 == 0:
            print(n)
        n += 1
# Spawn 50 worker threads running the query loop and time the whole run.
# With the default target args this benchmarks raw per-iteration connects
# (use_pool=False); pass args=(True,) to benchmark the pooled variant.
test_pool = []
for a in range(0, 50):
    test_pool.append(threading.Thread(target=test_func))
start_time = time.time()
print('start')
for a in test_pool:
    a.start()
for a in test_pool:
    a.join()
print('end: use {} second'.format(time.time() - start_time))
| {"/Message/db/helper.py": ["/Message/config.py"], "/structure/service.py": ["/base/service.py"], "/SanicGateway/controller/structure.py": ["/base/controller.py"], "/structure/model.py": ["/base/model.py"], "/base/application.py": ["/base/exception.py"], "/service.py": ["/base/service.py", "/base/exception.py"], "/model.py": ["/base/model.py", "/base/environment.py"], "/base/service.py": ["/structure/model.py", "/base/environment.py"], "/base/model.py": ["/base/sql_db.py"], "/route.py": ["/controller.py", "/middlemare.py"], "/controller.py": ["/base/controller.py", "/service.py"], "/base/environment.py": ["/base/sql_db.py", "/base/web_utils.py", "/base/model.py"], "/structure/server.py": ["/base/application.py", "/structure/config.py", "/base/environment.py", "/structure/route.py"], "/Message/script.py": ["/Message/config.py"], "/structure/route.py": ["/structure/controller.py", "/structure/middlemare.py"], "/script/db_helper.py": ["/script/create_data.py"], "/structure/controller.py": ["/base/controller.py", "/structure/config.py", "/structure/service.py"]} |
77,808 | shimaomao/sanicdemo | refs/heads/master | /script/db_helper.py | import asyncio
from asyncpg import create_pool
try:
import uvloop as async_loop
except ImportError:
async_loop = asyncio
from config import db_config
from script.create_data import create_data
async def create_db_pool():
    """Create an asyncpg connection pool from ``db_config``.

    Returns the ready pool.
    """
    db_host = db_config.get('host', '127.0.0.1')
    database = db_config.get('database')
    db_user = db_config.get('user')
    # Bug fix: the configured port and password were previously ignored, so
    # connections always used the driver defaults. `.get` keeps backward
    # compatibility when the keys are absent (None -> asyncpg default).
    db_port = db_config.get('port')
    db_password = db_config.get('password') or None
    db_pool = await create_pool(host=db_host, port=db_port, database=database,
                                user=db_user, password=db_password)
    return db_pool
async def _execute(pool, str_sql):
    """Run *str_sql* on a connection borrowed from *pool*; return its result."""
    async with pool.acquire() as connection:
        result = await connection.execute(str_sql)
    return result
async def create_table(pool):
    """Drop and recreate the market-salary schema on *pool*.

    Drops children before parents (MARKET_SALARY_DATA first) to satisfy the
    foreign-key dependencies, then recreates the lookup tables and the main
    fact table.

    NOTE(review): MARKET_SALARY_DATA declares foreign keys to CITY,
    COMPANY_SCOPE and COMPANY_NATURE, but their CREATE (and DROP) statements
    are commented out below — creation fails unless those tables already
    exist from an earlier run. Confirm this is intentional.
    """
    async with pool.acquire() as conn:
        # Drop in dependency order: the fact table first, then the lookups.
        await conn.execute('DROP TABLE if EXISTS MARKET_SALARY_DATA')
        await conn.execute('DROP TABLE if EXISTS JOB')
        await conn.execute('DROP TABLE if EXISTS JOB_GRADE')
        await conn.execute('DROP TABLE if EXISTS INDUSTRY')
        await conn.execute('DROP TABLE if EXISTS JOB_CATEGORY')
        #await conn.execute('DROP TABLE if EXISTS CITY')
        #await conn.execute('DROP TABLE if EXISTS COMPANY_SCOPE')
        #await conn.execute('DROP TABLE if EXISTS COMPANY_NATURE')
        # ------------ CITY --------------------------
        # await conn.execute('CREATE TABLE CITY ('
        # 'ID SERIAL PRIMARY KEY,'
        # 'CODE INT UNIQUE ,'
        # 'NAME_ZH VARCHAR,'
        # 'NAME_EN VARCHAR)')
        # ------------ CATEGORY ----------------------
        await conn.execute('CREATE TABLE JOB_CATEGORY ('
        'ID SERIAL PRIMARY KEY,'
        'CODE INT UNIQUE ,'
        'NAME_ZH VARCHAR,'
        'NAME_EN VARCHAR)')
        # ------------ GRADE ----------------------
        await conn.execute('CREATE TABLE JOB_GRADE ('
        'ID SERIAL PRIMARY KEY,'
        'CODE INT UNIQUE ,'
        'NAME_ZH VARCHAR,'
        'NAME_EN VARCHAR)')
        # ------------ JOB ---------------------------
        # JOB references both lookup tables created above.
        await conn.execute('CREATE TABLE JOB ('
        'ID SERIAL PRIMARY KEY,'
        'CODE INT UNIQUE ,'
        'NAME_ZH VARCHAR,'
        'NAME_EN VARCHAR,'
        'JOB_GRADE_CODE INT REFERENCES JOB_GRADE(CODE),'
        'JOB_CATEGORY_CODE INT REFERENCES JOB_CATEGORY(CODE))')
        # ------------INDUSTRY --------------------------
        await conn.execute('CREATE TABLE INDUSTRY ('
        'ID SERIAL PRIMARY KEY,'
        'CODE INT UNIQUE,'
        'NAME_ZH VARCHAR,'
        'NAME_EN VARCHAR)')
        # # ------------nature --------------------------
        # await conn.execute('CREATE TABLE COMPANY_NATURE ('
        # 'ID SERIAL PRIMARY KEY,'
        # 'CODE INT UNIQUE,'
        # 'DESCRIPTION VARCHAR)')
        #
        # # ------------nature --------------------------
        # await conn.execute('CREATE TABLE COMPANY_SCOPE ('
        # 'ID SERIAL PRIMARY KEY,'
        # 'CODE INT UNIQUE,'
        # 'MIN_NUM INT ,'
        # 'MAX_INT INT )')
        # ------------- main table -------------------
        # Fact table: one row per salary observation, keyed to the lookups.
        await conn.execute('CREATE TABLE MARKET_SALARY_DATA('
        'ID SERIAL PRIMARY KEY,'
        'SOURCE VARCHAR,'
        'CITY_CODE INT REFERENCES CITY(CODE),'
        'JOB_CODE INT REFERENCES JOB(CODE),'
        'INDUSTRY_CODE INT REFERENCES INDUSTRY(CODE),'
        'SCOPE_CODE INT REFERENCES COMPANY_SCOPE(CODE),'
        'NATURE_CODE INT REFERENCES COMPANY_NATURE(CODE),'
        'BASE_SALARY NUMERIC (11,3),'
        'FIX_SALARY NUMERIC(11,3),'
        'TOTAL_SALARY NUMERIC (11,3))')
if __name__ == '__main__':
    # Rebuild the schema and seed it with generated data (uvloop if available).
    loop = async_loop.new_event_loop()
    asyncio.set_event_loop(loop=loop)
    pool = loop.run_until_complete(create_db_pool())
    loop.run_until_complete(create_table(pool))
    loop.run_until_complete(create_data(pool))
| {"/Message/db/helper.py": ["/Message/config.py"], "/structure/service.py": ["/base/service.py"], "/SanicGateway/controller/structure.py": ["/base/controller.py"], "/structure/model.py": ["/base/model.py"], "/base/application.py": ["/base/exception.py"], "/service.py": ["/base/service.py", "/base/exception.py"], "/model.py": ["/base/model.py", "/base/environment.py"], "/base/service.py": ["/structure/model.py", "/base/environment.py"], "/base/model.py": ["/base/sql_db.py"], "/route.py": ["/controller.py", "/middlemare.py"], "/controller.py": ["/base/controller.py", "/service.py"], "/base/environment.py": ["/base/sql_db.py", "/base/web_utils.py", "/base/model.py"], "/structure/server.py": ["/base/application.py", "/structure/config.py", "/base/environment.py", "/structure/route.py"], "/Message/script.py": ["/Message/config.py"], "/structure/route.py": ["/structure/controller.py", "/structure/middlemare.py"], "/script/db_helper.py": ["/script/create_data.py"], "/structure/controller.py": ["/base/controller.py", "/structure/config.py", "/structure/service.py"]} |
77,809 | shimaomao/sanicdemo | refs/heads/master | /structure/controller.py | from sanic import response
from sanic.exceptions import ServerError
from base.controller import BaseHandler, JsonHandler
import logging
from structure.config import route_config
import pika
from structure.service import SalaryService
try:
import ujson as json
except ImportError:
import json
class GetBaseInfo(BaseHandler):
    """Handler returning the base reference info (ids included)."""
    async def handle(self):
        try:
            svc = SalaryService(env=self.env)
            return await svc.get_base_info(show_id=True)
        except Exception as exc:
            # Surface any failure as a 500 carrying the exception args.
            raise ServerError(str(exc.args))
class GetJobInfo(BaseHandler):
    """Handler returning the job reference info (ids included)."""
    async def handle(self):
        try:
            svc = SalaryService(env=self.env)
            return await svc.get_job_info(show_id=True)
        except Exception as exc:
            # Surface any failure as a 500 carrying the exception args.
            raise ServerError(str(exc.args))
class GetJobByCateAndRank(JsonHandler):
    """Handler listing jobs filtered by the category/rank codes in the JSON body."""
    async def handle(self):
        try:
            payload = self.data
            svc = SalaryService(env=self.env)
            return await svc.get_job_by_cate_rank(payload.get('category_code'),
                                                  payload.get('rank_code'))
        except Exception as exc:
            # Surface any failure as a 500 carrying the exception args.
            raise ServerError(str(exc.args))
class JobMapping(JsonHandler):
    """Handler resolving a list of job names (JSON key 'job_list') to job info."""
    async def handle(self):
        try:
            names = self.data.get('job_list')
            svc = SalaryService(env=self.env)
            return await svc.get_job_info_by_name(names)
        except Exception as exc:
            # Surface any failure as a 500 carrying the exception args.
            raise ServerError(str(exc.args))
class ExcelUpload(BaseHandler):
    # Publishes a trigger message to the excel exchange; the actual work is
    # presumably done by a separate consumer — confirm.
    async def handle(self):
        try:
            result = self.excel_mq()
            return result
        except Exception as e:
            raise ServerError(str(e.args))
    def excel_mq(self):
        # NOTE(review): `exchange_type` and `queue_name` are computed but never
        # used below; the exchange is assumed to already exist (it is not
        # declared here).
        rabbitmq_config = route_config.get('excel')
        exchange_name = rabbitmq_config.get('exchange')
        exchange_type = rabbitmq_config.get('type')
        queue_name = rabbitmq_config.get('queue')
        connection = pika.BlockingConnection(pika.ConnectionParameters(host=rabbitmq_config.get('host')))
        channel = connection.channel()
        # NOTE(review): hard-coded placeholder payload 'hello' — confirm the
        # intended message body.
        result = channel.basic_publish(exchange=exchange_name,
                                       routing_key='excel',
                                       body='hello')
        connection.close()
return result | {"/Message/db/helper.py": ["/Message/config.py"], "/structure/service.py": ["/base/service.py"], "/SanicGateway/controller/structure.py": ["/base/controller.py"], "/structure/model.py": ["/base/model.py"], "/base/application.py": ["/base/exception.py"], "/service.py": ["/base/service.py", "/base/exception.py"], "/model.py": ["/base/model.py", "/base/environment.py"], "/base/service.py": ["/structure/model.py", "/base/environment.py"], "/base/model.py": ["/base/sql_db.py"], "/route.py": ["/controller.py", "/middlemare.py"], "/controller.py": ["/base/controller.py", "/service.py"], "/base/environment.py": ["/base/sql_db.py", "/base/web_utils.py", "/base/model.py"], "/structure/server.py": ["/base/application.py", "/structure/config.py", "/base/environment.py", "/structure/route.py"], "/Message/script.py": ["/Message/config.py"], "/structure/route.py": ["/structure/controller.py", "/structure/middlemare.py"], "/script/db_helper.py": ["/script/create_data.py"], "/structure/controller.py": ["/base/controller.py", "/structure/config.py", "/structure/service.py"]} |
77,811 | crisalid/chesstest | refs/heads/master | /test_chess.py | #!/usr/bin/python
import unittest
from chess import chessPieceMoves
# testing main function inputs
class TestChessMove(unittest.TestCase):
    """Spot-checks chessPieceMoves() for each supported piece."""
    def test_bishop(self):
        for square, expected in [
            ("d5", 'a2, a8, b3, b7, c4, c6, e4, e6, f3, f7, g2, g8, h1'),
            ("a1", 'b2, c3, d4, e5, f6, g7, h8'),
        ]:
            self.assertEqual(chessPieceMoves("bishop", square), expected)
    def test_rook(self):
        for square, expected in [
            ("e6", 'a6, b6, c6, d6, e1, e2, e3, e4, e5, e7, e8, f6, g6, h6'),
            ("b2", 'a2, b1, b3, b4, b5, b6, b7, b8, c2, d2, e2, f2, g2, h2'),
        ]:
            self.assertEqual(chessPieceMoves("rook", square), expected)
    def test_queen(self):
        for square, expected in [
            ("d1", 'a1, a4, b1, b3, c1, c2, d2, d3, d4, d5, d6, d7, d8, e1, e2, f1, f3, g1, g4, h1, h5'),
            ("e2", 'a2, a6, b2, b5, c2, c4, d1, d2, d3, e1, e3, e4, e5, e6, e7, e8, f1, f2, f3, g2, g4, h2, h5'),
        ]:
            self.assertEqual(chessPieceMoves("queen", square), expected)
    def test_knight(self):
        for square, expected in [
            ("e5", 'c4, c6, d3, d7, f3, f7, g4, g6'),
            ("b6", 'a4, a8, c4, c8, d5, d7'),
        ]:
            self.assertEqual(chessPieceMoves("knight", square), expected)
if __name__ == '__main__':
unittest.main() | {"/test_chess.py": ["/chess.py"]} |
77,812 | crisalid/chesstest | refs/heads/master | /chess.py | #!/usr/bin/python
# Importing basic libraries to process user input
import sys, getopt
def coordToPos(x, y):
    ''' Convert 1-based board coordinates to an algebraic cell, e.g. (4, 5) -> "d5". '''
    r = 'abcdefgh'[x - 1] + str(y)
    return r
def chessPieceMoves(piece, pos, printChessBoard = False):
    ''' Calculate every square reachable by `piece` standing on `pos`.

    piece: 'rook', 'bishop', 'queen' or 'knight'.
    pos:   algebraic square such as 'd5' (file a-h, rank 1-8).
    printChessBoard: when True, return an 8x8 ASCII map with reachable
    squares marked 'XX'; otherwise return the sorted, comma-separated list.
    Exits with status 2 on out-of-range coordinates.
    '''
    # Bug fix: the file-lookup string used to be "abcdef", so legal squares
    # on the g and h files were rejected as bad coordinates.
    x = "abcdefgh".find(pos[:1]) + 1
    y = int(pos[1:])
    if not (x >= 1 and x <= 8 and y >= 1 and y <= 8):
        # Bug fix: this message referenced `coord`, a variable local to
        # main(), which raised NameError here; report the actual argument.
        print("Bad coordinates, try again ", pos)
        sys.exit(2)
    yi = 8
    positions = []
    board = ''
    # Scan the whole board, top rank (8) to bottom (1), files a..h.
    while yi >= 1:
        xi = 1
        while xi <= 8:
            here = False
            dx = abs(xi - x)  # file distance from the piece
            dy = abs(yi - y)  # rank distance from the piece
            if piece == 'rook':
                here = (yi == y or xi == x)
            if piece == 'bishop':
                here = (dx == dy)
            if piece == 'queen':
                here = (yi == y or xi == x or dx == dy)
            if piece == 'knight':
                here = (dx == 2 and dy == 1 or dx == 1 and dy == 2)
            if dx == 0 and dy == 0:
                # The piece's own square is never a move.
                here = False
            if here:
                board = board + 'XX'
                positions.append(coordToPos(xi, yi))
            else:
                board = board + '  '
            xi = xi + 1
        board += "\n"
        yi = yi - 1
    positions.sort()
    if printChessBoard:
        result = board
    else:
        result = ", ".join(positions)
    return result
def main(argv):
    ''' Parse CLI options, validate them, and print the piece's moves.

    argv: the argument vector without the program name (sys.argv[1:]).
    Exits with status 2 on any invalid input.
    '''
    piece = ''
    coord = ''
    printChessBoard = False
    try:
        # Bug fix: the option string contained a stray 't' that silently
        # accepted an undocumented, unused -t flag.
        opts, args = getopt.getopt(argv, "hp:c:b", ["help", "piece=", "coord=", "board"])
    except getopt.GetoptError:
        print("chess.py -p <piece> -c <coord> [-b]")
        sys.exit(2)
    for opt, arg in opts:
        if opt in ("-h", "--help"):
            # Bug fix: help now mentions the optional -b board flag, matching
            # the usage line printed on a parse error.
            print('chess.py -p <piece> -c <coord> [-b]')
            sys.exit()
        elif opt in ("-p", "--piece"):
            piece = arg
        elif opt in ("-c", "--coord"):
            coord = arg
        elif opt in ("-b", "--board"):
            printChessBoard = True
    pieces = ['knight', 'rook', 'queen', 'bishop']
    if piece not in pieces:
        print("Bad chess piece, known pieces are:", ", ".join(pieces))
        sys.exit(2)
    if len(coord) != 2:
        print("Bad coordinates! coordinate sample: 'd2'")
        sys.exit(2)
    result = chessPieceMoves(piece, coord, printChessBoard)
    print(result)
if __name__ == "__main__":
main(sys.argv[1:])
| {"/test_chess.py": ["/chess.py"]} |
77,816 | Akashpb07/Chdproject | refs/heads/master | /chadigarh Dial/Webapp/models.py | from django.db import models
# Create your models here.
class Destination(models.Model):
    """A listed destination with image, description, price and offer flag."""
    # NOTE(review): unlike the other models in this file, no __str__ is
    # defined — the admin will show the default object repr; confirm.
    name = models.CharField(max_length=100)
    img = models.ImageField(upload_to='pics')
    desc = models.TextField()
    price = models.IntegerField()
    offer = models.BooleanField(default=False)
# ----- automobile listing -----
# NOTE(review): images upload to "drpics" (the doctors' folder) and the
# `deprtimg` field present on every sibling model is missing here —
# possibly a mislabeled copy; confirm.
class automobile(models.Model):
    """A single automobile-service listing (name, department, contact)."""
    drimg = models.ImageField(upload_to="drpics")
    name = models.CharField(max_length=100)
    speciality = models.CharField(max_length=100)
    department = models.CharField(max_length=100)
    location = models.TextField()
    mobNo = models.CharField(max_length=15)
    def __str__(self):
        return self.name
# ----- doctor listings: dentists / eye / bone / dermatology -----
# NOTE(review): these (and most models below) share the exact same fields;
# a common abstract base model would remove the duplication, but changing
# the classes now would require new migrations — left as-is.
class dentists(models.Model):
    """Dentist listing; images go to the "denistsdoctors" folder (sic)."""
    drimg = models.ImageField(upload_to="denistsdoctors")
    name = models.CharField(max_length=100)
    speciality = models.CharField(max_length=100)
    deprtimg = models.ImageField(upload_to="denistsdoctors")
    department = models.CharField(max_length=100)
    location = models.TextField(max_length=100)
    mobNo = models.CharField(max_length=15)
    def __str__(self):
        return self.name
class eye(models.Model):
    """Eye-doctor listing."""
    drimg = models.ImageField(upload_to="drpics")
    name = models.CharField(max_length=100)
    speciality = models.CharField(max_length=100)
    deprtimg = models.ImageField(upload_to="drpics")
    department = models.CharField(max_length=100)
    location = models.TextField(max_length=100)
    mobNo = models.CharField(max_length=15)
    def __str__(self):
        return self.name
class bone(models.Model):
    """Orthopaedic-doctor listing."""
    drimg = models.ImageField(upload_to="drpics")
    name = models.CharField(max_length=100)
    speciality = models.CharField(max_length=100)
    deprtimg = models.ImageField(upload_to="drpics")
    department = models.CharField(max_length=100)
    location = models.TextField(max_length=100)
    mobNo = models.CharField(max_length=15)
    def __str__(self):
        return self.name
class dermatology(models.Model):
    """Dermatologist listing."""
    drimg = models.ImageField(upload_to="drpics")
    name = models.CharField(max_length=100)
    speciality = models.CharField(max_length=100)
    deprtimg = models.ImageField(upload_to="drpics")
    department = models.CharField(max_length=100)
    location = models.TextField(max_length=100)
    mobNo = models.CharField(max_length=15)
    def __str__(self):
        return self.name
# ----- plumber listings: services / products / contractors / installation -----
class plumberservice(models.Model):
    """Plumbing-service provider listing."""
    drimg = models.ImageField(upload_to="plumber-s")
    name = models.CharField(max_length=100)
    speciality = models.CharField(max_length=100)
    deprtimg = models.ImageField(upload_to="plumber-s")
    department = models.CharField(max_length=100)
    location = models.TextField(max_length=100)
    mobNo = models.CharField(max_length=15)
    def __str__(self):
        return self.name
class plumberproducts(models.Model):
    """Plumbing-products seller listing."""
    drimg = models.ImageField(upload_to="plumber-p")
    name = models.CharField(max_length=100)
    speciality = models.CharField(max_length=100)
    deprtimg = models.ImageField(upload_to="plumber-p")
    department = models.CharField(max_length=100)
    location = models.TextField(max_length=100)
    mobNo = models.CharField(max_length=15)
    def __str__(self):
        return self.name
class plumbercont(models.Model):
    """Plumbing-contractor listing."""
    drimg = models.ImageField(upload_to="plumber-c")
    name = models.CharField(max_length=100)
    speciality = models.CharField(max_length=100)
    deprtimg = models.ImageField(upload_to="plumber-c")
    department = models.CharField(max_length=100)
    location = models.TextField(max_length=100)
    mobNo = models.CharField(max_length=15)
    def __str__(self):
        return self.name
class plumberinstall(models.Model):
    """Plumbing-installation provider listing."""
    drimg = models.ImageField(upload_to="plumber-i")
    name = models.CharField(max_length=100)
    speciality = models.CharField(max_length=100)
    deprtimg = models.ImageField(upload_to="plumber-i")
    department = models.CharField(max_length=100)
    location = models.TextField(max_length=100)
    mobNo = models.CharField(max_length=15)
    def __str__(self):
        return self.name
# ----- electricians / hotels / restaurants -----
# NOTE(review): all three upload to "plumber-i" (the plumber-installation
# folder) — likely copy-paste; confirm the intended upload paths.
class electrician(models.Model):
    """Electrician listing."""
    drimg = models.ImageField(upload_to="plumber-i")
    name = models.CharField(max_length=100)
    speciality = models.CharField(max_length=100)
    deprtimg = models.ImageField(upload_to="plumber-i")
    department = models.CharField(max_length=100)
    location = models.TextField(max_length=100)
    mobNo = models.CharField(max_length=15)
    def __str__(self):
        return self.name
class hotel(models.Model):
    """Hotel listing."""
    drimg = models.ImageField(upload_to="plumber-i")
    name = models.CharField(max_length=100)
    speciality = models.CharField(max_length=100)
    deprtimg = models.ImageField(upload_to="plumber-i")
    department = models.CharField(max_length=100)
    location = models.TextField(max_length=100)
    mobNo = models.CharField(max_length=15)
    def __str__(self):
        return self.name
class reasurant(models.Model):
    """Restaurant listing (class name keeps the original spelling; renaming
    would require a migration and code changes elsewhere)."""
    drimg = models.ImageField(upload_to="plumber-i")
    name = models.CharField(max_length=100)
    speciality = models.CharField(max_length=100)
    deprtimg = models.ImageField(upload_to="plumber-i")
    department = models.CharField(max_length=100)
    location = models.TextField(max_length=100)
    mobNo = models.CharField(max_length=15)
    def __str__(self):
        return self.name
# ----- hospital listings, one model per hospital category -----
class hospital(models.Model):
    """Generic hospital listing."""
    drimg = models.ImageField(upload_to="hospital")
    name = models.CharField(max_length=100)
    speciality = models.CharField(max_length=100)
    deprtimg = models.ImageField(upload_to="hospital")
    department = models.CharField(max_length=100)
    location = models.TextField(max_length=100)
    mobNo = models.CharField(max_length=15)
    def __str__(self):
        return self.name
class childhospital(models.Model):
    """Children's hospital listing."""
    drimg = models.ImageField(upload_to="childhospital")
    name = models.CharField(max_length=100)
    speciality = models.CharField(max_length=100)
    deprtimg = models.ImageField(upload_to="childhospital")
    department = models.CharField(max_length=100)
    location = models.TextField(max_length=100)
    mobNo = models.CharField(max_length=15)
    def __str__(self):
        return self.name
class eyehospital(models.Model):
    """Eye hospital listing."""
    drimg = models.ImageField(upload_to="eyehospital")
    name = models.CharField(max_length=100)
    speciality = models.CharField(max_length=100)
    deprtimg = models.ImageField(upload_to="eyehospital")
    department = models.CharField(max_length=100)
    location = models.TextField(max_length=100)
    mobNo = models.CharField(max_length=15)
    def __str__(self):
        return self.name
class publichospital(models.Model):
    """Public hospital listing."""
    drimg = models.ImageField(upload_to="publichospital")
    name = models.CharField(max_length=100)
    speciality = models.CharField(max_length=100)
    deprtimg = models.ImageField(upload_to="publichospital")
    department = models.CharField(max_length=100)
    location = models.TextField(max_length=100)
    mobNo = models.CharField(max_length=15)
    def __str__(self):
        return self.name
class privatehospital(models.Model):
    """Private hospital listing."""
    drimg = models.ImageField(upload_to="privatehospital")
    name = models.CharField(max_length=100)
    speciality = models.CharField(max_length=100)
    deprtimg = models.ImageField(upload_to="privatehospital")
    department = models.CharField(max_length=100)
    location = models.TextField(max_length=100)
    mobNo = models.CharField(max_length=15)
    def __str__(self):
        return self.name
class ENThospital(models.Model):
    """ENT hospital listing."""
    drimg = models.ImageField(upload_to="ENThospital")
    name = models.CharField(max_length=100)
    speciality = models.CharField(max_length=100)
    deprtimg = models.ImageField(upload_to="ENThospital")
    department = models.CharField(max_length=100)
    location = models.TextField(max_length=100)
    mobNo = models.CharField(max_length=15)
    def __str__(self):
        return self.name
class cancerhospital(models.Model):
    """Cancer hospital listing."""
    drimg = models.ImageField(upload_to="cancerhospital")
    name = models.CharField(max_length=100)
    speciality = models.CharField(max_length=100)
    deprtimg = models.ImageField(upload_to="cancerhospital")
    department = models.CharField(max_length=100)
    location = models.TextField(max_length=100)
    mobNo = models.CharField(max_length=15)
    def __str__(self):
        return self.name
class mentalhospital(models.Model):
    """Mental-health hospital listing."""
    drimg = models.ImageField(upload_to="mentalhospital")
    name = models.CharField(max_length=100)
    speciality = models.CharField(max_length=100)
    deprtimg = models.ImageField(upload_to="mentalhospital")
    department = models.CharField(max_length=100)
    location = models.TextField(max_length=100)
    mobNo = models.CharField(max_length=15)
    def __str__(self):
        return self.name
class multisuperhospital(models.Model):
    """Multi-super-speciality hospital listing."""
    drimg = models.ImageField(upload_to="multisuperhospital")
    name = models.CharField(max_length=100)
    speciality = models.CharField(max_length=100)
    deprtimg = models.ImageField(upload_to="multisuperhospital")
    department = models.CharField(max_length=100)
    location = models.TextField(max_length=100)
    mobNo = models.CharField(max_length=15)
    def __str__(self):
        return self.name
class orthrohospital(models.Model):
    """Orthopaedic hospital listing.

    NOTE(review): uploads to "othrohospital" (missing 'r') — existing media
    lives under that path, so the string is kept as-is.
    """
    drimg = models.ImageField(upload_to="othrohospital")
    name = models.CharField(max_length=100)
    speciality = models.CharField(max_length=100)
    deprtimg = models.ImageField(upload_to="othrohospital")
    department = models.CharField(max_length=100)
    location = models.TextField(max_length=100)
    mobNo = models.CharField(max_length=15)
    def __str__(self):
        return self.name
# ----- automobile services; all share the "automobile" upload folder -----
class newcars(models.Model):
    """New-car dealer listing."""
    drimg = models.ImageField(upload_to="automobile")
    name = models.CharField(max_length=100)
    speciality = models.CharField(max_length=100)
    deprtimg = models.ImageField(upload_to="automobile")
    department = models.CharField(max_length=100)
    location = models.TextField(max_length=100)
    mobNo = models.CharField(max_length=15)
    def __str__(self):
        return self.name
class carrepair(models.Model):
    """Car-repair shop listing."""
    drimg = models.ImageField(upload_to="automobile")
    name = models.CharField(max_length=100)
    speciality = models.CharField(max_length=100)
    deprtimg = models.ImageField(upload_to="automobile")
    department = models.CharField(max_length=100)
    location = models.TextField(max_length=100)
    mobNo = models.CharField(max_length=15)
    def __str__(self):
        return self.name
class caracesseries(models.Model):
    """Car-accessories shop listing (original spelling kept)."""
    drimg = models.ImageField(upload_to="automobile")
    name = models.CharField(max_length=100)
    speciality = models.CharField(max_length=100)
    deprtimg = models.ImageField(upload_to="automobile")
    department = models.CharField(max_length=100)
    location = models.TextField(max_length=100)
    mobNo = models.CharField(max_length=15)
    def __str__(self):
        return self.name
class carwash(models.Model):
    """Car-wash listing."""
    drimg = models.ImageField(upload_to="automobile")
    name = models.CharField(max_length=100)
    speciality = models.CharField(max_length=100)
    deprtimg = models.ImageField(upload_to="automobile")
    department = models.CharField(max_length=100)
    location = models.TextField(max_length=100)
    mobNo = models.CharField(max_length=15)
    def __str__(self):
        return self.name
class cartyres(models.Model):
    """Tyre shop listing."""
    drimg = models.ImageField(upload_to="automobile")
    name = models.CharField(max_length=100)
    speciality = models.CharField(max_length=100)
    deprtimg = models.ImageField(upload_to="automobile")
    department = models.CharField(max_length=100)
    location = models.TextField(max_length=100)
    mobNo = models.CharField(max_length=15)
    def __str__(self):
        return self.name
class motercyclerepair(models.Model):
    """Motorcycle-repair shop listing (original spelling kept)."""
    drimg = models.ImageField(upload_to="automobile")
    name = models.CharField(max_length=100)
    speciality = models.CharField(max_length=100)
    deprtimg = models.ImageField(upload_to="automobile")
    department = models.CharField(max_length=100)
    location = models.TextField(max_length=100)
    mobNo = models.CharField(max_length=15)
    def __str__(self):
        return self.name
#///////////blood donate//////#
class blooddonor(models.Model):
    """A registered blood donor and their contact details."""

    name = models.CharField(max_length=100)
    email = models.EmailField()
    age = models.IntegerField()
    gender = models.CharField(max_length=20)
    blood_group = models.CharField(max_length=20)
    mobile_no = models.CharField(max_length=15)
    address = models.CharField(max_length=100)
    city = models.CharField(max_length=50)

    def __str__(self):
        """Return the donor's name."""
        return self.name
#/////////Add services model//////
class Requestaddservice(models.Model):
    """A user-submitted request to have a new service listed.

    NOTE(review): field names are PascalCase, against PEP 8 / Django
    convention; renaming them would require a migration and template
    updates, so they are left unchanged here.
    """

    Category = models.CharField(max_length=100)
    Name = models.CharField(max_length=50)
    Speciality = models.CharField(max_length=50)
    Department = models.CharField(max_length=20)
    Address = models.TextField(max_length=200)
    ServiceDescription = models.TextField(max_length=200)
    img = models.ImageField(upload_to='req add')
    Ownername = models.CharField(max_length=50)
    Ownermobno = models.CharField(max_length=15)

    def __str__(self):
        """Display the request by its service category."""
        return self.Category
| {"/chadigarh Dial/Webapp/admin.py": ["/chadigarh Dial/Webapp/models.py"], "/chadigarh Dial/Webapp/views.py": ["/chadigarh Dial/Webapp/models.py"]} |
77,817 | Akashpb07/Chdproject | refs/heads/master | /chadigarh Dial/Webapp/migrations/0007_cancerhospital_childhospital_enthospital_eyehospital_hospital_mentalhospital_multisuperhospital_othr.py | # Generated by Django 3.0.4 on 2020-05-01 05:34
from django.db import migrations, models
class Migration(migrations.Migration):
    """Create the ten hospital-category tables (migration 0007).

    All ten models share exactly the same schema, so the ``CreateModel``
    operations are built in a loop rather than spelled out ten times.
    The resulting operations are field-for-field identical to the
    original hand-expanded list: same model order, same field order, and
    ``upload_to`` equal to the model name for both image fields.
    """

    dependencies = [
        ('Webapp', '0006_electrician_hotel_reasurant'),
    ]

    # One entry per new table, in the original creation order.
    _NEW_MODELS = [
        'cancerhospital',
        'childhospital',
        'ENThospital',
        'eyehospital',
        'hospital',
        'mentalhospital',
        'multisuperhospital',
        'othrohospital',
        'privatehospital',
        'publichospital',
    ]

    operations = [
        migrations.CreateModel(
            name=model_name,
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('drimg', models.ImageField(upload_to=model_name)),
                ('name', models.CharField(max_length=100)),
                ('speciality', models.CharField(max_length=100)),
                ('deprtimg', models.ImageField(upload_to=model_name)),
                ('department', models.CharField(max_length=100)),
                ('location', models.TextField(max_length=100)),
                ('mobNo', models.CharField(max_length=15)),
            ],
        )
        for model_name in _NEW_MODELS
    ]
| {"/chadigarh Dial/Webapp/admin.py": ["/chadigarh Dial/Webapp/models.py"], "/chadigarh Dial/Webapp/views.py": ["/chadigarh Dial/Webapp/models.py"]} |
77,818 | Akashpb07/Chdproject | refs/heads/master | /chadigarh Dial/Webapp/urls.py | from django.urls import path
from . import views
# URL routes for the Webapp application.
urlpatterns = [
    # ///////// index page //
    path("", views.index, name="index"),
    # NOTE(review): no separator before the pk, so this matches e.g.
    # "viewprofile7"; presumably intentional -- confirm before changing.
    path("viewprofile<int:pk>", views.viewprofile, name="viewprofile"),
    path("add", views.addservices, name="addservices"),
    # ////////// services URLs
    path("contactus", views.contactus, name="contactus"),
    path("doctor", views.doctor, name="doctor"),
    path("resutrants", views.resutrants, name="resutrants"),
    path("plumbers", views.plumbers, name="plumber"),
    path("electrician", views.ele, name="electrician"),
    path("automobiles", views.automobiles, name="automobile"),
    # NOTE(review): route name "hostels" looks like a typo for "hotels",
    # but templates may already reverse it -- left unchanged.
    path("hotels", views.hotels, name="hostels"),
    path("hospitals", views.hospitals, name="hospitals"),
    path("blood", views.blooddonate, name="blood"),
    path("bloodd", views.bloodd, name="bloodd"),
    path("table", views.table, name="table"),
    path("adddone", views.adddone, name="adddone"),
    path("adds", views.adds, name="adds"),
    # ////// donate blood ////
    path("db", views.db, name="db"),
    path("fb", views.fb, name="fb"),
    # ////////// doctor urls
    path("d1", views.d1, name="d1"),
    path("d3", views.d3, name="d3"),
    path("d4", views.d4, name="d4"),
    path("d2", views.d2, name="d2"),
    # ////////// automobile urls
    path("a1", views.a1, name="a1"),
    path("a2", views.a2, name="a2"),
    path("a3", views.a3, name="a3"),
    path("a4", views.a4, name="a4"),
    path("a5", views.a5, name="a5"),
    # ////////// plumbing urls
    path("pservice", views.pservice, name="pservice"),
    path("pproduct", views.pproduct, name="pproduct"),
    path("pcontractors", views.pcontractors, name="pcontractors"),
    path("pinstalltion", views.pinstalltion, name="pinstalltion"),
    # ////////// hospitals urls
    # Fix: these routes previously had no ``name``, so they could not be
    # reversed with {% url %} / reverse(); adding names is backward compatible.
    path("h1", views.h1, name="h1"),
    path("h2", views.h2, name="h2"),
    path("h3", views.h3, name="h3"),
    path("h4", views.h4, name="h4"),
    path("h5", views.h5, name="h5"),
    path("h6", views.h6, name="h6"),
    path("h7", views.h7, name="h7"),
    path("h8", views.h8, name="h8"),
    path("h9", views.h9, name="h9"),
    path("h10", views.h10, name="h10"),
]
| {"/chadigarh Dial/Webapp/admin.py": ["/chadigarh Dial/Webapp/models.py"], "/chadigarh Dial/Webapp/views.py": ["/chadigarh Dial/Webapp/models.py"]} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.