text stringlengths 38 1.54M |
|---|
#==============================================================================
# ou_simulation_1D simulates a 1-D Ornstein-Uhlenbeck process and aggregates it
# inputs
#   x0: initial state for OU
#   n: length of sampling period
#   step: aggregation period
#   a: drift (mean-reversion rate) of OU
#   sigma: diffusion constant of OU
# outputs
#   obs: aggregated observations (using trapezoidal rule)
#   real_obs: non-aggregated observations at aggregation times
#   t: all times sampled
#   aggr2: equivalent to obs using aggregate_measurements function
#   ou: complete non-aggregated trace of OU
#==============================================================================
import numpy as np
import math
import random
def ou_simulation_1D(x0, n, step, a, sigma):
    """Simulate a 1-D Ornstein-Uhlenbeck process and aggregate it.

    Parameters
    ----------
    x0 : float
        Initial state of the OU process.
    n : float
        Length of the sampling period.
    step : float
        Aggregation period (assumed to divide n).
    a : float
        Mean-reversion rate (tau) of the OU process.
    sigma : float
        Diffusion constant of the OU process.

    Returns
    -------
    obs : (m+1, 1) ndarray
        Aggregated observations (trapezoidal rule); first row is the seed x0.
    real_obs : (m+1, 1) ndarray
        Non-aggregated states at the aggregation times; first row is x0.
    t : 1-D ndarray
        All simulated time points (spacing dt).
    aggr2 : (m+1, 1) ndarray
        Same quantity as obs computed via aggregate_measurements.
    ou : 1-D ndarray
        Complete non-aggregated OU trace.

    Fixes vs. the original: Python-3 syntax (debug ``print tau, c`` removed),
    integer array indices (the original indexed numpy arrays with floats,
    which raises IndexError on modern numpy), and loop invariants hoisted.
    """
    dt = 0.0001
    t = np.arange(0, n + dt, dt)
    # Simulation steps per aggregation window and number of windows.
    steps_per_win = int(round(step / dt))
    n_windows = int(round(n / step))

    def _exact_ou(tau, c, x0, t):
        """Sample an OU path with the exact one-step transition:
        w[i+1] = w[i]*e^{-tau*dt} + sqrt(c^2/(2 tau) * (1 - e^{-2 tau dt})) * N(0,1).
        """
        tau = float(tau)
        c = float(c)
        w = np.zeros(t.size)
        w[0] = x0
        # Hoisted loop invariants (the original recomputed them each step).
        decay = math.exp(-tau * dt)
        noise_std = math.sqrt((c ** 2 * 0.5 / tau) * (1 - math.exp(-2 * dt * tau)))
        for i in range(t.size - 1):
            w[i + 1] = w[i] * decay + noise_std * random.gauss(0, 1)
        return w

    ou = _exact_ou(a, sigma, x0, t)

    def aggregate_measurements(obs_all, dt):
        """Cumulative rectangle-rule integral: y[0] = 0,
        y[i+1] = sum(obs_all[:i+1]) * dt (as in the original loop)."""
        return np.concatenate(([0.0], np.cumsum(obs_all) * dt))

    real_obs = x0  # seeded with scalar x0; one row appended per window
    aggr2 = x0
    obs = x0
    previous = 0   # first ou-index of the next aggr2 window
    for i in range(n_windows):
        lo = i * steps_per_win         # window start index in t / ou
        hi = (i + 1) * steps_per_win   # window end index (inclusive)
        # Rectangle-rule aggregation over [previous, hi].
        y_aggr_2 = aggregate_measurements(ou[previous:hi + 1], dt)
        aggr2 = np.vstack((aggr2, y_aggr_2[-1]))
        # Exact (non-aggregated) state at the window boundary.
        real_obs = np.vstack((real_obs, ou[hi]))
        # Trapezoidal rule over [lo, hi].
        aggr_obs = dt * 0.5 * (ou[lo] + 2 * np.sum(ou[lo + 1:hi]) + ou[hi])
        obs = np.vstack((obs, aggr_obs))
        previous = hi + 1
    return obs, real_obs, t, aggr2, ou
####### example plots ##############
#import matplotlib.pyplot as plt
#n = 5
#x0 = 20
#step = 0.1 #give float number
#[obs,ou,time1,aggr_anal,trace] = ou_simulation_1D(x0,n,step,4.0,2.0)
#
#real_obs = ou[1:len(ou)]
#obs = aggr_anal[1:len(aggr_anal)]
##obs = real_obs
#obs_avg = obs/step #real_obs #obs/step
#
#plt.figure(0)
##plt.plot(obs_anal,'ko')
#plt.plot(obs,'ro')
#plt.plot(real_obs,'go')
#plt.plot(obs/step,'ko')
#plt.xlim((0,5))
#
##plt.figure()
##plt.plot(obs,'ro')
##plt.plot(ou,'ko')
#plt.plot(time1,trace)
#plt.xlim((0,1))
#
##plt.plot(aggr_anal,'bo')
##plt.title('Ornstein Uhlenbeck')
##plt.xlim((0,5))
##plt.xlabel('time')
##plt.ylabel('X')
|
from django.db import models
# Create your models here.
class Students(models.Model):
    """A student record persisted in the ``student`` table."""

    name = models.CharField(max_length=30)
    age = models.IntegerField()
    email = models.EmailField()

    class Meta:
        db_table = 'student'

    def __str__(self):
        """Display the student by name in admin/listings."""
        return self.name
|
"""
Neurons represent connections and states.
A rudimentary encoding algorithm has been created for visualization purposes.
"""
class Neuron(object):
    """A node holding an integer state plus directed connections.

    A rudimentary encoding packs the XOR-folded states of parents and
    connections into one integer (``get_code``), which ``visual`` maps to
    a letter for display.
    """

    def __init__(self, state=0):
        self.state = state          # integer state, bit-shifted by crush()
        self.connections = []       # neurons this neuron points to
        self.parents = []           # neurons pointing at this neuron

    @staticmethod
    def crush(neurons):
        """Fold the states of `neurons` into one int: XOR of state << index."""
        datum = 0
        for index, neuron in enumerate(neurons):
            datum ^= neuron.state << index
        return datum

    def get_code(self):
        """Pack the parent-crush above the connection-crush in one integer."""
        upper = Neuron.crush(self.parents)
        lower = Neuron.crush(self.connections)
        return (upper << lower.bit_length()) + lower

    def connect(self, neuron, check_existing=True):
        """Add a directed edge self -> neuron, deduplicated by default."""
        connect = True
        if check_existing:
            connect = neuron not in self.connections
        if connect:
            self.connections.append(neuron)
            neuron.parents.append(self)

    def visual(self):
        """Return a single letter A-Z visualising this neuron's code.

        Bug fix: the alphabet has 26 letters, so the code must be reduced
        modulo 26 — the original used ``% 25`` and could never produce 'Z'.
        """
        code = self.get_code()
        code %= 26           # 26 letters A..Z
        code += 65           # ord('A')
        return chr(code)
|
'''
Datafeeder
2018-07-06
TODO:
[] Fix batch length difference
[] Add batch to output file w/ speaker info
ref:
- https://www.github.com/kyubyong/deepvoice3
'''
import ipdb as pdb
import os
import glob
import random
import numpy as np
import tensorflow as tf
import pandas as pd
import textgrid
from tqdm import tqdm
from hparams import hparams as hp
from utils import safe_mkdir, safe_rmdir, find_elements
def load_meta_data(meta_data):
    '''Return the speaker meta data as a DataFrame (comma-separated file).'''
    # read_csv with the default separator is equivalent to
    # read_table(..., sep=',') used before.
    return pd.read_csv(meta_data, sep=',')
def load_data(is_training=True):
    '''Build balanced lists of mel files and speaker indices.

    Args:
        is_training: if True, sample from the training split, else the
            test split (speaker index maps are always built from both).

    Returns:
        mel_bal: list of mel file paths — hp.num_batch * hp.batch_spkr *
            hp.batch_utt entries, grouped by the sampled speaker.
        spkr_bal: list of speaker indices aligned element-wise with mel_bal.

    Side effects: writes spkr2idx.npy / idx2spkr.npy under hp.data_dir.
    '''
    # Make data lists with ID and data directory
    spkr_list, mel_list = [], []
    # Get Mel directories (assumes <dir>/<speaker>/mel/*.npy layout — TODO confirm)
    mel_train = sorted(glob.glob(os.path.join(
        hp.train_dir, '**', 'mel', '*.npy')))
    mel_test = sorted(glob.glob(os.path.join(
        hp.test_dir, '**', 'mel', '*.npy')))
    mel_all = mel_train + mel_test
    # Get speaker IDs: speaker is the third-from-last path component
    for m in mel_all:
        spkr_list.append(m.split('/')[-3])
    # NOTE(review): set() ordering is nondeterministic across runs, so the
    # speaker->index mapping can differ between invocations; the .npy dumps
    # below are what keep it consistent for consumers.
    spkr_uq = list(set(spkr_list))
    spkr2idx = {s: i for i, s in enumerate(spkr_uq)}
    idx2spkr = {i: s for i, s in enumerate(spkr_uq)}
    # Save the mappings for later lookup by other stages
    np.save(os.path.join(hp.data_dir, 'spkr2idx.npy'), spkr2idx)
    np.save(os.path.join(hp.data_dir, 'idx2spkr.npy'), idx2spkr)
    # Make balanced dataset: pick the requested split...
    if is_training:
        mel_list = mel_train
    else:
        mel_list = mel_test
    mel_bal = []   # a list of mel files
    spkr_bal = []  # a list of spkr (idx)
    # ...then, per batch, sample batch_spkr speakers and batch_utt
    # utterances (with replacement) per speaker.
    for i in range(hp.num_batch):
        spkr_samp = random.sample(spkr_uq, hp.batch_spkr)
        for s in spkr_samp:
            # find_elements presumably filters mel_list down to speaker s
            # — TODO confirm against utils.find_elements
            _, _mel = find_elements(s, mel_list)
            mel_samp = random.choices(_mel, k=hp.batch_utt)
            mel_bal += mel_samp
            spkr_bal += [spkr2idx[s]]*hp.batch_utt
    return mel_bal, spkr_bal
def slice_mel(x, length):
    '''Load a mel .npy file and return a random contiguous slice.

    Args:
        x: path to a .npy array of shape (time, num_mels).
        length: number of frames to keep; must be <= the file's time axis.

    Returns:
        (length, num_mels) array.
    '''
    x = np.load(x)
    # randint is inclusive on both ends. Start from 0 so the first frame can
    # be used, and tolerate x.shape[0] == length (the original randint(1, 0)
    # raised ValueError in that case and could never pick frame 0).
    beg_frame = random.randint(0, x.shape[0] - length)
    return x[beg_frame:beg_frame+length, :]  # (time, num_mels)
def gen_batch(is_training=True):
    '''Generator yielding (mel_batch, spkr_batch) mini-batches.

    Yields, hp.num_batch times:
        mel_batch: (hp.batch_size, length, hp.num_mels) array, where
            `length` is re-drawn per batch from hp.length.
        spkr_batch: list of hp.batch_size speaker indices.

    NOTE(review): assumes len(mel_list) == hp.num_batch * hp.batch_size,
    i.e. hp.batch_size == hp.batch_spkr * hp.batch_utt — TODO confirm,
    otherwise the global index `i` runs past the lists.
    '''
    with tf.device('/cpu:0'):
        # Load data (balanced file/speaker lists)
        mel_list, spkr_list = load_data(is_training)
        # Global cursor into mel_list/spkr_list across all batches
        i = 0
        # For each mini-batch
        for b in range(hp.num_batch):
            # Randomly choose frame number (e.g. 140 <= len <= 180)
            length = random.randint(hp.length[0], hp.length[1])
            # Initiate (batch_size, length, num_mels)
            mel_batch = np.zeros((hp.batch_size, length, hp.num_mels))
            spkr_batch = []  # (batch_size)
            # Stack data until the cursor crosses a batch boundary
            m = 0
            while 1:
                mel_batch[m] = slice_mel(mel_list[i], length)
                spkr_batch.append(spkr_list[i])
                i += 1
                m += 1
                if i % hp.batch_size == 0:
                    break
            yield mel_batch, spkr_batch
def save_plot(mel_data):
    '''Save a (time, num_mels) mel array as out.png in the working dir.'''
    import matplotlib
    matplotlib.use('Agg')  # headless backend; must be selected before pyplot
    import matplotlib.pyplot as plt
    fig, ax = plt.subplots(figsize=(10, 10))
    # Transpose to (num_mels, time); origin='lower' puts mel bin 0 at the
    # bottom — 'bottom' was never a documented value and raises a ValueError
    # on modern matplotlib.
    ax.imshow(mel_data.T, aspect='auto', origin='lower')
    plt.savefig('out.png', format='png')
    plt.close(fig)  # release the figure so repeated calls don't leak memory
if __name__ == '__main__':
    # Smoke test: draw hp.num_batch batches and stop in the debugger after
    # each one for manual inspection of x (mel batch) and y (speaker batch).
    # batch_x, batch_y, batch_x_txt, num_batch = gen_batch()
    gen = gen_batch()
    for _ in range(hp.num_batch):
        x, y = next(gen)
        # NOTE(review): interactive breakpoint left in — remove for
        # unattended runs.
        pdb.set_trace()
|
# Copyright (c) 2017 XLAB d.o.o.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from cloudify.decorators import operation
from dice_plugin import general
# This module contains some ugly hacks around the inflexibility of cloudify.
# It would be possible to refactor the core and avoid this monstrosity, but
# since we may need to migrate to aria soon-ish, we will do the cleanup at a
# later time.
@operation
def create(ctx):
    """Cloudify operation: optionally create an image, then boot a server."""
    props = ctx.node.properties
    if not props["use_existing"]:
        general.create_image(ctx)
        rtprops = ctx.instance.runtime_properties
        # Stash a snapshot of the image's runtime properties under "_image"
        # so delete() can restore them before tearing the image down.
        rtprops["_image"] = rtprops.copy()
        # node.properties is normally read-only; bypass that via
        # dict.__setitem__ so create_server picks up the new image id
        # (presumably set by create_image as "openstack_id" — TODO confirm).
        dict.__setitem__(props, "image", rtprops["openstack_id"])
    general.create_server(ctx)
@operation
def delete(ctx):
    """Cloudify operation: tear down the server and, when create() built an
    image for it, restore the stashed image properties and delete the image."""
    general.delete_server(ctx)
    runtime = ctx.instance.runtime_properties
    if "_image" not in runtime:
        return
    runtime.update(runtime["_image"])
    general.delete_image(ctx)
|
#A simple transliteration from DMG p.122ff
#Each table maps a d% roll (upper bound, inclusive) to an encounter entry:
#d% = [monster,number of,type of,fraction change for treasure,level modifier for treasure]
#Entries whose first two fields are lists describe mixed groups (one die
#expression per monster name, position-aligned).

# Dungeon level 1 encounters
level_1 = {
    4: ["centipede (medium)","d3","critter",0.2,0],
    9: ["dire rat","d3+1","critter",0.2,0],
    14: ["fire beetle (giant)","d3+1","critter",0.2,0],
    17: ["scorpion (small)","d3","critter",0.2,0],
    20: ["spider (small)","d3","critter",0.2,0],
    25: ["dragon","1","dragon",0.8,2],
    30: ["dwarf warrior","d3","friend",0.8,1],
    35: ["elf warrior","d3","friend",0.8,1],
    40: ["CR1 NPC","1","NPC",1,0],
    45: ["darkmantle","1","terror",0.5,1],
    55: ["krenshar","1","terror",0.5,1],
    60: ["lemure (devil)","1","terror",0.5,1],
    65: ["goblin","d4+2","tough",0.8,1],
    70: [["hobgoblin","goblin"],["1","d3"],"tough",0.8,1],
    80: ["kobold","d6+3","tough",0.8,1],
    90: ["skeleton (medium)","d3+1","undead",0.5,0],
    100: ["zombie (medium)","d3","undead",0.5,0]
}
# Dungeon level 2 encounters
level_2 = {
    5: ["centipede (large)","d3","critter",0.2,0],
    10: ["giant ant","d3","critter",0.2,0],
    15: ["scorpion (medium)","d3","critter",0.2,0],
    20: ["spider (medium)","d3","critter",0.2,0],
    25: ["dragon","1","dragon",0.8,4],
    30: ["elf warrior","d4+2","friend",0.8,2],
    35: ["CR1 NPC","d3","NPC",1,0],
    37: ["choker","1","terror",0.2,0],
    42: ["ethereal marauder","1","terror",0.2,0],
    45: ["shrieker","d3","terror",0.2,0],
    50: ["formian worker","d4+2","terror",0.8,2],
    55: ["hobgoblin","d4+2","tough",0.8,2],
    60: [["hobgoblin","goblin"],["d3","d4+2"],"tough",0.8,2],
    70: ["lizardfolk","d3","tough",0.8,2],
    80: ["orc","d4+2","tough",0.8,2],
    90: ["zombie (medium)","d4+2","undead",0.5,0],
    100: ["ghoul","d3","undead",0.5,0]
}
# Dungeon level 3 encounters
level_3 = {
    2: ["bombardier beetle (giant)","d2","critter",0.2,0],
    4: ["centipede (huge)","d2","critter",0.2,0],
    6: ["dire badger","d2","critter",0.2,0],
    8: ["dire bat","d2","critter",0.2,0],
    11: ["gelatinous cube","1","critter",0.2,0],
    13: ["praying mantis (giant)","d2","critter",0.2,0],
    14: ["scorpion (large)","d2","critter",0.2,0],
    15: ["spider (large)","d2","critter",0.2,0],
    20: ["dragon","1","dragon",0.8,4],
    25: ["imp (devil)","d2","fiend",0.8,3],
    30: [["wererat","dire rat"],["1","d3+1"],"fiend",0.8,3],
    35: ["dwarf warrior","d6+3","friend",0.8,3],
    40: ["CR1 NPC","d3+1","NPC",1,0],
    44: ["dretch (demon)","d2","terror",0.5,0],
    48: ["ethereal filcher","1","terror",0.5,0],
    52: ["phantom fungus","1","terror",0.2,0],
    56: ["thoqqua","d2","terror",0.2,0],
    60: ["vargouille","d2","terror",0.5,0],
    62: [["bugbear","goblin"],["1","d4+2"],"tough",0.8,3],
    67: ["gnoll","d3+1","tough",0.8,3],
    69: [["goblin","wolf"],["d4+2","d3"],"tough",0.8,3],
    71: [["hobgoblin","wolf"],["d3","d3"],"tough",0.8,3],
    75: [["kobold","dire weasel"],["d6+3","1"],"tough",0.8,3],
    80: ["troglodyte","d3+1","tough",0.8,3],
    90: ["shadow","1","undead",0.5,0],
    100: ["skeleton (large)","d3+1","undead",0.5,0]
}
# Dungeon level 4 encounters
level_4 = {
    4: ["ankheg","d2","critter",0.2,0],
    8: ["dire weasel","d3","critter",0.2,0],
    12: ["gray ooze","1","critter",0.2,0],
    15: ["snake (huge viper)","d2","critter",0.2,0],
    20: ["dragon","1","dragon",0.8,4],
    23: [["formian warrior","formian worker"],["1","d3"],"fiend",0.8,3],
    26: [["imp","lemure (devil)"],["1","d3"],"fiend",0.8,3],
    30: ["quasit (demon)","d2","fiend",0.8,3],
    35: ["lantern archon (celestial)","d3","friend",0.5,2],
    40: ["CR2 NPC","d3","NPC",1,0],
    45: ["carrion crawler","1","terror",0.2,0],
    50: ["mimic","1","terror",0.5,0],
    55: ["rust monster","d2","terror",0.2,0],
    60: ["violet fungi","d2","terror",0.2,0],
    62: [["bugbear","hobgoblin"],["1","d6+3"],"tough",0.8,3],
    65: ["ettercap","1","tough",0.8,3],
    67: [["gnoll","hyena (as wolf)"],["d3","d3"],"tough",0.8,3],
    70: [["lizardfolk","giant lizard"],["d3","1"],"tough",0.8,3],
    73: ["magmin","d2","tough",0.8,3],
    76: [["ogre","orc"],["1","d4+2"],"tough",0.8,3],
    78: [["orc","dire boar"],["d3","d2"],"tough",0.8,3],
    80: [["worg","goblin"],["d2","d4+2"],"tough",0.8,3],
    85: ["allip","d2","undead",0.5,0],
    90: ["ghost (NPC level 1d3)","1","undead",0.5,0],
    95: ["vampire spawn","1","undead",0.5,0],
    100: ["wight","d2","undead",0.5,0]
}
# Dungeon level 5 encounters
level_5 = {
    2: [["giant soldier ant","giant worker ant"],["1","d4+2"],"critter",0.2,0],
    5: ["dire wolverine","d2","critter",0.2,0],
    9: ["ochre jelly","1","critter",0.2,0],
    11: ["snake (giant constrictor)","1","critter",0.2,0],
    12: ["spider (huge)","d2","critter",0.2,0],
    15: ["spider eater","1","critter",0.2,0],
    20: ["dragon","1","dragon",0.8,4],
    23: ["doppelganger","d3","fiend",0.8,3],
    25: ["green hag","1","fiend",0.8,3],
    27: ["mephit","d3","fiend",0.8,3],
    30: ["wererat","d3+1","fiend",0.8,3],
    35: ["blink dog","d3+1","friend",0.5,2],
    40: ["CR2 NPC","d3+1","NPC",1,0],
    43: ["cockatrice","d3","terror",0.2,0],
    47: ["gibbering mouther","1","terror",0.2,0],
    50: ["grick","d3","terror",0.2,0],
    52: ["hydra d3+4 heads","1","terror",0.2,0],
    55: ["nightmare","1","terror",0.2,0],
    58: ["shocker lizard","d3+1","terror",0.2,0],
    60: [["violet fungus","shrieker"],["1","d3+1"],"terror",0.2,0],
    64: ["azer","d3+1","tough",0.8,3],
    67: ["bugbear","d3+1","tough",0.8,3],
    69: [["ettercap","spider (medium)"],["1","d3"],"tough",0.8,3],
    72: ["ogre","d3+1","tough",0.8,3],
    75: ["salamander (small)","d3+1","tough",0.8,3],
    77: [["troglodyte","giant lizard"],["d3+1","d2"],"tough",0.8,3],
    80: ["worg","d3+1","tough",0.8,3],
    85: [["ghast","ghoul"],["1","d3+1"],"undead",0.5,0],
    90: ["mummy","d3","undead",0.5,0],
    95: ["skeleton (huge)","d3+1","undead",0.5,0],
    100: ["wraith","1","undead",0.5,0]
}
# Dungeon level 6 encounters
level_6 = {
    2: ["digester","1","critter",0.2,0],
    4: ["dire ape","d3+1","critter",0.2,0],
    6: ["dire wolf","d3+1","critter",0.2,0],
    7: ["giant stag beetle","d3","critter",0.2,0],
    9: ["giant wasp","d3+1","critter",0.2,0],
    12: ["owlbear","d3","critter",0.2,0],
    15: ["shambling mound","1","critter",0.2,0],
    20: ["dragon","1","dragon",0.8,4],
    22: ["annis (hag)","1","fiend",0.8,3],
    25: ["harpy","d3","fiend",0.8,3],
    26: [["quasit","dretch (demon)"],["1","d2"],"fiend",0.8,3],
    28: ["wereboar","d3+1","fiend",0.8,3],
    30: ["werewolf","d3+1","fiend",0.8,3],
    35: ["werebear","d2","friend",0.8,3],
    40: ["CR3 NPC","d3+1","NPC",1,0],
    43: ["arrowhawk (small)","d3+1","terror",0.2,0],
    46: ["basilisk","d2","terror",0.2,0],
    50: ["displacer beast","d3","terror",0.2,0],
    53: ["gargoyle","d3","terror",0.5,0],
    56: ["hell hound","d3+1","terror",0.2,0],
    59: ["howler","d3+1","terror",0.2,0],
    62: ["otyugh","d3","terror",0.2,0],
    65: [["ravid","animated object (large)"],["1","1"],"terror",0.2,0],
    67: ["xorn (small)(d6 gems each)","d3+1","terror",0.2,0],
    70: ["yeth hounds","d3+1","terror",0.2,0],
    77: [["ettin","orc"],["1","d6+3"],"tough",0.8,3],
    82: [["ogre","boar"],["d3","d3"],"tough",0.8,3],
    90: ["weretiger","d2","tough",0.8,3],
    100: ["zombie (huge)","d3+1","undead",0.5,0]
}
# Dungeon level 7 encounters
level_7 = {
    4: ["black pudding","1","critter",0.2,0],
    5: ["centipede (gargantuan)","d2","critter",0.2,0],
    8: ["criosphinx","1","critter",0.2,0],
    10: ["dire boar","d3+1","critter",0.2,0],
    14: ["remorhaz","1","critter",0.2,0],
    15: ["scorpion (huge)","d2","critter",0.2,0],
    20: ["dragon","1","dragon",0.8,4],
    22: ["aranea","d3+1","fiend",0.8,3],
    24: ["barghest (medium)","d3+1","fiend",0.8,3],
    26: ["djinn","d3","fiend",0.8,3],
    28: [["formian taskmaster","minotaur"],["1","1"],"fiend",0.8,3],
    30: ["jann (genie)","d3+1","fiend",0.8,3],
    35: ["hound archon","d3+1","friend",0.8,3],
    40: ["CR4 NPC","d3+1","NPC",1,0],
    45: ["cloaker","d3","terror",0.2,0],
    48: ["cryohydra (d3+4 heads)","1","terror",0.2,0],
    52: ["formian warrior","d4+2","terror",0.8,3],
    57: ["invisible stalker","1","terror",0.2,0],
    60: ["pyrohydra (d3+4 heads)","1","terror",0.2,0],
    65: [["bugbear","wolf"],["d3+1","d3+1"],"tough",0.8,3],
    70: [["ettin","brown bear"],["1","d2"],"tough",0.8,3],
    75: ["minotaur","d3+1","tough",0.5,0],
    80: [["salamander (medium)","salamander (small)"],["1","d3+1"],"tough",0.8,3],
    90: ["ghost [NPC level d3+3]","1","undead",0.5,0],
    100: ["vampire [NPC level d2+4]","1","undead",1,0]
}
# Dungeon level 8 encounters
level_8 = {
    3: ["giant soldier ant","d6+5","critter",0.2,0],
    8: ["dire bat","d6+5","critter",0.2,0],
    10: ["spider (gargantuan)","d2","critter",0.2,0],
    20: ["dragon","1","dragon",0.8,4],
    22: [["aboleth","skum"],["1","d3+1"],"fiend",0.8,3],
    24: ["barghest (large)","d3+1","fiend",0.8,3],
    26: ["erinye (devil)","d2","fiend",0.8,3],
    28: [["medusa","grimlock"],["1","d6+3"],"fiend",0.8,3],
    30: ["mind flayer","1","fiend",0.8,3],
    33: ["ogre mage","1","fiend",0.8,3],
    35: [["yuan-ti halfblood","yuan-ti pureblood"],["1","d3"],"fiend",0.8,3],
    40: ["lammasu","1","friend",0.8,3],
    45: ["CR5 NPC","d3+1","NPC",1,0],
    47: ["achaierai","d3+1","terror",0.2,0],
    48: ["arrowhawk (medium)","d3+1","terror",0.2,0],
    50: ["girallon","d3+1","terror",0.2,0],
    52: ["flesh golem","d2","terror",0.2,0],
    54: ["gray render","1","terror",0.2,0],
    56: ["hieracosphinx","d3+1","terror",0.2,0],
    59: ["hydra (d3+7 heads)","1","terror",0.2,0],
    60: ["lernaean hydra (d3+4 heads)","1","terror",0.2,0],
    62: ["phase spider","d3+1","terror",0.2,0],
    64: ["rast","d3+1","terror",0.2,0],
    66: ["shadow mastiff","d3+1","terror",0.2,0],
    68: ["winter wolf","d3+1","terror",0.2,0],
    70: ["xorn (medium)(2d6 gems each)","d3","terror",0.2,0],
    74: [["drider","large monstrous spider"],["1","d3+1"],"tough",0.8,2],
    78: ["ettin","d3+1","tough",0.8,2],
    82: ["manticore","d3+1","tough",0.8,2],
    86: ["salamander (medium)","d3+1","tough",0.8,2],
    90: ["troll","d3+1","tough",0.8,2],
    100: ["spectre","d2","undead",0.5,0]
}
# Dungeon level 9 encounters
level_9 = {
    5: ["bulette","d3","critter",0.2,0],
    10: ["dire lion","d4+2","critter",0.2,0],
    20: ["dragon","1","dragon",0.8,3],
    21: ["bebilith (demon)","1","fiend",0.8,2],
    22: ["lamia","d3+1","fiend",0.8,2],
    24: [["mind flayer","charmed creatures, roll Table4-20"],["1","1"],"fiend",0.8,2],
    26: ["night hag","1","fiend",0.8,2],
    28: [["ogre mage","ogre"],["1","d4+2"],"fiend",0.8,2],
    30: ["rakshasa","1","fiend",0.8,2],
    32: ["succubus","1","fiend",0.8,2],
    34: ["xill (barbaric 00-50, civilized 51-100)","d3+1","fiend",0.8,2],
    35: [["yuan-ti abomination","yuan-ti pureblood (00-50) or halfblood (51-100)"],["1","d3"],"fiend",0.8,2],
    40: ["androsphinx","1","friend",0.8,2],
    45: ["CR6 NPC","d3+1","NPC",1,0],
    47: ["behir","d2","terror",0.2,0],
    49: ["belker","d3+1","terror",0.2,0],
    50: ["cryohydra (d3+6 heads)","1","terror",0.2,0],
    52: ["delver","1","terror",0.2,0],
    54: ["dragon turtle","1","terror",0.2,0],
    55: ["pyrohydra (d3+6 heads)","1","terror",0.2,0],
    57: ["will-o-wisp","d3+1","terror",0.2,0],
    60: ["wyvern","d3+1","terror",0.2,0],
    64: [["barbazu (devil)","osyluth"],["1","d2"],"tough",0.8,2],
    68: [["hill giant","dire wolf"],["1","d3"],"tough",0.8,2],
    72: ["kyton (devil)","d3+1","tough",0.8,2],
    76: ["osyluth (devil)","d3+1","tough",0.8,2],
    80: [["troll","dire boar"],["d3+1","d3"],"tough",0.8,2],
    90: ["bodak","d2","undead",0.5,0],
    100: ["vampire [NPC level d2+6]","1","undead",1,0]
}
# Dungeon level 10 encounters
level_10 = {
    5: ["dire bear","d3+1","critter",0.2,0],
    15: ["dragon","1","dragon",0.8,3],
    17: ["aboleth","d3+1","fiend",0.8,2],
    19: ["athach","d3+1","fiend",0.8,2],
    21: ["formian myrmarch","1","fiend",0.8,2],
    24: ["medusa","d3+1","fiend",0.8,2],
    26: ["water naga","d3+1","fiend",0.8,2],
    28: [["night hag","nightmare"],["1","1"],"fiend",0.8,2],
    30: [["salamander (large)","salamander (medium)"],["1","d3"],"fiend",0.8,2],
    32: ["yuan-ti abomination","d3+1","fiend",0.8,2],
    37: ["lillend","d3+1","friend",0.8,2],
    47: ["CR7 NPC","d3+1","NPC",1,0],
    49: ["chaos beast","d3+1","terror",0.2,0],
    51: ["chimera","d3+1","terror",0.2,0],
    53: ["chuul","d3+1","terror",0.2,0],
    54: ["lernaean cryohydra (d4+4 heads)","1","terror",0.2,0],
    56: ["dragonne","d3+1","terror",0.2,0],
    58: ["hellcat (devil)","d3+1","terror",0.2,0],
    59: ["hydra (d3+9 heads)","1","terror",0.2,0],
    60: ["phasm","1","terror",0.2,0],
    61: ["lernaean pyrohydra (d4+4 heads)","1","terror",0.2,0],
    63: ["retriever (demon)","1","terror",0.2,0],
    65: ["red slaadi","d3+1","terror",0.8,2],
    67: ["umber hulk","d3+1","terror",0.2,0],
    71: ["barbazu (devil)","d3+1","tough",0.8,2],
    75: ["drider","d3+1","tough",0.8,2],
    79: [["frost giant","winter wolf"],["1","d3"],"tough",0.8,2],
    83: [["stone giant","dire bear"],["1","d2"],"tough",0.8,2],
    87: ["hill giant","d3+1","tough",0.8,2],
    90: [["hamatula (devil)","barbazu (devil)"],["1","d2"],"tough",0.8,2],
    100: ["ghost [NPC level d3+6]","1","undead",0.5,0]
}
# Dungeon level 11 encounters
level_11 = {
    5: ["dire tiger","d3","critter",0.2,0],
    15: ["dragon","1","dragon",0.8,3],
    18: ["hag covey (see p124)","1","fiend",0.8,1],
    21: ["efreet","d3+1","fiend",0.8,1],
    24: [["formian myrmarch","formian warrior"],["1","d6+3"],"fiend",0.8,1],
    27: ["gynosphinx","d3+1","fiend",0.8,1],
    30: ["dark naga","d3+1","fiend",0.8,1],
    35: ["avoral guardinal (celestial)","d3","friend",0.8,1],
    45: ["CR8 NPC","d3+1","NPC",1,0],
    48: ["arrowhawk (large)","d3","terror",0.2,0],
    51: ["destrachan","d3","terror",0.2,0],
    54: ["clay golem","d2","terror",0.2,0],
    57: ["gorgon","d3+1","terror",0.2,0],
    59: ["lernaean hydra (d3+7 heads)","1","terror",0.2,0],
    62: ["blue slaadi","d3+1","terror",0.8,1],
    65: ["xorn (large)(4d6 gems each)","d3+1","terror",0.2,0],
    70: [["fire giant","hell hound"],["1","d6+3"],"tough",0.8,1],
    75: ["stone giant","d3+1","tough",0.8,1],
    80: ["hamatula (devil)","d3+1","tough",0.8,1],
    90: ["devourer","1","undead",0.5,0],
    100: ["mohrg","d3+1","undead",0.5,0]
}
# Dungeon level 12 encounters
level_12 = {
    4: ["purple worm","1","critter",0.2,0],
    5: ["scorpion (colossal)","d2","critter",0.2,0],
    15: ["dragon","1","dragon",0.8,3],
    20: ["mind flayer [inquisition]","d4+2","fiend",0.5,1],
    25: ["spirit naga","d3+1","fiend",0.5,1],
    30: ["green slaad","d3+1","fiend",0.5,1],
    35: [["cloud giant","dire lion"],["1","d4+2"],"friend",0.5,1],
    50: ["CR9 NPC","d3+1","NPC",1,0],
    55: ["cryohydra (d3+9 heads)","1","terror",0.2,0],
    60: ["stone golem","d2","terror",0.2,0],
    65: ["pyrohydra (d3+9 heads)","1","terror",0.2,0],
    70: ["yrthak","d3+1","terror",0.2,0],
    75: [["cornugon (devil)","hamatula (devil)"],["1","d3"],"tough",0.5,1],
    80: [["cloud giant","dire lion"],["1","d4+2"],"tough",0.5,1],
    85: ["frost giant","d3+1","tough",0.5,1],
    90: ["salamander (large)","d3+1","tough",0.5,1],
    100: ["vampire [NPC level d3+8]","1","undead",1,0]
}
# Dungeon level 13 encounters
level_13 = {
    15: ["dragon","1","dragon",0.8,3],
    20: ["beholder","1","fiend",0.5,1],
    30: [["night hag","nightmare"],["3","3"],"fiend",0.5,1], # TODO: need to edit dieRoll
    35: ["gray slaad","d3+1","fiend",0.5,1],
    40: ["couatl","d3+1","friend",0.5,1],
    45: ["guardian naga","d3+1","friend",0.5,1],
    60: ["CR10 NPC","d3+1","NPC",1,0],
    67: ["frost worm","d2","terror",0.2,0],
    73: ["lernaean hydra (d3+9 heads)","1","terror",0.2,0],
    80: ["roper","d3+1","terror",0.2,0],
    90: ["cornugon (devil)","d3+1","tough",0.5,1],
    100: ["ghost [NPC level d3+9]","1","undead",0.5,1]
}
# Dungeon level 14 encounters
level_14 = {
    15: ["dragon","1","dragon",0.8,2],
    25: [["beholder","charmed creatures, roll Table4-25"],["1","1"],"fiend",0.5,1],
    35: ["death slaad","d2","fiend",0.5,1],
    40: ["cloud giant","d3+1","friend",0.5,1],
    55: ["CR11 NPC","d3+1","NPC",1,0],
    60: ["lernaean cryohydra (d4+8 heads)","1","terror",0.2,0],
    65: ["iron golem","d2","terror",0.5,0],
    70: ["lernaean pyrohydra (d4+8 heads)","1","terror",0.2,0],
    80: ["cloud giant","d3+1","tough",0.5,1],
    90: [["storm giant","griffon"],["1","d4+2"],"tough",0.5,1],
    100: ["lich [NPC level d3+10]","1","undead",1,0]
}
# Dungeon level 15 encounters
level_15 = {
    15: ["dragon","1","dragon",0.8,1],
    30: ["beholder","d3","fiend",0.5,0],
    40: [["death slaad","green slaad"],["d2","d3+1"],"fiend",0.5,0],
    45: ["ghaele (celestial)","d3","friend",0.8,0],
    70: ["CR12 NPC","d3+1","NPC",1,0],
    80: ["hezrou (demon)","d2","tough",0.5,0],
    90: [["gelugon (devil)","cornugon (devil)"],["1","d3+1"],"tough",0.5,0],
    100: ["vampire [NPC level d3+11]","1","undead",1,0]
}
# Dungeon level 16 encounters
level_16 = {
    20: ["dragon","1","dragon",0.8,1],
    30: ["pit fiend (devil)","1","fiend",0.5,1],
    35: ["astral deva (celestial)","d3","friend",0.5,1],
    60: ["CR13 NPC","d3+1","NPC",1,0],
    70: ["gelugon (devil)","d3+1","tough",0.5,0],
    80: ["storm giant","d3+1","tough",0.5,0],
    90: ["vrock (demon)","d3+1","tough",0.5,0],
    100: ["ghost [NPC level d3+12]","1","undead",0.2,0]
}
# Dungeon level 17 encounters
level_17 = {
    20: ["dragon","1","dragon",0.8,1],
    30: ["marilith (demon)","1","fiend",0.5,0],
    35: ["trumpet archon (celestial)","d3+1","friend",0.5,0],
    60: ["CR14 NPC","d3+1","NPC",1,0],
    70: ["glabrezu (demon)","d3","tough",0.5,0],
    80: ["hezrou (demon)","d3+1","tough",0.5,0],
    90: ["lich [NPC level d3+13]","1","undead",1,0],
    100: ["nightwing (nightshade)","d3+1","undead",0.2,0]
}
# Dungeon level 18 encounters
level_18 = {
    20: ["dragon","1","dragon",0.8,1],
    30: ["balor (demon)","d3","fiend",0.5,0],
    40: [["pit fiend (devil)","gelugon (devil)"],["1","d3+1"],"fiend",0.5,0],
    45: ["planetar (celestial)","d3","friend",0.5,0],
    70: ["CR15 NPC","d3+1","NPC",1,0],
    80: ["glabrezu (demon)","d3+1","tough",0.5,0],
    90: ["vampire [NPC level d3+14]","1","undead",1,0],
    100: ["nightwalker (nightshade)","d3+1","undead",0.2,0]
}
# Dungeon level 19 encounters
level_19 = {
    20: ["dragon","1","dragon",0.8,1],
    30: [["marilith (demon)","glabrezu (demon)"],["1","d3"],"fiend",0.5,1],
    40: ["pit fiend (devil)","d3+1","fiend",0.5,1],
    45: ["solar (celestial)","1","friend",0.5,1],
    70: ["CR16 NPC","d3+1","NPC",1,0],
    80: ["nalfeshnee (demon)","d3+1","tough",0.5,1],
    90: ["ghost [NPC level d3+15]","1","undead",0.2,0],
    100: ["nightcrawler (nightshade)","d3","undead",0.2,0]
}
# Dungeon level 20 encounters
level_20 = {
    20: ["dragon","1","dragon",0.8,1],
    30: ["balor (demon)","d3","fiend",0.5,0],
    40: ["marilith (demon)","d3+1","fiend",0.5,0],
    45: [["solar (celestial)","planetar (celestial)"],["1","d2"],"friend",0.5,0],
    55: ["CR17 NPC","d3+1","NPC",1,0],
    60: ["CR18 NPC","d3","NPC",1,0],
    65: ["CR19 NPC","d2","NPC",1,0],
    70: ["CR20 NPC","1","NPC",1,0],
    80: [["nalfeshnee (demon)","hezrou (demon)"],["d3+1","d3+1"],"tough",0.2,0],
    85: ["ghost [NPC level d2+18]","1","undead",0.2,0],
    90: ["lich [NPC level d4+16]","1","undead",1,0],
    95: ["nightcrawler (nightshade)","d3","undead",0.2,0],
    100: ["vampire [NPC level d3+17]","1","undead",1,0]
}
# For a party at `actual_level`, roll d% against the inner table to find
# which level_N encounter table to use and how to scale monster dice.
master_table = {
    # actual_level: d%: [adjusted_level,monsterDiceModifier]
    "lvl1": {
        5: [1,1], 10: [1,1],
        20: [1,1], 30: [1,1],
        70: [1,1], 80: [2,0.5],
        90: [2,0.5], 100: [3,0.3]
    },
    "lvl2": {
        5: [1,2], 10: [1,2],
        20: [1,2], 30: [2,1],
        70: [2,1], 80: [3,0.7],
        90: [4,0.5], 100: [5,0.3]
    },
    "lvl3": {
        5: [1,3], 10: [1,3],
        20: [2,1.5], 30: [2,1.5],
        70: [3,1], 80: [4,0.7],
        90: [5,0.5], 100: [6,0.3]
    },
    "lvl4": {
        5: [1,4], 10: [1,4],
        20: [2,2], 30: [3,1.5],
        70: [4,1], 80: [5,0.7],
        90: [6,0.5], 100: [7,0.3]
    },
    "lvl5": {
        5: [2,4], 10: [2,3],
        20: [3,2], 30: [4,1.5],
        70: [5,1], 80: [6,0.7],
        90: [7,0.5], 100: [8,0.3]
    },
    "lvl6": {
        5: [2,4], 10: [3,3],
        20: [4,2], 30: [5,1.5],
        70: [6,1], 80: [7,0.7],
        90: [8,0.5], 100: [9,0.3]
    },
    "lvl7": {
        5: [3,4], 10: [4,3],
        20: [5,2], 30: [6,1.5],
        70: [7,1], 80: [8,0.7],
        90: [9,0.5], 100: [10,0.3]
    },
    "lvl8": {
        5: [4,4], 10: [5,3],
        20: [6,2], 30: [7,1.5],
        70: [8,1], 80: [9,0.7],
        90: [10,0.5], 100: [11,0.3]
    },
    "lvl9": {
        5: [5,4], 10: [6,3],
        20: [7,2], 30: [8,1.5],
        70: [9,1], 80: [10,0.7],
        90: [11,0.5], 100: [12,0.3]
    },
    "lvl10": {
        5: [6,4], 10: [7,3],
        20: [8,2], 30: [9,1.5],
        70: [10,1], 80: [11,0.7],
        90: [12,0.5], 100: [13,0.3]
    },
    "lvl11": {
        5: [7,4], 10: [8,3],
        20: [9,2], 30: [10,1.5],
        70: [11,1], 80: [12,0.7],
        90: [13,0.5], 100: [14,0.3]
    },
    "lvl12": {
        5: [8,4], 10: [9,3],
        20: [10,2], 30: [10,1.5],
        70: [12,1], 80: [12,0.7],
        90: [14,0.5], 100: [14,0.3]
    },
    "lvl13": {
        5: [9,4], 10: [10,3],
        20: [11,2], 30: [12,1.5],
        70: [13,1], 80: [14,0.7],
        90: [15,0.5], 100: [16,0.3]
    },
    "lvl14": {
        5: [10,4], 10: [11,3],
        20: [12,2], 30: [13,1.5],
        70: [14,1], 80: [15,0.7],
        90: [16,0.5], 100: [17,0.3]
    },
    "lvl15": {
        5: [11,4], 10: [12,3],
        20: [13,2], 30: [14,1.5],
        70: [15,1], 80: [16,0.7],
        90: [17,0.5], 100: [18,0.3]
    },
    "lvl16": {
        5: [12,4], 10: [13,3],
        20: [14,2], 30: [15,1.5],
        70: [16,1], 80: [17,0.7],
        90: [18,0.5], 100: [19,0.3]
    },
    "lvl17": {
        5: [13,4], 10: [14,3],
        20: [15,2], 30: [16,1.5],
        70: [17,1], 80: [18,0.7],
        90: [19,0.5], 100: [20,0.3]
    },
    "lvl18": {
        5: [14,4], 10: [15,3],
        20: [16,2], 30: [17,1.5],
        70: [18,1], 80: [19,0.7],
        90: [20,0.5], 100: [20,0.5]
    },
    "lvl19": {
        5: [15,4], 10: [16,3],
        20: [17,2], 30: [18,1.5],
        70: [19,1], 80: [19,1],
        90: [20,0.7], 100: [20,0.7]
    },
    "lvl20": {
        5: [16,4], 10: [17,3],
        20: [18,2], 30: [19,1.5],
        70: [20,1], 80: [20,1],
        90: [20,1], 100: [20,1]
    }
}
# monster_dice_modifier: #a simple transliteration from DMG3 p.120-21.
# given key1, change the original dieRoll (key2) to the new dieRoll (value2)
# (key1 is the monsterDiceModifier chosen from master_table; a modifier of 1
# has no entry because it leaves the die expression unchanged)
monster_dice_modifier = {
    0.3: {"1": "1",
          "d2": "1",
          "d3": "1",
          "d3+1": "1",
          "d4+2": "d2",
          "d6+3": "d3",
          "d6+5": "d3+1" },
    0.5: {"1": "1",
          "d2": "1",
          "d3": "1",
          "d3+1": "d2",
          "d4+2": "d3",
          "d6+3": "d3+1",
          "d6+5": "d4+2" },
    0.7: {"1": "1",
          "d2": "1",
          "d3": "d2",
          "d3+1": "d3",
          "d4+2": "d3+1",
          "d6+3": "d4+2",
          "d6+5": "d6+3" },
    1.5: {"1": "d2",
          "d2": "d3",
          "d3": "d3+1",
          "d3+1": "d4+2",
          "d4+2": "d6+3",
          "d6+3": "d6+5",
          "d6+5": "d4+10" },
    2: {"1": "d3",
        "d2": "d3+1",
        "d3": "d4+2",
        "d3+1": "d6+3",
        "d4+2": "d6+5",
        "d6+3": "d4+10",
        "d6+5": "d100+100" },
    3: {"1": "d3+1",
        "d2": "d4+2",
        "d3": "d6+3",
        "d3+1": "d6+5",
        "d4+2": "d4+10",
        "d6+3": "d100+100",
        "d6+5": "d100+100" },
    4: {"1": "d4+2",
        "d2": "d6+3",
        "d3": "d6+5",
        "d3+1": "d4+10",
        "d4+2": "d100+100",
        "d6+3": "d100+100",
        "d6+5": "d100+100" }
}
|
from __future__ import division
from DPLocalSolver import DFS_Rec_for_Non_Monotone_General
from util import *
import os
import copy
import IPython
import time
import random
import numpy as np
from random import sample
from collections import OrderedDict
import Polygon as pn
from random import uniform, random
from itertools import combinations, product
# non monotone solver where the start/goal poses are not necessarily 2*i/2*i+1
class Non_Monotone_Solver_General(object):
    """Non-monotone rearrangement solver where the start/goal poses are not
    necessarily numbered 2*i / 2*i+1.

    Tries to solve the instance with 0, 1 or 2 buffer poses by brute-force
    enumeration of (object subset, buffer assignment) pairs, delegating each
    candidate to DFS_Rec_for_Non_Monotone_General.

    Fixes vs. the original: the class mixed Python-2 print statements with
    Python-3 print() calls — unified on Python 3 — and used time.clock(),
    which was removed in Python 3.8 (replaced by time.perf_counter()).
    """

    def __init__(self, graph, obj_locations, start_poses, goal_poses):
        self.obj_locations = obj_locations
        self.path_dict = {}
        self.dependency_dict = {}
        self.object_ordering = []
        # deep-copied so the caller's dicts are never mutated
        self.start_poses = copy.deepcopy(start_poses)
        self.goal_poses = copy.deepcopy(goal_poses)
        self.n = len(self.start_poses)
        self.linked_list_conversion(graph)
        self.enumerate_cases()
        if self.isConnected:
            self.dependency_dict_conversion()

    def enumerate_cases(self):
        """Enumerate buffer assignments until a plan is found or time runs out.

        Sets self.isConnected, and on success also self.object_ordering,
        self.obj_buffer_collections and self.totalActions.
        """
        time_allowed = 500  # seconds of wall time for the whole search
        start_time = time.perf_counter()
        FOUND = False
        self.isConnected = False
        for obj_num in range(self.n + 1):  # num of objects that need buffers
            # if it needs more than 2 buffers, give up on this instance
            if obj_num >= 3:
                return
            print("number of objects that use buffers", obj_num)
            # which objects get a buffer
            for obj_set in combinations(self.start_poses.keys(), obj_num):
                if time.perf_counter() - start_time > time_allowed:
                    return  # time exceeded, solution not found
                # which poses serve as the buffers
                for buffer_set in product(sorted(self.obj_locations.keys(), reverse=True), repeat=obj_num):
                    if time.perf_counter() - start_time > time_allowed:
                        return  # time exceeded, solution not found
                    obj_buffer_dict = {}
                    # Degrade = True when an object would use its own start
                    # or goal pose as a buffer — such assignments are skipped.
                    Degrade = False
                    for index in range(len(obj_set)):
                        obj = obj_set[index]
                        buffer_pose = buffer_set[index]
                        if (buffer_pose == self.start_poses[obj]) or (buffer_pose == self.goal_poses[obj]):
                            Degrade = True
                            break
                        obj_buffer_dict[obj] = (self.n + index, buffer_pose)
                    if Degrade:
                        continue
                    # Delegate the fixed buffer assignment to the DP solver.
                    DFS = DFS_Rec_for_Non_Monotone_General(
                        self.start_poses, self.goal_poses, self.dependency_dict,
                        self.path_dict, self.obj_locations, self.LL,
                        self.region_dict, obj_buffer_dict)
                    # Keep the (possibly enlarged) memo tables for the next try.
                    self.dependency_dict = copy.deepcopy(DFS.dependency_dict)
                    self.path_dict = copy.deepcopy(DFS.path_dict)
                    if len(DFS.object_ordering) > 0:
                        print("Find a solution!")
                        FOUND = True
                        self.isConnected = True
                        print("obj_buffer_dict", obj_buffer_dict)
                        # map each buffered object to the pose used as buffer
                        self.obj_buffer_collections = {}
                        for obj, buffer_event in obj_buffer_dict.items():
                            self.obj_buffer_collections[obj] = buffer_event[1]
                        self.object_ordering = DFS.object_ordering
                        print("object_ordering: " + str(self.object_ordering))
                        self.totalActions = len(self.object_ordering)
                        print("total actions: " + str(self.totalActions))
                        break
                if FOUND:
                    break
            if FOUND:
                break

    def linked_list_conversion(self, graph):
        """Index the graph keys (region_dict, e.g. (1,2,'a') -> 0) and build
        an adjacency list over those indices (LL, e.g. 0 -> [1,2,3])."""
        self.region_dict = {}
        self.LL = {}
        for key in graph:
            index = len(self.region_dict)
            self.region_dict[key] = index
            self.LL[index] = []
        for key in graph:
            for v in graph[key]:
                self.LL[self.region_dict[key]].append(self.region_dict[v])

    def dependency_dict_conversion(self):
        """Rewrite each dependency set of pose numbers into a set of
        (object index, start/goal flag) pairs: number -> (number//2, number%2)."""
        for key in self.dependency_dict.keys():
            number_set_list = self.dependency_dict[key]
            pose_set_list = []
            for number_set in number_set_list:
                pose_set = set()
                for number in number_set:
                    pose_set = pose_set.union({(number // 2, number % 2)})
                pose_set_list.append(pose_set)
            self.dependency_dict[key] = pose_set_list
a = [1, 2, 3, 2]
k = 5
from collections import deque


def maxLength(a, k):
    """Return the length of the longest contiguous subarray of `a` whose sum is <= `k`.

    Sliding-window scan: the window grows by one element each step; when the
    running sum exceeds k, exactly one element is shed from the left, so the
    window length never drops below the best length already found. This is
    only valid for non-negative elements.

    Fix: the original ended with a Python 2 `print` statement, a SyntaxError
    on Python 3; the call form below works on both.
    """
    window = deque()
    best = 0
    running = 0
    for value in a:
        window.append(value)
        running += value
        if running <= k:
            best = max(best, len(window))
        else:
            running -= window.popleft()
    return best


print(maxLength(a, k))
import hashlib
import json
import os
from pathlib import Path
import pickle
import shutil
import subprocess
import uuid
from enum import IntEnum
from django.apps import apps
from django.conf import settings
from django.core.exceptions import ValidationError
from django.core.mail import send_mail
from django.db import models, transaction
from django.db.models import signals
from django.urls import reverse
from django.utils import timezone
from djcall.models import Caller
from pymemcache.client.base import Client
from picklefield.fields import PickledObjectField
from timezone_field import TimeZoneField
from django.utils.translation import gettext_lazy as _
from django.conf import settings
from electeez_auth.models import User
from electeez_sites.models import Site
def above_0(value):
    """Model-field validator: reject any value that is not strictly positive.

    NOTE(review): the message has a typo ("choosen") and no space before the
    interpolated value; it is left untouched here because the string is a
    gettext msgid and changing it would orphan existing translations.
    """
    if value <= 0:
        raise ValidationError(
            _('Must be above 0, you have choosen:') + f'{value}'
        )
class Contest(models.Model):
    """An ElectionGuard-backed election.

    Holds the configuration (dates, quorum, votes allowed), the pickled
    ElectionGuard artifacts (joint key, store, tallies, device), and the
    locations of the published artifact archive (local zip and/or IPFS).
    """
    id = models.UUIDField(
        primary_key=True,
        editable=False,
        default=uuid.uuid4
    )
    # The user who administers this contest.
    mediator = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        on_delete=models.CASCADE,
    )
    name = models.CharField(max_length=255)
    about = models.CharField(
        max_length=2048,
        blank=True,
        null=True
    )
    # NOTE(review): shadows the builtin type(); kept for schema compatibility.
    type = models.CharField(default='school', max_length=100)
    # How many selections a single ballot may contain.
    votes_allowed = models.PositiveIntegerField(
        default=1,
        validators=[above_0],
    )
    # Minimum number of guardians required to decrypt the tally.
    quorum = models.IntegerField(
        default=1,
        verbose_name='quorum',
    )
    start = models.DateTimeField()
    end = models.DateTimeField()
    timezone = TimeZoneField(
        choices_display='WITH_GMT_OFFSET',
        default='Europe/Paris',
    )
    actual_start = models.DateTimeField(null=True, blank=True, db_index=True)
    actual_end = models.DateTimeField(null=True, blank=True)
    # True while an asynchronous decryption job is in flight (see launch_decryption).
    decrypting = models.BooleanField(default=False)
    # Pickled ElectionGuard objects, opaque to the database.
    joint_public_key = PickledObjectField(null=True, blank=True)
    metadata = PickledObjectField(null=True)
    context = PickledObjectField(null=True)
    device = PickledObjectField(null=True)
    store = PickledObjectField(null=True)
    plaintext_tally = PickledObjectField(null=True)
    plaintext_spoiled_ballots = PickledObjectField(null=True)
    ciphertext_tally = PickledObjectField(null=True)
    coefficient_validation_sets = PickledObjectField(null=True)
    # Integrity hash / IPFS address of the published artifacts archive.
    artifacts_sha1 = models.CharField(max_length=255, null=True, blank=True)
    artifacts_ipfs = models.CharField(max_length=255, null=True, blank=True)
    class PublishStates(IntEnum):
        # Lifecycle stages reported by the publish_state property.
        # NOTE(review): the trailing commas make these tuple literals; IntEnum
        # still resolves them to plain ints, but they look accidental.
        ELECTION_NOT_DECENTRALIZED = 0,
        ELECTION_CONTRACT_CREATED = 1,
        ELECTION_OPENED = 2,
        ELECTION_CLOSED = 3,
        ELECTION_DECRYPTED = 4,
        ELECTION_PUBLISHED = 5
    @property
    def number_elected(self):
        """Number of winners; mirrors votes_allowed for the manifest."""
        return self.votes_allowed
    @property
    def number_guardians(self):
        """How many guardians are registered on this contest."""
        return self.guardian_set.count()
    @property
    def current_sequence(self):
        """Allocate the next guardian sequence number via memcached (starts at 1)."""
        client = Client(settings.MEMCACHED_HOST)
        sequence = int(client.get(f'{self.pk}.sequence', 1))
        client.set(f'{self.pk}.sequence', sequence + 1)
        return sequence
    @property
    def artifacts_path(self):
        """Local directory where published artifacts are written."""
        return (
            Path(settings.MEDIA_ROOT)
            / 'artifacts'
            / f'contest-{self.pk}'
        )
    @property
    def artifacts_zip_path(self):
        """Path of the zipped artifacts archive next to artifacts_path."""
        return Path(str(self.artifacts_path) + '.zip')
    @property
    def artifacts_url(self):
        """Public URL of the artifacts: IPFS when pinned, else local media."""
        if self.artifacts_ipfs_url:
            return self.artifacts_ipfs_url
        return self.artifacts_local_url
    @property
    def artifacts_local_url(self):
        """URL of the artifacts zip served from this site's MEDIA_URL."""
        return ''.join([
            settings.BASE_URL,
            settings.MEDIA_URL,
            'artifacts/',
            f'contest-{self.pk}.zip',
        ])
    @property
    def artifacts_ipfs_url(self):
        """Public IPFS gateway URL, or None when not uploaded to IPFS."""
        if self.artifacts_ipfs:
            return 'https://ipfs.io/ipfs/' + self.artifacts_ipfs
    @property
    def manifest_url(self):
        """Absolute URL serving this contest's election manifest."""
        return ''.join([
            settings.BASE_URL,
            reverse('contest_manifest', args=[self.pk]),
        ])
    @property
    def manifest_sha1(self):
        """SHA-1 of the serialized manifest, for integrity display."""
        return hashlib.sha1(
            self.description.to_json().encode('utf8')
        ).hexdigest()
    def launch_decryption(
        self,
        send_voters_email,
        email_title,
        email_body,
    ):
        """Spool the asynchronous tally/decryption job (no-op if already running/done)."""
        # Guard against double-launch and against re-decrypting a finished tally.
        if self.decrypting or self.plaintext_tally:
            return
        self.decrypting = True
        self.save()
        Caller(
            callback='djelectionguard.models.decrypt_contest',
            kwargs=dict(
                contest_id=str(self.pk),
                user_id=self.mediator.pk,
                send_voters_email=send_voters_email,
                voters_email_title=email_title,
                voters_email_msg=email_body
            ),
        ).spool('tally')
    def decrypt(self):
        """Tally cast ballots and decrypt them with every guardian's shares.

        Side effects: fills ciphertext_tally / plaintext_tally, writes each
        candidate's score, wipes guardian keypairs from memcached, then
        calls publish().
        """
        from electionguard.tally import tally_ballots
        self.ciphertext_tally = tally_ballots(self.store, self.metadata, self.context)
        from electionguard.decryption_mediator import DecryptionMediator
        decryption_mediator = self.decrypter
        from electionguard.ballot import BallotBoxState
        from electionguard.ballot_box import get_ballots
        submitted_ballots = get_ballots(self.store, BallotBoxState.CAST)
        submitted_ballots_list = list(submitted_ballots.values())
        # Decrypt the tally with available guardian keys
        for g in self.guardian_set.all().order_by('sequence'):
            guardian = g.get_guardian()
            guardian_key = guardian.share_election_public_key()
            tally_share = guardian.compute_tally_share(
                self.ciphertext_tally, self.context
            )
            ballot_shares = guardian.compute_ballot_shares(
                submitted_ballots_list, self.context
            )
            decryption_mediator.announce(
                guardian_key, tally_share, ballot_shares
            )
        self.plaintext_tally = decryption_mediator.get_plaintext_tally(
            self.ciphertext_tally
        )
        if not self.plaintext_tally:
            raise AttributeError('"self.plaintext_tally" is None')
        self.plaintext_spoiled_ballots = decryption_mediator.get_plaintext_ballots([])
        # And delete keys from memory
        for guardian in self.guardian_set.all():
            guardian.delete_keypair()
        self.save()
        # Copy per-candidate tallies out of the plaintext tally structure.
        plaintext_tally_contest = self.plaintext_tally.contests[str(self.pk)]
        for candidate in self.candidate_set.all():
            candidate.score = plaintext_tally_contest.selections[f'{candidate.pk}-selection'].tally
            candidate.save()
        self.decrypting = False
        self.save()
        self.publish()
    def publish(self):
        """Write all ElectionGuard artifacts to disk, zip them, and record a SHA-1."""
        # electionguard's publish() writes into the current directory, so the
        # cwd is saved, switched, and restored around it.
        cwd = os.getcwd()
        # provision directory path
        from electionguard.election import ElectionConstants
        from electionguard.publish import publish
        self.artifacts_path.mkdir(parents=True, exist_ok=True)
        os.chdir(self.artifacts_path)
        publish(
            self.description,
            self.context,
            ElectionConstants(),
            [self.device],
            self.store.all(),
            [],
            self.ciphertext_tally.publish(),
            self.plaintext_tally,
            self.coefficient_validation_sets,
        )
        # create the zip file of key to key.zip
        os.chdir(self.artifacts_path / '..')
        name = f'contest-{self.pk}'
        shutil.make_archive(name, 'zip', name)
        # Hash the archive streamed in 64 KiB chunks.
        sha1 = hashlib.sha1()
        with self.artifacts_zip_path.open('rb') as f:
            while data := f.read(65536):
                sha1.update(data)
        self.artifacts_sha1 = sha1.hexdigest()
        self.save()
        os.chdir(cwd)
    def publish_ipfs(self):
        """Best-effort upload + pin of the artifacts zip to the configured IPFS node."""
        try:
            url = settings.IPFS_URL + '/api/v0/'
            out = subprocess.check_output(
                ['curl', '-F', f'file=@{self.artifacts_zip_path}', url+'add'],
                stderr=subprocess.PIPE,
            )
            result = json.loads(out)
            self.artifacts_ipfs = result['Hash']
            self.save()
            out = subprocess.check_output(
                ['curl', '-X', 'POST', url+f'pin/add?arg={self.artifacts_ipfs}'],
                stderr=subprocess.PIPE,
            )
        except Exception as e:
            # Deliberately best-effort: an unreachable IPFS node must not
            # break the publication flow.
            print(e)
            print('Could not upload to IPFS, see error above')
    @property
    def state(self):
        """'pending' | 'started' | 'finished', derived from the actual timestamps."""
        if self.actual_end:
            return 'finished'
        elif self.actual_start:
            return 'started'
        return 'pending'
    @property
    def publish_state(self):
        """Furthest PublishStates stage reached, inferred from stored data."""
        if self.artifacts_ipfs:
            return self.PublishStates.ELECTION_PUBLISHED
        elif self.plaintext_tally:
            return self.PublishStates.ELECTION_DECRYPTED
        elif self.actual_end:
            return self.PublishStates.ELECTION_CLOSED
        elif self.actual_start:
            return self.PublishStates.ELECTION_OPENED
        elif getattr(self, 'electioncontract', None):
            return self.PublishStates.ELECTION_CONTRACT_CREATED
        else:
            return self.PublishStates.ELECTION_NOT_DECENTRALIZED
    @property
    def variation(self):
        """ElectionGuard vote-variation string used in the manifest."""
        return 'one_of_m' if self.votes_allowed == 1 else 'n_of_m'
    def get_absolute_url(self):
        """Canonical detail-page URL for this contest."""
        return reverse('contest_detail', args=[self.pk])
    def get_ballot(self, *selections):
        """Build a PlaintextBallot casting one vote per selected candidate pk."""
        from electionguard.ballot import (
            PlaintextBallot,
            PlaintextBallotContest,
            PlaintextBallotSelection,
        )
        ballot = PlaintextBallot(
            object_id=str(uuid.uuid4()),
            style_id=f"{self.pk}-style",
            contests=[
                PlaintextBallotContest(
                    object_id=str(self.pk),
                    ballot_selections=[
                        PlaintextBallotSelection(
                            object_id=f"{selection}-selection",
                            vote=1,
                            is_placeholder_selection=False,
                            extended_data=None,
                        ) for selection in selections
                    ]
                )
            ]
        )
        return ballot
    @property
    def description(self):
        """ElectionGuard Manifest built from get_manifest()."""
        from electionguard.manifest import Manifest
        return Manifest.from_json_object(
            self.get_manifest()
        )
    def prepare(self):
        """Build metadata/context/store/device once the joint public key exists."""
        from electionguard.election_builder import ElectionBuilder
        builder = ElectionBuilder(
            number_of_guardians=self.number_guardians,
            quorum=self.quorum,
            manifest=self.description,
        )
        builder.set_public_key(self.joint_public_key.joint_public_key)
        builder.set_commitment_hash(self.joint_public_key.commitment_hash)
        self.metadata, self.context = builder.build()
        from electionguard.data_store import DataStore
        self.store = DataStore()
        from electionguard.encrypt import EncryptionDevice, EncryptionMediator, generate_device_uuid
        # NOTE(review): 12345/67890 look like placeholder device codes - confirm.
        self.device = EncryptionDevice(
            generate_device_uuid(),
            12345,
            67890,
            str(self.pk), # location: str
        )
    @property
    def encrypter(self):
        """EncryptionMediator bound to this contest's metadata/context/device."""
        from electionguard.encrypt import EncryptionMediator
        return EncryptionMediator(
            self.metadata,
            self.context,
            self.device,
        )
    @property
    def decrypter(self):
        """Fresh DecryptionMediator for this contest's context."""
        from electionguard.decryption_mediator import DecryptionMediator
        return DecryptionMediator(
            'decryption-mediator',
            self.context,
        )
    @property
    def ballot_box(self):
        """BallotBox over this contest's metadata, context and store."""
        from electionguard.ballot_box import BallotBox
        return BallotBox(self.metadata, self.context, self.store)
    def get_manifest(self):
        """Serialize this contest as an ElectionGuard manifest dictionary."""
        return {
            "geopolitical_units": [
                {
                    "type": self.type,
                    "name": self.name,
                    "object_id": str(self.pk) + '-unit',
                },
            ],
            "parties": [],
            "candidates": [
                {
                    "object_id": str(candidate.pk),
                    "ballot_name": {
                        "text": [
                            {
                                "language": 'en',
                                "value": candidate.name,
                            }
                        ]
                    },
                    "name": {
                        "text": []
                    }
                } for candidate in self.candidate_set.all()
            ],
            "contests": [
                {
                    "@type": "CandidateContest",
                    "object_id": str(self.pk),
                    "sequence_order": 0,
                    "ballot_selections": [
                        {
                            "object_id": f"{candidate.pk}-selection",
                            "sequence_order": i,
                            "candidate_id": str(candidate.pk),
                        }
                        for i, candidate in enumerate(self.candidate_set.all())
                    ],
                    "ballot_title": {
                        "text": [
                            {
                                "value": self.name,
                                "language": "en"
                            }
                        ]
                    },
                    "ballot_subtitle": {
                        "text": [
                            {
                                "value": self.name,
                                "language": "en"
                            }
                        ]
                    },
                    "vote_variation": self.variation,
                    "electoral_district_id": f"{self.pk}-unit",
                    "name": self.name,
                    "number_elected": self.number_elected,
                    "votes_allowed": self.votes_allowed,
                }
            ],
            "ballot_styles": [
                {
                    "object_id": f"{self.pk}-style",
                    "geopolitical_unit_ids": [f"{self.pk}-unit"],
                }
            ],
            "name": {
                "text": [
                    {
                        "value": "Test Contest",
                        "language": "en"
                    }
                ]
            },
            "start_date": "2020-03-01T08:00:00-05:00",
            "end_date": "2020-03-01T20:00:00-05:00",
            "election_scope_id": f"{self.pk}-style",
            "type": "primary",
            "spec_version": "v1.2.1"
        }
    def __str__(self):
        return self.name
    def send_mail(self, title, body, link, field):
        """Spool a bulk-email job (send_contest_mail) for all voters of this contest."""
        Caller(
            callback='djelectionguard.models.send_contest_mail',
            kwargs=dict(
                contest_id=str(self.pk),
                title=title,
                body=body,
                link=link,
                field=field,
            ),
        ).spool('email')
def send_contest_mail(contest_id, title, body, link, field, **kwargs):
    """Fan out one send_voter_mail job per voter of the contest.

    Each job is spooled on the 'email' queue with up to 25 delivery
    attempts; `field` names the Voter timestamp column used by
    send_voter_mail to skip voters already notified.
    """
    voters_pks = Voter.objects.filter(
        contest__pk=contest_id
    ).values_list('pk', flat=True)
    for pk in voters_pks:
        Caller(
            callback='djelectionguard.models.send_voter_mail',
            max_attempts=25,
            kwargs=dict(
                voter_id=str(pk),
                title=title,
                body=body,
                link=link,
                field=field,
            ),
        ).spool('email')
def send_voter_mail(voter_id, title, body, link, field):
    """Send one OTP-link email to a voter, unless it was already sent.

    The literal substring 'LINK' in `body` is replaced with a fresh one-time
    login URL; the send time is then recorded in the Voter column named by
    `field` so repeated jobs are idempotent.
    """
    voter = Voter.objects.select_related('user').get(pk=voter_id)
    # Already sent: the tracking field holds a truthy timestamp.
    if f := getattr(voter, field):
        return
    otp_link = voter.user.otp_new(redirect=link).url
    voter.user.save()
    send_mail(
        title,
        body.replace('LINK', otp_link),
        settings.DEFAULT_FROM_EMAIL,
        [voter.user.email],
    )
    setattr(voter, field, timezone.now())
    voter.save()
def decrypt_contest(
    contest_id,
    user_id,
    send_voters_email,
    voters_email_title,
    voters_email_msg
):
    """Background job: decrypt a contest and email the outcome.

    Always notifies the mediator (`user_id`) of success or failure; on
    success, optionally fans the closing email out to all voters via
    Contest.send_mail.
    """
    from djlang.utils import gettext as _
    contest = None
    med_email_msg = None
    has_error = True
    user = User.objects.get(id=user_id)
    try:
        contest = Contest.objects.get(id=contest_id)
        contest.decrypt()
        has_error = False
        med_email_msg = _('The contest %(contest)s has been tallied', contest=contest.name)
    except Contest.DoesNotExist:
        med_email_msg = _('The contest you wanted to decrypt was not found')
    except Exception as e:
        # Broad on purpose: any decryption failure is reported to the mediator.
        med_email_msg = _(
            'The decryption raised the exception %(exception)s',
            exception=e
        )
    finally:
        if med_email_msg:
            send_mail(
                _(
                    'Contest %(contest)s decryption',
                    contest=contest.name if contest else _('unknown')
                ),
                med_email_msg,
                settings.DEFAULT_FROM_EMAIL,
                [user.email]
            )
        if send_voters_email and not has_error:
            contest.send_mail(
                voters_email_title,
                voters_email_msg,
                reverse('contest_detail', args=[contest_id]),
                'close_email_sent'
            )
def upload_picture(instance, filename):
    """Storage callback: random UUID file name, original extension preserved."""
    extension = filename.rsplit('.', 1)[-1]
    return f'{uuid.uuid4()}.{extension}'
class Candidate(models.Model):
    """A contest candidate; `score` is filled in by Contest.decrypt()."""
    id = models.UUIDField(
        primary_key=True,
        editable=False,
        default=uuid.uuid4
    )
    contest = models.ForeignKey(
        Contest,
        on_delete=models.CASCADE,
    )
    name = models.CharField(max_length=255)
    subtext = models.CharField(
        max_length=255,
        blank=True,
        null=True
    )
    description = models.CharField(
        max_length=300,
        blank=True,
        null=True
    )
    picture = models.ImageField(
        upload_to=upload_picture,
        blank=True,
        null=True
    )
    # Decrypted tally for this candidate; None until decryption runs.
    score = models.IntegerField(null=True)
    def __str__(self):
        return self.name
    class Meta:
        # Highest score first, alphabetical within ties.
        ordering = ['-score', 'name']
        unique_together = [('name', 'contest')]
class Guardian(models.Model):
    """A key guardian for a contest; its keypair lives only in memcached."""
    id = models.UUIDField(
        primary_key=True,
        editable=False,
        default=uuid.uuid4
    )
    contest = models.ForeignKey(
        Contest,
        on_delete=models.CASCADE,
    )
    user = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        on_delete=models.CASCADE,
    )
    created = models.DateTimeField(auto_now_add=True)
    # Audit timestamps for each step of the key ceremony.
    downloaded = models.DateTimeField(null=True, blank=True)
    verified = models.DateTimeField(null=True, blank=True)
    erased = models.DateTimeField(null=True, blank=True)
    uploaded = models.DateTimeField(null=True, blank=True)
    uploaded_erased = models.DateTimeField(null=True, blank=True)
    # Guardian order in the ceremony (allocated by Contest.current_sequence).
    sequence = models.PositiveIntegerField(null=True, blank=True)
    key_sha1 = models.CharField(max_length=255, null=True, blank=True)
    def __str__(self):
        return str(self.user)
    class Meta:
        ordering = ['created']
    def delete_keypair(self):
        """Drop the keypair from memcached and stamp the matching erase field."""
        Client(settings.MEMCACHED_HOST).delete(str(self.pk))
        if not self.uploaded:
            self.erased = timezone.now()
        else:
            self.uploaded_erased = timezone.now()
        self.save()
    def upload_keypair(self, content):
        """Store a guardian-provided keypair blob in memcached and stamp the upload."""
        Client(settings.MEMCACHED_HOST).set(str(self.pk), content)
        self.uploaded = timezone.now()
        self.save()
    def get_keypair(self):
        """Return the pickled keypair, creating and caching one on first use."""
        client = Client(settings.MEMCACHED_HOST)
        result = client.get(str(self.pk))
        if not result:
            # First access: create a fresh electionguard Guardian and cache it.
            from electionguard.guardian import Guardian
            sequence = self.contest.current_sequence
            guardian = Guardian(
                f'guardian-{self.pk}',
                sequence,
                self.contest.number_guardians,
                self.contest.quorum,
            )
            result = pickle.dumps(guardian)
            client.set(str(self.pk), result)
            self.key_sha1 = hashlib.sha1(result).hexdigest()
            self.sequence = sequence
            self.save()
        return result
    def get_guardian(self):
        """Unpickle the cached keypair into an electionguard Guardian.

        NOTE(review): pickle.loads on memcached content - safe only while the
        cache is trusted infrastructure.
        """
        return pickle.loads(self.get_keypair())
class Voter(models.Model):
    """Joins a user to a contest; tracks their cast ballot and notification emails."""
    id = models.UUIDField(
        primary_key=True,
        editable=False,
        default=uuid.uuid4
    )
    contest = models.ForeignKey(
        Contest,
        on_delete=models.CASCADE,
    )
    user = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        on_delete=models.CASCADE,
    )
    # Whether the voter has cast a ballot (None = unknown/not yet).
    casted = models.BooleanField(null=True, blank=True)
    ballot_id = models.UUIDField(null=True, blank=True)
    ballot_sha1 = models.CharField(max_length=255, null=True, blank=True)
    # Timestamps consulted by send_voter_mail to avoid duplicate sends.
    open_email_sent = models.DateTimeField(null=True, blank=True)
    close_email_sent = models.DateTimeField(null=True, blank=True)
|
# !/usr/bin/env python3.7
# encoding: utf-8
# site:D:\users\lenovo\PycharmProjects\untitled
# Time : 2019/4/24 16:04
# Author : 御承扬
# e-mail:2923616405@qq.com
# site:
# File : ID3_code.py
# @oftware: PyCharm
import math
import operator
def createDataSet():
    """Return the hard-coded weather training set as a list of rows.

    Row layout: [outlook, temperature, humidity, windy, label] with
      outlook:     0 rain, 1 overcast, 2 sunny
      temperature: 0 cool, 1 mild, 2 hot
      humidity:    0 normal, 1 high
      windy:       0 not, 1 medium, 2 very
    """
    rows = (
        (1, 2, 1, 0, 'no'),
        (1, 2, 1, 2, 'no'),
        (1, 2, 1, 1, 'no'),
        (2, 2, 1, 0, 'yes'),
        (2, 2, 1, 1, 'yes'),
        (0, 1, 1, 0, 'no'),
        (0, 1, 1, 1, 'no'),
        (0, 2, 0, 0, 'yes'),
        (0, 0, 0, 1, 'no'),
        (0, 2, 0, 2, 'no'),
        (2, 0, 0, 2, 'yes'),
        (2, 0, 0, 1, 'yes'),
        (1, 1, 1, 0, 'no'),
        (1, 1, 1, 1, 'no'),
        (1, 0, 0, 0, 'yes'),
        (1, 0, 0, 1, 'yes'),
        (0, 1, 0, 0, 'no'),
        (0, 1, 0, 1, 'no'),
        (1, 1, 0, 1, 'yes'),
        (1, 2, 0, 2, 'yes'),
        (2, 1, 1, 2, 'yes'),
        (2, 1, 1, 1, 'yes'),
        (2, 2, 0, 0, 'yes'),
        (0, 1, 1, 2, 'no'),
    )
    return [list(row) for row in rows]
def calcShannonEnt(dataSet):
    """Shannon entropy (base 2) of the class labels in the last column of dataSet."""
    total = len(dataSet)
    # Count occurrences of each label.
    label_counts = {}
    for row in dataSet:
        label = row[-1]
        label_counts[label] = label_counts.get(label, 0) + 1
    # Ent = -sum(p * log2(p)) over the label distribution.
    entropy = 0.0
    for count in label_counts.values():
        p = count / float(total)
        entropy -= p * math.log(p, 2)
    return entropy
def splitDataSet(dataSet, axis, value):
    """Rows whose `axis`-th feature equals `value`, with that feature removed."""
    return [
        row[:axis] + row[axis + 1:]
        for row in dataSet
        if row[axis] == value
    ]
# Pick the split with the lowest post-split label entropy, i.e. the feature
# with the largest information gain.
def chooseBestFeatureToSplit(dataSet):
    """Return the index of the feature with the largest information gain."""
    base_entropy = calcShannonEnt(dataSet)
    n_features = len(dataSet[0]) - 1  # last column is the class label
    best_gain = 0.0
    best_feature = 0
    for feat in range(n_features):
        observed_values = {row[feat] for row in dataSet}
        # Weighted entropy of the partition induced by this feature.
        split_entropy = 0.0
        for value in observed_values:
            subset = splitDataSet(dataSet, feat, value)
            weight = len(subset) / float(len(dataSet))
            split_entropy += weight * calcShannonEnt(subset)
        gain = base_entropy - split_entropy
        if gain > best_gain:
            best_gain = gain
            best_feature = feat
    return best_feature
# When every feature has been consumed but a leaf's labels are still mixed,
# classify the leaf by majority vote over the remaining class labels.
def majorityCnt(classList):
    """Return the most frequent label in classList (first seen wins ties).

    Bug fix: the original called dict.iteritems(), which was removed in
    Python 3 (this file targets python3.7 per its shebang) and raised
    AttributeError; dict.items() is the Python 3 equivalent.
    """
    classCount = {}
    for vote in classList:
        classCount[vote] = classCount.get(vote, 0) + 1
    # Stable sort keeps first-seen order among equal counts.
    sortedClassCount = sorted(classCount.items(), key=operator.itemgetter(1), reverse=True)
    return sortedClassCount[0][0]
def createTree(dataSet, labels): # recursively build the ID3 decision tree
    classList = [example[-1] for example in dataSet] # class labels of all samples
    if classList.count(classList[0]) == len(classList): # all samples share one class: pure leaf, stop
        return classList[0]
    if len(dataSet[0]) == 1: # only the label column left: features exhausted, use majority vote
        return majorityCnt(classList)
    bestFeat = chooseBestFeatureToSplit(dataSet) # feature with the largest information gain
    bestFeatLabel = labels[bestFeat] # its human-readable name
    myTree = {bestFeatLabel:{}} # subtree rooted at the chosen feature
    del(labels[bestFeat]) # NOTE: mutates the caller's labels list in place
    # Branch on every observed value of the best feature.
    featValues = [example[bestFeat] for example in dataSet]
    uniqueVals = set(featValues)
    for value in uniqueVals:
        subLabels = labels[:]
        subDataSet = splitDataSet(dataSet, bestFeat, value)
        myTree[bestFeatLabel][value] = createTree(subDataSet, subLabels)
    print ("myTree:", myTree)
    return myTree
# Prediction: walk the decision tree with a sample's feature values.
def classify(inputTree, featLabels, testVec):
    """Classify testVec by descending inputTree; featLabels maps node names to indices."""
    node_feature = next(iter(inputTree))           # feature tested at this node
    branches = inputTree[node_feature]
    feat_idx = featLabels.index(node_feature)      # position of that feature in testVec
    for branch_value in branches:
        if testVec[feat_idx] != branch_value:
            continue
        child = branches[branch_value]
        if isinstance(child, dict):
            result = classify(child, featLabels, testVec)
        else:
            result = child
    return result
if __name__ == '__main__':
    # Train on the hard-coded weather data, then classify one test sample.
    dataSet = createDataSet()
    labels = ['outlook', 'tem', 'hum', 'windy']
    labelsForCreateTree = labels[:] # createTree mutates its labels list; keep the original for classify
    Tree = createTree(dataSet, labelsForCreateTree)
    testvec = [2, 2, 1, 0] # sunny / hot / high humidity / no wind
    print(classify(Tree, labels, testvec))
# Commented-out pandas prototype of the entropy computation, kept for reference:
# from pandas import read_excel
# import pandas
# import math
# df=read_excel(r'.\\weather.xlsx')
# # print(df['play'])
# x=df.iloc[:,:-1]
# y=df.iloc[:,4].values
# count_Y = 0
# count_N = 0
# for i in y:
#     if i == 'yes':
#         count_Y=count_Y+1
#     elif i == 'no':
#         count_N=count_N+1
#
# E0 = -1*((count_Y/14*math.log2(count_Y/14))+(count_N/14*math.log2(count_N/14)))
# print('%.2f'%E0)
# i=1
# j=2
# print(x.iloc[i][j])
|
#!bin/usr/env python
import numpy as np
import matplotlib.pyplot as plt
"""
Reads 2 column data file with each column formated as "-----[dat1]-------[dat2]" where dashes are whitespace and [dat1] and [dat2] are the columns, each containing
"""
def readDat(f):
    """Read a whitespace-delimited two-column data file.

    Takes the file name and returns (col1, col2) as numpy arrays of floats;
    tokens that do not parse as numbers are skipped independently per column.

    Fixes over the original: values are actually converted to float (the old
    code built string arrays, contradicting its own docstring), the file is
    closed via a context manager, and lines with fewer than two tokens no
    longer raise IndexError.
    """
    def _append_if_number(token, dest):
        # Keep only tokens that parse as floats (same filter as isNumber).
        try:
            dest.append(float(token))
        except ValueError:
            pass

    column1 = []
    column2 = []
    with open(f, 'r') as infile:
        for line in infile:
            words = line.split()
            if len(words) < 2:
                continue
            _append_if_number(words[0], column1)
            _append_if_number(words[1], column2)
    return np.array(column1), np.array(column2)
# Borrowed from user Daniel Goldberg on Stack Overflow:
# http://stackoverflow.com/q/354038
def isNumber(s):
    """Return True when `s` parses as a float, else False.

    Used to filter .dat tokens so only numeric entries reach the arrays.
    """
    try:
        float(s)
    except ValueError:
        return False
    return True
def main():
    """
    Read xy.dat, plot column 2 against column 1, and show the figure.
    """
    dat = readDat("xy.dat")
    # '-r': solid red line.
    plt.plot(dat[0], dat[1], '-r')
    plt.xlabel('x variable')
    plt.ylabel('y variable')
    plt.title('Graphing 2 Column Data')
    plt.show()
    # Kept for the commented stats below; otherwise unused.
    yvals = np.array(dat[1])
    # print 'Mean y value is ', np.average(yvals)
    # print 'Max y value is ', np.amax(yvals)
    # print 'Minimum y value is ', np.amin(yvals)
def test_readDat():
    """
    Check readDat against the known contents of test.dat (3 rows, 2 columns).
    """
    test = readDat("test.dat")
    # Expected (column1, column2) values of test.dat.
    case = ([1.0,1.0,1.0],[1.0,1.0,0.0])
    success = 0
    def a_eq(x,y,tol = 1E-5):
        # Approximate float equality after coercing both sides to float.
        x = float(x)
        y = float(y)
        return (abs(x - y) < tol)
    for (i,j) in zip(test[0], case[0]):
        if a_eq(i,j):
            success += 1
    for (i,j) in zip(test[1], case[1]):
        if a_eq(i,j):
            success += 1
    # All six values (2 columns x 3 rows) must match.
    assert (success == 6)
# Script entry point: plot xy.dat when run directly.
if __name__ == "__main__":
    main()
|
import time, datetime, vk_api, os
def main():
    """Set the VK profile status to a countdown to 2019-11-01, refreshed every 30 s.

    Reads the VK API token from the `token` environment variable; auth errors
    are printed and the program exits.
    """
    token = os.environ.get('token')
    vk_session = vk_api.VkApi(token=str(token))
    try:
        def countdown(stop):
            # Loop until the remaining time is exactly zero at a 30 s tick.
            # NOTE(review): difference.seconds ignores .days in the zero test,
            # and a `stop` in the past yields negative days - the exact-zero
            # condition may then never fire; confirm intended behavior.
            while True:
                difference = stop - datetime.datetime.now()
                count_hours, rem = divmod(difference.seconds, 3600)
                count_minutes, count_seconds = divmod(rem, 60)
                if difference.days == 0 and count_hours == 0 and count_minutes == 0 and count_seconds == 0:
                    print("end")
                    break
                #vk_session.auth()
                #vk_session.method("status.set",f"{str(difference.days)}:{str(count_hours)}:{str(count_minutes)}:{str(count_seconds)}")
                vk_session.method("status.set",{"text":f"{str(difference.days)}:{str(count_hours)}:{str(count_minutes)}:{str(count_seconds)}"})
                time.sleep(30)
        end_time = datetime.datetime(2019, 11, 1, 0, 0, 0)
        countdown(end_time)
    except vk_api.AuthError as error_msg:
        print(error_msg)
        return
main()
|
# Bike-brand keywords to look for in listings.
# NOTE(review): "\bgt\b" in a plain (non-raw) string contains backspace
# characters (\x08), not regex word boundaries - presumably r"\bgt\b" was
# intended if these are used as regex patterns; confirm against the caller.
keywords = ["giant", "norco", "jamis", "trek", "\bgt\b", "specialized", "liv", "cannondale", "marin"]
# Kijiji search URL: bikes around Markham / York Region, 50 km radius.
url = '''https://www.kijiji.ca/b-markham-york-region/bikes/k0l1700274?ll=43.879401%2C-79.414110&address=23+Farmstead+Rd%2C+Richmond+Hill%2C+ON+L4S+1V8%2C+Canada&radius=50.0&dc=true'''
# Generated by Django 2.0.7 on 2019-01-15 15:13
from django.db import migrations
class Migration(migrations.Migration):
    """Rename result_status -> result_state on the case/step/suite result models."""
    dependencies = [
        ('main', '0018_auto_20190104_1113'),
    ]
    operations = [
        migrations.RenameField(
            model_name='caseresult',
            old_name='result_status',
            new_name='result_state',
        ),
        migrations.RenameField(
            model_name='stepresult',
            old_name='result_status',
            new_name='result_state',
        ),
        migrations.RenameField(
            model_name='suiteresult',
            old_name='result_status',
            new_name='result_state',
        ),
    ]
|
import tensorflow as tf
import numpy as np
from tensorflow import keras as K
if __name__ == "__main__":
    # Scratchpad for TF1 (graph-mode) tensor-shape experiments; most trials
    # are disabled, either commented out or parked in throwaway string
    # literals, and only variable initialization actually runs.
    a = np.array([[1, 1, 1, 1], [2, 2, 2, 2], [3, 3, 3, 3]])
    b = np.array([[[1], [1], [1], [1]], [[2], [2], [2], [2]], [[3], [3], [3], [3]]])
    aa = tf.Variable(initial_value=a)
    bb = tf.Variable(initial_value=b)
    c = tf.Variable(initial_value=np.array([[1, 1], [2, 2]]))
    d = tf.Variable(initial_value=np.array([[[1, 1], [2, 2]], [[3, 3], [4, 4]]]))
    e = tf.Variable(initial_value=np.array([[3, 3], [4, 4]]))
    # cc = tf.matmul(aa, bb)
    with tf.Session() as sess:
        with tf.device("cpu:0"):
            # NOTE(review): tf.initialize_all_variables is the deprecated
            # pre-1.0 spelling of tf.global_variables_initializer.
            tf.initialize_all_variables().run()
            '''aa = tf.reshape(aa, shape=[3, 1, 4])
            print(aa.eval())
            ans = []
            for i in range(a.shape[0]):
                ans.append(tf.matmul(aa[i], bb[i]))
            anss = ans[0]
            for i in range(len(ans) - 1):
                anss = tf.concat((anss, ans[i+1]), axis=0)
            print(anss.eval())'''
            '''print(tf.reshape(d, shape=[2, 4]).eval())
            print(tf.transpose(d).eval())
            print(tf.concat([d[i] for i in range(d.shape[0])], axis=0).eval())
            print(tf.matmul(c, tf.concat([d[i] for i in range(d.shape[0])], axis=1)).eval())
            dc = tf.reshape(tf.reshape(d, [-1, 2]) @ c, shape=[-1, 2, 2])
            print(dc.eval())
            print(c[None])
            cc = tf.tile(c[None], [2, 1, 1])
            print((tf.tile(c[None], [2, 1, 1]) @ d).eval())'''
            # print(tf.multiply(c, e).eval())
            # print(tf.concat([d[i] for i in range(d.shape[0])], axis=2).eval())
            # print(cc.eval())
|
#==== MAXIMUM PATH SUM BETWEEN TWO LEAF NODES: ALGORITHM ====
# If the node is None, do nothing and just return.
# If the node is a leaf, return its data.
# Otherwise, update the running maximum and return max(l, r) + node.data.
# IMPORTANT: because a list is passed into the helper, it is mutated in place,
# so the caller sees the updated maximum without needing a return value.
class BinaryTree():
    """Minimal binary tree node: a value plus left/right child links."""
    def __init__(self, data):
        self.data = data
        self.left = None
        self.right = None


def maxPathSum(root):
    """Return the maximum path sum between two leaves of the tree at `root`.

    Mirrors the original behavior: if no node with two children exists, the
    sentinel minimum (-2**32) is returned unchanged.
    """
    sentinel = - 2**32
    best = [sentinel]          # one-cell list so the helper can mutate it in place
    maxPathSumUtil(root, best)
    return best[0]


def maxPathSumUtil(root, res):
    """Post-order helper: record the best leaf-to-leaf sum seen in res[0] and
    return the best downward leaf-to-`root` sum."""
    if root is None:
        return
    if root.left is None and root.right is None:
        return root.data       # leaf: path starts here
    down_left = maxPathSumUtil(root.left, res)
    down_right = maxPathSumUtil(root.right, res)
    if root.left is None:
        return down_right + root.data
    if root.right is None:
        return down_left + root.data
    # Both children exist: a leaf-to-leaf path can turn at this node.
    res[0] = max(res[0], down_left + down_right + root.data)
    return max(down_left, down_right) + root.data
# Demo tree from the classic GeeksforGeeks example; expected output is 27
# (path 3 -> 6 -> 9 -> 0 -> -1 -> 10).
root = BinaryTree(-15)
root.left = BinaryTree(5)
root.right = BinaryTree(6)
root.left.left = BinaryTree(-8)
root.left.right = BinaryTree(1)
root.left.left.left = BinaryTree(2)
root.left.left.right = BinaryTree(6)
root.right.left = BinaryTree(3)
root.right.right = BinaryTree(9)
root.right.right.right= BinaryTree(0)
root.right.right.right.left = BinaryTree(4)
root.right.right.right.right = BinaryTree(-1)
root.right.right.right.right.left = BinaryTree(10)
print(maxPathSum(root))
import torch
import torchvision.models as models
import numpy as np
from tqdm import tqdm
import matplotlib.pylab as plt
from insilico_Exp_torch import TorchScorer
from grad_RF_estim import grad_RF_estimate, gradmap2RF_square
from GAN_utils import upconvGAN
from ZO_HessAware_Optimizers import CholeskyCMAES
from layer_hook_utils import get_module_names, register_hook_by_module_names, layername_dict
G = upconvGAN("fc6").cuda()
G.requires_grad_(False)
#%%
import sys
sys.path.append(r"D:\Github\Visual_Neuron_Modelling")
from CorrFeatTsr_lib import Corr_Feat_Machine
from featvis_lib import load_featnet, rectify_tsr, tsr_posneg_factorize
from CorrFeatTsr_visualize_lib import CorrFeatScore
#%% Creat a model neuron basis
scorer = TorchScorer("resnet50")
#%% Construct the modelling target neuron
neurlayer = ".layer4.Bottleneck0"
chan = 10
module_names, module_types, module_spec = get_module_names(scorer.model, input_size=(3, 227, 227), device="cuda", show=False);
layer_key = [k for k, v in module_names.items() if v == neurlayer][0]
feat_outshape = module_spec[layer_key]['outshape']
assert len(feat_outshape) == 3 # fc neurlayer will fail
cent_pos = (feat_outshape[1]//2, feat_outshape[2]//2)
print("Center position on the feature map is (%d %d) of neurlayer %s (tensor shape %s)" % (*cent_pos, neurlayer, feat_outshape))
scorer.select_unit(("resnet50", neurlayer, 25, 6, 6), allow_grad=True)
#%% Modelling Network, approximating the other neuron's tuning function.
# Model Network
from featvis_lib import load_featnet
net, featnet = load_featnet("resnet50_linf8")
featFetcher = Corr_Feat_Machine()
# featFetcher.register_hooks(net, ["conv2_2", "conv3_3", "conv4_3", "conv5_3"], netname='resnet_robust', verbose=False)
# featFetcher.register_hooks(net, [".layer3.Bottleneck2", ".layer3.Bottleneck6", ".layer4.Bottleneck0"], netname='resnet50_robust', verbose=False)
featFetcher.register_hooks(net, ["layer2", "layer3", "layer4"], netname='resnet50_robust', verbose=False)
featFetcher.init_corr()
#%% Use random image sampled from G to stimulate target and modeler
for i in tqdm(range(50)):
imgs = G.visualize(3 * torch.randn(40,4096).cuda())
resp = scorer.score_tsr_wgrad(imgs, B=40)
with torch.no_grad():
featnet(imgs)
del imgs
featFetcher.update_corr(resp.cpu())
featFetcher.calc_corr()
#%%
apprx_layer = "layer3"
cctsr = featFetcher.cctsr[apprx_layer].numpy()
Ttsr = featFetcher.Ttsr[apprx_layer].numpy()
stdstr = featFetcher.featStd[apprx_layer].numpy()
covtsr = cctsr * stdstr
rect_mode = "Tthresh"; thresh = (None, 3)
bdr = 1; NF = 2
rect_cctsr = rectify_tsr(covtsr, rect_mode, thresh, Ttsr=Ttsr)
rect_cctsr = np.nan_to_num(rect_cctsr)
Hmat, Hmaps, ccfactor, FactStat = tsr_posneg_factorize(rect_cctsr,
bdr=bdr, Nfactor=NF, show=True,)
#%% Visualize features of the factorized model
from featvis_lib import vis_featmap_corr, vis_featvec_wmaps
finimgs_col, mtg_col, score_traj_col = vis_featvec_wmaps(ccfactor, Hmaps, net, G, apprx_layer, netname="resnet50_linf8",
featnet=featnet, bdr=0, Bsize=2, figdir="", savestr="", imshow=False, score_mode="cosine")
#%%
apprx_layer = "layer3"
plt.imshow(np.nanmean(np.abs(rect_cctsr), axis=0))
plt.colorbar()
plt.show()
#%% Calculate gradient Amplitude RF for the recording unit.
gradAmpmap = grad_RF_estimate(scorer.model, neurlayer, (25, 6, 6), input_size=(3,256,256),
device="cuda", show=True, reps=100, batch=10)
Xlim, Ylim = gradmap2RF_square(gradAmpmap,)
#%%
finimgs_col_neur, mtg_col_neur, _ = vis_featvec_wmaps(ccfactor, Hmaps, scorer.model, G, neurlayer, netname="resnet50",
featnet=None, bdr=0, Bsize=2, figdir="", savestr="", imshow=False, score_mode="cosine")
#%%
batch = 5
langevin_eps = 1.0
code = 2 * torch.randn(batch, 4096).cuda()
code.requires_grad_(True)
optimizer = torch.optim.SGD([code], lr=0.025)
for i in range(100):
code = code + torch.randn(batch, 4096).cuda() * langevin_eps
optimizer.zero_grad()
scores = scorer.score_tsr_wgrad(G.visualize(code))
avg_score = scores.mean().detach().item()
loss = (-scores.sum())
loss.backward()
optimizer.step()
print("Score {}".format(avg_score))
#%%
from torchvision.utils import make_grid
from torchvision.transforms import ToPILImage
with torch.no_grad():
imgs = G.visualize(code).cpu()
ToPILImage()(make_grid(imgs.cpu())).show()
#%%
import cma
es = cma.CMAEvolutionStrategy(4096 * [0], 1.0)
for i in tqdm(range(150)):
z = es.ask()
z_arr = np.array(z)
imgs = G.visualize(torch.tensor(z_arr).float().cuda())
resp = scorer.score_tsr(imgs, B=40)
es.tell(z, (-resp).tolist())
print("Score {:.3f} +- {:.3f}".format(resp.mean(),resp.std()))
# Use CMAES is better than Adam or SGD in optimizing this scorer.
ToPILImage()(make_grid(imgs.cpu())).show()
#%% Record feature correlations on the robust ResNet50 while CMA-ES optimizes.
net, featnet = load_featnet("resnet50_linf8")
featFetcher = Corr_Feat_Machine()
featFetcher.register_hooks(net, ["layer2", "layer3", "layer4"], netname='resnet50_robust', verbose=False)
#%%
import cma
featFetcher.init_corr()
es = cma.CMAEvolutionStrategy(4096 * [0], 1.0)
for i in tqdm(range(150)):
    z = es.ask()
    z_arr = np.array(z)
    imgs = G.visualize(torch.tensor(z_arr).float().cuda())
    # NOTE(review): this cell uses score_tsr_wgrad while the CMA cell above
    # used score_tsr — confirm the grad-enabled path is intended here.
    resp = scorer.score_tsr_wgrad(imgs, B=40)
    es.tell(z, (-resp).tolist())
    print("Score {:.3f} +- {:.3f}".format(resp.mean(),resp.std()))
    with torch.no_grad():
        featnet(imgs)  # forward pass populates the hooked activations
    del imgs  # free GPU memory before the correlation update
    featFetcher.update_corr(resp.cpu())
featFetcher.calc_corr()
# Use CMAES is better than Adam or SGD in optimizing this scorer.
imgs = G.visualize(torch.tensor(z_arr).float().cuda()).cpu()
ToPILImage()(make_grid(imgs)).show()
#%% Factorize the layer3 correlation tensor into NF spatial x channel factors.
apprx_layer = "layer3"
cctsr = featFetcher.cctsr[apprx_layer].numpy()
Ttsr = featFetcher.Ttsr[apprx_layer].numpy()
stdstr = featFetcher.featStd[apprx_layer].numpy()
covtsr = cctsr * stdstr  # correlation * feature std = covariance
rect_mode = "Tthresh"; thresh = (None, 5)  # rectify by T-statistic threshold
bdr = 1; NF = 3  # border pixels to trim; number of NMF factors
rect_cctsr = rectify_tsr(covtsr, rect_mode, thresh, Ttsr=Ttsr)
rect_cctsr = np.nan_to_num(rect_cctsr)  # NaNs would break the factorization
Hmat, Hmaps, ccfactor, FactStat = tsr_posneg_factorize(rect_cctsr,
    bdr=bdr, Nfactor=NF, show=True,)
#%%
def image_gradMap(scorer, img_tsr):
    """Return d(score)/d(pixel) for a batch of images.

    Parameters
    ----------
    scorer : object with a ``score_tsr_wgrad(tensor) -> tensor`` method
        Differentiable scorer; its per-image scores are summed before backward.
    img_tsr : torch.Tensor
        Batch of images, any tensor (leaf or not).

    Returns
    -------
    torch.Tensor of the same shape as ``img_tsr`` holding the gradient.
    """
    # BUG FIX: the original called requires_grad_ directly on the caller's
    # tensor, which mutates the caller's autograd state and raises a
    # RuntimeError when img_tsr is a non-leaf tensor. A detached clone is a
    # fresh leaf, so .grad is always populated and the input is untouched.
    img_tsr_wg = img_tsr.detach().clone()
    img_tsr_wg.requires_grad_(True)
    scores = scorer.score_tsr_wgrad(img_tsr_wg)
    scores.sum().backward()
    return img_tsr_wg.grad
#%% Pixel-level saliency: gradient magnitude of the score w.r.t. the images.
imggradAmp = image_gradMap(scorer, imgs[0:6,:,:,:])
# Collapse batch and channel dims into a single spatial saliency map.
imggradmap = imggradAmp.abs().sum(dim=[0, 1]).cpu()
plt.figure()
plt.imshow(imggradmap)
plt.show()
|
class Node:
    """A binary-tree node holding a value and links to two children."""
    def __init__(self, data):
        self.data = data
        self.left = self.right = None
def isSymmetricSubTree(left, right):
    """Return True iff the two subtrees are mirror images of each other."""
    if left is None or right is None:
        # symmetric only when both are absent
        return left is right
    return (left.data == right.data
            and isSymmetricSubTree(left.left, right.right)
            and isSymmetricSubTree(left.right, right.left))
def isSymmetric(head):
    """Return True iff the tree is mirror-symmetric (empty tree -> False)."""
    if head is None:
        return False
    return isSymmetricSubTree(head.left, head.right)
def isBSTSubTree(left, right, parentData):
    """Check that the two subtrees of a node valued ``parentData`` satisfy
    the BST property.

    BUG FIX: the original only compared each child with its immediate
    parent, so e.g. Node(10) with left child 5 whose right child is 20 was
    wrongly accepted. Every node must respect the full (lo, hi) range
    inherited from all of its ancestors; equality with the bound is allowed,
    matching the original's use of strict > / < comparisons.
    """
    return (_isBSTInRange(left, float('-inf'), parentData) and
            _isBSTInRange(right, parentData, float('inf')))
def _isBSTInRange(node, lo, hi):
    # Every value in the subtree rooted at node must satisfy lo <= value <= hi.
    if node is None:
        return True
    if not (lo <= node.data <= hi):
        return False
    return (_isBSTInRange(node.left, lo, node.data) and
            _isBSTInRange(node.right, node.data, hi))
def isBST(head):
    """Return True iff the tree rooted at ``head`` is a binary search tree."""
    if head is not None:
        return isBSTSubTree(head.left, head.right, head.data)
    return True #empty tree must BST
if __name__ == "__main__":
    # tree1: a valid BST that is not mirror-symmetric
    tree1 = Node(8)
    tree1.left = Node(3)
    tree1.right = Node(10)
    tree1.left.left = Node(1)
    tree1.left.right = Node(6)
    tree1.left.right.left = Node(4)
    tree1.left.right.right = Node(7)
    tree1.right.right = Node(14)
    tree1.right.right.left = Node(13)
    print('isSymmetric: {}'.format(isSymmetric(tree1)))
    print('isBST: {}'.format(isBST(tree1)))
    # tree2: mirror-symmetric, but not a BST (6 < 314 sits on the right)
    tree2 = Node(314)
    tree2.left = Node(6)
    tree2.right = Node(6)
    tree2.left.right = Node(2)
    tree2.left.right.right = Node(3)
    tree2.right.left = Node(2)
    tree2.right.left.left = Node(3)
    print('isSymmetric: {}'.format(isSymmetric(tree2)))
    print('isBST: {}'.format(isBST(tree2)))
class Time(object):
    """A simple 24-hour (military) clock time made of an hour and a minute.

    Comparison operators order times lexicographically on (hour, minute).
    Each comparison returns None when `other` is None (behavior preserved
    from the original implementation).
    """
    def __init__(self, hour, minute):
        # Military time; no wrapping or validation is performed here.
        self.hour = hour
        self.minute = minute
    def __str__(self):
        # Zero-pad the minute so we don't print time as 10:7 instead of 10:07.
        if self.minute < 10:
            return str(int(self.hour)) + ":" + "0" + str(int(self.minute))
        return str(int(self.hour)) + ":" + str(int(self.minute))
    def add(self, minute):
        """Advance this time by `minute` minutes. O(1) time and space.

        NOTE(review): hours are not wrapped modulo 24, so adding past
        midnight yields hour >= 24 — kept as-is to preserve behavior.
        """
        total_minutes = minute + self.minute
        if total_minutes >= 60:
            hours = total_minutes // 60
            self.hour += hours
            total_minutes = total_minutes - (hours * 60)
        self.minute = total_minutes
    # Return True if self.time <= other.time
    def __le__(self, other):
        if other is not None:
            return (self.hour, self.minute) <= (other.hour, other.minute)
    # Return True if self.time < other.time
    def __lt__(self, other):
        if other is not None:
            return (self.hour, self.minute) < (other.hour, other.minute)
    # Return True if self.time >= other.time
    def __ge__(self, other):
        if other is not None:
            return (self.hour, self.minute) >= (other.hour, other.minute)
    # Return True if self.time > other.time
    def __gt__(self, other):
        # BUG FIX: was `not self.__lt__(other)`, which is >= — it returned
        # True for equal times. Now a proper strict greater-than.
        if other is not None:
            return (self.hour, self.minute) > (other.hour, other.minute)
|
'''
This script creates a heat map in form of a latex table which
visualizes an alignment matrix in csv format
'''
import argparse
import operator  # NOTE(review): unused in this script — kept, as other parts of the file may rely on it
import sys
parser = argparse.ArgumentParser(description='Reads an alignment matrix from stdin '
                                 'and creates latex code which displays it as heat map.')
parser.add_argument('-s','--source', help='File with source sentences', required=True)
parser.add_argument('-t','--target', help='File with target sentences', required=True)
parser.add_argument('-i','--index', help='Line number of sentence pair', type=int, required=True)
args = parser.parse_args()
# Load the chosen sentence pair; escape '&', which is special in latex tables.
with open(args.source) as f:
    src_sen = f.readlines()[args.index].strip().replace('&', '\\&').split()
with open(args.target) as f:
    trg_sen = f.readlines()[args.index].strip().replace('&', '\\&').split()
src_len = len(src_sen)
trg_len = len(trg_sen)
src_pos = 0
weights = []
# One row of alignment weights per source position, read from stdin.
for line in sys.stdin:
    weight_line = [0.0] * trg_len
    for trg_pos,weight in enumerate(line.strip().split()):
        weight_line[trg_pos] = float(weight)
    weights.append(weight_line)
# Pad with all-zero rows if stdin had fewer rows than source tokens.
while len(weights) < src_len:
    weights.append([0.0] * trg_len)
print('\\documentclass[a4paper]{article}')
print('\\usepackage[utf8]{inputenc}')
print('\\usepackage{rotating}')
print('\\usepackage[table]{xcolor}')
print('\\begin{document}')
print('')
print('\\begin{tabular}{l %s}' % ('l'*trg_len))
# Header row: each target word rotated vertically. ('\e' is not a Python
# escape sequence, so the single backslash survives into the latex output.)
print('& \\begin{sideways}%s\end{sideways} \\\\' % '\\end{sideways} & \\begin{sideways}'.join(trg_sen))
for src_pos,weight_line in enumerate(weights):
    # Darker cell = higher weight (gray level 0 is black, 1 is white).
    colors = ['{\\cellcolor[gray]{%f}}' % (1.0-w) for w in weight_line]
    print("%s & %s \\\\" % (src_sen[src_pos], ' & '.join(colors)))
print('\\end{tabular}')
print('\\end{document}')
|
import emoji
import random
from functions import *
class Commands:
    """Command dispatcher for the "Guess the Melody" Telegram bot.

    Every public method (except ``get_commands``/``exe``) is auto-registered
    as a bot command under its own name. Per-chat game state (current
    question, score, chosen category, ...) lives in ``self.cache``.
    NOTE(review): depends on names star-imported from ``functions``
    (createKeyboard, createKeyboardWithMenu, states, categories, songs,
    menu, about, number_questions, channel_id, open_json, is_int, write,
    pop_keys_from_dict, json) — confirm against that module.
    """
    def __init__(self, bot, db):
        self.bot = bot
        self.db = db
        # Seed table; the update() below re-registers these automatically.
        self.commands = {
            "start": self.start,
            "menu": self.menu
        }
        self.cache = {
            #states # last_seen
        }
        # Auto-register every public method except the two service entries.
        self.commands.update({
            x: self.__getattribute__(x) for x in dir(Commands)
            if callable(getattr(Commands, x)) and not x.startswith("__") and x not in ["get_commands", "exe"]
        })
    def get_commands(self):
        # All registered command names.
        return self.commands.keys()
    def exe(self, command, *args):
        # Run a registered command; silently ignores command=None.
        if command is None:
            return
        return self.commands[command](*args)
    def start(self, chat_id):
        """/start — greet the user and show the main menu."""
        self.bot.send_message(chat_id, "Привет! Это бот Угадай Мелодию! "
                              "Ты можешь играть один, или поиграть с друзьями, добавив меня в чат!")
        self.menu(chat_id)
    def menu(self, chat_id):
        """Show the main menu keyboard and reset the chat state.

        NOTE(review): assumes self.cache[chat_id] already exists — confirm
        the caller seeds it, otherwise this raises KeyError.
        """
        self.bot.send_message(chat_id, "Меню",
                              reply_markup=createKeyboard(2, list(map(lambda x: emoji.emojize(x, use_aliases=True),
                                                                      menu.keys()))))
        self.cache[chat_id]["state"] = states["nothing"]
    def settings(self, chat_id):
        # NOTE(review): send_message is called without a text argument here —
        # verify against the bot API wrapper; most implementations require one.
        self.bot.send_message(chat_id, reply_markup=createKeyboardWithMenu(1,
                              ["Количнство вопросов", "Время для ответа"]))
        self.cache[chat_id]["state"] = states["settings"]
    def about(self, chat_id):
        """Send the static "about" text."""
        self.bot.send_message(chat_id, about)
    def feedback(self, chat_id):
        """Ask for feedback; the next message is captured by gettingfeedback."""
        self.bot.send_message(chat_id, "Оставьте ваш отзыв здесь, спасибо!")
        self.cache[chat_id]["state"] = states["about"]
    # Game
    def play(self, chat_id):
        """Start (or resume) a game: pick a category or continue questions."""
        if "questions" in self.cache[chat_id]:
            # A game is already in progress — just re-send the current question.
            self.send_question(chat_id)
            return
        self.cache[chat_id].update({
            "state": states["play"],
            "current_question": -1,
            "total_question": 0,
            "right_answers": 0,
            "questions": []
        })
        self.bot.send_message(chat_id, "Выберите категорию", reply_markup=createKeyboardWithMenu(2, list(categories.keys()),
                              onetime=True))
        self.cache[chat_id]["state"] = states["game"]
    def game(self, message):
        """Handle the category-selection step."""
        chat_id = message.json["chat"]["id"]
        category = message.text
        if category in categories:
            if type(categories[category]) is dict:
                # Category has sub-categories — ask the user to narrow down.
                self.bot.send_message(chat_id, "Выберите",
                                      reply_markup=createKeyboardWithMenu(2, list(categories[category].keys()), onetime=True))
                self.cache[chat_id]["category"] = category
                self.cache[chat_id]["state"] = states["choosing"]
            elif categories[category] == "own":
                # The user supplies their own Spotify playlist link.
                self.bot.send_message(chat_id, "Отправьте ссылку на плейлист в Spotify")
                self.cache[chat_id]["state"] = states["ownchoosing"]
            else:
                self.maingame(chat_id, playlistid=categories[category])
                self.cache[chat_id]["state"] = states["play"]
    def maingame(self, smth, playlistid=0):
        """Main game step: resolve the playlist, check answers, advance.

        ``smth`` is either a chat id (int) or an incoming message object.
        """
        text = ""
        if is_int(smth):
            chat_id = smth
        else:
            chat_id = smth.json["chat"]["id"]
            text = smth.text
        if self.cache[chat_id]["state"] == states["choosing"]:
            # Sub-category chosen; it maps to one playlist or a list of them.
            if type(categories[self.cache[chat_id]["category"]][text]) is list:
                playlistid = random.choice(categories[self.cache[chat_id]["category"]][text])
            else:
                playlistid = categories[self.cache[chat_id]["category"]][text]
            self.cache[chat_id]["state"] = states["play"]
        elif self.cache[chat_id]["state"] == states["ownchoosing"]:
            # Extract the playlist id from a Spotify URL.
            playlistid = text.split("playlist/")[-1].split("?")[0]
            print(playlistid)
            self.cache[chat_id]["state"] = states["play"]
        if len(self.cache[chat_id]["questions"]) == 0:
            # First round: draw the questions and send the first one.
            self.cache[chat_id].update({
                "state": states["play"],
                "current_question": 0,
                "total_question": number_questions,
                "right_answers": 0,
                "questions": self.get_questions(playlistid, number_questions)
            })
            self.send_question(chat_id)
            return
        current_question = self.cache[chat_id]['current_question']
        right_answer = list(self.cache[chat_id]["questions"].keys())[current_question]
        answer = text
        if answer == right_answer:
            self.bot.send_message(chat_id, emoji.emojize("Correct :white_check_mark:", use_aliases=True))
            self.cache[chat_id]["right_answers"] += 1
        else:
            self.bot.send_message(chat_id, emoji.emojize("Incorrect :x: \n", use_aliases=True) +
                                  "Right answer is " + right_answer)
        if self.cache[chat_id]["current_question"] == self.cache[chat_id]["total_question"] - 1:
            # Last question answered — report the score and clear game state.
            self.cache[chat_id]["state"] = states["nothing"]
            self.bot.send_message(
                chat_id,
                f"You have finished the test! You have { self.cache[chat_id]['right_answers'] }"
                f" out of { self.cache[chat_id]['total_question'] } questions",
                reply_markup=createKeyboardWithMenu(1, [])
            )
            self.cache[chat_id] = pop_keys_from_dict(d=self.cache[chat_id], keys=[
                "current_question",
                "total_question",
                "right_answers",
                "questions"
            ])
            return
        self.cache[chat_id]["current_question"] += 1
        self.send_question(chat_id)
    def get_questions(self, id, num=10):
        """Pick ``num`` random (title -> track) pairs from playlist ``id``.

        NOTE(review): songs(id) is fetched again on every loop iteration —
        consider caching its result if songs() is expensive.
        """
        quest = {}
        keys = list(songs(id).keys())
        for i in range(num):
            key = keys.pop(random.randint(0, len(keys)-1))
            quest[key] = songs(id)[key]
        return quest
    def send_question(self, chat_id):
        """Send the current question's audio with an answer keyboard.

        Previously-unseen tracks are uploaded to a channel once to obtain a
        reusable Telegram file_id, cached in data/allsongs.json.
        """
        current_question = self.cache[chat_id]['current_question']
        cur = list(self.cache[chat_id]["questions"])[current_question]
        allsongs = open_json("data/allsongs.json")
        if cur in list(allsongs.keys()):
            self.bot.send_audio(chat_id, allsongs[cur], reply_markup=self.get_answer_keyboard(current_question, chat_id))
        else:
            m = self.bot.send_audio(channel_id, list(self.cache[chat_id]["questions"].values())[current_question])
            id = m.json["audio"]["file_id"]
            db = {cur: id}
            # Persist the new file_id into the on-disk cache.
            with open("data/allsongs.json", "r+") as file:
                data = json.load(file)
                data.update(db)
                file.seek(0)
                json.dump(data, file)
            self.bot.send_audio(
                chat_id,
                id,
                reply_markup=self.get_answer_keyboard(current_question, chat_id)
            )
    def get_answer_keyboard(self, num, chat_id, n=4, width=2):
        """Build a keyboard of n shuffled options including the right answer.

        NOTE(review): ``all`` (shadows the builtin) is popped by integer
        index, so data/all.json is presumably a list of titles — confirm.
        """
        answers = []
        right_answer = list(self.cache[chat_id]["questions"].keys())[num]
        all = open_json("data/all.json")
        for i in range(n - 1):
            new_a = all.pop(random.randint(0, len(all)-1))
            answers.append(new_a)
        answers.append(right_answer)
        # Shuffle by repeatedly popping at a random position.
        random_answers = []
        for i in range(len(answers)):
            random_answers.append(answers.pop(random.randint(0, len(answers) - 1)))
        return createKeyboardWithMenu(width, random_answers)
    def changing(self, chat_id):
        # Settings sub-step: prompt for a new numeric value.
        self.bot.send_message(chat_id, "Enter new number")
    def gettingfeedback(self, message):
        """Persist a feedback message to <chat_id>.txt and thank the user."""
        chat_id = message.json["chat"]["id"]
        write(message.text, str(chat_id) + ".txt")
        self.bot.send_message(chat_id, "Спасибо за отзыв!")
|
# given a pile of cards represented as array of integers (points)
# two players take turn to draw 1 to 3 cards from the left end
# returns the highest score of either player
def max_score(points):
    """
    Work backwards from the short suffixes (<= 3 cards, where the current
    player simply takes everything) toward index 0.

    advantage[i]: how much the player to move can out-score the opponent
    when picking starts at index i.
        advantage[n-k] = sum(points[n-k:])                        1 <= k <= 3
        advantage[i]   = max(sum(points[i:i+k]) - advantage[i+k]) 1 <= k <= 3
    """
    if not points:
        return 0
    n = len(points)
    # prefix[k] = sum(points[:k])
    prefix = [0] * (n + 1)
    for idx, p in enumerate(points):
        prefix[idx + 1] = prefix[idx] + p
    advantage = [float('-inf')] * n
    # Base cases: with at most 3 cards left, take them all.
    for take in range(1, min(4, n + 1)):
        advantage[n - take] = prefix[n] - prefix[n - take]
    for i in range(n - 4, -1, -1):
        for take in range(1, 4):
            gained = prefix[i + take] - prefix[i]
            advantage[i] = max(advantage[i], gained - advantage[i + take])
    # advantage[0] = p1 - p2 and prefix[n] = p1 + p2; solve for each score.
    first = (advantage[0] + prefix[n]) / 2
    second = first - advantage[0]
    return max(first, second)
if __name__ == '__main__':
    # Smoke-test cases; expected results in the trailing comments.
    # BUG FIX: converted from Python-2 print statements (a SyntaxError on
    # Python 3); the parenthesized single-argument form behaves identically
    # on both interpreters.
    print(max_score([]))  # 0
    print(max_score([-1]))  # 0
    print(max_score([-1, 3, 2]))  # 4
    print(max_score([3, -2, 4]))  # 5
    print(max_score([2, 3, -1, 4]))  # 5
    print(max_score([2, 3, -1, 4, -2, 1, 6]))  # 9
    print(max_score([1, 3, -2, -5, 6, 4]))  # 8
    print(max_score([-1,-10,-2,-4,-2,-5]))  # -12
|
import psycopg2
class HorseDatabase():
    """Thin wrapper holding a single psycopg2 connection."""
    def __init__(self):
        # No connection until connect() is called.
        self.conn = None
    def connect(self, host="localhost", dbname="", user="", password=""):
        """Open the PostgreSQL connection.

        BUG FIX: the original built a libpq DSN via string formatting, which
        breaks (or allows DSN injection) when a value contains spaces or
        quotes. Passing keyword arguments lets psycopg2 escape them safely.
        """
        self.conn = psycopg2.connect(host=host, dbname=dbname, user=user, password=password)
    def get_conn(self):
        """Return the current connection (None before connect())."""
        return self.conn
from ckeditor.widgets import CKEditorWidget
from ckeditor.fields import RichTextField
from django.core.mail import send_mail
from django import forms
from django.contrib.auth.models import User
from django.forms import ModelForm
from job.models import Job, Employee, Applicant, ApplicationTime, Messages, UsersMessage
from django.forms import DurationField
from authenticate.models import FacebookUser, GoogleUser, LinkedinUser, InstagramUser, TwitterProfile
class UserForm(ModelForm):
    """Login form exposing the email and password of ``User``."""
    password = forms.CharField(widget=forms.PasswordInput())

    class Meta:
        model = User
        fields = ('email', 'password')

    def __init__(self, *args, **kwargs):
        super(UserForm, self).__init__(*args, **kwargs)
        # Bootstrap styling plus a placeholder per field.
        for name, hint in (('email', 'EmailId'), ('password', 'Password')):
            self.fields[name].widget.attrs.update({'class': 'form-control', 'placeholder': hint})
        self.fields['email'].required = True
class EmployeeForm(ModelForm):
    """Form for creating/editing an Employee record, with Bootstrap styling."""
    employee_pic = forms.ImageField(label='Select a file', help_text='max. 42 megabytes')

    class Meta:
        model = Employee
        fields = ('first_name', 'last_name','gender', 'email', 'address', 'contact', 'dob', 'joining_date', 'employee_pic')

    def __init__(self, *args, **kwargs):
        super(EmployeeForm, self).__init__(*args, **kwargs)
        # Every field gets the Bootstrap input class.
        for name in ('first_name', 'last_name', 'gender', 'email', 'address',
                     'contact', 'dob', 'joining_date', 'employee_pic'):
            self.fields[name].widget.attrs.update({'class': 'form-control'})
        # The joining date is pre-filled and must not be edited by hand.
        self.fields['joining_date'].widget.attrs.update({'readonly': 'readonly'})
class ApplicantForm(ModelForm):
    """Job-applicant profile form: personal data, education and experience."""
    applicant_pic = forms.ImageField(label='Select a file', help_text='max. 42 megabytes')

    class Meta:
        model = Applicant
        fields = ('applicant_pic', 'firstname', 'lastname','applicant_gender', 'address', 'contact', 'dob', 'tenth_percentage', 'twelth_percentage', 'graduation', 'graduation_percentage', 'post_graduation', 'post_graduation_percentage', 'currently_working_with', 'current_position', 'experience', 'skills')

    def __init__(self, *args, **kwargs):
        super(ApplicantForm, self).__init__(*args, **kwargs)
        # Placeholder text per field; fields absent here get only the class.
        placeholders = {
            'firstname': 'Firstname',
            'lastname': 'Lastname',
            'address': 'Address',
            'contact': 'Contact',
            'dob': 'YYYY:MM:DD',
            'tenth_percentage': '10th Percentage ',
            'twelth_percentage': '12th Percentage',
            'graduation': 'Graduation',
            'graduation_percentage': 'Graduation Percentage',
            'post_graduation': 'Post Graduation',
            'post_graduation_percentage': 'Post Graduation Percentage',
            'currently_working_with': 'Firm you are working with',
            'current_position': 'Currently working as',
            'experience': 'Experience in Months',
            'skills': 'Skills',
        }
        for name in ('applicant_pic', 'firstname', 'lastname', 'applicant_gender',
                     'address', 'contact', 'dob', 'tenth_percentage',
                     'twelth_percentage', 'graduation', 'graduation_percentage',
                     'post_graduation', 'post_graduation_percentage',
                     'currently_working_with', 'current_position', 'experience',
                     'skills'):
            attrs = {'class': 'form-control'}
            if name in placeholders:
                attrs['placeholder'] = placeholders[name]
            self.fields[name].widget.attrs.update(attrs)
        # Mandatory subset of the profile.
        for name in ('applicant_pic', 'firstname', 'lastname', 'applicant_gender',
                     'address', 'contact', 'dob', 'tenth_percentage',
                     'twelth_percentage', 'skills'):
            self.fields[name].required = True
class CreateJob(ModelForm):
    """Form for posting a Job; the description uses a CKEditor widget."""
    job_description = forms.CharField(widget=CKEditorWidget())

    class Meta:
        model = Job
        fields = ('job_title', 'job_description', 'job_location', 'job_skillsrequired','job_minqualification', 'job_experience', 'job_experience_months', 'job_valid_upto')

    def __init__(self, *args, **kwargs):
        super(CreateJob, self).__init__(*args, **kwargs)
        placeholders = {
            'job_title': 'Title',
            'job_description': 'Description',
            'job_location': 'Location',
            'job_skillsrequired': 'Skills Required',
            'job_minqualification': 'Minimum Qualification Required',
            'job_valid_upto': 'YYYY:MM:DD',
        }
        # Bootstrap class on everything; placeholder where one is defined.
        for name in ('job_title', 'job_description', 'job_location',
                     'job_skillsrequired', 'job_minqualification',
                     'job_experience', 'job_experience_months', 'job_valid_upto'):
            attrs = {'class': 'form-control'}
            if name in placeholders:
                attrs['placeholder'] = placeholders[name]
            self.fields[name].widget.attrs.update(attrs)
class MessagesForm(ModelForm):
    """Public contact form: sender name, e-mail and message body."""
    class Meta:
        model = Messages
        fields = ('name', 'email', 'body')

    def __init__(self, *args, **kwargs):
        super(MessagesForm, self).__init__(*args, **kwargs)
        widget_attrs = {
            'name': {'class': 'form-control', 'placeholder': 'Name'},
            'email': {'class': 'form-control', 'placeholder': 'EmailId'},
            'body': {'class': 'form-control'},
        }
        for field_name, attrs in widget_attrs.items():
            self.fields[field_name].widget.attrs.update(attrs)
class UsersMessageForm(ModelForm):
    """Message from a logged-in user about a specific job."""
    class Meta:
        model = UsersMessage
        fields = ('name', 'job', 'body')

    def __init__(self, *args, **kwargs):
        super(UsersMessageForm, self).__init__(*args, **kwargs)
        widget_attrs = {
            'name': {'class': 'form-control'},
            'job': {'class': 'form-control'},
            'body': {'class': 'form-control', 'placeholder': 'Feel free to message..'},
        }
        for field_name, attrs in widget_attrs.items():
            self.fields[field_name].widget.attrs.update(attrs)
class MailtoApplicantForm(forms.Form):
    """E-mail an applicant: recipient address, canned subject, free body."""
    email = forms.EmailField()
    title = forms.ChoiceField(widget = forms.Select(), choices = ([
        ('Call for Telephonic Interview','Call for Telephonic Interview'),
        ('Call for Personal Interview','Call for Personal Interview'),
        ('Call for HR Interview','Call for HR Interview'),
        ('Joining Letter','Joining Letter'),
        ]), required = True,)
    body = forms.CharField(widget=forms.Textarea)

    def __init__(self, *args, **kwargs):
        super(MailtoApplicantForm, self).__init__(*args, **kwargs)
        # Uniform Bootstrap styling across all three widgets.
        for field_name in ('email', 'title', 'body'):
            self.fields[field_name].widget.attrs.update({'class': 'form-control'})
class SearchForm(forms.Form):
    """Site-wide search box; ``q`` holds the raw query string."""
    q = forms.CharField(max_length=255)
##############################################
#authenticate forms
##############################################
# class SignUpForm(ModelForm):
# password = forms.CharField(widget=forms.PasswordInput())
#
# class Meta:
# model = User
# fields = ('username', 'email', 'password')
#
# def __init__(self, *args, **kwargs):
# super(SignUpForm, self).__init__(*args, **kwargs)
# self.fields['username'].widget.attrs.update({'class': 'form-control', 'placeholder':'Username'})
# self.fields['email'].widget.attrs.update({'class': 'form-control', 'placeholder':'EmailId'})
# self.fields['password'].widget.attrs.update({'class': 'form-control', 'placeholder':'Password'})
class EmailForm(ModelForm):
    """Collect/confirm the e-mail address of a Facebook-authenticated user."""
    class Meta:
        model = FacebookUser
        fields = ('facebook_userid', 'name', 'email')

    def __init__(self, *args, **kwargs):
        super(EmailForm, self).__init__(*args, **kwargs)
        extra = {
            'facebook_userid': {},
            'name': {},
            'email': {'placeholder': 'EmailId'},
        }
        for field_name, attrs in extra.items():
            self.fields[field_name].widget.attrs.update(dict({'class': 'form-control'}, **attrs))
class LinkedinEmailForm(ModelForm):
    """Collect/confirm the e-mail address of a LinkedIn-authenticated user."""
    class Meta:
        model = LinkedinUser
        fields = ('linkedin_userid', 'name', 'email')

    def __init__(self, *args, **kwargs):
        super(LinkedinEmailForm, self).__init__(*args, **kwargs)
        extra = {
            'linkedin_userid': {},
            'name': {},
            'email': {'placeholder': 'EmailId'},
        }
        for field_name, attrs in extra.items():
            self.fields[field_name].widget.attrs.update(dict({'class': 'form-control'}, **attrs))
class TwitterEmailForm(ModelForm):
    """Collect/confirm the e-mail address of a Twitter-authenticated user."""
    class Meta:
        model = TwitterProfile
        fields = ('user', 'email', 'twitter_user_id')

    def __init__(self, *args, **kwargs):
        super(TwitterEmailForm, self).__init__(*args, **kwargs)
        extra = {
            'user': {'readonly': 'readonly'},  # linked account is fixed
            'twitter_user_id': {},
            'email': {'placeholder': 'EmailId'},
        }
        for field_name, attrs in extra.items():
            self.fields[field_name].widget.attrs.update(dict({'class': 'form-control'}, **attrs))
|
class cached_property(object):
    """
    Descriptor that turns a method into a lazily-computed, read-only
    property: the wrapped function runs once, and its result is stored in
    the instance ``__dict__`` under the same name, shadowing the descriptor
    for all later lookups.
    **Warning:** Only works for read-only properties!
    Example::
        def very_expensive_function():
            print("called very_expensive_function")
            return 3.14159
        class MyClass(object):
            @cached_property
            def my_attribute(self):
                return very_expensive_function()
    >>> c = MyClass()
    >>> print(c.my_attribute)
    called very_expensive_function
    3.14159
    >>> print(c.my_attribute)
    3.14159
    """
    def __init__(self, fget):
        self._fget = fget
        # Mirror the wrapped function's metadata onto the descriptor.
        for attr in ('__name__', '__module__', '__doc__'):
            setattr(self, attr, getattr(fget, attr))
    def __get__(self, instance, owner):
        # Class-level access returns the descriptor itself.
        if instance is None:
            return self
        result = self._fget(instance)
        # Cache on the instance; subsequent lookups bypass the descriptor.
        instance.__dict__[self._fget.__name__] = result
        return result
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Fri Aug 14 12:01:04 2020
@author: danielfurman
"""
# This file unzips climate tifs downloaded from Worldclim.org, and
# the expanded files are saved to a HardDrive folder "output_dir".
# Manually unzipping eight tif files for a single ssp scenario
# took 3 minutes and 24 seconds, in part due to an extensive opening
# of subfolders. In contrast, this code runs in approximately 31
# seconds, speeding processing by over 600%. Overall, we saved over
# 30 minutes when we downloaded CMIP6 data for 12 scenarios.
# see https://www.worldclim.org/data/cmip6/cmip6_clim2.5m.html
# It is important the files match the eight CMIP6 models in
# model_names, and that the data are 2.5m resolution.
import time  # we will calculate the runtime
import zipfile  # library for unzipping function
import glob  # data loading functions
import shutil  # moving files within directories functions
# output folder
ssp = 'ssp126'  # shared socioeconomic pathway scenario being processed
output_dir = '/volumes/HardDrive/' + ssp + '_2061-2080'
# make sure only 8 relevant zip are in downloads
filenames = sorted(glob.glob('/Users/danielfurman/Downloads/wc2.1*.zip'))
print('The file names are:', filenames)  # print filenames
len(filenames)  # Confirmed 8 total files (informal check; value not asserted)
start_time = time.time()
# extract every archive straight into the output folder
for f in filenames:  # f is a string filename
    with zipfile.ZipFile(f, "r") as zip_ref:
        zip_ref.extractall(output_dir)
model_names = ['BCC-CSM2-MR', 'CanESM5', 'CNRM-CM6-1', 'CNRM-ESM2-1',
               'IPSL-CM6A-LR', 'MIROC-ES2L', 'MIROC6', 'MRI-ESM2-0']
# move each model's tif out of the deep extracted folder tree into output_dir
for model in model_names:
    original = glob.glob(output_dir +
                         '/share/spatial03/worldclim/cmip6/7_fut/2.5m/' +
                         model + '/' + ssp + '/wc*.tif')
    shutil.move(original[0], output_dir)
print("--- %s seconds ---" % (time.time() - start_time))
|
import os
import sys
import traceback
import webbrowser
import pyqrcode
import requests
import mimetypes
import json
import xml.dom.minidom
import urllib
import time
import re
import random
from traceback import format_exc
from requests.exceptions import ConnectionError, ReadTimeout
import html
# Login status codes returned by the WeChat login polling endpoint.
UNKONWN = 'unkonwn'  # sentinel for "no status yet" (misspelling preserved: it is the public constant name/value)
SUCCESS = '200'  # user confirmed login
SCANNED = '201'  # QR code scanned, awaiting confirmation
TIMEOUT = '408'  # polling request timed out
#emoji
def emoji_formatter(st):
    '''Replace wechat ``<span class="emoji emojiXXXX"></span>`` markup in
    *st* with the actual unicode emoji characters.

    ``_emoji_debugger`` is for bugs about emoji match caused by the wechat
    backstage, e.g. :face with tears of joy: being replaced with
    :cat face with tears of joy:
    '''
    def _emoji_debugger(st):
        # Repair a span tag the backend sometimes sends without its closing '>'.
        s = st.replace('<span class="emoji emoji1f450"></span',
                       '<span class="emoji emoji1f450"></span>')  # fix missing bug
        def __fix_miss_match(m):
            # Map known-wrong codepoints back to the intended emoji.
            return '<span class="emoji emoji%s"></span>' % ({
                '1f63c': '1f601', '1f639': '1f602', '1f63a': '1f603',
                '1f4ab': '1f616', '1f64d': '1f614', '1f63b': '1f60d',
                '1f63d': '1f618', '1f64e': '1f621', '1f63f': '1f622',
            }.get(m.group(1), m.group(1)))
        return emojiRegex.sub(__fix_miss_match, s)
    def _emoji_formatter(m):
        # 6 or 10 hex digits encode a pair of codepoints; otherwise a single
        # codepoint. Decode via \U escape sequences in each case.
        s = m.group(1)
        if len(s) == 6:
            return ('\\U%s\\U%s'%(s[:2].rjust(8, '0'), s[2:].rjust(8, '0'))
                    ).encode('utf8').decode('unicode-escape', 'replace')
        elif len(s) == 10:
            return ('\\U%s\\U%s'%(s[:5].rjust(8, '0'), s[5:].rjust(8, '0'))
                    ).encode('utf8').decode('unicode-escape', 'replace')
        else:
            return ('\\U%s'%m.group(1).rjust(8, '0')
                    ).encode('utf8').decode('unicode-escape', 'replace')
    emojiRegex = re.compile(r'<span class="emoji emoji(.{1,10})"></span>')
    if emojiRegex.search(st) != None:
        st = _emoji_debugger(st)
        st = emojiRegex.sub(_emoji_formatter, st)
        return st
    else:
        return st
# used to display the qrcode
def show_image(file_path):
    """
    Display an image file in a cross-platform way.
    :param file_path: path of the image file
    """
    # Pick the shell-quoting helper based on the python version.
    if sys.version_info >= (3, 3):
        from shlex import quote
    else:
        from pipes import quote
    # On macOS (darwin) open the file with the Preview app.
    if sys.platform == "darwin":
        command = "open -a /Applications/Preview.app %s&" % quote(file_path)
        os.system(command)
    # Otherwise open the picture in the browser.
    else:
        webbrowser.open(os.path.join(os.getcwd(),'temp',file_path))
# Session class inheriting from requests.Session; calling request() goes
# through this retrying wrapper around the parent class's request().
class SafeSession(requests.Session):
    """A requests.Session whose request() retries on any exception.

    Up to 4 attempts are made (3 silent retries plus a final one); the
    exception raised by the last attempt propagates to the caller. This
    preserves the original behavior while removing the duplicated call
    site (the original repeated the full super().request(...) call after
    the loop) and the pointless ``except Exception as e: raise e`` wrapper.
    """
    def request(self, method, url, params=None, data=None, headers=None, cookies=None, files=None, auth=None,
                timeout=None, allow_redirects=True, proxies=None, hooks=None, stream=None, verify=None, cert=None,
                json=None):
        last_exc = None
        for attempt in range(4):  # 3 retries + 1 final attempt
            try:
                return super(SafeSession, self).request(method, url, params, data, headers, cookies, files, auth,
                                                        timeout,
                                                        allow_redirects, proxies, hooks, stream, verify, cert, json)
            except Exception as e:
                # NOTE: the broad catch is deliberate — any transport error
                # triggers a retry; the last one is re-raised below.
                last_exc = e
        raise last_exc
#自己的类
class KITBot:
    def __init__(self):
        """Initialize the session and all state for the web-WeChat protocol."""
        self.session = SafeSession()
        self.uuid = ''  # login uuid handed out by the jslogin endpoint
        self.conf = {'qr': 'png'}  # QR rendering mode: 'png' file or 'tty' text
        self.redirect_uri = ''  # redirect url received after a confirmed login
        self.base_uri = ''
        self.base_host = ''
        self.status = ''
        self.base_request = {}
        ### base_request elements, basically user info
        self.uin = ''
        self.sid = ''
        self.skey = ''
        self.device_id = 'e' + repr(random.random())[2:17]
        self.pass_ticket = ''
        ###
        ### sync related attributes
        self.sync_key_str = ''
        self.sync_key = []
        self.sync_host = ''
        ###
        ### user related
        self.my_account = ''
        ###
        ### debug
        self.DEBUG = False
        ###
        ### contact information related
        self.my_account = {}  # current account (NOTE(review): overwrites the '' assigned above)
        # all related accounts: contacts, public accounts, groups, special accounts
        self.member_list = []
        # members of every group, {'group_id1': [member1, member2, ...], ...}
        self.group_members = {}
        # all accounts, {'group_member':{'id':{'type':'group_member', 'info':{}}, ...}, 'normal_member':{'id':{}, ...}}
        self.account_info = {'group_member': {}, 'normal_member': {}}
        self.contact_list = []  # contact list
        self.public_list = []  # public (official) account list
        self.group_list = []  # group chat list
        self.special_list = []  # special account list
        self.encry_chat_room_id_list = []  # EncryChatRoomId per group; needed to fetch member avatars
        ###
        # file cache directory, e.g. for saving the qrcode image
        # getcwd: current working directory
        # os.path.join joins path parts with the platform-specific separator
        self.temp_dir = os.path.join(os.getcwd(), 'temp')
        if os.path.exists(self.temp_dir) == False:
            os.makedirs(self.temp_dir)
def getuuid(self):
url = 'https://login.weixin.qq.com/jslogin'
params = {
'appid': 'wx782c26e4c19acffb',#定死的appid 猜测是网页版的id
'redirect_url':'https%3A%2F%2Fwx.qq.com%2Fcgi-bin%2Fmmwebwx-bin%2Fwebwxnewloginpage',#新的redirect url参数
'fun': 'new',
'lang': 'zh_CN',#or en_US
'_': int(time.time()) * 1000 + random.randint(1, 999),#greenwidh time in ms?
}
r = self.session.get(url, params=params)
r.encoding = 'utf-8'
data = r.text
regx = r'window.QRLogin.code = (\d+); window.QRLogin.uuid = "(\S+?)"'
pm = re.search(regx, data)
if pm:
code = pm.group(1)
self.uuid = pm.group(2)
return code == '200'#if code == 200 then getuuid ok!
return False
    def init_sync(self):
        """Call webwxinit to fetch the account profile and initial SyncKey.

        Returns True when the server reports BaseResponse.Ret == 0 (success).
        """
        url = self.base_uri + '/webwxinit?r=%i&lang=en_US&pass_ticket=%s' % (int(time.time()), self.pass_ticket)
        params = {
            'BaseRequest': self.base_request
        }
        # json.dumps: serialize the params dict into a JSON request body
        r = self.session.post(url, data=json.dumps(params))
        r.encoding = 'utf-8'
        dic = json.loads(r.text)
        self.sync_key = dic['SyncKey']
        self.my_account = dic['User']
        # flatten SyncKey into the "key_val|key_val|..." form used by sync requests
        self.sync_key_str = '|'.join([str(keyVal['Key']) + '_' + str(keyVal['Val']) for keyVal in self.sync_key['List']])
        return dic['BaseResponse']['Ret'] == 0
    # generate qrcode locally in qr_file_path /// the qrcode just encodes a URL!!
    def gen_qrcode(self, qr_file_path):
        """Generate the login QR code for the current uuid and display it.

        Renders either a PNG file (then shown via show_image) or an ASCII
        code in the terminal, depending on self.conf['qr'].
        """
        string = 'https://login.weixin.qq.com/l/' + self.uuid  # changed url!
        '''
        another method to get qrcode possibly generation exists online
        r = self.session.get('https://login.weixin.qq.com/qrcode/' + self.uuid, stream = True)
        with open('QRCode.jpg', 'wb') as f: f.write(r.content)
        '''
        qr = pyqrcode.create(string)
        # config code type of png
        if self.conf['qr'] == 'png':
            qr.png(qr_file_path, scale=8)
            show_image(qr_file_path)
            # img = Image.open(qr_file_path)
            # img.show()
        elif self.conf['qr'] == 'tty':
            # render the qrcode as text directly in the terminal
            print(qr.terminal(quiet_zone=1))
def do_request(self, url,para):
r = self.session.get(url,params = para)
r.encoding = 'utf-8'
data = r.text
param = re.search(r'window.code=(\d+);', data)
code = param.group(1)
#print('before scanned',data)
return code, data
    def check_scanning(self):
        """
        HTTP comet polling of the login-page state.
        tip=1: waiting for the user to scan the QR code
            201: scanned
            408: timeout
        tip=0: waiting for the user to confirm login on the phone
            200: confirmed
        On success this also derives redirect_uri / base_uri / base_host.
        Returns the last status code seen.
        """
        LOGIN_TEMPLATE = 'https://login.weixin.qq.com/cgi-bin/mmwebwx-bin/login'
        tip = 1# 1 before the code has been scanned
        try_later_secs = 1
        MAX_RETRY_TIMES = 10
        code = UNKONWN
        retry_time = MAX_RETRY_TIMES
        #always in loop to look for scanning
        while retry_time > 0:
            #request for login status code
            para = 'tip=%s&uuid=%s&_=%s'%(tip, self.uuid, int(time.time()))
            code, data = self.do_request(LOGIN_TEMPLATE,para)
            if code == SCANNED:
                print('[INFO] Please confirm to login .')
                tip = 0# 0 after the scan, while waiting for confirmation
            elif code == SUCCESS: # login confirmed on the phone
                #successfully login and then redirect to a new url
                param = re.search(r'window.redirect_uri="(\S+?)";', data)
                redirect_uri = param.group(1) + '&fun=new'
                self.redirect_uri = redirect_uri
                #rfind: from right to left check where is the last position of '/'
                self.base_uri = redirect_uri[:redirect_uri.rfind('/')]
                #print('base_uri',self.base_uri)
                #https:// has 8 letter
                temp_host = self.base_uri[8:]
                #print('temp host',temp_host)
                self.base_host = temp_host[:temp_host.find("/")]
                #print('base host',self.base_host)
                '''
                window.redirect_uri="https://wx2.qq.com/cgi-bin/mmwebwx-bin/webwxnewloginpage?ticket=A7rQhP8VnCOBuSlHMFI7ALfx@qrticket_0&uuid=IYvmbvNdDQ==&lang=zh_CN&scan=1571584486";
                base_uri https://wx2.qq.com/cgi-bin/mmwebwx-bin
                temp host wx2.qq.com/cgi-bin/mmwebwx-bin
                base host wx2.qq.com
                '''
                return code
            elif code == TIMEOUT:
                print('[ERROR] WeChat login timeout. retry in %s secs later...' % (try_later_secs,))
                tip = 1 # reset: wait for a fresh scan
                retry_time -= 1
                time.sleep(try_later_secs)
            else:
                print ('[ERROR] WeChat login exception return_code=%s. retry in %s secs later...' %
                        (code, try_later_secs))
                tip = 1
                retry_time -= 1
                time.sleep(try_later_secs)
        return code
    def getUsrInfo(self):
        """Fetch skey/sid/uin/pass_ticket by following the login redirect.

        Parses the XML payload served at redirect_uri and builds
        self.base_request for later API calls. Returns False when the
        redirect is unusable or any credential came back empty.
        """
        #get user info by examining the dom elemnents from the website catched
        if len(self.redirect_uri) < 4:
            print ('[ERROR] Login failed due to network problem, please try again.')
            return False
        r = self.session.get(self.redirect_uri)
        r.encoding = 'utf-8'
        data = r.text
        doc = xml.dom.minidom.parseString(data)
        root = doc.documentElement
        for node in root.childNodes:
            if node.nodeName == 'skey':
                self.skey = node.childNodes[0].data
            elif node.nodeName == 'wxsid':
                self.sid = node.childNodes[0].data
            elif node.nodeName == 'wxuin':
                self.uin = node.childNodes[0].data
            elif node.nodeName == 'pass_ticket':
                self.pass_ticket = node.childNodes[0].data
        # all four credentials are required; an empty one means login is unusable
        if '' in (self.skey, self.sid, self.uin, self.pass_ticket):
            return False
        self.base_request = {
            'Uin': self.uin,
            'Sid': self.sid,
            'Skey': self.skey,
            'DeviceID': self.device_id,
        }
        return True
    #imitate the post request sequencely loaded by webwechat
    def status_notify(self):
        """Send the webwxstatusnotify handshake; returns True on Ret == 0."""
        url = self.base_uri + '/webwxstatusnotify?lang=zh_CN&pass_ticket=%s' % self.pass_ticket
        # Uin is converted to int here — presumably the endpoint rejects the
        # string form parsed from the login XML; confirm against the API
        self.base_request['Uin'] = int(self.base_request['Uin'])
        params = {
            'BaseRequest': self.base_request,
            "Code": 3,
            "FromUserName": self.my_account['UserName'],
            "ToUserName": self.my_account['UserName'],
            "ClientMsgId": int(time.time())
        }
        r = self.session.post(url, data=json.dumps(params))
        r.encoding = 'utf-8'
        dic = json.loads(r.text)
        #if send correctedly response will be 0
        return dic['BaseResponse']['Ret'] == 0
    #mirror the web client's request order: webwxgetcontact, then batchgetcontact
    def get_contact(self):
        """Fetch every related account (friends, public accounts, group chats,
        special accounts) — they all arrive in one JSON payload — and classify
        them into the local lists and self.account_info. Returns True on
        success, False when the first fetch fails."""
        dic_list = []
        """base_uri https://wx2.qq.com/cgi-bin/mmwebwx-bin"""
        url = self.base_uri + '/webwxgetcontact?seq=0&pass_ticket=%s&skey=%s&r=%s' \
            % (self.pass_ticket, self.skey, int(time.time()))
        # with a very large address book this first fetch can fail outright
        try:
            r = self.session.post(url, data='{}', timeout=180)
        except Exception as e:
            return False
        r.encoding = 'utf-8'
        dic = json.loads(r.text)
        dic_list.append(dic)
        # Seq == 0 marks the last page; big address books page through non-zero Seq values
        while int(dic["Seq"]) != 0:
            print ("[INFO] Geting contacts. Get %s contacts for now" % dic["MemberCount"])
            url = self.base_uri + '/webwxgetcontact?seq=%s&pass_ticket=%s&skey=%s&r=%s' \
                % (dic["Seq"], self.pass_ticket, self.skey, int(time.time()))
            r = self.session.post(url, data='{}', timeout=180)
            r.encoding = 'utf-8'
            dic = json.loads(r.text)
            dic_list.append(dic)
        # in debug mode dump contacts.json into the temp folder
        if self.DEBUG:
            with open(os.path.join(self.temp_pwd,'contacts.json'), 'w') as f:
                f.write(json.dumps(dic_list))
        # flat member list accumulated over all pages
        self.member_list = []
        for dic in dic_list:
            # append each page's members at the tail
            self.member_list.extend(dic['MemberList'])
        # special accounts such as the WeChat platform itself, WeRun, filehelper, ...
        special_users = ['newsapp', 'fmessage', 'filehelper', 'weibo', 'qqmail',
                         'fmessage', 'tmessage', 'qmessage', 'qqsync', 'floatbottle',
                         'lbsapp', 'shakeapp', 'medianote', 'qqfriend', 'readerapp',
                         'blogapp', 'facebookapp', 'masssendapp', 'meishiapp',
                         'feedsapp', 'voip', 'blogappweixin', 'weixin', 'brandsessionholder',
                         'weixinreminder', 'wxid_novlwrv3lqwv11', 'gh_22b87fa7cb3c',
                         'officialaccounts', 'notification_messages', 'wxid_novlwrv3lqwv11',
                         'gh_22b87fa7cb3c', 'wxitil', 'userexperience_alarm', 'notification_messages']
        self.contact_list = []
        self.public_list = []
        self.special_list = []
        self.group_list = []
        # classify every account and register it in account_info; only
        # standalone accounts go in here — group members are resolved below
        for contact in self.member_list:
            if contact['VerifyFlag'] & 8 != 0: # public (official) account
                self.public_list.append(contact)
                self.account_info['normal_member'][contact['UserName']] = {'type': 'public', 'info': contact}
            elif contact['UserName'] in special_users: # special account
                self.special_list.append(contact)
                self.account_info['normal_member'][contact['UserName']] = {'type': 'special', 'info': contact}
            elif contact['UserName'].find('@@') != -1: # group chat
                self.group_list.append(contact)
                self.account_info['normal_member'][contact['UserName']] = {'type': 'group', 'info': contact}
            elif contact['UserName'] == self.my_account['UserName']: # own account
                self.account_info['normal_member'][contact['UserName']] = {'type': 'self', 'info': contact}
            else:
                self.contact_list.append(contact)# regular friend
                self.account_info['normal_member'][contact['UserName']] = {'type': 'contact', 'info': contact}
        print(self.group_list)
        # fetch the full member roster for every group
        self.batch_get_group_members()
        for group in self.group_members:
            for member in self.group_members[group]:
                if member['UserName'] not in self.account_info:
                    self.account_info['group_member'][member['UserName']] = \
                        {'type': 'group_member', 'info': member, 'group': group}
        # debug dumps of every classified list
        if self.DEBUG:
            with open(os.path.join(self.temp_pwd,'contact_list.json'), 'w') as f:
                f.write(json.dumps(self.contact_list))
            with open(os.path.join(self.temp_pwd,'special_list.json'), 'w') as f:
                f.write(json.dumps(self.special_list))
            with open(os.path.join(self.temp_pwd,'group_list.json'), 'w') as f:
                f.write(json.dumps(self.group_list))
            with open(os.path.join(self.temp_pwd,'public_list.json'), 'w') as f:
                f.write(json.dumps(self.public_list))
            with open(os.path.join(self.temp_pwd,'member_list.json'), 'w') as f:
                f.write(json.dumps(self.member_list))
            with open(os.path.join(self.temp_pwd,'group_users.json'), 'w') as f:
                f.write(json.dumps(self.group_members))
            with open(os.path.join(self.temp_pwd,'account_info.json'), 'w') as f:
                f.write(json.dumps(self.account_info))
        return True
    def batch_get_group_members(self):
        """Fetch member lists for every group in self.group_list in one batch call.

        Populates self.group_members (gid -> member list) and
        self.encry_chat_room_id_list (gid -> EncryChatRoomId).
        """
        url = self.base_uri + '/webwxbatchgetcontact?type=ex&r=%s&pass_ticket=%s' % (int(time.time()), self.pass_ticket)
        params = {
            'BaseRequest': self.base_request,
            "Count": len(self.group_list),
            "List": [{"UserName": group['UserName'], "EncryChatRoomId": ""} for group in self.group_list]
        }
        r = self.session.post(url, data=json.dumps(params))
        r.encoding = 'utf-8'
        dic = json.loads(r.text)
        group_members = {}
        encry_chat_room_id = {}
        for group in dic['ContactList']:
            gid = group['UserName']
            members = group['MemberList']
            group_members[gid] = members
            encry_chat_room_id[gid] = group['EncryChatRoomId']
        self.group_members = group_members
        self.encry_chat_room_id_list = encry_chat_room_id
def test_sync_check(self):
#要看那个才是synccheck的借口push还是push2
for host1 in ['webpush.', 'webpush2.']:
self.sync_host = host1+self.base_host
try:
retcode = self.sync_check()[0]
except:
retcode = -1
if retcode == '0':
return True
return False
def sync_check(self):
params = {
'r': int(time.time()),
'sid': self.sid,
'uin': self.uin,
'skey': self.skey,
'deviceid': self.device_id,
'synckey': self.sync_key_str,
'_': int(time.time()),
}
url = 'https://' + self.sync_host + '/cgi-bin/mmwebwx-bin/synccheck?' + urllib.parse.urlencode(params)
try:
r = self.session.get(url, timeout=60)
r.encoding = 'utf-8'
data = r.text
pm = re.search(r'window.synccheck=\{retcode:"(\d+)",selector:"(\d+)"\}', data)
retcode = pm.group(1)
selector = pm.group(2)
return [retcode, selector]
except:
return [-1, -1]
def sync(self):
url = self.base_uri + '/webwxsync?sid=%s&skey=%s&lang=en_US&pass_ticket=%s' \
% (self.sid, self.skey, self.pass_ticket)
params = {
'BaseRequest': self.base_request,
'SyncKey': self.sync_key,
'rr': ~int(time.time())
}
try:
r = self.session.post(url, data=json.dumps(params), timeout=60)
r.encoding = 'utf-8'
dic = json.loads(r.text)
if dic['BaseResponse']['Ret'] == 0:
self.sync_key = dic['SyncCheckKey']
self.sync_key_str = '|'.join([str(keyVal['Key']) + '_' + str(keyVal['Val'])
for keyVal in self.sync_key['List']])
return dic
except:
return None
def addgroupListener(self,r):
"""
r:wechat return msg
"""
for msg in r['AddMsgList']:
#group
if msg['FromUserName'][:2]== '@@':
g_name = ''
for gp in self.group_list:
if gp['UserName'] == msg['FromUserName']:
g_name = gp['NickName']
if g_name != '':
content = msg['Content']
content = emoji_formatter(content)
text = content[content.rfind('<br/>')+5:]
userid = content[:content.rfind(':<br/>')]
print('李泽凯id:',userid)
for memb in self.group_members[msg['FromUserName']]:
if memb['UserName'] == userid:
u_name = memb['NickName']
content = '来自群聊%s的%s给你发来的消息:%s'%(g_name,u_name,text)
#@9cef97ff87f96b39ed5ea4c5a888926fe960f5368528f6dade37d0fdb679b8ee:<br/>hello
for fr in self.contact_list:
if fr['NickName'].find('李泽凯')!=-1:
f_id = fr['UserName']
self.send_msg_by_uid(content,f_id)
self.send_msg_by_uid(content,'filehelper')
def check_groupid(self,id_,name):
for group in self.group_list:
if group['NickName'].find('张杰超')!=-1:
if group['UserName'] == id_:
return true
return false
def get_groupid_by_name(self,name):
for group in self.group_list:
if group['NickName'].find(name)!=-1:
return group['UserName']
    def handle_msg(self,r):
        """
        Internal dispatcher for raw WeChat messages: classifies each message's
        origin, resolves the sender's display name, extracts the content and
        prints the assembled message dict.
        msg_type_id:
            0 -> Init
            1 -> Self
            2 -> FileHelper
            3 -> Group
            4 -> Contact
            5 -> Public
            6 -> Special
            99 -> Unknown
        :param r: raw webwxsync payload
        """
        self.addgroupListener(r)
        for msg in r['AddMsgList']:
            user = {'id': msg['FromUserName'], 'name': 'unknown'}
            """
            if msg['MsgType'] == 51 and msg['StatusNotifyCode'] == 4: # init message
                msg_type_id = 0
                user['name'] = 'system'
                #会获取所有联系人的username 和 wxid,但是会收到3次这个消息,只取第一次
                if self.is_big_contact and len(self.full_user_name_list) == 0:
                    self.full_user_name_list = msg['StatusNotifyUserName'].split(",")
                    self.wxid_list = re.search(r"username>(.*?)</username", msg["Content"]).group(1).split(",")
                    with open(os.path.join(self.temp_pwd,'UserName.txt'), 'w') as f:
                        f.write(msg['StatusNotifyUserName'])
                    with open(os.path.join(self.temp_pwd,'wxid.txt'), 'w') as f:
                        f.write(json.dumps(self.wxid_list))
                    #print "[INFO] Contact list is too big. Now start to fetch member list ."
                    #self.get_big_contact()
            """
            if msg['MsgType'] == 37: # friend request
                msg_type_id = 37
                pass
                # content = msg['Content']
                # username = content[content.index('fromusername='): content.index('encryptusername')]
                # username = username[username.index('"') + 1: username.rindex('"')]
                # #print u'[Friend Request]'
                # #print u'   Nickname:' + msg['RecommendInfo']['NickName']
                # #print u'   Extra message:'+msg['RecommendInfo']['Content']
                # # #print u'Ticket:'+msg['RecommendInfo']['Ticket'] # Ticket is needed when confirming the friend request
                # #print u'   WeChat id:'+username #users without an explicit id get an auto-generated one that cannot be found via search
            elif msg['FromUserName'] == self.my_account['UserName']: # Self
                msg_type_id = 1
                user['name'] = 'self'
            elif msg['ToUserName'] == 'filehelper': # File Helper
                msg_type_id = 2
                user['name'] = 'file_helper'
            elif msg['FromUserName'][:2] == '@@': # Group
                msg_type_id = 3
                user['name'] = self.get_contact_prefer_name(self.get_contact_name(user['id']))
            elif self.is_contact(msg['FromUserName']): # Contact
                msg_type_id = 4
                user['name'] = self.get_contact_prefer_name(self.get_contact_name(user['id']))
            elif self.is_public(msg['FromUserName']): # Public
                msg_type_id = 5
                user['name'] = self.get_contact_prefer_name(self.get_contact_name(user['id']))
            elif self.is_special(msg['FromUserName']): # Special
                msg_type_id = 6
                user['name'] = self.get_contact_prefer_name(self.get_contact_name(user['id']))
            else:
                msg_type_id = 99
                user['name'] = 'unknown'
            if not user['name']:
                user['name'] = 'unknown'
            # display names may contain HTML entities
            user['name'] = html.unescape(user['name'])
            if self.DEBUG and msg_type_id != 0:
                print( u'[MSG] %s:' % user['name'])
            content = self.extract_msg_content(msg_type_id, msg)
            message = {'msg_type_id': msg_type_id,
                       'msg_id': msg['MsgId'],
                       'content': content,
                       'to_user_id': msg['ToUserName'],
                       'user': user}
            print(message)
#三个name 里面有哪个就返回哪个
@staticmethod
def get_contact_prefer_name(name):
if name is None:
return None
if 'remark_name' in name:
return name['remark_name']
if 'nickname' in name:
return name['nickname']
if 'display_name' in name:
return name['display_name']
return None
#把三个name全部装入name
def get_contact_name(self, uid):
info = self.get_contact_info(uid)
if info is None:
return None
info = info['info']
name = {}
if 'RemarkName' in info and info['RemarkName']:
name['remark_name'] = info['RemarkName']
if 'NickName' in info and info['NickName']:
name['nickname'] = info['NickName']
if 'DisplayName' in info and info['DisplayName']:
name['display_name'] = info['DisplayName']
if len(name) == 0:
return None
else:
return name
def get_contact_info(self, uid):
return self.account_info['normal_member'].get(uid)
    def extract_msg_content(self, msg_type_id, msg):
        """
        Parse one raw message into a {'type': ..., 'data': ...} dict.
        content_type_id:
            0 -> Text
            1 -> Location
            3 -> Image
            4 -> Voice
            5 -> Recommend
            6 -> Animation
            7 -> Share
            8 -> Video
            9 -> VideoCall
            10 -> Redraw
            11 -> Empty
            99 -> Unknown
        :param msg_type_id: message-category id computed by handle_msg
        :param msg: raw message structure
        :return: parsed message dict
        """
        mtype = msg['MsgType']
        # unescape the HTML entities inside the content
        content = html.unescape(msg['Content'])
        msg_id = msg['MsgId']
        msg_content = {}
        if msg_type_id == 0:#init
            return {'type': 11, 'data': ''}
        elif msg_type_id == 2: # File Helper
            return {'type': 0, 'data': content.replace('<br/>', '\n')}
        else: # Self, Contact, Special, Public, Unknown
            pass
        """
        elif msg_type_id == 3: # 群聊
            sp = content.find('<br/>')
            uid = content[:sp]
            content = content[sp:]
            content = content.replace('<br/>', '')
            uid = uid[:-1]
            name = self.get_contact_prefer_name(self.get_contact_name(uid))
            if not name:
                name = self.get_group_member_prefer_name(self.get_group_member_name(msg['FromUserName'], uid))
            if not name:
                name = 'unknown'
            msg_content['user'] = {'id': uid, 'name': name}
        """
        # 'user' is only set by the disabled group branch above, so the prefix
        # is currently always empty
        msg_prefix = (msg_content['user']['name'] + ':') if 'user' in msg_content else ''
        """
        content_type_id:
        MsgType 说明
        1 文本消息
        3 图片消息
        34 语音消息
        37 好友确认消息
        40 POSSIBLEFRIEND_MSG
        42 共享名片
        43 视频消息
        47 动画表情
        48 位置消息
        49 分享链接
        50 VOIPMSG
        51 微信初始化消息
        52 VOIPNOTIFY
        53 VOIPINVITE
        62 小视频
        9999 SYSNOTICE
        10000 系统消息
        10002 撤回消息
        :param msg_type_id: 消息类型id
        :param msg: 消息结构体
        :return: 解析的消息 msg_content
        """
        if mtype == 1:
            # location messages arrive as a redirect URL inside a text message
            if content.find('http://weixin.qq.com/cgi-bin/redirectforward?args=') != -1:
                r = self.session.get(content)
                r.encoding = 'gbk'
                data = r.text
                pos = self.search_content('title', data, 'xml')
                msg_content['type'] = 1
                msg_content['data'] = pos
                msg_content['detail'] = data
                if self.DEBUG:
                    print ( '%s[Location] %s ' % (msg_prefix, pos))
            else:
                msg_content['type'] = 0
                if msg_type_id == 3 or (msg_type_id == 1 and msg['ToUserName'][:2] == '@@'): # Group text message
                    msg_infos = self.proc_at_info(content)
                    str_msg_all = msg_infos[0]
                    str_msg = msg_infos[1]
                    detail = msg_infos[2]
                    msg_content['data'] = str_msg_all
                    msg_content['detail'] = detail
                    msg_content['desc'] = str_msg
                else:
                    msg_content['data'] = content
                if self.DEBUG:
                    try:
                        print('tt')
                        #print '    %s[Text] %s' % (msg_prefix, msg_content['data'])
                    except UnicodeEncodeError:
                        print('tt')
                        #print '    %s[Text] (illegal text).' % msg_prefix
        elif mtype == 3:
            msg_content['type'] = 3
            msg_content['data'] = self.get_msg_img_url(msg_id)
            # NOTE(review): bytes.encode('hex') is Python-2-only; on Python 3
            # this raises AttributeError — confirm the intended runtime
            msg_content['img'] = self.session.get(msg_content['data']).content.encode('hex')
            if self.DEBUG:
                image = self.get_msg_img(msg_id)
                #print '    %s[Image] %s' % (msg_prefix, image)
        elif mtype == 34:
            msg_content['type'] = 4
            msg_content['data'] = self.get_voice_url(msg_id)
            # NOTE(review): same Python-2-only 'hex' codec as above — confirm
            msg_content['voice'] = self.session.get(msg_content['data']).content.encode('hex')
            if self.DEBUG:
                voice = self.get_voice(msg_id)
                #print '    %s[Voice] %s' % (msg_prefix, voice)
        elif mtype == 37:
            msg_content['type'] = 37
            msg_content['data'] = msg['RecommendInfo']
            if self.DEBUG:
                print('tt')
                #print '    %s[useradd] %s' % (msg_prefix,msg['RecommendInfo']['NickName'])
        elif mtype == 42:
            # shared contact card
            msg_content['type'] = 5
            info = msg['RecommendInfo']
            msg_content['data'] = {'nickname': info['NickName'],
                                   'alias': info['Alias'],
                                   'province': info['Province'],
                                   'city': info['City'],
                                   'gender': ['unknown', 'male', 'female'][info['Sex']]}
            if self.DEBUG:
                print('tt')
                #print '    %s[Recommend]' % msg_prefix
                #print '    -----------------------------'
                #print '    | NickName: %s' % info['NickName']
                #print '    | Alias: %s' % info['Alias']
                #print '    | Local: %s %s' % (info['Province'], info['City'])
                #print '    | Gender: %s' % ['unknown', 'male', 'female'][info['Sex']]
                #print '    -----------------------------'
        elif mtype == 47:
            # animated sticker
            msg_content['type'] = 6
            msg_content['data'] = self.search_content('cdnurl', content)
            if self.DEBUG:
                print( '    %s[Animation] %s' % (msg_prefix, msg_content['data']))
        elif mtype == 49:
            # shared link / app message
            msg_content['type'] = 7
            if msg['AppMsgType'] == 3:
                app_msg_type = 'music'
            elif msg['AppMsgType'] == 5:
                app_msg_type = 'link'
            elif msg['AppMsgType'] == 7:
                app_msg_type = 'weibo'
            else:
                app_msg_type = 'unknown'
            msg_content['data'] = {'type': app_msg_type,
                                   'title': msg['FileName'],
                                   'desc': self.search_content('des', content, 'xml'),
                                   'url': msg['Url'],
                                   'from': self.search_content('appname', content, 'xml'),
                                   'content': msg.get('Content') # some public accounts send 3-4 links with one big image at once; 'Url' alone only yields the first link, Content holds all of them
                                   }
            if self.DEBUG:
                print ('%s[Share] %s' % (msg_prefix, app_msg_type))
                #print '    --------------------------'
                #print '    | title: %s' % msg['FileName']
                #print '    | desc: %s' % self.search_content('des', content, 'xml')
                #print '    | link: %s' % msg['Url']
                #print '    | from: %s' % self.search_content('appname', content, 'xml')
                #print '    | content: %s' % (msg.get('content')[:20] if msg.get('content') else "unknown")
                #print '    --------------------------'
        elif mtype == 62:
            # short video
            msg_content['type'] = 8
            msg_content['data'] = content
            if self.DEBUG:
                print('tt')
                #print '    %s[Video] Please check on mobiles' % msg_prefix
        elif mtype == 53:
            # VOIP invite
            msg_content['type'] = 9
            msg_content['data'] = content
            if self.DEBUG:
                print('tt')
                #print '    %s[Video Call]' % msg_prefix
        elif mtype == 10002:
            # recalled message
            msg_content['type'] = 10
            msg_content['data'] = content
            if self.DEBUG:
                print('tt')
                #print '    %s[Redraw]' % msg_prefix
        elif mtype == 10000: # unknown, maybe red packet, or group invite
            msg_content['type'] = 12
            msg_content['data'] = msg['Content']
            if self.DEBUG:
                print('tt')
                #print '    [Unknown]'
        elif mtype == 43:
            # video message
            msg_content['type'] = 13
            msg_content['data'] = self.get_video_url(msg_id)
            if self.DEBUG:
                print('tt')
                #print(.*)
        else:
            msg_content['type'] = 99
            msg_content['data'] = content
            if self.DEBUG:
                print('tt')
                #print '    %s[Unknown]' % msg_prefix
        return msg_content
@staticmethod
def proc_at_info(msg):
if not msg:
return '', []
segs = msg.split(u'\u2005')
str_msg_all = ''
str_msg = ''
infos = []
if len(segs) > 1:
for i in range(0, len(segs) - 1):
segs[i] += u'\u2005'
pm = re.search(u'@.*\u2005', segs[i]).group()
if pm:
name = pm[1:-1]
string = segs[i].replace(pm, '')
str_msg_all += string + '@' + name + ' '
str_msg += string
if string:
infos.append({'type': 'str', 'value': string})
infos.append({'type': 'at', 'value': name})
else:
infos.append({'type': 'str', 'value': segs[i]})
str_msg_all += segs[i]
str_msg += segs[i]
str_msg_all += segs[-1]
str_msg += segs[-1]
infos.append({'type': 'str', 'value': segs[-1]})
else:
infos.append({'type': 'str', 'value': segs[-1]})
str_msg_all = msg
str_msg = msg
return str_msg_all.replace(u'\u2005', ''), str_msg.replace(u'\u2005', ''), infos
def is_contact(self, uid):
for account in self.contact_list:
if uid == account['UserName']:
return True
return False
def is_public(self, uid):
for account in self.public_list:
if uid == account['UserName']:
return True
return False
def is_special(self, uid):
for account in self.special_list:
if uid == account['UserName']:
return True
return False
def proc_msg(self):
#keep synchronisation through checking out on the retcode
self.test_sync_check()
self.status = 'loginsuccess' #WxbotManage使用
while True:
if self.status == 'wait4loginout': #WxbotManage使用
return
check_time = time.time()
try:
[retcode, selector] = self.sync_check()
# #print '[DEBUG] sync_check:', retcode, selector
if retcode == '1100': # 从微信客户端上登出
break
elif retcode == '1101': # 从其它设备上登了网页微信
break
elif retcode == '0':
if selector == '2': # 有新消息
r = self.sync()
if r is not None:
self.handle_msg(r)
elif selector == '3': # 未知
r = self.sync()
if r is not None:
self.handle_msg(r)
elif selector == '4': # 通讯录更新
r = self.sync()
if r is not None:
self.get_contact()
elif selector == '6': # 可能是红包
r = self.sync()
if r is not None:
self.handle_msg(r)
elif selector == '7': # 在手机上操作了微信
r = self.sync()
if r is not None:
self.handle_msg(r)
elif selector == '0': # 无事件
r = self.sync()
if r is not None:
self.handle_msg(r)
else:
#print '[DEBUG] sync_check:', retcode, selector
r = self.sync()
if r is not None:
self.handle_msg(r)
else:
print('[DEBUG] sync_check:', retcode, selector)
time.sleep(10)
#self.schedule()
except Exception as e:
print ('[ERROR] Except in proc_msg: '%e)
#print format_exc()
check_time = time.time() - check_time
if check_time < 0.8:
time.sleep(1 - check_time)
'''send text to someone'''
def get_user_id(self, name):
if name == '':
return None
#name = self.to_unicode(name)
for contact in self.contact_list:
#如果contact里面有remarkname元素
if 'RemarkName' in contact and re.match(r'(.*)'+name+'.*',contact['RemarkName']):
return contact['UserName']
elif 'NickName' in contact and re.match(r'(.*)'+name+'.*',contact['NickName']):
return contact['UserName']
elif 'DisplayName' in contact and re.match(r'(.*)'+name+'.*',contact['DisplayName']):
return contact['UserName']
for group in self.group_list:
if 'RemarkName' in group and group['RemarkName'] == name:
return group['UserName']
if 'NickName' in group and group['NickName'] == name:
return group['UserName']
if 'DisplayName' in group and group['DisplayName'] == name:
return group['UserName']
return ''
    #wcccy account changed evry time need dynamically set
    def send_msg_by_uid(self, word, dst='FileHelper'):
        """POST a text message `word` to user id `dst`.

        Returns True when the server reports Ret == 0, False when the
        connection fails.
        NOTE(review): `ConnectionError`/`ReadTimeout` must come from the
        requests imports at module level — confirm against the file header.
        """
        url = self.base_uri + '/webwxsendmsg?pass_ticket=%s' % self.pass_ticket
        # client-side message id: millisecond timestamp + random digits
        msg_id = str(int(time.time() * 1000)) + str(random.random())[:5].replace('.', '')
        #word = word.encode('utf-8') if isinstance(word,str) else word
        params = {
            'BaseRequest': self.base_request,
            'Msg': {
                "Type": 1,
                "Content": word,
                "FromUserName": self.my_account['UserName'],
                "ToUserName": dst,
                "LocalID": msg_id,
                "ClientMsgId": msg_id
            }
        }
        headers = {'content-type': 'application/json; charset=UTF-8'}
        # ensure_ascii=False keeps CJK text intact in the JSON body
        data = json.dumps(params, ensure_ascii=False).encode('utf8')
        try:
            r = self.session.post(url, data=data, headers=headers)
        except (ConnectionError, ReadTimeout):
            return False
        dic = r.json()
        print(dic)
        return dic['BaseResponse']['Ret'] == 0
def send_msg(self, name, word, isfile=False):
uid = self.get_user_id(name)
if uid is not None:
if isfile:
with open(word, 'r') as f:
result = True
for line in f.readlines():
line = line.replace('\n', '')
#print '-> ' + name + ': ' + line
if self.send_msg_by_uid(line, uid):
pass
else:
result = False
time.sleep(1)
return result
else:
word = self.to_unicode(word)
if self.send_msg_by_uid(word, uid):
return True
else:
return False
else:
if self.DEBUG:
print ('[ERROR] This user does not exist .')
return True
def auto_login(self):
try:
#get uuid
self.getuuid()
#generate qrcode
self.gen_qrcode(os.path.join(self.temp_dir,'wxqr.png'))
print('[INFO] Please use WeChat to scan the QR code .')
#check scanning
return_code = self.check_scanning()
if return_code != SUCCESS:
print('ERROR:wx login falled error code:%s'%(return_code))
self.status = 'logout'
return
#get uid key...userinfo
if self.getUsrInfo():
print('wx successfully login')
else:
self.status = 'logout'
return
#use the gotten userinfo to initailze syncronisation(get syncro-key)
if self.init_sync():
print('wx successfully iniated sync')
else:
self.status = 'logout'
return
self.status_notify()
if self.get_contact():
print ('[INFO] Get %d contacts' % len(self.contact_list))
print ('[INFO] Start to process messages .')
self.proc_msg()
#out of loop
self.status = 'loginout'
except Exception as e:
print('ERROR:error code'%e)
if __name__ == '__main__':
    # manual entry point: create the bot and run the interactive QR login flow
    bot = KITBot()
    bot.auto_login()
|
#
# onlinestore-multi
# simple online store application
# (c) smit thakakar smitthakkar96@gmail.com
# 2015
# GPL
#
################################ IMPORT ################################
import os
CURDIR = os.path.abspath(os.path.dirname(__file__))
PS = os.path.sep
import sys
sys.path.append(CURDIR)
import re
import ConfigParser
import time
import datetime
import decimal
import urlparse
import random
import cStringIO
#
try:
from hashlib import md5
except ImportError:
from md5 import md5
#
from PIL import Image
from PIL import ImageFont
from PIL import ImageDraw
from PIL import ImageFilter
#
# Optional IP -> country lookup: prefer the GeoIP C binding, fall back to
# pure-python pygeoip with a local GeoIP.dat, otherwise disable (fgeo = None).
fgeo = None
try:
    import GeoIP
    ogeo = GeoIP.new(GeoIP.GEOIP_MEMORY_CACHE)
    fgeo = ogeo.country_name_by_addr
except ImportError:
    try:
        import pygeoip
        ogeo = pygeoip.GeoIP(CURDIR + PS + 'GeoIP.dat', pygeoip.MEMORY_CACHE)
        fgeo = ogeo.country_name_by_addr
    except Exception:
        # Fix: narrowed from a bare except. A missing module or missing
        # GeoIP.dat still degrades to "lookup disabled", but fatal signals
        # such as KeyboardInterrupt are no longer swallowed.
        fgeo = None
#
import web
import yaml
from HTMLParser import HTMLParser
import sqlite3
import messages as m
reload(m)
class StripHTMLParser(HTMLParser):
    """Collects the text nodes of an HTML document, dropping all tags.

    Feed markup via .feed(); the plain-text fragments accumulate in
    self.text (a list of strings).
    """
    def __init__(self):
        # Fix: call the base initializer instead of only self.reset();
        # HTMLParser.__init__ is the supported way to set up a subclass
        # (it performs the full parser initialization itself).
        HTMLParser.__init__(self)
        self.text = []
    def handle_data(self, data):
        # invoked by the parser for every text node
        self.text.append(data)
############################# MODULE PARAM #############################
# web.py runtime configuration: production mode, custom session cookie
# name, sessions that survive browser restarts, 30-minute timeout
web.config.debug = False
web.config.session_parameters['cookie_name'] = 'SESSION'
web.config.session_parameters['ignore_expiry'] = True
web.config.session_parameters['timeout'] = 1800 #30 minute
############################ URLS / WEB APP ############################
# Route table: (url-pattern, handler-class-name) pairs consumed by
# web.application below. The final '/(.*)' entry catches everything else
# and hands it to the 'redir' handler.
URLS = (
    '/', 'index',
    '/browser/set/(.*)', 'browser_set',
    '/captcha', 'captcha',
    '/product', 'product',
    '/lang/set/(.*)', 'lang_set',
    '/fs/(\d+)', 'fs',
    '/cart', 'cart',
    '/cart/add', 'cart_add',
    '/cart/del', 'cart_del',
    '/cart/empty', 'cart_empty',
    '/cart/checkout', 'cart_checkout',
    '/cart/checkout/done', 'cart_checkout_done',
    '/payment/confirm', 'payment_confirm',
    '/contact', 'contact',
    '/login', 'login',
    '/logout', 'logout',
    '/news', 'news',
    '/faq', 'faq',
    '/news/hide', 'news_hide',
    '/go/(\d+)', 'go',
    '/admin', 'admin',
    '/passwd', 'passwd',
    '/profile', 'profile',
    '/promote', 'promote',
    '/admin/fs', 'admin_fs',
    '/admin/fs/del', 'admin_fs_del',
    '/admin/fs/upload', 'admin_fs_upload',
    '/admin/fs/view/(\d+)', 'admin_fs_view',
    '/admin/system', 'admin_system',
    '/admin/product', 'admin_product',
    '/admin/product/category', 'admin_product_category',
    '/admin/product/category/del', 'admin_product_category_del',
    '/admin/product/category/save', 'admin_product_category_save',
    '/admin/product/group', 'admin_product_group',
    '/admin/product/group/del', 'admin_product_group_del',
    '/admin/product/group/save', 'admin_product_group_save',
    '/admin/product/group/edit/(\d+)', 'admin_product_group_edit',
    '/admin/product/item', 'admin_product_item',
    '/admin/product/item/del', 'admin_product_item_del',
    '/admin/product/item/save', 'admin_product_item_save',
    '/admin/bank', 'admin_bank',
    '/admin/bank/del', 'admin_bank_del',
    '/admin/bank/save', 'admin_bank_save',
    '/admin/paypal', 'admin_paypal',
    '/admin/paypal/del', 'admin_paypal_del',
    '/admin/paypal/save', 'admin_paypal_save',
    '/admin/yahoo', 'admin_yahoo',
    '/admin/yahoo/del', 'admin_yahoo_del',
    '/admin/yahoo/save', 'admin_yahoo_save',
    '/admin/link', 'admin_link',
    '/admin/link/del', 'admin_link_del',
    '/admin/link/save', 'admin_link_save',
    '/admin/news', 'admin_news',
    '/admin/news/del', 'admin_news_del',
    '/admin/news/save', 'admin_news_save',
    '/admin/news/edit/(\d+)', 'admin_news_edit',
    '/admin/faq', 'admin_faq',
    '/admin/faq/del', 'admin_faq_del',
    '/admin/faq/save', 'admin_faq_save',
    '/admin/faq/edit/(\d+)', 'admin_faq_edit',
    '/admin/invoice', 'admin_invoice',
    '/admin/invoice/view/(\d+)', 'admin_invoice_view',
    '/admin/invoice/approval', 'admin_invoice_approval',
    '/admin/stat', 'admin_stat',
    '/admin/doc', 'admin_doc',
    '/admin/redir', 'admin_redir',
    '/admin/redir/del', 'admin_redir_del',
    '/admin/redir/save', 'admin_redir_save',
    '/admin/go', 'admin_go',
    '/admin/go/del', 'admin_go_del',
    '/admin/go/save', 'admin_go_save',
    '/admin/go/edit/(\d+)', 'admin_go_edit',
    '/(.*)', 'redir',
    )
wapp = web.application(URLS, globals())
############################## DATABASE ################################
# Bootstrap the sqlite database: create it from data.sql on first run,
# flag db_error when it cannot be created or is not writable, then wrap
# it with web.py (db stays None when anything went wrong).
DATA_SQL_DEFAULT = 'data.sql'
DATA_FILE_DEFAULT = 'onlinestore-multi.db'
DATA_SQL = os.path.join(CURDIR, DATA_SQL_DEFAULT)
DATA_FILE = os.path.join(CURDIR, DATA_FILE_DEFAULT)
db_error = False
if not os.path.exists(DATA_FILE) or not os.path.getsize(DATA_FILE):
    try:
        test_db = sqlite3.connect(DATA_FILE)
        test_db.executescript(open(DATA_SQL).read())
        test_db.close()
    except Exception:
        # Fix: narrowed from a bare except — schema/IO failures still set
        # the flag, but KeyboardInterrupt/SystemExit now propagate
        db_error = True
else:
    if not os.access(DATA_FILE, os.W_OK):
        db_error = True
db = None
if not db_error:
    try:
        conn = web.database(dbn = 'sqlite', db = DATA_FILE)
    except Exception:
        # Fix: narrowed from a bare except, same rationale as above
        conn = None
    if conn and hasattr(conn, 'query'):
        db = conn
    else:
        db = None
    del conn
##################### FROM SQLITEBOY (UNMODIFIED) ######################
def sqliteboy_chunk(s, n, separator, justify, padding):
    """Split `s` into `n`-character groups joined by `separator`.

    When len(s) is not a multiple of n, the string is first padded with the
    first character of `padding` (space when empty): left-justified when
    justify == 0, right-justified otherwise. Returns `s` unchanged when n
    is falsy/< 1 or `s` is empty.
    """
    s = str(s)
    separator = str(separator)
    padding = str(padding)

    if not n or not s or n < 1:
        return s

    pad = padding[0] if padding else ' '
    justify = justify or 0

    remainder = len(s) % n
    if remainder:
        target = len(s) + (n - remainder)
        s = s.ljust(target, pad) if justify == 0 else s.rjust(target, pad)

    pieces = [s[pos:pos + n] for pos in range(0, len(s), n)]
    return separator.join(pieces)
########################### IMPORTANT FUNCTION #########################
def query(q, a = {}):
    """Run SQL `q` against the module-level db with bound variables `a`.

    Results are materialized into a list when possible; otherwise the raw
    result object is returned as-is.
    NOTE: the mutable default for `a` is kept for interface compatibility;
    callers must not mutate it.
    """
    global db
    result = db.query(q, vars = a)
    try:
        return list(result)
    except:
        return result
def rget(r, key, count=1, index=0, default='', to_yaml=False):
    """Safely pull r[index][key] from a query-result list.

    Returns `default` when the row/key is missing or the value is falsy.
    With to_yaml=True the value is additionally parsed as YAML, falling
    back to the raw value on parse errors.

    Fix: membership is now tested with the ``in`` operator instead of the
    Python-2-only dict.has_key(), which raises AttributeError on Python 3
    (the surrounding try/except silently turned every lookup into
    `default` there).
    """
    try:
        test = r and len(r) >= count and key in r[index]
    except:
        test = False
    #
    if test and r[index].get(key):
        ret = r[index].get(key)
    else:
        ret = default
    #
    if to_yaml:
        try:
            # NOTE(security): yaml.load without an explicit SafeLoader can
            # construct arbitrary objects; prefer yaml.safe_load when the
            # stored values may be untrusted.
            ret2 = yaml.load(ret)
        except:
            ret2 = ret
    else:
        ret2 = ret
    #
    return ret2
def pget(option, default='', strip=True, callback=None):
    """Read configuration value `option` from the ms_config table.

    The value is stripped by default; when `callback` is given it is
    invoked as callback(id=value, pget_helper=True) and its result is
    returned instead.
    """
    rows = query('select value from ms_config where param=$p', {'p': option})
    value = rget(rows, 'value', default=default)

    if strip and hasattr(value, 'strip'):
        value = value.strip()

    if callback:
        value = callback(id=value, pget_helper=True)

    return value
############################### CONSTANT ###############################
VERSION = '1.03'
NAME = 'onlinestore-multi'
# default number of decimal places for money formatting (see nf())
PRECISION = 2
TEMPLATE_DIR = CURDIR + PS + 'template'
DOC_ADMIN = CURDIR + PS + 'README.txt'
DOMAIN = ''
BASEURL_DEFAULT = '/store'
HOME_DEFAULT = '/product'
TEMPLATE_DEFAULT = 'default'
LANG_DEFAULT = 'en_US'
MAIL_DEFAULT = ''
FORCE_SINGLE_CURRENCY = True
# presumably column widths (chars) for a plain-text cart/invoice table —
# confirm against the rendering code further down the file
CWIDTH = {'product': 42, 'qty': 8, 'price': 17, 'vat': 16, 'subtotal': 18}
CSPACE = ' '
CART_ADD_MAX = 10
# URL group reused by admin-area handling (exact gating logic lives elsewhere)
ADMIN_URL_GROUP = ('/profile', '/passwd', '/promote')
REGEX_EMAIL = r'^[\w\.\+-]+@[\w\.-]+\.[a-zA-Z]+$'
################################ GLOBAL ################################
sess = None
# web.py session backed by the "sessions" DB table; only created when the
# database bootstrapped cleanly above
if not db_error:
    sess = web.session.Session(wapp, web.session.DBStore(
        db, 'sessions'),
        initializer={
            'captcha': '',
            'p' : {},
            'lang' : '',
            'c': {},
            'u': None,
            'log': None,
            'co': {},
            'newsread': False,
            'msg': [],
            'browserclass': '',
            'fullpath': None,
        }
    )
msgs = ''
menu = ''
mobile = ''
#new res hack, eliminate app_global_conf.py
#as of 16-October-2012
#quick hack, once again, put config in database
#as of 22-October-2012
res_fix = ['cart', 'user_content', 'blog']
# feature flags and limits formerly kept in app_global_conf.py
res = {
    'cart' : True,
    'user_content' : True,
    'blog' : True,
    'promote' : True,
    'payments' : [1,2,3],
    'value' : 200,
    'max_product_category' : 100,
    'max_product' : 500,
    'max_file_size' : 600 * 1024,
    'max_files' : 600,
}
#quick hack as of 18-October-2012
FORCE_PROMOTE = res['promote']
PAYMENT_TYPE = res['payments']
# presumably [start, end] render timestamps filled per request — confirm
rendertime = [0, 0]
############################### FUNCTION ###############################
def is_valid_email(email):
    '''Return True when email matches REGEX_EMAIL (regex check, 21-April-2013).'''
    #previous implementation was based on codes from internet (around 2010)
    return re.match(REGEX_EMAIL, email) is not None
def detect_ua(ua):
    '''Wrap the raw user-agent string in the dict shape the templates expect.'''
    return {'mobile_document': ua}
def number_format(number, localeset='', places=0):
    '''from sqliteboy, modified

    Format `number` with the locale's thousands separator and decimal
    point, truncated/zero-padded to `places` decimals.  Non-numeric input
    is returned unchanged (stringified).
    '''
    n = str(number)
    decimals = places
    decimal_point = m.t(m.NUMBER_FORMAT, localeset)['decimal_point']
    thousands_sep = m.t(m.NUMBER_FORMAT, localeset)['thousands_separator']
    #
    neg = False
    try:
        f = float(n)
        if f < 0:
            neg = True
    except:
        #not parseable as a number: hand the original back to the caller
        return n
    #
    if 'e' in n.lower():
        #expand scientific notation into plain decimal form first
        efmt = '%%.%sf' %len(n)
        n = efmt %float(n)
    #
    dec = decimals
    if not isinstance(dec, int) or dec < 0:
        dec = 0
    #
    nn = ''
    dd = ''
    if '.' in n: #float
        nn, dd = n.split('.')
    else:
        nn = n
    #sign handled separately via `neg` below
    nn = nn.replace('-', '')
    nn = nn.replace('+', '')
    nn = nn.strip()
    dd = dd.strip()
    #
    if dd:
        #truncate or right-pad the fractional digits to exactly `dec`
        if dec <= len(dd):
            dd = dd[:dec]
        else:
            dd = dd.ljust(dec, '0')
    #
    #sqliteboy_chunk groups the integer digits from the right in threes
    nn = sqliteboy_chunk(nn, 3, thousands_sep, 1, '').strip()
    dd = dd.strip()
    #
    if neg:
        nn = '-' + nn
    #
    if dd:
        ret = nn + decimal_point + dd
    else:
        ret = nn
    #
    return ret
def now(format='%Y-%m-%d %H:%M:%S'):
    '''Return the current local time rendered with time.strftime(format).'''
    stamp = time.strftime(format)
    return stamp
def dtf(t):
    '''Re-render timestamp string t from the default locale's datetime format into the session language's format.'''
    t0 = time.strptime(t, m.t(m.DATETIME_FORMAT, 'default')['datetime'])
    return time.strftime(m.t(m.DATETIME_FORMAT, sess.lang)['datetime'], t0)
def nf(number, decimal=PRECISION):
    '''Locale-aware number formatting for the current session language.'''
    return number_format(number, sess.lang, decimal)
def striphtml(text):
    '''Strip HTML tags from text, returning only the concatenated text nodes.'''
    parser = StripHTMLParser()
    parser.feed(text)
    return ''.join(parser.text)
def stripsall(text, remove, ignorecase=True):
    '''Repeatedly strip `remove` from both ends of text until nothing changes.

    With ignorecase=True, every occurrence of `remove` in text is first
    rewritten to lowercase so the case-sensitive web.utils.strips can
    match it.
    '''
    if ignorecase:
        premove = remove.lower()
        pat = re.compile(premove, re.IGNORECASE)
        text = pat.sub(premove, text)
    else:
        premove = remove
        text = text
    #
    ret = text
    while True:
        newtext = ret
        ret = web.utils.strips(newtext, premove)
        if ret == newtext:
            #fixed point reached: no further occurrence at either end
            break
    #
    return ret
def sendmail(to, subject, message, reply_to=MAIL_DEFAULT):
    '''Send mail via the configured SMTP server; on failure, retry once with
    web.py's default transport (empty SMTP settings).

    The sender address is MAIL_DEFAULT; User-Agent/X-Mailer headers carry
    the application name and version.
    '''
    #
    UA = '%s v%s' %(NAME, VERSION)
    XM = UA
    #
    try:
        web.config.smtp_server = pget('mail_smtp')
        web.config.smtp_username = pget('mail_user')
        web.config.smtp_password = pget('mail_pass')
        #
        web.sendmail(MAIL_DEFAULT, to, subject, message,
            headers=({'User-Agent': UA, 'X-Mailer': XM, 'Reply-To': reply_to})
            )
    except:
        #best-effort fallback: clear SMTP config and let web.py use its default
        web.config.smtp_server = ''
        web.config.smtp_username = ''
        web.config.smtp_password = ''
        #
        web.sendmail(MAIL_DEFAULT, to, subject, message,
            headers=({'User-Agent': UA, 'X-Mailer': XM, 'Reply-To': reply_to})
            )
def mlget(field, default=m.COUNTRY['default'][0], all=False, get_non_empty=True):
    '''Resolve a multi-language field (YAML-encoded dict of langcode->text).

    Returns the translation for sess.lang, falling back to `default`'s
    language, then (when get_non_empty) to any non-empty translation.
    With all=True returns the whole dict, padded with '' for every known
    language code.  Non-dict YAML payloads are returned as-is; empty or
    unparsable input yields ''.
    '''
    global sess
    #
    if not field:
        return ''
    #
    try:
        d = yaml.load(field)
    except Exception:
        return ''
    #
    if not type(d) == type({}):
        return d
    #
    langs = [x[0] for x in m.COUNTRY.values()]
    #
    if not all:
        #'in d' replaces dict.has_key / 'in d.keys()' (Python 3 compatible)
        if sess.lang in d and d[sess.lang]:
            return d[sess.lang]
        else:
            if default in d and d[default]:
                return d[default]
            else:
                if get_non_empty:
                    #last resort: first non-empty translation found
                    ne = ''
                    for i in d.keys():
                        if d[i]:
                            ne = d[i]
                            break
                    #
                    return ne
                else:
                    return ''
    else:
        for i in langs:
            if i not in d:
                d[i] = ''
        return d
    #
    #error, should not reach this
    return ''
def ub(url):
    '''Prefix url with the configured base URL (url_base), unless the base is the site root.'''
    base = pget('url_base', default=BASEURL_DEFAULT)
    if base == '/':
        return url
    return base + url
def tget(page, globals={}):
    '''Return a web.py template renderer for `page`.

    Prefers the user-configured template directory, falls back to the
    default template, and also falls back when the template's declared
    owner does not include this DOMAIN.  Returns None when neither
    directory contains the page.
    '''
    p = page + '.html'
    tdir = TEMPLATE_DIR
    tdd = tdir + PS + TEMPLATE_DEFAULT
    tdc = tdd + PS + p
    tud = tdir + PS + pget('template', default=TEMPLATE_DEFAULT)
    tuc = tud + PS + p
    if os.path.exists(tuc):
        tc = tud
    elif os.path.exists(tdc):
        tc = tdd
    else:
        return None
    #
    #ownership check: a template restricted to other domains is not used
    info = tinfo(pget('template'))
    if not info:
        tc = tdd
    else:
        try:
            owner = info['general']['owner']
        except:
            owner = ''
        if owner and DOMAIN.lower() not in owner:
            tc = tdd
    #
    ret = web.template.render(tc, globals=globals)
    return ret
def cidget():
    '''Build a pseudo-unique cart id: <invoice autoincrement seq>-<time-derived suffix>.'''
    q = '''
    select seq from sqlite_sequence where
    name='tr_invoice_header';
    '''
    seq = rget(query(q), 'seq', default=0)
    #last five digits of time.time() with the decimal point removed
    suffix = str(time.time())[-5:].replace('.','')
    #
    return '%d-%s' %(seq, suffix)
def dnews(id=0, read_session=True):
    '''Fetch news rows (one by id, or the latest up to the news_max config).

    With read_session=True an empty list is returned once the session is
    marked as having read the news.  Each row's date is locale-formatted
    and the multi-language fields become (all-translations, current) tuples.
    '''
    global sess
    #
    ret = []
    #
    if read_session:
        if sess.newsread:
            return ret
    #
    max = pget('news_max')
    try:
        imax = int(max)
    except:
        #unset/non-numeric limit: fetch everything
        imax = None
    #
    if id:
        q = 'select * from tr_news where id=$id'
        a = {'id': id}
    else:
        if imax:
            q = 'select * from tr_news order by id desc limit $limit'
            a = {'limit': imax}
        else:
            q = 'select * from tr_news order by id desc'
            a = {}
    #
    r = query(q, a)
    for i in r:
        i.date_news = dtf(i.date_news)
        i.title = (mlget(i.title, all=True), mlget(i.title))
        i.description = (mlget(i.description, all=True), mlget(i.description))
        i.news = (mlget(i.news, all=True), mlget(i.news))
        ret.append(i)
    #
    return ret
def dyahoo(id=0, field='*', format=True, format_type=1):
    '''Yahoo! Messenger accounts.

    With format=True each row's account is replaced by
    (account, (account, type, presence_image_url)); format_type forces a
    single presence-icon style for every account.
    '''
    if format: field='*'
    if id:
        q = 'select $field from ms_yahoo where id=$id order by id'
        a = {'id': id, 'field': web.SQLLiteral(field)}
    else:
        q = 'select $field from ms_yahoo order by id'
        a = {'field': web.SQLLiteral(field)}
    r = query(q, a)
    for i in r:
        a = i.account
        t = i.type
        if format_type:
            t = format_type
        #legacy Yahoo presence-indicator image URL
        y = 'http://opi.yahoo.com/online?u=%s&m=g&t=%s' %(a, t)
        aty = ()
        if format:
            aty = (a, t, y)
        #
        a2 = (a, aty)
        i.account = a2
    #
    return r
def dpro():
    '''Build the full product catalog grouped by category.

    Returns a list of ((category_id, category_name), products) where each
    product entry is (product_row, variant_rows).  Categories with no
    translated name or no products are skipped; variant stock is reduced
    by the quantity already in the session cart and capped at CART_ADD_MAX.
    '''
    ret = []
    q = 'select id,name from ms_category where active=1 order by priority desc'
    r = query(q)
    for i in r:
        #check 1: skip categories whose name has no translation at all
        nameall = mlget(i.name, all=True)
        if disblank(nameall, False): continue
        #
        #check 2: skip categories with no products
        q_cat = 'select id from ms_product where category_id=$category_id'
        a_cat = {'category_id': i.id}
        r_cat = query(q_cat, a_cat)
        if not r_cat: continue
        #
        #
        i.name = mlget(i.name)
        #
        temp = []
        q2 = 'select id,name,description,full_info,file_id as file from ms_product where active=1 and category_id=$cid order by priority desc'
        a2 = {'cid': i.id}
        r2 = query(q2, a2)
        for j in r2:
            #
            #resolve the product image: (download-url, file-row) or []
            q_file = 'select id,name,type from ms_file where id=$file_id'
            a_file = {'file_id': j.file}
            r_file = query(q_file, a_file)
            if r_file:
                finfo = r_file[0]
                j.file = (ub('/fs' + '/' + str(j.file)), finfo)
            else:
                finfo = []
                j.file = []
            #
            j.name = mlget(j.name)
            j.description = mlget(j.description)
            j.full_info = mlget(j.full_info)
            #
            #with stock checking enabled, hide out-of-stock variants
            if pget('cart_check_stock') == '1':
                q3 = 'select id,name,stock,price,currency_id,variant_file_id from ms_product_variant where active=1 and product_id=$pid and stock > 0 order by id'
            else:
                q3 = 'select id,name,stock,price,currency_id,variant_file_id from ms_product_variant where active=1 and product_id=$pid order by id'
            a3 = {'pid': j.id}
            r3 = query(q3, a3)
            for k in r3:
                k.name = mlget(k.name)
                #
                k.price = nf(k.price)
                if FORCE_SINGLE_CURRENCY:
                    k.currency = pget('currency', callback=dcur).csymbol
                else:
                    q4 = 'select csymbol from ms_currency where id=$currid'
                    a4 = {'currid': k.currency_id}
                    r4 = query(q4, a4)
                    k.currency = r4[0].csymbol
                #
                #stock check: subtract what is already in the cart, cap at CART_ADD_MAX
                if pget('cart_check_stock') == '1':
                    if sess.c.has_key(k.id) and sess.c[k.id]:
                        in_cart = sess.c[k.id]
                    else:
                        in_cart = 0
                    k.stock = k.stock - in_cart
                    if k.stock > CART_ADD_MAX:
                        k.stock = CART_ADD_MAX
                else:
                    k.stock = CART_ADD_MAX
            #
            #
            temp.append((j, r3))
        #
        ret.append( ( (i.id, i.name) , temp) )
    #
    return ret
def dcart():
    '''Build the cart view from sess.c ({variant_id: quantity}).

    Returns (cart_rows, (total, formatted_total), payment_types, currency)
    where each cart row is (variant_id, csymbol, "product - variant",
    (qty, fmt), (price, fmt), (tax, fmt), (subtotal, fmt)).
    '''
    global db
    global sess
    #
    carts = []
    #NOTE: keys()/sort() relies on Python 2 dict.keys() returning a list
    keys = sess.c.keys()
    keys.sort()
    #
    total = 0
    for i in keys:
        vid = i
        qty = sess.c[i]
        sqty = nf(qty, 0)
        #
        q = '''
        select name from ms_product where id in
        (select product_id from ms_product_variant where id=$vid)
        '''
        a = {'vid': vid}
        r = query(q, a)
        product = rget(r, 'name')
        product = mlget(product)
        #
        q = '''
        select csymbol from ms_currency where id in
        (select currency_id from ms_product_variant where id=$vid)
        '''
        a = {'vid': vid}
        r = query(q, a)
        csymbol = rget(r, 'csymbol')
        #
        q = 'select name,price,taxratio from ms_product_variant where id=$vid'
        a = {'vid': vid}
        r = query(q, a)
        variant = rget(r, 'name')
        variant = mlget(variant)
        price = rget(r, 'price', default=0)
        sprice = nf(price)
        #tax amount per unit = taxratio * price
        tax = rget(r, 'taxratio', default=0) * price
        stax = nf(tax)
        #
        pv = '%s - %s' %(product, variant)
        #
        subt = qty * (price+tax)
        ssubt = nf(subt)
        #
        temp = (vid, csymbol, pv, (qty, sqty), (price, sprice), (tax, stax), (subt, ssubt))
        carts.append(temp)
        #
        total += subt
    #
    stotal = nf(total)
    #
    #only payment types enabled via PAYMENT_TYPE are offered
    q = 'select id,name from ms_payment_type order by id'
    r = query(q)
    r_pay = []
    for i in r:
        if i.id in PAYMENT_TYPE:
            r_pay.append(i)
    #
    if FORCE_SINGLE_CURRENCY:
        currency = pget('currency', callback=dcur).csymbol
    else:
        currency = ''
    #
    ret = (carts, (total, stotal), r_pay, currency)
    #
    return ret
def dadmin(section=None):
    '''Build the per-user admin menu as (label, url, priority) tuples, sorted by priority.

    Admin-only sections are included only when the current user is an
    admin; entries gated on a feature flag (entry[3]) are included only
    when res enables that flag.  Anonymous users get an empty list.
    '''
    global sess
    #
    ret = []
    #
    if not sess.u: return ret
    #
    #avoid a shared mutable default argument
    if section is None:
        section = ['user', 'admin']
    #
    priorities = []
    #
    mall = []
    for sec in section:
        stemp = m.t(m.MENUADMIN, sess.lang)[sec]
        #the inner loop previously reused (shadowed) the outer loop variable
        if sec == 'admin' or sec.startswith('admin.'):
            if isadmin():
                for entry in stemp:
                    mall.append(entry)
        else:
            for entry in stemp:
                mall.append(entry)
    #
    #
    for entry in mall:
        if entry[3]:#entry[3] names a res feature flag that must be enabled
            if res[entry[3]]:
                priorities.append(entry[2])
        else:
            priorities.append(entry[2])
    #
    priorities.sort()
    #
    for p in priorities:
        for j in mall:
            if j[2] == p:
                ret.append( (j[0], ub(j[1]), j[2]) )
    #
    return ret
def ddata(fields):
    '''Collect named page-data blocks; `fields` selects which keys to build.

    Supported keys: yahoo, link, extra, sticky, promo_host, news, product,
    cart.  Returns a dict containing only the requested blocks.
    '''
    global msgs
    #
    ret = {}
    if 'yahoo' in fields:
        ret['yahoo'] = dyahoo()
    #
    if 'link' in fields:
        ret['link'] = dlink()
    #
    if 'extra' in fields:
        extra = pget('extra_info')
        extra = mlget(extra)
        ret['extra'] = extra
    #
    if 'sticky' in fields:
        sticky = pget('sticky_info')
        sticky = mlget(sticky)
        ret['sticky'] = sticky
    #
    if 'promo_host' in fields:
        #FORCE_PROMOTE unconditionally enables the hosting-promotion banner
        if pget('promo_host') == '1' or FORCE_PROMOTE:
            promo_host = msgs['promo_host_default']
        else:
            promo_host = ''
        ret['promo_host'] = promo_host
    #
    if 'news' in fields:
        ret['news'] = dnews()
    #
    if 'product' in fields:
        ret['product'] = dpro()
    #
    if 'cart' in fields:
        ret['cart'] = dcart()
    #
    return ret
def ucget(all=False):
    '''List active user-content pages as (title, full_url, priority, path) tuples.

    With all=False only pages flagged show_in_menu are returned.
    '''
    if all:
        q = 'select id,page,priority from ms_user_content where active=1 order by priority asc'
    else:
        q = 'select id,page,priority from ms_user_content where active=1 and show_in_menu=1 order by priority asc'
    #
    pages = []
    for row in query(q):
        row.page = mlget(row.page)
        path = '/go/%s' %(row.id)
        pages.append((row.page, ub(path), row.priority, path))
    #
    return pages
def menugen(hidden_user_content=False):
    '''Assemble the site menu, sorted by each entry's priority.

    Combines the default menu with the cart, FAQ and news menus (when
    those features are active), the auth/noauth menu depending on login
    state, and user-content pages (all of them when hidden_user_content).
    Entries are (label, full_url, priority, path) tuples.
    '''
    global menu
    global sess
    #
    all = []
    all2 = []
    sorted = []
    all3 = []
    #
    for i in menu['default']:
        all.append(i)
    if pget('use_cart') == '1' and res['cart']:
        for i in menu['cart']:
            all.append(i)
    if dfaq():
        for i in menu['faq']:
            all.append(i)
    if dnews(read_session=False):
        for i in menu['news']:
            all.append(i)
    if sess.u:
        for i in menu['auth']:
            all.append(i)
    else:
        for i in menu['noauth']:
            all.append(i)
    #
    for i in all:
        all2.append( ( i[0], ub(i[1]), i[2], i[1] ) )
        sorted.append(i[2])
    #
    if res['user_content']:
        if hidden_user_content:
            uc = ucget(True)
        else:
            uc = ucget()
        for i in uc:
            all2.append(i)
            sorted.append(i[2])
    #
    #stable two-pass ordering: sort the priorities, then emit matching entries
    sorted.sort()
    for i in sorted:
        for j in all2:
            if j[2] == i:
                all3.append(j)
    #
    return all3
def invoicegen(cartdata, cart_id, date_purchase, payment, cust_name, cust_email, ship_addr, note):
    '''Render a plain-text invoice: shop info, customer header, item table,
    total line, and bank/PayPal payment details.

    cartdata is the tuple returned by dcart().  Returns the invoice text.
    '''
    global msgs
    #
    carts = cartdata[0]
    #
    cw_pro = CWIDTH['product']
    cw_qty = CWIDTH['qty']
    cw_price = CWIDTH['price']
    cw_vat = CWIDTH['vat']
    cw_subt = CWIDTH['subtotal']
    #
    maxlen = 0
    for i in CWIDTH.keys(): maxlen += CWIDTH[i]
    line = '-' * maxlen
    endl = '\r\n'
    #
    #the currency symbol and the column geometry are the same for every
    #item row, so compute them once up front; this also keeps the totals
    #line below working when the cart is empty (previously these names
    #were only bound inside the per-item loop)
    if FORCE_SINGLE_CURRENCY:
        csym = cartdata[3][:3].ljust(3)
    else:
        csym = '' #fixme when multi currency is supported
    lcsym = len(csym)
    lcs = len(CSPACE)
    c_pro = cw_pro - lcs
    c_qty = cw_qty - lcs
    c_price = cw_price - lcs - lcsym
    c_vat = cw_vat - lcs - lcsym
    c_subt = cw_subt - lcs - lcsym
    c_totstr = c_pro + lcs + c_qty + lcs + lcsym + c_price + lcs + lcsym + c_vat
    #
    invtext = ''
    #info
    invtext += DOMAIN + endl
    site_desc = pget('site_description')
    site_desc = mlget(site_desc)
    if site_desc:
        invtext += site_desc + endl
    inv_extra = pget('invoice_extra_info')
    inv_extra = mlget(inv_extra)
    if inv_extra:
        invtext += inv_extra + endl
    #to
    inv_to = '%s <%s>' %(cust_name, cust_email)
    #payment
    q = 'select name from ms_payment_type where id=$payment'
    a = {'payment': payment}
    r_pay_type = query(q, a)
    payment_type = rget(r_pay_type, 'name')
    #header
    invtext += endl + msgs['header_cart_invoice_date'].capitalize() + ': ' + date_purchase
    invtext += endl + msgs['header_cart_invoice'].capitalize() + ': ' + cart_id
    invtext += endl + msgs['header_cart_invoice_to'].capitalize() + ': ' + inv_to
    invtext += endl + msgs['header_cart_invoice_addr'].capitalize() + ': ' + ship_addr
    invtext += endl + msgs['header_cart_invoice_payment'].capitalize() + ': ' + payment_type
    invtext += endl + msgs['header_cart_invoice_note'].capitalize() + ': ' + note
    #
    invtext += endl + endl + line + endl
    invtext += msgs['header_cart_product'].capitalize().center(cw_pro)
    invtext += msgs['header_cart_qty'].capitalize().center(cw_qty)
    invtext += msgs['header_cart_price'].capitalize().center(cw_price)
    invtext += msgs['header_cart_vat'].center(cw_vat)
    invtext += msgs['header_cart_subtotal'].capitalize().center(cw_subt)
    invtext += endl + line + endl
    #detail
    for i in carts:
        i_pro = i[2][:c_pro].ljust(c_pro)
        i_qty = i[3][1][:c_qty].rjust(c_qty)
        i_price = i[4][1][:c_price].rjust(c_price)
        i_vat = i[5][1][:c_vat].rjust(c_vat)
        i_subt = i[6][1][:c_subt].rjust(c_subt)
        #
        invtext += i_pro + CSPACE + i_qty + CSPACE + csym + i_price + CSPACE + csym + i_vat + CSPACE + csym + i_subt + endl
    #
    invtext += line + endl
    invtext += msgs['header_cart_total'].capitalize().rjust(c_totstr) + CSPACE + csym + cartdata[1][1][:c_subt].rjust(c_subt) + endl
    invtext += line + endl
    #bank info
    show_bank = pget('invoice_show_bank')
    if show_bank == '1':
        q = 'select * from ms_bank where active=1 order by id'
        r_bank = query(q)
        if r_bank:
            invtext += msgs['header_cart_bank_account'].capitalize() + ':' + endl
            for i in r_bank:
                invtext += i.name + ' ' + i.branch + ' ' + '(' + i.holder + '/' + i.account+ ')' + endl
            invtext += endl
    #paypal info
    show_pp = pget('invoice_show_paypal')
    if show_pp == '1':
        q = 'select * from ms_paypal where active=1 order by id'
        r_pp = query(q)
        if r_pp:
            invtext += msgs['header_cart_paypal_account'].capitalize() + ':' + endl
            for i in r_pp:
                invtext += i.account + endl
            invtext += endl
    #
    return invtext
def invoicesave(payment, cust_name, cust_email, ship_addr, note, clear_cart=True, mail=True):
    '''Persist the current session cart as an invoice (header + detail rows).

    Returns the new invoice header id, or 0 when the transaction failed.
    Optionally clears the session cart and mails the invoice text to the
    customer plus a copy to the shop address.
    '''
    global db
    global sess
    global msgs
    #
    ret = 0
    #
    cartdata = dcart()
    carts = cartdata[0]
    #
    curid = None
    if FORCE_SINGLE_CURRENCY:
        curid = pget('currency', default=0)
    #
    cart_id = cidget()
    date_purchase = now()
    invtext = invoicegen(cartdata, cart_id, date_purchase, payment, cust_name, cust_email, ship_addr, note)
    #
    t = db.transaction()
    try:
        insert_id = db.insert('tr_invoice_header', cart_id=cart_id,
            log_id=sess.log, total=cartdata[1][0], date_purchase=date_purchase,
            payment_type=payment, used_currency=curid,
            cust_name=cust_name, cust_email=cust_email,
            ship_addr=ship_addr, note=note, invoice_text=invtext,
            invoice_lang=sess.lang
            )
        #
        for i in carts:
            db.insert('tr_invoice_detail', header_id=insert_id,
                product_variant=i[0], saved_price=i[4][0], saved_tax=i[5][0],
                amount=i[3][0], log_id=sess.log)
        #
    except:
        t.rollback()
    else:
        t.commit()
        #only report the id on success; insert_id was previously read even
        #after a rollback, raising NameError when the header insert failed
        ret = insert_id
    #
    sess.co['invoice_text'] = invtext
    #
    if clear_cart:
        sess.c = {}
    #
    if mail:
        subj = msgs['header_cart_invoice'].capitalize() + ': ' + cart_id
        subj_copy = msgs['msg_copy_of'].capitalize() + ' ' + msgs['header_cart_invoice'].capitalize() + ': ' + cart_id
        sendmail(cust_email, subj, invtext)
        sendmail(MAIL_DEFAULT, subj_copy, invtext)
        sess.co['mail_sent'] = msgs['header_cart_invoice_mail_sent']
    else:
        sess.co['mail_sent'] = ''
    #
    return ret
def dfs(id=0, content=False, name_add=True, format=True, format_div=1, filter=None):
    '''Fetch file records from ms_file (one by id, or all, newest first).

    content=True additionally selects the file blob; format=True formats
    the size (divided by format_div) and parses the YAML option columns;
    `filter` keeps only rows whose MIME type contains one of the given
    substrings.
    '''
    if not id:
        if content:
            q = 'select id, name, name_add, size, type, type_options, disposition, disposition_options, date_file, headers, content from ms_file order by id desc'
        else:
            q = 'select id, name, name_add, size, type, type_options, disposition, disposition_options, date_file, headers from ms_file order by id desc'
        a = {}
    else:
        if content:
            q = 'select id, name, name_add, size, type, type_options, disposition, disposition_options, date_file, headers, content from ms_file where id=$id order by id desc'
        else:
            q = 'select id, name, name_add, size, type, type_options, disposition, disposition_options, date_file, headers from ms_file where id=$id order by id desc'
        a = {'id': id}
    r = query(q, a)
    #
    for i in r:
        if format:
            if not i.size: i.size = 0
            i.size = nf(float(i.size)/format_div)
            if name_add:
                if i.name_add:
                    i.name = '%s (%s)' %(i.name, i.name_add)
            #option columns are stored as YAML text
            i.type_options = yaml.load(i.type_options)
            i.disposition_options = yaml.load(i.disposition_options)
            i.headers = yaml.load(i.headers)
    #
    if filter:
        ret = []
        for i in r:
            for j in filter:
                if i.type.find(j) > -1:
                    ret.append(i)
                    break
    else:
        ret = r
    return ret
def smget(next=None):
    '''Pop and return the queued session messages, replacing them with `next`.

    Returns the current message list (or [] when unset).
    '''
    #avoid a shared mutable default argument
    if next is None:
        next = []
    ret = sess.msg or []
    sess.msg = next
    return ret
def mlset(input, field_prefix, separator='.', strip=True, strip_br=True, check_empty_html=True):
    '''Collect per-language form fields named '<prefix><separator><langcode>'
    into a YAML-dumped {langcode: text} string (the storage format mlget reads).

    Values may be stripped, have <br> trimmed from the ends, and are
    blanked when they contain only HTML markup with no text.
    '''
    data = {}
    ccode = [m.COUNTRY[x][0] for x in m.COUNTRY.keys()]
    for i in input.keys():
        try:
            s = i.split(separator)
        except:
            s = []
        if len(s) == 2 and s[0] == field_prefix and s[1] in ccode:
            inputi = input[i]
            if strip and hasattr(inputi, 'strip'):
                inputi = inputi.strip()
            #
            if strip_br:
                inputi = stripsall(inputi, '<br>')
            #
            data[s[1]] = inputi
            #
            if check_empty_html:
                #overwrite with '' when the value is markup-only
                empcheck = striphtml(inputi).strip()
                if empcheck:
                    data[s[1]] = inputi
                else:
                    data[s[1]] = ''
    #
    ret = yaml.dump(data)
    #
    return ret
def isadmin():
    '''True when the logged-in user (sess.u) belongs to the ADMIN group.'''
    if not sess.u:
        return False
    #
    q = "select id from ms_user where id=$uid and group_id in (select id from ms_group where name='ADMIN')"
    rows = query(q, {'uid': sess.u})
    return bool(rows)
def ypget(option, default=None, lang=True):
    '''Fetch a config option and parse it as a YAML dict.

    With lang=True the result is normalized to contain exactly one entry
    per known language code (missing languages map to '').
    '''
    #avoid a shared mutable default argument
    if default is None:
        default = {}
    o = pget(option, default=default)
    if not o: o = '{}'
    o2 = yaml.load(o)
    #
    if not lang:
        return o2
    #
    ccode = [m.COUNTRY[x][0] for x in m.COUNTRY.keys()]
    #
    ret = {}
    #
    for i in ccode:
        #'in' replaces dict.has_key (removed in Python 3)
        if i not in o2:
            ret[i] = ''
        else:
            ret[i] = o2[i]
    #
    return ret
def dpro_category(id=0, field='*'):
    '''Active product categories ordered by priority (one row when id given).

    Normalizes a missing priority to 0 and expands the name column into
    an (all-translations, session-language) tuple when selected.
    '''
    if id:
        q = 'select $field from ms_category where active=1 and id=$id order by priority desc'
        a = {'id': id, 'field': web.SQLLiteral(field)}
    else:
        q = 'select $field from ms_category where active=1 order by priority desc'
        a = {'field': web.SQLLiteral(field)}
    #
    ret = query(q, a)
    for i in ret:
        #'in' replaces Storage.has_key (removed in Python 3)
        if 'priority' in i:
            if not i.priority: i.priority = 0
        if 'name' in i:
            i.name = (mlget(i.name, all=True), mlget(i.name) )
    return ret
def disblank(data, is_yaml, strip=True):
    '''Return True when every value in data (a dict, or a YAML-encoded dict
    when is_yaml) is blank; unparsable YAML also counts as blank.
    '''
    if is_yaml:
        try:
            ydata = yaml.load(data)
        except Exception:
            return True
    else:
        ydata = data
    #
    #iterate the values directly instead of indexing by key
    for value in ydata.values():
        if strip and hasattr(value, 'strip'):
            value = value.strip()
        if value:
            return False
    #
    return True
def dpro_group(id=0, field='*'):
    '''Active products, restricted to active categories.

    With id, returns that product only when its category is active.
    Multi-language columns become (all-translations, current) tuples and
    category_id becomes (id, category name).
    NOTE: Storage.has_key is Python 2 only.
    '''
    cat = [int(x.id) for x in dpro_category(field='id')]
    ret = []
    if id:
        q = 'select $field,category_id from ms_product where active=1 and id=$id order by priority desc'
        a = {'id': id, 'field': web.SQLLiteral(field), 'cid': id}
        test = query(q, a)
        #only expose the product when its category is active
        if test[0].category_id in cat:
            ret = test
    else:
        for i in cat:
            q = 'select $field,category_id from ms_product where active=1 and category_id=$cid order by priority desc'
            a = {'field': web.SQLLiteral(field), 'cid': i}
            ret += query(q, a)
    #
    for i in ret:
        if i.has_key('category_id'):
            cat = dpro_category(id=i.category_id, field='name')
            i.category_id = (i.category_id, cat[0].name[1])
        if i.has_key('name'):
            i.name = (mlget(i.name, all=True), mlget(i.name) )
        if i.has_key('description'):
            i.description = (mlget(i.description, all=True), mlget(i.description) )
        if i.has_key('full_info'):
            i.full_info = (mlget(i.full_info, all=True), mlget(i.full_info) )
    return ret
def dpro_item(id=0, field='*'):
    '''Active product variants, restricted to active products/categories.

    product_id becomes (id, product name); name becomes a three-part tuple
    including a "product - variant" label; currency_id becomes
    (id, csymbol, options, main currency symbol); taxratio and price are
    normalized with nrfloat.  NOTE: Storage.has_key is Python 2 only.
    '''
    group = [int(x.id) for x in dpro_group(field='id')]
    ret = []
    if id:
        q = 'select $field,product_id,currency_id from ms_product_variant where active=1 and id=$id order by id'
        a = {'id': id, 'field': web.SQLLiteral(field)}
        test = query(q, a)
        #only expose the variant when its parent product is active
        if test and test[0].product_id in group:
            ret = test
    else:
        for i in group:
            q = 'select $field,product_id,currency_id from ms_product_variant where active=1 and product_id=$pid order by id'
            a = {'field': web.SQLLiteral(field), 'pid': i}
            ret += query(q, a)
    #
    for i in ret:
        group = dpro_group(id=i.product_id, field='name')
        i.product_id = (i.product_id, group[0].name[1])
        if i.has_key('name'):
            i.name = (mlget(i.name, all=True), mlget(i.name), '%s - %s' %(group[0].name[1], mlget(i.name)) )
        if i.has_key('currency_id'):
            if FORCE_SINGLE_CURRENCY:
                curoptions = ()
                mcur = pget('currency', callback=dcur).csymbol
            else:
                curoptions = ()
                mcur = ''
            #
            r_csymbol = query('select csymbol from ms_currency where id=$curid', {'curid': i.currency_id})
            csymbol = rget(r_csymbol, 'csymbol', default='')
            i.currency_id = (i.currency_id, csymbol, curoptions, mcur)
        if i.has_key('taxratio'):
            if not i.taxratio:
                i.taxratio = 0
            else:
                i.taxratio = nrfloat(i.taxratio)
        if i.has_key('price'):
            i.price = nrfloat(i.price)
    return ret
def siget(input, field_prefix, field_suffix, separator='.', strip=True, strip_br=True, check_empty_html=True):
    '''Extract the value of the form field named '<prefix><separator><suffix>'.

    The last matching key wins.  The value can be stripped, have <br>
    trimmed from the ends, and is blanked when it is markup-only HTML.
    Returns '' when nothing matches.
    '''
    ret = ''
    for key in input.keys():
        try:
            pieces = key.split(separator)
        except:
            pieces = []
        if len(pieces) == 2 and pieces[0] == field_prefix and pieces[1] == field_suffix:
            ret = input[key]
    #
    if strip and hasattr(ret, 'strip'):
        ret = ret.strip()
    #
    if strip_br:
        ret = stripsall(ret, '<br>')
    #
    if check_empty_html:
        if not striphtml(ret).strip():
            ret = ''
    #
    return ret
def nlimit(number, min, max):
    '''Clamp number into the inclusive [min, max] range.'''
    if number < min:
        return min
    if number > max:
        return max
    return number
def nrfloat(snumber, precision=PRECISION, round=decimal.ROUND_UP):
    '''Quantize snumber to `precision` decimal places with the decimal module.

    Returns a decimal.Decimal, or None when snumber cannot be parsed.
    `round` (shadows the builtin) is a decimal rounding mode constant.
    '''
    le = '0' * precision
    if not le:
        #precision 0: quantize to whole numbers
        dec = '1'
    else:
        dec = '.' + le
    #
    num = str(snumber)
    try:
        d = decimal.Decimal(num)
        ret = d.quantize(decimal.Decimal(dec), rounding=round)
    except:
        ret = None
    #
    return ret
def rt(precision=4, show_second=True):
    '''Render the page render time (rendertime[1] - rendertime[0]).

    Returns '' unless the expose_time config flag is '1'; otherwise the
    elapsed time rounded down to `precision` decimals, optionally with the
    localized "second" label appended.
    '''
    if not pget('expose_time') == '1': return ''
    #
    x = rendertime[1] - rendertime[0]
    if x <= 0:
        #clock skew / unset markers: treat as zero
        x = 0
    #
    if x:
        ret = nrfloat(x, precision, decimal.ROUND_DOWN)
        if show_second:
            ret = '%s %s' %(ret, msgs['header_second'])
    else:
        ret = 0
        if show_second:
            ret = ''
    #
    return ret
def dcur(id=0, field='*', pget_helper=''):
    '''Currency rows (one by id, or all).

    When used as a pget callback (pget_helper truthy), always returns a
    single storage object — an empty placeholder when the id is missing
    or unknown — so attribute access like .csymbol is safe.
    '''
    if id:
        q = 'select $field from ms_currency where id=$id order by id'
        a = {'id': id, 'field': web.SQLLiteral(field)}
    else:
        q = 'select $field from ms_currency order by id'
        a = {'field': web.SQLLiteral(field)}
    r = query(q, a)
    #
    if pget_helper:
        if not id or id == 0 or not r:
            no = {'csymbol': '', 'id': '', 'name': ''}
            no = web.utils.storify(no)
            return no
        else:
            return r[0]
    #
    return r
def atpl():
    '''List template directory entries whose declared owner (if any) includes this DOMAIN.'''
    allowed = []
    for entry in os.listdir(TEMPLATE_DIR):
        info = tinfo(entry)
        try:
            owner = info['general']['owner']
        except:
            owner = ''
        #templates owned by another domain are excluded
        if owner and DOMAIN.lower() not in owner:
            continue
        allowed.append(entry)
    return allowed
def dbank(id=0, field='*', complete_name=False):
    '''Active bank-account rows (one by id, or all).

    With complete_name=True (forces field='*') each row's name becomes a
    single "name (branch) - holder - account - currency" display string.
    '''
    if complete_name:
        field = '*'
    if id:
        q = 'select $field from ms_bank where id=$id and active=1 order by id'
        a = {'id': id, 'field': web.SQLLiteral(field)}
    else:
        q = 'select $field from ms_bank where active=1 order by id'
        a = {'field': web.SQLLiteral(field)}
    r = query(q, a)
    #
    if complete_name:
        for i in r:
            cur = dcur(id=i.currency_id)[0].csymbol
            if i.branch:
                branch = '(%s)' %(i.branch)
            else:
                branch = ''
            i.name = '%s %s - %s - %s - %s' %(i.name, branch, i.holder, i.account, cur)
    #
    return r
def captcha_gen_word(allowed='aAbBdDeEGhHnNQrRtT23456789', min=4, max=7):
    '''Random captcha word of min..max characters drawn from `allowed`.'''
    length = random.choice(range(min, max+1))
    return ''.join(random.choice(allowed) for _ in range(length))
def captcha_gen_image(word, font_file='', font_size=20, format='JPEG', fg=-1, bg=-1):
    '''Render `word` as a noisy captcha image (PIL) and return the Image.

    fg/bg default to a random color and its inverse; the font is picked
    at random from the configured font_dir (falling back to PIL's default
    font).  Random polygons and per-letter jitter are added as noise.
    NOTE(review): the `format` parameter is unused here — presumably the
    caller uses it when saving; confirm.
    '''
    if fg == -1:
        fg = random.randint(0, 0xffffff)
    if bg == -1:
        #inverse of the foreground for contrast
        bg = fg ^ 0xffffff
    #
    font_dir = pget('font_dir')
    if not font_file:
        try:
            font_file = random.choice(os.listdir(font_dir))
            font_file = font_dir + PS + font_file
        except:
            font_file = ''
    else:
        font_file = font_dir + PS + font_file
    #
    if os.path.exists(font_file):
        font = ImageFont.truetype(font_file, font_size)
    else:
        font = ImageFont.load_default()
    #
    size = font.getsize(word)
    #
    #canvas slightly larger than the text, by a random margin
    img = Image.new('RGB', (size[0]+random.randint(0,10),
        size[1]+random.randint(0,10)), bg)
    draw = ImageDraw.Draw(img)
    #
    #noise: several random polygons behind/over the text
    for i in range(5):
        lines = []
        for i in range(random.randint(3,5)):
            lines.append((random.randint(0, 100), random.randint(0, 100)))
        draw.polygon(lines, outline=fg)
    #
    #draw each letter with a small random offset
    width = size[0]/len(word)
    i = 0
    for w in word:
        x = (i * width) + random.randint(-1, 2)
        y = random.randint(-1, 4)
        draw.text((x, y), w, font=font, fill=fg)
        i += 1
    #
    img = img.filter(ImageFilter.EDGE_ENHANCE_MORE)
    return img
def ulangd(x):
    '''Return x, or the capitalized "no translation" message when x is falsy.'''
    if x:
        return x
    return msgs['header_no_translation'].capitalize()
def ucart():
    '''Return '1'/'0': cart enabled both in config and feature flags.

    NOTE(review): bool(pget('use_cart', '')) tests for a non-empty config
    value, so even '0' counts as enabled here (menugen compares == '1'
    instead) — confirm which behavior is intended.
    '''
    return str( int(bool(pget('use_cart', '')) and res['cart']) )
def uid():
    '''Return the logged-in user id from the session (None when anonymous).'''
    return sess.u
def title(ttl):
    '''Compose the page title: "[host] description - ttl" (description omitted when unset).'''
    host = web.ctx.env.get('HTTP_HOST', msgs['host_default'])
    desc = mlget(pget('site_description', default=''))
    if desc:
        return '[%s] %s - %s' %(host, desc, ttl)
    return '[%s] - %s' %(host, ttl)
def ddatab():
    '''Collect the standard base-page data blocks via ddata().'''
    return ddata(fields=['yahoo','extra','promo_host','news','sticky','link'])
def ddatac(p):
    '''Fetch a single named ddata() block.'''
    return ddata(fields=[p])
def tpl(page):
    '''Template renderer for `page` with the standard helper globals injected.'''
    return tget(page,
        globals={
        'ub': ub,
        'ucart': ucart,
        'uid': uid,
        'msg': msgs,
        'ulang': m.COUNTRY,
        'ulangd': ulangd,
        'rt': rt,
        'now': now,
        'iflash': iflash,
        'logo': logo,
        'meta': meta,
        'dkget': dkget,
        'favicon': favicon,
        'adminurlgroup': ADMIN_URL_GROUP,
        'mobile': mobile,
        })
def tplb(o):
    '''Wrap rendered page output o in the base template (menu, base data, current path).'''
    return tpl('base').base(menugen(), ddatab(), o, web.ctx.path)
def logo():
    '''Return the configured logo's file record(s) via dfs (empty when unset).'''
    #local renamed so it no longer shadows the builtin id()
    file_id = int(pget('logo_file', default=-1))
    return dfs(id=file_id)
def meta():
    '''Build the keywords/description meta-tag entries from configuration.'''
    keywords = {'name': 'keywords', 'content': pget('site_keywords')}
    description = {'name': 'description', 'content': mlget(pget('site_description'))}
    return [keywords, description]
def lastlog():
    '''Placeholder: last-login lookup is not implemented; always returns None.'''
    return None
def lastupdate():
    '''Placeholder: last-update lookup is not implemented; always returns None.'''
    return None
def iflash(name, quality='high', wmode='transparent', scale='exactfit', width='100%', height='100%'):
    '''Build the legacy <object>/<embed> HTML snippet for a Flash movie.'''
    template = '''
    <object classid='clsid:D27CDB6E-AE6D-11cf-96B8-444553540000' codebase='http://download.macromedia.com/pub/shockwave/cabs/flash/swflash.cab#version=6,0,40,0' width='%s' height='%s'>
    <param name='movie' value='%s'>
    <param name='quality' value='%s'>
    <param name='wmode' value='%s'>
    <param name='scale' value='%s'>
    <embed src='%s' width='%s' height='%s' scale='%s' quality='%s' pluginspage='http://www.macromedia.com/go/getflashplayer' type='application/x-shockwave-flash' wmode='%s'>
    </embed>
    </object>
    '''
    values = (
        width, height,
        name,
        quality,
        wmode,
        scale,
        name, width, height, scale, quality, wmode
        )
    return template %values
def dkget(dictionary, key, prefix='', suffix='', default='', strip=True):
ret = default
if dictionary.has_key(key):
if dictionary[key]:
ret = dictionary[key]
ret = '%s%s%s' %(prefix, ret, suffix)
#
if ret and hasattr(ret, 'strip'):
ret = ret.strip()
#
return ret
def dprofile(id, field='*'):
    '''Fetch one active user's profile row.

    The password column is blanked, and YAML-encoded multi-value contact
    columns are flattened into comma-separated strings.  Returns {} (the
    empty query result) when the user is missing/inactive.
    NOTE: Storage.has_key is Python 2 only.
    '''
    ret = {}
    #
    q = 'select $field from ms_user where active=1 and id=$id'
    a = {'field': web.SQLLiteral(field), 'id': id}
    ret = query(q, a)
    #
    if ret:
        ret = ret[0]
        #never expose the stored password
        if ret.has_key('password'): ret['password'] = ''
        for i in ['email', 'phone', 'fax', 'web','icontact','acontact','govid']:
            if ret.has_key(i):
                if ret[i]:
                    ret[i] = ','.join(yaml.load(ret[i]))
    #
    return ret
def sepnorm(field, separator=',', remove_space=True, unique=True, replace_underscore_with_space=True, as_string=True):
    '''Normalize a separator-delimited string.

    Each part is stripped; optionally inner spaces are removed, duplicates
    dropped (first occurrence kept), underscores turned into spaces, and
    empty parts discarded.  Returns the re-joined string, or the list of
    parts when as_string=False.
    '''
    parts = [x.strip() for x in field.strip().split(separator)]
    if remove_space:
        parts = [x.replace(' ', '') for x in parts]
    #bugfix: the non-unique branch previously assigned to a misspelled name
    #(splitte4), raising NameError whenever unique=False was requested
    if unique:
        seen = []
        for part in parts:
            if part not in seen:
                seen.append(part)
        parts = seen
    if replace_underscore_with_space:
        parts = [x.replace('_', ' ') for x in parts]
    cleaned = [part for part in parts if part]
    #
    if not as_string:
        return cleaned
    return separator.join(cleaned)
def favicon():
    '''Link tags for the favicon, covering both the legacy and standard rel values.'''
    href = ub('/static/favicon.ico')
    links = ["<link rel='%s' href='%s'>" %(rel, href) for rel in ['SHORTCUT ICON', 'icon']]
    return ''.join(links)
def dpaypal(id=0, field='*'):
    '''Active PayPal account rows (one by id, or all, ordered by id).'''
    if id:
        q = 'select $field from ms_paypal where id=$id and active=1 order by id'
        a = {'id': id, 'field': web.SQLLiteral(field)}
    else:
        q = 'select $field from ms_paypal where active=1 order by id'
        a = {'field': web.SQLLiteral(field)}
    r = query(q, a)
    #
    return r
def dlink(id=0, field='*'):
    '''Link rows from ms_link (one by id, or all, ordered by id).'''
    if id:
        q = 'select $field from ms_link where id=$id order by id'
        a = {'id': id, 'field': web.SQLLiteral(field)}
    else:
        q = 'select $field from ms_link order by id'
        a = {'field': web.SQLLiteral(field)}
    r = query(q, a)
    #
    return r
def dpromote():
    '''Build ready-to-copy promotional link snippets for this site.

    Returns a list of {'style': ..., 'code': ...} dicts covering HTML,
    BBCode, Markdown, MediaWiki and Textile (plus an image link when a
    logo file is configured).
    '''
    ret = []
    url = "http://%s%s" %(DOMAIN, ub('/'))
    #
    bbcode = {
        'style' : 'BBCode',
        'code' : "[url=%s]%s[/url]" %(url, DOMAIN),
        }
    #
    a = {
        'style' : 'HTML Link',
        'code' : "<a href='%s'>%s</a>" %(url, DOMAIN),
        }
    #
    aimg = {
        'style' : 'HTML Link (image)',
        'code' : "<a href='%s'><img src='%s/fs/%s'></a>" %(url, url, pget('logo_file')),
        }
    #
    markdown = {
        'style' : 'Markdown',
        'code' : "[%s](%s)" %(DOMAIN, url),
        }
    #
    mediawiki = {
        'style' : 'MediaWiki',
        'code' : "[%s %s]" %(url, DOMAIN),
        }
    #
    textile = {
        'style' : 'Textile',
        'code' : '"%s":%s' %(DOMAIN, url),
        }
    #
    ret.append(a)
    #image variant only when a logo file exists
    if pget('logo_file'): ret.append(aimg)
    ret.append(bbcode)
    ret.append(markdown)
    ret.append(mediawiki)
    ret.append(textile)
    #
    return ret
def dfaq(id=0, field='*', group=False):
    '''FAQ rows (one by id, or all), with multi-language columns expanded
    into (all-translations, current) tuples.

    With group=True, returns a dict mapping each translated category name
    to its list of FAQ rows instead.  NOTE: Storage.has_key is Python 2 only.
    '''
    if id:
        q = 'select $field from tr_faq where id=$id order by id'
        a = {'id': id, 'field': web.SQLLiteral(field)}
    else:
        q = 'select $field from tr_faq order by id'
        a = {'field': web.SQLLiteral(field)}
    r = query(q, a)
    #
    for i in r:
        if i.has_key('category'):
            i.category = (mlget(i.category, all=True), mlget(i.category))
        if i.has_key('question'):
            i.question = (mlget(i.question, all=True), mlget(i.question))
        if i.has_key('answer'):
            i.answer = (mlget(i.answer, all=True), mlget(i.answer))
    #
    if group:
        q_cat = 'select distinct category from tr_faq order by id'
        a_cat = {}
        r_cat = query(q_cat, a_cat)
        ret = {}
        for i in r_cat:
            cat = mlget(i.category)
            ret[cat] = []
            #
            for j in r:
                if j.category[1] == cat:
                    ret[cat].append( j )
    else:
        ret = r
    #
    return ret
def dinvoice(id=0, field='*', date_from='', date_to='', closed=False, all_confirm=False):
    '''Invoice header rows: one by id, or all in the [date_from, date_to]
    purchase window with the given done/closed state.

    total becomes (raw, formatted); used_currency becomes its symbol;
    confirm_info is YAML-parsed and reduced to the latest entry unless
    all_confirm=True.  NOTE: Storage.has_key is Python 2 only.
    '''
    if id:
        q = 'select $field from tr_invoice_header where id=$id'
        a = {'id': id, 'field': web.SQLLiteral(field)}
    else:
        #date($dt, "+1 days") makes the date_to bound inclusive
        q = 'select $field from tr_invoice_header where date_purchase >= $df and date_purchase <= date($dt, "+1 days") and done=$closed order by date_purchase desc'
        a = {'field': web.SQLLiteral(field), 'df': date_from, 'dt': date_to, 'closed': closed}
    r = query(q, a)
    #
    for i in r:
        if i.has_key('total'):
            i.total = (i.total, nf(i.total))
        if i.has_key('used_currency'):
            if FORCE_SINGLE_CURRENCY:
                i.used_currency = pget('currency', callback=dcur).csymbol
            else:
                q = 'select csymbol from ms_currency where id=$curid'
                a = {'curid': i.used_currency}
                r_cur = query(q, a)
                i.used_currency = rget(r_cur, 'csymbol')
        if i.has_key('confirm_info'):
            confirm = rget([i], 'confirm_info', default=[], to_yaml=True)
            i.confirm_info = confirm
            if confirm:
                if all_confirm == False:
                    #expose only the most recent confirmation entry
                    i.confirm_info = confirm[-1]
    #
    return r
def dstat(date_from='', date_to=''):
    '''Build site statistics for the given date window.

    Each entry in `all` is [title, count-query, distribution-query,
    label-resolver]; the resolver (when set) is (func, field, use_mlget,
    tuple_index) for mapping ids to display names.  Rows get val replaced
    by (count, 'NN%'), with a synthesized "unknown" bucket for NULL keys.
    '''
    ret = []
    #
    all = [
        [
        msgs['header_stat_country'],
        'select count(*) as count from tr_log where date_log>=$df and date_log<= date($dt, "+1 days")',
        'select distinct country as var,count(country) as val from tr_log where date_log>=$df and date_log <= date($dt, "+1 days") group by var order by val desc',
        None,
        ],
        [
        msgs['header_stat_top_products'],
        'select count(*) as count from tr_invoice_detail where header_id in (select id from tr_invoice_header where date_purchase>=$df and date_purchase<= date($dt, "+1 days"))',
        'select distinct product_variant as var,count(product_variant) as val from tr_invoice_detail where header_id in (select id from tr_invoice_header where date_purchase>=$df and date_purchase<= date($dt, "+1 days")) group by var order by val desc',
        (dpro_item, 'name', True, 2),
        ],
        ]
    #
    for x in all:
        temp = {'category': x[0]}
        q = x[1]
        a = {'df': date_from, 'dt': date_to}
        r_total = query(q, a)
        #default 1 keeps the percentage division safe
        c_total = rget(r_total, 'count', default=1)
        q = x[2]
        r = query(q, a)
        counted = 0
        for i in r:
            counted += i.val
        for i in r:
            if not i.var:
                #NULL key: attribute the remainder to "unknown"
                i.var = msgs['msg_unknown']
                i.val = c_total - counted
            percent = nrfloat((float(i.val) * 100)/c_total, round=decimal.ROUND_DOWN)
            #
            if not x[3]:
                i.var = str(i.var)
            else:
                if x[3][2]: #mlget
                    try:
                        i.var = x[3][0](i.var)[0][ x[3][1] ][ x[3][3] ]
                    except:
                        i.var = msgs['msg_unknown']
                else:
                    try:
                        i.var = x[3][0](i.var)[0].name
                    except:
                        i.var = msgs['msg_unknown']
            #
            i.val = (i.val, '%s%%' %(percent))
        temp['stat'] = r
        temp['total'] = c_total
        if r:
            ret.append(temp)
    #
    return ret
def tinfo(template, separator=','):
    """Parse a template's 'info' file into {section: {option: value}}.

    Values wrapped in [brackets] are split on *separator* into a list
    of stripped strings; any other value is returned stripped.
    Returns {} when the info file cannot be read.
    """
    parsed = {}
    #
    path = TEMPLATE_DIR + PS + template + PS + 'info'
    #
    cfg = ConfigParser.ConfigParser()
    try:
        cfg.read(path)
    except:
        return parsed
    #
    for section in cfg.sections():
        options = {}
        for opt in cfg.options(section):
            raw = cfg.get(section, opt)
            if hasattr(raw, 'strip'):
                raw = raw.strip()
            if raw and raw[0] == '[' and raw[-1] == ']':
                pieces = raw[1:-1].split(separator)
                options[opt] = [p.strip() for p in pieces]
            else:
                options[opt] = raw.strip()
        parsed[section] = options
    #
    return parsed
def dredir(id=0, field='*'):
    """Fetch one redirect row by id, or every redirect ordered by id."""
    if id:
        q = 'select $field from ms_redirect where id=$id'
        a = {'id': id, 'field': web.SQLLiteral(field)}
    else:
        q = 'select $field from ms_redirect order by id'
        a = {'field': web.SQLLiteral(field)}
    return query(q, a)
def dgo(id=0, field='*'):
    """Fetch active user-content rows: one by id, or all ordered by
    priority.  Normalizes priority and expands multilingual fields."""
    if id:
        q = 'select $field from ms_user_content where id=$id and active=1'
        a = {'id': id, 'field': web.SQLLiteral(field)}
    else:
        q = 'select $field from ms_user_content where active=1 order by priority asc,id desc'
        a = {'field': web.SQLLiteral(field)}
    #
    rows = query(q, a)
    for row in rows:
        if 'priority' in row and not row.priority:
            row.priority = 0
        if 'page' in row:
            # (all translations, current-language value)
            row.page = (mlget(row.page, all=True), mlget(row.page))
        if 'content' in row:
            row.content = (mlget(row.content, all=True), mlget(row.content))
    return rows
############################# CUSTOM ERROR #############################
def notfound():
    """404 handler: render the not-found template when error exposure
    is enabled in config, otherwise bounce to the home page."""
    expose_error = pget('expose_error')
    general_error_message = pget('general_error_message')
    if expose_error == '1':
        t = tpl('error_notfound')
        ttl = msgs['msg_error'].capitalize()
        body = tplb(t.error_notfound(title(ttl), general_error_message))
        return web.notfound(body)
    raise web.seeother('/')
def internalerror():
    """500 handler: detailed template when error exposure is enabled,
    otherwise a bare localized error title."""
    expose_error = pget('expose_error')
    general_error_message = pget('general_error_message')
    ttl = msgs['msg_error'].capitalize()
    if expose_error == '1':
        t = tpl('error_internalerror')
        body = tplb(t.error_internalerror(title(ttl), general_error_message))
        return web.internalerror(body)
    return web.internalerror(ttl)
class nomethod(web.webapi.HTTPError):
    """405 response whose Allow header lists only the HTTP methods the
    handler class *cls* actually implements."""
    def __init__(self, cls=None):
        allowed = ['GET', 'HEAD', 'POST', 'PUT', 'DELETE']
        if cls:
            allowed = [meth for meth in allowed if hasattr(cls, meth)]
        hdrs = {
            'Content-Type': 'text/html',
            'Allow': ', '.join(allowed),
        }
        web.webapi.HTTPError.__init__(self, '405 Method Not Allowed', hdrs, 'REQUEST METHOD ERROR')
# Install the custom HTTP error handlers on the web.py application.
wapp.notfound = notfound
wapp.internalerror = internalerror
web.webapi.nomethod = nomethod
########################### WSGI + PROCESSOR ###########################
def proc_db_check(handle):
    """Processor: short-circuit every request with a localized error
    when the DB connection failed or the SQLite file is missing or not
    writable; otherwise continue down the processor chain."""
    db_ok = (not db_error
             and os.path.exists(DATA_FILE)
             and os.access(DATA_FILE, os.W_OK))
    if not db_ok:
        msgs = m.t(m.MSG, LANG_DEFAULT)
        return msgs['msg_error_db_connect']
    return handle()
def proc_set_log(handle):
    """Processor: maintain the per-visit row in tr_log.

    On a repeat request (sess.log already set) bump the activity
    counter and last-seen fields; on the first request insert a new
    log row (with an optional GeoIP country lookup) and remember its
    id in the session.
    """
    if sess.log:
        q = 'update tr_log set date_log_last=$log_last,activity=activity+1,user_id_last=$user_id_last where id=$log_id'
        a = {'log_last': now(), 'log_id': sess.log, 'user_id_last': sess.u}
        r = query(q, a)
        return handle()
    #
    # first request of this session: capture the request environment
    ip = web.ctx.ip
    ref = web.ctx.env.get('HTTP_REFERER', '')
    ua = web.ctx.env.get('HTTP_USER_AGENT', '')
    url = web.ctx.fullpath
    met = web.ctx.method
    dt = now()
    #
    # GeoIP lookup is optional; fgeo may be absent or return nothing
    country = None
    if callable(fgeo):
        country = fgeo(ip)
        if not country: country = None
    #
    insert_id = db.insert('tr_log',
        date_log=dt, date_log_last=dt, activity=1, ip=ip,
        country=country, referer=ref, url=url,
        user_agent=ua, method=met, user_id=sess.u
    )
    if insert_id:
        sess.log = insert_id
    return handle()
def proc_set_lang(handle):
    """Processor: resolve the UI language and load localized tables.

    Priority: session value > 'lang' config param > GeoIP country
    mapping > LANG_DEFAULT.  The chosen language is cached in the
    session and the module-global msgs/menu tables are rebuilt on
    every request.
    """
    global msgs
    global menu
    #
    lang = LANG_DEFAULT
    #
    if sess.lang: #already set
        lang = sess.lang
    else: #not set
        plang = pget('lang') #read db
        if plang:
            lang = plang
        else:
            #auto
            ip = web.ctx.ip
            #
            country = None
            if callable(fgeo):
                country = fgeo(ip)
                if not country: country = None
            #
            #
            # use the first language listed for the visitor's country
            if m.COUNTRY.has_key(country):
                lang = m.COUNTRY[country][0]
        #set
        sess.lang = lang
    #
    msgs = m.t(m.MSG, lang)
    menu = m.t(m.MENU, lang)
    #
    return handle()
def proc_limit_cart(handle):
    """Processor: 404 the cart/payment/invoice URLs whenever the cart
    feature is disabled."""
    if ucart() != '1' or not res['cart']:
        current = web.ctx.fullpath.lower()
        if current.startswith(('/cart', '/payment', '/admin/invoice')):
            raise web.notfound()
    #
    return handle()
def proc_limit_user_content(handle):
    """Processor: 404 the user-content URLs whenever that feature is
    disabled."""
    if not res['user_content']:
        current = web.ctx.fullpath.lower()
        if current.startswith(('/go', '/admin/go', '/admin/redir')):
            raise web.notfound()
    #
    return handle()
def proc_limit_blog(handle):
    """Processor: 404 the blog URLs whenever the blog feature is
    disabled."""
    if not res['blog']:
        current = web.ctx.fullpath.lower()
        if current.startswith(('/blog', '/admin/blog', '/admin/comment')):
            raise web.notfound()
    #
    return handle()
def proc_check_offline(handle):
    """Processor: while the site is flagged offline, answer every
    request with the 'offline' page except login/logout/contact and a
    whitelist of path prefixes (admin, captcha, files, account pages)."""
    path = web.ctx.fullpath.lower()
    if pget('offline'):
        exempt_exact = ('/login', '/logout', '/contact')
        exempt_prefix = ('/lang/set/', '/admin', '/captcha', '/fs/',
                         '/passwd', '/profile', '/promote')
        blocked = path not in exempt_exact and not path.startswith(exempt_prefix)
        if blocked:
            t = tpl('offline')
            ttl = msgs['title_offline'].capitalize()
            return tplb(t.offline(title(ttl), ''))
    #
    return handle()
def proc_check_auth(handle):
    """Processor: require a logged-in user for the account pages and
    /admin/*, and additionally the admin flag for /admin/*."""
    path = web.ctx.fullpath.lower()
    if path.startswith('/admin/'):
        if not sess.u:
            raise web.seeother('/login')
        if not isadmin():
            raise web.internalerror()
    #
    if path in ('/passwd', '/admin', '/profile', '/promote'):
        if not sess.u:
            raise web.seeother('/login')
    #
    return handle()
def proc_check_already_auth(handle):
    """Processor: a logged-in GET of /login is redirected to /admin; a
    logged-in non-GET of /login is answered with the string 'error'."""
    if sess.u and web.ctx.fullpath.lower() == '/login':
        if web.ctx.method.lower() == 'get':
            raise web.seeother('/admin')
        return 'error'
    #
    return handle()
def proc_detect_ua(handle):
    """Processor: map the session browser class to the global 'mobile'
    markup flag, sniffing the User-Agent when the session carries no
    recognised class."""
    global mobile
    #
    markup_by_class = {'mobile': 'xhtmlmp', 'wap': 'wml', 'desktop': ''}
    if sess.browserclass in markup_by_class:
        mobile = markup_by_class[sess.browserclass]
    else:
        # no pinned class: detect from the User-Agent header
        ua = web.ctx.env.get('HTTP_USER_AGENT', '')
        mobile = detect_ua(ua)['mobile_document']
    #
    return handle()
def proc_calc_render_start(handle):
    """Processor: stamp the render-timing start into rendertime[0]."""
    rendertime[0] = time.time()
    return handle()
def proc_calc_render_finish(handle):
    """Processor: stamp the render-timing finish into rendertime[1]."""
    rendertime[1] = time.time()
    return handle()
def proc_check_http_env(handle):
    """Processor: basic same-origin / host check.

    For POSTs that carry a Referer the referring host is checked,
    otherwise the Host header.  A mismatch against DOMAIN would yield
    403, but note the subdomain branch is currently short-circuited to
    always allow (deliberate hack dated 13-October-2012).
    """
    ret = False
    #
    host = web.ctx.host.lower()
    method = web.ctx.method.lower()
    domain = DOMAIN.lower()
    ref = web.ctx.env.get('HTTP_REFERER', '')
    #get checked host
    if method == 'post' and ref:
        # use the referring host (port stripped) for POSTs
        p = urlparse.urlparse(ref)
        phost = p[1].split(':')
        chost = phost[0]
    else:
        chost = host
    #from allowed domain
    if chost == domain:
        ret = True
    else:
        s = chost.split('.')
        ret = True #quick hack as of 13-October-2012: allow any host
        #if len(s) == 3 and '.'.join(s[1:]) == domain and s[0] in ALLOWED_SUBDOMAIN:
        # ret = True
    #
    if ret:
        return handle()
    #
    raise web.forbidden('access forbidden')
def proc_audit_post(handle):
    """Processor: append a (timestamp, path) pair to the session log
    row's YAML audit trail for every POST request."""
    if sess.log and web.ctx.method.lower() == 'post':
        rows = query('select audit from tr_log where id=$logid', {'logid': sess.log})
        if rows:
            trail = rget(rows, 'audit', default=[], to_yaml=True)
            trail.append((now(), web.ctx.fullpath))
            db.update('tr_log', where='id=$logid', audit=yaml.dump(trail), vars={'logid': sess.log})
    return handle()
def proc_set_fullpath(handle):
    """Processor: remember the current full path (including query
    string) in the session, but only when the bare path is one of the
    generated menu targets."""
    bare = web.ctx.path
    full = web.ctx.fullpath
    targets = [entry[3] for entry in menugen(True)]
    if sess.has_key('fullpath') and bare in targets:
        sess['fullpath'] = full
    #
    return handle()
def proc_set_res(handle):
    """Processor: refresh the per-request 'res' feature table from config.

    Every res key (except the ones pinned in res_fix) is re-read via
    pget and coerced: 'promote' to bool, 'payments' to a list of ints
    (comma list) or an int; values that fail coercion keep the
    previous setting.  Also derives FORCE_PROMOTE, PAYMENT_TYPE,
    DOMAIN and MAIL_DEFAULT from the request/config.
    """
    global res
    global FORCE_PROMOTE
    global PAYMENT_TYPE
    global DOMAIN
    global MAIL_DEFAULT
    #
    for _rk in res.keys():
        if _rk in res_fix: continue
        _rt = pget(_rk).lower()
        _rtv = ''
        if _rk == 'promote':
            _rtv = False
            if _rt == '1':
                _rtv = True
        elif _rk == 'payments':
            if _rt.find(',') > 0:
                try:
                    # NOTE(review): remove('') raises ValueError when the
                    # list has no empty entry (e.g. '1,2' without a trailing
                    # comma), falling back to the previous value — confirm
                    # config values always carry a trailing comma
                    _rtv = [x for x in _rt.split(',')]
                    _rtv.remove('')
                    _rtv = [int(x) for x in _rtv]
                except:
                    _rtv = res['payments']
            else:
                try:
                    _rtv = int(_rt)
                except:
                    pass
        #
        # only apply cleanly coerced values (bool/int/list)
        if type(_rtv) in [type(True), type(0), type([])]: res[_rk] = _rtv
    #
    FORCE_PROMOTE = res['promote']
    PAYMENT_TYPE = res['payments']
    DOMAIN = web.ctx.env.get('HTTP_HOST', '')
    MAIL_DEFAULT = '%s <%s>' %(DOMAIN, pget('mail_default'))
    #
    return handle()
# Register the request processors.  web.py runs them in registration
# order, so the DB health check comes first and the render-timing pair
# brackets everything in between.
wapp.add_processor(proc_db_check)
wapp.add_processor(proc_calc_render_start)
wapp.add_processor(proc_set_res)
wapp.add_processor(proc_detect_ua)
wapp.add_processor(proc_set_lang)
wapp.add_processor(proc_set_log)
wapp.add_processor(proc_check_http_env)
wapp.add_processor(proc_check_offline)
wapp.add_processor(proc_check_auth)
wapp.add_processor(proc_check_already_auth)
wapp.add_processor(proc_limit_cart)
wapp.add_processor(proc_limit_user_content)
wapp.add_processor(proc_limit_blog)
wapp.add_processor(proc_audit_post)
wapp.add_processor(proc_set_fullpath)
wapp.add_processor(proc_calc_render_finish)
# WSGI entry point for the hosting server
application = wapp.wsgifunc()
################################# CLASS ################################
class index:
    """Site root: redirect to the configured homepage when it is a
    valid menu target, otherwise to the default home page."""
    def GET(self):
        menu_urls = [entry[1] for entry in menugen()]
        target = HOME_DEFAULT
        #
        home = pget('homepage')
        if home and ub(home) in menu_urls:
            target = home
        #
        raise web.seeother(target)
    def POST(self):
        raise web.seeother('/')
class captcha:
    """Serve a freshly generated captcha as a GIF and remember the
    word in the session for later verification."""
    def GET(self):
        word = captcha_gen_word()
        #
        img = captcha_gen_image(word, bg=0xcccccc, fg=0x000000)
        buf = cStringIO.StringIO()
        img.save(buf, 'GIF')
        del img
        #
        buf.seek(0)
        content = buf.read()
        buf.close()
        del buf
        #
        sess.captcha = word
        #
        web.header('Content-type', 'image/gif')
        return content
    def POST(self):
        raise web.seeother('/')
class redir:
    """Resolve a short URL via ms_redirect and issue the configured
    redirect type (permanent 'redirect' vs. see-other)."""
    def GET(self, url):
        q = 'select type, target from ms_redirect where url=$url order by id limit 1'
        rows = query(q, {'url': url})
        if len(rows) == 1 and rows[0].target:
            hop = web.redirect if rows[0].type == 'redirect' else web.seeother
            raise hop(rows[0].target)
        #
        raise web.notfound()
    def POST(self):
        raise web.seeother('/')
class product:
    """Product catalogue page."""
    def GET(self):
        t = tpl('product')
        ttl = msgs['title_product'].capitalize()
        return tplb(t.product(title(ttl), ddata(['product', 'news'])))
    def POST(self):
        raise web.seeother('/')
class promote:
    """Promotion page."""
    def GET(self):
        t = tpl('promote')
        ttl = msgs['title_promote'].capitalize()
        return tplb(t.promote(title(ttl), {'promote': dpromote()}))
    def POST(self):
        raise web.seeother('/')
class login:
    """Login form (GET) and credential check (POST).

    POST answers with one of: 'value' (missing input), 'failed' (no
    matching user), 'ok', or 'error' (session write failed).
    """
    def GET(self):
        t = tpl('login')
        ttl = msgs['title_login'].capitalize()
        return tplb(t.login(title(ttl), None))
    def POST(self):
        i = web.input(username='', password='', api=0)
        user = i.username.strip()
        pwd = i.password.strip()
        if not user or not pwd:
            return 'value'
        #
        q = 'select id from ms_user where name=$username and password=$password and active=1'
        a = {'username': user, 'password': md5(pwd).hexdigest()}
        rows = query(q, a)
        if len(rows) != 1:
            return 'failed'
        try:
            sess.u = rows[0].id
            return 'ok'
        except:
            return 'error'
class logout:
    """Clear the session user and return to the home page."""
    def GET(self):
        sess.u = None
        raise web.seeother('/')
    def POST(self):
        raise web.seeother('/')
class contact:
    """Contact page (GET) and mail-forwarding form handler (POST).

    POST answers with one of: 'captcha', 'value', 'invalid_email',
    or 'ok' after forwarding the message by mail.
    """
    def GET(self):
        t = tpl('contact')
        ttl = msgs['title_contact'].capitalize()
        return tplb(t.contact(title(ttl), dprofile(1)))
    def POST(self):
        i = web.input(name='', email='', msg='', captcha='', api=0)
        sender = i.name.strip()
        addr = i.email.strip()
        body = i.msg.strip()
        #
        if i.captcha != sess.captcha:
            return 'captcha'
        #
        if not (sender and addr and body):
            return 'value'
        #
        if not is_valid_email(addr):
            return 'invalid_email'
        #
        content = ' %s: %s \r\n %s: %s \r\n %s: %s \r\n %s: \r\n %s' %(
            msgs['header_contact_date'].capitalize(), now(),
            msgs['header_contact_from'].capitalize(), sender,
            msgs['header_contact_email'].capitalize(), addr,
            msgs['header_contact_message'].capitalize(), body
        )
        sendmail(MAIL_DEFAULT, msgs['header_contact_email_subject'].capitalize(), content, addr)
        return 'ok'
class cart:
    """Shopping cart page."""
    def GET(self):
        t = tpl('cart')
        ttl = msgs['title_cart'].capitalize()
        return tplb(t.cart(title(ttl), ddatac('cart')))
    def POST(self):
        raise web.seeother('/')
class cart_add:
    """Add a product variant to the session cart (POST only).

    POST returns a '<status>,<left>' string where status is one of
    ok / value / error / outofstock and left is the remaining amount
    the client may still add (-1 on failure).
    """
    def GET(self):
        raise web.seeother('/')
    def POST(self):
        i = web.input(vid='', amt='', api=0)
        try:
            ivid = int(i.vid)
            iamt = int(i.amt)
        except ValueError:
            return 'value,-1'
        except:
            return 'error,-1'
        #
        p = dpro_item(id=ivid)
        if not p:
            return 'value,-1'
        #
        if pget('cart_check_stock') == '1':
            stock = p[0].stock
            amt = 0
            # amount of this variant already reserved in the cart
            if sess.c.has_key(ivid):
                in_cart = sess.c[ivid]
            else:
                in_cart = 0
            #
            avail = stock - in_cart
            left = avail - iamt
            # cap the reported headroom at the per-request maximum
            if left > CART_ADD_MAX:
                left = CART_ADD_MAX
            #
            if avail < 1 or left < 0:
                # fix: was 'outofstock, -1' — the stray space was
                # inconsistent with every other 'status,value' response
                # and breaks clients that split the reply on ','
                return 'outofstock,-1'
            else:
                amt = iamt
        else:
            # stock checking disabled: accept the requested amount
            left = CART_ADD_MAX
            amt = iamt
        #
        if sess.c.has_key(ivid):
            sess.c[ivid] += amt
        else:
            sess.c[ivid] = amt
        #
        return 'ok,%s' %(left)
class cart_del:
    """Remove one variant from the session cart (POST only)."""
    def GET(self):
        raise web.seeother('/')
    def POST(self):
        i = web.input(vid='', api=0)
        try:
            vid = int(i.vid)
        except ValueError:
            return 'value'
        except:
            return 'error'
        #
        if vid not in sess.c:
            return 'value'
        #
        sess.c.pop(vid)
        return 'ok'
class cart_empty:
    """Drop every item from the session cart (POST only)."""
    def GET(self):
        raise web.seeother('/')
    def POST(self):
        web.input(api=0)
        try:
            sess.c = {}
        except:
            return 'error'
        return 'ok'
class cart_checkout:
    """Checkout form; an empty cart bounces back to the cart page."""
    def GET(self):
        if not sess.c:
            raise web.seeother('/cart')
        t = tpl('checkout')
        ttl = msgs['title_checkout'].capitalize()
        return tplb(t.checkout(title(ttl), ddatac('cart')))
    def POST(self):
        raise web.seeother('/')
class cart_checkout_done:
    """Checkout completion: GET shows the stored confirmation exactly
    once; POST validates the order form and writes the invoice."""
    def GET(self):
        # one-shot page: sess.co is consumed so a refresh cannot replay it
        if not sess.co:
            raise web.seeother('/')
        else:
            codata = sess.co
            sess.co = {}
            t = tpl('checkout_done')
            ttl = msgs['title_checkout'].capitalize()
            o = t.checkout_done(title(ttl), codata)
            o = tplb(o)
            return o
    def POST(self):
        # returns one of: 'error', 'captcha', 'value', 'invalid_email', 'ok'
        i = web.input(payment=0, cust_name='', cust_email='', ship_addr='', note='', captcha='', api=0)
        if not sess.c:
            return 'error'
        #
        try:
            payment = int(i.payment)
        except:
            return 'error'
        #
        if i.captcha != sess.captcha:
            return 'captcha'
        #
        cust_name = i.cust_name.strip()
        cust_email = i.cust_email.strip()
        ship_addr = i.ship_addr.strip()
        note = i.note.strip()
        if not payment > 0 or not cust_name or not cust_email or not ship_addr:
            return 'value'
        #
        if not is_valid_email(cust_email):
            return 'invalid_email'
        #
        invoice_id = invoicesave(payment, cust_name, cust_email, ship_addr, note)
        if invoice_id:
            return 'ok'
        #
        return 'error'
class payment_confirm:
    """Bank-transfer confirmation: GET renders the form, POST appends
    the confirmation to the open invoice and mails the site owner."""
    def GET(self):
        t = tpl('payment_confirm')
        ttl = msgs['title_payment_confirm'].capitalize()
        data = {}
        # date pickers: day/month ranges plus previous/current/next year
        yy = int(time.strftime('%Y'))
        mm = int(time.strftime('%m'))
        dd = int(time.strftime('%d'))
        data['date_range'] = ( (range(1,32), dd), (range(1,13), mm), ((yy-1, yy, yy+1), yy))
        data['all_bank'] = dbank(complete_name=True)
        data['method'] = m.t(m.BANK_PAYMENT_METHOD, sess.lang)['method']
        data['message'] = smget()
        o = t.payment_confirm(title(ttl), data)
        o = tplb(o)
        return o
    def POST(self):
        i = web.input(api=0, invoice='',
            date_day=0, date_month=0, date_year=0, name='', account='',
            total=0, bank='', method='', note='', captcha='')
        # an invalid date leaves dt = None, reported after the other checks
        try:
            iday = int(i.date_day)
            imonth = int(i.date_month)
            iyear = int(i.date_year)
            dt = datetime.date(iyear, imonth, iday)
        except:
            dt = None
        #
        invoice = i.invoice.strip()
        name = i.name.strip()
        account = i.account.strip()
        bank = i.bank.strip()
        method = i.method.strip()
        note = i.note.strip()
        try:
            total = nrfloat(i.total)
        except:
            total = 0
        #
        if not i.captcha == sess.captcha:
            sess.msg = ['error', msgs['msg_input_error_captcha']]
            raise web.seeother('/payment/confirm')
        #
        if not invoice or not name or not total or not bank or not method:
            sess.msg = ['error', msgs['msg_payment_confirm_error_required']]
            raise web.seeother('/payment/confirm')
        #
        if not dt:
            sess.msg = ['error', msgs['msg_payment_confirm_error_invalid_date']]
            raise web.seeother('/payment/confirm')
        #
        # only open (done <> 1) invoices may receive a confirmation
        q = 'select cart_id, confirm_info from tr_invoice_header where cart_id=$cart_id and done <> 1'
        a = {'cart_id': invoice}
        r = query(q, a)
        cart_id = rget(r, 'cart_id')
        confirm = rget(r, 'confirm_info', default=[], to_yaml=True)
        if not cart_id:
            sess.msg = ['error', msgs['msg_payment_confirm_error_notfound']]
            raise web.seeother('/payment/confirm')
        #
        #ok: append this confirmation to the invoice's YAML trail
        confirm.append ({
            'date': dt,
            'name': name,
            'account': account,
            'bank': bank,
            'method': method,
            'total': total,
            'note': note,
        })
        confirm_info = yaml.dump(confirm)
        q = 'update tr_invoice_header set confirm_info=$confirm_info,log_id=$logid where cart_id=$cart_id'
        a = {'cart_id': invoice, 'confirm_info': confirm_info, 'logid': sess.log}
        r = query(q, a)
        #
        # notify the site owner by mail
        email_field = (
            msgs['header_payment_confirm_date'].capitalize(), dt,
            msgs['header_payment_confirm_invoice'].capitalize(), invoice,
            msgs['header_payment_confirm_name'].capitalize(), name,
            msgs['header_payment_confirm_bank'].capitalize(), bank,
            msgs['header_payment_confirm_method'].capitalize(), method,
            msgs['header_payment_confirm_account'].capitalize(), account,
            msgs['header_payment_confirm_total'].capitalize(), total,
            msgs['header_payment_confirm_note'].capitalize(), note,
        )
        email_text = ' %s: %s \r\n %s: %s \r\n %s: %s \r\n %s: %s \r\n %s: %s \r\n %s: %s \r\n %s: %s \r\n %s: \r\n %s' %email_field
        #
        sendmail(MAIL_DEFAULT, msgs['header_payment_confirm_email_subject'].capitalize(), email_text)
        #
        sess.msg = ['ok', '']
        raise web.seeother('/payment/confirm')
class fs:
    """Stream a stored file by id; image/flash types render inline
    unless an explicit download is requested."""
    def GET(self, id):
        f = dfs(id, content=True, format=False, name_add=False)
        if not f:
            raise web.notfound()
        #
        rec = f[0]
        # NOTE(review): the filename is embedded unquoted in the header —
        # confirm stored names cannot contain header-breaking characters
        disposition = 'attachment; filename=' + rec.name
        if any(tag in rec.type.lower() for tag in ('flash', 'image')):
            disposition = 'inline; filename=' + rec.name
        #
        if web.input(download='').download == '1':
            disposition = 'attachment; filename=' + rec.name
        #
        web.header('Content-Type', rec.type)
        web.header('Content-Length', rec.size)
        web.header('Content-Disposition', disposition)
        return rec.content
    def POST(self):
        raise web.seeother('/')
class lang_set:
    """Switch the session language when the target language has a
    message table, then return to the remembered page (or the root)."""
    def GET(self, lang):
        if m.t(m.MSG, lang):
            sess.lang = lang
        #
        path = '/'
        if sess.has_key('fullpath') and sess['fullpath']:
            path = sess['fullpath']
        #
        raise web.seeother(path)
    def POST(self):
        raise web.seeother('/')
class browser_set:
    """Pin the session browser class (mobile/wap/desktop); anything
    else clears it.  Always returns home."""
    def GET(self, category):
        wanted = category.lower().strip()
        sess.browserclass = wanted if wanted in ['mobile', 'wap', 'desktop'] else ''
        raise web.seeother('/')
    def POST(self):
        raise web.seeother('/')
class news:
    """News listing page."""
    def GET(self):
        web.input(id=0, api=0)
        t = tpl('news')
        ttl = msgs['title_news'].capitalize()
        return tplb(t.news(title(ttl), {'news': dnews(read_session=False)}))
    def POST(self):
        raise web.seeother('/')
class news_hide:
    """Mark the news banner as read for this session (POST only)."""
    def GET(self):
        raise web.seeother('/news')
    def POST(self):
        web.input(api=0)
        sess.newsread = True
        # the flag was just set, so this always reports success
        return 'ok'
class faq:
    """FAQ page, entries grouped by topic."""
    def GET(self):
        t = tpl('faq')
        ttl = msgs['title_faq'].capitalize()
        return tplb(t.faq(title(ttl), {'faq': dfaq(group=True)}))
    def POST(self):
        raise web.seeother('/')
class go:
    """Render one active user-content page by id."""
    def GET(self, id):
        if not id:
            raise web.notfound()
        #
        q = 'select page,content from ms_user_content where active=1 and id=$id'
        rows = query(q, {'id': id})
        if not rows:
            raise web.notfound()
        #
        uc = rows[0]
        # resolve multilingual fields to the current language
        uc['page'] = mlget(uc['page'])
        uc['content'] = mlget(uc['content'])
        #
        t = tpl('go')
        ttl = uc['page'].capitalize()
        return tplb(t.go(title(ttl), uc))
    def POST(self):
        raise web.seeother('/')
class admin:
    """Admin dashboard; version, database and log figures are shown
    only to users with the admin flag."""
    def GET(self):
        t = tpl('admin')
        ttl = msgs['title_admin'].capitalize()
        #
        data = {'menu': dadmin()}
        #
        log_size_r = query('select count(id) as count from tr_log')
        log_size = rget(log_size_r, 'count', default=0)
        #
        if isadmin():
            data['version'] = VERSION
            data['database_size'] = nf(os.path.getsize(DATA_FILE))
            data['log_size'] = nf(log_size)
        else:
            data['version'] = ''
            data['database_size'] = ''
            data['log_size'] = ''
        #
        return tplb(t.admin(title(ttl), data))
    def POST(self):
        raise web.seeother('/')
class passwd:
    """Password change form (GET) and handler (POST).

    POST answers with one of: 'value', 'mismatch', 'auth', 'same',
    'ok', 'error'.
    """
    def GET(self):
        t = tpl('passwd')
        ttl = msgs['title_passwd'].capitalize()
        return tplb(t.passwd(title(ttl), ''))
    def POST(self):
        i = web.input(old_password='', new_password_1='', new_password_2='', api=0)
        #
        if not (i.new_password_1 and i.new_password_2):
            return 'value'
        #
        if i.new_password_1 != i.new_password_2:
            return 'mismatch'
        #
        # verify the old password against the active account
        old_hash = md5(i.old_password).hexdigest()
        q = 'select id from ms_user where id=$uid and password=$password and active=1'
        rows = query(q, {'uid': sess.u, 'password': old_hash})
        if len(rows) != 1:
            return 'auth'
        #
        new_hash = md5(i.new_password_1).hexdigest()
        if old_hash == new_hash:
            return 'same'
        #
        a = {'uid': sess.u, 'newpassword': new_hash}
        updated = db.update('ms_user', where='id=$uid', password=new_hash, log_id=sess.log, vars=a)
        if updated:
            return 'ok'
        #
        return 'error'
class profile:
    """Profile page (GET) and profile update handler (POST)."""
    def GET(self):
        t = tpl('profile')
        ttl = msgs['title_profile'].capitalize()
        data = {'profile': dprofile(id=sess.u), 'message': smget()}
        return tplb(t.profile(title(ttl), data))
    def POST(self):
        i = web.input(first_name='', last_name='', email='', phone='', fax='', web='', address='')
        # multi-value contact fields are normalized and stored as YAML
        updated = db.update(
            'ms_user', where='id=$id',
            first_name=i.first_name.strip(),
            last_name=i.last_name.strip(),
            email=yaml.dump(sepnorm(i.email, as_string=False)),
            phone=yaml.dump(sepnorm(i.phone, as_string=False)),
            fax=yaml.dump(sepnorm(i.fax, as_string=False)),
            web=yaml.dump(sepnorm(i.web, as_string=False)),
            address=i.address.strip(),
            log_id=sess.log,
            vars={'id': sess.u})
        if updated:
            sess.msg = ['ok', msgs['msg_profile_updated']]
        raise web.seeother('/profile')
class admin_fs:
    """Admin file-manager listing."""
    def GET(self):
        limit_kb = res['max_file_size']/1024
        data = {'files': dfs(), 'max_file_size_kb': nf(limit_kb), 'error_message': smget()}
        t = tpl('admin_fs')
        ttl = msgs['title_fs'].capitalize()
        return tplb(t.admin_fs(title(ttl), data))
    def POST(self):
        raise web.seeother('/')
class admin_fs_del:
    """Delete a stored file by id (POST only)."""
    def GET(self):
        raise web.seeother('/admin/fs')
    def POST(self):
        i = web.input(id=0, api=0)
        try:
            fid = int(i.id)
        except:
            return 'error'
        #
        # confirm the row exists before attempting the delete
        exists = db.select('ms_file', what='name', where='id=$id', vars={'id': fid})
        if not exists:
            return 'error'
        #
        deleted = db.delete('ms_file', where='id=$id', vars={'id': fid})
        if deleted:
            return 'ok'
        #
        return 'error'
class admin_fs_upload:
    """Receive a file upload and store it (with metadata) in ms_file."""
    def GET(self):
        raise web.seeother('/admin/fs')
    def POST(self):
        i = web.input(userfile={}, api=0)
        iname = i.userfile.filename
        #
        if not iname.strip():
            raise web.seeother('/admin/fs')
        #
        # duplicate names get an incrementing numeric suffix (name_add)
        name_add = ''
        q = 'select max(abs(cast(name_add as integer)))+1 as max from ms_file where name=$name'
        a = {'name':iname}
        r_check = query(q, a)
        if r_check and r_check[0].max > 0:
            name_add= str(r_check[0].max)
        #
        # enforce the configured size limit before touching the DB
        icontent = i.userfile.value
        size = len(icontent)
        if size > res['max_file_size']:
            size = nf(size/1024)
            sess.msg = ['ok', '%s (%s: %s KB)' %(msgs['msg_fs_error_upload_file_too_big'], iname, size)]
            raise web.seeother('/admin/fs')
        #
        itype = i.userfile.type
        itype_opt = yaml.dump(i.userfile.type_options)
        idisposition = i.userfile.disposition
        idisposition_opt = yaml.dump(i.userfile.disposition_options)
        date_file = now()
        #
        # snapshot the upload's MIME headers as YAML
        headers = {}
        for j in i.userfile.headers.keys():
            headers[j] = i.userfile.headers[j]
        iheaders = yaml.dump(headers)
        #
        r = db.insert('ms_file', log_id=sess.log, name=iname, name_add=name_add, size=size, type=itype,
            type_options=itype_opt, disposition=idisposition,
            disposition_options=idisposition_opt, content=sqlite3.Binary(icontent), date_file=date_file,
            headers=iheaders)
        if r:
            raise web.seeother('/admin/fs')
        #
        return 'error'
class admin_fs_view:
    """Metadata view of one stored file."""
    def GET(self, id):
        rec = dfs(id)
        if not rec:
            raise web.seeother('/admin/fs')
        #
        t = tpl('admin_fs_view')
        ttl = msgs['title_fs_view'].capitalize()
        return tplb(t.admin_fs_view(title(ttl), rec[0]))
    def POST(self):
        raise web.seeother('/')
class admin_system:
    """System configuration: GET renders every config param, POST
    validates and writes them all back to ms_config."""
    def GET(self):
        t = tpl('admin_system')
        ttl = msgs['title_system'].capitalize()
        #
        data = {}
        data['site_description'] = ypget('site_description')
        data['extra_info'] = ypget('extra_info')
        data['cart'] = res['cart']
        data['use_cart'] = ucart()
        data['cart_check_stock'] = pget('cart_check_stock')
        data['invoice_extra_info'] = ypget('invoice_extra_info')
        data['offline'] = pget('offline')
        data['currency'] = pget('currency', callback=dcur)
        data['all_currency'] = dcur()
        data['template'] = pget('template')
        data['all_template'] = atpl()
        data['logo_file'] = int(pget('logo_file', default=0))
        data['all_files'] = dfs(filter=['image','flash'])
        data['keywords'] = pget('site_keywords', default='')
        data['news_max'] = pget('news_max')
        data['message'] = smget()
        data['expose_time'] = pget('expose_time')
        data['promote'] = pget('promote')
        data['max_product_category'] = pget('max_product_category')
        data['max_product'] = pget('max_product')
        data['max_file_size'] = pget('max_file_size')
        data['max_files'] = pget('max_files')
        data['mail_smtp'] = pget('mail_smtp')
        data['mail_user'] = pget('mail_user')
        data['mail_pass'] = pget('mail_pass')
        data['mail_default'] = pget('mail_default')
        data['homepage'] = pget('homepage')
        data['font_dir'] = pget('font_dir')
        data['payments'] = pget('payments')
        data['lang'] = pget('lang')
        #
        o = t.admin_system(title(ttl), data)
        o = tplb(o)
        return o
    def POST(self):
        i = web.input(api=0,
            site_desc='',
            extra_info='',
            use_cart='',
            cart_check_stock='',
            invoice_extra_info='',
            site_offline='',
            currency='',
            template=TEMPLATE_DEFAULT,
            logo_file='',
            keywords='',
            news_max='',
            expose_time='',
            promote='',
            payments='',
            max_product_category='',
            max_product='',
            max_file_size='',
            max_files='',
            mail_smtp='',
            mail_user='',
            mail_pass='',
            mail_default='',
            homepage='',
            font_dir='',
            lang='',
            )
        #
        # multilingual fields go through mlset; the rest are raw strings
        site_desc = mlset(i, 'site_desc')
        extra_info = mlset(i, 'extra_info')
        use_cart = i.use_cart
        cart_check_stock = i.cart_check_stock
        invoice_extra_info = mlset(i, 'invoice_extra_info')
        site_offline = i.site_offline
        currency = i.currency
        template = i.template
        logo_file = i.logo_file
        keywords = i.keywords
        news_max = i.news_max
        expose_time= i.expose_time
        promote= i.promote
        payments= i.payments
        max_product_category= i.max_product_category
        max_product= i.max_product
        max_file_size= i.max_file_size
        max_files= i.max_files
        mail_smtp= i.mail_smtp
        mail_user= i.mail_user
        mail_pass= i.mail_pass
        mail_default= i.mail_default
        homepage= i.homepage
        font_dir= i.font_dir
        lang = i.lang
        #
        # reject templates without a readable info file, or whose declared
        # owner does not include this domain
        tpinfo = tinfo(template)
        if not tpinfo:
            template = TEMPLATE_DEFAULT
        else:
            try:
                owner = tpinfo['general']['owner']
            except:
                owner = ''
            if owner and DOMAIN.lower() not in owner:
                template = TEMPLATE_DEFAULT
        #
        config = {
            'site_description' : site_desc,
            'extra_info' : extra_info,
            'use_cart' : use_cart,
            'invoice_extra_info': invoice_extra_info,
            'offline' : site_offline,
            'currency' : currency,
            'template' : template,
            'cart_check_stock' : cart_check_stock,
            'logo_file' : logo_file,
            'site_keywords' : keywords,
            'news_max' : news_max,
            'expose_time' : expose_time,
            'promote' : promote,
            'payments' : payments,
            'max_product_category' : max_product_category,
            'max_product' : max_product,
            'max_file_size' : max_file_size,
            'max_files' : max_files,
            'mail_smtp' : mail_smtp,
            'mail_user' : mail_user,
            'mail_pass' : mail_pass,
            'mail_default' : mail_default,
            'homepage' : homepage,
            'font_dir' : font_dir,
            'lang' : lang,
        }
        # NOTE(review): this loop rebinds i (the web.input storage) — harmless
        # here since i is not used afterwards, but fragile if code is added below
        for i in config.keys():
            r = db.update('ms_config', value=config[i], where='param=$param', log_id=sess.log, vars={'param': i})
        #
        sess.msg = ['ok', msgs['msg_sys_saved']]
        raise web.seeother('/admin/system')
class admin_product:
    """Admin landing page for the product section."""
    def GET(self):
        t = tpl('admin_product')
        ttl = msgs['title_admin_product'].capitalize()
        return tplb(t.admin_product(title(ttl), dadmin(section=['admin.product'])))
    def POST(self):
        raise web.seeother('/')
class admin_product_category:
    """Admin listing of product categories."""
    def GET(self):
        t = tpl('admin_product_category')
        ttl = msgs['title_admin_product_category'].capitalize()
        data = {'category': dpro_category(), 'message': smget()}
        return tplb(t.admin_product_category(title(ttl), data))
    def POST(self):
        raise web.seeother('/')
class admin_product_category_del:
    """Soft-delete a product category (POST only)."""
    def GET(self):
        raise web.seeother('/admin/product/category')
    def POST(self):
        i = web.input(api=0, id=0)
        try:
            cid = i.id
        except:
            return 'error'
        #
        if not dpro_category(cid):
            return 'error'
        #
        # deactivate rather than delete, preserving referencing rows
        q = 'update ms_category set active=0,log_id=$logid where id=$id'
        query(q, {'id': cid, 'logid': sess.log})
        return 'ok'
class admin_product_category_save:
    """Add a new product category or save edits to every existing one
    (POST only, type='add' or 'edit')."""
    def GET(self):
        raise web.seeother('/admin/product/category')
    def POST(self):
        i = web.input(api=0, type='add')
        #
        if not i.type in ['add', 'edit']:
            raise web.internalerror()
        #
        # 'edit' touches every existing category; 'add' uses the 'new' key
        if i.type == 'edit':
            allid = [c.id for c in dpro_category(field='id')]
        elif i.type == 'add':
            allid = ['new']
        #
        # NOTE(review): the loop variable 'a' is reused below as the query
        # parameter dict, and 'm' shadows the messages module within this
        # method — both work but are fragile
        for a in allid:
            try:
                ipriority = int( siget(i, 'priority', str(a)) )
            except:
                ipriority = 0
            #
            m = mlset(i, str(a))
            #
            if disblank(m, True):
                sess.msg = ['error', msgs['msg_product_category_error_required']]
                raise web.seeother('/admin/product/category')
            else:
                if i.type == 'edit':
                    q = 'update ms_category set name=$name, active=1, priority=$priority, log_id=$logid where id=$id'
                    a = {'id': a, 'name': m, 'priority': ipriority, 'logid': sess.log}
                    r = query(q, a)
                    sess.msg = ['ok', msgs['msg_product_category_saved']]
                elif i.type == 'add':
                    # enforce the configured category limit before inserting
                    cat = dpro_category()
                    if len(cat) >= res['max_product_category']:
                        sess.msg = ['error', msgs['msg_product_category_error_max']]
                        raise web.seeother('/admin/product/category')
                    #
                    q = 'insert into ms_category (name,active, priority,log_id) values($name, 1, $priority, $logid)'
                    a = {'name': m, 'priority': ipriority, 'logid': sess.log}
                    r = query(q, a)
                    sess.msg = ['ok', msgs['msg_product_category_added']]
        #
        raise web.seeother('/admin/product/category')
class admin_product_group:
    """Admin listing of product groups."""
    def GET(self):
        t = tpl('admin_product_group')
        ttl = msgs['title_admin_product_group'].capitalize()
        data = {
            'category': dpro_category(field='id,name'),
            'group': dpro_group(),
            'message': smget(),
            'files': dfs(filter=['image','flash']),
        }
        return tplb(t.admin_product_group(title(ttl), data))
    def POST(self):
        raise web.seeother('/')
class admin_product_group_del:
    """Soft-delete a product group (POST only)."""
    def GET(self):
        raise web.seeother('/admin/product/group')
    def POST(self):
        i = web.input(api=0, id=0)
        try:
            gid = i.id
        except:
            return 'error'
        #
        if not dpro_group(id=gid):
            return 'error'
        #
        # deactivate rather than delete, preserving referencing rows
        q = 'update ms_product set active=0,log_id=$logid where id=$id'
        query(q, {'id': gid, 'logid': sess.log})
        return 'ok'
class admin_product_group_save:
    """Insert or update a product group (POST only, type='add'/'edit')."""
    def GET(self):
        raise web.seeother('/admin/product/group')
    def POST(self):
        i = web.input(api=0, type='add', id=0, file_id=0)
        #
        if not i.type in ['add', 'edit']:
            raise web.internalerror()
        #
        # NOTE(review): category_id has no default in web.input above, so a
        # missing field raises AttributeError here — caught by this except
        try:
            iid = int(i.id)
            icategory_id = int(i.category_id)
            ifile_id = int(i.file_id)
        except:
            sess.msg = ['error', msgs['msg_product_group_error_required']]
            raise web.seeother('/admin/product/group')
        #
        # referenced category and file must actually exist (0 = no file)
        cat = [x.id for x in dpro_category(field='id')]
        files = [x.id for x in dfs(filter=['image','flash'])]
        if (not icategory_id in cat) or (not ifile_id in files and ifile_id != 0): #illegal
            raise web.internalerror()
        #
        try:
            ipriority = int(i.priority)
        except:
            ipriority = 0
        iname = mlset(i, 'name')
        idesc = mlset(i, 'description')
        ifull = mlset(i, 'fullinfo')
        #
        if disblank(iname, True):
            sess.msg = ['error', msgs['msg_product_group_error_required']]
        else:
            if i.type == 'add':
                # enforce the configured product limit before inserting
                grp = dpro_group()
                if len(grp) >= res['max_product']:
                    sess.msg = ['error', msgs['msg_product_group_error_max']]
                    raise web.seeother('/admin/product/group')
                #
                r = db.insert('ms_product', category_id=icategory_id, active=1, file_id=ifile_id,
                    name=iname, description=idesc, full_info=ifull, priority=ipriority, log_id=sess.log)
                if r:
                    sess.msg = ['ok', msgs['msg_product_group_added']]
            elif i.type == 'edit':
                r = db.update('ms_product', category_id=icategory_id, active=1, file_id=ifile_id,
                    name=iname, description=idesc, full_info=ifull, priority=ipriority, log_id=sess.log, where='id=$id',
                    vars={'id': iid})
                if r:
                    sess.msg = ['ok', msgs['msg_product_group_saved']]
        #
        if i.type == 'edit':
            raise web.seeother('/admin/product/group/edit/%s' %(iid))
        else:
            raise web.seeother('/admin/product/group')
class admin_product_group_edit:
    """Render the edit form for one product group."""
    def GET(self, id):
        template = tpl('admin_product_group_edit')
        heading = msgs['title_admin_product_group'].capitalize()
        data = {
            'category': dpro_category(field='id,name'),
            'group': dpro_group(),
            'detail': dpro_group(id=id),
            'message': smget(),
            'files': dfs(filter=['image','flash']),
        }
        page = template.admin_product_group_edit(title(heading), data)
        return tplb(page)
    def POST(self):
        # Form submissions go to admin_product_group_save instead.
        raise web.seeother('/')
class admin_product_item:
    """List product variants with their stock settings."""
    def GET(self):
        template = tpl('admin_product_item')
        heading = msgs['title_admin_product_item'].capitalize()
        data = {
            'group': dpro_group(field='id,name'),
            'message': smget(),
            'item': dpro_item(),
            'cart_check_stock': pget('cart_check_stock'),
            'files': dfs(),
        }
        page = template.admin_product_item(title(heading), data)
        return tplb(page)
    def POST(self):
        raise web.seeother('/')
class admin_product_item_del:
    """Soft-delete a product variant."""
    def GET(self):
        # Deletion is POST-only.
        raise web.seeother('/admin/product/item')
    def POST(self):
        form = web.input(api=0, id=0)
        try:
            item_id = form.id
        except:
            return 'error'
        # The variant must exist before we touch the table.
        if not dpro_item(id=item_id):
            return 'error'
        # Soft delete: flip the active flag, keep the row for history.
        sql = 'update ms_product_variant set active=0,log_id=$logid where id=$id'
        query(sql, {'id': item_id, 'logid': sess.log})
        return 'ok'
class admin_product_item_save:
    """Create or bulk-edit product variants (ms_product_variant rows)."""
    def GET(self):
        # Saving is POST-only.
        raise web.seeother('/admin/product/item')
    def POST(self):
        i = web.input(api=0, type='add')
        #
        if not i.type in ['add', 'edit']:
            raise web.internalerror()
        #
        # 'edit' re-saves every existing variant from the bulk form;
        # 'add' processes the single pseudo-id 'new'.
        if i.type == 'edit':
            allid = [c.id for c in dpro_item(field='id')]
        elif i.type == 'add':
            allid = ['new']
        #
        for a in allid:
            # Per-row form fields are suffixed with the row id (siget).
            try:
                ivfid = int( siget(i, 'vfile_id', str(a)) )
            except:
                ivfid = 0
            # Unknown file ids silently fall back to 0 (= no file).
            files = [x.id for x in dfs()]
            if not ivfid in files: ivfid = 0
            #
            try:
                ipid = int( siget(i, 'product_id', str(a)) )
            except:
                ipid = 0
            #
            try:
                istock = int( siget(i, 'stock', str(a)) )
            except:
                istock = 0
            #
            try:
                iprice = nrfloat( siget(i, 'price', str(a)) )
            except:
                iprice = 0
            #
            try:
                itaxratio = nrfloat( siget(i, 'taxratio', str(a)) )
                # Tax ratio is clamped into [0.0, 1.0].
                itaxratio = nlimit(itaxratio, decimal.Decimal('0.0'), decimal.Decimal('1.0'))
            except:
                itaxratio = 0
            #
            # Multilingual variant name, keyed by the row id.
            m = mlset(i, str(a))
            #
            if disblank(m, True) or ipid == 0:
                sess.msg = ['error', msgs['msg_product_item_error_required']]
                raise web.seeother('/admin/product/item')
            else:
                if i.type == 'edit':
                    q = 'update ms_product_variant set name=$name, active=1, product_id=$ipid, stock=$istock, price=$iprice, taxratio=$itaxratio, variant_file_id=$variant_file_id, log_id=$logid where id=$id'
                    # NOTE(review): this dict rebinds the loop variable 'a';
                    # harmless (the for loop reassigns it) but confusing.
                    a = {'id': a, 'name': m, 'ipid': ipid, 'istock': istock, 'iprice': float(iprice), 'itaxratio': float(itaxratio), 'variant_file_id': ivfid, 'logid': sess.log}
                    r = query(q, a)
                    sess.msg = ['ok', msgs['msg_product_item_saved']]
                elif i.type == 'add':
                    r = db.insert('ms_product_variant', active=1, product_id=ipid, stock=istock, price=float(iprice),
                        taxratio=float(itaxratio), name=m, variant_file_id=ivfid, log_id=sess.log)
                    if r:
                        sess.msg = ['ok', msgs['msg_product_item_added']]
        #
        raise web.seeother('/admin/product/item')
class admin_bank:
    """List the configured bank accounts."""
    def GET(self):
        template = tpl('admin_bank')
        heading = msgs['title_admin_bank'].capitalize()
        data = {
            'all_currency': dcur(),
            'message': smget(),
            'bank': dbank(),
        }
        page = template.admin_bank(title(heading), data)
        return tplb(page)
    def POST(self):
        raise web.seeother('/')
class admin_bank_del:
    """Soft-delete a bank account."""
    def GET(self):
        raise web.seeother('/admin/bank')
    def POST(self):
        form = web.input(api=0, id=0)
        try:
            bank_id = form.id
        except:
            return 'error'
        # The bank row must exist before deletion.
        if not dbank(id=bank_id):
            return 'error'
        # Soft delete: deactivate instead of removing the row.
        sql = 'update ms_bank set active=0,log_id=$logid where id=$id'
        query(sql, {'id': bank_id, 'logid': sess.log})
        return 'ok'
class admin_bank_save:
    """Create or bulk-edit bank accounts (ms_bank rows)."""
    def GET(self):
        # Saving is POST-only.
        raise web.seeother('/admin/bank')
    def POST(self):
        i = web.input(api=0, type='add')
        #
        if not i.type in ['add', 'edit']:
            raise web.internalerror()
        #
        # 'edit' re-saves every existing bank row; 'add' handles 'new'.
        if i.type == 'edit':
            allid = [c.id for c in dbank(field='id')]
        elif i.type == 'add':
            allid = ['new']
        #
        for a in allid:
            # Per-row fields are suffixed with the row id (siget).
            try:
                icur = int( siget(i, 'currency_id', str(a)) )
            except:
                icur = 0
            #
            name = siget(i, 'name', str(a))
            holder = siget(i, 'holder', str(a))
            account = siget(i, 'account', str(a))
            branch = siget(i, 'branch', str(a))
            address = siget(i, 'address', str(a))
            country = siget(i, 'country', str(a))
            swift = siget(i, 'swift', str(a))
            #
            # Name, holder and account number are mandatory.
            if not name or not holder or not account:
                sess.msg = ['error', msgs['msg_bank_error_required']]
                raise web.seeother('/admin/bank')
            else:
                if i.type == 'edit':
                    q = 'update ms_bank set name=$name, active=1, holder=$holder, account=$account, branch=$branch, address=$address, country=$country, swift=$swift, currency_id=$icur, log_id=$logid where id=$id'
                    # NOTE(review): rebinds the loop variable 'a'; harmless
                    # (reassigned on the next iteration) but confusing.
                    a = {'id': a, 'name': name, 'icur': icur, 'holder':holder, 'account':account, 'branch':branch, 'address':address, 'country':country, 'swift':swift, 'logid': sess.log}
                    r = query(q, a)
                    sess.msg = ['ok', msgs['msg_bank_saved']]
                elif i.type == 'add':
                    r = db.insert('ms_bank', name=name, active=1, holder=holder, account=account, branch=branch, address=address, country=country, swift=swift, currency_id=icur, log_id=sess.log)
                    if r:
                        sess.msg = ['ok', msgs['msg_bank_added']]
        #
        raise web.seeother('/admin/bank')
class admin_paypal:
    """List the configured PayPal accounts."""
    def GET(self):
        template = tpl('admin_paypal')
        heading = msgs['title_admin_paypal'].capitalize()
        data = {
            'paypal': dpaypal(),
            'message': smget(),
        }
        page = template.admin_paypal(title(heading), data)
        return tplb(page)
    def POST(self):
        raise web.seeother('/')
class admin_paypal_del:
    """Soft-delete a PayPal account."""
    def GET(self):
        raise web.seeother('/admin/paypal')
    def POST(self):
        form = web.input(api=0, id=0)
        try:
            account_id = form.id
        except:
            return 'error'
        # The account must exist before deletion.
        if not dpaypal(account_id):
            return 'error'
        # Soft delete: deactivate the row.
        sql = 'update ms_paypal set active=0,log_id=$logid where id=$id'
        query(sql, {'id': account_id, 'logid': sess.log})
        return 'ok'
class admin_paypal_save:
    """Add a PayPal account (only 'add' is supported)."""
    def GET(self):
        # Saving is POST-only.
        raise web.seeother('/admin/paypal')
    def POST(self):
        i = web.input(api=0, type='add')
        # Only 'add' is a valid operation for PayPal accounts.
        if i.type not in ['add']:
            raise web.internalerror()
        #
        pending = ['new'] if i.type == 'add' else []
        for key in pending:
            account = siget(i, 'account', str(key)).lower()
            if not account:
                sess.msg = ['error', msgs['msg_paypal_error_required']]
                raise web.seeother('/admin/paypal')
            if i.type == 'add':
                # Reject duplicates (case-insensitive).
                existing = [x.account.lower() for x in dpaypal()]
                if account in existing:
                    sess.msg = ['error', msgs['msg_paypal_error_exists']]
                    raise web.seeother('/admin/paypal')
                if db.insert('ms_paypal', active=1, account=account, log_id=sess.log):
                    sess.msg = ['ok', msgs['msg_paypal_added']]
        #
        raise web.seeother('/admin/paypal')
class admin_yahoo:
    """List the configured Yahoo messenger accounts."""
    def GET(self):
        template = tpl('admin_yahoo')
        heading = msgs['title_admin_yahoo'].capitalize()
        data = {
            'yahoo': dyahoo(),
            'message': smget(),
        }
        page = template.admin_yahoo(title(heading), data)
        return tplb(page)
    def POST(self):
        raise web.seeother('/')
class admin_yahoo_del:
    """Permanently delete a Yahoo account row."""
    def GET(self):
        raise web.seeother('/admin/yahoo')
    def POST(self):
        form = web.input(api=0, id=0)
        try:
            account_id = form.id
        except:
            return 'error'
        # The account must exist before deletion.
        if not dyahoo(account_id):
            return 'error'
        # Hard delete: Yahoo rows carry no history, so the row is removed.
        sql = 'delete from ms_yahoo where id=$id'
        query(sql, {'id': account_id})
        return 'ok'
class admin_yahoo_save:
    """Add a Yahoo messenger account (only 'add' is supported)."""
    def GET(self):
        # Saving is POST-only.
        raise web.seeother('/admin/yahoo')
    def POST(self):
        i = web.input(api=0, type='add')
        #
        if not i.type in ['add']:
            raise web.internalerror()
        #
        allid = []
        if i.type == 'add':
            allid = ['new']
        #
        for a in allid:
            account = siget(i, 'account', str(a)).lower()
            if not account:
                sess.msg = ['error', msgs['msg_yahoo_error_required']]
                raise web.seeother('/admin/yahoo')
            else:
                if i.type == 'add':
                    # Bug fix: the original compared against x.account[0]
                    # (the FIRST CHARACTER only), so the duplicate check
                    # never matched a full account name. Compare the whole
                    # account, as admin_paypal_save does.
                    yahoo = [x.account.lower() for x in dyahoo()]
                    if account in yahoo:
                        sess.msg = ['error', msgs['msg_yahoo_error_exists']]
                        raise web.seeother('/admin/yahoo')
                    else:
                        r = db.insert('ms_yahoo', account=account, log_id=sess.log)
                        if r:
                            sess.msg = ['ok', msgs['msg_yahoo_added']]
        #
        raise web.seeother('/admin/yahoo')
class admin_link:
    """List the tracking/marketing link codes."""
    def GET(self):
        template = tpl('admin_link')
        heading = msgs['title_admin_link'].capitalize()
        data = {
            'link': dlink(),
            'message': smget(),
        }
        page = template.admin_link(title(heading), data)
        return tplb(page)
    def POST(self):
        raise web.seeother('/')
class admin_link_del:
    """Permanently delete a link code."""
    def GET(self):
        raise web.seeother('/admin/link')
    def POST(self):
        form = web.input(api=0, id=0)
        try:
            link_id = form.id
        except:
            return 'error'
        # The link must exist before deletion.
        if not dlink(link_id):
            return 'error'
        # Hard delete the row.
        sql = 'delete from ms_link where id=$id'
        query(sql, {'id': link_id})
        return 'ok'
class admin_link_save:
    """Create or bulk-edit link codes (ms_link rows)."""
    def GET(self):
        # Saving is POST-only.
        raise web.seeother('/admin/link')
    def POST(self):
        i = web.input(api=0, id=0, type='add')
        #
        if not i.type in ['add', 'edit']:
            raise web.internalerror()
        #
        # 'add' handles the single pseudo-id 'new'; 'edit' re-saves all rows.
        allid = []
        if i.type == 'add':
            allid = ['new']
        elif i.type == 'edit':
            allid = [x.id for x in dlink(field='id')]
        #
        for a in allid:
            # NOTE(review): the update below trusts the form-provided id
            # field rather than the row id 'a' — confirm that is intended.
            try:
                iid = int ( siget(i, 'id', str(a)) )
            except:
                iid = 0
            code = siget(i, 'code', str(a)).lower()
            if not code:
                sess.msg = ['error', msgs['msg_link_error_required']]
                raise web.seeother('/admin/link')
            else:
                if i.type == 'add':
                    r = db.insert('ms_link', code=code, log_id=sess.log)
                    if r:
                        sess.msg = ['ok', msgs['msg_link_added']]
                elif i.type == 'edit':
                    r = db.update('ms_link', code=code, log_id=sess.log, where='id=$id', vars={'id': iid})
                    if r:
                        sess.msg = ['ok', msgs['msg_link_updated']]
        #
        raise web.seeother('/admin/link')
class admin_news:
    """List all news articles for administration."""
    def GET(self):
        template = tpl('admin_news')
        heading = msgs['title_admin_news'].capitalize()
        data = {
            'news': dnews(read_session=False),
            'message': smget(),
            'files': dfs(filter=['image','flash']),
        }
        page = template.admin_news(title(heading), data)
        return tplb(page)
    def POST(self):
        raise web.seeother('/')
class admin_news_del:
    """Permanently delete a news article."""
    def GET(self):
        raise web.seeother('/admin/news')
    def POST(self):
        form = web.input(api=0, id=0)
        try:
            news_id = form.id
        except:
            return 'error'
        # The article must exist before deletion.
        if not dnews(id=news_id, read_session=False):
            return 'error'
        # Hard delete the row.
        sql = 'delete from tr_news where id=$id'
        query(sql, {'id': news_id})
        return 'ok'
class admin_news_save:
    """Create or update a news article (tr_news row)."""
    def GET(self):
        # Saving is POST-only.
        raise web.seeother('/admin/news')
    def POST(self):
        i = web.input(api=0, type='add', id=0, file_id=0)
        #
        if not i.type in ['add', 'edit']:
            raise web.internalerror()
        #
        try:
            iid = int(i.id)
            ifile_id = int(i.file_id)
        except:
            sess.msg = ['error', msgs['msg_news_error_required']]
            raise web.seeother('/admin/news')
        #
        # Only a known image/flash file (or 0 = none) may be attached.
        files = [x.id for x in dfs(filter=['image','flash'])]
        if (not ifile_id in files and ifile_id != 0): #illegal
            raise web.internalerror()
        #
        # Multilingual fields.
        ititle = mlset(i, 'title')
        idesc = mlset(i, 'description')
        inews = mlset(i, 'news')
        #
        if not i.has_key('date'):
            date = now()
        else:
            # Bug fix: the original read "date = i.date()" — web.input
            # values are strings, so calling one raised TypeError whenever
            # a date was posted. Use the value, not a call.
            date = i.date
        #
        if disblank(ititle, True):
            sess.msg = ['error', msgs['msg_news_error_required']]
        else:
            if i.type == 'add':
                r = db.insert('tr_news', date_news=date, title=ititle, description=idesc, news=inews, file_id=ifile_id, log_id=sess.log)
                if r:
                    sess.msg = ['ok', msgs['msg_news_added']]
            elif i.type == 'edit':
                # NOTE: edits deliberately leave date_news untouched, so the
                # original publication date is preserved.
                r = db.update('tr_news', title=ititle, description=idesc, news=inews, file_id=ifile_id, log_id=sess.log,
                    where='id=$id', vars={'id': iid})
                if r:
                    sess.msg = ['ok', msgs['msg_news_saved']]
        #
        # Edits return to the edit form; adds return to the listing.
        if i.type == 'edit':
            raise web.seeother('/admin/news/edit/%s' %(iid))
        else:
            raise web.seeother('/admin/news')
class admin_news_edit:
    """Render the edit form for one news article."""
    def GET(self, id):
        template = tpl('admin_news_edit')
        heading = msgs['title_admin_news'].capitalize()
        data = {
            'detail': dnews(id=id, read_session=False),
            'message': smget(),
            'files': dfs(filter=['image','flash']),
        }
        page = template.admin_news_edit(title(heading), data)
        return tplb(page)
    def POST(self):
        # Form submissions go to admin_news_save instead.
        raise web.seeother('/')
class admin_faq:
    """List all FAQ entries for administration."""
    def GET(self):
        template = tpl('admin_faq')
        heading = msgs['title_admin_faq'].capitalize()
        data = {
            'faq': dfaq(),
            'message': smget(),
            'files': dfs(),
        }
        page = template.admin_faq(title(heading), data)
        return tplb(page)
    def POST(self):
        raise web.seeother('/')
class admin_faq_del:
    """Permanently delete a FAQ entry."""
    def GET(self):
        raise web.seeother('/admin/faq')
    def POST(self):
        form = web.input(api=0, id=0)
        try:
            faq_id = form.id
        except:
            return 'error'
        # The FAQ entry must exist before deletion.
        if not dfaq(id=faq_id):
            return 'error'
        # Hard delete the row.
        sql = 'delete from tr_faq where id=$id'
        query(sql, {'id': faq_id})
        return 'ok'
class admin_faq_save:
    """Create or update a FAQ entry (tr_faq row)."""
    def GET(self):
        # Saving is POST-only.
        raise web.seeother('/admin/faq')
    def POST(self):
        i = web.input(api=0, type='add', id=0, file_id=0)
        #
        if not i.type in ['add', 'edit']:
            raise web.internalerror()
        #
        try:
            iid = int(i.id)
            ifile_id = int(i.file_id)
        except:
            sess.msg = ['error', msgs['msg_faq_error_required']]
            raise web.seeother('/admin/faq')
        #
        # The attached file must exist or be 0 (= none).
        files = [x.id for x in dfs()]
        if (not ifile_id in files and ifile_id != 0): #illegal
            raise web.internalerror()
        #
        # Multilingual fields.
        icat = mlset(i, 'category')
        iq = mlset(i, 'question')
        ia = mlset(i, 'answer')
        #
        # Category, question and answer are all mandatory.
        if disblank(icat, True) or disblank(iq, True) or disblank(ia, True):
            sess.msg = ['error', msgs['msg_faq_error_required']]
        else:
            if i.type == 'add':
                r = db.insert('tr_faq', category=icat, question=iq, answer=ia, file_id=ifile_id, log_id=sess.log)
                if r:
                    sess.msg = ['ok', msgs['msg_faq_added']]
            elif i.type == 'edit':
                r = db.update('tr_faq', category=icat, question=iq, answer=ia, file_id=ifile_id, log_id=sess.log,
                    where='id=$id', vars={'id': iid})
                if r:
                    sess.msg = ['ok', msgs['msg_faq_saved']]
        #
        # Edits return to the edit form; adds return to the listing.
        if i.type == 'edit':
            raise web.seeother('/admin/faq/edit/%s' %(iid))
        else:
            raise web.seeother('/admin/faq')
class admin_faq_edit:
    """Render the edit form for one FAQ entry."""
    def GET(self, id):
        template = tpl('admin_faq_edit')
        heading = msgs['title_admin_faq'].capitalize()
        data = {
            'detail': dfaq(id=id),
            'message': smget(),
            'files': dfs(),
        }
        page = template.admin_faq_edit(title(heading), data)
        return tplb(page)
    def POST(self):
        # Form submissions go to admin_faq_save instead.
        raise web.seeother('/')
class admin_invoice:
    """Search and list invoices by date range and open/closed state."""
    def GET(self):
        # Initial page: empty filters, no results yet.
        template = tpl('admin_invoice')
        heading = msgs['title_admin_invoice'].capitalize()
        data = {
            'date_from': '',
            'date_to': '',
            'closed': '0',
            'invoice': None,
            'message': smget(),
        }
        return tplb(template.admin_invoice(title(heading), data))
    def POST(self):
        i = web.input(date_from='', date_to='', closed='0')
        try:
            closed_flag = int(i.closed)
        except:
            closed_flag = 0
        #
        template = tpl('admin_invoice')
        heading = msgs['title_admin_invoice'].capitalize()
        data = {
            'date_from': i.date_from,
            'date_to': i.date_to,
            'closed': i.closed,
            'invoice': dinvoice(date_from=i.date_from, date_to=i.date_to, closed=closed_flag),
        }
        if not data['invoice']:
            sess.msg = ['ok', msgs['msg_invoice_empty']]
        # smget() runs after the empty-result message is queued so it shows.
        data['message'] = smget()
        return tplb(template.admin_invoice(title(heading), data))
class admin_invoice_view:
    """Show one invoice in detail."""
    def GET(self, id):
        template = tpl('admin_invoice_view')
        heading = msgs['title_admin_invoice_view'].capitalize()
        data = {'invoice': dinvoice(id)}
        # Unknown invoice ids are a hard error, not an empty page.
        if not data['invoice']:
            raise web.internalerror()
        #
        data['message'] = smget()
        return tplb(template.admin_invoice_view(title(heading), data))
    def POST(self):
        raise web.seeother('/')
class admin_invoice_approval:
    """Approve or revoke a customer's payment confirmation on an invoice."""
    def GET(self, id):
        # Approval is POST-only.
        raise web.seeother('/')
    def POST(self):
        i = web.input(type='', id=0, api=0)
        if not i.type in ['approve', 'disapprove']:
            return 'error'
        #
        try:
            iid = int(i.id)
        except:
            return 'error'
        #
        # confirm_info is a YAML-serialized list of confirmation records.
        inv = dinvoice(id=iid, all_confirm=True)
        confirm = rget(inv, 'confirm_info', default=[], to_yaml=True)
        if i.type == 'approve':
            try:
                # Approve using the most recent confirmation's date; mark
                # the invoice done and record the payment date.
                if confirm and confirm[-1]['date']:
                    r = db.update('tr_invoice_header', log_id=sess.log, where='id=$id', done=True,
                        date_paid=confirm[-1]['date'], vars={'id':iid})
                    if r:
                        return 'ok'
            except:
                return 'error'
        elif i.type == 'disapprove':
            # Only open invoices can be disapproved. An empty entry is
            # appended, so the approve branch's confirm[-1]['date'] lookup
            # no longer succeeds for the latest confirmation.
            if not inv[0].done:
                confirm.append('')
                r = db.update('tr_invoice_header', log_id=sess.log, where='id=$id', confirm_info=yaml.dump(confirm),
                    vars={'id':iid})
                if r:
                    return 'ok'
        #
        return 'error'
class admin_stat:
    """Show sales/traffic statistics for a date range."""
    def GET(self):
        # Initial page: empty filters, no results yet.
        template = tpl('admin_stat')
        heading = msgs['title_admin_stat'].capitalize()
        data = {
            'date_from': '',
            'date_to': '',
            'stat': None,
            'message': smget(),
        }
        return tplb(template.admin_stat(title(heading), data))
    def POST(self):
        i = web.input(date_from='', date_to='')
        template = tpl('admin_stat')
        heading = msgs['title_admin_stat'].capitalize()
        data = {
            'date_from': i.date_from,
            'date_to': i.date_to,
            'stat': dstat(date_from=i.date_from, date_to=i.date_to),
        }
        if not data['stat']:
            sess.msg = ['ok', msgs['msg_stat_empty']]
        # smget() runs after the empty-result message is queued so it shows.
        data['message'] = smget()
        return tplb(template.admin_stat(title(heading), data))
class admin_doc:
    """Serve the admin documentation file as a download."""
    def GET(self):
        f = DOC_ADMIN
        if not f or not os.path.exists(f):
            raise web.seeother('/admin')
        else:
            disposition = 'attachment; filename=' + os.path.basename(f)
            web.header('Content-Type', 'text/plain')
            web.header('Content-Length', os.path.getsize(f))
            web.header('Content-Disposition', disposition)
            # Bug fix: "return open(f).read()" leaked the file handle; a
            # context manager guarantees it is closed after the read.
            with open(f) as doc:
                return doc.read()
    def POST(self):
        raise web.seeother('/')
class admin_redir:
    """List the configured URL redirects."""
    def GET(self):
        template = tpl('admin_redir')
        heading = msgs['title_admin_redir'].capitalize()
        data = {
            'redir': dredir(),
            'message': smget(),
        }
        page = template.admin_redir(title(heading), data)
        return tplb(page)
    def POST(self):
        raise web.seeother('/')
class admin_redir_del:
    """Permanently delete a redirect rule."""
    def GET(self):
        raise web.seeother('/admin/redir')
    def POST(self):
        form = web.input(api=0, id=0)
        try:
            redir_id = form.id
        except:
            return 'error'
        # The redirect must exist before deletion.
        if not dredir(id=redir_id):
            return 'error'
        # Hard delete the row.
        sql = 'delete from ms_redirect where id=$id'
        query(sql, {'id': redir_id})
        return 'ok'
class admin_redir_save:
    """Create or bulk-edit URL redirects (ms_redirect rows), with
    duplicate, system-route and self-redirect checks."""
    def GET(self):
        # Saving is POST-only.
        raise web.seeother('/admin/redir')
    def POST(self):
        i = web.input(api=0, type='add')
        #
        if not i.type in ['add', 'edit']:
            raise web.internalerror()
        #
        # 'edit' re-saves every redirect row; 'add' handles 'new'.
        if i.type == 'edit':
            allid = [c.id for c in dredir(field='id')]
        elif i.type == 'add':
            allid = ['new']
        #
        for a in allid:
            # Normalize: lowercase, strip a leading slash and whitespace.
            iurl = siget(i, 'url', str(a)).lower()
            iurl = iurl.lstrip('/')
            iurl = iurl.strip()
            itarget = siget(i, 'target', str(a)).lower()
            itarget = itarget.strip()
            #
            if not iurl or not itarget:
                sess.msg = ['error', msgs['msg_admin_redir_error_required']]
                raise web.seeother('/admin/redir')
            else:
                # Reject a URL already claimed by a different redirect row.
                check = [x.url for x in dredir() if x.id != a]
                if iurl in check:
                    sess.msg = ['error', '%s: %s' %(msgs['msg_admin_redir_error_exists'], iurl)]
                    raise web.seeother('/admin/redir')
                #
                # URLS alternates pattern/handler pairs (web.py routing),
                # so even indices are route patterns; collect their first
                # path segment to detect collisions with system routes.
                used = []
                for u in range(len(URLS)):
                    if u % 2 == 0:
                        u2 = URLS[u].split('/')
                        used.append(u2[1])
                iurl2 = iurl.split('/')
                # str.split always yields at least one element, so this
                # except branch looks unreachable; kept as-is.
                try:
                    iurl_sys = iurl2[0]
                except:
                    iurl_sys = iurl
                if iurl_sys in used:
                    sess.msg = ['error', '%s: %s' %(msgs['msg_admin_redir_error_used_system'], iurl)]
                    raise web.seeother('/admin/redir')
                #
                # Reject redirects that point back at themselves, including
                # absolute targets on our own DOMAIN with the same path.
                parsed = urlparse.urlparse(itarget)
                parsed_dom_split = parsed[1].split('.')
                raw_dom_len = len(DOMAIN.split('.'))
                if len(parsed_dom_split) > raw_dom_len:
                    parsed_dom_start = len(parsed_dom_split) - raw_dom_len
                else:
                    parsed_dom_start = 0
                parsed_dom = '.'.join(parsed_dom_split[parsed_dom_start:])
                if itarget == iurl or (parsed_dom == DOMAIN and parsed[2] == ub('/' + iurl)):
                    sess.msg = ['error', '%s: %s' %(msgs['msg_admin_redir_error_same'], iurl)]
                    raise web.seeother('/admin/redir')
                #
                if i.type == 'edit':
                    q = 'update ms_redirect set url=$url, target=$target, log_id=$logid where id=$id'
                    # NOTE(review): rebinds the loop variable 'a'; harmless
                    # (reassigned next iteration) but confusing.
                    a = {'id': a, 'logid': sess.log, 'url': iurl, 'target': itarget}
                    r = query(q, a)
                    sess.msg = ['ok', msgs['msg_admin_redir_saved']]
                elif i.type == 'add':
                    r = db.insert('ms_redirect', log_id=sess.log, url=iurl, target=itarget)
                    if r:
                        sess.msg = ['ok', msgs['msg_admin_redir_added']]
        #
        raise web.seeother('/admin/redir')
class admin_go:
    """List the user-defined content pages."""
    def GET(self):
        template = tpl('admin_go')
        heading = msgs['title_admin_go'].capitalize()
        data = {
            'go': dgo(),
            'message': smget(),
        }
        page = template.admin_go(title(heading), data)
        return tplb(page)
    def POST(self):
        raise web.seeother('/')
class admin_go_del:
    """Soft-delete a user-defined content page."""
    def GET(self):
        raise web.seeother('/admin/go')
    def POST(self):
        form = web.input(api=0, id=0)
        try:
            page_id = form.id
        except:
            return 'error'
        # The page must exist before deletion.
        if not dgo(id=page_id):
            return 'error'
        # Soft delete: deactivate instead of removing the row.
        sql = 'update ms_user_content set active=0,log_id=$logid where id=$id'
        query(sql, {'id': page_id, 'logid': sess.log})
        return 'ok'
class admin_go_save:
    """Create or update a user-defined content page (ms_user_content row)."""
    def GET(self):
        # Saving is POST-only.
        raise web.seeother('/admin/go')
    def POST(self):
        i = web.input(api=0, type='add', id=0)
        #
        if not i.type in ['add', 'edit']:
            raise web.internalerror()
        #
        try:
            iid = int(i.id)
        except:
            sess.msg = ['error', msgs['msg_go_error_required']]
            raise web.seeother('/admin/go')
        #
        # Unspecified priority sorts last (large number).
        try:
            ipriority = int(i.priority)
        except:
            ipriority = 10000
        #
        try:
            ishow = int(i.show_in_menu)
        except:
            ishow = 0
        #
        # Multilingual fields.
        ipage = mlset(i, 'page')
        icontent = mlset(i, 'content')
        #
        if disblank(ipage, True) or disblank(icontent, True):
            sess.msg = ['error', msgs['msg_go_error_required']]
        else:
            if i.type == 'add':
                r = db.insert('ms_user_content', page=ipage, active=1, content=icontent, show_in_menu=ishow,
                    priority=ipriority, log_id=sess.log)
                if r:
                    sess.msg = ['ok', msgs['msg_go_added']]
            elif i.type == 'edit':
                r = db.update('ms_user_content', page=ipage, active=1, content=icontent, show_in_menu=ishow,
                    priority=ipriority, log_id=sess.log, where='id=$id',
                    vars={'id': iid})
                if r:
                    sess.msg = ['ok', msgs['msg_go_saved']]
        #
        # Edits return to the edit form; adds return to the listing.
        if i.type == 'edit':
            raise web.seeother('/admin/go/edit/%s' %(iid))
        else:
            raise web.seeother('/admin/go')
class admin_go_edit:
    """Render the edit form for one user-defined content page."""
    def GET(self, id):
        template = tpl('admin_go_edit')
        heading = msgs['title_admin_go'].capitalize()
        data = {
            'go': dgo(id=id),
            'message': smget(),
        }
        page = template.admin_go_edit(title(heading), data)
        return tplb(page)
    def POST(self):
        # Form submissions go to admin_go_save instead.
        raise web.seeother('/')
if __name__ == '__main__':
    # Start the web.py application when run as a script.
    wapp.run()
|
import json
import asyncio
import requests
from bs4 import BeautifulSoup
class Item:
    """A Roblox catalog item identified by its numeric catalog id.

    NOTE(review): these methods are declared ``async`` but call the
    blocking ``requests`` library, so each request blocks the event
    loop; consider an async HTTP client if concurrency matters.
    """

    def __init__(self, name: str, id: int):
        self.name = name  # display name (not sent to the API)
        self.id = id      # catalog item id used in API queries

    async def _fetch_details(self) -> dict:
        """Fetch and return the first catalog record for this item.

        Shared by price/description lookups (the original duplicated the
        request, and bound the result to a local named ``json``, which
        shadowed the imported ``json`` module).
        """
        response = requests.get(url=f'https://catalog.roblox.com/v1/search/items/details?id={self.id}')
        return response.json()["data"][0]

    async def get_item_price(self):
        """Return the item's price from the catalog API."""
        data = await self._fetch_details()
        return data["price"]

    async def get_item_description(self):
        """Return the item's description from the catalog API."""
        data = await self._fetch_details()
        return data["description"]

    async def get_thumb(self):
        """Return the item's thumbnail from the thumbnails API.

        Bug fix: the original requested /v1/assets with no parameters, so
        the response could never refer to this item; pass assetIds.
        """
        response = requests.get(
            url='https://thumbnails.roblox.com/v1/assets',
            params={'assetIds': self.id},
        )
        data = response.json()["data"][0]
        # NOTE(review): the thumbnails API documents this field as
        # "imageUrl"; "image" kept from the original — confirm against a
        # live response.
        return data["image"]
|
'''
@ junhyeon.kim
@ email - sleep4725@naver.com
@ Korea Tourism Organization - area codes
@ 2019-01-26
'''
# ==================================
import requests
from yaml import load, load_all, YAMLError
import sys
from urllib.parse import urlencode
import pprint
import re
import json
import time
# ==================================
class PROJ:
    """Fetch Korea Tourism Organization area codes and dump them to JSON.

    Workflow: the constructor calls jsonFileCreate() to ensure the output
    file exists, paramsSett() loads request parameters from a YAML config,
    and urlRequests() pages through the API.
    """

    def __init__(self):
        self.url_target = None      # API host, from the YAML config
        self.url_path = None        # API path, from the YAML config
        self.params = {
            "serviceKey": None, "pageNo": None, "numOfRows": None, "MobileOS": None,
            "MobileApp": None, "areaCode": None, "_type": None,
        }
        self.yaml_doc = None        # parsed YAML config
        self.resp_data = None       # last decoded API response
        self.json_data = []
        self.f = None
        self.jsonFileCreate()  # make sure the output file exists up front
        self.localinfo = {
        }

    # instance method - 0
    def jsonFileCreate(self):
        """Create the output JSON file; mode 'x' fails if it already exists."""
        try:
            self.f = open("./OUTPUT/TU_JSON/areacode.json", "x")
        except FileExistsError as e:
            print (e)
        else:
            self.f.close()

    # instance method - 1
    def paramsSett(self):
        """Read the YAML config and populate the request parameters."""
        from yaml import SafeLoader  # only 'load' is imported at module level
        try:
            with open("./CONF/TU/data_go_kr_tu_tourareacode.yml", "r", encoding="utf-8") as f:
                # Bug fix: load(f) without a Loader is unsafe on untrusted
                # input and raises TypeError on PyYAML >= 6.0; use
                # SafeLoader explicitly. (The redundant f.close() inside
                # the with-block was dropped — the context manager closes.)
                self.yaml_doc = load(f, Loader=SafeLoader)
        except FileNotFoundError as e:  # config file does not exist
            print (e)
            sys.exit(1)  # terminate the program
        except YAMLError as e:
            print(e)
            sys.exit(1)  # terminate the program
        else:
            print (self.yaml_doc)
            # params setting =========================================
            self.url_target = self.yaml_doc["url"]                               # url
            self.url_path = self.yaml_doc["params"]["path"]                      # url path
            self.params['serviceKey'] = self.yaml_doc["params"]["serviceKey"]    # serviceKey
            self.params['pageNo'] = self.yaml_doc["params"]["pageNo"]            # pageNo
            self.params['numOfRows'] = self.yaml_doc["params"]["numOfRows"]      # numOfRows
            self.params['MobileOS'] = self.yaml_doc["params"]["MobileOS"]        # MobileOS
            self.params['MobileApp'] = self.yaml_doc["params"]["MobileApp"]      # MobileApp
            self.params['areaCode'] = self.yaml_doc["params"]["areaCode"]        # areaCode
            self.params['_type'] = self.yaml_doc["params"]["_type"]              # _type (json/xml)
            # =========================================================

    # instance method - 2
    def urlRequests(self):
        """Page through the API.

        Advance pageNo while pages contain items; when a page is empty,
        bump areaCode once (resetting pageNo) and stop the next time a
        page comes back empty.

        NOTE(review): a non-200 status retries the same URL forever —
        there is no error path; confirm whether that is acceptable.
        """
        num = 1  # 1 while we are still allowed to advance to the next areaCode
        while True:
            e_params = urlencode(self.params)
            url = self.url_target + self.url_path + "?" + e_params
            html = requests.get(url)
            if html.status_code == 200:
                self.resp_data = html.json()
                response = self.resp_data['response']["body"]['items']
                # 'items' holds an "item" list when results exist, and an
                # empty value otherwise.
                if "item" in dict(response):
                    pprint.pprint(response)
                    self.params['pageNo'] += 1
                else:
                    if num:
                        self.params['areaCode'] += 1
                        num = 0
                        self.params['pageNo'] = 1
                    else:
                        break

    # instance method - 3
    def reponseDataParcing(self):
        """Append the collected data to the output JSON file.

        NOTE(review): reads self.resp_data["list"], but urlRequests()
        stores responses under response/body/items — confirm the key
        before enabling this call (it is commented out in main()).
        The misspelled name is kept: it is part of the public interface.
        """
        data = self.resp_data["list"]
        with open("./OUTPUT/TU_JSON/areacode.json", "a", encoding="utf-8") as make_json:
            json.dump(self.json_data, make_json, ensure_ascii=False)
def main():
    """Load the config, then page through the area-code API."""
    project = PROJ()
    project.paramsSett()
    project.urlRequests()
    # project.reponseDataParcing()


if __name__ == "__main__":
    main()
def count_circular_primes(limit=1000000):
    """Count the circular primes p with 2 <= p <= limit.

    A circular prime stays prime under every rotation of its digits;
    rotations with a leading zero are skipped, matching the original
    script's convention. Generalized from the hard-coded n = 1000000;
    the sieve extends far enough that every digit rotation of a
    candidate is covered even when limit is not a power of ten.
    """
    if limit < 2:
        return 0
    # Smallest all-nines number with as many digits as any candidate,
    # so every rotation of a candidate fits inside the sieve.
    bound = 10 ** len(str(limit - 1)) - 1
    is_prime = [i > 1 for i in range(bound + 1)]
    for p in range(2, int(bound ** 0.5) + 1):
        if is_prime[p]:
            for multiple in range(p * p, bound + 1, p):
                is_prime[multiple] = False
    count = 0
    for p in range(2, min(limit, bound) + 1):
        if not is_prime[p]:
            continue
        rotations, digits = [p], str(p)
        for _ in range(len(digits) - 1):
            digits = digits[1:] + digits[0]
            if digits[0] != '0':
                rotations.append(int(digits))
        if all(is_prime[r] for r in rotations):
            count += 1
    return count


if __name__ == "__main__":
    # Same output as the original script for n = 1000000 (prints 55).
    # The guard means importing this module no longer runs the sieve.
    print(count_circular_primes(1000000))
|
from django.conf.urls import include, url
from . import views
# Account-related routes; the names enable reverse('username') /
# reverse('profile_redirect') lookups in templates and views.
urlpatterns = [
    url(r'^username/$', views.username, name="username"),
    url(r'^profile/$', views.profile_redirect, name="profile_redirect"),
]
import numpy as np
# Experiment configuration.
number_of_leaves = 9      # arms (leaves) in the tree
epsilon = 0.15            # stopping accuracy for the confidence test
delta = 0.15              # failure probability
uniform_samples = 500
iterations = 50


def beta(s):
    """Confidence-rate term after s samples: log(K/delta) + log(log s + 1)."""
    return np.log(np.log(s) + 1) + np.log(number_of_leaves / delta)
class Node:
    """Internal 3-ary node of the game tree.

    Children may be Node or LeafNode instances; both expose
    confidence_interval(), and leaves additionally expose
    empirical_mean() and number_sampled. This class models a MIN node
    (value = smallest child mean); the root flag marks the MAX root.

    Bug fixes relative to the original:
    - find_best_arm_and_competitor: ``B_left_right = right_upper =
      left_lower`` was a chained assignment (it also clobbered
      right_upper) instead of the subtraction ``right_upper - left_lower``.
    - representative_leaf: the first branch compared middle_lower twice,
      so the right child's bound was never consulted there.
    """

    def __init__(self, name, left_child, middle_child, right_child, parent, is_root=False, is_max_node=False):
        self.left_child = left_child
        self.middle_child = middle_child
        self.right_child = right_child
        self.parent = parent
        self.is_root = is_root
        self.is_max_node = is_max_node
        self.name = name

    def update_left_child(self, left_child):
        self.left_child = left_child

    def update_middle_child(self, middle_child):
        self.middle_child = middle_child

    def update_right_child(self, right_child):
        self.right_child = right_child

    def value(self):
        """MIN-node value: the smallest of the children's empirical means.

        Only valid when the children are leaves (they must expose
        empirical_mean()).
        """
        values = [
            self.left_child.empirical_mean(),
            self.middle_child.empirical_mean(),
            self.right_child.empirical_mean(),
        ]
        return min(values)

    def representative_node(self):
        """Child with the highest value; ties prefer left, then middle."""
        left_value = self.left_child.value()
        middle_value = self.middle_child.value()
        right_value = self.right_child.value()
        if left_value >= middle_value and left_value >= right_value:
            return self.left_child
        elif middle_value >= left_value and middle_value >= right_value:
            return self.middle_child
        else:
            return self.right_child

    def representative_leaf(self):
        """For a MIN node, follow the child with the smallest lower bound."""
        left_lower, _ = self.left_child.confidence_interval()
        middle_lower, _ = self.middle_child.confidence_interval()
        right_lower, _ = self.right_child.confidence_interval()
        # Bug fix: the original compared left_lower <= middle_lower twice,
        # never consulting right_lower on this branch.
        if left_lower <= middle_lower and left_lower <= right_lower:
            return self.left_child
        elif middle_lower <= left_lower and middle_lower <= right_lower:
            return self.middle_child
        else:
            return self.right_child

    def confidence_interval(self):
        """Interval for a MIN node's value:
        [min of child lower bounds, min of child upper bounds]."""
        lowers = []
        uppers = []
        for child in (self.left_child, self.middle_child, self.right_child):
            lower, upper = child.confidence_interval()
            lowers.append(lower)
            uppers.append(upper)
        return [min(lowers), min(uppers)]

    def find_best_arm_and_competitor_by_value(self):
        """Rank children by value plus an exploration bonus that shrinks
        with the representative leaf's sample count; return the top two."""
        b_c = {}
        left_lower, left_upper = self.left_child.confidence_interval()
        b_c[self.left_child] = self.left_child.value() + np.sqrt(
            2 * (left_upper - left_lower) / self.left_child.representative_leaf().number_sampled)
        middle_lower, middle_upper = self.middle_child.confidence_interval()
        b_c[self.middle_child] = self.middle_child.value() + np.sqrt(
            2 * (middle_upper - middle_lower) / self.middle_child.representative_leaf().number_sampled)
        right_lower, right_upper = self.right_child.confidence_interval()
        b_c[self.right_child] = self.right_child.value() + np.sqrt(
            2 * (right_upper - right_lower) / self.right_child.representative_leaf().number_sampled)
        best_arm, competitor = [k for k, v in sorted(b_c.items(), key=lambda item: item[1], reverse=True)][:2]
        return best_arm, competitor

    def find_best_arm_and_competitor(self):
        """Return (b_t, c_t): the child with the smallest worst-case bound
        B_x = max over siblings s of (upper(s) - lower(x)), and its
        strongest competitor (largest upper bound among the rest)."""
        left_lower, left_upper = self.left_child.confidence_interval()
        middle_lower, middle_upper = self.middle_child.confidence_interval()
        right_lower, right_upper = self.right_child.confidence_interval()
        # Bug fix: B_left previously used a chained assignment
        # (right_upper = left_lower) instead of a subtraction.
        B_left = max(middle_upper - left_lower, right_upper - left_lower)
        B_middle = max(left_upper - middle_lower, right_upper - middle_lower)
        B_right = max(left_upper - right_lower, middle_upper - right_lower)
        if B_left < B_middle and B_left < B_right:
            b_t = self.left_child
            candidates = [self.middle_child, self.right_child]
        elif B_middle < B_left and B_middle < B_right:
            b_t = self.middle_child
            candidates = [self.left_child, self.right_child]
        else:
            b_t = self.right_child
            candidates = [self.left_child, self.middle_child]
        # The competitor is the remaining child with the larger upper bound.
        _, ub_first = candidates[0].confidence_interval()
        _, ub_second = candidates[1].confidence_interval()
        c_t = candidates[0] if ub_first > ub_second else candidates[1]
        return b_t, c_t
class LeafNode:
    """An arm of the bandit: a leaf with Gaussian rewards."""

    def __init__(self, name, parent, true_mean_val):
        self.parent = parent
        self.true_mean_val = true_mean_val
        self.values = []            # rewards observed so far
        self.number_sampled = 0     # how many times this arm was pulled
        self.name = name

    def sample(self):
        """Draw one reward ~ N(true mean, 0.2), record it and return it."""
        reward = np.random.normal(self.true_mean_val, 0.2)
        self.values.append(reward)
        self.number_sampled += 1
        return reward

    def empirical_mean(self):
        """Average of all observed rewards."""
        return np.mean(self.values)

    def _confidence_gap(self):
        # Half-width of the confidence interval at the current pull count.
        return np.sqrt(beta(self.number_sampled) / self.number_sampled)

    def lower_bound(self):
        return self.empirical_mean() - self._confidence_gap()

    def upper_bound(self):
        return self.empirical_mean() + self._confidence_gap()

    def confidence_interval(self):
        """[lower, upper] bounds around the empirical mean."""
        return [self.lower_bound(), self.upper_bound()]

    def true_mean(self):
        return self.true_mean_val

    def sample_complexity(self):
        return self.number_sampled

    def reset(self):
        """Forget all observations."""
        self.number_sampled = 0
        self.values = []
# Build the fixed 2-level tree: a MAX root over three MIN nodes, each MIN
# node over three Gaussian leaves. The global best arm should be
# node_left (min 0.45) vs node_middle (min 0.35) and node_right (min 0.30).
# NOTE(review): every leaf's parent is set to root, not to the min node
# that owns it — the parent link is unused below, but confirm before
# relying on it.
leaves = []
root = Node('root', None, None, None, None, is_root=True, is_max_node=True)
leaf_1 = LeafNode('leaf_1', root, 0.45)
leaves.append(leaf_1)
leaf_2 = LeafNode('leaf_2', root, 0.50)
leaves.append(leaf_2)
leaf_3 = LeafNode('leaf_3', root, 0.55)
leaves.append(leaf_3)
node_left = Node('node_left', leaf_1, leaf_2, leaf_3, root, is_root=False, is_max_node=False)
root.update_left_child(node_left)
leaf_4 = LeafNode('leaf_4', root, 0.35)
leaves.append(leaf_4)
leaf_5 = LeafNode('leaf_5', root, 0.40)
leaves.append(leaf_5)
leaf_6 = LeafNode('leaf_6', root, 0.60)
leaves.append(leaf_6)
node_middle = Node('node_middle', leaf_4, leaf_5, leaf_6, root, is_root=False, is_max_node=False)
root.update_middle_child(node_middle)
leaf_7 = LeafNode('leaf_7', root, 0.30)
leaves.append(leaf_7)
leaf_8 = LeafNode('leaf_8', root, 0.47)
leaves.append(leaf_8)
leaf_9 = LeafNode('leaf_9', root, 0.52)
leaves.append(leaf_9)
node_right = Node('node_right', leaf_7, leaf_8, leaf_9, root, is_root=False, is_max_node=False)
root.update_right_child(node_right)
def total_sample_complexity():
    """Total number of pulls across all nine leaves."""
    return sum(leaf.sample_complexity() for leaf in leaves)
def get_best_arm():
    """Name of the root child with the highest (min-node) value.

    Ties favor left, then middle — same precedence as the original.
    """
    left_value = node_left.value()
    middle_value = node_middle.value()
    right_value = node_right.value()
    if left_value >= middle_value and left_value >= right_value:
        return 'node_left'
    if middle_value >= left_value and middle_value >= right_value:
        return 'node_middle'
    return 'node_right'
def root_value():
    """Value backed up to the root: the root is a max node, so this is the
    largest of its three children's values."""
    # The original sorted the list and then took max(); the sort was redundant.
    return max(root.left_child.value(),
               root.middle_child.value(),
               root.right_child.value())
def reset():
    """Reset all nine leaves to their initial, unsampled state."""
    for leaf in (leaf_1, leaf_2, leaf_3, leaf_4, leaf_5,
                 leaf_6, leaf_7, leaf_8, leaf_9):
        leaf.reset()
# ---------------------------------------------------------------------------
# Experiments.  The original file ran two ~70-line copy-pasted loops that
# differed only in the candidate-selection call and the printed label; they
# are factored into one parametrized helper below (also fixing the
# "averate_complexity" typo).  `iterations`, `epsilon` and `uniform_samples`
# are defined earlier in this file.
# ---------------------------------------------------------------------------


def _sample_every_leaf():
    """Draw one sample from each leaf (confidence bounds need >= 1 sample)."""
    for leaf in (leaf_1, leaf_2, leaf_3, leaf_4, leaf_5,
                 leaf_6, leaf_7, leaf_8, leaf_9):
        leaf.sample()


def _run_bai_experiment(label, find_candidates):
    """Run `iterations` best-arm-identification episodes and print a summary.

    label: experiment name used in the printed report.
    find_candidates: zero-argument callable returning the current
        (best arm, competitor) pair of nodes.
    """
    print(f'Running {label} ... ')
    sample_complexity_list = []
    best_arm_list = []
    root_values_list = []
    for _ in range(iterations):
        reset()
        _sample_every_leaf()
        while True:
            b_t, c_t = find_candidates()
            b_lower, b_upper = b_t.confidence_interval()
            c_lower, c_upper = c_t.confidence_interval()
            # Stop once best and competitor are epsilon-separated.
            if c_upper - b_lower <= epsilon:
                break
            # Otherwise sample the less certain candidate (wider interval).
            if (b_upper - b_lower) > (c_upper - c_lower):
                next_move = b_t
            else:
                next_move = c_t
            next_move.representative_leaf().sample()
        sample_complexity_list.append(total_sample_complexity())
        best_arm_list.append(get_best_arm())
        root_values_list.append(root_value())

    average_complexity = sum(sample_complexity_list) / iterations
    average_root_value = sum(root_values_list) / iterations

    # Majority vote over the per-run winners; ties fall through to the right
    # arm exactly as the original elif-chain did.
    counts = {'node_left': 0, 'node_middle': 0, 'node_right': 0}
    for b_arm in best_arm_list:
        counts[b_arm] += 1
    if counts['node_left'] > counts['node_middle'] and counts['node_left'] > counts['node_right']:
        average_best_arm = 'node_left'
    elif counts['node_middle'] > counts['node_left'] and counts['node_middle'] > counts['node_right']:
        average_best_arm = 'node_middle'
    else:
        average_best_arm = 'node_right'
    percentage_correct = (counts[average_best_arm] / iterations) * 100

    print(label)
    print(f'Average Best Arm: {average_best_arm}')
    print(f'Average Complexity: {average_complexity}')
    print(f'Average Root Value: {round(average_root_value, 2)}')
    print(f'Percentage Correct: {percentage_correct} %')


# MCTS-BAI
_run_bai_experiment('MCTS-BAI', root.find_best_arm_and_competitor)

# MCTS-BAI - Dharma: same procedure, value-based candidate selection.
print('\n')
_run_bai_experiment('MCTS-BAI-Dharma', root.find_best_arm_and_competitor_by_value)

# Uniform-Sampling baseline: every leaf sampled `uniform_samples` times.
print('\n')
reset()
for _ in range(uniform_samples):
    _sample_every_leaf()
samples = total_sample_complexity()
best_arm = get_best_arm()
print('Uniform Sampling')
print(f'Best Arm: {best_arm}')
print(f'Samples: {samples}')
print(f'Value at root: {round(root_value(), 2)}')
|
from __future__ import absolute_import, division, print_function
from dials.array_family import flex
from dials_algorithms_centroid_simple_ext import *
from dxtbx import model
|
'''
@author: aby
'''
from django.db import models
from tinymce.models import HTMLField
from jsonfield.fields import JSONField
from django.core.urlresolvers import reverse
from django.utils.text import slugify
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _
from django.db.models.fields import URLField
from haystack import indexes
from django.contrib.sitemaps import ping_google
from constance import config
class Topic(models.Model):
    """A timeline topic: a titled, slugged, publishable collection of events."""
    title = models.CharField(_("Title"), max_length=256, help_text="Provide title for this topic")
    slug = models.SlugField(_("Slug"), max_length=256, null=True, blank=True, )
    short_description = models.CharField(_('Short Description'), max_length=256, null=True, blank=True, )
    description = HTMLField(_('Description'), null=True, blank=True, )
    footnote = HTMLField(_('Footnote'), blank=True, null=True)
    thumbnail_url = URLField(_('Thumbnail URL'), null=True, blank=True)
    thumbnail_credit = models.CharField(_('Thumbnail Credit'), max_length=128, null=True, blank=True, )
    scales = models.CharField(_('Scales'), max_length=12, choices=[("human", _("Human")), ("cosmological", _("Cosmological")),], default="human" )
    tags = models.CharField(_('Tags'), max_length=128, null=True, blank=True, )
    authors = models.CharField(_('Authors'), max_length=256, null=True, blank=True, )
    published = models.BooleanField(default=True, )
    published_date = models.DateTimeField(null=True, blank=True,)
    creation_date = models.DateTimeField(auto_now_add=True, null=True, blank=True,)
    # NOTE(review): auto_now_add on modify_date looks like it should be
    # auto_now, but save() overwrites it manually, so behavior is unchanged.
    modify_date = models.DateTimeField(auto_now_add=True, null=True, blank=True,)
    # BUG FIX: default={} shared one mutable dict across every instance;
    # a callable default gives each row its own dict.
    xtra_attrs = JSONField(_('JSON Attributes'), null=True, blank=True, default=dict)

    class Meta:
        get_latest_by = "creation_date"
        ordering = ['title', 'creation_date',]

    def get_absolute_url(self):
        return reverse('topic', kwargs={'slug': self.slug})

    def __str__(self):
        return self.title

    def save(self, force_insert=False, force_update=False, using=None,
             update_fields=None):
        """Slugify the title, maintain timestamps, stamp published_date when a
        topic transitions to published, and optionally ping Google."""
        self.slug = slugify(self.title)
        self.modify_date = timezone.now()
        if self.creation_date is None:
            self.creation_date = timezone.now()
        if self.pk is not None:
            orig = Topic.objects.get(pk=self.pk)
            # `and` replaces the original bitwise `&`: same truth table on
            # booleans, but short-circuiting and idiomatic.
            if orig.published != self.published and self.published:
                self.published_date = timezone.now()
                print('existing instance, update published_date')
        else:
            if self.published:
                self.published_date = timezone.now()
        retval = models.Model.save(self, force_insert=force_insert,
                                   force_update=force_update,
                                   using=using, update_fields=update_fields)
        if config.PING_GOOGLE:
            try:
                ping_google()
            except Exception:
                # Best effort: a variety of HTTP-related failures are
                # possible and must never break saving.
                pass
        return retval
class Event(models.Model):
    """A dated event on a Topic's timeline (Hijri or Gregorian calendar)."""
    name = models.CharField(_('Name'), max_length=256)
    topic = models.ForeignKey(Topic)
    description = HTMLField(_('Description'), blank=True, null=True)
    footnote = HTMLField(_('Footnote'), blank=True, null=True)
    visible = models.BooleanField(_("Visible"), blank=True, default=True)
    group = models.CharField(_('Event Group'), max_length=64, null=True, blank=True, )
    media_url = URLField(_('Media URL'), null=True, blank=True)
    media_caption = models.CharField(_('Media Caption'), max_length=128, null=True, blank=True, )
    media_credit = models.CharField(_('Media Credit'), max_length=128, null=True, blank=True, )
    tags = models.CharField(_('Tags'),blank=True, null=True, max_length=128)
    authors = models.CharField(_('Authors'), max_length=256, null=True, blank=True, )
    creation_date = models.DateTimeField(auto_now_add=True, null=True, blank=True)
    modify_date = models.DateTimeField(auto_now_add=True, null=True, blank=True)
    calendar_type_choice = [("hijri", _("Hijri")), ("gregorian", _("Gregorian")),]
    calendar_type = models.CharField('Calendar Type', max_length=16, choices=calendar_type_choice, default="gregorian")
    # NOTE(review): timezone.now().year is evaluated once at import time, so
    # this default goes stale across a year boundary in a long-lived process;
    # a callable default would fix it but changes migrations -- confirm first.
    year_start = models.IntegerField('Occurance Year', default=timezone.now().year,
                                     help_text='Occurance year, you may fill negative value if it is BC in Gregorian Calendar')
    month_start = models.PositiveSmallIntegerField('Occurance Month', blank=True, null=True)
    day_start = models.PositiveSmallIntegerField('Occurance Day', blank=True, null=True)
    time_start = models.TimeField('Occurance Time', blank=True, null=True)
    year_end = models.IntegerField('Year of Occurance End', blank=True, null=True)
    month_end = models.PositiveSmallIntegerField('Month of Occurance End', blank=True, null=True)
    day_end = models.PositiveSmallIntegerField('Date of Occurance End', blank=True, null=True)
    time_end = models.TimeField('Time of Occurance End', blank=True, null=True)
    related_events = models.TextField('Related events', blank=True, null=True,
                                      help_text='Related events, one event per line')
    # BUG FIX: default={} shared one mutable dict across instances.
    xtra_attrs = JSONField('JSON Attributes', blank=True, null=True, default=dict)

    class Meta:
        get_latest_by = "year_start"
        ordering = ['-year_start', '-month_start', '-day_start', '-time_start',
                    ]

    def get_absolute_url(self):
        return reverse('event', kwargs={'slug': self.topic.slug, 'pk': self.pk})

    def __str__(self):
        return self.name

    def save(self, force_insert=False, force_update=False, using=None,
             update_fields=None):
        """Maintain timestamps; optionally re-save the parent topic to force
        search-index signals on it."""
        self.modify_date = timezone.now()
        if self.creation_date is None:
            self.creation_date = timezone.now()
        # force signal indexing on parent
        # NOTE(review): forwarding force_insert/update_fields (which describe
        # *this* row) to the parent Topic.save looks suspicious -- confirm.
        if config.BUILD_INDEX_ON_SAVED:
            self.topic.save(force_insert=force_insert,
                            force_update=force_update,
                            using=using, update_fields=update_fields)
        return models.Model.save(self, force_insert=force_insert,
                                 force_update=force_update,
                                 using=using, update_fields=update_fields)
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# @Date : Nov-29-20 16:18
# @Author : Kelly Hwong (dianhuangkan@gmail.com)
import os
import argparse
from datetime import datetime
import tensorflow as tf
from keras_fn.resnet import model_depth, resnet_v2
from keras_fn.transfer_utils import transfer_weights
from utils.gpu_utils import get_gpu_memory, get_available_gpu_indices
def cmd_parser():
    """Build and parse the command-line arguments for this transfer script."""
    parser = argparse.ArgumentParser()
    parser.add_argument('--date_time', type=str, dest='date_time',
                        action='store', default=None,
                        help='tmp, manually set date time, for model data save path configuration.')
    # Input parameters
    parser.add_argument('--side_length', type=int, dest='side_length',
                        action='store', default=224,
                        help='side_length, the length of image width and height to be cast to.')
    # Device
    parser.add_argument('--gpu', type=int, dest='gpu',
                        action='store', default=0,
                        help='gpu, the number of the gpu used for experiment.')
    return parser.parse_args()
def main():
    """Transfer pretrained ResNet20v2 weights into a ResNet56v2 and save them."""
    args = cmd_parser()

    # Config GPUs: hide GPUs with index below args.gpu, then rank the
    # remaining ones by free memory so the two models get separate devices.
    physical_devices = tf.config.list_physical_devices('GPU')
    tf.config.set_visible_devices(physical_devices[args.gpu:], 'GPU')
    gpus_memory = get_gpu_memory()
    available_gpu_indices = get_available_gpu_indices(gpus_memory)

    # Checkpoint paths.
    competition_name = "semi-conductor-image-classification-first"
    # BUG FIX: --date_time defaults to None, which made os.path.join below
    # raise TypeError; fall back to a fresh timestamp when it is not given.
    date_time = args.date_time or datetime.now().strftime("%Y%m%d-%H%M%S")
    prefix = os.path.join(
        "~", "Documents", "DeepLearningData", competition_name)
    model_type = os.path.join(
        "ResNet20v2_56v2", date_time, "ResNet20v2" + "_pretrain")
    loss_type = "normal"
    subfix = os.path.join(model_type, loss_type)
    ckpt_dir = os.path.expanduser(os.path.join(prefix, "ckpts", subfix))

    # Input paramaters
    SIDE_LENGTH = args.side_length  # default 224
    IMAGE_WIDTH = IMAGE_HEIGHT = SIDE_LENGTH
    IMAGE_CHANNELS = 1
    input_shape = (IMAGE_WIDTH, IMAGE_HEIGHT, IMAGE_CHANNELS)
    num_classes = 2

    # Create src model: ResNet20v2
    with tf.device("/device:GPU:" + str(available_gpu_indices[0])):
        src_model = resnet_v2(input_shape=input_shape,
                              depth=model_depth(n=2, version=2), num_classes=num_classes)

    # Load the source model's last checkpoint; it must exist.
    latest_ckpt = tf.train.latest_checkpoint(ckpt_dir)
    assert latest_ckpt is not None, f"no checkpoint found in {ckpt_dir}"
    src_model.load_weights(latest_ckpt)

    # Create dest model: ResNet56v2
    with tf.device("/device:GPU:" + str(available_gpu_indices[1])):
        dest_model = resnet_v2(input_shape=input_shape,
                               depth=model_depth(n=6, version=2), num_classes=num_classes)

    # Do weights transferring, then save under the "_continue" run directory.
    transfer_weights(src_model, dest_model)
    model_type = os.path.join(
        "ResNet20v2_56v2", date_time, "ResNet56v2" + "_continue")
    subfix = os.path.join(model_type, loss_type)
    ckpt_dir = os.path.expanduser(os.path.join(prefix, "ckpts", subfix))
    ckpt_name = "ResNet56v2-transfer-from-ResNet20v2"
    dest_model.save_weights(os.path.join(ckpt_dir, ckpt_name))


if __name__ == "__main__":
    main()
|
"""
At Chip's Fast Food emporium there is a very simple menu. Each food item is selected by entering a digit choice.
Here are the three burger choices:
1 – Cheeseburger (461 Calories)
2 – Fish Burger (431 Calories)
3 – Veggie Burger (420 Calories)
4 – no burger Here are the three drink choices:
1 – Soft Drink (130 Calories)
2 – Orange Juice (160 Calories)
3 – Milk (118 Calories)
4 – no drink
Here are the three side order choices:
1 – Fries (100 Calories)
2 – Baked Potato (57 Calories)
3 – Chef Salad (70 Calories)
4 – no side order Here are the three dessert choices:
1 – Apple Pie (167 Calories)
2 – Sundae (266 Calories)
3 – Fruit Cup (75 Calories)
4 – no dessert
Write a program that will compute the total Calories of a meal.
"""
# Calorie tables indexed by menu choice (choice 4 means "none" = 0 Calories).
BURGER_CALORIES = {1: 461, 2: 431, 3: 420, 4: 0}
DRINK_CALORIES = {1: 130, 2: 160, 3: 118, 4: 0}
SIDE_CALORIES = {1: 100, 2: 57, 3: 70, 4: 0}
DESSERT_CALORIES = {1: 167, 2: 266, 3: 75, 4: 0}

# Read the four choices in menu order: burger, drink, side, dessert.
# BUG FIX: the original stored the 2nd input in the side variable and the
# 3rd input in the drink variable, so the drink and side calorie tables
# were applied to the wrong choices.
burger = int(input())
drink = int(input())
side = int(input())
dessert = int(input())

total = (BURGER_CALORIES[burger] + DRINK_CALORIES[drink]
         + SIDE_CALORIES[side] + DESSERT_CALORIES[dessert])
print("Your total Calorie count is" + ' ' + str(total) + ".")
|
import os
import json
import h5py
import numpy as np
from scipy import sparse
import collections
import math
import torch
import caption.readers.base
# Number of directed edge types (see the add_*_edge helpers further down:
# obj<->attr, subj<->rel, rel<->obj -- two directions each).
NUM_RELS = 6
# Zero vector used as the embedding for out-of-vocabulary words (300-d).
UNK_WORDEMBED = np.zeros((300, ), dtype=np.float32)
# Subtracted from box extents; reset to 0 for VisualGenome in the reader's
# __init__ (box-coordinate convention differs per dataset).
PIXEL_REDUCE = 1
class ImageSceneGraphFlatReader(caption.readers.base.CaptionDatasetBase):
    """Dataset of region captions with a flattened scene-graph node sequence.

    Each item provides one pooled box feature per graph node (object,
    attribute, relationship), padded/trimmed to `max_attn_len`, plus the
    global image feature and, in train/label mode, the tokenized caption.
    """

    def __init__(self, name_file, mp_ft_file, obj_ft_dir, region_anno_dir,
                 word2int_file, max_attn_len=10, max_words_in_sent=15,
                 is_train=False, return_label=False, _logger=None,
                 pred_caption_file=None):
        super().__init__(word2int_file, max_words_in_sent=max_words_in_sent,
                         is_train=is_train, return_label=return_label, _logger=_logger)
        if 'VisualGenome' in name_file:
            # VisualGenome annotations apparently need no 1-pixel box shrink
            # (module default is 1) -- TODO confirm the box convention.
            global PIXEL_REDUCE
            PIXEL_REDUCE = 0
        self.obj_ft_dir = obj_ft_dir
        self.max_attn_len = max_attn_len
        self.region_anno_dir = region_anno_dir
        img_names = np.load(name_file)
        # image id -> (row index into mp_fts, original file name)
        self.img_id_to_ftidx_name = {x.split('.')[0]: (i, x) \
            for i, x in enumerate(img_names)}
        self.mp_fts = np.load(mp_ft_file)
        self.print_fn('mp_fts %s'%(str(self.mp_fts.shape)))
        # names: (image_id, region_id) pairs, one per dataset item.
        self.names = np.load(os.path.join(region_anno_dir, os.path.basename(name_file)))
        self.num_data = len(self.names)
        self.print_fn('num_data %d' % (self.num_data))
        if pred_caption_file is None:
            self.pred_captions = None
        else:
            # Model-predicted captions (used by the Flow subclass).
            self.pred_captions = json.load(open(pred_caption_file))

    def __getitem__(self, idx):
        image_id, region_id = self.names[idx]
        name = '%s_%s'%(image_id, region_id)
        anno = json.load(open(os.path.join(self.region_anno_dir, '%s.json'%image_id)))
        region_graph = anno[region_id]
        region_caption = anno[region_id]['phrase']
        # Per-image object features, keyed by bounding box.
        with h5py.File(os.path.join(self.obj_ft_dir, '%s.jpg.hdf5'%image_id.replace('/', '_')), 'r') as f:
            key = '%s.jpg'%image_id.replace('/', '_')
            obj_fts = f[key][...]
            obj_bboxes = f[key].attrs['boxes']
        obj_box_to_ft = {tuple(box): ft for box, ft in zip(obj_bboxes, obj_fts)}
        # node_types codes: 0 = object, 1 = attribute, 2 = relationship.
        attn_ft, node_types, attr_order_idxs = [], [], []
        obj_id_to_box = {}
        for x in region_graph['objects']:
            box = (x['x'], x['y'], x['x']+x['w']-PIXEL_REDUCE, x['y']+x['h']-PIXEL_REDUCE)
            obj_id_to_box[x['object_id']] = box
            attn_ft.append(obj_box_to_ft[box])
            attr_order_idxs.append(0)
            node_types.append(0)
            # Attribute nodes reuse their object's box feature, numbered 1..k.
            for ia, attr in enumerate(x['attributes']):
                attn_ft.append(obj_box_to_ft[box])
                attr_order_idxs.append(ia + 1)
                node_types.append(1)
        for x in region_graph['relationships']:
            obj_box = obj_id_to_box[x['object_id']]
            subj_box = obj_id_to_box[x['subject_id']]
            # A relationship node uses the union box of subject and object.
            box = (min(obj_box[0], subj_box[0]), min(obj_box[1], subj_box[1]),
                   max(obj_box[2], subj_box[2]), max(obj_box[3], subj_box[3]))
            attn_ft.append(obj_box_to_ft[box])
            node_types.append(2)
            attr_order_idxs.append(0)
        # Pad or trim everything to max_attn_len.
        num_nodes = len(node_types)
        attn_ft, attn_mask = self.pad_or_trim_feature(
            np.array(attn_ft[:self.max_attn_len], np.float32),
            self.max_attn_len)
        node_types = node_types[:self.max_attn_len] + [0] * max(0, self.max_attn_len - num_nodes)
        node_types = np.array(node_types, np.int32)
        attr_order_idxs = attr_order_idxs[:self.max_attn_len] + [0] * max(0, self.max_attn_len - num_nodes)
        attr_order_idxs = np.array(attr_order_idxs, np.int32)
        out = {
            'names': name,
            'mp_fts': self.mp_fts[self.img_id_to_ftidx_name[image_id][0]],
            'attn_fts': attn_ft,
            'attn_masks': attn_mask,
            'node_types': node_types,
            'attr_order_idxs': attr_order_idxs,
        }
        if self.is_train or self.return_label:
            sent = region_caption
            caption_ids, caption_masks = self.pad_sents(self.sent2int(sent))
            out.update({
                'caption_ids': caption_ids,
                'caption_masks': caption_masks,
                'ref_sents': [sent],
            })
        return out

    def __len__(self):
        return self.num_data
def flat_collate_fn(data):
    """Batch ImageSceneGraphFlatReader items into padded numpy arrays.

    Attention features/masks (and the aligned node_types / attr_order_idxs)
    are trimmed to the longest real attention length in the batch; caption
    arrays are trimmed to the longest real caption in the batch.
    """
    outs = {}
    for key in ['names', 'mp_fts', 'attn_fts', 'attn_masks',
                'caption_ids', 'caption_masks', 'node_types', 'attr_order_idxs']:
        if key in data[0]:
            outs[key] = [x[key] for x in data]
    outs['mp_fts'] = np.array(outs['mp_fts'])
    max_attn_len = np.max(np.sum(outs['attn_masks'], 1))
    outs['attn_fts'] = np.array(outs['attn_fts'])[:, :max_attn_len]
    outs['attn_masks'] = np.array(outs['attn_masks'])[:, :max_attn_len]
    for key in ['node_types', 'attr_order_idxs']:
        if key in data[0]:
            outs[key] = np.array(outs[key])[:, :max_attn_len]
    if 'caption_ids' in data[0]:
        outs['caption_ids'] = np.array(outs['caption_ids'], np.int32)
        # Builtin bool, not the np.bool alias removed in NumPy 1.24.
        outs['caption_masks'] = np.array(outs['caption_masks'], bool)
        # BUG FIX: sum per sample (axis 1) before taking the max; the
        # original summed over the whole batch, so captions were effectively
        # never trimmed.
        max_sent_len = np.max(np.sum(outs['caption_masks'], 1))
        outs['caption_ids'] = outs['caption_ids'][:, :max_sent_len]
        outs['caption_masks'] = outs['caption_masks'][:, :max_sent_len]
    outs['ref_sents'] = {}
    for x in data:
        outs['ref_sents'][x['names']] = x['ref_sents']
    return outs
class ImageSceneGraphReader(ImageSceneGraphFlatReader):
    """FlatReader plus typed, direction-specific graph edges, returned as
    NUM_RELS sparse adjacency matrices (one per edge type)."""

    def add_obj_attr_edge(self, edges, obj_node_id, attr_node_id):
        # type 0: object -> attribute, type 1: attribute -> object
        edges.append([obj_node_id, attr_node_id, 0])
        edges.append([attr_node_id, obj_node_id, 1])

    def add_rel_subj_edge(self, edges, rel_node_id, subj_node_id):
        # type 2: subject -> relationship, type 3: relationship -> subject
        edges.append([subj_node_id, rel_node_id, 2])
        edges.append([rel_node_id, subj_node_id, 3])

    def add_rel_obj_edge(self, edges, rel_node_id, obj_node_i):
        # type 4: relationship -> object, type 5: object -> relationship
        edges.append([rel_node_id, obj_node_i, 4])
        edges.append([obj_node_i, rel_node_id, 5])

    def __getitem__(self, idx):
        image_id, region_id = self.names[idx]
        name = '%s_%s'%(image_id, region_id)
        anno = json.load(open(os.path.join(self.region_anno_dir, '%s.json'%image_id)))
        region_graph = anno[region_id]
        region_caption = anno[region_id]['phrase']
        with h5py.File(os.path.join(self.obj_ft_dir, '%s.jpg.hdf5'%image_id.replace('/', '_')), 'r') as f:
            key = '%s.jpg'%image_id.replace('/', '_')
            obj_fts = f[key][...]
            obj_bboxes = f[key].attrs['boxes']
        obj_box_to_ft = {tuple(box): ft for box, ft in zip(obj_bboxes, obj_fts)}
        attn_fts, node_types, attr_order_idxs, edges = [], [], [], []
        obj_id_to_box = {}
        obj_id_to_graph_id = {}
        n = 0  # next graph node id; hard budget of max_attn_len nodes
        for x in region_graph['objects']:
            box = (x['x'], x['y'], x['x']+x['w']-PIXEL_REDUCE, x['y']+x['h']-PIXEL_REDUCE)
            obj_id_to_box[x['object_id']] = box
            attn_fts.append(obj_box_to_ft[box])
            attr_order_idxs.append(0)
            node_types.append(0)   # 0 = object node
            obj_id_to_graph_id[x['object_id']] = n
            n += 1
            if n >= self.max_attn_len:
                break
            for ia, attr in enumerate(x['attributes']):
                # attribute nodes reuse the object's box feature
                attn_fts.append(obj_box_to_ft[box])
                attr_order_idxs.append(ia + 1)
                node_types.append(1)   # 1 = attribute node
                self.add_obj_attr_edge(edges, obj_id_to_graph_id[x['object_id']], n)
                n += 1
                if n >= self.max_attn_len:
                    break
            if n >= self.max_attn_len:
                break
        if n < self.max_attn_len:
            for x in region_graph['relationships']:
                # NOTE(review): these lookups raise KeyError if the subject or
                # object was cut by the max_attn_len budget above -- confirm
                # annotations always fit within the budget.
                obj_box = obj_id_to_box[x['object_id']]
                subj_box = obj_id_to_box[x['subject_id']]
                # relationship node = union box of subject and object
                box = (min(obj_box[0], subj_box[0]), min(obj_box[1], subj_box[1]),
                       max(obj_box[2], subj_box[2]), max(obj_box[3], subj_box[3]))
                attn_fts.append(obj_box_to_ft[box])
                attr_order_idxs.append(0)
                node_types.append(2)   # 2 = relationship node
                self.add_rel_subj_edge(edges, n, obj_id_to_graph_id[x['subject_id']])
                self.add_rel_obj_edge(edges, n, obj_id_to_graph_id[x['object_id']])
                n += 1
                if n >= self.max_attn_len:
                    break
        # Pad or trim node arrays to max_attn_len.
        num_nodes = len(node_types)
        attn_fts = np.array(attn_fts, np.float32)
        attn_fts, attn_masks = self.pad_or_trim_feature(attn_fts, self.max_attn_len)
        node_types = node_types[:self.max_attn_len] + [0] * max(0, self.max_attn_len - num_nodes)
        node_types = np.array(node_types, np.int32)
        attr_order_idxs = attr_order_idxs[:self.max_attn_len] + [0] * max(0, self.max_attn_len - num_nodes)
        attr_order_idxs = np.array(attr_order_idxs, np.int32)
        if len(edges) > 0:
            src_nodes, tgt_nodes, edge_types = tuple(zip(*edges))
            src_nodes = np.array(src_nodes, np.int32)
            tgt_nodes = np.array(tgt_nodes, np.int32)
            edge_types = np.array(edge_types, np.int32)
            # Edge weight 1/k, k = number of same-type edges into the target,
            # so each adjacency row sums to at most 1.
            edge_counter = collections.Counter([(tgt_node, edge_type) for tgt_node, edge_type in zip(tgt_nodes, edge_types)])
            edge_norms = np.array(
                [1 / edge_counter[(tgt_node, edge_type)] for tgt_node, edge_type in zip(tgt_nodes, edge_types)],
                np.float32)
        else:
            tgt_nodes = src_nodes = edge_types = edge_norms = np.array([])
        # One (max_attn_len x max_attn_len) sparse matrix per edge type.
        edge_sparse_matrices = []
        for i in range(NUM_RELS):
            idxs = (edge_types == i)
            edge_sparse_matrices.append(
                sparse.coo_matrix((edge_norms[idxs], (tgt_nodes[idxs], src_nodes[idxs])),
                                  shape=(self.max_attn_len, self.max_attn_len)))
        out = {
            'names': name,
            'mp_fts': self.mp_fts[self.img_id_to_ftidx_name[image_id][0]],
            'attn_fts': attn_fts,
            'attn_masks': attn_masks,
            'node_types': node_types,
            'attr_order_idxs': attr_order_idxs,
            'edge_sparse_matrices': edge_sparse_matrices,
        }
        if self.is_train or self.return_label:
            sent = region_caption
            caption_ids, caption_masks = self.pad_sents(self.sent2int(sent))
            out.update({
                'caption_ids': caption_ids,
                'caption_masks': caption_masks,
                'ref_sents': [sent],
            })
        return out
def sg_sparse_collate_fn(data):
    """Batch ImageSceneGraphReader items.

    All samples are already padded to the same max_attn_len by the reader,
    so attention arrays are stacked unchanged; edge_sparse_matrices stay a
    per-sample Python list. Captions are trimmed to the batch maximum.
    """
    outs = {}
    for key in ['names', 'mp_fts', 'attn_fts', 'attn_masks', 'node_types', 'attr_order_idxs', \
                'edge_sparse_matrices', 'caption_ids', 'caption_masks']:
        if key in data[0]:
            outs[key] = [x[key] for x in data]
    outs['mp_fts'] = np.array(outs['mp_fts'])
    # (batch, max_attn_len, dim_attn_ft)
    outs['attn_fts'] = np.array(outs['attn_fts'])
    outs['attn_masks'] = np.array(outs['attn_masks'])
    if 'caption_ids' in data[0]:
        outs['caption_ids'] = np.array(outs['caption_ids'], np.int32)
        # Builtin bool, not the np.bool alias removed in NumPy 1.24.
        outs['caption_masks'] = np.array(outs['caption_masks'], bool)
        # BUG FIX: per-sample sums (axis 1); the original summed the whole
        # batch, so captions were effectively never trimmed.
        max_sent_len = np.max(np.sum(outs['caption_masks'], 1))
        outs['caption_ids'] = outs['caption_ids'][:, :max_sent_len]
        outs['caption_masks'] = outs['caption_masks'][:, :max_sent_len]
    outs['ref_sents'] = {}
    for x in data:
        outs['ref_sents'][x['names']] = x['ref_sents']
    return outs
class ImageSceneGraphFlowReader(ImageSceneGraphReader):
    """ImageSceneGraphReader plus a 'flow' order graph over the nodes and,
    when pred_caption_file was given, predicted captions instead of the
    annotated phrases."""

    def __getitem__(self, idx):
        image_id, region_id = self.names[idx]
        name = '%s_%s'%(image_id, region_id)
        anno = json.load(open(os.path.join(self.region_anno_dir, '%s.json'%image_id)))
        region_graph = anno[region_id]
        # Use model-predicted captions when provided; otherwise ground truth.
        if self.pred_captions is not None:
            region_caption = self.pred_captions[name][0]
        else:
            region_caption = anno[region_id]['phrase']
        with h5py.File(os.path.join(self.obj_ft_dir, '%s.jpg.hdf5'%image_id.replace('/', '_')), 'r') as f:
            key = '%s.jpg'%image_id.replace('/', '_')
            obj_fts = f[key][...]
            obj_bboxes = f[key].attrs['boxes']
        obj_box_to_ft = {tuple(box): ft for box, ft in zip(obj_bboxes, obj_fts)}
        attn_fts, node_types, attr_order_idxs = [], [], []
        attn_node_names = []
        edges, flow_edges = [], []
        obj_id_to_box = {}
        obj_id_to_graph_id = {}
        n = 0  # next graph node id; hard budget of max_attn_len nodes
        # Object and attribute nodes; flow is bi-directional obj <-> attr.
        for x in region_graph['objects']:
            box = (x['x'], x['y'], x['x']+x['w']-PIXEL_REDUCE, x['y']+x['h']-PIXEL_REDUCE)
            obj_id_to_box[x['object_id']] = box
            attn_fts.append(obj_box_to_ft[box])
            attn_node_names.append(x['name'])
            attr_order_idxs.append(0)
            node_types.append(0)   # 0 = object node
            obj_id_to_graph_id[x['object_id']] = n
            n += 1
            if n >= self.max_attn_len:
                break
            for ia, attr in enumerate(x['attributes']):
                attn_fts.append(obj_box_to_ft[box])
                attn_node_names.append(attr)
                attr_order_idxs.append(ia + 1)
                node_types.append(1)   # 1 = attribute node
                self.add_obj_attr_edge(edges, obj_id_to_graph_id[x['object_id']], n)
                # bi-directional for obj-attr
                flow_edges.append((obj_id_to_graph_id[x['object_id']], n))
                flow_edges.append((n, obj_id_to_graph_id[x['object_id']]))
                n += 1
                if n >= self.max_attn_len:
                    break
            if n >= self.max_attn_len:
                break
        # Relationship nodes; flow runs subject -> relationship -> object.
        if n < self.max_attn_len:
            for x in region_graph['relationships']:
                # NOTE(review): these lookups raise KeyError if the subject or
                # object was cut by the max_attn_len budget above -- confirm
                # annotations always fit within the budget.
                obj_box = obj_id_to_box[x['object_id']]
                subj_box = obj_id_to_box[x['subject_id']]
                box = (min(obj_box[0], subj_box[0]), min(obj_box[1], subj_box[1]),
                       max(obj_box[2], subj_box[2]), max(obj_box[3], subj_box[3]))
                attn_fts.append(obj_box_to_ft[box])
                attn_node_names.append(x['name'])
                attr_order_idxs.append(0)
                node_types.append(2)   # 2 = relationship node
                self.add_rel_subj_edge(edges, n, obj_id_to_graph_id[x['subject_id']])
                self.add_rel_obj_edge(edges, n, obj_id_to_graph_id[x['object_id']])
                flow_edges.append((obj_id_to_graph_id[x['subject_id']], n))
                flow_edges.append((n, obj_id_to_graph_id[x['object_id']]))
                n += 1
                if n >= self.max_attn_len:
                    break
        # Pad or trim node arrays to max_attn_len.
        num_nodes = len(node_types)
        attn_fts = np.array(attn_fts, np.float32)
        attn_fts, attn_masks = self.pad_or_trim_feature(attn_fts, self.max_attn_len)
        node_types = node_types[:self.max_attn_len] + [0] * max(0, self.max_attn_len - num_nodes)
        node_types = np.array(node_types, np.int32)
        attr_order_idxs = attr_order_idxs[:self.max_attn_len] + [0] * max(0, self.max_attn_len - num_nodes)
        attr_order_idxs = np.array(attr_order_idxs, np.int32)
        # Typed edges -> per-type sparse matrices, weight 1/k per
        # (target node, edge type) as in the parent class.
        if len(edges) > 0:
            src_nodes, tgt_nodes, edge_types = tuple(zip(*edges))
            src_nodes = np.array(src_nodes, np.int32)
            tgt_nodes = np.array(tgt_nodes, np.int32)
            edge_types = np.array(edge_types, np.int32)
            edge_counter = collections.Counter([(tgt_node, edge_type) for tgt_node, edge_type in zip(tgt_nodes, edge_types)])
            edge_norms = np.array(
                [1 / edge_counter[(tgt_node, edge_type)] for tgt_node, edge_type in zip(tgt_nodes, edge_types)],
                np.float32)
        else:
            tgt_nodes = src_nodes = edge_types = edge_norms = np.array([])
        # build python sparse matrix
        edge_sparse_matrices = []
        for i in range(NUM_RELS):
            idxs = (edge_types == i)
            edge_sparse_matrices.append(
                sparse.coo_matrix((edge_norms[idxs], (tgt_nodes[idxs], src_nodes[idxs])),
                                  shape=(self.max_attn_len, self.max_attn_len)))
        # add end flow loop: nodes with no outgoing flow edge get a self-loop.
        flow_src_nodes = set([x[0] for x in flow_edges])
        for k in range(n):
            if k not in flow_src_nodes:
                flow_edges.append((k, k))  # end loop
        # flow order graph
        flow_src_nodes, flow_tgt_nodes = tuple(zip(*flow_edges))
        flow_src_nodes = np.array(flow_src_nodes, np.int32)
        flow_tgt_nodes = np.array(flow_tgt_nodes, np.int32)
        # normalize by src (collumn): each node spreads its flow equally
        # across its outgoing edges.
        flow_counter = collections.Counter(flow_src_nodes)
        flow_edge_norms = np.array(
            [1 / flow_counter[src_node] for src_node in flow_src_nodes])
        flow_sparse_matrix = sparse.coo_matrix((flow_edge_norms, (flow_tgt_nodes, flow_src_nodes)),
                                               shape=(self.max_attn_len, self.max_attn_len))
        out = {
            'names': name,
            'mp_fts': self.mp_fts[self.img_id_to_ftidx_name[image_id][0]],
            'attn_fts': attn_fts,
            'attn_masks': attn_masks,
            'node_types': node_types,
            'attr_order_idxs': attr_order_idxs,
            'edge_sparse_matrices': edge_sparse_matrices,
            'flow_sparse_matrix': flow_sparse_matrix,
        }
        if self.is_train or self.return_label:
            sent = region_caption
            caption_ids, caption_masks = self.pad_sents(self.sent2int(sent))
            out.update({
                'caption_ids': caption_ids,
                'caption_masks': caption_masks,
                'ref_sents': [sent],
                'attn_node_names': attn_node_names,
            })
        return out
def sg_sparse_flow_collate_fn(data):
    """Collate like sg_sparse_collate_fn, additionally carrying the per-sample
    flow matrices through as a plain list."""
    batch = sg_sparse_collate_fn(data)
    batch['flow_sparse_matrix'] = [item['flow_sparse_matrix'] for item in data]
    return batch
|
import numpy as np
import random
number_of_try = 1000
number_of_restart = 10

# Read the clause file: one clause per line, space-separated variable numbers.
FileName = "large"
with open(FileName + ".txt") as f:
    file = f.readlines()
# (The redundant f.close() inside the with-block was removed; the context
# manager already closes the file.)
file = [x.strip() for x in file]
file = [x.split(" ") for x in file]
print(file)

# `max` becomes the largest variable number, i.e. the number of variables.
# It shadows the builtin, but is kept because the rest of the script uses it.
max = 0
for x in file:
    for y in x:
        if int(y) > max:
            max = int(y)
def fitness(array, file, max):
    """Cost of a 0/1 assignment for the parity clauses in `file`.

    Cost = max * (#clauses with odd sum) + (#ones in the assignment).
    The all-zero assignment trivially satisfies every clause, so it is
    penalized with len(file) * max * max to keep the search away from it.
    """
    unsatisfied = 0
    for clause in file:
        clause_sum = sum(array[int(v) - 1] for v in clause)
        if clause_sum % 2 != 0:
            unsatisfied += 1
    number_of_ones = sum(1 for bit in array if bit == 1)
    if number_of_ones == 0:
        return len(file) * max * max
    return unsatisfied * max + number_of_ones


def getbetter(array, file, max):
    """Return the variable index whose single flip lowers fitness the most.

    Only variables appearing in currently unsatisfied clauses are tried;
    each candidate flip is undone, so `array` is left unchanged. Returns -1
    when no flip improves the current fitness.
    """
    best_index = -1
    best_fitness = fitness(array, file, max)
    # Indices of clauses whose current parity is odd (unsatisfied).
    # BUG FIX: the original summed the variable *indices* (int(y)) instead of
    # the assigned values array[int(y)-1], so the unsatisfied list did not
    # depend on the assignment at all.
    unsat = []
    for i in range(len(file)):
        clause_sum = sum(array[int(v) - 1] for v in file[i])
        if clause_sum % 2 != 0:
            unsat.append(i)
    for clause_index in unsat:
        for v in file[clause_index]:
            x = int(v) - 1
            array[x] ^= 1            # tentative flip
            candidate = fitness(array, file, max)
            if candidate < best_fitness:
                best_fitness = candidate
                best_index = x
            array[x] ^= 1            # undo the flip
    return best_index
# Random-restart local search: `number_of_restart` independent random starts,
# each refined by greedy single-flip descent; the best assignment is kept.
x = 0
best = [];
best_fitness = max * max
while x < number_of_restart:
    x += 1
    # Fresh random 0/1 assignment over all `max` variables.
    first = np.random.choice([0, 1], size=(max,))
    print(first)
    print(fitness(first, file, max))
    a = max * max
    # Greedy descent: apply the single best-improving flip until either no
    # flip improves (getbetter returns -1) or fitness drops below `max`
    # (i.e. every clause is satisfied).
    while a >= max:
        b = getbetter(first, file, max)
        print(b)
        if b != -1:
            first[b] ^= 1
        else:
            break
        print("----------------------------------------")
        print(first)
        a = fitness(first, file, max)
        print(a)
    # Keep the best assignment seen across restarts.
    if a < best_fitness:
        best_fitness = a
        best = first.copy()
    print("***************************************************************")
    print(best)
    print(best_fitness)
# Write the best assignment found, one variable per line.
# BUG FIX: variables are 1-indexed in the input while `best` is 0-indexed;
# the original `range(1, max)` skipped best[0] entirely and labelled every
# remaining variable with the wrong name.
with open(FileName + "_inexact_solution.txt", "w") as f:
    for j in range(max):
        f.write("x" + str(j + 1) + "=")
        f.write(str(best[j]))
        f.write("\n")
    f.write("\nfitness=" + str(best_fitness))
    # fitness = unsatisfied * max + ones, so integer division recovers the
    # number of unsatisfied clauses.
    f.write("\nnumber of unsatisfied=" + str(int(best_fitness / max)))
|
# https://leetcode-cn.com/problems/ba-shu-zi-fan-yi-cheng-zi-fu-chuan-lcof/
# 剑指 Offer 46. 把数字翻译成字符串
class Solution:
    def translateNum(self, num: int) -> int:
        """Count the ways to translate `num`, where '0'..'25' map to letters.

        A single digit always stands alone; a two-digit chunk counts only
        when it lies in '10'..'25' (so '05' is not a valid pair). Iterative
        DP in O(len(num)) time, replacing the original exponential recursion.
        """
        s = str(num)
        # ways for the prefixes of length i-1 and i (empty prefix has 1 way)
        prev2, prev1 = 1, 1
        for i in range(1, len(s)):
            current = prev1
            if '10' <= s[i - 1:i + 1] <= '25':
                current += prev2
            prev2, prev1 = prev1, current
        return prev1
# Smoke test: 12258 has exactly 5 valid translations.
print(Solution().translateNum(12258))
|
"""Check if userbot alive. If you change these, you become the gayest gay such that even the gay world will disown you."""
import asyncio
from telethon import events
from telethon.tl.types import ChannelParticipantsAdmins
from platform import uname
from userbot import ALIVE_NAME
from userbot.utils import admin_cmd
# Display name shown in the .alive reply; falls back when ALIVE_NAME is unset.
DEFAULTUSER = str(ALIVE_NAME) if ALIVE_NAME else "No Name set yet."


# BUG FIX: the original decorated the handler with the undefined name
# `command` (only `admin_cmd` is imported above), so the module crashed with
# NameError at import time.  Register through the imported helper instead.
# NOTE(review): assumes admin_cmd prepends the command prefix to the pattern
# (the usual userbot convention), so "alive$" still matches ".alive" --
# confirm against userbot.utils.admin_cmd.
@admin_cmd(pattern="alive$")
async def amireallyalive(alive):
    """ For .alive command, check if the bot is running. """
    await alive.edit("⌔︙ TeleThon For Iraq \n"
                     "⌔︙ Version: 1.0.2\n"
                     "⌔︙ Created By: [CH KLANR](https://t.me/RXXRX) || [CH IQ](https://t.me/IQTHON)\n"
                     "⌔︙ Confidence @uruur\n"
                     "⌔︙ The Files : [Here](https://t.me/YZZZY)\n"
                     "⌔︙ Source link : [Here](https://heroku.com/deploy?template=https://github.com/klanrali/TeleThon-IRAQ)\n"
                     f"⌔︙ My Master : {DEFAULTUSER}\n")
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Generated file, DO NOT EDIT
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------------------------
from msrest.serialization import Model
class ShallowTestCaseResult(Model):
    """ShallowTestCaseResult.

    :param automated_test_storage:
    :type automated_test_storage: str
    :param id:
    :type id: int
    :param is_re_run:
    :type is_re_run: bool
    :param outcome:
    :type outcome: str
    :param owner:
    :type owner: str
    :param priority:
    :type priority: int
    :param ref_id:
    :type ref_id: int
    :param run_id:
    :type run_id: int
    :param test_case_title:
    :type test_case_title: str
    """

    # NOTE: generated REST model ("DO NOT EDIT" header above) -- the map keys
    # mirror the camelCase wire-contract names used by msrest serialization,
    # and the `id` attribute shadowing the builtin is by design of the
    # generator.
    _attribute_map = {
        'automated_test_storage': {'key': 'automatedTestStorage', 'type': 'str'},
        'id': {'key': 'id', 'type': 'int'},
        'is_re_run': {'key': 'isReRun', 'type': 'bool'},
        'outcome': {'key': 'outcome', 'type': 'str'},
        'owner': {'key': 'owner', 'type': 'str'},
        'priority': {'key': 'priority', 'type': 'int'},
        'ref_id': {'key': 'refId', 'type': 'int'},
        'run_id': {'key': 'runId', 'type': 'int'},
        'test_case_title': {'key': 'testCaseTitle', 'type': 'str'}
    }

    def __init__(self, automated_test_storage=None, id=None, is_re_run=None, outcome=None, owner=None, priority=None, ref_id=None, run_id=None, test_case_title=None):
        # All fields default to None so partially-populated API responses
        # deserialize cleanly.
        super(ShallowTestCaseResult, self).__init__()
        self.automated_test_storage = automated_test_storage
        self.id = id
        self.is_re_run = is_re_run
        self.outcome = outcome
        self.owner = owner
        self.priority = priority
        self.ref_id = ref_id
        self.run_id = run_id
        self.test_case_title = test_case_title
|
import os
# Root of the per-class audio training folders (trailing separator kept for
# compatibility with the original layout).
path = r'/home/igeng/PycharmProjects/google_recog/Spoken_Number_Recognition_mfcc/codes/google_commands/audio_tr/'

# Create class sub-directories 0..29.  os.makedirs(..., exist_ok=True) makes
# the script idempotent: the original os.mkdir raised FileExistsError when
# the script was re-run over an existing tree.
for name in range(30):
    os.makedirs(os.path.join(path, str(name)), exist_ok=True)
import random
pool_size=10        # genes drawn per generation by roulette selection
mutation_prob=0.2   # probability that a gene has one random bit flipped
cross_prob=0.8      # probability that a shuffled pair is recombined
def marking_function(x):
    """Fitness of a binary-string gene: 2 * (v**2 + 1) where v = int(x, 2)."""
    decoded = int(x, 2)
    return 2 * (decoded * decoded + 1)
def roulette_sum(genes):
    """Total fitness of the pool (the normaliser for the roulette wheel)."""
    # Generator expression replaces the original manual accumulator, which
    # shadowed the builtin `sum`.
    return sum(marking_function(g) for g in genes)
def prob(x, sum):
    """Selection probability of gene ``x`` given the pool's total fitness.

    NOTE(review): the second parameter shadows the builtin ``sum``; the name
    is kept so keyword callers are unaffected.
    """
    fitness_value = marking_function(x)
    return fitness_value / sum
def select_genes(genes):
    """Draw ``pool_size`` genes by fitness-proportionate (roulette) selection."""
    total = roulette_sum(genes)
    # Cumulative wheel: wheel[i] = P(gene 0) + ... + P(gene i).
    wheel = []
    running = 0.0
    for gene in genes:
        running += prob(gene, total)
        wheel.append(running)
    return [genes[choose(wheel)] for _ in range(pool_size)]
def cross_genes(genes):
    """Shuffle the pool and recombine it pairwise with probability ``cross_prob``.

    Each adjacent pair either swaps tails at a random locus (one-point
    crossover, producing two children) or is copied through unchanged.
    """
    random.shuffle(genes)
    crossed_genes = []
    # Stop at len(genes) - 1: guards the odd-sized-pool case, where the
    # original raised IndexError on genes[i + 1].
    for i in range(0, len(genes) - 1, 2):
        if random.random() < cross_prob:
            # locus may equal len(gene): the pair then passes through intact.
            locus = random.randint(0, len(genes[0]))
            crossed_genes.append(cross(genes[i + 1], genes[i], locus))
            crossed_genes.append(cross(genes[i], genes[i + 1], locus))
        else:
            crossed_genes.append(genes[i])
            crossed_genes.append(genes[i + 1])
    if len(genes) % 2:
        crossed_genes.append(genes[-1])  # odd leftover passes through unchanged
    return crossed_genes
def cross(gene1, gene2, locus):
    """One-point crossover: head of ``gene1`` up to ``locus`` + tail of ``gene2``."""
    head = gene1[:locus]
    tail = gene2[locus:]
    return head + tail
def mutate_genes(genes):
    """Apply point mutation independently to every gene in the pool."""
    return [mutate(gene) for gene in genes]
def mutate(gene):
    """With probability ``mutation_prob``, flip one random bit of ``gene``."""
    if random.random() >= mutation_prob:
        return gene
    locus = random.randint(0, len(gene) - 1)
    flipped = '0' if gene[locus] == '1' else '1'
    return gene[:locus] + flipped + gene[locus + 1:]
def choose(roulette):
    """Spin the wheel: index of the first cumulative slot exceeding the draw.

    Falls back to index 0 if floating-point rounding leaves the draw above
    every slot.
    """
    spin = random.random()
    for idx, threshold in enumerate(roulette):
        if spin < float(threshold):
            return idx
    return 0
# --- Driver: evolve an 8-bit gene pool for 50 generations -------------------
# Seed reservoir: every value 1..127 rendered as an 8-bit binary string.
all_genes = ["{:08b}".format(value) for value in range(1, 128)]

genes = []
for generation in range(50):
    genes = select_genes(all_genes)
    # Pull the selected genes out of the reservoir, evolve them, and return
    # the offspring to the reservoir.
    all_genes = [g for g in all_genes if g not in genes]
    genes = cross_genes(genes)
    genes = mutate_genes(genes)
    all_genes.extend(genes)
print(genes)
|
def count(fileData):
    """Solve the 'Counting Sheep' style problem for every case in ``fileData``.

    For each input number, scan its multiples N, 2N, 3N, ... while
    accumulating the decimal digits seen; the answer is the first multiple at
    which all ten digits 0-9 have appeared, or 'INSOMNIA' when none is found
    within the scan limit.  Each result is printed and written to output.txt.
    """
    with open(fileData, 'r') as inputData:
        lines = inputData.readlines()
    N = int(lines[0])  # number of test cases
    maxim = 10000000   # safety cap on how many multiples to try
    with open('output.txt', 'w') as outputData:
        for j in range(1, N + 1):
            number = int(lines[j])
            list_num = set()
            # BUG FIX: the original initialised last_num once before the case
            # loop, so a case with no answer reported the previous case's
            # result instead of 'INSOMNIA'.  Reset it per case.
            last_num = 0
            for x in range(1, maxim):
                next_number = str(x * number)
                for n in next_number:
                    list_num.add(int(n))
                if len(list_num) == 10:
                    last_num = next_number
                    break
            if last_num == 0:
                last_num = 'INSOMNIA'
            print('case #{0}: {1}'.format(j, last_num))
            outputData.write('case #{0}: {1}\n'.format(j, last_num))
# Entry point: run the solver on the Code Jam large input
# ('A-large.in' must exist in the working directory).
count('A-large.in')
|
# -*- coding: utf-8 -*-
"""
Created on Tues Jun 03 08:00:53 2014
@author: nataliecmoore
Script Name: USDA_AJ_PY018_SCRAPER
Purpose:
Find the Georgia f.o.b. dock quoted price on broilers/fryers
Approach:
Found the relevant section of the website and used python string parsing
to find the quoted price.
Author: Natalie Moore
History:
Date Author Purpose
----- ------- -----------
06/03/2014 Natalie Moore Initial development/release
"""
import urllib2
import pytz
import pandas as pd
import datetime
import sys
# Today's observation date in US/Eastern, formatted YYYY-MM-DD.
date=datetime.datetime.now(pytz.timezone('US/Eastern')).strftime('%Y-%m-%d') # holds the date in YYYY-MM-DD format

# stores report in variable 'site_contents'
url='http://www.ams.usda.gov/mnreports/aj_py018.txt'
site_contents=urllib2.urlopen(url).read()

# Locate the quoted price positionally: the price is the number containing
# the last '.' that appears before the word 'based' in the report text.
# NOTE(review): this breaks silently if the report wording changes -- the
# slice would then parse some other number.
decimal=site_contents.rfind('.', 0, site_contents.find('based')) # find the decimal point in the price
space_before=site_contents.rfind(' ', 0, decimal) # find the space before the price
space_after=site_contents.find(' ', decimal) # find the space after the price
dock_quoted_price=float(site_contents[space_before:space_after].strip()) # store the quoted price as a float and remove extra spaces around it

# Single-row frame indexed by date, holding the day's quoted price.
headings=['Date', 'Quoted Price (cents)']
data={'Date': [date], 'Quoted Price (cents)': [dock_quoted_price]}
data_df = pd.DataFrame(data, columns = headings)
data_df.index = data_df['Date']
data_df = data_df.drop('Date', 1)

# NOTE(review): the trailing '\r' looks deliberate for the downstream Quandl
# ingestion format -- confirm before removing.
quandl_code = 'USDA_AJ_PY018_BROILER_FRYER_QUOTED_PRICE\r'# build unique quandl code
reference_text = ' Historical figures from USDA can be verified using the LMR datamart located ' \
    '\n at http://mpr.datamart.ams.usda.gov.\n'

# Emit the Quandl metadata block followed by the CSV payload (Python 2 prints).
print 'code: ' + quandl_code+'\n'
print 'name: Georgia F.O.B. Dock Broiler/Fryer Quoted Price\n'
print 'description: Georgia F.O.B. Dock quoted price on broilers/fryers'\
    '\n from the USDA AJ_PY018 report published by the USDA Agricultural Marketing Service ' \
    '\n (AMS).\n'\
    + reference_text+'\n'
print 'reference_url: http://www.ams.usda.gov/mnreports/AJ_PY018.txt\n'
print 'frequency: daily\n'
print 'private: false\n'
print '---\n'
data_df.to_csv(sys.stdout)
print '\n'
print '\n'
|
# Stdlib
import datetime
import time
import pdb
# 3rd party
from dateutil.parser import parse as date_parse
import pytz
# Custom
from .constants import THIS_DIR
from .regex_ import make_bdry_regex, match_all
"""
times = [
'', 'abc', 'abc:def', '4:3', '40:3', # Fail
'09:50', '10:25', '3:30', '24:00', '24:50', '34:00' # Pass
]
for t in times: print t, TIME_REGEX.findall(t)
"""
TIME_PATTERN = ur"""
(?:
(?: (?: [01]{,1}[0-9]) | (?: 2[0-4]) ) # Hours
\: # sep
(?: (?: [0-5][0-9]) | 60 ) # Minutes
(?:
\: # sep
(?: [0-5][0-9]) | 60 # Seconds (optional)
)?
)
(?:
\s*(?: pm|am )
)?
"""
TIME_REGEX = make_bdry_regex(TIME_PATTERN, name='time')
MONTHS_FULL = [
"january", "february", "march", "april", "may",
"june", "july", "august", "september",
"october", "november", "december"
]
MONTHS_ABBR = [
"jan", "feb", "mar",
"apr", "may", "jun",
"jul", "aug", "sep", "sept",
"oct", "nov", "dec"
]
MONTHS_ALPHA = MONTHS_FULL + MONTHS_ABBR
MONTHS_ALPHA_PAT = ur"|".join([ur"(?:{0})".format(s) if len(s) > 1 else s for s in MONTHS_ALPHA])
MONTHS_NUM = [unicode(i) for i in range(1, 13)]
MONTHS_NUM += [ u"0" + unicode(i) for i in range(1, 10) ]
MONTHS_NUM_PAT = ur"|".join([ur"(?:{0})".format(s) if len(s) > 1 else s for s in MONTHS_NUM])
MONTHS = MONTHS_ALPHA + MONTHS_NUM
MONTHS_PAT = ur"|".join(MONTHS)
DAYS_NUM = [ unicode(i) for i in range(1, 32) ]
DAYS_NUM += [ u"0" + unicode(i) for i in range(4, 9) ]
DAYS_NUM_PAT = ur"|".join([ur"(?:{0})".format(s) if len(s) > 1 else s for s in DAYS_NUM])
DAYS_ALPHA = [ unicode(i) + u"st" for i in [u"01", u"1", u"21", u"31"] ]
DAYS_ALPHA += [ unicode(i) + u"nd" for i in [u"02", u"2", u"22"] ]
DAYS_ALPHA += [ unicode(i) + u"rd" for i in [u"03", u"3", u"23"] ]
DAYS_ALPHA += [ unicode(i) + u"th" for i in range(4, 21) + range(24, 31) ]
DAYS_ALPHA += [ u"0" + unicode(i) + u"th" for i in range(4, 9) ]
DAYS_ALPHALONG = [
"first", "second", "third", "fourth", "fifth", "sixth",
"seventh", "eigth", "ninth"
]
DAYS_ALPHA += DAYS_ALPHALONG
DAYS_ALPHA += [
"tenth", "eleventh", "twelfth", "thirteenth", "fourteeth",
"fifteenth", "sixteenth", "seventeenth", "eighteenth",
"nineteenth", "twentieth", "thirtieth",
]
DAYS_ALPHA += [ "twenty-" + d for d in DAYS_ALPHALONG ]
DAYS_ALPHA += [ "twenty" + d for d in DAYS_ALPHALONG ]
DAYS_ALPHA += [ "thirty-" + d for d in DAYS_ALPHALONG ]
DAYS_ALPHA += [ "thirty" + d for d in DAYS_ALPHALONG ]
DAYS_ALPHA_PAT = ur"|".join(
[ur"(?:{0})".format(s) if len(s) > 1 else s for s in DAYS_ALPHA]
)
DAYS = DAYS_NUM + DAYS_ALPHA
DAYS_PAT = ur"|".join([ur"(?:{0})".format(s) if len(s) > 1 else s for s in DAYS])
YEAR_2_PAT = ur'(?: \d{2})'
# 1500 - 1999, 2000 - 2199
YEAR_4_PAT = ur'(?: (?: 1[5-9]\d{2} ) | (?: 2[01]\d{2} ))'
YEAR_PAT = ur"(?: {0} | {1})".format(YEAR_2_PAT, YEAR_4_PAT)
"""
formats
3 segment
month day year
jul[y][- ][0]4[th] 2014
[0]7-[0]4[,] 2014
[0]7-[0]4-[20]14
day month year
[0]4[th] jul[y][,] 2014
[0]4-[0]7[,] 2014
[0]4-[0]7-[20]14
year month day
2014[,] jul[y][- ][0]4[th]
2014[,] [0]7-[0]4
2014-[0]7-[0]4
year day month
2014[,] [0]4[- ]jul[y]
2014[,] [0]4-[0]7
2014-[0]4-[0]7
2 segment
month year
jul[y][,- ]2014
month day
jul[y][,- ][0]4[th]
day month
[0]4[th][,- ]jul[y]
year month
2014[,- ]jul[y]
2014[,- ][0]7
"""
# Separator fragments shared by the date patterns below.
SEP_YEAR = ur'[\/\-, ]\s{,2}'           # separator before/after a year segment
SEP_MONTH_DAY = ur'\s{,2}[\/\- ]\s{,2}' # separator between month and day
SEP_GROUP = ur'\s{,2}(?P<sep>[\/\-])\s{,2}' # Case where both separators should be the same.
SEP_GROUP_REF = ur'\s{,2}(?P=sep)\s{,2}'    # back-reference to the captured separator
# ---------------------------------------------------------------------------
# Date patterns, one per segment order (MDY, DMY, YMD, YDM) in two variants:
#   *_PAT   -- loose separators, 4-digit year only;
#   *_2_PAT -- both separators captured and forced equal (?P=sep), 2- or
#              4-digit year.
# Plus the 2-segment forms MY, YM, MD, DM.  Each pattern is wrapped by
# make_bdry_regex into a named, boundary-anchored regex.
# ---------------------------------------------------------------------------

# Month-Day-Year, e.g. "july 4th 2014", "7-04, 2014".
DATE_MDY_PAT = ur"""
    (?P<month>
        (?: {month_alpha} ) | (?: {month_num} )
    )
    (?: {sep_md})
    (?P<day> {day} )
    (?: {sep_year} )
    (?P<year> {year_4})
""".format(
    month_alpha=MONTHS_ALPHA_PAT,
    day=DAYS_PAT,
    year_4=YEAR_4_PAT,
    month_num=MONTHS_NUM_PAT,
    sep_md=SEP_MONTH_DAY,
    sep_year=SEP_YEAR
)

# NOTE(review): `re` appears unused in this module (regex construction goes
# through make_bdry_regex) -- presumably a leftover; confirm before removing.
import re

DATE_MDY_REGEX = make_bdry_regex(DATE_MDY_PAT, name='date')

# Month-Day-Year with matching separators, e.g. "7-4-14".
DATE_MDY_2_PAT = ur"""
    (?P<month>
        (?: {month_alpha} ) | (?: {month_num} )
    )
    (?: {sep} )
    (?P<day> {day} )
    (?: {sep_ref} )
    (?P<year> {year})
""".format(
    month_alpha=MONTHS_ALPHA_PAT,
    day=DAYS_PAT,
    year=YEAR_PAT,
    month_num=MONTHS_NUM_PAT,
    sep=SEP_GROUP,
    sep_ref=SEP_GROUP_REF
)
DATE_MDY_2_REGEX = make_bdry_regex(DATE_MDY_2_PAT, name='date')

# Day-Month-Year, e.g. "4th july 2014".
DATE_DMY_PAT = ur"""
    (?P<day> {day} )
    (?: {sep_md} )
    (?P<month>
        (?: {month_alpha} ) | (?: {month_num} )
    )
    (?: {sep_year} )
    (?P<year> {year_4})
""".format(
    month_alpha=MONTHS_ALPHA_PAT,
    day=DAYS_PAT,
    year_4=YEAR_4_PAT,
    month_num=MONTHS_NUM_PAT,
    sep_md=SEP_MONTH_DAY,
    sep_year=SEP_YEAR
)
DATE_DMY_REGEX = make_bdry_regex(DATE_DMY_PAT, name='date')

# Day-Month-Year with matching separators, e.g. "4-7-14".
DATE_DMY_2_PAT = ur"""
    (?P<day> {day} )
    (?: {sep} )
    (?P<month>
        (?: {month_alpha} ) | (?: {month_num} )
    )
    (?: {sep_ref} )
    (?P<year> {year})
""".format(
    month_alpha=MONTHS_ALPHA_PAT,
    day=DAYS_PAT,
    year=YEAR_PAT,
    month_num=MONTHS_NUM_PAT,
    sep=SEP_GROUP,
    sep_ref=SEP_GROUP_REF
)
DATE_DMY_2_REGEX = make_bdry_regex(DATE_DMY_2_PAT, name='date')

# Year-Month-Day, e.g. "2014 july 4th".
DATE_YMD_PAT = ur"""
    (?P<year> {year_4})
    (?: {sep_year} )
    (?P<month>
        (?: {month_alpha} ) | (?: {month_num} )
    )
    (?: {sep_md} )
    (?P<day> {day} )
""".format(
    month_alpha=MONTHS_ALPHA_PAT,
    day=DAYS_PAT,
    year_4=YEAR_4_PAT,
    month_num=MONTHS_NUM_PAT,
    sep_md=SEP_MONTH_DAY,
    sep_year=SEP_YEAR
)
DATE_YMD_REGEX = make_bdry_regex(DATE_YMD_PAT, name='date')

# Year-Month-Day with matching separators, e.g. "14-7-4".
DATE_YMD_2_PAT = ur"""
    (?P<year> {year})
    (?: {sep} )
    (?P<month>
        (?: {month_alpha} ) | (?: {month_num} )
    )
    (?: {sep_ref} )
    (?P<day> {day} )
""".format(
    month_alpha=MONTHS_ALPHA_PAT,
    day=DAYS_PAT,
    year=YEAR_PAT,
    month_num=MONTHS_NUM_PAT,
    sep=SEP_GROUP,
    sep_ref=SEP_GROUP_REF
)
DATE_YMD_2_REGEX = make_bdry_regex(DATE_YMD_2_PAT, name='date')

# Year-Day-Month, e.g. "2014 4th july".
DATE_YDM_PAT = ur"""
    (?P<year> {year_4})
    (?: {sep_year} )
    (?P<day> {day} )
    (?: {sep_md} )
    (?P<month>
        (?: {month_alpha} ) | (?: {month_num} )
    )
""".format(
    month_alpha=MONTHS_ALPHA_PAT,
    day=DAYS_PAT,
    year_4=YEAR_4_PAT,
    month_num=MONTHS_NUM_PAT,
    sep_md=SEP_MONTH_DAY,
    sep_year=SEP_YEAR
)
DATE_YDM_REGEX = make_bdry_regex(DATE_YDM_PAT, name='date')

# Year-Day-Month with matching separators, e.g. "14-4-7".
DATE_YDM_2_PAT = ur"""
    (?P<year> {year})
    (?: {sep} )
    (?P<day> {day} )
    (?: {sep_ref} )
    (?P<month>
        (?: {month_alpha} ) | (?: {month_num} )
    )
""".format(
    month_alpha=MONTHS_ALPHA_PAT,
    day=DAYS_PAT,
    year=YEAR_PAT,
    month_num=MONTHS_NUM_PAT,
    sep=SEP_GROUP,
    sep_ref=SEP_GROUP_REF
)
DATE_YDM_2_REGEX = make_bdry_regex(DATE_YDM_2_PAT, name='date')

# Two-segment forms below: month+year, year+month, month+day, day+month.
DATE_MY_PAT = ur"""
    # jul[y][,- ]2014
    (?P<month>
        (?: {month_alpha} ) | (?: {month_num} )
    )
    (?: {sep_year} )
    (?P<year> {year_4})
""".format(
    month_alpha=MONTHS_ALPHA_PAT,
    month_num=MONTHS_NUM_PAT,
    year_4=YEAR_4_PAT,
    sep_year=SEP_YEAR
)
DATE_MY_REGEX = make_bdry_regex(DATE_MY_PAT, name='date')

DATE_YM_PAT = ur"""
    # 2014[,- ]jul[y]
    # 2014[,- ][0]7
    (?P<year> {year_4})
    (?: {sep_year} )
    (?P<month>
        (?: {month_alpha} ) | (?: {month_num} )
    )
""".format(
    month_alpha=MONTHS_ALPHA_PAT,
    month_num=MONTHS_NUM_PAT,
    year_4=YEAR_4_PAT,
    sep_year=SEP_YEAR
)
DATE_YM_REGEX = make_bdry_regex(DATE_YM_PAT, name='date')

DATE_MD_PAT = ur"""
    # jul[y][,- ][0]4[th]
    (?P<month> {month_alpha} )
    (?: {sep_md} )
    (?P<day> {day} )
""".format(
    month_alpha=MONTHS_ALPHA_PAT,
    day=DAYS_PAT,
    sep_md=SEP_MONTH_DAY,
)
DATE_MD_REGEX = make_bdry_regex(DATE_MD_PAT, name='date')

DATE_DM_PAT = ur"""
    # jul[y][,- ][0]4[th]
    (?P<day> {day})
    (?: {sep_md} )
    (?P<month> {month_alpha} )
""".format(
    month_alpha=MONTHS_ALPHA_PAT,
    day=DAYS_PAT,
    sep_md=SEP_MONTH_DAY,
)
DATE_DM_REGEX = make_bdry_regex(DATE_DM_PAT, name='date')
def match_dates(text):
    """Find date mentions in ``text``.

    Runs every date regex (all four segment orders in both separator
    variants, plus the 2-segment forms) over the text through ``match_all``
    and returns its aggregated match structure, capturing the full date plus
    its year/month/day groups.
    """
    patterns = [
        (DATE_MDY_REGEX, 'date_mdy_1'),
        (DATE_MDY_2_REGEX, 'date_mdy_2'),
        (DATE_DMY_REGEX, 'date_dmy_1'),
        (DATE_DMY_2_REGEX, 'date_dmy_2'),
        (DATE_YMD_REGEX, 'date_ymd_1'),
        (DATE_YMD_2_REGEX, 'date_ymd_2'),
        (DATE_YDM_REGEX, 'date_ydm_1'),
        (DATE_YDM_2_REGEX, 'date_ydm_2'),
        (DATE_YM_REGEX, 'date_ym'),
        (DATE_MY_REGEX, 'date_my'),
        (DATE_MD_REGEX, 'date_md'),
        (DATE_DM_REGEX, 'date_dm'),
    ]
    return match_all(
        text, patterns, ('date', 'year', 'month', 'day'),
        shortcircuit=True, ignore_submatches=False, ignore_errors=True
    )
"""
dates = [
'', 'abc', 'abc:def', '4:3', '40:3', # Fail
'1/1/2013', '2013/1/1', '01-01-2013', '2013-01-01' # Pass
'2014/1', '2014/01', '14-01', ''
'09:50', '10:25', '3:30', '24:00', '24:50', '34:00' # Pass
]
for t in dates: print t, DATE_REGEX.findall(t)
"""
def test_match_date():
dates = [
# MDY
('july 4th 2014', [
dict(name='date_mdy_1_date', text='july 4th 2014', pos=(0, 13)),
dict(name='date_mdy_1_year', text='2014', pos=(9, 13)),
dict(name='date_mdy_1_month', text='july', pos=(0, 4)),
dict(name='date_mdy_1_day', text='4th', pos=(5, 8))
]
),
('july 4 2014', [
dict(name='date_mdy_1_date', text='july 4 2014', pos=(0, 11)),
dict(name='date_mdy_1_year', text='2014', pos=(7, 11)),
dict(name='date_mdy_1_month', text='july', pos=(0, 4)),
dict(name='date_mdy_1_day', text='4', pos=(5, 6))
]
),
# ('jul-04th 2014', {'date_mdy_1_date_0': ('jul-04th 2014', (0, 13)), 'date_mdy_1_month_0': ('jul', (0, 3)), 'date_mdy_1_year_0': ('2014', (9, 13)), 'date_mdy_1_day_0': ('04th', (4, 8))}),
# ('july 4 2014', {'date_mdy_1_date_0': ('july 4 2014', (0, 11)), 'date_mdy_1_month_0': ('july', (0, 4)), 'date_mdy_1_year_0': ('2014', (7, 11)), 'date_mdy_1_day_0': ('4', (5, 6))}),
# ('jul 4 2014', {'date_mdy_1_date_0': ('jul 4 2014', (0, 10)), 'date_mdy_1_month_0': ('jul', (0, 3)), 'date_mdy_1_year_0': ('2014', (6, 10)), 'date_mdy_1_day_0': ('4', (4, 5))}),
# ('7-4 2014', {'date_mdy_1_date_0': ('7-4 2014', (0, 8)), 'date_mdy_1_month_0': ('7', (0, 1)), 'date_mdy_1_year_0': ('2014', (4, 8)), 'date_mdy_1_day_0': ('4', (2, 3))}),
# ('7-04, 2014', {'date_mdy_1_date_0': ('7-04, 2014', (0, 10)), 'date_mdy_1_month_0': ('7', (0, 1)), 'date_mdy_1_year_0': ('2014', (6, 10)), 'date_mdy_1_day_0': ('04', (2, 4))}),
# ('7-4-2014', {'date_mdy_1_date_0': ('7-4-2014', (0, 8)), 'date_mdy_1_month_0': ('7', (0, 1)), 'date_mdy_1_year_0': ('2014', (4, 8)), 'date_mdy_1_day_0': ('4', (2, 3))}),
# ('7-4-14', {'date_mdy_2_month_0': ('7', (0, 1)), 'date_mdy_2_day_0': ('4', (2, 3)), 'date_mdy_2_date_0': ('7-4-14', (0, 6)), 'date_mdy_2_year_0': ('14', (4, 6))}),
# ('7-4th-2104', {'date_mdy_1_date_0': ('7-4th-2104', (0, 10)), 'date_mdy_1_month_0': ('7', (0, 1)), 'date_mdy_1_year_0': ('2104', (6, 10)), 'date_mdy_1_day_0': ('4th', (2, 5))}),
# # DMY
# ('4th july 2014', {'date_dmy_1_day_0': ('4th', (0, 3)), 'date_dmy_1_year_0': ('2014', (9, 13)), 'date_dmy_1_date_0': ('4th july 2014', (0, 13)), 'date_dmy_1_month_0': ('july', (4, 8))}),
# ('4 july, 2014', {'date_dmy_1_day_0': ('4', (0, 1)), 'date_dmy_1_year_0': ('2014', (8, 12)), 'date_dmy_1_date_0': ('4 july, 2014', (0, 12)), 'date_dmy_1_month_0': ('july', (2, 6))}),
# ('04th-jul 2014', {'date_dmy_1_day_0': ('04th', (0, 4)), 'date_dmy_1_year_0': ('2014', (9, 13)), 'date_dmy_1_date_0': ('04th-jul 2014', (0, 13)), 'date_dmy_1_month_0': ('jul', (5, 8))}),
# ('4 july 2014', {'date_dmy_1_day_0': ('4', (0, 1)), 'date_dmy_1_year_0': ('2014', (7, 11)), 'date_dmy_1_date_0': ('4 july 2014', (0, 11)), 'date_dmy_1_month_0': ('july', (2, 6))}),
# ('4 jul 2014', {'date_dmy_1_day_0': ('4', (0, 1)), 'date_dmy_1_year_0': ('2014', (6, 10)), 'date_dmy_1_date_0': ('4 jul 2014', (0, 10)), 'date_dmy_1_month_0': ('jul', (2, 5))}),
# ('4-jul-2014', {'date_dmy_1_day_0': ('4', (0, 1)), 'date_dmy_1_year_0': ('2014', (6, 10)), 'date_dmy_1_date_0': ('4-jul-2014', (0, 10)), 'date_dmy_1_month_0': ('jul', (2, 5))}),
# ('4th july, 2014', {'date_dmy_1_day_0': ('4th', (0, 3)), 'date_dmy_1_year_0': ('2014', (10, 14)), 'date_dmy_1_date_0': ('4th july, 2014', (0, 14)), 'date_dmy_1_month_0': ('july', (4, 8))}),
# YMD
# '2014 july 4th', '2014, july 4', '2014 jul-04th',
# '2014 july 4', '2014 jul 4',
# '2014-jul-4', '2014, july 4th',
# '14-7-4', '2014-07-04',
# YDM
# '2014 4th july', '2014, 4 july', '2014 04th-jul',
# '2014 4 july', '2014 4 jul',
# '2014-4-jul', '2014, 4th july',
# '14-4-7', '2014-04-07',
# Fail
# 'ju 4 2014', '14-07/4'
]
# regexes = [
# ('mdy_1', DATE_MDY_REGEX),
# ('mdy_2', DATE_MDY_2_REGEX),
# ('dmy_1', DATE_DMY_REGEX),
# ('dmy_2', DATE_DMY_2_REGEX),
# ('ymd_1', DATE_YMD_REGEX),
# ('ymd_2', DATE_YMD_2_REGEX),
# ('ydm_1', DATE_YDM_REGEX),
# ('ydm_2', DATE_YDM_2_REGEX),
# ]
for text, expected in dates:
print "ORIG:", text
print "\tEXPECTED:", expected
res = match_dates(text)
print '\tRESULT:', res
assert res == expected
print
print 'test_match_date passed!'
def match_times(text):
    """Find clock-time mentions in ``text`` by running TIME_REGEX through
    ``match_all`` and returning its aggregated match structure."""
    return match_all(text, ((TIME_REGEX, None),), ('time',))
def test_match_times():
data = [
(
"The meeting starts at 10:00 AM and goes to 3:52 PM. Also, there's a meeting tomorrow at 1:00",
[
dict(name="time", text="1:00", pos=(88, 92)),
dict(name="time", text="3:52 PM", pos=(43, 50)),
dict(name="time", text="10:00 AM", pos=(22, 30)),
]
),
('abc:def', []),
('4:3', []),
('40:3', []),
]
# pdb.set_trace()
for text, expected in data:
print "ORIG:", text
print "\tEXPECTED:", expected
res = match_times(text)
print '\tRESULT:', res
assert res == expected
print
print 'test_match_times passed!'
def test():
    # Run the module's self-tests in order; each raises AssertionError on
    # the first failing case.
    test_match_date()
    test_match_times()
if __name__ == '__main__':
test()
|
import requests
from assessment import price_result, create_file
def main():
    """Fetch 30 days of bitcoin history from the coinranking API, compute the
    price summary via ``price_result``, serialise it with ``create_file``,
    and print the result."""
    endpoint = 'https://api.coinranking.com/v1/public/coin/1/history/30d'
    response = requests.get(endpoint)
    payload = response.json()
    history = payload['data']['history']
    # Aggregate the raw history into the assessment's price summary.
    summary = price_result(history)
    # Serialise the summary into its JSON-format output.
    output = create_file(summary)
    print(output)


if __name__ == "__main__":
    main()
|
import numpy as np
from astropy.io import ascii
from astropy.constants import c
import astropy.units as u
import naima
from naima.models import (ExponentialCutoffBrokenPowerLaw, Synchrotron,
InverseCompton)
# Electron spectrum: exponential-cutoff broken power law (naima model).
ECBPL = ExponentialCutoffBrokenPowerLaw(amplitude=3.699e36 / u.eV,
                                        e_0=1 * u.TeV,
                                        e_break=0.265 * u.TeV,
                                        alpha_1=1.5,
                                        alpha_2=3.233,
                                        e_cutoff=1863 * u.TeV,
                                        beta=2.)

# NOTE(review): eopts is defined but never passed below (the same bounds are
# given to Synchrotron/InverseCompton explicitly) -- confirm intent.
eopts = {'Eemax': 50 * u.PeV, 'Eemin': 0.1 * u.GeV}
SYN = Synchrotron(ECBPL, B=125 * u.uG, Eemax=50 * u.PeV, Eemin=0.1 * u.GeV)

# Compute photon density spectrum from synchrotron emission assuming R=2.1 pc
Rpwn = 2.1 * u.pc
Esy = np.logspace(-7, 9, 100) * u.eV
Lsy = SYN.flux(Esy, distance=0 * u.cm)  # use distance 0 to get luminosity
# Photon number density inside the source; the 2.24 factor is presumably a
# geometry correction for a homogeneous sphere -- confirm against reference.
phn_sy = Lsy / (4 * np.pi * Rpwn**2 * c) * 2.24

# Inverse-Compton emission on CMB, FIR and NIR fields plus the synchrotron
# photons themselves (synchrotron self-Compton).
IC = InverseCompton(ECBPL,
                    seed_photon_fields=['CMB',
                                        ['FIR', 70 * u.K, 0.5 * u.eV / u.cm**3],
                                        ['NIR', 5000 * u.K, 1 * u.eV / u.cm**3],
                                        ['SSC', Esy, phn_sy]],
                    Eemax=50 * u.PeV, Eemin=0.1 * u.GeV)

# Use plot_data from naima to plot the observed spectra
data = ascii.read('CrabNebula_spectrum.ecsv')
figure = naima.plot_data(data, e_unit=u.eV)
ax = figure.axes[0]

# Plot the computed model emission
energy = np.logspace(-7, 15, 100) * u.eV
ax.loglog(energy, IC.sed(energy, 2 * u.kpc) + SYN.sed(energy, 2 * u.kpc),
          lw=3, c='k', label='Total')
# One curve per IC seed photon field.
for i, seed, ls in zip(
        range(4), ['CMB', 'FIR', 'NIR', 'SSC'], ['--', '-.', ':', '-']):
    ax.loglog(energy, IC.sed(energy, 2 * u.kpc, seed=seed),
              lw=2, c=naima.plot.color_cycle[i + 1], label=seed, ls=ls)
ax.loglog(energy, SYN.sed(energy, 2 * u.kpc),
          lw=2, c=naima.plot.color_cycle[5], label='SYN', ls=':')
ax.set_ylim(1e-12, 1e-7)
ax.legend(loc='upper right', frameon=False)
figure.tight_layout()
figure.savefig('CrabNebula_SynSSC.png')
|
# Reads people (name + weight) until the user answers 'N', then reports how
# many people were registered and who holds the highest and lowest weights.
cont = 0
maior = menor = 0
maior_nome = []
menor_nome = []
while True:
    name = input('Nome: ')
    weight = float(input('Peso : '))
    if cont == 0:
        # First entry defines both extremes.
        # BUG FIX: the original never stored the first person's name, so the
        # name lists stayed empty unless a later entry beat the extremes.
        menor = maior = weight
        maior_nome = [name]
        menor_nome = [name]
    else:
        if weight > maior:
            maior = weight
            # BUG FIX: replace, don't append -- the original never cleared
            # maior_nome (unlike menor_nome), so it accumulated every interim
            # maximum instead of holding only the current record holder.
            maior_nome = [name]
        if weight < menor:
            menor = weight
            menor_nome = [name]
    answer = input('Quer continuar ? [S/N] ').upper()
    cont += 1
    if answer == 'N':
        break
print('Ao todo, você cadastrou {} pessoas'.format(cont))
print('O maior peso foi de {}. Peso de {}'.format(maior, maior_nome))
print('O menor peso foi de {}. Peso de {}'.format(menor, menor_nome))
|
#Program that allows a user to access two different financial calculators:
#an investment calculator and a home loan repayment calculator
import sys
import math
# Determine which calculator the user requires.
print("Choose either 'investment' or 'bond' from the menu below to proceed:")
print("\nInvestment\t - to calculate the amount of interest you'll earn on interest")
print("Bond\t\t - to calculate the amount you'll have to pay on a home loan")
print()
calculator = input()

# Dispatch on the chosen calculator via if-elif-else.
if (calculator.lower() == "investment") :
    # Get required variables from the user.
    # BUG FIX: prompt typo 'mamy' -> 'many' (here and in the bond branch).
    deposit = float(input("How many Rands are you depositing?\nR"))
    interest = float(input("What percentage interest are you earning?\n"))
    # Convert the given percentage to a fraction.
    interest /= 100
    # Investment period may be given in months or years; calculation needs years.
    time_unit = input("Would you like to indicate investment period as 'years' or 'months'?\n")
    if (time_unit.lower() == "years") :
        time = float(input("Over how many years would you like to invest?\n"))
    elif (time_unit.lower() == "months") :
        time = float(input("Over how many months would you like to invest?\n"))
        time /= 12
    else :
        sys.exit("Your input matches neither 'years' or 'months'.")
    # Choice of compound or simple interest calculation.
    interest_type = input("Will your investment be calculated using 'simple' or 'compound' interest?\n")
    if (interest_type.lower() != "simple" and interest_type.lower() != "compound") :
        sys.exit("Your input matches neither 'simple' or 'compound'.")
    # Expected end value: A = P(1 + rt) for simple, A = P(1 + r)^t for compound.
    if (interest_type.lower() == "simple") :
        final_value = deposit * (1 + interest * time)
    else :
        final_value = deposit * math.pow((1 + interest), time)
    print(f"\nThe expected final value on your investment of R{deposit:.2f}, calculated over {time:.2f} years and at {interest * 100:.2f}%, is:\nR{final_value:.2f}")
elif (calculator.lower() == "bond") :
    # Get required variables from the user.
    house_value = float(input("How many Rands is your house worth?\nR"))
    interest = float(input("What percentage interest are you being charged?\n"))
    # Convert the given percentage to a fraction.
    interest /= 100
    # Repayment period may be given in months or years; calculation needs months.
    time_unit = input("Would you like to indicate your repayment period as 'years' or 'months'?\n")
    if (time_unit.lower() == "years") :
        time = float(input("Over how many years would you like to repay your bond?\n"))
        time *= 12
    elif (time_unit.lower() == "months") :
        time = float(input("Over how many months would you like to repay your bond?\n"))
    else :
        sys.exit("Your input matches neither 'years' or 'months'.")
    # Amortised monthly installment: (i/12 * P) / (1 - (1 + i/12)^-n).
    monthly_installment = ((interest / 12) * house_value) / (1 - math.pow(1 + (interest / 12), -time))
    # CONSISTENCY FIX: format the installment to 2 decimals like every other
    # Rand amount printed by this program (the original printed a raw float).
    print(f"Your monthly bond repayment on a R{house_value:.2f} house, over the course of {time: .0f} months, is\nR{monthly_installment:.2f}")
else :
    sys.exit("Your input matches neither 'investment' or 'bond'.")
|
"""
Homework 8 max
"""
# if-block
if 1 == 1:
x = 123
print("global x =", x)
if x == 123:
print("x is a global variable")
else:
print("x is not a global variable")
# for-block
for i in (1,2,3):
pass
if i == 3:
print("i is a global variable")
else:
print("i is not a global variable")
|
import itertools
import sys
import time
import unittest
import numpy as np
import pandapower as pp
import pandas as pd
from lib.data_utils import indices_to_hot, hot_to_indices
from lib.dc_opf import (
StandardDCOPF,
LineSwitchingDCOPF,
TopologyOptimizationDCOPF,
MultistepTopologyDCOPF,
GridDCOPF,
load_case,
StandardParameters,
LineSwitchingParameters,
SinglestepTopologyParameters,
MultistepTopologyParameters,
ForecastsPlain,
)
class TestStandardDCOPF(unittest.TestCase):
    """
    Test standard DC-OPF implementation.
    """

    @classmethod
    def setUpClass(cls):
        print("\nStandard DC-OPF tests.\n")

    @staticmethod
    def _build_model(case_name):
        """Load ``case_name`` and wrap it in a StandardDCOPF model (tol=1e-9).

        DRY helper: the original repeated this construction verbatim in every
        test method below.
        """
        case = load_case(case_name)
        grid = GridDCOPF(
            case, base_unit_v=case.base_unit_v, base_unit_p=case.base_unit_p
        )
        params = StandardParameters(tol=1e-9)
        return StandardDCOPF(
            f"{case.name} Standard DC OPF",
            grid=grid,
            grid_backend=case.grid_backend,
            base_unit_p=case.base_unit_p,
            base_unit_v=case.base_unit_v,
            params=params,
        )

    def runner_opf(self, model, n_tests=20, eps=1e-4, verbose=False):
        """Solve ``model`` ``n_tests`` times with seeded random generator
        costs and assert every result table agrees with the backend within
        ``eps`` (loadings within ``100 * eps``, i.e. percent units)."""
        conditions = list()
        for i in range(n_tests):
            np.random.seed(i)  # reproducible random costs per iteration
            model.gen["cost_pu"] = np.random.uniform(
                1.0, 5.0, (model.grid.gen.shape[0],)
            )
            model.build_model()
            result = model.solve_and_compare(verbose=verbose)
            conditions.append(
                {
                    "cost": np.less_equal(result["res_cost"]["diff"], eps).all(),
                    "bus": np.less_equal(result["res_bus"]["diff"], eps).all(),
                    "line": np.less_equal(result["res_line"]["diff"], eps).all(),
                    "gen": np.less_equal(result["res_gen"]["diff"], eps).all(),
                    "load": np.less_equal(result["res_load"]["diff"], eps).all(),
                    "ext_grid": np.less_equal(
                        result["res_ext_grid"]["diff"], eps
                    ).all(),
                    "trafo": np.less_equal(result["res_trafo"]["diff"], eps).all(),
                    "line_loading": np.less_equal(
                        result["res_line"]["diff_loading"], 100 * eps
                    ).all(),
                    "trafo_loading": np.less_equal(
                        result["res_trafo"]["diff_loading"], 100 * eps
                    ).all(),
                }
            )
        conditions = pd.DataFrame(conditions)
        conditions["passed"] = np.all(conditions.values, axis=-1)

        print(f"\n\n{model.name}\n")
        print(conditions.to_string())
        time.sleep(0.1)

        # Test DC Power Flow
        self.assertTrue(conditions["passed"].values.all())

    def test_case3(self):
        self.runner_opf(self._build_model("case3"), verbose=False)

    def test_case4(self):
        self.runner_opf(self._build_model("case4"), verbose=False)

    def test_case6(self):
        self.runner_opf(self._build_model("case6"), verbose=False)

    def test_case3_by_value(self):
        """
        Test for power flow computation.
        """
        model = self._build_model("case3")
        model.build_model()
        result = model.solve(verbose=False)
        model.print_results()
        time.sleep(0.1)

        # Test DC Power Flow against the known bus angle solution.
        self.assertTrue(
            np.equal(
                result["res_bus"]["delta_pu"].values,
                np.array([0.0, -0.250, -0.375, 0.0, 0.0, 0.0]),
            ).all()
        )

    def test_rte_case5(self):
        self.runner_opf(self._build_model("rte_case5_example"), verbose=False)

    def test_l2rpn2019(self):
        self.runner_opf(self._build_model("l2rpn_2019"), verbose=False)

    # NOTE: the original commented-out test_l2rpn2020 (Windows-only
    # l2rpn_wcci_2020 case, eps=1e-3) was removed; restore from version
    # control if that case becomes testable on this platform.
class TestLineSwitchingDCOPF(unittest.TestCase):
    """
    Test DC-OPF with line status switching implementation.

    Each test solves the MIP line-switching model, then brute-forces every
    admissible line-status combination with the backend power-flow solver
    and asserts that the MIP solution is among the brute-force optima
    (within the reported optimality gap).
    """
    @classmethod
    def setUpClass(cls):
        print("\nDC-OPF with line switching tests.\n")
    def runner_opf_line_switching(
        self,
        model,
        grid,
        verbose=False,
    ):
        # Fixed seed so the randomized generator costs are reproducible.
        np.random.seed(0)
        model.gen["cost_pu"] = np.random.uniform(1.0, 5.0, (model.grid.gen.shape[0],))
        model.build_model()
        if verbose:
            model.print_model()
        """
        BACKEND BRUTE FORCE.
        """
        # Construct all possible configurations
        line_statuses = list()
        for i in range(
            model.params.n_max_line_status_changed + 1
        ):  # Number of line disconnection 0, 1, ..., n
            # Each status vector is True for in-service lines; disconnect
            # every i-combination of lines.
            line_statuses.extend(
                [
                    ~indices_to_hot(
                        list(line_status),
                        length=grid.line.shape[0],
                        dtype=np.bool,  # NOTE(review): np.bool is removed in NumPy>=1.24
                    )
                    for line_status in itertools.combinations(grid.line.index, i)
                ]
            )
        results_backend = pd.DataFrame(
            columns=["status", "objective", "loads_p", "generators_p", "valid"]
        )
        # Solve the backend power flow for every candidate configuration and
        # recompute the model's objective from the backend results.
        for idx, status in enumerate(line_statuses):
            model.grid_backend.line["in_service"] = status[~grid.line.trafo]
            model.grid_backend.trafo["in_service"] = status[grid.line.trafo]
            result_backend = model.solve_backend()
            if (
                model.params.gen_cost
                and model.params.line_margin
                and model.params.solver_name != "glpk"
            ):
                # Generation cost plus quadratic line-loading penalty.
                objective = (
                    result_backend["res_cost"]
                    + np.square(
                        result_backend["res_line"]["p_pu"] / model.line["max_p_pu"]
                    ).sum()
                )
            elif model.params.line_margin and model.params.solver_name != "glpk":
                objective = np.square(
                    result_backend["res_line"]["p_pu"] / model.line["max_p_pu"]
                ).sum()
            else:
                objective = result_backend["res_cost"]
            loads_p = grid.load["p_pu"].sum()
            generators_p = result_backend["res_gen"]["p_pu"].sum()
            valid = result_backend["valid"]
            results_backend = results_backend.append(
                {
                    "status": tuple(status),
                    "objective": objective,
                    "loads_p": loads_p,
                    "generators_p": generators_p,
                    "valid": valid,
                },
                ignore_index=True,
            )
        # Solve for optimal line status configuration
        result = model.solve(verbose=verbose)
        result_status = result["res_x"]
        result_objective = result["res_cost"]
        result_gap = result["res_gap"]  # Gap for finding the optimal configuration
        if verbose:
            model.print_results()
        # Check with brute force solution
        objective_brute = results_backend["objective"][results_backend["valid"]].min()
        # Accept any valid configuration whose objective is within the MIP gap
        # of the brute-force optimum.
        hot_brute = (
            results_backend["objective"].values
            < (1 + result_gap) * objective_brute + result_gap
        )
        hot_brute = np.logical_and(hot_brute, results_backend["valid"])
        indices_brute = hot_to_indices(hot_brute)
        status_brute = results_backend["status"][indices_brute]
        match_idx = [
            idx
            for idx, line_status in zip(indices_brute, status_brute)
            if np.equal(line_status, result_status).all()
        ]
        # Compare
        results_backend["candidates"] = hot_brute
        results_backend["result_objective"] = np.nan
        results_backend["result_objective"][match_idx] = result_objective
        solution_idx = [
            idx
            for idx, line_status in zip(
                results_backend.index, results_backend["status"]
            )
            if np.equal(line_status, result_status).all()
        ]
        results_backend["solution"] = 0
        results_backend["solution"][solution_idx] = 1
        # Render status vectors as space-separated 0/1 strings for display.
        results_backend["status"] = [
            " ".join(np.array(line_status).astype(int).astype(str))
            for line_status in results_backend["status"]
        ]
        results_backend = results_backend[
            results_backend["valid"] & results_backend["candidates"]
        ]
        print(f"\n{model.name}\n")
        print(f"Solver: {result_objective}")
        print(results_backend.to_string())
        time.sleep(0.1)
        # Pass iff the MIP solution matches one of the brute-force optima.
        self.assertTrue(bool(match_idx))
    def test_case3_line_switching(self):
        # Allow up to 2 simultaneous line disconnections.
        case = load_case("case3")
        grid = GridDCOPF(
            case, base_unit_v=case.base_unit_v, base_unit_p=case.base_unit_p
        )
        params = LineSwitchingParameters(n_max_line_status_changed=2, tol=1e-9)
        model = LineSwitchingDCOPF(
            f"{case.name} DC OPF Line Switching",
            grid=grid,
            grid_backend=case.grid_backend,
            base_unit_p=case.base_unit_p,
            base_unit_v=case.base_unit_v,
            params=params,
        )
        self.runner_opf_line_switching(model, grid, verbose=False)
    def test_case4_line_switching(self):
        # Allow up to 2 simultaneous line disconnections.
        case = load_case("case4")
        grid = GridDCOPF(
            case, base_unit_v=case.base_unit_v, base_unit_p=case.base_unit_p
        )
        params = LineSwitchingParameters(n_max_line_status_changed=2, tol=1e-9)
        model = LineSwitchingDCOPF(
            f"{case.name} DC OPF Line Switching",
            grid=grid,
            grid_backend=case.grid_backend,
            base_unit_p=case.base_unit_p,
            base_unit_v=case.base_unit_v,
            params=params,
        )
        self.runner_opf_line_switching(model, grid, verbose=False)
    def test_case6_line_switching(self):
        # Allow at most 1 line disconnection.
        case = load_case("case6")
        grid = GridDCOPF(
            case, base_unit_v=case.base_unit_v, base_unit_p=case.base_unit_p
        )
        params = LineSwitchingParameters(n_max_line_status_changed=1, tol=1e-9)
        model = LineSwitchingDCOPF(
            f"{case.name} DC OPF Line Switching",
            grid=grid,
            grid_backend=case.grid_backend,
            base_unit_p=case.base_unit_p,
            base_unit_v=case.base_unit_v,
            params=params,
        )
        self.runner_opf_line_switching(model, grid, verbose=False)
    def test_rte_case5_line_switching(self):
        # Allow up to 3 simultaneous line disconnections.
        case = load_case("rte_case5_example")
        grid = GridDCOPF(
            case, base_unit_v=case.base_unit_v, base_unit_p=case.base_unit_p
        )
        params = LineSwitchingParameters(n_max_line_status_changed=3, tol=1e-9)
        model = LineSwitchingDCOPF(
            f"{case.name} DC OPF Line Switching",
            grid=grid,
            grid_backend=case.grid_backend,
            base_unit_p=case.base_unit_p,
            base_unit_v=case.base_unit_v,
            params=params,
        )
        self.runner_opf_line_switching(model, grid, verbose=False)
    def test_l2rpn2019_line_switching(self):
        # Allow up to 2 simultaneous line disconnections.
        case = load_case("l2rpn_2019")
        grid = GridDCOPF(
            case, base_unit_v=case.base_unit_v, base_unit_p=case.base_unit_p
        )
        params = LineSwitchingParameters(n_max_line_status_changed=2, tol=1e-9)
        model = LineSwitchingDCOPF(
            f"{case.name} DC OPF Line Switching",
            grid=grid,
            grid_backend=case.grid_backend,
            base_unit_p=case.base_unit_p,
            base_unit_v=case.base_unit_v,
            params=params,
        )
        self.runner_opf_line_switching(model, grid, verbose=False)
    # def test_l2rpn2020_line_switching(self):
    #     if sys.platform != "win32":
    #         print("L2RPN 2020 not available.")
    #         self.assertTrue(True)
    #         return
    #
    #     case = load_case("l2rpn_wcci_2020")
    #     grid = GridDCOPF(
    #         case, base_unit_v=case.base_unit_v, base_unit_p=case.base_unit_p
    #     )
    #
    #     params = LineSwitchingParameters(n_max_line_status_changed=1, tol=1e-9)
    #     model = LineSwitchingDCOPF(
    #         f"{case.name} DC OPF Line Switching",
    #         grid=grid,
    #         grid_backend=case.grid_backend,
    #         base_unit_p=case.base_unit_p,
    #         base_unit_v=case.base_unit_v,
    #         params=params,
    #     )
    #
    #     self.runner_opf_line_switching(model, grid, verbose=False)
class TestTopologyOptimizationDCOPF(unittest.TestCase):
    """
    Test DC-OPF with topology optimization implementation.

    Each test solves the MIP topology-optimization model, then brute-forces
    every valid binary topology vector with the backend power-flow solver
    and asserts that the MIP solution matches one of the (near-)optimal
    brute-force configurations.
    """
    @classmethod
    def setUpClass(cls):
        print("\nDC-OPF with topology optimization tests.\n")
    @staticmethod
    def topology_to_parts(x_topology, n_gen, n_load, n_line):
        # Split the flat binary topology vector into its segments:
        # [x_gen | x_load | x_line_or_1 | x_line_or_2 | x_line_ex_1 | x_line_ex_2]
        x_gen = x_topology[:n_gen]
        x_load = x_topology[n_gen : (n_gen + n_load)]
        x_line_or_1 = x_topology[(n_gen + n_load) : (n_gen + n_load + n_line)]
        x_line_or_2 = x_topology[
            (n_gen + n_load + n_line) : (n_gen + n_load + 2 * n_line)
        ]
        x_line_ex_1 = x_topology[
            (n_gen + n_load + 2 * n_line) : (n_gen + n_load + 3 * n_line)
        ]
        x_line_ex_2 = x_topology[(n_gen + n_load + 3 * n_line) :]
        return x_gen, x_load, x_line_or_1, x_line_or_2, x_line_ex_1, x_line_ex_2
    def is_valid_topology(self, x_topology, n_gen, n_load, n_line):
        # A topology is valid when each line end sits on at most one bus and
        # both ends agree on whether the line is connected at all.
        (
            x_gen,
            x_load,
            x_line_or_1,
            x_line_or_2,
            x_line_ex_1,
            x_line_ex_2,
        ) = self.topology_to_parts(x_topology, n_gen, n_load, n_line)
        cond_line_or = np.less_equal(x_line_or_1 + x_line_or_2, 1).all()
        cond_line_ex = np.less_equal(x_line_ex_1 + x_line_ex_2, 1).all()
        cond_line_disconnected = np.equal(
            x_line_or_1 + x_line_or_2, x_line_ex_1 + x_line_ex_2
        ).all()
        return cond_line_or and cond_line_ex and cond_line_disconnected
    def runner_opf_topology_optimization(
        self,
        model,
        verbose=False,
    ):
        # Fixed seed so the randomized generator costs are reproducible.
        np.random.seed(0)
        model.gen["cost_pu"] = np.random.uniform(1.0, 5.0, (model.grid.gen.shape[0],))
        model.build_model()
        if verbose:
            model.print_model()
        if model.params.solver_name == "glpk":
            print("Solver does not support bilinear or quadratic terms.")
            self.assertTrue(True)
            return
        result = model.solve(verbose=verbose)
        result_x = result["res_x"]
        result_objective = result["res_cost"]
        n_gen = model.grid.gen.shape[0]
        n_load = model.grid.load.shape[0]
        n_line = model.grid.line.shape[0]
        """
        BACKEND BRUTE FORCE.
        """
        results_backend = []
        # Enumerate every binary topology vector; only valid ones are solved.
        for idx, x_topology in enumerate(
            itertools.product([0, 1], repeat=n_gen + n_load + 4 * n_line)
        ):
            # x_topology = [x_gen, x_load, x_line_or_1, x_line_or_2, x_line_ex_1, x_line_ex_2]
            # NOTE(review): np.int/np.bool below are removed in NumPy>=1.24.
            x_topology = np.array(x_topology, dtype=np.int)
            # Initialization of variables
            (
                x_gen,
                x_load,
                x_line_or_1,
                x_line_or_2,
                x_line_ex_1,
                x_line_ex_2,
            ) = self.topology_to_parts(x_topology, n_gen, n_load, n_line)
            # Check valid topology
            if self.is_valid_topology(x_topology, n_gen, n_load, n_line):
                # Generator bus: bit set -> sub-bus 2, else sub-bus 1.
                gen_sub_bus = np.ones_like(x_gen, dtype=np.int)
                gen_sub_bus[x_gen.astype(np.bool)] = 2
                gen_bus = [
                    model.grid.sub["bus"][sub_id][sub_bus - 1]
                    for sub_bus, sub_id in zip(gen_sub_bus, model.grid.gen["sub"])
                ]
                # Load bus
                load_sub_bus = np.ones_like(x_load, dtype=np.int)
                load_sub_bus[x_load.astype(np.bool)] = 2
                load_bus = [
                    model.grid.sub["bus"][sub_id][sub_bus - 1]
                    for sub_bus, sub_id in zip(load_sub_bus, model.grid.load["sub"])
                ]
                # Power line status: in service iff both ends are on a bus.
                line_status = np.logical_and(
                    np.logical_or(x_line_or_1, x_line_or_2),
                    np.logical_or(x_line_ex_1, x_line_ex_2),
                )
                # Power line - Origin bus (-1 = disconnected; fall back to sub-bus 1)
                line_or_sub_bus = -np.ones_like(x_line_or_1, dtype=np.int)
                line_or_sub_bus[x_line_or_1.astype(np.bool)] = 1
                line_or_sub_bus[x_line_or_2.astype(np.bool)] = 2
                line_or_bus = np.array(
                    [
                        model.grid.sub["bus"][sub_id][sub_bus - 1]
                        if sub_bus != -1
                        else model.grid.sub["bus"][sub_id][0]
                        for sub_bus, sub_id in zip(
                            line_or_sub_bus, model.grid.line["sub_or"]
                        )
                    ]
                )
                # Power line - Extremity bus
                line_ex_sub_bus = -np.ones_like(x_line_ex_1, dtype=np.int)
                line_ex_sub_bus[x_line_ex_1.astype(np.bool)] = 1
                line_ex_sub_bus[x_line_ex_2.astype(np.bool)] = 2
                line_ex_bus = np.array(
                    [
                        model.grid.sub["bus"][sub_id][sub_bus - 1]
                        if sub_bus != -1
                        else model.grid.sub["bus"][sub_id][0]
                        for sub_bus, sub_id in zip(
                            line_ex_sub_bus, model.grid.line["sub_ex"]
                        )
                    ]
                )
                # Construct grid for backend
                grid_tmp = model.grid_backend.deepcopy()
                grid_tmp.gen["bus"] = gen_bus
                grid_tmp.load["bus"] = load_bus
                grid_tmp.line["in_service"] = line_status[~model.grid.line.trafo]
                grid_tmp.line["from_bus"] = line_or_bus[~model.grid.line.trafo]
                grid_tmp.line["to_bus"] = line_ex_bus[~model.grid.line.trafo]
                grid_tmp.trafo["in_service"] = line_status[model.grid.line.trafo]
                grid_tmp.trafo["hv_bus"] = line_or_bus[model.grid.line.trafo]
                grid_tmp.trafo["lv_bus"] = line_ex_bus[model.grid.line.trafo]
                # Mirror the randomized linear generator costs in the backend.
                for gen_id in grid_tmp.gen.index.values:
                    pp.create_poly_cost(
                        grid_tmp,
                        gen_id,
                        "gen",
                        cp1_eur_per_mw=model.convert_per_unit_to_mw(
                            model.grid.gen["cost_pu"][gen_id]
                        ),
                    )
                print(f"{len(results_backend) + 1}/{idx}: Running DC-OPF ...")
                try:
                    pp.rundcopp(grid_tmp)
                    valid = True
                except (pp.optimal_powerflow.OPFNotConverged, IndexError) as e:
                    # Non-converged configurations are skipped entirely.
                    # NOTE(review): res_cost is assigned just before `continue`,
                    # so that assignment is effectively dead — confirm intent.
                    grid_tmp.res_cost = 0.0
                    print(e)
                    continue
                load_p = model.convert_mw_to_per_unit(grid_tmp.load["p_mw"].sum())
                gen_p = model.convert_mw_to_per_unit(grid_tmp.res_gen["p_mw"].sum())
                # Require generation/load balance for a configuration to count.
                valid = valid and np.abs(gen_p - load_p) < 1e-6
                if (
                    model.params.obj_gen_cost
                    and model.params.obj_reward_quad
                    and model.params.solver_name != "glpk"
                ):
                    # Generation cost plus quadratic line-loading penalty.
                    objective = (
                        grid_tmp.res_cost
                        + np.square(
                            model.convert_mw_to_per_unit(grid_tmp.res_line["p_from_mw"])
                            / model.grid.line["max_p_pu"]
                        ).sum()
                    )
                elif (
                    model.params.obj_reward_quad and model.params.solver_name != "glpk"
                ):
                    objective = +np.square(
                        model.convert_mw_to_per_unit(grid_tmp.res_line["p_from_mw"])
                        / model.grid.line["max_p_pu"]
                    ).sum()
                else:
                    objective = grid_tmp.res_cost
                results_backend.append(
                    {
                        "x": np.concatenate(
                            (
                                x_gen,
                                x_load,
                                x_line_or_1,
                                x_line_or_2,
                                x_line_ex_1,
                                x_line_ex_2,
                            )
                        ),
                        "gen_bus": gen_bus,
                        "load_bus": load_bus,
                        "line_or_bus": line_or_bus,
                        "line_ex_bus": line_ex_bus,
                        "line_status": line_status.astype(int),
                        "valid": valid,
                        "objective": np.round(objective, 3),
                        "load_p": load_p,
                        "gen_p": np.round(gen_p, 3),
                    }
                )
        results_backend = pd.DataFrame(results_backend)
        # Check with brute force solution
        objective_brute = results_backend["objective"][results_backend["valid"]].min()
        # Accept configurations within an absolute 0.05 of the optimum.
        hot_brute = np.abs(results_backend["objective"].values - objective_brute) < 0.05
        indices_brute = hot_to_indices(hot_brute)
        status_brute = results_backend["x"][indices_brute]
        match_idx = [
            idx
            for idx, line_status in zip(indices_brute, status_brute)
            if np.equal(line_status, result_x).all()
        ]
        # Compare
        results_backend["candidates"] = hot_brute
        results_backend["result_objective"] = np.nan
        results_backend["result_objective"][match_idx] = np.round(result_objective, 3)
        print(f"\n{model.name}\n")
        print(f"Solver: {result_objective}")
        print(
            results_backend[
                [
                    "gen_bus",
                    "load_bus",
                    "line_or_bus",
                    "line_ex_bus",
                    "line_status",
                    "load_p",
                    "gen_p",
                    "valid",
                    "candidates",
                    "objective",
                    "result_objective",
                ]
            ][results_backend["candidates"] & results_backend["valid"]].to_string()
        )
        time.sleep(0.1)
        # Pass iff the MIP topology matches one of the brute-force optima.
        self.assertTrue(bool(match_idx))
    def test_case3_topology(self):
        """Single-step topology optimization on case3."""
        case = load_case("case3")
        grid = GridDCOPF(
            case, base_unit_v=case.base_unit_v, base_unit_p=case.base_unit_p
        )
        params = SinglestepTopologyParameters(
            obj_gen_cost=True,
            obj_reward_lin=False,
            obj_reward_quad=True,
            obj_reward_max=False,
            obj_lin_gen_penalty=False,
            obj_quad_gen_penalty=False,
        )
        model = TopologyOptimizationDCOPF(
            f"{case.name} DC OPF Topology Optimization",
            grid=grid,
            grid_backend=case.grid_backend,
            base_unit_p=case.base_unit_p,
            base_unit_v=case.base_unit_v,
            params=params,
        )
        self.runner_opf_topology_optimization(model, verbose=False)
    def test_case3_topology_multistep(self):
        """Multistep (horizon=1) topology optimization on case3."""
        case = load_case("case3")
        grid = GridDCOPF(
            case, base_unit_v=case.base_unit_v, base_unit_p=case.base_unit_p
        )
        params = MultistepTopologyParameters(
            horizon=1,
            obj_gen_cost=True,
            obj_reward_lin=False,
            obj_reward_quad=True,
            obj_reward_max=False,
            obj_lin_gen_penalty=False,
            obj_quad_gen_penalty=False,
        )
        forecasts = ForecastsPlain(env=case.env, horizon=params.horizon)
        model = MultistepTopologyDCOPF(
            f"{case.name} DC OPF Topology Optimization",
            grid=grid,
            forecasts=forecasts,
            grid_backend=case.grid_backend,
            base_unit_p=case.base_unit_p,
            base_unit_v=case.base_unit_v,
            params=params,
        )
        self.runner_opf_topology_optimization(model, verbose=False)
|
# Sample : https://qiita.com/Kosuke-Szk/items/eea6457616b6180c82d3
REPLY_ENDPOINT = 'https://api.line.me/v2/bot/message/reply'
def post_text(reply_token, text):
    """Reply to a LINE webhook event with a single text message.

    Posts a reply-message request to the LINE Messaging API using the
    given reply token. The Authorization header carries a placeholder
    that must be replaced with a real channel access token.
    """
    headers = {
        "Content-Type": "application/json",
        "Authorization": "Bearer {ENTER_ACCESS_TOKEN}"
    }
    body = {
        "replyToken": reply_token,
        "messages": [
            {"type": "text", "text": text}
        ]
    }
    requests.post(REPLY_ENDPOINT, headers=headers, data=json.dumps(body))
|
from numpy import exp, array, random, dot
import numpy as np
class randomise():
    """Weight container for one fully connected layer.

    Draws a (numberInputs x numberNeurons) weight matrix uniformly
    distributed over [-1, 1).
    """
    def __init__(self, numberNeurons, numberInputs):
        # random.random yields values in [0, 1); scale and shift to [-1, 1).
        raw = random.random((numberInputs, numberNeurons))
        self.weights = raw * 2 - 1
class NeuralNetwork():
def __init__(self, lowerLayer, upperLayer):
self.lowerLayer = lowerLayer
self.upperLayer = upperLayer
# The Sigmoid function, which describes an S shaped curve.
# We pass the weighted sum of the inputs through this function to
# normalise them between 0 and 1.
def __sigmoid(self, x):
return 1 / (1 + exp(-x))
# The derivative of the Sigmoid function.
# This is the gradient of the Sigmoid curve.
# It indicates how confident we are about the existing weight.
def __sigmoid_derivative(self, x):
return x * (1 - x)
def softmax(self, x):
return np.exp(x) / np.sum(top)
# We train the neural network through a process of trial and error.
# Adjusting the learning rate each time.
def train(self, training_set_inputs, training_set_outputs, maxEpochs,test):
error_array = np.zeros([])
for e in range(maxEpochs):
# Pass the training set through our neural network
output_from_layer_1, output_from_layer_2 = self.predict(training_set_inputs)
# Calculate the error for the upper layer (The difference between the desired output
# and the predicted output).
upperLayer_error = training_set_outputs - output_from_layer_2
upperLayer_delta = upperLayer_error * self.__sigmoid_derivative(output_from_layer_2)
# Calculate the error for lower layer (By looking at the weights in the upper layer,
# we can determine by how much lower layer contributed to the error in upper layer).
lowerLayer_error = upperLayer_delta.dot(self.upperLayer.weights.T)
lowerLayer_delta = lowerLayer_error * self.__sigmoid_derivative(output_from_layer_1)
# Calculate how much to adjust the weights by
lowerLayer_adjustment = training_set_inputs.T.dot(lowerLayer_delta)
upperLayer_adjustment = output_from_layer_1.T.dot(upperLayer_delta)
# Adjust the weights.
self.lowerLayer.weights += lowerLayer_adjustment
self.upperLayer.weights += upperLayer_adjustment
error_array = np.append(error_array,upperLayer_error[0][0])
np.savetxt("epoch_error_"+test+".csv", error_array, delimiter=",")
def predict(self, inputs):
output_from_lowerLayer = self.__sigmoid(dot(inputs, self.lowerLayer.weights))
output_from_upperLayer = self.__sigmoid(dot(output_from_lowerLayer, self.upperLayer.weights))
return output_from_lowerLayer, output_from_upperLayer
if __name__ == "__main__":
    # Setting a random seed for reproducible weight initialisation.
    np.random.seed(1234)
    # Setting the number of epochs
    maxEpochs = 1000
    # Number of Inputs
    NI = 2
    # Number of Hidden Units
    NH = 3
    # Number of Outputs
    NO = 1
    # Create the lower layer with NH neurons, each with NI inputs
    lowerLayer = randomise(NH, NI)
    # Create the higher layer with NO neurons, each with NH inputs)
    upperLayer = randomise(NO, NH)
    # Combine the lower and higher layers to create the neural network
    neuralNetwork = NeuralNetwork(lowerLayer, upperLayer)
    # The training set: the XOR truth table.
    training_set_inputs = array([[0, 0], [0, 1], [1, 0], [1, 1]])
    training_set_outputs = array([[0, 1, 1, 0]]).T
    # NOTE(review): the network is constructed a second time with the same
    # layer objects — redundant but harmless.
    neuralNetwork = NeuralNetwork(lowerLayer, upperLayer)
    # Train the neural network using the training set.
    neuralNetwork.train(training_set_inputs, training_set_outputs, maxEpochs,test='test1')
    print("---------------------------------------------------------")
    print("Test 1:")
    print("Input of [X,Y] has an output of [Z]")
    hidden_state, output = neuralNetwork.predict(array([0, 0]))
    print("[0, 0] -> ", output)
    hidden_state, output = neuralNetwork.predict(array([0, 1]))
    print("[0, 1] -> ", output)
    hidden_state, output = neuralNetwork.predict(array([1, 0]))
    print("[1, 0] -> ", output)
    hidden_state, output = neuralNetwork.predict(array([1, 1]))
    print("[1, 1] -> ", output)
    print("-------------------------- \n Test 2")
    np.random.seed(12)
    # Setting the number of epochs
    maxEpochs = 1000
    # Number of Inputs
    NI = 4
    # Number of Hidden Units
    NH = 5
    # Number of Outputs
    NO = 1
    # Create the lower layer with NH neurons, each with NI inputs
    lowerLayer = randomise(NH, NI)
    # Create the higher layer with NO neurons, each with NH inputs)
    upperLayer = randomise(NO, NH)
    # Combine the lower and higher layers to create the neural network
    neuralNetwork = NeuralNetwork(lowerLayer, upperLayer)
    # Creating the random test set
    #low=-1.0, high=1.0, size=(200,4)
    RandomVectors = np.random.rand(200,4)
    def SinVector(RandomVectors):
        # Target: sine of the alternating-sign sum of the four components.
        output = RandomVectors[0] - RandomVectors[1] + RandomVectors[2] - RandomVectors[3]
        return np.sin(output)
    RandomVectorsOutput = np.apply_along_axis(SinVector, 1, RandomVectors)
    RandomVectorsOutput = np.reshape(RandomVectorsOutput, (200,1))
    # The training set.
    training_set_inputs = RandomVectors[0:150]
    training_set_outputs = RandomVectorsOutput[0:150]
    # The test set.
    test_set_inputs = RandomVectors[-50:]
    test_set_outputs = RandomVectorsOutput[-50:]
    neuralNetwork.train(training_set_inputs, training_set_outputs, maxEpochs,test='test2')
    error = np.zeros((50,2))
    # NOTE(review): the range starts at 1 and stops one short, so the first
    # and last test samples are never evaluated — confirm whether intended.
    for x in range(1,test_set_outputs.size - 1):
        print("--------")
        output = neuralNetwork.predict(test_set_inputs[x])
        print("The predicted output is {} \n The actual output is {}".format(output[1][0],test_set_outputs[x][0]))
        error[x,0] = output[1][0]
        error[x,1] = test_set_outputs[x][0]
    np.savetxt("PredictvsOutputTest2.csv", error, delimiter=",")
    print("-------------------------- \n Test 3")
"""
# Setting the number of epochs
maxEpochs = 2500
# Number of Inputs
NI = 16
# Number of Hidden Units
NH = 10
# Number of Outputs
NO = 26
# Create the lower layer with NH neurons, each with NI inputs
lowerLayer = randomise(NH, NI)
# Create the higher layer with NO neurons, each with NH inputs)
upperLayer = randomise(NO, NH)
# Combine the lower and higher layers to create the neural network
neuralNetwork = NeuralNetwork(lowerLayer, upperLayer)
# Reading and formatting the data.
def scale(X, x_min, x_max):
nom = (X - X.min(axis=0)) * (x_max - x_min)
denom = X.max(axis=0) - X.min(axis=0)
denom[denom == 0] = 1
return x_min + nom / denom
data_inputs = np.loadtxt("letter-recognition.data", delimiter=",",
usecols=(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16))
data_outputs = np.loadtxt("letter-recognition.data", delimiter=",", usecols=0, dtype=np.str)
data_outputs = np.reshape(data_outputs, (20000, 1))
output = np.zeros((20000, 1), dtype=np.longdouble)
for x in range(0, data_outputs.size -1):
output[x] = ord(data_outputs[x][0])
# The training set.
output = scale(output, 0, 1)
training_set_inputs = data_inputs[0:14999]
training_set_outputs = output[0:14999]
# The test set.
test_set_inputs =data_inputs[-5000:]
test_set_outputs = output[-5000:]
neuralNetwork.train(training_set_inputs, training_set_outputs, maxEpochs, test = 'test3')
for x in range(0,test_set_outputs.size - 1):
print("--------")
output = neuralNetwork.predict(test_set_inputs[x])
print("The predicted output is {} \n The actual output is {}".format(output[1:x],test_set_outputs[x]))
""" |
import base64, ast
import pandas as pd
def decode_game_state(state):
    """Decode a base64-encoded game state and return mine coordinates.

    The payload is a base64 string of a Python-literal dict carrying a
    'gridObj' key. The first row and the first cell of each remaining row
    are discarded; cells holding a negative value are mines. Coordinates
    are returned as 1-based (row, column) tuples.
    """
    raw = base64.b64decode(state).decode("UTF-8")
    payload = ast.literal_eval(raw)
    frame = pd.DataFrame(payload)
    grid = pd.DataFrame(frame['gridObj'])
    grid = grid.drop(grid.index[0])
    grid.reset_index(inplace=True)
    # Strip the leading header cell from every remaining row.
    for row_cells in grid['gridObj']:
        del row_cells[0]
    # Collect the 1-based coordinates of every negative (mine) cell.
    return [
        (row_idx + 1, col_idx + 1)
        for row_idx, row_cells in enumerate(grid['gridObj'])
        for col_idx, cell in enumerate(row_cells)
        if cell < 0
    ]
__author__ = 'Markus Prim'
def rgb_to_hex_rep(r, g, b):
    """Return the HTML hex colour string '#rrggbb' for integer channel
    values in range(256), using lower-case hex digits.
    """
    # Format-string replaces the original hex()[2:]/zfill(2) chain and
    # produces identical output for all values 0-255.
    return "#{:02x}{:02x}{:02x}".format(r, g, b)
|
import logging
import os
LOG = logging.getLogger()
INFO = 'INFO'
DEBUG = 'DEBUG'
WARNING = 'WARNING'
FATAL = 'FATAL'
# lp = r'\('
# rp = r'\)'
# lb = r'\['
# rb = r'\]'
#
# lbp = lb + lp
# rbp = rb + rp
#
# LOG_LEVEL = r'[A-Z]+'
# LOG_TID = r'\d+'
# LOG_TIME = r'.*'
# LOG_FILE_LINE = r'.*:\d+'
# LOG_FUNC_NAME = r'\w+'
# LOG_MSG = r'.*'
#
# LOG_START = re.compile(r'^' + LOG_LEVEL + lb)
# LOG_FIELD = re.compile(
# lbp + LOG_LEVEL + rbp +
# lb + r'\s*' + lp + LOG_TID + rbp +
# lbp + LOG_TIME + rbp +
# lbp + LOG_FILE_LINE + rbp +
# lbp + LOG_FUNC_NAME + rp + r'\W*' + rb +
# '\s*' + LOG_MSG
# )
# Ordered (field_name, formatter) pairs for the bracketed header fields of a
# log record; a formatter of None keeps the raw string. The order matches the
# on-disk field order consumed by LogStream.__iter__.
LOG_FIELDS = [
    ('level', None),
    ('tid', lambda x: int(x)),
    # ('time', lambda x: datetime.datetime.strptime(x, '%y-%m-%dT%H:%M:%S.%f')),
    ('time', lambda x: '20' + x.replace('T', ' ')),  # '20' prefix expands the 2-digit year
    ('fileline', None),
    ('function', None),
    ('msg', None)
]
class LogStream(object):
    """Iterator over the structured records of a single log file.

    Each yielded record is a list of parsed header fields (see LOG_FIELDS)
    with the possibly multi-line message text appended as its last element.
    Lines that do not start with '[' are treated as continuations of the
    pending record's message.

    NOTE(review): the file is opened in binary mode ('rb') but lines are
    compared/split with str literals ('[', ']') below; under Python 3 those
    operations fail or never match on bytes — this appears to target
    Python 2. Confirm before porting.
    """
    def __init__(self, filename):
        # Path of the log file and its node name (basename minus extension).
        self.filename = filename
        self.node = os.path.split(filename)[-1].rpartition('.')[0]
        self._fp = None
        self._msg_pending = False  # a header was parsed; message lines may follow
        self._msg_buf = []         # lines of the in-progress message
        self._log_buf = []         # parsed fields of the in-progress record
        self.open()
    def open(self):
        # (Re)open the underlying file if it is not already open.
        if not self._fp or self._fp.closed:
            self._fp = open(self.filename, 'rb')
    def reload(self):
        # NOTE(review): opens then immediately closes; a later open() restarts
        # reading from the beginning. Looks suspicious — confirm intent.
        self.open()
        self.close()
    def close(self):
        self._fp.close()
    def __del__(self):
        self._fp.close()
    def __iter__(self):
        for line in self._fp:
            if not line:
                continue
            if line[0] == '[':
                # A new record begins: flush any fully buffered record first.
                if self._log_buf:
                    msg = '\n'.join(self._msg_buf)
                    log = self._log_buf
                    log.append(msg)
                    self._log_buf = []
                    self._msg_buf = []
                    self._msg_pending = False
                    yield log
                # Header layout: [level][ tid][time][fileline][function] msg
                fields = line.split(']', 5)
                if len(fields) < 5:
                    LOG.error("Unparsed line: " + line)
                    continue
                for name, formatter in LOG_FIELDS:
                    f = fields.pop(0).lstrip('[').strip()
                    if formatter:
                        try:
                            f = formatter(f)
                        except Exception as e:
                            # Drop the partially parsed record on formatter failure.
                            # NOTE(review): this `continue` advances the inner
                            # field loop, not the line loop — confirm intent.
                            self._log_buf = []
                            self._msg_buf = []
                            self._msg_pending = False
                            LOG.error("Error: %s\n" % e + "Unparsed line: " + line)
                            continue
                    if name == 'msg':
                        self._msg_buf = [f]
                    else:
                        self._log_buf.append(f)
                self._msg_pending = True
            else:
                # Continuation line: append to the pending record's message.
                if not self._log_buf or not self._msg_pending:
                    LOG.error("Unparsed line: " + line)
                    continue
                self._msg_buf.append(line)
        # End of file: flush the final buffered record, if any.
        if self._log_buf:
            self._log_buf.append('\n'.join(self._msg_buf))
            yield self._log_buf
def __process(l):
    """Worker entry point: parse one log file and report the longest
    message seen. Mapped over files by the process pool in __main__.
    """
    try:
        stream = LogStream(l)
        print('processing:', stream.node)
        msg_len = 0
        for record in stream:
            # The message text is always the last element of a record.
            msg_len = max(msg_len, len(record[-1]))
        print('done processing:', stream.node, 'max_msg:', msg_len)
    except Exception:
        # BUG FIX: was a bare `except:`, which also swallowed
        # KeyboardInterrupt/SystemExit; narrow to Exception while still
        # logging the full traceback.
        LOG.exception('error')
if __name__ == '__main__':
    import time
    import glob
    from multiprocessing import Pool, cpu_count
    # Parse every log file under logs/ in parallel (one worker per CPU) and
    # report aggregate parsing throughput.
    workers = cpu_count()
    pool = Pool(workers)
    files = glob.glob('logs/*.txt')
    # Total input size in bytes, used for the Mb/s figure below.
    size = sum(os.stat(i).st_size for i in files)
    t = time.time()
    pool.map(__process, files)
    dur = time.time() - t
    print('workers: ', workers)
    print('count: ', len(files))
    print('duration:', dur, 'sec')
    print('speed: ', size / dur / 1024 / 1024, 'Mb/s')
|
import hashlib
from django.core.mail import send_mail
from django.template import loader
from flask import render_template
from flask_mail import Message
from app.ext import mail
from app.models import Cart, Goods
from settings import SERVER_HOST, SERVER_PORT
def hash_str(source):
    """Return the hex SHA-512 digest of the text string `source`.

    Uses the named constructor hashlib.sha512 instead of
    hashlib.new('sha512', ...); the output is identical and the named
    constructor is the documented, faster form.
    """
    return hashlib.sha512(source.encode('utf-8')).hexdigest()
def send_email_activate(username, receive, u_token):
    """Send the account-activation e-mail for `username` to `receive`.

    Renders the Flask activation template with a link embedding `u_token`
    and sends it through flask_mail.
    """
    subject = '%s AXF Activate' % username
    recipient_list = [receive, ]
    # BUG FIX: the original had `username = username,` here — the trailing
    # comma rebound `username` to a one-element tuple, so the template
    # rendered e.g. "('bob',)" instead of "bob". The stray statement is removed.
    activate_url = 'http://{}:{}/axf/activate/?u_token={}'.format(SERVER_HOST, SERVER_PORT, u_token)
    html_message = render_template('user/activate.html', username=username, activate_url=activate_url)
    msg = Message(recipients=recipient_list, html=html_message, subject=subject)
    mail.send(msg)
def get_total_price():
    """Return the total price of all selected cart rows, formatted with
    two decimal places."""
    selected_rows = Cart.query.filter(Cart.c_is_select == True)
    # Sum quantity * unit price over every selected cart entry.
    total = sum(
        row.c_goods_num * Goods.query.get(row.c_goods_id).price
        for row in selected_rows
    )
    return "{:.2f}".format(total)
|
import matplotlib.pyplot as plt
import numpy as np
from sklearn import datasets, linear_model
from sklearn.metrics import mean_squared_error, r2_score
def visualise_dataset_sample(features, targets, num_samples=20):
    """Scatter-plot a random subset of (feature, target) pairs.

    Parameters
    ----------
    features : array of shape (n, 1) — the single feature column to plot.
    targets : array of shape (n,) — the regression targets.
    num_samples : number of distinct points to draw (capped at n).
    """
    indices = list(range(features.shape[0]))
    num_samples = min(num_samples, len(indices))
    # BUG FIX: the original sampled WITH replacement, so duplicate rows could
    # be drawn and fewer than num_samples distinct points shown.
    sample_indices = np.random.choice(indices, num_samples, replace=False)
    sample_features = features[sample_indices]
    sample_targets = targets[sample_indices]
    plt.xlabel("BMI")  # NOTE(review): caller passes column 3 — confirm it is BMI
    plt.ylabel("Progression")
    plt.scatter(sample_features, sample_targets, color='black')
    plt.show()
if __name__ == "__main__":  # BUG FIX: was "_main__", so this demo never ran
    # Load the diabetes dataset and plot a random sample of one feature.
    features_all, targets = datasets.load_diabetes(return_X_y=True)
    visualise_dataset_sample(features_all[:, np.newaxis, 3], targets)
    features = features_all[:, np.newaxis, 3]
    # Split the features into training/testing sets
    features_train = features[:-100]
    features_test = features[-100:]
    # Split the targets into training/testing sets
    targets_train = targets[:-100]
    targets_test = targets[-100:]
    # Create linear regression object and fit to training data
    regr = linear_model.LinearRegression()
    regr.fit(features_train, targets_train)
    # Predict the progression values for the test features
    predictions_test = regr.predict(features_test)
    print('Mean squared error: %.2f'
          % mean_squared_error(targets_test, predictions_test))
    # Plot test targets against the fitted regression line
    plt.scatter(features_test, targets_test, color='black')
    plt.plot(features_test, predictions_test, color='blue', linewidth=3)
    plt.xticks(())
    plt.yticks(())
    plt.show()
|
# -*- coding: utf-8 -*-
# @Time: 2020/3/28 11:48
# @Author: Rollbear
# @Filename: test_label_pick.py
# 测试附件一(三级标签)的抓取
from entity.label import LabelNode
from util.xl_read import *
import openpyxl
def main():
    """Demonstrate the two ways of loading the level-3 labels from the
    attachment spreadsheet (e1.xlsx)."""
    # Two approaches:
    # 1) Read all labels from the Excel sheet into a tree structure.
    root = read_labels("../xls/e1.xlsx")
    # 2) Read the labels from the Excel sheet into a flat list of rows.
    rows = read_labels_by_lines("../xls/e1.xlsx")
if __name__ == '__main__':
    main()
|
#Tal Aizikov 101150420
#This will display a calculation and solve it
print("Do you want to know what (2.019x10^-9 x 5.76x10^-7) / (7.16x10^-4 + 9.23x10^-7)") #getting user engaged
# BUG FIX: `^` is bitwise XOR in Python, not exponentiation, so expressions
# like 2.019*(10^-9) computed garbage. Use scientific-notation literals
# instead, which express the intended powers of ten exactly.
num = (2.019e-9 * 5.76e-7) / (7.16e-4 + 9.23e-7)
print("It is " + str(num)) #printing it out / making sure num is changed to string
|
# import
from zipfile import *
import os
# set build path
builddir = r'C:\Users\roey.c\Desktop\lab_days'
# for all sub-folders inside the build folder
for subdir, dirs, files in os.walk(builddir):
    # Python 2 print statement: show the directory currently being scanned.
    print subdir, dirs, files
    # Make relative filenames below resolve against the folder being walked
    # (builddir is absolute, so every yielded subdir is absolute too).
    os.chdir(subdir)
    # for each file in the current folder
    for file in files:
        # Report whether the file is a valid ZIP archive.
        if is_zipfile(file):
            print "true"
        else:
            print "false"
        # If this is the networking package archive, list its contents.
        if file=="NetworkingPackage.zip":
            file_name = "NetworkingPackage.zip"
            zip_archive = ZipFile(file_name)
            print zip_archive.namelist()
            zip_archive.close()
def zipi():
    """Open the hard-coded archive 'bla.zip' in the current directory and
    print its ZipInfo entries. (Unused helper; Python 2 print syntax.)"""
    file_name = "bla.zip"
    zip_archive = ZipFile(file_name)
    print zip_archive.infolist();
    zip_archive.close()
|
from .callbacks import supports_callbacks
__version__ = '0.2.0'
__doc__ = """
This library allows you to place decorators on functions and methods that
enable them to register callbacks.
"""
|
#!/usr/bin/python -S
"""
oheap2_test.py: Tests for oheap2.py
"""
from __future__ import print_function
import unittest
from ovm2 import oheap2 # module under test
class Oheap2Test(unittest.TestCase):
    """Unit tests for the oheap2 alignment helpers."""
    def testAlign4(self):
        # (input, expected) pairs: zero, interior values, an exact multiple,
        # and one past a multiple.
        cases = [(0, 0), (1, 4), (3, 4), (4, 4), (5, 8)]
        for value, expected in cases:
            self.assertEqual(expected, oheap2.Align4(value))
    def testAlign16(self):
        cases = [(0, 0), (1, 16), (15, 16), (16, 16), (17, 32)]
        for value, expected in cases:
            self.assertEqual(expected, oheap2.Align16(value))
if __name__ == '__main__':
unittest.main()
|
"""
Routable Cherry Controllers
This code lets you combine CherryPy's nice default hierarchical dispatcher
with the ability to setup arbitrary routes to chosen targets.
Routes are attached directly to the method that handles the request rather than
in a separate configuration file
For example:
AccountsController(BaseController):
@route('/:username/:action')
def useraction(self, username, action='view'):
# do some magic
RootController(BaseController):
accounts = AccountsController()
All requests to /accounts/username/action then go to the useraction method
additionally, since useraction specifies a default for action, /accounts/username will
go there too.
@route accepts variable, static and greedy components:
@route('/:username') - Any value for username within 1 path segment
@route('/:action=view') - Only requests that begin with /view match - the action keyword argument will always be set to view
@route('/:username/@category/:action') - Adds a keyword argument called category that matches as many path segments as possible
methods can have more than one route attached to them if required
If no routes match then the default method is executed as per usual
You can also construct urls based on route using the path to the controller as the base:
make_route('/accounts', username='auser', action='delete')
you can also add a query string by passing a list of tuples to make_path
make_route('/accounts', username='auser', action='delete', _qs=[ ('return_to', '/') ])
"""
from __future__ import absolute_import
from builtins import range
from splunk.util import cmp
from functools import total_ordering
from builtins import object
import cherrypy
from cherrypy._cpdispatch import test_callable_spec
import sys
import inspect
import logging
import types
import splunk.util
import splunk.appserver.mrsparkle.lib.i18n as i18n
logger = logging.getLogger('splunk.appserver.mrsparkle.lib.routes')
class RequestRefused(cherrypy.HTTPError):
    """
    Used by decorators to reject handling of a request (and thus allow
    routes to try and find an alternate handler).
    Don't raise this in a handler yourself; raise cherrypy.NotFound instead.
    """
    pass
class RouteError(Exception):
    """Raised when a route pattern passed to @route has invalid syntax."""
    pass
def route(route=None, methods=None, leave_exposed=False):
    """
    The @route decorator

    @route('/:vararg/:static=constant/*greedy', methods='POST')

    methods or the route can be omitted; methods can be a list of methods.
    Defaults for route varargs are taken from the method's own argument
    defaults.
    Don't set leave_exposed to true unless you really want your routed method
    to be available at its method name as well as its route path.
    """
    def decorator(fn):
        if not hasattr(fn, 'routes'):
            fn.routes = []
        # inspect.getargspec was deprecated and removed in Python 3.11;
        # getfullargspec has the same first four fields (args, varargs,
        # varkw, defaults). Fall back for very old interpreters.
        try:
            fnargs = inspect.getfullargspec(fn)
        except AttributeError:
            fnargs = inspect.getargspec(fn)
        if fnargs[3]:
            # map the trailing positional parameters onto their defaults
            defaults = dict(zip(fnargs[0][-len(fnargs[3]):], fnargs[3]))
        else:
            defaults = {}
        fn.routes.append(Route(fn, route, defaults, methods))
        if leave_exposed:
            fn.route_exposed = True
        return fn
    return decorator
def make_route(base_path, **kw):
    """
    Return a url path for a route
    make_route('/search/jobs', job_id=1234, action=delete)
    Can also add a query string by passing a list of tuples or a dict as _qs:
    make_route('/search/jobs', job_id=1234, action=delete, _qs=[('q', 'search val to quote')])
    or
    make_route('/search/jobs', job_id=1234, action=delete, _qs=dict(q='search val to quote'))

    Returns None if no route on the resolved controller matches kw.
    """
    qargs = kw.get('_qs', '')
    if qargs:
        del kw['_qs']
        if isinstance(qargs, dict):
            # translate {'v1':'k1', 'v2':['v2-1','v2-2']} to [('v1','k1'), ('v2','v2-1'), ('v2','v2-2')]
            # nested list comprehensions ftw
            input = qargs
            qargs = []
            [ qargs.extend([(k, e) for e in v]) for k, v in [ (k, v if isinstance(v, (list, tuple)) else (v,) ) for k, v in input.items() ] ]
        qargs = '?'+'&'.join( [ '%s=%s' % (k,splunk.util.safeURLQuote(v)) for k,v in qargs ] )
    if not kw:
        # no route arguments: the base path itself is the url
        return i18n.make_i18n_url(base_path + qargs)
    # walk the controller tree from the cherrypy root along base_path
    app = cherrypy.request.app
    ctrl = app.root
    base_path = base_path.strip('/').split('/')
    for el in base_path:
        if hasattr(ctrl, el):
            newctrl = getattr(ctrl, el)
            # NOTE(review): isinstance(x, object) is always True in Python 3,
            # so this guard never fires — presumably it was meant to check for
            # a controller type; confirm the original intent.
            if not isinstance(newctrl, object):
                break
            ctrl = newctrl
        else:
            break
    # find route that matches kw
    for route in ctrl.routes:
        path = route.build_path(**kw)
        if path:
            base_path.extend(path)
            return i18n.make_i18n_url('/'+'/'.join([str(x) for x in base_path]) + qargs)
@total_ordering
class Route(object):
    """
    An individual route - You don't normally want to instantiate this directly
    Use the @route decorator instead

    A route definition string is a '/'-separated list of components:
        :name        dynamic - captures one path segment as keyword `name`
        :name=value  static  - segment must equal `value`; `name` bound to it
        =value       static  - segment must equal `value`; nothing is bound
        *name        greedy  - captures as many path segments as possible
    """

    # Node-type tags stored as the first element of each nodelist tuple.
    DYN = 1
    STATIC = 2
    GREEDY = 3

    def __init__(self, target, route_def, defaults=None, methods=None):
        """
        target:    handler function the route dispatches to
        route_def: route definition string (see class docstring)
        defaults:  mapping of dynamic component names to default values
                   (was a mutable default argument; now created per call)
        methods:   HTTP method name or list of names; None accepts any
        """
        if defaults is None:
            defaults = {}
        self.route_str = route_def
        self.target = target
        if isinstance(methods, splunk.util.string_type):
            self.methods = [methods]
        else:
            self.methods = methods
        self.staticcount = 0
        self.greedy = 0
        self.defaults = {}
        self.requires = [] # keys that must be set to build a route dynamically
        self.elnames = []
        nodelist = []
        if route_def and route_def != '/':
            for i, el in enumerate(route_def.strip('/').split('/')):
                if el[0] == '*':
                    # greedy component
                    el = el[1:]
                    self.elnames.append(el)
                    nodelist.append((self.GREEDY, el))
                    self.requires.append(el)
                    self.greedy += 1
                elif el[0] == ':':
                    el = el[1:]
                    k = el.split('=', 1)
                    if len(k) == 2:
                        # named static component (:name=value)
                        nodelist.append((self.STATIC, k[0], k[1]))
                        self.staticcount += 1
                        self.elnames.append(k[0])
                        self.requires.append(k[0])
                    else:
                        # dynamic component; only required if it has no default
                        nodelist.append((self.DYN, el))
                        self.elnames.append(el)
                        if el not in defaults:
                            self.requires.append(el)
                        else:
                            self.defaults[el] = defaults[el]
                elif el[0] == '=':
                    # anonymous static component (=value): matched, not bound
                    nodelist.append((self.STATIC, None, el[1:]))
                else:
                    raise RouteError(_('Invalid route definition: %s') % route_def)
        self.nodelist = nodelist
        self.nodelen = len(nodelist)

    def __repr__(self):
        return 'Route<route="%s", methods=%s>' % (self.route_str, self.methods)

    def matchpath(self, path, method=None):
        """
        Match `path` (a string or a pre-split list of segments) against this
        route. Returns False on no match, else a dict of keyword arguments
        for the target (including defaults for trailing dynamic components).
        """
        if method and self.methods and method not in self.methods:
            # this route defines a list of methods it can handle
            # and method isn't one of them
            return False
        if isinstance(path, splunk.util.string_type):
            path = path.strip('/').split('/')
        pathlen = len(path)
        i = 0
        nodenum = 0
        result = {}
        for node in self.nodelist:
            nodename = node[1]
            if node[0] == self.STATIC:
                if i > pathlen - 1:
                    return False # no defaults for static elements
                if path[i] != node[2]:
                    return False # must match exactly
                if node[1]:
                    result[node[1]] = node[2]
            elif node[0] == self.DYN:
                nodename = node[1]
                if i > pathlen - 1:
                    # ran out of path; fall back to the declared default
                    if nodename not in self.defaults:
                        return False
                    result[nodename] = self.defaults[nodename]
                else:
                    result[nodename] = path[i]
            elif node[0] == self.GREEDY:
                # match as many nodes as possible, allowing for remaining
                # nodes in the nodelist
                nodesleft = self.nodelen - nodenum - 1
                pathleft = len(path) - i
                if pathleft - nodesleft < 1:
                    # not enough elements left to match the Route
                    return False
                # (this was previously computed twice; once is enough)
                suck = pathleft - nodesleft
                result[nodename] = '/'.join(path[i:i + suck])
                i += suck - 1 # will be incremented by one more below
            i += 1
            nodenum += 1
        if i < pathlen:
            return False # If dangling path elements haven't been sucked up by a greedy then we don't match
        return result

    def build_path(self, **args):
        """
        Attempt to build a path from a set of defaults
        Returns None if the defaults don't fit the Route
        Returns a list of path elements otherwise
        """
        for key in self.requires:
            if key not in args:
                return None
        for key in args.keys():
            if key not in self.elnames:
                return None
        result = []
        for node in self.nodelist:
            nodetype, el = node[0], node[1]
            if nodetype == self.STATIC:
                # BUGFIX: anonymous static nodes have el=None, which used to
                # raise KeyError here; they always emit their literal value.
                if el is not None and args.get(el) != node[2]:
                    return None
                result.append(node[2])
            elif nodetype == self.DYN:
                result.append(args.get(el, self.defaults.get(el, None)))
            elif nodetype == self.GREEDY:
                result.append(args[el])
        return result

    def __eq__(self, other):
        return self._compare_routes(other) == 0

    def __lt__(self, other):
        return self._compare_routes(other) < 0

    def _compare_routes(self, other):
        """
        Compare nodelength and staticlength so arrays of Routes sort correctly based on how specific they are
        Specificity is based on (in descending priority):
        * The number of static components in the path
        * The number of other components in the path
        * Whether the route is restricted to a specific method or methods
        """
        if self.staticcount == other.staticcount:
            result = self.nodelen - other.nodelen
        else:
            result = self.staticcount - other.staticcount
        if result == 0:
            # all other things being equal, make sure routes that specify a method are returned before routes that accept any
            if self.methods and other.methods:
                return 0 # don't care whether the methods match, just that they both specify some
            if self.methods:
                return 1
            if other.methods:
                return -1
            return 0
        return result

    def collides_with(self, other):
        """
        Unlike a basic comparison, this method determines whether or not two routes will collide when CherryPy resolves them.
        This includes checking whether or not they have any static segments that can be used to distinguish them
        and whether there is any overlap in the HTTP methods they expose.
        """
        if self != other:
            return False
        # initially, we set the flag for whether static segments will resolve the routes if neither route has static segments
        # and their total number of segments is equal
        cant_resolve_with_static = ((self.staticcount == 0 and other.staticcount == 0) and (self.nodelen == other.nodelen))
        # if the flag is not set above, loop through the segments and look for a pair of static segments for resolving
        # if we find a pair, we can return False immediately
        if not cant_resolve_with_static:
            for i in range(len(self.nodelist)):
                self_node, other_node = self.nodelist[i], other.nodelist[i]
                if self_node[0] == self.STATIC and other_node[0] == self.STATIC:
                    if self_node[2] == other_node[2]:
                        cant_resolve_with_static = True
                    else:
                        return False
        # if the static segments can't be used to resolve, check for overlap in the HTTP methods exposed
        if cant_resolve_with_static:
            if self.methods and other.methods:
                for m in self.methods:
                    if m in other.methods:
                        return True
            else:
                return True
        return False
class RoutableType(type):
    """
    Metaclass to route-enable a controller class

    Collects the routes attached to @route-decorated methods at class
    creation time, strips their `exposed` flag (unless leave_exposed was
    used) and installs a `default` handler that dispatches requests to the
    most specific matching route.
    """
    def __new__(m, clsname, bases, dict_obj):
        routes = []
        for attrname, attr in dict_obj.items():
            if isinstance(attr, types.FunctionType):
                if hasattr(attr, 'routes'):
                    routes.extend(attr.routes)
                    # any methods with routes attached are assumed to be exposed;
                    # the method itself should not have the .exposed attribute
                    # else it'll be accessible at an unexpected url
                    if not hasattr(attr, 'route_exposed') and hasattr(attr, 'exposed'):
                        del attr.exposed
        dict_obj['routes'] = routes
        if routes:
            routes.sort(reverse=True) # place most specific routes first
            for i in range(len(routes) - 1):
                if routes[i].collides_with(routes[i + 1]):
                    logger.error('ROUTE COLLISION: a potential collision was found in %s: %s, %s' % (clsname, routes[i], routes[i + 1]))

        def default(self, *path, **kw):
            # attempt to find a matching route for the request path/method
            path = list(path)
            for i in range(len(path)):
                try:
                    if isinstance(path[i], str):
                        if sys.version_info < (3, 0):
                            path[i] = path[i].decode('utf-8')
                        pass
                except UnicodeDecodeError:
                    pass
            method = cherrypy.request.method
            refused = None
            for route in self.routes:
                match = route.matchpath(path, method)
                if match is False:
                    continue
                kw.update(match) # should probably create a new dict here
                try:
                    if getattr(route.target, 'lock_session', None):
                        # the default wrapper only acquires a read lock
                        # honour the target's request for a write lock prior to dispatch
                        cherrypy.session.escalate_lock()
                    return route.target(self, **kw)
                except TypeError:
                    test_callable_spec(route.target, [], kw)
                    raise
                except RequestRefused as e:
                    # BUGFIX: in Python 3 the `as e` name is unbound when the
                    # except block exits, so the old `if e: raise` failed with
                    # NameError (and a bare `raise` outside a handler is
                    # illegal anyway). Keep our own reference and re-raise it.
                    refused = e
            if hasattr(default, 'original'):
                # fall back to the class's original default handler
                try:
                    return default.original(*path, **kw)
                except TypeError:
                    test_callable_spec(default.original, path, kw)
                    raise
            if refused is not None:
                raise refused # re-raise the last refusal
            raise cherrypy.NotFound
        default.exposed = True
        if 'default' in dict_obj:
            # preserve a pre-existing default method as the fallback target
            default.original = dict_obj['default']
        dict_obj['default'] = default
        return super(RoutableType, m).__new__(m, clsname, bases, dict_obj)
|
# -*- coding: utf-8 -*-
import scrapy
import collections
import json
class AirbnbSpider(scrapy.Spider):
    """Scrape listing summaries (url, price, rating, review count) for
    Cancún from Airbnb's explore_tabs JSON API."""
    name = 'airbnb'
    allowed_domains = ['www.airbnb.com']
    start_urls = ['http://www.airbnb.com/']

    def start_requests(self):
        # BUGFIX: '&currency=CAD' had been mangled into '¤cy=CAD' (the HTML
        # entity &curren; collapsed to ¤ during a copy/paste); restored the
        # literal query parameter.
        url = ('https://www.airbnb.ca/api/v2/explore_tabs?_format=for_explore_search_web'
               '&_intents=p1&auto_ib=false&client_session_id=6c7f3e7b-c038-4d92-b2b0-0bc7c25f1054&currency=CAD'
               '&experiences_per_grid=20&fetch_filters=true&guidebooks_per_grid=20&has_zero_guest_treatment=true'
               '&is_guided_search=true&is_new_cards_experiment=true&is_standard_search=true&items_per_grid=18'
               '&key=d306zoyjsyarp7ifhu67rjxn52tv0t20&locale=en-CA&luxury_pre_launch=false&metadata_only=false'
               '&place_id=ChIJ21P2rgUrTI8Ris1fYjy3Ms4&query=Canc%C3%BAn%2C%20Mexico&query_understanding_enabled=true'
               '&refinement_paths%5B%5D=%2Fhomes&s_tag=b7cT9Z3U&satori_version=1.1.9&screen_height=948&screen_size=medium'
               '&screen_width=1105&search_type=section_navigation&selected_tab_id=home_tab&show_groupings=true'
               '&supports_for_you_v3=true&timezone_offset=-240&version=1.5.7')
        yield scrapy.Request(url=url, callback=self.parse)

    def parse(self, response):
        """Parse the explore_tabs response and yield a dict keyed by room id."""
        data = json.loads(response.body)
        # assumes sections[3] is the listings grid — TODO confirm against the live API
        homes = data.get('explore_tabs')[0].get('sections')[3].get('listings')
        BASE_URL = 'https://www.airbnb.com/rooms/'
        data_dict = collections.defaultdict(dict)
        for home in homes:
            listing = home.get('listing')
            room_id = str(listing.get('id'))
            data_dict[room_id]['url'] = BASE_URL + room_id
            data_dict[room_id]['price'] = home.get('pricing_quote').get('rate').get('amount')
            data_dict[room_id]['avg_rating'] = listing.get('avg_rating')
            data_dict[room_id]['reviews_count'] = listing.get('reviews_count')
        yield data_dict
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014-Today Jonathan Finlay <jfinlay@riseup.net>.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
"name": "CYG - Seguridad",
"version": '14-01-2015',
"description": """Modulo de seguridad
""",
"shortdesc": "CYG - Seguridad",
"author": "CyG",
"website": "http://www.cyg.ec",
"category": "Construction",
"sequence": 1,
"complexity": "easy",
"depends": [
'cyg_base',
'cyg_inmobiliario',
'cyg_plan_pagos',
'cyg_sale',
],
"data": [
#'security/groups.xml',
'security/ir.model.access.csv',
'security/ir_rule.xml'
],
"init_xml": [],
"demo_xml": [],
"update_xml": [],
"active": False,
"installable": True,
"certificate": "",
}
|
import constants as C
def checkTop(host, target):
    """Return True when host's top edge, moved by its Y velocity, would land
    inside target while the two overlap horizontally; None (falsy) otherwise."""
    hrect = host.rect
    trect = target.rect
    next_top = hrect.top + host.velocityY
    vertical_hit = trect.top < next_top < trect.bottom
    horizontal_overlap = (
        (trect.left < hrect.left < trect.right) or
        (trect.left < hrect.right < trect.right) or
        (hrect.left == trect.left and hrect.right == trect.right)
    )
    if vertical_hit and horizontal_overlap:
        return True
def checkBottom(host, target):
    """Return True when host's bottom edge, moved by its Y velocity, would
    land inside target while they overlap horizontally; None otherwise."""
    hrect = host.rect
    trect = target.rect
    next_bottom = hrect.bottom + host.velocityY
    vertical_hit = trect.top < next_bottom < trect.bottom
    horizontal_overlap = (
        (trect.left < hrect.left < trect.right) or
        (trect.left < hrect.right < trect.right) or
        (hrect.left == trect.left and hrect.right == trect.right)
    )
    if vertical_hit and horizontal_overlap:
        return True
def checkLeft(host, target):
    """Return True when host's left edge, moved by its X velocity, would land
    inside target while they overlap vertically; None otherwise."""
    hrect = host.rect
    trect = target.rect
    next_left = hrect.left + host.velocityX
    horizontal_hit = trect.left < next_left < trect.right
    vertical_overlap = (
        (trect.top < hrect.bottom < trect.bottom) or
        (trect.top < hrect.top < trect.bottom) or
        (hrect.top == trect.top and hrect.bottom == trect.bottom)
    )
    if horizontal_hit and vertical_overlap:
        return True
def checkRight(host, target):
    """Return True when host's right edge, moved by its X velocity, would
    land inside target while they overlap vertically; None otherwise."""
    hrect = host.rect
    trect = target.rect
    next_right = hrect.right + host.velocityX
    horizontal_hit = trect.left < next_right < trect.right
    vertical_overlap = (
        (trect.top < hrect.bottom < trect.bottom) or
        (trect.top < hrect.top < trect.bottom) or
        (hrect.top == trect.top and hrect.bottom == trect.bottom)
    )
    if horizontal_hit and vertical_overlap:
        return True
def moveToEdge(host, target, side):
    """Set target's velocity so its rect lands flush against host's edge
    for the given side constant.

    NOTE(review): host/target roles look inverted relative to the check*
    helpers above (it is *target's* velocity that is adjusted) — confirm
    the intended caller convention.
    """
    hrect = host.rect
    trect = target.rect
    if side == C.SIDE_TOP:
        target.velocityY = hrect.bottom - trect.top
    elif side == C.SIDE_BOTTOM:
        target.velocityY = hrect.top - trect.bottom
    elif side == C.SIDE_LEFT:
        target.velocityX = hrect.right - trect.left
    elif side == C.SIDE_RIGHT:
        target.velocityX = hrect.left - trect.right
|
from django.shortcuts import render
from stats.models import Master, Batting, Fielding, Pitching
from rest_framework.generics import ListCreateAPIView, RetrieveUpdateDestroyAPIView
from stats.serializers import MasterSerializer, BattingSerializer, FieldingSerializer, PitchingSerializer
class MasterListCreateAPIView(ListCreateAPIView):
    """GET: list all Master records; POST: create a new one."""
    queryset = Master.objects.all()
    serializer_class = MasterSerializer
class MasterDetailUpdateDestroyAPIView(RetrieveUpdateDestroyAPIView):
    """GET/PUT/PATCH/DELETE a single Master record by pk."""
    queryset = Master.objects.all()
    serializer_class = MasterSerializer
class BattingListCreateAPIView(ListCreateAPIView):
    """GET: list all Batting records; POST: create a new one."""
    queryset = Batting.objects.all()
    serializer_class = BattingSerializer
class BattingDetailUpdateDestroyAPIView(RetrieveUpdateDestroyAPIView):
    """GET/PUT/PATCH/DELETE a single Batting record by pk."""
    queryset = Batting.objects.all()
    serializer_class = BattingSerializer
class FieldingListCreateAPIView(ListCreateAPIView):
    """GET: list all Fielding records; POST: create a new one."""
    queryset = Fielding.objects.all()
    serializer_class = FieldingSerializer
class FieldingDetailUpdateDestroyAPIView(RetrieveUpdateDestroyAPIView):
    """GET/PUT/PATCH/DELETE a single Fielding record by pk."""
    queryset = Fielding.objects.all()
    serializer_class = FieldingSerializer
class PitchingListCreateAPIView(ListCreateAPIView):
    """GET: list all Pitching records; POST: create a new one."""
    queryset = Pitching.objects.all()
    serializer_class = PitchingSerializer
class PitchingDetailUpdateDestroyAPIView(RetrieveUpdateDestroyAPIView):
    """GET/PUT/PATCH/DELETE a single Pitching record by pk."""
    queryset = Pitching.objects.all()
    serializer_class = PitchingSerializer
|
# Generated by Django 3.1 on 2020-08-30 20:32
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
    """Create the Patient and Patient_Diagnosis tables for the authenticate
    app (auto-generated by Django 3.1; edit via new migrations, not here)."""
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('authenticate', '0001_initial'),
    ]
    operations = [
        migrations.CreateModel(
            name='Patient',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('first_name', models.CharField(max_length=100)),
                ('second_name', models.CharField(max_length=100)),
                ('date_registered', models.DateTimeField(default=django.utils.timezone.now)),
            ],
        ),
        migrations.CreateModel(
            name='Patient_Diagnosis',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('Diagnosis', models.CharField(max_length=255)),
                ('diagnosis_detail', models.TextField()),
                ('date_of_diagnosis', models.DateTimeField(default=django.utils.timezone.now)),
                ('report', models.TextField()),
                # diagnosing nurse is the authenticated user; cascade on delete
                ('nurse', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
                ('patient', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='authenticate.patient')),
            ],
        ),
    ]
|
# -*- coding: utf-8 -*-
from hearthstone.entities import Entity
from entity.spell_entity import SpellEntity
class LT21_016_(SpellEntity):
    """
    Skull Dust 4
    Zila will gain Bleed (5).
    (Card text translated from the original Chinese — confirm wording.)
    """
    def __init__(self, entity: Entity):
        super().__init__(entity)

    def equip(self, hero):
        # No equip-time effect implemented yet.
        pass
|
from django.urls import reverse
from rest_framework import status
from rest_framework.test import APITestCase, APIRequestFactory
from .models import Room
from .views import ListCreateRoomAPIView, RetrieveUpdateDestroyRoomAPIView
class CreateRoomTest(APITestCase):
    """POST to the create endpoint should persist a Room and echo it back."""
    def setUp(self):
        # Exercise the view directly via APIRequestFactory (no URL dispatch).
        self.view = ListCreateRoomAPIView.as_view()
        self.factory = APIRequestFactory()
        self.url = reverse('meetingroom:create')
        self.data = {'name': 'Sala da Justiça'}

    def test_create_room(self):
        request = self.factory.post(self.url, self.data, format='json')
        response = self.view(request).render()
        # the freshly created Room is the only row, so first() must be it
        self.data['pk'] = Room.objects.first().pk
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        self.assertEqual(response.data, self.data)
class UpdateDestroyRoomTest(APITestCase):
    """PUT should rename an existing Room; DELETE should remove it."""
    def setUp(self):
        self.view = RetrieveUpdateDestroyRoomAPIView.as_view()
        self.factory = APIRequestFactory()
        self.data = {'name': 'Sala da Justiça'}
        # pre-create the Room that each test operates on
        self.room = Room.objects.create(name=self.data['name'])
        self.url = reverse(
            'meetingroom:update-destroy',
            kwargs={'pk': self.room.pk}
        )

    def test_update_name_room(self):
        data = {'name': 'BatCaverna'}
        request = self.factory.put(self.url, data, format='json')
        response = self.view(request, pk=self.room.pk).render()
        data['pk'] = self.room.pk
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data, data)

    def test_delete_a_room(self):
        request = self.factory.delete(self.url)
        response = self.view(request, pk=self.room.pk).render()
        self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
        # the table should now be empty
        self.assertFalse(Room.objects.exists())
|
# Generated by Django 2.1.7 on 2019-05-21 13:27
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Repoint cv foreign keys at the root app's models, then drop the
    now-duplicated Company/SkillCategory models from the cv app."""
    dependencies = [
        ('cv', '0008_auto_20190517_0952'),
    ]
    operations = [
        migrations.AlterField(
            model_name='job',
            name='company',
            field=models.ForeignKey(default=1, on_delete=django.db.models.deletion.DO_NOTHING, to='root.Company'),
        ),
        migrations.AlterField(
            model_name='school',
            name='school',
            # NOTE(review): the school FK also targets root.Company — confirm
            # this is intentional and not a copy/paste of the job field.
            field=models.ForeignKey(default=1, on_delete=django.db.models.deletion.DO_NOTHING, to='root.Company'),
        ),
        migrations.AlterField(
            model_name='skill',
            name='category',
            field=models.ForeignKey(default=1, on_delete=django.db.models.deletion.DO_NOTHING, to='root.SkillCategory'),
        ),
        migrations.DeleteModel(
            name='Company',
        ),
        migrations.DeleteModel(
            name='SkillCategory',
        ),
    ]
|
#quicksort python implementation
##pseudocode
#quicksort(A,start,end)
#if(start<end)
#partition(A,start,end)
#quicksort(A,start,pIndex-1)
#quicksort(A,pIndex+1,end)
#
#partition(A,start,end)
#pivot = A[end]
#pIndex = (start-1)
#for(0 to end-1)
#{if(A[i]<=pivot)
#swap A[i],A[pIndex]
#pIndex = pIndex+1
#}
#swap A[pIndex+1],A[end]
#return (pIndex+1)
def quicksort(A, start, end):
    """Sort A[start..end] (inclusive bounds) in place via Lomuto partition."""
    if start >= end:
        return
    pivot_pos = partition(A, start, end)
    quicksort(A, start, pivot_pos - 1)
    quicksort(A, pivot_pos + 1, end)
def partition(A, start, end):
    """Lomuto partition: pivot on A[end], move <=pivot values left, and
    return the pivot's final index."""
    pivot = A[end]
    boundary = start - 1  # last index known to hold a value <= pivot
    for i in range(start, end):
        if A[i] <= pivot:
            boundary += 1
            A[boundary], A[i] = A[i], A[boundary]
    A[boundary + 1], A[end] = A[end], A[boundary + 1]
    return boundary + 1
# Driver code to test above
arr = [10, 7, 8, 9, 1, 5]
n = len(arr)
quicksort(arr, 0, n - 1)
# BUGFIX: the old `print(...),` trailing commas were Python 2 leftovers —
# under Python 3 they printed one element per line and built a discarded
# tuple. Emit the sorted array on a single line instead.
print("Sorted array is:")
print(" ".join(str(x) for x in arr))
|
import pandas as pd
from sqlalchemy import create_engine
# SECURITY NOTE(review): credentials are hard-coded in the DSN — move them
# to environment/config before sharing or committing this file.
engine = create_engine('mysql+pymysql://Joe:sjjgtjytz@localhost:3306/quant')
sql_query = 'select * from user;'
# Load the whole `user` table into a DataFrame and dump it as JSON records.
df_read = pd.read_sql_query(sql_query, engine)
test = df_read.to_json(orient='records')
print(test)
import json
import sys
from src.common.Logger import Logger
from src.common.app import App
from src.common.AppiumScreenshot import ScreenShot
# Module-wide logger and element-locator map, loaded once at import time.
log = Logger("main").logger()
# NOTE(review): relative path — resolution depends on the CWD at import.
with open('../config/MyObservatoryEle.json') as json_file:
    ele = json.load(json_file)
class MyObservatory(App):
    """Page-object wrapper for the MyObservatory app; element locators are
    resolved through the JSON map loaded at module import."""

    @classmethod
    def my_click(cls, type_name, name, timeout=15):
        """Click element `name` (looked up in the locator map, falling back
        to the raw value). On failure, log it, take a screenshot and
        return False; return True on success."""
        value = ele.get(name)
        if value is None:
            value = name
        if not cls.click(type_name, value, name, timeout=timeout):
            name = 'cannot_find_' + name
            log.info(name)
            cls.shot(name)
            return False
        return True

    @classmethod
    def shot(cls, path, name=None):
        """Save a screenshot.

        BUGFIX: my_click calls shot() with a single argument, but the old
        signature required (path, name) and raised TypeError. A lone
        argument is now treated as the file name and saved to the current
        directory ('.' — TODO confirm the intended destination). Two-arg
        callers are unaffected.
        """
        if name is None:
            path, name = '.', path
        ScreenShot(cls.driver).get_screenshot(path, name)
|
def style(output):
    """Decorator factory wrapping a function's return value in markup.

    output: "bold" -> <bold>...</bold>, "italics" -> <i>...</i>.
    BUGFIX: any other value used to hit `return wrap` with `wrap` never
    defined (NameError); unknown styles now leave the function unchanged.
    """
    def decorate(f):
        if output == "bold":
            def wrap():
                return "<bold>" + f() + "</bold>"
        elif output == "italics":
            def wrap():
                return "<i>" + f() + "</i>"
        else:
            return f
        return wrap
    return decorate
def bold():
    # Decorator factory that wraps a function's result in <bold> tags.
    # (Python 2 file: print statements below trace each phase of decoration.)
    print "bold() called: "
    def decorate(f):
        # Runs once, when @bold() is applied to a function.
        print "Decorator called: f = ", f
        def wrap():
            # Runs on every call to the decorated function.
            print "Wrapper called"
            return "<bold>" + f() + "</bold>"
        return wrap
    return decorate
# Decoration happens at definition time: bold() / style() run immediately.
@bold()
def greet():
    return "Hello world"
@style(output="italics")
def welcome():
    return "Welcome to Python"
# Python 2 print statements; decorator trace output interleaves with results.
print greet()
print welcome()
|
import argparse
import tarfile
from collections import defaultdict
from typing import Dict, List
from sacrerouge.data import Metrics, MetricsDict
from sacrerouge.io import JsonlWriter
def load_summaries(eval_tar_2: str):
    """
    Extract all summaries from the NISTeval2 archive.

    Returns: instance_id -> summarizer_id -> summary dict with
    'summarizer_id', 'summarizer_type' ('reference' when the id is
    alphabetic, else 'peer') and 'text' (list of lines).
    """
    summaries = defaultdict(dict)
    with tarfile.open(eval_tar_2, 'r') as tar:
        for member in tar.getmembers():
            if member.name.startswith('NISTeval2/ROUGE/peers/'):
                # member names end in <instance>.<a>.<b>.<c>.<summarizer_id>
                parts = member.name.split('/')[-1].split('.')
                assert len(parts) == 5
                instance_id = parts[0].lower()
                summarizer_id = parts[-1]
                if summarizer_id.isalpha():
                    summarizer_type = 'reference'
                else:
                    summarizer_type = 'peer'
                sentences = tar.extractfile(member).read().decode(errors='replace').splitlines()
                summary = {
                    'summarizer_id': summarizer_id,
                    'summarizer_type': summarizer_type,
                    'text': sentences
                }
                # a summary seen twice must be byte-identical
                if summarizer_id in summaries[instance_id]:
                    assert summaries[instance_id][summarizer_id] == summary
                summaries[instance_id][summarizer_id] = summary
    return summaries
def load_rouge_jk_output(eval_tar: str, file_path: str, metrics: 'Dict[str, Dict[str, MetricsDict]]'):
    """
    Parse a jackknifed ROUGE/BE output file inside `eval_tar` and store the
    per-instance, per-summarizer mean recall/precision/f1 (scaled to 0-100)
    into `metrics` under '<metric>_jk' keys.
    """
    # instance -> summarizer -> metric -> {'recall'|'precision'|'f1'} -> scores
    jk_metrics = defaultdict(lambda: defaultdict(lambda: defaultdict(lambda: defaultdict(list))))
    with tarfile.open(eval_tar, 'r') as tar:
        lines = tar.extractfile(file_path).read().decode().splitlines()
        for line in lines:
            columns = line.split()
            # data rows: <peer> <METRIC> Eval <file> R:<r> P:<p> F:<f>
            if len(columns) == 7 and columns[2] == 'Eval':
                rouge_metric = columns[1].lower() + '_jk'
                filename = columns[3].split('.')
                instance_id = filename[0].lower()
                # the summarizer id comes from the evaluated file's name;
                # (the old assignment from columns[0] was a dead store)
                summarizer_id = filename[-1]
                recall = float(columns[4][2:]) * 100
                precision = float(columns[5][2:]) * 100
                f1 = float(columns[6][2:]) * 100
                jk_metrics[instance_id][summarizer_id][rouge_metric]['recall'].append(recall)
                jk_metrics[instance_id][summarizer_id][rouge_metric]['precision'].append(precision)
                jk_metrics[instance_id][summarizer_id][rouge_metric]['f1'].append(f1)
    # average the jackknife replicates for each (instance, summarizer, metric)
    for instance_id in jk_metrics.keys():
        for summarizer_id in jk_metrics[instance_id].keys():
            for rouge_metric in jk_metrics[instance_id][summarizer_id].keys():
                scores = jk_metrics[instance_id][summarizer_id][rouge_metric]
                recalls = scores['recall']
                precisions = scores['precision']
                f1s = scores['f1']
                metrics[instance_id][summarizer_id][rouge_metric] = {
                    'recall': sum(recalls) / len(recalls),
                    'precision': sum(precisions) / len(precisions),
                    'f1': sum(f1s) / len(f1s)
                }
def load_responsiveness_tables(eval_tar: str, metrics: Dict[str, Dict[str, MetricsDict]]):
    """Record overall and content responsiveness scores from the NIST tables
    (each table has a 6-line header before the data rows)."""
    tables = [
        ('NISTeval/responsiveness/overall.table', 'overall_responsiveness'),
        ('NISTeval/responsiveness/content.table', 'content_responsiveness'),
    ]
    with tarfile.open(eval_tar, 'r') as tar:
        for table_path, metric_name in tables:
            rows = tar.extractfile(table_path).read().decode().splitlines()
            for row in rows[6:]:
                fields = row.split()
                instance = fields[0].lower()
                summarizer = fields[3]
                metrics[instance][summarizer][metric_name] = int(fields[4])
def load_linguistic_quality_table(eval_tar: str, metrics: Dict[str, Dict[str, MetricsDict]]):
    """Record per-question linguistic quality scores (table header is 7 lines;
    each score lands under metrics[...]['linguistic_quality'][f'Q<n>'])."""
    with tarfile.open(eval_tar, 'r') as tar:
        table = tar.extractfile('NISTeval/linguistic_quality/linguistic_quality.table').read().decode()
        for row in table.splitlines()[7:]:
            fields = row.split()
            instance = fields[0].lower()
            summarizer = fields[3]
            question = fields[4]
            metrics[instance][summarizer]['linguistic_quality'][f'Q{question}'] = int(fields[5])
def load_pyramid_scores(pyramid_tar: str, metrics: Dict[str, Dict[str, MetricsDict]]):
    """Record modified pyramid scores, SCU counts and repetition counts.
    Only peers appear in this file, so no jackknifing applies."""
    with tarfile.open(pyramid_tar, 'r') as tar:
        raw = tar.extractfile('scoring/2006_modified_scores.txt').read().decode()
    for row in raw.splitlines():
        fields = row.split()
        instance = fields[0].lower()
        # Typos in the file produce ids like "01"; since only numeric peer
        # ids occur here, an int round-trip safely normalizes them.
        summarizer = str(int(fields[1]))
        entry = metrics[instance][summarizer]
        entry['modified_pyramid_score'] = float(fields[2])
        entry['num_scus'] = int(fields[3])
        entry['num_repetitions'] = int(fields[4])
def get_references(summaries, instance_id, summarizer_id):
    """Collect the reference summaries (alphabetic summarizer ids) for an
    instance, excluding `summarizer_id` itself (for jackknifing)."""
    return [
        summaries[instance_id][sid]
        for sid in summaries[instance_id]
        if sid.isalpha() and sid != summarizer_id
    ]
def save_metrics(summaries: Dict[str, Dict[str, List[str]]],
                 metrics: Dict[str, Dict[str, List[int]]],
                 output_dir: str):
    """Write paired task1 summary/metric jsonl files, one record per
    (instance, summarizer), with jackknifed reference lists attached."""
    with JsonlWriter(f'{output_dir}/task1.summaries.jsonl') as summary_writer, \
            JsonlWriter(f'{output_dir}/task1.metrics.jsonl') as metric_writer:
        for instance_id in sorted(metrics.keys()):
            for summarizer_id in metrics[instance_id].keys():
                summary = summaries[instance_id][summarizer_id]
                instance_metrics = metrics[instance_id][summarizer_id]
                references = get_references(summaries, instance_id, summarizer_id)
                summary_writer.write({
                    'instance_id': instance_id,
                    'summarizer_id': summarizer_id,
                    'summarizer_type': summary['summarizer_type'],
                    'summary': summary,
                    'references': references
                })
                metric_writer.write(Metrics(instance_id, summarizer_id, summary['summarizer_type'], instance_metrics))
def setup(data_root: str, output_dir: str):
    # Resolve the three DUC 2006 result archives under data_root and ingest.
    eval_tar = f'{data_root}/scrapes/duc.nist.gov/past_duc_aquaint/duc2006/results/NIST/NISTeval.tar.gz'
    eval_tar_2 = f'{data_root}/scrapes/duc.nist.gov/past_duc_aquaint/duc2006/results/NIST-secondary-automatic/NISTeval2.tar.gz'
    pyramid_tar = f'{data_root}/scrapes/duc.nist.gov/past_duc_aquaint/duc2006/results/Pyramid/DUC2006pyramiddata.tar.gz'
    main(eval_tar, eval_tar_2, pyramid_tar, output_dir)
def main(eval_tar, eval_tar_2, pyramid_tar, output_dir):
    # Merge every score source into one nested dict keyed by
    # instance_id -> summarizer_id, then write summaries + metrics jsonl.
    summaries = load_summaries(eval_tar_2)
    metrics = defaultdict(lambda: defaultdict(MetricsDict))
    load_rouge_jk_output(eval_tar_2, 'NISTeval2/ROUGE/rougejk.m.out', metrics)
    # the BE "simple" output reuses the same table format as ROUGE
    load_rouge_jk_output(eval_tar_2, 'NISTeval2/BE/simplejk.m.hm.out', metrics)
    load_responsiveness_tables(eval_tar, metrics)
    load_linguistic_quality_table(eval_tar, metrics)
    load_pyramid_scores(pyramid_tar, metrics)
    save_metrics(summaries, metrics, output_dir)
if __name__ == '__main__':
    # CLI entry point: paths to the three DUC 2006 archives plus output dir.
    argp = argparse.ArgumentParser()
    argp.add_argument('eval_tar')
    argp.add_argument('eval_tar_2')
    argp.add_argument('pyramid_tar')
    argp.add_argument('output_dir')
    args = argp.parse_args()
    main(args.eval_tar, args.eval_tar_2, args.pyramid_tar, args.output_dir)
|
import re
import subprocess

from config import *
# Image-name patterns that are OCR'd with the vertical-text model (jpn_vert).
vert_imname_res = [
    re.compile(r'cm[em][0-9]+$'),
    re.compile(r'knk[clq][0-9]+$'),
    re.compile(r'nk2[cj][0-9]+$'),
    re.compile(r'ssk[ikno][0-9]+$'),
    re.compile(r'zps[efim][0-9]+$'), ]
# dict_image / dst_impath_fmt are assumed to come from `from config import *`
# — TODO confirm.
for k, v in dict_image.items():
    language = 'jpn'
    for vert_imname_re in vert_imname_res:
        if vert_imname_re.match(k) is not None:
            language = 'jpn_vert'
            break
    impath = dst_impath_fmt(k)
    cmd = ['tesseract', impath, 'stdout', '-l', language, '--oem', '3']
    # BUGFIX: passing an argv *list* together with shell=True runs only
    # 'tesseract' on POSIX (all other arguments are handed to the shell, not
    # the program). Run without a shell so every argument reaches tesseract.
    p = subprocess.Popen(cmd, stdout=subprocess.PIPE)
    txt = ''
    for line in p.stdout:
        s = line.decode().strip().replace(' ', '')
        txt = txt + s + "\n"
        print(k, '>>>', s)
    p.wait()  # reap the child before moving to the next image
    txtpath = impath + '.txt'
    with open(txtpath, 'w') as f:
        f.write(txt)
|
import math
import numpy as np
import matplotlib.pyplot as plt
# http://nghiaho.com/?page_id=671
# Demo: recover a known 2D rotation from point correspondences via SVD
# (Kabsch/Procrustes style) and visualise both point sets.
fig = plt.figure()
axes1 = plt.subplot(111)
axes1.axis("equal")
original_pts_np = np.mat([[0,1.0],[2.0,2.0],[1.0,0]])
rot_angle = np.pi/4.0
c = math.cos(rot_angle)
s = math.sin(rot_angle)
ground_truth_rot_mat = np.mat([[c,-s],[s,c]])
# rotate every point (points are rows, hence the transposes)
pts_after_rotate_np = (ground_truth_rot_mat * original_pts_np.T).T
axes1.plot(original_pts_np[:,0],original_pts_np[:,1],"-go")
axes1.plot(pts_after_rotate_np[:,0],pts_after_rotate_np[:,1],"-ro")
axes1.annotate("before rotate", xy=(original_pts_np[1,0], original_pts_np[1,1]))
axes1.annotate("after rotate", xy=(pts_after_rotate_np[1,0], pts_after_rotate_np[1,1]))
# SVD of H = P^T Q. numpy returns the third factor already transposed (Vh),
# so the rotation estimate is R = v.T * u.T.
# (NOTE: `s` here shadows the math.sin value computed above.)
u,s,v = np.linalg.svd(original_pts_np.T * pts_after_rotate_np)
# NOTE(review): no reflection correction (det check) as in full Kabsch —
# acceptable for this pure-rotation demo, wrong for mirrored data.
guess_rot_mat= v.T* u.T
# draw the singular-vector pairs for reference (green: u, red: v)
vec1_a = np.stack([np.array([[0,0]]),u[0,:]])
vec1_b = np.stack([np.array([[0,0]]), u[1,:]])
axes1.plot(vec1_a[:,0],vec1_a[:,1],"-g")
axes1.plot(vec1_b[:,0],vec1_b[:,1],"-g")
axes1.annotate("u", xy=(u[0,0], u[0,1]))
vec2_a = np.stack([np.array([[0,0]]),v[0,:]])
vec2_b = np.stack([np.array([[0,0]]), v[1,:]])
axes1.plot(vec2_a[:,0],vec2_a[:,1],"-r")
axes1.plot(vec2_b[:,0],vec2_b[:,1],"-r")
axes1.annotate("v", xy=(v[1,0], v[1,1]))
# should print a matrix of near-zeros if the rotation was recovered
print(ground_truth_rot_mat - guess_rot_mat)
plt.show()
import os.path as osp
import cv2
import os
import numpy as np
import torch
import models.modules.EDVR_arch as EDVR_arch
import math
from torchvision.utils import make_grid
# Input clip directories (PNG frames) and output root; `index` tracks which
# slot of list_testpath is being filled below.
testpath = '/input/testpngs'
out_path = '../results'
index = 0
def read_img(img_path):
    """Read an image from a given image path.

    Args:
        img_path (str): image path
    Returns:
        np.ndarray: size (H, W, C), BGR, values scaled to [0, 1]
    """
    return cv2.imread(img_path).astype(np.float32) / 255.
def read_img_seq(img_list_l):
    """Read a sequence of image files into one tensor.

    Args:
        img_list_l (list[str]): image paths
    Returns:
        Tensor: size (T, C, H, W), RGB, [0, 1]
    """
    frames = np.stack([read_img(p) for p in img_list_l], axis=0)
    frames = frames[:, :, :, [2, 1, 0]]          # BGR -> RGB
    frames = np.transpose(frames, (0, 3, 1, 2))  # THWC -> TCHW
    return torch.from_numpy(np.ascontiguousarray(frames)).float()
# Build a per-clip list of frame paths, repeating the first and last frame
# three times so a sliding 5-frame window stays in bounds at clip edges.
list_testpath = [[] for _ in range(len(os.listdir(testpath)))]
for clip_name in sorted(os.listdir(testpath)):
    out_path_clip = osp.join(out_path,clip_name)
    testpath_clips = osp.join(testpath,clip_name)
    # assumes every clip holds exactly 100 frames — TODO confirm
    # NOTE(review): sorted(os.listdir(...)) is recomputed for every frame;
    # hoisting it would avoid repeated directory scans.
    for i in range(100):
        if i == 0:
            for x in range(3):
                list_testpath[index].append(osp.join(testpath_clips, sorted(os.listdir(testpath_clips))[i]))
        elif i == 99:
            for x in range(3):
                list_testpath[index].append(osp.join(testpath_clips, sorted(os.listdir(testpath_clips))[i]))
        else:
            list_testpath[index].append(osp.join(testpath_clips, sorted(os.listdir(testpath_clips))[i]))
    index += 1
def mkdir(path):
    """Create *path* (including parents) if it does not already exist.

    Uses exist_ok=True so that concurrent callers cannot race between
    the existence check and the creation (the original check-then-create
    pattern could raise FileExistsError under that race).
    """
    os.makedirs(path, exist_ok=True)
def tensor2img(tensor, out_type=np.uint8, min_max=(0, 1)):
    '''
    Convert a torch Tensor into an image Numpy array.

    Input: 4D(B,(3/1),H,W), 3D(C,H,W), or 2D(H,W), any range, RGB channel order
    Output: 3D(H,W,C) or 2D(H,W), [0,255], np.uint8 (default)
    '''
    lo, hi = min_max
    tensor = tensor.squeeze().float().cpu().clamp_(lo, hi)  # clamp
    tensor = (tensor - lo) / (hi - lo)  # normalise to [0, 1]
    n_dim = tensor.dim()
    if n_dim == 4:
        grid = make_grid(tensor, nrow=int(math.sqrt(len(tensor))), normalize=False).numpy()
        img_np = np.transpose(grid[[2, 1, 0], :, :], (1, 2, 0))  # HWC, BGR
    elif n_dim == 3:
        img_np = np.transpose(tensor.numpy()[[2, 1, 0], :, :], (1, 2, 0))  # HWC, BGR
    elif n_dim == 2:
        img_np = tensor.numpy()
    else:
        raise TypeError(
            'Only support 4D, 3D and 2D tensor. But received with dimension: {:d}'.format(n_dim))
    if out_type == np.uint8:
        # numpy's astype(uint8) truncates, so round explicitly first.
        img_np = (img_np * 255.0).round()
    return img_np.astype(out_type)
def save_img(img, img_path, mode='RGB'):
    """Write *img* to *img_path* via OpenCV.

    The *mode* argument is kept for call-site compatibility but is not
    used; cv2.imwrite interprets the array in OpenCV's BGR order.
    """
    cv2.imwrite(img_path, img)
# ---- inference driver ----------------------------------------------------
model_path = '../experiments/AI4K_TEST/models/latest_G.pth'
device = torch.device('cuda')
# NOTE(review): set after torch.device('cuda'); confirm it still takes effect.
os.environ['CUDA_VISIBLE_DEVICES'] = '0'
N_in = 5  # number of input frames per EDVR forward pass
model = EDVR_arch.EDVR(64, N_in, 4, 5, 20, predeblur=False, HR_in=False, w_TSA=True)
model.load_state_dict(torch.load(model_path), strict=True)
# Hoisted out of the loops: the original re-sorted the clip list and
# re-moved the model to the GPU on every window.
list_testpath = sorted(list_testpath)
model.eval()
model = model.to(device)
for t in range(len(list_testpath)):
    save_name = list_testpath[t][0].split('/')[-2]  # clip name from the frame path
    mkdir(osp.join(out_path, save_name))
    save_dir = osp.join(out_path, save_name)
    print('<<<<<<<<<<<<<<<<<<<<<<<<------------------>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>{}'.format(t))
    for x in range(100):
        imgs_list = list_testpath[t][x:x + 5]  # 5-frame sliding window
        imgs_l_torch = read_img_seq(imgs_list)
        with torch.no_grad():
            sr = model(imgs_l_torch.unsqueeze(0).to(device))
        out_img = tensor2img(sr.squeeze(0))
        print(osp.join(save_dir, '{}_%.4d.png'.format(save_name) % (x + 1)))
        save_img(out_img, osp.join(save_dir, '{}_%.4d.png'.format(save_name) % (x + 1)))
# print(list_testpath)
|
import objPais
import pickle
import btree as b

# Read 195 pickled country records from binaries.kbb, index them in a
# B-tree keyed by [tourism, name, record index], then persist the tree.
# Fix: use context managers so the files are closed even if pickle.load
# raises partway through (originally they leaked on error).
btree = b.BTree(4)
with open('binaries.kbb', 'rb') as binaries, open('tourism.pkl', 'wb') as arq:
    for i in range(1, 196):
        pais = pickle.load(binaries)
        print(pais.name)
        btree.insert([pais.tourism, pais.name, i])
    pickle.dump(btree, arq)
|
# BASIC STATISTICS / DATA ANALYSIS
# Getting to know a data set: small data can simply be inspected by eye,
# but for big data we need summary statistics. NumPy provides fast
# vectorised versions -- np.mean(), np.median(), np.corrcoef(), np.std(),
# np.sum(), np.sort() -- all much faster than the pure-Python equivalents.
import numpy as np

# Synthesise a dummy "city" population of 5000 people.
# Heights ~ Normal(mean=1.75, sd=0.20), rounded to 2 decimals.
height = np.round(np.random.normal(1.75, 0.20, 5000), 2)
# Weights ~ Normal(mean=60.32, sd=15), rounded to 2 decimals.
weight = np.round(np.random.normal(60.32, 15, 5000), 2)

# Pair the samples column-wise: shape (5000, 2).
np_city = np.column_stack((height, weight))

# Practice: slice out the height column and summarise it.
np_height = np_city[:, 0]
print(np.mean(np_height))    # mean height
print(np.median(np_height))  # median height
print(np.std(np_height))     # standard deviation of height
print(np.corrcoef(np_city[:, 0], np_city[:, 1]))  # height/weight correlation
|
import os
import re
import difflib
import datetime
def find_max(data, devices):
    """Return the latest entry of *data* for the given device type.

    Args:
        data: iterable of directly-comparable values ("2DVD") or of date
            strings in the device's file-name date format (other devices).
        devices (str): device name; one of "2DVD", "OTT", "CAWS", "Hyvis".

    Returns:
        The maximum value / most recent date string, or None for an
        unknown device or when parsing/comparison fails (this preserves
        the original's silent-failure behaviour).
    """
    # The three date-based branches were identical except for the format
    # string, so they collapse into one table lookup.
    date_formats = {"OTT": "%Y%m%d", "CAWS": "%Y-%m-%d", "Hyvis": "%Y%m%d"}
    try:
        if devices == "2DVD":
            return max(data)
        fmt = date_formats.get(devices)
        if fmt is None:
            return None  # unknown device: original fell through to None
        # Parse so max() compares real dates, then render the winner back
        # in the same string format.
        return max(datetime.datetime.strptime(i, fmt) for i in data).strftime(fmt)
    except Exception:
        # Original behaviour: swallow bad input and return None.
        return None
def change_dect(path, hou, regular, device):
    """Find, in *path*, the file whose name encodes the most recent date
    for *device*.

    Args:
        path (str): directory to scan. Only the first os.walk() level is
            examined: the function returns inside the first iteration.
        hou (str): required extension (text after the file name's first '.').
        regular (str): regex whose captures extract the date token(s).
        device (str): device name passed through to find_max().

    Returns:
        str: best-matching file name, '' when no close match is found, or
        None when the scan fails (error printed, as in the original).
    """
    try:
        for root, dirs, files in os.walk(path):
            # Keep only files with the requested extension. A file with
            # no '.' raises IndexError here, caught by the handler below
            # (original behaviour). Renamed from `list`, which shadowed
            # the builtin; dropped the unused `list_full`.
            candidates = [f for f in files if "".join(f).split(".", 1)[1] == hou]
            # Pull the date tokens out of each candidate name.
            dates = []
            for name in candidates:
                dates.extend(re.findall(regular, name))
            recent_file = find_max(dates, device)
            # Map the winning date token back to the closest file name.
            max_name = difflib.get_close_matches(recent_file, candidates, 1, cutoff=0.5)
            return "".join(max_name)
    except Exception as e:
        print("err,please check the path")
        print(e)
        pass
path = "D:/OTT/"
hou = "mis"
regular = '^[a-zA-Z]+([0-9]+)'
device = "OTT"
recent_file = change_dect(path,hou,regular,device)
print(recent_file) |
from selenium import webdriver
import time
# End-to-end UI script: log in to dailyobjects.com with a phone number +
# OTP, then add a new delivery address -- deliberately entering invalid
# values first to exercise the form's validation-error handling below.
driver = webdriver.Chrome()
url = "https://www.dailyobjects.com/auth/login"
driver.get(url)
# NOTE(review): find_elements_by_* is the legacy Selenium 3 API; confirm
# the installed selenium version still provides it.
phone_number=driver.find_elements_by_class_name("mat-form-field-autofill-control")[0]
phone_number.click()
phone_number.send_keys("9999999999")
time.sleep(2)
driver.find_elements_by_class_name("get-otp-btn")[0].click()
time.sleep(5)
# Enter the one-time password and submit the login form.
otp_input=driver.find_elements_by_class_name("otp-input")[0]
otp_input.click()
otp_input.send_keys("8888")
time.sleep(5)
driver.find_elements_by_class_name("login-btn")[0].click()
driver.maximize_window()
time.sleep(10)
# Navigate: open the account menu, pick the first entry, open the
# addresses panel, and start a new address.
driver.find_elements_by_class_name("material-icons")[1].click()
time.sleep(5)
driver.find_elements_by_tag_name("li")[0].click()
time.sleep(5)
driver.find_elements_by_class_name("mat-ripple")[3].click()
time.sleep(2)
driver.find_element_by_class_name("add-new-address").click()
time.sleep(2)
# Fill the address form. The mobile number ("someText") and the pincode
# ("22100") are intentionally invalid to trigger the validation errors
# that are detected and corrected further down.
name = driver.find_elements_by_class_name("mat-form-field-autofill-control")[0]
name.click()
name.send_keys("Saubhagya")
number = driver.find_elements_by_class_name("mat-form-field-autofill-control")[1]
number.click()
number.send_keys("someText")
pincode = driver.find_elements_by_class_name("mat-form-field-autofill-control")[2]
pincode.click()
pincode.send_keys("22100")
address = driver.find_elements_by_class_name("mat-form-field-autofill-control")[3]
address.click()
address.send_keys("My new address")
time.sleep(5)
# Pick country (option index 99) and state (option index 22) from the
# material select dropdowns.
country = driver.find_elements_by_class_name("mat-select-placeholder")[0]
country.click()
time.sleep(2)
driver.find_elements_by_class_name("mat-option-text")[99].click()
city = driver.find_elements_by_class_name("mat-form-field-autofill-control")[4]
city.click()
city.send_keys("Delhi")
time.sleep(5)
state = driver.find_elements_by_class_name("mat-select-placeholder")[0]
state.click()
time.sleep(2)
driver.find_elements_by_class_name("mat-option")[22].click()
# Scan the rendered messages for the mobile-number validation error and,
# if present, replace the bad value with a valid number.
for i in range(0, len(driver.find_elements_by_class_name("ng-star-inserted"))-1):
    if (driver.find_elements_by_class_name("ng-star-inserted")[i].text=="Mobile must be a valid mobile number"):
        print("Error detected in mobile number")
        number = driver.find_elements_by_class_name("mat-form-field-autofill-control")[1]
        number.click()
        number.clear()
        number.send_keys("7979901194")
        print("Error in mobile number corrected")
        break
# Same scan-and-fix for the pincode validation error.
for i in range(0, len(driver.find_elements_by_class_name("ng-star-inserted"))-1):
    if (driver.find_elements_by_class_name("ng-star-inserted")[i].text=="Pin Code must be valid"):
        print("Error detected in pincode")
        pincode = driver.find_elements_by_class_name("mat-form-field-autofill-control")[2]
        pincode.click()
        pincode.clear()
        pincode.send_keys("110030")
        print("Error in pincode corrected")
        break
# Save the (corrected) address.
driver.find_elements_by_class_name("add-new-address")[0].click()
print("New address saved")
#!/usr/bin/env python3
# Copyright 2019 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
import shutil
import subprocess
from glob import glob
from common import die, green
def main() -> None:
    """Entry point: verify shellcheck is available, then lint the repo's scripts."""
    ensure_shellcheck_installed()
    run_shellcheck()
def ensure_shellcheck_installed() -> None:
    """Exit with an error message if `shellcheck` is not on the PATH.

    Fix: the message was garbled ("You may download this your operating
    system's package manager"); inserted the missing "via".
    """
    if shutil.which("shellcheck") is None:
        die("`shellcheck` not installed! You may download this via your operating system's "
            "package manager, such as brew, apt, or yum. "
            "See https://github.com/koalaman/shellcheck#installing.")
def run_shellcheck() -> None:
    """Run shellcheck over every .sh file in the repo plus ./pants; die on failures."""
    scripts = glob("./**/*.sh", recursive=True)
    scripts.append("./pants")
    try:
        subprocess.run(["shellcheck", "--shell=bash", *scripts], check=True)
    except subprocess.CalledProcessError:
        die("Please fix the above errors and run again.")
    else:
        green("./pants passed the shellcheck!")
if __name__ == "__main__":
main()
|
import math
def Fahrenheit_to_Kelvin(F):
    """Convert a temperature from degrees Fahrenheit to Kelvin.

    Fix: the absolute-zero offset is 273.15 K; the original used 273.5,
    biasing every converted temperature by +0.35 K.
    """
    return 273.15 + ((F - 32.0) * (5.0 / 9.0))
# --- propellant / chamber constants ---------------------------------------
Pt=2068000 #1500psi --> Pa #Pt is chamber pressure
gam = 1.3 #property of your working fluid
Tt=Fahrenheit_to_Kelvin(3539.93) #temperature in the chamber
p0=101352.932 #free stream pressure outside nozzle -> Pa 1 atm?
AverageMolecularWeight=0.024049 #this is in kg. Not typical for molecular weight
R=8.314462 #gas constant
# Prompt until a valid sizing mode is chosen: derive the throat radius
# from a target mass flow (1) or enter the radius directly (2).
while True:
    print("""Select input parameter by number
1. Mass Flow
2. Throat radius""")
    mode=input()
    if mode=="1":
        userMassFlow=float(input("Input Desired Mass Flow in kg/s"))
        # Throat radius obtained by inverting the choked mass-flow
        # relation (the same mdot expression used further below).
        r = (2**((-1*(gam+1))/(4*(gam-1)))*(gam+1)**(-(gam+1)/(2*(2-(2*gam))))*math.sqrt(userMassFlow)*Tt**(1/4))/(math.sqrt(3.14159)*math.sqrt(Pt)*(gam/R)**(1/4))
        break
    elif mode=="2":
        r=float(input("Input Desired throat radius in meters"))
        break
    else:
        print("Input not recognised. Please try again.")
# Specific gas constant of the working fluid, J/(kg*K).
Rspecific=R/AverageMolecularWeight
#machExit=math.sqrt((2/(gam-1))*(Pt/p0)**((gam-1)/gam)-1) #WROOOOOOONNNG?
# Isentropic expansion from chamber to ambient pressure -> exit temperature.
Texit=((p0/Pt)**((gam - 1)/gam))*Tt
# Exit velocity from the ideal-rocket (RPE) velocity equation.
Vexit=math.sqrt(((2*gam)/(gam-1))*Rspecific*Tt*(1-(p0/Pt)**((gam-1)/gam)))
# NOTE(review): the speed of sound here uses the universal R rather than
# Rspecific -- confirm which is intended.
machExit=Vexit/math.sqrt(gam*R*Texit) #derived. more below
#AoverAstar=9.297 #get from calculator
#to get area at exit for use in base equations
Athroat=3.1415*(r**2.0)
# Area ratio A/A* from the isentropic area-Mach relation.
AoverAstar=(((gam+1)/2)**(-1*((gam+1)/(2*(gam-1)))))*(((1+((gam-1)/2)*((machExit)**2))**((gam+1)/(2*(gam-1))))/machExit)
Aexit=Athroat*AoverAstar
#base equasions
# Choked mass flow through the throat.
mdot=((Athroat*Pt)/math.sqrt(Tt))*math.sqrt(gam/R)*((gam+1)/2)**(-1*((gam+1)/(2*(gam-1))))
#ToverTt=((1 + (machExit**2) * ((gam-1)/2))**(-1)) updated below with RPE eq above
#Texit=ToverTt*Tt #calculated earlier from Tx/Ty=(px/py)^((k-1)/k) on page 48 of RPE
ToverTt=Texit/Tt
PeOverPt=((1+(machExit**2)*((gam-1)/2))**(-1*(gam/(gam-1))))
Pexit=PeOverPt*Pt
#VexitFromMach = machExit * math.sqrt(gam * R * Texit) #Already solved for without using mach@exit. See RPE page 52
# Thrust = momentum term + pressure-imbalance term.
F = mdot * Vexit + (Pexit - p0) * Aexit
print("Force: "+str(round(F, 4))+"\n")
print("Mass Flow Rate: "+str(round(mdot, 4)))
print("Choke Flow diameter: "+str(round(2*r, 4)))
print("Exit diameter: "+str(round(2*r*AoverAstar, 4)))
print("A/A*: "+str(round(AoverAstar, 4))+"\n")
print("Mach at exit: "+str(round(machExit, 4)))
print("Temperature at exit: "+str(round(Texit, 4)))
print("Pressure at exit: "+str(round(Pexit, 4)))
print("Velocity at exit: "+str(round(Vexit, 4))+"\n")
#print("Velocity at exit from Mach: "+str(round(VexitFromMach, 4))+"\n") #equal to above line
print("Specific Gas constant: " + str(Rspecific))
print("T/Tt: " + str(ToverTt))
print("Pe/Pt: " + str(PeOverPt))
import unittest
from my_jc import ActionProcessor
class JCTest(unittest.TestCase):
    """Tests for ActionProcessor.

    Contract under test: addAction(json_str) returns 0 when the payload is a
    well-formed {"action": <non-empty str>, "time": <number>} object and -1
    otherwise; getStats() renders per-action averages, or '' when no valid
    action has been recorded. Refactor: the 32 copy-pasted negative tests
    and the repeated accept-then-check sequences now share two helpers; all
    test method names, payload strings, and expected stats are unchanged.
    """

    # Shared fixtures for the mixed valid/invalid scenarios.
    _BAD = '{"action:"run", "time":1}'
    _WORKOUT = (
        '{"action":"run", "time":10}',
        '{"action":"jog", "time":1}',
        '{"action":"jump", "time":1.0}',
        '{"action":"swim", "time":399}',
        '{"action":"run", "time":50}',
        '{"action":"walk", "time":1}',
        '{"action":"jump", "time":100.0}',
        '{"action":"run", "time":200}',
    )
    _WORKOUT_STATS = ('[\n\t{"action":"run", "avg":86.67},'
                      '\n\t{"action":"jog", "avg":1},'
                      '\n\t{"action":"jump", "avg":50.5},'
                      '\n\t{"action":"swim", "avg":399},'
                      '\n\t{"action":"walk", "avg":1}\n]\n')

    # ---- helpers ---------------------------------------------------------

    def _assert_rejected(self, data):
        """A malformed payload is rejected (-1) and leaves the stats empty."""
        s = ActionProcessor()
        self.assertEqual(s.addAction(data), -1)
        self.assertEqual(s.getStats(), '')

    def _add_all(self, s, *payloads):
        """Add each payload to *s*, asserting every one is accepted (0)."""
        for data in payloads:
            self.assertEqual(s.addAction(data), 0)

    # ---- negative tests (malformed payloads) -----------------------------

    def test_neg_1(self):
        self._assert_rejected('{"abc":"run", "time": 10}')

    def test_neg_2(self):
        self._assert_rejected('{"action":"run"}')

    def test_neg_3(self):
        self._assert_rejected('{"time":"run"}')

    def test_neg_4(self):
        self._assert_rejected('{"action":"", "time":10}')

    def test_neg_5(self):
        self._assert_rejected('{"":"run", "time":10}')

    def test_neg_6(self):
        self._assert_rejected('{"action":"run", "":10}')

    def test_neg_7(self):
        self._assert_rejected('{"action":"run", time:10}')

    def test_neg_8(self):
        self._assert_rejected('{"action":run, "time":10}')

    def test_neg_9(self):
        self._assert_rejected('{"action":"run", "time":"10"}')

    def test_neg_10(self):
        self._assert_rejected('{"action":"run ", "time":10}')

    def test_neg_11(self):
        self._assert_rejected('{"action":"run", "time":12345678901}')

    def test_neg_12(self):
        self._assert_rejected('{"action ":"run", "time":10}')

    def test_neg_13(self):
        self._assert_rejected('{"action":"run", " time":10}')

    def test_neg_14(self):
        self._assert_rejected('{"action":10, "time":"run"}')

    def test_neg_15(self):
        self._assert_rejected('{"time":10, "action":"run"}')

    def test_neg_16(self):
        self._assert_rejected('{"Action":"run", "time":10}')

    def test_neg_17(self):
        self._assert_rejected('{"action":"run", "Time":10}')

    def test_neg_18(self):
        # NOTE: same payload as test_neg_6; kept so the test count and
        # names stay identical to the original suite.
        self._assert_rejected('{"action":"run", "":10}')

    def test_neg_19(self):
        self._assert_rejected('{"action":"run", "ime":10}')

    def test_neg_20(self):
        self._assert_rejected('{"action":"run", "time":10s}')

    def test_neg_21(self):
        self._assert_rejected('{"action":"run" "time":10}')

    def test_neg_22(self):
        self._assert_rejected('{"action":"run". "time":10}')

    def test_neg_23(self):
        self._assert_rejected('{"action";"run", "time":10}')

    def test_neg_24(self):
        self._assert_rejected('{"action":"run", "time";10}')

    def test_neg_25(self):
        self._assert_rejected('["action":"run", "time":10]')

    def test_neg_26(self):
        self._assert_rejected('{"action":"run", "time":10]')

    def test_neg_27(self):
        self._assert_rejected('{{"action":"run", "time":10}')

    def test_neg_28(self):
        self._assert_rejected('{"action":"run, "time":10}}')

    def test_neg_29(self):
        self._assert_rejected('"{action":"run", "time":10}')

    def test_neg_30(self):
        self._assert_rejected('{"action":"run", "time:"10}')

    def test_neg_31(self):
        self._assert_rejected('{"action:run", "time":10}')

    def test_neg_32(self):
        self._assert_rejected('{action:run, time:10}')

    # ---- functionality tests ---------------------------------------------

    def test_action_1(self):
        # No actions added: stats must be empty.
        s = ActionProcessor()
        self.assertEqual(s.getStats(), '')

    def test_action_2(self):
        s = ActionProcessor()
        self._add_all(s, '{"action":"run", "time":0}')
        self.assertEqual(s.getStats(), '[\n\t{"action":"run", "avg":0}\n]\n')

    def test_action_3(self):
        s = ActionProcessor()
        self._add_all(s, '{"action":"run", "time":10}')
        self.assertEqual(s.getStats(), '[\n\t{"action":"run", "avg":10}\n]\n')

    def test_action_4(self):
        s = ActionProcessor()
        self._add_all(s, '{"action":"run", "time":0}', '{"action":"run", "time":1.0}')
        self.assertEqual(s.getStats(), '[\n\t{"action":"run", "avg":1.0}\n]\n')

    def test_action_5(self):
        s = ActionProcessor()
        self._add_all(s, '{"action":"run", "time":0}', '{"action":"run", "time":10}')
        self.assertEqual(s.getStats(), '[\n\t{"action":"run", "avg":10}\n]\n')

    def test_action_6(self):
        s = ActionProcessor()
        self._add_all(s, '{"action":"run", "time":0}', '{"action":"run", "time":0}')
        self.assertEqual(s.getStats(), '[\n\t{"action":"run", "avg":0}\n]\n')

    def test_action_7(self):
        s = ActionProcessor()
        self._add_all(s, '{"action":"run", "time":10}', '{"action":"run", "time":1}')
        self.assertEqual(s.getStats(), '[\n\t{"action":"run", "avg":5.5}\n]\n')

    def test_action_8(self):
        s = ActionProcessor()
        self._add_all(s, '{"action":"run", "time":10}', '{"action":"jog", "time":1}',
                      '{"action":"run", "time":1.0}')
        self.assertEqual(s.getStats(), '[\n\t{"action":"run", "avg":5.5},\n\t{"action":"jog", "avg":1}\n]\n')

    def test_action_9(self):
        s = ActionProcessor()
        self._add_all(s, '{"action":"run", "time":10}', '{"action":"jog", "time":1}',
                      '{"action":"run", "time":1.0}', '{"action":"jog", "time":399}')
        self.assertEqual(s.getStats(), '[\n\t{"action":"run", "avg":5.5},\n\t{"action":"jog", "avg":200}\n]\n')

    def test_action_10(self):
        s = ActionProcessor()
        self._add_all(s, *self._WORKOUT[:4])
        self.assertEqual(s.getStats(), '[\n\t{"action":"run", "avg":10},\n\t{"action":"jog", "avg":1},\n\t{"action":"jump", "avg":1.0},\n\t{"action":"swim", "avg":399}\n]\n')

    def test_action_11(self):
        s = ActionProcessor()
        self._add_all(s, *self._WORKOUT)
        self.assertEqual(s.getStats(), self._WORKOUT_STATS)

    # ---- mixed valid/invalid tests ---------------------------------------

    def test_mix_1(self):
        # Invalid payload first: must not affect later valid additions.
        s = ActionProcessor()
        self.assertEqual(s.addAction(self._BAD), -1)
        self._add_all(s, '{"action":"run", "time":10}', '{"action":"run", "time":1}')
        self.assertEqual(s.getStats(), '[\n\t{"action":"run", "avg":5.5}\n]\n')

    def test_mix_2(self):
        # Invalid payload in the middle of valid ones.
        s = ActionProcessor()
        self._add_all(s, '{"action":"run", "time":10}')
        self.assertEqual(s.addAction(self._BAD), -1)
        self._add_all(s, '{"action":"run", "time":1}')
        self.assertEqual(s.getStats(), '[\n\t{"action":"run", "avg":5.5}\n]\n')

    def test_mix_3(self):
        # Invalid payload last.
        s = ActionProcessor()
        self._add_all(s, '{"action":"run", "time":10}', '{"action":"run", "time":1}')
        self.assertEqual(s.addAction(self._BAD), -1)
        self.assertEqual(s.getStats(), '[\n\t{"action":"run", "avg":5.5}\n]\n')

    def test_mix_4(self):
        s = ActionProcessor()
        self.assertEqual(s.addAction(self._BAD), -1)
        self._add_all(s, *self._WORKOUT)
        self.assertEqual(s.getStats(), self._WORKOUT_STATS)

    def test_mix_5(self):
        s = ActionProcessor()
        self._add_all(s, *self._WORKOUT[:4])
        self.assertEqual(s.addAction(self._BAD), -1)
        self._add_all(s, *self._WORKOUT[4:])
        self.assertEqual(s.getStats(), self._WORKOUT_STATS)

    def test_mix_6(self):
        s = ActionProcessor()
        self._add_all(s, *self._WORKOUT)
        self.assertEqual(s.addAction(self._BAD), -1)
        self.assertEqual(s.getStats(), self._WORKOUT_STATS)

    def test_mix_7(self):
        # Invalid payload interleaved at the start, middle, and end.
        s = ActionProcessor()
        self.assertEqual(s.addAction(self._BAD), -1)
        self._add_all(s, *self._WORKOUT[:4])
        self.assertEqual(s.addAction(self._BAD), -1)
        self._add_all(s, *self._WORKOUT[4:])
        self.assertEqual(s.addAction(self._BAD), -1)
        self.assertEqual(s.getStats(), self._WORKOUT_STATS)
# Allow running this test module directly with `python <file>`.
if __name__ == '__main__':
    unittest.main()
|
from django.test import TestCase
from django.contrib.auth.models import User
from .models import TravelType, Trip, Questions
import datetime
from .forms import TripForm, TravelTypeForm, QuestionsForm
from django.urls import reverse_lazy, reverse
# Create your tests here.
class TravelTypeTest(TestCase):
    """Unit tests for the TravelType model (no database writes required)."""

    def setUp(self):
        self.type = TravelType(travelname='Yellow stone')

    def test_typestring(self):
        """__str__ should render the travel name."""
        self.assertEqual(str(self.type), 'Yellow stone')

    def test_tablename(self):
        """The model must map to the 'traveltype' database table."""
        self.assertEqual(str(TravelType._meta.db_table), 'traveltype')
class TripTest(TestCase):
    """Unit tests for the Trip model."""

    def setUp(self):
        self.type = TravelType(travelname='red tree')
        self.user = User(username='user1')
        self.trip = Trip(
            travelname='Yellow Stone Tour',
            traveltype=self.type,
            user=self.user,
            dateentered=datetime.date(2021, 3, 5),
            travelprice=979.877,
            reservedurl='http://wwww.yellowstone.html',
            questions="Havefun",
        )

    def test_string(self):
        """__str__ should render the trip's travel name."""
        self.assertEqual(str(self.trip), 'Yellow Stone Tour')

    def test_discount(self):
        """discountPercent() should be 5% of the travel price."""
        expected = self.trip.travelprice * .05
        self.assertEqual(self.trip.discountPercent(), expected)
class QuestionTest(TestCase):
    # NOTE(review): this class only builds fixtures and defines no test_*
    # methods, so nothing here is actually asserted -- consider adding a
    # __str__ / field test for Questions.
    def setUp(self):
        self.type = TravelType(travelname = 'red tree')
        self.user = User(username='user1')
        self.trip = Trip(travelname='Yellow Stone Tour', traveltype=self.type, user=self.user, dateentered = datetime.date(2021,3,5), travelprice=979.877, reservedurl='http://wwww.yellowstone.html', questions="Havefun")
        self.question = Questions(questiontitle="Where can I go", user=self.user, trip=self.trip, questiondate=datetime.date(2021,3,5), questiontext="Fun!")
class NewTravelTypeForm(TestCase):
    """Validation test for TravelTypeForm."""

    def test_traveltypeform(self):
        data = {
            'travelname' : 'Yellow stone',
            'travelquestions' : 'Is it hot?',
            'dateentered' : '2021-3-7',
            'questions' : 'Fun!'
        }
        form = TravelTypeForm(data)
        # Fix: call is_valid(). Asserting the bound method object itself
        # is always truthy, so the original test could never fail.
        self.assertTrue(form.is_valid())
class NewTripForm(TestCase):
    """Validation test for TripForm."""

    def test_tripform(self):
        data = {
            'travelname' : 'Yellow Stone',
            'traveltype' : 'Trip',
            'user' : 'Hijiri',
            'dateentered' : '2021-3-7',
            'travelprice' : '2099',
            'reservedurl' : 'http://yellow.com',
            'questions' : 'Have fun!'
        }
        form = TripForm(data)
        # Fix: call is_valid(). Asserting the bound method object itself
        # is always truthy, so the original test could never fail.
        self.assertTrue(form.is_valid())
class NewQuestionsForm(TestCase):
    """Validation test for QuestionsForm."""

    def test_questionsform(self):
        data = {
            'questiontitle' : 'Yellow stone is big',
            'user' : 'Hijiri',
            'trip' : 'Yellow stone',
            'questiondate' : '2021-3-7',
            'questiontext' : 'Fun!'
        }
        form = QuestionsForm(data)
        # Fix: call is_valid(). Asserting the bound method object itself
        # is always truthy, so the original test could never fail.
        self.assertTrue(form.is_valid())
class New_Trip_Authentication_Test(TestCase):
    """Anonymous users must be redirected to login before creating a trip."""

    def setUp(self):
        # Fixtures: one user plus a travel type and trip so related data exists.
        self.test_user= User.objects.create_user(username='testuser1', password='P@ssw0rd1')
        self.type=TravelType.objects.create(travelname = 'Statue of liberty', travelquestions = 'fun', dateentered = datetime.date(2021,1,29), questions='Super')
        self.trip=Trip.objects.create(travelname='Yellow Stone Tour', traveltype=self.type, user=self.test_user, dateentered = datetime.date(2021,1,29), travelprice=979.877, reservedurl='http://wwww.yellowstone.html', questions="Havefun")

    def test_redirect_if_not_logged_in(self):
        # Unauthenticated GET should bounce to the login page with ?next=.
        response=self.client.get(reverse('newtrip'))
        self.assertRedirects(response, '/accounts/login/?next=/cruise/newtrip/')
class New_TravelType_Authentication_Test(TestCase):
    """Anonymous users must be redirected to login before creating a travel type."""

    def setUp(self):
        # Fixtures: one user plus a travel type and trip so related data exists.
        self.test_user=User.objects.create_user(username='testuser1', password='P@ssw0rd1')
        self.type=TravelType.objects.create(travelname = 'Statue of liberty', travelquestions = 'fun', dateentered = datetime.date(2021,1,29), questions='Super')
        self.trip=Trip.objects.create(travelname='Yellow Stone Tour', traveltype=self.type, user=self.test_user, dateentered = datetime.date(2021,1,29), travelprice=979.877, reservedurl='http://wwww.yellowstone.html', questions="Havefun")
        # self.traveltype=TravelType.objects.create(questions='Fun!')

    def test_redirect_if_not_logged_in(self):
        # Unauthenticated GET should bounce to the login page with ?next=.
        response=self.client.get(reverse('newtraveltype'))
        self.assertRedirects(response, '/accounts/login/?next=/cruise/newtraveltype/')
class New_Questions_Authentication_Test(TestCase):
    """Anonymous users must be redirected to login before posting a question."""

    def setUp(self):
        # Fixtures: user, travel type, trip, and a question tied to them.
        self.test_user=User.objects.create_user(username='testuser1', password='P@ssw0rd1')
        self.type=TravelType.objects.create(travelname = 'Statue of liberty', travelquestions = 'fun', dateentered = datetime.date(2021,1,29), questions='Super')
        self.trip=Trip.objects.create(travelname='Yellow Stone Tour', traveltype=self.type, user=self.test_user, dateentered = datetime.date(2021,3,5), travelprice=979.877, reservedurl='http://wwww.yellowstone.html', questions="Havefun")
        self.question=Questions.objects.create(questiontitle="Where can I go", user=self.test_user, trip=self.trip, questiondate=datetime.date(2021,3,5), questiontext="Fun!")

    def test_redirect_if_not_logged_in(self):
        # Unauthenticated GET should bounce to the login page with ?next=.
        response=self.client.get(reverse('newquestions'))
        self.assertRedirects(response, '/accounts/login/?next=/cruise/newquestions/')
|
from pynag.Model import Host
from pynag.Control.Command import send_command
from wb_services.hosts import Hosts
from wb_services.hosts.ttypes import Datapoint
# Nagios hostgroup that holds all Atmosphere-managed VMs.
ATMO_HOSTGROUP_NAME = 'atmo-vms'
# Nagios host template ("use" directive) applied to every Atmosphere VM.
ATMO_HOST_TEMPLATE = 'atmo_vm'
# Config file into which the generated host definitions are written.
ATMO_HOST_FILE = '/etc/nagios/atmo-hosts.cfg'
class Handler(Hosts.Iface):
    """Thrift service handler that syncs Atmosphere hosts into Nagios and
    serves metrics back out of Graphite."""

    def __init__(self, graphite):
        # Graphite client used by get_metrics().
        self.graphite = graphite

    @staticmethod
    def to_pynag_host(host):
        """Translate a thrift host record into a pynag Host object."""
        pynag_host = Host()
        pynag_host.host_name = host.id
        pynag_host.alias = pynag_host.display_name = host.display_name
        pynag_host.address = host.address
        pynag_host.hostgroups = ATMO_HOSTGROUP_NAME
        pynag_host.use = ATMO_HOST_TEMPLATE
        pynag_host.set_filename(ATMO_HOST_FILE)
        return pynag_host

    def set_hosts(self, hosts):
        """
        Write Nagios config file; restart daemon
        """
        # Drop every host currently in the Atmosphere hostgroup ...
        for existing in Host.objects.filter(hostgroups=ATMO_HOSTGROUP_NAME):
            existing.delete()
        # ... then recreate the group from the passed-in host list.
        for new_host in (Handler.to_pynag_host(h) for h in hosts):
            new_host.save()
        # Restart the nagios daemon so it picks up the new configuration.
        send_command("RESTART_PROGRAM")

    def get_metrics(self, host_name, key):
        """
        Query graphite for metrics
        """
        result = {}
        for series in self.graphite.get_stats(host_name, query=key):
            result[series[u'target']] = [
                Datapoint(time=point[1], value=point[0])
                for point in series[u'datapoints']
            ]
        return result
|
# Usage reference: halite -d "30 30" "python3 shummiev3-6.py" "python3 shummiev3-5.py"
import subprocess
import re
from collections import Counter

num_games = 100
games_played = 0
rank_list = []

# Run the same match repeatedly and tally each bot's finishing rank.
while games_played < num_games:
    if games_played % 5 == 0:
        print("Running Game #:" + str(games_played))
    stdoutdata = subprocess.getoutput('halite -d "30 30" "python shummiev7-10-1.py" "python shummiev7-10-2.py" "python shummiev7-10-3.py"')
    # Pull "<bot name>" / "<rank digit>" pairs out of the game transcript.
    players = re.findall("Player #[0-9], (.*), came", stdoutdata)
    rank = re.findall("came in rank #([0-9])", stdoutdata)
    for player, place in zip(players, rank):
        rank_list.append(str(player) + ": " + str(place))
    # Print the running tally after every game.
    print(Counter(rank_list).most_common())
    games_played += 1
|
from bota.web_scrap.dotavoyance.dotavoyance import Dotavoyance
from bota.web_scrap.heroes_process import find_hero_name
from bota.image_processing import add_border_to_image, write_text_pil
import cv2
import os
from bota import constant
from bota.help import TEAM_CMD_EXAMPLE
# Single module-level scraper client shared by every command handled here.
DV = Dotavoyance()
# Skill keywords the user may append to the last hero name (e.g. "!team lion high").
# 'lowerst' is kept for backward compatibility with the old typo; 'lowest' added.
SORT_BY_KEYS = ['high', 'med', 'low', 'highest', 'higher', 'normal', 'medium', 'norm', 'mid', 'top', 'bot', 'lowest',
                'lowerst', 'lower', 'middle']
# Canonicalises every alias onto the three SKILL_DICT keys. Bug fix: 'norm'
# previously mapped to itself, which is not a SKILL_DICT key and crashed
# get_team_mate() with a KeyError when a user typed "norm".
SORT_BY_ALT_KEYS = {'highest': 'high', 'higher': 'high', 'normal': 'med', 'medium': 'med', 'norm': 'med',
                    'mid': 'med', 'top': 'high', 'bot': 'low', 'lowest': 'low', 'lowerst': 'low', 'lower': 'low',
                    'middle': 'med'}
# Human-readable caption per canonical skill bracket ('' = no filter).
SKILL_DICT = {'high': 'High Skill', 'med': 'Normal Skill', 'low': 'Low Skill', '': 'All Skill'}
# Minimum sample size per number of requested heroes: larger hero sets match
# fewer games, so the threshold drops.
HERO_COUNT_MIN_GAMES = {1: 100, 2: 50, 3: 5, 4: 2}
def display(img):
    """Debug helper: show a BGR image with matplotlib (converted to RGB first)."""
    import matplotlib.pyplot as plt
    rgb = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
    plt.imshow(rgb)
    plt.show()
def extract_heroname_and_sortby(arguments):
    """Parse comma-split command arguments into hero names plus an optional skill keyword.

    The skill keyword is only recognised as the trailing token of the LAST
    argument (e.g. ["lion", " am high"]). Parsing stops at the first argument
    that fails hero-name lookup.

    :param arguments: list of raw argument strings (already split on ',')
    :return: (is_correct_flag, hero_names, sort_by, incorrect_hero_name) where
             sort_by is canonicalised through SORT_BY_ALT_KEYS and
             incorrect_hero_name is the first unrecognised input ('' if none).
    """
    hero_names = []
    sort_by = ''
    is_correct_flag = True
    incorrect_hero_name = ''
    for i, argument in enumerate(arguments, 1):
        candidate = argument
        if i == len(arguments):
            tokens = argument.split()
            # The final argument may carry a trailing skill keyword; strip it.
            if len(tokens) > 1 and tokens[-1] in SORT_BY_KEYS:
                sort_by = tokens[-1]
                candidate = " ".join(tokens[:-1])
        is_correct_flag, hero_name = find_hero_name(candidate)
        if not is_correct_flag:
            incorrect_hero_name = candidate
        hero_names.append(hero_name)
        if not is_correct_flag:
            break
    if sort_by in SORT_BY_ALT_KEYS:
        sort_by = SORT_BY_ALT_KEYS[sort_by]
    return is_correct_flag, hero_names, sort_by, incorrect_hero_name
def make_teammate_image(my_heroes, results, skill_level, bg_image=constant.DV_TEAM_BG_IMAGE):
    """Compose the teammate-suggestion image, annotated with win percentages.

    my_heroes: hero names picked by the user; drawn centered near the bottom.
    results: list of dicts with 'heroes' and 'win_percentage' keys
             (assumed shape of DV.get_teammates() rows - TODO confirm).
    skill_level: caption text drawn in the middle of the background.
    Returns the composed BGR image (numpy array).
    """
    bg_image = cv2.imread(bg_image)
    bg_h, bg_w, _ = bg_image.shape
    # Top-left anchor of the two suggestion columns (pixels).
    start_left = 50
    start_top = 170
    # The user's own heroes are laid out centered horizontally, 200px apart.
    my_hero_start_top = 675
    my_hero_start_left = ((bg_w // 2) - 85) - (((len(my_heroes) - 1) * 200) // 2)
    for i, hero_name in enumerate(my_heroes):
        hero_icon = cv2.imread(os.path.join(constant.ICON_PATH_BIG, hero_name + '.png'))
        hero_icon = add_border_to_image(hero_icon)
        icon_h, icon_w, _ = hero_icon.shape
        # NOTE: here x is the row (vertical) coordinate and y the column one.
        hero_icon_x = my_hero_start_top
        hero_icon_y = my_hero_start_left + (i * 200)
        bg_image[hero_icon_x: hero_icon_x + icon_h, hero_icon_y: hero_icon_y + icon_w, :] = hero_icon
    count = 0
    for i, result in enumerate(results):
        hero_names = result['heroes']
        for j, hero_name in enumerate(hero_names):
            if count >= 10:
                # Cap at 10 drawn suggestions.
                # NOTE(review): this only breaks the INNER loop; the outer loop
                # keeps iterating over results - confirm this is intended.
                break
            hero_icon = cv2.imread(os.path.join(constant.ICON_PATH_BIG, hero_name + '.png'))
            icon_h, icon_w, _ = hero_icon.shape
            k = i + j
            # Odd result indices go to the right column, even ones to the left;
            # rows advance every two entries, 90px apart.
            if i % 2 == 1:
                hero_icon_x = start_top + (((k) // 2) * 90)
                hero_icon_y = start_left + 700
            else:
                hero_icon_x = start_top + (((k + 1) // 2) * 90)
                hero_icon_y = start_left
            bg_image[hero_icon_x: hero_icon_x + icon_h, hero_icon_y: hero_icon_y + icon_w, :] = hero_icon
            # Draw the win percentage next to the icon (write_text_pil takes (col, row)).
            win_rate_x = hero_icon_x + (icon_h // 3)
            win_rate_y = hero_icon_y + (icon_w // 2) + 275
            percen = str(result['win_percentage']) + '%'
            pos = (win_rate_y, win_rate_x)
            bg_image = write_text_pil(bg_image, percen, pos, size=40)
            count += 1
    # Skill-level caption in the middle of the image.
    bg_image = write_text_pil(bg_image, skill_level, ((bg_image.shape[1] // 2) - 70, (bg_image.shape[0] // 2) - 50), size=45)
    return bg_image
def make_teammate_image_without_percentage(my_heroes, results, skill_level, bg_image=constant.DV_TEAM_BG_IMAGE_WITHOUT_WINRATE):
    """Compose the teammate-suggestion image without win percentages.

    my_heroes: hero names picked by the user; drawn centered near the bottom.
    results: list of dicts with a 'heroes' key (assumed shape of
             DV.get_teammates() rows - TODO confirm). Only the FIRST hero of
             each result is drawn (see the unconditional break below).
    skill_level: caption text drawn above the suggestion grid.
    Returns the composed BGR image (numpy array).
    """
    bg_image = cv2.imread(bg_image)
    bg_h, bg_w, _ = bg_image.shape
    # The user's own heroes are laid out centered horizontally, 200px apart.
    my_hero_start_top = 675
    my_hero_start_left = ((bg_w // 2) - 85) - (((len(my_heroes) - 1) * 200) // 2)
    for i, hero_name in enumerate(my_heroes):
        hero_icon = cv2.imread(os.path.join(constant.ICON_PATH_BIG, hero_name + '.png'))
        hero_icon = add_border_to_image(hero_icon)
        icon_h, icon_w, _ = hero_icon.shape
        # NOTE: here x is the row (vertical) coordinate and y the column one.
        hero_icon_x = my_hero_start_top
        hero_icon_y = my_hero_start_left + (i * 200)
        bg_image[hero_icon_x: hero_icon_x + icon_h, hero_icon_y: hero_icon_y + icon_w, :] = hero_icon
    # Top-left anchor of the suggestion grid (pixels).
    start_left = 160
    start_top = 270
    for i, result in enumerate(results):
        hero_names = result['heroes']
        for j, hero_name in enumerate(hero_names):
            image = cv2.imread(os.path.join(constant.ICON_PATH_BIG, hero_name + '.png'))
            image = cv2.resize(image, (int(constant.COUNTER_ICON_SHAPE[1] * 1.5), int(constant.COUNTER_ICON_SHAPE[0] * 1.5)))
            image = add_border_to_image(image)
            # Lay icons out in a COUNTER_MAX_COLUMN-wide grid indexed by i.
            x, y = start_top, start_left
            x = x + ((i // constant.COUNTER_MAX_COLUMN) * 150)
            y = y + ((i % constant.COUNTER_MAX_COLUMN) * image.shape[1]) + \
                ((i % constant.COUNTER_MAX_COLUMN) * constant.COUNTER_WIDTH_DIST)
            bg_image[x: x + image.shape[0], y: y + image.shape[1], :] = image
            # Unconditional: only the first hero of each result row is drawn.
            break
    bg_image = write_text_pil(bg_image, skill_level, ((bg_image.shape[1] // 2) - 70, start_top - 100), size=45)
    return bg_image
def get_team_mate(message_string):
    """Handle the '!team' chat command: suggest teammates for the given heroes.

    message_string: full command text, e.g. '!team lion, am high'.
    Returns (success_flag, summary_text, image_path_or_'', hero_names).
    """
    result = ''
    # Drop the command word, then split the remainder on commas: each chunk is
    # a hero name; the last one may end with a skill keyword.
    arguments = message_string.split()[1:]
    arguments = " ".join(arguments)
    arguments = arguments.split(',')
    is_correct_flag, hero_names, sort_by, incorrect_hero_name = extract_heroname_and_sortby(arguments)
    if len(hero_names) != len(set(hero_names)):
        # NOTE(review): ANY duplicate is rejected here, although the message
        # says "more than 2" - confirm which rule is intended.
        summary = "Cannot have more than 2 same hero in the team\n" + TEAM_CMD_EXAMPLE
        return is_correct_flag, summary, result, []
    if not is_correct_flag and len(message_string.split()) == 1:
        # Bare '!team' with no arguments at all.
        summary = f"Please provide a hero name: \n" + TEAM_CMD_EXAMPLE
        return is_correct_flag, summary, result, []
    if not is_correct_flag:
        summary = f"Could not find any hero name: **{incorrect_hero_name}**\n" + TEAM_CMD_EXAMPLE
        return is_correct_flag, summary, result, []
    # Require more games for smaller hero sets so suggestions are meaningful.
    min_games = HERO_COUNT_MIN_GAMES[len(hero_names)]
    flag, team_mate_raw = DV.get_teammates(hero_list=hero_names, sort_by=sort_by, min_games=min_games)
    # Drop 100% win-rate rows (presumably tiny samples - TODO confirm).
    team_mate = []
    for t in team_mate_raw:
        if t['win_percentage'] != 100:
            team_mate.append(t)
    # sort_by = DV.get_sort_col(sort_by)
    # Cache file name is deterministic in the (sorted) hero set + skill filter.
    file_name = "_".join(sorted(hero_names)) + '-' + sort_by + '.jpg'
    file_path = os.path.join(constant.DV_TEAM_IMAGE_PATH, file_name)
    if not flag:
        if team_mate_raw == 'timeout':
            summary = f"Sorry! Currently facing server down from https://www.dotavoyance"
            return False, summary, result, []
        elif len(hero_names) > 3:
            summary = f"Sorry! Could not find any results. Please try with **{len(hero_names) - 1}** Hero Names"
            return False, summary, result, []
        else:
            summary = f"Sorry! Could not find any results. Please try with other Hero Names"
            return False, summary, result, []
    skill_level = SKILL_DICT[sort_by]
    final_image = make_teammate_image_without_percentage(hero_names, team_mate, skill_level)
    cv2.imwrite(file_path, final_image)
    summary = f'eg 1: **`!team lion, am high`**\n' \
        f'eg 2: **`!team lion, am normal`**\n'
    return is_correct_flag, summary, file_path, hero_names
if __name__ == '__main__':
    # Manual smoke test for the teammate lookup.
    flag, summary, image_path, hero_names = get_team_mate("!team snap")
    print(flag, summary, image_path)
|
"""UI module of Colorium's Asset Management Tool. This module creates the UI using Colorium's CUI module."""
import maya.cmds as cmds
import colorium.ui as ui
import colorium.asset_type_definition as asset_type_definition
import colorium.scene_name_parser as scene_name_parser
import colorium.command as command
import colorium.data_binding as data_binding
import colorium.settings as settings
class AssetManagementToolUI(ui.CUI):
    """This class manages the creation process of the Colorium's Asset Management Tool UI from the layout to the data binding."""

    def build_ui(self):
        """Builds every section of the tool, top to bottom."""
        self.build_asset_information_section()
        self.build_save_options_section()
        self.build_publish_options_section()
        self.build_export_options_section()
        self.build_file_name_preview_section()
        self.build_path_preview_section()
        self.build_actions_section()

    def build_asset_information_section(self):
        """Builds the asset information section."""
        frm_asset_information = cmds.frameLayout("frm_asset_information", l="Asset Information", p=self.main_layout, mh=5, mw=5)
        # Asset type combo; its enabled state and value are two-way bound to the asset.
        type_input = ui.CComboInput("type", "Asset's type", frm_asset_information,
            enabled=True,
            items=asset_type_definition.names(),
            default_value=asset_type_definition.get_type_by_code(self.controller.asset.type).name,
        )
        data_binding.bind(type_input, "enabled", self.controller.asset, "has_type")
        data_binding.bind(type_input, "value", self.controller.asset, "type")
        self.add_control(type_input)
        # Asset name text field.
        name_input = ui.CTextInput("name", "Asset's name", frm_asset_information,
            enabled=True,
            default_value=self.controller.asset.name,
        )
        data_binding.bind(name_input, "enabled", self.controller.asset, "has_name")
        data_binding.bind(name_input, "text", self.controller.asset, "name")
        self.add_control(name_input)
        # Optional variant number (toggleable; enabled only when the asset has one).
        variant_input = ui.CIntInput("variant", "Asset's variant", frm_asset_information,
            enabled=self.controller.asset.has_variant,
            toggleable=True,
            min_value=1,
            max_value=99,
            default_value=self.controller.asset.variant,
        )
        data_binding.bind(variant_input, "enabled", self.controller.asset, "has_variant")
        data_binding.bind(variant_input, "value", self.controller.asset, "variant")
        self.add_control(variant_input)
        # Optional scene number.
        scene_input = ui.CIntInput("scene", "Asset's scene", frm_asset_information,
            enabled=self.controller.asset.has_scene,
            toggleable=True,
            min_value=0,
            max_value=995,
            default_value=self.controller.asset.scene,
        )
        data_binding.bind(scene_input, "enabled", self.controller.asset, "has_scene")
        data_binding.bind(scene_input, "value", self.controller.asset, "scene")
        self.add_control(scene_input)
        # Optional shot number.
        shot_input = ui.CIntInput("shot", "Asset's shot", frm_asset_information,
            enabled=self.controller.asset.has_shot,
            toggleable=True,
            min_value=0,
            max_value=995,
            default_value=self.controller.asset.shot,
        )
        data_binding.bind(shot_input, "enabled", self.controller.asset, "has_shot")
        data_binding.bind(shot_input, "value", self.controller.asset, "shot")
        self.add_control(shot_input)
        # Version number is always enabled.
        version_input = ui.CIntInput("version", "Asset's version", frm_asset_information,
            enabled=True,
            min_value=1,
            max_value=99,
            default_value=self.controller.asset.version,
        )
        data_binding.bind(version_input, "value", self.controller.asset, "version")
        self.add_control(version_input)

    def build_save_options_section(self):
        """Builds the save options section."""
        frm_save_options = cmds.frameLayout("frm_save_options", l="Save Options", p=self.main_layout, mh=5, mw=5, cll=True)
        # Save command selector; the controller swaps the save command on change.
        save_input = ui.CComboInput("save_type", "Save type", frm_save_options,
            enabled=False,
            toggleable=True,
            items=command.get_command_names_by_action("save"),
            changed_command=self.controller.set_save_config_command,
            default_value=self.controller.asset.save_config.command.name,
        )
        self.add_control(save_input)
        # Checkbox read by the controller's commit() to bump the version.
        increment_option = ui.CCheckControl("increment_on_save", "Increment on save", frm_save_options,
            default_value=False,
        )
        self.add_control(increment_option)

    def build_publish_options_section(self):
        """Builds the publish options section."""
        frm_publish_options = cmds.frameLayout("frm_publish_options", l="Publish Options", p=self.main_layout, mh=5, mw=5, cll=True)
        publish_input = ui.CComboInput("publish_type", "Publish type", frm_publish_options,
            enabled=False,
            toggleable=True,
            items=command.get_command_names_by_action("publish"),
            changed_command=self.controller.set_publish_config_command,
            default_value=self.controller.asset.publish_config.command.name,
        )
        self.add_control(publish_input)

    def build_export_options_section(self):
        """Builds the export options section."""
        frm_export_options = cmds.frameLayout("frm_export_options", l="Export Options", p=self.main_layout, mh=5, mw=5, cll=True)
        export_input = ui.CComboInput("export_type", "Export type", frm_export_options,
            enabled=False,
            toggleable=True,
            items=command.get_command_names_by_action("export"),
            changed_command=self.controller.set_export_config_command,
            default_value=self.controller.asset.export_config.command.name,
        )
        self.add_control(export_input)

    def build_file_name_preview_section(self):
        """Builds the file name preview section."""
        frm_file_name_preview = cmds.frameLayout("frm_file_name_preview", l="File Name Preview", p=self.main_layout, mh=5, mw=5, cll=True)
        # Each preview field becomes editable only when its name is overridden.
        save_file_name_input = ui.CTextInput("save_file_name", "Save file name", frm_file_name_preview,
            enabled=False,
            toggleable=True,
            default_value=self.controller.asset.save_config.file_name,
        )
        data_binding.bind(save_file_name_input, "enabled", self.controller.asset.save_config, "file_name_overridden")
        data_binding.bind(save_file_name_input, "text", self.controller.asset.save_config, "file_name")
        self.add_control(save_file_name_input)
        publish_file_name_input = ui.CTextInput("publish_file_name", "Publish file name", frm_file_name_preview,
            enabled=False,
            toggleable=True,
            default_value=self.controller.asset.publish_config.file_name,
        )
        data_binding.bind(publish_file_name_input, "enabled", self.controller.asset.publish_config, "file_name_overridden")
        data_binding.bind(publish_file_name_input, "text", self.controller.asset.publish_config, "file_name")
        self.add_control(publish_file_name_input)
        export_file_name_input = ui.CTextInput("export_file_name", "Export file name", frm_file_name_preview,
            enabled=False,
            toggleable=True,
            default_value=self.controller.asset.export_config.file_name,
        )
        data_binding.bind(export_file_name_input, "enabled", self.controller.asset.export_config, "file_name_overridden")
        data_binding.bind(export_file_name_input, "text", self.controller.asset.export_config, "file_name")
        self.add_control(export_file_name_input)

    def build_path_preview_section(self):
        """Builds the path preview section."""
        frm_path_preview = cmds.frameLayout("frm_path_preview", l="Path Preview", p=self.main_layout, mh=5, mw=5, cll=True)
        # Each path field pairs an override toggle with an "open in explorer" command.
        save_path_input = ui.CFilePathInput("save_path", "Save path", frm_path_preview,
            enabled=False,
            toggleable=True,
            default_value=self.controller.asset.save_config.path,
            open_command=self.controller.open_save_path_explorer,
        )
        data_binding.bind(save_path_input, "enabled", self.controller.asset.save_config, "path_overridden")
        data_binding.bind(save_path_input, "text", self.controller.asset.save_config, "path")
        self.add_control(save_path_input)
        publish_path_input = ui.CFilePathInput("publish_path", "Publish path", frm_path_preview,
            enabled=False,
            toggleable=True,
            default_value=self.controller.asset.publish_config.path,
            open_command=self.controller.open_publish_path_explorer,
        )
        data_binding.bind(publish_path_input, "enabled", self.controller.asset.publish_config, "path_overridden")
        data_binding.bind(publish_path_input, "text", self.controller.asset.publish_config, "path")
        self.add_control(publish_path_input)
        export_path_input = ui.CFilePathInput("export_path", "Export path", frm_path_preview,
            enabled=False,
            toggleable=True,
            default_value=self.controller.asset.export_config.path,
            open_command=self.controller.open_export_path_explorer,
        )
        data_binding.bind(export_path_input, "enabled", self.controller.asset.export_config, "path_overridden")
        data_binding.bind(export_path_input, "text", self.controller.asset.export_config, "path")
        self.add_control(export_path_input)

    def build_actions_section(self):
        """Builds the actions section."""
        frm_actions = cmds.frameLayout("frm_actions", l="Actions", p=self.main_layout, mh=5, mw=5)
        # Right-aligned row of action buttons wired to controller commands.
        lay_actions = ui.CInlineLayout("actions", "Actions", frm_actions, [], "right")
        cancel_button = ui.CButtonControl("cancel", "Cancel", lay_actions.name, self.controller.cancel)
        lay_actions.add_children(cancel_button)
        open_button = ui.CButtonControl("open", "Open", lay_actions.name, self.controller.open)
        lay_actions.add_children(open_button)
        create_button = ui.CButtonControl("create", "Create", lay_actions.name, self.controller.create)
        lay_actions.add_children(create_button)
        delete_button = ui.CButtonControl("delete", "Delete", lay_actions.name, self.controller.delete)
        lay_actions.add_children(delete_button)
        commit_button = ui.CButtonControl("commit", "Commit", lay_actions.name, self.controller.commit)
        lay_actions.add_children(commit_button)
        self.add_control(lay_actions)
class AssetManagementToolController(ui.CController):
"""This class controls the interactions between Colorium's Asset Management Tool UI and the business layer."""
@property
def asset(self):
"""The asset manipulated from the UI."""
return self._asset
@asset.setter
def asset(self, value):
self._asset = value
def __init__(self):
super(AssetManagementToolController, self).__init__()
self._asset = scene_name_parser.parse_scene_name_to_asset()
def display_ui_callback(self):
pass
def cancel(self, value):
"""Command used to close the tool."""
cmds.deleteUI(self.ui.main_window, wnd=True)
def open(self, value):
"""Command used to open a maya scene based on the asset data."""
if settings.DEFAULT_FILE_FORMAT == 'Maya Ascii':
self.asset.save_config.command = command.get_command('open', 'Maya Ascii')
elif settings.DEFAULT_FILE_FORMAT == 'Maya Binary':
self.asset.save_config.command = command.get_command('open', 'Maya Binary')
self.asset.save_config.execute_command()
def create(self, value):
"""Command used to create a maya scene based on the asset data."""
if settings.DEFAULT_FILE_FORMAT == 'Maya Ascii':
self.asset.save_config.command = command.get_command("create", "Blank Maya Ascii")
elif settings.DEFAULT_FILE_FORMAT == 'Maya Binary':
self.asset.save_config.command = command.get_command('create', 'Blank Maya Binary')
self.asset.save_config.execute_command()
def delete(self, value):
"""Command used to delete a maya scene based on the asset data."""
if settings.DEFAULT_FILE_FORMAT == 'Maya Ascii':
self.asset.save_config.command = command.get_command("delete", "Maya Ascii")
elif settings.DEFAULT_FILE_FORMAT == 'Maya Binary':
self.asset.save_config.command = command.get_command('delete', 'Maya Binary')
self.asset.save_config.execute_command()
def commit(self, value):
"""Command used to commit (save, publish and/or export) the scene based on the asset data."""
save_enabled = self.ui.get_control_by_name("save_type").enabled
publish_enabled = self.ui.get_control_by_name("publish_type").enabled
export_enabled = self.ui.get_control_by_name("export_type").enabled
if save_enabled:
increment_on_save = self.ui.get_control_by_name('increment_on_save').value
if increment_on_save:
self.asset.version += 1
self.asset.save_config.execute_command()
if publish_enabled:
self.asset.publish_config.execute_command()
if export_enabled:
self.asset.export_config.execute_command()
def set_save_config_command(self, value):
"""Command used to set the asset's save configuration command based on a given value."""
self.asset.save_config.command = command.get_command("save", value)
print self.asset.save_config.command.name
def set_publish_config_command(self, value):
"""Command used to set the asset's publish configuration command based on a given value."""
self.asset.publish_config.command = command.get_command("publish", value)
print self.asset.publish_config.command.name
def set_export_config_command(self, value):
"""Command used to set the asset's export configuration command based on a given value."""
self.asset.export_config.command = command.get_command("export", value)
print self.asset.export_config.command.name
def open_save_path_explorer(self, value):
"""Command used to open the asset's save configuration path in the Explorer."""
self.asset.save_config.command = command.get_command("open", "Explorer")
self.asset.save_config.execute_command()
def open_publish_path_explorer(self, value):
"""Command used to open the asset's publish configuration path in the Explorer."""
self.asset.publish_config.command = command.get_command("open", "Explorer")
self.asset.publish_config.execute_command()
def open_export_path_explorer(self, value):
"""Command used to open the asset's export configuration path in the Explorer."""
self.asset.export_config.command = command.get_command("open", "Explorer")
self.asset.export_config.execute_command()
|
"""
README.md example
"""
import goenrich
# build the ontology
G = goenrich.obo.graph('db/go-basic.obo')
# use all entrez geneid associations form gene2go as background
# use goenrich.read.goa('db/gene_association.goa_ref_human.gz') for uniprot
background = goenrich.read.gene2go('db/gene2go.gz')
goenrich.enrich.set_background(G, background, 'GeneID', 'GO_ID')
# extract some list of entries as example query
query = set(background['GeneID'].unique()[:20])
# for additional export to graphviz just specify the gvfile argument
# the show argument keeps the graph reasonably small
result = goenrich.enrich.analyze(G, query, gvfile='example.dot', show='top20')
# generate html
result[['name', 'x', 'p', 'q', 'namespace']].head().to_html('example.html')
# call to graphviz
import subprocess
subprocess.call(['dot', '-Tpng', 'example.dot', '-o', 'example.png'])
|
import os
import numpy as np
import pandas as pd
import pickle
from sklearn.model_selection import train_test_split
from utils import taLogging
# Module logger writing to log/util.log.
logger = taLogging.getFileLogger(name='util', file='log/util.log')
# Separator placed between 'token/TAG' pairs in tagged output lines.
sep = ' '
# Fixed sequence length every sentence is padded/truncated to.
max_len = 60
# Sentence-ending punctuation classes used to split raw text.
flags = r'[。!?;]'
import re
# Lines longer than this are additionally split on commas.
line_max = 20
def get_entity(x, y, id2tag):
    """Assemble entities from B/M/E-tagged predictions (组合实体).

    :param x: batch of token sequences (text); assumed rectangular/padded,
              since the inner loop uses len(x[0]) for every sentence
    :param y: batch of predicted tag-id sequences aligned with x; id 0 is
              treated as padding and skipped
    :param id2tag: mapping tag id -> tag string such as 'B_PER'
    :return: list of 'TYPE:tokens' strings, one per completed entity
    """
    entity = ""
    res = []
    for i in range(len(x)):              # every sentence
        for j in range(len(x[0])):       # every token position
            if y[i][j] == 0:
                # Padding label: nothing predicted here.
                continue
            tag = id2tag[y[i][j]]
            if tag[0] == 'B':
                # Beginning of an entity: record its type and first token.
                entity = tag[2:] + ':' + x[i][j]
            elif tag[0] == 'M' and len(entity) != 0:
                entity += x[i][j]
            elif tag[0] == 'E' and len(entity) != 0:
                entity += x[i][j]
                res.append(entity)
                # Bug fix: was reset to [] (a list) here while every other
                # path uses a string; unified to "" - behavior is unchanged
                # because only len(entity) was ever tested.
                entity = ""
            else:
                entity = ""
    return res
def padding(ids):
    """Truncate ids to max_len, or zero-pad it (in place) up to max_len."""
    if len(ids) < max_len:
        # Pad in place and hand the same list back.
        ids.extend([0] * (max_len - len(ids)))
        return ids
    return ids[:max_len]
def padding_word(sen):
    """Truncate a raw sentence to max_len; shorter ones pass through unchanged."""
    return sen[:max_len] if len(sen) >= max_len else sen
def test_input(model, sess, word2id, id2tag, batch_size):
    """Interactively read sentences, run the tagger, and print found entities."""
    while True:
        raw = input("Enter your input: ")
        # Split the input on sentence-ending punctuation.
        sentences = re.split(flags, raw)
        batch_ids = []
        for sentence in sentences:
            token_ids = [word2id[word] if word in word2id else word2id["unknow"]
                         for word in sentence]
            batch_ids.append(padding(token_ids))
        # Fill the batch with all-zero rows up to batch_size.
        zero_row = [0] * max_len
        batch_ids.extend([zero_row] * (batch_size - len(batch_ids)))
        feed_dict = {model.input_data: batch_ids}
        pre = sess.run([model.viterbi_sequence], feed_dict)
        print('result:')
        for found in get_entity(sentences, pre[0], id2tag):
            print(found)
def write_entity(outp, x, y, id2tag):
    '''
    Append every completed entity of one sentence to *outp* as 'TYPE:tokens '.

    Note: each call appends at the current end of the output stream
    (这个函数每次使用是在文档的最后添加新信息). x and y are aligned token
    and tag-id sequences; tag id 0 is skipped as padding.
    '''
    entity = ''
    for i in range(len(x)):
        if y[i] == 0:
            continue
        tag = id2tag[y[i]]
        head = tag[0]
        if head == 'B':
            # Start a new entity with its type prefix.
            entity = tag[2:] + ':' + x[i]
        elif head == 'M' and entity:
            entity += x[i]
        elif head == 'E' and entity:
            entity += x[i]
            print(entity)
            outp.write(entity + ' ')
            entity = ''
        else:
            entity = ''
    return
def write_entity_for_tag(x, y, id2tag):
    '''
    Render one sentence as sep-joined 'token/TAG' pairs (e.g. 'w/B_PER w/E_PER').

    Note: each call produces a fresh line to append at the end of the
    document (这个函数每次使用是在文档的最后添加新信息). Only the first
    min(len(x), max_len) positions are considered; tag id 0 is skipped.
    '''
    entity = ''
    parts = []
    tag = ''
    limit = min(len(x), max_len)
    for i in range(limit):
        if y[i] == 0:
            continue
        label = id2tag[y[i]]
        kind = label[0]
        if kind == 'B':
            # New entity: remember its type for the M/E tokens that follow.
            tag = label[2:]
            parts.append(x[i] + "/" + "B_" + tag)
            entity = label[2:] + ':' + x[i]
        elif kind == 'M' and entity:
            parts.append(x[i] + "/" + "M_" + tag)
            entity += x[i]
        elif kind == 'E' and entity:
            parts.append(x[i] + "/" + "E_" + tag)
            entity += x[i]
            entity = ''
        elif kind == 'O':
            parts.append(x[i] + "/" + "O")
            entity = ''
        else:
            entity = ''
    return sep.join(parts)
def splitText(inp, oup):
    """Split long lines of *inp* on sentence punctuation and write them to *oup* + '.tmp'.

    Fragments longer than line_max are further split on ','. Returns the list
    of produced fragments.
    NOTE(review): fragments written in the else branch keep whatever trailing
    newline re.split left them (often none) - confirm downstream readers of
    the .tmp file tolerate that.
    """
    new_lines = []
    oup = oup + ".tmp"
    logger.debug("start split Text")
    with open(inp, 'r', encoding='utf-8') as readin, \
            open(oup, 'w', encoding='utf-8') as writeto:
        i = 0
        for line in readin.readlines():
            # Skip blank lines. (A second, identical check that followed the
            # counter update was dead code - the line cannot become blank in
            # between - and has been removed.)
            if len(line.strip()) == 0:
                continue
            i += 1
            if (i % 100) == 0:
                i = 1  # reset so '.' is logged roughly every 100 kept lines
                logger.debug(".")
            lines = re.split(flags, line)
            for l in lines:
                if len(l) > line_max:
                    ll = l.split(",")
                    writeto.write("\n".join(ll))
                    new_lines.extend(ll)
                else:
                    writeto.write(l)
                    new_lines.extend([l])
    logger.debug("End split Text")
    return new_lines
def tagText(input_path, output_path, model, sess, word2id, id2tag, batch_size, pre=False):
    """Tag every line of input_path with the model; write 'token/TAG' lines to output_path.

    pre: when True, skip re-splitting the input and reuse the previously
         generated output_path + '.tmp' fragment file.
    NOTE(review): the 'pre' parameter is shadowed below by the per-batch
    prediction variable also named 'pre' - confirm before refactoring.
    """
    info = {
        'input': input_path,
        "output": output_path
    }
    logger.debug(info)
    if pre == False:
        lines = splitText(input_path, output_path)
    else:
        oup = output_path + ".tmp"
        with open(oup, 'r', encoding='utf-8') as readin:
            lines = readin.readlines()
    text_id = []
    text = []
    y = 0
    for line in lines:
        y += 1
        if (y % 1000) == 0:
            y = 1  # counter reset so the log fires roughly every 1000 lines
            logger.debug("read Text+..")
        if len(line.strip()) == 0:
            continue
        # Map each character to its id, unknown characters to "unknow".
        word_id = []
        for word in line:
            if word in word2id:
                word_id.append(word2id[word])
            else:
                word_id.append(word2id["unknow"])
        text_id.append(padding(word_id))
        text.append(padding_word(line))
    # Pad with all-zero rows to a multiple of batch_size.
    # NOTE(review): when len(text_id) is already an exact multiple, this
    # appends one full extra all-zero batch - confirm downstream tolerates it.
    zero_padding = []
    zero_padding.extend([0] * max_len)
    text_id.extend([zero_padding] * (batch_size - len(text_id) % batch_size))
    text_id = np.asarray(text_id)
    text_id = text_id.reshape(-1, batch_size, max_len)
    # Run the model batch by batch and collect Viterbi decodes.
    predict = []
    logger.debug("len(text_id):" + str(len(text_id)))
    for index in range(len(text_id)):
        if (index % 1000) == 0:
            logger.debug("pre:" + str(index))
        feed_dict = {model.input_data: text_id[index]}
        pre = sess.run([model.viterbi_sequence], feed_dict)
        predict.append(pre[0])
    predict = np.asarray(predict).reshape(-1, max_len)
    def dumpPkl(ll, out):
        # Serialize each object of ll into the already-open pickle stream.
        for l in ll:
            pickle.dump(l, out)
    # Checkpoint the raw predictions for debugging/reuse.
    with open("tmp.pkl", 'wb') as out:
        dumpPkl([predict], out)
        pass
    # Emit one tagged line per kept input line.
    with open(output_path, 'w', encoding='utf-8') as outp:
        logger.debug("len(text):" + str(len(text)))
        for index in range(len(text)):
            if (index % 1000) == 0:
                logger.debug("get entity:" + str(index))
            result = write_entity_for_tag(text[index], predict[index], id2tag)
            outp.write(result + '\n')
    pass
def savePkl(inpf, oupf, base):
    """Build vocabulary/tag mappings from a 'token/TAG' corpus and pickle the dataset.

    Reads base/inpf, keeps only lines containing at least one non-'O' tag,
    then dumps [word2id, id2word, tag2id, id2tag, x_train, y_train, x_test,
    y_test] into base/oupf, in that order.
    """
    datas = []
    labels = []
    tags = set()
    inpf = os.path.join(base, inpf)
    oupf = os.path.join(base, oupf)
    logger.debug("start save PKl")
    with open(inpf, 'r', encoding='utf-8') as inp:
        y = 0
        for line in inp.readlines():
            y += 1
            if (y % 1000) == 0:
                y = 1  # reset so '*' is logged roughly every 1000 lines
                logger.debug("*")
            line = line.split()
            linedata = []
            linelabel = []
            numNotO = 0
            for word in line:
                # Each token looks like 'token/TAG'.
                word = word.split('/')
                linedata.append(word[0])
                linelabel.append(word[1])
                tags.add(word[1])
                if word[1] != 'O':
                    numNotO += 1
            if numNotO != 0:
                # Keep only sentences that contain at least one entity tag.
                datas.append(linedata)
                labels.append(linelabel)
    logger.debug(len(datas))
    logger.debug(tags)
    logger.debug(len(labels))
    new_datas = []
    for data in datas:
        new_datas.extend(data)
    # Vocabulary ordered by frequency; ids start at 1 (0 is the padding id).
    allwords = pd.Series(new_datas).value_counts()
    set_words = allwords.index
    set_ids = range(1, len(set_words) + 1)
    tags = [i for i in tags]
    tag_ids = range(len(tags))
    word2id = pd.Series(set_ids, index=set_words)
    id2word = pd.Series(set_words, index=set_ids)
    tag2id = pd.Series(tag_ids, index=tags)
    id2tag = pd.Series(tags, index=tag_ids)
    word2id["unknow"] = len(word2id) + 1
    def padding(type, words):
        # Local shadow of the module-level padding(): maps tokens ("X") or
        # tags ("Y") to id lists, truncated/zero-padded to max_len.
        if type == "X":
            ids = list(word2id[words])
        if type == "Y":
            ids = list(tag2id[words])
        if len(ids) >= max_len:
            return ids[:max_len]
        ids.extend([0] * (max_len - len(ids)))
        return ids
    df_datas = pd.DataFrame({"words": datas, "tags": labels}, index=list(range(len(datas))))
    df_datas['x'] = df_datas["words"].apply(lambda x: padding("X", x))
    df_datas['y'] = df_datas["tags"].apply(lambda x: padding("Y", x))
    x = np.asarray(list(df_datas['x'].values))
    y = np.asarray(list(df_datas['y'].values))
    x_train, x_test, y_train, y_test = train_test_split(x, y, test_size=0.2, random_state=12)
    def dumpPkl(ll, out):
        # Serialize each object of ll into the already-open pickle stream.
        for l in ll:
            pickle.dump(l, out)
    with open(oupf, 'wb') as out:
        # NOTE(review): x_train/y_train are overwritten with the FULL dataset
        # here, so the pickled x_test/y_test overlap the training data -
        # confirm this is intentional before relying on the test split.
        x_train = x
        y_train = y
        ll = [word2id, id2word, tag2id, id2tag, x_train, y_train, x_test, y_test]
        dumpPkl(ll, out)
        pass
    logger.debug("End save PKl")
|
#
# SPDX-FileCopyrightText: Copyright (c) 1993-2023 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
# SPDX-License-Identifier: Apache-2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
File for containing model file abstraction. Useful for generating models.
"""
import os
from abc import ABCMeta, abstractmethod
from typing import Union, List
from shutil import copytree, rmtree
# polygraphy
from polygraphy.backend.trt import (
network_from_onnx_path,
engine_from_network,
save_engine,
Profile,
)
from polygraphy.backend.trt import CreateConfig
from polygraphy.logger import G_LOGGER as PG_LOGGER
# torch
from torch import load, save
from torch.nn import Module
# tensorrt
from tensorrt import PreviewFeature, MemoryPoolType
# TRT-HuggingFace
from NNDF.networks import NetworkMetadata
from NNDF.logger import G_LOGGER
class ModelFileConverter:
    """Abstract class for converting one model format to another."""

    def __init__(self, onnx_class, torch_class, trt_engine_class):
        # Wrapper classes representing each on-disk model format.
        self.onnx_class = onnx_class
        self.torch_class = torch_class
        self.trt_engine_class = trt_engine_class

    def torch_to_onnx(
        self, output_fpath: str, model: Module, network_metadata: NetworkMetadata
    ):
        """
        Converts a torch.Model into an ONNX model on disk specified at output_fpath.

        Arg:
            output_fpath (str): File location of the generated ONNX file.
            model (torch.nn.Module): PyTorch model to export.
            network_metadata (NetworkMetadata): Network metadata of the network being converted.

        Returns:
            ONNXModelFile: Newly generated ONNXModelFile
        """
        # Docstring fix: previously documented a nonexistent 'input_fpath'
        # parameter instead of 'model'.
        raise NotImplementedError(
            "Current model does not support exporting to ONNX model."
        )

    def onnx_to_torch(
        self, output_fpath: str, input_fpath: str, network_metadata: NetworkMetadata
    ):
        """
        Converts ONNX file into torch.Model which is written to disk.

        Arg:
            output_fpath (str): File location of the generated torch model.
            input_fpath (str): Input file location of the ONNX file to convert.
            network_metadata (NetworkMetadata): Network metadata of the network being converted.

        Returns:
            TorchModelFile: Newly generated TorchModelFile
        """
        raise NotImplementedError(
            "Current model does not support exporting to torch model."
        )

    def onnx_to_trt(
        self,
        output_fpath: str,
        input_fpath: str,
        network_metadata: NetworkMetadata,
        profiles: List[Profile],
        preview_features: List[PreviewFeature],
    ):
        """
        Converts ONNX file to TRT engine.
        Since TensorRT already supplies converter functions and scripts,
        a default implementation is already provided.

        Arg:
            output_fpath (str): File location where the generated engine is saved.
            input_fpath (str): Input file location of the ONNX file to convert.
            network_metadata (NetworkMetadata): Network metadata of the network being converted.
            profiles (List[polygraphy.backend.trt.Profile]): The optimization profiles used to build the engine.
            preview_features (List[tensorrt.PreviewFeature]): The preview features to set when building the engine.

        Returns:
            TRTEngineFile: Newly generated engine.
        """
        result = self.trt_engine_class(output_fpath, network_metadata)
        G_LOGGER.info("Using optimization profiles: {:}".format(profiles))
        try:
            self.trt_inference_config = CreateConfig(
                tf32=True,
                fp16=network_metadata.precision.fp16,
                # The engine wrapper decides its own workspace budget (MiB -> bytes).
                memory_pool_limits={MemoryPoolType.WORKSPACE: result.max_trt_workspace * 1024 * 1024},
                profiles=profiles,
                precision_constraints=("obey" if result.use_obey_precision_constraints() else None),
                preview_features=preview_features
            )
        except TypeError as e:
            # Older polygraphy releases do not accept some of the kwargs above.
            # (Removed a pointless f-prefix: the message has no placeholders.)
            G_LOGGER.error("This demo may have an outdated polygraphy. Please see requirements.txt for more details.")
            raise e
        # Map our logger's verbosity onto polygraphy's logger for the build.
        if G_LOGGER.level == G_LOGGER.DEBUG:
            g_logger_verbosity = PG_LOGGER.EXTRA_VERBOSE
        elif G_LOGGER.level == G_LOGGER.INFO:
            g_logger_verbosity = PG_LOGGER.INFO
        else:
            g_logger_verbosity = PG_LOGGER.WARNING
        with PG_LOGGER.verbosity(g_logger_verbosity):
            network_definition = result.get_network_definition(network_from_onnx_path(input_fpath))
            trt_engine = engine_from_network(
                network_definition, config=self.trt_inference_config
            )
            save_engine(trt_engine, output_fpath)
        return result
class NNModelFile(metaclass=ABCMeta):
    """
    Model abstraction. Allows for loading model as various formats.
    The class assumes models live on the disk in order to reduce complexity of model loading into memory.
    The class guarantees that once export functions are called, models exist on the disk for other
    code to parse or use in other libraries.
    """

    def __init__(
        self,
        default_converter: ModelFileConverter = None,
        network_metadata: NetworkMetadata = None,
    ):
        """
        Args:
            default_converter (ModelFileConverter): Converter class used by the as_*
                methods when none is supplied explicitly; NullConverter when omitted.
            network_metadata (NetworkMetadata): Metadata of the wrapped network.
        """
        if default_converter is not None:
            self.default_converter = default_converter()
        else:
            self.default_converter = NullConverter()
        self.network_metadata = network_metadata

    def as_torch_model(
        self,
        output_fpath: str,
        converter: ModelFileConverter = None,
        force_overwrite: bool = False,
    ):
        """
        Converts the current model into a torch.Model which is written to disk.
        Uses the provided converter, or default_converter if none is given.
        Arg:
            output_fpath (str): File location of the generated torch file.
            converter (ModelFileConverter): Class to convert current model instance into another.
            force_overwrite (bool): If the file already exists, tell whether or not to overwrite.
                Since torch models can be folders, can potentially erase entire folders.
        Returns:
            TorchModelFile: Newly generated TorchModelFile
        """
        raise NotImplementedError(
            "Current model does not support exporting to pytorch model."
        )

    def as_onnx_model(
        self,
        output_fpath: str,
        converter: ModelFileConverter = None,
        force_overwrite: bool = False,
    ):
        """
        Converts the current model into an ONNX model.
        Uses the provided converter, or default_converter if none is given.
        Args:
            output_fpath (str): File location of the generated ONNX file.
            converter (ModelFileConverter): Class to convert current model instance into another.
            force_overwrite (bool): If the file already exists, tell whether or not to overwrite.
                Since torch models can be folders, can potentially erase entire folders.
        Returns:
            ONNXModelFile: Newly generated ONNXModelFile
        """
        raise NotImplementedError(
            "Current model does not support exporting to onnx model."
        )

    def as_trt_engine(
        self,
        output_fpath: str,
        converter: ModelFileConverter = None,
        force_overwrite: bool = False,
        # None sentinels replace the previous mutable list defaults ([]), which
        # are a shared-state hazard (B006). None means "no profiles/features".
        profiles: List[Profile] = None,
        preview_features: List[PreviewFeature] = None,
    ):
        """
        Converts the current model into a TRT engine.
        Uses the provided converter, or default_converter if none is given.
        Args:
            output_fpath (str): File location of the generated engine file.
            converter (ModelFileConverter): Class to convert current model instance into another.
            force_overwrite (bool): If the file already exists, tell whether or not to overwrite.
            profiles (List[polygraphy.backend.trt.Profile]): The optimization profiles used to build the engine.
            preview_features (List[tensorrt.PreviewFeature]): The preview features to enable when building the engine.
        Returns:
            TRTEngineFile: Newly generated engine file.
        """
        raise NotImplementedError(
            "Current model does not support exporting to trt engine."
        )

    @abstractmethod
    def cleanup(self) -> None:
        """Cleans up any saved models or loaded models from memory."""
class TorchModelFile(NNModelFile):
    def __init__(
        self,
        model: Union[str, Module],
        default_converter: ModelFileConverter = None,
        network_metadata: NetworkMetadata = None,
    ):
        """
        Since torch functions often allow for models to either be from disk as fpath or from a loaded object,
        we provide a similar option here. Arguments can either be a path on disk or from model itself.
        Args:
            model (Union[str, torch.Model]): Location of the model as fpath OR loaded torch.Model object.
        """
        super().__init__(default_converter, network_metadata)
        if isinstance(model, Module):
            # Model already lives in memory; there is no on-disk location yet.
            self.is_loaded = True
            self.fpath = None
            self.model = model
        else:
            # Path on disk; load_model() will read it lazily.
            self.is_loaded = False
            self.fpath = model
            self.model = None

    def load_model(self) -> Module:
        """
        Loads the model from disk if isn't already loaded.
        Does not attempt to load if given model is already loaded and instead returns original instance.
        Use as_torch_model() instead to always guarantee a new instance and location on disk.
        Args:
            None
        Returns:
            torch.Model: Loaded torch model.
        """
        if self.is_loaded:
            return self.model
        return load(self.fpath)

    def as_onnx_model(
        self,
        output_fpath: str,
        converter: ModelFileConverter = None,
        force_overwrite: bool = False,
    ):
        """
        Converts the torch model into an onnx model.
        Args:
            output_fpath (str): File location of the generated ONNX file.
            converter (ModelFileConverter): Class to convert current model instance into another.
            force_overwrite (bool): If the file already exists, tell whether or not to overwrite.
                Since torch models folders, can potentially erase entire folders.
        Return:
            (converter.onnx_class): Returns a converted instance of ONNXModelFile.
        """
        converter = self.default_converter if converter is None else converter()
        # Reuse the existing export unless the caller forces a rebuild.
        if not force_overwrite and os.path.exists(output_fpath):
            return converter.onnx_class(output_fpath, self.network_metadata)
        return converter.torch_to_onnx(
            output_fpath, self.load_model(), self.network_metadata
        )

    def as_torch_model(
        self,
        output_fpath: str,
        converter: ModelFileConverter = None,
        force_overwrite: bool = False,
    ):
        """
        Since the model is already a torch model, forces a save to specified folder and returns new TorchModelFile object from that file location.
        Args:
            output_fpath (str): File location of the generated torch file.
            converter (ModelFileConverter): Class to convert current model instance into another.
            force_overwrite (bool): If the file already exists, tell whether or not to overwrite.
                Since torch models folders, can potentially erase entire folders.
        Return:
            (converter.torch_class): Returns a converted instance of TorchModelFile.
        """
        converter = self.default_converter if converter is None else converter()
        if not force_overwrite and os.path.exists(output_fpath):
            return converter.torch_class(output_fpath, self.network_metadata)
        if self.is_loaded:
            # In-memory model: serialize it to the requested location.
            save(self.model, output_fpath)
        else:
            # On-disk model: duplicate the existing copy.
            copytree(self.fpath, output_fpath)
        return converter.torch_class(output_fpath, self.network_metadata)

    def cleanup(self) -> None:
        """Frees the in-memory model (if any) and removes the on-disk copy (if any)."""
        if self.model:
            G_LOGGER.debug("Freeing model from memory: {}".format(self.model))
            del self.model
        if self.fpath:
            G_LOGGER.debug("Removing saved torch model from location: {}".format(self.fpath))
            # NOTE(review): rmtree requires a directory, but as_torch_model's
            # save() branch writes a single file — confirm saved torch models
            # are always folders in this demo.
            rmtree(self.fpath)
class ONNXModelFile(NNModelFile):
    def __init__(
        self,
        model: str,
        default_converter: ModelFileConverter = None,
        network_metadata: NetworkMetadata = None,
    ):
        """
        Keeps track of ONNX model file. Does not support loading into memory. Only reads and writes to disk.
        Args:
            model (str): Location of the ONNX model on disk.
        """
        super().__init__(default_converter, network_metadata)
        self.fpath = model

    def as_onnx_model(
        self,
        output_fpath: str,
        converter: ModelFileConverter = None,
        force_overwrite: bool = False,
    ):
        """
        Since the model is already a onnx model, forces a save to specified folder and returns new ONNXModelFile object from that file location.
        Args:
            output_fpath (str): File location of the generated ONNX file.
            converter (ModelFileConverter): Class to convert current model instance into another.
            force_overwrite (bool): If the file already exists, tell whether or not to overwrite.
        Return:
            (converter.onnx_class): Returns a converted instance of ONNXModelFile.
        """
        converter = self.default_converter if converter is None else converter()
        if not force_overwrite and os.path.exists(output_fpath):
            return converter.onnx_class(output_fpath, self.network_metadata)
        else:
            # copytree implies ONNX models here are folders (external weight
            # files live next to the .onnx) — presumably; confirm layout.
            copytree(self.fpath, output_fpath)
        return converter.onnx_class(output_fpath, self.network_metadata)

    def as_torch_model(
        self,
        output_fpath: str,
        converter: ModelFileConverter = None,
        force_overwrite: bool = False,
    ):
        """
        Converts the onnx model into an torch model.
        Args:
            output_fpath (str): File location of the generated torch file.
            converter (ModelFileConverter): Class to convert current model instance into another.
            force_overwrite (bool): If the file already exists, tell whether or not to overwrite.
                Since torch models folders, can potentially erase entire folders.
        Return:
            (converter.torch_class): Returns a converted instance of TorchModelFile.
        """
        converter = self.default_converter if converter is None else converter()
        if not force_overwrite and os.path.exists(output_fpath):
            return converter.torch_class(output_fpath, self.network_metadata)
        return converter.onnx_to_torch(output_fpath, self.fpath, self.network_metadata)

    def _cleanup_onnx_folder(self, folder_dir):
        # Delete every regular file in folder_dir except serialized engines.
        for d in os.listdir(folder_dir):
            fpath = os.path.join(folder_dir, d)
            # Remove everything related to onnx other than engine
            if (os.path.isfile(fpath)) and (".engine" not in d):
                os.remove(fpath)

    def cleanup(self) -> None:
        """Removes the ONNX model (and its external weight files) from disk."""
        G_LOGGER.debug("Removing saved ONNX model from location: {}".format(self.fpath))
        if (not self.network_metadata.other.kv_cache) or ("encoder" in self.fpath):
            # Clean up any onnx external files by removing integer named values and weight files
            workspace_path = os.path.split(self.fpath)[0]
            self._cleanup_onnx_folder(workspace_path)
        else:
            # In kv cache mode, hard to remove the decoder. Therefore need to search for temporary WAR.
            # The decoder directory is assumed to contain "kv" and "non-kv"
            # subfolders — TODO confirm this layout against the exporter.
            decoder_path = os.path.split(self.fpath)[0]
            decoder_non_kv_path = os.path.join(decoder_path, "non-kv")
            decoder_kv_path = os.path.join(decoder_path, "kv")
            # Remove kv and nonkv folder correspondingly.
            self._cleanup_onnx_folder(decoder_non_kv_path)
            self._cleanup_onnx_folder(decoder_kv_path)

    def as_trt_engine(
        self,
        output_fpath: str,
        converter: ModelFileConverter = None,
        force_overwrite: bool = False,
        profiles = [],
        preview_features = []
    ):
        """
        Converts the onnx model into an trt engine.
        Args:
            output_fpath (str): File location of the generated engine file.
            converter (ModelFileConverter): Class to convert current model instance into another.
            force_overwrite (bool): If the file already exists, tell whether or not to overwrite.
                Since torch models folders, can potentially erase entire folders.
            profiles (List[polygraphy.backend.trt.Profile]): The optimization profiles used to build the engine.
            preview_features (List[tensorrt.PreviewFeature]): The preview features to set when building the engine.
        Return:
            (converter.trt_engine_class): Returns a converted instance of TRTEngineFile.
        """
        converter = self.default_converter if converter is None else converter()
        # TODO: Need to check if the old engine file is compatible with current setting
        if not force_overwrite and os.path.exists(output_fpath):
            return converter.trt_engine_class(output_fpath, self.network_metadata)
        return converter.onnx_to_trt(
            output_fpath,
            self.fpath,
            self.network_metadata,
            profiles,
            preview_features
        )
class TRTEngineFile(NNModelFile):
    """NNModelFile specialization that wraps a serialized TensorRT engine on disk."""

    def __init__(
        self,
        model: str,
        default_converter: ModelFileConverter = None,
        network_metadata: NetworkMetadata = None,
    ):
        super().__init__(default_converter, network_metadata)
        self.fpath = model
        # Builder workspace budget in MiB; consumed by ModelFileConverter.onnx_to_trt.
        self.max_trt_workspace = 3072

    @abstractmethod
    def use_obey_precision_constraints(self):
        """Subclasses report whether the builder must obey layer precision constraints."""

    def get_network_definition(self, network_definition):
        """Hook for subclasses to alter the parsed network definition (e.g. change
        the precisions of ops or the data type of intermediate tensors); the
        default implementation is a pass-through."""
        return network_definition

    def cleanup(self) -> None:
        """Deletes the serialized engine file from disk."""
        G_LOGGER.debug("Removing saved engine model from location: {}".format(self.fpath))
        os.remove(self.fpath)
class NullConverter(ModelFileConverter):
    """Fallback converter: knows the three file classes but supports no conversions."""

    def __init__(self):
        super().__init__(
            onnx_class=ONNXModelFile,
            torch_class=TorchModelFile,
            trt_engine_class=TRTEngineFile,
        )
|
import os
import math
import random
import pyautogui
import subprocess
from system_hotkey import SystemHotkey
import time
# Give the user five seconds to focus the Paint window before automation starts.
time.sleep(5)
# Playing around with pyautogui and SystemHotkey packages in Paint
# FAILSAFE=False disables pyautogui's move-to-corner abort; the Ctrl+X hotkey
# registered below is the escape hatch instead.
pyautogui.FAILSAFE = False
# pyautogui.PAUSE = 0.5
def abort(event):
    """Hotkey callback (Ctrl+X): print a marker and hard-kill the process.

    os._exit skips all cleanup handlers on purpose, so a runaway pyautogui
    loop can always be stopped. The single positional argument is whatever
    SystemHotkey passes to its callbacks; it is unused. (Renamed from the
    misleading `self` — this is a module-level function, not a method.)
    """
    print("ABORT")
    os._exit(0)
# Generate n x,y points of a circle of radius r
def PointsInCircum(r, n=100):
    """Return n+1 (x, y) points evenly spaced around a circle of radius r.

    The list closes the circle: the last point repeats the starting angle.
    """
    step = 2 * math.pi / n
    pts = []
    for k in range(n + 1):
        theta = step * k
        pts.append((math.cos(theta) * r, math.sin(theta) * r))
    return pts
# Move cursor to center of screen, return coordinates (reduces repetition)
def moveToCenter():
    """Center the cursor on the screen and return the (x, y) it was moved to."""
    width, height = pyautogui.size()
    center = (width / 2, height / 2)
    pyautogui.moveTo(*center)
    return center
# Draw a shape of radius r (with a shape tool selected)
def drawShape(r):
    """With a shape tool selected, drag out a shape of 'radius' r around screen center."""
    moveToCenter()
    pyautogui.moveRel(-r, -r)          # jump to the shape's top-left corner
    pyautogui.mouseDown()
    pyautogui.moveRel(2 * r, 2 * r)    # drag to the bottom-right corner
    pyautogui.mouseUp()
    moveToCenter()
# Draw three square spirals to test out pyautogui
def draw_test():
    """Draw three square spirals (at offset starting points) to exercise pyautogui.

    The original body repeated the identical spiral loop three times verbatim;
    it is now factored into _square_spiral. Drag sequence is unchanged.
    """
    pickRandomBrush()
    centerX, centerY = moveToCenter()
    origDistance = 200
    _square_spiral(origDistance)
    pyautogui.moveTo(centerX - origDistance, centerY)
    _square_spiral(origDistance)
    pyautogui.moveTo(centerX - origDistance, centerY - origDistance)
    _square_spiral(origDistance)

def _square_spiral(distance):
    """Drag a shrinking square spiral starting at the current cursor position."""
    while distance > 0:
        pyautogui.dragRel(distance, 0)
        distance -= 5
        pyautogui.dragRel(0, distance)
        pyautogui.dragRel(-distance, 0)
        distance -= 5
        pyautogui.dragRel(0, -distance)
# Early experiment with pyautogui: Draw Captain America's shield using individual brush strokes
# Usage: call with a radius value with a brush tool selected in Paint.
# Try tweaking the radius and NumPoints in PointsInCircum for interesting effects!
# Simplified version is below (draw_shield)
def draw_circle(radius, numPoints):
    """Draw Captain America's shield as concentric rings of brush strokes.

    Each ring is painted spoke-by-spoke: a stroke from center out to the ring
    point and a mirrored stroke to the point at the reversed index, so the ring
    fills from both directions. Colors are selected by clicking hard-coded
    Paint palette coordinates.

    The original body repeated the identical ring loop six times verbatim;
    it is now data-driven. Click/drag sequence is unchanged.
    """
    centerX, centerY = moveToCenter()
    pyautogui.click()
    # ((palette x, palette y), radius scale) per ring, outermost first —
    # same color clicks and scales as the original six copies.
    rings = [
        ((830, 62), 1.00),
        ((764, 84), 0.90),
        ((830, 62), 0.80),
        ((764, 84), 0.70),
        ((830, 62), 0.60),
        ((941, 60), 0.40),
    ]
    for (colX, colY), scale in rings:
        pyautogui.click(x=colX, y=colY)
        points = PointsInCircum(radius * scale, numPoints)
        for idx in range(len(points)):
            x, y = points[idx]
            pyautogui.moveTo(centerX, centerY)
            pyautogui.dragRel(x, y)
            # Mirror stroke: same index into the reversed point list.
            points.reverse()
            x, y = points[idx]
            pyautogui.moveTo(centerX, centerY)
            pyautogui.dragRel(x, y)
            points.reverse()
# Wrapper function showing standard usage of draw_circle
def drawCircle():
    """Standard invocation of draw_circle: 400 px radius, 300 spokes per ring."""
    draw_circle(radius=400, numPoints=300)
# Improved method to draw Captain America's shield using pyautogui.
# Switches colors in course of drawing each line, rather than painting and painting over
# draw_circle looks better, but draw_shield is much more efficient
def draw_shield(radius, numPoints):
    """Draw the shield by walking all rings within one pass per spoke.

    For each spoke index, a single multi-segment drag runs from the center
    outward through the 50%/60%/70%/80%/90%/100% rings, switching palette
    colors between segments — far fewer strokes than draw_circle.
    Palette/tool coordinates are hard-coded Paint screen positions.
    """
    pyautogui.click(x=689, y=67)  # select the brush tool
    centerX, centerY = moveToCenter()
    # Precompute each ring's points so every spoke index lines up radially.
    points = PointsInCircum(radius, numPoints)
    points90 = PointsInCircum(radius * 0.90, numPoints)
    points80 = PointsInCircum(radius * 0.80, numPoints)
    points70 = PointsInCircum(radius * 0.70, numPoints)
    points60 = PointsInCircum(radius * 0.60, numPoints)
    points50 = PointsInCircum(radius * 0.50, numPoints)
    for idx in range(0, len(points)):
        # Innermost segment: center out to the 50% ring.
        x, y = points50[idx]
        pyautogui.click(x=941, y=60)
        pyautogui.moveTo(centerX, centerY)
        pyautogui.dragRel(x, y)
        x, y = pyautogui.position()
        x, y = points60[idx]
        pyautogui.dragRel(x, y)
        # Each following segment: pick the next color at the palette, return
        # to where the previous drag ended, then drag out to the next ring.
        x, y = pyautogui.position()
        pyautogui.click(x=764, y=84)
        pyautogui.moveTo(x, y)
        x, y = points70[idx]
        pyautogui.dragRel(x, y)
        x, y = pyautogui.position()
        pyautogui.click(x=830, y=62)
        pyautogui.moveTo(x, y)
        x, y = points80[idx]
        pyautogui.dragRel(x, y)
        x, y = pyautogui.position()
        pyautogui.click(x=764, y=84)
        pyautogui.moveTo(x, y)
        x, y = points90[idx]
        pyautogui.dragRel(x, y)
        x, y = pyautogui.position()
        pyautogui.click(x=830, y=62)
        pyautogui.moveTo(x, y)
        x, y = points[idx]
        pyautogui.dragRel(x, y)
    # Finish: switch color/tool and stamp the center disc with a shape drag.
    pyautogui.click(x=764, y=84)
    pyautogui.click(x=421, y=102)
    pyautogui.click(x=733, y=68)
    pyautogui.click(x=764, y=84)
    moveToCenter()
    drawShape(radius * 0.50)
# Wrapper function showing standard usage of draw_shield
def drawShield():
    """Standard invocation of draw_shield: select the brush, then radius 80 / 100 spokes."""
    moveToCenter()
    pyautogui.click(x=689, y=67)  # brush tool
    draw_shield(80, 100)
# Simple test to do some recursive drawing
def draw_node(step, centerX, centerY):
    """Recursive doodle: burst random spokes from (centerX, centerY), then hop
    along one random spoke and recurse until step reaches zero."""
    pyautogui.moveTo(centerX, centerY)
    # Same two random draws, in the same order, as before (radius then spokes).
    burst = PointsInCircum(random.randrange(10, 200, 1),
                           random.randrange(5, 100, 1))
    for dx, dy in burst:
        pyautogui.dragRel(dx, dy)
        pyautogui.moveTo(centerX, centerY)
    if step <= 0:
        return
    # Pick a random spoke, follow it, and recurse from wherever we land.
    dx, dy = burst[random.randrange(0, len(burst) - 1, 1)]
    pyautogui.dragRel(dx, dy)
    nextX, nextY = pyautogui.position()
    draw_node(step - 1, nextX, nextY)
# Kick off recursive draw_node from the center of the screen
def complexDrawing():
    """Kick off the recursive draw_node web from the center of the screen.

    Bug fix: draw_node's signature is (step, centerX, centerY), but the old
    call passed (centerX, centerY, 100, 5) — four arguments in the wrong
    order (apparently a stale call from an older signature), which raised
    TypeError before anything was drawn. Depth 5 matches the other wrappers
    (drawOtraWrapper/drawWeb) — TODO confirm the intended depth.
    """
    centerX, centerY = moveToCenter()
    draw_node(5, centerX, centerY)
# Select a random color from the color bar
def pickRandomColor():
    """Click a random swatch in Paint's color bar; return its (column, row) indices."""
    swatch_xs = [805, 831, 854, 872, 894, 920, 942, 959]
    swatch_ys = [59, 81]
    col = random.randrange(0, len(swatch_xs), 1)
    row = random.randrange(0, 2, 1)
    pyautogui.click(swatch_xs[col], swatch_ys[row])
    return col, row
# Select a random shape tool from the "Shapes" dialog
def pickRandomShape():
    """Click a random tool in the Shapes dialog; return the clicked (x, y)."""
    moveToCenter()
    pyautogui.click()
    shape_x = random.choice([396, 421, 442, 461, 480, 498])
    shape_y = random.choice([66, 85, 102])
    pyautogui.click(shape_x, shape_y)
    return shape_x, shape_y
# Select a random brush type from the "Brushes" menu
def pickRandomBrush():
    """Open the Brushes menu (trying two reference screenshots of it) and
    double-click a random brush; return the chosen (column, row) indices."""
    moveToCenter()
    pyautogui.click()
    if not locate_Click('BrushMenu.png'):
        locate_Click('BrushMenu2.png')
    menu_xs = [332, 373, 417, 463]
    menu_ys = [135, 181, 211]
    col = random.randrange(0, len(menu_xs))
    row = random.randrange(0, len(menu_ys))
    # Clamp to row 0 outside the first column — presumably the other columns
    # only have an entry on the top row; confirm against the menu layout.
    if col > 0 and row > 0:
        row = 0
    pyautogui.moveTo(menu_xs[col], menu_ys[row])
    pyautogui.click(clicks=2, interval=0.25)
    return col, row
# My personal favorite recursive web drawing function, iterates upon draw_node above (adds range checking,
# increases numSpokes as we approach base case)
def draw_Otra(step, centerX, centerY, radius, numSpokes):
    """Recursive web: draw numSpokes spokes, then recurse from wherever the
    cursor landed with half the radius and two extra spokes.

    A center outside the usable canvas (x in [10, 1070], y in [170, 990])
    restarts the figure from the screen center.
    """
    if not (10 <= centerX <= 1070 and 170 <= centerY <= 990):
        screen_w, screen_h = pyautogui.size()
        centerX, centerY = screen_w / 2, screen_h / 2
    pyautogui.moveTo(centerX, centerY)
    for dx, dy in PointsInCircum(radius, numSpokes):
        pyautogui.dragRel(dx, dy)
    if step > 0:
        nextX, nextY = pyautogui.position()
        draw_Otra(step - 1, nextX, nextY, radius / 2, numSpokes + 2)
    pyautogui.moveTo(centerX, centerY)
# Wrapper to call above function with random brush type and set seed values
def drawOtraWrapper():
    """Seed draw_Otra from screen center with a random brush: depth 5, radius 2400, 4 spokes."""
    pickRandomBrush()
    cx, cy = moveToCenter()
    draw_Otra(5, cx, cy, 2400, 4)
# Very similar to draw_Otra, doesn't change numSpokes. Again, try messing around with these step, radius, and numSpokes values
# for some interesting effects!
def draw_modified(step, centerX, centerY, radius, numSpokes):
    """Recursive web drawer with screen-edge wrap-around and a random arc per level.

    Bug fix: the recursive call referenced `drawModified`, a name that does not
    exist anywhere in the module, so recursion raised NameError on first use.
    It now calls draw_modified itself.
    """
    colorX = [805, 831, 854, 872, 894, 920, 942, 959]
    colorY = [59, 81]
    colXIdx, colYIdx = pickRandomColor()
    oldCenterX = centerX
    oldCenterY = centerY
    # Wrap a center that drifted past a canvas edge to the opposite side,
    # dragging from just inside the edge that was crossed.
    if centerY < 170:
        centerY = 990 - (centerY + 170)
        oldCenterY = 988
    if centerY > 990:
        centerY = 170 + (centerY - 990)
        oldCenterY = 172
    if centerX < 10:
        centerX = 1070 - (centerX + 10)
        oldCenterX = 1068
    if centerX > 1070:
        centerX = 10 + (centerX - 1070)
        oldCenterX = 12
    if pyautogui.onScreen(oldCenterX, oldCenterY) and pyautogui.onScreen(centerX, centerY):
        pyautogui.moveTo(oldCenterX, oldCenterY)
        pyautogui.dragTo(centerX, centerY)
    else:
        centerX, centerY = moveToCenter()
    points = PointsInCircum(radius, numSpokes)
    numPoints = len(points)
    # Draw a random arc of the circle, sometimes in the reverse direction.
    flipFlag = random.randrange(0, 100, 1)
    if flipFlag > 50:
        points.reverse()
    startIdx = random.randrange(0, numPoints - 1, 1)
    endIdx = random.randrange(startIdx, numPoints - 1, 1)
    if startIdx >= endIdx:
        startIdx = 0
    for idx in range(startIdx, endIdx):
        x, y = points[idx]
        pyautogui.dragRel(x, y)
    if step > 0:
        newCenterX, newCenterY = pyautogui.position()
        draw_modified(step - 1, newCenterX, newCenterY, radius / 2, numSpokes)
    # Re-select this level's color (the recursion changed it) and recenter.
    pyautogui.click(x=colorX[colXIdx], y=colorY[colYIdx])
    pyautogui.moveTo(centerX, centerY)
# Wrapper to kick off the draw_modified recursive function
def drawWeb():
    """Kick off the draw_modified recursion from the screen center.

    Bug fix: the old body called `drawModified`, which is not defined
    anywhere (NameError); the function is named draw_modified.
    """
    # pickRandomBrush()
    centerX, centerY = moveToCenter()
    draw_modified(5, centerX, centerY, 2400, 4)
# Call draw_Modified with random parameters
def drawRandWeb():
    """Run draw_modified with a random radius (800-2399) and spoke count (4-11).

    Bug fix: the old body called `drawModified`, which is not defined
    anywhere (NameError); the function is named draw_modified.
    """
    centerX, centerY = moveToCenter()
    r = random.randrange(800, 2400)
    spokes = random.randrange(4, 12)
    draw_modified(5, centerX, centerY, r, spokes)
# Helper function to determine on-screen location of items. Usage: call loc_helper and nothing else, press Ctrl+F to print the x,y
# coordinates of the mouse
def loc_helper(event):
    """Hotkey callback (Ctrl+F): print the current mouse coordinates.

    Useful for finding the hard-coded click positions used elsewhere in this
    script. The positional argument is whatever SystemHotkey passes to its
    callbacks; it is unused. (Renamed from the misleading `self` — this is a
    module-level function, not a method.)
    """
    print(pyautogui.position())
# Locate and move to the center of the image provided by 'img'. Note that you must provide full filename e.g. 'example.png' or it
# will fail. Also, the image must be in the same directory as this script.
def locate_MoveTo(img):
    """Find the screenshot `img` on screen and center the cursor on the match.

    Returns True on success, False when the image file is missing or no match
    is found. `img` must be a full filename (e.g. 'example.png') located in
    the same directory as this script.
    """
    try:
        # locateOnScreen returns None on no match in older pyautogui versions,
        # which makes the unpack raise TypeError — handled below.
        left, top, width, height = pyautogui.locateOnScreen(img)
    except (FileNotFoundError, TypeError) as e:
        print("locate_MoveTo failed:", e)
        print('unable to find: ', img)
        return False
    pyautogui.moveTo(x=left + (width / 2), y=top + (height / 2))
    return True
# Locate and click on the image provided by 'img'. Useful for selecting specific controls.
def locate_Click(img):
    """Locate `img` on screen and click its center; return whether it was found."""
    if locate_MoveTo(img):
        pyautogui.click()
        return True
    return False
# Currently unused, was toying around with opening and maximizing Paint when this script is run.
def launch_subprocess(path):
    """Spawn the program at `path` without waiting for it (fire-and-forget).

    Currently unused; kept from an experiment with auto-launching Paint.
    """
    subprocess.Popen([path])
# Draw a spiral by first calculating the PointsInCircum for a number of radii using a list comprehension.
def draw_spiral():
    """Draw a spiral-ish figure from precomputed rings of increasing radius.

    Each outer iteration builds numPoints rings (radius startRadius ..
    startRadius+numPoints-1) and, for ring idx, strokes toward that ring's
    idx-th point — so the sampled point advances around the circle as the
    radius grows, tracing a spiral.
    """
    # moveToCenter()
    screenWidth, screenHeight = pyautogui.size()
    centerX = screenWidth / 2
    centerY = screenHeight / 2
    pyautogui.moveTo(centerX, centerY)
    numPoints = 60
    startRadius = 30
    for i in range(0, 10):
        # One ring per radius step; points[idx][idx] picks the spiral sample.
        points = [PointsInCircum(x, numPoints) for x in range(startRadius, startRadius + numPoints)]
        innerIdx = 0  # leftover from an earlier version; never incremented (see below)
        for idx in range(0, len(points)):
            arr = points[idx]
            x, y = arr[idx]
            # Two passes per sample: once with the button up, once held down.
            pyautogui.moveRel(x, y)
            pyautogui.mouseUp()
            pyautogui.moveTo(centerX, centerY)
            pyautogui.click()
            pyautogui.moveRel(x, y)
            pyautogui.mouseDown()
            pyautogui.moveTo(centerX, centerY)
            # innerIdx += 1
        startRadius += numPoints
# Draw a number of concentric shapes
def draw_Concentric():
    """Draw concentric shapes (radii 5, 25, ..., 185) with a random shape tool."""
    pickRandomShape()
    for radius in range(5, 200, 20):
        drawShape(radius)
# Clear the canvas: select all -> delete
def clear_Canvas():
    """Clear the Paint canvas: open the Select menu (two screenshot variants),
    choose Select All via keyboard, then press Delete."""
    moveToCenter()
    pyautogui.click()
    if not locate_Click('SelectMenu.png'):
        locate_Click('SelectMenu2.png')
    pyautogui.press(['down', 'down', 'down'])
    pyautogui.press('enter')
    pyautogui.press('delete')
# Global hotkeys: Ctrl+X hard-aborts the script (FAILSAFE is disabled above),
# Ctrl+F prints the current mouse position for finding click coordinates.
hk = SystemHotkey()
hk.register(('control', 'x'), callback=abort)
hk.register(('control', 'f'), callback=loc_helper)
# Playing around with looking up a function in a dict
def totalRandomDrawing():
    """Run one of the drawing routines, chosen uniformly at random."""
    drawings = (
        draw_Concentric,
        drawRandWeb,
        drawCircle,
        drawShield,
    )
    random.choice(drawings)()
# Entry point: the sleep at the top of the file gives the user time to focus Paint.
if __name__ == "__main__":
    totalRandomDrawing()
    # Fix: removed a stray trailing "|" artifact (dataset/paste residue) that
    # was fused onto this line and made the file a SyntaxError.
import cv2
import numpy as np
from os import listdir
from os.path import isfile, join
################## Data set collection #################
face_classifier = cv2.CascadeClassifier('C:/Users/Abhilasha kumari/IdeaProjects/Face_Recognition_Mini_Project/opencv-master/data/haarcascades/haarcascade_frontalface_default.xml')
def face_extractor(img):
    """Return the last face detected in BGR image `img`, cropped out, or None.

    Bug fix: the old code tested `if faces is()` — an identity comparison with
    a fresh empty tuple, which is a SyntaxWarning on modern Python and only
    worked via tuple interning. detectMultiScale returns an empty sequence
    when nothing is found, so an explicit length check is used instead.
    """
    gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)  # detector operates on grayscale
    faces = face_classifier.detectMultiScale(gray, 1.3, 5)
    if len(faces) == 0:
        return None
    # If several faces are detected, the last one wins (original behavior).
    for (x, y, w, h) in faces:
        cropped_face = img[y:y+h, x:x+w]
    return cropped_face
# Collect 50 face samples from the webcam and save them as training images.
cap = cv2.VideoCapture(0)
count = 0
while True:
    ret, frame = cap.read()
    # Hoisted: the old loop ran face_extractor twice per frame.
    face = face_extractor(frame)
    if face is not None:
        count += 1
        face = cv2.resize(face, (200, 200))              # normalize sample size
        face = cv2.cvtColor(face, cv2.COLOR_BGR2GRAY)
        file_name_path = 'C:/Users/Abhilasha kumari/IdeaProjects/Sample/Sample'+str(count)+'.jpg'
        cv2.imwrite(file_name_path, face)                # save the training sample
        cv2.putText(face, str(count), (50, 50), cv2.FONT_HERSHEY_COMPLEX, 1, (0, 255, 0), 2)
        cv2.imshow('Face Cropper', face)
    else:
        print("Face not found")
    # Bug fix: `cv2.waitKey(1) & count==50` parsed as `(waitKey & count) == 50`
    # because & binds tighter than ==, so the loop ended at an arbitrary frame.
    # Intended behavior (per the original comments): stop after 50 samples, or
    # when Enter (keycode 13) is pressed.
    if cv2.waitKey(1) == 13 or count == 50:
        break
cap.release()
cv2.destroyAllWindows()
print('Samples Colletion Completed ')
################ Model training ###############
# Load every previously collected sample image and train an LBPH recognizer.
data_path = 'C:/Users/Abhilasha kumari/IdeaProjects/Sample/'
onlyfiles = [f for f in listdir(data_path) if isfile(join(data_path, f))]
# print(onlyfiles[0])
# Arrays for training images and their integer labels.
Training_Data, Labels = [], []
# Read each sample as grayscale; the label is simply the file's index
# (all samples belong to the single enrolled person).
for i, files in enumerate(onlyfiles):
    image_path = data_path + onlyfiles[i]
    # print(image_path)
    images = cv2.imread(image_path, cv2.IMREAD_GRAYSCALE)
    # print(type(images),images)
    Training_Data.append(np.asarray(images, dtype=np.uint8))
    Labels.append(i)
# LBPH training expects int32 labels.
Labels = np.asarray(Labels, dtype=np.int32)
# Initialize and train the facial recognizer on the collected samples.
model = cv2.face.LBPHFaceRecognizer_create()
model.train(np.asarray(Training_Data), np.asarray(Labels))
print("Dataset Model Training Completed..")
#################### Detection ###################
# Re-create the same Haar cascade for the live recognition phase.
face_classifier = cv2.CascadeClassifier('C:/Users/Abhilasha kumari/IdeaProjects/Face_Recognition_Mini_Project/opencv-master/data/haarcascades/haarcascade_frontalface_default.xml')
def face_detector(img, size = 0.5):
    """Detect faces in `img`, box them, and return (annotated_img, last_roi).

    Returns (img, []) when no face is found. `size` is kept for backward
    compatibility but is unused by the body.

    Bug fix: replaced the fragile `faces is ()` identity test (SyntaxWarning
    on modern Python, interning-dependent) with an explicit emptiness check.
    """
    gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
    faces = face_classifier.detectMultiScale(gray, 1.3, 5)
    if len(faces) == 0:
        return img, []
    for (x, y, w, h) in faces:
        cv2.rectangle(img, (x, y), (x+w, y+h), (0, 255, 0), 2)  # box the face
        roi = img[y:y+h, x:x+w]
        roi = cv2.resize(roi, (200, 200))  # match the training sample size
    # If several faces were found, the last one's ROI is returned (original behavior).
    return img, roi
# Live recognition: label each webcam frame with the enrolled name or "Unknown".
cap = cv2.VideoCapture(0)
while True:
    ret, frame = cap.read()
    image, face = face_detector(frame)
    try:
        # When no face is found, `face` is [] and cvtColor raises — the
        # except branch below is the deliberate "no face" path.
        face = cv2.cvtColor(face, cv2.COLOR_BGR2GRAY)
        result = model.predict(face)  # (label, distance) from the LBPH model
        if result[1] < 500:
            # Map the LBPH distance onto a rough percentage confidence.
            confidence = int(100 * (1 - (result[1]) / 300))
            if confidence > 82:
                cv2.putText(image, "Abhilasha", (250, 450), cv2.FONT_HERSHEY_COMPLEX, 1, (255, 255, 255), 2)
                cv2.imshow('Face Cropper', image)
            else:
                cv2.putText(image, "Unknown", (250, 450), cv2.FONT_HERSHEY_COMPLEX, 1, (0, 0, 255), 2)
                cv2.imshow('Face Cropper', image)
    except Exception:
        # Fix: was a bare `except:` (which also swallows SystemExit /
        # KeyboardInterrupt) followed by a dead `pass`.
        cv2.putText(image, "Face Not Found", (250, 450), cv2.FONT_HERSHEY_COMPLEX, 1, (255, 0, 0), 2)
        cv2.imshow('Face Cropper', image)
    if cv2.waitKey(1) & 0xFF == ord('q'):  # press 'q' to quit
        break
cap.release()
cv2.destroyAllWindows()
|
# -*- coding: utf-8 -*-
import utils.sqlbase as sqlbase
import utils.sqlitebase as sqlitebase
import initres
# Table and database location for Bing wallpaper metadata.
TBL_NAME = 'tbl_wall'
DB_NAME = initres.bingpath + '/wall.db'
# Schema: one row per day (primary key "day"); HD/UHD image URLs plus their
# OSS mirrors, and download/share counters.
_DB_SCRIPT = """
CREATE TABLE tbl_wall (
"day" VARCHAR(128) NOT NULL default '',
"urlbase" VARCHAR(256) NOT NULL default '',
"copyright" VARCHAR(256) NOT NULL default '',
"copyrightlink" VARCHAR(256) NOT NULL default '',
"hd" VARCHAR(256) NOT NULL default '',
"osshd" VARCHAR(256) NOT NULL default '',
"uhd" VARCHAR(256) NOT NULL default '',
"ossuhd" VARCHAR(256) NOT NULL default '',
"download" INTEGER NOT NULL default 0,
"share" INTEGER NOT NULL default 0,
PRIMARY KEY ("day")
);
"""
class CSqlite:
    """Thin data-access wrapper around the tbl_wall SQLite table."""
    def __init__(self):
        self.conn = None  # unused placeholder; all access goes through self.sqlbase
        self.sqlbase = sqlitebase.SqliteBase(DB_NAME)
    def __del__(self):
        pass
    # Fetch the row(s) for a single day key.
    def getimg(self, day):
        # NOTE(review): `day` is interpolated straight into the SQL string —
        # SQL-injection risk if it can ever come from untrusted input. Confirm
        # whether sqlbase.exec_fetchall supports bound parameters and switch.
        sql = ''' select * from {} where day = \'{}\' '''.format(TBL_NAME, day)
        return self.sqlbase.exec_fetchall(sql)
    # Fetch up to `limit` rows (same injection caveat applies to `limit`).
    def getall(self, limit):
        sql = "select * from {} LIMIT {}".format(TBL_NAME, limit)
        return self.sqlbase.exec_fetchall(sql)
    # Insert a row built from dict `dic`; returns the new rowid, or 0 on failure.
    def add(self, dic):
        sql = sqlbase.dic2insertsql(dic, TBL_NAME)
        if self.sqlbase.exec(sql):
            return self.sqlbase.getlastrowid()
        return 0
sqlitebase.SqliteBase.initdb(DB_NAME, _DB_SCRIPT)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.