| repo_name | path | copies | size | content | license |
|---|---|---|---|---|---|
borgaster/SpaceWarsEvolved | main.py | 1 | 16816 | import time
from animation import *
from asteroidField import *
from background import *
from loader import *
from physics import *
from player import *
from powerup import *
import pygame
from pygame.locals import *
from rotatingMenu_img import *
from spacemenu import *
from starField import *
# default player key bindings
keyPresset1 = [K_LEFT,K_RIGHT,K_UP,K_DOWN, K_SPACE, K_m]
keyPresset2 = [K_a, K_d, K_w, K_s, K_x, K_r]
pygame.init()
def game(numkills,nave1,nave2):
SCREENSIZE = [800,600]
#screen = pygame.display.set_mode(SCREENSIZE,pygame.FULLSCREEN)
## windowed mode below is used while debugging; swap with the fullscreen line above for release
screen = pygame.display.set_mode(SCREENSIZE)
pygame.mouse.set_visible(0)
clock = pygame.time.Clock()
#init background
background = Background(screen,'galaxy.jpg')
# init starfield effect and asteroids
starfield = StarField(screen)
asteroidField = AsteroidField(screen)
# init music (the random selection below is commented out, so rand is unused)
rand = random.randrange(0,2)
# if rand == 0:
# load_music('After Burner.mp3')
#else:
#load_music('Spybreak.mp3')
#load_music('Gundam.mp3')
#init players
player1 = Player((200,SCREENSIZE[1]/2),keyPresset1,1,nave1,numkills)
playerSprite1 = pygame.sprite.RenderPlain((player1))
player1.spin(90,3)
player2 = Player((SCREENSIZE[0]-200,SCREENSIZE[1]/2),keyPresset2,2,nave2,numkills)
playerSprite2 = pygame.sprite.RenderPlain((player2))
player2.spin(90,1)
#powerup stuff variables
powerups_on_screen = False
done = False
retval = 0
powerup_available = 0
# vars used only for the "toasty" guy animation in the corner of the screen
i = random.randrange(1,4)
pickup_timer = 0
while not done:
clock.tick(40)
# if there are no asteroids left, respawn
current_asteroids = len(asteroidField.asteroidSprites)
if current_asteroids <= 0:
current_asteroids = asteroidField.refresh(asteroidField.num_asteroids +1)
if pickup_timer != 0:
elapsed = round(time.time()) # wall-clock seconds; time.clock() measured CPU time and is gone in Python 3.8+
## draw player info
font = pygame.font.SysFont("consola", 20)
ScorePanel1 ="Player 1 - Lives: "+str(player1.statistics[0])+" "+"Score: "+str(player1.statistics[3])
scorePlayer1 = font.render(ScorePanel1, True, (255,255,255))
if nave2 != 0:
ScorePanel2 ="Player 2 - Lives: "+str(player2.statistics[0])+" Score: "+str(player2.statistics[3])
scorePlayer2 = font.render(ScorePanel2, True, (255,255,255))
# draw available powerup info
font = pygame.font.SysFont("consola", 40)
PowerupPanel = ""
if powerups_on_screen == False:
poweruppanel = font.render(PowerupPanel, True, (0,255,0))
#############################
## MOVE PLAYERS
# only one player is playing: handle player 1 keys only
if nave2 == 0:
for event in pygame.event.get():
if event.type == pygame.KEYDOWN:
if event.key == pygame.K_ESCAPE:
done = True
elif event.key == keyPresset1[0]:
player1.dx = -10
player1.spin(90,1)
elif event.key == keyPresset1[1]:
player1.dx = 10
player1.spin(90,3)
elif event.key == keyPresset1[2]:
player1.dy = -10
player1.spin(90,0)
elif event.key == keyPresset1[3]:
player1.dy = 10
player1.spin(90,2)
elif event.type == KEYUP:
if event.key == keyPresset1[0]:
player1.dx = -3
elif event.key == keyPresset1[1]:
player1.dx = 3
elif event.key == keyPresset1[2]:
player1.dy = -3
elif event.key == keyPresset1[3]:
player1.dy = 3
elif event.key == keyPresset1[5]:
player1.changeWeapon()
# two players are playing, capture all the keys
else:
for event in pygame.event.get():
if event.type == pygame.KEYDOWN:
if event.key == pygame.K_ESCAPE:
done = True
elif event.key == keyPresset1[0]:
player1.dx = -10
player1.spin(90,1)
elif event.key == keyPresset1[1]:
player1.dx = 10
player1.spin(90,3)
elif event.key == keyPresset1[2]:
player1.dy = -10
player1.spin(90,0)
elif event.key == keyPresset1[3]:
player1.dy = 10
player1.spin(90,2)
elif event.key == keyPresset2[0]:
player2.dx = -10
player2.spin(90,1)
elif event.key == keyPresset2[1]:
player2.dx = 10
player2.spin(90,3)
elif event.key == keyPresset2[2]:
player2.dy = -10
player2.spin(90,0)
elif event.key == keyPresset2[3]:
player2.dy = 10
player2.spin(90,2)
elif event.type == KEYUP:
if event.key == keyPresset1[0]:
player1.dx = -3
elif event.key == keyPresset1[1]:
player1.dx = 3
elif event.key == keyPresset1[2]:
player1.dy = -3
elif event.key == keyPresset1[3]:
player1.dy = 3
elif event.key == keyPresset1[5]:
player1.changeWeapon()
elif event.key == keyPresset2[0]:
player2.dx = -3
elif event.key == keyPresset2[1]:
player2.dx = 3
elif event.key == keyPresset2[2]:
player2.dy = -3
elif event.key == keyPresset2[3]:
player2.dy = 3
elif event.key == keyPresset2[5]:
player2.changeWeapon()
background.update()
starfield.update()
# compute when a new powerup should activate, and its type
# in single player only the weapon powerup exists
activate_powerups = random.randrange(0,200)
if nave2 != 0:
powerup_type = random.randrange(1,4)
else:
powerup_type = 2
if activate_powerups == 150:
if powerups_on_screen == False:
powerup_available = powerup_type
if (powerup_type == 1):
PowerupPanel = "Health Powerup Available!"
poweruppanel = font.render(PowerupPanel, True, (0,255,0))
elif powerup_type == 2:
PowerupPanel = "Weapon Powerup Available!"
poweruppanel = font.render(PowerupPanel, True, (255,0,0))
else:
PowerupPanel = "Mines Available!!"
poweruppanel = font.render(PowerupPanel, True, (255,0,0))
powerup = Powerup(powerup_available,SCREENSIZE)
powerupSprite = pygame.sprite.RenderPlain((powerup))
powerups_on_screen = True
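# note: randrange(0,200) == 150 is a 1-in-200 test per frame, so at the
# clock.tick(40) frame rate above a powerup spawns roughly every 5 seconds on average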
## POWERUP IS ALREADY ON SCREEN
########################
# intersection calculations
# compute laser collisions between the players
kill = lasers(player1,player2,playerSprite1,playerSprite2,asteroidField)
# if a player was killed, exit
if kill == 1:
done = True
kill = asteroids(player1,player2,playerSprite1,playerSprite2,asteroidField)
# if a player was killed, exit
if kill == 1:
done = True
# pick up powerups
if powerups_on_screen == True:
retval = pickup_powerup(powerup,powerupSprite,player1,playerSprite1,powerup_available)
if retval == 1:
retval = 0
powerups_on_screen = False
if powerup.tipo == 2 and powerup.damagefactor == 4:
pickup_timer = round(time.time())
elapsed = pickup_timer
else:
retval = pickup_powerup(powerup,powerupSprite,player2,playerSprite2,powerup_available)
if retval == 1:
retval = 0
powerups_on_screen = False
if powerup.tipo == 2 and powerup.damagefactor == 4:
pickup_timer = round(time.time())
elapsed = pickup_timer
#############################
# Draw
# draw player 1 (note: some blit coordinates below, e.g. y=740 for the score, lie outside the 800x600 window and get clipped)
screen.blit(scorePlayer1, (10, 740))
playerSprite1.update(screen)
playerSprite1.draw(screen)
player1.draw_health(screen)
player1.draw_stats(screen)
# draw player 2
if nave2 != 0:
screen.blit(scorePlayer2, (10, 750))
playerSprite2.update(screen)
playerSprite2.draw(screen)
player2.draw_health(screen)
player2.draw_stats(screen)
#powerups
screen.blit(poweruppanel, (350, 10))
if powerups_on_screen == True:
powerupSprite.draw(screen)
# draw powerup pickups
for sprite in weapon_pickups:
sprite.render(screen,False)
for sprite in health_pickups:
sprite.render(screen,False)
# draw asteroids
asteroidField.update()
# draw explosions
for sprite in explosoes:
sprite.render(screen,False)
# draw the humor pic
if pickup_timer != 0:
if (elapsed - pickup_timer) < 1.5:
toasty_pic, toasty_rect = load_image("toasty"+str(i)+".PNG", -1)
screen.blit(toasty_pic,(885,650))
else:
pickup_timer = 0
# Changed the random pick because the degree of randomness was low;
# this way we all show up more often :)
listagem=[1,2,3,4]
random.shuffle(listagem)
random.shuffle(listagem)
i = listagem[0]
pygame.display.flip()
## END OF WHILE
#####################################
stop_music()
pygame.display.set_mode([800,600])
return player1,player2
def main():
pygame.init()
SCREENSIZE = [800,600]
screen = pygame.display.set_mode(SCREENSIZE)
pygame.display.set_caption("Space War Evolved")
pygame.mouse.set_visible(0)
# init music
#load_music('menu.mp3')
clock = pygame.time.Clock()
SP, rect = load_image("SP.png", -1)
MP, rect2 = load_image("MP.png", -1)
S, rect3 = load_image("S.png", -1)
H, rect4 = load_image("H.png", -1)
A, rect5 = load_image("A.png", -1)
E, rect6 = load_image("E.png", -1)
SP_red, rect = load_image("SP_red_35_433.png", -1)
MP_red, rect = load_image("MP_red_93_433.png", -1)
S_red, rect = load_image("S_red_151_478.png", -1)
H_red, rect = load_image("H_red_93_478.png", -1)
A_red, rect = load_image("A_red_151_433.png", -1)
E_red, rect = load_image("E_red_35_478.png", -1)
extra, rect = load_image("extra.png", -1)
multi = []
multi_images = load_sliced_sprites(221,34,'multi_player_anim_221x34.png')
single = []
single_images = load_sliced_sprites(243,34,'single_anim_243x34.png')
help = []
help_images = load_sliced_sprites(74,35,'help_anim_74x35.png')
about = []
about_images = load_sliced_sprites(112,29,'about_anim_112x29.png')
exit = []
exit_images = load_sliced_sprites(74,28,'exit_anim_74x28.png')
setkeys = []
setkeys_images = load_sliced_sprites(179,29,'setkeys_anim_179x29.png')
jiproj = []
jiproj_images = load_sliced_sprites(128,160,'ji_proj_128x160.png')
jiproj.append(AnimatedSprite(jiproj_images,129,31))
autores = []
autores_images = load_sliced_sprites(111,160,'autores.png')
autores.append(AnimatedSprite(autores_images,129,217))
moverCursor = load_sound('moverCursor.wav')
moverCursor.set_volume(0.2)
clock = pygame.time.Clock()
menu = RotatingMenu(x=520, y=295, radius=160, arc=pi, defaultAngle=pi/2.0)
background = Background(screen,'Stargate_menu.png')
menu.addItem(MenuItem(H))
menu.addItem(MenuItem(S))
menu.addItem(MenuItem(SP))
menu.addItem(MenuItem(MP))
menu.addItem(MenuItem(A))
menu.addItem(MenuItem(E))
menu.selectItem(2)
#Loop
while True:
#Handle events
events = pygame.event.get()
for event in events:
if event.type == pygame.QUIT:
return False
if event.type == pygame.KEYDOWN:
if event.key == pygame.K_LEFT:
moverCursor.play()
menu.selectItem(menu.selectedItemNumber + 1)
if event.key == pygame.K_RIGHT:
moverCursor.play()
menu.selectItem(menu.selectedItemNumber - 1)
if event.key == pygame.K_RETURN:
if menu.selectedItemNumber == 0:
option2()
elif menu.selectedItemNumber == 1:
option4()
elif menu.selectedItemNumber == 2:
option0()
elif menu.selectedItemNumber == 3:
option1()
elif menu.selectedItemNumber == 4:
option3()
elif menu.selectedItemNumber == 5:
option5()
return False
#Update stuff
background.update()
menu.update()
for sprite in jiproj:
sprite.render(screen,True)
for sprite in autores:
sprite.render(screen,True)
screen.blit(extra, (124,24))
if menu.selectedItemNumber == 0:
single = []
multi = []
exit = []
about = []
setkeys = []
screen.blit(H_red, (93,478))
help.append(AnimatedSprite(help_images,490,280))
elif menu.selectedItemNumber == 1:
single = []
help = []
exit = []
about = []
multi = []
screen.blit(S_red, (151,478))
setkeys.append(AnimatedSprite(setkeys_images,435,280))
elif menu.selectedItemNumber == 2:
help = []
multi = []
exit = []
about = []
setkeys = []
screen.blit(SP_red, (35,433))
single.append(AnimatedSprite(single_images,403,280))
elif menu.selectedItemNumber == 3:
single = []
help = []
exit = []
about = []
setkeys = []
screen.blit(MP_red, (93,433))
multi.append(AnimatedSprite(multi_images,410,280))
elif menu.selectedItemNumber == 4:
single = []
multi = []
exit = []
help = []
setkeys = []
screen.blit(A_red, (151,433))
about.append(AnimatedSprite(about_images,470,280))
elif menu.selectedItemNumber == 5:
single = []
multi = []
help = []
about = []
setkeys = []
screen.blit(E_red, (35,478))
exit.append(AnimatedSprite(exit_images,490,280))
for sprite in multi:
sprite.render(screen,True)
for sprite in single:
sprite.render(screen,True)
for sprite in about:
sprite.render(screen,True)
for sprite in exit:
sprite.render(screen,True)
for sprite in help:
sprite.render(screen,True)
for sprite in setkeys:
sprite.render(screen,True)
#Draw stuff
#display.fill((0,0,0))
menu.draw(screen)
pygame.display.flip() #Show the updated scene
clock.tick(fpsLimit) #Wait a little (fpsLimit presumably comes from one of the star imports above)
if __name__ == "__main__":
main()
| mit |
kgullikson88/GSSP_Analyzer | gsspy/fitting.py | 1 | 19991 | from __future__ import print_function, division, absolute_import
import numpy as np
import matplotlib.pyplot as plt
import os
import sys
import subprocess
from astropy.io import fits
from astropy import time
import DataStructures
from ._utils import combine_orders, read_grid_points, ensure_dir
from .analyzer import GSSP_Analyzer
import logging
import glob
home = os.environ['HOME']
GSSP_EXE = '{}/Applications/GSSP/GSSP_single/GSSP_single'.format(home)
GSSP_ABUNDANCE_TABLES = '{}/Applications/GSSPAbundance_Tables/'.format(home)
GSSP_MODELS = '/media/ExtraSpace/GSSP_Libraries/LLmodels/'
class GSSP_Fitter(object):
teff_minstep = 100
logg_minstep = 0.1
feh_minstep = 0.1
vsini_minstep = 10
vmicro_minstep = 0.1
def __init__(self, filename, gssp_exe=None, abund_tab=None, models_dir=None):
"""
A python wrapper to the GSSP code (must already be installed)
Parameters:
===========
filename: string
The filename of the (flattened) fits spectrum to fit.
gssp_exe: string (optional)
The full path to the gssp executable file
abund_tab: string (optional)
The full path to the directory containing
GSSP abundance tables.
models_dir: string (optional)
The full path to the directory containing
GSSP atmosphere models.
Methods:
==========
fit: Fit the parameters
"""
if gssp_exe is None:
gssp_exe = GSSP_EXE
if abund_tab is None:
abund_tab = GSSP_ABUNDANCE_TABLES
if models_dir is None:
models_dir = GSSP_MODELS
# Read in the file and combine the orders
orders = self._read_fits_file(filename)
combined = combine_orders(orders)
#TODO: Cross-correlate the data to get it close. GSSP might have trouble with huge RVs...
# Get the object name/date
header = fits.getheader(filename)
star = header['OBJECT']
date = header['DATE-OBS']
try:
jd = time.Time(date, format='isot', scale='utc').jd
except TypeError:
jd = time.Time('{}T{}'.format(date, header['UT']), format='isot',
scale='utc').jd
# Save the data to an ascii file
output_basename = '{}-{}'.format(star.replace(' ', ''), jd)
np.savetxt('data_sets/{}.txt'.format(output_basename),
np.transpose((combined.x, combined.y)),
fmt='%.10f')
# Save some instance variables
self.data = combined
self.jd = jd
self.starname = star
self.output_basename = output_basename
self.gssp_exe = os.path.abspath(gssp_exe)
self.abundance_table = abund_tab
self.model_dir = models_dir
self.gssp_gridpoints = read_grid_points(models_dir)
def _run_gssp(self, teff_lims=(7000, 30000), teff_step=1000,
logg_lims=(3.0, 4.5), logg_step=0.5,
feh_lims=(-0.5, 0.5), feh_step=0.5,
vsini_lims=(50, 350), vsini_step=50,
vmicro_lims=(1, 5), vmicro_step=1,
R=80000, ncores=1):
"""
Run GSSP over the requested grid in Teff, log(g), [Fe/H], vsini and vmicro.
"""
# First, make sure the inputs are reasonable.
teff_step = max(teff_step, self.teff_minstep)
logg_step = max(logg_step, self.logg_minstep)
feh_step = max(feh_step, self.feh_minstep)
vsini_step = max(vsini_step, self.vsini_minstep)
vmicro_step = max(vmicro_step, self.vmicro_minstep)
teff_lims = (min(teff_lims), max(teff_lims))
logg_lims = (min(logg_lims), max(logg_lims))
feh_lims = (min(feh_lims), max(feh_lims))
vsini_lims = (min(vsini_lims), max(vsini_lims))
vmicro_lims = (min(vmicro_lims), max(vmicro_lims))
teff_lims, logg_lims, feh_lims = self._check_grid_limits(teff_lims,
logg_lims,
feh_lims)
# Make the input file for GSSP
inp_file=self._make_input_file(teff_lims=teff_lims, teff_step=teff_step,
logg_lims=logg_lims, logg_step=logg_step,
feh_lims=feh_lims, feh_step=feh_step,
vsini_lims=vsini_lims, vsini_step=vsini_step,
vmicro_lims=vmicro_lims, vmicro_step=vmicro_step,
resolution=R)
# Run GSSP
subprocess.check_call(['mpirun', '-n', '{}'.format(ncores),
'{}'.format(self.gssp_exe),
'{}'.format(inp_file)])
# Move the output directory to a new name that won't be overridden
output_dir = '{}_output'.format(self.output_basename)
ensure_dir(output_dir)
for f in glob.glob('output_files/*'):
subprocess.check_call(['mv', f, '{}/'.format(output_dir)])
return
def fit(self, teff_lims=(7000, 30000), teff_step=1000,
logg_lims=(3.0, 4.5), logg_step=0.5,
feh_lims=(-0.5, 0.5), feh_step=0.5,
vsini_lims=(50, 350), vsini_step=50,
vmicro_lims=(1, 5), vmicro_step=1,
R=80000, ncores=1, refine=True):
"""
Fit the stellar parameters with GSSP
Parameters:
=============
par_lims: iterable with (at least) two objects
The limits on the given parameter. 'par' can be one of:
1. teff: The effective temperature
2. logg: The surface gravity
3. feh: The metallicity [Fe/H]
4. vsini: The rotational velocity
5. vmicro: The microturbulent velocity
The default values are a very large, very coarse grid.
Consider refining based on spectral type first!
par_step: float
The initial step size to take in the given parameter.
'par' can be from the same list as above.
R: float
The spectrograph resolving power (lambda/delta-lambda)
ncores: integer, default=1
The number of cores to use in the GSSP run.
refine: boolean
Should we run GSSP again with a smaller grid after the
initial fit? If yes, the best answers will probably be
better.
Returns:
=========
A pd.Series object with the best parameters
"""
# Run GSSP
self._run_gssp(teff_lims=teff_lims, teff_step=teff_step,
logg_lims=logg_lims, logg_step=logg_step,
feh_lims=feh_lims, feh_step=feh_step,
vsini_lims=vsini_lims, vsini_step=vsini_step,
vmicro_lims=vmicro_lims, vmicro_step=vmicro_step,
R=R, ncores=ncores)
# Look at the output and save the figures
output_dir = '{}_output'.format(self.output_basename)
best_pars, figs = GSSP_Analyzer(output_dir).estimate_best_parameters()
for par in figs.keys():
fig = figs[par]
fig.savefig(os.path.join(output_dir, '{}_coarse.pdf'.format(par)))
plt.close('all')
if not refine:
return best_pars
# If we get here, we should restrict the grid near the
# best solution and fit again
teff_lims = self._get_refined_limits(lower=best_pars['1sig_CI_lower_Teff'],
upper=best_pars['1sig_CI_upper_Teff'],
values=self.gssp_gridpoints.teff)
logg_lims = self._get_refined_limits(lower=best_pars['1sig_CI_lower_logg'],
upper=best_pars['1sig_CI_upper_logg'],
values=self.gssp_gridpoints.logg)
feh_lims = self._get_refined_limits(lower=best_pars['1sig_CI_lower_feh'],
upper=best_pars['1sig_CI_upper_feh'],
values=self.gssp_gridpoints.feh)
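# widen the refined vsini window by extrapolating the 1-sigma offsets by 1.5x:
# best + 1.5*(CI_bound - best) on each side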
vsini_lower = best_pars.best_vsini*(1-1.5) + 1.5*best_pars['1sig_CI_lower_vsini']
vsini_upper = best_pars.best_vsini*(1-1.5) + 1.5*best_pars['1sig_CI_upper_vsini']
vsini_lims = (max(10, vsini_lower), min(400, vsini_upper))
vsini_step = max(self.vsini_minstep, (vsini_lims[1] - vsini_lims[0])/10)
vmicro_lims = (best_pars.micro_turb, best_pars.micro_turb)
# Rename the files in the output directory so they don't get overwritten
file_list = ['CCF.dat', 'Chi2_table.dat',
'Observed_spectrum.dat', 'Synthetic_best_fit.rgs']
ensure_dir(os.path.join(output_dir, 'coarse_output', ''))
for f in file_list:
original_fname = os.path.join(output_dir, f)
new_fname = os.path.join(output_dir, 'coarse_output', f)
subprocess.check_call(['mv', original_fname, new_fname])
# Run GSSP on the refined grid
self._run_gssp(teff_lims=teff_lims, teff_step=self.teff_minstep,
logg_lims=logg_lims, logg_step=self.logg_minstep,
feh_lims=feh_lims, feh_step=self.feh_minstep,
vsini_lims=vsini_lims, vsini_step=round(vsini_step),
vmicro_lims=vmicro_lims, vmicro_step=vmicro_step,
R=R, ncores=ncores)
best_pars, figs = GSSP_Analyzer(output_dir).estimate_best_parameters()
for par in figs.keys():
fig = figs[par]
fig.savefig(os.path.join(output_dir, '{}_fine.pdf'.format(par)))
plt.close(fig)
return best_pars
def _check_grid_limits_old(self, teff_lims, logg_lims, feh_lims):
df = self.gssp_gridpoints[['teff', 'logg', 'feh']].drop_duplicates()
# First, check if the limits are do-able
lower = df.loc[(df.teff <= teff_lims[0]) &
(df.logg <= logg_lims[0]) &
(df.feh <= feh_lims[0])]
upper = df.loc[(df.teff >= teff_lims[1]) &
(df.logg >= logg_lims[1]) &
(df.feh >= feh_lims[1])]
if len(upper) >= 1 and len(lower) >= 1:
return teff_lims, logg_lims, feh_lims
# If we get here, there is a problem...
# Check temperature first:
if not (len(df.loc[df.teff <= teff_lims[0]]) >= 1 and
len(df.loc[df.teff >= teff_lims[1]]) >= 1):
# Temperature grid is no good.
low_teff, high_teff = df.teff.min(), df.teff.max()
print('The temperature grid is not available in the model library!')
print('You wanted temperatures from {} - {}'.format(*teff_lims))
print('The model grid extends from {} - {}'.format(low_teff, high_teff))
new_teff_lims = (max(low_teff, teff_lims[0]),
min(high_teff, teff_lims[1]))
print('Resetting temperature limits to {} - {}'.format(*new_teff_lims))
return self._check_grid_limits_old(new_teff_lims, logg_lims, feh_lims)
# Check log(g) next:
teff_df = df.loc[(df.teff >= teff_lims[0]) & (df.teff <= teff_lims[1])]
if not (len(teff_df.loc[teff_df.logg <= logg_lims[0]]) >= 1 and
len(teff_df.loc[teff_df.logg >= logg_lims[1]]) >= 1):
# log(g) grid is no good.
low_logg, high_logg = df.logg.min(), df.logg.max()
print('The log(g) grid is not available in the model library!')
print('You wanted log(g) from {} - {}'.format(*logg_lims))
print('The model grid extends from {} - {}'.format(low_logg, high_logg))
new_logg_lims = (max(low_logg, logg_lims[0]),
min(high_logg, logg_lims[1]))
print('Resetting log(g) limits to {} - {}'.format(*new_logg_lims))
return self._check_grid_limits_old(teff_lims, new_logg_lims, feh_lims)
# Finally, check [Fe/H]:
subset_df = df.loc[(df.teff >= teff_lims[0]) &
(df.teff <= teff_lims[1]) &
(df.logg >= logg_lims[0]) &
(df.logg <= logg_lims[1])]
if not (len(subset_df.loc[subset_df.feh <= feh_lims[0]]) >= 1 and
len(subset_df.loc[subset_df.feh >= feh_lims[1]]) >= 1):
# [Fe/H] grid is no good.
low_feh, high_feh = df.feh.min(), df.feh.max()
print('The [Fe/H] grid is not available in the model library!')
print('You wanted [Fe/H] from {} - {}'.format(*feh_lims))
print('The model grid extends from {} - {}'.format(low_feh, high_feh))
new_feh_lims = (max(low_feh, feh_lims[0]),
min(high_feh, feh_lims[1]))
print('Resetting [Fe/H] limits to {} - {}'.format(*new_feh_lims))
return self._check_grid_limits_old(teff_lims, logg_lims, new_feh_lims)
# We should never get here
raise ValueError('Something weird happened while checking limits!')
def _check_grid_limits(self, teff_lims, logg_lims, feh_lims):
df = self.gssp_gridpoints[['teff', 'logg', 'feh']].drop_duplicates()
# First, check if the limits are do-able as is
lower = df.loc[(df.teff == teff_lims[0]) & (df.feh == feh_lims[0])]
upper = df.loc[(df.teff == teff_lims[1]) & (df.feh == feh_lims[1])]
if (lower.logg.min() <= logg_lims[0] and
lower.logg.max() >= logg_lims[1] and
upper.logg.min() <= logg_lims[0] and
upper.logg.max() >= logg_lims[1]):
return teff_lims, logg_lims, feh_lims
# If we get here, there is a problem...
# Check temperature first:
low_teff, high_teff = df.teff.min(), df.teff.max()
if low_teff > teff_lims[0] or high_teff < teff_lims[1]:
print('The temperature grid is not available in the model library!')
print('You wanted temperatures from {} - {}'.format(*teff_lims))
print('The model grid extends from {} - {}'.format(low_teff, high_teff))
new_teff_lims = (max(low_teff, teff_lims[0]),
min(high_teff, teff_lims[1]))
print('Resetting temperature limits to {} - {}'.format(*new_teff_lims))
return self._check_grid_limits(new_teff_lims, logg_lims, feh_lims)
# Check [Fe/H] next
subset_df = df.loc[(df.teff >= teff_lims[0]) &
(df.teff <= teff_lims[1])]
low_feh, high_feh = subset_df.feh.min(), subset_df.feh.max()
if low_feh > feh_lims[0] or high_feh < feh_lims[1]:
print('The [Fe/H] grid is not available in the model library!')
print('You wanted [Fe/H] from {} - {}'.format(*feh_lims))
print('The model grid extends from {} - {}'.format(low_feh, high_feh))
new_feh_lims = (max(low_feh, feh_lims[0]),
min(high_feh, feh_lims[1]))
print('Resetting [Fe/H] limits to {} - {}'.format(*new_feh_lims))
return self._check_grid_limits(teff_lims, logg_lims, new_feh_lims)
# Finally, check log(g)
subset_df = subset_df.loc[(subset_df.feh >= feh_lims[0]) &
(subset_df.feh <= feh_lims[1])]
low_logg, high_logg = subset_df.logg.min(), subset_df.logg.max()
if low_logg > logg_lims[0] or high_logg < logg_lims[1]:
print('The log(g) grid is not available in the model library!')
print('You wanted log(g) from {} - {}'.format(*logg_lims))
print('The model grid extends from {} - {}'.format(low_logg, high_logg))
new_logg_lims = (max(low_logg, logg_lims[0]),
min(high_logg, logg_lims[1]))
print('Resetting log(g) limits to {} - {}'.format(*new_logg_lims))
return self._check_grid_limits(teff_lims, new_logg_lims, feh_lims)
# We should never get here
raise ValueError('Something weird happened while checking limits!')
def _get_refined_limits(self, lower, upper, values):
"""
Get the items in the 'values' array that are just
less than lower and just more than upper.
"""
unique_values = sorted(np.unique(values))
l_idx = np.searchsorted(unique_values, lower, side='left')
r_idx = np.searchsorted(unique_values, upper, side='right')
if l_idx > 0:
l_idx -= 1
if r_idx < len(unique_values) - 1:
r_idx += 1
return unique_values[l_idx], unique_values[r_idx]
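# Worked example (hypothetical numbers): with unique grid values
# [7000, 8000, 9000, 10000] and confidence bounds lower=8000, upper=9000
# (grid points, as a coarse run returns), the index adjustments widen the
# window by one grid step on each side, giving (7000, 10000).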
def _read_fits_file(self, fname):
orders = []
hdulist = fits.open(fname)
for i, hdu in enumerate(hdulist[1:]):
xypt = DataStructures.xypoint(x=hdu.data['wavelength'],
y=hdu.data['flux'],
cont=hdu.data['continuum'],
err=hdu.data['error'])
xypt.x *= 10 #Convert from nanometers to angstrom
orders.append(xypt)
return orders
def _make_input_file(self, teff_lims, teff_step, logg_lims, logg_step,
feh_lims, feh_step, vsini_lims, vsini_step,
vmicro_lims, vmicro_step, resolution):
""" Make the input file for the given star
"""
output_string = '{:.1f} {:.0f} {:.1f}\n'.format(teff_lims[0],
teff_step,
teff_lims[-1])
output_string += '{:.1f} {:.1f} {:.1f}\n'.format(logg_lims[0],
logg_step,
logg_lims[1])
output_string += '{:.1f} {:.1f} {:.1f}\n'.format(vmicro_lims[0],
vmicro_step,
vmicro_lims[1])
output_string += '{:.1f} {:.1f} {:.1f}\n'.format(vsini_lims[0],
vsini_step,
vsini_lims[1])
output_string += "skip 0.03 0.02 0.07 !dilution factor\n"
output_string += 'skip {:.1f} {:.1f} {:.1f}\n'.format(feh_lims[0],
feh_step,
feh_lims[1])
output_string += 'He 0.04 0.005 0.06 ! Individual abundance\n'
output_string += '0.0 {:.0f}\n'.format(resolution)
output_string += '{}\n{}\n'.format(self.abundance_table, self.model_dir)
output_string += '2 1 !atmosphere model vmicro and mass\n'
output_string += 'ST ! model atmosphere chemical composition flag\n'
dx = self.data.x[1] - self.data.x[0]
output_string += '1 {:.5f} fit\n'.format(dx)
output_string += 'data_sets/{}.txt\n'.format(self.output_basename)
output_string += '0.5 0.99 0.0 adjust ! RV determination stuff\n'
xmin, xmax = self.data.x[0]-1, self.data.x[-1]+1
output_string += '{:.1f} {:.1f}\n'.format(xmin, xmax)
outfilename = '{}.inp'.format(self.output_basename)
with open(outfilename, 'w') as outfile:
outfile.write(output_string)
return outfilename
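# Minimal usage sketch (illustrative paths; assumes GSSP, MPI and the model
# grids configured at the top of this module are installed):
# fitter = GSSP_Fitter('HD12345.fits')
# best_pars = fitter.fit(teff_lims=(8000, 12000), teff_step=500, ncores=4)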
| mit |
gussmith23/babble_bot | mangle.py | 1 | 3519 | import random
import configparser
from enum import Enum
import googletrans
# get config
config = configparser.ConfigParser()
config.read("babble_bot.cfg")
class MangleMethod(Enum):
flipflop = 1
straight = 2
manual = 3
def __str__(self):
if self == MangleMethod.flipflop:
return "flip flop: flip flop between a primary language and random languages."
elif self == MangleMethod.straight:
return "straight: run through a completely random list of languages."
elif self == MangleMethod.manual:
return "manual: language path specified by the user manually."
else:
raise NotImplementedError(
"MangleMethod value's __str__ conversion not implemented.")
class Mangle:
def __init__(self, client_key, language, low, high, language_blacklist):
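# client_key is unused here (apparently left over from an earlier translation
# backend; the class now goes through googletrans)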
self.language = language
self.translator = googletrans.Translator()
self.languages = set(googletrans.LANGUAGES.keys()) - language_blacklist
self.low = low
self.high = high
def mangle(self, message_text, times=0, method=None, language_list=None):
if method == MangleMethod.manual and not language_list:
raise ValueError("No language list given.")
if method is None:
method = random.sample(
set(MangleMethod) - set([MangleMethod.manual]), 1)[0]
if times < 0:
raise ValueError("Parameter times must be non-negative.")
if times == 0:
times = random.randint(self.low, self.high)
if method == MangleMethod.manual:
language_list.insert(0, self.language)
language_list.append(self.language)
elif method == MangleMethod.flipflop:
language_list = []
language_list.append(self.language)
for i in range(int(times / 2)):
language_list.extend(
[random.sample(self.languages, 1)[0], self.language])
elif method == MangleMethod.straight:
language_list = []
language_list.append(self.language)
language_list.extend(random.sample(self.languages, times))
language_list.append(self.language)
else:
raise NotImplementedError(
"MangleMethod {} not implemented.".format(method))
all_messages = [message_text]
for i in range(len(language_list)):
if i == 0:
continue
try:
#text = self.translator.translate(all_messages[i - 1],
# from_lang = language_list[i - 1],
# to_lang = language_list[i])
params = {
'text': all_messages[i - 1],
'from': language_list[i - 1],
'to': language_list[i],
'contentType': 'text/plain',
'category': 'general',
}
text = self.translator.translate(params['text'],
src=params['from'],
dest=params['to']).text
all_messages.append(text)
except Exception as e:
all_messages = False
break
message_info = {
'method': str(method),
'languages': language_list,
'all_messages': all_messages
}
return message_info
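# Usage sketch (hypothetical values; client_key is unused, see __init__):
# m = Mangle(None, 'en', low=3, high=6, language_blacklist=set())
# result = m.mangle("hello world", method=MangleMethod.straight)
# print(result['languages'], result['all_messages'][-1])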
| gpl-3.0 |
webmasterraj/GaSiProMo | flask/lib/python2.7/site-packages/pandas/computation/eval.py | 14 | 8348 | #!/usr/bin/env python
"""Top level ``eval`` module.
"""
import tokenize
from pandas.core import common as com
from pandas.computation.expr import Expr, _parsers, tokenize_string
from pandas.computation.scope import _ensure_scope
from pandas.compat import DeepChainMap, builtins
from pandas.computation.engines import _engines
from distutils.version import LooseVersion
def _check_engine(engine):
"""Make sure a valid engine is passed.
Parameters
----------
engine : str
Raises
------
KeyError
* If an invalid engine is passed
ImportError
* If numexpr was requested but doesn't exist
"""
if engine not in _engines:
raise KeyError('Invalid engine {0!r} passed, valid engines are'
' {1}'.format(engine, list(_engines.keys())))
# TODO: validate this in a more general way (thinking of future engines
# that won't necessarily be import-able)
# Could potentially be done on engine instantiation
if engine == 'numexpr':
try:
import numexpr
except ImportError:
raise ImportError("'numexpr' not found. Cannot use "
"engine='numexpr' for query/eval "
"if 'numexpr' is not installed")
else:
ne_version = numexpr.__version__
if ne_version < LooseVersion('2.1'):
raise ImportError("'numexpr' version is %s, "
"must be >= 2.1" % ne_version)
def _check_parser(parser):
"""Make sure a valid parser is passed.
Parameters
----------
parser : str
Raises
------
KeyError
* If an invalid parser is passed
"""
if parser not in _parsers:
raise KeyError('Invalid parser {0!r} passed, valid parsers are'
' {1}'.format(parser, _parsers.keys()))
def _check_resolvers(resolvers):
if resolvers is not None:
for resolver in resolvers:
if not hasattr(resolver, '__getitem__'):
name = type(resolver).__name__
raise TypeError('Resolver of type %r does not implement '
'the __getitem__ method' % name)
def _check_expression(expr):
"""Make sure an expression is not an empty string
Parameters
----------
expr : object
An object that can be converted to a string
Raises
------
ValueError
* If expr is an empty string
"""
if not expr:
raise ValueError("expr cannot be an empty string")
def _convert_expression(expr):
"""Convert an object to an expression.
This function converts an object to an expression (a unicode string) and
checks to make sure it isn't empty after conversion. This is used to
convert operators to their string representation for recursive calls to
:func:`~pandas.eval`.
Parameters
----------
expr : object
The object to be converted to a string.
Returns
-------
s : unicode
The string representation of an object.
Raises
------
ValueError
* If the expression is empty.
"""
s = com.pprint_thing(expr)
_check_expression(s)
return s
def _check_for_locals(expr, stack_level, parser):
at_top_of_stack = stack_level == 0
not_pandas_parser = parser != 'pandas'
if not_pandas_parser:
msg = "The '@' prefix is only supported by the pandas parser"
elif at_top_of_stack:
msg = ("The '@' prefix is not allowed in "
"top-level eval calls, \nplease refer to "
"your variables by name without the '@' "
"prefix")
if at_top_of_stack or not_pandas_parser:
for toknum, tokval in tokenize_string(expr):
if toknum == tokenize.OP and tokval == '@':
raise SyntaxError(msg)
def eval(expr, parser='pandas', engine='numexpr', truediv=True,
local_dict=None, global_dict=None, resolvers=(), level=0,
target=None):
"""Evaluate a Python expression as a string using various backends.
The following arithmetic operations are supported: ``+``, ``-``, ``*``,
``/``, ``**``, ``%``, ``//`` (python engine only) along with the following
boolean operations: ``|`` (or), ``&`` (and), and ``~`` (not).
Additionally, the ``'pandas'`` parser allows the use of :keyword:`and`,
:keyword:`or`, and :keyword:`not` with the same semantics as the
corresponding bitwise operators. :class:`~pandas.Series` and
:class:`~pandas.DataFrame` objects are supported and behave as they would
with plain ol' Python evaluation.
Parameters
----------
expr : str or unicode
The expression to evaluate. This string cannot contain any Python
`statements
<http://docs.python.org/2/reference/simple_stmts.html#simple-statements>`__,
only Python `expressions
<http://docs.python.org/2/reference/simple_stmts.html#expression-statements>`__.
parser : string, default 'pandas', {'pandas', 'python'}
The parser to use to construct the syntax tree from the expression. The
default of ``'pandas'`` parses code slightly different than standard
Python. Alternatively, you can parse an expression using the
``'python'`` parser to retain strict Python semantics. See the
:ref:`enhancing performance <enhancingperf.eval>` documentation for
more details.
engine : string, default 'numexpr', {'python', 'numexpr'}
The engine used to evaluate the expression. Supported engines are
- ``'numexpr'``: This default engine evaluates pandas objects using
numexpr for large speed ups in complex expressions
with large frames.
- ``'python'``: Performs operations as if you had ``eval``'d in top
level python. This engine is generally not that useful.
More backends may be available in the future.
truediv : bool, optional
Whether to use true division, like in Python >= 3
local_dict : dict or None, optional
A dictionary of local variables, taken from locals() by default.
global_dict : dict or None, optional
A dictionary of global variables, taken from globals() by default.
resolvers : list of dict-like or None, optional
A list of objects implementing the ``__getitem__`` special method that
you can use to inject an additional collection of namespaces to use for
variable lookup. For example, this is used in the
:meth:`~pandas.DataFrame.query` method to inject the
:attr:`~pandas.DataFrame.index` and :attr:`~pandas.DataFrame.columns`
variables that refer to their respective :class:`~pandas.DataFrame`
instance attributes.
level : int, optional
The number of prior stack frames to traverse and add to the current
scope. Most users will **not** need to change this parameter.
target : a target object for assignment, optional, default is None
essentially this is a passed in resolver
Returns
-------
ndarray, numeric scalar, DataFrame, Series
Notes
-----
The ``dtype`` of any objects involved in an arithmetic ``%`` operation are
recursively cast to ``float64``.
See the :ref:`enhancing performance <enhancingperf.eval>` documentation for
more details.
See Also
--------
pandas.DataFrame.query
pandas.DataFrame.eval
"""
expr = _convert_expression(expr)
_check_engine(engine)
_check_parser(parser)
_check_resolvers(resolvers)
_check_for_locals(expr, level, parser)
# get our (possibly passed-in) scope
level += 1
env = _ensure_scope(level, global_dict=global_dict,
local_dict=local_dict, resolvers=resolvers,
target=target)
parsed_expr = Expr(expr, engine=engine, parser=parser, env=env,
truediv=truediv)
# construct the engine and evaluate the parsed expression
eng = _engines[engine]
eng_inst = eng(parsed_expr)
ret = eng_inst.evaluate()
# assign if needed
if env.target is not None and parsed_expr.assigner is not None:
env.target[parsed_expr.assigner] = ret
return None
return ret
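# Example of the semantics documented above (a sketch, not part of the module):
# import pandas as pd
# df = pd.DataFrame({'a': [1, 2, 3], 'b': [4, 5, 6]})
# pd.eval('df.a + df.b') # arithmetic through the default numexpr engine
# pd.eval('df.a > 1 and df.b < 6') # and/or/not allowed by the 'pandas' parser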
| gpl-2.0 |
orchidinfosys/odoo | addons/crm/report/crm_opportunity_report.py | 3 | 5126 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from openerp.addons.crm import crm_stage
from openerp.osv import fields, osv
from openerp import tools
class crm_opportunity_report(osv.Model):
""" CRM Opportunity Analysis """
_name = "crm.opportunity.report"
_auto = False
_description = "CRM Opportunity Analysis"
_rec_name = 'date_deadline'
_columns = {
'date_deadline': fields.date('Expected Closing', readonly=True),
'create_date': fields.datetime('Creation Date', readonly=True),
'opening_date': fields.datetime('Assignation Date', readonly=True),
'date_closed': fields.datetime('Close Date', readonly=True),
'date_last_stage_update': fields.datetime('Last Stage Update', readonly=True),
'active': fields.boolean('Active', readonly=True),
# durations
'delay_open': fields.float('Delay to Assign',digits=(16,2),readonly=True, group_operator="avg",help="Number of Days to open the case"),
'delay_close': fields.float('Delay to Close',digits=(16,2),readonly=True, group_operator="avg",help="Number of Days to close the case"),
'delay_expected': fields.float('Overpassed Deadline',digits=(16,2),readonly=True, group_operator="avg"),
'user_id':fields.many2one('res.users', 'User', readonly=True),
'team_id':fields.many2one('crm.team', 'Sales Team', oldname='section_id', readonly=True),
'nbr_activities': fields.integer('# of Activities', readonly=True),
'city': fields.char('City'),
'country_id':fields.many2one('res.country', 'Country', readonly=True),
'company_id': fields.many2one('res.company', 'Company', readonly=True),
'probability': fields.float('Probability',digits=(16,2),readonly=True, group_operator="avg"),
'total_revenue': fields.float('Total Revenue',digits=(16,2),readonly=True),
'expected_revenue': fields.float('Expected Revenue', digits=(16,2),readonly=True),
'stage_id': fields.many2one ('crm.stage', 'Stage', readonly=True, domain="['|', ('team_id', '=', False), ('team_id', '=', team_id)]"),
'stage_name': fields.char('Stage Name', readonly=True),
'partner_id': fields.many2one('res.partner', 'Partner' , readonly=True),
'priority': fields.selection(crm_stage.AVAILABLE_PRIORITIES, 'Priority'),
'type':fields.selection([
('lead','Lead'),
('opportunity','Opportunity'),
],'Type', help="Type is used to separate Leads and Opportunities"),
'lost_reason': fields.many2one('crm.lost.reason', 'Lost Reason', readonly=True),
'date_conversion': fields.datetime('Conversion Date', readonly=True),
'campaign_id': fields.many2one('utm.campaign', 'Campaign', readonly=True),
'source_id':fields.many2one('utm.source', 'Source', readonly=True),
'medium_id': fields.many2one('utm.medium', 'Medium', readonly=True),
}
def init(self, cr):
tools.drop_view_if_exists(cr, 'crm_opportunity_report')
cr.execute("""
CREATE OR REPLACE VIEW crm_opportunity_report AS (
SELECT
c.id,
c.date_deadline,
c.date_open as opening_date,
c.date_closed as date_closed,
c.date_last_stage_update as date_last_stage_update,
c.user_id,
c.probability,
c.stage_id,
stage.name as stage_name,
c.type,
c.company_id,
c.priority,
c.team_id,
activity.nbr_activities,
c.active,
c.campaign_id,
c.source_id,
c.medium_id,
c.partner_id,
c.city,
c.country_id,
c.planned_revenue as total_revenue,
c.planned_revenue*(c.probability/100) as expected_revenue,
c.create_date as create_date,
extract('epoch' from (c.date_closed-c.create_date))/(3600*24) as delay_close,
abs(extract('epoch' from (c.date_deadline - c.date_closed))/(3600*24)) as delay_expected,
extract('epoch' from (c.date_open-c.create_date))/(3600*24) as delay_open,
c.lost_reason,
c.date_conversion as date_conversion
FROM
"crm_lead" c
LEFT JOIN (
SELECT m.res_id, COUNT(*) nbr_activities
FROM "mail_message" m
WHERE m.model = 'crm.lead'
GROUP BY m.res_id ) activity
ON
(activity.res_id = c.id)
LEFT JOIN "crm_stage" stage
ON stage.id = c.stage_id
GROUP BY c.id, activity.nbr_activities, stage.name
)""")
| gpl-3.0 |
aitormf/JdeRobot | src/libs/comm_py/comm/ice/pose3dIceClient.py | 7 | 6073 | #
# Copyright (C) 1997-2017 JDE Developers Team
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see http://www.gnu.org/licenses/.
# Authors :
# Aitor Martinez Fernandez <aitor.martinez.fernandez@gmail.com>
#
import traceback
import jderobot
import threading
import Ice
from .threadSensor import ThreadSensor
from math import asin, atan2, pi
from jderobotTypes import Pose3d
class Pose3D:
'''
Pose3d Connector. Recives Pose3d from Ice interface when you run update method.
'''
def __init__(self, jdrc, prefix):
'''
Pose3d Contructor.
Exits When it receives a Exception diferent to Ice.ConnectionRefusedException
@param jdrc: Comm Communicator
@param prefix: prefix name of client in config file
@type ic: Ice Communicator
@type prefix: String
'''
self.lock = threading.Lock()
self.pose = Pose3d()
try:
ic = jdrc.getIc()
proxyStr = jdrc.getConfig().getProperty(prefix+".Proxy")
base = ic.stringToProxy(proxyStr)
self.proxy = jderobot.Pose3DPrx.checkedCast(base)
prop = ic.getProperties()
self.update()
if not self.proxy:
print ('Interface ' + prefix + ' not configured')
except Ice.ConnectionRefusedException:
print(prefix + ': connection refused')
except:
traceback.print_exc()
exit(-1)
def update(self):
'''
Updates Pose3d.
'''
pos = Pose3d()
if self.hasproxy():
pose = self.proxy.getPose3DData()
pos.yaw = self.quat2Yaw(pose.q0, pose.q1, pose.q2, pose.q3)
pos.pitch = self.quat2Pitch(pose.q0, pose.q1, pose.q2, pose.q3)
pos.roll = self.quat2Roll(pose.q0, pose.q1, pose.q2, pose.q3)
pos.x = pose.x
pos.y = pose.y
pos.z = pose.z
pos.h = pose.h
pos.q = [pose.q0, pose.q1, pose.q2, pose.q3]
self.lock.acquire()
self.pose = pos
self.lock.release()
def hasproxy (self):
'''
Returns if proxy has ben created or not.
@return if proxy has ben created or not (Boolean)
'''
return hasattr(self,"proxy") and self.proxy
def getPose3d(self):
'''
Returns last Pose3d.
@return last JdeRobotTypes Pose3d saved
'''
self.lock.acquire()
pose = self.pose
self.lock.release()
return pose
def quat2Yaw(self, qw, qx, qy, qz):
'''
Translates from Quaternion to Yaw.
@param qw,qx,qy,qz: Quaternion values
@type qw,qx,qy,qz: float
@return Yaw value translated from Quaternion
'''
rotateZa0=2.0*(qx*qy + qw*qz)
rotateZa1=qw*qw + qx*qx - qy*qy - qz*qz
rotateZ=0.0
if(rotateZa0 != 0.0 and rotateZa1 != 0.0):
rotateZ=atan2(rotateZa0,rotateZa1)
return rotateZ
def quat2Pitch(self, qw, qx, qy, qz):
'''
Translates from Quaternion to Pitch.
@param qw,qx,qy,qz: Quaternion values
@type qw,qx,qy,qz: float
@return Pitch value translated from Quaternion
'''
rotateYa0=-2.0*(qx*qz - qw*qy)
rotateY=0.0
if(rotateYa0 >= 1.0):
rotateY = pi/2.0
elif(rotateYa0 <= -1.0):
rotateY = -pi/2.0
else:
rotateY = asin(rotateYa0)
return rotateY
def quat2Roll (self, qw, qx, qy, qz):
'''
Translates from Quaternion to Roll.
@param qw,qx,qy,qz: Quaternion values
@type qw,qx,qy,qz: float
@return Roll value translated from Quaternion
'''
rotateXa0=2.0*(qy*qz + qw*qx)
rotateXa1=qw*qw - qx*qx - qy*qy + qz*qz
rotateX=0.0
if(rotateXa0 != 0.0 and rotateXa1 != 0.0):
rotateX=atan2(rotateXa0, rotateXa1)
return rotateX
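# The three helpers above implement the standard yaw/pitch/roll (Z-Y-X Euler)
# extraction from a unit quaternion (qw, qx, qy, qz):
# yaw = atan2(2(qx*qy + qw*qz), qw^2 + qx^2 - qy^2 - qz^2)
# pitch = asin(-2(qx*qz - qw*qy)), clamped to [-pi/2, pi/2]
# roll = atan2(2(qy*qz + qw*qx), qw^2 - qx^2 - qy^2 + qz^2)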
class Pose3dIceClient:
'''
Pose3d Ice Client. Recives Pose3d from Ice interface running Pose3d update method in a thread.
'''
def __init__(self,ic,prefix, start = False):
'''
Pose3dIceClient Contructor.
@param ic: Ice Communicator
@param prefix: prefix name of client in config file
@param start: indicates if start automatically the client
@type ic: Ice Communicator
@type prefix: String
@type start: Boolean
'''
self.pose3d = Pose3D(ic,prefix)
self.kill_event = threading.Event()
self.thread = ThreadSensor(self.pose3d, self.kill_event)
self.thread.daemon = True
if start:
self.start()
def start(self):
'''
Starts the client. If client is stopped you can not start again, Threading.Thread raised error
'''
self.kill_event.clear()
self.thread.start()
def stop(self):
'''
Stops the client. If client is stopped you can not start again, Threading.Thread raised error
'''
self.kill_event.set()
def getPose3d(self):
'''
Returns last Pose3d.
@return last JdeRobotTypes Pose3d saved
'''
return self.pose3d.getPose3d()
def hasproxy (self):
'''
Returns if proxy has ben created or not.
@return if proxy has ben created or not (Boolean)
'''
return self.pose3d.hasproxy()
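# Usage sketch (assumes a running Ice server and a "Pose3D.Proxy" entry in the
# config file; jdrc stands for an already-created Comm communicator):
# client = Pose3dIceClient(jdrc, "Pose3D", start=True)
# pose = client.getPose3d()
# print(pose.x, pose.y, pose.yaw)
# client.stop()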
| gpl-3.0 |
ychfan/tensorflow | tensorflow/contrib/rnn/python/tools/checkpoint_convert.py | 37 | 10962 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
r"""Convert checkpoints using RNNCells to new name convention.
Usage:
python checkpoint_convert.py [--write_v1_checkpoint] \
'/path/to/checkpoint' '/path/to/new_checkpoint'
For example, if there is a V2 checkpoint to be converted and the files include:
/tmp/my_checkpoint/model.ckpt.data-00000-of-00001
/tmp/my_checkpoint/model.ckpt.index
/tmp/my_checkpoint/model.ckpt.meta
use the following command:
mkdir /tmp/my_converted_checkpoint &&
python checkpoint_convert.py \
/tmp/my_checkpoint/model.ckpt /tmp/my_converted_checkpoint/model.ckpt
This will generate three converted checkpoint files corresponding to the three
old ones in the new directory:
/tmp/my_converted_checkpoint/model.ckpt.data-00000-of-00001
/tmp/my_converted_checkpoint/model.ckpt.index
/tmp/my_converted_checkpoint/model.ckpt.meta
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import collections
import re
import sys
from tensorflow.core.protobuf import saver_pb2
from tensorflow.python import pywrap_tensorflow
from tensorflow.python.client import session
from tensorflow.python.framework import ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import app
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.training import saver as saver_lib
# Mapping between old <=> new names. Externalized so that user scripts that
# may need to consume multiple checkpoint formats can use this metadata.
RNN_NAME_REPLACEMENTS = collections.OrderedDict([
############################################################################
# contrib/rnn/python/ops/core_rnn_cell_impl.py
# BasicRNNCell
('basic_rnn_cell/weights', 'basic_rnn_cell/kernel'),
('basic_rnn_cell/biases', 'basic_rnn_cell/bias'),
# GRUCell
('gru_cell/weights', 'gru_cell/kernel'),
('gru_cell/biases', 'gru_cell/bias'),
('gru_cell/gates/weights', 'gru_cell/gates/kernel'),
('gru_cell/gates/biases', 'gru_cell/gates/bias'),
('gru_cell/candidate/weights', 'gru_cell/candidate/kernel'),
('gru_cell/candidate/biases', 'gru_cell/candidate/bias'),
# BasicLSTMCell
('basic_lstm_cell/weights', 'basic_lstm_cell/kernel'),
('basic_lstm_cell/biases', 'basic_lstm_cell/bias'),
# LSTMCell
('lstm_cell/weights', 'lstm_cell/kernel'),
('lstm_cell/biases', 'lstm_cell/bias'),
('lstm_cell/projection/weights', 'lstm_cell/projection/kernel'),
('lstm_cell/projection/biases', 'lstm_cell/projection/bias'),
# OutputProjectionWrapper
('output_projection_wrapper/weights', 'output_projection_wrapper/kernel'),
('output_projection_wrapper/biases', 'output_projection_wrapper/bias'),
# InputProjectionWrapper
('input_projection_wrapper/weights', 'input_projection_wrapper/kernel'),
('input_projection_wrapper/biases', 'input_projection_wrapper/bias'),
############################################################################
# contrib/rnn/python/ops/lstm_ops.py
# LSTMBlockFusedCell ??
('lstm_block_wrapper/weights', 'lstm_block_wrapper/kernel'),
('lstm_block_wrapper/biases', 'lstm_block_wrapper/bias'),
############################################################################
# contrib/rnn/python/ops/rnn_cell.py
# LayerNormBasicLSTMCell
('layer_norm_basic_lstm_cell/weights', 'layer_norm_basic_lstm_cell/kernel'),
('layer_norm_basic_lstm_cell/biases', 'layer_norm_basic_lstm_cell/bias'),
# UGRNNCell, not found in g3, but still need it?
('ugrnn_cell/weights', 'ugrnn_cell/kernel'),
('ugrnn_cell/biases', 'ugrnn_cell/bias'),
# NASCell
('nas_rnn/weights', 'nas_rnn/kernel'),
('nas_rnn/recurrent_weights', 'nas_rnn/recurrent_kernel'),
# IntersectionRNNCell
('intersection_rnn_cell/weights', 'intersection_rnn_cell/kernel'),
('intersection_rnn_cell/biases', 'intersection_rnn_cell/bias'),
('intersection_rnn_cell/in_projection/weights',
'intersection_rnn_cell/in_projection/kernel'),
('intersection_rnn_cell/in_projection/biases',
'intersection_rnn_cell/in_projection/bias'),
# PhasedLSTMCell
('phased_lstm_cell/mask_gates/weights',
'phased_lstm_cell/mask_gates/kernel'),
('phased_lstm_cell/mask_gates/biases', 'phased_lstm_cell/mask_gates/bias'),
('phased_lstm_cell/new_input/weights', 'phased_lstm_cell/new_input/kernel'),
('phased_lstm_cell/new_input/biases', 'phased_lstm_cell/new_input/bias'),
('phased_lstm_cell/output_gate/weights',
'phased_lstm_cell/output_gate/kernel'),
('phased_lstm_cell/output_gate/biases',
'phased_lstm_cell/output_gate/bias'),
# AttentionCellWrapper
('attention_cell_wrapper/weights', 'attention_cell_wrapper/kernel'),
('attention_cell_wrapper/biases', 'attention_cell_wrapper/bias'),
('attention_cell_wrapper/attn_output_projection/weights',
'attention_cell_wrapper/attn_output_projection/kernel'),
('attention_cell_wrapper/attn_output_projection/biases',
'attention_cell_wrapper/attn_output_projection/bias'),
('attention_cell_wrapper/attention/weights',
'attention_cell_wrapper/attention/kernel'),
('attention_cell_wrapper/attention/biases',
'attention_cell_wrapper/attention/bias'),
############################################################################
# contrib/legacy_seq2seq/python/ops/seq2seq.py
('attention_decoder/weights',
'attention_decoder/kernel'),
('attention_decoder/biases',
'attention_decoder/bias'),
('attention_decoder/Attention_0/weights',
'attention_decoder/Attention_0/kernel'),
('attention_decoder/Attention_0/biases',
'attention_decoder/Attention_0/bias'),
('attention_decoder/AttnOutputProjection/weights',
'attention_decoder/AttnOutputProjection/kernel'),
('attention_decoder/AttnOutputProjection/biases',
'attention_decoder/AttnOutputProjection/bias'),
])
_RNN_SHARDED_NAME_REPLACEMENTS = collections.OrderedDict([
('LSTMCell/W_', 'lstm_cell/weights/part_'),
('BasicLSTMCell/Linear/Matrix_', 'basic_lstm_cell/weights/part_'),
('GRUCell/W_', 'gru_cell/weights/part_'),
('MultiRNNCell/Cell', 'multi_rnn_cell/cell_'),
])
def _rnn_name_replacement(var_name):
for pattern in RNN_NAME_REPLACEMENTS:
if pattern in var_name:
old_var_name = var_name
var_name = var_name.replace(pattern, RNN_NAME_REPLACEMENTS[pattern])
logging.info('Converted: %s --> %s' % (old_var_name, var_name))
break
return var_name
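# e.g. 'fw/basic_lstm_cell/weights' -> 'fw/basic_lstm_cell/kernel'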
def _rnn_name_replacement_sharded(var_name):
for pattern in _RNN_SHARDED_NAME_REPLACEMENTS:
if pattern in var_name:
old_var_name = var_name
var_name = var_name.replace(pattern,
_RNN_SHARDED_NAME_REPLACEMENTS[pattern])
logging.info('Converted: %s --> %s' % (old_var_name, var_name))
return var_name
def _split_sharded_vars(name_shape_map):
"""Split shareded variables.
Args:
name_shape_map: A dict from variable name to variable shape.
Returns:
not_sharded: Names of the non-sharded variables.
sharded: Names of the sharded variables.
"""
sharded = []
not_sharded = []
for name in name_shape_map:
if re.search(r'_[0-9]+$', name): # the name carries a shard suffix such as '_0'
if re.sub('_[0-9]+$', '_1', name) in name_shape_map:
sharded.append(name)
else:
not_sharded.append(name)
else:
not_sharded.append(name)
return not_sharded, sharded
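# Worked example (hypothetical checkpoint names): given
# {'rnn/W_0': ..., 'rnn/W_1': ..., 'global_step': ...}
# 'rnn/W_0' and 'rnn/W_1' end in a shard suffix and their '_1' sibling exists,
# so they go to `sharded`; 'global_step' goes to `not_sharded`.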
def convert_names(checkpoint_from_path,
checkpoint_to_path,
write_v1_checkpoint=False):
"""Migrates the names of variables within a checkpoint.
Args:
checkpoint_from_path: Path to source checkpoint to be read in.
checkpoint_to_path: Path to checkpoint to be written out.
write_v1_checkpoint: Whether the output checkpoint will be in V1 format.
Returns:
A dictionary that maps the new variable names to the Variable objects.
A dictionary that maps the old variable names to the new variable names.
"""
with ops.Graph().as_default():
logging.info('Reading checkpoint_from_path %s' % checkpoint_from_path)
reader = pywrap_tensorflow.NewCheckpointReader(checkpoint_from_path)
name_shape_map = reader.get_variable_to_shape_map()
not_sharded, sharded = _split_sharded_vars(name_shape_map)
new_variable_map = {}
conversion_map = {}
for var_name in not_sharded:
new_var_name = _rnn_name_replacement(var_name)
tensor = reader.get_tensor(var_name)
var = variables.Variable(tensor, name=var_name)
new_variable_map[new_var_name] = var
if new_var_name != var_name:
conversion_map[var_name] = new_var_name
for var_name in sharded:
new_var_name = _rnn_name_replacement_sharded(var_name)
tensor = reader.get_tensor(var_name)
var = variables.Variable(tensor, name=var_name)
new_variable_map[new_var_name] = var
if new_var_name != var_name:
conversion_map[var_name] = new_var_name
write_version = (saver_pb2.SaverDef.V1
if write_v1_checkpoint else saver_pb2.SaverDef.V2)
saver = saver_lib.Saver(new_variable_map, write_version=write_version)
with session.Session() as sess:
sess.run(variables.global_variables_initializer())
logging.info('Writing checkpoint_to_path %s' % checkpoint_to_path)
saver.save(sess, checkpoint_to_path)
logging.info('Summary:')
logging.info(' Converted %d variable name(s).' % len(new_variable_map))
return new_variable_map, conversion_map
def main(_):
convert_names(
FLAGS.checkpoint_from_path,
FLAGS.checkpoint_to_path,
write_v1_checkpoint=FLAGS.write_v1_checkpoint)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.register('type', 'bool', lambda v: v.lower() == 'true')
parser.add_argument('checkpoint_from_path', type=str,
help='Path to source checkpoint to be read in.')
parser.add_argument('checkpoint_to_path', type=str,
help='Path to checkpoint to be written out.')
parser.add_argument('--write_v1_checkpoint', action='store_true',
help='Write v1 checkpoint')
FLAGS, unparsed = parser.parse_known_args()
app.run(main=main, argv=[sys.argv[0]] + unparsed)
| apache-2.0 |
asimshankar/tensorflow | tensorflow/contrib/tensorrt/test/batch_matmul_test.py | 3 | 4304 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Model script to test TF-TensorRT integration."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.contrib.tensorrt.test import tf_trt_integration_test_base as trt_test
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gen_array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.platform import test
class BatchMatMulTest(trt_test.TfTrtIntegrationTestBase):
def GetParams(self):
"""Testing conversion of BatchMatMul in TF-TRT conversion."""
dtype = dtypes.float32
input_name = "input"
input_dims = [12, 5, 8, 12]
output_name = "output"
w1_name = "matmul_w1"
w1_dims = [12, 5, 12, 7]
w2_name = "matmul_w2"
w2_dims = [12, 12, 7]
g = ops.Graph()
with g.as_default():
inp = array_ops.placeholder(
dtype=dtype, shape=[None] + input_dims[1:], name=input_name)
w1 = array_ops.placeholder(dtype=dtype, shape=w1_dims, name=w1_name)
w2 = array_ops.placeholder(dtype=dtype, shape=w2_dims, name=w2_name)
with g.device("/GPU:0"):
b = constant_op.constant(np.random.randn(12, 5, 12, 7), dtype=dtype)
x1 = math_ops.matmul(inp, b)
c = constant_op.constant(np.random.randn(5, 1, 1), dtype=dtype)
x1 = x1 + c
x2 = math_ops.matmul(inp, w1)
d = constant_op.constant(np.random.randn(5, 1, 1), dtype=dtype)
x2 = x2 * d
e = self.trt_incompatible_op(inp)
e = gen_array_ops.reshape(e, [12, 40, 12])
x3 = math_ops.matmul(e, w2)
f = constant_op.constant(np.random.randn(40, 1), dtype=dtype)
x3 = x3 + f
x3 = gen_array_ops.reshape(x3, [12, 5, 8, 7])
x3 = self.trt_incompatible_op(x3)
out = x1 + x2 + x3
array_ops.squeeze(out, name=output_name)
return trt_test.TfTrtIntegrationTestParams(
gdef=g.as_graph_def(),
input_names=[input_name, w1_name, w2_name],
input_dims=[input_dims, w1_dims, w2_dims],
output_names=[output_name],
expected_output_dims=[(12, 5, 8, 7)])
def ExpectedEnginesToBuild(self, run_params):
"""Return the expected engines to build."""
if (run_params.dynamic_engine and
not trt_test.IsQuantizationMode(run_params.precision_mode)):
return ["TRTEngineOp_0", "TRTEngineOp_1"]
return ["TRTEngineOp_1"]
def ExpectedEnginesToRun(self, run_params):
"""Return the expected engines to run."""
return ["TRTEngineOp_1"]
def ShouldRunTest(self, run_params):
"""Whether to run the test."""
# TODO(aaroey): Trt library will fail like:
#
# ../builder/cudnnBuilder2.cpp:685:
# virtual std::vector<nvinfer1::query::Ports<
# nvinfer1::query::TensorRequirements>>
# nvinfer1::builder::Node::getSupportedFormats(
# const nvinfer1::query::Ports<nvinfer1::query::AbstractTensor>&,
# const nvinfer1::cudnn::HardwareContext&,
# nvinfer1::builder::Format::Type,
# const nvinfer1::builder::FormatTypeHack&) const:
# Assertion `sf' failed.
#
# To reproduce, run:
# bazel test -c opt --copt=-mavx \
# --test_arg=BatchMatMulTest.testTfTrt_ToolConversion_INT8_DynamicEngine \
# tensorflow/contrib/tensorrt:batch_matmul_test
#
# Investigate and fix it.
return not trt_test.IsQuantizationMode(run_params.precision_mode)
if __name__ == "__main__":
test.main()
| apache-2.0 |
prescott66/devedeng | src/devedeng/settings.py | 4 | 4803 | # Copyright 2014 (C) Raster Software Vigo (Sergio Costas)
#
# This file is part of DeVeDe-NG
#
# DeVeDe-NG is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# DeVeDe-NG is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
from gi.repository import Gtk
import os
import devedeng.configuration_data
import devedeng.interface_manager
import gettext
import devedeng.converter
class settings_window(devedeng.interface_manager.interface_manager):
def __init__(self):
devedeng.interface_manager.interface_manager.__init__(self)
self.config = devedeng.configuration_data.configuration.get_config()
if (self.config.multicore > 0):
if self.config.cores < self.config.multicore:
cores = self.config.cores
else:
cores = self.config.multicore
else:
if self.config.cores <= -self.config.multicore:
cores = -self.config.cores+1
else:
cores = self.config.multicore
self.core_elements = {}
list_core_elements = []
default_value = _("Use all cores")
counter = 1
for c in range(self.config.cores-1,-self.config.cores, -1):
if c > 0:
translated_string = gettext.ngettext("Use %(X)d core","Use %(X)d cores",c) % {"X":c}
value = c
if c == cores:
default_value = translated_string
counter += 1
elif c < 0:
translated_string = gettext.ngettext("Use all except %(X)d core","Use all except %(X)d cores", -c) % {"X": -c}
value = c
if c == cores:
default_value = translated_string
counter += 1
else:
translated_string = _("Use all cores")
value = c
self.core_elements[translated_string] = value
list_core_elements.append(translated_string)
self.add_combobox("multicore",list_core_elements,default_value)
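        # Editorial note on the convention inferred from the loop above:
        # config.multicore > 0 means "use exactly N cores", < 0 means
        # "use all except N cores", and 0 means "use all cores"; the
        # combobox stores that signed value back via self.core_elements.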
self.add_filebutton("tempo_path", self.config.tmp_folder)
c = devedeng.converter.converter.get_converter()
(analizers, players, menuers, converters, burners, mkiso) = c.get_available_programs()
self.add_combobox("analizer", analizers,self.config.film_analizer)
self.add_combobox("player", players,self.config.film_player)
self.add_combobox("converter", converters,self.config.film_converter,self.set_data_converter)
self.add_combobox("menuer", menuers,self.config.menu_converter)
self.add_combobox("mkiso", mkiso, self.config.mkiso)
self.add_combobox("burner", burners, self.config.burner)
self.builder = Gtk.Builder()
self.builder.set_translation_domain(self.config.gettext_domain)
self.builder.add_from_file(os.path.join(self.config.glade,"wsettings.ui"))
self.builder.connect_signals(self)
wsettings_window = self.builder.get_object("settings")
self.wconverter = self.builder.get_object("converter")
self.wtypes = self.builder.get_object("disc_types_supported")
wsettings_window.show_all()
self.update_ui(self.builder)
self.set_data_converter(None)
retval = wsettings_window.run()
self.store_ui(self.builder)
wsettings_window.destroy()
if retval == 1:
self.config.multicore = self.core_elements[self.multicore]
self.config.tmp_folder = self.tempo_path
self.config.film_analizer = self.analizer
self.config.film_player = self.player
self.config.film_converter = self.converter
self.config.menu_converter = self.menuer
self.config.burner = self.burner
self.config.mkiso = self.mkiso
self.config.save_config()
def set_data_converter(self,b):
self.store_ui(self.builder)
cv = devedeng.converter.converter.get_converter()
cv2 = cv.get_disc_converter_by_name(self.converter)
data = ""
for t in cv2.disc_types:
if data != "":
data += ", "
data += t
if data != "":
self.wtypes.set_text(data)
else:
            self.wtypes.set_text(_("No discs supported"))
| gpl-3.0 |
PopCap/GameIdea | Engine/Source/ThirdParty/HTML5/emsdk/Win64/python/2.7.5.3_64bit/Lib/site-packages/win32/Demos/win32wnet/testwnet.py | 17 | 3432 | import win32api
import win32wnet
import sys
from winnetwk import *
import os
possible_shares = []
def _doDumpHandle(handle, level = 0):
indent = " " * level
while 1:
items = win32wnet.WNetEnumResource(handle, 0)
if len(items)==0:
break
for item in items:
try:
if item.dwDisplayType == RESOURCEDISPLAYTYPE_SHARE:
print indent + "Have share with name:", item.lpRemoteName
possible_shares.append(item)
elif item.dwDisplayType == RESOURCEDISPLAYTYPE_GENERIC:
print indent + "Have generic resource with name:", item.lpRemoteName
else:
# Try generic!
print indent + "Enumerating " + item.lpRemoteName,
k = win32wnet.WNetOpenEnum(RESOURCE_GLOBALNET, RESOURCETYPE_ANY,0,item)
print
_doDumpHandle(k, level + 1)
win32wnet.WNetCloseEnum(k) # could do k.Close(), but this is a good test!
except win32wnet.error, details:
print indent + "Couldn't enumerate this resource: " + details.strerror
def TestOpenEnum():
print "Enumerating all resources on the network - this may take some time..."
handle = win32wnet.WNetOpenEnum(RESOURCE_GLOBALNET,RESOURCETYPE_ANY,0,None)
try:
_doDumpHandle(handle)
finally:
handle.Close()
print "Finished dumping all resources."
def findUnusedDriveLetter():
existing = [x[0].lower() for x in win32api.GetLogicalDriveStrings().split('\0') if x]
handle = win32wnet.WNetOpenEnum(RESOURCE_REMEMBERED,RESOURCETYPE_DISK,0,None)
try:
while 1:
items = win32wnet.WNetEnumResource(handle, 0)
if len(items)==0:
break
xtra = [i.lpLocalName[0].lower() for i in items if i.lpLocalName]
existing.extend(xtra)
finally:
handle.Close()
for maybe in 'defghijklmnopqrstuvwxyz':
if maybe not in existing:
return maybe
raise RuntimeError("All drive mappings are taken?")
def TestConnection():
if len(possible_shares)==0:
print "Couldn't find any potential shares to connect to"
return
localName = findUnusedDriveLetter() + ':'
for share in possible_shares:
print "Attempting connection of", localName, "to", share.lpRemoteName
try:
win32wnet.WNetAddConnection2(share.dwType, localName, share.lpRemoteName)
except win32wnet.error, details:
print "Couldn't connect: " + details.strerror
continue
# Have a connection.
try:
fname = os.path.join(localName + "\\", os.listdir(localName + "\\")[0])
try:
print "Universal name of '%s' is '%s'" % (fname, win32wnet.WNetGetUniversalName(fname))
except win32wnet.error, details:
print "Couldn't get universal name of '%s': %s" % (fname, details.strerror)
print "User name for this connection is", win32wnet.WNetGetUser(localName)
finally:
win32wnet.WNetCancelConnection2(localName, 0, 0)
# and do it again, but this time by using the more modern
# NETRESOURCE way.
nr = win32wnet.NETRESOURCE()
nr.dwType = share.dwType
nr.lpLocalName = localName
nr.lpRemoteName = share.lpRemoteName
win32wnet.WNetAddConnection2(nr)
win32wnet.WNetCancelConnection2(localName, 0, 0)
# and one more time using WNetAddConnection3
win32wnet.WNetAddConnection3(0, nr)
win32wnet.WNetCancelConnection2(localName, 0, 0)
# Only do the first share that succeeds.
break
def TestGetUser():
u = win32wnet.WNetGetUser()
print "Current global user is", repr(u)
if u != win32wnet.WNetGetUser(None):
        raise RuntimeError("Default value didn't seem to work!")
TestGetUser()
TestOpenEnum()
TestConnection()
| bsd-2-clause |
Icenowy/shadowsocks | shadowsocks/encrypt.py | 990 | 5180 | #!/usr/bin/env python
#
# Copyright 2012-2015 clowwindy
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import, division, print_function, \
with_statement
import os
import sys
import hashlib
import logging
from shadowsocks import common
from shadowsocks.crypto import rc4_md5, openssl, sodium, table
method_supported = {}
method_supported.update(rc4_md5.ciphers)
method_supported.update(openssl.ciphers)
method_supported.update(sodium.ciphers)
method_supported.update(table.ciphers)
def random_string(length):
return os.urandom(length)
cached_keys = {}
def try_cipher(key, method=None):
Encryptor(key, method)
def EVP_BytesToKey(password, key_len, iv_len):
    # equivalent to OpenSSL's EVP_BytesToKey() with count 1,
    # so that we derive the same key and iv as the Node.js version
cached_key = '%s-%d-%d' % (password, key_len, iv_len)
r = cached_keys.get(cached_key, None)
if r:
return r
m = []
i = 0
while len(b''.join(m)) < (key_len + iv_len):
md5 = hashlib.md5()
data = password
if i > 0:
data = m[i - 1] + password
md5.update(data)
m.append(md5.digest())
i += 1
ms = b''.join(m)
key = ms[:key_len]
iv = ms[key_len:key_len + iv_len]
cached_keys[cached_key] = (key, iv)
return key, iv
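# Illustrative sketch (editorial addition, not part of upstream shadowsocks):
# for key_len=16 and iv_len=16 the derivation above reduces to two chained
# MD5 digests over the password. The helper below exists only to document
# that equivalence and is never called by the library code.
def _evp_bytes_to_key_sketch(password=b'key'):
    m1 = hashlib.md5(password).digest()       # first 16 bytes -> the key
    m2 = hashlib.md5(m1 + password).digest()  # next 16 bytes -> the iv
    assert EVP_BytesToKey(password, 16, 16) == (m1, m2)
    return m1, m2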
class Encryptor(object):
def __init__(self, key, method):
self.key = key
self.method = method
self.iv = None
self.iv_sent = False
self.cipher_iv = b''
self.decipher = None
method = method.lower()
self._method_info = self.get_method_info(method)
if self._method_info:
self.cipher = self.get_cipher(key, method, 1,
random_string(self._method_info[1]))
else:
logging.error('method %s not supported' % method)
sys.exit(1)
def get_method_info(self, method):
method = method.lower()
m = method_supported.get(method)
return m
def iv_len(self):
return len(self.cipher_iv)
def get_cipher(self, password, method, op, iv):
password = common.to_bytes(password)
m = self._method_info
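        # m is the (key_len, iv_len, cipher_factory) tuple from the
        # method_supported tables; encrypt_all() below unpacks it the
        # same way as (key_len, iv_len, m).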
if m[0] > 0:
key, iv_ = EVP_BytesToKey(password, m[0], m[1])
else:
# key_length == 0 indicates we should use the key directly
key, iv = password, b''
iv = iv[:m[1]]
if op == 1:
# this iv is for cipher not decipher
self.cipher_iv = iv[:m[1]]
return m[2](method, key, iv, op)
def encrypt(self, buf):
if len(buf) == 0:
return buf
if self.iv_sent:
return self.cipher.update(buf)
else:
self.iv_sent = True
return self.cipher_iv + self.cipher.update(buf)
def decrypt(self, buf):
if len(buf) == 0:
return buf
if self.decipher is None:
decipher_iv_len = self._method_info[1]
decipher_iv = buf[:decipher_iv_len]
self.decipher = self.get_cipher(self.key, self.method, 0,
iv=decipher_iv)
buf = buf[decipher_iv_len:]
if len(buf) == 0:
return buf
return self.decipher.update(buf)
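    # Wire-format note (editorial, implied by encrypt()/decrypt() above):
    # the first chunk a sender emits is "iv + ciphertext" and every later
    # chunk is ciphertext only, so decrypt() slices the iv off the front
    # of the first buffer it receives.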
def encrypt_all(password, method, op, data):
result = []
method = method.lower()
(key_len, iv_len, m) = method_supported[method]
if key_len > 0:
key, _ = EVP_BytesToKey(password, key_len, iv_len)
else:
key = password
if op:
iv = random_string(iv_len)
result.append(iv)
else:
iv = data[:iv_len]
data = data[iv_len:]
cipher = m(method, key, iv, op)
result.append(cipher.update(data))
return b''.join(result)
CIPHERS_TO_TEST = [
'aes-128-cfb',
'aes-256-cfb',
'rc4-md5',
'salsa20',
'chacha20',
'table',
]
def test_encryptor():
from os import urandom
plain = urandom(10240)
for method in CIPHERS_TO_TEST:
logging.warn(method)
encryptor = Encryptor(b'key', method)
decryptor = Encryptor(b'key', method)
cipher = encryptor.encrypt(plain)
plain2 = decryptor.decrypt(cipher)
assert plain == plain2
def test_encrypt_all():
from os import urandom
plain = urandom(10240)
for method in CIPHERS_TO_TEST:
logging.warn(method)
cipher = encrypt_all(b'key', method, 1, plain)
plain2 = encrypt_all(b'key', method, 0, cipher)
assert plain == plain2
if __name__ == '__main__':
test_encrypt_all()
test_encryptor()
| apache-2.0 |
liamgh/liamgreenhughes-sl4a-tf101 | python/gdata/src/gdata/tlslite/X509.py | 279 | 4287 | """Class representing an X.509 certificate."""
from utils.ASN1Parser import ASN1Parser
from utils.cryptomath import *
from utils.keyfactory import _createPublicRSAKey
class X509:
"""This class represents an X.509 certificate.
@type bytes: L{array.array} of unsigned bytes
@ivar bytes: The DER-encoded ASN.1 certificate
@type publicKey: L{tlslite.utils.RSAKey.RSAKey}
@ivar publicKey: The subject public key from the certificate.
"""
def __init__(self):
self.bytes = createByteArraySequence([])
self.publicKey = None
def parse(self, s):
"""Parse a PEM-encoded X.509 certificate.
@type s: str
@param s: A PEM-encoded X.509 certificate (i.e. a base64-encoded
certificate wrapped with "-----BEGIN CERTIFICATE-----" and
"-----END CERTIFICATE-----" tags).
"""
start = s.find("-----BEGIN CERTIFICATE-----")
end = s.find("-----END CERTIFICATE-----")
if start == -1:
raise SyntaxError("Missing PEM prefix")
if end == -1:
raise SyntaxError("Missing PEM postfix")
s = s[start+len("-----BEGIN CERTIFICATE-----") : end]
bytes = base64ToBytes(s)
self.parseBinary(bytes)
return self
def parseBinary(self, bytes):
"""Parse a DER-encoded X.509 certificate.
@type bytes: str or L{array.array} of unsigned bytes
@param bytes: A DER-encoded X.509 certificate.
"""
if isinstance(bytes, type("")):
bytes = stringToBytes(bytes)
self.bytes = bytes
p = ASN1Parser(bytes)
#Get the tbsCertificate
tbsCertificateP = p.getChild(0)
#Is the optional version field present?
#This determines which index the key is at.
if tbsCertificateP.value[0]==0xA0:
subjectPublicKeyInfoIndex = 6
else:
subjectPublicKeyInfoIndex = 5
#Get the subjectPublicKeyInfo
subjectPublicKeyInfoP = tbsCertificateP.getChild(\
subjectPublicKeyInfoIndex)
#Get the algorithm
algorithmP = subjectPublicKeyInfoP.getChild(0)
rsaOID = algorithmP.value
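        # DER encoding of the rsaEncryption OID (1.2.840.113549.1.1.1)
        # followed by a NULL parameters field.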
if list(rsaOID) != [6, 9, 42, 134, 72, 134, 247, 13, 1, 1, 1, 5, 0]:
raise SyntaxError("Unrecognized AlgorithmIdentifier")
#Get the subjectPublicKey
subjectPublicKeyP = subjectPublicKeyInfoP.getChild(1)
#Adjust for BIT STRING encapsulation
if (subjectPublicKeyP.value[0] !=0):
raise SyntaxError()
subjectPublicKeyP = ASN1Parser(subjectPublicKeyP.value[1:])
#Get the modulus and exponent
modulusP = subjectPublicKeyP.getChild(0)
publicExponentP = subjectPublicKeyP.getChild(1)
#Decode them into numbers
n = bytesToNumber(modulusP.value)
e = bytesToNumber(publicExponentP.value)
#Create a public key instance
self.publicKey = _createPublicRSAKey(n, e)
def getFingerprint(self):
"""Get the hex-encoded fingerprint of this certificate.
@rtype: str
@return: A hex-encoded fingerprint.
"""
return sha.sha(self.bytes).hexdigest()
def getCommonName(self):
"""Get the Subject's Common Name from the certificate.
The cryptlib_py module must be installed in order to use this
function.
@rtype: str or None
@return: The CN component of the certificate's subject DN, if
present.
"""
import cryptlib_py
import array
c = cryptlib_py.cryptImportCert(self.bytes, cryptlib_py.CRYPT_UNUSED)
name = cryptlib_py.CRYPT_CERTINFO_COMMONNAME
try:
try:
length = cryptlib_py.cryptGetAttributeString(c, name, None)
returnVal = array.array('B', [0] * length)
cryptlib_py.cryptGetAttributeString(c, name, returnVal)
returnVal = returnVal.tostring()
except cryptlib_py.CryptException, e:
if e[0] == cryptlib_py.CRYPT_ERROR_NOTFOUND:
returnVal = None
return returnVal
finally:
cryptlib_py.cryptDestroyCert(c)
def writeBytes(self):
return self.bytes
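# Minimal usage sketch (editorial addition; the certificate file name is
# hypothetical). parse() returns self, so calls can be chained:
#
#   x509 = X509().parse(open("server.pem").read())
#   print x509.getFingerprint()   # hex SHA-1 of the DER bytes
#   print x509.publicKey          # tlslite RSAKey built from the cert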
| apache-2.0 |
Nicop06/ansible | lib/ansible/module_utils/cnos_devicerules.py | 87 | 91037 | # This code is part of Ansible, but is an independent component.
# This particular file snippet, and this file snippet only, is BSD licensed.
# Modules you write using this snippet, which is embedded dynamically by
# Ansible still belong to the author of the module, and may assign their
# own license to the complete work.
#
# Copyright (C) 2017 Lenovo, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# Contains device rule and methods
# Lenovo Networking
def getRuleString(deviceType, variableId):
retVal = variableId + ":"
if(deviceType == 'g8272_cnos'):
if variableId in g8272_cnos:
retVal = retVal + g8272_cnos[variableId]
else:
retVal = "The variable " + variableId + " is not supported"
elif(deviceType == 'g8296_cnos'):
if variableId in g8296_cnos:
retVal = retVal + g8296_cnos[variableId]
else:
retVal = "The variable " + variableId + " is not supported"
elif(deviceType == 'g8332_cnos'):
if variableId in g8332_cnos:
retVal = retVal + g8332_cnos[variableId]
else:
retVal = "The variable " + variableId + " is not supported"
elif(deviceType == 'NE1072T'):
if variableId in NE1072T:
retVal = retVal + NE1072T[variableId]
else:
retVal = "The variable " + variableId + " is not supported"
elif(deviceType == 'NE1032'):
if variableId in NE1032:
retVal = retVal + NE1032[variableId]
else:
retVal = "The variable " + variableId + " is not supported"
elif(deviceType == 'NE1032T'):
if variableId in NE1032T:
retVal = retVal + NE1032T[variableId]
else:
retVal = "The variable " + variableId + " is not supported"
elif(deviceType == 'NE10032'):
if variableId in NE10032:
retVal = retVal + NE10032[variableId]
else:
retVal = "The variable " + variableId + " is not supported"
elif(deviceType == 'NE2572'):
if variableId in NE2572:
retVal = retVal + NE2572[variableId]
else:
retVal = "The variable " + variableId + " is not supported"
else:
if variableId in default_cnos:
retVal = retVal + default_cnos[variableId]
else:
retVal = "The variable " + variableId + " is not supported"
return retVal
# EOM
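# Illustrative sketch (editorial addition, not part of the Ansible module):
# one way a caller might split the rule string returned by getRuleString()
# back into its parts; the helper name is hypothetical.
def _parse_rule_sketch(rule):
    variable_id, rule_body = rule.split(':', 1)
    rule_type, _sep, constraint = rule_body.partition(':')
    return variable_id, rule_type, constraint

# e.g. getRuleString('unknown', 'vlan_id') falls back to default_cnos and
# returns 'vlan_id:INTEGER_VALUE:1-3999', which parses as
#   _parse_rule_sketch('vlan_id:INTEGER_VALUE:1-3999')
#       == ('vlan_id', 'INTEGER_VALUE', '1-3999')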
default_cnos = {
'vlan_id': 'INTEGER_VALUE:1-3999',
'vlan_id_range': 'INTEGER_VALUE_RANGE:1-3999',
'vlan_name': 'TEXT:',
'vlan_flood': 'TEXT_OPTIONS:ipv4,ipv6',
'vlan_state': 'TEXT_OPTIONS:active,suspend',
'vlan_last_member_query_interval': 'INTEGER_VALUE:1-25',
'vlan_querier': 'IPV4Address:',
'vlan_querier_timeout': 'INTEGER_VALUE:1-65535',
'vlan_query_interval': 'INTEGER_VALUE:1-18000',
'vlan_query_max_response_time': 'INTEGER_VALUE:1-25',
'vlan_report_suppression': 'INTEGER_VALUE:1-25',
'vlan_robustness_variable': 'INTEGER_VALUE:1-7',
'vlan_startup_query_count': 'INTEGER_VALUE:1-10',
'vlan_startup_query_interval': 'INTEGER_VALUE:1-18000',
'vlan_snooping_version': 'INTEGER_VALUE:2-3',
'vlan_access_map_name': 'TEXT: ',
'vlan_ethernet_interface': 'TEXT:',
'vlan_portagg_number': 'INTEGER_VALUE:1-4096',
'vlan_accessmap_action': 'TEXT_OPTIONS:drop,forward,redirect',
'vlan_dot1q_tag': 'MATCH_TEXT_OR_EMPTY:egress-only',
'vlan_filter_name': 'TEXT:',
'vlag_auto_recovery': 'INTEGER_VALUE:240-3600',
'vlag_config_consistency': 'TEXT_OPTIONS:disable,strict',
'vlag_instance': 'INTEGER_VALUE:1-64',
'vlag_port_aggregation': 'INTEGER_VALUE:1-4096',
'vlag_priority': 'INTEGER_VALUE:0-65535',
'vlag_startup_delay': 'INTEGER_VALUE:0-3600',
'vlag_tier_id': 'INTEGER_VALUE:1-512',
'vlag_hlthchk_options': 'TEXT_OPTIONS:keepalive-attempts,\
keepalive-interval,peer-ip,retry-interval',
'vlag_keepalive_attempts': 'INTEGER_VALUE:1-24',
'vlag_keepalive_interval': 'INTEGER_VALUE:2-300',
'vlag_retry_interval': 'INTEGER_VALUE:1-300',
'vlag_peerip': 'IPV4Address:',
'vlag_peerip_vrf': 'TEXT_OPTIONS:default,management',
'bgp_as_number': 'NO_VALIDATION:1-4294967295',
'bgp_address_family': 'TEXT_OPTIONS:ipv4,ipv6',
'bgp_bgp_local_count': 'INTEGER_VALUE:2-64',
'cluster_id_as_ip': 'IPV4Address:',
'cluster_id_as_number': 'NO_VALIDATION:1-4294967295',
'confederation_identifier': 'INTEGER_VALUE:1-65535',
'condeferation_peers_as': 'INTEGER_VALUE:1-65535',
'stalepath_delay_value': 'INTEGER_VALUE:1-3600',
'maxas_limit_as': 'INTEGER_VALUE:1-2000',
'neighbor_ipaddress': 'IPV4Address:',
'neighbor_as': 'NO_VALIDATION:1-4294967295',
'router_id': 'IPV4Address:',
'bgp_keepalive_interval': 'INTEGER_VALUE:0-3600',
'bgp_holdtime': 'INTEGER_VALUE:0-3600',
'bgp_aggregate_prefix': 'IPV4AddressWithMask:',
'addrfamily_routemap_name': 'TEXT:',
'reachability_half_life': 'INTEGER_VALUE:1-45',
'start_reuse_route_value': 'INTEGER_VALUE:1-20000',
'start_suppress_route_value': 'INTEGER_VALUE:1-20000',
'max_duration_to_suppress_route': 'INTEGER_VALUE:1-255',
'unreachability_halftime_for_penalty': 'INTEGER_VALUE:1-45',
'distance_external_AS': 'INTEGER_VALUE:1-255',
'distance_internal_AS': 'INTEGER_VALUE:1-255',
'distance_local_routes': 'INTEGER_VALUE:1-255',
'maxpath_option': 'TEXT_OPTIONS:ebgp,ibgp',
'maxpath_numbers': 'INTEGER_VALUE:2-32',
'network_ip_prefix_with_mask': 'IPV4AddressWithMask:',
'network_ip_prefix_value': 'IPV4Address:',
'network_ip_prefix_mask': 'IPV4Address:',
'nexthop_crtitical_delay': 'NO_VALIDATION:1-4294967295',
'nexthop_noncrtitical_delay': 'NO_VALIDATION:1-4294967295',
'addrfamily_redistribute_option': 'TEXT_OPTIONS:direct,ospf,\
static',
'bgp_neighbor_af_occurances': 'INTEGER_VALUE:1-10',
'bgp_neighbor_af_filtername': 'TEXT:',
'bgp_neighbor_af_maxprefix': 'INTEGER_VALUE:1-15870',
'bgp_neighbor_af_prefixname': 'TEXT:',
'bgp_neighbor_af_routemap': 'TEXT:',
'bgp_neighbor_address_family': 'TEXT_OPTIONS:ipv4,ipv6',
'bgp_neighbor_connection_retrytime': 'INTEGER_VALUE:1-65535',
'bgp_neighbor_description': 'TEXT:',
'bgp_neighbor_maxhopcount': 'INTEGER_VALUE:1-255',
'bgp_neighbor_local_as': 'NO_VALIDATION:1-4294967295',
'bgp_neighbor_maxpeers': 'INTEGER_VALUE:1-96',
'bgp_neighbor_password': 'TEXT:',
'bgp_neighbor_timers_Keepalive': 'INTEGER_VALUE:0-3600',
'bgp_neighbor_timers_holdtime': 'INTEGER_VALUE:0-3600',
'bgp_neighbor_ttl_hops': 'INTEGER_VALUE:1-254',
'bgp_neighbor_update_options': 'TEXT_OPTIONS:ethernet,loopback,\
vlan',
'bgp_neighbor_update_ethernet': 'TEXT:',
'bgp_neighbor_update_loopback': 'INTEGER_VALUE:0-7',
'bgp_neighbor_update_vlan': 'INTEGER_VALUE:1-4094',
'bgp_neighbor_weight': 'INTEGER_VALUE:0-65535',
'ethernet_interface_value': 'INTEGER_VALUE:1-32',
'ethernet_interface_range': 'INTEGER_VALUE_RANGE:1-32',
'ethernet_interface_string': 'TEXT:',
'loopback_interface_value': 'INTEGER_VALUE:0-7',
'mgmt_interface_value': 'INTEGER_VALUE:0-0',
'vlan_interface_value': 'INTEGER_VALUE:1-4094',
'portchannel_interface_value': 'INTEGER_VALUE:1-4096',
'portchannel_interface_range': 'INTEGER_VALUE_RANGE:1-4096',
'portchannel_interface_string': 'TEXT:',
'aggregation_group_no': 'INTEGER_VALUE:1-4096',
'aggregation_group_mode': 'TEXT_OPTIONS:active,on,passive',
'bfd_options': 'TEXT_OPTIONS:authentication,echo,interval,ipv4,\
ipv6,neighbor',
'bfd_interval': 'INTEGER_VALUE:50-999',
'bfd_minrx': 'INTEGER_VALUE:50-999',
'bfd_ multiplier': 'INTEGER_VALUE:3-50',
'bfd_ipv4_options': 'TEXT_OPTIONS:authentication,echo,\
interval',
'bfd_auth_options': 'TEXT_OPTIONS:keyed-md5,keyed-sha1,\
meticulous-keyed-md5,meticulous-keyed-sha1,simple',
'bfd_key_options': 'TEXT_OPTIONS:key-chain,key-id',
'bfd_key_chain': 'TEXT:',
'bfd_key_id': 'INTEGER_VALUE:0-255',
'bfd_key_name': 'TEXT:',
'bfd_neighbor_ip': 'TEXT:',
'bfd_neighbor_options': 'TEXT_OPTIONS:admin-down,multihop,\
non-persistent',
'bfd_access_vlan': 'INTEGER_VALUE:1-3999',
'bfd_bridgeport_mode': 'TEXT_OPTIONS:access,dot1q-tunnel,\
trunk',
'trunk_options': 'TEXT_OPTIONS:allowed,native',
'trunk_vlanid': 'INTEGER_VALUE:1-3999',
'portCh_description': 'TEXT:',
'duplex_option': 'TEXT_OPTIONS:auto,full,half',
'flowcontrol_options': 'TEXT_OPTIONS:receive,send',
'portchannel_ip_options': 'TEXT_OPTIONS:access-group,address,\
arp,dhcp,ospf,port,port-unreachable,redirects,router,\
unreachables',
'accessgroup_name': 'TEXT:',
'portchannel_ipv4': 'IPV4Address:',
'portchannel_ipv4_mask': 'TEXT:',
'arp_ipaddress': 'IPV4Address:',
'arp_macaddress': 'TEXT:',
'arp_timeout_value': 'INTEGER_VALUE:60-28800',
'relay_ipaddress': 'IPV4Address:',
'ip_ospf_options': 'TEXT_OPTIONS:authentication,\
authentication-key,bfd,cost,database-filter,dead-interval,\
hello-interval,message-digest-key,mtu,mtu-ignore,network,\
passive-interface,priority,retransmit-interval,shutdown,\
transmit-delay',
'ospf_id_decimal_value': 'NO_VALIDATION:1-4294967295',
'ospf_id_ipaddres_value': 'IPV4Address:',
'lacp_options': 'TEXT_OPTIONS:port-priority,suspend-individual,\
timeout',
'port_priority': 'INTEGER_VALUE:1-65535',
'lldp_options': 'TEXT_OPTIONS:receive,tlv-select,transmit,\
trap-notification',
'lldp_tlv_options': 'TEXT_OPTIONS:link-aggregation,\
mac-phy-status,management-address,max-frame-size,\
port-description,port-protocol-vlan,port-vlan,power-mdi,\
protocol-identity,system-capabilities,system-description,\
system-name,vid-management,vlan-name',
'load_interval_delay': 'INTEGER_VALUE:30-300',
'load_interval_counter': 'INTEGER_VALUE:1-3',
'mac_accessgroup_name': 'TEXT:',
'mac_address': 'TEXT:',
'microburst_threshold': 'NO_VALIDATION:1-4294967295',
'mtu_value': 'INTEGER_VALUE:64-9216',
'service_instance': 'NO_VALIDATION:1-4294967295',
'service_policy_options': 'TEXT_OPTIONS:copp-system-policy,\
input,output,type',
'service_policy_name': 'TEXT:',
'spanning_tree_options': 'TEXT_OPTIONS:bpdufilter,bpduguard,\
cost,disable,enable,guard,link-type,mst,port,port-priority,\
vlan',
'spanning_tree_cost': 'NO_VALIDATION:1-200000000',
'spanning_tree_interfacerange': 'INTEGER_VALUE_RANGE:1-3999',
'spanning_tree_portpriority': 'TEXT_OPTIONS:0,32,64,96,128,160,\
192,224',
'portchannel_ipv6_neighbor_mac': 'TEXT:',
'portchannel_ipv6_neighbor_address': 'IPV6Address:',
'portchannel_ipv6_linklocal': 'IPV6Address:',
'portchannel_ipv6_dhcp_vlan': 'INTEGER_VALUE:1-4094',
'portchannel_ipv6_dhcp_ethernet': 'TEXT:',
'portchannel_ipv6_dhcp': 'IPV6Address:',
'portchannel_ipv6_address': 'IPV6Address:',
'portchannel_ipv6_options': 'TEXT_OPTIONS:address,dhcp,\
link-local,nd,neighbor',
'interface_speed': 'TEXT_OPTIONS:1000,10000,100000,25000,40000,50000,auto',
'stormcontrol_options': 'TEXT_OPTIONS:broadcast,multicast,\
unicast',
'stormcontrol_level': 'FLOAT:',
'portchannel_dot1q_tag': 'TEXT_OPTIONS:disable,enable,\
egress-only',
'vrrp_id': 'INTEGER_VALUE:1-255',
}
NE2572 = {
'vlan_id': 'INTEGER_VALUE:1-3999',
'vlan_id_range': 'INTEGER_VALUE_RANGE:1-3999',
'vlan_name': 'TEXT:',
'vlan_flood': 'TEXT_OPTIONS:ipv4,ipv6',
'vlan_state': 'TEXT_OPTIONS:active,suspend',
'vlan_last_member_query_interval': 'INTEGER_VALUE:1-25',
'vlan_querier': 'IPV4Address:',
'vlan_querier_timeout': 'INTEGER_VALUE:1-65535',
'vlan_query_interval': 'INTEGER_VALUE:1-18000',
'vlan_query_max_response_time': 'INTEGER_VALUE:1-25',
'vlan_report_suppression': 'INTEGER_VALUE:1-25',
'vlan_robustness_variable': 'INTEGER_VALUE:1-7',
'vlan_startup_query_count': 'INTEGER_VALUE:1-10',
'vlan_startup_query_interval': 'INTEGER_VALUE:1-18000',
'vlan_snooping_version': 'INTEGER_VALUE:2-3',
'vlan_access_map_name': 'TEXT: ',
'vlan_ethernet_interface': 'TEXT:',
'vlan_portagg_number': 'INTEGER_VALUE:1-4096',
'vlan_accessmap_action': 'TEXT_OPTIONS:drop,forward,redirect',
'vlan_dot1q_tag': 'MATCH_TEXT_OR_EMPTY:egress-only',
'vlan_filter_name': 'TEXT:',
'vlag_auto_recovery': 'INTEGER_VALUE:240-3600',
'vlag_config_consistency': 'TEXT_OPTIONS:disable,strict',
'vlag_instance': 'INTEGER_VALUE:1-64',
'vlag_port_aggregation': 'INTEGER_VALUE:1-4096',
'vlag_priority': 'INTEGER_VALUE:0-65535',
'vlag_startup_delay': 'INTEGER_VALUE:0-3600',
'vlag_tier_id': 'INTEGER_VALUE:1-512',
'vlag_hlthchk_options': 'TEXT_OPTIONS:keepalive-attempts,\
keepalive-interval,peer-ip,retry-interval',
'vlag_keepalive_attempts': 'INTEGER_VALUE:1-24',
'vlag_keepalive_interval': 'INTEGER_VALUE:2-300',
'vlag_retry_interval': 'INTEGER_VALUE:1-300',
'vlag_peerip': 'IPV4Address:',
'vlag_peerip_vrf': 'TEXT_OPTIONS:default,management',
'bgp_as_number': 'NO_VALIDATION:1-4294967295',
'bgp_address_family': 'TEXT_OPTIONS:ipv4,ipv6',
'bgp_bgp_local_count': 'INTEGER_VALUE:2-64',
'cluster_id_as_ip': 'IPV4Address:',
'cluster_id_as_number': 'NO_VALIDATION:1-4294967295',
'confederation_identifier': 'INTEGER_VALUE:1-65535',
'condeferation_peers_as': 'INTEGER_VALUE:1-65535',
'stalepath_delay_value': 'INTEGER_VALUE:1-3600',
'maxas_limit_as': 'INTEGER_VALUE:1-2000',
'neighbor_ipaddress': 'IPV4Address:',
'neighbor_as': 'NO_VALIDATION:1-4294967295',
'router_id': 'IPV4Address:',
'bgp_keepalive_interval': 'INTEGER_VALUE:0-3600',
'bgp_holdtime': 'INTEGER_VALUE:0-3600',
'bgp_aggregate_prefix': 'IPV4AddressWithMask:',
'addrfamily_routemap_name': 'TEXT:',
'reachability_half_life': 'INTEGER_VALUE:1-45',
'start_reuse_route_value': 'INTEGER_VALUE:1-20000',
'start_suppress_route_value': 'INTEGER_VALUE:1-20000',
'max_duration_to_suppress_route': 'INTEGER_VALUE:1-255',
'unreachability_halftime_for_penalty': 'INTEGER_VALUE:1-45',
'distance_external_AS': 'INTEGER_VALUE:1-255',
'distance_internal_AS': 'INTEGER_VALUE:1-255',
'distance_local_routes': 'INTEGER_VALUE:1-255',
'maxpath_option': 'TEXT_OPTIONS:ebgp,ibgp',
'maxpath_numbers': 'INTEGER_VALUE:2-32',
'network_ip_prefix_with_mask': 'IPV4AddressWithMask:',
'network_ip_prefix_value': 'IPV4Address:',
'network_ip_prefix_mask': 'IPV4Address:',
'nexthop_crtitical_delay': 'NO_VALIDATION:1-4294967295',
'nexthop_noncrtitical_delay': 'NO_VALIDATION:1-4294967295',
'addrfamily_redistribute_option': 'TEXT_OPTIONS:direct,ospf,\
static',
'bgp_neighbor_af_occurances': 'INTEGER_VALUE:1-10',
'bgp_neighbor_af_filtername': 'TEXT:',
'bgp_neighbor_af_maxprefix': 'INTEGER_VALUE:1-15870',
'bgp_neighbor_af_prefixname': 'TEXT:',
'bgp_neighbor_af_routemap': 'TEXT:',
'bgp_neighbor_address_family': 'TEXT_OPTIONS:ipv4,ipv6',
'bgp_neighbor_connection_retrytime': 'INTEGER_VALUE:1-65535',
'bgp_neighbor_description': 'TEXT:',
'bgp_neighbor_maxhopcount': 'INTEGER_VALUE:1-255',
'bgp_neighbor_local_as': 'NO_VALIDATION:1-4294967295',
'bgp_neighbor_maxpeers': 'INTEGER_VALUE:1-96',
'bgp_neighbor_password': 'TEXT:',
'bgp_neighbor_timers_Keepalive': 'INTEGER_VALUE:0-3600',
'bgp_neighbor_timers_holdtime': 'INTEGER_VALUE:0-3600',
'bgp_neighbor_ttl_hops': 'INTEGER_VALUE:1-254',
'bgp_neighbor_update_options': 'TEXT_OPTIONS:ethernet,loopback,\
vlan',
'bgp_neighbor_update_ethernet': 'TEXT:',
'bgp_neighbor_update_loopback': 'INTEGER_VALUE:0-7',
'bgp_neighbor_update_vlan': 'INTEGER_VALUE:1-4094',
'bgp_neighbor_weight': 'INTEGER_VALUE:0-65535',
'ethernet_interface_value': 'INTEGER_VALUE:1-54',
'ethernet_interface_range': 'INTEGER_VALUE_RANGE:1-54',
'ethernet_interface_string': 'TEXT:',
'loopback_interface_value': 'INTEGER_VALUE:0-7',
'mgmt_interface_value': 'INTEGER_VALUE:0-0',
'vlan_interface_value': 'INTEGER_VALUE:1-4094',
'portchannel_interface_value': 'INTEGER_VALUE:1-4096',
'portchannel_interface_range': 'INTEGER_VALUE_RANGE:1-4096',
'portchannel_interface_string': 'TEXT:',
'aggregation_group_no': 'INTEGER_VALUE:1-4096',
'aggregation_group_mode': 'TEXT_OPTIONS:active,on,passive',
'bfd_options': 'TEXT_OPTIONS:authentication,echo,interval,ipv4,\
ipv6,neighbor',
'bfd_interval': 'INTEGER_VALUE:50-999',
'bfd_minrx': 'INTEGER_VALUE:50-999',
'bfd_ multiplier': 'INTEGER_VALUE:3-50',
'bfd_ipv4_options': 'TEXT_OPTIONS:authentication,echo,interval',
'bfd_auth_options': 'TEXT_OPTIONS:keyed-md5,keyed-sha1,\
meticulous-keyed-md5,meticulous-keyed-sha1,simple',
'bfd_key_options': 'TEXT_OPTIONS:key-chain,key-id',
'bfd_key_chain': 'TEXT:',
'bfd_key_id': 'INTEGER_VALUE:0-255',
'bfd_key_name': 'TEXT:',
'bfd_neighbor_ip': 'TEXT:',
'bfd_neighbor_options': 'TEXT_OPTIONS:admin-down,multihop,\
non-persistent',
'bfd_access_vlan': 'INTEGER_VALUE:1-3999',
'bfd_bridgeport_mode': 'TEXT_OPTIONS:access,dot1q-tunnel,trunk',
'trunk_options': 'TEXT_OPTIONS:allowed,native',
'trunk_vlanid': 'INTEGER_VALUE:1-3999',
'portCh_description': 'TEXT:',
'duplex_option': 'TEXT_OPTIONS:auto,full,half',
'flowcontrol_options': 'TEXT_OPTIONS:receive,send',
'portchannel_ip_options': 'TEXT_OPTIONS:access-group,address,\
arp,dhcp,ospf,port,port-unreachable,redirects,router,\
unreachables',
'accessgroup_name': 'TEXT:',
'portchannel_ipv4': 'IPV4Address:',
'portchannel_ipv4_mask': 'TEXT:',
'arp_ipaddress': 'IPV4Address:',
'arp_macaddress': 'TEXT:',
'arp_timeout_value': 'INTEGER_VALUE:60-28800',
'relay_ipaddress': 'IPV4Address:',
'ip_ospf_options': 'TEXT_OPTIONS:authentication,\
authentication-key,bfd,cost,database-filter,dead-interval,\
hello-interval,message-digest-key,mtu,mtu-ignore,network,\
passive-interface,priority,retransmit-interval,shutdown,\
transmit-delay',
'ospf_id_decimal_value': 'NO_VALIDATION:1-4294967295',
'ospf_id_ipaddres_value': 'IPV4Address:',
'lacp_options': 'TEXT_OPTIONS:port-priority,suspend-individual,\
timeout',
'port_priority': 'INTEGER_VALUE:1-65535',
'lldp_options': 'TEXT_OPTIONS:receive,tlv-select,transmit,\
trap-notification',
'lldp_tlv_options': 'TEXT_OPTIONS:link-aggregation,\
mac-phy-status,management-address,max-frame-size,\
port-description,port-protocol-vlan,port-vlan,power-mdi,\
protocol-identity,system-capabilities,system-description,\
system-name,vid-management,vlan-name',
'load_interval_delay': 'INTEGER_VALUE:30-300',
'load_interval_counter': 'INTEGER_VALUE:1-3',
'mac_accessgroup_name': 'TEXT:',
'mac_address': 'TEXT:',
'microburst_threshold': 'NO_VALIDATION:1-4294967295',
'mtu_value': 'INTEGER_VALUE:64-9216',
'service_instance': 'NO_VALIDATION:1-4294967295',
'service_policy_options': 'TEXT_OPTIONS:copp-system-policy,input,\
output,type',
'service_policy_name': 'TEXT:',
'spanning_tree_options': 'TEXT_OPTIONS:bpdufilter,bpduguard,\
cost,disable,enable,guard,link-type,mst,port,port-priority,vlan',
'spanning_tree_cost': 'NO_VALIDATION:1-200000000',
'spanning_tree_interfacerange': 'INTEGER_VALUE_RANGE:1-3999',
'spanning_tree_portpriority': 'TEXT_OPTIONS:0,32,64,96,128,160,\
192,224',
'portchannel_ipv6_neighbor_mac': 'TEXT:',
'portchannel_ipv6_neighbor_address': 'IPV6Address:',
'portchannel_ipv6_linklocal': 'IPV6Address:',
'portchannel_ipv6_dhcp_vlan': 'INTEGER_VALUE:1-4094',
'portchannel_ipv6_dhcp_ethernet': 'TEXT:',
'portchannel_ipv6_dhcp': 'IPV6Address:',
'portchannel_ipv6_address': 'IPV6Address:',
'portchannel_ipv6_options': 'TEXT_OPTIONS:address,dhcp,\
link-local,nd,neighbor',
'interface_speed': 'TEXT_OPTIONS:10000,100000,25000,40000,50000,auto',
'stormcontrol_options': 'TEXT_OPTIONS:broadcast,multicast,\
unicast',
'stormcontrol_level': 'FLOAT:',
'portchannel_dot1q_tag': 'TEXT_OPTIONS:disable,enable,\
egress-only',
'vrrp_id': 'INTEGER_VALUE:1-255',
}
NE1032T = {
'vlan_id': 'INTEGER_VALUE:1-3999',
'vlan_id_range': 'INTEGER_VALUE_RANGE:1-3999',
'vlan_name': 'TEXT:',
'vlan_flood': 'TEXT_OPTIONS:ipv4,ipv6',
'vlan_state': 'TEXT_OPTIONS:active,suspend',
'vlan_last_member_query_interval': 'INTEGER_VALUE:1-25',
'vlan_querier': 'IPV4Address:',
'vlan_querier_timeout': 'INTEGER_VALUE:1-65535',
'vlan_query_interval': 'INTEGER_VALUE:1-18000',
'vlan_query_max_response_time': 'INTEGER_VALUE:1-25',
'vlan_report_suppression': 'INTEGER_VALUE:1-25',
'vlan_robustness_variable': 'INTEGER_VALUE:1-7',
'vlan_startup_query_count': 'INTEGER_VALUE:1-10',
'vlan_startup_query_interval': 'INTEGER_VALUE:1-18000',
'vlan_snooping_version': 'INTEGER_VALUE:2-3',
'vlan_access_map_name': 'TEXT: ',
'vlan_ethernet_interface': 'TEXT:',
'vlan_portagg_number': 'INTEGER_VALUE:1-4096',
'vlan_accessmap_action': 'TEXT_OPTIONS:drop,forward,redirect',
'vlan_dot1q_tag': 'MATCH_TEXT_OR_EMPTY:egress-only',
'vlan_filter_name': 'TEXT:',
'vlag_auto_recovery': 'INTEGER_VALUE:240-3600',
'vlag_config_consistency': 'TEXT_OPTIONS:disable,strict',
'vlag_instance': 'INTEGER_VALUE:1-64',
'vlag_port_aggregation': 'INTEGER_VALUE:1-4096',
'vlag_priority': 'INTEGER_VALUE:0-65535',
'vlag_startup_delay': 'INTEGER_VALUE:0-3600',
'vlag_tier_id': 'INTEGER_VALUE:1-512',
'vlag_hlthchk_options': 'TEXT_OPTIONS:keepalive-attempts,\
keepalive-interval,peer-ip,retry-interval',
'vlag_keepalive_attempts': 'INTEGER_VALUE:1-24',
'vlag_keepalive_interval': 'INTEGER_VALUE:2-300',
'vlag_retry_interval': 'INTEGER_VALUE:1-300',
'vlag_peerip': 'IPV4Address:',
'vlag_peerip_vrf': 'TEXT_OPTIONS:default,management',
'bgp_as_number': 'NO_VALIDATION:1-4294967295',
'bgp_address_family': 'TEXT_OPTIONS:ipv4,ipv6',
'bgp_bgp_local_count': 'INTEGER_VALUE:2-64',
'cluster_id_as_ip': 'IPV4Address:',
'cluster_id_as_number': 'NO_VALIDATION:1-4294967295',
'confederation_identifier': 'INTEGER_VALUE:1-65535',
'condeferation_peers_as': 'INTEGER_VALUE:1-65535',
'stalepath_delay_value': 'INTEGER_VALUE:1-3600',
'maxas_limit_as': 'INTEGER_VALUE:1-2000',
'neighbor_ipaddress': 'IPV4Address:',
'neighbor_as': 'NO_VALIDATION:1-4294967295',
'router_id': 'IPV4Address:',
'bgp_keepalive_interval': 'INTEGER_VALUE:0-3600',
'bgp_holdtime': 'INTEGER_VALUE:0-3600',
'bgp_aggregate_prefix': 'IPV4AddressWithMask:',
'addrfamily_routemap_name': 'TEXT:',
'reachability_half_life': 'INTEGER_VALUE:1-45',
'start_reuse_route_value': 'INTEGER_VALUE:1-20000',
'start_suppress_route_value': 'INTEGER_VALUE:1-20000',
'max_duration_to_suppress_route': 'INTEGER_VALUE:1-255',
'unreachability_halftime_for_penalty': 'INTEGER_VALUE:1-45',
'distance_external_AS': 'INTEGER_VALUE:1-255',
'distance_internal_AS': 'INTEGER_VALUE:1-255',
'distance_local_routes': 'INTEGER_VALUE:1-255',
'maxpath_option': 'TEXT_OPTIONS:ebgp,ibgp',
'maxpath_numbers': 'INTEGER_VALUE:2-32',
'network_ip_prefix_with_mask': 'IPV4AddressWithMask:',
'network_ip_prefix_value': 'IPV4Address:',
'network_ip_prefix_mask': 'IPV4Address:',
'nexthop_crtitical_delay': 'NO_VALIDATION:1-4294967295',
'nexthop_noncrtitical_delay': 'NO_VALIDATION:1-4294967295',
'addrfamily_redistribute_option': 'TEXT_OPTIONS:direct,ospf,\
static',
'bgp_neighbor_af_occurances': 'INTEGER_VALUE:1-10',
'bgp_neighbor_af_filtername': 'TEXT:',
'bgp_neighbor_af_maxprefix': 'INTEGER_VALUE:1-15870',
'bgp_neighbor_af_prefixname': 'TEXT:',
'bgp_neighbor_af_routemap': 'TEXT:',
'bgp_neighbor_address_family': 'TEXT_OPTIONS:ipv4,ipv6',
'bgp_neighbor_connection_retrytime': 'INTEGER_VALUE:1-65535',
'bgp_neighbor_description': 'TEXT:',
'bgp_neighbor_maxhopcount': 'INTEGER_VALUE:1-255',
'bgp_neighbor_local_as': 'NO_VALIDATION:1-4294967295',
'bgp_neighbor_maxpeers': 'INTEGER_VALUE:1-96',
'bgp_neighbor_password': 'TEXT:',
'bgp_neighbor_timers_Keepalive': 'INTEGER_VALUE:0-3600',
'bgp_neighbor_timers_holdtime': 'INTEGER_VALUE:0-3600',
'bgp_neighbor_ttl_hops': 'INTEGER_VALUE:1-254',
'bgp_neighbor_update_options': 'TEXT_OPTIONS:ethernet,loopback,\
vlan',
'bgp_neighbor_update_ethernet': 'TEXT:',
'bgp_neighbor_update_loopback': 'INTEGER_VALUE:0-7',
'bgp_neighbor_update_vlan': 'INTEGER_VALUE:1-4094',
'bgp_neighbor_weight': 'INTEGER_VALUE:0-65535',
'ethernet_interface_value': 'INTEGER_VALUE:1-32',
'ethernet_interface_range': 'INTEGER_VALUE_RANGE:1-32',
'ethernet_interface_string': 'TEXT:',
'loopback_interface_value': 'INTEGER_VALUE:0-7',
'mgmt_interface_value': 'INTEGER_VALUE:0-0',
'vlan_interface_value': 'INTEGER_VALUE:1-4094',
'portchannel_interface_value': 'INTEGER_VALUE:1-4096',
'portchannel_interface_range': 'INTEGER_VALUE_RANGE:1-4096',
'portchannel_interface_string': 'TEXT:',
'aggregation_group_no': 'INTEGER_VALUE:1-4096',
'aggregation_group_mode': 'TEXT_OPTIONS:active,on,passive',
'bfd_options': 'TEXT_OPTIONS:authentication,echo,interval,ipv4,\
ipv6,neighbor',
'bfd_interval': 'INTEGER_VALUE:50-999',
'bfd_minrx': 'INTEGER_VALUE:50-999',
'bfd_ multiplier': 'INTEGER_VALUE:3-50',
'bfd_ipv4_options': 'TEXT_OPTIONS:authentication,echo,interval',
'bfd_auth_options': 'TEXT_OPTIONS:keyed-md5,keyed-sha1,\
meticulous-keyed-md5,meticulous-keyed-sha1,simple',
'bfd_key_options': 'TEXT_OPTIONS:key-chain,key-id',
'bfd_key_chain': 'TEXT:',
'bfd_key_id': 'INTEGER_VALUE:0-255',
'bfd_key_name': 'TEXT:',
'bfd_neighbor_ip': 'TEXT:',
'bfd_neighbor_options': 'TEXT_OPTIONS:admin-down,multihop,\
non-persistent',
'bfd_access_vlan': 'INTEGER_VALUE:1-3999',
'bfd_bridgeport_mode': 'TEXT_OPTIONS:access,dot1q-tunnel,trunk',
'trunk_options': 'TEXT_OPTIONS:allowed,native',
'trunk_vlanid': 'INTEGER_VALUE:1-3999',
'portCh_description': 'TEXT:',
'duplex_option': 'TEXT_OPTIONS:auto,full,half',
'flowcontrol_options': 'TEXT_OPTIONS:receive,send',
'portchannel_ip_options': 'TEXT_OPTIONS:access-group,address,\
arp,dhcp,ospf,port,port-unreachable,redirects,router,\
unreachables',
'accessgroup_name': 'TEXT:',
'portchannel_ipv4': 'IPV4Address:',
'portchannel_ipv4_mask': 'TEXT:',
'arp_ipaddress': 'IPV4Address:',
'arp_macaddress': 'TEXT:',
'arp_timeout_value': 'INTEGER_VALUE:60-28800',
'relay_ipaddress': 'IPV4Address:',
'ip_ospf_options': 'TEXT_OPTIONS:authentication,\
authentication-key,bfd,cost,database-filter,dead-interval,\
hello-interval,message-digest-key,mtu,mtu-ignore,network,\
passive-interface,priority,retransmit-interval,shutdown,\
transmit-delay',
'ospf_id_decimal_value': 'NO_VALIDATION:1-4294967295',
'ospf_id_ipaddres_value': 'IPV4Address:',
'lacp_options': 'TEXT_OPTIONS:port-priority,suspend-individual,\
timeout',
'port_priority': 'INTEGER_VALUE:1-65535',
'lldp_options': 'TEXT_OPTIONS:receive,tlv-select,transmit,\
trap-notification',
'lldp_tlv_options': 'TEXT_OPTIONS:link-aggregation,\
mac-phy-status,management-address,max-frame-size,\
port-description,port-protocol-vlan,port-vlan,power-mdi,\
protocol-identity,system-capabilities,system-description,\
system-name,vid-management,vlan-name',
'load_interval_delay': 'INTEGER_VALUE:30-300',
'load_interval_counter': 'INTEGER_VALUE:1-3',
'mac_accessgroup_name': 'TEXT:',
'mac_address': 'TEXT:',
'microburst_threshold': 'NO_VALIDATION:1-4294967295',
'mtu_value': 'INTEGER_VALUE:64-9216',
'service_instance': 'NO_VALIDATION:1-4294967295',
'service_policy_options': 'TEXT_OPTIONS:copp-system-policy,input,\
output,type',
'service_policy_name': 'TEXT:',
'spanning_tree_options': 'TEXT_OPTIONS:bpdufilter,bpduguard,\
cost,disable,enable,guard,link-type,mst,port,port-priority,vlan',
'spanning_tree_cost': 'NO_VALIDATION:1-200000000',
'spanning_tree_interfacerange': 'INTEGER_VALUE_RANGE:1-3999',
'spanning_tree_portpriority': 'TEXT_OPTIONS:0,32,64,96,128,160,\
192,224',
'portchannel_ipv6_neighbor_mac': 'TEXT:',
'portchannel_ipv6_neighbor_address': 'IPV6Address:',
'portchannel_ipv6_linklocal': 'IPV6Address:',
'portchannel_ipv6_dhcp_vlan': 'INTEGER_VALUE:1-4094',
'portchannel_ipv6_dhcp_ethernet': 'TEXT:',
'portchannel_ipv6_dhcp': 'IPV6Address:',
'portchannel_ipv6_address': 'IPV6Address:',
'portchannel_ipv6_options': 'TEXT_OPTIONS:address,dhcp,\
link-local,nd,neighbor',
'interface_speed': 'TEXT_OPTIONS:1000,10000,100000,25000,40000,50000,auto',
'stormcontrol_options': 'TEXT_OPTIONS:broadcast,multicast,\
unicast',
'stormcontrol_level': 'FLOAT:',
'portchannel_dot1q_tag': 'TEXT_OPTIONS:disable,enable,\
egress-only',
'vrrp_id': 'INTEGER_VALUE:1-255',
}
NE1032 = {
'vlan_id': 'INTEGER_VALUE:1-3999',
'vlan_id_range': 'INTEGER_VALUE_RANGE:1-3999',
'vlan_name': 'TEXT:',
'vlan_flood': 'TEXT_OPTIONS:ipv4,ipv6',
'vlan_state': 'TEXT_OPTIONS:active,suspend',
'vlan_last_member_query_interval': 'INTEGER_VALUE:1-25',
'vlan_querier': 'IPV4Address:',
'vlan_querier_timeout': 'INTEGER_VALUE:1-65535',
'vlan_query_interval': 'INTEGER_VALUE:1-18000',
'vlan_query_max_response_time': 'INTEGER_VALUE:1-25',
'vlan_report_suppression': 'INTEGER_VALUE:1-25',
'vlan_robustness_variable': 'INTEGER_VALUE:1-7',
'vlan_startup_query_count': 'INTEGER_VALUE:1-10',
'vlan_startup_query_interval': 'INTEGER_VALUE:1-18000',
'vlan_snooping_version': 'INTEGER_VALUE:2-3',
'vlan_access_map_name': 'TEXT: ',
'vlan_ethernet_interface': 'TEXT:',
'vlan_portagg_number': 'INTEGER_VALUE:1-4096',
'vlan_accessmap_action': 'TEXT_OPTIONS:drop,forward,redirect',
'vlan_dot1q_tag': 'MATCH_TEXT_OR_EMPTY:egress-only',
'vlan_filter_name': 'TEXT:',
'vlag_auto_recovery': 'INTEGER_VALUE:240-3600',
'vlag_config_consistency': 'TEXT_OPTIONS:disable,strict',
'vlag_instance': 'INTEGER_VALUE:1-64',
'vlag_port_aggregation': 'INTEGER_VALUE:1-4096',
'vlag_priority': 'INTEGER_VALUE:0-65535',
'vlag_startup_delay': 'INTEGER_VALUE:0-3600',
'vlag_tier_id': 'INTEGER_VALUE:1-512',
'vlag_hlthchk_options': 'TEXT_OPTIONS:keepalive-attempts,\
keepalive-interval,peer-ip,retry-interval',
'vlag_keepalive_attempts': 'INTEGER_VALUE:1-24',
'vlag_keepalive_interval': 'INTEGER_VALUE:2-300',
'vlag_retry_interval': 'INTEGER_VALUE:1-300',
'vlag_peerip': 'IPV4Address:',
'vlag_peerip_vrf': 'TEXT_OPTIONS:default,management',
'bgp_as_number': 'NO_VALIDATION:1-4294967295',
'bgp_address_family': 'TEXT_OPTIONS:ipv4,ipv6',
'bgp_bgp_local_count': 'INTEGER_VALUE:2-64',
'cluster_id_as_ip': 'IPV4Address:',
'cluster_id_as_number': 'NO_VALIDATION:1-4294967295',
'confederation_identifier': 'INTEGER_VALUE:1-65535',
'condeferation_peers_as': 'INTEGER_VALUE:1-65535',
'stalepath_delay_value': 'INTEGER_VALUE:1-3600',
'maxas_limit_as': 'INTEGER_VALUE:1-2000',
'neighbor_ipaddress': 'IPV4Address:',
'neighbor_as': 'NO_VALIDATION:1-4294967295',
'router_id': 'IPV4Address:',
'bgp_keepalive_interval': 'INTEGER_VALUE:0-3600',
'bgp_holdtime': 'INTEGER_VALUE:0-3600',
'bgp_aggregate_prefix': 'IPV4AddressWithMask:',
'addrfamily_routemap_name': 'TEXT:',
'reachability_half_life': 'INTEGER_VALUE:1-45',
'start_reuse_route_value': 'INTEGER_VALUE:1-20000',
'start_suppress_route_value': 'INTEGER_VALUE:1-20000',
'max_duration_to_suppress_route': 'INTEGER_VALUE:1-255',
'unreachability_halftime_for_penalty': 'INTEGER_VALUE:1-45',
'distance_external_AS': 'INTEGER_VALUE:1-255',
'distance_internal_AS': 'INTEGER_VALUE:1-255',
'distance_local_routes': 'INTEGER_VALUE:1-255',
'maxpath_option': 'TEXT_OPTIONS:ebgp,ibgp',
'maxpath_numbers': 'INTEGER_VALUE:2-32',
'network_ip_prefix_with_mask': 'IPV4AddressWithMask:',
'network_ip_prefix_value': 'IPV4Address:',
'network_ip_prefix_mask': 'IPV4Address:',
'nexthop_crtitical_delay': 'NO_VALIDATION:1-4294967295',
'nexthop_noncrtitical_delay': 'NO_VALIDATION:1-4294967295',
'addrfamily_redistribute_option': 'TEXT_OPTIONS:direct,ospf,\
static',
'bgp_neighbor_af_occurances': 'INTEGER_VALUE:1-10',
'bgp_neighbor_af_filtername': 'TEXT:',
'bgp_neighbor_af_maxprefix': 'INTEGER_VALUE:1-15870',
'bgp_neighbor_af_prefixname': 'TEXT:',
'bgp_neighbor_af_routemap': 'TEXT:',
'bgp_neighbor_address_family': 'TEXT_OPTIONS:ipv4,ipv6',
'bgp_neighbor_connection_retrytime': 'INTEGER_VALUE:1-65535',
'bgp_neighbor_description': 'TEXT:',
'bgp_neighbor_maxhopcount': 'INTEGER_VALUE:1-255',
'bgp_neighbor_local_as': 'NO_VALIDATION:1-4294967295',
'bgp_neighbor_maxpeers': 'INTEGER_VALUE:1-96',
'bgp_neighbor_password': 'TEXT:',
'bgp_neighbor_timers_Keepalive': 'INTEGER_VALUE:0-3600',
'bgp_neighbor_timers_holdtime': 'INTEGER_VALUE:0-3600',
'bgp_neighbor_ttl_hops': 'INTEGER_VALUE:1-254',
'bgp_neighbor_update_options': 'TEXT_OPTIONS:ethernet,loopback,\
vlan',
'bgp_neighbor_update_ethernet': 'TEXT:',
'bgp_neighbor_update_loopback': 'INTEGER_VALUE:0-7',
'bgp_neighbor_update_vlan': 'INTEGER_VALUE:1-4094',
'bgp_neighbor_weight': 'INTEGER_VALUE:0-65535',
'ethernet_interface_value': 'INTEGER_VALUE:1-32',
'ethernet_interface_range': 'INTEGER_VALUE_RANGE:1-32',
'ethernet_interface_string': 'TEXT:',
'loopback_interface_value': 'INTEGER_VALUE:0-7',
'mgmt_interface_value': 'INTEGER_VALUE:0-0',
'vlan_interface_value': 'INTEGER_VALUE:1-4094',
'portchannel_interface_value': 'INTEGER_VALUE:1-4096',
'portchannel_interface_range': 'INTEGER_VALUE_RANGE:1-4096',
'portchannel_interface_string': 'TEXT:',
'aggregation_group_no': 'INTEGER_VALUE:1-4096',
'aggregation_group_mode': 'TEXT_OPTIONS:active,on,passive',
'bfd_options': 'TEXT_OPTIONS:authentication,echo,interval,ipv4,\
ipv6,neighbor',
'bfd_interval': 'INTEGER_VALUE:50-999',
'bfd_minrx': 'INTEGER_VALUE:50-999',
'bfd_ multiplier': 'INTEGER_VALUE:3-50',
'bfd_ipv4_options': 'TEXT_OPTIONS:authentication,echo,interval',
'bfd_auth_options': 'TEXT_OPTIONS:keyed-md5,keyed-sha1,\
meticulous-keyed-md5,meticulous-keyed-sha1,simple',
'bfd_key_options': 'TEXT_OPTIONS:key-chain,key-id',
'bfd_key_chain': 'TEXT:',
'bfd_key_id': 'INTEGER_VALUE:0-255',
'bfd_key_name': 'TEXT:',
'bfd_neighbor_ip': 'TEXT:',
'bfd_neighbor_options': 'TEXT_OPTIONS:admin-down,multihop,\
non-persistent',
'bfd_access_vlan': 'INTEGER_VALUE:1-3999',
'bfd_bridgeport_mode': 'TEXT_OPTIONS:access,dot1q-tunnel,trunk',
'trunk_options': 'TEXT_OPTIONS:allowed,native',
'trunk_vlanid': 'INTEGER_VALUE:1-3999',
'portCh_description': 'TEXT:',
'duplex_option': 'TEXT_OPTIONS:auto,full,half',
'flowcontrol_options': 'TEXT_OPTIONS:receive,send',
'portchannel_ip_options': 'TEXT_OPTIONS:access-group,address,\
arp,dhcp,ospf,port,port-unreachable,redirects,router,\
unreachables',
'accessgroup_name': 'TEXT:',
'portchannel_ipv4': 'IPV4Address:',
'portchannel_ipv4_mask': 'TEXT:',
'arp_ipaddress': 'IPV4Address:',
'arp_macaddress': 'TEXT:',
'arp_timeout_value': 'INTEGER_VALUE:60-28800',
'relay_ipaddress': 'IPV4Address:',
'ip_ospf_options': 'TEXT_OPTIONS:authentication,\
authentication-key,bfd,cost,database-filter,dead-interval,\
hello-interval,message-digest-key,mtu,mtu-ignore,network,\
passive-interface,priority,retransmit-interval,shutdown,\
transmit-delay',
'ospf_id_decimal_value': 'NO_VALIDATION:1-4294967295',
'ospf_id_ipaddres_value': 'IPV4Address:',
'lacp_options': 'TEXT_OPTIONS:port-priority,suspend-individual,\
timeout',
'port_priority': 'INTEGER_VALUE:1-65535',
'lldp_options': 'TEXT_OPTIONS:receive,tlv-select,transmit,\
trap-notification',
'lldp_tlv_options': 'TEXT_OPTIONS:link-aggregation,\
mac-phy-status,management-address,max-frame-size,\
port-description,port-protocol-vlan,port-vlan,power-mdi,\
protocol-identity,system-capabilities,system-description,\
system-name,vid-management,vlan-name',
'load_interval_delay': 'INTEGER_VALUE:30-300',
'load_interval_counter': 'INTEGER_VALUE:1-3',
'mac_accessgroup_name': 'TEXT:',
'mac_address': 'TEXT:',
'microburst_threshold': 'NO_VALIDATION:1-4294967295',
'mtu_value': 'INTEGER_VALUE:64-9216',
'service_instance': 'NO_VALIDATION:1-4294967295',
'service_policy_options': 'TEXT_OPTIONS:copp-system-policy,input,\
output,type',
'service_policy_name': 'TEXT:',
'spanning_tree_options': 'TEXT_OPTIONS:bpdufilter,bpduguard,\
cost,disable,enable,guard,link-type,mst,port,port-priority,vlan',
'spanning_tree_cost': 'NO_VALIDATION:1-200000000',
'spanning_tree_interfacerange': 'INTEGER_VALUE_RANGE:1-3999',
'spanning_tree_portpriority': 'TEXT_OPTIONS:0,32,64,96,128,160,\
192,224',
'portchannel_ipv6_neighbor_mac': 'TEXT:',
'portchannel_ipv6_neighbor_address': 'IPV6Address:',
'portchannel_ipv6_linklocal': 'IPV6Address:',
'portchannel_ipv6_dhcp_vlan': 'INTEGER_VALUE:1-4094',
'portchannel_ipv6_dhcp_ethernet': 'TEXT:',
'portchannel_ipv6_dhcp': 'IPV6Address:',
'portchannel_ipv6_address': 'IPV6Address:',
'portchannel_ipv6_options': 'TEXT_OPTIONS:address,dhcp,\
link-local,nd,neighbor',
'interface_speed': 'TEXT_OPTIONS:1000,10000,100000,25000,40000,50000,auto',
'stormcontrol_options': 'TEXT_OPTIONS:broadcast,multicast,\
unicast',
'stormcontrol_level': 'FLOAT:',
'portchannel_dot1q_tag': 'TEXT_OPTIONS:disable,enable,\
egress-only',
'vrrp_id': 'INTEGER_VALUE:1-255',
}
NE1072T = {
'vlan_id': 'INTEGER_VALUE:1-3999',
'vlan_id_range': 'INTEGER_VALUE_RANGE:1-3999',
'vlan_name': 'TEXT:',
'vlan_flood': 'TEXT_OPTIONS:ipv4,ipv6',
'vlan_state': 'TEXT_OPTIONS:active,suspend',
'vlan_last_member_query_interval': 'INTEGER_VALUE:1-25',
'vlan_querier': 'IPV4Address:',
'vlan_querier_timeout': 'INTEGER_VALUE:1-65535',
'vlan_query_interval': 'INTEGER_VALUE:1-18000',
'vlan_query_max_response_time': 'INTEGER_VALUE:1-25',
'vlan_report_suppression': 'INTEGER_VALUE:1-25',
'vlan_robustness_variable': 'INTEGER_VALUE:1-7',
'vlan_startup_query_count': 'INTEGER_VALUE:1-10',
'vlan_startup_query_interval': 'INTEGER_VALUE:1-18000',
'vlan_snooping_version': 'INTEGER_VALUE:2-3',
'vlan_access_map_name': 'TEXT: ',
'vlan_ethernet_interface': 'TEXT:',
'vlan_portagg_number': 'INTEGER_VALUE:1-4096',
'vlan_accessmap_action': 'TEXT_OPTIONS:drop,forward,redirect',
'vlan_dot1q_tag': 'MATCH_TEXT_OR_EMPTY:egress-only',
'vlan_filter_name': 'TEXT:',
'vlag_auto_recovery': 'INTEGER_VALUE:240-3600',
'vlag_config_consistency': 'TEXT_OPTIONS:disable,strict',
'vlag_instance': 'INTEGER_VALUE:1-64',
'vlag_port_aggregation': 'INTEGER_VALUE:1-4096',
'vlag_priority': 'INTEGER_VALUE:0-65535',
'vlag_startup_delay': 'INTEGER_VALUE:0-3600',
'vlag_tier_id': 'INTEGER_VALUE:1-512',
'vlag_hlthchk_options': 'TEXT_OPTIONS:keepalive-attempts,\
keepalive-interval,peer-ip,retry-interval',
'vlag_keepalive_attempts': 'INTEGER_VALUE:1-24',
'vlag_keepalive_interval': 'INTEGER_VALUE:2-300',
'vlag_retry_interval': 'INTEGER_VALUE:1-300',
'vlag_peerip': 'IPV4Address:',
'vlag_peerip_vrf': 'TEXT_OPTIONS:default,management',
'bgp_as_number': 'NO_VALIDATION:1-4294967295',
'bgp_address_family': 'TEXT_OPTIONS:ipv4,ipv6',
'bgp_bgp_local_count': 'INTEGER_VALUE:2-64',
'cluster_id_as_ip': 'IPV4Address:',
'cluster_id_as_number': 'NO_VALIDATION:1-4294967295',
'confederation_identifier': 'INTEGER_VALUE:1-65535',
'condeferation_peers_as': 'INTEGER_VALUE:1-65535',
'stalepath_delay_value': 'INTEGER_VALUE:1-3600',
'maxas_limit_as': 'INTEGER_VALUE:1-2000',
'neighbor_ipaddress': 'IPV4Address:',
'neighbor_as': 'NO_VALIDATION:1-4294967295',
'router_id': 'IPV4Address:',
'bgp_keepalive_interval': 'INTEGER_VALUE:0-3600',
'bgp_holdtime': 'INTEGER_VALUE:0-3600',
'bgp_aggregate_prefix': 'IPV4AddressWithMask:',
'addrfamily_routemap_name': 'TEXT:',
'reachability_half_life': 'INTEGER_VALUE:1-45',
'start_reuse_route_value': 'INTEGER_VALUE:1-20000',
'start_suppress_route_value': 'INTEGER_VALUE:1-20000',
'max_duration_to_suppress_route': 'INTEGER_VALUE:1-255',
'unreachability_halftime_for_penalty': 'INTEGER_VALUE:1-45',
'distance_external_AS': 'INTEGER_VALUE:1-255',
'distance_internal_AS': 'INTEGER_VALUE:1-255',
'distance_local_routes': 'INTEGER_VALUE:1-255',
'maxpath_option': 'TEXT_OPTIONS:ebgp,ibgp',
'maxpath_numbers': 'INTEGER_VALUE:2-32',
'network_ip_prefix_with_mask': 'IPV4AddressWithMask:',
'network_ip_prefix_value': 'IPV4Address:',
'network_ip_prefix_mask': 'IPV4Address:',
'nexthop_critical_delay': 'NO_VALIDATION:1-4294967295',
'nexthop_noncritical_delay': 'NO_VALIDATION:1-4294967295',
'addrfamily_redistribute_option': 'TEXT_OPTIONS:direct,ospf,\
static',
'bgp_neighbor_af_occurances': 'INTEGER_VALUE:1-10',
'bgp_neighbor_af_filtername': 'TEXT:',
'bgp_neighbor_af_maxprefix': 'INTEGER_VALUE:1-15870',
'bgp_neighbor_af_prefixname': 'TEXT:',
'bgp_neighbor_af_routemap': 'TEXT:',
'bgp_neighbor_address_family': 'TEXT_OPTIONS:ipv4,ipv6',
'bgp_neighbor_connection_retrytime': 'INTEGER_VALUE:1-65535',
'bgp_neighbor_description': 'TEXT:',
'bgp_neighbor_maxhopcount': 'INTEGER_VALUE:1-255',
'bgp_neighbor_local_as': 'NO_VALIDATION:1-4294967295',
'bgp_neighbor_maxpeers': 'INTEGER_VALUE:1-96',
'bgp_neighbor_password': 'TEXT:',
'bgp_neighbor_timers_Keepalive': 'INTEGER_VALUE:0-3600',
'bgp_neighbor_timers_holdtime': 'INTEGER_VALUE:0-3600',
'bgp_neighbor_ttl_hops': 'INTEGER_VALUE:1-254',
'bgp_neighbor_update_options': 'TEXT_OPTIONS:ethernet,loopback,\
vlan',
'bgp_neighbor_update_ethernet': 'TEXT:',
'bgp_neighbor_update_loopback': 'INTEGER_VALUE:0-7',
'bgp_neighbor_update_vlan': 'INTEGER_VALUE:1-4094',
'bgp_neighbor_weight': 'INTEGER_VALUE:0-65535',
'ethernet_interface_value': 'INTEGER_VALUE:1-54',
'ethernet_interface_range': 'INTEGER_VALUE_RANGE:1-54',
'ethernet_interface_string': 'TEXT:',
'loopback_interface_value': 'INTEGER_VALUE:0-7',
'mgmt_interface_value': 'INTEGER_VALUE:0-0',
'vlan_interface_value': 'INTEGER_VALUE:1-4094',
'portchannel_interface_value': 'INTEGER_VALUE:1-4096',
'portchannel_interface_range': 'INTEGER_VALUE_RANGE:1-4096',
'portchannel_interface_string': 'TEXT:',
'aggregation_group_no': 'INTEGER_VALUE:1-4096',
'aggregation_group_mode': 'TEXT_OPTIONS:active,on,passive',
'bfd_options': 'TEXT_OPTIONS:authentication,echo,interval,ipv4,\
ipv6,neighbor',
'bfd_interval': 'INTEGER_VALUE:50-999',
'bfd_minrx': 'INTEGER_VALUE:50-999',
'bfd_multiplier': 'INTEGER_VALUE:3-50',
'bfd_ipv4_options': 'TEXT_OPTIONS:authentication,echo,interval',
'bfd_auth_options': 'TEXT_OPTIONS:keyed-md5,keyed-sha1,\
meticulous-keyed-md5,meticulous-keyed-sha1,simple',
'bfd_key_options': 'TEXT_OPTIONS:key-chain,key-id',
'bfd_key_chain': 'TEXT:',
'bfd_key_id': 'INTEGER_VALUE:0-255',
'bfd_key_name': 'TEXT:',
'bfd_neighbor_ip': 'TEXT:',
'bfd_neighbor_options': 'TEXT_OPTIONS:admin-down,multihop,\
non-persistent',
'bfd_access_vlan': 'INTEGER_VALUE:1-3999',
'bfd_bridgeport_mode': 'TEXT_OPTIONS:access,dot1q-tunnel,trunk',
'trunk_options': 'TEXT_OPTIONS:allowed,native',
'trunk_vlanid': 'INTEGER_VALUE:1-3999',
'portCh_description': 'TEXT:',
'duplex_option': 'TEXT_OPTIONS:auto,full,half',
'flowcontrol_options': 'TEXT_OPTIONS:receive,send',
'portchannel_ip_options': 'TEXT_OPTIONS:access-group,address,\
arp,dhcp,ospf,port,port-unreachable,redirects,router,\
unreachables',
'accessgroup_name': 'TEXT:',
'portchannel_ipv4': 'IPV4Address:',
'portchannel_ipv4_mask': 'TEXT:',
'arp_ipaddress': 'IPV4Address:',
'arp_macaddress': 'TEXT:',
'arp_timeout_value': 'INTEGER_VALUE:60-28800',
'relay_ipaddress': 'IPV4Address:',
'ip_ospf_options': 'TEXT_OPTIONS:authentication,\
authentication-key,bfd,cost,database-filter,dead-interval,\
hello-interval,message-digest-key,mtu,mtu-ignore,network,\
passive-interface,priority,retransmit-interval,shutdown,\
transmit-delay',
'ospf_id_decimal_value': 'NO_VALIDATION:1-4294967295',
'ospf_id_ipaddress_value': 'IPV4Address:',
'lacp_options': 'TEXT_OPTIONS:port-priority,suspend-individual,\
timeout',
'port_priority': 'INTEGER_VALUE:1-65535',
'lldp_options': 'TEXT_OPTIONS:receive,tlv-select,transmit,\
trap-notification',
'lldp_tlv_options': 'TEXT_OPTIONS:link-aggregation,\
mac-phy-status,management-address,max-frame-size,\
port-description,port-protocol-vlan,port-vlan,power-mdi,\
protocol-identity,system-capabilities,system-description,\
system-name,vid-management,vlan-name',
'load_interval_delay': 'INTEGER_VALUE:30-300',
'load_interval_counter': 'INTEGER_VALUE:1-3',
'mac_accessgroup_name': 'TEXT:',
'mac_address': 'TEXT:',
'microburst_threshold': 'NO_VALIDATION:1-4294967295',
'mtu_value': 'INTEGER_VALUE:64-9216',
'service_instance': 'NO_VALIDATION:1-4294967295',
'service_policy_options': 'TEXT_OPTIONS:copp-system-policy,input,\
output,type',
'service_policy_name': 'TEXT:',
'spanning_tree_options': 'TEXT_OPTIONS:bpdufilter,bpduguard,\
cost,disable,enable,guard,link-type,mst,port,port-priority,vlan',
'spanning_tree_cost': 'NO_VALIDATION:1-200000000',
'spanning_tree_interfacerange': 'INTEGER_VALUE_RANGE:1-3999',
'spanning_tree_portpriority': 'TEXT_OPTIONS:0,32,64,96,128,160,\
192,224',
'portchannel_ipv6_neighbor_mac': 'TEXT:',
'portchannel_ipv6_neighbor_address': 'IPV6Address:',
'portchannel_ipv6_linklocal': 'IPV6Address:',
'portchannel_ipv6_dhcp_vlan': 'INTEGER_VALUE:1-4094',
'portchannel_ipv6_dhcp_ethernet': 'TEXT:',
'portchannel_ipv6_dhcp': 'IPV6Address:',
'portchannel_ipv6_address': 'IPV6Address:',
'portchannel_ipv6_options': 'TEXT_OPTIONS:address,dhcp,\
link-local,nd,neighbor',
'interface_speed': 'TEXT_OPTIONS:1000,10000,100000,25000,40000,50000,auto',
'stormcontrol_options': 'TEXT_OPTIONS:broadcast,multicast,\
unicast',
'stormcontrol_level': 'FLOAT:',
'portchannel_dot1q_tag': 'TEXT_OPTIONS:disable,enable,\
egress-only',
'vrrp_id': 'INTEGER_VALUE:1-255',
}
NE10032 = {
'vlan_id': 'INTEGER_VALUE:1-3999',
'vlan_id_range': 'INTEGER_VALUE_RANGE:1-3999',
'vlan_name': 'TEXT:',
'vlan_flood': 'TEXT_OPTIONS:ipv4,ipv6',
'vlan_state': 'TEXT_OPTIONS:active,suspend',
'vlan_last_member_query_interval': 'INTEGER_VALUE:1-25',
'vlan_querier': 'IPV4Address:',
'vlan_querier_timeout': 'INTEGER_VALUE:1-65535',
'vlan_query_interval': 'INTEGER_VALUE:1-18000',
'vlan_query_max_response_time': 'INTEGER_VALUE:1-25',
'vlan_report_suppression': 'INTEGER_VALUE:1-25',
'vlan_robustness_variable': 'INTEGER_VALUE:1-7',
'vlan_startup_query_count': 'INTEGER_VALUE:1-10',
'vlan_startup_query_interval': 'INTEGER_VALUE:1-18000',
'vlan_snooping_version': 'INTEGER_VALUE:2-3',
'vlan_access_map_name': 'TEXT:',
'vlan_ethernet_interface': 'TEXT:',
'vlan_portagg_number': 'INTEGER_VALUE:1-4096',
'vlan_accessmap_action': 'TEXT_OPTIONS:drop,forward,redirect',
'vlan_dot1q_tag': 'MATCH_TEXT_OR_EMPTY:egress-only',
'vlan_filter_name': 'TEXT:',
'vlag_auto_recovery': 'INTEGER_VALUE:240-3600',
'vlag_config_consistency': 'TEXT_OPTIONS:disable,strict',
'vlag_instance': 'INTEGER_VALUE:1-64',
'vlag_port_aggregation': 'INTEGER_VALUE:1-4096',
'vlag_priority': 'INTEGER_VALUE:0-65535',
'vlag_startup_delay': 'INTEGER_VALUE:0-3600',
'vlag_tier_id': 'INTEGER_VALUE:1-512',
'vlag_hlthchk_options': 'TEXT_OPTIONS:keepalive-attempts,\
keepalive-interval,peer-ip,retry-interval',
'vlag_keepalive_attempts': 'INTEGER_VALUE:1-24',
'vlag_keepalive_interval': 'INTEGER_VALUE:2-300',
'vlag_retry_interval': 'INTEGER_VALUE:1-300',
'vlag_peerip': 'IPV4Address:',
'vlag_peerip_vrf': 'TEXT_OPTIONS:default,management',
'bgp_as_number': 'NO_VALIDATION:1-4294967295',
'bgp_address_family': 'TEXT_OPTIONS:ipv4,ipv6',
'bgp_bgp_local_count': 'INTEGER_VALUE:2-64',
'cluster_id_as_ip': 'IPV4Address:',
'cluster_id_as_number': 'NO_VALIDATION:1-4294967295',
'confederation_identifier': 'INTEGER_VALUE:1-65535',
'confederation_peers_as': 'INTEGER_VALUE:1-65535',
'stalepath_delay_value': 'INTEGER_VALUE:1-3600',
'maxas_limit_as': 'INTEGER_VALUE:1-2000',
'neighbor_ipaddress': 'IPV4Address:',
'neighbor_as': 'NO_VALIDATION:1-4294967295',
'router_id': 'IPV4Address:',
'bgp_keepalive_interval': 'INTEGER_VALUE:0-3600',
'bgp_holdtime': 'INTEGER_VALUE:0-3600',
'bgp_aggregate_prefix': 'IPV4AddressWithMask:',
'addrfamily_routemap_name': 'TEXT:',
'reachability_half_life': 'INTEGER_VALUE:1-45',
'start_reuse_route_value': 'INTEGER_VALUE:1-20000',
'start_suppress_route_value': 'INTEGER_VALUE:1-20000',
'max_duration_to_suppress_route': 'INTEGER_VALUE:1-255',
'unreachability_halftime_for_penalty': 'INTEGER_VALUE:1-45',
'distance_external_AS': 'INTEGER_VALUE:1-255',
'distance_internal_AS': 'INTEGER_VALUE:1-255',
'distance_local_routes': 'INTEGER_VALUE:1-255',
'maxpath_option': 'TEXT_OPTIONS:ebgp,ibgp',
'maxpath_numbers': 'INTEGER_VALUE:2-32',
'network_ip_prefix_with_mask': 'IPV4AddressWithMask:',
'network_ip_prefix_value': 'IPV4Address:',
'network_ip_prefix_mask': 'IPV4Address:',
'nexthop_critical_delay': 'NO_VALIDATION:1-4294967295',
'nexthop_noncritical_delay': 'NO_VALIDATION:1-4294967295',
'addrfamily_redistribute_option': 'TEXT_OPTIONS:direct,ospf,\
static',
'bgp_neighbor_af_occurances': 'INTEGER_VALUE:1-10',
'bgp_neighbor_af_filtername': 'TEXT:',
'bgp_neighbor_af_maxprefix': 'INTEGER_VALUE:1-15870',
'bgp_neighbor_af_prefixname': 'TEXT:',
'bgp_neighbor_af_routemap': 'TEXT:',
'bgp_neighbor_address_family': 'TEXT_OPTIONS:ipv4,ipv6',
'bgp_neighbor_connection_retrytime': 'INTEGER_VALUE:1-65535',
'bgp_neighbor_description': 'TEXT:',
'bgp_neighbor_maxhopcount': 'INTEGER_VALUE:1-255',
'bgp_neighbor_local_as': 'NO_VALIDATION:1-4294967295',
'bgp_neighbor_maxpeers': 'INTEGER_VALUE:1-96',
'bgp_neighbor_password': 'TEXT:',
'bgp_neighbor_timers_Keepalive': 'INTEGER_VALUE:0-3600',
'bgp_neighbor_timers_holdtime': 'INTEGER_VALUE:0-3600',
'bgp_neighbor_ttl_hops': 'INTEGER_VALUE:1-254',
'bgp_neighbor_update_options': 'TEXT_OPTIONS:ethernet,loopback,\
vlan',
'bgp_neighbor_update_ethernet': 'TEXT:',
'bgp_neighbor_update_loopback': 'INTEGER_VALUE:0-7',
'bgp_neighbor_update_vlan': 'INTEGER_VALUE:1-4094',
'bgp_neighbor_weight': 'INTEGER_VALUE:0-65535',
'ethernet_interface_value': 'INTEGER_VALUE:1-32',
'ethernet_interface_range': 'INTEGER_VALUE_RANGE:1-32',
'ethernet_interface_string': 'TEXT:',
'loopback_interface_value': 'INTEGER_VALUE:0-7',
'mgmt_interface_value': 'INTEGER_VALUE:0-0',
'vlan_interface_value': 'INTEGER_VALUE:1-4094',
'portchannel_interface_value': 'INTEGER_VALUE:1-4096',
'portchannel_interface_range': 'INTEGER_VALUE_RANGE:1-4096',
'portchannel_interface_string': 'TEXT:',
'aggregation_group_no': 'INTEGER_VALUE:1-4096',
'aggregation_group_mode': 'TEXT_OPTIONS:active,on,passive',
'bfd_options': 'TEXT_OPTIONS:authentication,echo,interval,ipv4,\
ipv6,neighbor',
'bfd_interval': 'INTEGER_VALUE:50-999',
'bfd_minrx': 'INTEGER_VALUE:50-999',
'bfd_multiplier': 'INTEGER_VALUE:3-50',
'bfd_ipv4_options': 'TEXT_OPTIONS:authentication,echo,interval',
'bfd_auth_options': 'TEXT_OPTIONS:keyed-md5,keyed-sha1,\
meticulous-keyed-md5,meticulous-keyed-sha1,simple',
'bfd_key_options': 'TEXT_OPTIONS:key-chain,key-id',
'bfd_key_chain': 'TEXT:',
'bfd_key_id': 'INTEGER_VALUE:0-255',
'bfd_key_name': 'TEXT:',
'bfd_neighbor_ip': 'TEXT:',
'bfd_neighbor_options': 'TEXT_OPTIONS:admin-down,multihop,\
non-persistent',
'bfd_access_vlan': 'INTEGER_VALUE:1-3999',
'bfd_bridgeport_mode': 'TEXT_OPTIONS:access,dot1q-tunnel,trunk',
'trunk_options': 'TEXT_OPTIONS:allowed,native',
'trunk_vlanid': 'INTEGER_VALUE:1-3999',
'portCh_description': 'TEXT:',
'duplex_option': 'TEXT_OPTIONS:auto,full,half',
'flowcontrol_options': 'TEXT_OPTIONS:receive,send',
'portchannel_ip_options': 'TEXT_OPTIONS:access-group,address,\
arp,dhcp,ospf,port,port-unreachable,redirects,router,\
unreachables',
'accessgroup_name': 'TEXT:',
'portchannel_ipv4': 'IPV4Address:',
'portchannel_ipv4_mask': 'TEXT:',
'arp_ipaddress': 'IPV4Address:',
'arp_macaddress': 'TEXT:',
'arp_timeout_value': 'INTEGER_VALUE:60-28800',
'relay_ipaddress': 'IPV4Address:',
'ip_ospf_options': 'TEXT_OPTIONS:authentication,\
authentication-key,bfd,cost,database-filter,dead-interval,\
hello-interval,message-digest-key,mtu,mtu-ignore,network,\
passive-interface,priority,retransmit-interval,shutdown,\
transmit-delay',
'ospf_id_decimal_value': 'NO_VALIDATION:1-4294967295',
'ospf_id_ipaddress_value': 'IPV4Address:',
'lacp_options': 'TEXT_OPTIONS:port-priority,suspend-individual,\
timeout',
'port_priority': 'INTEGER_VALUE:1-65535',
'lldp_options': 'TEXT_OPTIONS:receive,tlv-select,transmit,\
trap-notification',
'lldp_tlv_options': 'TEXT_OPTIONS:link-aggregation,\
mac-phy-status,management-address,max-frame-size,\
port-description,port-protocol-vlan,port-vlan,power-mdi,\
protocol-identity,system-capabilities,system-description,\
system-name,vid-management,vlan-name',
'load_interval_delay': 'INTEGER_VALUE:30-300',
'load_interval_counter': 'INTEGER_VALUE:1-3',
'mac_accessgroup_name': 'TEXT:',
'mac_address': 'TEXT:',
'microburst_threshold': 'NO_VALIDATION:1-4294967295',
'mtu_value': 'INTEGER_VALUE:64-9216',
'service_instance': 'NO_VALIDATION:1-4294967295',
'service_policy_options': 'TEXT_OPTIONS:copp-system-policy,input,\
output,type',
'service_policy_name': 'TEXT:',
'spanning_tree_options': 'TEXT_OPTIONS:bpdufilter,bpduguard,\
cost,disable,enable,guard,link-type,mst,port,port-priority,vlan',
'spanning_tree_cost': 'NO_VALIDATION:1-200000000',
'spanning_tree_interfacerange': 'INTEGER_VALUE_RANGE:1-3999',
'spanning_tree_portpriority': 'TEXT_OPTIONS:0,32,64,96,128,160,\
192,224',
'portchannel_ipv6_neighbor_mac': 'TEXT:',
'portchannel_ipv6_neighbor_address': 'IPV6Address:',
'portchannel_ipv6_linklocal': 'IPV6Address:',
'portchannel_ipv6_dhcp_vlan': 'INTEGER_VALUE:1-4094',
'portchannel_ipv6_dhcp_ethernet': 'TEXT:',
'portchannel_ipv6_dhcp': 'IPV6Address:',
'portchannel_ipv6_address': 'IPV6Address:',
'portchannel_ipv6_options': 'TEXT_OPTIONS:address,dhcp,\
link-local,nd,neighbor',
'interface_speed': 'TEXT_OPTIONS:10000,100000,25000,40000,50000,auto',
'stormcontrol_options': 'TEXT_OPTIONS:broadcast,multicast,\
unicast',
'stormcontrol_level': 'FLOAT:',
'portchannel_dot1q_tag': 'TEXT_OPTIONS:disable,enable,\
egress-only',
'vrrp_id': 'INTEGER_VALUE:1-255',
}
g8272_cnos = {'vlan_id': 'INTEGER_VALUE:1-3999',
'vlan_id_range': 'INTEGER_VALUE_RANGE:1-3999',
'vlan_name': 'TEXT:',
'vlan_flood': 'TEXT_OPTIONS:ipv4,ipv6',
'vlan_state': 'TEXT_OPTIONS:active,suspend',
'vlan_last_member_query_interval': 'INTEGER_VALUE:1-25',
'vlan_querier': 'IPV4Address:',
'vlan_querier_timeout': 'INTEGER_VALUE:1-65535',
'vlan_query_interval': 'INTEGER_VALUE:1-18000',
'vlan_query_max_response_time': 'INTEGER_VALUE:1-25',
'vlan_report_suppression': 'INTEGER_VALUE:1-25',
'vlan_robustness_variable': 'INTEGER_VALUE:1-7',
'vlan_startup_query_count': 'INTEGER_VALUE:1-10',
'vlan_startup_query_interval': 'INTEGER_VALUE:1-18000',
'vlan_snooping_version': 'INTEGER_VALUE:2-3',
'vlan_access_map_name': 'TEXT:',
'vlan_ethernet_interface': 'TEXT:',
'vlan_portagg_number': 'INTEGER_VALUE:1-4096',
'vlan_accessmap_action': 'TEXT_OPTIONS:drop,forward,redirect',
'vlan_dot1q_tag': 'MATCH_TEXT_OR_EMPTY:egress-only',
'vlan_filter_name': 'TEXT:',
'vlag_auto_recovery': 'INTEGER_VALUE:240-3600',
'vlag_config_consistency': 'TEXT_OPTIONS:disable,strict',
'vlag_instance': 'INTEGER_VALUE:1-64',
'vlag_port_aggregation': 'INTEGER_VALUE:1-4096',
'vlag_priority': 'INTEGER_VALUE:0-65535',
'vlag_startup_delay': 'INTEGER_VALUE:0-3600',
'vlag_tier_id': 'INTEGER_VALUE:1-512',
'vlag_hlthchk_options': 'TEXT_OPTIONS:keepalive-attempts,\
keepalive-interval,peer-ip,retry-interval',
'vlag_keepalive_attempts': 'INTEGER_VALUE:1-24',
'vlag_keepalive_interval': 'INTEGER_VALUE:2-300',
'vlag_retry_interval': 'INTEGER_VALUE:1-300',
'vlag_peerip': 'IPV4Address:',
'vlag_peerip_vrf': 'TEXT_OPTIONS:default,management',
'bgp_as_number': 'NO_VALIDATION:1-4294967295',
'bgp_address_family': 'TEXT_OPTIONS:ipv4,ipv6',
'bgp_bgp_local_count': 'INTEGER_VALUE:2-64',
'cluster_id_as_ip': 'IPV4Address:',
'cluster_id_as_number': 'NO_VALIDATION:1-4294967295',
'confederation_identifier': 'INTEGER_VALUE:1-65535',
'confederation_peers_as': 'INTEGER_VALUE:1-65535',
'stalepath_delay_value': 'INTEGER_VALUE:1-3600',
'maxas_limit_as': 'INTEGER_VALUE:1-2000',
'neighbor_ipaddress': 'IPV4Address:',
'neighbor_as': 'NO_VALIDATION:1-4294967295',
'router_id': 'IPV4Address:',
'bgp_keepalive_interval': 'INTEGER_VALUE:0-3600',
'bgp_holdtime': 'INTEGER_VALUE:0-3600',
'bgp_aggregate_prefix': 'IPV4AddressWithMask:',
'addrfamily_routemap_name': 'TEXT:',
'reachability_half_life': 'INTEGER_VALUE:1-45',
'start_reuse_route_value': 'INTEGER_VALUE:1-20000',
'start_suppress_route_value': 'INTEGER_VALUE:1-20000',
'max_duration_to_suppress_route': 'INTEGER_VALUE:1-255',
'unreachability_halftime_for_penalty': 'INTEGER_VALUE:1-45',
'distance_external_AS': 'INTEGER_VALUE:1-255',
'distance_internal_AS': 'INTEGER_VALUE:1-255',
'distance_local_routes': 'INTEGER_VALUE:1-255',
'maxpath_option': 'TEXT_OPTIONS:ebgp,ibgp',
'maxpath_numbers': 'INTEGER_VALUE:2-32',
'network_ip_prefix_with_mask': 'IPV4AddressWithMask:',
'network_ip_prefix_value': 'IPV4Address:',
'network_ip_prefix_mask': 'IPV4Address:',
'nexthop_critical_delay': 'NO_VALIDATION:1-4294967295',
'nexthop_noncritical_delay': 'NO_VALIDATION:1-4294967295',
'addrfamily_redistribute_option': 'TEXT_OPTIONS:direct,ospf,\
static',
'bgp_neighbor_af_occurances': 'INTEGER_VALUE:1-10',
'bgp_neighbor_af_filtername': 'TEXT:',
'bgp_neighbor_af_maxprefix': 'INTEGER_VALUE:1-15870',
'bgp_neighbor_af_prefixname': 'TEXT:',
'bgp_neighbor_af_routemap': 'TEXT:',
'bgp_neighbor_address_family': 'TEXT_OPTIONS:ipv4,ipv6',
'bgp_neighbor_connection_retrytime': 'INTEGER_VALUE:1-65535',
'bgp_neighbor_description': 'TEXT:',
'bgp_neighbor_maxhopcount': 'INTEGER_VALUE:1-255',
'bgp_neighbor_local_as': 'NO_VALIDATION:1-4294967295',
'bgp_neighbor_maxpeers': 'INTEGER_VALUE:1-96',
'bgp_neighbor_password': 'TEXT:',
'bgp_neighbor_timers_Keepalive': 'INTEGER_VALUE:0-3600',
'bgp_neighbor_timers_holdtime': 'INTEGER_VALUE:0-3600',
'bgp_neighbor_ttl_hops': 'INTEGER_VALUE:1-254',
'bgp_neighbor_update_options': 'TEXT_OPTIONS:ethernet,loopback,\
vlan',
'bgp_neighbor_update_ethernet': 'TEXT:',
'bgp_neighbor_update_loopback': 'INTEGER_VALUE:0-7',
'bgp_neighbor_update_vlan': 'INTEGER_VALUE:1-4094',
'bgp_neighbor_weight': 'INTEGER_VALUE:0-65535',
'ethernet_interface_value': 'INTEGER_VALUE:1-54',
'ethernet_interface_range': 'INTEGER_VALUE_RANGE:1-54',
'ethernet_interface_string': 'TEXT:',
'loopback_interface_value': 'INTEGER_VALUE:0-7',
'mgmt_interface_value': 'INTEGER_VALUE:0-0',
'vlan_interface_value': 'INTEGER_VALUE:1-4094',
'portchannel_interface_value': 'INTEGER_VALUE:1-4096',
'portchannel_interface_range': 'INTEGER_VALUE_RANGE:1-4096',
'portchannel_interface_string': 'TEXT:',
'aggregation_group_no': 'INTEGER_VALUE:1-4096',
'aggregation_group_mode': 'TEXT_OPTIONS:active,on,passive',
'bfd_options': 'TEXT_OPTIONS:authentication,echo,interval,ipv4,\
ipv6,neighbor',
'bfd_interval': 'INTEGER_VALUE:50-999',
'bfd_minrx': 'INTEGER_VALUE:50-999',
'bfd_multiplier': 'INTEGER_VALUE:3-50',
'bfd_ipv4_options': 'TEXT_OPTIONS:authentication,echo,interval',
'bfd_auth_options': 'TEXT_OPTIONS:keyed-md5,keyed-sha1,\
meticulous-keyed-md5,meticulous-keyed-sha1,simple',
'bfd_key_options': 'TEXT_OPTIONS:key-chain,key-id',
'bfd_key_chain': 'TEXT:',
'bfd_key_id': 'INTEGER_VALUE:0-255',
'bfd_key_name': 'TEXT:',
'bfd_neighbor_ip': 'TEXT:',
'bfd_neighbor_options': 'TEXT_OPTIONS:admin-down,multihop,\
non-persistent',
'bfd_access_vlan': 'INTEGER_VALUE:1-3999',
'bfd_bridgeport_mode': 'TEXT_OPTIONS:access,dot1q-tunnel,trunk',
'trunk_options': 'TEXT_OPTIONS:allowed,native',
'trunk_vlanid': 'INTEGER_VALUE:1-3999',
'portCh_description': 'TEXT:',
'duplex_option': 'TEXT_OPTIONS:auto,full,half',
'flowcontrol_options': 'TEXT_OPTIONS:receive,send',
'portchannel_ip_options': 'TEXT_OPTIONS:access-group,address,\
arp,dhcp,ospf,port,port-unreachable,redirects,router,\
unreachables',
'accessgroup_name': 'TEXT:',
'portchannel_ipv4': 'IPV4Address:',
'portchannel_ipv4_mask': 'TEXT:',
'arp_ipaddress': 'IPV4Address:',
'arp_macaddress': 'TEXT:',
'arp_timeout_value': 'INTEGER_VALUE:60-28800',
'relay_ipaddress': 'IPV4Address:',
'ip_ospf_options': 'TEXT_OPTIONS:authentication,\
authentication-key,bfd,cost,database-filter,dead-interval,\
hello-interval,message-digest-key,mtu,mtu-ignore,network,\
passive-interface,priority,retransmit-interval,shutdown,\
transmit-delay',
'ospf_id_decimal_value': 'NO_VALIDATION:1-4294967295',
'ospf_id_ipaddress_value': 'IPV4Address:',
'lacp_options': 'TEXT_OPTIONS:port-priority,suspend-individual,\
timeout',
'port_priority': 'INTEGER_VALUE:1-65535',
'lldp_options': 'TEXT_OPTIONS:receive,tlv-select,transmit,\
trap-notification',
'lldp_tlv_options': 'TEXT_OPTIONS:link-aggregation,\
mac-phy-status,management-address,max-frame-size,\
port-description,port-protocol-vlan,port-vlan,power-mdi,\
protocol-identity,system-capabilities,system-description,\
system-name,vid-management,vlan-name',
'load_interval_delay': 'INTEGER_VALUE:30-300',
'load_interval_counter': 'INTEGER_VALUE:1-3',
'mac_accessgroup_name': 'TEXT:',
'mac_address': 'TEXT:',
'microburst_threshold': 'NO_VALIDATION:1-4294967295',
'mtu_value': 'INTEGER_VALUE:64-9216',
'service_instance': 'NO_VALIDATION:1-4294967295',
'service_policy_options': 'TEXT_OPTIONS:copp-system-policy,input,\
output,type',
'service_policy_name': 'TEXT:',
'spanning_tree_options': 'TEXT_OPTIONS:bpdufilter,bpduguard,\
cost,disable,enable,guard,link-type,mst,port,port-priority,vlan',
'spanning_tree_cost': 'NO_VALIDATION:1-200000000',
'spanning_tree_interfacerange': 'INTEGER_VALUE_RANGE:1-3999',
'spanning_tree_portpriority': 'TEXT_OPTIONS:0,32,64,96,128,160,\
192,224',
'portchannel_ipv6_neighbor_mac': 'TEXT:',
'portchannel_ipv6_neighbor_address': 'IPV6Address:',
'portchannel_ipv6_linklocal': 'IPV6Address:',
'portchannel_ipv6_dhcp_vlan': 'INTEGER_VALUE:1-4094',
'portchannel_ipv6_dhcp_ethernet': 'TEXT:',
'portchannel_ipv6_dhcp': 'IPV6Address:',
'portchannel_ipv6_address': 'IPV6Address:',
'portchannel_ipv6_options': 'TEXT_OPTIONS:address,dhcp,\
link-local,nd,neighbor',
'interface_speed': 'TEXT_OPTIONS:1000,10000,40000,auto',
'stormcontrol_options': 'TEXT_OPTIONS:broadcast,multicast,\
unicast',
'stormcontrol_level': 'FLOAT:',
'portchannel_dot1q_tag': 'TEXT_OPTIONS:disable,enable,\
egress-only',
'vrrp_id': 'INTEGER_VALUE:1-255',
}
g8296_cnos = {'vlan_id': 'INTEGER_VALUE:1-3999',
'vlan_id_range': 'INTEGER_VALUE_RANGE:1-3999',
'vlan_name': 'TEXT:',
'vlan_flood': 'TEXT_OPTIONS:ipv4,ipv6',
'vlan_state': 'TEXT_OPTIONS:active,suspend',
'vlan_last_member_query_interval': 'INTEGER_VALUE:1-25',
'vlan_querier': 'IPV4Address:',
'vlan_querier_timeout': 'INTEGER_VALUE:1-65535',
'vlan_query_interval': 'INTEGER_VALUE:1-18000',
'vlan_query_max_response_time': 'INTEGER_VALUE:1-25',
'vlan_report_suppression': 'INTEGER_VALUE:1-25',
'vlan_robustness_variable': 'INTEGER_VALUE:1-7',
'vlan_startup_query_count': 'INTEGER_VALUE:1-10',
'vlan_startup_query_interval': 'INTEGER_VALUE:1-18000',
'vlan_snooping_version': 'INTEGER_VALUE:2-3',
'vlan_access_map_name': 'TEXT:',
'vlan_ethernet_interface': 'TEXT:',
'vlan_portagg_number': 'INTEGER_VALUE:1-4096',
'vlan_accessmap_action': 'TEXT_OPTIONS:drop,forward,redirect',
'vlan_dot1q_tag': 'MATCH_TEXT_OR_EMPTY:egress-only',
'vlan_filter_name': 'TEXT:',
'vlag_auto_recovery': 'INTEGER_VALUE:240-3600',
'vlag_config_consistency': 'TEXT_OPTIONS:disable,strict',
'vlag_instance': 'INTEGER_VALUE:1-128',
'vlag_port_aggregation': 'INTEGER_VALUE:1-4096',
'vlag_priority': 'INTEGER_VALUE:0-65535',
'vlag_startup_delay': 'INTEGER_VALUE:0-3600',
'vlag_tier_id': 'INTEGER_VALUE:1-512',
'vlag_hlthchk_options': 'TEXT_OPTIONS:keepalive-attempts,\
keepalive-interval,peer-ip,retry-interval',
'vlag_keepalive_attempts': 'INTEGER_VALUE:1-24',
'vlag_keepalive_interval': 'INTEGER_VALUE:2-300',
'vlag_retry_interval': 'INTEGER_VALUE:1-300',
'vlag_peerip': 'IPV4Address:',
'vlag_peerip_vrf': 'TEXT_OPTIONS:default,management',
'bgp_as_number': 'NO_VALIDATION:1-4294967295',
'bgp_address_family': 'TEXT_OPTIONS:ipv4,ipv6',
'bgp_bgp_local_count': 'INTEGER_VALUE:2-64',
'cluster_id_as_ip': 'IPV4Address:',
'cluster_id_as_number': 'NO_VALIDATION:1-4294967295',
'confederation_identifier': 'INTEGER_VALUE:1-65535',
'confederation_peers_as': 'INTEGER_VALUE:1-65535',
'stalepath_delay_value': 'INTEGER_VALUE:1-3600',
'maxas_limit_as': 'INTEGER_VALUE:1-2000',
'neighbor_ipaddress': 'IPV4Address:',
'neighbor_as': 'NO_VALIDATION:1-4294967295',
'router_id': 'IPV4Address:',
'bgp_keepalive_interval': 'INTEGER_VALUE:0-3600',
'bgp_holdtime': 'INTEGER_VALUE:0-3600',
'bgp_aggregate_prefix': 'IPV4AddressWithMask:',
'addrfamily_routemap_name': 'TEXT:',
'reachability_half_life': 'INTEGER_VALUE:1-45',
'start_reuse_route_value': 'INTEGER_VALUE:1-20000',
'start_suppress_route_value': 'INTEGER_VALUE:1-20000',
'max_duration_to_suppress_route': 'INTEGER_VALUE:1-255',
'unreachability_halftime_for_penalty': 'INTEGER_VALUE:1-45',
'distance_external_AS': 'INTEGER_VALUE:1-255',
'distance_internal_AS': 'INTEGER_VALUE:1-255',
'distance_local_routes': 'INTEGER_VALUE:1-255',
'maxpath_option': 'TEXT_OPTIONS:ebgp,ibgp',
'maxpath_numbers': 'INTEGER_VALUE:2-32',
'network_ip_prefix_with_mask': 'IPV4AddressWithMask:',
'network_ip_prefix_value': 'IPV4Address:',
'network_ip_prefix_mask': 'IPV4Address:',
'nexthop_critical_delay': 'NO_VALIDATION:1-4294967295',
'nexthop_noncritical_delay': 'NO_VALIDATION:1-4294967295',
'addrfamily_redistribute_option': 'TEXT_OPTIONS:direct,ospf,\
static',
'bgp_neighbor_af_occurances': 'INTEGER_VALUE:1-10',
'bgp_neighbor_af_filtername': 'TEXT:',
'bgp_neighbor_af_maxprefix': 'INTEGER_VALUE:1-15870',
'bgp_neighbor_af_prefixname': 'TEXT:',
'bgp_neighbor_af_routemap': 'TEXT:',
'bgp_neighbor_address_family': 'TEXT_OPTIONS:ipv4,ipv6',
'bgp_neighbor_connection_retrytime': 'INTEGER_VALUE:1-65535',
'bgp_neighbor_description': 'TEXT:',
'bgp_neighbor_maxhopcount': 'INTEGER_VALUE:1-255',
'bgp_neighbor_local_as': 'NO_VALIDATION:1-4294967295',
'bgp_neighbor_maxpeers': 'INTEGER_VALUE:1-96',
'bgp_neighbor_password': 'TEXT:',
'bgp_neighbor_timers_Keepalive': 'INTEGER_VALUE:0-3600',
'bgp_neighbor_timers_holdtime': 'INTEGER_VALUE:0-3600',
'bgp_neighbor_ttl_hops': 'INTEGER_VALUE:1-254',
'bgp_neighbor_update_options': 'TEXT_OPTIONS:ethernet,loopback,\
vlan',
'bgp_neighbor_update_ethernet': 'TEXT:',
'bgp_neighbor_update_loopback': 'INTEGER_VALUE:0-7',
'bgp_neighbor_update_vlan': 'INTEGER_VALUE:1-4094',
'bgp_neighbor_weight': 'INTEGER_VALUE:0-65535',
'ethernet_interface_value': 'INTEGER_VALUE:1-96',
'ethernet_interface_range': 'INTEGER_VALUE_RANGE:1-96',
'ethernet_interface_string': 'TEXT:',
'loopback_interface_value': 'INTEGER_VALUE:0-7',
'mgmt_interface_value': 'INTEGER_VALUE:0-0',
'vlan_interface_value': 'INTEGER_VALUE:1-4094',
'portchannel_interface_value': 'INTEGER_VALUE:1-4096',
'portchannel_interface_range': 'INTEGER_VALUE_RANGE:1-4096',
'portchannel_interface_string': 'TEXT:',
'aggregation_group_no': 'INTEGER_VALUE:1-4096',
'aggregation_group_mode': 'TEXT_OPTIONS:active,on,passive',
'bfd_options': 'TEXT_OPTIONS:authentication,echo,interval,ipv4,\
ipv6,neighbor',
'bfd_interval': 'INTEGER_VALUE:50-999',
'bfd_minrx': 'INTEGER_VALUE:50-999',
'bfd_multiplier': 'INTEGER_VALUE:3-50',
'bfd_ipv4_options': 'TEXT_OPTIONS:authentication,echo,interval',
'bfd_auth_options': 'TEXT_OPTIONS:keyed-md5,keyed-sha1,\
meticulous-keyed-md5,meticulous-keyed-sha1,simple',
'bfd_key_options': 'TEXT_OPTIONS:key-chain,key-id',
'bfd_key_chain': 'TEXT:',
'bfd_key_id': 'INTEGER_VALUE:0-255',
'bfd_key_name': 'TEXT:',
'bfd_neighbor_ip': 'TEXT:',
'bfd_neighbor_options': 'TEXT_OPTIONS:admin-down,multihop,\
non-persistent',
'bfd_access_vlan': 'INTEGER_VALUE:1-3999',
'bfd_bridgeport_mode': 'TEXT_OPTIONS:access,dot1q-tunnel,trunk',
'trunk_options': 'TEXT_OPTIONS:allowed,native',
'trunk_vlanid': 'INTEGER_VALUE:1-3999',
'portCh_description': 'TEXT:',
'duplex_option': 'TEXT_OPTIONS:auto,full,half',
'flowcontrol_options': 'TEXT_OPTIONS:receive,send',
'portchannel_ip_options': 'TEXT_OPTIONS:access-group,address,\
arp,dhcp,ospf,port,port-unreachable,redirects,router,\
unreachables',
'accessgroup_name': 'TEXT:',
'portchannel_ipv4': 'IPV4Address:',
'portchannel_ipv4_mask': 'TEXT:',
'arp_ipaddress': 'IPV4Address:',
'arp_macaddress': 'TEXT:',
'arp_timeout_value': 'INTEGER_VALUE:60-28800',
'relay_ipaddress': 'IPV4Address:',
'ip_ospf_options': 'TEXT_OPTIONS:authentication,\
authentication-key,bfd,cost,database-filter,dead-interval,\
hello-interval,message-digest-key,mtu,mtu-ignore,network,\
passive-interface,priority,retransmit-interval,shutdown,\
transmit-delay',
'ospf_id_decimal_value': 'NO_VALIDATION:1-4294967295',
'ospf_id_ipaddress_value': 'IPV4Address:',
'lacp_options': 'TEXT_OPTIONS:port-priority,suspend-individual,\
timeout',
'port_priority': 'INTEGER_VALUE:1-65535',
'lldp_options': 'TEXT_OPTIONS:receive,tlv-select,transmit,\
trap-notification',
'lldp_tlv_options': 'TEXT_OPTIONS:link-aggregation,\
mac-phy-status,management-address,max-frame-size,\
port-description,port-protocol-vlan,port-vlan,power-mdi,\
protocol-identity,system-capabilities,system-description,\
system-name,vid-management,vlan-name',
'load_interval_delay': 'INTEGER_VALUE:30-300',
'load_interval_counter': 'INTEGER_VALUE:1-3',
'mac_accessgroup_name': 'TEXT:',
'mac_address': 'TEXT:',
'microburst_threshold': 'NO_VALIDATION:1-4294967295',
'mtu_value': 'INTEGER_VALUE:64-9216',
'service_instance': 'NO_VALIDATION:1-4294967295',
'service_policy_options': 'TEXT_OPTIONS:copp-system-policy,\
input,output,type',
'service_policy_name': 'TEXT:',
'spanning_tree_options': 'TEXT_OPTIONS:bpdufilter,bpduguard,\
cost,disable,enable,guard,link-type,mst,port,port-priority,vlan',
'spanning_tree_cost': 'NO_VALIDATION:1-200000000',
'spanning_tree_interfacerange': 'INTEGER_VALUE_RANGE:1-3999',
'spanning_tree_portpriority': 'TEXT_OPTIONS:0,32,64,96,128,160,\
192,224',
'portchannel_ipv6_neighbor_mac': 'TEXT:',
'portchannel_ipv6_neighbor_address': 'IPV6Address:',
'portchannel_ipv6_linklocal': 'IPV6Address:',
'portchannel_ipv6_dhcp_vlan': 'INTEGER_VALUE:1-4094',
'portchannel_ipv6_dhcp_ethernet': 'TEXT:',
'portchannel_ipv6_dhcp': 'IPV6Address:',
'portchannel_ipv6_address': 'IPV6Address:',
'portchannel_ipv6_options': 'TEXT_OPTIONS:address,dhcp,\
link-local,nd,neighbor',
'interface_speed': 'TEXT_OPTIONS:1000,10000,40000,auto',
'stormcontrol_options': 'TEXT_OPTIONS:broadcast,multicast,\
unicast',
'stormcontrol_level': 'FLOAT:',
'portchannel_dot1q_tag': 'TEXT_OPTIONS:disable,enable,\
egress-only',
'vrrp_id': 'INTEGER_VALUE:1-255',
}
g8332_cnos = {'vlan_id': 'INTEGER_VALUE:1-3999',
'vlan_id_range': 'INTEGER_VALUE_RANGE:1-3999',
'vlan_name': 'TEXT:',
'vlan_flood': 'TEXT_OPTIONS:ipv4,ipv6',
'vlan_state': 'TEXT_OPTIONS:active,suspend',
'vlan_last_member_query_interval': 'INTEGER_VALUE:1-25',
'vlan_querier': 'IPV4Address:',
'vlan_querier_timeout': 'INTEGER_VALUE:1-65535',
'vlan_query_interval': 'INTEGER_VALUE:1-18000',
'vlan_query_max_response_time': 'INTEGER_VALUE:1-25',
'vlan_report_suppression': 'INTEGER_VALUE:1-25',
'vlan_robustness_variable': 'INTEGER_VALUE:1-7',
'vlan_startup_query_count': 'INTEGER_VALUE:1-10',
'vlan_startup_query_interval': 'INTEGER_VALUE:1-18000',
'vlan_snooping_version': 'INTEGER_VALUE:2-3',
'vlan_access_map_name': 'TEXT:',
'vlan_ethernet_interface': 'TEXT:',
'vlan_portagg_number': 'INTEGER_VALUE:1-4096',
'vlan_accessmap_action': 'TEXT_OPTIONS:drop,forward,redirect',
'vlan_dot1q_tag': 'MATCH_TEXT_OR_EMPTY:egress-only',
'vlan_filter_name': 'TEXT:',
'vlag_auto_recovery': 'INTEGER_VALUE:240-3600',
'vlag_config_consistency': 'TEXT_OPTIONS:disable,strict',
'vlag_instance': 'INTEGER_VALUE:1-128',
'vlag_port_aggregation': 'INTEGER_VALUE:1-4096',
'vlag_priority': 'INTEGER_VALUE:0-65535',
'vlag_startup_delay': 'INTEGER_VALUE:0-3600',
'vlag_tier_id': 'INTEGER_VALUE:1-512',
'vlag_hlthchk_options': 'TEXT_OPTIONS:keepalive-attempts,\
keepalive-interval,peer-ip,retry-interval',
'vlag_keepalive_attempts': 'INTEGER_VALUE:1-24',
'vlag_keepalive_interval': 'INTEGER_VALUE:2-300',
'vlag_retry_interval': 'INTEGER_VALUE:1-300',
'vlag_peerip': 'IPV4Address:',
'vlag_peerip_vrf': 'TEXT_OPTIONS:default,management',
'bgp_as_number': 'NO_VALIDATION:1-4294967295',
'bgp_address_family': 'TEXT_OPTIONS:ipv4,ipv6',
'bgp_bgp_local_count': 'INTEGER_VALUE:2-64',
'cluster_id_as_ip': 'IPV4Address:',
'cluster_id_as_number': 'NO_VALIDATION:1-4294967295',
'confederation_identifier': 'INTEGER_VALUE:1-65535',
'confederation_peers_as': 'INTEGER_VALUE:1-65535',
'stalepath_delay_value': 'INTEGER_VALUE:1-3600',
'maxas_limit_as': 'INTEGER_VALUE:1-2000',
'neighbor_ipaddress': 'IPV4Address:',
'neighbor_as': 'NO_VALIDATION:1-4294967295',
'router_id': 'IPV4Address:',
'bgp_keepalive_interval': 'INTEGER_VALUE:0-3600',
'bgp_holdtime': 'INTEGER_VALUE:0-3600',
'bgp_aggregate_prefix': 'IPV4AddressWithMask:',
'addrfamily_routemap_name': 'TEXT:',
'reachability_half_life': 'INTEGER_VALUE:1-45',
'start_reuse_route_value': 'INTEGER_VALUE:1-20000',
'start_suppress_route_value': 'INTEGER_VALUE:1-20000',
'max_duration_to_suppress_route': 'INTEGER_VALUE:1-255',
'unreachability_halftime_for_penalty': 'INTEGER_VALUE:1-45',
'distance_external_AS': 'INTEGER_VALUE:1-255',
'distance_internal_AS': 'INTEGER_VALUE:1-255',
'distance_local_routes': 'INTEGER_VALUE:1-255',
'maxpath_option': 'TEXT_OPTIONS:ebgp,ibgp',
'maxpath_numbers': 'INTEGER_VALUE:2-32',
'network_ip_prefix_with_mask': 'IPV4AddressWithMask:',
'network_ip_prefix_value': 'IPV4Address:',
'network_ip_prefix_mask': 'IPV4Address:',
'nexthop_critical_delay': 'NO_VALIDATION:1-4294967295',
'nexthop_noncritical_delay': 'NO_VALIDATION:1-4294967295',
'addrfamily_redistribute_option': 'TEXT_OPTIONS:direct,ospf,\
static',
'bgp_neighbor_af_occurances': 'INTEGER_VALUE:1-10',
'bgp_neighbor_af_filtername': 'TEXT:',
'bgp_neighbor_af_maxprefix': 'INTEGER_VALUE:1-15870',
'bgp_neighbor_af_prefixname': 'TEXT:',
'bgp_neighbor_af_routemap': 'TEXT:',
'bgp_neighbor_address_family': 'TEXT_OPTIONS:ipv4,ipv6',
'bgp_neighbor_connection_retrytime': 'INTEGER_VALUE:1-65535',
'bgp_neighbor_description': 'TEXT:',
'bgp_neighbor_maxhopcount': 'INTEGER_VALUE:1-255',
'bgp_neighbor_local_as': 'NO_VALIDATION:1-4294967295',
'bgp_neighbor_maxpeers': 'INTEGER_VALUE:1-96',
'bgp_neighbor_password': 'TEXT:',
'bgp_neighbor_timers_Keepalive': 'INTEGER_VALUE:0-3600',
'bgp_neighbor_timers_holdtime': 'INTEGER_VALUE:0-3600',
'bgp_neighbor_ttl_hops': 'INTEGER_VALUE:1-254',
'bgp_neighbor_update_options': 'TEXT_OPTIONS:ethernet,loopback,\
vlan',
'bgp_neighbor_update_ethernet': 'TEXT:',
'bgp_neighbor_update_loopback': 'INTEGER_VALUE:0-7',
'bgp_neighbor_update_vlan': 'INTEGER_VALUE:1-4094',
'bgp_neighbor_weight': 'INTEGER_VALUE:0-65535',
'ethernet_interface_value': 'INTEGER_VALUE:1-32',
'ethernet_interface_range': 'INTEGER_VALUE_RANGE:1-32',
'ethernet_interface_string': 'TEXT:',
'loopback_interface_value': 'INTEGER_VALUE:0-7',
'mgmt_interface_value': 'INTEGER_VALUE:0-0',
'vlan_interface_value': 'INTEGER_VALUE:1-4094',
'portchannel_interface_value': 'INTEGER_VALUE:1-4096',
'portchannel_interface_range': 'INTEGER_VALUE_RANGE:1-4096',
'portchannel_interface_string': 'TEXT:',
'aggregation_group_no': 'INTEGER_VALUE:1-4096',
'aggregation_group_mode': 'TEXT_OPTIONS:active,on,passive',
'bfd_options': 'TEXT_OPTIONS:authentication,echo,interval,ipv4,\
ipv6,neighbor',
'bfd_interval': 'INTEGER_VALUE:50-999',
'bfd_minrx': 'INTEGER_VALUE:50-999',
'bfd_multiplier': 'INTEGER_VALUE:3-50',
'bfd_ipv4_options': 'TEXT_OPTIONS:authentication,echo,interval',
'bfd_auth_options': 'TEXT_OPTIONS:keyed-md5,keyed-sha1,\
meticulous-keyed-md5,meticulous-keyed-sha1,simple',
'bfd_key_options': 'TEXT_OPTIONS:key-chain,key-id',
'bfd_key_chain': 'TEXT:',
'bfd_key_id': 'INTEGER_VALUE:0-255',
'bfd_key_name': 'TEXT:',
'bfd_neighbor_ip': 'TEXT:',
'bfd_neighbor_options': 'TEXT_OPTIONS:admin-down,multihop,\
non-persistent',
'bfd_access_vlan': 'INTEGER_VALUE:1-3999',
'bfd_bridgeport_mode': 'TEXT_OPTIONS:access,dot1q-tunnel,trunk',
'trunk_options': 'TEXT_OPTIONS:allowed,native',
'trunk_vlanid': 'INTEGER_VALUE:1-3999',
'portCh_description': 'TEXT:',
'duplex_option': 'TEXT_OPTIONS:auto,full,half',
'flowcontrol_options': 'TEXT_OPTIONS:receive,send',
'portchannel_ip_options': 'TEXT_OPTIONS:access-group,address,arp,\
dhcp,ospf,port,port-unreachable,redirects,router,unreachables',
'accessgroup_name': 'TEXT:',
'portchannel_ipv4': 'IPV4Address:',
'portchannel_ipv4_mask': 'TEXT:',
'arp_ipaddress': 'IPV4Address:',
'arp_macaddress': 'TEXT:',
'arp_timeout_value': 'INTEGER_VALUE:60-28800',
'relay_ipaddress': 'IPV4Address:',
'ip_ospf_options': 'TEXT_OPTIONS:authentication,\
authentication-key,bfd,cost,database-filter,dead-interval,\
hello-interval,message-digest-key,mtu,mtu-ignore,network,\
passive-interface,priority,retransmit-interval,shutdown,\
transmit-delay',
'ospf_id_decimal_value': 'NO_VALIDATION:1-4294967295',
'ospf_id_ipaddress_value': 'IPV4Address:',
'lacp_options': 'TEXT_OPTIONS:port-priority,suspend-individual,\
timeout',
'port_priority': 'INTEGER_VALUE:1-65535',
'lldp_options': 'TEXT_OPTIONS:receive,tlv-select,transmit,\
trap-notification',
'lldp_tlv_options': 'TEXT_OPTIONS:link-aggregation,\
mac-phy-status,management-address,max-frame-size,\
port-description,port-protocol-vlan,port-vlan,power-mdi,\
protocol-identity,system-capabilities,system-description,\
system-name,vid-management,vlan-name',
'load_interval_delay': 'INTEGER_VALUE:30-300',
'load_interval_counter': 'INTEGER_VALUE:1-3',
'mac_accessgroup_name': 'TEXT:',
'mac_address': 'TEXT:',
'microburst_threshold': 'NO_VALIDATION:1-4294967295',
'mtu_value': 'INTEGER_VALUE:64-9216',
'service_instance': 'NO_VALIDATION:1-4294967295',
'service_policy_options': 'TEXT_OPTIONS:copp-system-policy,\
input,output,type',
'service_policy_name': 'TEXT:',
'spanning_tree_options': 'TEXT_OPTIONS:bpdufilter,bpduguard,\
cost,disable,enable,guard,link-type,mst,port,port-priority,vlan',
'spanning_tree_cost': 'NO_VALIDATION:1-200000000',
'spanning_tree_interfacerange': 'INTEGER_VALUE_RANGE:1-3999',
'spanning_tree_portpriority': 'TEXT_OPTIONS:0,32,64,96,128,160,\
192,224',
'portchannel_ipv6_neighbor_mac': 'TEXT:',
'portchannel_ipv6_neighbor_address': 'IPV6Address:',
'portchannel_ipv6_linklocal': 'IPV6Address:',
'portchannel_ipv6_dhcp_vlan': 'INTEGER_VALUE:1-4094',
'portchannel_ipv6_dhcp_ethernet': 'TEXT:',
'portchannel_ipv6_dhcp': 'IPV6Address:',
'portchannel_ipv6_address': 'IPV6Address:',
'portchannel_ipv6_options': 'TEXT_OPTIONS:address,dhcp,\
link-local,nd,neighbor',
'interface_speed': 'TEXT_OPTIONS:1000,10000,40000,50000,auto',
'stormcontrol_options': 'TEXT_OPTIONS:broadcast,multicast,\
unicast',
'stormcontrol_level': 'FLOAT:',
'portchannel_dot1q_tag': 'TEXT_OPTIONS:disable,enable,\
egress-only',
'vrrp_id': 'INTEGER_VALUE:1-255',
}
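# Illustrative sketch, not part of the original module: the rule strings
# above follow a "KIND:payload" convention (e.g. 'INTEGER_VALUE:1-3999' or
# 'TEXT_OPTIONS:ipv4,ipv6'). The hypothetical helper below shows how the
# two most common kinds could be checked; the real module may dispatch on
# these prefixes differently.
def _check_rule(rule, value):
    # Split 'INTEGER_VALUE:1-3999' into kind and payload.
    kind, _, payload = rule.partition(':')
    if kind == 'INTEGER_VALUE':
        try:
            low, high = (int(x) for x in payload.split('-'))
            return low <= int(value) <= high
        except (TypeError, ValueError):
            return False
    if kind == 'TEXT_OPTIONS':
        # Options are comma separated.
        return str(value) in payload.split(',')
    # Other kinds (IPV4Address:, IPV6Address:, FLOAT:, NO_VALIDATION:, ...)
    # would need their own branches; accept them here.
    return True

# Example: _check_rule(NE1072T['vlan_id'], 4000) -> False (outside 1-3999).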
| gpl-3.0 |
tomka/CATMAID | django/applications/catmaid/control/useranalytics.py | 2 | 20452 | # -*- coding: utf-8 -*-
from datetime import timedelta, datetime
from dateutil import parser as dateparser
import io
import logging
import numpy as np
import pytz
from typing import Any, Dict, List, Tuple
from django.db import connection
from django.http import HttpRequest, HttpResponse
from django.utils import timezone
from django.shortcuts import get_object_or_404
from django.views.decorators.cache import never_cache
from catmaid.control.common import get_request_bool
from catmaid.control.authentication import requires_user_role
from catmaid.models import Connector, Project, Treenode, Review, UserRole
logger = logging.getLogger(__name__)
try:
import matplotlib
# Use a noninteractive backend since most CATMAID instances are headless.
matplotlib.use('svg')
import matplotlib.pyplot as plt
from matplotlib.dates import DateFormatter, DayLocator
from pylab import figure
from matplotlib.backends.backend_svg import FigureCanvasSVG
except ImportError:
logger.warning("CATMAID was unable to load the matplotlib module. "
"User analytics will not be available")
class Bout(object):
""" Represents one bout, based on a list of events. The first event ist the
start date/time, the last event the end.
"""
def __init__(self, start, end=None):
self.events = [start]
if end:
self.events.append(end)
def addEvent(self, e):
""" Increments the event counter.
"""
self.events.append(e)
@property
def nrEvents(self):
return len(self.events)
@property
def start(self):
return self.events[0]
@property
def end(self):
return self.events[-1]
def __str__(self):
return "Bout with %s events [%s, %s]" % \
(self.nrEvents, self.start, self.end)
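# Example usage (illustrative, not part of the original module):
#
#   from datetime import datetime, timedelta
#   b = Bout(datetime(2020, 1, 1, 9, 0))
#   b.addEvent(datetime(2020, 1, 1, 9, 5))
#   assert b.nrEvents == 2
#   assert b.end - b.start == timedelta(minutes=5)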
@never_cache
@requires_user_role(UserRole.Browse)
def plot_useranalytics(request:HttpRequest, project_id) -> HttpResponse:
""" Creates an SVG image containing different plots for analzing the
performance of individual users over time.
"""
time_zone = pytz.utc
userid = request.GET.get('userid', None)
if not (userid and userid.strip()):
raise ValueError("Need user ID")
project = get_object_or_404(Project, pk=project_id) if project_id else None
all_writes = get_request_bool(request.GET, 'all_writes', False)
maxInactivity = int(request.GET.get('max_inactivity', 3))
# Get the start date for the query, defaulting to 7 days ago.
start_date = request.GET.get('start', None)
if start_date:
start_date = dateparser.parse(start_date)
start_date = time_zone.localize(start_date)
else:
with timezone.override(time_zone):
start_date = timezone.now() - timedelta(7)
# Get the end date for the query, defaulting to now.
end_date = request.GET.get('end', None)
if end_date:
end_date = dateparser.parse(end_date)
end_date = time_zone.localize(end_date)
else:
with timezone.override(time_zone):
end_date = timezone.now()
# The API is inclusive and should return stats for the end date as
# well. The actual query is easier with an exclusive end and therefore
# the end date is set to the beginning of the next day.
end_date = end_date + timedelta(days=1)
if request.user.is_superuser or \
project and request.user.has_perm('can_browse', project):
f = generateReport( userid, project_id, maxInactivity, start_date,
end_date, all_writes )
else:
f = generateErrorImage('You lack permissions to view this report.')
# Use raw text rather than SVG fonts or pathing.
plt.rcParams['svg.fonttype'] = 'none'
buf = io.BytesIO()
plt.savefig(buf, format='svg')
return HttpResponse(buf.getvalue(), content_type='image/svg+xml')
def eventTimes(user_id, project_id, start_date, end_date, all_writes=True) -> Dict[str, Any]:
""" Returns a tuple containing a list of tree node edition times, connector
edition times and tree node review times within the date range specified
where the editor/reviewer is the given user.
"""
dr = (start_date, end_date)
tns = Treenode.objects.filter(
editor_id=user_id,
edition_time__range=dr)
cns = Connector.objects.filter(
editor_id=user_id,
edition_time__range=dr)
rns = Review.objects.filter(
reviewer_id=user_id,
review_time__range=dr)
if project_id:
tns = tns.filter(project_id=project_id)
cns = cns.filter(project_id=project_id)
rns = rns.filter(project_id=project_id)
tns = tns.values_list('edition_time', flat=True)
cns = cns.values_list('edition_time', flat=True)
rns = rns.values_list('review_time', flat=True)
events = {
'treenode_events': list(tns),
'connector_events': list(cns),
'review_events': list(rns)
}
if all_writes:
if project_id:
params:Tuple[Any, ...] = (start_date, end_date, user_id, project_id)
project_filter = "AND project_id = %s"
else:
params = (start_date, end_date, user_id)
project_filter = ""
# Query transaction log. This makes this feature only useful if history
# tracking is available.
cursor = connection.cursor()
cursor.execute(f"""
SELECT execution_time
FROM catmaid_transaction_info
WHERE execution_time >= %s
AND execution_time <= %s
AND user_id = %s
{project_filter}
""", params)
events['write_events'] = [r[0] for r in cursor.fetchall()]
return events
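# The returned mapping contains one list of datetimes per event class,
# e.g. (illustrative):
#
#   {'treenode_events': [...], 'connector_events': [...],
#    'review_events': [...], 'write_events': [...]}
#
# where the 'write_events' key is only present if <all_writes> is enabled.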
def eventsPerInterval(times, start_date, end_date, interval='day') -> Tuple[np.ndarray, List]:
""" Creates a histogram of how many events fall into all intervals between
<start_data> and <end_date>. The interval type can be day, hour and
halfhour. Returned is a tuple containing two elemens: the histogram and a
time axis, labeling every bin.
"""
if interval=='day':
intervalsPerDay = 1
secondsPerInterval = 86400
elif interval=='hour':
intervalsPerDay = 24
secondsPerInterval = 3600
elif interval=='halfhour':
intervalsPerDay = 48
secondsPerInterval = 1800
else:
raise ValueError('Interval options are day, hour, or halfhour')
# Generate axis
daycount = (end_date - start_date).days
dt = timedelta(0, secondsPerInterval)
timeaxis = [start_date + n*dt for n in range(intervalsPerDay * daycount)]
# Calculate bins
timebins = np.zeros(intervalsPerDay * daycount)
intervalsPerSecond = 1.0 / secondsPerInterval
for t in times:
i = int((t - start_date).total_seconds() * intervalsPerSecond)
timebins[i] += 1
return timebins, timeaxis
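# Example (illustrative): with interval='hour', two events within the same
# hour land in the same bin and the axis has one entry per hour.
#
#   start = datetime(2020, 1, 1, tzinfo=pytz.utc)
#   end = start + timedelta(days=1)
#   bins, axis = eventsPerInterval(
#       [start + timedelta(minutes=10), start + timedelta(minutes=50)],
#       start, end, interval='hour')
#   assert bins[0] == 2 and len(axis) == 24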
def activeTimes(alltimes, gapThresh):
""" Goes through the sorted array of time differences between all events
stored in <alltimes>. If two events are closer together than <gapThresh>
minutes, they are counted as events within one bout. A tuple containing a
list of bout start dates as well as a list with total numbers of events for
each bout is returned.
"""
# Sort all events and create a list of (time) differences between them
alltimes.sort()
dts = np.diff(alltimes)
# Threshold between two events to be counted as separate bouts (seconds)
threshold = 60 * gapThresh
# Indicates whether we are currently in a bout and since we haven't even
# looked at the first event, we are initially not.
bout = None
# Go through all events
for i, e in enumerate(alltimes):
if i > 0 and dts[i-1].total_seconds() < threshold:
# Increment current bout's event counter and continue with the
# next element as long as the time difference to the next
# element is below our threshold.
bout.addEvent(e) # type: ignore # mypy cannot prove bout will not be None
continue
else:
# Return current bout (not available in first iteration) and create
# a new one.
if bout:
yield bout
bout = Bout(e)
# Return last bout, if it hasn't been returned, yet
if bout:
yield bout
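# Example (illustrative): three events, the last one an hour after the
# first two, form two bouts with a 3 minute gap threshold.
#
#   t0 = datetime(2020, 1, 1, 9, 0)
#   ts = [t0, t0 + timedelta(minutes=2), t0 + timedelta(hours=1)]
#   bouts = list(activeTimes(ts, 3))
#   assert len(bouts) == 2 and bouts[0].nrEvents == 2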
def activeTimesPerDay(active_bouts) -> Tuple[Any, List]:
""" Creates a tuple containing the active time in hours for every day
between the first event of the first bout and the last event of the last
bout as well as a list with the date for every day.
"""
# Return right away if there are no bouts
if not active_bouts:
return [], []
# Find first event of first bout
daystart = active_bouts[0].start.replace(
hour=0, minute=0, second=0, microsecond=0)
# Find last event of last bout
dayend = active_bouts[-1].end
# Get total number of days between first and last event
numdays = (dayend - daystart).days + 1
# Create a list of dates for every day between first and last event
timeaxis = [daystart.date() + timedelta(d) for d in range(numdays)]
# Calculate the netto active time for each day
net_active_time = np.array(np.zeros(numdays))
for bout in active_bouts:
active_time = (bout.end - bout.start).total_seconds()
net_active_time[(bout.start - daystart).days] += active_time
# Return a tuple containing the active time for every
# day in hours and the list of days.
return np.divide(net_active_time, 3600), timeaxis
def singleDayEvents(alltimes, start_hour, end_hour) -> Tuple[Any, List]:
alltimes.sort()
timeaxis = [n for n in np.add(start_hour,range(end_hour-start_hour+1))]
activity = np.zeros(end_hour-start_hour+1)
for a in alltimes:
if a.hour >= start_hour:
if a.hour < end_hour:
activity[a.hour-start_hour] += 1
return np.true_divide(activity, (alltimes[-1] - alltimes[0]).days), timeaxis
def singleDayActiveness(activebouts, increment, start_hour, end_hour) -> Tuple[Any, Any]:
""" Returns a ... for all bouts between <start_hour> and <end_hour> of the
day.
"""
# Return right away, when there are no bouts given
if not activebouts:
return [], []
# Make sure 60 can be cleanly divided by <increment>
if np.mod(60, increment) > 0:
raise ValueError('Increments must divide 60 evenly')
# Some constants
stepsPerHour = 60 // increment
hoursConsidered = (end_hour - start_hour) + 1
daysConsidered = (activebouts[-1].end - activebouts[0].start).days + 1
# Get start of current day
starttime = timezone.now().replace(hour=start_hour,minute=0,second=0,microsecond=0)
# Create time axis list with entry for every <increment> minutes between
# <start_hour> and <end_hour>.
timeaxis = [starttime + timedelta(0, 0, 0, 0, n * increment) \
for n in range(stepsPerHour * hoursConsidered)]
# Loop through all days considered to find number of weekend days
weekendCorrection = 0
for d in range(daysConsidered):
# isoweekday() maps Monday to 1 and Sunday to 7
saturday = (activebouts[0].start + timedelta(d)).isoweekday() == 6
sunday = (activebouts[0].start + timedelta(d)).isoweekday() == 7
if saturday or sunday:
weekendCorrection += 1
# Initialize list for minutes per period with zeros
durPerPeriod = np.zeros(stepsPerHour * hoursConsidered)
for bout in activebouts:
# Ignore bouts that start after requested <end_hour> or end before
# requested <start_hour>.
if bout.start.hour > end_hour:
continue
elif bout.end.hour < start_hour:
continue
# Crop start and end times of every valid bout to request period
elif bout.start.hour < start_hour:
bout.start = bout.start.replace(hour=start_hour,minute=0,second=0,microsecond=0)
elif bout.end.hour > end_hour:
bout.end = bout.end.replace(hour=end_hour,minute=0,second=0,microsecond=0)
# Go through every sub bout, defined by periods of <increment> minutes,
# and store the number of minutes for every time-fraction considered.
for subbout in splitBout(bout,increment):
subboutSeconds = (subbout.end - subbout.start).total_seconds()
i = stepsPerHour * (subbout.start.hour - start_hour) + \
subbout.start.minute // increment
durPerPeriod[i] += np.true_divide(subboutSeconds, 60)
# Average the active minutes accumulated per period over the period
# length and the number of non-weekend days considered
n = increment * (daysConsidered - weekendCorrection)
durations = np.true_divide(durPerPeriod, n)
# Return a tuple containing a list durations and a list of timepoints
return durations, timeaxis
def splitBout(bout,increment) -> List[Bout]:
""" Splits one bout in periods of <increment> minutes.
"""
if np.mod(60, increment) > 0:
raise RuntimeError('Increments must divide 60 evenly')
boutListOut = []
currtime = bout.start
nexttime = bout.start
while nexttime < bout.end:
basemin = increment * ( currtime.minute // increment )
nexttime = currtime.replace(minute=0,second=0,microsecond=0) + timedelta(0,0,0,0,basemin+increment)
if nexttime > bout.end:
nexttime = bout.end
boutListOut.append(Bout(currtime, nexttime))
currtime = nexttime
return boutListOut
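# Example (illustrative): a bout from 9:05 to 9:30 split at 10 minute
# boundaries yields three sub-bouts (9:05-9:10, 9:10-9:20, 9:20-9:30).
#
#   b = Bout(datetime(2020, 1, 1, 9, 5), datetime(2020, 1, 1, 9, 30))
#   assert len(splitBout(b, 10)) == 3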
def generateErrorImage(msg) -> "matplotlib.figure.Figure":
""" Creates an empty image (based on image nr. 1) and adds a message to it.
"""
fig = plt.figure(1, figsize=(6,6))
fig.clf()
fig.suptitle(msg)
return fig
def generateReport(
user_id, project_id, activeTimeThresh, start_date, end_date, all_writes=True
) -> "matplotlib.figure.Figure":
""" nts: node times
cts: connector times
rts: review times """
events = eventTimes(user_id, project_id, start_date, end_date, all_writes)
nts = events['treenode_events']
cts = events['connector_events']
rts = events['review_events']
# If no nodes have been found, return an image with a descriptive text.
if len(nts) == 0:
return generateErrorImage("No tree nodes were edited during the " +
"defined period of time.")
annotationEvents, ae_timeaxis = eventsPerInterval( nts + cts, start_date, end_date )
reviewEvents, re_timeaxis = eventsPerInterval( rts, start_date, end_date )
if all_writes:
write_events = events['write_events']
other_write_events = write_events
writeEvents, we_timeaxis = eventsPerInterval(other_write_events, start_date, end_date)
else:
other_write_events = []
activeBouts = list(activeTimes( nts+cts+rts+other_write_events, activeTimeThresh ))
netActiveTime, at_timeaxis = activeTimesPerDay( activeBouts )
dayformat = DateFormatter('%b %d')
fig = plt.figure(figsize=(9.6, 8))
# Top left plot: created and edited nodes per day
ax1 = plt.subplot2grid((2,2), (0,0))
# If other writes should be shown, draw accumulated write bar first. This
# makes the regular bar draw over it, so that only the difference is
# visible, which is exactly what we want.
if all_writes:
we = ax1.bar(we_timeaxis, writeEvents, color='#00AA00', align='edge')
an = ax1.bar(ae_timeaxis, annotationEvents, color='#0000AA', align='edge')
rv = ax1.bar(re_timeaxis, reviewEvents, bottom=annotationEvents,
color='#AA0000', align='edge')
ax1.set_xlim((start_date,end_date))
if all_writes:
ax1.legend( (we, an, rv), ('Other changes','Annotated', 'Reviewed'), loc=2)
ax1.set_ylabel('Nodes and changes')
else:
ax1.legend( (an, rv), ('Annotated', 'Reviewed'), loc=2 )
ax1.set_ylabel('Nodes')
yl = ax1.get_yticklabels()
plt.setp(yl, fontsize=10)
ax1.xaxis.set_major_formatter(dayformat)
xl = ax1.get_xticklabels()
plt.setp(xl, rotation=30, fontsize=10)
ax1.set_title('Edit events', fontsize=10)
# Bottom left plot: net active time per day
ax2 = plt.subplot2grid((2,2), (1,0))
ax2.bar(at_timeaxis, netActiveTime, color='k', align='edge')
ax2.set_xlim((start_date,end_date))
ax2.set_ylabel('Hours')
yl = ax2.get_yticklabels()
plt.setp(yl, fontsize=10)
ax2.xaxis.set_major_formatter(dayformat)
xl = ax2.get_xticklabels()
plt.setp(xl, rotation=30, fontsize=10)
ax2.set_title('Net daily active time', fontsize=10)
"""
ax3 = fig.add_subplot(223)
ax3 = eventsPerIntervalPerDayPlot(ax3, rts+nts+cts, start_date, end_date, 30 )
"""
# Right column plot: bouts over days
ax4 = plt.subplot2grid((2,2), (0,1), rowspan=2)
ax4 = dailyActivePlotFigure(activeBouts, ax4, start_date, end_date)
yl = ax4.get_yticklabels()
plt.setp(yl, fontsize=10)
ax4.xaxis.set_major_formatter(dayformat)
xl = ax4.get_xticklabels()
plt.setp(xl, rotation=30, fontsize=10)
ax4.set_title('Active Bouts', fontsize=10)
yl = ax4.get_yticklabels()
plt.setp(yl, fontsize=10)
ax4.set_ylabel('Time (24 hr)')
fig.set_tight_layout(True)
return fig
def dailyActivePlotFigure(activebouts, ax:"matplotlib.axes.Axes", start_date, end_date) -> "matplotlib.axes.Axes":
""" Draws a plot of all bouts during each day between <start_date> and
<end_date> to the plot given by <ax>.
"""
# Y axis: Draw a line for each two hours in a day and set ticks accordingly
for i in range(2, 24, 2):
ax.axhline(i, color='#AAAAAA', linestyle = ':')
ax.axhspan(8, 18, facecolor='#999999', alpha=0.25)
ax.set_yticks(range(0, 25, 2))
# X axis: Ticks and labels for every day
ax.xaxis.set_major_locator(DayLocator())
# Draw all bouts
for bout in activebouts:
# Ignore bouts that span across midnight
# TODO: Draw midnight spanning bouts, too.
if bout.start.day == bout.end.day:
ax.bar( bout.start.replace(hour=0, minute=0, second=0, microsecond=0),
np.true_divide((bout.end-bout.start).total_seconds(), 3600),
bottom=bout.start.hour + bout.start.minute/60.0 + bout.start.second/3600.0,
alpha=0.5, color='#0000AA', align='edge', edgecolor="k")
# Set Axis limits
ax.set_ylim((0, 24))
ax.invert_yaxis()
ax.set_xlim((start_date, end_date))
return ax
def eventsPerIntervalPerDayPlot(ax, times, start_date, end_date, interval=60) -> "matplotlib.axes.Axes":
if np.mod(24 * 60, interval) > 0:
raise ValueError('Interval in minutes must divide the day evenly')
daycount = (end_date-start_date).days
timebins = {}
for i in range(daycount):
timebins[i] = np.zeros(24 * 60 // interval)
dayList = []
daylabels = []
for i in range(daycount):
day = start_date + timedelta( i )
dayList.append( day )
daylabels.append( str(day.month) + '/' + str(day.day) )
timeaxis = [i for i in range(24 * 60 // interval)]
timelabels = []
for i in range(int(24 * 60 / 30)):
if np.mod(i,2)==0:
timelabels.append( str(i // 2) + ':00' )
else:
timelabels.append( str( (i - 1) // 2 ) + ':30' )
for t in times:
timebins[(t - start_date).days][(t.hour * 60 + t.minute) // interval] += 1
meandat = np.zeros(len(timebins[0]))
ignoredDays = 0
ind = 0
cm = plt.get_cmap('jet',len(timebins))
dats = []
for dat in timebins.values():
if np.sum(dat)==0:
ignoredDays += 1
else:
tmp, = ax.plot(timeaxis, dat, marker='s', linestyle='-.',alpha=0.5, color=cm(ind))
dats.append(tmp)
meandat += dat
ind += 1
meandat = np.divide(meandat, daycount-ignoredDays)
tmp, = ax.plot( timeaxis, meandat, color='k', linewidth=4, linestyle='-')
dats.append(tmp)
daylabels.append('Mean')
ax.set_xticks(timeaxis)
ax.set_xticklabels(timelabels)
xl = ax.get_xticklabels()
plt.setp(xl, rotation=30, fontsize=10)
yl = ax.get_yticklabels()
plt.setp(yl, fontsize=10)
ax.set_ylabel('Events',fontsize=10)
ax.set_xlim(8 * 60 / interval, 19 * 60 / interval)
ax.legend(dats,daylabels,loc=2,frameon=False)
return ax
| gpl-3.0 |
laslabs/odoo | addons/account/tests/test_reconciliation.py | 5 | 29317 | from openerp.addons.account.tests.account_test_classes import AccountingTestCase
import time
import unittest
class TestReconciliation(AccountingTestCase):
"""Tests for reconciliation (account.tax)
Test used to check that when doing a sale or purchase invoice in a different currency,
the result will be balanced.
"""
def setUp(self):
super(TestReconciliation, self).setUp()
self.account_invoice_model = self.env['account.invoice']
self.account_invoice_line_model = self.env['account.invoice.line']
self.acc_bank_stmt_model = self.env['account.bank.statement']
self.acc_bank_stmt_line_model = self.env['account.bank.statement.line']
self.res_currency_model = self.registry('res.currency')
self.res_currency_rate_model = self.registry('res.currency.rate')
self.partner_agrolait_id = self.env.ref("base.res_partner_2").id
self.currency_swiss_id = self.env.ref("base.CHF").id
self.currency_usd_id = self.env.ref("base.USD").id
self.currency_euro_id = self.env.ref("base.EUR").id
self.env.ref('base.main_company').write({'currency_id': self.currency_euro_id})
self.account_rcv = self.env['account.account'].search([('user_type_id', '=', self.env.ref('account.data_account_type_receivable').id)], limit=1)
self.account_rsa = self.env['account.account'].search([('user_type_id', '=', self.env.ref('account.data_account_type_payable').id)], limit=1)
self.product = self.env.ref("product.product_product_4")
self.bank_journal_euro = self.env['account.journal'].create({'name': 'Bank', 'type': 'bank', 'code': 'BNK67'})
self.account_euro = self.bank_journal_euro.default_debit_account_id
self.bank_journal_usd = self.env['account.journal'].create({'name': 'Bank US', 'type': 'bank', 'code': 'BNK68', 'currency_id': self.currency_usd_id})
self.account_usd = self.bank_journal_usd.default_debit_account_id
self.diff_income_account = self.env['res.users'].browse(self.env.uid).company_id.income_currency_exchange_account_id
self.diff_expense_account = self.env['res.users'].browse(self.env.uid).company_id.expense_currency_exchange_account_id
def create_invoice(self, type='out_invoice', invoice_amount=50, currency_id=None):
#we create an invoice in given currency
invoice = self.account_invoice_model.create({'partner_id': self.partner_agrolait_id,
'reference_type': 'none',
'currency_id': currency_id,
'name': type == 'out_invoice' and 'invoice to client' or 'invoice to vendor',
'account_id': self.account_rcv.id,
'type': type,
'date_invoice': time.strftime('%Y') + '-07-01',
})
self.account_invoice_line_model.create({'product_id': self.product.id,
'quantity': 1,
'price_unit': invoice_amount,
'invoice_id': invoice.id,
'name': 'product that cost ' + str(invoice_amount),
'account_id': self.env['account.account'].search([('user_type_id', '=', self.env.ref('account.data_account_type_revenue').id)], limit=1).id,
})
#validate invoice
invoice.signal_workflow('invoice_open')
return invoice
def make_payment(self, invoice_record, bank_journal, amount=0.0, amount_currency=0.0, currency_id=None):
bank_stmt = self.acc_bank_stmt_model.create({
'journal_id': bank_journal.id,
'date': time.strftime('%Y') + '-07-15',
})
bank_stmt_line = self.acc_bank_stmt_line_model.create({'name': 'payment',
'statement_id': bank_stmt.id,
'partner_id': self.partner_agrolait_id,
'amount': amount,
'amount_currency': amount_currency,
'currency_id': currency_id,
'date': time.strftime('%Y') + '-07-15',})
#reconcile the payment with the invoice
for l in invoice_record.move_id.line_ids:
if l.account_id.id == self.account_rcv.id:
line_id = l
break
amount_in_widget = currency_id and amount_currency or amount
bank_stmt_line.process_reconciliation(counterpart_aml_dicts=[{
'move_line': line_id,
'debit': amount_in_widget < 0 and -amount_in_widget or 0.0,
'credit': amount_in_widget > 0 and amount_in_widget or 0.0,
'name': line_id.name,
}])
return bank_stmt
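    # Note: the counterpart dict above is expressed from the bank's point of
    # view, so a positive amount in the reconciliation widget becomes a credit
    # on the receivable line and a negative amount becomes a debit.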
def check_results(self, move_line_recs, aml_dict):
#we check that the line is balanced (bank statement line)
self.assertEquals(len(move_line_recs), len(aml_dict))
for move_line in move_line_recs:
self.assertEquals(round(move_line.debit, 2), aml_dict[move_line.account_id.id]['debit'])
self.assertEquals(round(move_line.credit, 2), aml_dict[move_line.account_id.id]['credit'])
self.assertEquals(round(move_line.amount_currency, 2), aml_dict[move_line.account_id.id]['amount_currency'])
self.assertEquals(move_line.currency_id.id, aml_dict[move_line.account_id.id]['currency_id'])
if 'currency_diff' in aml_dict[move_line.account_id.id]:
if move_line.credit:
rec_ids = [r.id for r in move_line.matched_debit_ids]
else:
rec_ids = [r.id for r in move_line.matched_credit_ids]
currency_diff_move = self.env['account.move'].search([('rate_diff_partial_rec_id', 'in', rec_ids)])
self.assertEqual(len(currency_diff_move), 1)
for currency_diff_line in currency_diff_move[0].line_ids:
if aml_dict[move_line.account_id.id].get('currency_diff') == 0:
if currency_diff_line.account_id.id == move_line.account_id.id:
self.assertAlmostEquals(currency_diff_line.amount_currency, aml_dict[move_line.account_id.id].get('amount_currency_diff'))
if aml_dict[move_line.account_id.id].get('currency_diff') > 0:
if currency_diff_line.account_id.id == move_line.account_id.id:
self.assertAlmostEquals(currency_diff_line.debit, aml_dict[move_line.account_id.id].get('currency_diff'))
else:
self.assertAlmostEquals(currency_diff_line.credit, aml_dict[move_line.account_id.id].get('currency_diff'))
self.assertIn(currency_diff_line.account_id.id, [self.diff_expense_account.id, self.diff_income_account.id], 'The difference accounts should be used correctly. ')
else:
if currency_diff_line.account_id.id == move_line.account_id.id:
self.assertAlmostEquals(currency_diff_line.credit, abs(aml_dict[move_line.account_id.id].get('currency_diff')))
else:
self.assertAlmostEquals(currency_diff_line.debit, abs(aml_dict[move_line.account_id.id].get('currency_diff')))
self.assertIn(currency_diff_line.account_id.id, [self.diff_expense_account.id, self.diff_income_account.id], 'The difference accounts should be used correctly. ')
def make_customer_and_supplier_flows(self, invoice_currency_id, invoice_amount, bank_journal, amount, amount_currency, transaction_currency_id):
#we create an invoice in given invoice_currency
invoice_record = self.create_invoice(type='out_invoice', invoice_amount=invoice_amount, currency_id=invoice_currency_id)
#we encode a payment on it, on the given bank_journal with amount, amount_currency and transaction_currency given
bank_stmt = self.make_payment(invoice_record, bank_journal, amount=amount, amount_currency=amount_currency, currency_id=transaction_currency_id)
customer_move_lines = bank_stmt.move_line_ids
#we create a supplier bill in given invoice_currency
invoice_record = self.create_invoice(type='in_invoice', invoice_amount=invoice_amount, currency_id=invoice_currency_id)
#we encode a payment on it, on the given bank_journal with amount, amount_currency and transaction_currency given
bank_stmt = self.make_payment(invoice_record, bank_journal, amount=-amount, amount_currency=-amount_currency, currency_id=transaction_currency_id)
supplier_move_lines = bank_stmt.move_line_ids
return customer_move_lines, supplier_move_lines
def test_statement_usd_invoice_eur_transaction_eur(self):
customer_move_lines, supplier_move_lines = self.make_customer_and_supplier_flows(self.currency_euro_id, 30, self.bank_journal_usd, 42, 30, self.currency_euro_id)
self.check_results(customer_move_lines, {
self.account_usd.id: {'debit': 30.0, 'credit': 0.0, 'amount_currency': 42, 'currency_id': self.currency_usd_id},
self.account_rcv.id: {'debit': 0.0, 'credit': 30.0, 'amount_currency': -42, 'currency_id': self.currency_usd_id},
})
self.check_results(supplier_move_lines, {
self.account_usd.id: {'debit': 0.0, 'credit': 30.0, 'amount_currency': -42, 'currency_id': self.currency_usd_id},
self.account_rcv.id: {'debit': 30.0, 'credit': 0.0, 'amount_currency': 42, 'currency_id': self.currency_usd_id},
})
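    # The fixed EUR amounts asserted below (32.70, 27.47, ...) follow from the
    # currency rates shipped with the demo data; changing those rates would
    # require updating the expected values.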
def test_statement_usd_invoice_usd_transaction_usd(self):
customer_move_lines, supplier_move_lines = self.make_customer_and_supplier_flows(self.currency_usd_id, 50, self.bank_journal_usd, 50, 0, False)
self.check_results(customer_move_lines, {
self.account_usd.id: {'debit': 32.70, 'credit': 0.0, 'amount_currency': 50, 'currency_id': self.currency_usd_id},
self.account_rcv.id: {'debit': 0.0, 'credit': 32.70, 'amount_currency': -50, 'currency_id': self.currency_usd_id},
})
self.check_results(supplier_move_lines, {
self.account_usd.id: {'debit': 0.0, 'credit': 32.70, 'amount_currency': -50, 'currency_id': self.currency_usd_id},
self.account_rcv.id: {'debit': 32.70, 'credit': 0.0, 'amount_currency': 50, 'currency_id': self.currency_usd_id},
})
def test_statement_usd_invoice_usd_transaction_eur(self):
customer_move_lines, supplier_move_lines = self.make_customer_and_supplier_flows(self.currency_usd_id, 50, self.bank_journal_usd, 50, 40, self.currency_euro_id)
self.check_results(customer_move_lines, {
self.account_usd.id: {'debit': 40.0, 'credit': 0.0, 'amount_currency': 50, 'currency_id': self.currency_usd_id},
self.account_rcv.id: {'debit': 0.0, 'credit': 40.0, 'amount_currency': -50, 'currency_id': self.currency_usd_id, 'currency_diff': 7.30},
})
self.check_results(supplier_move_lines, {
self.account_usd.id: {'debit': 0.0, 'credit': 40.0, 'amount_currency': -50, 'currency_id': self.currency_usd_id},
self.account_rcv.id: {'debit': 40.0, 'credit': 0.0, 'amount_currency': 50, 'currency_id': self.currency_usd_id, 'currency_diff': -7.30},
})
def test_statement_usd_invoice_chf_transaction_chf(self):
customer_move_lines, supplier_move_lines = self.make_customer_and_supplier_flows(self.currency_swiss_id, 50, self.bank_journal_usd, 42, 50, self.currency_swiss_id)
self.check_results(customer_move_lines, {
self.account_usd.id: {'debit': 27.47, 'credit': 0.0, 'amount_currency': 42, 'currency_id': self.currency_usd_id},
self.account_rcv.id: {'debit': 0.0, 'credit': 27.47, 'amount_currency': -50, 'currency_id': self.currency_swiss_id, 'currency_diff': -10.74},
})
self.check_results(supplier_move_lines, {
self.account_usd.id: {'debit': 0.0, 'credit': 27.47, 'amount_currency': -42, 'currency_id': self.currency_usd_id},
self.account_rcv.id: {'debit': 27.47, 'credit': 0.0, 'amount_currency': 50, 'currency_id': self.currency_swiss_id, 'currency_diff': 10.74},
})
def test_statement_eur_invoice_usd_transaction_usd(self):
customer_move_lines, supplier_move_lines = self.make_customer_and_supplier_flows(self.currency_usd_id, 50, self.bank_journal_euro, 40, 50, self.currency_usd_id)
self.check_results(customer_move_lines, {
self.account_euro.id: {'debit': 40.0, 'credit': 0.0, 'amount_currency': 50, 'currency_id': self.currency_usd_id},
self.account_rcv.id: {'debit': 0.0, 'credit': 40.0, 'amount_currency': -50, 'currency_id': self.currency_usd_id, 'currency_diff': 7.30},
})
self.check_results(supplier_move_lines, {
self.account_euro.id: {'debit': 0.0, 'credit': 40.0, 'amount_currency': -50, 'currency_id': self.currency_usd_id},
self.account_rcv.id: {'debit': 40.0, 'credit': 0.0, 'amount_currency': 50, 'currency_id': self.currency_usd_id, 'currency_diff': -7.30},
})
def test_statement_eur_invoice_usd_transaction_eur(self):
customer_move_lines, supplier_move_lines = self.make_customer_and_supplier_flows(self.currency_usd_id, 50, self.bank_journal_euro, 40, 0.0, False)
self.check_results(customer_move_lines, {
self.account_euro.id: {'debit': 40.0, 'credit': 0.0, 'amount_currency': 0.0, 'currency_id': False},
self.account_rcv.id: {'debit': 0.0, 'credit': 40.0, 'amount_currency': -61.16, 'currency_id': self.currency_usd_id},
})
self.check_results(supplier_move_lines, {
self.account_euro.id: {'debit': 0.0, 'credit': 40.0, 'amount_currency': 0.0, 'currency_id': False},
self.account_rcv.id: {'debit': 40.0, 'credit': 0.0, 'amount_currency': 61.16, 'currency_id': self.currency_usd_id},
})
def test_statement_euro_invoice_usd_transaction_chf(self):
customer_move_lines, supplier_move_lines = self.make_customer_and_supplier_flows(self.currency_usd_id, 50, self.bank_journal_euro, 42, 50, self.currency_swiss_id)
self.check_results(customer_move_lines, {
self.account_euro.id: {'debit': 42.0, 'credit': 0.0, 'amount_currency': 50, 'currency_id': self.currency_swiss_id},
self.account_rcv.id: {'debit': 0.0, 'credit': 42.0, 'amount_currency': -50, 'currency_id': self.currency_swiss_id},
})
self.check_results(supplier_move_lines, {
self.account_euro.id: {'debit': 0.0, 'credit': 42.0, 'amount_currency': -50, 'currency_id': self.currency_swiss_id},
self.account_rcv.id: {'debit': 42.0, 'credit': 0.0, 'amount_currency': 50, 'currency_id': self.currency_swiss_id},
})
def test_statement_euro_invoice_usd_transaction_euro_full(self):
#we create an invoice in given invoice_currency
invoice_record = self.create_invoice(type='out_invoice', invoice_amount=50, currency_id=self.currency_usd_id)
#we encode a payment on it, on the given bank_journal with amount, amount_currency and transaction_currency given
bank_stmt = self.acc_bank_stmt_model.create({
'journal_id': self.bank_journal_euro.id,
'date': time.strftime('%Y') + '-01-01',
})
bank_stmt_line = self.acc_bank_stmt_line_model.create({'name': 'payment',
'statement_id': bank_stmt.id,
'partner_id': self.partner_agrolait_id,
'amount': 40,
'date': time.strftime('%Y') + '-01-01',})
#reconcile the payment with the invoice
for l in invoice_record.move_id.line_ids:
if l.account_id.id == self.account_rcv.id:
line_id = l
break
bank_stmt_line.process_reconciliation(counterpart_aml_dicts=[{
'move_line': line_id,
'debit': 0.0,
'credit': 32.7,
'name': line_id.name,
}], new_aml_dicts=[{
'debit': 0.0,
'credit': 7.3,
'name': 'exchange difference',
'account_id': self.diff_income_account.id
}])
self.check_results(bank_stmt.move_line_ids, {
self.account_euro.id: {'debit': 40.0, 'credit': 0.0, 'amount_currency': 0, 'currency_id': False},
self.account_rcv.id: {'debit': 0.0, 'credit': 32.7, 'amount_currency': -41.97, 'currency_id': self.currency_usd_id, 'currency_diff': 0, 'amount_currency_diff': -8.03},
self.diff_income_account.id: {'debit': 0.0, 'credit': 7.3, 'amount_currency': -9.37, 'currency_id': self.currency_usd_id},
})
# The invoice should be paid, as the payments totally cover its total
self.assertEquals(invoice_record.state, 'paid', 'The invoice should be paid by now')
invoice_rec_line = invoice_record.move_id.line_ids.filtered(lambda x: x.account_id.reconcile)
self.assertTrue(invoice_rec_line.reconciled, 'The invoice should be totally reconciled')
self.assertEquals(invoice_rec_line.amount_residual, 0, 'The invoice should be totally reconciled')
self.assertEquals(invoice_rec_line.amount_residual_currency, 0, 'The invoice should be totally reconciled')
@unittest.skip('adapt to new accounting')
def test_balanced_exchanges_gain_loss(self):
# The point of this test is to show that we handle correctly the gain/loss exchanges during reconciliations in foreign currencies.
# For instance, with a company set in EUR, and a USD rate set to 0.033,
# the reconciliation of an invoice of 2.00 USD (60.61 EUR) and a bank statement of two lines of 1.00 USD (30.30 EUR)
# will lead to an exchange loss, that should be handled correctly within the journal items.
cr, uid = self.cr, self.uid
# We update the currency rate of the currency USD in order to force the gain/loss exchanges in next steps
rateUSDbis_id = self.registry("ir.model.data").get_object_reference(self.cr, self.uid, "base", "rateUSDbis")[1]
self.res_currency_rate_model.write(cr, uid, rateUSDbis_id, {
'name': time.strftime('%Y-%m-%d') + ' 00:00:00',
'rate': 0.033,
})
# We create a customer invoice of 2.00 USD
invoice_id = self.account_invoice_model.create(cr, uid, {
'partner_id': self.partner_agrolait_id,
'currency_id': self.currency_usd_id,
'name': 'Foreign invoice with exchange gain',
'account_id': self.account_rcv_id,
'type': 'out_invoice',
'date_invoice': time.strftime('%Y-%m-%d'),
'journal_id': self.bank_journal_usd_id,
'invoice_line': [
(0, 0, {
'name': 'line that will lead to an exchange gain',
'quantity': 1,
'price_unit': 2,
})
]
})
self.registry('account.invoice').signal_workflow(cr, uid, [invoice_id], 'invoice_open')
invoice = self.account_invoice_model.browse(cr, uid, invoice_id)
# We create a bank statement with two lines of 1.00 USD each.
bank_stmt_id = self.acc_bank_stmt_model.create(cr, uid, {
'journal_id': self.bank_journal_usd_id,
'date': time.strftime('%Y-%m-%d'),
'line_ids': [
(0, 0, {
'name': 'half payment',
'partner_id': self.partner_agrolait_id,
'amount': 1.0,
'date': time.strftime('%Y-%m-%d')
}),
(0, 0, {
'name': 'second half payment',
'partner_id': self.partner_agrolait_id,
'amount': 1.0,
'date': time.strftime('%Y-%m-%d')
})
]
})
statement = self.acc_bank_stmt_model.browse(cr, uid, bank_stmt_id)
# We process the reconciliation of the invoice line with the two bank statement lines
line_id = None
for l in invoice.move_id.line_id:
if l.account_id.id == self.account_rcv_id:
line_id = l
break
for statement_line in statement.line_ids:
self.acc_bank_stmt_line_model.process_reconciliation(cr, uid, statement_line.id, [
{'counterpart_move_line_id': line_id.id, 'credit': 1.0, 'debit': 0.0, 'name': line_id.name}
])
# The invoice should be paid, as the payments totally cover its total
self.assertEquals(invoice.state, 'paid', 'The invoice should be paid by now')
reconcile = None
for payment in invoice.payment_ids:
reconcile = payment.reconcile_id
break
# The invoice should be reconciled (entirely, not a partial reconciliation)
self.assertTrue(reconcile, 'The invoice should be totally reconciled')
result = {}
exchange_loss_line = None
for line in reconcile.line_id:
res_account = result.setdefault(line.account_id, {'debit': 0.0, 'credit': 0.0, 'count': 0})
res_account['debit'] = res_account['debit'] + line.debit
res_account['credit'] = res_account['credit'] + line.credit
res_account['count'] += 1
if line.credit == 0.01:
exchange_loss_line = line
# We should be able to find a move line of 0.01 EUR on the Debtors account, being the cent we lost during the currency exchange
self.assertTrue(exchange_loss_line, 'There should be one move line of 0.01 EUR in credit')
# The journal items of the reconciliation should have their debit and credit total equal
# Besides, the total debit and total credit should be 60.61 EUR (2.00 USD)
self.assertEquals(sum([res['debit'] for res in result.values()]), 60.61)
self.assertEquals(sum([res['credit'] for res in result.values()]), 60.61)
counterpart_exchange_loss_line = None
for line in exchange_loss_line.move_id.line_id:
if line.account_id.id == self.account_fx_expense_id:
counterpart_exchange_loss_line = line
# We should be able to find a move line of 0.01 EUR on the Foreign Exchange Loss account
self.assertTrue(counterpart_exchange_loss_line, 'There should be one move line of 0.01 EUR on account "Foreign Exchange Loss"')
def test_manual_reconcile_wizard_opw678153(self):
def create_move(name, amount, amount_currency, currency_id):
debit_line_vals = {
'name': name,
'debit': amount > 0 and amount or 0.0,
'credit': amount < 0 and -amount or 0.0,
'account_id': self.account_rcv.id,
'amount_currency': amount_currency,
'currency_id': currency_id,
}
credit_line_vals = debit_line_vals.copy()
credit_line_vals['debit'] = debit_line_vals['credit']
credit_line_vals['credit'] = debit_line_vals['debit']
credit_line_vals['account_id'] = self.account_rsa.id
credit_line_vals['amount_currency'] = -debit_line_vals['amount_currency']
vals = {
'journal_id': self.bank_journal_euro.id,
'line_ids': [(0,0, debit_line_vals), (0, 0, credit_line_vals)]
}
return self.env['account.move'].create(vals).id
move_list_vals = [
('1', -1.83, 0, self.currency_swiss_id),
('2', 728.35, 795.05, self.currency_swiss_id),
('3', -4.46, 0, self.currency_swiss_id),
('4', 0.32, 0, self.currency_swiss_id),
('5', 14.72, 16.20, self.currency_swiss_id),
('6', -737.10, -811.25, self.currency_swiss_id),
]
move_ids = []
for name, amount, amount_currency, currency_id in move_list_vals:
move_ids.append(create_move(name, amount, amount_currency, currency_id))
aml_recs = self.env['account.move.line'].search([('move_id', 'in', move_ids), ('account_id', '=', self.account_rcv.id)])
wizard = self.env['account.move.line.reconcile'].with_context(active_ids=[x.id for x in aml_recs]).create({})
wizard.trans_rec_reconcile_full()
for aml in aml_recs:
self.assertTrue(aml.reconciled, 'The journal item should be totally reconciled')
self.assertEquals(aml.amount_residual, 0, 'The journal item should be totally reconciled')
self.assertEquals(aml.amount_residual_currency, 0, 'The journal item should be totally reconciled')
move_list_vals = [
('2', 728.35, 795.05, self.currency_swiss_id),
('3', -4.46, 0, False),
('4', 0.32, 0, False),
('5', 14.72, 16.20, self.currency_swiss_id),
('6', -737.10, -811.25, self.currency_swiss_id),
]
move_ids = []
for name, amount, amount_currency, currency_id in move_list_vals:
move_ids.append(create_move(name, amount, amount_currency, currency_id))
aml_recs = self.env['account.move.line'].search([('move_id', 'in', move_ids), ('account_id', '=', self.account_rcv.id)])
wizard = self.env['account.move.line.reconcile.writeoff'].with_context(active_ids=[x.id for x in aml_recs]).create({'journal_id': self.bank_journal_usd.id, 'writeoff_acc_id': self.account_rsa.id})
wizard.trans_rec_reconcile()
for aml in aml_recs:
self.assertTrue(aml.reconciled, 'The journal item should be totally reconciled')
self.assertEquals(aml.amount_residual, 0, 'The journal item should be totally reconciled')
self.assertEquals(aml.amount_residual_currency, 0, 'The journal item should be totally reconciled')
def test_reconcile_bank_statement_with_payment_and_writeoff(self):
# Use case:
# Company is in EUR, create a bill for 80 USD and register payment of 80 USD.
# create a bank statement in USD bank journal with a bank statement line of 85 USD
# Reconcile bank statement with payment and put the remaining 5 USD in bank fees or another account.
invoice = self.create_invoice(type='out_invoice', invoice_amount=80, currency_id=self.currency_usd_id)
# register payment on invoice
payment = self.env['account.payment'].create({'payment_type': 'inbound',
'payment_method_id': self.env.ref('account.account_payment_method_manual_in').id,
'partner_type': 'customer',
'partner_id': self.partner_agrolait_id,
'amount': 80,
'currency_id': self.currency_usd_id,
'payment_date': time.strftime('%Y') + '-07-15',
'journal_id': self.bank_journal_usd.id,
})
payment.post()
payment_move_line = False
bank_move_line = False
for l in payment.move_line_ids:
if l.account_id.id == self.account_rcv.id:
payment_move_line = l
else:
bank_move_line = l
invoice.register_payment(payment_move_line)
# create bank statement
bank_stmt = self.acc_bank_stmt_model.create({
'journal_id': self.bank_journal_usd.id,
'date': time.strftime('%Y') + '-07-15',
})
bank_stmt_line = self.acc_bank_stmt_line_model.create({'name': 'payment',
'statement_id': bank_stmt.id,
'partner_id': self.partner_agrolait_id,
'amount': 85,
'date': time.strftime('%Y') + '-07-15',})
#reconcile the statement with invoice and put remaining in another account
bank_stmt_line.process_reconciliation(payment_aml_rec= bank_move_line, new_aml_dicts=[{
'account_id': self.diff_income_account.id,
'debit': 0,
'credit': 5,
'name': 'bank fees',
}])
# Check that move lines associated to bank_statement are correct
bank_stmt_aml = self.env['account.move.line'].search([('statement_id', '=', bank_stmt.id)])
bank_stmt_aml |= bank_stmt_aml.mapped('move_id').mapped('line_ids')
self.assertEquals(len(bank_stmt_aml), 4, "The bank statement should have 4 moves lines")
lines = {
self.account_usd.id: [
{'debit': 3.27, 'credit': 0.0, 'amount_currency': 5, 'currency_id': self.currency_usd_id},
{'debit': 52.33, 'credit': 0, 'amount_currency': 80, 'currency_id': self.currency_usd_id}
],
self.diff_income_account.id: {'debit': 0.0, 'credit': 3.27, 'amount_currency': -5, 'currency_id': self.currency_usd_id},
self.account_rcv.id: {'debit': 0.0, 'credit': 52.33, 'amount_currency': -80, 'currency_id': self.currency_usd_id},
}
for aml in bank_stmt_aml:
line = lines[aml.account_id.id]
            if isinstance(line, list):
# find correct line inside the list
if line[0]['debit'] == round(aml.debit, 2):
line = line[0]
else:
line = line[1]
self.assertEquals(round(aml.debit, 2), line['debit'])
self.assertEquals(round(aml.credit, 2), line['credit'])
self.assertEquals(round(aml.amount_currency, 2), line['amount_currency'])
self.assertEquals(aml.currency_id.id, line['currency_id']) | agpl-3.0 |
Zlash65/erpnext | erpnext/erpnext_integrations/doctype/gocardless_settings/gocardless_settings.py | 17 | 6908 | # -*- coding: utf-8 -*-
# Copyright (c) 2018, Frappe Technologies and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe.model.document import Document
import gocardless_pro
from frappe import _
from six.moves.urllib.parse import urlencode
from frappe.utils import get_url, call_hook_method, flt, cint
from frappe.integrations.utils import create_request_log, create_payment_gateway
class GoCardlessSettings(Document):
supported_currencies = ["EUR", "DKK", "GBP", "SEK"]
def validate(self):
self.initialize_client()
def initialize_client(self):
self.environment = self.get_environment()
try:
self.client = gocardless_pro.Client(
access_token=self.access_token,
environment=self.environment
)
return self.client
except Exception as e:
frappe.throw(e)
def on_update(self):
create_payment_gateway('GoCardless-' + self.gateway_name, settings='GoCardLess Settings', controller=self.gateway_name)
call_hook_method('payment_gateway_enabled', gateway='GoCardless-' + self.gateway_name)
def on_payment_request_submission(self, data):
if data.reference_doctype != "Fees":
customer_data = frappe.db.get_value(data.reference_doctype, data.reference_name, ["company", "customer_name"], as_dict=1)
data = {
"amount": flt(data.grand_total, data.precision("grand_total")),
"title": customer_data.company.encode("utf-8"),
"description": data.subject.encode("utf-8"),
"reference_doctype": data.doctype,
"reference_docname": data.name,
"payer_email": data.email_to or frappe.session.user,
"payer_name": customer_data.customer_name,
"order_id": data.name,
"currency": data.currency
}
valid_mandate = self.check_mandate_validity(data)
if valid_mandate is not None:
data.update(valid_mandate)
self.create_payment_request(data)
return False
else:
return True
def check_mandate_validity(self, data):
if frappe.db.exists("GoCardless Mandate", dict(customer=data.get('payer_name'), disabled=0)):
registered_mandate = frappe.db.get_value("GoCardless Mandate", dict(customer=data.get('payer_name'), disabled=0), 'mandate')
self.initialize_client()
mandate = self.client.mandates.get(registered_mandate)
            if mandate.status in ("pending_customer_approval", "pending_submission", "submitted", "active"):
return {"mandate": registered_mandate}
else:
return None
else:
return None
def get_environment(self):
if self.use_sandbox:
return 'sandbox'
else:
return 'live'
def validate_transaction_currency(self, currency):
if currency not in self.supported_currencies:
frappe.throw(_("Please select another payment method. Stripe does not support transactions in currency '{0}'").format(currency))
def get_payment_url(self, **kwargs):
return get_url("./integrations/gocardless_checkout?{0}".format(urlencode(kwargs)))
def create_payment_request(self, data):
self.data = frappe._dict(data)
try:
self.integration_request = create_request_log(self.data, "Host", "GoCardless")
return self.create_charge_on_gocardless()
except Exception:
frappe.log_error(frappe.get_traceback())
            return {
"redirect_to": frappe.redirect_to_message(_('Server Error'), _("There seems to be an issue with the server's GoCardless configuration. Don't worry, in case of failure, the amount will get refunded to your account.")),
"status": 401
}
def create_charge_on_gocardless(self):
redirect_to = self.data.get('redirect_to') or None
redirect_message = self.data.get('redirect_message') or None
reference_doc = frappe.get_doc(self.data.get('reference_doctype'), self.data.get('reference_docname'))
self.initialize_client()
try:
payment = self.client.payments.create(
params={
"amount" : cint(reference_doc.grand_total * 100),
"currency" : reference_doc.currency,
"links" : {
"mandate": self.data.get('mandate')
},
"metadata": {
"reference_doctype": reference_doc.doctype,
"reference_document": reference_doc.name
}
}, headers={
'Idempotency-Key' : self.data.get('reference_docname'),
})
            if payment.status in ("pending_submission", "pending_customer_approval", "submitted"):
self.integration_request.db_set('status', 'Authorized', update_modified=False)
self.flags.status_changed_to = "Completed"
self.integration_request.db_set('output', payment.status, update_modified=False)
elif payment.status=="confirmed" or payment.status=="paid_out":
self.integration_request.db_set('status', 'Completed', update_modified=False)
self.flags.status_changed_to = "Completed"
self.integration_request.db_set('output', payment.status, update_modified=False)
elif payment.status=="cancelled" or payment.status=="customer_approval_denied" or payment.status=="charged_back":
self.integration_request.db_set('status', 'Cancelled', update_modified=False)
frappe.log_error(_("Payment Cancelled. Please check your GoCardless Account for more details"), "GoCardless Payment Error")
self.integration_request.db_set('error', payment.status, update_modified=False)
else:
self.integration_request.db_set('status', 'Failed', update_modified=False)
frappe.log_error(_("Payment Failed. Please check your GoCardless Account for more details"), "GoCardless Payment Error")
self.integration_request.db_set('error', payment.status, update_modified=False)
except Exception as e:
frappe.log_error(e, "GoCardless Payment Error")
if self.flags.status_changed_to == "Completed":
status = 'Completed'
if 'reference_doctype' in self.data and 'reference_docname' in self.data:
custom_redirect_to = None
try:
custom_redirect_to = frappe.get_doc(self.data.get('reference_doctype'),
self.data.get('reference_docname')).run_method("on_payment_authorized", self.flags.status_changed_to)
except Exception:
frappe.log_error(frappe.get_traceback())
if custom_redirect_to:
redirect_to = custom_redirect_to
redirect_url = redirect_to
else:
status = 'Error'
redirect_url = 'payment-failed'
if redirect_message:
redirect_url += '&' + urlencode({'redirect_message': redirect_message})
redirect_url = get_url(redirect_url)
return {
"redirect_to": redirect_url,
"status": status
}
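    # Flow summary: create_charge_on_gocardless() charges the stored mandate
    # using the Payment Request name as idempotency key, maps the GoCardless
    # payment status onto the Integration Request log, and redirects either to
    # the reference document's success page or to 'payment-failed'.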
def get_gateway_controller(doc):
payment_request = frappe.get_doc("Payment Request", doc)
gateway_controller = frappe.db.get_value("Payment Gateway", payment_request.payment_gateway, "gateway_controller")
return gateway_controller
def gocardless_initialization(doc):
gateway_controller = get_gateway_controller(doc)
settings = frappe.get_doc("GoCardless Settings", gateway_controller)
client = settings.initialize_client()
return client
| gpl-3.0 |
vmthunder/nova | tools/xenserver/vdi_chain_cleanup.py | 139 | 3678 | #!/usr/bin/env python
# Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
This script is designed to cleanup any VHDs (and their descendents) which have
a bad parent pointer.
The script needs to be run in the dom0 of the affected host.
The available actions are:
- print: display the filenames of the affected VHDs
- delete: remove the affected VHDs
- move: move the affected VHDs out of the SR into another directory
"""
import glob
import os
import subprocess
import sys
class ExecutionFailed(Exception):
def __init__(self, returncode, stdout, stderr, max_stream_length=32):
self.returncode = returncode
self.stdout = stdout[:max_stream_length]
self.stderr = stderr[:max_stream_length]
self.max_stream_length = max_stream_length
def __repr__(self):
return "<ExecutionFailed returncode=%s out='%s' stderr='%s'>" % (
self.returncode, self.stdout, self.stderr)
__str__ = __repr__
def execute(cmd, ok_exit_codes=None):
if ok_exit_codes is None:
ok_exit_codes = [0]
proc = subprocess.Popen(
cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
(stdout, stderr) = proc.communicate()
if proc.returncode not in ok_exit_codes:
raise ExecutionFailed(proc.returncode, stdout, stderr)
return proc.returncode, stdout, stderr
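# Example (illustrative): query a VHD's parent while tolerating vhd-util's
# exit code 22 ("query failed"), which the scan in main() uses to flag VHDs
# whose parent pointer is broken:
#
# rc, out, err = execute(['vhd-util', 'query', '-n', fname, '-p'],
#                        ok_exit_codes=[0, 22])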
def usage():
print "usage: %s <SR PATH> <print|delete|move>" % sys.argv[0]
sys.exit(1)
def main():
if len(sys.argv) < 3:
usage()
sr_path = sys.argv[1]
action = sys.argv[2]
if action not in ('print', 'delete', 'move'):
usage()
if action == 'move':
if len(sys.argv) < 4:
print "error: must specify where to move bad VHDs"
sys.exit(1)
bad_vhd_path = sys.argv[3]
if not os.path.exists(bad_vhd_path):
os.makedirs(bad_vhd_path)
bad_leaves = []
descendents = {}
for fname in glob.glob(os.path.join(sr_path, "*.vhd")):
(returncode, stdout, stderr) = execute(
['vhd-util', 'query', '-n', fname, '-p'], ok_exit_codes=[0, 22])
stdout = stdout.strip()
if stdout.endswith('.vhd'):
try:
descendents[stdout].append(fname)
except KeyError:
descendents[stdout] = [fname]
elif 'query failed' in stdout:
bad_leaves.append(fname)
def walk_vhds(root):
yield root
if root in descendents:
for child in descendents[root]:
for vhd in walk_vhds(child):
yield vhd
for bad_leaf in bad_leaves:
for bad_vhd in walk_vhds(bad_leaf):
print bad_vhd
if action == "print":
pass
elif action == "delete":
os.unlink(bad_vhd)
elif action == "move":
new_path = os.path.join(bad_vhd_path,
os.path.basename(bad_vhd))
os.rename(bad_vhd, new_path)
else:
raise Exception("invalid action %s" % action)
if __name__ == '__main__':
main()
| apache-2.0 |
wstczlt/fresco | run_comparison.py | 8 | 10351 | #!/usr/bin/env python
# This file provided by Facebook is for non-commercial testing and evaluation
# purposes only. Facebook reserves all rights not expressly granted.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# FACEBOOK BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
This script builds and runs the comparison app, switching from one library to the next,
taking measurements as it goes, and outputs the results neatly.
Due to a bug, you must specify the CPU when running the script.
Use -c armeabi-v7a for most phones. Use -c armeabi for ARM v5-6 phones, or
-c arm64 for 64-bit ARM devices. Some emulators and tablets will need -c x86.
To select a subset of the libraries, use the -s option with a
space-separated list. Available options are fresco, fresco-okhttp,
glide, volley, drawee-volley, uil, picasso, and aquery.
To see the comparison for only network or local images, use -d network or -d local.
Note that Volley does not support local images, and fresco and fresco-okhttp
are identical for local images.
Results will vary based on the device, the network conditions and the mix of images available.
Example: to run a local-only comparison of fresco and picasso on an ARM v7 device:
./run_comparison.py -s fresco picasso -d local -c armeabi-v7a
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import argparse
import glob
import os
import re
import tempfile
from collections import namedtuple
from subprocess import check_call, PIPE, Popen
""" List of tested libraries """
TESTS = (
'fresco',
'fresco-okhttp',
'glide',
'picasso',
'uil',
'volley',
'drawee-volley',
'aquery',
)
TEST_SOURCES = (
'network',
'local'
)
ABIS = (
'arm64-v8a',
'armeabi',
'armeabi-v7a',
'x86',
'x86_64'
)
""" Appends test class name to method name """
TEST_PATTERN = 'test{}{}'
""" Named tuple containing relevant numbers reported by a test """
Stats = namedtuple('Stats', [
'success_wait_times',
'failure_wait_times',
'cancellation_wait_times',
'java_heap_sizes',
'native_heap_sizes',
'skipped_frames'])
def parse_args():
parser = argparse.ArgumentParser(
description='Runs comparison test and processes results')
parser.add_argument('-s', '--scenarios', choices=TESTS, nargs='+')
parser.add_argument('-d', '--sources', choices=TEST_SOURCES, nargs='+')
parser.add_argument('-c', '--cpu', choices=ABIS, required=True)
return parser.parse_args()
def start_subprocess(command, **kwargs):
""" Starts subprocess after printing command to stdout. """
return Popen(command.split(), **kwargs)
def run_command(command):
""" Runs given command and waits for it to terminate.
Prints the command to stdout and redirects its output to /dev/null. """
with open('/dev/null', 'w') as devnull:
check_call(command.split(), stdout=devnull, stderr=devnull)
def gradle(*tasks):
""" Runs given gradle tasks """
if tasks:
run_command('./gradlew {}'.format(" ".join(tasks)))
def adb(command):
""" Runs adb command - arguments are given as single string"""
run_command('adb {}'.format(command))
def install_apks(abi):
""" Installs comparison app and test apks """
print("Installing comparison app...")
gradle(':samples:comparison:assembleDebug',
':samples:comparison:assembleDebugAndroidTest')
adb('uninstall com.facebook.samples.comparison')
adb('uninstall com.facebook.samples.comparison.test')
cmd = ('install -r samples/comparison/build/outputs/apk/comparison-'
'{}-debug.apk'.format(abi))
adb(cmd)
adb('install -r samples/comparison/build/outputs/apk/'
'comparison-debug-androidTest-unaligned.apk')
class ComparisonTest:
""" Comparison test case """
def __init__(
self,
method_name,
class_name='com.facebook.samples.comparison.test.ScrollTest',
test_package='com.facebook.samples.comparison.test',
test_runner='android.test.InstrumentationTestRunner'):
self.method_name = method_name
self.class_name = class_name
self.test_package = test_package
self.test_runner = test_runner
def __call__(self):
""" Executes test case and captures logcat output """
adb('logcat -c')
with tempfile.TemporaryFile() as logcat_file:
logcat_reader = start_subprocess(
'adb logcat',
stdout=logcat_file)
adb('shell am instrument -w -e class {}#{} {}/{}'.format(
self.class_name,
self.method_name,
self.test_package,
self.test_runner))
logcat_reader.terminate()
logcat_reader.wait()
logcat_file.seek(0)
self.logcat = logcat_file.readlines()
def get_float_from_logs(regex, logs):
pattern = re.compile(regex)
return [float(match.group(1)) for match in map(pattern.search, logs) if match]
def get_int_from_logs(regex, logs):
pattern = re.compile(regex)
return [int(match.group(1)) for match in map(pattern.search, logs) if match]
def get_stats(logs):
pattern = re.compile("""]: loaded after (\d+) ms""")
success_wait_times = [
int(match.group(1)) for match in map(pattern.search, logs) if match]
pattern = re.compile("""]: failed after (\d+) ms""")
failure_wait_times = [
int(match.group(1)) for match in map(pattern.search, logs) if match]
pattern = re.compile("""]: cancelled after (\d+) ms""")
cancellation_wait_times = [
int(match.group(1)) for match in map(pattern.search, logs) if match]
pattern = re.compile("""\s+(\d+.\d+) MB Java""")
java_heap_sizes = [
float(match.group(1)) for match in map(pattern.search, logs) if match]
pattern = re.compile("""\s+(\d+.\d+) MB native""")
native_heap_sizes = [
float(match.group(1)) for match in map(pattern.search, logs) if match]
pattern = re.compile("""Skipped (\d+) frames! The application may be""")
skipped_frames = [
int(match.group(1)) for match in map(pattern.search, logs) if match]
return Stats(
success_wait_times,
failure_wait_times,
cancellation_wait_times,
java_heap_sizes,
native_heap_sizes,
skipped_frames)
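# Illustrative logcat fragments the patterns above match (shapes inferred
# from the regexes; exact tags vary by build):
#   "...]: loaded after 250 ms"
#   "  12.3 MB Java" / "  45.6 MB native"
#   "Skipped 12 frames! The application may be doing too much work..."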
def print_stats(stats):
successes = len(stats.success_wait_times)
cancellations = len(stats.cancellation_wait_times)
failures = len(stats.failure_wait_times)
total_count = successes + cancellations + failures
if total_count == 0:
print("Unable to read logs.")
return
total_wait_time = (
sum(stats.success_wait_times) +
sum(stats.cancellation_wait_times) +
sum(stats.failure_wait_times))
avg_wait_time = float(total_wait_time) / total_count
max_java_heap = max(stats.java_heap_sizes)
max_native_heap = max(stats.native_heap_sizes)
total_skipped_frames = sum(stats.skipped_frames)
print("Average wait time = {0:.1f}".format(avg_wait_time))
print("Successful requests = {}".format(successes))
print("Failures = {}".format(failures))
print("Cancellations = {}".format(cancellations))
print("Max java heap = {0:.1f}".format(max_java_heap))
print("Max native heap = {0:.1f}".format(max_native_heap))
print("Total skipped frames = {}".format(total_skipped_frames))
def get_test_name(option_name, source_name):
return TEST_PATTERN.format(
''.join(word.capitalize() for word in option_name.split('-')), source_name.capitalize())
def valid_scenario(scenario_name, source_name):
return source_name != 'local' or (scenario_name != 'volley' and scenario_name != 'drawee-volley')
def list_producers():
sdir = os.path.dirname(os.path.abspath(__file__))
producer_path = '%s/imagepipeline/src/main/java/com/facebook/imagepipeline/producers/*Producer.java' % sdir
files = glob.glob(producer_path)
return [f.split('.')[0].split('/')[-1] for f in files]
def print_fresco_perf_line(margin, name, times):
length = len(times)
if length == 0:
return
print("%s: %d requests, avg %d" % (name.rjust(margin), length, float(sum(times)) / length))
def print_fresco_perf(logs):
producers = list_producers()
margin = max([len(p) for p in producers])
    requests = get_int_from_logs(r""".*RequestLoggingListener.*onRequestSuccess.*elapsedTime:\s(\d+).*""", logs)
print_fresco_perf_line(margin, 'Total', requests)
for producer in producers:
        queue = get_int_from_logs(r".*onProducerFinishWithSuccess.*producer:\s%s.*queueTime=(\d+).*" % producer, logs)
print_fresco_perf_line(margin, '%s queue' % producer, queue)
        times = get_int_from_logs(r".*onProducerFinishWithSuccess.*producer:\s%s.*elapsedTime:\s(\d+).*" % producer, logs)
print_fresco_perf_line(margin, producer, times)
def main():
args = parse_args()
scenarios = []
sources = []
if args.scenarios:
scenarios = args.scenarios
else:
scenarios = TESTS
if args.sources:
sources = args.sources
else:
sources = TEST_SOURCES
install_apks(args.cpu)
for source_name in sources:
for scenario_name in scenarios:
if valid_scenario(scenario_name, source_name):
print()
print('Testing {} {}'.format(scenario_name, source_name))
print(get_test_name(scenario_name, source_name))
test = ComparisonTest(get_test_name(scenario_name, source_name))
test()
stats = get_stats(test.logcat)
print_stats(stats)
if scenario_name[:6] == 'fresco':
print()
print_fresco_perf(test.logcat)
if __name__ == "__main__":
main()
| bsd-3-clause |
ysywh/ryu | ryu/ofproto/ofproto_parser.py | 8 | 7216 | # Copyright (C) 2011, 2012 Nippon Telegraph and Telephone Corporation.
# Copyright (C) 2011 Isaku Yamahata <yamahata at valinux co jp>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import base64
import collections
import logging
import struct
import sys
import functools
from ryu import exception
from ryu import utils
from ryu.lib import stringify
from . import ofproto_common
LOG = logging.getLogger('ryu.ofproto.ofproto_parser')
def header(buf):
assert len(buf) >= ofproto_common.OFP_HEADER_SIZE
# LOG.debug('len %d bufsize %d', len(buf), ofproto.OFP_HEADER_SIZE)
return struct.unpack_from(ofproto_common.OFP_HEADER_PACK_STR, buffer(buf))
_MSG_PARSERS = {}
def register_msg_parser(version):
def register(msg_parser):
_MSG_PARSERS[version] = msg_parser
return msg_parser
return register
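# Usage sketch (the version constant below is illustrative; each ofproto_vX_Y
# parser module registers its own top-level parser this way):
#
# @register_msg_parser(ofproto_v1_0.OFP_VERSION)
# def msg_parser(datapath, version, msg_type, msg_len, xid, buf):
#     ...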
def msg(datapath, version, msg_type, msg_len, xid, buf):
assert len(buf) >= msg_len
msg_parser = _MSG_PARSERS.get(version)
if msg_parser is None:
raise exception.OFPUnknownVersion(version=version)
try:
return msg_parser(datapath, version, msg_type, msg_len, xid, buf)
except:
LOG.exception(
'Encounter an error during parsing OpenFlow packet from switch.'
'This implies switch sending a malformed OpenFlow packet.'
'version 0x%02x msg_type %d msg_len %d xid %d buf %s',
version, msg_type, msg_len, xid, utils.hex_array(buf))
return None
def create_list_of_base_attributes(f):
@functools.wraps(f)
def wrapper(self, *args, **kwargs):
ret = f(self, *args, **kwargs)
cls = self.__class__
# hasattr(cls, '_base_attributes') doesn't work because super class
# may already have the attribute.
if '_base_attributes' not in cls.__dict__:
cls._base_attributes = set(dir(self))
return ret
return wrapper
def ofp_msg_from_jsondict(dp, jsondict):
"""
    This function instantiates an appropriate OpenFlow message class
    from the given JSON style dictionary.
    The objects created by the following two code fragments are equivalent.
Code A::
jsonstr = '{ "OFPSetConfig": { "flags": 0, "miss_send_len": 128 } }'
jsondict = json.loads(jsonstr)
o = ofp_msg_from_jsondict(dp, jsondict)
Code B::
o = dp.ofproto_parser.OFPSetConfig(flags=0, miss_send_len=128)
This function takes the following arguments.
======== =======================================
Argument Description
======== =======================================
dp An instance of ryu.controller.Datapath.
jsondict A JSON style dict.
======== =======================================
"""
parser = dp.ofproto_parser
assert len(jsondict) == 1
for k, v in jsondict.iteritems():
cls = getattr(parser, k)
assert issubclass(cls, MsgBase)
return cls.from_jsondict(v, datapath=dp)
class StringifyMixin(stringify.StringifyMixin):
_class_prefixes = ["OFP", "ONF", "MT", "NX"]
@classmethod
def cls_from_jsondict_key(cls, k):
obj_cls = super(StringifyMixin, cls).cls_from_jsondict_key(k)
return obj_cls
class MsgBase(StringifyMixin):
"""
This is a base class for OpenFlow message classes.
An instance of this class has at least the following attributes.
========= ==============================
Attribute Description
========= ==============================
datapath A ryu.controller.controller.Datapath instance for this message
version OpenFlow protocol version
msg_type Type of OpenFlow message
msg_len Length of the message
xid Transaction id
buf Raw data
========= ==============================
"""
@create_list_of_base_attributes
def __init__(self, datapath):
super(MsgBase, self).__init__()
self.datapath = datapath
self.version = None
self.msg_type = None
self.msg_len = None
self.xid = None
self.buf = None
def set_headers(self, version, msg_type, msg_len, xid):
assert msg_type == self.cls_msg_type
self.version = version
self.msg_type = msg_type
self.msg_len = msg_len
self.xid = xid
def set_xid(self, xid):
assert self.xid is None
self.xid = xid
def set_buf(self, buf):
self.buf = buffer(buf)
def __str__(self):
def hexify(x):
return str(None) if x is None else '0x%x' % x
buf = 'version: %s msg_type %s xid %s ' % (hexify(self.version),
hexify(self.msg_type),
hexify(self.xid))
return buf + StringifyMixin.__str__(self)
@classmethod
def parser(cls, datapath, version, msg_type, msg_len, xid, buf):
msg_ = cls(datapath)
msg_.set_headers(version, msg_type, msg_len, xid)
msg_.set_buf(buf)
return msg_
def _serialize_pre(self):
self.version = self.datapath.ofproto.OFP_VERSION
self.msg_type = self.cls_msg_type
self.buf = bytearray(self.datapath.ofproto.OFP_HEADER_SIZE)
def _serialize_header(self):
        # buffer length is determined after trailing data is formatted.
assert self.version is not None
assert self.msg_type is not None
assert self.buf is not None
assert len(self.buf) >= self.datapath.ofproto.OFP_HEADER_SIZE
self.msg_len = len(self.buf)
if self.xid is None:
self.xid = 0
struct.pack_into(self.datapath.ofproto.OFP_HEADER_PACK_STR,
self.buf, 0,
self.version, self.msg_type, self.msg_len, self.xid)
def _serialize_body(self):
pass
def serialize(self):
self._serialize_pre()
self._serialize_body()
self._serialize_header()
class MsgInMsgBase(MsgBase):
@classmethod
def _decode_value(cls, k, json_value, decode_string=base64.b64decode,
**additional_args):
return cls._get_decoder(k, decode_string)(json_value,
**additional_args)
def namedtuple(typename, fields, **kwargs):
class _namedtuple(StringifyMixin,
collections.namedtuple(typename, fields, **kwargs)):
pass
return _namedtuple
def msg_str_attr(msg_, buf, attr_list=None):
if attr_list is None:
attr_list = stringify.obj_attrs(msg_)
for attr in attr_list:
val = getattr(msg_, attr, None)
if val is not None:
buf += ' %s %s' % (attr, val)
return buf
| apache-2.0 |
menren/openshift-ansible | inventory/gce/hosts/gce.py | 10 | 10594 | #!/usr/bin/env python2
# Copyright 2013 Google Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
'''
GCE external inventory script
=================================
Generates inventory that Ansible can understand by making API requests to
Google Compute Engine via the libcloud library. Full install/configuration
instructions for the gce* modules can be found in the comments of
ansible/test/gce_tests.py.
When run against a specific host, this script returns the following variables
based on the data obtained from the libcloud Node object:
- gce_uuid
- gce_id
- gce_image
- gce_machine_type
- gce_private_ip
- gce_public_ip
- gce_name
- gce_description
- gce_status
- gce_zone
- gce_tags
- gce_metadata
- gce_network
When run in --list mode, instances are grouped by the following categories:
- zone:
zone group name examples are us-central1-b, europe-west1-a, etc.
- instance tags:
An entry is created for each tag. For example, if you have two instances
with a common tag called 'foo', they will both be grouped together under
the 'tag_foo' name.
- network name:
the name of the network is appended to 'network_' (e.g. the 'default'
network will result in a group named 'network_default')
- machine type
types follow a pattern like n1-standard-4, g1-small, etc.
- running status:
group name prefixed with 'status_' (e.g. status_running, status_stopped,..)
- image:
when using an ephemeral/scratch disk, this will be set to the image name
used when creating the instance (e.g. debian-7-wheezy-v20130816). when
your instance was created with a root persistent disk it will be set to
'persistent_disk' since there is no current way to determine the image.
Examples:
Execute uname on all instances in the us-central1-a zone
$ ansible -i gce.py us-central1-a -m shell -a "/bin/uname -a"
Use the GCE inventory script to print out instance specific information
$ contrib/inventory/gce.py --host my_instance
Author: Eric Johnson <erjohnso@google.com>
Version: 0.0.1
'''
__requires__ = ['pycrypto>=2.6']
try:
import pkg_resources
except ImportError:
# Use pkg_resources to find the correct versions of libraries and set
# sys.path appropriately when there are multiversion installs. We don't
# fail here as there is code that better expresses the errors where the
# library is used.
pass
USER_AGENT_PRODUCT="Ansible-gce_inventory_plugin"
USER_AGENT_VERSION="v1"
import sys
import os
import argparse
import ConfigParser
try:
import json
except ImportError:
import simplejson as json
try:
from libcloud.compute.types import Provider
from libcloud.compute.providers import get_driver
_ = Provider.GCE
except:
print("GCE inventory script requires libcloud >= 0.13")
sys.exit(1)
class GceInventory(object):
def __init__(self):
# Read settings and parse CLI arguments
self.parse_cli_args()
self.driver = self.get_gce_driver()
# Just display data for specific host
if self.args.host:
print(self.json_format_dict(self.node_to_dict(
self.get_instance(self.args.host)),
pretty=self.args.pretty))
sys.exit(0)
# Otherwise, assume user wants all instances grouped
print(self.json_format_dict(self.group_instances(),
pretty=self.args.pretty))
sys.exit(0)
def get_gce_driver(self):
"""Determine the GCE authorization settings and return a
libcloud driver.
"""
gce_ini_default_path = os.path.join(
os.path.dirname(os.path.realpath(__file__)), "gce.ini")
gce_ini_path = os.environ.get('GCE_INI_PATH', gce_ini_default_path)
# Create a ConfigParser.
# This provides empty defaults to each key, so that environment
# variable configuration (as opposed to INI configuration) is able
# to work.
config = ConfigParser.SafeConfigParser(defaults={
'gce_service_account_email_address': '',
'gce_service_account_pem_file_path': '',
'gce_project_id': '',
'libcloud_secrets': '',
})
if 'gce' not in config.sections():
config.add_section('gce')
config.read(gce_ini_path)
# Attempt to get GCE params from a configuration file, if one
# exists.
secrets_path = config.get('gce', 'libcloud_secrets')
secrets_found = False
try:
import secrets
args = list(getattr(secrets, 'GCE_PARAMS', []))
kwargs = getattr(secrets, 'GCE_KEYWORD_PARAMS', {})
secrets_found = True
except:
pass
if not secrets_found and secrets_path:
if not secrets_path.endswith('secrets.py'):
err = "Must specify libcloud secrets file as "
err += "/absolute/path/to/secrets.py"
print(err)
sys.exit(1)
sys.path.append(os.path.dirname(secrets_path))
try:
import secrets
args = list(getattr(secrets, 'GCE_PARAMS', []))
kwargs = getattr(secrets, 'GCE_KEYWORD_PARAMS', {})
secrets_found = True
except:
pass
if not secrets_found:
args = [
config.get('gce','gce_service_account_email_address'),
config.get('gce','gce_service_account_pem_file_path')
]
kwargs = {'project': config.get('gce', 'gce_project_id')}
# If the appropriate environment variables are set, they override
# other configuration; process those into our args and kwargs.
args[0] = os.environ.get('GCE_EMAIL', args[0])
args[1] = os.environ.get('GCE_PEM_FILE_PATH', args[1])
kwargs['project'] = os.environ.get('GCE_PROJECT', kwargs['project'])
# Retrieve and return the GCE driver.
gce = get_driver(Provider.GCE)(*args, **kwargs)
gce.connection.user_agent_append(
'%s/%s' % (USER_AGENT_PRODUCT, USER_AGENT_VERSION),
)
return gce
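    # Configuration can also come entirely from the environment (placeholder
    # values; these override gce.ini/secrets.py as implemented above):
    #   export GCE_EMAIL=service-account@developer.gserviceaccount.com
    #   export GCE_PEM_FILE_PATH=/path/to/key.pem
    #   export GCE_PROJECT=my-project-id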
def parse_cli_args(self):
''' Command line argument processing '''
parser = argparse.ArgumentParser(
description='Produce an Ansible Inventory file based on GCE')
parser.add_argument('--list', action='store_true', default=True,
help='List instances (default: True)')
parser.add_argument('--host', action='store',
help='Get all information about an instance')
parser.add_argument('--pretty', action='store_true', default=False,
help='Pretty format (default: False)')
self.args = parser.parse_args()
def node_to_dict(self, inst):
md = {}
if inst is None:
return {}
if inst.extra['metadata'].has_key('items'):
for entry in inst.extra['metadata']['items']:
md[entry['key']] = entry['value']
net = inst.extra['networkInterfaces'][0]['network'].split('/')[-1]
return {
'gce_uuid': inst.uuid,
'gce_id': inst.id,
'gce_image': inst.image,
'gce_machine_type': inst.size,
'gce_private_ip': inst.private_ips[0],
'gce_public_ip': inst.public_ips[0] if len(inst.public_ips) >= 1 else None,
'gce_name': inst.name,
'gce_description': inst.extra['description'],
'gce_status': inst.extra['status'],
'gce_zone': inst.extra['zone'].name,
'gce_tags': inst.extra['tags'],
'gce_metadata': md,
'gce_network': net,
# Hosts don't have a public name, so we add an IP
'ansible_ssh_host': inst.public_ips[0] if len(inst.public_ips) >= 1 else inst.private_ips[0]
}
def get_instance(self, instance_name):
'''Gets details about a specific instance '''
try:
return self.driver.ex_get_node(instance_name)
except Exception as e:
return None
def group_instances(self):
'''Group all instances'''
groups = {}
meta = {}
meta["hostvars"] = {}
for node in self.driver.list_nodes():
name = node.name
meta["hostvars"][name] = self.node_to_dict(node)
zone = node.extra['zone'].name
            groups.setdefault(zone, []).append(name)
            tags = node.extra['tags']
            for t in tags:
                if t.startswith('group-'):
                    tag = t[6:]
                else:
                    tag = 'tag_%s' % t
                groups.setdefault(tag, []).append(name)
            net = node.extra['networkInterfaces'][0]['network'].split('/')[-1]
            net = 'network_%s' % net
            groups.setdefault(net, []).append(name)
            machine_type = node.size
            groups.setdefault(machine_type, []).append(name)
            image = node.image or 'persistent_disk'
            groups.setdefault(image, []).append(name)
            status = node.extra['status']
            stat = 'status_%s' % status.lower()
            groups.setdefault(stat, []).append(name)
groups["_meta"] = meta
return groups
def json_format_dict(self, data, pretty=False):
''' Converts a dict to a JSON object and dumps it as a formatted
string '''
if pretty:
return json.dumps(data, sort_keys=True, indent=2)
else:
return json.dumps(data)
# Run the script
GceInventory()
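# Typical invocations (sketch; the flags are defined in parse_cli_args):
#   --list        emit the full grouped inventory as JSON
#   --host NAME   emit the hostvars for a single instance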
| apache-2.0 |
farhaanbukhsh/sympy | sympy/physics/mechanics/tests/test_functions.py | 24 | 5650 | from sympy import sin, cos, tan, pi, symbols, Matrix
from sympy.physics.mechanics import (Particle, Point, ReferenceFrame,
RigidBody, Vector)
from sympy.physics.mechanics import (angular_momentum, dynamicsymbols,
inertia, inertia_of_point_mass,
kinetic_energy, linear_momentum,
outer, potential_energy, msubs,
find_dynamicsymbols)
Vector.simp = True
q1, q2, q3, q4, q5 = symbols('q1 q2 q3 q4 q5')
N = ReferenceFrame('N')
A = N.orientnew('A', 'Axis', [q1, N.z])
B = A.orientnew('B', 'Axis', [q2, A.x])
C = B.orientnew('C', 'Axis', [q3, B.y])
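# N -> A -> B -> C form a chain of frames related by single-axis rotations
# (about z, x and y respectively); they are shared by the tests below.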
def test_inertia():
N = ReferenceFrame('N')
ixx, iyy, izz = symbols('ixx iyy izz')
ixy, iyz, izx = symbols('ixy iyz izx')
assert inertia(N, ixx, iyy, izz) == (ixx * (N.x | N.x) + iyy *
(N.y | N.y) + izz * (N.z | N.z))
assert inertia(N, 0, 0, 0) == 0 * (N.x | N.x)
assert inertia(N, ixx, iyy, izz, ixy, iyz, izx) == (ixx * (N.x | N.x) +
ixy * (N.x | N.y) + izx * (N.x | N.z) + ixy * (N.y | N.x) + iyy *
(N.y | N.y) + iyz * (N.y | N.z) + izx * (N.z | N.x) + iyz * (N.z |
N.y) + izz * (N.z | N.z))
def test_inertia_of_point_mass():
r, s, t, m = symbols('r s t m')
N = ReferenceFrame('N')
px = r * N.x
I = inertia_of_point_mass(m, px, N)
assert I == m * r**2 * (N.y | N.y) + m * r**2 * (N.z | N.z)
py = s * N.y
I = inertia_of_point_mass(m, py, N)
assert I == m * s**2 * (N.x | N.x) + m * s**2 * (N.z | N.z)
pz = t * N.z
I = inertia_of_point_mass(m, pz, N)
assert I == m * t**2 * (N.x | N.x) + m * t**2 * (N.y | N.y)
p = px + py + pz
I = inertia_of_point_mass(m, p, N)
assert I == (m * (s**2 + t**2) * (N.x | N.x) -
m * r * s * (N.x | N.y) -
m * r * t * (N.x | N.z) -
m * r * s * (N.y | N.x) +
m * (r**2 + t**2) * (N.y | N.y) -
m * s * t * (N.y | N.z) -
m * r * t * (N.z | N.x) -
m * s * t * (N.z | N.y) +
m * (r**2 + s**2) * (N.z | N.z))
def test_linear_momentum():
N = ReferenceFrame('N')
Ac = Point('Ac')
Ac.set_vel(N, 25 * N.y)
I = outer(N.x, N.x)
A = RigidBody('A', Ac, N, 20, (I, Ac))
P = Point('P')
Pa = Particle('Pa', P, 1)
Pa.point.set_vel(N, 10 * N.x)
assert linear_momentum(N, A, Pa) == 10 * N.x + 500 * N.y
def test_angular_momentum_and_linear_momentum():
m, M, l1 = symbols('m M l1')
q1d = dynamicsymbols('q1d')
N = ReferenceFrame('N')
O = Point('O')
O.set_vel(N, 0 * N.x)
Ac = O.locatenew('Ac', l1 * N.x)
P = Ac.locatenew('P', l1 * N.x)
a = ReferenceFrame('a')
a.set_ang_vel(N, q1d * N.z)
Ac.v2pt_theory(O, N, a)
P.v2pt_theory(O, N, a)
Pa = Particle('Pa', P, m)
I = outer(N.z, N.z)
A = RigidBody('A', Ac, a, M, (I, Ac))
assert linear_momentum(
N, A, Pa) == 2 * m * q1d* l1 * N.y + M * l1 * q1d * N.y
assert angular_momentum(
O, N, A, Pa) == 4 * m * q1d * l1**2 * N.z + q1d * N.z
def test_kinetic_energy():
m, M, l1 = symbols('m M l1')
omega = dynamicsymbols('omega')
N = ReferenceFrame('N')
O = Point('O')
O.set_vel(N, 0 * N.x)
Ac = O.locatenew('Ac', l1 * N.x)
P = Ac.locatenew('P', l1 * N.x)
a = ReferenceFrame('a')
a.set_ang_vel(N, omega * N.z)
Ac.v2pt_theory(O, N, a)
P.v2pt_theory(O, N, a)
Pa = Particle('Pa', P, m)
I = outer(N.z, N.z)
A = RigidBody('A', Ac, a, M, (I, Ac))
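    # Total kinetic energy is the particle's translational term plus the
    # rigid body's translational and rotational terms about its mass center.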
assert 0 == kinetic_energy(N, Pa, A) - (M*l1**2*omega**2/2
+ 2*l1**2*m*omega**2 + omega**2/2)
def test_potential_energy():
m, M, l1, g, h, H = symbols('m M l1 g h H')
omega = dynamicsymbols('omega')
N = ReferenceFrame('N')
O = Point('O')
O.set_vel(N, 0 * N.x)
Ac = O.locatenew('Ac', l1 * N.x)
P = Ac.locatenew('P', l1 * N.x)
a = ReferenceFrame('a')
a.set_ang_vel(N, omega * N.z)
Ac.v2pt_theory(O, N, a)
P.v2pt_theory(O, N, a)
Pa = Particle('Pa', P, m)
I = outer(N.z, N.z)
A = RigidBody('A', Ac, a, M, (I, Ac))
Pa.potential_energy = m * g * h
A.potential_energy = M * g * H
assert potential_energy(A, Pa) == m * g * h + M * g * H
def test_msubs():
a, b = symbols('a, b')
x, y, z = dynamicsymbols('x, y, z')
# Test simple substitution
expr = Matrix([[a*x + b, x*y.diff() + y],
[x.diff().diff(), z + sin(z.diff())]])
sol = Matrix([[a + b, y],
[x.diff().diff(), 1]])
sd = {x: 1, z: 1, z.diff(): 0, y.diff(): 0}
assert msubs(expr, sd) == sol
# Test smart substitution
expr = cos(x + y)*tan(x + y) + b*x.diff()
sd = {x: 0, y: pi/2, x.diff(): 1}
assert msubs(expr, sd, smart=True) == b + 1
N = ReferenceFrame('N')
v = x*N.x + y*N.y
d = x*(N.x|N.x) + y*(N.y|N.y)
v_sol = 1*N.y
d_sol = 1*(N.y|N.y)
sd = {x: 0, y: 1}
assert msubs(v, sd) == v_sol
assert msubs(d, sd) == d_sol
def test_find_dynamicsymbols():
a, b = symbols('a, b')
x, y, z = dynamicsymbols('x, y, z')
expr = Matrix([[a*x + b, x*y.diff() + y],
[x.diff().diff(), z + sin(z.diff())]])
# Test finding all dynamicsymbols
sol = set([x, y.diff(), y, x.diff().diff(), z, z.diff()])
assert find_dynamicsymbols(expr) == sol
# Test finding all but those in sym_list
exclude = [x, y, z]
sol = set([y.diff(), x.diff().diff(), z.diff()])
assert find_dynamicsymbols(expr, exclude) == sol
| bsd-3-clause |
koyuawsmbrtn/eclock | windows/Python27/Lib/site-packages/pip/_vendor/requests/packages/charade/mbcsgroupprober.py | 2769 | 1967 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 2001
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
# Shy Shalom - original C code
# Proofpoint, Inc.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .charsetgroupprober import CharSetGroupProber
from .utf8prober import UTF8Prober
from .sjisprober import SJISProber
from .eucjpprober import EUCJPProber
from .gb2312prober import GB2312Prober
from .euckrprober import EUCKRProber
from .cp949prober import CP949Prober
from .big5prober import Big5Prober
from .euctwprober import EUCTWProber
class MBCSGroupProber(CharSetGroupProber):
def __init__(self):
CharSetGroupProber.__init__(self)
self._mProbers = [
UTF8Prober(),
SJISProber(),
EUCJPProber(),
GB2312Prober(),
EUCKRProber(),
CP949Prober(),
Big5Prober(),
EUCTWProber()
]
self.reset()
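# Example usage (sketch; feed/get_charset_name/get_confidence come from the
# CharSetProber interface in this package):
#   prober = MBCSGroupProber()
#   prober.feed(b'\xe4\xbd\xa0\xe5\xa5\xbd')  # UTF-8 bytes
#   print(prober.get_charset_name(), prober.get_confidence())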
| gpl-2.0 |
vmax-feihu/hue | desktop/core/ext-py/tablib-0.10.0/tablib/packages/odf/thumbnail.py | 135 | 31736 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# This contains a 128x128 px thumbnail in PNG format
# Taken from http://www.zwahlendesign.ch/en/node/20
# openoffice_icons/openoffice_icons_linux/openoffice11.png
# License: Freeware
import base64
iconstr = """\
iVBORw0KGgoAAAANSUhEUgAAAIAAAACACAYAAAG0OVFdAAAABGdBTUEAANbY1E9YMgAAABl0RVh0
U29mdHdhcmUAQWRvYmUgSW1hZ2VSZWFkeXHJZTwAAFoHSURBVHjaYvz//z8DJQAggFhu3LiBU1JI
SOiPmJgYM7IYUD0jMh8ggFhAhKamJuOHDx/+8fPz4zQsMTGRYf78+RjiAAHEBCJOnTr1HZvmN2/e
MDAyQiycOXMmw5MnTxhmzZoViqwGIIAYrl+/DqKM/6OBNWvWgOmvX7/+37Rp0/8jR478//fv3/+f
P3/+h+phPHHixH+AAIK75D8WMGnSpP8vXrz4//v37/9///6Fi4MMALruf3Bw8H+AAAJp5rQrOoeh
edmyZWAbgd77f/bsWTAbBoB6JOpbmkF0OkAAgcLgO8gUYCCCnSIlJQWmw8LCGA4cOAAOAyMjI3hY
gMDvP7+f3791+weQuQAggGBi7FPmrvnf3NwMtgnkt/Xr1//fuXMn2EaQ5TB89+nX/wUlJSDbPUFe
AQgguKleiY2/QIpBTv727TuKJhB+//nf/xtP/4ANrK6tBRnAATIAIICQEwUjUCHIoyjOBYGbz/8y
8HMwMXCzfmcoLC1kMDH3YNDU1mGQ4PvLCBBALEjq/t958Zfh0dt/DL/+MDD8BdkBNIeXnYFBhIeR
4efffwybNqxgEOEXZLjw25Xh2QMWhmi9BwwAAYRsAMO5268ZZMREGGSEGBmYgcEL1MMAcgwo3D9/
+sIwf84cBhHLGoYAVVYGxi/3wDYABBCKU6dPn37s1vM//3/+/v//20+gn5/9+b/7yq//iw++/6+o
qAhy0zUg1gH5HYYBAgg99Srsvvzz//6Tt//beSf+V/doBGkqheaFL0CKF1kzCAMEECOWfAMSY3Yq
PvF7X68FKCcCPcLAA8QqQHwB3VaAAGKktDwACCCc5QETE5ODjIzMfi4uLoRtjIwiQBe8RVYHEEDg
WODh4dkBTMLuQE1YDdPR0WG4cuUKw6tXr968ffsWxdsAAQTWAbQJq+aenh5wogJpBpUNzMzMGGoA
AggckshZFRmA8sXz58/BeQKY2WA5kRmkp7Oz8z8vL+8WgAACG3Lv3j0Mze/fvwcpBuaLb/9//foF
FweG2U9dXV2RixcvguTNAAKIAVQWaPt2oGgGlT4gzSBDNm/e/P/jx48o8n/+/PlraWkJil5OgAAC
OUDEKvsgWOLdu3f/k5KSwOxPnz79nzt3LrgIQwY/fvz4X1FbDbIgAOQVgAACxcIbFnZesFcEBQXB
AbdhwwYGNjY2BmdnZzANSypffvxn4OFgY/j5+TvI9i0gMYAAgkUJI7Dc+/flyxeGly9fMaipqWEE
9m1gTv329RvDjAmVDE52dgx6enpgvQABBIu7//fvPwCmB14Mze+//geXBwKcTAwn9q9kEOIXYNC2
8IfLAwQQcqIIOHPv9/o3X/4z/PkLzABAR7KyQMoCPi5Ghm9fvjJM7i5lUDbwYXjI4sIwK41LHBgG
rwACCLk82Pvq038GaQEmBi52iAEwK/4BDbx7cTeDEB8/w42/TgwhRt8ZzNeeeAHyAUAAoSTL15/+
/f/++z+DrBATw/P3/xgeAkunt5//MSzYcpOhJYyNQUNDowGorA9o82eYHoAAQjFgw6kv/yV4/zLc
v3WRoaRxBoOEtj/D2cXhPECNAcAExAbUiFE5AgQQenkAis/PrkWH/u/us3MGsvdBxYOAeD3QAIy8
DxBAjNiKJXIAqIZ//PjxYT4+PmtgHmEAJjiGhw8fMhLSBxBALIQUcHBw1AINbAIZCkqUuABywQZM
kwzAnMBw//79TcCy2A+f+QABBA4BoOuZHj169FdWVpYs3wPzKoOAgACKI0BsYCnDwMrKyg204xsu
vQABxAQtkv6FhISUEmuho6Mjw9OnT+F8UNsIWHQxAMsChtOnT4PaSwzAVglYDBgNX9H129raci8C
AhAbIICQkTCoACEWgAoVDw8PcKl17Nix/ydPnvx//vz5/9jMAKqRh9Vi9fX1YLHe3l6QuD1AAMEs
ZwUVi6s37CTK8t27d4MtBrW7QPj169f/79y58x+YCDFKP1jJCIruurq6VyC+t4/Pf2DUgAozSYAA
Atvu4Wm5D+QA47hVoLIWwwBQsVpaWgq2FIRVVVX/gxp427dv/79kyZL/Fy5cAIcIPrBh/QZwtZOS
mvoXmLDngDIOKEQAAgg5CmLsis7+v3XrFlgDyAJIWoIAkM+A8Q5ufYEqidmzZ4Md8PnzZxzVGQSD
wN79+8F0ekb6X2C92AyqRmFRAhBA6PnUVtuv99CVjUXwlAysicEKQZUuKJcAm/7AlM0GrmyBwYi9
ogWa+hYY6m+AxeDPt9cY9PV0GSoqKxjef/jGMGvGZGmgec9gSgECCFtBofvu3ftLoJQNjFuwI0RF
RRlwNRkQbQ4Ghmfv/jF8BlZaoKDjAzYnb1w4wHDx+lWG98A66s27zwwVZUUM8vJyakAH3IbpAwgg
rCXVxo2bnvr5+Ur9+w+pFX78+s/w8w+kvQnyMCsQs7GAeIwM91//A6r5z8DLAQwRFmDVwwnUA1R6
4uhBhl0H9jG8efacgZldgCE4Pp+BiUuc4fTNLwyVwUJMsGIZIIBwFZUam89+u84GrND+QZMeKQ04
acYbDGs3bWR4B/T5kbtcDLouWQycvKLgqp0FGJBGghdu2mgLaoDUAgQQrqL4BjOw/augogGuXNnZ
GBn4OUG+Y2RgY4W2l7//Bwb3P2BpB2oGMjKwMDMy3ARW+5nRbgwB7hYMTk5ODIVdWxmiQp0Yvj5b
9qy1uHIn0NyroH4dyHxYDgAIIHyVhdvzd392vvj4nwGYdhi+AKOBGdpY//vvDwPr348MX94+BVed
fTPXMry4tm02qMbLzs7eBmynrwOWgsuA/G1Ai77jCy2AAMLnAM75S1a/SIwJ3QTqpoAEzFO3N7Nx
CTEwMrMycN8qvLB9y8FAoPADmFna2tp/rl69mglyCKh9QExNCxBAjCTWOxKg+h6Iv2KRAzXDxYD4
ORD/ROoG4wUAAURx/4BSABBAeMcbSAHA4jUF2M2YDWo3sLOzM0ybNi0SmBBXENIHEEAkt4hALR9g
FTsX2PJJBFrIwMKCPSMB2xcMwI4BwSgGCCC8LSJgBSMtLi5+AGiRCsgyUPFLTJRt3bqVwdXVFRQS
oK7MX3xqAQII7gCgTyKBrZplIIuAwUlyFADbAwwWFhZgB3p7e8OEZYD4IT59AAEEGzKyBuVb9CEC
YsHy5csZysvLUUIH1Bq6du3aLdBACD69AAEEC4GXwHYAuHYjFqxevZph3bp1DCtWrACH2Pfv38EO
AHWQgFU0OLqEhYXZQM00fAAggGBV3DPYeA8hAEq0SkpKDKGhoWCfgywFWQ7shTLcvXuXAdjzBLeI
QVEpIiICCl1hdDMWLFiwCtirBdsNEEDwEQdgcBFsih08eBCFD2qOgTqloEYMaIwJmPjATTPkLvG2
bds2IY9sAHt/6rDhNFAAAAQQ3FWtra1biW2Qgjrvly5dAteTwP422HJQo/TBgwcYTTpgg+Y/zHIX
FxdWYGj9P3fu3H9g6LwHNYQBAgil8kEel8NneXp6OthyUF8e1H8HNddAoYGtPQlSD+3LM2ZmZoLF
Nm7c+B86XMcLEEBgmw10JazMUrYSbFiC23VQy0EhABreACa6/8BCBxz0oEEFbJ4ANmiDgXoEQOyG
1tb/VlZWIDNAvWxGgABiSSqseXiHMUju359fDEADGCQkJHAmwJUrV4LbiKDEBeyxgjodDLdv3wY3
19TV1Rm4ubkZsGXlnJycNdpa2vfAQwXAtAbsP2wEMu+AWkUAAQQSkwU1yUH4ypUrGK4HKQImJHiT
HIRBiezy5cvgJjko4b18+fI/vugDhdK/P//+VTfU/09ISACNliaCogWULgACCJQVHp+aYtQEToiz
9qK4fP/+/aBsBC5WQdkNVLiAshtoCBqU3Tg5ORmMjY3BjVZ8hdiZM2eBbQhGxhdPnv4DOrofZDSs
oQIQQOC8+OMXQw+IvvaSB16axcTEMJiYmID5oKY3KG/fvHmTAZjwwMUuyCGgQTRcloOMAeFPX34A
+4I2DKWVlUA9P38DE+oRoDS8YwkQQLCS8POhPiNfi/Rdm0H9ehUVFXjnE2QRsMvFAExkDF+/fgWX
lqAmu4KCArifAIp/XPXTm8//GW5dPs9gbW3JwAxUtGL5ik7ooOVvmBqAAEKuDXfwcLIwvH37Fm45
MHuBfQ2MY3DilJSUZIDUikxgi5EHsVC668DAffcF2Ef4/BVseU5hAYMwjyBo3ABUN7xEVgsQQMi9
jT97JjgZvHkDGc8E9e1BdfqPHz8Z9PUNGLS1QcEtBox3LnDZj2uw4hWwEfvyw1+G38B+BOsviEcE
efkYXgNzGLC/0Qn0/R9k9QABhN7duTRn/pyPIF/9/PkLWJ9zAC3WBscz1i4YUsPy0zfIAPuHb//A
vSRulh8MZ8+dY4iMjWX49/cfg6OjHYORiYU0ul6AAMKWdAP+/v23HpT4YAmQEHj05h/Dj9//wRYL
8zCBHXTs4DaG81cuM7x98YLh229mhqjEPAZpaRkGNSkWPuRhMoAAwtbhOwmKe2ZmYDwDLf8G7A98
+g7qG/wHxi2w5gPy//6HWPYOmMhuPvsL7raJAC2WFmQGdlCAXTfGbwzPgenm0YMHQHNYGGxsHRg+
M4kz3H71jyGlbGoOsmUAAYStSfbm3M3XDAIiUkAL/zF8+8nI8PM3pMMJshSMQcPGTJA+IiewCcEJ
7Dm9AAYzGzNktuHZrdMMt+7eYeAA9qKffGBmEPinx3DkNNDRTH8Yfoh4tAHzVjvMMoAAwhYCv6/f
f/Xv6XtgKgam5j/AugTUMQZZyMSImKwAWfQdmJnefQM1Jv6D50zuAH14/fFnBhU1VYY3r18y8PHx
M3zms2F4/EUEaDmk06ogKw4q3OAeBwggrI3SnprEqgnLz3aAesCgXi8fEIPLGuiEDIyJngVBFZ+l
jgLDbWCZIcgrwLDj4l8GbSdDBi52JgZ3/f8M74FZ/O2rZ7C2IrhHBRBAWB1w89rlAwrC0PAGdXlY
GRmE+BjBQQ0S+v7zP8MvoO+/AtPDDyAN6jPyczEyHLryHjyC9ub1awZhUQkGHVZRBnOJ2wzt5Zbb
Jj55AuqYngXlNOSSECCAcBXgou8/fnn16RcneGxAQpAJHBKgIASNmoMGgD8AE+QXYBR9A6aPP7//
MGw69prh8e1zDOZCFxiAjRSGkJCQbaD5JKilr9HzPwwABBAuBzBdu3n/LwuvLDCOgTng639wnP+D
TFcC8Q+Gv19fMnx5/5yhu386w9kDK0CWzAE269k3bdo0wc7ODlTkggai7mIbH0YGAAGEq2Py7/jl
J98klKW5+Dj+MvAxfWJ4+/opw707VxnaJq1g4BRUYOCT1GWQF3z9G2i5JdSXjOvXr/8HtXwZMZaD
AEAA4esIRLu7e+bu3Ln9JJB9xSh2+SwOPikG2AQHsPIKh3bDwRULsGiWB9aeB48dOxYH5B4FZRRi
un0AAYTPAWxQ+Z9Qvg2w0XIYaDGo6gb58g2aen0gVgXiXaCSmdjuOUAAkdIVAqlVBjWlcMhLgio0
qMP+E+sAgACi2nwBLQGoRw7se7gCO7uJwHZnBLBNyobcpqAEAAQQy0B6DNjkUAR6KAnYvIgFpWFQ
EwM0tgEackBu5SH3eUHNlNOnT98GBgpovPMXpW4ACCAWWsQWsPUYB/RIPNBjjjBPgVqShAZ7iQGg
1omysrK8lpaWJpB7kVLzAAKI6CwA9IAlECcBPRMDxBwgj4EwrgEiagDQnHdRURHD4sWLGbq7uxlK
Skrgcvfv3weNEaA0rcgBAAEEDwBQzC1cuNDO39//AB8fHwO5QzUUZgmG3t5ehoqKCnCyB3UPQHMT
2ABoQGTt2rU9sbGxZcTUN7gAQACxII26/AcGwndQgIACgB4A5MEHwDbrt2/fGC5cuMCQl5cHbkb8
g89aI8oAkBhoCAuEQWxQdrK1tQUlCVA38xm5bgAIIPRMeX/Xrl0HQQ6iNgD1Ljdu3Ahf2hQVFQVO
xvr6+iCPMOTm5oI9eunSJUgHDehR0Fjb8+fPwaMP165dA9MgPkgclFrExMRAXeRjwIhjJdddAAGE
UgYADQL1f1yBsbJdTk6OKtkAlH+zs7PBMY0rOYNiFIRBngIFFMiDoNQBKgNAM+CgIRfQcAxIP6hX
DCp7YAUqaDjHxsbGAJgdLuIrmC0tLa+tXLlSA2Tew4cP/8bFxXE9efLkH0AAYRSCQMWKBw8ePG9h
YcGPb5qeGIBtZRhsNh00/gByfG1tLcPSpUvBMd7f389gaGgIlgOpA2VF0HAAqFMMWo6Eq3967949
UM2AtUD08vLiAeK7QHvEQOtjgCmcAeh50Ey/FjDQHwIEEDbzuCQlJVNB403UBKCRPNDYZEZGxn9g
coePc7W0tPwHDc6C1iEBYwS8aAlkN2jgFbT+CNuQIzoAqQOmtG5YioZGKouTk9NP0FgodNnR/zlz
5vzfsWPHf2Dq6QOldCAWAQggbM1NXv9Q/9OggTpcq6tIBaAx1Pz8/P8bNmyAexxkPmjFJmzBJciB
oOFR0BQ4aMUWSA/IYyB5YsZtQdPpoKk0qOfZHBwcnoNGob/+/P5/2owZ/1tbW/8fPXoUZn8CA2Rp
HStAADFCPS0UXTbt3uM/FuDi/8+PTwzavNcYeqqiKa4ROjo6wENtoDF9cHe7p4ehsLAQnMRBox+g
/A5aeAIa+wMlfVAyB+VzUHIF2Q0agCSmrQHKVsCa5AGwR6QBbKeI37x585S8vLz49bt3GKrLKxiE
geYBszaoIAWtGQCtKboIDKz3AAEEMhlUglrCPA9OOxy8DCfvsYCn7EFTb8QWhiALlixZAsqP4NId
BCorK1GW9IAKO1DeB40zg0p0EBvkeJA9oPwuLi4OXoUDaj0SMyaF3EJUVFRUAJZhFgcOHlwtBiw4
rty6yVBXVc1gaW7+e+bMmX/v3r3bC+0qgpZ1fgTpAwggRqT2gI1D0en9/xgglv78/JIhy/kPQ5i/
C96JM1DVBmrmIk2OMVhbWzP4+vqCqylQTIPqeGDeZ5CWlmZ49uwZeGAdFLigwACV7KAaB7QaGDTo
CjKLnNoHZA9oDJWNg51BSECQ4cLVqwz1wALWztr61+zZs/8CU0QtdLIe5Pn3oNVKIH0AAcSI1iYw
DClZfOLVP22Wf39/Mby7e4hh98xo+FJlGAAtS9q5cydDQkICQ1JSEsPcuXMxqjVQqQ6q0kDJHJS0
QUkd5GlQAIDm0UClOmh0GTTKDKriQDFOnsch9j14cB8YgIJAs4QYTl04z9Bc38BgbWnxa+HCRb9u
3LhRCvU8qCv9GbnlCBBAjFgKQZXo9MwDj7lTpb69vccwr1gNPEkAyoegUAbFKmhcHjR5gJ4HQR4F
5WVQsgZNEILYoCYrKOmD5EGBAqveQLEOzKPgFIArqROaFgbJv//yl+E2MKmrK0sByw0BhqOnTjK0
tbQymJub/dm6ecvXUydPlgGVnoZ6/gt6sxkggHAFuZStrfb0f/oz/ER/n2GY1x4PLpSAfQWG+Ph4
lGQHimVQIQZqtIBiGDSHAAKgGAU1YEAxDcpCIE+CYhjUgIHI8eCt23EtDQItGP/4DTRI9h/o+X8M
j+9fY7AxVgWaxcmw/8gRhq72dgYfbx+GbVu3MWzbtiULmudB81NfsfUZAAIIX5oDNdviDCLm969s
tGJQVVVFSaIgj4Nmd0GFGSjGQYEBKshAMcrLCym9YV1gSlqUIK0/gb3+Lz//M4DWp3798R+ezR7e
vshgZ64N9vzOffsYJgA7UmGh4cDGzg4GNQ19hlUrFmfcuH51KS7PgwBAABFyGTdotqp76vIZWQl+
DLDF4aA5E5CnQRjkEJDHQSU3SJ4a3WOQp0EDvp+BMf3l5z8wm4kRkez//vvL8PzueQZBXlaGA0eP
APM+L8OqlasZEmPjGLZs3sygq2/IYGRmy8DPx8NgYaIjBKrucNkFEEDERA1oPX7Z06fPakEzVKCY
BuVpUOEGHY2k2mDHT6BHQTMhn779g+yLgI3GM0JWwoGG6n//Bub5GxeAofCDYdf+feAIuHDmLIOn
pwfDWSCtpaPHYGRqzSAjr8bwl4GN4cal4/uC/ZxdYaU+OgAIIGKiC7SbYQ0wf9eCCkBQnoUNhmAL
TZiDiVmKBFL3DZi8P4Cm84Aeh818gD3MCfEwaECcA9hS4WJnZPj2/Q/DjZvnGVgY/zFs2buH4dfv
XwwXz55jcHJwZLh46QaDpJIeg4qOLYOEHNDzzFwMX4Fm/+RRd4LORTzC5gaAACI2c/L7+fnX9U+Z
W8TOLcjw4w+ou/of4mFGREiCVheCkuq//4jQ+AddrffnH2Q66ecfyLJDYIUAXob4H+pvUALi4WAE
eg6Y74CeZwZng/8MXGyM4MV77z7/YTh/9igDO8tfhv2HD4Gr1XvA7rGRgSHDk2cvGIRkdBgUtKwY
FJWUGV594WC49OgfUA/QTqb/DNy/b3+fmGcgCEwFP9E9BhBApJROVnM3Xz0qLq0CXiXIiJQn/xNZ
bRGq0hiQZp1AAQlis4Irib8MX5+cAmaDfwyHjh4GN3hePX/BoKWpxXDl9nMGRkEDBhZxCwZeEQUG
VnYuFHMFuBgZXr37yHB6frD1mmVzjqHbCxBApJRYd989vnRDSFRagxVY2KE7GBTTyEkfFOuw/QxM
0Lk1ZmhJxsQEmnAEBiJUzz/QfA+QAZoLBPFBMQ5SCp4P+veHYd/ayQyeThYMf5mYwY2mA3v3MTjY
2TOsP/yYQVDWhEFc0pyBT0SRgZmVA6wXZIacMCODtjwzMACAfY5ffAy2SvOOamqqg1IBysIkgAAi
JQWAAstj54n7m+VlpRkYgWkWFDl//6PGPCz1w1begqZsQetLQROczEAT/v+DrEVlBq3EBQYEKIBA
+kFiIDlQVmAH5oPfQPY3YPE/d/kOhsnNmQx1dXVg80FtClC12j5nP4OIgjmDsLwZg7aaNIOGFNCQ
318Yvnz9zPDtyycG1l8vv+/duvzaxg3rQas+QbUAaHpwKzAAUMoCgAAiJQWAZl1u/Pr8+g8zgySL
IBczMK8ygh0LCvHfwJAANVJAc9pfvjFA5rGheZyXG6IOtCKNm50RkufBSZwRHDi//oECCVHPP3r3
n+HOiz8MB84+Z1hbG85QX18H7iGCWqCgwY/mrml/OX99eHp40XzwXOcGYOucAbJi9DFopQJ0dgg0
PQVqlf3CN2gKEECkVtrPw/0ds05cuDdLUkiUgYMFMl0MSqqgCdsvPyHzp8CaiuEbkA2a0P/5F5Sk
/4HV/IUmedA8J0iM4T+wifz7BzDGvjB8+/aJ4dzNTwzs/94zPDm/+sW61YtBnvsI7G+EgjwOnnzT
198DigRo6w7UvH1M6eQIQACRGgBfv337eu78rZcMz77xAz3EBE7qoOQNbqBAS3w2pn/A0vwPA/P/
XwzMwGT5++snhi/AfsGfH58ZDhw6yrB+x3Fgl5uPgZVTgIGVS5iB+/3Gu5cu3JwH9RhoNQson7J2
dnbeBdXzISEhi4HtjtfQadhz0Jj+RslwOAwABBA5zbYH146t26wYmuoL9D445j59+szw68fn/6eO
7Hy6ceMG4R9cepwsQM+BVlqz80owsHELAz0rCB5nYGb1YNDx8WDY32cFmmXdDp2yfwdJF4iZ2ebm
5sfAzpNQbW1tFTAQQJ6/AO3QgJL2P2rNaQIEECOZekBp0hwaU8+hee49NM8JGBrId/E6rY9Cnbo9
9mtlX04xkAlarXWbAfsKcGjjkq0F2NfwgY75g8y/Ah37/4U0j0GVAAAIIFpN/4A6UmG+gS6pm9fv
6YT2xZ/CJvMJuIcX2nKTgMb2I2gKQVljQa0AAAgg+s9/EZctWaBu+w1uBWHZQkKtAAAIoAHfPzDQ
ACCAqLZ/gZYA2PsUAbYDkoGNoOi/f/+elJWVTaNGDQACAAHEMtg8C+xtGgMLQdCiiRggzQcaPUKe
hn/06JEukCqE9lIpBgABNGABAJqvu3v3biDQcwlAT/rCutggjG+YDDTSxABZ8U6VAAAIILoEwMeP
H/nExMSSgZ6LA8asAciToAFUEE3qcBmoSbx48eIcYACWUSMbAAQQ1QOAiYlJFxijoJUksUAsDPIk
aGaIWitJQJ0hU1PTPCAT1Dv6Tql5AAHEQkkSvnPnjjfQk6AkHAwaFoPlV1JGeokFt27dAk+ogNbw
8/Lygoaj+KkRAAABxEJkEmYXFRVNAXouHpgHTSlJwqQA0CYl0KoRkB2ghROwgxdAg7DA7rEDkLmC
UjsAAgjbIil1oCfBSRjoQUlYrJKyu4oSAJo8aWxsZGhpaQHzQVsI0GemQJMs58+ffwjMCqqUrhcE
CCAWpCTNAUxi30GW0XLlFz6wY8cOhm3btjFMnjwZfMzSvHnzcFWVoBkqeSATNKH7lhI7AQII2aeg
c2zuiYuLKw1EAGzZsoVh1apV4CU1oO0xoAlW7GOHkAVToDkIYKTJUhoAAAGE7NPfu3fvPmphYaEE
Svb0SOqgPX+g6TRQeQKaaAWtCwJNnoLmH5ABbP0QSA+IBgFQluzs7GwGBkIgrsXoxACAAEJeJwjy
tT/QASvRHUBNcOLECXCBBtqLtGfPHlBehssdP34cvBcVOa8jzzCDAgB2lgGoOgTNUqurq/MD/fCJ
XPcABBByCgD1tV+AJjexLW6iFID6HKBtp9XV1eApcdBiSBCGrRwDeRQ22QKKcdDsE8gtoKl0EA3b
5gqbbAWxoatXBKHdZrIAQAAxIXUvQa2qt6CQhiUzagHQMlfQFDtkp9l/8PrA9PR0lGVzMM+D7AZN
qYNmm0H79EAYNgcJagaDWoKgQACpBwUAtFVIMLaAhWrUokWLDgL9aY4sDhBA6BN6H65evXoLFPrU
AqB1AKB1AiCPg2IcW6EGS+ogj4PUgiY+QB4H0aDkD/KwsLAwGIM8DZtuBwUItFWId+93dHS0eEBA
wJKmpiY7QUHBE8AI2QvN9kwAAYRe3H8BFoR73N3d1aCdDoqAoqIieAceNgAr1JDzOCibwM4mAk19
gWIZ5GnQuiHQ8hlQ4Yw8CQvKpoRahaBNUubm5mdiY2MZQakIlJ02bNjgAKxiQWXeL4AAQk8B3+fO
nbsXtsiBEgDyFCipowOQI0ByoJjetGkTfAuqm5sbOGa/gIfIv4E9C5qMBRXIoKwCksM2Aw0KsNLS
UmtczfWqqqrslJQUGVD75syZM+B5BWCqew1tQzABBBC2ITJr0M5wYtbmkQpAawFBB/2ATjg0NDSE
rxlcunTpf2ANAN4MDtoUfvr0afDiSdCCSdAebXxuAZkJVP8AVDOiHebH6Ojo+AHoafiud9CJksAs
AFooCVrmAtrfww4QQOhZAFwQghYoApMoIzUbRKDkfvToUXBDB9S8BVV/oGwG2g8Mil3kTRWwKXhY
aY+vRsLWKgTFPDD7cTo5OfGDVp/DaiGQWba2tn+AKQ+U/ME9NoAAwubDj2fPnr2sqqqqR60AAOX3
9vZ2sOdA+5FB63VBHZu9e/eCCzjYNhnYchpYNUxsVQzKBlpaWjJIrUIWYEx/Bq1MBwUkyP5jx46B
p9aAnmeBDuGDlsT/BQggJiztAuH1R1/pggoiaoH6+npwPgYGLNjzoCoRdAoYKO+DCjdQSgB5HpRK
QDUBqAyANXyIGbQFLcbq6ekBtQqZQbEvIiIiYGVlxQQKmMvA7NDX1wcuT0CbMYABAjpXCzR/CFpY
/RcggFiQ8j5/aknt6Rt//FTu3z8CdgRoZRilDSLQSnBQSQ7y2Pr168GrQUHJH5a6QJ4EeRbmaVBs
geRAKQPkMWJSISiWgS1CXwbI0To/gO0GXmDT+j+wgGW8AWxunwQWfpcvXwY3xIDgALTh9BPU9gEI
IJDp7EH+phFvlGcuuAk6ahQ0t8clzECNmgA2fg/q2Hh5eYH5oPodFKiw2AbVBqDVZqDSH9bYAaUK
UN6GLa0jNhtAW4VvJ06cuFNbWxs01cYMOnOEA5j3QRsugI2obUB73kHHE8GNHYAAAmUBaZDnUfIB
Bx94Cz65LULQ5ghQzIMKPNCZlTDPg8RBHgJVg6DWHSgwQNUSqK0AW2oHinVQYweUAkhZfAVqIQJb
ernAVMsbHh7OCyxAmUELpa8A+x3A5P8PtBwf6HnQ/CJoiu0jdMKFASCAQKb/lf4w4S5KALDzMGw/
fBW+EozYer+hoQG8Ehy0aBpU74I6PKBDtkAAtBECtDQW1swFDW+B2gmgDg2oMQTyAKjBA8qroBQA
a+2RMlYITPJ5EydOOAEMBNAJUwwrli1nEOTn//fu3bs/wOy1kgFywM8TBshyWfBkLEAAgQLg+fJ5
i3JR2sesHAz7zr5kILYgBK0g3b59O7hkB7YkwV1bkMeBTU+4GlAfH2QeqMsLqglAGLScFhTDoGUv
MjIy4PIBFPsUDLWxOTu78gMDjvEysNq7dOECg6aGxl9gO+EfMOBBjQNQRIMaQfCYBQggFmgv8Pyz
DS6zpAL2pIEDgJmd4c03SKsMVHrjcgwoX4GWvIPorq4usBjIE5mZmaATVcHJGORh0EgPKDZh/X1Q
OwBkNii/g2Ic5HlQjQDikzugCuq/fPzwkUFdQ1UAHPvAvC8iJPQPWPiBlsnPYoCsOwC1yz8hrxkE
CCBYEfv6zr13C1UY36d9+w8s+ZmYGVi5BOEnJWELAFCMgo75ADYzwckX1MgBVj1YOzqgAAGtJwZ1
bkBJHtTFBeVvUMyDltCDaEo8D1sqr6yizMDMxMpw7vIlcOzb29r8mT17zl9g9gR5/h567IMAQADB
ShhQgXBtR69zFkyClYOf4cWrd+DSGh2A8jiwcwH2PGgrG8hDyJ6HrRwHeRoU6yBPg2IelFpAYqAA
BVWxoHY+yPOkLppG9Tyk/Ll29TqDsAjkAKmVK1YyiAMLvqtXroEKvwnQvA9aZ4CxZhgggJCLWFDJ
eEjm/x5w/cfMxg0sCG+ACyhkADpHy8HBgcHPzw8c8qBJD2SPg1INqFoDeRZ24hBoTB9U2oOGv0Dm
gTwMSvIgz4NKfWI9Dzsi5z/SyrT///8xXL/1kMHU3BQY+8wMJ4CNravAOl9ZSfnP8RPHvgMjELSc
5inUfxixCRBALGj9gPvL+ssS7YrOrQKtudt//jVDMbCBAkriIABqURUXF4M9BurnIydB2AgOyPOg
wg1UrYH4oNhB3vEJ8jyoXAF5HlR342vo4GsEgpfZAhPzu4/ArvOHVwyCupA5g1WrVgIbRSpANz74
//DhoxkMkPVEoKSPdU0RQACh2w5qH58SfFh56p1cq9kPBl5wCw12YhrI8+hNU1CsgzwJ2zMASvKg
LAEbVIHtG4DV56DqChT7sH1BsPKFlGUKoMWQn779B680u3HrDoO5oQHYnIPA9v6N6zcYXBwdGRYt
XPgVmA3vI8U+1kYNQABhC/5nG9furLIrat8DWtgEC4CAgACsngfJg2IctG8AFPuwsT1QXQ7yOKhw
Q97pCRvXA22YAJ2LRIrHQWEFOlEEVCyBzlB59/E7gxDXX1grkGHdmjUMOsB+xof3HxiuXr26HFrn
g2L/O66JVIAAwnqUDRBfvrnYsg/UInzw+BU4NkE9N3TPg5I4bPcXyPOwOh20FQa0cww0IgTZECUN
LPCkwBh0JhPoiBtS6nrwThGgq95+hhwm9OYr6Iyfvwyvnt6GT5ftPngAWN7cAtqpwHDrzi2GiKhY
FwbIAspPuGIfBAACCFc78+3L1z9XiLC//rfj6C2MghCUl0GFHSjmQUkeVOKDyglQlQYa8QUdmwTy
KD+/ADimQakAlPRh/X70EyLwl3yQWActp38P9DhoPwFI56+fPxhkRDmA9nKDlW1Yt57B2MgE2MZ4
wSAkLMVgaGyuDh0m+4FvGh0ggHAFACjEbm6bmph18OI7BuTd5LB9QqB8Dsr3oMAQEBAEexoUw4KC
QuCkT+kmCvDxLX8g55KB8vubL5DVprDU9+zBTQZFYGCDwLbdu8C1jaqyCrBtcoNBQU6KQUddEWYM
3g4NQADhcyGov3xY7P+5j7ByAGL5P/BpX9+//wB6kBnsYVC7H1SwgVIBufv+0D3/Hhjm7z//Y3gH
Su7Qw7pALgDvRfj7k4HpzzuGNevXMazZuJFhy6bNDJampuC+B6htISElx8AvJscwceLkIEJ2AQQQ
E/7Ex3Bvz+bV9aA9Qoj6F3LSGMijoJIc1JSFleiUbpuB7QoDndX0DohBB4jBAgTkcdDaZEHO/wwf
XtwB2ivOcA/Yuzx//hywN/kQmPUUge2N2wzSMrIMmmoKDGqKUgzWdk7VDJDzgHACgAAiNNoA6g2d
BvbZ/wGTHRMs78ImJUB8ap0jAj4DEJTXf/4H7xBjZEDsPwB5nIcTkqq+ASv/r5/fMVy78YiBA1i2
3Lt7l0FPW5fhDpCWkBRnkJFVYmDlFmf4+IMDWKfzgsb2QX2Dl7jsBQggYqLs/smTJ27Beoaw4/xA
+RxUqFHqefAOMWAZ+/LTf4ZXwFj/CvU8eA8B0OMivEwM3KCTvEBVKPM/hif3L4PPtr4PbIx9/gRs
agNbl5Cjyd8x8AnLMnAKyDL8ZeEHNvmYGPh4eBk8fYK08dkPEEDEBMCn/Pz86aBSHzYZARutwZXk
GdFoXB4HD0EDC7kXH4DJHZjX/0G334D2FIE8DtpDBB7t4WIEn5cGKndAVfI9YJJnBAbC08dPGJSA
SR80qvSHkZNBXFoR2BsVB3qcA1hjABsBn9kZotPK5iGfIIcOAAKImOgD+fwmrCBkgm57+Qvd3QGq
n3/9hZQN4H4T1GNMDBB5kEdBu0ZA2yQgZ7RDxL8CExSogANtkGCEbuIHJSbQ9jiYGlDMgzZn/QTW
/Rxs/xju3boMNucBMPa/f//G8ArYyVIwNWN4+OQlg6qaJugweYb3f/gYPr1hAqaW/wxvvzAzHLsn
CBoyB7XlsZ7hCxBAxAQAKPW9ffv+CwMb31+G33+ZgB7+D94OA9shBk4I/xHbZUD7BpihGynAaqDi
0L0S4MBhYERsjgKZwcsJ2jEG2VnCycYAPikRZAc7KyPDb2DA3n/2DVzt3n94H7xd7svnTwyyMjIM
n4DVMTsnL8NvLjmGrwzAzhUjO8P5+8DAevGPQYyfkeHHf3A/RhRXAAAEELEZ+M2TVx++cQn/5mJi
YQNvZwPV0d9/gUptSEkN2hYHa9b+Q/IYOCCQdoGBNleAT2UESjAz/gemCkbwJguQmSAZkOf/ADWA
jg/8/x+y2eoHMIk9un0B3Oh5/voVw3dganz98jV4y9yFG48ZZJR1GfiE5BhOPeIBFn7/wZuyQJEC
akCxsnMzVE7YMQWYDTyxbZ4ECCBiA+Dz0SOHD//h1XTn5mFBKaFh9SUjUqZnxkHDCjdwfQ4MOFCb
G+Tx/9CtNKAN0UyMsJ0nEFNBSfnPr+8MrAy/GK5cv8vw9dtX8F0+wP4+OEX8ZRFkePhNjuHtMyEG
Lj52BuSeNShSmJhZGR59l3GDDpljrC4FCCBiK+5v3S2VS379/A452hLN88g0uTUBAyx1MEK23cBM
/fkb2Od4fpHByMiYQUpWhuHLx88Mn4D9DkkJKYaDl94xvPkry8DGK8PAzsUL7lyhjx0oSTAzaCmA
F1IIYLMbIICITQGgOvDZD2Do//uHKAj/w/sGiC2zIDbIIyAOEzCJs4L2D4JikhGRD0AeBJcTwFj+
9x+0fxBC/2eA7CeEtDoh+wh/AmP/9+dnDEdPfAGP+LwGdr6kxCWA3fPPDP/YgP0NUXkGDl4RBmYW
drCHQQWyALD2V5dhYlARhxROHz9yMJRV1oFOjcG4wAgggIgNAJCL3v0COgYUAP8ZIHe2gU5VZQWm
bw5WUOEF3SzJAKEhSZ0RvgEStDHyP9QkRkbYRktGaEqCjQkwQrMSRBzY+mIwt5QDjzBPmTKFYc++
fcBW3wMGG0trhgVbrjAIyZkw8AjJMDCy8ALtYGJQFmNk0JAB1h5sv4F9lW8Mz55+Ae8h/A10t6GJ
RRKwHFiKvssMIIBIacW85f7/7r2UwB9BLtDmRQZEgYdcXcCqSPDuT1BMQ/cQM0PTOQsrKFVAPAra
LAkrI0ClP6g8ZWOGlAkgvadvfQMPxIDmC0BrB0FrCDrbOhhWrFjJwM4rycAhoMDAySvAYKr8l0GY
8w2wlfiF4cndjwys/78xPL5x7OmCOdMuALvqL6HjgfuxNU0AAoiUAPhyeM/6rRra+jHAJiB0Hz8D
tIUGSQnswBTBww6p70H7i1mQCiRQbQHaHg9K6qCzekF7C0E0qBAE6f3C8B+8hZaXiwl8aPCdFz8Z
7jz4AG5xgqa4QR0t0PgC6Hqlg1d/MEgr8TIYyf1iEBG4B+z1v/uxevuq6+vWrgbt/ngHHQR5Bh0Q
gW2kfMuAtOkKBgACiJRuG3gZ3ZHzj1cqyUuBW2mgqgrkSVAqAHViQB4CbZz8Br0h5e8/SMBwcUCy
BUg9JysjvO0AKi/AbYr/kOYw6BDblx//MTz98B9YwH0BtvNvMTzfUwjOAqBRaNC9evWds4F5/O+X
OzcvX3v58uVd6JDXS6hnn0E9C5v/+0FoDSFAAJGSAsDL6MR5fgA9D7l35jWw7Q5L6qCmKg/QgxIC
TPCNlKBTtsG7SH9B1IPO/PgBPRIYVEWBYh+8dfYvpCT9++83w5X7Xxku3vkMLPheMDy/vpPBztaW
wcDAADys3tPT8/Lq6ROg1SCgbXTHGSC7SGFzfQS3yWIDAAFESgCAC8L7z78x8IkAW1kCLPDqCpS0
wUNVn0FJ+h+4kQQ+9/gXxHOgghMUUKB2DbhQBLXy/oMkfjH8+Qls4X36zPD+/QeGs7c+Mwj/Pv/r
4vq512/evHFfVFTU0ts4Txw09L5ixYrXJ06cOMgA2T0K2gYP2gz9gdJNEwABRGpX7u339w8f//uj
Ivv8HSPDp++QZP/tNyQ2QdkA3A+AlvL/oS3Bf/+BFSEwdln+/2D49xtYlX7/zPDzG7Aa+/WJYee+
wwxb95xm4AU2V+7dvAHaGwyawQUdjfddR0fHH3Sjwfnz5790dnZuZoDsMgXtGb4JneKieMcIQACR
GgDfZsyas/KfiEUJOw8rOPkyQmMV3BZghJzYwMr0D9xyY/jzneHXj08MX4F1NqiEfvboHsPaLfsZ
7j5+ywDeWsspyMDHy/b/9c1ds4Gl1mGg+WegJfYPAQEB5cbGRsZp06b9XwEq9iGevgid4PxI6C4Z
YgFAAJE6dgXaNOC9aNeD9YIiUuDSn53lHwMHy18Gpr+/QPuHgc1U6F5iYCwfPX6cYe3WowxMrDzg
uUZWLiFgo0WCgZVbBDz19mZ/4pb7D94vgObnl0jjd+zd3d1P//79K1xVVdX379+/N9CkfxVawv+i
1sZJgAAiNQWAmu+v+FiAeZX1HTAZfwG3yF4Ae2QfXj/4smTBrKe33/Gpg3eFg2KYR4xBTCcQHNMs
QA8zs3ExMLNyMsh8nf96+ezp5VCP30ebsGRUUFAIAhZ8wgsWLHgD9Pwz6MzuTWgJ/4uBigAggMgZ
znnH9Ovjr80r1jyfMqnv3OfPn99CHQaKwe9WNiZ5v1VTtUCHgIM8zMTMBll3AxrYYHr+f2uPPWi9
LCi5X4OO2aPnYzbQjpXU1NR/Dx48aIZmidvQ+pyqngcBgAAiZ/gWtA7HEUrD5t3eQj0DSiGWDkWn
d8NOpYOB55vc1t6+82YBNCm/wjFcDXKPILD0f/r69esMaODcgKaAj8iBRa0sABBA5G6f54Y65gcW
j4iJi7KXq8ceB19kKP15Nr7kjg5AoQZa3qLHANmBDkpZ16EzPDTZPQ4QQORkgf8M+Hdtvnn5+udy
B6Yz2St70uoIJHdsgcsEbdR8g3r8HaHJDUoAQADRas8baPBBCuqRVyR4ABQhoJlOPqgeWAsPYySH
WikAIIBotTsKFHt3yND3F+rhn1D2b2yepyYACKAhcb/AUAdycnKijx49ej0Y3QYQQCyj0UM6AK3H
vnbtmjILC4sRKyurAZDWB2I9ZmZmGdicEQyDJtJfvXr1DKgHdO7MF2pdjEEtABBAoyUAUqSeO3dO
gIeHxwgYmYbAyDMAYm1QBKNHKjImtBAAlABAa+MuXLiw2MfHB3SI0FtcRzsOBAAIIJaRELG3bt0C
5VBDJiYmUMTqAyNVF0gLIkck7Exv0GwvNc4GhfedWVnBK7iA7ohNTExcNH/+/OMMuE+QojsACKAh
WQKAIvXSpUuyXFxcoAgF51ZQEQzEirANV6A1C8gRTOnRvpQA0AJR0EIy0GHv9vb2oEtAX1Cy2Zua
ACCABlUCePHiBbesrKwhMKJAOdYYGHF60IhlJbcIHiwAtJwSVBVs2rSpMD8/fwF0RG/AT/ACCCAW
euRWYMrXBOZAI2h9qg+NVDH03ApaV4pv0c1gBaDVkqB7aEFHzYNORgAtJAJdLADaJQUazIdt8QZV
BQ4ODqA1a6C77b8TGBWhCwAIILJKAFCkXr16VYyNjQ25waQHjGB1WKSiF8MDWQRTC4Dm60ERDTrk
ZNasWeBiHQRAuz7KysoYsrKy8N5FAlpODyoFzp8/v9TPzw90ENqbgS4FAAKIBUcEMwEdZgX0TBe0
e8MNikBYhMIaTEOpCCYXgJbdgnbAga4HAR3tA9rjCA0j8C455CtFCAHYwlJtbe1oYINwwWBoEAIE
ELYLpkDlL9CdfPLHjh07JSEhwYZvx9xwBKAtD6CDbUA5HHTQzbRp08DioP0dkyZNYkhOTh42DUKA
AMJW2YKPcv/06RPfoUOHzoACA3mR/HAFoJ0HoLr88OHD4MifPXs2+EIgUOSD1qWAdiGAGnLERD7s
jCvk01FgJ6SAxEElgbS0tFlvb28YKLMRc/YNrQBAAGFUAdDbFkFj2R/WrFmzzc7OzgpU5JO6g3mw
AVAEgLamwzYtgiIHtp0FtMIatOcXtBES+bQX0EKkAwcOoOwLxBXZyPcEwjCIj5xxYGMMoHAEhSew
BADdvQVa7AAK7x8DES4AAYS1EQhdUg46zVl33bp1U42MjBRBy9PIvQFqIABoExeo/gZFOqjeBh2b
AzqlDrSkFgZg13rBLnSD7YkE7X4D7XoH3XyJL8Jhm0BBy/ZBCQzGhsnDIh3WIIadxQhiw+5ZAzYq
lwUEBBQMVIMQIICwNgJBdRIwEYDmPF/vBQIgO4Uep2ZSCkCbVUFHkoAiG3RoBbCnAt6Z6+PjwxAT
EwNeYAra0kRO6x+2+R1WpIPCA4ZBRTss4kEAFOGgiIZFOPJBnKDIh+02Apmpo6MTBWwQzqd2g3Dt
2rWcoqKi7MCq5hOwe41z6BkggHB2A6EnCkoDsdHRo0cXKCgocIP2gVJ68ygtAOhIBlCdDToXBrRV
s6CgAHwIJ7nXQ8KKbVgRDsvZoMgG9fFBbQFQuwjEhm0DRz7uCdTnh20PhR0JBetBwbrDsKNjqN0g
BCb2TBMTkwZgqSICtBvUXf8PLOm+ysvL7wOGU0p8fPwbpJ7eP4AAwjcQBFqAA1rF8BLYGzgOTE0u
oAClxk5IagFQfzw8PJwhKCgIfOwsMNWTnKtx1eEwNqx4hx1uBDvgCLYdHnacJag9AcPIEY9vDASU
KEDVDzCCzCZOnBgCGiEERgzJI4TQRiSzoaHhNFNT04S0tDRW0FlFoARpbm7OePnyZV5glegPbNR7
Aruuuc+fP58DLf1/AQQQzgSA1Bh8O2/evA3u7u4uIM9TY28gpQCU86ZPnw66bIXo3glyRMOKcljO
BuVoUOMPVH2ADlkDHfEFakMA++vgfj6ozw/SB0oIsLOUQIEL6haCIhA0wgeikTeDEzvwRekIIbTb
zqaurj4TiKOAPRcW0PEloMQHWk8Nqg5B7gFWMaBRSmZgKQ6KPNCarp+ghAMQQIRcCFukpbts2bJ+
S0tLLVBjEHa501AYuUPvjoGOKwIN6IAwMEdgRAao8RcREQGOQFgVgNyaB+Vo2CEYoEiHnQkBuxSX
nBFP2AghMLIWBAYGgq5DfUtMKVBYWOgKtHtzZGQkE7AtwSIpKckIa+OAGr6ghAyK/BkzZoCOdfoH
jLsZwEQB2iUEWqYFag1/AwggQlkZtkTpNbD+2KOvr68FKv4Ge28AuZUOammDLgIGHfQDO10I1LVD
BqCtF6BhXNg5+MjTwrD6GnY7Kqw1D8LI9TslQ92wEUI9Pb0EYNtlKTC3HoN2DXEW90B1icASaFZU
VBT4wCLkk59BpRdoAAvU4wF1e4Hs/8CcvwMY+aCl5YLQOAUfJQEQQMRuFH6/aNGi/cAGRAzQoUKw
Ix8Ha44H5XSge8FHRoIOMwDlVFBggA43gwFQ9w90tT3oiEnk1jm2xho6Rk4c1JjjAOmHHZYILIFa
gQkgGBjRv9AbhNDinhWY42uA3dUaUMIFlsrgHWQwAGpUgo7wApUooAEs2CAUMOH/grbrQBjUgAGZ
/Q8ggIhJAH+hgxTf9+7ddUdGRsYMdizOYCoFQJEPyu2gIzJBI3pKSkoMLi4u4HvPQUd2woCWlha4
iwg63AU54tFzPKzIhyUqbA1HWDVBjURAqEEIOhoUSLED3T8Z2F5JAs2cnjt/Dlx9gA6rAw1agfwD
mo0ERT5oTxVo/gJ0zzuwRAB5AHTkD2j1PmiZ8WdonP4FCCAWPEPEoP4eb3SUY9J3ibjGx+/52bdc
uckQ/AXSBQLVewPdGEQexgWd3Adq9IC2E4HugwYV+6BzHGEAJgY6zgi5yIZFNnp7AUbDqhPIMQmI
kTzkrh1swIfSRICrQQiLfNCwMTBidUE7Bt3c3cEnBcDAnIXzGXZs28HwH+hufz8/8FZK0IWcoHMj
GSB3z7yGRv5HaPUCKmH+AQQQC1qDD3QcOV96VlTPE664yM//xBgfg5M+sJ/L/h6YJIQYdh+7wRDq
xQ926GCYDQSdXgg6px6UIzw9PcFFPbDHApcHRdTmzZvBp5gin0qMbSQPNl4P4sOuhocV0bC6H1T1
wc7GofbQOMhMUCkgJCQksnr16srQ0NBKYOSDimvQriye6OjoBGAjUR7ov19AdfABmWWrVzKcPH6S
4Sewd8QDrEZAp7aCzssFRj6oNzEJ2p0HRfwXaKICVQPgYg0ggFighvNlFOVvvc0YZv77Pyd4LwbK
anRGoGdZOMA7nvaeeczg4/gd3BWDtXwHCoBOZATduGJrawsu0svLyxnOnTsHlwctygDleuSiHHly
BuQHGIaN6sFyP6zBB/Ij7MZ45HO/kBt/1FzrAGsQAtsmKcB23kpgewB0KSMjsHuXAaz704ENcR7U
yF/NsH/vfnDjVglYDYAOcAMd0yssLPwNKDaXAbK95hU093+CFf2wqgUggECuBu3ikOHnZfA0Sz00
4ScDD/Y69t9fhl9f3zB8fX2LoTpWjcHKRAvc+KB1WwCUw0ErbUBHboP666ChXdAxvLBxfFBktbW1
YbTsZ86cyZCamgofwkUfyYNhUHUGy/WwuhgUwbBGGQzDunrIgzu0WrkEGyEERtIpR0fH6IKCgqT4
uPhYdQ01EU5OLvilEUtWrmQ4CAybN69egaq+v3///Pm7b98+UAJ4B9S/mgGycRx0bCjo0FjQLjvY
2el/YAkAIIBge5FAFzEoA7FVQMma/nf/lLD67M/Pzwzf3z9k0JP4xFCR6gzueoBSK7USACiiQBfK
gCIblENBkQ27OQPWvQEdQwqKbNDp+/hGCIODg+E3dIAiHnYvC2jmDzThA+LDjiyHHYIFq4NhGDni
kXM7PQDIraA2zbp168+7u7tJamioCXBxcXPAjnpYtGI5OPLfv3nLoK+n9xcEDh8+/P/OnTvPnjx5
soEBcnrAIyh+AY3878iRDwIAAcQCLexBRwmBtiD+2tATkhhd2Dv9MaMjF0YdxcIO3uh66tZzhvcf
P4NP6oKd9E0uAKV0UGSCIgY0IwdrdIFGrkCRTSoA3UYGGhqG3U4CimyQ2aC2AYgGBSzsOFdQZMKG
cUERDvIPKMFRY3CH0owAsvPmzVsMAUGBhlISEr+Bkc8Ki/yFy5YwHNh/kOHDu7cMBnr6f4DhBY58
YO/gLjDyQXUeaFMx7MzcV9D4/Y5c9MMAQAAxorFBZ8yBLunSi80unfaQPVIErSJg+P39A8O3t/cZ
fI2ZGeKDbMClACgRkBJAoFwJGnYF5UJQZIOOogZFHHK/GHTlA2gWz8PDA9yyB/XniRnbB0UsKIJB
RT0o0kELWkAYFPmg4h5U1IPMh43mgSIcNHoGinzki4oGIuKR7xXaC8zdoKFoSdDIKycHA/QQHYZ5
wAbeoQP7GT4Au7z6unp/gHr+AqtIUORffvr0KSzynzIgbr3+jF7vIwOAAMLmO1B5CxpW0guNDmx9
KV6Lcubevz/AxtLHpwxsP+4xzKzxArcDQDmXmC4hqH8KGmsHRcSaNWsY5syZAxYH5T7QGeOgG6ZA
/XdiBnuQu23I3TdY5INyPigBgDAo14NKA5A62A0VsIgHYViuJ3UcnxaR/+3bd4YdO7YzGBgaAds4
kgzcnJDMBTqAYu7ChQwHgdXfZ2CC1tXR+QN0Jyjn/wOG6Vlg428zNOKfMiC2FYNa/T/xHSgBEEDY
Yu0H1JDfq5euL7K1vZXNaLrYD55imIG5g52X4dNnbobj5+8wuNnxEuwSglqooBwPKupB9yyAgLq6
OngaF7nLhmtYFxY4sEiGdduQaeSLqWAte9iULayBB8rZoFwOyu2g9gUo8kFuh1VjtO7W4pq3giRm
0FHM34Fd1k0MVpaWDBKgkpWDE9xCA3Xk5wJ7O4cPHQAdl/bf0FD/79cv3/+ePHXiH7CRfOzli5c7
oXH2BGmw5wu0r493azZAALHgmQoGpaLfhw9f7VN56vJcLnBb+p//bOAz2ZhZucAHX+w5+YjBxkQd
5xWroGIX1E0DXWUCatiBWu2gC7RA3TNiFl8gT8fC6nQYBkUsSAw5ASAP4sDEQGbBJnBgK39gkQ/i
w7p4lDbuSF0yCTtg8Nfv/wxff/xl+PDpK8Pxg9sYbGxswYcBgop9kPyP/38ZFsybD2z4HmT4A/Sz
vr7e35/ff4HOz/h/7+69Y69fvd6JVN/DGnuwyCe4BxEggPCV23+gdciVO/feLbjTa/HAu2Rr2+d/
kozM4MYgH8OVR88ZHj97Db/2BXmxyNatW8F1OAjExcWBZ97wzR8gT+Ag52RYLobNw8O6bLAEAksw
yO0H5BE62KgdyH2g3I5c38OmtglFPrXWwzJCT46DHbf5+88/8JF4X4F+O3pgK4M1MOeLiokwwHp6
34D+nD8fmPMPHgKfKWhhYQlqzzCev3D+77lz53a+efPmKDDMnqDl/K/ERj4IAAQQMbOBoBQFGhv6
sbXH+1VYybwZLxj0WZnZeRjYuIUYth+5xaAoJwmOGNhiEdDRdqALkUGrdEDXbBAzXQuLdNiiC1gf
Hbb4AjYsCzu2F3ZgM6ENKOgDOrArOZHre1oteIY5A2T+j98Q/A96NvDPv/+A/vvD8PHLV4ZH148y
uLm6Aksnfngm+gRM9AsXLgBHPsggcxMTcHg9e/6c6ezZsweA7alj0JwPi3xYzv9Nyu5jgAAi9tRc
0AgS6MSLX6t6khKj8xqmPWRx42PhEGA4eOEWQ0IApM6FzQ+A7kjEt1ADFvGw4h15cAaW40HisEkY
WA6GDcXCpmOREwGs/sbMzYjZO8yJH9pEPiP0pEDQ6X/gIxL/wU4U/A8+Dfj3b9BheX8YfoCOPr53
msHezoaBm4cbHvnvgI3WRYsWMhw5dJiBCajRzNKMgYWZheHalcsM33/8YRQRFfsKTABvGBAnBcJy
/m9St54DBBAjiWpB3UTQ2S96EQkx3Q/Y45S+vr3LEO8kwBDkYQ7uEYASAa6GFPJwLKyIB0U67BJN
kBhs/B2Wa2Fr7GBj8LBIhyysZMKYxcM35YpMUz3SGSCRDiraYQcBgxMBMMJB9Tz85GRw++YPwzeg
v5/dPgFs8JmDqyYODkj1+PrjB4bFixYzHDl8mIGTg43B3sYe3Ja6desOw2+gefwCggw83FzfWptq
QRsULkO7e2SdlAgCAAFE6kD+b6hln65cuHRGT/qpxncea6lXL54xOJrKwxdCYgtkWB8dlLNBkQ0b
lQPRoIQAu0QQ1B0D1dNCQqA7dEWQWut8SMuuQPdssqHM3eObv0fG1MzlsMOTwcdhgs4J/QmJfNDx
9yD8/Tfk7G+YeuTI//DkAoOdnS3QT6C1FZCc//ztG4ZFCxcxHAW2lziACd3F0QnYTvjDcOPmbWAY
/WQQl5BkkJGRZZCRlmI1MTFh3rdv7y5oO+0XuUvKAQKI3BABDf2BBol0NdQls1g0SvwbknQZzI00
wddooDf2YPU8YnQOMQYPm09HXlGLXEcPto2lIGeAzjr9CT0zHcT+BT75Fpg7oPU7NqeCMwAw8n98
/cLw7M4J0A13DLygYXQWZgZxYWGG+0+egIewjx45wsAJTNzOzi4Mf//8Zrhw8SIw4/xlkJaWZJCT
lWOQkQVmNB5+0DUmf50dLE1AVTMw8r+Q6x+AAKIkVEHtB9DyImNWFobQjPLJSbnx7sBqQAKcU5Ej
DP2CeORVtbCiHrmIH5SRDj/iGMKGHIH8H3zOMy5Xwq5QAB2qyQwsPH///MZw/tR+BiUlZXBiOHnm
NMNnYPX3BZgoPn74wPD502fwnLwtsCsICi/QzCbo2HQxcSnw5JeCvAKDvLw0g7CQAMOf3+CdTmfs
7OxAYzSvyD09FCCAKA1hkP9A65GsgUV34PHjJ6JAq1NAfW1YNwx5QwX6PDusBQ9q5MH64oPhbABY
d+3PP8hBwKCGHOj6hp/QRhz0VHeMiIcc9s3IwAY6RRVYqrMxQY6C//3rN7DE+8pw8exhYOQJgi8I
uH7zOsMXYCkIvnkNiEG3IbABw0JPVw9s+K1bN8CXFcnISDPIy8kDc74cg5gE6IYmEQbQOZzfgI4C
jazu2Lwqpam+cjUwAXwix68AAURpaP+DTjZcBjbmDt27dw982xLyTciwljlshwzyxAtsxo3Y/jit
Ix3cev8LOt6ageEtsFB99ekf+BToN5//MXz4/g/coEPv3oEii42ZEXz3hygvE4MwDyP4PHEWYOSD
unygYvzPr68Ml88fAUaeADjSL127DOwh/GJ49+EdeE3DV2A7iBNY8oEi/y+wSnzy5DG4lBERl2YQ
EpNn4BKWZ2DmkWL4xSLM8PEPN8OPf6wMP/8D21qsPAy2Tt49oOoYupGHZAAQQNQI8b/QRHBtxYoV
20GtetgdpMh9ceTtUrB5dXoMvxIzGgfK6V++Q+4rewmMdNDB7W8+/wXfXQYq6hlhuR0pwvk4IXcb
ifBAIh90ODxIEYgGXfYkxM0ATAh/GP79/sJw6vgh8GAZqLdz4/ZNYCn4h+Hp86cMTx48YvgFDCte
8FpAefC8BehiiJfvvjFwC0oyCIorALE8A+hsZm5+YciFdMCeAeSKIdDN19wMn/7xCkyau6YWNKVC
zi5jgACiVsiDmrGgBWpmwGpgAbC+YgdtI8M1TYwS3/8xXfEfS0MKXQybGhSz/iNu80DhQ5WBDrMH
Fe+gbhuoLod11ZBzN0w9GyvkFi9WFkYUp4IOx2cHi0PYoIQEOigfVMT/Bub8Xz++MFw8f5aBm4sL
XNdfv3mTgRGoEDQ3Arr27BuwMcwPLAVBRTzILlDv4MXbb8CiXhKcIBTk5RkkJaUZePgEGX4zcTF8
/8PK8OUXpDriZmcEJs5/DN+/fmT48uHt31gvTV3Q4g9gVfCNlIgDCCBqreoEzR2A1pw9OXXq5Glh
YSEbXl4eBhZW0C5YRshFGv8gl+OAijbQlSiw2wL+/oXcRAKuLf8zgtWAAuM/9H4qBuigCuwseiZo
1wt2NRMT9Daif9A7msB3VcEqcQZIfQyrWVjAF/NArmT59RvSgkdOM7Dj30GRycHGBG28QYo4UAYH
JQA26A0p4ET0B2QeyC+QhAG+RAAo/wvYL/z44QvDpfPHwLkbVMzffvCAgR2Ye9++e8vwCHT7GTC3
CwG7t7LAbh24ewy6DhpY9ygBG3kikgoM3MKyDF9YJBmefONnYP/PCUyELODBpE/A7ubrj0D8CZT4
/jPcf8HO8P0LO3P5xP2LOvMd/YClwE9SGoQAAVg7gx6CYSiO/4sEkxEjNjvgS/gwvpOvIOLuzNXd
wVEiMTcO2AxLNMO0fR2uEocemiZNmn/z3vu/NP39M/bKByQdMbrT2WJYqdrMrFgiCuTeQLBUlu/u
mxQ6TiEk4oRSR+WmnsTXUUIn9B8/vsRn+kIkTC8yEovIbNLzE7tDYqrImol5hr2RFkponfdTpld6
UdT+6v//D/JK4nFojaGQT1DSeKv4Qc6AikUiTF6jC/beXNU3fhBguV7BKBrwjz48b43zKUTTbijw
Ib9xYSljbLYhypaLmtOG7bZg1R1kClWE3MDulBUpSdOg2AeyZIpaQ/Yf7jwCjw7gy0FvPOpPfikI
XwKImuu6f0CHJJ8+fXz/wR8mXsXfTDwMzMBQ/AfNdX//IW6bQekv/0esQWVEaoVjqMNeY0DrA9S6
AFFF/IcnOlAigJUW/xkQF7/Brv+B3W32FzpCxsyMsBtUt7Mywdo0kMTBBG0cgC6KAJU8v4Fds2/A
Bt2rB8DI5+ECXwVy/fYt8DD250+fQBc8M7wHJgJxUVFonf8JPMDz8M0vBmExaQZOIXmGL8wyDC9f
iTP8fsMPbO1zgOt6ZmgpxobWYgNFvpQgE8PTd5wMzL+B7QiTyBkMC3sMgaXAD/Q7knABgACidusL
tB9bxcTcxj+pdEqdsKgEsKUvAGzhM2EclI5SCqDX4wyUXUdG+8UbiITxF5q6/v35BayPPzNwfzkP
vujuNbBrd+XGVWDk84AHve7fuwe+8lBESBi8FgI0FvL563eG07eB+YZTioFdQI6BV1iOgUdAnIGD
W5CBhY0TGOks2EeVkNzBDE0UX779YPj77Q2DEtOJvtbyuDZQZiRmdBAggKi9swO8m/jMySNnU/+8
//zzBz8vByc3+GozYiP/PzHTskh9cNj9NOCSA6k9ALu2EXZzIUwvuJ6H5nKG/6hdQORr4KCFA+SW
M6g5DFA1sO4TI/RGwN+gCa2v7xj4v19lUFJWAq9aBk3u+P8PYti0eRP4mufHjx8ySAH78ZqaWuD7
L998/MFw5NoPBl4haQZeEXkGHiFZBi4+cWDDHphhQHdtMDJjRD6sBAUJg25aFONnAnY9GRjkREGT
WtwM/37+ZvjxxSQfqHQWNC6+E4owgACiRf8LdNWDenZhdam+fWSgELAU4OLmBde/yJEIrsuRingm
eEQwQu/Y/A9udIEaVsyMSA0+RsiVdCxMsMiFVAqM0DodcicXpEEJa9QxIl1kBBcDLXj4zwC92Q0S
o///wa61Q5q4+Q9xNxO0OgG1ARABB7rf7yfDlJ46hucPb4BXItvZ2QEjn5fh+YvnDAcOHWI4euwo
w7PHTxjkgInCxNgEvD7x8cuPDOcf/AdffccL7ONzC8owcPGLM7BzCTAwA3M+sKUCbNZArtAD9UD4
gd1KcX5GBlEg5mUHNUb/gy9j+gdeDgdZ/AIab/j1E9ie+PGNgf3Hw21x4V5JoLklQrODAAFEi71d
4FJgan/r6lk2gX6/fnxn5uHmAqZYFgZ2ZkhLmgXauobVoUxMiEiBRdR/2DJIRuyLMuANSgZGlJQM
0/sfaaQKXM//Q5QU/5mQruWGDdn+Q1IPbRWC3McCvdIT3EYA8llZGaFtDFDk/2J4/uwlw7wZ/eB6
HtSaBxX3sDGOH8Cu3/tXrxnMTM3AmzZA3b+bD14z3H7NgYh8YGufi1eMgY2THzwgJsjDxCAh8B+Y
s/8BI/sv+D61nz9+gRuKvz78ZHjx+yewxIFgJoY/DOwM3/68f/Xg04M7116fP3v6PrAXBrqUBrQ7
hgfaM8N7+BRAANEiAYC2I4GWIT///ebqVTEpPj0R3j9Az7GCh0n//8cs6sGlwX9E5CNyHFoxzAQd
V4eWAuAWMTP0qkKkRASyB3SRLKjUgVxmCb30khF+gxHYnj/QIogZGMvg3shfyAW2sCvPwJdc/4a0
vkFXG4ISL0ju3ee/DE/e/GR49e4Tw5YV88Dmgep10A4l0CAYqNsH2phqb2/P4O/vD96rD9qxu33/
GYY/PGoMXAKSwFwvxSAiIswgLcHBICX2i0GQ6y3QDX/AufjHO6DZb/8wfGT88ffTm8ef79+58vrC
udMvjh879hwaod+hGe0bdHb2CzSyQYtC3kIH5t4yoF20gw0ABBCthuBAqU9JRlbOccbCLRNExMQZ
eICNQSZQbP1H3C0NijjYXZOgVjUogEFKQGxYVwdl3Pk/JIJAd9SBIu8ftA/+9z9kfAHUJYPdZghK
JGB5aGMAfMcltBvIAG0rgIZrQXdeguz/D78Z9T+YD6vz/0LNff3pP8ODN//ALe+Pn38y3Hn6heHW
/dcMW3pcGfh52MF7EUBL5EEJABT5IDbotLGHwD4/GxsHQ3nbPAYuYNUgJirCICslBox0gX/MwM7b
8yf33ty4dvn5yRMnnv38+fM7lsj9zIDY2/eZAXGhEEztD+g0PQz/IWVqGCAAbWewgjAMg+EyC3pS
BnrydXw194bzsOMUEaZDRTdmm9YkbezAq556Lfz52yYh/f413ksbaw/7ulL9qcln89V6ucBcOHX4
nA+FFOJnykrERRFRHluTLKRgGu89zc5Wn2CZor11pr4iRWoF3oWikyCuYYS0JGGfjKxMxNcBAgzd
R6Kzxfv+ZRwH3QVdXx0tOt+oDo922595PsKZDp2/4TY4tbZpQJXKvvT5FnX0tkVx25XlFQAe8WQk
ZzYqQU3b6Nz7SHQReIiiykw//Bo28RZAtEoAf6HF0ps1q1esj0stSvv69yuwMQhausUEabAxQe4Q
B12UDrqLmA+YOFiYGbBGJCRRMIJzPijyQJH47wcoIv+Bb7IHzdD9/c8AH0kEJag/0NwPHkz6C6GR
hh3ACewvtG5hBDakgOUFuEH1B6j5/79fwAbWL4bHr38x3H7yneH1+x/gbh4z4y8GEa4f/7m/P/j5
8uGFj7dP7Gc2MTERAZ0IDlrHACryQQ0yUOSvWrXqxYoVKx5CI/0FdPkWiA+76BW2hu87tKj+MxA3
iQAEEC1nYWDHzBmePHlqmaysDBtsfgA0/PsbWrT+/ou4ffcndFEFePj4P0IOrB46nAzr4kE7Coiu
IAN6V/A/9Cpx8NgiOJKB2RW0swUoB4xgYGPq31/QTN1Pho9ffjB8+fETfOPnr69vwcX2nVs3GW5c
2PPx/r1H95Dq1jfQnAqKtP/Aer/Cx8eHE5QATE1NYUezvC8pKTkC1QO72Re2OfMlA2J//u/BcHUM
QADR8oQH2PzAqz37Dx0xsfF24nnPycDGzg0dj4dE7p9//+GLJv/C+/T/4d035L4+A3w+ADpDB6KB
EczE+JeB6T+o7Aa12H6B8e8/wJbyT9A28J/gSGYElqCf3r9luHv/IcOVG/cYrt1+zPDl22/wfkcQ
Bu114Gd+8efx9f1XP378Cbql9DE04p5Bi+wP0OIZ5BJuYMRnAxt4nKCt2KAEA8z5PwsKCtYDq4Ef
0ITyHJrrnzEgVu1+hYbL38FwWQQIAAQQredhwfMDHJyc5v2Lj8zjFRBn4BUQAq9wRYlc2OwadPiV
BdSCByUCYN+MmfEfPOf+h0bsH3A36DfDDyANajUzAiP/88d3DPfuP2C4fus+MHKfMLz/9B18cSkT
KwcwgjmBEcwBub2VjZuBhY0HTLMBO9kiP3a/37VxLWg3Leh2x3vQiH/JgNhQ+QdpvIqFiYmJz8/P
b0JKSko0sMvHdOzYsX9AfP4UsP8FVf8GKeJhW7Q+Q+vzP4Ml4mEAIIBofcYLeH7gx/fvj3+9v3vn
Hw+vCjsTDwMX9KZ5pv9/wZH3Dxi5oCVOf0B93O8/wf1r0PIxEP7/H7SO7hP4HL9rN+8zXL35iOHt
x2+QLWosHOAIBl3JC8agyGWXYmCVVmWQVOCCyEHVMDEDczow4fGyvv3PcHvipZ3rD69ggNxlirF3
HsdINDM3N7coMNfPiYmJ8fj48SPTyZMn/+7cuXMjMFIvI0U+LOJfQyMeloj+MQxCABBAtE4A/6DF
3tvVi6YsLavrq//74TvDuz+/oQtCgXXw98+/nzy6/f761cuvTp449uzmzZvvYMUkdJ2BhIZdotNf
NklgnIsxsEgqMkjIQSMcnLs5IMU4Myuw/geWLEwgmhmImeAFnCTDsV/nNjVsvn3nzQEg9wEDYjMF
/MQMAqUkyDBOaWnpCmAR73rgwAGmT58+/QFG/hxgwrwJa/BCzYSVHt8YEDd/DtqpDYAAYqRTIgOd
PK4FxAbQhiEb2iAGbCADdo4NrBsECnhRINYMzp3Y85LRnA09crFaCGyti39aCLq1eCG0aH8IzZmw
7dKkXMkKPqBJSkoqRFhYuB/Y0BN8+vTpdGBVcBOYGH5CzXsNjfg3aIkKI+IH2y1tAAFEr7VY7NCZ
QtAqYg5o4MPOyP8JzfF/YH1dpICDHWEDOrBfPyavevojlmCsx32DLif/d6Pr4vYtB5dDc/ljIot2
fIAJ6nbQhcZmQJwKxCsZIMviGdByPnJdjzNxDbYEABBAQ+Hwf0ZoogHvSIpMiut+KlCgDCnaTwCL
9jpyi3ZiSy/QbijQETrC0ATMA3XTD6SBnY9ILXy8JctgSwAAATSULgIC5UTQZhR5aJXCBA3852QW
7cSEDTM0AYBKIW4oZmVAHJ4Ji/gfxJYwgy0BAATQULo69ic0sl9DI+Y/NND/0qiR9R+amGAN0p/Q
CGeAjdwhVVv/GQb3GhacACCAWIbY1bGwCKEL0NTU/A+1jxGaABiRIvs/NEcP6du0AAIMANtMxR3x
N38FAAAAAElFTkSuQmCC\
"""
def thumbnail():
    # base64.b64decode works on Python 2 and 3; decodestring is deprecated.
    icon = base64.b64decode(iconstr)
    return icon
if __name__ == "__main__":
    icon = thumbnail()
    # Write the decoded PNG out for inspection.
    with open("thumbnail.png", "wb") as f:
        f.write(icon)
| apache-2.0 |
JuBra/GEMEditor | GEMEditor/database/ui/MetaboliteEntryDisplayWidget.py | 1 | 8059 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file '.\MetaboliteEntryDisplayWidget.ui'
#
# Created by: PyQt5 UI code generator 5.8.2
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_MetaboliteEntryDisplayWidget(object):
def setupUi(self, MetaboliteEntryDisplayWidget):
MetaboliteEntryDisplayWidget.setObjectName("MetaboliteEntryDisplayWidget")
MetaboliteEntryDisplayWidget.resize(333, 465)
self.formLayout = QtWidgets.QFormLayout(MetaboliteEntryDisplayWidget)
self.formLayout.setObjectName("formLayout")
self.label = QtWidgets.QLabel(MetaboliteEntryDisplayWidget)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label.sizePolicy().hasHeightForWidth())
self.label.setSizePolicy(sizePolicy)
self.label.setObjectName("label")
self.formLayout.setWidget(0, QtWidgets.QFormLayout.LabelRole, self.label)
self.label_name = QtWidgets.QLabel(MetaboliteEntryDisplayWidget)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_name.sizePolicy().hasHeightForWidth())
self.label_name.setSizePolicy(sizePolicy)
self.label_name.setText("")
self.label_name.setWordWrap(True)
self.label_name.setObjectName("label_name")
self.formLayout.setWidget(0, QtWidgets.QFormLayout.FieldRole, self.label_name)
self.label_4 = QtWidgets.QLabel(MetaboliteEntryDisplayWidget)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_4.sizePolicy().hasHeightForWidth())
self.label_4.setSizePolicy(sizePolicy)
self.label_4.setObjectName("label_4")
self.formLayout.setWidget(1, QtWidgets.QFormLayout.LabelRole, self.label_4)
self.label_formula = QtWidgets.QLabel(MetaboliteEntryDisplayWidget)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_formula.sizePolicy().hasHeightForWidth())
self.label_formula.setSizePolicy(sizePolicy)
self.label_formula.setText("")
self.label_formula.setObjectName("label_formula")
self.formLayout.setWidget(1, QtWidgets.QFormLayout.FieldRole, self.label_formula)
self.label_2 = QtWidgets.QLabel(MetaboliteEntryDisplayWidget)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_2.sizePolicy().hasHeightForWidth())
self.label_2.setSizePolicy(sizePolicy)
self.label_2.setObjectName("label_2")
self.formLayout.setWidget(2, QtWidgets.QFormLayout.LabelRole, self.label_2)
self.label_charge = QtWidgets.QLabel(MetaboliteEntryDisplayWidget)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_charge.sizePolicy().hasHeightForWidth())
self.label_charge.setSizePolicy(sizePolicy)
self.label_charge.setText("")
self.label_charge.setObjectName("label_charge")
self.formLayout.setWidget(2, QtWidgets.QFormLayout.FieldRole, self.label_charge)
self.label_3 = QtWidgets.QLabel(MetaboliteEntryDisplayWidget)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_3.sizePolicy().hasHeightForWidth())
self.label_3.setSizePolicy(sizePolicy)
self.label_3.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignTop)
self.label_3.setObjectName("label_3")
self.formLayout.setWidget(3, QtWidgets.QFormLayout.LabelRole, self.label_3)
self.list_synonyms = QtWidgets.QListWidget(MetaboliteEntryDisplayWidget)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Minimum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.list_synonyms.sizePolicy().hasHeightForWidth())
self.list_synonyms.setSizePolicy(sizePolicy)
self.list_synonyms.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.list_synonyms.setEditTriggers(QtWidgets.QAbstractItemView.NoEditTriggers)
self.list_synonyms.setSelectionMode(QtWidgets.QAbstractItemView.NoSelection)
self.list_synonyms.setLayoutMode(QtWidgets.QListView.SinglePass)
self.list_synonyms.setObjectName("list_synonyms")
self.formLayout.setWidget(3, QtWidgets.QFormLayout.FieldRole, self.list_synonyms)
self.label_5 = QtWidgets.QLabel(MetaboliteEntryDisplayWidget)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_5.sizePolicy().hasHeightForWidth())
self.label_5.setSizePolicy(sizePolicy)
self.label_5.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignTop)
self.label_5.setObjectName("label_5")
self.formLayout.setWidget(4, QtWidgets.QFormLayout.LabelRole, self.label_5)
self.table_identifiers = AnnotationTableWidget(MetaboliteEntryDisplayWidget)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Minimum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.table_identifiers.sizePolicy().hasHeightForWidth())
self.table_identifiers.setSizePolicy(sizePolicy)
self.table_identifiers.setContextMenuPolicy(QtCore.Qt.CustomContextMenu)
self.table_identifiers.setEditTriggers(QtWidgets.QAbstractItemView.NoEditTriggers)
self.table_identifiers.setWordWrap(False)
self.table_identifiers.setObjectName("table_identifiers")
self.table_identifiers.setColumnCount(0)
self.table_identifiers.setRowCount(0)
self.table_identifiers.horizontalHeader().setStretchLastSection(True)
self.table_identifiers.verticalHeader().setVisible(False)
self.table_identifiers.verticalHeader().setHighlightSections(False)
self.formLayout.setWidget(4, QtWidgets.QFormLayout.FieldRole, self.table_identifiers)
self.retranslateUi(MetaboliteEntryDisplayWidget)
QtCore.QMetaObject.connectSlotsByName(MetaboliteEntryDisplayWidget)
def retranslateUi(self, MetaboliteEntryDisplayWidget):
_translate = QtCore.QCoreApplication.translate
MetaboliteEntryDisplayWidget.setWindowTitle(_translate("MetaboliteEntryDisplayWidget", "Form"))
self.label.setText(_translate("MetaboliteEntryDisplayWidget", "Name:"))
self.label_4.setText(_translate("MetaboliteEntryDisplayWidget", "Formula:"))
self.label_2.setText(_translate("MetaboliteEntryDisplayWidget", "Charge:"))
self.label_3.setText(_translate("MetaboliteEntryDisplayWidget", "Synonyms:"))
self.label_5.setText(_translate("MetaboliteEntryDisplayWidget", "Identifier:"))
from GEMEditor.base.widgets import AnnotationTableWidget
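# Example usage of the generated class (sketch; assumes a QApplication):
#   app = QtWidgets.QApplication([])
#   widget = QtWidgets.QWidget()
#   ui = Ui_MetaboliteEntryDisplayWidget()
#   ui.setupUi(widget)
#   widget.show()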
| gpl-3.0 |
GeekTrainer/Flask | Work/Trivia/routes.py | 7 | 1572 | from flask import url_for, request, render_template
from app import app
import redis
r = redis.StrictRedis('localhost', 6379, 0, decode_responses=True, charset='utf-8')
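# Each trivia question lives in redis under two keys derived from its title:
# '<title>:question' holds the prompt, '<title>:answer' the expected answer.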
@app.route('/')
def hello():
    url = url_for('about')
    link = '<a href="' + url + '">About us!</a>'
    return link
@app.route('/about')
def about():
    return 'We are the knights who say Ni!!'
@app.route('/question/<title>', methods=['GET', 'POST'])
def question(title):
    if request.method == 'GET':
        question = r.get(title + ':question')
        return render_template('AnswerQuestion.html',
                               question=question)
    elif request.method == 'POST':
        submittedAnswer = request.form['submittedAnswer']
        answer = r.get(title + ':answer')
        if submittedAnswer == answer:
            return render_template('Correct.html')
        else:
            return render_template('Incorrect.html',
                                   answer=answer,
                                   submittedAnswer=submittedAnswer)
@app.route('/submit', methods=['GET', 'POST'])
def submit():
    if request.method == 'GET':
        return render_template('CreateQuestion.html')
    elif request.method == 'POST':
        question = request.form['question']
        answer = request.form['answer']
        title = request.form['title']
        r.set(title + ':question', question)
        r.set(title + ':answer', answer)
        return render_template('CreatedQuestion.html',
                               question=question)
    return | apache-2.0 |
dongjoon-hyun/tensorflow | tensorflow/python/feature_column/feature_column_lib.py | 23 | 1038 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""FeatureColumns: tools for ingesting and representing features."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# pylint: disable=unused-import,line-too-long,wildcard-import
from tensorflow.python.feature_column.feature_column import *
# pylint: enable=unused-import,line-too-long
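# Example usage (sketch): the names re-exported here are consumed when
# building input pipelines, e.g.
#   price = numeric_column('price')
#   buckets = bucketized_column(price, boundaries=[0., 10., 100.])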
| apache-2.0 |
Onager/plaso | tests/data/presets.py | 4 | 1164 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Tests for parser and parser plugin presets."""
import unittest
from plaso.filters import parser_filter
from plaso.parsers import manager as parsers_manager
from plaso.parsers import presets
from tests import test_lib as shared_test_lib
class PresetsDataTest(shared_test_lib.BaseTestCase):
"""Tests the presets.yaml file."""
def testParsersAndPresets(self):
"""Tests that all parsers/plugins in presets.yaml are valid."""
presets_file_path = self._GetDataFilePath(['presets.yaml'])
preset_manager = presets.ParserPresetsManager()
preset_manager.ReadFromFile(presets_file_path)
filter_helper = parser_filter.ParserFilterExpressionHelper()
for name in preset_manager.GetNames():
expanded_preset = filter_helper.ExpandPresets(preset_manager, name)
_, invalid_parser_elements = (
parsers_manager.ParsersManager.CheckFilterExpression(expanded_preset))
error_message = 'Invalid parser/plugin name(s) in preset: {0:s}'.format(
name)
self.assertFalse(invalid_parser_elements, msg=error_message)
if __name__ == '__main__':
unittest.main()
| apache-2.0 |
blueboxgroup/neutron | neutron/tests/unit/test_db_plugin_level.py | 19 | 3370 | # Copyright (c) 2014 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron.api.v2 import attributes
from neutron.common import constants
from neutron.common import exceptions as n_exc
from neutron import context
from neutron import manager
from neutron.tests.unit import test_db_plugin
from neutron.tests.unit import testlib_api
from neutron.tests.unit import testlib_plugin
class TestNetworks(testlib_api.SqlTestCase,
testlib_plugin.PluginSetupHelper):
def setUp(self):
super(TestNetworks, self).setUp()
self._tenant_id = 'test-tenant'
# Update the plugin
self.setup_coreplugin(test_db_plugin.DB_PLUGIN_KLASS)
def _create_network(self, plugin, ctx, shared=True):
network = {'network': {'name': 'net',
'shared': shared,
'admin_state_up': True,
'tenant_id': self._tenant_id}}
created_network = plugin.create_network(ctx, network)
return (network, created_network['id'])
def _create_port(self, plugin, ctx, net_id, device_owner, tenant_id):
port = {'port': {'name': 'port',
'network_id': net_id,
'mac_address': attributes.ATTR_NOT_SPECIFIED,
'fixed_ips': attributes.ATTR_NOT_SPECIFIED,
'admin_state_up': True,
'device_id': 'device_id',
'device_owner': device_owner,
'tenant_id': tenant_id}}
plugin.create_port(ctx, port)
def _test_update_shared_net_used(self,
device_owner,
expected_exception=None):
plugin = manager.NeutronManager.get_plugin()
ctx = context.get_admin_context()
network, net_id = self._create_network(plugin, ctx)
self._create_port(plugin,
ctx,
net_id,
device_owner,
self._tenant_id + '1')
network['network']['shared'] = False
        if expected_exception:
with testlib_api.ExpectedException(expected_exception):
plugin.update_network(ctx, net_id, network)
else:
plugin.update_network(ctx, net_id, network)
def test_update_shared_net_used_fails(self):
self._test_update_shared_net_used('', n_exc.InvalidSharedSetting)
def test_update_shared_net_used_as_router_gateway(self):
self._test_update_shared_net_used(
constants.DEVICE_OWNER_ROUTER_GW)
def test_update_shared_net_used_by_floating_ip(self):
self._test_update_shared_net_used(
constants.DEVICE_OWNER_FLOATINGIP)
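# Each test above builds a shared network, attaches a port owned by a second
# tenant, then flips 'shared' to False and checks whether the plugin rejects
# the update for that device owner.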
| apache-2.0 |
jonmash/ardupilot | Tools/autotest/jsb_sim/runsim.py | 167 | 12772 | #!/usr/bin/env python
# run a jsbsim model as a child process
import sys, os, pexpect, socket
import math, time, select, struct, signal, errno
sys.path.insert(0, os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', 'pysim'))
import util, atexit, fdpexpect
from pymavlink import fgFDM
class control_state(object):
def __init__(self):
self.aileron = 0
self.elevator = 0
self.throttle = 0
self.rudder = 0
self.ground_height = 0
sitl_state = control_state()
def interpret_address(addrstr):
'''interpret a IP:port string'''
a = addrstr.split(':')
a[1] = int(a[1])
return tuple(a)
def jsb_set(variable, value):
'''set a JSBSim variable'''
global jsb_console
jsb_console.send('set %s %s\r\n' % (variable, value))
def setup_template(home):
'''setup aircraft/Rascal/reset.xml'''
global opts
v = home.split(',')
if len(v) != 4:
print("home should be lat,lng,alt,hdg - '%s'" % home)
sys.exit(1)
latitude = float(v[0])
longitude = float(v[1])
altitude = float(v[2])
heading = float(v[3])
sitl_state.ground_height = altitude
template = os.path.join('aircraft', 'Rascal', 'reset_template.xml')
reset = os.path.join('aircraft', 'Rascal', 'reset.xml')
xml = open(template).read() % { 'LATITUDE' : str(latitude),
'LONGITUDE' : str(longitude),
'HEADING' : str(heading) }
open(reset, mode='w').write(xml)
print("Wrote %s" % reset)
baseport = int(opts.simout.split(':')[1])
template = os.path.join('jsb_sim', 'fgout_template.xml')
out = os.path.join('jsb_sim', 'fgout.xml')
xml = open(template).read() % { 'FGOUTPORT' : str(baseport+3) }
open(out, mode='w').write(xml)
print("Wrote %s" % out)
template = os.path.join('jsb_sim', 'rascal_test_template.xml')
out = os.path.join('jsb_sim', 'rascal_test.xml')
xml = open(template).read() % { 'JSBCONSOLEPORT' : str(baseport+4) }
open(out, mode='w').write(xml)
print("Wrote %s" % out)
def process_sitl_input(buf):
'''process control changes from SITL sim'''
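    # The SITL control packet is 14 little-endian uint16s: 11 PWM channel
    # values followed by wind speed, direction and turbulence, each scaled
    # up by 100 (hence the *0.01 below).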
control = list(struct.unpack('<14H', buf))
pwm = control[:11]
(speed, direction, turbulance) = control[11:]
global wind
wind.speed = speed*0.01
wind.direction = direction*0.01
wind.turbulance = turbulance*0.01
aileron = (pwm[0]-1500)/500.0
elevator = (pwm[1]-1500)/500.0
throttle = (pwm[2]-1000)/1000.0
if opts.revthr:
throttle = 1.0 - throttle
rudder = (pwm[3]-1500)/500.0
if opts.elevon:
# fake an elevon plane
ch1 = aileron
ch2 = elevator
aileron = (ch2-ch1)/2.0
# the minus does away with the need for RC2_REV=-1
elevator = -(ch2+ch1)/2.0
if opts.vtail:
        # fake a vtail plane
ch1 = elevator
ch2 = rudder
# this matches VTAIL_OUTPUT==2
elevator = (ch2-ch1)/2.0
rudder = (ch2+ch1)/2.0
buf = ''
if aileron != sitl_state.aileron:
buf += 'set fcs/aileron-cmd-norm %s\n' % aileron
sitl_state.aileron = aileron
if elevator != sitl_state.elevator:
buf += 'set fcs/elevator-cmd-norm %s\n' % elevator
sitl_state.elevator = elevator
if rudder != sitl_state.rudder:
buf += 'set fcs/rudder-cmd-norm %s\n' % rudder
sitl_state.rudder = rudder
if throttle != sitl_state.throttle:
buf += 'set fcs/throttle-cmd-norm %s\n' % throttle
sitl_state.throttle = throttle
buf += 'step\n'
global jsb_console
jsb_console.send(buf)
def update_wind(wind):
'''update wind simulation'''
(speed, direction) = wind.current()
jsb_set('atmosphere/psiw-rad', math.radians(direction))
jsb_set('atmosphere/wind-mag-fps', speed/0.3048)
def process_jsb_input(buf, simtime):
'''process FG FDM input from JSBSim'''
global fdm, fg_out, sim_out
fdm.parse(buf)
if fg_out:
try:
agl = fdm.get('agl', units='meters')
fdm.set('altitude', agl+sitl_state.ground_height, units='meters')
fdm.set('rpm', sitl_state.throttle*1000)
fg_out.send(fdm.pack())
except socket.error as e:
if e.errno not in [ errno.ECONNREFUSED ]:
raise
timestamp = int(simtime*1.0e6)
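    # Pack the FDM state for SITL: a uint64 timestamp in microseconds,
    # 17 doubles of vehicle state, then a uint32 magic value (0x4c56414f).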
simbuf = struct.pack('<Q17dI',
timestamp,
fdm.get('latitude', units='degrees'),
fdm.get('longitude', units='degrees'),
fdm.get('altitude', units='meters'),
fdm.get('psi', units='degrees'),
fdm.get('v_north', units='mps'),
fdm.get('v_east', units='mps'),
fdm.get('v_down', units='mps'),
fdm.get('A_X_pilot', units='mpss'),
fdm.get('A_Y_pilot', units='mpss'),
fdm.get('A_Z_pilot', units='mpss'),
fdm.get('phidot', units='dps'),
fdm.get('thetadot', units='dps'),
fdm.get('psidot', units='dps'),
fdm.get('phi', units='degrees'),
fdm.get('theta', units='degrees'),
fdm.get('psi', units='degrees'),
fdm.get('vcas', units='mps'),
0x4c56414f)
try:
sim_out.send(simbuf)
except socket.error as e:
if e.errno not in [ errno.ECONNREFUSED ]:
raise
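# Illustrative sketch (an addition, not part of the original tool): the
# receiving side can unpack the '<Q17dI' FDM packet built above using the
# matching field order. The helper name and returned keys are assumptions.
def unpack_fdm_packet(simbuf):
    '''unpack an FDM packet produced by process_jsb_input (illustrative)'''
    (timestamp, latitude, longitude, altitude, heading,
     v_north, v_east, v_down,
     a_x, a_y, a_z,
     phidot, thetadot, psidot,
     phi, theta, psi, vcas, magic) = struct.unpack('<Q17dI', simbuf)
    if magic != 0x4c56414f:
        raise ValueError('bad FDM packet magic: 0x%x' % magic)
    return {'time_us': timestamp, 'lat_deg': latitude, 'lon_deg': longitude,
            'alt_m': altitude, 'heading_deg': heading, 'airspeed_mps': vcas}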
##################
# main program
from optparse import OptionParser
parser = OptionParser("runsim.py [options]")
parser.add_option("--simin", help="SITL input (IP:port)", default="127.0.0.1:5502")
parser.add_option("--simout", help="SITL output (IP:port)", default="127.0.0.1:5501")
parser.add_option("--fgout", help="FG display output (IP:port)", default="127.0.0.1:5503")
parser.add_option("--home", type='string', help="home lat,lng,alt,hdg (required)")
parser.add_option("--script", type='string', help='jsbsim model script', default='jsb_sim/rascal_test.xml')
parser.add_option("--options", type='string', help='jsbsim startup options')
parser.add_option("--elevon", action='store_true', default=False, help='assume elevon input')
parser.add_option("--revthr", action='store_true', default=False, help='reverse throttle')
parser.add_option("--vtail", action='store_true', default=False, help='assume vtail input')
parser.add_option("--wind", dest="wind", help="Simulate wind (speed,direction,turbulance)", default='0,0,0')
parser.add_option("--rate", type='int', help="Simulation rate (Hz)", default=1000)
parser.add_option("--speedup", type='float', default=1.0, help="speedup from realtime")
(opts, args) = parser.parse_args()
for m in [ 'home', 'script' ]:
if not opts.__dict__[m]:
print("Missing required option '%s'" % m)
parser.print_help()
sys.exit(1)
os.chdir(util.reltopdir('Tools/autotest'))
# kill off child when we exit
atexit.register(util.pexpect_close_all)
setup_template(opts.home)
# start child
cmd = "JSBSim --realtime --suspend --nice --simulation-rate=%u --logdirectivefile=jsb_sim/fgout.xml --script=%s" % (opts.rate, opts.script)
if opts.options:
cmd += ' %s' % opts.options
jsb = pexpect.spawn(cmd, logfile=sys.stdout, timeout=10)
jsb.delaybeforesend = 0
util.pexpect_autoclose(jsb)
i = jsb.expect(["Successfully bound to socket for input on port (\d+)",
"Could not bind to socket for input"])
if i == 1:
print("Failed to start JSBSim - is another copy running?")
sys.exit(1)
jsb_out_address = interpret_address("127.0.0.1:%u" % int(jsb.match.group(1)))
jsb.expect("Creating UDP socket on port (\d+)")
jsb_in_address = interpret_address("127.0.0.1:%u" % int(jsb.match.group(1)))
jsb.expect("Successfully connected to socket for output")
jsb.expect("JSBSim Execution beginning")
# setup output to jsbsim
print("JSBSim console on %s" % str(jsb_out_address))
jsb_out = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
jsb_out.connect(jsb_out_address)
jsb_console = fdpexpect.fdspawn(jsb_out.fileno(), logfile=sys.stdout)
jsb_console.delaybeforesend = 0
# setup input from jsbsim
print("JSBSim FG FDM input on %s" % str(jsb_in_address))
jsb_in = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
jsb_in.bind(jsb_in_address)
jsb_in.setblocking(0)
# socket addresses
sim_out_address = interpret_address(opts.simout)
sim_in_address = interpret_address(opts.simin)
# setup input from SITL sim
sim_in = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sim_in.bind(sim_in_address)
sim_in.setblocking(0)
# setup output to SITL sim
sim_out = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sim_out.connect(interpret_address(opts.simout))
sim_out.setblocking(0)
# setup possible output to FlightGear for display
fg_out = None
if opts.fgout:
fg_out = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
fg_out.connect(interpret_address(opts.fgout))
# setup wind generator
wind = util.Wind(opts.wind)
fdm = fgFDM.fgFDM()
jsb_console.send('info\n')
jsb_console.send('resume\n')
jsb.expect(["trim computation time","Trim Results"])
time.sleep(1.5)
jsb_console.send('step\n')
jsb_console.logfile = None
print("Simulator ready to fly")
def main_loop():
'''run main loop'''
tnow = time.time()
last_report = tnow
last_sim_input = tnow
last_wind_update = tnow
frame_count = 0
paused = False
simstep = 1.0/opts.rate
simtime = simstep
frame_time = 1.0/opts.rate
scaled_frame_time = frame_time/opts.speedup
last_wall_time = time.time()
achieved_rate = opts.speedup
while True:
new_frame = False
rin = [jsb_in.fileno(), sim_in.fileno(), jsb_console.fileno(), jsb.fileno()]
try:
(rin, win, xin) = select.select(rin, [], [], 1.0)
except select.error:
util.check_parent()
continue
tnow = time.time()
if jsb_in.fileno() in rin:
buf = jsb_in.recv(fdm.packet_size())
process_jsb_input(buf, simtime)
frame_count += 1
new_frame = True
if sim_in.fileno() in rin:
simbuf = sim_in.recv(28)
process_sitl_input(simbuf)
simtime += simstep
last_sim_input = tnow
# show any jsbsim console output
if jsb_console.fileno() in rin:
util.pexpect_drain(jsb_console)
if jsb.fileno() in rin:
util.pexpect_drain(jsb)
        # update the wind simulation at 10Hz
if tnow - last_wind_update > 0.1:
update_wind(wind)
last_wind_update = tnow
if tnow - last_report > 3:
print("FPS %u asl=%.1f agl=%.1f roll=%.1f pitch=%.1f a=(%.2f %.2f %.2f) AR=%.1f" % (
frame_count / (time.time() - last_report),
fdm.get('altitude', units='meters'),
fdm.get('agl', units='meters'),
fdm.get('phi', units='degrees'),
fdm.get('theta', units='degrees'),
fdm.get('A_X_pilot', units='mpss'),
fdm.get('A_Y_pilot', units='mpss'),
fdm.get('A_Z_pilot', units='mpss'),
achieved_rate))
frame_count = 0
last_report = time.time()
if new_frame:
now = time.time()
if now < last_wall_time + scaled_frame_time:
dt = last_wall_time+scaled_frame_time - now
time.sleep(last_wall_time+scaled_frame_time - now)
now = time.time()
if now > last_wall_time and now - last_wall_time < 0.1:
rate = 1.0/(now - last_wall_time)
achieved_rate = (0.98*achieved_rate) + (0.02*rate)
if achieved_rate < opts.rate*opts.speedup:
scaled_frame_time *= 0.999
else:
scaled_frame_time *= 1.001
last_wall_time = now
def exit_handler():
'''exit the sim'''
print("running exit handler")
signal.signal(signal.SIGINT, signal.SIG_IGN)
signal.signal(signal.SIGTERM, signal.SIG_IGN)
# JSBSim really doesn't like to die ...
if getattr(jsb, 'pid', None) is not None:
os.kill(jsb.pid, signal.SIGKILL)
jsb_console.send('quit\n')
jsb.close(force=True)
util.pexpect_close_all()
sys.exit(1)
signal.signal(signal.SIGINT, exit_handler)
signal.signal(signal.SIGTERM, exit_handler)
try:
main_loop()
except Exception as ex:
print(ex)
exit_handler()
raise
| gpl-3.0 |
kickstandproject/wildcard | wildcard/dashboards/settings/password/tests.py | 1 | 3365 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013 Centrin Data Systems Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.core.urlresolvers import NoReverseMatch # noqa
from django.core.urlresolvers import reverse # noqa
from django import http
from mox import IsA # noqa
from wildcard import api
from wildcard.test import helpers as test
# TODO(mrunge): remove, when keystone v3 supports
# change_own_password, incl. password validation
kver = api.keystone.VERSIONS.active
if kver == 2:
INDEX_URL = reverse('horizon:settings:password:index')
class ChangePasswordTests(test.TestCase):
@test.create_stubs({api.keystone: ('user_update_own_password', )})
def test_change_password(self):
if kver == 3:
self.skipTest('Password change in keystone v3 unsupported')
api.keystone.user_update_own_password(IsA(http.HttpRequest),
'oldpwd',
'normalpwd',).AndReturn(None)
self.mox.ReplayAll()
formData = {'method': 'PasswordForm',
'current_password': 'oldpwd',
'new_password': 'normalpwd',
'confirm_password': 'normalpwd'}
res = self.client.post(INDEX_URL, formData)
self.assertNoFormErrors(res)
def test_change_validation_passwords_not_matching(self):
if kver == 3:
self.skipTest('Password change in keystone v3 unsupported')
formData = {'method': 'PasswordForm',
'current_password': 'currpasswd',
'new_password': 'testpassword',
'confirm_password': 'doesnotmatch'}
res = self.client.post(INDEX_URL, formData)
self.assertFormError(res, "form", None, ['Passwords do not match.'])
@test.create_stubs({api.keystone: ('user_update_own_password', )})
def test_change_password_shows_message_on_login_page(self):
if kver == 3:
self.skipTest('Password change in keystone v3 unsupported')
api.keystone.user_update_own_password(IsA(http.HttpRequest),
'oldpwd',
'normalpwd').AndReturn(None)
self.mox.ReplayAll()
formData = {'method': 'PasswordForm',
'current_password': 'oldpwd',
'new_password': 'normalpwd',
'confirm_password': 'normalpwd'}
res = self.client.post(INDEX_URL, formData, follow=True)
info_msg = "Password changed. Please log in again to continue."
self.assertContains(res, info_msg)
def test_on_keystone_v3_disabled(self):
try:
reverse('horizon:settings:password:index')
except NoReverseMatch:
pass
| apache-2.0 |
maurizi/nyc-trees | src/nyc_trees/apps/users/migrations/0001_initial.py | 4 | 2129 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('core', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Follow',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('group', models.ForeignKey(to='core.Group')),
('user', models.ForeignKey(to=settings.AUTH_USER_MODEL)),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='TrainingResult',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('module_name', models.CharField(max_length=255)),
('score', models.IntegerField(null=True)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('user', models.ForeignKey(to=settings.AUTH_USER_MODEL)),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='TrustedMapper',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('group', models.ForeignKey(to='core.Group')),
('user', models.ForeignKey(to=settings.AUTH_USER_MODEL)),
],
options={
},
bases=(models.Model,),
),
]
| agpl-3.0 |
MostafaGazar/tensorflow | tensorflow/contrib/slim/python/slim/nets/vgg.py | 25 | 10637 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Contains model definitions for versions of the Oxford VGG network.
These model definitions were introduced in the following technical report:
Very Deep Convolutional Networks For Large-Scale Image Recognition
Karen Simonyan and Andrew Zisserman
arXiv technical report, 2015
PDF: http://arxiv.org/pdf/1409.1556.pdf
ILSVRC 2014 Slides: http://www.robots.ox.ac.uk/~karen/pdf/ILSVRC_2014.pdf
More information can be obtained from the VGG website:
www.robots.ox.ac.uk/~vgg/research/very_deep/
Usage:
with slim.arg_scope(vgg.vgg_arg_scope()):
outputs, end_points = vgg.vgg_a(inputs)
with slim.arg_scope(vgg.vgg_arg_scope()):
outputs, end_points = vgg.vgg_16(inputs)
@@vgg_a
@@vgg_16
@@vgg_19
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
slim = tf.contrib.slim
def vgg_arg_scope(weight_decay=0.0005):
"""Defines the VGG arg scope.
Args:
weight_decay: The l2 regularization coefficient.
Returns:
An arg_scope.
"""
with slim.arg_scope([slim.conv2d, slim.fully_connected],
activation_fn=tf.nn.relu,
weights_regularizer=slim.l2_regularizer(weight_decay),
biases_initializer=tf.zeros_initializer):
with slim.arg_scope([slim.conv2d], padding='SAME') as arg_sc:
return arg_sc
def vgg_a(inputs,
num_classes=1000,
is_training=True,
dropout_keep_prob=0.5,
spatial_squeeze=True,
scope='vgg_a'):
"""Oxford Net VGG 11-Layers version A Example.
Note: All the fully_connected layers have been transformed to conv2d layers.
To use in classification mode, resize input to 224x224.
Args:
inputs: a tensor of size [batch_size, height, width, channels].
num_classes: number of predicted classes.
is_training: whether or not the model is being trained.
dropout_keep_prob: the probability that activations are kept in the dropout
layers during training.
    spatial_squeeze: whether or not the spatial dimensions of the outputs
      should be squeezed. Useful to remove unnecessary dimensions for
      classification.
scope: Optional scope for the variables.
Returns:
the last op containing the log predictions and end_points dict.
"""
with tf.variable_scope(scope, 'vgg_a', [inputs]) as sc:
end_points_collection = sc.original_name_scope + '_end_points'
# Collect outputs for conv2d, fully_connected and max_pool2d.
with slim.arg_scope([slim.conv2d, slim.max_pool2d],
outputs_collections=end_points_collection):
net = slim.repeat(inputs, 1, slim.conv2d, 64, [3, 3], scope='conv1')
net = slim.max_pool2d(net, [2, 2], scope='pool1')
net = slim.repeat(net, 1, slim.conv2d, 128, [3, 3], scope='conv2')
net = slim.max_pool2d(net, [2, 2], scope='pool2')
net = slim.repeat(net, 2, slim.conv2d, 256, [3, 3], scope='conv3')
net = slim.max_pool2d(net, [2, 2], scope='pool3')
net = slim.repeat(net, 2, slim.conv2d, 512, [3, 3], scope='conv4')
net = slim.max_pool2d(net, [2, 2], scope='pool4')
net = slim.repeat(net, 2, slim.conv2d, 512, [3, 3], scope='conv5')
net = slim.max_pool2d(net, [2, 2], scope='pool5')
# Use conv2d instead of fully_connected layers.
net = slim.conv2d(net, 4096, [7, 7], padding='VALID', scope='fc6')
net = slim.dropout(net, dropout_keep_prob, is_training=is_training,
scope='dropout6')
net = slim.conv2d(net, 4096, [1, 1], scope='fc7')
net = slim.dropout(net, dropout_keep_prob, is_training=is_training,
scope='dropout7')
net = slim.conv2d(net, num_classes, [1, 1],
activation_fn=None,
normalizer_fn=None,
scope='fc8')
    # Convert end_points_collection into an end_point dict.
end_points = slim.utils.convert_collection_to_dict(end_points_collection)
if spatial_squeeze:
net = tf.squeeze(net, [1, 2], name='fc8/squeezed')
end_points[sc.name + '/fc8'] = net
return net, end_points
vgg_a.default_image_size = 224
def vgg_16(inputs,
num_classes=1000,
is_training=True,
dropout_keep_prob=0.5,
spatial_squeeze=True,
scope='vgg_16'):
"""Oxford Net VGG 16-Layers version D Example.
Note: All the fully_connected layers have been transformed to conv2d layers.
To use in classification mode, resize input to 224x224.
Args:
inputs: a tensor of size [batch_size, height, width, channels].
num_classes: number of predicted classes.
is_training: whether or not the model is being trained.
dropout_keep_prob: the probability that activations are kept in the dropout
layers during training.
    spatial_squeeze: whether or not the spatial dimensions of the outputs
      should be squeezed. Useful to remove unnecessary dimensions for
      classification.
scope: Optional scope for the variables.
Returns:
the last op containing the log predictions and end_points dict.
"""
with tf.variable_scope(scope, 'vgg_16', [inputs]) as sc:
end_points_collection = sc.original_name_scope + '_end_points'
# Collect outputs for conv2d, fully_connected and max_pool2d.
with slim.arg_scope([slim.conv2d, slim.fully_connected, slim.max_pool2d],
outputs_collections=end_points_collection):
net = slim.repeat(inputs, 2, slim.conv2d, 64, [3, 3], scope='conv1')
net = slim.max_pool2d(net, [2, 2], scope='pool1')
net = slim.repeat(net, 2, slim.conv2d, 128, [3, 3], scope='conv2')
net = slim.max_pool2d(net, [2, 2], scope='pool2')
net = slim.repeat(net, 3, slim.conv2d, 256, [3, 3], scope='conv3')
net = slim.max_pool2d(net, [2, 2], scope='pool3')
net = slim.repeat(net, 3, slim.conv2d, 512, [3, 3], scope='conv4')
net = slim.max_pool2d(net, [2, 2], scope='pool4')
net = slim.repeat(net, 3, slim.conv2d, 512, [3, 3], scope='conv5')
net = slim.max_pool2d(net, [2, 2], scope='pool5')
# Use conv2d instead of fully_connected layers.
net = slim.conv2d(net, 4096, [7, 7], padding='VALID', scope='fc6')
net = slim.dropout(net, dropout_keep_prob, is_training=is_training,
scope='dropout6')
net = slim.conv2d(net, 4096, [1, 1], scope='fc7')
net = slim.dropout(net, dropout_keep_prob, is_training=is_training,
scope='dropout7')
net = slim.conv2d(net, num_classes, [1, 1],
activation_fn=None,
normalizer_fn=None,
scope='fc8')
    # Convert end_points_collection into an end_point dict.
end_points = slim.utils.convert_collection_to_dict(end_points_collection)
if spatial_squeeze:
net = tf.squeeze(net, [1, 2], name='fc8/squeezed')
end_points[sc.name + '/fc8'] = net
return net, end_points
vgg_16.default_image_size = 224
def vgg_19(inputs,
num_classes=1000,
is_training=True,
dropout_keep_prob=0.5,
spatial_squeeze=True,
scope='vgg_19'):
"""Oxford Net VGG 19-Layers version E Example.
Note: All the fully_connected layers have been transformed to conv2d layers.
To use in classification mode, resize input to 224x224.
Args:
inputs: a tensor of size [batch_size, height, width, channels].
num_classes: number of predicted classes.
is_training: whether or not the model is being trained.
dropout_keep_prob: the probability that activations are kept in the dropout
layers during training.
    spatial_squeeze: whether or not the spatial dimensions of the outputs
      should be squeezed. Useful to remove unnecessary dimensions for
      classification.
scope: Optional scope for the variables.
Returns:
the last op containing the log predictions and end_points dict.
"""
with tf.variable_scope(scope, 'vgg_19', [inputs]) as sc:
end_points_collection = sc.name + '_end_points'
# Collect outputs for conv2d, fully_connected and max_pool2d.
with slim.arg_scope([slim.conv2d, slim.fully_connected, slim.max_pool2d],
outputs_collections=end_points_collection):
net = slim.repeat(inputs, 2, slim.conv2d, 64, [3, 3], scope='conv1')
net = slim.max_pool2d(net, [2, 2], scope='pool1')
net = slim.repeat(net, 2, slim.conv2d, 128, [3, 3], scope='conv2')
net = slim.max_pool2d(net, [2, 2], scope='pool2')
net = slim.repeat(net, 4, slim.conv2d, 256, [3, 3], scope='conv3')
net = slim.max_pool2d(net, [2, 2], scope='pool3')
net = slim.repeat(net, 4, slim.conv2d, 512, [3, 3], scope='conv4')
net = slim.max_pool2d(net, [2, 2], scope='pool4')
net = slim.repeat(net, 4, slim.conv2d, 512, [3, 3], scope='conv5')
net = slim.max_pool2d(net, [2, 2], scope='pool5')
# Use conv2d instead of fully_connected layers.
net = slim.conv2d(net, 4096, [7, 7], padding='VALID', scope='fc6')
net = slim.dropout(net, dropout_keep_prob, is_training=is_training,
scope='dropout6')
net = slim.conv2d(net, 4096, [1, 1], scope='fc7')
net = slim.dropout(net, dropout_keep_prob, is_training=is_training,
scope='dropout7')
net = slim.conv2d(net, num_classes, [1, 1],
activation_fn=None,
normalizer_fn=None,
scope='fc8')
    # Convert end_points_collection into an end_point dict.
end_points = slim.utils.convert_collection_to_dict(end_points_collection)
if spatial_squeeze:
net = tf.squeeze(net, [1, 2], name='fc8/squeezed')
end_points[sc.name + '/fc8'] = net
return net, end_points
vgg_19.default_image_size = 224
# Alias
vgg_d = vgg_16
vgg_e = vgg_19
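# Illustrative usage sketch (an addition, not part of the original file),
# following the docstring example at the top of the file: build an
# inference-mode VGG-16 classification graph for 224x224 RGB inputs. The
# function name is an assumption for demonstration only.
def example_vgg_16_graph(batch_size=None, num_classes=1000):
  """Builds a VGG-16 inference graph (illustrative only)."""
  inputs = tf.placeholder(tf.float32, [batch_size, 224, 224, 3])
  with slim.arg_scope(vgg_arg_scope()):
    logits, end_points = vgg_16(inputs, num_classes=num_classes,
                                is_training=False)
  return inputs, logits, end_points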
| apache-2.0 |
highlander12rus/whatsupmoscow.ru | demon/main.py | 1 | 3580 | # -*- coding: utf-8 -*-
__author__ = 'meanwhile'
import ssl
import time
import socket
import sys
import logging
import vkontakte
import ProvaderStorage
import Constants
import FileWriter
import ProccessingResponce
import daemon
class VkParserDemon(daemon.Daemon):
def run(self):
        # read code for the vk.execute method from file
codeFromFile = ''
with open(Constants.Constants.getFileCodeExecute(), 'r') as f:
codeFromFile = f.read()
#read access token from file
access_tokens = [];
with open(Constants.Constants.getFileAccessToken(), 'r') as f:
access_tokens = [token.strip() for token in f]
isValidToken = False;
for acces_token in access_tokens:
try:
vk = vkontakte.API(token=acces_token)
                vk.getServerTime()  # check that the connection works
isValidToken = True
break
except vkontakte.VKError, e:
logging.error("vkontakte.VKError ")
except ssl.SSLError, e: #The handshake operation timed out
logging.error("ssl error")
time.sleep(1)
access_tokens.append(acces_token)
if (isValidToken):
storage = ProvaderStorage.ProvaderStorage()
lastTime = vk.getServerTime()
emptyLastTime = 0;
while True:
try:
time.sleep(Constants.Constants.getTimeOutInSec())
codeSending = codeFromFile.replace('%time_replace%', str(lastTime))
json = vk.execute(code=codeSending, timeout=10)
logging.debug("vk_json responce ", json)
fileName = Constants.Constants.getDirHomeScript() + str(time.strftime("%d-%m-%Y")) + ".vkr" #vk raw
file = FileWriter.FileWriterBinary(fileName)
process = ProccessingResponce.ProccessingResponce(storage, file)
process.jsonParse(json)
if json['max_time'] > 0:
lastTime = json['max_time'] + 1
else:
logging.debug("empty json= ", json)
logging.debug("lastTime= ", lastTime)
logging.debug("complidet proccessing")
except ssl.SSLError, e:
logging.error("ssl error")
except socket.timeout, e:
logging.error("socket.timeout")
except vkontakte.VKError, e:
logging.error("vkontakte.VKError")
except AttributeError, e:
logging.error("AttributeError")
else:
            # TODO: send email - tokens are not valid
            logging.error("token invalid")
if __name__ == "__main__":
logging.basicConfig(format=u'%(filename)s[LINE:%(lineno)d]# %(levelname)-8s [%(asctime)s] %(message)s',
level=logging.ERROR)
daemon = VkParserDemon('/tmp/daemon-example.pid', stdout='/var/log/vk_parser/stdout.log',
stderr='/var/log/vk_parser/error.log')
if len(sys.argv) == 2:
if 'start' == sys.argv[1]:
daemon.start()
elif 'stop' == sys.argv[1]:
daemon.stop()
elif 'restart' == sys.argv[1]:
daemon.restart()
else:
print "Unknown command"
sys.exit(2)
sys.exit(0)
else:
print "usage: %s start|stop|restart" % sys.argv[0]
sys.exit(2)
| apache-2.0 |
nsol-nmsu/ns3-smartgrid | examples/wireless/examples-to-run.py | 23 | 2054 | #! /usr/bin/env python
## -*- Mode: python; py-indent-offset: 4; indent-tabs-mode: nil; coding: utf-8; -*-
# A list of C++ examples to run in order to ensure that they remain
# buildable and runnable over time. Each tuple in the list contains
#
# (example_name, do_run, do_valgrind_run).
#
# See test.py for more information.
cpp_examples = [
("mixed-wireless", "True", "True"),
("multirate --totalTime=0.3s --rateManager=ns3::AarfcdWifiManager", "True", "True"),
("multirate --totalTime=0.3s --rateManager=ns3::AmrrWifiManager", "True", "True"),
("multirate --totalTime=0.3s --rateManager=ns3::CaraWifiManager", "True", "True"),
("multirate --totalTime=0.3s --rateManager=ns3::IdealWifiManager", "True", "True"),
("multirate --totalTime=0.3s --rateManager=ns3::MinstrelWifiManager", "True", "True"),
("multirate --totalTime=0.3s --rateManager=ns3::OnoeWifiManager", "True", "True"),
("multirate --totalTime=0.3s --rateManager=ns3::RraaWifiManager", "True", "True"),
("simple-wifi-frame-aggregation", "True", "True"),
("wifi-adhoc", "False", "True"), # Takes too long to run
("wifi-ap --verbose=0", "True", "True"), # Don't let it spew to stdout
("wifi-clear-channel-cmu", "False", "True"), # Requires specific hardware
("wifi-simple-adhoc", "True", "True"),
("wifi-simple-adhoc-grid", "True", "True"),
("wifi-simple-infra", "True", "True"),
("wifi-simple-interference", "True", "True"),
("wifi-wired-bridging", "True", "True"),
("power-adaptation-distance --manager=ns3::ParfWifiManager --outputFileName=parf --steps=5 --stepsSize=10", "True", "True"),
("power-adaptation-distance --manager=ns3::AparfWifiManager --outputFileName=aparf --steps=5 --stepsSize=10", "True", "True"),
]
# A list of Python examples to run in order to ensure that they remain
# runnable over time. Each tuple in the list contains
#
# (example_name, do_run).
#
# See test.py for more information.
python_examples = [
("wifi-ap.py", "True"),
("mixed-wireless.py", "True"),
]
| gpl-2.0 |
CEG-FYP-OpenStack/scheduler | nova/image/download/__init__.py | 18 | 1592 | # Copyright 2013 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import log as logging
import stevedore.driver
import stevedore.extension
from nova.i18n import _LE
LOG = logging.getLogger(__name__)
def load_transfer_modules():
module_dictionary = {}
ex = stevedore.extension.ExtensionManager('nova.image.download.modules')
for module_name in ex.names():
mgr = stevedore.driver.DriverManager(
namespace='nova.image.download.modules',
name=module_name,
invoke_on_load=False)
schemes_list = mgr.driver.get_schemes()
for scheme in schemes_list:
if scheme in module_dictionary:
LOG.error(_LE('%(scheme)s is registered as a module twice. '
'%(module_name)s is not being used.'),
{'scheme': scheme,
'module_name': module_name})
else:
module_dictionary[scheme] = mgr.driver
return module_dictionary
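# Illustrative sketch (assumptions, not shipped with nova): a transfer module
# is any stevedore driver registered under the 'nova.image.download.modules'
# entry point namespace whose driver object exposes get_schemes(), e.g.
#
#     class ExampleFileTransfer(object):
#         @staticmethod
#         def get_schemes():
#             return ['file', 'filesystem']
#
# with a setup.cfg entry such as:
#
#     [entry_points]
#     nova.image.download.modules =
#         file = mypackage.file_transfer:ExampleFileTransfer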
| apache-2.0 |
Distrotech/mozjs | js/src/testing/mozbase/mozhttpd/tests/api.py | 5 | 9724 | #!/usr/bin/env python
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
import mozhttpd
import urllib2
import os
import unittest
import re
try:
import json
except ImportError:
import simplejson as json
import tempfile
here = os.path.dirname(os.path.abspath(__file__))
class ApiTest(unittest.TestCase):
resource_get_called = 0
resource_post_called = 0
resource_del_called = 0
@mozhttpd.handlers.json_response
def resource_get(self, request, objid):
self.resource_get_called += 1
return (200, { 'called': self.resource_get_called,
'id': objid,
'query': request.query })
@mozhttpd.handlers.json_response
def resource_post(self, request):
self.resource_post_called += 1
return (201, { 'called': self.resource_post_called,
'data': json.loads(request.body),
'query': request.query })
@mozhttpd.handlers.json_response
def resource_del(self, request, objid):
self.resource_del_called += 1
return (200, { 'called': self.resource_del_called,
'id': objid,
'query': request.query })
def get_url(self, path, server_port, querystr):
url = "http://127.0.0.1:%s%s" % (server_port, path)
if querystr:
url += "?%s" % querystr
return url
def try_get(self, server_port, querystr):
self.resource_get_called = 0
f = urllib2.urlopen(self.get_url('/api/resource/1', server_port, querystr))
try:
self.assertEqual(f.getcode(), 200)
except AttributeError:
pass # python 2.4
self.assertEqual(json.loads(f.read()), { 'called': 1, 'id': str(1), 'query': querystr })
self.assertEqual(self.resource_get_called, 1)
def try_post(self, server_port, querystr):
self.resource_post_called = 0
postdata = { 'hamburgers': '1234' }
try:
f = urllib2.urlopen(self.get_url('/api/resource/', server_port, querystr),
data=json.dumps(postdata))
except urllib2.HTTPError, e:
# python 2.4
self.assertEqual(e.code, 201)
body = e.fp.read()
else:
self.assertEqual(f.getcode(), 201)
body = f.read()
self.assertEqual(json.loads(body), { 'called': 1,
'data': postdata,
'query': querystr })
self.assertEqual(self.resource_post_called, 1)
def try_del(self, server_port, querystr):
self.resource_del_called = 0
opener = urllib2.build_opener(urllib2.HTTPHandler)
request = urllib2.Request(self.get_url('/api/resource/1', server_port, querystr))
request.get_method = lambda: 'DEL'
f = opener.open(request)
try:
self.assertEqual(f.getcode(), 200)
except AttributeError:
pass # python 2.4
self.assertEqual(json.loads(f.read()), { 'called': 1, 'id': str(1), 'query': querystr })
self.assertEqual(self.resource_del_called, 1)
def test_api(self):
httpd = mozhttpd.MozHttpd(port=0,
urlhandlers = [ { 'method': 'GET',
'path': '/api/resource/([^/]+)/?',
'function': self.resource_get },
{ 'method': 'POST',
'path': '/api/resource/?',
'function': self.resource_post },
{ 'method': 'DEL',
'path': '/api/resource/([^/]+)/?',
'function': self.resource_del }
])
httpd.start(block=False)
server_port = httpd.httpd.server_port
# GET
self.try_get(server_port, '')
self.try_get(server_port, '?foo=bar')
# POST
self.try_post(server_port, '')
self.try_post(server_port, '?foo=bar')
# DEL
self.try_del(server_port, '')
self.try_del(server_port, '?foo=bar')
# GET: By default we don't serve any files if we just define an API
f = None
exception_thrown = False
try:
f = urllib2.urlopen(self.get_url('/', server_port, None))
except urllib2.HTTPError, e:
self.assertEqual(e.code, 404)
exception_thrown = True
self.assertTrue(exception_thrown)
def test_nonexistent_resources(self):
# Create a server with a placeholder handler so we don't fall back
# to serving local files
httpd = mozhttpd.MozHttpd(port=0)
httpd.start(block=False)
server_port = httpd.httpd.server_port
# GET: Return 404 for non-existent endpoint
f = None
exception_thrown = False
try:
f = urllib2.urlopen(self.get_url('/api/resource/', server_port, None))
except urllib2.HTTPError, e:
self.assertEqual(e.code, 404)
exception_thrown = True
self.assertTrue(exception_thrown)
# POST: POST should also return 404
f = None
exception_thrown = False
try:
f = urllib2.urlopen(self.get_url('/api/resource/', server_port, None),
data=json.dumps({}))
except urllib2.HTTPError, e:
self.assertEqual(e.code, 404)
exception_thrown = True
self.assertTrue(exception_thrown)
# DEL: DEL should also return 404
f = None
exception_thrown = False
try:
opener = urllib2.build_opener(urllib2.HTTPHandler)
request = urllib2.Request(self.get_url('/api/resource/', server_port,
None))
request.get_method = lambda: 'DEL'
f = opener.open(request)
except urllib2.HTTPError, e:
self.assertEqual(e.code, 404)
exception_thrown = True
self.assertTrue(exception_thrown)
def test_api_with_docroot(self):
httpd = mozhttpd.MozHttpd(port=0, docroot=here,
urlhandlers = [ { 'method': 'GET',
'path': '/api/resource/([^/]+)/?',
'function': self.resource_get } ])
httpd.start(block=False)
server_port = httpd.httpd.server_port
# We defined a docroot, so we expect a directory listing
f = urllib2.urlopen(self.get_url('/', server_port, None))
try:
self.assertEqual(f.getcode(), 200)
except AttributeError:
pass # python 2.4
self.assertTrue('Directory listing for' in f.read())
# Make sure API methods still work
self.try_get(server_port, '')
self.try_get(server_port, '?foo=bar')
class ProxyTest(unittest.TestCase):
def tearDown(self):
# reset proxy opener in case it changed
urllib2.install_opener(None)
def test_proxy(self):
docroot = tempfile.mkdtemp()
hosts = ('mozilla.com', 'mozilla.org')
unproxied_host = 'notmozilla.org'
def url(host): return 'http://%s/' % host
index_filename = 'index.html'
def index_contents(host): return '%s index' % host
index = file(os.path.join(docroot, index_filename), 'w')
index.write(index_contents('*'))
index.close()
httpd = mozhttpd.MozHttpd(port=0, docroot=docroot)
httpd.start(block=False)
server_port = httpd.httpd.server_port
proxy_support = urllib2.ProxyHandler({'http': 'http://127.0.0.1:%d' %
server_port})
urllib2.install_opener(urllib2.build_opener(proxy_support))
for host in hosts:
f = urllib2.urlopen(url(host))
try:
self.assertEqual(f.getcode(), 200)
except AttributeError:
pass # python 2.4
self.assertEqual(f.read(), index_contents('*'))
httpd.stop()
# test separate directories per host
httpd = mozhttpd.MozHttpd(port=0, docroot=docroot, proxy_host_dirs=True)
httpd.start(block=False)
server_port = httpd.httpd.server_port
proxy_support = urllib2.ProxyHandler({'http': 'http://127.0.0.1:%d' %
server_port})
urllib2.install_opener(urllib2.build_opener(proxy_support))
# set up dirs
for host in hosts:
os.mkdir(os.path.join(docroot, host))
file(os.path.join(docroot, host, index_filename), 'w') \
.write(index_contents(host))
for host in hosts:
f = urllib2.urlopen(url(host))
try:
self.assertEqual(f.getcode(), 200)
except AttributeError:
pass # python 2.4
self.assertEqual(f.read(), index_contents(host))
exc = None
try:
urllib2.urlopen(url(unproxied_host))
except urllib2.HTTPError, e:
exc = e
self.assertNotEqual(exc, None)
self.assertEqual(exc.code, 404)
if __name__ == '__main__':
unittest.main()
| mpl-2.0 |
caot/intellij-community | python/lib/Lib/sched.py | 83 | 4538 | """A generally useful event scheduler class.
Each instance of this class manages its own queue.
No multi-threading is implied; you are supposed to hack that
yourself, or use a single instance per application.
Each instance is parametrized with two functions, one that is
supposed to return the current time, one that is supposed to
implement a delay. You can implement real-time scheduling by
substituting time and sleep from built-in module time, or you can
implement simulated time by writing your own functions. This can
also be used to integrate scheduling with STDWIN events; the delay
function is allowed to modify the queue. Time can be expressed as
integers or floating point numbers, as long as it is consistent.
Events are specified by tuples (time, priority, action, argument).
As in UNIX, lower priority numbers mean higher priority; in this
way the queue can be maintained as a priority queue. Execution of the
event means calling the action function, passing it the argument.
Remember that in Python, multiple function arguments can be packed
in a tuple. The action function may be an instance method so it
has another way to reference private data (besides global variables).
Parameterless functions or methods cannot be used, however.
"""
# XXX The timefunc and delayfunc should have been defined as methods
# XXX so you can define new kinds of schedulers using subclassing
# XXX instead of having to define a module or class just to hold
# XXX the global state of your particular time and delay functions.
import heapq
__all__ = ["scheduler"]
class scheduler:
def __init__(self, timefunc, delayfunc):
"""Initialize a new instance, passing the time and delay
functions"""
self.queue = []
self.timefunc = timefunc
self.delayfunc = delayfunc
def enterabs(self, time, priority, action, argument):
"""Enter a new event in the queue at an absolute time.
Returns an ID for the event which can be used to remove it,
if necessary.
"""
event = time, priority, action, argument
heapq.heappush(self.queue, event)
return event # The ID
def enter(self, delay, priority, action, argument):
"""A variant that specifies the time as a relative time.
This is actually the more commonly used interface.
"""
time = self.timefunc() + delay
return self.enterabs(time, priority, action, argument)
def cancel(self, event):
"""Remove an event from the queue.
This must be presented the ID as returned by enter().
If the event is not in the queue, this raises RuntimeError.
"""
self.queue.remove(event)
heapq.heapify(self.queue)
def empty(self):
"""Check whether the queue is empty."""
return not self.queue
def run(self):
"""Execute events until the queue is empty.
When there is a positive delay until the first event, the
delay function is called and the event is left in the queue;
otherwise, the event is removed from the queue and executed
(its action function is called, passing it the argument). If
the delay function returns prematurely, it is simply
restarted.
It is legal for both the delay function and the action
        function to modify the queue or to raise an exception;
exceptions are not caught but the scheduler's state remains
well-defined so run() may be called again.
        A questionable hack is added to allow other threads to run:
just after an event is executed, a delay of 0 is executed, to
avoid monopolizing the CPU when other threads are also
runnable.
"""
# localize variable access to minimize overhead
# and to improve thread safety
q = self.queue
delayfunc = self.delayfunc
timefunc = self.timefunc
pop = heapq.heappop
while q:
time, priority, action, argument = checked_event = q[0]
now = timefunc()
if now < time:
delayfunc(time - now)
else:
event = pop(q)
# Verify that the event was not removed or altered
# by another thread after we last looked at q[0].
if event is checked_event:
void = action(*argument)
delayfunc(0) # Let other threads run
else:
heapq.heappush(q, event)
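# Illustrative usage (an addition, not part of the original module): a
# real-time scheduler built from time.time and time.sleep, as the module
# docstring describes. The event names below are arbitrary.
if __name__ == '__main__':
    import time
    def print_event(name):
        print name, time.time()
    s = scheduler(time.time, time.sleep)
    s.enter(1, 1, print_event, ('first',))   # fires after about 1 second
    s.enter(2, 1, print_event, ('second',))  # fires after about 2 seconds
    s.run()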
| apache-2.0 |
CenterForOpenScience/osf.io | osf/management/commands/archive_registrations_on_IA.py | 2 | 2243 | import logging
import django
django.setup()
import time
from framework.celery_tasks import app as celery_app
from osf.models import Registration
from website import settings
from osf.utils.requests import requests_retry_session
logger = logging.getLogger(__name__)
from django.core.management.base import BaseCommand
@celery_app.task(name='osf.management.commands.archive_registrations_on_IA')
def archive_registrations_on_IA(dry_run=False, batch_size=100, guids=None):
if guids:
registrations = Registration.objects.filter(guids___id__in=guids)
else:
# randomize order so large registrations won't block all pigeon workers,
# and stuck registrations won't block repeatedly
registrations = Registration.find_ia_backlog().order_by('?')[:batch_size]
logger.info(f'{registrations.count()} to be archived in batch')
for registration in registrations:
time.sleep(.1) # Don't DDOS self
if not dry_run:
logger.info(f'archiving {registration._id}')
requests_retry_session().post(f'{settings.OSF_PIGEON_URL}archive/{registration._id}')
else:
logger.info(f'DRY RUN for archiving {registration._id}')
class Command(BaseCommand):
"""
Nightly task to take a number of Registrations and gradually archive them on archive.org via our archiving service
osf-pigeon
"""
def add_arguments(self, parser):
super().add_arguments(parser)
parser.add_argument(
'--dry',
action='store_true',
dest='dry_run',
help='Run migration and roll back changes to db',
)
parser.add_argument(
'--batch_size',
'-b',
type=int,
help='number of registrations to archive.',
)
parser.add_argument(
'guids',
type=str,
            nargs='*',
help='List of guids to archive.',
)
def handle(self, *args, **options):
dry_run = options.get('dry_run', False)
batch_size = options.get('batch_size', 100)
guids = options.get('guids', [])
archive_registrations_on_IA(dry_run=dry_run, batch_size=batch_size, guids=guids)
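# Example invocations (illustrative; the command name is derived from this
# module's filename under osf/management/commands/):
#
#     python manage.py archive_registrations_on_IA --dry --batch_size 10
#     python manage.py archive_registrations_on_IA guid1 guid2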
| apache-2.0 |
golismero/golismero | thirdparty_libs/chardet/gb2312freq.py | 323 | 36001 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
# GB2312 most frequently used character table
#
# Char to FreqOrder table , from hz6763
# 512 --> 0.79 -- 0.79
# 1024 --> 0.92 -- 0.13
# 2048 --> 0.98 -- 0.06
# 6768 --> 1.00 -- 0.02
#
# Ideal Distribution Ratio = 0.79135/(1-0.79135) = 3.79
# Random Distribution Ration = 512 / (3755 - 512) = 0.157
#
# Typical Distribution Ratio about 25% of Ideal one, still much higher that RDR
GB2312_TYPICAL_DISTRIBUTION_RATIO = 0.9
GB2312_TABLE_SIZE = 3760
GB2312CharToFreqOrder = ( \
1671, 749,1443,2364,3924,3807,2330,3921,1704,3463,2691,1511,1515, 572,3191,2205,
2361, 224,2558, 479,1711, 963,3162, 440,4060,1905,2966,2947,3580,2647,3961,3842,
2204, 869,4207, 970,2678,5626,2944,2956,1479,4048, 514,3595, 588,1346,2820,3409,
249,4088,1746,1873,2047,1774, 581,1813, 358,1174,3590,1014,1561,4844,2245, 670,
1636,3112, 889,1286, 953, 556,2327,3060,1290,3141, 613, 185,3477,1367, 850,3820,
1715,2428,2642,2303,2732,3041,2562,2648,3566,3946,1349, 388,3098,2091,1360,3585,
152,1687,1539, 738,1559, 59,1232,2925,2267,1388,1249,1741,1679,2960, 151,1566,
1125,1352,4271, 924,4296, 385,3166,4459, 310,1245,2850, 70,3285,2729,3534,3575,
2398,3298,3466,1960,2265, 217,3647, 864,1909,2084,4401,2773,1010,3269,5152, 853,
3051,3121,1244,4251,1895, 364,1499,1540,2313,1180,3655,2268, 562, 715,2417,3061,
544, 336,3768,2380,1752,4075, 950, 280,2425,4382, 183,2759,3272, 333,4297,2155,
1688,2356,1444,1039,4540, 736,1177,3349,2443,2368,2144,2225, 565, 196,1482,3406,
927,1335,4147, 692, 878,1311,1653,3911,3622,1378,4200,1840,2969,3149,2126,1816,
2534,1546,2393,2760, 737,2494, 13, 447, 245,2747, 38,2765,2129,2589,1079, 606,
360, 471,3755,2890, 404, 848, 699,1785,1236, 370,2221,1023,3746,2074,2026,2023,
2388,1581,2119, 812,1141,3091,2536,1519, 804,2053, 406,1596,1090, 784, 548,4414,
1806,2264,2936,1100, 343,4114,5096, 622,3358, 743,3668,1510,1626,5020,3567,2513,
3195,4115,5627,2489,2991, 24,2065,2697,1087,2719, 48,1634, 315, 68, 985,2052,
198,2239,1347,1107,1439, 597,2366,2172, 871,3307, 919,2487,2790,1867, 236,2570,
1413,3794, 906,3365,3381,1701,1982,1818,1524,2924,1205, 616,2586,2072,2004, 575,
253,3099, 32,1365,1182, 197,1714,2454,1201, 554,3388,3224,2748, 756,2587, 250,
2567,1507,1517,3529,1922,2761,2337,3416,1961,1677,2452,2238,3153, 615, 911,1506,
1474,2495,1265,1906,2749,3756,3280,2161, 898,2714,1759,3450,2243,2444, 563, 26,
3286,2266,3769,3344,2707,3677, 611,1402, 531,1028,2871,4548,1375, 261,2948, 835,
1190,4134, 353, 840,2684,1900,3082,1435,2109,1207,1674, 329,1872,2781,4055,2686,
2104, 608,3318,2423,2957,2768,1108,3739,3512,3271,3985,2203,1771,3520,1418,2054,
1681,1153, 225,1627,2929, 162,2050,2511,3687,1954, 124,1859,2431,1684,3032,2894,
585,4805,3969,2869,2704,2088,2032,2095,3656,2635,4362,2209, 256, 518,2042,2105,
3777,3657, 643,2298,1148,1779, 190, 989,3544, 414, 11,2135,2063,2979,1471, 403,
3678, 126, 770,1563, 671,2499,3216,2877, 600,1179, 307,2805,4937,1268,1297,2694,
252,4032,1448,1494,1331,1394, 127,2256, 222,1647,1035,1481,3056,1915,1048, 873,
3651, 210, 33,1608,2516, 200,1520, 415, 102, 0,3389,1287, 817, 91,3299,2940,
836,1814, 549,2197,1396,1669,2987,3582,2297,2848,4528,1070, 687, 20,1819, 121,
1552,1364,1461,1968,2617,3540,2824,2083, 177, 948,4938,2291, 110,4549,2066, 648,
3359,1755,2110,2114,4642,4845,1693,3937,3308,1257,1869,2123, 208,1804,3159,2992,
2531,2549,3361,2418,1350,2347,2800,2568,1291,2036,2680, 72, 842,1990, 212,1233,
1154,1586, 75,2027,3410,4900,1823,1337,2710,2676, 728,2810,1522,3026,4995, 157,
755,1050,4022, 710, 785,1936,2194,2085,1406,2777,2400, 150,1250,4049,1206, 807,
1910, 534, 529,3309,1721,1660, 274, 39,2827, 661,2670,1578, 925,3248,3815,1094,
4278,4901,4252, 41,1150,3747,2572,2227,4501,3658,4902,3813,3357,3617,2884,2258,
887, 538,4187,3199,1294,2439,3042,2329,2343,2497,1255, 107, 543,1527, 521,3478,
3568, 194,5062, 15, 961,3870,1241,1192,2664, 66,5215,3260,2111,1295,1127,2152,
3805,4135, 901,1164,1976, 398,1278, 530,1460, 748, 904,1054,1966,1426, 53,2909,
509, 523,2279,1534, 536,1019, 239,1685, 460,2353, 673,1065,2401,3600,4298,2272,
1272,2363, 284,1753,3679,4064,1695, 81, 815,2677,2757,2731,1386, 859, 500,4221,
2190,2566, 757,1006,2519,2068,1166,1455, 337,2654,3203,1863,1682,1914,3025,1252,
1409,1366, 847, 714,2834,2038,3209, 964,2970,1901, 885,2553,1078,1756,3049, 301,
1572,3326, 688,2130,1996,2429,1805,1648,2930,3421,2750,3652,3088, 262,1158,1254,
389,1641,1812, 526,1719, 923,2073,1073,1902, 468, 489,4625,1140, 857,2375,3070,
3319,2863, 380, 116,1328,2693,1161,2244, 273,1212,1884,2769,3011,1775,1142, 461,
3066,1200,2147,2212, 790, 702,2695,4222,1601,1058, 434,2338,5153,3640, 67,2360,
4099,2502, 618,3472,1329, 416,1132, 830,2782,1807,2653,3211,3510,1662, 192,2124,
296,3979,1739,1611,3684, 23, 118, 324, 446,1239,1225, 293,2520,3814,3795,2535,
3116, 17,1074, 467,2692,2201, 387,2922, 45,1326,3055,1645,3659,2817, 958, 243,
1903,2320,1339,2825,1784,3289, 356, 576, 865,2315,2381,3377,3916,1088,3122,1713,
1655, 935, 628,4689,1034,1327, 441, 800, 720, 894,1979,2183,1528,5289,2702,1071,
4046,3572,2399,1571,3281, 79, 761,1103, 327, 134, 758,1899,1371,1615, 879, 442,
215,2605,2579, 173,2048,2485,1057,2975,3317,1097,2253,3801,4263,1403,1650,2946,
814,4968,3487,1548,2644,1567,1285, 2, 295,2636, 97, 946,3576, 832, 141,4257,
3273, 760,3821,3521,3156,2607, 949,1024,1733,1516,1803,1920,2125,2283,2665,3180,
1501,2064,3560,2171,1592, 803,3518,1416, 732,3897,4258,1363,1362,2458, 119,1427,
602,1525,2608,1605,1639,3175, 694,3064, 10, 465, 76,2000,4846,4208, 444,3781,
1619,3353,2206,1273,3796, 740,2483, 320,1723,2377,3660,2619,1359,1137,1762,1724,
2345,2842,1850,1862, 912, 821,1866, 612,2625,1735,2573,3369,1093, 844, 89, 937,
930,1424,3564,2413,2972,1004,3046,3019,2011, 711,3171,1452,4178, 428, 801,1943,
432, 445,2811, 206,4136,1472, 730, 349, 73, 397,2802,2547, 998,1637,1167, 789,
396,3217, 154,1218, 716,1120,1780,2819,4826,1931,3334,3762,2139,1215,2627, 552,
3664,3628,3232,1405,2383,3111,1356,2652,3577,3320,3101,1703, 640,1045,1370,1246,
4996, 371,1575,2436,1621,2210, 984,4033,1734,2638, 16,4529, 663,2755,3255,1451,
3917,2257,1253,1955,2234,1263,2951, 214,1229, 617, 485, 359,1831,1969, 473,2310,
750,2058, 165, 80,2864,2419, 361,4344,2416,2479,1134, 796,3726,1266,2943, 860,
2715, 938, 390,2734,1313,1384, 248, 202, 877,1064,2854, 522,3907, 279,1602, 297,
2357, 395,3740, 137,2075, 944,4089,2584,1267,3802, 62,1533,2285, 178, 176, 780,
2440, 201,3707, 590, 478,1560,4354,2117,1075, 30, 74,4643,4004,1635,1441,2745,
776,2596, 238,1077,1692,1912,2844, 605, 499,1742,3947, 241,3053, 980,1749, 936,
2640,4511,2582, 515,1543,2162,5322,2892,2993, 890,2148,1924, 665,1827,3581,1032,
968,3163, 339,1044,1896, 270, 583,1791,1720,4367,1194,3488,3669, 43,2523,1657,
163,2167, 290,1209,1622,3378, 550, 634,2508,2510, 695,2634,2384,2512,1476,1414,
220,1469,2341,2138,2852,3183,2900,4939,2865,3502,1211,3680, 854,3227,1299,2976,
3172, 186,2998,1459, 443,1067,3251,1495, 321,1932,3054, 909, 753,1410,1828, 436,
2441,1119,1587,3164,2186,1258, 227, 231,1425,1890,3200,3942, 247, 959, 725,5254,
2741, 577,2158,2079, 929, 120, 174, 838,2813, 591,1115, 417,2024, 40,3240,1536,
1037, 291,4151,2354, 632,1298,2406,2500,3535,1825,1846,3451, 205,1171, 345,4238,
18,1163, 811, 685,2208,1217, 425,1312,1508,1175,4308,2552,1033, 587,1381,3059,
2984,3482, 340,1316,4023,3972, 792,3176, 519, 777,4690, 918, 933,4130,2981,3741,
90,3360,2911,2200,5184,4550, 609,3079,2030, 272,3379,2736, 363,3881,1130,1447,
286, 779, 357,1169,3350,3137,1630,1220,2687,2391, 747,1277,3688,2618,2682,2601,
1156,3196,5290,4034,3102,1689,3596,3128, 874, 219,2783, 798, 508,1843,2461, 269,
1658,1776,1392,1913,2983,3287,2866,2159,2372, 829,4076, 46,4253,2873,1889,1894,
915,1834,1631,2181,2318, 298, 664,2818,3555,2735, 954,3228,3117, 527,3511,2173,
681,2712,3033,2247,2346,3467,1652, 155,2164,3382, 113,1994, 450, 899, 494, 994,
1237,2958,1875,2336,1926,3727, 545,1577,1550, 633,3473, 204,1305,3072,2410,1956,
2471, 707,2134, 841,2195,2196,2663,3843,1026,4940, 990,3252,4997, 368,1092, 437,
3212,3258,1933,1829, 675,2977,2893, 412, 943,3723,4644,3294,3283,2230,2373,5154,
2389,2241,2661,2323,1404,2524, 593, 787, 677,3008,1275,2059, 438,2709,2609,2240,
2269,2246,1446, 36,1568,1373,3892,1574,2301,1456,3962, 693,2276,5216,2035,1143,
2720,1919,1797,1811,2763,4137,2597,1830,1699,1488,1198,2090, 424,1694, 312,3634,
3390,4179,3335,2252,1214, 561,1059,3243,2295,2561, 975,5155,2321,2751,3772, 472,
1537,3282,3398,1047,2077,2348,2878,1323,3340,3076, 690,2906, 51, 369, 170,3541,
1060,2187,2688,3670,2541,1083,1683, 928,3918, 459, 109,4427, 599,3744,4286, 143,
2101,2730,2490, 82,1588,3036,2121, 281,1860, 477,4035,1238,2812,3020,2716,3312,
1530,2188,2055,1317, 843, 636,1808,1173,3495, 649, 181,1002, 147,3641,1159,2414,
3750,2289,2795, 813,3123,2610,1136,4368, 5,3391,4541,2174, 420, 429,1728, 754,
1228,2115,2219, 347,2223,2733, 735,1518,3003,2355,3134,1764,3948,3329,1888,2424,
1001,1234,1972,3321,3363,1672,1021,1450,1584, 226, 765, 655,2526,3404,3244,2302,
3665, 731, 594,2184, 319,1576, 621, 658,2656,4299,2099,3864,1279,2071,2598,2739,
795,3086,3699,3908,1707,2352,2402,1382,3136,2475,1465,4847,3496,3865,1085,3004,
2591,1084, 213,2287,1963,3565,2250, 822, 793,4574,3187,1772,1789,3050, 595,1484,
1959,2770,1080,2650, 456, 422,2996, 940,3322,4328,4345,3092,2742, 965,2784, 739,
4124, 952,1358,2498,2949,2565, 332,2698,2378, 660,2260,2473,4194,3856,2919, 535,
1260,2651,1208,1428,1300,1949,1303,2942, 433,2455,2450,1251,1946, 614,1269, 641,
1306,1810,2737,3078,2912, 564,2365,1419,1415,1497,4460,2367,2185,1379,3005,1307,
3218,2175,1897,3063, 682,1157,4040,4005,1712,1160,1941,1399, 394, 402,2952,1573,
1151,2986,2404, 862, 299,2033,1489,3006, 346, 171,2886,3401,1726,2932, 168,2533,
47,2507,1030,3735,1145,3370,1395,1318,1579,3609,4560,2857,4116,1457,2529,1965,
504,1036,2690,2988,2405, 745,5871, 849,2397,2056,3081, 863,2359,3857,2096, 99,
1397,1769,2300,4428,1643,3455,1978,1757,3718,1440, 35,4879,3742,1296,4228,2280,
160,5063,1599,2013, 166, 520,3479,1646,3345,3012, 490,1937,1545,1264,2182,2505,
1096,1188,1369,1436,2421,1667,2792,2460,1270,2122, 727,3167,2143, 806,1706,1012,
1800,3037, 960,2218,1882, 805, 139,2456,1139,1521, 851,1052,3093,3089, 342,2039,
744,5097,1468,1502,1585,2087, 223, 939, 326,2140,2577, 892,2481,1623,4077, 982,
3708, 135,2131, 87,2503,3114,2326,1106, 876,1616, 547,2997,2831,2093,3441,4530,
4314, 9,3256,4229,4148, 659,1462,1986,1710,2046,2913,2231,4090,4880,5255,3392,
3274,1368,3689,4645,1477, 705,3384,3635,1068,1529,2941,1458,3782,1509, 100,1656,
2548, 718,2339, 408,1590,2780,3548,1838,4117,3719,1345,3530, 717,3442,2778,3220,
2898,1892,4590,3614,3371,2043,1998,1224,3483, 891, 635, 584,2559,3355, 733,1766,
1729,1172,3789,1891,2307, 781,2982,2271,1957,1580,5773,2633,2005,4195,3097,1535,
3213,1189,1934,5693,3262, 586,3118,1324,1598, 517,1564,2217,1868,1893,4445,3728,
2703,3139,1526,1787,1992,3882,2875,1549,1199,1056,2224,1904,2711,5098,4287, 338,
1993,3129,3489,2689,1809,2815,1997, 957,1855,3898,2550,3275,3057,1105,1319, 627,
1505,1911,1883,3526, 698,3629,3456,1833,1431, 746, 77,1261,2017,2296,1977,1885,
125,1334,1600, 525,1798,1109,2222,1470,1945, 559,2236,1186,3443,2476,1929,1411,
2411,3135,1777,3372,2621,1841,1613,3229, 668,1430,1839,2643,2916, 195,1989,2671,
2358,1387, 629,3205,2293,5256,4439, 123,1310, 888,1879,4300,3021,3605,1003,1162,
3192,2910,2010, 140,2395,2859, 55,1082,2012,2901, 662, 419,2081,1438, 680,2774,
4654,3912,1620,1731,1625,5035,4065,2328, 512,1344, 802,5443,2163,2311,2537, 524,
3399, 98,1155,2103,1918,2606,3925,2816,1393,2465,1504,3773,2177,3963,1478,4346,
180,1113,4655,3461,2028,1698, 833,2696,1235,1322,1594,4408,3623,3013,3225,2040,
3022, 541,2881, 607,3632,2029,1665,1219, 639,1385,1686,1099,2803,3231,1938,3188,
2858, 427, 676,2772,1168,2025, 454,3253,2486,3556, 230,1950, 580, 791,1991,1280,
1086,1974,2034, 630, 257,3338,2788,4903,1017, 86,4790, 966,2789,1995,1696,1131,
259,3095,4188,1308, 179,1463,5257, 289,4107,1248, 42,3413,1725,2288, 896,1947,
774,4474,4254, 604,3430,4264, 392,2514,2588, 452, 237,1408,3018, 988,4531,1970,
3034,3310, 540,2370,1562,1288,2990, 502,4765,1147, 4,1853,2708, 207, 294,2814,
4078,2902,2509, 684, 34,3105,3532,2551, 644, 709,2801,2344, 573,1727,3573,3557,
2021,1081,3100,4315,2100,3681, 199,2263,1837,2385, 146,3484,1195,2776,3949, 997,
1939,3973,1008,1091,1202,1962,1847,1149,4209,5444,1076, 493, 117,5400,2521, 972,
1490,2934,1796,4542,2374,1512,2933,2657, 413,2888,1135,2762,2314,2156,1355,2369,
766,2007,2527,2170,3124,2491,2593,2632,4757,2437, 234,3125,3591,1898,1750,1376,
1942,3468,3138, 570,2127,2145,3276,4131, 962, 132,1445,4196, 19, 941,3624,3480,
3366,1973,1374,4461,3431,2629, 283,2415,2275, 808,2887,3620,2112,2563,1353,3610,
955,1089,3103,1053, 96, 88,4097, 823,3808,1583, 399, 292,4091,3313, 421,1128,
642,4006, 903,2539,1877,2082, 596, 29,4066,1790, 722,2157, 130, 995,1569, 769,
1485, 464, 513,2213, 288,1923,1101,2453,4316, 133, 486,2445, 50, 625, 487,2207,
57, 423, 481,2962, 159,3729,1558, 491, 303, 482, 501, 240,2837, 112,3648,2392,
1783, 362, 8,3433,3422, 610,2793,3277,1390,1284,1654, 21,3823, 734, 367, 623,
193, 287, 374,1009,1483, 816, 476, 313,2255,2340,1262,2150,2899,1146,2581, 782,
2116,1659,2018,1880, 255,3586,3314,1110,2867,2137,2564, 986,2767,5185,2006, 650,
158, 926, 762, 881,3157,2717,2362,3587, 306,3690,3245,1542,3077,2427,1691,2478,
2118,2985,3490,2438, 539,2305, 983, 129,1754, 355,4201,2386, 827,2923, 104,1773,
2838,2771, 411,2905,3919, 376, 767, 122,1114, 828,2422,1817,3506, 266,3460,1007,
1609,4998, 945,2612,4429,2274, 726,1247,1964,2914,2199,2070,4002,4108, 657,3323,
1422, 579, 455,2764,4737,1222,2895,1670, 824,1223,1487,2525, 558, 861,3080, 598,
2659,2515,1967, 752,2583,2376,2214,4180, 977, 704,2464,4999,2622,4109,1210,2961,
819,1541, 142,2284, 44, 418, 457,1126,3730,4347,4626,1644,1876,3671,1864, 302,
1063,5694, 624, 723,1984,3745,1314,1676,2488,1610,1449,3558,3569,2166,2098, 409,
1011,2325,3704,2306, 818,1732,1383,1824,1844,3757, 999,2705,3497,1216,1423,2683,
2426,2954,2501,2726,2229,1475,2554,5064,1971,1794,1666,2014,1343, 783, 724, 191,
2434,1354,2220,5065,1763,2752,2472,4152, 131, 175,2885,3434, 92,1466,4920,2616,
3871,3872,3866, 128,1551,1632, 669,1854,3682,4691,4125,1230, 188,2973,3290,1302,
1213, 560,3266, 917, 763,3909,3249,1760, 868,1958, 764,1782,2097, 145,2277,3774,
4462, 64,1491,3062, 971,2132,3606,2442, 221,1226,1617, 218, 323,1185,3207,3147,
571, 619,1473,1005,1744,2281, 449,1887,2396,3685, 275, 375,3816,1743,3844,3731,
845,1983,2350,4210,1377, 773, 967,3499,3052,3743,2725,4007,1697,1022,3943,1464,
3264,2855,2722,1952,1029,2839,2467, 84,4383,2215, 820,1391,2015,2448,3672, 377,
1948,2168, 797,2545,3536,2578,2645, 94,2874,1678, 405,1259,3071, 771, 546,1315,
470,1243,3083, 895,2468, 981, 969,2037, 846,4181, 653,1276,2928, 14,2594, 557,
3007,2474, 156, 902,1338,1740,2574, 537,2518, 973,2282,2216,2433,1928, 138,2903,
1293,2631,1612, 646,3457, 839,2935, 111, 496,2191,2847, 589,3186, 149,3994,2060,
4031,2641,4067,3145,1870, 37,3597,2136,1025,2051,3009,3383,3549,1121,1016,3261,
1301, 251,2446,2599,2153, 872,3246, 637, 334,3705, 831, 884, 921,3065,3140,4092,
2198,1944, 246,2964, 108,2045,1152,1921,2308,1031, 203,3173,4170,1907,3890, 810,
1401,2003,1690, 506, 647,1242,2828,1761,1649,3208,2249,1589,3709,2931,5156,1708,
498, 666,2613, 834,3817,1231, 184,2851,1124, 883,3197,2261,3710,1765,1553,2658,
1178,2639,2351, 93,1193, 942,2538,2141,4402, 235,1821, 870,1591,2192,1709,1871,
3341,1618,4126,2595,2334, 603, 651, 69, 701, 268,2662,3411,2555,1380,1606, 503,
448, 254,2371,2646, 574,1187,2309,1770, 322,2235,1292,1801, 305, 566,1133, 229,
2067,2057, 706, 167, 483,2002,2672,3295,1820,3561,3067, 316, 378,2746,3452,1112,
136,1981, 507,1651,2917,1117, 285,4591, 182,2580,3522,1304, 335,3303,1835,2504,
1795,1792,2248, 674,1018,2106,2449,1857,2292,2845, 976,3047,1781,2600,2727,1389,
1281, 52,3152, 153, 265,3950, 672,3485,3951,4463, 430,1183, 365, 278,2169, 27,
1407,1336,2304, 209,1340,1730,2202,1852,2403,2883, 979,1737,1062, 631,2829,2542,
3876,2592, 825,2086,2226,3048,3625, 352,1417,3724, 542, 991, 431,1351,3938,1861,
2294, 826,1361,2927,3142,3503,1738, 463,2462,2723, 582,1916,1595,2808, 400,3845,
3891,2868,3621,2254, 58,2492,1123, 910,2160,2614,1372,1603,1196,1072,3385,1700,
3267,1980, 696, 480,2430, 920, 799,1570,2920,1951,2041,4047,2540,1321,4223,2469,
3562,2228,1271,2602, 401,2833,3351,2575,5157, 907,2312,1256, 410, 263,3507,1582,
996, 678,1849,2316,1480, 908,3545,2237, 703,2322, 667,1826,2849,1531,2604,2999,
2407,3146,2151,2630,1786,3711, 469,3542, 497,3899,2409, 858, 837,4446,3393,1274,
786, 620,1845,2001,3311, 484, 308,3367,1204,1815,3691,2332,1532,2557,1842,2020,
2724,1927,2333,4440, 567, 22,1673,2728,4475,1987,1858,1144,1597, 101,1832,3601,
12, 974,3783,4391, 951,1412, 1,3720, 453,4608,4041, 528,1041,1027,3230,2628,
1129, 875,1051,3291,1203,2262,1069,2860,2799,2149,2615,3278, 144,1758,3040, 31,
475,1680, 366,2685,3184, 311,1642,4008,2466,5036,1593,1493,2809, 216,1420,1668,
233, 304,2128,3284, 232,1429,1768,1040,2008,3407,2740,2967,2543, 242,2133, 778,
1565,2022,2620, 505,2189,2756,1098,2273, 372,1614, 708, 553,2846,2094,2278, 169,
3626,2835,4161, 228,2674,3165, 809,1454,1309, 466,1705,1095, 900,3423, 880,2667,
3751,5258,2317,3109,2571,4317,2766,1503,1342, 866,4447,1118, 63,2076, 314,1881,
1348,1061, 172, 978,3515,1747, 532, 511,3970, 6, 601, 905,2699,3300,1751, 276,
1467,3725,2668, 65,4239,2544,2779,2556,1604, 578,2451,1802, 992,2331,2624,1320,
3446, 713,1513,1013, 103,2786,2447,1661, 886,1702, 916, 654,3574,2031,1556, 751,
2178,2821,2179,1498,1538,2176, 271, 914,2251,2080,1325, 638,1953,2937,3877,2432,
2754, 95,3265,1716, 260,1227,4083, 775, 106,1357,3254, 426,1607, 555,2480, 772,
1985, 244,2546, 474, 495,1046,2611,1851,2061, 71,2089,1675,2590, 742,3758,2843,
3222,1433, 267,2180,2576,2826,2233,2092,3913,2435, 956,1745,3075, 856,2113,1116,
451, 3,1988,2896,1398, 993,2463,1878,2049,1341,2718,2721,2870,2108, 712,2904,
4363,2753,2324, 277,2872,2349,2649, 384, 987, 435, 691,3000, 922, 164,3939, 652,
1500,1184,4153,2482,3373,2165,4848,2335,3775,3508,3154,2806,2830,1554,2102,1664,
2530,1434,2408, 893,1547,2623,3447,2832,2242,2532,3169,2856,3223,2078, 49,3770,
3469, 462, 318, 656,2259,3250,3069, 679,1629,2758, 344,1138,1104,3120,1836,1283,
3115,2154,1437,4448, 934, 759,1999, 794,2862,1038, 533,2560,1722,2342, 855,2626,
1197,1663,4476,3127, 85,4240,2528, 25,1111,1181,3673, 407,3470,4561,2679,2713,
768,1925,2841,3986,1544,1165, 932, 373,1240,2146,1930,2673, 721,4766, 354,4333,
391,2963, 187, 61,3364,1442,1102, 330,1940,1767, 341,3809,4118, 393,2496,2062,
2211, 105, 331, 300, 439, 913,1332, 626, 379,3304,1557, 328, 689,3952, 309,1555,
931, 317,2517,3027, 325, 569, 686,2107,3084, 60,1042,1333,2794, 264,3177,4014,
1628, 258,3712, 7,4464,1176,1043,1778, 683, 114,1975, 78,1492, 383,1886, 510,
386, 645,5291,2891,2069,3305,4138,3867,2939,2603,2493,1935,1066,1848,3588,1015,
1282,1289,4609, 697,1453,3044,2666,3611,1856,2412, 54, 719,1330, 568,3778,2459,
1748, 788, 492, 551,1191,1000, 488,3394,3763, 282,1799, 348,2016,1523,3155,2390,
1049, 382,2019,1788,1170, 729,2968,3523, 897,3926,2785,2938,3292, 350,2319,3238,
1718,1717,2655,3453,3143,4465, 161,2889,2980,2009,1421, 56,1908,1640,2387,2232,
1917,1874,2477,4921, 148, 83,3438, 592,4245,2882,1822,1055, 741, 115,1496,1624,
381,1638,4592,1020, 516,3214, 458, 947,4575,1432, 211,1514,2926,1865,2142, 189,
852,1221,1400,1486, 882,2299,4036, 351, 28,1122, 700,6479,6480,6481,6482,6483, # last 512
#Everything below is of no interest for detection purpose
5508,6484,3900,3414,3974,4441,4024,3537,4037,5628,5099,3633,6485,3148,6486,3636,
5509,3257,5510,5973,5445,5872,4941,4403,3174,4627,5873,6276,2286,4230,5446,5874,
5122,6102,6103,4162,5447,5123,5323,4849,6277,3980,3851,5066,4246,5774,5067,6278,
3001,2807,5695,3346,5775,5974,5158,5448,6487,5975,5976,5776,3598,6279,5696,4806,
4211,4154,6280,6488,6489,6490,6281,4212,5037,3374,4171,6491,4562,4807,4722,4827,
5977,6104,4532,4079,5159,5324,5160,4404,3858,5359,5875,3975,4288,4610,3486,4512,
5325,3893,5360,6282,6283,5560,2522,4231,5978,5186,5449,2569,3878,6284,5401,3578,
4415,6285,4656,5124,5979,2506,4247,4449,3219,3417,4334,4969,4329,6492,4576,4828,
4172,4416,4829,5402,6286,3927,3852,5361,4369,4830,4477,4867,5876,4173,6493,6105,
4657,6287,6106,5877,5450,6494,4155,4868,5451,3700,5629,4384,6288,6289,5878,3189,
4881,6107,6290,6495,4513,6496,4692,4515,4723,5100,3356,6497,6291,3810,4080,5561,
3570,4430,5980,6498,4355,5697,6499,4724,6108,6109,3764,4050,5038,5879,4093,3226,
6292,5068,5217,4693,3342,5630,3504,4831,4377,4466,4309,5698,4431,5777,6293,5778,
4272,3706,6110,5326,3752,4676,5327,4273,5403,4767,5631,6500,5699,5880,3475,5039,
6294,5562,5125,4348,4301,4482,4068,5126,4593,5700,3380,3462,5981,5563,3824,5404,
4970,5511,3825,4738,6295,6501,5452,4516,6111,5881,5564,6502,6296,5982,6503,4213,
4163,3454,6504,6112,4009,4450,6113,4658,6297,6114,3035,6505,6115,3995,4904,4739,
4563,4942,4110,5040,3661,3928,5362,3674,6506,5292,3612,4791,5565,4149,5983,5328,
5259,5021,4725,4577,4564,4517,4364,6298,5405,4578,5260,4594,4156,4157,5453,3592,
3491,6507,5127,5512,4709,4922,5984,5701,4726,4289,6508,4015,6116,5128,4628,3424,
4241,5779,6299,4905,6509,6510,5454,5702,5780,6300,4365,4923,3971,6511,5161,3270,
3158,5985,4100, 867,5129,5703,6117,5363,3695,3301,5513,4467,6118,6512,5455,4232,
4242,4629,6513,3959,4478,6514,5514,5329,5986,4850,5162,5566,3846,4694,6119,5456,
4869,5781,3779,6301,5704,5987,5515,4710,6302,5882,6120,4392,5364,5705,6515,6121,
6516,6517,3736,5988,5457,5989,4695,2457,5883,4551,5782,6303,6304,6305,5130,4971,
6122,5163,6123,4870,3263,5365,3150,4871,6518,6306,5783,5069,5706,3513,3498,4409,
5330,5632,5366,5458,5459,3991,5990,4502,3324,5991,5784,3696,4518,5633,4119,6519,
4630,5634,4417,5707,4832,5992,3418,6124,5993,5567,4768,5218,6520,4595,3458,5367,
6125,5635,6126,4202,6521,4740,4924,6307,3981,4069,4385,6308,3883,2675,4051,3834,
4302,4483,5568,5994,4972,4101,5368,6309,5164,5884,3922,6127,6522,6523,5261,5460,
5187,4164,5219,3538,5516,4111,3524,5995,6310,6311,5369,3181,3386,2484,5188,3464,
5569,3627,5708,6524,5406,5165,4677,4492,6312,4872,4851,5885,4468,5996,6313,5709,
5710,6128,2470,5886,6314,5293,4882,5785,3325,5461,5101,6129,5711,5786,6525,4906,
6526,6527,4418,5887,5712,4808,2907,3701,5713,5888,6528,3765,5636,5331,6529,6530,
3593,5889,3637,4943,3692,5714,5787,4925,6315,6130,5462,4405,6131,6132,6316,5262,
6531,6532,5715,3859,5716,5070,4696,5102,3929,5788,3987,4792,5997,6533,6534,3920,
4809,5000,5998,6535,2974,5370,6317,5189,5263,5717,3826,6536,3953,5001,4883,3190,
5463,5890,4973,5999,4741,6133,6134,3607,5570,6000,4711,3362,3630,4552,5041,6318,
6001,2950,2953,5637,4646,5371,4944,6002,2044,4120,3429,6319,6537,5103,4833,6538,
6539,4884,4647,3884,6003,6004,4758,3835,5220,5789,4565,5407,6540,6135,5294,4697,
4852,6320,6321,3206,4907,6541,6322,4945,6542,6136,6543,6323,6005,4631,3519,6544,
5891,6545,5464,3784,5221,6546,5571,4659,6547,6324,6137,5190,6548,3853,6549,4016,
4834,3954,6138,5332,3827,4017,3210,3546,4469,5408,5718,3505,4648,5790,5131,5638,
5791,5465,4727,4318,6325,6326,5792,4553,4010,4698,3439,4974,3638,4335,3085,6006,
5104,5042,5166,5892,5572,6327,4356,4519,5222,5573,5333,5793,5043,6550,5639,5071,
4503,6328,6139,6551,6140,3914,3901,5372,6007,5640,4728,4793,3976,3836,4885,6552,
4127,6553,4451,4102,5002,6554,3686,5105,6555,5191,5072,5295,4611,5794,5296,6556,
5893,5264,5894,4975,5466,5265,4699,4976,4370,4056,3492,5044,4886,6557,5795,4432,
4769,4357,5467,3940,4660,4290,6141,4484,4770,4661,3992,6329,4025,4662,5022,4632,
4835,4070,5297,4663,4596,5574,5132,5409,5895,6142,4504,5192,4664,5796,5896,3885,
5575,5797,5023,4810,5798,3732,5223,4712,5298,4084,5334,5468,6143,4052,4053,4336,
4977,4794,6558,5335,4908,5576,5224,4233,5024,4128,5469,5225,4873,6008,5045,4729,
4742,4633,3675,4597,6559,5897,5133,5577,5003,5641,5719,6330,6560,3017,2382,3854,
4406,4811,6331,4393,3964,4946,6561,2420,3722,6562,4926,4378,3247,1736,4442,6332,
5134,6333,5226,3996,2918,5470,4319,4003,4598,4743,4744,4485,3785,3902,5167,5004,
5373,4394,5898,6144,4874,1793,3997,6334,4085,4214,5106,5642,4909,5799,6009,4419,
4189,3330,5899,4165,4420,5299,5720,5227,3347,6145,4081,6335,2876,3930,6146,3293,
3786,3910,3998,5900,5300,5578,2840,6563,5901,5579,6147,3531,5374,6564,6565,5580,
4759,5375,6566,6148,3559,5643,6336,6010,5517,6337,6338,5721,5902,3873,6011,6339,
6567,5518,3868,3649,5722,6568,4771,4947,6569,6149,4812,6570,2853,5471,6340,6341,
5644,4795,6342,6012,5723,6343,5724,6013,4349,6344,3160,6150,5193,4599,4514,4493,
5168,4320,6345,4927,3666,4745,5169,5903,5005,4928,6346,5725,6014,4730,4203,5046,
4948,3395,5170,6015,4150,6016,5726,5519,6347,5047,3550,6151,6348,4197,4310,5904,
6571,5581,2965,6152,4978,3960,4291,5135,6572,5301,5727,4129,4026,5905,4853,5728,
5472,6153,6349,4533,2700,4505,5336,4678,3583,5073,2994,4486,3043,4554,5520,6350,
6017,5800,4487,6351,3931,4103,5376,6352,4011,4321,4311,4190,5136,6018,3988,3233,
4350,5906,5645,4198,6573,5107,3432,4191,3435,5582,6574,4139,5410,6353,5411,3944,
5583,5074,3198,6575,6354,4358,6576,5302,4600,5584,5194,5412,6577,6578,5585,5413,
5303,4248,5414,3879,4433,6579,4479,5025,4854,5415,6355,4760,4772,3683,2978,4700,
3797,4452,3965,3932,3721,4910,5801,6580,5195,3551,5907,3221,3471,3029,6019,3999,
5908,5909,5266,5267,3444,3023,3828,3170,4796,5646,4979,4259,6356,5647,5337,3694,
6357,5648,5338,4520,4322,5802,3031,3759,4071,6020,5586,4836,4386,5048,6581,3571,
4679,4174,4949,6154,4813,3787,3402,3822,3958,3215,3552,5268,4387,3933,4950,4359,
6021,5910,5075,3579,6358,4234,4566,5521,6359,3613,5049,6022,5911,3375,3702,3178,
4911,5339,4521,6582,6583,4395,3087,3811,5377,6023,6360,6155,4027,5171,5649,4421,
4249,2804,6584,2270,6585,4000,4235,3045,6156,5137,5729,4140,4312,3886,6361,4330,
6157,4215,6158,3500,3676,4929,4331,3713,4930,5912,4265,3776,3368,5587,4470,4855,
3038,4980,3631,6159,6160,4132,4680,6161,6362,3923,4379,5588,4255,6586,4121,6587,
6363,4649,6364,3288,4773,4774,6162,6024,6365,3543,6588,4274,3107,3737,5050,5803,
4797,4522,5589,5051,5730,3714,4887,5378,4001,4523,6163,5026,5522,4701,4175,2791,
3760,6589,5473,4224,4133,3847,4814,4815,4775,3259,5416,6590,2738,6164,6025,5304,
3733,5076,5650,4816,5590,6591,6165,6592,3934,5269,6593,3396,5340,6594,5804,3445,
3602,4042,4488,5731,5732,3525,5591,4601,5196,6166,6026,5172,3642,4612,3202,4506,
4798,6366,3818,5108,4303,5138,5139,4776,3332,4304,2915,3415,4434,5077,5109,4856,
2879,5305,4817,6595,5913,3104,3144,3903,4634,5341,3133,5110,5651,5805,6167,4057,
5592,2945,4371,5593,6596,3474,4182,6367,6597,6168,4507,4279,6598,2822,6599,4777,
4713,5594,3829,6169,3887,5417,6170,3653,5474,6368,4216,2971,5228,3790,4579,6369,
5733,6600,6601,4951,4746,4555,6602,5418,5475,6027,3400,4665,5806,6171,4799,6028,
5052,6172,3343,4800,4747,5006,6370,4556,4217,5476,4396,5229,5379,5477,3839,5914,
5652,5807,4714,3068,4635,5808,6173,5342,4192,5078,5419,5523,5734,6174,4557,6175,
4602,6371,6176,6603,5809,6372,5735,4260,3869,5111,5230,6029,5112,6177,3126,4681,
5524,5915,2706,3563,4748,3130,6178,4018,5525,6604,6605,5478,4012,4837,6606,4534,
4193,5810,4857,3615,5479,6030,4082,3697,3539,4086,5270,3662,4508,4931,5916,4912,
5811,5027,3888,6607,4397,3527,3302,3798,2775,2921,2637,3966,4122,4388,4028,4054,
1633,4858,5079,3024,5007,3982,3412,5736,6608,3426,3236,5595,3030,6179,3427,3336,
3279,3110,6373,3874,3039,5080,5917,5140,4489,3119,6374,5812,3405,4494,6031,4666,
4141,6180,4166,6032,5813,4981,6609,5081,4422,4982,4112,3915,5653,3296,3983,6375,
4266,4410,5654,6610,6181,3436,5082,6611,5380,6033,3819,5596,4535,5231,5306,5113,
6612,4952,5918,4275,3113,6613,6376,6182,6183,5814,3073,4731,4838,5008,3831,6614,
4888,3090,3848,4280,5526,5232,3014,5655,5009,5737,5420,5527,6615,5815,5343,5173,
5381,4818,6616,3151,4953,6617,5738,2796,3204,4360,2989,4281,5739,5174,5421,5197,
3132,5141,3849,5142,5528,5083,3799,3904,4839,5480,2880,4495,3448,6377,6184,5271,
5919,3771,3193,6034,6035,5920,5010,6036,5597,6037,6378,6038,3106,5422,6618,5423,
5424,4142,6619,4889,5084,4890,4313,5740,6620,3437,5175,5307,5816,4199,5198,5529,
5817,5199,5656,4913,5028,5344,3850,6185,2955,5272,5011,5818,4567,4580,5029,5921,
3616,5233,6621,6622,6186,4176,6039,6379,6380,3352,5200,5273,2908,5598,5234,3837,
5308,6623,6624,5819,4496,4323,5309,5201,6625,6626,4983,3194,3838,4167,5530,5922,
5274,6381,6382,3860,3861,5599,3333,4292,4509,6383,3553,5481,5820,5531,4778,6187,
3955,3956,4324,4389,4218,3945,4325,3397,2681,5923,4779,5085,4019,5482,4891,5382,
5383,6040,4682,3425,5275,4094,6627,5310,3015,5483,5657,4398,5924,3168,4819,6628,
5925,6629,5532,4932,4613,6041,6630,4636,6384,4780,4204,5658,4423,5821,3989,4683,
5822,6385,4954,6631,5345,6188,5425,5012,5384,3894,6386,4490,4104,6632,5741,5053,
6633,5823,5926,5659,5660,5927,6634,5235,5742,5824,4840,4933,4820,6387,4859,5928,
4955,6388,4143,3584,5825,5346,5013,6635,5661,6389,5014,5484,5743,4337,5176,5662,
6390,2836,6391,3268,6392,6636,6042,5236,6637,4158,6638,5744,5663,4471,5347,3663,
4123,5143,4293,3895,6639,6640,5311,5929,5826,3800,6189,6393,6190,5664,5348,3554,
3594,4749,4603,6641,5385,4801,6043,5827,4183,6642,5312,5426,4761,6394,5665,6191,
4715,2669,6643,6644,5533,3185,5427,5086,5930,5931,5386,6192,6044,6645,4781,4013,
5745,4282,4435,5534,4390,4267,6045,5746,4984,6046,2743,6193,3501,4087,5485,5932,
5428,4184,4095,5747,4061,5054,3058,3862,5933,5600,6646,5144,3618,6395,3131,5055,
5313,6396,4650,4956,3855,6194,3896,5202,4985,4029,4225,6195,6647,5828,5486,5829,
3589,3002,6648,6397,4782,5276,6649,6196,6650,4105,3803,4043,5237,5830,6398,4096,
3643,6399,3528,6651,4453,3315,4637,6652,3984,6197,5535,3182,3339,6653,3096,2660,
6400,6654,3449,5934,4250,4236,6047,6401,5831,6655,5487,3753,4062,5832,6198,6199,
6656,3766,6657,3403,4667,6048,6658,4338,2897,5833,3880,2797,3780,4326,6659,5748,
5015,6660,5387,4351,5601,4411,6661,3654,4424,5935,4339,4072,5277,4568,5536,6402,
6662,5238,6663,5349,5203,6200,5204,6201,5145,4536,5016,5056,4762,5834,4399,4957,
6202,6403,5666,5749,6664,4340,6665,5936,5177,5667,6666,6667,3459,4668,6404,6668,
6669,4543,6203,6670,4276,6405,4480,5537,6671,4614,5205,5668,6672,3348,2193,4763,
6406,6204,5937,5602,4177,5669,3419,6673,4020,6205,4443,4569,5388,3715,3639,6407,
6049,4058,6206,6674,5938,4544,6050,4185,4294,4841,4651,4615,5488,6207,6408,6051,
5178,3241,3509,5835,6208,4958,5836,4341,5489,5278,6209,2823,5538,5350,5206,5429,
6675,4638,4875,4073,3516,4684,4914,4860,5939,5603,5389,6052,5057,3237,5490,3791,
6676,6409,6677,4821,4915,4106,5351,5058,4243,5539,4244,5604,4842,4916,5239,3028,
3716,5837,5114,5605,5390,5940,5430,6210,4332,6678,5540,4732,3667,3840,6053,4305,
3408,5670,5541,6410,2744,5240,5750,6679,3234,5606,6680,5607,5671,3608,4283,4159,
4400,5352,4783,6681,6411,6682,4491,4802,6211,6412,5941,6413,6414,5542,5751,6683,
4669,3734,5942,6684,6415,5943,5059,3328,4670,4144,4268,6685,6686,6687,6688,4372,
3603,6689,5944,5491,4373,3440,6416,5543,4784,4822,5608,3792,4616,5838,5672,3514,
5391,6417,4892,6690,4639,6691,6054,5673,5839,6055,6692,6056,5392,6212,4038,5544,
5674,4497,6057,6693,5840,4284,5675,4021,4545,5609,6418,4454,6419,6213,4113,4472,
5314,3738,5087,5279,4074,5610,4959,4063,3179,4750,6058,6420,6214,3476,4498,4716,
5431,4960,4685,6215,5241,6694,6421,6216,6695,5841,5945,6422,3748,5946,5179,3905,
5752,5545,5947,4374,6217,4455,6423,4412,6218,4803,5353,6696,3832,5280,6219,4327,
4702,6220,6221,6059,4652,5432,6424,3749,4751,6425,5753,4986,5393,4917,5948,5030,
5754,4861,4733,6426,4703,6697,6222,4671,5949,4546,4961,5180,6223,5031,3316,5281,
6698,4862,4295,4934,5207,3644,6427,5842,5950,6428,6429,4570,5843,5282,6430,6224,
5088,3239,6060,6699,5844,5755,6061,6431,2701,5546,6432,5115,5676,4039,3993,3327,
4752,4425,5315,6433,3941,6434,5677,4617,4604,3074,4581,6225,5433,6435,6226,6062,
4823,5756,5116,6227,3717,5678,4717,5845,6436,5679,5846,6063,5847,6064,3977,3354,
6437,3863,5117,6228,5547,5394,4499,4524,6229,4605,6230,4306,4500,6700,5951,6065,
3693,5952,5089,4366,4918,6701,6231,5548,6232,6702,6438,4704,5434,6703,6704,5953,
4168,6705,5680,3420,6706,5242,4407,6066,3812,5757,5090,5954,4672,4525,3481,5681,
4618,5395,5354,5316,5955,6439,4962,6707,4526,6440,3465,4673,6067,6441,5682,6708,
5435,5492,5758,5683,4619,4571,4674,4804,4893,4686,5493,4753,6233,6068,4269,6442,
6234,5032,4705,5146,5243,5208,5848,6235,6443,4963,5033,4640,4226,6236,5849,3387,
6444,6445,4436,4437,5850,4843,5494,4785,4894,6709,4361,6710,5091,5956,3331,6237,
4987,5549,6069,6711,4342,3517,4473,5317,6070,6712,6071,4706,6446,5017,5355,6713,
6714,4988,5436,6447,4734,5759,6715,4735,4547,4456,4754,6448,5851,6449,6450,3547,
5852,5318,6451,6452,5092,4205,6716,6238,4620,4219,5611,6239,6072,4481,5760,5957,
5958,4059,6240,6453,4227,4537,6241,5761,4030,4186,5244,5209,3761,4457,4876,3337,
5495,5181,6242,5959,5319,5612,5684,5853,3493,5854,6073,4169,5613,5147,4895,6074,
5210,6717,5182,6718,3830,6243,2798,3841,6075,6244,5855,5614,3604,4606,5496,5685,
5118,5356,6719,6454,5960,5357,5961,6720,4145,3935,4621,5119,5962,4261,6721,6455,
4786,5963,4375,4582,6245,6246,6247,6076,5437,4877,5856,3376,4380,6248,4160,6722,
5148,6456,5211,6457,6723,4718,6458,6724,6249,5358,4044,3297,6459,6250,5857,5615,
5497,5245,6460,5498,6725,6251,6252,5550,3793,5499,2959,5396,6461,6462,4572,5093,
5500,5964,3806,4146,6463,4426,5762,5858,6077,6253,4755,3967,4220,5965,6254,4989,
5501,6464,4352,6726,6078,4764,2290,5246,3906,5438,5283,3767,4964,2861,5763,5094,
6255,6256,4622,5616,5859,5860,4707,6727,4285,4708,4824,5617,6257,5551,4787,5212,
4965,4935,4687,6465,6728,6466,5686,6079,3494,4413,2995,5247,5966,5618,6729,5967,
5764,5765,5687,5502,6730,6731,6080,5397,6467,4990,6258,6732,4538,5060,5619,6733,
4719,5688,5439,5018,5149,5284,5503,6734,6081,4607,6259,5120,3645,5861,4583,6260,
4584,4675,5620,4098,5440,6261,4863,2379,3306,4585,5552,5689,4586,5285,6735,4864,
6736,5286,6082,6737,4623,3010,4788,4381,4558,5621,4587,4896,3698,3161,5248,4353,
4045,6262,3754,5183,4588,6738,6263,6739,6740,5622,3936,6741,6468,6742,6264,5095,
6469,4991,5968,6743,4992,6744,6083,4897,6745,4256,5766,4307,3108,3968,4444,5287,
3889,4343,6084,4510,6085,4559,6086,4898,5969,6746,5623,5061,4919,5249,5250,5504,
5441,6265,5320,4878,3242,5862,5251,3428,6087,6747,4237,5624,5442,6266,5553,4539,
6748,2585,3533,5398,4262,6088,5150,4736,4438,6089,6267,5505,4966,6749,6268,6750,
6269,5288,5554,3650,6090,6091,4624,6092,5690,6751,5863,4270,5691,4277,5555,5864,
6752,5692,4720,4865,6470,5151,4688,4825,6753,3094,6754,6471,3235,4653,6755,5213,
5399,6756,3201,4589,5865,4967,6472,5866,6473,5019,3016,6757,5321,4756,3957,4573,
6093,4993,5767,4721,6474,6758,5625,6759,4458,6475,6270,6760,5556,4994,5214,5252,
6271,3875,5768,6094,5034,5506,4376,5769,6761,2120,6476,5253,5770,6762,5771,5970,
3990,5971,5557,5558,5772,6477,6095,2787,4641,5972,5121,6096,6097,6272,6763,3703,
5867,5507,6273,4206,6274,4789,6098,6764,3619,3646,3833,3804,2394,3788,4936,3978,
4866,4899,6099,6100,5559,6478,6765,3599,5868,6101,5869,5870,6275,6766,4527,6767)
| gpl-2.0 |
jxs/servo | tests/wpt/css-tests/tools/pywebsocket/src/example/origin_check_wsh.py | 516 | 1992 | # Copyright 2011, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# This example is derived from test/testdata/handlers/origin_check_wsh.py.
def web_socket_do_extra_handshake(request):
if request.ws_origin == 'http://example.com':
return
raise ValueError('Unacceptable origin: %r' % request.ws_origin)
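# Illustration (not part of the original example): with this handler installed,
# a handshake whose Origin header is exactly 'http://example.com' passes the
# check above; any other origin raises the ValueError and the handshake fails.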
def web_socket_transfer_data(request):
request.connection.write('origin_check_wsh.py is called for %s, %s' %
(request.ws_resource, request.ws_protocol))
# vi:sts=4 sw=4 et
| mpl-2.0 |
sonata-nfv/son-cli | bootstrap.py | 120 | 7458 | ##############################################################################
#
# Copyright (c) 2006 Zope Foundation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""Bootstrap a buildout-based project
Simply run this script in a directory containing a buildout.cfg.
The script accepts buildout command-line options, so you can
use the -c option to specify an alternate configuration file.
"""
import os
import shutil
import sys
import tempfile
from optparse import OptionParser
__version__ = '2015-07-01'
# See zc.buildout's changelog if this version is up to date.
tmpeggs = tempfile.mkdtemp(prefix='bootstrap-')
usage = '''\
[DESIRED PYTHON FOR BUILDOUT] bootstrap.py [options]
Bootstraps a buildout-based project.
Simply run this script in a directory containing a buildout.cfg, using the
Python that you want bin/buildout to use.
Note that by using --find-links to point to local resources, you can keep
this script from going over the network.
'''
parser = OptionParser(usage=usage)
parser.add_option("--version",
action="store_true", default=False,
help=("Return bootstrap.py version."))
parser.add_option("-t", "--accept-buildout-test-releases",
dest='accept_buildout_test_releases',
action="store_true", default=False,
help=("Normally, if you do not specify a --version, the "
"bootstrap script and buildout gets the newest "
"*final* versions of zc.buildout and its recipes and "
"extensions for you. If you use this flag, "
"bootstrap and buildout will get the newest releases "
"even if they are alphas or betas."))
parser.add_option("-c", "--config-file",
help=("Specify the path to the buildout configuration "
"file to be used."))
parser.add_option("-f", "--find-links",
help=("Specify a URL to search for buildout releases"))
parser.add_option("--allow-site-packages",
action="store_true", default=False,
help=("Let bootstrap.py use existing site packages"))
parser.add_option("--buildout-version",
help="Use a specific zc.buildout version")
parser.add_option("--setuptools-version",
help="Use a specific setuptools version")
parser.add_option("--setuptools-to-dir",
help=("Allow for re-use of existing directory of "
"setuptools versions"))
options, args = parser.parse_args()
if options.version:
print("bootstrap.py version %s" % __version__)
sys.exit(0)
######################################################################
# load/install setuptools
try:
from urllib.request import urlopen
except ImportError:
from urllib2 import urlopen
ez = {}
if os.path.exists('ez_setup.py'):
exec(open('ez_setup.py').read(), ez)
else:
exec(urlopen('https://bootstrap.pypa.io/ez_setup.py').read(), ez)
if not options.allow_site_packages:
# ez_setup imports site, which adds site packages
# this will remove them from the path to ensure that incompatible versions
# of setuptools are not in the path
import site
# inside a virtualenv, there is no 'getsitepackages'.
# We can't remove these reliably
if hasattr(site, 'getsitepackages'):
for sitepackage_path in site.getsitepackages():
# Strip all site-packages directories from sys.path that
# are not sys.prefix; this is because on Windows
# sys.prefix is a site-package directory.
if sitepackage_path != sys.prefix:
sys.path[:] = [x for x in sys.path
if sitepackage_path not in x]
setup_args = dict(to_dir=tmpeggs, download_delay=0)
if options.setuptools_version is not None:
setup_args['version'] = options.setuptools_version
if options.setuptools_to_dir is not None:
setup_args['to_dir'] = options.setuptools_to_dir
ez['use_setuptools'](**setup_args)
import setuptools
import pkg_resources
# This does not (always?) update the default working set. We will
# do it.
for path in sys.path:
if path not in pkg_resources.working_set.entries:
pkg_resources.working_set.add_entry(path)
######################################################################
# Install buildout
ws = pkg_resources.working_set
setuptools_path = ws.find(
pkg_resources.Requirement.parse('setuptools')).location
# Fix sys.path here as easy_install.pth added before PYTHONPATH
cmd = [sys.executable, '-c',
'import sys; sys.path[0:0] = [%r]; ' % setuptools_path +
'from setuptools.command.easy_install import main; main()',
'-mZqNxd', tmpeggs]
find_links = os.environ.get(
'bootstrap-testing-find-links',
options.find_links or
('http://downloads.buildout.org/'
if options.accept_buildout_test_releases else None)
)
if find_links:
cmd.extend(['-f', find_links])
requirement = 'zc.buildout'
version = options.buildout_version
if version is None and not options.accept_buildout_test_releases:
# Figure out the most recent final version of zc.buildout.
import setuptools.package_index
_final_parts = '*final-', '*final'
def _final_version(parsed_version):
try:
return not parsed_version.is_prerelease
except AttributeError:
# Older setuptools
for part in parsed_version:
if (part[:1] == '*') and (part not in _final_parts):
return False
return True
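# Sketch of the intended behaviour (assumes pkg_resources is importable, as it
# is after the setup above): _final_version(pkg_resources.parse_version('2.13.3'))
# is True, while a prerelease such as '3.0.0b1' yields False.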
index = setuptools.package_index.PackageIndex(
search_path=[setuptools_path])
if find_links:
index.add_find_links((find_links,))
req = pkg_resources.Requirement.parse(requirement)
if index.obtain(req) is not None:
best = []
bestv = None
for dist in index[req.project_name]:
distv = dist.parsed_version
if _final_version(distv):
if bestv is None or distv > bestv:
best = [dist]
bestv = distv
elif distv == bestv:
best.append(dist)
if best:
best.sort()
version = best[-1].version
if version:
requirement = '=='.join((requirement, version))
cmd.append(requirement)
import subprocess
if subprocess.call(cmd) != 0:
raise Exception(
"Failed to execute command:\n%s" % repr(cmd)[1:-1])
######################################################################
# Import and run buildout
ws.add_entry(tmpeggs)
ws.require(requirement)
import zc.buildout.buildout
if not [a for a in args if '=' not in a]:
args.append('bootstrap')
# if -c was provided, we push it back into args for buildout' main function
if options.config_file is not None:
args[0:0] = ['-c', options.config_file]
zc.buildout.buildout.main(args)
shutil.rmtree(tmpeggs)
| apache-2.0 |
Hubert51/AutoGrading | learning/web_Haotian/venv/Lib/encodings/iso8859_6.py | 272 | 10833 | """ Python Character Mapping Codec iso8859_6 generated from 'MAPPINGS/ISO8859/8859-6.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
def encode(self,input,errors='strict'):
return codecs.charmap_encode(input,errors,encoding_table)
def decode(self,input,errors='strict'):
return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input, final=False):
return codecs.charmap_encode(input,self.errors,encoding_table)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
def decode(self, input, final=False):
return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
### encodings module API
def getregentry():
return codecs.CodecInfo(
name='iso8859-6',
encode=Codec().encode,
decode=Codec().decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamreader=StreamReader,
streamwriter=StreamWriter,
)
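# Informal check of the mapping defined below (once registered as 'iso8859-6'):
# b'\xc7'.decode('iso8859-6') gives '\u0627' (ARABIC LETTER ALEF) and
# '\u0627'.encode('iso8859-6') round-trips back to b'\xc7'.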
### Decoding Table
decoding_table = (
'\x00' # 0x00 -> NULL
'\x01' # 0x01 -> START OF HEADING
'\x02' # 0x02 -> START OF TEXT
'\x03' # 0x03 -> END OF TEXT
'\x04' # 0x04 -> END OF TRANSMISSION
'\x05' # 0x05 -> ENQUIRY
'\x06' # 0x06 -> ACKNOWLEDGE
'\x07' # 0x07 -> BELL
'\x08' # 0x08 -> BACKSPACE
'\t' # 0x09 -> HORIZONTAL TABULATION
'\n' # 0x0A -> LINE FEED
'\x0b' # 0x0B -> VERTICAL TABULATION
'\x0c' # 0x0C -> FORM FEED
'\r' # 0x0D -> CARRIAGE RETURN
'\x0e' # 0x0E -> SHIFT OUT
'\x0f' # 0x0F -> SHIFT IN
'\x10' # 0x10 -> DATA LINK ESCAPE
'\x11' # 0x11 -> DEVICE CONTROL ONE
'\x12' # 0x12 -> DEVICE CONTROL TWO
'\x13' # 0x13 -> DEVICE CONTROL THREE
'\x14' # 0x14 -> DEVICE CONTROL FOUR
'\x15' # 0x15 -> NEGATIVE ACKNOWLEDGE
'\x16' # 0x16 -> SYNCHRONOUS IDLE
'\x17' # 0x17 -> END OF TRANSMISSION BLOCK
'\x18' # 0x18 -> CANCEL
'\x19' # 0x19 -> END OF MEDIUM
'\x1a' # 0x1A -> SUBSTITUTE
'\x1b' # 0x1B -> ESCAPE
'\x1c' # 0x1C -> FILE SEPARATOR
'\x1d' # 0x1D -> GROUP SEPARATOR
'\x1e' # 0x1E -> RECORD SEPARATOR
'\x1f' # 0x1F -> UNIT SEPARATOR
' ' # 0x20 -> SPACE
'!' # 0x21 -> EXCLAMATION MARK
'"' # 0x22 -> QUOTATION MARK
'#' # 0x23 -> NUMBER SIGN
'$' # 0x24 -> DOLLAR SIGN
'%' # 0x25 -> PERCENT SIGN
'&' # 0x26 -> AMPERSAND
"'" # 0x27 -> APOSTROPHE
'(' # 0x28 -> LEFT PARENTHESIS
')' # 0x29 -> RIGHT PARENTHESIS
'*' # 0x2A -> ASTERISK
'+' # 0x2B -> PLUS SIGN
',' # 0x2C -> COMMA
'-' # 0x2D -> HYPHEN-MINUS
'.' # 0x2E -> FULL STOP
'/' # 0x2F -> SOLIDUS
'0' # 0x30 -> DIGIT ZERO
'1' # 0x31 -> DIGIT ONE
'2' # 0x32 -> DIGIT TWO
'3' # 0x33 -> DIGIT THREE
'4' # 0x34 -> DIGIT FOUR
'5' # 0x35 -> DIGIT FIVE
'6' # 0x36 -> DIGIT SIX
'7' # 0x37 -> DIGIT SEVEN
'8' # 0x38 -> DIGIT EIGHT
'9' # 0x39 -> DIGIT NINE
':' # 0x3A -> COLON
';' # 0x3B -> SEMICOLON
'<' # 0x3C -> LESS-THAN SIGN
'=' # 0x3D -> EQUALS SIGN
'>' # 0x3E -> GREATER-THAN SIGN
'?' # 0x3F -> QUESTION MARK
'@' # 0x40 -> COMMERCIAL AT
'A' # 0x41 -> LATIN CAPITAL LETTER A
'B' # 0x42 -> LATIN CAPITAL LETTER B
'C' # 0x43 -> LATIN CAPITAL LETTER C
'D' # 0x44 -> LATIN CAPITAL LETTER D
'E' # 0x45 -> LATIN CAPITAL LETTER E
'F' # 0x46 -> LATIN CAPITAL LETTER F
'G' # 0x47 -> LATIN CAPITAL LETTER G
'H' # 0x48 -> LATIN CAPITAL LETTER H
'I' # 0x49 -> LATIN CAPITAL LETTER I
'J' # 0x4A -> LATIN CAPITAL LETTER J
'K' # 0x4B -> LATIN CAPITAL LETTER K
'L' # 0x4C -> LATIN CAPITAL LETTER L
'M' # 0x4D -> LATIN CAPITAL LETTER M
'N' # 0x4E -> LATIN CAPITAL LETTER N
'O' # 0x4F -> LATIN CAPITAL LETTER O
'P' # 0x50 -> LATIN CAPITAL LETTER P
'Q' # 0x51 -> LATIN CAPITAL LETTER Q
'R' # 0x52 -> LATIN CAPITAL LETTER R
'S' # 0x53 -> LATIN CAPITAL LETTER S
'T' # 0x54 -> LATIN CAPITAL LETTER T
'U' # 0x55 -> LATIN CAPITAL LETTER U
'V' # 0x56 -> LATIN CAPITAL LETTER V
'W' # 0x57 -> LATIN CAPITAL LETTER W
'X' # 0x58 -> LATIN CAPITAL LETTER X
'Y' # 0x59 -> LATIN CAPITAL LETTER Y
'Z' # 0x5A -> LATIN CAPITAL LETTER Z
'[' # 0x5B -> LEFT SQUARE BRACKET
'\\' # 0x5C -> REVERSE SOLIDUS
']' # 0x5D -> RIGHT SQUARE BRACKET
'^' # 0x5E -> CIRCUMFLEX ACCENT
'_' # 0x5F -> LOW LINE
'`' # 0x60 -> GRAVE ACCENT
'a' # 0x61 -> LATIN SMALL LETTER A
'b' # 0x62 -> LATIN SMALL LETTER B
'c' # 0x63 -> LATIN SMALL LETTER C
'd' # 0x64 -> LATIN SMALL LETTER D
'e' # 0x65 -> LATIN SMALL LETTER E
'f' # 0x66 -> LATIN SMALL LETTER F
'g' # 0x67 -> LATIN SMALL LETTER G
'h' # 0x68 -> LATIN SMALL LETTER H
'i' # 0x69 -> LATIN SMALL LETTER I
'j' # 0x6A -> LATIN SMALL LETTER J
'k' # 0x6B -> LATIN SMALL LETTER K
'l' # 0x6C -> LATIN SMALL LETTER L
'm' # 0x6D -> LATIN SMALL LETTER M
'n' # 0x6E -> LATIN SMALL LETTER N
'o' # 0x6F -> LATIN SMALL LETTER O
'p' # 0x70 -> LATIN SMALL LETTER P
'q' # 0x71 -> LATIN SMALL LETTER Q
'r' # 0x72 -> LATIN SMALL LETTER R
's' # 0x73 -> LATIN SMALL LETTER S
't' # 0x74 -> LATIN SMALL LETTER T
'u' # 0x75 -> LATIN SMALL LETTER U
'v' # 0x76 -> LATIN SMALL LETTER V
'w' # 0x77 -> LATIN SMALL LETTER W
'x' # 0x78 -> LATIN SMALL LETTER X
'y' # 0x79 -> LATIN SMALL LETTER Y
'z' # 0x7A -> LATIN SMALL LETTER Z
'{' # 0x7B -> LEFT CURLY BRACKET
'|' # 0x7C -> VERTICAL LINE
'}' # 0x7D -> RIGHT CURLY BRACKET
'~' # 0x7E -> TILDE
'\x7f' # 0x7F -> DELETE
'\x80' # 0x80 -> <control>
'\x81' # 0x81 -> <control>
'\x82' # 0x82 -> <control>
'\x83' # 0x83 -> <control>
'\x84' # 0x84 -> <control>
'\x85' # 0x85 -> <control>
'\x86' # 0x86 -> <control>
'\x87' # 0x87 -> <control>
'\x88' # 0x88 -> <control>
'\x89' # 0x89 -> <control>
'\x8a' # 0x8A -> <control>
'\x8b' # 0x8B -> <control>
'\x8c' # 0x8C -> <control>
'\x8d' # 0x8D -> <control>
'\x8e' # 0x8E -> <control>
'\x8f' # 0x8F -> <control>
'\x90' # 0x90 -> <control>
'\x91' # 0x91 -> <control>
'\x92' # 0x92 -> <control>
'\x93' # 0x93 -> <control>
'\x94' # 0x94 -> <control>
'\x95' # 0x95 -> <control>
'\x96' # 0x96 -> <control>
'\x97' # 0x97 -> <control>
'\x98' # 0x98 -> <control>
'\x99' # 0x99 -> <control>
'\x9a' # 0x9A -> <control>
'\x9b' # 0x9B -> <control>
'\x9c' # 0x9C -> <control>
'\x9d' # 0x9D -> <control>
'\x9e' # 0x9E -> <control>
'\x9f' # 0x9F -> <control>
'\xa0' # 0xA0 -> NO-BREAK SPACE
'\ufffe'
'\ufffe'
'\ufffe'
'\xa4' # 0xA4 -> CURRENCY SIGN
'\ufffe'
'\ufffe'
'\ufffe'
'\ufffe'
'\ufffe'
'\ufffe'
'\ufffe'
'\u060c' # 0xAC -> ARABIC COMMA
'\xad' # 0xAD -> SOFT HYPHEN
'\ufffe'
'\ufffe'
'\ufffe'
'\ufffe'
'\ufffe'
'\ufffe'
'\ufffe'
'\ufffe'
'\ufffe'
'\ufffe'
'\ufffe'
'\ufffe'
'\ufffe'
'\u061b' # 0xBB -> ARABIC SEMICOLON
'\ufffe'
'\ufffe'
'\ufffe'
'\u061f' # 0xBF -> ARABIC QUESTION MARK
'\ufffe'
'\u0621' # 0xC1 -> ARABIC LETTER HAMZA
'\u0622' # 0xC2 -> ARABIC LETTER ALEF WITH MADDA ABOVE
'\u0623' # 0xC3 -> ARABIC LETTER ALEF WITH HAMZA ABOVE
'\u0624' # 0xC4 -> ARABIC LETTER WAW WITH HAMZA ABOVE
'\u0625' # 0xC5 -> ARABIC LETTER ALEF WITH HAMZA BELOW
'\u0626' # 0xC6 -> ARABIC LETTER YEH WITH HAMZA ABOVE
'\u0627' # 0xC7 -> ARABIC LETTER ALEF
'\u0628' # 0xC8 -> ARABIC LETTER BEH
'\u0629' # 0xC9 -> ARABIC LETTER TEH MARBUTA
'\u062a' # 0xCA -> ARABIC LETTER TEH
'\u062b' # 0xCB -> ARABIC LETTER THEH
'\u062c' # 0xCC -> ARABIC LETTER JEEM
'\u062d' # 0xCD -> ARABIC LETTER HAH
'\u062e' # 0xCE -> ARABIC LETTER KHAH
'\u062f' # 0xCF -> ARABIC LETTER DAL
'\u0630' # 0xD0 -> ARABIC LETTER THAL
'\u0631' # 0xD1 -> ARABIC LETTER REH
'\u0632' # 0xD2 -> ARABIC LETTER ZAIN
'\u0633' # 0xD3 -> ARABIC LETTER SEEN
'\u0634' # 0xD4 -> ARABIC LETTER SHEEN
'\u0635' # 0xD5 -> ARABIC LETTER SAD
'\u0636' # 0xD6 -> ARABIC LETTER DAD
'\u0637' # 0xD7 -> ARABIC LETTER TAH
'\u0638' # 0xD8 -> ARABIC LETTER ZAH
'\u0639' # 0xD9 -> ARABIC LETTER AIN
'\u063a' # 0xDA -> ARABIC LETTER GHAIN
'\ufffe'
'\ufffe'
'\ufffe'
'\ufffe'
'\ufffe'
'\u0640' # 0xE0 -> ARABIC TATWEEL
'\u0641' # 0xE1 -> ARABIC LETTER FEH
'\u0642' # 0xE2 -> ARABIC LETTER QAF
'\u0643' # 0xE3 -> ARABIC LETTER KAF
'\u0644' # 0xE4 -> ARABIC LETTER LAM
'\u0645' # 0xE5 -> ARABIC LETTER MEEM
'\u0646' # 0xE6 -> ARABIC LETTER NOON
'\u0647' # 0xE7 -> ARABIC LETTER HEH
'\u0648' # 0xE8 -> ARABIC LETTER WAW
'\u0649' # 0xE9 -> ARABIC LETTER ALEF MAKSURA
'\u064a' # 0xEA -> ARABIC LETTER YEH
'\u064b' # 0xEB -> ARABIC FATHATAN
'\u064c' # 0xEC -> ARABIC DAMMATAN
'\u064d' # 0xED -> ARABIC KASRATAN
'\u064e' # 0xEE -> ARABIC FATHA
'\u064f' # 0xEF -> ARABIC DAMMA
'\u0650' # 0xF0 -> ARABIC KASRA
'\u0651' # 0xF1 -> ARABIC SHADDA
'\u0652' # 0xF2 -> ARABIC SUKUN
'\ufffe'
'\ufffe'
'\ufffe'
'\ufffe'
'\ufffe'
'\ufffe'
'\ufffe'
'\ufffe'
'\ufffe'
'\ufffe'
'\ufffe'
'\ufffe'
'\ufffe'
)
### Encoding table
encoding_table=codecs.charmap_build(decoding_table)
| mit |
rmac75/mboxparser | mbox.py | 1 | 3399 | #!/usr/bin/python2
#--------------------------------
#Takes in mbox, spits out csv with email info and basic geolocation, plus other header fields.
#--------------------------------
#This product includes GeoLite2 data created by MaxMind, available from
#<a href="http://www.maxmind.com">http://www.maxmind.com</a>.
import mailbox
import sys
import csv
import re
from os import path
import pprint
import argparse
import geoip2.database
import geoip2.errors
import pygeoip
import email.utils
from email.utils import getaddresses
def get_iprecord(ip):
try:
geo = reader.city(ip)
org = reader2.org_by_addr(ip)
except (geoip2.errors.AddressNotFoundError, ValueError):
return None,None,None
if geo.city.name:
cityname=geo.city.name.encode('ascii','ignore')
else:
cityname=geo.city.name
return geo.country.iso_code, cityname, org
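# Illustrative call only; real values depend on the GeoLite2/GeoIPOrg databases
# shipped in geo/. get_iprecord('8.8.8.8') might return something like
# ('US', 'Mountain View', 'Google Inc.'), or (None, None, None) on a lookup miss.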
def main():
# first some sanity tests on the command-line arguments
#sys.argv = ['mbox_to_mysql','list1.mbox','mailman','lists',] # !@!@! APS here for testing purposes only - comment out for live run
parser = argparse.ArgumentParser(description='Parse mbox file')
parser.add_argument('mbox', help='mbox file to parse')
parser.add_argument('outfile', help='output csv file')
args = parser.parse_args()
if not path.isfile(args.mbox):
parser.error("the file %s does not exist"%args.mbox)
mbox = args.mbox
outfile = args.outfile
ipPattern = re.compile('\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}')
global reader
reader = geoip2.database.Reader('geo/GeoLite2-City.mmdb')
global reader2
reader2 = pygeoip.GeoIP('geo/GeoIPOrg.dat')
f = open(outfile, 'wt')
try:
writer = csv.writer(f)
writer.writerow( ('Date','From','From Email','Return-Path Email','To','To Email','Recipients','X-To','Subject','Received-Last','Org','City', 'Country','X-IP','X-Org', 'X-City', 'X-Country','X-Mailer'))
for message in mailbox.mbox(mbox):
From = str(message['From'])
fname,femail = email.utils.parseaddr(From)
#print fname
Return = str(message['Return-Path'])
rname,remail = email.utils.parseaddr(Return)
#print remail
To = str(message['To'])
tname,temail = email.utils.parseaddr(To)
tos = message.get_all('to', [])
ccs = message.get_all('cc', [])
resent_tos = message.get_all('resent-to', [])
resent_ccs = message.get_all('resent-cc', [])
all_recipients = getaddresses(tos + ccs + resent_tos + resent_ccs)
XTo = str(message['X-Apparently-To'])
#findIP = re.findall(ipPattern,s)
Date = str(message['Date'])
Subject = str(message['Subject'])
Received = re.findall(ipPattern,str(message['Received']))
if Received:
#print Received[-1]
country, city, org = get_iprecord(Received[-1])
#print get_iprecord(Received[-1])
#print org
else:
# keep a list so Received[-1] works below; define otherwise-unbound geo fields
Received = ["None"]
country, city, org = "None", "None", "None"
XIP = message['X-Originating-IP']
if XIP:
XIP = str(XIP).strip('[]')
#print ("XIP: %s." % XIP)
Xcountry, Xcity, Xorg = get_iprecord(XIP)
else:
XIP = "None"
Xcountry = "None"
Xcity = "None"
Xorg = "None"
XMailer = str(message['X-Mailer'])
#Attachment = message.get_filename()
#Body = str(message['Body'])
writer.writerow((Date,fname,femail,remail,tname,temail,all_recipients,XTo,Subject,Received[-1],org,city,country,XIP,Xorg,Xcity,Xcountry,XMailer))
finally:
f.close()
#print open(sys.argv[1], 'rt').read()
if __name__ == '__main__':
main()
| gpl-2.0 |
enriquecoronadozu/HMPy | src/borrar/modificar/hmpy.py | 1 | 6228 | #!/usr/bin/env python
"""@See preprocessed data
"""
import math # needed for math.ceil in newModel below
from numpy import*
import matplotlib.pyplot as plt
from GestureModel import*
from Creator import*
from Classifier import*
def plotResults(gr_points,gr_sig, b_points,b_sig,name_model):
from scipy import linalg
import matplotlib.pyplot as plt
gr_points = gr_points.transpose()
b_points = b_points.transpose()
gr_sigma = []
b_sigma = []
n,m = gr_points.shape
maximum = zeros((m))
minimum = zeros((m))
x = arange(0,m,1)
for i in range(m):
gr_sigma.append(gr_sig[i*3:i*3+3])
b_sigma.append(b_sig[i*3:i*3+3])
for i in range(m):
sigma = 3.*linalg.sqrtm(gr_sigma[i])
maximum[i] = gr_points[0,i]+ sigma[0,0];
minimum[i] = gr_points[0,i]- sigma[0,0];
fig2 = plt.figure()
plt.fill_between(x, maximum, minimum,lw=2, alpha=0.5 )
plt.plot(x, gr_points[0])
plt.savefig(name_model+ "_gravity_x_axis.png")
for i in range(m):
sigma = 3.*linalg.sqrtm(gr_sigma[i])
maximum[i] = gr_points[1,i]+ sigma[1,1];
minimum[i] = gr_points[1,i]- sigma[1,1];
fig3 = plt.figure()
plt.fill_between(x, maximum, minimum,lw=2, alpha=0.5 )
plt.plot(x, gr_points[1])
plt.savefig(name_model+ "_gravity_y_axis.png")
for i in range(m):
sigma = 3.*linalg.sqrtm(gr_sigma[i])
maximum[i] = gr_points[2,i]+ sigma[2,2];
minimum[i] = gr_points[2,i]- sigma[2,2];
fig3 = plt.figure()
plt.fill_between(x, maximum, minimum,lw=2, alpha=0.5 )
plt.plot(x, gr_points[2])
plt.savefig(name_model+ "_gravity_z_axis.png")
for i in range(m):
sigma = 3.*linalg.sqrtm(b_sigma[i])
maximum[i] = b_points[0,i]+ sigma[0,0];
minimum[i] = b_points[0,i]- sigma[0,0];
fig4 = plt.figure()
plt.fill_between(x, maximum, minimum,lw=2, alpha=0.5 )
plt.plot(x, b_points[0])
plt.savefig(name_model+ "_body_x_axis.png")
for i in range(m):
sigma = 3.*linalg.sqrtm(b_sigma[i])
maximum[i] = b_points[1,i]+ sigma[1,1];
minimum[i] = b_points[1,i]- sigma[1,1];
fig5 = plt.figure()
plt.fill_between(x, maximum, minimum,lw=2, alpha=0.5 )
plt.plot(x, b_points[1])
plt.savefig(name_model+ "_body_axis.png")
for i in range(m):
sigma = 3.*linalg.sqrtm(b_sigma[i])
maximum[i] = b_points[2,i]+ sigma[2,2];
minimum[i] = b_points[2,i]- sigma[2,2];
fig6 = plt.figure()
plt.fill_between(x, maximum, minimum,lw=2, alpha=0.5 )
plt.plot(x, b_points[2])
plt.savefig(name_model+ "_body_z_axis.png")
#NOTE: Add path
def newModel(name,files):
g = Creator()
#Read the data
g.ReadFiles(files,[])
g.CreateDatasets_Acc()
g.ObtainNumberOfCluster()
gravity = g.gravity
K_gravity = g.K_gravity
body = g.body
K_body = g.K_body
# 2) define the number of points to be used in GMR
# (current settings allow for CONSTANT SPACING only)
numPoints = amax(gravity[0,:]);
scaling_factor = 10/10;
numGMRPoints = math.ceil(numPoints*scaling_factor);
# 3) perform Gaussian Mixture Modelling and Regression to retrieve the
# expected curve and associated covariance matrices for each feature
gr_points, gr_sigma = g.GetExpected(gravity,K_gravity,numGMRPoints)
b_points, b_sigma = g.GetExpected(body,K_body,numGMRPoints)
savetxt(name+"MuGravity.txt", gr_points,fmt='%.12f')
savetxt(name+"SigmaGravity.txt", gr_sigma,fmt='%.12f')
savetxt(name+"MuBody.txt", b_points,fmt='%.12f')
savetxt(name+"SigmaBody.txt", b_sigma,fmt='%.12f')
def loadModel(file_name, th=1, plot=True):
#Load files
gr_points = loadtxt(file_name+"MuGravity.txt")
gr_sigma = loadtxt(file_name+"SigmaGravity.txt")
b_points = loadtxt(file_name+"MuBody.txt")
b_sigma = loadtxt(file_name+"SigmaBody.txt")
#Add model
gm = GestureModel()
gm.addModel("gravity",gr_points, gr_sigma,th)
gm.addModel("body",b_points, b_sigma,th)
if plot == True:
plotResults(gr_points,gr_sigma, b_points,b_sigma,file_name)
return gm
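# Typical use, assuming the files written by newModel('A', ...) are present:
# gm = loadModel('A', th=25, plot=False) reads AMuGravity.txt, ASigmaGravity.txt,
# AMuBody.txt and ASigmaBody.txt and returns the assembled GestureModel.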
name_models = ['A','B','S1','S2']
num_samples = [10,14,9,10]
th = [25,20,10,65]
create_models = False
list_files = []
#Create a list of the list of files for each model
print "Defining files"
i = 0
for name in name_models:
files = []
for k in range(1,num_samples[i]+1):
files.append('Models/' + name + '/data/mod('+ str(k) + ').txt')
list_files.append(files)
i = i + 1
#Create the models and save the list of files for calculating the weights
if(create_models == True):
print "Creating models"
i = 0
for model in name_models:
print list_files[i]
newModel(model,list_files[i])
i = i + 1
list_models = []
print "Loading models"
#Load the models
for j in range(len(name_models)):
#For the moment don't pass True if there are more than 2 models on Ubuntu
gm = loadModel(name_models[j],th[j],False)
list_models.append(gm)
print "Calculating weights"
#Used to calculate the weights
v0 = Classifier()
for j in range(len(name_models)):
print "\nFor model " + name_models[j] + ":"
w_g, w_b = v0.calculateW(list_files[j],list_models[j])
list_models[j].addWeight("gravity",w_g)
list_models[j].addWeight("body",w_b)
print "\n Init classifers"
l_class = []
for j in range(len(name_models)):
l_class.append(Classifier())
print "Give the model to each classifier"
for j in range(len(name_models)):
l_class[j].classify(list_models[j])
print "Validation"
sfile = "validation/mix3.txt"
fig = plt.figure()
for j in range(len(name_models)):
poss = l_class[j].validate_from_file(sfile, ',')
m,n = poss.shape
x = arange(0,m,1)
plt.plot(x, poss,'o',label= name_models[j])
plt.legend(bbox_to_anchor=(0., 1.02, 1., .102), loc=3,
ncol=2, mode="expand", borderaxespad=0.)
plt.savefig("result.png")
print "Finish ..."
| gpl-3.0 |
dr87/SimpleKernel | tools/perf/scripts/python/Perf-Trace-Util/lib/Perf/Trace/Core.py | 11088 | 3246 | # Core.py - Python extension for perf script, core functions
#
# Copyright (C) 2010 by Tom Zanussi <tzanussi@gmail.com>
#
# This software may be distributed under the terms of the GNU General
# Public License ("GPL") version 2 as published by the Free Software
# Foundation.
from collections import defaultdict
def autodict():
return defaultdict(autodict)
flag_fields = autodict()
symbolic_fields = autodict()
def define_flag_field(event_name, field_name, delim):
flag_fields[event_name][field_name]['delim'] = delim
def define_flag_value(event_name, field_name, value, field_str):
flag_fields[event_name][field_name]['values'][value] = field_str
def define_symbolic_field(event_name, field_name):
# nothing to do, really
pass
def define_symbolic_value(event_name, field_name, value, field_str):
symbolic_fields[event_name][field_name]['values'][value] = field_str
def flag_str(event_name, field_name, value):
string = ""
if flag_fields[event_name][field_name]:
print_delim = 0
keys = flag_fields[event_name][field_name]['values'].keys()
keys.sort()
for idx in keys:
if not value and not idx:
string += flag_fields[event_name][field_name]['values'][idx]
break
if idx and (value & idx) == idx:
if print_delim and flag_fields[event_name][field_name]['delim']:
string += " " + flag_fields[event_name][field_name]['delim'] + " "
string += flag_fields[event_name][field_name]['values'][idx]
print_delim = 1
value &= ~idx
return string
def symbol_str(event_name, field_name, value):
string = ""
if symbolic_fields[event_name][field_name]:
keys = symbolic_fields[event_name][field_name]['values'].keys()
keys.sort()
for idx in keys:
if not value and not idx:
string = symbolic_fields[event_name][field_name]['values'][idx]
break
if (value == idx):
string = symbolic_fields[event_name][field_name]['values'][idx]
break
return string
trace_flags = { 0x00: "NONE", \
0x01: "IRQS_OFF", \
0x02: "IRQS_NOSUPPORT", \
0x04: "NEED_RESCHED", \
0x08: "HARDIRQ", \
0x10: "SOFTIRQ" }
def trace_flag_str(value):
string = ""
print_delim = 0
keys = trace_flags.keys()
for idx in keys:
if not value and not idx:
string += "NONE"
break
if idx and (value & idx) == idx:
if print_delim:
string += " | ";
string += trace_flags[idx]
print_delim = 1
value &= ~idx
return string
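# e.g. trace_flag_str(0) yields "NONE" given the table above; a single set bit
# such as 0x04 typically yields "NEED_RESCHED", and multiple set flags are
# joined with " | " as the loop clears each bit.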
def taskState(state):
states = {
0 : "R",
1 : "S",
2 : "D",
64: "DEAD"
}
if state not in states:
return "Unknown"
return states[state]
class EventHeaders:
def __init__(self, common_cpu, common_secs, common_nsecs,
common_pid, common_comm):
self.cpu = common_cpu
self.secs = common_secs
self.nsecs = common_nsecs
self.pid = common_pid
self.comm = common_comm
def ts(self):
return (self.secs * (10 ** 9)) + self.nsecs
def ts_format(self):
return "%d.%d" % (self.secs, int(self.nsecs / 1000))
| gpl-2.0 |
malkavi/Flexget | flexget/plugins/services/myepisodes.py | 3 | 10145 | import re
from datetime import datetime
from loguru import logger
from sqlalchemy import Column, DateTime, Integer, String
from flexget import plugin
from flexget.db_schema import versioned_base
from flexget.event import event
from flexget.utils import requests
logger = logger.bind(name='myepisodes')
Base = versioned_base('myepisodes', 0)
class MyEpisodesInfo(Base):
__tablename__ = 'myepisodes'
id = Column(Integer, primary_key=True)
series_name = Column(String, unique=True)
myepisodes_id = Column(Integer, unique=True)
updated = Column(DateTime)
def __init__(self, series_name, myepisodes_id):
self.series_name = series_name
self.myepisodes_id = myepisodes_id
self.updated = datetime.now()
def __repr__(self):
return '<MyEpisodesInfo(series_name=%s, myepisodes_id=%s)>' % (
self.series_name,
self.myepisodes_id,
)
class MyEpisodes:
"""
Marks a series episode as acquired in your myepisodes.com account.
Simple Example:
Most shows are recognized automatically from their TVDB name.
And of course the plugin needs to know your MyEpisodes.com account details.
tasks:
tvshows:
myepisodes:
username: <username>
password: <password>
series:
- human target
- chuck
Advanced Example:
In some cases, the TVDB name is either not unique or won't even be discovered.
In that case you need to specify the MyEpisodes id manually using the set plugin.
tasks:
tvshows:
myepisodes:
username: <username>
password: <password>
series:
- human target:
set:
myepisodes_id: 5111
- chuck
How to find the MyEpisodes id: http://matrixagents.org/screencasts/myep_example-20110507-131555.png
"""
schema = {
'type': 'object',
'properties': {'username': {'type': 'string'}, 'password': {'type': 'string'}},
'required': ['username', 'password'],
'additionalProperties': False,
}
def __init__(self):
self.plugin_config = None
self.db_session = None
self.test_mode = None
self.http_session = None
@plugin.priority(plugin.PRIORITY_LAST)
def on_task_output(self, task, config):
"""
Mark all accepted episodes as acquired on MyEpisodes
"""
if not task.accepted:
# Nothing accepted, don't do anything
return
try:
self.plugin_config = config
self.db_session = task.session
self.test_mode = task.options.test
# attempt authentication
self.http_session = self._login(config)
except plugin.PluginWarning as w:
logger.warning(w)
return
except plugin.PluginError as e:
logger.error(e)
return
for entry in task.accepted:
# mark the accepted entries as acquired
try:
self._validate_entry(entry)
entry['myepisodes_id'] = self._lookup_myepisodes_id(entry)
self._mark_episode_acquired(entry)
except plugin.PluginWarning as w:
logger.warning(w)
def _validate_entry(self, entry):
"""
Checks an entry for all of the fields needed to communicate with myepisodes
Raises: PluginWarning if a required field is missing
"""
if (
'series_season' not in entry
or 'series_episode' not in entry
or 'series_name' not in entry
):
raise plugin.PluginWarning(
'Can\'t mark entry `%s` in myepisodes without series_season, series_episode and series_name '
'fields' % entry['title'],
logger,
)
def _lookup_myepisodes_id(self, entry):
"""
Attempts to find the myepisodes id for the series
Return: myepisodes id or None
"""
# Do we already have the id?
myepisodes_id = entry.get('myepisodes_id')
if myepisodes_id:
return myepisodes_id
# have we previously recorded the id for this series?
myepisodes_id = self._retrieve_id_from_database(entry)
if myepisodes_id:
return myepisodes_id
# We don't know the id for this series, so it's time to search myepisodes.com for it
myepisodes_id = self._retrieve_id_from_website(entry)
if myepisodes_id:
return myepisodes_id
raise plugin.PluginWarning(
'Unable to determine the myepisodes id for: `%s`' % entry['title'], logger
)
def _retrieve_id_from_database(self, entry):
"""
Attempts to find the myepisodes id in the database
Return: myepisodes id or None
"""
lc_series_name = entry['series_name'].lower()
info = (
self.db_session.query(MyEpisodesInfo)
.filter(MyEpisodesInfo.series_name == lc_series_name)
.first()
)
if info:
return info.myepisodes_id
def _retrieve_id_from_website(self, entry):
"""
Attempts to find the myepisodes id for the series from the website itself
Return: myepisodes id or None
"""
myepisodes_id = None
baseurl = 'http://www.myepisodes.com/search/'
search_value = self._generate_search_value(entry)
payload = {'tvshow': search_value, 'action': 'Search'}
try:
response = self.http_session.post(baseurl, data=payload)
regex = r'"/epsbyshow\/([0-9]*)\/.*">' + search_value + '</a>'
match_obj = re.search(regex, response.text, re.MULTILINE | re.IGNORECASE)
if match_obj:
myepisodes_id = match_obj.group(1)
self._save_id(search_value, myepisodes_id)
except requests.RequestException as e:
raise plugin.PluginError('Error searching for myepisodes id: %s' % e)
return myepisodes_id
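# Assumed shape of a search-result row (illustration only): markup such as
# <a href="/epsbyshow/5111/Human-Target">Human Target</a> lets the regex above
# capture '5111' as the myepisodes id for the search value 'Human Target'.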
def _generate_search_value(self, entry):
"""
Find the TVDB name for searching myepisodes with.
myepisodes.com is backed by tvrage, so this will not be perfect.
Return: the search value (series name) to query myepisodes with
"""
search_value = entry['series_name']
# Get the series name from thetvdb to increase match chance on myepisodes
if entry.get('tvdb_series_name'):
search_value = entry['tvdb_series_name']
else:
try:
series = plugin.get('api_tvdb', self).lookup_series(
name=entry['series_name'], tvdb_id=entry.get('tvdb_id')
)
search_value = series.name
except LookupError:
logger.warning(
'Unable to lookup series `{}` from tvdb, using raw name.', entry['series_name']
)
return search_value
def _save_id(self, series_name, myepisodes_id):
"""
Save the myepisodes id in the database.
This will help prevent unnecessary communication with the website
"""
# if we already have a record for that id, update the name so that we find it next time
db_item = (
self.db_session.query(MyEpisodesInfo)
.filter(MyEpisodesInfo.myepisodes_id == myepisodes_id)
.first()
)
if db_item:
logger.info(
'Changing name to `{}` for series with myepisodes_id {}',
series_name.lower(),
myepisodes_id,
)
db_item.series_name = series_name.lower()
else:
self.db_session.add(MyEpisodesInfo(series_name.lower(), myepisodes_id))
def _mark_episode_acquired(self, entry):
"""Mark episode as acquired.
Required entry fields:
- series_name
- series_season
- series_episode
Raises:
PluginWarning if operation fails
"""
url = "http://www.myepisodes.com/ajax/service.php?mode=eps_update"
myepisodes_id = entry['myepisodes_id']
season = entry['series_season']
episode = entry['series_episode']
super_secret_code = "A%s-%s-%s" % (str(myepisodes_id), str(season), str(episode))
payload = {super_secret_code: "true"}
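# e.g. myepisodes_id 5111, season 2, episode 5 produce the payload
# {"A5111-2-5": "true"}, the per-episode key format posted to the ajax endpoint above.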
if self.test_mode:
logger.info(
'Would mark {} of `{}` as acquired.', entry['series_id'], entry['series_name']
)
return
try:
self.http_session.post(url, data=payload)
except requests.RequestException:
raise plugin.PluginError(
'Failed to mark %s of `%s` as acquired.'
% (entry['series_id'], entry['series_name'])
)
logger.info('Marked {} of `{}` as acquired.', entry['series_id'], entry['series_name'])
def _login(self, config):
"""Authenicate with the myepisodes service and return a requests session
Return:
requests session
Raises:
PluginWarning if login fails
PluginError if http communication fails
"""
url = "https://www.myepisodes.com/login.php"
session = requests.Session()
payload = {
'username': config['username'],
'password': config['password'],
'action': 'Login',
}
try:
response = session.post(url, data=payload)
if 'login' in response.url:
raise plugin.PluginWarning(
(
'Login to myepisodes.com failed, please see if the site is down and verify '
'your credentials.'
),
logger,
)
except requests.RequestException as e:
raise plugin.PluginError('Error logging in to myepisodes: %s' % e)
return session
@event('plugin.register')
def register_plugin():
plugin.register(MyEpisodes, 'myepisodes', api_ver=2)
| mit |
jendrikseipp/rednotebook-elementary | rednotebook/util/markup.py | 1 | 16346 | # -*- coding: utf-8 -*-
# -----------------------------------------------------------------------
# Copyright (c) 2009 Jendrik Seipp
#
# RedNotebook is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# RedNotebook is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with RedNotebook; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
# -----------------------------------------------------------------------
import logging
import os
import re
import sys
from gi.repository import GObject
from gi.repository import Pango
from rednotebook.external import txt2tags
from rednotebook.data import HASHTAG
from rednotebook.util import filesystem
# Linebreaks are only allowed at line ends
REGEX_LINEBREAK = r'\\\\[\s]*$'
REGEX_HTML_LINK = r'<a.*?>(.*?)</a>'
# pic [""/home/user/Desktop/RedNotebook pic"".png]
PIC_NAME = r'\S.*?\S|\S'
PIC_EXT = r'(?:png|jpe?g|gif|eps|bmp|svg)'
REGEX_PIC = re.compile(r'(\["")(%s)("")(\.%s)(\?\d+)?(\])' % (PIC_NAME, PIC_EXT), flags=re.I)
# named local link [my file.txt ""file:///home/user/my file.txt""]
# named link in web [heise ""http://heise.de""]
REGEX_NAMED_LINK = re.compile(r'(\[)(.*?)(\s"")(\S.*?\S)(""\])', flags=re.I)
ESCAPE_COLOR = r'XBEGINCOLORX\1XSEPARATORX\2XENDCOLORX'
COLOR_ESCAPED = r'XBEGINCOLORX(.*?)XSEPARATORX(.*?)XENDCOLORX'
TABLE_HEAD_BG = '#aaa'
CHARSET_UTF8 = '<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />'
PRINT_FUNCTION = '<script></script>'
CSS = """\
<style type="text/css">
body {
font-family: %(font)s;
}
<!-- Don't split last line between pages.
This fix is only supported by Opera -->
p {
page-break-inside: avoid;
}
blockquote {
margin: 1em 2em;
border-left: 2px solid #999;
font-style: oblique;
padding-left: 1em;
}
blockquote:first-letter {
margin: .2em .1em .1em 0;
font-size: 160%%;
font-weight: bold;
}
blockquote:first-line {
font-weight: bold;
}
table {
border-collapse: collapse;
}
td, th {
<!--border: 1px solid #888;--> <!--Allow tables without borders-->
padding: 3px 7px 2px 7px;
}
th {
text-align: left;
padding-top: 5px;
padding-bottom: 4px;
background-color: %(table_head_bg)s;
color: #ffffff;
}
hr.heavy {
height: 2px;
background-color: black;
}
</style>
"""
# MathJax
FORMULAS_SUPPORTED = True
MATHJAX_FILE = 'https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.1/MathJax.js'
# Explicitly setting inlineMath: [ ['\\(','\\)'] ] doesn't work.
# Using defaults:
# displayMath: [ ['$$','$$'], ['\[','\]'] ]
# inlineMath: [['\(','\)']]
MATHJAX_DELIMITERS = ['$$', '\\(', '\\)', '\\[', '\\]']
MATHJAX = """\
<script type="text/x-mathjax-config">
MathJax.Hub.Config({{
messageStyle: "none",
config: ["MMLorHTML.js"],
jax: ["input/TeX","input/MathML","output/HTML-CSS","output/NativeMML"],
tex2jax: {{}},
extensions: ["tex2jax.js","mml2jax.js","MathMenu.js","MathZoom.js"],
TeX: {{
extensions: ["AMSmath.js","AMSsymbols.js","noErrors.js","noUndefined.js"]
}}
}});
</script>
<script type="text/javascript" src="{MATHJAX_FILE}"></script>
""".format(**locals())
def convert_categories_to_markup(categories, with_category_title=True):
# Only add Category title if the text is displayed
if with_category_title:
markup = '== %s ==\n' % _('Tags')
else:
markup = ''
for category, entry_list in categories.items():
markup += '- ' + category + '\n'
for entry in entry_list:
markup += ' - ' + entry + '\n'
markup += '\n\n'
return markup
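# Example output (illustrative, not from the original source): for
# categories = {'Todo': ['Call mom', 'Buy milk']} and the default
# with_category_title=True, the returned txt2tags markup is roughly:
#
#   == Tags ==
#   - Todo
#    - Call mom
#    - Buy milk
#
# followed by a blank line per category.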
def get_markup_for_day(day, with_text=True, with_tags=True, categories=None, date=None):
'''
Used for exporting days
'''
export_string = ''
# Add date if it is not None and not the empty string
if date:
export_string += '= %s =\n\n' % date
# Add text
if with_text:
export_string += day.text
# Add Categories
category_content_pairs = day.get_category_content_pairs()
if with_tags and categories:
categories = [word.lower() for word in categories]
export_categories = dict((x, y) for (x, y) in category_content_pairs.items()
if x.lower() in categories)
elif with_tags and categories is None:
# No restrictions
export_categories = category_content_pairs
else:
# "Export no categories" selected
export_categories = []
if export_categories:
export_string += '\n\n\n' + convert_categories_to_markup(
export_categories, with_category_title=with_text)
elif with_text:
export_string += '\n\n'
# Only return the string, when there is text or there are categories
# We don't want to list empty dates
if export_categories or with_text:
export_string += '\n\n\n'
return export_string
return ''
def _get_config(target, options):
config = {}
# Set the configuration on the 'config' dict.
config = txt2tags.ConfigMaster()._get_defaults()
config['outfile'] = txt2tags.MODULEOUT # results as list
config['target'] = target
# The Pre (and Post) processing config is a list of lists:
# [ [this, that], [foo, bar], [patt, replace] ]
config['postproc'] = []
config['preproc'] = []
config['style'] = []
# Allow line breaks, r'\\\\' are 2 \ for regexes
config['preproc'].append([REGEX_LINEBREAK, 'LINEBREAK'])
# Highlight hashtags.
if target == 'tex':
config['preproc'].append([HASHTAG.pattern, r'\1{\2\3BEGININDEX\3ENDINDEX|color:red}'])
else:
config['preproc'].append([HASHTAG.pattern, r'\1{\2\3|color:red}'])
# Escape color markup.
config['preproc'].append([r'\{(.*?)\|color:(.+?)\}', ESCAPE_COLOR])
if target in ['xhtml', 'html']:
config['encoding'] = 'UTF-8' # document encoding
config['toc'] = 0
config['css-sugar'] = 1
# Fix encoding for export opened in firefox
config['postproc'].append([r'<head>', '<head>' + CHARSET_UTF8])
# Line breaks
config['postproc'].append([r'LINEBREAK', '<br />'])
# Apply image resizing
config['postproc'].append([r'src=\"WIDTH(\d+)-', r'width="\1" src="'])
# {{red text|color:red}} -> <span style="color:red">red text</span>
config['postproc'].append([COLOR_ESCAPED, r'<span style="color:\2">\1</span>'])
elif target == 'tex':
config['encoding'] = 'utf8'
config['preproc'].append(['€', 'Euro'])
# Latex only allows whitespace and underscores in filenames if
# the filename is surrounded by "...". This is in turn only possible
# if the extension is omitted.
config['preproc'].append([r'\[""', r'["""'])
config['preproc'].append([r'""\.', r'""".'])
scheme = 'file:///' if sys.platform == 'win32' else 'file://'
# For images we have to omit the file:// prefix
config['postproc'].append([r'includegraphics\{(.*)"%s' % scheme, r'includegraphics{"\1'])
# Special handling for LOCAL file links (Omit scheme, add run:)
# \htmladdnormallink{file.txt}{file:///home/user/file.txt}
# -->
# \htmladdnormallink{file.txt}{run:/home/user/file.txt}
config['postproc'].append([r'htmladdnormallink\{(.*)\}\{%s(.*)\}' % scheme,
r'htmladdnormallink{\1}{run:\2}'])
# Line breaks
config['postproc'].append([r'LINEBREAK', r'\\\\'])
# Apply image resizing
config['postproc'].append([r'includegraphics\{("?)WIDTH(\d+)-', r'includegraphics[width=\2px]{\1'])
# We want the plain latex formulas unescaped.
# Allowed formulas: $$...$$, \[...\], \(...\)
config['preproc'].append([r'\\\[\s*(.+?)\s*\\\]', r"BEGINEQUATION''\1''ENDEQUATION"])
config['preproc'].append([r'\$\$\s*(.+?)\s*\$\$', r"BEGINEQUATION''\1''ENDEQUATION"])
config['postproc'].append([r'BEGINEQUATION(.+)ENDEQUATION', r'$$\1$$'])
config['preproc'].append([r'\\\(\s*(.+?)\s*\\\)', r"BEGINMATH''\1''ENDMATH"])
config['postproc'].append([r'BEGINMATH(.+)ENDMATH', r'$\1$'])
# Fix utf8 quotations - „, “ and ” cause problems compiling the latex document.
config['postproc'].extend([[u'„', '"'], [u'”', '"'], [u'“', '"']])
# Enable index.
config['style'].append('makeidx')
config['postproc'].append([r'BEGININDEX(.+?)ENDINDEX', r'\\index{\1}'])
config['postproc'].append(['begin{document}', 'makeindex\n\\\\begin{document}'])
config['postproc'].append(['end{document}', 'printindex\n\n\\\\end{document}'])
config['postproc'].append([COLOR_ESCAPED, r'\\textcolor{\2}{\1}'])
elif target == 'txt':
# Line breaks
config['postproc'].append([r'LINEBREAK', '\n'])
# Apply image resizing ([WIDTH400-file:///pathtoimage.jpg])
config['postproc'].append([r'\[WIDTH(\d+)-(.+)\]', r'[\2?\1]'])
# Allow resizing images by changing
# [filename.png?width] to [WIDTHwidth-filename.png]
img_ext = r'png|jpe?g|gif|eps|bmp|svg'
img_name = r'\S.*\S|\S'
# Apply this preproc only after the latex image quotes have been added
config['preproc'].append([r'\[(%s\.(%s))\?(\d+)\]' % (img_name, img_ext), r'[WIDTH\3-\1]'])
# Disable colors for all other targets.
config['postproc'].append([COLOR_ESCAPED, r'\1'])
# MathJax
if options.pop('add_mathjax'):
config['postproc'].append([r'</body>', MATHJAX + '</body>'])
config['postproc'].append([r'</body>', PRINT_FUNCTION + '</body>'])
# Custom css
fonts = options.pop('font', 'sans-serif')
if 'html' in target:
css = CSS % {'font': fonts, 'table_head_bg': TABLE_HEAD_BG}
config['postproc'].append([r'</head>', css + '</head>'])
config.update(options)
return config
def _convert_paths(txt, data_dir):
def _convert_uri(uri):
path = uri[len('file://'):] if uri.startswith('file://') else uri
# Check if relative file exists and convert it if it does.
if (not any(uri.startswith(proto) for proto in filesystem.REMOTE_PROTOCOLS) and
not os.path.isabs(path)):
path = os.path.join(data_dir, path)
assert os.path.isabs(path), path
if os.path.exists(path):
uri = filesystem.get_local_url(path)
return uri
def _convert_pic_path(match):
uri = _convert_uri(match.group(2) + match.group(4))
# Reassemble picture markup.
name, ext = os.path.splitext(uri)
parts = [match.group(1), name, match.group(3), ext]
if match.group(5) is not None:
parts.append(match.group(5))
parts.append(match.group(6))
return ''.join(parts)
def _convert_file_path(match):
uri = _convert_uri(match.group(4))
# Reassemble link markup
parts = [match.group(i) for i in range(1, 6)]
parts[3] = uri
return ''.join(parts)
txt = REGEX_PIC.sub(_convert_pic_path, txt)
txt = REGEX_NAMED_LINK.sub(_convert_file_path, txt)
return txt
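# Behaviour sketch (assumed, not part of the original file): with
# data_dir='/home/user/.rednotebook/data', a relative picture reference like
# [""pic"".png] is rewritten to the absolute local URL of
# /home/user/.rednotebook/data/pic.png if that file exists; absolute paths,
# remote URLs and missing files pass through unchanged.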
def convert(txt, target, data_dir, headers=None, options=None):
'''
Code partly taken from txt2tags tarball
'''
options = options or {}
# Only add MathJax code if there is a formula.
options['add_mathjax'] = (
FORMULAS_SUPPORTED and
'html' in target and
any(x in txt for x in MATHJAX_DELIMITERS))
logging.debug('Add mathjax code: %s' % options['add_mathjax'])
# Turn relative paths into absolute paths.
txt = _convert_paths(txt, data_dir)
# The body text must be a list.
txt = txt.split('\n')
# Set the three header fields
if headers is None:
if target == 'tex':
# LaTeX requires a title if \maketitle is used
headers = ['RedNotebook', '', '']
else:
headers = ['', '', '']
config = _get_config(target, options)
# Let's do the conversion
try:
headers = txt2tags.doHeader(headers, config)
body, toc = txt2tags.convert(txt, config)
footer = txt2tags.doFooter(config)
toc = txt2tags.toc_tagger(toc, config)
toc = txt2tags.toc_formatter(toc, config)
full_doc = headers + toc + body + footer
finished = txt2tags.finish_him(full_doc, config)
result = '\n'.join(finished)
# Txt2tags error, show the message to the user
except txt2tags.error as msg:
logging.error(msg)
result = msg
# Unknown error, show the traceback to the user
except:
result = (
'<b>Error</b>: This day contains invalid '
'<a href="http://txt2tags.org/markup.html">txt2tags markup</a>. '
'You can help us fix this by submitting a bugreport in the '
'<a href="https://code.google.com/p/txt2tags/issues/list">'
'txt2tags bugtracker</a>. Please append the day\'s text to the issue.')
logging.error('Invalid markup:\n%s' % txt2tags.getUnknownErrorMessage())
return result
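# Minimal usage sketch (illustrative; the data_dir path is hypothetical):
#
#   html = convert('**bold** and \\(x^2\\)', 'xhtml', '/tmp/data')
#
# Because the text contains one of the MATHJAX_DELIMITERS, 'add_mathjax' is
# set automatically and the MathJax <script> block is appended to the output.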
def convert_to_pango(txt, headers=None, options=None):
'''
Code partly taken from txt2tags tarball
'''
original_txt = txt
# Here is the marked body text, it must be a list.
txt = txt.split('\n')
# Set the three header fields
if headers is None:
headers = ['', '', '']
config = txt2tags.ConfigMaster()._get_defaults()
config['outfile'] = txt2tags.MODULEOUT # results as list
config['target'] = 'xhtml'
config['preproc'] = []
# We need to escape the ampersand here, otherwise "&amp;" would become
# "&amp;amp;"
config['preproc'].append([r'&amp;', '&'])
# Allow line breaks
config['postproc'] = []
config['postproc'].append([REGEX_LINEBREAK, '\n'])
if options is not None:
config.update(options)
# Let's do the conversion
try:
body, toc = txt2tags.convert(txt, config)
full_doc = body
finished = txt2tags.finish_him(full_doc, config)
result = ''.join(finished)
# Txt2tags error, show the message to the user
except txt2tags.error as msg:
logging.error(msg)
result = msg
# Unknown error, show the traceback to the user
except:
result = txt2tags.getUnknownErrorMessage()
logging.error(result)
# remove unwanted paragraphs
result = result.replace('<p>', '').replace('</p>', '')
logging.log(5, 'Converted "%s" text to "%s" txt2tags markup' %
(repr(original_txt), repr(result)))
# Remove unknown tags (<a>)
def replace_links(match):
"""Return the link name."""
return match.group(1)
result = re.sub(REGEX_HTML_LINK, replace_links, result)
try:
Pango.parse_markup(result, -1, "0")
# result is valid pango markup, return the markup.
return result
except GObject.GError:
# There are unknown tags in the markup, return the original text
logging.debug('There are unknown tags in the markup: %s' % result)
return original_txt
def convert_from_pango(pango_markup):
original_txt = pango_markup
replacements = dict((
('<b>', '**'), ('</b>', '**'),
('<i>', '//'), ('</i>', '//'),
('<s>', '--'), ('</s>', '--'),
('<u>', '__'), ('</u>', '__'),
('&', '&'),
('<', '<'), ('>', '>'),
('\n', r'\\'),
))
for orig, repl in replacements.items():
pango_markup = pango_markup.replace(orig, repl)
logging.log(5, 'Converted "%s" pango to "%s" txt2tags' %
(repr(original_txt), repr(pango_markup)))
return pango_markup
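# Round-trip sketch (illustrative only, assuming the replacement table above):
#
#   convert_from_pango('<b>bold</b> &amp; <i>italic</i>')
#       -> '**bold** & //italic//'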
| gpl-2.0 |
jcasner/nupic | src/nupic/support/exceptions.py | 39 | 2907 | # ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
import sys
import traceback
class TimeoutError(Exception):
""" The requested operation timed out """
pass
class NupicJobFailException(Exception):
""" This exception signals that the Nupic job (e.g., Hypersearch, Production,
etc.) should be aborted due to the given error.
"""
def __init__(self, errorCode, msg):
"""
Parameters:
---------------------------------------------------------------------
errorCode: An error code from the support.errorcodes.ErrorCodes
enumeration
msg: Error message string
"""
self.__errorCode = errorCode
self.__msg = msg
super(NupicJobFailException, self).__init__(errorCode, msg)
return
def getWorkerCompletionMessage(self):
""" Generates a worker completion message that is suitable for the
worker_completion_message field in jobs table
Parameters:
---------------------------------------------------------------------
retval: The worker completion message appropriate for the
"worker_completion_message" field in jobs table
"""
msg = "%s: %s\n%s" % (self.__errorCode, self.__msg, traceback.format_exc())
return msg
@classmethod
def mapCurrentException(cls, e, errorCode, msg):
""" Raises NupicJobFailException by mapping from another exception that
is being handled in the caller's scope and preserves the current exception's
traceback.
Parameters:
---------------------------------------------------------------------
e: The source exception
errorCode: An error code from the support.errorcodes.ErrorCodes
enumeration
msg: Error message string
"""
traceback = sys.exc_info()[2]
assert traceback is not None
newMsg = "%s: %r" % (msg, e)
e = NupicJobFailException(errorCode=errorCode, msg=newMsg)
raise e, None, traceback
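# Usage sketch (illustrative; the error code value is hypothetical):
#
#   try:
#     runJob()
#   except Exception as e:
#     NupicJobFailException.mapCurrentException(e, errorCode="E10000", msg="job failed")
#
# This re-raises the failure as NupicJobFailException while preserving the
# original traceback via the three-argument raise above.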
| agpl-3.0 |
TansyArron/pants | tests/python/pants_test/backend/jvm/tasks/test_scala_repl_integration.py | 15 | 2086 | # coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
from textwrap import dedent
from pants_test.pants_run_integration_test import PantsRunIntegrationTest
class ScalaReplIntegrationTest(PantsRunIntegrationTest):
def run_repl(self, target, program, repl_args=None):
"""Run a repl for the given target with the given input, and return stdout_data"""
command = ['repl']
if repl_args:
command.extend(repl_args)
command.extend([target, '--quiet'])
pants_run = self.run_pants(command=command, stdin_data=program)
self.assert_success(pants_run)
return pants_run.stdout_data.splitlines()
def run_repl_helloworld(self, repl_args=None):
output_lines = self.run_repl(
'examples/src/scala/org/pantsbuild/example/hello/welcome',
dedent("""
import org.pantsbuild.example.hello.welcome.WelcomeEverybody
println(WelcomeEverybody("World" :: Nil).head)
"""),
repl_args=repl_args)
return output_lines
def test_run_repl_direct(self):
self.assertIn('Hello, World!', self.run_repl_helloworld())
def test_run_repl_explicit_usejavacp(self):
self.assertIn('Hello, World!',
self.run_repl_helloworld(repl_args=['--jvm-options=-Dscala.usejavacp=true']))
def test_run_repl_explicit_nousejavacp(self):
self.assertIn('Failed to initialize the REPL due to an unexpected error.',
self.run_repl_helloworld(repl_args=['--jvm-options=-Dscala.usejavacp=false']))
def test_run_repl_transitive(self):
output_lines = self.run_repl(
'testprojects/src/scala/org/pantsbuild/testproject/unicode',
dedent("""
import org.pantsbuild.testproject.unicode.shapeless.ShapelessExample
println(ShapelessExample.greek())
"""))
self.assertIn("shapeless success", output_lines)
| apache-2.0 |
SportySpice/Collections | src/file/File.py | 1 | 3600 | import xbmc
import xbmcvfs
import Folder
import urllib
import urlparse
NAME_QUERY = 'fileName'
FOLDER_NAME_QUERY = 'folderName'
FOLDER_PATH_QUERY = 'folderPath'
class File(object):
def __init__(self, name, folder):
self.name = name
self.folder = folder
self.path = folder.fullpath
self.fullpath = folder.fullpath + '/' + name
if '.' in name:
self.soleName, self.extension = name.split('.', 1)
else:
self.soleName = name
self.extension = None
self._pathTranslated = None
self._fullpathTranslated = None
def exists(self):
return xbmcvfs.exists(self.fullpath)
def delete(self):
xbmcvfs.delete(self.fullpath)
def deleteIfExists(self):
if self.exists():
self.delete()
def pathTranslated(self):
return self.folder.fullpathTranslated()
def fullpathTranslated(self):
if self._fullpathTranslated is None:
self._fullpathTranslated = xbmc.translatePath(self.fullpath)
return self._fullpathTranslated
def fileHandler(self, write=False):
if write:
permission = 'w'
else:
permission = 'r'
fullpath = self.fullpathTranslated()
return xbmcvfs.File(fullpath, permission)
def contents(self):
fh = self.fileHandler()
contents = fh.read()
fh.close()
return contents
def lines(self):
contents = self.contents()
return contents.split('\n')
def write(self, contentsStr):
fh = self.fileHandler(write=True)
fh.write(contentsStr)
fh.close()
def encodedQuery(self):
query = urllib.urlencode({NAME_QUERY: self.name,
FOLDER_NAME_QUERY: self.folder.name,
FOLDER_PATH_QUERY: self.folder.path
})
return query
def dumpObject(self, dumpObject):
import dill as pickle
with open(self.fullpathTranslated(), 'wb') as f:
pickle.dump(dumpObject, f)
def loadObject(self):
import dill as pickle
with open(self.fullpathTranslated(),'rb') as f:
loadedObject = pickle.load(f)
return loadedObject
def fromQuery(query):
parsedQuery = urlparse.parse_qs(query)
name = parsedQuery[NAME_QUERY][0]
folderName = parsedQuery[FOLDER_NAME_QUERY][0]
folderPath = parsedQuery[FOLDER_PATH_QUERY][0]
folder = Folder.Folder(folderName, folderPath)
newFile = File(name, folder)
return newFile
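# Round-trip sketch (illustrative; folder name and path are made up):
#
#   folder = Folder.Folder('collections', 'special://profile')
#   f = File('notes.txt', folder)
#   query = f.encodedQuery() # urlencoded fileName/folderName/folderPath
#   same = fromQuery(query) # reconstructs an equivalent File object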
def fromFullpath(fullpath):
folderPath, folderName, fileName = fullpath.rsplit('/', 2)
folder = Folder.Folder(folderName, folderPath)
newFile = File(fileName, folder)
return newFile
def fromNameAndDir(fileName, dirPath):
folder = Folder.fromFullpath(dirPath)
newFile = File(fileName, folder)
return newFile
def fromInvalidNameAndDir(originalName, dirPath):
import utils
name = utils.createValidName(originalName)
return fromNameAndDir(name, dirPath)
def loadObjectFromFP(fullpath):
dumpFile = fromFullpath(fullpath)
return dumpFile.loadObject() | gpl-2.0 |
qspin/qtaste | doc/src/docbkx/scripts/lib/PyGithub/github/tests/Equality.py | 39 | 2613 | # -*- coding: utf-8 -*-
# ########################## Copyrights and license ############################
# #
# Copyright 2013 Vincent Jacques <vincent@vincent-jacques.net> #
# #
# This file is part of PyGithub. http://jacquev6.github.com/PyGithub/ #
# #
# PyGithub is free software: you can redistribute it and/or modify it under #
# the terms of the GNU Lesser General Public License as published by the Free #
# Software Foundation, either version 3 of the License, or (at your option) #
# any later version. #
# #
# PyGithub is distributed in the hope that it will be useful, but WITHOUT ANY #
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS #
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more #
# details. #
# #
# You should have received a copy of the GNU Lesser General Public License #
# along with PyGithub. If not, see <http://www.gnu.org/licenses/>. #
# #
# ##############################################################################
import Framework
class Equality(Framework.TestCase):
def testUserEquality(self):
u1 = self.g.get_user("jacquev6")
u2 = self.g.get_user("jacquev6")
self.assertTrue(u1 == u2)
self.assertFalse(u1 != u2)
self.assertEqual(u1, u2)
def testUserDifference(self):
u1 = self.g.get_user("jacquev6")
u2 = self.g.get_user("OddBloke")
self.assertFalse(u1 == u2)
self.assertTrue(u1 != u2)
self.assertNotEqual(u1, u2)
def testBranchEquality(self):
# Erf, equality of NonCompletableGithubObjects will be difficult to implement
# because even their _rawData can differ. (Here, the avatar_url is not equal)
# (CompletableGithubObjects are compared by their API url, which is a good key)
r = self.g.get_user().get_repo("PyGithub")
b1 = r.get_branch("develop")
b2 = r.get_branch("develop")
self.assertNotEqual(b1._rawData, b2._rawData)
| lgpl-3.0 |
ZhangJun-GitHub/Cycle | dialogs.py | 1 | 20748 | #====================================================
# Cycle - calendar for women
# Distributed under GNU Public License
# Original author: Oleg S. Gints
# Maintainer: Matt Molyneaux (moggers87+git@moggers87.co.uk)
# Home page: http://moggers.co.uk/cgit/cycle.git/about
#===================================================
import os
import wx
import wx.html
import cPickle
import base64 # used by get_new_file_name below
import random # used by get_new_file_name below
from cal_year import cycle , Val
from save_load import Load_Cycle, get_f_name, set_color_default
from set_dir import *
#---------------------------------------------------------------------------
class Settings_Dlg(wx.Dialog):
def __init__(self, parent):
wx.Dialog.__init__(self, parent, -1, _('Settings'), wx.DefaultPosition)
self.Centre(wx.BOTH)
#======================
box = wx.BoxSizer(wx.VERTICAL)
b1 = wx.StaticBoxSizer(wx.StaticBox(self, -1, _('Length of cycle')), wx.VERTICAL)
i = wx.NewId()
self.cb1 = wx.CheckBox(self, i, _(' by average'), style=wx.NO_BORDER)
b1.Add(self.cb1, 0, wx.ALL, 5)
self.Bind(wx.EVT_CHECKBOX, self.By_Average, id=i)
self.cb1.SetValue(cycle.by_average)
b2 = wx.BoxSizer(wx.HORIZONTAL)
i = wx.NewId()
self.sc = wx.SpinCtrl(self, i, "", size=wx.Size(50, -1))
self.sc.SetRange(21, 35)
self.sc.SetValue(cycle.period)
self.sc.Enable(not self.cb1.GetValue())
b2.Add(self.sc, 0)
b2.Add(wx.StaticText(self, -1, _(' days in cycle')), 0)
b1.Add(b2, 0, wx.ALL, 5)
box.Add(b1, 0, wx.EXPAND | wx.TOP | wx.LEFT | wx.RIGHT, 10)
#======================
self.rb = wx.RadioBox(self, -1, _('Display'),
choices = [_('fertile days'), _('none')],
majorDimension=1, style=wx.RA_SPECIFY_COLS)
box.Add(self.rb, 0, wx.EXPAND | wx.TOP | wx.LEFT | wx.RIGHT, 10)
self.rb.SetSelection(cycle.disp)
#======================
self.rb1 = wx.RadioBox(self, -1, _('First week day'),
choices=[_('monday'), _('sunday')],
majorDimension=1, style=wx.RA_SPECIFY_COLS)
box.Add(self.rb1, 0, wx.EXPAND | wx.ALL, 10)
self.rb1.SetSelection(cycle.first_week_day)
#======================
i = wx.NewId()
txt1 = _('Colours')
txt2 = _('Change password')
w1, h = self.GetTextExtent(txt1)
w2, h = self.GetTextExtent(txt2)
w = max(w1, w2)
box.Add(wx.Button(self, i, txt1, size=wx.Size(w+10, -1)), 0, wx.ALIGN_CENTER)
self.Bind(wx.EVT_BUTTON, self.OnColours, id=i)
#======================
i = wx.NewId()
box.Add(wx.Button(self, i, txt2, size=wx.Size(w + 10, -1)), 0, wx.TOP | wx.ALIGN_CENTER, 10)
self.Bind(wx.EVT_BUTTON, self.OnChangePasswd, id=i)
#======================
but_box = wx.BoxSizer(wx.HORIZONTAL)
i = wx.NewId()
but_box.Add(wx.Button(self, i, _('Ok')), 0, wx.ALL, 10)
self.Bind(wx.EVT_BUTTON, self.OnOk, id=i)
i = wx.NewId()
but_box.Add(wx.Button(self, i, _('Cancel')), 0, wx.ALL, 10)
self.Bind(wx.EVT_BUTTON, self.OnCancel, id=i)
box.Add(but_box, 0, wx.ALIGN_CENTER)
self.SetAutoLayout(True)
self.SetSizer(box)
box.Fit(self)
def By_Average(self, event):
if event.Checked():
self.sc.Enable(False)
else:
self.sc.Enable(True)
def OnOk(self, event):
if not 21 <= self.sc.GetValue() <= 35:
dlg = wx.MessageDialog(self, _('Period of cycle is invalid!'),
_('Error!'), wx.OK | wx.ICON_ERROR )
dlg.ShowModal()
dlg.Destroy()
return
cycle.period = self.sc.GetValue()
cycle.by_average = self.cb1.GetValue()
cycle.disp = self.rb.GetSelection()
cycle.first_week_day = self.rb1.GetSelection()
self.EndModal(wx.ID_OK)
def OnCancel(self, event):
self.EndModal(wx.ID_CANCEL)
def OnChangePasswd(self, event):
dlg = Ask_Passwd_Dlg(self)
dlg.ShowModal()
dlg.Destroy()
def OnColours(self, event):
dlg = Colours_Dlg(self)
dlg.ShowModal()
dlg.Destroy()
#---------------------------------------------------------------------------
class Ask_Passwd_Dlg(wx.Dialog):
def __init__(self, parent):
wx.Dialog.__init__(self, parent, -1, _('Password'))
#======================
box = wx.BoxSizer(wx.VERTICAL)
box.Add(wx.StaticText(self, -1, _('Enter your password')), 0,
wx.ALIGN_CENTER|wx.TOP|wx.LEFT|wx.RIGHT, 10)
self.pass1 = wx.TextCtrl(self, -1, "", wx.Point(10, 30),
size=(130, -1), style=wx.TE_PASSWORD)
box.Add(self.pass1, 0, wx.ALIGN_CENTER | wx.ALL, 10)
box.Add(wx.StaticText(self, -1, _('Once more...')), 0,
wx.ALIGN_CENTER|wx.LEFT|wx.RIGHT, 10)
self.pass2 = wx.TextCtrl(self, -1, "", wx.Point(10, 80),
size=(130, -1), style=wx.TE_PASSWORD)
box.Add(self.pass2, 0, wx.ALIGN_CENTER|wx.ALL, 10)
b1 = wx.BoxSizer(wx.HORIZONTAL)
i = wx.NewId()
b1.Add(wx.Button(self, i, _('Ok')), 0, wx.ALL, 10)
self.Bind(wx.EVT_BUTTON, self.OnOk, id=i)
i = wx.NewId()
b1.Add(wx.Button(self, i, _('Cancel')), 0, wx.ALL, 10)
self.Bind(wx.EVT_BUTTON, self.OnCancel, id=i)
self.pass1.SetFocus()
box.Add(b1, 0, wx.ALIGN_CENTER)
self.SetAutoLayout(True)
self.SetSizer(box)
box.Fit(self)
def OnOk(self, event):
err = ""
if self.pass1.GetValue() == "" or self.pass2.GetValue() == "":
err = _('Password must be not EMPTY!')
if self.pass1.GetValue() != self.pass2.GetValue():
err = _('Entering password don\'t match!')
if err != "":
dlg = wx.MessageDialog(self, err,
_('Error!'), wx.OK | wx.ICON_ERROR )
dlg.ShowModal()
dlg.Destroy()
return
cycle.passwd = self.pass1.GetValue()
self.EndModal(wx.ID_OK)
def OnCancel(self, event):
self.EndModal(wx.ID_CANCEL)
#---------------------------------------------------------------------------
def get_users():
#Get list of users
magic_str = 'UserName='
users = [] #array of (user, file) name
p, f_name = get_f_name()
if os.path.exists(p):
files = os.listdir(p)
for f in files:
fd = open(os.path.join(p, f), "rb")
try:
data = cPickle.loads(fd.read())
except (cPickle.UnpicklingError, ImportError, AttributeError, EOFError, IndexError):
fd.seek(0)
data = fd.read(len(magic_str))
if 'username' in data:
users.append((data['username'], f))
elif data == magic_str:
data = fd.read()
n = data.find("===") #find end string
if n != -1:
users.append((cPickle.loads(data[:n]), f))
else: #old format
users.append((f, f))
users.sort()
return users
#---------------------------------------------------------------------------
class Login_Dlg(wx.Dialog):
def __init__(self, parent):
wx.Dialog.__init__(self, parent, -1, _('Login'))
self.name = ""
self.file = ""
box = wx.BoxSizer(wx.VERTICAL)
#Get list of users
self.users = get_users()
# p, f_name = get_f_name()
# if os.path.exists(p):
# users = os.listdir(p)
# else:
# users = [_('empty')]
# users.sort()
#======== List users ==============
i = wx.NewId()
self.il = wx.ImageList(16, 16, True)
bmp = wx.Bitmap(os.path.join(bitmaps_dir, 'smiles.bmp'), wx.BITMAP_TYPE_BMP)
mask = wx.Mask(bmp, wx.WHITE)
bmp.SetMask(mask)
idx1 = self.il.Add(bmp)
self.list = wx.ListCtrl(self, i, size = wx.Size(200, 200),
style=wx.LC_REPORT|wx.SUNKEN_BORDER|wx.LC_SINGLE_SEL)
self.list.SetImageList(self.il, wx.IMAGE_LIST_SMALL)
self.list.InsertColumn(0, _('Your name'))
for k in range(len(self.users)):
self.list.InsertImageStringItem(k, self.users[k][0], idx1)
self.list.SetColumnWidth(0, 180)
self.list.SetItemState(0, wx.LIST_STATE_SELECTED, wx.LIST_STATE_SELECTED)
self.name = self.users[0][0]
self.file = self.users[0][1]
self.list.Bind(wx.EVT_LIST_ITEM_SELECTED, self.OnItemSelected, self.list)
self.list.Bind(wx.EVT_LIST_KEY_DOWN, self.OnKeyDown, self.list)
box.Add(self.list, 0, wx.ALL, 10)
#========= Add user =============
i = wx.NewId()
box.Add(wx.Button(self, i, _('Add user')), 0, wx.ALIGN_CENTER)
self.Bind(wx.EVT_BUTTON, self.OnAdd, id=i)
#========= Ok - Cancel =============
b1 = wx.BoxSizer(wx.HORIZONTAL)
i = wx.NewId()
b1.Add(wx.Button(self, i, _('Ok')), 0, wx.ALL, 10)
self.Bind(wx.EVT_BUTTON, self.OnOk, id=i)
i = wx.NewId()
b1.Add(wx.Button(self, i, _('Cancel')), 0, wx.ALL, 10)
self.Bind(wx.EVT_BUTTON, self.OnCancel, id=i)
box.Add(b1, 0, wx.ALIGN_CENTER)
self.SetAutoLayout(True)
self.SetSizer(box)
box.Fit(self)
self.list.SetFocus()
def OnItemSelected(self, event):
self.name = self.users[event.GetIndex()][0] #self.list.GetItemText(event.GetIndex())
self.file = self.users[event.GetIndex()][1]
def OnKeyDown(self, event):
if event.GetKeyCode() == ord(" ") or event.GetKeyCode() == wx.WXK_RETURN:
self.OnOk()
else:
event.Skip()
def OnAdd(self, event=None):
if ask_name(self):
self.EndModal(wx.ID_OK)
def OnOk(self, event=None):
dlg = wx.TextEntryDialog(self, self.name + _(', enter you password:'), _('Password'), '',
style=wx.OK | wx.CANCEL | wx.TE_PASSWORD)
while dlg.ShowModal() == wx.ID_OK:
cycle.passwd = dlg.GetValue()
cycle.name = self.name
cycle.file = self.file
if Load_Cycle(cycle.name, cycle.passwd, cycle.file):
dlg.Destroy()
self.EndModal(wx.ID_OK)
return
else:
dlg2 = wx.MessageDialog(self, _('Password is invalid!'),
_('Error!'), wx.OK | wx.ICON_ERROR )
dlg2.ShowModal()
dlg2.Destroy()
dlg.Destroy()
def OnCancel(self, event):
self.EndModal(wx.ID_CANCEL)
#-------------------------------------------------------
def first_login():
#Get list of users
users = get_users()
if users != []:
return 'not_first' #user(s) already exists
if ask_name():
return 'first'
else:
return 'bad_login'
#-------------------------------------------------------
def get_new_file_name():
#create filename for user
while True:
random_chars = "".join(chr(random.randint(0,255)) for i in xrange(4))
random_chars = base64.urlsafe_b64encode(random_chars)
p, random_chars = get_f_name(random_chars)
if not os.path.isfile(random_chars):
return random_chars
#-------------------------------------------------------
def ask_name(parent=None):
# nobody, it is first login
wx.MessageBox(
_("This program is not a reliable contraceptive method.\n"
"Neither does it help to prevent sexually transmitted diseases\n"
"like HIV/AIDS.\n\nIt is just an electronic means of keeping track\n"
"of some of your medical data and extracting some statistical\n"
"conclusions from them. You cannot consider this program as a\n"
"substitute for your gynecologist in any way."))
dlg = wx.TextEntryDialog(parent, _('Enter you name:'), _('New user'), '',
style=wx.OK | wx.CANCEL)
while dlg.ShowModal() == wx.ID_OK:
name = dlg.GetValue()
if name != "":
users = get_users()
exists = False
for i in users:
if name == i[0]:
exists = True
break
if not exists:
d = Ask_Passwd_Dlg(parent)
if d.ShowModal() == wx.ID_OK:
cycle.file = get_new_file_name()
cycle.name = name
d.Destroy()
dlg.Destroy()
#self.EndModal(wx.ID_OK)
set_color_default()
return True
else:
d.Destroy()
continue
else:
err = name + _(' - already exists!')
else:
err = _('Name must be not EMPTY')
d2 = wx.MessageDialog(dlg, err, _('Error!'), wx.OK | wx.ICON_ERROR)
d2.ShowModal()
d2.Destroy()
dlg.Destroy()
return False
#---------------------------------------------------------------------------
class Legend_Dlg(wx.Dialog):
def __init__(self, parent):
wx.Dialog.__init__(self, parent, -1, _('Legend'))
#======================
box = wx.BoxSizer(wx.VERTICAL)
self._add(box, _('today'), wx.NullColour, wx.SIMPLE_BORDER)
self._add(box, _('begin of cycle'), cycle.colour_set['begin'])
self._add(box, _('prognosis of cycle begin'), cycle.colour_set['prog begin'])
self._add(box, _('conception'), cycle.colour_set['conception'])
self._add(box, _('fertile'), cycle.colour_set['fertile'])
self._add(box, _('ovulation, birth'), cycle.colour_set['ovule'])
self._add(box, _('1-st tablet'), cycle.colour_set['1-st tablet'])
self._add(box, _('tablets no. 22-28 or pause'), cycle.colour_set['pause'])
self._add(box, _('next 1-st tablet'), cycle.colour_set['next 1-st tablet'])
i = wx.NewId()
box.Add(wx.Button(self, i, _('Ok')), 0, wx.ALIGN_CENTER|wx.ALL, 10)
self.Bind(wx.EVT_BUTTON, self.OnOk, id=i)
self.SetAutoLayout(True)
self.SetSizer(box)
box.Fit(self)
def _add(self, box, txt, col, st=0):
b = wx.BoxSizer(wx.HORIZONTAL)
w = wx.Window(self, -1, size=wx.Size(15, 15), style=st)
w.SetBackgroundColour(col)
b.Add(w, 0, wx.LEFT|wx.RIGHT, 10)
b.Add(wx.StaticText(self, -1, txt), 0, wx.ALIGN_CENTER_VERTICAL|wx.RIGHT, 10)
box.Add(b, 0, wx.TOP, 10)
def OnOk(self, event):
self.EndModal(wx.ID_OK)
#---------------------------------------------------------------------------
class Note_Dlg(wx.Dialog):
def __init__(self, parent, title="", txt=""):
wx.Dialog.__init__(self, parent, -1, title)
self.CentreOnParent(wx.BOTH)
#======================
box = wx.BoxSizer(wx.VERTICAL)
self.txt = wx.TextCtrl(self, -1, txt,
size=(-1, 100), style=wx.TE_MULTILINE)
box.Add( self.txt, 0,
wx.EXPAND|wx.ALIGN_CENTER|wx.TOP|wx.LEFT|wx.RIGHT, 10)
b1 = wx.BoxSizer(wx.HORIZONTAL)
i = wx.NewId()
b1.Add(wx.Button(self, i, _('Ok')), 0, wx.ALL, 10)
self.Bind(wx.EVT_BUTTON, self.OnOk, id=i)
i = wx.NewId()
b1.Add(wx.Button(self, i, _('Cancel')), 0, wx.ALL, 10)
self.Bind(wx.EVT_BUTTON, self.OnCancel, id=i)
i = wx.NewId()
b1.Add(wx.Button(self, i, _('Remove')), 0, wx.ALL, 10)
self.Bind(wx.EVT_BUTTON, self.OnRemove, id=i)
box.Add(b1, 0, wx.ALIGN_CENTER)
self.SetAutoLayout(True)
self.SetSizer(box)
box.Fit(self)
self.txt.SetFocus()
def OnOk(self, event):
self.EndModal(wx.ID_OK)
def OnCancel(self, event):
self.EndModal(wx.ID_CANCEL)
def OnRemove(self, event):
self.EndModal(False)
def Get_Txt(self):
return self.txt.GetValue()
#---------------------------------------------------------------------------
class MyHtmlWindow(wx.html.HtmlWindow):
def __init__(self, parent, id, pos = wx.DefaultPosition, size=wx.DefaultSize):
wx.html.HtmlWindow.__init__(self, parent, id, pos, size)
if "gtk2" in wx.PlatformInfo:
self.SetStandardFonts()
def OnLinkClicked(self, linkinfo):
pass
#---------------------------------------------------------------------------
class Help_Dlg(wx.Dialog):
def __init__(self, parent, title="", txt=""):
wx.Dialog.__init__(self, parent, -1, title)
self.CentreOnParent(wx.BOTH)
#======================
box = wx.BoxSizer(wx.VERTICAL)
self.html = MyHtmlWindow(self, -1, size=(500, 350))
self.html.SetPage(txt)
box.Add(self.html, 0, wx.ALIGN_CENTER|wx.TOP|wx.LEFT|wx.RIGHT, 10)
i = wx.NewId()
box.Add(wx.Button(self, i, _('Ok')), 0, wx.ALIGN_CENTER|wx.ALL, 10)
self.Bind(wx.EVT_BUTTON, self.OnOk, id=i)
self.SetAutoLayout(True)
self.SetSizer(box)
box.Fit(self)
def OnOk(self, event):
self.EndModal(wx.ID_OK)
#---------------------------------------------------------------------------
class Colours_Dlg(wx.Dialog):
def __init__(self, parent):
wx.Dialog.__init__(self, parent, -1, _('Colours settings'))
self.col_set = cycle.colour_set.copy()
self.col_id = cycle.colour_set.keys()
self.data = wx.ColourData()
self.data.SetChooseFull(True)
self.buttons = {}
#======================
box = wx.BoxSizer(wx.VERTICAL)
self._add(box, _('begin of cycle'), 'begin')
self._add(box, _('prognosis of cycle begin'), 'prog begin')
self._add(box, _('conception'), 'conception')
self._add(box, _('fertile'), 'fertile')
self._add(box, _('ovulation, birth'), 'ovule')
self._add(box, _('1-st tablet'), '1-st tablet')
self._add(box, _('tablets no. 22-28 or pause'), 'pause')
self._add(box, _('next 1-st tablet'), 'next 1-st tablet')
b1 = wx.BoxSizer(wx.HORIZONTAL)
i = wx.NewId()
b1.Add(wx.Button(self, i, _('Ok')), 0, wx.ALL, 10)
self.Bind(wx.EVT_BUTTON, self.OnOk, id=i)
i = wx.NewId()
b1.Add(wx.Button(self, i, _('By default')), 0, wx.ALL, 10)
self.Bind(wx.EVT_BUTTON, self.OnDefault, id=i)
i = wx.NewId()
b1.Add(wx.Button(self, i, _('Cancel')), 0, wx.ALL, 10)
self.Bind(wx.EVT_BUTTON, self.OnCancel, id=i)
box.Add(b1, 0, wx.ALIGN_CENTER)
self.SetAutoLayout(True)
self.SetSizer(box)
box.Fit(self)
def _add(self, box, txt, col):
b = wx.BoxSizer(wx.HORIZONTAL)
i = self.col_id.index(col)
bt = wx.Button(self, i, "", size=wx.Size(15, 15))
self.Bind(wx.EVT_BUTTON, self.get_colour, id=i)
bt.SetBackgroundColour(self.col_set[col])
self.buttons.update({i:bt})
b.Add(bt, 0, wx.LEFT|wx.RIGHT, 10)
b.Add(wx.StaticText(self, -1, txt), 0, wx.ALIGN_CENTER_VERTICAL|wx.RIGHT, 10)
box.Add(b, 0, wx.TOP, 10)
def get_colour(self, event):
c = self.col_set[ self.col_id[event.GetId()] ]
self.data.SetColour(c)
dlg = wx.ColourDialog(self, self.data)
if dlg.ShowModal() == wx.ID_OK:
self.data = dlg.GetColourData()
c = self.data.GetColour()
self.buttons[event.GetId()].SetBackgroundColour(c)
self.col_set[self.col_id[event.GetId()]] = c
def OnOk(self, event):
cycle.colour_set = self.col_set.copy()
Val.Cal.Draw_Mark()
self.EndModal(wx.ID_OK)
def OnDefault(self, event):
self.col_set = {'begin':wx.NamedColour('RED'),
'prog begin':wx.NamedColour('PINK'),
'conception':wx.NamedColour('MAGENTA'),
'fertile':wx.NamedColour('GREEN YELLOW'),
'ovule':wx.NamedColour('SPRING GREEN'),
'1-st tablet':wx.NamedColour('GOLD'),
'pause':wx.NamedColour('LIGHT BLUE'),
'next 1-st tablet':wx.NamedColour('PINK')}
for item in self.col_id:
self.buttons[self.col_id.index(item)].SetBackgroundColour(self.col_set[item])
def OnCancel(self, event):
self.EndModal(wx.ID_CANCEL)
#---------------------------------------------------------------------------
| gpl-2.0 |
jounex/hue | apps/help/src/help/views.py | 33 | 2822 | #!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from desktop.lib.django_util import render
from desktop.lib.exceptions_renderable import PopupException
from desktop import appmanager
from hadoop.fs import LocalSubFileSystem
import markdown
import urllib
import os
INDEX_FILENAMES = ("index.md", "index.html", "index.txt")
def _unquote_path(path):
"""Normalizes paths."""
return urllib.unquote(path)
def get_help_fs(app_name):
"""
Creates a local file system for a given app's help directory.
"""
app = appmanager.get_desktop_module(app_name)
if app is not None:
if app.help_dir is None:
raise PopupException("No help available for app '%s'." % app_name)
return LocalSubFileSystem(app.help_dir)
else:
raise PopupException("App '%s' is not loaded, so no help is available for it!" % app_name)
def view(request, app, path):
"""
Views and renders a file at a given path.
Markdown files are parsed through markdown; others
are just pasted in <pre>'s.
TODO: Expose a way to do images.
"""
path = _unquote_path(path)
fs = get_help_fs(app)
if fs.isdir(path):
for i in INDEX_FILENAMES:
tmp_path = os.path.join(path, i)
if fs.isfile(tmp_path):
path = tmp_path
break
if not fs.isfile(path):
raise PopupException("Could not find or read the file: %s (app %s)" % (path, app))
content = fs.open(path, 'r').read()
content = unicode(content, 'utf-8', errors='replace')
if path.lower().endswith(".md"):
content = ('<div class="print rendered-markdown">' +
markdown.markdown(content, ['extra']) +
'</div>')
elif path.lower().endswith(".html"):
content = '<div class="print">%s</div>' % (content,)
else:
# TODO(todd) escape content?
content = '<pre>' + content + '</pre>'
data = {
'content': content,
'apps': sorted([ x for x in appmanager.DESKTOP_MODULES if x.help_dir ],
key = lambda app: app.menu_index),
'title': appmanager.get_desktop_module(app).nice_name,
'current': app
}
return render("display.mako", request, data)
| apache-2.0 |
dsfsdgsbngfggb/odoo | addons/resource/faces/utils.py | 433 | 3231 | ############################################################################
# Copyright (C) 2005 by Reithinger GmbH
# mreithinger@web.de
#
# This file is part of faces.
#
# faces is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# faces is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the
# Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
############################################################################
import observer
import os.path
import sys
_call_dir = os.path.abspath(os.path.dirname(sys.argv[0]))
def get_installation_path():
try:
if sys.frozen:
path = _call_dir
else:
raise AttributeError()
except AttributeError:
path = os.path.abspath(observer.__file__)
path = os.path.split(path)[0]
path = os.path.normcase(path)
return path
def get_resource_path():
try:
if sys.frozen:
path = _call_dir
path = os.path.join(path, "resources", "faces", "gui")
else:
raise AttributeError()
except AttributeError:
path = get_installation_path()
path = os.path.join(path, "gui", "resources")
path = os.path.normcase(path)
return path
def get_template_path():
try:
if sys.frozen:
path = _call_dir
path = os.path.join(path, "resources", "faces", "templates")
else:
raise AttributeError()
except AttributeError:
path = get_installation_path()
path = os.path.join(path, "templates")
path = os.path.normcase(path)
return path
def get_howtos_path():
try:
if sys.frozen:
path = _call_dir
else:
raise AttributeError()
except AttributeError:
path = get_installation_path()
path = os.path.join(path, "howtos")
path = os.path.normcase(path)
return path
def flatten(items):
if isinstance(items, tuple):
items = list(items)
if not isinstance(items, list):
yield items
stack = [iter(items)]
while stack:
for item in stack[-1]:
if isinstance(item, tuple):
item = list(item)
if isinstance(item, list):
stack.append(iter(item))
break
yield item
else:
stack.pop()
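# Example (illustrative): flatten() yields the leaves of arbitrarily nested
# lists/tuples in order, e.g.
#
#   list(flatten([1, (2, [3, 4]), 5])) -> [1, 2, 3, 4, 5]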
def do_yield():
pass
def progress_start(title, maximum, message=""):
pass
def progress_update(value, message=""):
pass
def progress_end():
pass
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
moonboots/tensorflow | tensorflow/python/kernel_tests/string_to_number_op_test.py | 15 | 2865 | # Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for StringToNumber op from parsing_ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
_ERROR_MESSAGE = "StringToNumberOp could not correctly convert string: "
class StringToNumberOpTest(tf.test.TestCase):
def testToFloat(self):
with self.test_session():
input_string = tf.placeholder(tf.string)
output = tf.string_to_number(
input_string,
out_type=tf.float32)
result = output.eval(feed_dict={
input_string: ["0",
"3",
"-1",
"1.12",
"0xF",
" -10.5",
"3.40282e+38",
# The next two exceed maximum value for float, so we
# expect +/-INF to be returned instead.
"3.40283e+38",
"-3.40283e+38",
"NAN",
"INF"]
})
self.assertAllClose([0, 3, -1, 1.12, 0xF, -10.5, 3.40282e+38,
float("INF"), float("-INF"), float("NAN"),
float("INF")], result)
with self.assertRaisesOpError(_ERROR_MESSAGE + "10foobar"):
output.eval(feed_dict={input_string: ["10foobar"]})
def testToInt32(self):
with self.test_session():
input_string = tf.placeholder(tf.string)
output = tf.string_to_number(
input_string,
out_type=tf.int32)
result = output.eval(feed_dict={
input_string: ["0", "3", "-1", " -10", "-2147483648", "2147483647"]
})
self.assertAllEqual([0, 3, -1, -10, -2147483648, 2147483647], result)
with self.assertRaisesOpError(_ERROR_MESSAGE + "2.9"):
output.eval(feed_dict={input_string: ["2.9"]})
# The next two exceed maximum value of int32.
for in_string in ["-2147483649", "2147483648"]:
with self.assertRaisesOpError(_ERROR_MESSAGE + in_string):
output.eval(feed_dict={input_string: [in_string]})
if __name__ == "__main__":
tf.test.main()
| apache-2.0 |
marnnie/Cable-buenaventura | plugin.video.kodipopcorntime/resources/site-packages/pyga/entities.py | 16 | 19442 | # -*- coding: utf-8 -*-
from datetime import datetime
from operator import itemgetter
from urlparse import urlparse
from urllib import unquote_plus
from pyga import utils
from pyga import exceptions
__author__ = "Arun KR (kra3) <the1.arun@gmail.com>"
__license__ = "Simplified BSD"
class Campaign(object):
'''
A representation of Campaign
Properties:
_type -- See TYPE_* constants, will be mapped to "__utmz" parameter.
creation_time -- Time of the creation of this campaign, will be mapped to "__utmz" parameter.
response_count -- Response Count, will be mapped to "__utmz" parameter.
Is also used to determine whether the campaign is new or repeated,
which will be mapped to "utmcn" and "utmcr" parameters.
id -- Campaign ID, a.k.a. "utm_id" query parameter for ga.js
Will be mapped to "__utmz" parameter.
source -- Source, a.k.a. "utm_source" query parameter for ga.js.
Will be mapped to "utmcsr" key in "__utmz" parameter.
g_click_id -- Google AdWords Click ID, a.k.a. "gclid" query parameter for ga.js.
Will be mapped to "utmgclid" key in "__utmz" parameter.
d_click_id -- DoubleClick (?) Click ID. Will be mapped to "utmdclid" key in "__utmz" parameter.
name -- Name, a.k.a. "utm_campaign" query parameter for ga.js.
Will be mapped to "utmccn" key in "__utmz" parameter.
medium -- Medium, a.k.a. "utm_medium" query parameter for ga.js.
Will be mapped to "utmcmd" key in "__utmz" parameter.
term -- Terms/Keywords, a.k.a. "utm_term" query parameter for ga.js.
Will be mapped to "utmctr" key in "__utmz" parameter.
content -- Ad Content Description, a.k.a. "utm_content" query parameter for ga.js.
Will be mapped to "utmcct" key in "__utmz" parameter.
'''
TYPE_DIRECT = 'direct'
TYPE_ORGANIC = 'organic'
TYPE_REFERRAL = 'referral'
CAMPAIGN_DELIMITER = '|'
UTMZ_PARAM_MAP = {
'utmcid': 'id',
'utmcsr': 'source',
'utmgclid': 'g_click_id',
'utmdclid': 'd_click_id',
'utmccn': 'name',
'utmcmd': 'medium',
'utmctr': 'term',
'utmcct': 'content',
}
def __init__(self, typ):
self._type = None
self.creation_time = None
self.response_count = 0
self.id = None
self.source = None
self.g_click_id = None
self.d_click_id = None
self.name = None
self.medium = None
self.term = None
self.content = None
if typ:
if typ not in ('direct', 'organic', 'referral'):
raise ValueError('Campaign type has to be one of the Campaign::TYPE_* constant values.')
self._type = typ
if typ == Campaign.TYPE_DIRECT:
self.name = '(direct)'
self.source = '(direct)'
self.medium = '(none)'
elif typ == Campaign.TYPE_REFERRAL:
self.name = '(referral)'
self.medium = 'referral'
elif typ == Campaign.TYPE_ORGANIC:
self.name = '(organic)'
self.medium = 'organic'
else:
self._type = None
self.creation_time = datetime.utcnow()
def validate(self):
if not self.source:
raise exceptions.ValidationError('Campaigns need to have at least the "source" attribute defined.')
@staticmethod
def create_from_referrer(url):
obj = Campaign(Campaign.TYPE_REFERRAL)
parse_rslt = urlparse(url)
obj.source = parse_rslt.netloc
obj.content = parse_rslt.path
return obj
def extract_from_utmz(self, utmz):
parts = utmz.split('.', 4)
if len(parts) != 5:
raise ValueError('The given "__utmz" cookie value is invalid.')
self.creation_time = utils.convert_ga_timestamp(parts[1])
self.response_count = int(parts[3])
params = parts[4].split(Campaign.CAMPAIGN_DELIMITER)
for param in params:
key, val = param.split('=')
try:
setattr(self, self.UTMZ_PARAM_MAP[key], unquote_plus(val))
except KeyError:
continue
return self
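# Illustrative parse (cookie values are made up): a "__utmz" value such as
#
#   '1.1234567890.12.3.utmcsr=google|utmccn=(organic)|utmcmd=organic|utmctr=pyga'
#
# yields source='google', name='(organic)', medium='organic', term='pyga',
# response_count=3, and creation_time decoded from the second field.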
class CustomVariable(object):
'''
Represent a Custom Variable
Properties:
index -- Is the slot, you have 5 slots
name -- Name given to custom variable
value -- Value for the variable
scope -- Scope can be any one of 1, 2 or 3.
WATCH OUT: It's a known issue that GA will not decode URL-encoded
characters in custom variable names and values properly, so spaces
will show up as "%20" in the interface etc. (applicable to name & value)
http://www.google.com/support/forum/p/Google%20Analytics/thread?tid=2cdb3ec0be32e078
'''
SCOPE_VISITOR = 1
SCOPE_SESSION = 2
SCOPE_PAGE = 3
def __init__(self, index=None, name=None, value=None, scope=3):
self.index = index
self.name = name
self.value = value
self.scope = CustomVariable.SCOPE_PAGE
if scope:
self.scope = scope
def __setattr__(self, name, value):
if name == 'scope':
if value and value not in range(1, 4):
raise ValueError('Custom Variable scope has to be one of the 1,2 or 3')
if name == 'index':
# Custom Variables are limited to five slots officially, but there seems to be a
# trick to allow for more of them which we could investigate at a later time (see
# http://analyticsimpact.com/2010/05/24/get-more-than-5-custom-variables-in-google-analytics/
if value and (value < 0 or value > 5):
raise ValueError('Custom Variable index has to be between 1 and 5.')
object.__setattr__(self, name, value)
def validate(self):
'''
According to the GA documentation, there is a limit to the combined size of
name and value of 64 bytes after URL encoding,
see http://code.google.com/apis/analytics/docs/tracking/gaTrackingCustomVariables.html#varTypes
and http://xahlee.org/js/google_analytics_tracker_2010-07-01_expanded.js line 563
This limit was increased to 128 bytes BEFORE encoding with the 2012-01 release of ga.js however,
see http://code.google.com/apis/analytics/community/gajs_changelog.html
'''
if len('%s%s' % (self.name, self.value)) > 128:
raise exceptions.ValidationError('Custom Variable combined name and value length must not be larger than 128 bytes.')
class Event(object):
'''
Represents an Event
http://code.google.com/apis/analytics/docs/tracking/eventTrackerOverview.html
Properties:
category -- The general event category
action -- The action for the event
label -- An optional descriptor for the event
value -- An optional value associated with the event. You can see your
event values in the Overview, Categories, and Actions reports,
where they are listed by event or aggregated across events,
depending upon your report view.
noninteraction -- By default, event hits will impact a visitor's bounce rate.
By setting this parameter to true, this event hit
will not be used in bounce rate calculations.
(default False)
'''
def __init__(self, category=None, action=None, label=None, value=None, noninteraction=False):
self.category = category
self.action = action
self.label = label
self.value = value
self.noninteraction = bool(noninteraction)
if self.noninteraction and not self.value:
self.value = 0
def validate(self):
if not(self.category and self.action):
raise exceptions.ValidationError('Events, at least need to have a category and action defined.')
class Item(object):
'''
Represents an Item in Transaction
Properties:
order_id -- Order ID, will be mapped to "utmtid" parameter
sku -- Product Code. This is the sku code for a given product, will be mapped to "utmipc" parameter
name -- Product Name, will be mapped to "utmipn" parameter
variation -- Variations on an item, will be mapped to "utmiva" parameter
price -- Unit Price. Value is set to numbers only, will be mapped to "utmipr" parameter
quantity -- Unit Quantity, will be mapped to "utmiqt" parameter
'''
def __init__(self):
self.order_id = None
self.sku = None
self.name = None
self.variation = None
self.price = None
self.quantity = 1
def validate(self):
if not self.sku:
raise exceptions.ValidationError('sku/product is a required parameter')
class Page(object):
'''
Contains all parameters needed for tracking a page
Properties:
path -- Page request URI, will be mapped to "utmp" parameter
title -- Page title, will be mapped to "utmdt" parameter
charset -- Charset encoding, will be mapped to "utmcs" parameter
referrer -- Referer URL, will be mapped to "utmr" parameter
load_time -- Page load time in milliseconds, will be encoded into "utme" parameter.
'''
REFERRER_INTERNAL = '0'
def __init__(self, path):
self.path = None
self.title = None
self.charset = None
self.referrer = None
self.load_time = None
if path:
self.path = path
def __setattr__(self, name, value):
if name == 'path':
if value and value != '':
if value[0] != '/':
raise ValueError('The page path should always start with a slash ("/").')
elif name == 'load_time':
if value and not isinstance(value, int):
raise ValueError('Page load time must be specified in integer milliseconds.')
object.__setattr__(self, name, value)
class Session(object):
'''
You should serialize this object and store it in the user session to keep it
persistent between requests (similar to the "__utmb" cookie of the GA Javascript client).
Properties:
session_id -- A unique per-session ID, will be mapped to "utmhid" parameter
track_count -- The amount of pageviews that were tracked within this session so far,
will be part of the "__utmb" cookie parameter.
Will get incremented automatically upon each request
start_time -- Timestamp of the start of this new session, will be part of the "__utmb" cookie parameter
'''
def __init__(self):
self.session_id = utils.get_32bit_random_num()
self.track_count = 0
self.start_time = datetime.utcnow()
@staticmethod
def generate_session_id():
return utils.get_32bit_random_num()
def extract_from_utmb(self, utmb):
'''
Will extract information for the "trackCount" and "startTime"
properties from the given "__utmb" cookie value.
'''
parts = utmb.split('.')
if len(parts) != 4:
raise ValueError('The given "__utmb" cookie value is invalid.')
self.track_count = int(parts[1])
self.start_time = utils.convert_ga_timestamp(parts[3])
return self
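# Illustrative parse (made-up values): "__utmb" has four dot-separated fields,
#
#   Session().extract_from_utmb('123456789.5.10.1360000000')
#
# which sets track_count=5 and start_time from the trailing unix timestamp.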
class SocialInteraction(object):
'''
Properties:
action -- Required. A string representing the social action being tracked,
will be mapped to "utmsa" parameter
network -- Required. A string representing the social network being tracked,
will be mapped to "utmsn" parameter
target -- Optional. A string representing the URL (or resource) which receives the action.
'''
def __init__(self, action=None, network=None, target=None):
self.action = action
self.network = network
self.target = target
def validate(self):
if not(self.action and self.network):
raise exceptions.ValidationError('Social interactions need to have at least the "network" and "action" attributes defined.')
class Transaction(object):
'''
Represents parameters for a Transaction call
Properties:
order_id -- Order ID, will be mapped to "utmtid" parameter
affiliation -- Affiliation, Will be mapped to "utmtst" parameter
total -- Total Cost, will be mapped to "utmtto" parameter
tax -- Tax Cost, will be mapped to "utmttx" parameter
shipping -- Shipping Cost, values as for unit and price, will be mapped to "utmtsp" parameter
city -- Billing City, will be mapped to "utmtci" parameter
state -- Billing Region, will be mapped to "utmtrg" parameter
country -- Billing Country, will be mapped to "utmtco" parameter
items -- @entity.Items in a transaction
'''
def __init__(self):
self.items = []
self.order_id = None
self.affiliation = None
self.total = None
self.tax = None
self.shipping = None
self.city = None
self.state = None
self.country = None
def __setattr__(self, name, value):
if name == 'order_id':
for itm in self.items:
itm.order_id = value
object.__setattr__(self, name, value)
def validate(self):
if len(self.items) == 0:
raise exceptions.ValidationError('Transactions need to consist of at least one item')
def add_item(self, item):
''' item of type entities.Item '''
if isinstance(item, Item):
item.order_id = self.order_id
self.items.append(item)
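# --- Illustrative usage (added sketch; Item is the entity class defined elsewhere in
# this module, and its constructor arguments are assumptions) ---
example_txn = Transaction()
example_txn.order_id = '1234'
example_txn.total = '19.99'
# example_txn.add_item(Item()) # add_item stamps the transaction's order_id onto the item
# example_txn.validate() # would raise ValidationError while the item list is empty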
class Visitor(object):
'''
You should serialize this object and store it in the user database to keep it
persistent for the same user permanently (similar to the "__umta" cookie of
the GA Javascript client).
Properties:
unique_id -- Unique user ID, will be part of the "__utma" cookie parameter
first_visit_time -- Time of the very first visit of this user, will be part of the "__utma" cookie parameter
previous_visit_time -- Time of the previous visit of this user, will be part of the "__utma" cookie parameter
current_visit_time -- Time of the current visit of this user, will be part of the "__utma" cookie parameter
visit_count -- Amount of total visits by this user, will be part of the "__utma" cookie parameter
ip_address -- IP Address of the end user, will be mapped to "utmip" parameter and "X-Forwarded-For" request header
user_agent -- User agent string of the end user, will be mapped to "User-Agent" request header
locale -- Locale string (country part optional) will be mapped to "utmul" parameter
flash_version -- Visitor's Flash version, will be maped to "utmfl" parameter
java_enabled -- Visitor's Java support, will be mapped to "utmje" parameter
screen_colour_depth -- Visitor's screen color depth, will be mapped to "utmsc" parameter
screen_resolution -- Visitor's screen resolution, will be mapped to "utmsr" parameter
'''
def __init__(self):
now = datetime.utcnow()
self.unique_id = None
self.first_visit_time = now
self.previous_visit_time = now
self.current_visit_time = now
self.visit_count = 1
self.ip_address = None
self.user_agent = None
self.locale = None
self.flash_version = None
self.java_enabled = None
self.screen_colour_depth = None
self.screen_resolution = None
def __setattr__(self, name, value):
if name == 'unique_id':
if value and (value < 0 or value > 0x7fffffff):
raise ValueError('Visitor unique ID has to be a 32-bit integer between 0 and 0x7fffffff')
object.__setattr__(self, name, value)
def __getattribute__(self, name):
if name == 'unique_id':
tmp = object.__getattribute__(self, name)
if tmp is None:
self.unique_id = self.generate_unique_id()
return object.__getattribute__(self, name)
def __getstate__(self):
state = self.__dict__
if state.get('user_agent') is None:
state['unique_id'] = self.generate_unique_id()
return state
def extract_from_utma(self, utma):
'''
Will extract information for the "unique_id", "first_visit_time", "previous_visit_time",
"current_visit_time" and "visit_count" properties from the given "__utma" cookie value.
'''
parts = utma.split('.')
if len(parts) != 6:
raise ValueError('The given "__utma" cookie value is invalid.')
self.unique_id = int(parts[1])
self.first_visit_time = utils.convert_ga_timestamp(parts[2])
self.previous_visit_time = utils.convert_ga_timestamp(parts[3])
self.current_visit_time = utils.convert_ga_timestamp(parts[4])
self.visit_count = int(parts[5])
return self
def extract_from_server_meta(self, meta):
'''
Will extract information for the "ip_address", "user_agent" and "locale"
properties from the given WSGI REQUEST META variable or equivalent.
'''
if 'REMOTE_ADDR' in meta and meta['REMOTE_ADDR']:
ip = None
for key in ('HTTP_X_FORWARDED_FOR', 'REMOTE_ADDR'):
if key in meta and not ip:
ips = meta.get(key, '').split(',')
ip = ips[-1].strip()
if not utils.is_valid_ip(ip):
ip = ''
if utils.is_private_ip(ip):
ip = ''
if ip:
self.ip_address = ip
if 'HTTP_USER_AGENT' in meta and meta['HTTP_USER_AGENT']:
self.user_agent = meta['HTTP_USER_AGENT']
if 'HTTP_ACCEPT_LANGUAGE' in meta and meta['HTTP_ACCEPT_LANGUAGE']:
user_locals = []
matched_locales = utils.validate_locale(meta['HTTP_ACCEPT_LANGUAGE'])
if matched_locales:
lang_lst = map((lambda x: x.replace('-', '_')), (i[1] for i in matched_locales))
quality_lst = map((lambda x: x and x or 1), (float(i[4] and i[4] or '0') for i in matched_locales))
lang_quality_map = map((lambda x, y: (x, y)), lang_lst, quality_lst)
user_locals = [x[0] for x in sorted(lang_quality_map, key=itemgetter(1), reverse=True)]
if user_locals:
self.locale = user_locals[0]
return self
def generate_hash(self):
'''Generates a hashed value from user-specific properties.'''
tmpstr = "%s%s%s" % (self.user_agent, self.screen_resolution, self.screen_colour_depth)
return utils.generate_hash(tmpstr)
def generate_unique_id(self):
'''Generates a unique user ID from the current user-specific properties.'''
return ((utils.get_32bit_random_num() ^ self.generate_hash()) & 0x7fffffff)
def add_session(self, session):
'''
Updates the "previousVisitTime", "currentVisitTime" and "visitCount"
fields based on the given session object.
'''
start_time = session.start_time
if start_time != self.current_visit_time:
self.previous_visit_time = self.current_visit_time
self.current_visit_time = start_time
self.visit_count = self.visit_count + 1
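# --- Illustrative usage (added sketch; the "__utma" value is made up but has the
# expected six dot-separated fields: domainhash.uniqueid.first.previous.current.count) ---
example_visitor = Visitor().extract_from_utma('173272373.1234567890.1360000000.1360001000.1360002000.5')
# unique_id, the three visit timestamps and visit_count are now populated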
| gpl-2.0 |
rolobio/sshm | sshm/main.py | 1 | 5215 | #! /usr/bin/env python3
"""
This module allows the console to use SSHM's functionality.
This module should only be run by the console!
"""
from __future__ import print_function
import sys
try: # pragma: no cover version specific
from lib import sshm
except ImportError: # pragma: no cover version specific
from sshm.lib import sshm
__all__ = ['main']
def get_argparse_args(args=None):
"""
Get the arguments passed to this script when it was run.
@param args: A list of arguments passed in the console.
@type args: list
@returns: A tuple containing (args, command, extra_args)
@rtype: tuple
"""
try: # pragma: no cover
from _info import __version__, __long_description__
except ImportError: # pragma: no cover
from sshm._info import __version__, __long_description__
import argparse
parser = argparse.ArgumentParser(
formatter_class=argparse.RawDescriptionHelpFormatter,
description=__long_description__)
parser.add_argument('servers', nargs='+')
parser.add_argument('command')
parser.add_argument('-s', '--sorted-output', action='store_true', default=False,
help='Sort the output by the URI of each instance. This will wait for all instances to finish before showing any output!')
parser.add_argument('-p', '--strip-whitespace', action='store_true', default=False,
help='Remove any whitespace surrounding the output of each instance.')
parser.add_argument('-d', '--disable-formatting', action='store_true', default=False,
help='Disable command formatting.')
parser.add_argument('-u', '--quiet', action='store_true', default=False,
help="Hide SSHM's server information on output (this implies sorted).")
parser.add_argument('-w', '--workers', type=int, default=20,
help="Limit the amount of concurrent SSH connections.")
parser.add_argument('--version', action='version', version='%(prog)s '+__version__)
args, extra_args = parser.parse_known_args(args=args)
# Move any servers that start with a - to extra_args
new_servers = []
for i in args.servers:
if i.startswith('-'):
extra_args.append(i)
else:
new_servers.append(i)
args.servers = new_servers
# If the command starts with a -, replace it with the last server and
# move the command to extra_args.
if args.command.startswith('-'):
extra_args.append(args.command)
args.command = args.servers.pop(-1)
if args.quiet:
args.sorted_output = True
return (args, args.command, extra_args)
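# --- Illustrative parse (added sketch; hosts and the ssh flag are made up) ---
# args, command, extra = get_argparse_args(['host1.example.com', 'host2.example.com',
# 'uptime', '-o', 'ConnectTimeout=5'])
# "command" would be 'uptime' and the unknown '-o ...' pair lands in "extra" for ssh.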
def _print_handling_newlines(uri, return_code, to_print, header='', strip_whitespace=False, quiet=False, file=sys.stdout):
"""
Print "to_print" to "file" with the formatting needed to represent it's data
properly.
"""
if strip_whitespace:
to_print = to_print.strip()
if to_print.count('\n') == 0:
sep = ' '
else:
sep = '\n'
output_str = 'sshm: {header}{uri}({return_code}):{sep}{to_print}'
if quiet:
output_str = '{to_print}'
print(output_str.format(header=header,
uri=uri,
return_code=return_code,
sep=sep,
to_print=to_print), file=file)
def main():
"""
Run SSHM using console provided arguments.
This should only be run using a console!
"""
import select
args, command, extra_arguments = get_argparse_args()
# Only provide stdin if there is data
r_list, _, _ = select.select([sys.stdin], [], [], 0)
if r_list:
stdin = r_list[0]
else:
stdin = None
# Perform the command on each server, print the results to stdout.
results = sshm(args.servers, command, extra_arguments, stdin, args.disable_formatting, args.workers)
# If a sorted output is requested, gather all results before output.
if args.sorted_output:
results = list(results)
results = sorted(results, key=lambda x: x['uri'])
exit_code = 0
for result in results:
exit_code = exit_code or result.get('return_code')
if result.get('stdout') is not None:
_print_handling_newlines(result['uri'],
result['return_code'],
result['stdout'],
strip_whitespace=args.strip_whitespace,
quiet=args.quiet,
)
if result.get('stderr'):
_print_handling_newlines(result['uri'],
result.get('return_code', ''),
result['stderr'],
'Error: ',
strip_whitespace=args.strip_whitespace,
quiet=args.quiet,
file=sys.stderr,
)
if result.get('traceback'):
_print_handling_newlines(result['uri'],
result.get('return_code', ''),
result['traceback'],
'Traceback: ',
strip_whitespace=args.strip_whitespace,
quiet=args.quiet,
file=sys.stderr,
)
# Exit with non-zero when there is a failure
sys.exit(exit_code)
if __name__ == '__main__':
main()
| gpl-2.0 |
mkuai/underwater | src/flow-monitor/examples/wifi-olsr-flowmon.py | 108 | 7439 | # -*- Mode: Python; -*-
# Copyright (c) 2009 INESC Porto
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation;
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# Authors: Gustavo Carneiro <gjc@inescporto.pt>
import sys
import ns.applications
import ns.core
import ns.flow_monitor
import ns.internet
import ns.mobility
import ns.network
import ns.olsr
import ns.wifi
try:
import ns.visualizer
except ImportError:
pass
DISTANCE = 100 # (m)
NUM_NODES_SIDE = 3
def main(argv):
cmd = ns.core.CommandLine()
cmd.NumNodesSide = None
cmd.AddValue("NumNodesSide", "Grid side number of nodes (total number of nodes will be this number squared)")
cmd.Results = None
cmd.AddValue("Results", "Write XML results to file")
cmd.Plot = None
cmd.AddValue("Plot", "Plot the results using the matplotlib python module")
cmd.Parse(argv)
wifi = ns.wifi.WifiHelper.Default()
wifiMac = ns.wifi.NqosWifiMacHelper.Default()
wifiPhy = ns.wifi.YansWifiPhyHelper.Default()
wifiChannel = ns.wifi.YansWifiChannelHelper.Default()
wifiPhy.SetChannel(wifiChannel.Create())
ssid = ns.wifi.Ssid("wifi-default")
wifi.SetRemoteStationManager("ns3::ArfWifiManager")
wifiMac.SetType ("ns3::AdhocWifiMac",
"Ssid", ns.wifi.SsidValue(ssid))
internet = ns.internet.InternetStackHelper()
list_routing = ns.internet.Ipv4ListRoutingHelper()
olsr_routing = ns.olsr.OlsrHelper()
static_routing = ns.internet.Ipv4StaticRoutingHelper()
list_routing.Add(static_routing, 0)
list_routing.Add(olsr_routing, 100)
internet.SetRoutingHelper(list_routing)
ipv4Addresses = ns.internet.Ipv4AddressHelper()
ipv4Addresses.SetBase(ns.network.Ipv4Address("10.0.0.0"), ns.network.Ipv4Mask("255.255.255.0"))
port = 9 # Discard port (RFC 863)
onOffHelper = ns.applications.OnOffHelper("ns3::UdpSocketFactory",
ns.network.Address(ns.network.InetSocketAddress(ns.network.Ipv4Address("10.0.0.1"), port)))
onOffHelper.SetAttribute("DataRate", ns.network.DataRateValue(ns.network.DataRate("100kbps")))
onOffHelper.SetAttribute("OnTime", ns.core.StringValue ("ns3::ConstantRandomVariable[Constant=1]"))
onOffHelper.SetAttribute("OffTime", ns.core.StringValue ("ns3::ConstantRandomVariable[Constant=0]"))
addresses = []
nodes = []
if cmd.NumNodesSide is None:
num_nodes_side = NUM_NODES_SIDE
else:
num_nodes_side = int(cmd.NumNodesSide)
for xi in range(num_nodes_side):
for yi in range(num_nodes_side):
node = ns.network.Node()
nodes.append(node)
internet.Install(ns.network.NodeContainer(node))
mobility = ns.mobility.ConstantPositionMobilityModel()
mobility.SetPosition(ns.core.Vector(xi*DISTANCE, yi*DISTANCE, 0))
node.AggregateObject(mobility)
devices = wifi.Install(wifiPhy, wifiMac, node)
ipv4_interfaces = ipv4Addresses.Assign(devices)
addresses.append(ipv4_interfaces.GetAddress(0))
for i, node in enumerate(nodes):
destaddr = addresses[(len(addresses) - 1 - i) % len(addresses)]
#print i, destaddr
onOffHelper.SetAttribute("Remote", ns.network.AddressValue(ns.network.InetSocketAddress(destaddr, port)))
app = onOffHelper.Install(ns.network.NodeContainer(node))
urv = ns.core.UniformRandomVariable()
app.Start(ns.core.Seconds(urv.GetValue(20, 30)))
#internet.EnablePcapAll("wifi-olsr")
flowmon_helper = ns.flow_monitor.FlowMonitorHelper()
#flowmon_helper.SetMonitorAttribute("StartTime", ns.core.TimeValue(ns.core.Seconds(31)))
monitor = flowmon_helper.InstallAll()
monitor = flowmon_helper.GetMonitor()
monitor.SetAttribute("DelayBinWidth", ns.core.DoubleValue(0.001))
monitor.SetAttribute("JitterBinWidth", ns.core.DoubleValue(0.001))
monitor.SetAttribute("PacketSizeBinWidth", ns.core.DoubleValue(20))
ns.core.Simulator.Stop(ns.core.Seconds(44.0))
ns.core.Simulator.Run()
def print_stats(os, st):
print >> os, " Tx Bytes: ", st.txBytes
print >> os, " Rx Bytes: ", st.rxBytes
print >> os, " Tx Packets: ", st.txPackets
print >> os, " Rx Packets: ", st.rxPackets
print >> os, " Lost Packets: ", st.lostPackets
if st.rxPackets > 0:
print >> os, " Mean{Delay}: ", (st.delaySum.GetSeconds() / st.rxPackets)
print >> os, " Mean{Jitter}: ", (st.jitterSum.GetSeconds() / (st.rxPackets-1))
print >> os, " Mean{Hop Count}: ", float(st.timesForwarded) / st.rxPackets + 1
if 0:
print >> os, "Delay Histogram"
for i in range(st.delayHistogram.GetNBins () ):
print >> os, " ",i,"(", st.delayHistogram.GetBinStart (i), "-", \
st.delayHistogram.GetBinEnd (i), "): ", st.delayHistogram.GetBinCount (i)
print >> os, "Jitter Histogram"
for i in range(st.jitterHistogram.GetNBins () ):
print >> os, " ",i,"(", st.jitterHistogram.GetBinStart (i), "-", \
st.jitterHistogram.GetBinEnd (i), "): ", st.jitterHistogram.GetBinCount (i)
print >> os, "PacketSize Histogram"
for i in range(st.packetSizeHistogram.GetNBins () ):
print >> os, " ",i,"(", st.packetSizeHistogram.GetBinStart (i), "-", \
st.packetSizeHistogram.GetBinEnd (i), "): ", st.packetSizeHistogram.GetBinCount (i)
for reason, drops in enumerate(st.packetsDropped):
print " Packets dropped by reason %i: %i" % (reason, drops)
#for reason, drops in enumerate(st.bytesDropped):
# print "Bytes dropped by reason %i: %i" % (reason, drops)
monitor.CheckForLostPackets()
classifier = flowmon_helper.GetClassifier()
if cmd.Results is None:
for flow_id, flow_stats in monitor.GetFlowStats():
t = classifier.FindFlow(flow_id)
proto = {6: 'TCP', 17: 'UDP'} [t.protocol]
print "FlowID: %i (%s %s/%s --> %s/%i)" % \
(flow_id, proto, t.sourceAddress, t.sourcePort, t.destinationAddress, t.destinationPort)
print_stats(sys.stdout, flow_stats)
else:
print monitor.SerializeToXmlFile(cmd.Results, True, True)
if cmd.Plot is not None:
import pylab
delays = []
for flow_id, flow_stats in monitor.GetFlowStats():
tupl = classifier.FindFlow(flow_id)
if tupl.protocol == 17 and tupl.sourcePort == 698:
continue
delays.append(flow_stats.delaySum.GetSeconds() / flow_stats.rxPackets)
pylab.hist(delays, 20)
pylab.xlabel("Delay (s)")
pylab.ylabel("Number of Flows")
pylab.show()
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv))
| gpl-2.0 |
MIPS/kernel-linux-mti | tools/perf/scripts/python/sched-migration.py | 11215 | 11670 | #!/usr/bin/python
#
# Cpu task migration overview toy
#
# Copyright (C) 2010 Frederic Weisbecker <fweisbec@gmail.com>
#
# perf script event handlers have been generated by perf script -g python
#
# This software is distributed under the terms of the GNU General
# Public License ("GPL") version 2 as published by the Free Software
# Foundation.
import os
import sys
from collections import defaultdict
from UserList import UserList
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
sys.path.append('scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from SchedGui import *
threads = { 0 : "idle"}
def thread_name(pid):
return "%s:%d" % (threads[pid], pid)
class RunqueueEventUnknown:
@staticmethod
def color():
return None
def __repr__(self):
return "unknown"
class RunqueueEventSleep:
@staticmethod
def color():
return (0, 0, 0xff)
def __init__(self, sleeper):
self.sleeper = sleeper
def __repr__(self):
return "%s gone to sleep" % thread_name(self.sleeper)
class RunqueueEventWakeup:
@staticmethod
def color():
return (0xff, 0xff, 0)
def __init__(self, wakee):
self.wakee = wakee
def __repr__(self):
return "%s woke up" % thread_name(self.wakee)
class RunqueueEventFork:
@staticmethod
def color():
return (0, 0xff, 0)
def __init__(self, child):
self.child = child
def __repr__(self):
return "new forked task %s" % thread_name(self.child)
class RunqueueMigrateIn:
@staticmethod
def color():
return (0, 0xf0, 0xff)
def __init__(self, new):
self.new = new
def __repr__(self):
return "task migrated in %s" % thread_name(self.new)
class RunqueueMigrateOut:
@staticmethod
def color():
return (0xff, 0, 0xff)
def __init__(self, old):
self.old = old
def __repr__(self):
return "task migrated out %s" % thread_name(self.old)
class RunqueueSnapshot:
def __init__(self, tasks = [0], event = RunqueueEventUnknown()):
self.tasks = tuple(tasks)
self.event = event
def sched_switch(self, prev, prev_state, next):
event = RunqueueEventUnknown()
if taskState(prev_state) == "R" and next in self.tasks \
and prev in self.tasks:
return self
if taskState(prev_state) != "R":
event = RunqueueEventSleep(prev)
next_tasks = list(self.tasks[:])
if prev in self.tasks:
if taskState(prev_state) != "R":
next_tasks.remove(prev)
elif taskState(prev_state) == "R":
next_tasks.append(prev)
if next not in next_tasks:
next_tasks.append(next)
return RunqueueSnapshot(next_tasks, event)
def migrate_out(self, old):
if old not in self.tasks:
return self
next_tasks = [task for task in self.tasks if task != old]
return RunqueueSnapshot(next_tasks, RunqueueMigrateOut(old))
def __migrate_in(self, new, event):
if new in self.tasks:
self.event = event
return self
next_tasks = self.tasks[:] + tuple([new])
return RunqueueSnapshot(next_tasks, event)
def migrate_in(self, new):
return self.__migrate_in(new, RunqueueMigrateIn(new))
def wake_up(self, new):
return self.__migrate_in(new, RunqueueEventWakeup(new))
def wake_up_new(self, new):
return self.__migrate_in(new, RunqueueEventFork(new))
def load(self):
""" Provide the number of tasks on the runqueue.
Don't count idle"""
return len(self.tasks) - 1
def __repr__(self):
ret = self.tasks.__repr__()
ret += self.origin_tostring()
return ret
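# --- Illustrative transitions (added sketch; pid 42 is made up) ---
# rq = RunqueueSnapshot() # only the idle task (pid 0), load() == 0
# rq = rq.wake_up(42) # tasks become (0, 42), event is RunqueueEventWakeup
# rq.load() # == 1, since idle is never counted
# rq = rq.migrate_out(42) # back to (0,), event is RunqueueMigrateOut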
class TimeSlice:
def __init__(self, start, prev):
self.start = start
self.prev = prev
self.end = start
# cpus that triggered the event
self.event_cpus = []
if prev is not None:
self.total_load = prev.total_load
self.rqs = prev.rqs.copy()
else:
self.rqs = defaultdict(RunqueueSnapshot)
self.total_load = 0
def __update_total_load(self, old_rq, new_rq):
diff = new_rq.load() - old_rq.load()
self.total_load += diff
def sched_switch(self, ts_list, prev, prev_state, next, cpu):
old_rq = self.prev.rqs[cpu]
new_rq = old_rq.sched_switch(prev, prev_state, next)
if old_rq is new_rq:
return
self.rqs[cpu] = new_rq
self.__update_total_load(old_rq, new_rq)
ts_list.append(self)
self.event_cpus = [cpu]
def migrate(self, ts_list, new, old_cpu, new_cpu):
if old_cpu == new_cpu:
return
old_rq = self.prev.rqs[old_cpu]
out_rq = old_rq.migrate_out(new)
self.rqs[old_cpu] = out_rq
self.__update_total_load(old_rq, out_rq)
new_rq = self.prev.rqs[new_cpu]
in_rq = new_rq.migrate_in(new)
self.rqs[new_cpu] = in_rq
self.__update_total_load(new_rq, in_rq)
ts_list.append(self)
if old_rq is not out_rq:
self.event_cpus.append(old_cpu)
self.event_cpus.append(new_cpu)
def wake_up(self, ts_list, pid, cpu, fork):
old_rq = self.prev.rqs[cpu]
if fork:
new_rq = old_rq.wake_up_new(pid)
else:
new_rq = old_rq.wake_up(pid)
if new_rq is old_rq:
return
self.rqs[cpu] = new_rq
self.__update_total_load(old_rq, new_rq)
ts_list.append(self)
self.event_cpus = [cpu]
def next(self, t):
self.end = t
return TimeSlice(t, self)
class TimeSliceList(UserList):
def __init__(self, arg = []):
self.data = arg
def get_time_slice(self, ts):
if len(self.data) == 0:
slice = TimeSlice(ts, TimeSlice(-1, None))
else:
slice = self.data[-1].next(ts)
return slice
def find_time_slice(self, ts):
start = 0
end = len(self.data)
found = -1
searching = True
while searching:
if start == end or start == end - 1:
searching = False
i = (end + start) / 2
if self.data[i].start <= ts and self.data[i].end >= ts:
found = i
end = i
continue
if self.data[i].end < ts:
start = i
elif self.data[i].start > ts:
end = i
return found
def set_root_win(self, win):
self.root_win = win
def mouse_down(self, cpu, t):
idx = self.find_time_slice(t)
if idx == -1:
return
ts = self[idx]
rq = ts.rqs[cpu]
raw = "CPU: %d\n" % cpu
raw += "Last event : %s\n" % rq.event.__repr__()
raw += "Timestamp : %d.%06d\n" % (ts.start / (10 ** 9), (ts.start % (10 ** 9)) / 1000)
raw += "Duration : %6d us\n" % ((ts.end - ts.start) / (10 ** 6))
raw += "Load = %d\n" % rq.load()
for t in rq.tasks:
raw += "%s \n" % thread_name(t)
self.root_win.update_summary(raw)
def update_rectangle_cpu(self, slice, cpu):
rq = slice.rqs[cpu]
if slice.total_load != 0:
load_rate = rq.load() / float(slice.total_load)
else:
load_rate = 0
red_power = int(0xff - (0xff * load_rate))
color = (0xff, red_power, red_power)
top_color = None
if cpu in slice.event_cpus:
top_color = rq.event.color()
self.root_win.paint_rectangle_zone(cpu, color, top_color, slice.start, slice.end)
def fill_zone(self, start, end):
i = self.find_time_slice(start)
if i == -1:
return
for i in xrange(i, len(self.data)):
timeslice = self.data[i]
if timeslice.start > end:
return
for cpu in timeslice.rqs:
self.update_rectangle_cpu(timeslice, cpu)
def interval(self):
if len(self.data) == 0:
return (0, 0)
return (self.data[0].start, self.data[-1].end)
def nr_rectangles(self):
last_ts = self.data[-1]
max_cpu = 0
for cpu in last_ts.rqs:
if cpu > max_cpu:
max_cpu = cpu
return max_cpu
class SchedEventProxy:
def __init__(self):
self.current_tsk = defaultdict(lambda : -1)
self.timeslices = TimeSliceList()
def sched_switch(self, headers, prev_comm, prev_pid, prev_prio, prev_state,
next_comm, next_pid, next_prio):
""" Ensure the task we sched out this cpu is really the one
we logged. Otherwise we may have missed traces """
on_cpu_task = self.current_tsk[headers.cpu]
if on_cpu_task != -1 and on_cpu_task != prev_pid:
print "Sched switch event rejected ts: %s cpu: %d prev: %s(%d) next: %s(%d)" % \
(headers.ts_format(), headers.cpu, prev_comm, prev_pid, next_comm, next_pid)
threads[prev_pid] = prev_comm
threads[next_pid] = next_comm
self.current_tsk[headers.cpu] = next_pid
ts = self.timeslices.get_time_slice(headers.ts())
ts.sched_switch(self.timeslices, prev_pid, prev_state, next_pid, headers.cpu)
def migrate(self, headers, pid, prio, orig_cpu, dest_cpu):
ts = self.timeslices.get_time_slice(headers.ts())
ts.migrate(self.timeslices, pid, orig_cpu, dest_cpu)
def wake_up(self, headers, comm, pid, success, target_cpu, fork):
if success == 0:
return
ts = self.timeslices.get_time_slice(headers.ts())
ts.wake_up(self.timeslices, pid, target_cpu, fork)
def trace_begin():
global parser
parser = SchedEventProxy()
def trace_end():
app = wx.App(False)
timeslices = parser.timeslices
frame = RootFrame(timeslices, "Migration")
app.MainLoop()
def sched__sched_stat_runtime(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid, runtime, vruntime):
pass
def sched__sched_stat_iowait(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid, delay):
pass
def sched__sched_stat_sleep(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid, delay):
pass
def sched__sched_stat_wait(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid, delay):
pass
def sched__sched_process_fork(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
parent_comm, parent_pid, child_comm, child_pid):
pass
def sched__sched_process_wait(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid, prio):
pass
def sched__sched_process_exit(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid, prio):
pass
def sched__sched_process_free(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid, prio):
pass
def sched__sched_migrate_task(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid, prio, orig_cpu,
dest_cpu):
headers = EventHeaders(common_cpu, common_secs, common_nsecs,
common_pid, common_comm)
parser.migrate(headers, pid, prio, orig_cpu, dest_cpu)
def sched__sched_switch(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
prev_comm, prev_pid, prev_prio, prev_state,
next_comm, next_pid, next_prio):
headers = EventHeaders(common_cpu, common_secs, common_nsecs,
common_pid, common_comm)
parser.sched_switch(headers, prev_comm, prev_pid, prev_prio, prev_state,
next_comm, next_pid, next_prio)
def sched__sched_wakeup_new(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid, prio, success,
target_cpu):
headers = EventHeaders(common_cpu, common_secs, common_nsecs,
common_pid, common_comm)
parser.wake_up(headers, comm, pid, success, target_cpu, 1)
def sched__sched_wakeup(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid, prio, success,
target_cpu):
headers = EventHeaders(common_cpu, common_secs, common_nsecs,
common_pid, common_comm)
parser.wake_up(headers, comm, pid, success, target_cpu, 0)
def sched__sched_wait_task(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid, prio):
pass
def sched__sched_kthread_stop_ret(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
ret):
pass
def sched__sched_kthread_stop(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid):
pass
def trace_unhandled(event_name, context, common_cpu, common_secs, common_nsecs,
common_pid, common_comm):
pass
| gpl-2.0 |
lgiommi/root | interpreter/llvm/src/tools/clang/tools/scan-build-py/libscanbuild/analyze.py | 22 | 20649 | # -*- coding: utf-8 -*-
# The LLVM Compiler Infrastructure
#
# This file is distributed under the University of Illinois Open Source
# License. See LICENSE.TXT for details.
""" This module implements the 'scan-build' command API.
To run the static analyzer against a build is done in multiple steps:
-- Intercept: capture the compilation command during the build,
-- Analyze: run the analyzer against the captured commands,
-- Report: create a cover report from the analyzer outputs. """
import sys
import re
import os
import os.path
import json
import argparse
import logging
import subprocess
import multiprocessing
from libscanbuild import initialize_logging, tempdir, command_entry_point
from libscanbuild.runner import run
from libscanbuild.intercept import capture
from libscanbuild.report import report_directory, document
from libscanbuild.clang import get_checkers
from libscanbuild.compilation import split_command
__all__ = ['analyze_build_main', 'analyze_build_wrapper']
COMPILER_WRAPPER_CC = 'analyze-cc'
COMPILER_WRAPPER_CXX = 'analyze-c++'
@command_entry_point
def analyze_build_main(bin_dir, from_build_command):
""" Entry point for 'analyze-build' and 'scan-build'. """
parser = create_parser(from_build_command)
args = parser.parse_args()
validate(parser, args, from_build_command)
# setup logging
initialize_logging(args.verbose)
logging.debug('Parsed arguments: %s', args)
with report_directory(args.output, args.keep_empty) as target_dir:
if not from_build_command:
# run analyzer only and generate cover report
run_analyzer(args, target_dir)
number_of_bugs = document(args, target_dir, True)
return number_of_bugs if args.status_bugs else 0
elif args.intercept_first:
# run build command and capture compiler executions
exit_code = capture(args, bin_dir)
# next step to run the analyzer against the captured commands
if need_analyzer(args.build):
run_analyzer(args, target_dir)
# cover report generation and bug counting
number_of_bugs = document(args, target_dir, True)
# remove the compilation database when it was not requested
if os.path.exists(args.cdb):
os.unlink(args.cdb)
# set exit status as it was requested
return number_of_bugs if args.status_bugs else exit_code
else:
return exit_code
else:
# run the build command with compiler wrappers which
# execute the analyzer too. (interposition)
environment = setup_environment(args, target_dir, bin_dir)
logging.debug('run build in environment: %s', environment)
exit_code = subprocess.call(args.build, env=environment)
logging.debug('build finished with exit code: %d', exit_code)
# cover report generation and bug counting
number_of_bugs = document(args, target_dir, False)
# set exit status as it was requested
return number_of_bugs if args.status_bugs else exit_code
def need_analyzer(args):
""" Check the intent of the build command.
When the static analyzer is run against a project's configure step, it should
be silent; there is no need to run the analyzer or generate a report.
Running `scan-build` against the configure step might be necessary when
compiler wrappers are used. That's the moment when the build setup checks
the compiler and captures its location for the build process. """
return len(args) and not re.search('configure|autogen', args[0])
def run_analyzer(args, output_dir):
""" Runs the analyzer against the given compilation database. """
def exclude(filename):
""" Return true when any excluded directory prefix the filename. """
return any(re.match(r'^' + directory, filename)
for directory in args.excludes)
consts = {
'clang': args.clang,
'output_dir': output_dir,
'output_format': args.output_format,
'output_failures': args.output_failures,
'direct_args': analyzer_params(args),
'force_debug': args.force_debug
}
logging.debug('run analyzer against compilation database')
with open(args.cdb, 'r') as handle:
generator = (dict(cmd, **consts)
for cmd in json.load(handle) if not exclude(cmd['file']))
# when verbose output requested execute sequentially
pool = multiprocessing.Pool(1 if args.verbose > 2 else None)
for current in pool.imap_unordered(run, generator):
if current is not None:
# display error message from the static analyzer
for line in current['error_output']:
logging.info(line.rstrip())
pool.close()
pool.join()
def setup_environment(args, destination, bin_dir):
""" Set up environment for build command to interpose compiler wrapper. """
environment = dict(os.environ)
environment.update({
'CC': os.path.join(bin_dir, COMPILER_WRAPPER_CC),
'CXX': os.path.join(bin_dir, COMPILER_WRAPPER_CXX),
'ANALYZE_BUILD_CC': args.cc,
'ANALYZE_BUILD_CXX': args.cxx,
'ANALYZE_BUILD_CLANG': args.clang if need_analyzer(args.build) else '',
'ANALYZE_BUILD_VERBOSE': 'DEBUG' if args.verbose > 2 else 'WARNING',
'ANALYZE_BUILD_REPORT_DIR': destination,
'ANALYZE_BUILD_REPORT_FORMAT': args.output_format,
'ANALYZE_BUILD_REPORT_FAILURES': 'yes' if args.output_failures else '',
'ANALYZE_BUILD_PARAMETERS': ' '.join(analyzer_params(args)),
'ANALYZE_BUILD_FORCE_DEBUG': 'yes' if args.force_debug else ''
})
return environment
def analyze_build_wrapper(cplusplus):
""" Entry point for `analyze-cc` and `analyze-c++` compiler wrappers. """
# initialize wrapper logging
logging.basicConfig(format='analyze: %(levelname)s: %(message)s',
level=os.getenv('ANALYZE_BUILD_VERBOSE', 'INFO'))
# execute with real compiler
compiler = os.getenv('ANALYZE_BUILD_CXX', 'c++') if cplusplus \
else os.getenv('ANALYZE_BUILD_CC', 'cc')
compilation = [compiler] + sys.argv[1:]
logging.info('execute compiler: %s', compilation)
result = subprocess.call(compilation)
# exit when it fails, ...
if result or not os.getenv('ANALYZE_BUILD_CLANG'):
return result
# ... and run the analyzer if all went well.
try:
# check is it a compilation
compilation = split_command(sys.argv)
if compilation is None:
return result
# collect the needed parameters from environment, crash when missing
parameters = {
'clang': os.getenv('ANALYZE_BUILD_CLANG'),
'output_dir': os.getenv('ANALYZE_BUILD_REPORT_DIR'),
'output_format': os.getenv('ANALYZE_BUILD_REPORT_FORMAT'),
'output_failures': os.getenv('ANALYZE_BUILD_REPORT_FAILURES'),
'direct_args': os.getenv('ANALYZE_BUILD_PARAMETERS',
'').split(' '),
'force_debug': os.getenv('ANALYZE_BUILD_FORCE_DEBUG'),
'directory': os.getcwd(),
'command': [sys.argv[0], '-c'] + compilation.flags
}
# call static analyzer against the compilation
for source in compilation.files:
parameters.update({'file': source})
logging.debug('analyzer parameters %s', parameters)
current = run(parameters)
# display error message from the static analyzer
if current is not None:
for line in current['error_output']:
logging.info(line.rstrip())
except Exception:
logging.exception("run analyzer inside compiler wrapper failed.")
return result
def analyzer_params(args):
""" A group of command line arguments can mapped to command
line arguments of the analyzer. This method generates those. """
def prefix_with(constant, pieces):
""" From a sequence create another sequence where every second element
is from the original sequence and the odd elements are the prefix.
eg.: prefix_with(0, [1,2,3]) creates [0, 1, 0, 2, 0, 3] """
return [elem for piece in pieces for elem in [constant, piece]]
result = []
if args.store_model:
result.append('-analyzer-store={0}'.format(args.store_model))
if args.constraints_model:
result.append('-analyzer-constraints={0}'.format(
args.constraints_model))
if args.internal_stats:
result.append('-analyzer-stats')
if args.analyze_headers:
result.append('-analyzer-opt-analyze-headers')
if args.stats:
result.append('-analyzer-checker=debug.Stats')
if args.maxloop:
result.extend(['-analyzer-max-loop', str(args.maxloop)])
if args.output_format:
result.append('-analyzer-output={0}'.format(args.output_format))
if args.analyzer_config:
result.append(args.analyzer_config)
if args.verbose >= 4:
result.append('-analyzer-display-progress')
if args.plugins:
result.extend(prefix_with('-load', args.plugins))
if args.enable_checker:
checkers = ','.join(args.enable_checker)
result.extend(['-analyzer-checker', checkers])
if args.disable_checker:
checkers = ','.join(args.disable_checker)
result.extend(['-analyzer-disable-checker', checkers])
if os.getenv('UBIVIZ'):
result.append('-analyzer-viz-egraph-ubigraph')
return prefix_with('-Xclang', result)
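# --- Illustrative output (added sketch; flag values are made up) ---
# With args.store_model == 'region' and args.maxloop == 4 the returned list would
# include: ['-Xclang', '-analyzer-store=region', '-Xclang', '-analyzer-max-loop',
# '-Xclang', '4', ...] -- every analyzer flag is prefixed so clang forwards it.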
def print_active_checkers(checkers):
""" Print active checkers to stdout. """
for name in sorted(name for name, (_, active) in checkers.items()
if active):
print(name)
def print_checkers(checkers):
""" Print verbose checker help to stdout. """
print('')
print('available checkers:')
print('')
for name in sorted(checkers.keys()):
description, active = checkers[name]
prefix = '+' if active else ' '
if len(name) > 30:
print(' {0} {1}'.format(prefix, name))
print(' ' * 35 + description)
else:
print(' {0} {1: <30} {2}'.format(prefix, name, description))
print('')
print('NOTE: "+" indicates that an analysis is enabled by default.')
print('')
def validate(parser, args, from_build_command):
""" Validation done by the parser itself, but semantic check still
needs to be done. This method is doing that. """
if args.help_checkers_verbose:
print_checkers(get_checkers(args.clang, args.plugins))
parser.exit()
elif args.help_checkers:
print_active_checkers(get_checkers(args.clang, args.plugins))
parser.exit()
if from_build_command and not args.build:
parser.error('missing build command')
def create_parser(from_build_command):
""" Command line argument parser factory method. """
parser = argparse.ArgumentParser(
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument(
'--verbose', '-v',
action='count',
default=0,
help="""Enable verbose output from '%(prog)s'. A second and third
flag increases verbosity.""")
parser.add_argument(
'--override-compiler',
action='store_true',
help="""Always resort to the compiler wrapper even when better
interposition methods are available.""")
parser.add_argument(
'--intercept-first',
action='store_true',
help="""Run the build commands only, build a compilation database,
then run the static analyzer afterwards.
Generally speaking it has better coverage of build commands.
With '--override-compiler' it uses the compiler wrapper, but does
not run the analyzer until the build is finished. """)
parser.add_argument(
'--cdb',
metavar='<file>',
default="compile_commands.json",
help="""The JSON compilation database.""")
parser.add_argument(
'--output', '-o',
metavar='<path>',
default=tempdir(),
help="""Specifies the output directory for analyzer reports.
Subdirectory will be created if default directory is targeted.
""")
parser.add_argument(
'--status-bugs',
action='store_true',
help="""By default, the exit status of '%(prog)s' is the same as the
executed build command. Specifying this option causes the exit
status of '%(prog)s' to be non zero if it found potential bugs
and zero otherwise.""")
parser.add_argument(
'--html-title',
metavar='<title>',
help="""Specify the title used on generated HTML pages.
If not specified, a default title will be used.""")
parser.add_argument(
'--analyze-headers',
action='store_true',
help="""Also analyze functions in #included files. By default, such
functions are skipped unless they are called by functions
within the main source file.""")
format_group = parser.add_mutually_exclusive_group()
format_group.add_argument(
'--plist', '-plist',
dest='output_format',
const='plist',
default='html',
action='store_const',
help="""This option outputs the results as a set of .plist files.""")
format_group.add_argument(
'--plist-html', '-plist-html',
dest='output_format',
const='plist-html',
default='html',
action='store_const',
help="""This option outputs the results as a set of .html and .plist
files.""")
# TODO: implement '-view '
advanced = parser.add_argument_group('advanced options')
advanced.add_argument(
'--keep-empty',
action='store_true',
help="""Don't remove the build results directory even if no issues
were reported.""")
advanced.add_argument(
'--no-failure-reports', '-no-failure-reports',
dest='output_failures',
action='store_false',
help="""Do not create a 'failures' subdirectory that includes analyzer
crash reports and preprocessed source files.""")
advanced.add_argument(
'--stats', '-stats',
action='store_true',
help="""Generates visitation statistics for the project being analyzed.
""")
advanced.add_argument(
'--internal-stats',
action='store_true',
help="""Generate internal analyzer statistics.""")
advanced.add_argument(
'--maxloop', '-maxloop',
metavar='<loop count>',
type=int,
help="""Specifiy the number of times a block can be visited before
giving up. Increase for more comprehensive coverage at a cost
of speed.""")
advanced.add_argument(
'--store', '-store',
metavar='<model>',
dest='store_model',
choices=['region', 'basic'],
help="""Specify the store model used by the analyzer.
'region' specifies a field-sensitive store model.
'basic' is far less precise but can analyze code more
quickly. 'basic' was the default store model for
checker-0.221 and earlier.""")
advanced.add_argument(
'--constraints', '-constraints',
metavar='<model>',
dest='constraints_model',
choices=['range', 'basic'],
help="""Specify the contraint engine used by the analyzer. Specifying
'basic' uses a simpler, less powerful constraint model used by
checker-0.160 and earlier.""")
advanced.add_argument(
'--use-analyzer',
metavar='<path>',
dest='clang',
default='clang',
help="""'%(prog)s' uses the 'clang' executable relative to itself for
static analysis. One can override this behavior with this
option by using the 'clang' packaged with Xcode (on OS X) or
from the PATH.""")
advanced.add_argument(
'--use-cc',
metavar='<path>',
dest='cc',
default='cc',
help="""When '%(prog)s' analyzes a project by interposing a "fake
compiler", which executes a real compiler for compilation and
do other tasks (to run the static analyzer or just record the
compiler invocation). Because of this interposing, '%(prog)s'
does not know what compiler your project normally uses.
Instead, it simply overrides the CC environment variable, and
guesses your default compiler.
If you need '%(prog)s' to use a specific compiler for
*compilation* then you can use this option to specify a path
to that compiler.""")
advanced.add_argument(
'--use-c++',
metavar='<path>',
dest='cxx',
default='c++',
help="""This is the same as "--use-cc" but for C++ code.""")
advanced.add_argument(
'--analyzer-config', '-analyzer-config',
metavar='<options>',
help="""Provide options to pass through to the analyzer's
-analyzer-config flag. Several options are separated with
comma: 'key1=val1,key2=val2'
Available options:
stable-report-filename=true or false (default)
Switch the page naming to:
report-<filename>-<function/method name>-<id>.html
instead of report-XXXXXX.html""")
advanced.add_argument(
'--exclude',
metavar='<directory>',
dest='excludes',
action='append',
default=[],
help="""Do not run static analyzer against files found in this
directory. (You can specify this option multiple times.)
Could be usefull when project contains 3rd party libraries.
The directory path shall be absolute path as file names in
the compilation database.""")
advanced.add_argument(
'--force-analyze-debug-code',
dest='force_debug',
action='store_true',
help="""Tells analyzer to enable assertions in code even if they were
disabled during compilation, enabling more precise results.""")
plugins = parser.add_argument_group('checker options')
plugins.add_argument(
'--load-plugin', '-load-plugin',
metavar='<plugin library>',
dest='plugins',
action='append',
help="""Loading external checkers using the clang plugin interface.""")
plugins.add_argument(
'--enable-checker', '-enable-checker',
metavar='<checker name>',
action=AppendCommaSeparated,
help="""Enable specific checker.""")
plugins.add_argument(
'--disable-checker', '-disable-checker',
metavar='<checker name>',
action=AppendCommaSeparated,
help="""Disable specific checker.""")
plugins.add_argument(
'--help-checkers',
action='store_true',
help="""A default group of checkers is run unless explicitly disabled.
Exactly which checkers constitute the default group is a
function of the operating system in use. These can be printed
with this flag.""")
plugins.add_argument(
'--help-checkers-verbose',
action='store_true',
help="""Print all available checkers and mark the enabled ones.""")
if from_build_command:
parser.add_argument(
dest='build',
nargs=argparse.REMAINDER,
help="""Command to run.""")
return parser
class AppendCommaSeparated(argparse.Action):
""" argparse Action class to support multiple comma separated lists. """
def __call__(self, __parser, namespace, values, __option_string):
# getattr(obj, attr, default) does not really return default but None
if getattr(namespace, self.dest, None) is None:
setattr(namespace, self.dest, [])
# once it's fixed we can use as expected
actual = getattr(namespace, self.dest)
actual.extend(values.split(','))
setattr(namespace, self.dest, actual)
| lgpl-2.1 |
sw-irou/flasktest | lib/mongoengine/django/mongo_auth/models.py | 3 | 3378 | from django.conf import settings
from django.contrib.auth.models import UserManager
from django.core.exceptions import ImproperlyConfigured
from django.db import models
from django.utils.importlib import import_module
from django.utils.translation import ugettext_lazy as _
__all__ = (
'get_user_document',
)
MONGOENGINE_USER_DOCUMENT = getattr(
settings, 'MONGOENGINE_USER_DOCUMENT', 'mongoengine.django.auth.User')
def get_user_document():
"""Get the user document class used for authentication.
This is the class defined in settings.MONGOENGINE_USER_DOCUMENT, which
defaults to `mongoengine.django.auth.User`.
"""
name = MONGOENGINE_USER_DOCUMENT
dot = name.rindex('.')
module = import_module(name[:dot])
return getattr(module, name[dot + 1:])
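# --- Illustrative override (added sketch; the dotted path is made up) ---
# In settings.py:
# MONGOENGINE_USER_DOCUMENT = 'myapp.documents.CustomUser'
# get_user_document() then imports "myapp.documents" and returns its CustomUser class.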
class MongoUserManager(UserManager):
"""A User manager wich allows the use of MongoEngine documents in Django.
To use the manager, you must tell django.contrib.auth to use MongoUser as
the user model. In your settings.py, you need:
INSTALLED_APPS = (
...
'django.contrib.auth',
'mongoengine.django.mongo_auth',
...
)
AUTH_USER_MODEL = 'mongo_auth.MongoUser'
Django will use the model object to access the custom Manager, which will
replace the original queryset with MongoEngine querysets.
By default, mongoengine.django.auth.User will be used to store users. You
can specify another document class in MONGOENGINE_USER_DOCUMENT in your
settings.py.
The User Document class has the same requirements as a standard custom user
model: https://docs.djangoproject.com/en/dev/topics/auth/customizing/
In particular, the User Document class must define USERNAME_FIELD and
REQUIRED_FIELDS.
`AUTH_USER_MODEL` has been added in Django 1.5.
"""
def contribute_to_class(self, model, name):
super(MongoUserManager, self).contribute_to_class(model, name)
self.dj_model = self.model
self.model = get_user_document()
self.dj_model.USERNAME_FIELD = self.model.USERNAME_FIELD
username = models.CharField(_('username'), max_length=30, unique=True)
username.contribute_to_class(self.dj_model, self.dj_model.USERNAME_FIELD)
self.dj_model.REQUIRED_FIELDS = self.model.REQUIRED_FIELDS
for name in self.dj_model.REQUIRED_FIELDS:
field = models.CharField(_(name), max_length=30)
field.contribute_to_class(self.dj_model, name)
def get(self, *args, **kwargs):
try:
return self.get_query_set().get(*args, **kwargs)
except self.model.DoesNotExist:
# ModelBackend expects this exception
raise self.dj_model.DoesNotExist
@property
def db(self):
raise NotImplementedError
def get_empty_query_set(self):
return self.model.objects.none()
def get_query_set(self):
return self.model.objects
class MongoUser(models.Model):
""""Dummy user model for Django.
MongoUser is used to replace Django's UserManager with MongoUserManager.
The actual user document class is mongoengine.django.auth.User or any
other document class specified in MONGOENGINE_USER_DOCUMENT.
To get the user document class, use `get_user_document()`.
"""
objects = MongoUserManager()
| bsd-3-clause |
simonsdave/clair-database | clair_cicd/assessor.py | 2 | 2160 | import logging
_logger = logging.getLogger(__name__)
class VulnerabilitiesRiskAssessor(object):
def __init__(self, whitelist, vulnerabilities):
object.__init__(self)
self.whitelist = whitelist
self.vulnerabilities = vulnerabilities
def assess(self):
"""Returns ```True``` if the risk is deemed acceptable
otherwise returns ```False```.
"""
_logger.info('Assessment starts')
for vulnerability in self.vulnerabilities:
if not self._assess_vulnerability(vulnerability):
_logger.info('Assessment ends - fail')
return False
_logger.info('Assessment ends - pass')
return True
def _assess_vulnerability(self, vulnerability):
"""Returns ```True``` if the risk is deemed acceptable for ```vulnerability```
otherwise returns ```False```.
"""
_logger.info('Assessing vulnerability %s - start', vulnerability)
rv = self.__assess_vulnerability(vulnerability)
_logger.info('Assessing vulnerability %s - finish', vulnerability)
return rv
def __assess_vulnerability(self, vulnerability):
"""Returns ```True``` if the risk is deemed acceptable for ```vulnerability```
otherwise returns ```False```.
"""
if vulnerability.cve_id in self.whitelist.vulnerabilities_by_cve_id:
_logger.info('Vulnerability %s in whitelist - pass', vulnerability)
return True
if self.whitelist.ignore_severities_at_or_below < vulnerability.severity:
_logger.info(
'Vulnerability %s @ severity %s greater than whitelist severity @ %s - fail',
vulnerability,
vulnerability.severity,
self.whitelist.ignore_severities_at_or_below)
return False
else:
_logger.info(
'Vulnerability %s @ severity %s less than or equal to whitelist severity @ %s - pass',
vulnerability,
vulnerability.severity,
self.whitelist.ignore_severities_at_or_below)
return True
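# --- Illustrative usage (added sketch; the whitelist and vulnerabilities objects are
# hypothetical instances of the classes defined elsewhere in this package) ---
# assessor = VulnerabilitiesRiskAssessor(whitelist, vulnerabilities)
# if not assessor.assess():
# raise SystemExit('vulnerability risk deemed unacceptable')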
| mit |
jmathai/elodie | elodie/tests/config_test.py | 1 | 3912 | from __future__ import absolute_import
# Project imports
import os
import sys
import unittest
from mock import patch
from tempfile import gettempdir
sys.path.insert(0, os.path.abspath(os.path.dirname(os.path.dirname(os.path.dirname(os.path.realpath(__file__))))))
from elodie import constants
from elodie.config import load_config, load_plugin_config
@patch('elodie.config.config_file', '%s/config.ini-singleton-success' % gettempdir())
def test_load_config_singleton_success():
with open('%s/config.ini-singleton-success' % gettempdir(), 'w') as f:
f.write("""
[MapQuest]
key=your-api-key-goes-here
prefer_english_names=False
""")
if hasattr(load_config, 'config'):
del load_config.config
config = load_config()
assert config['MapQuest']['key'] == 'your-api-key-goes-here', config.get('MapQuest', 'key')
config.set('MapQuest', 'key', 'new-value')
config = load_config()
if hasattr(load_config, 'config'):
del load_config.config
assert config['MapQuest']['key'] == 'new-value', config.get('MapQuest', 'key')
@patch('elodie.config.config_file', '%s/config.ini-does-not-exist' % gettempdir())
def test_load_config_singleton_no_file():
if hasattr(load_config, 'config'):
del load_config.config
config = load_config()
if hasattr(load_config, 'config'):
del load_config.config
assert config == {}, config
@patch('elodie.config.config_file', '%s/config.ini-load-plugin-config-unset-backwards-compat' % gettempdir())
def test_load_plugin_config_unset_backwards_compat():
with open('%s/config.ini-load-plugin-config-unset-backwards-compat' % gettempdir(), 'w') as f:
f.write("""
""")
if hasattr(load_config, 'config'):
del load_config.config
plugins = load_plugin_config()
if hasattr(load_config, 'config'):
del load_config.config
assert plugins == [], plugins
@patch('elodie.config.config_file', '%s/config.ini-load-plugin-config-exists-not-set' % gettempdir())
def test_load_plugin_config_exists_not_set():
with open('%s/config.ini-load-plugin-config-exists-not-set' % gettempdir(), 'w') as f:
f.write("""
[Plugins]
""")
if hasattr(load_config, 'config'):
del load_config.config
plugins = load_plugin_config()
if hasattr(load_config, 'config'):
del load_config.config
assert plugins == [], plugins
@patch('elodie.config.config_file', '%s/config.ini-load-plugin-config-one' % gettempdir())
def test_load_plugin_config_one():
with open('%s/config.ini-load-plugin-config-one' % gettempdir(), 'w') as f:
f.write("""
[Plugins]
plugins=Dummy
""")
if hasattr(load_config, 'config'):
del load_config.config
plugins = load_plugin_config()
if hasattr(load_config, 'config'):
del load_config.config
assert plugins == ['Dummy'], plugins
@patch('elodie.config.config_file', '%s/config.ini-load-plugin-config-one-with-invalid' % gettempdir())
def test_load_plugin_config_one_with_invalid():
with open('%s/config.ini-load-plugin-config-one-with-invalid' % gettempdir(), 'w') as f:
f.write("""
[Plugins]
plugins=DNE
""")
if hasattr(load_config, 'config'):
del load_config.config
plugins = load_plugin_config()
if hasattr(load_config, 'config'):
del load_config.config
assert plugins == [], plugins
@patch('elodie.config.config_file', '%s/config.ini-load-plugin-config-many' % gettempdir())
def test_load_plugin_config_many():
with open('%s/config.ini-load-plugin-config-many' % gettempdir(), 'w') as f:
f.write("""
[Plugins]
plugins=GooglePhotos,Dummy
""")
if hasattr(load_config, 'config'):
del load_config.config
plugins = load_plugin_config()
if hasattr(load_config, 'config'):
del load_config.config
assert plugins == ['GooglePhotos','Dummy'], plugins
| apache-2.0 |
marctc/django | django/core/management/commands/squashmigrations.py | 132 | 7265 | from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from django.db import DEFAULT_DB_ALIAS, connections, migrations
from django.db.migrations.loader import AmbiguityError, MigrationLoader
from django.db.migrations.migration import SwappableTuple
from django.db.migrations.optimizer import MigrationOptimizer
from django.db.migrations.writer import MigrationWriter
from django.utils import six
from django.utils.version import get_docs_version
class Command(BaseCommand):
help = "Squashes an existing set of migrations (from first until specified) into a single new one."
def add_arguments(self, parser):
parser.add_argument('app_label',
help='App label of the application to squash migrations for.')
parser.add_argument('migration_name',
help='Migrations will be squashed until and including this migration.')
parser.add_argument('--no-optimize', action='store_true', dest='no_optimize', default=False,
help='Do not try to optimize the squashed operations.')
parser.add_argument('--noinput', action='store_false', dest='interactive', default=True,
help='Tells Django to NOT prompt the user for input of any kind.')
def handle(self, **options):
self.verbosity = options.get('verbosity')
self.interactive = options.get('interactive')
app_label = options['app_label']
migration_name = options['migration_name']
no_optimize = options['no_optimize']
# Load the current graph state, check the app and migration they asked for exists
loader = MigrationLoader(connections[DEFAULT_DB_ALIAS])
if app_label not in loader.migrated_apps:
raise CommandError(
"App '%s' does not have migrations (so squashmigrations on "
"it makes no sense)" % app_label
)
try:
migration = loader.get_migration_by_prefix(app_label, migration_name)
except AmbiguityError:
raise CommandError(
"More than one migration matches '%s' in app '%s'. Please be "
"more specific." % (migration_name, app_label)
)
except KeyError:
raise CommandError(
"Cannot find a migration matching '%s' from app '%s'." %
(migration_name, app_label)
)
# Work out the list of predecessor migrations
migrations_to_squash = [
loader.get_migration(al, mn)
for al, mn in loader.graph.forwards_plan((migration.app_label, migration.name))
if al == migration.app_label
]
# Tell them what we're doing and optionally ask if we should proceed
if self.verbosity > 0 or self.interactive:
self.stdout.write(self.style.MIGRATE_HEADING("Will squash the following migrations:"))
for migration in migrations_to_squash:
self.stdout.write(" - %s" % migration.name)
if self.interactive:
answer = None
while not answer or answer not in "yn":
answer = six.moves.input("Do you wish to proceed? [yN] ")
if not answer:
answer = "n"
break
else:
answer = answer[0].lower()
if answer != "y":
return
# Load the operations from all those migrations and concat together,
# along with collecting external dependencies and detecting
# double-squashing
operations = []
dependencies = set()
for smigration in migrations_to_squash:
if smigration.replaces:
raise CommandError(
"You cannot squash squashed migrations! Please transition "
"it to a normal migration first: "
"https://docs.djangoproject.com/en/%s/topics/migrations/#squashing-migrations" % get_docs_version()
)
operations.extend(smigration.operations)
for dependency in smigration.dependencies:
if isinstance(dependency, SwappableTuple):
if settings.AUTH_USER_MODEL == dependency.setting:
dependencies.add(("__setting__", "AUTH_USER_MODEL"))
else:
dependencies.add(dependency)
elif dependency[0] != smigration.app_label:
dependencies.add(dependency)
if no_optimize:
if self.verbosity > 0:
self.stdout.write(self.style.MIGRATE_HEADING("(Skipping optimization.)"))
new_operations = operations
else:
if self.verbosity > 0:
self.stdout.write(self.style.MIGRATE_HEADING("Optimizing..."))
optimizer = MigrationOptimizer()
new_operations = optimizer.optimize(operations, migration.app_label)
if self.verbosity > 0:
if len(new_operations) == len(operations):
self.stdout.write(" No optimizations possible.")
else:
self.stdout.write(
" Optimized from %s operations to %s operations." %
(len(operations), len(new_operations))
)
# Work out the value of replaces (any squashed ones we're re-squashing)
# need to feed their replaces into ours
replaces = []
for migration in migrations_to_squash:
if migration.replaces:
replaces.extend(migration.replaces)
else:
replaces.append((migration.app_label, migration.name))
# Make a new migration with those operations
subclass = type("Migration", (migrations.Migration, ), {
"dependencies": dependencies,
"operations": new_operations,
"replaces": replaces,
"initial": True,
})
new_migration = subclass("0001_squashed_%s" % migration.name, app_label)
# Write out the new migration file
writer = MigrationWriter(new_migration)
with open(writer.path, "wb") as fh:
fh.write(writer.as_string())
if self.verbosity > 0:
self.stdout.write(self.style.MIGRATE_HEADING("Created new squashed migration %s" % writer.path))
self.stdout.write(" You should commit this migration but leave the old ones in place;")
self.stdout.write(" the new migration will be used for new installs. Once you are sure")
self.stdout.write(" all instances of the codebase have applied the migrations you squashed,")
self.stdout.write(" you can delete them.")
if writer.needs_manual_porting:
self.stdout.write(self.style.MIGRATE_HEADING("Manual porting required"))
self.stdout.write(" Your migrations contained functions that must be manually copied over,")
self.stdout.write(" as we could not safely copy their implementation.")
self.stdout.write(" See the comment at the top of the squashed migration for details.")
| bsd-3-clause |
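# A hedged usage sketch of the command above: from a project shell,
# `manage.py squashmigrations <app_label> <migration_name>` squashes every
# migration up to and including the named one, writing a new migration
# whose `replaces` list covers the old ones. The same invocation from
# code; the app label 'blog' and the prefix '0004' are hypothetical:
from django.core.management import call_command

call_command('squashmigrations', 'blog', '0004',
             interactive=False,   # equivalent to passing --noinput
             no_optimize=False)   # keep the optimizer pass enabled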
jillesme/phantomjs | src/breakpad/src/tools/gyp/test/generator-output/gyptest-relocate.py | 151 | 1604 | #!/usr/bin/env python
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies that a project hierarchy created with the --generator-output=
option can be built even when it's relocated to a different path.
"""
import TestGyp
test = TestGyp.TestGyp()
test.writable(test.workpath('src'), False)
test.run_gyp('prog1.gyp',
'-Dset_symroot=1',
'--generator-output=' + test.workpath('gypfiles'),
chdir='src')
test.writable(test.workpath('src'), True)
test.relocate('src', 'relocate/src')
test.relocate('gypfiles', 'relocate/gypfiles')
test.writable(test.workpath('relocate/src'), False)
test.writable(test.workpath('relocate/src/build'), True)
test.writable(test.workpath('relocate/src/subdir2/build'), True)
test.writable(test.workpath('relocate/src/subdir3/build'), True)
test.build('prog1.gyp', test.ALL, chdir='relocate/gypfiles')
chdir = 'relocate/gypfiles'
expect = """\
Hello from %s
Hello from inc.h
Hello from inc1/include1.h
Hello from inc2/include2.h
Hello from inc3/include3.h
Hello from subdir2/deeper/deeper.h
"""
if test.format == 'xcode':
chdir = 'relocate/src'
test.run_built_executable('prog1', chdir=chdir, stdout=expect % 'prog1.c')
if test.format == 'xcode':
chdir = 'relocate/src/subdir2'
test.run_built_executable('prog2', chdir=chdir, stdout=expect % 'prog2.c')
if test.format == 'xcode':
chdir = 'relocate/src/subdir3'
test.run_built_executable('prog3', chdir=chdir, stdout=expect % 'prog3.c')
test.pass_test()
| bsd-3-clause |
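# The writable() toggling above is the point of the test: the source tree
# is made read-only while gyp runs, so a generator that tried to write
# outside --generator-output would fail loudly. A minimal sketch of the
# same guard, reusing only TestGyp calls seen above ('other.gyp' is a
# hypothetical gyp file):
import TestGyp

test = TestGyp.TestGyp()
test.writable(test.workpath('src'), False)   # lock the sources
test.run_gyp('other.gyp',
             '--generator-output=' + test.workpath('gypfiles'),
             chdir='src')                    # must not write under src/
test.writable(test.workpath('src'), True)    # unlock again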
todaychi/hue | desktop/core/ext-py/boto-2.46.1/boto/services/submit.py | 153 | 3555 | # Copyright (c) 2006,2007 Mitch Garnaat http://garnaat.org/
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
import time
import os
class Submitter(object):
def __init__(self, sd):
self.sd = sd
self.input_bucket = self.sd.get_obj('input_bucket')
self.output_bucket = self.sd.get_obj('output_bucket')
self.output_domain = self.sd.get_obj('output_domain')
self.queue = self.sd.get_obj('input_queue')
def get_key_name(self, fullpath, prefix):
key_name = fullpath[len(prefix):]
l = key_name.split(os.sep)
return '/'.join(l)
def write_message(self, key, metadata):
if self.queue:
m = self.queue.new_message()
m.for_key(key, metadata)
if self.output_bucket:
m['OutputBucket'] = self.output_bucket.name
self.queue.write(m)
def submit_file(self, path, metadata=None, cb=None, num_cb=0, prefix='/'):
if not metadata:
metadata = {}
key_name = self.get_key_name(path, prefix)
k = self.input_bucket.new_key(key_name)
k.update_metadata(metadata)
k.set_contents_from_filename(path, replace=False, cb=cb, num_cb=num_cb)
self.write_message(k, metadata)
def submit_path(self, path, tags=None, ignore_dirs=None, cb=None, num_cb=0, status=False, prefix='/'):
path = os.path.expanduser(path)
path = os.path.expandvars(path)
path = os.path.abspath(path)
total = 0
metadata = {}
if tags:
metadata['Tags'] = tags
l = []
for t in time.gmtime():
l.append(str(t))
metadata['Batch'] = '_'.join(l)
if self.output_domain:
self.output_domain.put_attributes(metadata['Batch'], {'type' : 'Batch'})
if os.path.isdir(path):
for root, dirs, files in os.walk(path):
if ignore_dirs:
for ignore in ignore_dirs:
if ignore in dirs:
dirs.remove(ignore)
for file in files:
fullpath = os.path.join(root, file)
if status:
print('Submitting %s' % fullpath)
self.submit_file(fullpath, metadata, cb, num_cb, prefix)
total += 1
elif os.path.isfile(path):
self.submit_file(path, metadata, cb, num_cb)
total += 1
else:
print('problem with %s' % path)
return (metadata['Batch'], total)
| apache-2.0 |
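# Hedged usage sketch: a Submitter is normally constructed from a boto
# ServiceDef (boto.services.servicedef), which supplies the buckets, queue
# and domain read via sd.get_obj() above. The config file name, path and
# tag below are hypothetical.
from boto.services.servicedef import ServiceDef

sd = ServiceDef('myservice.cfg')    # assumption: a valid service config file
submitter = Submitter(sd)
batch, total = submitter.submit_path('~/photos', tags='nightly', status=True)
print('batch %s: %d files submitted' % (batch, total))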
pthatcher/psync | src/history/__init__.py | 1 | 1996 | # Copyright (c) 2011, Peter Thatcher
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# 3. The name of the author may not be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR IMPLIED
# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
# EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Things in this module represent anything related to the history of
# files, but unrelated to a specific store of the files (such as the
# file system). It also contains the log for comparing histories and
# determining what kinds of actions are necessary to merge histories.
from store import HistoryStore
from entry import History, HistoryEntry, group_history_by_gpath
from diff import HistoryDiff, HistoryDiffType, diff_histories
from diff import MergeAction, MergeActionType, calculate_merge_actions
from mergelog import MergeLog
| bsd-3-clause |
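# Hedged composition sketch of the re-exported names (call signatures are
# assumptions drawn from the comment above, not verified against diff.py):
# diffs = diff_histories(local_history, remote_history)
# actions = calculate_merge_actions(diffs)
# each action carries a MergeActionType describing how to merge that path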
natefoo/tools-iuc | tools/spyboat/output_report.py | 12 | 8730 | """ Produces plots and a summary html 'headless' """
import logging
import os
import matplotlib
import matplotlib.pyplot as ppl
import spyboat.plotting as spyplot
ppl.switch_backend('Agg')
matplotlib.rcParams["text.usetex"] = False
logger = logging.getLogger(__name__)
# figure resolution
DPI = 250
def produce_snapshots(input_movie, results, frame, Wkwargs, img_path="."):
"""
Takes the *input_movie* and the *results* dictionary
from spyboat.processing.run_parallel and produces phase,
period and amplitude snapshot png's.
For the period snapshot also the period range is needed,
hence the analysis dictionary 'Wkwargs' also gets passed.
The output file name pattern is:
[input, phase, period, amplitude]_frame{frame}.png
and the storage location is *img_path*.
These get picked up by 'create_html'
"""
spyplot.input_snapshot(input_movie[frame])
fig = ppl.gcf()
out_path = os.path.join(img_path, f"input_frame{frame}.png")
fig.savefig(out_path, dpi=DPI)
ppl.close(fig)
spyplot.phase_snapshot(results["phase"][frame])
fig = ppl.gcf()
out_path = os.path.join(img_path, f"phase_frame{frame}.png")
fig.savefig(out_path, dpi=DPI)
ppl.close(fig)
spyplot.period_snapshot(
results["period"][frame], Wkwargs["Tmin"],
Wkwargs["Tmax"], time_unit="a.u."
)
fig = ppl.gcf()
out_path = os.path.join(img_path, f"period_frame{frame}.png")
fig.savefig(out_path, dpi=DPI)
ppl.close(fig)
spyplot.amplitude_snapshot(results["amplitude"][frame])
fig = ppl.gcf()
out_path = os.path.join(img_path, f"amplitude_frame{frame}.png")
fig.savefig(out_path, dpi=DPI)
ppl.close(fig)
logger.info(f"Produced 4 snapshots for frame {frame}..")
def produce_distr_plots(results, Wkwargs, img_path="."):
"""
Output file names are:
period_distr.png, power_distr.png and phase_distr.png
"""
spyplot.period_distr_dynamics(results["period"], Wkwargs)
fig = ppl.gcf()
out_path = os.path.join(img_path, "period_distr.png")
fig.savefig(out_path, dpi=DPI)
spyplot.power_distr_dynamics(results["power"], Wkwargs)
fig = ppl.gcf()
out_path = os.path.join(img_path, "power_distr.png")
fig.savefig(out_path, dpi=DPI)
spyplot.phase_coherence_dynamics(results["phase"], Wkwargs)
fig = ppl.gcf()
out_path = os.path.join(img_path, "phase_distr.png")
fig.savefig(out_path, dpi=DPI)
logger.info("Produced 3 distribution plots..")
def create_html(frame_nums, par_str, html_fname="OutputReport.html"):
"""
The generated html assumes that the respective png's
have been created with 'produce_snapshots' and 'produce_distr_plots'
and can be found in the cwd (that's how Galaxy works..)
"""
# -- create a gallery for every frame in frame_nums --
galleries = ""
for frame_num in frame_nums:
new_gal = f"""
<div class="FrameSlides">
<h3 style="text-align:center; color=#363333">
Frame Nr. {frame_num} </h3>
<div class="snapshot_gallery">
<figure class=”snapshot_gallery__item
snapshot_gallery__item--1" style="margin: 0 0">
<img src="input_frame{frame_num}.png" alt="The Input"
class="snapshot_gallery__img">
</figure>
<figure class=”snapshot_gallery__item
snapshot_gallery__item--2" style="margin: 0 0">
<img src="phase_frame{frame_num}.png" alt="Phase"
class="snapshot_gallery__img">
</figure>
<figure class=”snapshot_gallery__item
snapshot_gallery__item--3" style="margin: 0 0">
<img src="period_frame{frame_num}.png"
alt="Period" class="snapshot_gallery__img">
</figure>
<figure class=”snapshot_gallery__item
snapshot_gallery__item--4" style="margin: 0 0">
<img src="amplitude_frame{frame_num}.png"
alt="Amplitude" class="snapshot_gallery__img">
</figure>
</div>
</div>
"""
galleries += new_gal
parameter_cells = ''
for line in par_str.split('\n'):
# the last split element is empty..
if not line:
break
par_name, par_val = line.split('->')
parameter_cells += f'''
<tr>
<td>{par_name}</td>
<td>{par_val}</td>
</tr>'''
html_string = f"""
<html>
<!-- this file got automatically created by 'output_report.py' -->
<title>SpyBOAT Output Report</title>
<head>
<!-- that doesn't work with galaxy.. -->
<!--link rel="stylesheet" href="styles.css"-->
<style type="text/css">
body{{ margin: 10px 100px; background: whitesmoke; }}
p{{
text-align: center;
margin-top: 0.05cm;
margin-bottom: .05cm;
color:#2c2e2e;
}}
.center{{
text-align: center;
display: block;
margin-left: auto;
margin-right: auto;
width: 100%;}}
/* matplotlib output at 1600x1200 */
.snapshot_gallery {{
margin: 0 0;
text-align: center;
display: grid;
grid-template-columns: repeat(2,1fr);
grid-template-rows: repeat(2,27vw);
grid-gap: 5px;
}}
.snapshot_gallery__img {{
width: 100%;
height: 100%;
object-fit: contain;
margin-top: 5px;
margin-bottom: 15px;
}}
.subheader{{
text-align:center;
font-size: 160%;
color:#363333;}}
.centerimg{{
text-align: center;
width: 65%;
max-width: 400px;
display: block;
padding: 10px;
margin-left: auto;
margin-right: auto;
}}
.div_distr{{
text-align: center;
border-radius: 25px;
margin-top: 1cm;
margin: auto;
margin-bottom: 0.5cm;
background-color: #cce1e3;
max-width: 550px;
}}
.partable{{
width: 70%;
margin-left: auto;
margin-right: auto;
}}
tr, td{{
color:#2c2e2e;
font-size: 110%;
}}
</style>
</head>
<body>
<h1 style="text-align:center; color:#363333">SpyBOAT Results Report</h1>
<hr style="width:70%">
<h1 class="subheader"> Spatial Summary Statistics </h1>
<div class="div_distr">
<img src="period_distr.png" alt="Period"
class="centerimg">
<p> Median and quartiles of the estimated periods for each frame </p>
</div>
<div class="div_distr">
<img src="power_distr.png" alt="Period"
class="centerimg">
<p> Median and quartiles of the ridge wavelet power for each frame </p>
</div>
<div class="div_distr">
<img src="phase_distr.png" alt="Period"
class="centerimg">
<p> Kuramoto order parameter for the phases estimated for each frame </p>
</div>
<h1 class="subheader"> Output Movie Snapshots </h1>
<!-- trigger the javascript at the end--->
<div class="center">
<button class="w3-button" onclick="plusDivs(-1)">❮ Prev</button>
<button class="w3-button" onclick="plusDivs(1)">Next ❯</button>
</div>
<!-- defines all elements of the "FrameSlides" class --->
{galleries}
</div>
<h1 class="subheader"> Parameters </h1>
<div class="div_distr">
<table border = "1" class="partable">
<tr>
<th>Name</th>
<th>Value</th>
</tr>
{parameter_cells}
</table>
</div>
<!-- javascript with escaped '{{'--->
<script>
var slideIndex = 1;
showDivs(slideIndex);
function plusDivs(n) {{
showDivs(slideIndex += n);
}}
function showDivs(n) {{
var i;
var x = document.getElementsByClassName("FrameSlides");
if (n > x.length) {{slideIndex = 1}}
if (n < 1) {{slideIndex = x.length}} ;
for (i = 0; i < x.length; i++) {{
x[i].style.display = "none";
}}
x[slideIndex-1].style.display = "block";
}}
</script>
</body>
</html>
"""
with open(html_fname, "w") as OUT:
OUT.write(html_string)
logger.info("Created html report")
return html_string
# for local testing
# create_html([0,20], 'par1 -> val1\n verylongpar2 -> val2')
| mit |
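# Hedged local usage sketch, mirroring the commented example above: the
# 'name -> value' lines follow create_html's parsing, and the png file
# names are whatever produce_snapshots/produce_distr_plots wrote to the
# cwd. The frame numbers and parameter names are hypothetical.
par_str = 'Tmin -> 20\nTmax -> 60\n'
create_html([0, 20], par_str, html_fname='OutputReport.html')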
dparaujo/projeto | app_inscricoes/questionarios/migrations/0002_auto_20170220_2126.py | 1 | 1224 | # -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2017-02-21 00:26
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('questionarios', '0001_initial'),
]
operations = [
migrations.RemoveField(
model_name='tblquestionarioingresopsid',
name='pessoa',
),
migrations.RemoveField(
model_name='tblquestionariosocioeconomico',
name='pessoa',
),
migrations.AlterField(
model_name='tblquestionarioingresopsid',
name='fez_quantos_curso_psid',
field=models.IntegerField(choices=[(0, 'Nenhum Curso'), (1, 'Um Curso'), (3, 'Dois Cursos'), (4, 'Tr\xeas Cursos'), (5, 'Quatro Cursos'), (6, 'Fiz mais que quatro cursos')], verbose_name='Quantos curso voc\xea fez no PSID?'),
),
migrations.AlterField(
model_name='tblquestionariosocioeconomico',
name='cor_raca',
field=models.IntegerField(choices=[(0, 'Branca'), (1, 'Negro'), (3, 'Pardo'), (4, 'Amarela'), (5, 'Ind\xedgena'), (6, 'N\xe3o Declara')], verbose_name='Cor/Ra\xe7a'),
),
]
| gpl-3.0 |
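# Applying or reversing this migration uses the standard Django commands
# (the app label matches this package):
#   python manage.py migrate questionarios 0002_auto_20170220_2126
#   python manage.py migrate questionarios 0001_initial   # to reverse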
babywolfh/iris-panel | iris/core/migrations/0010_STRIP_IMAGE_NAME.py | 7 | 14447 | # -*- coding: utf-8 -*-
# This file is part of IRIS: Infrastructure and Release Information System
#
# Copyright (C) 2013-2015 Intel Corporation
#
# IRIS is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# version 2.0 as published by the Free Software Foundation.
#pylint: skip-file
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
class Migration(DataMigration):
def forwards(self, orm):
"Write your forwards methods here."
# Note: Don't use "from appname.models import ModelName".
# Use orm.ModelName to refer to models in this application,
# and orm['appname.ModelName'] for models in other applications.
for img in orm.Image.objects.all():
img.name = img.name.strip()
img.save(update_fields=('name',))
def backwards(self, orm):
"Write your backwards methods here."
pass
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '225'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'core.domain': {
'Meta': {'object_name': 'Domain'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'})
},
'core.domainrole': {
'Meta': {'unique_together': "(('role', 'domain'),)", 'object_name': 'DomainRole', '_ormbases': [u'auth.Group']},
'domain': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'role_set'", 'to': "orm['core.Domain']"}),
u'group_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['auth.Group']", 'unique': 'True', 'primary_key': 'True'}),
'role': ('django.db.models.fields.CharField', [], {'max_length': '15', 'db_index': 'True'})
},
'core.gittree': {
'Meta': {'object_name': 'GitTree'},
'gitpath': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'licenses': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['core.License']", 'symmetrical': 'False'}),
'packages': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['core.Package']", 'symmetrical': 'False'}),
'subdomain': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.SubDomain']"})
},
'core.gittreerole': {
'Meta': {'unique_together': "(('role', 'gittree'),)", 'object_name': 'GitTreeRole', '_ormbases': [u'auth.Group']},
'gittree': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'role_set'", 'to': "orm['core.GitTree']"}),
u'group_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['auth.Group']", 'unique': 'True', 'primary_key': 'True'}),
'role': ('django.db.models.fields.CharField', [], {'max_length': '15', 'db_index': 'True'})
},
'core.image': {
'Meta': {'object_name': 'Image'},
'arch': ('django.db.models.fields.TextField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.TextField', [], {}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.Product']"}),
'target': ('django.db.models.fields.TextField', [], {})
},
'core.imagebuild': {
'Meta': {'object_name': 'ImageBuild'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.Image']"}),
'log': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['core.Log']", 'unique': 'True', 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'name': ('django.db.models.fields.TextField', [], {}),
'status': ('django.db.models.fields.CharField', [], {'max_length': '8'})
},
'core.license': {
'Meta': {'object_name': 'License'},
'fullname': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'shortname': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'text': ('django.db.models.fields.TextField', [], {})
},
'core.log': {
'Meta': {'object_name': 'Log'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200'})
},
'core.package': {
'Meta': {'object_name': 'Package'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'})
},
'core.packagebuild': {
'Meta': {'object_name': 'PackageBuild'},
'arch': ('django.db.models.fields.TextField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'log': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['core.Log']", 'unique': 'True', 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'package': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.Package']"}),
'status': ('django.db.models.fields.CharField', [], {'max_length': '8'}),
'target': ('django.db.models.fields.TextField', [], {})
},
'core.product': {
'Meta': {'object_name': 'Product'},
'description': ('django.db.models.fields.TextField', [], {}),
'gittrees': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['core.GitTree']", 'symmetrical': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'})
},
'core.subdomain': {
'Meta': {'unique_together': "(('name', 'domain'),)", 'object_name': 'SubDomain'},
'domain': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.Domain']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'})
},
'core.subdomainrole': {
'Meta': {'unique_together': "(('role', 'subdomain'),)", 'object_name': 'SubDomainRole', '_ormbases': [u'auth.Group']},
u'group_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['auth.Group']", 'unique': 'True', 'primary_key': 'True'}),
'role': ('django.db.models.fields.CharField', [], {'max_length': '15', 'db_index': 'True'}),
'subdomain': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.SubDomain']"})
},
'core.submission': {
'Meta': {'object_name': 'Submission'},
'comment': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'commit': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'gittree': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['core.GitTree']", 'symmetrical': 'False', 'blank': 'True'}),
'ibuilds': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['core.ImageBuild']", 'symmetrical': 'False', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '80', 'db_index': 'True'}),
'pbuilds': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['core.PackageBuild']", 'symmetrical': 'False', 'blank': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.Product']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'max_length': '16', 'db_index': 'True'}),
'submitters': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.User']", 'symmetrical': 'False'}),
'testresults': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['core.TestResult']", 'symmetrical': 'False', 'blank': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
'core.submissiongroup': {
'Meta': {'object_name': 'SubmissionGroup'},
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '80', 'db_index': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.Product']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
'submissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['core.Submission']", 'symmetrical': 'False'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
'core.testresult': {
'Meta': {'object_name': 'TestResult'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'log': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['core.Log']", 'unique': 'True', 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'name': ('django.db.models.fields.TextField', [], {}),
'status': ('django.db.models.fields.CharField', [], {'max_length': '16'})
},
'core.userparty': {
'Meta': {'object_name': 'UserParty', '_ormbases': [u'auth.Group']},
u'group_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['auth.Group']", 'unique': 'True', 'primary_key': 'True'}),
'party': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '15'})
},
'core.userprofile': {
'Meta': {'object_name': 'UserProfile'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['auth.User']", 'unique': 'True'})
}
}
complete_apps = ['core']
symmetrical = True
| gpl-2.0 |
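# This project predates built-in Django migrations, so the data migration
# above is applied through South:
#   python manage.py migrate core 0010_STRIP_IMAGE_NAME
# backwards() is deliberately a no-op: whitespace stripped by forwards()
# cannot be restored.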
aron-bordin/Tyrant-Sql | SQL_Map/tamper/randomcase.py | 8 | 1274 | #!/usr/bin/env python
"""
Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""
import re
from lib.core.common import randomRange
from lib.core.data import kb
from lib.core.enums import PRIORITY
__priority__ = PRIORITY.NORMAL
def dependencies():
pass
def tamper(payload, **kwargs):
"""
Replaces each keyword character with a random case value
Tested against:
* Microsoft SQL Server 2005
* MySQL 4, 5.0 and 5.5
* Oracle 10g
* PostgreSQL 8.3, 8.4, 9.0
Notes:
* Useful to bypass very weak and bespoke web application firewalls
that have poorly written permissive regular expressions
* This tamper script should work against all (?) databases
>>> import random
>>> random.seed(0)
>>> tamper('INSERT')
'INseRt'
"""
retVal = payload
if payload:
for match in re.finditer(r"[A-Za-z_]+", retVal):
word = match.group()
if word.upper() in kb.keywords:
_ = str()
for i in xrange(len(word)):
_ += word[i].upper() if randomRange(0, 1) else word[i].lower()
retVal = retVal.replace(word, _)
return retVal
| gpl-3.0 |
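# From the sqlmap command line this script is selected by file name with
# the --tamper switch (the URL is a placeholder):
#   python sqlmap.py -u "http://target/vuln.php?id=1" --tamper=randomcase
# Note that tamper() depends on kb.keywords, which sqlmap populates during
# initialization, so the function is not meant to be called standalone.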
WillisXChen/django-oscar | oscar/lib/python2.7/site-packages/pip/_vendor/requests/packages/chardet/langthaimodel.py | 2930 | 11275 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
# 255: Control characters that usually does not exist in any text
# 254: Carriage/Return
# 253: symbol (punctuation) that does not belong to word
# 252: 0 - 9
# The following result for Thai was collected from a limited sample (1M).
# Character Mapping Table:
TIS620CharToOrderMap = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
253,182,106,107,100,183,184,185,101, 94,186,187,108,109,110,111, # 40
188,189,190, 89, 95,112,113,191,192,193,194,253,253,253,253,253, # 50
253, 64, 72, 73,114, 74,115,116,102, 81,201,117, 90,103, 78, 82, # 60
96,202, 91, 79, 84,104,105, 97, 98, 92,203,253,253,253,253,253, # 70
209,210,211,212,213, 88,214,215,216,217,218,219,220,118,221,222,
223,224, 99, 85, 83,225,226,227,228,229,230,231,232,233,234,235,
236, 5, 30,237, 24,238, 75, 8, 26, 52, 34, 51,119, 47, 58, 57,
49, 53, 55, 43, 20, 19, 44, 14, 48, 3, 17, 25, 39, 62, 31, 54,
45, 9, 16, 2, 61, 15,239, 12, 42, 46, 18, 21, 76, 4, 66, 63,
22, 10, 1, 36, 23, 13, 40, 27, 32, 35, 86,240,241,242,243,244,
11, 28, 41, 29, 33,245, 50, 37, 6, 7, 67, 77, 38, 93,246,247,
68, 56, 59, 65, 69, 60, 70, 80, 71, 87,248,249,250,251,252,253,
)
# Model Table:
# total sequences: 100%
# first 512 sequences: 92.6386%
# first 1024 sequences:7.3177%
# rest sequences: 1.0230%
# negative sequences: 0.0436%
ThaiLangModel = (
0,1,3,3,3,3,0,0,3,3,0,3,3,0,3,3,3,3,3,3,3,3,0,0,3,3,3,0,3,3,3,3,
0,3,3,0,0,0,1,3,0,3,3,2,3,3,0,1,2,3,3,3,3,0,2,0,2,0,0,3,2,1,2,2,
3,0,3,3,2,3,0,0,3,3,0,3,3,0,3,3,3,3,3,3,3,3,3,0,3,2,3,0,2,2,2,3,
0,2,3,0,0,0,0,1,0,1,2,3,1,1,3,2,2,0,1,1,0,0,1,0,0,0,0,0,0,0,1,1,
3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2,2,2,2,3,3,2,3,2,3,3,2,2,2,
3,1,2,3,0,3,3,2,2,1,2,3,3,1,2,0,1,3,0,1,0,0,1,0,0,0,0,0,0,0,1,1,
3,3,2,2,3,3,3,3,1,2,3,3,3,3,3,2,2,2,2,3,3,2,2,3,3,2,2,3,2,3,2,2,
3,3,1,2,3,1,2,2,3,3,1,0,2,1,0,0,3,1,2,1,0,0,1,0,0,0,0,0,0,1,0,1,
3,3,3,3,3,3,2,2,3,3,3,3,2,3,2,2,3,3,2,2,3,2,2,2,2,1,1,3,1,2,1,1,
3,2,1,0,2,1,0,1,0,1,1,0,1,1,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,
3,3,3,2,3,2,3,3,2,2,3,2,3,3,2,3,1,1,2,3,2,2,2,3,2,2,2,2,2,1,2,1,
2,2,1,1,3,3,2,1,0,1,2,2,0,1,3,0,0,0,1,1,0,0,0,0,0,2,3,0,0,2,1,1,
3,3,2,3,3,2,0,0,3,3,0,3,3,0,2,2,3,1,2,2,1,1,1,0,2,2,2,0,2,2,1,1,
0,2,1,0,2,0,0,2,0,1,0,0,1,0,0,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,1,0,
3,3,2,3,3,2,0,0,3,3,0,2,3,0,2,1,2,2,2,2,1,2,0,0,2,2,2,0,2,2,1,1,
0,2,1,0,2,0,0,2,0,1,1,0,1,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,
3,3,2,3,2,3,2,0,2,2,1,3,2,1,3,2,1,2,3,2,2,3,0,2,3,2,2,1,2,2,2,2,
1,2,2,0,0,0,0,2,0,1,2,0,1,1,1,0,1,0,3,1,1,0,0,0,0,0,0,0,0,0,1,0,
3,3,2,3,3,2,3,2,2,2,3,2,2,3,2,2,1,2,3,2,2,3,1,3,2,2,2,3,2,2,2,3,
3,2,1,3,0,1,1,1,0,2,1,1,1,1,1,0,1,0,1,1,0,0,0,0,0,0,0,0,0,2,0,0,
1,0,0,3,0,3,3,3,3,3,0,0,3,0,2,2,3,3,3,3,3,0,0,0,1,1,3,0,0,0,0,2,
0,0,1,0,0,0,0,0,0,0,2,3,0,0,0,3,0,2,0,0,0,0,0,3,0,0,0,0,0,0,0,0,
2,0,3,3,3,3,0,0,2,3,0,0,3,0,3,3,2,3,3,3,3,3,0,0,3,3,3,0,0,0,3,3,
0,0,3,0,0,0,0,2,0,0,2,1,1,3,0,0,1,0,0,2,3,0,1,0,0,0,0,0,0,0,1,0,
3,3,3,3,2,3,3,3,3,3,3,3,1,2,1,3,3,2,2,1,2,2,2,3,1,1,2,0,2,1,2,1,
2,2,1,0,0,0,1,1,0,1,0,1,1,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,
3,0,2,1,2,3,3,3,0,2,0,2,2,0,2,1,3,2,2,1,2,1,0,0,2,2,1,0,2,1,2,2,
0,1,1,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,2,1,3,3,1,1,3,0,2,3,1,1,3,2,1,1,2,0,2,2,3,2,1,1,1,1,1,2,
3,0,0,1,3,1,2,1,2,0,3,0,0,0,1,0,3,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,
3,3,1,1,3,2,3,3,3,1,3,2,1,3,2,1,3,2,2,2,2,1,3,3,1,2,1,3,1,2,3,0,
2,1,1,3,2,2,2,1,2,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,
3,3,2,3,2,3,3,2,3,2,3,2,3,3,2,1,0,3,2,2,2,1,2,2,2,1,2,2,1,2,1,1,
2,2,2,3,0,1,3,1,1,1,1,0,1,1,0,2,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,2,3,2,2,1,1,3,2,3,2,3,2,0,3,2,2,1,2,0,2,2,2,1,2,2,2,2,1,
3,2,1,2,2,1,0,2,0,1,0,0,1,1,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,2,3,1,2,3,3,2,2,3,0,1,1,2,0,3,3,2,2,3,0,1,1,3,0,0,0,0,
3,1,0,3,3,0,2,0,2,1,0,0,3,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,2,3,2,3,3,0,1,3,1,1,2,1,2,1,1,3,1,1,0,2,3,1,1,1,1,1,1,1,1,
3,1,1,2,2,2,2,1,1,1,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
3,2,2,1,1,2,1,3,3,2,3,2,2,3,2,2,3,1,2,2,1,2,0,3,2,1,2,2,2,2,2,1,
3,2,1,2,2,2,1,1,1,1,0,0,1,1,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,1,3,3,0,2,1,0,3,2,0,0,3,1,0,1,1,0,1,0,0,0,0,0,1,
1,0,0,1,0,3,2,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,2,2,2,3,0,0,1,3,0,3,2,0,3,2,2,3,3,3,3,3,1,0,2,2,2,0,2,2,1,2,
0,2,3,0,0,0,0,1,0,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
3,0,2,3,1,3,3,2,3,3,0,3,3,0,3,2,2,3,2,3,3,3,0,0,2,2,3,0,1,1,1,3,
0,0,3,0,0,0,2,2,0,1,3,0,1,2,2,2,3,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,
3,2,3,3,2,0,3,3,2,2,3,1,3,2,1,3,2,0,1,2,2,0,2,3,2,1,0,3,0,0,0,0,
3,0,0,2,3,1,3,0,0,3,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,1,3,2,2,2,1,2,0,1,3,1,1,3,1,3,0,0,2,1,1,1,1,2,1,1,1,0,2,1,0,1,
1,2,0,0,0,3,1,1,0,0,0,0,1,0,1,0,0,1,0,1,0,0,0,0,0,3,1,0,0,0,1,0,
3,3,3,3,2,2,2,2,2,1,3,1,1,1,2,0,1,1,2,1,2,1,3,2,0,0,3,1,1,1,1,1,
3,1,0,2,3,0,0,0,3,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,2,3,0,3,3,0,2,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,2,3,1,3,0,0,1,2,0,0,2,0,3,3,2,3,3,3,2,3,0,0,2,2,2,0,0,0,2,2,
0,0,1,0,0,0,0,3,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
0,0,0,3,0,2,0,0,0,0,0,0,0,0,0,0,1,2,3,1,3,3,0,0,1,0,3,0,0,0,0,0,
0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,1,2,3,1,2,3,1,0,3,0,2,2,1,0,2,1,1,2,0,1,0,0,1,1,1,1,0,1,0,0,
1,0,0,0,0,1,1,0,3,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,2,1,0,1,1,1,3,1,2,2,2,2,2,2,1,1,1,1,0,3,1,0,1,3,1,1,1,1,
1,1,0,2,0,1,3,1,1,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,1,
3,0,2,2,1,3,3,2,3,3,0,1,1,0,2,2,1,2,1,3,3,1,0,0,3,2,0,0,0,0,2,1,
0,1,0,0,0,0,1,2,0,1,1,3,1,1,2,2,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,
0,0,3,0,0,1,0,0,0,3,0,0,3,0,3,1,0,1,1,1,3,2,0,0,0,3,0,0,0,0,2,0,
0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,
3,3,1,3,2,1,3,3,1,2,2,0,1,2,1,0,1,2,0,0,0,0,0,3,0,0,0,3,0,0,0,0,
3,0,0,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,1,2,0,3,3,3,2,2,0,1,1,0,1,3,0,0,0,2,2,0,0,0,0,3,1,0,1,0,0,0,
0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,2,3,1,2,0,0,2,1,0,3,1,0,1,2,0,1,1,1,1,3,0,0,3,1,1,0,2,2,1,1,
0,2,0,0,0,0,0,1,0,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,0,3,1,2,0,0,2,2,0,1,2,0,1,0,1,3,1,2,1,0,0,0,2,0,3,0,0,0,1,0,
0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,1,1,2,2,0,0,0,2,0,2,1,0,1,1,0,1,1,1,2,1,0,0,1,1,1,0,2,1,1,1,
0,1,1,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,1,
0,0,0,2,0,1,3,1,1,1,1,0,0,0,0,3,2,0,1,0,0,0,1,2,0,0,0,1,0,0,0,0,
0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,3,3,3,3,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,2,3,2,2,0,0,0,1,0,0,0,0,2,3,2,1,2,2,3,0,0,0,2,3,1,0,0,0,1,1,
0,0,1,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,0,
3,3,2,2,0,1,0,0,0,0,2,0,2,0,1,0,0,0,1,1,0,0,0,2,1,0,1,0,1,1,0,0,
0,1,0,2,0,0,1,0,3,0,1,0,0,0,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,1,0,0,1,0,0,0,0,0,1,1,2,0,0,0,0,1,0,0,1,3,1,0,0,0,0,1,1,0,0,
0,1,0,0,0,0,3,0,0,0,0,0,0,3,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,
3,3,1,1,1,1,2,3,0,0,2,1,1,1,1,1,0,2,1,1,0,0,0,2,1,0,1,2,1,1,0,1,
2,1,0,3,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,3,1,0,0,0,0,0,0,0,3,0,0,0,3,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,1,
0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,2,0,0,0,0,0,0,1,2,1,0,1,1,0,2,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,2,0,0,0,1,3,0,1,0,0,0,2,0,0,0,0,0,0,0,1,2,0,0,0,0,0,
3,3,0,0,1,1,2,0,0,1,2,1,0,1,1,1,0,1,1,0,0,2,1,1,0,1,0,0,1,1,1,0,
0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,2,2,1,0,0,0,0,1,0,0,0,0,3,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,
2,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,3,0,0,1,1,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,1,0,1,2,0,1,2,0,0,1,1,0,2,0,1,0,0,1,0,0,0,0,1,0,0,0,2,0,0,0,0,
1,0,0,1,0,1,1,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,1,0,0,0,0,0,0,0,1,1,0,1,1,0,2,1,3,0,0,0,0,1,1,0,0,0,0,0,0,0,3,
1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,0,1,0,1,0,0,2,0,0,2,0,0,1,1,2,0,0,1,1,0,0,0,1,0,0,0,1,1,0,0,0,
1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
1,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,1,0,0,0,
2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,0,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,3,0,0,0,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,0,0,
1,0,0,0,0,0,0,0,0,1,0,0,0,0,2,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,1,1,0,0,2,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
)
TIS620ThaiModel = {
'charToOrderMap': TIS620CharToOrderMap,
'precedenceMatrix': ThaiLangModel,
'mTypicalPositiveRatio': 0.926386,
'keepEnglishLetter': False,
'charsetName': "TIS-620"
}
# flake8: noqa
| bsd-3-clause |
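# How such a model is consumed elsewhere in chardet (a sketch mirroring
# sbcsgroupprober.py; assumes the standalone chardet package and a
# hypothetical TIS-620 byte sample):
from chardet.sbcharsetprober import SingleByteCharSetProber

prober = SingleByteCharSetProber(TIS620ThaiModel)
prober.feed(b'\xca\xc7\xd1\xca\xb4\xd5')
print(prober.get_charset_name(), prober.get_confidence())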
steelee/Adafruit_Python_GPIO | tests/test_I2C.py | 12 | 7418 | # Copyright (c) 2014 Adafruit Industries
# Author: Tony DiCola
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import logging
import unittest
from mock import Mock, patch
import Adafruit_GPIO.Platform as Platform
# Enable debug logging to stdout during tests.
logging.basicConfig()
logging.getLogger().setLevel(logging.DEBUG)
class MockSMBus(object):
# Mock the smbus.SMBus class to record all data written to specific
# addresses and registers in the _written member.
def __init__(self):
# _written will store a dictionary of address to register dictionary.
# Each register dictionary will store a mapping of register value to
# an array of all written values (in sequential write order).
self._written = {}
self._read = {}
def _write_register(self, address, register, value):
self._written.setdefault(address, {}).setdefault(register, []).append(value)
def _read_register(self, address, register):
return self._read.get(address).get(register).pop(0)
def write_byte_data(self, address, register, value):
self._write_register(address, register, value)
def write_word_data(self, address, register, value):
self._write_register(address, register, value >> 8 & 0xFF)
self._write_register(address, register+1, value & 0xFF)
def write_i2c_block_data(self, address, register, values):
for i, value in enumerate(values):
self._write_register(address, register+i, value & 0xFF)
def read_byte_data(self, address, register):
return self._read_register(address, register)
def read_word_data(self, address, register):
high = self._read_register(address, register)
low = self._read_register(address, register+1)
return (high << 8) | low
# mirror smbus.SMBus.read_i2c_block_data(addr, cmd, length)
def read_i2c_block_data(self, address, register, length):
return [self._read_register(address, register + i) for i in range(length)]
def create_device(address, busnum):
# Mock the smbus module and inject it into the global namespace so the
# Adafruit_GPIO.I2C module can be imported. Also inject a mock SMBus
# instance to be returned by smbus.SMBus function calls.
smbus = Mock()
mockbus = MockSMBus()
smbus.SMBus.return_value = mockbus
with patch.dict('sys.modules', {'smbus': smbus}):
import Adafruit_GPIO.I2C as I2C
return (I2C.Device(address, busnum), smbus, mockbus)
def safe_import_i2c():
# Mock the smbus module and inject it into the global namespace so the
# Adafruit_GPIO.I2C module can be imported. The imported I2C module is
# returned so global functions can be called on it.
with patch.dict('sys.modules', {'smbus': Mock() }):
import Adafruit_GPIO.I2C as I2C
return I2C
class TestI2CDevice(unittest.TestCase):
def test_address_and_bus_set_correctly(self):
device, smbus, mockbus = create_device(0x1F, 1)
self.assertEqual(device._bus, mockbus)
smbus.SMBus.assert_called_with(1)
self.assertEqual(device._address, 0x1F)
def test_write8(self):
device, smbus, mockbus = create_device(0x1F, 1)
device.write8(0xFE, 0xED)
self.assertDictEqual(mockbus._written, { 0x1F: { 0xFE: [0xED] }})
def test_write8_truncates_to_8bits(self):
device, smbus, mockbus = create_device(0x1F, 1)
device.write8(0xFE, 0xBEEFED)
self.assertDictEqual(mockbus._written, { 0x1F: { 0xFE: [0xED] }})
def test_write16(self):
device, smbus, mockbus = create_device(0x1F, 1)
device.write16(0xFE, 0xBEEF)
self.assertDictEqual(mockbus._written, { 0x1F: { 0xFE: [0xBE],
0xFF: [0xEF] }})
def test_write16_truncates_to_8bits(self):
device, smbus, mockbus = create_device(0x1F, 1)
device.write16(0xFE, 0xFEEDBEEF)
self.assertDictEqual(mockbus._written, { 0x1F: { 0xFE: [0xBE],
0xFF: [0xEF] }})
def test_writeList(self):
device, smbus, mockbus = create_device(0x1F, 1)
device.writeList(0x00, [0xFE, 0xED, 0xBE, 0xEF])
self.assertDictEqual(mockbus._written, { 0x1F: { 0x00: [0xFE],
0x01: [0xED],
0x02: [0xBE],
0x03: [0xEF] }})
def test_readU8(self):
device, smbus, mockbus = create_device(0x1F, 1)
mockbus._read[0x1F] = { 0xFE: [0xED] }
value = device.readU8(0xFE)
self.assertEqual(value, 0xED)
def test_readS8(self):
device, smbus, mockbus = create_device(0x1F, 1)
mockbus._read[0x1F] = { 0xFE: [0xED] }
value = device.readS8(0xFE)
self.assertEqual(value, -19)
def test_readU16(self):
device, smbus, mockbus = create_device(0x1F, 1)
mockbus._read[0x1F] = { 0xFE: [0xED], 0xFF: [0x01] }
value = device.readU16(0xFE)
self.assertEqual(value, 0xED01)
def test_readS16(self):
device, smbus, mockbus = create_device(0x1F, 1)
mockbus._read[0x1F] = { 0xFE: [0xED], 0xFF: [0x01] }
value = device.readS16(0xFE)
self.assertEqual(value, -4863)
class TestGetDefaultBus(unittest.TestCase):
@patch('Adafruit_GPIO.Platform.pi_revision', Mock(return_value=1))
@patch('Adafruit_GPIO.Platform.platform_detect', Mock(return_value=Platform.RASPBERRY_PI))
def test_raspberry_pi_rev1(self):
I2C = safe_import_i2c()
bus = I2C.get_default_bus()
self.assertEqual(bus, 0)
@patch('Adafruit_GPIO.Platform.pi_revision', Mock(return_value=2))
@patch('Adafruit_GPIO.Platform.platform_detect', Mock(return_value=Platform.RASPBERRY_PI))
def test_raspberry_pi_rev2(self):
I2C = safe_import_i2c()
bus = I2C.get_default_bus()
self.assertEqual(bus, 1)
@patch('Adafruit_GPIO.Platform.platform_detect', Mock(return_value=Platform.BEAGLEBONE_BLACK))
def test_beaglebone_black(self):
I2C = safe_import_i2c()
bus = I2C.get_default_bus()
self.assertEqual(bus, 1)
@patch('Adafruit_GPIO.Platform.platform_detect', Mock(return_value=Platform.UNKNOWN))
def test_unknown(self):
I2C = safe_import_i2c()
self.assertRaises(RuntimeError, I2C.get_default_bus)
| mit |
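# The patch.dict('sys.modules', ...) idiom used throughout these tests is
# the key trick: it lets Adafruit_GPIO.I2C import 'smbus' on machines where
# the real C extension is absent. A minimal standalone sketch of the same
# idiom ('fakebus' is a hypothetical module name):
from mock import Mock, patch

with patch.dict('sys.modules', {'fakebus': Mock()}):
    import fakebus      # resolves to the Mock, no ImportError raised
    fakebus.SMBus(1)    # the Mock silently records the call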
cg31/tensorflow | tensorflow/python/kernel_tests/check_ops_test.py | 20 | 28413 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tensorflow.ops.check_ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import tensorflow as tf
class AssertProperIterableTest(tf.test.TestCase):
def test_single_tensor_raises(self):
tensor = tf.constant(1)
with self.assertRaisesRegexp(TypeError, "proper"):
tf.assert_proper_iterable(tensor)
def test_single_sparse_tensor_raises(self):
ten = tf.SparseTensor(indices=[[0, 0], [1, 2]], values=[1, 2], shape=[3, 4])
with self.assertRaisesRegexp(TypeError, "proper"):
tf.assert_proper_iterable(ten)
def test_single_ndarray_raises(self):
array = np.array([1, 2, 3])
with self.assertRaisesRegexp(TypeError, "proper"):
tf.assert_proper_iterable(array)
def test_single_string_raises(self):
mystr = "hello"
with self.assertRaisesRegexp(TypeError, "proper"):
tf.assert_proper_iterable(mystr)
def test_non_iterable_object_raises(self):
non_iterable = 1234
with self.assertRaisesRegexp(TypeError, "to be iterable"):
tf.assert_proper_iterable(non_iterable)
def test_list_does_not_raise(self):
list_of_stuff = [tf.constant([11, 22]), tf.constant([1, 2])]
tf.assert_proper_iterable(list_of_stuff)
def test_generator_does_not_raise(self):
generator_of_stuff = (tf.constant([11, 22]), tf.constant([1, 2]))
tf.assert_proper_iterable(generator_of_stuff)
class AssertEqualTest(tf.test.TestCase):
def test_doesnt_raise_when_equal(self):
with self.test_session():
small = tf.constant([1, 2], name="small")
with tf.control_dependencies([tf.assert_equal(small, small)]):
out = tf.identity(small)
out.eval()
def test_raises_when_greater(self):
with self.test_session():
small = tf.constant([1, 2], name="small")
big = tf.constant([3, 4], name="big")
with tf.control_dependencies(
[tf.assert_equal(big, small, message="fail")]):
out = tf.identity(small)
with self.assertRaisesOpError("fail.*big.*small"):
out.eval()
def test_raises_when_less(self):
with self.test_session():
small = tf.constant([3, 1], name="small")
big = tf.constant([4, 2], name="big")
with tf.control_dependencies([tf.assert_equal(small, big)]):
out = tf.identity(small)
with self.assertRaisesOpError("small.*big"):
out.eval()
def test_doesnt_raise_when_equal_and_broadcastable_shapes(self):
with self.test_session():
small = tf.constant([1, 2], name="small")
small_2 = tf.constant([1, 2], name="small_2")
with tf.control_dependencies([tf.assert_equal(small, small_2)]):
out = tf.identity(small)
out.eval()
def test_raises_when_equal_but_non_broadcastable_shapes(self):
with self.test_session():
small = tf.constant([1, 1, 1], name="small")
small_2 = tf.constant([1, 1], name="small_2")
with self.assertRaisesRegexp(ValueError, "must be"):
with tf.control_dependencies([tf.assert_equal(small, small_2)]):
out = tf.identity(small)
out.eval()
def test_doesnt_raise_when_both_empty(self):
with self.test_session():
larry = tf.constant([])
curly = tf.constant([])
with tf.control_dependencies([tf.assert_equal(larry, curly)]):
out = tf.identity(larry)
out.eval()
class AssertLessTest(tf.test.TestCase):
def test_raises_when_equal(self):
with self.test_session():
small = tf.constant([1, 2], name="small")
with tf.control_dependencies(
[tf.assert_less(small, small, message="fail")]):
out = tf.identity(small)
with self.assertRaisesOpError("fail.*small.*small"):
out.eval()
def test_raises_when_greater(self):
with self.test_session():
small = tf.constant([1, 2], name="small")
big = tf.constant([3, 4], name="big")
with tf.control_dependencies([tf.assert_less(big, small)]):
out = tf.identity(small)
with self.assertRaisesOpError("big.*small"):
out.eval()
def test_doesnt_raise_when_less(self):
with self.test_session():
small = tf.constant([3, 1], name="small")
big = tf.constant([4, 2], name="big")
with tf.control_dependencies([tf.assert_less(small, big)]):
out = tf.identity(small)
out.eval()
def test_doesnt_raise_when_less_and_broadcastable_shapes(self):
with self.test_session():
small = tf.constant([1], name="small")
big = tf.constant([3, 2], name="big")
with tf.control_dependencies([tf.assert_less(small, big)]):
out = tf.identity(small)
out.eval()
def test_raises_when_less_but_non_broadcastable_shapes(self):
with self.test_session():
small = tf.constant([1, 1, 1], name="small")
big = tf.constant([3, 2], name="big")
with self.assertRaisesRegexp(ValueError, "must be"):
with tf.control_dependencies([tf.assert_less(small, big)]):
out = tf.identity(small)
out.eval()
def test_doesnt_raise_when_both_empty(self):
with self.test_session():
larry = tf.constant([])
curly = tf.constant([])
with tf.control_dependencies([tf.assert_less(larry, curly)]):
out = tf.identity(larry)
out.eval()
class AssertLessEqualTest(tf.test.TestCase):
def test_doesnt_raise_when_equal(self):
with self.test_session():
small = tf.constant([1, 2], name="small")
with tf.control_dependencies([tf.assert_less_equal(small, small)]):
out = tf.identity(small)
out.eval()
def test_raises_when_greater(self):
with self.test_session():
small = tf.constant([1, 2], name="small")
big = tf.constant([3, 4], name="big")
with tf.control_dependencies(
[tf.assert_less_equal(big, small, message="fail")]):
out = tf.identity(small)
with self.assertRaisesOpError("fail.*big.*small"):
out.eval()
def test_doesnt_raise_when_less_equal(self):
with self.test_session():
small = tf.constant([1, 2], name="small")
big = tf.constant([3, 2], name="big")
with tf.control_dependencies([tf.assert_less_equal(small, big)]):
out = tf.identity(small)
out.eval()
def test_doesnt_raise_when_less_equal_and_broadcastable_shapes(self):
with self.test_session():
small = tf.constant([1], name="small")
big = tf.constant([3, 1], name="big")
with tf.control_dependencies([tf.assert_less_equal(small, big)]):
out = tf.identity(small)
out.eval()
def test_raises_when_less_equal_but_non_broadcastable_shapes(self):
with self.test_session():
small = tf.constant([1, 1, 1], name="small")
big = tf.constant([3, 1], name="big")
with self.assertRaisesRegexp(ValueError, "must be"):
with tf.control_dependencies([tf.assert_less_equal(small, big)]):
out = tf.identity(small)
out.eval()
def test_doesnt_raise_when_both_empty(self):
with self.test_session():
larry = tf.constant([])
curly = tf.constant([])
with tf.control_dependencies([tf.assert_less_equal(larry, curly)]):
out = tf.identity(larry)
out.eval()
class AssertGreaterTest(tf.test.TestCase):
def test_raises_when_equal(self):
with self.test_session():
small = tf.constant([1, 2], name="small")
with tf.control_dependencies(
[tf.assert_greater(small, small, message="fail")]):
out = tf.identity(small)
with self.assertRaisesOpError("fail.*small.*small"):
out.eval()
def test_raises_when_less(self):
with self.test_session():
small = tf.constant([1, 2], name="small")
big = tf.constant([3, 4], name="big")
with tf.control_dependencies([tf.assert_greater(small, big)]):
out = tf.identity(big)
with self.assertRaisesOpError("small.*big"):
out.eval()
def test_doesnt_raise_when_greater(self):
with self.test_session():
small = tf.constant([3, 1], name="small")
big = tf.constant([4, 2], name="big")
with tf.control_dependencies([tf.assert_greater(big, small)]):
out = tf.identity(small)
out.eval()
def test_doesnt_raise_when_greater_and_broadcastable_shapes(self):
with self.test_session():
small = tf.constant([1], name="small")
big = tf.constant([3, 2], name="big")
with tf.control_dependencies([tf.assert_greater(big, small)]):
out = tf.identity(small)
out.eval()
def test_raises_when_greater_but_non_broadcastable_shapes(self):
with self.test_session():
small = tf.constant([1, 1, 1], name="small")
big = tf.constant([3, 2], name="big")
with self.assertRaisesRegexp(ValueError, "must be"):
with tf.control_dependencies([tf.assert_greater(big, small)]):
out = tf.identity(small)
out.eval()
def test_doesnt_raise_when_both_empty(self):
with self.test_session():
larry = tf.constant([])
curly = tf.constant([])
with tf.control_dependencies([tf.assert_greater(larry, curly)]):
out = tf.identity(larry)
out.eval()
class AssertGreaterEqualTest(tf.test.TestCase):
def test_doesnt_raise_when_equal(self):
with self.test_session():
small = tf.constant([1, 2], name="small")
with tf.control_dependencies([tf.assert_greater_equal(small, small)]):
out = tf.identity(small)
out.eval()
def test_raises_when_less(self):
with self.test_session():
small = tf.constant([1, 2], name="small")
big = tf.constant([3, 4], name="big")
with tf.control_dependencies(
[tf.assert_greater_equal(small, big, message="fail")]):
out = tf.identity(small)
with self.assertRaisesOpError("fail.*small.*big"):
out.eval()
def test_doesnt_raise_when_greater_equal(self):
with self.test_session():
small = tf.constant([1, 2], name="small")
big = tf.constant([3, 2], name="big")
with tf.control_dependencies([tf.assert_greater_equal(big, small)]):
out = tf.identity(small)
out.eval()
def test_doesnt_raise_when_greater_equal_and_broadcastable_shapes(self):
with self.test_session():
small = tf.constant([1], name="small")
big = tf.constant([3, 1], name="big")
with tf.control_dependencies([tf.assert_greater_equal(big, small)]):
out = tf.identity(small)
out.eval()
  def test_raises_when_less_equal_but_non_broadcastable_shapes(self):
    with self.test_session():
      small = tf.constant([1, 1, 1], name="small")
      big = tf.constant([3, 1], name="big")
with self.assertRaisesRegexp(ValueError, "Dimensions must be equal"):
with tf.control_dependencies([tf.assert_greater_equal(big, small)]):
out = tf.identity(small)
out.eval()
def test_doesnt_raise_when_both_empty(self):
with self.test_session():
larry = tf.constant([])
curly = tf.constant([])
with tf.control_dependencies([tf.assert_greater_equal(larry, curly)]):
out = tf.identity(larry)
out.eval()
class AssertNegativeTest(tf.test.TestCase):
def test_doesnt_raise_when_negative(self):
with self.test_session():
frank = tf.constant([-1, -2], name="frank")
with tf.control_dependencies([tf.assert_negative(frank)]):
out = tf.identity(frank)
out.eval()
def test_raises_when_positive(self):
with self.test_session():
doug = tf.constant([1, 2], name="doug")
with tf.control_dependencies([tf.assert_negative(doug, message="fail")]):
out = tf.identity(doug)
with self.assertRaisesOpError("fail.*doug"):
out.eval()
def test_raises_when_zero(self):
with self.test_session():
claire = tf.constant([0], name="claire")
with tf.control_dependencies([tf.assert_negative(claire)]):
out = tf.identity(claire)
with self.assertRaisesOpError("claire"):
out.eval()
def test_empty_tensor_doesnt_raise(self):
# A tensor is negative when it satisfies:
# For every element x_i in x, x_i < 0
# and an empty tensor has no elements, so this is trivially satisfied.
# This is standard set theory.
with self.test_session():
empty = tf.constant([], name="empty")
with tf.control_dependencies([tf.assert_negative(empty)]):
out = tf.identity(empty)
out.eval()
class AssertPositiveTest(tf.test.TestCase):
def test_raises_when_negative(self):
with self.test_session():
freddie = tf.constant([-1, -2], name="freddie")
with tf.control_dependencies(
[tf.assert_positive(freddie, message="fail")]):
out = tf.identity(freddie)
with self.assertRaisesOpError("fail.*freddie"):
out.eval()
def test_doesnt_raise_when_positive(self):
with self.test_session():
remmy = tf.constant([1, 2], name="remmy")
with tf.control_dependencies([tf.assert_positive(remmy)]):
out = tf.identity(remmy)
out.eval()
def test_raises_when_zero(self):
with self.test_session():
meechum = tf.constant([0], name="meechum")
with tf.control_dependencies([tf.assert_positive(meechum)]):
out = tf.identity(meechum)
with self.assertRaisesOpError("meechum"):
out.eval()
def test_empty_tensor_doesnt_raise(self):
# A tensor is positive when it satisfies:
# For every element x_i in x, x_i > 0
# and an empty tensor has no elements, so this is trivially satisfied.
# This is standard set theory.
with self.test_session():
empty = tf.constant([], name="empty")
with tf.control_dependencies([tf.assert_positive(empty)]):
out = tf.identity(empty)
out.eval()
class AssertRankTest(tf.test.TestCase):
def test_rank_zero_tensor_raises_if_rank_too_small_static_rank(self):
with self.test_session():
tensor = tf.constant(1, name="my_tensor")
desired_rank = 1
with self.assertRaisesRegexp(
ValueError, "fail.*my_tensor.*must have rank 1"):
with tf.control_dependencies(
[tf.assert_rank(tensor, desired_rank, message="fail")]):
tf.identity(tensor).eval()
def test_rank_zero_tensor_raises_if_rank_too_small_dynamic_rank(self):
with self.test_session():
tensor = tf.placeholder(tf.float32, name="my_tensor")
desired_rank = 1
with tf.control_dependencies(
[tf.assert_rank(tensor, desired_rank, message="fail")]):
with self.assertRaisesOpError("fail.*my_tensor.*rank"):
tf.identity(tensor).eval(feed_dict={tensor: 0})
def test_rank_zero_tensor_doesnt_raise_if_rank_just_right_static_rank(self):
with self.test_session():
tensor = tf.constant(1, name="my_tensor")
desired_rank = 0
with tf.control_dependencies([tf.assert_rank(tensor, desired_rank)]):
tf.identity(tensor).eval()
def test_rank_zero_tensor_doesnt_raise_if_rank_just_right_dynamic_rank(self):
with self.test_session():
tensor = tf.placeholder(tf.float32, name="my_tensor")
desired_rank = 0
with tf.control_dependencies([tf.assert_rank(tensor, desired_rank)]):
tf.identity(tensor).eval(feed_dict={tensor: 0})
def test_rank_one_tensor_raises_if_rank_too_large_static_rank(self):
with self.test_session():
tensor = tf.constant([1, 2], name="my_tensor")
desired_rank = 0
with self.assertRaisesRegexp(ValueError, "my_tensor.*rank"):
with tf.control_dependencies([tf.assert_rank(tensor, desired_rank)]):
tf.identity(tensor).eval()
def test_rank_one_tensor_raises_if_rank_too_large_dynamic_rank(self):
with self.test_session():
tensor = tf.placeholder(tf.float32, name="my_tensor")
desired_rank = 0
with tf.control_dependencies([tf.assert_rank(tensor, desired_rank)]):
with self.assertRaisesOpError("my_tensor.*rank"):
tf.identity(tensor).eval(feed_dict={tensor: [1, 2]})
def test_rank_one_tensor_doesnt_raise_if_rank_just_right_static_rank(self):
with self.test_session():
tensor = tf.constant([1, 2], name="my_tensor")
desired_rank = 1
with tf.control_dependencies([tf.assert_rank(tensor, desired_rank)]):
tf.identity(tensor).eval()
def test_rank_one_tensor_doesnt_raise_if_rank_just_right_dynamic_rank(self):
with self.test_session():
tensor = tf.placeholder(tf.float32, name="my_tensor")
desired_rank = 1
with tf.control_dependencies([tf.assert_rank(tensor, desired_rank)]):
tf.identity(tensor).eval(feed_dict={tensor: [1, 2]})
def test_rank_one_tensor_raises_if_rank_too_small_static_rank(self):
with self.test_session():
tensor = tf.constant([1, 2], name="my_tensor")
desired_rank = 2
with self.assertRaisesRegexp(ValueError, "my_tensor.*rank"):
with tf.control_dependencies([tf.assert_rank(tensor, desired_rank)]):
tf.identity(tensor).eval()
def test_rank_one_tensor_raises_if_rank_too_small_dynamic_rank(self):
with self.test_session():
tensor = tf.placeholder(tf.float32, name="my_tensor")
desired_rank = 2
with tf.control_dependencies([tf.assert_rank(tensor, desired_rank)]):
with self.assertRaisesOpError("my_tensor.*rank"):
tf.identity(tensor).eval(feed_dict={tensor: [1, 2]})
def test_raises_if_rank_is_not_scalar_static(self):
with self.test_session():
tensor = tf.constant([1, 2], name="my_tensor")
with self.assertRaisesRegexp(ValueError, "Rank must be a scalar"):
tf.assert_rank(tensor, np.array([], dtype=np.int32))
def test_raises_if_rank_is_not_scalar_dynamic(self):
with self.test_session():
tensor = tf.constant([1, 2], dtype=tf.float32, name="my_tensor")
rank_tensor = tf.placeholder(tf.int32, name="rank_tensor")
with self.assertRaisesOpError("Rank must be a scalar"):
with tf.control_dependencies([tf.assert_rank(tensor, rank_tensor)]):
tf.identity(tensor).eval(feed_dict={rank_tensor: [1, 2]})
def test_raises_if_rank_is_not_integer_static(self):
with self.test_session():
tensor = tf.constant([1, 2], name="my_tensor")
with self.assertRaisesRegexp(TypeError,
"must be of type <dtype: 'int32'>"):
tf.assert_rank(tensor, .5)
def test_raises_if_rank_is_not_integer_dynamic(self):
with self.test_session():
tensor = tf.constant([1, 2], dtype=tf.float32, name="my_tensor")
rank_tensor = tf.placeholder(tf.float32, name="rank_tensor")
with self.assertRaisesRegexp(TypeError,
"must be of type <dtype: 'int32'>"):
with tf.control_dependencies([tf.assert_rank(tensor, rank_tensor)]):
tf.identity(tensor).eval(feed_dict={rank_tensor: .5})
class AssertRankAtLeastTest(tf.test.TestCase):
def test_rank_zero_tensor_raises_if_rank_too_small_static_rank(self):
with self.test_session():
tensor = tf.constant(1, name="my_tensor")
desired_rank = 1
with self.assertRaisesRegexp(ValueError, "my_tensor.*rank at least 1"):
with tf.control_dependencies([tf.assert_rank_at_least(tensor,
desired_rank)]):
tf.identity(tensor).eval()
def test_rank_zero_tensor_raises_if_rank_too_small_dynamic_rank(self):
with self.test_session():
tensor = tf.placeholder(tf.float32, name="my_tensor")
desired_rank = 1
with tf.control_dependencies([tf.assert_rank_at_least(tensor,
desired_rank)]):
with self.assertRaisesOpError("my_tensor.*rank"):
tf.identity(tensor).eval(feed_dict={tensor: 0})
def test_rank_zero_tensor_doesnt_raise_if_rank_just_right_static_rank(self):
with self.test_session():
tensor = tf.constant(1, name="my_tensor")
desired_rank = 0
with tf.control_dependencies([tf.assert_rank_at_least(tensor,
desired_rank)]):
tf.identity(tensor).eval()
def test_rank_zero_tensor_doesnt_raise_if_rank_just_right_dynamic_rank(self):
with self.test_session():
tensor = tf.placeholder(tf.float32, name="my_tensor")
desired_rank = 0
with tf.control_dependencies([tf.assert_rank_at_least(tensor,
desired_rank)]):
tf.identity(tensor).eval(feed_dict={tensor: 0})
  def test_rank_one_tensor_doesnt_raise_if_rank_too_large_static_rank(self):
with self.test_session():
tensor = tf.constant([1, 2], name="my_tensor")
desired_rank = 0
with tf.control_dependencies([tf.assert_rank_at_least(tensor,
desired_rank)]):
tf.identity(tensor).eval()
  def test_rank_one_tensor_doesnt_raise_if_rank_too_large_dynamic_rank(self):
with self.test_session():
tensor = tf.placeholder(tf.float32, name="my_tensor")
desired_rank = 0
with tf.control_dependencies([tf.assert_rank_at_least(tensor,
desired_rank)]):
tf.identity(tensor).eval(feed_dict={tensor: [1, 2]})
def test_rank_one_tensor_doesnt_raise_if_rank_just_right_static_rank(self):
with self.test_session():
tensor = tf.constant([1, 2], name="my_tensor")
desired_rank = 1
with tf.control_dependencies([tf.assert_rank_at_least(tensor,
desired_rank)]):
tf.identity(tensor).eval()
def test_rank_one_tensor_doesnt_raise_if_rank_just_right_dynamic_rank(self):
with self.test_session():
tensor = tf.placeholder(tf.float32, name="my_tensor")
desired_rank = 1
with tf.control_dependencies([tf.assert_rank_at_least(tensor,
desired_rank)]):
tf.identity(tensor).eval(feed_dict={tensor: [1, 2]})
def test_rank_one_tensor_raises_if_rank_too_small_static_rank(self):
with self.test_session():
tensor = tf.constant([1, 2], name="my_tensor")
desired_rank = 2
with self.assertRaisesRegexp(ValueError, "my_tensor.*rank"):
with tf.control_dependencies([tf.assert_rank_at_least(tensor,
desired_rank)]):
tf.identity(tensor).eval()
def test_rank_one_tensor_raises_if_rank_too_small_dynamic_rank(self):
with self.test_session():
tensor = tf.placeholder(tf.float32, name="my_tensor")
desired_rank = 2
with tf.control_dependencies([tf.assert_rank_at_least(tensor,
desired_rank)]):
with self.assertRaisesOpError("my_tensor.*rank"):
tf.identity(tensor).eval(feed_dict={tensor: [1, 2]})
class AssertNonNegativeTest(tf.test.TestCase):
def test_raises_when_negative(self):
with self.test_session():
zoe = tf.constant([-1, -2], name="zoe")
with tf.control_dependencies([tf.assert_non_negative(zoe)]):
out = tf.identity(zoe)
with self.assertRaisesOpError("zoe"):
out.eval()
def test_doesnt_raise_when_zero_and_positive(self):
with self.test_session():
lucas = tf.constant([0, 2], name="lucas")
with tf.control_dependencies([tf.assert_non_negative(lucas)]):
out = tf.identity(lucas)
out.eval()
def test_empty_tensor_doesnt_raise(self):
# A tensor is non-negative when it satisfies:
# For every element x_i in x, x_i >= 0
# and an empty tensor has no elements, so this is trivially satisfied.
# This is standard set theory.
with self.test_session():
empty = tf.constant([], name="empty")
with tf.control_dependencies([tf.assert_non_negative(empty)]):
out = tf.identity(empty)
out.eval()
class AssertNonPositiveTest(tf.test.TestCase):
def test_doesnt_raise_when_zero_and_negative(self):
with self.test_session():
tom = tf.constant([0, -2], name="tom")
with tf.control_dependencies([tf.assert_non_positive(tom)]):
out = tf.identity(tom)
out.eval()
def test_raises_when_positive(self):
with self.test_session():
rachel = tf.constant([0, 2], name="rachel")
with tf.control_dependencies([tf.assert_non_positive(rachel)]):
out = tf.identity(rachel)
with self.assertRaisesOpError("rachel"):
out.eval()
def test_empty_tensor_doesnt_raise(self):
# A tensor is non-positive when it satisfies:
# For every element x_i in x, x_i <= 0
# and an empty tensor has no elements, so this is trivially satisfied.
# This is standard set theory.
with self.test_session():
empty = tf.constant([], name="empty")
with tf.control_dependencies([tf.assert_non_positive(empty)]):
out = tf.identity(empty)
out.eval()
class AssertIntegerTest(tf.test.TestCase):
def test_doesnt_raise_when_integer(self):
with self.test_session():
integers = tf.constant([1, 2], name="integers")
with tf.control_dependencies([tf.assert_integer(integers)]):
out = tf.identity(integers)
out.eval()
def test_raises_when_float(self):
with self.test_session():
floats = tf.constant([1.0, 2.0], name="floats")
with self.assertRaisesRegexp(TypeError, "Expected.*integer"):
tf.assert_integer(floats)
class IsStrictlyIncreasingTest(tf.test.TestCase):
def test_constant_tensor_is_not_strictly_increasing(self):
with self.test_session():
self.assertFalse(tf.is_strictly_increasing([1, 1, 1]).eval())
def test_decreasing_tensor_is_not_strictly_increasing(self):
with self.test_session():
self.assertFalse(tf.is_strictly_increasing([1, 0, -1]).eval())
def test_2d_decreasing_tensor_is_not_strictly_increasing(self):
with self.test_session():
self.assertFalse(tf.is_strictly_increasing([[1, 3], [2, 4]]).eval())
def test_increasing_tensor_is_increasing(self):
with self.test_session():
self.assertTrue(tf.is_strictly_increasing([1, 2, 3]).eval())
def test_increasing_rank_two_tensor(self):
with self.test_session():
self.assertTrue(tf.is_strictly_increasing([[-1, 2], [3, 4]]).eval())
def test_tensor_with_one_element_is_strictly_increasing(self):
with self.test_session():
self.assertTrue(tf.is_strictly_increasing([1]).eval())
def test_empty_tensor_is_strictly_increasing(self):
with self.test_session():
self.assertTrue(tf.is_strictly_increasing([]).eval())
class IsNonDecreasingTest(tf.test.TestCase):
def test_constant_tensor_is_non_decreasing(self):
with self.test_session():
self.assertTrue(tf.is_non_decreasing([1, 1, 1]).eval())
def test_decreasing_tensor_is_not_non_decreasing(self):
with self.test_session():
self.assertFalse(tf.is_non_decreasing([3, 2, 1]).eval())
def test_2d_decreasing_tensor_is_not_non_decreasing(self):
with self.test_session():
self.assertFalse(tf.is_non_decreasing([[1, 3], [2, 4]]).eval())
def test_increasing_rank_one_tensor_is_non_decreasing(self):
with self.test_session():
self.assertTrue(tf.is_non_decreasing([1, 2, 3]).eval())
def test_increasing_rank_two_tensor(self):
with self.test_session():
self.assertTrue(tf.is_non_decreasing([[-1, 2], [3, 3]]).eval())
def test_tensor_with_one_element_is_non_decreasing(self):
with self.test_session():
self.assertTrue(tf.is_non_decreasing([1]).eval())
def test_empty_tensor_is_non_decreasing(self):
with self.test_session():
self.assertTrue(tf.is_non_decreasing([]).eval())
if __name__ == "__main__":
tf.test.main()
| apache-2.0 |
dvitme/odoomrp-wip | stock_move_purchase_price/__openerp__.py | 27 | 1368 |
# -*- encoding: utf-8 -*-
##############################################################################
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see http://www.gnu.org/licenses/.
#
##############################################################################
{
"name": "Stock move purchase price",
"version": "1.0",
"depends": ["base", "stock", "purchase"],
"author": "OdooMRP team,"
"AvanzOSC,"
"Serv. Tecnol. Avanzados - Pedro M. Baeza",
"contributors": ["Mikel Arregi <mikelarregi@avanzosc.es>"],
"category": "purchase",
"description": """
    Adds a price column to backorder lines
""",
'data': ['views/stock_picking_line_info.xml'],
"installable": True,
"auto_install": False,
}
| agpl-3.0 |
BassantMorsi/finderApp | lib/python2.7/site-packages/wheel/test/test_basic.py | 472 | 6405 | """
Basic wheel tests.
"""
import os
import pkg_resources
import json
import sys
from pkg_resources import resource_filename
import wheel.util
import wheel.tool
from wheel import egg2wheel
from wheel.install import WheelFile
from zipfile import ZipFile
from shutil import rmtree
test_distributions = ("complex-dist", "simple.dist", "headers.dist")
def teardown_module():
"""Delete eggs/wheels created by tests."""
base = pkg_resources.resource_filename('wheel.test', '')
for dist in test_distributions:
for subdir in ('build', 'dist'):
try:
rmtree(os.path.join(base, dist, subdir))
except OSError:
pass
def setup_module():
build_wheel()
build_egg()
def build_wheel():
"""Build wheels from test distributions."""
for dist in test_distributions:
pwd = os.path.abspath(os.curdir)
distdir = pkg_resources.resource_filename('wheel.test', dist)
os.chdir(distdir)
try:
sys.argv = ['', 'bdist_wheel']
exec(compile(open('setup.py').read(), 'setup.py', 'exec'))
finally:
os.chdir(pwd)
def build_egg():
"""Build eggs from test distributions."""
for dist in test_distributions:
pwd = os.path.abspath(os.curdir)
distdir = pkg_resources.resource_filename('wheel.test', dist)
os.chdir(distdir)
try:
sys.argv = ['', 'bdist_egg']
exec(compile(open('setup.py').read(), 'setup.py', 'exec'))
finally:
os.chdir(pwd)
def test_findable():
"""Make sure pkg_resources can find us."""
assert pkg_resources.working_set.by_key['wheel'].version
def test_egg_re():
"""Make sure egg_info_re matches."""
egg_names = open(pkg_resources.resource_filename('wheel', 'eggnames.txt'))
for line in egg_names:
line = line.strip()
if not line:
continue
assert egg2wheel.egg_info_re.match(line), line
def test_compatibility_tags():
"""Test compatibilty tags are working."""
wf = WheelFile("package-1.0.0-cp32.cp33-noabi-noarch.whl")
assert (list(wf.compatibility_tags) ==
[('cp32', 'noabi', 'noarch'), ('cp33', 'noabi', 'noarch')])
assert (wf.arity == 2)
wf2 = WheelFile("package-1.0.0-1st-cp33-noabi-noarch.whl")
wf2_info = wf2.parsed_filename.groupdict()
assert wf2_info['build'] == '1st', wf2_info
def test_convert_egg():
base = pkg_resources.resource_filename('wheel.test', '')
for dist in test_distributions:
distdir = os.path.join(base, dist, 'dist')
eggs = [e for e in os.listdir(distdir) if e.endswith('.egg')]
wheel.tool.convert(eggs, distdir, verbose=False)
def test_unpack():
"""
Make sure 'wheel unpack' works.
This also verifies the integrity of our testing wheel files.
"""
for dist in test_distributions:
distdir = pkg_resources.resource_filename('wheel.test',
os.path.join(dist, 'dist'))
for wheelfile in (w for w in os.listdir(distdir) if w.endswith('.whl')):
wheel.tool.unpack(os.path.join(distdir, wheelfile), distdir)
def test_no_scripts():
"""Make sure entry point scripts are not generated."""
dist = "complex-dist"
basedir = pkg_resources.resource_filename('wheel.test', dist)
for (dirname, subdirs, filenames) in os.walk(basedir):
for filename in filenames:
if filename.endswith('.whl'):
whl = ZipFile(os.path.join(dirname, filename))
for entry in whl.infolist():
assert not '.data/scripts/' in entry.filename
def test_pydist():
"""Make sure pydist.json exists and validates against our schema."""
# XXX this test may need manual cleanup of older wheels
import jsonschema
def open_json(filename):
return json.loads(open(filename, 'rb').read().decode('utf-8'))
pymeta_schema = open_json(resource_filename('wheel.test',
'pydist-schema.json'))
valid = 0
for dist in ("simple.dist", "complex-dist"):
basedir = pkg_resources.resource_filename('wheel.test', dist)
for (dirname, subdirs, filenames) in os.walk(basedir):
for filename in filenames:
if filename.endswith('.whl'):
whl = ZipFile(os.path.join(dirname, filename))
for entry in whl.infolist():
if entry.filename.endswith('/metadata.json'):
pymeta = json.loads(whl.read(entry).decode('utf-8'))
jsonschema.validate(pymeta, pymeta_schema)
valid += 1
assert valid > 0, "No metadata.json found"
def test_util():
"""Test functions in util.py."""
for i in range(10):
before = b'*' * i
encoded = wheel.util.urlsafe_b64encode(before)
assert not encoded.endswith(b'=')
after = wheel.util.urlsafe_b64decode(encoded)
assert before == after
def test_pick_best():
"""Test the wheel ranking algorithm."""
def get_tags(res):
info = res[-1].parsed_filename.groupdict()
return info['pyver'], info['abi'], info['plat']
cand_tags = [('py27', 'noabi', 'noarch'), ('py26', 'noabi', 'noarch'),
('cp27', 'noabi', 'linux_i686'),
('cp26', 'noabi', 'linux_i686'),
('cp27', 'noabi', 'linux_x86_64'),
('cp26', 'noabi', 'linux_x86_64')]
cand_wheels = [WheelFile('testpkg-1.0-%s-%s-%s.whl' % t)
for t in cand_tags]
supported = [('cp27', 'noabi', 'linux_i686'), ('py27', 'noabi', 'noarch')]
supported2 = [('cp27', 'noabi', 'linux_i686'), ('py27', 'noabi', 'noarch'),
('cp26', 'noabi', 'linux_i686'), ('py26', 'noabi', 'noarch')]
supported3 = [('cp26', 'noabi', 'linux_i686'), ('py26', 'noabi', 'noarch'),
('cp27', 'noabi', 'linux_i686'), ('py27', 'noabi', 'noarch')]
for supp in (supported, supported2, supported3):
context = lambda: list(supp)
for wheel in cand_wheels:
wheel.context = context
best = max(cand_wheels)
assert list(best.tags)[0] == supp[0]
# assert_equal(
# list(map(get_tags, pick_best(cand_wheels, supp, top=False))), supp)
| mit |
foobarbazblarg/stayclean | stayclean-2017-october/display.py | 26 | 24079 | #!/usr/bin/python
# TODO: issues with new oauth2 stuff. Keep using older version of Python for now.
# #!/usr/bin/env python
from participantCollection import ParticipantCollection
import re
import datetime
import pyperclip
# Edit Me!
currentMonthTotalDays = 31
currentMonthIndex = datetime.date.today().month
currentMonthPenultimateDayIndex = currentMonthTotalDays - 1
currentMonthName = {1: 'January', 2: 'February', 3: 'March', 4: 'April', 5: 'May', 6: 'June', 7: 'July', 8: 'August', 9: 'September', 10: 'October', 11: 'November', 12: 'December'}[currentMonthIndex]
nextMonthIndex = currentMonthIndex % 12 + 1
nextMonthName = {1: 'January', 2: 'February', 3: 'March', 4: 'April', 5: 'May', 6: 'June', 7: 'July', 8: 'August', 9: 'September', 10: 'October', 11: 'November', 12: 'December'}[nextMonthIndex]
currentDayOfMonthIndex = datetime.date.today().day
# TODO: testing...
# currentDayOfMonthIndex = 31
currentDayOfMonthName = {1: 'first', 2: 'second', 3: 'third', 4: 'fourth', 5: 'fifth', 6: 'sixth', 7: 'seventh', 8: 'eighth', 9: 'ninth', 10: 'tenth', 11: 'eleventh', 12: 'twelfth', 13: 'thirteenth', 14: 'fourteenth', 15: 'fifteenth', 16: 'sixteenth', 17: 'seventeenth', 18: 'eighteenth', 19: 'nineteenth', 20: 'twentieth', 21: 'twenty-first', 22: 'twenty-second', 23: 'twenty-third', 24: 'twenty-fourth', 25: 'twenty-fifth', 26: 'twenty-sixth', 27: 'twenty-seventh', 28: 'twenty-eighth', 29: 'twenty-ninth', 30: 'thirtieth', 31: 'thirty-first'}[currentDayOfMonthIndex]
currentDayOfWeekName = {0: 'Monday', 1: 'Tuesday', 2: 'Wednesday', 3: 'Thursday', 4: 'Friday', 5: 'Saturday', 6: 'Sunday'}[datetime.date.today().weekday()]
participants = ParticipantCollection()
numberStillIn = participants.sizeOfParticipantsWhoAreStillIn()
initialNumber = participants.size()
percentStillIn = int(round(100 * numberStillIn / initialNumber, 0))
# print "There are currently **" + str(numberStillIn) + " out of " + str(initialNumber) +"** original participants. That's **" + str(int(round(100*numberStillIn/initialNumber,0))) + "%** Here is the list of participants still with the challenge:\n"
def stringToPrintLegacy():
answer = "There are currently **NUMBER_STILL_IN out of INITIAL_NUMBER** original participants. That's **PERCENT_STILL_IN%**. Here is the list of participants still with the challenge:\n\n"
answer = re.sub('NUMBER_STILL_IN', str(numberStillIn), answer)
answer = re.sub('INITIAL_NUMBER', str(initialNumber), answer)
answer = re.sub('PERCENT_STILL_IN', str(percentStillIn), answer)
for participant in participants.participantsWhoAreStillIn():
answer += "/u/" + participant.name
if not participant.hasCheckedIn:
answer += " ~"
answer += "\n\n"
return answer
def templateForParticipants():
answer = ""
for participant in participants.participantsWhoAreStillIn():
answer += "/u/" + participant.name
if not participant.hasCheckedIn:
answer += " ~"
answer += "\n\n"
return answer
def templateForParticipantsOnFinalDay():
answer = ""
answer += "These participants have checked in at least once in the last 15 days:\n"
answer += "\n"
for participant in participants.participantsWhoAreStillInAndHaveCheckedIn():
answer += "/u/" + participant.name + "\n"
answer += "\n"
answer += "These participants have not reported a relapse, so they are still in the running, but **if they do not check in by the end of today, they will be removed from the list, and will not be considered victorious**:\n"
answer += "\n"
for participant in participants.participantsWhoAreStillInAndHaveNotCheckedIn():
answer += "/u/" + participant.name + " ~\n"
answer += "\n"
return answer
def templateFor1():
print '1\n\n'
answer = ""
print "============================================================="
answer += "**Daily news:** This is CURRENT_DAY_OF_WEEK_NAME, CURRENT_MONTH_NAME CURRENT_DAY_OF_MONTH_INDEX, the CURRENT_DAY_OF_MONTH_NAME day of the Stay Clean CURRENT_MONTH_NAME challenge. ~~We will no longer be accepting new signups.~~ Good news! We will be be accepting late signups for the next 3 days. If you forgot to sign up for the CURRENT_MONTH_NAME challenge, just leave a \"sign me up\" comment below, and I'll add you. Best of luck to everyone here!\n"
answer += "\n"
answer += "Here's how this thing works:\n"
answer += "\n"
answer += "- At the end of this post is a list of people who have signed up for the challenge, and who are still in the running. That means that they have not needed to reset because of a relapse or slip.\n"
answer += "- Please check in with the group in the comments as often as you want! Feel free to share thoughts, feelings, experiences, progress, wisdom, encouragement and whatever else!\n"
answer += "- **IMPORTANT: if you relapse, please post a comment to that effect here** and I will remove your name from the list. We will not judge you or shame you, we have all been there.\n"
answer += '- If you have a "~" after your name, you have yet to check in on any update threads. If it is still there by CURRENT_MONTH_NAME 15th, you will be removed from the list, in order to keep the numbers as realistic as possible.\n'
answer += "- ~~We will not be accepting any new participants~~, but even if you're not on the list, please feel free to check in in the update threads anyway! Also, stay tuned to catch the NEXT_MONTH_NAME thread!\n"
answer += "\n"
answer += "Good luck!\n"
answer += "\n"
answer += "For a chart of relapse data, check out [this Google Spreadsheet](https://docs.google.com/spreadsheets/d/1fnRMkDqFAJpsWHaZt8duMkZIPBCtUy0IfGFmlIfvOII/edit#gid=0).\n"
answer += "\n"
answer += "Here are our **INITIAL_NUMBER** original participants:\n\n"
answer += templateForParticipants()
print "============================================================="
return answer
def templateFor2():
print '2\n\n'
answer = ""
answer += "**Daily news:** This is CURRENT_DAY_OF_WEEK_NAME, CURRENT_MONTH_NAME CURRENT_DAY_OF_MONTH_INDEX, the CURRENT_DAY_OF_MONTH_NAME day of the Stay Clean CURRENT_MONTH_NAME challenge. This is the second day of our 3 day late-signup grace period. If you forgot to sign up for the CURRENT_MONTH_NAME challenge, just leave a \"sign me up\" comment below, and I'll add you.\n"
answer += "\n"
answer += "Guidelines:\n"
answer += "\n"
answer += "- At the end of this post is a list of people who have signed up for the challenge, and who are still in the running. That means that they have not needed to reset because of a relapse or slip.\n"
answer += "- Please check in with the group in the comments as often as you want! Feel free to share thoughts, feelings, experiences, progress, wisdom, encouragement and whatever else!\n"
answer += "- **IMPORTANT: if you relapse, please post a comment to that effect here** and I will remove your name from the list. We will not judge you or shame you, we have all been there.\n"
answer += '- If you have a "~" after your name, you have yet to check in on any update threads. If it is still there by CURRENT_MONTH_NAME 15th, you will be removed from the list, in order to keep the numbers as realistic as possible.\n'
answer += "- ~~We will not be accepting any new participants~~, but even if you're not on the list, please feel free to check in in the update threads anyway! Also, stay tuned to catch the NEXT_MONTH_NAME thread!\n"
answer += "\n"
answer += "Good luck!\n"
answer += "\n"
answer += "For a chart of relapse data, check out [this Google Spreadsheet](https://docs.google.com/spreadsheets/d/1fnRMkDqFAJpsWHaZt8duMkZIPBCtUy0IfGFmlIfvOII/edit#gid=0).\n"
answer += "\n"
answer += "There are currently **NUMBER_STILL_IN out of INITIAL_NUMBER** original participants. That's **PERCENT_STILL_IN%**. Here is the list of participants still with the challenge:\n\n"
answer += templateForParticipants()
return answer
def templateFor3():
print '3\n\n'
answer = ""
answer += "**Daily news:** This is CURRENT_DAY_OF_WEEK_NAME, CURRENT_MONTH_NAME CURRENT_DAY_OF_MONTH_INDEX, the CURRENT_DAY_OF_MONTH_NAME day of the Stay Clean CURRENT_MONTH_NAME challenge. This is the last day of our 3 day late-signup grace period. If you forgot to sign up for the CURRENT_MONTH_NAME challenge, just leave a \"sign me up\" comment below, and I'll add you. After today, further signup requests will be silently ignored.\n"
answer += "\n"
answer += "Guidelines:\n"
answer += "\n"
answer += "- At the end of this post is a list of people who have signed up for the challenge, and who are still in the running. That means that they have not needed to reset because of a relapse or slip.\n"
answer += "- Please check in with the group in the comments as often as you want! Feel free to share thoughts, feelings, experiences, progress, wisdom, encouragement and whatever else!\n"
answer += "- **IMPORTANT: if you relapse, please post a comment to that effect here** and I will remove your name from the list. We will not judge you or shame you, we have all been there.\n"
answer += '- If you have a "~" after your name, you have yet to check in on any update threads. If it is still there by CURRENT_MONTH_NAME 15th, you will be removed from the list, in order to keep the numbers as realistic as possible.\n'
answer += "- ~~We will not be accepting any new participants~~, but even if you're not on the list, please feel free to check in in the update threads anyway! Also, stay tuned to catch the NEXT_MONTH_NAME thread!\n"
answer += "\n"
answer += "Good luck!\n"
answer += "\n"
answer += "For a chart of relapse data, check out [this Google Spreadsheet](https://docs.google.com/spreadsheets/d/1fnRMkDqFAJpsWHaZt8duMkZIPBCtUy0IfGFmlIfvOII/edit#gid=0).\n"
answer += "\n"
answer += "There are currently **NUMBER_STILL_IN out of INITIAL_NUMBER** original participants. That's **PERCENT_STILL_IN%**. Here is the list of participants still with the challenge:\n\n"
answer += templateForParticipants()
return answer
def templateFor4():
print '4\n\n'
answer = ""
answer += "**Daily news:** This is CURRENT_DAY_OF_WEEK_NAME, CURRENT_MONTH_NAME CURRENT_DAY_OF_MONTH_INDEX, the CURRENT_DAY_OF_MONTH_NAME day of the Stay Clean CURRENT_MONTH_NAME challenge. Our 3 day late-signup grace period is now over. If you forgot to sign up, it's too late for CURRENT_MONTH_NAME, but feel free to leave comments here anyway, and we'll see you in NEXT_MONTH_NAME.\n"
answer += "\n"
answer += "Guidelines:\n"
answer += "\n"
answer += "- At the end of this post is a list of people who have signed up for the challenge, and who are still in the running. That means that they have not needed to reset because of a relapse or slip.\n"
answer += "- Please check in with the group in the comments as often as you want! Feel free to share thoughts, feelings, experiences, progress, wisdom, encouragement and whatever else!\n"
answer += "- **IMPORTANT: if you relapse, please post a comment to that effect here** and I will remove your name from the list. We will not judge you or shame you, we have all been there.\n"
answer += '- If you have a "~" after your name, you have yet to check in on any update threads. If it is still there by CURRENT_MONTH_NAME 15th, you will be removed from the list, in order to keep the numbers as realistic as possible.\n'
answer += "- We will not be accepting any new participants, but even if you're not on the list, please feel free to check in in the update threads anyway! Also, stay tuned to catch the NEXT_MONTH_NAME thread!\n"
answer += "\n"
answer += "Good luck!\n"
answer += "\n"
answer += "For a chart of relapse data, check out [this Google Spreadsheet](https://docs.google.com/spreadsheets/d/1fnRMkDqFAJpsWHaZt8duMkZIPBCtUy0IfGFmlIfvOII/edit#gid=0).\n"
answer += "\n"
answer += "There are currently **NUMBER_STILL_IN out of INITIAL_NUMBER** original participants. That's **PERCENT_STILL_IN%**. Here is the list of participants still with the challenge:\n\n"
answer += templateForParticipants()
return answer
def templateFor5to9():
print '5 to 9\n\n'
answer = ""
answer += "**Daily news:** This is CURRENT_DAY_OF_WEEK_NAME, CURRENT_MONTH_NAME CURRENT_DAY_OF_MONTH_INDEX, the CURRENT_DAY_OF_MONTH_NAME day of the Stay Clean CURRENT_MONTH_NAME challenge. Keep fighting the good fight!\n"
answer += "\n"
answer += "Guidelines:\n"
answer += "\n"
answer += "- At the end of this post is a list of people who have signed up for the challenge, and who are still in the running. That means that they have not needed to reset because of a relapse or slip.\n"
answer += "- Please check in with the group in the comments as often as you want! Feel free to share thoughts, feelings, experiences, progress, wisdom, encouragement and whatever else!\n"
answer += "- **IMPORTANT: if you relapse, please post a comment to that effect here** and I will remove your name from the list. We will not judge you or shame you, we have all been there.\n"
answer += '- If you have a "~" after your name, you have yet to check in on any update threads. If it is still there by CURRENT_MONTH_NAME 15th, you will be removed from the list, in order to keep the numbers as realistic as possible.\n'
answer += "- We will not be accepting any new participants, but even if you're not on the list, please feel free to check in in the update threads anyway! Also, stay tuned to catch the NEXT_MONTH_NAME thread!\n"
answer += "\n"
answer += "Good luck!\n"
answer += "\n"
answer += "For a chart of relapse data, check out [this Google Spreadsheet](https://docs.google.com/spreadsheets/d/1fnRMkDqFAJpsWHaZt8duMkZIPBCtUy0IfGFmlIfvOII/edit#gid=0).\n"
answer += "\n"
answer += "There are currently **NUMBER_STILL_IN out of INITIAL_NUMBER** original participants. That's **PERCENT_STILL_IN%**. Here is the list of participants still with the challenge:\n\n"
answer += templateForParticipants()
return answer
def templateFor10to14():
print '10 to 14\n\n'
answer = ""
answer += "**Daily news:** This is CURRENT_DAY_OF_WEEK_NAME, CURRENT_MONTH_NAME CURRENT_DAY_OF_MONTH_INDEX, the CURRENT_DAY_OF_MONTH_NAME day of the Stay Clean CURRENT_MONTH_NAME challenge. Keep fighting the good fight!\n"
answer += "\n"
answer += "**THE COUNTDOWN: Attention everyone!** You have " + str(15 - currentDayOfMonthIndex) + " days to make an update comment (if you haven't already) to be counted as an active participant! **Otherwise your name will be REMOVED from the list** on CURRENT_MONTH_INDEX/15!!\n"
answer += "\n"
answer += "Guidelines:\n"
answer += "\n"
answer += "- At the end of this post is a list of people who have signed up for the challenge, and who are still in the running. That means that they have not needed to reset because of a relapse or slip.\n"
answer += "- Please check in with the group in the comments as often as you want! Feel free to share thoughts, feelings, experiences, progress, wisdom, encouragement and whatever else!\n"
answer += "- **IMPORTANT: if you relapse, please post a comment to that effect here** and I will remove your name from the list. We will not judge you or shame you, we have all been there.\n"
answer += '- If you have a "~" after your name, you have yet to check in on any update threads. If it is still there by CURRENT_MONTH_NAME 15th, you will be removed from the list, in order to keep the numbers as realistic as possible.\n'
answer += "- We will not be accepting any new participants, but even if you're not on the list, please feel free to check in in the update threads anyway! Also, stay tuned to catch the NEXT_MONTH_NAME thread!\n"
answer += "\n"
answer += "Good luck!\n"
answer += "\n"
answer += "For a chart of relapse data, check out [this Google Spreadsheet](https://docs.google.com/spreadsheets/d/1fnRMkDqFAJpsWHaZt8duMkZIPBCtUy0IfGFmlIfvOII/edit#gid=0).\n"
answer += "\n"
answer += "There are currently **NUMBER_STILL_IN out of INITIAL_NUMBER** original participants. That's **PERCENT_STILL_IN%**. Here is the list of participants still with the challenge:\n\n"
answer += templateForParticipants()
return answer
def templateFor15():
print '15\n\n'
answer = ""
answer += "**Daily news:** This is CURRENT_DAY_OF_WEEK_NAME, CURRENT_MONTH_NAME CURRENT_DAY_OF_MONTH_INDEX, the CURRENT_DAY_OF_MONTH_NAME day of the Stay Clean CURRENT_MONTH_NAME challenge. Keep fighting the good fight!\n"
answer += "\n"
answer += "**THIS IS YOUR LAST DAY TO CHECK IN** (if you haven't already) **BEFORE YOUR NAME IS REMOVED FROM THE LIST!** Check in by posting a brief comment.\n"
answer += "\n"
answer += "Guidelines:\n"
answer += "\n"
answer += "- At the end of this post is a list of people who have signed up for the challenge, and who are still in the running. That means that they have not needed to reset because of a relapse or slip.\n"
answer += "- Please check in with the group in the comments as often as you want! Feel free to share thoughts, feelings, experiences, progress, wisdom, encouragement and whatever else!\n"
answer += "- **IMPORTANT: if you relapse, please post a comment to that effect here** and I will remove your name from the list. We will not judge you or shame you, we have all been there.\n"
answer += '- If you have a "~" after your name, you have yet to check in on any update threads. If it is still there by CURRENT_MONTH_NAME 15th, you will be removed from the list, in order to keep the numbers as realistic as possible.\n'
answer += "- We will not be accepting any new participants, but even if you're not on the list, please feel free to check in in the update threads anyway! Also, stay tuned to catch the NEXT_MONTH_NAME thread!\n"
answer += "\n"
answer += "Good luck!\n"
answer += "\n"
answer += "For a chart of relapse data, check out [this Google Spreadsheet](https://docs.google.com/spreadsheets/d/1fnRMkDqFAJpsWHaZt8duMkZIPBCtUy0IfGFmlIfvOII/edit#gid=0).\n"
answer += "\n"
answer += "There are currently **NUMBER_STILL_IN out of INITIAL_NUMBER** original participants. That's **PERCENT_STILL_IN%**. Here is the list of participants still with the challenge:\n\n"
answer += templateForParticipants()
return answer
def templateFor16toPenultimate():
print '16 to penultimate\n\n'
answer = ""
answer += "**Daily news:** This is CURRENT_DAY_OF_WEEK_NAME, CURRENT_MONTH_NAME CURRENT_DAY_OF_MONTH_INDEX, the CURRENT_DAY_OF_MONTH_NAME day of the Stay Clean CURRENT_MONTH_NAME challenge. Keep fighting the good fight!\n"
answer += "\n"
answer += "If you think you should still be on this list but aren't, you probably got removed in the great purge of CURRENT_MONTH_NAME 15th because you never checked in. However, if you let me know you're still with it I will re-add you.\n"
answer += "\n"
answer += "Guidelines:\n"
answer += "\n"
answer += "- At the end of this post is a list of people who have signed up for the challenge, and who are still in the running. That means that they have not needed to reset because of a relapse or slip.\n"
answer += "- Please check in with the group in the comments as often as you want! Feel free to share thoughts, feelings, experiences, progress, wisdom, encouragement and whatever else!\n"
answer += "- **IMPORTANT: if you relapse, please post a comment to that effect here** and I will remove your name from the list. We will not judge you or shame you, we have all been there.\n"
answer += '- If you have a "~" after your name, you have yet to check in on any update threads since CURRENT_MONTH_NAME 15. If it is still there by CURRENT_MONTH_NAME CURRENT_MONTH_TOTAL_DAYS, you will be removed from the list, in order to keep the numbers as realistic as possible.\n'
answer += "- We will not be accepting any new participants, but even if you're not on the list, please feel free to check in in the update threads anyway! Also, stay tuned to catch the NEXT_MONTH_NAME thread!\n"
answer += "\n"
answer += "Good luck!\n"
answer += "\n"
answer += "For a chart of relapse data, check out [this Google Spreadsheet](https://docs.google.com/spreadsheets/d/1fnRMkDqFAJpsWHaZt8duMkZIPBCtUy0IfGFmlIfvOII/edit#gid=0).\n"
answer += "\n"
answer += "There are currently **NUMBER_STILL_IN out of INITIAL_NUMBER** original participants. That's **PERCENT_STILL_IN%**. Here is the list of participants still with the challenge:\n\n"
answer += templateForParticipants()
return answer
def templateForUltimate():
print 'Ultimate\n\n'
answer = ""
answer += "**Daily news:** This is CURRENT_DAY_OF_WEEK_NAME, CURRENT_MONTH_NAME CURRENT_DAY_OF_MONTH_INDEX, the last day of the Stay Clean CURRENT_MONTH_NAME challenge. This is it, folks, the day we've been waiting for... the final day of the challenge. I'll be making a congratulatory post tomorrow to honor the victors. I'm really proud of everyone who signed up for this challenge. Quitting porn is difficult, especially in an era where porn is always as close as a few keystrokes, and triggers are absolutely everywhere. Everybody who gave it their best shot deserves to take a minute right now to feel good about themselves.\n"
answer += "\n"
answer += "For a chart of relapse data, check out [this Google Spreadsheet](https://docs.google.com/spreadsheets/d/1fnRMkDqFAJpsWHaZt8duMkZIPBCtUy0IfGFmlIfvOII/edit#gid=0).\n"
answer += "\n"
# TODO: need to do the part where it lists the checked in and non-checked in participants separately.
answer += "There are currently **NUMBER_STILL_IN out of INITIAL_NUMBER** original participants. That's **PERCENT_STILL_IN%**.\n\n"
answer += templateForParticipantsOnFinalDay()
return answer
def templateToUse():
if currentDayOfMonthIndex == 1:
return templateFor1()
elif currentDayOfMonthIndex == 2:
return templateFor2()
elif currentDayOfMonthIndex == 3:
return templateFor3()
elif currentDayOfMonthIndex == 4:
return templateFor4()
elif 5 <= currentDayOfMonthIndex <= 9:
return templateFor5to9()
elif 10 <= currentDayOfMonthIndex <= 14:
return templateFor10to14()
    elif currentDayOfMonthIndex == 15:
return templateFor15()
elif (currentDayOfMonthIndex >= 16) and (currentDayOfMonthIndex <= currentMonthPenultimateDayIndex):
return templateFor16toPenultimate()
else:
return templateForUltimate()
def stringToPrint():
answer = templateToUse()
answer = re.sub('NUMBER_STILL_IN', str(numberStillIn), answer)
answer = re.sub('INITIAL_NUMBER', str(initialNumber), answer)
answer = re.sub('PERCENT_STILL_IN', str(percentStillIn), answer)
answer = re.sub('CURRENT_MONTH_INDEX', str(currentMonthIndex), answer)
answer = re.sub('CURRENT_MONTH_TOTAL_DAYS', str(currentMonthTotalDays), answer)
answer = re.sub('CURRENT_MONTH_PENULTIMATE_DAY_INDEX', str(currentMonthPenultimateDayIndex), answer)
answer = re.sub('CURRENT_MONTH_NAME', currentMonthName, answer)
answer = re.sub('NEXT_MONTH_INDEX', str(nextMonthIndex), answer)
answer = re.sub('NEXT_MONTH_NAME', nextMonthName, answer)
answer = re.sub('CURRENT_DAY_OF_MONTH_INDEX', str(currentDayOfMonthIndex), answer)
answer = re.sub('CURRENT_DAY_OF_MONTH_NAME', currentDayOfMonthName, answer)
answer = re.sub('CURRENT_DAY_OF_WEEK_NAME', currentDayOfWeekName, answer)
return answer
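# For example (illustrative): on October 3rd the placeholder
# CURRENT_DAY_OF_MONTH_NAME in the chosen template is substituted with
# "third", and CURRENT_DAY_OF_WEEK_NAME with the current weekday's name.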
outputString = stringToPrint()
print "============================================================="
print outputString
print "============================================================="
pyperclip.copy(outputString)
| mit |
Weihonghao/ECM | Vpy34/lib/python3.5/site-packages/tensorflow/contrib/ffmpeg/ops/gen_encode_audio_op_py.py | 2 | 2869 | """Python wrappers around Brain.
This file is MACHINE GENERATED! Do not edit.
"""
import collections
from google.protobuf import text_format
from tensorflow.core.framework import op_def_pb2
# Needed to trigger the call to _set_call_cpp_shape_fn.
from tensorflow.python.framework import common_shapes
from tensorflow.python.framework import op_def_registry
from tensorflow.python.framework import ops
from tensorflow.python.framework import op_def_library
_encode_audio_outputs = ["contents"]
def encode_audio(sampled_audio, file_format, samples_per_second,
bits_per_second=None, name=None):
r"""Processes a `Tensor` containing sampled audio with the number of channels
and length of the audio specified by the dimensions of the `Tensor`. The
audio is converted into a string that, when saved to disk, will be equivalent
to the audio in the specified audio format.
The input audio has one row of the tensor for each channel in the audio file.
Each channel contains audio samples starting at the beginning of the audio and
having `1/samples_per_second` time between them. The output file will contain
all of the audio channels contained in the tensor.
Args:
sampled_audio: A `Tensor` of type `float32`.
A rank 2 tensor containing all tracks of the audio. Dimension 0
is time and dimension 1 is the channel.
file_format: A `string`.
A string describing the audio file format. This must be "wav".
samples_per_second: An `int`.
The number of samples per second that the audio should have.
bits_per_second: An optional `int`. Defaults to `192000`.
The approximate bitrate of the encoded audio file. This is
ignored by the "wav" file format.
name: A name for the operation (optional).
Returns:
A `Tensor` of type `string`. The binary audio file contents.
"""
result = _op_def_lib.apply_op("EncodeAudio", sampled_audio=sampled_audio,
file_format=file_format,
samples_per_second=samples_per_second,
bits_per_second=bits_per_second, name=name)
return result
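# Illustrative usage sketch (not part of the generated wrapper; assumes the
# contrib ffmpeg package is importable, that the op is evaluated inside a
# session, and that `waveform` stands for any rank-2 float32 audio tensor):
#
#   from tensorflow.contrib import ffmpeg
#   contents = ffmpeg.encode_audio(sampled_audio=waveform,
#                                  file_format="wav",
#                                  samples_per_second=44100)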
def _InitOpDefLibrary():
op_list = op_def_pb2.OpList()
text_format.Merge(_InitOpDefLibrary.op_list_ascii, op_list)
op_def_registry.register_op_list(op_list)
op_def_lib = op_def_library.OpDefLibrary()
op_def_lib.add_op_list(op_list)
return op_def_lib
_InitOpDefLibrary.op_list_ascii = """op {
name: "EncodeAudio"
input_arg {
name: "sampled_audio"
type: DT_FLOAT
}
output_arg {
name: "contents"
type: DT_STRING
}
attr {
name: "file_format"
type: "string"
}
attr {
name: "samples_per_second"
type: "int"
}
attr {
name: "bits_per_second"
type: "int"
default_value {
i: 192000
}
}
}
"""
_op_def_lib = _InitOpDefLibrary()
| agpl-3.0 |
hellsgod/hells-Core-N6P | tools/perf/util/setup.py | 2079 | 1438 | #!/usr/bin/python2
from distutils.core import setup, Extension
from os import getenv
from distutils.command.build_ext import build_ext as _build_ext
from distutils.command.install_lib import install_lib as _install_lib
class build_ext(_build_ext):
def finalize_options(self):
_build_ext.finalize_options(self)
self.build_lib = build_lib
self.build_temp = build_tmp
class install_lib(_install_lib):
def finalize_options(self):
_install_lib.finalize_options(self)
self.build_dir = build_lib
cflags = ['-fno-strict-aliasing', '-Wno-write-strings']
cflags += getenv('CFLAGS', '').split()
build_lib = getenv('PYTHON_EXTBUILD_LIB')
build_tmp = getenv('PYTHON_EXTBUILD_TMP')
libtraceevent = getenv('LIBTRACEEVENT')
liblk = getenv('LIBLK')
ext_sources = [f.strip() for f in file('util/python-ext-sources')
if len(f.strip()) > 0 and f[0] != '#']
perf = Extension('perf',
sources = ext_sources,
include_dirs = ['util/include'],
extra_compile_args = cflags,
extra_objects = [libtraceevent, liblk],
)
setup(name='perf',
version='0.1',
description='Interface with the Linux profiling infrastructure',
author='Arnaldo Carvalho de Melo',
author_email='acme@redhat.com',
license='GPLv2',
url='http://perf.wiki.kernel.org',
ext_modules=[perf],
cmdclass={'build_ext': build_ext, 'install_lib': install_lib})
| gpl-2.0 |
crespyl/pcre2 | maint/MultiStage2.py | 1 | 23077 | #! /usr/bin/python
# Multistage table builder
# (c) Peter Kankowski, 2008
##############################################################################
# This script was submitted to the PCRE project by Peter Kankowski as part of
# the upgrading of Unicode property support. The new code speeds up property
# matching many times. The script is for the use of PCRE maintainers, to
# generate the pcre_ucd.c file that contains a digested form of the Unicode
# data tables.
#
# The script has now been upgraded to Python 3 for PCRE2, and should be run in
# the maint subdirectory, using the command
#
# [python3] ./MultiStage2.py >../src/pcre2_ucd.c
#
# It requires four Unicode data tables, DerivedGeneralCategory.txt,
# GraphemeBreakProperty.txt, Scripts.txt, and CaseFolding.txt, to be in the
# Unicode.tables subdirectory. The first of these is found in the "extracted"
# subdirectory of the Unicode database (UCD) on the Unicode web site; the
# second is in the "auxiliary" subdirectory; the other two are directly in the
# UCD directory.
#
# Minor modifications made to this script:
# Added #! line at start
# Removed tabs
# Made it work with Python 2.4 by rewriting two statements that needed 2.5
# Consequent code tidy
# Adjusted data file names to take from the Unicode.tables directory
# Adjusted global table names by prefixing _pcre_.
# Commented out stuff relating to the casefolding table, which isn't used;
# removed completely in 2012.
# Corrected size calculation
# Add #ifndef SUPPORT_UCP to use dummy tables when no UCP support is needed.
# Update for PCRE2: name changes, and SUPPORT_UCP is abolished.
#
# Major modifications made to this script:
# Added code to add a grapheme break property field to records.
#
# Added code to search for sets of more than two characters that must match
# each other caselessly. A new table is output containing these sets, and
# offsets into the table are added to the main output records. This new
# code scans CaseFolding.txt instead of UnicodeData.txt.
#
# Update for Python3:
# . Processed with 2to3, but that didn't fix everything
# . Changed string.strip to str.strip
# . Added encoding='utf-8' to the open() call
# . Inserted 'int' before blocksize/ELEMS_PER_LINE because an int is
# required and the result of the division is a float
#
# The main tables generated by this script are used by macros defined in
# pcre2_internal.h. They look up Unicode character properties using short
# sequences of code that contains no branches, which makes for greater speed.
#
# Conceptually, there is a table of records (of type ucd_record), containing a
# script number, character type, grapheme break type, offset to caseless
# matching set, and offset to the character's other case for every character.
# However, a real table covering all Unicode characters would be far too big.
# It can be efficiently compressed by observing that many characters have the
# same record, and many blocks of characters (taking 128 characters in a block)
# have the same set of records as other blocks. This leads to a 2-stage lookup
# process.
#
# This script constructs four tables. The ucd_caseless_sets table contains
# lists of characters that all match each other caselessly. Each list is
# in order, and is terminated by NOTACHAR (0xffffffff), which is larger than
# any valid character. The first list is empty; this is used for characters
# that are not part of any list.
#
# The ucd_records table contains one instance of every unique record that is
# required. The ucd_stage1 table is indexed by a character's block number, and
# yields what is in effect a "virtual" block number. The ucd_stage2 table is a
# table of "virtual" blocks; each block is indexed by the offset of a character
# within its own block, and the result is the offset of the required record.
#
# Example: lowercase "a" (U+0061) is in block 0
# lookup 0 in stage1 table yields 0
# lookup 97 in the first table in stage2 yields 16
# record 17 is { 33, 5, 11, 0, -32 }
# 33 = ucp_Latin => Latin script
# 5 = ucp_Ll => Lower case letter
# 11 = ucp_gbOther => Grapheme break property "Other"
# 0 => not part of a caseless set
# -32 => Other case is U+0041
#
# Almost all lowercase Latin characters resolve to the same record. One or two
# are different because they are part of a multi-character caseless set (for
# example, k, K and the Kelvin symbol are such a set).
#
# Example: hiragana letter A (U+3042) is in block 96 (0x60)
# lookup 96 in stage1 table yields 88
# lookup 66 in the 88th table in stage2 yields 467
# record 470 is { 26, 7, 11, 0, 0 }
# 26 = ucp_Hiragana => Hiragana script
# 7 = ucp_Lo => Other letter
# 11 = ucp_gbOther => Grapheme break property "Other"
# 0 => not part of a caseless set
# 0 => No other case
#
# In these examples, no other blocks resolve to the same "virtual" block, as it
# happens, but plenty of other blocks do share "virtual" blocks.
#
# There is a fourth table, maintained by hand, which translates from the
# individual character types such as ucp_Cc to the general types like ucp_C.
#
# Philip Hazel, 03 July 2008
#
# 01-March-2010: Updated list of scripts for Unicode 5.2.0
# 30-April-2011: Updated list of scripts for Unicode 6.0.0
# July-2012: Updated list of scripts for Unicode 6.1.0
# 20-August-2012: Added scan of GraphemeBreakProperty.txt and added a new
# field in the record to hold the value. Luckily, the
# structure had a hole in it, so the resulting table is
# not much bigger than before.
# 18-September-2012: Added code for multiple caseless sets. This uses the
# final hole in the structure.
# 30-September-2012: Added RegionalIndicator break property from Unicode 6.2.0
# 13-May-2014: Updated for PCRE2
# 03-June-2014: Updated for Python 3
# 20-June-2014: Updated for Unicode 7.0.0
# 12-August-2014: Updated to put Unicode version into the file
##############################################################################
import re
import string
import sys
MAX_UNICODE = 0x110000
NOTACHAR = 0xffffffff
# Parse a line of Scripts.txt, GraphemeBreakProperty.txt or DerivedGeneralCategory.txt
def make_get_names(enum):
return lambda chardata: enum.index(chardata[1])
# Parse a line of CaseFolding.txt
def get_other_case(chardata):
if chardata[1] == 'C' or chardata[1] == 'S':
return int(chardata[2], 16) - int(chardata[0], 16)
return 0
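# For example (illustrative): the CaseFolding.txt line
#   0041; C; 0061; # LATIN CAPITAL LETTER A
# arrives here as chardata = ['0041', 'C', '0061', ...], giving an offset of
# 0x61 - 0x41 = 32 to the other case.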
# Read the whole table in memory, setting/checking the Unicode version
def read_table(file_name, get_value, default_value):
global unicode_version
f = re.match(r'^[^/]+/([^.]+)\.txt$', file_name)
file_base = f.group(1)
version_pat = r"^# " + re.escape(file_base) + r"-(\d+\.\d+\.\d+)\.txt$"
file = open(file_name, 'r', encoding='utf-8')
f = re.match(version_pat, file.readline())
version = f.group(1)
if unicode_version == "":
unicode_version = version
elif unicode_version != version:
print("WARNING: Unicode version differs in %s", file_name, file=sys.stderr)
table = [default_value] * MAX_UNICODE
for line in file:
line = re.sub(r'#.*', '', line)
chardata = list(map(str.strip, line.split(';')))
if len(chardata) <= 1:
continue
value = get_value(chardata)
m = re.match(r'([0-9a-fA-F]+)(\.\.([0-9a-fA-F]+))?$', chardata[0])
char = int(m.group(1), 16)
if m.group(3) is None:
last = char
else:
last = int(m.group(3), 16)
for i in range(char, last + 1):
# It is important not to overwrite a previously set
# value because in the CaseFolding file there are lines
# to be ignored (returning the default value of 0)
# which often come after a line which has already set
# data.
if table[i] == default_value:
table[i] = value
file.close()
return table
# Get the smallest possible C language type for the values
def get_type_size(table):
type_size = [("uint8_t", 1), ("uint16_t", 2), ("uint32_t", 4),
("signed char", 1), ("pcre_int16", 2), ("pcre_int32", 4)]
limits = [(0, 255), (0, 65535), (0, 4294967295),
(-128, 127), (-32768, 32767), (-2147483648, 2147483647)]
minval = min(table)
maxval = max(table)
for num, (minlimit, maxlimit) in enumerate(limits):
if minlimit <= minval and maxval <= maxlimit:
return type_size[num]
else:
raise OverflowError("Too large to fit into C types")
def get_tables_size(*tables):
total_size = 0
for table in tables:
type, size = get_type_size(table)
total_size += size * len(table)
return total_size
# Compress the table into the two stages
def compress_table(table, block_size):
blocks = {} # Dictionary for finding identical blocks
stage1 = [] # Stage 1 table contains block numbers (indices into stage 2 table)
stage2 = [] # Stage 2 table contains the blocks with property values
table = tuple(table)
for i in range(0, len(table), block_size):
block = table[i:i+block_size]
start = blocks.get(block)
if start is None:
# Allocate a new block
start = len(stage2) // block_size  # integer division: block numbers must stay ints
stage2 += block
blocks[block] = start
stage1.append(start)
return stage1, stage2
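# A tiny worked example (hedged, with an unrealistically small block_size of
# 2): the table [7, 7, 9, 9, 7, 7] compresses to stage1 = [0, 1, 0] and
# stage2 = [7, 7, 9, 9], because the third block repeats the first; element i
# is then recovered as stage2[stage1[i // 2] * 2 + (i % 2)].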
# Print a table
def print_table(table, table_name, block_size = None):
type, size = get_type_size(table)
ELEMS_PER_LINE = 16
s = "const %s %s[] = { /* %d bytes" % (type, table_name, size * len(table))
if block_size:
s += ", block = %d" % block_size
print(s + " */")
table = tuple(table)
if block_size is None:
fmt = "%3d," * ELEMS_PER_LINE + " /* U+%04X */"
mult = MAX_UNICODE // len(table)  # must be an int: it feeds the %04X format below
for i in range(0, len(table), ELEMS_PER_LINE):
print(fmt % (table[i:i+ELEMS_PER_LINE] + (i * mult,)))
else:
if block_size > ELEMS_PER_LINE:
el = ELEMS_PER_LINE
else:
el = block_size
fmt = "%3d," * el + "\n"
if block_size > ELEMS_PER_LINE:
fmt = fmt * int(block_size / ELEMS_PER_LINE)
for i in range(0, len(table), block_size):
print(("/* block %d */\n" + fmt) % ((i / block_size,) + table[i:i+block_size]))
print("};\n")
# Extract the unique combinations of properties into records
def combine_tables(*tables):
records = {}
index = []
for t in zip(*tables):
i = records.get(t)
if i is None:
i = records[t] = len(records)
index.append(i)
return index, records
def get_record_size_struct(records):
size = 0
structure = '/* When recompiling tables with a new Unicode version, please check the\n' + \
'types in this structure definition from pcre2_internal.h (the actual\n' + \
'field names will be different):\n\ntypedef struct {\n'
for i in range(len(records[0])):
record_slice = [record[i] for record in records]
slice_type, slice_size = get_type_size(record_slice)
# add padding: round up to the nearest power of slice_size
size = (size + slice_size - 1) & -slice_size
size += slice_size
structure += '%s property_%d;\n' % (slice_type, i)
# round up to the first item of the next structure in array
record_slice = [record[0] for record in records]
slice_type, slice_size = get_type_size(record_slice)
size = (size + slice_size - 1) & -slice_size
structure += '} ucd_record;\n*/\n\n'
return size, structure
def test_record_size():
tests = [ \
( [(3,), (6,), (6,), (1,)], 1 ), \
( [(300,), (600,), (600,), (100,)], 2 ), \
( [(25, 3), (6, 6), (34, 6), (68, 1)], 2 ), \
( [(300, 3), (6, 6), (340, 6), (690, 1)], 4 ), \
( [(3, 300), (6, 6), (6, 340), (1, 690)], 4 ), \
( [(300, 300), (6, 6), (6, 340), (1, 690)], 4 ), \
( [(3, 100000), (6, 6), (6, 123456), (1, 690)], 8 ), \
( [(100000, 300), (6, 6), (123456, 6), (1, 690)], 8 ), \
]
for test in tests:
size, struct = get_record_size_struct(test[0])
assert(size == test[1])
#print struct
def print_records(records, record_size):
print('const ucd_record PRIV(ucd_records)[] = { ' + \
'/* %d bytes, record size %d */' % (len(records) * record_size, record_size))
records = list(zip(list(records.keys()), list(records.values())))
records.sort(key = lambda x: x[1])
for i, record in enumerate(records):
print((' {' + '%6d, ' * len(record[0]) + '}, /* %3d */') % (record[0] + (i,)))
print('};\n')
script_names = ['Arabic', 'Armenian', 'Bengali', 'Bopomofo', 'Braille', 'Buginese', 'Buhid', 'Canadian_Aboriginal', \
'Cherokee', 'Common', 'Coptic', 'Cypriot', 'Cyrillic', 'Deseret', 'Devanagari', 'Ethiopic', 'Georgian', \
'Glagolitic', 'Gothic', 'Greek', 'Gujarati', 'Gurmukhi', 'Han', 'Hangul', 'Hanunoo', 'Hebrew', 'Hiragana', \
'Inherited', 'Kannada', 'Katakana', 'Kharoshthi', 'Khmer', 'Lao', 'Latin', 'Limbu', 'Linear_B', 'Malayalam', \
'Mongolian', 'Myanmar', 'New_Tai_Lue', 'Ogham', 'Old_Italic', 'Old_Persian', 'Oriya', 'Osmanya', 'Runic', \
'Shavian', 'Sinhala', 'Syloti_Nagri', 'Syriac', 'Tagalog', 'Tagbanwa', 'Tai_Le', 'Tamil', 'Telugu', 'Thaana', \
'Thai', 'Tibetan', 'Tifinagh', 'Ugaritic', 'Yi', \
# New for Unicode 5.0
'Balinese', 'Cuneiform', 'Nko', 'Phags_Pa', 'Phoenician', \
# New for Unicode 5.1
'Carian', 'Cham', 'Kayah_Li', 'Lepcha', 'Lycian', 'Lydian', 'Ol_Chiki', 'Rejang', 'Saurashtra', 'Sundanese', 'Vai', \
# New for Unicode 5.2
'Avestan', 'Bamum', 'Egyptian_Hieroglyphs', 'Imperial_Aramaic', \
'Inscriptional_Pahlavi', 'Inscriptional_Parthian', \
'Javanese', 'Kaithi', 'Lisu', 'Meetei_Mayek', \
'Old_South_Arabian', 'Old_Turkic', 'Samaritan', 'Tai_Tham', 'Tai_Viet', \
# New for Unicode 6.0.0
'Batak', 'Brahmi', 'Mandaic', \
# New for Unicode 6.1.0
'Chakma', 'Meroitic_Cursive', 'Meroitic_Hieroglyphs', 'Miao', 'Sharada', 'Sora_Sompeng', 'Takri',
# New for Unicode 7.0.0
'Bassa_Vah', 'Caucasian_Albanian', 'Duployan', 'Elbasan', 'Grantha', 'Khojki', 'Khudawadi',
'Linear_A', 'Mahajani', 'Manichaean', 'Mende_Kikakui', 'Modi', 'Mro', 'Nabataean',
'Old_North_Arabian', 'Old_Permic', 'Pahawh_Hmong', 'Palmyrene', 'Psalter_Pahlavi',
'Pau_Cin_Hau', 'Siddham', 'Tirhuta', 'Warang_Citi'
]
category_names = ['Cc', 'Cf', 'Cn', 'Co', 'Cs', 'Ll', 'Lm', 'Lo', 'Lt', 'Lu',
'Mc', 'Me', 'Mn', 'Nd', 'Nl', 'No', 'Pc', 'Pd', 'Pe', 'Pf', 'Pi', 'Po', 'Ps',
'Sc', 'Sk', 'Sm', 'So', 'Zl', 'Zp', 'Zs' ]
break_property_names = ['CR', 'LF', 'Control', 'Extend', 'Prepend',
'SpacingMark', 'L', 'V', 'T', 'LV', 'LVT', 'Regional_Indicator', 'Other' ]
test_record_size()
unicode_version = ""
script = read_table('Unicode.tables/Scripts.txt', make_get_names(script_names), script_names.index('Common'))
category = read_table('Unicode.tables/DerivedGeneralCategory.txt', make_get_names(category_names), category_names.index('Cn'))
break_props = read_table('Unicode.tables/GraphemeBreakProperty.txt', make_get_names(break_property_names), break_property_names.index('Other'))
other_case = read_table('Unicode.tables/CaseFolding.txt', get_other_case, 0)
# This block of code was added by PH in September 2012. I am not a Python
# programmer, so the style is probably dreadful, but it does the job. It scans
# the other_case table to find sets of more than two characters that must all
# match each other caselessly. Later in this script a table of these sets is
# written out. However, we have to do this work here in order to compute the
# offsets in the table that are inserted into the main table.
# The CaseFolding.txt file lists pairs, but the common logic for reading data
# sets only one value, so first we go through the table and set "return"
# offsets for those that are not already set.
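# For example, 'k' (U+006B), 'K' (U+004B) and the Kelvin sign (U+212A) all
# fold together, so they form one caseless set: each of the three characters
# ends up with the same non-zero offset into the ucd_caseless_sets table,
# whose entries list all three code points followed by a NOTACHAR terminator.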
for c in range(0x10ffff):
if other_case[c] != 0 and other_case[c + other_case[c]] == 0:
other_case[c + other_case[c]] = -other_case[c]
# Now scan again and create equivalence sets.
sets = []
for c in range(0x10ffff):
o = c + other_case[c]
# Trigger when this character's other case does not point back here. We
# now have three characters that are case-equivalent.
if other_case[o] != -other_case[c]:
t = o + other_case[o]
# Scan the existing sets to see if any of the three characters are already
# part of a set. If so, unite the existing set with the new set.
appended = 0
for s in sets:
found = 0
for x in s:
if x == c or x == o or x == t:
found = 1
# Add new characters to an existing set
if found:
found = 0
for y in [c, o, t]:
for x in s:
if x == y:
found = 1
if not found:
s.append(y)
appended = 1
# If we have not added to an existing set, create a new one.
if not appended:
sets.append([c, o, t])
# End of loop looking for caseless sets.
# Now scan the sets and set appropriate offsets for the characters.
caseless_offsets = [0] * MAX_UNICODE
offset = 1
for s in sets:
for x in s:
caseless_offsets[x] = offset
offset += len(s) + 1
# End of block of code for creating offsets for caseless matching sets.
# Combine the tables
table, records = combine_tables(script, category, break_props,
caseless_offsets, other_case)
record_size, record_struct = get_record_size_struct(list(records.keys()))
# Find the optimum block size for the two-stage table
min_size = sys.maxsize
for block_size in [2 ** i for i in range(5,10)]:
size = len(records) * record_size
stage1, stage2 = compress_table(table, block_size)
size += get_tables_size(stage1, stage2)
#print "/* block size %5d => %5d bytes */" % (block_size, size)
if size < min_size:
min_size = size
min_stage1, min_stage2 = stage1, stage2
min_block_size = block_size
print("/* This module is generated by the maint/MultiStage2.py script.")
print("Do not modify it by hand. Instead modify the script and run it")
print("to regenerate this code.")
print()
print("As well as being part of the PCRE2 library, this module is #included")
print("by the pcre2test program, which redefines the PRIV macro to change")
print("table names from _pcre2_xxx to xxxx, thereby avoiding name clashes")
print("with the library. At present, just one of these tables is actually")
print("needed. */")
print()
print("#ifndef PCRE2_PCRE2TEST")
print()
print("#ifdef HAVE_CONFIG_H")
print("#include \"config.h\"")
print("#endif")
print()
print("#include \"pcre2_internal.h\"")
print()
print("#endif /* PCRE2_PCRE2TEST */")
print()
print("/* Unicode character database. */")
print("/* This file was autogenerated by the MultiStage2.py script. */")
print("/* Total size: %d bytes, block size: %d. */" % (min_size, min_block_size))
print()
print("/* The tables herein are needed only when UCP support is built,")
print("and in PCRE2 that happens automatically with UTF support.")
print("This module should not be referenced otherwise, so")
print("it should not matter whether it is compiled or not. However")
print("a comment was received about space saving - maybe the guy linked")
print("all the modules rather than using a library - so we include a")
print("condition to cut out the tables when not needed. But don't leave")
print("a totally empty module because some compilers barf at that.")
print("Instead, just supply small dummy tables. */")
print()
print("#ifndef SUPPORT_UNICODE")
print("const ucd_record PRIV(ucd_records)[] = {{0,0,0,0,0 }};")
print("const uint8_t PRIV(ucd_stage1)[] = {0};")
print("const uint16_t PRIV(ucd_stage2)[] = {0};")
print("const uint32_t PRIV(ucd_caseless_sets)[] = {0};")
print("#else")
print()
print("const char *PRIV(unicode_version) = \"{}\";".format(unicode_version))
print()
print(record_struct)
# --- Added by PH: output the table of caseless character sets ---
print("const uint32_t PRIV(ucd_caseless_sets)[] = {")
print(" NOTACHAR,")
for s in sets:
s = sorted(s)
for x in s:
print(' 0x%04x,' % x, end=' ')
print(' NOTACHAR,')
print('};')
print()
# ------
print("/* When #included in pcre2test, we don't need this large table. */")
print()
print("#ifndef PCRE2_PCRE2TEST")
print()
print_records(records, record_size)
print_table(min_stage1, 'PRIV(ucd_stage1)')
print_table(min_stage2, 'PRIV(ucd_stage2)', min_block_size)
print("#if UCD_BLOCK_SIZE != %d" % min_block_size)
print("#error Please correct UCD_BLOCK_SIZE in pcre2_internal.h")
print("#endif")
print("#endif /* SUPPORT_UNICODE */")
print()
print("#endif /* PCRE2_PCRE2TEST */")
"""
# Three-stage tables:
# Find the optimum block size for 3-stage table
min_size = sys.maxint
for stage3_block in [2 ** i for i in range(2,6)]:
stage_i, stage3 = compress_table(table, stage3_block)
for stage2_block in [2 ** i for i in range(5,10)]:
size = len(records) * 4
stage1, stage2 = compress_table(stage_i, stage2_block)
size += get_tables_size(stage1, stage2, stage3)
# print "/* %5d / %3d => %5d bytes */" % (stage2_block, stage3_block, size)
if size < min_size:
min_size = size
min_stage1, min_stage2, min_stage3 = stage1, stage2, stage3
min_stage2_block, min_stage3_block = stage2_block, stage3_block
print "/* Total size: %d bytes" % min_size */
print_records(records)
print_table(min_stage1, 'ucd_stage1')
print_table(min_stage2, 'ucd_stage2', min_stage2_block)
print_table(min_stage3, 'ucd_stage3', min_stage3_block)
"""
| gpl-3.0 |
svn2github/libtorrent-1_0_x | test/http.py | 6 | 8075 | # -*- coding: cp1252 -*-
# <PythonProxy.py>
#
#Copyright (c) <2009> <Fábio Domingues - fnds3000 in gmail.com>
#
#Permission is hereby granted, free of charge, to any person
#obtaining a copy of this software and associated documentation
#files (the "Software"), to deal in the Software without
#restriction, including without limitation the rights to use,
#copy, modify, merge, publish, distribute, sublicense, and/or sell
#copies of the Software, and to permit persons to whom the
#Software is furnished to do so, subject to the following
#conditions:
#
#The above copyright notice and this permission notice shall be
#included in all copies or substantial portions of the Software.
#
#THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
#EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
#OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
#NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
#HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
#WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
#FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
#OTHER DEALINGS IN THE SOFTWARE.
"""\
Copyright (c) <2009> <Fábio Domingues - fnds3000 in gmail.com> <MIT Licence>
**************************************
*** Python Proxy - A Fast HTTP proxy ***
**************************************
Neste momento este proxy é um Elie Proxy.
Suporta os métodos HTTP:
- OPTIONS;
- GET;
- HEAD;
- POST;
- PUT;
- DELETE;
- TRACE;
- CONENCT.
Suporta:
- Conexões dos cliente em IPv4 ou IPv6;
- Conexões ao alvo em IPv4 e IPv6;
- Conexões todo o tipo de transmissão de dados TCP (CONNECT tunneling),
p.e. ligações SSL, como é o caso do HTTPS.
A fazer:
- Verificar se o input vindo do cliente está correcto;
- Enviar os devidos HTTP erros se não, ou simplesmente quebrar a ligação;
- Criar um gestor de erros;
- Criar ficheiro log de erros;
- Colocar excepções nos sítios onde é previsível a ocorrência de erros,
p.e.sockets e ficheiros;
- Rever tudo e melhorar a estrutura do programar e colocar nomes adequados nas
variáveis e métodos;
- Comentar o programa decentemente;
- Doc Strings.
Funcionalidades futuras:
- Adiconar a funcionalidade de proxy anónimo e transparente;
- Suportar FTP?.
(!) Atenção o que se segue só tem efeito em conexões não CONNECT, para estas o
proxy é sempre Elite.
Qual a diferença entre um proxy Elite, Anónimo e Transparente?
- Um proxy elite é totalmente anónimo, o servidor que o recebe não consegue ter
conhecimento da existência do proxy e não recebe o endereço IP do cliente;
- Quando é usado um proxy anónimo o servidor sabe que o cliente está a usar um
proxy mas não sabe o endereço IP do cliente;
É enviado o cabeçalho HTTP "Proxy-agent".
- Um proxy transparente fornece ao servidor o IP do cliente e um informação que
se está a usar um proxy.
São enviados os cabeçalhos HTTP "Proxy-agent" e "HTTP_X_FORWARDED_FOR".
"""
import socket, thread, select, sys, base64, time, errno
__version__ = '0.1.0 Draft 1'
BUFLEN = 8192
VERSION = 'Python Proxy/'+__version__
HTTPVER = 'HTTP/1.1'
username = None
password = None
class ConnectionHandler:
def __init__(self, connection, address, timeout):
self.client = connection
self.client_buffer = ''
self.timeout = timeout
self.method, self.path, self.protocol = self.get_base_header()
global username
global password
if username != None:
auth = base64.b64encode(username + ':' + password)
if not 'Proxy-Authorization: Basic ' + auth in self.client_buffer:
print 'failed authentication: %s' % self.client_buffer
self.client.send(HTTPVER+' 401 Authentication Failed\n'+
'Proxy-agent: %s\n\n'%VERSION)
self.client.close()
return
try:
if self.method=='CONNECT':
self.method_CONNECT()
elif self.method in ('OPTIONS', 'GET', 'HEAD', 'POST', 'PUT',
'DELETE', 'TRACE'):
self.method_others()
except:
try:
self.client.send(HTTPVER+' 502 Connection failed\n'+
'Proxy-agent: %s\n\n'%VERSION)
except Exception, e:
print e
self.client.close()
return
self.client.close()
self.target.close()
def get_base_header(self):
retries = 0
while 1:
try:
self.client_buffer += self.client.recv(BUFLEN)
except socket.error, e:
err = e.args[0]
if (err == errno.EAGAIN or err == errno.EWOULDBLOCK) and retries < 20:
time.sleep(0.5)
retries += 1
continue
raise e
end = self.client_buffer.find('\r\n\r\n')
if end!=-1:
break
line_end = self.client_buffer.find('\n')
print '%s' % self.client_buffer[:line_end]  # debug
data = (self.client_buffer[:line_end+1]).split()
self.client_buffer = self.client_buffer[line_end+1:]
return data
def method_CONNECT(self):
self._connect_target(self.path)
self.client.send(HTTPVER+' 200 Connection established\n'+
'Proxy-agent: %s\n\n'%VERSION)
self.client_buffer = ''
self._read_write()
def method_others(self):
self.path = self.path[7:]
i = self.path.find('/')
host = self.path[:i]
path = self.path[i:]
self._connect_target(host)
self.target.send('%s %s %s\n'%(self.method, path, self.protocol)+
self.client_buffer)
self.client_buffer = ''
self._read_write()
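# For illustration: a proxied request line such as
# "GET http://example.com/index.html HTTP/1.1" is handled by method_others(),
# which strips the 7-character "http://" prefix and splits the rest into
# host "example.com" and path "/index.html" before forwarding.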
def _connect_target(self, host):
i = host.find(':')
if i!=-1:
port = int(host[i+1:])
host = host[:i]
else:
port = 80
(soc_family, _, _, _, address) = socket.getaddrinfo(host, port)[0]
self.target = socket.socket(soc_family)
self.target.connect(address)
def _read_write(self):
time_out_max = self.timeout/3
socs = [self.client, self.target]
count = 0
while 1:
count += 1
(recv, _, error) = select.select(socs, [], socs, 3)
if error:
break
if recv:
for in_ in recv:
data = in_.recv(BUFLEN)
if in_ is self.client:
out = self.target
else:
out = self.client
if data:
out.send(data)
count = 0
if count == time_out_max:
break
def start_server(host='localhost', port=8080, IPv6=False, timeout=100,
handler=ConnectionHandler):
if IPv6==True:
soc_type=socket.AF_INET6
else:
soc_type=socket.AF_INET
soc = socket.socket(soc_type)
soc.settimeout(120)
print "Serving on %s:%d."%(host, port)#debug
soc.bind((host, port))
soc.listen(0)
while 1:
thread.start_new_thread(handler, soc.accept()+(timeout,))
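# Once the server is running it can be exercised with any HTTP client that
# supports proxies, for example (hedged, assuming the default port below):
#   curl -x http://localhost:8080 http://example.com/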
if __name__ == '__main__':
listen_port = 8080
i = 1
while i < len(sys.argv):
if sys.argv[i] == '--port':
listen_port = int(sys.argv[i+1])
i += 1
elif sys.argv[i] == '--username':
username = sys.argv[i+1]
i += 1
elif sys.argv[i] == '--password':
password = sys.argv[i+1]
i += 1
else:
if sys.argv[i] != '--help': print('unknown option "%s"' % sys.argv[i])
print('usage: http.py [--port <listen-port>]')
sys.exit(1)
i += 1
start_server(port=listen_port)
| bsd-3-clause |
ghickman/django | django/contrib/admin/options.py | 3 | 82383 | from __future__ import unicode_literals
import copy
import json
import operator
from collections import OrderedDict
from functools import partial, reduce, update_wrapper
from django import forms
from django.conf import settings
from django.contrib import messages
from django.contrib.admin import helpers, widgets
from django.contrib.admin.checks import (
BaseModelAdminChecks, InlineModelAdminChecks, ModelAdminChecks,
)
from django.contrib.admin.exceptions import DisallowedModelAdminToField
from django.contrib.admin.templatetags.admin_urls import add_preserved_filters
from django.contrib.admin.utils import (
NestedObjects, flatten_fieldsets, get_deleted_objects,
lookup_needs_distinct, model_format_dict, quote, unquote,
)
from django.contrib.auth import get_permission_codename
from django.core.exceptions import (
FieldDoesNotExist, FieldError, PermissionDenied, ValidationError,
)
from django.core.paginator import Paginator
from django.core.urlresolvers import reverse
from django.db import models, router, transaction
from django.db.models.constants import LOOKUP_SEP
from django.db.models.fields import BLANK_CHOICE_DASH
from django.forms.formsets import DELETION_FIELD_NAME, all_valid
from django.forms.models import (
BaseInlineFormSet, inlineformset_factory, modelform_defines_fields,
modelform_factory, modelformset_factory,
)
from django.forms.widgets import CheckboxSelectMultiple, SelectMultiple
from django.http import Http404, HttpResponseRedirect
from django.http.response import HttpResponseBase
from django.template.response import SimpleTemplateResponse, TemplateResponse
from django.utils import six
from django.utils.decorators import method_decorator
from django.utils.encoding import force_text, python_2_unicode_compatible
from django.utils.html import escape, format_html
from django.utils.http import urlencode, urlquote
from django.utils.safestring import mark_safe
from django.utils.text import capfirst, get_text_list
from django.utils.translation import string_concat, ugettext as _, ungettext
from django.views.decorators.csrf import csrf_protect
from django.views.generic import RedirectView
IS_POPUP_VAR = '_popup'
TO_FIELD_VAR = '_to_field'
HORIZONTAL, VERTICAL = 1, 2
def get_content_type_for_model(obj):
# Since this module gets imported in the application's root package,
# it cannot import models from other applications at the module level.
from django.contrib.contenttypes.models import ContentType
return ContentType.objects.get_for_model(obj, for_concrete_model=False)
def get_ul_class(radio_style):
return 'radiolist' if radio_style == VERTICAL else 'radiolist inline'
class IncorrectLookupParameters(Exception):
pass
# Defaults for formfield_overrides. ModelAdmin subclasses can change this
# by adding to ModelAdmin.formfield_overrides.
FORMFIELD_FOR_DBFIELD_DEFAULTS = {
models.DateTimeField: {
'form_class': forms.SplitDateTimeField,
'widget': widgets.AdminSplitDateTime
},
models.DateField: {'widget': widgets.AdminDateWidget},
models.TimeField: {'widget': widgets.AdminTimeWidget},
models.TextField: {'widget': widgets.AdminTextareaWidget},
models.URLField: {'widget': widgets.AdminURLFieldWidget},
models.IntegerField: {'widget': widgets.AdminIntegerFieldWidget},
models.BigIntegerField: {'widget': widgets.AdminBigIntegerFieldWidget},
models.CharField: {'widget': widgets.AdminTextInputWidget},
models.ImageField: {'widget': widgets.AdminFileWidget},
models.FileField: {'widget': widgets.AdminFileWidget},
models.EmailField: {'widget': widgets.AdminEmailInputWidget},
}
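# A minimal sketch of such an override on a concrete admin class (the model,
# admin name and widget attrs here are illustrative, not part of this module):
#
#   class ArticleAdmin(ModelAdmin):
#       formfield_overrides = {
#           models.TextField: {'widget': forms.Textarea(attrs={'rows': 4})},
#       }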
csrf_protect_m = method_decorator(csrf_protect)
class BaseModelAdmin(six.with_metaclass(forms.MediaDefiningClass)):
"""Functionality common to both ModelAdmin and InlineAdmin."""
raw_id_fields = ()
fields = None
exclude = None
fieldsets = None
form = forms.ModelForm
filter_vertical = ()
filter_horizontal = ()
radio_fields = {}
prepopulated_fields = {}
formfield_overrides = {}
readonly_fields = ()
ordering = None
view_on_site = True
show_full_result_count = True
checks_class = BaseModelAdminChecks
def check(self, **kwargs):
return self.checks_class().check(self, **kwargs)
def __init__(self):
overrides = FORMFIELD_FOR_DBFIELD_DEFAULTS.copy()
overrides.update(self.formfield_overrides)
self.formfield_overrides = overrides
def formfield_for_dbfield(self, db_field, request, **kwargs):
"""
Hook for specifying the form Field instance for a given database Field
instance.
If kwargs are given, they're passed to the form Field's constructor.
"""
# If the field specifies choices, we don't need to look for special
# admin widgets - we just need to use a select widget of some kind.
if db_field.choices:
return self.formfield_for_choice_field(db_field, request, **kwargs)
# ForeignKey or ManyToManyFields
if isinstance(db_field, (models.ForeignKey, models.ManyToManyField)):
# Combine the field kwargs with any options for formfield_overrides.
# Make sure the passed in **kwargs override anything in
# formfield_overrides because **kwargs is more specific, and should
# always win.
if db_field.__class__ in self.formfield_overrides:
kwargs = dict(self.formfield_overrides[db_field.__class__], **kwargs)
# Get the correct formfield.
if isinstance(db_field, models.ForeignKey):
formfield = self.formfield_for_foreignkey(db_field, request, **kwargs)
elif isinstance(db_field, models.ManyToManyField):
formfield = self.formfield_for_manytomany(db_field, request, **kwargs)
# For non-raw_id fields, wrap the widget with a wrapper that adds
# extra HTML -- the "add other" interface -- to the end of the
# rendered output. formfield can be None if it came from a
# OneToOneField with parent_link=True or a M2M intermediary.
if formfield and db_field.name not in self.raw_id_fields:
related_modeladmin = self.admin_site._registry.get(db_field.remote_field.model)
wrapper_kwargs = {}
if related_modeladmin:
wrapper_kwargs.update(
can_add_related=related_modeladmin.has_add_permission(request),
can_change_related=related_modeladmin.has_change_permission(request),
can_delete_related=related_modeladmin.has_delete_permission(request),
)
formfield.widget = widgets.RelatedFieldWidgetWrapper(
formfield.widget, db_field.remote_field, self.admin_site, **wrapper_kwargs
)
return formfield
# If we've got overrides for the formfield defined, use 'em. **kwargs
# passed to formfield_for_dbfield override the defaults.
for klass in db_field.__class__.mro():
if klass in self.formfield_overrides:
kwargs = dict(copy.deepcopy(self.formfield_overrides[klass]), **kwargs)
return db_field.formfield(**kwargs)
# For any other type of field, just call its formfield() method.
return db_field.formfield(**kwargs)
def formfield_for_choice_field(self, db_field, request, **kwargs):
"""
Get a form Field for a database Field that has declared choices.
"""
# If the field is named as a radio_field, use a RadioSelect
if db_field.name in self.radio_fields:
# Avoid stomping on custom widget/choices arguments.
if 'widget' not in kwargs:
kwargs['widget'] = widgets.AdminRadioSelect(attrs={
'class': get_ul_class(self.radio_fields[db_field.name]),
})
if 'choices' not in kwargs:
kwargs['choices'] = db_field.get_choices(
include_blank=db_field.blank,
blank_choice=[('', _('None'))]
)
return db_field.formfield(**kwargs)
def get_field_queryset(self, db, db_field, request):
"""
If the ModelAdmin specifies ordering, the queryset should respect that
ordering. Otherwise don't specify the queryset, let the field decide
(returns None in that case).
"""
related_admin = self.admin_site._registry.get(db_field.remote_field.model)
if related_admin is not None:
ordering = related_admin.get_ordering(request)
if ordering is not None and ordering != ():
return db_field.remote_field.model._default_manager.using(db).order_by(*ordering)
return None
def formfield_for_foreignkey(self, db_field, request, **kwargs):
"""
Get a form Field for a ForeignKey.
"""
db = kwargs.get('using')
if db_field.name in self.raw_id_fields:
kwargs['widget'] = widgets.ForeignKeyRawIdWidget(db_field.remote_field,
self.admin_site, using=db)
elif db_field.name in self.radio_fields:
kwargs['widget'] = widgets.AdminRadioSelect(attrs={
'class': get_ul_class(self.radio_fields[db_field.name]),
})
kwargs['empty_label'] = _('None') if db_field.blank else None
if 'queryset' not in kwargs:
queryset = self.get_field_queryset(db, db_field, request)
if queryset is not None:
kwargs['queryset'] = queryset
return db_field.formfield(**kwargs)
def formfield_for_manytomany(self, db_field, request, **kwargs):
"""
Get a form Field for a ManyToManyField.
"""
# If it uses an intermediary model that isn't auto created, don't show
# a field in admin.
if not db_field.remote_field.through._meta.auto_created:
return None
db = kwargs.get('using')
if db_field.name in self.raw_id_fields:
kwargs['widget'] = widgets.ManyToManyRawIdWidget(db_field.remote_field,
self.admin_site, using=db)
kwargs['help_text'] = ''
elif db_field.name in (list(self.filter_vertical) + list(self.filter_horizontal)):
kwargs['widget'] = widgets.FilteredSelectMultiple(
db_field.verbose_name,
db_field.name in self.filter_vertical
)
if 'queryset' not in kwargs:
queryset = self.get_field_queryset(db, db_field, request)
if queryset is not None:
kwargs['queryset'] = queryset
form_field = db_field.formfield(**kwargs)
if isinstance(form_field.widget, SelectMultiple) and not isinstance(form_field.widget, CheckboxSelectMultiple):
msg = _('Hold down "Control", or "Command" on a Mac, to select more than one.')
help_text = form_field.help_text
form_field.help_text = string_concat(help_text, ' ', msg) if help_text else msg
return form_field
def get_view_on_site_url(self, obj=None):
if obj is None or not self.view_on_site:
return None
if callable(self.view_on_site):
return self.view_on_site(obj)
elif self.view_on_site and hasattr(obj, 'get_absolute_url'):
# use the ContentType lookup if view_on_site is True
return reverse('admin:view_on_site', kwargs={
'content_type_id': get_content_type_for_model(obj).pk,
'object_id': obj.pk
})
def get_empty_value_display(self):
"""
Return the empty_value_display set on ModelAdmin or AdminSite.
"""
try:
return mark_safe(self.empty_value_display)
except AttributeError:
return mark_safe(self.admin_site.empty_value_display)
def get_fields(self, request, obj=None):
"""
Hook for specifying fields.
"""
return self.fields
def get_fieldsets(self, request, obj=None):
"""
Hook for specifying fieldsets.
"""
if self.fieldsets:
return self.fieldsets
return [(None, {'fields': self.get_fields(request, obj)})]
def get_ordering(self, request):
"""
Hook for specifying field ordering.
"""
return self.ordering or () # otherwise we might try to *None, which is bad ;)
def get_readonly_fields(self, request, obj=None):
"""
Hook for specifying custom readonly fields.
"""
return self.readonly_fields
def get_prepopulated_fields(self, request, obj=None):
"""
Hook for specifying custom prepopulated fields.
"""
return self.prepopulated_fields
def get_queryset(self, request):
"""
Returns a QuerySet of all model instances that can be edited by the
admin site. This is used by changelist_view.
"""
qs = self.model._default_manager.get_queryset()
# TODO: this should be handled by some parameter to the ChangeList.
ordering = self.get_ordering(request)
if ordering:
qs = qs.order_by(*ordering)
return qs
def lookup_allowed(self, lookup, value):
from django.contrib.admin.filters import SimpleListFilter
model = self.model
# Check FKey lookups that are allowed, so that popups produced by
# ForeignKeyRawIdWidget, on the basis of ForeignKey.limit_choices_to,
# are allowed to work.
for l in model._meta.related_fkey_lookups:
# As ``limit_choices_to`` can be a callable, invoke it here.
if callable(l):
l = l()
for k, v in widgets.url_params_from_lookup_dict(l).items():
if k == lookup and v == value:
return True
relation_parts = []
prev_field = None
for part in lookup.split(LOOKUP_SEP):
try:
field = model._meta.get_field(part)
except FieldDoesNotExist:
# Lookups on non-existent fields are ok, since they're ignored
# later.
break
# It is allowed to filter on values that would be found from local
# model anyways. For example, if you filter on employee__department__id,
# then the id value would be found already from employee__department_id.
if not prev_field or (prev_field.concrete and
field not in prev_field.get_path_info()[-1].target_fields):
relation_parts.append(part)
if not getattr(field, 'get_path_info', None):
# This is not a relational field, so further parts
# must be transforms.
break
prev_field = field
model = field.get_path_info()[-1].to_opts.model
if len(relation_parts) <= 1:
# Either a local field filter, or no fields at all.
return True
clean_lookup = LOOKUP_SEP.join(relation_parts)
valid_lookups = [self.date_hierarchy]
for filter_item in self.list_filter:
if isinstance(filter_item, type) and issubclass(filter_item, SimpleListFilter):
valid_lookups.append(filter_item.parameter_name)
elif isinstance(filter_item, (list, tuple)):
valid_lookups.append(filter_item[0])
else:
valid_lookups.append(filter_item)
return clean_lookup in valid_lookups
def to_field_allowed(self, request, to_field):
"""
Returns True if the model associated with this admin should be
allowed to be referenced by the specified field.
"""
opts = self.model._meta
try:
field = opts.get_field(to_field)
except FieldDoesNotExist:
return False
# Always allow referencing the primary key since it's already possible
# to get this information from the change view URL.
if field.primary_key:
return True
# Allow reverse relationships to models defining m2m fields if they
# target the specified field.
for many_to_many in opts.many_to_many:
if many_to_many.m2m_target_field_name() == to_field:
return True
# Make sure at least one of the models registered for this site
# references this field through a FK or a M2M relationship.
registered_models = set()
for model, admin in self.admin_site._registry.items():
registered_models.add(model)
for inline in admin.inlines:
registered_models.add(inline.model)
related_objects = (
f for f in opts.get_fields(include_hidden=True)
if (f.auto_created and not f.concrete)
)
for related_object in related_objects:
related_model = related_object.related_model
remote_field = related_object.field.remote_field
if (any(issubclass(model, related_model) for model in registered_models) and
hasattr(remote_field, 'get_related_field') and
remote_field.get_related_field() == field):
return True
return False
def has_add_permission(self, request):
"""
Returns True if the given request has permission to add an object.
Can be overridden by the user in subclasses.
"""
opts = self.opts
codename = get_permission_codename('add', opts)
return request.user.has_perm("%s.%s" % (opts.app_label, codename))
def has_change_permission(self, request, obj=None):
"""
Returns True if the given request has permission to change the given
Django model instance, the default implementation doesn't examine the
`obj` parameter.
Can be overridden by the user in subclasses. In such case it should
return True if the given request has permission to change the `obj`
model instance. If `obj` is None, this should return True if the given
request has permission to change *any* object of the given type.
"""
opts = self.opts
codename = get_permission_codename('change', opts)
return request.user.has_perm("%s.%s" % (opts.app_label, codename))
def has_delete_permission(self, request, obj=None):
"""
Returns True if the given request has permission to change the given
Django model instance, the default implementation doesn't examine the
`obj` parameter.
Can be overridden by the user in subclasses. In such case it should
return True if the given request has permission to delete the `obj`
model instance. If `obj` is None, this should return True if the given
request has permission to delete *any* object of the given type.
"""
opts = self.opts
codename = get_permission_codename('delete', opts)
return request.user.has_perm("%s.%s" % (opts.app_label, codename))
def has_module_permission(self, request):
"""
Returns True if the given request has any permission in the given
app label.
Can be overridden by the user in subclasses. In such case it should
return True if the given request has permission to view the module on
the admin index page and access the module's index page. Overriding it
does not restrict access to the add, change or delete views. Use
`ModelAdmin.has_(add|change|delete)_permission` for that.
"""
return request.user.has_module_perms(self.opts.app_label)
@python_2_unicode_compatible
class ModelAdmin(BaseModelAdmin):
"Encapsulates all admin options and functionality for a given model."
list_display = ('__str__',)
list_display_links = ()
list_filter = ()
list_select_related = False
list_per_page = 100
list_max_show_all = 200
list_editable = ()
search_fields = ()
date_hierarchy = None
save_as = False
save_on_top = False
paginator = Paginator
preserve_filters = True
inlines = []
# Custom templates (designed to be over-ridden in subclasses)
add_form_template = None
change_form_template = None
change_list_template = None
delete_confirmation_template = None
delete_selected_confirmation_template = None
object_history_template = None
# Actions
actions = []
action_form = helpers.ActionForm
actions_on_top = True
actions_on_bottom = False
actions_selection_counter = True
checks_class = ModelAdminChecks
def __init__(self, model, admin_site):
self.model = model
self.opts = model._meta
self.admin_site = admin_site
super(ModelAdmin, self).__init__()
def __str__(self):
return "%s.%s" % (self.model._meta.app_label, self.__class__.__name__)
def get_inline_instances(self, request, obj=None):
inline_instances = []
for inline_class in self.inlines:
inline = inline_class(self.model, self.admin_site)
if request:
if not (inline.has_add_permission(request) or
inline.has_change_permission(request, obj) or
inline.has_delete_permission(request, obj)):
continue
if not inline.has_add_permission(request):
inline.max_num = 0
inline_instances.append(inline)
return inline_instances
def get_urls(self):
from django.conf.urls import url
def wrap(view):
def wrapper(*args, **kwargs):
return self.admin_site.admin_view(view)(*args, **kwargs)
wrapper.model_admin = self
return update_wrapper(wrapper, view)
info = self.model._meta.app_label, self.model._meta.model_name
urlpatterns = [
url(r'^$', wrap(self.changelist_view), name='%s_%s_changelist' % info),
url(r'^add/$', wrap(self.add_view), name='%s_%s_add' % info),
url(r'^(.+)/history/$', wrap(self.history_view), name='%s_%s_history' % info),
url(r'^(.+)/delete/$', wrap(self.delete_view), name='%s_%s_delete' % info),
url(r'^(.+)/change/$', wrap(self.change_view), name='%s_%s_change' % info),
# For backwards compatibility (was the change url before 1.9)
url(r'^(.+)/$', wrap(RedirectView.as_view(
pattern_name='%s:%s_%s_change' % ((self.admin_site.name,) + info)
))),
]
return urlpatterns
def urls(self):
return self.get_urls()
urls = property(urls)
@property
def media(self):
extra = '' if settings.DEBUG else '.min'
js = [
'core.js',
'vendor/jquery/jquery%s.js' % extra,
'jquery.init.js',
'admin/RelatedObjectLookups.js',
'actions%s.js' % extra,
'urlify.js',
'prepopulate%s.js' % extra,
'vendor/xregexp/xregexp.min.js',
]
return forms.Media(js=['admin/js/%s' % url for url in js])
def get_model_perms(self, request):
"""
Returns a dict of all perms for this model. This dict has the keys
``add``, ``change``, and ``delete`` mapping to the True/False for each
of those actions.
"""
return {
'add': self.has_add_permission(request),
'change': self.has_change_permission(request),
'delete': self.has_delete_permission(request),
}
def get_fields(self, request, obj=None):
if self.fields:
return self.fields
form = self.get_form(request, obj, fields=None)
return list(form.base_fields) + list(self.get_readonly_fields(request, obj))
def get_form(self, request, obj=None, **kwargs):
"""
Returns a Form class for use in the admin add view. This is used by
add_view and change_view.
"""
if 'fields' in kwargs:
fields = kwargs.pop('fields')
else:
fields = flatten_fieldsets(self.get_fieldsets(request, obj))
if self.exclude is None:
exclude = []
else:
exclude = list(self.exclude)
readonly_fields = self.get_readonly_fields(request, obj)
exclude.extend(readonly_fields)
if self.exclude is None and hasattr(self.form, '_meta') and self.form._meta.exclude:
# Take the custom ModelForm's Meta.exclude into account only if the
# ModelAdmin doesn't define its own.
exclude.extend(self.form._meta.exclude)
# if exclude is an empty list we pass None to be consistent with the
# default on modelform_factory
exclude = exclude or None
# Remove declared form fields which are in readonly_fields.
new_attrs = OrderedDict(
(f, None) for f in readonly_fields
if f in self.form.declared_fields
)
form = type(self.form.__name__, (self.form,), new_attrs)
defaults = {
"form": form,
"fields": fields,
"exclude": exclude,
"formfield_callback": partial(self.formfield_for_dbfield, request=request),
}
defaults.update(kwargs)
if defaults['fields'] is None and not modelform_defines_fields(defaults['form']):
defaults['fields'] = forms.ALL_FIELDS
try:
return modelform_factory(self.model, **defaults)
except FieldError as e:
raise FieldError('%s. Check fields/fieldsets/exclude attributes of class %s.'
% (e, self.__class__.__name__))
def get_changelist(self, request, **kwargs):
"""
Returns the ChangeList class for use on the changelist page.
"""
from django.contrib.admin.views.main import ChangeList
return ChangeList
def get_object(self, request, object_id, from_field=None):
"""
Returns an instance matching the field and value provided, the primary
key is used if no field is provided. Returns ``None`` if no match is
found or the object_id fails validation.
"""
queryset = self.get_queryset(request)
model = queryset.model
field = model._meta.pk if from_field is None else model._meta.get_field(from_field)
try:
object_id = field.to_python(object_id)
return queryset.get(**{field.name: object_id})
except (model.DoesNotExist, ValidationError, ValueError):
return None
def get_changelist_form(self, request, **kwargs):
"""
Returns a Form class for use in the Formset on the changelist page.
"""
defaults = {
"formfield_callback": partial(self.formfield_for_dbfield, request=request),
}
defaults.update(kwargs)
if (defaults.get('fields') is None
and not modelform_defines_fields(defaults.get('form'))):
defaults['fields'] = forms.ALL_FIELDS
return modelform_factory(self.model, **defaults)
def get_changelist_formset(self, request, **kwargs):
"""
Returns a FormSet class for use on the changelist page if list_editable
is used.
"""
defaults = {
"formfield_callback": partial(self.formfield_for_dbfield, request=request),
}
defaults.update(kwargs)
return modelformset_factory(self.model,
self.get_changelist_form(request), extra=0,
fields=self.list_editable, **defaults)
def get_formsets_with_inlines(self, request, obj=None):
"""
Yields formsets and the corresponding inlines.
"""
for inline in self.get_inline_instances(request, obj):
yield inline.get_formset(request, obj), inline
def get_paginator(self, request, queryset, per_page, orphans=0, allow_empty_first_page=True):
return self.paginator(queryset, per_page, orphans, allow_empty_first_page)
def log_addition(self, request, object, message):
"""
Log that an object has been successfully added.
The default implementation creates an admin LogEntry object.
"""
from django.contrib.admin.models import LogEntry, ADDITION
LogEntry.objects.log_action(
user_id=request.user.pk,
content_type_id=get_content_type_for_model(object).pk,
object_id=object.pk,
object_repr=force_text(object),
action_flag=ADDITION,
change_message=message,
)
def log_change(self, request, object, message):
"""
Log that an object has been successfully changed.
The default implementation creates an admin LogEntry object.
"""
from django.contrib.admin.models import LogEntry, CHANGE
LogEntry.objects.log_action(
user_id=request.user.pk,
content_type_id=get_content_type_for_model(object).pk,
object_id=object.pk,
object_repr=force_text(object),
action_flag=CHANGE,
change_message=message,
)
def log_deletion(self, request, object, object_repr):
"""
Log that an object will be deleted. Note that this method must be
called before the deletion.
The default implementation creates an admin LogEntry object.
"""
from django.contrib.admin.models import LogEntry, DELETION
LogEntry.objects.log_action(
user_id=request.user.pk,
content_type_id=get_content_type_for_model(object).pk,
object_id=object.pk,
object_repr=object_repr,
action_flag=DELETION,
)
def action_checkbox(self, obj):
"""
A list_display column containing a checkbox widget.
"""
return helpers.checkbox.render(helpers.ACTION_CHECKBOX_NAME, force_text(obj.pk))
action_checkbox.short_description = mark_safe('<input type="checkbox" id="action-toggle" />')
def get_actions(self, request):
"""
Return a dictionary mapping the names of all actions for this
ModelAdmin to a tuple of (callable, name, description) for each action.
"""
# If self.actions is explicitly set to None that means that we don't
# want *any* actions enabled on this page.
if self.actions is None or IS_POPUP_VAR in request.GET:
return OrderedDict()
actions = []
# Gather actions from the admin site first
for (name, func) in self.admin_site.actions:
description = getattr(func, 'short_description', name.replace('_', ' '))
actions.append((func, name, description))
# Then gather them from the model admin and all parent classes,
# starting with self and working back up.
for klass in self.__class__.mro()[::-1]:
class_actions = getattr(klass, 'actions', [])
# Avoid trying to iterate over None
if not class_actions:
continue
actions.extend(self.get_action(action) for action in class_actions)
# get_action might have returned None, so filter any of those out.
actions = filter(None, actions)
# Convert the actions into an OrderedDict keyed by name.
actions = OrderedDict(
(name, (func, name, desc))
for func, name, desc in actions
)
return actions
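# A hedged sketch of a custom action as consumed by this machinery (the
# function, field and admin names are illustrative):
#
#   def make_published(modeladmin, request, queryset):
#       queryset.update(status='published')
#   make_published.short_description = "Mark selected items as published"
#
#   class ArticleAdmin(ModelAdmin):
#       actions = [make_published]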
def get_action_choices(self, request, default_choices=BLANK_CHOICE_DASH):
"""
Return a list of choices for use in a form object. Each choice is a
tuple (name, description).
"""
choices = [] + default_choices
for func, name, description in six.itervalues(self.get_actions(request)):
choice = (name, description % model_format_dict(self.opts))
choices.append(choice)
return choices
def get_action(self, action):
"""
Return a given action from a parameter, which can either be a callable,
or the name of a method on the ModelAdmin. Return is a tuple of
(callable, name, description).
"""
# If the action is a callable, just use it.
if callable(action):
func = action
action = action.__name__
# Next, look for a method. Grab it off self.__class__ to get an unbound
# method instead of a bound one; this ensures that the calling
# conventions are the same for functions and methods.
elif hasattr(self.__class__, action):
func = getattr(self.__class__, action)
# Finally, look for a named method on the admin site
else:
try:
func = self.admin_site.get_action(action)
except KeyError:
return None
if hasattr(func, 'short_description'):
description = func.short_description
else:
description = capfirst(action.replace('_', ' '))
return func, action, description
def get_list_display(self, request):
"""
Return a sequence containing the fields to be displayed on the
changelist.
"""
return self.list_display
def get_list_display_links(self, request, list_display):
"""
Return a sequence containing the fields to be displayed as links
on the changelist. The list_display parameter is the list of fields
returned by get_list_display().
"""
if self.list_display_links or self.list_display_links is None or not list_display:
return self.list_display_links
else:
# Use only the first item in list_display as link
return list(list_display)[:1]
def get_list_filter(self, request):
"""
Returns a sequence containing the fields to be displayed as filters in
the right sidebar of the changelist page.
"""
return self.list_filter
def get_list_select_related(self, request):
"""
Returns a list of fields to add to the select_related() part of the
changelist items query.
"""
return self.list_select_related
def get_search_fields(self, request):
"""
Returns a sequence containing the fields to be searched whenever
somebody submits a search query.
"""
return self.search_fields
def get_search_results(self, request, queryset, search_term):
"""
Returns a tuple containing a queryset to implement the search,
and a boolean indicating if the results may contain duplicates.
"""
# Apply keyword searches.
def construct_search(field_name):
if field_name.startswith('^'):
return "%s__istartswith" % field_name[1:]
elif field_name.startswith('='):
return "%s__iexact" % field_name[1:]
elif field_name.startswith('@'):
return "%s__search" % field_name[1:]
else:
return "%s__icontains" % field_name
use_distinct = False
search_fields = self.get_search_fields(request)
if search_fields and search_term:
orm_lookups = [construct_search(str(search_field))
for search_field in search_fields]
for bit in search_term.split():
or_queries = [models.Q(**{orm_lookup: bit})
for orm_lookup in orm_lookups]
queryset = queryset.filter(reduce(operator.or_, or_queries))
if not use_distinct:
for search_spec in orm_lookups:
if lookup_needs_distinct(self.opts, search_spec):
use_distinct = True
break
return queryset, use_distinct
def get_preserved_filters(self, request):
"""
Returns the preserved filters querystring.
"""
match = request.resolver_match
if self.preserve_filters and match:
opts = self.model._meta
current_url = '%s:%s' % (match.app_name, match.url_name)
changelist_url = 'admin:%s_%s_changelist' % (opts.app_label, opts.model_name)
if current_url == changelist_url:
preserved_filters = request.GET.urlencode()
else:
preserved_filters = request.GET.get('_changelist_filters')
if preserved_filters:
return urlencode({'_changelist_filters': preserved_filters})
return ''
def construct_change_message(self, request, form, formsets, add=False):
"""
Construct a change message from a changed object.
"""
change_message = []
if add:
change_message.append(_('Added.'))
elif form.changed_data:
change_message.append(_('Changed %s.') % get_text_list(form.changed_data, _('and')))
if formsets:
for formset in formsets:
for added_object in formset.new_objects:
change_message.append(_('Added %(name)s "%(object)s".')
% {'name': force_text(added_object._meta.verbose_name),
'object': force_text(added_object)})
for changed_object, changed_fields in formset.changed_objects:
change_message.append(_('Changed %(list)s for %(name)s "%(object)s".')
% {'list': get_text_list(changed_fields, _('and')),
'name': force_text(changed_object._meta.verbose_name),
'object': force_text(changed_object)})
for deleted_object in formset.deleted_objects:
change_message.append(_('Deleted %(name)s "%(object)s".')
% {'name': force_text(deleted_object._meta.verbose_name),
'object': force_text(deleted_object)})
change_message = ' '.join(change_message)
return change_message or _('No fields changed.')
def message_user(self, request, message, level=messages.INFO, extra_tags='',
fail_silently=False):
"""
Send a message to the user. The default implementation
posts a message using the django.contrib.messages backend.
Exposes almost the same API as messages.add_message(), but accepts the
positional arguments in a different order to maintain backwards
compatibility. For convenience, it accepts the `level` argument as
a string rather than the usual level number.
"""
if not isinstance(level, int):
# attempt to get the level if passed a string
try:
level = getattr(messages.constants, level.upper())
except AttributeError:
levels = messages.constants.DEFAULT_TAGS.values()
levels_repr = ', '.join('`%s`' % l for l in levels)
raise ValueError('Bad message level string: `%s`. '
'Possible values are: %s' % (level, levels_repr))
messages.add_message(request, level, message, extra_tags=extra_tags,
fail_silently=fail_silently)
def save_form(self, request, form, change):
"""
Given a ModelForm return an unsaved instance. ``change`` is True if
the object is being changed, and False if it's being added.
"""
return form.save(commit=False)
def save_model(self, request, obj, form, change):
"""
Given a model instance save it to the database.
"""
obj.save()
def delete_model(self, request, obj):
"""
Given a model instance delete it from the database.
"""
obj.delete()
def save_formset(self, request, form, formset, change):
"""
Given an inline formset save it to the database.
"""
formset.save()
def save_related(self, request, form, formsets, change):
"""
Given the ``HttpRequest``, the parent ``ModelForm`` instance, the
list of inline formsets and a boolean value based on whether the
parent is being added or changed, save the related objects to the
database. Note that at this point save_form() and save_model() have
already been called.
"""
form.save_m2m()
for formset in formsets:
self.save_formset(request, form, formset, change=change)
def render_change_form(self, request, context, add=False, change=False, form_url='', obj=None):
opts = self.model._meta
app_label = opts.app_label
preserved_filters = self.get_preserved_filters(request)
form_url = add_preserved_filters({'preserved_filters': preserved_filters, 'opts': opts}, form_url)
view_on_site_url = self.get_view_on_site_url(obj)
context.update({
'add': add,
'change': change,
'has_add_permission': self.has_add_permission(request),
'has_change_permission': self.has_change_permission(request, obj),
'has_delete_permission': self.has_delete_permission(request, obj),
'has_file_field': True, # FIXME - this should check if form or formsets have a FileField,
'has_absolute_url': view_on_site_url is not None,
'absolute_url': view_on_site_url,
'form_url': form_url,
'opts': opts,
'content_type_id': get_content_type_for_model(self.model).pk,
'save_as': self.save_as,
'save_on_top': self.save_on_top,
'to_field_var': TO_FIELD_VAR,
'is_popup_var': IS_POPUP_VAR,
'app_label': app_label,
})
if add and self.add_form_template is not None:
form_template = self.add_form_template
else:
form_template = self.change_form_template
request.current_app = self.admin_site.name
return TemplateResponse(request, form_template or [
"admin/%s/%s/change_form.html" % (app_label, opts.model_name),
"admin/%s/change_form.html" % app_label,
"admin/change_form.html"
], context)
def response_add(self, request, obj, post_url_continue=None):
"""
Determines the HttpResponse for the add_view stage.
"""
opts = obj._meta
pk_value = obj._get_pk_val()
preserved_filters = self.get_preserved_filters(request)
obj_url = reverse(
'admin:%s_%s_change' % (opts.app_label, opts.model_name),
args=(quote(pk_value),),
current_app=self.admin_site.name,
)
# Add a link to the object's change form if the user can edit the obj.
if self.has_change_permission(request, obj):
obj_repr = format_html('<a href="{}">{}</a>', urlquote(obj_url), obj)
else:
obj_repr = force_text(obj)
msg_dict = {
'name': force_text(opts.verbose_name),
'obj': obj_repr,
}
# Here, we distinguish between different save types by checking for
# the presence of keys in request.POST.
if IS_POPUP_VAR in request.POST:
to_field = request.POST.get(TO_FIELD_VAR)
if to_field:
attr = str(to_field)
else:
attr = obj._meta.pk.attname
value = obj.serializable_value(attr)
popup_response_data = json.dumps({
'value': value,
'obj': six.text_type(obj),
})
return SimpleTemplateResponse('admin/popup_response.html', {
'popup_response_data': popup_response_data,
})
elif "_continue" in request.POST:
msg = format_html(
_('The {name} "{obj}" was added successfully. You may edit it again below.'),
**msg_dict
)
self.message_user(request, msg, messages.SUCCESS)
if post_url_continue is None:
post_url_continue = obj_url
post_url_continue = add_preserved_filters(
{'preserved_filters': preserved_filters, 'opts': opts},
post_url_continue
)
return HttpResponseRedirect(post_url_continue)
elif "_addanother" in request.POST:
msg = format_html(
_('The {name} "{obj}" was added successfully. You may add another {name} below.'),
**msg_dict
)
self.message_user(request, msg, messages.SUCCESS)
redirect_url = request.path
redirect_url = add_preserved_filters({'preserved_filters': preserved_filters, 'opts': opts}, redirect_url)
return HttpResponseRedirect(redirect_url)
else:
msg = format_html(
_('The {name} "{obj}" was added successfully.'),
**msg_dict
)
self.message_user(request, msg, messages.SUCCESS)
return self.response_post_save_add(request, obj)
def response_change(self, request, obj):
"""
Determines the HttpResponse for the change_view stage.
"""
if IS_POPUP_VAR in request.POST:
to_field = request.POST.get(TO_FIELD_VAR)
attr = str(to_field) if to_field else obj._meta.pk.attname
# Retrieve the `object_id` from the resolved pattern arguments.
value = request.resolver_match.args[0]
new_value = obj.serializable_value(attr)
popup_response_data = json.dumps({
'action': 'change',
'value': value,
'obj': six.text_type(obj),
'new_value': new_value,
})
return SimpleTemplateResponse('admin/popup_response.html', {
'popup_response_data': popup_response_data,
})
opts = self.model._meta
pk_value = obj._get_pk_val()
preserved_filters = self.get_preserved_filters(request)
msg_dict = {
'name': force_text(opts.verbose_name),
'obj': format_html('<a href="{}">{}</a>', urlquote(request.path), obj),
}
if "_continue" in request.POST:
msg = format_html(
_('The {name} "{obj}" was changed successfully. You may edit it again below.'),
**msg_dict
)
self.message_user(request, msg, messages.SUCCESS)
redirect_url = request.path
redirect_url = add_preserved_filters({'preserved_filters': preserved_filters, 'opts': opts}, redirect_url)
return HttpResponseRedirect(redirect_url)
elif "_saveasnew" in request.POST:
msg = format_html(
_('The {name} "{obj}" was added successfully. You may edit it again below.'),
**msg_dict
)
self.message_user(request, msg, messages.SUCCESS)
redirect_url = reverse('admin:%s_%s_change' %
(opts.app_label, opts.model_name),
args=(pk_value,),
current_app=self.admin_site.name)
redirect_url = add_preserved_filters({'preserved_filters': preserved_filters, 'opts': opts}, redirect_url)
return HttpResponseRedirect(redirect_url)
elif "_addanother" in request.POST:
msg = format_html(
_('The {name} "{obj}" was changed successfully. You may add another {name} below.'),
**msg_dict
)
self.message_user(request, msg, messages.SUCCESS)
redirect_url = reverse('admin:%s_%s_add' %
(opts.app_label, opts.model_name),
current_app=self.admin_site.name)
redirect_url = add_preserved_filters({'preserved_filters': preserved_filters, 'opts': opts}, redirect_url)
return HttpResponseRedirect(redirect_url)
else:
msg = format_html(
_('The {name} "{obj}" was changed successfully.'),
**msg_dict
)
self.message_user(request, msg, messages.SUCCESS)
return self.response_post_save_change(request, obj)
def response_post_save_add(self, request, obj):
"""
Figure out where to redirect after the 'Save' button has been pressed
when adding a new object.
"""
opts = self.model._meta
if self.has_change_permission(request, None):
post_url = reverse('admin:%s_%s_changelist' %
(opts.app_label, opts.model_name),
current_app=self.admin_site.name)
preserved_filters = self.get_preserved_filters(request)
post_url = add_preserved_filters({'preserved_filters': preserved_filters, 'opts': opts}, post_url)
else:
post_url = reverse('admin:index',
current_app=self.admin_site.name)
return HttpResponseRedirect(post_url)
def response_post_save_change(self, request, obj):
"""
Figure out where to redirect after the 'Save' button has been pressed
when editing an existing object.
"""
opts = self.model._meta
if self.has_change_permission(request, None):
post_url = reverse('admin:%s_%s_changelist' %
(opts.app_label, opts.model_name),
current_app=self.admin_site.name)
preserved_filters = self.get_preserved_filters(request)
post_url = add_preserved_filters({'preserved_filters': preserved_filters, 'opts': opts}, post_url)
else:
post_url = reverse('admin:index',
current_app=self.admin_site.name)
return HttpResponseRedirect(post_url)
def response_action(self, request, queryset):
"""
Handle an admin action. This is called if a request is POSTed to the
changelist; it returns an HttpResponse if the action was handled, and
None otherwise.
"""
# There can be multiple action forms on the page (at the top
# and bottom of the change list, for example). Get the action
# whose button was pushed.
try:
action_index = int(request.POST.get('index', 0))
except ValueError:
action_index = 0
# Construct the action form.
data = request.POST.copy()
data.pop(helpers.ACTION_CHECKBOX_NAME, None)
data.pop("index", None)
# Use the action whose button was pushed
try:
data.update({'action': data.getlist('action')[action_index]})
except IndexError:
            # If we didn't get an action from the chosen form, the POST
            # data is invalid; the missing 'action' key will fail the
            # validation check below, so nothing needs to be done here.
pass
action_form = self.action_form(data, auto_id=None)
action_form.fields['action'].choices = self.get_action_choices(request)
# If the form's valid we can handle the action.
if action_form.is_valid():
action = action_form.cleaned_data['action']
select_across = action_form.cleaned_data['select_across']
func = self.get_actions(request)[action][0]
            # Get the list of selected PKs. If nothing's selected, we can't
            # perform an action on them, so bail out, unless the user asked
            # to perform the action explicitly on all objects (select_across).
selected = request.POST.getlist(helpers.ACTION_CHECKBOX_NAME)
if not selected and not select_across:
# Reminder that something needs to be selected or nothing will happen
msg = _("Items must be selected in order to perform "
"actions on them. No items have been changed.")
self.message_user(request, msg, messages.WARNING)
return None
if not select_across:
# Perform the action only on the selected objects
queryset = queryset.filter(pk__in=selected)
response = func(self, request, queryset)
# Actions may return an HttpResponse-like object, which will be
# used as the response from the POST. If not, we'll be a good
# little HTTP citizen and redirect back to the changelist page.
if isinstance(response, HttpResponseBase):
return response
else:
return HttpResponseRedirect(request.get_full_path())
else:
msg = _("No action selected.")
self.message_user(request, msg, messages.WARNING)
return None
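    # For reference, an action callable as invoked above has the shape
    # func(modeladmin, request, queryset); the example name below is
    # hypothetical:
    #
    #   def make_published(modeladmin, request, queryset):
    #       queryset.update(status='published')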
def response_delete(self, request, obj_display, obj_id):
"""
Determines the HttpResponse for the delete_view stage.
"""
opts = self.model._meta
if IS_POPUP_VAR in request.POST:
popup_response_data = json.dumps({
'action': 'delete',
'value': obj_id,
})
return SimpleTemplateResponse('admin/popup_response.html', {
'popup_response_data': popup_response_data,
})
self.message_user(request,
_('The %(name)s "%(obj)s" was deleted successfully.') % {
'name': force_text(opts.verbose_name),
'obj': force_text(obj_display),
}, messages.SUCCESS)
if self.has_change_permission(request, None):
post_url = reverse('admin:%s_%s_changelist' %
(opts.app_label, opts.model_name),
current_app=self.admin_site.name)
preserved_filters = self.get_preserved_filters(request)
post_url = add_preserved_filters(
{'preserved_filters': preserved_filters, 'opts': opts}, post_url
)
else:
post_url = reverse('admin:index',
current_app=self.admin_site.name)
return HttpResponseRedirect(post_url)
def render_delete_form(self, request, context):
opts = self.model._meta
app_label = opts.app_label
request.current_app = self.admin_site.name
context.update(
to_field_var=TO_FIELD_VAR,
is_popup_var=IS_POPUP_VAR,
media=self.media,
)
return TemplateResponse(request,
self.delete_confirmation_template or [
"admin/{}/{}/delete_confirmation.html".format(app_label, opts.model_name),
"admin/{}/delete_confirmation.html".format(app_label),
"admin/delete_confirmation.html"
], context)
def get_inline_formsets(self, request, formsets, inline_instances,
obj=None):
inline_admin_formsets = []
for inline, formset in zip(inline_instances, formsets):
fieldsets = list(inline.get_fieldsets(request, obj))
readonly = list(inline.get_readonly_fields(request, obj))
prepopulated = dict(inline.get_prepopulated_fields(request, obj))
inline_admin_formset = helpers.InlineAdminFormSet(inline, formset,
fieldsets, prepopulated, readonly, model_admin=self)
inline_admin_formsets.append(inline_admin_formset)
return inline_admin_formsets
def get_changeform_initial_data(self, request):
"""
Get the initial form data.
Unless overridden, this populates from the GET params.
"""
initial = dict(request.GET.items())
for k in initial:
try:
f = self.model._meta.get_field(k)
except FieldDoesNotExist:
continue
# We have to special-case M2Ms as a list of comma-separated PKs.
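            # e.g. a query string like "?authors=1,2,3" yields
            # initial['authors'] == ['1', '2', '3'].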
if isinstance(f, models.ManyToManyField):
initial[k] = initial[k].split(",")
return initial
@csrf_protect_m
@transaction.atomic
def changeform_view(self, request, object_id=None, form_url='', extra_context=None):
to_field = request.POST.get(TO_FIELD_VAR, request.GET.get(TO_FIELD_VAR))
if to_field and not self.to_field_allowed(request, to_field):
raise DisallowedModelAdminToField("The field %s cannot be referenced." % to_field)
model = self.model
opts = model._meta
add = object_id is None
if add:
if not self.has_add_permission(request):
raise PermissionDenied
obj = None
else:
obj = self.get_object(request, unquote(object_id), to_field)
if not self.has_change_permission(request, obj):
raise PermissionDenied
if obj is None:
raise Http404(_('%(name)s object with primary key %(key)r does not exist.') % {
'name': force_text(opts.verbose_name), 'key': escape(object_id)})
if request.method == 'POST' and "_saveasnew" in request.POST:
object_id = None
obj = None
ModelForm = self.get_form(request, obj)
if request.method == 'POST':
form = ModelForm(request.POST, request.FILES, instance=obj)
if form.is_valid():
form_validated = True
new_object = self.save_form(request, form, change=not add)
else:
form_validated = False
new_object = form.instance
formsets, inline_instances = self._create_formsets(request, new_object, change=not add)
if all_valid(formsets) and form_validated:
self.save_model(request, new_object, form, not add)
self.save_related(request, form, formsets, not add)
change_message = self.construct_change_message(request, form, formsets, add)
if add:
self.log_addition(request, new_object, change_message)
return self.response_add(request, new_object)
else:
self.log_change(request, new_object, change_message)
return self.response_change(request, new_object)
else:
form_validated = False
else:
if add:
initial = self.get_changeform_initial_data(request)
form = ModelForm(initial=initial)
formsets, inline_instances = self._create_formsets(request, form.instance, change=False)
else:
form = ModelForm(instance=obj)
formsets, inline_instances = self._create_formsets(request, obj, change=True)
adminForm = helpers.AdminForm(
form,
list(self.get_fieldsets(request, obj)),
self.get_prepopulated_fields(request, obj),
self.get_readonly_fields(request, obj),
model_admin=self)
media = self.media + adminForm.media
inline_formsets = self.get_inline_formsets(request, formsets, inline_instances, obj)
for inline_formset in inline_formsets:
media = media + inline_formset.media
context = dict(self.admin_site.each_context(request),
title=(_('Add %s') if add else _('Change %s')) % force_text(opts.verbose_name),
adminform=adminForm,
object_id=object_id,
original=obj,
is_popup=(IS_POPUP_VAR in request.POST or
IS_POPUP_VAR in request.GET),
to_field=to_field,
media=media,
inline_admin_formsets=inline_formsets,
errors=helpers.AdminErrorList(form, formsets),
preserved_filters=self.get_preserved_filters(request),
)
# Hide the "Save" and "Save and continue" buttons if "Save as New" was
# previously chosen to prevent the interface from getting confusing.
if request.method == 'POST' and not form_validated and "_saveasnew" in request.POST:
context['show_save'] = False
context['show_save_and_continue'] = False
context.update(extra_context or {})
return self.render_change_form(request, context, add=add, change=not add, obj=obj, form_url=form_url)
def add_view(self, request, form_url='', extra_context=None):
return self.changeform_view(request, None, form_url, extra_context)
def change_view(self, request, object_id, form_url='', extra_context=None):
return self.changeform_view(request, object_id, form_url, extra_context)
@csrf_protect_m
def changelist_view(self, request, extra_context=None):
"""
The 'change list' admin view for this model.
"""
from django.contrib.admin.views.main import ERROR_FLAG
opts = self.model._meta
app_label = opts.app_label
if not self.has_change_permission(request, None):
raise PermissionDenied
list_display = self.get_list_display(request)
list_display_links = self.get_list_display_links(request, list_display)
list_filter = self.get_list_filter(request)
search_fields = self.get_search_fields(request)
list_select_related = self.get_list_select_related(request)
# Check actions to see if any are available on this changelist
actions = self.get_actions(request)
if actions:
# Add the action checkboxes if there are any actions available.
list_display = ['action_checkbox'] + list(list_display)
ChangeList = self.get_changelist(request)
try:
cl = ChangeList(request, self.model, list_display,
list_display_links, list_filter, self.date_hierarchy,
search_fields, list_select_related, self.list_per_page,
self.list_max_show_all, self.list_editable, self)
except IncorrectLookupParameters:
# Wacky lookup parameters were given, so redirect to the main
# changelist page, without parameters, and pass an 'invalid=1'
# parameter via the query string. If wacky parameters were given
# and the 'invalid=1' parameter was already in the query string,
# something is screwed up with the database, so display an error
# page.
if ERROR_FLAG in request.GET.keys():
return SimpleTemplateResponse('admin/invalid_setup.html', {
'title': _('Database error'),
})
return HttpResponseRedirect(request.path + '?' + ERROR_FLAG + '=1')
# If the request was POSTed, this might be a bulk action or a bulk
# edit. Try to look up an action or confirmation first, but if this
# isn't an action the POST will fall through to the bulk edit check,
# below.
action_failed = False
selected = request.POST.getlist(helpers.ACTION_CHECKBOX_NAME)
# Actions with no confirmation
if (actions and request.method == 'POST' and
'index' in request.POST and '_save' not in request.POST):
if selected:
response = self.response_action(request, queryset=cl.get_queryset(request))
if response:
return response
else:
action_failed = True
else:
msg = _("Items must be selected in order to perform "
"actions on them. No items have been changed.")
self.message_user(request, msg, messages.WARNING)
action_failed = True
# Actions with confirmation
if (actions and request.method == 'POST' and
helpers.ACTION_CHECKBOX_NAME in request.POST and
'index' not in request.POST and '_save' not in request.POST):
if selected:
response = self.response_action(request, queryset=cl.get_queryset(request))
if response:
return response
else:
action_failed = True
# If we're allowing changelist editing, we need to construct a formset
# for the changelist given all the fields to be edited. Then we'll
# use the formset to validate/process POSTed data.
formset = cl.formset = None
# Handle POSTed bulk-edit data.
if (request.method == "POST" and cl.list_editable and
'_save' in request.POST and not action_failed):
FormSet = self.get_changelist_formset(request)
formset = cl.formset = FormSet(request.POST, request.FILES, queryset=cl.result_list)
if formset.is_valid():
changecount = 0
for form in formset.forms:
if form.has_changed():
obj = self.save_form(request, form, change=True)
self.save_model(request, obj, form, change=True)
self.save_related(request, form, formsets=[], change=True)
change_msg = self.construct_change_message(request, form, None)
self.log_change(request, obj, change_msg)
changecount += 1
if changecount:
if changecount == 1:
name = force_text(opts.verbose_name)
else:
name = force_text(opts.verbose_name_plural)
msg = ungettext("%(count)s %(name)s was changed successfully.",
"%(count)s %(name)s were changed successfully.",
changecount) % {'count': changecount,
'name': name,
'obj': force_text(obj)}
self.message_user(request, msg, messages.SUCCESS)
return HttpResponseRedirect(request.get_full_path())
# Handle GET -- construct a formset for display.
elif cl.list_editable:
FormSet = self.get_changelist_formset(request)
formset = cl.formset = FormSet(queryset=cl.result_list)
# Build the list of media to be used by the formset.
if formset:
media = self.media + formset.media
else:
media = self.media
# Build the action form and populate it with available actions.
if actions:
action_form = self.action_form(auto_id=None)
action_form.fields['action'].choices = self.get_action_choices(request)
else:
action_form = None
selection_note_all = ungettext('%(total_count)s selected',
'All %(total_count)s selected', cl.result_count)
context = dict(
self.admin_site.each_context(request),
module_name=force_text(opts.verbose_name_plural),
selection_note=_('0 of %(cnt)s selected') % {'cnt': len(cl.result_list)},
selection_note_all=selection_note_all % {'total_count': cl.result_count},
title=cl.title,
is_popup=cl.is_popup,
to_field=cl.to_field,
cl=cl,
media=media,
has_add_permission=self.has_add_permission(request),
opts=cl.opts,
action_form=action_form,
actions_on_top=self.actions_on_top,
actions_on_bottom=self.actions_on_bottom,
actions_selection_counter=self.actions_selection_counter,
preserved_filters=self.get_preserved_filters(request),
)
context.update(extra_context or {})
request.current_app = self.admin_site.name
return TemplateResponse(request, self.change_list_template or [
'admin/%s/%s/change_list.html' % (app_label, opts.model_name),
'admin/%s/change_list.html' % app_label,
'admin/change_list.html'
], context)
@csrf_protect_m
@transaction.atomic
def delete_view(self, request, object_id, extra_context=None):
"The 'delete' admin view for this model."
opts = self.model._meta
app_label = opts.app_label
to_field = request.POST.get(TO_FIELD_VAR, request.GET.get(TO_FIELD_VAR))
if to_field and not self.to_field_allowed(request, to_field):
raise DisallowedModelAdminToField("The field %s cannot be referenced." % to_field)
obj = self.get_object(request, unquote(object_id), to_field)
if not self.has_delete_permission(request, obj):
raise PermissionDenied
if obj is None:
raise Http404(
_('%(name)s object with primary key %(key)r does not exist.') %
{'name': force_text(opts.verbose_name), 'key': escape(object_id)}
)
using = router.db_for_write(self.model)
# Populate deleted_objects, a data structure of all related objects that
# will also be deleted.
(deleted_objects, model_count, perms_needed, protected) = get_deleted_objects(
[obj], opts, request.user, self.admin_site, using)
if request.POST: # The user has already confirmed the deletion.
if perms_needed:
raise PermissionDenied
obj_display = force_text(obj)
attr = str(to_field) if to_field else opts.pk.attname
obj_id = obj.serializable_value(attr)
self.log_deletion(request, obj, obj_display)
self.delete_model(request, obj)
return self.response_delete(request, obj_display, obj_id)
object_name = force_text(opts.verbose_name)
if perms_needed or protected:
title = _("Cannot delete %(name)s") % {"name": object_name}
else:
title = _("Are you sure?")
context = dict(
self.admin_site.each_context(request),
title=title,
object_name=object_name,
object=obj,
deleted_objects=deleted_objects,
model_count=dict(model_count).items(),
perms_lacking=perms_needed,
protected=protected,
opts=opts,
app_label=app_label,
preserved_filters=self.get_preserved_filters(request),
is_popup=(IS_POPUP_VAR in request.POST or
IS_POPUP_VAR in request.GET),
to_field=to_field,
)
context.update(extra_context or {})
return self.render_delete_form(request, context)
def history_view(self, request, object_id, extra_context=None):
"The 'history' admin view for this model."
from django.contrib.admin.models import LogEntry
# First check if the user can see this history.
model = self.model
obj = self.get_object(request, unquote(object_id))
if obj is None:
raise Http404(_('%(name)s object with primary key %(key)r does not exist.') % {
'name': force_text(model._meta.verbose_name),
'key': escape(object_id),
})
if not self.has_change_permission(request, obj):
raise PermissionDenied
# Then get the history for this object.
opts = model._meta
app_label = opts.app_label
action_list = LogEntry.objects.filter(
object_id=unquote(object_id),
content_type=get_content_type_for_model(model)
).select_related().order_by('action_time')
context = dict(self.admin_site.each_context(request),
title=_('Change history: %s') % force_text(obj),
action_list=action_list,
module_name=capfirst(force_text(opts.verbose_name_plural)),
object=obj,
opts=opts,
preserved_filters=self.get_preserved_filters(request),
)
context.update(extra_context or {})
request.current_app = self.admin_site.name
return TemplateResponse(request, self.object_history_template or [
"admin/%s/%s/object_history.html" % (app_label, opts.model_name),
"admin/%s/object_history.html" % app_label,
"admin/object_history.html"
], context)
def _create_formsets(self, request, obj, change):
"Helper function to generate formsets for add/change_view."
formsets = []
inline_instances = []
prefixes = {}
get_formsets_args = [request]
if change:
get_formsets_args.append(obj)
for FormSet, inline in self.get_formsets_with_inlines(*get_formsets_args):
prefix = FormSet.get_default_prefix()
prefixes[prefix] = prefixes.get(prefix, 0) + 1
if prefixes[prefix] != 1 or not prefix:
prefix = "%s-%s" % (prefix, prefixes[prefix])
formset_params = {
'instance': obj,
'prefix': prefix,
'queryset': inline.get_queryset(request),
}
if request.method == 'POST':
formset_params.update({
'data': request.POST,
'files': request.FILES,
'save_as_new': '_saveasnew' in request.POST
})
formsets.append(FormSet(**formset_params))
inline_instances.append(inline)
return formsets, inline_instances
class InlineModelAdmin(BaseModelAdmin):
"""
Options for inline editing of ``model`` instances.
Provide ``fk_name`` to specify the attribute name of the ``ForeignKey``
from ``model`` to its parent. This is required if ``model`` has more than
one ``ForeignKey`` to its parent.
"""
model = None
fk_name = None
formset = BaseInlineFormSet
extra = 3
min_num = None
max_num = None
template = None
verbose_name = None
verbose_name_plural = None
can_delete = True
show_change_link = False
checks_class = InlineModelAdminChecks
classes = None
def __init__(self, parent_model, admin_site):
self.admin_site = admin_site
self.parent_model = parent_model
self.opts = self.model._meta
self.has_registered_model = admin_site.is_registered(self.model)
super(InlineModelAdmin, self).__init__()
if self.verbose_name is None:
self.verbose_name = self.model._meta.verbose_name
if self.verbose_name_plural is None:
self.verbose_name_plural = self.model._meta.verbose_name_plural
@property
def media(self):
extra = '' if settings.DEBUG else '.min'
js = ['vendor/jquery/jquery%s.js' % extra, 'jquery.init.js',
'inlines%s.js' % extra]
if self.filter_vertical or self.filter_horizontal:
js.extend(['SelectBox.js', 'SelectFilter2.js'])
if self.classes and 'collapse' in self.classes:
js.append('collapse%s.js' % extra)
return forms.Media(js=['admin/js/%s' % url for url in js])
def get_extra(self, request, obj=None, **kwargs):
"""Hook for customizing the number of extra inline forms."""
return self.extra
def get_min_num(self, request, obj=None, **kwargs):
"""Hook for customizing the min number of inline forms."""
return self.min_num
def get_max_num(self, request, obj=None, **kwargs):
"""Hook for customizing the max number of extra inline forms."""
return self.max_num
def get_formset(self, request, obj=None, **kwargs):
"""Returns a BaseInlineFormSet class for use in admin add/change views."""
if 'fields' in kwargs:
fields = kwargs.pop('fields')
else:
fields = flatten_fieldsets(self.get_fieldsets(request, obj))
if self.exclude is None:
exclude = []
else:
exclude = list(self.exclude)
exclude.extend(self.get_readonly_fields(request, obj))
if self.exclude is None and hasattr(self.form, '_meta') and self.form._meta.exclude:
# Take the custom ModelForm's Meta.exclude into account only if the
# InlineModelAdmin doesn't define its own.
exclude.extend(self.form._meta.exclude)
# If exclude is an empty list we use None, since that's the actual
# default.
exclude = exclude or None
can_delete = self.can_delete and self.has_delete_permission(request, obj)
defaults = {
"form": self.form,
"formset": self.formset,
"fk_name": self.fk_name,
"fields": fields,
"exclude": exclude,
"formfield_callback": partial(self.formfield_for_dbfield, request=request),
"extra": self.get_extra(request, obj, **kwargs),
"min_num": self.get_min_num(request, obj, **kwargs),
"max_num": self.get_max_num(request, obj, **kwargs),
"can_delete": can_delete,
}
defaults.update(kwargs)
base_model_form = defaults['form']
class DeleteProtectedModelForm(base_model_form):
def hand_clean_DELETE(self):
"""
                We don't validate the 'DELETE' field itself because in the
                templates it's not rendered using the field information;
                instead, a generic "deletion_field" of the InlineModelAdmin
                is used.
"""
if self.cleaned_data.get(DELETION_FIELD_NAME, False):
using = router.db_for_write(self._meta.model)
collector = NestedObjects(using=using)
if self.instance.pk is None:
return
collector.collect([self.instance])
if collector.protected:
objs = []
for p in collector.protected:
objs.append(
# Translators: Model verbose name and instance representation,
# suitable to be an item in a list.
_('%(class_name)s %(instance)s') % {
'class_name': p._meta.verbose_name,
'instance': p}
)
params = {'class_name': self._meta.model._meta.verbose_name,
'instance': self.instance,
'related_objects': get_text_list(objs, _('and'))}
msg = _("Deleting %(class_name)s %(instance)s would require "
"deleting the following protected related objects: "
"%(related_objects)s")
raise ValidationError(msg, code='deleting_protected', params=params)
def is_valid(self):
result = super(DeleteProtectedModelForm, self).is_valid()
self.hand_clean_DELETE()
return result
defaults['form'] = DeleteProtectedModelForm
if defaults['fields'] is None and not modelform_defines_fields(defaults['form']):
defaults['fields'] = forms.ALL_FIELDS
return inlineformset_factory(self.parent_model, self.model, **defaults)
def get_fields(self, request, obj=None):
if self.fields:
return self.fields
form = self.get_formset(request, obj, fields=None).form
return list(form.base_fields) + list(self.get_readonly_fields(request, obj))
def get_queryset(self, request):
queryset = super(InlineModelAdmin, self).get_queryset(request)
if not self.has_change_permission(request):
queryset = queryset.none()
return queryset
def has_add_permission(self, request):
if self.opts.auto_created:
# We're checking the rights to an auto-created intermediate model,
# which doesn't have its own individual permissions. The user needs
# to have the change permission for the related model in order to
# be able to do anything with the intermediate model.
return self.has_change_permission(request)
return super(InlineModelAdmin, self).has_add_permission(request)
def has_change_permission(self, request, obj=None):
opts = self.opts
if opts.auto_created:
# The model was auto-created as intermediary for a
# ManyToMany-relationship, find the target model
for field in opts.fields:
if field.remote_field and field.remote_field.model != self.parent_model:
opts = field.remote_field.model._meta
break
codename = get_permission_codename('change', opts)
return request.user.has_perm("%s.%s" % (opts.app_label, codename))
def has_delete_permission(self, request, obj=None):
if self.opts.auto_created:
# We're checking the rights to an auto-created intermediate model,
# which doesn't have its own individual permissions. The user needs
# to have the change permission for the related model in order to
# be able to do anything with the intermediate model.
return self.has_change_permission(request, obj)
return super(InlineModelAdmin, self).has_delete_permission(request, obj)
class StackedInline(InlineModelAdmin):
template = 'admin/edit_inline/stacked.html'
class TabularInline(InlineModelAdmin):
template = 'admin/edit_inline/tabular.html'
| bsd-3-clause |
leebird/legonlp | annotation/align.py | 1 | 2062 | # -*- coding: utf-8 -*-
import os
import sys
import re
import codecs
from alignment import Alignment,Hirschberg
from readers import AnnParser
from writers import AnnWriter
writer = AnnWriter()
def get_phrase(text):
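    """Split text into maximal runs of letters, digits, whitespace or
    the listed punctuation, keeping unmatched spans intact."""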
p = re.compile(ur'[a-zA-Z]+|[0-9]+|\s+|[.,;!\(\)]+')
lista = []
pre = 0
for m in p.finditer(text):
start = m.start()
end = m.end()
if pre < start:
lista.append(text[pre:start])
lista.append(text[start:end])
pre = end
    # Keep any trailing span that the pattern did not match.
    if pre < len(text):
        lista.append(text[pre:])
    return lista
for root,_,files in os.walk('input'):
for f in files:
if not f.endswith('.txt'):
continue
pmid = f[:-4]
print pmid
alter = os.path.join(root,pmid+'.txt')
alterFile = codecs.open(alter,'r','utf-8')
alterText = alterFile.read().strip()
alterFile.close()
reader = AnnParser(root,pmid+'.ann')
annotation = reader.parse()
if len(annotation['T']) == 0:
writer.write('output',pmid+'.ann',annotation)
continue
gold = os.path.join('output',pmid+'.txt')
goldFile = codecs.open(gold,'r','utf-8')
goldText = goldFile.read().strip()
goldFile.close()
entities = annotation['T']
goldPhrases = get_phrase(goldText)
alterPhrases = get_phrase(alterText)
h = Hirschberg(goldPhrases,alterPhrases)
#h = Hirschberg(list(goldText),list(alterText))
alignGold,alignAlter = h.align()
#print ''.join(alignGold)
#print ''.join(alignAlter)
alter2gold = h.map_alignment(''.join(alignGold),''.join(alignAlter))
for k,e in entities.iteritems():
start = int(e.start)
end = int(e.end)
e.start = alter2gold[start]
if alter2gold[end] - alter2gold[end-1] > 1:
e.end = alter2gold[end-1]+1
else:
e.end = alter2gold[end]
e.text = goldText[e.start:e.end]
writer.write('output',pmid+'.ann',annotation)
| gpl-2.0 |
kervinck/gigatron-rom | Contrib/at67/hw/BabelFish_nor/tinyfont.py | 4 | 4452 |
#-----------------------------------------------------------------------
#
# tinyfont.py -- a very small font
#
#-----------------------------------------------------------------------
#
# Description:
#
# The tiny font is an 3x5 pixel monospaced font where each character
# is placed in a box of 4x6 pixels. At the native 160x120 Gigatron
# resolution we can display 40x20 characters.
#
# The encoding has 3x5 bits for the pixels and 1 shift bit to indicate
# an overall shift down for characters g, j, p, q and y. The lowercase
# j needs special handling. This scheme gives 2 bytes per character,
# for a total of 192 bytes.
#
# References:
#
# http://vt100.tarunz.org
# VT100 Terminal for Pilot (Brian J. Swetland)
#
# https://robey.lag.net/2010/01/23/tiny-monospace-font.html
# A very tiny, monospace, bitmap font (Robey Pointer)
#
# https://fonts2u.com/small-5x3-regular.font
# Small 5X3 regular (soxhead2000)
#
# https://github.com/olikraus/u8g2
# U8glib library for monochrome displays, version 2
#
# History:
#
# 2018-08-01 (marcelk) Initial version inspired by open source examples
# 2018-08-xx (marcelk) Update of @corz
#
#-----------------------------------------------------------------------
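# A minimal decoding sketch (not part of the original tool; the name
# decode_word is illustrative). It inverts the packing performed by
# convert() below: bit 15 of the signed 16-bit value flags the shift-down
# glyphs (g, j, p, q and y), which occupy rows 1-5 instead of rows 0-4.
def decode_word(value):
    shift = 1 if value < 0 else 0
    word = value + 0x8000 if value < 0 else value
    rows = [['.'] * 3 for _ in range(6)]
    for y in range(shift, 5 + shift):
        for x in range(3):
            if word & (1 << (5 * (3 - x) - y - 1 + shift)):
                rows[y][x] = '@'
    return [''.join(row) for row in rows]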
def convert(font):
for c in range(96):
i, j = c % 16, c / 16
word, shift = 0, 0
for y in range(6):
for x in range(3):
p = ((j * 6 + y) * 16 + i) * 4 + x + 1
if font[p] == '@':
if y == 5 and shift == 0:
shift, word = 1, word << 1
word |= 1 << (5*(3-x) - y - 1 + shift)
if shift == 0:
print ' 0x%04x,' % word,
else:
print '-0x%04x,' % (0x8000 - word),
print '// %s' % repr(chr(32+c))
tinyfont =\
'......@..@.@.@.@..@@.@....@...@....@.@..........................'\
'......@..@.@.@@@.@@....@.@.@..@...@...@..@.@..@................@'\
'......@......@.@..@@..@...@.......@...@...@..@@@.....@@@......@.'\
'.............@@@.@@..@...@.@......@...@..@.@..@...@..........@..'\
'......@......@.@..@....@..@@.......@.@...........@........@.....'\
'................................................................'\
'..@@..@..@@..@@..@.@.@@@..@@.@@@.@@@.@@@...........@.....@...@@@'\
'.@.@.@@....@...@.@.@.@...@.....@.@.@.@.@..@...@...@..@@@..@....@'\
'.@.@..@...@..@@..@@@.@@..@@@..@..@@@.@@@.........@.........@..@.'\
'.@.@..@..@.....@...@...@.@.@.@...@.@...@..@...@...@..@@@..@.....'\
'.@@..@@@.@@@.@@....@.@@..@@@.@...@@@.@@......@.....@.....@....@.'\
'................................................................'\
'..@@..@..@@...@@.@@..@@@.@@@..@@.@.@.@@@..@@.@.@.@...@.@.@.@.@@@'\
'.@.@.@.@.@.@.@...@.@.@...@...@...@.@..@....@.@.@.@...@@@.@@@.@.@'\
'.@.@.@@@.@@..@...@.@.@@..@@..@.@.@@@..@....@.@@..@...@@@.@@@.@.@'\
'.@...@.@.@.@.@...@.@.@...@...@.@.@.@..@..@.@.@.@.@...@.@.@@@.@.@'\
'..@@.@.@.@@...@@.@@..@@@.@...@@@.@.@.@@@..@..@.@.@@@.@.@.@.@.@@@'\
'................................................................'\
'.@@...@..@@...@@.@@@.@.@.@.@.@.@.@.@.@.@.@@@..@@.....@@...@.....'\
'.@.@.@.@.@.@.@....@..@.@.@.@.@.@.@.@.@.@...@..@..@....@..@.@....'\
'.@.@.@.@.@@@..@...@..@.@.@.@.@@@..@..@@@..@...@...@...@.........'\
'.@@..@@@.@@....@..@..@.@.@.@.@@@.@.@..@..@....@....@..@.........'\
'.@....@@.@.@.@@...@..@@@..@..@.@.@.@..@..@@@..@@.....@@......@@@'\
'................................................................'\
'.@.......@.........@.......@.....@....@....O.@...@@.............'\
'..@...@@.@@...@@..@@..@@..@...@@.@@..........@.@..@..@@@.@@...@.'\
'.....@.@.@.@.@...@.@.@.@.@@@.@.@.@.@.@@...@@.@@...@..@@@.@.@.@.@'\
'.....@.@.@.@.@...@.@.@@...@..@@@.@.@..@....@.@.@..@..@@@.@.@.@.@'\
'.....@@@.@@...@@..@@..@@..@....@.@.@.@@@.@.@.@.@.@@@.@.@.@.@..@.'\
'..............................@.......... @.....................'\
'..................@...........................@@..@..@@...@@.@@@'\
'.@@...@@.@.@..@@.@@@.@.@.@.@.@.@.@.@.@.@.@@@..@...@...@..@@..@@@'\
'.@.@.@.@.@@..@@...@..@.@.@.@.@@@..@..@.@...@.@@...@...@@.....@@@'\
'.@.@.@.@.@....@@..@..@.@.@@@.@@@..@...@@..@...@...@...@......@@@'\
'.@@...@@.@...@@...@@..@@..@..@@@.@.@...@.@@@..@@..@..@@......@@@'\
'.@.....@..............................@.........................'
print '// Generated by tinyfont.py'
convert(tinyfont)
| bsd-2-clause |
skycucumber/xuemc | python/venv/lib/python2.7/site-packages/coverage/__init__.py | 208 | 4505 | """Code coverage measurement for Python.
Ned Batchelder
http://nedbatchelder.com/code/coverage
"""
from coverage.version import __version__, __url__
from coverage.control import coverage, process_startup
from coverage.data import CoverageData
from coverage.cmdline import main, CoverageScript
from coverage.misc import CoverageException
# Module-level functions. The original API to this module was based on
# functions defined directly in the module, with a singleton of the coverage()
# class. That design hampered programmability, so the current api uses
# explicitly-created coverage objects. But for backward compatibility, here we
# define the top-level functions to create the singleton when they are first
# called.
# Singleton object for use with module-level functions. The singleton is
# created as needed when one of the module-level functions is called.
_the_coverage = None
def _singleton_method(name):
"""Return a function to the `name` method on a singleton `coverage` object.
The singleton object is created the first time one of these functions is
called.
"""
# Disable pylint msg W0612, because a bunch of variables look unused, but
# they're accessed via locals().
# pylint: disable=W0612
def wrapper(*args, **kwargs):
"""Singleton wrapper around a coverage method."""
global _the_coverage
if not _the_coverage:
_the_coverage = coverage(auto_data=True)
return getattr(_the_coverage, name)(*args, **kwargs)
import inspect
meth = getattr(coverage, name)
args, varargs, kw, defaults = inspect.getargspec(meth)
argspec = inspect.formatargspec(args[1:], varargs, kw, defaults)
docstring = meth.__doc__
wrapper.__doc__ = ("""\
A first-use-singleton wrapper around coverage.%(name)s.
This wrapper is provided for backward compatibility with legacy code.
New code should use coverage.%(name)s directly.
%(name)s%(argspec)s:
%(docstring)s
""" % locals()
)
return wrapper
# Define the module-level functions.
use_cache = _singleton_method('use_cache')
start = _singleton_method('start')
stop = _singleton_method('stop')
erase = _singleton_method('erase')
exclude = _singleton_method('exclude')
analysis = _singleton_method('analysis')
analysis2 = _singleton_method('analysis2')
report = _singleton_method('report')
annotate = _singleton_method('annotate')
# On Windows, we encode and decode deep enough that something goes wrong and
# the encodings.utf_8 module is loaded and then unloaded, I don't know why.
# Adding a reference here prevents it from being unloaded. Yuk.
import encodings.utf_8
# Because of the "from coverage.control import fooey" lines at the top of the
# file, there's an entry for coverage.coverage in sys.modules, mapped to None.
# This makes some inspection tools (like pydoc) unable to find the class
# coverage.coverage. So remove that entry.
import sys
try:
del sys.modules['coverage.coverage']
except KeyError:
pass
# COPYRIGHT AND LICENSE
#
# Copyright 2001 Gareth Rees. All rights reserved.
# Copyright 2004-2013 Ned Batchelder. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the
# distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# HOLDERS AND CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
# OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
# TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
# DAMAGE.
| gpl-2.0 |
sangh/LaserShow | pyglet-hg/tests/window/WINDOW_INITIAL_FULLSCREEN.py | 33 | 1474 | #!/usr/bin/env python
'''Test that a window can be opened fullscreen.
Expected behaviour:
A fullscreen window will be created, with a flat purple colour.
- Press 'g' to leave fullscreen mode and create a window.
- Press 'f' to re-enter fullscreen mode.
- All events will be printed to the console. Ensure that mouse,
keyboard and activation/deactivation events are all correct.
Close either window or press ESC to end the test.
'''
__docformat__ = 'restructuredtext'
__version__ = '$Id: $'
import unittest
import pyglet.window
from pyglet import window
from pyglet.window.event import WindowEventLogger
from pyglet.window import key
from pyglet.gl import *
class WINDOW_INITIAL_FULLSCREEN(unittest.TestCase):
def on_key_press(self, symbol, modifiers):
if symbol == key.F:
print 'Setting fullscreen.'
self.w.set_fullscreen(True)
elif symbol == key.G:
print 'Leaving fullscreen.'
self.w.set_fullscreen(False)
def on_expose(self):
glClearColor(1, 0, 1, 1)
glClear(GL_COLOR_BUFFER_BIT)
self.w.flip()
def test_initial_fullscreen(self):
self.w = window.Window(fullscreen=True)
self.w.push_handlers(self)
self.w.push_handlers(WindowEventLogger())
self.on_expose()
while not self.w.has_exit:
self.w.dispatch_events()
self.w.close()
if __name__ == '__main__':
unittest.main()
| bsd-3-clause |
ga7g08/sympy | sympy/series/formal.py | 24 | 34616 | """Formal Power Series"""
from __future__ import print_function, division
from collections import defaultdict
from sympy import oo, zoo, nan
from sympy.core.expr import Expr
from sympy.core.add import Add
from sympy.core.mul import Mul
from sympy.core.function import Derivative, Function
from sympy.core.singleton import S
from sympy.core.sympify import sympify
from sympy.core.symbol import Wild, Dummy, symbols, Symbol
from sympy.core.relational import Eq
from sympy.core.numbers import Rational
from sympy.core.compatibility import iterable
from sympy.sets.sets import Interval
from sympy.functions.combinatorial.factorials import binomial, factorial, rf
from sympy.functions.elementary.piecewise import Piecewise
from sympy.functions.elementary.integers import floor, frac, ceiling
from sympy.functions.elementary.miscellaneous import Min, Max
from sympy.series.sequences import sequence
from sympy.series.series_class import SeriesBase
from sympy.series.order import Order
from sympy.series.limits import Limit
def rational_algorithm(f, x, k, order=4, full=False):
"""Rational algorithm for computing
formula of coefficients of Formal Power Series
of a function.
Applicable when f(x) or some derivative of f(x)
is a rational function in x.
:func:`rational_algorithm` uses :func:`apart` function for partial fraction
decomposition. :func:`apart` by default uses 'undetermined coefficients
method'. By setting ``full=True``, 'Bronstein's algorithm' can be used
instead.
    Looks for derivatives of the function up to 4th order (by default).
    This can be overridden using the ``order`` option.
Returns
=======
formula : Expr
ind : Expr
Independent terms.
order : int
Examples
========
>>> from sympy import log, atan, I
>>> from sympy.series.formal import rational_algorithm as ra
>>> from sympy.abc import x, k
>>> ra(1 / (1 - x), x, k)
(1, 0, 0)
>>> ra(log(1 + x), x, k)
(-(-1)**(-k)/k, 0, 1)
>>> ra(atan(x), x, k, full=True)
((-I*(-I)**(-k)/2 + I*I**(-k)/2)/k, 0, 1)
Notes
=====
By setting ``full=True``, range of admissible functions to be solved using
``rational_algorithm`` can be increased. This option should be used
carefully as it can signifcantly slow down the computation as ``doit`` is
performed on the :class:`RootSum` object returned by the ``apart`` function.
Use ``full=False`` whenever possible.
See Also
========
sympy.polys.partfrac.apart
References
==========
.. [1] Formal Power Series - Dominik Gruntz, Wolfram Koepf
.. [2] Power Series in Computer Algebra - Wolfram Koepf
"""
from sympy.polys import RootSum, apart
from sympy.integrals import integrate
diff = f
ds = [] # list of diff
for i in range(order + 1):
if i:
diff = diff.diff(x)
if diff.is_rational_function(x):
coeff, sep = S.Zero, S.Zero
terms = apart(diff, x, full=full)
if terms.has(RootSum):
terms = terms.doit()
for t in Add.make_args(terms):
num, den = t.as_numer_denom()
if not den.has(x):
sep += t
else:
if isinstance(den, Mul):
# m*(n*x - a)**j -> (n*x - a)**j
ind = den.as_independent(x)
den = ind[1]
num /= ind[0]
# (n*x - a)**j -> (x - b)
den, j = den.as_base_exp()
a, xterm = den.as_coeff_add(x)
# term -> m/x**n
if not a:
sep += t
continue
xc = xterm[0].coeff(x)
a /= -xc
num /= xc**j
ak = ((-1)**j * num *
binomial(j + k - 1, k).rewrite(factorial) /
a**(j + k))
coeff += ak
# Hacky, better way?
if coeff is S.Zero:
return None
if (coeff.has(x) or coeff.has(zoo) or coeff.has(oo) or
coeff.has(nan)):
return None
for j in range(i):
coeff = (coeff / (k + j + 1))
sep = integrate(sep, x)
sep += (ds.pop() - sep).limit(x, 0) # constant of integration
return (coeff.subs(k, k - i), sep, i)
else:
ds.append(diff)
return None
def rational_independent(terms, x):
"""Returns a list of all the rationally independent terms.
Examples
========
>>> from sympy import sin, cos
>>> from sympy.series.formal import rational_independent
>>> from sympy.abc import x
>>> rational_independent([cos(x), sin(x)], x)
[cos(x), sin(x)]
>>> rational_independent([x**2, sin(x), x*sin(x), x**3], x)
[x**3 + x**2, x*sin(x) + sin(x)]
"""
if not terms:
return []
ind = terms[0:1]
for t in terms[1:]:
n = t.as_independent(x)[1]
for i, term in enumerate(ind):
d = term.as_independent(x)[1]
q = (n / d).cancel()
if q.is_rational_function(x):
ind[i] += t
break
else:
ind.append(t)
return ind
def simpleDE(f, x, g, order=4):
"""Generates simple DE.
DE is of the form
.. math::
f^k(x) + \sum\limits_{j=0}^{k-1} A_j f^j(x) = 0
    where :math:`A_j` should be a rational function in x.
    Generates DEs up to order 4 (by default). DEs can also have free
    parameters. By increasing the order, higher-order DEs can be found.
Yields a tuple of (DE, order).
"""
from sympy.solvers.solveset import linsolve
a = symbols('a:%d' % (order))
def _makeDE(k):
eq = f.diff(x, k) + Add(*[a[i]*f.diff(x, i) for i in range(0, k)])
DE = g(x).diff(x, k) + Add(*[a[i]*g(x).diff(x, i) for i in range(0, k)])
return eq, DE
eq, DE = _makeDE(order)
found = False
for k in range(1, order + 1):
eq, DE = _makeDE(k)
eq = eq.expand()
terms = eq.as_ordered_terms()
ind = rational_independent(terms, x)
if found or len(ind) == k:
sol = dict(zip(a, (i for s in linsolve(ind, a[:k]) for i in s)))
if sol:
found = True
DE = DE.subs(sol)
DE = DE.as_numer_denom()[0]
DE = DE.factor().as_coeff_mul(Derivative)[1][0]
yield DE.collect(Derivative(g(x))), k
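# Illustrative usage of simpleDE (an assumption based on the docstring
# above; the exact printed form may differ between SymPy versions):
#
#   >>> from sympy import Function, exp
#   >>> from sympy.abc import x
#   >>> g = Function('g')
#   >>> next(simpleDE(exp(x), x, g))
#   (-g(x) + Derivative(g(x), x), 1)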
def exp_re(DE, r, k):
"""Converts a DE with constant coefficients (explike) into a RE.
Performs the substitution:
.. math::
f^j(x) \\to r(k + j)
    Normalises the terms so that the lowest-order term is always r(k).
Examples
========
>>> from sympy import Function, Derivative
>>> from sympy.series.formal import exp_re
>>> from sympy.abc import x, k
>>> f, r = Function('f'), Function('r')
>>> exp_re(-f(x) + Derivative(f(x)), r, k)
-r(k) + r(k + 1)
>>> exp_re(Derivative(f(x), x) + Derivative(f(x), x, x), r, k)
r(k) + r(k + 1)
See Also
========
sympy.series.formal.hyper_re
"""
RE = S.Zero
g = DE.atoms(Function).pop()
mini = None
for t in Add.make_args(DE):
coeff, d = t.as_independent(g)
if isinstance(d, Derivative):
j = len(d.args) - 1
else:
j = 0
if mini is None or j < mini:
mini = j
RE += coeff * r(k + j)
if mini:
RE = RE.subs(k, k - mini)
return RE
def hyper_re(DE, r, k):
"""Converts a DE into a RE.
Performs the substitution:
.. math::
        x^l f^j(x) \\to (k + 1 - l)_j \\cdot a_{k + j - l}
    Normalises the terms so that the lowest-order term is always r(k).
Examples
========
>>> from sympy import Function, Derivative
>>> from sympy.series.formal import hyper_re
>>> from sympy.abc import x, k
>>> f, r = Function('f'), Function('r')
>>> hyper_re(-f(x) + Derivative(f(x)), r, k)
(k + 1)*r(k + 1) - r(k)
>>> hyper_re(-x*f(x) + Derivative(f(x), x, x), r, k)
(k + 2)*(k + 3)*r(k + 3) - r(k)
See Also
========
sympy.series.formal.exp_re
"""
RE = S.Zero
g = DE.atoms(Function).pop()
x = g.atoms(Symbol).pop()
mini = None
for t in Add.make_args(DE.expand()):
coeff, d = t.as_independent(g)
c, v = coeff.as_independent(x)
l = v.as_coeff_exponent(x)[1]
if isinstance(d, Derivative):
j = len(d.args[1:])
else:
j = 0
RE += c * rf(k + 1 - l, j) * r(k + j - l)
if mini is None or j - l < mini:
mini = j - l
RE = RE.subs(k, k - mini)
m = Wild('m')
return RE.collect(r(k + m))
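# Transformation (a) from rsolve_hypergeometric's docstring: dividing f by
# x**shift shifts the recurrence index by `shift` in P and Q.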
def _transformation_a(f, x, P, Q, k, m, shift):
f *= x**(-shift)
P = P.subs(k, k + shift)
Q = Q.subs(k, k + shift)
return f, P, Q, m
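# Transformation (c) from rsolve_hypergeometric's docstring: substituting
# x -> x**scale rescales the recurrence index by `scale`.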
def _transformation_c(f, x, P, Q, k, m, scale):
f = f.subs(x, x**scale)
P = P.subs(k, k / scale)
Q = Q.subs(k, k / scale)
m *= scale
return f, P, Q, m
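# Transformation (e) from rsolve_hypergeometric's docstring:
# f'(x) gives b(k + m) = ((k + m + 1)/(k + 1))*R(k + 1)*b(k).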
def _transformation_e(f, x, P, Q, k, m):
f = f.diff(x)
P = P.subs(k, k + 1) * (k + m + 1)
Q = Q.subs(k, k + 1) * (k + 1)
return f, P, Q, m
def _apply_shift(sol, shift):
return [(res, cond + shift) for res, cond in sol]
def _apply_scale(sol, scale):
return [(res, cond / scale) for res, cond in sol]
def _apply_integrate(sol, x, k):
return [(res / ((cond + 1)*(cond.as_coeff_Add()[1].coeff(k))), cond + 1)
for res, cond in sol]
def _compute_formula(f, x, P, Q, k, m, k_max):
"""Computes the formula for f."""
from sympy.polys import roots
sol = []
for i in range(k_max + 1, k_max + m + 1):
r = f.diff(x, i).limit(x, 0) / factorial(i)
if r is S.Zero:
continue
kterm = m*k + i
res = r
p = P.subs(k, kterm)
q = Q.subs(k, kterm)
c1 = p.subs(k, 1/k).leadterm(k)[0]
c2 = q.subs(k, 1/k).leadterm(k)[0]
res *= (-c1 / c2)**k
for r, mul in roots(p, k).items():
res *= rf(-r, k)**mul
for r, mul in roots(q, k).items():
res /= rf(-r, k)**mul
sol.append((res, kterm))
return sol
def _rsolve_hypergeometric(f, x, P, Q, k, m):
"""Recursive wrapper to rsolve_hypergeometric.
Returns a Tuple of (formula, series independent terms,
maximum power of x in independent terms) if successful
otherwise ``None``.
See :func:`rsolve_hypergeometric` for details.
"""
from sympy.polys import lcm, roots
from sympy.integrals import integrate
    # transformation - c
proots, qroots = roots(P, k), roots(Q, k)
all_roots = dict(proots)
all_roots.update(qroots)
scale = lcm([r.as_numer_denom()[1] for r, t in all_roots.items()
if r.is_rational])
f, P, Q, m = _transformation_c(f, x, P, Q, k, m, scale)
# transformation - a
qroots = roots(Q, k)
if qroots:
k_min = Min(*qroots.keys())
else:
k_min = S.Zero
shift = k_min + m
f, P, Q, m = _transformation_a(f, x, P, Q, k, m, shift)
l = (x*f).limit(x, 0)
if not isinstance(l, Limit) and l != 0: # Ideally should only be l != 0
return None
qroots = roots(Q, k)
if qroots:
k_max = Max(*qroots.keys())
else:
k_max = S.Zero
ind, mp = S.Zero, -oo
for i in range(k_max + m + 1):
r = f.diff(x, i).limit(x, 0) / factorial(i)
if r.is_finite is False:
old_f = f
f, P, Q, m = _transformation_a(f, x, P, Q, k, m, i)
f, P, Q, m = _transformation_e(f, x, P, Q, k, m)
sol, ind, mp = _rsolve_hypergeometric(f, x, P, Q, k, m)
sol = _apply_integrate(sol, x, k)
sol = _apply_shift(sol, i)
ind = integrate(ind, x)
ind += (old_f - ind).limit(x, 0) # constant of integration
mp += 1
return sol, ind, mp
elif r:
ind += r*x**(i + shift)
pow_x = Rational((i + shift), scale)
if pow_x > mp:
mp = pow_x # maximum power of x
ind = ind.subs(x, x**(1/scale))
sol = _compute_formula(f, x, P, Q, k, m, k_max)
sol = _apply_shift(sol, shift)
sol = _apply_scale(sol, scale)
return sol, ind, mp
def rsolve_hypergeometric(f, x, P, Q, k, m):
"""Solves RE of hypergeometric type.
Attempts to solve RE of the form
Q(k)*a(k + m) - P(k)*a(k)
Transformations that preserve Hypergeometric type:
a. x**n*f(x): b(k + m) = R(k - n)*b(k)
b. f(A*x): b(k + m) = A**m*R(k)*b(k)
c. f(x**n): b(k + n*m) = R(k/n)*b(k)
d. f(x**(1/m)): b(k + 1) = R(k*m)*b(k)
e. f'(x): b(k + m) = ((k + m + 1)/(k + 1))*R(k + 1)*b(k)
Some of these transformations have been used to solve the RE.
Returns
=======
formula : Expr
ind : Expr
Independent terms.
order : int
Examples
========
>>> from sympy import exp, ln, S
>>> from sympy.series.formal import rsolve_hypergeometric as rh
>>> from sympy.abc import x, k
>>> rh(exp(x), x, -S.One, (k + 1), k, 1)
(Piecewise((1/(factorial(k)), Eq(Mod(k, 1), 0)), (0, True)), 1, 1)
>>> rh(ln(1 + x), x, k**2, k*(k + 1), k, 1)
(Piecewise(((-1)**(k - 1)*factorial(k - 1)/RisingFactorial(2, k - 1),
Eq(Mod(k, 1), 0)), (0, True)), x, 2)
References
==========
.. [1] Formal Power Series - Dominik Gruntz, Wolfram Koepf
.. [2] Power Series in Computer Algebra - Wolfram Koepf
"""
result = _rsolve_hypergeometric(f, x, P, Q, k, m)
if result is None:
return None
sol_list, ind, mp = result
sol_dict = defaultdict(lambda: S.Zero)
for res, cond in sol_list:
j, mk = cond.as_coeff_Add()
c = mk.coeff(k)
if j.is_integer is False:
res *= x**frac(j)
j = floor(j)
res = res.subs(k, (k - j) / c)
cond = Eq(k % c, j % c)
sol_dict[cond] += res # Group together formula for same conditions
sol = []
for cond, res in sol_dict.items():
sol.append((res, cond))
sol.append((S.Zero, True))
sol = Piecewise(*sol)
if mp is -oo:
s = S.Zero
elif mp.is_integer is False:
s = ceiling(mp)
else:
s = mp + 1
# save all the terms of
# form 1/x**k in ind
if s < 0:
ind += sum(sequence(sol * x**k, (k, s, -1)))
s = S.Zero
return (sol, ind, s)
def _solve_hyper_RE(f, x, RE, g, k):
"""See docstring of :func:`rsolve_hypergeometric` for details."""
terms = Add.make_args(RE)
if len(terms) == 2:
gs = list(RE.atoms(Function))
P, Q = map(RE.coeff, gs)
m = gs[1].args[0] - gs[0].args[0]
if m < 0:
P, Q = Q, P
m = abs(m)
return rsolve_hypergeometric(f, x, P, Q, k, m)
def _solve_explike_DE(f, x, DE, g, k):
"""Solves DE with constant coefficients."""
from sympy.solvers import rsolve
for t in Add.make_args(DE):
coeff, d = t.as_independent(g)
if coeff.free_symbols:
return
RE = exp_re(DE, g, k)
init = {}
for i in range(len(Add.make_args(RE))):
if i:
f = f.diff(x)
init[g(k).subs(k, i)] = f.limit(x, 0)
sol = rsolve(RE, g(k), init)
if sol:
return (sol / factorial(k), S.Zero, S.Zero)
def _solve_simple(f, x, DE, g, k):
"""Converts DE into RE and solves using :func:`rsolve`."""
from sympy.solvers import rsolve
RE = hyper_re(DE, g, k)
init = {}
for i in range(len(Add.make_args(RE))):
if i:
f = f.diff(x)
init[g(k).subs(k, i)] = f.limit(x, 0) / factorial(i)
sol = rsolve(RE, g(k), init)
if sol:
return (sol, S.Zero, S.Zero)
def _transform_explike_DE(DE, g, x, order, syms):
"""Converts DE with free parameters into DE with constant coefficients."""
from sympy.solvers.solveset import linsolve
eq = []
highest_coeff = DE.coeff(Derivative(g(x), x, order))
for i in range(order):
coeff = DE.coeff(Derivative(g(x), x, i))
coeff = (coeff / highest_coeff).expand().collect(x)
for t in Add.make_args(coeff):
eq.append(t)
temp = []
for e in eq:
if e.has(x):
break
elif e.has(Symbol):
temp.append(e)
else:
eq = temp
if eq:
sol = dict(zip(syms, (i for s in linsolve(eq, list(syms)) for i in s)))
if sol:
DE = DE.subs(sol)
DE = DE.factor().as_coeff_mul(Derivative)[1][0]
DE = DE.collect(Derivative(g(x)))
return DE
def _transform_DE_RE(DE, g, k, order, syms):
"""Converts DE with free parameters into RE of hypergeometric type."""
from sympy.solvers.solveset import linsolve
RE = hyper_re(DE, g, k)
eq = []
for i in range(1, order):
coeff = RE.coeff(g(k + i))
eq.append(coeff)
sol = dict(zip(syms, (i for s in linsolve(eq, list(syms)) for i in s)))
if sol:
m = Wild('m')
RE = RE.subs(sol)
RE = RE.factor().as_numer_denom()[0].collect(g(k + m))
RE = RE.as_coeff_mul(g)[1][0]
for i in range(order): # smallest order should be g(k)
if RE.coeff(g(k + i)) and i:
RE = RE.subs(k, k - i)
break
return RE
def solve_de(f, x, DE, order, g, k):
"""Solves the DE.
Tries to solve DE by either converting into a RE containing two terms or
converting into a DE having constant coefficients.
Returns
=======
formula : Expr
ind : Expr
Independent terms.
order : int
Examples
========
>>> from sympy import Derivative as D
>>> from sympy import exp, ln
>>> from sympy.series.formal import solve_de
>>> from sympy.abc import x, k, f
>>> solve_de(exp(x), x, D(f(x), x) - f(x), 1, f, k)
(Piecewise((1/(factorial(k)), Eq(Mod(k, 1), 0)), (0, True)), 1, 1)
>>> solve_de(ln(1 + x), x, (x + 1)*D(f(x), x, 2) + D(f(x)), 2, f, k)
(Piecewise(((-1)**(k - 1)*factorial(k - 1)/RisingFactorial(2, k - 1),
Eq(Mod(k, 1), 0)), (0, True)), x, 2)
"""
sol = None
syms = DE.free_symbols.difference(set([g, x]))
if syms:
RE = _transform_DE_RE(DE, g, k, order, syms)
else:
RE = hyper_re(DE, g, k)
if not RE.free_symbols.difference(set([k])):
sol = _solve_hyper_RE(f, x, RE, g, k)
if sol:
return sol
if syms:
DE = _transform_explike_DE(DE, g, x, order, syms)
if not DE.free_symbols.difference(set([x])):
sol = _solve_explike_DE(f, x, DE, g, k)
if sol:
return sol
def hyper_algorithm(f, x, k, order=4):
"""Hypergeometric algorithm for computing Formal Power Series.
Steps:
* Generates DE
* Convert the DE into RE
* Solves the RE
Examples
========
>>> from sympy import exp, ln
>>> from sympy.series.formal import hyper_algorithm
>>> from sympy.abc import x, k
>>> hyper_algorithm(exp(x), x, k)
(Piecewise((1/(factorial(k)), Eq(Mod(k, 1), 0)), (0, True)), 1, 1)
>>> hyper_algorithm(ln(1 + x), x, k)
(Piecewise(((-1)**(k - 1)*factorial(k - 1)/RisingFactorial(2, k - 1),
Eq(Mod(k, 1), 0)), (0, True)), x, 2)
See Also
========
sympy.series.formal.simpleDE
sympy.series.formal.solve_de
"""
g = Function('g')
des = [] # list of DE's
sol = None
for DE, i in simpleDE(f, x, g, order):
if DE is not None:
sol = solve_de(f, x, DE, i, g, k)
if sol:
return sol
if not DE.free_symbols.difference(set([x])):
des.append(DE)
# If nothing works
# Try plain rsolve
for DE in des:
sol = _solve_simple(f, x, DE, g, k)
if sol:
return sol
def _compute_fps(f, x, x0, dir, hyper, order, rational, full):
"""Recursive wrapper to compute fps.
See :func:`compute_fps` for details.
"""
if x0 in [S.Infinity, -S.Infinity]:
dir = S.One if x0 is S.Infinity else -S.One
temp = f.subs(x, 1/x)
result = _compute_fps(temp, x, 0, dir, hyper, order, rational, full)
if result is None:
return None
return (result[0], result[1].subs(x, 1/x), result[2].subs(x, 1/x))
elif x0 or dir == -S.One:
if dir == -S.One:
rep = -x + x0
rep2 = -x
rep2b = x0
else:
rep = x + x0
rep2 = x
rep2b = -x0
temp = f.subs(x, rep)
result = _compute_fps(temp, x, 0, S.One, hyper, order, rational, full)
if result is None:
return None
return (result[0], result[1].subs(x, rep2 + rep2b),
result[2].subs(x, rep2 + rep2b))
if f.is_polynomial(x):
return None
    # Break up instances of Add: this allows different algorithms to be
    # applied to different terms, increasing the range of admissible
    # functions.
if isinstance(f, Add):
result = False
ak = sequence(S.Zero, (0, oo))
ind, xk = S.Zero, None
for t in Add.make_args(f):
res = _compute_fps(t, x, 0, S.One, hyper, order, rational, full)
if res:
if not result:
result = True
xk = res[1]
if res[0].start > ak.start:
seq = ak
s, f = ak.start, res[0].start
else:
seq = res[0]
s, f = res[0].start, ak.start
save = Add(*[z[0]*z[1] for z in zip(seq[0:(f - s)], xk[s:f])])
ak += res[0]
ind += res[2] + save
else:
ind += t
if result:
return ak, xk, ind
return None
result = None
# from here on it's x0=0 and dir=1 handling
k = Dummy('k')
if rational:
result = rational_algorithm(f, x, k, order, full)
if result is None and hyper:
result = hyper_algorithm(f, x, k, order)
if result is None:
return None
ak = sequence(result[0], (k, result[2], oo))
xk = sequence(x**k, (k, 0, oo))
ind = result[1]
return ak, xk, ind
def compute_fps(f, x, x0=0, dir=1, hyper=True, order=4, rational=True,
full=False):
"""Computes the formula for Formal Power Series of a function.
Tries to compute the formula by applying the following techniques
(in order):
* rational_algorithm
    * Hypergeometric algorithm
Parameters
==========
x : Symbol
x0 : number, optional
Point to perform series expansion about. Default is 0.
dir : {1, -1, '+', '-'}, optional
If dir is 1 or '+' the series is calculated from the right and
for -1 or '-' the series is calculated from the left. For smooth
functions this flag will not alter the results. Default is 1.
hyper : {True, False}, optional
Set hyper to False to skip the hypergeometric algorithm.
        By default it is set to True.
order : int, optional
Order of the derivative of ``f``, Default is 4.
rational : {True, False}, optional
Set rational to False to skip rational algorithm. By default it is set
to True.
full : {True, False}, optional
Set full to True to increase the range of rational algorithm.
See :func:`rational_algorithm` for details. By default it is set to
False.
Returns
=======
ak : sequence
Sequence of coefficients.
xk : sequence
Sequence of powers of x.
ind : Expr
Independent terms.
See Also
========
sympy.series.formal.rational_algorithm
sympy.series.formal.hyper_algorithm
"""
f = sympify(f)
x = sympify(x)
if not f.has(x):
return None
x0 = sympify(x0)
if dir == '+':
dir = S.One
elif dir == '-':
dir = -S.One
elif dir not in [S.One, -S.One]:
raise ValueError("Dir must be '+' or '-'")
else:
dir = sympify(dir)
return _compute_fps(f, x, x0, dir, hyper, order, rational, full)
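# Illustrative usage sketch (not part of the original module; the values
# follow from the hyper_algorithm doctest above):
#
#     >>> from sympy import exp, Symbol
#     >>> x = Symbol('x')
#     >>> ak, xk, ind = compute_fps(exp(x), x)
#     >>> ak.start, ind   # coefficients start at k=1; constant term kept aside
#     (1, 1)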
class FormalPowerSeries(SeriesBase):
"""Represents Formal Power Series of a function.
No computation is performed. This class should only be used to represent
a series. No checks are performed.
For computing a series use :func:`fps`.
See Also
========
sympy.series.formal.fps
"""
def __new__(cls, *args):
args = map(sympify, args)
return Expr.__new__(cls, *args)
@property
def function(self):
return self.args[0]
@property
def x(self):
return self.args[1]
@property
def x0(self):
return self.args[2]
@property
def dir(self):
return self.args[3]
@property
def ak(self):
return self.args[4][0]
@property
def xk(self):
return self.args[4][1]
@property
def ind(self):
return self.args[4][2]
@property
def interval(self):
return Interval(0, oo)
@property
def start(self):
return self.interval.inf
@property
def stop(self):
return self.interval.sup
@property
def length(self):
return oo
@property
def infinite(self):
"""Returns an infinite representation of the series"""
from sympy.concrete import Sum
ak, xk = self.ak, self.xk
k = ak.variables[0]
inf_sum = Sum(ak.formula * xk.formula, (k, ak.start, ak.stop))
return self.ind + inf_sum
def _get_pow_x(self, term):
"""Returns the power of x in a term."""
xterm, pow_x = term.as_independent(self.x)[1].as_base_exp()
if not xterm.has(self.x):
return S.Zero
return pow_x
def polynomial(self, n=6):
"""Truncated series as polynomial.
Returns the series expansion of ``f`` up to order ``O(x**n)``
as a polynomial (without the ``O`` term).
"""
terms = []
for i, t in enumerate(self):
xp = self._get_pow_x(t)
if xp >= n:
break
elif xp.is_integer is True and i == n + 1:
break
elif t is not S.Zero:
terms.append(t)
return Add(*terms)
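# Illustrative sketch (assumed output, consistent with the sin doctests
# elsewhere in this module):
#
#     >>> from sympy import fps, sin
#     >>> from sympy.abc import x
#     >>> fps(sin(x)).polynomial(6)
#     x - x**3/6 + x**5/120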
def truncate(self, n=6):
"""Truncated series.
Returns the truncated series expansion of ``f`` up to
order ``O(x**n)``.
If n is ``None``, returns an infinite iterator.
"""
if n is None:
return iter(self)
x, x0 = self.x, self.x0
pt_xk = self.xk.coeff(n)
if x0 is S.NegativeInfinity:
x0 = S.Infinity
return self.polynomial(n) + Order(pt_xk, (x, x0))
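# Illustrative sketch: the same series with the Order term retained:
#
#     >>> fps(sin(x)).truncate(4)
#     x - x**3/6 + O(x**4)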
def _eval_term(self, pt):
try:
pt_xk = self.xk.coeff(pt)
pt_ak = self.ak.coeff(pt).simplify() # Simplify the coefficients
except IndexError:
term = S.Zero
else:
term = (pt_ak * pt_xk)
if self.ind:
ind = S.Zero
for t in Add.make_args(self.ind):
pow_x = self._get_pow_x(t)
if pt == 0 and pow_x < 1:
ind += t
elif pow_x >= pt and pow_x < pt + 1:
ind += t
term += ind
return term.collect(self.x)
def _eval_subs(self, old, new):
x = self.x
if old.has(x):
return self
def _eval_as_leading_term(self, x):
for t in self:
if t is not S.Zero:
return t
def _eval_derivative(self, x):
f = self.function.diff(x)
ind = self.ind.diff(x)
pow_xk = self._get_pow_x(self.xk.formula)
ak = self.ak
k = ak.variables[0]
if ak.formula.has(x):
form = []
for e, c in ak.formula.args:
temp = S.Zero
for t in Add.make_args(e):
pow_x = self._get_pow_x(t)
temp += t * (pow_xk + pow_x)
form.append((temp, c))
form = Piecewise(*form)
ak = sequence(form.subs(k, k + 1), (k, ak.start - 1, ak.stop))
else:
ak = sequence((ak.formula * pow_xk).subs(k, k + 1),
(k, ak.start - 1, ak.stop))
return self.func(f, self.x, self.x0, self.dir, (ak, self.xk, ind))
def integrate(self, x=None):
"""Integrate Formal Power Series.
Examples
========
>>> from sympy import fps, sin
>>> from sympy.abc import x
>>> f = fps(sin(x))
>>> f.integrate(x).truncate()
-1 + x**2/2 - x**4/24 + O(x**6)
>>> f.integrate((x, 0, 1))
-cos(1) + 1
"""
from sympy.integrals import integrate
if x is None:
x = self.x
elif iterable(x):
return integrate(self.function, x)
f = integrate(self.function, x)
ind = integrate(self.ind, x)
ind += (f - ind).limit(x, 0) # constant of integration
pow_xk = self._get_pow_x(self.xk.formula)
ak = self.ak
k = ak.variables[0]
if ak.formula.has(x):
form = []
for e, c in ak.formula.args:
temp = S.Zero
for t in Add.make_args(e):
pow_x = self._get_pow_x(t)
temp += t / (pow_xk + pow_x + 1)
form.append((temp, c))
form = Piecewise(*form)
ak = sequence(form.subs(k, k - 1), (k, ak.start + 1, ak.stop))
else:
ak = sequence((ak.formula / (pow_xk + 1)).subs(k, k - 1),
(k, ak.start + 1, ak.stop))
return self.func(f, self.x, self.x0, self.dir, (ak, self.xk, ind))
def __add__(self, other):
other = sympify(other)
if isinstance(other, FormalPowerSeries):
if self.dir != other.dir:
raise ValueError("Both series should be calculated from the"
" same direction.")
elif self.x0 != other.x0:
raise ValueError("Both series should be calculated about the"
" same point.")
x, y = self.x, other.x
f = self.function + other.function.subs(y, x)
if self.x not in f.free_symbols:
return f
ak = self.ak + other.ak
if self.ak.start > other.ak.start:
seq = other.ak
s, e = other.ak.start, self.ak.start
else:
seq = self.ak
s, e = self.ak.start, other.ak.start
save = Add(*[z[0]*z[1] for z in zip(seq[0:(e - s)], self.xk[s:e])])
ind = self.ind + other.ind + save
return self.func(f, x, self.x0, self.dir, (ak, self.xk, ind))
elif not other.has(self.x):
f = self.function + other
ind = self.ind + other
return self.func(f, self.x, self.x0, self.dir,
(self.ak, self.xk, ind))
return Add(self, other)
def __radd__(self, other):
return self.__add__(other)
def __neg__(self):
return self.func(-self.function, self.x, self.x0, self.dir,
(-self.ak, self.xk, -self.ind))
def __sub__(self, other):
return self.__add__(-other)
def __rsub__(self, other):
return (-self).__add__(other)
def __mul__(self, other):
other = sympify(other)
if other.has(self.x):
return Mul(self, other)
f = self.function * other
ak = self.ak.coeff_mul(other)
ind = self.ind * other
return self.func(f, self.x, self.x0, self.dir, (ak, self.xk, ind))
def __rmul__(self, other):
return self.__mul__(other)
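# Illustrative sketch of the arithmetic defined above (scalar multiplication
# scales the coefficients ak and leaves the powers xk untouched):
#
#     >>> (2 * fps(sin(x))).truncate()
#     2*x - x**3/3 + x**5/60 + O(x**6)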
def fps(f, x=None, x0=0, dir=1, hyper=True, order=4, rational=True, full=False):
"""Generates Formal Power Series of f.
Returns the formal series expansion of ``f`` around ``x = x0``
with respect to ``x`` in the form of a ``FormalPowerSeries`` object.
Formal Power Series is represented using an explicit formula
computed using different algorithms.
See :func:`compute_fps` for more details regarding the computation
of the formula.
Parameters
==========
x : Symbol, optional
If x is None and ``f`` is univariate, the univariate symbol will be
supplied; otherwise an error will be raised.
x0 : number, optional
Point to perform series expansion about. Default is 0.
dir : {1, -1, '+', '-'}, optional
If dir is 1 or '+' the series is calculated from the right and
for -1 or '-' the series is calculated from the left. For smooth
functions this flag will not alter the results. Default is 1.
hyper : {True, False}, optional
Set hyper to False to skip the hypergeometric algorithm.
By default it is set to True.
order : int, optional
Order of the derivative of ``f``. Default is 4.
rational : {True, False}, optional
Set rational to False to skip rational algorithm. By default it is set
to True.
full : {True, False}, optional
Set full to True to increase the range of rational algorithm.
See :func:`rational_algorithm` for details. By default it is set to
False.
Examples
========
>>> from sympy import fps, O, ln, atan
>>> from sympy.abc import x
Rational Functions
>>> fps(ln(1 + x)).truncate()
x - x**2/2 + x**3/3 - x**4/4 + x**5/5 + O(x**6)
>>> fps(atan(x), full=True).truncate()
x - x**3/3 + x**5/5 + O(x**6)
See Also
========
sympy.series.formal.FormalPowerSeries
sympy.series.formal.compute_fps
"""
f = sympify(f)
if x is None:
free = f.free_symbols
if len(free) == 1:
x = free.pop()
elif not free:
return f
else:
raise NotImplementedError("multivariate formal power series")
result = compute_fps(f, x, x0, dir, hyper, order, rational, full)
if result is None:
return f
return FormalPowerSeries(f, x, x0, dir, result)
| bsd-3-clause |
khalim19/gimp-plugin-export-layers | export_layers/tests/test_placeholders.py | 1 | 2548 | # -*- coding: utf-8 -*-
#
# This file is part of Export Layers.
#
# Copyright (C) 2013-2019 khalim19 <khalim19@gmail.com>
#
# Export Layers is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Export Layers is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Export Layers. If not, see <https://www.gnu.org/licenses/>.
from __future__ import absolute_import, division, print_function, unicode_literals
from future.builtins import *
import unittest
import parameterized
from export_layers import pygimplib as pg
from export_layers.pygimplib.tests import stubs_gimp
from .. import placeholders
class TestGetReplacedArgsAndKwargs(unittest.TestCase):
def test_get_replaced_args_and_kwargs(self):
image = stubs_gimp.ImageStub()
layer = stubs_gimp.LayerStub()
layer_exporter = object()
args = ["current_image", "current_layer", "some_other_arg"]
kwargs = {
"run_mode": 0, "image": "current_image", "layer": "current_layer"}
new_args, new_kwargs = placeholders.get_replaced_args_and_kwargs(
args, kwargs, image, layer, layer_exporter)
self.assertListEqual(new_args, [image, layer, "some_other_arg"])
self.assertDictEqual(new_kwargs, {"run_mode": 0, "image": image, "layer": layer})
class TestPlaceHolderSetting(unittest.TestCase):
@parameterized.parameterized.expand([
("placeholder", placeholders.PlaceholderSetting, []),
("image_placeholder", placeholders.PlaceholderImageSetting, ["current_image"]),
])
def test_get_allowed_placeholder_names(
self, test_case_name_suffix, placeholder_setting_type, expected_result):
self.assertListEqual(
placeholder_setting_type.get_allowed_placeholder_names(), expected_result)
@parameterized.parameterized.expand([
("placeholder", placeholders.PlaceholderSetting, 0),
("image_placeholder", placeholders.PlaceholderImageSetting, 1),
])
def test_get_allowed_placeholders(
self, test_case_name_suffix, placeholder_setting_type, expected_length):
self.assertEqual(len(placeholder_setting_type.get_allowed_placeholders()), expected_length)
| gpl-3.0 |
ramondelafuente/ansible | lib/ansible/utils/module_docs_fragments/junos.py | 18 | 2684 | #
# (c) 2015, Peter Sprygada <psprygada@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
class ModuleDocFragment(object):
# Standard files documentation fragment
DOCUMENTATION = """
options:
host:
description:
- Specifies the DNS host name or address for connecting to the remote
device over the specified transport. The value of host is used as
the destination address for the transport.
required: true
port:
description:
- Specifies the port to use when building the connection to the remote
device. The port value will default to the well known SSH port
of 22
required: false
default: 22
username:
description:
- Configures the username to use to authenticate the connection to
the remote device. The value of I(username) is used to authenticate
the SSH session. If the value is not specified in the task, the
value of environment variable ANSIBLE_NET_USERNAME will be used instead.
required: false
password:
description:
- Specifies the password to use to authenticate the connection to
the remote device. The value of I(password) is used to authenticate
the SSH session. If the value is not specified in the task, the
value of environment variable ANSIBLE_NET_PASSWORD will be used instead.
required: false
default: null
ssh_keyfile:
description:
- Specifies the SSH key to use to authenticate the connection to
the remote device. The value of I(ssh_keyfile) is the path to the key
used to authenticate the SSH session. If the value is not specified in
the task, the value of environment variable ANSIBLE_NET_SSH_KEYFILE
will be used instead.
required: false
provider:
description:
- Convenience method that allows all M(junos) arguments to be passed as
a dict object. All constraints (required, choices, etc) must be
met either by individual arguments or values in this dict.
required: false
default: null
"""
| gpl-3.0 |
DebrahR/project2 | server/lib/flask/wrappers.py | 773 | 6709 | # -*- coding: utf-8 -*-
"""
flask.wrappers
~~~~~~~~~~~~~~
Implements the WSGI wrappers (request and response).
:copyright: (c) 2011 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
from werkzeug.wrappers import Request as RequestBase, Response as ResponseBase
from werkzeug.exceptions import BadRequest
from .debughelpers import attach_enctype_error_multidict
from . import json
from .globals import _request_ctx_stack
_missing = object()
def _get_data(req, cache):
getter = getattr(req, 'get_data', None)
if getter is not None:
return getter(cache=cache)
return req.data
class Request(RequestBase):
"""The request object used by default in Flask. Remembers the
matched endpoint and view arguments.
It is what ends up as :class:`~flask.request`. If you want to replace
the request object used you can subclass this and set
:attr:`~flask.Flask.request_class` to your subclass.
The request object is a :class:`~werkzeug.wrappers.Request` subclass and
provides all of the attributes Werkzeug defines plus a few Flask
specific ones.
"""
#: the internal URL rule that matched the request. This can be
#: useful to inspect which methods are allowed for the URL from
#: a before/after handler (``request.url_rule.methods``) etc.
#:
#: .. versionadded:: 0.6
url_rule = None
#: a dict of view arguments that matched the request. If an exception
#: happened when matching, this will be `None`.
view_args = None
#: if matching the URL failed, this is the exception that will be
#: raised / was raised as part of the request handling. This is
#: usually a :exc:`~werkzeug.exceptions.NotFound` exception or
#: something similar.
routing_exception = None
# switched by the request context until 1.0 to opt in deprecated
# module functionality
_is_old_module = False
@property
def max_content_length(self):
"""Read-only view of the `MAX_CONTENT_LENGTH` config key."""
ctx = _request_ctx_stack.top
if ctx is not None:
return ctx.app.config['MAX_CONTENT_LENGTH']
@property
def endpoint(self):
"""The endpoint that matched the request. This in combination with
:attr:`view_args` can be used to reconstruct the same or a
modified URL. If an exception happened when matching, this will
be `None`.
"""
if self.url_rule is not None:
return self.url_rule.endpoint
@property
def module(self):
"""The name of the current module if the request was dispatched
to an actual module. This is deprecated functionality, use blueprints
instead.
"""
from warnings import warn
warn(DeprecationWarning('modules were deprecated in favor of '
'blueprints. Use request.blueprint '
'instead.'), stacklevel=2)
if self._is_old_module:
return self.blueprint
@property
def blueprint(self):
"""The name of the current blueprint"""
if self.url_rule and '.' in self.url_rule.endpoint:
return self.url_rule.endpoint.rsplit('.', 1)[0]
@property
def json(self):
"""If the mimetype is `application/json` this will contain the
parsed JSON data. Otherwise this will be `None`.
The :meth:`get_json` method should be used instead.
"""
# XXX: deprecate property
return self.get_json()
def get_json(self, force=False, silent=False, cache=True):
"""Parses the incoming JSON request data and returns it. If
parsing fails the :meth:`on_json_loading_failed` method on the
request object will be invoked. By default this function will
only load the json data if the mimetype is ``application/json``
but this can be overridden by the `force` parameter.
:param force: if set to `True` the mimetype is ignored.
:param silent: if set to `True` this method will fail silently
and return `None`.
:param cache: if set to `True` the parsed JSON data is remembered
on the request.
"""
rv = getattr(self, '_cached_json', _missing)
if rv is not _missing:
return rv
if self.mimetype != 'application/json' and not force:
return None
# We accept a request charset against the specification as
# certain clients have been using this in the past. This
# fits our general approach of being nice in what we accept
# and strict in what we send out.
request_charset = self.mimetype_params.get('charset')
try:
data = _get_data(self, cache)
if request_charset is not None:
rv = json.loads(data, encoding=request_charset)
else:
rv = json.loads(data)
except ValueError as e:
if silent:
rv = None
else:
rv = self.on_json_loading_failed(e)
if cache:
self._cached_json = rv
return rv
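# Illustrative view-level sketch (hypothetical app and route, not part of
# this module):
#
#     from flask import Flask, request
#     app = Flask(__name__)
#
#     @app.route('/items', methods=['POST'])
#     def create_item():
#         payload = request.get_json(silent=True)  # None on parse failure
#         if payload is None:
#             return 'expected application/json', 400
#         return 'ok', 201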
def on_json_loading_failed(self, e):
"""Called if decoding of the JSON data failed. The return value of
this method is used by :meth:`get_json` when an error occurred. The
default implementation just raises a :class:`BadRequest` exception.
.. versionchanged:: 0.10
Removed buggy previous behavior of generating a random JSON
response. If you want that behavior back you can trivially
add it by subclassing.
.. versionadded:: 0.8
"""
raise BadRequest()
def _load_form_data(self):
RequestBase._load_form_data(self)
# in debug mode we're replacing the files multidict with an ad-hoc
# subclass that raises a different error for key errors.
ctx = _request_ctx_stack.top
if ctx is not None and ctx.app.debug and \
self.mimetype != 'multipart/form-data' and not self.files:
attach_enctype_error_multidict(self)
class Response(ResponseBase):
"""The response object that is used by default in Flask. Works like the
response object from Werkzeug but is set to have an HTML mimetype by
default. Quite often you don't have to create this object yourself because
:meth:`~flask.Flask.make_response` will take care of that for you.
If you want to replace the response object used you can subclass this and
set :attr:`~flask.Flask.response_class` to your subclass.
"""
default_mimetype = 'text/html'
| apache-2.0 |
GdZ/scriptfile | software/googleAppEngine/lib/django_0_96/django/db/models/related.py | 49 | 6008 | class BoundRelatedObject(object):
def __init__(self, related_object, field_mapping, original):
self.relation = related_object
self.field_mappings = field_mapping[related_object.name]
def template_name(self):
raise NotImplementedError
def __repr__(self):
return repr(self.__dict__)
class RelatedObject(object):
def __init__(self, parent_model, model, field):
self.parent_model = parent_model
self.model = model
self.opts = model._meta
self.field = field
self.edit_inline = field.rel.edit_inline
self.name = '%s:%s' % (self.opts.app_label, self.opts.module_name)
self.var_name = self.opts.object_name.lower()
def flatten_data(self, follow, obj=None):
new_data = {}
rel_instances = self.get_list(obj)
for i, rel_instance in enumerate(rel_instances):
instance_data = {}
for f in self.opts.fields + self.opts.many_to_many:
# TODO: Fix for recursive manipulators.
fol = follow.get(f.name, None)
if fol:
field_data = f.flatten_data(fol, rel_instance)
for name, value in field_data.items():
instance_data['%s.%d.%s' % (self.var_name, i, name)] = value
new_data.update(instance_data)
return new_data
def extract_data(self, data):
"""
Pull out the data meant for inline objects of this class,
i.e. anything starting with our module name.
"""
return data # TODO
def get_list(self, parent_instance=None):
"Get the list of this type of object from an instance of the parent class."
if parent_instance is not None:
attr = getattr(parent_instance, self.get_accessor_name())
if self.field.rel.multiple:
# For many-to-many relationships, return a list of objects
# corresponding to the xxx_num_in_admin options of the field
objects = list(attr.all())
count = len(objects) + self.field.rel.num_extra_on_change
if self.field.rel.min_num_in_admin:
count = max(count, self.field.rel.min_num_in_admin)
if self.field.rel.max_num_in_admin:
count = min(count, self.field.rel.max_num_in_admin)
change = count - len(objects)
if change > 0:
return objects + [None] * change
if change < 0:
return objects[:change]
else: # Just right
return objects
else:
# A one-to-one relationship, so just return the single related
# object
return [attr]
else:
if self.field.rel.min_num_in_admin:
return [None] * max(self.field.rel.num_in_admin, self.field.rel.min_num_in_admin)
else:
return [None] * self.field.rel.num_in_admin
def get_db_prep_lookup(self, lookup_type, value):
# Defer to the actual field definition for db prep
return self.field.get_db_prep_lookup(lookup_type, value)
def editable_fields(self):
"Get the fields in this class that should be edited inline."
return [f for f in self.opts.fields + self.opts.many_to_many if f.editable and f != self.field]
def get_follow(self, override=None):
if isinstance(override, bool):
if override:
over = {}
else:
return None
else:
if override:
over = override.copy()
elif self.edit_inline:
over = {}
else:
return None
over[self.field.name] = False
return self.opts.get_follow(over)
def get_manipulator_fields(self, opts, manipulator, change, follow):
if self.field.rel.multiple:
if change:
attr = getattr(manipulator.original_object, self.get_accessor_name())
count = attr.count()
count += self.field.rel.num_extra_on_change
else:
count = self.field.rel.num_in_admin
if self.field.rel.min_num_in_admin:
count = max(count, self.field.rel.min_num_in_admin)
if self.field.rel.max_num_in_admin:
count = min(count, self.field.rel.max_num_in_admin)
else:
count = 1
fields = []
for i in range(count):
for f in self.opts.fields + self.opts.many_to_many:
if follow.get(f.name, False):
prefix = '%s.%d.' % (self.var_name, i)
fields.extend(f.get_manipulator_fields(self.opts, manipulator, change,
name_prefix=prefix, rel=True))
return fields
def __repr__(self):
return "<RelatedObject: %s related to %s>" % (self.name, self.field.name)
def bind(self, field_mapping, original, bound_related_object_class=BoundRelatedObject):
return bound_related_object_class(self, field_mapping, original)
def get_accessor_name(self):
# This method encapsulates the logic that decides what name to give an
# accessor descriptor that retrieves related many-to-one or
# many-to-many objects. It uses the lower-cased object_name + "_set",
# but this can be overridden with the "related_name" option.
if self.field.rel.multiple:
# If this is a symmetrical m2m relation on self, there is no reverse accessor.
if getattr(self.field.rel, 'symmetrical', False) and self.model == self.parent_model:
return None
return self.field.rel.related_name or (self.opts.object_name.lower() + '_set')
else:
return self.field.rel.related_name or (self.opts.object_name.lower())
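# Illustrative sketch (hypothetical models, default accessor naming):
#
#     class Poll(models.Model):
#         name = models.CharField(maxlength=50)
#     class Choice(models.Model):
#         poll = models.ForeignKey(Poll)   # reverse accessor: poll.choice_set
#         # with related_name='choices' the accessor becomes poll.choices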
| mit |
friedrich420/S4-AEL-GPE-LOLLIPOP | tools/perf/util/setup.py | 4998 | 1330 | #!/usr/bin/python2
from distutils.core import setup, Extension
from os import getenv
from distutils.command.build_ext import build_ext as _build_ext
from distutils.command.install_lib import install_lib as _install_lib
class build_ext(_build_ext):
def finalize_options(self):
_build_ext.finalize_options(self)
self.build_lib = build_lib
self.build_temp = build_tmp
class install_lib(_install_lib):
def finalize_options(self):
_install_lib.finalize_options(self)
self.build_dir = build_lib
cflags = ['-fno-strict-aliasing', '-Wno-write-strings']
cflags += getenv('CFLAGS', '').split()
build_lib = getenv('PYTHON_EXTBUILD_LIB')
build_tmp = getenv('PYTHON_EXTBUILD_TMP')
ext_sources = [f.strip() for f in file('util/python-ext-sources')
if len(f.strip()) > 0 and f[0] != '#']
perf = Extension('perf',
sources = ext_sources,
include_dirs = ['util/include'],
extra_compile_args = cflags,
)
setup(name='perf',
version='0.1',
description='Interface with the Linux profiling infrastructure',
author='Arnaldo Carvalho de Melo',
author_email='acme@redhat.com',
license='GPLv2',
url='http://perf.wiki.kernel.org',
ext_modules=[perf],
cmdclass={'build_ext': build_ext, 'install_lib': install_lib})
| gpl-2.0 |
hrishioa/Aviato | flask/Lib/site-packages/nltk/classify/__init__.py | 2 | 4496 | # Natural Language Toolkit: Classifiers
#
# Copyright (C) 2001-2015 NLTK Project
# Author: Edward Loper <edloper@gmail.com>
# URL: <http://nltk.org/>
# For license information, see LICENSE.TXT
"""
Classes and interfaces for labeling tokens with category labels (or
"class labels"). Typically, labels are represented with strings
(such as ``'health'`` or ``'sports'``). Classifiers can be used to
perform a wide range of classification tasks. For example,
classifiers can be used...
- to classify documents by topic
- to classify ambiguous words by which word sense is intended
- to classify acoustic signals by which phoneme they represent
- to classify sentences by their author
Features
========
In order to decide which category label is appropriate for a given
token, classifiers examine one or more 'features' of the token. These
"features" are typically chosen by hand, and indicate which aspects
of the token are relevant to the classification decision. For
example, a document classifier might use a separate feature for each
word, recording how often that word occurred in the document.
Featuresets
===========
The features describing a token are encoded using a "featureset",
which is a dictionary that maps from "feature names" to "feature
values". Feature names are unique strings that indicate what aspect
of the token is encoded by the feature. Examples include
``'prevword'``, for a feature whose value is the previous word; and
``'contains-word(library)'`` for a feature that is true when a document
contains the word ``'library'``. Feature values are typically
booleans, numbers, or strings, depending on which feature they
describe.
Featuresets are typically constructed using a "feature detector"
(also known as a "feature extractor"). A feature detector is a
function that takes a token (and sometimes information about its
context) as its input, and returns a featureset describing that token.
For example, the following feature detector converts a document
(stored as a list of words) to a featureset describing the set of
words included in the document:
>>> # Define a feature detector function.
>>> def document_features(document):
... return dict([('contains-word(%s)' % w, True) for w in document])
Feature detectors are typically applied to each token before it is fed
to the classifier:
>>> # Classify each Gutenberg document.
>>> from nltk.corpus import gutenberg
>>> for fileid in gutenberg.fileids(): # doctest: +SKIP
... doc = gutenberg.words(fileid) # doctest: +SKIP
... print fileid, classifier.classify(document_features(doc)) # doctest: +SKIP
The parameters that a feature detector expects will vary, depending on
the task and the needs of the feature detector. For example, a
feature detector for word sense disambiguation (WSD) might take as its
input a sentence, and the index of a word that should be classified,
and return a featureset for that word. The following feature detector
for WSD includes features describing the left and right contexts of
the target word:
>>> def wsd_features(sentence, index):
... featureset = {}
... for i in range(max(0, index-3), index):
... featureset['left-context(%s)' % sentence[i]] = True
... for i in range(index, max(index+3, len(sentence))):
... featureset['right-context(%s)' % sentence[i]] = True
... return featureset
Training Classifiers
====================
Most classifiers are built by training them on a list of hand-labeled
examples, known as the "training set". Training sets are represented
as lists of ``(featuredict, label)`` tuples.
"""
from nltk.classify.api import ClassifierI, MultiClassifierI
from nltk.classify.megam import config_megam, call_megam
from nltk.classify.weka import WekaClassifier, config_weka
from nltk.classify.naivebayes import NaiveBayesClassifier
from nltk.classify.positivenaivebayes import PositiveNaiveBayesClassifier
from nltk.classify.decisiontree import DecisionTreeClassifier
from nltk.classify.rte_classify import rte_classifier, rte_features, RTEFeatureExtractor
from nltk.classify.util import accuracy, apply_features, log_likelihood
from nltk.classify.scikitlearn import SklearnClassifier
from nltk.classify.maxent import (MaxentClassifier, BinaryMaxentFeatureEncoding,
TypedMaxentFeatureEncoding,
ConditionalExponentialClassifier)
from nltk.classify.senna import Senna
| gpl-2.0 |
akshatharaj/django | tests/fixtures_regress/models.py | 281 | 8611 | from __future__ import unicode_literals
from django.contrib.auth.models import User
from django.db import models
from django.utils import six
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class Animal(models.Model):
name = models.CharField(max_length=150)
latin_name = models.CharField(max_length=150)
count = models.IntegerField()
weight = models.FloatField()
# use a non-default name for the default manager
specimens = models.Manager()
def __str__(self):
return self.name
class Plant(models.Model):
name = models.CharField(max_length=150)
class Meta:
# For testing when upper case letter in app name; regression for #4057
db_table = "Fixtures_regress_plant"
@python_2_unicode_compatible
class Stuff(models.Model):
name = models.CharField(max_length=20, null=True)
owner = models.ForeignKey(User, models.SET_NULL, null=True)
def __str__(self):
return six.text_type(self.name) + ' is owned by ' + six.text_type(self.owner)
class Absolute(models.Model):
name = models.CharField(max_length=40)
class Parent(models.Model):
name = models.CharField(max_length=10)
class Meta:
ordering = ('id',)
class Child(Parent):
data = models.CharField(max_length=10)
# Models to regression test #7572, #20820
class Channel(models.Model):
name = models.CharField(max_length=255)
class Article(models.Model):
title = models.CharField(max_length=255)
channels = models.ManyToManyField(Channel)
class Meta:
ordering = ('id',)
# Subclass of a model with a ManyToManyField for test_ticket_20820
class SpecialArticle(Article):
pass
# Models to regression test #22421
class CommonFeature(Article):
class Meta:
abstract = True
class Feature(CommonFeature):
pass
# Models to regression test #11428
@python_2_unicode_compatible
class Widget(models.Model):
name = models.CharField(max_length=255)
class Meta:
ordering = ('name',)
def __str__(self):
return self.name
class WidgetProxy(Widget):
class Meta:
proxy = True
# Check for forward references in FKs and M2Ms with natural keys
class TestManager(models.Manager):
def get_by_natural_key(self, key):
return self.get(name=key)
@python_2_unicode_compatible
class Store(models.Model):
objects = TestManager()
name = models.CharField(max_length=255)
main = models.ForeignKey('self', models.SET_NULL, null=True)
class Meta:
ordering = ('name',)
def __str__(self):
return self.name
def natural_key(self):
return (self.name,)
@python_2_unicode_compatible
class Person(models.Model):
objects = TestManager()
name = models.CharField(max_length=255)
class Meta:
ordering = ('name',)
def __str__(self):
return self.name
# Person doesn't actually have a dependency on store, but we need to define
# one to test the behavior of the dependency resolution algorithm.
def natural_key(self):
return (self.name,)
natural_key.dependencies = ['fixtures_regress.store']
@python_2_unicode_compatible
class Book(models.Model):
name = models.CharField(max_length=255)
author = models.ForeignKey(Person, models.CASCADE)
stores = models.ManyToManyField(Store)
class Meta:
ordering = ('name',)
def __str__(self):
return '%s by %s (available at %s)' % (
self.name,
self.author.name,
', '.join(s.name for s in self.stores.all())
)
class NKManager(models.Manager):
def get_by_natural_key(self, data):
return self.get(data=data)
@python_2_unicode_compatible
class NKChild(Parent):
data = models.CharField(max_length=10, unique=True)
objects = NKManager()
def natural_key(self):
return (self.data,)
def __str__(self):
return 'NKChild %s:%s' % (self.name, self.data)
@python_2_unicode_compatible
class RefToNKChild(models.Model):
text = models.CharField(max_length=10)
nk_fk = models.ForeignKey(NKChild, models.CASCADE, related_name='ref_fks')
nk_m2m = models.ManyToManyField(NKChild, related_name='ref_m2ms')
def __str__(self):
return '%s: Reference to %s [%s]' % (
self.text,
self.nk_fk,
', '.join(str(o) for o in self.nk_m2m.all())
)
# Some models with pathological circular dependencies
class Circle1(models.Model):
name = models.CharField(max_length=255)
def natural_key(self):
return (self.name,)
natural_key.dependencies = ['fixtures_regress.circle2']
class Circle2(models.Model):
name = models.CharField(max_length=255)
def natural_key(self):
return (self.name,)
natural_key.dependencies = ['fixtures_regress.circle1']
class Circle3(models.Model):
name = models.CharField(max_length=255)
def natural_key(self):
return (self.name,)
natural_key.dependencies = ['fixtures_regress.circle3']
class Circle4(models.Model):
name = models.CharField(max_length=255)
def natural_key(self):
return (self.name,)
natural_key.dependencies = ['fixtures_regress.circle5']
class Circle5(models.Model):
name = models.CharField(max_length=255)
def natural_key(self):
return (self.name,)
natural_key.dependencies = ['fixtures_regress.circle6']
class Circle6(models.Model):
name = models.CharField(max_length=255)
def natural_key(self):
return (self.name,)
natural_key.dependencies = ['fixtures_regress.circle4']
class ExternalDependency(models.Model):
name = models.CharField(max_length=255)
def natural_key(self):
return (self.name,)
natural_key.dependencies = ['fixtures_regress.book']
# Model for regression test of #11101
class Thingy(models.Model):
name = models.CharField(max_length=255)
class M2MToSelf(models.Model):
parent = models.ManyToManyField("self", blank=True)
@python_2_unicode_compatible
class BaseNKModel(models.Model):
"""
Base model with a natural_key and a manager with `get_by_natural_key`
"""
data = models.CharField(max_length=20, unique=True)
objects = NKManager()
class Meta:
abstract = True
def __str__(self):
return self.data
def natural_key(self):
return (self.data,)
class M2MSimpleA(BaseNKModel):
b_set = models.ManyToManyField("M2MSimpleB")
class M2MSimpleB(BaseNKModel):
pass
class M2MSimpleCircularA(BaseNKModel):
b_set = models.ManyToManyField("M2MSimpleCircularB")
class M2MSimpleCircularB(BaseNKModel):
a_set = models.ManyToManyField("M2MSimpleCircularA")
class M2MComplexA(BaseNKModel):
b_set = models.ManyToManyField("M2MComplexB", through="M2MThroughAB")
class M2MComplexB(BaseNKModel):
pass
class M2MThroughAB(BaseNKModel):
a = models.ForeignKey(M2MComplexA, models.CASCADE)
b = models.ForeignKey(M2MComplexB, models.CASCADE)
class M2MComplexCircular1A(BaseNKModel):
b_set = models.ManyToManyField("M2MComplexCircular1B",
through="M2MCircular1ThroughAB")
class M2MComplexCircular1B(BaseNKModel):
c_set = models.ManyToManyField("M2MComplexCircular1C",
through="M2MCircular1ThroughBC")
class M2MComplexCircular1C(BaseNKModel):
a_set = models.ManyToManyField("M2MComplexCircular1A",
through="M2MCircular1ThroughCA")
class M2MCircular1ThroughAB(BaseNKModel):
a = models.ForeignKey(M2MComplexCircular1A, models.CASCADE)
b = models.ForeignKey(M2MComplexCircular1B, models.CASCADE)
class M2MCircular1ThroughBC(BaseNKModel):
b = models.ForeignKey(M2MComplexCircular1B, models.CASCADE)
c = models.ForeignKey(M2MComplexCircular1C, models.CASCADE)
class M2MCircular1ThroughCA(BaseNKModel):
c = models.ForeignKey(M2MComplexCircular1C, models.CASCADE)
a = models.ForeignKey(M2MComplexCircular1A, models.CASCADE)
class M2MComplexCircular2A(BaseNKModel):
b_set = models.ManyToManyField("M2MComplexCircular2B",
through="M2MCircular2ThroughAB")
class M2MComplexCircular2B(BaseNKModel):
def natural_key(self):
return (self.data,)
# Fake the dependency for a circularity
natural_key.dependencies = ["fixtures_regress.M2MComplexCircular2A"]
class M2MCircular2ThroughAB(BaseNKModel):
a = models.ForeignKey(M2MComplexCircular2A, models.CASCADE)
b = models.ForeignKey(M2MComplexCircular2B, models.CASCADE)
| bsd-3-clause |
nwchandler/ansible | lib/ansible/plugins/cliconf/aireos.py | 42 | 2670 | #
# (c) 2017 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import re
import json
from itertools import chain
from ansible.module_utils._text import to_bytes, to_text
from ansible.module_utils.network_common import to_list
from ansible.plugins.cliconf import CliconfBase, enable_mode
class Cliconf(CliconfBase):
def get_device_info(self):
device_info = {}
device_info['network_os'] = 'aireos'
reply = self.get(b'show sysinfo')
data = to_text(reply, errors='surrogate_or_strict').strip()
match = re.search(r'Product Version\.* (.*)', data)
if match:
device_info['network_os_version'] = match.group(1)
match = re.search(r'System Name\.* (.*)', data, re.M)
if match:
device_info['network_os_hostname'] = match.group(1)
reply = self.get(b'show inventory')
data = to_text(reply, errors='surrogate_or_strict').strip()
match = re.search(r'DESCR: \"(.*)\"', data, re.M)
if match:
device_info['network_os_model'] = match.group(1)
return device_info
@enable_mode
def get_config(self, source='running'):
if source not in ('running', 'startup'):
return self.invalid_params("fetching configuration from %s is not supported" % source)
if source == 'running':
cmd = b'show run-config commands'
else:
cmd = b'show run-config startup-commands'
return self.send_command(cmd)
@enable_mode
def edit_config(self, command):
for cmd in chain([b'config'], to_list(command), [b'end']):
self.send_command(cmd)
def get(self, *args, **kwargs):
return self.send_command(*args, **kwargs)
def get_capabilities(self):
result = {}
result['rpc'] = self.get_base_rpc()
result['network_api'] = 'cliconf'
result['device_info'] = self.get_device_info()
return json.dumps(result)
| gpl-3.0 |
benjamindeleener/odoo | addons/hr_gamification/wizard/grant_badge.py | 46 | 1666 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from openerp.osv import fields, osv
from openerp.tools.translate import _
from openerp.exceptions import UserError
class hr_grant_badge_wizard(osv.TransientModel):
_name = 'gamification.badge.user.wizard'
_inherit = ['gamification.badge.user.wizard']
_columns = {
'employee_id': fields.many2one("hr.employee", string='Employee', required=True),
'user_id': fields.related("employee_id", "user_id",
type="many2one", relation="res.users",
store=True, string='User')
}
def action_grant_badge(self, cr, uid, ids, context=None):
"""Wizard action for sending a badge to a chosen employee"""
if context is None:
context = {}
badge_user_obj = self.pool.get('gamification.badge.user')
for wiz in self.browse(cr, uid, ids, context=context):
if not wiz.user_id:
raise UserError(_('You can send badges only to employees linked to a user.'))
if uid == wiz.user_id.id:
raise UserError(_('You cannot send a badge to yourself'))
values = {
'user_id': wiz.user_id.id,
'sender_id': uid,
'badge_id': wiz.badge_id.id,
'employee_id': wiz.employee_id.id,
'comment': wiz.comment,
}
badge_user = badge_user_obj.create(cr, uid, values, context=context)
result = badge_user_obj._send_badge(cr, uid, [badge_user], context=context)
return result
| gpl-3.0 |
dmych/cn | utils.py | 1 | 3805 | # This file is part of Coffee Notes project
#
# Coffee Notes is a crossplatform note-taking application
# inspired by Notational Velocity.
# <https://github.com/dmych/cn>
#
# Copyright (c) Dmitri Brechalov, 2011
#
# Coffee Notes is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Coffee Notes is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Coffee Notes. If not, see <http://www.gnu.org/licenses/>.
LOGNAME = '/tmp/cn.log'
def log(msg, restart=False):
if restart:
f = open(LOGNAME, 'w')
else:
f = open(LOGNAME, 'a')
f.write('%s\n' % msg)
f.close()
def getProgramPath():
import sys
import os.path
pname = sys.argv[0]
if os.path.islink(pname):
pname = os.readlink(pname)
return os.path.abspath(os.path.dirname(pname))
def openConfig(fname, mode):
'''Return config file object'''
import os.path
return open(os.path.expanduser(fname), mode)
class SimpleConfig:
def __init__(self, fileName):
self.data = {}
self.fileName = fileName
self.__readData()
def __readData(self):
self.data = {}
try:
f = openConfig(self.fileName, 'r')
except:
log('CANNOT FIND %s' % self.fileName)
return
for line in f.readlines():
line = line.strip()
if not line or line.startswith('#'):
continue # just empty line or comment
try:
(key, value) = line.split('=', 1)
self.data[key] = value
except:
pass
f.close()
log('CONFIG')
log(repr(self.data))
def save(self):
f = openConfig(self.fileName, 'w')
for (k, v) in self.data.items():
f.write('%s=%s\n' % (k, v))
f.close()
def close(self):
self.save()
def clear(self):
self.data.clear()
def readStr(self, key, default=None):
try:
value = self.data[key]
except:
value = default
return value
def readInt(self, key, default=None):
try:
return int(self.readStr(key))
except:
return default
def readBool(self, key, default=False):
try:
return bool(self.readInt(key))
except:
return default
def keys(self, start=None):
if start:
result = [ item for item in self.data.keys() \
if item.startswith(start) ]
else:
result = self.data.keys()  # all keys; calling self.keys() here would recurse forever
return result
def values(self, start=None):
keys = self.keys(start)
result = [ self.data[k] for k in keys ]
return result
def writeStr(self, key, value):
self.data[key] = str(value)
writeInt = writeStr
def writeBool(self, key, value):
self.writeStr(key, int(value))
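# Illustrative usage sketch (hypothetical file name and keys):
#
#     cfg = SimpleConfig('~/.cnrc')
#     cfg.writeStr('db.path', '/tmp/notes.db')
#     cfg.writeBool('ui.dark', True)
#     cfg.save()
#     cfg.readBool('ui.dark', False)   # -> True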
def strip_hashes(txt):
'''Strip all hashes and spaces at the begining and the end of line
'''
while txt and txt[0] in '# \t':
txt = txt[1:]
while txt and txt[-1] in '# \t':
txt = txt[:-1]
return txt
def sanitize(txt):
'''Replace all "dangerous" characters (such as <>|\/")
Also strip hashes and spaces at the beginning or end of the line
'''
txt = strip_hashes(txt)
for c in ' \t<>/\|"\'?*:;~':
txt = txt.replace(c, '-')
return txt
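# Hand-traced example of the two helpers above:
#
#     >>> sanitize('## My Note: draft? ##')
#     'My-Note--draft-'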
# end of utils.py
| gpl-3.0 |
GladeRom/android_external_chromium_org | build/win/install-build-deps.py | 153 | 1463 | #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import shutil
import sys
import os
def patch_msbuild():
"""VS2010 MSBuild has a ULDI bug that we patch here. See http://goo.gl/Pn8tj.
"""
source_path = os.path.join(os.environ['ProgramFiles(x86)'],
"MSBuild",
"Microsoft.Cpp",
"v4.0",
"Microsoft.CppBuild.targets")
backup_path = source_path + ".backup"
if not os.path.exists(backup_path):
try:
print "Backing up %s..." % source_path
shutil.copyfile(source_path, backup_path)
except IOError:
print "Could not back up %s to %s. Run as Administrator?" % (
source_path, backup_path)
return 1
source = open(source_path).read()
base = ('''<Target Name="GetResolvedLinkObjs" Returns="@(ObjFullPath)" '''
'''DependsOnTargets="$(CommonBuildOnlyTargets);ComputeCLOutputs;'''
'''ResolvedLinkObjs"''')
find = base + '>'
replace = base + ''' Condition="'$(ConfigurationType)'=='StaticLibrary'">'''
result = source.replace(find, replace)
if result != source:
open(source_path, "w").write(result)
print "Patched %s." % source_path
return 0
def main():
return patch_msbuild()
if __name__ == "__main__":
sys.exit(main())
| bsd-3-clause |
jazztpt/edx-platform | lms/djangoapps/psychometrics/psychoanalyze.py | 68 | 11511 | #
# File: psychometrics/psychoanalyze.py
#
# generate psychometrics plots from PsychometricData
from __future__ import division
import datetime
import logging
import json
import math
import numpy as np
from opaque_keys.edx.locator import BlockUsageLocator
from scipy.optimize import curve_fit
from django.conf import settings
from django.db.models import Sum, Max
from psychometrics.models import PsychometricData
from courseware.models import StudentModule
from pytz import UTC
log = logging.getLogger("edx.psychometrics")
#db = "ocwtutor" # for debugging
#db = "default"
db = getattr(settings, 'DATABASE_FOR_PSYCHOMETRICS', 'default')
#-----------------------------------------------------------------------------
# fit functions
def func_2pl(x, a, b):
"""
2-parameter logistic function
"""
D = 1.7
edax = np.exp(D * a * (x - b))
return edax / (1 + edax)
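# Hand-checked example: at x == b the exponent vanishes, so the model yields
# probability 0.5 for any discrimination parameter a:
#
#     >>> func_2pl(0.0, 1.0, 0.0)
#     0.5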
#-----------------------------------------------------------------------------
# statistics class
class StatVar(object):
"""
Simple statistics on floating point numbers: avg, sdv, var, min, max
"""
def __init__(self, unit=1):
self.sum = 0
self.sum2 = 0
self.cnt = 0
self.unit = unit
self.min = None
self.max = None
def add(self, x):
if x is None:
return
if self.min is None:
self.min = x
else:
if x < self.min:
self.min = x
if self.max is None:
self.max = x
else:
if x > self.max:
self.max = x
self.sum += x
self.sum2 += x ** 2
self.cnt += 1
def avg(self):
if not self.cnt:  # cnt starts at 0 (never None); avoid ZeroDivisionError
return 0
return self.sum / 1.0 / self.cnt / self.unit
def var(self):
if not self.cnt:  # cnt starts at 0 (never None); avoid ZeroDivisionError
return 0
return (self.sum2 / 1.0 / self.cnt / (self.unit ** 2)) - (self.avg() ** 2)
def sdv(self):
v = self.var()
if v > 0:
return math.sqrt(v)
else:
return 0
def __str__(self):
return 'cnt=%d, avg=%f, sdv=%f' % (self.cnt, self.avg(), self.sdv())
def __add__(self, x):
self.add(x)
return self
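# Illustrative usage sketch:
#
#     >>> sv = StatVar()
#     >>> for g in [1.0, 2.0, 3.0]:
#     ...     sv += g
#     >>> sv.avg()
#     2.0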
#-----------------------------------------------------------------------------
# histogram generator
def make_histogram(ydata, bins=None):
'''
Generate histogram of ydata using bins provided, or by default bins
from 0 to 100 by 10. bins should be in increasing order.
Returns a dict with keys being bins and values being counts.
'''
if bins is None:
bins = range(0, 100, 10)
nbins = len(bins)
hist = dict(zip(bins, [0] * nbins))
for y in ydata:
for b in bins[::-1]: # in reverse order
if y > b:
hist[b] += 1
break
# hist['bins'] = bins
return hist
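# Hand-traced example with the default bins [0, 10, ..., 90]; each value is
# counted in the highest bin it exceeds:
#
#     >>> make_histogram([5, 15, 18, 95])[10]
#     2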
#-----------------------------------------------------------------------------
def problems_with_psychometric_data(course_id):
'''
Return dict of {problems (location urls): count} for which psychometric data is available.
Does this for a given course_id.
'''
pmdset = PsychometricData.objects.using(db).filter(studentmodule__course_id=course_id)
plist = [p['studentmodule__module_state_key'] for p in pmdset.values('studentmodule__module_state_key').distinct()]
problems = dict(
(
p,
pmdset.filter(
studentmodule__module_state_key=BlockUsageLocator.from_string(p)
).count()
) for p in plist
)
return problems
#-----------------------------------------------------------------------------
def generate_plots_for_problem(problem):
pmdset = PsychometricData.objects.using(db).filter(
studentmodule__module_state_key=BlockUsageLocator.from_string(problem)
)
nstudents = pmdset.count()
msg = ""
plots = []
if nstudents < 2:
msg += "%s nstudents=%d --> skipping, too few" % (problem, nstudents)
return msg, plots
max_grade = pmdset[0].studentmodule.max_grade
agdat = pmdset.aggregate(Sum('attempts'), Max('attempts'))
max_attempts = agdat['attempts__max']
total_attempts = agdat['attempts__sum'] # not used yet
msg += "max attempts = %d" % max_attempts
xdat = range(1, max_attempts + 1)
dataset = {'xdat': xdat}
# compute grade statistics
grades = [pmd.studentmodule.grade for pmd in pmdset]
gsv = StatVar()
for g in grades:
gsv += g
msg += "<br><p><font color='blue'>Grade distribution: %s</font></p>" % gsv
# generate grade histogram
ghist = []
axisopts = """{
xaxes: [{
axisLabel: 'Grade'
}],
yaxes: [{
position: 'left',
axisLabel: 'Count'
}]
}"""
if gsv.max > max_grade:
msg += "<br/><p><font color='red'>Something is wrong: max_grade=%s, but max(grades)=%s</font></p>" % (max_grade, gsv.max)
max_grade = gsv.max
if max_grade > 1:
ghist = make_histogram(grades, np.linspace(0, max_grade, max_grade + 1))
ghist_json = json.dumps(ghist.items())
plot = {'title': "Grade histogram for %s" % problem,
'id': 'histogram',
'info': '',
'data': "var dhist = %s;\n" % ghist_json,
'cmd': '[ {data: dhist, bars: { show: true, align: "center" }} ], %s' % axisopts,
}
plots.append(plot)
else:
msg += "<br/>Not generating histogram: max_grade=%s" % max_grade
# histogram of time differences between checks
# Warning: this is inefficient - doesn't scale to large numbers of students
dtset = [] # time differences in minutes
dtsv = StatVar()
for pmd in pmdset:
try:
checktimes = eval(pmd.checktimes) # update log of attempt timestamps
except:
continue
if len(checktimes) < 2:
continue
ct0 = checktimes[0]
for ct in checktimes[1:]:
dt = (ct - ct0).total_seconds() / 60.0
if dt < 20: # ignore if dt too long
dtset.append(dt)
dtsv += dt
ct0 = ct
if dtsv.cnt > 2:
msg += "<br/><p><font color='brown'>Time differences between checks: %s</font></p>" % dtsv
bins = np.linspace(0, 1.5 * dtsv.sdv(), 30)
dbar = bins[1] - bins[0]
thist = make_histogram(dtset, bins)
thist_json = json.dumps(sorted(thist.items(), key=lambda x: x[0]))
axisopts = """{ xaxes: [{ axisLabel: 'Time (min)'}], yaxes: [{position: 'left',axisLabel: 'Count'}]}"""
plot = {'title': "Histogram of time differences between checks",
'id': 'thistogram',
'info': '',
'data': "var thist = %s;\n" % thist_json,
'cmd': '[ {data: thist, bars: { show: true, align: "center", barWidth:%f }} ], %s' % (dbar, axisopts),
}
plots.append(plot)
# one IRT plot curve for each grade received (TODO: this assumes integer grades)
for grade in range(1, int(max_grade) + 1):
yset = {}
gset = pmdset.filter(studentmodule__grade=grade)
ngset = gset.count()
if ngset == 0:
continue
ydat = []
ylast = 0
for x in xdat:
y = gset.filter(attempts=x).count() / ngset
ydat.append(y + ylast)
ylast = y + ylast
yset['ydat'] = ydat
if len(ydat) > 3: # try to fit to logistic function if enough data points
try:
cfp = curve_fit(func_2pl, xdat, ydat, [1.0, max_attempts / 2.0])
yset['fitparam'] = cfp
yset['fitpts'] = func_2pl(np.array(xdat), *cfp[0])
yset['fiterr'] = [yd - yf for (yd, yf) in zip(ydat, yset['fitpts'])]
fitx = np.linspace(xdat[0], xdat[-1], 100)
yset['fitx'] = fitx
yset['fity'] = func_2pl(np.array(fitx), *cfp[0])
except Exception as err:
log.debug('Error in psychoanalyze curve fitting: %s', err)
dataset['grade_%d' % grade] = yset
axisopts = """{
xaxes: [{
axisLabel: 'Number of Attempts'
}],
yaxes: [{
max:1.0,
position: 'left',
axisLabel: 'Probability of correctness'
}]
}"""
# generate points for flot plot
for grade in range(1, int(max_grade) + 1):
jsdata = ""
jsplots = []
gkey = 'grade_%d' % grade
if gkey in dataset:
yset = dataset[gkey]
jsdata += "var d%d = %s;\n" % (grade, json.dumps(zip(xdat, yset['ydat'])))
jsplots.append('{ data: d%d, lines: { show: false }, points: { show: true}, color: "red" }' % grade)
if 'fitpts' in yset:
jsdata += 'var fit = %s;\n' % (json.dumps(zip(yset['fitx'], yset['fity'])))
jsplots.append('{ data: fit, lines: { show: true }, color: "blue" }')
(a, b) = yset['fitparam'][0]
irtinfo = "(2PL: D=1.7, a=%6.3f, b=%6.3f)" % (a, b)
else:
irtinfo = ""
plots.append({'title': 'IRT Plot for grade=%s %s' % (grade, irtinfo),
'id': "irt%s" % grade,
'info': '',
'data': jsdata,
'cmd': '[%s], %s' % (','.join(jsplots), axisopts),
})
#log.debug('plots = %s' % plots)
return msg, plots
#-----------------------------------------------------------------------------
def make_psychometrics_data_update_handler(course_id, user, module_state_key):
"""
Construct and return a procedure which may be called to update
the PsychometricData instance for the given StudentModule instance.
"""
sm, status = StudentModule.objects.get_or_create(
course_id=course_id,
student=user,
module_state_key=module_state_key,
defaults={'state': '{}', 'module_type': 'problem'},
)
try:
pmd = PsychometricData.objects.using(db).get(studentmodule=sm)
except PsychometricData.DoesNotExist:
pmd = PsychometricData(studentmodule=sm)
def psychometrics_data_update_handler(state):
"""
This function may be called each time a problem is successfully checked
(eg on save_problem_check events in capa_module).
state = instance state (a nice, uniform way to interface - for more future psychometric feature extraction)
"""
try:
state = json.loads(sm.state)
done = state['done']
except:
log.exception("Oops, failed to eval state for %s (state=%s)", sm, sm.state)
return
pmd.done = done
try:
pmd.attempts = state.get('attempts', 0)
except:
log.exception("no attempts for %s (state=%s)", sm, sm.state)
try:
checktimes = eval(pmd.checktimes) # update log of attempt timestamps
except:
checktimes = []
checktimes.append(datetime.datetime.now(UTC))
pmd.checktimes = checktimes
try:
pmd.save()
except:
log.exception("Error in updating psychometrics data for %s", sm)
return psychometrics_data_update_handler
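# Illustrative wiring sketch (hypothetical caller; in edx-platform the
# returned handler is invoked from the capa problem machinery on each
# successful check):
#
#     handler = make_psychometrics_data_update_handler(course_id, user, loc)
#     handler(instance_state)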
| agpl-3.0 |
prutseltje/ansible | lib/ansible/modules/cloud/amazon/aws_region_facts.py | 52 | 3496 | #!/usr/bin/python
# Copyright (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'supported_by': 'community',
'status': ['preview']
}
DOCUMENTATION = '''
module: aws_region_facts
short_description: Gather facts about AWS regions.
description:
- Gather facts about AWS regions.
version_added: '2.5'
author: 'Henrique Rodrigues (github.com/Sodki)'
options:
filters:
description:
- A dict of filters to apply. Each dict item consists of a filter key and a filter value. See
U(https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeRegions.html) for
possible filters. Filter names and values are case sensitive. You can also use underscores
instead of dashes (-) in the filter keys, which will take precedence in case of conflict.
default: {}
extends_documentation_fragment:
- aws
- ec2
requirements: [botocore, boto3]
'''
EXAMPLES = '''
# Note: These examples do not set authentication details, see the AWS Guide for details.
# Gather facts about all regions
- aws_region_facts:
# Gather facts about a single region
- aws_region_facts:
filters:
region-name: eu-west-1
'''
RETURN = '''
regions:
returned: on success
description: >
Regions that match the provided filters. Each element consists of a dict with all the information related
to that region.
type: list
sample: "[{
'endpoint': 'ec2.us-west-1.amazonaws.com',
'region_name': 'us-west-1'
}]"
'''
import traceback
from ansible.module_utils._text import to_native
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ec2 import get_aws_connection_info, ec2_argument_spec, boto3_conn
from ansible.module_utils.ec2 import ansible_dict_to_boto3_filter_list, camel_dict_to_snake_dict, HAS_BOTO3
try:
from botocore.exceptions import ClientError, BotoCoreError
except ImportError:
pass # will be detected by imported HAS_BOTO3
def main():
argument_spec = ec2_argument_spec()
argument_spec.update(
dict(
filters=dict(default={}, type='dict')
)
)
module = AnsibleModule(argument_spec=argument_spec)
if not HAS_BOTO3:
module.fail_json(msg='boto3 required for this module')
region, ec2_url, aws_connect_params = get_aws_connection_info(module, boto3=True)
connection = boto3_conn(
module,
conn_type='client',
resource='ec2',
region=region,
endpoint=ec2_url,
**aws_connect_params
)
# Replace filter key underscores with dashes, for compatibility
sanitized_filters = dict((k.replace('_', '-'), v) for k, v in module.params.get('filters').items())
try:
regions = connection.describe_regions(
Filters=ansible_dict_to_boto3_filter_list(sanitized_filters)
)
except ClientError as e:
module.fail_json(msg="Unable to describe regions: {0}".format(to_native(e)),
exception=traceback.format_exc(),
**camel_dict_to_snake_dict(e.response))
except BotoCoreError as e:
module.fail_json(msg="Unable to describe regions: {0}".format(to_native(e)),
exception=traceback.format_exc())
module.exit_json(regions=[camel_dict_to_snake_dict(r) for r in regions['Regions']])
if __name__ == '__main__':
main()
| gpl-3.0 |